diff --git a/.gitee/PULL_REQUEST_TEMPLATE.md b/.gitee/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index b58f1abbf3aed46febe2e514c270d47409be02de..0000000000000000000000000000000000000000 --- a/.gitee/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,26 +0,0 @@ - - -**What type of PR is this?** -> Uncomment only one ` /kind <>` line, hit enter to put that in a new line, and remove leading whitespaces from that line: -> -> /kind bug -> /kind task -> /kind feature - - -**What does this PR do / why do we need it**: - - -**Which issue(s) this PR fixes**: - -Fixes # - -**Special notes for your reviewers**: - - diff --git a/.github/ISSUE_TEMPLATE/RFC.md b/.github/ISSUE_TEMPLATE/RFC.md deleted file mode 100644 index 7dd17f56b6881c516069c2243992a37a12b63378..0000000000000000000000000000000000000000 --- a/.github/ISSUE_TEMPLATE/RFC.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: RFC -about: Use this template for the new feature or enhancement -labels: kind/feature or kind/enhancement - ---- - -## Background -- Describe the status of the problem you wish to solve -- Attach the relevant issue if have - -## Introduction -- Describe the general solution, design and/or pseudo-code - -## Trail -| No. | Task Description | Related Issue(URL) | -| --- | ---------------- | ------------------ | -| 1 | | | -| 2 | | | diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md deleted file mode 100644 index 2c0260ae2b5d9c9cb0d1ec79fd452eef74d5a1d3..0000000000000000000000000000000000000000 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -name: Bug Report -about: Use this template for reporting a bug -labels: kind/bug - ---- - - - -## Environment -### Hardware Environment(`Ascend`/`GPU`/`CPU`): -> Uncomment only one ` /device <>` line, hit enter to put that in a new line, and remove leading whitespaces from that line: -> -> `/device ascend`
-> `/device gpu`
-> `/device cpu`
- -### Software Environment: -- **MindSpore version (source or binary)**: -- **Python version (e.g., Python 3.7.5)**: -- **OS platform and distribution (e.g., Linux Ubuntu 16.04)**: -- **GCC/Compiler version (if compiled from source)**: - -## Describe the current behavior - - -## Describe the expected behavior - - -## Steps to reproduce the issue -1. -2. -3. - -## Related log / screenshot - - -## Special notes for this issue - - diff --git a/.github/ISSUE_TEMPLATE/task-tracking.md b/.github/ISSUE_TEMPLATE/task-tracking.md deleted file mode 100644 index f2d3d23ae7973a609503119d0c9c4f560bb32c5c..0000000000000000000000000000000000000000 --- a/.github/ISSUE_TEMPLATE/task-tracking.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: Task -about: Use this template for task tracking -labels: kind/task - ---- - -## Task Description - - -## Task Goal - - -## Sub Task -| No. | Task Description | Issue ID | -| --- | ---------------- | -------- | -| 1 | | | -| 2 | | | - diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 96800b55f7f2fb3a3bdc751e74403b26180e09d7..0000000000000000000000000000000000000000 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,24 +0,0 @@ - - -**What type of PR is this?** -> Uncomment only one ` /kind <>` line, hit enter to put that in a new line, and remove leading whitespaces from that line: -> -> `/kind bug`
-> `/kind task`
-> `/kind feature`
- -**What does this PR do / why do we need it**: - - -**Which issue(s) this PR fixes**: - -Fixes # - -**Special notes for your reviewers**: - diff --git a/.gitignore b/.gitignore deleted file mode 100644 index e6af4aa9f0e2a8cd27dee2d2781c7865d4b0001a..0000000000000000000000000000000000000000 --- a/.gitignore +++ /dev/null @@ -1,21 +0,0 @@ -# Built html files -docs/api_cpp/build_en -docs/api_cpp/build_zh_cn -docs/api_java/build_en -docs/api_java/build_zh_cn -docs/api_python/build_en -docs/api_python/build_zh_cn -docs/faq/build_en -docs/faq/build_zh_cn -docs/note/build_en -docs/note/build_zh_cn -docs/programming_guide/build_en -docs/programming_guide/build_zh_cn -tutorials/inference/build_en -tutorials/inference/build_zh_cn -tutorials/training/build_en -tutorials/training/build_zh_cn - -# Workspace -.idea/ -.vscode/ diff --git a/3RBY0/Preparation.md b/3RBY0/Preparation.md new file mode 100644 index 0000000000000000000000000000000000000000..f5f03836df535a39287fec5631f655972e0accf4 --- /dev/null +++ b/3RBY0/Preparation.md @@ -0,0 +1,119 @@ +# Preparation + +[![img](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/migration_guide/source_zh_cn/preparation.md) + +## Overview + +Please install MindSpore and master the knowledge of machine learning before network development or migration. Users can learn about related knowledge from the book "Deep Learning and MindSpore Practice", and the usage of MindSpore by visiting [MindSpore official website](https://www.mindspore.cn/en). + +## Installing MindSpore + +MindSpore can be installed and run under the Ascend, CPU or GPU context, and supports EulerOS-arm, CentOS-arm, CentOS-x86, Ubuntu-arm, Ubuntu-x86, Windows-X86 operating systems. Please visit [MindSpore Installation Guide](https://www.mindspore.cn/install/en) to download MindSpore installation package, and follow the guide to install MindSpore. 
+### Installing MindSpore by pip + +Please download the installation package `mindspore_{device}-{version}-cp37-cp37m-linux_{arch}.whl`, and install MindSpore with pip command. + +``` +pip install mindspore_{device}-{version}-cp37-cp37m-linux_{arch}.whl +``` + +If another version of MindSpore exists in your work space, please uninstall it first and then install the downloaded installation package. + +### Installing MindSpore with Source Code + +Visit [MindSpore repository](https://github.com/mindspore-ai/mindspore), and download MindSpore source code with `git clone https://gitee.com/mindspore/mindspore.git`, then run the `build.sh` in the root directory of source code to customize MindSpore service with a variety of arguments. Some of the common arguments for compiling are listed as follows: + +``` +cd mindspore +bash build.sh -e cpu -j{thread_num} # cpu context +bash build.sh -e ascend -j{thread_num} # Ascend context +bash build.sh -e gpu -j{thread_num} # gpu context +``` + +When the compilation succeeds, the MindSpore installation package will be put in the `output` directory. **Install MindSpore by pip** and then **add current directory to PYTHONPATH**. + +> The advantage of installation by pip is that it is simple and easy to operate. +> +> When installing MindSpore with source code, users can customize MindSpore services and switch to any commit_id to compile and use MindSpore. + +### Set Environment Variables (only for Ascend Context) + +``` +# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. 
+export GLOG_v=2 + +# Conda environmental options +LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package + +# lib libraries that the run package depends on +export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} + +# Environment variables that must be configured +export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path +export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path +export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path +export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on +``` + +### MindSpore Verification + +If the following commands are executed and exit successfully, the installation is successful. + +For CPU Context: + +``` +import numpy as np +from mindspore import Tensor +import mindspore.ops as ops +import mindspore.context as context + +context.set_context(device_target="CPU") +x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) +y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) +print(ops.add(x, y)) +``` + +For Ascend Context: + +``` +import numpy as np +from mindspore import Tensor +import mindspore.ops as ops +import mindspore.context as context + +context.set_context(device_target="Ascend") +x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) +y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) +print(ops.add(x, y)) +``` + +For GPU Context: + +``` +import numpy as np +from mindspore import Tensor +import mindspore.ops as ops +import mindspore.context as context + +context.set_context(device_target="GPU") +x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) +y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) +print(ops.add(x, y)) +``` + +## Preparation of Knowledge + +### MindSpore Programming Guide + 
+Please refer to the [MindSpore Tutorial](https://www.mindspore.cn/tutorial/training/en/master/index.html) to learn how to use MindSpore for training, debugging, tuning, and inference, the [MindSpore Programming Guide](https://www.mindspore.cn/doc/programming_guide/en/master/index.html) to understand the basic modules and common programming methods, and the [MindSpore Python API](https://www.mindspore.cn/doc/api_python/en/master/index.html) to learn more about the relevant information of the MindSpore interfaces. + +### ModelZoo and MindSpore Hub + +[ModelZoo](https://github.com/mindspore-ai/mindspore/tree/master/model_zoo) is a highly organized model market provided by MindSpore and the community, providing deeply optimized models, facilitating all the developers in the ecosystem to develop personalized models based on the models in ModelZoo. Up to now, ModelZoo provides mainstream models in many fields such as computer vision, natural language processing, speech recognition, and recommendation systems. + +[MindSpore Hub](https://www.mindspore.cn/resources/hub/en) is a storage platform for pre-trained models provided by MindSpore official or third-party developers. It provides application developers with easy-to-use model loading and fine-tuning APIs, allowing users to perform inference or fine-tuning based on pre-trained models and deploy for their own applications. Users can also follow the specified steps when they want to [publish model](https://mindspore.cn/tutorial/training/en/master/use/publish_model.html) to the MindSpore Hub for other users to download and develop. + +### Training on the Cloud + +ModelArts is a one-stop development platform for AI developers provided by HUAWEI CLOUD. It integrates the Ascend AI processor resource pool. Users can experience MindSpore on this platform. 
Please refer to [Use MindSpore on the Cloud](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/use_on_the_cloud.html) and [AI Development Platform ModelArts](https://support.huaweicloud.com/intl/en-us/wtsnew-modelarts/index.html) for further information. \ No newline at end of file diff --git a/CONTRIBUTING_DOC.md b/CONTRIBUTING_DOC.md deleted file mode 100644 index 871118ae61b803f5f294a13df811f43348140061..0000000000000000000000000000000000000000 --- a/CONTRIBUTING_DOC.md +++ /dev/null @@ -1,191 +0,0 @@ -# Contributing Documents - -[查看中文](./CONTRIBUTING_DOC_CN.md) - -You are welcome to contribute MindSpore documents. Documents that meet requirements will be displayed on the [MindSpore website](https://www.mindspore.cn/en). - - - -- [Contributing Documents](#contributing-documents) - - [Document](#document) - - [Updating or Adding a Document](#updating-or-adding-a-document) - - [Updating a Document](#updating-a-document) - - [Adding a Document](#adding-a-document) - - [Checking a Document](#checking-a-document) - - [Confirming the Content](#confirming-the-content) - - [API](#api) - - [Updating or Adding an API](#updating or adding an-api) - - [Updating a Python API](#updating-a-python-api) - - [Adding a Python API](#adding-a-python-api) - - [Checking the Python API](#checking-the-python-api) - - [Confirming the Content](#confirming-the-content-1) - - [Image](#image) - - [Updating or Adding an Image](#updating-or-adding-an-image) - - [Image Citation](#image-citation) - - [Confirming the Content](#confirming-the-content-2) - - - -This project supports contribution documents in markdown and reStructuredText formats. You can create the `.md` or `.rst` files or modify existing files. - -## Document - -MindSpore docs repository provides [Document Writing Specifications](https://gitee.com/mindspore/docs/wikis/Document%20Writing%20Specifications?sort_id=3379825) for your reference. 
- -### Updating or Adding a Document - -#### Updating a Document - -If you want to update an existing document, click `View source on Gitee` (as shown in the following figure) on the top of the page to go to the source file. Modify the document and commit changes to a PR to make the contribution. - -![View Source on Gitee](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_source.png) - -#### Adding a Document - -If you need to add a document, create a markdown or reStructuredText file in a proper directory. For details about the directory structure of the MindSpore docs repository, see [README](https://gitee.com/mindspore/docs/blob/master/README.md#directory-structure-description). - -1. Create a document. - - Requirements for the new document are as follows: - - - Storage path: A Chinese document is stored in the `source_zh_cn` directory and an English document is stored in the `source_en` directory. - - Document name: A document name must consist of lowercase letters or underscores (_). - -2. Add the new document to the web page. - - After the writing is complete, add the new document to the web page directory. - - Take a training tutorial as an example. Find the [`index.rst`](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_en/index.rst) file in the `source_en` directory. This file corresponds to the organization structure of the training tutorial web page. - - Add the new document to the corresponding category. You can also create a category before adding the document. Take **Implementing an Image Classification Application** as an example. Save the document in the `quick_start` directory and name it as `quick_start.md`. Add `quick_start/quick_start` to the Quick Start category, as shown below. - - ```rst - .. 
toctree:: - :glob: - :maxdepth: 1 - :caption: Quick Start - :hidden: - - quick_start/quick_start - quick_start/linear_regression - quick_start/quick_video - ``` - -After completing the preceding operations, commit to a PR to make contribution. - -### Checking a Document - -After committing to a PR, ensure that the `mindspore-cla/yes` and `ci-pipeline-passed` labels exist and the `stat/need-squash` label does not exist. The PR can be merged only after being approved by the committer. - -- `mindspore-cla/yes`: indicates that the CLA has been properly signed. If the CLA is not signed, the system automatically adds the `mindspore-cla/no` label. After the CLA is signed, add the comment `/check-cla` under the PR. Then the `mindspore-cla/yes` label is automatically added. -- `ci-pipeline-passed`: indicates that the MindSpore CI check is passed. When a PR is created, MindSpore CI automatically starts the check. If the check is passed, the system automatically adds the label. If the check fails, the system automatically adds the `ci-pipeline-failed` label. After problems are resolved, add the comment `/retest` under the PR. If the check is successful, the `ci-pipeline-passed` label is automatically added. -- `stat/need-squash`: indicates that the PR has multiple commits. The label is automatically deleted only after the commits are merged into one commit through the `git rebase` operation. - -MindSpore CI uses check tools such as Markdownlint, Pylint, Shellcheck, Cppcheck, Cpplint, and Tab. - -Markdownlint is a tool for checking the markdown file format. You can use the tool to check the format based on the configured rules and new rules. MindSpore CI modifies the following rules based on the default configuration: - -- MD007 (unordered list indentation): The **indent** parameter is set to **4**, indicating that all content in the unordered list needs to be indented using four spaces. 
-- MD009 (spaces at the line end): The **br_spaces** parameter is set to **2**, indicating that there can be 0 or 2 spaces at the end of a line. -- MD029 (sequence numbers of an ordered list): The **style** parameter is set to **ordered**, indicating that the sequence numbers of the ordered list are in ascending order. - -For details, see [RULES](https://github.com/markdownlint/markdownlint/blob/master/docs/RULES.md). - -### Confirming the Content - -On the next day after the PR is merged, you can view the new content on the MindSpore website. A new link will be created for the new document. - -By default, tutorials and documents of the latest version are displayed on the official website. To view the newly merged content, switch to master from the drop-down list. - -![master_doc_en](./resource/_static/master_doc_en.png) - -Take **Implementing an Image Classification Application** as an example. The document link is . - -## API - -MindSpore docs repository provides [API Comment Specifications](https://gitee.com/mindspore/docs/wikis/MindSpore%20API%20Comment%20Specifications?sort_id=3379820) for your reference. - -### Updating or Adding an API - -#### Updating a Python API - -If you want to update an existing API, find the source file of the API in the MindSpore code. - -If you do not know the file link, click **source** and find the file link by referring to the content following `_modules` in the link. - -Take Tensor as an example. After clicking **source**, you can obtain the link . Then, the source file link is . - -![API Source](./resource/_static/api_source.png) - -Modify the comments in the source file and commit changes to a PR to make the contribution. - -#### Adding a Python API - -If you want to add an API, check whether the API has been added to an existing module. For details about the existing modules, see . 
- -- If the API belongs to an existing module, comment out the API in the MindSpore code repository based on the requirements and add the API to \_\_all\_\_ of the corresponding module. Ensure that the API can be used in importing **mindspore.*Module name*.*API name*** format. - - If the API belongs to the following modules, the API list of the MindSpore docs repository needs to be updated. Please add APIs in alphabetical order. - - - `mindspore.dataset`: [中文](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.rst) | [English](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.dataset.rst) - - `mindspore.dataset.text`: [中文](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.text.rst) | [English](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.dataset.text.rst) - - `mindspore.dataset.transforms`: [中文](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.transforms.rst) | [English](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.dataset.transforms.rst) - - `mindspore.dataset.vision`: [中文](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.vision.rst) | [English](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.dataset.vision.rst) - - `mindspore.nn`: [中文](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.nn.rst) | [English](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.nn.rst) - - `mindspore.nn.probability`: [中文](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.nn.probability.rst) | 
[English](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.nn.probability.rst) - - `mindspore.ops`: [中文](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.ops.rst) | [English](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.ops.rst) - - `mindspore.ops.operations`: [中文](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/operations.rst) | [English](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/operations.rst) - -- If the API does not belong to the existing module, add an API project file of the MindSpore docs repository. Please add modules to the [directory structure](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/index.rst) in alphabetical order. To add the API of the `mindspore.context` module, you need to create the [`mindspore.context.rst`](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.context.rst) file in the `docs/docs/api_python/source_en/mindspore` directory and add the file to the directory structure. - - ```rst - .. toctree:: - :maxdepth: 1 - :caption: MindSpore Python API - - ... - mindspore/mindspore.context - ... - ``` - -After completing the preceding modification, commit to a PR to make contribution. - -### Checking the Python API - -After committing to a PR, ensure that the `mindspore-cla/yes` and `ci-pipeline-passed` labels exist and the `stat/need-squash` label does not exist. The PR can be merged only after being approved by the committer. - -For details about each label, see [Checking a Document](#checking-a-document). - -MindSpore CI uses the Pylint check tool. - -### Confirming the Content - -On the next day after the PR is merged, you can view the new content on the [MindSpore Python API](https://www.mindspore.cn/doc/api_python/en/master/index.html) page. 
- -By default, APIs of the latest version are displayed. To view the newly merged content, switch to the master version. - -![master_api_en](./resource/_static/master_api_en.png) - -## Image - -The images in the document are mainly divided into program flowcharts, configuration flowcharts, functional structure diagrams and so on. - -For specific image requirements and specifications, please refer to [Image Specifications](https://gitee.com/mindspore/docs/wikis/%E4%BD%9C%E5%9B%BE%E8%A7%84%E8%8C%83?sort_id=3498531) provided by MindSpore docs. - -### Updating or Adding an Image - -If you want to update an existing image or adding a new image, click ![View Source on Gitee](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_source.png) on the top of the page to go to the source file. Usually the images will be stored in the directory ./images. You only need to put the new images into this directory, or replace the modified images with the original ones, and commit changes to a PR to make the contribution. - -> Please submit the original image to Gitee at the same time, and put it in the same path as the image for subsequent modification. - -### Image Citation - -The format of the image citation is: \!\[image name] (the path where the image is located). For details, please refer to [Markdown Image Citation Requirements](https://gitee.com/mindspore/docs/wikis/Document%20Writing%20Specifications?sort_id=3379825#image) and [Notebook Image Citation Requirements](https://gitee.com/mindspore/docs/wikis/Notebook%E5%86%99%E4%BD%9C%E8%A6%81%E6%B1%82?sort_id=3462614). - -### Confirming the Content - -On the next day after the PR is merged, you can view the new content on the MindSpore website. The new image will shown in the document. - -By default, tutorials and documents of the latest version are displayed on the official website. To view the newly merged content, switch to master from the drop-down list. 
- -![master_doc_en](./resource/_static/master_doc_en.png) diff --git a/CONTRIBUTING_DOC_CN.md b/CONTRIBUTING_DOC_CN.md deleted file mode 100644 index d3d35a0fecea44f6013c3a51debea81808ff5468..0000000000000000000000000000000000000000 --- a/CONTRIBUTING_DOC_CN.md +++ /dev/null @@ -1,191 +0,0 @@ -# 贡献文档 - -[View English](./CONTRIBUTING_DOC.md) - -欢迎参与MindSpore文档贡献,符合要求的文档将会在[MindSpore官网](http://www.mindspore.cn)中呈现。 - - - -- [贡献文档](#贡献文档) - - [文档](#文档) - - [更新/新增文档](#更新新增文档) - - [更新文档](#更新文档) - - [新增文档](#新增文档) - - [检查文档](#检查文档) - - [确认内容](#确认内容) - - [API](#api) - - [更新/新增API](#更新新增api) - - [更新Python API](#更新python-api) - - [新增Python API](#新增python-api) - - [检查Python API](#检查python-api) - - [确认内容](#确认内容-1) - - [作图](#作图) - - [更新/新增图片](#更新新增图片) - - [引用图片](#引用图片) - - [确认内容](#确认内容-2) - - - -本项目支持Markdown和reStructuredText格式的内容贡献,对应地可创建`.md`和`.rst`为后缀的文档或修改已存在的文档。 - -## 文档 - -MindSpore docs仓提供了[文档写作要求](https://gitee.com/mindspore/docs/wikis/文档写作要求?sort_id=3363974)供写作时参考。 - -### 更新/新增文档 - -#### 更新文档 - -如果您发现现有文档需要刷新,可点击页面上方的“View Source on Gitee”(如下图所示),跳转至源文件。修改该文件,并提交PR即可参与贡献。 - -![View Source on Gitee](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_source.png) - -#### 新增文档 - -如果您需要新增文档,请在合适目录新建Markdown或reStructuredText文件,MindSpore docs仓目录结构说明可参考[README](https://gitee.com/mindspore/docs/blob/master/README_CN.md#目录结构说明)。 - -1. 新建文件 - - 新建文件要求如下: - - - 存放路径:中文文档需新建在`source_zh_cn`目录下,英文文档需新建在`source_en`目录下。 - - 文件名:文件名需由英文小写或下划线组成。 - -2. 将新建文件添加到网页 - - 完成写作后,需在网页目录中添加新建的文件。 - - 以训练教程为例,先在`source_zh_cn`目录下找到[`index.rst`](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/index.rst)文件,该文件即对应训练教程网页的组织结构。 - - 在对应的分类中添加新建的文件,也可新建分类后再添加。以《实现一个图片分类应用》文档为例,该文档存放在`quick_start`目录,命名为`quick_start.md`,需将`quick_start/quick_start`添加至“快速入门”分类下,如下所示。 - - ```rst - .. 
toctree:: - :glob: - :maxdepth: 1 - :caption: 快速入门 - :hidden: - - quick_start/quick_start - quick_start/linear_regression - quick_start/quick_video - ``` - -完成上述操作后,并提交PR即可参与贡献。 - -### 检查文档 - -提交PR后,需要确保有`mindspore-cla/yes`和`ci-pipeline-passed`标签,没有`stat/need-squash`标签,并经过Committer审核后方可合入。 - -- `mindspore-cla/yes`:表示已正确签署CLA。如果已签署,系统会自动添加该标签。如果没有签署,系统会自动添加`mindspore-cla/no`标签,签署完后在PR下添加评论`/check-cla`,即可自动添加`mindspore-cla/yes`标签。 -- `ci-pipeline-passed`:表示已通过MindSpore CI检查。创建PR时,MindSpore CI会自动启动检查,如果已检查通过,系统会自动添加该标签。如果没有检查通过,系统会自动添加`ci-pipeline-failed`标签,问题修改完后在PR下添加评论`/retest`,检查通过即可自动添加`ci-pipeline-passed`标签。 -- `stat/need-squash`:表示该PR存在多次提交记录,需通过`git rebase`操作整合成一次提交记录后,才可自动删除该标签。 - -MindSpore CI采用了Markdownlint、Pylint、Shellcheck、Cppcheck、Cpplint、Tab等检查工具。 - -其中,Markdownlint是一款检查Markdown文件格式正确性的工具,可以根据设置的规则以及创建的新规则对Markdown文件进行全面的检查。MindSpore CI在默认配置的基础上,修改了如下规则: - -- MD007(无序列表缩进)规则将参数indent设置为4,表示无序列表内的所有内容需缩进4格写作。 -- MD009(行尾空格)规则将参数br_spaces设置为2,表示行尾可以有0个或2个空格。 -- MD029(有序列表的前缀序号)规则将参数style设置为ordered,表示有序列表的前缀序号需按顺序递增。 - -更为详细规则信息请参考[RULES](https://github.com/markdownlint/markdownlint/blob/master/docs/RULES.md)。 - -### 确认内容 - -PR合入后次日,即可在MindSpore官网中查看到新增内容,新增文档将新建链接。 - -官网各教程和文档默认选中最新发布版本,如需查看新合入的内容,需在下拉列表中切换至master。 - -![master_doc](./resource/_static/master_doc.png) - -以《实现一个图片分类应用》文档为例,该文档的链接为。 - -## API - -MindSpore docs仓提供了[API注释写作要求](https://gitee.com/mindspore/docs/wikis/API注释写作要求?sort_id=3364069)供写作时参考。 - -### 更新/新增API - -#### 更新Python API - -如果您发现现有API需要刷新,请先在MindSpore代码中找到该接口所在的源文件。 - -如果不清楚所在文件,可点击“source”,并参考跳转的链接地址中`_modules`后的内容,找到该文件。 - -以Tensor为例,点击“source”后得到地址,源文件地址即为。 - -![API Source](./resource/_static/api_source.png) - -修改源文件的注释,并提交PR即可参与贡献。 - -#### 新增Python API - -如果您需要新增API,请先确认是否在已有模块中添加,已有模块列表请查看。 - -- 如果属于已有模块,在MindSpore代码仓按注释要求完成注释内容,并将该API添加至对应模块的\_\_all\_\_中,确保能通过导入“mindspore.模块名.API名”使用该API。 - - 如果属于以下模块,还需更新MindSpore docs仓的接口列表,请按字母序添加API。 - - - 
`mindspore.dataset`:[中文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.rst) | [英文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.dataset.rst) - - `mindspore.dataset.text`:[中文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.text.rst) | [英文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.dataset.text.rst) - - `mindspore.dataset.transforms`:[中文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.transforms.rst) | [英文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.dataset.transforms.rst) - - `mindspore.dataset.vision`:[中文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.vision.rst) | [英文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.dataset.vision.rst) - - `mindspore.nn`:[中文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.nn.rst) | [英文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.nn.rst) - - `mindspore.nn.probability`:[中文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.nn.probability.rst) | [英文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.nn.probability.rst) - - `mindspore.ops`:[中文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.ops.rst) | [英文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/mindspore.ops.rst) - - `mindspore.ops.operations`:[中文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/operations.rst) | 
[英文页面列表](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_en/mindspore/operations.rst) - -- 如果不属于已有模块,需新增MindSpore docs仓的接口工程文件,并按字母序添加模块到[目录结构](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/index.rst)中。如需新增`mindspore.context`模块接口,需在`docs/docs/api_python/source_zh_cn/mindspore`目录下新增[`mindspore.context.rst`](https://gitee.com/mindspore/docs/blob/master/docs/api_python/source_zh_cn/mindspore/mindspore.context.rst)文件,并将其添加到目录结构中。同时,在`docs/docs/api_python/source_en/mindspore`目录下做相应修改,即可生成英文页面内容。 - - ```rst - .. toctree:: - :maxdepth: 1 - :caption: MindSpore Python API - - ... - mindspore/mindspore.context - ... - ``` - -完成上述修改,并提交PR即可参与贡献。 - -### 检查Python API - -提交PR后,需要确保有`mindspore-cla/yes`和`ci-pipeline-passed`标签,没有`stat/need-squash`标签,并经过Committer审核后方可合入。 - -各标签的详细说明可参见[检查文档](#检查文档)中的相关内容。 - -MindSpore CI采用了Pylint检查工具。 - -### 确认内容 - -PR合入后次日,即可在MindSpore官网[Python API页面](https://www.mindspore.cn/doc/api_python/zh-CN/master/index.html)中查看到新增内容。 - -官网API默认展示最新发布版本,如需查看新合入的内容,如下图所示切换至master分支版本。 - -![master_api](./resource/_static/master_api.png) - -## 作图 - -文档中的图片主要分为程序流程图、配置流程图和功能结构图等。 - -具体的作图要求及规范,请参考MindSpore docs仓提供的[作图规范](https://gitee.com/mindspore/docs/wikis/%E4%BD%9C%E5%9B%BE%E8%A7%84%E8%8C%83?sort_id=3498531)。 - -### 更新/新增图片 - -如果您发现现有文档中的图片需要更新/新增,可点击页面上方的![View Source on Gitee](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_source.png),跳转至源文件。通常图片会存放在./images目录中,只需将需要新增的图片放进这个目录,或将修改后的图片替换原图,并提交PR即可参与贡献。 - -> 请同时把原图一并提交到Gitee,放在与存放图片相同的路径下,方便后续修改。 - -### 图片引用 - -图片引用的格式为:\!\[图片名称](图片所在目录)。详情请参考[Markdown图片引用要求](https://gitee.com/mindspore/docs/wikis/%E6%96%87%E6%A1%A3%E5%86%99%E4%BD%9C%E8%A6%81%E6%B1%82?sort_id=3363974#%E5%9B%BE%E7%89%87)和[Notebook图片引用要求](https://gitee.com/mindspore/docs/wikis/Notebook%E5%86%99%E4%BD%9C%E8%A6%81%E6%B1%82?sort_id=3462614)。 - -### 确认内容 - -PR合入后次日,即可在MindSpore官网中查看到新增内容,更新/新增图片将会出现在文档中。 - -官网各教程和文档默认选中最新发布版本,如需查看新合入的内容,需在下拉列表中切换至master。 - 
-![master_doc](./resource/_static/master_doc.png) diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64..0000000000000000000000000000000000000000 --- a/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. 
Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative 
Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/LICENSE-CC-BY-4.0 b/LICENSE-CC-BY-4.0 deleted file mode 100644 index 2f244ac814036ecd9ba9f69782e89ce6b1dca9eb..0000000000000000000000000000000000000000 --- a/LICENSE-CC-BY-4.0 +++ /dev/null @@ -1,395 +0,0 @@ -Attribution 4.0 International - -======================================================================= - -Creative Commons Corporation ("Creative Commons") is not a law firm and -does not provide legal services or legal advice. Distribution of -Creative Commons public licenses does not create a lawyer-client or -other relationship. Creative Commons makes its licenses and related -information available on an "as-is" basis. Creative Commons gives no -warranties regarding its licenses, any material licensed under their -terms and conditions, or any related information. 
Creative Commons -disclaims all liability for damages resulting from their use to the -fullest extent possible. - -Using Creative Commons Public Licenses - -Creative Commons public licenses provide a standard set of terms and -conditions that creators and other rights holders may use to share -original works of authorship and other material subject to copyright -and certain other rights specified in the public license below. The -following considerations are for informational purposes only, are not -exhaustive, and do not form part of our licenses. - - Considerations for licensors: Our public licenses are - intended for use by those authorized to give the public - permission to use material in ways otherwise restricted by - copyright and certain other rights. Our licenses are - irrevocable. Licensors should read and understand the terms - and conditions of the license they choose before applying it. - Licensors should also secure all rights necessary before - applying our licenses so that the public can reuse the - material as expected. Licensors should clearly mark any - material not subject to the license. This includes other CC- - licensed material, or material used under an exception or - limitation to copyright. More considerations for licensors: - wiki.creativecommons.org/Considerations_for_licensors - - Considerations for the public: By using one of our public - licenses, a licensor grants the public permission to use the - licensed material under specified terms and conditions. If - the licensor's permission is not necessary for any reason--for - example, because of any applicable exception or limitation to - copyright--then that use is not regulated by the license. Our - licenses grant only permissions under copyright and certain - other rights that a licensor has authority to grant. Use of - the licensed material may still be restricted for other - reasons, including because others have copyright or other - rights in the material. 
A licensor may make special requests, - such as asking that all changes be marked or described. - Although not required by our licenses, you are encouraged to - respect those requests where reasonable. More_considerations - for the public: - wiki.creativecommons.org/Considerations_for_licensees - -======================================================================= - -Creative Commons Attribution 4.0 International Public License - -By exercising the Licensed Rights (defined below), You accept and agree -to be bound by the terms and conditions of this Creative Commons -Attribution 4.0 International Public License ("Public License"). To the -extent this Public License may be interpreted as a contract, You are -granted the Licensed Rights in consideration of Your acceptance of -these terms and conditions, and the Licensor grants You such rights in -consideration of benefits the Licensor receives from making the -Licensed Material available under these terms and conditions. - - -Section 1 -- Definitions. - - a. Adapted Material means material subject to Copyright and Similar - Rights that is derived from or based upon the Licensed Material - and in which the Licensed Material is translated, altered, - arranged, transformed, or otherwise modified in a manner requiring - permission under the Copyright and Similar Rights held by the - Licensor. For purposes of this Public License, where the Licensed - Material is a musical work, performance, or sound recording, - Adapted Material is always produced where the Licensed Material is - synched in timed relation with a moving image. - - b. Adapter's License means the license You apply to Your Copyright - and Similar Rights in Your contributions to Adapted Material in - accordance with the terms and conditions of this Public License. - - c. 
Copyright and Similar Rights means copyright and/or similar rights - closely related to copyright including, without limitation, - performance, broadcast, sound recording, and Sui Generis Database - Rights, without regard to how the rights are labeled or - categorized. For purposes of this Public License, the rights - specified in Section 2(b)(1)-(2) are not Copyright and Similar - Rights. - - d. Effective Technological Measures means those measures that, in the - absence of proper authority, may not be circumvented under laws - fulfilling obligations under Article 11 of the WIPO Copyright - Treaty adopted on December 20, 1996, and/or similar international - agreements. - - e. Exceptions and Limitations means fair use, fair dealing, and/or - any other exception or limitation to Copyright and Similar Rights - that applies to Your use of the Licensed Material. - - f. Licensed Material means the artistic or literary work, database, - or other material to which the Licensor applied this Public - License. - - g. Licensed Rights means the rights granted to You subject to the - terms and conditions of this Public License, which are limited to - all Copyright and Similar Rights that apply to Your use of the - Licensed Material and that the Licensor has authority to license. - - h. Licensor means the individual(s) or entity(ies) granting rights - under this Public License. - - i. Share means to provide material to the public by any means or - process that requires permission under the Licensed Rights, such - as reproduction, public display, public performance, distribution, - dissemination, communication, or importation, and to make material - available to the public including in ways that members of the - public may access the material from a place and at a time - individually chosen by them. - - j. 
Sui Generis Database Rights means rights other than copyright - resulting from Directive 96/9/EC of the European Parliament and of - the Council of 11 March 1996 on the legal protection of databases, - as amended and/or succeeded, as well as other essentially - equivalent rights anywhere in the world. - - k. You means the individual or entity exercising the Licensed Rights - under this Public License. Your has a corresponding meaning. - - -Section 2 -- Scope. - - a. License grant. - - 1. Subject to the terms and conditions of this Public License, - the Licensor hereby grants You a worldwide, royalty-free, - non-sublicensable, non-exclusive, irrevocable license to - exercise the Licensed Rights in the Licensed Material to: - - a. reproduce and Share the Licensed Material, in whole or - in part; and - - b. produce, reproduce, and Share Adapted Material. - - 2. Exceptions and Limitations. For the avoidance of doubt, where - Exceptions and Limitations apply to Your use, this Public - License does not apply, and You do not need to comply with - its terms and conditions. - - 3. Term. The term of this Public License is specified in Section - 6(a). - - 4. Media and formats; technical modifications allowed. The - Licensor authorizes You to exercise the Licensed Rights in - all media and formats whether now known or hereafter created, - and to make technical modifications necessary to do so. The - Licensor waives and/or agrees not to assert any right or - authority to forbid You from making technical modifications - necessary to exercise the Licensed Rights, including - technical modifications necessary to circumvent Effective - Technological Measures. For purposes of this Public License, - simply making modifications authorized by this Section 2(a) - (4) never produces Adapted Material. - - 5. Downstream recipients. - - a. Offer from the Licensor -- Licensed Material. 
Every - recipient of the Licensed Material automatically - receives an offer from the Licensor to exercise the - Licensed Rights under the terms and conditions of this - Public License. - - b. No downstream restrictions. You may not offer or impose - any additional or different terms or conditions on, or - apply any Effective Technological Measures to, the - Licensed Material if doing so restricts exercise of the - Licensed Rights by any recipient of the Licensed - Material. - - 6. No endorsement. Nothing in this Public License constitutes or - may be construed as permission to assert or imply that You - are, or that Your use of the Licensed Material is, connected - with, or sponsored, endorsed, or granted official status by, - the Licensor or others designated to receive attribution as - provided in Section 3(a)(1)(A)(i). - - b. Other rights. - - 1. Moral rights, such as the right of integrity, are not - licensed under this Public License, nor are publicity, - privacy, and/or other similar personality rights; however, to - the extent possible, the Licensor waives and/or agrees not to - assert any such rights held by the Licensor to the limited - extent necessary to allow You to exercise the Licensed - Rights, but not otherwise. - - 2. Patent and trademark rights are not licensed under this - Public License. - - 3. To the extent possible, the Licensor waives any right to - collect royalties from You for the exercise of the Licensed - Rights, whether directly or through a collecting society - under any voluntary or waivable statutory or compulsory - licensing scheme. In all other cases the Licensor expressly - reserves any right to collect such royalties. - - -Section 3 -- License Conditions. - -Your exercise of the Licensed Rights is expressly made subject to the -following conditions. - - a. Attribution. - - 1. If You Share the Licensed Material (including in modified - form), You must: - - a. 
retain the following if it is supplied by the Licensor - with the Licensed Material: - - i. identification of the creator(s) of the Licensed - Material and any others designated to receive - attribution, in any reasonable manner requested by - the Licensor (including by pseudonym if - designated); - - ii. a copyright notice; - - iii. a notice that refers to this Public License; - - iv. a notice that refers to the disclaimer of - warranties; - - v. a URI or hyperlink to the Licensed Material to the - extent reasonably practicable; - - b. indicate if You modified the Licensed Material and - retain an indication of any previous modifications; and - - c. indicate the Licensed Material is licensed under this - Public License, and include the text of, or the URI or - hyperlink to, this Public License. - - 2. You may satisfy the conditions in Section 3(a)(1) in any - reasonable manner based on the medium, means, and context in - which You Share the Licensed Material. For example, it may be - reasonable to satisfy the conditions by providing a URI or - hyperlink to a resource that includes the required - information. - - 3. If requested by the Licensor, You must remove any of the - information required by Section 3(a)(1)(A) to the extent - reasonably practicable. - - 4. If You Share Adapted Material You produce, the Adapter's - License You apply must not prevent recipients of the Adapted - Material from complying with this Public License. - - -Section 4 -- Sui Generis Database Rights. - -Where the Licensed Rights include Sui Generis Database Rights that -apply to Your use of the Licensed Material: - - a. for the avoidance of doubt, Section 2(a)(1) grants You the right - to extract, reuse, reproduce, and Share all or a substantial - portion of the contents of the database; - - b. 
if You include all or a substantial portion of the database - contents in a database in which You have Sui Generis Database - Rights, then the database in which You have Sui Generis Database - Rights (but not its individual contents) is Adapted Material; and - - c. You must comply with the conditions in Section 3(a) if You Share - all or a substantial portion of the contents of the database. - -For the avoidance of doubt, this Section 4 supplements and does not -replace Your obligations under this Public License where the Licensed -Rights include other Copyright and Similar Rights. - - -Section 5 -- Disclaimer of Warranties and Limitation of Liability. - - a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE - EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS - AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF - ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, - IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, - WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR - PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, - ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT - KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT - ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. - - b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE - TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, - NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, - INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, - COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR - USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN - ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR - DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR - IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. - - c. 
The disclaimer of warranties and limitation of liability provided - above shall be interpreted in a manner that, to the extent - possible, most closely approximates an absolute disclaimer and - waiver of all liability. - - -Section 6 -- Term and Termination. - - a. This Public License applies for the term of the Copyright and - Similar Rights licensed here. However, if You fail to comply with - this Public License, then Your rights under this Public License - terminate automatically. - - b. Where Your right to use the Licensed Material has terminated under - Section 6(a), it reinstates: - - 1. automatically as of the date the violation is cured, provided - it is cured within 30 days of Your discovery of the - violation; or - - 2. upon express reinstatement by the Licensor. - - For the avoidance of doubt, this Section 6(b) does not affect any - right the Licensor may have to seek remedies for Your violations - of this Public License. - - c. For the avoidance of doubt, the Licensor may also offer the - Licensed Material under separate terms or conditions or stop - distributing the Licensed Material at any time; however, doing so - will not terminate this Public License. - - d. Sections 1, 5, 6, 7, and 8 survive termination of this Public - License. - - -Section 7 -- Other Terms and Conditions. - - a. The Licensor shall not be bound by any additional or different - terms or conditions communicated by You unless expressly agreed. - - b. Any arrangements, understandings, or agreements regarding the - Licensed Material not stated herein are separate from and - independent of the terms and conditions of this Public License. - - -Section 8 -- Interpretation. - - a. For the avoidance of doubt, this Public License does not, and - shall not be interpreted to, reduce, limit, restrict, or impose - conditions on any use of the Licensed Material that could lawfully - be made without permission under this Public License. - - b. 
To the extent possible, if any provision of this Public License is - deemed unenforceable, it shall be automatically reformed to the - minimum extent necessary to make it enforceable. If the provision - cannot be reformed, it shall be severed from this Public License - without affecting the enforceability of the remaining terms and - conditions. - - c. No term or condition of this Public License will be waived and no - failure to comply consented to unless expressly agreed to by the - Licensor. - - d. Nothing in this Public License constitutes or may be interpreted - as a limitation upon, or waiver of, any privileges and immunities - that apply to the Licensor or You, including from the legal - processes of any jurisdiction or authority. - - -======================================================================= - -Creative Commons is not a party to its public -licenses. Notwithstanding, Creative Commons may elect to apply one of -its public licenses to material it publishes and in those instances -will be considered the “Licensor.” The text of the Creative Commons -public licenses is dedicated to the public domain under the CC0 Public -Domain Dedication. Except for the limited purpose of indicating that -material is shared under a Creative Commons public license or as -otherwise permitted by the Creative Commons policies published at -creativecommons.org/policies, Creative Commons does not authorize the -use of the trademark "Creative Commons" or any other trademark or logo -of Creative Commons without its prior written consent including, -without limitation, in connection with any unauthorized modifications -to any of its public licenses or any other arrangements, -understandings, or agreements concerning use of licensed material. For -the avoidance of doubt, this paragraph does not form part of the -public licenses. - -Creative Commons may be contacted at creativecommons.org. 
diff --git a/NOTICE b/NOTICE deleted file mode 100644 index 6bc85ba4bc34f7cfd1b3a5365e7d8f75a77eb9a6..0000000000000000000000000000000000000000 --- a/NOTICE +++ /dev/null @@ -1,2 +0,0 @@ -MindSpore Document -Copyright 2019-2020 Huawei Technologies Co., Ltd diff --git a/README.md b/README.md deleted file mode 100644 index 13b0b905cbb95d6c692f99a1014f6b4c348753a1..0000000000000000000000000000000000000000 --- a/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# MindSpore Documents - -![MindSpore Logo](resource/MindSpore-logo.png) - -[查看中文](./README_CN.md) - -## Overview - -This project provides the source files of the installation guide, tutorials, and other documents, as well as API configurations on the MindSpore official website . - -## Contributions - -You are welcome to contribute documents. If you want to contribute documents, read the [CONTRIBUTING_DOC.md](./CONTRIBUTING_DOC.md). Please comply with the document writing specifications, and submit documents according to the process rules. After the documents are approved, the changes will be displayed in the document project and on the official website. - -If you have any comments or suggestions on the documents, submit them in Issues. - -## Directory Structure Description - -```text -docs -├───activity // Record the activity experience. -| -├───docs // Technical documents about architecture, network list, operator list, programming guide and so on. Configuration files for API generation. -| | -| ├───api_cpp // C++ API project. -| | -| ├───api_java // Java API project. -| | -| ├───api_python // Python API project. -| | -| ├───faq // Frequently asked questions. -| | -| ├───note // Design and specification. -| | -| └───programming_guide // Programming guide documents. -│ -├───install // Installation guide. -│ -├───lite // Summary of all documents related to mindspore lite and their links. -│ -├───resource // Resource-related documents. -│ -├───tools // Automation tools. -│ -├───tutorials // Tutorial-related documents. 
-| | -| ├───inference // Inference-related documents. -| | -| ├───lite // MindSpore Lite related documents. -| | -| ├───notebook // Experiential documents. -| | -| ├───training // Training-related documents. -| | -| ├───tutorial_code // Sample code corresponding to the tutorial. -│ -└───README.md // Docs repository description. -``` - -## Document Construction - -MindSpore tutorials and API documents can be generated by [Sphinx](https://www.sphinx-doc.org/en/master/). The following uses the Python API document as an example to describe the procedure, and ensure that MindSpore, MindSpore Hub and MindArmour have been installed. - -1. Download code of the MindSpore Docs repository. - - ```shell - git clone https://gitee.com/mindspore/docs.git - ``` - -2. Go to the api_python directory and install the dependency items in the `requirements.txt` file. - - ```shell - cd docs/api_python - pip install -r requirements.txt - ``` - - > [pandoc](https://pandoc.org/) needs to be installed when generating [Training Tutorial](https://gitee.com/mindspore/docs/tree/master/tutorials/training) and [Programming Guide](https://gitee.com/mindspore/docs/tree/master/docs/programming_guide), please refer to for pandoc download and installation. - -3. Run the following command in the api_python directory to create the `build_zh_cn/html` directory that stores the generated document web page. You can open `build_zh_cn/html/index.html` to view the API document. - - ```shell - make html - ``` - - > If you only need to generate the MindSpore API, please modify the `source_zh_cn/conf.py` file, comment the `import mindspore_hub` and `import mindarmour` statements, and then perform this step. 
- -## License - -- [Apache License 2.0](LICENSE) -- [Creative Commons License version 4.0](LICENSE-CC-BY-4.0) diff --git a/README_CN.md b/README_CN.md deleted file mode 100644 index 1aa65c5d1fdc765dbf3b21643bb6d0788342233b..0000000000000000000000000000000000000000 --- a/README_CN.md +++ /dev/null @@ -1,90 +0,0 @@ -# MindSpore文档 - -![MindSpore Logo](resource/MindSpore-logo.png) - -[View English](./README.md) - -## 简介 - -此工程提供MindSpore官方网站所呈现安装指南、教程、文档的源文件以及API的相关配置。 - -## 贡献 - -我们非常欢迎您贡献文档!如果想要参与,请阅读[CONTRIBUTING_DOC_CN.md](./CONTRIBUTING_DOC_CN.md),务必遵守文档写作规范,并按照流程规则提交,审核通过后,改动会在文档工程和官网中呈现。 - -同时,如果您对文档有任何意见或建议,请在Issues中提交。 - -## 目录结构说明 - -```text -docs -├───activity // 活动体验记录 -| -├───docs // 架构、网络和算子支持、编程指南等技术文档以及用于生成API的相关配置文件 -| | -| ├───api_cpp // C++ API工程 -| | -| ├───api_java // Java API工程 -| | -| ├───api_python // Python API工程 -| | -| ├───faq // 常见问题 -| | -| ├───note // 设计和规格 -| | -| └───programming_guide // 编程指南相关文档 -│ -├───install // 安装指南 -│ -├───lite // MindSpore Lite相关所有文档汇总及其链接 -│ -├───resource // 资源相关文档 -│ -├───tools // 自动化工具 -│ -├───tutorials // 教程相关文档 -| | -| ├───inference // 推理教程相关文档 -| | -| ├───lite // 手机及IoT教程相关文档 -| | -| ├───notebook // 体验式文档 -| | -| ├───training // 训练教程相关文档 -| | -| ├───tutorial_code // 教程对应样例代码 -│ -└───README_CN.md // Docs仓说明 -``` - -## 文档构建 - -MindSpore的教程和API文档均可由[Sphinx](https://www.sphinx-doc.org/en/master/)工具生成。下面以Python API文档为例介绍具体步骤,操作前需完成MindSpore、MindSpore Hub和MindArmour的安装。 - -1. 下载MindSpore Docs仓代码。 - - ```shell - git clone https://gitee.com/mindspore/docs.git - ``` - -2. 进入api_python目录,安装该目录下`requirements.txt`文件中的依赖项。 - - ```shell - cd docs/api_python - pip install -r requirements.txt - ``` - - > 生成[训练教程](https://gitee.com/mindspore/docs/tree/master/tutorials/training)和[编程指南](https://gitee.com/mindspore/docs/tree/master/docs/programming_guide)时还需安装[pandoc](https://pandoc.org/),下载和安装pandoc请参考。 - -3. 
在api_python目录下执行如下命令,完成后会新建`build_zh_cn/html`目录,该目录中存放了生成后的文档网页,打开`build_zh_cn/html/index.html`即可查看API文档内容。 - - ```shell - make html - ``` - - > 如仅需生成MindSpore API,请先修改`source_zh_cn/conf.py`文件,注释`import mindspore_hub`和`import mindarmour`语句后,再执行此步骤。 - -## 版权 - -- [Apache License 2.0](LICENSE) -- [Creative Commons License version 4.0](LICENSE-CC-BY-4.0) diff --git a/activity/ActivityRecord.md b/activity/ActivityRecord.md deleted file mode 100644 index c8fe12d649bc53d1d91e4fd859296323d35b58e4..0000000000000000000000000000000000000000 --- a/activity/ActivityRecord.md +++ /dev/null @@ -1,33 +0,0 @@ -# MindSpore活动体验 - -MindSpore团队将持续推出体验项目,以便于用户的快速入门和进阶。本表仅用于记录MindSpore活动体验,用户在项目学习完成后,从这里分享体验,踩坑避雷或是记录打卡。我们将第一时间合入您的pr,并展现给所有人。 - -
- - 感谢您的体验!期待您的贡献! - -
- -目前上线的体验项目如下: - -[《1小时入门AI开发工程师》](https://www.mindspore.cn/news/newschildren?id=354) - -
- ->以防冲突,请尽量选择不同序号填写内容,并在提交PR后关注评论信息。 - -## MindSpore活动体验记录表 - -| 序号 | 体验项目 | 参与链接 | 体验意见 | 体验者 | 联系方式 | 完成时间 | -| :-----------: | :-----------: | :-----------: | :-----------: | :---------------: | :----------------: | :------------: | -| 示例 | 1小时入门AI开发工程师 | https://bbs.huaweicloud.com/forum/thread-103016-1-1.html | 活动简单易懂,对新手小白特别友好,除去网络因素,真正做到了1小时即可入门!| 琦玉 | 1261354409@qq.com | 2021-01-21| -| 1 | 1小时入门AI开发工程师 | https://bbs.huaweicloud.com/forum/thread-103124-1-1.html | 1、需按照文档安装python,如果cmd查看版本没问题,无需再配置环境变量2、安装MindSpore可能会报错,原因是网络问题,建议全程网络好一点3、将训练好的模型部署到手机,app安装文件夹可以通过手机的内存设备按时间排序快速查找4、整个过程没有太大问题,最终测试成功还是蛮开心的| 张晓波 | 1920969038@qq.com | 2021-01-22| -| 2 | 1小时入门AI开发工程师 | https://bbs.huaweicloud.com/forum/thread-103091-1-1.html | 千万记得把python更新到3.7.5,刚开始忘记了,后来一直报错。还有就是用流量下载pip的内容,超大坑!!| 刘佩 | 547278563@qq.com | 2021-01-25| -| 3 | 1小时入门AI开发工程师 | https://bbs.huaweicloud.com/forum/thread-103090-1-1.html | 文档很详细!| DevFeng | 523339263@qq.com | 2021-01-25| -| 4 | 1小时入门AI开发工程师 | https://bbs.huaweicloud.com/forum/thread-103162-1-1.html | 根据文档可以顺利完成,Python版本不一致可配置双版本环境,不必卸载旧版,只需要把原来的版本python.exe重命名即可;提示pip版本可升级时忽略即可,不需要update| 黄雅杰 | 344399145@qq.com | 2021-01-25| -| 5 | 1小时入门AI开发工程师 | https://bbs.huaweicloud.com/forum/thread-103385-1-1.html |活动教程整体还是很容易理解才很好操作,教程的平台采用Windows也照顾到了很多初学者可能不怎么会用Linux。教程中如果能加上提醒Linux环境的用户可以在python train.py后面加上--dataset_path指定一下数据目录的话就完美很多了(不然的话会报路径错误)当然更完美一点的话可以写一下linux下如何编译convert(可以是一个链接),不然的话还需要找个windows平台进行转换模型.总体来说通过猫狗识别实现1小时入门AI这个教程还是蛮有意思的。 | 丁一超 | JeffDing890430@163.com | 2021-01-24 | -| 6 | 1小时入门AI开发工程师 | https://bbs.huaweicloud.com/forum/thread-103213-1-1.html | 1小时即可入门,简单便捷,感谢版主| 多米诺的古牌 | 845053993@qq.com | 2021-01-25| -| 7 | 1小时入门AI开发工程师 | https://bbs.huaweicloud.com/forum/thread-103123-1-1.html | 学练结合的方式很好,学到了很多东西,超赞,而且很方便!| 贾淳 | chunjcsx20@vip.qq.com | 2021-01-22| -| 8 | 1小时入门AI开发工程师 | https://bbs.huaweicloud.com/forum/thread-103095-1-1.html | 文档很好,非常给力。 | 刘炳成 | xjtlbc@163.com | 2021-01-22 | -| 9 |1小时入门AI开发工程师 | 
https://bbs.huaweicloud.com/forum/thread-103299-1-1.html | 体验很有趣,可以把AI模型部署到手机上,让AI走进生活中,给自己的猫猫进行识别!而且是用华为自己的MindSpore来做的,MindSpore加油!| JinhengZhang | 931136898@qq.com | 2021-01-23| -| 10 |1小时入门AI开发工程师 |https://bbs.huaweicloud.com/forum/thread-103198-1-1.html | 受限于硬件环境和创作条件,没能 1 小时体验完毕,除去其他干扰,可能半小时能完成,当然知识点很丰富,值得深入学习!| 胡琦 | huqi@gpdi.com | 2021-01-23| \ No newline at end of file diff --git a/activity/sample_code/README.md b/activity/sample_code/README.md deleted file mode 100644 index 3a5ff208bcdeba94ab847d80d40a296513736446..0000000000000000000000000000000000000000 --- a/activity/sample_code/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# Title -- For example, xxx dataset conversion - -## Installing Third-party Dependencies - -pip install PIL -pip install pandas -... - -## Dataset Source - -Dataset link: - -## Dataset placement - -```text -└─XXX Datasets - ├─test - │ xxx - │ xxx - │ - └─train - xxx - xxx -``` - -## Running Mode - -python xxx.py \ No newline at end of file diff --git a/docs/api_cpp/Makefile b/docs/api_cpp/Makefile deleted file mode 100644 index 1eff8952707bdfa503c8d60c1e9a903053170ba2..0000000000000000000000000000000000000000 --- a/docs/api_cpp/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source_zh_cn -BUILDDIR = build_zh_cn - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
-%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/api_cpp/requirements.txt b/docs/api_cpp/requirements.txt deleted file mode 100644 index 162b50040286bb9a0177801c580a31013082a360..0000000000000000000000000000000000000000 --- a/docs/api_cpp/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -sphinx >= 2.2.1, <= 2.4.4 -recommonmark -sphinx-markdown-tables -sphinx_rtd_theme -numpy -jieba diff --git a/docs/api_cpp/source_en/_static/logo_notebook.png b/docs/api_cpp/source_en/_static/logo_notebook.png deleted file mode 100644 index 18c2e29e4b73ee428f70253feffdd855fdf0c422..0000000000000000000000000000000000000000 Binary files a/docs/api_cpp/source_en/_static/logo_notebook.png and /dev/null differ diff --git a/docs/api_cpp/source_en/_static/logo_source.png b/docs/api_cpp/source_en/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/api_cpp/source_en/_static/logo_source.png and /dev/null differ diff --git a/docs/api_cpp/source_en/class_list.md b/docs/api_cpp/source_en/class_list.md deleted file mode 100644 index 4fde28edf1124220f3e570dff46a1ab4588bb5d5..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_en/class_list.md +++ /dev/null @@ -1,27 +0,0 @@ -# Class List - - - -Here is a list of all classes with links to the namespace documentation for each member in MindSpore Lite: - -| Namespace | Class Name | Description | -| --- | --- | --- | -| mindspore | [KernelCallBack](https://www.mindspore.cn/doc/api_cpp/en/master/mindspore.html#kernelcallback) | KernelCallBack defines the function pointer for callback. | -| mindspore::lite | [Allocator](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#allocator) | Allocator defines a memory pool for dynamic memory malloc and memory free. 
| -| mindspore::lite | [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#context) | Context is defined for holding environment variables during runtime. | -| mindspore::lite | [ModelImpl](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#modelimpl) | ModelImpl defines the implement class of Model in MindSpore Lite. | -| mindspore::lite | [PrimitiveC](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#primitivec) | Primitive is defined as prototype of operator. | -| mindspore::lite | [Model](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#model) | Model defines model in MindSpore Lite for managing graph. | -| mindspore::lite | [ModelBuilder](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#modelbuilder) | ModelBuilder is defined to build the model. | -| mindspore::session | [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) | LiteSession defines sessions in MindSpore Lite for compiling Model and forwarding model. | -| mindspore::tensor | [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) | MSTensor defines tensor in MindSpore Lite. | -| mindspore::dataset | [LiteMat](https://www.mindspore.cn/doc/api_cpp/en/master/dataset.html#litemat) |LiteMat is a class used to process images. | - -The definitions and namespaces of classes in MindSpore are as follows: - -| Namespace | Class Name | Description | -| --------- | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------- | -| mindspore | [Context](https://www.mindspore.cn/doc/api_cpp/en/master/mindspore.html#context) | The Context class is used to store environment variables during execution. 
| -| mindspore | [Serialization](https://www.mindspore.cn/doc/api_cpp/en/master/mindspore.html#serialization) | The Serialization class is used to summarize methods for reading and writing model files. | -| mindspore | [Model](https://www.mindspore.cn/doc/api_cpp/en/master/mindspore.html#model) | The Model class is used to define a MindSpore model, facilitating computational graph management. | -| mindspore | [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) | The MSTensor class defines a tensor in MindSpore. | diff --git a/docs/api_cpp/source_en/conf.py b/docs/api_cpp/source_en/conf.py deleted file mode 100644 index 4787de3f631f53db97bad94ffb7c95441edf0bb7..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_en/conf.py +++ /dev/null @@ -1,60 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -# import sys -# sys.path.append('..') -# sys.path.insert(0, os.path.abspath('.')) - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_static_path = ['_static'] \ No newline at end of file diff --git a/docs/api_cpp/source_en/dataset.md b/docs/api_cpp/source_en/dataset.md deleted file mode 100644 index b8ae7a6d2ee12c90049fd6638defcd839b74ad09..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_en/dataset.md +++ /dev/null @@ -1,578 +0,0 @@ -# mindspore::dataset - - - -## Execute - -\#include <[execute.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/execute.h)> - -```cpp -// shared_ptr -Execute::Execute(std::shared_ptr op, MapTargetDevice deviceType, uint32_t device_id); -Execute::Execute(std::vector> ops, MapTargetDevice deviceType, uint32_t device_id); - -// normal pointer -Execute::Execute(std::reference_wrapper op, MapTargetDevice deviceType, uint32_t device_id); -Execute::Execute(std::vector> ops, MapTargetDevice deviceType, uint32_t device_id); - -// reference_wrapper -Execute::Execute(TensorTransform *op, MapTargetDevice deviceType, uint32_t device_id); -Execute::Execute(std::vector ops, MapTargetDevice deviceType, uint32_t device_id); -``` - -Transform(image, text)Transform operators in eager mode executor class. Multiple constructors are supported,include shared_ptr, normal pointer and reference_wrapper. 
- -- Parameters - - - `op`: Single transform operator. - - `ops`: A list of transform operators. - - `deviceType`: Runtime hardware. Options are: CPU, GPU and Ascend310. - - `device_id`: Device id of runtime hardware, only valid when `deviceType = MapTargetDevice::kAscend310`. - -```cpp -Status operator()(const mindspore::MSTensor &input, mindspore::MSTensor *output); -``` - -Eager mode execution API. - -- Parameters - - - `input`: Tensor before transformations. - - `output`: Tensor after transformations. - -- Returns - - Status code which indicate the execution result. - -```cpp -std::string Execute::AippCfgGenerator() -``` - -Aipp module config file generator, Aipp module binds with Dvpp module. This API takes effects on case `deviceType = kAscend310`, generates Aipp config file according to the parameters of operators defined in data pre-processing. - -- Parameters - - None. - -- Returns - - Return a string indicates the system path of Aipp config file. - -## Dvpp Module - -Dvpp module is a hardware decoder embedded in Ascend 310 AI chip which has a better performance on image processing compare with CPU operators. Several transforms applied on JPEG format image are supported. - -- If let `deviceType = kAscend310` when constructing `execute` object, Dvpp operators will be applied during runtime. -- Dvpp module supporting transforms list: `Decode(), Resize(), CenterCrop(), Normalize()`. -- The above Dvpp operator and the CPU operator of the same function share a unified API, which is only distinguished by `deviceType`. 
- -Example: - -```cpp -// Define dvpp transforms -std::vector crop_paras = {224, 224}; - -std::vector resize_paras = {256}; - -std::vector mean = {0.485 * 255, 0.456 * 255, 0.406 * 255}; - -std::vector std = {0.229 * 255, 0.224 * 255, 0.225 * 255}; - -std::shared_ptr decode(new vision::Decode()); - -std::shared_ptr resize(new vision::Resize(resize_paras)); - -std::shared_ptr centercrop(new vision::CenterCrop(crop_paras)); - -std::shared_ptr normalize(new vision::Normalize(mean, std)); - -std::vector> trans_list = {decode, resize, centercrop, normalize}; -mindspore::dataset::Execute Transform(trans_list, MapTargetDevice::kAscend310); -``` - -## ResizeBilinear - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool ResizeBilinear(LiteMat &src, LiteMat &dst, int dst_w, int dst_h) -``` - -Resize image by bilinear algorithm, currently the data type only supports uint8, the channel only supports 3 and 1. - -- Parameters - - - `src`: Input image data. - - `dst`: Output image data. - - `dst_w`: The width of the output image data. - - `dst_h`: The height of the output image data. - -- Returns - - Return true if the execution is successful, otherwise return false if the condition is not met. - -## InitFromPixel - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool InitFromPixel(const unsigned char *data, LPixelType pixel_type, LDataType data_type, int w, int h, LiteMat &m) -``` - -Initialize LiteMat from pixel, providing data in RGB or BGR format does not need to be converted. Currently the conversion supports RGB_TO_BGR, RGBA_To_RGB, RGBA_To_BGR, NV21_To_BGR and NV12_To_BGR. - -- Parameters - - - `data`: Input data. - - `pixel_type`: The type of pixel. - - `data_type`: The type of data. - - `w`: The width of the output data. 
- - `h`: The height of the output data. - - `mat`: Used to store image data. - -- Returns - - Return true if the initialization is successful, otherwise return false. - -## ConvertTo - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool ConvertTo(LiteMat &src, LiteMat &dst, double scale = 1.0) -``` - -Convert the data type, currently it supports converting the data type from uint8 to float. - -- Parameters - - - `src`: Input image data. - - `dst`: Output image data. - - `scale`: Scale pixel values (default: 1.0). - -- Returns - - Return true if the data type is converted successfully, otherwise return false. - -## Crop - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool Crop(LiteMat &src, LiteMat &dst, int x, int y, int w, int h) -``` - -Crop image, the channel supports 3 and 1. - -- Parameters - - - `src`: Input image data. - - `dst`: Output image data. - - `x`: The x coordinate value of the starting point of the screenshot. - - `y`: The y coordinate value of the starting point of the screenshot. - - `w`: The width of the screenshot. - - `h`: The height of the screenshot. - -- Returns - - Return true if the image is cropped successfully, otherwise return false. - -## SubStractMeanNormalize - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool SubStractMeanNormalize(const LiteMat &src, LiteMat &dst, const std::vector &mean, const std::vector &std) -``` - -Normalize image, currently the supports data type is float. - -- Parameters - - - `src`: Input image data. - - `dst`: Output image data. - - `mean`: Mean of the data set. - - `std`: Norm of the data set. 
- -- Returns - - Return true if the normalization is successful, otherwise return false. - -## Pad - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool Pad(const LiteMat &src, LiteMat &dst, int top, int bottom, int left, int right, PaddBorderType pad_type, uint8_t fill_b_or_gray, uint8_t fill_g, uint8_t fill_r) -``` - -Pad image, the channel supports 3 and 1. - -- Parameters - - - `src`: Input image data. - - `dst`: Output image data. - - `top`: The length of top. - - `bottom`: The length of bottom. - - `left`: The length of left. - - `right`: The length of right. - - `pad_type`: The type of pad. - - `fill_b_or_gray`: B or GRAY. - - `fill_g`: G. - - `fill_r`: R. - -- Returns - - Return true if the image is filled successfully, otherwise return false. - -## ExtractChannel - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool ExtractChannel(const LiteMat &src, LiteMat &dst, int col) -``` - -Extract image channel by index. - -- Parameters - - - `src`: Input image data. - - `col`: The serial number of the channel. - -- Returns - - Return true if the image channel is extracted successfully, otherwise return false. - -## Split - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool Split(const LiteMat &src, std::vector &mv) -``` - -Split image channels to single channel. - -- Parameters - - - `src`: Input image data. - - `mv`: Single channel data. - -- Returns - - Return true if the image channel is split successfully, otherwise return false. 
- -## Merge - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool Merge(const std::vector &mv, LiteMat &dst) -``` - -Create a multi-channel image out of several single-channel arrays. - -- Parameters - - - `mv`: Single channel data. - - `dst`: Output image data. - -- Returns - - Return true if the multi-channel image is created successfully, otherwise returns false. - -## Affine - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -void Affine(LiteMat &src, LiteMat &out_img, double M[6], std::vector dsize, UINT8_C1 borderValue) -``` - -Apply affine transformation to the 1-channel image. - -- Parameters - - - `src`: Input image data. - - `out_img`: Output image data. - - `M[6]`: Affine transformation matrix. - - `dsize`: The size of the output image. - - `borderValue`: The pixel value is used for filing after the image is captured. - -```cpp -void Affine(LiteMat &src, LiteMat &out_img, double M[6], std::vector dsize, UINT8_C3 borderValue) -``` - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -Apply affine transformation to the 3-channel image. - -- Parameters - - - `src`: Input image data. - - `out_img`: Output image data. - - `M[6]`: Affine transformation matrix. - - `dsize`: The size of the output image. - - `borderValue`: The pixel value is used for filing after the image is captured. - -## GetDefaultBoxes - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -std::vector> GetDefaultBoxes(BoxesConfig config) -``` - -Get default anchor boxes for Faster R-CNN, SSD, YOLO, etc. 
- -- Parameters - - - `config`: Objects of BoxesConfig structure. - -- Returns - - Return the default boxes. - -## ConvertBoxes - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -void ConvertBoxes(std::vector> &boxes, std::vector> &default_boxes, BoxesConfig config) -``` - -Convert the prediction boxes to the actual boxes with (y, x, h, w). - -- Parameters - - - `boxes`: Actual size box. - - `default_boxes`: Default box. - - `config`: Objects of BoxesConfig structure. - -## ApplyNms - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -std::vector ApplyNms(std::vector> &all_boxes, std::vector &all_scores, float thres, int max_boxes) -``` - -Real-size box non-maximum suppression. - -- Parameters - - - `all_boxes`: All input boxes. - - `all_scores`: Score after all boxes are executed through the network. - - `thres`: Pre-value of IOU. - - `max_boxes`: Maximum value of output box. - -- Returns - - Return the id of the boxes. - -## LiteMat - -\#include <[lite_mat.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h)> - -LiteMat is a class that processes images. - -### Constructors & Destructors - -#### LiteMat - -```cpp -LiteMat() - -LiteMat(int width, LDataType data_type = LDataType::UINT8) - -LiteMat(int width, int height, LDataType data_type = LDataType::UINT8) - -LiteMat(int width, int height, int channel, LDataType data_type = LDataType::UINT8) -``` - -Constructor of MindSpore dataset LiteMat using default value of parameters. - -#### ~LiteMat - -```cpp -~LiteMat() -``` - -Destructor of MindSpore dataset LiteMat. 
- -### Public Member Functions - -#### Init - -```cpp -void Init(int width, LDataType data_type = LDataType::UINT8) - -void Init(int width, int height, LDataType data_type = LDataType::UINT8) - -void Init(int width, int height, int channel, LDataType data_type = LDataType::UINT8) -``` - -The function to initialize the channel, width and height of the image, but the parameters are different. - -#### IsEmpty - -```cpp -bool IsEmpty() const -``` - -A function to determine whether the object is empty. - -- Returns - - Return true or false. - -#### Release - -```cpp -void Release() -``` - -A function to release memory. - -### Public Attributes - -#### data_ptr_ - -```cpp -data_ptr_ -``` - -A **pointer** to the address of the image. - -#### elem_size_ - -```cpp -elem_size_ -``` - -An **int** value. Bytes of the element. - -#### width_ - -```cpp -width_ -``` - -An **int** value. The width of the image. - -#### height_ - -```cpp -height_ -``` - -An **int** value. The height of the image. - -#### channel_ - -```cpp -channel_ -``` - -An **int** value. The number of channels of the image. - -#### c_step_ - -```cpp -c_step_ -``` - -An **int** value. The product of width and height after alignment. - -#### dims_ - -```cpp -dims_ -``` - -An **int** value. The dimensions of the image. - -#### size_ - -```cpp -size_ -``` - -The memory size of the image. - -#### data_type_ - -```cpp -data_type_ -``` - -The data type of the image. - -#### ref_count_ - -```cpp -ref_count_ -``` - -A **pointer** to the address of the reference counter. - -## Subtract - -\#include <[lite_mat.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h)> - -```cpp -bool Subtract(const LiteMat &src_a, const LiteMat &src_b, LiteMat *dst) -``` - -Calculate the element difference between two images. - -- Parameters - - - `src_a`: Input image_a data. - - `src_b`: Input image_b data. - - `dst`: Output image data. 
- -- Returns - - Return true if the calculation satisfies the conditions, otherwise return false. - -## Divide - -\#include <[lite_mat.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h)> - -```cpp -bool Divide(const LiteMat &src_a, const LiteMat &src_b, LiteMat *dst) -``` - -Calculates the division between the two images for each element. - -- Parameters - - - `src_a`: Input image_a data. - - `src_b`: Input image_b data. - - `dst`: Output image data. - -- Returns - - Return true if the calculation satisfies the conditions, otherwise return false. - -## Multiply - -\#include <[lite_mat.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h)> - -```cpp -bool Multiply(const LiteMat &src_a, const LiteMat &src_b, LiteMat *dst) -``` - -Calculates the multiply between the two images for each element. - -- Parameters - - - `src_a`: Input image_a data. - - `src_b`: Input image_b data. - - `dst`: Output image data. - -- Returns - - Return true if the calculation satisfies the conditions, otherwise return false. diff --git a/docs/api_cpp/source_en/errorcode_and_metatype.md b/docs/api_cpp/source_en/errorcode_and_metatype.md deleted file mode 100644 index f52859d4fd0790a9c488ec81ad5eba8f3bda918d..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_en/errorcode_and_metatype.md +++ /dev/null @@ -1,141 +0,0 @@ -# ErrorCode and MetaType - - - -## 1.0.1 - -Description of error code and meta type supported in MindSpore Lite. - -### ErrorCode - -| Definition | Value | Description | -| --- | --- | --- | -| RET_OK | 0 | No error occurs. | -| RET_ERROR | -1 | Common error code. | -| RET_NULL_PTR | -2 | NULL pointer returned. | -| RET_PARAM_INVALID | -3 | Invalid parameter. | -| RET_NO_CHANGE | -4 | No change. | -| RET_SUCCESS_EXIT | -5 | No error but exit. | -| RET_MEMORY_FAILED | -6 | Fail to create memory. 
| -| RET_NOT_SUPPORT | -7 | Fail to support. | -| RET_OUT_OF_TENSOR_RANGE | -100 | Failed to check range. | -| RET_INPUT_TENSOR_ERROR | -101 | Failed to check input tensor. | -| RET_REENTRANT_ERROR | -102 | Exist executor running. | -| RET_GRAPH_FILE_ERR | -200 | Failed to verify graph file. | -| RET_NOT_FIND_OP | -300 | Failed to find operator. | -| RET_INVALID_OP_NAME | -301 | Invalid operator name. | -| RET_INVALID_OP_ATTR | -302 | Invalid operator attr. | -| RET_OP_EXECUTE_FAILURE | -303 | Failed to execute the operator. | -| RET_FORMAT_ERR | -400 | Failed to check the tensor format. | -| RET_INFER_ERR | -500 | Failed to infer shape. | -| RET_INFER_INVALID | -501 | Invalid infer shape before runtime. | -| RET_INPUT_PARAM_INVALID | -600 | Invalid input param by user. | - -### MetaType - -An **enum** type. - -| Type Definition | Value | Description | -| --- | --- | --- | -|kNumberTypeBegin| 29 | The beginning of number type. | -|kNumberTypeBool| 30 | Indicating a data type of bool. | -|kNumberTypeInt| 31 | Indicating a data type of int. | -|kNumberTypeInt8| 32 | Indicating a data type of int8. | -|kNumberTypeInt16| 33 | Indicating a data type of int16. | -|kNumberTypeInt32| 34 | Indicating a data type of int32. | -|kNumberTypeInt64| 35 | Indicating a data type of int64. | -|kNumberTypeUInt| 36 | Indicating a data type of unit. | -|kNumberTypeUInt8| 37 | Indicating a data type of unit8. | -|kNumberTypeUInt16| 38 | Indicating a data type of uint16. | -|kNumberTypeUInt32| 39 | Indicating a data type of uint32. | -|kNumberTypeUInt64| 40 | Indicating a data type of uint64. | -|kNumberTypeFloat| 41 | Indicating a data type of float. | -|kNumberTypeFloat16| 42 | Indicating a data type of float16. | -|kNumberTypeFloat32| 43 | Indicating a data type of float32. | -|kNumberTypeFloat64| 44 | Indicating a data type of float64.| -|kNumberTypeEnd| 45 | The end of number type. 
| - -### Function - -```cpp -std::string GetErrorInfo(STATUS error_code) -``` - -Function to obtain description of errorcode. - -- Parameters - - - `error_code`: Define which errorcode info to obtain. - -- Returns - - String of errorcode info. - -## master - -Description of error code and meta type supported in MindSpore Lite. - -### ErrorCode - -| Definition | Value | Description | -| --- | --- | --- | -| RET_OK | 0 | No error occurs. | -| RET_ERROR | -1 | Common error code. | -| RET_NULL_PTR | -2 | NULL pointer returned. | -| RET_PARAM_INVALID | -3 | Invalid parameter. | -| RET_NO_CHANGE | -4 | No change. | -| RET_SUCCESS_EXIT | -5 | No error but exit. | -| RET_MEMORY_FAILED | -6 | Fail to create memory. | -| RET_NOT_SUPPORT | -7 | Fail to support. | -| RET_OUT_OF_TENSOR_RANGE | -100 | Failed to check range. | -| RET_INPUT_TENSOR_ERROR | -101 | Failed to check input tensor. | -| RET_REENTRANT_ERROR | -102 | Exist executor running. | -| RET_GRAPH_FILE_ERR | -200 | Failed to verify graph file. | -| RET_NOT_FIND_OP | -300 | Failed to find operator. | -| RET_INVALID_OP_NAME | -301 | Invalid operator name. | -| RET_INVALID_OP_ATTR | -302 | Invalid operator attr. | -| RET_OP_EXECUTE_FAILURE | -303 | Failed to execute the operator. | -| RET_FORMAT_ERR | -400 | Failed to check the tensor format. | -| RET_INFER_ERR | -500 | Failed to infer shape. | -| RET_INFER_INVALID | -501 | Invalid infer shape before runtime. | -| RET_INPUT_PARAM_INVALID | -600 | Invalid input param by user. | - -### MetaType - -An **enum** type. - -| Type Definition | Value | Description | -| --- | --- | --- | -|kNumberTypeBegin| 29 | The beginning of number type. | -|kNumberTypeBool| 30 | Indicating a data type of bool. | -|kNumberTypeInt| 31 | Indicating a data type of int. | -|kNumberTypeInt8| 32 | Indicating a data type of int8. | -|kNumberTypeInt16| 33 | Indicating a data type of int16. | -|kNumberTypeInt32| 34 | Indicating a data type of int32. 
| -|kNumberTypeInt64| 35 | Indicating a data type of int64. | -|kNumberTypeUInt| 36 | Indicating a data type of unit. | -|kNumberTypeUInt8| 37 | Indicating a data type of unit8. | -|kNumberTypeUInt16| 38 | Indicating a data type of uint16. | -|kNumberTypeUInt32| 39 | Indicating a data type of uint32. | -|kNumberTypeUInt64| 40 | Indicating a data type of uint64. | -|kNumberTypeFloat| 41 | Indicating a data type of float. | -|kNumberTypeFloat16| 42 | Indicating a data type of float16. | -|kNumberTypeFloat32| 43 | Indicating a data type of float32. | -|kNumberTypeFloat64| 44 | Indicating a data type of float64.| -|kNumberTypeEnd| 45 | The end of number type. | - -### Function - -```cpp -std::string GetErrorInfo(STATUS error_code) -``` - -Function to obtain description of errorcode. - -- Parameters - - - `error_code`: Define which errorcode info to obtain. - -- Returns - - String of errorcode info. diff --git a/docs/api_cpp/source_en/index.rst b/docs/api_cpp/source_en/index.rst deleted file mode 100644 index 779317bee1f0397ac1c5a78905b31b236f33d4f8..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_en/index.rst +++ /dev/null @@ -1,21 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 10:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore C++ API -======================= - -.. 
toctree:: - :glob: - :maxdepth: 1 - - class_list - mindspore - dataset - vision - lite - session - tensor - errorcode_and_metatype - lite_cpp_example \ No newline at end of file diff --git a/docs/api_cpp/source_en/lite.md b/docs/api_cpp/source_en/lite.md deleted file mode 100644 index 8f2d8f9ad9376ff3c405239f64e8c522100c385e..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_en/lite.md +++ /dev/null @@ -1,419 +0,0 @@ -# mindspore::lite - - - -## Allocator - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -Allocator defines a memory pool for dynamic memory malloc and memory free. - -## Context - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -Context is defined for holding environment variables during runtime. - -### Constructors & Destructors - -#### Context - -```cpp -Context() -``` - -Constructor of MindSpore Lite Context using default value for parameters. - -#### ~Context - -```cpp -~Context() -``` - -Destructor of MindSpore Lite Context. - -### Public Attributes - -#### vendor_name_ - -```cpp -vendor_name_ -``` - -A **string** value. Describes the vendor information. This attribute is used to distinguish from different vendors. - -#### thread_num_ - -```cpp -thread_num_ -``` - -An **int** value. Defaults to **2**. Thread number config for thread pool. - -#### allocator - -```cpp -allocator -``` - -A **pointer** pointing to [**Allocator**](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#allocator). - -#### device_list_ - -```cpp -device_list_ -``` - -A [**DeviceContextVector**](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#devicecontextvector) contains [**DeviceContext**](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#devicecontext) variables. - -> CPU, GPU and NPU are supported now. 
If GPU device context is set and GPU is supported in the current device, use GPU device first, otherwise use CPU device first. If NPU device context is set and GPU is supported in the current device, use NPU device first, otherwise use CPU device first. - -## PrimitiveC - -\#include <[model.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/model.h)> - -Primitive is defined as prototype of operator. - -## Model - -\#include <[model.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/model.h)> - -Model defines model in MindSpore Lite for managing graph. - -### Destructors - -#### ~Model - -```cpp -virtual ~Model() -``` - -Destructor of MindSpore Lite Model. - -### Public Member Functions - -#### Free - -```cpp -void Free() -``` - -Free MetaGraph in MindSpore Lite Model to reduce memory usage during inference. - -#### Destroy - -```cpp -void Destroy() -``` - -Free all temporary memory in MindSpore Lite Model. - -### Static Public Member Functions - -#### Import - -```cpp -static Model *Import(const char *model_buf, size_t size) -``` - -Static method to create a Model pointer. - -- Parameters - - - `model_buf`: Defines the buffer read from a model file. - - - `size`: variable. Defines the byte number of model buffer. - -- Returns - - Pointer that points to the MindSpore Lite Model. - -## CpuBindMode - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -An **enum** type. CpuBindMode is defined for holding arguments of the bind CPU strategy. - -### Public Attributes - -#### MID_CPU - -```cpp -MID_CPU = 2 -``` - -Bind middle cpu first. - -#### HIGHER_CPU - -```cpp -HIGHER_CPU = 1 -``` - -Bind higher cpu first. - -#### NO_BIND - -```cpp -NO_BIND = 0 -``` - -No bind. - -## DeviceType - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -An **enum** type. 
DeviceType is defined for holding user's preferred backend. - -### Public Attributes - -#### DT_CPU - -```cpp -DT_CPU = 0 -``` - -CPU device type. - -#### DT_GPU - -```cpp -DT_GPU = 1 -``` - -GPU device type. - -#### DT_NPU - -```cpp -DT_NPU = 2 -``` - -NPU device type. - -## Version - -\#include <[version.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/version.h)> - -```cpp -std::string Version() -``` - -Global method to get a version string. - -- Returns - - The version string of MindSpore Lite. - -## StringsToMSTensor - -```cpp -int StringsToMSTensor(const std::vector &inputs, tensor::MSTensor *tensor) -``` - -Global method to store strings into MSTensor. - -- Returns - - STATUS, STATUS is defined in errorcode.h. - -## MSTensorToStrings - -```cpp -std::vector MSTensorToStrings(const tensor::MSTensor *tensor) -``` - -Global method to get strings from MSTensor. - -- Returns - - The vector of strings. - -## DeviceContextVector - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -A **vector** contains [**DeviceContext**](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#devicecontext) variable. - -## DeviceContext - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -DeviceContext defines different device contexts. - -### Public Attributes - -#### device_type_ - -```cpp -device_type_ -``` - -An **enum** type. Defaults to **DT_CPU**. DeviceType is defined for holding user’s CPU backend. - -#### device_info_ - -```cpp -device_info_ -``` - -An **union** value, contains [**CpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#cpudeviceinfo) , [**GpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#gpudeviceinfo) and [**NpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#npudeviceinfo) . 
- -## DeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -A **union** value. DeviceInfo is defined for backend's configuration information. - -### Public Attributes - -#### cpu_device_info_ - -```cpp -cpu_device_info_ -``` - -[**CpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#cpudeviceinfo) is defined for CPU's configuration information. - -#### gpu_device_info_ - -```cpp -gpu_device_info_ -``` - -[**GpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#gpudeviceinfo) is defined for GPU's configuration information. - -#### npu_device_info_ - -```cpp -npu_device_info_ -``` - -[**NpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#npudeviceinfo) is defined for NPU's configuration information. - -## CpuDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -CpuDeviceInfo is defined for CPU's configuration information. - -### Public Attributes - -#### enable_float16_ - -```cpp -enable_float16_ -``` - -A **bool** value. Defaults to **false**. This attribute enables to perform the CPU float16 inference. - -> Enabling float16 inference may cause low precision inference, because some variables may exceed the range of float16 during forwarding. - -#### cpu_bind_mode_ - -```cpp -cpu_bind_mode_ -``` - -A [**CpuBindMode**](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#cpubindmode) **enum** variable. Defaults to **MID_CPU**. - -## GpuDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -GpuDeviceInfo is defined for GPU's configuration information. - -### Public Attributes - -#### enable_float16_ - -```cpp -enable_float16_ -``` - -A **bool** value. Defaults to **false**. This attribute enables to perform the GPU float16 inference. 
- -> Enabling float16 inference may cause low inference precision, because some variables may exceed the range of float16 during forwarding. - -## NpuDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -NpuDeviceInfo is defined for NPU's configuration information. - -### Public Attributes - -#### frequency_ - -```cpp -frequency_ -``` - -An **int** value. Defaults to **3**. This attribute is used to set the NPU frequency, which can be set to 1 (low power consumption), 2 (balanced), 3 (high performance), 4 (extreme performance). - -## TrainModel - -\#include <[model.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/model.h)> - -Inherited from Model, TrainModel defines a class that allows importing and exporting the MindSpore trainable model. - -### Constructors & Destructors - -#### ~TrainModel - -```cpp -virtual ~TrainModel(); -``` - -Class destructor, frees all memory. - -### Public Member Functions - -#### Import - -```cpp -static TrainModel *Import(const char *model_buf, size_t size); -``` - -Static method to create a TrainModel object. - -- Parameters - - - `model_buf`: A buffer that was read from a MS model file. - - - `size`: Length of the buffer. - -- Returns - - Pointer to MindSpore Lite TrainModel. - -#### Free - -```cpp -void Free() override; -``` - -Free meta graph related data. - -#### ExportBuf - -```cpp -char *ExportBuf(char *buf, size_t *len) const; -``` - -Export Model into a buffer. - -- Parameters - - - `buf`: The buffer to be exported into. If it is equal to nullptr, `buf` will be allocated. - - - `len`: Size of the pre-allocated buffer and the returned size of the exported buffer. - -- Returns - - Pointer to buffer with exported model. - -### Public Attributes - -#### buf_size_ - -```cpp -size_t buf_size_; -``` - -The length of the buffer with exported model. 
diff --git a/docs/api_cpp/source_en/lite_cpp_example.rst b/docs/api_cpp/source_en/lite_cpp_example.rst deleted file mode 100644 index 676b31ff9b15ddf75dda77991a0c8540337e391b..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_en/lite_cpp_example.rst +++ /dev/null @@ -1,9 +0,0 @@ -Example -======= - -.. toctree:: - :maxdepth: 1 - - Simple Demo - Android Application Development Based on JNI Interface - High-level Usage \ No newline at end of file diff --git a/docs/api_cpp/source_en/mindspore.md b/docs/api_cpp/source_en/mindspore.md deleted file mode 100644 index 174854fa0aea2c72216c49d5d7bc0fb280ec7e6e..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_en/mindspore.md +++ /dev/null @@ -1,658 +0,0 @@ -# mindspore - - - -## Context - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -The Context class is used to store environment variables during execution. - -### Public Member Functions - -#### SetThreadNum - -```cpp -void SetThreadNum(int32_t thread_num); -``` - -Set the number of threads at runtime. This option is only valid for MindSpore Lite. - -- Parameters - - - `thread_num`: the number of threads at runtime. - -#### GetThreadNum - -```cpp -int32_t GetThreadNum() const; -``` - -Get the current thread number setting. - -- Returns - - The current thread number setting. - -#### SetAllocator - -```cpp -void SetAllocator(const std::shared_ptr &allocator); -``` - -Set Allocator, which defines a memory pool for dynamic memory malloc and memory free. This option is only valid for MindSpore Lite. - -- Parameters - - - `allocator`: A pointer to an Allocator. - -#### GetAllocator - -```cpp -std::shared_ptr GetAllocator() const; -``` - -Get the current Allocator setting. - -- Returns - - The current Allocator setting. - -#### MutableDeviceInfo - -```cpp -std::vector> &MutableDeviceInfo(); -``` - -Get a mutable reference of [DeviceInfoContext](#deviceinfocontext) vector in this context. 
Only MindSpore Lite supports heterogeneous scenarios with multiple members in the vector. - -- Returns - - Mutable reference of DeviceInfoContext vector in this context. - -## DeviceInfoContext - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -DeviceInfoContext defines different device contexts. - -### Public Member Functions - -#### GetDeviceType - -```cpp -virtual enum DeviceType GetDeviceType() const = 0 -``` - -Get the type of this DeviceInfoContext. - -- Returns - - Type of this DeviceInfoContext. - - ```cpp - enum DeviceType { - kCPU = 0, - kMaliGPU, - kNvidiaGPU, - kKirinNPU, - kAscend910, - kAscend310, - // add new type here - kInvalidDeviceType = 100, - }; - ``` - -#### Cast - -```cpp -template std::shared_ptr Cast(); -``` - -A similar function to RTTI is provided when the `-fno-rtti` compilation option is turned on, which converts DeviceInfoContext to a shared pointer of type `T`, and returns `nullptr` if the conversion fails. - -- Returns - - A pointer of type `T` after conversion. If the conversion fails, it will be `nullptr`. - -## CPUDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -Derived from [DeviceInfoContext](#deviceinfocontext), The configuration of the model running on the CPU. This option is only valid for MindSpore Lite. - -### Public Member Functions - -| Functions | Notes | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetThreadAffinity(int mode)` | Set thread affinity mode

- `mode`: 0: no affinity, 1: big cores first, 2: little cores first. | -| `int GetThreadAffinity() const` | - Returns: The thread affinity mode | -| `void SetEnableFP16(bool is_fp16)` | Enables to perform the float16 inference

- `is_fp16`: Enable float16 inference or not. | -| `bool GetEnableFP16() const` | - Returns: whether enable float16 inference. | - -## MaliGPUDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -Derived from [DeviceInfoContext](#deviceinfocontext), The configuration of the model running on the GPU. This option is only valid for MindSpore Lite. - -### Public Member Functions - -| Functions | Notes | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetEnableFP16(bool is_fp16)` | Enables to perform the float16 inference

- `is_fp16`: Enable float16 inference or not. | -| `bool GetEnableFP16() const` | - Returns: whether enable float16 inference. | - -## KirinNPUDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -Derived from [DeviceInfoContext](#deviceinfocontext), The configuration of the model running on the NPU. This option is only valid for MindSpore Lite. - -### Public Member Functions - -| Functions | Notes | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetFrequency(int frequency)` | Used to set the NPU frequency

- `frequency`: can be set to 1 (low power consumption), 2 (balanced), 3 (high performance), 4 (extreme performance), default as 3. | -| `int GetFrequency() const` | - Returns: NPU frequency | - -## NvidiaGPUDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -Derived from [DeviceInfoContext](#deviceinfocontext), The configuration of the model running on the GPU. This option is invalid for MindSpore Lite. - -### Public Member Functions - -| Functions | Notes | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetDeviceID(uint32_t device_id)` | Used to set device id

- `device_id`: The device id. | -| `uint32_t GetDeviceID() const` | - Returns: The device id. | - -## Ascend910DeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -Derived from [DeviceInfoContext](#deviceinfocontext), The configuration of the model running on the Ascend910. This option is invalid for MindSpore Lite. - -### Public Member Functions - -| Functions | Notes | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetDeviceID(uint32_t device_id)` | Used to set device id

- `device_id`: The device id. | -| `uint32_t GetDeviceID() const` | - Returns: The device id. | - -## Ascend310DeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -Derived from [DeviceInfoContext](#deviceinfocontext), The configuration of the model running on the Ascend310. This option is invalid for MindSpore Lite. - -### Public Member Functions - -| Functions | Notes | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetDeviceID(uint32_t device_id)` | Used to set device id

- `device_id`: The device id. | -| `uint32_t GetDeviceID() const` | - Returns: The device id. | -| `void SetInsertOpConfigPath(const std::string &cfg_path)` | Set [AIPP](https://support.huaweicloud.com/intl/en-us/adevg-ms-atlas200dkappc32/atlasadm_01_0023.html) configuration file path | -| `std::string GetInsertOpConfigPath()` | - Returns: [AIPP](https://support.huaweicloud.com/intl/en-us/adevg-ms-atlas200dkappc32/atlasadm_01_0023.html) configuration file path | -| `void SetInputFormat(const std::string &format)` | Set format of model inputs

- `format`: Optional `"NCHW"`, `"NHWC"`, etc. | -| `std::string GetInputFormat()` | - Returns: The set format of model inputs | -| `void SetInputShape(const std::string &shape)` | Set shape of model inputs

- `shape`: e.g., `"input_op_name1: 1,2,3,4;input_op_name2: 4,3,2,1"` | -| `std::string GetInputShape()` | - Returns: The set shape of model inputs | -| `void SetOutputType(enum DataType output_type)` | Set type of model outputs

- `output_type`: Only uint8, fp16 and fp32 are supported | -| `enum DataType GetOutputType()` | - Returns: The set type of model outputs | -| `void SetPrecisionMode(const std::string &precision_mode)` | Set precision mode of model

- `precision_mode`: Optional `"force_fp16"`, `"allow_fp32_to_fp16"`, `"must_keep_origin_dtype"` and `"allow_mix_precision"`, `"force_fp16"` is set as default | -| `std::string GetPrecisionMode(t)` | - Returns: The set precision mode | -| `void SetOpSelectImplMode(const std::string &op_select_impl_mode)` | Set op select implementation mode

- `op_select_impl_mode`: Optional `"high_performance"` and `"high_precision"`, `"high_performance"` is set as default | -| `std::string GetOpSelectImplMode()` | - Returns: The set op select implementation mode | - -## Serialization - -\#include <[serialization.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/serialization.h)> - -The Serialization class is used to summarize methods for reading and writing model files. - -### Static Public Member Function - -#### Load - -Loads a model file from path, is not supported on MindSpore Lite. - -```cpp -Status Load(const std::string &file, ModelType model_type, Graph *graph); -``` - -- Parameters - - - `file`: the path of model file. - - `model_type`:the Type of model file, options are `ModelType::kMindIR`, `ModelType::kOM`. - - `graph`:the output parameter, a object saves graph data. - -- Returns - - Status code. - -#### Load - -Loads a model file from memory buffer. - -```cpp -Status Load(const void *model_data, size_t data_size, ModelType model_type, Graph *graph); -``` - -- Parameters - - - `model_data`:a buffer filled by model file. - - `data_size`:the size of the buffer. - - `model_type`:the Type of model file, options are `ModelType::kMindIR`, `ModelType::kOM`. - - `graph`:the output parameter, a object saves graph data. - -- Returns - - Status code. - -## Model - -\#include <[model.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/model.h)> - -The Model class is used to define a MindSpore model, facilitating computational graph management. - -### Constructor and Destructor - -```cpp -Model(); -~Model(); -``` - -### Public Member Function - -#### Build - -```cpp -Status Build(GraphCell graph, const std::shared_ptr &model_context); -``` - -Builds a model so that it can run on a device. - -- Parameters - - - `graph`: `GraphCell` is a derivative of `Cell`. `Cell` is not available currently. `GraphCell` can be constructed from `Graph`, for example, `model.Build(GraphCell(graph), context)`. 
- - `model_context`: a [context](#context) used to store options during execution. - -- Returns - - Status code. - -> Modifications to `model_context` after `Build` will no longer take effect. - -#### Predict - -```cpp -Status Predict(const std::vector &inputs, std::vector *outputs); -``` - -Inference model. - -- Parameters - - - `inputs`: a `vector` where model inputs are arranged in sequence. - - `outputs`: output parameter, which is a pointer to a `vector`. The model outputs are filled in the container in sequence. - -- Returns - - Status code. - -#### GetInputs - -```cpp -std::vector GetInputs(); -``` - -Obtains all input tensors of the model. - -- Returns - - The vector that includes all input tensors. - -#### GetInputByTensorName - -```cpp -MSTensor GetInputByTensorName(const std::string &tensor_name); -``` - -Obtains the input tensor of the model by name. - -- Returns - - The input tensor with the given name, if the name is not found, an invalid tensor is returned. - -#### GetOutputs - -```cpp -std::vector GetOutputs(); -``` - -Obtains all output tensors of the model. - -- Returns - - A `vector` that includes all output tensors. - -#### GetOutputTensorNames - -```cpp -std::vector GetOutputTensorNames(); -``` - -Obtains names of all output tensors of the model. - -- Returns - - A `vector` that includes names of all output tensors. - -#### GetOutputByTensorName - -```cpp -MSTensor GetOutputByTensorName(const std::string &tensor_name); -``` - -Obtains the output tensor of the model by name. - -- Returns - - The output tensor with the given name, if the name is not found, an invalid tensor is returned. - -#### Resize - -```cpp -Status Resize(const std::vector &inputs, const std::vector> &dims); -``` - -Resizes the shapes of inputs. - -- Parameters - - - `inputs`: a `vector` that includes all input tensors in order. - - `dims`: defines the new shapes of inputs, should be consistent with `inputs`. - -- Returns - - Status code. 
- -#### CheckModelSupport - -```cpp -static bool CheckModelSupport(enum DeviceType device_type, ModelType model_type); -``` - -Checks whether the type of device supports the type of model. - -- Parameters - - - `device_type`: device type, options are `kMaliGPU`, `kAscend910`, etc. - - `model_type`: the Type of model file, options are `ModelType::kMindIR`, `ModelType::kOM`. - -- Returns - - A bool value. - -## MSTensor - -\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/r1.1/include/api/types.h)> - -The MSTensor class defines a tensor in MindSpore. - -### Constructor and Destructor - -```cpp -MSTensor(); -explicit MSTensor(const std::shared_ptr &impl); -MSTensor(const std::string &name, DataType type, const std::vector &shape, const void *data, size_t data_len); -~MSTensor(); -``` - -### Static Public Member Function - -#### CreateTensor - -```cpp -MSTensor *CreateTensor(const std::string &name, DataType type, const std::vector &shape, - const void *data, size_t data_len) noexcept; -``` - -Creates a MSTensor object, whose data needs to be copied before accessed by `Model`, must be used in pairs with `DestroyTensorPtr`. - -- Parameters - - - `name`: the name of the `MSTensor`. - - `type`: the data type of the `MSTensor`. - - `shape`: the shape of the `MSTensor`. - - `data`: the data pointer that points to allocated memory. - - `data_len`: the length of the memory, in bytes. 
- -- Returns - - An pointer of `MStensor`. - -#### StringsToTensor - -```cpp -MSTensor *StringsToTensor(const std::string &name, const std::vector &str); -``` - -Create a string type `MSTensor` object whose data can be accessed by `Model` only after being copied, must be used in pair with `DestroyTensorPtr`. - -- Parameters - - - `name`: the name of the `MSTensor`. - - `str`:a `vector` container containing several strings. - -- Returns - - An pointer of `MStensor`. - -#### TensorToStrings - -```cpp -std::vector TensorToStrings(const MSTensor &tensor); -``` - -Parse the string type `MSTensor` object into strings. - -- Parameters - - - `tensor`: a `MSTensor` object. - -- Returns - - A `vector` container containing several strings. - -#### DestroyTensorPtr - -```cpp -void DestroyTensorPtr(MSTensor *tensor) noexcept; -``` - -Destroy an object created by `Clone`, `StringsToTensor`, `CreateRefTensor` or `CreateTensor`. Do not use it to destroy `MSTensor` from other sources. - -- Parameters - - - `tensor`: a pointer returned by `Clone`, `StringsToTensor`, `CreateRefTensor` or `CreateTensor`. - -### Public Member Functions - -#### Name - -```cpp -std::string Name() const; -``` - -Obtains the name of the `MSTensor`. - -- Returns - - The name of the `MSTensor`. - -#### DataType - -```cpp -enum DataType DataType() const; -``` - -Obtains the data type of the `MSTensor`. - -- Returns - - The data type of the `MSTensor`. - -#### Shape - -```cpp -const std::vector &Shape() const; -``` - -Obtains the shape of the `MSTensor`. - -- Returns - - A `vector` that contains the shape of the `MSTensor`. - -#### ElementNum - -```cpp -int64_t ElementNum() const; -``` - -Obtains the number of elements of the `MSTensor`. - -- Returns - - The number of elements of the `MSTensor`. - -#### Data - -```cpp -std::shared_ptr Data() const; -``` - -Obtains a shared pointer to the copy of data of the `MSTensor`. - -- Returns - - A shared pointer to the copy of data of the `MSTensor`. 
- -#### MutableData - -```cpp -void *MutableData(); -``` - -Obtains the pointer to the data of the `MSTensor`. - -- Returns - - The pointer to the data of the `MSTensor`. - -#### DataSize - -```cpp -size_t DataSize() const; -``` - -Obtains the length of the data of the `MSTensor`, in bytes. - -- Returns - - The length of the data of the `MSTensor`, in bytes. - -#### IsDevice - -```cpp -bool IsDevice() const; -``` - -Gets the boolean value that indicates whether the memory of `MSTensor` is on device. - -- Returns - - The boolean value that indicates whether the memory of `MSTensor` is on device. - -#### Clone - -```cpp -MSTensor *Clone() const; -``` - -Gets a deep copy of the `MSTensor`, must be used in pair with `DestroyTensorPtr`. - -- Returns - - A pointer points to a deep copy of the `MSTensor`. - -#### operator==(std::nullptr_t) - -```cpp -bool operator==(std::nullptr_t) const; -``` - -Gets the boolean value that indicates whether the `MSTensor` is valid. - -- Returns - - The boolean value that indicates whether the `MSTensor` is valid. - -## KernelCallBack - -\#include <[ms_tensor.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/ms_tensor.h)> - -```cpp -using KernelCallBack = std::function inputs, std::vector outputs, const CallBackParam &opInfo)> -``` - -A function wrapper. KernelCallBack defines the pointer for callback function. - -## CallBackParam - -\#include <[ms_tensor.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/ms_tensor.h)> - -A **struct**. CallBackParam defines input arguments for callback function. - -### Public Attributes - -#### node_name - -```cpp -node_name -``` - -A **string** variable. Node name argument. - -#### node_type - -```cpp -node_type -``` - -A **string** variable. Node type argument. 
diff --git a/docs/api_cpp/source_en/session.md b/docs/api_cpp/source_en/session.md deleted file mode 100644 index 927f23b6bdbd2376a7a89ffac758f05693aa9e46..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_en/session.md +++ /dev/null @@ -1,739 +0,0 @@ -# mindspore::session - - - -## LiteSession - -\#include <[lite_session.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/lite_session.h)> - -LiteSession defines sessions in MindSpore Lite for compiling Model and forwarding inference. - -### Constructors & Destructors - -#### LiteSession - -```cpp -LiteSession() -``` - -Constructor of MindSpore Lite LiteSession using default value for parameters. - -#### ~LiteSession - -```cpp -~LiteSession() -``` - -Destructor of MindSpore Lite LiteSession. - -### Public Member Functions - -#### BindThread - -```cpp -virtual void BindThread(bool if_bind) -``` - -Attempts to bind threads in the thread pool to the specified CPU core or unbind threads from the core. - -- Parameters - - - `if_bind`: Define whether to bind or unbind threads. - -#### CompileGraph - -```cpp -virtual int CompileGraph(lite::Model *model) -``` - -Compile MindSpore Lite model. - -> CompileGraph should be called before RunGraph. - -- Parameters - - - `model`: Define the model to be compiled. - -- Returns - - STATUS as an error code of compiling graph, STATUS is defined in [errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h). - -#### GetInputs - -```cpp -virtual std::vector GetInputs() const -``` - -Get input MindSpore Lite MSTensors of model. - -- Returns - - The vector of MindSpore Lite MSTensor. - -#### GetInputsByTensorName - -```cpp -mindspore::tensor::MSTensor *GetInputsByTensorName(const std::string &name) const -``` - -Get input MindSpore Lite MSTensors of model by tensor name. - -- Parameters - - - `name`: Define tensor name. - -- Returns - - MindSpore Lite MSTensor. 
- -#### RunGraph - -```cpp -virtual int RunGraph(const KernelCallBack &before = nullptr, const KernelCallBack &after = nullptr) -``` - -Run session with callback. -> RunGraph should be called after CompileGraph. - -- Parameters - - - `before`: A [**KernelCallBack**](https://www.mindspore.cn/doc/api_cpp/en/master/mindspore.html#kernelcallback) function. Define a callback function to be called before running each node. - - - `after`: A [**KernelCallBack**](https://www.mindspore.cn/doc/api_cpp/en/master/mindspore.html#kernelcallback) function. Define a callback function to be called after running each node. - -- Returns - - STATUS as an error code of running graph, STATUS is defined in [errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h). - -#### GetOutputsByNodeName - -```cpp -virtual std::vector GetOutputsByNodeName(const std::string &node_name) const -``` - -Get output MindSpore Lite MSTensors of model by node name. - -- Parameters - - - `node_name`: Define node name. - -- Returns - - The vector of MindSpore Lite MSTensor. - -#### GetOutputs - -```cpp -virtual std::unordered_map GetOutputs() const -``` - -Get the MSTensors output of the MindSpore Lite model mapped by tensor name. - -- Returns - - The map of output tensor name and MindSpore Lite MSTensor. - -#### GetOutputTensorNames - -```cpp -virtual std::vector GetOutputTensorNames() const -``` - -Get name of output tensors of model compiled by this session. - -- Returns - - A string variable, contains the output tensors’ names in order. - -#### GetOutputByTensorName - -```cpp -virtual mindspore::tensor::MSTensor *GetOutputByTensorName(const std::string &tensor_name) const -``` - -Get output MindSpore Lite MSTensors of model by tensor name. - -- Parameters - - - `tensor_name`: Define tensor name. - -- Returns - - Pointer of MindSpore Lite MSTensor. 
- -#### Resize - -```cpp -virtual int Resize(const std::vector &inputs, const std::vector> &dims) - -``` - -Resize inputs shape. - -- Parameters - - - `inputs`: Model inputs. - - - `dims`: defines the new inputs shape. Its order should be consistent with inputs. - -- Returns - - STATUS as an error code of resize inputs, STATUS is defined in [errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h). - -### Static Public Member Functions - -#### CreateSession - -```cpp -static LiteSession *CreateSession(const lite::Context *context) -``` - -Static method to create a LiteSession pointer. - -- Parameters - - - `context`: Define the context of session to be created. - -- Returns - - Pointer that points to MindSpore Lite MSTensor. - -```cpp -static LiteSession *CreateSession(const char *model_buf, size_t size, const lite::Context *context); -``` - -Static method to create a LiteSession pointer. The returned LiteSession pointer has already read model_buf and completed graph compilation. - -- Parameters - - - `model_buf`: Define the buffer read from a model file. - - - `size`: Define the byte number of model buffer. - - - `context`: Define the context of session to be created. - -- Returns - - Pointer that points to MindSpore Lite LiteSession. - -## TrainSession - -\#include <[train_session.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/train/train_session.h)> - -Inherited from LiteSession, TrainSession defines the class that allows training the MindSpore model. - -### Constructors & Destructors - -#### ~TrainSession - -```cpp -virtual ~TrainSession() = default; -``` - -Static method to create a TrainSession object. - -### Public Member Functions - -#### CreateSession - -```cpp -static TrainSession *CreateSession(const char *model_buf, size_t size, lite::Context *context, bool train_mode = false); -``` - -Static method to create a TrainSession object. 
- -- Parameters - - - `model_buf`: A buffer that was read from a MS model file. - - - `size`: Length of the buffer. - - - `context`: Defines the context of the session to be created. - - - `train_mode`: Training mode to initialize Session with. - -- Returns - - Pointer that points to MindSpore Lite TrainSession. - -```cpp -static TrainSession *CreateSession(const std::string &filename, lite::Context *context, bool train_mode = false); -``` - -Static method to create a TrainSession object. - -- Parameters - - - `filename`: Filename to read flatbuffer from. - - - `context`: Defines the context of the session to be created. - - - `train_mode`: Training mode to initialize Session with. - -- Returns - - Pointer that points to MindSpore Lite TrainSession. - -#### CreateTransferSession - -```cpp -static TrainSession *CreateTransferSession(const char *model_buf_backbone, size_t size_backbone, const char *model_buf_head, size_t size_head, lite::Context *context, bool train_mode = false); -``` - -Static method that creates the object pointer that points to the transfer learning training session. - -- Parameters - - - `model_buf_backbone`: Pointer that points to the backbone network. - - `size_backbone`: The size of backbone network temporary saving buffer. - - `model_buf_head`: Pointer that points to the head network. - - `size_head`: The size of head network temporary saving buffer. - - `context`: Pointer that points to the target session. - - `train_mode`: Training mode to initialize Session. - -- Returns - - Pointer that points to MindSpore Lite TrainSession. - -```cpp -static TrainSession *CreateTransferSession(const std::string &filename_backbone, const std::string &filename_head, lite::Context *context, bool train_mode = false); -``` - -Static method that creates the object pointer that points to the transfer learning training session. - -- Parameters - - - `filename_backbone`: Filename of the backbone network to read flatbuffer. 
- - `filename_head`: Filename of the head network to read flatbuffer. - - `context`: Pointer that points to the target session. - - `train_mode`: Training mode to initialize Session. - -- Returns - - Pointer that points to MindSpore Lite TrainSession. - -#### ExportToBuf - -```cpp -virtual void *ExportToBuf(char *buf, size_t *len) const = 0; -``` - -Export the trained model into a buffer. - -- Parameters - - - `buf`: The buffer to be exported into. If equal to nullptr, `buf` will be allocated. - - - `len`: Size of the pre-allocated buffer, and the returned size of the exported buffer. - -- Returns - - Pointer that points to MindSpore Lite TrainSession. - -#### SaveToFile - -```cpp -virtual int SaveToFile(const std::string &filename) const = 0; -``` - -Save the trained model into a flatbuffer file. - -- Parameters - - - `filename`: Filename of the file to save buffer. - -- Returns - - 0 represents success or -1 in case of error. - -#### Train - -```cpp -virtual int Train() = 0; -``` - -Set model to train mode. - -- Returns - - STATUS as an error code of compiling graph, STATUS is defined in [errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h) - -#### IsTrain - -```cpp -bool IsTrain() { return train_mode_ == true; } -``` - -Checks whether the current model is under the train mode. - -- Returns - - Boolean indication if model is in train mode. - -#### Eval - -```cpp -virtual int Eval() = 0; -``` - -Set model to eval mode. - -- Returns - - STATUS as an error code of compiling graph, STATUS is defined in [errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h). - -#### IsEval - -```cpp -bool IsEval() { return train_mode_ == false; } -``` - -Check mode of model. - -- Returns - - Boolean indication if model is in eval mode. - -#### SetLearningRate - -```cpp -virtual int SetLearningRate(float learning_rate) = 0; -``` - -Set the learning rate for the current model. 
- -- Returns - - 0 represents success or -1 in case of error. - -#### GetLearningRate - -```cpp -virtual float GetLearningRate() = 0; -``` - -Get the learning rate of the current model. - -- Returns - - The learning rate of the current model, default is 0.0. - -#### SetupVirtualBatch - -```cpp -virtual int SetupVirtualBatch(int virtual_batch_multiplier, float lr = -1.0f, float momentum = -1.0f) = 0; -``` - -Customize the virtual batch size, in order to reduce memory consumption. - -- Parameters - - - `virtual_batch_multiplier`: virtual batch number. - - `lr`: learning rate. - - `momentum`: momentum. - -- Returns - - 0 represents success or -1 in case of error. - -#### GetPredictions - -```cpp -virtual std::vector GetPredictions() const = 0; -``` - -Get the predicting result of the trained model. - -- Returns - - Return the pointer vector of prediction results. - -#### SetLossName - -```cpp -void SetLossName(std::string loss_name) { loss_name_ = loss_name; } -``` - -Set the loss name. - -- Parameters - - - `loss_name`: The name of loss kernels. - -## TrainLoop - -\#include <[ltrain_loop.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/train/train_loop.h)> - -Inherited from Session and used for reducing the RAM consumption during model training, user can set hyper-parameters and customized data preprocessing function. - -### Constructors & Destructors - -#### ~TrainLoop - -```cpp -virtual ~TrainLoop() = default; -``` - -Destructor function. - -### Public Member Functions - -#### CreateTrainLoop - -```cpp -static TrainLoop *CreateTrainLoop(session::TrainSession *train_session, lite::Context *context, int batch_size = -1); -``` - -A static method of creating TrainLoop pointer. - -- Parameters - - - `train_session`: Pointer that points to the CreateSession or CreateTransferSession object. - - `context`: Pointer that points to a context. - - `batch_size`: Batch size number. - -- Returns - - Pointer that points to the TrainLoop object . 
- -#### Reset - -```cpp -virtual int Reset() = 0; -``` - -Reset the epoch to 0. - -- Returns - - 0 means resetting successfully while -1 means failed. - -#### train_session - -```cpp -virtual session::TrainSession *train_session() = 0; -``` - -Get the object of the current TrainSession. - -- Returns - - Pointer that points to the object of TrainSession. - -#### Init - -```cpp -virtual int Init(std::vector metrics) = 0; -``` - -Initialize the model evaluation matrix. - -- Parameters - - - `metrics`: Pointer vector of the model evaluating matrix. - -- Returns - - 0 means initializing successfully while -1 means failed. - -#### GetMetrics - -```cpp -virtual std::vector GetMetrics() = 0; -``` - -Get the model evaluation matrix. - -- Returns - - Pointer vector of the model evaluation matrix. - -#### SetKernelCallBack - -```cpp -virtual int SetKernelCallBack(const KernelCallBack &before, const KernelCallBack &after) = 0; -``` - -Set the callback function during training. - -- Parameters - - - `before`: Callback pointer before execution. - - `after`: Callback pointer after execution. - -- Returns - - 0 means setting successfully while -1 means failed. - -#### Train - -```cpp -virtual int Train(int epochs, mindspore::dataset::Dataset *dataset, std::vector cbs, LoadDataFunc load_func = nullptr)= 0; -``` - -Execute training. - -- Parameters - - - `epochs`: Training epoch number. - - `dataset`: Pointer that points to the MindData object. - - `cbs`: Object pointer vector. - - `load_func`: Class template function object. - -- Returns - - 0 means training successfully while -1 means failed. - -#### Eval - -```cpp -virtual int Eval(mindspore::dataset::Dataset *dataset, std::vector cbs, LoadDataFunc load_func = nullptr, int max_steps = INT_MAX) = 0; -``` - -Execute evaluating. - -- Parameters - - - `dataset`: Pointer that points to the DataSet object. - - `cbs`: Object pointer vector. - - `load_func`: Class template function object. - - `max_steps`: Eval epoch number. 
- -- Returns - - 0 means evaluating successfully while -1 means failed. - -## TrainLoopCallback - -\#include <[ltrain_loop_callback.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/train/train_loop_callback.h)> - -Execute the callback functions during the model training. - -### Constructors & Destructors - -#### ~TrainLoopCallback - -```cpp -virtual ~TrainLoopCallback() = default; -``` - -Destructor function. - -### Public Member Functions - -#### Begin - -```cpp -virtual void Begin(const TrainLoopCallBackData &cb_data) {} -``` - -The method is called once before the network is executed. - -- Parameters - - - `cb_data`: cb_data info about current execution. - -#### End - -```cpp -virtual void End(const TrainLoopCallBackData &cb_data) {} -``` - -The method is called once after the network executed. - -- Parameters - - - `cb_data`: cb_data info about current execution. - -#### EpochBegin - -```cpp -virtual void EpochBegin(const TrainLoopCallBackData &cb_data) {} -``` - -The method is called at the beginning of each epoch. - -- Parameters - - - `cb_data`: cb_data info about current execution. - -#### EpochEnd - -```cpp -virtual int EpochEnd(const TrainLoopCallBackData &cb_data) { return RET_CONTINUE; } -``` - -The method is called at the end of each epoch. - -- Parameters - - - `cb_data`: cb_data info about current execution. - -- Returns - STATUS as an error code of compiling graph, STATUS is defined in [errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h). - -#### StepBegin - -```cpp -virtual void StepBegin(const TrainLoopCallBackData &cb_data) {} -``` - -The method is called at the beginning of each step. - -- Parameters - - - `cb_data`: cb_data info about current execution. - -#### StepEnd - -```cpp -virtual void StepEnd(const TrainLoopCallBackData &cb_data) {} -``` - -The method is called after each step has finished. - -- Parameters - - - `cb_data`: cb_data info about current execution. 
- -## Metrics - -\#include <[metrics.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/train/metrics.h)> - -Evaluation metrics of the training model. - -### Constructors & Destructors - -#### ~Metrics - -```cpp -virtual ~Metrics() = default; -``` - -Destructor function. - -### Public Member Functions - -#### Clear - -```cpp -virtual void Clear() {} -``` - -Reset the member variables `total_accuracy_` and `total_steps_` to 0. - -#### Eval - -```cpp -virtual float Eval() {} -``` - -Evaluation the model. - -#### Update - -```cpp -virtual void Update(std::vector inputs, std::vector outputs) = 0; -``` - -Update the member variables `total_accuracy_` and `total_steps_`. diff --git a/docs/api_cpp/source_en/tensor.md b/docs/api_cpp/source_en/tensor.md deleted file mode 100644 index 8ae519f68b82e874f448b8d97a47a04aaef2306b..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_en/tensor.md +++ /dev/null @@ -1,97 +0,0 @@ -# mindspore::tensor - - - -## MSTensor - -\#include <[ms_tensor.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/ms_tensor.h)> - -MSTensor defined tensor in MindSpore Lite. - -### Constructors & Destructors - -#### MSTensor - -```cpp -MSTensor() -``` - -Constructor of MindSpore Lite MSTensor. - -- Returns - - Instance of MindSpore Lite MSTensor. - -#### ~MSTensor - -```cpp -virtual ~MSTensor() -``` - -Destructor of MindSpore Lite Model. - -### Public Member Functions - -#### data_type - -```cpp -virtual TypeId data_type() const -``` - -Get data type of the MindSpore Lite MSTensor. - -> TypeId is defined in [mindspore/mindspore/core/ir/dtype/type_id.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/core/ir/dtype/type_id.h). Only number types or kObjectTypeString in TypeId enum are applicable for MSTensor. - -- Returns - - MindSpore Lite TypeId of the MindSpore Lite MSTensor. 
- -#### shape - -```cpp -virtual std::vector shape() const -``` - -Get shape of the MindSpore Lite MSTensor. - -- Returns - - A vector of int as the shape of the MindSpore Lite MSTensor. - -#### ElementsNum - -```cpp -virtual int ElementsNum() const -``` - -Get number of element in MSTensor. - -- Returns - - Number of element in MSTensor. - -#### Size - -```cpp -virtual size_t Size() const -``` - -Get byte size of data in MSTensor. - -- Returns - - Byte size of data in MSTensor. - -#### MutableData - -```cpp -virtual void *MutableData() const -``` - -Get the pointer of data in MSTensor. - -> The data pointer can be used to both write and read data in MSTensor. - -- Returns - - The pointer points to data in MSTensor. diff --git a/docs/api_cpp/source_en/vision.md b/docs/api_cpp/source_en/vision.md deleted file mode 100644 index 9436f08e2c49c4dc224d18363d1227ac72264784..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_en/vision.md +++ /dev/null @@ -1,114 +0,0 @@ -# mindspore::dataset::vision - - - -## HWC2CHW - -\#include <[vision.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision.h)> - -```cpp -std::shared_ptr HWC2CHW() -``` - -Convert the channel of the input image from (H, W, C) to (C, H, W). - -- Returns - - Return a HwcToChw operator. - -## CenterCrop - -\#include <[vision_lite.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision_lite.h)> - -```cpp -std::shared_ptr CenterCrop(std::vector size) -``` - -Crop the center area of the input image to the given size. - -- Parameters - - - `size`: The output size of the resized image. If the size is a single value, the image will be resized to this value with the same image aspect ratio. If the size has 2 values, it should be (height, width). - -- Returns - - Return a CenterCrop operator. 
- -## Crop - -\#include <[vision_lite.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision_lite.h)> - -```cpp -std::shared_ptr Crop(std::vector coordinates, std::vector size) -``` - -Crop an image based on the location and crop size. - -- Parameters - - - `coordinates`: The starting location of the crop. - - `size`: Size of the cropped area. - -- Returns - - Return a Crop operator. - -## Decode - -\#include <[vision_lite.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision_lite.h)> - -```cpp -std::shared_ptr Decode(bool rgb = true) -``` - -Decode the input image. - -- Parameters - - - `rgb`: Whether to decode in RGB mode. - -- Returns - - Return a Decode operator. - -## Normalize - -\#include <[vision_lite.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision_lite.h)> - -```cpp -std::shared_ptr Normalize(std::vector mean, std::vector std) -``` - -Normalize the input image with the given mean and standard deviation. - -- Parameters - - - `mean`: The mean value to do normalization. - - `std`: The standard deviation value to do normalization. - -- Returns - - Return a Normalize operator. - -## Resize - -\#include <[vision_lite.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision_lite.h)> - -```cpp -std::shared_ptr Resize(std::vector size, InterpolationMode interpolation = InterpolationMode::kLinear) -``` - -Resize the input image to the given size. - -- Parameters - - - `size`: The output size of the resized image. If the size is a single value, the image will be resized to this value with the same image aspect ratio. If the size has 2 values, it should be (height, width). - - `interpolation`: An enumeration for the mode of interpolation. - - kLinear, Linear interpolation. - - kNearestNeighbour, Nearest Interpolation. 
- - kCubic, Bicubic interpolation. - - kArea, Area interpolation. - -- Returns - - Return a Resize operator. diff --git a/docs/api_cpp/source_zh_cn/_static/logo_notebook.png b/docs/api_cpp/source_zh_cn/_static/logo_notebook.png deleted file mode 100644 index 18c2e29e4b73ee428f70253feffdd855fdf0c422..0000000000000000000000000000000000000000 Binary files a/docs/api_cpp/source_zh_cn/_static/logo_notebook.png and /dev/null differ diff --git a/docs/api_cpp/source_zh_cn/_static/logo_source.png b/docs/api_cpp/source_zh_cn/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/api_cpp/source_zh_cn/_static/logo_source.png and /dev/null differ diff --git a/docs/api_cpp/source_zh_cn/class_list.md b/docs/api_cpp/source_zh_cn/class_list.md deleted file mode 100644 index 6d413185bff4d42fcbcd50ded278a3cc180589a9..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_zh_cn/class_list.md +++ /dev/null @@ -1,27 +0,0 @@ -# 类列表 - - - -MindSpore Lite中的类定义及其所属命名空间和描述: - -| 命名空间 | 类 | 描述 | -| --- | --- | --- | -| mindspore | [KernelCallBack](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/mindspore.html#kernelcallback) | KernelCallBack定义了指向回调函数的指针。 | -| mindspore::lite | [Allocator](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#allocator) | Allocator定义了一个内存池,用于动态地分配和释放内存。 | -| mindspore::lite | [Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#context) | Context用于保存执行期间的环境变量。 | -| mindspore::lite | [ModelImpl](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#modelimpl) | ModelImpl定义了MindSpore Lite中的Model的实现类。 | -| mindspore::lite | [PrimitiveC](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#primitivec) | PrimitiveC定义为算子的原型。 | -| mindspore::lite | [Model](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#model) | Model定义了MindSpore Lite中的模型,便于计算图管理。 | -| mindspore::lite | 
[ModelBuilder](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#modelbuilder) | ModelBuilder定义了MindSpore Lite中的模型构建器。 | -| mindspore::session | [LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession) | LiteSession定义了MindSpore Lite中的会话,用于进行Model的编译和前向推理。 | -| mindspore::tensor | [MSTensor](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mstensor) | MSTensor定义了MindSpore Lite中的张量。 | -| mindspore::dataset | [LiteMat](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/dataset.html#litemat) |LiteMat是一个处理图像的类。 | - -MindSpore中的类定义及其所属命名空间和描述: - -| 命名空间 | 类 | 描述 | -| --- | --- | --- | -| mindspore | [Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/mindspore.html#context) | Context用于保存执行期间的环境变量。 | -| mindspore | [Serialization](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/mindspore.html#serialization) | Serialization汇总了模型文件读写的方法。 | -| mindspore | [Model](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/mindspore.html#model) | Model定义了MindSpore中的模型,便于计算图管理。 | -| mindspore | [MSTensor](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mstensor) | MSTensor定义了MindSpore中的张量。 | diff --git a/docs/api_cpp/source_zh_cn/conf.py b/docs/api_cpp/source_zh_cn/conf.py deleted file mode 100644 index 625e5acd3bde751f170596e75261be4bb2bde60f..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_zh_cn/conf.py +++ /dev/null @@ -1,65 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -import os -# import sys -# sys.path.append('..') -# sys.path.insert(0, os.path.abspath('.')) - - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. 
-# -html_theme = 'sphinx_rtd_theme' - -html_search_language = 'zh' - -html_search_options = {'dict': '../../resource/jieba.txt'} - -html_static_path = ['_static'] \ No newline at end of file diff --git a/docs/api_cpp/source_zh_cn/dataset.md b/docs/api_cpp/source_zh_cn/dataset.md deleted file mode 100644 index 94adab74931193b31f2a697041d40c7ca548c529..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_zh_cn/dataset.md +++ /dev/null @@ -1,573 +0,0 @@ -# mindspore::dataset - - - -## Execute - -\#include <[execute.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/execute.h)> - -```cpp -// shared_ptr -Execute::Execute(std::shared_ptr op, MapTargetDevice deviceType, uint32_t device_id); -Execute::Execute(std::vector> ops, MapTargetDevice deviceType, uint32_t device_id); - -// normal pointer -Execute::Execute(std::reference_wrapper op, MapTargetDevice deviceType, uint32_t device_id); -Execute::Execute(std::vector> ops, MapTargetDevice deviceType, uint32_t device_id); - -// reference_wrapper -Execute::Execute(TensorTransform *op, MapTargetDevice deviceType, uint32_t device_id); -Execute::Execute(std::vector ops, MapTargetDevice deviceType, uint32_t device_id); -``` - -Transform(图像、文本)变换算子Eager模式执行类。支持多种构造函数形式,包括智能指针,普通指针以及引用封装。 - -- 参数 - - - `op`: 指定单个使用的变换算子。 - - `ops`: 指定一个列表,包含多个使用的变换算子。 - - `deviceType`: 指定运行硬件设备,选项为CPU,GPU以及Ascend 310。 - - `device_id`: 指定运行硬件设备的设备号,仅当`deviceType = MapTargetDevice::kAscend310`时生效。 - -```cpp -Status operator()(const mindspore::MSTensor &input, mindspore::MSTensor *output); -``` - -Eager模式执行接口。 - -- 参数 - - - `input`: 待变换的Tensor张量。 - - `output`: 变换后的Tensor张量。 - -- 返回值 - - 返回一个状态码指示执行变换是否成功。 - -```cpp -std::string Execute::AippCfgGenerator(); -``` - -与Dvpp相关的Aipp配置文件生成器。 -该接口在`deviceType = kAscend310`时生效,依据数据预处理算子的参数自动生成Ascend 310内置Aipp模块推理配置文件。 - -- 参数 - - 无。 - -- 返回值 - - 返回一个`string`表示Aipp配置文件的系统路径。 - -## Dvpp模块 - -Dvpp模块为Ascend 
310芯片内置硬件解码器,相较于CPU拥有对图形处理更强劲的性能。支持JPEG图片的解码缩放等基础操作。 - -- 定义`execute`对象时,若设置`deviceType = kAscend310`则会调用Dvpp模块执行数据预处理算子。 -- 当前Dvpp模块支持`Decode()`, `Resize()`, `CenterCrop()`, `Normalize()`。 -- 上述Dvpp算子与同功能CPU算子共用统一API,仅以`deviceType`区分。 - -示例代码: - -```cpp -// Define dvpp transforms -std::vector crop_paras = {224, 224}; -std::vector resize_paras = {256}; -std::vector mean = {0.485 * 255, 0.456 * 255, 0.406 * 255}; -std::vector std = {0.229 * 255, 0.224 * 255, 0.225 * 255}; - -std::shared_ptr decode(new vision::Decode()); -std::shared_ptr resize(new vision::Resize(resize_paras)); -std::shared_ptr centercrop(new vision::CenterCrop(crop_paras)); -std::shared_ptr normalize(new vision::Normalize(mean, std)); - -std::vector> trans_list = {decode, resize, centercrop, normalize}; -mindspore::dataset::Execute Transform(trans_list, MapTargetDevice::kAscend310); -``` - -## ResizeBilinear - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool ResizeBilinear(LiteMat &src, LiteMat &dst, int dst_w, int dst_h) -``` - -通过双线性算法调整图像大小,当前仅支持的数据类型为uint8,当前支持的通道为3和1。 - -- 参数 - - - `src`: 输入的图片数据。 - - `dst`: 输出的图片数据。 - - `dst_w`: 输出图片数据的宽度。 - - `dst_h`: 输出图片数据的高度。 - -- 返回值 - - 执行成功返回true,执行失败返回false。 - -## InitFromPixel - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool InitFromPixel(const unsigned char *data, LPixelType pixel_type, LDataType data_type, int w, int h, LiteMat &m) -``` - -从像素初始化LiteMat,提供数据为RGB或者BGR格式,不用进行格式转换,当前支持的转换是RGB_TO_BGR、RGBA_To_RGB、RGBA_To_BGR、NV21_To_BGR和NV12_To_BGR。 - -- 参数 - - - `data`: 输入的数据。 - - `pixel_type`: 像素点的类型。 - - `data_type`: 数据的类型。 - - `w`: 输出数据的宽度。 - - `h`: 输出数据的高度。 - - `mat`: 用于存储图像数据。 - -- 返回值 - - 初始化成功返回true,否则返回false。 - -## ConvertTo - -\#include 
<[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool ConvertTo(LiteMat &src, LiteMat &dst, double scale = 1.0) -``` - -转换数据类型,当前支持的转换是将uint8转换为float。 - -- 参数 - - - `src`: 输入的图片数据。 - - `dst`: 输出图像数据。 - - `scale`: 对像素进行缩放(默认值为1.0)。 - -- 返回值 - - 转换数据类型成功返回true,否则返回false。 - -## Crop - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool Crop(LiteMat &src, LiteMat &dst, int x, int y, int w, int h) -``` - -裁剪图像,通道支持为3和1。 - -- 参数 - - - `src`: 输入的图片数据。 - - `dst`: 输出图像数据。 - - `x`: 屏幕截图起点的x坐标值。 - - `y`: 屏幕截图起点的y坐标值。 - - `w`: 截图的宽度。 - - `h`: 截图的高度。 - -- 返回值 - - 裁剪图像成功返回true,否则返回false。 - -## SubStractMeanNormalize - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool SubStractMeanNormalize(const LiteMat &src, LiteMat &dst, const std::vector &mean, const std::vector &std) -``` - -归一化图像,当前支持的数据类型为float。 - -- 参数 - - - `src`: 输入的图片数据。 - - `dst`: 输出图像数据。 - - `mean`: 数据集的均值。 - - `std`: 数据集的方差。 - -- 返回值 - - 归一化成功返回true,否则返回false。 - -## Pad - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool Pad(const LiteMat &src, LiteMat &dst, int top, int bottom, int left, int right, PaddBorderType pad_type, uint8_t fill_b_or_gray, uint8_t fill_g, uint8_t fill_r) -``` - -填充图像,通道支持为3和1。 - -- 参数 - - - `src`: 输入的图片数据。 - - `dst`: 输出图像数据。 - - `top`: 图片顶部长度。 - - `bottom`: 图片底部长度。 - - `left`: 图片左边长度。 - - `right`: 图片右边长度。 - - `pad_type`: padding的类型。 - - `fill_b_or_gray`: R或者GRAY。 - - `fill_g`: G。 - - `fill_r`: R。 - -- 返回值 - - 填充图像成功返回true,否则返回false。 - -## ExtractChannel - -\#include 
<[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool ExtractChannel(const LiteMat &src, LiteMat &dst, int col) -``` - -按索引提取图像通道。 - -- 参数 - - - `src`: 输入的图片数据。 - - `col`: 通道的序号。 - -- 返回值 - - 提取图像通道成功返回true,否则返回false。 - -## Split - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool Split(const LiteMat &src, std::vector &mv) -``` - -将图像通道拆分为单通道。 - -- 参数 - - - `src`: 输入的图片数据。 - - `mv`: 单个通道数据。 - -- 返回值 - - 图像通道拆分成功返回true,否则返回false。 - -## Merge - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -bool Merge(const std::vector &mv, LiteMat &dst) -``` - -用几个单通道阵列创建一个多通道图像。 - -- 参数 - - - `mv`: 单个通道数据。 - - `dst`: 输出图像数据。 - -- 返回值 - - 创建多通道图像成功返回true,否则返回false。 - -## Affine - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -void Affine(LiteMat &src, LiteMat &out_img, double M[6], std::vector dsize, UINT8_C1 borderValue) -``` - -对1通道图像应用仿射变换。 - -- 参数 - - - `src`: 输入图片数据。 - - `out_img`: 输出图片数据。 - - `M[6]`: 仿射变换矩阵。 - - `dsize`: 输出图像的大小。 - - `borderValue`: 采图之后用于填充的像素值。 - -```cpp -void Affine(LiteMat &src, LiteMat &out_img, double M[6], std::vector dsize, UINT8_C3 borderValue) -``` - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -对3通道图像应用仿射变换。 - -- 参数 - - - `src`: 输入图片数据。 - - `out_img`: 输出图片数据。 - - `M[6]`: 仿射变换矩阵。 - - `dsize`: 输出图像的大小。 - - `borderValue`: 采图之后用于填充的像素值。 - -## GetDefaultBoxes - -\#include 
<[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -std::vector> GetDefaultBoxes(BoxesConfig config) -``` - -获取Faster R-CNN,SSD,YOLO等的默认框。 - -- 参数 - - - `config`: BoxesConfig结构体对象。 - -- 返回值 - - 返回默认框。 - -## ConvertBoxes - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -void ConvertBoxes(std::vector> &boxes, std::vector> &default_boxes, BoxesConfig config) -``` - -将预测框转换为(y,x,h,w)的实际框。 - -- 参数 - - - `boxes`: 实际框的大小。 - - `default_boxes`: 默认框。 - - `config`: BoxesConfig结构体对象。 - -## ApplyNms - -\#include <[image_process.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h)> - -```cpp -std::vector ApplyNms(std::vector> &all_boxes, std::vector &all_scores, float thres, int max_boxes) -``` - -对实际框的非极大值抑制。 - -- 参数 - - - `all_boxes`: 所有输入的框。 - - `all_scores`: 通过网络执行后所有框的得分。 - - `thres`: IOU的预值。 - - `max_boxes`: 输出框的最大值。 - -- 返回值 - - 返回框的id。 - -## LiteMat - -\#include <[lite_mat.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h)> - -LiteMat是一个处理图像的类。 - -### 构造函数和析构函数 - -#### LiteMat - -```cpp -LiteMat() - -LiteMat(int width, LDataType data_type = LDataType::UINT8) - -LiteMat(int width, int height, LDataType data_type = LDataType::UINT8) - -LiteMat(int width, int height, int channel, LDataType data_type = LDataType::UINT8) -``` - -MindSpore中dataset模块下LiteMat的构造方法,使用参数的默认值。 - -#### ~LiteMat - -```cpp -~LiteMat() -``` - -MindSpore dataset LiteMat的析构函数。 - -### 公有成员函数 - -#### Init - -```cpp -void Init(int width, LDataType data_type = LDataType::UINT8) - -void Init(int width, int height, LDataType data_type = LDataType::UINT8) - -void Init(int width, int height, int channel, LDataType data_type = LDataType::UINT8) -``` - 
-该函数用于初始化图像的通道,宽度和高度,参数不同。 - -#### IsEmpty - -```cpp -bool IsEmpty() const -``` - -确定对象是否为空的函数。 - -- 返回值 - - 返回true或者false。 - -#### Release - -```cpp -void Release() -``` - -释放内存的函数。 - -### 公有属性 - -#### data_ptr_ - -```cpp -data_ptr_ -``` - -**pointer**类型,表示存放图像数据的地址。 - -#### elem_size_ - -```cpp -elem_size_ -``` - -**int**类型,表示元素的字节数。 - -#### width_ - -```cpp -width_ -``` - -**int**类型,表示图像的宽度。 - -#### height_ - -```cpp -height_ -``` - -**int**类型,表示图像的高度。 - -#### channel_ - -```cpp -channel_ -``` - -**int**类型,表示图像的通道数。 - -#### c_step_ - -```cpp -c_step_ -``` - -**int**类型,表示经过对齐后的图像宽高之积。 - -#### dims_ - -```cpp -dims_ -``` - -**int**类型,表示图像的维数。 - -#### size_ - -```cpp -size_ -``` - -**size_t**类型,表示图像占用内存的大小。 - -#### data_type_ - -```cpp -data_type_ -``` - -**LDataType**类型,表示图像的数据类型。 - -#### ref_count_ - -```cpp -ref_count_ -``` - -**pointer**类型,表示引用计数器的地址。 - -## Subtract - -\#include <[lite_mat.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h)> - -```cpp -bool Subtract(const LiteMat &src_a, const LiteMat &src_b, LiteMat *dst) -``` - -对两个图像间的元素进行减法运算。 - -- 参数 - - - `src_a`: 输入的图像a的数据。 - - `src_b`: 输入的图像b的数据。 - - `dst`: 输出图像的数据。 - -- 返回值 - - 满足条件的计算返回true,否则返回false。 - -## Divide - -\#include <[lite_mat.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h)> - -```cpp -bool Divide(const LiteMat &src_a, const LiteMat &src_b, LiteMat *dst) -``` - -对两个图像间的元素进行除法运算。 - -- 参数 - - - `src_a`: 输入的图像a的数据。 - - `src_b`: 输入的图像b的数据。 - - `dst`: 输出图像的数据。 - -- 返回值 - - 满足条件的计算返回true,否则返回false。 - -## Multiply - -\#include <[lite_mat.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h)> - -```cpp -bool Multiply(const LiteMat &src_a, const LiteMat &src_b, LiteMat *dst) -``` - -对两个图像间的元素进行乘法运算。 - -- 参数 - - - `src_a`: 输入的图像a的数据。 - - `src_b`: 输入的图像b的数据。 - - `dst`: 输出图像的数据。 - -- 返回值 - - 
满足条件的计算返回true,否则返回false。 diff --git a/docs/api_cpp/source_zh_cn/errorcode_and_metatype.md b/docs/api_cpp/source_zh_cn/errorcode_and_metatype.md deleted file mode 100644 index abe152a5790bf8d2a9d7daafdb19e4fbc40cc269..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_zh_cn/errorcode_and_metatype.md +++ /dev/null @@ -1,141 +0,0 @@ -# 错误码及元类型 - - - -## 1.0.1 - -以下表格描述了MindSpore Lite中支持的错误码和元类型。 - -### ErrorCode - -| 定义 | 值 | 描述 | -| --- | --- | --- | -| RET_OK | 0 | 执行成功。 | -| RET_ERROR | -1 | 通用错误码。 | -| RET_NULL_PTR | -2 | 返回空指针。 | -| RET_PARAM_INVALID | -3 | 无效参数。 | -| RET_NO_CHANGE | -4 | 无改变。 | -| RET_SUCCESS_EXIT | -5 | 无错误退出。 | -| RET_MEMORY_FAILED | -6 | 创建内存失败。 | -| RET_NOT_SUPPORT | -7 | 尚未支持。 | -| RET_OUT_OF_TENSOR_RANGE | -100 | 输出检查越界。 | -| RET_INPUT_TENSOR_ERROR | -101 | 输入检查越界。 | -| RET_REENTRANT_ERROR | -102 | 存在运行中的执行器。 | -| RET_GRAPH_FILE_ERR | -200 | 图文件识别失败。 | -| RET_NOT_FIND_OP | -300 | 无法找到算子。 | -| RET_INVALID_OP_NAME | -301 | 无效算子名。 | -| RET_INVALID_OP_ATTR | -302 | 无效算子属性。 | -| RET_OP_EXECUTE_FAILURE | -303 | 算子执行失败。 | -| RET_FORMAT_ERR | -400 | 张量格式检查失败。 | -| RET_INFER_ERR | -500 | 维度推理失败。 | -| RET_INFER_INVALID | -501 | 无效的维度推理。 | -| RET_INPUT_PARAM_INVALID | -600 | 无效的用户输入参数。 | - -### MetaType - - **enum**类型变量。 - -| 类型定义 | 值 | 描述 | -| --- | --- | --- | -|kNumberTypeBegin| 29 | 表示Number类型的起始。 | -|kNumberTypeBool| 30 | 表示Bool数据类型。 | -|kNumberTypeInt| 31 | 表示Int数据类型。 | -|kNumberTypeInt8| 32 | 表示Int8数据类型。 | -|kNumberTypeInt16| 33 | 表示Int16数据类型。 | -|kNumberTypeInt32| 34 | 表示Int32数据类型。 | -|kNumberTypeInt64| 35 | 表示Int64数据类型。 | -|kNumberTypeUInt| 36 | 表示UInt数据类型。 | -|kNumberTypeUInt8| 37 | 表示UInt8数据类型。 | -|kNumberTypeUInt16| 38 | 表示UInt16数据类型。 | -|kNumberTypeUInt32| 39 | 表示UInt32数据类型。 | -|kNumberTypeUInt64| 40 | 表示UInt64数据类型。 | -|kNumberTypeFloat| 41 | 表示Float数据类型。 | -|kNumberTypeFloat16| 42 | 表示Float16数据类型。 | -|kNumberTypeFloat32| 43 | 表示Float32数据类型。 | -|kNumberTypeFloat64| 44 | 表示Float64数据类型。| -|kNumberTypeEnd| 45 | 
表示Number类型的结尾。 | - -### 函数接口 - -```cpp -std::string GetErrorInfo(STATUS error_code) -``` - -获取错误码描述信息。 - -- 参数 - - - `error_code`: 需获取描述信息的错误码。 - -- 返回值 - - 错误码描述信息字符串。 - -## master - -以下表格描述了MindSpore Lite中支持的错误码和元类型。 - -### ErrorCode - -| 定义 | 值 | 描述 | -| --- | --- | --- | -| RET_OK | 0 | 执行成功。 | -| RET_ERROR | -1 | 通用错误码。 | -| RET_NULL_PTR | -2 | 返回空指针。 | -| RET_PARAM_INVALID | -3 | 无效参数。 | -| RET_NO_CHANGE | -4 | 无改变。 | -| RET_SUCCESS_EXIT | -5 | 无错误退出。 | -| RET_MEMORY_FAILED | -6 | 创建内存失败。 | -| RET_NOT_SUPPORT | -7 | 尚未支持。 | -| RET_OUT_OF_TENSOR_RANGE | -100 | 输出检查越界。 | -| RET_INPUT_TENSOR_ERROR | -101 | 输入检查越界。 | -| RET_REENTRANT_ERROR | -102 | 存在运行中的执行器。 | -| RET_GRAPH_FILE_ERR | -200 | 图文件识别失败。 | -| RET_NOT_FIND_OP | -300 | 无法找到算子。 | -| RET_INVALID_OP_NAME | -301 | 无效算子名。 | -| RET_INVALID_OP_ATTR | -302 | 无效算子属性。 | -| RET_OP_EXECUTE_FAILURE | -303 | 算子执行失败。 | -| RET_FORMAT_ERR | -400 | 张量格式检查失败。 | -| RET_INFER_ERR | -500 | 维度推理失败。 | -| RET_INFER_INVALID | -501 | 无效的维度推理。 | -| RET_INPUT_PARAM_INVALID | -600 | 无效的用户输入参数。 | - -### MetaType - - **enum**类型变量。 - -| 类型定义 | 值 | 描述 | -| --- | --- | --- | -|kNumberTypeBegin| 29 | 表示Number类型的起始。 | -|kNumberTypeBool| 30 | 表示Bool数据类型。 | -|kNumberTypeInt| 31 | 表示Int数据类型。 | -|kNumberTypeInt8| 32 | 表示Int8数据类型。 | -|kNumberTypeInt16| 33 | 表示Int16数据类型。 | -|kNumberTypeInt32| 34 | 表示Int32数据类型。 | -|kNumberTypeInt64| 35 | 表示Int64数据类型。 | -|kNumberTypeUInt| 36 | 表示UInt数据类型。 | -|kNumberTypeUInt8| 37 | 表示UInt8数据类型。 | -|kNumberTypeUInt16| 38 | 表示UInt16数据类型。 | -|kNumberTypeUInt32| 39 | 表示UInt32数据类型。 | -|kNumberTypeUInt64| 40 | 表示UInt64数据类型。 | -|kNumberTypeFloat| 41 | 表示Float数据类型。 | -|kNumberTypeFloat16| 42 | 表示Float16数据类型。 | -|kNumberTypeFloat32| 43 | 表示Float32数据类型。 | -|kNumberTypeFloat64| 44 | 表示Float64数据类型。| -|kNumberTypeEnd| 45 | 表示Number类型的结尾。 | - -### 函数接口 - -```cpp -std::string GetErrorInfo(STATUS error_code) -``` - -获取错误码描述信息。 - -- 参数 - - - `error_code`: 需获取描述信息的错误码。 - -- 返回值 - - 错误码描述信息字符串。 diff --git 
a/docs/api_cpp/source_zh_cn/index.rst b/docs/api_cpp/source_zh_cn/index.rst deleted file mode 100644 index 779317bee1f0397ac1c5a78905b31b236f33d4f8..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_zh_cn/index.rst +++ /dev/null @@ -1,21 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 10:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore C++ API -======================= - -.. toctree:: - :glob: - :maxdepth: 1 - - class_list - mindspore - dataset - vision - lite - session - tensor - errorcode_and_metatype - lite_cpp_example \ No newline at end of file diff --git a/docs/api_cpp/source_zh_cn/lite.md b/docs/api_cpp/source_zh_cn/lite.md deleted file mode 100644 index aa8efbb65eb44473e9f79cf298d79b0befe1912e..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_zh_cn/lite.md +++ /dev/null @@ -1,425 +0,0 @@ -# mindspore::lite - - - -## Allocator - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -Allocator类定义了一个内存池,用于动态地分配和释放内存。 - -## Context - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -Context类用于保存执行中的环境变量。 - -### 构造函数和析构函数 - -#### Context - -```cpp -Context() -``` - -用默认参数构造MindSpore Lite Context 对象。 - -#### ~Context - -```cpp -~Context() -``` - -MindSpore Lite Context 的析构函数。 - -### 公有属性 - -#### vendor_name_ - -```cpp -vendor_name_ -``` - -**string**值,芯片厂商名字,用于区别不同的芯片厂商。 - -#### thread_num_ - -```cpp -thread_num_ -``` - -**int**值,默认为**2**,设置线程数。 - -#### allocator - -```cpp -allocator -``` - -**pointer**类型,指向内存分配器 [**Allocator**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#allocator) 的指针。 - -#### device_list_ - -```cpp -device_list_ -``` - -[**DeviceContextVector**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#devicecontextvector) 类型, 元素为 
[**DeviceContext**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#devicecontext) 的**vector**. - -> 现在支持CPU、GPU和NPU。如果设置了GPU设备环境变量并且设备支持GPU,优先使用GPU设备,否则优先使用CPU设备。如果设置了NPU设备环境变量并且设备支持NPU,优先使用NPU设备,否则优先使用CPU设备。 - -## PrimitiveC - -\#include <[model.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/model.h)> - -PrimitiveC定义为算子的原型。 - -## Model - -\#include <[model.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/model.h)> - -Model定义了MindSpore Lite中的模型,便于计算图管理。 - -### 析构函数 - -#### ~Model - -```cpp -~Model() -``` - -MindSpore Lite Model的析构函数。 - -### 公有成员函数 - -#### Destroy - -```cpp -void Destroy() -``` - -释放Model内的所有过程中动态分配的内存。 - -#### Free - -```cpp -void Free() -``` - -释放MindSpore Lite Model中的MetaGraph,用于减小运行时的内存。 - -### 静态公有成员函数 - -#### Import - -```cpp -static Model *Import(const char *model_buf, size_t size) -``` - -创建Model指针的静态方法。 - -- 参数 - - - `model_buf`: 定义了读取模型文件的缓存区。 - - - `size`: 定义了模型缓存区的字节数。 - -- 返回值 - - 指向MindSpore Lite的Model的指针。 - -## CpuBindMode - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -枚举类型,设置cpu绑定策略。 - -### 公有属性 - -#### MID_CPU - -```cpp -MID_CPU = 2 -``` - -优先中等CPU绑定策略。 - -#### HIGHER_CPU - -```cpp -HIGHER_CPU = 1 -``` - -优先高级CPU绑定策略。 - -#### NO_BIND - -```cpp -NO_BIND = 0 -``` - -不绑定。 - -## DeviceType - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -枚举类型,设置设备类型。 - -### 公有属性 - -#### DT_CPU - -```cpp -DT_CPU = 0 -``` - -设备为CPU。 - -#### DT_GPU - -```cpp -DT_GPU = 1 -``` - -设备为GPU。 - -#### DT_NPU - -```cpp -DT_NPU = 2 -``` - -设备为NPU。 - -## Version - -\#include <[version.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/version.h)> - -```cpp -std::string Version() -``` - -全局方法,用于获取版本的字符串。 - -- 返回值 - - MindSpore Lite版本的字符串。 - -## StringsToMSTensor - -```cpp -int StringsToMSTensor(const std::vector &inputs, tensor::MSTensor *tensor) 
-``` - -全局方法,用于将字符串存入MSTensor。 - -- 返回值 - - STATUS,STATUS在errorcode.h中定义。 - -## MSTensorToStrings - -```cpp -std::vector MSTensorToStrings(const tensor::MSTensor *tensor) -``` - -全局方法,用于从MSTensor获取字符串。 - -- 返回值 - - 字符串的vector。 - -## DeviceContextVector - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -元素为[**DeviceContext**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#devicecontext) 的**vector**。 - -## DeviceContext - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -DeviceContext类定义不同硬件设备的环境信息。 - -### 公有属性 - -#### device_type - -```cpp -device_type -``` - -[**DeviceType**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#devicetype) 枚举类型。默认为**DT_CPU**,标明设备信息。 - -#### device_info_ - -```cpp -device_info_ -``` - -**union**类型,包含 [**CpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#cpudeviceinfo) 、 [**GpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#gpudeviceinfo) 和 [**NpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#npudeviceinfo) 。 - -## DeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -**union**类型,设置不同硬件的环境变量。 - -### 公有属性 - -#### cpu_device_info_ - -```cpp -cpu_device_info_ -``` - -[**CpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#cpudeviceinfo) 类型,配置CPU的环境变量。 - -#### gpu_device_info_ - -```cpp -gpu_device_info_ -``` - -[**GpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#gpudeviceinfo) 类型,配置GPU的环境变量。 - -#### npu_device_info_ - -```cpp -npu_device_info_ -``` - -[**NpuDeviceInfo**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#npudeviceinfo) 类型,配置NPU的环境变量。 - -## CpuDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - 
-CpuDeviceInfo类,配置CPU的环境变量。 - -### Public Attributes - -#### enable_float16_ - -```cpp -enable_float16_ -``` - -**bool**值,默认为**false**,用于使能float16 推理。 - -> 使能float16推理可能会导致模型推理精度下降,因为在模型推理的中间过程中,有些变量可能会超出float16的数值范围。 - -#### cpu_bind_mode_ - -```cpp -cpu_bind_mode_ -``` - -[**CpuBindMode**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#cpubindmode) 枚举类型,默认为**MID_CPU**。 - -## GpuDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -GpuDeviceInfo类,用来配置GPU的环境变量。 - -### 公有属性 - -#### enable_float16_ - -```cpp -enable_float16_ -``` - -**bool**值,默认为**false**,用于使能float16 推理。 - -> 使能float16推理可能会导致模型推理精度下降,因为在模型推理的中间过程中,有些变量可能会超出float16的数值范围。 - -## NpuDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/context.h)> - -NpuDeviceInfo类,用来配置NPU的环境变量。 - -### 公有属性 - -#### frequency - -```cpp -frequency_ -``` - -**int**值,默认为**3**,用来设置NPU频率,可设置为1(低功耗)、2(均衡)、3(高性能)、4(极致性能)。 - -## TrainModel - -\#include <[model.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/model.h)> - -继承于结构体Model,用于导入或导出训练模型。 - -### 析构函数 - -#### ~TrainModel - -```cpp -virtual ~TrainModel(); -``` - -虚析构函数。 - -### 公有成员函数 - -#### Import - -```cpp -static TrainModel *Import(const char *model_buf, size_t size); -``` - -导入模型。 - -- 参数 - - - `model_buf`: 指向存储读入MindSpore模型缓冲区的常量字符型指针。 - - - `size`: 缓冲区大小。 - -- 返回值 - - 返回一个指向MindSpore Lite训练模型(TrainModel)的指针。 - -#### ExportBuf - -```cpp -char* ExportBuf(char *buf, size_t *len) const; -``` - -导出模型缓冲区。 - -- 参数 - - - `buf`: 指向模型导出的目标缓冲区的指针,如果指针为空则自动分配一块内存。 - - - `len`: 指向预分配缓冲区大小的指针。 - -- 返回值 - - 返回一个指向存储导出模型缓冲区的字符指针。 - -#### Free - -```cpp -void Free() override; -``` - -释放计算-图的元数据。 - -### 公有属性 - -#### buf_size_ - -```cpp -size_t buf_size_; -``` - -缓冲区大小。 diff --git a/docs/api_cpp/source_zh_cn/lite_cpp_example.rst b/docs/api_cpp/source_zh_cn/lite_cpp_example.rst deleted file mode 100644 index 
7e0f1d18fa7de7c4d00611b8b61b5f79e4a3e8bc..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_zh_cn/lite_cpp_example.rst +++ /dev/null @@ -1,9 +0,0 @@ -样例 -======= - -.. toctree:: - :maxdepth: 1 - - 极简Demo - 基于JNI接口的Android应用开发 - 高阶用法 \ No newline at end of file diff --git a/docs/api_cpp/source_zh_cn/mindspore.md b/docs/api_cpp/source_zh_cn/mindspore.md deleted file mode 100644 index c2cf679e7e29277c196faadf0a56ffa67fa58d9c..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_zh_cn/mindspore.md +++ /dev/null @@ -1,658 +0,0 @@ -# mindspore - - - -## Context - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -Context类用于保存执行中的环境变量。 - -### 公有成员函数 - -#### SetThreadNum - -```cpp -void SetThreadNum(int32_t thread_num); -``` - -设置运行时的线程数,该选项仅MindSpore Lite有效。 - -- 参数 - - - `thread_num`: 运行时的线程数。 - -#### GetThreadNum - -```cpp -int32_t GetThreadNum() const; -``` - -获取当前线程数设置。 - -- 返回值 - - 当前线程数设置。 - -#### SetAllocator - -```cpp -void SetAllocator(const std::shared_ptr &allocator); -``` - -设置Allocator,Allocator定义了用于动态内存分配和释放的内存池,该选项仅MindSpore lite有效。 - -- 参数 - - - `allocator`: Allocator指针。 - -#### GetAllocator - -```cpp -std::shared_ptr GetAllocator() const; -``` - -获取当前Allocator设置。 - -- 返回值 - - 当前Allocator的指针。 - -#### MutableDeviceInfo - -```cpp -std::vector> &MutableDeviceInfo(); -``` - -修改该context下的[DeviceInfoContext](#deviceinfocontext)数组,仅mindspore lite支持数组中有多个成员是异构场景。 - -- 返回值 - - 存储DeviceInfoContext的vector的引用。 - -## DeviceInfoContext - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -DeviceInfoContext类定义不同硬件设备的环境信息。 - -### 公有成员函数 - -#### GetDeviceType - -```cpp -virtual enum DeviceType GetDeviceType() const = 0 -``` - -获取该DeviceInfoContext的类型。 - -- 返回值 - - 该DeviceInfoContext的类型。 - - ```cpp - enum DeviceType { - kCPU = 0, - kMaliGPU, - kNvidiaGPU, - kKirinNPU, - kAscend910, - kAscend310, - // add new type here - kInvalidDeviceType = 100, 
- }; - ``` - -#### Cast - -```cpp -template std::shared_ptr Cast(); -``` - -在打开`-fno-rtti`编译选项的情况下提供类似RTTI的功能,将DeviceInfoContext转换为`T`类型的指针,若转换失败返回`nullptr`。 - -- 返回值 - - 转换后`T`类型的指针,若转换失败则为`nullptr`。 - -## CPUDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -派生自[DeviceInfoContext](#deviceinfocontext),模型运行在CPU上的配置,仅mindspore lite支持该选项。 - -### 公有成员函数 - -| 函数 | 说明 | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetThreadAffinity(int mode)` | 设置线程亲和性模式

- `mode`: 0:无亲和性, 1:大核优先, 2:小核优先。 | -| `int GetThreadAffinity() const` | - 返回值: 已配置的线程亲和性模式 | -| `void SetEnableFP16(bool is_fp16)` | 用于指定是否以FP16精度进行推理

- `is_fp16`: 是否以FP16精度进行推理 | -| `bool GetEnableFP16() const` | - 返回值: 已配置的精度模式 | - -## MaliGPUDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -派生自[DeviceInfoContext](#deviceinfocontext),模型运行在GPU上的配置,仅mindspore lite支持该选项。 - -### 公有成员函数 - -| 函数 | 说明 | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetEnableFP16(bool is_fp16)` | 用于指定是否以FP16精度进行推理

- `is_fp16`: 是否以FP16精度进行推理 | -| `bool GetEnableFP16() const` | - 返回值: 已配置的精度模式 | - -## KirinNPUDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -派生自[DeviceInfoContext](#deviceinfocontext),模型运行在NPU上的配置,仅mindspore lite支持该选项。 - -### 公有成员函数 - -| 函数 | 说明 | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetFrequency(int frequency)` | 用于指定NPU频率

- `frequency`: 设置为1(低功耗)、2(均衡)、3(高性能)、4(极致性能),默认为3 | -| `int GetFrequency() const` | - 返回值: 已配置的NPU频率模式 | - -## NvidiaGPUDeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -派生自[DeviceInfoContext](#deviceinfocontext),模型运行在GPU上的配置,mindspore lite不支持该选项。 - -### 公有成员函数 - -| 函数 | 说明 | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetDeviceID(uint32_t device_id)` | 用于指定设备ID

- `device_id`: 设备ID | -| `uint32_t GetDeviceID() const` | - 返回值: 已配置的设备ID | - -## Ascend910DeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -派生自[DeviceInfoContext](#deviceinfocontext),模型运行在Ascend910上的配置,mindspore lite不支持该选项。 - -### 公有成员函数 - -| 函数 | 说明 | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetDeviceID(uint32_t device_id)` | 用于指定设备ID

- `device_id`: 设备ID | -| `uint32_t GetDeviceID() const` | - 返回值: 已配置的设备ID | - -## Ascend310DeviceInfo - -\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)> - -派生自[DeviceInfoContext](#deviceinfocontext),模型运行在Ascend310上的配置,mindspore lite不支持该选项。 - -### 公有成员函数 - -| 函数 | 说明 | -| ----------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `void SetDeviceID(uint32_t device_id)` | 用于指定设备ID

- `device_id`: 设备ID | -| `uint32_t GetDeviceID() const` | - 返回值: 已配置的设备ID | -| `void SetInsertOpConfigPath(const std::string &cfg_path)` | 模型插入[AIPP](https://support.huaweicloud.com/adevg-ms-atlas200dkappc32/atlasadm_01_0023.html)算子

- `cfg_path`: [AIPP](https://support.huaweicloud.com/adevg-ms-atlas200dkappc32/atlasadm_01_0023.html)配置文件路径 | -| `std::string GetInsertOpConfigPath()` | - 返回值: 已配置的[AIPP](https://support.huaweicloud.com/adevg-ms-atlas200dkappc32/atlasadm_01_0023.html) | -| `void SetInputFormat(const std::string &format)` | 指定模型输入formatt

- `format`: 可选有`"NCHW"`,`"NHWC"`等 | -| `std::string GetInputFormat()` | - 返回值: 已配置模型输入format | -| `void SetInputShape(const std::string &shape)` | 指定模型输入shape

- `shape`: 如`"input_op_name1:1,2,3,4;input_op_name2:4,3,2,1"` | -| `std::string GetInputShape()` | - 返回值: 已配置模型输入shape | -| `void SetOutputType(enum DataType output_type)` | 指定模型输出type

- `output_type`: 仅支持uint8、fp16和fp32 | -| `enum DataType GetOutputType()` | - 返回值: 已配置模型输出type | -| `void SetPrecisionMode(const std::string &precision_mode)` | 配置模型精度模式

- `precision_mode`: 可选有`"force_fp16"`,`"allow_fp32_to_fp16"`,`"must_keep_origin_dtype"`或者`"allow_mix_precision"`,默认为`"force_fp16"` | -| `std::string GetPrecisionMode(t)` | - 返回值: 已配置模型精度模式 | -| `void SetOpSelectImplMode(const std::string &op_select_impl_mode)` | 配置算子选择模式

- `op_select_impl_mode`: 可选有`"high_performance"`和`"high_precision"`,默认为`"high_performance"` | -| `std::string GetOpSelectImplMode()` | - 返回值: 已配置算子选择模式 | - -## Serialization - -\#include <[serialization.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/serialization.h)> - -Serialization类汇总了模型文件读写的方法。 - -### 静态公有成员函数 - -#### Load - -从文件加载模型,MindSpore Lite未提供此功能。 - -```cpp -Status Load(const std::string &file, ModelType model_type, Graph *graph); -``` - -- 参数 - - - `file`: 模型文件路径。 - - `model_type`:模型文件类型,可选有`ModelType::kMindIR`、`ModelType::kOM`。 - - `graph`:输出参数,保存图数据的对象。 - -- 返回值 - - 状态码类`Status`对象,可以使用其公有函数`StatusCode`或`ToString`函数来获取具体错误码及错误信息。 - -#### Load - -从内存缓冲区加载模型。 - -```cpp -Status Load(const void *model_data, size_t data_size, ModelType model_type, Graph *graph); -``` - -- 参数 - - - `model_data`:模型数据指针。 - - `data_size`:模型数据字节数。 - - `model_type`:模型文件类型,可选有`ModelType::kMindIR`、`ModelType::kOM`。 - - `graph`:输出参数,保存图数据的对象。 - -- 返回值 - - 状态码类`Status`对象,可以使用其公有函数`StatusCode`或`ToString`函数来获取具体错误码及错误信息。 - -## Model - -\#include <[model.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/model.h)> - -Model定义了MindSpore中的模型,便于计算图管理。 - -### 构造函数和析构函数 - -```cpp -Model(); -~Model(); -``` - -### 公有成员函数 - -#### Build - -```cpp -Status Build(GraphCell graph, const std::shared_ptr &model_context); -``` - -将模型编译至可在Device上运行的状态。 - -- 参数 - - - `graph`: `GraphCell`是`Cell`的一个派生,`Cell`目前没有开放使用。`GraphCell`可以由`Graph`构造,如`model.Build(GraphCell(graph), context)`。 - - `model_context`: 模型[Context](#context)。 - -- 返回值 - - 状态码类`Status`对象,可以使用其公有函数`StatusCode`或`ToString`函数来获取具体错误码及错误信息。 - -> `Build`之后对`model_context`的其他修改不再生效。 - -#### Predict - -```cpp -Status Predict(const std::vector &inputs, std::vector *outputs); -``` - -推理模型。 - -- 参数 - - - `inputs`: 模型输入按顺序排列的`vector`。 - - `outputs`: 输出参数,按顺序排列的`vector`的指针,模型输出会按顺序填入该容器。 - -- 返回值 - - 状态码类`Status`对象,可以使用其公有函数`StatusCode`或`ToString`函数来获取具体错误码及错误信息。 - -#### GetInputs - -```cpp -std::vector GetInputs(); -``` - 
-获取模型所有输入张量。 - -- 返回值 - - 包含模型所有输入张量的容器类型变量。 - -#### GetInputByTensorName - -```cpp -MSTensor GetInputByTensorName(const std::string &tensor_name); -``` - -获取模型指定名字的输入张量。 - -- 返回值 - - 指定名字的输入张量,如果该名字不存在则返回非法张量。 - -#### GetOutputs - -```cpp -std::vector GetOutputs(); -``` - -获取模型所有输出张量。 - -- 返回值 - - 包含模型所有输出张量的容器类型变量。 - -#### GetOutputTensorNames - -```cpp -std::vector GetOutputTensorNames(); -``` - -获取模型所有输出张量的名字。 - -- 返回值 - - 包含模型所有输出张量名字的容器类型变量。 - -#### GetOutputByTensorName - -```cpp -MSTensor GetOutputByTensorName(const std::string &tensor_name); -``` - -获取模型指定名字的输出张量。 - -- 返回值 - - 指定名字的输出张量,如果该名字不存在则返回非法张量。 - -#### Resize - -```cpp -Status Resize(const std::vector &inputs, const std::vector> &dims); -``` - -调整已编译模型的输入形状。 - -- 参数 - - - `inputs`: 模型输入按顺序排列的`vector`。 - - `dims`: 输入形状,按输入顺序排列的由形状组成的`vector`,模型会按顺序依次调整张量形状。 - -- 返回值 - - 状态码类`Status`对象,可以使用其公有函数`StatusCode`或`ToString`函数来获取具体错误码及错误信息。 - -#### CheckModelSupport - -```cpp -static bool CheckModelSupport(enum DeviceType device_type, ModelType model_type); -``` - -检查设备是否支持该模型。 - -- 参数 - - - `device_type`: 设备类型,例如`kMaliGPU`。 - - `model_type`: 模型类型,例如`MindIR`。 - -- 返回值 - - 状态码。 - -## MSTensor - -\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/r1.1/include/api/types.h)> - -`MSTensor`定义了MindSpore中的张量。 - -### 构造函数和析构函数 - -```cpp -MSTensor(); -explicit MSTensor(const std::shared_ptr &impl); -MSTensor(const std::string &name, DataType type, const std::vector &shape, const void *data, size_t data_len); -~MSTensor(); -``` - -### 静态公有成员函数 - -#### CreateTensor - -```cpp -MSTensor *CreateTensor(const std::string &name, DataType type, const std::vector &shape, - const void *data, size_t data_len) noexcept; -``` - -创建一个`MSTensor`对象,其数据需复制后才能由`Model`访问,必须与`DestroyTensorPtr`成对使用。 - -- 参数 - - - `name`: 名称。 - - `type`:数据类型。 - - `shape`:形状。 - - `data`:数据指针,指向一段已开辟的内存。 - - `data`:数据长度,以字节为单位。 - -- 返回值 - - `MStensor`指针。 - -#### CreateRefTensor - -```cpp -MSTensor *CreateRefTensor(const std::string &name, 
DataType type, const std::vector &shape, void *data, - size_t data_len) noexcept; -``` - -创建一个`MSTensor`对象,其数据可以直接由`Model`访问,必须与`DestroyTensorPtr`成对使用。 - -- 参数 - - - `name`: 名称。 - - `type`:数据类型。 - - `shape`:形状。 - - `data`:数据指针,指向一段已开辟的内存。 - - `data`:数据长度,以字节为单位。 - -- 返回值 - - `MStensor`指针。 - -#### StringsToTensor - -```cpp -MSTensor *StringsToTensor(const std::string &name, const std::vector &str); -``` - -创建一个字符串类型的`MSTensor`对象,其数据需复制后才能由`Model`访问,必须与`DestroyTensorPtr`成对使用。 - -- 参数 - - - `name`: 名称。 - - `str`:装有若干个字符串的`vector`容器。 - -- 返回值 - - `MStensor`指针。 - -#### TensorToStrings - -```cpp -std::vector TensorToStrings(const MSTensor &tensor); -``` - -将字符串类型的`MSTensor`对象解析为字符串。 - -- 参数 - - - `tensor`: 张量对象。 - -- 返回值 - - 装有若干个字符串的`vector`容器。 - -#### DestroyTensorPtr - -```cpp -void DestroyTensorPtr(MSTensor *tensor) noexcept; -``` - -销毁一个由`Clone`、`StringsToTensor`、`CreateRefTensor`或`CreateTensor`所创建的对象,请勿用于销毁其他来源的`MSTensor`。 - -- 参数 - - - `tensor`: 由`Clone`、`StringsToTensor`、`CreateRefTensor`或`CreateTensor`返回的指针。 - -### 公有成员函数 - -#### Name - -```cpp -std::string Name() const; -``` - -获取`MSTensor`的名字。 - -- 返回值 - - `MSTensor`的名字。 - -#### DataType - -```cpp -enum DataType DataType() const; -``` - -获取`MSTensor`的数据类型。 - -- 返回值 - - `MSTensor`的数据类型。 - -#### Shape - -```cpp -const std::vector &Shape() const; -``` - -获取`MSTensor`的Shape。 - -- 返回值 - - `MSTensor`的Shape。 - -#### ElementNum - -```cpp -int64_t ElementNum() const; -``` - -获取`MSTensor`的元素个数。 - -- 返回值 - - `MSTensor`的元素个数。 - -#### Data - -```cpp -std::shared_ptr Data() const; -``` - -获取指向`MSTensor`中的数据拷贝的智能指针。 - -- 返回值 - - 指向`MSTensor`中的数据拷贝的智能指针。 - -#### MutableData - -```cpp -void *MutableData(); -``` - -获取`MSTensor`中的数据的指针。 - -- 返回值 - - 指向`MSTensor`中的数据的指针。 - -#### DataSize - -```cpp -size_t DataSize() const; -``` - -获取`MSTensor`中的数据的以字节为单位的内存长度。 - -- 返回值 - - `MSTensor`中的数据的以字节为单位的内存长度。 - -#### IsDevice - -```cpp -bool IsDevice() const; -``` - -判断`MSTensor`中是否在设备上。 - -- 返回值 - - `MSTensor`中是否在设备上。 - -#### Clone - 
-```cpp -MSTensor *Clone() const; -``` - -拷贝一份自身的副本。 - -- 返回值 - - 指向深拷贝副本的指针,必须与`DestroyTensorPtr`成对使用。 - -#### operator==(std::nullptr_t) - -```cpp -bool operator==(std::nullptr_t) const; -``` - -判断`MSTensor`是否合法。 - -- 返回值 - - `MSTensor`是否合法。 - -## KernelCallBack - -\#include <[ms_tensor.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/ms_tensor.h)> - -```cpp -using KernelCallBack = std::function inputs, std::vector outputs, const CallBackParam &opInfo)> -``` - -一个函数包装器。KernelCallBack 定义了指向回调函数的指针。 - -## CallBackParam - -\#include <[ms_tensor.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/ms_tensor.h)> - -一个结构体。CallBackParam定义了回调函数的输入参数。 - -### 公有属性 - -#### node_name - -```cpp -node_name -``` - -**string** 类型变量。节点名参数。 - -#### node_type - -```cpp -node_type -``` - -**string** 类型变量。节点类型参数。 diff --git a/docs/api_cpp/source_zh_cn/session.md b/docs/api_cpp/source_zh_cn/session.md deleted file mode 100644 index 59ed2af981cec946c66cbbe19363387f4b104d27..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_zh_cn/session.md +++ /dev/null @@ -1,731 +0,0 @@ -# mindspore::session - - - -## LiteSession - -\#include <[lite_session.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/lite_session.h)> - -LiteSession定义了MindSpore Lite中的会话,用于进行Model的编译和前向推理。 - -### 构造函数和析构函数 - -#### LiteSession - -```cpp -LiteSession() -``` - -MindSpore Lite LiteSession的构造函数,使用默认参数。 - -#### ~LiteSession - -```cpp -~LiteSession() -``` - -MindSpore Lite LiteSession的析构函数。 - -### 公有成员函数 - -#### BindThread - -```cpp -virtual void BindThread(bool if_bind) -``` - -尝试将线程池中的线程绑定到指定的cpu内核,或从指定的cpu内核进行解绑。 - -- 参数 - - - `if_bind`: 定义了对线程进行绑定或解绑。 - -#### CompileGraph - -```cpp -virtual int CompileGraph(lite::Model *model) -``` - -编译MindSpore Lite模型。 - -> CompileGraph必须在RunGraph方法之前调用。 - -- 参数 - - - `model`: 定义了需要被编译的模型。 - -- 返回值 - - 
STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### GetInputs - -```cpp -virtual std::vector GetInputs() const -``` - -获取MindSpore Lite模型的MSTensors输入。 - -- 返回值 - - MindSpore Lite MSTensor向量。 - -#### GetInputsByTensorName - -```cpp -mindspore::tensor::MSTensor *GetInputsByTensorName(const std::string &name) const -``` - -通过tensor名获取MindSpore Lite模型的MSTensors输入。 - -- 参数 - - - `name`: 定义了tensor名。 - -- 返回值 - - MindSpore Lite MSTensor。 - -#### RunGraph - -```cpp -virtual int RunGraph(const KernelCallBack &before = nullptr, const KernelCallBack &after = nullptr) -``` - -运行带有回调函数的会话。 -> RunGraph必须在CompileGraph方法之后调用。 - -- 参数 - - - `before`: 一个[**KernelCallBack**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/mindspore.html#kernelcallback) 结构体。定义了运行每个节点之前调用的回调函数。 - - - `after`: 一个[**KernelCallBack**](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/mindspore.html#kernelcallback) 结构体。定义了运行每个节点之后调用的回调函数。 - -- 返回值 - - STATUS ,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### GetOutputsByNodeName - -```cpp -virtual std::vector GetOutputsByNodeName(const std::string &node_name) const -``` - -通过节点名获取MindSpore Lite模型的MSTensors输出。 - -- 参数 - - - `node_name`: 定义了节点名。 - -- 返回值 - - MindSpore Lite MSTensor向量。 - -#### GetOutputs - -```cpp -virtual std::unordered_map GetOutputs() const -``` - -获取与张量名相关联的MindSpore Lite模型的MSTensors输出。 - -- 返回值 - - 包含输出张量名和MindSpore Lite MSTensor的容器类型变量。 - -#### GetOutputTensorNames - -```cpp -virtual std::vector GetOutputTensorNames() const -``` - -获取由当前会话所编译的模型的输出张量名。 - -- 返回值 - - 字符串向量,其中包含了按顺序排列的输出张量名。 - -#### GetOutputByTensorName - -```cpp -virtual mindspore::tensor::MSTensor *GetOutputByTensorName(const std::string &tensor_name) const -``` - -通过张量名获取MindSpore Lite模型的MSTensors输出。 - -- 参数 - - - `tensor_name`: 定义了张量名。 - -- 返回值 - - 指向MindSpore Lite MSTensor的指针。 - -#### Resize - -```cpp 
-virtual int Resize(const std::vector &inputs, const std::vector> &dims) -``` - -调整输入的形状。 - -- 参数 - - - `inputs`: 模型对应的所有输入。 - - `dims`: 输入对应的新的shape,顺序注意要与inputs一致。 - -- 返回值 - - STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -### 静态公有成员函数 - -#### CreateSession - -```cpp -static LiteSession *CreateSession(const lite::Context *context) -``` - -用于创建一个LiteSession指针的静态方法。 - -- 参数 - - - `context`: 定义了所要创建的session的上下文。 - -- 返回值 - - 指向MindSpore Lite LiteSession的指针。 - -```cpp -static LiteSession *CreateSession(const char *model_buf, size_t size, const lite::Context *context); -``` - -用于创建一个LiteSession指针的静态方法。返回的Lite Session指针已经完成了model_buf的读入和图编译。 - -- 参数 - - - `model_buf`: 定义了读取模型文件的缓存区。 - - - `size`: 定义了模型缓存区的字节数。 - - - `context`: 定义了所要创建的session的上下文。 - -- 返回值 - - 指向MindSpore Lite LiteSession的指针。 - -## TrainSession - -\#include <[ltrain_session.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/train/train_session.h)> - -继承于类 LiteSession,用于训练模型。 - -### 构造函数和析构函数 - -#### ~TrainSession - -```cpp -virtual ~TrainSession() = default; -``` - -虚析构函数。 - -### 公有成员函数 - -#### CreateSession - -```cpp -static TrainSession *CreateSession(const char *model_buf, size_t size, lite::Context *context, bool train_mode = false); -``` - -创建训练会话指针的静态方法。 - -- 参数 - - - `model_buf`: 指向包含模型文件的缓冲区指针。 - - `size`: 缓冲区长度。 - - `context`: 指向目标会话的指针。 - - `train_mode`: 训练模式,默认值为false。 - -- 返回值 - - 指向训练会话的指针。 - -```cpp -static TrainSession *CreateSession(const std::string &filename, lite::Context *context, bool train_mode = false); -``` - -创建训练会话指针的静态方法。 - -- 参数 - - - `filename`: 指向文件名称。 - - `context`: 指向会话指针。 - - `train_mode`: 训练模式。 - -- 返回值 - - 指向训练会话的指针。 - -#### CreateTransferSession - -```cpp -static TrainSession *CreateTransferSession(const char *model_buf_backbone, size_t size_backbone, const char *model_buf_head, size_t size_head, lite::Context *context, bool train_mode = false); -``` - 
-创建迁移学习训练会话指针的静态方法。 - -- 参数 - - - `model_buf_backbone`: 指向主干网络模型的字符指针。 - - `size_backbone`: 暂存主干网络模型的缓冲区大小。 - - `model_buf_head`: 指向顶层网络模型的字符指针。 - - `size_head`: 暂存顶层网络模型的缓冲区大小。 - - `context`: 指向目标会话的指针。 - - `train_mode`: 是否开启训练模式。 - -- 返回值 - - 指向训练会话的指针。 - -```cpp -static TrainSession *CreateTransferSession(const std::string &filename_backbone, const std::string &filename_head, lite::Context *context, bool train_mode = false); -``` - -创建迁移学习训练会话指针的静态方法。 - -- 参数 - - - `filename_backbone`: 主干网络的名称。 - - `filename_head`: 顶层网络的名称。 - - `context`: 指向目标会话的指针。 - - `train_mode`: 是否开启训练模式。 - -- 返回值 - - 指向训练会话的指针。 - -#### ExportToBuf - -```cpp -virtual void *ExportToBuf(char *buf, size_t *len) const = 0; -``` - -将训练好的模型导出至缓冲区。 - -- 参数 - - - `buf`: 指向导出模型的目标缓冲区的指针,如果指针为空则自动分配一块内存。 - - `len`: 预分配缓冲区大小。 - -- 返回值 - - 指向存储导出模型缓冲区的字符指针。 - -#### SaveToFile - -```cpp -virtual int SaveToFile(const std::string &filename) const = 0; -``` - -保存已训练模型。 - -- 参数 - - - `filename`: 保存模型的文件名。 - -- 返回值 - - STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### Train - -```cpp -virtual int Train() = 0; -``` - -设置为训练模式。 - -- 返回值 - - STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### IsTrain - -```cpp -bool IsTrain() { return train_mode_ == true; } -``` - -检查当前模型是否为训练模式。 - -- 返回值 - - true 或 false,即当前模型是否为训练模式。 - -#### Eval - -```cpp -virtual int Eval() = 0; -``` - -设置为验证模式。 - -- 返回值 - - STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### IsEval - -```cpp -bool IsEval() { return train_mode_ == false; } -``` - -检查当前模型是否为验证模式。 - -- 返回值 - - true 或 false,即当前模型是否为验证模式。 - -#### SetLearningRate - -```cpp -virtual int SetLearningRate(float learning_rate) = 0; -``` - -为当前模型设置学习率。 - -- 返回值 - - 
STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### GetLearningRate - -```cpp -virtual float GetLearningRate() = 0; -``` - -获取当前模型的学习率。 - -- 返回值 - - 当前模型的学习率, 如果未设置优化器则返回0.0。 - -#### SetupVirtualBatch - -```cpp -virtual int SetupVirtualBatch(int virtual_batch_multiplier, float lr = -1.0f, float momentum = -1.0f) = 0; -``` - -用户自定义虚拟批次数,,用于减少内存消耗。 - -- 参数 - - - `virtual_batch_multiplier`: 自定义虚拟批次数。 - - `lr`: 自定义学习率。 - - `momentum`: 自定义动量。 - -- 返回值 - - STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### GetPredictions - -```cpp -virtual std::vector GetPredictions() const = 0; -``` - -获取训练模型的预测结果。 - -- 返回值 - - 预测结果张量指针数组。 - -#### SetLossName - -```cpp -void SetLossName(std::string loss_name) { loss_name_ = loss_name; } -``` - -设置损失值名称。 - -- 参数 - - - `loss_name`: 损失值名称。 - -## TrainLoop - -\#include <[ltrain_loop.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/train/train_loop.h)> - -继承于Session,可设置训练参数和数据预处理函数,用于减少模型训练的资源消耗。 - -### 构造函数和析构函数 - -#### ~TrainLoop - -```cpp -virtual ~TrainLoop() = default; -``` - -虚析构函数。 - -### 公有成员函数 - -#### CreateTrainLoop - -```cpp -static TrainLoop *CreateTrainLoop(session::TrainSession *train_session, lite::Context *context, int batch_size = -1); -``` - -创建迭代训练指针的静态方法。 - -- 参数 - - - `model_filename`: 模型文件名。 - - `context`: 指向目标会话的指针。 - - `batch_size`: 批次数。 - -- 返回值 - - 指向迭代训练对象的指针。 - -#### Reset - -```cpp -virtual int Reset() = 0; -``` - -重置迭代次数为0。 - -- 返回值 - - STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### train_session - -```cpp -virtual session::TrainSession *train_session() = 0; -``` - -获取TrainSession会话对象。 - -- 返回值 - - 指向训练会话对象的指针。 - -#### Init - -```cpp -virtual int Init(std::vector metrics) = 0; -``` - -初始化模型评估矩阵。 - -- 参数 - - - `metrics`: 模型评估矩阵指针数组。 - 
-- 返回值 - - STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### GetMetrics - -```cpp -virtual std::vector GetMetrics() = 0; -``` - -获取模型评估矩阵。 - -- 返回值 - - 模型评估矩阵指针数组。 - -#### SetKernelCallBack - -```cpp -virtual int SetKernelCallBack(const KernelCallBack &before, const KernelCallBack &after) = 0; -``` - -设置运行时回调函数。 - -- 参数 - - - `before`: 执行前回调。 - - `after`: 执行后回调。 - -- 返回值 - - STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### Train - -```cpp -virtual int Train(int epochs, mindspore::dataset::Dataset *dataset, std::vector cbs, LoadDataFunc load_func = nullptr)= 0; -``` - -执行迭代训练。 - -- 参数 - - - `epochs`: 迭代次数。 - - `dataset`: 指向MindData类对象的指针。 - - `cbs`: 对象指针数组。 - - `load_func`: 类模板函数对象。 - -- 返回值 - - STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### Eval - -```cpp -virtual int Eval(mindspore::dataset::Dataset *dataset, std::vector cbs, LoadDataFunc load_func = nullptr, int max_steps = INT_MAX) = 0; -``` - -执行迭代训练。 - -- 参数 - - - `dataset`: 指向MindData类对象的指针。 - - `cbs`: 对象指针数组。 - - `load_func`: 类模板函数对象。 - - `max_steps`: 重复迭代次数。 - -- 返回值 - - STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -## TrainLoopCallback - -\#include <[ltrain_loop_callback.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/train/train_loop_callback.h)> - -在模型训练中执行回调函数。 - -### 构造函数和析构函数 - -#### ~TrainLoopCallback - -```cpp -virtual ~TrainLoopCallback() = default; -``` - -析构函数。 - -### Public Member Functions - -#### Begin - -```cpp -virtual void Begin(const TrainLoopCallBackData &cb_data) {} -``` - -在模型训练前执行。 - -- 参数 - - - `cb_data`: 回调函数对象。 - -#### End - -```cpp -virtual void End(const TrainLoopCallBackData &cb_data) {} -``` - -在模型训练后执行回调。 - -- 参数 
- - - `cb_data`: 回调函数对象。 - -#### EpochBegin - -```cpp -virtual void EpochBegin(const TrainLoopCallBackData &cb_data) {} -``` - -每次迭代开始前执行回调。 - -- 参数 - - - `cb_data`: 回调函数对象。 - -#### EpochEnd - -```cpp -virtual int EpochEnd(const TrainLoopCallBackData &cb_data) { return RET_CONTINUE; } -``` - -每次迭代结束后执行回调。 - -- 参数 - - - `cb_data`: 回调函数对象。 - -- 返回 - STATUS,即编译图的错误码。STATUS在[errorcode.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/errorcode.h)中定义。 - -#### StepBegin - -```cpp -virtual void StepBegin(const TrainLoopCallBackData &cb_data) {} -``` - -每一步开始前执行回调。 - -- 参数 - - - `cb_data`: 回调函数对象。 - -#### StepEnd - -```cpp -virtual void StepEnd(const TrainLoopCallBackData &cb_data) {} -``` - -每一步开始后执行回调。 - -- 参数 - - - `cb_data`: 回调函数对象。 - -## Metrics - -\#include <[metrics.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/train/metrics.h)> - -训练模型评估矩阵类 - -### 构造函数和析构函数 - -#### ~Metrics - -```cpp -virtual ~Metrics() = default; -``` - -析构函数。 - -### Public Member Functions - -#### Clear - -```cpp -virtual void Clear() {} -``` - -将成员变量`total_accuracy_`和`total_steps_`置为零。 - -#### Eval - -```cpp -virtual float Eval() {} -``` - -评估模型。 - -#### Update - -```cpp -virtual void Update(std::vector inputs, std::vector outputs) = 0; -``` - -更新成员变量`total_accuracy_`和`total_steps_`的值。 diff --git a/docs/api_cpp/source_zh_cn/tensor.md b/docs/api_cpp/source_zh_cn/tensor.md deleted file mode 100644 index 9fe8d8b7bfce145d155b404dee9487357feb5c8c..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_zh_cn/tensor.md +++ /dev/null @@ -1,97 +0,0 @@ -# mindspore::tensor - - - -## MSTensor - -\#include <[ms_tensor.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/include/ms_tensor.h)> - -MSTensor定义了MindSpore Lite中的张量。 - -### 构造函数和析构函数 - -#### MSTensor - -```cpp -MSTensor() -``` - -MindSpore Lite MSTensor的构造函数。 - -- 返回值 - - MindSpore Lite MSTensor的实例。 - -#### ~MSTensor - -```cpp -virtual ~MSTensor() -``` - -MindSpore 
Lite Model的析构函数。 - -### 公有成员函数 - -#### data_type - -```cpp -virtual TypeId data_type() const -``` - -获取MindSpore Lite MSTensor的数据类型。 - -> TypeId在[mindspore/mindspore/core/ir/dtype/type_id\.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/core/ir/dtype/type_id.h)中定义。只有TypeId枚举中的数字类型或kObjectTypeString可用于MSTensor。 - -- 返回值 - - MindSpore Lite MSTensor类的MindSpore Lite TypeId。 - -#### shape - -```cpp -virtual std::vector shape() const -``` - -获取MindSpore Lite MSTensor的形状。 - -- 返回值 - - 一个包含MindSpore Lite MSTensor形状数值的整型向量。 - -#### ElementsNum - -```cpp -virtual int ElementsNum() const -``` - -获取MSTensor中的元素个数。 - -- 返回值 - - MSTensor中的元素个数 - -#### Size - -```cpp -virtual size_t Size() const -``` - -获取MSTensor中的数据的字节数大小。 - -- 返回值 - - MSTensor中的数据的字节数大小。 - -#### MutableData - -```cpp -virtual void *MutableData() const -``` - -获取MSTensor中的数据的指针。 - -> 该数据指针可用于对MSTensor中的数据进行读取和写入。 - -- 返回值 - - 指向MSTensor中的数据的指针。 diff --git a/docs/api_cpp/source_zh_cn/vision.md b/docs/api_cpp/source_zh_cn/vision.md deleted file mode 100644 index 88ca5f639bd00cc181a1db20a7158d024183b77c..0000000000000000000000000000000000000000 --- a/docs/api_cpp/source_zh_cn/vision.md +++ /dev/null @@ -1,114 +0,0 @@ -# mindspore::dataset::vision - - - -## HWC2CHW - -\#include <[vision.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision.h)> - -```cpp -std::shared_ptr HWC2CHW() -``` - -将输入图像的通道顺序从(H,W,C)转换成(C,H,W)。 - -- 返回值 - - 返回一个HwcToChw的算子。 - -## CenterCrop - -\#include <[vision_lite.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision_lite.h)> - -```cpp -std::shared_ptr CenterCrop(std::vector size) -``` - -将输入的PIL图像的中心区域裁剪到给定的大小。 - -- 参数 - - - `size`: 表示调整大小后的图像的输出大小。如果size为单个值,则将以相同的图像纵横比将图像调整为该值, 如果size具有2个值,则应为(高度,宽度)。 - -- 返回值 - - 返回一个CenterCrop的算子。 - -## Crop - -\#include 
<[vision_lite.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision_lite.h)> - -```cpp -std::shared_ptr Crop(std::vector coordinates, std::vector size) -``` - -根据位置和尺寸裁切图像。 - -- 参数 - - - `coordinates`: 裁剪的起始位置。 - - `size`: 裁剪区域的大小。 - -- 返回值 - - 返回一个Crop的算子。 - -## Decode - -\#include <[vision_lite.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision_lite.h)> - -```cpp -std::shared_ptr Decode(bool rgb = true) -``` - -对输入的图像进行解码。 - -- 参数 - - - `rgb`: 表示是否执行RGB模式解码。 - -- 返回值 - - 返回一个Decode的算子。 - -## Normalize - -\#include <[vision_lite.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision_lite.h)> - -```cpp -std::shared_ptr Normalize(std::vector mean, std::vector std) -``` - -通过给定的均值和标准差对输入的图像进行标准化。 - -- 参数 - - - `mean`: 表示进行标准化的均值。 - - `std`: 表示进行标准化的标准差。 - -- 返回值 - - 返回一个Normalize的算子。 - -## Resize - -\#include <[vision_lite.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/minddata/dataset/include/dataset/vision_lite.h)> - -```cpp -std::shared_ptr Resize(std::vector size, InterpolationMode interpolation = InterpolationMode::kLinear) -``` - -通过给定的大小对输入的PIL图像进行调整。 - -- 参数 - - - `size`: 表示调整大小后的图像的输出大小。如果size为单个值,则将以相同的图像纵横比将图像调整为该值,如果size具有2个值,则应为(高度,宽度)。 - - `interpolation`: 插值模式的枚举。 - - kLinear,线性差值。 - - kNearestNeighbour,最近邻插值。 - - kCubic,双三次插值。 - - kArea,区域插值。 - -- 返回值 - - 返回一个Resize的算子。 diff --git a/docs/api_java/Makefile b/docs/api_java/Makefile deleted file mode 100644 index 1eff8952707bdfa503c8d60c1e9a903053170ba2..0000000000000000000000000000000000000000 --- a/docs/api_java/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. 
-SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source_zh_cn -BUILDDIR = build_zh_cn - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/api_java/requirements.txt b/docs/api_java/requirements.txt deleted file mode 100644 index 162b50040286bb9a0177801c580a31013082a360..0000000000000000000000000000000000000000 --- a/docs/api_java/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -sphinx >= 2.2.1, <= 2.4.4 -recommonmark -sphinx-markdown-tables -sphinx_rtd_theme -numpy -jieba diff --git a/docs/api_java/source_en/_static/logo_notebook.png b/docs/api_java/source_en/_static/logo_notebook.png deleted file mode 100644 index 18c2e29e4b73ee428f70253feffdd855fdf0c422..0000000000000000000000000000000000000000 Binary files a/docs/api_java/source_en/_static/logo_notebook.png and /dev/null differ diff --git a/docs/api_java/source_en/_static/logo_source.png b/docs/api_java/source_en/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/api_java/source_en/_static/logo_source.png and /dev/null differ diff --git a/docs/api_java/source_en/class_list.md b/docs/api_java/source_en/class_list.md deleted file mode 100644 index e59f7f68aece3377b2892fd9eea8564ac328d715..0000000000000000000000000000000000000000 --- a/docs/api_java/source_en/class_list.md +++ /dev/null @@ -1,14 +0,0 @@ -# Class List - - - -| Package | Class Name | Description | -| ------------------------- | -------------- | ------------------------------------------------------------ | -| com.mindspore.lite.config | 
[MSConfig](https://www.mindspore.cn/doc/api_java/en/master/msconfig.html) | MSConfig defines for holding environment variables during runtime. | -| com.mindspore.lite.config | [CpuBindMode](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/CpuBindMode.java) | CpuBindMode defines the CPU binding mode. | -| com.mindspore.lite.config | [DeviceType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/DeviceType.java) | DeviceType defines the back-end device type. | -| com.mindspore.lite | [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html) | LiteSession defines session in MindSpore Lite for compiling Model and forwarding model. | -| com.mindspore.lite | [Model](https://www.mindspore.cn/doc/api_java/en/master/model.html) | Model defines the model in MindSpore Lite for managing graph. | -| com.mindspore.lite | [MSTensor](https://www.mindspore.cn/doc/api_java/en/master/mstensor.html) | MSTensor defines the tensor in MindSpore Lite. | -| com.mindspore.lite | [DataType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/DataType.java) | DataType defines the supported data types. | -| com.mindspore.lite | [Version](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/Version.java) | Version is used to obtain the version information of MindSpore Lite. | diff --git a/docs/api_java/source_en/conf.py b/docs/api_java/source_en/conf.py deleted file mode 100644 index 4020d50f7b5f7a90b26785749cb1d41046b4723c..0000000000000000000000000000000000000000 --- a/docs/api_java/source_en/conf.py +++ /dev/null @@ -1,61 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. 
For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -# import sys -# sys.path.append('..') -# sys.path.insert(0, os.path.abspath('.')) - - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. 
-# -html_theme = 'sphinx_rtd_theme' - -html_static_path = ['_static'] \ No newline at end of file diff --git a/docs/api_java/source_en/index.rst b/docs/api_java/source_en/index.rst deleted file mode 100644 index 1a531e3f3a89cec32b3882e59a74980552ef0864..0000000000000000000000000000000000000000 --- a/docs/api_java/source_en/index.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Oct 13 10:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore Java API -======================= - -.. toctree:: - :glob: - :maxdepth: 1 - - class_list - lite_session - model - msconfig - mstensor - lite_java_example \ No newline at end of file diff --git a/docs/api_java/source_en/lite_java_example.rst b/docs/api_java/source_en/lite_java_example.rst deleted file mode 100644 index 84707e72802652c61facca91b67d642b9a3c2dd0..0000000000000000000000000000000000000000 --- a/docs/api_java/source_en/lite_java_example.rst +++ /dev/null @@ -1,9 +0,0 @@ -Example -======== - -.. toctree:: - :maxdepth: 1 - - Simple Demo - Android Application Development Based on Java Interface - High-level Usage \ No newline at end of file diff --git a/docs/api_java/source_en/lite_session.md b/docs/api_java/source_en/lite_session.md deleted file mode 100644 index da87e6d4f7291f42da4883d6fab769ce35e2ffa2..0000000000000000000000000000000000000000 --- a/docs/api_java/source_en/lite_session.md +++ /dev/null @@ -1,191 +0,0 @@ -# LiteSession - - - -```java -import com.mindspore.lite.LiteSession; -``` - -LiteSession defines session in MindSpore Lite for compiling Model and forwarding model. 
- -## Public Member Functions - -| function | -| ------------------------------------------------------------ | -| [boolean init(MSConfig config)](#init) | -| [void bindThread(boolean if_bind)](#bindthread) | -| [boolean compileGraph(Model model)](#compilegraph) | -| [boolean runGraph()](#rungraph) | -| [List getInputs()](#getinputs) | -| [MSTensor getInputsByTensorName(String tensorName)](#getinputsbytensorname) | -| [List getOutputsByNodeName(String nodeName)](#getoutputsbynodename) | -| [Map getOutputMapByTensor()](#getoutputmapbytensor) | -| [List getOutputTensorNames()](#getoutputtensornames) | -| [MSTensor getOutputByTensorName(String tensorName)](#getoutputbytensorname) | -| [boolean resize(List inputs, int[][] dims](#resize) | -| [void free()](#free) | - -## init - -```java -public boolean init(MSConfig config) -``` - -Initialize LiteSession. - -- Parameters - - - `MSConfig`: MSConfig class. - -- Returns - - Whether the initialization is successful. - -## bindThread - -```java -public void bindThread(boolean if_bind) -``` - -Attempt to bind or unbind threads in the thread pool to or from the specified cpu core. - -- Parameters - - - `if_bind`: Define whether to bind or unbind threads. - -## compileGraph - -```java -public boolean compileGraph(Model model) -``` - -Compile MindSpore Lite model. - -- Parameters - - - `Model`: Define the model to be compiled. - -- Returns - - Whether the compilation is successful. - -## runGraph - -```java -public boolean runGraph() -``` - -Run the session for inference. - -- Returns - - Whether the inference is successful. - -## getInputs - -```java -public List getInputs() -``` - -Get the MSTensors input of MindSpore Lite model. - -- Returns - - The vector of MindSpore Lite MSTensor. - -## getInputsByTensorName - -```java -public MSTensor getInputsByTensorName(String tensorName) -``` - -Get the MSTensors input of MindSpore Lite model by the node name. - -- Parameters - - - `tensorName`: Define the tensor name. 
- -- Returns - - MindSpore Lite MSTensor. - -## getOutputsByNodeName - -```java -public List getOutputsByNodeName(String nodeName) -``` - -Get the MSTensors output of MindSpore Lite model by the node name. - -- Parameters - - - `nodeName`: Define the node name. - -- Returns - - The vector of MindSpore Lite MSTensor. - -## getOutputMapByTensor - -```java -public Map getOutputMapByTensor() -``` - -Get the MSTensors output of the MindSpore Lite model associated with the tensor name. - -- Returns - - The map of output tensor name and MindSpore Lite MSTensor. - -## getOutputTensorNames - -```java -public List getOutputTensorNames() -``` - -Get the name of output tensors of the model compiled by this session. - -- Returns - - The vector of string as output tensor names in order. - -## getOutputByTensorName - -```java -public MSTensor getOutputByTensorName(String tensorName) -``` - -Get the MSTensors output of MindSpore Lite model by the tensor name. - -- Parameters - - - `tensorName`: Define the tensor name. - -- Returns - - Pointer of MindSpore Lite MSTensor. - -## resize - -```java -public boolean resize(List inputs, int[][] dims) -``` - -Resize inputs shape. - -- Parameters - - - `inputs`: Model inputs. - - `dims`: Define the new inputs shape. - -- Returns - - Whether the resize is successful. - -## free - -```java -public void free() -``` - -Free LiteSession. diff --git a/docs/api_java/source_en/model.md b/docs/api_java/source_en/model.md deleted file mode 100644 index a55a7f02ecd744ea2cdcb0748e8167369133b766..0000000000000000000000000000000000000000 --- a/docs/api_java/source_en/model.md +++ /dev/null @@ -1,65 +0,0 @@ -# Model - - - -```java -import com.mindspore.lite.Model; -``` - -Model defines model in MindSpore Lite for managing graph. 
- -## Public Member Functions - -| function | -| ------------------------------------------------------------ | -| [boolean loadModel(Context context, String modelName)](#loadmodel) | -| [boolean loadModel(String modelPath)](#loadmodel) | -| [void freeBuffer()](#freebuffer) | -| [void free()](#free) | - -## loadModel - -```java -public boolean loadModel(Context context, String modelName) -``` - -Load the MindSpore Lite model from Assets. - -- Parameters - - - `context`: Context in Android. - - `modelName`: Model file name. - -- Returns - - Whether the load is successful. - -```java -public boolean loadModel(String modelPath) -``` - -Load the MindSpore Lite model from path. - -- Parameters - - - `modelPath`: Model file path. - -- Returns - - Whether the load is successful. - -## freeBuffer - -```java -public void freeBuffer() -``` - -Free MetaGraph in MindSpore Lite Model to reduce memory usage during inference. - -## free - -```java -public void free() -``` - -Free all temporary memory in MindSpore Lite Model. diff --git a/docs/api_java/source_en/msconfig.md b/docs/api_java/source_en/msconfig.md deleted file mode 100644 index b939d556f463ed0d12c70af1b263ebae9fbfb7d8..0000000000000000000000000000000000000000 --- a/docs/api_java/source_en/msconfig.md +++ /dev/null @@ -1,101 +0,0 @@ -# MSConfig - - - -```java -import com.mindspore.lite.config.MSConfig; -``` - -MSConfig is defined for holding environment variables during runtime. 
- -## Public Member Functions - -| function | -| ------------------------------------------------------------ | -| [public boolean init(int deviceType, int threadNum, int cpuBindMode)](#init) | -| [boolean init(int deviceType, int threadNum, int cpuBindMode)](#init) | -| [boolean init(int deviceType, int threadNum)](#init) | -| [boolean init(int deviceType)](#init) | -| [boolean init()](#init) | -| [void free()](#free) | - -## init - -```java -public boolean init(int deviceType, int threadNum, int cpuBindMode, boolean enable_float16) -``` - -Initialize MSConfig. - -- Parameters - - - `deviceType`: A **[DeviceType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/DeviceType.java)** **enum** type.- `threadNum`: Thread number config for thread pool. - - `cpuBindMode`: A **[CpuBindMode](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/CpuBindMode.java)** **enum** variable. - - `enable_float16`:Whether to use float16 operator for priority. - -- Returns - - Whether the initialization is successful. - -```java -public boolean init(int deviceType, int threadNum, int cpuBindMode) -``` - -Initialize MSConfig. - -- Parameters - - - `deviceType`: A **[DeviceType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/DeviceType.java)** **enum** type. - - `threadNum`: Thread number config for thread pool. - - `cpuBindMode`: A **[CpuBindMode](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/CpuBindMode.java)** **enum** variable. - -- Returns - - Whether the initialization is successful. - -```java -public boolean init(int deviceType, int threadNum) -``` - -Initialize MSConfig, `cpuBindMode` defaults to `CpuBindMode.MID_CPU`. 
- -- Parameters - - - `deviceType`: A **[DeviceType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/DeviceType.java)** **enum** type. - - `threadNum`: Thread number config for thread pool. - -- Returns - - Whether the initialization is successful. - -```java -public boolean init(int deviceType) -``` - -Initialize MSConfig,`cpuBindMode` defaults to `CpuBindMode.MID_CPU`, `threadNum` defaults to `2`. - -- Parameters - - - `deviceType`: A **[DeviceType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/DeviceType.java)** **enum** type. - -- Returns - - Whether the initialization is successful. - -```java -public boolean init() -``` - -Initialize MSConfig,`deviceType` defaults to `DeviceType.DT_CPU`,`cpuBindMode` defaults to`CpuBindMode.MID_CPU`,`threadNum` defaults to `2`. - -- Returns - - Whether the initialization is successful. - -## free - -```java -public void free() -``` - -Free all temporary memory in MindSpore Lite MSConfig. diff --git a/docs/api_java/source_en/mstensor.md b/docs/api_java/source_en/mstensor.md deleted file mode 100644 index 1bea3918696f5b6552bb1fbdeb77f1abdfe7106b..0000000000000000000000000000000000000000 --- a/docs/api_java/source_en/mstensor.md +++ /dev/null @@ -1,151 +0,0 @@ -# MSTensor - - - -```java -import com.mindspore.lite.MSTensor; -``` - -MSTensor defined tensor in MindSpore Lite. 
- -## Public Member Functions - -| function | -| ------------------------------------------ | -| [int[] getShape()](#getshape) | -| [int getDataType()](#getdatatype) | -| [byte[] getByteData()](#getbytedata) | -| [float[] getFloatData()](#getfloatdata) | -| [int[] getIntData()](#getintdata) | -| [long[] getLongData()](#getlongdata) | -| [void setData(byte[] data)](#setdata) | -| [void setData(ByteBuffer data)](#setdata) | -| [long size()](#size) | -| [int elementsNum()](#elementsnum) | -| [void free()](#free) | - -## getShape - -```java -public int[] getShape() -``` - -Get the shape of the MindSpore Lite MSTensor. - -- Returns - - A array of int as the shape of the MindSpore Lite MSTensor. - -## getDataType - -```java -public int getDataType() -``` - -DataType is defined in [com.mindspore.lite.DataType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/DataType.java). - -- Returns - - The MindSpore Lite data type of the MindSpore Lite MSTensor class. - -## getByteData - -```java -public byte[] getByteData() -``` - -Get output data of MSTensor, the data type is byte. - -- Returns - - The byte array containing all MSTensor output data. - -## getFloatData - -```java -public float[] getFloatData() -``` - -Get output data of MSTensor, the data type is float. - -- Returns - - The float array containing all MSTensor output data. - -## getIntData - -```java -public int[] getIntData() -``` - -Get output data of MSTensor, the data type is int. - -- Returns - - The int array containing all MSTensor output data. - -## getLongData - -```java -public long[] getLongData() -``` - -Get output data of MSTensor, the data type is long. - -- Returns - - The long array containing all MSTensor output data. - -## setData - -```java -public void setData(byte[] data) -``` - -Set the input data of MSTensor. - -- Parameters - - - `data`: Input data of byte[] type. 
- -```java -public void setData(ByteBuffer data) -``` - -Set the input data of MSTensor. - -- Parameters - - - `data`: Input data of ByteBuffer type. - -## size - -```java -public long size() -``` - -Get the size of the data in MSTensor in bytes. - -- Returns - - The size of the data in MSTensor in bytes. - -## elementsNum - -```java -public int elementsNum() -``` - -Get the number of elements in MSTensor. - -- Returns - - The number of elements in MSTensor. - -## free - -```java -public void free() -``` - -Free all temporary memory in MindSpore Lite MSTensor. diff --git a/docs/api_java/source_zh_cn/_static/logo_notebook.png b/docs/api_java/source_zh_cn/_static/logo_notebook.png deleted file mode 100644 index 18c2e29e4b73ee428f70253feffdd855fdf0c422..0000000000000000000000000000000000000000 Binary files a/docs/api_java/source_zh_cn/_static/logo_notebook.png and /dev/null differ diff --git a/docs/api_java/source_zh_cn/_static/logo_source.png b/docs/api_java/source_zh_cn/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/api_java/source_zh_cn/_static/logo_source.png and /dev/null differ diff --git a/docs/api_java/source_zh_cn/class_list.md b/docs/api_java/source_zh_cn/class_list.md deleted file mode 100644 index 94520007021ca311bdae88963fdd47365a52ae31..0000000000000000000000000000000000000000 --- a/docs/api_java/source_zh_cn/class_list.md +++ /dev/null @@ -1,14 +0,0 @@ -# 类列表 - - - -| 包 | 类 | 描述 | -| ------------------------- | ------------------------------------------------------------ | ------------------------------------------------------------ | -| com.mindspore.lite.config | [MSConfig](https://www.mindspore.cn/doc/api_java/zh-CN/master/msconfig.html) | MSConfig用于保存执行期间的配置变量。 | -| com.mindspore.lite.config | 
[CpuBindMode](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/CpuBindMode.java) | CpuBindMode定义了CPU绑定模式。 | -| com.mindspore.lite.config | [DeviceType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/DeviceType.java) | DeviceType定义了后端设备类型。 | -| com.mindspore.lite | [LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html) | LiteSession定义了MindSpore Lite中的会话,用于进行Model的编译和前向推理。 | -| com.mindspore.lite | [Model](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html) | Model定义了MindSpore Lite中的模型,便于计算图管理。 | -| com.mindspore.lite | [MSTensor](https://www.mindspore.cn/doc/api_java/zh-CN/master/mstensor.html) | MSTensor定义了MindSpore Lite中的张量。 | -| com.mindspore.lite | [DataType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/DataType.java) | DataType定义了所支持的数据类型。 | -| com.mindspore.lite | [Version](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/Version.java) | Version用于获取MindSpore Lite的版本信息。 | diff --git a/docs/api_java/source_zh_cn/conf.py b/docs/api_java/source_zh_cn/conf.py deleted file mode 100644 index e3dfb2a0a9fc6653113e7b2bb878a5497ceb4a2b..0000000000000000000000000000000000000000 --- a/docs/api_java/source_zh_cn/conf.py +++ /dev/null @@ -1,65 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. 
If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -# import sys -# sys.path.append('..') -# sys.path.insert(0, os.path.abspath('.')) - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_search_language = 'zh' - -html_search_options = {'dict': '../../resource/jieba.txt'} - -html_static_path = ['_static'] \ No newline at end of file diff --git a/docs/api_java/source_zh_cn/index.rst b/docs/api_java/source_zh_cn/index.rst deleted file mode 100644 index 1a531e3f3a89cec32b3882e59a74980552ef0864..0000000000000000000000000000000000000000 --- a/docs/api_java/source_zh_cn/index.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Oct 13 10:00:00 2020. 
- You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore Java API -======================= - -.. toctree:: - :glob: - :maxdepth: 1 - - class_list - lite_session - model - msconfig - mstensor - lite_java_example \ No newline at end of file diff --git a/docs/api_java/source_zh_cn/lite_java_example.rst b/docs/api_java/source_zh_cn/lite_java_example.rst deleted file mode 100644 index d24db0e0c9937617617fd41d5880f2d1ac6815d6..0000000000000000000000000000000000000000 --- a/docs/api_java/source_zh_cn/lite_java_example.rst +++ /dev/null @@ -1,9 +0,0 @@ -样例 -======= - -.. toctree:: - :maxdepth: 1 - - 极简Demo - 基于Java接口的Android应用开发 - 高阶用法 \ No newline at end of file diff --git a/docs/api_java/source_zh_cn/lite_session.md b/docs/api_java/source_zh_cn/lite_session.md deleted file mode 100644 index 6df2bf110160847495a3279ce9c1b4dd0adc9fea..0000000000000000000000000000000000000000 --- a/docs/api_java/source_zh_cn/lite_session.md +++ /dev/null @@ -1,191 +0,0 @@ -# LiteSession - - - -```java -import com.mindspore.lite.LiteSession; -``` - -LiteSession定义了MindSpore Lite中的会话,用于进行Model的编译和前向推理。 - -## 公有成员函数 - -| function | -| ------------------------------------------------------------ | -| [boolean init(MSConfig config)](#init) | -| [void bindThread(boolean if_bind)](#bindthread) | -| [boolean compileGraph(Model model)](#compilegraph) | -| [boolean runGraph()](#rungraph) | -| [List getInputs()](#getinputs) | -| [MSTensor getInputsByTensorName(String tensorName)](#getinputsbytensorname) | -| [List getOutputsByNodeName(String nodeName)](#getoutputsbynodename) | -| [Map getOutputMapByTensor()](#getoutputmapbytensor) | -| [List getOutputTensorNames()](#getoutputtensornames) | -| [MSTensor getOutputByTensorName(String tensorName)](#getoutputbytensorname) | -| [boolean resize(List inputs, int[][] dims](#resize) | -| [void free()](#free) | - -## init - -```java -public boolean init(MSConfig config) -``` - -初始化LiteSession。 
- -- 参数 - - - `MSConfig`: MSConfig类。 - -- 返回值 - - 初始化是否成功。 - -## bindThread - -```java -public void bindThread(boolean if_bind) -``` - -尝试将线程池中的线程绑定到指定的CPU内核,或从指定的CPU内核进行解绑。 - -- 参数 - - - `if_bind`: 是否对线程进行绑定或解绑。 - -## compileGraph - -```java -public boolean compileGraph(Model model) -``` - -编译MindSpore Lite模型。 - -- 参数 - - - `Model`: 需要被编译的模型。 - -- 返回值 - - 编译是否成功。 - -## runGraph - -```java -public boolean runGraph() -``` - -运行图进行推理。 - -- 返回值 - - 推理是否成功。 - -## getInputs - -```java -public List getInputs() -``` - -获取MindSpore Lite模型的MSTensors输入。 - -- 返回值 - - 所有输入MSTensor组成的List。 - -## getInputsByTensorName - -```java -public MSTensor getInputByTensorName(String tensorName) -``` - -通过节点名获取MindSpore Lite模型的MSTensors输入。 - -- 参数 - - - `tensorName`: 张量名。 - -- 返回值 - - tensorName所对应的输入MSTensor。 - -## getOutputsByNodeName - -```java -public List getOutputsByNodeName(String nodeName) -``` - -通过节点名获取MindSpore Lite模型的MSTensors输出。 - -- 参数 - - - `nodeName`: 节点名。 - -- 返回值 - - 该节点所有输出MSTensor组成的List。 - -## getOutputMapByTensor - -```java -public Map getOutputMapByTensor() -``` - -获取与张量名相关联的MindSpore Lite模型的MSTensors输出。 - -- 返回值 - - 输出张量名和MSTensor的组成的Map。 - -## getOutputTensorNames - -```java -public List getOutputTensorNames() -``` - -获取由当前会话所编译的模型的输出张量名。 - -- 返回值 - - 按顺序排列的输出张量名组成的List。 - -## getOutputByTensorName - -```java -public MSTensor getOutputByTensorName(String tensorName) -``` - -通过张量名获取MindSpore Lite模型的MSTensors输出。 - -- 参数 - - - `tensorName`: 张量名。 - -- 返回值 - - 该张量所对应的MSTensor。 - -## resize - -```java -public boolean resize(List inputs, int[][] dims) -``` - -调整输入的形状。 - -- 参数 - - - `inputs`: 模型对应的所有输入。 - - `dims`: 输入对应的新的shape,顺序注意要与inputs一致。 - -- 返回值 - - 调整输入形状是否成功。 - -## free - -```java -public void free() -``` - -释放LiteSession。 diff --git a/docs/api_java/source_zh_cn/model.md b/docs/api_java/source_zh_cn/model.md deleted file mode 100644 index 3fbaed4fc2ff742b4ef8e7b490ec67e103c13cfd..0000000000000000000000000000000000000000 --- a/docs/api_java/source_zh_cn/model.md 
+++ /dev/null @@ -1,65 +0,0 @@ -# Model - - - -```java -import com.mindspore.lite.Model; -``` - -Model定义了MindSpore Lite中的模型,便于计算图管理。 - -## 公有成员函数 - -| function | -| ------------------------------------------------------------ | -| [boolean loadModel(Context context, String modelName)](#loadmodel) | -| [boolean loadModel(String modelPath)](#loadmodel) | -| [void freeBuffer()](#freebuffer) | -| [void free()](#free) | - -## loadModel - -```java -public boolean loadModel(Context context, String modelName) -``` - -导入Assets中的MindSpore Lite模型。 - -- 参数 - - - `context`: Android中的Context上下文 - - `modelName`: 模型文件名称 - -- 返回值 - - 是否导入成功 - -```java -public boolean loadModel(String modelPath) -``` - -导入modelPath中的ms模型。 - -- 参数 - - - `modelPath`: 模型文件路径 - -- 返回值 - - 是否导入成功 - -## freeBuffer - -```java -public void freeBuffer() -``` - -释放MindSpore Lite Model中的MetaGraph,用于减小运行时的内存。释放后该Model就不能再进行图编译了。 - -## free - -```java -public void free() -``` - -释放Model运行过程中动态分配的内存。 diff --git a/docs/api_java/source_zh_cn/msconfig.md b/docs/api_java/source_zh_cn/msconfig.md deleted file mode 100644 index 7f6fe542160d7724184fa523a6e93ca5b8582ff4..0000000000000000000000000000000000000000 --- a/docs/api_java/source_zh_cn/msconfig.md +++ /dev/null @@ -1,102 +0,0 @@ -# MSConfig - - - -```java -import com.mindspore.lite.config.MSConfig; -``` - -MSConfig类用于保存执行中的配置变量。 - -## 公有成员函数 - -| function | -| ------------------------------------------------------------ | -| [boolean init(int deviceType, int threadNum, int cpuBindMode, boolean enable_float16)](#init) | -| [boolean init(int deviceType, int threadNum, int cpuBindMode)](#init) | -| [boolean init(int deviceType, int threadNum)](#init) | -| [boolean init(int deviceType)](#init) | -| [boolean init()](#init) | -| [void free()](#free) | - -## init - -```java -public boolean init(int deviceType, int threadNum, int cpuBindMode, boolean enable_float16) -``` - -初始化MSConfig。 - -- 参数 - - - `deviceType`: 
设备类型,`deviceType`在[com.mindspore.lite.config.DeviceType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/DeviceType.java)中定义。 - - `threadNum`: 线程数。 - - `cpuBindMode`: CPU绑定模式,`cpuBindMode`在[com.mindspore.lite.config.CpuBindMode](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/CpuBindMode.java)中定义。 - - `enable_float16`:是否优先使用float16算子。 - -- 返回值 - - 初始化是否成功。 - -```java -public boolean init(int deviceType, int threadNum, int cpuBindMode) -``` - -初始化MSConfig,`enable_float16`默认为false。 - -- 参数 - - - `deviceType`: 设备类型,`deviceType`在[com.mindspore.lite.config.DeviceType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/DeviceType.java)中定义。 - - `threadNum`: 线程数。 - - `cpuBindMode`: CPU绑定模式,`cpuBindMode`在[com.mindspore.lite.config.CpuBindMode](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/CpuBindMode.java)中定义。 - -- 返回值 - - 初始化是否成功。 - -```java -public boolean init(int deviceType, int threadNum) -``` - -初始化MSConfig,`cpuBindMode`默认为`CpuBindMode.MID_CPU`,`enable_float16`默认为false。 - -- 参数 - - - `deviceType`: 设备类型,`deviceType`在[com.mindspore.lite.config.DeviceType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/DeviceType.java)中定义。 - - `threadNum`: 线程数。 - -- 返回值 - - 初始化是否成功。 - -```java -public boolean init(int deviceType) -``` - -初始化MSConfig,`cpuBindMode`默认为`CpuBindMode.MID_CPU`,`threadNum`默认为`2`,`enable_float16`默认为false。 - -- 参数 - - - `deviceType`: 设备类型,`deviceType`在[com.mindspore.lite.config.DeviceType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/config/DeviceType.java)中定义。 - -- 返回值 - - 初始化是否成功。 - -```java -public boolean init() -``` - 
-初始化MSConfig,`deviceType`默认为`DeviceType.DT_CPU`,`cpuBindMode`默认为`CpuBindMode.MID_CPU`,`threadNum`默认为`2`,`enable_float16`默认为false。 - -- 返回值 - - 初始化是否成功。 - -## free - -```java -public void free() -``` - -释放MSConfig运行过程中动态分配的内存。LiteSession init之后即可释放。 diff --git a/docs/api_java/source_zh_cn/mstensor.md b/docs/api_java/source_zh_cn/mstensor.md deleted file mode 100644 index fc050c2feb79e37f6017c2581afb5b4c96c4f726..0000000000000000000000000000000000000000 --- a/docs/api_java/source_zh_cn/mstensor.md +++ /dev/null @@ -1,151 +0,0 @@ -# MSTensor - - - -```java -import com.mindspore.lite.MSTensor; -``` - -MSTensor定义了MindSpore Lite中的张量。 - -## 公有成员函数 - -| function | -| ------------------------------------------ | -| [int[] getShape()](#getshape) | -| [int getDataType()](#getdatatype) | -| [byte[] getByteData()](#getbytedata) | -| [float[] getFloatData()](#getfloatdata) | -| [int[] getIntData()](#getintdata) | -| [long[] getLongData()](#getlongdata) | -| [void setData(byte[] data)](#setdata) | -| [void setData(ByteBuffer data)](#setdata) | -| [long size()](#size) | -| [int elementsNum()](#elementsnum) | -| [void free()](#free) | - -## getShape - -```java -public int[] getShape() -``` - -获取MindSpore Lite MSTensor的形状。 - -- 返回值 - - 一个包含MindSpore Lite MSTensor形状数值的整型数组。 - -## getDataType - -```java -public int getDataType() -``` - -DataType在[com.mindspore.lite.DataType](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/java/java/common/src/main/java/com/mindspore/lite/DataType.java)中定义。 - -- 返回值 - - MindSpore Lite MSTensor类的MindSpore Lite DataType。 - -## getByteData - -```java -public byte[] getByteData() -``` - -获得MSTensor的输出数据,数据类型为byte类型。 - -- 返回值 - - 包含所有MSTensor输出数据的byte类型数组。 - -## getFloatData - -```java -public float[] getFloatData() -``` - -获得MSTensor的输出数据,数据类型为float类型。 - -- 返回值 - - 包含所有MSTensor输出数据的float类型数组。 - -## getIntData - -```java -public int[] getIntData() -``` - -获得MSTensor的输出数据,数据类型为int类型。 - -- 返回值 - - 包含所有MSTensor输出数据的int类型数组。 - -## getLongData - 
-```java -public long[] getLongData() -``` - -获得MSTensor的输出数据,数据类型为long类型。 - -- 返回值 - - 包含所有MSTensor输出数据的long类型数组。 - -## setData - -```java -public void setData(byte[] data) -``` - -设定MSTensor的输入数据。 - -- 参数 - - - `data`: byte[]类型的输入数据。 - -```java -public void setData(ByteBuffer data) -``` - -设定MSTensor的输入数据。 - -- 参数 - - - `data`: ByteBuffer类型的输入数据。 - -## size - -```java -public long size() -``` - -获取MSTensor中的数据的字节数大小。 - -- 返回值 - - MSTensor中的数据的字节数大小。 - -## elementsNum - -```java -public int elementsNum() -``` - -获取MSTensor中的元素个数。 - -- 返回值 - - MSTensor中的元素个数。 - -## free - -```java -public void free() -``` - -释放MSTensor运行过程中动态分配的内存。 diff --git a/docs/api_python/Makefile b/docs/api_python/Makefile deleted file mode 100644 index 09426dd648c1647540925be5c424ded33472fb3e..0000000000000000000000000000000000000000 --- a/docs/api_python/Makefile +++ /dev/null @@ -1,28 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source_zh_cn -BUILDDIR = build_zh_cn - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
-%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -EXTRADIR = $(SOURCEDIR)/mindspore -EXTRADIR_MQ = $(SOURCEDIR)/mindquantum - -.PHONY: clean - -clean: - -rm -rf $(BUILDDIR)/* $(EXTRADIR)/ops $(EXTRADIR)/nn $(EXTRADIR)/nn_probability $(EXTRADIR)/dataset $(EXTRADIR)/dataset_vision $(EXTRADIR)/dataset_transforms $(EXTRADIR)/dataset_text $(EXTRADIR)/text $(EXTRADIR)/numpy $(EXTRADIR_MQ)/nn diff --git a/docs/api_python/numpy_objects.inv b/docs/api_python/numpy_objects.inv deleted file mode 100644 index e8aa0b26f2488b5309e394cd50b5e0607a88a1a7..0000000000000000000000000000000000000000 Binary files a/docs/api_python/numpy_objects.inv and /dev/null differ diff --git a/docs/api_python/python_objects.inv b/docs/api_python/python_objects.inv deleted file mode 100644 index 925d2e03e5e3bd19094c799d34ca6b7803725146..0000000000000000000000000000000000000000 Binary files a/docs/api_python/python_objects.inv and /dev/null differ diff --git a/docs/api_python/requirements.txt b/docs/api_python/requirements.txt deleted file mode 100644 index 162b50040286bb9a0177801c580a31013082a360..0000000000000000000000000000000000000000 --- a/docs/api_python/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -sphinx >= 2.2.1, <= 2.4.4 -recommonmark -sphinx-markdown-tables -sphinx_rtd_theme -numpy -jieba diff --git a/docs/api_python/run.sh b/docs/api_python/run.sh deleted file mode 100644 index b32f375df6612ce9b0d14d3c4e3c61903fd28592..0000000000000000000000000000000000000000 --- a/docs/api_python/run.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -set -e -make html - -if [ $? 
-ne 0 ]; then - echo "make html failed" - exit -fi - -cd build_zh_cn/html -python -m http.server diff --git a/docs/api_python/source_en/_ext/my_signature.py b/docs/api_python/source_en/_ext/my_signature.py deleted file mode 100644 index 627b12cc4dc55811fcfbae956f082b784de3797e..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/_ext/my_signature.py +++ /dev/null @@ -1,349 +0,0 @@ -""" -Rewrote the Signature module that fix default signature error for autodoc module. -""" - -import inspect -import re -import types -import functools - - -def _sort_param(param_list, target_str): - """Sort param_list as default order.""" - ls = [] - for param_name in param_list: - ls.append((param_name, target_str.find(param_name))) - ls.sort(key=lambda x: x[1], reverse=False) - ls = [i[0] for i in ls] - return ls - - -def get_default_params(func): - """ Get the default signatures from function. """ - source_code = inspect.getsource(func) - func_code = func.__code__ - pos_count = func_code.co_argcount - arg_names = func_code.co_varnames - karg_pos = func_code.co_kwonlyargcount - kwargs_num = arg_names.count("args") + arg_names.count("kwargs") - all_param_names = list(arg_names[:pos_count+karg_pos+kwargs_num]) - all_params = re.findall(r"def [\w_\d\-]+\(([\S\s]*?)\):", source_code)[0].replace("\n", "").replace("'", "\"") - - # sub null spaces from matched all param str. - re_space_sub = re.compile(r",\s+") - all_params = re_space_sub.sub(",", all_params) - - all_param_names = _sort_param(all_param_names, all_params) - - # sub the extra "=" from param. 
- re_equate_sub = re.compile("=") - - re_defaults_param = re.compile(r"(.*?)".join(all_param_names) + r"(.*)") - defaults_params = re_defaults_param.findall(all_params) - if defaults_params: - if isinstance(defaults_params[0], tuple): - defaults_params = list(defaults_params[0]) - defaults_params_list = [] - for i in defaults_params: - if "=" in i and i: - i = re_equate_sub.sub("", i, count=1).strip(",") - if i[:6] == "lambda": - i = "<" + i + ">" - defaults_params_list.append(i) - defaults_params_tuple = tuple(defaults_params_list) - return defaults_params_tuple - return func.__defaults__ - - -def _my_signature_from_function(cls, func): - """Private helper: constructs Signature for the given python function.""" - - is_duck_function = False - if not inspect.isfunction(func): - if inspect._signature_is_functionlike(func): # pylint: disable=protected-access - is_duck_function = True - else: - # If it's not a pure Python function, and not a duck type - # of pure function: - raise TypeError('{!r} is not a Python function'.format(func)) - - Parameter = cls._parameter_cls # pylint: disable=protected-access - - # Parameter information._partialmethod - func_code = func.__code__ - pos_count = func_code.co_argcount - arg_names = func_code.co_varnames - positional = tuple(arg_names[:pos_count]) - keyword_only_count = func_code.co_kwonlyargcount - keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)] - annotations = func.__annotations__ - defaults = get_default_params(func) - kwdefaults = func.__kwdefaults__ - pos_defaults = func.__defaults__ - - if pos_defaults: - pos_default_count = len(pos_defaults) - else: - pos_default_count = 0 - - parameters = [] - - # Non-keyword-only parameters w/o defaults. 
- non_default_count = pos_count - pos_default_count - for name in positional[:non_default_count]: - annotation = annotations.get(name, inspect._empty) # pylint: disable=protected-access - parameters.append(Parameter(name, annotation=annotation, - kind=inspect._POSITIONAL_OR_KEYWORD)) # pylint: disable=protected-access - - # ... w/ defaults. - for offset, name in enumerate(positional[non_default_count:]): - annotation = annotations.get(name, inspect._empty) # pylint: disable=protected-access - parameters.append(Parameter(name, annotation=annotation, - kind=inspect._POSITIONAL_OR_KEYWORD, # pylint: disable=protected-access - default=defaults[offset])) - - # *args - if func_code.co_flags & inspect.CO_VARARGS: - name = arg_names[pos_count + keyword_only_count] - annotation = annotations.get(name, inspect._empty) # pylint: disable=protected-access - parameters.append(Parameter(name, annotation=annotation, - kind=inspect._VAR_POSITIONAL)) # pylint: disable=protected-access - - # Keyword-only parameters. - for name in keyword_only: - default = inspect._empty # pylint: disable=protected-access - if kwdefaults is not None: - default = kwdefaults.get(name, inspect._empty) # pylint: disable=protected-access - - annotation = annotations.get(name, inspect._empty) # pylint: disable=protected-access - parameters.append(Parameter(name, annotation=annotation, - kind=inspect._KEYWORD_ONLY, # pylint: disable=protected-access - default=default)) - # **kwargs - if func_code.co_flags & inspect.CO_VARKEYWORDS: - index = pos_count + keyword_only_count - if func_code.co_flags & inspect.CO_VARARGS: - index += 1 - - name = arg_names[index] - annotation = annotations.get(name, inspect._empty) # pylint: disable=protected-access - parameters.append(Parameter(name, annotation=annotation, - kind=inspect._VAR_KEYWORD)) # pylint: disable=protected-access - - # Is 'func' is a pure Python function - don't validate the - # parameters list (for correct order and defaults), it should be OK. 
- return cls(parameters, - return_annotation=annotations.get('return', inspect._empty), # pylint: disable=protected-access - __validate_parameters__=is_duck_function) - - -def _my_signature_from_callable(obj, *, - follow_wrapper_chains=True, - skip_bound_arg=True, - sigcls): - """Private helper function to get signature for arbitrary - callable objects. - """ - - if not callable(obj): - raise TypeError('{!r} is not a callable object'.format(obj)) - - if isinstance(obj, types.MethodType): - # In this case we skip the first parameter of the underlying - # function (usually `self` or `cls`). - sig = _my_signature_from_callable( - obj.__func__, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - - if skip_bound_arg: - return inspect._signature_bound_method(sig) # pylint: disable=protected-access - return sig - - # Was this function wrapped by a decorator? - if follow_wrapper_chains: - obj = inspect.unwrap(obj, stop=(lambda f: hasattr(f, "__signature__"))) - if isinstance(obj, types.MethodType): - # If the unwrapped object is a *method*, we might want to - # skip its first parameter (self). - # See test_signature_wrapped_bound_method for details. 
- return _my_signature_from_callable( - obj, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - - try: - sig = obj.__signature__ - except AttributeError: - pass - else: - if sig is not None: - if not isinstance(sig, MySignature): - raise TypeError( - 'unexpected object {!r} in __signature__ ' - 'attribute'.format(sig)) - return sig - - try: - partialmethod = obj._partialmethod # pylint: disable=protected-access - except AttributeError: - pass - else: - if isinstance(partialmethod, functools.partialmethod): - # Unbound partialmethod (see functools.partialmethod) - # This means, that we need to calculate the signature - # as if it's a regular partial object, but taking into - # account that the first positional argument - # (usually `self`, or `cls`) will not be passed - # automatically (as for boundmethods) - - wrapped_sig = _my_signature_from_callable( - partialmethod.func, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - - sig = inspect._signature_get_partial(wrapped_sig, partialmethod, (None,)) # pylint: disable=protected-access - first_wrapped_param = tuple(wrapped_sig.parameters.values())[0] - if first_wrapped_param.kind is Parameter.VAR_POSITIONAL: # pylint: disable=no-else-return - # First argument of the wrapped callable is `*args`, as in - # `partialmethod(lambda *args)`. 
- return sig - else: - sig_params = tuple(sig.parameters.values()) - assert (not sig_params or - first_wrapped_param is not sig_params[0]) - new_params = (first_wrapped_param,) + sig_params - return sig.replace(parameters=new_params) - - if inspect.isfunction(obj) or inspect._signature_is_functionlike(obj): # pylint: disable=protected-access - # If it's a pure Python function, or an object that is duck type - # of a Python function (Cython functions, for instance), then: - return _my_signature_from_function(sigcls, obj) - - if inspect._signature_is_builtin(obj): # pylint: disable=protected-access - return inspect._signature_from_builtin(sigcls, obj, # pylint: disable=protected-access - skip_bound_arg=skip_bound_arg) - - if isinstance(obj, functools.partial): - wrapped_sig = _my_signature_from_callable( - obj.func, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - return inspect._signature_get_partial(wrapped_sig, obj) # pylint: disable=protected-access - - sig = None - if isinstance(obj, type): - # obj is a class or a metaclass - - # First, let's see if it has an overloaded __call__ defined - # in its metaclass - call = inspect._signature_get_user_defined_method(type(obj), '__call__') # pylint: disable=protected-access - if call is not None: - sig = _my_signature_from_callable( - call, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - else: - # Now we check if the 'obj' class has a '__new__' method - new = inspect._signature_get_user_defined_method(obj, '__new__') # pylint: disable=protected-access - if new is not None: - sig = _my_signature_from_callable( - new, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - else: - # Finally, we should have at least __init__ implemented - init = inspect._signature_get_user_defined_method(obj, '__init__') # pylint: disable=protected-access - if init is not None: - sig = 
_my_signature_from_callable( - init, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - - if sig is None: - # At this point we know, that `obj` is a class, with no user- - # defined '__init__', '__new__', or class-level '__call__' - - for base in obj.__mro__[:-1]: - # Since '__text_signature__' is implemented as a - # descriptor that extracts text signature from the - # class docstring, if 'obj' is derived from a builtin - # class, its own '__text_signature__' may be 'None'. - # Therefore, we go through the MRO (except the last - # class in there, which is 'object') to find the first - # class with non-empty text signature. - try: - text_sig = base.__text_signature__ - except AttributeError: - pass - else: - if text_sig: - # If 'obj' class has a __text_signature__ attribute: - # return a signature based on it - return inspect._signature_fromstr(sigcls, obj, text_sig) # pylint: disable=protected-access - - # No '__text_signature__' was found for the 'obj' class. - # Last option is to check if its '__init__' is - # object.__init__ or type.__init__. - if type not in obj.__mro__: - # We have a class (not metaclass), but no user-defined - # __init__ or __new__ for it - if (obj.__init__ is object.__init__ and # pylint: disable=no-else-return - obj.__new__ is object.__new__): - # Return a signature of 'object' builtin. 
- return sigcls.from_callable(object) - else: - raise ValueError( - 'no signature found for builtin type {!r}'.format(obj)) - - elif not isinstance(obj, inspect._NonUserDefinedCallables): # pylint: disable=protected-access - # An object with __call__ - # We also check that the 'obj' is not an instance of - # _WrapperDescriptor or _MethodWrapper to avoid - # infinite recursion (and even potential segfault) - call = inspect._signature_get_user_defined_method(type(obj), '__call__') # pylint: disable=protected-access - if call is not None: - try: - sig = _my_signature_from_callable( - call, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - except ValueError as ex: - msg = 'no signature found for {!r}'.format(obj) - raise ValueError(msg) from ex - - if sig is not None: - # For classes and objects we skip the first parameter of their - # __call__, __new__, or __init__ methods - if skip_bound_arg: - return inspect._signature_bound_method(sig) # pylint: disable=protected-access - return sig - - if isinstance(obj, types.BuiltinFunctionType): - # Raise a nicer error message for builtins - msg = 'no signature found for builtin function {!r}'.format(obj) - raise ValueError(msg) - - raise ValueError('callable {!r} is not supported by signature'.format(obj)) - - -class MySignature(inspect.Signature): - - @classmethod - def from_callable(cls, obj, *, follow_wrapped=True): - """Constructs Signature for the given callable object.""" - return _my_signature_from_callable(obj, sigcls=cls, - follow_wrapper_chains=follow_wrapped) - - -def signature(obj, *, follow_wrapped=True): - """Get a signature object for the passed callable.""" - return MySignature.from_callable(obj, follow_wrapped=follow_wrapped) diff --git a/docs/api_python/source_en/_static/logo_notebook.png b/docs/api_python/source_en/_static/logo_notebook.png deleted file mode 100644 index 18c2e29e4b73ee428f70253feffdd855fdf0c422..0000000000000000000000000000000000000000 Binary 
files a/docs/api_python/source_en/_static/logo_notebook.png and /dev/null differ diff --git a/docs/api_python/source_en/_static/logo_source.png b/docs/api_python/source_en/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/api_python/source_en/_static/logo_source.png and /dev/null differ diff --git a/docs/api_python/source_en/_templates/classtemplate.rst b/docs/api_python/source_en/_templates/classtemplate.rst deleted file mode 100644 index fd88815f7b49e1cd25195fc8eceba498eafe780c..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/_templates/classtemplate.rst +++ /dev/null @@ -1,24 +0,0 @@ -.. role:: hidden - :class: hidden-section - -.. currentmodule:: {{ module }} - -{% if objname in [] %} -{{ fullname | underline }} - -.. autofunction:: {{ fullname }} -{% elif objname[0].istitle() %} -{{ fullname | underline }} - -.. autoclass:: {{ name }} - :members: - -{% else %} -{{ fullname | underline }} - -.. autofunction:: {{ fullname }} -{% endif %} - -.. - autogenerated from _templates/classtemplate.rst - note it does not have :inherited-members: diff --git a/docs/api_python/source_en/_templates/classtemplate_inherited.rst b/docs/api_python/source_en/_templates/classtemplate_inherited.rst deleted file mode 100644 index 8f4a423dca6e678c191df73d142e4e52a862a3db..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/_templates/classtemplate_inherited.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. role:: hidden - :class: hidden-section - -.. currentmodule:: {{ module }} - -{% if objname[0].istitle() %} -{{ fullname | underline }} - -.. autoclass:: {{ name }} - :inherited-members: - :members: - -{% elif fullname=="mindspore.numpy.ix_" %} - -mindspore.numpy.ix\_ -==================== - -.. autofunction:: mindspore.numpy.ix_ - -{% else %} -{{ fullname | underline }} - -.. autofunction:: {{ fullname }} -{% endif %} - -.. 
autogenerated from _templates/classtemplate_inherited.rst \ No newline at end of file diff --git a/docs/api_python/source_en/_templates/classtemplate_probability.rst b/docs/api_python/source_en/_templates/classtemplate_probability.rst deleted file mode 100644 index 6329880e1fc540de910b25d1724a2cfba8d501f2..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/_templates/classtemplate_probability.rst +++ /dev/null @@ -1,13 +0,0 @@ -.. role:: hidden - :class: hidden-section - -.. currentmodule:: {{ module }} - -{{ fullname | underline }} - -.. autoclass:: {{ name }} - :members: - -.. - autogenerated from _templates/classtemplate.rst - note it does not have :inherited-members: diff --git a/docs/api_python/source_en/conf.py b/docs/api_python/source_en/conf.py deleted file mode 100644 index 816132a1181e626a462a640dd3032bf4ed4cab70..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/conf.py +++ /dev/null @@ -1,404 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# - -import os -import re -import sys -sys.path.append(os.path.abspath('./_ext')) -import sphinx.ext.autosummary.generate as g -from sphinx.ext import autodoc as sphinx_autodoc -from sphinx.util import inspect as sphinx_inspect -from sphinx.domains import python as sphinx_domain_python -from textwrap import dedent -# sys.path.insert(0, os.path.abspath('.')) - -import mindspore -# If you don't want to generate MindArmour APIs, comment this line. 
-import mindarmour -# If you don't want to generate MindSpore_Hub APIs, comment this line. -import mindspore_hub -# If you don't want to generate MindSpore_Serving APIs, comment this line. -import mindspore_serving - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.napoleon', - 'sphinx.ext.viewcode', - 'sphinx_markdown_tables', - 'recommonmark', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -autodoc_inherit_docstrings = False - -autosummary_generate = True - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_static_path = ['_static'] - -# -- Options for Texinfo output ------------------------------------------- - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - 'python': ('https://docs.python.org/', '../python_objects.inv'), - 'numpy': ('https://docs.scipy.org/doc/numpy/', '../numpy_objects.inv'), -} - -from typing import List, Tuple -from docutils.nodes import Node - -from sphinx.locale import __ -from sphinx.ext.autosummary import Autosummary, posixpath, addnodes, logger, Matcher, autosummary_toc, get_import_prefixes_from_env -from sphinx.ext.autosummary import mock, StringList, ModuleType, get_documenter, ModuleAnalyzer, PycodeError, mangle_signature -from sphinx.ext.autosummary import import_by_name, extract_summary, autosummary_table, nodes, switch_source_input, rst -from sphinx.ext.autodoc.directive import DocumenterBridge, Options - -class MsAutosummary(Autosummary): - """ - Inherited from sphinx's autosummary, add titles and a column for the generated table. - """ - - def init(self): - """ - init method - """ - self.find_doc_name = "" - self.third_title = "" - self.default_doc = "" - - def extract_env_summary(self, doc: List[str]) -> str: - """Extract env summary from docstring.""" - env_sum = self.default_doc - for i, piece in enumerate(doc): - if piece.startswith(self.find_doc_name): - env_sum = doc[i+1][4:] - return env_sum - - def run(self): - """ - run method - """ - self.init() - self.bridge = DocumenterBridge(self.env, self.state.document.reporter, - Options(), self.lineno, self.state) - - names = [x.strip().split()[0] for x in self.content - if x.strip() and re.search(r'^[~a-zA-Z_]', x.strip()[0])] - items = self.get_items(names) - teble_nodes = self.get_table(items) - - if 'toctree' in self.options: - dirname = posixpath.dirname(self.env.docname) - - tree_prefix = self.options['toctree'].strip() - docnames = [] - excluded = Matcher(self.config.exclude_patterns) - for item in items: - docname = posixpath.join(tree_prefix, item[3]) - docname = posixpath.normpath(posixpath.join(dirname, docname)) - if docname not in self.env.found_docs: - location = 
self.state_machine.get_source_and_line(self.lineno) - if excluded(self.env.doc2path(docname, None)): - msg = __('autosummary references excluded document %r. Ignored.') - else: - msg = __('autosummary: stub file not found %r. ' - 'Check your autosummary_generate setting.') - logger.warning(msg, item[3], location=location) - continue - docnames.append(docname) - - if docnames: - tocnode = addnodes.toctree() - tocnode['includefiles'] = docnames - tocnode['entries'] = [(None, docn) for docn in docnames] - tocnode['maxdepth'] = -1 - tocnode['glob'] = None - teble_nodes.append(autosummary_toc('', '', tocnode)) - return teble_nodes - - def get_items(self, names: List[str]) -> List[Tuple[str, str, str, str, str]]: - """Try to import the given names, and return a list of - ``[(name, signature, summary_string, real_name, env_summary), ...]``. - """ - prefixes = get_import_prefixes_from_env(self.env) - items = [] # type: List[Tuple[str, str, str, str, str]] - max_item_chars = 50 - - for name in names: - display_name = name - if name.startswith('~'): - name = name[1:] - display_name = name.split('.')[-1] - try: - with mock(self.config.autosummary_mock_imports): - real_name, obj, parent, modname = import_by_name(name, prefixes=prefixes) - except ImportError: - logger.warning(__('failed to import %s'), name) - items.append((name, '', '', name, '')) - continue - - self.bridge.result = StringList() # initialize for each documenter - full_name = real_name - if not isinstance(obj, ModuleType): - # give explicitly separated module name, so that members - # of inner classes can be documented - full_name = modname + '::' + full_name[len(modname) + 1:] - # NB. 
using full_name here is important, since Documenters - # handle module prefixes slightly differently - doccls = get_documenter(self.env.app, obj, parent) - documenter = doccls(self.bridge, full_name) - - if not documenter.parse_name(): - logger.warning(__('failed to parse name %s'), real_name) - items.append((display_name, '', '', real_name, '')) - continue - if not documenter.import_object(): - logger.warning(__('failed to import object %s'), real_name) - items.append((display_name, '', '', real_name, '')) - continue - if documenter.options.members and not documenter.check_module(): - continue - - # try to also get a source code analyzer for attribute docs - try: - documenter.analyzer = ModuleAnalyzer.for_module( - documenter.get_real_modname()) - # parse right now, to get PycodeErrors on parsing (results will - # be cached anyway) - documenter.analyzer.find_attr_docs() - except PycodeError as err: - logger.debug('[autodoc] module analyzer failed: %s', err) - # no source file -- e.g. for builtin and C modules - documenter.analyzer = None - - # -- Grab the signature - - try: - sig = documenter.format_signature(show_annotation=False) - except TypeError: - # the documenter does not support ``show_annotation`` option - sig = documenter.format_signature() - - if not sig: - sig = '' - else: - max_chars = max(10, max_item_chars - len(display_name)) - sig = mangle_signature(sig, max_chars=max_chars) - - # -- Grab the summary - - documenter.add_content(None) - summary = extract_summary(self.bridge.result.data[:], self.state.document) - env_sum = self.extract_env_summary(self.bridge.result.data[:]) - items.append((display_name, sig, summary, real_name, env_sum)) - - return items - - def get_table(self, items: List[Tuple[str, str, str, str, str]]) -> List[Node]: - """Generate a proper list of table nodes for autosummary:: directive. - - *items* is a list produced by :meth:`get_items`. 
- """ - table_spec = addnodes.tabular_col_spec() - table_spec['spec'] = r'\X{1}{2}\X{1}{2}' - - table = autosummary_table('') - real_table = nodes.table('', classes=['longtable']) - table.append(real_table) - group = nodes.tgroup('', cols=3) - real_table.append(group) - group.append(nodes.colspec('', colwidth=10)) - group.append(nodes.colspec('', colwidth=70)) - group.append(nodes.colspec('', colwidth=30)) - body = nodes.tbody('') - group.append(body) - - def append_row(*column_texts: str) -> None: - row = nodes.row('', color="red") - source, line = self.state_machine.get_source_and_line() - for text in column_texts: - node = nodes.paragraph('') - vl = StringList() - vl.append(text, '%s:%d:' % (source, line)) - with switch_source_input(self.state, vl): - self.state.nested_parse(vl, 0, node) - try: - if isinstance(node[0], nodes.paragraph): - node = node[0] - except IndexError: - pass - row.append(nodes.entry('', node)) - body.append(row) - - # add table's title - append_row("**API Name**", "**Description**", self.third_title) - for name, sig, summary, real_name, env_sum in items: - qualifier = 'obj' - if 'nosignatures' not in self.options: - col1 = ':%s:`%s <%s>`\\ %s' % (qualifier, name, real_name, rst.escape(sig)) - else: - col1 = ':%s:`%s <%s>`' % (qualifier, name, real_name) - col2 = summary - col3 = env_sum - append_row(col1, col2, col3) - - return [table_spec, table] - - -class MsNoteAutoSummary(MsAutosummary): - """ - Inherited from MsAutosummary. Add a third column about `Note` to the table. - """ - - def init(self): - """ - init method - """ - self.find_doc_name = ".. note::" - self.third_title = "**Note**" - self.default_doc = "None" - - def extract_env_summary(self, doc: List[str]) -> str: - """Extract env summary from docstring.""" - env_sum = self.default_doc - for piece in doc: - if piece.startswith(self.find_doc_name): - env_sum = piece[10:] - return env_sum - - -class MsPlatformAutoSummary(MsAutosummary): - """ - Inherited from MsAutosummary. 
Add a third column about `Supported Platforms` to the table. - """ - def init(self): - """ - init method - """ - self.find_doc_name = "Supported Platforms:" - self.third_title = "**{}**".format(self.find_doc_name[:-1]) - self.default_doc = "To Be Developed" - - -def setup(app): - app.add_directive('msplatformautosummary', MsPlatformAutoSummary) - app.add_directive('msnoteautosummary', MsNoteAutoSummary) - -# Modify regex for sphinx.ext.autosummary.generate.find_autosummary_in_lines. -gfile_abs_path = os.path.abspath(g.__file__) -autosummary_re_line_old = r"autosummary_re = re.compile(r'^(\s*)\.\.\s+autosummary::\s*')" -autosummary_re_line_new = r"autosummary_re = re.compile(r'^(\s*)\.\.\s+(ms[a-z]*)?autosummary::\s*')" -with open(gfile_abs_path, "r+", encoding="utf8") as f: - data = f.read() - data = data.replace(autosummary_re_line_old, autosummary_re_line_new) - f.seek(0) - f.write(data) - -# Modify regex for sphinx.ext.autosummary.generate.find_autosummary_in_lines. -gfile_abs_path = os.path.abspath(g.__file__) -autosummary_re_line_old = r"autosummary_re = re.compile(r'^(\s*)\.\.\s+autosummary::\s*')" -autosummary_re_line_new = r"autosummary_re = re.compile(r'^(\s*)\.\.\s+(ms[a-z]*)?autosummary::\s*')" -with open(gfile_abs_path, "r+", encoding="utf8") as f: - data = f.read() - data = data.replace(autosummary_re_line_old, autosummary_re_line_new) - f.seek(0) - f.write(data) - -# Modify default signatures for autodoc. 
-autodoc_source_path = os.path.abspath(sphinx_autodoc.__file__) -inspect_source_path = os.path.abspath(sphinx_inspect.__file__) -autodoc_source_re = re.compile(r"(\s+)args = self\.format_args\(\*\*kwargs\)") -inspect_source_code_str = """signature = inspect.signature(subject)""" -inspect_target_code_str = """signature = my_signature.signature(subject)""" -autodoc_source_code_str = """args = self.format_args(**kwargs)""" -is_autodoc_code_str = """args = args.replace("'", "")""" -with open(autodoc_source_path, "r+", encoding="utf8") as f: - code_str = f.read() - if is_autodoc_code_str not in code_str: - code_str_lines = code_str.split("\n") - autodoc_target_code_str = None - for line in code_str_lines: - re_matched_str = autodoc_source_re.search(line) - if re_matched_str: - space_num = re_matched_str.group(1) - autodoc_target_code_str = dedent("""\ - {0} - {1}if type(args) != type(None): - {1} {2}""".format(autodoc_source_code_str, space_num, is_autodoc_code_str)) - break - if autodoc_target_code_str: - code_str = code_str.replace(autodoc_source_code_str, autodoc_target_code_str) - f.seek(0) - f.truncate() - f.write(code_str) -with open(inspect_source_path, "r+", encoding="utf8") as g: - code_str = g.read() - if inspect_target_code_str not in code_str: - code_str = code_str.replace(inspect_source_code_str, inspect_target_code_str) - if "import my_signature" not in code_str: - code_str = code_str.replace("import sys", "import sys\nimport my_signature") - g.seek(0) - g.truncate() - g.write(code_str) - -# remove extra space for default params for autodoc. 
-sphinx_domain_python_source_path = os.path.abspath(sphinx_domain_python.__file__) -python_code_source = """for argument in arglist.split(','):""" -python_code_target = """for argument in [" " + i if num > 1 else i for num,i in enumerate(arglist.split(", "))]:""" -with open(sphinx_domain_python_source_path, "r+", encoding="utf8") as f: - code_str = f.read() - if python_code_target not in code_str: - code_str = code_str.replace(python_code_source, python_code_target) - f.seek(0) - f.truncate() - f.write(code_str) diff --git a/docs/api_python/source_en/index.rst b/docs/api_python/source_en/index.rst deleted file mode 100644 index 1b2eac3b2697aca4ce51ea69cf56528c9c27f0bf..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/index.rst +++ /dev/null @@ -1,63 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 11:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore Python API -======================= - -.. toctree:: - :maxdepth: 1 - :caption: MindSpore Python API - - mindspore/mindspore - mindspore/mindspore.common.initializer - mindspore/mindspore.communication - mindspore/mindspore.compression - mindspore/mindspore.context - mindspore/mindspore.dataset - mindspore/mindspore.dataset.config - mindspore/mindspore.dataset.text - mindspore/mindspore.dataset.transforms - mindspore/mindspore.dataset.vision - mindspore/mindspore.explainer - mindspore/mindspore.mindrecord - mindspore/mindspore.nn - mindspore/mindspore.numpy - mindspore/mindspore.nn.probability - mindspore/mindspore.ops - mindspore/mindspore.profiler - mindspore/mindspore.train - -.. 
toctree:: - :maxdepth: 1 - :caption: MindArmour Python API - - mindarmour/mindarmour - mindarmour/mindarmour.adv_robustness.attacks - mindarmour/mindarmour.adv_robustness.defenses - mindarmour/mindarmour.adv_robustness.detectors - mindarmour/mindarmour.adv_robustness.evaluations - mindarmour/mindarmour.fuzz_testing - mindarmour/mindarmour.privacy.diff_privacy - mindarmour/mindarmour.privacy.evaluation - mindarmour/mindarmour.privacy.sup_privacy - mindarmour/mindarmour.utils - -.. toctree:: - :maxdepth: 1 - :caption: MindSpore Hub Python API - - mindspore_hub/mindspore_hub - -.. toctree:: - :maxdepth: 1 - :caption: MindSpore Serving Python API - - mindspore_serving/mindspore_serving - -.. toctree:: - :maxdepth: 1 - :caption: MindQuantum Python API - - mindquantum/mindquantum \ No newline at end of file diff --git a/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.attacks.rst b/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.attacks.rst deleted file mode 100644 index 5b38b93fe907365e3ca531ccf124a64daf2345de..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.attacks.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.adv_robustness.attacks -================================= - -.. automodule:: mindarmour.adv_robustness.attacks - :members: diff --git a/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.defenses.rst b/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.defenses.rst deleted file mode 100644 index 5b01e203ae9d4b5345b70e490313afe174a26bdc..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.defenses.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.adv_robustness.defenses -================================== - -.. 
automodule:: mindarmour.adv_robustness.defenses - :members: diff --git a/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.detectors.rst b/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.detectors.rst deleted file mode 100644 index d82a5b18194bf345455aade22499f17c41ef9849..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.detectors.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.adv_robustness.detectors -=================================== - -.. automodule:: mindarmour.adv_robustness.detectors - :members: diff --git a/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.evaluations.rst b/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.evaluations.rst deleted file mode 100644 index 19aac889512a9671ea786d299b1a788c1daa68e1..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindarmour/mindarmour.adv_robustness.evaluations.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.adv_robustness.evaluations -===================================== - -.. automodule:: mindarmour.adv_robustness.evaluations - :members: diff --git a/docs/api_python/source_en/mindarmour/mindarmour.fuzz_testing.rst b/docs/api_python/source_en/mindarmour/mindarmour.fuzz_testing.rst deleted file mode 100644 index 558e8c1263926c0f701d048deb057e6f4531fadb..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindarmour/mindarmour.fuzz_testing.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.fuzz_testing -======================= - -.. 
automodule:: mindarmour.fuzz_testing - :members: diff --git a/docs/api_python/source_en/mindarmour/mindarmour.privacy.diff_privacy.rst b/docs/api_python/source_en/mindarmour/mindarmour.privacy.diff_privacy.rst deleted file mode 100644 index 9fd33f9e93ee95e814480d010903f73d60ece30d..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindarmour/mindarmour.privacy.diff_privacy.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.privacy.diff_privacy -=============================== - -.. automodule:: mindarmour.privacy.diff_privacy - :members: diff --git a/docs/api_python/source_en/mindarmour/mindarmour.privacy.evaluation.rst b/docs/api_python/source_en/mindarmour/mindarmour.privacy.evaluation.rst deleted file mode 100644 index eacc6a7cf687777fa6d9c6e1d87fa8352e0b6e6a..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindarmour/mindarmour.privacy.evaluation.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.privacy.evaluation -============================= - -.. automodule:: mindarmour.privacy.evaluation - :members: diff --git a/docs/api_python/source_en/mindarmour/mindarmour.privacy.sup_privacy.rst b/docs/api_python/source_en/mindarmour/mindarmour.privacy.sup_privacy.rst deleted file mode 100644 index 30396473a5c286e81b61982e85fe4cdfbb125d36..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindarmour/mindarmour.privacy.sup_privacy.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.privacy.sup_privacy -=============================== - -.. automodule:: mindarmour.privacy.sup_privacy - :members: diff --git a/docs/api_python/source_en/mindarmour/mindarmour.rst b/docs/api_python/source_en/mindarmour/mindarmour.rst deleted file mode 100644 index 3dfb013c7f822129783ea157627155045f8c8c51..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindarmour/mindarmour.rst +++ /dev/null @@ -1,6 +0,0 @@ -mindarmour -========== - -.. 
automodule:: mindarmour - :members: - :exclude-members: SuppressModel, SuppressMasker, SuppressCtrl \ No newline at end of file diff --git a/docs/api_python/source_en/mindarmour/mindarmour.utils.rst b/docs/api_python/source_en/mindarmour/mindarmour.utils.rst deleted file mode 100644 index 9b1bee754dd1c7314825d1a2210f3a805920c4ed..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindarmour/mindarmour.utils.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.utils -================ - -.. automodule:: mindarmour.utils - :members: \ No newline at end of file diff --git a/docs/api_python/source_en/mindquantum/mindquantum.rst b/docs/api_python/source_en/mindquantum/mindquantum.rst deleted file mode 100644 index 2464675921f5451b687aaa86b624ba3359230f55..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindquantum/mindquantum.rst +++ /dev/null @@ -1,82 +0,0 @@ -mindquantum -=========== - -.. automodule:: mindquantum - -mindquantum.circuit -------------------- - -.. automodule:: mindquantum.circuit - :members: - - -mindquantum.engine ------------------- - -.. automodule:: mindquantum.engine - :members: - -mindquantum.gate ----------------- - -.. automodule:: mindquantum.gate - :members: - -functional ----------- - -The functional gates are the pre-instantiated quantum gates, which can be used directly as an instance of quantum gate. - -.. 
list-table:: - :widths: 50 50 - :header-rows: 1 - - * - functional - - gates - * - mindquantum.gate.CNOT - - :class:`mindquantum.gate.CNOTGate` - * - mindquantum.gate.I - - :class:`mindquantum.gate.IGate` - * - mindquantum.gate.H - - :class:`mindquantum.gate.HGate` - * - mindquantum.gate.S - - :class:`mindquantum.gate.PhaseShift` (numpy.pi/2) - * - mindquantum.gate.SWAP - - :class:`mindquantum.gate.SWAPGate` - * - mindquantum.gate.X - - :class:`mindquantum.gate.XGate` - * - mindquantum.gate.Y - - :class:`mindquantum.gate.YGate` - * - mindquantum.gate.Z - - :class:`mindquantum.gate.ZGate` - -mindquantum.nn --------------- - -.. automodule:: mindquantum.nn - :exclude-members: PQC, MindQuantumLayer, Evolution - :members: - -Operators -^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindquantum.nn.Evolution - mindquantum.nn.MindQuantumLayer - mindquantum.nn.PQC - -mindquantum.parameterresolver ------------------------------ - -.. automodule:: mindquantum.parameterresolver - :members: - -mindquantum.utils ------------------ - -.. automodule:: mindquantum.utils - :members: diff --git a/docs/api_python/source_en/mindspore/mindspore.common.initializer.rst b/docs/api_python/source_en/mindspore/mindspore.common.initializer.rst deleted file mode 100644 index 5d6bbf0aa9dcc9374dc1bc83949a7d93517a8114..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.common.initializer.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.common.initializer -============================ - -.. 
automodule:: mindspore.common.initializer - :members: \ No newline at end of file diff --git a/docs/api_python/source_en/mindspore/mindspore.communication.rst b/docs/api_python/source_en/mindspore/mindspore.communication.rst deleted file mode 100644 index 2b277b2e0673640555d8fc4cec88e071fdb9d8a5..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.communication.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.communication -======================= - -.. automodule:: mindspore.communication - :members: diff --git a/docs/api_python/source_en/mindspore/mindspore.compression.rst b/docs/api_python/source_en/mindspore/mindspore.compression.rst deleted file mode 100644 index cff75c05247ba2911e782e38c0d7190bc3cbe23a..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.compression.rst +++ /dev/null @@ -1,14 +0,0 @@ -mindspore.compression -===================== - -mindspore.compression.quant ---------------------------- - -.. automodule:: mindspore.compression.quant - :members: - -mindspore.compression.common ----------------------------- - -.. automodule:: mindspore.compression.common - :members: \ No newline at end of file diff --git a/docs/api_python/source_en/mindspore/mindspore.context.rst b/docs/api_python/source_en/mindspore/mindspore.context.rst deleted file mode 100644 index a9aa8120fe4daf9f0c1e9df93aaff114a98ef3ba..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.context.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.context -================= - -.. 
automodule:: mindspore.context - :members: diff --git a/docs/api_python/source_en/mindspore/mindspore.dataset.config.rst b/docs/api_python/source_en/mindspore/mindspore.dataset.config.rst deleted file mode 100644 index 55cf3631f462bd579c8b5764b0885a0a10b4b480..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.dataset.config.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.dataset.config -======================== - -.. automodule:: mindspore.dataset.config - :members: diff --git a/docs/api_python/source_en/mindspore/mindspore.dataset.rst b/docs/api_python/source_en/mindspore/mindspore.dataset.rst deleted file mode 100644 index d5fb44e566727e8303276c880d03652ea0940eff..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.dataset.rst +++ /dev/null @@ -1,94 +0,0 @@ -mindspore.dataset -================= - -.. automodule:: mindspore.dataset - -Vision -------- - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.CelebADataset - mindspore.dataset.Cifar100Dataset - mindspore.dataset.Cifar10Dataset - mindspore.dataset.CocoDataset - mindspore.dataset.ImageFolderDataset - mindspore.dataset.MnistDataset - mindspore.dataset.VOCDataset - -Text ------ - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.CLUEDataset - -Graph ------- - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.GraphData - -Standard Format ----------------- - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.CSVDataset - mindspore.dataset.ManifestDataset - mindspore.dataset.MindDataset - mindspore.dataset.TextFileDataset - mindspore.dataset.TFRecordDataset - -User Defined --------------- - -.. 
autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.GeneratorDataset - mindspore.dataset.NumpySlicesDataset - mindspore.dataset.PaddedDataset - -Sampler --------- - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.DistributedSampler - mindspore.dataset.PKSampler - mindspore.dataset.RandomSampler - mindspore.dataset.SequentialSampler - mindspore.dataset.SubsetRandomSampler - mindspore.dataset.SubsetSampler - mindspore.dataset.WeightedRandomSampler - -Others -------- - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.DatasetCache - mindspore.dataset.Schema - mindspore.dataset.zip diff --git a/docs/api_python/source_en/mindspore/mindspore.dataset.text.rst b/docs/api_python/source_en/mindspore/mindspore.dataset.text.rst deleted file mode 100644 index ad2f38ee5fa962abab8d22f6e1f45c7e8e8d5058..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.dataset.text.rst +++ /dev/null @@ -1,45 +0,0 @@ -mindspore.dataset.text -====================== - -.. automodule:: mindspore.dataset.text - -mindspore.dataset.text.transforms ---------------------------------- - -.. 
msnoteautosummary:: - :toctree: dataset_text - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.text.transforms.BasicTokenizer - mindspore.dataset.text.transforms.BertTokenizer - mindspore.dataset.text.transforms.CaseFold - mindspore.dataset.text.transforms.JiebaTokenizer - mindspore.dataset.text.transforms.Lookup - mindspore.dataset.text.transforms.Ngram - mindspore.dataset.text.transforms.NormalizeUTF8 - mindspore.dataset.text.transforms.PythonTokenizer - mindspore.dataset.text.transforms.RegexReplace - mindspore.dataset.text.transforms.RegexTokenizer - mindspore.dataset.text.transforms.SentencePieceTokenizer - mindspore.dataset.text.transforms.SlidingWindow - mindspore.dataset.text.transforms.ToNumber - mindspore.dataset.text.transforms.TruncateSequencePair - mindspore.dataset.text.transforms.UnicodeCharTokenizer - mindspore.dataset.text.transforms.UnicodeScriptTokenizer - mindspore.dataset.text.transforms.WhitespaceTokenizer - mindspore.dataset.text.transforms.WordpieceTokenizer - - -mindspore.dataset.text.utils ----------------------------- - -.. msnoteautosummary:: - :toctree: dataset_text - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.text.utils.SentencePieceVocab - mindspore.dataset.text.utils.to_str - mindspore.dataset.text.utils.to_bytes - mindspore.dataset.text.utils.Vocab diff --git a/docs/api_python/source_en/mindspore/mindspore.dataset.transforms.rst b/docs/api_python/source_en/mindspore/mindspore.dataset.transforms.rst deleted file mode 100644 index 8de6f0a34218bd64730c8ea9e9138014fc178b04..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.dataset.transforms.rst +++ /dev/null @@ -1,39 +0,0 @@ -mindspore.dataset.transforms -============================ - -.. automodule:: mindspore.dataset.transforms - -mindspore.dataset.transforms.c_transforms ------------------------------------------ - -.. 
autosummary:: - :toctree: dataset_transforms - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.transforms.c_transforms.Compose - mindspore.dataset.transforms.c_transforms.Concatenate - mindspore.dataset.transforms.c_transforms.Duplicate - mindspore.dataset.transforms.c_transforms.Fill - mindspore.dataset.transforms.c_transforms.Mask - mindspore.dataset.transforms.c_transforms.OneHot - mindspore.dataset.transforms.c_transforms.PadEnd - mindspore.dataset.transforms.c_transforms.RandomApply - mindspore.dataset.transforms.c_transforms.RandomChoice - mindspore.dataset.transforms.c_transforms.Slice - mindspore.dataset.transforms.c_transforms.TypeCast - mindspore.dataset.transforms.c_transforms.Unique - -mindspore.dataset.transforms.py_transforms ------------------------------------------- - -.. autosummary:: - :toctree: dataset_transforms - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.transforms.py_transforms.Compose - mindspore.dataset.transforms.py_transforms.OneHotOp - mindspore.dataset.transforms.py_transforms.RandomApply - mindspore.dataset.transforms.py_transforms.RandomChoice - mindspore.dataset.transforms.py_transforms.RandomOrder diff --git a/docs/api_python/source_en/mindspore/mindspore.dataset.vision.rst b/docs/api_python/source_en/mindspore/mindspore.dataset.vision.rst deleted file mode 100644 index bf5d17a42cc0cff82762d2a6eef2c8446af2dcc6..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.dataset.vision.rst +++ /dev/null @@ -1,92 +0,0 @@ -mindspore.dataset.vision -=================================== - -.. automodule:: mindspore.dataset.vision - -mindspore.dataset.vision.c_transforms ------------------------------------------------- - -.. 
autosummary:: - :toctree: dataset_vision - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.vision.c_transforms.AutoContrast - mindspore.dataset.vision.c_transforms.BoundingBoxAugment - mindspore.dataset.vision.c_transforms.CenterCrop - mindspore.dataset.vision.c_transforms.CutMixBatch - mindspore.dataset.vision.c_transforms.CutOut - mindspore.dataset.vision.c_transforms.Decode - mindspore.dataset.vision.c_transforms.Equalize - mindspore.dataset.vision.c_transforms.HWC2CHW - mindspore.dataset.vision.c_transforms.Invert - mindspore.dataset.vision.c_transforms.MixUpBatch - mindspore.dataset.vision.c_transforms.Normalize - mindspore.dataset.vision.c_transforms.Pad - mindspore.dataset.vision.c_transforms.RandomAffine - mindspore.dataset.vision.c_transforms.RandomColor - mindspore.dataset.vision.c_transforms.RandomColorAdjust - mindspore.dataset.vision.c_transforms.RandomCrop - mindspore.dataset.vision.c_transforms.RandomCropDecodeResize - mindspore.dataset.vision.c_transforms.RandomCropWithBBox - mindspore.dataset.vision.c_transforms.RandomHorizontalFlip - mindspore.dataset.vision.c_transforms.RandomHorizontalFlipWithBBox - mindspore.dataset.vision.c_transforms.RandomPosterize - mindspore.dataset.vision.c_transforms.RandomResize - mindspore.dataset.vision.c_transforms.RandomResizedCrop - mindspore.dataset.vision.c_transforms.RandomResizedCropWithBBox - mindspore.dataset.vision.c_transforms.RandomResizeWithBBox - mindspore.dataset.vision.c_transforms.RandomRotation - mindspore.dataset.vision.c_transforms.RandomSelectSubpolicy - mindspore.dataset.vision.c_transforms.RandomSharpness - mindspore.dataset.vision.c_transforms.RandomSolarize - mindspore.dataset.vision.c_transforms.RandomVerticalFlip - mindspore.dataset.vision.c_transforms.RandomVerticalFlipWithBBox - mindspore.dataset.vision.c_transforms.Rescale - mindspore.dataset.vision.c_transforms.Resize - mindspore.dataset.vision.c_transforms.ResizeWithBBox - 
mindspore.dataset.vision.c_transforms.SoftDvppDecodeRandomCropResizeJpeg - mindspore.dataset.vision.c_transforms.SoftDvppDecodeResizeJpeg - mindspore.dataset.vision.c_transforms.UniformAugment - -mindspore.dataset.vision.py_transforms -------------------------------------------------- - -.. autosummary:: - :toctree: dataset_vision - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.vision.py_transforms.AutoContrast - mindspore.dataset.vision.py_transforms.CenterCrop - mindspore.dataset.vision.py_transforms.Cutout - mindspore.dataset.vision.py_transforms.Decode - mindspore.dataset.vision.py_transforms.Equalize - mindspore.dataset.vision.py_transforms.FiveCrop - mindspore.dataset.vision.py_transforms.Grayscale - mindspore.dataset.vision.py_transforms.HsvToRgb - mindspore.dataset.vision.py_transforms.HWC2CHW - mindspore.dataset.vision.py_transforms.Invert - mindspore.dataset.vision.py_transforms.LinearTransformation - mindspore.dataset.vision.py_transforms.MixUp - mindspore.dataset.vision.py_transforms.Normalize - mindspore.dataset.vision.py_transforms.Pad - mindspore.dataset.vision.py_transforms.RandomAffine - mindspore.dataset.vision.py_transforms.RandomColor - mindspore.dataset.vision.py_transforms.RandomColorAdjust - mindspore.dataset.vision.py_transforms.RandomCrop - mindspore.dataset.vision.py_transforms.RandomErasing - mindspore.dataset.vision.py_transforms.RandomGrayscale - mindspore.dataset.vision.py_transforms.RandomHorizontalFlip - mindspore.dataset.vision.py_transforms.RandomPerspective - mindspore.dataset.vision.py_transforms.RandomResizedCrop - mindspore.dataset.vision.py_transforms.RandomRotation - mindspore.dataset.vision.py_transforms.RandomSharpness - mindspore.dataset.vision.py_transforms.RandomVerticalFlip - mindspore.dataset.vision.py_transforms.Resize - mindspore.dataset.vision.py_transforms.RgbToHsv - mindspore.dataset.vision.py_transforms.TenCrop - mindspore.dataset.vision.py_transforms.ToPIL - 
mindspore.dataset.vision.py_transforms.ToTensor - mindspore.dataset.vision.py_transforms.ToType - mindspore.dataset.vision.py_transforms.UniformAugment diff --git a/docs/api_python/source_en/mindspore/mindspore.explainer.rst b/docs/api_python/source_en/mindspore/mindspore.explainer.rst deleted file mode 100644 index 7df9a5bb9e73b259d8c4043482bcf1cf0545fdbd..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.explainer.rst +++ /dev/null @@ -1,21 +0,0 @@ -mindspore.explainer -=================== - -mindspore.explainer -------------------- - -.. automodule:: mindspore.explainer - :members: - - -mindspore.explainer.explanation -------------------------------- - -.. automodule:: mindspore.explainer.explanation - :members: - -mindspore.explainer.benchmark ------------------------------ - -.. automodule:: mindspore.explainer.benchmark - :members: \ No newline at end of file diff --git a/docs/api_python/source_en/mindspore/mindspore.mindrecord.rst b/docs/api_python/source_en/mindspore/mindspore.mindrecord.rst deleted file mode 100644 index de4d020c711e16ba2ab8cb8b3ed34849c2fbebf3..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.mindrecord.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.mindrecord -==================== - -.. automodule:: mindspore.mindrecord - :members: \ No newline at end of file diff --git a/docs/api_python/source_en/mindspore/mindspore.nn.probability.rst b/docs/api_python/source_en/mindspore/mindspore.nn.probability.rst deleted file mode 100644 index 00ad24f9cca247196661dd6c77b05a0537a1f3de..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.nn.probability.rst +++ /dev/null @@ -1,120 +0,0 @@ -mindspore.nn.probability -======================== - -.. automodule:: mindspore.nn.probability - -Bijectors ---------- - -.. 
msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.bijector.Bijector - mindspore.nn.probability.bijector.Exp - mindspore.nn.probability.bijector.GumbelCDF - mindspore.nn.probability.bijector.Invert - mindspore.nn.probability.bijector.PowerTransform - mindspore.nn.probability.bijector.ScalarAffine - mindspore.nn.probability.bijector.Softplus - -Bayesian Layers ---------------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.bnn_layers.ConvReparam - mindspore.nn.probability.bnn_layers.DenseLocalReparam - mindspore.nn.probability.bnn_layers.DenseReparam - -Prior and Posterior Distributions ----------------------------------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.bnn_layers.NormalPosterior - mindspore.nn.probability.bnn_layers.NormalPrior - -Bayesian Wrapper Functions ---------------------------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.bnn_layers.WithBNNLossCell - -Distributions --------------- - -.. 
msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.distribution.Bernoulli - mindspore.nn.probability.distribution.Beta - mindspore.nn.probability.distribution.Categorical - mindspore.nn.probability.distribution.Cauchy - mindspore.nn.probability.distribution.Distribution - mindspore.nn.probability.distribution.Exponential - mindspore.nn.probability.distribution.Gamma - mindspore.nn.probability.distribution.Geometric - mindspore.nn.probability.distribution.Gumbel - mindspore.nn.probability.distribution.Logistic - mindspore.nn.probability.distribution.LogNormal - mindspore.nn.probability.distribution.Normal - mindspore.nn.probability.distribution.Poisson - mindspore.nn.probability.distribution.TransformedDistribution - mindspore.nn.probability.distribution.Uniform - -Deep Probability Networks --------------------------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.dpn.ConditionalVAE - mindspore.nn.probability.dpn.VAE - -Infer ------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.infer.ELBO - mindspore.nn.probability.infer.SVI - -ToolBox ---------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.toolbox.UncertaintyEvaluation - mindspore.nn.probability.toolbox.VAEAnomalyDetection - -Model Transformer ------------------- - -.. 
msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.transforms.TransformToBNN diff --git a/docs/api_python/source_en/mindspore/mindspore.nn.rst b/docs/api_python/source_en/mindspore/mindspore.nn.rst deleted file mode 100644 index 96f7892c02247fc7967f0fee2223a093f9f4e209..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.nn.rst +++ /dev/null @@ -1,366 +0,0 @@ -mindspore.nn -============ - -.. automodule:: mindspore.nn - -Compared with the previous version, the added, deleted and supported platforms change information of `mindspore.nn` operators in MindSpore, please refer to the link ``_. - -Cell ----- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.Cell - mindspore.nn.GraphKernel - -Containers ----------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.CellList - mindspore.nn.SequentialCell - -Convolution Layers ------------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.Conv1d - mindspore.nn.Conv1dTranspose - mindspore.nn.Conv2d - mindspore.nn.Conv2dTranspose - mindspore.nn.Conv3d - mindspore.nn.Conv3dTranspose - -Recurrent Layers ----------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.LSTMCell - mindspore.nn.LSTM - -Sparse Layers -------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.Embedding - mindspore.nn.EmbeddingLookup - mindspore.nn.MultiFieldEmbeddingLookup - -Non-linear Activations ----------------------- - -.. 
msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.ELU - mindspore.nn.FastGelu - mindspore.nn.GELU - mindspore.nn.get_activation - mindspore.nn.HSigmoid - mindspore.nn.HSwish - mindspore.nn.LeakyReLU - mindspore.nn.LogSigmoid - mindspore.nn.LogSoftmax - mindspore.nn.PReLU - mindspore.nn.ReLU - mindspore.nn.ReLU6 - mindspore.nn.Sigmoid - mindspore.nn.Softmax - mindspore.nn.Tanh - -Utilities ---------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.ClipByNorm - mindspore.nn.Dense - mindspore.nn.Dropout - mindspore.nn.Flatten - mindspore.nn.L1Regularizer - mindspore.nn.Norm - mindspore.nn.OneHot - mindspore.nn.Pad - mindspore.nn.Range - mindspore.nn.ResizeBilinear - mindspore.nn.SparseToDense - mindspore.nn.Tril - mindspore.nn.Triu - mindspore.nn.Unfold - -Images Functions ----------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.CentralCrop - mindspore.nn.ImageGradients - mindspore.nn.MSSSIM - mindspore.nn.PSNR - mindspore.nn.SSIM - -Normalization Layers --------------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.BatchNorm1d - mindspore.nn.BatchNorm2d - mindspore.nn.BatchNorm3d - mindspore.nn.GlobalBatchNorm - mindspore.nn.GroupNorm - mindspore.nn.InstanceNorm2d - mindspore.nn.LayerNorm - mindspore.nn.MatrixDiag - mindspore.nn.MatrixDiagPart - mindspore.nn.MatrixSetDiag - mindspore.nn.SyncBatchNorm - -Pooling layers --------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.AvgPool1d - mindspore.nn.AvgPool2d - mindspore.nn.MaxPool1d - mindspore.nn.MaxPool2d - -Quantized Functions -------------------- - -.. 
msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.ActQuant - mindspore.nn.Conv2dBnAct - mindspore.nn.Conv2dBnFoldQuant - mindspore.nn.Conv2dBnFoldQuantOneConv - mindspore.nn.Conv2dBnWithoutFoldQuant - mindspore.nn.Conv2dQuant - mindspore.nn.DenseBnAct - mindspore.nn.DenseQuant - mindspore.nn.FakeQuantWithMinMaxObserver - mindspore.nn.MulQuant - mindspore.nn.TensorAddQuant - -Loss Functions --------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.BCELoss - mindspore.nn.BCEWithLogitsLoss - mindspore.nn.CosineEmbeddingLoss - mindspore.nn.DiceLoss - mindspore.nn.FocalLoss - mindspore.nn.L1Loss - mindspore.nn.MAELoss - mindspore.nn.MSELoss - mindspore.nn.MultiClassDiceLoss - mindspore.nn.RMSELoss - mindspore.nn.SampledSoftmaxLoss - mindspore.nn.SmoothL1Loss - mindspore.nn.SoftmaxCrossEntropyWithLogits - -Optimizer Functions -------------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.Adagrad - mindspore.nn.Adam - mindspore.nn.AdamOffload - mindspore.nn.AdamWeightDecay - mindspore.nn.FTRL - mindspore.nn.Lamb - mindspore.nn.LARS - mindspore.nn.LazyAdam - mindspore.nn.Momentum - mindspore.nn.Optimizer - mindspore.nn.ProximalAdagrad - mindspore.nn.RMSProp - mindspore.nn.SGD - -Wrapper Functions ------------------ - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.DistributedGradReducer - mindspore.nn.DynamicLossScaleUpdateCell - mindspore.nn.FixedLossScaleUpdateCell - mindspore.nn.ForwardValueAndGrad - mindspore.nn.GetNextSingleOp - mindspore.nn.ParameterUpdate - mindspore.nn.TimeDistributed - mindspore.nn.TrainOneStepCell - mindspore.nn.TrainOneStepWithLossScaleCell - mindspore.nn.WithEvalCell - mindspore.nn.WithGradCell - mindspore.nn.WithLossCell - -Math Functions --------------- - -.. 
msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.DiGamma - mindspore.nn.IGamma - mindspore.nn.LBeta - mindspore.nn.LGamma - mindspore.nn.MatDet - mindspore.nn.MatInverse - mindspore.nn.MatMul - mindspore.nn.Moments - mindspore.nn.ReduceLogSumExp - -Metrics --------- - -.. autosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.Accuracy - mindspore.nn.auc - mindspore.nn.BleuScore - mindspore.nn.ConfusionMatrix - mindspore.nn.ConfusionMatrixMetric - mindspore.nn.CosineSimilarity - mindspore.nn.Dice - mindspore.nn.F1 - mindspore.nn.Fbeta - mindspore.nn.HausdorffDistance - mindspore.nn.get_metric_fn - mindspore.nn.Loss - mindspore.nn.MAE - mindspore.nn.MeanSurfaceDistance - mindspore.nn.Metric - mindspore.nn.MSE - mindspore.nn.names - mindspore.nn.OcclusionSensitivity - mindspore.nn.Perplexity - mindspore.nn.Precision - mindspore.nn.Recall - mindspore.nn.ROC - mindspore.nn.RootMeanSquareDistance - mindspore.nn.Top1CategoricalAccuracy - mindspore.nn.Top5CategoricalAccuracy - mindspore.nn.TopKCategoricalAccuracy - -Dynamic Learning Rate ------------------------ - -LearningRateSchedule -^^^^^^^^^^^^^^^^^^^^^^ - -The dynamic learning rates in this module are all subclasses of LearningRateSchedule. Pass the instance of -LearningRateSchedule to an optimizer. During the training process, the optimizer calls the instance taking current step -as input to get the current learning rate. - -.. code-block:: python - - import mindspore.nn as nn - - min_lr = 0.01 - max_lr = 0.1 - decay_steps = 4 - cosine_decay_lr = nn.CosineDecayLR(min_lr, max_lr, decay_steps) - - net = Net() - optim = nn.Momentum(net.trainable_params(), learning_rate=cosine_decay_lr, momentum=0.9) - -.. 
autosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.CosineDecayLR - mindspore.nn.ExponentialDecayLR - mindspore.nn.InverseDecayLR - mindspore.nn.NaturalExpDecayLR - mindspore.nn.PolynomialDecayLR - mindspore.nn.WarmUpLR - -Dynamic LR -^^^^^^^^^^^^^^^^^^^^^^ - -The dynamic learning rates in this module are all functions. Call the function and pass the result to an optimizer. -During the training process, the optimizer takes result[current step] as current learning rate. - -.. code-block:: python - - import mindspore.nn as nn - - min_lr = 0.01 - max_lr = 0.1 - total_step = 6 - step_per_epoch = 1 - decay_epoch = 4 - - lr= nn.cosine_decay_lr(min_lr, max_lr, total_step, step_per_epoch, decay_epoch) - - net = Net() - optim = nn.Momentum(net.trainable_params(), learning_rate=lr, momentum=0.9) - -.. autosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.cosine_decay_lr - mindspore.nn.exponential_decay_lr - mindspore.nn.inverse_decay_lr - mindspore.nn.natural_exp_decay_lr - mindspore.nn.piecewise_constant_lr - mindspore.nn.polynomial_decay_lr - mindspore.nn.warmup_lr diff --git a/docs/api_python/source_en/mindspore/mindspore.numpy.rst b/docs/api_python/source_en/mindspore/mindspore.numpy.rst deleted file mode 100644 index 0fbd70ae0142307141d613dd8c0943d424471c56..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.numpy.rst +++ /dev/null @@ -1,271 +0,0 @@ -mindspore.numpy -=============== - -.. automodule:: mindspore.numpy - -.. autosummary:: - :toctree: numpy - :nosignatures: - :template: classtemplate_inherited.rst - -Array Generation ----------------- - -.. 
msplatformautosummary:: - :toctree: numpy - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.numpy.arange - mindspore.numpy.array - mindspore.numpy.asarray - mindspore.numpy.asfarray - mindspore.numpy.bartlett - mindspore.numpy.blackman - mindspore.numpy.copy - mindspore.numpy.diag - mindspore.numpy.diag_indices - mindspore.numpy.diagflat - mindspore.numpy.diagonal - mindspore.numpy.empty - mindspore.numpy.empty_like - mindspore.numpy.eye - mindspore.numpy.full - mindspore.numpy.full_like - mindspore.numpy.geomspace - mindspore.numpy.hamming - mindspore.numpy.hanning - mindspore.numpy.histogram_bin_edges - mindspore.numpy.identity - mindspore.numpy.indices - mindspore.numpy.ix_ - mindspore.numpy.linspace - mindspore.numpy.logspace - mindspore.numpy.meshgrid - mindspore.numpy.mgrid - mindspore.numpy.ogrid - mindspore.numpy.ones - mindspore.numpy.ones_like - mindspore.numpy.pad - mindspore.numpy.trace - mindspore.numpy.tri - mindspore.numpy.tril - mindspore.numpy.tril_indices - mindspore.numpy.tril_indices_from - mindspore.numpy.triu - mindspore.numpy.triu_indices - mindspore.numpy.triu_indices_from - mindspore.numpy.vander - mindspore.numpy.zeros - mindspore.numpy.zeros_like - -Array Operation ---------------- - -.. 
msplatformautosummary:: - :toctree: numpy - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.numpy.append - mindspore.numpy.apply_along_axis - mindspore.numpy.apply_over_axes - mindspore.numpy.array_split - mindspore.numpy.array_str - mindspore.numpy.atleast_1d - mindspore.numpy.atleast_2d - mindspore.numpy.atleast_3d - mindspore.numpy.broadcast_arrays - mindspore.numpy.broadcast_to - mindspore.numpy.choose - mindspore.numpy.column_stack - mindspore.numpy.concatenate - mindspore.numpy.dsplit - mindspore.numpy.dstack - mindspore.numpy.expand_dims - mindspore.numpy.flip - mindspore.numpy.fliplr - mindspore.numpy.flipud - mindspore.numpy.hsplit - mindspore.numpy.hstack - mindspore.numpy.moveaxis - mindspore.numpy.piecewise - mindspore.numpy.ravel - mindspore.numpy.repeat - mindspore.numpy.reshape - mindspore.numpy.roll - mindspore.numpy.rollaxis - mindspore.numpy.rot90 - mindspore.numpy.select - mindspore.numpy.size - mindspore.numpy.split - mindspore.numpy.squeeze - mindspore.numpy.stack - mindspore.numpy.swapaxes - mindspore.numpy.take - mindspore.numpy.take_along_axis - mindspore.numpy.tile - mindspore.numpy.transpose - mindspore.numpy.unique - mindspore.numpy.unravel_index - mindspore.numpy.vsplit - mindspore.numpy.vstack - mindspore.numpy.where - -Logic ------ - -.. 
msplatformautosummary:: - :toctree: numpy - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.numpy.array_equal - mindspore.numpy.array_equiv - mindspore.numpy.equal - mindspore.numpy.greater - mindspore.numpy.greater_equal - mindspore.numpy.in1d - mindspore.numpy.isclose - mindspore.numpy.isfinite - mindspore.numpy.isin - mindspore.numpy.isinf - mindspore.numpy.isnan - mindspore.numpy.isneginf - mindspore.numpy.isposinf - mindspore.numpy.isscalar - mindspore.numpy.less - mindspore.numpy.less_equal - mindspore.numpy.logical_and - mindspore.numpy.logical_not - mindspore.numpy.logical_or - mindspore.numpy.logical_xor - mindspore.numpy.not_equal - mindspore.numpy.signbit - mindspore.numpy.sometrue - -Math ----- - -.. msplatformautosummary:: - :toctree: numpy - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.numpy.absolute - mindspore.numpy.add - mindspore.numpy.amax - mindspore.numpy.amin - mindspore.numpy.arccos - mindspore.numpy.arccosh - mindspore.numpy.arcsin - mindspore.numpy.arcsinh - mindspore.numpy.arctan - mindspore.numpy.arctan2 - mindspore.numpy.arctanh - mindspore.numpy.argmax - mindspore.numpy.argmin - mindspore.numpy.around - mindspore.numpy.average - mindspore.numpy.bincount - mindspore.numpy.bitwise_and - mindspore.numpy.bitwise_or - mindspore.numpy.bitwise_xor - mindspore.numpy.cbrt - mindspore.numpy.ceil - mindspore.numpy.clip - mindspore.numpy.convolve - mindspore.numpy.copysign - mindspore.numpy.corrcoef - mindspore.numpy.correlate - mindspore.numpy.cos - mindspore.numpy.cosh - mindspore.numpy.count_nonzero - mindspore.numpy.cov - mindspore.numpy.cross - mindspore.numpy.cumprod - mindspore.numpy.cumsum - mindspore.numpy.deg2rad - mindspore.numpy.diff - mindspore.numpy.digitize - mindspore.numpy.divide - mindspore.numpy.divmod - mindspore.numpy.dot - mindspore.numpy.ediff1d - mindspore.numpy.exp - mindspore.numpy.exp2 - mindspore.numpy.expm1 - mindspore.numpy.fix - mindspore.numpy.float_power - 
mindspore.numpy.floor - mindspore.numpy.floor_divide - mindspore.numpy.fmod - mindspore.numpy.gcd - mindspore.numpy.gradient - mindspore.numpy.heaviside - mindspore.numpy.histogram - mindspore.numpy.histogram2d - mindspore.numpy.histogramdd - mindspore.numpy.hypot - mindspore.numpy.inner - mindspore.numpy.interp - mindspore.numpy.invert - mindspore.numpy.kron - mindspore.numpy.lcm - mindspore.numpy.log - mindspore.numpy.log10 - mindspore.numpy.log1p - mindspore.numpy.log2 - mindspore.numpy.logaddexp - mindspore.numpy.logaddexp2 - mindspore.numpy.matmul - mindspore.numpy.matrix_power - mindspore.numpy.maximum - mindspore.numpy.mean - mindspore.numpy.minimum - mindspore.numpy.multi_dot - mindspore.numpy.multiply - mindspore.numpy.nancumsum - mindspore.numpy.nanmax - mindspore.numpy.nanmean - mindspore.numpy.nanmin - mindspore.numpy.nanstd - mindspore.numpy.nansum - mindspore.numpy.nanvar - mindspore.numpy.negative - mindspore.numpy.norm - mindspore.numpy.outer - mindspore.numpy.polyadd - mindspore.numpy.polyder - mindspore.numpy.polyint - mindspore.numpy.polymul - mindspore.numpy.polysub - mindspore.numpy.polyval - mindspore.numpy.positive - mindspore.numpy.power - mindspore.numpy.promote_types - mindspore.numpy.ptp - mindspore.numpy.rad2deg - mindspore.numpy.radians - mindspore.numpy.ravel_multi_index - mindspore.numpy.reciprocal - mindspore.numpy.remainder - mindspore.numpy.result_type - mindspore.numpy.rint - mindspore.numpy.searchsorted - mindspore.numpy.sign - mindspore.numpy.sin - mindspore.numpy.sinh - mindspore.numpy.sqrt - mindspore.numpy.square - mindspore.numpy.std - mindspore.numpy.subtract - mindspore.numpy.sum - mindspore.numpy.tan - mindspore.numpy.tanh - mindspore.numpy.tensordot - mindspore.numpy.trapz - mindspore.numpy.true_divide - mindspore.numpy.trunc - mindspore.numpy.unwrap - mindspore.numpy.var \ No newline at end of file diff --git a/docs/api_python/source_en/mindspore/mindspore.ops.rst b/docs/api_python/source_en/mindspore/mindspore.ops.rst 
deleted file mode 100644 index ed3374e90d74fc7105f560b6ad59d406e6d81b84..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.ops.rst +++ /dev/null @@ -1,275 +0,0 @@ -mindspore.ops -============= - -.. automodule:: mindspore.ops - -Compared with the previous version, the added, deleted and supported platforms change information of `mindspore.ops` operators in MindSpore, please refer to the link ``_. - -.. include:: operations.rst - -composite ---------- - -The composite operators are the pre-defined combination of operators. - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.batch_dot - mindspore.ops.clip_by_global_norm - mindspore.ops.clip_by_value - mindspore.ops.core - mindspore.ops.count_nonzero - mindspore.ops.dot - mindspore.ops.gamma - mindspore.ops.GradOperation - mindspore.ops.HyperMap - mindspore.ops.laplace - mindspore.ops.matmul - mindspore.ops.multinomial - mindspore.ops.MultitypeFuncGraph - mindspore.ops.normal - mindspore.ops.poisson - mindspore.ops.repeat_elements - mindspore.ops.sequence_mask - mindspore.ops.tensor_dot - mindspore.ops.uniform - -functional ----------- - -The functional operators are the pre-instantiated Primitive operators, which can be used directly as a function. - -.. 
list-table:: - :widths: 50 50 - :header-rows: 1 - - * - functional - - operations - * - mindspore.ops.add - - :class:`mindspore.ops.Add` - * - mindspore.ops.addn - - :class:`mindspore.ops.AddN` - * - mindspore.ops.array_reduce - - :class:`mindspore.ops.Primitive` ('array_reduce') - * - mindspore.ops.array_to_scalar - - :class:`mindspore.ops.Primitive` ('array_to_scalar') - * - mindspore.ops.assign - - :class:`mindspore.ops.Assign` - * - mindspore.ops.assign_add - - :class:`mindspore.ops.AssignAdd` - * - mindspore.ops.assign_sub - - :class:`mindspore.ops.AssignSub` - * - mindspore.ops.bool_and - - :class:`mindspore.ops.Primitive` ('bool_and') - * - mindspore.ops.bool_eq - - :class:`mindspore.ops.Primitive` ('bool_eq') - * - mindspore.ops.bool_not - - :class:`mindspore.ops.Primitive` ('bool_not') - * - mindspore.ops.bool_or - - :class:`mindspore.ops.Primitive` ('bool_or') - * - mindspore.ops.cast - - :class:`mindspore.ops.Cast` - * - mindspore.ops.distribute - - :class:`mindspore.ops.Primitive` ('distribute') - * - mindspore.ops.dtype - - :class:`mindspore.ops.DType` - * - mindspore.ops.equal - - :class:`mindspore.ops.Equal` - * - mindspore.ops.expand_dims - - :class:`mindspore.ops.ExpandDims` - * - mindspore.ops.fill - - :class:`mindspore.ops.Fill` - * - mindspore.ops.gather - - :class:`mindspore.ops.Gather` - * - mindspore.ops.gather_nd - - :class:`mindspore.ops.GatherNd` - * - mindspore.ops.hastype - - :class:`mindspore.ops.Primitive` ('hastype') - * - mindspore.ops.in_dict - - :class:`mindspore.ops.Primitive` ('in_dict') - * - mindspore.ops.is_not - - :class:`mindspore.ops.Primitive` ('is_not') - * - mindspore.ops.is\_ - - :class:`mindspore.ops.Primitive` ('is\_') - * - mindspore.ops.isconstant - - :class:`mindspore.ops.Primitive` ('is_constant') - * - mindspore.ops.isinstance\_ - - :class:`mindspore.ops.IsInstance` - * - mindspore.ops.issubclass\_ - - :class:`mindspore.ops.IsSubClass` - * - mindspore.ops.logical_and - - :class:`mindspore.ops.LogicalAnd` - * - 
mindspore.ops.logical_not - - :class:`mindspore.ops.LogicalNot` - * - mindspore.ops.logical_or - - :class:`mindspore.ops.LogicalOr` - * - mindspore.ops.make_row_tensor - - :class:`mindspore.ops.Primitive` ('MakeRowTensor') - * - mindspore.ops.make_sparse_tensor - - :class:`mindspore.ops.Primitive` ('MakeSparseTensor') - * - mindspore.ops.mixed_precision_cast - - :class:`mindspore.ops.Primitive` ('mixed_precision_cast') - * - mindspore.ops.neg_tensor - - :class:`mindspore.ops.Neg` - * - mindspore.ops.not_equal - - :class:`mindspore.ops.NotEqual` - * - mindspore.ops.not_in_dict - - :class:`mindspore.ops.Primitive` ('not_in_dict') - * - mindspore.ops.ones_like - - :class:`mindspore.ops.OnesLike` - * - mindspore.ops.print\_ - - :class:`mindspore.ops.Print` - * - mindspore.ops.rank - - :class:`mindspore.ops.Rank` - * - mindspore.ops.reduced_shape - - :class:`mindspore.ops.Primitive` ('reduced_shape') - * - mindspore.ops.reshape - - :class:`mindspore.ops.Reshape` - * - mindspore.ops.row_tensor_get_dense_shape - - :class:`mindspore.ops.Primitive` ('RowTensorGetDenseShape') - * - mindspore.ops.row_tensor_get_indices - - :class:`mindspore.ops.Primitive` ('RowTensorGetIndices') - * - mindspore.ops.row_tensor_get_values - - :class:`mindspore.ops.Primitive` ('RowTensorGetValues') - * - mindspore.ops.same_type_shape - - :class:`mindspore.ops.SameTypeShape` - * - mindspore.ops.scalar_add - - :class:`mindspore.ops.Primitive` ('scalar_add') - * - mindspore.ops.scalar_cast - - :class:`mindspore.ops.ScalarCast` - * - mindspore.ops.scalar_div - - :class:`mindspore.ops.Primitive` ('scalar_div') - * - mindspore.ops.scalar_eq - - :class:`mindspore.ops.Primitive` ('scalar_eq') - * - mindspore.ops.scalar_floordiv - - :class:`mindspore.ops.Primitive` ('scalar_floordiv') - * - mindspore.ops.scalar_ge - - :class:`mindspore.ops.Primitive` ('scalar_ge') - * - mindspore.ops.scalar_gt - - :class:`mindspore.ops.Primitive` ('scalar_gt') - * - mindspore.ops.scalar_le - - 
:class:`mindspore.ops.Primitive` ('scalar_le') - * - mindspore.ops.scalar_log - - :class:`mindspore.ops.Primitive` ('scalar_log') - * - mindspore.ops.scalar_lt - - :class:`mindspore.ops.Primitive` ('scalar_lt') - * - mindspore.ops.scalar_mod - - :class:`mindspore.ops.Primitive` ('scalar_mod') - * - mindspore.ops.scalar_mul - - :class:`mindspore.ops.Primitive` ('scalar_mul') - * - mindspore.ops.scalar_ne - - :class:`mindspore.ops.Primitive` ('scalar_ne') - * - mindspore.ops.scalar_pow - - :class:`mindspore.ops.Primitive` ('scalar_pow') - * - mindspore.ops.scalar_sub - - :class:`mindspore.ops.Primitive` ('scalar_sub') - * - mindspore.ops.scalar_to_array - - :class:`mindspore.ops.ScalarToArray` - * - mindspore.ops.scalar_to_tensor - - :class:`mindspore.ops.ScalarToTensor` - * - mindspore.ops.scalar_uadd - - :class:`mindspore.ops.Primitive` ('scalar_uadd') - * - mindspore.ops.scalar_usub - - :class:`mindspore.ops.Primitive` ('scalar_usub') - * - mindspore.ops.scatter_nd - - :class:`mindspore.ops.ScatterNd` - * - mindspore.ops.scatter_nd_update - - :class:`mindspore.ops.ScatterNdUpdate` - * - mindspore.ops.scatter_update - - :class:`mindspore.ops.ScatterUpdate` - * - mindspore.ops.select - - :class:`mindspore.ops.Select` - * - mindspore.ops.shape - - :class:`mindspore.ops.Shape` - * - mindspore.ops.shape_mul - - :class:`mindspore.ops.Primitive` ('shape_mul') - * - mindspore.ops.size - - :class:`mindspore.ops.Size` - * - mindspore.ops.sparse_tensor_get_dense_shape - - :class:`mindspore.ops.Primitive` ('SparseTensorGetDenseShape') - * - mindspore.ops.sparse_tensor_get_indices - - :class:`mindspore.ops.Primitive` ('SparseTensorGetIndices') - * - mindspore.ops.sparse_tensor_get_values - - :class:`mindspore.ops.Primitive` ('SparseTensorGetValues') - * - mindspore.ops.sqrt - - :class:`mindspore.ops.Sqrt` - * - mindspore.ops.square - - :class:`mindspore.ops.Square` - * - mindspore.ops.stack - - :class:`mindspore.ops.Stack` - * - mindspore.ops.stop_gradient - - 
:class:`mindspore.ops.Primitive` ('stop_gradient') - * - mindspore.ops.strided_slice - - :class:`mindspore.ops.StridedSlice` - * - mindspore.ops.string_concat - - :class:`mindspore.ops.Primitive` ('string_concat') - * - mindspore.ops.string_eq - - :class:`mindspore.ops.Primitive` ('string_equal') - * - mindspore.ops.tensor_div - - :class:`mindspore.ops.RealDiv` - * - mindspore.ops.tensor_floordiv - - :class:`mindspore.ops.FloorDiv` - * - mindspore.ops.tensor_ge - - :class:`mindspore.ops.GreaterEqual` - * - mindspore.ops.tensor_gt - - :class:`mindspore.ops.Greater` - * - mindspore.ops.tensor_le - - :class:`mindspore.ops.LessEqual` - * - mindspore.ops.tensor_lt - - :class:`mindspore.ops.Less` - * - mindspore.ops.tensor_mod - - :class:`mindspore.ops.FloorMod` - * - mindspore.ops.tensor_mul - - :class:`mindspore.ops.Mul` - * - mindspore.ops.tensor_pow - - :class:`mindspore.ops.Pow` - * - mindspore.ops.tensor_sub - - :class:`mindspore.ops.Sub` - * - mindspore.ops.tile - - :class:`mindspore.ops.Tile` - * - mindspore.ops.tuple_to_array - - :class:`mindspore.ops.TupleToArray` - * - mindspore.ops.typeof - - :class:`mindspore.ops.Primitive` ('typeof') - * - mindspore.ops.zeros_like - - :class:`mindspore.ops.ZerosLike` - -primitive ---------- - -.. autosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.constexpr - mindspore.ops.prim_attr_register - mindspore.ops.Primitive - mindspore.ops.PrimitiveWithCheck - mindspore.ops.PrimitiveWithInfer - -vm_impl_registry ----------------- - -.. autosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.get_vm_impl_fn - -op_info_register ----------------- - -.. 
autosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.AiCPURegOp - mindspore.ops.DataType - mindspore.ops.op_info_register - mindspore.ops.TBERegOp diff --git a/docs/api_python/source_en/mindspore/mindspore.profiler.rst b/docs/api_python/source_en/mindspore/mindspore.profiler.rst deleted file mode 100644 index 33122b6ff76b4efd65bc9050e709da0feda129bf..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.profiler.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.profiler -================== - -.. automodule:: mindspore.profiler - :members: \ No newline at end of file diff --git a/docs/api_python/source_en/mindspore/mindspore.rst b/docs/api_python/source_en/mindspore/mindspore.rst deleted file mode 100644 index c786b0d1c1e558adf20d9adf2427c9c5b66b8031..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.rst +++ /dev/null @@ -1,100 +0,0 @@ -mindspore -========= - -.. class:: mindspore.dtype - - Create a data type object of MindSpore. - - The actual path of ``dtype`` is ``/mindspore/common/dtype.py``. - Run the following command to import the package: - - .. code-block:: - - from mindspore import dtype as mstype - - * **Numeric Type** - - Currently, MindSpore supports ``Int`` type, ``Uint`` type and ``Float`` type. - The following table lists the details. 
- - ============================================== ============================= - Definition Description - ============================================== ============================= - ``mindspore.int8`` , ``mindspore.byte`` 8-bit integer - ``mindspore.int16`` , ``mindspore.short`` 16-bit integer - ``mindspore.int32`` , ``mindspore.intc`` 32-bit integer - ``mindspore.int64`` , ``mindspore.intp`` 64-bit integer - ``mindspore.uint8`` , ``mindspore.ubyte`` unsigned 8-bit integer - ``mindspore.uint16`` , ``mindspore.ushort`` unsigned 16-bit integer - ``mindspore.uint32`` , ``mindspore.uintc`` unsigned 32-bit integer - ``mindspore.uint64`` , ``mindspore.uintp`` unsigned 64-bit integer - ``mindspore.float16`` , ``mindspore.half`` 16-bit floating-point number - ``mindspore.float32`` , ``mindspore.single`` 32-bit floating-point number - ``mindspore.float64`` , ``mindspore.double`` 64-bit floating-point number - ============================================== ============================= - - * **Other Type** - - For other defined types, see the following table. - - ============================ ================= - Type Description - ============================ ================= - ``tensor`` MindSpore's ``tensor`` type. Data format uses NCHW. For details, see `tensor `_. - ``bool_`` Boolean ``True`` or ``False``. - ``int_`` Integer scalar. - ``uint`` Unsigned integer scalar. - ``float_`` Floating-point scalar. - ``number`` Number, including ``int_`` , ``uint`` , ``float_`` and ``bool_`` . - ``list_`` List constructed by ``tensor`` , such as ``List[T0,T1,...,Tn]`` , where the element ``Ti`` can be of different types. - ``tuple_`` Tuple constructed by ``tensor`` , such as ``Tuple[T0,T1,...,Tn]`` , where the element ``Ti`` can be of different types. - ``function`` Function. Return in two ways, when function is not None, returns Func directly, the other returns Func(args: List[T0,T1,...,Tn], retval: T) when function is None. - ``type_type`` Type definition of type. 
- ``type_none`` No matching return type, corresponding to the ``type(None)`` in Python. - ``symbolic_key`` The value of a variable is used as a key of the variable in ``env_type`` . - ``env_type`` Used to store the gradient of the free variable of a function, where the key is the ``symbolic_key`` of the free variable's node and the value is the gradient. - ============================ ================= - - * **Tree Topology** - - The relationships of the above types are as follows: - - .. code-block:: - - - └─────── number - │ ├─── bool_ - │ ├─── int_ - │ │ ├─── int8, byte - │ │ ├─── int16, short - │ │ ├─── int32, intc - │ │ └─── int64, intp - │ ├─── uint - │ │ ├─── uint8, ubyte - │ │ ├─── uint16, ushort - │ │ ├─── uint32, uintc - │ │ └─── uint64, uintp - │ └─── float_ - │ ├─── float16 - │ ├─── float32 - │ └─── float64 - ├─── tensor - │ ├─── Array[Float32] - │ └─── ... - ├─── list_ - │ ├─── List[Int32,Float32] - │ └─── ... - ├─── tuple_ - │ ├─── Tuple[Int32,Float32] - │ └─── ... - ├─── function - │ ├─── Func - │ ├─── Func[(Int32, Float32), Int32] - │ └─── ... - ├─── type_type - ├─── type_none - ├─── symbolic_key - └─── env_type - -.. automodule:: mindspore - :members: diff --git a/docs/api_python/source_en/mindspore/mindspore.train.rst b/docs/api_python/source_en/mindspore/mindspore.train.rst deleted file mode 100644 index ff368bf47b07ac19f0c76c8133ddd0e9face92e3..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/mindspore.train.rst +++ /dev/null @@ -1,14 +0,0 @@ -mindspore.train -=============== - -mindspore.train.summary ------------------------ - -.. automodule:: mindspore.train.summary - :members: - -mindspore.train.callback ------------------------- - -.. 
automodule:: mindspore.train.callback - :members: diff --git a/docs/api_python/source_en/mindspore/operations.rst b/docs/api_python/source_en/mindspore/operations.rst deleted file mode 100644 index ccf0cb9538811bd005d8da528791aacfd5f2429c..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore/operations.rst +++ /dev/null @@ -1,408 +0,0 @@ -operations ----------- - -The Primitive operators in operations need to be instantiated before being used. - -Neural Network Operators -^^^^^^^^^^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.Acosh - mindspore.ops.Adam - mindspore.ops.AdamNoUpdateParam - mindspore.ops.ApplyAdadelta - mindspore.ops.ApplyAdagrad - mindspore.ops.ApplyAdagradV2 - mindspore.ops.ApplyAdaMax - mindspore.ops.ApplyAddSign - mindspore.ops.ApplyCenteredRMSProp - mindspore.ops.ApplyGradientDescent - mindspore.ops.ApplyMomentum - mindspore.ops.ApplyPowerSign - mindspore.ops.ApplyProximalAdagrad - mindspore.ops.ApplyProximalGradientDescent - mindspore.ops.ApplyRMSProp - mindspore.ops.AvgPool - mindspore.ops.BasicLSTMCell - mindspore.ops.BatchNorm - mindspore.ops.BCEWithLogitsLoss - mindspore.ops.BiasAdd - mindspore.ops.BinaryCrossEntropy - mindspore.ops.BNTrainingReduce - mindspore.ops.BNTrainingUpdate - mindspore.ops.ComputeAccidentalHits - mindspore.ops.Conv2D - mindspore.ops.Conv2DBackpropInput - mindspore.ops.Conv3D - mindspore.ops.Conv3DTranspose - mindspore.ops.CTCGreedyDecoder - mindspore.ops.CTCLoss - mindspore.ops.DataFormatDimMap - mindspore.ops.DepthwiseConv2dNative - mindspore.ops.Dropout2D - mindspore.ops.Dropout3D - mindspore.ops.DropoutDoMask - mindspore.ops.DropoutGenMask - mindspore.ops.DynamicGRUV2 - mindspore.ops.DynamicRNN - mindspore.ops.Elu - mindspore.ops.FastGeLU - mindspore.ops.Flatten - mindspore.ops.FloorMod - mindspore.ops.FusedSparseAdam - mindspore.ops.FusedSparseLazyAdam - mindspore.ops.FusedSparseProximalAdagrad - 
mindspore.ops.GeLU - mindspore.ops.GetNext - mindspore.ops.HSigmoid - mindspore.ops.HSwish - mindspore.ops.KLDivLoss - mindspore.ops.L2Loss - mindspore.ops.L2Normalize - mindspore.ops.LARSUpdate - mindspore.ops.LayerNorm - mindspore.ops.LogSoftmax - mindspore.ops.LRN - mindspore.ops.LSTM - mindspore.ops.MaxPool - mindspore.ops.MaxPool3D - mindspore.ops.MaxPoolWithArgmax - mindspore.ops.MirrorPad - mindspore.ops.Mish - mindspore.ops.NLLLoss - mindspore.ops.OneHot - mindspore.ops.Pad - mindspore.ops.PReLU - mindspore.ops.ReLU - mindspore.ops.ReLU6 - mindspore.ops.ReLUV2 - mindspore.ops.ResizeBilinear - mindspore.ops.RNNTLoss - mindspore.ops.ROIAlign - mindspore.ops.SeLU - mindspore.ops.SGD - mindspore.ops.Sigmoid - mindspore.ops.SigmoidCrossEntropyWithLogits - mindspore.ops.SmoothL1Loss - mindspore.ops.Softmax - mindspore.ops.SoftmaxCrossEntropyWithLogits - mindspore.ops.Softplus - mindspore.ops.Softsign - mindspore.ops.SparseApplyAdagrad - mindspore.ops.SparseApplyAdagradV2 - mindspore.ops.SparseApplyProximalAdagrad - mindspore.ops.SparseSoftmaxCrossEntropyWithLogits - mindspore.ops.Stack - mindspore.ops.Tanh - mindspore.ops.TopK - mindspore.ops.Unstack - -Math Operators -^^^^^^^^^^^^^^ - -.. 
msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.Abs - mindspore.ops.AccumulateNV2 - mindspore.ops.ACos - mindspore.ops.Add - mindspore.ops.AddN - mindspore.ops.ApproximateEqual - mindspore.ops.Asin - mindspore.ops.Asinh - mindspore.ops.AssignAdd - mindspore.ops.AssignSub - mindspore.ops.Atan - mindspore.ops.Atan2 - mindspore.ops.Atanh - mindspore.ops.BatchMatMul - mindspore.ops.BesselI0e - mindspore.ops.BesselI1e - mindspore.ops.BitwiseAnd - mindspore.ops.BitwiseOr - mindspore.ops.BitwiseXor - mindspore.ops.Ceil - mindspore.ops.Cos - mindspore.ops.Cosh - mindspore.ops.CumProd - mindspore.ops.CumSum - mindspore.ops.Div - mindspore.ops.DivNoNan - mindspore.ops.Eps - mindspore.ops.Equal - mindspore.ops.EqualCount - mindspore.ops.Erf - mindspore.ops.Erfc - mindspore.ops.Exp - mindspore.ops.Expm1 - mindspore.ops.FloatStatus - mindspore.ops.Floor - mindspore.ops.FloorDiv - mindspore.ops.Greater - mindspore.ops.GreaterEqual - mindspore.ops.HistogramFixedWidth - mindspore.ops.IndexAdd - mindspore.ops.InplaceAdd - mindspore.ops.InplaceSub - mindspore.ops.Inv - mindspore.ops.Invert - mindspore.ops.IsInf - mindspore.ops.IsNan - mindspore.ops.Less - mindspore.ops.LessEqual - mindspore.ops.LinSpace - mindspore.ops.Log - mindspore.ops.Log1p - mindspore.ops.LogicalAnd - mindspore.ops.LogicalNot - mindspore.ops.LogicalOr - mindspore.ops.MatMul - mindspore.ops.MatrixInverse - mindspore.ops.Maximum - mindspore.ops.Minimum - mindspore.ops.Mod - mindspore.ops.Mul - mindspore.ops.MulNoNan - mindspore.ops.Neg - mindspore.ops.NMSWithMask - mindspore.ops.NotEqual - mindspore.ops.NPUAllocFloatStatus - mindspore.ops.NPUClearFloatStatus - mindspore.ops.NPUGetFloatStatus - mindspore.ops.Pow - mindspore.ops.RealDiv - mindspore.ops.Reciprocal - mindspore.ops.ReduceAll - mindspore.ops.ReduceAny - mindspore.ops.ReduceMax - mindspore.ops.ReduceMean - mindspore.ops.ReduceMin - mindspore.ops.ReduceProd - mindspore.ops.ReduceSum - 
mindspore.ops.Round - mindspore.ops.Rsqrt - mindspore.ops.Sign - mindspore.ops.Sin - mindspore.ops.Sinh - mindspore.ops.Sqrt - mindspore.ops.Square - mindspore.ops.SquaredDifference - mindspore.ops.SquareSumAll - mindspore.ops.Sub - mindspore.ops.Tan - mindspore.ops.TruncateDiv - mindspore.ops.TruncateMod - mindspore.ops.Xdivy - mindspore.ops.Xlogy - -Array Operators -^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.ApplyFtrl - mindspore.ops.Argmax - mindspore.ops.ArgMaxWithValue - mindspore.ops.Argmin - mindspore.ops.ArgMinWithValue - mindspore.ops.BatchToSpace - mindspore.ops.BatchToSpaceND - mindspore.ops.BroadcastTo - mindspore.ops.Cast - mindspore.ops.Concat - mindspore.ops.DepthToSpace - mindspore.ops.DType - mindspore.ops.DynamicShape - mindspore.ops.EditDistance - mindspore.ops.EmbeddingLookup - mindspore.ops.ExpandDims - mindspore.ops.Eye - mindspore.ops.Fill - mindspore.ops.FusedSparseFtrl - mindspore.ops.Gather - mindspore.ops.GatherD - mindspore.ops.GatherNd - mindspore.ops.Identity - mindspore.ops.InplaceUpdate - mindspore.ops.InvertPermutation - mindspore.ops.IsFinite - mindspore.ops.IsInstance - mindspore.ops.IsSubClass - mindspore.ops.Meshgrid - mindspore.ops.Ones - mindspore.ops.OnesLike - mindspore.ops.Padding - mindspore.ops.ParallelConcat - mindspore.ops.Randperm - mindspore.ops.Rank - mindspore.ops.Reshape - mindspore.ops.ResizeNearestNeighbor - mindspore.ops.ReverseSequence - mindspore.ops.ReverseV2 - mindspore.ops.Rint - mindspore.ops.SameTypeShape - mindspore.ops.ScalarCast - mindspore.ops.ScalarToArray - mindspore.ops.ScalarToTensor - mindspore.ops.ScatterAdd - mindspore.ops.ScatterDiv - mindspore.ops.ScatterMax - mindspore.ops.ScatterMin - mindspore.ops.ScatterMul - mindspore.ops.ScatterNd - mindspore.ops.ScatterNdAdd - mindspore.ops.ScatterNdSub - mindspore.ops.ScatterNdUpdate - mindspore.ops.ScatterNonAliasingAdd - mindspore.ops.ScatterSub - 
mindspore.ops.ScatterUpdate - mindspore.ops.Select - mindspore.ops.Shape - mindspore.ops.Size - mindspore.ops.Slice - mindspore.ops.Sort - mindspore.ops.SpaceToBatch - mindspore.ops.SpaceToBatchND - mindspore.ops.SpaceToDepth - mindspore.ops.SparseApplyFtrl - mindspore.ops.SparseApplyFtrlV2 - mindspore.ops.SparseGatherV2 - mindspore.ops.Split - mindspore.ops.Squeeze - mindspore.ops.StridedSlice - mindspore.ops.TensorScatterUpdate - mindspore.ops.Tile - mindspore.ops.Transpose - mindspore.ops.TupleToArray - mindspore.ops.Unique - mindspore.ops.UniqueWithPad - mindspore.ops.UnsortedSegmentMax - mindspore.ops.UnsortedSegmentMin - mindspore.ops.UnsortedSegmentProd - mindspore.ops.UnsortedSegmentSum - mindspore.ops.Zeros - mindspore.ops.ZerosLike - -Communication Operators -^^^^^^^^^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.AllGather - mindspore.ops.AllReduce - mindspore.ops.Broadcast - mindspore.ops.ReduceOp - mindspore.ops.ReduceScatter - -Debug Operators -^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.HistogramSummary - mindspore.ops.ImageSummary - mindspore.ops.InsertGradientOf - mindspore.ops.Print - mindspore.ops.ScalarSummary - mindspore.ops.TensorSummary - -Random Operators -^^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.Gamma - mindspore.ops.LogUniformCandidateSampler - mindspore.ops.Multinomial - mindspore.ops.Poisson - mindspore.ops.RandomCategorical - mindspore.ops.RandomChoiceWithMask - mindspore.ops.StandardLaplace - mindspore.ops.StandardNormal - mindspore.ops.UniformCandidateSampler - mindspore.ops.UniformInt - mindspore.ops.UniformReal - -Sponge Operators -^^^^^^^^^^^^^^^^ - -.. 
msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.AngleAtomEnergy - mindspore.ops.AngleEnergy - mindspore.ops.AngleForce - mindspore.ops.AngleForceWithAtomEnergy - mindspore.ops.BondAtomEnergy - mindspore.ops.BondEnergy - mindspore.ops.BondForce - mindspore.ops.BondForceWithAtomEnergy - mindspore.ops.BondForceWithAtomVirial - mindspore.ops.DihedralAtomEnergy - mindspore.ops.DihedralEnergy - mindspore.ops.DihedralForce - mindspore.ops.DihedralForceWithAtomEnergy - mindspore.ops.Dihedral14CFAtomEnergy - mindspore.ops.Dihedral14CFEnergy - mindspore.ops.Dihedral14LJAtomEnergy - mindspore.ops.Dihedral14LJCFForceWithAtomEnergy - mindspore.ops.Dihedral14LJEnergy - mindspore.ops.Dihedral14LJForce - mindspore.ops.Dihedral14LJForceWithDirectCF - mindspore.ops.LJEnergy - mindspore.ops.LJForce - mindspore.ops.LJForceWithPMEDirectForce - mindspore.ops.MDIterationLeapFrog - mindspore.ops.NeighborListUpdate - mindspore.ops.PMEEnergy - mindspore.ops.PMEExcludedForce - mindspore.ops.PMEReciprocalForce - -Image Operators -^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.CropAndResize - -Other Operators -^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.Assign - mindspore.ops.BoundingBoxDecode - mindspore.ops.BoundingBoxEncode - mindspore.ops.CheckValid - mindspore.ops.Depend - mindspore.ops.InTopK - mindspore.ops.IOU - mindspore.ops.NoRepeatNGram - mindspore.ops.PopulationCount diff --git a/docs/api_python/source_en/mindspore_hub/mindspore_hub.rst b/docs/api_python/source_en/mindspore_hub/mindspore_hub.rst deleted file mode 100644 index 34b6c1ceb89517e9799d566d0bb64dc497741b24..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore_hub/mindspore_hub.rst +++ /dev/null @@ -1,6 +0,0 @@ -mindspore_hub -============= - -.. 
automodule:: mindspore_hub - :members: - diff --git a/docs/api_python/source_en/mindspore_serving/mindspore_serving.rst b/docs/api_python/source_en/mindspore_serving/mindspore_serving.rst deleted file mode 100644 index a640954ab29893c1fa17daba25cebbc752a52927..0000000000000000000000000000000000000000 --- a/docs/api_python/source_en/mindspore_serving/mindspore_serving.rst +++ /dev/null @@ -1,34 +0,0 @@ -mindspore_serving -================= - -.. automodule:: mindspore_serving - -mindspore_serving.master ------------------------- - -.. automodule:: mindspore_serving.master - :members: - -mindspore_serving.worker ------------------------- - -.. automodule:: mindspore_serving.worker - :members: start_servable, start_servable_in_master, stop - -mindspore_serving.worker.register -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. automodule:: mindspore_serving.worker.register - :members: - -mindspore_serving.worker.distributed -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. automodule:: mindspore_serving.worker.distributed - :members: - -mindspore_serving.client ------------------------- - -.. automodule:: mindspore_serving.client - :members: diff --git a/docs/api_python/source_zh_cn/_ext/my_signature.py b/docs/api_python/source_zh_cn/_ext/my_signature.py deleted file mode 100644 index 627b12cc4dc55811fcfbae956f082b784de3797e..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/_ext/my_signature.py +++ /dev/null @@ -1,349 +0,0 @@ -""" -Rewrote the Signature module that fix default signature error for autodoc module. -""" - -import inspect -import re -import types -import functools - - -def _sort_param(param_list, target_str): - """Sort param_list as default order.""" - ls = [] - for param_name in param_list: - ls.append((param_name, target_str.find(param_name))) - ls.sort(key=lambda x: x[1], reverse=False) - ls = [i[0] for i in ls] - return ls - - -def get_default_params(func): - """ Get the default signatures from function. 
""" - source_code = inspect.getsource(func) - func_code = func.__code__ - pos_count = func_code.co_argcount - arg_names = func_code.co_varnames - karg_pos = func_code.co_kwonlyargcount - kwargs_num = arg_names.count("args") + arg_names.count("kwargs") - all_param_names = list(arg_names[:pos_count+karg_pos+kwargs_num]) - all_params = re.findall(r"def [\w_\d\-]+\(([\S\s]*?)\):", source_code)[0].replace("\n", "").replace("'", "\"") - - # sub null spaces from matched all param str. - re_space_sub = re.compile(r",\s+") - all_params = re_space_sub.sub(",", all_params) - - all_param_names = _sort_param(all_param_names, all_params) - - # sub the extra "=" from param. - re_equate_sub = re.compile("=") - - re_defaults_param = re.compile(r"(.*?)".join(all_param_names) + r"(.*)") - defaults_params = re_defaults_param.findall(all_params) - if defaults_params: - if isinstance(defaults_params[0], tuple): - defaults_params = list(defaults_params[0]) - defaults_params_list = [] - for i in defaults_params: - if "=" in i and i: - i = re_equate_sub.sub("", i, count=1).strip(",") - if i[:6] == "lambda": - i = "<" + i + ">" - defaults_params_list.append(i) - defaults_params_tuple = tuple(defaults_params_list) - return defaults_params_tuple - return func.__defaults__ - - -def _my_signature_from_function(cls, func): - """Private helper: constructs Signature for the given python function.""" - - is_duck_function = False - if not inspect.isfunction(func): - if inspect._signature_is_functionlike(func): # pylint: disable=protected-access - is_duck_function = True - else: - # If it's not a pure Python function, and not a duck type - # of pure function: - raise TypeError('{!r} is not a Python function'.format(func)) - - Parameter = cls._parameter_cls # pylint: disable=protected-access - - # Parameter information._partialmethod - func_code = func.__code__ - pos_count = func_code.co_argcount - arg_names = func_code.co_varnames - positional = tuple(arg_names[:pos_count]) - keyword_only_count = 
func_code.co_kwonlyargcount - keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)] - annotations = func.__annotations__ - defaults = get_default_params(func) - kwdefaults = func.__kwdefaults__ - pos_defaults = func.__defaults__ - - if pos_defaults: - pos_default_count = len(pos_defaults) - else: - pos_default_count = 0 - - parameters = [] - - # Non-keyword-only parameters w/o defaults. - non_default_count = pos_count - pos_default_count - for name in positional[:non_default_count]: - annotation = annotations.get(name, inspect._empty) # pylint: disable=protected-access - parameters.append(Parameter(name, annotation=annotation, - kind=inspect._POSITIONAL_OR_KEYWORD)) # pylint: disable=protected-access - - # ... w/ defaults. - for offset, name in enumerate(positional[non_default_count:]): - annotation = annotations.get(name, inspect._empty) # pylint: disable=protected-access - parameters.append(Parameter(name, annotation=annotation, - kind=inspect._POSITIONAL_OR_KEYWORD, # pylint: disable=protected-access - default=defaults[offset])) - - # *args - if func_code.co_flags & inspect.CO_VARARGS: - name = arg_names[pos_count + keyword_only_count] - annotation = annotations.get(name, inspect._empty) # pylint: disable=protected-access - parameters.append(Parameter(name, annotation=annotation, - kind=inspect._VAR_POSITIONAL)) # pylint: disable=protected-access - - # Keyword-only parameters. 
- for name in keyword_only: - default = inspect._empty # pylint: disable=protected-access - if kwdefaults is not None: - default = kwdefaults.get(name, inspect._empty) # pylint: disable=protected-access - - annotation = annotations.get(name, inspect._empty) # pylint: disable=protected-access - parameters.append(Parameter(name, annotation=annotation, - kind=inspect._KEYWORD_ONLY, # pylint: disable=protected-access - default=default)) - # **kwargs - if func_code.co_flags & inspect.CO_VARKEYWORDS: - index = pos_count + keyword_only_count - if func_code.co_flags & inspect.CO_VARARGS: - index += 1 - - name = arg_names[index] - annotation = annotations.get(name, inspect._empty) # pylint: disable=protected-access - parameters.append(Parameter(name, annotation=annotation, - kind=inspect._VAR_KEYWORD)) # pylint: disable=protected-access - - # Is 'func' is a pure Python function - don't validate the - # parameters list (for correct order and defaults), it should be OK. - return cls(parameters, - return_annotation=annotations.get('return', inspect._empty), # pylint: disable=protected-access - __validate_parameters__=is_duck_function) - - -def _my_signature_from_callable(obj, *, - follow_wrapper_chains=True, - skip_bound_arg=True, - sigcls): - """Private helper function to get signature for arbitrary - callable objects. - """ - - if not callable(obj): - raise TypeError('{!r} is not a callable object'.format(obj)) - - if isinstance(obj, types.MethodType): - # In this case we skip the first parameter of the underlying - # function (usually `self` or `cls`). - sig = _my_signature_from_callable( - obj.__func__, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - - if skip_bound_arg: - return inspect._signature_bound_method(sig) # pylint: disable=protected-access - return sig - - # Was this function wrapped by a decorator? 
- if follow_wrapper_chains: - obj = inspect.unwrap(obj, stop=(lambda f: hasattr(f, "__signature__"))) - if isinstance(obj, types.MethodType): - # If the unwrapped object is a *method*, we might want to - # skip its first parameter (self). - # See test_signature_wrapped_bound_method for details. - return _my_signature_from_callable( - obj, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - - try: - sig = obj.__signature__ - except AttributeError: - pass - else: - if sig is not None: - if not isinstance(sig, MySignature): - raise TypeError( - 'unexpected object {!r} in __signature__ ' - 'attribute'.format(sig)) - return sig - - try: - partialmethod = obj._partialmethod # pylint: disable=protected-access - except AttributeError: - pass - else: - if isinstance(partialmethod, functools.partialmethod): - # Unbound partialmethod (see functools.partialmethod) - # This means, that we need to calculate the signature - # as if it's a regular partial object, but taking into - # account that the first positional argument - # (usually `self`, or `cls`) will not be passed - # automatically (as for boundmethods) - - wrapped_sig = _my_signature_from_callable( - partialmethod.func, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - - sig = inspect._signature_get_partial(wrapped_sig, partialmethod, (None,)) # pylint: disable=protected-access - first_wrapped_param = tuple(wrapped_sig.parameters.values())[0] - if first_wrapped_param.kind is Parameter.VAR_POSITIONAL: # pylint: disable=no-else-return - # First argument of the wrapped callable is `*args`, as in - # `partialmethod(lambda *args)`. 
- return sig - else: - sig_params = tuple(sig.parameters.values()) - assert (not sig_params or - first_wrapped_param is not sig_params[0]) - new_params = (first_wrapped_param,) + sig_params - return sig.replace(parameters=new_params) - - if inspect.isfunction(obj) or inspect._signature_is_functionlike(obj): # pylint: disable=protected-access - # If it's a pure Python function, or an object that is duck type - # of a Python function (Cython functions, for instance), then: - return _my_signature_from_function(sigcls, obj) - - if inspect._signature_is_builtin(obj): # pylint: disable=protected-access - return inspect._signature_from_builtin(sigcls, obj, # pylint: disable=protected-access - skip_bound_arg=skip_bound_arg) - - if isinstance(obj, functools.partial): - wrapped_sig = _my_signature_from_callable( - obj.func, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - return inspect._signature_get_partial(wrapped_sig, obj) # pylint: disable=protected-access - - sig = None - if isinstance(obj, type): - # obj is a class or a metaclass - - # First, let's see if it has an overloaded __call__ defined - # in its metaclass - call = inspect._signature_get_user_defined_method(type(obj), '__call__') # pylint: disable=protected-access - if call is not None: - sig = _my_signature_from_callable( - call, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - else: - # Now we check if the 'obj' class has a '__new__' method - new = inspect._signature_get_user_defined_method(obj, '__new__') # pylint: disable=protected-access - if new is not None: - sig = _my_signature_from_callable( - new, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - else: - # Finally, we should have at least __init__ implemented - init = inspect._signature_get_user_defined_method(obj, '__init__') # pylint: disable=protected-access - if init is not None: - sig = 
_my_signature_from_callable( - init, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - - if sig is None: - # At this point we know, that `obj` is a class, with no user- - # defined '__init__', '__new__', or class-level '__call__' - - for base in obj.__mro__[:-1]: - # Since '__text_signature__' is implemented as a - # descriptor that extracts text signature from the - # class docstring, if 'obj' is derived from a builtin - # class, its own '__text_signature__' may be 'None'. - # Therefore, we go through the MRO (except the last - # class in there, which is 'object') to find the first - # class with non-empty text signature. - try: - text_sig = base.__text_signature__ - except AttributeError: - pass - else: - if text_sig: - # If 'obj' class has a __text_signature__ attribute: - # return a signature based on it - return inspect._signature_fromstr(sigcls, obj, text_sig) # pylint: disable=protected-access - - # No '__text_signature__' was found for the 'obj' class. - # Last option is to check if its '__init__' is - # object.__init__ or type.__init__. - if type not in obj.__mro__: - # We have a class (not metaclass), but no user-defined - # __init__ or __new__ for it - if (obj.__init__ is object.__init__ and # pylint: disable=no-else-return - obj.__new__ is object.__new__): - # Return a signature of 'object' builtin. 
- return sigcls.from_callable(object) - else: - raise ValueError( - 'no signature found for builtin type {!r}'.format(obj)) - - elif not isinstance(obj, inspect._NonUserDefinedCallables): # pylint: disable=protected-access - # An object with __call__ - # We also check that the 'obj' is not an instance of - # _WrapperDescriptor or _MethodWrapper to avoid - # infinite recursion (and even potential segfault) - call = inspect._signature_get_user_defined_method(type(obj), '__call__') # pylint: disable=protected-access - if call is not None: - try: - sig = _my_signature_from_callable( - call, - follow_wrapper_chains=follow_wrapper_chains, - skip_bound_arg=skip_bound_arg, - sigcls=sigcls) - except ValueError as ex: - msg = 'no signature found for {!r}'.format(obj) - raise ValueError(msg) from ex - - if sig is not None: - # For classes and objects we skip the first parameter of their - # __call__, __new__, or __init__ methods - if skip_bound_arg: - return inspect._signature_bound_method(sig) # pylint: disable=protected-access - return sig - - if isinstance(obj, types.BuiltinFunctionType): - # Raise a nicer error message for builtins - msg = 'no signature found for builtin function {!r}'.format(obj) - raise ValueError(msg) - - raise ValueError('callable {!r} is not supported by signature'.format(obj)) - - -class MySignature(inspect.Signature): - - @classmethod - def from_callable(cls, obj, *, follow_wrapped=True): - """Constructs Signature for the given callable object.""" - return _my_signature_from_callable(obj, sigcls=cls, - follow_wrapper_chains=follow_wrapped) - - -def signature(obj, *, follow_wrapped=True): - """Get a signature object for the passed callable.""" - return MySignature.from_callable(obj, follow_wrapped=follow_wrapped) diff --git a/docs/api_python/source_zh_cn/_static/logo_notebook.png b/docs/api_python/source_zh_cn/_static/logo_notebook.png deleted file mode 100644 index 18c2e29e4b73ee428f70253feffdd855fdf0c422..0000000000000000000000000000000000000000 
Binary files a/docs/api_python/source_zh_cn/_static/logo_notebook.png and /dev/null differ diff --git a/docs/api_python/source_zh_cn/_static/logo_source.png b/docs/api_python/source_zh_cn/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/api_python/source_zh_cn/_static/logo_source.png and /dev/null differ diff --git a/docs/api_python/source_zh_cn/_templates/classtemplate.rst b/docs/api_python/source_zh_cn/_templates/classtemplate.rst deleted file mode 100644 index fd88815f7b49e1cd25195fc8eceba498eafe780c..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/_templates/classtemplate.rst +++ /dev/null @@ -1,24 +0,0 @@ -.. role:: hidden - :class: hidden-section - -.. currentmodule:: {{ module }} - -{% if objname in [] %} -{{ fullname | underline }} - -.. autofunction:: {{ fullname }} -{% elif objname[0].istitle() %} -{{ fullname | underline }} - -.. autoclass:: {{ name }} - :members: - -{% else %} -{{ fullname | underline }} - -.. autofunction:: {{ fullname }} -{% endif %} - -.. - autogenerated from _templates/classtemplate.rst - note it does not have :inherited-members: diff --git a/docs/api_python/source_zh_cn/_templates/classtemplate_inherited.rst b/docs/api_python/source_zh_cn/_templates/classtemplate_inherited.rst deleted file mode 100644 index 8f4a423dca6e678c191df73d142e4e52a862a3db..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/_templates/classtemplate_inherited.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. role:: hidden - :class: hidden-section - -.. currentmodule:: {{ module }} - -{% if objname[0].istitle() %} -{{ fullname | underline }} - -.. autoclass:: {{ name }} - :inherited-members: - :members: - -{% elif fullname=="mindspore.numpy.ix_" %} - -mindspore.numpy.ix\_ -==================== - -.. autofunction:: mindspore.numpy.ix_ - -{% else %} -{{ fullname | underline }} - -.. 
autofunction:: {{ fullname }} -{% endif %} - -.. autogenerated from _templates/classtemplate_inherited.rst \ No newline at end of file diff --git a/docs/api_python/source_zh_cn/_templates/classtemplate_probability.rst b/docs/api_python/source_zh_cn/_templates/classtemplate_probability.rst deleted file mode 100644 index 6329880e1fc540de910b25d1724a2cfba8d501f2..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/_templates/classtemplate_probability.rst +++ /dev/null @@ -1,13 +0,0 @@ -.. role:: hidden - :class: hidden-section - -.. currentmodule:: {{ module }} - -{{ fullname | underline }} - -.. autoclass:: {{ name }} - :members: - -.. - autogenerated from _templates/classtemplate.rst - note it does not have :inherited-members: diff --git a/docs/api_python/source_zh_cn/conf.py b/docs/api_python/source_zh_cn/conf.py deleted file mode 100644 index 073012d085634213d93992be12141866a57d0db8..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/conf.py +++ /dev/null @@ -1,398 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# - -import os -import re -import sys -sys.path.append(os.path.abspath('./_ext')) -import sphinx.ext.autosummary.generate as g -from sphinx.ext import autodoc as sphinx_autodoc -from sphinx.util import inspect as sphinx_inspect -from sphinx.domains import python as sphinx_domain_python -from textwrap import dedent -# sys.path.insert(0, os.path.abspath('.')) - -import mindspore -# If you don't want to generate MindArmour APIs, comment this line. -import mindarmour -# If you don't want to generate MindSpore_Hub APIs, comment this line. -import mindspore_hub -# If you don't want to generate MindSpore_Serving APIs, comment this line. -import mindspore_serving - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.napoleon', - 'sphinx.ext.viewcode', - 'sphinx_markdown_tables', - 'recommonmark', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. 
-exclude_patterns = [] - -pygments_style = 'sphinx' - -autodoc_inherit_docstrings = False - -autosummary_generate = True - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_search_language = 'zh' - -html_search_options = {'dict': '../../resource/jieba.txt'} - -html_static_path = ['_static'] - -# -- Options for Texinfo output ------------------------------------------- - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - 'python': ('https://docs.python.org/', '../python_objects.inv'), - 'numpy': ('https://docs.scipy.org/doc/numpy/', '../numpy_objects.inv'), -} - -from typing import List, Tuple -from docutils.nodes import Node - -from sphinx.locale import __ -from sphinx.ext.autosummary import Autosummary, posixpath, addnodes, logger, Matcher, autosummary_toc, get_import_prefixes_from_env -from sphinx.ext.autosummary import mock, StringList, ModuleType, get_documenter, ModuleAnalyzer, PycodeError, mangle_signature -from sphinx.ext.autosummary import import_by_name, extract_summary, autosummary_table, nodes, switch_source_input, rst -from sphinx.ext.autodoc.directive import DocumenterBridge, Options - -class MsAutosummary(Autosummary): - """ - Inherited from sphinx's autosummary, add titles and a column for the generated table. 
- """ - - def init(self): - """ - init method - """ - self.find_doc_name = "" - self.third_title = "" - self.default_doc = "" - - def extract_env_summary(self, doc: List[str]) -> str: - """Extract env summary from docstring.""" - env_sum = self.default_doc - for i, piece in enumerate(doc): - if piece.startswith(self.find_doc_name): - env_sum = doc[i+1][4:] - return env_sum - - def run(self): - """ - run method - """ - self.init() - self.bridge = DocumenterBridge(self.env, self.state.document.reporter, - Options(), self.lineno, self.state) - - names = [x.strip().split()[0] for x in self.content - if x.strip() and re.search(r'^[~a-zA-Z_]', x.strip()[0])] - items = self.get_items(names) - teble_nodes = self.get_table(items) - - if 'toctree' in self.options: - dirname = posixpath.dirname(self.env.docname) - - tree_prefix = self.options['toctree'].strip() - docnames = [] - excluded = Matcher(self.config.exclude_patterns) - for item in items: - docname = posixpath.join(tree_prefix, item[3]) - docname = posixpath.normpath(posixpath.join(dirname, docname)) - if docname not in self.env.found_docs: - location = self.state_machine.get_source_and_line(self.lineno) - if excluded(self.env.doc2path(docname, None)): - msg = __('autosummary references excluded document %r. Ignored.') - else: - msg = __('autosummary: stub file not found %r. ' - 'Check your autosummary_generate setting.') - logger.warning(msg, item[3], location=location) - continue - docnames.append(docname) - - if docnames: - tocnode = addnodes.toctree() - tocnode['includefiles'] = docnames - tocnode['entries'] = [(None, docn) for docn in docnames] - tocnode['maxdepth'] = -1 - tocnode['glob'] = None - teble_nodes.append(autosummary_toc('', '', tocnode)) - return teble_nodes - - def get_items(self, names: List[str]) -> List[Tuple[str, str, str, str, str]]: - """Try to import the given names, and return a list of - ``[(name, signature, summary_string, real_name, env_summary), ...]``. 
- """ - prefixes = get_import_prefixes_from_env(self.env) - items = [] # type: List[Tuple[str, str, str, str, str]] - max_item_chars = 50 - - for name in names: - display_name = name - if name.startswith('~'): - name = name[1:] - display_name = name.split('.')[-1] - try: - with mock(self.config.autosummary_mock_imports): - real_name, obj, parent, modname = import_by_name(name, prefixes=prefixes) - except ImportError: - logger.warning(__('failed to import %s'), name) - items.append((name, '', '', name, '')) - continue - - self.bridge.result = StringList() # initialize for each documenter - full_name = real_name - if not isinstance(obj, ModuleType): - # give explicitly separated module name, so that members - # of inner classes can be documented - full_name = modname + '::' + full_name[len(modname) + 1:] - # NB. using full_name here is important, since Documenters - # handle module prefixes slightly differently - doccls = get_documenter(self.env.app, obj, parent) - documenter = doccls(self.bridge, full_name) - - if not documenter.parse_name(): - logger.warning(__('failed to parse name %s'), real_name) - items.append((display_name, '', '', real_name, '')) - continue - if not documenter.import_object(): - logger.warning(__('failed to import object %s'), real_name) - items.append((display_name, '', '', real_name, '')) - continue - if documenter.options.members and not documenter.check_module(): - continue - - # try to also get a source code analyzer for attribute docs - try: - documenter.analyzer = ModuleAnalyzer.for_module( - documenter.get_real_modname()) - # parse right now, to get PycodeErrors on parsing (results will - # be cached anyway) - documenter.analyzer.find_attr_docs() - except PycodeError as err: - logger.debug('[autodoc] module analyzer failed: %s', err) - # no source file -- e.g. 
for builtin and C modules - documenter.analyzer = None - - # -- Grab the signature - - try: - sig = documenter.format_signature(show_annotation=False) - except TypeError: - # the documenter does not support ``show_annotation`` option - sig = documenter.format_signature() - - if not sig: - sig = '' - else: - max_chars = max(10, max_item_chars - len(display_name)) - sig = mangle_signature(sig, max_chars=max_chars) - - # -- Grab the summary - - documenter.add_content(None) - summary = extract_summary(self.bridge.result.data[:], self.state.document) - env_sum = self.extract_env_summary(self.bridge.result.data[:]) - items.append((display_name, sig, summary, real_name, env_sum)) - - return items - - def get_table(self, items: List[Tuple[str, str, str, str, str]]) -> List[Node]: - """Generate a proper list of table nodes for autosummary:: directive. - - *items* is a list produced by :meth:`get_items`. - """ - table_spec = addnodes.tabular_col_spec() - table_spec['spec'] = r'\X{1}{2}\X{1}{2}' - - table = autosummary_table('') - real_table = nodes.table('', classes=['longtable']) - table.append(real_table) - group = nodes.tgroup('', cols=3) - real_table.append(group) - group.append(nodes.colspec('', colwidth=10)) - group.append(nodes.colspec('', colwidth=70)) - group.append(nodes.colspec('', colwidth=30)) - body = nodes.tbody('') - group.append(body) - - def append_row(*column_texts: str) -> None: - row = nodes.row('', color="red") - source, line = self.state_machine.get_source_and_line() - for text in column_texts: - node = nodes.paragraph('') - vl = StringList() - vl.append(text, '%s:%d:' % (source, line)) - with switch_source_input(self.state, vl): - self.state.nested_parse(vl, 0, node) - try: - if isinstance(node[0], nodes.paragraph): - node = node[0] - except IndexError: - pass - row.append(nodes.entry('', node)) - body.append(row) - - # add table's title - append_row("**API Name**", "**Description**", self.third_title) - for name, sig, summary, real_name, env_sum in 
items: - qualifier = 'obj' - if 'nosignatures' not in self.options: - col1 = ':%s:`%s <%s>`\\ %s' % (qualifier, name, real_name, rst.escape(sig)) - else: - col1 = ':%s:`%s <%s>`' % (qualifier, name, real_name) - col2 = summary - col3 = env_sum - append_row(col1, col2, col3) - - return [table_spec, table] - - -class MsNoteAutoSummary(MsAutosummary): - """ - Inherited from MsAutosummary. Add a third column about `Note` to the table. - """ - - def init(self): - """ - init method - """ - self.find_doc_name = ".. note::" - self.third_title = "**Note**" - self.default_doc = "None" - - def extract_env_summary(self, doc: List[str]) -> str: - """Extract env summary from docstring.""" - env_sum = self.default_doc - for piece in doc: - if piece.startswith(self.find_doc_name): - env_sum = piece[10:] - return env_sum - - -class MsPlatformAutoSummary(MsAutosummary): - """ - Inherited from MsAutosummary. Add a third column about `Supported Platforms` to the table. - """ - def init(self): - """ - init method - """ - self.find_doc_name = "Supported Platforms:" - self.third_title = "**{}**".format(self.find_doc_name[:-1]) - self.default_doc = "To Be Developed" - - -def setup(app): - app.add_directive('msplatformautosummary', MsPlatformAutoSummary) - app.add_directive('msnoteautosummary', MsNoteAutoSummary) - -# Modify regex for sphinx.ext.autosummary.generate.find_autosummary_in_lines. -gfile_abs_path = os.path.abspath(g.__file__) -autosummary_re_line_old = r"autosummary_re = re.compile(r'^(\s*)\.\.\s+autosummary::\s*')" -autosummary_re_line_new = r"autosummary_re = re.compile(r'^(\s*)\.\.\s+(ms[a-z]*)?autosummary::\s*')" -with open(gfile_abs_path, "r+", encoding="utf8") as f: - data = f.read() - data = data.replace(autosummary_re_line_old, autosummary_re_line_new) - f.seek(0) - f.write(data) - -# Modify default signatures for autodoc. 
-autodoc_source_path = os.path.abspath(sphinx_autodoc.__file__) -inspect_source_path = os.path.abspath(sphinx_inspect.__file__) -autodoc_source_re = re.compile(r"(\s+)args = self\.format_args\(\*\*kwargs\)") -inspect_source_code_str = """signature = inspect.signature(subject)""" -inspect_target_code_str = """signature = my_signature.signature(subject)""" -autodoc_source_code_str = """args = self.format_args(**kwargs)""" -is_autodoc_code_str = """args = args.replace("'", "")""" -with open(autodoc_source_path, "r+", encoding="utf8") as f: - code_str = f.read() - if is_autodoc_code_str not in code_str: - code_str_lines = code_str.split("\n") - autodoc_target_code_str = None - for line in code_str_lines: - re_matched_str = autodoc_source_re.search(line) - if re_matched_str: - space_num = re_matched_str.group(1) - autodoc_target_code_str = dedent("""\ - {0} - {1}if type(args) != type(None): - {1} {2}""".format(autodoc_source_code_str, space_num, is_autodoc_code_str)) - break - if autodoc_target_code_str: - code_str = code_str.replace(autodoc_source_code_str, autodoc_target_code_str) - f.seek(0) - f.truncate() - f.write(code_str) -with open(inspect_source_path, "r+", encoding="utf8") as g: - code_str = g.read() - if inspect_target_code_str not in code_str: - code_str = code_str.replace(inspect_source_code_str, inspect_target_code_str) - if "import my_signature" not in code_str: - code_str = code_str.replace("import sys", "import sys\nimport my_signature") - g.seek(0) - g.truncate() - g.write(code_str) - -# remove extra space for default params for autodoc. 
-sphinx_domain_python_source_path = os.path.abspath(sphinx_domain_python.__file__) -python_code_source = """for argument in arglist.split(','):""" -python_code_target = """for argument in [" " + i if num > 1 else i for num,i in enumerate(arglist.split(", "))]:""" -with open(sphinx_domain_python_source_path, "r+", encoding="utf8") as f: - code_str = f.read() - if python_code_target not in code_str: - code_str = code_str.replace(python_code_source, python_code_target) - f.seek(0) - f.truncate() - f.write(code_str) diff --git a/docs/api_python/source_zh_cn/index.rst b/docs/api_python/source_zh_cn/index.rst deleted file mode 100644 index abbe42c21a69f3b9a6db4bc7a049bcf8f8be5387..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/index.rst +++ /dev/null @@ -1,63 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 11:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore Python API -========================= - -.. toctree:: - :maxdepth: 1 - :caption: MindSpore Python API - - mindspore/mindspore - mindspore/mindspore.common.initializer - mindspore/mindspore.communication - mindspore/mindspore.compression - mindspore/mindspore.context - mindspore/mindspore.dataset - mindspore/mindspore.dataset.config - mindspore/mindspore.dataset.text - mindspore/mindspore.dataset.transforms - mindspore/mindspore.dataset.vision - mindspore/mindspore.explainer - mindspore/mindspore.mindrecord - mindspore/mindspore.nn - mindspore/mindspore.numpy - mindspore/mindspore.nn.probability - mindspore/mindspore.ops - mindspore/mindspore.profiler - mindspore/mindspore.train - -.. 
toctree:: - :maxdepth: 1 - :caption: MindArmour Python API - - mindarmour/mindarmour - mindarmour/mindarmour.adv_robustness.attacks - mindarmour/mindarmour.adv_robustness.defenses - mindarmour/mindarmour.adv_robustness.detectors - mindarmour/mindarmour.adv_robustness.evaluations - mindarmour/mindarmour.fuzz_testing - mindarmour/mindarmour.privacy.diff_privacy - mindarmour/mindarmour.privacy.evaluation - mindarmour/mindarmour.privacy.sup_privacy - mindarmour/mindarmour.utils - -.. toctree:: - :maxdepth: 1 - :caption: MindSpore Hub Python API - - mindspore_hub/mindspore_hub - -.. toctree:: - :maxdepth: 1 - :caption: MindSpore Serving Python API - - mindspore_serving/mindspore_serving - -.. toctree:: - :maxdepth: 1 - :caption: MindQuantum Python API - - mindquantum/mindquantum diff --git a/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.attacks.rst b/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.attacks.rst deleted file mode 100644 index 5b38b93fe907365e3ca531ccf124a64daf2345de..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.attacks.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.adv_robustness.attacks -================================= - -.. automodule:: mindarmour.adv_robustness.attacks - :members: diff --git a/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.defenses.rst b/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.defenses.rst deleted file mode 100644 index 5b01e203ae9d4b5345b70e490313afe174a26bdc..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.defenses.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.adv_robustness.defenses -================================== - -.. 
automodule:: mindarmour.adv_robustness.defenses - :members: diff --git a/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.detectors.rst b/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.detectors.rst deleted file mode 100644 index d82a5b18194bf345455aade22499f17c41ef9849..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.detectors.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.adv_robustness.detectors -=================================== - -.. automodule:: mindarmour.adv_robustness.detectors - :members: diff --git a/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.evaluations.rst b/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.evaluations.rst deleted file mode 100644 index 19aac889512a9671ea786d299b1a788c1daa68e1..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindarmour/mindarmour.adv_robustness.evaluations.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.adv_robustness.evaluations -===================================== - -.. automodule:: mindarmour.adv_robustness.evaluations - :members: diff --git a/docs/api_python/source_zh_cn/mindarmour/mindarmour.fuzz_testing.rst b/docs/api_python/source_zh_cn/mindarmour/mindarmour.fuzz_testing.rst deleted file mode 100644 index 558e8c1263926c0f701d048deb057e6f4531fadb..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindarmour/mindarmour.fuzz_testing.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.fuzz_testing -======================= - -.. 
automodule:: mindarmour.fuzz_testing - :members: diff --git a/docs/api_python/source_zh_cn/mindarmour/mindarmour.privacy.diff_privacy.rst b/docs/api_python/source_zh_cn/mindarmour/mindarmour.privacy.diff_privacy.rst deleted file mode 100644 index 9fd33f9e93ee95e814480d010903f73d60ece30d..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindarmour/mindarmour.privacy.diff_privacy.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.privacy.diff_privacy -=============================== - -.. automodule:: mindarmour.privacy.diff_privacy - :members: diff --git a/docs/api_python/source_zh_cn/mindarmour/mindarmour.privacy.evaluation.rst b/docs/api_python/source_zh_cn/mindarmour/mindarmour.privacy.evaluation.rst deleted file mode 100644 index eacc6a7cf687777fa6d9c6e1d87fa8352e0b6e6a..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindarmour/mindarmour.privacy.evaluation.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.privacy.evaluation -============================= - -.. automodule:: mindarmour.privacy.evaluation - :members: diff --git a/docs/api_python/source_zh_cn/mindarmour/mindarmour.privacy.sup_privacy.rst b/docs/api_python/source_zh_cn/mindarmour/mindarmour.privacy.sup_privacy.rst deleted file mode 100644 index 30396473a5c286e81b61982e85fe4cdfbb125d36..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindarmour/mindarmour.privacy.sup_privacy.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.privacy.sup_privacy -=============================== - -.. automodule:: mindarmour.privacy.sup_privacy - :members: diff --git a/docs/api_python/source_zh_cn/mindarmour/mindarmour.rst b/docs/api_python/source_zh_cn/mindarmour/mindarmour.rst deleted file mode 100644 index bd8fa328080f839a303e6ffc66962f60a4dad99c..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindarmour/mindarmour.rst +++ /dev/null @@ -1,6 +0,0 @@ -mindarmour -========== - -.. 
automodule:: mindarmour - :members: - :exclude-members: SuppressModel, SuppressMasker, SuppressCtrl diff --git a/docs/api_python/source_zh_cn/mindarmour/mindarmour.utils.rst b/docs/api_python/source_zh_cn/mindarmour/mindarmour.utils.rst deleted file mode 100644 index 9b1bee754dd1c7314825d1a2210f3a805920c4ed..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindarmour/mindarmour.utils.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindarmour.utils -================ - -.. automodule:: mindarmour.utils - :members: \ No newline at end of file diff --git a/docs/api_python/source_zh_cn/mindquantum/mindquantum.rst b/docs/api_python/source_zh_cn/mindquantum/mindquantum.rst deleted file mode 100644 index 2464675921f5451b687aaa86b624ba3359230f55..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindquantum/mindquantum.rst +++ /dev/null @@ -1,82 +0,0 @@ -mindquantum -=========== - -.. automodule:: mindquantum - -mindquantum.circuit -------------------- - -.. automodule:: mindquantum.circuit - :members: - - -mindquantum.engine ------------------- - -.. automodule:: mindquantum.engine - :members: - -mindquantum.gate ----------------- - -.. automodule:: mindquantum.gate - :members: - -functional ----------- - -The functional gates are the pre-instantiated quantum gates, which can be used directly as an instance of quantum gate. - -.. 
list-table:: - :widths: 50 50 - :header-rows: 1 - - * - functional - - gates - * - mindquantum.gate.CNOT - - :class:`mindquantum.gate.CNOTGate` - * - mindquantum.gate.I - - :class:`mindquantum.gate.IGate` - * - mindquantum.gate.H - - :class:`mindquantum.gate.HGate` - * - mindquantum.gate.S - - :class:`mindquantum.gate.PhaseShift` (numpy.pi/2) - * - mindquantum.gate.SWAP - - :class:`mindquantum.gate.SWAPGate` - * - mindquantum.gate.X - - :class:`mindquantum.gate.XGate` - * - mindquantum.gate.Y - - :class:`mindquantum.gate.YGate` - * - mindquantum.gate.Z - - :class:`mindquantum.gate.ZGate` - -mindquantum.nn --------------- - -.. automodule:: mindquantum.nn - :exclude-members: PQC, MindQuantumLayer, Evolution - :members: - -Operators -^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindquantum.nn.Evolution - mindquantum.nn.MindQuantumLayer - mindquantum.nn.PQC - -mindquantum.parameterresolver ------------------------------ - -.. automodule:: mindquantum.parameterresolver - :members: - -mindquantum.utils ------------------ - -.. automodule:: mindquantum.utils - :members: diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.common.initializer.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.common.initializer.rst deleted file mode 100644 index 5d6bbf0aa9dcc9374dc1bc83949a7d93517a8114..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.common.initializer.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.common.initializer -============================ - -.. 
automodule:: mindspore.common.initializer - :members: \ No newline at end of file diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.communication.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.communication.rst deleted file mode 100644 index 2b277b2e0673640555d8fc4cec88e071fdb9d8a5..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.communication.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.communication -======================= - -.. automodule:: mindspore.communication - :members: diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.compression.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.compression.rst deleted file mode 100644 index cff75c05247ba2911e782e38c0d7190bc3cbe23a..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.compression.rst +++ /dev/null @@ -1,14 +0,0 @@ -mindspore.compression -===================== - -mindspore.compression.quant ---------------------------- - -.. automodule:: mindspore.compression.quant - :members: - -mindspore.compression.common ----------------------------- - -.. automodule:: mindspore.compression.common - :members: \ No newline at end of file diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.context.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.context.rst deleted file mode 100644 index a9aa8120fe4daf9f0c1e9df93aaff114a98ef3ba..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.context.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.context -================= - -.. 
automodule:: mindspore.context - :members: diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.config.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.config.rst deleted file mode 100644 index 55cf3631f462bd579c8b5764b0885a0a10b4b480..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.config.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.dataset.config -======================== - -.. automodule:: mindspore.dataset.config - :members: diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.rst deleted file mode 100644 index d5fb44e566727e8303276c880d03652ea0940eff..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.rst +++ /dev/null @@ -1,94 +0,0 @@ -mindspore.dataset -================= - -.. automodule:: mindspore.dataset - -Vision -------- - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.CelebADataset - mindspore.dataset.Cifar100Dataset - mindspore.dataset.Cifar10Dataset - mindspore.dataset.CocoDataset - mindspore.dataset.ImageFolderDataset - mindspore.dataset.MnistDataset - mindspore.dataset.VOCDataset - -Text ------ - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.CLUEDataset - -Graph ------- - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.GraphData - -Standard Format ----------------- - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.CSVDataset - mindspore.dataset.ManifestDataset - mindspore.dataset.MindDataset - mindspore.dataset.TextFileDataset - mindspore.dataset.TFRecordDataset - -User Defined --------------- - -.. 
autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.GeneratorDataset - mindspore.dataset.NumpySlicesDataset - mindspore.dataset.PaddedDataset - -Sampler --------- - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.DistributedSampler - mindspore.dataset.PKSampler - mindspore.dataset.RandomSampler - mindspore.dataset.SequentialSampler - mindspore.dataset.SubsetRandomSampler - mindspore.dataset.SubsetSampler - mindspore.dataset.WeightedRandomSampler - -Others -------- - -.. autosummary:: - :toctree: dataset - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.dataset.DatasetCache - mindspore.dataset.Schema - mindspore.dataset.zip diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.text.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.text.rst deleted file mode 100644 index ad2f38ee5fa962abab8d22f6e1f45c7e8e8d5058..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.text.rst +++ /dev/null @@ -1,45 +0,0 @@ -mindspore.dataset.text -====================== - -.. automodule:: mindspore.dataset.text - -mindspore.dataset.text.transforms ---------------------------------- - -.. 
msnoteautosummary:: - :toctree: dataset_text - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.text.transforms.BasicTokenizer - mindspore.dataset.text.transforms.BertTokenizer - mindspore.dataset.text.transforms.CaseFold - mindspore.dataset.text.transforms.JiebaTokenizer - mindspore.dataset.text.transforms.Lookup - mindspore.dataset.text.transforms.Ngram - mindspore.dataset.text.transforms.NormalizeUTF8 - mindspore.dataset.text.transforms.PythonTokenizer - mindspore.dataset.text.transforms.RegexReplace - mindspore.dataset.text.transforms.RegexTokenizer - mindspore.dataset.text.transforms.SentencePieceTokenizer - mindspore.dataset.text.transforms.SlidingWindow - mindspore.dataset.text.transforms.ToNumber - mindspore.dataset.text.transforms.TruncateSequencePair - mindspore.dataset.text.transforms.UnicodeCharTokenizer - mindspore.dataset.text.transforms.UnicodeScriptTokenizer - mindspore.dataset.text.transforms.WhitespaceTokenizer - mindspore.dataset.text.transforms.WordpieceTokenizer - - -mindspore.dataset.text.utils ----------------------------- - -.. msnoteautosummary:: - :toctree: dataset_text - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.text.utils.SentencePieceVocab - mindspore.dataset.text.utils.to_str - mindspore.dataset.text.utils.to_bytes - mindspore.dataset.text.utils.Vocab diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.transforms.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.transforms.rst deleted file mode 100644 index 8de6f0a34218bd64730c8ea9e9138014fc178b04..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.transforms.rst +++ /dev/null @@ -1,39 +0,0 @@ -mindspore.dataset.transforms -============================ - -.. automodule:: mindspore.dataset.transforms - -mindspore.dataset.transforms.c_transforms ------------------------------------------ - -.. 
autosummary:: - :toctree: dataset_transforms - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.transforms.c_transforms.Compose - mindspore.dataset.transforms.c_transforms.Concatenate - mindspore.dataset.transforms.c_transforms.Duplicate - mindspore.dataset.transforms.c_transforms.Fill - mindspore.dataset.transforms.c_transforms.Mask - mindspore.dataset.transforms.c_transforms.OneHot - mindspore.dataset.transforms.c_transforms.PadEnd - mindspore.dataset.transforms.c_transforms.RandomApply - mindspore.dataset.transforms.c_transforms.RandomChoice - mindspore.dataset.transforms.c_transforms.Slice - mindspore.dataset.transforms.c_transforms.TypeCast - mindspore.dataset.transforms.c_transforms.Unique - -mindspore.dataset.transforms.py_transforms ------------------------------------------- - -.. autosummary:: - :toctree: dataset_transforms - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.transforms.py_transforms.Compose - mindspore.dataset.transforms.py_transforms.OneHotOp - mindspore.dataset.transforms.py_transforms.RandomApply - mindspore.dataset.transforms.py_transforms.RandomChoice - mindspore.dataset.transforms.py_transforms.RandomOrder diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.vision.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.vision.rst deleted file mode 100644 index bf5d17a42cc0cff82762d2a6eef2c8446af2dcc6..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.dataset.vision.rst +++ /dev/null @@ -1,92 +0,0 @@ -mindspore.dataset.vision -=================================== - -.. automodule:: mindspore.dataset.vision - -mindspore.dataset.vision.c_transforms ------------------------------------------------- - -.. 
autosummary:: - :toctree: dataset_vision - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.vision.c_transforms.AutoContrast - mindspore.dataset.vision.c_transforms.BoundingBoxAugment - mindspore.dataset.vision.c_transforms.CenterCrop - mindspore.dataset.vision.c_transforms.CutMixBatch - mindspore.dataset.vision.c_transforms.CutOut - mindspore.dataset.vision.c_transforms.Decode - mindspore.dataset.vision.c_transforms.Equalize - mindspore.dataset.vision.c_transforms.HWC2CHW - mindspore.dataset.vision.c_transforms.Invert - mindspore.dataset.vision.c_transforms.MixUpBatch - mindspore.dataset.vision.c_transforms.Normalize - mindspore.dataset.vision.c_transforms.Pad - mindspore.dataset.vision.c_transforms.RandomAffine - mindspore.dataset.vision.c_transforms.RandomColor - mindspore.dataset.vision.c_transforms.RandomColorAdjust - mindspore.dataset.vision.c_transforms.RandomCrop - mindspore.dataset.vision.c_transforms.RandomCropDecodeResize - mindspore.dataset.vision.c_transforms.RandomCropWithBBox - mindspore.dataset.vision.c_transforms.RandomHorizontalFlip - mindspore.dataset.vision.c_transforms.RandomHorizontalFlipWithBBox - mindspore.dataset.vision.c_transforms.RandomPosterize - mindspore.dataset.vision.c_transforms.RandomResize - mindspore.dataset.vision.c_transforms.RandomResizedCrop - mindspore.dataset.vision.c_transforms.RandomResizedCropWithBBox - mindspore.dataset.vision.c_transforms.RandomResizeWithBBox - mindspore.dataset.vision.c_transforms.RandomRotation - mindspore.dataset.vision.c_transforms.RandomSelectSubpolicy - mindspore.dataset.vision.c_transforms.RandomSharpness - mindspore.dataset.vision.c_transforms.RandomSolarize - mindspore.dataset.vision.c_transforms.RandomVerticalFlip - mindspore.dataset.vision.c_transforms.RandomVerticalFlipWithBBox - mindspore.dataset.vision.c_transforms.Rescale - mindspore.dataset.vision.c_transforms.Resize - mindspore.dataset.vision.c_transforms.ResizeWithBBox - 
mindspore.dataset.vision.c_transforms.SoftDvppDecodeRandomCropResizeJpeg - mindspore.dataset.vision.c_transforms.SoftDvppDecodeResizeJpeg - mindspore.dataset.vision.c_transforms.UniformAugment - -mindspore.dataset.vision.py_transforms -------------------------------------------------- - -.. autosummary:: - :toctree: dataset_vision - :nosignatures: - :template: classtemplate.rst - - mindspore.dataset.vision.py_transforms.AutoContrast - mindspore.dataset.vision.py_transforms.CenterCrop - mindspore.dataset.vision.py_transforms.Cutout - mindspore.dataset.vision.py_transforms.Decode - mindspore.dataset.vision.py_transforms.Equalize - mindspore.dataset.vision.py_transforms.FiveCrop - mindspore.dataset.vision.py_transforms.Grayscale - mindspore.dataset.vision.py_transforms.HsvToRgb - mindspore.dataset.vision.py_transforms.HWC2CHW - mindspore.dataset.vision.py_transforms.Invert - mindspore.dataset.vision.py_transforms.LinearTransformation - mindspore.dataset.vision.py_transforms.MixUp - mindspore.dataset.vision.py_transforms.Normalize - mindspore.dataset.vision.py_transforms.Pad - mindspore.dataset.vision.py_transforms.RandomAffine - mindspore.dataset.vision.py_transforms.RandomColor - mindspore.dataset.vision.py_transforms.RandomColorAdjust - mindspore.dataset.vision.py_transforms.RandomCrop - mindspore.dataset.vision.py_transforms.RandomErasing - mindspore.dataset.vision.py_transforms.RandomGrayscale - mindspore.dataset.vision.py_transforms.RandomHorizontalFlip - mindspore.dataset.vision.py_transforms.RandomPerspective - mindspore.dataset.vision.py_transforms.RandomResizedCrop - mindspore.dataset.vision.py_transforms.RandomRotation - mindspore.dataset.vision.py_transforms.RandomSharpness - mindspore.dataset.vision.py_transforms.RandomVerticalFlip - mindspore.dataset.vision.py_transforms.Resize - mindspore.dataset.vision.py_transforms.RgbToHsv - mindspore.dataset.vision.py_transforms.TenCrop - mindspore.dataset.vision.py_transforms.ToPIL - 
mindspore.dataset.vision.py_transforms.ToTensor - mindspore.dataset.vision.py_transforms.ToType - mindspore.dataset.vision.py_transforms.UniformAugment diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.explainer.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.explainer.rst deleted file mode 100644 index 7df9a5bb9e73b259d8c4043482bcf1cf0545fdbd..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.explainer.rst +++ /dev/null @@ -1,21 +0,0 @@ -mindspore.explainer -=================== - -mindspore.explainer -------------------- - -.. automodule:: mindspore.explainer - :members: - - -mindspore.explainer.explanation -------------------------------- - -.. automodule:: mindspore.explainer.explanation - :members: - -mindspore.explainer.benchmark ------------------------------ - -.. automodule:: mindspore.explainer.benchmark - :members: \ No newline at end of file diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.mindrecord.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.mindrecord.rst deleted file mode 100644 index de4d020c711e16ba2ab8cb8b3ed34849c2fbebf3..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.mindrecord.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.mindrecord -==================== - -.. automodule:: mindspore.mindrecord - :members: \ No newline at end of file diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.nn.probability.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.nn.probability.rst deleted file mode 100644 index 00ad24f9cca247196661dd6c77b05a0537a1f3de..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.nn.probability.rst +++ /dev/null @@ -1,120 +0,0 @@ -mindspore.nn.probability -======================== - -.. automodule:: mindspore.nn.probability - -Bijectors ---------- - -.. 
msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.bijector.Bijector - mindspore.nn.probability.bijector.Exp - mindspore.nn.probability.bijector.GumbelCDF - mindspore.nn.probability.bijector.Invert - mindspore.nn.probability.bijector.PowerTransform - mindspore.nn.probability.bijector.ScalarAffine - mindspore.nn.probability.bijector.Softplus - -Bayesian Layers ---------------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.bnn_layers.ConvReparam - mindspore.nn.probability.bnn_layers.DenseLocalReparam - mindspore.nn.probability.bnn_layers.DenseReparam - -Prior and Posterior Distributions ----------------------------------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.bnn_layers.NormalPosterior - mindspore.nn.probability.bnn_layers.NormalPrior - -Bayesian Wrapper Functions ---------------------------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.bnn_layers.WithBNNLossCell - -Distributions --------------- - -.. 
msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.distribution.Bernoulli - mindspore.nn.probability.distribution.Beta - mindspore.nn.probability.distribution.Categorical - mindspore.nn.probability.distribution.Cauchy - mindspore.nn.probability.distribution.Distribution - mindspore.nn.probability.distribution.Exponential - mindspore.nn.probability.distribution.Gamma - mindspore.nn.probability.distribution.Geometric - mindspore.nn.probability.distribution.Gumbel - mindspore.nn.probability.distribution.Logistic - mindspore.nn.probability.distribution.LogNormal - mindspore.nn.probability.distribution.Normal - mindspore.nn.probability.distribution.Poisson - mindspore.nn.probability.distribution.TransformedDistribution - mindspore.nn.probability.distribution.Uniform - -Deep Probability Networks --------------------------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.dpn.ConditionalVAE - mindspore.nn.probability.dpn.VAE - -Infer ------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.infer.ELBO - mindspore.nn.probability.infer.SVI - -ToolBox ---------- - -.. msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.toolbox.UncertaintyEvaluation - mindspore.nn.probability.toolbox.VAEAnomalyDetection - -Model Transformer ------------------- - -.. 
msplatformautosummary:: - :toctree: nn_probability - :nosignatures: - :template: classtemplate_probability.rst - - mindspore.nn.probability.transforms.TransformToBNN diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.nn.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.nn.rst deleted file mode 100644 index 96f7892c02247fc7967f0fee2223a093f9f4e209..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.nn.rst +++ /dev/null @@ -1,366 +0,0 @@ -mindspore.nn -============ - -.. automodule:: mindspore.nn - -Compared with the previous version, the added, deleted and supported platforms change information of `mindspore.nn` operators in MindSpore, please refer to the link ``_. - -Cell ----- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.Cell - mindspore.nn.GraphKernel - -Containers ----------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.CellList - mindspore.nn.SequentialCell - -Convolution Layers ------------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.Conv1d - mindspore.nn.Conv1dTranspose - mindspore.nn.Conv2d - mindspore.nn.Conv2dTranspose - mindspore.nn.Conv3d - mindspore.nn.Conv3dTranspose - -Recurrent Layers ----------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.LSTMCell - mindspore.nn.LSTM - -Sparse Layers -------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.Embedding - mindspore.nn.EmbeddingLookup - mindspore.nn.MultiFieldEmbeddingLookup - -Non-linear Activations ----------------------- - -.. 
msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.ELU - mindspore.nn.FastGelu - mindspore.nn.GELU - mindspore.nn.get_activation - mindspore.nn.HSigmoid - mindspore.nn.HSwish - mindspore.nn.LeakyReLU - mindspore.nn.LogSigmoid - mindspore.nn.LogSoftmax - mindspore.nn.PReLU - mindspore.nn.ReLU - mindspore.nn.ReLU6 - mindspore.nn.Sigmoid - mindspore.nn.Softmax - mindspore.nn.Tanh - -Utilities ---------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.ClipByNorm - mindspore.nn.Dense - mindspore.nn.Dropout - mindspore.nn.Flatten - mindspore.nn.L1Regularizer - mindspore.nn.Norm - mindspore.nn.OneHot - mindspore.nn.Pad - mindspore.nn.Range - mindspore.nn.ResizeBilinear - mindspore.nn.SparseToDense - mindspore.nn.Tril - mindspore.nn.Triu - mindspore.nn.Unfold - -Images Functions ----------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.CentralCrop - mindspore.nn.ImageGradients - mindspore.nn.MSSSIM - mindspore.nn.PSNR - mindspore.nn.SSIM - -Normalization Layers --------------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.BatchNorm1d - mindspore.nn.BatchNorm2d - mindspore.nn.BatchNorm3d - mindspore.nn.GlobalBatchNorm - mindspore.nn.GroupNorm - mindspore.nn.InstanceNorm2d - mindspore.nn.LayerNorm - mindspore.nn.MatrixDiag - mindspore.nn.MatrixDiagPart - mindspore.nn.MatrixSetDiag - mindspore.nn.SyncBatchNorm - -Pooling layers --------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.AvgPool1d - mindspore.nn.AvgPool2d - mindspore.nn.MaxPool1d - mindspore.nn.MaxPool2d - -Quantized Functions -------------------- - -.. 
msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.ActQuant - mindspore.nn.Conv2dBnAct - mindspore.nn.Conv2dBnFoldQuant - mindspore.nn.Conv2dBnFoldQuantOneConv - mindspore.nn.Conv2dBnWithoutFoldQuant - mindspore.nn.Conv2dQuant - mindspore.nn.DenseBnAct - mindspore.nn.DenseQuant - mindspore.nn.FakeQuantWithMinMaxObserver - mindspore.nn.MulQuant - mindspore.nn.TensorAddQuant - -Loss Functions --------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.BCELoss - mindspore.nn.BCEWithLogitsLoss - mindspore.nn.CosineEmbeddingLoss - mindspore.nn.DiceLoss - mindspore.nn.FocalLoss - mindspore.nn.L1Loss - mindspore.nn.MAELoss - mindspore.nn.MSELoss - mindspore.nn.MultiClassDiceLoss - mindspore.nn.RMSELoss - mindspore.nn.SampledSoftmaxLoss - mindspore.nn.SmoothL1Loss - mindspore.nn.SoftmaxCrossEntropyWithLogits - -Optimizer Functions -------------------- - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.Adagrad - mindspore.nn.Adam - mindspore.nn.AdamOffload - mindspore.nn.AdamWeightDecay - mindspore.nn.FTRL - mindspore.nn.Lamb - mindspore.nn.LARS - mindspore.nn.LazyAdam - mindspore.nn.Momentum - mindspore.nn.Optimizer - mindspore.nn.ProximalAdagrad - mindspore.nn.RMSProp - mindspore.nn.SGD - -Wrapper Functions ------------------ - -.. msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.DistributedGradReducer - mindspore.nn.DynamicLossScaleUpdateCell - mindspore.nn.FixedLossScaleUpdateCell - mindspore.nn.ForwardValueAndGrad - mindspore.nn.GetNextSingleOp - mindspore.nn.ParameterUpdate - mindspore.nn.TimeDistributed - mindspore.nn.TrainOneStepCell - mindspore.nn.TrainOneStepWithLossScaleCell - mindspore.nn.WithEvalCell - mindspore.nn.WithGradCell - mindspore.nn.WithLossCell - -Math Functions --------------- - -.. 
msplatformautosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.DiGamma - mindspore.nn.IGamma - mindspore.nn.LBeta - mindspore.nn.LGamma - mindspore.nn.MatDet - mindspore.nn.MatInverse - mindspore.nn.MatMul - mindspore.nn.Moments - mindspore.nn.ReduceLogSumExp - -Metrics --------- - -.. autosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.Accuracy - mindspore.nn.auc - mindspore.nn.BleuScore - mindspore.nn.ConfusionMatrix - mindspore.nn.ConfusionMatrixMetric - mindspore.nn.CosineSimilarity - mindspore.nn.Dice - mindspore.nn.F1 - mindspore.nn.Fbeta - mindspore.nn.HausdorffDistance - mindspore.nn.get_metric_fn - mindspore.nn.Loss - mindspore.nn.MAE - mindspore.nn.MeanSurfaceDistance - mindspore.nn.Metric - mindspore.nn.MSE - mindspore.nn.names - mindspore.nn.OcclusionSensitivity - mindspore.nn.Perplexity - mindspore.nn.Precision - mindspore.nn.Recall - mindspore.nn.ROC - mindspore.nn.RootMeanSquareDistance - mindspore.nn.Top1CategoricalAccuracy - mindspore.nn.Top5CategoricalAccuracy - mindspore.nn.TopKCategoricalAccuracy - -Dynamic Learning Rate ------------------------ - -LearningRateSchedule -^^^^^^^^^^^^^^^^^^^^^^ - -The dynamic learning rates in this module are all subclasses of LearningRateSchedule. Pass the instance of -LearningRateSchedule to an optimizer. During the training process, the optimizer calls the instance taking current step -as input to get the current learning rate. - -.. code-block:: python - - import mindspore.nn as nn - - min_lr = 0.01 - max_lr = 0.1 - decay_steps = 4 - cosine_decay_lr = nn.CosineDecayLR(min_lr, max_lr, decay_steps) - - net = Net() - optim = nn.Momentum(net.trainable_params(), learning_rate=cosine_decay_lr, momentum=0.9) - -.. 
autosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.CosineDecayLR - mindspore.nn.ExponentialDecayLR - mindspore.nn.InverseDecayLR - mindspore.nn.NaturalExpDecayLR - mindspore.nn.PolynomialDecayLR - mindspore.nn.WarmUpLR - -Dynamic LR -^^^^^^^^^^^^^^^^^^^^^^ - -The dynamic learning rates in this module are all functions. Call the function and pass the result to an optimizer. -During the training process, the optimizer takes result[current step] as current learning rate. - -.. code-block:: python - - import mindspore.nn as nn - - min_lr = 0.01 - max_lr = 0.1 - total_step = 6 - step_per_epoch = 1 - decay_epoch = 4 - - lr= nn.cosine_decay_lr(min_lr, max_lr, total_step, step_per_epoch, decay_epoch) - - net = Net() - optim = nn.Momentum(net.trainable_params(), learning_rate=lr, momentum=0.9) - -.. autosummary:: - :toctree: nn - :nosignatures: - :template: classtemplate.rst - - mindspore.nn.cosine_decay_lr - mindspore.nn.exponential_decay_lr - mindspore.nn.inverse_decay_lr - mindspore.nn.natural_exp_decay_lr - mindspore.nn.piecewise_constant_lr - mindspore.nn.polynomial_decay_lr - mindspore.nn.warmup_lr diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.numpy.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.numpy.rst deleted file mode 100644 index 0fbd70ae0142307141d613dd8c0943d424471c56..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.numpy.rst +++ /dev/null @@ -1,271 +0,0 @@ -mindspore.numpy -=============== - -.. automodule:: mindspore.numpy - -.. autosummary:: - :toctree: numpy - :nosignatures: - :template: classtemplate_inherited.rst - -Array Generation ----------------- - -.. 
msplatformautosummary:: - :toctree: numpy - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.numpy.arange - mindspore.numpy.array - mindspore.numpy.asarray - mindspore.numpy.asfarray - mindspore.numpy.bartlett - mindspore.numpy.blackman - mindspore.numpy.copy - mindspore.numpy.diag - mindspore.numpy.diag_indices - mindspore.numpy.diagflat - mindspore.numpy.diagonal - mindspore.numpy.empty - mindspore.numpy.empty_like - mindspore.numpy.eye - mindspore.numpy.full - mindspore.numpy.full_like - mindspore.numpy.geomspace - mindspore.numpy.hamming - mindspore.numpy.hanning - mindspore.numpy.histogram_bin_edges - mindspore.numpy.identity - mindspore.numpy.indices - mindspore.numpy.ix_ - mindspore.numpy.linspace - mindspore.numpy.logspace - mindspore.numpy.meshgrid - mindspore.numpy.mgrid - mindspore.numpy.ogrid - mindspore.numpy.ones - mindspore.numpy.ones_like - mindspore.numpy.pad - mindspore.numpy.trace - mindspore.numpy.tri - mindspore.numpy.tril - mindspore.numpy.tril_indices - mindspore.numpy.tril_indices_from - mindspore.numpy.triu - mindspore.numpy.triu_indices - mindspore.numpy.triu_indices_from - mindspore.numpy.vander - mindspore.numpy.zeros - mindspore.numpy.zeros_like - -Array Operation ---------------- - -.. 
msplatformautosummary:: - :toctree: numpy - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.numpy.append - mindspore.numpy.apply_along_axis - mindspore.numpy.apply_over_axes - mindspore.numpy.array_split - mindspore.numpy.array_str - mindspore.numpy.atleast_1d - mindspore.numpy.atleast_2d - mindspore.numpy.atleast_3d - mindspore.numpy.broadcast_arrays - mindspore.numpy.broadcast_to - mindspore.numpy.choose - mindspore.numpy.column_stack - mindspore.numpy.concatenate - mindspore.numpy.dsplit - mindspore.numpy.dstack - mindspore.numpy.expand_dims - mindspore.numpy.flip - mindspore.numpy.fliplr - mindspore.numpy.flipud - mindspore.numpy.hsplit - mindspore.numpy.hstack - mindspore.numpy.moveaxis - mindspore.numpy.piecewise - mindspore.numpy.ravel - mindspore.numpy.repeat - mindspore.numpy.reshape - mindspore.numpy.roll - mindspore.numpy.rollaxis - mindspore.numpy.rot90 - mindspore.numpy.select - mindspore.numpy.size - mindspore.numpy.split - mindspore.numpy.squeeze - mindspore.numpy.stack - mindspore.numpy.swapaxes - mindspore.numpy.take - mindspore.numpy.take_along_axis - mindspore.numpy.tile - mindspore.numpy.transpose - mindspore.numpy.unique - mindspore.numpy.unravel_index - mindspore.numpy.vsplit - mindspore.numpy.vstack - mindspore.numpy.where - -Logic ------ - -.. 
msplatformautosummary:: - :toctree: numpy - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.numpy.array_equal - mindspore.numpy.array_equiv - mindspore.numpy.equal - mindspore.numpy.greater - mindspore.numpy.greater_equal - mindspore.numpy.in1d - mindspore.numpy.isclose - mindspore.numpy.isfinite - mindspore.numpy.isin - mindspore.numpy.isinf - mindspore.numpy.isnan - mindspore.numpy.isneginf - mindspore.numpy.isposinf - mindspore.numpy.isscalar - mindspore.numpy.less - mindspore.numpy.less_equal - mindspore.numpy.logical_and - mindspore.numpy.logical_not - mindspore.numpy.logical_or - mindspore.numpy.logical_xor - mindspore.numpy.not_equal - mindspore.numpy.signbit - mindspore.numpy.sometrue - -Math ----- - -.. msplatformautosummary:: - :toctree: numpy - :nosignatures: - :template: classtemplate_inherited.rst - - mindspore.numpy.absolute - mindspore.numpy.add - mindspore.numpy.amax - mindspore.numpy.amin - mindspore.numpy.arccos - mindspore.numpy.arccosh - mindspore.numpy.arcsin - mindspore.numpy.arcsinh - mindspore.numpy.arctan - mindspore.numpy.arctan2 - mindspore.numpy.arctanh - mindspore.numpy.argmax - mindspore.numpy.argmin - mindspore.numpy.around - mindspore.numpy.average - mindspore.numpy.bincount - mindspore.numpy.bitwise_and - mindspore.numpy.bitwise_or - mindspore.numpy.bitwise_xor - mindspore.numpy.cbrt - mindspore.numpy.ceil - mindspore.numpy.clip - mindspore.numpy.convolve - mindspore.numpy.copysign - mindspore.numpy.corrcoef - mindspore.numpy.correlate - mindspore.numpy.cos - mindspore.numpy.cosh - mindspore.numpy.count_nonzero - mindspore.numpy.cov - mindspore.numpy.cross - mindspore.numpy.cumprod - mindspore.numpy.cumsum - mindspore.numpy.deg2rad - mindspore.numpy.diff - mindspore.numpy.digitize - mindspore.numpy.divide - mindspore.numpy.divmod - mindspore.numpy.dot - mindspore.numpy.ediff1d - mindspore.numpy.exp - mindspore.numpy.exp2 - mindspore.numpy.expm1 - mindspore.numpy.fix - mindspore.numpy.float_power - 
mindspore.numpy.floor - mindspore.numpy.floor_divide - mindspore.numpy.fmod - mindspore.numpy.gcd - mindspore.numpy.gradient - mindspore.numpy.heaviside - mindspore.numpy.histogram - mindspore.numpy.histogram2d - mindspore.numpy.histogramdd - mindspore.numpy.hypot - mindspore.numpy.inner - mindspore.numpy.interp - mindspore.numpy.invert - mindspore.numpy.kron - mindspore.numpy.lcm - mindspore.numpy.log - mindspore.numpy.log10 - mindspore.numpy.log1p - mindspore.numpy.log2 - mindspore.numpy.logaddexp - mindspore.numpy.logaddexp2 - mindspore.numpy.matmul - mindspore.numpy.matrix_power - mindspore.numpy.maximum - mindspore.numpy.mean - mindspore.numpy.minimum - mindspore.numpy.multi_dot - mindspore.numpy.multiply - mindspore.numpy.nancumsum - mindspore.numpy.nanmax - mindspore.numpy.nanmean - mindspore.numpy.nanmin - mindspore.numpy.nanstd - mindspore.numpy.nansum - mindspore.numpy.nanvar - mindspore.numpy.negative - mindspore.numpy.norm - mindspore.numpy.outer - mindspore.numpy.polyadd - mindspore.numpy.polyder - mindspore.numpy.polyint - mindspore.numpy.polymul - mindspore.numpy.polysub - mindspore.numpy.polyval - mindspore.numpy.positive - mindspore.numpy.power - mindspore.numpy.promote_types - mindspore.numpy.ptp - mindspore.numpy.rad2deg - mindspore.numpy.radians - mindspore.numpy.ravel_multi_index - mindspore.numpy.reciprocal - mindspore.numpy.remainder - mindspore.numpy.result_type - mindspore.numpy.rint - mindspore.numpy.searchsorted - mindspore.numpy.sign - mindspore.numpy.sin - mindspore.numpy.sinh - mindspore.numpy.sqrt - mindspore.numpy.square - mindspore.numpy.std - mindspore.numpy.subtract - mindspore.numpy.sum - mindspore.numpy.tan - mindspore.numpy.tanh - mindspore.numpy.tensordot - mindspore.numpy.trapz - mindspore.numpy.true_divide - mindspore.numpy.trunc - mindspore.numpy.unwrap - mindspore.numpy.var \ No newline at end of file diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.ops.rst 
b/docs/api_python/source_zh_cn/mindspore/mindspore.ops.rst deleted file mode 100644 index 93dee4d2a2ead3cc7782678bc0c460b3b321bb03..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.ops.rst +++ /dev/null @@ -1,275 +0,0 @@ -mindspore.ops -============= - -.. automodule:: mindspore.ops - -Compared with the previous version, the added, deleted and supported platforms change information of `mindspore.ops` operators in MindSpore, please refer to the link ``_. - -.. include:: operations.rst - -composite ---------- - -The composite operators are the pre-defined combination of operators. - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.batch_dot - mindspore.ops.clip_by_global_norm - mindspore.ops.clip_by_value - mindspore.ops.core - mindspore.ops.count_nonzero - mindspore.ops.dot - mindspore.ops.gamma - mindspore.ops.GradOperation - mindspore.ops.HyperMap - mindspore.ops.laplace - mindspore.ops.matmul - mindspore.ops.multinomial - mindspore.ops.MultitypeFuncGraph - mindspore.ops.normal - mindspore.ops.poisson - mindspore.ops.repeat_elements - mindspore.ops.sequence_mask - mindspore.ops.tensor_dot - mindspore.ops.uniform - -functional ----------- - -The functional operators are the pre-instantiated Primitive operators, which can be used directly as a function. - -.. 
list-table:: - :widths: 50 50 - :header-rows: 1 - - * - functional - - operations - * - mindspore.ops.add - - :class:`mindspore.ops.Add` - * - mindspore.ops.addn - - :class:`mindspore.ops.AddN` - * - mindspore.ops.array_reduce - - :class:`mindspore.ops.Primitive` ('array_reduce') - * - mindspore.ops.array_to_scalar - - :class:`mindspore.ops.Primitive` ('array_to_scalar') - * - mindspore.ops.assign - - :class:`mindspore.ops.Assign` - * - mindspore.ops.assign_add - - :class:`mindspore.ops.AssignAdd` - * - mindspore.ops.assign_sub - - :class:`mindspore.ops.AssignSub` - * - mindspore.ops.bool_and - - :class:`mindspore.ops.Primitive` ('bool_and') - * - mindspore.ops.bool_eq - - :class:`mindspore.ops.Primitive` ('bool_eq') - * - mindspore.ops.bool_not - - :class:`mindspore.ops.Primitive` ('bool_not') - * - mindspore.ops.bool_or - - :class:`mindspore.ops.Primitive` ('bool_or') - * - mindspore.ops.cast - - :class:`mindspore.ops.Cast` - * - mindspore.ops.distribute - - :class:`mindspore.ops.Primitive` ('distribute') - * - mindspore.ops.dtype - - :class:`mindspore.ops.DType` - * - mindspore.ops.equal - - :class:`mindspore.ops.Equal` - * - mindspore.ops.expand_dims - - :class:`mindspore.ops.ExpandDims` - * - mindspore.ops.fill - - :class:`mindspore.ops.Fill` - * - mindspore.ops.gather - - :class:`mindspore.ops.Gather` - * - mindspore.ops.gather_nd - - :class:`mindspore.ops.GatherNd` - * - mindspore.ops.hastype - - :class:`mindspore.ops.Primitive` ('hastype') - * - mindspore.ops.in_dict - - :class:`mindspore.ops.Primitive` ('in_dict') - * - mindspore.ops.is_not - - :class:`mindspore.ops.Primitive` ('is_not') - * - mindspore.ops.is\_ - - :class:`mindspore.ops.Primitive` ('is\_') - * - mindspore.ops.isconstant - - :class:`mindspore.ops.Primitive` ('is_constant') - * - mindspore.ops.isinstance\_ - - :class:`mindspore.ops.IsInstance` - * - mindspore.ops.issubclass\_ - - :class:`mindspore.ops.IsSubClass` - * - mindspore.ops.logical_and - - :class:`mindspore.ops.LogicalAnd` - * - 
mindspore.ops.logical_not - - :class:`mindspore.ops.LogicalNot` - * - mindspore.ops.logical_or - - :class:`mindspore.ops.LogicalOr` - * - mindspore.ops.make_row_tensor - - :class:`mindspore.ops.Primitive` ('MakeRowTensor') - * - mindspore.ops.make_sparse_tensor - - :class:`mindspore.ops.Primitive` ('MakeSparseTensor') - * - mindspore.ops.mixed_precision_cast - - :class:`mindspore.ops.Primitive` ('mixed_precision_cast') - * - mindspore.ops.neg_tensor - - :class:`mindspore.ops.Neg` - * - mindspore.ops.not_equal - - :class:`mindspore.ops.NotEqual` - * - mindspore.ops.not_in_dict - - :class:`mindspore.ops.Primitive` ('not_in_dict') - * - mindspore.ops.ones_like - - :class:`mindspore.ops.OnesLike` - * - mindspore.ops.print\_ - - :class:`mindspore.ops.Print` - * - mindspore.ops.rank - - :class:`mindspore.ops.Rank` - * - mindspore.ops.reduced_shape - - :class:`mindspore.ops.Primitive` ('reduced_shape') - * - mindspore.ops.reshape - - :class:`mindspore.ops.Reshape` - * - mindspore.ops.row_tensor_get_dense_shape - - :class:`mindspore.ops.Primitive` ('RowTensorGetDenseShape') - * - mindspore.ops.row_tensor_get_indices - - :class:`mindspore.ops.Primitive` ('RowTensorGetIndices') - * - mindspore.ops.row_tensor_get_values - - :class:`mindspore.ops.Primitive` ('RowTensorGetValues') - * - mindspore.ops.same_type_shape - - :class:`mindspore.ops.SameTypeShape` - * - mindspore.ops.scalar_add - - :class:`mindspore.ops.Primitive` ('scalar_add') - * - mindspore.ops.scalar_cast - - :class:`mindspore.ops.ScalarCast` - * - mindspore.ops.scalar_div - - :class:`mindspore.ops.Primitive` ('scalar_div') - * - mindspore.ops.scalar_eq - - :class:`mindspore.ops.Primitive` ('scalar_eq') - * - mindspore.ops.scalar_floordiv - - :class:`mindspore.ops.Primitive` ('scalar_floordiv') - * - mindspore.ops.scalar_ge - - :class:`mindspore.ops.Primitive` ('scalar_ge') - * - mindspore.ops.scalar_gt - - :class:`mindspore.ops.Primitive` ('scalar_gt') - * - mindspore.ops.scalar_le - - 
:class:`mindspore.ops.Primitive` ('scalar_le') - * - mindspore.ops.scalar_log - - :class:`mindspore.ops.Primitive` ('scalar_log') - * - mindspore.ops.scalar_lt - - :class:`mindspore.ops.Primitive` ('scalar_lt') - * - mindspore.ops.scalar_mod - - :class:`mindspore.ops.Primitive` ('scalar_mod') - * - mindspore.ops.scalar_mul - - :class:`mindspore.ops.Primitive` ('scalar_mul') - * - mindspore.ops.scalar_ne - - :class:`mindspore.ops.Primitive` ('scalar_ne') - * - mindspore.ops.scalar_pow - - :class:`mindspore.ops.Primitive` ('scalar_pow') - * - mindspore.ops.scalar_sub - - :class:`mindspore.ops.Primitive` ('scalar_sub') - * - mindspore.ops.scalar_to_array - - :class:`mindspore.ops.ScalarToArray` - * - mindspore.ops.scalar_to_tensor - - :class:`mindspore.ops.ScalarToTensor` - * - mindspore.ops.scalar_uadd - - :class:`mindspore.ops.Primitive` ('scalar_uadd') - * - mindspore.ops.scalar_usub - - :class:`mindspore.ops.Primitive` ('scalar_usub') - * - mindspore.ops.scatter_nd - - :class:`mindspore.ops.ScatterNd` - * - mindspore.ops.scatter_nd_update - - :class:`mindspore.ops.ScatterNdUpdate` - * - mindspore.ops.scatter_update - - :class:`mindspore.ops.ScatterUpdate` - * - mindspore.ops.select - - :class:`mindspore.ops.Select` - * - mindspore.ops.shape - - :class:`mindspore.ops.Shape` - * - mindspore.ops.shape_mul - - :class:`mindspore.ops.Primitive` ('shape_mul') - * - mindspore.ops.size - - :class:`mindspore.ops.Size` - * - mindspore.ops.sparse_tensor_get_dense_shape - - :class:`mindspore.ops.Primitive` ('SparseTensorGetDenseShape') - * - mindspore.ops.sparse_tensor_get_indices - - :class:`mindspore.ops.Primitive` ('SparseTensorGetIndices') - * - mindspore.ops.sparse_tensor_get_values - - :class:`mindspore.ops.Primitive` ('SparseTensorGetValues') - * - mindspore.ops.sqrt - - :class:`mindspore.ops.Sqrt` - * - mindspore.ops.square - - :class:`mindspore.ops.Square` - * - mindspore.ops.stack - - :class:`mindspore.ops.Stack` - * - mindspore.ops.stop_gradient - - 
:class:`mindspore.ops.Primitive` ('stop_gradient') - * - mindspore.ops.strided_slice - - :class:`mindspore.ops.StridedSlice` - * - mindspore.ops.string_concat - - :class:`mindspore.ops.Primitive` ('string_concat') - * - mindspore.ops.string_eq - - :class:`mindspore.ops.Primitive` ('string_equal') - * - mindspore.ops.tensor_div - - :class:`mindspore.ops.RealDiv` - * - mindspore.ops.tensor_floordiv - - :class:`mindspore.ops.FloorDiv` - * - mindspore.ops.tensor_ge - - :class:`mindspore.ops.GreaterEqual` - * - mindspore.ops.tensor_gt - - :class:`mindspore.ops.Greater` - * - mindspore.ops.tensor_le - - :class:`mindspore.ops.LessEqual` - * - mindspore.ops.tensor_lt - - :class:`mindspore.ops.Less` - * - mindspore.ops.tensor_mod - - :class:`mindspore.ops.FloorMod` - * - mindspore.ops.tensor_mul - - :class:`mindspore.ops.Mul` - * - mindspore.ops.tensor_pow - - :class:`mindspore.ops.Pow` - * - mindspore.ops.tensor_sub - - :class:`mindspore.ops.Sub` - * - mindspore.ops.tile - - :class:`mindspore.ops.Tile` - * - mindspore.ops.tuple_to_array - - :class:`mindspore.ops.TupleToArray` - * - mindspore.ops.typeof - - :class:`mindspore.ops.Primitive` ('typeof') - * - mindspore.ops.zeros_like - - :class:`mindspore.ops.ZerosLike` - -primitive ---------- - -.. autosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.constexpr - mindspore.ops.prim_attr_register - mindspore.ops.Primitive - mindspore.ops.PrimitiveWithCheck - mindspore.ops.PrimitiveWithInfer - -vm_impl_registry ----------------- - -.. autosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.get_vm_impl_fn - -op_info_register ----------------- - -.. 
autosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.AiCPURegOp - mindspore.ops.DataType - mindspore.ops.op_info_register - mindspore.ops.TBERegOp diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.profiler.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.profiler.rst deleted file mode 100644 index 33122b6ff76b4efd65bc9050e709da0feda129bf..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.profiler.rst +++ /dev/null @@ -1,5 +0,0 @@ -mindspore.profiler -================== - -.. automodule:: mindspore.profiler - :members: \ No newline at end of file diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.rst deleted file mode 100644 index aac6d36eb626c130523716530c6127db9f131089..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.rst +++ /dev/null @@ -1,100 +0,0 @@ -mindspore -========= - -.. class:: mindspore.dtype - - Create a data type object of MindSpore. - - The actual path of ``dtype`` is ``/mindspore/common/dtype.py``. - Run the following command to import the package: - - .. code-block:: - - from mindspore import dtype as mstype - - * **Numeric Type** - - Currently, MindSpore supports ``Int`` type, ``Uint`` type and ``Float`` type. - The following table lists the details. 
- - ============================================== ============================= - Definition Description - ============================================== ============================= - ``mindspore.int8`` , ``mindspore.byte`` 8-bit integer - ``mindspore.int16`` , ``mindspore.short`` 16-bit integer - ``mindspore.int32`` , ``mindspore.intc`` 32-bit integer - ``mindspore.int64`` , ``mindspore.intp`` 64-bit integer - ``mindspore.uint8`` , ``mindspore.ubyte`` unsigned 8-bit integer - ``mindspore.uint16`` , ``mindspore.ushort`` unsigned 16-bit integer - ``mindspore.uint32`` , ``mindspore.uintc`` unsigned 32-bit integer - ``mindspore.uint64`` , ``mindspore.uintp`` unsigned 64-bit integer - ``mindspore.float16`` , ``mindspore.half`` 16-bit floating-point number - ``mindspore.float32`` , ``mindspore.single`` 32-bit floating-point number - ``mindspore.float64`` , ``mindspore.double`` 64-bit floating-point number - ============================================== ============================= - - * **Other Type** - - For other defined types, see the following table. - - ============================ ================= - Type Description - ============================ ================= - ``tensor`` MindSpore's ``tensor`` type. Data format uses NCHW. For details, see `tensor `_. - ``bool_`` Boolean ``True`` or ``False``. - ``int_`` Integer scalar. - ``uint`` Unsigned integer scalar. - ``float_`` Floating-point scalar. - ``number`` Number, including ``int_`` , ``uint`` , ``float_`` and ``bool_`` . - ``list_`` List constructed by ``tensor`` , such as ``List[T0,T1,...,Tn]`` , where the element ``Ti`` can be of different types. - ``tuple_`` Tuple constructed by ``tensor`` , such as ``Tuple[T0,T1,...,Tn]`` , where the element ``Ti`` can be of different types. - ``function`` Function. Return in two ways, when function is not None, returns Func directly, the other returns Func(args: List[T0,T1,...,Tn], retval: T) when function is None. - ``type_type`` Type definition of type. 
- ``type_none`` No matching return type, corresponding to the ``type(None)`` in Python. - ``symbolic_key`` The value of a variable is used as a key of the variable in ``env_type`` . - ``env_type`` Used to store the gradient of the free variable of a function, where the key is the ``symbolic_key`` of the free variable's node and the value is the gradient. - ============================ ================= - - * **Tree Topology** - - The relationships of the above types are as follows: - - .. code-block:: - - - └─────── number - │ ├─── bool_ - │ ├─── int_ - │ │ ├─── int8, byte - │ │ ├─── int16, short - │ │ ├─── int32, intc - │ │ └─── int64, intp - │ ├─── uint - │ │ ├─── uint8, ubyte - │ │ ├─── uint16, ushort - │ │ ├─── uint32, uintc - │ │ └─── uint64, uintp - │ └─── float_ - │ ├─── float16 - │ ├─── float32 - │ └─── float64 - ├─── tensor - │ ├─── Array[Float32] - │ └─── ... - ├─── list_ - │ ├─── List[Int32,Float32] - │ └─── ... - ├─── tuple_ - │ ├─── Tuple[Int32,Float32] - │ └─── ... - ├─── function - │ ├─── Func - │ ├─── Func[(Int32, Float32), Int32] - │ └─── ... - ├─── type_type - ├─── type_none - ├─── symbolic_key - └─── env_type - -.. automodule:: mindspore - :members: diff --git a/docs/api_python/source_zh_cn/mindspore/mindspore.train.rst b/docs/api_python/source_zh_cn/mindspore/mindspore.train.rst deleted file mode 100644 index ff368bf47b07ac19f0c76c8133ddd0e9face92e3..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/mindspore.train.rst +++ /dev/null @@ -1,14 +0,0 @@ -mindspore.train -=============== - -mindspore.train.summary ------------------------ - -.. automodule:: mindspore.train.summary - :members: - -mindspore.train.callback ------------------------- - -.. 
automodule:: mindspore.train.callback - :members: diff --git a/docs/api_python/source_zh_cn/mindspore/operations.rst b/docs/api_python/source_zh_cn/mindspore/operations.rst deleted file mode 100644 index ccf0cb9538811bd005d8da528791aacfd5f2429c..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore/operations.rst +++ /dev/null @@ -1,408 +0,0 @@ -operations ----------- - -The Primitive operators in operations need to be instantiated before being used. - -Neural Network Operators -^^^^^^^^^^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.Acosh - mindspore.ops.Adam - mindspore.ops.AdamNoUpdateParam - mindspore.ops.ApplyAdadelta - mindspore.ops.ApplyAdagrad - mindspore.ops.ApplyAdagradV2 - mindspore.ops.ApplyAdaMax - mindspore.ops.ApplyAddSign - mindspore.ops.ApplyCenteredRMSProp - mindspore.ops.ApplyGradientDescent - mindspore.ops.ApplyMomentum - mindspore.ops.ApplyPowerSign - mindspore.ops.ApplyProximalAdagrad - mindspore.ops.ApplyProximalGradientDescent - mindspore.ops.ApplyRMSProp - mindspore.ops.AvgPool - mindspore.ops.BasicLSTMCell - mindspore.ops.BatchNorm - mindspore.ops.BCEWithLogitsLoss - mindspore.ops.BiasAdd - mindspore.ops.BinaryCrossEntropy - mindspore.ops.BNTrainingReduce - mindspore.ops.BNTrainingUpdate - mindspore.ops.ComputeAccidentalHits - mindspore.ops.Conv2D - mindspore.ops.Conv2DBackpropInput - mindspore.ops.Conv3D - mindspore.ops.Conv3DTranspose - mindspore.ops.CTCGreedyDecoder - mindspore.ops.CTCLoss - mindspore.ops.DataFormatDimMap - mindspore.ops.DepthwiseConv2dNative - mindspore.ops.Dropout2D - mindspore.ops.Dropout3D - mindspore.ops.DropoutDoMask - mindspore.ops.DropoutGenMask - mindspore.ops.DynamicGRUV2 - mindspore.ops.DynamicRNN - mindspore.ops.Elu - mindspore.ops.FastGeLU - mindspore.ops.Flatten - mindspore.ops.FloorMod - mindspore.ops.FusedSparseAdam - mindspore.ops.FusedSparseLazyAdam - mindspore.ops.FusedSparseProximalAdagrad 
- mindspore.ops.GeLU - mindspore.ops.GetNext - mindspore.ops.HSigmoid - mindspore.ops.HSwish - mindspore.ops.KLDivLoss - mindspore.ops.L2Loss - mindspore.ops.L2Normalize - mindspore.ops.LARSUpdate - mindspore.ops.LayerNorm - mindspore.ops.LogSoftmax - mindspore.ops.LRN - mindspore.ops.LSTM - mindspore.ops.MaxPool - mindspore.ops.MaxPool3D - mindspore.ops.MaxPoolWithArgmax - mindspore.ops.MirrorPad - mindspore.ops.Mish - mindspore.ops.NLLLoss - mindspore.ops.OneHot - mindspore.ops.Pad - mindspore.ops.PReLU - mindspore.ops.ReLU - mindspore.ops.ReLU6 - mindspore.ops.ReLUV2 - mindspore.ops.ResizeBilinear - mindspore.ops.RNNTLoss - mindspore.ops.ROIAlign - mindspore.ops.SeLU - mindspore.ops.SGD - mindspore.ops.Sigmoid - mindspore.ops.SigmoidCrossEntropyWithLogits - mindspore.ops.SmoothL1Loss - mindspore.ops.Softmax - mindspore.ops.SoftmaxCrossEntropyWithLogits - mindspore.ops.Softplus - mindspore.ops.Softsign - mindspore.ops.SparseApplyAdagrad - mindspore.ops.SparseApplyAdagradV2 - mindspore.ops.SparseApplyProximalAdagrad - mindspore.ops.SparseSoftmaxCrossEntropyWithLogits - mindspore.ops.Stack - mindspore.ops.Tanh - mindspore.ops.TopK - mindspore.ops.Unstack - -Math Operators -^^^^^^^^^^^^^^ - -.. 
msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.Abs - mindspore.ops.AccumulateNV2 - mindspore.ops.ACos - mindspore.ops.Add - mindspore.ops.AddN - mindspore.ops.ApproximateEqual - mindspore.ops.Asin - mindspore.ops.Asinh - mindspore.ops.AssignAdd - mindspore.ops.AssignSub - mindspore.ops.Atan - mindspore.ops.Atan2 - mindspore.ops.Atanh - mindspore.ops.BatchMatMul - mindspore.ops.BesselI0e - mindspore.ops.BesselI1e - mindspore.ops.BitwiseAnd - mindspore.ops.BitwiseOr - mindspore.ops.BitwiseXor - mindspore.ops.Ceil - mindspore.ops.Cos - mindspore.ops.Cosh - mindspore.ops.CumProd - mindspore.ops.CumSum - mindspore.ops.Div - mindspore.ops.DivNoNan - mindspore.ops.Eps - mindspore.ops.Equal - mindspore.ops.EqualCount - mindspore.ops.Erf - mindspore.ops.Erfc - mindspore.ops.Exp - mindspore.ops.Expm1 - mindspore.ops.FloatStatus - mindspore.ops.Floor - mindspore.ops.FloorDiv - mindspore.ops.Greater - mindspore.ops.GreaterEqual - mindspore.ops.HistogramFixedWidth - mindspore.ops.IndexAdd - mindspore.ops.InplaceAdd - mindspore.ops.InplaceSub - mindspore.ops.Inv - mindspore.ops.Invert - mindspore.ops.IsInf - mindspore.ops.IsNan - mindspore.ops.Less - mindspore.ops.LessEqual - mindspore.ops.LinSpace - mindspore.ops.Log - mindspore.ops.Log1p - mindspore.ops.LogicalAnd - mindspore.ops.LogicalNot - mindspore.ops.LogicalOr - mindspore.ops.MatMul - mindspore.ops.MatrixInverse - mindspore.ops.Maximum - mindspore.ops.Minimum - mindspore.ops.Mod - mindspore.ops.Mul - mindspore.ops.MulNoNan - mindspore.ops.Neg - mindspore.ops.NMSWithMask - mindspore.ops.NotEqual - mindspore.ops.NPUAllocFloatStatus - mindspore.ops.NPUClearFloatStatus - mindspore.ops.NPUGetFloatStatus - mindspore.ops.Pow - mindspore.ops.RealDiv - mindspore.ops.Reciprocal - mindspore.ops.ReduceAll - mindspore.ops.ReduceAny - mindspore.ops.ReduceMax - mindspore.ops.ReduceMean - mindspore.ops.ReduceMin - mindspore.ops.ReduceProd - mindspore.ops.ReduceSum - 
mindspore.ops.Round - mindspore.ops.Rsqrt - mindspore.ops.Sign - mindspore.ops.Sin - mindspore.ops.Sinh - mindspore.ops.Sqrt - mindspore.ops.Square - mindspore.ops.SquaredDifference - mindspore.ops.SquareSumAll - mindspore.ops.Sub - mindspore.ops.Tan - mindspore.ops.TruncateDiv - mindspore.ops.TruncateMod - mindspore.ops.Xdivy - mindspore.ops.Xlogy - -Array Operators -^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.ApplyFtrl - mindspore.ops.Argmax - mindspore.ops.ArgMaxWithValue - mindspore.ops.Argmin - mindspore.ops.ArgMinWithValue - mindspore.ops.BatchToSpace - mindspore.ops.BatchToSpaceND - mindspore.ops.BroadcastTo - mindspore.ops.Cast - mindspore.ops.Concat - mindspore.ops.DepthToSpace - mindspore.ops.DType - mindspore.ops.DynamicShape - mindspore.ops.EditDistance - mindspore.ops.EmbeddingLookup - mindspore.ops.ExpandDims - mindspore.ops.Eye - mindspore.ops.Fill - mindspore.ops.FusedSparseFtrl - mindspore.ops.Gather - mindspore.ops.GatherD - mindspore.ops.GatherNd - mindspore.ops.Identity - mindspore.ops.InplaceUpdate - mindspore.ops.InvertPermutation - mindspore.ops.IsFinite - mindspore.ops.IsInstance - mindspore.ops.IsSubClass - mindspore.ops.Meshgrid - mindspore.ops.Ones - mindspore.ops.OnesLike - mindspore.ops.Padding - mindspore.ops.ParallelConcat - mindspore.ops.Randperm - mindspore.ops.Rank - mindspore.ops.Reshape - mindspore.ops.ResizeNearestNeighbor - mindspore.ops.ReverseSequence - mindspore.ops.ReverseV2 - mindspore.ops.Rint - mindspore.ops.SameTypeShape - mindspore.ops.ScalarCast - mindspore.ops.ScalarToArray - mindspore.ops.ScalarToTensor - mindspore.ops.ScatterAdd - mindspore.ops.ScatterDiv - mindspore.ops.ScatterMax - mindspore.ops.ScatterMin - mindspore.ops.ScatterMul - mindspore.ops.ScatterNd - mindspore.ops.ScatterNdAdd - mindspore.ops.ScatterNdSub - mindspore.ops.ScatterNdUpdate - mindspore.ops.ScatterNonAliasingAdd - mindspore.ops.ScatterSub - 
mindspore.ops.ScatterUpdate - mindspore.ops.Select - mindspore.ops.Shape - mindspore.ops.Size - mindspore.ops.Slice - mindspore.ops.Sort - mindspore.ops.SpaceToBatch - mindspore.ops.SpaceToBatchND - mindspore.ops.SpaceToDepth - mindspore.ops.SparseApplyFtrl - mindspore.ops.SparseApplyFtrlV2 - mindspore.ops.SparseGatherV2 - mindspore.ops.Split - mindspore.ops.Squeeze - mindspore.ops.StridedSlice - mindspore.ops.TensorScatterUpdate - mindspore.ops.Tile - mindspore.ops.Transpose - mindspore.ops.TupleToArray - mindspore.ops.Unique - mindspore.ops.UniqueWithPad - mindspore.ops.UnsortedSegmentMax - mindspore.ops.UnsortedSegmentMin - mindspore.ops.UnsortedSegmentProd - mindspore.ops.UnsortedSegmentSum - mindspore.ops.Zeros - mindspore.ops.ZerosLike - -Communication Operators -^^^^^^^^^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.AllGather - mindspore.ops.AllReduce - mindspore.ops.Broadcast - mindspore.ops.ReduceOp - mindspore.ops.ReduceScatter - -Debug Operators -^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.HistogramSummary - mindspore.ops.ImageSummary - mindspore.ops.InsertGradientOf - mindspore.ops.Print - mindspore.ops.ScalarSummary - mindspore.ops.TensorSummary - -Random Operators -^^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.Gamma - mindspore.ops.LogUniformCandidateSampler - mindspore.ops.Multinomial - mindspore.ops.Poisson - mindspore.ops.RandomCategorical - mindspore.ops.RandomChoiceWithMask - mindspore.ops.StandardLaplace - mindspore.ops.StandardNormal - mindspore.ops.UniformCandidateSampler - mindspore.ops.UniformInt - mindspore.ops.UniformReal - -Sponge Operators -^^^^^^^^^^^^^^^^ - -.. 
msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.AngleAtomEnergy - mindspore.ops.AngleEnergy - mindspore.ops.AngleForce - mindspore.ops.AngleForceWithAtomEnergy - mindspore.ops.BondAtomEnergy - mindspore.ops.BondEnergy - mindspore.ops.BondForce - mindspore.ops.BondForceWithAtomEnergy - mindspore.ops.BondForceWithAtomVirial - mindspore.ops.DihedralAtomEnergy - mindspore.ops.DihedralEnergy - mindspore.ops.DihedralForce - mindspore.ops.DihedralForceWithAtomEnergy - mindspore.ops.Dihedral14CFAtomEnergy - mindspore.ops.Dihedral14CFEnergy - mindspore.ops.Dihedral14LJAtomEnergy - mindspore.ops.Dihedral14LJCFForceWithAtomEnergy - mindspore.ops.Dihedral14LJEnergy - mindspore.ops.Dihedral14LJForce - mindspore.ops.Dihedral14LJForceWithDirectCF - mindspore.ops.LJEnergy - mindspore.ops.LJForce - mindspore.ops.LJForceWithPMEDirectForce - mindspore.ops.MDIterationLeapFrog - mindspore.ops.NeighborListUpdate - mindspore.ops.PMEEnergy - mindspore.ops.PMEExcludedForce - mindspore.ops.PMEReciprocalForce - -Image Operators -^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.CropAndResize - -Other Operators -^^^^^^^^^^^^^^^ - -.. msplatformautosummary:: - :toctree: ops - :nosignatures: - :template: classtemplate.rst - - mindspore.ops.Assign - mindspore.ops.BoundingBoxDecode - mindspore.ops.BoundingBoxEncode - mindspore.ops.CheckValid - mindspore.ops.Depend - mindspore.ops.InTopK - mindspore.ops.IOU - mindspore.ops.NoRepeatNGram - mindspore.ops.PopulationCount diff --git a/docs/api_python/source_zh_cn/mindspore_hub/mindspore_hub.rst b/docs/api_python/source_zh_cn/mindspore_hub/mindspore_hub.rst deleted file mode 100644 index 34b6c1ceb89517e9799d566d0bb64dc497741b24..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore_hub/mindspore_hub.rst +++ /dev/null @@ -1,6 +0,0 @@ -mindspore_hub -============= - -.. 
automodule:: mindspore_hub - :members: - diff --git a/docs/api_python/source_zh_cn/mindspore_serving/mindspore_serving.rst b/docs/api_python/source_zh_cn/mindspore_serving/mindspore_serving.rst deleted file mode 100644 index e555afa0f67b61e937cea9822a1cabe14ab3c067..0000000000000000000000000000000000000000 --- a/docs/api_python/source_zh_cn/mindspore_serving/mindspore_serving.rst +++ /dev/null @@ -1,34 +0,0 @@ -mindspore_serving -================= - -.. automodule:: mindspore_serving - -mindspore_serving.master ------------------------- - -.. automodule:: mindspore_serving.master - :members: - -mindspore_serving.worker ------------------------- - -.. automodule:: mindspore_serving.worker - :members: start_servable, start_servable_in_master, stop - -mindspore_serving.worker.register -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. automodule:: mindspore_serving.worker.register - :members: - -mindspore_serving.worker.distributed -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. automodule:: mindspore_serving.worker.distributed - :members: - -mindspore_serving.client ------------------------- - -.. automodule:: mindspore_serving.client - :members: diff --git a/docs/faq/Makefile b/docs/faq/Makefile deleted file mode 100644 index 1eff8952707bdfa503c8d60c1e9a903053170ba2..0000000000000000000000000000000000000000 --- a/docs/faq/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source_zh_cn -BUILDDIR = build_zh_cn - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
-%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/faq/requirements.txt b/docs/faq/requirements.txt deleted file mode 100644 index 1755dcd967228348c2f9cb29bac44580af862770..0000000000000000000000000000000000000000 --- a/docs/faq/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -sphinx >= 2.2.1, <= 2.4.4 -recommonmark -sphinx-markdown-tables -sphinx_rtd_theme -numpy -nbsphinx -IPython -jieba diff --git a/docs/faq/source_en/_static/logo_notebook.png b/docs/faq/source_en/_static/logo_notebook.png deleted file mode 100644 index 18c2e29e4b73ee428f70253feffdd855fdf0c422..0000000000000000000000000000000000000000 Binary files a/docs/faq/source_en/_static/logo_notebook.png and /dev/null differ diff --git a/docs/faq/source_en/_static/logo_source.png b/docs/faq/source_en/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/faq/source_en/_static/logo_source.png and /dev/null differ diff --git a/docs/faq/source_en/backend_running.md b/docs/faq/source_en/backend_running.md deleted file mode 100644 index 54d4abc413b6e8b8a832cd4b4cb13909e7b80c62..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/backend_running.md +++ /dev/null @@ -1,224 +0,0 @@ -# Backend Running - -`Ascend` `GPU` `CPU` `Environmental Setup` `Operation Mode` `Model Training` `Beginner` `Intermediate` `Expert` - - - -**Q: What is the difference between `c_transforms` and `py_transforms`? Which one is recommended?** - -A: `c_transforms` is recommended. Its performance is better because it is executed only at the C layer. - -Principle: The underlying layer of `c_transform` uses `opencv/jpeg-turbo` of the C version for data processing, and `py_transform` uses `Pillow` of the Python version for data processing. - -
- -**Q: When MindSpore performs multi-device training on the NPU hardware platform, how does the user-defined dataset transfer data to different NPUs?** - -A: When `GeneratorDataset` is used, the `num_shards=num_shards` and `shard_id=device_id` parameters can be used to control which shard of data is read by different devices. `__getitem__` and `__len__` are processed as full datasets. - -An example is as follows: - -```python -# Device 0: -ds.GeneratorDataset(..., num_shards=8, shard_id=0, ...) -# Device 1: -ds.GeneratorDataset(..., num_shards=8, shard_id=1, ...) -# Device 2: -ds.GeneratorDataset(..., num_shards=8, shard_id=2, ...) -... -# Device 7: -ds.GeneratorDataset(..., num_shards=8, shard_id=7, ...) -``` - -
- -**Q: How do I view the number of model parameters?** - -A: You can load the checkpoint to count the parameter number. Variables in the momentum and optimizer may be counted, so you need to filter them out. -You can refer to the following APIs to collect the number of network parameters: - -```python -def count_params(net): - """Count number of parameters in the network - Args: - net (mindspore.nn.Cell): Mindspore network instance - Returns: - total_params (int): Total number of trainable params - """ - total_params = 0 - for param in net.trainable_params(): - total_params += np.prod(param.shape) - return total_params -``` - -[Script Link](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/research/cv/tinynet/src/utils.py). - -
- -**Q: How do I build a multi-label MindRecord dataset for images?** - -A: The data schema can be defined as follows:`cv_schema_json = {"label": {"type": "int32", "shape": [-1]}, "data": {"type": "bytes"}}` - -Note: A label is an array of the numpy type, where label values 1, 1, 0, 1, 0, 1 are stored. These label values correspond to the same data, that is, the binary value of the same image. -For details, see [Converting Dataset to MindRecord](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/convert_dataset.html#id3). - -
- -**Q: How do I monitor the loss during training and save the training parameters when the `loss` is the lowest?** - -A: You can customize a `callback`.For details, see the writing method of `ModelCheckpoint`. In addition, the logic for determining loss is added. - -```python -class EarlyStop(Callback): -def __init__(self): - self.loss = None -def step_end(self, run_context): - loss = ****(get current loss) - if (self.loss == None or loss < self.loss): - self.loss = loss - # do save ckpt -``` - -
- -**Q: How do I execute a single `ut` case in `mindspore/tests`?** - -A: `ut` cases are usually based on the MindSpore package of the debug version, which is not provided on the official website. You can run `sh build.sh` to compile the source code and then run the `pytest` command. The compilation in debug mode does not depend on the backend. Run the `sh build.sh -t on` command. For details about how to execute cases, see the `tests/runtest.sh` script. - -
- -**Q: How do I obtain the expected `feature map` when `nn.Conv2d` is used?** - -A: For details about how to derive the `Conv2d shape`, click [here](https://www.mindspore.cn/doc/api_python/en/master/mindspore/nn/mindspore.nn.Conv2d.html#mindspore.nn.Conv2d.) Change `pad_mode` of `Conv2d` to `same`. Alternatively, you can calculate the `pad` based on the Conv2d shape derivation formula to keep the `shape` unchanged. Generally, the pad is `(kernel_size-1)//2`. - -
- -**Q: What can I do if the network performance is abnormal and weight initialization takes a long time during training after MindSpore is installed?** - -A: The `SciPy 1.4` series versions may be used in the environment. Run the `pip list | grep scipy` command to view the `SciPy` version and change the `SciPy` version to that required by MindSpore. You can view the third-party library dependency in the `requirement.txt` file. - -> Replace version with the specific version branch of MindSpore. - -
- -**Q: Can MindSpore be used to customize a loss function that can return multiple values?** - -A: After customizing the `loss function`, you need to customize `TrainOneStepCell`. The number of `sens` for implementing gradient calculation is the same as the number of `network` outputs. For details, see the following: - -```python -net = Net() - -loss_fn = MyLoss() - -loss_with_net = MyWithLossCell(net, loss_fn) - -train_net = MyTrainOneStepCell(loss_with_net, optim) - -model = Model(net=train_net, loss_fn=None, optimizer=None) -``` - -
- -**Q: How does MindSpore implement the early stopping function?** - -A: You can customize the `callback` method to implement the early stopping function. -Example: When the loss value decreases to a certain value, the training stops. - -```python -class EarlyStop(Callback): - def __init__(self, control_loss=1): - super(EarlyStep, self).__init__() - self._control_loss = control_loss - - def step_end(self, run_context): - cb_params = run_context.original_args() - loss = cb_params.net_outputs - if loss.asnumpy() < self._control_loss: - # Stop training. - run_context._stop_requested = True - -stop_cb = EarlyStop(control_loss=1) -model.train(epoch_size, ds_train, callbacks=[stop_cb]) -``` - -
- -**Q: What can I do if an error message `wrong shape of image` is displayed when I use a model trained by MindSpore to perform prediction on a `28 x 28` digital image with white text on a black background?** - -A: The MNIST gray scale image dataset is used for MindSpore training. Therefore, when the model is used, the data must be set to a `28 x 28` gray scale image, that is, a single channel. - -
- -**Q: What can I do if the error message `device target [CPU] is not supported in pynative mode` is displayed for the operation operator of MindSpore?** - -A: Currently, the PyNative mode supports only Ascend and GPU and does not support the CPU. - -
- -**Q: For Ascend users, how to get more detailed logs when the `run task error` is reported?** - -A: Use the msnpureport tool to set the on-device log level. The tool is stored in `/usr/local/Ascend/driver/tools/msnpureport`. - -```bash -- Global: /usr/local/Ascend/driver/tools/msnpureport -g info -``` - -```bash -- Module-level: /usr/local/Ascend/driver/tools/msnpureport -m SLOG:error -``` - -```bash -- Event-level: /usr/local/Ascend/driver/tools/msnpureport -e disable/enable -``` - -```bash -- Multi-device ID-level: /usr/local/Ascend/driver/tools/msnpureport -d 1 -g warning -``` - -Assume that the value range of deviceID is [0, 7], and `devices 0–3` and `devices 4–7` are on the same OS. `Devices 0–3` share the same log configuration file and `devices 4–7` share the same configuration file. In this way, changing the log level of any device (for example device 0) will change that of other devices (for example `devices 1–3`). This rule also applies to `devices 4–7`. - -After the driver package is installed (assuming that the installation path is /usr/local/HiAI and the execution file `msnpureport.exe` is in the C:\ProgramFiles\Huawei\Ascend\Driver\tools\ directory on Windows), run the command in the /home/shihangbo/ directory to export logs on the device to the current directory and store logs in a folder named after the timestamp. - -
- -**Q: What can I do if the error message `Pynative run op ExpandDims failed` is displayed when the ExpandDims operator is used? The code is as follows:** - -```python -context.set_context( -mode=cintext.GRAPH_MODE, -device_target='ascend') -input_tensor=Tensor(np.array([[2,2],[2,2]]),mindspore.float32) -expand_dims=ops.ExpandDims() -output=expand_dims(input_tensor,0) -``` - -A: The problem is that the Graph mode is selected but the PyNative mode is used. As a result, an error is reported. MindSpore supports the following running modes which are optimized in terms of debugging or running: - -- PyNative mode: dynamic graph mode. In this mode, operators in the neural network are delivered and executed one by one, facilitating the compilation and debugging of the neural network model. -- Graph mode: static graph mode. In this mode, the neural network model is compiled into an entire graph and then delivered for execution. This mode uses technologies such as graph optimization to improve the running performance and facilitates large-scale deployment and cross-platform running. - -You can select a proper mode and writing method to complete the training by referring to the official website [tutorial](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/debug_in_pynative_mode.html). - -
- -**Q: How to fix the error below when running MindSpore distributed training with GPU:** - -```text -Loading libgpu_collective.so failed. Many reasons could cause this: -1.libgpu_collective.so is not installed. -2.nccl is not installed or found. -3.mpi is not installed or found -``` - -A: This message means that MindSpore failed to load library `libgpu_collective.so`. The Possible causes are: - -- OpenMPI or NCCL is not installed in this environment. -- NCCL version is not updated to `v2.7.6`: MindSpore `v1.1.0` supports GPU P2P communication operator which relies on NCCL `v2.7.6`. `libgpu_collective.so` can't be loaded successfully if NCCL is not updated to this version. - -
- -**Q:How to set environment variable `DEVICE_ID` when using GPU version of MindSpore** - -A:Normally, GPU version of MindSpore doesn't need to set `DEVICE_ID`. MindSpore automatically chooses visible GPU devices according to the cuda environment variable `CUDA_VISIBLE_DEVICES`. After setting `CUDA_VISIBLE_DEVICES`, `DEVICE_ID` refers to the ordinal of the GPU device: - -- After `export CUDA_VISIBLE_DEVICES=1,3,5`, `DEVICE_ID` should be exported as `0`, `1` or `2`. If `3` is exported, MindSpore will fail to execute because of the invalid device ordinal. diff --git a/docs/faq/source_en/conf.py b/docs/faq/source_en/conf.py deleted file mode 100644 index a1fd767271ac159540440ed65bd0d676163366a9..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/conf.py +++ /dev/null @@ -1,58 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os - - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_static_path = ['_static'] \ No newline at end of file diff --git a/docs/faq/source_en/distributed_settings.md b/docs/faq/source_en/distributed_settings.md deleted file mode 100644 index 6ef6b90156051ef4ab3a98b443c5e16beec567a0..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/distributed_settings.md +++ /dev/null @@ -1,20 +0,0 @@ -# Distributed Settings - -`Ascend` `GPU` `Distributed Training` `Beginner` `Intermediate` `Expert` - - - -**Q:The communication profile file needs to be configured on the Ascend environment, how should it be configured?** - -A:Please refer to the [Configuring Distributed Environment Variables](https://mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html#configuring-distributed-environment-variables) section of Ascend-based distributed training in the MindSpore tutorial. - -
- -**Q:How to perform distributed multi-machine multi-card training?** - -A:For Ascend environment, please refer to the [Multi-machine Training](https://mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html#multi-machine-training) section of the MindSpore tutorial "distributed_training_ascend". -For GPU-based environments, please refer to the [Run Multi-Host Script](https://mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_gpu.html#running-the-multi-host-script) section of the MindSpore tutorial "distributed_training_gpu". - -
- - diff --git a/docs/faq/source_en/index.rst b/docs/faq/source_en/index.rst deleted file mode 100644 index c38f1f82db55a23a9b878492a291ce2169756351..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/index.rst +++ /dev/null @@ -1,23 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 10:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore FAQ -================= - -.. toctree:: - :glob: - :maxdepth: 1 - - installation - supported_operators - network_models - platform_and_system - backend_running - usage_migrate_3rd - programming_language_extensions - supported_features - mindinsight_use - distributed_settings - inference \ No newline at end of file diff --git a/docs/faq/source_en/inference.md b/docs/faq/source_en/inference.md deleted file mode 100644 index fbfff029db96c09af2fda5350fd00e44c3887b35..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/inference.md +++ /dev/null @@ -1,55 +0,0 @@ -# Inference - - - -## MindSpore C++ Library Use - -**Q:What should I do when error `/usr/bin/ld: warning: libxxx.so, needed by libmindspore.so, not found` prompts during application compiling?** - -A:Find the directory where the missing dynamic library file is located, add the path to the environment variable `LD_LIBRARY_PATH`, and refer to [Inference Using the MindIR Model on Ascend 310 AI Processors#Building Inference Code](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_ascend_310_mindir.html#building-inference-code) for environment variable settings. - -
- -**Q:What should I do when error `ModuleNotFoundError: No module named 'te'` prompts during application running?** - -A:First confirm whether the system environment is installed correctly and whether the whl packages such as `te` and `topi` are installed correctly. If there are multiple Python versions in the user environment, such as Conda virtual environment, you need to execute `ldd name_of_your_executable_app` to confirm whether the application link `libpython3.7m.so.1.0` is consistent with the current Python directory, if not, you need to adjust the order of the environment variable `LD_LIBRARY_PATH` . - -
- -**Q:What should I do when error `error while loading shared libraries: libge_compiler.so: cannot open shared object file: No such file or directory` prompts during application running?** - -A:While installing Ascend 310 AI Processor software packages,the `CANN` package should install the full-featured `toolkit` version instead of the `nnrt` version. - -## MindSpore Serving - -**Q: Does MindSpore Serving support hot update to avoid inference service interruption?** - -A: MindSpore Serving does not support hot update. You need to restart MindSpore Serving. You are advised to run multiple Serving services. When updating a model, restart some services to avoid service interruption. - -
- -**Q: Does MindSpore Serving allow multiple workers to be started for one model to support multi-device and single-model concurrency?** - -A: MindSpore Serving does not support distribution and this function is being developed. That is, multiple workers cannot be started for one model. It is recommended that multiple Serving services be deployed to implement distribution and load balancing. In addition, to avoid message forwarding between `master` and `worker`, you can use the `start_servable_in_master` API to enable `master` and `worker` to be executed in the same process, implementing lightweight deployment of the Serving services. - -
- -**Q: How does the MindSpore Serving version match the MindSpore version?** - -A: MindSpore Serving matches MindSpore in the same version. For example, Serving `1.1.1` matches MindSpore `1.1.1`. - -
- -**Q: What is the difference between `bash -p` and `bash -e` when an error is reported during application build?** - -A: MindSpore Serving build and running depend on MindSpore. Serving provides two build modes: 1. Use `bash -p {python site-packages}/mindspore/lib` to specify an installed MindSpore path to avoid building MindSpore when building Serving. 2. Build Serving and the corresponding MindSpore. Serving passes the `-e`, `-V`, and `-j` options to MindSpore. -For example, use `bash -e ascend -V 910 -j32` in the Serving directory as follows: - -- Build MindSpore in the `third_party/mindspore` directory using `bash -e ascend -V 910 -j32`. -- Use the MindSpore build result as the Serving build dependency. - -
- -**Q: What can I do if an error `libmindspore.so: cannot open shared object file: No such file or directory` is reported during application running?** - -A: Check whether MindSpore that MindSpore Serving depends on is installed. In Serving 1.1, `LD_LIBRARY_PATH` needs to be configured to explicitly specify the path of `libmindspore.so`. `libmindspore.so` is in the `lib` directory of the MindSpore Python installation path. In Serving 1.2 or later, the path of `libmindspore.so` does not need to be specified. Serving searches for and adds `LD_LIBRARY_PATH` based on the MindSpore installation path, which does not need to be perceived by users. diff --git a/docs/faq/source_en/installation.md b/docs/faq/source_en/installation.md deleted file mode 100644 index ba558d7d3a500f274964a510972b8daf42b3f1b6..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/installation.md +++ /dev/null @@ -1,177 +0,0 @@ -# Installation - -`Linux` `Windows` `Ascend` `GPU` `CPU` `Environment Preparation` `Basic` `Intermediate` - - - -- [Installation](#installation) - - [Installing Using pip](#installing-using-pip) - - [Source Code Compilation Installation](#source-code-compilation-installation) - - [Uninstall](#uninstall) - - [Environment Variables](#environment-variables) - - [Verifying the Installation](#verifying-the-installation) - - - - - -## Installing Using pip - -**Q: What can I do if an error message `cannot open shared object file:file such file or directory` is displayed when I install MindSpore of the GPU, CUDA 10.1, 0.5.0-beta, or Ubuntu-x86 version?** - -A: The error message indicates that the cuBLAS library is not found. Generally, the cause is that the cuBLAS library is not installed or is not added to the environment variable. Generally, cuBLAS is installed together with CUDA and the driver. After the installation, add the directory where cuBLAS is located to the `LD_LIBRARY_PATH` environment variable. - -
- -**Q: What should I do if an error message `SSL:CERTIFICATE_VERIFY_FAILED` is displayed when I use pip to install MindSpore?** - -A: Add the `--trusted-host=ms-release.obs.cn-north-4.myhuaweicloud.com` parameter to the pip installation command and try again. - -<br/>
- -**Q: Any specific requirements for Python version when pip install MindSpore?** - -A: MindSpore utilizes many of the new features in Python 3.7+, therefore we recommend you add a Python 3.7.5 development environment via `conda`. - -<br/>
- -**Q: Any specific requirements for protobuf version when use MindSpore?** - -A: MindSpore installs version 3.8.0 of protobuf by default. If you have installed 3.12.0 or later version of protobuf locally, there will be many warnings in the log when using pytest to test the code. It is recommended that you use the command 'pip install protobuf==3.8.0' to reinstall version 3.8.0. - -
- -**Q: What should I do when error `ProxyError(Cannot connect to proxy)` prompts during pip install?** - -A: It is generally a proxy configuration problem. You can use `export http_proxy={your_proxy}` in an Ubuntu environment, and use `set http_proxy={your_proxy}` in cmd in a Windows environment to configure your proxy. - -<br/>
- -**Q: What should I do when error prompts during pip install?** - -A: Please execute `pip -V` to check if pip is linked to Python3.7+. If not, we recommend you -use `python3.7 -m pip install` instead of `pip install` command. - -
- -**Q: What should I do if I cannot find whl package for MindInsight or MindArmour on the installation page of MindSpore website?** - -A: You can download whl package from the official [MindSpore Website download page](https://www.mindspore.cn/versions) and manually install it via `pip install`. - -
- -## Source Code Compilation Installation - -**Q: A cross compiler has been installed on Linux, but how do I write compilation commands?** - -A: Set environment variables and specify the Android NDK path using `export ANDROID_NDK=/path/to/android-ndk`. To compile the Arm64 version, run `bash build.sh -I arm64`. To compile the Arm32 version, run `bash build.sh -I arm32`. After the compilation is successful, find the compiled package in the output directory. - -
- -**Q: A sample fails to be executed after I installed MindSpore 0.6.0 beta on Ascend 910 using Ubuntu_aarch64 and Python 3.7.5 and manually downloaded the .whl package of the corresponding version, compiled and installed GMP6.1.2, and installed other Python library dependencies. An error message is displayed, indicating that the .so file cannot be found. What can I do?** - -A: The `libdatatransfer.so` dynamic library is in the `fwkacllib/lib64` directory. Find the path of the library in the `/usr/local` directory, and then add the path to the `LD_LIBRARY_PATH` environment variable. After the settings take effect, execute the sample again. - -
- -**Q: What should I do if the compilation time of MindSpore source code takes too long or the process is constantly interrupted by errors?** - -A: MindSpore imports third party dependencies through submodule mechanism, among which `protobuf` v3.8.0 might not have the optimal or steady download speed, it is recommended that you perform package cache in advance. - -
- -**Q: How to change the installation directory of the third party libraries?** - -A: The third party libraries will be installed in `build/mindspore/.mslib`. You can change the installation directory by setting the environment variable `MSLIBS_CACHE_PATH`, e.g. `export MSLIBS_CACHE_PATH=~/.mslib` (note: no spaces around `=`, otherwise the shell rejects the assignment). - -<br/>
- -**Q: What should I do if the software version required by MindSpore is not the same as the Ubuntu default software version?** - -A: At the moment some software might need a manual upgrade. (**Note**: MindSpore requires Python 3.7.5 and gcc 7.3; the default versions in Ubuntu 16.04 are Python 3.5 and gcc 5, whereas those in Ubuntu 18.04 are Python 3.7.3 and gcc 7.4.) - -<br/>
- -**Q: What should I do if there is a prompt `tclsh not found` when I compile MindSpore from source code?** - -A: Please install the software manually if there is any suggestion of certain `software not found`. - -
- -**Q: what should I do when I have installed Python 3.7.5 and set environment variables accordingly, but still failed compiling MindSpore, with error message `Python3 not found`?** - -A: It's probably due to the lack of shared libraries in current Python environment. Compiling MindSpore requires linking Python shared libraries, hence you may need to compile and install Python 3.7.5 from source, using command `./configure --enable-shared`. - -
- -**Q: What are the directories to be cleaned if the previous compilation failed, so as to prevent the following compilation being affected by previous remains?** - -A: While compiling MindSpore, if: - -1. Failed while downloading or compiling third-party software. e.g. Failed applying patch on icu4c, with error message `Cmake Error at cmake/utils.cmake:301 (message): Failed patch:`. In this case, go to `build/mindspore/.mslib` or directories specified by the environment variable `MSLIBS_CACHE_PATH` where third-party software is installed, and delete the affected software respectively. - -2. Failed in other stages of compilation, or if you wish to clean all previous build results, simply delete the `build` directory. - -<br/>
- -## Uninstall - -**Q: How to uninstall MindSpore?** - -A: Using `pip uninstall mindspore` to uninstall MindSpore. - -
- -## Environment Variables - -**Q: Some frequently-used environment settings need to be reset in the newly started terminal window, which is easy to forget. What should I do?** - -A: You can write the frequently-used environment settings to `~/.bash_profile` or `~/.bashrc` so that the settings can take effect immediately when you start a new terminal window. - -<br/>
- -**Q: Ascend AI processor software package and other prerequisites have been installed, but executing MindSpore failed with error message `Cannot open shared object file: No such file or directory`, what should I do?** - -A: There are two common reasons: an incorrect version of Ascend AI processor software package is installed, or the packages are installed in customized paths, yet the environment variables are not set accordingly. - -1. Go to the directory where Ascend AI processor software package is installed, being `/usr/local/Ascend` by default, open `version.info` files which are located under directories of subpackages, and check if the version number matches the requirement of MindSpore. Please refer to [Install](https://www.mindspore.cn/install/en) for the detailed version required by MindSpore versions. If the version number does not match, please update the packages accordingly. - -2. Check if Ascend AI processor software package is installed in the default directory. MindSpore attempts to load packages from default directory `/usr/local/Ascend`, if the packages are installed in a customized directory, please follow the instructions from `Configuring Environment Variables` section of [Install](https://www.mindspore.cn/install/en) and set environment variables to match the changes. If other dependencies are installed in customized directories, then please set `LD_LIBRARY_PATH` environment variable accordingly. - -<br/>
- -## Verifying the Installation - -**Q: After MindSpore is installed on a CPU of a PC, an error message `the pointer[session] is null` is displayed during code verification. The specific code is as follows. How do I verify whether MindSpore is successfully installed?** - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="Ascend") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x,y)) -``` - -A: After MindSpore is installed on a CPU hardware platform, run the `python -c 'import mindspore'` command to check whether MindSpore is successfully installed. If no error message such as `No module named 'mindspore'` is displayed, MindSpore is successfully installed. The verification code is used only to verify whether an Ascend platform is successfully installed. - -<br/>
- -**Q: What should I do when errors such as `sh:1:python:not found` or `No module named mindspore._extends.remote` are prompted, indicating that Python was linked to Python 2.7?** - -A: Use the following commands to check whether the current Python environment meets the requirements of MindSpore. - -- Type `python` in a terminal window, and check whether the version of the Python interactive environment is `3.7.x`. -- If not, execute the `sudo ln -sf /usr/bin/python3.7.x /usr/bin/python` command to create a symbolic link for Python. - -<br/>
- -**Q: Here in script when we import other python lib before `import mindspore`, error raised like follows (`/your_path/libgomp.so.1: cannot allocate memory in static TLS block`), how can we solve it?** - -A: Above question is relatively common, and there are two feasible solutions, you can choose one of them: - -- Exchange the order of import, first `import mindspore` and then import other third party libraries. -- Before executing the program, we can add environment variables first (`export LD_PRELOAD=/your_path/libgomp.so.1`), where `your_path` is the path mentioned in above error. diff --git a/docs/faq/source_en/mindinsight_use.md b/docs/faq/source_en/mindinsight_use.md deleted file mode 100644 index 6f8dc164b59cc96963820f0d71715c1852b27dbc..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/mindinsight_use.md +++ /dev/null @@ -1,40 +0,0 @@ -# Training Process Visualization - -`Linux` `Ascend` `GPU` `Environment Preparation` - - - -**Q: What can I do if the error message `ImportError: libcrypto.so.1.0.0: cannot open shared object file: No such file or directory` is displayed in the MindInsight running logs after MindInsight failed to start?** - -A: You can use "export LD_LIBRARY_PATH=dir:$LD_LIBRARY_PATH" command to export LD_LIBRARY_PATH variable in Linux environment. - -
- -**Q: What can I do if the error message `bash: mindinsight: command not found` is displayed in the MindInsight running logs after MindInsight failed to start?** - -A: This problem occurs when using Python source codes to compile and install in the user-defined path. When installing MindInsight by using `pip`, the executable file will be installed in this path. If the installation directory is not found in the bash environment variable queried by using `echo $PATH`, the system will not find the installed executable file. You need to use `export PATH=$PATH:$YourPythonPath$/bin` on the command line to import the path variable. - -(Please change `$YourPythonPath$` to your installation path). Note: this command is only valid at the current terminal. If you want to make it permanent, please add it to the file `~/.bashrc`. - -<br/>
- -**Q: What can I do if the error message `No module named 'mindinsight'` is displayed in the MindInsight running logs after MindInsight is uninstalled?** - -A: After MindInsight is started, it becomes a background service. After MindInsight package is uninstalled, the started MindInsight background service will not automatically stop. When the MindInsight background service starts a new process to load data or performs other operations, it will trigger the error message of `No module named 'mindinsight'` and record it to a log file. - -In this case, you can perform either of the following operations: - -- Reinstall MindInsight and run the `mindinsight stop --port <PORT>` command to stop the started MindInsight background service. -- Run the `kill -9 <PID>` command to kill the processes started by MindInsight. - -<br/>
- -**Q: What can I do if the Google Chrome browser prompts the error message `ERR_UNSAFE_PORT` after MindInsight is successfully started?** - -A: Chrome browser's kernel prohibits certain ports from being used as HTTP services. You can add `--explicitly-allowed-ports=port` in Chrome browser's configuration. Otherwise, you can change the port or use another browser, such as IE. - -<br/>
- -**Q: What can I do if the error `Exeption calling application: Field number 0 is illegal` appears on Ascend after MindInsight is successfully started with debugger turning on, and the training script is trying to connecting to debugger?** - -A: It means the wrong version of protobuf is installed, please install the right version, see [Installing protobuf Python](https://support.huaweicloud.com/intl/en-us/instg-cli-cann/atlascli_03_0046.html). diff --git a/docs/faq/source_en/network_models.md b/docs/faq/source_en/network_models.md deleted file mode 100644 index df074419ca4ffde9878903ac90cfaeb60d0df85b..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/network_models.md +++ /dev/null @@ -1,271 +0,0 @@ -# Network Models - -`Data Processing` `Environmental Setup` `Model Export` `Model Training` `Beginner` `Intermediate` `Expert` - - - -**Q: How do I understand the `dataset_sink_mode` parameter in `model.train` of MindSpore?** - -A: When `dataset_sink_mode` is set to `True`, data processing and network computing are performed in pipeline mode. That is, when data processing is performed step by step, after a `batch` of data is processed, the data is placed in a queue which is used to cache the processed data. Then, network computing obtains data from the queue for training. In this case, data processing and network computing are performed in pipeline mode. The entire training duration is the longest data processing/network computing duration. - -When `dataset_sink_mode` is set to `False`, data processing and network computing are performed in serial mode. That is, after a `batch` of data is processed, it is transferred to the network for computation. After the computation is complete, the next `batch` of data is processed and transferred to the network for computation. This process repeats until the training is complete. The total time consumed is the time consumed for data processing plus the time consumed for network computing. - -
- -**Q: Can MindSpore train image data of different sizes by batch?** - -A: You can refer to the usage of YOLOv3 which contains the resizing of different images. For details about the script, see [yolo_dataset](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/yolov3_darknet53/src/yolo_dataset.py). - -
- -**Q: Can the `vgg16` model be loaded and transferred on a GPU using the Hub?** - -A: Yes, but you need to manually modify the following two arguments: - -```python -# Add the **kwargs argument as follows: -def vgg16(num_classes=1000, args=None, phase="train", **kwargs): -``` - -```python -# Add the **kwargs argument as follows: -net = Vgg(cfg['16'], num_classes=num_classes, args=args, batch_norm=args.batch_norm, phase=phase, **kwargs) -``` - -
- -**Q: How to obtain middle-layer features of a VGG model?** - -A: Obtaining the middle-layer features of a network is not closely related to the specific framework. For the `vgg` model defined in `torchvison`, the `features` field can be used to obtain the middle-layer features. The `vgg` source code of `torchvison` is as follows: - -```python -class VGG(nn.Module): - - def __init__(self, features, num_classes=1000, init_weights=True): - super(VGG, self).__init__() - self.features = features - self.avgpool = nn.AdaptiveAvgPool2d((7, 7)) -``` - -The `vgg16` defined in ModelZoo of MindSpore can be obtained through the `layers` field as follows: - -```python -network = vgg16() -print(network.layers) -``` - -
- -**Q: When MindSpore is used for model training, there are four input parameters for `CTCLoss`: `inputs`, `labels_indices`, `labels_values`, and `sequence_length`. How do I use `CTCLoss` for model training?** - -A: The `dataset` received by the defined `model.train` API can consist of multiple pieces of data, for example, (`data1`, `data2`, `data3`, ...). Therefore, the `dataset` can contain `inputs`, `labels_indices`, `labels_values`, and `sequence_length` information. You only need to define the dataset in the corresponding format and transfer it to `model.train`. For details, see [Data Processing API](https://www.mindspore.cn/doc/programming_guide/en/master/dataset_loading.html). - -
- -**Q: How do I load the PyTorch weight to MindSpore during model transfer?** - -A: First, enter the `PTH` file of PyTorch. Take `ResNet-18` as an example. The network structure of MindSpore is the same as that of PyTorch. After transferring, the file can be directly loaded to the network. Only `BN` and `Conv2D` are used during loading. If the network names of MindSpore and PyTorch at other layers are different, change the names to the same. - -
- -**Q: After a model is trained, how do I save the model output in text or `npy` format?** - -A: The network output is `Tensor`. You need to use the `asnumpy()` method to convert the `Tensor` to `NumPy` and then save the data. For details, see the following: - -```python -out = net(x) - -np.save("output.npy", out.asnumpy()) -``` - -
- -**Q: Must data be converted into MindRecords when MindSpore is used for segmentation training?** - -A: [build_seg_data.py](https://github.com/mindspore-ai/mindspore/blob/master/model_zoo/official/cv/deeplabv3/src/data/build_seg_data.py) is used to generate MindRecords based on a dataset. You can directly use or adapt it to your dataset. Alternatively, you can use `GeneratorDataset` if you want to read the dataset by yourself. - -[GeneratorDataset example](https://www.mindspore.cn/doc/programming_guide/en/master/dataset_loading.html#loading-user-defined-dataset) - -[GeneratorDataset API description](https://www.mindspore.cn/doc/api_python/en/master/mindspore/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset) - -<br/>
- -**Q: Can MindSpore read a TensorFlow checkpoint?** - -A: The checkpoint format of MindSpore is different from that of TensorFlow. Although both use the Protocol Buffers, their definitions are different. Currently, MindSpore cannot read the TensorFlow or Pytorch checkpoints. - -
- -**Q: How do I perform training without processing data in MindRecord format?** - -A: You can use the customized data loading method `GeneratorDataset`. For details, click [here](https://www.mindspore.cn/tutorial/en/r0.7/use/data_preparation/loading_the_datasets.html#id5). - -
- -**Q: What framework models and formats can be directly read by MindSpore? Can the PTH Model Obtained Through Training in PyTorch Be Loaded to the MindSpore Framework for Use?** - -A: MindSpore uses protocol buffers (protobuf) to store training parameters and cannot directly read framework models. A model file stores parameters and their values. You can use APIs of other frameworks to read parameters, obtain the key-value pairs of parameters, and load the key-value pairs to MindSpore. If you want to use the .ckpt file trained by a framework, read the parameters and then call the `save_checkpoint` API of MindSpore to save the file as a .ckpt file that can be read by MindSpore. - -
- -**Q: How do I use models trained by MindSpore on Ascend 310? Can they be converted to models used by HiLens Kit?** - -A: Yes. HiLens Kit uses Ascend 310 as the inference core. Therefore, the two questions are essentially the same. Ascend 310 requires a dedicated OM model. Use MindSpore to export the ONNX or AIR model and convert it into an OM model supported by Ascend 310. For details, see [Multi-platform Inference](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_ascend_310.html). - -
- -**Q: How do I modify parameters (such as the dropout value) on MindSpore?** - -A: When building a network, use `if self.training: x = dropout(x)`. During verification, call `network.set_train(False)` before execution to disable the dropout function. During training, call `network.set_train(True)` to enable the dropout function. - -<br/>
- -**Q: Where can I view the sample code or tutorial of MindSpore training and inference?** - -A: Please visit the [MindSpore official website training](https://www.mindspore.cn/tutorial/training/en/master/index.html) and [MindSpore official website inference](https://www.mindspore.cn/tutorial/inference/en/master/index.html). - -
- -**Q: What types of model is currently supported by MindSpore for training?** - -A: MindSpore has basic support for common training scenarios, please refer to [Release note](https://gitee.com/mindspore/mindspore/blob/master/RELEASE.md#) for detailed information. - -
- -**Q: What are the available recommendation or text generation networks or models provided by MindSpore?** - -A: Currently, recommendation models such as Wide & Deep, DeepFM, and NCF are under development. In the natural language processing (NLP) field, Bert\_NEZHA is available and models such as MASS are under development. You can rebuild the network into a text generation network based on the scenario requirements. Please stay tuned for updates on the [MindSpore Model Zoo](https://gitee.com/mindspore/mindspore/tree/master/model_zoo). - -
- -**Q: How simple can the MindSpore model training code be?** - -A: MindSpore provides Model APIs except for network definitions. In most scenarios, model training can be completed using only a few lines of code. - -
- -**Q: How do I use MindSpore to fit functions such as $f(x)=a \times sin(x)+b$?** - -A: The following is based on the official MindSpore linear fitting case. - -```python -# The fitting function is:f(x)=2*sin(x)+3. -import numpy as np -from mindspore import dataset as ds -from mindspore.common.initializer import Normal -from mindspore import nn, Model, context -from mindspore.train.callback import LossMonitor - -context.set_context(mode=context.GRAPH_MODE, device_target="CPU") - -def get_data(num, w=2.0, b=3.0): - # f(x)=w * sin(x) + b - # f(x)=2 * sin(x) +3 - for i in range(num): - x = np.random.uniform(-np.pi, np.pi) - noise = np.random.normal(0, 1) - y = w * np.sin(x) + b + noise - yield np.array([np.sin(x)]).astype(np.float32), np.array([y]).astype(np.float32) - -def create_dataset(num_data, batch_size=16, repeat_size=1): - input_data = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data','label']) - input_data = input_data.batch(batch_size) - input_data = input_data.repeat(repeat_size) - return input_data - -class LinearNet(nn.Cell): - def __init__(self): - super(LinearNet, self).__init__() - self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02)) - - def construct(self, x): - x = self.fc(x) - return x - -if __name__ == "__main__": - - num_data = 1600 - batch_size = 16 - repeat_size = 1 - lr = 0.005 - momentum = 0.9 - - net = LinearNet() - net_loss = nn.loss.MSELoss() - opt = nn.Momentum(net.trainable_params(), lr, momentum) - model = Model(net, net_loss, opt) - - ds_train = create_dataset(num_data, batch_size=batch_size, repeat_size=repeat_size) - model.train(1, ds_train, callbacks=LossMonitor(), dataset_sink_mode=False) - - print(net.trainable_params()[0], "\n%s" % net.trainable_params()[1]) -``` - -
- -**Q: How do I use MindSpore to fit quadratic functions such as $f(x)=ax^2+bx+c$?** - -A: The following code is referenced from the official [MindSpore tutorial code](https://gitee.com/mindspore/docs/blob/master/tutorials/tutorial_code/linear_regression.py). - -Modify the following items to fit $f(x) = ax^2 + bx + c$: - -1. Dataset generation. -2. Network fitting. -3. Optimizer. - -The following explains detailed information about the modification: - -```python -# The selected optimizer does not support CPUs. Therefore, the GPU computing platform is used for training. You need to install MindSpore of the GPU version. -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -# Assume that the function to be fitted is f(x)=2x^2+3x+4. Modify the data generation function as follows: -def get_data(num, a=2.0, b=3.0 ,c = 4): - for i in range(num): - x = np.random.uniform(-10.0, 10.0) - noise = np.random.normal(0, 1) - # For details about how to generate the value of y, see the to-be-fitted objective function ax^2+bx+c. - y = x * x * a + x * b + c + noise - # When fitting a*x^2 + b*x +c, a and b are weight parameters, and c is the offset parameter bias. The training data corresponding to the two weights is x^2 and x, respectively. Therefore, the dataset generation mode is changed as follows: - yield np.array([x*x, x]).astype(np.float32), np.array([y]).astype(np.float32) - -def create_dataset(num_data, batch_size=16, repeat_size=1): - input_data = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data','label']) - input_data = input_data.batch(batch_size) - input_data = input_data.repeat(repeat_size) - return input_data - -class LinearNet(nn.Cell): - def __init__(self): - super(LinearNet, self).__init__() - # Two training parameters are input for the full connection function. Therefore, the input value is changed to 2. 
The first Normal(0.02) automatically allocates random weights to the two input parameters, and the second Normal is the random bias. - self.fc = nn.Dense(2, 1, Normal(0.02), Normal(0.02)) - - def construct(self, x): - x = self.fc(x) - return x - -if __name__ == "__main__": - num_data = 1600 - batch_size = 16 - repeat_size = 1 - lr = 0.005 - momentum = 0.9 - - net = LinearNet() - net_loss = nn.loss.MSELoss() - # RMSProp optimizer with better effect is selected for quadratic function fitting. Currently, Ascend and GPU computing platforms are supported. - opt = nn.RMSProp(net.trainable_params(), learning_rate=0.1) - model = Model(net, net_loss, opt) - - ds_train = create_dataset(num_data, batch_size=batch_size, repeat_size=repeat_size) - model.train(1, ds_train, callbacks=LossMonitor(), dataset_sink_mode=False) - - print(net.trainable_params()[0], "\n%s" % net.trainable_params()[1]) -``` - -
- -**Q:What should I do if a Protobuf memory limit error is reported during the process of using ckpt or exporting a model?** - -A:When a single Protobuf data is too large, because Protobuf itself limits the size of the data stream, a memory limit error will be reported. At this time, the restriction can be lifted by setting the environment variable `PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python`. diff --git a/docs/faq/source_en/platform_and_system.md b/docs/faq/source_en/platform_and_system.md deleted file mode 100644 index dc7ce417658d68e53ea1422b60d52ff8b71c5d4e..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/platform_and_system.md +++ /dev/null @@ -1,69 +0,0 @@ -# Platform and System - -`Linux` `Windows` `Ascend` `GPU` `CPU` `Hardware Support` `Beginner` `Intermediate` - - - -**Q: What is the difference between the PyNative and Graph modes?** - -A: In terms of efficiency, operators used in the two modes are the same. Therefore, when the same network and operators are executed in the two modes, the accuracy is the same. The network execution performance varies according to the execution mechanism. Theoretically, operators provided by MindSpore support both the PyNative and Graph modes. - -In terms of application scenarios, Graph mode requires the network structure to be built at the beginning, and then the framework performs entire graph optimization and execution. This mode is suitable to scenarios where the network is fixed and high performance is required. - -The two modes are supported on different hardware (such as `Ascend`, `GPU`, and `CPU`). - -In terms of code debugging, operators are executed line by line. Therefore, you can directly debug the Python code and view the `/api` output or execution result of the corresponding operator at any breakpoint in the code. In Graph mode, the network is built but not executed in the constructor function. 
Therefore, you cannot obtain the output of the corresponding operator at breakpoints in the `construct` function. The output can be viewed only after the network execution is complete. - -
- -**Q: How do I perform transfer learning in PyNative mode?** - -A: PyNative mode is compatible with transfer learning. For more tutorial information, see [Code for Loading a Pre-Trained Model](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/cv_mobilenetv2_fine_tune.html#code-for-loading-a-pre-trained-model). - -
- -**Q: Does MindSpore run only on Huawei `NPUs`?** - -A: MindSpore supports Huawei Ascend `NPUs`, `GPUs`, and `CPUs`, and supports heterogeneous computing. - -
- -**Q: Can MindSpore be converted to an AIR model on Ascend 310?** - -A: An AIR model cannot be exported from the Ascend 310. You need to load a trained checkpoint on the Ascend 910, export an AIR model, and then convert the AIR model into an OM model for inference on the Ascend 310. For details about the Ascend 910 installation, see the MindSpore Installation Guide at [here](https://www.mindspore.cn/install/en). - -
- -**Q: What is the limitation on the input size of a single tensor when exporting an AIR model from MindSpore?** - -A: For the input of a single tensor, the size of tensor should not exceed 2GB, otherwise it will be wrong when converting to air model. - -
- -**Q: Can a network script trained by MindSpore on a GPU be directly trained on an NPU without modification?** - -A: Yes. MindSpore provides unified APIs for NPUs, GPUs, and CPUs. With the support of operators, network scripts can run across platforms without modification. - -
- -**Q: Does MindSpore require computing units such as GPUs and NPUs? What hardware support is required?** - -A: MindSpore currently supports CPU, GPU, Ascend, and NPU. Currently, you can try out MindSpore through Docker images on laptops or in environments with GPUs. Some models in MindSpore Model Zoo support GPU-based training and inference, and other models are being improved. For distributed parallel training, MindSpore supports multi-GPU training. You can obtain the latest information from [Road Map](https://www.mindspore.cn/doc/note/en/master/roadmap.html) and [project release notes](https://gitee.com/mindspore/mindspore/blob/master/RELEASE.md#). - -
- -**Q: Does MindSpore have any plan on supporting other types of heterogeneous computing hardwares?** - -A: MindSpore provides pluggable device management interface so that developer could easily integrate other types of heterogeneous computing hardwares like FPGA to MindSpore. We welcome more backend support in MindSpore from the community. - -
- -**Q: Does MindSpore support Windows 10?** - -A: The MindSpore CPU version can be installed on Windows 10. For details about the installation procedure, please refer to the [MindSpore official website tutorial](https://www.mindspore.cn/install/en) - -
- -**Q: For Ascend users, what should I do when `RuntimeError: json.exception.parse_error.101 parse error at line 1, column 1: syntax error while parsing value - invalid literal; last read: 'T'` appears in personal Conda environment?** - -A: When you encounter the error, you should update the `te/topi/hccl` python toolkits, unload them firstly and then using command `pip install /usr/local/Ascend/fwkacllib/lib64/{te/topi/hccl}*any.whl` to reinstall. \ No newline at end of file diff --git a/docs/faq/source_en/programming_language_extensions.md b/docs/faq/source_en/programming_language_extensions.md deleted file mode 100644 index c9b234c2c54086c58464abefba4b5e9791ff9072..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/programming_language_extensions.md +++ /dev/null @@ -1,15 +0,0 @@ -# Programming Language Extensions - -`Python` `Support Plan` - - - -**Q: The recent announced programming language such as taichi got Python extensions that could be directly used as `import taichi as ti`. Does MindSpore have similar support?** - -A: MindSpore supports Python native expression via `import mindspore`. - -
- -**Q: Does MindSpore plan to support more programming languages other than Python?** - -A: MindSpore currently supports Python extensions,bindings for languages like C++, Rust, Julia are on the way. diff --git a/docs/faq/source_en/supported_features.md b/docs/faq/source_en/supported_features.md deleted file mode 100644 index 8bda3f07bc9675cc186955c2367cf3abd180ed6f..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/supported_features.md +++ /dev/null @@ -1,99 +0,0 @@ -# Supported Features - -`Characteristic Advantages` `On-device Inference` `Functional Module` `Reasoning Tools` - - - -**Q: Does MindSpore Serving support hot loading to avoid inference service interruption?** - -A: MindSpore does not support hot loading. It is recommended that you run multiple Serving services and restart some of them when switching the version. - -
- -**Q: Does MindSpore support truncated gradient?** - -A: Yes. For details, see [Definition and Usage of Truncated Gradient](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/nlp/transformer/src/transformer_for_train.py#L35). - -
- -**Q: How do I change hyperparameters for calculating loss values during neural network training?** - -A: Sorry, this function is not available yet. You can find the optimal hyperparameters by training, redefining an optimizer, and then training. - -
- -**Q: Can you introduce the dedicated data processing framework?** - -A: MindData provides the heterogeneous hardware acceleration function for data processing. The high-concurrency data processing `pipeline` supports `NPU`, `GPU` and `CPU`. The `CPU` usage is reduced by 30%. For details, see [Optimizing Data Processing](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html). - -
- -**Q: What is the MindSpore IR design concept?** - -A: Function expression: All expressions are functions, and differentiation and automatic parallel analysis are easy to implement without side effect. `JIT` compilation capability: The graph-based IR, control flow dependency, and data flow are combined to balance the universality and usability. Turing-complete IR: More flexible syntaxes are provided for converting `Python`, such as recursion. - -
- -**Q: Will MindSpore provide a reinforcement learning framework?** - -A: This function is at the design stage. You can contribute ideas and scenarios and participate in the construction. Thank you. - -
- -**Q: As Google Colab and Baidu AI Studio provide free `GPU` computing power, does MindSpore provide any free computing power?** - -A: If you cooperate with MindSpore in papers and scientific research, you can obtain free cloud computing power. If you want to simply try it out, we can also provide online experience similar to that of Colab. - -
- -**Q: What are the advantages and features of MindSpore parallel model training?** - -A: In addition to data parallelism, MindSpore distributed training also supports operator-level model parallelism. The operator input tensor can be tiled and parallelized. On this basis, automatic parallelism is supported. You only need to write a single-device script to automatically tile the script to multiple nodes for parallel execution. - -
- -**Q: Has MindSpore implemented the anti-pooling operation similar to `nn.MaxUnpool2d`?** - -A: Currently, MindSpore does not provide anti-pooling APIs but you can customize the operator to implement the operation. For details, refer to [Custom Operators](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/custom_operator.html). - -
- -**Q: How does MindSpore implement semantic collaboration and processing? Is the popular Formal Concept Analysis (FCA) used?** - -A: The MindSpore framework does not support FCA. For semantic models, you can call third-party tools to perform FCA in the data preprocessing phase. MindSpore supports Python therefore `import FCA` could do the trick. - -
- -**Q: Does MindSpore have any plan or consideration on the edge and device when the training and inference functions on the cloud are relatively mature?** - -A: MindSpore is a unified cloud-edge-device training and inference framework. Edge has been considered in its design, so MindSpore can perform inference at the edge. The open-source version will support Ascend 310-based inference. The optimizations supported in the current inference stage include quantization, operator fusion, and memory overcommitment. - -
- -**Q: How does MindSpore support automatic parallelism?** - -A: Automatic parallelism on CPUs and GPUs are being improved. You are advised to use the automatic parallelism feature on the Ascend 910 AI processor. Follow our open source community and apply for a MindSpore developer experience environment for trial use. - -
- -**Q: Does MindSpore have a module that can implement object detection algorithms as TensorFlow does?** - -A: The TensorFlow's object detection pipeline API belongs to the TensorFlow's Model module. After MindSpore's detection models are complete, similar pipeline APIs will be provided. - -
- -**Q: How do I migrate scripts or models of other frameworks to MindSpore?** - -A: For details about script or model migration, please visit the [MindSpore official website](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/migrate_3rd_scripts.html). - -
- -**Q: Does MindSpore provide open-source e-commerce datasets?** - -A: No. Please stay tuned for updates on the [MindSpore official website](https://www.mindspore.cn/en). - -
- -**Q:Can I encapsulate the Tensor data of MindSpore using numpy array?** - -A:No, all sorts of problems could arise. For example, `numpy.array(Tensor(1)).astype(numpy.float32)` will raise "ValueError: setting an array element with a sequence.". diff --git a/docs/faq/source_en/supported_operators.md b/docs/faq/source_en/supported_operators.md deleted file mode 100644 index 0e73e0baa65f2a8a6fde82f2e53445556be43fbf..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/supported_operators.md +++ /dev/null @@ -1,99 +0,0 @@ -# Supported Operators - -`Ascend` `GPU` `CPU` `Environmental Setup` `Beginner` `Intermediate` `Expert` - - - -**Q: What is the function of the `TransData` operator? Can the performance be optimized?** - -A: The `TransData` operator is used in the scenario where the data formats (such as NC1HWC0) used by interconnected operators on the network are inconsistent. In this case, the framework automatically inserts the `TransData` operator to convert the data formats into the same format and then performs computation. You can consider using the `amp` for mixed-precision training. In this way, some `FP32` operations and the invocation of some `TransData` operators can be reduced. - -
- -**Q: An error occurs when the `Concat` operator concatenates tuples containing multiple tensors. An error occurs when the number of `tensor list` elements entered is greater than or equal to 192. What is a better solution (running in dynamic mode) for `Concat` to concatenate tuples containing multiple Tensors?** - -A: The number of tensors to be concatenated at a time cannot exceed 192 according to the bottom-layer specifications of the Ascend operator. You can try to concatenate them twice. - -
- -**Q: When `Conv2D` is used to define convolution, the `group` parameter is used. Is it necessary to ensure that the value of `group` can be exactly divided by the input and output dimensions? How is the group parameter transferred?** - -A: The `Conv2d` operator has the following constraint: When the value of `group` is greater than 1, the value must be the same as the number of input and output channels. Do not use `ops.Conv2D`. Currently, this operator does not support a value of `group` that is greater than 1. Currently, only the `nn.Conv2d` API of MindSpore supports `group` convolution. However, the number of groups must be the same as the number of input and output channels. -The `Conv2D` operator function is as follows: - -```python -def __init__(self, - out_channel, - kernel_size, - mode=1, - pad_mode="valid", - pad=0, - stride=1, - dilation=1, - group=1, - data_format="NCHW"): -``` - -If the function contains a `group` parameter, the parameter will be transferred to the C++ layer by default. - -
- -**Q: Does MindSpore provide 3D convolutional layers?** - -A: 3D convolutional layers on Ascend are coming soon. Go to the [Operator List](https://www.mindspore.cn/doc/programming_guide/en/master/operator_list.html) on the official website to view the operators that are supported. - -
- -**Q: Does MindSpore support matrix transposition?** - -A: Yes. For details, see [mindspore.ops.Transpose](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Transpose.html#mindspore.ops.Transpose). - -
- -**Q: Can MindSpore calculate the variance of any tensor?** - -A: Currently, MindSpore does not have APIs or operators similar to variance which can directly calculate the variance of a `tensor`. However, MindSpore has sufficient small operators to support such operations. For details, see [class Moments(Cell)](https://www.mindspore.cn/doc/api_python/en/master/_modules/mindspore/nn/layer/math.html#Moments). - -
- -**Q: Why is data loading abnormal when MindSpore1.0.1 is used in graph data offload mode?** - -A: An operator with the `axis` attribute, for example, `ops.Concat(axis=1)((x1, x2))`, is directly used in `construct`. You are advised to initialize the operator in `__init__` as follows: - -```python -from mindspore import nn -import mindspore.ops as ops - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.concat = ops.Concat(axis=1) - def construct(self, x, y): - out = self.concat((x, y)) - return out -``` - -
- -**Q: When the `Tile` module in operations executes `__infer__`, the `value` is `None`. Why is the value lost?** - -A: The `multiples input` of the `Tile` operator must be a constant. (The value cannot directly or indirectly come from the input of the graph.) Otherwise, the `None` data will be obtained during graph composition because the graph input is transferred only during graph execution and the input data cannot be obtained during graph composition. - -
- -**Q: Compared with PyTorch, the `nn.Embedding` layer lacks the padding operation. Can other operators implement this operation?** - -A: In PyTorch, `padding_idx` is used to set the word vector in the `padding_idx` position in the embedding matrix to 0, and the word vector in the `padding_idx` position is not updated during backward propagation. -In MindSpore, you can manually initialize the weight corresponding to the `padding_idx` position of embedding to 0. In addition, the loss corresponding to `padding_idx` is filtered out through the mask operation during training. - -
- -**Q: What can I do if the LSTM example on the official website cannot run on Ascend?** - -A: Currently, the LSTM runs only on a GPU or CPU and does not support the hardware environment. You can click [MindSpore Operator List](https://www.mindspore.cn/doc/note/en/master/operator_list_ms.html) to view the supported operators. - -
- -**Q: When conv2d is set to (3,10), Tensor[2,2,10,10] and it runs on Ascend on ModelArts, the error message `FM_W+pad_left+pad_right-KW>=strideW` is displayed. However, no error message is displayed when it runs on a CPU. What should I do?** - -A: This is a TBE operator restriction that the width of x must be greater than that of the kernel. The CPU does not have this operator restriction. Therefore, no error is reported. diff --git a/docs/faq/source_en/usage_migrate_3rd.md b/docs/faq/source_en/usage_migrate_3rd.md deleted file mode 100644 index ffaec84bda5e74b9baad71a19bd99352a8b22166..0000000000000000000000000000000000000000 --- a/docs/faq/source_en/usage_migrate_3rd.md +++ /dev/null @@ -1,34 +0,0 @@ -# Migration from a Third-party Framework - - - -**Q:How do I load a pre-trained PyTorch model for fine-tuning on MindSpore?** - -A:Map parameters of PyTorch and MindSpore one by one. No unified conversion script is provided due to flexible network definitions. -Customize scripts based on scenarios. For details, see [Advanced Usage of Checkpoint](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/advanced_usage_of_checkpoint.html). - -
- -**Q:How do I convert a PyTorch `dataset` to a MindSpore `dataset`?** - -A:The custom dataset logic of MindSpore is similar to that of PyTorch. You need to define a `dataset` class containing `__init__`, `__getitem__`, and `__len__` to read your dataset, instantiate the class into an object (for example, `dataset/dataset_generator`), and transfer the instantiated object to `GeneratorDataset` (on MindSpore) or `DataLoader` (on PyTorch). Then, you are ready to load the custom dataset. MindSpore provides further `map`->`batch` operations based on `GeneratorDataset`. Users can easily add other custom operations to `map` and start `batch`. -The custom dataset of MindSpore is loaded as follows: - -```python -# 1. Perform operations such as data argumentation, shuffle, and sampler. -class Mydata: - def __init__(self): - np.random.seed(58) - self.__data = np.random.sample((5, 2)) - self.__label = np.random.sample((5, 1)) - def __getitem__(self, index): - return (self.__data[index], self.__label[index]) - def __len__(self): - return len(self.__data) -dataset_generator = Mydata() -dataset = ds.GeneratorDataset(dataset_generator, ["data", "label"], shuffle=False) -# 2. Customize data argumentation. -dataset = dataset.map(operations=pyFunc, {other_params}) -# 3. 
batch -dataset = dataset.batch(batch_size, drop_remainder=True) -``` diff --git a/docs/faq/source_zh_cn/_static/logo_notebook.png b/docs/faq/source_zh_cn/_static/logo_notebook.png deleted file mode 100644 index 18c2e29e4b73ee428f70253feffdd855fdf0c422..0000000000000000000000000000000000000000 Binary files a/docs/faq/source_zh_cn/_static/logo_notebook.png and /dev/null differ diff --git a/docs/faq/source_zh_cn/_static/logo_source.png b/docs/faq/source_zh_cn/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/faq/source_zh_cn/_static/logo_source.png and /dev/null differ diff --git a/docs/faq/source_zh_cn/backend_running.md b/docs/faq/source_zh_cn/backend_running.md deleted file mode 100644 index f547f331a4ea3ef14261669a3ef1f00c15759b0f..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/backend_running.md +++ /dev/null @@ -1,294 +0,0 @@ -# 后端运行类 - -`Ascend` `GPU` `CPU` `环境准备` `运行模式` `模型训练` `初级` `中级` `高级` - - - -**Q:请问`c_transforms`和`py_transforms`有什么区别,比较推荐使用哪个?** - -A:推荐使用`c_transforms`,因为纯C层执行,所以性能会更好。 - -原理:`c_transform`底层使用的是C版本`opencv/jpeg-turbo`进行的数据处理,`py_transform`使用的是Python版本的`Pillow`进行数据处理。 - -
- -**Q:MindSpore在NPU硬件平台进行多卡训练,自定义数据集如何给不同NPU传递不同数据?** - -A:使用`GeneratorDataset`的时候,可以使用`num_shards=num_shards`,`shard_id=device_id`参数来控制不同卡读取哪个分片的数据,`__getitem__`和`__len__`按全量数据集处理即可。 - -举例: - -```python -# 卡0: -ds.GeneratorDataset(..., num_shards=8, shard_id=0, ...) -# 卡1: -ds.GeneratorDataset(..., num_shards=8, shard_id=1, ...) -# 卡2: -ds.GeneratorDataset(..., num_shards=8, shard_id=2, ...) -... -# 卡7: -ds.GeneratorDataset(..., num_shards=8, shard_id=7, ...) -``` - -
- -**Q:如何查看模型参数量?** - -A:可以直接加载CheckPoint统计,可能额外统计了动量和optimizer中的变量,需要过滤下相关变量。 -您可以参考如下接口统计网络参数量: - -```python -def count_params(net): - """Count number of parameters in the network - Args: - net (mindspore.nn.Cell): Mindspore network instance - Returns: - total_params (int): Total number of trainable params - """ - total_params = 0 - for param in net.trainable_params(): - total_params += np.prod(param.shape) - return total_params -``` - -具体[脚本链接](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/research/cv/tinynet/src/utils.py)。 - -
- -**Q:如何构建图像的多标签MindRecord格式数据集?** - -A:数据Schema可以按如下方式定义:`cv_schema_json = {"label": {"type": "int32", "shape": [-1]}, "data": {"type": "bytes"}}` - -说明:label是一个数组,numpy类型,这里面可以存你说的 1, 1,0,1, 0, 1 这么多label值,这些label值对应同一个data,即:同一个图像的二进制值。 -可以参考[将数据集转换为MindRecord](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/convert_dataset.html#将数据集转换为MindRecord)教程。 - -
- -**Q:如何在训练过程中监控`loss`在最低的时候并保存训练参数?** - -A:可以自定义一个`Callback`。参考`ModelCheckpoint`的写法,此外再增加判断`loss`的逻辑: - -```python -class EarlyStop(Callback): -    def __init__(self): -        self.loss = None -    def step_end(self, run_context): -        loss = ****(get current loss) -        if self.loss is None or loss < self.loss: -            self.loss = loss -            # do save ckpt -``` - -
- -**Q:`mindspore/tests`下怎样执行单个`ut`用例?** - -A:`ut`用例通常需要基于debug版本的MindSpore包,官网并没有提供。可以基于源码使用`sh build.sh`编译,然后通过`pytest`指令执行,debug模式编包不依赖后端。编译选项`sh build.sh -t on`,用例执行可以参考`tests/runtest.sh`脚本。 - -
- -**Q:使用`nn.Conv2d`时,怎样获取期望大小的`feature map`?** - -A:`Conv2d shape`推导方法可以[参考这里](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.Conv2d.html#mindspore.nn.Conv2d),`Conv2d`的`pad_mode`改成`same`,或者可以根据`Conv2d shape`推导公式自行计算`pad`,想要使得`shape`不变,一般pad为`(kernel_size-1)//2`。 - -
- -**Q:MindSpore安装完成,执行训练时发现网络性能异常,权重初始化耗时过长,怎么办?** - -A:可能与环境中使用了`scipy 1.4`系列版本有关,通过`pip list | grep scipy`命令可查看scipy版本,建议改成MindSpore要求的`scipy`版本。版本第三方库依赖可以在`requirement.txt`中查看。 - -> 其中version替换为MindSpore具体的版本分支。 - -
- -**Q:使用MindSpore可以自定义一个可以返回多个值的loss函数?** - -A:自定义`loss function`后还需自定义`TrainOneStepCell`,实现梯度计算时`sens`的个数和`network`的输出个数相同。具体可参考: - -```python -net = Net() - -loss_fn = MyLoss() - -loss_with_net = MyWithLossCell(net, loss_fn) - -train_net = MyTrainOneStepCell(loss_with_net, optim) - -model = Model(net=train_net, loss_fn=None, optimizer=None) -``` - -
- -**Q:MindSpore如何实现早停功能?** - -A:可以自定义`callback`方法实现早停功能。 -例子:当loss降到一定数值后,停止训练。 - -```python -class EarlyStop(Callback): -    def __init__(self, control_loss=1): -        super(EarlyStop, self).__init__() -        self._control_loss = control_loss - -    def step_end(self, run_context): -        cb_params = run_context.original_args() -        loss = cb_params.net_outputs -        if loss.asnumpy() < self._control_loss: -            # Stop training -            run_context._stop_requested = True - -stop_cb = EarlyStop(control_loss=1) -model.train(epoch_size, ds_train, callbacks=[stop_cb]) -``` - -
- -**Q:请问自己制作的黑底白字`28*28`的数字图片,使用MindSpore训练出来的模型做预测,报错提示`wrong shape of image`是怎么回事?** - -A:首先MindSpore训练使用的灰度图MNIST数据集。所以模型使用时对数据是有要求的,需要设置为`28*28`的灰度图,就是单通道才可以。 - -
- -**Q:在Ascend平台上,执行用例有时候会报错`run task error`,如何获取更详细的日志帮助问题定位?** - -A:使用msnpureport工具设置device侧日志级别,工具位置在:`/usr/local/Ascend/driver/tools/msnpureport`。 - -- 全局级别: - -```bash -/usr/local/Ascend/driver/tools/msnpureport -g info -``` - -- 模块级别: - -```bash -/usr/local/Ascend/driver/tools/msnpureport -m SLOG:error -``` - -- Event级别: - -```bash -/usr/local/Ascend/driver/tools/msnpureport -e disable/enable -``` - -- 多device id级别: - -```bash -/usr/local/Ascend/driver/tools/msnpureport -d 1 -g warning -``` - -假设deviceID的取值范围是[0-7],`device0`-`device3`和`device4`-`device7`分别在一个os上。其中`device0`-`device3`共用一个日志配置文件;`device4`-`device7`共用一个配置文件。如果修改了`device0`-`device3`中的任意一个日志级别,其他`device`的日志级别也会被修改。如果修改了`device4`-`device7`中的任意一个日志级别,其他device的日志级别也会被修改。 - -`Driver`包安装以后(假设安装路径为/usr/local/HiAI,在Windows环境下,`msnpureport.exe`执行文件在C:\ProgramFiles\Huawei\Ascend\Driver\tools\目录下),假设用户在/home/shihangbo/目录下直接执行命令行,则Device侧日志被导出到当前目录下,并以时间戳命名文件夹进行存放。 - -
- -**Q:使用ExpandDims算子报错:`Pynative run op ExpandDims failed`。具体代码:** - -```python -context.set_context( -mode=context.GRAPH_MODE, -device_target='ascend') -input_tensor=Tensor(np.array([[2,2],[2,2]]),mindspore.float32) -expand_dims=ops.ExpandDims() -output=expand_dims(input_tensor,0) -``` - -A:这边的问题是选择了Graph模式却使用了PyNative的写法,所以导致报错,MindSpore支持两种运行模式,在调试或者运行方面做了不同的优化: - -- PyNative模式:也称动态图模式,将神经网络中的各个算子逐一下发执行,方便用户编写和调试神经网络模型。 - -- Graph模式:也称静态图模式或者图模式,将神经网络模型编译成一整张图,然后下发执行。该模式利用图优化等技术提高运行性能,同时有助于规模部署和跨平台运行。 - -用户可以参考[官网教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/debug_in_pynative_mode.html)选择合适、统一的模式和写法来完成训练。 - -
- -**Q:使用Ascend平台执行训练过程,出现报错:`Out of Memory!!! total[3212254720] (dynamic[0] memory poll[524288000]) malloc[32611480064] failed!` 如何解决?** - -A:此问题属于内存占用过多导致的内存不够问题,可能原因有两种: - -- `batch_size`的值设置过大。解决办法:将`batch_size`的值设置减小。 -- 引入了异常大的`Parameter`,例如单个数据shape为[640,1024,80,81],数据类型为float32,单个数据大小超过15G,这样差不多大小的两个数据相加时,占用内存超过3*15G,容易造成`Out of Memory`。解决办法:检查参数的`shape`,如果异常过大,减少shape。 -- 如果以上操作还是未能解决,可以上[官方论坛](https://bbs.huaweicloud.com/forum/forum-1076-1.html)发帖提出问题,将会有专门的技术人员帮助解决。 - -
- -**Q:MindSpore执行GPU分布式训练报错如下,如何解决:** - -```text -Loading libgpu_collective.so failed. Many reasons could cause this: -1.libgpu_collective.so is not installed. -2.nccl is not installed or found. -3.mpi is not installed or found -``` - -A:此问题为MindSpore动态加载集合通信库失败,可能原因如下: - -- 执行环境未安装分布式训练依赖的OpenMPI以及NCCL。 -- NCCL版本未更新至`v2.7.6`:MindSpore `v1.1.0`新增GPU P2P通信算子,该特性依赖于NCCL `v2.7.6`,若环境使用的NCCL未升级为此版本,则会引起加载失败错误。 - -
- -**Q:启动缓存服务器时,若提示找不到`libpython3.7m.so.1.0`文件,应如何处理?** - -A:尝试在虚拟环境下查找其路径并设置LD_LIBRARY_PATH变量: - -```shell -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{path_to_conda}/envs/{your_env_name}/lib -``` - -
- -**Q:缓存服务器异常关闭如何处理?** - -A:缓存服务器使用过程中,会进行IPC共享内存和socket文件等系统资源的分配。若允许溢出,在磁盘空间还会存在溢出的数据文件。一般情况下,如果通过`cache_admin --stop`命令正常关闭服务器,这些资源将会被自动清理。 - -但如果缓存服务器被异常关闭,例如缓存服务进程被杀等,用户需要首先尝试重新启动服务器,若启动失败,则应该依照以下步骤手动清理系统资源: - -- 删除IPC资源。 - - 1. 检查是否有IPC共享内存残留。 - - 一般情况下,系统会为缓存服务分配4GB的共享内存。通过以下命令可以查看系统中的共享内存块使用情况。 - - ```shell - $ ipcs -m - ------ Shared Memory Segments -------- - key shmid owner perms bytes nattch status - 0x61020024 15532037 root 666 4294967296 1 - ``` - - 其中,`shmid`为共享内存块id,`bytes`为共享内存块的大小,`nattch`为链接到该共享内存块的进程数量。`nattch`不为0表示仍有进程使用该共享内存块。在删除共享内存前,需要停止使用该内存块的所有进程。 - - 2. 删除IPC共享内存。 - - 找到对应的共享内存id,并通过以下命令删除。 - - ```shell - ipcrm -m {shmid} - ``` - -- 删除socket文件。 - - 一般情况下,socket文件位于`/tmp/mindspore/cache`。进入文件夹,执行以下命令删除socket文件。 - - ```shell - rm cache_server_p{port_number} - ``` - - 其中`port_number`为用户创建缓存服务器时指定的端口号,默认为50052。 - -- 删除溢出到磁盘空间的数据文件。 - - 进入启用缓存服务器时指定的溢出数据路径。通常,默认溢出路径为`/tmp/mindspore/cache`。找到路径下对应的数据文件夹并逐一删除。 - -
- -**Q:使用GPU版本MindSpore时,如何设置`DEVICE_ID`环境变量** - -A:MindSpore GPU模式一般无需设置`DEVICE_ID`环境变量,MindSpore会根据cuda环境变量`CUDA_VISIBLE_DEVICES`,自动选择可见的GPU设备。设置`CUDA_VISIBLE_DEVICES`环境变量后,则`DEVICE_ID`环境变量代表可见GPU设备的下标: - -- 执行`export CUDA_VISIBLE_DEVICES=1,3,5`后,`DEVICE_ID`应当被设置为`0`,`1`或`2`,若设置为`3`及以上,MindSpore会由于设备ID不合法而运行失败。 - -
diff --git a/docs/faq/source_zh_cn/conf.py b/docs/faq/source_zh_cn/conf.py deleted file mode 100644 index 95d7701759707ab95a3c199cd8a22e2e2cc1194d..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/conf.py +++ /dev/null @@ -1,62 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os - - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. 
-# -html_theme = 'sphinx_rtd_theme' - -html_search_language = 'zh' - -html_search_options = {'dict': '../../resource/jieba.txt'} - -html_static_path = ['_static'] \ No newline at end of file diff --git a/docs/faq/source_zh_cn/distributed_settings.md b/docs/faq/source_zh_cn/distributed_settings.md deleted file mode 100644 index 515265265d25c43ac1e91fcb24597c713c76f34f..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/distributed_settings.md +++ /dev/null @@ -1,20 +0,0 @@ -# 分布式配置类 - -`Ascend` `GPU` `分布式训练` `初级` `中级` `高级` - - - -**Q:基于Ascend环境需要配置通信配置文件,应该如何配置?** - -A:请参考MindSpore教程的基于Ascend分布式训练的[配置分布式环境变量](https://mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html#id4)部分。 - -
- -**Q:如何进行分布式多机多卡训练?** - -A:基于Ascend环境的,请参考MindSpore教程的基于Ascend分布式训练的[多机多卡训练](https://mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html#id20) 部分。 -基于GPU环境的,请参考MindSpore教程的基于GPU分布式训练的[运行多机脚本](https://mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_gpu.html#id8) 部分。 - -
- - diff --git a/docs/faq/source_zh_cn/index.rst b/docs/faq/source_zh_cn/index.rst deleted file mode 100644 index c38f1f82db55a23a9b878492a291ce2169756351..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/index.rst +++ /dev/null @@ -1,23 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 10:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore FAQ -================= - -.. toctree:: - :glob: - :maxdepth: 1 - - installation - supported_operators - network_models - platform_and_system - backend_running - usage_migrate_3rd - programming_language_extensions - supported_features - mindinsight_use - distributed_settings - inference \ No newline at end of file diff --git a/docs/faq/source_zh_cn/inference.md b/docs/faq/source_zh_cn/inference.md deleted file mode 100644 index bb8eeaedb3248d20a665957f221920ede96b5a1a..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/inference.md +++ /dev/null @@ -1,55 +0,0 @@ -# 推理类 - - - -## C++接口使用类 - -**Q:编译应用时报错`/usr/bin/ld: warning: libxxx.so, needed by libmindspore.so, not found`怎么办?** - -A:寻找缺少的动态库文件所在目录,添加该路径到环境变量`LD_LIBRARY_PATH`中,环境变量设置参考[Ascend 310 AI处理器上使用MindIR模型进行推理#编译推理代码](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_310_mindir.html#id6)。 - -
- -**Q:运行应用时出现`ModuleNotFoundError: No module named 'te'`怎么办?** - -A:首先确认环境安装是否正确,`te`、`topi`等whl包是否正确安装。如果用户环境中有多个Python版本,如Conda虚拟环境中,需`ldd name_of_your_executable_app`确认应用所链接的`libpython3.7m.so.1.0`是否与当前Python路径一致,如果不一致需要调整环境变量`LD_LIBRARY_PATH`顺序。 - -
- -**Q:运行应用时报错`error while loading shared libraries: libge_compiler.so: cannot open shared object file: No such file or directory`怎么办?** - -A:安装MindSpore所依赖的Ascend 310 AI处理器软件配套包时,`CANN`包不能安装`nnrt`版本,而是需要安装功能完整的`toolkit`版本。 - -## MindSpore Serving类 - -**Q:MindSpore Serving是否支持热更新,避免推理服务中断?** - -A:MindSpore Serving当前不支持热更新,需要用户重启;当前建议跑多个Serving服务,升级模型版本时,重启部分服务以避免服务中断。 - -
- -**Q:MindSpore Serving是否支持一个模型启动多个Worker,以支持多卡单模型并发?** - -A:MindSpore Serving暂未支持分流,即不支持一个模型启动多个Worker,这个功能正在开发中;当前建议跑多个Serving服务,通过对接多个Serving服务的服务器进行分流和负载均衡。另外,为了避免`master`和`worker`之间的消息转发,可以使用接口`start_servable_in_master`使`master`和`worker`执行在同一进程,实现Serving服务轻量级部署。 - -
- -**Q:MindSpore Serving的版本和MindSpore的版本如何配套?** - -A:MindSpore Serving配套相同版本号的MindSpore的版本,比如Serving `1.1.1`版本配套 MindSpore `1.1.1`版本。 - -
- -**Q:编译应用时报错`bash -p`方式和 `bash -e`方式的区别?** - -A:MindSpore Serving的编译和运行依赖MindSpore,Serving提供两种编译方式:一种指定已安装的MindSpore路径,即`bash -p {python site-packages}/mindspore/lib`,避免编译Serving时再编译MindSpore;另一种,编译Serving时,编译配套的MindSpore,Serving会将`-e`、`-V`和`-j`选项透传给MindSpore。 -比如,在Serving目录下,`bash -e ascend -V 910 -j32`: - -- 首先将会以`bash -e ascend -V 910 -j32`方式编译`third_party/mindspore`目录下的MindSpore; -- 其次,编译脚本将MindSpore编译结果作为Serving的编译依赖。 - -
- -**Q:运行应用时报错`libmindspore.so: cannot open shared object file: No such file or directory`怎么办?** - -A:首先,需要确认是否安装MindSpore Serving所依赖的MindSpore;其次,Serving 1.1需要配置`LD_LIBRARY_PATH`,显式指定`libmindspore.so`所在路径,`libmindspore.so`当前在MindSpore Python安装路径的`lib`目录下;Serving 1.2后不再需要显示指定`libmindspore.so`所在路径,Serving会基于MindSpore安装路径查找并追加配置`LD_LIBRARY_PATH`,用户不再需要感知。 diff --git a/docs/faq/source_zh_cn/installation.md b/docs/faq/source_zh_cn/installation.md deleted file mode 100644 index 05e0ea43afb165e0e1c5a85cb271ab59fbfbe6a8..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/installation.md +++ /dev/null @@ -1,196 +0,0 @@ -# 安装类 - -`Linux` `Windows` `Ascend` `GPU` `CPU` `环境准备` `初级` `中级` - - - -- [安装类](#安装类) - - [pip安装](#pip安装) - - [源码编译安装](#源码编译安装) - - [卸载](#卸载) - - [环境变量](#环境变量) - - [安装验证](#安装验证) - - - - - -## pip安装 - -**Q:安装MindSpore版本:GPU、CUDA 10.1、0.5.0-beta、Ubuntu-x86,出现问题:`cannot open shared object file:file such file or directory`。** - -A:从报错情况来看,是cublas库没有找到。一般的情况下是cublas库没有安装,或者是因为没有加入到环境变量中去。通常cublas是随着cuda以及驱动一起安装的,确认安装后把cublas所在的目录加入`LD_LIBRARY_PATH`环境变量中即可。 - -
- -**Q:使用pip安装时报错:`SSL:CERTIFICATE_VERIFY_FATLED`应该怎么办?** - -A:在pip安装命令后添加参数 `--trusted-host=ms-release.obs.cn-north-4.myhuaweicloud.com`重试即可。 - -
- -**Q:pip安装MindSpore对Python版本是否有特别要求?** - -A:MindSpore开发过程中用到了Python3.7+的新特性,因此建议您通过`conda`工具添加Python3.7.5的开发环境。 - -
- -**Q:MindSpore对protobuf版本是否有特别要求?** - -A:MindSpore默认安装protobuf的3.8.0版本,如果您本地已安装protobuf的3.12.0或更高版本,在使用pytest测试代码时日志中会产生很多告警,建议您使用命令`pip install protobuf==3.8.0`重新安装3.8.0版本。 - -
- -**Q:使用pip安装时报错`ProxyError(Cannot connect to proxy)`,应该怎么办?** - -A:此问题一般是代理配置问题,Ubuntu环境下可通过`export http_proxy={your_proxy}`设置代理;Windows环境可以在cmd中通过`set http_proxy={your_proxy}`进行代理设置。 - -
- -**Q:使用pip安装时提示错误,应该怎么办?** - -A:请执行`pip -V`查看是否绑定了Python3.7+。如果绑定的版本不对,建议使用`python3.7 -m pip install`代替`pip install`命令。 - -
- -**Q:MindSpore网站安装页面找不到MindInsight和MindArmour的whl包,无法安装怎么办?** - -A:您可以从[MindSpore网站下载地址](https://www.mindspore.cn/versions)下载whl包,通过`pip install`命令进行安装。 - -
- -**Q:MindSpore是否支持Nvidia GPU独立显卡+Windows操作系统的个人电脑?** - -A:目前MindSpore支持的情况是GPU+Linux与CPU+Windows的组合配置,Windows+GPU的支持还在开发中。 -如果希望在GPU+Windows的环境上运行,可以尝试使用WSL+docker的方式,操作思路: - -1. 以WSL方式安装起Ubuntu18.04,参考。 -2. 安装支持WSL的Nvidia驱动以及在WSL运行容器的环境部署,参考。 - - > 由于CUDA on WSL还是预览特性,注意参考链接里对Windows版本要求的说明,版本不够的需要做升级。 -3. 参考,取MindSpore-GPU镜像。如取MindSpore1.0.0版本容器,在WSL Ubuntu18.04中执行`docker pull mindspore/mindspore-gpu:1.0.0`运行容器: - - ```docker - docker run -it --runtime=nvidia mindspore/mindspore-gpu:1.0.0 /bin/bash - ``` - -详细步骤可以参考社区提供的实践[张小白教你安装Windows10的GPU驱动(CUDA和cuDNN)](https://bbs.huaweicloud.com/blogs/212446)。 -在此感谢社区成员[张辉](https://bbs.huaweicloud.com/community/usersnew/id_1552550689252345)的分享。 - -
- -## 源码编译安装 - -**Q:在Linux中已经安装了交叉编译工具,但是编译命令要怎么写呢?** - -A:arm64版本编译:`bash build.sh -I arm64`;arm32版本编译:`bash build.sh -I arm32`;注意要先设置环境变量,指定Android NDK路径:`export ANDROID_NDK=/path/to/android-ndk`,编译成功后,在output目录可以找到编译出的包。 - -
- -**Q:MindSpore安装:版本0.6.0-beta + Ascend 910 + Ubuntu_aarch64 + Python3.7.5,手动下载对应版本的whl包,编译并安装gmp6.1.2。其他Python库依赖已经安装完成,执行样例失败,报错显示找不到so文件。** - -A:`libdatatransfer.so`动态库是`fwkacllib/lib64`目录下的,请先在`/usr/local`目录find到这个库所在的路径,然后把这个路径加到`LD_LIBRARY_PATH`环境变量中,确认设置生效后,再执行。 - -
- -**Q:源码编译MindSpore过程时间过长,或时常中断该怎么办?** - -A:MindSpore通过submodule机制引入第三方依赖包,其中`protobuf`依赖包(v3.8.0)下载速度不稳定,建议您提前进行包缓存。 - -
- -**Q:如何改变第三方依赖库安装路径?** - -A:第三方依赖库的包默认安装在build/mindspore/.mslib目录下,可以设置环境变量MSLIBS_CACHE_PATH来改变安装目录,比如 `export MSLIBS_CACHE_PATH = ~/.mslib`。 - -
- -**Q:MindSpore要求的配套软件版本与Ubuntu默认版本不一致怎么办?** - -A:当前MindSpore只提供版本配套关系,需要您手动进行配套软件的安装升级。(**注明**:MindSpore要求Python3.7.5和gcc7.3,Ubuntu 16.04默认为Python3.5和gcc5,Ubuntu 18.04默认自带Python3.7.3和gcc7.4)。 - -
- -**Q:当源码编译MindSpore,提示`tclsh not found`时,应该怎么办?** - -A:当有此提示时说明要用户安装`tclsh`;如果仍提示缺少其他软件,同样需要安装其他软件。 - -
- -**Q:环境上安装了Python3.7.5,环境变量设置正确,编译MindSpore时仍然报错`Python3 not found`,应该怎么办?** - -A:可能是因为当前环境上的Python未包含动态库。编译MindSpore需要动态链接Python库,因此需要使用开启动态库编译选项的Python3.7.5,即在源码编译Python时使用`./configure --enable-shared`命令。 - -
- -**Q:编译失败后,应该清理哪些路径以确保上次失败的编译结果不会影响到下一次编译?** - -A:在编译MindSpore时,如果: - -1. 第三方组件下载或编译失败,例如icu4c的patch动作失败返回错误信息`Cmake Error at cmake/utils.cmake:301 (message): Failed patch:`,则进入编译目录下的`build/mindspore/.mslib`目录,或由`MSLIBS_CACHE_PATH`环境变量指定的第三方软件安装目录,并删除其中的对应软件。 - -2. 其他阶段编译失败,或打算删除上一次编译结果,完全重新编译时,直接删除`build`目录即可。 - -
- -## 卸载 - -**Q:如何卸载MindSpore?** - -A:执行命令`pip uninstall mindspore`可卸载MindSpore。 - -
- -## 环境变量 - -**Q:一些常用的环境变量设置,在新启动的终端窗口中需要重新设置,容易忘记应该怎么办?** - -A:常用的环境变量设置写入到`~/.bash_profile` 或 `~/.bashrc`中,可让环境变量设置在新启动的终端窗口中立即生效。 - -
- -**Q:Ascend AI处理器软件配套包与其他依赖软件已安装,但是执行MindSpore时提示`Cannot open shared objectfile: No such file or directory`该怎么办?** - -A:常见原因有两种:Ascend AI处理器软件配套包或固件/驱动包版本不正确,或没有安装在默认位置且未配置相应的环境变量。 - -1. 打开Ascend AI处理器软件配套包安装目录,默认`/usr/local/Ascend`下,各个子目录中的`version.info`文件,观察其版本号是否与当前使用的MindSpore版本一直,参照[安装页面](https://www.mindspore.cn/install/)中关于Ascend AI处理器软件配套包版本的描述。如果版本不配套,请更换软件包或MindSpore版本。 - -2. 检查Ascend AI处理器软件配套包与其他依赖软件是否安装在默认位置,MindSpore会尝试从默认安装位置`/usr/local/Ascend`自动加载,如果将Ascend软件包安装在自定义位置,请参照[安装页面](https://www.mindspore.cn/install/)页面的安装指南一栏设置环境变量。如果将其他依赖软件安装在自定义位置,请根据其位置关系设置`LD_LIBRARY_PATH`环境变量。 - -
- -## 安装验证 - -**Q:个人电脑CPU环境安装MindSpore后验证代码时报错:`the pointer[session] is null`,具体代码如下,该如何验证是否安装成功呢?** - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="Ascend") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x,y)) -``` - -A:CPU硬件平台安装MindSpore后测试是否安装成功,只需要执行命令:`python -c 'import mindspore'`,如果没有显示`No module named 'mindspore'`等错误即安装成功。问题中的验证代码仅用于验证Ascend平台安装是否成功。 - -
- -**Q:`Linux`平台下执行用例的时候会报错`sh:1:python:not found`或者由于链接到了Python2.7的版本中而报错`No module named mindspore._extends.remote`,该怎么处理?** - -A:遇到类似的问题,大多是由于Python的环境问题,可以通过如下方式检查Python环境是否是MindSpore运行时所需要的环境。 - -- 在终端窗口中输入`python`,检查以下进入Python交互环境中的版本信息,如果直接报错则是没有Python的软连接;如果进入的是非Python3.7版本的环境,则当前Python环境不是MindSpore运行所需要的。 -- 执行`sudo ln -sf /usr/bin/python3.7.x /usr/bin/python`创建Python的软连接,然后再检查执行。 - -
- -**Q: 在脚本中`import mindspore`之前import了其他三方库,提示如下错误(`/your_path/libgomp.so.1: cannot allocate memory in static TLS block`)该怎么解决?** - -A: 上述问题较为常见,当前有两种可行的解决方法,可任选其一: - -- 交换import的顺序,先`import mindspore`再import其他三方库。 -- 执行程序之前先添加环境变量(`export LD_PRELOAD=/your_path/libgomp.so.1`),其中`your_path`是上述报错提示的路径。 diff --git a/docs/faq/source_zh_cn/mindinsight_use.md b/docs/faq/source_zh_cn/mindinsight_use.md deleted file mode 100644 index 50d819d393758d2aa7826d8d7467361fd403b809..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/mindinsight_use.md +++ /dev/null @@ -1,40 +0,0 @@ -# 训练可视类 - -`Linux` `Ascend` `GPU` `环境准备` - - - -**Q:MindInsight启动失败并且提示:`ImportError: libcrypto.so.1.0.0: cannot open shared object file: No such file or directory` 如何处理?** - -A:需要在命令行中使用”export LD_LIBRARY_PATH=dir:$LD_LIBRARY_PATH”来导入LD_LIBRARY_PATH变量。 - -
- -**Q:MindInsight启动失败并且提示:`bash: mindinsight: command not found` 如何处理?** - -A:当使用Python源码编译安装在自定义路径下会出现该问题,pip安装MindInsight时可执行文件会安装在该路径下,若使用`echo $PATH`查询到的bash环境变量中没有该安装目录会导致系统找不到安装的可执行文件。需要在命令行中使用`export PATH=$PATH:$YourPythonPath$/bin`来导入PATH变量。 -(`$YourPythonPath$`请更换为你的安装路径)。注:该命令只在当前终端有效,若想永久有效请在`~/.bashrc`文件中加入该命令。 - -
- -**Q:卸载MindInsight后,在MindInsight的运行日志中出现:`No module named 'mindinsight'` 如何处理?** - -A:MindInsight启动后,会变成一个后台服务。卸载MindInsight后,已启动的MindInsight后台服务不会自行停止。 -当MindInsight后台服务启动新的进程加载新数据或者做其他操作时,则会触发`No module named 'mindinsight'`的异常信息,并记录到日志中。 - -此时可以通过下面两种方式进行处理: - -- 重新安装MindInsight,并使用`mindinsight stop --port `命令停止已启动的MindInsight后台服务。 -- 通过`kill -9 `命令,将MindInsight涉及的相关进程杀死。 - -
- -**Q:MindInsight成功启动后,在谷歌浏览器中访问时,提示:`ERR_UNSAFE_PORT` 如何处理?** - -A:谷歌浏览器内核禁止将某些端口作为`HTTP`服务,你需要在谷歌浏览器的属性中新增配置`--explicitly-allowed-ports=port`。或者,你可以更换端口或者更换为IE浏览器。 - -
- -**Q:在Ascend机器上启动Mindinsight并开启调试器后,训练脚本连接调试器时,提示:`Exeption calling application: Field number 0 is illegal` 如何处理?** - -A:说明安装的protobuf版本错误,需要安装正确版本的protobuf,安装方法请参照[安装python版本的proto](https://support.huaweicloud.com/instg-cli-cann/atlascli_03_0046.html)。 diff --git a/docs/faq/source_zh_cn/network_models.md b/docs/faq/source_zh_cn/network_models.md deleted file mode 100644 index 158c54155dd5fb6a09614a991fec34e267b97572..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/network_models.md +++ /dev/null @@ -1,271 +0,0 @@ -# 网络模型类 - -`数据处理` `环境准备` `模型导出` `模型训练` `初级` `中级` `高级` - - - -**Q:MindSpore中`model.train`的`dataset_sink_mode`参数该如何理解?** - -A:当`dataset_sink_mode=True`时,数据处理会和网络计算构成Pipeline方式,即:数据处理在逐步处理数据时,处理完一个`batch`的数据,会把数据放到一个队列里,这个队列用于缓存已经处理好的数据,然后网络计算从这个队列里面取数据用于训练,那么此时数据处理与网络计算就`Pipeline`起来了,整个训练耗时就是数据处理/网络计算耗时最长的那个。 - -当`dataset_sink_mode=False`时,数据处理会和网络计算构成串行的过程,即:数据处理在处理完一个`batch`后,把这个`batch`的数据传递给网络用于计算,在计算完成后,数据处理再处理下一个`batch`,然后把这个新的`batch`数据传递给网络用于计算,如此的循环往复,直到训练完。该方法的总耗时是数据处理的耗时+网络计算的耗时=训练总耗时。 - -
- -**Q:MindSpore能否支持按批次对不同尺寸的图片数据进行训练?** - -A:你可以参考yolov3对于此场景的使用,里面有对于图像的不同缩放,脚本见[yolo_dataset](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/yolov3_darknet53/src/yolo_dataset.py)。 - -
- -**Q:通过Hub可以使用GPU加载`vgg16`模型以及是否可以做迁移模型吗?** - -A:请手动修改规避,修改如下两点即可: - -```python -# 增加**kwargs参数:如下 -def vgg16(num_classes=1000, args=None, phase="train", **kwargs): -``` - -```python -# 增加**kwargs参数:如下 -net = Vgg(cfg['16'], num_classes=num_classes, args=args, batch_norm=args.batch_norm, phase=phase, **kwargs) -``` - -
- -**Q:如何得到VGG模型中间层特征?** - -A:你好,获取网络中间层的特征,其实跟具体框架没有太大关系了。`torchvison`里定义的`vgg`模型,可以通过`features`字段获取"中间层特征",`torchvison`的`vgg`源码如下: - -```python -class VGG(nn.Module): - - def __init__(self, features, num_classes=1000, init_weights=True): - super(VGG, self).__init__() - self.features = features - self.avgpool = nn.AdaptiveAvgPool2d((7, 7)) -``` - -在MindSpore的ModelZoo里定义的`vgg16`,可以通过`layers`字段获取,如下: - -```python -network = vgg16() -print(network.layers) -``` - -
- -**Q:使用MindSpore进行模型训练时,`CTCLoss`的输入参数有四个:`inputs`, `labels_indices`, `labels_values`, `sequence_length`,如何使用`CTCLoss`进行训练?** - -A:定义的`model.train`接口里接收的`dataset`可以是多个数据组成,形如(`data1`, `data2`, `data3`, ...),所以`dataset`是可以包含`inputs`,`labels_indices`,`labels_values`,`sequence_length`的信息的。只需要定义好相应形式的`dataset`,传入`model.train`里就可以。具体的可以了解下相应的[数据处理接口](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html) - -
- -**Q:模型转移时如何把PyTorch的权重加载到MindSpore中?** - -A:首先输入PyTorch的`pth`文件,以`ResNet-18`为例,MindSpore的网络结构和PyTorch保持一致,转完之后可直接加载进网络,这边参数只用到`BN`和`Conv2D`,若有其他层`ms`和PyTorch名称不一致,需要同样的修改名称。 - -
- -**Q:模型已经训练好,如何将模型的输出结果保存为文本或者`npy`的格式?** - -A:您好,我们网络的输出为`Tensor`,需要使用`asnumpy()`方法将`Tensor`转换为`numpy`,再进行下一步保存。具体可参考: - -```python -out = net(x) - -np.save("output.npy", out.asnumpy()) -``` - -
- -**Q:使用MindSpore做分割训练,必须将数据转为MindRecords吗?** - -A:[build_seg_data.py](https://github.com/mindspore-ai/mindspore/blob/master/model_zoo/official/cv/deeplabv3/src/data/build_seg_data.py)是将数据集生成MindRecord的脚本,可以直接使用/适配下你的数据集。或者如果你想尝试自己实现数据集的读取,可以使用`GeneratorDataset`自定义数据集加载。 - -[GenratorDataset 示例](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html#id5) - -[GenratorDataset API说明](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset) - -
- -**Q:MindSpore可以读取TensorFlow的ckpt文件吗?** - -A:MindSpore的`ckpt`和TensorFlow的`ckpt`格式是不通用的,虽然都是使用`protobuf`协议,但是`proto`的定义是不同的。当前MindSpore不支持读取TensorFlow或PyTorch的`ckpt`文件。 - -
- -**Q:如何不将数据处理为MindRecord格式,直接进行训练呢?** - -A:可以使用自定义的数据加载方式 `GeneratorDataset`,具体可以参考[数据集加载](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html)文档中的自定义数据集加载。 - -
- -**Q:MindSpore现支持直接读取哪些其他框架的模型和哪些格式呢?比如PyTorch下训练得到的pth模型可以加载到MindSpore框架下使用吗?** - -A: MindSpore采用protbuf存储训练参数,无法直接读取其他框架的模型。对于模型文件本质保存的就是参数和对应的值,可以用其他框架的API将参数读取出来之后,拿到参数的键值对,然后再加载到MindSpore中使用。比如想用其他框架训练好的ckpt文件,可以先把参数读取出来,再调用MindSpore的`save_checkpoint`接口,就可以保存成MindSpore可以读取的ckpt文件格式了。 - -
- -**Q:用MindSpore训练出的模型如何在Ascend 310上使用?可以转换成适用于HiLens Kit用的吗?** - -A:Ascend 310需要运行专用的OM模型,先使用MindSpore导出ONNX或AIR模型,再转化为Ascend 310支持的OM模型。具体可参考[多平台推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_310.html)。可以,HiLens Kit是以Ascend 310为推理核心,所以前后两个问题本质上是一样的,需要转换为OM模型. - -
- -**Q:MindSpore如何进行参数(如dropout值)修改?** - -A:在构造网络的时候可以通过 `if self.training: x = dropput(x)`,验证的时候,执行前设置`network.set_train(mode_false)`,就可以不适用dropout,训练时设置为True就可以使用dropout。 - -
- -**Q:从哪里可以查看MindSpore训练及推理的样例代码或者教程?** - -A:可以访问[MindSpore官网教程训练](https://www.mindspore.cn/tutorial/training/zh-CN/master/index.html)和[MindSpore官网教程推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/index.html)。 - -
- -**Q:MindSpore支持哪些模型的训练?** - -A:MindSpore针对典型场景均有模型训练支持,支持情况详见[Release note](https://gitee.com/mindspore/mindspore/blob/master/RELEASE.md#)。 - -
- -**Q:MindSpore有哪些现成的推荐类或生成类网络或模型可用?** - -A:目前正在开发Wide & Deep、DeepFM、NCF等推荐类模型,NLP领域已经支持Bert_NEZHA,正在开发MASS等模型,用户可根据场景需要改造为生成类网络,可以关注[MindSpore Model Zoo](https://gitee.com/mindspore/mindspore/tree/master/model_zoo)。 - -
- -**Q:MindSpore模型训练代码能有多简单?** - -A:除去网络定义,MindSpore提供了Model类的接口,大多数场景只需几行代码就可完成模型训练。 - -
- -**Q:如何使用MindSpore拟合$f(x)=a \times sin(x)+b$这类函数?** - -A:以下拟合案例是基于MindSpore线性拟合官方案例改编而成。 - -```python -# The fitting function is:f(x)=2*sin(x)+3. -import numpy as np -from mindspore import dataset as ds -from mindspore.common.initializer import Normal -from mindspore import nn, Model, context -from mindspore.train.callback import LossMonitor - -context.set_context(mode=context.GRAPH_MODE, device_target="CPU") - - def get_data(num, w=2.0, b=3.0): - # f(x)=w * sin(x) + b - # f(x)=2 * sin(x) +3 - for i in range(num): - x = np.random.uniform(-np.pi, np.pi) - noise = np.random.normal(0, 1) - y = w * np.sin(x) + b + noise - yield np.array([np.sin(x)]).astype(np.float32), np.array([y]).astype(np.float32) - -def create_dataset(num_data, batch_size=16, repeat_size=1): - input_data = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data','label']) - input_data = input_data.batch(batch_size) - input_data = input_data.repeat(repeat_size) - return input_data - -class LinearNet(nn.Cell): - def __init__(self): - super(LinearNet, self).__init__() - self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02)) - - def construct(self, x): - x = self.fc(x) - return x - -if __name__ == "__main__": - num_data = 1600 - batch_size = 16 - repeat_size = 1 - lr = 0.005 - momentum = 0.9 - - net = LinearNet() - net_loss = nn.loss.MSELoss() - opt = nn.Momentum(net.trainable_params(), lr, momentum) - model = Model(net, net_loss, opt) - - ds_train = create_dataset(num_data, batch_size=batch_size, repeat_size=repeat_size) - - model.train(1, ds_train, callbacks=LossMonitor(), dataset_sink_mode=False) - - print(net.trainable_params()[0], "\n%s" % net.trainable_params()[1]) -``` - -
- -**Q:如何使用MindSpore拟合$f(x)=ax^2+bx+c$这类的二次函数?** - -A:以下代码引用自MindSpore的官方教程的[代码仓](https://gitee.com/mindspore/docs/blob/master/tutorials/tutorial_code/linear_regression.py) - -在以下几处修改即可很好的拟合$f(x)=ax^2+bx+c$: - -1. 数据集生成。 -2. 拟合网络。 -3. 优化器。 - -修改的详细信息如下,附带解释。 - -```python -# Since the selected optimizer does not support CPU, so the training computing platform is changed to GPU, which requires readers to install the corresponding GPU version of MindSpore. -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -# Assuming that the function to be fitted this time is f(x)=2x^2+3x+4, the data generation function is modified as follows: -def get_data(num, a=2.0, b=3.0 ,c = 4): - for i in range(num): - x = np.random.uniform(-10.0, 10.0) - noise = np.random.normal(0, 1) - # The y value is generated by the fitting target function ax^2+bx+c. - y = x * x * a + x * b + c + noise - # When a*x^2+b*x+c is fitted, a and b are weight parameters and c is offset parameter bias. The training data corresponding to the two weights are x^2 and x respectively, so the data set generation mode is changed as follows: - yield np.array([x*x, x]).astype(np.float32), np.array([y]).astype(np.float32) - -def create_dataset(num_data, batch_size=16, repeat_size=1): - input_data = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data','label']) - input_data = input_data.batch(batch_size) - input_data = input_data.repeat(repeat_size) - return input_data - -class LinearNet(nn.Cell): - def __init__(self): - super(LinearNet, self).__init__() - # Because the full join function inputs two training parameters, the input value is changed to 2, the first Nomral(0.02) will automatically assign random weights to the input two parameters, and the second Normal is the random bias. 
- self.fc = nn.Dense(2, 1, Normal(0.02), Normal(0.02)) - - def construct(self, x): - x = self.fc(x) - return x - -if __name__ == "__main__": - num_data = 1600 - batch_size = 16 - repeat_size = 1 - lr = 0.005 - momentum = 0.9 - - net = LinearNet() - net_loss = nn.loss.MSELoss() - # RMSProp optimalizer with better effect is selected for quadratic function fitting, Currently, Ascend and GPU computing platforms are supported. - opt = nn.RMSProp(net.trainable_params(), learning_rate=0.1) - model = Model(net, net_loss, opt) - - ds_train = create_dataset(num_data, batch_size=batch_size, repeat_size=repeat_size) - model.train(1, ds_train, callbacks=LossMonitor(), dataset_sink_mode=False) - - print(net.trainable_params()[0], "\n%s" % net.trainable_params()[1]) -``` - -
- -**Q:在使用ckpt或导出模型的过程中,报Protobuf内存限制错误,如何处理?** - -A:当单条Protobuf数据过大时,因为Protobuf自身对数据流大小的限制,会报出内存限制的错误。这时可通过设置环境变量`PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python`解除限制。 diff --git a/docs/faq/source_zh_cn/platform_and_system.md b/docs/faq/source_zh_cn/platform_and_system.md deleted file mode 100644 index 621806890f394ab079b7d35c0851c5cfd58ecd39..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/platform_and_system.md +++ /dev/null @@ -1,75 +0,0 @@ -# 平台系统类 - -`Linux` `Windows` `Ascend` `GPU` `CPU` `硬件支持` `初级` `中级` - - - -**Q:PyNative模式和Graph模式的区别?** - -A: 在使用效率上,两个模式使用的算子是一致的,因此相同的网络和算子,分别在两个模式下执行时,精度效果是一致的。由于执行机理的差异,网络的执行性能是会不同的,并且在理论上,MindSpore提供的算子同时支持PyNative模式和Graph模式; - -在场景使用方面,Graph模式需要一开始就构建好网络结构,然后框架做整图优化和执行,对于网络固定没有变化,且需要高性能的场景比较适合; - -在不同硬件(`Ascend`、`GPU`和`CPU`)资源上都支持这两种模式; - -代码调试方面,由于是逐行执行算子,因此用户可以直接调试Python代码,在代码中任意位置打断点查看对应算子`/api`的输出或执行结果。而Graph模式由于在构造函数里只是完成网络构造,实际没有执行,因此在`construct`函数里打断点是无法获取对应算子的输出,而只能等整网执行中指定对应算子的输出打印,在网络执行完成后进行查看。 - -
- -**Q:使用PyNative模式能够进行迁移学习?** - -A: PyNative模式是兼容迁移学习的,更多的教程信息,可以参考[预训练模型加载代码详解](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/cv_mobilenetv2_fine_tune.html#id7)。 - -
- -**Q:MindSpore只能在华为自己的`NPU`上跑么?** - -A: MindSpore同时支持华为自己的`Ascend NPU`、`GPU`与`CPU`,是支持异构算力的。 - -
- -**Q:MindSpore在Ascend 310上是否可以转AIR模型?** - -A:Ascend 310不能导出AIR,需要在Ascend 910加载训练好的checkpoint后,导出AIR,然后在Ascend 310转成OM模型进行推理。Ascend 910的安装方法可以参考官网MindSpore[安装指南](https://www.mindspore.cn/install)。 - -
- -**Q:MindSpore导出AIR模型对单个Tensor输入大小有什么限制?** - -A:对于单个Tensor的输入,Tensor大小不能超过2GB,否则导出AIR模型会产生错误。 - -
- -**Q:我用MindSpore在GPU上训练的网络脚本可以不做修改直接在NPU上进行训练么?** - -A:可以的,MindSpore面向NPU/GPU/CPU提供统一的API,在算子支持的前提下,网络脚本可以不做修改直接跨平台运行。 - -
- -**Q:安装运行MindSpore时,是否要求平台有GPU、NPU等计算单元?需要什么硬件支持?** - -A:MindSpore当前支持CPU/GPU/Ascend /NPU。目前笔记本电脑或者有GPU的环境,都可以通过Docker镜像来试用。当前MindSpore Model Zoo中有部分模型已经支持GPU的训练和推理,其他模型也在不断地进行完善。在分布式并行训练方面,MindSpore当前支持GPU多卡训练。你可以通过[RoadMap](https://www.mindspore.cn/doc/note/zh-CN/master/roadmap.html)和项目[Release note](https://gitee.com/mindspore/mindspore/blob/master/RELEASE.md#)获取最新信息。 - -
- -**Q:针对异构计算单元的支持,MindSpore有什么计划?** - -A:MindSpore提供了可插拔式的设备管理接口,其他计算单元(比如FPGA)可快速灵活地实现与MindSpore的对接,欢迎您参与社区进行异构计算后端的开发工作。 - -
- -**Q:MindSpore与ModelArts是什么关系,在ModelArts中能使用MindSpore吗?** - -A:ModelArts是华为公有云线上训练及推理平台,MindSpore是华为深度学习框架,可以查阅[MindSpore官网教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/use_on_the_cloud.html),教程中详细展示了用户如何使用ModelArts来做MindSpore的模型训练。 - -
- -**Q:MindSpore是否支持Windows 10?** - -A:MindSpore CPU版本已经支持在Windows 10系统中安装,具体安装步骤可以查阅[MindSpore官网教程](https://www.mindspore.cn/install/)。 - -
- -**Q:Ascend硬件平台,在个人的Conda环境中,有时候出现报错RuntimeError: json.exception.parse_error.101 parse error at line 1, column 1: syntax error while parsing value - invalid literal; last read: 'T',该怎么处理?** - -A:出现这种类型的报错,大概率是run包更新后个人的Conda环境中没有更新te或topi或hccl工具包,可以将当前Conda环境中的上述几个工具包卸载,然后使用如下命令再重新安装:`pip install /usr/local/Ascend/fwkacllib/lib64/{te/topi/hccl}*any.whl`。 \ No newline at end of file diff --git a/docs/faq/source_zh_cn/programming_language_extensions.md b/docs/faq/source_zh_cn/programming_language_extensions.md deleted file mode 100644 index 3d2bcff84a572d1aa466c3bb84633ebf526f010c..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/programming_language_extensions.md +++ /dev/null @@ -1,15 +0,0 @@ -# 编程语言拓展类 - -`Python` `支持计划` - - - -**Q:最近出来的taichi编程语言有Python扩展,类似`import taichi as ti`就能直接用了,MindSpore是否也支持?** - -A:MindSpore支持Python原生表达,`import mindspore`相关包即可使用。 - -
- -**Q:MindSpore是否(计划)支持多语言扩展?** - -A:MindSpore目前支持Python扩展,针对C++、Rust、Julia等语言的支持正在开发中。 \ No newline at end of file diff --git a/docs/faq/source_zh_cn/supported_features.md b/docs/faq/source_zh_cn/supported_features.md deleted file mode 100644 index 59bcbdff50cb1f839c65b69694b6255f4bafc8ce..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/supported_features.md +++ /dev/null @@ -1,99 +0,0 @@ -# 特性支持类 - -`特性优势` `端侧推理` `功能模块` `推理工具` - - - -**Q:MindSpore serving是否支持热加载,避免推理服务中断?** - -A:很抱歉,MindSpore当前还不支持热加载,需要重启。建议您可以跑多个Serving服务,切换版本时,重启部分。 - -
- -**Q:请问MindSpore支持梯度截断吗?** - -A:支持,可以参考[梯度截断的定义和使用](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/nlp/transformer/src/transformer_for_train.py#L35)。 - -
- -**Q:如何在训练神经网络过程中对计算损失的超参数进行改变?** - -A:您好,很抱歉暂时还未有这样的功能。目前只能通过训练-->重新定义优化器-->训练,这样的过程寻找较优的超参数。 - -
- -**Q:第一次看到有专门的数据处理框架,能介绍下么?** - -A:MindData提供数据处理异构硬件加速功能,高并发数据处理`pipeline`同时支持`NPU/GPU/CPU`,`CPU`占用降低30%,点击查询[优化数据处理](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/optimize_data_processing.html)。 - -
- -**Q:MindSpore的IR设计理念是什么?** - -A:函数式:一切皆函数,易于微分实现;无副作用,易于实现自动并行化分析;`JIT`编译能力:图形IR,控制流依赖和数据流合一,平衡通用性/易用性;图灵完备的IR:更多的转换`Python`灵活语法,包括递归等。 - -
- -**Q:MindSpore会出强化学习框架么?** - -A:您好,感谢您关注MindSpore。当前还暂不支持,但正在处于设计阶段,欢迎您贡献想法和场景,参与建设,谢谢。 - -
- -**Q:谷歌Colab、百度AI Studio都有免费`GPU`算力提供,MindSore有免费算力提供么?** - -A:当前如果与MindSpore展开论文、科研合作是可以获得免费云算力支持的。如果只是想简单试用,也提供类似Colab的在线体验 - -
- -**Q:MindSpore并行模型训练的优势和特色有哪些?** - -A:MindSpore分布式训练除了支持数据并行,还支持算子级模型并行,可以对算子输入tensor进行切分并行。在此基础上支持自动并行,用户只需要写单卡脚本,就能自动切分到多个节点并行执行。 - -
- -**Q:请问MindSpore实现了反池化操作了吗?类似于`nn.MaxUnpool2d` 这个反池化操作?** - -A:目前 MindSpore 还没有反池化相关的接口。如果用户想自己实现的话,可以通过自定义算子的方式自行开发算子,详情请见[自定义算子](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_operator.html)。 - -
- -**Q:MindSpore在语义协同和处理上是如何实现的?是否利用当前学术界流行的FCA理论?** - -A:MindSpore框架本身并不需要支持FCA。对于语义类模型,用户可以调用第三方的工具在数据预处理阶段做FCA数学分析。MindSpore本身支持Python语言,`import FCA`相关包即可使用。 - -
- -**Q:当前在云上MindSpore的训练和推理功能是比较完备的,至于边端场景(尤其是终端设备)MindSpore有什么计划?** - -A:MindSpore是端边云统一的训练和推理框架,支持将云侧训练的模型导出到Ascend AI处理器和终端设备进行推理。当前推理阶段支持的优化包括量化、算子融合、内存复用等。 - -
- -**Q:MindSpore自动并行支持情况如何?** - -A:自动并行特性对CPU GPU的支持还在完善中。推荐用户在Ascend 910 AI处理器上使用自动并行,可以关注开源社区,申请MindSpore开发者体验环境进行试用。 - -
- -**Q:MindSpore有没有类似基于TensorFlow实现的对象检测算法的模块?** - -A:TensorFlow的对象检测Pipeline接口属于TensorFlow Model模块。待MindSpore检测类模型完备后,会提供类似的Pipeline接口。 - -
- -**Q:其他框架的脚本或者模型怎么迁移到MindSpore?** - -A:关于脚本或者模型迁移,可以查询MindSpore官网中关于[网络迁移](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/migrate_3rd_scripts.html)的介绍。 - -
- -**Q:MindSpore是否附带开源电商类数据集?** - -A:暂时还没有,可以持续关注[MindSpore官网](https://www.mindspore.cn)。 - -
- -**Q:能否使用第三方库numpy array封装MindSpore的Tensor数据?** - -A:不能,可能出现各种问题。例如:`numpy.array(Tensor(1)).astype(numpy.float32)`的报错信息为"ValueError: settinng an array element with a sequence."。 diff --git a/docs/faq/source_zh_cn/supported_operators.md b/docs/faq/source_zh_cn/supported_operators.md deleted file mode 100644 index 80cb04a90a174bbd8e69a87c36a2c53755dd5b2c..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/supported_operators.md +++ /dev/null @@ -1,99 +0,0 @@ -# 算子支持类 - -`Ascend` `CPU` `GPU` `环境准备` `初级` `中级` `高级` - - - -**Q:`TransData`算子的功能是什么,能否优化性能?** - -A:`TransData`算子出现的场景是:如果网络中相互连接的算子使用的数据格式不一致(如NC1HWC0),框架就会自动插入`transdata`算子使其转换成一致的数据格式,然后再进行计算。 可以考虑训练的时候用我们的`amp`做混合精度,这样能减少一些`fp32`的运算,应该能减少一些`transdata`算子的调用。 - -
- -**Q:算子`Concat`拼接包含多个Tensor的元组出错,似乎传入的`tensor list`元素个数>=192就会报错。如果要`Concat`包含多个Tensor的元组,有什么较好的解决方案?** - -A:这个昇腾算子底层规格限制一次拼接的Tensor个数不能超过192个,可以尝试分开两次进行拼接。 - -
- -**Q:在使用`Conv2D`进行卷积定义的时候使用到了`group`的参数,`group`的值不是只需要保证可以被输入输出的维度整除即可了吗?`group`参数的传递方式是怎样的呢?** - -A:`Conv2D`算子是有这个约束条件的:当`group`大于1 时,其值必须要与输入输出的通道数相等。不要使用`ops.Conv2D`,这个算子目前不支持`group`>1。目前MindSpore只有`nn.Conv2D`接口支持组卷积,但是有`group`要与输入输出的通道数相等的约束。 -`Conv2D`算子的 - -```python -def __init__(self, - out_channel, - kernel_size, - mode=1, - pad_mode="valid", - pad=0, - stride=1, - dilation=1, - group=1, - data_format="NCHW"): -``` - -函数中带有`group`参数,这个参数默认就会被传到C++层。 - -
- -**Q:Convolution Layers有没有提供3D卷积?** - -A:目前MindSpore在Ascend上有支持3D卷积的计划。您可以关注官网的[支持列表](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/operator_list.html),等到算子支持后会在表中展示。 - -
- -**Q:MindSpore支持矩阵转置吗?** - -A:支持,请参考`mindspore.ops.Transpose`的[算子教程](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Transpose.html#mindspore.ops.Transpose)。 - -
- -**Q:请问MindSpore能算给定任意一个`tensor`的方差吗?** - -A:MindSpore目前暂无可以直接求出`tensor`方差的算子或接口。不过MindSpore有足够多的小算子可以支持用户实现这样的操作,你可以参考[class Moments(Cell)](https://www.mindspore.cn/doc/api_python/zh-CN/master/_modules/mindspore/nn/layer/math.html#Moments)来实现。 - -
- -**Q:使用MindSpore-1.0.1版本在图数据下沉模式加载数据异常是什么原因?** - -A:应该是`construct`中直接使用了带有`axis`属性的算子,比如`ops.Concat(axis=1)((x1, x2))`这种,建议把算子在`__init__`中初始化 像这样 - -```python -from mindspore import nn -import mindspore.ops as ops - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.concat = ops.Concat(axis=1) - def construct(self, x, y): - out = self.concat((x, y)) - return out -``` - -
- -**Q:`nn.Embedding`层与PyTorch相比缺少了`Padding`操作,有其余的算子可以实现吗?** - -A:在PyTorch中`padding_idx`的作用是将embedding矩阵中`padding_idx`位置的词向量置为0,并且反向传播时不会更新`padding_idx`位置的词向量。在MindSpore中,可以手动将embedding的`padding_idx`位置对应的权重初始化为0,并且在训练时通过`mask`的操作,过滤掉`padding_idx`位置对应的`Loss`。 - -
- -**Q:Operations中`Tile`算子执行到`__infer__`时`value`值为`None`,丢失了数值是怎么回事?** - -A:`Tile`算子的`multiples input`必须是一个常量(该值不能直接或间接来自于图的输入)。否则构图的时候会拿到一个`None`的数据,因为图的输入是在图执行的时候才传下去的,构图的时候拿不到图的输入数据。 -相关的资料可以看[静态图语法支持](https://www.mindspore.cn/doc/note/zh-CN/master/static_graph_syntax_support.html)。 - -
- -**Q:官网的LSTM示例在Ascend上跑不通。** - -A:目前LSTM只支持在GPU和CPU上运行,暂不支持硬件环境,您可以通过[MindSpore算子支持列表](https://www.mindspore.cn/doc/note/zh-CN/master/operator_list_ms.html)查看算子支持情况。 - -
- -**Q:conv2d设置为(3,10),Tensor[2,2,10,10],在ModelArts上利用Ascend跑,报错:`FM_W+pad_left+pad_right-KW>=strideW`,CPU下不报错。** - -A:这是TBE这个算子的限制,x的width必须大于kernel的width。CPU的这个算子没有这个限制,所以不报错。 diff --git a/docs/faq/source_zh_cn/usage_migrate_3rd.md b/docs/faq/source_zh_cn/usage_migrate_3rd.md deleted file mode 100644 index 251dd9547886d250cc1bd7c7abc9434c1a358a3c..0000000000000000000000000000000000000000 --- a/docs/faq/source_zh_cn/usage_migrate_3rd.md +++ /dev/null @@ -1,34 +0,0 @@ -# 第三方框架迁移使用类 - - - -**Q:请问想加载PyTorch预训练好的模型用于MindSpore模型finetune有什么方法?** - -A:需要把PyTorch和MindSpore的参数进行一一对应,因为网络定义的灵活性,所以没办法提供统一的转化脚本。 -需要根据场景书写定制化脚本,可参考[checkpoint高级用法](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/advanced_usage_of_checkpoint.html) - -
- -**Q:怎么将PyTorch的`dataset`转换成MindSpore的`dataset`?** - -A:MindSpore和PyTorch的自定义数据集逻辑是比较类似的,需要用户先定义一个自己的`dataset`类,该类负责定义`__init__`,`__getitem__`,`__len__`来读取自己的数据集,然后将该类实例化为一个对象(如:`dataset/dataset_generator`),最后将这个实例化对象传入`GeneratorDataset`(mindspore用法)/`DataLoader`(pytorch用法),至此即可以完成自定义数据集加载了。而mindspore在`GeneratorDataset`的基础上提供了进一步的`map`->`batch`操作,可以很方便的让用户在`map`内添加一些其他的自定义操作,并将其`batch`起来。 -对应的MindSpore的自定义数据集加载如下: - -```python -#1 Data enhancement,shuffle,sampler. -class Mydata: - def __init__(self): - np.random.seed(58) - self.__data = np.random.sample((5, 2)) - self.__label = np.random.sample((5, 1)) - def __getitem__(self, index): - return (self.__data[index], self.__label[index]) - def __len__(self): - return len(self.__data) -dataset_generator = Mydata() -dataset = ds.GeneratorDataset(dataset_generator, ["data", "label"], shuffle=False) -#2 Custom data enhancement -dataset = dataset.map(operations=pyFunc, {other_params}) -#3 batch -dataset = dataset.batch(batch_size, drop_remainder=True) -``` diff --git a/docs/migration_guide/Makefile b/docs/migration_guide/Makefile deleted file mode 100644 index 1eff8952707bdfa503c8d60c1e9a903053170ba2..0000000000000000000000000000000000000000 --- a/docs/migration_guide/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source_zh_cn -BUILDDIR = build_zh_cn - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
-%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/migration_guide/requirements.txt b/docs/migration_guide/requirements.txt deleted file mode 100644 index 1755dcd967228348c2f9cb29bac44580af862770..0000000000000000000000000000000000000000 --- a/docs/migration_guide/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -sphinx >= 2.2.1, <= 2.4.4 -recommonmark -sphinx-markdown-tables -sphinx_rtd_theme -numpy -nbsphinx -IPython -jieba diff --git a/docs/migration_guide/source_en/_static/logo_source.png b/docs/migration_guide/source_en/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_en/_static/logo_source.png and /dev/null differ diff --git a/docs/migration_guide/source_en/conf.py b/docs/migration_guide/source_en/conf.py deleted file mode 100644 index 540978e71d4fe921daf3a31b1a40ff597fdbc0ad..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_en/conf.py +++ /dev/null @@ -1,79 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -import os -import IPython -import re -import nbsphinx as nbs - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', - 'nbsphinx', - 'sphinx.ext.mathjax', - 'IPython.sphinxext.ipython_console_highlighting' -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_static_path = ['_static'] - -# Remove extra outputs for nbsphinx extension. 
-nbsphinx_source_re = re.compile(r"(app\.connect\('html-collect-pages', html_collect_pages\))") -nbsphinx_math_re = re.compile(r"(\S.*$)") -mod_path = os.path.abspath(nbs.__file__) -with open(mod_path, "r+", encoding="utf8") as f: - contents = f.readlines() - for num, line in enumerate(contents): - _content_re = nbsphinx_source_re.search(line) - if _content_re and "#" not in line: - contents[num] = nbsphinx_source_re.sub(r"# \g<1>", line) - if "mathjax_config = app.config" in line and "#" not in line: - contents[num:num+10] = [nbsphinx_math_re.sub(r"# \g<1>", i) for i in contents[num:num+10]] - break - f.seek(0) - f.writelines(contents) \ No newline at end of file diff --git a/docs/migration_guide/source_en/images/profiler_case1_data_processing.png b/docs/migration_guide/source_en/images/profiler_case1_data_processing.png deleted file mode 100644 index 8b778003e622e1d604d5c6d8187ababf22d57e94..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_en/images/profiler_case1_data_processing.png and /dev/null differ diff --git a/docs/migration_guide/source_en/images/profiler_case1_dataset_process_step_interval.png b/docs/migration_guide/source_en/images/profiler_case1_dataset_process_step_interval.png deleted file mode 100644 index fae8a50a9c4e4d40fac7bb58868c8270b614bfd4..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_en/images/profiler_case1_dataset_process_step_interval.png and /dev/null differ diff --git a/docs/migration_guide/source_en/images/profiler_case1_helper.png b/docs/migration_guide/source_en/images/profiler_case1_helper.png deleted file mode 100644 index 1a83214ed09f17a8b94951703458d38395841474..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_en/images/profiler_case1_helper.png and /dev/null differ diff --git a/docs/migration_guide/source_en/images/profiler_case1_long_step_interval.png 
b/docs/migration_guide/source_en/images/profiler_case1_long_step_interval.png deleted file mode 100644 index 687a7df60fa70826c5ce006e1f6dce895eba55ab..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_en/images/profiler_case1_long_step_interval.png and /dev/null differ diff --git a/docs/migration_guide/source_en/images/profiler_case2_long_fp_bp.png b/docs/migration_guide/source_en/images/profiler_case2_long_fp_bp.png deleted file mode 100644 index 70b74bd9a701ca286106d7859e30f50b233ea324..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_en/images/profiler_case2_long_fp_bp.png and /dev/null differ diff --git a/docs/migration_guide/source_en/images/profiler_case2_operator_details.png b/docs/migration_guide/source_en/images/profiler_case2_operator_details.png deleted file mode 100644 index 83793e64ef0ad05a394566470780d9861c89f63d..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_en/images/profiler_case2_operator_details.png and /dev/null differ diff --git a/docs/migration_guide/source_en/images/profiler_case3_timeline.png b/docs/migration_guide/source_en/images/profiler_case3_timeline.png deleted file mode 100644 index ef8fabd2bdf5de0d532220eec6fec191f069fbf5..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_en/images/profiler_case3_timeline.png and /dev/null differ diff --git a/docs/migration_guide/source_en/index.rst b/docs/migration_guide/source_en/index.rst deleted file mode 100644 index 6a4d7a767c54283b83283f833d897bb53b142060..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_en/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 11:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore Migration Guide -================================= - -.. 
toctree:: - :maxdepth: 1 diff --git a/docs/migration_guide/source_en/performance_optimization.md b/docs/migration_guide/source_en/performance_optimization.md deleted file mode 100644 index 88cd7e4e526de5d84f828f209256b91c8ae7b4b3..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_en/performance_optimization.md +++ /dev/null @@ -1,98 +0,0 @@ -# Using Performance Profiling Tool - - - -- [Quick Start](#quick-start) - - [Case 1: Long Step Interval](#case-1-long-step-interval) - - [Case 2: Long Forward and Backward Propagation Interval Caused by Operations](#case-2-long-forward-and-backward-propagation-interval-caused-by-operations) - - [Case 3: Long Forward and Backward Propagation Interval Caused by Operation Intervals](#case-3-long-forward-and-backward-propagation-interval-caused-by-operation-intervals) -- [FAQ](#faq) - - - - - -Profiler provides performance tuning ability for MindSpore, and provides easy-to-use and rich debugging functions in operator performance, iteration performance, data processing performance, etc., helping users quickly locate and solve performance problems. - -This chapter introduces the common methods and cases of performance tuning in neural networks, as well as the resolution of some common problems. - -## Quick Start - -Please refer to the tutorials for the function introduction and instructions of MindSpore Profiler. - -[Performance Profiling(Ascend)](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/performance_profiling_ascend.html) - -[Performance Profiling(GPU)](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/performance_profiling_gpu.html) - -This section will introduce the common use of MindSpore Profiler through three typical cases. - -### Case 1: Long Step Interval - -As you observed on the MindInsight UI page, the step interval in the ```Step Trace``` is too long, which may indicate that performance can be optimized in the dataset processing process. 
- -![long_step_interval](images/profiler_case1_long_step_interval.png) - -Figure 1: Long Step Interval in Step Trace - -Looking at the ```Data Preparation``` details at the bottom of the webpage, we can see that the ratio of full queues in ```Host Queue``` is low, which can be preliminarily determined that the performance related to dataset processing can be improved. - -It could be a bottleneck of dataset operators or TDT channel transmission. Switch to the ```Data Processing``` page to check it. - -![dataset_process_step_interval](images/profiler_case1_dataset_process_step_interval.png) - -Figure 2: Data Preparation Details -- Step Interval - -![data_processing](images/profiler_case1_data_processing.png) - -Figure 3: Data Preparation Details -- Data Processing - -By observing the ```Queue relationship between operators```, we find that the average usage of ```Queue_3``` and ```Queue_2``` is relatively inefficient. - -Therefore, it can be determined that we can adjust the corresponding dataset operators, ```ShuffleOp_3``` and ```BatchOp_2```, to achieve better performance. Then, you can adjust the training script based on this information. - -You can also refer to the suggestion provided by the ```Helper``` on the left side of the home page to optimize the training script. - -![profiler_helper](images/profiler_case1_helper.png) - -Figure 4: Helper - -### Case 2: Long Forward and Backward Propagation Interval Caused by Operations - -When you find that the running time of steps is too long, you can first check the ```Step Trace``` to see if the time distribution of each part is normal. - -![long_fp_bp](images/profiler_case2_long_fp_bp.png) - -Figure 5: Long FP/BP intervals in ```Step Trace``` - -From the ```Step Trace``` in Figure 5, it is observed that the forward and backward propagation time is too long. 
- -We can open the details page of ```Operator Time Consumption Ranking``` to further determine whether there are time-consuming operators, and judge whether there is optimization method to reduce the operator execution time. - -![operator_details](images/profiler_case2_operator_details.png) - -Figure 6: Finding operators that can be optimized via the details page of ```Operator Time Consumption Ranking``` - -### Case 3: Long Forward and Backward Propagation Interval Caused by Operation Intervals - -In case 2, we introduced the case of long execution time of operations. In addition, the long FP/BP time can also be caused by the long time intervals among operations. - -To determine if there are long operation intervals, we can observe details of ```Timeline```. - -Click the ```Download``` button on the ```Timeline``` card in the right bottom of the UI page to download the timeline data. - -After downloading is done, enter the address ```chrome://tracing``` in Google browser, upload or drag the downloaded file into the browser to load data. - -![timeline](images/profiler_case3_timeline.png) - -Figure 7: Finding intervals between operations that can be optimized in ```Timeline```. - -If it is found that there is a large interval between operations, you can adjust the training script to optimize this section to further improve the performance. - -## FAQ - -### Startup Failure - -If you encounter the error of startup failure, you can check whether you encountered one of the following situations: - -- There is no space left in the system, or the remaining space is too small to run profiling tool. -- Mismatched versions of MindSpore and Ascend AI processor software package. 
diff --git a/docs/migration_guide/source_zh_cn/_static/logo_source.png b/docs/migration_guide/source_zh_cn/_static/logo_source.png deleted file mode 100644 index 9932d67ab50871edb0c95979c4e948c812c7cdea..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/_static/logo_source.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/accuracy_optimization.md b/docs/migration_guide/source_zh_cn/accuracy_optimization.md deleted file mode 100644 index f6e2a047fe88247a390b6ad05d31edd00b3bcc18..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/accuracy_optimization.md +++ /dev/null @@ -1,485 +0,0 @@ -# 精度调优 - - - -- [精度调优](#精度调优) - - [常见精度问题分析](#常见精度问题分析) - - [精度问题的常见现象和原因](#精度问题的常见现象和原因) - - [常见现象](#常见现象) - - [常见原因](#常见原因) - - [精度问题checklist](#精度问题checklist) - - [常用的精度调试调优思路](#常用的精度调试调优思路) - - [精度调优准备](#精度调优准备) - - [检查代码和超参](#检查代码和超参) - - [检查模型结构](#检查模型结构) - - [检查输入数据](#检查输入数据) - - [检查loss曲线](#检查loss曲线) - - [检查精度是否达到预期](#检查精度是否达到预期) - - [参考文档](#参考文档) - - [可视化工具](#可视化工具) - - [数据问题处理](#数据问题处理) - - [超参问题处理](#超参问题处理) - - [模型结构问题处理](#模型结构问题处理) - - - - - -模型训练的最终结果是为了得到一个精度达标的模型,而在AI训练过程中有时会遇到loss(模型损失值)无法下降,或者发散,metrics(模型度量指标)达不到预期等,造成无法得到一个理想精度的模型,这时候需要去进行分析训练过程中出现了什么样的问题,针对性地采用包括调整数据、调整超参、重构模型结构等方法,去解决模型精度调优过程中遇到的各种问题。 - -本文介绍MindSpore团队总结的精度调优的方法,及解决精度调优过程中问题的分析思路,并且将MindSpore中用于精度调优的工具做分类介绍。 - -## 常见精度问题分析 - -在精度调优实践中,发现异常现象是比较容易的。但是,如果我们对异常现象不够敏感、不会解释,还是会同问题根因失之交臂。下面对常见精度问题进行了解释,能够提高你对异常现象的敏感度,帮你更快定位精度问题。 - -### 精度问题的常见现象和原因 - -模型精度问题和一般的软件问题不同,定位周期一般也更长。在通常的程序中,程序输出和预期不符意味着存在bug(编码错误)。但是对一个深度学习模型来说,模型精度达不到预期,有着更复杂的原因和更多的可能性。由于模型精度要经过长时间的训练才能看到最终结果,定位精度问题通常会花费更长的时间。 - -#### 常见现象 - -精度问题的直接现象一般体现在loss(模型损失值)和metrics(模型度量指标)上。loss现象一般表现为: - -1. loss跑飞,出现NAN,+/- INF,极大值。 -2. loss不收敛、收敛慢。 -3. loss为0等。 - -模型metrics一般表现为模型的accuracy、precision等metric达不到预期。 - -精度问题的直接现象较容易观察,借助MindInsight等可视化工具,还可以在梯度、权重、激活值等张量上观察到更多现象。常见现象如: - -1. 梯度消失。 -2. 梯度爆炸。 -3. 权重不更新。 -4. 权重变化过小。 -5. 权重变化过大。 -6. 
激活值饱和等。 - -#### 常见原因 - -对精度问题的原因分析,可以简单分为超参问题、模型结构问题、数据问题、算法设计问题等类别: - -- 超参问题。 - - 超参是模型和数据之间的润滑剂,超参的选择直接影响了模型对数据拟合效果的优劣。超参方面常见的问题如下: - - 1. 学习率设置不合理(过大、过小)。 - - 学习率可以说是模型训练中最重要的超参了。学习率过大,会导致loss震荡,不能收敛到预期值。学习率过小,会导致loss收敛慢。应根据理论和经验合理选择学习率策略。 - 2. loss_scale参数不合理。 - 3. 权重初始化参数不合理等。 - 4. epoch过大或过小。 - - epoch数目直接影响模型是欠拟合还是过拟合。epoch过小,模型未训练到最优解就停止了训练,容易欠拟合;epoch过大,模型训练时间过长,容易在训练集上过拟合,在测试集上达不到最优的效果。应根据训练过程中验证集上模型效果的变化情况,合理选择epoch数目。 - 5. batch size过大。 - - batch size过大。batch size过大时,模型可能不能收敛到较优的极小值上,从而降低模型的泛化能力。 - -- 数据问题。 - - 1. 数据集问题。 - - 数据集的质量决定了算法效果的上限,如果数据质量差,再好的算法也难以得到很好的效果。常见数据集问题如下: - - 1. 数据集中缺失值过多。 - - 数据集中存在缺失值、异常值,会导致模型学习到错误的数据关系。一般来说,应该从训练集中删除存在缺失值或异常值的数据,或者设置合理的默认值。数据标签错误是异常值的一种特殊情况,但是这种情况对训练的破坏性较大,应通过抽查输入模型的数据等方式提前识别这类问题。 - 2. 每个类别中的样本数目不均衡。 - - 数据集中每个类别的样本数目不均衡,是指数据集中每个类别中的样本数目有较大差距。例如,图像分类数据集(训练集)中,大部分类别都有1000个样本,但是“猫”这一类别只有100个样本,就可以认为出现了样本数目不均衡的情况。样本数目不均衡会导致模型在样本数目少的类别上预测效果差。如果出现了样本数目不均衡,应该酌情增加样本量小的类别的样本。一般来说,有监督深度学习算法在每类5000个标注样本的情况下将达到可以接受的性能,当数据集中有1000万个以上的已标注样本时,模型的表现将会超过人类。 - 3. 数据集中存在异常值。 - 4. 训练样本不足。 - - 训练样本不足则是指训练集相对于模型容量太小。训练样本不足会导致训练不稳定,且容易出现过拟合。如果模型的参数量同训练样本数量不成比例,应该考虑增加训练样本或者降低模型复杂度。 - - 5. 数据的标签错误。 - - 2. 数据处理问题。 - - 常见数据处理问题如下: - - 1. 常见数据处理算法问题。 - 2. 数据处理参数不正确等。 - 3. 未对数据进行归一化或标准化。 - - 未对数据进行归一化或标准化,是指输入模型的数据,各个维度不在一个尺度上。一般来说,模型要求各个维度的数据在-1到1之间,均值为0。如果某两个维度的尺度存在数量级的差异,可能会影响模型的训练效果,此时需要对数据进行归一化或标准化。 - - 4. 数据处理方式和训练集不一致。 - - 数据处理方式和训练集不一致是指在使用模型进行推理时,处理方式和训练集不一致。例如对图片的缩放、裁切、归一化参数和训练集不同,会导致推理时的数据分布和训练时的数据分布产生差异,可能会降低模型的推理精度。 - - > 一些数据增强操作(如随机旋转,随机裁切等)一般只应用在训练集,推理时无需进行数据增强。 - - 5. 没有对数据集进行shuffle。 - - 没有对数据集进行shuffle,是指训练时未对数据集进行混洗。未进行shuffle,或者混洗不充分,会导致总是以相同的数据顺序更新模型,严重限制了梯度优化方向的可选择性,导致收敛点的选择空间变少,容易过拟合。 - -- 算法问题。 - - - API使用问题。 - - 常见API使用问题如下: - - 1. 使用API没有遵循MindSpore约束。 - - 使用API未遵循MindSpore约束,是指使用的API和真实应用的场景不匹配。例如,在除数中可能含有零的场景,应该考虑使用DivNoNan而非Div以避免产生除零问题。又例如,MindSpore中,DropOut第一个参数为保留的概率,和其它框架正好相反(其它框架为丢掉的概率),使用时需要注意。 - - 2. 
构图时未遵循MindSpore construct约束。 - - 构图未遵循MindSpore construct约束,是指图模式下的网络未遵循MindSpore静态图语法支持中声明的约束。例如,MindSpore目前不支持对带键值对参数的函数求反向。完整约束请见[静态图语法支持](https://mindspore.cn/doc/note/zh-CN/master/static_graph_syntax_support.html)。 - - - 计算图结构问题。 - - 计算图结构是模型计算的载体,计算图结构错误一般是实现算法时代码写错了。计算图结构方面常见的问题有: - - 1. 算子使用错误(使用的算子不适用于目标场景)。 - - 2. 权重共享错误(共享了不应共享的权重)。 - - 权重共享错误,是指应该共享的权重未共享,或者不应该共享的权重共享了。通过MindInsight计算图可视,可以检查这一类问题。 - - 3. 节点连接错误(应该连接到计算图中的block未连接)。 - - 节点连接错误,是指计算图中各block的连接和设计不一致。如果发现节点连接错误,应该仔细检查脚本是否编写出错。 - - 4. 节点模式不正确。 - - 节点模式不正确,是指部分区分训练、推理模式的算子,需要按照实际情况设置模式。典型的包括: - - 1)`BatchNorm`算子,训练时应打开`BatchNorm`的训练模式,此开关在调用`net.set_train(True)`的时候会自动打开。 - - 2)`DropOut`算子,推理时不应使用`DropOut`算子。 - - 5. 权重冻结错误(冻结了不应冻结的权重)。 - - 权重冻结错误,是指应该冻结的权重未冻结,或者不应该冻结的权重冻结了。在MindSpore中,冻结权重可以通过控制传入优化器的`params`参数来实现。未传入优化器的Parameter将不会被更新。可以通过检查脚本,或者查看MindInsight中的参数分布图确认权重冻结情况。 - - 6. loss函数有误。 - - loss函数有误,是指loss函数算法实现错误,或者未选择合理的loss函数。例如,`BCELoss`和`BCEWithLogitsLoss`是不同的,应根据是否需要`sigmoid`函数合理选择。 - - 7. 优化器算法错误(如果自行实现了优化器)等。 - - - 权重初始化问题。 - - 权重初始值是模型训练的起点,不合理的初始值将会影响模型训练的速度和效果。权重初始化方面常见问题如下: - - 1. 权重初始值全部为0。 - - 权重初始值全为0,是指初始化后,权重值为0。这一般会导致权重更新问题,应使用随机值初始化权重。 - - 2. 
分布式场景不同节点的权重初始值不同。 - - 分布式场景不同节点的权重初始值不同,是指初始化后,不同节点上的同名权重初始值不同。正常来说,MindSpore会对梯度做全局AllReduce操作。确保每个step结尾,权重更新量是相同的,从而保证每个step中,各个节点上的权重一致。如果初始化时各节点的权重不同,就会导致不同节点的权重在接下来的训练中处于不同的状态,会直接影响模型精度。分布式场景应通过固定相同的随机数种子等方式,确保权重的初始值一致。 - -- 相同现象存在多个可能原因导致精度问题定位难。 - - 以loss不收敛为例(下图),任何可能导致激活值饱和、梯度消失、权重更新不正确的问题都可能导致loss不收敛。例如错误地冻结了部分权重,使用的激活函数和数据不匹配(使用relu激活函数,输入值全部小于0),学习率过小等原因都是loss不收敛的可能原因。 - - ![reson_for_accuracy_problem](./images/reson_for_accuracy_problem.png) - - 图1:相同现象存在多个可能原因导致精度问题定位难 - -#### 精度问题checklist - -| 常见数据集问题 | 常见超参问题 | 常见计算图结构问题 | 常见数据处理算法问题 | 常见API使用问题 | 常见权重初始化问题 | -| :------------ | :------------ | :---------------- | :------------------------ | :--------------------------------- | :-------------------------------- | -| 数据集中缺失值 | 学习率过大 | 权重共享错误 | 未对数据进行归一化或标准化 | 使用API没有遵循MindSpore约束 | 权重初始值全部为0 | -| 每个类别中的样 | 学习率过小 | 权重冻结错误 | 数据处理方式和训练集不一致 | 构图时未遵循MindSpore construct约束 | 分布式场景不同节点的权重初始值不同 | -| 数据集中存在异 | epoch过小 | 节点连接错误 | 没有对数据集进行shuffle | | | -| 训练样本不足 | epoch过大 | 节点模式不正确 | | | | -| 数据的标签错误 | batch size过大 | loss函数有误 | | | | - -### 常用的精度调试调优思路 - -遇到精度问题时,常用调试调优思路如下: - -1. 检查代码和超参。 - - 代码是精度问题的重要源头,检查代码重在对脚本和代码做检查,力争在源头发现问题;模型结构体现了MindSpore对代码的理解。 -2. 检查模型结构。 - - 检查模型结构重在检查MindSpore的理解和算法工程师的设计是否一致。 -3. 检查输入数据。 -4. 检查loss曲线。 - - 有的问题要到动态的训练过程中才会发现,检查输入数据和loss曲线正是将代码和动态训练现象结合进行检查。 -5. 检查精度是否达到预期。 - - 检查精度是否达到预期则是对整体精度调优过程重新审视,并考虑调整超参、解释模型、优化算法等调优手段。 - -检查模型结构和超参重在检查模型的静态特征;检查输入数据和loss曲线则是将静态特征和动态训练现象结合检查;检查精度是否达到预期则是对整体精度调优过程重新审视,并考虑调整超参、解释模型、优化算法等调优手段。此外,熟悉模型和工具也很重要,为了帮助用户高效实施上述的精度调优思路,MindInsight提供了配套的能力,如下图。 - -![accuracy_thought](./images/accuracy_thought.png) - -图2 精度问题定位思路及MindInsight对应能力 - -下面将分别介绍这些思路。 - -#### 精度调优准备 - -1. 回顾算法设计,全面熟悉模型。 - - 精度调优前,要先对算法设计做回顾,确保算法设计明确。如果参考论文实现模型,则应回顾论文中的全部设计细节和超参选择情况;如果参考其它框架脚本实现模型,则应确保有一个唯一的、精度能够达标的标杆脚本;如果是新开发的算法,也应将重要的设计细节和超参选择明确出来。这些信息是后面检查脚本步骤的重要依据。 - - 精度调优前,还要全面熟悉模型。只有熟悉了模型,才能准确理解MindInsight提供的信息,判断是否存在问题,查找问题源头。因此,花时间理解模型算法和结构、理解模型中算子的作用和参数的含义、理解模型所用优化器的特性等模型要素是很重要的。动手分析精度问题细节前,建议先带着问题加深对这些模型要素的了解。 - -2. 
熟悉[MindInsight](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/visualization_tutorials.html)工具。 - - 定位精度问题时,建议使用MindInsight的[Summary训练信息收集](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/summary_record.html)功能,在脚本中加入`SummaryCollector`。如下训练代码片段所示,初始化`SummaryCollector`并加入到`model.train`的`callbacks`参数中: - - ```python - # Init a SummaryCollector callback instance, and use it in model.train or model.eval - summary_collector = SummaryCollector(summary_dir='./summary_dir', collect_freq=1) - - # Note: dataset_sink_mode should be set to False, else you should modify collect freq in SummaryCollector - model.train(epoch=1, train_dataset=ds_train, callbacks=[summary_collector], dataset_sink_mode=False) - - ds_eval = create_dataset('./dataset_path') - model.eval(ds_eval, callbacks=[summary_collector]) - ``` - - 使用训练看板[可视化功能](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/dashboard.html)查看训练过程数据: - - ![mindinsight_dashboard](./images/mindinsight_dashboard.png) - - 图3 训练看板 - - 需要在线调试模型时,参考[使用调试器](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/debugger.html)功能。 - -#### 检查代码和超参 - -代码是精度问题的重要源头,超参问题、模型结构问题、数据问题、算法设计和实现问题会体现在脚本中,对脚本做检查是定位精度问题很有效率的手段。检查代码主要依赖代码走读,建议使用小黄鸭调试法:在代码走读的过程中,耐心地向没有经验的“小黄鸭”解释每一行代码的作用,从而激发灵感,发现代码问题。检查脚本时,要注意检查脚本实现(包括数据处理、模型结构、loss函数、优化器等实现)同设计是否一致,如果参考了其它脚本,要重点检查脚本实现同其它脚本是否一致,所有不一致的地方都应该有充分合理的理由,否则就应修改。 - -检查脚本时,也要关注超参的情况,超参问题主要体现为超参取值不合理,例如 - -1. 学习率设置不合理; -2. `loss_scale`参数不合理; -3. 权重初始化参数不合理等。 - -MindInsight可以辅助用户对超参做检查,大多数情况下,`SummaryCollector`会自动记录常见超参,您可以通过MindInsight的训练参数详情功能和溯源分析功能查看超参。结合MindInsight模型溯源分析模块和脚本中的代码,可以确认超参的取值,识别明显不合理的超参。如果有标杆脚本,建议同标杆脚本一一比对超参取值,如果有默认参数值,则默认值也应一并比对,以避免不同框架的参数默认值不同导致精度下降或者训练错误。 - -![model_hyper_param](./images/model_hyper_param.png) - -图4 通过MindInsight训练参数详情查看模型超参 - -#### 检查模型结构 - -在模型结构方面,常见的问题有: - -1. 算子使用错误(使用的算子不适用于目标场景,如应该使用浮点除,错误地使用了整数除)。 -2. 权重共享错误(共享了不应共享的权重)。 -3. 权重冻结错误(冻结了不应冻结的权重)。 -4. 节点连接错误(应该连接到计算图中的block未连接)。 -5. 
loss函数错误。 -6. 优化器算法错误(如果自行实现了优化器)等。 - -建议通过检查模型代码的方式对模型结构进行检查。此外,MindInsight也可以辅助用户对模型结构进行检查。大多数情况下,`SummaryCollector`会自动记录计算图,通过MindInsight,用户可以方便地对计算图进行查看。 - -![graph](./images/graph.png) - -图5 通过MindInsight训练看板中的计算图模块查看模型结构 - -模型脚本运行后,建议使用MindInsight计算图可视模块查看模型结构,加深对计算图的理解,确认模型结构符合预期。若有标杆脚本,还可以同标杆脚本对照查看计算图,检查当前脚本和标杆脚本的计算图是否存在重要的差异。 - -考虑到模型结构一般都很复杂,期望在这一步就能发现所有的模型结构问题是不现实的。只要通过可视化的模型结构加深对计算图的理解,发现明显的结构问题即可。后面的步骤中,发现了更明确的精度问题现象后,我们还会回到这一步重新检查确认。 - -> MindInsight支持查看`SummaryCollector`记录的计算图和MindSpore context的`save_graphs`参数导出的pb文件计算图。请参考我们教程中的[计算图可视化](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/dashboard.html)部分了解更多信息。 -> -> 脚本迁移工具可以将PyTorch、TensorFlow框架下编写的模型转换为MindSpore脚本,请访问教程[使用工具迁移第三方框架脚本](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/migrate_3rd_scripts_mindconverter.html)以了解更多信息。 - -#### 检查输入数据 - -通过检查输入模型的数据,可以结合脚本判断数据处理流水线和数据集是否存在问题。输入数据的常见问题有: - -1. 数据缺失值过多; -2. 每个类别中的样本数目不均衡; -3. 数据中存在异常值; -4. 数据标签错误; -5. 训练样本不足; -6. 未对数据进行标准化,输入模型的数据不在正确的范围内; -7. finetune和pretrain的数据处理方式不同; -8. 训练阶段和推理阶段的数据处理方式不同; -9. 数据处理参数不正确等。 - -MindInsight可以辅助用户对输入数据、数据处理流水线进行检查。大多数情况下,`SummaryCollector`会自动记录输入模型的数据(数据处理后的数据)和数据处理流水线参数。输入模型的数据会展示在“数据抽样”模块,数据处理流水线参数会展示在“数据图”模块和“数据溯源”模块。 - -通过MindInsight的数据抽样模块,可以检查输入模型的(数据处理流水线处理后的)数据。若数据明显不符合预期(例如数据被裁剪的范围过大,数据旋转的角度过大等),可以判断输入数据出现了一定的问题。 - -通过MindInsight的数据图和数据溯源模块,可以检查数据处理流水线的数据处理过程和具体参数取值,从而发现不合理的数据处理方法。 - -![data_imput](./images/data_imput.png) - -图6 通过MindInsight训练看板中的数据抽样模块查看输入模型的数据 - -![data_pipeline](./images/data_pipeline.png) - -图7 通过MindInsight训练看板中的数据图查看数据处理流水线 - -如果有标杆脚本,还可以同标杆脚本对照,检查数据处理流水线输出的数据是否和当前脚本的数据相同。例如,将数据处理流水线输出的数据保存为`npy`文件,然后使用`numpy.allclose`方法对标杆脚本和当前脚本的数据进行对比。如果发现不同,则数据处理阶段可能存在精度问题。 - -若数据处理流水线未发现问题,可以手动检查数据集是否存在分类不均衡、标签匹配错误、缺失值过多、训练样本不足等问题。 - -#### 检查loss曲线 - -很多精度问题会在网络训练过程中发现,常见的问题或现象有: - -1. 权重初始化不合理(例如初始值为0,初始值范围不合理等); -2. 权重中存在过大、过小值; -3. 权重变化过大; -4. 权重冻结不正确; -5. 权重共享不正确; -6. 激活值饱和或过弱(例如Sigmoid的输出接近1,Relu的输出全为0); -7. 梯度爆炸、消失; -8. 训练epoch不足; -9. 
算子计算结果存在NAN、INF; -10. 算子计算过程溢出(计算过程中的溢出不一定都是有害的)等。 - -上述这些问题或现象,有的可以通过loss表现出来,有的则难以观察。MindInsight提供了针对性的功能,可以观察上述现象、自动检查问题,帮助您更快定位问题根因。例如: - -- MindInsight的参数分布图模块可以展示模型权重随训练过程的变化趋势; -- MindInsight的张量可视模块可以展示张量的具体取值,对不同张量进行对比; -- [MindInsight调试器](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/debugger.html)内置了种类丰富,功能强大的检查能力,可以检查权重问题(例如权重不更新、权重更新过大、权重值过大/过小)、梯度问题(例如梯度消失、梯度爆炸)、激活值问题(例如激活值饱和或过弱)、张量全为0、NAN/INF、算子计算过程溢出等问题。 - -![loss](./images/loss.png) - -图8 通过MindInsight训练看板中的标量可视模块查看loss曲线 - -大多数情况下,`SummaryCollector`会自动记录模型的loss曲线,可以通过MindInsight的标量可视模块查看。loss曲线能够反映网络训练的动态趋势,通过观察loss曲线,可以得到模型是否收敛、是否过拟合等信息。 - -![histogram](./images/histogram.png) - -图9 通过MindInsight参数分布图可以查看训练过程中的权重变化情况 - -大多数情况下,`SummaryCollector`会自动记录模型参数变化情况(默认记录5个参数),可以通过MindInsight的参数分布图模块查看。如果想要记录更多参数的参数分布图,请参考[SummaryCollector](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.train.html#mindspore.train.callback.SummaryCollector)的`histogram_regular`参数,或参考[HistogramSummary](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/summary_record.html#summarysummarycollector)算子。 - -![tensor](./images/tensor.png) - -图10 通过MindInsight训练看板中的张量可视模块查看特定张量的具体取值 - -张量不会被自动记录,如果想要通过MindInsight查看张量的具体取值,请使用[TensorSummary](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/summary_record.html#summarysummarycollector)算子。 - -下面结合loss曲线的常见现象介绍使用MindInsight进行精度问题定位的思路。 - -1. loss跑飞。 - - loss跑飞是指loss中出现了NAN、+/-INF或者特别大的值。loss跑飞一般意味着算法设计或实现存在问题。定位思路如下: - - 1. 回顾脚本、模型结构和数据: - - 1)检查超参是否有不合理的特别大/特别小的取值; - - 2)检查模型结构是否实现正确,特别是检查loss函数是否实现正确; - - 3)检查输入数据中是否有缺失值、是否有特别大/特别小的取值。 - - 2. 观察训练看板中的参数分布图,检查参数更新是否有明显的异常。若发现参数更新异常,可以结合调试器定位参数更新异常的原因。 - 3. 
使用调试器模块对训练现场进行检查。 - - 1)若loss值出现NAN、+/-INF,可使用“检查张量溢出”条件添加全局监测点,定位首先出现NAN、+/-INF的算子节点,检查算子的输入数据是否会导致计算异常(例如除零)。若是算子输入数据的问题,则可以针对性地加入小数值epsilon避免计算异常。 - - 2)若loss值出现特别大的值,可使用“检查过大张量”条件添加全局监测点,定位首先出现大值的算子节点,检查算子的输入数据是否会导致计算异常。若输入数据本身存在异常,则可以继续向上追踪产生该输入数据的算子,直到定位出具体原因。 - - 3)若怀疑参数更新、梯度等方面存在异常,可使用“检查权重变化过大”、“检查梯度消失”、“检查梯度过大”等条件设置监测点,定位到异常的权重或梯度,然后结合张量检查视图,逐层向上对可疑的正向算子、反向算子、优化器算子等进行检查。 - -2. loss收敛慢。 - - loss收敛慢是指loss震荡、收敛速度慢,经过很长时间才能达到预期值,或者最终也无法收敛到预期值。相较于loss跑飞,loss收敛慢的数值特征不明显,更难定位。定位思路如下: - - 1. 回顾脚本、模型结构和数据: - - 1)检查超参是否有不合理的特别大/特别小的取值,特别是检查学习率是否设置过小或过大,学习率设置过小会导致收敛速度慢,学习率设置过大会导致loss震荡、不下降; - - 2)检查模型结构是否实现正确,特别是检查loss函数、优化器是否实现正确; - - 3)检查输入数据的范围是否正常,特别是输入数据的值是否过小。 - - 2. 观察训练看板中的参数分布图,检查参数更新是否有明显的异常。若发现参数更新异常,可以结合调试器定位参数更新异常的原因。 - 3. 使用调试器模块对训练现场进程检查。 - - 1)可使用“检查权重变化过小”、“检查未变化权重”条件对可训练(未固定)的权重进行监测,检查权重是否变化过小。若发现权重变化过小,可进一步检查学习率取值是否过小、优化器算法是否正确实现、梯度是否消失,并做针对性的修复。 - - 2)可使用“检查梯度消失”条件对梯度进行监测,检查是否存在梯度消失的现象。若发现梯度消失,可进一步向上检查导致梯度消失的原因。例如,可以通过“检查激活值范围”条件检查是否出现了激活值饱和、Relu输出为0等问题。 - - 4. 其它loss现象。 - - 若训练集上loss为0,一般说明模型出现了过拟合,请尝试增大训练集大小。 - -#### 检查精度是否达到预期 - -MindInsight可以为用户记录每次训练的精度结果。在`model.train`和`model.eval`中使用同一个`SummaryCollector`实例时,会自动记录模型评估(metrics)信息。训练结束后,可以通过MindInsight的模型溯源模块检查训练结果精度是否达标。 - -![lineage_model_chart](./images/lineage_model_chart.png) - -图11 通过MindInsight溯源分析功能查看模型评估信息 - -1. 检查训练集上的精度。 - - 若训练集上模型的loss值、metric值未达到预期,可以参考以下思路进行定位和优化: - - 1. 回顾代码、模型结构、输入数据和loss曲线, - - 1)检查脚本,检查超参是否有不合理的值 - - 2)检查模型结构是否实现正确 - - 3)检查输入数据是否正确 - - 4)检查loss曲线的收敛结果和收敛趋势是否存在异常 - - 2. 尝试使用MindInsight溯源分析功能优化超参。溯源分析页面会对超参的重要性进行分析,用户应优先考虑调整重要性高的超参,从散点图中可以观察出超参和优化目标的关系,从而针对性地调整超参取值。 - - ![lineage_model_chart_1](./images/lineage_model_chart_1.png) - - 图12 通过MindInsight溯源分析查看参数重要性 - - ![lineage_model_chart_2](./images/lineage_model_chart_2.png) - - 图13 通过MindInsight溯源分析以散点图形式查看参数和优化目标的关系 - - 3. 
尝试使用[MindInsight调参器](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/hyper_parameters_auto_tuning.html)优化超参。请注意,调参器通过执行多次完整训练的方式进行超参搜索,消耗的时间为网络一次训练用时的若干倍,如果网络一次训练耗时较长,则超参搜索将需要很长的时间。 - 4. 尝试使用[MindInsight模型解释](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/model_explanation.html)功能优化模型和数据集。模型解释功能可以通过显著图可视化展示对分类结果最重要的区域,还可以通过评分体系提示应该对哪类标签进行优化。 - 5. 尝试优化模型结构/算法。 - -2. 检查验证集上的精度。 - - 若训练集精度和验证集精度都未达到预期,则应首先参考上一节检查训练集精度。若训练集精度已达到预期,但是验证集精度未达到预期,大概率是模型出现了过拟合,处理思路如下: - - 1. 检查验证集评估脚本的评估逻辑有无错误。特别是数据处理方式是否与训练集一致,推理算法有误错误,是否加载了正确的模型checkpoint。 - 2. 增加数据量。包括增加样本量,进行数据增强和扰动等。 - 3. 正则化。常见的技术如参数范数惩罚(例如向目标函数中添加一个正则项),参数共享(强迫模型的两个组件共享相同的参数值),提前中止训练等。 - 4. 适当降低模型的规模。例如减少卷积层数等。 - -3. 检查测试集上的精度. - - 若验证集和测试集精度都未达到预期,则应首先参考上一节检查验证集精度。若验证集精度已达到预期,但是测试集精度未达到预期,考虑到测试集的数据是模型从未见过的新数据,原因一般是测试集的数据分布和训练集的数据分布不一致。处理思路如下: - - 1. 检查测试集评估脚本的评估逻辑有误错误。特别是数据处理方式是否与训练集一致,推理算法有误错误,是否加载了正确的模型CheckPoint。 - 2. 检查测试集中的数据质量,例如数据的分布范围是否明显同训练集不同,数据是否存在大量的噪声、缺失值或异常值。 - -## 参考文档 - -### 可视化工具 - -训练过程中进行可视化数据采集时,可参考资料[收集Summary数据](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/summary_record.html)。 - -训练过程中进行可视化数据分析时,可参考资料[训练看板](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/dashboard.html)和[溯源和对比看板](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/lineage_and_scalars_comparison.html)。 - -### 数据问题处理 - -对数据进行标准化、归一化、通道转换等操作,在图片数据处理上,增加随机视野图片,随机旋转度图片等,另外数据混洗、batch和数据倍增等操作,可参考[数据处理](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/pipeline.html)、[数据增强](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/augmentation.html)和[自动数据增强](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/auto_augmentation.html)。 - -### 超参问题处理 - 
-AI训练中的超参包含全局学习率,epoch和batch等,如果需要在不同的超参下,训练过程进行可视化时,可参考资料:[可视化的超参调优](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/hyper_parameters_auto_tuning.html);如果需要设置动态学习率超参时,可参考资料:[学习率的优化算法](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/optim.html?#id3)。 - -### 模型结构问题处理 - -一般的处理模型结构问题,需要用到的操作有:模型结构的重构,选择合适的优化器或者损失函数等。 - -需要重构模型结构时,可参考资料:[Cell构建及其子类](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/cell.html)。 - -选择合适的损失函数,可参考资料:[损失函数算子支持列表](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.nn.html#loss-functions)。 - -选择合适的优化器时,可参考资料:[优化器算子支持列表](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.nn.html#optimizer-functions)。 diff --git a/docs/migration_guide/source_zh_cn/conf.py b/docs/migration_guide/source_zh_cn/conf.py deleted file mode 100644 index 4bdc7a1bdc6acd64d40a0a705c0875d1bbf373bf..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/conf.py +++ /dev/null @@ -1,83 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -import os -import IPython -import re -import nbsphinx as nbs - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', - 'nbsphinx', - 'sphinx.ext.mathjax', - 'IPython.sphinxext.ipython_console_highlighting' -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_search_language = 'zh' - -html_search_options = {'dict': '../../resource/jieba.txt'} - -html_static_path = ['_static'] - -# Remove extra outputs for nbsphinx extension. 
-nbsphinx_source_re = re.compile(r"(app\.connect\('html-collect-pages', html_collect_pages\))") -nbsphinx_math_re = re.compile(r"(\S.*$)") -mod_path = os.path.abspath(nbs.__file__) -with open(mod_path, "r+", encoding="utf8") as f: - contents = f.readlines() - for num, line in enumerate(contents): - _content_re = nbsphinx_source_re.search(line) - if _content_re and "#" not in line: - contents[num] = nbsphinx_source_re.sub(r"# \g<1>", line) - if "mathjax_config = app.config" in line and "#" not in line: - contents[num:num+10] = [nbsphinx_math_re.sub(r"# \g<1>", i) for i in contents[num:num+10]] - break - f.seek(0) - f.writelines(contents) \ No newline at end of file diff --git a/docs/migration_guide/source_zh_cn/faq.md b/docs/migration_guide/source_zh_cn/faq.md deleted file mode 100644 index 0972ad2a71974e95f4c329356581d4195a3c7eba..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/faq.md +++ /dev/null @@ -1,39 +0,0 @@ -# 常见问题 - - - -从其他框架迁移到MindSpore框架的过程中,可能会遇到各种各样的问题,下面将迁移过程中的常见问题及其解决方法汇总如下: - -- 准备工作 - - [安装MindSpore常见问题处理](https://www.mindspore.cn/doc/faq/zh-CN/master/installation.html) - -- 网络脚本分析 - - [算子映射及缺失算子处理策略](https://www.mindspore.cn/doc/migration_guide/zh-CN/master/script_analysis.html#id3) - - [常见语法限制及处理策略](https://www.mindspore.cn/doc/migration_guide/zh-CN/master/script_analysis.html#id6) - -- 网络脚本开发 - - [MindConverter工具常见问题处理](https://gitee.com/mindspore/mindinsight/blob/master/mindinsight/mindconverter/README_CN.md#%E5%B8%B8%E8%A7%81%E9%97%AE%E9%A2%98) - -- 网络调试 - - [流程调试常见问题处理](https://www.mindspore.cn/doc/migration_guide/zh-CN/master/neural_network_debug.html#id6) - - [loss值对比检查常见问题处理](https://www.mindspore.cn/doc/migration_guide/zh-CN/master/neural_network_debug.html#id8) - - [loss值异常常见问题处理](https://www.mindspore.cn/doc/migration_guide/zh-CN/master/neural_network_debug.html#id11) - -- 性能调试 - - [性能调试常见问题及优化方法](https://www.mindspore.cn/doc/migration_guide/zh-CN/master/sample_code.html#id26) - - 
[Profiler工具常见问题处理](https://www.mindspore.cn/doc/migration_guide/zh-CN/master/performance_optimization.html#id6) - -- 执行推理 - - [端侧使用类常见问题处理](https://www.mindspore.cn/tutorial/lite/zh-CN/master/faq.html) - - [推理常见问题处理](https://www.mindspore.cn/doc/faq/zh-CN/master/inference.html) diff --git a/docs/migration_guide/source_zh_cn/images/accuracy_thought.png b/docs/migration_guide/source_zh_cn/images/accuracy_thought.png deleted file mode 100644 index ba7b1a25b78daeaf5ee702216c7032b24d0053e9..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/accuracy_thought.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/accuracy_thought.pptx b/docs/migration_guide/source_zh_cn/images/accuracy_thought.pptx deleted file mode 100644 index 02b42f0bf4d79fcb87b54cf8ddad7648035b5d2f..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/accuracy_thought.pptx and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/data_imput.png b/docs/migration_guide/source_zh_cn/images/data_imput.png deleted file mode 100644 index 86ac47dfd075489539b4d6c7b96fac2cef5f3ec5..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/data_imput.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/data_pipeline.png b/docs/migration_guide/source_zh_cn/images/data_pipeline.png deleted file mode 100644 index da59dcafbe49139138eb34cc47c58957b0b0cc3f..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/data_pipeline.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/flowchart.PNG b/docs/migration_guide/source_zh_cn/images/flowchart.PNG deleted file mode 100644 index ff37e19558e0875ba43f681d729eaacc24436995..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/flowchart.PNG and /dev/null differ diff --git 
a/docs/migration_guide/source_zh_cn/images/flowchart.pptx b/docs/migration_guide/source_zh_cn/images/flowchart.pptx deleted file mode 100644 index 31e083002b1b3e32f54eb2922b69ab13abbce817..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/flowchart.pptx and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/graph.png b/docs/migration_guide/source_zh_cn/images/graph.png deleted file mode 100644 index 0bc13636b5c84952978469c652c38500e6d34f43..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/graph.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/histogram.png b/docs/migration_guide/source_zh_cn/images/histogram.png deleted file mode 100644 index 6ff84eaf6713e20e45ca2e1600d4729f2f6e3e27..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/histogram.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/image-20210318152607548.png b/docs/migration_guide/source_zh_cn/images/image-20210318152607548.png deleted file mode 100644 index 93150aa3d05917465e2c5c7aa4f35ac44ccf1cfb..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/image-20210318152607548.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/lineage_model_chart.png b/docs/migration_guide/source_zh_cn/images/lineage_model_chart.png deleted file mode 100644 index dd6bbcfc698dd38ec7fba3f2939972fcfefdc662..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/lineage_model_chart.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/lineage_model_chart_1.png b/docs/migration_guide/source_zh_cn/images/lineage_model_chart_1.png deleted file mode 100644 index 4541b0b52a3b5fa59c56e808ede937c86eb6b278..0000000000000000000000000000000000000000 Binary files 
a/docs/migration_guide/source_zh_cn/images/lineage_model_chart_1.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/lineage_model_chart_2.png b/docs/migration_guide/source_zh_cn/images/lineage_model_chart_2.png deleted file mode 100644 index db64e360a2e7b9d827fe225ddcb30a2f920e58fe..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/lineage_model_chart_2.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/loss.png b/docs/migration_guide/source_zh_cn/images/loss.png deleted file mode 100644 index 8403ee213e4e7948cc3425c95a15948de28766da..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/loss.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/mindinsight_dashboard.png b/docs/migration_guide/source_zh_cn/images/mindinsight_dashboard.png deleted file mode 100644 index 355d4ce219bb2e2766eef80928838f4a11976c46..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/mindinsight_dashboard.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/model_hyper_param.png b/docs/migration_guide/source_zh_cn/images/model_hyper_param.png deleted file mode 100644 index 3e4d2143132efc4edb4ba67cb9642ac734202c75..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/model_hyper_param.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/pic1.png b/docs/migration_guide/source_zh_cn/images/pic1.png deleted file mode 100644 index 2bf1eea22eaf3a2ad4e1613ed74f26a9484b2f2f..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/pic1.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/pic2.png b/docs/migration_guide/source_zh_cn/images/pic2.png deleted file mode 100644 index 
56e81c795919a21f5a6e133568de9d8a405d269c..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/pic2.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/pic3.png b/docs/migration_guide/source_zh_cn/images/pic3.png deleted file mode 100644 index 9f457ca0debaf08f7f670f96d78422088cf3d993..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/pic3.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/pic4.png b/docs/migration_guide/source_zh_cn/images/pic4.png deleted file mode 100644 index 3eead5ff4ef7eb6ba6557697ec10fb9d454cb250..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/pic4.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/pic5.png b/docs/migration_guide/source_zh_cn/images/pic5.png deleted file mode 100644 index fdb822fc54ec93ed2309afba9696a25cbc369268..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/pic5.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/pic6.png b/docs/migration_guide/source_zh_cn/images/pic6.png deleted file mode 100644 index ad83bb00253c255725d181dffa59bbbbf2ce1cd2..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/pic6.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/profiler_case1_data_processing.png b/docs/migration_guide/source_zh_cn/images/profiler_case1_data_processing.png deleted file mode 100644 index 5a61547e2fcd194f0141cac15f516a3f99620dab..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/profiler_case1_data_processing.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/profiler_case1_dataset_process_step_interval.png 
b/docs/migration_guide/source_zh_cn/images/profiler_case1_dataset_process_step_interval.png deleted file mode 100644 index c9979c718797cf170439a34f5931389eae0a23e1..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/profiler_case1_dataset_process_step_interval.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/profiler_case1_helper.png b/docs/migration_guide/source_zh_cn/images/profiler_case1_helper.png deleted file mode 100644 index db996f7954caedf9f25e761eb9037e948052954d..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/profiler_case1_helper.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/profiler_case1_long_step_interval.png b/docs/migration_guide/source_zh_cn/images/profiler_case1_long_step_interval.png deleted file mode 100644 index 4367f9b37810d4c31b76475d7d563ea4668ff2c3..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/profiler_case1_long_step_interval.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/profiler_case2_long_fpbp.png b/docs/migration_guide/source_zh_cn/images/profiler_case2_long_fpbp.png deleted file mode 100644 index 2ce1996d5a0c05d386a1cee7d64834e87597c0b8..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/profiler_case2_long_fpbp.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/profiler_case2_operator_details.png b/docs/migration_guide/source_zh_cn/images/profiler_case2_operator_details.png deleted file mode 100644 index 24d022c647be5923049ec629267d6f70d3a081a5..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/profiler_case2_operator_details.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/profiler_case3_timeline.png 
b/docs/migration_guide/source_zh_cn/images/profiler_case3_timeline.png deleted file mode 100644 index ef8fabd2bdf5de0d532220eec6fec191f069fbf5..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/profiler_case3_timeline.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/reson_for_accuracy_problem.png b/docs/migration_guide/source_zh_cn/images/reson_for_accuracy_problem.png deleted file mode 100644 index 6d619ab83716fcee53ba5212b7de1cf12fc76f8e..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/reson_for_accuracy_problem.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/reson_for_accuracy_problem.pptx b/docs/migration_guide/source_zh_cn/images/reson_for_accuracy_problem.pptx deleted file mode 100644 index 90e6de30c8c5e071054be8b406f4a7cb536b80ee..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/reson_for_accuracy_problem.pptx and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/images/tensor.png b/docs/migration_guide/source_zh_cn/images/tensor.png deleted file mode 100644 index 0f549c4f5195d22fd7c7a53cde888cdd7be00907..0000000000000000000000000000000000000000 Binary files a/docs/migration_guide/source_zh_cn/images/tensor.png and /dev/null differ diff --git a/docs/migration_guide/source_zh_cn/index.rst b/docs/migration_guide/source_zh_cn/index.rst deleted file mode 100644 index 40c106414a658b23efdb557fa2f8b052f4190fb8..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/index.rst +++ /dev/null @@ -1,21 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 11:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore迁移指南 -=================== - -.. 
toctree:: - :maxdepth: 1 - - overview - preparation - script_analysis - script_development - neural_network_debug - accuracy_optimization - performance_optimization - inference - sample_code - faq diff --git a/docs/migration_guide/source_zh_cn/inference.md b/docs/migration_guide/source_zh_cn/inference.md deleted file mode 100644 index 085913eb4cd31b19e4c27d1d35fced244c839785..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/inference.md +++ /dev/null @@ -1,46 +0,0 @@ -# 推理执行 - - - -- [推理执行](#推理执行) - - [基于模型推理服务](#基于模型推理服务) - - [总览](#总览) - - [不同硬件平台执行推理](#不同硬件平台执行推理) - - [基于MindSpore Serving部署在线推理服务](#基于mindspore-serving部署在线推理服务) - - - - - -MindSpore可以基于训练好的模型,在不同的硬件平台上执行推理任务,还可以基于MindSpore Serving部署在线推理服务。 - -## 基于模型推理服务 - -### 总览 - -MindSpore支持保存为CheckPoint格式的[训练参数文件](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference.html#id2)和MindIR、AIR、ONNX格式的[网络模型文件](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference.html#id2)。 - -参考[执行推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference.html#id3),不仅可以直接通过`mindspore.model.predict`接口执行本机推理,还可以通过`mindspore.export`导出MindIR、AIR、ONNX格式的网络模型文件,以便于跨平台执行推理。 - -使用[MindIR格式](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference.html#id3)的模型文件消除了不同后端模型的差异,可以用于执行跨硬件平台推理,支持部署到云端Serving和端侧Lite平台。 - -### 不同硬件平台执行推理 - -- Ascend硬件平台参考[Ascend 910 AI处理器上推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_910.html)和[Ascend 310 AI处理器上推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_310.html)。 -- GPU硬件平台参考[GPU上推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_gpu.html)。 -- CPU硬件平台参考[CPU上推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_cpu.html)。 -- Lite端侧推理的相关应用参考[端侧推理](https://www.mindspore.cn/lite/docs?master)。 - -> 
Ascend硬件平台推理的接口使用问题参考[C++接口使用类](https://www.mindspore.cn/doc/faq/zh-CN/master/inference.html#c)解决。 - -## 基于MindSpore Serving部署在线推理服务 - -MindSpore Serving是一个轻量级、高性能的服务模块,旨在帮助MindSpore开发者在生产环境中高效部署在线推理服务。当用户使用MindSpore完成模型训练后,导出MindSpore模型,即可使用MindSpore Serving创建该模型的推理服务。参考以下几个样例进行部署: - -- [基于MindSpore Serving部署推理服务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_example.html)。 -- [基于gRPC接口访问MindSpore Serving服务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_grpc.html)。 -- [基于RESTful接口访问MindSpore Serving服务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_restful.html)。 -- [通过配置模型提供Servable](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_model.html)。 -- [基于MindSpore Serving部署分布式推理服务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_distributed_example.html)。 - -> MindSpore Serving部署在线推理服务的问题可以参考[MindSpore Serving类](https://www.mindspore.cn/doc/faq/zh-CN/master/inference.html#mindspore-serving)解决。 diff --git a/docs/migration_guide/source_zh_cn/migration_case_of_mindconverter.ipynb b/docs/migration_guide/source_zh_cn/migration_case_of_mindconverter.ipynb deleted file mode 100644 index 04bd9d60cc28759719720b8fbb32490999870bf6..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/migration_case_of_mindconverter.ipynb +++ /dev/null @@ -1,472 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "military-possible", - "metadata": {}, - "source": [ - "# 使用MindConverter迁移脚本\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/migration_guide/source_zh_cn/migration_case_of_mindconverter.ipynb)" - ] - }, - { - "cell_type": "markdown", - "id": "modular-arbitration", - "metadata": {}, - "source": [ - "## 概述" - ] - }, - { - "cell_type": "markdown", - "id": "stupid-british", - "metadata": {}, - "source": [ - 
"PyTorch模型转换为MindSpore脚本和权重,首先需要将PyTorch模型导出为ONNX模型,然后使用MindConverter CLI工具进行脚本和权重迁移。\n", - "HuggingFace Transformers是PyTorch框架下主流的自然语言处理三方库,我们以Transformer中的BertForMaskedLM为例,演示迁移过程。" - ] - }, - { - "cell_type": "markdown", - "id": "impossible-nebraska", - "metadata": {}, - "source": [ - "## 环境准备\n", - "\n", - "本案例需安装以下Python三方库:\n", - "```bash\n", - "pip install torch==1.5.1\n", - "pip install transformer==4.2.2\n", - "pip install mindspore==1.2.0\n", - "pip install mindinsight==1.2.0\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "revolutionary-bench", - "metadata": {}, - "source": [ - "## ONNX模型导出\n", - "\n", - "首先实例化HuggingFace中的BertForMaskedLM,以及相应的分词器(首次使用时需要下载模型权重、词表、模型配置等数据)。\n", - "\n", - "关于HuggingFace的使用,本文不做过多介绍,详细使用请参考[HuggingFace使用文档](https://huggingface.co/transformers/model_doc/bert.html)。\n", - "\n", - "该模型可对句子中被掩蔽(mask)的词进行预测。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "heated-millennium", - "metadata": {}, - "outputs": [], - "source": [ - "from transformers.models.bert import BertForMaskedLM, BertTokenizer\n", - "\n", - "tokenizer = BertTokenizer.from_pretrained(\"bert-base-uncased\")\n", - "model = BertForMaskedLM.from_pretrained(\"bert-base-uncased\")" - ] - }, - { - "cell_type": "markdown", - "id": "bacterial-picking", - "metadata": {}, - "source": [ - "我们使用该模型进行推理,生成若干组测试用例,以验证模型迁移的正确性。\n", - "\n", - "这里我们以一条句子为例`china is a poworful country, its capital is beijing.`。\n", - "\n", - "我们对`beijing`进行掩蔽(mask),输入`china is a poworful country, its capital is [MASK].`至模型,模型预期输出应为`beijing`。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "hawaiian-borough", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MASK TOKEN id: 12\n", - "Tokens: [[ 101 2859 2003 1037 23776 16347 5313 2406 1010 2049 3007 2003\n", - " 103 1012 102]]\n", - "Attention mask: [[1 1 1 1 1 1 1 1 1 1 1 1 1 1 1]]\n", - "Token type ids: [[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]]\n", - 
"Pred id: 7211\n", - "Pred token: beijing\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import torch\n", - "\n", - "text = \"china is a poworful country, its capital is [MASK].\"\n", - "tokenized_sentence = tokenizer(text)\n", - "\n", - "mask_idx = tokenized_sentence[\"input_ids\"].index(tokenizer.convert_tokens_to_ids(\"[MASK]\"))\n", - "input_ids = np.array([tokenized_sentence[\"input_ids\"]])\n", - "attention_mask = np.array([tokenized_sentence[\"attention_mask\"]])\n", - "token_type_ids = np.array([tokenized_sentence[\"token_type_ids\"]])\n", - "\n", - "# Get [MASK] token id.\n", - "print(f\"MASK TOKEN id: {mask_idx}\")\n", - "print(f\"Tokens: {input_ids}\") \n", - "print(f\"Attention mask: {attention_mask}\")\n", - "print(f\"Token type ids: {token_type_ids}\")\n", - "\n", - "model.eval()\n", - "with torch.no_grad():\n", - " predictions = model(input_ids=torch.tensor(input_ids),\n", - " attention_mask=torch.tensor(attention_mask),\n", - " token_type_ids=torch.tensor(token_type_ids))\n", - " predicted_index = torch.argmax(predictions[0][0][mask_idx])\n", - " predicted_token = tokenizer.convert_ids_to_tokens([predicted_index])[0]\n", - " print(f\"Pred id: {predicted_index}\")\n", - " print(f\"Pred token: {predicted_token}\")\n", - " assert predicted_token == \"beijing\"" - ] - }, - { - "cell_type": "markdown", - "id": "atomic-rebel", - "metadata": {}, - "source": [ - "HuggingFace提供了导出ONNX模型的工具,可使用如下方法将HuggingFace的预训练模型导出为ONNX模型:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "corresponding-vampire", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Creating folder exported_bert_base_uncased\n", - "Using framework PyTorch: 1.5.1+cu101\n", - "Found input input_ids with shape: {0: 'batch', 1: 'sequence'}\n", - "Found input token_type_ids with shape: {0: 'batch', 1: 'sequence'}\n", - "Found input attention_mask with shape: {0: 'batch', 1: 'sequence'}\n", - "Found output output_0 with 
shape: {0: 'batch', 1: 'sequence'}\n", - "Ensuring inputs are in correct order\n", - "position_ids is not present in the generated input list.\n", - "Generated inputs order: ['input_ids', 'attention_mask', 'token_type_ids']\n" - ] - } - ], - "source": [ - "from pathlib import Path\n", - "from transformers.convert_graph_to_onnx import convert\n", - "\n", - "# Exported onnx model path.\n", - "saved_onnx_path = \"./exported_bert_base_uncased/bert_base_uncased.onnx\"\n", - "convert(\"pt\", model, Path(saved_onnx_path), 11, tokenizer)" - ] - }, - { - "cell_type": "markdown", - "id": "adverse-outline", - "metadata": {}, - "source": [ - "根据打印的信息,我们可以看到导出的ONNX模型输入节点有3个:`input_ids`,`token_type_ids`,`attention_mask`,以及相应的输入轴,\n", - "输出节点有一个`output_0`。\n", - "\n", - "至此ONNX模型导出成功,接下来对导出的ONNX模型精度进行验证(ONNX模型导出过程在ARM机器上执行,可能需要用户自行编译安装PyTorch以及Transformers三方库)。" - ] - }, - { - "cell_type": "markdown", - "id": "paperback-playback", - "metadata": {}, - "source": [ - "## ONNX模型验证\n" - ] - }, - { - "cell_type": "markdown", - "id": "mysterious-courage", - "metadata": {}, - "source": [ - "我们仍然使用PyTorch模型推理时的句子`china is a poworful country, its capital is [MASK].`作为输入,观测ONNX模型表现是否符合预期。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "suitable-channels", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ONNX Pred id: 7211\n", - "ONNX Pred token: beijing\n" - ] - } - ], - "source": [ - "import onnx\n", - "import onnxruntime as ort\n", - "\n", - "model = onnx.load(saved_onnx_path)\n", - "sess = ort.InferenceSession(bytes(model.SerializeToString()))\n", - "result = sess.run(\n", - " output_names=None,\n", - " input_feed={\"input_ids\": input_ids, \n", - " \"attention_mask\": attention_mask,\n", - " \"token_type_ids\": token_type_ids}\n", - ")[0]\n", - "predicted_index = np.argmax(result[0][mask_idx])\n", - "predicted_token = tokenizer.convert_ids_to_tokens([predicted_index])[0]\n", - "\n", - "print(f\"ONNX Pred id: 
{predicted_index}\")\n", - "print(f\"ONNX Pred token: {predicted_token}\")\n", - "assert predicted_token == \"beijing\"" - ] - }, - { - "cell_type": "markdown", - "id": "essential-pharmacology", - "metadata": {}, - "source": [ - "可以看到,导出的ONNX模型功能与原PyTorch模型完全一致,接下来可以使用MindConverter进行脚本和权重迁移了!" - ] - }, - { - "cell_type": "markdown", - "id": "realistic-singapore", - "metadata": {}, - "source": [ - "## MindConverter进行模型脚本和权重迁移" - ] - }, - { - "cell_type": "markdown", - "id": "invisible-tracker", - "metadata": {}, - "source": [ - "MindConverter进行模型转换时,需要给定模型路径(`--model_file`)、输入节点(`--input_nodes`)、输入节点尺寸(`--shape`)、输出节点(`--output_nodes`)。\n", - "\n", - "生成的脚本输出路径(`--output`)、转换报告路径(`--report`)为可选参数,默认为当前路径下的output目录,若输出目录不存在将自动创建。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "processed-spanish", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "MindConverter: conversion is completed.\n", - "\n" - ] - } - ], - "source": [ - "!mindconverter --model_file ./exported_bert_base_uncased/bert_base_uncased.onnx --shape 1,128 1,128 1,128 \\\n", - " --input_nodes input_ids token_type_ids attention_mask \\\n", - " --output_nodes output_0 \\\n", - " --output ./converted_bert_base_uncased \\\n", - " --report ./converted_bert_base_uncased" - ] - }, - { - "cell_type": "markdown", - "id": "working-funeral", - "metadata": {}, - "source": [ - "**看到“MindConverter: conversion is completed.”即代表模型已成功转换!**" - ] - }, - { - "cell_type": "markdown", - "id": "classical-seminar", - "metadata": {}, - "source": [ - "转换完成后,该目录下生成如下文件:\n", - "- 模型定义脚本(后缀为.py)\n", - "- 权重ckpt文件(后缀为.ckpt)\n", - "- 迁移前后权重映射(后缀为.json)\n", - "- 转换报告(后缀为.txt)\n", - "\n", - "通过ls命令检查一下转换结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "equipped-bottom", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "bert_base_uncased.ckpt\treport_of_bert_base_uncased.txt\r\n", - 
"bert_base_uncased.py\tweight_map_of_bert_base_uncased.json\r\n" - ] - } - ], - "source": [ - "!ls ./converted_bert_base_uncased" - ] - }, - { - "cell_type": "markdown", - "id": "fuzzy-thinking", - "metadata": {}, - "source": [ - "可以看到所有文件已生成。\n", - "\n", - "迁移完成,接下来我们对迁移后模型精度进行验证。" - ] - }, - { - "cell_type": "markdown", - "id": "leading-punch", - "metadata": {}, - "source": [ - "## MindSpore模型验证\n", - "我们仍然使用`china is a poworful country, its capital is [MASK].`作为输入,观测迁移后模型表现是否符合预期。" - ] - }, - { - "cell_type": "markdown", - "id": "competent-dispute", - "metadata": {}, - "source": [ - "由于工具在转换时,需要将模型尺寸冻结,因此在使用MindSpore进行推理验证时,需要将句子补齐(Pad)到固定长度,可通过如下函数实现句子补齐。\n", - "\n", - "推理时,句子长度需小于转换时的最大句长(这里我们最长句子长度为128,即在转换阶段通过`--shape 1,128`指定)。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "essential-football", - "metadata": {}, - "outputs": [], - "source": [ - "def padding(input_ids, attn_mask, token_type_ids, target_len=128):\n", - " length = len(input_ids)\n", - " for i in range(target_len - length):\n", - " input_ids.append(0)\n", - " attn_mask.append(0)\n", - " token_type_ids.append(0)\n", - " return np.array([input_ids]), np.array([attn_mask]), np.array([token_type_ids])" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "greatest-louis", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ONNX Pred id: 7211\n" - ] - } - ], - "source": [ - "from converted_bert_base_uncased.bert_base_uncased import Model as MsBert\n", - "from mindspore import load_checkpoint, load_param_into_net, context, Tensor\n", - "\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "padded_input_ids, padded_attention_mask, padded_token_type = padding(tokenized_sentence[\"input_ids\"], \n", - " tokenized_sentence[\"attention_mask\"], \n", - " tokenized_sentence[\"token_type_ids\"], \n", - " target_len=128)\n", - "padded_input_ids = Tensor(padded_input_ids)\n", - 
"padded_attention_mask = Tensor(padded_attention_mask)\n", - "padded_token_type = Tensor(padded_token_type)\n", - "\n", - "model = MsBert()\n", - "param_dict = load_checkpoint(\"./converted_bert_base_uncased/bert_base_uncased.ckpt\")\n", - "not_load_params = load_param_into_net(model, param_dict)\n", - "output = model(padded_attention_mask, padded_input_ids, padded_token_type)\n", - "\n", - "assert not not_load_params\n", - "\n", - "predicted_index = np.argmax(output.asnumpy()[0][mask_idx])\n", - "print(f\"ONNX Pred id: {predicted_index}\")\n", - "assert predicted_index == 7211" - ] - }, - { - "cell_type": "markdown", - "id": "hybrid-intranet", - "metadata": {}, - "source": [ - "至此,使用MindConverter进行脚本和权重迁移完成。\n", - "\n", - "用户可根据使用场景编写训练、推理、部署脚本,实现个人业务逻辑。" - ] - }, - { - "cell_type": "markdown", - "id": "minute-sector", - "metadata": {}, - "source": [ - "## 常见问题" - ] - }, - { - "cell_type": "markdown", - "id": "favorite-worse", - "metadata": {}, - "source": [ - "**Q:如何修改迁移后脚本的批次大小(Batch size)、句子长度(Sequence length)等尺寸(shape)规格,以实现模型可支持任意尺寸的数据推理、训练?**\n", - "\n", - "A:迁移后脚本存在shape限制,通常是由于Reshape算子导致,或其他涉及张量排布变化的算子导致。以上述Bert迁移为例,首先创建两个全局变量,表示预期的批次大小、句子长度,而后修改Reshape操作的目标尺寸,替换成相应的批次大小、句子长度的全局变量即可。" - ] - }, - { - "cell_type": "markdown", - "id": "failing-smoke", - "metadata": {}, - "source": [ - "**Q:生成后的脚本中类名的定义不符合开发者的习惯,如`class Module0(nn.Cell)`,人工修改是否会影响转换后的权重加载?**\n", - "\n", - "A:权重的加载仅与变量名、类结构有关,因此类名可以修改,不影响权重加载。若需要调整类的结构,则相应的权重命名需要同步修改以适应迁移后模型的结构。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/migration_guide/source_zh_cn/migration_script.md 
b/docs/migration_guide/source_zh_cn/migration_script.md deleted file mode 100644 index fe6744180613864ce61556d82d9cf82be1d275b2..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/migration_script.md +++ /dev/null @@ -1,477 +0,0 @@ -# 迁移脚本 - - - -- [迁移脚本](#迁移脚本) - - [概述](#概述) - - [TensorFlow脚本迁移MindSpore](#tensorflow脚本迁移mindspore) - - [PyTorch脚本迁移MindSpore](#pytorch脚本迁移mindspore) - - - - - -## 概述 - -本文档主要介绍,怎样将网络脚本从TensorFlow或PyTorch框架迁移至MindSpore。 - -## TensorFlow脚本迁移MindSpore - -通过读TensorBoard图,进行脚本迁移。 - -1. 以TensorFlow实现的[PoseNet](https://arxiv.org/pdf/1505.07427v4.pdf)为例,演示如何利用TensorBoard读图,编写MindSpore代码,将[TensorFlow模型](https://github.com/kentsommer/tensorflow-posenet)迁移到MindSpore上。 - - > 此处提到的PoseNet代码为基于Python2的代码,需要对Python3做一些语法更改才能在Python3上运行,具体修改内容不予赘述。 - -2. 改写代码,利用`tf.summary`接口,保存TensorBoard需要的log,并启动TensorBoard。 - -3. 打开的TensorBoard如图所示,图例仅供参考,可能因log生成方式的差异,TensorBoard展示的图也有所差异。 - - ![PoseNet TensorBoard](images/pic1.png) - -4. 找到3个输入的Placeholder,通过看图并阅读代码得知,第二、第三个输入都只在计算loss时使用。 - - ![PoseNet Placeholder](images/pic3.png) - - ![PoseNet Placeholder_1 Placeholder_2](images/pic2.png) - - ![PoseNet script input1 2 3](images/pic4.png) - - 至此,我们可以初步划分出,构造网络模型三步: - - 第一步,在网络的三个输入中,第一个输入将在backbone中计算出六个输出; - - 第二步,上一步结果与第二、第三个输入在loss子网中计算loss; - - 第三步,利用`TrainOneStepCell`自动微分构造反向网络;利用TensorFlow工程中提供的Adam优化器及属性,写出对应的MindSpore优化器来更新参数,网络脚本骨干可写作: - - ```python - import mindspore - from mindspore import nn - from mindspore.nn import TrainOneStepCell - from mindspore.nn import Adam - - # combine backbone and loss - class PoseNetLossCell(nn.Cell): - def __init__(self, backbone, loss): - super(PoseNetLossCell, self).__init__() - self.pose_net = backbone - self.loss = loss - def construct(self, input_1, input_2, input_3): - p1_x, p1_q, p2_x, p2_q, p3_x, p3_q = self.poss_net(input_1) - loss = self.loss(p1_x, p1_q, p2_x, p2_q, p3_x, p3_q, input_2, input_3) - return loss - - # define backbone - class PoseNet(nn.Cell): - def __init__(self): 
- super(PoseNet, self).__init__() - def construct(self, input_1): - """do something with input_1, output num 6""" - return p1_x, p1_q, p2_x, p2_q, p3_x, p3_q - - # define loss - class PoseNetLoss(nn.Cell): - def __init__(self): - super(PoseNetLoss, self).__init__() - - def construct(self, p1_x, p1_q, p2_x, p2_q, p3_x, p3_q, poses_x, poses_q): - """do something to calc loss""" - return loss - - # define network - backbone = PoseNet() - loss = PoseNetLoss() - net_with_loss = PoseNetLossCell(backbone, loss) - opt = Adam(net_with_loss.trainable_params(), learning_rate=0.001, beta1=0.9, beta2=0.999, eps=1e-08, use_locking=False) - net_with_grad = TrainOneStepCell(net_with_loss, opt) - - - ``` - -5. 接下来,我们来具体实现backbone中的计算逻辑。 - - 第一个输入首先经过了一个名为conv1的子图,通过看图可得,其中计算逻辑为: - - ![PoseNet conv1 子图](images/pic5.png) - - 输入->Conv2D->BiasAdd->ReLU,虽然图上看起来,BiasAdd后的算子名虽然为conv1,但其实际执行的是ReLU。 - - ![PoseNet Conv1 conv1 relu](images/pic6.png) - - 这样一来,第一个子图conv1,可以定义如下,具体参数,与原工程中的参数对齐: - - ```python - class Conv1(nn.Cell): - def __init__(self): - super(Conv1, self).__init__() - self.conv = Conv2d() - self.relu = ReLU() - def construct(self, x): - x = self.conv(x) - x = self.relu(x) - return x - ``` - - 通过观察TensorBoard图和代码,我们不难发现,原TensorFlow工程中定义的conv这一类型的子网,可以复写为MindSpore的子网,减少重复代码。 - - TensorFlow工程conv子网定义: - - ```python - def conv(self, - input, - k_h, - k_w, - c_o, - s_h, - s_w, - name, - relu=True, - padding=DEFAULT_PADDING, - group=1, - biased=True): - # Verify that the padding is acceptable - self.validate_padding(padding) - # Get the number of channels in the input - c_i = input.get_shape()[-1] - # Verify that the grouping parameter is valid - assert c_i % group == 0 - assert c_o % group == 0 - # Convolution for a given input and kernel - convolve = lambda i, k: tf.nn.conv2d(i, k, [1, s_h, s_w, 1], padding=padding) - with tf.variable_scope(name) as scope: - kernel = self.make_var('weights', shape=[k_h, k_w, c_i / group, c_o]) - if group == 1: - # This is the common-case. 
Convolve the input without any further complications. - output = convolve(input, kernel) - else: - # Split the input into groups and then convolve each of them independently - input_groups = tf.split(3, group, input) - kernel_groups = tf.split(3, group, kernel) - output_groups = [convolve(i, k) for i, k in zip(input_groups, kernel_groups)] - # Concatenate the groups - output = tf.concat(3, output_groups) - # Add the biases - if biased: - biases = self.make_var('biases', [c_o]) - output = tf.nn.bias_add(output, biases) - if relu: - # ReLU non-linearity - output = tf.nn.relu(output, name=scope.name) - return output - ``` - - 则对应MindSpore子网定义如下: - - ```python - from mindspore import nn - from mindspore.nn import Conv2d, ReLU - - class ConvReLU(nn.Cell): - def __init__(self, channel_in, kernel_size, channel_out, strides): - super(ConvReLU, self).__init__() - self.conv = Conv2d(channel_in, channel_out, kernel_size, strides, has_bias=True) - self.relu = ReLU() - - def construct(self, x): - x = self.conv(x) - x = self.relu(x) - return x - ``` - - 那么,对照着TensorBoard中的数据流向与算子属性,backbone计算逻辑可编写如下: - - ```python - from mindspore.nn import MaxPool2d - import mindspore.ops as ops - - - class LRN(nn.Cell): - def __init__(self, radius, alpha, beta, bias=1.0): - super(LRN, self).__init__() - self.lrn = ops.LRN(radius, bias, alpha, beta) - def construct(self, x): - return self.lrn(x) - - - class PoseNet(nn.Cell): - def __init__(self): - super(PoseNet, self).__init__() - self.conv1 = ConvReLU(3, 7, 64, 2) - self.pool1 = MaxPool2d(3, 2, pad_mode="SAME") - self.norm1 = LRN(2, 2e-05, 0.75) - self.reduction2 = ConvReLU(64, 1, 64, 1) - self.conv2 = ConvReLU(64, 3, 192, 1) - self.norm2 = LRN(2, 2e-05, 0.75) - self.pool2 = MaxPool2d(3, 2, pad_mode="SAME") - self.icp1_reduction1 = ConvReLU(192, 1, 96, 1) - self.icp1_out1 = ConvReLU(96, 3, 128, 1) - self.icp1_reduction2 = ConvReLU(192, 1, 16, 1) - self.icp1_out2 = ConvReLU(16, 5, 32, 1) - self.icp1_pool = MaxPool2d(3, 1, pad_mode="SAME") - 
self.icp1_out3 = ConvReLU(192, 5, 32, 1) - self.icp1_out0 = ConvReLU(192, 1, 64, 1) - self.concat = ops.Concat(axis=1) - self.icp2_reduction1 = ConvReLU(256, 1, 128, 1) - self.icp2_out1 = ConvReLU(128, 3, 192, 1) - self.icp2_reduction2 = ConvReLU(256, 1, 32, 1) - self.icp2_out2 = ConvReLU(32, 5, 96, 1) - self.icp2_pool = MaxPool2d(3, 1, pad_mode="SAME") - self.icp2_out3 = ConvReLU(256, 1, 64, 1) - self.icp2_out0 = ConvReLU(256, 1, 128, 1) - self.icp3_in = MaxPool2d(3, 2, pad_mode="SAME") - self.icp3_reduction1 = ConvReLU(480, 1, 96, 1) - self.icp3_out1 = ConvReLU(96, 3, 208, 1) - self.icp3_reduction2 = ConvReLU(480, 1, 16, 1) - self.icp3_out2 = ConvReLU(16, 5, 48, 1) - self.icp3_pool = MaxPool2d(3, 1, pad_mode="SAME") - self.icp3_out3 = ConvReLU(480, 1, 64, 1) - self.icp3_out0 = ConvReLU(480, 1, 192, 1) - """etc""" - """...""" - - def construct(self, input_1): - """do something with input_1, output num 6""" - x = self.conv1(input_1) - x = self.pool1(x) - x = self.norm1(x) - x = self.reduction2(x) - x = self.conv2(x) - x = self.norm2(x) - x = self.pool2(x) - pool2 = x - - x = self.icp1_reduction1(x) - x = self.icp1_out1(x) - icp1_out1 = x - - icp1_reduction2 = self.icp1_reduction2(pool2) - icp1_out2 = self.icp1_out2(icp1_reduction2) - - icp1_pool = self.icp1_pool(pool2) - icp1_out3 = self.icp1_out3(icp1_pool) - - icp1_out0 = self.icp1_out0(pool2) - - icp2_in = self.concat((icp1_out0, icp1_out1, icp1_out2, icp1_out3)) - """etc""" - """...""" - - return p1_x, p1_q, p2_x, p2_q, p3_x, p3_q - ``` - - 相应的,loss计算逻辑可编写如下: - - ```python - class PoseNetLoss(nn.Cell): - def __init__(self): - super(PoseNetLoss, self).__init__() - self.sub = ops.Sub() - self.square = ops.Square() - self.reduce_sum = ops.ReduceSum() - self.sqrt = ops.Sqrt() - - def construct(self, p1_x, p1_q, p2_x, p2_q, p3_x, p3_q, poses_x, poses_q): - """do something to calc loss""" - l1_x = self.sqrt(self.reduce_sum(self.square(self.sub(p1_x, poses_x)))) * 0.3 - l1_q = 
self.sqrt(self.reduce_sum(self.square(self.sub(p1_q, poses_q)))) * 150 - l2_x = self.sqrt(self.reduce_sum(self.square(self.sub(p2_x, poses_x)))) * 0.3 - l2_q = self.sqrt(self.reduce_sum(self.square(self.sub(p2_q, poses_q)))) * 150 - l3_x = self.sqrt(self.reduce_sum(self.square(self.sub(p3_x, poses_x)))) * 1 - l3_q = self.sqrt(self.reduce_sum(self.square(self.sub(p3_q, poses_q)))) * 500 - return l1_x + l1_q + l2_x + l2_q + l3_x + l3_q - ``` - - 最终,你的训练脚本应该类似如下所示: - - ```python - if __name__ == "__main__": - backbone = PoseNet() - loss = PoseNetLoss() - net_with_loss = PoseNetLossCell(backbone, loss) - opt = Adam(net_with_loss.trainable_params(), learning_rate=0.001, beta1=0.9, beta2=0.999, eps=1e-08, use_locking=False) - net_with_grad = TrainOneStepCell(net_with_loss, opt) - """dataset define""" - model = Model(net_with_grad) - model.train(epoch_size, dataset) - ``` - - 这样,就基本完成了模型脚本从TensorFlow到MindSpore的迁移,接下来就是利用丰富的MindSpore工具和计算策略,对精度进行调优,在此不予详述。 - -## PyTorch脚本迁移MindSpore - -通过读PyTorch脚本,直接进行迁移。 - -1. PyTorch子网模块通常继承`torch.nn.Module`,MindSpore通常继承`mindspore.nn.Cell`;PyTorch子网模块正向计算逻辑需要重写forward方法,MindSpore子网模块正向计算逻辑需要重写construct方法。 - -2. 
以常见的Bottleneck类在MindSpore下的迁移为例。 - - PyTorch工程代码 - - ```python - # defined in PyTorch - class Bottleneck(nn.Module): - def __init__(self, inplanes, planes, stride=1, mode='NORM', k=1, dilation=1): - super(Bottleneck, self).__init__() - self.mode = mode - self.relu = nn.ReLU(inplace=True) - self.k = k - - btnk_ch = planes // 4 - self.bn1 = nn.BatchNorm2d(inplanes) - self.conv1 = nn.Conv2d(inplanes, btnk_ch, kernel_size=1, bias=False) - - self.bn2 = nn.BatchNorm2d(btnk_ch) - self.conv2 = nn.Conv2d(btnk_ch, btnk_ch, kernel_size=3, stride=stride, padding=dilation, - dilation=dilation, bias=False) - - self.bn3 = nn.BatchNorm2d(btnk_ch) - self.conv3 = nn.Conv2d(btnk_ch, planes, kernel_size=1, bias=False) - - if mode == 'UP': - self.shortcut = None - elif inplanes != planes or stride > 1: - self.shortcut = nn.Sequential( - nn.BatchNorm2d(inplanes), - self.relu, - nn.Conv2d(inplanes, planes, kernel_size=1, stride=stride, bias=False) - ) - else: - self.shortcut = None - - def _pre_act_forward(self, x): - residual = x - - out = self.bn1(x) - out = self.relu(out) - out = self.conv1(out) - - out = self.bn2(out) - out = self.relu(out) - out = self.conv2(out) - - out = self.bn3(out) - out = self.relu(out) - out = self.conv3(out) - - if self.mode == 'UP': - residual = self.squeeze_idt(x) - elif self.shortcut is not None: - residual = self.shortcut(residual) - - out += residual - - return out - - def squeeze_idt(self, idt): - n, c, h, w = idt.size() - return idt.view(n, c // self.k, self.k, h, w).sum(2) - - def forward(self, x): - out = self._pre_act_forward(x) - return out - - ``` - - 根据PyTorch和MindSpore对卷积参数定义的区别,可以翻译成如下定义: - - ```python - from mindspore import nn - import mindspore.ops as ops - - # defined in MindSpore - class Bottleneck(nn.Cell): - def __init__(self, inplanes, planes, stride=1, k=1, dilation=1): - super(Bottleneck, self).__init__() - self.mode = mode - self.relu = nn.ReLU() - self.k = k - - btnk_ch = planes // 4 - self.bn1 = 
nn.BatchNorm2d(num_features=inplanes, momentum=0.9) - self.conv1 = nn.Conv2d(in_channels=inplanes, out_channels=btnk_ch, kernel_size=1, pad_mode='pad', has_bias=False) - - self.bn2 = nn.BatchNorm2d(num_features=btnk_ch, momentum=0.9) - self.conv2 = nn.Conv2d(in_channels=btnk_ch, out_channels=btnk_ch, kernel_size=3, stride=stride, pad_mode='pad', padding=dilation, dilation=dilation, has_bias=False) - - self.bn3 = nn.BatchNorm2d(num_features=btnk_ch, momentum=0.9) - self.conv3 = nn.Conv2d(in_channels=btnk_ch, out_channels=planes, kernel_size=1, pad_mode='pad', has_bias=False) - - self.shape = ops.Shape() - self.reshape = ops.Reshape() - self.reduce_sum = ops.ReduceSum() - - if mode == 'UP': - self.shortcut = None - elif inplanes != planes or stride > 1: - self.shortcut = nn.SequentialCell([ - nn.BatchNorm2d(num_features=inplanes, momentum=0.9), - nn.ReLU(), - nn.Conv2d(in_channels=inplanes, out_channels=planes, kernel_size=1, stride=stride, pad_mode='pad', has_bias=False) - ]) - else: - self.shortcut = None - - def _pre_act_forward(self, x): - residual = x - - out = self.bn1(x) - out = self.relu(out) - out = self.conv1(out) - - out = self.bn2(out) - out = self.relu(out) - out = self.conv2(out) - - out = self.bn3(out) - out = self.relu(out) - out = self.conv3(out) - - if self.shortcut is not None: - residual = self.shortcut(residual) - - out += residual - return out - - def construct(self, x): - out = self._pre_act_forward(x) - return out - ``` - -3. PyTorch的反向传播通常使用`loss.backward()`实现,参数更新通过`optimizer.step()`实现,在MindSpore中,这些不需要用户显式调用执行,可以交给`TrainOneStepCell`类进行反向传播和梯度更新。最后,训练脚本结构应如下所示: - - ```python - # define dataset - dataset = ... - - # define backbone and loss - backbone = Net() - loss = NetLoss() - - # combine backbone and loss - net_with_loss = WithLossCell(backbone, loss) - - # define optimizer - opt = ... 
- - # combine forward and backward - net_with_grad = TrainOneStepCell(net_with_loss, opt) - - # define model and train - model = Model(net_with_grad) - model.train(epoch_size, dataset) - ``` - -PyTorch和MindSpore在一些基础API的定义上比较相似,比如[mindspore.nn.SequentialCell](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.SequentialCell.html#mindspore.nn.SequentialCell)和[torch.nn.Sequential](https://pytorch.org/docs/stable/generated/torch.nn.Sequential.html#torch.nn.Sequential),另外,一些算子API可能不尽相同,此处列举一些常见的API对照,更多信息可以参考MindSpore官网的[MindSpore与PyTorch对照表](https://www.mindspore.cn/doc/note/zh-CN/master/index.html#operator_api)。 - -| PyTorch | MindSpore | -| :-------------------------------: | :------------------------------------------------: | -| tensor.view() | mindspore.ops.operations.Reshape()(tensor) | -| tensor.size() | mindspore.ops.operations.Shape()(tensor) | -| tensor.sum(axis) | mindspore.ops.operations.ReduceSum()(tensor, axis) | -| torch.nn.Upsample[mode: nearest] | mindspore.ops.operations.ResizeNearestNeighbor | -| torch.nn.Upsample[mode: bilinear] | mindspore.ops.operations.ResizeBilinear | -| torch.nn.Linear | mindspore.nn.Dense | -| torch.nn.PixelShuffle | mindspore.ops.operations.DepthToSpace | - -值得注意的是,尽管`torch.nn.MaxPool2d`和`mindspore.nn.MaxPool2d`在接口定义上较为相似,但在Ascend上的训练过程中,MindSpore实际调用了`MaxPoolWithArgMax`算子,该算子与TensorFlow的同名算子功能相同,在迁移过程中MaxPool层后的输出MindSpore与PyTorch不一致是正常现象,理论上不影响最终训练结果。 diff --git a/docs/migration_guide/source_zh_cn/neural_network_debug.md b/docs/migration_guide/source_zh_cn/neural_network_debug.md deleted file mode 100644 index 8060599665befbfee933f6077323d8c706c7458d..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/neural_network_debug.md +++ /dev/null @@ -1,249 +0,0 @@ -# 网络调试 - - - -- [网络调试](#网络调试) - - [网络调试的基本流程](#网络调试的基本流程) - - [网络调试中的常用方法](#网络调试中的常用方法) - - [流程调试](#流程调试) - - [用PyNative模式进行流程调试](#用pynative模式进行流程调试) - - [获取更多报错信息](#获取更多报错信息) - - [常见错误](#常见错误) - - 
[loss值对比检查](#loss值对比检查) - - [主要步骤](#主要步骤) - - [相关问题定位](#相关问题定位) - - [精度调试工具](#精度调试工具) - - [自定义调试信息](#自定义调试信息) - - [使用MindOptimizer进行超参调优](#使用mindoptimizer进行超参调优) - - [loss值异常定位](#loss值异常定位) - - - - - -本章将介绍网络调试的基本思路、常用工具,以及一些常见问题处理。 - -## 网络调试的基本流程 - -网络调试的过程主要分为以下几个步骤: - -1. 网络流程调试成功,网络执行整体不报错,正确输出loss值,且正常完成参数更新。 - - 一般情况下,使用`model.train`接口完整执行一个step并且不报错,即正常执行并完成了参数更新;如果需要精确确认,可以通过`mindspore.train.callback.CheckpointConfig`中的参数`save_checkpoint_steps=1`保存连续两个step的Checkpoint文件,或者使用`save_checkpoint`接口直接保存Checkpoint文件,然后通过以下代码打印Checkpoint文件中的权重值,查看两个step的权重是否发生改变,并完成更新。 - - ```python - import mindspore - import numpy as np - ckpt = mindspore.load_checkpoint(ckpt_path) - for param in ckpt: - value = ckpt[param].data.asnumpy() - print(value) - ``` - -2. 网络多轮迭代执行输出loss值,且loss值具有基本的收敛趋势。 - -3. 网络精度调试,超参调优。 - -## 网络调试中的常用方法 - -### 流程调试 - -本节内容介绍脚本开发基本完成后,网络流程调试过程中可能出现的问题和解决方法。 - -#### 用PyNative模式进行流程调试 - -在脚本开发和网络流程调试中,我们推荐使用PyNative模式进行调试。PyNative模式支持执行单算子、普通函数和网络,以及单独求梯度的操作。在PyNative模式下,可以方便地设置断点,获取网络执行的中间结果,也可以通过pdb的方式对网络进行调试。 - -在默认情况下,MindSpore处于PyNative模式,也可以通过`context.set_context(mode=context.PYNATIVE_MODE)`进行显式定义,相关示例可参考[使用PyNative模式调试](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/debug_in_pynative_mode.html#pynative)。 - -#### 获取更多报错信息 - -在网络流程调试过程中,如果需要获取更多的报错信息,可通过以下方式获得: - -- 在PyNative模式下可使用pdb进行调试,利用pdb打印相关堆栈和上下文信息帮助问题定位。 -- 使用Print算子打印更多上下文信息,具体示例可参考[Print算子功能介绍](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#print)。 -- 调整日志级别获取更多报错信息,MindSpore可通过环境变量方便地调整日志级别,具体可参考[日志相关的环境变量和配置](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#id6)。 - -#### 常见错误 - -在网络流程调试中,常见的错误有以下几类: - -- 算子执行报错 - - 网络流程调试过程中,常出现shape不匹配、dtype不支持等算子执行报错,此时应根据报错信息检查是否正确使用算子,以及算子输入数据的shape是否与预期相符,并进行相应修改。 - - 相关算子支持和API介绍可参考[算子支持列表](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/operator_list.html)和[算子Python 
API](https://www.mindspore.cn/doc/api_python/zh-CN/master/index.html)。 - -- 相同脚本,在PyNative模式下能跑通,但Graph模式下报错 - - MindSpore的Graph模式下,`construct`函数中的代码由MindSpore框架进行解析,有一些Python语法还未支持,因此导致报错。此时应当根据报错信息按照[MindSpore的语法说明](https://www.mindspore.cn/doc/note/zh-CN/master/static_graph_syntax_support.html)修改相关代码。 - -- 分布式并行训练脚本配置错误 - - 分布式并行训练脚本及环境配置可参考[分布式并行训练教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_tutorials.html)。 - -### loss值对比检查 - -在具有对标脚本的情况下,可对比对标脚本跑出的loss值与MindSpore脚本跑出的loss值,以验证整体网络结构和算子精度的正确性。 - -#### 主要步骤 - -1. 保证输入相同 - - 需保证两个网络中输入相同,使得在相同的网络结构下,网络输出相同。可使用以下几种方式保证输入相同: - - - 使用numpy自行构造输入数据,保证网络输入相同,MindSpore支持Tensor和numpy的自由转换。构造输入数据可以参考以下脚本: - - ```python - input = Tensor(np.random.randint(0, 10, size=(3, 5, 10)).astype(np.float32)) - ``` - - - 使用相同数据集进行计算,MindSpore支持使用TFRecord数据集,可使用`mindspore.dataset.TFRecordDataset`接口读取。 - -2. 去除网络中随机性因素的影响 - - 去除网络中的随机性影响,主要方法有设置相同的随机性种子,关掉数据shuffle,修改代码去除dropout、initializer等网络中具有随机性的算子的影响等。 - -3. 保证相关超参数的设置相同 - - 需保证网络中的超参数设置相同,以保证相同的输入,算子的输出相同。 - -4. 
运行网络,比较输出的loss值,一般loss值误差在1‰左右,因为算子本身存在一定精度误差,随着step数增大,误差会有一定累加。 - -#### 相关问题定位 - -如果loss值误差较大,可使用以下几种方式进行问题定位: - -- 检查输入、超参设置是否相同,以及是否完全去除了随机性影响。 - - 同一脚本多次重跑,loss值相差较大,则说明没有完全去除网络中的随机性影响。 - -- 整体判断。 - - 如果第一轮loss值就出现较大误差,则说明网络的前向计算就存在问题。 - - 如果第一轮loss值在误差范围内,第二轮开始loss值出现较大误差,则说明网络的前向计算应该没有问题,反向梯度和权重更新计算可能存在问题。 - -- 有了整体的判断之后,由粗到细进行输入输出数值的精度对比。 - - 首先,对各个子网从输入开始逐层对比输入输出值,确定初始出现问题的子网。 - - 然后,对比子网中的网络结构以及算子的输入输出,找到出现问题的网络结构或算子,进行修改。 - - 如果在此过程中发现了算子精度存在问题,可在[MindSpore代码托管平台](https://gitee.com/mindspore/mindspore)上提issue,相关人员将对问题进行跟踪处理。 - -- MindSpore提供了丰富的工具获取网络中间数据,可根据实际情况选用。 - - - [数据Dump功能](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#dump) - - [使用Print算子打印相关信息](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#print) - - [使用可视化组件MindInsight](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/visualization_tutorials.html) - -### 精度调试工具 - -#### 自定义调试信息 - -- [Callback功能](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#callback) - - MindSpore已提供ModelCheckpoint、LossMonitor、SummaryCollector等Callback类用于保存模型参数、监控loss值、保存训练过程信息等功能,用户也可自定义Callback函数用于实现在每个epoch和step的开始和结束运行相关功能,具体示例可参考[自定义Callback](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#id3)。 - -- [MindSpore metrics功能](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#mindspore-metrics) - - 当训练结束后,可以使用metrics评估训练结果的好坏。MindSpore提供了多种metrics评估指标,如:`accuracy`、`loss`、`precision`、`recall`、`F1`等。 - -- [边训练边推理](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/evaluate_the_model_during_training.html) - - 可通过定义推理的CallBack函数的方式在训练时进行推理。 - -- [自定义训练循环](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/train.html#%E8%87%AA%E5%AE%9A%E4%B9%89%E8%AE%AD%E7%BB%83%E5%BE%AA%E7%8E%AF) - -- 自定义学习率 - - 
MindSpore提供了一些常见的动态学习率实现以及一些常见的具有自适应学习率调整功能的优化器,可参考API文档中的[Dynamic Learning Rate](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.nn.html#dynamic-learning-rate)和[Optimizer Functions](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.nn.html#optimizer-functions)。 - - 同时,用户可实现自定义的动态学习率,以WarmUpLR为例: - - ```python - class WarmUpLR(LearningRateSchedule): - def __init__(self, learning_rate, warmup_steps): - super(WarmUpLR, self).__init__() - ## check the input - if not isinstance(learning_rate, float): - raise TypeError("learning_rate must be float.") - validator.check_non_negative_float(learning_rate, "learning_rate", self.cls_name) - validator.check_positive_int(warmup_steps, 'warmup_steps', self.cls_name) - ## define the operators - self.warmup_steps = warmup_steps - self.learning_rate = learning_rate - self.min = ops.Minimum() - self.cast = ops.Cast() - - def construct(self, global_step): - ## calculate the lr - warmup_percent = self.cast(self.min(global_step, self.warmup_steps), mstype.float32)/ self.warmup_steps - return self.learning_rate * warmup_percent - ``` - -#### 使用MindOptimizer进行超参调优 - -MindSpore提供了MindOptimizer工具帮助用户进行更便捷的超参调优,详细示例和使用方法可参考[使用MindOptimizer进行超参调优](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/hyper_parameters_auto_tuning.html)。 - -#### loss值异常定位 - -对loss值为INF、NAN,或者loss值不收敛的情况,可从以下几种情况进行排查: - -1. 检查loss_scale溢出。 - - 在混合精度使用loss_scale的场景下,出现loss值为INF、NAN的情况,可能是scale值过大造成的,如果为动态loss_scale,则会自动调整scale值;如果为静态loss_scale,则需要减小scale值。 - - 如果`scale=1`的情况下依旧存在loss值为INF、NAN的情况,则网络中应该有算子出现溢出,需要进行进一步定位。 - -2. 
造成loss值异常的原因可能由输入数据异常、算子溢出、梯度消失、梯度爆炸等原因造成。 - - 排查算子溢出、梯度为0、权重异常、梯度消失和梯度爆炸等网络中间值情况,推荐使用[MindInsight调试器](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/debugger.html)设置相应检测点进行检测和调试,这种方式可较为全面地进行问题定位,可调试性也比较强。 - - 下面介绍几种简单的初步排查方法: - - - 观察权重值是否出现梯度爆炸的情况,也可通过加载保存的Checkpoint文件,打印权重值进行初步判断,打印权重值可参考以下代码: - - ```python - import mindspore - import numpy as np - ckpt = mindspore.load_checkpoint(ckpt_path) - for param in ckpt: - value = ckpt[param].data.asnumpy() - print(value) - ``` - - - 查看是否出现梯度为0的情况,也可以通过对比不同step保存的Checkpoint文件的权重值是否发生变化,进行初步判断,Checkpoint文件的权重值对比可参考以下代码: - - ```python - import mindspore - import numpy as np - ckpt1 = mindspore.load_checkpoint(ckpt1_path) - ckpt2 = mindspore.load_checkpoint(ckpt2_path) - sum = 0 - same = 0 - for param1,param2 in zip(ckpt1,ckpt2): - sum = sum + 1 - value1 = ckpt[param1].data.asnumpy() - value2 = ckpt[param2].data.asnumpy() - if value1 == value2: - print('same value: ', param1, value1) - same = same + 1 - print('All params num: ', sum) - print('same params num: ', same) - ``` - - - 查看权重值中是否出现NAN、INF异常数据,也可通过加载Checkpoint文件进行简单判断,一般来说,权重值中出现NAN、INF,则梯度计算中也出现了NAN、INF,可能有溢出情况发生,相关代码可参考: - - ```python - import mindspore - import numpy as np - ckpt = mindspore.load_checkpoint(ckpt_path) - for param in ckpt: - value = ckpt[param].data.asnumpy() - if np.isnan(value): - print('NAN value:', value) - if np.isinf(value): - print('INF value:', value) - ``` diff --git a/docs/migration_guide/source_zh_cn/overview.md b/docs/migration_guide/source_zh_cn/overview.md deleted file mode 100644 index 4f4163686dc6e52393603d04b4f97f1918dfe8ac..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/overview.md +++ /dev/null @@ -1,60 +0,0 @@ -# 概述 - - - -- [概述](#概述) - - [准备工作](#准备工作) - - [网络脚本分析](#网络脚本分析) - - [网络脚本开发](#网络脚本开发) - - [算子开发&调试](#算子开发调试) - - [网络调试](#网络调试) - - [精度&性能调优](#精度性能调优) - - [推理执行](#推理执行) - - [网络迁移调试实例](#网络迁移调试实例) - - [常见问题](#常见问题) - - - - - -本迁移指导包含从其他机器学习框架将神经网络迁移到MindSpore的完整步骤。 - 
-迁移流程的准备工作从必须的环境配置开始,然后分析网络脚本所包含的算子。MindSpore脚本开发会从数据处理代码开始,使用MindConverter进行网络构建得到迁移后的网络脚本,最后迁移推理执行脚本。构建完成后的优化过程包含缺失算子的开发与调试、网络性能与精度调优。迁移指导附迁移流程常见问题的解决方法和网络迁移完整样例。每章会提供具体样例供参考。具体迁移流程见下图: - -![flowchart](images/flowchart.PNG) - -## 准备工作 - -网络迁移首先需要安装最新版本的MindSpore,本章会详细描述安装过程与知识准备。知识准备包括对MindSpore组件ModelZoo和Hub的基本介绍,包含用途、场景与使用方法。此外还有云上训练的相关教程:使用ModelArts适配脚本,在OBS上传数据集,然后进行线上训练。 - -## 网络脚本分析 - -使用MindSpore进行网络脚本之前必须对原框架的脚本进行分析,这一章首先会介绍在MindSpore框架下评估算子方法,常见方法包括查询MindSpore与PyTorch/TensorFlow算子映射表等。需要分析的另一部分为语法评估与框架功能,包括静态图语法支持。 - -## 网络脚本开发 - -在网络脚本分析完成后就可以使用MindSpore对新网络进行开发了。这章从网络输入层的数据处理功能开始,重点介绍网络脚本构建流程:使用MindConverter或手写映射脚本,还可以借助PyTorch/TensorFlow自带的可视化工具构图。流程包含开发的最佳实践方法和常用自验项目,并列出了常见问题处理方法。 - -## 算子开发&调试 - -部分网络在迁移到MindSpore框架时会存在不支持的算子,除了向MindSpore开发者社区反馈之外,用户可以选择开发自定义MindSpore算子。本章包括进行算子开发的教程与示例,还有常见的调试技巧。 - -## 网络调试 - -在网络脚本开发完成并补齐算子后,需要对模型进行调试保证输出结果正确。这章介绍了常用的网络调试思路:单Step与多轮迭代调试。常用的调试方法包括使用PyNative模式比对子网输出结果,MindSpore也支持自定义调试信息。最后会提供常见问题的处理方法。 - -## 精度&性能调优 - -在网络脚本调试完成并可以顺利输出结果之后,我们需要对模型进行调试达到预期性能。MindSpore为开发者提供了Profiler性能调试工具,在算子性能、迭代性能、数据处理性能等方面提供了易用、丰富的调试功能,帮助用户快速定位、解决性能问题。调试教程分为Ascend平台与GPU平台调优,并提供了三个使用Profiler工具的例子。 - -## 推理执行 - -MindSpore可以基于训练好的模型,在不同的硬件平台上执行推理任务,还可以基于MindSpore Serving部署在线推理服务。基于模型的推理可以通过训练参数文件或者网络模型文件进行推理,基于MindSpore Serving的在线推理可以通过gRPC或者RESTful等接口访问推理服务。 - -## 网络迁移调试实例 - -本章包含了一个完整的网络迁移样例,使用ResNet50为例子,从对标网络的分析与复现开始,详细说明脚本开发与精度调试调优等步骤,最后列出了迁移过程中的常见问题与相应优化方法,如多机同步问题、框架性能问题等等。 - -## 常见问题 - -我们会在这里列出整个网络迁移过程的常见问题与相应解决方法。 diff --git a/docs/migration_guide/source_zh_cn/performance_optimization.md b/docs/migration_guide/source_zh_cn/performance_optimization.md deleted file mode 100644 index 12891cd17bc16f0cd4ccc30f0c98ed281ffd05db..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/performance_optimization.md +++ /dev/null @@ -1,92 +0,0 @@ -# 性能调试 - - - -- [性能调试](#性能调试) - - [快速入门](#快速入门) - - [案例一:迭代间隙过长](#案例一迭代间隙过长) - - [案例二:前后向运行时间长(算子计算时间过长)](#案例二前后向运行时间长算子计算时间过长) - - 
[案例三:前后向运行时间长(算子执行间隙过长)](#案例三前后向运行时间长算子执行间隙过长) - - [常见问题](#常见问题) - - [启动失败](#启动失败) - - - - - -Profiler为MindSpore提供了性能调优能力,在算子性能、迭代性能、数据处理性能等方面提供了易用、丰富的调试功能,帮助用户快速定位、解决性能问题。 - -本章将介绍性能调优的常见方法及案例,以及一些常见问题的处理。 - -## 快速入门 - -Profiler的功能介绍及使用说明请参见教程: - -[性能调试(Ascend)](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/performance_profiling_ascend.html) - -[性能调试(GPU)](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/performance_profiling_gpu.html) - -本节将通过三个典型案例介绍Profiler工具的常见使用方式。 - -### 案例一:迭代间隙过长 - -如您在MindInsight性能分析页面观察到,迭代轨迹中的迭代间隙过长,这通常可能说明数据处理过程中存在可以优化的性能点。 - -![long_step_interval](images/profiler_case1_long_step_interval.png) - -图1: 迭代轨迹中的迭代间隙过长 - -查看网页下方的数据处理详情卡片,我们观察到,主机队列为满的情况比例较少,可以初步判定是数据处理阶段存在性能问题。进入数据准备详情页查看具体问题。 - -![dataset_process_step_interval](images/profiler_case1_dataset_process_step_interval.png) - -图2:数据准备详情页面——迭代间隙 - -![data_processing](images/profiler_case1_data_processing.png) - -图3:数据准备详情页面——数据处理 - -通过观察数据处理的```算子间队列关系```,我们发现,Queue_3和Queue_2的使用比率较低,因此,可以判定是对应的数据处理算子(```ShuffleOp_3```和```BatchOp_2```)的性能还有优化空间。您可以根据这些信息调整训练脚本。 - -您也可以参考首页左侧小助手提供的建议信息,对训练脚本进行优化。 - -![profiler_helper](images/profiler_case1_helper.png) - -图4:小助手 - -### 案例二:前后向运行时间长(算子计算时间过长) - -当您发现迭代运行时间过长时,可以首先查看迭代轨迹,观察各部分的时间分布是否正常。 - -![long_fp_bp](images/profiler_case2_long_fpbp.png) - -图5:迭代轨迹中,前向后向运行时间过长 - -从上图的迭代轨迹中,我们发现前向和后向的运行时间偏长。打开算子耗时统计详情页面,进一步确定是否存在耗时过高的算子,判断算子执行时间上是否有优化空间。 - -![operator_details](images/profiler_case2_operator_details.png) - -图6:通过算子耗时详情页面寻找可优化算子 - -### 案例三:前后向运行时间长(算子执行间隙过长) - -在案例二中,我们介绍了由于算子执行时间较长导致迭代运行时间长的情况。除此之外,算子与算子间执行的时间间隙过大也会造成运行时间过长。 - -要确认算子的执行是否存在间隙过大的情况,我们可以观察时间线数据。 - -首先,在主页面右下角的时间线卡片点击`下载`按钮,对时间线数据进行下载。下载完成后,在谷歌浏览器中打开`chrome://tracing`,将文件上传或拖入网页中进行数据加载。 - -![timeline](images/profiler_case3_timeline.png) - -图7:通过时间线数据寻找可优化的算子执行间隙 - -在发现算子间执行存在较大间隙时,通常是与集合通信或AICPU算子产生的依赖还未解除,您可以调整脚本对该部分进行优化,进一步提升训练性能。 - -## 常见问题 - -### 启动失败 - -如您遇到启动失败的报错,请排查是否遇到了以下情况: - -- 系统内存已无可用空间或剩余可用空间过小。 -- 
MindSpore版本和昇腾AI处理器软件配套包版本不匹配。 diff --git a/docs/migration_guide/source_zh_cn/preparation.md b/docs/migration_guide/source_zh_cn/preparation.md deleted file mode 100644 index 1f2cb23332f38d97e58b21701e5cf092fb5d0f6a..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/preparation.md +++ /dev/null @@ -1,135 +0,0 @@ -# 准备工作 - - - -- [准备工作](#准备工作) - - [概述](#概述) - - [安装MindSpore](#安装mindspore) - - [使用pip安装](#使用pip安装) - - [使用源码安装](#使用源码安装) - - [设置环境变量(仅用于Ascend环境)](#设置环境变量仅用于ascend环境) - - [MindSpore验证](#mindspore验证) - - [知识准备](#知识准备) - - [MindSpore编程指南](#mindspore编程指南) - - [ModelZoo和Hub](#modelzoo和hub) - - [云上训练](#云上训练) - - - - - -## 概述 - -在进行网络开发或网络迁移工作之前,首先需要安装MindSpore,并掌握机器学习的相关知识。用户可以选择购买《深度学习与MindSpore实践》一书来了解相关知识,通过访问[MindSpore官网](https://www.mindspore.cn)了解MindSpore的用法。 - -## 安装MindSpore - -MindSpore支持在Ascend、CPU、GPU环境安装并使用,支持EulerOS-arm、CentOS-arm、CentOS-x86、Ubuntu-arm、Ubuntu-x86、Windows-X86操作系统,可访问[MindSpore安装页面](https://www.mindspore.cn/install)下载MindSpore安装包,并参考该网站指导完成安装。 - -### 使用pip安装 - -从官网下载MindSpore安装包后,得到`mindspore_{device}-{version}-cp37-cp37m-linux_{arch}.whl`文件,请使用pip安装。 - -```python -pip install mindspore_{device}-{version}-cp37-cp37m-linux_{arch}.whl -``` - -若环境已安装旧版本MindSpore,当前需要更新MindSpore,请在安装前卸载旧版本。 - -### 使用源码安装 - -访问[MindSpore代码仓](https://gitee.com/mindspore/mindspore),使用`git clone https://gitee.com/mindspore/mindspore.git`下载MindSpore源码,源码根目录下的`build.sh`文件提供了多个备选参数,用于选择定制MindSpore服务,一般通过以下命令编译MindSpore。 - -```python -cd mindspore -bash build.sh -e cpu -j{thread_num} # cpu环境 -bash build.sh -e ascend -j{thread_num} # Ascend环境 -bash build.sh -e gpu -j{thread_num} # gpu环境 -``` - -编译成功后,在`output`目录下会生成MindSpore安装包,然后使用 **pip安装** 或 **将当前目录添加到PYTHONPATH** 的方式使用源码编译的结果。 - -> 使用pip安装的优点在于能够快速上手,方便快捷。 -> -> 使用源码安装可以定制MindSpore服务,并可以切换到任意commit_id编译并使用MindSpore。 - -### 设置环境变量(仅用于Ascend环境) - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. 
-export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on -``` - -### MindSpore验证 - -若以下命令能正常执行成功并退出,说明安装成功。 - -对于CPU环境: - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="CPU") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -对于Ascend环境: - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="Ascend") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -对于GPU环境: - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="GPU") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -## 知识准备 - -### MindSpore编程指南 - 
-用户可以通过参考[MindSpore教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/index.html)了解如何使用MindSpore进行训练、调试、调优、推理;也可以通过参考[MindSpore编程指南](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/index.html)了解MindSpore的基本组成和常用编程方法;也可以通过参考[MindSpore Python API](https://www.mindspore.cn/doc/api_python/zh-CN/master/index.html)详细了解MindSpore各接口的相关信息,以便于用户能够更好地使用。 - -### ModelZoo和Hub - -[ModelZoo](https://gitee.com/mindspore/mindspore/tree/master/model_zoo)是MindSpore与社区共同提供的深度优化的模型集市,向开发者提供了深度优化的模型,以便于生态中的小伙伴可以方便地基于ModelZoo中的模型进行个性化开发。当前已经覆盖了机器视觉、自然语言处理、语音、推荐系统等多个领域的主流模型。 - -[MindSpore Hub](https://www.mindspore.cn/resources/hub)是存放MindSpore官方或者第三方开发者提供的预训练模型的平台。它向应用开发者提供了简单易用的模型加载和微调API,使得用户可以基于预训练模型进行推理或者微调,并部署到自己的应用中。用户也可以将自己训练好的模型按照指定的步骤[发布模型](https://mindspore.cn/tutorial/training/zh-CN/master/use/publish_model.html)到MindSpore Hub中,供其他用户下载和使用。 - -### 云上训练 - -ModelArts是华为云提供的面向AI开发者的一站式开发平台,集成了昇腾AI处理器资源池,用户可以在该平台下体验MindSpore。相关文档可参考[云上使用MindSpore](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/use_on_the_cloud.html)和[AI开发平台ModelArts](https://support.huaweicloud.com/wtsnew-modelarts/index.html)。 diff --git a/docs/migration_guide/source_zh_cn/sample_code.md b/docs/migration_guide/source_zh_cn/sample_code.md deleted file mode 100644 index 124440bdab1e96deaca0f630faee84136a19fac6..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/sample_code.md +++ /dev/null @@ -1,969 +0,0 @@ -# 网络迁移调试实例 - - - -- [网络迁移调试实例](#网络迁移调试实例) - - [对标网络分析与复现](#对标网络分析与复现) - - [确定迁移目标](#确定迁移目标) - - [复现迁移目标](#复现迁移目标) - - [复现单Step结果](#复现单step结果) - - [脚本开发](#脚本开发) - - [脚本开发前分析](#脚本开发前分析) - - [数据预处理](#数据预处理) - - [子网开发](#子网开发) - - [其他模块](#其他模块) - - [超参对比](#超参对比) - - [精度调试](#精度调试) - - [训练](#训练) - - [单机训练](#单机训练) - - [多机训练精度调优](#多机训练精度调优) - - [性能调优](#性能调优) - - [分析Profiling数据](#分析profiling数据) - - [常见问题及相应优化方法](#常见问题及相应优化方法) - - [MindData性能问题](#minddata性能问题) - - [多机同步性能问题](#多机同步性能问题) - - [算子性能问题](#算子性能问题) - - [框架性能问题](#框架性能问题) - - 
[其他通用优化方法](#其他通用优化方法) - - - - - -本章将结合用例来介绍网络迁移的基本步骤、常用工具、定位问题的思路及解决方法。 - -这里以经典网络 ResNet50 为例,结合代码来详细介绍网络迁移方法。 - -## 对标网络分析与复现 - -### 确定迁移目标 - -网络迁移的第一步是确定迁移目标,即先找到一个合适的、可达成的标准,通常一个深度神经网络的交付目标包括以下四个部分: - -1. 网络实现:这是迁移目标中最基本的部分,有时同一个神经网络有不同的版本、同一个版本有不同的实现方式或者在相同的神经网络下使用不同的超参,这些差别会对最终的收敛精度和性能造成一定影响。通常,我们以神经网络作者本身的实现为准,也可以参考不同框架(例如TensorFlow、PyTorch等)的官方实现或其他主流开源工具箱(例如 MMDetection)。 -2. 数据集:相同的神经网络和参数,在不同的数据集上往往差别很大,因此我们需要确认迁移网络所使用的数据集。一些数据集的数据内容会频繁更新,确定数据集时需要注意数据集的版本、训练数据和测试数据划分比例等问题。 -3. 收敛精度:不同的框架、不同的GPU型号、是否为分布式训练等因素会对精度有所影响,在确定迁移目标时需要分析清楚对标的框架、硬件等信息。 -4. 训练性能:和收敛精度相同,训练性能主要受网络脚本、框架性能、GPU硬件本身和是否为分布式训练等因素影响。 - -#### ResNet50 迁移示例 - -ResNet50 是 CV 中经典的深度神经网络,有较多开发者关注和复现,而 PyTorch 的语法和 MindSpore 较为相似,因此,我们选择 PyTorch 作为对标框架。 - -PyTorch 官方实现脚本可参考 [torchvision model](https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py) 或者 [英伟达 PyTorch 实现脚本](https://github.com/NVIDIA/DeepLearningExamples/tree/master/PyTorch/Classification/ConvNets/resnet50v1.5),其中包括了主流 ResNet 系列网络的实现(ResNet18、ResNet34、ResNet50、ResNet101、ResNet152)。ResNet50 所使用的数据集为 ImageNet2012,收敛精度可参考 [PyTorch Hub](https://pytorch.org/hub/pytorch_vision_resnet/#model-description)。 - -开发者可以基于 PyTorch 的 ResNet50 脚本直接在对标的硬件环境下运行,然后计算出性能数据,也可以参考同硬件环境下的官方数据。例如,当我们对标 Nvidia DGX-1 32GB(8x V100 32GB) 硬件时,可参考 [Nvidia 官方发布的 ResNet50 性能数据](https://github.com/NVIDIA/DeepLearningExamples/tree/master/PyTorch/Classification/ConvNets/resnet50v1.5#training-performance-nvidia-dgx-1-32gb-8x-v100-32gb)。 - -### 复现迁移目标 - -网络迁移目标确定完成后,接下来要做的就是复现指标。复现标杆数据对后续精度和性能调优十分重要,当我们在 MindSpore 开发的网络和对标脚本有精度/性能差距时,很多时候都是以标杆数据作为基准,一步一步地分析迁移脚本和对标脚本的差别,如果对标脚本无法复现指标,那我们以此为基准开发的 MindSpore 脚本就很难达到迁移目标。复现迁移指标时,不仅要复现训练阶段,推理阶段也同样重要。 - -需要注意的是,对于部分网络,使用相同的硬件环境和脚本,最终达到的收敛精度和性能也可能与原作者提出的结果有细微差别,这属于正常的波动范围,我们在迁移网络时要把这种波动考虑在内。 - -### 复现单Step结果 - -复现单 Step 结果主要是为了接下来的脚本开发和网络调优。对于复杂的神经网络,完整的训练需要耗时几天甚至几个月,如果仅以最终的训练精度和结果做参考,会极大地降低开发效率。因此,我们需要提前复现单 Step 的运行结果,即获取只执行第一个 Step 后网络的状态(该状态是经历了数据预处理、权重初始化、正向计算、loss 
计算、反向梯度计算和优化器更新之后的结果,覆盖了网络训练的全部环节),并以此为对照展开后续的开发工作。 - -## 脚本开发 - -### 脚本开发前分析 - -在开始真正的开发脚本前,需要进行对标脚本分析。脚本分析的目的是识别出 MindSpore 与对标框架相比缺失的算子或功能。具体方法可以参考[脚本评估教程](https://gitee.com/mindspore/docs/blob/master/docs/migration_guide/source_zh_cn/script_analysis.md#)。 - -MindSpore 已支持绝大多数常用 [功能](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/index.html) 和 [算子](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/operator_list.html)。MindSpore 既支持动态图(PyNative)模式,又支持静态图(Graph)模式,动态图模式灵活、易于调试,因此动态图模式主要用于网络调试,静态图模式性能好,主要用于整网训练,在分析缺失算子和功能时,要分别分析这两种模式。 - -如果发现有缺失的算子和功能,首先可考虑基于当前算子或功能来组合出缺失的算子和功能,对于主流的 CV 和 NLP 类网络,新的缺失算子一般都可以通过组合已有算子的方式来解决。 - -组合的算子可以通过 Cell 的方式实现,在 MindSpore 中,[nn类算子](https://gitee.com/mindspore/mindspore/tree/master/mindspore/nn) 就是通过这种方式实现的。例如下面的 `ReduceSumExp` 算子,它是由已有的`Exp`、`ReduceSum`、`Log`小算子组合而成: - -```python -class ReduceLogSumExp(Cell): - def __init__(self, axis, keep_dims=False): - super(ReduceLogSumExp, self).__init__() - validator.check_value_type('axis', axis, [int, list, tuple], self.cls_name) - validator.check_value_type('keep_dims', keep_dims, [bool], self.cls_name) - self.axis = axis - self.exp = ops.Exp() - self.sum = ops.ReduceSum(keep_dims) - self.log = ops.Log() - - def construct(self, x): - exp = self.exp(x) - sumexp = self.sum(exp, self.axis) - logsumexp = self.log(sumexp) - return logsumexp -``` - -如果缺失的功能和算子无法规避,或者组合算子性能较差,严重影响网络的训练和推理,可联系 [MindSpore社区](https://gitee.com/mindspore/mindspore/issues) 反馈,我们会有专门的工作人员为您解决。 - -#### ResNet50 迁移示例 - -以下为 ResNet 系列网络结构: - -![image-20210318152607548](images/image-20210318152607548.png) - -PyTorch 实现的 ResNet50 脚本参考 [torchvision model](https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py)。 - -我们可以基于算子和功能两个方面分析: - -- 算子分析 - -| PyTorch 使用算子 | MindSpore 对应算子 | 是否支持该算子所需功能 | -| ---------------------- | ------------------ | ---------------------- | -| `nn.Conv2D` | `nn.Conv2d` | 是 | -| `nn.BatchNorm2D` | `nn.BatchNom2d` | 是 | -| `nn.ReLU` | `nn.ReLU` | 是 | -| 
`nn.MaxPool2D` | `nn.MaxPool2d` | 是 | -| `nn.AdaptiveAvgPool2D` | 无 | 不支持 | -| `nn.Linear` | `nn.Dense` | 是 | -| `torch.flatten` | `nn.Flatten` | 是 | - -注:对于 PyTorch 脚本,MindSpore 提供了 [PyTorch 算子映射工具](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/index.html#operator_api),可直接查询该算子是否支持。 - -- 功能分析 - -| Pytorch 使用功能 | MindSpore 对应功能 | -| ------------------------- | ------------------------------------- | -| `nn.init.kaiming_normal_` | `initializer(init='HeNormal')` | -| `nn.init.constant_` | `initializer(init='Constant')` | -| `nn.Sequential` | `nn.SequentialCell` | -| `nn.Module` | `nn.Cell` | -| `nn.distibuted` | `context.set_auto_parallel_context` | -| `torch.optim.SGD` | `nn.optim.SGD` or `nn.optim.Momentum` | - -(由于MindSpore 和 PyTorch 在接口设计上不完全一致,这里仅列出关键功能的比对) - -经过算子和功能分析,我们发现,相比 PyTorch,MindSpore 功能上没有缺失,但算子上缺失 `nn.AdaptiveAvgPool` ,这时我们需要更一步的分析,该缺失算子是否有可替代方案。在 ResNet50 网络中,输入的图片 shape 是固定的,统一为 `N,3,224,224`,其中 N 为 batch size,3 为通道的数量,224 和 224 分别为图片的宽和高,网络中改变图片大小的算子有 `Conv2d` 和 `Maxpool2d`,这两个算子对shape 的影响是固定的,因此,`nn.AdaptiveAvgPool2D` 的输入和输出 shape 是可以提前确定的,只要我们计算出 `nn.AdaptiveAvgPool2D` 的输入和输出 shape,就可以通过 `nn.AvgPool` 或 `nn.ReduceMean` 来实现,所以该算子的缺失是可替代的,并不影响网络的训练。 - -### 数据预处理 - -要理解一个神经网络的实现,首先要清楚网络的输入数据,因此,数据预处理是脚本开发的第一个环节。MindSpore 设计了一个专门进行数据处理的模块 - MindData,使用 MindData 进行数据预处理主要包括以下几个步骤: - -1. 传入数据路径,读取数据文件。 -2. 解析数据。 -3. 数据处理(如常见数据切分、shuffle、数据增强等操作)。 -4. 
数据分发(以 batch_size 为单位分发数据,分布式训练涉及多机分发)。 - -在读取和解析数据过程中,MindSpore 提供了一种更友好的数据格式 - [MindRecord](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/convert_dataset.html)。用户可以将常规格式的数据集转换为 MindSpore数据格式,即 MindRecord,从而方便地加载到 MindSpore 中进行训练。同时,MindSpore 在部分场景做了性能优化,使用 MindRecord 数据格式可以获得更好的性能。 - -数据处理通常是数据准备中最耗时的阶段,大部分对数据的操作都被包含在这一步骤里,例如 CV 类网络中的Resize、Rescale、Crop 等操作。MindSpore 提供了一套常用的数据处理集成接口,用户可以不用自己实现而直接调用这些接口,这些集成接口不仅可以提升用户的易用性,还可以提升数据预处理的性能,减少训练过程中数据准备的耗时。具体可以参考[数据预处理教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/optimize_data_processing.html)。 - -在数据分发环节,MindData 提供了极为简洁的 API,可以通过直接调用 batch、repeat 等操作完成数据的 batch 组合、重复等操作。 - -当完成以上4个步骤后,我们理论上使用 MindSpore 脚本和对标脚本处理数据集后,可以得到完全相同的数据(如果有引入随机情况的操作需要去除)。 - -#### ResNet50 迁移示例 - -ResNet50 网络使用的是 ImageNet2012 数据集,其数据预处理的 PyTorch 代码如下: - -```python -# sample execution (requires torchvision) -from PIL import Image -from torchvision import transforms -input_image = Image.open(filename) -preprocess = transforms.Compose([ - transforms.Resize(256), - transforms.CenterCrop(224), - transforms.ToTensor(), - transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]), -]) -input_tensor = preprocess(input_image) -input_batch = input_tensor.unsqueeze(0) # create a mini-batch as expected by the model -``` - -通过观察以上代码,我们发现 ResNet50 的数据预处理主要做了 Resize、CenterCrop、Normalize 操作,在 MindSpore 中实现这些操作有两种方式,一是使用 MindSpore 的数据处理模块 MindData 来调用已封装好的数据预处理接口,二是通过 [自定义数据集](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html?highlight=data%20generator#%E8%87%AA%E5%AE%9A%E4%B9%89%E6%95%B0%E6%8D%AE%E9%9B%86%E5%8A%A0%E8%BD%BD) 进行加载。这里更建议开发者选择第一种方式,这样不仅可以减少重复代码的开发,减少错误的引入,还可以得到更好的数据处理性能。更多关于MindData数据处理的介绍,可参考 [编程指南](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/index.html)中的数据管道部分。 - -以下是基于 MindData 开发的数据处理函数: - -```python -def create_dataset(dataset_path, do_train, repeat_num=1, batch_size=32, target="Ascend", distribute=False): - # device number: 
total number of devices of training - # rank_id: the sequence of current device of training - device_num, rank_id = _get_rank_info() - if distribute: - init() - rank_id = get_rank() - device_num = get_group_size() - else: - device_num = 1 - if device_num == 1: - # standalone training - # num_paralel_workers: parallel degree of data process - # shuffle: whether shuffle data or not - data_set = ds.ImageFolderDataset(dataset_path, num_parallel_workers=8, shuffle=True) - else: - # distributing traing (meaning of num_parallel_workers and shuffle is same as above) - # num_shards: total number devices for distribute training, which equals number shard of data - # shard_id: the sequence of current device in all distribute training devices, which equals the data shard sequence for current device - data_set = ds.ImageFolderDataset(dataset_path, num_parallel_workers=8, shuffle=True, num_shards=device_num, shard_id=rank) - - # define data operations - trans = [] - if do_train: - trans += [ - C.RandomHorizontalFlip(prob=0.5) - ] - - trans += [ - C.Resize((256, 256)), - C.CenterCrop(224), - C.Rescale(1.0 / 255.0, 0.0), - C.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) - C.HWC2CHW() - ] - - type_cast_op = C2.TypeCast(mstype.int32) - - # call data operations by map - data_set = data_set.map(operations=type_cast_op, input_columns="label", num_parallel_workers=8) - data_set = data_set.map(operations=trans, input_columns="image", num_parallel_workers=8) - - # batchinng data - data_set = data_set.batch(batch_size, drop_remainder=True) - # repeat data, usually repeat_num equals epoch_size - data_set = data_set.repeat(repeat_num) - - return data_set -``` - -在以上代码中我们可以发现,针对常用的经典数据集(如 ImageNet2012),MindData 也为我们提供了 `ImageFolderDataset` 接口直接读取原始数据,省去了手写代码读取文件的工作量。需要注意的是,单机训练和多机分布式训练时 MindData 创建数据集的参数是不一样的,分布式训练需要额外指定 `num_shard` 和 `shard_id` 两个参数。 - -### 子网开发 - -通常子网开发包含两个部分:训练子网和 loss 
子网,其中训练子网可根据网络的复杂程度决定是否继续划分。直接开发一个大型的神经网络脚本可能会让我们无从下手,因此,我们可以将网络中不同模块或子模块作为一个个子网抽离出来单独开发,这样可以保证各个子网并行开发,互相不受干扰。子网开发完成后,还可以固定子网输入和权重,与对标脚本的子网代码形成对比,作为后续网络开发的测试用例。 - -在精度调优阶段,我们常常会遇到精度不达标的情况,这时我们会重新审视已开发的脚本并逐行排查。而使用子网方式开发脚本并形成测试用例可以高效地帮助我们排除怀疑点,从几十个算子里寻找可疑点,要比从成百上千个算子中找可疑点轻松得多,尤其是在很多时候,同一个子网会被重复调用多次,当我们以子网为单位排查时,可以减少很多工作量。 - -#### ResNet50 迁移示例 - -分析 ResNet50 网络代码,主要可以分成以下几个子网: - -- conv1x1、conv3x3:定义了不同 kernel_size 的卷积。 -- BasicBlock:ResNet 系列网络中 ResNet18 和 ResNet34 的最小子网,由 Conv、BN、ReLU 和 残差组成。 -- BottleNeck:ResNet 系列网络中 ResNet50、ResNet101 和 ResNet152 的最小子网,相比 BasicBlock 多了一层 Conv、BN 和 ReLU的结构,下采样的卷积位置也做了改变。 -- ResNet:封装了 BasiclBlock、BottleNeck 和 Layer 结构的网络,传入不同的参数即可构造不同的ResNet系列网络。在该结构中,也使用了一些 PyTorch 自定义的初始化功能。 - -基于以上子网划分,我们结合 MindSpore 语法,重新完成上述开发。 - -重新开发权重初始化(也可以直接使用 [MindSpore 已定义的权重初始化方法](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.common.initializer.html?highlight=common%20initializer#)): - -```python -def _conv_variance_scaling_initializer(in_channel, out_channel, kernel_size): - fan_in = in_channel * kernel_size * kernel_size - scale = 1.0 - scale /= max(1., fan_in) - stddev = (scale ** 0.5) / .87962566103423978 - mu, sigma = 0, stddev - weight = truncnorm(-2, 2, loc=mu, scale=sigma).rvs(out_channel * in_channel * kernel_size * kernel_size) - weight = np.reshape(weight, (out_channel, in_channel, kernel_size, kernel_size)) - return Tensor(weight, dtype=mstype.float32) - - -def _weight_variable(shape, factor=0.01): - init_value = np.random.randn(*shape).astype(np.float32) * factor - return Tensor(init_value) - - -def calculate_gain(nonlinearity, param=None): - """calculate_gain""" - linear_fns = ['linear', 'conv1d', 'conv2d', 'conv3d', 'conv_transpose1d', 'conv_transpose2d', 'conv_transpose3d'] - res = 0 - if nonlinearity in linear_fns or nonlinearity == 'sigmoid': - res = 1 - elif nonlinearity == 'tanh': - res = 5.0 / 3 - elif nonlinearity == 'relu': - res = math.sqrt(2.0) - elif nonlinearity == 'leaky_relu': - if param is 
None: - negative_slope = 0.01 - elif not isinstance(param, bool) and isinstance(param, int) or isinstance(param, float): - # True/False are instances of int, hence check above - negative_slope = param - else: - raise ValueError("negative_slope {} not a valid number".format(param)) - res = math.sqrt(2.0 / (1 + negative_slope ** 2)) - else: - raise ValueError("Unsupported nonlinearity {}".format(nonlinearity)) - return res - - -def _calculate_fan_in_and_fan_out(tensor): - """_calculate_fan_in_and_fan_out""" - dimensions = len(tensor) - if dimensions < 2: - raise ValueError("Fan in and fan out can not be computed for tensor with fewer than 2 dimensions") - if dimensions == 2: # Linear - fan_in = tensor[1] - fan_out = tensor[0] - else: - num_input_fmaps = tensor[1] - num_output_fmaps = tensor[0] - receptive_field_size = 1 - if dimensions > 2: - receptive_field_size = tensor[2] * tensor[3] - fan_in = num_input_fmaps * receptive_field_size - fan_out = num_output_fmaps * receptive_field_size - return fan_in, fan_out - - -def _calculate_correct_fan(tensor, mode): - mode = mode.lower() - valid_modes = ['fan_in', 'fan_out'] - if mode not in valid_modes: - raise ValueError("Mode {} not supported, please use one of {}".format(mode, valid_modes)) - fan_in, fan_out = _calculate_fan_in_and_fan_out(tensor) - return fan_in if mode == 'fan_in' else fan_out - - -def kaiming_normal(inputs_shape, a=0, mode='fan_in', nonlinearity='leaky_relu'): - fan = _calculate_correct_fan(inputs_shape, mode) - gain = calculate_gain(nonlinearity, a) - std = gain / math.sqrt(fan) - return np.random.normal(0, std, size=inputs_shape).astype(np.float32) -``` - -重新开发卷积核为 3x3 和 1x1 的卷积算子: - -```python -# conv3x3 and conv1x1 -def _conv3x3(in_channel, out_channel, stride=1): - weight_shape = (out_channel, in_channel, 3, 3) - # unlike pytorch, weight initialization is introduced when define conv2d - weight = Tensor(kaiming_normal(weight_shape, mode="fan_out", nonlinearity='relu')) - return 
nn.Conv2d(in_channel, out_channel, kernel_size=3, stride=stride, padding=0, pad_mode='same', weight_init=weight) - - -def _conv1x1(in_channel, out_channel, stride=1): - # unlike pytorch, weight initialization is introduced when define conv2d - weight_shape = (out_channel, in_channel, 1, 1) - weight = Tensor(kaiming_normal(weight_shape, mode="fan_out", nonlinearity='relu')) - return nn.Conv2d(in_channel, out_channel, kernel_size=1, stride=stride, - padding=0, pad_mode='same', weight_init=weight) -``` - -重新开发 BasicBlock 和 BottleNeck: - -```python -class BasicBlock(nn.Cell): - def __init__(self, - in_channel, - out_channel, - stride=1): - super(BasicBlock, self).__init__() - self.conv1 = _conv3x3(in_channel, out_channel, stride=stride) - self.bn1d = _bn(out_channel) - self.conv2 = _conv3x3(out_channel, out_channel, stride=1) - self.bn2d = _bn(out_channel) - self.relu = nn.ReLU() - - self.down_sample = False - if stride != 1 or in_channel != out_channel: - self.down_sample = True - - self.down_sample_layer = None - if self.down_sample: - self.down_sample_layer = nn.SequentialCell([_conv1x1(in_channel, out_channel, stride,), _bn(out_channel)]) - - def construct(self, x): - identity = x - - out = self.conv1(x) - out = self.bn1d(out) - out = self.relu(out) - - out = self.conv2(out) - out = self.bn2d(out) - - if self.down_sample: - identity = self.down_sample_layer(identity) - - out = out + identity - out = self.relu(out) - - return out - - -class BottleNeck(nn.Cell): - expansion = 4 - - def __init__(self, - in_channel, - out_channel, - stride=1): - super(BottleNeck, self).__init__() - self.stride = stride - channel = out_channel // self.expansion - self.conv1 = _conv1x1(in_channel, channel, stride=1) - self.bn1 = _bn(channel) - self.conv2 = _conv3x3(channel, channel, stride=stride) - self.bn2 = _bn(channel) - self.conv3 = _conv1x1(channel, out_channel, stride=1) - self.bn3 = _bn_last(out_channel) - self.relu = nn.ReLU() - self.down_sample = False - if stride != 1 or 
in_channel != out_channel: - self.down_sample = True - self.down_sample_layer = None - if self.down_sample: - self.down_sample_layer = nn.SequentialCell([_conv1x1(in_channel, out_channel, stride), _bn(out_channel)]) - def construct(self, x): - identity = x - out = self.conv1(x) - out = self.bn1(out) - out = self.relu(out) - out = self.conv2(out) - out = self.bn2(out) - out = self.relu(out) - out = self.conv3(out) - out = self.bn3(out) - if self.down_sample: - identity = self.down_sample_layer(identity) - - out = out + identity - out = self.relu(out) - return out -``` - -重新开发 ResNet 系列整网: - -```python -class ResNet(nn.Cell): - def __init__(self, - block, - layer_nums, - in_channels, - out_channels, - strides, - num_classes): - super(ResNet, self).__init__() - - if not len(layer_nums) == len(in_channels) == len(out_channels) == 4: - raise ValueError("the length of layer_num, in_channels, out_channels list must be 4!") - - self.conv1 = _conv7x7(3, 64, stride=2) - self.bn1 = _bn(64) - self.relu = ops.ReLU() - self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode="same") - - self.layer1 = self._make_layer(block, - layer_nums[0], - in_channel=in_channels[0], - out_channel=out_channels[0], - stride=strides[0]) - self.layer2 = self._make_layer(block, - layer_nums[1], - in_channel=in_channels[1], - out_channel=out_channels[1], - stride=strides[1]) - self.layer3 = self._make_layer(block, - layer_nums[2], - in_channel=in_channels[2], - out_channel=out_channels[2], - stride=strides[2]) - self.layer4 = self._make_layer(block, - layer_nums[3], - in_channel=in_channels[3], - out_channel=out_channels[3], - stride=strides[3]) - - self.mean = ops.ReduceMean(keep_dims=True) - self.flatten = nn.Flatten() - self.end_point = _fc(out_channels[3], num_classes) - - def _make_layer(self, block, layer_num, in_channel, out_channel, stride): - layers = [] - - resnet_block = block(in_channel, out_channel, stride=stride) - layers.append(resnet_block) - for _ in range(1, layer_num): - 
resnet_block = block(out_channel, out_channel, stride=1) - layers.append(resnet_block) - return nn.SequentialCell(layers) - - def construct(self, x): - x = self.conv1(x) - x = self.bn1(x) - x = self.relu(x) - c1 = self.maxpool(x) - - c2 = self.layer1(c1) - c3 = self.layer2(c2) - c4 = self.layer3(c3) - c5 = self.layer4(c4) - - out = self.mean(c5, (2, 3)) - out = self.flatten(out) - out = self.end_point(out) - - return out -``` - -传入 ResNet50 层数信息,构造 ResNet50 整网: - -```python -def resnet50(class_num=1000): - return ResNet(ResidualBlock, - [3, 4, 6, 3], - [64, 256, 512, 1024], - [256, 512, 1024, 2048], - [1, 2, 2, 2], - class_num) -``` - -经过以上步骤,基于 MindSpore 的 ResNet50 整网结构和各子网结构已经开发完成,接下来就是开发其他模块。 - -### 其他模块 - -其他模块通常包括:反向构造、梯度裁剪、优化器、学习率生成等,这些模块要么本身结构单一,要么依赖已开发完成的子网结果才能和对标脚本形成对比。相比子网开发,这些模块的脚本开发难度更小一些。 - -#### ResNet50 迁移示例 - -关于其他训练配置,可以参考 [英伟达训练 ResNet50 的配置信息](https://github.com/NVIDIA/DeepLearningExamples/tree/master/PyTorch/Classification/ConvNets/resnet50v1.5#default-configuration),ResNet50 的训练主要涉及以下几项: - -- 使用了 SGD + Momentum 优化器 -- 使用了 WeightDecay 功能(但 BatchNorm 的 gamma 和 bias 没有使用) -- 使用了 cosine LR schedule -- 使用了 Label Smoothing - -实现 cosine LR schedule: - -```python -def _generate_cosine_lr(lr_init, lr_end, lr_max, total_steps, warmup_steps): - """ - Applies cosine decay to generate learning rate array. - - Args: - lr_init(float): init learning rate. - lr_end(float): end learning rate - lr_max(float): max learning rate. - total_steps(int): all steps in training. - warmup_steps(int): all steps in warmup epochs. - - Returns: - np.array, learning rate array. 
- """ - decay_steps = total_steps - warmup_steps - lr_each_step = [] - for i in range(total_steps): - if i < warmup_steps: - lr_inc = (float(lr_max) - float(lr_init)) / float(warmup_steps) - lr = float(lr_init) + lr_inc * (i + 1) - else: - linear_decay = (total_steps - i) / decay_steps - cosine_decay = 0.5 * (1 + math.cos(math.pi * 2 * 0.47 * i / decay_steps)) - decayed = linear_decay * cosine_decay + 0.00001 - lr = lr_max * decayed - lr_each_step.append(lr) - return lr_each_step -``` - -实现带 Momentum 的 SGD 优化器,除 BN 的 gamma 和 bias 外,其他权重应用 WeightDecay : - -```python -from mindspore.nn.optim import Momentum - -net = resnet50(class_num=1000) -lr = _generate_cosine_lr() -momentum = 0.875 -weight_decay = 1/32768 - -decayed_params = [] -no_decayed_params = [] -for param in net.trainable_params(): - if 'beta' not in param.name and 'gamma' not in param.name and 'bias' not in param.name: - decayed_params.append(param) - else: - no_decayed_params.append(param) - -group_params = [{'params': decayed_params, 'weight_decay': weight_decay}, - {'params': no_decayed_params}, - {'order_params': net.trainable_params()}] -opt = Momentum(group_params, lr momentum) -``` - -定义 Loss 函数和实现 Label Smoothing: - -```python -import mindspore.nn as nn -from mindspore import Tensor -from mindspore.common import dtype as mstype -from mindspore.nn.loss.loss import _Loss -import mindspore.ops as ops - - -# define cross entropy loss -class CrossEntropySmooth(_Loss): - """CrossEntropy""" - def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000): - super(CrossEntropySmooth, self).__init__() - self.onehot = ops.OneHot() - self.sparse = sparse - self.on_value = Tensor(1.0 - smooth_factor, mstype.float32) - self.off_value = Tensor(1.0 * smooth_factor / (num_classes - 1), mstype.float32) - self.ce = nn.SoftmaxCrossEntropyWithLogits(reduction=reduction) - - def construct(self, logit, label): - if self.sparse: - label = self.onehot(label, ops.shape(logit)[1], self.on_value, 
self.off_value) - loss = self.ce(logit, label) - return loss - -# define loss with label smooth -label_smooth_factor = 0.1 -loss = CrossEntropySmooth(sparse=True, reduction="mean",smooth_factor=label_smooth_factor, num_classes=1000) -``` - -### 超参对比 - -当各子网已经打通,最后一步要做的是和对标脚本对齐超参,保证网络结构一致。需要注意的是,在不同的框架上,同一套超参可能有不同的精度表现,在迁移网络时不一定要严格按照对标脚本的超参进行设置,可在不改变网络结构的情况下进行微调。 - -#### ResNet50 迁移示例 - -在 ResNet50 的训练中,主要涉及以下超参: - -- momentum =0.875 -- batch_size = 256 -- learning rate = 0.256 -- learing rate schedule = cosine -- weight_decay = 1/32768 -- label_smooth = 0.1 -- epoch size = 90 - -## 流程打通 - -经过以上步骤后,我们已经开发完了网络迁移的必备脚本,接下来就是打通单机训练、分布式训练、推理流程。 - -### 单机训练 - -#### ResNet50 迁移示例 - -为了更好的阅读代码,建议按照以下结构组织脚本: - -```text -. -└──resnet - ├── README.md - ├── scripts - ├── run_distribute_train.sh # 启动Ascend分布式训练(8卡) - ├── run_eval.sh # 启动Ascend评估 - ├── run_standalone_train.sh # 启动Ascend单机训练(单卡) - ├── src - ├── config.py # 参数配置 - ├── dataset.py # 数据预处理 - ├── CrossEntropySmooth.py # ImageNet2012数据集的损失定义 - ├── lr_generator.py # 生成每个步骤的学习率 - └── resnet.py # ResNet骨干网络 - ├── eval.py # 评估网络 - └── train.py # 训练网络 -``` - -其中 train.py 定义如下: - -```python -import os -import argparse -import ast -from mindspore import context -from mindspore import Tensor -from mindspore.nn.optim import Momentum -from mindspore.train.model import Model -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor -from mindspore.train.loss_scale_manager import FixedLossScaleManager -from mindspore.train.serialization import load_checkpoint, load_param_into_net -from mindspore.communication.management import init, get_rank, get_group_size -from mindspore.common import set_seed -import mindspore.nn as nn -import mindspore.common.initializer as weight_init -from src.lr_generator import get_lr -from src.CrossEntropySmooth import CrossEntropySmooth -from src.config import cfg - -parser = argparse.ArgumentParser(description='Image classification') -parser.add_argument('--net', 
type=str, default=None, help='Resnet Model, resnet50') -parser.add_argument('--dataset', type=str, default=None, help='Dataset, imagenet2012') -parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') -args_opt = parser.parse_args() -set_seed(1) - -from src.resnet import resnet50 as resnet -from src.config import config -from src.dataset import create_dataset as create_dataset - -if __name__ == '__main__': - ckpt_save_dir = config.save_checkpoint_path - - # init context - context.set_context(mode=context.GRAPH_MODE, save_graphs=False) - # create dataset - dataset = create_dataset(dataset_path=args_opt.dataset_path, do_train=True, repeat_num=1, - batch_size=config.batch_size) - step_size = dataset.get_dataset_size() - - # define net - net = resnet(class_num=config.class_num) - for _, cell in net.cells_and_names(): - if isinstance(cell, nn.Conv2d): - cell.weight.set_data(weight_init.initializer(weight_init.XavierUniform(), - cell.weight.shape, - cell.weight.dtype)) - if isinstance(cell, nn.Dense): - cell.weight.set_data(weight_init.initializer(weight_init.TruncatedNormal(), - cell.weight.shape, - cell.weight.dtype)) - lr = get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max, - warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, - steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode) - lr = Tensor(lr) - - # define opt - decayed_params = [] - no_decayed_params = [] - for param in net.trainable_params(): - if 'beta' not in param.name and 'gamma' not in param.name and 'bias' not in param.name: - decayed_params.append(param) - else: - no_decayed_params.append(param) - - group_params = [{'params': decayed_params, 'weight_decay': config.weight_decay}, - {'params': no_decayed_params}, - {'order_params': net.trainable_params()}] - opt = Momentum(group_params, lr, config.momentum, loss_scale=config.loss_scale) - # define loss, model - loss = CrossEntropySmooth(sparse=True, reduction="mean", 
smooth_factor=config.label_smooth_factor, num_classes=config.class_num) - loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) - model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics={'acc'}, - amp_level="O2", keep_batchnorm_fp32=False) - # define callbacks - time_cb = TimeMonitor(data_size=step_size) - loss_cb = LossMonitor() - cb = [time_cb, loss_cb] - if config.save_checkpoint: - config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, keep_checkpoint_max=config.keep_checkpoint_max) - ckpt_cb = ModelCheckpoint(prefix="resnet", directory=ckpt_save_dir, config=config_ck) - cb += [ckpt_cb] - - # train model - dataset_sink_mode = True - model.train(config.epoch_size, dataset, callbacks=cb, sink_size=dataset.get_dataset_size(), - dataset_sink_mode=dataset_sink_mode) -``` - -注意:关于目录中其他文件的代码,可以参考 MindSpore model_zoo 的 [ResNet50 实现](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnet)(该脚本融合了其他 ResNet 系列网络及ResNet-SE 网络,具体实现可能和对标脚本有差异)。 - -### 分布式训练 - -分布式训练相比单机训练对网络结构没有影响,可以通过调用 MindSpore 提供的分布式训练接口改造单机脚本即可完成分布式训练,具体可参考 [分布式训练教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_tutorials.html)。 - -#### ResNet50 迁移示例 - -对单机训练脚本添加以下接口: - -```python -import os -import argparse -import ast -from mindspore import context -from mindspore.communication.management import init, get_rank, get_group_size -from src.config import cfg - -# .... -parser = argparse.ArgumentParser(description='Image classification') -# add two new options to support both standalone and distribute training -parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run distribute') -parser.add_argument('--device_num', type=int, default=1, help='Device num.') -# ... 
-device_id = int(os.getenv('DEVICE_ID')) # get the current device id -context.set_context(device_id=device_id) -# enable distribute training -context.set_auto_parallel_context(device_num=args_opt.device_num, - parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) -# init distribute training -init() -``` - -修改 create_dataset 接口,使数据加载时对数据进行 shard 操作以支持分布式训练: - -```python -import mindspore.dataset as ds -from mindspore.communication.management import init, get_rank, get_group_size -# .... -device_num, rank_id = _get_rank_info() -if device_num == 1: - # standalone training - data_set = ds.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True) -else: - # distribute training - data_set = ds.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True, - num_shards=device_num, shard_id=rank_id) -# ... -``` - -### 推理 - -推理流程与训练相比有以下不同: - -- 无需定义loss 和 优化器 -- 无需在构造数据集时进行 repeat 操作 -- 网络定义后需要加载已训练好的 CheckPoint -- 定义计算推理精度的 metric - -#### ResNet50 迁移示例 - -修改后的推理脚本: - -```python -import os -import argparse -from mindspore import context -from mindspore.common import set_seed -from mindspore.train.model import Model -from mindspore.train.serialization import load_checkpoint, load_param_into_net - -parser = argparse.ArgumentParser(description='Image classification') -parser.add_argument('--net', type=str, default=None, help='Resnet Model, either resnet18, ' - 'resnet50 or resnet101') -parser.add_argument('--dataset', type=str, default=None, help='Dataset, either cifar10 or imagenet2012') - -parser.add_argument('--checkpoint_path', type=str, default=None, help='Checkpoint file path') -parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') -parser.add_argument('--device_target', type=str, default='Ascend', choices=("Ascend", "GPU", "CPU"), - help="Device target, support Ascend, GPU and CPU.") -args_opt = parser.parse_args() - -set_seed(1) - -from src.resnet import resnet50 as resnet -from src.dataset import create_dataset -from 
src.config import config as config - - -if __name__ == '__main__': - target = args_opt.device_target - - # init context - context.set_context(mode=context.GRAPH_MODE, device_target=target, save_graphs=False) - device_id = int(os.getenv('DEVICE_ID')) - context.set_context(device_id=device_id) - - # create dataset - dataset = create_dataset(dataset_path=args_opt.dataset_path, do_train=False, batch_size=config.batch_size) - step_size = dataset.get_dataset_size() - - # define net - net = resnet(class_num=config.class_num) - - # load checkpoint - param_dict = load_checkpoint(args_opt.checkpoint_path) - load_param_into_net(net, param_dict) - net.set_train(False) - - # define model - model = Model(net, metrics={'top_1_accuracy', 'top_5_accuracy'}) - - # eval model - res = model.eval(dataset) - print("result:", res, "ckpt=", args_opt.checkpoint_path) -``` - -### 问题定位 - -在流程打通中可能会遇到一些中断训练的问题,可以参考 [网络训练调试教程](https://gitee.com/mindspore/docs/blob/master/docs/migration_guide/source_zh_cn/neural_network_debug.md#) 定位和解决。 - -## 精度调优 - -在打通流程后,就可以通过训练和推理两个步骤获得网络训练的精度。通常情况下,我们很难一次就复现对标脚本的精度,需要通过精度调优来逐渐提高精度,精度调优相比性能调优不够直观,效率低,工作量大。开发者可将我们提供的 [精度调优教程](https://gitee.com/mindspore/docs/blob/master/docs/migration_guide/source_zh_cn/accuracy_optimization.md#) 作为参考。 - -## 性能调优 - -通常我们所指的性能调优是在固定数据集、网络规模和硬件数量的情况下提高训练性能,而通过改变数据集大小、网络规模、硬件数量来提高性能是显然的,不在本文的讨论范围内。 - -除非性能问题已严重阻碍了精度调试,否则性能调优一定要放在精度达标以后进行,这其中主要有两个原因:一是在定位精度问题时很多修改会影响性能,使得已经调优过的性能再次未达标,可能浪费工作量;二是性能调优时有可能引入新的精度问题,如果没有已经达标的精度作为看护,后面再定位这次引入的精度问题难度会极大的增加。 - -### 分析Profiling数据 - -分析Profiling数据是性能调优阶段必不可少的步骤,MindSpore 的性能和精度调优工具 [MindInsight](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/visualization_tutorials.html) 提供了丰富的性能和精度调优方法,对于性能调优,最重要的信息就是Profiling数据。Profiling可以收集整网训练过程中端到端的详细性能数据,包含数据准备和迭代轨迹。在迭代轨迹中,你可以看到每个算子的起始运行时间、结束运行时间、调用次数和调用顺序等非常详细的信息,这对我们性能调优非常有帮助。生成Profiling数据的方式如下: - -```python -from mindspore.profiler import Profiler -from mindspore import Model, nn, context - -# init context 
-context.set_context(mode=context.GRAPH_MODE, device_target='Ascend', device_id=int(os.environ["DEVICE_ID"])) - -# init profiler, profiling data will be stored under folder ./data by default -profiler = Profiler() - -# start training -Model.train() - -# end training,parse profiling data to readable text -profiler.analyse() -``` - -关于Profiling更详细的使用方法,可以参考 [Profiling 性能分析方法](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/performance_profiling.html)。 - -获取到 Profiling 数据后,我们可以分析出性能瓶颈阶段和算子,然后进行性能优化,可以参考 [性能调优指导](https://gitee.com/mindspore/docs/blob/master/docs/migration_guide/source_zh_cn/performance_optimization.md#)。 - -### 常见问题及相应优化方法 - -#### MindData 性能问题 - -单Step性能抖动、数据队列一段时间内持续为空的情况都是由于数据预处理部分性能较差,使得数据处理速度跟不上单Step迭代速度导致,这两个现象通常成对出现。 - -当数据处理速度较慢时,队列从最开始的满队列情况逐渐消耗为空队列,训练进程会开始等待空队列填入数据,一旦有新的数据填入,网络才会继续进行单Step训练。由于数据处理没有队列作为缓冲,数据处理的性能抖动直接体现在单Step的性能上,因此还会造成单Step性能抖动。 - -#### 多机同步性能问题 - -当进行分布式训练时,在一个Step的训练过程中,完成前向传播和梯度计算后,各个机器开始进行AllReduce梯度同步,AllReduce同步时间主要受权重数量、机器数量影响,对于越复杂、机器规模越大的网络,其 AllReduce 梯度更新时间也越久,此时我们可以进行AllReduce 切分来优化这部分耗时。 - -正常情况下,AllReduce 梯度同步会等所有反向算子执行结束,也就是对所有权重都计算出梯度后再一次性同步所有机器的梯度,而使用AllReduce切分后,我们可以在计算出一部分权重的梯度后,就立刻进行这部分权重的梯度同步,这样梯度同步和剩余算子的梯度计算可以并行执行,也就隐藏了这部分 AllReduce 梯度同步时间。切分策略通常是手动尝试,寻找一个最优的方案(支持切分大于两段)。 -以 [ResNet50网络](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/resnet/train.py) 为例,该网络共有 160 个 权重, [85, 160] 表示第 0 至 85个权重计算完梯度后立刻进行梯度同步,第 86 至 160 个 权重计算完后再进行梯度同步,这里共切分两段,因此需要进行两次梯度同步。代码实现如下: - -```python -from mindspore import context -... 
- -device_id = int(os.getenv('DEVICE_ID')) -context.set_context(device_id=device_id, enable_auto_mixed_precision=True) -context.set_auto_parallel_context(device_num=args_opt.device_num, - parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) -set_algo_parameters(elementwise_op_strategy_follow=True) -if args_opt.net == "resnet50" or args_opt.net == "se-resnet50": - # AllReduce split - context.set_auto_parallel_context(all_reduce_fusion_config=[85, 160]) -else: - # Another split stratety - context.set_auto_parallel_context(all_reduce_fusion_config=[180, 313]) -init() -``` - -#### 算子性能问题 - -单算子耗时久、对于同一种算子在不同shape或者不同 datatype 下性能差异较大的情况主要是由算子性能问题引起,通常有以下两个解决思路: - -1. 使用计算量更小的数据类型。例如,同一个算子在 float16 和 float32 下精度无明显差别,可使用计算量更小的 float16 格式。 -2. 使用算法相同的其他算子规避。 - -如果您发现有性能较差的算子时,建议联系 [MindSpore社区](https://gitee.com/mindspore/mindspore/issues) 反馈,我们确认为性能问题后会及时优化。 - -#### 框架性能问题 - -转换算子过多(TransData、Cast类算子)且耗时明显时,如果是我们手动加入的Cast算子,可分析其必要性,如果对精度没有影响,可去掉冗余的Cast、TransData算子。 - -如果是MindSpore自动生成的转换算子过多,可能是MindSpore框架针对某些特殊情况没有充分优化,可联系 [MindSpore社区](https://gitee.com/mindspore/mindspore/issues) 反馈。 - -#### 其他通用优化方法 - -- 使用自动混合精度 - - 混合精度训练方法是通过混合使用单精度和半精度数据格式来加速深度神经网络训练的过程,同时保持了单精度训练所能达到的网络精度。混合精度训练能够加速计算过程,同时减少内存使用和存取,并使得在特定的硬件上可以训练更大的模型或 batch size。 - - 具体可参考 [混合精度教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/enable_mixed_precision.html)。 - -- 使能图算融合 - - 图算融合是 MindSpore 特有的网络性能优化技术。它可以通过自动分析和优化现有网络计算图逻辑,并结合目标硬件能力,对计算图进行计算化简和替代、算子拆分和融合、算子特例化编译等优化,以提升设备计算资源利用率,实现对网络性能的整体优化。相比传统优化技术,图算融合具有多算子跨边界联合优化、与算子编译跨层协同、基于Polyhedral的算子即时编译等独特优势。另外,图算融合只需要用户打开对应配置后,整个优化过程即可自动完成,不需要网络开发人员进行其它额外感知,使得用户可以聚焦网络算法实现。 - - 图算融合的适用场景包括:对网络执行时间具有较高性能要求的场景;通过拼接基本算子实现自定义组合算子,并希望对这些基本算子进行自动融合,以提升自定义组合算子性能的场景。 - - 具体可参考 [图算融合教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/enable_graph_kernel_fusion.html)。 diff --git a/docs/migration_guide/source_zh_cn/script_analysis.md b/docs/migration_guide/source_zh_cn/script_analysis.md deleted file mode 
100644 index 402eca5c7da8f5825136c1af8b185dbbfdc37d83..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/script_analysis.md +++ /dev/null @@ -1,76 +0,0 @@ -# 网络脚本分析 - - - -- [网络脚本分析](#网络脚本分析) - - [算子评估](#算子评估) - - [MindSpore算子设计](#mindspore算子设计) - - [查询算子映射表](#查询算子映射表) - - [缺失算子处理策略](#缺失算子处理策略) - - [语法评估](#语法评估) - - [常见限制原则](#常见限制原则) - - [常见处理策略](#常见处理策略) - - - - - -## 算子评估 - -### MindSpore算子设计 - -使用MindSpore框架搭建神经网络流程与其他框架(TensorFlow/PyTorch)类似,但支持的算子存在差异,需要在进行网络迁移(例如由TensorFlow迁移至MindSpore Ascend平台)时找出MindSpore框架缺失的算子。 - -MindSpore API由各种Python/C++ API算子组成,可以大致分为: - -- 数据框架算子 - - 包括张量、基本数据类型、训练梯度、优化器算子,如`mindspore.int32`、`mindspore.nn.Cell`等。 - -- 数据预处理算子 - - 包括图片读取、数据类型转化算子,如`mindspore.dataset.MnistDataset`等。 - -- 网络结构算子 - - 包括网络构建中使用到的卷积、归一化算子,如`mindspore.nn.Conv2d`、`mindspore.nn.Dense`等。 - - 网络结构算子表层为ME算子,即用户调用的算子API(例如`mindspore.nn.Softmax`),ME算子底层调用TBE算子(C/C++)实现。 - - 统计缺失ME算子时,需要找出源码脚本中所有算子(含数据框架类、数据预处理、网络结构算子)在MindSpore框架的对应算子(例如`tf.nn.relu`对应MindSpore算子为`mindspore.nn.ReLU`)。如果MindSpore中没有对应算子,则计入缺失。 - -### 查询算子映射表 - -在代码库找到网络结构及实现训练功能的Python文件(名称一般为train.py model.py等等),在脚本文件中查找所有相关算子(含数据框架类、数据预处理、网络结构算子),并与[MindSpore算子API](https://www.mindspore.cn/doc/note/zh-CN/master/operator_list_ms.html)对比,查找`mindspore.nn`或者`mindspore.ops`下算子的平台支持情况。 - -若该网页均未能找到对应的ME算子,则可继续在[MindSpore API列表](https://www.mindspore.cn/doc/api_python/zh-CN/master/index.html)中搜索算子名称。 - -若源码为PyTorch脚本,则可以直接查询[MindSpore与PyTorch的算子映射](https://www.mindspore.cn/doc/note/zh-CN/master/index.html#operator_api)找到对应的MindSpore算子。注意,针对相同功能的算子,MindSpore的命名可能与其他框架不同,同名算子参数与功能也可能与其他框架有区别,均以官方描述为准。 - -### 缺失算子处理策略 - -1. 考虑用其他算子替换:需要分析算子实现公式,审视是否可以用现有MindSpore算子叠加达到预期目标。 -2. 考虑临时规避方案:比如某个loss不支持,可以替换为同类已支持的loss算子。 -3. 
[在MindSpore社区](https://gitee.com/mindspore/mindspore/issues)提交建议开发缺失算子。 - -## 语法评估 - -MindSpore提供`GRAPH_MODE`和`PYNATIVE_MODE`两种模式。 - -PyNative模式下模型进行**推理**的行为与一般Python代码无异。 - -而在使用GRAPH_MODE时,或使用PYNATIVE_MODE进行**训练**时,通常会出现语法限制。在这两种情况下,需要对Python代码进行图编译操作,而这一步操作中MindSpore目前还未能支持完整的Python语法全集,所以`construct`函数的编写会存在部分限制。具体限制内容可以参考[MindSpore静态图语法](https://www.mindspore.cn/doc/note/zh-CN/master/static_graph_syntax_support.html)。 - -### 常见限制原则 - -相较于详细的语法说明,常见的限制可以归结为以下几点: - -- 构图时不要调用其他Python库,例如numpy、scipy,相关的处理应该前移到`__init__`阶段。 -- 构图时不要使用自定义类型,而应该使用MindSpore提供的数据类型和Python基础类型,可以使用基于这些类型的tuple/list组合。 -- 构图时不要处理多线程、多进程数据。 - -### 常见处理策略 - -1. 使用MindSpore内部提供的算子替换其他Python库的功能。常量的处理可以前移到`__init__`阶段。 -2. 使用基础类型进行组合,可以考虑增加函数参数量。函数入参数没有限制,并且可以使用不定长输入。 -3. 避免网络中出现多线程处理。 diff --git a/docs/migration_guide/source_zh_cn/script_development.rst b/docs/migration_guide/source_zh_cn/script_development.rst deleted file mode 100644 index 5099574910cabbc6657a156a19eddb4d71ef1683..0000000000000000000000000000000000000000 --- a/docs/migration_guide/source_zh_cn/script_development.rst +++ /dev/null @@ -1,13 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 11:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -网络脚本开发 -=================== - -.. toctree:: - :maxdepth: 1 - - migration_case_of_mindconverter - migration_script diff --git a/docs/note/Makefile b/docs/note/Makefile deleted file mode 100644 index 1eff8952707bdfa503c8d60c1e9a903053170ba2..0000000000000000000000000000000000000000 --- a/docs/note/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source_zh_cn -BUILDDIR = build_zh_cn - -# Put it first so that "make" without argument is like "make help". 
-help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/note/requirements.txt b/docs/note/requirements.txt deleted file mode 100644 index 1755dcd967228348c2f9cb29bac44580af862770..0000000000000000000000000000000000000000 --- a/docs/note/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -sphinx >= 2.2.1, <= 2.4.4 -recommonmark -sphinx-markdown-tables -sphinx_rtd_theme -numpy -nbsphinx -IPython -jieba diff --git a/docs/note/source_en/_static/logo_notebook.png b/docs/note/source_en/_static/logo_notebook.png deleted file mode 100644 index 18c2e29e4b73ee428f70253feffdd855fdf0c422..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/_static/logo_notebook.png and /dev/null differ diff --git a/docs/note/source_en/_static/logo_source.png b/docs/note/source_en/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/_static/logo_source.png and /dev/null differ diff --git a/docs/note/source_en/benchmark.md b/docs/note/source_en/benchmark.md deleted file mode 100644 index a20112e8c6212acfa722dd0e0999731c2a48af50..0000000000000000000000000000000000000000 --- a/docs/note/source_en/benchmark.md +++ /dev/null @@ -1,64 +0,0 @@ -# Benchmarks - -`Linux` `Ascend` `Model Training` `Intermediae` `Expert` - - - -- [Benchmarks](#benchmarks) - - [Training Performance](#training-performance) - - [ResNet](#resnet) - - [BERT](#bert) - - [Wide & Deep (data parallel)](#wide--deep-data-parallel) - - [Wide & Deep (Host-Device model parallel)](#wide--deep-host-device-model-parallel) - - - - - -This document describes the MindSpore benchmarks. 
-For details about the MindSpore networks, see [Model Zoo](https://gitee.com/mindspore/mindspore/tree/master/model_zoo). - -## Training Performance - -### ResNet - -| Network | Network Type | Dataset | MindSpore Version | Resource                 | Precision | Batch Size | Throughput | Speedup | -| --- | --- | --- | --- | --- | --- | --- | --- | --- | -| ResNet-50 v1.5 | CNN | ImageNet2012 | 0.5.0-beta | Ascend: 1 * Ascend 910
CPU: 24 Cores | Mixed | 256 | 2115 images/sec | - | -| | | | | Ascend: 8 * Ascend 910
CPU: 192 Cores | Mixed | 256 | 16600 images/sec | 0.98 | -| | | | | Ascend: 16 * Ascend 910
CPU: 384 Cores | Mixed | 256 | 32768 images/sec | 0.96 | - -1. The preceding performance is obtained based on ModelArts, the HUAWEI CLOUD AI development platform. It is the average performance obtained by the Ascend 910 AI processor during the overall training process. -2. For details about other open source frameworks, see [ResNet-50 v1.5 for TensorFlow](https://github.com/NVIDIA/DeepLearningExamples/tree/master/TensorFlow/Classification/ConvNets/resnet50v1.5). - -### BERT - -| Network | Network Type | Dataset | MindSpore Version | Resource                 | Precision | Batch Size | Throughput | Speedup | -| --- | --- | --- | --- | --- | --- | --- | --- | --- | -| BERT-Large | Attention | zhwiki | 0.5.0-beta | Ascend: 1 * Ascend 910
CPU: 24 Cores | Mixed | 96 | 269 sentences/sec | - | -| | | | | Ascend: 8 * Ascend 910
CPU: 192 Cores | Mixed | 96 | 2069 sentences/sec | 0.96 | - -1. The preceding performance is obtained based on ModelArts, the HUAWEI CLOUD AI development platform. The network contains 24 hidden layers, the sequence length is 128 tokens, and the vocabulary contains 21128 tokens. -2. For details about other open source frameworks, see [BERT For TensorFlow](https://github.com/NVIDIA/DeepLearningExamples/tree/master/TensorFlow/LanguageModeling/BERT). - -### Wide & Deep (data parallel) - -| Network | Network Type | Dataset | MindSpore Version | Resource                 | Precision | Batch Size | Throughput | Speedup | -| --- | --- | --- | --- | --- | --- | --- | --- | --- | -| Wide & Deep | Recommend | Criteo | 0.6.0-beta | Ascend: 1 * Ascend 910
CPU: 24 Cores | Mixed | 16000 | 796892 samples/sec | - | -| | | | | Ascend: 8 \* Ascend 910
CPU: 192 Cores | Mixed | 16000*8 | 4872849 samples/sec | 0.76 | - -1. The preceding performance is obtained based on Atlas 800, and the model is data parallel. -2. For details about other open source frameworks, see [Wide & Deep For TensorFlow](https://github.com/NVIDIA/DeepLearningExamples/tree/master/TensorFlow/Recommendation/WideAndDeep). - -### Wide & Deep (Host-Device model parallel) - -| Network | Network Type | Dataset | MindSpore Version | Resource                 | Precision | Batch Size | Throughput | Speedup | -| --- | --- | --- | --- | --- | --- | --- | --- | --- | -| Wide & Deep | Recommend | Criteo | 0.6.0-beta | Ascend: 1 * Ascend 910
CPU: 24 Cores | Mixed | 1000 | 68715 samples/sec | - | -| | | | | Ascend: 8 \* Ascend 910
CPU: 192 Cores | Mixed | 8000*8 | 283830 samples/sec | 0.51 | -| | | | | Ascend: 16 \* Ascend 910
CPU: 384 Cores | Mixed | 8000*16 | 377848 samples/sec | 0.34 | -| | | | | Ascend: 32 \* Ascend 910
CPU: 768 Cores | Mixed | 8000*32 | 433423 samples/sec | 0.20 | - -1. The preceding performance is obtained based on Atlas 800, and the model is model parallel. -2. For details about other open source frameworks, see [Wide & Deep For TensorFlow](https://github.com/NVIDIA/DeepLearningExamples/tree/master/TensorFlow/Recommendation/WideAndDeep). diff --git a/docs/note/source_en/community.rst b/docs/note/source_en/community.rst deleted file mode 100644 index 4609dee2bb085a96ce268ef1ad9f870b865ca6d2..0000000000000000000000000000000000000000 --- a/docs/note/source_en/community.rst +++ /dev/null @@ -1,12 +0,0 @@ -Participating in MindSpore Community -==================================== - -Contributing Code ------------------ - -If you want to contribute code, please read https://gitee.com/mindspore/mindspore/blob/master/CONTRIBUTING.md . - -Contributing Documents ----------------------- - -If you want to contribute documents, please read https://gitee.com/mindspore/docs/blob/master/CONTRIBUTING_DOC.md . \ No newline at end of file diff --git a/docs/note/source_en/conf.py b/docs/note/source_en/conf.py deleted file mode 100644 index a1fd767271ac159540440ed65bd0d676163366a9..0000000000000000000000000000000000000000 --- a/docs/note/source_en/conf.py +++ /dev/null @@ -1,58 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -import os - - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_static_path = ['_static'] \ No newline at end of file diff --git a/docs/note/source_en/design/mindarmour.rst b/docs/note/source_en/design/mindarmour.rst deleted file mode 100644 index bfc20cfc28aab28e816fb3d976535139f731f0a8..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindarmour.rst +++ /dev/null @@ -1,8 +0,0 @@ -MindArmour Design -================== - -.. 
toctree:: - :maxdepth: 1 - - mindarmour/differential_privacy_design - mindarmour/fuzzer_design \ No newline at end of file diff --git a/docs/note/source_en/design/mindarmour/differential_privacy_design.md b/docs/note/source_en/design/mindarmour/differential_privacy_design.md deleted file mode 100644 index 5fce22dd7a2e22dffb523e4d722402597890b418..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindarmour/differential_privacy_design.md +++ /dev/null @@ -1,66 +0,0 @@ -# Differential Privacy - -`Ascend` `Model Development` `Model Optimization` `Framework Development` `Enterprise` `Expert` `Contributor` - - - -- [Differential Privacy](#differential-privacy) - - [Overall Design](#overall-design) - - [DP Optimizer](#dp-optimizer) - - [DP Mechanisms](#dp-mechanisms) - - [Monitor](#monitor) - - [Code Implementation](#code-implementation) - - [References](#references) - - - - - -## Overall Design - -The Differential-Privacy module of MindArmour implements the differential privacy training capability. Model training consists of building training dataset, computing loss, computing gradient, and updating model parameters. Currently, the differential privacy training of MindArmour focuses on the gradient computing process and uses the corresponding algorithm to clip and add noise to the gradient. In this way, user data privacy is protected. - -![dp_arch](./images/dp_arch.png) - -
Figure 1 Overall design of differential privacy
- -Figure 1 shows an overall design of differential privacy training, and mainly including differential privacy noise mechanisms (DP mechanisms), a differential privacy optimizer (DP optimizer), and a privacy monitor. - -### DP Optimizer - -DP optimizer inherits capabilities of the MindSpore optimizer and uses the DP mechanisms to scramble and protect gradients. Currently, MindArmour provides three types of DP optimizers: constant Gaussian optimizer, adaptive Gaussian optimizer, and adaptive clipping optimizer. Each type of DP optimizer adds differential privacy protection capabilities to common optimizers such as SGD and Momentum from different perspectives. - -- Constant Gaussian optimizer is a DP optimizer for non-adaptive Gaussian noise. The advantage is that the differential privacy budget ϵ can be strictly controlled. The disadvantage is that in the model training process, the noise amount added in each step is fixed. If the number of training steps is too large, the noise in the later phase of training makes the model convergence difficult, or even causes the performance to deteriorate greatly and the model availability to be poor. -- Adaptive Gaussian optimizer adaptively adjusts the standard deviation to adjust the Gaussian distribution noise. In the initial phase of model training, a large amount of noise is added. As the model gradually converges, the noise amount gradually decreases, and the impact of the noise on the model availability is reduced. A disadvantage of the adaptive Gaussian noise is that a differential privacy budget cannot be strictly controlled. -- Adaptive clipping optimizer is a DP optimizer that adaptively adjusts a clipping granularity. Gradient clipping is an important operation in differential privacy training. The adaptive clipping optimizer can control a ratio of gradient clipping to fluctuate within a given range and control the gradient clipping granularity during training steps. 
- -### DP Mechanisms - -The noise mechanism is a basis for building a differential privacy training capability. Different noise mechanisms meet requirements of different DP optimizers, including multiple mechanisms such as constant Gaussian distribution noise, adaptive Gaussian distribution noise, adaptive clipping Gaussian distribution noise, and Laplace distribution noise. - -### Monitor - -Monitor provides callback functions such as Rényi differential privacy (RDP) and zero-concentrated differential privacy (ZCDP) to monitor the differential privacy budget of the model. - -- ZCDP[1] - - ZCDP is a loose differential privacy definition. It uses the Rényi divergence to measure the distribution difference of random functions on adjacent datasets. - -- RDP[2] - - RDP is a more general differential privacy definition based on the Rényi divergence. It uses the Rényi divergence to measure the distribution difference between two adjacent datasets. - -Compared with traditional differential privacy, ZCDP and RDP provide stricter privacy budget upper bound guarantee. - -## Code Implementation - -- [mechanisms.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/privacy/diff_privacy/mechanisms/mechanisms.py): implements the noise generation mechanism required by differential privacy training, including simple Gaussian noise, adaptive Gaussian noise, and adaptive clipping Gaussian noise. -- [optimizer.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/privacy/diff_privacy/optimizer/optimizer.py): implements the fundamental logic of using the noise generation mechanism to add noise during backward propagation. -- [monitor.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/privacy/diff_privacy/monitor/monitor.py): implements the callback function for computing the differential privacy budget. During model training, the current differential privacy budget is returned. 
-- [model.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/privacy/diff_privacy/train/model.py): implements the logic of computing the loss and gradient as well as the gradient truncation logic of differential privacy training, which is the entry for users to use the differential privacy training capability. - -## References - -[1] Lee, Jaewoo, and Daniel Kifer. "Concentrated differentially private gradient descent with adaptive per-iteration privacy budget." *Proceedings of the 24th ACM SIGKDD International Conference on Knowledge Discovery & Data Mining*. 2018. - -[2] Mironov, Ilya. "Rényi differential privacy." *2017 IEEE 30th Computer Security Foundations Symposium (CSF)*. IEEE, 2017. diff --git a/docs/note/source_en/design/mindarmour/fuzzer_design.md b/docs/note/source_en/design/mindarmour/fuzzer_design.md deleted file mode 100644 index 4427d0d7f2c199f200bc46c8221c32592d20493d..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindarmour/fuzzer_design.md +++ /dev/null @@ -1,73 +0,0 @@ -# AI Model Security Testing - -`Linux` `Ascend` `GPU` `CPU` `Data Preparation` `Model Development` `Model Training` `Model Optimization` `Enterprise` `Expert` - - - -- [AI Model Security Testing](#ai-model-security-testing) - - [Background](#background) - - [Fuzz Testing Design](#fuzz-testing-design) - - [Fuzz Testing Process](#fuzz-testing-process) - - [Code Implementation](#code-implementation) - - [References](#references) - - - - - -## Background - -Different from [fuzzing security test for traditional programs](https://zhuanlan.zhihu.com/p/43432370), MindArmour provides the AI model security test module fuzz_testing for deep neural network. Based on the neural network features, the concept of neuron coverage rate [1] is introduced to guide the fuzz testing. Fuzz testing is guided to generate samples in the direction of increasing neuron coverage rate so that more neurons can be activated by inputs. 
The distribution range of neuron values is wider to fully test DNN and explore the output results of different types of models and model error behavior. - -## Fuzz Testing Design - -The following figure shows the security test design of the AI model. - -![fuzz_architecture](./images/fuzz_architecture.png) - -At the user interface layer, users need to provide the original dataset `DataSet`, tested model `Model`, and Fuzzer parameter `Fuzzer configuration`. After fuzzing the model and data, Fuzzer module returns the security report `Security Report`. - -Fuzz testting architecture consists of three modules: - -1. Natural Threat/Adversarial Example Generator: - - Randomly select a mutation method to mutate seed data and generate multiple variants. Mutation policies supporting multiple samples include: - - - Image affine transformation methods: Translate, Rotate, Scale, and Shear. - - Methods based on image pixel value changes: Contrast, Brightness, Blur, and Noise. - - Methods for generating adversarial examples based on white-box and black-box attacks: FGSM, PGD, and MDIIM. - -2. Fuzzer Moduler: - - Perform fuzz testing on the mutated data to observe the change of the neuron coverage rate. If the generated data increases the neuron coverage rate, add the data to the mutated seed queue for the next round of data mutation. Currently, the following neuron coverage metrics are supported: KMNC, NBC, and SNAC [2]. - -3. Evaluation: - - Evaluate the fuzz testing effect, quality of generated data, and strength of mutation methods. Five metrics of three types are supported, including the general evaluation metric (accuracy), neuron coverage rate metrics (kmnc, nbc, and snac), and adversarial attack evaluation metric (attack_success_rate). - -## Fuzz Testing Process - -![fuzz_process](./images/fuzz_process.png) - -The fuzz testing process is as follows: - -1. Select seed A from the seed queue according to the policy. -2. 
Randomly select a mutation policy to mutate seed A and generate multiple variants A1, A2, ... -3. Use the target model to predict the variants. If the semantics of variant is consistent with the seed, the variant enters the Fuzzed Tests. -4. If the prediction is correct, use the neuron coverage metric for analysis. -5. If a variant increases the coverage rate, place the variant in the seed queue for the next round of mutation. - -Through multiple rounds of mutations, you can obtain a series of variant data in the Fuzzed Tests, perform further analysis, and provide security reports from multiple perspectives. You can use them to deeply analyze defects of the neural network model and enhance the model to improve its universality and robustness. - -## Code Implementation - -1. [fuzzing.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/fuzz_testing/fuzzing.py): overall fuzz testing process. -2. [model_coverage_metrics.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/fuzz_testing/model_coverage_metrics.py): neuron coverage rate metrics, including KMNC, NBC, and SNAC. -3. [image_transform.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/fuzz_testing/image_transform.py): image mutation methods, including methods based on image pixel value changes and affine transformation methods. -4. [adversarial attacks](https://gitee.com/mindspore/mindarmour/tree/master/mindarmour/adv_robustness/attacks): methods for generating adversarial examples based on white-box and black-box attacks. - -## References - -[1] Pei K, Cao Y, Yang J, et al. Deepxplore: Automated whitebox testing of deep learning systems[C]//Proceedings of the 26th Symposium on Operating Systems Principles. ACM, 2017: 1-18. - -[2] Ma L, Juefei-Xu F, Zhang F, et al. Deepgauge: Multi-granularity testing criteria for deep learning systems[C]//Proceedings of the 33rd ACM/IEEE International Conference on Automated Software Engineering. ACM, 2018: 120-131. 
diff --git a/docs/note/source_en/design/mindarmour/images/dp_arch.png b/docs/note/source_en/design/mindarmour/images/dp_arch.png deleted file mode 100644 index c903e4e2acece6c6de882852dc3570126b6fcb05..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindarmour/images/dp_arch.png and /dev/null differ diff --git a/docs/note/source_en/design/mindarmour/images/fuzz_architecture.png b/docs/note/source_en/design/mindarmour/images/fuzz_architecture.png deleted file mode 100644 index d4e8b89bd9a9f4844c59790f5b2114d1d477f927..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindarmour/images/fuzz_architecture.png and /dev/null differ diff --git a/docs/note/source_en/design/mindarmour/images/fuzz_process.png b/docs/note/source_en/design/mindarmour/images/fuzz_process.png deleted file mode 100644 index 2e04347f7cfb0819562578a6be1e91b5cc7ce9d5..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindarmour/images/fuzz_process.png and /dev/null differ diff --git a/docs/note/source_en/design/mindinsight.rst b/docs/note/source_en/design/mindinsight.rst deleted file mode 100644 index 21c8d10567454ddc2b6228ef79e4727dfdd2a2b3..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindinsight.rst +++ /dev/null @@ -1,9 +0,0 @@ -MindInsight Design -================== - -.. 
toctree:: - :maxdepth: 1 - - mindinsight/training_visual_design - mindinsight/graph_visual_design - mindinsight/tensor_visual_design \ No newline at end of file diff --git a/docs/note/source_en/design/mindinsight/graph_visual_design.md b/docs/note/source_en/design/mindinsight/graph_visual_design.md deleted file mode 100644 index 26afce1c72d60ed0419abb6aa6f412b04725a5d0..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindinsight/graph_visual_design.md +++ /dev/null @@ -1,74 +0,0 @@ -# Computational Graph Visualization Design - -`Linux` `Ascend` `GPU` `CPU` `Model Development` `Model Optimization` `Framework Development` `Intermediate` `Expert` `Contributor` - - - -- [Computational Graph Visualization Design](#computational-graph-visualization-design) - - [Background](#background) - - [Overall Design](#overall-design) - - [Concept Design](#concept-design) - - [Backend Design](#backend-design) - - [Frontend Design](#frontend-design) - - [API Design](#api-design) - - [File API Design](#file-api-design) - - - - - -## Background - -The computational graph visualization function is mainly used in the following scenarios: - -- View a data flow direction of operators and a model structure when programming a deep learning neural network. -- View the input and output nodes of a specified node and attributes of a queried node. -- Trace data, including data dimension and type changes when debugging a network. - -## Overall Design - -### Concept Design - -| Concept | Description | -| ----------------------------------- | ---------------------------------------- | -| Root node, parent node, and subnode | Nodes are divided into different layers based on slashes in the operator name. Take node A `Network` and node B `Network/Conv2D` as an example. Node A is called the root node and is the parent node of node B. Node B is the subnode of node A. | -| Scope | Each node has a scope. A scope of a subnode is the name of its parent node. 
For example, the scope of an operator node A `Network/Conv2D` is `Network`, that is, the name of its parent node `Network`. The scope of a root node is an empty string. | -| Operator node | Node type. An original node is parsed from the file that stores a computational graph. It corresponds to an operation operator in the neural network code, for example, an `Add` operation operator. | -| Const node | Node type, indicating the constant input of an operator. Constant is parsed from the file that stores a computational graph. The scope is determined based on the input of other nodes. For example, if constant A `Const1` is the input of an operator node B `Network/Conv2D`, constant A is copied and named `Network/Const1` to make its scope the same as that of operator node B. | -| Parameter node | Node type, indicating the parameter input of an operator. | -| Name scope | Node type obtained based on the slash (/) in the operator node name, which is also a scope type. Take node A `Network/Conv2D` as an example. A name scope node B named `Network` is generated based on the slash (/). In a graph, node A is displayed as a subnode of node B, and the scope of node A is the name of node B. Node A is displayed only after node B is expanded. | -| Aggregation node | Node type, which is also a scope type. In the same scope, if there are too many nodes of the same type, an aggregation node is created to replace these nodes. These nodes are folded as subnodes of the aggregation node. | -| Proxy node | Node type. If the connection line between node A and node B is too complicated, a node C that can represent node B is created next to node A. A connection line between node A and node C is created, indicating that the data flows from node A to node B. In this way, the connection lines in a graph are optimized, preventing the layout from being disordered. | -| Data edge | Connection type, indicating the data flow direction using a solid line and an arrow. 
For example, A->B indicates that data flows from A to B. | -| Control edge | Connection type, indicating the dependency between operator nodes using a dashed line and an arrow. For example, A-->B indicates that A is executed before B. | -| Independent layout | In complex connection scenarios, a node is removed from the original connection so that other nodes cannot connect to it. Instead, proxy nodes are created on other nodes for connection, simplifying the connection relationship. For example, nodes of the parameter type are aggregated, which simplifies a connection relationship between the parameter node and other nodes. | - -### Backend Design - -The following figure shows a class diagram of a backend, which consists of the base class Graph and the class Node. MsGraph inherits the Graph base class and is used to parse the computational graph file of MindSpore ANF. Node classes are aggregated into a graph, which has an aggregation relationship with the Graph. - -![Class diagram design](./images/graph_visual_class_design.png) - -### Frontend Design - -The data is drawn and displayed on the WebUI. The frontend uses the `d3-graph-viz 3.x` plug-in to assist in drawing. - -![Input image description](./images/graph_visual_right_side.png) -Figure 1 Auxiliary functions - -As shown in Figure 1, auxiliary functions such as selecting files, querying nodes, viewing node information, and node input and output are provided. - -![Main part of the computational graph](./images/graph_visual_main.png) -Figure 2 Main part of the computational graph - -In the computational graph, nodes are divided into layers based on slashes (/) in names and are displayed layer by layer. For details, see Figure 2. When you double-click a scope node, its subnodes are displayed. - -### API Design - -In the computational graph, there are file API and RESTful API. The file API is the `summary.proto` file, which is used for data interconnection between MindInsight and MindSpore. 
-RESTful API is used for data interaction between the MindInsight frontend and backend. - -#### File API Design - -Data interaction between MindSpore and MindInsight uses the data format defined by [Protocol Buffer](https://developers.google.cn/protocol-buffers/docs/pythontutorial). -The main entry is the [summary.proto file](https://gitee.com/mindspore/mindinsight/blob/master/mindinsight/datavisual/proto_files/mindinsight_summary.proto). A message object of a computational graph is defined as `GraphProto`. For details about `GraphProto`, see the [anf_ir.proto file](https://gitee.com/mindspore/mindinsight/blob/master/mindinsight/datavisual/proto_files/mindinsight_anf_ir.proto). diff --git a/docs/note/source_en/design/mindinsight/images/graph_visual_class_design.png b/docs/note/source_en/design/mindinsight/images/graph_visual_class_design.png deleted file mode 100644 index 0a6aec0b2597cd0554c575dd11c2cddd9a7d3fcf..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindinsight/images/graph_visual_class_design.png and /dev/null differ diff --git a/docs/note/source_en/design/mindinsight/images/graph_visual_main.png b/docs/note/source_en/design/mindinsight/images/graph_visual_main.png deleted file mode 100644 index 0bc13636b5c84952978469c652c38500e6d34f43..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindinsight/images/graph_visual_main.png and /dev/null differ diff --git a/docs/note/source_en/design/mindinsight/images/graph_visual_right_side.png b/docs/note/source_en/design/mindinsight/images/graph_visual_right_side.png deleted file mode 100644 index e138bcfbbfda77ff3468442a3e5e169dcd7fed03..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindinsight/images/graph_visual_right_side.png and /dev/null differ diff --git a/docs/note/source_en/design/mindinsight/images/tensor_histogram.png b/docs/note/source_en/design/mindinsight/images/tensor_histogram.png deleted file 
mode 100644 index 4d3ca16b63261eca5e8318cb47ec4050539eca51..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindinsight/images/tensor_histogram.png and /dev/null differ diff --git a/docs/note/source_en/design/mindinsight/images/tensor_table.png b/docs/note/source_en/design/mindinsight/images/tensor_table.png deleted file mode 100644 index f2d1ad90b3930f71fa4014d94ae52df909bea434..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindinsight/images/tensor_table.png and /dev/null differ diff --git a/docs/note/source_en/design/mindinsight/images/training_visualization_architecture.png b/docs/note/source_en/design/mindinsight/images/training_visualization_architecture.png deleted file mode 100644 index 6b104eb964711f7855f5343c6ed66ed7573ca2cb..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindinsight/images/training_visualization_architecture.png and /dev/null differ diff --git a/docs/note/source_en/design/mindinsight/images/training_visualization_data_flow.png b/docs/note/source_en/design/mindinsight/images/training_visualization_data_flow.png deleted file mode 100644 index 102475c05471b2c19bb645d56a5a05952295b501..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindinsight/images/training_visualization_data_flow.png and /dev/null differ diff --git a/docs/note/source_en/design/mindinsight/images/training_visualization_data_model.png b/docs/note/source_en/design/mindinsight/images/training_visualization_data_model.png deleted file mode 100644 index 694a5e0c9c7ee3e51295665407213ea8289ab38d..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindinsight/images/training_visualization_data_model.png and /dev/null differ diff --git a/docs/note/source_en/design/mindinsight/images/training_visualization_data_model_en.pptx 
b/docs/note/source_en/design/mindinsight/images/training_visualization_data_model_en.pptx deleted file mode 100644 index 19bb0fc1d9ea50c8afd290664b90d3d264afe6bb..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindinsight/images/training_visualization_data_model_en.pptx and /dev/null differ diff --git a/docs/note/source_en/design/mindinsight/tensor_visual_design.md b/docs/note/source_en/design/mindinsight/tensor_visual_design.md deleted file mode 100644 index 4a3ee80d7296e06be2dd62cd5e9558601c60ca9b..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindinsight/tensor_visual_design.md +++ /dev/null @@ -1,105 +0,0 @@ -# Tensor Visualization Design - -`Linux` `Ascend` `GPU` `CPU` `Model Development` `Model Optimization` `Framework Development` `Intermediate` `Expert` `Contributor` - - - -- [Tensor Visualization Design](#tensor-visualization-design) - - [Background](#background) - - [Overall Design](#overall-design) - - [Backend Design](#backend-design) - - [Frontend Design](#frontend-design) - - [API Design](#api-design) - - [File API Design](#file-api-design) - - - - - -## Background - -Tensor visualization helps you intuitively view the tensor values during training. The tensor change trend can be displayed in a histogram, and the tensor value of a step can also be viewed. Tensor includes the weight value, gradient value, and activation value. - -## Overall Design - -Tensor visualization mainly refers to parsing a summary file generated by the tensor data which is recorded by the `TensorSummary` operator of MindSpore, and returning the result to the frontend for display. - -MindInsight parses tensor data based on a .proto file (Google Protocol Buffer, which is an efficient and convenient structured data storage mode) and caches the data. When specific data is queried at the frontend, the data is returned to the frontend for display. 
- -Tensor visualization supports the display of 1-dimensional to *N*-dimensional tensors in tables or histograms. The 0-dimensional tensors need to be recorded and displayed in scalar visualization through `ScalarSummary`. - -In the table view, you can query the tensor data of a specific step in the cache. The backend enables you to query the tensor data of any 0 to 2 dimensions at a time through slice. - -In the histogram view, you can query the histogram data of all steps in the cache. - -### Backend Design - -The `TensorContainer`, `Histogram`, and `TensorProcessor` classes are involved in tensor visualization. The `TensorContainer` class is used to store information such as the specific value, dimension, data type, maximum value, minimum value, and histogram (references the data of `Histogram`) of a tensor. `Histogram` is used to process histogram information, including saving the number of buckets and normalizing histogram data of all steps in the cache. `TensorProcessor` is used to process tensor-related HTTP requests, including obtaining a specific training job in the cache, the number of steps in a specific tag, tensor statistics of each step, tensor data of a specific dimension in a specific step (a maximum of two dimensions can be queried at a time), and the histogram data of a specific tag. - -### Frontend Design - -![tensor_table.png](./images/tensor_table.png) - -Figure 1: Table view - -Figure 1 displays tensors recorded by a user in a form of a table. The following functions are included: - -- The input boxes under the table display the tensor data of the current dimension. The colon (:) indicates index range of the current dimension which is basically the same as the meaning of Python index. If no specific index is specified, it indicates all the values of the current dimension and `2:5` indicates the value of index from 2 to 5 (not including 5). 
You can enter the corresponding index in the box or use index range containing `:` to query tensor data in a specific dimension. -- Drag the thumb of the linear slider below the table to query the tensor data of a specific step. - -![tensor_histogram.png](./images/tensor_histogram.png) - -Figure 2: Histogram view - -Figure 2 shows tensors recorded by a user in a form of a histogram. - -### API Design - -In tensor visualization, there are file API and RESTful API. The file API is the [summary.proto](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/utils/summary.proto) file, which is used for data interconnection between MindInsight and MindSpore. RESTful API is an internal API used for data interaction between the MindInsight frontend and backend. - -#### File API Design - -The `summary.proto` file is the main entry. TensorProto data is stored in the summary value, as shown in the following: - -```cpp -{ - message Summary { - message Image { - // Dimensions of the image. - required int32 height = 1; - required int32 width = 2; - ... - } - - message Histogram { - message bucket{ - // Counting number of values fallen in [left, left + width). - // For the rightmost bucket, the range is [left, left + width]. - required double left = 1; - required double width = 2; - required int64 count = 3; - } - - repeated bucket buckets = 1; - ... - } - - message Value { - // Tag name for the data. - required string tag = 1; - - // Value associated with the tag. - oneof value { - float scalar_value = 3; - Image image = 4; - TensorProto tensor = 8; - Histogram histogram = 9; - } - } - - // Set of values for the summary. - repeated Value value = 1; -} -``` - -TensorProto is defined in the [anf_ir.proto](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/utils/anf_ir.proto) file. 
diff --git a/docs/note/source_en/design/mindinsight/training_visual_design.md b/docs/note/source_en/design/mindinsight/training_visual_design.md deleted file mode 100644 index 9aade7b59f9d2efd585a13a2f2c6dd4288f301fe..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindinsight/training_visual_design.md +++ /dev/null @@ -1,132 +0,0 @@ -# Overall Design of Training Visualization - -`Linux` `Ascend` `GPU` `CPU` `Model Development` `Model Optimization` `Framework Development` `Intermediate` `Expert` `Contributor` - - - -- [Overall Design of Training Visualization](#overall-design-of-training-visualization) - - [Logical Architecture of Training Visualization](#logical-architecture-of-training-visualization) - - [Architecture of Training Information Collection](#architecture-of-training-information-collection) - - [Architecture of Training Information Analysis and Display](#architecture-of-training-information-analysis-and-display) - - [Code Organization](#code-organization) - - [Training Visualization Data Model](#training-visualization-data-model) - - [Training Information Data Flow](#training-information-data-flow) - - [Data Model](#data-model) - - [Training Job](#training-job) - - [Lineage Data](#lineage-data) - - [Training Process Data](#training-process-data) - - - - - -[MindInsight](https://gitee.com/mindspore/mindinsight) is a visualized debugging and tuning component of MindSpore. MindInsight can be used to complete tasks such as training visualization, performance tuning, and precision tuning. - -Training visualization includes functions such as training dashboard, model lineage, and data lineage. Training dashboard includes functions such as scalar, parameter distribution, computational graph, data graph, and data sampling. - -This document describes the logical architecture, code organization, and data model of the MindInsight training visualization function. 
- -## Logical Architecture of Training Visualization - -The logical architecture of training visualization is divided into two parts: architecture of training information collection and architecture of training information analysis and display. - -![Logical architecture of MindInsight training visualization](./images/training_visualization_architecture.png) - -Figure 1 Logical architecture of MindInsight training visualization - -### Architecture of Training Information Collection - -The training information collection function in MindSpore consists of training information collection API module and training information persistence module. - -Training information collection APIs include: - -- Training information collection API based on the summary operator. This API contains four summary operators, that is, the ScalarSummary operator for recording scalar data, the ImageSummary operator for recording image data, the HistogramSummary operator for recording parameter distribution histogram data, and the TensorSummary operator for recording tensor data. For details about the operators, see [Operator List](https://www.mindspore.cn/doc/note/en/master/operator_list.html). - -- Training information collection API based on the Python API. You can use the [SummaryRecord.add_value](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.train.html#mindspore.train.summary.SummaryRecord.add_value) method to collect training information in Python code. - -- Easy-to-use training information collection callback. The [SummaryCollector](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.train.html#mindspore.train.callback.SummaryCollector) callback function can be used to conveniently collect common training information to training logs. - -The training information persistence module mainly includes a summary_record module used to manage a cache and a write_pool module used to process data in parallel and write data into a file. 
After the training information is made persistent, it is stored in the training log file (summary file). - -### Architecture of Training Information Analysis and Display - -The architecture of training information analysis and display in MindInsight consists of the WebUI and backend. The backend can be divided into the data loading and cache layer, service logic layer, and API layer from bottom to top. The data loading and cache layer consists of the training log discovery module, training log parsing module, and cache management module. The service logic layer consists of the training dashboard service module and lineage service module. The API layer consists of the RESTful API module. Functions of each module are as follows: - -- Training log discovery module: scans and discovers training log directories that contain training log files in the specified training log root directory (summary-base-dir). Only directories containing training log files are identified as training log directories. - -- Training log parsing module: parses training log files. - -- Cache management module: manages training log parsing tasks and caches the parsing results. It periodically calls the training log discovery module to scan the latest training log directory list. Then, it calls the parsing module to parse the file content and stores the parsing result in the cache for query on the UI. - -- Training dashboard module: provides the service logic of the training dashboard function to support the training dashboard data query on the UI. - -- Lineage module: provides service logic of model lineage and data lineage to support lineage data query on the UI. - -- RESTful API module: encapsulates an API provided by a service module into a RESTful API. - -## Code Organization - -The following describes some important directories in MindInsight code repository. 
- -|Level-1 Directory|Level-2 Directory|Level-3 Directory|Description| -|---|---|---|---| -|build|||Code related to compilation and building | -|mindinsight||| -||backend||RESTful API | -|||datavisual|RESTful API related to training dashboard | -|||lineagemgr|RESTful API related to lineage | -||datavisual||Training dashboard module, including the data loading and cache layer code | -|||data_transform|Data loading and cache layer | -||lineagemgr||Lineage module | -||ui||MindInsight WebUI | -|tests|||Test case directory | - -## Training Visualization Data Model - -### Training Information Data Flow - -The training information is generated during training process. You can use the training information collection API to collect the training information and save it to a disk using the training information persistence module to generate a training log file (summary file). After the training log file is generated, you can use MindInsight to visualize the file information. - -![Training information data flow](./images/training_visualization_data_flow.png) - -Figure 2 Training information data flow - -### Data Model - -Figure 3 shows a brief data model of MindInsight. MindInsight identifies a training log directory as a training job. A training job is the minimum management unit of MindInsight. A training job can be associated with 0 to 1 piece of lineage data and 0 to 1 piece of training process data. The training process data has a rich structure. Each piece of specific data can be uniquely determined based on the given plugin name, tag, and step. These concepts are described in the following. - -![Data model](./images/training_visualization_data_model.png) - -Figure 3 Data model represented by a UML class diagram - -#### Training Job - -MindInsight uses directories to distinguish different training jobs. 
To distinguish training log files of different training jobs, you need to specify the directory for storing training log files for both `SummaryCollector` and `SummaryRecord`. Training log files in the same directory are considered as the training data generated in the same training job. Training log files in different directories are considered as the training data generated in different training jobs. - -In MindInsight code, a training job is called a TrainJob. A TrainJob ID is the name of the directory where the training log is located, for example, ./train_my_lenet_1. - -During a training process, a lineage data file (whose name ends with _lineage) and a training process data file (whose name ends with_MS) are generated. The lineage data mainly describes an invariant attribute of the training from a global perspective, for example, a dataset path used for training, an optimizer used for training, and user-defined lineage information. The most prominent feature of the lineage data file is that it does not change during the training process. The training process data mainly describes a change status of the training, for example, a loss value, parameter distribution, and image data sent to the model in a step. The most prominent feature of the training process data file is that each step changes. - -It should be noted that the classification about whether the training information changes is not absolute. For example, the training process data file contains computational graph data, which is determined when the training starts. - -#### Lineage Data - -The lineage data describes the invariant attribute of a training from a global perspective. When MindInsight identifies multiple training log directories, the lineage data of these trainings is organized and displayed in a table for comparison and analysis. - -#### Training Process Data - -- Plugin Name (plugin_name) - - The training data is classified into scalar, histogram, image, and tensor by type. 
In MindInsight, these types are called plugin names (plugin_name) which are defined in the `mindinsight.datavisual.common.enums.PluginNameEnum` file. - -- Tag - - No matter which type the data belongs to, the data is further divided into different sequences according to the tag. Generally, tags are named by users to distinguish data. For example, the tag of a scalar that records a loss value can be named loss. When processing data, MindInsight automatically adds a suffix to the tag based on the plugin name. For example, if a scalar's tag is loss, the tag is automatically renamed loss/scalar. - -- Step - - The training process data is generated in each training step. To distinguish them, data is marked with the corresponding step number. - -- Data Query and Display - - When displaying data, you might want to see how the data under a tag changes with the training process. Therefore, when querying data, you do not need to specify the step number. Instead, you can specify the training job, plugin name, and tag to query data of all steps under the tag. diff --git a/docs/note/source_en/design/mindspore.rst b/docs/note/source_en/design/mindspore.rst deleted file mode 100644 index c10bd35f4a4fdd734cbc37725c2a9f0b10c78179..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindspore.rst +++ /dev/null @@ -1,10 +0,0 @@ -MindSpore Design -================ - -.. 
toctree:: - :maxdepth: 1 - - mindspore/mindir - mindspore/distributed_training_design - mindspore/profiler_design - diff --git a/docs/note/source_en/design/mindspore/architecture.md b/docs/note/source_en/design/mindspore/architecture.md deleted file mode 100644 index 034a51156e5d2e2c6c707768385e831092658d98..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindspore/architecture.md +++ /dev/null @@ -1,16 +0,0 @@ -# Overall Architecture - -`Linux` `Windows` `Ascend` `GPU` `CPU` `On Device` `Model Development` `Model Optimization` `Framework Development` `Intermediate` `Expert` `Contributor` - - - -MindSpore is a deep learning framework in all scenarios, aiming to achieve easy development, efficient execution, and all-scenario coverage. Easy development features include API friendliness and low debugging difficulty. Efficient execution includes computing efficiency, data preprocessing efficiency, and distributed training efficiency. All-scenario coverage means that the framework supports cloud, edge, and device scenarios. - -The following figure shows the overall MindSpore architecture, which mainly consists of four parts: MindSpore Extend, MindExpress (ME), MindCompiler, and MindRE. - -- **MindSpore Extend**: MindSpore expansion package, which is expected to be contributed and built by more developers. -- **MindExpress**: Python-based frontend expression. In the future, more frontends based on C/C++ and Java will be provided. Cangjie, Huawei's self-developed programming language frontend, is now in the pre-research phase. In addition, Huawei is working on interconnection with third-party frontends such as Julia to introduce more third-party ecosystems. 
-- **MindCompiler**: The core compiler of the layer is mainly based on the unified MindIR of the end cloud to achieve three major functions, including hardware-independent optimization (type inference, automatic differentiation, expression simplification, etc.), hardware-related optimization (automatic parallelism, memory optimization, graph kernel fusion, pipeline execution, etc.), deployment and inference-related optimizations (quantization, pruning, etc.); MindAKG is MindSpore's automatic operator generation compiler, which is still being continuously improved. -- **MindRE**: all-scenario runtime, which covers the cloud, device, and smaller IoT scenarios. - -![MindSpore](images/architecture.png) diff --git a/docs/note/source_en/design/mindspore/distributed_training_design.md b/docs/note/source_en/design/mindspore/distributed_training_design.md deleted file mode 100644 index acd3b2d658ebfb093293eb1e6175ad863cbb8465..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindspore/distributed_training_design.md +++ /dev/null @@ -1,140 +0,0 @@ -# Distributed Training Design - -`Ascend` `GPU` `Model Development` `Model Optimization` `Framework Development` `Intermediate` `Expert` `Contributor` - - - -- [Distributed Training Design](#distributed-training-design) - - [Background](#background) - - [Concepts](#concepts) - - [Collective Communication](#collective-communication) - - [Synchronization Mode](#synchronization-mode) - - [Data Parallelism](#data-parallelism) - - [Principle of Data Parallelism](#principle-of-data-parallelism) - - [Data Parallel Code](#data-parallel-code) - - [Automatic Parallelism](#automatic-parallelism) - - [Principle of Automatic Parallelism](#principle-of-automatic-parallelism) - - [Automatic Parallel Code](#automatic-parallel-code) - - - - - -## Background - -With the rapid development of deep learning, the number of datasets and parameters are growing exponentially to improve the accuracy and generalization capability of 
neural networks. Parallel distributed training has become a development trend to resolve the performance bottleneck of ultra-large scale networks. MindSpore supports the mainstream distributed training paradigm and develops an automatic hybrid parallel solution. The following describes the design principles of several parallel training modes and provides guidance for users to perform custom development. - -## Concepts - -### Collective Communication - -Collective communication is defined as communication that involves a group of processes. All processes in the group send and receive data after meeting certain conditions. MindSpore implements data transmission during parallel training through collective communication. On Ascend chips, MindSpore depends on the Huawei Collective Communication Library (`HCCL`) to implement the task. On GPU, MindSpore depends on the NVIDIA Collective Communication Library (`NCCL`) to implement the task. - -### Synchronization Mode - -In synchronous mode, all devices strart training at the same time and update parameter values synchronously after the backward propagation algorithm is executed. Currently, MindSpore uses the synchronous training mode. - -## Data Parallelism - -This section describes how the data parallel mode `ParallelMode.DATA_PARALLEL` works in MindSpore. - -### Principle of Data Parallelism - -![Data Parallel Description](./images/data_parallel.png) - -1. Environment dependencies - - Each time before parallel training starts, the `mindspore.communication.init` API is called to initialize communication resources and the global communication group `WORLD_COMM_GROUP` is automatically created. - -2. Data distribution - - The key of data parallelism is to split datasets based on the sample dimension and deliver the split datasets to different devices. Each dataset loading API provided by the `mindspore.dataset` module has the `num_shards` and `shard_id` parameters. 
The parameters are used to split a dataset into multiple datasets, perform cyclic sampling, and collect data of the `batch` size to each device. When the data volume is insufficient, the sampling restarts from the beginning. - -3. Network structure - - The scripting method of data parallel network is the same as that of standalone network. This is because, although models of each device are executed independently during the forward and backward propagation processes, the same network structure is maintained. To ensure the synchronous training between devices, the initial values of corresponding network parameters must be the same. You are advised to enable `parameter_broadcast` to broadcast the values of weights in `DATA_PARALLEL` and `HYBRID_PARALLEL` modes. And in `AUTO_PARALLEL` and `SEMI_AUTO_PARALLEL` modes, the sharded dimensions of weights will be processed automatically by setting random seeds to ensure the initialization of weights are consistent on the devices which belongs to the same data parallel dimension. - -4. Gradient aggregation - - Theoretically, the training effect of data parallel network should be the same as that of the standalone network. To ensure the consistency of the calculation logic, the `AllReduce` operator is inserted after gradient calculation to implement the gradient aggregation operation between devices. You can enable `mean` to average the sum of gradient values, or regard `mean` as a hyperparameter. Enabling `mean` is equivalent to reducing the learning rate by multiple times. - -5. Parameter update - - Because the gradient aggregation operation is introduced, the models of each device perform parameter update with the same gradient value. Therefore, MindSpore implements a synchronous data parallel training mode. Theoretically, models trained by each device are the same. If the reduce operation on samples is involved on the network, the network output may be different. 
This is determined by the sharding attribute of data parallelism. - -### Data Parallel Code - -1. Collective communication - - - [management.py](https://gitee.com/mindspore/mindspore/blob/master/mindspore/communication/management.py): This file covers the `helper` function APIs commonly used during the collective communication process, for example, the APIs for obtaining the number of clusters and device ID. When collective communication is executed on the Ascend chip, the framework loads the `libhccl.so` library file in the environment and uses it to call the communication APIs from the Python layer to the underlying layer. - - [comm_ops.py](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ops/operations/comm_ops.py): MindSpore encapsulates supported collective communication operations as operators and stores the operators in this file. The operators include `AllReduce`, `AllGather`, `ReduceScatter`, and `Broadcast`. `PrimitiveWithInfer` defines the attributes required by the operators, as well as the `shape` and `dtype` inference methods from the input to the output during graph composition. - -2. Gradient aggregation - - - [grad_reducer.py](https://gitee.com/mindspore/mindspore/blob/master/mindspore/nn/wrap/grad_reducer.py): This file implements the gradient aggregation process. After the input parameter `grads` is expanded by using `HyperMap`, the `AllReduce` operator is inserted. The global communication group is used. You can also perform custom development by referring to this section based on your network requirements. In MindSpore, standalone and distributed execution shares a set of network encapsulation APIs. In the `Cell`, `ParallelMode` is used to determine whether to perform gradient aggregation. For details about the network encapsulation APIs, see the `TrainOneStepCell` code implementation. 
- -## Automatic Parallelism - -As a key feature of MindSpore, automatic parallelism is used to implement hybrid parallel training that combines automatic data parallelism and model parallelism. It aims to help users express the parallel algorithm logic using standalone scripts, reduce the difficulty of distributed training, improve the algorithm R&D efficiency, and maintain the high performance of training. This section describes how the automatic parallel mode `ParallelMode.AUTO_PARALLEL` and semi-automatic parallel mode `ParallelMode.SEMI_AUTO_PARALLEL` work in MindSpore. - -### Principle of Automatic Parallelism - -![Automatic Parallel Description](./images/auto_parallel.png) - -1. Distributed operator and tensor layout - - As shown in the preceding figure, the automatic parallel process traverses the standalone forward ANF graphs and performs shard modeling on tensors in the unit of distributed operator, indicating how the input and output tensors of an operator are distributed to each device of the cluster, that is, the tensor layout. Users do not need to know which device runs which slice of a model. The framework automatically schedules and allocates model slices. - - To obtain the tensor layout model, each operator has a shard strategy, which indicates the shard status of each input of the operator in the corresponding dimension. Generally, tensors can be sharded in any dimension as long as the value is a multiple of 2, and the even distribution principle is met. The following figure shows an example of the three-dimensional `BatchMatmul` operation. The parallel strategy consists of two tuples, indicating the sharding of `input` and `weight`, respectively. Elements in a tuple correspond to tensor dimensions one by one. `2^N` indicates the shard unit, and `1` indicates that the tuple is not sharded. 
If you want to express a parallel data shard strategy, that is, only data in the `batch` dimension of `input` is sharded, and data in other dimensions are not sharded, you can use `strategy=((2^N, 1, 1),(1, 1, 1))`. If you want to express a parallel model shard strategy, that is, only model in the non-`batch` dimension of `weight` is sharded, for example, only the `channel` dimension is sharded, you can use `strategy=((1, 1, 1),(1, 1, 2^N))`. If you want to express a hybrid parallel shard strategy, one of which is `strategy=((2^N, 1, 1),(1, 1, 2^N))`. - - ![Operator Sharding Definition](./images/operator_split.png) - - Based on the shard strategy of an operator, the framework automatically derives the distribution model of input tensors and output tensors of the operator. This distribution model consists of `device_matrix`, `tensor_shape`, and `tensor map`, which indicate the device matrix shape, tensor shape, and mapping between devices and tensor dimensions, respectively. Based on the tensor layout model, distributed operator determines whether to insert extra computation and communication operations in the graph to ensure that the operator computing logic is correct. - -2. Tensor Redistribution - - When the output tensor model of an operator is inconsistent with the input tensor model of the next operator, computation and communication operations need to be introduced to implement the change between tensor layouts. The automatic parallel process introduces the tensor redistribution algorithm, which can be used to derive the communication conversion operations between random tensor layouts. The following three examples represent a parallel computing process of the formula `Z=(X×W)×V`, that is, a `MatMul` operation of two two-dimensional matrices, and show how to perform conversion between different parallel modes. 
- - In example 1, the output of the first data parallel matrix multiplication is sharded in the row rection, and the input of the second model parallel matrix multiplication requires full tensors. The framework automatically inserts the `AllGather` operator to implement redistribution. - - ![Tensor Redistribution](./images/tensor_redistribution1.png) - - In example 2, the output of parallel matrix multiplication of the first model is sharded in the column direction, and the input of parallel matrix multiplication of the second model is sharded in the row direction. The framework automatically inserts a communication operator equivalent to the `AlltoAll` operation in collective communication to implement redistribution. - - ![Tensor Redistribution](./images/tensor_redistribution2.png) - - In example 3, an output shard mode of the first hybrid parallel matrix multiplication is the same as an input shard mode of the second hybrid parallel matrix multiplication. Therefore, redistribution does not need to be introduced. In the second matrix multiplication operation, the related dimensions of the two inputs are sharded. Therefore, the `AllReduce` operator needs to be inserted to ensure the operation correctness. - - ![Tensor Redistribution](./images/tensor_redistribution3.png) - - In general, this distributed representation breaks the boundary between data parallelism and model parallelism, making it easy to implement hybrid parallelism. From the perspective of scripts, users only need to construct a standalone network to express the parallel algorithm logic. Framework automatically shards the entire graph. - -3. Efficient parallel strategy search algorithm - - The `SEMI_AUTO_PARALLEL` semi-automatic parallel mode indicates that you manually configure the parallel strategy for operators when you are familiar with the operator sharding representation. This mode is helpful for manual optimization, with certain commissioning difficulty. 
You need to master the parallel principle and obtain a high-performance parallel solution based on the network structure and cluster topology. To further help users accelerate the parallel network training process, the automatic parallel mode `AUTO_PARALLEL` introduces the automatic search feature of the parallel strategy on the basis of the semi-automatic parallel mode. Automatic parallelism builds cost models based on the hardware platform, and calculates the computation cost, memory cost, and communication cost of a certain amount of data and specific operators based on different parallel strategies Then, by using the dynamic programming algorithm or recursive programming algorithm and taking the memory upper limit of a single device as a constraint condition, a parallel strategy with optimal performance is efficiently searched out. - - Strategy search replaces manual model sharding and provides a high-performance sharding solution within a short period of time, greatly reducing the threshold for parallel training. - -4. Convenient distributed automatic differentiation - - In addition to forward network communication, the traditional manual model sharding needs to consider backward parallel computing. MindSpore encapsulates communication operations into operators and automatically generates backward propagation of communication operators based on the original automatic differentiation operations of the framework. Therefore, even during distributed training, users only need to pay attention to the forward propagation of the network to implement actual automatic parallel training. - -### Automatic Parallel Code - -1. Tensor layout model - - [tensor_layout](https://gitee.com/mindspore/mindspore/tree/master/mindspore/ccsrc/frontend/parallel/tensor_layout): This directory contains the definitions and implementation of functions related to the tensor distribution model. 
`tensor_layout.h` declares the member variables `tensor_map_origin_`, `tensor_shape_`, and `device_arrangement_` required by a tensor distribution model. In `tensor_redistribution.h`, the related methods for implementing the `from_origin_` and `to_origin_` transformation between tensor distributions are declared. The deduced redistribution operation is stored in `operator_list_` and returned, in addition, the communication cost `comm_cost_`,, memory cost `memory_cost_`, and calculation cost `computation_cost_` required for redistribution are calculated. - -2. Distributed operators - - [ops_info](https://gitee.com/mindspore/mindspore/tree/master/mindspore/ccsrc/frontend/parallel/ops_info): This directory contains the implementation of distributed operators. In `operator_info.h`, the base class `OperatorInfo` of distributed operator implementation is defined. A distributed operator to be developed shall inherit the base class and explicitly implement related imaginary functions. The `InferTensorInfo`, `InferTensorMap`, and `InferDevMatrixShape` functions define the algorithms for deriving the input and output tensor distribution model of the operator. The `InferForwardCommunication` and `InferMirrorOps` functions define the extra calculation and communication operations to be inserted for operator sharding. The `CheckStrategy` and `GenerateStrategies` functions define the parallel strategy validation and generation for the operator. According to the parallel strategy `SetCostUnderStrategy`, the parallel cost `operator_cost_` of the distributed operator is generated. - -3. Strategy search algorithm - - [auto_parallel](https://gitee.com/mindspore/mindspore/tree/master/mindspore/ccsrc/frontend/parallel/auto_parallel): The shard strategy search algorithm is implemented in this directory. `graph_costmodel.h` defines the graph composition information. Each point indicates an operator `OperatorInfo`. 
The directed edge `edge_costmodel.h` indicates the input and output relationship of operators and the redistribution cost. `operator_costmodel.h` defines the cost model of each operator, including the calculation cost, communication cost, and memory cost. `dp_algorithm_costmodel.h` describes the main process of the dynamic planning algorithm, which consists of a series of graph operations. `costmodel.h` defines the data structures of cost and graph operations. - -4. Device management - - [device_manager.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/frontend/parallel/device_manager.h): This file is used to create and manage cluster device communication groups. The device matrix model is defined by `device_matrix.h`, and the communication domain is managed by `group_manager.h`. - -5. Entire graph sharding - - [step_auto_parallel.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/frontend/parallel/step_auto_parallel.h), and [step_parallel.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/frontend/parallel/step_parallel.h): The two files contain the core implementation of the automatic parallel process. `step_auto_parallel.h` calls the strategy search process and generates the `OperatorInfo` of the distributed operator. Then in `step_parallel.h`, processes such as operator sharding and tensor redistribution are processed to reconstruct the standalone computing graph in distributed mode. - -6. Backward propagation of communication operators - - [grad_comm_ops.py](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ops/_grad/grad_comm_ops.py): This file defines the backward propagation of communication operators, such as `AllReduce` and `AllGather`. 
diff --git a/docs/note/source_en/design/mindspore/images/analyser_class_profiler.png b/docs/note/source_en/design/mindspore/images/analyser_class_profiler.png deleted file mode 100644 index ff70c7eedd250437c1c01c17731cfde1d85b62b4..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/analyser_class_profiler.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/architecture.eddx b/docs/note/source_en/design/mindspore/images/architecture.eddx deleted file mode 100644 index 7fda36c045082d30d4b2186f135d2858f07e7c4e..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/architecture.eddx and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/architecture.png b/docs/note/source_en/design/mindspore/images/architecture.png deleted file mode 100644 index 621cc3b9aa26857cc32fe2291238716e3249cb5a..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/architecture.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/auto_parallel.png b/docs/note/source_en/design/mindspore/images/auto_parallel.png deleted file mode 100644 index d0135541eb76cedfcb22f2eb3e470a9d5d913957..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/auto_parallel.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/context_profiler.png b/docs/note/source_en/design/mindspore/images/context_profiler.png deleted file mode 100644 index 18aebed512102025957435939b55829e8aeb6614..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/context_profiler.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/data_parallel.png b/docs/note/source_en/design/mindspore/images/data_parallel.png deleted file mode 100644 index 
a92c82aa64615b398e83b9bc2cf0aa2c5db9f904..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/data_parallel.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/ir/cf.dot b/docs/note/source_en/design/mindspore/images/ir/cf.dot deleted file mode 100644 index 9da78b45beb7ea56365a300e601c79af4a55e130..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindspore/images/ir/cf.dot +++ /dev/null @@ -1,183 +0,0 @@ -digraph mindspore { -compound=true -subgraph cluster_0x8b8cc30{ -id=cluster_0x8b8cc30 -label="fibonacci[managed]" -fontname="Courier New" -node0x8bde4b0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]0)
>,] -node0x8bee780_0[fontname="Courier New",shape=plaintext,label=< - - -
0
CNode([CNode]1)
>,] -node0x8bee900_0[fontname="Courier New",shape=plaintext,label=< - - -
0123
CNode([CNode]2)
>,] -node0x8b702a0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]54)
>,] -node0x8b6db30_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]37)
>,] -node0x8bc0bb0_0[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x8b768b0_1[fontname="Courier New",shape=plaintext,label=<
Primitive
switch
>,] -node0x8b6c9f0_2[fontname="Courier New",shape=oval,label="✓fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8b91500",] -node0x8bd9410_3[fontname="Courier New",shape=plaintext,label=<
Primitive
Partial
>,] -node0x8b85110_4[fontname="Courier New",shape=oval,label="✗fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8bda550",] -node0x8b7bab0_5[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b76120_29[fontname="Courier New",shape=plaintext,label=<
PrimitivePy
scalar_lt
>,] -node0x8b7bab0_30[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b90f50_31[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x8b8cc30[shape=plaintext label=<
parameters
n
>,];} -subgraph cluster_0x8bda550{ -id=cluster_0x8bda550 -label="✗fibonacci[managed]" -fontname="Courier New" -node0x8b6acd0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]3)
>,] -node0x8b6dff0_0[fontname="Courier New",shape=plaintext,label=< - - -
0
CNode([CNode]4)
>,] -node0x8b7d410_0[fontname="Courier New",shape=plaintext,label=< - - -
0123
CNode([CNode]5)
>,] -node0x8b83a80_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]55)
>,] -node0x8b8c2a0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]35)
>,] -node0x8b62c70_6[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x8bbe5f0_7[fontname="Courier New",shape=plaintext,label=<
Primitive
switch
>,] -node0x8b8a0f0_8[fontname="Courier New",shape=oval,label="✓✗fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8b64c50",] -node0x8b8dbb0_9[fontname="Courier New",shape=plaintext,label=<
Primitive
Partial
>,] -node0x8bc0680_10[fontname="Courier New",shape=oval,label="✗✗fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8bedfe0",] -node0x8b76290_11[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b90c20_24[fontname="Courier New",shape=plaintext,label=<
PrimitivePy
scalar_eq
>,] -node0x8b76290_25[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b7da70_26[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x8bda550[shape=plaintext label=<
parameters
n
>,];} -subgraph cluster_0x8bedfe0{ -id=cluster_0x8bedfe0 -label="✗✗fibonacci[managed]" -fontname="Courier New" -node0x8b8e4a0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]6)
>,] -node0x8bb9b70_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]33)
>,] -node0x8b7d610_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]8)
>,] -node0x8beae20_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]31)
>,] -node0x8b76cd0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]11)
>,] -node0x8b849b0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]31)
>,] -node0x8b85200_12[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x8b84310_13[fontname="Courier New",shape=plaintext,label=<
PrimitivePy
scalar_add
>,] -node0x8bc14b0_14[fontname="Courier New",shape=oval,label="fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8b8cc30",] -node0x8b8d2e0_15[fontname="Courier New",shape=plaintext,label=<
PrimitivePy
scalar_sub
>,] -node0x8bbc810_16[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b8d3d0_17[fontname="Courier New",shape=plaintext,label=<
Int32Imm
2
>,] -node0x8bd5920_18[fontname="Courier New",shape=oval,label="fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8b8cc30",] -node0x8bc15a0_19[fontname="Courier New",shape=plaintext,label=<
PrimitivePy
scalar_sub
>,] -node0x8bbc810_20[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b83990_21[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x8bedfe0[shape=plaintext label=<
parameters
n
>,];} -subgraph cluster_0x8b64c50{ -id=cluster_0x8b64c50 -label="✓✗fibonacci[managed]" -fontname="Courier New" -node0x8be8e20_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]15)
>,] -node0x8bd5440_22[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x8b89ee0_23[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x8b64c50[shape=plaintext label=<
parameters
>,];} -subgraph cluster_0x8b91500{ -id=cluster_0x8b91500 -label="✓fibonacci[managed]" -fontname="Courier New" -node0x8bdacb0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]18)
>,] -node0x8b7d900_27[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x8bb9e90_28[fontname="Courier New",shape=plaintext,label=<
Int32Imm
0
>,] -parameters_0x8b91500[shape=plaintext label=<
parameters
>,];} -node0x8bc0bb0_0:core->node0x8bde4b0_0:0[arrowhead=vee,style=dashed] -node0x8bee780_0:core->node0x8bde4b0_0:1[arrowhead=vee,] -node0x8bee900_0:core->node0x8bee780_0:0[arrowhead=vee,] -node0x8b768b0_1:core->node0x8bee900_0:0[arrowhead=vee,style=dashed] -node0x8b6db30_0:core->node0x8bee900_0:1[arrowhead=vee,] -node0x8b6c9f0_2->node0x8bee900_0:2[arrowhead=vee,] -node0x8b6c9f0_2->node0x8bdacb0_0[lhead=cluster_0x8b91500,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8b702a0_0:core->node0x8bee900_0:3[arrowhead=vee,] -node0x8bd9410_3:core->node0x8b702a0_0:0[arrowhead=vee,style=dashed] -node0x8b85110_4->node0x8b702a0_0:1[arrowhead=vee,] -node0x8b85110_4->node0x8b6acd0_0[lhead=cluster_0x8bda550,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8b7bab0_5->node0x8b702a0_0:2[arrowhead=vee,] -node0x8b62c70_6:core->node0x8b6acd0_0:0[arrowhead=vee,style=dashed] -node0x8b6dff0_0:core->node0x8b6acd0_0:1[arrowhead=vee,] -node0x8b7d410_0:core->node0x8b6dff0_0:0[arrowhead=vee,] -node0x8bbe5f0_7:core->node0x8b7d410_0:0[arrowhead=vee,style=dashed] -node0x8b8c2a0_0:core->node0x8b7d410_0:1[arrowhead=vee,] -node0x8b8a0f0_8->node0x8b7d410_0:2[arrowhead=vee,] -node0x8b8a0f0_8->node0x8be8e20_0[lhead=cluster_0x8b64c50,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8b83a80_0:core->node0x8b7d410_0:3[arrowhead=vee,] -node0x8b8dbb0_9:core->node0x8b83a80_0:0[arrowhead=vee,style=dashed] -node0x8bc0680_10->node0x8b83a80_0:1[arrowhead=vee,] -node0x8bc0680_10->node0x8b8e4a0_0[lhead=cluster_0x8bedfe0,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8b76290_11->node0x8b83a80_0:2[arrowhead=vee,] -node0x8b85200_12:core->node0x8b8e4a0_0:0[arrowhead=vee,style=dashed] -node0x8bb9b70_0:core->node0x8b8e4a0_0:1[arrowhead=vee,] -node0x8b84310_13:core->node0x8bb9b70_0:0[arrowhead=vee,style=dashed] -node0x8b76cd0_0:core->node0x8bb9b70_0:1[arrowhead=vee,] -node0x8b7d610_0:core->node0x8bb9b70_0:2[arrowhead=vee,] -node0x8bc14b0_14->node0x8b7d610_0:0[arrowhead=vee,style=dashed] 
-node0x8bc14b0_14->node0x8bde4b0_0[lhead=cluster_0x8b8cc30,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8beae20_0:core->node0x8b7d610_0:1[arrowhead=vee,] -node0x8b8d2e0_15:core->node0x8beae20_0:0[arrowhead=vee,style=dashed] -node0x8bbc810_16->node0x8beae20_0:1[arrowhead=vee,] -node0x8b8d3d0_17:core->node0x8beae20_0:2[arrowhead=vee,] -node0x8bd5920_18->node0x8b76cd0_0:0[arrowhead=vee,style=dashed] -node0x8bd5920_18->node0x8bde4b0_0[lhead=cluster_0x8b8cc30,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8b849b0_0:core->node0x8b76cd0_0:1[arrowhead=vee,] -node0x8bc15a0_19:core->node0x8b849b0_0:0[arrowhead=vee,style=dashed] -node0x8bbc810_20->node0x8b849b0_0:1[arrowhead=vee,] -node0x8b83990_21:core->node0x8b849b0_0:2[arrowhead=vee,] -node0x8bd5440_22:core->node0x8be8e20_0:0[arrowhead=vee,style=dashed] -node0x8b89ee0_23:core->node0x8be8e20_0:1[arrowhead=vee,] -node0x8b90c20_24:core->node0x8b8c2a0_0:0[arrowhead=vee,style=dashed] -node0x8b76290_25->node0x8b8c2a0_0:1[arrowhead=vee,] -node0x8b7da70_26:core->node0x8b8c2a0_0:2[arrowhead=vee,] -node0x8b7d900_27:core->node0x8bdacb0_0:0[arrowhead=vee,style=dashed] -node0x8bb9e90_28:core->node0x8bdacb0_0:1[arrowhead=vee,] -node0x8b76120_29:core->node0x8b6db30_0:0[arrowhead=vee,style=dashed] -node0x8b7bab0_30->node0x8b6db30_0:1[arrowhead=vee,] -node0x8b90f50_31:core->node0x8b6db30_0:2[arrowhead=vee,] -} diff --git a/docs/note/source_en/design/mindspore/images/ir/cf.png b/docs/note/source_en/design/mindspore/images/ir/cf.png deleted file mode 100644 index 196be66c223022c34fe34848d30c10985efa94c7..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/ir/cf.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/ir/closure.dot b/docs/note/source_en/design/mindspore/images/ir/closure.dot deleted file mode 100644 index fd3d402bf48b3410e2d92964c1e1ef5e289dda40..0000000000000000000000000000000000000000 --- 
a/docs/note/source_en/design/mindspore/images/ir/closure.dot +++ /dev/null @@ -1,93 +0,0 @@ -digraph mindspore { -compound=true -subgraph cluster_0x19e608f0{ -id=cluster_0x19e608f0 -label="ms_closure[managed]" -fontname="Courier New" -node0x19269490_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]0)
>,] -node0x1976cf00_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]1)
>,] -node0x1963d630_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode(out2)
>,] -node0x196d87f0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode(closure)
>,] -node0x196c2270_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode(out1)
>,] -node0x19e328a0_0[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x19e5e7c0_1[fontname="Courier New",shape=plaintext,label=<
DoSignaturePrimitive
S-Prim-make_tuple
>,] -node0x19b6a3d0_2[fontname="Courier New",shape=plaintext,label=<
Int32Imm
2
>,] -node0x19e68e20_3[fontname="Courier New",shape=oval,label="func_outer[func_outer]",style=filled,fillcolor=palegreen,URL="#cluster_0x19e63830",] -node0x19e38e00_4[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -node0x19e23c10_5[fontname="Courier New",shape=plaintext,label=<
Int32Imm
2
>,] -node0x19e1c020_14[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x19e608f0[shape=plaintext label=<
parameters
>,];} -subgraph cluster_0x19e63830{ -id=cluster_0x19e63830 -label="func_outer[managed]" -fontname="Courier New" -node0x19e69550_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]5)
>,] -node0x19e68f90_6[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x19e69100_7[fontname="Courier New",shape=oval,label="func_inner",style=filled,fillcolor=palegreen,URL="#cluster_0x19e64130",] -node0x19e035b0_12[fontname="Courier New",shape=octagon,label="a",style=filled,fillcolor=paleturquoise,] -node0x19e036b0_13[fontname="Courier New",shape=octagon,label="b",style=filled,fillcolor=paleturquoise,] -parameters_0x19e63830[shape=plaintext label=<
parameters
a
b
>,];} -subgraph cluster_0x19e64130{ -id=cluster_0x19e64130 -label="func_inner[managed]" -fontname="Courier New" -node0x19e68c80_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]6)
>,] -node0x19e68ae0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]7)
>,] -node0x19e682c0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]8)
>,] -node0x19e50a00_8[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x19c7ced0_9[fontname="Courier New",shape=plaintext,label=<
DoSignaturePrimitive
S-Prim-add
>,] -node0x19e645e0_10[fontname="Courier New",shape=octagon,label="c",style=filled,fillcolor=paleturquoise,] -node0x19e68790_11[fontname="Courier New",shape=plaintext,label=<
DoSignaturePrimitive
S-Prim-add
>,] -parameters_0x19e64130[shape=plaintext label=<
parameters
c
>,];} -node0x19e328a0_0:core->node0x19269490_0:0[arrowhead=vee,style=dashed] -node0x1976cf00_0:core->node0x19269490_0:1[arrowhead=vee,] -node0x19e5e7c0_1:core->node0x1976cf00_0:0[arrowhead=vee,style=dashed] -node0x196c2270_0:core->node0x1976cf00_0:1[arrowhead=vee,] -node0x1963d630_0:core->node0x1976cf00_0:2[arrowhead=vee,] -node0x196d87f0_0:core->node0x1963d630_0:0[arrowhead=vee,style=dashed] -node0x19b6a3d0_2:core->node0x1963d630_0:1[arrowhead=vee,] -node0x19e68e20_3->node0x196d87f0_0:0[arrowhead=vee,style=dashed] -node0x19e68e20_3->node0x19e69550_0[lhead=cluster_0x19e63830,dir=both,arrowhead=dot,style=filled,color=blue] -node0x19e38e00_4:core->node0x196d87f0_0:1[arrowhead=vee,] -node0x19e23c10_5:core->node0x196d87f0_0:2[arrowhead=vee,] -node0x19e68f90_6:core->node0x19e69550_0:0[arrowhead=vee,style=dashed] -node0x19e69100_7->node0x19e69550_0:1[arrowhead=vee,] -node0x19e69100_7->node0x19e68c80_0[lhead=cluster_0x19e64130,dir=both,arrowhead=dot,style=filled,color=blue] -node0x19e50a00_8:core->node0x19e68c80_0:0[arrowhead=vee,style=dashed] -node0x19e68ae0_0:core->node0x19e68c80_0:1[arrowhead=vee,] -node0x19c7ced0_9:core->node0x19e68ae0_0:0[arrowhead=vee,style=dashed] -node0x19e682c0_0:core->node0x19e68ae0_0:1[arrowhead=vee,] -node0x19e645e0_10->node0x19e68ae0_0:2[arrowhead=vee,] -node0x19e68790_11:core->node0x19e682c0_0:0[arrowhead=vee,style=dashed] -node0x19e035b0_12->node0x19e682c0_0:1[arrowhead=vee,] -node0x19e036b0_13->node0x19e682c0_0:2[arrowhead=vee,] -node0x196d87f0_0:core->node0x196c2270_0:0[arrowhead=vee,style=dashed] -node0x19e1c020_14:core->node0x196c2270_0:1[arrowhead=vee,] -} diff --git a/docs/note/source_en/design/mindspore/images/ir/closure.png b/docs/note/source_en/design/mindspore/images/ir/closure.png deleted file mode 100644 index 6a618dd46d4bceeabb0b68ddbd187babc24a16aa..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/ir/closure.png and /dev/null differ diff --git 
a/docs/note/source_en/design/mindspore/images/ir/hof.dot b/docs/note/source_en/design/mindspore/images/ir/hof.dot deleted file mode 100644 index c0102eef9d47393572e241610f6dd1a2c303fb57..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindspore/images/ir/hof.dot +++ /dev/null @@ -1,85 +0,0 @@ -digraph mindspore { -compound=true -subgraph cluster_0x1b3c23b0{ -id=cluster_0x1b3c23b0 -label="hof[managed]" -fontname="Courier New" -node0x1b32ae50_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]0)
>,] -node0x1b064930_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode(res)
>,] -node0x1b3c0040_0[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x1b3bfbf0_1[fontname="Courier New",shape=oval,label="g",style=filled,fillcolor=palegreen,URL="#cluster_0x1b3be6c0",] -node0x1b3bfed0_2[fontname="Courier New",shape=oval,label="f",style=filled,fillcolor=palegreen,URL="#cluster_0x1b3c50c0",] -node0x1b3c6870_3[fontname="Courier New",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -parameters_0x1b3c23b0[shape=plaintext label=<
parameters
x
>,];} -subgraph cluster_0x1b3c50c0{ -id=cluster_0x1b3c50c0 -label="f[managed]" -fontname="Courier New" -node0x1ab4e190_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]1)
>,] -node0x1ab61220_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]2)
>,] -node0x1b3c59e0_4[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x1b3bf5e0_5[fontname="Courier New",shape=plaintext,label=<
DoSignaturePrimitive
S-Prim-add
>,] -node0x1b348630_6[fontname="Courier New",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -node0x1b3c60f0_7[fontname="Courier New",shape=plaintext,label=<
Int32Imm
3
>,] -parameters_0x1b3c50c0[shape=plaintext label=<
parameters
x
>,];} -subgraph cluster_0x1b3be6c0{ -id=cluster_0x1b3be6c0 -label="g[managed]" -fontname="Courier New" -node0x1b3bfa50_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]4)
>,] -node0x1a9fb8c0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]5)
>,] -node0x1a39f7a0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]6)
>,] -node0x1a4daa20_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]7)
>,] -node0x1b3adfd0_8[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x1b3c2920_9[fontname="Courier New",shape=plaintext,label=<
DoSignaturePrimitive
S-Prim-mul
>,] -node0x1b3120e0_10[fontname="Courier New",shape=octagon,label="function",style=filled,fillcolor=paleturquoise,] -node0x1b3121e0_11[fontname="Courier New",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -node0x1b3120e0_12[fontname="Courier New",shape=octagon,label="function",style=filled,fillcolor=paleturquoise,] -node0x1b3121e0_13[fontname="Courier New",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -parameters_0x1b3be6c0[shape=plaintext label=<
parameters
function
x
>,];} -node0x1b3c0040_0:core->node0x1b32ae50_0:0[arrowhead=vee,style=dashed] -node0x1b064930_0:core->node0x1b32ae50_0:1[arrowhead=vee,] -node0x1b3bfbf0_1->node0x1b064930_0:0[arrowhead=vee,style=dashed] -node0x1b3bfbf0_1->node0x1b3bfa50_0[lhead=cluster_0x1b3be6c0,dir=both,arrowhead=dot,style=filled,color=blue] -node0x1b3bfed0_2->node0x1b064930_0:1[arrowhead=vee,] -node0x1b3bfed0_2->node0x1ab4e190_0[lhead=cluster_0x1b3c50c0,dir=both,arrowhead=dot,style=filled,color=blue] -node0x1b3c6870_3->node0x1b064930_0:2[arrowhead=vee,] -node0x1b3c59e0_4:core->node0x1ab4e190_0:0[arrowhead=vee,style=dashed] -node0x1ab61220_0:core->node0x1ab4e190_0:1[arrowhead=vee,] -node0x1b3bf5e0_5:core->node0x1ab61220_0:0[arrowhead=vee,style=dashed] -node0x1b348630_6->node0x1ab61220_0:1[arrowhead=vee,] -node0x1b3c60f0_7:core->node0x1ab61220_0:2[arrowhead=vee,] -node0x1b3adfd0_8:core->node0x1b3bfa50_0:0[arrowhead=vee,style=dashed] -node0x1a9fb8c0_0:core->node0x1b3bfa50_0:1[arrowhead=vee,] -node0x1b3c2920_9:core->node0x1a9fb8c0_0:0[arrowhead=vee,style=dashed] -node0x1a4daa20_0:core->node0x1a9fb8c0_0:1[arrowhead=vee,] -node0x1a39f7a0_0:core->node0x1a9fb8c0_0:2[arrowhead=vee,] -node0x1b3120e0_10->node0x1a39f7a0_0:0[arrowhead=vee,style=dashed] -node0x1b3121e0_11->node0x1a39f7a0_0:1[arrowhead=vee,] -node0x1b3120e0_12->node0x1a4daa20_0:0[arrowhead=vee,style=dashed] -node0x1b3121e0_13->node0x1a4daa20_0:1[arrowhead=vee,] -} diff --git a/docs/note/source_en/design/mindspore/images/ir/hof.png b/docs/note/source_en/design/mindspore/images/ir/hof.png deleted file mode 100644 index b7aed07a68798c31561de9461c94814ecec17d33..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/ir/hof.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/ir/ir.dot b/docs/note/source_en/design/mindspore/images/ir/ir.dot deleted file mode 100644 index 50faab23bdcd63c5199303cb8fdcbef1ccb3163c..0000000000000000000000000000000000000000 --- 
a/docs/note/source_en/design/mindspore/images/ir/ir.dot +++ /dev/null @@ -1,73 +0,0 @@ -digraph mindspore { -compound=true -subgraph cluster_0x55c9669c3c70{ -id=cluster_0x55c9669c3c70 -label="test_f" -fontname="HuaweiSans" -node0x55c9669c6cc0_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
01
CNode([CNode]0)
>,] -node0x55c9669c66a0_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
012
CNode(c)
>,] -node0x55c9669c6960_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
012
CNode([CNode]1)
>,] -node0x55c9669c58a0_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
012
CNode(b)
>,] -node0x55c9669c4fb0_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
012
CNode(a)
>,] -node0x55c9669c6b60_0[fontname="HuaweiSans",shape=plaintext,label=<
Primitive
return
>,] -node0x55c9669c9720_1[fontname="HuaweiSans",shape=plaintext,label=<
MultitypeFuncGraph
mul
>,] -node0x55c9669c9dd0_2[fontname="HuaweiSans",shape=oval,label="func",style=filled,fillcolor="palegreen",URL="#cluster_0x55c9669c7310",] -node0x55c9669c9800_3[fontname="HuaweiSans",shape=plaintext,label=<
MultitypeFuncGraph
add
>,] -node0x55c9669c4430_4[fontname="HuaweiSans",shape=octagon,label="y",style=filled,fillcolor=paleturquoise,] -node0x55c9669c9e80_5[fontname="HuaweiSans",shape=plaintext,label=<
MultitypeFuncGraph
sub
>,] -node0x55c9669c3fc0_6[fontname="HuaweiSans",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -node0x55c96692eeb0_7[fontname="HuaweiSans",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x55c9669c3c70[shape=plaintext label=<
parameters
x
y
>,];} -subgraph cluster_0x55c9669c7310{ -id=cluster_0x55c9669c7310 -label="func" -fontname="HuaweiSans" -node0x55c9669cc740_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
01
CNode([CNode]7)
>,] -node0x55c9669cc5c0_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
012
CNode([CNode]8)
>,] -node0x55c9669cafc0_8[fontname="HuaweiSans",shape=plaintext,label=<
Primitive
return
>,] -node0x55c9669cc930_9[fontname="HuaweiSans",shape=plaintext,label=<
MultitypeFuncGraph
div
>,] -node0x55c9669cab20_10[fontname="HuaweiSans",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -node0x55c9669cacf0_11[fontname="HuaweiSans",shape=octagon,label="y",style=filled,fillcolor=paleturquoise,] -parameters_0x55c9669c7310[shape=plaintext label=<
parameters
x
y
>,];} -node0x55c9669c6b60_0:core->node0x55c9669c6cc0_0:0[arrowhead=vee,style=dashed] -node0x55c9669c66a0_0:core->node0x55c9669c6cc0_0:1[arrowhead=vee,] -node0x55c9669c9720_1:core->node0x55c9669c66a0_0:0[arrowhead=vee,style=dashed] -node0x55c9669c58a0_0:core->node0x55c9669c66a0_0:1[arrowhead=vee,] -node0x55c9669c6960_0:core->node0x55c9669c66a0_0:2[arrowhead=vee,] -node0x55c9669c9dd0_2->node0x55c9669c6960_0:0[arrowhead=vee,style=dashed] -node0x55c9669c9dd0_2->node0x55c9669cc740_0[lhead=cluster_0x55c9669c7310,dir=both,arrowhead=dot,style=filled,color="#444444"] -node0x55c9669c4fb0_0:core->node0x55c9669c6960_0:1[arrowhead=vee,] -node0x55c9669c58a0_0:core->node0x55c9669c6960_0:2[arrowhead=vee,] -node0x55c9669c9800_3:core->node0x55c9669c58a0_0:0[arrowhead=vee,style=dashed] -node0x55c9669c4fb0_0:core->node0x55c9669c58a0_0:1[arrowhead=vee,] -node0x55c9669c4430_4->node0x55c9669c58a0_0:2[arrowhead=vee,] -node0x55c9669c9e80_5:core->node0x55c9669c4fb0_0:0[arrowhead=vee,style=dashed] -node0x55c9669c3fc0_6->node0x55c9669c4fb0_0:1[arrowhead=vee,] -node0x55c96692eeb0_7:core->node0x55c9669c4fb0_0:2[arrowhead=vee,] -node0x55c9669cafc0_8:core->node0x55c9669cc740_0:0[arrowhead=vee,style=dashed] -node0x55c9669cc5c0_0:core->node0x55c9669cc740_0:1[arrowhead=vee,] -node0x55c9669cc930_9:core->node0x55c9669cc5c0_0:0[arrowhead=vee,style=dashed] -node0x55c9669cab20_10->node0x55c9669cc5c0_0:1[arrowhead=vee,] -node0x55c9669cacf0_11->node0x55c9669cc5c0_0:2[arrowhead=vee,] -} diff --git a/docs/note/source_en/design/mindspore/images/ir/ir.png b/docs/note/source_en/design/mindspore/images/ir/ir.png deleted file mode 100644 index 364c5de500557324c8af86e4d1c5bc0a8f347bf5..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/ir/ir.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/module_profiler.png b/docs/note/source_en/design/mindspore/images/module_profiler.png deleted file mode 100644 index 
62e370d76e3f2fe097242b0af19e1172593be80e..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/module_profiler.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/operator_split.png b/docs/note/source_en/design/mindspore/images/operator_split.png deleted file mode 100644 index 4063170990c6816884361f195db5851cfbdf932e..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/operator_split.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/parser_module_profiler.png b/docs/note/source_en/design/mindspore/images/parser_module_profiler.png deleted file mode 100644 index a10d934559a7754ff605448b34c37015a4821d95..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/parser_module_profiler.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/proposer_class_profiler.png b/docs/note/source_en/design/mindspore/images/proposer_class_profiler.png deleted file mode 100644 index 9d83df184b46520f245d238ed9c08b0fddb9660f..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/proposer_class_profiler.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/proposer_module_profiler.png b/docs/note/source_en/design/mindspore/images/proposer_module_profiler.png deleted file mode 100644 index 33dff5fe12ca3e75df30e79e5d106c5976e00de3..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/proposer_module_profiler.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/tensor_redistribution1.png b/docs/note/source_en/design/mindspore/images/tensor_redistribution1.png deleted file mode 100644 index e4aa944dffa47a82fdde94a2d9e2cf4d81b8752e..0000000000000000000000000000000000000000 Binary files 
a/docs/note/source_en/design/mindspore/images/tensor_redistribution1.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/tensor_redistribution2.png b/docs/note/source_en/design/mindspore/images/tensor_redistribution2.png deleted file mode 100644 index 4ed244d0a7d85a1eb46f80546810bc7147b07df3..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/tensor_redistribution2.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/tensor_redistribution3.png b/docs/note/source_en/design/mindspore/images/tensor_redistribution3.png deleted file mode 100644 index 74da52f7d4940fdbf4efcf03073746a330bf786e..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/tensor_redistribution3.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/images/time_order_profiler.png b/docs/note/source_en/design/mindspore/images/time_order_profiler.png deleted file mode 100644 index c879bf2391ca33f7480c0f95ec8592f1e74003fc..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/design/mindspore/images/time_order_profiler.png and /dev/null differ diff --git a/docs/note/source_en/design/mindspore/mindir.md b/docs/note/source_en/design/mindspore/mindir.md deleted file mode 100644 index 20b9fef412e3779abff08696a514c0b53b4cc22c..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindspore/mindir.md +++ /dev/null @@ -1,324 +0,0 @@ - -# MindSpore IR (MindIR) - -`Linux` `Windows` `Ascend` `GPU` `Framework Development` `Intermediate` `Model Development` `Expert` `Contributor` - - - -- [MindSpore IR (MindIR)](#mindspore-ir-mindir) - - [Overview](#overview) - - [Syntax](#syntax) - - [Example](#example) - - [Saving IR](#saving-ir) - - [IR File Contents Introduction](#ir-file-contents-introduction) - - [Function-style Semantics](#function-style-semantics) - - [Higher-Order 
Functions](#higher-order-functions) - - [Control Flows](#control-flows) - - [Free Variables and Closures](#free-variables-and-closures) - - [References](#references) - - - - - -## Overview - -An intermediate representation (IR) is a representation of a program between the source and target languages, which facilitates program analysis and optimization for the compiler. Therefore, the IR design needs to consider the difficulty in converting the source language to the target language, as well as the ease-of-use and performance of program analysis and optimization. - -MindSpore IR (MindIR) is a function-style IR based on graph representation. Its core purpose is to serve automatic differential transformation. Automatic differentiation uses the transformation method based on the function-style programming framework. Therefore, IR uses the semantics close to that of the ANF function. In addition, a manner of representation based on an explicit dependency graph is used by referring to excellent designs of Sea of Nodes[1] and Thorin[2]. For the specific introduction of ANF-IR, please refer to [MindSpore IR Syntax](#syntax). - -When a model compiled using MindSpore runs in the graph mode `context.set_context(mode=context.GRAPH_MODE)` and `context.set_context(save_graphs=True)` is set in the configuration, some intermediate files will be generated during graph compilation. These intermediate files are called IR files. Currently, there are three IR files: - -- .ir file: An IR file that describes the model structure in text format and can be directly viewed using any text editors. We will also introduce how to view it in the following sections. - -- .dat file: An IR file that describes the model structure more strictly than the .ir file. It contains more contents and can be directly viewed using any text editors. - -- .dot file: An IR file that describes the topology relationships between different nodes. 
You can use this file by [graphviz](http://graphviz.org/) as the input to generate images for users to view the model structure. For models with multiple operators, it is recommended using the visualization component [MindInsight](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/dashboard.html#computational-graph-visualization) to visualize computing graphs. - -## Syntax - -ANF is a simple IR commonly used during functional programming. The ANF syntax is defined as follows: - -```text - ::= NUMBER | STRING | VAR | BOOLEAN | PRIMOP - | (lambda (VAR …) ) - ::= ( …) - | (if ) - ::= (let ([VAR ]) ) | | - -``` - -Expressions in the ANF are classified into atomic expressions (aexp) and compound expressions (cexp). An atomic expression indicates a constant value, a variable, or an anonymous function. A compound expression consists of multiple atomic expressions, indicating that an anonymous function or primitive function call. The first input expression of a compound expression is the called function, and the other input expressions are the called parameters. - -The syntax of MindIR is inherited from the ANF and is defined as follows: - -```text - ::= | - ::= Parameter - ::= Scalar | Named | Tensor | Type | Shape - | Primitive | MetaFuncGraph | FuncGraph - ::= ( …) - ::= | -``` - -ANode in a MindIR corresponds to the atomic expression of ANF. ANode has two subclasses: ValueNode and ParameterNode. ValueNode refers to a constant node, which can carry a constant value (such as a scalar, symbol, tensor, type, and dimension), a primitive function (Primitive), a metafunction (MetaFuncGraph), or a common function (FuncGraph). In functional programming, the function definition itself is a value. ParameterNode refers to a parameter node, which indicates the formal parameter of a function. - -CNode in a MindIR corresponds to the compound expression of ANF, indicating a function call. 
- -During automatic differentiation of MindSpore, the gradient contribution of ParameterNode and CNode are calculated, and the final gradient of ParameterNode is returned. The gradient of ValueNode is not calculated. - -## Example - -The following uses a program code segment as an example to help you understand MindIR. - -```python -def func(x, y): - return x / y - -@ms_function -def test_f(x, y): - a = x - 1 - b = a + y - c = b * func(a, b) - return c -``` - -The ANF corresponding to the Python code is as follows: - -```python -lambda (x, y) - let a = x - 1 in - let b = a + y in - let func = lambda (x, y) - let ret = x / y in - ret end in - let %1 = func(a, b) in - let c = b * %1 in - c end -``` - -The corresponding MindIR is [ir.dot](https://gitee.com/mindspore/docs/blob/master/docs/note/source_en/design/mindspore/images/ir/ir.dot). - -![image](./images/ir/ir.png) - -In a MindIR, a function graph (FuncGraph) indicates the definition of a common function. A directed acyclic graph (DAG) usually consists of ParameterNode, ValueNode, and CNode, which clearly shows the calculation process from parameters to return values. As shown in the preceding figure, the `test_f` and `func` functions in the Python code are converted into two function graphs. The `x` and `y` parameters are converted into ParameterNode in the function graphs, and each expression is converted into a CNode. The first input of CNode links to the called functions, for example, `add`, `func`, and `return` in the figure. It should be noted that these nodes are all `ValueNode` because they are considered as constant function values. Other input of CNode links to the called parameters. The parameter values can be obtained from the ParameterNode, ValueNode, and other CNode. - -In the ANF, each expression is bound as a variable by using the let expression, and the dependency on the expression output is represented by referencing the variable. 
In the MindIR, each expression is bound as a node, and the dependency is represented by using the directed edges between nodes. - -## Saving IR - -`context.set_context(save_graphs=True)` is used to save the intermediate code in each compilation phase. The intermediate code can be saved in two formats. One is the text format with the suffix `.ir`, and the other is the graphical format with the suffix `.dot`. When the network scale is small, you are advised to use the graphical format that is more intuitive. When the network scale is large, you are advised to use the text format that is more efficient. - -You can run the graphviz command to convert a .dot file to the picture format. For example, you can run the `dot -Tpng *.dot -o *.png` command to convert a .dot file to a .png file. - -Add the following code to `train.py`, When running the script, MindSpore will automatically store the IR files generated during compilation under the specified path. - -```python -if __name__ == "__main__": - context.set_context(save_graphs=True, save_graphs_path="path/to/ir/files") -``` - -Here, we run the training script on stand-alone computing device. When running on multiple computing devices, MindSpore will generate separate processes for each computing device. So, in multiple computing devices scenario, you are advised to read the ID of the current computing device from the training script and set an independent `save_graphs_path` for each decive to save the IR files to a different path. For example: - -```python -device_id = os.getenv("DEVICE_ID") -context.set_context(save_graphs=True, save_graphs_path="path/to/ir/files"+device_id) -``` - -After the training command is executed, the following files are generated in the specified directory: the IR files starting with digits and underscores are generated during the ME diagram compilation. The calculation diagram is saved in each phase of the `pipeline`. 
Let's see the important phases, For examples, the `parse` phase parses the `construct` function of the entrance. The `symbol_resolve` phase recursively parses other functions and objects directly or indirectly referenced by the entry function. The `abstract_specialize` phase, type derivation and `shape` derivation are performed. The `optimize` phase, hardware-independent optimization is performed, The automatic differential and automatic parallel functions are also performed. The `validate` phase, the compiled compute graph is verified. The `task_emit` phase, the computing graph is transferred to the backend for further processing. The calculation graph is executed in the `execute` phase. - -```bash -. -├── 00_parse_[xxxx].ir -├── 00_parse.dat -├── 00_parse.dot -├── 01_symbol_resolve_[xxxx].ir -├── 01_symbol_resolve.dat -├── 01_symbol_resolve.dot -├── 02_combine_like_graphs_[xxxx].ir -├── 02_combine_like_graphs.dat -├── 02_combine_like_graphs.dot -├── 03_inference_opt_prepare_[xxxx].ir -├── 03_inference_opt_prepare.dat -├── 03_inference_opt_prepare.dot -├── 04_abstract_specialize_[xxxx].ir -├── 04_abstract_specialize.dat -├── 04_abstract_specialize.dot -├── 05_inline_[xxxx].ir -├── 05_inline.dat -├── 05_inline.dot -├── 06_py_pre_ad_[xxxx].ir -├── 06_py_pre_ad.dat -├── 06_py_pre_ad.dot -├── 07_pipeline_split_[xxxx].ir -├── 07_pipeline_split.dat -├── 07_pipeline_split.dot -├── 08_optimize_[xxxx].ir -├── 08_optimize.dat -├── 08_optimize.dot -├── 09_py_opt_[xxxx].ir -├── 09_py_opt.dat -├── 09_py_opt.dot -├── 10_validate_[xxxx].ir -├── 10_validate.dat -├── 10_validate.dot -├── 11_task_emit_[xxxx].ir -├── 11_task_emit.dat -├── 11_task_emit.dot -├── 12_execute_[xxxx].ir -├── 12_execute.dat -├── 12_execute.dot -... -``` - -## IR File Contents Introduction - -The following is an example to describe the contents of the IR file. 
- -```python -import mindspore.context as context -import mindspore.nn as nn -from mindspore import Tensor -from mindspore import dtype as mstype - -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") -context.set_context(save_graphs=True, save_graphs_path="./ir_files") - -class Net(nn.Cell): - def __init__(self): - super().__init__() - - def construct(self, x, y): - x = x + y - x = x * y - return x - -x = Tensor(3, mstype.float32) -y = Tensor(2, mstype.float32) -net = Net() -out = net(x, y) -print(out) -``` - -Use a text editing software (for example, vi) to open the `12_execute_[xxxx].ir` file. The file contents are as follows: - -```text - 1 #IR entry : @6_5_1_construct_wrapper.15 - 2 #attrs : - 3 check_set_strategy_valid_once_only : 1 - 4 #Total params : 2 - 5 - 6 %para1_x : - 7 %para2_y : - 8 - 9 #Total subgraph : 1 -10 -11 subgraph attr: -12 check_set_strategy_valid_once_only : 1 -13 subgraph @6_5_1_construct_wrapper.15() { -14 %0([CNode]8) = Add(%para1_x, %para2_y) primitive_attrs: {output_names: [output], input_names: [x, y]} -15 : (, ) -> () -16 # In file /home/workspace/mindspore/mindspore/ops/composite/multitype_ops/add_impl.py(129)/ return F.add(x, y)/ -17 # In file demo.py(14)/ x = x + y/ -18 %1([CNode]10) = Mul(%0, %para2_y) primitive_attrs: {output_names: [output], input_names: [x, y]} -19 : (, ) -> () -20 # In file /home/workspace/mindspore/mindspore/ops/composite/multitype_ops/mul_impl.py(48)/ return F.tensor_mul(x, y)/ -21 # In file demo.py(15)/ x = x * y/ -22 return(%1) -23 : () -24 } -``` - -The above contents can be divided into two parts, the first part is the input list and the second part is the graph structure. The first line tells us the name of the top MindSpore graph about the network, `@6_5_1_construct_wrapper.15`, or the entry graph. Line 4 tells us how many inputs are in the network. Line 6 to 7 are the input list, which is in the format of `%para[No.]_[name] : <[data_type]x[shape]>`. 
Line 9 tells us the number of subgraphs parsed by the network. Line 11 to 24 indicate the graph structure, which contains several nodes, namely, `CNode`. In this example, there are only two nodes: `Add` in row 14 and `Mul` in row 18. - -The `CNode` information format is as follows: including the node name, attribute, input node, output information, format, and source code parsing call stack. The ANF diagram is a unidirectional acyclic graph. So, the connection between nodes is displayed only based on the input relationshape. The source code parsing call stack reflects the relationship between the `CNode` and the script source code. For example, line 20 is parsed from line 21, and line 21 corresponds to `x = x * y` of the script. - -```text - %[No.]([debug_name]) = [OpName]([arg], ...) primitive_attrs: {[key]: [value], ...} - : (<[input data_type]x[input shape]>, ...) -> (<[output data_type]x[output shape]>, ...) - # Call stack for source code parsing -``` - -> After several optimizations by the compiler, the node may undergo several changes (such as operator splitting and operator merging). The source code parsing call stack information of the node may not be in a one-to-one correspondence with the script. This is only an auxiliary method. - -## Function-style Semantics - -Compared with traditional computational graphs, MindIR can not only express data dependency between operators, but also express rich function-style semantics. - -### Higher-Order Functions - -In a MindIR, a function is defined by a subgraph. However, the function itself can be transferred as the input or output of other higher-order functions. -In the following simple example, the `f` function is transferred as a parameter into the `g` function. Therefore, the `g` function is a higher-order function that receives function input, and the actual call site of the `f` function is inside the `g` function. 
- -```python -@ms_function -def hof(x): - def f(x): - return x + 3 - def g(function, x): - return function(x) * function(x) - res = g(f, x) - return res -``` - -The corresponding MindIR is [hof.dot](https://gitee.com/mindspore/docs/blob/master/docs/note/source_en/design/mindspore/images/ir/hof.dot). -![image](./images/ir/hof.png) - -In the actual network training scripts, the automatic derivation generic function `GradOperation` and `Partial` and `HyperMap` that are commonly used in the optimizer are typical high-order functions. Higher-order semantics greatly improve the flexibility and simplicity of MindSpore representations. - -### Control Flows - -In a MindIR, control flows are expressed in the form of high-order function selection and calling. This form transforms a control flow into a data flow of higher-order functions, making the automatic differential algorithm more powerful. It not only supports automatic differentiation of data flows, but also supports automatic differentiation of control flows such as conditional jumps, loops, and recursion. - -The following uses a simple Fibonacci instance as an example. - -```python -@ms_function -def fibonacci(n): - if(n < 1): - return 0 - elif(n == 1): - return 1 - else: - return fibonacci(n-1) + fibonacci(n-2) -``` - -The corresponding MindIR is [cf.dot](https://gitee.com/mindspore/docs/blob/master/docs/note/source_en/design/mindspore/images/ir/cf.dot). -![image](./images/ir/cf.png) - -`fibonacci` is a top-level function graph. Two function graphs at the top level are selected and called by `switch`. `✓fibonacci` is the True branch of the first `if`, and `✗fibonacci` is the False branch of the first `if`. `✓✗fibonacci` called in `✗fibonacci` is the True branch of `elif`, and `✗✗fibonacci` is the False branch of `elif`. The key is, in a MindIR, conditional jumps and recursion are represented in the form of higher-order control flows. 
For example, `✓✗fibonacci` and `✗fibonacci` are transferred in as parameters of the `switch` operator. `switch` selects a function as the return value based on the condition parameter. In this way, `switch` performs a binary selection operation on the input functions as common values and does not call the functions. The real function call is completed on CNode following `switch`. - -### Free Variables and Closures - -Closure is a programming language feature that refers to the combination of code blocks and scope environment. A free variable refers to a variable in the scope environment referenced in a code block instead of a local variable. In a MindIR, a code block is represented as a function graph. The scope environment can be considered as the context where the function is called. The capture method of free variables is value copy instead of reference. - -A typical closure instance is as follows: - -```python -@ms_function -def func_outer(a, b): - def func_inner(c): - return a + b + c - return func_inner - -@ms_function -def ms_closure(): - closure = func_outer(1, 2) - out1 = closure(1) - out2 = closure(2) - return out1, out2 -``` - -The corresponding MindIR is [closure.dot](https://gitee.com/mindspore/docs/blob/master/docs/note/source_en/design/mindspore/images/ir/closure.dot). -![image](./images/ir/closure.png) - -In the example, `a` and `b` are free variables because the variables `a` and `b` in `func_inner` are parameters defined in the referenced parent graph `func_outer`. The variable `closure` is a closure, which is the combination of the function `func_inner` and its context `func_outer(1, 2)`. Therefore, the result of `out1` is 4, which is equivalent to `1+2+1`, and the result of `out2` is 5, which is equivalent to `1+2+2`. - -## References - -[1] C. Click and M. Paleczny. A simple graph-based intermediate representation. -SIGPLAN Not., 30:35–49, March 1995. - -[2] Roland Leißa, Marcel Köster, and Sebastian Hack. 
A graph-based higher-order intermediate -representation. In Proceedings of the 13th Annual IEEE/ACM International Symposium on -Code Generation and Optimization, pages 202–212. IEEE Computer Society, 2015. diff --git a/docs/note/source_en/design/mindspore/profiler_design.md b/docs/note/source_en/design/mindspore/profiler_design.md deleted file mode 100644 index 19647478e1c2e75c52e87013ceffda1870bd760e..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/mindspore/profiler_design.md +++ /dev/null @@ -1,198 +0,0 @@ -# Profiler Design Document - -`Ascend` `GPU` `Model Development` `Model Optimization` `Framework Development` `Intermediate` `Expert` `Contributor` - - - -- [Profiler Design Document](#profiler-design-document) - - [Background](#background) - - [Profiler Architecture Design](#profiler-architecture-design) - - [Context](#context) - - [Module Structure](#module-structure) - - [Internal Module Interaction](#internal-module-interaction) - - [Sub-Module Design](#sub-module-design) - - [ProfilerAPI and Controller](#profilerapi-and-controller) - - [Description](#description) - - [Design](#design) - - [Parser](#parser) - - [Description](#description-1) - - [Design](#design-1) - - [Analyser](#analyser) - - [Description](#description-2) - - [Design](#design-2) - - [Proposer](#proposer) - - [Description](#description-3) - - [Design](#design-3) - - - - - -## Background - -To support model development and performance debugging in MindSpore, an easy-to-use profile tool is required to intuitively display the performance information of each dimension of a network model, provide users with easy-to-use and abundant profiling functions, and help users quickly locate network performance faults. 
- -## Profiler Architecture Design - -The Profiler architecture design is introduced from the following three aspects: the overall context interaction relationship of Profiler; the internal structure of Profiler, including the module structure and module layers; the interactive calling relationship between modules. - -### Context - -Profiler is a part of the MindSpore debugging and optimization tool. The following figure shows the tool context. - -![context_profiler.png](./images/context_profiler.png) - -Figure 1 Context relationship - -As shown in the preceding figure, the interaction between the Profiler and other components is as follows: - -1. In the training script, MindSpore Profiler is called to send the command to the MindSpore ada(Ascend device) or CUPTI(GPU device) module for starting performance data collection. Finally, the ada or CUPTI generates original performance data. - -2. MindSpore Profiler parses the original data in the user script and generates the intermediate data results in the specified folder. - -3. MindInsight Profiler connects to the intermediate data and provides the visualized Profiler function for users. - -### Module Structure - -Modules are classified into the following layers: - -![module_profiler.png](./images/module_profiler.png) - -Figure 2 Relationships between modules at different layers - -Module functions are as follows: - -1. ProfilerAPI is a calling entry provided by code, including the performance collection startup API and analysis API. -2. Controller is a module at a layer lower than that of ProfilerAPI. It is called by the startup API of ProfilerAPI to start or stop the performance collection function. The original data is written to a fixed position by ada. -3. Parser is a module for parsing original performance data which is collected on the device and cannot be directly understood by users. 
Parser parses, combines, and converts the data to generate intermediate results that can be understood by users and analyzed by upper layers. -4. Analyser obtains the intermediate results parsed by the lower-layer Parser, encapsulates, filters, and sorts the intermediate results, and returns the various information to the upper-layer Profiler API and RESTful. -5. RESTful is used to call the common API provided by the backend Analyser to obtain objective data and use RESTful to connect to the frontend. - -### Internal Module Interaction - -Users can use API or RESTful to complete internal module interaction process. The following uses the API as an example: - -![time_order_profiler.png](./images/time_order_profiler.png) - -Figure 3 Module interaction - -The interaction process of each module is as follows: - -1. ProfilerAPI calls the control function of the lower-layer Controller to control the lower-layer collection module to collect performance information. Currently, the collection module (ada on Ascend or CUPTI on GPU) receives commands and independently collects performance information. - -2. After the training is complete, users call the analysis API of ProfilerAPI. - -3. Profiler API analysis API uses the Parser module to parse performance data, generates intermediate results, calls the Aalayser module to analyze the results, and returns various information to users. - -## Sub-Module Design - -### ProfilerAPI and Controller - -#### Description - -ProfilerAPI provides an entry API in the training script for users to start performance collection and analyze performance data. -ProfilerAPI delivers commands through Controller to control the startup of ada/CUPTI. - -#### Design - -ProfilerAPI belongs to the API layer of upper-layer application and is integrated by the training script. The function is divided into two parts: - -- Before training, call the bottom-layer Controller API to deliver a command to start a profiling task. 
- -- After training, call the bottom-layer Controller API to deliver commands to stop the profiling task, call the Analyser and Parser APIs to parse data files and generate result data such as operator performance statistics and training trace statistics. - -Controller provides an API for the upper layer, calls API of the lower-layer performance collection module, and delivers commands for starting and stopping performance collection. - -The generated original performance data includes: - -Ascend: - -- `hwts.log.data.45.dev.profiler_default_tag` file: stores operator execution information, including the start and end of a task and stream ID. -- `DATA_PREPROCESS.dev.AICPU` file: specifies AI CPU operator execution time at each stage. -- `Framework.host.task_desc_info` file: stores the mapping between operator IDs and operator names and the input and output information of each operator. -- `training_trace.46.dev.profiler_default_tag` file: stores the start and end time of each step and time of step interval, forward and backward propagation, and step tail. - -GPU: - -- `step_trace_profiling_0.txt` file: stores the start and end operator of each step. - -### Parser - -#### Description - -Parser is a module for parsing original performance data which is collected on the device and cannot be directly understood by users. Parser parses, combines, and converts the data to generate intermediate results that can be understood by users and analyzed by upper layers. - -#### Design - -![parser_module_profiler.png](./images/parser_module_profiler.png) - -Figure 4 Parser module - -As shown in the preceding figure, there are HWTS Parser, AI CPU Parser, Framework Parser, and Training Trace Parser modules. Each module parses a type of original data to obtain the intermediate file that can be read by users. 
- -Ascend: - -- HWTS Parser: parses the `hwts.log.data.45.dev.profiler_default_tag` file to obtain the task-based statistics of the device, such as the start and end of each task and stream ID, which are used to compute the operator execution time. -- AI CPU Parser: parses the `DATA_PREPROCESS.dev.AICPU` file to obtain the AI CPU operator execution time at each stage. -- Framework Parser: parses the `Framework.host.task_desc_info` file to obtain the mapping between AI Core operator and task, and key operator information. -- Training Trace Parser: parses the `training_trace.46.dev.profiler_default_tag` file to analyze the time at each training stage. - -GPU: - -- Training Trace Parser: parses the `step_trace_profiling_0.txt` file to analyze the time at each training stage. - -### Analyser - -#### Description - -Analyzer is used to filter, sort, query, and page the intermediate results generated at the parsing stage. - -#### Design - -This module parses the intermediate files generated by Parser, provides a general API for upper-layer data analysis, and returns the analyzed data to the upper layer for display. Various intermediate files have certain common points which can be abstracted. Therefore, following figure shows the design of the Analyser class. - -![analyser_class_profiler.png](./images/analyser_class_profiler.png) - -Figure 5 Analyser class - -As shown in the preceding figure, multiple Analysers are implemented for different contents to be queried. Filter, sorting, and pagination conditions can be defined for each Analyser. Each Analyser knows which intermediate files are required to merge, filter, and sort data. Analyser is associated with Parser through the intermediate files generated by Parser, and no function is called. In this way, Analyser and Parser are decoupled. - -Currently, there are two types of analyzers for operator information: - -- Filter the average information of operator types. 
-- Filter the detailed average information of each operator in two Analysers (AicoreTypeAnalyser and AicoreDetailAnalyser for Ascend, GpuOpTypeAnalyser and GpuOpInfoAnalyser for GPU). - -To hide the internal implementation of Analyser and facilitate calling, the simple factory mode is used to obtain the specified Analyser through AnalyserFactory. - -### Proposer - -#### Description - -Proposer is a Profiler performance optimization suggestion module. Proposer calls the Analyser module to obtain performance data, analyzes the performance data based on optimization rules, and displays optimization suggestions for users through the UI and API. - -#### Design - -Modules are classified into the following layers: - -![proposer_module_profiler.png](./images/proposer_module_profiler.png) - -Figure 6 Proposer module - -As shown in the preceding figure: - -- Proposer provides API for calling the API and RESTful to obtain optimization suggestions. -- Proposer calls the Analyser API to obtain performance data and obtain optimization suggestions based on optimization rules. -- Proposer calls Analyser factory to obtain the Analyser object. - -You can call the query API of the Analyser object to obtain information, including the top N operators that are sorted by time and the time information of each training trace stage. - -The following figure shows the module class design: - -![proposer_class_profiler.png](./images/proposer_class_profiler.png) - -Figure 7 Proposer class - -As shown in the preceding figure: - -- Proposers of various types inherit the abstract class Proposer and implement the analyze methods. -- API and CLI call the ProposerFactory to obtain the Proposer and call the Proposer.analyze function to obtain the optimization suggestions of each type of Proposer. 
diff --git a/docs/note/source_en/design/overall.rst b/docs/note/source_en/design/overall.rst deleted file mode 100644 index 4fb9264191288c83696c00b4bd1d6122709dd90c..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/overall.rst +++ /dev/null @@ -1,8 +0,0 @@ -Overall Design -================ - -.. toctree:: - :maxdepth: 1 - - technical_white_paper - mindspore/architecture diff --git a/docs/note/source_en/design/technical_white_paper.md b/docs/note/source_en/design/technical_white_paper.md deleted file mode 100644 index 0b533897990008bdae72a29008aeac2168deeca7..0000000000000000000000000000000000000000 --- a/docs/note/source_en/design/technical_white_paper.md +++ /dev/null @@ -1,5 +0,0 @@ -# Technical White Paper - -Please stay tuned... - - diff --git a/docs/note/source_en/env_var_list.md b/docs/note/source_en/env_var_list.md deleted file mode 100644 index 1dda8e6e944089ff6a23cdebfc383a435eecaebc..0000000000000000000000000000000000000000 --- a/docs/note/source_en/env_var_list.md +++ /dev/null @@ -1,32 +0,0 @@ -# Environment Variables - -`Linux` `Ascend` `GPU` `CPU` `Beginner` `Intermediate` `Expert` - - - -MindSpore environment variables are as follows: - -| Environment Variable | Module | Function | Type | Value Range | Configuration Relationship | Mandatory or Not | -| --- | --- | --- | --- | --- | --- | --- | -|MS_ENABLE_CACHE|MindData|Determines whether to enable the cache function for datasets during data processing to accelerate dataset reading and argumentation processing. |String|TRUE: enables the cache function during data processing.
FALSE: disables the cache function during data processing. | This variable is used together with MS_CACHE_HOST and MS_CACHE_PORT. |Optional| -|MS_CACHE_HOST|MindData|Specifies the IP address of the host where the cache server is located when the cache function is enabled. |String|IP address of the host where the cache server is located. | This variable is used together with MS_ENABLE_CACHE=TRUE and MS_CACHE_PORT. |Optional| -|MS_CACHE_PORT|MindData|Specifies the port number of the host where the cache server is located when the cache function is enabled. |String|Port number of the host where the cache server is located. | This variable is used together with MS_ENABLE_CACHE=TRUE and MS_CACHE_HOST. |Optional| -|PROFILING_MODE|MindData|Determines whether to enable dataset profiling for performance analysis. This variable is used together with MindInsight where the time consumed in each phase can be displayed. |String|true: enables the profiling function.
false: disables the profiling function. | This variable is used together with MINDDATA_PROFILING_DIR. |Optional| -|MINDDATA_PROFILING_DIR|MindData|Specifies the system path for storing the dataset profiling result. |String| System path. A relative path is supported. |This variable is used together with PROFILING_MODE=true. |Optional| -|DATASET_ENABLE_NUMA|MindData|Determines whether to enable numa bind feature. Most of time this configuration can improve performance on distribute scenario. |String| True: Enables the numa bind feature. |This variable is used together with libnuma.so. |Optional| -|OPTIMIZE|MindData|Determines whether to optimize the pipeline tree for dataset during data processing. This variable can improve the data processing efficiency in the data processing operator fusion scenario. |String|true: enables pipeline tree optimization.
false: disables pipeline tree optimization. |None |Optional| -|ENABLE_MS_DEBUGGER|Debugger|Determines whether to enable Debugger during training. |Boolean|1: enables Debugger.
0: disables Debugger. |None |Optional| -|MS_DEBUGGER_PORT|Debugger|Specifies the port for connecting to the MindInsight Debugger Server. |Integer|Port number ranges from 1 to 65536. |None|Optional -|MS_DEBUGGER_PARTIAL_MEM|Debugger|Determines whether to enable partial memory overcommitment. (Memory overcommitment is disabled only for nodes selected on Debugger.)|Boolean|1: enables memory overcommitment for nodes selected on Debugger.
0: disables memory overcommitment for nodes selected on Debugger. |None|Optional| -|MS_BUILD_PROCESS_NUM|MindSpore|Specifies the number of parallel operator build processes during Ascend backend compilation.|Integer|The number of parallel operator build processes ranges from 1 to 24. |None|Optional| -|RANK_TABLE_FILE|MindSpore|Specifies the file to which a path points, including `DEVICE_IP`s corresponding to multiple Ascend AI Processor `DEVICE_ID`s. |String|File path, which can be a relative path or an absolute path.|This variable is used together with RANK_SIZE. |Mandatory (when the Ascend AI Processor is used)| -|RANK_SIZE|MindSpore|Specifies the number of Ascend AI Processors to be called during deep learning. |Integer|The number of Ascend AI Processors to be called ranges from 1 to 8. | This variable is used together with RANK_TABLE_FILE |Mandatory (when the Ascend AI Processor is used) | -|RANK_ID|MindSpore|Specifies the logical ID of the Ascend AI Processor called during deep learning.|Integer|The value ranges from 0 to 7. When multiple servers are running concurrently, `DEVICE_ID`s in different servers may be the same. RANK_ID can be used to avoid this problem. (RANK_ID = SERVER_ID * DEVICE_NUM + DEVICE_ID) |None|Optional| -|MS_SUBMODULE_LOG_v|MindSpore| For details about the function and usage, see [MS_SUBMODULE_LOG_v](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/custom_debugging_info.html#log-related-environment-variables-and-configurations)|Dict{String:Integer...}|LogLevel: 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR
SubModule: COMMON, MD, DEBUG, DEVICE, IR...|None | Optional| -|OPTION_PROTO_LIB_PATH|MindSpore|Specifies the PROTO dependent library path. |String|File path, which can be a relative path or an absolute path.|None|Optional| -|MS_RDR_ENABLE|MindSpore|Determines whether to enable running data recorder (RDR). If a running exception occurs in MindSpore, the pre-recorded data in MindSpore is automatically exported to assist in locating the cause of the running exception.|Integer|1: enables RDR<br>
0: disables RDR|This variable is used together with MS_RDR_PATH|Optional| -|MS_RDR_PATH|MindSpore|Specifies the system path for storing the data recorded by running data recorder (RDR).|String|Directory path, which should be an absolute path.|This variable is used together with MS_RDR_ENABLE=1|Optional| -|MS_OM_PATH|MindSpore|Specifies the save path for the file (analyze_fail.dat) which is dumped if a graph compilation error occurs.|String|File path, which can be a relative path or an absolute path.|None|Optional| -|GE_USE_STATIC_MEMORY|GraphEngine| When a network model has too many layers, the intermediate computing data of a feature map may exceed 25 GB, for example, on the BERT24 network. In the multi-device scenario, to ensure efficient memory collaboration, set this variable to 1, indicating that static memory allocation mode is used. For other networks, dynamic memory allocation mode is used by default.<br>
In static memory allocation mode, the default allocation is 31 GB, which is determined by the sum of graph_memory_max_size and variable_memory_max_size. In dynamic memory allocation mode, the allocation is within the sum of graph_memory_max_size and variable_memory_max_size. |Integer|1: static memory allocation mode
0: dynamic memory allocation mode|None|Optional| -|DUMP_GE_GRAPH|GraphEngine|Outputs the graph description information of each phase in the entire process to a file. This environment variable controls contents of the dumped graph. |Integer|1: full dump
2: basic dump without data such as weight
3: simplified dump with only node relationships displayed|None|Optional| -|DUMP_GRAPH_LEVEL|GraphEngine|Outputs the graph description information of each phase in the entire process to a file. This environment variable controls the number of dumped graphs. |Integer|1: dumps all graphs.
2: dumps all graphs except subgraphs.
3: dumps the last generated graph. |None |Optional | diff --git a/docs/note/source_en/glossary.md b/docs/note/source_en/glossary.md deleted file mode 100644 index b04459044435854bcfa04ebb776daec0ac7c61de..0000000000000000000000000000000000000000 --- a/docs/note/source_en/glossary.md +++ /dev/null @@ -1,49 +0,0 @@ -# Glossary - -`Linux` `Windows` `Ascend` `GPU` `CPU` `Whole Process` `Beginner` `Intermediate` `Expert` - - - -| Acronym and Abbreviation | Description | -| ----- | ----- | -| ACL | Ascend Computer Language, for users to develop deep neural network applications, which provides the C++ API library including device management, context management, stream management, memory management, model loading and execution, operator loading and execution, media data processing, etc. | -| AIR | Ascend Intermediate Representation, such as ONNX, it is an open file format for machine learning. It is defined by Huawei and is better suited to Ascend AI processor. | -| Ascend | Name of Huawei Ascend series chips. | -| CCE | Cube-based Computing Engine, which is an operator development tool oriented to hardware architecture programming. | -| CCE-C | Cube-based Computing Engine C, which is C code developed by the CCE. | -| CheckPoint | MindSpore model training check point, which is used to save model parameters for inference or retraining. | -| CIFAR-10 | An open-source image dataset that contains 60000 32 x 32 color images of 10 categories, with 6000 images of each category. There are 50000 training images and 10000 test images. | -| CIFAR-100 | An open-source image dataset that contains 100 categories. Each category has 500 training images and 100 test images. | -| DaVinci | DaVinci architecture, Huawei-developed new chip architecture. | -| EulerOS | Euler operating system, which is developed by Huawei based on the standard Linux kernel. | -| FC Layer | Fully connected layer, which acts as a classifier in the entire convolutional neural network. 
| -| FE | Fusion Engine, which connects to GE and TBE operators and has the capabilities of loading and managing the operator information library and managing convergence rules. | -| Fine-tuning | A process to take a network model that has already been trained for a given task, and make it perform a second similar task. | -| FP16 | 16-bit floating point, which is a half-precision floating point arithmetic format, consuming less memory. | -| FP32 | 32-bit floating point, which is a single-precision floating point arithmetic format. | -| GE | Graph Engine, MindSpore computational graph execution engine, which is responsible for optimizing hardware (such as operator fusion and memory overcommitment) based on the front-end computational graph and starting tasks on the device side. | -| GHLO | Graph High Level Optimization. GHLO includes optimization irrelevant to hardware (such as dead code elimination), auto parallel, and auto differentiation. | -| GLLO | Graph Low Level Optimization. GLLO includes hardware-related optimization and in-depth optimization related to the combination of hardware and software, such as operator fusion and buffer fusion. | -| Graph Mode | MindSpore static graph mode. In this mode, the neural network model is compiled into an entire graph and then delivered for execution, featuring high performance. | -| HCCL | Huawei Collective Communication Library, which implements multi-device and multi-card communication based on the Da Vinci architecture chip. | -| ImageNet | Image database organized based on the WordNet hierarchy (currently nouns only). | -| LeNet | A classical convolutional neural network architecture proposed by Yann LeCun and others. | -| Loss | Difference between the predicted value and the actual value, which is a standard for determining the model quality of deep learning. 
| LSTM | Long short-term memory, an artificial recurrent neural network (RNN) architecture used for processing and predicting an important event with a long interval and delay in a time sequence. | -| Manifest | A data format file. Huawei ModelArts adopts this format. For details, see . | -| ME | Mind Expression, MindSpore frontend, which is used to compile tasks from user source code to computational graphs, control execution during training, maintain contexts (in non-sink mode), and dynamically generate graphs (in PyNative mode). | -| MindArmour | The security module of MindSpore, which improves the confidentiality, integrity and usability of the model through technical means such as differential privacy and adversarial attack and defense. MindArmour prevents attackers from maliciously modifying the model or cracking the internal components of the model to steal the parameters of the model. | -| MindData | MindSpore data framework, which provides data loading, enhancement, dataset management, and visualization. | -| MindInsight | MindSpore visualization component, which visualizes information such as scalars, images, computational graphs, and model hyperparameters. | -| MindIR | MindSpore IR, a functional IR based on graph representation, defines a scalable graph structure and operator IR representation, and stores the basic data structure of MindSpore. | -| MindRecord | A data format defined by MindSpore, and a module for reading, writing, searching and converting data sets in MindSpore format. | -| MindSpore | Huawei-led open-source deep learning framework. | -| MindSpore Lite | A lightweight deep neural network inference engine that provides the inference function for models trained by MindSpore on the device side. | -| MNIST database | Modified National Institute of Standards and Technology database, a large handwritten digit database, which is usually used to train various image processing systems. <br>
| -| ONNX | Open Neural Network Exchange, is an open format built to represent machine learning models.| -| PyNative Mode | MindSpore dynamic graph mode. In this mode, operators in the neural network are delivered and executed one by one, facilitating the compilation and debugging of the neural network model. | -| ResNet-50 | Residual Neural Network 50, a residual neural network proposed by four Chinese people, including Kaiming He from Microsoft Research Institute. | -| Schema | Data set structure definition file, which defines the fields contained in a dataset and the field types. | -| Summary | An operator that monitors the values of tensors on the network. It is a peripheral operation in the figure and does not affect the data flow. | -| TBE | Tensor Boost Engine, it is a self-developed NPU operator development tool developed by Huawei, which is expanded on the basis of the TVM (Tensor Virtual Machine) framework. It provides a set of Python API to implement development activities and develop custom operators. | -| TFRecord | Data format defined by TensorFlow. | diff --git a/docs/note/source_en/help_seeking_path.md b/docs/note/source_en/help_seeking_path.md deleted file mode 100644 index 3adbc95a376c2a52079e238f3440c7e1f00f2694..0000000000000000000000000000000000000000 --- a/docs/note/source_en/help_seeking_path.md +++ /dev/null @@ -1,29 +0,0 @@ -# Seeking Help and Support - -`Linux` `Windows` `Ascend` `GPU` `CPU` `Whole Process` `Beginner` `Intermediate` `Expert` - - - -This document describes how to seek help and support when you encounter problems in using MindSpore. The following flowchart shows the overall help-seeking process which starts from users encountering a problem in using MindSpore and ends with they finding a proper solution. Help-seeking methods are introduced based on the flowchart. - -![solution](./images/help_seeking_path.png) - -- Website search - - - Go to the [official search page](https://www.mindspore.cn/search/en). 
- - When encountering a problem, search on the official website first, which is simple and efficient. - - Enter a keyword in the search box and click the search icon. The related content is displayed. - - Resolve the problem based on the search result. - -- User group consultation - - - If you cannot solve the problem using the website search method and want a quick consultation. Get support by joining the [Slack group](https://mindspore.slack.com/join/shared_invite/zt-dgk65rli-3ex4xvS4wHX7UDmsQmfu8w#/ ) and start a conversation with our members. - - Resolve the problem by asking experts or communicating with other users. - -- Forum Help-Seeking - - - If you want a detailed solution, start a help post on the [Ascend forum](https://forum.huawei.com/enterprise/en/forum-100504.html). - - After the post is sent, a forum moderator collects the question and contacts technical experts to answer the question. The question will be resolved within three working days. - - Resolve the problem by referring to solutions provided by technical experts. - - If the expert test result shows that the MindSpore function needs to be improved, you are advised to submit an issue in the [MindSpore repository](https://gitee.com/mindspore). Issues will be resolved in later versions. 
diff --git a/docs/note/source_en/images/after_transfer.png b/docs/note/source_en/images/after_transfer.png deleted file mode 100644 index cb066922a36214a940741f4c2bca96ec35ec7d19..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/images/after_transfer.png and /dev/null differ diff --git a/docs/note/source_en/images/architecture.png b/docs/note/source_en/images/architecture.png deleted file mode 100644 index b824bf5e7231393deecbfe9bc4629e84f811e0fe..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/images/architecture.png and /dev/null differ diff --git a/docs/note/source_en/images/before_transfer.png b/docs/note/source_en/images/before_transfer.png deleted file mode 100644 index ba2fe024d6382a1bad7b0f6cc4f2623e4815c2cf..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/images/before_transfer.png and /dev/null differ diff --git a/docs/note/source_en/images/help_seek_path_en.pptx b/docs/note/source_en/images/help_seek_path_en.pptx deleted file mode 100644 index 307c2581c94f37253f83123db4e92090ba3eb580..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/images/help_seek_path_en.pptx and /dev/null differ diff --git a/docs/note/source_en/images/help_seeking_path.png b/docs/note/source_en/images/help_seeking_path.png deleted file mode 100644 index 7eee9611806afe0d4d18faa3b1a258d0991e2f53..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/images/help_seeking_path.png and /dev/null differ diff --git a/docs/note/source_en/images/image_classification_result.png b/docs/note/source_en/images/image_classification_result.png deleted file mode 100644 index a7cc49f582440e31b6b5b14dbba5131bfed2a4b4..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/images/image_classification_result.png and /dev/null differ diff --git a/docs/note/source_en/images/object_detection.png b/docs/note/source_en/images/object_detection.png deleted file 
mode 100644 index ad5425c86393a9367701166796df42c9e4702988..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/images/object_detection.png and /dev/null differ diff --git a/docs/note/source_en/images/posenet_detection.png b/docs/note/source_en/images/posenet_detection.png deleted file mode 100644 index db253e597caa3c8c825b466ef2bc0ce7893d1411..0000000000000000000000000000000000000000 Binary files a/docs/note/source_en/images/posenet_detection.png and /dev/null differ diff --git a/docs/note/source_en/index.rst b/docs/note/source_en/index.rst deleted file mode 100644 index f159022d41f50880b91c2658c8f4fe25802edcd9..0000000000000000000000000000000000000000 --- a/docs/note/source_en/index.rst +++ /dev/null @@ -1,41 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 10:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore Design And Specification -================================== - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Design - - design/overall - design/mindspore - design/mindinsight - design/mindarmour - - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Specification Note - - benchmark - network_list - operator_list - syntax_list - env_var_list - -.. 
toctree:: - :glob: - :maxdepth: 1 - :caption: Others - - glossary - roadmap - help_seeking_path - community - - \ No newline at end of file diff --git a/docs/note/source_en/index_support.md b/docs/note/source_en/index_support.md deleted file mode 100644 index 05ca22be2e2f832245e6a373ec01a93f0ae4b1c5..0000000000000000000000000000000000000000 --- a/docs/note/source_en/index_support.md +++ /dev/null @@ -1,472 +0,0 @@ -# Tensor Index Support - -`Linux` `Ascend` `GPU` `Model Development` `Beginner` `Intermediate` `Expert` - - - -- [Tensor Index Support](#tensor-index-support) - - [Index values](#index-values) - - [Index value assignment](#index-value-assignment) - - [Index value augmented-assignment](#index-value-augmented-assignment) - - - - - -Single-level and multi-level Tensor indexing is supported on both PyNative and Graph mode. - -## Index values - -The index value can be `int`, `bool`, `None`, `ellipsis`, `slice`, `Tensor`, `List`, or `Tuple`. - -- `int` index value - - Single-level and multi-level `int` index values are supported. The single-level `int` index value is `tensor_x[int_index]`, and the multi-level `int` index value is `tensor_x[int_index0][int_index1]...`. - - The `int` index value is obtained on dimension 0 and is less than the length of dimension 0. After the position data corresponding to dimension 0 is obtained, dimension 0 is eliminated. - - For example, if a single-level `int` index value is obtained for a tensor whose `shape` is `(3, 4, 5)`, the obtained `shape` is `(4, 5)`. - - The multi-level index value can be understood as obtaining the current-level `int` index value based on the previous-level index value. 
- - For example: - - ```python - tensor_x = Tensor(np.arange(2 * 3 * 2).reshape((2, 3, 2))) - data_single = tensor_x[0] - data_multi = tensor_x[0][1] - ``` - - The result is as follows: - - ```text - data_single: Tensor(shape=[3, 2], dtype=Int64, value=[[0, 1], [2, 3], [4, 5]]) - data_multi: Tensor(shape=[2], dtype=Int64, value=[2, 3]) - ``` - -- `bool` index value - - Single-level and multi-level `bool` index values are supported. The single-level `bool` index value is `tensor_x[True]`, and the multi-level `True` index value is `tensor_x[True][True]...`. - - The `True` index value operation is obtained on dimension 0. After all data is obtained, a dimension is extended on the `axis=0` axis. The length of the dimension is 1. `False` will introduce `0` in the shape, thus only `Ture` is supported now. - - For example, if a single-level `True` index value is obtained from a tensor whose `shape` is `(3, 4, 5)`, the obtained `shape` is `(1, 3, 4, 5)`. - - The multi-level index value can be understood as obtaining the current-level `bool` index value based on the previous-level index value. - - For example: - - ```python - tensor_x = Tensor(np.arange(2 * 3 ).reshape((2, 3))) - data_single = tensor_x[True] - data_multi = tensor_x[True][True] - ``` - - The result is as follows: - - ```text - data_single: Tensor(shape=[1, 2, 3], dtype=Int64, value=[[[0, 1, 2], [3, 4, 5]]]) - data_multi: Tensor(shape=[1, 0, 2, 3], dtype=Int64, value=[[[[0, 1, 2], [3 , 4, 5]]]]) - ``` - -- `None` index value - - The `None` index value is the same as the `True` index value. For details, see the `True` index value. - -- `ellipsis` index value - - Single-level and multi-level `ellipsis` index values are supported. The single-level `ellipsis` index value is `tensor_x[...]`, and the multi-level `ellipsis` index value is `tensor_x[...][...]...`. - - The `ellipsis` index value is obtained on all dimensions to get the original data without any change. 
Generally, it is used as a component of the `Tuple` index. The `Tuple` index is described as follows. - - For example, if the `ellipsis` index value is obtained for a tensor whose `shape` is `(3, 4, 5)`, the obtained `shape` is still `(3, 4, 5)`. - - For example: - - ```python - tensor_x = Tensor(np.arange(2 * 3 ).reshape((2, 3))) - data_single = tensor_x[...] - data_multi = tensor_x[...][...] - ``` - - The result is as follows: - - ```text - data_single: Tensor(shape=[2, 3], dtype=Int64, value=[[0, 1, 2], [3, 4, 5]]) - data_multi: Tensor(shape=[2, 3], dtype=Int64, value=[[0, 1, 2], [3, 4, 5]]) - ``` - -- `slice` index value - - Single-level and multi-level `slice` index values are supported. The single-level `slice` index value is `tensor_x[slice_index]`, and the multi-level `slice` index value is `tensor_x[slice_index0][slice_index1]...`. - - The `slice` index value is obtained on dimension 0. The element of the sliced position on dimension 0 is obtained. The `slice` does not reduce the dimension even if the length is 1, which is different from the `int` index value. - - For example, `tensor_x[0:1:1] != tensor_x[0]`, because `shape_former = (1,) + shape_latter`. - - The multi-level index value can be understood as obtaining the current-level `slice` index value based on the previous-level index value. - - `slice` consists of `start`, `stop`, and `step`. The default value of `start` is 0, the default value of `stop` is the length of the dimension, and the default value of `step` is 1. - - Example: `tensor_x[:] == tensor_x[0:length:1]`. 
- - For example: - - ```python - tensor_x = Tensor(np.arange(4 * 2 * 2).reshape((4, 2, 2))) - data_single = tensor_x[1:4:2] - data_multi = tensor_x[1:4:2][1:] - ``` - - The result is as follows: - - ```text - data_single: Tensor(shape=[2, 2, 2], dtype=Int64, value=[[[4, 5], [6, 7]], [[12, 13], [14, 15]]]) - data_multi: Tensor(shape=[1, 2, 2], dtype=Int64, value=[[[12, 13], [14, 15]]]) - ``` - -- `Tensor` index value - - Single-level and multi-level `Tensor` index values are supported. The single-level `Tensor` index value is `tensor_x[tensor_index]`, and the multi-level `Tensor` index value is `tensor_x[tensor_index0][tensor_index1]...`. - - The `Tensor` index value is obtained on dimension 0, and the element in the corresponding position of dimension 0 is obtained. - - The data type of the `Tensor` index must be one of int8, int16, int32, and int64, the element cannot be a negative number, and the value must be less than the length of dimension 0. - - The `Tensor` index value is obtained by `data_shape = tensor_inde4x.shape + tensor_x.shape[1:]`. - - For example, if the index value is obtained for a tensor whose shape is `(6, 4, 5)` by using a tensor whose shape is `(2, 3)`, the obtained shape is `(2, 3, 4, 5)`. - - The multi-level index value can be understood as obtaining the current-level `Tensor` index value based on the previous-level index value. 
- - For example: - - ```python - tensor_x = Tensor(np.arange(4 * 2 * 3).reshape((4, 2, 3))) - tensor_index0 = Tensor(np.array([[1, 2], [0, 3]]), mstype.int32) - tensor_index1 = Tensor(np.array([[0, 0]]), mstype.int32) - data_single = tensor_x[tensor_index0] - data_multi = tensor_x[tensor_index0][tensor_index1] - ``` - - The result is as follows: - - ```text - data_single: Tensor(shape=[2, 2, 2, 3], dtype=Int64, value=[[[[4, 5], [6, 7]], [[8, 9], [10, 11]]], [[[0, 1], [2, 3]], [[12, 13], [14, 15]]]]) - data_multi: Tensor(shape=[1, 2, 2, 2, 3], dtype=Int64, value=[[[[[4, 5], [6, 7]], [[8, 9], [10, 11]]], [[[4, 5], [6, 7]], [[8, 9], [10, 11]]]]]) - ``` - -- `List` index value - - Single-level and multi-level `Tensor` index values are supported. The single-level `List` index value is `tensor_x[list_index]`, and the multi-level `List` index value is `tensor_x[list_index0][list_index1]...`. - - The `List` index value is obtained on dimension 0, and the element in the corresponding position of dimension 0 is obtained. - - The data type of the `List` index must be all bool, all int or mixed of them. The `List` elements of int type must be in the range of [`-dimension_shape`, `dimension_shape-1`] and the count of `List` elements with bool type must be the same as the `dimension_shape` of dimension 0 and will perform as to filter the corresponding element of the Tenson data. If the above two types appear simultaneously, the `List` elements with the bool type will be converted to `1/0` for `True/False`. - - The `Tensor` index value is obtained by `data_shape = tensor_inde4x.shape + tensor_x.shape[1:]`. - - For example, if the index value is obtained for a tensor whose shape is `(6, 4, 5)` by using a tensor whose shape is `(2, 3)`, the obtained shape is `(2, 3, 4, 5)`. - - The multi-level index value can be understood as obtaining the current-level `Tensor` index value based on the previous-level index value. 
- - For example: - - ```python - tensor_x = Tensor(np.arange(4 * 2 * 3).reshape((4, 2, 3))) - tensor_index0 = Tensor(np.array([[1, 2], [0, 3]]), mstype.int32) - tensor_index1 = Tensor(np.array([[0, 0]]), mstype.int32) - data_single = tensor_x[tensor_index0] - data_multi = tensor_x[tensor_index0][tensor_index1] - ``` - - The result is as follows: - - ```text - data_single: Tensor(shape=[2, 2, 2, 3], dtype=Int64, value=[[[[4, 5], [6, 7]], [[8, 9], [10, 11]]], [[[0, 1], [2, 3]], [[12, 13], [14, 15]]]]) - data_multi: Tensor(shape=[1, 2, 2, 2, 3], dtype=Int64, value=[[[[[4, 5], [6, 7]], [[8, 9], [10, 11]]], [[[4, 5], [6, 7]], [[8, 9], [10, 11]]]]]) - ``` - -- `Tuple` index value - - The data type of the `Tuple` index can be `int`, `bool`, `None`, `slice`, `ellipsis`, `Tensor`, `List`, or `Tuple`. Single-level and multi-level `Tuple` index values are supported. For the single-level `Tuple` index, the value is `tensor_x[tuple_index]`. For the multi-level `Tuple` index, the value is `tensor_x[tuple_index0][tuple_index1]...`. The regulations of elements `List` and `Tuple` are the same as that of single index `List` index. The regulations of others are the same to the respondding single element type. - - Elements in the `Tuple` index can be sort out by `Basic Index` or `Advanced Index`. `slice`, `ellipsis` and `None` are `Basic Index` and `int`, `bool`, `Tensor`, `List`, `Tuple` are `Advanced Index`. In the Getitem Progress, all the elements of the `Advanced Index` type will be broadcast to the same shape, and the final shape will be inserted to the first `Advanced Index` element's position if they are continuous, else they will be inserted to the `0` position. - - In the index, the `None` elements will expand the corresponding dimensions, `bool` elements will expand the corresponding dimension and be broadcast with the other `Advanced Index` element. The others elements except the type of `ellipsis`, `bool`, and `None`, will correspond to each position dimension. 
That is, the 0th element in `Tuple` operates the 0th dimension, and the 1st element operates the 1st dimension. The index rule of each element is the same as the index value rule of the element type. - - The `Tuple` index contains a maximum of one `ellipsis`. The first half of the `ellipsis` index elements correspond to the `Tensor` dimensions starting from the dimension 0, and the second half of the index elements correspond to the `Tensor` dimensions starting from the last dimension. If other dimensions are not specified, all dimensions are obtained. - - The data type of `Tensor` contained in the element must be one of (int8, int16, int32, int64). In addition, the value of `Tensor` element must be non-negative and less than the length of the operation dimension. - - For example, `tensor_x[0:3, 1, tensor_index] == tensor_x[(0:3, 1, tensor_index)]`, because `0:3, 1, tensor_index` is a `Tuple`. - - The multi-level index value can be understood as obtaining the current-level `Tuple` index value based on the previous-level index value. - - For example: - - ```python - tensor_x = Tensor(np.arange(2 * 3 * 4).reshape((2, 3, 4))) - tensor_index = Tensor(np.array([[1, 2, 1], [0, 3, 2]]), mstype.int32) - data = tensor_x[1, 0:1, tensor_index] - ``` - - The result is as follows: - - ```text - data: Tensor(shape=[2, 3, 1], dtype=Int64, value=[[[13], [14], [13]], [[12], [15], [14]]]) - ``` - -## Index value assignment - -For a case like: `tensor_x[index] = value`, the type of the index can be `int`, `bool`, `ellipsis`, `slice`, `None`, `Tensor`, `List`, or`Tuple`. - -The type of the assigned `value` can be `Number`, `Tuple`, `List`, or `Tensor`, the `value` will be converted to `Tensor` and casted to the same dtype as the original tensor (`tensor_x`) before being assigned. - -When `value` is `Number`, all position elements obtained from the `tensor_x[index]` will be updated to `Number`. 
- -When `value` is a tensor whose type is `Tuple`, `List` or `Tensor` and only contains `Number`, the `value.shape` needs to be able to be broadcasted to `tensor_x[index].shape`. After the `value`' is broadcasted and casted to `Tensor`, the elements with the position `tensor_x[index]` will be updated with the value `broadcast(Tensor(value))`. - -When `value` is `Tuple/List`, and contains mixtures of `Number`, `Tuple`, `List` and `Tensor`, only one-dimensional `Tuple` and `List` are currently supported. - -When `value` is `Tuple` or `List`, and contains `Tensor`, all the `non-Tensor` elements in `value` will be converted to `Tensor` first, and then these `Tensor` values are packed on the `axis=0` axis and become new `Tensor`. In this case, the value is assigned according to the rule of assigning the `value` to `Tensor`. All `Tensors` must have the same dtype. - -Index value assignment can be understood as assigning values to indexed position elements based on certain rules. All index value assignment does not change the original `shape` of `Tensor`. - -> If there are multiple index elements in indices that correspond to the same position, the value of that position in the output will be nondeterministic. For more details, please see:[TensorScatterUpdate](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.TensorScatterUpdate.html) -> -> Only single-bracket indexing is supported (`tensor_x[index] = value`), multi-bracket(`tensor_x[index1][index2]... = value`) is not supported. - -- `int` index value assignment - - Single-level `int` index value assignments are supported. The single-level `int` index value assignment is `tensor_x[int_index] = u`. 
- - For example: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_y = np.arange(2 *3).reshape((2, 3)).astype(np.float32) - tensor_z = np.arange(2* 3).reshape((2, 3)).astype(np.float32) - tensor_x[1] = 88.0 - tensor_y[1]= np.array([66, 88, 99]).astype(np.float32) - tensor_z[1] = (66, np.array(88), 99) - ``` - - The result is as follows: - - ```text - tensor_x: Tensor(shape=[2, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [88.0, 88.0, 88.0]]) - tensor_y: Tensor(shape=[2, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [66.0, 88.0, 99.0]]) - tensor_z: Tensor(shape=[2, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [66.0, 88.0, 99.0]]) - ``` - -- `bool` index value assignment - - Single-level `bool` index value assignments are supported. The single-level `int` index value assignment is `tensor_x[bool_index] = u`. - - For example: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_y = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_z = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_x[True] = 88.0 - tensor_y[True]= np.array([66, 88, 99]).astype(np.float32) - tensor_z[True] = (66, 88, 99) - ``` - - The result is as follows: - - ```text - tensor_x: Tensor(shape=[2, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [88.0, 88.0, 88.0]]) - tensor_y: Tensor(shape=[2, 3], dtype=Float32, value=[[66.0, 88.0, 99.0], [66.0, 88.0, 99.0]]) - tensor_z: Tensor(shape=[2, 3], dtype=Float32, value=[[66.0, 88.0, 99.0], [66.0, 88.0, 99.0]]) - ``` - -- `ellipsis` index value assignment - - Single-level `ellipsis` index value assignments are supported. The single-level `ellipsis` index value assignment is `tensor_x[...] = u`. 
- - For example: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_y = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_z = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_x[...] = 88 - tensor_y[...]= np.array([[22, 44, 55], [22, 44, 55]]) - tensor_z[...] = ([11, 22, 33], [44, 55, 66]) - ``` - - The result is as follows: - - ```text - tensor_x: Tensor(shape=[2, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [88.0, 88.0, 88.0]]) - tensor_y: Tensor(shape=[2, 3], dtype=Float32, value=[[22.0, 44.0, 55.0], [22.0, 44.0, 55.0]]) - tensor_z: Tensor(shape=[2, 3], dtype=Float32, value=[[11.0, 22.0, 33.0], [44.0, 55.0, 66.0]]) - ``` - -- `slice` index value assignment - - Single-level `slice` index value assignments are supported. The single-level `slice` index value assignment is `tensor_x[slice_index] = u`. - - For example: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_y = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_z = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_k = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_x[0:1] = 88.0 - tensor_y[0:2] = 88.0 - tensor_z[0:2] = np.array([[11, 12, 13], [11, 12, 13]]).astype(np.float32) - tensor_k[0:2] = ([11, 12, 13], (14, 15, 16)) - ``` - - The result is as follows: - - ```text - tensor_x: Tensor(shape=[3, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]]) - tensor_y: Tensor(shape=[3, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [88.0, 88.0, 88.0], [6.0, 7.0, 8.0]]) - tensor_z: Tensor(shape=[3, 3], dtype=Float32, value=[[11.0, 12.0, 13.0], [11.0, 12.0, 13.0], [6.0, 7.0, 8.0]]) - tensor_k: Tensor(shape=[3, 3], dtype=Float32, value=[[11.0, 12.0, 13.0], [14.0, 15.0, 16.0], [6.0, 7.0, 8.0]]) - ``` - -- `None` index value assignment - - Single-level `None` index value assignments are supported. 
The single-level `None` index value assignment is `tensor_x[none_index] = u`. - - For example: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_y = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_z = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_x[None] = 88.0 - tensor_y[None] = np.array([66, 88, 99]).astype(np.float32) - tensor_z[None] = (66, 88, 99) - ``` - - The result is as follows: - - ```text - tensor_x: Tensor(shape=[2, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [88.0, 88.0, 88.0]]) - tensor_y: Tensor(shape=[2, 3], dtype=Float32, value=[[66.0, 88.0, 99.0], [66.0, 88.0, 99.0]]) - tensor_z: Tensor(shape=[2, 3], dtype=Float32, value=[[66.0, 88.0, 99.0], [66.0, 88.0, 99.0]]) - ``` - -- `Tensor` index value assignment - - Single-level `Tensor` index value assignments are supported. The single-level `Tensor` index value assignment is `tensor_x[tensor_index] = u`. - - Boolean `Tensor` index is not currently supported, only `mstype.int*` type is supported. 
- - For example: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_y = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_z = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_index = np.array([[2, 0, 2], [0, 2, 0], [0, 2, 0]], np.int32) - tensor_x[tensor_index] = 88.0 - tensor_y[tensor_index] = np.array([11.0, 12.0, 13.0]).astype(np.float32) - tensor_z[tensor_index] = [11, 12, 13] - ``` - - The result is as follows: - - ```text - tensor_x: Tensor(shape=[3, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [3.0, 4.0, 5.0], [88.0, 88.0, 88.0]]) - tensor_y: Tensor(shape=[3, 3], dtype=Float32, value=[[11.0, 12.0, 13.0], [3.0, 4.0, 5.0], [11.0, 12.0, 13.0]]) - tensor_z: Tensor(shape=[3, 3], dtype=Float32, value=[[11.0, 12.0, 13.0], [3.0, 4.0, 5.0], [11.0, 12.0, 13.0]]) - ``` - -- `List` index value assignment - - single-level `List` index value assignments are supported. The single-level `List` index value assignment is `tensor_x[list_index] = u`. - - The `List` index value assignment is the same as that of the `List` index value. - - For example: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_y = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_index = np.array([[0, 1], [1, 0]]).astype(np.int32) - tensor_x[[0,1]] = 88.0 - tensor_y[[True, False, False]] = np.array([11, 12, 13]).astype(np.float32) - ``` - - The result is as follows: - - ```text - tensor_x: Tensor(shape=[3, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [88.0, 88.0, 88.0], [6.0, 7.0, 8.0]]) - tensor_y: Tensor(shape=[3, 3], dtype=Float32, value=[[11.0, 12.0, 13.0], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]]) - ``` - -- `Tuple` index value assignment - - single-level `Tuple` index value assignments are supported. The single-level `Tuple` index value assignment is `tensor_x[tuple_index] = u`. 
- - The `Tuple` index value assignment is the same as that of the `Tuple` index value, but `None` type is not supported now. - - For example: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_y = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_z = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_index = np.array([0, 1]).astype(np.int32) - tensor_x[1, 1:3] = 88.0 - tensor_y[1:3, tensor_index] = 88.0 - tensor_z[1:3, tensor_index] = np.array([11, 12]).astype(np.float32) - ``` - - The result is as follows: - - ```text - tensor_x: Tensor(shape=[3, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [3.0, 88.0, 88.0], [6.0, 7.0, 8.0]]) - tensor_y: Tensor(shape=[3, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [88.0, 88.0, 5.0], [88.0, 88.0, 8.0]]) - tensor_z: Tensor(shape=[3, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [11.0, 12.0, 5.0], [11.0, 12.0, 8.0]]) - ``` - -## Index value augmented-assignment - -Index value augmented-assignment supports seven augmented_assignment operations: `+=`, `-=`, `*=`, `/=`, `%=`, `**=`, and `//=`. The rules and constraints of `index` and `value` are the same as index assignment. The index value supports eight types: `int`, `bool`, `ellipsis`, `slice`, `None`, `tensor`, `list` and `tuple`. The assignment value supports four types: `Number`, `Tensor`, `Tuple` and `List`. - -Index value augmented-assignment can be regarded as taking the value of the position elements to be indexed according to certain rules, and then performing operator operation with `value`. Finally, assign the operation result to the origin `Tensor`. All index augmented-assignments will not change the `shape` of the original `Tensor`. - -> If there are multiple index elements in indices that correspond to the same position, the value of that position in the output will be nondeterministic. 
For more details, please see:[TensorScatterUpdate](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.TensorScatterUpdate.html) -> -> Currently indices that contain `True`, `False` and `None` are not supported. - -- Rules and constraints: - - Compared with index assignment, the process of value and operation is increased. The constraint rules of `index` are the same as `index` in Index Value, and support `Int`, `Bool`, `Tensor`, `Slice`, `Ellipse`, `None`, `List` and `Tuple`. The values of `Int` contained in the above types of data should be in `[-dim_size, dim_size-1]` within the closed range. - - The constraint rules of `value` in the operation process are the same as those of `value` in index assignment. The type of `value` needs to be one of (`Number`, `Tensor`, `List`, `Tuple`). And if `value`'s type is not `number`, `value.shape` should be able to broadcast to `tensor_x[index].shape`. - - For example: - - ```python - tensor_x = Tensor(np.arange(3 * 4).reshape(3, 4).astype(np.float32)) - tensor_y = Tensor(np.arange(3 * 4).reshape(3, 4).astype(np.float32)) - tensor_x[[0, 1], 1:3] += 2 - tensor_y[[1], ...] -= [4, 3, 2, 1] - ``` - - The result is as follows: - - ```text - tensor_x: Tensor(shape=[3, 4], dtype=Float32, value=[[0.0, 3.0, 4.0, 3.0], [4.0, 7.0, 8.0, 7.0], [8.0, 9.0, 10.0, 11.0]]) - tensor_y: Tensor(shape=[3, 4], dtype=Float32, value=[[0.0, 1.0, 2.0, 3.0], [0.0, 2.0, 4.0, 6.0], [8.0, 9.0, 10.0, 11.0]]) - ``` - diff --git a/docs/note/source_en/network_list.rst b/docs/note/source_en/network_list.rst deleted file mode 100644 index f149a7e9bcd148abd3f6b933fa1597da008bab20..0000000000000000000000000000000000000000 --- a/docs/note/source_en/network_list.rst +++ /dev/null @@ -1,7 +0,0 @@ -Network List -============== - -.. 
toctree:: - :maxdepth: 1 - - network_list_ms \ No newline at end of file diff --git a/docs/note/source_en/network_list_ms.md b/docs/note/source_en/network_list_ms.md deleted file mode 100644 index f39ff4c5c1c16afc193f637389ab87b669af9357..0000000000000000000000000000000000000000 --- a/docs/note/source_en/network_list_ms.md +++ /dev/null @@ -1,110 +0,0 @@ -# MindSpore Network List - -`Linux` `Ascend` `GPU` `CPU` `Model Development` `Intermediate` `Expert` - - - -- [MindSpore Network List](#mindspore-network-list) - - [Model Zoo](#model-zoo) - - - - - -## Model Zoo - -### Official - -| Domain | Sub Domain | Network | Ascend (Graph) | Ascend (PyNative) | GPU (Graph) | GPU (PyNative) | CPU (Graph) | CPU (PyNative)| -|:------ |:------| :----------- |:------: |:------: |:------: |:------: |:-----: |:-----:| -|Computer Vision (CV) | Image Classification | [AlexNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/alexnet) | ✅ | ✅ | ✅ | ✅ | | | -| Computer Vision (CV) | Image Classification | [CNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/cnn_direction_model) | ✅ | | | | | | -| Computer Vision (CV) | Image Classification | [DenseNet100](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/densenet) | | | | | ✅ | ✅ | -| Computer Vision (CV) | Image Classification | [DenseNet121](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/densenet) | ✅ | ✅ | | | | | -| Computer Vision (CV) | Image Classification | [DPN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/dpn) | ✅ | | | | | | -| Computer Vision (CV) | Image Classification | [EfficientNet-B0](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/efficientnet) | | | ✅ | ✅ | | | -| Computer Vision (CV) | Image Classification | [GoogLeNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/googlenet) | ✅ | ✅ | ✅ | ✅ | | | -| Computer Vision (CV) | Image Classification | 
[InceptionV3](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/inceptionv3) | ✅ | | | | | | -| Computer Vision (CV) | Image Classification | [InceptionV4](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/inceptionv4) | ✅ | | | | | | -| Computer Vision (CV) | Image Classification | [LeNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/lenet) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| Computer Vision (CV) | Image Classification | [LeNet (Quantization)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/lenet_quant) | ✅ | | ✅ | | | | -| Computer Vision (CV) | Image Classification | [MobileNetV1](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/mobilenetv1) | ✅ | | | | | | -| Computer Vision (CV) | Image Classification | [MobileNetV2](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/mobilenetv2) | ✅ | ✅ | ✅ | ✅ | ✅ | | -| Computer Vision (CV) | Image Classification | [MobileNetV2 (Quantization)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/mobilenetv2_quant) | ✅ | | ✅ | | | | -| Computer Vision (CV) | Image Classification | [MobileNetV3](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/mobilenetv3) | | | ✅ | ✅ | | | -| Computer Vision (CV) | Image Classification | [NASNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/nasnet) | | | ✅ | ✅ | | | -| Computer Vision (CV) | Image Classification | [ResNet-18](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnet) | ✅ | | | | | | -| Computer Vision (CV) | Image Classification | [ResNet-50](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnet) | ✅ | ✅ | ✅ | ✅ | ✅ | | -| Computer Vision (CV) | Image Classification | [ResNet-50 (Quantization)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnet50_quant) | ✅ | | | | | | -|Computer Vision (CV) | Image 
Classification | [ResNet-101](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnet) | ✅ | ✅ | ✅ | ✅ | | | -|Computer Vision (CV) | Image Classification | [ResNeXt50](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnext50) | ✅ | | ✅ | ✅ | | | -|Computer Vision (CV) | Image Classification | [SE-ResNet50](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnet) | ✅ | ✅ | | | | | -| Computer Vision (CV) | Image Classification | [ShuffleNetV1](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/shufflenetv1) | ✅ | | | | | | -| Computer Vision (CV) | Image Classification | [ShuffleNetV2](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/shufflenetv2) | | | ✅ | ✅ | | | -| Computer Vision (CV) | Image Classification | [SqueezeNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/squeezenet) | ✅ | | | | | | -| Computer Vision (CV) | Image Classification | [Tiny-DarkNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/tinydarknet) | ✅ | | | | | | -| Computer Vision (CV) | Image Classification | [VGG16](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/vgg16) | ✅ | ✅ | ✅ | ✅ | | | -| Computer Vision (CV) | Image Classification | [Xception](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/xception) | ✅ | | | | | | -| Computer Vision (CV) | Object Detection | [CenterFace](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/centerface) | ✅ | | | | | | -| Computer Vision (CV) | Object Detection | [CTPN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/ctpn) | ✅ | | | | | | -| Computer Vision (CV) | Object Detection | [Faster R-CNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/faster_rcnn) | ✅ | | ✅ | | | | -| Computer Vision (CV) | Object Detection | [Mask 
R-CNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/maskrcnn) | ✅ | ✅ | | | | | -| Computer Vision (CV) | Object Detection | [Mask R-CNN (MobileNetV1)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/maskrcnn_mobilenetv1) | ✅ | | | | | | -| Computer Vision (CV) | Object Detection | [RetinaFace-ResNet50](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/retinaface_resnet50) | | | ✅ | ✅ | | | -| Computer Vision (CV) | Object Detection | [SSD](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/ssd) | ✅ | | ✅ | ✅ | ✅ | | -| Computer Vision (CV) | Object Detection | [SSD-MobileNetV1-FPN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/ssd) | ✅ | | | | | | -| Computer Vision (CV) | Object Detection | [SSD-Resnet50-FPN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/ssd) | ✅ | | | | | | -| Computer Vision (CV) | Object Detection | [SSD-VGG16](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/ssd) | ✅ | | | | | | -| Computer Vision (CV) | Object Detection | [WarpCTC](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/warpctc) | ✅ | | ✅ | | | | -| Computer Vision (CV) | Object Detection | [YOLOv3-ResNet18](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/yolov3_resnet18) | ✅ | | | | | | -| Computer Vision (CV) | Object Detection | [YOLOv3-DarkNet53](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/yolov3_darknet53) | ✅ | ✅ | ✅ | ✅ | | | -| Computer Vision (CV) | Object Detection | [YOLOv3-DarkNet53 (Quantization)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/yolov3_darknet53_quant) | ✅ | | | | | | -| Computer Vision (CV) | Object Detection | [YOLOv4](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/yolov4) | ✅ | | | | | | -| Computer Vision (CV) | Text Detection | 
[DeepText](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/deeptext) | ✅ | | | | | | -| Computer Vision (CV) | Text Detection | [PSENet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/psenet) | ✅ | ✅ | | | | | -| Computer Vision (CV) | Text Recognition | [CNN+CTC](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/cnnctc) | ✅ | ✅ | | | | | -| Computer Vision (CV) | Semantic Segmentation | [DeepLabV3](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/deeplabv3) | ✅ | | | | ✅ | | -| Computer Vision (CV) | Semantic Segmentation | [U-Net2D (Medical)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/unet) | ✅ | | | | | | -| Computer Vision (CV) | Semantic Segmentation | [U-Net3D (Medical)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/unet3d) | ✅ | | | | | | -| Computer Vision (CV) | Semantic Segmentation | [U-Net++](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/unet) | ✅ | | | | | | -| Computer Vision (CV) | Keypoint Detection | [OpenPose](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/openpose) | ✅ | | | | | | -| Computer Vision (CV) | Keypoint Detection | [SimplePoseNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/simple_pose) | ✅ | | | | | | -| Computer Vision (CV) | Optical Character Recognition | [CRNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/crnn) | ✅ | | | | | | -| Natural Language Processing (NLP) | Natural Language Understanding | [BERT](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/bert) | ✅ | ✅ | ✅ | ✅ | | | -| Natural Language Processing (NLP) | Natural Language Understanding | [FastText](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/fasttext) | ✅ | | | | | | -| Natural Language Processing (NLP) | Natural Language Understanding | [GNMT 
v2](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/gnmt_v2) | ✅ | | | | | | -| Natural Language Processing (NLP) | Natural Language Understanding | [GRU](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/gru) | ✅ | | | | | | -| Natural Language Processing (NLP) | Natural Language Understanding | [MASS](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/mass) | ✅ | ✅ | ✅ | ✅ | | | -| Natural Language Processing (NLP) | Natural Language Understanding | [SentimentNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/lstm) | ✅ | | ✅ | ✅ | ✅ | ✅ | -| Natural Language Processing (NLP) | Natural Language Understanding | [Transformer](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/transformer) | ✅ | ✅ | ✅ | ✅ | | | -| Natural Language Processing (NLP) | Natural Language Understanding | [TinyBERT](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/tinybert) | ✅ | ✅ | ✅ | | | | -| Natural Language Processing (NLP) | Natural Language Understanding | [TextCNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/textcnn) | ✅ | | | | | | -| Recommender | Recommender System, CTR prediction | [DeepFM](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/recommend/deepfm) | ✅ | ✅ | ✅ | ✅ | ✅ | | -| Recommender | Recommender System, Search, Ranking | [Wide&Deep](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/recommend/wide_and_deep) | ✅ | ✅ | ✅ | ✅ | | | -| Recommender | Recommender System | [NAML](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/recommend/naml) | ✅ | | | | | | -| Recommender | Recommender System | [NCF](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/recommend/ncf) | ✅ | | ✅ | | | | -| Graph Neural Networks (GNN) | Text Classification | [GCN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/gnn/gcn) | ✅ | ✅ 
| | | | | -| Graph Neural Networks (GNN) | Text Classification | [GAT](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/gnn/gat) | ✅ | ✅ | | | | | -| Graph Neural Networks (GNN) | Recommender System | [BGCF](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/gnn/bgcf) | ✅ | | | | | | - -### Research - -| Domain | Sub Domain | Network | Ascend (Graph) | Ascend (PyNative) | GPU (Graph) | GPU (PyNative)| CPU (Graph) | CPU (PyNative) | -|:------ |:------| :----------- |:------: |:------: |:------: |:------: |:-----: |:-----: | -| Computer Vision (CV) | Image Classification | [FaceAttributes](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/FaceAttribute) | ✅ | ✅ | | | | | -| Computer Vision (CV) | Object Detection | [FaceDetection](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/FaceDetection) | ✅ | | | | | | -| Computer Vision (CV) | Image Classification | [FaceQualityAssessment](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/FaceQualityAssessment) | ✅ | ✅ | | | | | -| Computer Vision (CV) | Image Classification | [FaceRecognition](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/FaceRecognition) | ✅ | | | | | | -| Computer Vision (CV) | Image Classification | [FaceRecognitionForTracking](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/FaceRecognitionForTracking) | ✅ | | | | | | -| Computer Vision (CV) | Object Detection | [SSD-GhostNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/ssd_ghostnet) | ✅ | | | | | | -| Computer Vision (CV) | Key Point Detection | [CenterNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/centernet) | ✅ | | | | ✅ | | -| Computer Vision (CV) | Image Style Transfer | [CycleGAN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/CycleGAN) | | | | ✅ | ✅ | | -| Natural Language Processing (NLP) | Natural Language 
Understanding | [DS-CNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/nlp/dscnn) | ✅ | ✅ | | | | | -| Natural Language Processing (NLP) | Natural Language Understanding | [TextRCNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/nlp/textrcnn) | ✅ | | | | | | -| Natural Language Processing (NLP) | Natural Language Understanding | [TPRR](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/nlp/tprr) | ✅ | | | | | | -| Recommender | Recommender System, CTR prediction | [AutoDis](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/recommend/autodis) | ✅ | | | | | | -| Audio | Audio Tagging | [FCN-4](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/audio/fcn-4) | ✅ | | | | | | -| High Performance Computing | Molecular Dynamics | [DeepPotentialH2O](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/hpc/molecular_dynamics) | ✅ | ✅ | | | | | -| High Performance Computing | Ocean Model | [GOMO](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/hpc/ocean_model) | | | ✅ | | | | - -> You can also use [MindWizard Tool](https://gitee.com/mindspore/mindinsight/tree/master/mindinsight/wizard/) to quickly generate classic network scripts. diff --git a/docs/note/source_en/operator_list.rst b/docs/note/source_en/operator_list.rst deleted file mode 100644 index 1dc2c6513828a7d02815bb05661e97f49f8a9b2a..0000000000000000000000000000000000000000 --- a/docs/note/source_en/operator_list.rst +++ /dev/null @@ -1,9 +0,0 @@ -Operator List -=============== - -.. 
toctree:: - :maxdepth: 1 - - operator_list_ms - operator_list_implicit - operator_list_parallel \ No newline at end of file diff --git a/docs/note/source_en/operator_list_implicit.md b/docs/note/source_en/operator_list_implicit.md deleted file mode 100644 index 397d78fad0298d6c286cb6cf8c2b45c07fe5a493..0000000000000000000000000000000000000000 --- a/docs/note/source_en/operator_list_implicit.md +++ /dev/null @@ -1,145 +0,0 @@ -# MindSpore Implicit Type Conversion Operator List - -`Linux` `Ascend` `GPU` `CPU` `Model Development` `Beginner` `Intermediate` `Expert` - - - -- [MindSpore Implicit Type Conversion Operator List](#mindspore-implicit-type-conversion-operator-list) - - [Implicit Type Conversion](#implicit-type-conversion) - - [conversion rules](#conversion-rules) - - [data types involved in conversion](#data-types-involved-in-conversion) - - [support ops](#support-ops) - - - - - -## Implicit Type Conversion - -### conversion rules - -- Scalar and Tensor operations: during operation, the scalar is automatically converted to Tensor, and the data type is consistent with the Tensor data type involved in the operation; when Tensor is bool data type and the scalar is int or float, both the scalar and Tensor are converted to the Tensor with the data type of int32 or float32; when Tensor is int or uint data type and the scalar is float, both the scalar and Tensor are converted to the Tensor with the data type of float32. -- Tensor operation of different data types: the priority of data type is bool < uint8 < int8 < int16 < int32 < int64 < float16 < float32 < float64, during the operation, first determine the data type with the relatively highest priority among the Tensors involved in the operation, and then convert the low priority data type Tensor to the relatively highest priority data type; when the Tensor of int8 and uint8 data types are operated, they are converted to int16 Tensor. 
-- Data type conversion of Parameter is not supported: If inferred according to the conversion rules, RuntimeError exception will be thrown when the data type conversion of Parameter defined in the network is required. - -### data types involved in conversion - -- bool -- int8 -- uint8 -- int16 -- int32 -- int64 -- float16 -- float32 -- float64 - -### support ops - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
mindspore.ops.Addmindspore.ops.ApplyAdadeltamindspore.ops.ApplyAdagrad
mindspore.ops.ApplyAdagradV2mindspore.ops.ApplyAdaMaxmindspore.ops.ApplyAddSign
mindspore.ops.ApplyGradientDescentmindspore.ops.ApplyMomentummindspore.ops.ApplyPowerSign
mindspore.ops.ApplyProximalAdagradmindspore.ops.ApplyProximalGradientDescentmindspore.ops.ApproximateEqual
mindspore.ops.Assignmindspore.ops.AssignAddmindspore.ops.AssignSub
mindspore.ops.Atan2mindspore.ops.BitwiseAndmindspore.ops.BitwiseOr
mindspore.ops.BitwiseXormindspore.ops.Divmindspore.ops.DivNoNan
mindspore.ops.Equalmindspore.ops.FloorDivmindspore.ops.FloorMod
mindspore.ops.FusedSparseAdammindspore.ops.FusedSparseFtrlmindspore.ops.FusedSparseLazyAdam
mindspore.ops.FusedSparseProximalAdagradmindspore.ops.Greatermindspore.ops.GreaterEqual
mindspore.ops.Lessmindspore.ops.LessEqualmindspore.ops.LogicalAnd
mindspore.ops.LogicalOrmindspore.ops.Maximummindspore.ops.Minimum
mindspore.ops.Modmindspore.ops.Mulmindspore.ops.NotEqual
mindspore.ops.Powmindspore.ops.RealDivmindspore.ops.ScatterAdd
mindspore.ops.ScatterDivmindspore.ops.ScatterMaxmindspore.ops.ScatterMin
mindspore.ops.ScatterMulmindspore.ops.ScatterNdAddmindspore.ops.ScatterNdSub
mindspore.ops.ScatterNdUpdatemindspore.ops.ScatterNonAliasingAddmindspore.ops.ScatterSub
mindspore.ops.ScatterUpdatemindspore.ops.SparseApplyAdagradmindspore.ops.SparseApplyAdagradV2
mindspore.ops.SparseApplyFtrlmindspore.ops.SparseApplyFtrlV2mindspore.ops.SparseApplyProximalAdagrad
mindspore.ops.SquaredDifferencemindspore.ops.Submindspore.ops.TruncateDiv
mindspore.ops.TruncateModmindspore.ops.Xdivymindspore.ops.Xlogy
diff --git a/docs/note/source_en/operator_list_ms.md b/docs/note/source_en/operator_list_ms.md deleted file mode 100644 index 1470d3ea699c4c0f0ecd07e511b10f6539af3b41..0000000000000000000000000000000000000000 --- a/docs/note/source_en/operator_list_ms.md +++ /dev/null @@ -1,10 +0,0 @@ -# MindSpore Operator List - -`Linux` `Ascend` `GPU` `CPU` `Model Development` `Beginner` `Intermediate` `Expert` - - - -You can choose the operators that are suitable for your hardware platform for building the network model according to your needs. - -- Supported operator lists in module `mindspore.nn` could be checked on [API page of mindspore.nn](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.nn.html). -- Supported operator lists in module `mindspore.ops` could be checked on [API page of mindspore.ops](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.ops.html). diff --git a/docs/note/source_en/operator_list_parallel.md b/docs/note/source_en/operator_list_parallel.md deleted file mode 100644 index ccd210d8296c1f4e43dc7a4d5b48e778d18ec2c8..0000000000000000000000000000000000000000 --- a/docs/note/source_en/operator_list_parallel.md +++ /dev/null @@ -1,125 +0,0 @@ -# MindSpore Distributed Operator List - -`Linux` `Ascend` `GPU` `CPU` `Model Development` `Beginner` `Intermediate` `Expert` - - - -- [MindSpore Distributed Operator List](#mindspore-distributed-operator-list) - - [Distributed Operator](#distributed-operator) - - - - - -## Distributed Operator - -| op name | constraints | -| :----------------------------------------------------------- | :----------------------------------------------------------- | -| [mindspore.ops.Abs](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Abs.html) | None | -| [mindspore.ops.ACos](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ACos.html) | None | -| 
[mindspore.ops.Acosh](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Acosh.html) | None | -| [mindspore.ops.Add](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Add.html) | None | -| [mindspore.ops.ApproximateEqual](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ApproximateEqual.html) | None | -| [mindspore.ops.ArgMaxWithValue](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ArgMaxWithValue.html) | When the input_x is splited on the axis dimension, the distributed result may be inconsistent with that on the single machine. | -| [mindspore.ops.ArgMinWithValue](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ArgMinWithValue.html) | When the input_x is splited on the axis dimension, the distributed result may be inconsistent with that on the single machine. | -| [mindspore.ops.Asin](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Asin.html) | None | -| [mindspore.ops.Asinh](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Asinh.html) | None | -| [mindspore.ops.Assign](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Assign.html) | None | -| [mindspore.ops.AssignAdd](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.AssignAdd.html) | None | -| [mindspore.ops.AssignSub](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.AssignSub.html) | None | -| [mindspore.ops.Atan](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Atan.html) | None | -| [mindspore.ops.Atan2](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Atan2.html) | None | -| [mindspore.ops.Atanh](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Atanh.html) | None | -| 
[mindspore.ops.BatchMatMul](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.BatchMatMul.html) | `transpose_a=True` is not supported. | -| [mindspore.ops.BesselI0e](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.BesselI0e.html) | None | -| [mindspore.ops.BesselI1e](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.BesselI1e.html) | None | -| [mindspore.ops.BiasAdd](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.BiasAdd.html) | None | -| [mindspore.ops.BroadcastTo](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.BroadcastTo.html) | None | -| [mindspore.ops.Cast](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Cast.html) | The shard strategy is ignored in the Auto Parallel and Semi Auto Parallel mode. | -| [mindspore.ops.Ceil](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Ceil.html) | None | -| [mindspore.ops.Concat](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Concat.html) | The input_x can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. 
| -| [mindspore.ops.Cos](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Cos.html) | None | -| [mindspore.ops.Cosh](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Cosh.html) | None | -| [mindspore.ops.Div](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Div.html) | None | -| [mindspore.ops.DivNoNan](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.DivNoNan.html) | None | -| [mindspore.ops.DropoutDoMask](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.DropoutDoMask.html) | Need to be used in conjunction with `DropoutGenMask` | -| [mindspore.ops.DropoutGenMask](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.DropoutGenMask.html) | Need to be used in conjunction with `DropoutDoMask`, configuring shard strategy is not supported. | -| [mindspore.ops.Elu](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Elu.html) | None | -| [mindspore.ops.EmbeddingLookup](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.EmbeddingLookup.html) | The same as Gather. 
| -| [mindspore.ops.Equal](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Equal.html) | None | -| [mindspore.ops.Erf](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Erf.html) | None | -| [mindspore.ops.Erfc](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Erfc.html) | None | -| [mindspore.ops.Exp](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Exp.html) | None | -| [mindspore.ops.ExpandDims](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ExpandDims.html) | None | -| [mindspore.ops.Expm1](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Expm1.html) | None | -| [mindspore.ops.Floor](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Floor.html) | None | -| [mindspore.ops.FloorDiv](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.FloorDiv.html) | None | -| [mindspore.ops.FloorMod](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.FloorMod.html) | None | -| [mindspore.ops.Gather](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Gather.html) | Only support 1-dim and 2-dim parameters and the last dimension of the input_params should be 32-byte aligned; Scalar input_indices is not supported; Repeated calculation is not supported when the parameters are split in the dimension of the axis; Split input_indices and input_params at the same time is not supported. 
| -| [mindspore.ops.GeLU](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.GeLU.html) | None | -| [mindspore.ops.Greater](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Greater.html) | None | -| [mindspore.ops.GreaterEqual](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.GreaterEqual.html) | None | -| [mindspore.ops.Inv](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Inv.html) | None | -| [mindspore.ops.L2Normalize](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.L2Normalize.html) | The input_x can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | -| [mindspore.ops.Less](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Less.html) | None | -| [mindspore.ops.LessEqual](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.LessEqual.html) | None | -| [mindspore.ops.LogicalAnd](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.LogicalAnd.html) | None | -| [mindspore.ops.LogicalNot](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.LogicalNot.html) | None | -| [mindspore.ops.LogicalOr](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.LogicalOr.html) | None | -| [mindspore.ops.Log](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Log.html) | None | -| [mindspore.ops.Log1p](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Log1p.html) | None | -| [mindspore.ops.LogSoftmax](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.LogSoftmax.html) | The logits can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. 
| -| [mindspore.ops.MatMul](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.MatMul.html) | `transpose_a=True` is not supported. | -| [mindspore.ops.Maximum](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Maximum.html) | None | -| [mindspore.ops.Minimum](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Minimum.html) | None | -| [mindspore.ops.Mod](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Mod.html) | None | -| [mindspore.ops.Mul](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Mul.html) | None | -| [mindspore.ops.Neg](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Neg.html) | None | -| [mindspore.ops.NotEqual](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.NotEqual.html) | None | -| [mindspore.ops.OneHot](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.OneHot.html) | Only support 1-dim indices. Must configure strategy for the output and the first and second inputs. | -| [mindspore.ops.OnesLike](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.OnesLike.html) | None | -| [mindspore.ops.Pow](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Pow.html) | None | -| [mindspore.ops.PReLU](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.PReLU.html) | When the shape of weight is not [1], the shard strategy in channel dimension of input_x should be consistent with weight. 
| -| [mindspore.ops.RealDiv](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.RealDiv.html) | None | -| [mindspore.ops.Reciprocal](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Reciprocal.html) | None | -| [mindspore.ops.ReduceMax](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ReduceMax.html) | When the input_x is split on the axis dimension, the distributed result may be inconsistent with that on the single machine. | -| [mindspore.ops.ReduceMin](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ReduceMin.html) | When the input_x is split on the axis dimension, the distributed result may be inconsistent with that on the single machine. | -| [mindspore.ops.ReduceSum](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ReduceSum.html) | None | -| [mindspore.ops.ReduceMean](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ReduceMean.html) | None | -| [mindspore.ops.ReLU](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ReLU.html) | None | -| [mindspore.ops.ReLU6](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ReLU6.html) | None | -| [mindspore.ops.ReLUV2](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ReLUV2.html) | None | -| [mindspore.ops.Reshape](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Reshape.html) | Configuring shard strategy is not supported. In auto parallel mode, if multiple operators are followed by the reshape operator, different shard strategies are not allowed to be configured for these operators. 
| -| [mindspore.ops.Round](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Round.html) | None | -| [mindspore.ops.Rsqrt](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Rsqrt.html) | None | -| [mindspore.ops.Sigmoid](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Sigmoid.html) | None | -| [mindspore.ops.SigmoidCrossEntropyWithLogits](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.SigmoidCrossEntropyWithLogits.html) | None | -| [mindspore.ops.Sign](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Sign.html) | None | -| [mindspore.ops.Sin](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Sin.html) | None | -| [mindspore.ops.Sinh](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Sinh.html) | None | -| [mindspore.ops.Softmax](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Softmax.html) | The logits can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | -| [mindspore.ops.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.SoftmaxCrossEntropyWithLogits.html) | The last dimension of logits and labels can't be split; Only supports using output[0]. | -| [mindspore.ops.Softplus](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Softplus.html) | None | -| [mindspore.ops.Softsign](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Softsign.html) | None | -| [mindspore.ops.SparseGatherV2](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.SparseGatherV2.html) | The same as Gather. 
| -| [mindspore.ops.Split](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Split.html) | The input_x can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | -| [mindspore.ops.Sqrt](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Sqrt.html) | None | -| [mindspore.ops.Square](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Square.html) | None | -| [mindspore.ops.Squeeze](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Squeeze.html) | None | -| [mindspore.ops.Stack](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Stack.html) | None | -| [mindspore.ops.StridedSlice](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.StridedSlice.html) | Only support mask with all 0 values; The dimension needs to be split should be all extracted; Split is supported when the strides of dimension is 1. | -| [mindspore.ops.Slice](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Slice.html) | The dimension needs to be split should be all extracted. | -| [mindspore.ops.Sub](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Sub.html) | None | -| [mindspore.ops.Tan](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Tan.html) | None | -| [mindspore.ops.Tanh](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Tanh.html) | None | -| [mindspore.ops.Tile](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Tile.html) | Only support configuring shard strategy for multiples. | -| [mindspore.ops.TopK](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.TopK.html) | The input_x can't be split into the last dimension, otherwise it's inconsistent with the single machine in the mathematical logic. 
| -| [mindspore.ops.Transpose](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Transpose.html) | None | -| [mindspore.ops.Unique](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Unique.html) | Only support the repeat calculate shard strategy (1,). | -| [mindspore.ops.UnsortedSegmentSum](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.UnsortedSegmentSum.html) | The shard of input_x and segment_ids must be the same as the dimension of segment_ids. | -| [mindspore.ops.UnsortedSegmentMin](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.UnsortedSegmentMin.html) | The shard of input_x and segment_ids must be the same as the dimension of segment_ids. Note that if the segment id i is missing, then the output[i] will be filled with the maximum of the input type. The user needs to mask the maximum value to avoid value overflow. The communication operation such as AllReduce will raise a Run Task Error due to overflow. | -| [mindspore.ops.UnsortedSegmentMax](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.UnsortedSegmentMax.html) | The shard of input_x and segment_ids must be the same as the dimension of segment_ids. Note that if the segment id i is missing, then the output[i] will be filled with the minimum of the input type. The user needs to mask the minimum value to avoid value overflow. The communication operation such as AllReduce will raise a Run Task Error due to overflow. | -| [mindspore.ops.ZerosLike](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ZerosLike.html) | None | - -> Repeated calculation means that the device is not fully used. For example, the cluster has 8 devices to run distributed training, the splitting strategy only cuts the input into 4 copies. In this case, double counting will occur. 
-> diff --git a/docs/note/source_en/roadmap.md b/docs/note/source_en/roadmap.md deleted file mode 100644 index 13f558aa6ecba5bed2915cf6d7a4250190aa553e..0000000000000000000000000000000000000000 --- a/docs/note/source_en/roadmap.md +++ /dev/null @@ -1,90 +0,0 @@ -# RoadMap - -`Linux` `Windows` `Ascend` `GPU` `CPU` `Whole Process` `Framework Development` `Intermediate` `Expert` `Contributor` - - - -- [Preset Models](#preset-models) -- [Usability](#usability) -- [Performance Optimization](#performance-optimization) -- [Architecture Evolution](#architecture-evolution) -- [MindInsight Debugging and Optimization](#mindinsight-debugging-and-optimization) -- [MindArmour Security Hardening Package](#mindarmour-security-hardening-package) -- [Inference Framework](#inference-framework) - - - - - -MindSpore's top priority plans in the year are displayed as follows. We will continuously adjust the priority based on user feedback. - -In general, we will make continuous improvements in the following aspects: - -1. Support more preset models. -2. Continuously supplement APIs and operator libraries to improve usability and programming experience. -3. Comprehensively support for Huawei Ascend AI processor and continuously optimize the performance and software architecture. -4. Improve visualization, debugging and optimization, and security-related tools. - -We sincerely hope that you can join the discussion in the user community and contribute your suggestions. - -## Preset Models - -- CV: Classic models for object detection, GAN, image segmentation, and posture recognition. -- NLP: RNN and Transformer neural network, expanding the application based on the BERT pre-training model. -- Other: GNN, reinforcement learning, probabilistic programming, and AutoML. - -## Usability - -- Supplement APIs such as operators, optimizers, and loss functions. -- Complete the native expression support of the Python language. -- Support common Tensor/Math operations. 
-- Add more application scenarios of automatic parallelization to improve the accuracy of policy search. - -## Performance Optimization - -- Optimize the compilation time. -- Low-bit mixed precision training and inference. -- Improve memory utilization. -- Provide more fusion optimization methods. -- Improve the execution performance in PyNative. - -## Architecture Evolution - -- Optimize computational graph and operator fusion. Use fine-grained graph IR to express operators to form intermediate representation (IR) with operator boundaries and explore more layer optimization opportunities. -- Support more programming languages. -- Optimize the automatic scheduling and distributed training data cache mechanism of data augmentation. -- Continuously improve MindSpore IR. -- Support distributed training in parameter server mode. - -## MindInsight Debugging and Optimization - -- Training process observation - - Histogram - - Optimize the display of computational and data graphs. - - Integrate the performance profiling and debugger tools. - - Support comparison between multiple trainings. -- Training result lineage - - Data augmentation lineage comparison. -- Training process diagnosis - - Performance profiling. - - Graph model-based debugger. - -## MindArmour Security Hardening Package - -- Test the model security. -- Provide model security hardening tools. -- Protect data privacy during training and inference. - -## Inference Framework - -- Continuous optimization for operator, and add more operator. -- Support NLP neural networks. -- Visualization for MindSpore lite model. -- MindSpore Micro, which supports ARM Cortex-A and Cortex-M with Ultra-lightweight. -- Support re-training and federated learning on mobile device. -- Support auto-parallel. -- MindData on mobile device, which supports image resize and pixel data transform. -- Support post-training quantize, which supports inference with mixed precision to improve performance. -- Support Kirin NPU, MTK APU. 
-- Support inference for multi models with pipeline. -- C++ API for model construction. diff --git a/docs/note/source_en/static_graph_syntax_support.md b/docs/note/source_en/static_graph_syntax_support.md deleted file mode 100644 index 19d7e373c0699ea164d8a590e5c2e63be2664f4e..0000000000000000000000000000000000000000 --- a/docs/note/source_en/static_graph_syntax_support.md +++ /dev/null @@ -1,1152 +0,0 @@ -# Static Graph Syntax Support - -`Linux` `Ascend` `GPU` `CPU` `Model Development` `Beginner` `Intermediate` `Expert` - - - -- [Static Graph Syntax Support](#static-graph-syntax-support) - - [Overview](#overview) - - [Data Types](#data-types) - - [Built-in Python Data Types](#built-in-python-data-types) - - [Number](#number) - - [String](#string) - - [List](#list) - - [Tuple](#tuple) - - [Dictionary](#dictionary) - - [MindSpore User-defined Data Types](#mindspore-user-defined-data-types) - - [Tensor](#tensor) - - [Primitive](#primitive) - - [Cell](#cell) - - [Operators](#operators) - - [Arithmetic Operators](#arithmetic-operators) - - [Assignment Operators](#assignment-operators) - - [Logical Operators](#logical-operators) - - [Member Operators](#member-operators) - - [Identity Operators](#identity-operators) - - [Expressions](#expressions) - - [Conditional Control Statements](#conditional-control-statements) - - [single if](#single-if) - - [side-by-side if](#side-by-side-if) - - [if in if](#if-in-if) - - [Loop Statements](#loop-statements) - - [for](#for) - - [while](#while) - - [side-by-side while](#side-by-side-while) - - [while in while](#while-in-while) - - [Conditional Control Statements in Loop Statements](#conditional-control-statements-in-loop-statements) - - [if in for](#if-in-for) - - [if in while](#if-in-while) - - [Function Definition Statements](#function-definition-statements) - - [def Keyword](#def-keyword) - - [lambda Expression](#lambda-expression) - - [Functions](#functions) - - [Python Built-in Functions](#python-built-in-functions) - - 
[len](#len) - - [isinstance](#isinstance) - - [partial](#partial) - - [map](#map) - - [zip](#zip) - - [range](#range) - - [enumerate](#enumerate) - - [super](#super) - - [pow](#pow) - - [print](#print) - - [Function Parameters](#function-parameters) - - [Network Definition](#network-definition) - - [Instance Types on the Entire Network](#instance-types-on-the-entire-network) - - [Network Construction Components](#network-construction-components) - - [Network Constraints](#network-constraints) - - - - - -## Overview - -In graph mode, Python code is not executed by the Python interpreter. Instead, the code is compiled into a static computation graph, and then the static computation graph is executed. - -Currently, only the function, Cell, and subclass instances modified by the `@ms_function` decorator can be built. -For a function, build the function definition. For the network, build the `construct` method and other methods or functions called by the `construct` method. - -For details about how to use `ms_function`, click . - -For details about the definition of `Cell`, click . - -Due to syntax parsing restrictions, the supported data types, syntax, and related operations during graph building are not completely consistent with the Python syntax. As a result, some usage is restricted. - -The following describes the data types, syntax, and related operations supported during static graph building. These rules apply only to graph mode. - -> All the following examples run on the network in graph mode. The network definition is not described. - -## Data Types - -### Built-in Python Data Types - -Currently, the following built-in `Python` data types are supported: `Number`, `String`, `List`, `Tuple`, and `Dictionary`. - -#### Number - -Supports `int`, `float`, and `bool`, but does not support complex numbers. - -`Number` can be defined on the network. That is, the syntax `y = 1`, `y = 1.2`, and `y = True` are supported. 
- -Forcible conversion to `Number` is not supported on the network. That is, the syntax `y = int(x)`, `y = float(x)`, and `y = bool(x)` are not supported. - -#### String - -`String` can be constructed on the network. That is, the syntax `y = "abcd"` is supported. - -Forcible conversion to `String` is not supported on the network. That is, the syntax `y = str(x)` is not supported. - -#### List - -`List` can be constructed on the network, that is, the syntax `y = [1, 2, 3]` is supported. - -Forcible conversion to `List` is not supported on the network. That is, the syntax `y = list(x)` is not supported. - -`List` to be output in the computation graph will be converted into `Tuple`. - -- Supported APIs - - `append`: adds an element to `list`. - - For example: - - ```python - x = [1, 2, 3] - x.append(4) - ``` - - The result is as follows: - - ```text - x: (1, 2, 3, 4) - ``` - -- Supported index values and value assignment - - Single-level and multi-level index values and value assignment are supported. - - The index value supports only `int`. - - The assigned value can be `Number`, `String`, `Tuple`, `List`, or `Tensor`. - - For example: - - ```python - x = [[1, 2], 2, 3, 4] - - m = x[0][1] - x[1] = Tensor(np.array([1, 2, 3])) - x[2] = "ok" - x[3] = (1, 2, 3) - x[0][1] = 88 - n = x[-3] - ``` - - The result is as follows: - - ```text - m: 2 - x: ([1, 88], Tensor(shape=[3], dtype=Int64, value=[1, 2, 3]), 'ok', (1, 2, 3)) - n: Tensor(shape=[3], dtype=Int64, value=[1, 2, 3]) - ``` - -#### Tuple - -`Tuple` can be constructed on the network, that is, the syntax `y = (1, 2, 3)` is supported. - -Forcible conversion to `Tuple` is not supported on the network. That is, the syntax `y = tuple(x)` is not supported. - -- Supported index values - - The index value can be `int`, `slice`, `Tensor`, and multi-level index value. That is, the syntax `data = tuple_x[index0][index1]...` is supported. - - Restrictions on the index value `Tensor` are as follows: - - - `Tuple` stores `Cell`. 
Each `Cell` must be defined before a tuple is defined. The number of input parameters, input parameter type, and input parameter `shape` of each `Cell` must be the same. The number of outputs of each `Cell` must be the same. The output type must be the same as the output shape. - - - The index `Tensor` is a scalar `Tensor` whose `dtype` is `int32`. The value range is `[-tuple_len, tuple_len)`, negative index is not supported in `Ascend` backend. - - - This syntax does not support the running branches whose control flow conditions `if`, `while`, and `for` are variables. The control flow conditions can be constants only. - - - `GPU` and `Ascend` backend is supported. - - An example of the `int` and `slice` indexes is as follows: - - ```python - x = (1, (2, 3, 4), 3, 4, Tensor(np.array([1, 2, 3]))) - y = x[1][1] - z = x[4] - m = x[1:4] - n = x[-4] - ``` - - The result is as follows: - - ```text - y: 3 - z: Tensor(shape=[3], dtype=Int64, value=[1, 2, 3]) - m: ((2, 3, 4), 3, 4) - n: (2, 3, 4) - ``` - - An example of the `Tensor` index is as follows: - - ```python - class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.relu = nn.ReLU() - self.softmax = nn.Softmax() - self.layers = (self.relu, self.softmax) - - def construct(self, x, index): - ret = self.layers[index](x) - return ret - ``` - -#### Dictionary - -`Dictionary` can be constructed on the network. That is, the syntax `y = {"a": 1, "b": 2}` is supported. Currently, only `String` can be used as the `key` value. - -`Dictionary` to be output in the computational graph will extract all `value` values to form the `Tuple` output. - -- Supported APIs - - `keys`: extracts all `key` values from `dict` to form `Tuple` and return it. - - `values`: extracts all `value` values from `dict` to form `Tuple` and return it. 
- - For example: - - ```python - x = {"a": Tensor(np.array([1, 2, 3])), "b": Tensor(np.array([4, 5, 6])), "c": Tensor(np.array([7, 8, 9]))} - y = x.keys() - z = x.values() - ``` - - The result is as follows: - - ```text - y: ("a", "b", "c") - z: (Tensor(shape=[3], dtype=Int64, value=[1, 2, 3]), Tensor(shape=[3], dtype=Int64, value=[4, 5, 6]), Tensor(shape=[3], dtype=Int64, value=[7, 8, 9])) - ``` - -- Supported index values and value assignment - - The index value supports only `String`. The assigned value can be `Number`, `Tuple`, or `Tensor`. - - For example: - - ```python - x = {"a": Tensor(np.array([1, 2, 3])), "b": Tensor(np.array([4, 5, 6])), "c": Tensor(np.array([7, 8, 9]))} - y = x["b"] - x["a"] = (2, 3, 4) - ``` - - The result is as follows: - - ```text - y: Tensor(shape=[3], dtype=Int64, value=[4, 5, 6]) - x: {"a": (2, 3, 4), Tensor(shape=[3], dtype=Int64, value=[4, 5, 6]), Tensor(shape=[3], dtype=Int64, value=[7, 8, 9])} - ``` - -### MindSpore User-defined Data Types - -Currently, MindSpore supports the following user-defined data types: `Tensor`, `Primitive`, and `Cell`. - -#### Tensor - -Currently, tensors cannot be constructed on the network. That is, the syntax `x = Tensor(args...)` is not supported. - -You can use the `@constexpr` decorator to modify the function and generate the `Tensor` in the function. - -For details about how to use `@constexpr`, click . - -The constant `Tensor` used on the network can be used as a network attribute and defined in `init`, that is, `self.x = Tensor(args...)`. Then the constant can be used in `construct`. - -In the following example, `Tensor` of `shape = (3, 4), dtype = int64` is generated by `@constexpr`. - -```python -@constexpr -def generate_tensor(): - return Tensor(np.ones((3, 4))) -``` - -The following describes the attributes, APIs supported by the `Tensor`. - -- Supported attributes - - `shape`: obtains the shape of `Tensor` and returns a `Tuple`. 
- - `dtype`: obtains the data type of `Tensor` and returns a data type defined by `MindSpore`. - -- Supported APIs - - `all`: reduces `Tensor` through the `all` operation. Only `Tensor` of the `Bool` type is supported. - - `any`: reduces `Tensor` through the `any` operation. Only `Tensor` of the `Bool` type is supported. - -`view`: reshapes `Tensor` into input `shape`. - - `expand_as`: expands `Tensor` to the same `shape` as another `Tensor` based on the broadcast rule. - - For example: - - ```python - x = Tensor(np.array([[True, False, True], [False, True, False]])) - x_shape = x.shape - x_dtype = x.dtype - x_all = x.all() - x_any = x.any() - x_view = x.view((1, 6)) - - y = Tensor(np.ones((2, 3), np.float32)) - z = Tensor(np.ones((2, 2, 3))) - y_as_z = y.expand_as(z) - ``` - - The result is as follows: - - ```text - x_shape: (2, 3) - x_dtype: Bool - x_all: Tensor(shape=[], dtype=Bool, value=False) - x_any: Tensor(shape=[], dtype=Bool, value=True) - x_view: Tensor(shape=[1, 6], dtype=Bool, value=[[True, False, True, False, True, False]]) - - y_as_z: Tensor(shape=[2, 2, 3], dtype=Float32, value=[[[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]]) - ``` - -#### Primitive - -Currently, `Primitive` and its subclass instances can be constructed on the network. That is, the `reduce_sum = ReduceSum(True)` syntax is supported. - -However, during construction, the parameter can be specified only in position parameter mode, and cannot be specified in the key-value pair mode. That is, the syntax `reduce_sum = ReduceSum(keep_dims=True)` is not supported. - -Currently, the attributes and APIs related to `Primitive` and its subclasses cannot be called on the network. - -For details about the definition of `Primitive`, click . - -For details about the defined `Primitive`, click . - -#### Cell - -Currently, `Cell` and its subclass instances can be constructed on the network. That is, the syntax `cell = Cell(args...)` is supported. 
- -However, during construction, the parameter can be specified only in position parameter mode, and cannot be specified in the key-value pair mode. That is, the syntax `cell = Cell(arg_name=value)` is not supported. - -Currently, the attributes and APIs related to `Cell` and its subclasses cannot be called on the network unless they are called through `self` in `construct` of `Cell`. - -For details about the definition of `Cell`, click . - -For details about the defined `Cell`, click . - -## Operators - -Arithmetic operators and assignment operators support the `Number` and `Tensor` operations, as well as the `Tensor` operations of different `dtype`. - -This is because these operators are converted to operators with the same name for computation, and they support implicit type conversion. - -For details about the rules, click . - -### Arithmetic Operators - -| Arithmetic Operator | Supported Type -| :----------- |:-------- -| `+` |`Number` + `Number`, `Tensor` + `Tensor`, `Tensor` + `Number`, `Tuple` + `Tuple`, and `String` + `String` -| `-` |`Number` - `Number`, `Tensor` - `Tensor`, and `Tensor` - `Number` -| `*` |`Number` \* `Number`, `Tensor` \* `Tensor`, and `Tensor` \* `Number` -| `/` |`Number` / `Number`, `Tensor` / `Tensor`, and `Tensor` / `Number` -| `%` |`Number` % `Number`, `Tensor` % `Tensor`, and `Tensor` % `Number` -| `**` |`Number` \*\* `Number`, `Tensor` \*\* `Tensor`, and `Tensor` \*\* `Number` -| `//` |`Number` // `Number`, `Tensor` // `Tensor`, and `Tensor` // `Number` -| `~` | `~Tensor[Bool]` - -### Assignment Operators - -| Assignment Operator | Supported Type -| :----------- |:-------- -| `=` |Scalar and `Tensor` -| `+=` |`Number` += `Number`, `Tensor` += `Tensor`, `Tensor` += `Number`, `Tuple` += `Tuple`, and `String` += `String` -| `-=` |`Number` -= `Number`, `Tensor` -= `Tensor`, and `Tensor` -= `Number` -| `*=` |`Number` \*= `Number`, `Tensor` \*= `Tensor`, and `Tensor` \*= `Number` -| `/=` |`Number` /= `Number`, `Tensor` /= `Tensor`, and 
`Tensor` /= `Number` -| `%=` |`Number` %= `Number`, `Tensor` %= `Tensor`, and `Tensor` %= `Number` -| `**=` |`Number` \*\*= `Number`, `Tensor` \*\*= `Tensor`, and `Tensor` \*\*= `Number` -| `//=` |`Number` //= `Number`, `Tensor` //= `Tensor`, and `Tensor` //= `Number` - -### Logical Operators - -| Logical Operator | Supported Type -| :----------- |:-------- -| `and` |`Number` and `Number`, `Tensor`, and `Tensor` -| `or` |`Number` or `Number`, and `Tensor` or `Tensor` -| `not` |not `Number`, not `Tensor`, and not `tuple` - -### Member Operators - -| Member Operator | Supported Type -| :----------- |:-------- -| `in` |`Number` in `tuple`, `String` in `tuple`, `Tensor` in `Tuple`, `Number` in `List`, `String` in `List`, `Tensor` in `List`, and `String` in `Dictionary` -| `not in` | Same as `in` - -### Identity Operators - -| Identity Operator | Supported Type -| :----------- |:-------- -| `is` | The value can only be `None`, `True`, or `False`. -| `is not` | The value can only be `None`, `True`, or `False`. - -## Expressions - -### Conditional Control Statements - -#### single if - -Usage: - -- `if (cond): statements...` - -- `x = y if (cond) else z` - -Parameter: `cond` -- The supported types are `Number`, `Tuple`, `List`, `String`, `None`, `Tensor` and `Function`. It can also be an expression whose computation result type is one of them. - -Restrictions: - -- During graph building, if `if` is not eliminated, the data type and shape of `return` inside the `if` branch must be the same as those outside the `if` branch. - -- When only `if` is available, the data type and shape of the `if` branch variable after the update must be the same as those before the update. - -- When both `if` and `else` are available, the updated data type and shape of the `if` branch variable must be the same as those of the `else` branch. - -- Does not support higher-order differential. - -- Does not support `elif` statements. 
- -Example 1: - -```python -if x > y: - return m -else: - return n -``` - -The data types of `m` returned by the `if` branch and `n` returned by the `else` branch must be the same as those of shape. - -Example 2: - -```python -if x > y: - out = m -else: - out = n -return out -``` - -The data types of `out` after the `if` branch is updated and `else` after the `out` branch is updated must be the same as those of shape. - -#### side-by-side if - -Usage: - -- `if (cond1):statements else:statements...if (cond2):statements...` - -Parameters: `cond1` and `cond2` -- Consistent with `single if`. - -Restrictions: - -- Inherit all restrictions of `single if`. - -- The total number of `if` in calculating graph can not exceed 50. - -- Too many `if` will cause the compilation time to be too long. Reducing the number of `if` will help improve compilation efficiency. - -Example: - -```python -if x > y: - out = x -else: - out = y -if z > x: - out = out + 1 -return out -``` - -#### if in if - -Usage: - -- `if (cond1):if (cond2):statements...` - -Parameters: `cond1` and `cond2` -- Consistent with `single if`. - -Restrictions: - -- Inherit all restrictions of `single if`. - -- The total number of `if` in calculating graph can not exceed 50. - -- Too many `if` will cause the compilation time to be too long. Reducing the number of `if` will help improve compilation efficiency. - -Example: - -```python -if x > y: - z = z + 1 - if z > x: - return m -else: - return n -``` - -### Loop Statements - -#### for - -Usage: - -- `for i in sequence` - -Parameter: `sequence` --Iterative sequences (`Tuple` and `List`). - -Restrictions: - -- The total number of graph operations is a multiple of number of iterations of the `for` loop. Excessive number of iterations of the `for` loop may cause the graph to occupy more memory than usage limit. 
- -Example: - -```python -z = Tensor(np.ones((2, 3))) -x = (1, 2, 3) -for i in x: - z += i -return z -``` - -The result is as follows: - -```text -z: Tensor(shape=[2, 3], dtype=Int64, value=[[7, 7], [7, 7], [7, 7]]) -``` - -#### single while - -Usage: - -- `while (cond)` - -Parameter: `cond` -- Consistent with `single if`. - -Restrictions: - -- During graph building, if `while` is not eliminated, the data type and `shape` of `return` inside `while` must be the same as those outside `while`. - -- The data type and shape of the updated variables in `while` must be the same as those before the update. - -- Does not support training scenarios. - -Example 1: - -```python -while x < y: - x += 1 - return m -return n -``` - -The `m` data type returned inside `while` inside and `n` data type returned outside `while` must be the same as those of shape. - -Example 2: - -```python -out = m -while x < y: - x += 1 - out = out + 1 -return out -``` - -In `while`, the data types of `out` before and after update must be the same as those of shape. - -#### side-by-side while - -Usage: - -- `while (cond1):statements while (cond2):statemetns...` - -Parameters: `cond1` and `cond2` -- Consistent with `single if`. - -Restrictions: - -- Inherit all restrictions of `single while`. - -- The total number of `while` in calculating graph can not exceed 50. - -- Too many `while` will cause the compilation time to be too long. Reducing the number of `while` will help improve compilation efficiency. - -Example: - -```python -out = m -while x < y: - x += 1 - out = out + 1 -while out > 10: - out -= 10 -return out -``` - -#### while in while - -Usage: - --`while (cond1):while (cond2):statements...` - -Parameters: `cond1` and `cond2` -- Consistent with `single if`. - -Restrictions: - -- Inherit all restrictions of `single while`. - -- The total number of `while` in calculating graph can not exceed 50. - -- Too many `while` will cause the compilation time to be too long. 
Reducing the number of `while` will help improve compilation efficiency. - -Example: - -```python -out = m -while x < y: - while z < y: - z += 1 - out = out + 1 - x += 1 -return out -``` - -### Conditional Control Statements in Loop Statements - -#### if in for - -Usage: - -- for i in sequence:if (cond)` - -Parameters: - -- `cond` -- Consistent with `single if`. - -- `sequence` -- Iterative sequence(`Tuple`、`List`) - -Restrictions: - -- Inherit all restrictions of `single if`. - -- Inherit all restrictions of `for`. - -- If `cond` is variable, it is forbidden to use `if (cond):return`,`if (cond):continue`,`if (cond):break` statements. - -- The total number of `if` is a multiple of number of iterations of the `for` loop. Excessive number of iterations of the `for` loop may cause the compilation time to be too long. - -Example: - -```python -z = Tensor(np.ones((2, 3))) -x = (1, 2, 3) -for i in x: - if i < 3: - z += i -return z -``` - -The result is as follows: - -```text -z: Tensor(shape=[2, 3], dtype=Int64, value=[[4, 4], [4, 4], [4, 4]]) -``` - -#### if in while - -Usage: - -- `while (cond1):if (cond2)` - -Parameters: `cond1` and `cond2` -- Consistent with `single if`. - -Restrictions: - -- Inherit all restrictions of `single if` and `single while`. - -- If `cond2` is variable, it is forbidden to use `if (cond2):return`,`if (cond2):continue`,`if (cond2):break` statements. - -Example: - -```python -out = m -while x < y: - if z > 2*x: - out = out + 1 - x += 1 -return out -``` - -### Function Definition Statements - -#### def Keyword - -Defines functions. - -Usage: - -`def function_name(args): statements...` - -For example: - -```python -def number_add(x, y): - return x + y -ret = number_add(1, 2) -``` - -The result is as follows: - -```text -ret: 3 -``` - -#### lambda Expression - -Generates functions. 
- -Usage: `lambda x, y: x + y` - -For example: - -```python -number_add = lambda x, y: x + y -ret = number_add(2, 3) -``` - -The result is as follows: - -```text -ret: 5 -``` - -## Functions - -### Python Built-in Functions - -Currently, the following built-in Python functions are supported: `len`, `isinstance`, `partial`, `map`, `range`, `enumerate`, `super`, and `pow`. - -#### len - -Returns the length of a sequence. - -Calling: `len(sequence)` - -Input parameter: `sequence` -- `Tuple`, `List`, `Dictionary`, or `Tensor`. - -Return value: length of the sequence, which is of the `int` type. If the input parameter is `Tensor`, the length of dimension 0 is returned. - -For example: - -```python -x = (2, 3, 4) -y = [2, 3, 4] -d = {"a": 2, "b": 3} -z = Tensor(np.ones((6, 4, 5))) -x_len = len(x) -y_len = len(y) -d_len = len(d) -z_len = len(z) -``` - -The result is as follows: - -```text -x_len: 3 -y_len: 3 -d_len: 2 -z_len: 6 - ``` - -#### isinstance - -Determines whether an object is an instance of a class. Different from operator `Isinstance`, the second input parameter of `Isinstance` is the type defined in the `dtype` module of MindSpore. - -Calling: `isinstance(obj, type)` - -Input parameters: - -- `obj` -- Any instance of any supported type. - -- `type` -- A type in the `MindSpore dtype` module. - -Return value: If `obj` is an instance of `type`, return `True`. Otherwise, return `False`. - -For example: - -```python -x = (2, 3, 4) -y = [2, 3, 4] -z = Tensor(np.ones((6, 4, 5))) -x_is_tuple = isinstance(x, mstype.tuple_) -y_is_list= isinstance(y, mstype.list_) -z_is_tensor = isinstance(z, mstype.tensor) -``` - -The result is as follows: - -```text -x_is_tuple: True -y_is_list: True -z_is_tensor: True - ``` - -#### partial - -A partial function used to fix the input parameter of the function. - -Calling: `partial(func, arg, ...)` - -Input parameters: - -- `func` --Function. - -- `arg` -- One or more parameters to be fixed. 
Position parameters and key-value pairs can be specified. - -Return value: functions with certain input parameter values fixed - -For example: - -```python -def add(x, y): - return x + y - -add_ = partial(add, x=2) -m = add_(y=3) -n = add_(y=5) -``` - -The result is as follows: - -```text -m: 5 -n: 7 -``` - -#### map - -Maps one or more sequences based on the provided functions and generates a new sequence based on the mapping result. -If the number of elements in multiple sequences is inconsistent, the length of the new sequence is the same as that of the shortest sequence. - -Calling: `map(func, sequence, ...)` - -Input parameters: - -- `func` --Function. - -- `sequence` -- One or more sequences (`Tuple` or `List`). - -Return value: A `Tuple` - -For example: - -```python -def add(x, y): - return x + y - -elements_a = (1, 2, 3) -elements_b = (4, 5, 6) -ret = map(add, elements_a, elements_b) -``` - -The result is as follows: - -```text -ret: (5, 7, 9) -``` - -#### zip - -Packs elements in the corresponding positions in multiple sequences into tuples, and then uses these tuples to form a new sequence. -If the number of elements in each sequence is inconsistent, the length of the new sequence is the same as that of the shortest sequence. - -Calling: `zip(sequence, ...)` - -Input parameter: `sequence` -- One or more sequences (`Tuple` or `List`)`. - -Return value: A `Tuple` - -For example: - -```python -elements_a = (1, 2, 3) -elements_b = (4, 5, 6) -ret = zip(elements_a, elements_b) -``` - -The result is as follows: - -```text -ret: ((1, 4), (2, 5), (3, 6)) -``` - -#### range - -Creates a `Tuple` based on the start value, end value, and step. - -Calling: - -- `range(start, stop, step)` - -- `range(start, stop)` - -- `range(stop)` - -Input parameters: - -- `start` -- start value of the count. The type is `int`. The default value is 0. - -- `stop` -- end value of the count (exclusive). The type is `int`. - -- `step` -- Step. The type is `int`. The default value is 1. 
- -Return value: A `Tuple` - -For example: - -```python -x = range(0, 6, 2) -y = range(0, 5) -z = range(3) -``` - -The result is as follows: - -```text -x: (0, 2, 4) -y: (0, 1, 2, 3, 4) -z: (0, 1, 2) -``` - -#### enumerate - -Generates an index sequence of a sequence. The index sequence contains data and the corresponding subscript. - -Calling: - -- `enumerate(sequence, start)` - -- `enumerate(sequence)` - -Input parameters: - -- `sequence` -- A sequence (`Tuple`, `List`, or `Tensor`). - -- `start` -- Start position of the subscript. The type is `int`. The default value is 0. - -Return value: A `Tuple` - -For example: - -```python -x = (100, 200, 300, 400) -y = Tensor(np.array([[1, 2], [3, 4], [5 ,6]])) -m = enumerate(x, 3) -n = enumerate(y) -``` - -The result is as follows: - -```text -m: ((3, 100), (4, 200), (5, 300), (5, 400)) -n: ((0, Tensor(shape=[2], dtype=Int64, value=[1, 2])), (1, Tensor(shape=[2], dtype=Int64, value=[3, 4])), (2, Tensor(shape=[2], dtype=Int64, value=[5, 6]))) -``` - -#### super - -Calls a method of the parent class (super class). Generally, the method of the parent class is called after `super`. - -Calling: - -- `super().xxx()` - -- `super(type, self).xxx()` - -Input parameters: - -- `type` --Class. - -- `self` --Object. - -Return value: method of the parent class. - -For example: - -```python -class FatherNet(nn.Cell): - def __init__(self, x): - super(FatherNet, self).__init__(x) - self.x = x - - def construct(self, x, y): - return self.x * x - - def test_father(self, x): - return self.x + x - -class SingleSubNet(FatherNet): -def __init__(self, x, z): - super(SingleSubNet, self).__init__(x) - self.z = z - -def construct(self, x, y): - ret_father_construct = super().construct(x, y) - ret_father_test = super(SingleSubNet, self).test_father(x) - return ret_father_construct, ret_father_test -``` - -#### pow - -Returns the power. - -Calling: `pow(x, y)` - -Input parameters: - -- `x` -- Base number, `Number`, or `Tensor`. 
- -- `y` -- Power exponent, `Number`, or `Tensor`. - -Return value: `y` power of `x`, `Number`, or `Tensor` - -For example: - -```python -x = Tensor(np.array([1, 2, 3])) -y = Tensor(np.array([1, 2, 3])) -ret = pow(x, y) -``` - -The result is as follows: - -```text -ret: Tensor(shape=[3], dtype=Int64, value=[1, 4, 27])) -``` - -#### print - -Prints logs. - -Calling: `print(arg, ...)` - -Input parameter: `arg` -- Information to be printed (`int`, `float`, `bool`, `String` or `Tensor`). -When the `arg` is `int`, `float`, or `bool`, it will be printed out as a `0-D` tensor. - -Return value: none - -For example: - -```python -x = Tensor(np.array([1, 2, 3])) -y = 3 -print("x: ", x) -print("y: ", y) -``` - -The result is as follows: - -```text -x: Tensor(shape=[3], dtype=Int64, value=[1, 2, 3])) -y: Tensor(shape=[], dtype=Int64, value=3)) -``` - -### Function Parameters - -- Default parameter value: The data types `int`, `float`, `bool`, `None`, `str`, `tuple`, `list`, and `dict` are supported, whereas `Tensor` is not supported. - -- Variable parameters: Inference and training of networks with variable parameters are supported. - -- Key-value pair parameter: Functions with key-value pair parameters cannot be used for backward propagation on computational graphs. - -- Variable key-value pair parameter: Functions with variable key-value pairs cannot be used for backward propagation on computational graphs. - -## Network Definition - -### Instance Types on the Entire Network - -- Common Python function with the [@ms_function](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.html#mindspore.ms_function) decorator. - -- Cell subclass inherited from [nn.Cell](https://www.mindspore.cn/doc/api_python/en/master/mindspore/nn/mindspore.nn.Cell.html). 
- -### Network Construction Components - -| Category | Content -| :----------- |:-------- -| `Cell` instance |[mindspore/nn/*](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.nn.html) and user-defined [Cell](https://www.mindspore.cn/doc/api_python/en/master/mindspore/nn/mindspore.nn.Cell.html). -| Member function of a `Cell` instance | Member functions of other classes in the construct function of Cell can be called. -| `dataclass` instance | Class decorated with @dataclass. -| `Primitive` operator |[mindspore/ops/operations/*](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.ops.html) -| `Composite` operator |[mindspore/ops/composite/*](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.ops.html) -| `constexpr` generation operator | Value computation operator generated by [@constexpr](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.constexpr.html). -| Function | User-defined Python functions and system functions listed in the preceding content. - -### Network Constraints - -1. By default, the input parameters of the entire network (that is, the outermost network input parameters) support only `Tensor`. To support non-`Tensor`, you can set the `support_non_tensor_inputs` attribute of the network to `True`. - - During network initialization, `self.support_non_tensor_inputs = True` is set. Currently, this configuration supports only the forward network and does not support the backward network. That is, the backward operation cannot be performed on the network whose input parameters are not `Tensor`. 
- - The following is an example of supporting the outermost layer to transfer scalars: - - ```python - class ExpandDimsNet(nn.Cell): - def __init__(self): - super(ExpandDimsNet, self).__init__() - self.support_non_tensor_inputs = True - self.expandDims = ops.ExpandDims() - - def construct(self, input_x, input_axis): - return self.expandDims(input_x, input_axis) - expand_dim_net = ExpandDimsNet() - input_x = Tensor(np.random.randn(2,2,2,2).astype(np.float32)) - expand_dim_net(input_x, 0) - ``` - -2. You are not allowed to modify non-`Parameter` data members of the network. - - For example: - - ```python - class Net(Cell): - def __init__(self): - super(Net, self).__init__() - self.num = 2 - self.par = Parameter(Tensor(np.ones((2, 3, 4))), name="par") - - def construct(self, x, y): - return x + y - ``` - - In the preceding defined network, `self.num` is not a `Parameter` and cannot be modified. `self.par` is a `Parameter` and can be modified. - -3. When an undefined class member is used in the `construct` function, `AttributeError` is not thrown like the Python interpreter. Instead, it is processed as `None`. - - For example: - - ```python - class Net(Cell): - def __init__(self): - super(Net, self).__init__() - - def construct(self, x): - return x + self.y - ``` - - In the preceding defined network, `construct` uses the undefined class member `self.y`. In this case, `self.y` is processed as `None`. diff --git a/docs/note/source_en/syntax_list.rst b/docs/note/source_en/syntax_list.rst deleted file mode 100644 index caec9b767e02af447b4f75248d6bc3f17a951ce2..0000000000000000000000000000000000000000 --- a/docs/note/source_en/syntax_list.rst +++ /dev/null @@ -1,8 +0,0 @@ -Syntax Support -================ - -.. 
toctree:: - :maxdepth: 1 - - static_graph_syntax_support - index_support \ No newline at end of file diff --git a/docs/note/source_zh_cn/_static/logo_notebook.png b/docs/note/source_zh_cn/_static/logo_notebook.png deleted file mode 100644 index 18c2e29e4b73ee428f70253feffdd855fdf0c422..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/_static/logo_notebook.png and /dev/null differ diff --git a/docs/note/source_zh_cn/_static/logo_source.png b/docs/note/source_zh_cn/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/_static/logo_source.png and /dev/null differ diff --git a/docs/note/source_zh_cn/benchmark.md b/docs/note/source_zh_cn/benchmark.md deleted file mode 100644 index 63c067619d642f6d3c6f7960d9ffad87b6c24f6f..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/benchmark.md +++ /dev/null @@ -1,63 +0,0 @@ -# 基准性能 - -`Linux` `Ascend` `模型训练` `中级` `高级` - - - -- [基准性能](#基准性能) - - [训练性能](#训练性能) - - [ResNet](#resnet) - - [BERT](#bert) - - [Wide & Deep (数据并行)](#wide--deep-数据并行) - - [Wide & Deep (Host-Device混合计算模型并行)](#wide--deep-host-device混合计算模型并行) - - - - - -本文介绍MindSpore的基准性能。MindSpore网络定义可参考[Model Zoo](https://gitee.com/mindspore/mindspore/tree/master/model_zoo)。 - -## 训练性能 - -### ResNet - -| Network | Network Type | Dataset | MindSpore Version | Resource                 | Precision | Batch Size | Throughput | Speedup | -| --- | --- | --- | --- | --- | --- | --- | --- | --- | -| ResNet-50 v1.5 | CNN | ImageNet2012 | 0.5.0-beta | Ascend: 1 * Ascend 910
CPU:24 Cores | Mixed | 256 | 2115 images/sec | - | -| | | | | Ascend: 8 * Ascend 910
CPU:192 Cores | Mixed | 256 | 16600 images/sec | 0.98 | -| | | | | Ascend: 16 * Ascend 910
CPU:384 Cores | Mixed | 256 | 32768 images/sec | 0.96 | - -1. 以上数据基于华为云AI开发平台ModelArts测试获得,是训练过程整体下沉至Ascend 910 AI处理器执行所得的平均性能。 -2. 业界其他开源框架数据可参考:[ResNet-50 v1.5 for TensorFlow](https://github.com/NVIDIA/DeepLearningExamples/tree/master/TensorFlow/Classification/ConvNets/resnet50v1.5)。 - -### BERT - -| Network | Network Type | Dataset | MindSpore Version | Resource                 | Precision | Batch Size | Throughput | Speedup | -| --- | --- | --- | --- | --- | --- | --- | --- | --- | -| BERT-Large | Attention | zhwiki | 0.5.0-beta | Ascend: 1 * Ascend 910
CPU:24 Cores | Mixed | 96 | 269 sentences/sec | - | -| | | | | Ascend: 8 * Ascend 910
CPU:192 Cores | Mixed | 96 | 2069 sentences/sec | 0.96 | - -1. 以上数据基于华为云AI开发平台ModelArts测试获得,其中网络包含24个隐藏层,句长为128个token,字典表包含21128个token。 -2. 业界其他开源框架数据可参考:[BERT For TensorFlow](https://github.com/NVIDIA/DeepLearningExamples/tree/master/TensorFlow/LanguageModeling/BERT)。 - -### Wide & Deep (数据并行) - -| Network | Network Type | Dataset | MindSpore Version | Resource                 | Precision | Batch Size | Throughput | Speedup | -| --- | --- | --- | --- | --- | --- | --- | --- | --- | -| Wide & Deep | Recommend | Criteo | 0.6.0-beta | Ascend: 1 * Ascend 910
CPU:24 Cores | Mixed | 16000 | 796892 samples/sec | - | -| | | | | Ascend: 8 \* Ascend 910
CPU:192 Cores | Mixed | 16000*8 | 4872849 samples/sec | 0.76 | - -1. 以上数据基于Atlas 800测试获得,且网络模型为数据并行。 -2. 业界其他开源框架数据可参考:[Wide & Deep For TensorFlow](https://github.com/NVIDIA/DeepLearningExamples/tree/master/TensorFlow/Recommendation/WideAndDeep)。 - -### Wide & Deep (Host-Device混合计算模型并行) - -| Network | Network Type | Dataset | MindSpore Version | Resource                 | Precision | Batch Size | Throughput | Speedup | -| --- | --- | --- | --- | --- | --- | --- | --- | --- | -| Wide & Deep | Recommend | Criteo | 0.6.0-beta | Ascend: 1 * Ascend 910
CPU:24 Cores | Mixed | 8000 | 68715 samples/sec | - | -| | | | | Ascend: 8 \* Ascend 910
CPU:192 Cores | Mixed | 8000*8 | 283830 samples/sec | 0.51 | -| | | | | Ascend: 16 \* Ascend 910
CPU:384 Cores | Mixed | 8000*16 | 377848 samples/sec | 0.34 | -| | | | | Ascend: 32 \* Ascend 910
CPU:768 Cores | Mixed | 8000*32 | 433423 samples/sec | 0.20 | - -1. 以上数据基于Atlas 800测试获得,且网络模型为模型并行。 -2. 业界其他开源框架数据可参考:[Wide & Deep For TensorFlow](https://github.com/NVIDIA/DeepLearningExamples/tree/master/TensorFlow/Recommendation/WideAndDeep)。 diff --git a/docs/note/source_zh_cn/community.rst b/docs/note/source_zh_cn/community.rst deleted file mode 100644 index 97682d4ed419fc477d6c0fcd87988151d3cd8e6d..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/community.rst +++ /dev/null @@ -1,12 +0,0 @@ -如何参与社区 -=============== - -贡献代码 ------------ - -如何贡献代码,请参见链接 https://gitee.com/mindspore/mindspore/blob/master/CONTRIBUTING.md 。 - -贡献文档 ------------ - -如何贡献文档,请参见链接 https://gitee.com/mindspore/docs/blob/master/CONTRIBUTING_DOC_CN.md 。 \ No newline at end of file diff --git a/docs/note/source_zh_cn/conf.py b/docs/note/source_zh_cn/conf.py deleted file mode 100644 index 95d7701759707ab95a3c199cd8a22e2e2cc1194d..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/conf.py +++ /dev/null @@ -1,62 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -import os - - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_search_language = 'zh' - -html_search_options = {'dict': '../../resource/jieba.txt'} - -html_static_path = ['_static'] \ No newline at end of file diff --git a/docs/note/source_zh_cn/design/mindarmour.rst b/docs/note/source_zh_cn/design/mindarmour.rst deleted file mode 100644 index d35c7aeab99e43e46d1baa9d57e9fb8fc3398c9c..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindarmour.rst +++ /dev/null @@ -1,8 +0,0 @@ -MindArmour设计 -============== - -.. 
toctree:: - :maxdepth: 1 - - mindarmour/differential_privacy_design - mindarmour/fuzzer_design \ No newline at end of file diff --git a/docs/note/source_zh_cn/design/mindarmour/differential_privacy_design.md b/docs/note/source_zh_cn/design/mindarmour/differential_privacy_design.md deleted file mode 100644 index 452748c6bc07e3587cd9edd418b55096e00fe246..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindarmour/differential_privacy_design.md +++ /dev/null @@ -1,66 +0,0 @@ -# 差分隐私 - -`Linux` `Ascend` `模型开发` `模型调优` `框架开发` `企业` `高级` `贡献者` - - - -- [差分隐私](#差分隐私) - - [总体设计](#总体设计) - - [差分隐私优化器](#差分隐私优化器) - - [差分隐私的噪声机制](#差分隐私的噪声机制) - - [Monitor](#monitor) - - [代码实现](#代码实现) - - [参考文献](#参考文献) - - - - - -## 总体设计 - -MindArmour的Differential-Privacy模块实现了差分隐私训练的能力。模型的训练主要由构建训练数据集、计算损失、计算梯度以及更新模型参数等过程组成,目前MindArmour的差分隐私训练主要着力于计算梯度的过程,通过相应的算法对梯度进行裁剪、加噪等处理,从而保护用户数据隐私。 - -![dp_arch](./images/dp_arch.png) - -
图1 差分隐私总体设计
- -图1是差分隐私训练的总体设计,主要由差分隐私噪声机制(DP Mechanisms)、差分隐私优化器(DP Optimizer)、差分隐私监控器(Privacy Monitor)组成。 - -### 差分隐私优化器 - -差分隐私优化器继承了MindSpore优化器的能力,并使用差分隐私的噪声机制对梯度加扰保护。目前,MindArmour提供三类差分隐私优化器:固定高斯优化器、自适应高斯优化器、自适应裁剪优化器,每类差分隐私优化器从不同的角度为SGD、Momentum等常规优化器增加差分隐私保护的能力。 - -- 固定高斯优化器,是一种非自适应高斯噪声的差分隐私优化器。其优势在于可以严格控制差分隐私预算ϵ,缺点是在模型训练过程中,每个Step添加的噪声量固定,若迭代次数过大,训练后期的噪声使得模型收敛困难,甚至导致性能大幅下跌,模型可用性差。 -- 自适应高斯优化器,通过自适应调整标准差,来调整高斯分布噪声的大小,在模型训练初期,添加的噪声量较大,随着模型逐渐收敛,噪声量逐渐减小,噪声对于模型可用性的影响减小。自适应高斯噪声的缺点是不能严格控制差分隐私预算。 -- 自适应裁剪优化器,是一种自适应调整调整裁剪粒度的差分隐私优化器,梯度裁剪是差分隐私训练的一个重要操作,自适应裁剪优化器能够自适应的控制梯度裁剪的比例在给定的范围波动,控制迭代训练过程中梯度裁剪的粒度。 - -### 差分隐私的噪声机制 - -噪声机制是构建差分隐私训练能力的基础,不同的噪声机制满足不同差分隐私优化器的需求,包括固定高斯分布噪声、自适应高斯分布噪声、自适应裁剪高斯分布噪声、拉普拉斯分布噪声等多种机制。 - -### Monitor - -Monitor提供RDP、ZCDP等回调函数,用于监测模型的差分隐私预算。 - -- ZCDP[1] - - ZCDP,zero-concentrated differential privacy,是一种宽松的差分隐私定义,利用Rényi散度来度量随机函数在相邻数据集上的分布差异。 - -- RDP[2] - - RDP,Rényi Differential Privacy,是一种更通用的基于R'enyi散度的差分隐私定义,利用Rényi散度来度量两个相邻数据集的分布差异。 - -相对于传统差分隐私,ZCDP和RDP都能能够提供更加严格的隐私预算上界保证。 - -## 代码实现 - -- [mechanisms.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/privacy/diff_privacy/mechanisms/mechanisms.py):这个文件实现了差分隐私训练所需的噪声生成机制,包括简单高斯噪声、自适应高斯噪声、自适应裁剪高斯噪声等。 -- [optimizer.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/privacy/diff_privacy/optimizer/optimizer.py):这个文件实现了使用噪声生成机制在反向传播时添加噪声的根本逻辑。 -- [monitor.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/privacy/diff_privacy/monitor/monitor.py):实现了计算差分隐私预算的回调函数,模型训练过程中,会反馈当前的差分隐私预算。 -- [model.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/privacy/diff_privacy/train/model.py):这个文件实现了计算损失和梯度的逻辑,差分隐私训练的梯度截断逻辑在此文件中实现,且model.py是用户使用差分隐私训练能力的入口。 - -## 参考文献 - -[1] Lee, Jaewoo, and Daniel Kifer. "Concentrated differentially private gradient descent with adaptive per-iteration privacy budget." *Proceedings of the 24th ACM SIGKDD International Conference on Knowledge Discovery & Data Mining*. 2018. - -[2] Mironov, Ilya. "Rényi differential privacy." 
*2017 IEEE 30th Computer Security Foundations Symposium (CSF)*. IEEE, 2017. diff --git a/docs/note/source_zh_cn/design/mindarmour/fuzzer_design.md b/docs/note/source_zh_cn/design/mindarmour/fuzzer_design.md deleted file mode 100644 index 30830f07020f9b6969a9d40c7a915b4e177946c5..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindarmour/fuzzer_design.md +++ /dev/null @@ -1,73 +0,0 @@ -# AI模型安全测试 - -`Linux` `Ascend` `GPU` `CPU` `数据准备` `模型开发` `模型训练` `模型调优` `企业` `高级` - - - -- [AI模型安全测试](#ai模型安全测试) - - [背景](#背景) - - [Fuzz Testing设计图](#fuzz-testing设计图) - - [Fuzz Testing流程](#fuzz-testing流程) - - [代码实现](#代码实现) - - [参考文献](#参考文献) - - - - - -## 背景 - -不同于[传统程序的Fuzz安全测试](https://zhuanlan.zhihu.com/p/43432370),MindArmour针对深度神经网络,提供AI模型安全测试模块fuzz_testing。根据神经网络的特点,引入神经元覆盖率[1]的概念,作为Fuzz的测试指导,引导Fuzz朝神经元覆盖率增加的方向生成样本,让输入能够激活更多的神经元,神经元值的分布范围更广,以充分测试DNN,探索不同类型的模型输出结果、模型错误行为。 - -## Fuzz Testing设计图 - -AI模型安全测试设计图如下。 - -![fuzz_architecture](./images/fuzz_architecture.png) - -在用户接口层,需要用户提供原始数据集`DataSet`、被测试模型`Model`和配置Fuzzer参数`Fuzzer configuration`。Fuzzer模块对模型和数据进行Fuzz测试后,返回安全评估报告`Security Report`。 - -Fuzz Testing架构主要包括三个模块: - -1. Natural Threat/Adversarial Example Generator(数据变异模块): - - 随机选择变异方法对种子数据变异生成多个变种。支持多种样本的变异策略, 包括: - - - 图像仿射变换方法如:平移、旋转、缩放、错切。 - - 基于图像像素值变化的方法如:改变对比度、亮度、模糊、加噪。 - - 基于对抗攻击的白盒、黑盒对抗样本生成方法,如FGSM、PGD、MDIIM。 - -2. Fuzzer moduler(变异指导模块): - - 对变异生成的数据进行fuzz测试,观察神经元覆盖率的变化情况,如果生成的数据使得神经元覆盖率增加,则加入变异的种子队列,用于下一轮的数据变异。目前支持的神经元覆盖率指标包括KMNC、NBC、SNAC[2]。 - -3. Evaluation(评估模块): - - 评估Fuzz Testing的效果,生成数据的质量,变异方法的强度。支持3个类型5种指标,包括通用评价指标:accuracy,神经元覆盖率指标:kmnc, nbc,snac,对抗攻击评价指标:attack_success_rate。 - -## Fuzz Testing流程 - -![fuzz_process](./images/fuzz_process.png) - -具体的Fuzz Testing流程如下: - -1. 根据策略从种子队列中选择一个种子A。 -2. 随机选择变异策略,对种子A进行变异,生成多个变种数据A1,A2... -3. 用目标模型对变种A1,A2...进行预测,如果变种的语意与种子保持一致,则进入Fuzzed Tests。 -4. 若目标模型对于变种的预测结果是正确的,用神经元覆盖率指标进行分析。 -5. 
如果变种使得覆盖率增加,那么将该变种放入种子队列,用于下一轮变异。 - -通过多轮循环,我们获得一系列变异数据Fuzzed Tests,并进一步分析,从多个角度给出安全报告。可以用于深入分析神经网络模型的缺陷,从而针对这些缺陷,进行模型增强等,改善提升模型的通用性、鲁棒性。 - -## 代码实现 - -1. [fuzzing.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/fuzz_testing/fuzzing.py):Fuzzer总体流程。 -2. [model_coverage_metrics.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/fuzz_testing/model_coverage_metrics.py):神经元覆盖率指标,包括KMNC,NBC,SNAC。 -3. [image_transform.py](https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/fuzz_testing/image_transform.py):图像变异方法,包括基于像素值的变化方法和仿射变化方法。 -4. [adversarial attacks](https://gitee.com/mindspore/mindarmour/tree/master/mindarmour/adv_robustness/attacks):对抗样本攻击方法,包含多种黑盒、白盒攻击方法。 - -## 参考文献 - -[1] Pei K, Cao Y, Yang J, et al. Deepxplore: Automated whitebox testing of deep learning systems[C]//Proceedings of the 26th Symposium on Operating Systems Principles. ACM, 2017: 1-18. - -[2]Ma L, Juefei-Xu F, Zhang F, et al. Deepgauge: Multi-granularity testing criteria for deep learning systems[C]//Proceedings of the 33rd ACM/IEEE International Conference on Automated Software Engineering. ACM, 2018: 120-131. 
diff --git a/docs/note/source_zh_cn/design/mindarmour/images/dp_arch.png b/docs/note/source_zh_cn/design/mindarmour/images/dp_arch.png deleted file mode 100644 index 568f5792cb925a24b8263e0ed4ee8e5ac140088d..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindarmour/images/dp_arch.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindarmour/images/fuzz_architecture.png b/docs/note/source_zh_cn/design/mindarmour/images/fuzz_architecture.png deleted file mode 100644 index d4e8b89bd9a9f4844c59790f5b2114d1d477f927..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindarmour/images/fuzz_architecture.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindarmour/images/fuzz_process.png b/docs/note/source_zh_cn/design/mindarmour/images/fuzz_process.png deleted file mode 100644 index 2e04347f7cfb0819562578a6be1e91b5cc7ce9d5..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindarmour/images/fuzz_process.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindinsight.rst b/docs/note/source_zh_cn/design/mindinsight.rst deleted file mode 100644 index 242655ac2beb1953dfddcdcbb5a40d182a2721a0..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindinsight.rst +++ /dev/null @@ -1,9 +0,0 @@ -MindInsight设计 -=============== - -.. 
toctree:: - :maxdepth: 1 - - mindinsight/training_visual_design - mindinsight/graph_visual_design - mindinsight/tensor_visual_design \ No newline at end of file diff --git a/docs/note/source_zh_cn/design/mindinsight/graph_visual_design.md b/docs/note/source_zh_cn/design/mindinsight/graph_visual_design.md deleted file mode 100644 index e2eaffa4ab0d84afd5cf03067e4f3adf5d383e07..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindinsight/graph_visual_design.md +++ /dev/null @@ -1,74 +0,0 @@ -# 计算图可视设计 - -`Linux` `Ascend` `GPU` `CPU` `模型开发` `模型调优` `框架开发` `中级` `高级` `贡献者` - - - -- [计算图可视设计](#计算图可视设计) - - [特性背景](#特性背景) - - [总体设计](#总体设计) - - [概念设计](#概念设计) - - [后端设计](#后端设计) - - [前端设计](#前端设计) - - [接口设计](#接口设计) - - [文件接口设计](#文件接口设计) - - - - - -## 特性背景 - -计算图可视的功能,主要协助开发者在下面这些场景中使用。 - -- 开发者在编写深度学习神经网络的代码时,可以使用计算图的功能查看神经网络中算子的数据流走向,以及模型结构。 -- 计算图还可以方便开发者查看指定节点的输入和输出节点,以及所查找的节点的属性信息。 -- 开发者在调试网络时,可以通过可视化的计算图,轻易跟踪数据,包括数据维度、类型的变更等。 - -## 总体设计 - -### 概念设计 - - |概念|说明| - |--|--| - |根节点、父节点、子节点|我们根据算子名称中的斜线,对节点划分层级。比如节点A 'Network' 和节点B 'Network/Conv2D',我们称节点A为根节点,称节点B为节点A的子节点,同时节点A也是节点B的父节点。| - |作用域|每个节点都存在一个作用域,子节点的作用域即为父节点的节点名称,比如算子节点A 'Network/Conv2D',它的作用域为 'Network',即父节点 'Network' 的名称。而根节点的作用域为空字符串。| - |算子节点|节点类型。从保存了计算图的文件中解析出来的原始节点,其对应神经网络代码中一个操作算子,比如Add操作算子。| - |常量节点|节点类型。表明算子的常量输入。从保存了计算图的文件中解析出来的常量,并根据其他节点的输入,决定它的作用域,比如常量A,原始名称为 'Const1',由于算子节点B 'Network/Conv2D' 有一个输入是常量A,则复制一个常量A,并将其名称命名为 'Network/Const1',使其作用域与算子节点B一样。| - |参数节点|节点类型。表明算子的参数输入。| - |命名空间|节点类型,也是作用域类型。以算子节点名字中的斜线(/)进行分割而得到的一种节点类型。比如存在一个名字为 'Network/Conv2D' 的节点 A,根据斜线分割,可以产生一个命名空间节点B,名称为 'Network',在图中展示时,A是B的一个子节点,A的作用域即为B的名称。展开B节点后,才可以看到A节点。| - |聚合节点|节点类型,也是作用域类型。在同一个作用域下,当同一种类型的节点过多时,我们会新建一个聚合节点,用来代替这些类型的节点,而这些类型的节点则作为该聚合节点的子节点折叠起来。| - |代理节点|节点类型。为了优化图中的连线,当节点A与节点B之间的连线过于曲折,我们会在A的旁边新建一个能够代理表示B的节点C,并连线A和C,表明A的数据流向B,而避免了直接连线A和B,导致布局过乱。| - |数据边|连线类型。表明数据的流向,用带箭头的实线表示。比如A->B,表明A有数据流向B。| - |控制边|连线类型。表明算子节点之间执行的依赖关系,用带箭头的虚线表示。比如A-->B,表明A先执行,再执行B。| - 
|独立布局|在一些连线比较复杂的场景下,我们将某个节点从原来的连线中提出来,避免其他节点与它相连,相对的在其他节点新建代理节点,使节点与代理节点相连,达到简化连线的关系。比如将参数类型的节点进行聚合,简化了参数节点与其他节点的连线关系。| - -### 后端设计 - -后端的类图如下,主要分为Graph基类和Node两个类,其中MsGraph是继承了Graph基类,用于解析MindSpore ANF的计算图文件。Node类聚合成一张图,与Graph为聚合关系。 - -![类图设计](./images/graph_visual_class_design.png) - -### 前端设计 - -数据将通过Web UI进行绘图并展示,前端采用d3-graph-viz 3.x插件进行绘图辅助。 - -![输入图片说明](./images/graph_visual_right_side.png) -图1:辅助功能 - -如图1所示,提供选择文件、查询节点、查看节点信息、节点输入输出等辅助功能。 - -![计算图主体展示](./images/graph_visual_main.png) -图2:计算图主体展示 - -计算图中,根据斜线(/)对节点的名称划分层次,并逐层展示,参考图2计算图主体展示。双击一个作用域节点后,将会展示它的子节点。 - -### 接口设计 - -计算图中,主要有文件接口和RESTful API接口,其中文件接口为`summary.proto`文件,是MindInsight和MindSpore进行数据对接的接口。 -RESTful API接口是MindInsight前后端进行数据交互的接口。 - -#### 文件接口设计 - -MindSpore与MindInsight之间的数据交互,采用[protobuf](https://developers.google.cn/protocol-buffers/docs/pythontutorial?hl=zh-cn)定义数据格式。 -[summary.proto文件](https://gitee.com/mindspore/mindinsight/blob/master/mindinsight/datavisual/proto_files/mindinsight_summary.proto)为总入口,计算图的消息对象定义为 `GraphProto`。`GraphProto`的详细定义可以参考[anf_ir.proto文件](https://gitee.com/mindspore/mindinsight/blob/master/mindinsight/datavisual/proto_files/mindinsight_anf_ir.proto)。 diff --git a/docs/note/source_zh_cn/design/mindinsight/images/graph_visual_class_design.png b/docs/note/source_zh_cn/design/mindinsight/images/graph_visual_class_design.png deleted file mode 100644 index 0a6aec0b2597cd0554c575dd11c2cddd9a7d3fcf..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindinsight/images/graph_visual_class_design.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindinsight/images/graph_visual_main.png b/docs/note/source_zh_cn/design/mindinsight/images/graph_visual_main.png deleted file mode 100644 index 0bc13636b5c84952978469c652c38500e6d34f43..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindinsight/images/graph_visual_main.png and /dev/null differ diff --git 
a/docs/note/source_zh_cn/design/mindinsight/images/graph_visual_right_side.png b/docs/note/source_zh_cn/design/mindinsight/images/graph_visual_right_side.png deleted file mode 100644 index 1cfab2911877ed6a51097f0e7bac880479143e26..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindinsight/images/graph_visual_right_side.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindinsight/images/tensor_histogram.png b/docs/note/source_zh_cn/design/mindinsight/images/tensor_histogram.png deleted file mode 100644 index 4d3ca16b63261eca5e8318cb47ec4050539eca51..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindinsight/images/tensor_histogram.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindinsight/images/tensor_table.png b/docs/note/source_zh_cn/design/mindinsight/images/tensor_table.png deleted file mode 100644 index d04dffae59fd6f9e49aede94bae93f8b8621fcb0..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindinsight/images/tensor_table.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_architecture.png b/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_architecture.png deleted file mode 100644 index bc294a2d30d54f041f77d3eba207f7d9c6721558..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_architecture.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_data_flow.png b/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_data_flow.png deleted file mode 100644 index 1c406ca003cbb753aced7597bbcea5d25fb32343..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_data_flow.png and /dev/null differ diff --git 
a/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_data_model.png b/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_data_model.png deleted file mode 100644 index 4e3089d1ae6723fa1753e302e544256179852096..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_data_model.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_data_model_zh.pptx b/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_data_model_zh.pptx deleted file mode 100644 index 3bf9f8df51fca74352c708aea8638f0e5309831a..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindinsight/images/training_visualization_data_model_zh.pptx and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindinsight/tensor_visual_design.md b/docs/note/source_zh_cn/design/mindinsight/tensor_visual_design.md deleted file mode 100644 index 0ec82f6c4b52d34c4fe5484359c2c55d3b578ade..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindinsight/tensor_visual_design.md +++ /dev/null @@ -1,105 +0,0 @@ -# 张量可视设计 - -`Linux` `Ascend` `GPU` `CPU` `模型开发` `模型调优` `框架开发` `中级` `高级` `贡献者` - - - -- [张量可视设计](#张量可视设计) - - [特性背景](#特性背景) - - [总体设计](#总体设计) - - [后端设计](#后端设计) - - [前端设计](#前端设计) - - [接口设计](#接口设计) - - [文件接口设计](#文件接口设计) - - - - - -## 特性背景 - -张量可视能够帮助用户直观查看训练过程中的Tensor值,既支持以直方图的形式呈现Tensor的变化趋势,也支持查看某次step的具体Tensor值。Tensor包括权重值、梯度值、激活值等。 - -## 总体设计 - -Tensor可视主要是解析由MindSpore的`TensorSummary`算子记录的Tensor数据生成的Summary文件,并把结果返回给前端展示。 - -MindInsight解析时会遵循proto文件(Google Protocol Buffer,是一种高效便捷的结构化数据存储方式)来解析Tensor数据,然后把数据缓存起来,在前端查询特定数据时将其返回供前端展示。 - -Tensor可视支持1-N维的Tensor以表格或直方图的形式展示,对于0维的Tensor,需要通过`ScalarSummary`来记录并在标量可视中展示。 - -在表格视图中,可以查询当前缓存中特定step的Tensor数据,后台通过切片操作使得用户单次可以查询任意0-2维的Tensor数据。 - -在直方图视图中,可以查询当前缓存中所有step的直方图数据。 - -### 后端设计 - 
-张量可视相关的类主要有`TensorContainer`、`Histogram`以及`TensorProcessor`类,其中`TensorContainer`用于保存Tensor的具体值、维度、数据类型、最大值、最小值、直方图等信息,这里的直方图引用了`Histogram`的数据。`Histogram`用于处理直方图相关的信息,包括保存桶个数,归一化缓存中所有step的直方图数据等。`TensorProcessor`用于处理与Tensor相关的HTTP请求,包括获取当前缓存中特定训练作业,特定tag有多少个step,每个step的Tensor统计信息,特定step的特定维度的Tensor数据(单次支持查询最多某两维的数据)以及特定tag的直方图数据。 - -### 前端设计 - -![tensor_table.png](./images/tensor_table.png) - -图1:表格展示 - -图1将用户所记录的张量以表格的形式展示,包含以下功能: - -- 表格中白色方框显示当前展示的是哪个维度下的张量数据,其中冒号`:`表示当前维度索引范围,和Python索引含义基本一致,不指定具体索引表示当前维度所有值,`2:5`表示索引2到5(不包括5)的值,可以在方框输入对应的索引或者含有`:`的索引范围来查询特定维度的张量数据。 -- 拖拽表格下方的空心圆圈可以查询特定步骤的张量数据。 - -![tensor_histogram.png](./images/tensor_histogram.png) - -图2:直方图展示 - -图2将用户所记录的张量以直方图的形式进行展示。 - -### 接口设计 - -在张量可视中,主要有文件接口和RESTful API接口,其中文件接口为[summary.proto](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/utils/summary.proto)文件,是MindInsight和MindSpore进行数据对接的接口。 RESTful API接口是MindInsight前后端进行数据交互的接口,是内部接口。 - -#### 文件接口设计 - -`summary.proto`文件为总入口,其中张量的数据(TensorProto)存放在Summary的Value中,如下所示: - -```protobuf -{ - message Summary { - message Image { - // Dimensions of the image. - required int32 height = 1; - required int32 width = 2; - ... - } - - message Histogram { - message bucket{ - // Counting number of values fallen in [left, left + width). - // For the rightmost bucket, the range is [left, left + width]. - required double left = 1; - required double width = 2; - required int64 count = 3; - } - - repeated bucket buckets = 1; - ... - } - - message Value { - // Tag name for the data. - required string tag = 1; - - // Value associated with the tag. - oneof value { - float scalar_value = 3; - Image image = 4; - TensorProto tensor = 8; - Histogram histogram = 9; - } - } - - // Set of values for the summary. 
- repeated Value value = 1; -} -``` - -而TensorProto的定义在[anf_ir.proto](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/utils/anf_ir.proto)文件中。 diff --git a/docs/note/source_zh_cn/design/mindinsight/training_visual_design.md b/docs/note/source_zh_cn/design/mindinsight/training_visual_design.md deleted file mode 100644 index c7bf75d115654144137351e93d801ad18095e6ea..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindinsight/training_visual_design.md +++ /dev/null @@ -1,132 +0,0 @@ -# 训练可视总体设计 - -`Linux` `Ascend` `GPU` `CPU` `模型开发` `模型调优` `框架开发` `中级` `高级` `贡献者` - - - -- [训练可视总体设计](#训练可视总体设计) - - [训练可视逻辑架构](#训练可视逻辑架构) - - [训练信息收集架构](#训练信息收集架构) - - [训练信息分析及展示架构](#训练信息分析及展示架构) - - [代码组织](#代码组织) - - [训练可视数据模型](#训练可视数据模型) - - [训练信息数据流](#训练信息数据流) - - [数据模型](#数据模型) - - [训练作业](#训练作业) - - [溯源数据](#溯源数据) - - [训练过程数据](#训练过程数据) - - - - - -[MindInsight](https://gitee.com/mindspore/mindinsight)是MindSpore的可视化调试调优组件。通过MindInsight可以完成训练可视、性能调优、精度调优等任务。 - -训练可视功能主要包括训练看板、模型溯源、数据溯源等功能,训练看板中又包括标量、参数分布图、计算图、数据图、数据抽样、张量等子功能。 - -本文主要介绍MindInsight训练可视功能的逻辑架构、代码组织和数据模型。 - -## 训练可视逻辑架构 - -在架构上,训练可视功能的逻辑架构分为两部分:训练信息收集架构,训练信息分析及展示架构。 - -![MindInsight训练可视逻辑架构](./images/training_visualization_architecture.png) - -图1 MindInsight训练可视逻辑架构 - -### 训练信息收集架构 - -训练信息收集功能在MindSpore中,包括训练信息收集API模块和训练信息持久化模块。 - -训练信息收集API包括: - -- 基于summary算子的训练信息收集API。这部分API主要包括4个summary算子,即用于记录标量数据的ScalarSummary算子,用于记录图片数据的ImageSummary算子,用于记录参数分布图(直方图)数据的HistogramSummary算子和用于记录张量数据的TensorSummary算子。请访问[算子支持列表](https://www.mindspore.cn/doc/note/zh-CN/master/operator_list.html)以获取关于这些算子的信息。 - -- 基于Python API的训练信息收集API。通过[SummaryRecord.add_value](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.train.html#mindspore.train.summary.SummaryRecord.add_value)方法,可以在Python代码中完成训练信息的收集。 - -- 
易用的训练信息收集callback。通过[SummaryCollector](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.train.html#mindspore.train.callback.SummaryCollector)这一callback可以方便地收集常用训练信息到训练日志中。 - -训练信息持久化模块主要包括用于管理缓存的summary_record模块和用于并行处理数据、写入文件的write_pool模块。训练信息持久化后,存储在训练日志文件(summary文件中)。 - -### 训练信息分析及展示架构 - -训练信息分析及展示架构在MindInsight中,包括Web UI和后端两大部分。后端从下到上可以分为数据加载及缓存层、业务逻辑层、API 层。数据加载及缓存层主要由训练日志文件发现模块、训练日志文件解析模块及缓存管理模块组成。业务逻辑层主要由训练看板业务模块和溯源业务模块组成。API层主要由RESTful API模块组成。各模块的主要功能如下: - -- 训练日志文件发现模块:用于在给定的训练日志根目录(summary-base-dir)中扫描并发现含有训练日志文件的训练日志目录。只有含有训练日志文件的目录会被识别为训练日志目录。 - -- 训练日志文件解析模块:用于解析训练日志文件。 - -- 缓存管理模块:用于管理训练日志解析任务,缓存训练日志解析结果。其会定期调用训练日志发现模块,扫描最新的训练日志目录列表;然后调用解析模块解析文件内容,将解析结果存储在缓存中以供UI查询。 - -- 训练看板模块:用于提供训练看板功能的业务逻辑,支撑UI查询训练看板数据。 - -- 溯源模块:用于提供模型溯源和数据溯源的业务逻辑,支撑UI查询溯源数据。 - -- RESTful API模块:用于将业务模块提供的接口包装为RESTful API。 - -## 代码组织 - -以下是MindInsight代码仓库中的部分重要目录及说明。 - -|一级目录|二级目录|三级目录|说明| -|---|---|---|---| -|build|||编译、构建相关代码。| -|mindinsight||| -||backend||RESTful API。| -|||datavisual|训练看板相关RESTful API。| -|||lineagemgr|溯源相关RESTful API。| -||datavisual||训练看板模块。当前数据加载及缓存层的代码也在此模块中。| -|||data_transform|数据加载及缓存层。| -||lineagemgr||溯源模块。| -||ui||MindInsight Web UI。| -|tests|||测试用例目录。| - -## 训练可视数据模型 - -### 训练信息数据流 - -训练信息产生于用户训练的过程中。用户可以通过训练信息收集API将这些训练信息收集起来,并通过训练信息持久化模块将这些训练信息保存到磁盘上,产生训练日志文件(summary文件)。训练日志文件生成后,便可以使用MindInsight对其中的信息进行可视化。 - -![训练信息数据流](./images/training_visualization_data_flow.png) - -图2 训练信息数据流 - -### 数据模型 - -MindInsight的简要数据模型如图3所示。一个训练日志目录会被MindInsight识别为一个训练作业。训练作业是MindInsight的最小管理单元。一个训练作业可以关联0-1个溯源数据,关联0-1个训练过程数据。训练过程数据内部有着丰富的结构,每一个具体的数据,可以通过给定的插件名称、标签和迭代唯一确定。下面将分别介绍这些概念。 - -![数据模型](./images/training_visualization_data_model.png) - -图3 以UML类图表示的数据模型 - -#### 训练作业 - -MindInsight通过目录来区分不同的训练作业。为了方便用户区分不同训练作业的训练日志文件,`SummaryCollector`、`SummaryRecord`都要求用户指定存放训练日志文件的目录。相同目录中的训练日志文件会被认为是同一次训练作业中产生的训练数据,不同目录中的训练日志文件会被认为是不同训练作业中产生的文件。 - -在MindInsight的代码中,一次训练一般被称为一个TrainJob。TrainJob的id即该次训练的日志所在目录的目录名(例如 ./train_my_lenet_1)。 - 
-一次训练的过程中,一般会产生该次训练的溯源数据文件(文件名以_lineage结尾)和训练过程数据文件(文件名一般以_MS结尾)。其中溯源数据主要从全局出发描述该次训练的不变性质,例如训练所用的数据集路径、训练所用的优化器、以及用户自定义的溯源信息。这些信息最突出的特点是不会在训练过程中变化。而训练过程数据主要描述该次训练的变化情况,例如loss值、参数分布、一个迭代中送入模型的图片数据等。这些信息最突出的特点是每个迭代都会发生变化。 - -需要注意的是,关于训练信息是否发生变化的分类并不是绝对的。比如训练过程数据文件中会含有计算图数据,其一般在训练开始的时候就确定了。 - -#### 溯源数据 - -溯源数据主要从全局出发描述某次训练的不变性质。当MindInsight识别到多个训练日志目录时,这若干次训练的溯源数据会被组织成表格的形式展示,以方便对比和分析。 - -#### 训练过程数据 - -- 插件名称(plugin_name) - - 对于训练过程数据,我们首先将这些训练数据按类型分为标量数据(scalar)、直方图数据(histogram)、图片数据(image)、张量数据(tensor)等类型,这些类型在MindInsight的中被称为插件名称(plugin_name),当前mindinsight支持的插件名称定义在`mindinsight.datavisual.common.enums.PluginNameEnum`中。 - -- 标签(tag) - - 无论数据属于何种类型,其都会依照tag进一步被分为不同的序列。tag一般由用户命名,用于对数据进行区分。比如记录loss值的标量数据,其tag名可以为loss。需要说明的是,MindInsight在对数据进行处理时,会根据插件名称自动为tag附加后缀。例如tag为loss的数据为标量数据,则该tag会被自动重命名为loss/scalar。 - -- 迭代数(step) - - 训练过程数据是在训练的每个迭代中产生的,为了区分这些数据,数据会被标记上该数据所对应的迭代数。 - -- 数据的查询和展示 - - 在展示数据时,用户常常希望看到某个标签下的数据随着训练过程的变化情况。因此,查询数据时,一般不会指定迭代数,而是直接指定训练作业、插件名称和标签,查询该标签下所有迭代的数据。 diff --git a/docs/note/source_zh_cn/design/mindspore.rst b/docs/note/source_zh_cn/design/mindspore.rst deleted file mode 100644 index ccb0c24c4482c3a28bda79d7d2da62b337788208..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindspore.rst +++ /dev/null @@ -1,10 +0,0 @@ -MindSpore设计 -============== - -.. 
toctree:: - :maxdepth: 1 - - mindspore/mindir - mindspore/distributed_training_design - mindspore/profiler_design - diff --git a/docs/note/source_zh_cn/design/mindspore/architecture.md b/docs/note/source_zh_cn/design/mindspore/architecture.md deleted file mode 100644 index 4bc94fd2c7d90cbabe68c139ff8238bac5c30e3e..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindspore/architecture.md +++ /dev/null @@ -1,16 +0,0 @@ -# MindSpore总体架构 - -`Linux` `Windows` `Ascend` `GPU` `CPU` `端侧` `模型开发` `模型调优` `框架开发` `中级` `高级` `贡献者` - - - -MindSpore是一个全场景深度学习框架,旨在实现易开发、高效执行、全场景覆盖三大目标,其中易开发表现为API友好、调试难度低,高效执行包括计算效率、数据预处理效率和分布式训练效率,全场景则指框架同时支持云、边缘以及端侧场景。 - -MindSpore总体架构如下图所示,下面介绍主要的扩展层(MindSpore Extend)、前端表达层(MindExpress,ME)、编译优化层(MindCompiler)和全场景运行时(MindRE)四个部分。 - -- **MindSpore Extend(扩展层)**:MindSpore的扩展包,期待更多开发者来一起贡献和构建。 -- **MindExpress(表达层)**:基于Python的前端表达,未来计划陆续提供C/C++、Java等不同的前端;MindSpore也在考虑支持华为自研编程语言前端-仓颉,目前还处于预研阶段;同时也在做与Julia等第三方前端的对接工作,引入更多的第三方生态。 -- **MindCompiler(编译优化层)**:图层的核心编译器,主要基于端云统一的MindIR实现三大功能,包括硬件无关的优化(类型推导、自动微分、表达式化简等)、硬件相关优化(自动并行、内存优化、图算融合、流水线执行等)、部署推理相关的优化(量化、剪枝等);其中,MindAKG是MindSpore的自动算子生成编译器,目前还在持续完善中。 -- **MindRE(全场景运行时)**:这里含云侧、端侧以及更小的IoT。 - -![MindSpore](images/architecture.png) diff --git a/docs/note/source_zh_cn/design/mindspore/distributed_training_design.md b/docs/note/source_zh_cn/design/mindspore/distributed_training_design.md deleted file mode 100644 index d98d99bc186b40a1866a0fb3c46ccffbbe2d810f..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindspore/distributed_training_design.md +++ /dev/null @@ -1,139 +0,0 @@ -# 分布式训练设计 - -`Linux` `Ascend` `GPU` `模型开发` `模型调优` `框架开发` `中级` `高级` `贡献者` - - - -- [分布式训练设计](#分布式训练设计) - - [背景](#背景) - - [概念](#概念) - - [集合通信](#集合通信) - - [同步模式](#同步模式) - - [数据并行](#数据并行) - - [数据并行原理](#数据并行原理) - - [数据并行代码](#数据并行代码) - - [自动并行](#自动并行) - - [自动并行原理](#自动并行原理) - - [自动并行代码](#自动并行代码) - - - - - -## 背景 - 
-随着深度学习的快步发展,为了提升神经网络的精度和泛化能力,数据集和参数量都在呈指数级向上攀升。分布式并行训练成为一种解决超大规模网络性能瓶颈的发展趋势。MindSpore支持了当前主流的分布式训练范式并开发了一套自动混合并行解决方案。本篇设计文档将会集中介绍几种并行训练方式的设计原理,同时指导用户进行自定义开发。 - -## 概念 - -### 集合通信 - -集合通信指在一组进程间通信,组内所有进程满足一定规则的发送和接收数据。MindSpore通过集合通信的方式进行并行训练过程中的数据传输工作,在Ascend芯片上它依赖于华为集合通信库`HCCL`完成,在GPU上它依赖于英伟达集合通信库`NCCL`完成。 - -### 同步模式 - -在同步模式下,所有的设备同时开始训练,并且当反向传播算法完成之后同步更新参数的取值。MindSpore目前采用的是同步训练模式。 - -## 数据并行 - -这个小节介绍了在MindSpore中`ParallelMode.DATA_PARALLEL`数据并行模式是如何工作的。 - -### 数据并行原理 - -![数据并行图解](./images/data_parallel.png) - -1. 环境依赖 - - 每次开始进行并行训练前,通过调用`mindspore.communication.init`接口初始化通信资源,并自动创建全局通信组`WORLD_COMM_GROUP`。 - -2. 数据分发(Data distribution) - - 数据并行的核心在于将数据集在样本维度拆分并下发到不同的卡上。在`mindspore.dataset`模块提供的所有数据集加载接口中都有`num_shards`和`shard_id`两个参数,它们用于将数据集拆分为多份并循环采样的方式,采集`batch`大小的数据到各自的卡上,当出现数据量不足的情况时将会从头开始采样。 - -3. 网络构图 - - 数据并行网络的书写方式与单机网络没有差别,这是因为在正反向传播(Forward propagation & Backward Propagation)过程中各卡的模型间是独立执行的,只是保持了相同的网络结构。唯一需要特别注意的是为了保证各卡间训练同步,相应的网络参数初始化值应当是一致的,在`DATA_PRALLEL`和`HYBRID_PARALLEL`模式下建议通过使能`parameter_broadcast`达到权重广播的目的;在`AUTO_PRALLEL`和`SEMI_AUTO_PARALLEL`模式下,框架内部会自动分析参数的并行度,并设置相应的随机数种子,保证在数据并行维度的设备上参数初始化值一致。 - -4. 梯度聚合(Gradient aggregation) - - 数据并行理论上应该实现和单机一致的训练效果,为了保证计算逻辑的一致性,在梯度计算完成后插入`AllReduce`算子实现各卡间的梯度聚合操作。这里我们设置了`mean`开关,用户可以选择是否要对求和后的梯度值进行求平均操作,也可以将其视为超参项,打开开关等价于学习率倍数缩小。 - -5. 参数更新(Parameter update) - - 因为引入了梯度聚合操作,所以各卡的模型会以相同的梯度值一起进入参数更新步骤。因此MindSpore实现的是一种同步数据并行训练方式。理论上最终每卡训练出来的模型是相同的,如果网络中含有在样本维度的归约类型操作,网络的输出可能会有所差别,这是由数据并行的切分性质决定的。 - -### 数据并行代码 - -1. 集合通信 - - - [management.py](https://gitee.com/mindspore/mindspore/blob/master/mindspore/communication/management.py):这个文件中涵盖了集合通信过程中常用的`helper`函数接口,例如获取集群数量和卡的序号等。当在Ascend芯片上执行时,框架会加载环境上的`libhccl.so`库文件,通过它来完成从Python层到底层的通信接口调用。 - - [comm_ops.py](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ops/operations/comm_ops.py):MindSpore将支持的集合通信操作都封装为算子的形式放在这个文件下,包括`AllReduce`、`AllGather`、`ReduceScatter`和`Broadcast`等。`PrimitiveWithInfer`中除了定义算子所需属性外,还包括构图过程中输入到输出的`shape`和`dtype`推导。 - -2. 
梯度聚合 - - - [grad_reducer.py](https://gitee.com/mindspore/mindspore/blob/master/mindspore/nn/wrap/grad_reducer.py):这个文件实现了梯度聚合的过程。对入参`grads`用`HyperMap`展开后插入`AllReduce`算子,这里采用的是全局通信组,用户也可以根据自己网络的需求仿照这个模块进行自定义开发。MindSpore中单机和分布式执行共用一套网络封装接口,在`Cell`内部通过`ParallelMode`来区分是否要对梯度做聚合操作,网络封装接口建议参考`TrainOneStepCell`代码实现。 - -## 自动并行 - -自动并行作为MindSpore的关键特性,用于实现自动的数据并行加模型并行的混合并行训练方式,旨在帮助用户以单机的脚本表达并行算法逻辑,降低分布式训练难度,提高算法研发效率,同时又能保持训练的高性能。这个小节介绍了在MindSpore中`ParallelMode.AUTO_PARALLEL`自动并行模式及`ParallelMode.SEMI_AUTO_PARALLEL`半自动并行模式是如何工作的。 - -### 自动并行原理 - -![自动并行图解](./images/auto_parallel.png) - -1. 分布式算子和张量排布模型 - - 在上面的架构图中,自动并行流程会对单机的正向计算图(ANF Graph)进行遍历,以分布式算子(Distributed Operator)为单位对张量进行切分建模,表示一个算子的输入输出张量如何分布到集群各个卡上(Tensor Layout)。这种模型充分地表达了张量和设备间的映射关系,用户无需感知模型各切片放到哪个设备上运行,框架会自动调度分配。 - - 为了得到张量的排布模型,每个算子都具有切分策略(Shard Strategy),它表示算子的各个输入在相应维度的切分情况。通常情况下只要满足以2为基、均匀分配的原则,张量的任意维度均可切分。以下图为例,这是一个三维矩阵乘(BatchMatMul)操作,它的切分策略由两个元组构成,分别表示`input`和`weight`的切分形式。其中元组中的元素与张量维度一一对应,`2^N`为切分份数,`1`表示不切。当我们想表示一个数据并行切分策略时,即`input`的`batch`维度切分,其他维度不切,可以表达为`strategy=((2^N, 1, 1),(1, 1, 1))`;当表示一个模型并行切分策略时,即`weight`的非`batch`维度切分,这里以`channel`维度切分为例,其他维度不切,可以表达为`strategy=((1, 1, 1),(1, 1, 2^N))`;当表示一个混合并行切分策略时,其中一种切分策略为`strategy=((2^N, 1, 1),(1, 1, 2^N))`。 - - ![算子切分定义](./images/operator_split.png) - - 依据切分策略,分布式算子中定义了推导算子输入张量和输出张量的排布模型的方法。这个排布模型由`device_matrix`,`tensor_shape`和`tensor map`组成,分别表示设备矩阵形状、张量形状、设备和张量维度间的映射关系。分布式算子会进一步根据张量排布模型判断是否要在图中中插入额外的计算、通信操作,以保证算子运算逻辑正确。 - -2. 
张量排布变换 - - 当前一个算子的输出张量模型和后一个算子的输入张量模型不一致时,就需要引入计算、通信操作的方式实现张量排布间的变化。自动并行流程引入了张量重排布算法(Tensor Redistribution),可以推导得到任意排布的张量间通信转换方式。下面三个样例表示公式`Z=(X×W)×V`的并行计算过程, 即两个二维矩阵乘操作,体现了不同并行方式间如何转换。 - 在样例一中,第一个数据并行矩阵乘的输出在行方向上存在切分,而第二个模型并行矩阵乘的输入需要全量张量,框架将会自动插入`AllGather`算子实现排布变换。 - - ![张量排布变换](./images/tensor_redistribution1.png) - - 在样例二中,第一个模型并行矩阵乘的输出在列方向上存在切分,而第二个数据并行矩阵乘的输入在行方向上存在切分,框架将会自动插入等价于集合通信中`AlltoAll`操作的通信算子实现排布变换。 - - ![张量排布变换](./images/tensor_redistribution2.png) - - 在样例三中,第一个混合并行矩阵乘的输出切分方式和第二个混合并行矩阵乘的输入切分方式一致,所以不需要引入重排布变换。但由于第二个矩阵乘操作中,两个输入的相关维度存在切分,所以需要插入`AllReduce`算子保证运算正确性。 - - ![张量排布变换](./images/tensor_redistribution3.png) - - 综上,1、2两点是自动并行实现的基础,总体来说这种分布式表达打破了数据并行和模型并行的边界,轻松实现混合并行。从脚本层面上,用户仅需构造单机网络,即可表达并行算法逻辑,框架将自动实现对整图切分。 - -3. 切分策略搜索算法 - - 当用户熟悉了算子的切分表达,并手动对算子配置切分策略,这就是`SEMI_AUTO_PARALLEL`半自动并行模式。这种方式对手动调优有帮助,但还是具有一定的调试难度,用户需要掌握并行原理,并根据网络结构、集群拓扑等计算分析得到高性能的并行方案。为了进一步帮助用户加速并行网络训练过程,在半自动并行模式的基础上,`AUTO_PARALLEL`自动并行模式引入了并行切分策略自动搜索的特性。自动并行围绕硬件平台构建相应的代价函数模型(Cost Model),计算出一定数据量、一定算子在不同切分策略下的计算开销(Computation Cost),内存开销(Memory Cost)及通信开销(Communication Cost)。然后通过动态规划算法(Dynamic Programming)或者递归规划算法(Recursive Programming),以单卡的内存上限为约束条件,高效地搜索出性能较优的切分策略。 - - 策略搜索这一步骤代替了用户手动指定模型切分,在短时间内可以得到较高性能的切分方案,极大降低了并行训练的使用门槛。 - -4. 分布式自动微分 - - 传统的手动模型切分除了需要关注正向网络通信还需要考虑网络反向的并行运算,MindSpore通过将通信操作包装为算子,并利用框架原有的自动微分操作自动生成通信算子反向,所以即便在进行分布式训练时,用户同样只需关注网络的前向传播,真正实现训练的全自动并行。 - -### 自动并行代码 - -1. 张量排布模型 - - [tensor_layout](https://gitee.com/mindspore/mindspore/tree/master/mindspore/ccsrc/frontend/parallel/tensor_layout):这个目录下包含了张量排布模型相关功能的定义及实现。其中`tensor_layout.h`中声明了一个张量排布模型需要具备的成员变量`tensor_map_origin_`,`tensor_shape_`和`device_arrangement_`等。在`tensor_redistribution.h`中声明了实现张量排布间`from_origin_`和`to_origin_`变换的相关方法,将推导得到的重排布操作保存在`operator_list_`中返回,并计算得到重排布所需的通信开销`comm_cost_`, 内存开销`memory_cost_`及计算开销`computation_cost_`。 - -2. 
分布式算子 - - [ops_info](https://gitee.com/mindspore/mindspore/tree/master/mindspore/ccsrc/frontend/parallel/ops_info):这个目录下包含了分布式算子的具体实现。在`operator_info.h`中定义了分布式算子实现的基类`OperatorInfo`,开发一个分布式算子需要继承于这个基类并显式实现相关的虚函数。其中`InferTensorInfo`,`InferTensorMap`和`InferDevMatrixShape`函数定义了推导该算子输入、输出张量排布模型的算法。`InferForwardCommunication`,`InferMirrorOps`等函数定义了切分该算子需要插入的额外计算、通信操作。`CheckStrategy`和`GenerateStrategies`函数定义了算子切分策略校验和生成。根据切分策略`SetCostUnderStrategy`将会产生该策略下分布式算子的并行开销值`operator_cost_`。 - -3. 策略搜索算法 - - [auto_parallel](https://gitee.com/mindspore/mindspore/tree/master/mindspore/ccsrc/frontend/parallel/auto_parallel):这个目录下实现了切分策略搜索的算法。`graph_costmodel.h`定义了构图信息,其中每个点表示一个算子`OperatorInfo`,有向边`edge_costmodel.h`表示算子的输入输出关系及重排布的代价。`operator_costmodel.h`中定义了每个算子的代价模型,包括计算代价、通信代价和内存代价。`dp_algorithm_costmodel.h`主要描述了动态规划算法的主要流程,由一系列图操作组成。在`costmodel.h`中定义了cost和图操作的数据结构。 - -4. 设备管理 - - [device_manager.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/frontend/parallel/device_manager.h):这个文件实现了集群设备通信组的创建及管理。其中设备矩阵模型由`device_matrix.h`定义,通信域由`group_manager.h`管理。 - -5. 整图切分 - - [step_auto_parallel.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/frontend/parallel/step_auto_parallel.h), [step_parallel.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/frontend/parallel/step_parallel.h):这两个文件包含了自动并行流程的核心实现。首先由`step_auto_parallel.h`调用策略搜索流程并产生分布式算子的`OperatorInfo`,然后在`step_parallel.h`中处理算子切分和张量重排布等流程,对单机计算图进行分布式改造。 - -6. 
通信算子反向 - - [grad_comm_ops.py](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ops/_grad/grad_comm_ops.py):这个文件定义了`AllReduce`和`AllGather`等通信算子的反向操作。 diff --git a/docs/note/source_zh_cn/design/mindspore/images/analyser_class_profiler.png b/docs/note/source_zh_cn/design/mindspore/images/analyser_class_profiler.png deleted file mode 100644 index ff70c7eedd250437c1c01c17731cfde1d85b62b4..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/analyser_class_profiler.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/architecture.eddx b/docs/note/source_zh_cn/design/mindspore/images/architecture.eddx deleted file mode 100644 index dca4b7c635ab5e4aef6bc4e2a58e3f007d9c5e9a..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/architecture.eddx and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/architecture.png b/docs/note/source_zh_cn/design/mindspore/images/architecture.png deleted file mode 100644 index 7b0a0228323b226cc956a3db369f083c91b5e23b..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/architecture.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/auto_parallel.png b/docs/note/source_zh_cn/design/mindspore/images/auto_parallel.png deleted file mode 100644 index d0135541eb76cedfcb22f2eb3e470a9d5d913957..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/auto_parallel.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/context_profiler.png b/docs/note/source_zh_cn/design/mindspore/images/context_profiler.png deleted file mode 100644 index be41e5840f675b214da861a3ddb4286c91f82e36..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/context_profiler.png and /dev/null differ 
diff --git a/docs/note/source_zh_cn/design/mindspore/images/data_parallel.png b/docs/note/source_zh_cn/design/mindspore/images/data_parallel.png deleted file mode 100644 index a92c82aa64615b398e83b9bc2cf0aa2c5db9f904..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/data_parallel.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/ir/cf.dot b/docs/note/source_zh_cn/design/mindspore/images/ir/cf.dot deleted file mode 100644 index 9da78b45beb7ea56365a300e601c79af4a55e130..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindspore/images/ir/cf.dot +++ /dev/null @@ -1,183 +0,0 @@ -digraph mindspore { -compound=true -subgraph cluster_0x8b8cc30{ -id=cluster_0x8b8cc30 -label="fibonacci[managed]" -fontname="Courier New" -node0x8bde4b0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]0)
>,] -node0x8bee780_0[fontname="Courier New",shape=plaintext,label=< - - -
0
CNode([CNode]1)
>,] -node0x8bee900_0[fontname="Courier New",shape=plaintext,label=< - - -
0123
CNode([CNode]2)
>,] -node0x8b702a0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]54)
>,] -node0x8b6db30_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]37)
>,] -node0x8bc0bb0_0[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x8b768b0_1[fontname="Courier New",shape=plaintext,label=<
Primitive
switch
>,] -node0x8b6c9f0_2[fontname="Courier New",shape=oval,label="✓fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8b91500",] -node0x8bd9410_3[fontname="Courier New",shape=plaintext,label=<
Primitive
Partial
>,] -node0x8b85110_4[fontname="Courier New",shape=oval,label="✗fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8bda550",] -node0x8b7bab0_5[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b76120_29[fontname="Courier New",shape=plaintext,label=<
PrimitivePy
scalar_lt
>,] -node0x8b7bab0_30[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b90f50_31[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x8b8cc30[shape=plaintext label=<
parameters
n
>,];} -subgraph cluster_0x8bda550{ -id=cluster_0x8bda550 -label="✗fibonacci[managed]" -fontname="Courier New" -node0x8b6acd0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]3)
>,] -node0x8b6dff0_0[fontname="Courier New",shape=plaintext,label=< - - -
0
CNode([CNode]4)
>,] -node0x8b7d410_0[fontname="Courier New",shape=plaintext,label=< - - -
0123
CNode([CNode]5)
>,] -node0x8b83a80_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]55)
>,] -node0x8b8c2a0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]35)
>,] -node0x8b62c70_6[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x8bbe5f0_7[fontname="Courier New",shape=plaintext,label=<
Primitive
switch
>,] -node0x8b8a0f0_8[fontname="Courier New",shape=oval,label="✓✗fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8b64c50",] -node0x8b8dbb0_9[fontname="Courier New",shape=plaintext,label=<
Primitive
Partial
>,] -node0x8bc0680_10[fontname="Courier New",shape=oval,label="✗✗fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8bedfe0",] -node0x8b76290_11[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b90c20_24[fontname="Courier New",shape=plaintext,label=<
PrimitivePy
scalar_eq
>,] -node0x8b76290_25[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b7da70_26[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x8bda550[shape=plaintext label=<
parameters
n
>,];} -subgraph cluster_0x8bedfe0{ -id=cluster_0x8bedfe0 -label="✗✗fibonacci[managed]" -fontname="Courier New" -node0x8b8e4a0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]6)
>,] -node0x8bb9b70_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]33)
>,] -node0x8b7d610_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]8)
>,] -node0x8beae20_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]31)
>,] -node0x8b76cd0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]11)
>,] -node0x8b849b0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]31)
>,] -node0x8b85200_12[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x8b84310_13[fontname="Courier New",shape=plaintext,label=<
PrimitivePy
scalar_add
>,] -node0x8bc14b0_14[fontname="Courier New",shape=oval,label="fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8b8cc30",] -node0x8b8d2e0_15[fontname="Courier New",shape=plaintext,label=<
PrimitivePy
scalar_sub
>,] -node0x8bbc810_16[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b8d3d0_17[fontname="Courier New",shape=plaintext,label=<
Int32Imm
2
>,] -node0x8bd5920_18[fontname="Courier New",shape=oval,label="fibonacci",style=filled,fillcolor=palegreen,URL="#cluster_0x8b8cc30",] -node0x8bc15a0_19[fontname="Courier New",shape=plaintext,label=<
PrimitivePy
scalar_sub
>,] -node0x8bbc810_20[fontname="Courier New",shape=octagon,label="n",style=filled,fillcolor=paleturquoise,] -node0x8b83990_21[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x8bedfe0[shape=plaintext label=<
parameters
n
>,];} -subgraph cluster_0x8b64c50{ -id=cluster_0x8b64c50 -label="✓✗fibonacci[managed]" -fontname="Courier New" -node0x8be8e20_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]15)
>,] -node0x8bd5440_22[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x8b89ee0_23[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x8b64c50[shape=plaintext label=<
parameters
>,];} -subgraph cluster_0x8b91500{ -id=cluster_0x8b91500 -label="✓fibonacci[managed]" -fontname="Courier New" -node0x8bdacb0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]18)
>,] -node0x8b7d900_27[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x8bb9e90_28[fontname="Courier New",shape=plaintext,label=<
Int32Imm
0
>,] -parameters_0x8b91500[shape=plaintext label=<
parameters
>,];} -node0x8bc0bb0_0:core->node0x8bde4b0_0:0[arrowhead=vee,style=dashed] -node0x8bee780_0:core->node0x8bde4b0_0:1[arrowhead=vee,] -node0x8bee900_0:core->node0x8bee780_0:0[arrowhead=vee,] -node0x8b768b0_1:core->node0x8bee900_0:0[arrowhead=vee,style=dashed] -node0x8b6db30_0:core->node0x8bee900_0:1[arrowhead=vee,] -node0x8b6c9f0_2->node0x8bee900_0:2[arrowhead=vee,] -node0x8b6c9f0_2->node0x8bdacb0_0[lhead=cluster_0x8b91500,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8b702a0_0:core->node0x8bee900_0:3[arrowhead=vee,] -node0x8bd9410_3:core->node0x8b702a0_0:0[arrowhead=vee,style=dashed] -node0x8b85110_4->node0x8b702a0_0:1[arrowhead=vee,] -node0x8b85110_4->node0x8b6acd0_0[lhead=cluster_0x8bda550,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8b7bab0_5->node0x8b702a0_0:2[arrowhead=vee,] -node0x8b62c70_6:core->node0x8b6acd0_0:0[arrowhead=vee,style=dashed] -node0x8b6dff0_0:core->node0x8b6acd0_0:1[arrowhead=vee,] -node0x8b7d410_0:core->node0x8b6dff0_0:0[arrowhead=vee,] -node0x8bbe5f0_7:core->node0x8b7d410_0:0[arrowhead=vee,style=dashed] -node0x8b8c2a0_0:core->node0x8b7d410_0:1[arrowhead=vee,] -node0x8b8a0f0_8->node0x8b7d410_0:2[arrowhead=vee,] -node0x8b8a0f0_8->node0x8be8e20_0[lhead=cluster_0x8b64c50,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8b83a80_0:core->node0x8b7d410_0:3[arrowhead=vee,] -node0x8b8dbb0_9:core->node0x8b83a80_0:0[arrowhead=vee,style=dashed] -node0x8bc0680_10->node0x8b83a80_0:1[arrowhead=vee,] -node0x8bc0680_10->node0x8b8e4a0_0[lhead=cluster_0x8bedfe0,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8b76290_11->node0x8b83a80_0:2[arrowhead=vee,] -node0x8b85200_12:core->node0x8b8e4a0_0:0[arrowhead=vee,style=dashed] -node0x8bb9b70_0:core->node0x8b8e4a0_0:1[arrowhead=vee,] -node0x8b84310_13:core->node0x8bb9b70_0:0[arrowhead=vee,style=dashed] -node0x8b76cd0_0:core->node0x8bb9b70_0:1[arrowhead=vee,] -node0x8b7d610_0:core->node0x8bb9b70_0:2[arrowhead=vee,] -node0x8bc14b0_14->node0x8b7d610_0:0[arrowhead=vee,style=dashed] 
-node0x8bc14b0_14->node0x8bde4b0_0[lhead=cluster_0x8b8cc30,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8beae20_0:core->node0x8b7d610_0:1[arrowhead=vee,] -node0x8b8d2e0_15:core->node0x8beae20_0:0[arrowhead=vee,style=dashed] -node0x8bbc810_16->node0x8beae20_0:1[arrowhead=vee,] -node0x8b8d3d0_17:core->node0x8beae20_0:2[arrowhead=vee,] -node0x8bd5920_18->node0x8b76cd0_0:0[arrowhead=vee,style=dashed] -node0x8bd5920_18->node0x8bde4b0_0[lhead=cluster_0x8b8cc30,dir=both,arrowhead=dot,style=filled,color=blue] -node0x8b849b0_0:core->node0x8b76cd0_0:1[arrowhead=vee,] -node0x8bc15a0_19:core->node0x8b849b0_0:0[arrowhead=vee,style=dashed] -node0x8bbc810_20->node0x8b849b0_0:1[arrowhead=vee,] -node0x8b83990_21:core->node0x8b849b0_0:2[arrowhead=vee,] -node0x8bd5440_22:core->node0x8be8e20_0:0[arrowhead=vee,style=dashed] -node0x8b89ee0_23:core->node0x8be8e20_0:1[arrowhead=vee,] -node0x8b90c20_24:core->node0x8b8c2a0_0:0[arrowhead=vee,style=dashed] -node0x8b76290_25->node0x8b8c2a0_0:1[arrowhead=vee,] -node0x8b7da70_26:core->node0x8b8c2a0_0:2[arrowhead=vee,] -node0x8b7d900_27:core->node0x8bdacb0_0:0[arrowhead=vee,style=dashed] -node0x8bb9e90_28:core->node0x8bdacb0_0:1[arrowhead=vee,] -node0x8b76120_29:core->node0x8b6db30_0:0[arrowhead=vee,style=dashed] -node0x8b7bab0_30->node0x8b6db30_0:1[arrowhead=vee,] -node0x8b90f50_31:core->node0x8b6db30_0:2[arrowhead=vee,] -} diff --git a/docs/note/source_zh_cn/design/mindspore/images/ir/cf.png b/docs/note/source_zh_cn/design/mindspore/images/ir/cf.png deleted file mode 100644 index 196be66c223022c34fe34848d30c10985efa94c7..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/ir/cf.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/ir/closure.dot b/docs/note/source_zh_cn/design/mindspore/images/ir/closure.dot deleted file mode 100644 index fd3d402bf48b3410e2d92964c1e1ef5e289dda40..0000000000000000000000000000000000000000 --- 
a/docs/note/source_zh_cn/design/mindspore/images/ir/closure.dot +++ /dev/null @@ -1,93 +0,0 @@ -digraph mindspore { -compound=true -subgraph cluster_0x19e608f0{ -id=cluster_0x19e608f0 -label="ms_closure[managed]" -fontname="Courier New" -node0x19269490_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]0)
>,] -node0x1976cf00_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]1)
>,] -node0x1963d630_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode(out2)
>,] -node0x196d87f0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode(closure)
>,] -node0x196c2270_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode(out1)
>,] -node0x19e328a0_0[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x19e5e7c0_1[fontname="Courier New",shape=plaintext,label=<
DoSignaturePrimitive
S-Prim-make_tuple
>,] -node0x19b6a3d0_2[fontname="Courier New",shape=plaintext,label=<
Int32Imm
2
>,] -node0x19e68e20_3[fontname="Courier New",shape=oval,label="func_outer[func_outer]",style=filled,fillcolor=palegreen,URL="#cluster_0x19e63830",] -node0x19e38e00_4[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -node0x19e23c10_5[fontname="Courier New",shape=plaintext,label=<
Int32Imm
2
>,] -node0x19e1c020_14[fontname="Courier New",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x19e608f0[shape=plaintext label=<
parameters
>,];} -subgraph cluster_0x19e63830{ -id=cluster_0x19e63830 -label="func_outer[managed]" -fontname="Courier New" -node0x19e69550_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]5)
>,] -node0x19e68f90_6[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x19e69100_7[fontname="Courier New",shape=oval,label="func_inner",style=filled,fillcolor=palegreen,URL="#cluster_0x19e64130",] -node0x19e035b0_12[fontname="Courier New",shape=octagon,label="a",style=filled,fillcolor=paleturquoise,] -node0x19e036b0_13[fontname="Courier New",shape=octagon,label="b",style=filled,fillcolor=paleturquoise,] -parameters_0x19e63830[shape=plaintext label=<
parameters
a
b
>,];} -subgraph cluster_0x19e64130{ -id=cluster_0x19e64130 -label="func_inner[managed]" -fontname="Courier New" -node0x19e68c80_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]6)
>,] -node0x19e68ae0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]7)
>,] -node0x19e682c0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]8)
>,] -node0x19e50a00_8[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x19c7ced0_9[fontname="Courier New",shape=plaintext,label=<
DoSignaturePrimitive
S-Prim-add
>,] -node0x19e645e0_10[fontname="Courier New",shape=octagon,label="c",style=filled,fillcolor=paleturquoise,] -node0x19e68790_11[fontname="Courier New",shape=plaintext,label=<
DoSignaturePrimitive
S-Prim-add
>,] -parameters_0x19e64130[shape=plaintext label=<
parameters
c
>,];} -node0x19e328a0_0:core->node0x19269490_0:0[arrowhead=vee,style=dashed] -node0x1976cf00_0:core->node0x19269490_0:1[arrowhead=vee,] -node0x19e5e7c0_1:core->node0x1976cf00_0:0[arrowhead=vee,style=dashed] -node0x196c2270_0:core->node0x1976cf00_0:1[arrowhead=vee,] -node0x1963d630_0:core->node0x1976cf00_0:2[arrowhead=vee,] -node0x196d87f0_0:core->node0x1963d630_0:0[arrowhead=vee,style=dashed] -node0x19b6a3d0_2:core->node0x1963d630_0:1[arrowhead=vee,] -node0x19e68e20_3->node0x196d87f0_0:0[arrowhead=vee,style=dashed] -node0x19e68e20_3->node0x19e69550_0[lhead=cluster_0x19e63830,dir=both,arrowhead=dot,style=filled,color=blue] -node0x19e38e00_4:core->node0x196d87f0_0:1[arrowhead=vee,] -node0x19e23c10_5:core->node0x196d87f0_0:2[arrowhead=vee,] -node0x19e68f90_6:core->node0x19e69550_0:0[arrowhead=vee,style=dashed] -node0x19e69100_7->node0x19e69550_0:1[arrowhead=vee,] -node0x19e69100_7->node0x19e68c80_0[lhead=cluster_0x19e64130,dir=both,arrowhead=dot,style=filled,color=blue] -node0x19e50a00_8:core->node0x19e68c80_0:0[arrowhead=vee,style=dashed] -node0x19e68ae0_0:core->node0x19e68c80_0:1[arrowhead=vee,] -node0x19c7ced0_9:core->node0x19e68ae0_0:0[arrowhead=vee,style=dashed] -node0x19e682c0_0:core->node0x19e68ae0_0:1[arrowhead=vee,] -node0x19e645e0_10->node0x19e68ae0_0:2[arrowhead=vee,] -node0x19e68790_11:core->node0x19e682c0_0:0[arrowhead=vee,style=dashed] -node0x19e035b0_12->node0x19e682c0_0:1[arrowhead=vee,] -node0x19e036b0_13->node0x19e682c0_0:2[arrowhead=vee,] -node0x196d87f0_0:core->node0x196c2270_0:0[arrowhead=vee,style=dashed] -node0x19e1c020_14:core->node0x196c2270_0:1[arrowhead=vee,] -} diff --git a/docs/note/source_zh_cn/design/mindspore/images/ir/closure.png b/docs/note/source_zh_cn/design/mindspore/images/ir/closure.png deleted file mode 100644 index 6a618dd46d4bceeabb0b68ddbd187babc24a16aa..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/ir/closure.png and /dev/null differ diff --git 
a/docs/note/source_zh_cn/design/mindspore/images/ir/hof.dot b/docs/note/source_zh_cn/design/mindspore/images/ir/hof.dot deleted file mode 100644 index c0102eef9d47393572e241610f6dd1a2c303fb57..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindspore/images/ir/hof.dot +++ /dev/null @@ -1,85 +0,0 @@ -digraph mindspore { -compound=true -subgraph cluster_0x1b3c23b0{ -id=cluster_0x1b3c23b0 -label="hof[managed]" -fontname="Courier New" -node0x1b32ae50_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]0)
>,] -node0x1b064930_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode(res)
>,] -node0x1b3c0040_0[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x1b3bfbf0_1[fontname="Courier New",shape=oval,label="g",style=filled,fillcolor=palegreen,URL="#cluster_0x1b3be6c0",] -node0x1b3bfed0_2[fontname="Courier New",shape=oval,label="f",style=filled,fillcolor=palegreen,URL="#cluster_0x1b3c50c0",] -node0x1b3c6870_3[fontname="Courier New",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -parameters_0x1b3c23b0[shape=plaintext label=<
parameters
x
>,];} -subgraph cluster_0x1b3c50c0{ -id=cluster_0x1b3c50c0 -label="f[managed]" -fontname="Courier New" -node0x1ab4e190_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]1)
>,] -node0x1ab61220_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]2)
>,] -node0x1b3c59e0_4[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x1b3bf5e0_5[fontname="Courier New",shape=plaintext,label=<
DoSignaturePrimitive
S-Prim-add
>,] -node0x1b348630_6[fontname="Courier New",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -node0x1b3c60f0_7[fontname="Courier New",shape=plaintext,label=<
Int32Imm
3
>,] -parameters_0x1b3c50c0[shape=plaintext label=<
parameters
x
>,];} -subgraph cluster_0x1b3be6c0{ -id=cluster_0x1b3be6c0 -label="g[managed]" -fontname="Courier New" -node0x1b3bfa50_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]4)
>,] -node0x1a9fb8c0_0[fontname="Courier New",shape=plaintext,label=< - - -
012
CNode([CNode]5)
>,] -node0x1a39f7a0_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]6)
>,] -node0x1a4daa20_0[fontname="Courier New",shape=plaintext,label=< - - -
01
CNode([CNode]7)
>,] -node0x1b3adfd0_8[fontname="Courier New",shape=plaintext,label=<
Primitive
return
>,] -node0x1b3c2920_9[fontname="Courier New",shape=plaintext,label=<
DoSignaturePrimitive
S-Prim-mul
>,] -node0x1b3120e0_10[fontname="Courier New",shape=octagon,label="function",style=filled,fillcolor=paleturquoise,] -node0x1b3121e0_11[fontname="Courier New",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -node0x1b3120e0_12[fontname="Courier New",shape=octagon,label="function",style=filled,fillcolor=paleturquoise,] -node0x1b3121e0_13[fontname="Courier New",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -parameters_0x1b3be6c0[shape=plaintext label=<
parameters
function
x
>,];} -node0x1b3c0040_0:core->node0x1b32ae50_0:0[arrowhead=vee,style=dashed] -node0x1b064930_0:core->node0x1b32ae50_0:1[arrowhead=vee,] -node0x1b3bfbf0_1->node0x1b064930_0:0[arrowhead=vee,style=dashed] -node0x1b3bfbf0_1->node0x1b3bfa50_0[lhead=cluster_0x1b3be6c0,dir=both,arrowhead=dot,style=filled,color=blue] -node0x1b3bfed0_2->node0x1b064930_0:1[arrowhead=vee,] -node0x1b3bfed0_2->node0x1ab4e190_0[lhead=cluster_0x1b3c50c0,dir=both,arrowhead=dot,style=filled,color=blue] -node0x1b3c6870_3->node0x1b064930_0:2[arrowhead=vee,] -node0x1b3c59e0_4:core->node0x1ab4e190_0:0[arrowhead=vee,style=dashed] -node0x1ab61220_0:core->node0x1ab4e190_0:1[arrowhead=vee,] -node0x1b3bf5e0_5:core->node0x1ab61220_0:0[arrowhead=vee,style=dashed] -node0x1b348630_6->node0x1ab61220_0:1[arrowhead=vee,] -node0x1b3c60f0_7:core->node0x1ab61220_0:2[arrowhead=vee,] -node0x1b3adfd0_8:core->node0x1b3bfa50_0:0[arrowhead=vee,style=dashed] -node0x1a9fb8c0_0:core->node0x1b3bfa50_0:1[arrowhead=vee,] -node0x1b3c2920_9:core->node0x1a9fb8c0_0:0[arrowhead=vee,style=dashed] -node0x1a4daa20_0:core->node0x1a9fb8c0_0:1[arrowhead=vee,] -node0x1a39f7a0_0:core->node0x1a9fb8c0_0:2[arrowhead=vee,] -node0x1b3120e0_10->node0x1a39f7a0_0:0[arrowhead=vee,style=dashed] -node0x1b3121e0_11->node0x1a39f7a0_0:1[arrowhead=vee,] -node0x1b3120e0_12->node0x1a4daa20_0:0[arrowhead=vee,style=dashed] -node0x1b3121e0_13->node0x1a4daa20_0:1[arrowhead=vee,] -} diff --git a/docs/note/source_zh_cn/design/mindspore/images/ir/hof.png b/docs/note/source_zh_cn/design/mindspore/images/ir/hof.png deleted file mode 100644 index b7aed07a68798c31561de9461c94814ecec17d33..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/ir/hof.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/ir/ir.dot b/docs/note/source_zh_cn/design/mindspore/images/ir/ir.dot deleted file mode 100644 index 50faab23bdcd63c5199303cb8fdcbef1ccb3163c..0000000000000000000000000000000000000000 --- 
a/docs/note/source_zh_cn/design/mindspore/images/ir/ir.dot +++ /dev/null @@ -1,73 +0,0 @@ -digraph mindspore { -compound=true -subgraph cluster_0x55c9669c3c70{ -id=cluster_0x55c9669c3c70 -label="test_f" -fontname="HuaweiSans" -node0x55c9669c6cc0_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
01
CNode([CNode]0)
>,] -node0x55c9669c66a0_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
012
CNode(c)
>,] -node0x55c9669c6960_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
012
CNode([CNode]1)
>,] -node0x55c9669c58a0_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
012
CNode(b)
>,] -node0x55c9669c4fb0_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
012
CNode(a)
>,] -node0x55c9669c6b60_0[fontname="HuaweiSans",shape=plaintext,label=<
Primitive
return
>,] -node0x55c9669c9720_1[fontname="HuaweiSans",shape=plaintext,label=<
MultitypeFuncGraph
mul
>,] -node0x55c9669c9dd0_2[fontname="HuaweiSans",shape=oval,label="func",style=filled,fillcolor="palegreen",URL="#cluster_0x55c9669c7310",] -node0x55c9669c9800_3[fontname="HuaweiSans",shape=plaintext,label=<
MultitypeFuncGraph
add
>,] -node0x55c9669c4430_4[fontname="HuaweiSans",shape=octagon,label="y",style=filled,fillcolor=paleturquoise,] -node0x55c9669c9e80_5[fontname="HuaweiSans",shape=plaintext,label=<
MultitypeFuncGraph
sub
>,] -node0x55c9669c3fc0_6[fontname="HuaweiSans",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -node0x55c96692eeb0_7[fontname="HuaweiSans",shape=plaintext,label=<
Int32Imm
1
>,] -parameters_0x55c9669c3c70[shape=plaintext label=<
parameters
x
y
>,];} -subgraph cluster_0x55c9669c7310{ -id=cluster_0x55c9669c7310 -label="func" -fontname="HuaweiSans" -node0x55c9669cc740_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
01
CNode([CNode]7)
>,] -node0x55c9669cc5c0_0[fontname="HuaweiSans",shape=plaintext,label=< - - -
012
CNode([CNode]8)
>,] -node0x55c9669cafc0_8[fontname="HuaweiSans",shape=plaintext,label=<
Primitive
return
>,] -node0x55c9669cc930_9[fontname="HuaweiSans",shape=plaintext,label=<
MultitypeFuncGraph
div
>,] -node0x55c9669cab20_10[fontname="HuaweiSans",shape=octagon,label="x",style=filled,fillcolor=paleturquoise,] -node0x55c9669cacf0_11[fontname="HuaweiSans",shape=octagon,label="y",style=filled,fillcolor=paleturquoise,] -parameters_0x55c9669c7310[shape=plaintext label=<
parameters
x
y
>,];} -node0x55c9669c6b60_0:core->node0x55c9669c6cc0_0:0[arrowhead=vee,style=dashed] -node0x55c9669c66a0_0:core->node0x55c9669c6cc0_0:1[arrowhead=vee,] -node0x55c9669c9720_1:core->node0x55c9669c66a0_0:0[arrowhead=vee,style=dashed] -node0x55c9669c58a0_0:core->node0x55c9669c66a0_0:1[arrowhead=vee,] -node0x55c9669c6960_0:core->node0x55c9669c66a0_0:2[arrowhead=vee,] -node0x55c9669c9dd0_2->node0x55c9669c6960_0:0[arrowhead=vee,style=dashed] -node0x55c9669c9dd0_2->node0x55c9669cc740_0[lhead=cluster_0x55c9669c7310,dir=both,arrowhead=dot,style=filled,color="#444444"] -node0x55c9669c4fb0_0:core->node0x55c9669c6960_0:1[arrowhead=vee,] -node0x55c9669c58a0_0:core->node0x55c9669c6960_0:2[arrowhead=vee,] -node0x55c9669c9800_3:core->node0x55c9669c58a0_0:0[arrowhead=vee,style=dashed] -node0x55c9669c4fb0_0:core->node0x55c9669c58a0_0:1[arrowhead=vee,] -node0x55c9669c4430_4->node0x55c9669c58a0_0:2[arrowhead=vee,] -node0x55c9669c9e80_5:core->node0x55c9669c4fb0_0:0[arrowhead=vee,style=dashed] -node0x55c9669c3fc0_6->node0x55c9669c4fb0_0:1[arrowhead=vee,] -node0x55c96692eeb0_7:core->node0x55c9669c4fb0_0:2[arrowhead=vee,] -node0x55c9669cafc0_8:core->node0x55c9669cc740_0:0[arrowhead=vee,style=dashed] -node0x55c9669cc5c0_0:core->node0x55c9669cc740_0:1[arrowhead=vee,] -node0x55c9669cc930_9:core->node0x55c9669cc5c0_0:0[arrowhead=vee,style=dashed] -node0x55c9669cab20_10->node0x55c9669cc5c0_0:1[arrowhead=vee,] -node0x55c9669cacf0_11->node0x55c9669cc5c0_0:2[arrowhead=vee,] -} diff --git a/docs/note/source_zh_cn/design/mindspore/images/ir/ir.png b/docs/note/source_zh_cn/design/mindspore/images/ir/ir.png deleted file mode 100644 index 364c5de500557324c8af86e4d1c5bc0a8f347bf5..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/ir/ir.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/module_profiler.png b/docs/note/source_zh_cn/design/mindspore/images/module_profiler.png deleted file mode 100644 index 
df98237b1af57778813570cf90c0e3bd65ec38fd..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/module_profiler.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/operator_split.png b/docs/note/source_zh_cn/design/mindspore/images/operator_split.png deleted file mode 100644 index 4063170990c6816884361f195db5851cfbdf932e..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/operator_split.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/parser_module_profiler.png b/docs/note/source_zh_cn/design/mindspore/images/parser_module_profiler.png deleted file mode 100644 index a10d934559a7754ff605448b34c37015a4821d95..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/parser_module_profiler.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/proposer_class_profiler.png b/docs/note/source_zh_cn/design/mindspore/images/proposer_class_profiler.png deleted file mode 100644 index 9d83df184b46520f245d238ed9c08b0fddb9660f..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/proposer_class_profiler.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/proposer_module_profiler.png b/docs/note/source_zh_cn/design/mindspore/images/proposer_module_profiler.png deleted file mode 100644 index 33dff5fe12ca3e75df30e79e5d106c5976e00de3..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/proposer_module_profiler.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/tensor_redistribution1.png b/docs/note/source_zh_cn/design/mindspore/images/tensor_redistribution1.png deleted file mode 100644 index e4aa944dffa47a82fdde94a2d9e2cf4d81b8752e..0000000000000000000000000000000000000000 
Binary files a/docs/note/source_zh_cn/design/mindspore/images/tensor_redistribution1.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/tensor_redistribution2.png b/docs/note/source_zh_cn/design/mindspore/images/tensor_redistribution2.png deleted file mode 100644 index 4ed244d0a7d85a1eb46f80546810bc7147b07df3..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/tensor_redistribution2.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/tensor_redistribution3.png b/docs/note/source_zh_cn/design/mindspore/images/tensor_redistribution3.png deleted file mode 100644 index 74da52f7d4940fdbf4efcf03073746a330bf786e..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/tensor_redistribution3.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/images/time_order_profiler.png b/docs/note/source_zh_cn/design/mindspore/images/time_order_profiler.png deleted file mode 100644 index c879bf2391ca33f7480c0f95ec8592f1e74003fc..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/design/mindspore/images/time_order_profiler.png and /dev/null differ diff --git a/docs/note/source_zh_cn/design/mindspore/mindir.md b/docs/note/source_zh_cn/design/mindspore/mindir.md deleted file mode 100644 index 6f5a4f27483018cf82d7fbaadd4477cdffbd17a7..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindspore/mindir.md +++ /dev/null @@ -1,324 +0,0 @@ -# MindSpore IR(MindIR) - -`Linux` `Windows` `Ascend` `GPU` `框架开发` `中级` `模型开发` `高级` `贡献者` - - - -- [MindSpore IR(MindIR)](#mindspore-irmindir) - - [简介](#简介) - - [文法定义](#文法定义) - - [示例](#示例) - - [如何保存IR](#如何保存ir) - - [IR文件内容介绍](#ir文件内容介绍) - - [函数式语义](#函数式语义) - - [高阶函数](#高阶函数) - - [控制流](#控制流) - - [自由变量和闭包](#自由变量和闭包) - - [参考文献](#参考文献) - - - - - -## 简介 - 
-中间表示(IR)是程序编译过程中介于源语言和目标语言之间的程序表示,以方便编译器进行程序分析和优化,因此IR的设计需要考虑从源语言到目标语言的转换难度,同时考虑程序分析和优化的易用性和性能。 - -MindIR是一种基于图表示的函数式IR,其最核心的目的是服务于自动微分变换。自动微分采用的是基于函数式编程框架的变换方法,因此IR采用了接近于ANF函数式的语义。此外,借鉴Sea of Nodes[1]和Thorin[2]的优秀设计,采用了一种基于显性依赖图的表示方式。关于ANF-IR的具体介绍,可以参考[MindSpore IR文法定义](#id2)。 - -在图模式`context.set_context(mode=context.GRAPH_MODE)`下运行用MindSpore编写的模型时,若配置中设置了`context.set_context(save_graphs=True)`,运行时会输出一些图编译过程中生成的一些中间文件,我们称为IR文件。当前主要有三种格式的IR文件: - -- ir后缀结尾的IR文件:一种比较直观易懂的以文本格式描述模型结构的文件,可以直接用文本编辑软件查看。在下文中我们也将介绍此文件的查看方式。 -- dat后缀结尾的IR文件:一种相对于ir后缀结尾的文件格式定义更为严谨的描述模型结构的文件,包含的内容更为丰富,可以直接用文本编辑软件查看。 -- dot后缀结尾的IR文件:描述了不同节点间的拓扑关系,可以用[graphviz](http://graphviz.org)将此文件作为输入生成图片,方便用户直观地查看模型结构。对于算子比较多的模型,推荐使用可视化组件[MindInsight](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/dashboard.html#id5)对计算图进行可视化。 - -## 文法定义 - -ANF是函数式编程中常用且简洁的中间表示,其文法定义如下所示: - -```text - ::= NUMBER | STRING | VAR | BOOLEAN | PRIMOP - | (lambda (VAR …) ) - ::= ( …) - | (if ) - ::= (let ([VAR ]) ) | | - -``` - -ANF中表达式分为原子表达式(aexp)和复合表达式(cexp),原子表达式表示一个常数值或一个变量或一个匿名函数;复合表达式由多个原子表达式复合组成,表示一个匿名函数或原语函数调用,组合的第一个输入是调用的函数,其余输入是调用的参数。 - -MindIR文法继承于ANF,其定义如下所示: - -```text - ::= | - ::= Parameter - ::= Scalar | Named | Tensor | Type | Shape - | Primitive | MetaFuncGraph | FuncGraph - ::= ( …) - ::= | -``` - -MindIR中的ANode对应于ANF的原子表达式,ANode有两个子类分别为ValueNode和ParameterNode。ValueNode表示常数节点,可承载一个常数值(标量、符号、张量、类型、维度等),也可以是一个原语函数(Primitive)或一个元函数(MetaFuncGraph)或一个普通函数(FuncGraph),因为在函数式编程中函数定义本身也是一个值。ParameterNode是参数节点,表示函数的形参。 - -MindIR中CNode对应于ANF的复合表达式,表示一次函数调用。 - -在MindSpore自动微分时,会计算ParameterNode和CNode的梯度贡献,并返回最终ParameterNode的梯度,而不计算ValueNode的梯度。 - -## 示例 - -下面以一段程序作为示例,对比理解MindIR。 - -```python -def func(x, y): - return x / y - -@ms_function -def test_f(x, y): - a = x - 1 - b = a + y - c = b * func(a, b) - return c -``` - -这段Python代码对应的ANF表达为: - -```python -lambda (x, y) - let a = x - 1 in - let b = a + y in - let func = lambda (x, y) - let ret = x / y in - ret end in - let %1 = func(a, b) in - let 
c = b * %1 in - c end -``` - -对应的MindIR为[ir.dot](https://gitee.com/mindspore/docs/blob/master/docs/note/source_zh_cn/design/mindspore/images/ir/ir.dot): - -![image](./images/ir/ir.png) - -在MindIR中,一个函数图(FuncGraph)表示一个普通函数的定义,函数图一般由ParameterNode、ValueNode和CNode组成有向无环图,可以清晰地表达出从参数到返回值的计算过程。在上图中可以看出,python代码中两个函数`test_f`和`func`转换成了两个函数图,其参数`x`和`y`转换为函数图的ParameterNode,每一个表达式转换为一个CNode。CNode的第一个输入链接着调用的函数,例如图中的`add`、`func`、`return`。值得注意的是这些节点均是`ValueNode`,因为它们被理解为常数函数值。CNode的其他输入链接这调用的参数,参数值可以来自于ParameterNode、ValueNode和其他CNode。 - -在ANF中每个表达式都用let表达式绑定为一个变量,通过对变量的引用来表示对表达式输出的依赖,而在MindIR中每个表达式都绑定为一个节点,通过节点与节点之间的有向边表示依赖关系。 - -## 如何保存IR - -通过`context.set_context(save_graphs=True)`来保存各个编译阶段的中间代码。被保存的中间代码有两种格式,一个是后缀名为`.ir`的文本格式,一个是后缀名为`.dot`的图形化格式。当网络规模不大时,建议使用更直观的图形化格式来查看,当网络规模较大时建议使用更高效的文本格式来查看。 - -DOT文件可以通过graphviz转换为图片格式来查看,例如将dot转换为png的命令是`dot -Tpng *.dot -o *.png`。 - -在训练脚本`train.py`中,我们在`set_context`函数中添加如下代码,运行训练脚本时,MindSpore会自动将编译过程中产生的IR文件存放到指定路径。 - -```python -if __name__ == "__main__": - context.set_context(save_graphs=True, save_graphs_path="path/to/ir/files") -``` - -此处为单机版本的训练脚本。当运行的脚本使用多个计算设备时,MindSpore会为每一个计算设备生成一个独立的进程。因此我们建议用户在多卡版本的训练脚本中读取当前的计算设id,从而为每个设备设置独立的`save_graphs_path`实现将每个设备的IR文件保存在不同的路径下。例如: - -```python -device_id = os.getenv("DEVICE_ID") -context.set_context(save_graphs=True, save_graphs_path="path/to/ir/files"+device_id) -``` - -执行训练命令后,在指定的目录生成如下文件。其中以数字下划线开头的IR文件是在ME编译图过程中输出的,`pipeline`各阶段分别会保存一次计算图。下面介绍比较重要的阶段,例如`parse`阶段会解析入口的`construct`函数;`symbol_resolve`阶段会递归解析入口函数直接或间接引用到的其他函数和对象;`abstract_specialize`阶段会做类型推导和`shape`推导;`optimize`阶段主要是进行和硬件无关的优化,自动微分与自动并行功能也是在该阶段展开;`validate`阶段会校验编译出来的计算图;`task_emit`阶段将计算图传给后端进一步处理;`execute`阶段会执行该计算图。 - -```bash -. 
-├── 00_parse_[xxxx].ir -├── 00_parse.dat -├── 00_parse.dot -├── 01_symbol_resolve_[xxxx].ir -├── 01_symbol_resolve.dat -├── 01_symbol_resolve.dot -├── 02_combine_like_graphs_[xxxx].ir -├── 02_combine_like_graphs.dat -├── 02_combine_like_graphs.dot -├── 03_inference_opt_prepare_[xxxx].ir -├── 03_inference_opt_prepare.dat -├── 03_inference_opt_prepare.dot -├── 04_abstract_specialize_[xxxx].ir -├── 04_abstract_specialize.dat -├── 04_abstract_specialize.dot -├── 05_inline_[xxxx].ir -├── 05_inline.dat -├── 05_inline.dot -├── 06_py_pre_ad_[xxxx].ir -├── 06_py_pre_ad.dat -├── 06_py_pre_ad.dot -├── 07_pipeline_split_[xxxx].ir -├── 07_pipeline_split.dat -├── 07_pipeline_split.dot -├── 08_optimize_[xxxx].ir -├── 08_optimize.dat -├── 08_optimize.dot -├── 09_py_opt_[xxxx].ir -├── 09_py_opt.dat -├── 09_py_opt.dot -├── 10_validate_[xxxx].ir -├── 10_validate.dat -├── 10_validate.dot -├── 11_task_emit_[xxxx].ir -├── 11_task_emit.dat -├── 11_task_emit.dot -├── 12_execute_[xxxx].ir -├── 12_execute.dat -├── 12_execute.dot -... 
-``` - -## IR文件内容介绍 - -下面以一个简单的例子来说明IR文件的内容,执行以下一段训练代码: - -```python -import mindspore.context as context -import mindspore.nn as nn -from mindspore import Tensor -from mindspore import dtype as mstype - -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") -context.set_context(save_graphs=True, save_graphs_path="./ir_files") - -class Net(nn.Cell): - def __init__(self): - super().__init__() - - def construct(self, x, y): - x = x + y - x = x * y - return x - -x = Tensor(3, mstype.float32) -y = Tensor(2, mstype.float32) -net = Net() -out = net(x, y) -print(out) -``` - -使用文本编辑软件(例如`vi`)打开执行完后输出的IR文件`12_execute_[xxxx].ir`,内容如下所示: - -```text - 1 #IR entry : @6_5_1_construct_wrapper.15 - 2 #attrs : - 3 check_set_strategy_valid_once_only : 1 - 4 #Total params : 2 - 5 - 6 %para1_x : - 7 %para2_y : - 8 - 9 #Total subgraph : 1 -10 -11 subgraph attr: -12 check_set_strategy_valid_once_only : 1 -13 subgraph @6_5_1_construct_wrapper.15() { -14 %0([CNode]8) = Add(%para1_x, %para2_y) primitive_attrs: {output_names: [output], input_names: [x, y]} -15 : (, ) -> () -16 # In file /home/workspace/mindspore/mindspore/ops/composite/multitype_ops/add_impl.py(129)/ return F.add(x, y)/ -17 # In file demo.py(14)/ x = x + y/ -18 %1([CNode]10) = Mul(%0, %para2_y) primitive_attrs: {output_names: [output], input_names: [x, y]} -19 : (, ) -> () -20 # In file /home/workspace/mindspore/mindspore/ops/composite/multitype_ops/mul_impl.py(48)/ return F.tensor_mul(x, y)/ -21 # In file demo.py(15)/ x = x * y/ -22 return(%1) -23 : () -24 } -``` - -以上内容可分为两个部分,第一部分为输入列表,第二部分为图结构。 其中第1行告诉了我们该网络的顶图名称`@6_5_1_construct_wrapper.15`,也就是入口图。 第4行告诉了我们该网络有多少个输入。 第6-7行为输入列表,遵循`%para[序号]_[name] : <[data_type]x[shape]>`的格式。 第9行告诉我们该网络解析出来的子图数量。 第11-24行为图结构,含有若干节点,即`CNode`。该示例中只有2个节点,分别为14行的`Add`和18行的`Mul`。 - -`CNode`的信息遵循如下格式,包含节点名称、属性、输入节点、输出信息、格式、源码解析调用栈等信息,由于ANF图为单向无环图,所以这里仅根据输入关系体现节点与节点的连接关系。源码解析调用栈则体现了`CNode`与脚本源码之间的关系,例如第20行由第21行解析而来,而第21行能对应到脚本的`x = x * y`。 - -```text -%[序号]([debug_name]) = 
[OpName]([arg], ...) primitive_attrs: {[key]: [value], ...} - : (<[输入data_type]x[输入shape]>, ...) -> (<[输出data_type]x[输出shape]>, ...) - # 源码解析调用栈 -``` - -> 需要注意的是经过编译器的若干优化处理后,节点可能经过了若干变幻(如算子拆分、算子融合等),节点的源码解析调用栈信息与脚本可能无法完全一一对应,这里仅作为辅助手段。 - -## 函数式语义 - -MindIR较传统计算图的一个重要特性是不仅可以表达算子之间的数据依赖,还可以表达丰富的函数式语义。 - -### 高阶函数 - -在MindIR中,函数的定义是由一个子图来定义,但其本身可以是一个被传递的值,作为其他高阶函数的输入或输出。 -例如下面一个简单的示例中,函数`f`作为参数传入了函数`g`,因此函数`g`是一个接收函数输入的高阶函数,函数`f`真正的调用点是在函数`g`内部。 - -```python -@ms_function -def hof(x): - def f(x): - return x + 3 - def g(function, x): - return function(x) * function(x) - res = g(f, x) - return res -``` - -对应的MindIR为[hof.dot](https://gitee.com/mindspore/docs/blob/master/docs/note/source_zh_cn/design/mindspore/images/ir/hof.dot): - -![image](./images/ir/hof.png) - -在实际网络训练脚本中,自动求导泛函`GradOperation`和优化器中常用到的`Partial`和`HyperMap`都是典型的高阶函数。高阶语义极大地提升了MindSpore表达的灵活性和简洁性。 - -### 控制流 - -控制流在MindIR中是以高阶函数选择调用的形式表达。这样的形式把控制流转换为高阶函数的数据流,从而使得自动微分算法更加强大。不仅可以支持数据流的自动微分,还可以支持条件跳转、循环和递归等控制流的自动微分。 - -下面以一个简单的斐波那契用例来演示说明。 - -```python -@ms_function -def fibonacci(n): - if(n < 1): - return 0 - elif(n == 1): - return 1 - else: - return fibonacci(n-1) + fibonacci(n-2) -``` - -对应的MindIR为[cf.dot](https://gitee.com/mindspore/docs/blob/master/docs/note/source_zh_cn/design/mindspore/images/ir/cf.dot): - -![image](./images/ir/cf.png) - -其中`fibonacci`是顶层函数图,在顶层中有两个函数图被`switch`选择调用。`✓fibonacci`是第一个`if`的True分支,`✗fibonacci`是第一个`if`的False分支。在`✗fibonacci`中被调用的`✓✗fibonacci`是`elif`的True分支,`✗✗fibonacci`是`elif`的False分支。这里需要理解的关键是在MindIR中,条件跳转和递归是以高阶控制流的形式表达的。例如,`✓fibonacci`和`✗fibonacci`是作为`switch`算子的参数传入,`switch`根据条件参数选择哪一个函数作为返回值。因此,`switch`是把输入的函数当成普通的值做了一个二元选择操作,并没有调用,而真正的函数调用是在紧随`switch`后的CNode上完成。 - -### 自由变量和闭包 - -闭包(closure)是一种编程语言特性,它指的是代码块和作用域环境的结合。自由变量(free variable)是指在代码块中引用作用域环境中的变量而非局部变量。在MindIR中,代码块是以函数图呈现的,而作用域环境可以理解为该函数被调用时的上下文环境,自由变量的捕获方式是值拷贝而非引用。 - -一个典型的闭包用例如下: - -```python -@ms_function -def func_outer(a, b): - def func_inner(c): - return a + b + c - return func_inner - -@ms_function 
-def ms_closure(): - closure = func_outer(1, 2) - out1 = closure(1) - out2 = closure(2) - return out1, out2 -``` - -对应的MindIR为[closure.dot](https://gitee.com/mindspore/docs/blob/master/docs/note/source_zh_cn/design/mindspore/images/ir/closure.dot): - -![image](./images/ir/closure.png) - -在例子中,`a`和`b`是自由变量,因为`func_inner`中变量`a`和`b`是引用的其父图`func_outer`中定义的参数。变量`closure`是一个闭包,它是函数`func_inner`与其上下文`func_outer(1, 2)`的结合。因此,`out1`的结果是4,因为其等价于`1+2+1`,`out2`的结果是5,因为其等价于`1+2+2`。 - -## 参考文献 - -[1] C. Click and M. Paleczny. A simple graph-based intermediate representation. -SIGPLAN Not., 30:35–49, March 1995. - -[2] Roland Leißa, Marcel Köster, and Sebastian Hack. A graph-based higher-order intermediate -representation. In Proceedings of the 13th Annual IEEE/ACM International Symposium on -Code Generation and Optimization, pages 202–212. IEEE Computer Society, 2015. diff --git a/docs/note/source_zh_cn/design/mindspore/profiler_design.md b/docs/note/source_zh_cn/design/mindspore/profiler_design.md deleted file mode 100644 index 4fe36afd706ee48572ba2cc67f77731cc47e6e7f..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/mindspore/profiler_design.md +++ /dev/null @@ -1,198 +0,0 @@ -# Profiler设计文档 - -`Linux` `Ascend` `GPU` `模型开发` `模型调优` `框架开发` `中级` `高级` `贡献者` - - - -- [Profiler设计文档](#profiler设计文档) - - [背景](#背景) - - [Profiler框架设计](#profiler架构设计) - - [上下文](#上下文) - - [模块层级结构](#模块层级结构) - - [内部模块交互](#内部模块交互) - - [子模块设计](#准备训练脚本) - - [ProfilerAPI和Controller](#profiler-api-controller) - - [ProfilerAPI和Controller模块介绍](#profiler-api-controller模块介绍) - - [Analyser](#analyser) - - [Analyser模块介绍](#analyser模块介绍) - - [Analyser模块设计](#analyser模块设计) - - [Parser](#parser) - - [Parser模块介绍](#parser模块介绍) - - [Parser模块设计](#parser模块设计) - - [Proposer](#proposer) - - [Proposer模块介绍](#proposer模块介绍) - - [Proposer模块设计](#proposer模块设计) - - - - - -## 背景 - -为了支持用户在MindSpore进行模型开发性能调试,需要提供易用的Profile工具,直观地展现网络模型各维度的性能信息,为用户提供易用、丰富的性能分析功能,帮助用户快速定位网络中性能问题。 - -## Profiler架构设计 - 
-这一章将介绍Profiler的架构设计,第一节从整体Profiler的角度出发介绍其上下文交互关系,第二节将打开Profiler内部,介绍模块层架结构以及模块划分,第三节将介绍模块间的交互调用关系。 - -### 上下文 - -Profiler是MindSpore调试调优工具的一部分,在整个使用过程中的上下文环境如下图所示: - -![context_profiler.png](./images/context_profiler.png) - -图1:上下文关系图 - -如上图所示,Profiler与其他部分的交互包括: - -1. 在训练脚本中调用MindSpore的Profiler启动收集性能数据的命令,Ascend类型下由ada模块生成性能原始数据,GPU类型下由CUPTI模块生成性能原始数据; - -2. MindSpore侧Profiler将在用户脚本中对原始数据进行解析,并在用户指定的文件夹下面生成中间数据结果; - -3. Mindinsight侧Profiler对接中间数据,提供可视化Profiler功能供用户使用。 - -### 模块层级结构 - -模块层级划分如下: - -![module_profiler.png](./images/module_profiler.png) - -图2:层级模块关系图 - -如上图所示,各个模块功能介绍如下: - -1. ProfilerAPI是代码侧为用户提供的调用入口,为用户提供了性能收集启动接口以及分析接口; -2. Controller是ProfilerAPI下层的模块,被ProfilerAPI中的启动接口调用,负责控制下方性能收集功能的启动停止,原始数据会被写入固定位置; -3. Parser是性能原始数据解析模块,由于性能原始数据是在设备侧收集的信息,所以信息不能直接被用户所理解,该模块负责将信息进行解析、组合、转换,最终形成用户可理解、上层可分析的中间结果; -4. Analyser获取下层Parser解析出的中间结果,负责对中间结果进行封装、筛选、排序,最终按照信息分类,返回各个类别对应的信息,提供给上层的表现层Profiler API、RESTful使用; -5. 通过RESTful调用后端Analyser提供的common API,获取目标数据,以RESTful接口对接前端。 - -### 内部模块交互 - -从用户角度,有两种使用形式API、RESTful,我们以API为例,阐述一个完整的内部模块交互流程: - -![time_order_profiler.png](./images/time_order_profiler.png) - -图3:模块交互图 - -如上图所示,各个模块交互流程如下: - -1. ProfilerAPI会调用下层Controller的控制函数,控制下层收集模块进行收集,Ascend收集模块为ada,GPU为CUPTI; - -2. 用户在训练结束后会调用ProfilerAPI的分析接口; - -3. 
Profiler API分析接口首先使用Parser模块对性能数据进行解析,产生中间结果,再调用Analyser进行中间结果分析,最终将各类信息返回至用户侧。 - -## 子模块设计 - -### ProfilerAPI和Controller - -#### ProfilerAPI和Controller模块说明 - -ProfilerAPI为用户在训练脚本侧提供入口API,用户通过ProfilerAPI启动性能收集以及对性能数据进行分析。 -ProfilerAPI通过Controller下发命令,启动性能数据收集模块。 - -#### ProfilerAPI和Controller模块设计 - -ProfilerAPI模块,属于上层应用接口层,由训练脚本集成。功能分为两部分: - -- 训练前调用底层Controller接口,下发命令,启动profiling统计任务。 - -- 训练完成后,调用底层Controller接口,下发命令,停止性能统计任务,再调用Analyser、Parser模块接口解析数据文件,生成算子性能统计、training trace统计等结果数据。 - -Controller模块提供对上层接口,并调用底层性能收集模块接口,下发启动和停止性能收集的命令。 - -最终生成的性能原始数据主要包含: - -Ascend: - -- `hwts.log.data.45.dev.profiler_default_tag`文件:存储算子执行信息,包括task的开始/结束,stream id的信息等; -- `DATA_PREPROCESS.dev.AICPU`文件:AI CPU算子的执行各阶段的执行时间信息; -- `Framework.host.task_desc_info`文件:存储算子id与算子名称的对应关系,以及每个算子的输入输出信息; -- `training_trace.46.dev.profiler_default_tag`文件:存储每个step的开始结束时刻,迭代间隙、迭代前向反向、迭代拖尾的时刻信息。 - -GPU: - -- `step_trace_profiling_0.txt`文件:存储了前向/反向的起止算子等信息。 - -### Parser - -#### Parser模块介绍 - -Parser是原始性能数据解析模块,由于原始性能数据是在设备侧收集的信息,所以信息不能直接被用户所理解,该模块负责将信息进行解析、组合、转换,最终形成用户可理解、上层可分析的中间结果。 - -#### Parser模块设计 - -![parser_module_profiler.png](./images/parser_module_profiler.png) - -图4:Parser模块图 - -如上图所示,Parser模块主要由HWTS Parser、AI CPU Parser、Framework Parser、Step Trace Parser组成,每个模块对应解析一种原始数据,通过解析原始数据得到用户能读懂的中间文件。其中 -Ascend主要用到HWTS Parser、AI CPU Parser、Framework Parser、Step Trace Parser,GPU主要用到Step Trace Parser。 - -Ascend: - -- HWTS Parser:解析`hwts.log.data.45.dev.profiler_default_tag`文件,获得Device基于task的统计信息,如每个task的开始/结束,stream id等数据,用于算子执行时间的计算。 -- AI CPU Parser:解析`DATA_PREPROCESS.dev.AICPU`文件,获得AI CPU算子的执行各阶段的执行时间信息。 -- Framework Parser:解析`Framework.host.task_desc_info`文件,用于获取AI Core算子与task的对应关系,算子关键信息等内容。 -- Step Trace Parser:解析`training_trace.46.dev.profiler_default_tag`文件,用于分析训练各阶段的时间。 - -GPU: - -- Step Trace Parser:解析`step_trace_profiling_0.txt`文件,用于分析训练各阶段的时间。 - -### Analyser - -#### Analyser模块介绍 - -分析器的作用是对解析阶段生成的中间结果,进行筛选、排序、查询、分页等相关操作。 - -#### Analyser模块设计 - 
-该模块负责解析Parser生成的中间文件,为上层数据分析提供通用接口,将分析后的数据返回给上层展示给用户,由于各种中间文件有一定的共同点,可以抽象出公共内容,所以Analyser类设计如下图所示: - -![analyser_class_profiler.png](./images/analyser_class_profiler.png) - -图5:Analyser类图 - -如上图所示,针对期望查询的不同内容,实现多个Analyser,每个Analyser可以定义筛选、排序、分页条件。每个Analyser知道自己需要哪些中间文件来进行数据的合并、筛选、排序。Analyser与Parser是通过Parser生成的中间文件关联起来的,本身不存在函数调用的情况,这样对两个模块进行了解耦。Ascend和GPU分别支持不同的Analyser类。 - -针对算子信息的Analyser,目前存在两种: - -- 针对算子类型平均信息的筛选。 -- 针对每个算子详细平均信息的筛选,分别在两个Analyser中实现(Ascend为AicoreTypeAnalyser、AicoreDetailAnalyser,GPU为GpuOpTypeAnalyser、GpuOpInfoAnalyser)。 - -为了隐藏Analyser内部实现,方便调用,使用简单工厂模式,通过AnalyserFactory获取指定的Analyser。 - -### Proposer - -#### Proposer模块介绍 - -Proposer是Profiler性能优化建议模块,Proposer调用Analyser模块获取性能数据,通过调优规则对性能数据进行分析,输出调优建议由UI、API接口展示给用户。 - -#### Proposer模块设计 - -模块划分如下所示: - -![proposer_module_profiler.png](./images/proposer_module_profiler.png) - -图6:Proposer模块图 - -模块设计如上图所示: - -- Proposer提供接口用于API、RESTful调用以获取优化建议。 -- Proposer调用Analyser接口,获取性能数据并根据优化规则,获得优化建议。 -- Proposer调用Analyser工厂获得Analyser对象。 - -调用Analyser对象的query接口获取信息,包括:按时间排序TOP N的算子信息、training trace各阶段的时间信息。 - -模块类设计如下所示: - -![proposer_class_profiler.png](./images/proposer_class_profiler.png) - -图7:Proposer类图 - -如上模块类图所示: - -- 各类型Proposer继承抽象类Proposer并实现analyze方法; -- API、CLI通过调用工厂ProposerFactory获取Proposer,并调用Proposer.analyze函数获取各类型的Proposer分析的优化建议。 diff --git a/docs/note/source_zh_cn/design/overall.rst b/docs/note/source_zh_cn/design/overall.rst deleted file mode 100644 index 01396b7cdb1ea169a4b65514bc09566e88e8181d..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/overall.rst +++ /dev/null @@ -1,8 +0,0 @@ -总体设计 -======== - -.. 
toctree:: - :maxdepth: 1 - - technical_white_paper - mindspore/architecture diff --git a/docs/note/source_zh_cn/design/technical_white_paper.md b/docs/note/source_zh_cn/design/technical_white_paper.md deleted file mode 100644 index e92cbb4e4a3436d1b2b978acbe582094f35cb684..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/design/technical_white_paper.md +++ /dev/null @@ -1,25 +0,0 @@ -# 技术白皮书 - -`Linux` `Ascend` `GPU` `CPU` `全流程` `框架开发` `中级` `高级` `贡献者` - - - -- [技术白皮书](#技术白皮书) - - [引言](#引言) - - [简介](#简介) - - - - - -## 引言 - -深度学习研究和应用在近几十年得到了爆炸式的发展,掀起了人工智能的第三次浪潮,并且在图像识别、语音识别与合成、无人驾驶、机器视觉等方面取得了巨大的成功。这也对算法的应用以及依赖的框架有了更高级的要求。深度学习框架的不断发展使得在大型数据集上训练神经网络模型时,可以方便地使用大量的计算资源。 - -深度学习是使用多层结构从原始数据中自动学习并提取高层次特征的一类机器学习算法。通常,从原始数据中提取高层次、抽象的特征是非常困难的。目前有两种主流的深度学习框架:一种是在执行之前构造一个静态图,定义所有操作和网络结构,典型代表是TensorFlow,这种方法以牺牲易用性为代价,来提高训练期间的性能;另一种是立即执行的动态图计算,典型代表是PyTorch。通过比较可以发现,动态图更灵活、更易调试,但会牺牲性能。因此,现有深度学习框架难以同时满足易开发、高效执行的要求。 - -## 简介 - -MindSpore作为新一代深度学习框架,是源于全产业的最佳实践,最佳匹配昇腾处理器算力,支持终端、边缘、云全场景灵活部署,开创全新的AI编程范式,降低AI开发门槛。MindSpore是一种全新的深度学习计算框架,旨在实现易开发、高效执行、全场景覆盖三大目标。为了实现易开发的目标,MindSpore采用基于源码转换(Source Code Transformation,SCT)的自动微分(Automatic Differentiation,AD)机制,该机制可以用控制流表示复杂的组合。函数被转换成函数中间表达(Intermediate Representation,IR),中间表达构造出一个能够在不同设备上解析和执行的计算图。在执行前,计算图上应用了多种软硬件协同优化技术,以提升端、边、云等不同场景下的性能和效率。MindSpore支持动态图,更易于检查运行模式。由于采用了基于源码转换的自动微分机制,所以动态图和静态图之间的模式切换非常简单。为了在大型数据集上有效训练大模型,通过高级手动配置策略,MindSpore可以支持数据并行、模型并行和混合并行训练,具有很强的灵活性。此外,MindSpore还有“自动并行”能力,它通过在庞大的策略空间中进行高效搜索来找到一种快速的并行策略。MindSpore框架的具体优势,请查看详细介绍。 - -[查看技术白皮书](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com:443/white_paper/MindSpore_white_paperV1.1.pdf) diff --git a/docs/note/source_zh_cn/env_var_list.md b/docs/note/source_zh_cn/env_var_list.md deleted file mode 100644 index 348f54ee79f15e18ddae9d564f163b02dacae1ea..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/env_var_list.md +++ /dev/null @@ -1,32 +0,0 @@ -# 环境变量 - -`Linux` `Ascend` `GPU` `CPU` `初级` `中级` `高级` - - - 
-本文介绍MindSpore的环境变量。 - -| 环境变量 | 所属模块 | 功能 | 类型 | 取值范围 | 配置关系 | 是否必选 | -| --- | --- | --- | --- | --- | --- | --- | -|MS_ENABLE_CACHE|MindData|是否开启dataset数据处理cache功能,可以实现数据处理过程中数据的cache能力,加速数据集读取及增强处理|String|TRUE:开启数据处理cache功能
FALSE:关闭数据处理cache功能|与MS_CACHE_HOST、MS_CACHE_PORT一起使用|可选| -|MS_CACHE_HOST|MindData|开启cache时,cache服务所在的IP|String|Cache Server所在机器的IP|与MS_ENABLE_CACHE=TRUE、MS_CACHE_PORT一起使用|可选| -|MS_CACHE_PORT|MindData|开启cache时,cache服务所在的端口|String|Cache Server所在机器的端口|与MS_ENABLE_CACHE=TRUE、MS_CACHE_HOST一起使用|可选| -|PROFILING_MODE|MindData|是否开启dataset profiling数据处理性能分析,用于与MindInsight一起配合使用,可以在网页中展示各个阶段的耗时|String|true: 开启profiling功能
false: 关闭profiling功能|与MINDDATA_PROFILING_DIR配合使用|可选| -|MINDDATA_PROFILING_DIR|MindData|系统路径,保存dataset profiling结果路径|String|系统路径,支持相对路径|与PROFILING_MODE=true配合使用|可选| -|DATASET_ENABLE_NUMA|MindData|是否开启numa绑核功能,在大多数分布式场景下numa绑核都能提升数据处理效率和端到端性能|String|True: 开启numa绑核功能|与libnuma.so配合使用|可选| -|OPTIMIZE|MindData|是否执行dataset数据处理 pipeline 树优化,在适合数据处理算子融合的场景下,可以提升数据处理效率|String|true: 开启pipeline树优化
false: 关闭pipeline树优化|无|可选| -|ENABLE_MS_DEBUGGER|Debugger|是否在训练中启动Debugger|Boolean|1:开启Debugger
0:关闭Debugger|无|可选| -|MS_DEBUGGER_PORT|Debugger|连接MindInsight Debugger Server的端口|Integer|1~65536,连接MindInsight Debugger Server的端口|无|可选 -|MS_DEBUGGER_PARTIAL_MEM|Debugger|是否开启部分内存复用(只有在Debugger选中的节点才会关闭这些节点的内存复用)|Boolean|1:开启Debugger选中节点的内存复用
0:关闭Debugger选中节点的内存复用|无|可选| -|MS_BUILD_PROCESS_NUM|MindSpore|Ascend后端编译时,指定并行编译进程数|Integer|1~24:允许设置并行进程数取值范围|无|可选| -|RANK_TABLE_FILE|MindSpore|路径指向文件,包含指定多Ascend AI处理器环境中Ascend AI处理器的"device_id"对应的"device_ip"。|String|文件路径,支持相对路径与绝对路径|与RANK_SIZE配合使用|必选(使用Ascend AI处理器时)| -|RANK_SIZE|MindSpore|指定深度学习时调用Ascend AI处理器的数量|Integer|1~8,调用Ascend AI处理器的数量|与RANK_TABLE_FILE配合使用|必选(使用Ascend AI处理器时)| -|RANK_ID|MindSpore|指定深度学习时调用Ascend AI处理器的逻辑ID|Integer|0~7,多机并行时不同server中DEVICE_ID会有重复,使用RANK_ID可以避免这个问题(多机并行时 RANK_ID = SERVER_ID * DEVICE_NUM + DEVICE_ID|无|可选| -|MS_SUBMODULE_LOG_v|MindSpore|[MS_SUBMODULE_LOG_v功能与用法](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#id9)|Dict{String:Integer...}|LogLevel: 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR
SubModule: COMMON, MD, DEBUG, DEVICE, COMMON, IR...|无|可选 -|OPTION_PROTO_LIB_PATH|MindSpore|PROTO依赖库路径|String|目录路径,支持相对路径与绝对路径|无|可选| -|MS_RDR_ENABLE|MindSpore|是否开启程序运行数据记录器(RDR),如果MindSpore出现了运行异常,会自动导出MindSpore中预先记录的数据以辅助定位运行异常的原因|Integer|1:开启RDR功能
0:关闭RDR功能|与MS_RDR_PATH一起使用|可选| -|MS_RDR_PATH|MindSpore|配置程序运行数据记录器(RDR)的文件导出路径|String|文件路径,仅支持绝对路径|与MS_RDR_ENABLE=1一起使用|可选| -|MS_OM_PATH|MindSpore|配置图编译出错时dump的analyze_fail.dat文件的保存目录|String|文件路径,支持相对路径与绝对路径|无|可选| -|GE_USE_STATIC_MEMORY|GraphEngine|当网络模型层数过大时,特征图中间计算数据可能超过25G,例如BERT24网络。多卡场景下为保证通信内存高效协同,需要配置为1,表示使用内存静态分配方式,其他网络暂时无需配置,默认使用内存动态分配方式。
静态内存默认配置为31G,如需要调整可以通过网络运行参数graph_memory_max_size和variable_memory_max_size的总和指定;动态内存是动态申请,最大不会超过graph_memory_max_size和variable_memory_max_size的总和。|Integer|1:使用内存静态分配方式
0:使用内存动态分配方式|无|可选| -|DUMP_GE_GRAPH|GraphEngine|把整个流程中各个阶段的图描述信息打印到文件中,此环境变量控制dump图的内容多少|Integer|1:全量dump
2:不含有权重等数据的基本版dump
3:只显示节点关系的精简版dump|无|可选| -|DUMP_GRAPH_LEVEL|GraphEngine|把整个流程中各个阶段的图描述信息打印到文件中,此环境变量可以控制dump图的个数|Integer|1:dump所有图
2:dump除子图外的所有图
3:dump最后的生成图|无|可选| diff --git a/docs/note/source_zh_cn/glossary.md b/docs/note/source_zh_cn/glossary.md deleted file mode 100644 index a59319010af5425bdce616441d40211d5c0d3d9d..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/glossary.md +++ /dev/null @@ -1,49 +0,0 @@ -# 术语 - -`Linux` `Windows` `Ascend` `GPU` `CPU` `全流程` `初级` `中级` `高级` - - - -| 术语/缩略语 | 说明 | -| ----- | ----- | -| ACL | Ascend Computer Language,提供Device管理、Context管理、Stream管理、内存管理、模型加载与执行、算子加载与执行、媒体数据处理等C++ API库,供用户开发深度神经网络应用。| -| AIR | Ascend Intermediate Representation,类似ONNX,是华为定义的针对机器学习所设计的开放式的文件格式,能更好地适配Ascend AI处理器。 | -| Ascend | 华为昇腾系列芯片的系列名称。 | -| CCE | Cube-based Computing Engine,面向硬件架构编程的算子开发工具。 | -| CCE-C | Cube-based Computing Engine C,使用CCE开发的C代码。 | -| CheckPoint | MindSpore模型训练检查点,保存模型的参数,可以用于保存模型供推理,或者再训练。 | -| CIFAR-10 | 一个开源的图像数据集,包含10个类别的60000个32x32彩色图像,每个类别6000个图像。有50000张训练图像和10000张测试图像。 | -| CIFAR-100 | 一个开源的图像数据集,它有100个类别,每个类别包含500张训练图像和100张测试图像。 | -| Davinci | 达芬奇架构,华为自研的新型芯片架构。 | -| EulerOS | 欧拉操作系统,华为自研的基于Linux标准内核的操作系统。 | -| FC Layer | Fully Conneted Layer,全连接层。整个卷积神经网络中起到分类器的作用。 | -| FE | Fusion Engine,负责对接GE和TBE算子,具备算子信息库的加载与管理、融合规则管理等能力。 | -| Fine-tuning | 基于面向某任务训练的网络模型,训练面向第二个类似任务的网络模型。 | -| FP16 | 16位浮点,半精度浮点算术,消耗更小内存。 | -| FP32 | 32位浮点,单精度浮点算术。 | -| GE | Graph Engine,MindSpore计算图执行引擎,主要负责根据前端的计算图完成硬件相关的优化(算子融合、内存复用等等)、device侧任务启动。 | -| GHLO | Graph High Level Optimization,计算图高级别优化。GHLO包含硬件无关的优化(如死代码消除等)、自动并行和自动微分等功能。 | -| GLLO | Graph Low Level Optimization,计算图低级别优化。GLLO包含硬件相关的优化,以及算子融合、Buffer融合等软硬件结合相关的深度优化。 | -| Graph Mode | MindSpore的静态图模式,将神经网络模型编译成一整张图,然后下发执行,性能高。 | -| HCCL | Huawei Collective Communication Library,实现了基于Davinci架构芯片的多机多卡通信。 | -| ImageNet | 根据WordNet层次结构(目前仅名词)组织的图像数据库。 | -| LeNet | 一个经典的卷积神经网络架构,由Yann LeCun等人提出。 | -| Loss | 损失,预测值与实际值的偏差,深度学习用于判断模型好坏的一个标准。 | -| LSTM | Long short-term memory,长短期记忆,对应的网络是一种时间循环神经网络,适合于处理和预测时间序列中间隔和延迟非常长的重要事件。 | -| Manifest | 一种数据格式文件,华为ModelArts采用了该格式,详细说明请参见。 | -| ME | Mind 
Expression,MindSpore前端,主要完成从用户源码到计算图的编译任务、训练中控制执行及上下文维护(非下沉模式配置下)、动态图(PyNative模式)等。 | -| MindArmour | MindSpore安全模块,通过差分隐私、对抗性攻防等技术手段,提升模型的保密性、完整性和可用性,阻止攻击者对模型进行恶意修改或是破解模型的内部构件,窃取模型的参数。 | -| MindData | MindSpore数据框架,提供数据加载、增强、数据集管理以及可视化。 | -| MindInsight | MindSpore可视化组件,可视化标量、图像、计算图以及模型超参等信息。 | -| MindIR | MindSpore IR,一种基于图表示的函数式IR,定义了可扩展的图结构以及算子IR表示,存储了MindSpore基础数据结构。 | -| MindRecord | MindSpore定义的一种数据格式,是一个执行读取、写入、搜索和转换MindSpore格式数据集的模块。 | -| MindSpore | 华为主导开源的深度学习框架。 | -| MindSpore Lite | 一个轻量级的深度神经网络推理引擎,提供了将MindSpore训练出的模型在端侧进行推理的功能。 | -| MNIST database | Modified National Institute of Standards and Technology database,一个大型手写数字数据库,通常用于训练各种图像处理系统。 | -| ONNX | Open Neural Network Exchange,是一种针对机器学习所设计的开放式的文件格式,用于存储训练好的模型。| -| PyNative Mode | MindSpore的动态图模式,将神经网络中的各个算子逐一下发执行,方便用户编写和调试神经网络模型。 | -| ResNet-50 | Residual Neural Network 50,由微软研究院的Kaiming He等四名华人提出的残差神经网络。 | -| Schema | 数据集结构定义文件,用于定义数据集包含哪些字段以及字段的类型。 | -| Summary | 是对网络中Tensor取值进行监测的一种算子,在图中是“外围”操作,不影响数据流本身。 | -| TBE | Tensor Boost Engine,华为自研的NPU算子开发工具,在TVM( Tensor Virtual Machine )框架基础上扩展,提供了一套Python API来实施开发活动,进行自定义算子开发。 | -| TFRecord | Tensorflow定义的数据格式。 | diff --git a/docs/note/source_zh_cn/help_seeking_path.md b/docs/note/source_zh_cn/help_seeking_path.md deleted file mode 100644 index 3a671e8d7f56489e807f5d81c5e5a27831e7b529..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/help_seeking_path.md +++ /dev/null @@ -1,32 +0,0 @@ -# 如何求助(求助路径) - -`Linux` `Windows` `Ascend` `GPU` `CPU` `全流程` `初级` `中级` `高级` - - - -本文将简述用户在使用MindSpore遇到问题时,如何使用官方提供的问题求助路径解决问题。MindSpore问题求助整体流程如图中所示,从用户使用MindSpore发现问题开始,直至选择到合适的问题解决方法。下面我们基于问题求助流程图对各种求助方法做解释说明。 - -![solution](./images/help_seeking_path.png) - -- 网站搜索 - - - 进入[官网搜索](https://www.mindspore.cn/search)。 - - 遇到问题时,首先推荐使用网站搜索方法,该方法操作简单、高效。 - - 在搜索框输入问题的关键词,点击搜索,可匹配出与关键词相关的内容。 - - 参考搜索结果,解决当前遇到的问题。 - -- 用户群咨询 - - - QQ用户群号:871543426。 - - 如果网站搜索方法不能解决当前问题,可通过QQ用户群咨询,建议想要简单咨询的用户选取此方法。 - - 加群后可以与其他用户讨论交流,还有技术专家在群中提供帮助解答。 - - 
通过专家的解答或和其他用户的交流来解决当前遇到的问题。 - -- 论坛求助 - - - 如果用户想要详细的解决方法,可通过[MindSpore论坛](https://bbs.huaweicloud.com/forum/forum-1076-1.html)中发布问题求助帖获取解答。 - - 为提高问题解决速度与质量,发帖前请参考[发帖建议](https://bbs.huaweicloud.com/forum/thread-69695-1-1.html),按照建议格式发帖。 - - 帖子发出后会有论坛版主负责将问题收录,并联系技术专家进行解答,问题将在三个工作日内解决。 - - 参考技术专家的解决方案,解决当前遇到的问题。 - - 如果在专家测试后确定是MindSpore功能有待完善,推荐用户在[MindSpore仓](https://gitee.com/mindspore)中创建ISSUE,所提问题会在后续的版本中得到修复完善。 diff --git a/docs/note/source_zh_cn/images/after_transfer.png b/docs/note/source_zh_cn/images/after_transfer.png deleted file mode 100644 index cb066922a36214a940741f4c2bca96ec35ec7d19..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/images/after_transfer.png and /dev/null differ diff --git a/docs/note/source_zh_cn/images/before_transfer.png b/docs/note/source_zh_cn/images/before_transfer.png deleted file mode 100644 index ba2fe024d6382a1bad7b0f6cc4f2623e4815c2cf..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/images/before_transfer.png and /dev/null differ diff --git a/docs/note/source_zh_cn/images/help_seek_path_zh.pptx b/docs/note/source_zh_cn/images/help_seek_path_zh.pptx deleted file mode 100644 index 71b064d392c43a838d47cbaf92f5c042484081bc..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/images/help_seek_path_zh.pptx and /dev/null differ diff --git a/docs/note/source_zh_cn/images/help_seeking_path.png b/docs/note/source_zh_cn/images/help_seeking_path.png deleted file mode 100644 index 67883d9c5280a90e42447b23990ed25dc5888b3c..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/images/help_seeking_path.png and /dev/null differ diff --git a/docs/note/source_zh_cn/images/image_classification_result.png b/docs/note/source_zh_cn/images/image_classification_result.png deleted file mode 100644 index a7cc49f582440e31b6b5b14dbba5131bfed2a4b4..0000000000000000000000000000000000000000 Binary files 
a/docs/note/source_zh_cn/images/image_classification_result.png and /dev/null differ diff --git a/docs/note/source_zh_cn/images/object_detection.png b/docs/note/source_zh_cn/images/object_detection.png deleted file mode 100644 index ad5425c86393a9367701166796df42c9e4702988..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/images/object_detection.png and /dev/null differ diff --git a/docs/note/source_zh_cn/images/posenet_detection.png b/docs/note/source_zh_cn/images/posenet_detection.png deleted file mode 100644 index db253e597caa3c8c825b466ef2bc0ce7893d1411..0000000000000000000000000000000000000000 Binary files a/docs/note/source_zh_cn/images/posenet_detection.png and /dev/null differ diff --git a/docs/note/source_zh_cn/index.rst b/docs/note/source_zh_cn/index.rst deleted file mode 100644 index 290b13eccba571f1b0bacefdacbe9cbcdb9e3a74..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/index.rst +++ /dev/null @@ -1,39 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 10:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore设计和规格 -===================== - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 设计说明 - - design/overall - design/mindspore - design/mindinsight - design/mindarmour - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 规格说明 - - benchmark - network_list - operator_list - syntax_list - env_var_list - -.. 
toctree:: - :glob: - :maxdepth: 1 - :caption: 其他说明 - - glossary - roadmap - help_seeking_path - community - \ No newline at end of file diff --git a/docs/note/source_zh_cn/index_support.md b/docs/note/source_zh_cn/index_support.md deleted file mode 100644 index 233b6a2b39ea3ade00fe2d10108daca5f74fc32f..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/index_support.md +++ /dev/null @@ -1,471 +0,0 @@ -# Tensor索引支持 - -`Linux` `Ascend` `GPU` `模型开发` `初级` `中级` `高级` - - - -- [Tensor索引支持](#tensor索引支持) - - [索引取值](#索引取值) - - [索引赋值](#索引赋值) - - [索引增强赋值](#索引增强赋值) - - - - - -Tensor 支持单层与多层索引取值,赋值以及增强赋值,支持动态图(PyNative)以及静态图(Graph)模式。 - -## 索引取值 - -索引值支持`int`、`bool`、`None`、`ellipsis`、`slice`、`Tensor`、`List`、`Tuple`。 - -- `int`索引取值 - - 支持单层和多层`int`索引取值,单层`int`索引取值:`tensor_x[int_index]`,多层`int`索引取值:`tensor_x[int_index0][int_index1]...`。 - - `int`索引取值操作的是第0维,索引值小于第0维长度,在取出第0维对应位置数据后,会消除第0维。 - - 例如,如果对一个`shape`为`(3, 4, 5)`的tensor进行单层`int`索引取值,取得结果的`shape`是`(4, 5)`。 - - 多层索引取值可以理解为,后一层索引取值在前一层索引取值结果上再进行`int`索引取值。 - - 示例如下: - - ```python - tensor_x = Tensor(np.arange(2 * 3 * 2).reshape((2, 3, 2))) - data_single = tensor_x[0] - data_multi = tensor_x[0][1] - ``` - - 结果如下: - - ```text - data_single: Tensor(shape=[3, 2], dtype=Int64, value=[[0, 1], [2, 3], [4, 5]]) - data_multi: Tensor(shape=[2], dtype=Int64, value=[2, 3]) - ``` - -- `bool`索引取值 - - 支持单层和多层`bool`索引取值,单层`bool`索引取值:`tensor_x[True]`,多层`bool`索引取值:`tensor_x[True][True]...`。 - - `bool`索引取值操作的是第0维,在取出所有数据后,会在`axis=0`轴上扩展一维,对应`True`/`False`,该维长度分别为1/0。`False`将会在`shape`中引入`0`,因此暂只支持`True`。 - - 例如,对一个`shape`为`(3, 4, 5)`的tensor进行单层`True`索引取值,取得结果的`shape`是`(1, 3, 4, 5)`。 - - 多层索引取值可以理解为,后一层索引取值在前一层索引取值结果上再进行`bool`索引取值。 - - 示例如下: - - ```python - tensor_x = Tensor(np.arange(2 * 3 ).reshape((2, 3))) - data_single = tensor_x[True] - data_multi = tensor_x[True][True] - ``` - - 结果如下: - - ```text - data_single: Tensor(shape=[1, 2, 3], dtype=Int64, value=[[[0, 1, 2], [3, 4, 5]]]) - data_multi: Tensor(shape=[1, 1, 2, 3], 
dtype=Int64, value=[[[[0, 1, 2], [3, 4, 5]]]]) - ``` - -- `None`索引取值 - - `None`索引取值和`True`索引取值一致,可参考`True`索引取值,这里不再赘述。 - -- `ellipsis`索引取值 - - 支持单层和多层`ellipsis`索引取值,单层`ellipsis`索引取值:`tensor_x[...]`,多层`ellipsis`索引取值:`tensor_x[...][...]...`。 - - `ellipsis`索引取值操作在所有维度上取出所有数据。一般多作为`Tuple`索引的组成元素,`Tuple`索引将于下面介绍。 - - 例如,对一个`shape`为`(3, 4, 5)`的tensor进行`ellipsis`索引取值,取得结果的`shape`依然是`(3, 4, 5)`。 - - 示例如下: - - ```python - tensor_x = Tensor(np.arange(2 * 3 ).reshape((2, 3))) - data_single = tensor_x[...] - data_multi = tensor_x[...][...] - ``` - - 结果如下: - - ```text - data_single: Tensor(shape=[2, 3], dtype=Int64, value=[[0, 1, 2], [3, 4, 5]]) - data_multi: Tensor(shape=[2, 3], dtype=Int64, value=[[0, 1, 2], [3, 4, 5]]) - ``` - -- `slice`索引取值 - - 支持单层和多层`slice`索引取值,单层`slice`索引取值:`tensor_x[slice_index]`,多层`slice`索引取值:`tensor_x[slice_index0][slice_index1]...`。 - - `slice`索引取值操作的是第0维,取出第0维所切到位置的元素,`slice`不会降维,即使切到长度为1,区别于`int`索引取值。 - - 例如,`tensor_x[0:1:1] != tensor_x[0]`,因为`shape_former = (1,) + shape_latter`。 - - 多层索引取值可以理解为,后一层索引取值在前一层索引取值结果上再进行`slice`索引取值。 - - `slice`有`start`、`stop`和`step`组成。`start`默认值为0,`stop`默认值为该维长度,`step`默认值为1。 - - 例如,`tensor_x[:] == tensor_x[0:length:1]`。 - - 示例如下: - - ```python - tensor_x = Tensor(np.arange(4 * 2 * 2).reshape((4, 2, 2))) - data_single = tensor_x[1:4:2] - data_multi = tensor_x[1:4:2][1:] - ``` - - 结果如下: - - ```text - data_single: Tensor(shape=[2, 2, 2], dtype=Int64, value=[[[4, 5], [6, 7]], [[12, 13], [14, 15]]]) - data_multi: Tensor(shape=[1, 2, 2], dtype=Int64, value=[[[12, 13], [14, 15]]]) - ``` - -- `Tensor`索引取值 - - 支持单层和多层`Tensor`索引取值,单层`Tensor`索引取值:`tensor_x[tensor_index]`,多层`Tensor`索引取值:`tensor_x[tensor_index0][tensor_index1]...`。 - - `Tensor`索引取值操作的是第0维,取出第0维对应位置的元素。 - - 索引`Tensor`数据类型必须是int型,可以是(int8, int16, int32, int64),值必须为非负数,且小于第0维长度。 - - `Tensor`索引取值得到结果的`data_shape = tensor_index.shape + tensor_x.shape[1:]`。 - - 例如,对一个`shape`为`(6, 4, 5)`的tensor通过`shape`为`(2, 3)`的tensor进行索引取值,取得结果的`shape`为`(2, 3, 4, 5)`。 - - 
多层索引取值可以理解为,后一层索引取值在前一层索引取值结果上再进行`Tensor`索引取值。 - - 示例如下: - - ```python - tensor_x = Tensor(np.arange(4 * 2 * 3).reshape((4, 2, 3))) - tensor_index0 = Tensor(np.array([[1, 2], [0, 3]]), mstype.int32) - tensor_index1 = Tensor(np.array([[0, 0]]), mstype.int32) - data_single = tensor_x[tensor_index0] - data_multi = tensor_x[tensor_index0][tensor_index1] - ``` - - 结果如下: - - ```text - data_single: Tensor(shape=[2, 2, 2, 3], dtype=Int64, value=[[[[4, 5], [6, 7]], [[8, 9], [10, 11]]], [[[0, 1], [2, 3]], [[12, 13], [14, 15]]]]) - data_multi: Tensor(shape=[1, 2, 2, 2, 3], dtype=Int64, value=[[[[[4, 5], [6, 7]], [[8, 9], [10, 11]]], [[[4, 5], [6, 7]], [[8, 9], [10, 11]]]]]) - ``` - -- `List`索引取值 - - 支持单层和多层`List`索引取值,单层`List`索引取值:`tensor_x[list_index]`,多层`List`索引取值:`tensor_x[list_index0][list_index1]...`。 - - `List`索引取值操作的是第0维,取出第0维对应位置的元素。 - - 索引`List`数据类型必须是int、bool或两者混合。若数据类型为int,则取值在[`-dimension_shape`, `dimension_shape-1`]之间;若数据类型为bool, 则限制bool个数为对应维度长度,筛选对应维度上值为`True`的元素;若值为前两者混合,则bool类型的`True/False`将转为int类型的`1/0`。 - - `List`索引取值得到结果的`data_shape = list_index.shape + tensor_x.shape[1:]`。 - - 例如,对一个`shape`为`(6, 4, 5)`的tensor通过`shape`为`(3,)`的tensor进行索引取值,取得结果的`shape`为`(3, 4, 5)`。 - - 多层索引取值可以理解为,后一层索引取值在前一层索引取值结果上再进行`List`索引取值。 - - 示例如下: - - ```python - tensor_x = Tensor(np.arange(4 * 2 * 3).reshape((4, 2, 3))) - list_index0 = [1, 2, 0] - list_index1 = [True, False, True] - data_single = tensor_x[list_index0] - data_multi = tensor_x[list_index0][list_index1] - ``` - - 结果如下: - - ```text - data_single: Tensor(shape=[3, 2, 3], dtype=Int64, value=[[[6, 7, 8], [9, 10, 11]], [[12, 13, 14], [15, 16, 17]], [[0, 1, 2], [3, 4, 5]]]) - data_multi: Tensor(shape=[2, 2, 3], dtype=Int64, value=[[[6, 7, 8], [9, 10, 11]], [[0, 1, 2], [3, 4, 5]]]) - ``` - -- `Tuple`索引取值 - - 
索引`Tuple`的数据类型可以为`int`、`bool`、`None`、`slice`、`ellipsis`、`Tensor`、`List`、`Tuple`。支持单层和多层`Tuple`索引取值,单层`Tuple`索引取值:`tensor_x[tuple_index]`,多层`Tuple`索引取值:`tensor_x[tuple_index0][tuple_index1]...`。`Tuple`中包含的`List`与`Tuple`包含元素规则与单独的`List`规则相同,其他元素规则与单独元素也相同。 - - 索引`Tuple`中元素按照最终索引Broadcast规则,分为`Basic Index`、`Advanced Index`两类。`Basic Index`包含`slice`、`ellipsis`与`None`三种类型,`Advanced Index`包含`int`、`bool`、`Tensor`、`List`、`Tuple`等五种类型。索引过程中,所有的`Advanced Index`将会做Broadcast,若`Advaned Index`连续,最终broadcast shape将插入在第一个`Advanced Index`位置;若不连续,则broadcast shape插入在`0`位置。 - - 索引里除`None`扩展对应维度,`bool`扩展对应维度后与`Advanced Index`做Broadcast。除`ellipsis`、`bool`、`None`外每个元素操作对应位置维度,即`Tuple`中第0个元素操作第0维,第1个元素操作第1维,以此类推。每个元素的索引规则与该元素类型索引取值规则一致。 - - `Tuple`索引里最多只有一个`ellipsis`,`ellipsis`前半部分索引元素从前往后对应`Tensor`第0维往后,后半部分索引元素从后往前对应`Tensor`最后一维往前,其他未指定的维度,代表全取。 - - 元素里包含的`Tensor`数据类型必须是int型,可以是(int8, int16, int32, int64),值必须为非负数,且小于第0维长度。 - - 例如,`tensor_x[0:3, 1, tensor_index] == tensor_x[(0:3, 1, tensor_index)]`,因为`0:3, 1, tensor_index`就是一个`Tuple`。 - - 多层索引取值可以理解为,后一层索引取值在前一层索引取值结果上再进行`Tuple`索引取值。 - - 示例如下: - - ```python - tensor_x = Tensor(np.arange(2 * 3 * 4).reshape((2, 3, 4))) - tensor_index = Tensor(np.array([[1, 2, 1], [0, 3, 2]]), mstype.int32) - data = tensor_x[1, 0:1, tensor_index] - ``` - - 结果如下: - - ```text - data: Tensor(shape=[2, 3, 1], dtype=Int64, value=[[[13], [14], [13]], [[12], [15], [14]]]) - ``` - -## 索引赋值 - -对于形如: `tensor_x[index] = value`, `index`的类型支持`int`、`bool`、`ellipsis`、`slice`、`None`、`Tensor`、`List`、`Tuple`。 - -`value`的类型支持`Number`、`Tuple`、`List`和`Tensor`。被赋的值会首先被转换为张量,数据类型与原张量(`tensor_x`)相符。 - -当`value`为`Number`时,可以理解为将`tensor_x[index]`索引对应元素都更新为`Number`。 - -当`value`为数组,即只包含`Number`的`Tuple`、`List`或`Tensor`时,`value.shape`需要可以与`tensor_x[index].shape`做广播,将`value`广播到`tensor_x[index].shape`后,更新`tensor_x[index]`对应的值。 - -当`value`为`Tuple`或`List`时,若`value`中元素包含`Number`,`Tuple`,`List` 和 `Tensor`等多种类型,该`Tuple` 和 `List` 目前只支持1维。 - 
-当`value`为`Tuple`或`List`,且存在`Tensor`时,非`Tensor`的元素会首先被转换为`Tensor`,然后这些`Tensor`在`axis=0`轴上打包之后成为一个新的赋值`Tensor`,这时按照`value`为`Tensor`的规则进行赋值。所有`Tensor`的数据类型必须保持一致。 - -索引赋值可以理解为对索引到的位置元素按照一定规则进行赋值,所有索引赋值都不会改变原`Tensor`的`shape`。 - -> 当索引中有多个元素指向原张量的同一个位置时,该值的更新受底层算子限制,可能出现随机的情况。因此暂不支持索引中重复对张量中一个位置的值反复更新。详情请见:[TensorScatterUpdate 算子介绍](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.TensorScatterUpdate.html) -> -> 当前只支持单层索引(`tensor_x[index] = value`), 多层索引(`tensor_x[index1][index2]... = value`)暂不支持。 - -- `int`索引赋值 - - 支持单层`int`索引赋值:`tensor_x[int_index] = u`。 - - 示例如下: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_y = np.arange(2 *3).reshape((2, 3)).astype(np.float32) - tensor_z = np.arange(2* 3).reshape((2, 3)).astype(np.float32) - tensor_x[1] = 88.0 - tensor_y[1]= np.array([66, 88, 99]).astype(np.float32) - tensor_z[1] = (66, np.array(88).astype(np.int64), 99) - ``` - - 结果如下: - - ```text - tensor_x: Tensor(shape=[2, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [88.0, 88.0, 88.0]]) - tensor_y: Tensor(shape=[2, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [66.0, 88.0, 99.0]]) - tensor_z: Tensor(shape=[2, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [66.0, 88.0, 99.0]]) - ``` - -- `bool`索引赋值 - - 支持单层`bool`索引赋值:`tensor_x[bool_index] = u`。 - - 示例如下: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_y = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_z = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_x[True] = 88.0 - tensor_y[True]= np.array([66, 88, 99]).astype(np.float32) - tensor_z[True] = (66, 88, 99) - ``` - - 结果如下: - - ```text - tensor_x: Tensor(shape=[2, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [88.0, 88.0, 88.0]]) - tensor_y: Tensor(shape=[2, 3], dtype=Float32, value=[[66.0, 88.0, 99.0], [66.0, 88.0, 99.0]]) - tensor_z: Tensor(shape=[2, 3], dtype=Float32, value=[[66.0, 
88.0, 99.0], [66.0, 88.0, 99.0]]) - ``` - -- `ellipsis`索引赋值 - - 支持单层`ellipsis`索引赋值,单层`ellipsis`索引赋值:`tensor_x[...] = u`。 - - 示例如下: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_y = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_z = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_x[...] = 88.0 - tensor_y[...] = np.array([[22, 44, 55], [22, 44, 55]]) - tensor_z[...] = ([11, 22, 33], [44, 55, 66]) - ``` - - 结果如下: - - ```text - tensor_x: Tensor(shape=[2, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [88.0, 88.0, 88.0]]) - tensor_y: Tensor(shape=[2, 3], dtype=Float32, value=[[22.0, 44.0, 55.0], [22.0, 44.0, 55.0]]) - tensor_z: Tensor(shape=[2, 3], dtype=Float32, value=[[11., 22., 33.], [44., 55., 66.]]) - ``` - -- `slice`索引赋值 - - 支持单层`slice`索引赋值:`tensor_x[slice_index] = u`。 - - 示例如下: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_y = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_z = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_k = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_x[0:1] = 88.0 - tensor_y[0:2] = 88.0 - tensor_z[0:2] = np.array([[11, 12, 13], [11, 12, 13]]).astype(np.float32) - tensor_k[0:2] = ([11, 12, 13], (14, 15, 16)) - ``` - - 结果如下: - - ```text - tensor_x: Tensor(shape=[3, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]]) - tensor_y: Tensor(shape=[3, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [88.0, 88.0, 88.0], [6.0, 7.0, 8.0]]) - tensor_z: Tensor(shape=[3, 3], dtype=Float32, value=[[11.0, 12.0, 13.0], [11.0, 12.0, 13.0], [6.0, 7.0, 8.0]]) - tensor_k: Tensor(shape=[3, 3], dtype=Float32, value=[[11.0, 12.0, 13.0], [14.0, 15.0, 16.0], [6.0, 7.0, 8.0]]) - ``` - -- `None`索引赋值 - - 支持单层`None`索引赋值:`tensor_x[none_index] = u`。 - - 示例如下: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(2 * 
3).reshape((2, 3)).astype(np.float32) - tensor_y = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_z = np.arange(2 * 3).reshape((2, 3)).astype(np.float32) - tensor_x[None] = 88.0 - tensor_y[None]= np.array([66, 88, 99]).astype(np.float32) - tensor_z[None] = (66, 88, 99) - ``` - - 结果如下: - - ```text - tensor_x: Tensor(shape=[2, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [88.0, 88.0, 88.0]]) - tensor_y: Tensor(shape=[2, 3], dtype=Float32, value=[[66.0, 88.0, 99.0], [66.0, 88.0, 99.0]]) - tensor_z: Tensor(shape=[2, 3], dtype=Float32, value=[[66.0, 88.0, 99.0], [66.0, 88.0, 99.0]]) - ``` - -- `Tensor`索引赋值 - - 支持单层`Tensor`索引赋值,即`tensor_x[tensor_index] = u`。 - - 当前不支持索引Tensor为`bool`类型,只能为`mstype.int*`型。 - - 示例如下: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_y = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_z = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_index = np.array([[2, 0, 2], [0, 2, 0], [0, 2, 0]], np.int32) - tensor_x[tensor_index] = 88.0 - tensor_y[tensor_index] = np.array([11.0, 12.0, 13.0]).astype(np.float32) - tensor_z[tensor_index] = [11, 12, 13] - ``` - - 结果如下: - - ```text - tensor_x: Tensor(shape=[3, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [3.0, 4.0, 5.0], [88.0, 88.0, 88.0]]) - tensor_y: Tensor(shape=[3, 3], dtype=Float32, value=[[11.0, 12.0, 13.0], [3.0, 4.0, 5.0], [11.0, 12.0, 13.0]]) - tensor_z: Tensor(shape=[3, 3], dtype=Float32, value=[[11.0, 12.0, 13.0], [3.0, 4.0, 5.0], [11.0, 12.0, 13.0]]) - ``` - -- `List`索引赋值 - - 支持单层`List`索引赋值:`tensor_x[list_index] = u`。 - - `List`索引赋值和`List`索引取值对索引的支持一致。 - - 示例如下: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_y = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_index = np.array([[0, 1], [1, 0]]).astype(np.int32) - tensor_x[[0,1]] = 88.0 - tensor_y[[True, False, False]] = np.array([11, 12, 
13]).astype(np.float32) - ``` - - 结果如下: - - ```text - tensor_x: Tensor(shape=[3, 3], dtype=Float32, value=[[88.0, 88.0, 88.0], [88.0, 88.0, 88.0], [6.0, 7.0, 8.0]]) - tensor_y: Tensor(shape=[3, 3], dtype=Float32, value=[[11.0, 12.0, 13.0], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]]) - ``` - -- `Tuple`索引赋值 - - 支持单层`Tuple`索引赋值:`tensor_x[tuple_index] = u`。 - - `Tuple`索引赋值和`Tuple`索引取值对索引的支持一致,但不支持`Tuple`中包含`None`。 - - 示例如下: - - ```python - import mindspore.numpy as np - tensor_x = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_y = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_z = np.arange(3 * 3).reshape((3, 3)).astype(np.float32) - tensor_index = np.array([0, 1]).astype(np.int32) - tensor_x[1, 1:3] = 88.0 - tensor_y[1:3, tensor_index] = 88.0 - tensor_z[1:3, tensor_index] = np.array([11, 12]).astype(np.float32) - ``` - - 结果如下: - - ```text - tensor_x: Tensor(shape=[3, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [3.0, 88.0, 88.0], [6.0, 7.0, 8.0]]) - tensor_y: Tensor(shape=[3, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [88.0, 88.0, 5.0], [88.0, 88.0, 8.0]]) - tensor_z: Tensor(shape=[3, 3], dtype=Float32, value=[[0.0, 1.0, 2.0], [11.0, 12.0, 5.0], [11.0, 12.0, 8.0]]) - ``` - -## 索引增强赋值 - -增强索引赋值,支持`+=`、`-=`、`*=`、`/=`、`%=`、`**=`、`//=`七种类型,`index`与`value`的规则约束与索引赋值相同。索引值支持`int`、`bool`、`ellipsis`、`slice`、`None`、`Tensor`、`List`、`Tuple`八种类型,赋值支持`Number`、`Tensor`、`Tuple`、`List`四种类型。 - -索引增强赋值可以理解为对索引到的位置元素按照一定规则进行取值,取值所得再与`value`进行操作符运算,最终将运算结果进行赋值,所有索引增强赋值都不会改变原`Tensor`的`shape`。 - -> 当索引中有多个元素指向原张量的同一个位置时,该值的更新受底层算子限制,可能出现随机的情况。因此暂不支持索引中重复对张量中一个位置的值反复更新。详情请见:[TensorScatterUpdate 算子介绍](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.TensorScatterUpdate.html) -> -> 目前索引中包含 `True`、`False` 和 `None`的情况暂不支持. 
- -- 规则与约束 - - 与索引赋值相比,增加了取值与运算的过程。取值过程中`index`的约束规则与索引取值中`index`相同,支持`int`、`bool`、`Tensor`、`Slice`、`Ellipsis`、`None`、`List`与`Tuple`。上述几种类型的数据中所包含`int`值,需在`[-dim_size, dim_size-1]`闭合区间内。 - 运算过程中`value`的约束规则与索引赋值中`value`的约束规则相同,`value`类型需为(`Number`、`Tensor`、`List`、`Tuple`)之一,且`value`类型不是`Number`时, `value`的形状需要可以广播到`tensor_x[index]`的形状。 - - 示例如下: - - ```python - tensor_x = Tensor(np.arange(3 * 4).reshape(3, 4).astype(np.float32)) - tensor_y = Tensor(np.arange(3 * 4).reshape(3, 4).astype(np.float32)) - tensor_x[[0, 1], 1:3] += 2 - tensor_y[[1], ...] -= [4, 3, 2, 1] - ``` - - 结果如下: - - ```text - tensor_x: Tensor(shape=[3, 4], dtype=Float32, value=[[0.0, 3.0, 4.0, 3.0], [4.0, 7.0, 8.0, 7.0], [8.0, 9.0, 10.0, 11.0]]) - tensor_y: Tensor(shape=[3, 4], dtype=Float32, value=[[0.0, 1.0, 2.0, 3.0], [0.0, 2.0, 4.0, 6.0], [8.0, 9.0, 10.0, 11.0]]) - ``` - diff --git a/docs/note/source_zh_cn/network_list.rst b/docs/note/source_zh_cn/network_list.rst deleted file mode 100644 index e70ac86c49ea5a6903bff4ca49a9667b48b43500..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/network_list.rst +++ /dev/null @@ -1,7 +0,0 @@ -网络支持 -=========== - -.. 
toctree:: - :maxdepth: 1 - - network_list_ms \ No newline at end of file diff --git a/docs/note/source_zh_cn/network_list_ms.md b/docs/note/source_zh_cn/network_list_ms.md deleted file mode 100644 index 23b1775042bd8b1f3259614c14fe19e326bed535..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/network_list_ms.md +++ /dev/null @@ -1,110 +0,0 @@ -# MindSpore网络支持 - -`Linux` `Ascend` `GPU` `CPU` `模型开发` `中级` `高级` - - - -- [MindSpore网络支持](#mindspore网络支持) - - [Model Zoo](#model-zoo) - - - - - -## Model Zoo - -### 标准网络 - -| 领域 | 子领域 | 网络 | Ascend(Graph) | Ascend(PyNative) | GPU(Graph) | GPU(PyNative) | CPU(Graph) | CPU(PyNative)| -|:---- |:------- |:---- |:----: |:----: |:----: |:----: |:----: |:----: | -|计算机视觉(CV) | 图像分类(Image Classification) | [AlexNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/alexnet) | ✅ | ✅ | ✅ | ✅ | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [CNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/cnn_direction_model) | ✅ | | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [DenseNet100](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/densenet) | | | | | ✅ | ✅ | -| 计算机视觉(CV) | 图像分类(Image Classification) | [DenseNet121](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/densenet) | ✅ | ✅ | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [DPN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/dpn) | ✅ | | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [EfficientNet-B0](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/efficientnet) | | | ✅ | ✅ | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [GoogLeNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/googlenet) | ✅ | ✅ | ✅ | ✅ | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [InceptionV3](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/inceptionv3) | ✅ | | | | | | -| 计算机视觉(CV) | 
图像分类(Image Classification) | [InceptionV4](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/inceptionv4) | ✅ | | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [LeNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/lenet) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| 计算机视觉(CV) | 图像分类(Image Classification) | [LeNet(量化)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/lenet_quant) | ✅ | | ✅ | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [MobileNetV1](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/mobilenetv1) | ✅ | | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [MobileNetV2](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/mobilenetv2) | ✅ | ✅ | ✅ | ✅ | ✅ | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [MobileNetV2(量化)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/mobilenetv2_quant) | ✅ | | ✅ | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [MobileNetV3](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/mobilenetv3) | | | ✅ | ✅ | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [NASNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/nasnet) | | | ✅ | ✅ | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [ResNet-18](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnet) | ✅ | | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [ResNet-50](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnet) | ✅ | ✅ | ✅ | ✅ | ✅ | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [ResNet-50(量化)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnet50_quant) | ✅ | | | | | | -|计算机视觉(CV) | 图像分类(Image Classification) | [ResNet-101](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnet) | ✅ | ✅ | ✅ | ✅ | | | -|计算机视觉(CV) | 图像分类(Image Classification) | 
[ResNeXt50](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnext50) | ✅ | | ✅ | ✅ | | | -|计算机视觉(CV) | 图像分类(Image Classification) | [SE-ResNet50](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/resnet) | ✅ | ✅ | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [ShuffleNetV1](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/shufflenetv1) | ✅ | | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [ShuffleNetV2](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/shufflenetv2) | | | ✅ | ✅ | | | -| 计算机视觉(CV) | 图像分类(Image Classification) |[SqueezeNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/squeezenet) | ✅ | | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [Tiny-DarkNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/tinydarknet) | ✅ | | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [VGG16](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/vgg16) | ✅ | ✅ | ✅ | ✅ | | | -| 计算机视觉(CV) | 图像分类(Image Classification) | [Xception](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/xception) | ✅ | | | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [CenterFace](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/centerface) | ✅ | | | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [CTPN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/ctpn) | ✅ | | | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [Faster R-CNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/faster_rcnn) | ✅ | | ✅ | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [Mask R-CNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/maskrcnn) | ✅ | ✅ | | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) |[Mask R-CNN (MobileNetV1)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/maskrcnn_mobilenetv1) | ✅ | | | | | | -| 
计算机视觉(CV) | 目标检测(Object Detection) | [RetinaFace-ResNet50](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/retinaface_resnet50) | | | ✅ | ✅ | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [SSD](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/ssd) | ✅ | | ✅ | ✅ | ✅ | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [SSD-MobileNetV1-FPN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/ssd) | ✅ | | | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [SSD-Resnet50-FPN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/ssd) | ✅ | | | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [SSD-VGG16](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/ssd) | ✅ | | | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [WarpCTC](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/warpctc) | ✅ | | ✅ | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [YOLOv3-ResNet18](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/yolov3_resnet18) | ✅ | | | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [YOLOv3-DarkNet53](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/yolov3_darknet53) | ✅ | ✅ | ✅ | ✅ | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [YOLOv3-DarkNet53(量化)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/yolov3_darknet53_quant) | ✅ | | | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) |[YOLOv4](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/yolov4) | ✅ | | | | | | -| 计算机视觉(CV) | 文本检测(Text Detection) | [DeepText](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/deeptext) | ✅ | | | | | | -| 计算机视觉(CV) | 文本检测(Text Detection) | [PSENet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/psenet) | ✅ | ✅ | | | | | -| 计算机视觉(CV) | 文本识别(Text Recognition) | [CNN+CTC](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/cnnctc) | ✅ | ✅ 
| | | | | -| 计算机视觉(CV) | 语义分割(Semantic Segmentation) | [DeepLabV3](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/deeplabv3) | ✅ | | | | ✅ | | -| 计算机视觉(CV) | 语义分割(Semantic Segmentation) | [U-Net2D (Medical)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/unet) | ✅ | | | | | | -| 计算机视觉(CV) | 语义分割(Semantic Segmentation) | [U-Net3D (Medical)](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/unet3d) | ✅ | | | | | | -| 计算机视觉(CV) | 语义分割(Semantic Segmentation) | [U-Net++](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/unet) | ✅ | | | | | | -| 计算机视觉(CV) | 关键点检测(Keypoint Detection) |[OpenPose](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/openpose) | ✅ | | | | | | -| 计算机视觉(CV) | 关键点检测(Keypoint Detection) |[SimplePoseNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/simple_pose) | ✅ | | | | | | -| 计算机视觉(CV) | 光学字符识别(Optical Character Recognition) |[CRNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/crnn) | ✅ | | | | | | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | [BERT](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/bert) | ✅ | ✅ | ✅ | ✅ | | | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | [FastText](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/fasttext) | ✅ | | | | | | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | [GNMT v2](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/gnmt_v2) | ✅ | | | | | | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | [GRU](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/gru) | ✅ | | | | | | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | [MASS](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/mass) | ✅ | ✅ | ✅ | ✅ | | | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | 
[SentimentNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/lstm) | ✅ | | ✅ | ✅ | ✅ | ✅ | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | [Transformer](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/transformer) | ✅ | ✅ | ✅ | ✅ | | | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | [TinyBERT](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/tinybert) | ✅ | ✅ | ✅ | | | | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | [TextCNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/nlp/textcnn) | ✅ | | | | | | -| 推荐(Recommender) | 推荐系统、点击率预估(Recommender System, CTR prediction) | [DeepFM](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/recommend/deepfm) | ✅ | ✅ | ✅ | ✅| ✅ | | -| 推荐(Recommender) | 推荐系统、搜索、排序(Recommender System, Search, Ranking) | [Wide&Deep](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/recommend/wide_and_deep) | ✅ | ✅ | ✅ | ✅ | | | -| 推荐(Recommender) | 推荐系统(Recommender System) | [NAML](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/recommend/naml) | ✅ | | | | | | -| 推荐(Recommender) | 推荐系统(Recommender System) | [NCF](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/recommend/ncf) | ✅ | | ✅ | | | | -| 图神经网络(GNN) | 文本分类(Text Classification) | [GCN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/gnn/gcn) | ✅ | ✅ | | | | | -| 图神经网络(GNN) | 文本分类(Text Classification) | [GAT](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/gnn/gat) | ✅ | ✅ | | | | | -| 图神经网络(GNN) | 推荐系统(Recommender System) | [BGCF](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/gnn/bgcf) | ✅ | | | | | | - -### 研究网络 - -| 领域 | 子领域 | 网络 | Ascend(Graph) | Ascend(PyNative) | GPU(Graph) | GPU(PyNative) | CPU(Graph) | CPU(PyNative) | -|:---- |:------- |:---- |:----: |:----: |:----: |:----: |:----: |:----: | -| 计算机视觉(CV) | 图像分类(Image 
Classification) |[FaceAttributes](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/FaceAttribute) | ✅ | ✅ | | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [FaceDetection](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/FaceDetection) | ✅ | | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) |[FaceQualityAssessment](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/FaceQualityAssessment) | ✅ | ✅ | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) |[FaceRecognition](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/FaceRecognition) | ✅ | | | | | | -| 计算机视觉(CV) | 图像分类(Image Classification) |[FaceRecognitionForTracking](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/FaceRecognitionForTracking) | ✅ | | | | | | -| 计算机视觉(CV) | 目标检测(Object Detection) | [SSD-GhostNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/ssd_ghostnet) | ✅ | | | | | | -| 计算机视觉(CV) | 关键点检测(Key Point Detection) | [CenterNet](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/centernet) | ✅ | | | | ✅ | | -| 计算机视觉(CV) | 图像风格迁移(Image Style Transfer) | [CycleGAN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/CycleGAN) | | | | ✅ | ✅ | | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | [DS-CNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/nlp/dscnn) | ✅ | ✅ | | | | | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | [TextRCNN](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/nlp/textrcnn) | ✅ | | | | | | -| 自然语言处理(NLP) | 自然语言理解(Natural Language Understanding) | [TPRR](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/nlp/tprr) | ✅ | | | | | | -| 推荐(Recommender) | 推荐系统、点击率预估(Recommender System, CTR prediction) | [AutoDis](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/recommend/autodis) | ✅ | | | | | | -|语音(Audio) | 
音频标注(Audio Tagging) | [FCN-4](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/audio/fcn-4) | ✅ | | | | | | -|高性能计算(HPC) | 分子动力学(Molecular Dynamics) | [DeepPotentialH2O](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/hpc/molecular_dynamics) | ✅ | ✅| | | | | -|高性能计算(HPC) | 海洋模型(Ocean Model) | [GOMO](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/hpc/ocean_model) | | | ✅ | | | | - -> 你也可以使用 [MindWizard工具](https://gitee.com/mindspore/mindinsight/tree/master/mindinsight/wizard/) 快速生成经典网络脚本。 diff --git a/docs/note/source_zh_cn/operator_list.rst b/docs/note/source_zh_cn/operator_list.rst deleted file mode 100644 index 577064b22bc7821e36e902187892b83951d14392..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/operator_list.rst +++ /dev/null @@ -1,9 +0,0 @@ -算子支持 -=========== - -.. toctree:: - :maxdepth: 1 - - operator_list_ms - operator_list_implicit - operator_list_parallel \ No newline at end of file diff --git a/docs/note/source_zh_cn/operator_list_implicit.md b/docs/note/source_zh_cn/operator_list_implicit.md deleted file mode 100644 index 14a547832e9bc6931649cd4b80133a61417a2565..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/operator_list_implicit.md +++ /dev/null @@ -1,145 +0,0 @@ -# MindSpore隐式类型转换的算子支持 - -`Linux` `Ascend` `GPU` `CPU` `模型开发` `初级` `中级` `高级` - - - -- [MindSpore隐式类型转换的算子支持](#mindspore隐式类型转换的算子支持) - - [隐式类型转换](#隐式类型转换) - - [转换规则](#转换规则) - - [参与转换的数据类型](#参与转换的数据类型) - - [支持算子](#支持算子) - - - - - -## 隐式类型转换 - -### 转换规则 - -- 标量与Tensor运算:运算时,将标量自动转为Tensor,数据类型和参与运算的Tensor数据类型保持一致;当Tensor是bool数据类型,标量是int或float时,将标量和Tensor都转为数据类型为int32或float32的Tensor;当Tensor是int或者uint数据类型,标量是float时,将标量和Tensor都转为数据类型为float32的Tensor。 -- 不同数据类型Tensor运算:数据类型优先级排序为bool < uint8 < int8 < int16 < int32 < int64 < float16 < float32 < float64,运算时,先确定参与运算的Tensor中优先级相对最高的数据类型,然后将低优先级数据类型Tensor转换为相对最高优先级数据类型;而当int8和uint8数据类型的Tensor进行运算时,将其都转为int16的Tensor。 -- 
不支持对Parameter进行数据类型转换:如果按照转换规则推导,需要对网络中定义的Parameter进行数据类型转换时,会抛出RuntimeError异常。 - -### 参与转换的数据类型 - -- bool -- int8 -- uint8 -- int16 -- int32 -- int64 -- float16 -- float32 -- float64 - -### 支持算子 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
mindspore.ops.Addmindspore.ops.ApplyAdadeltamindspore.ops.ApplyAdagrad
mindspore.ops.ApplyAdagradV2mindspore.ops.ApplyAdaMaxmindspore.ops.ApplyAddSign
mindspore.ops.ApplyGradientDescentmindspore.ops.ApplyMomentummindspore.ops.ApplyPowerSign
mindspore.ops.ApplyProximalAdagradmindspore.ops.ApplyProximalGradientDescentmindspore.ops.ApproximateEqual
mindspore.ops.Assignmindspore.ops.AssignAddmindspore.ops.AssignSub
mindspore.ops.Atan2mindspore.ops.BitwiseAndmindspore.ops.BitwiseOr
mindspore.ops.BitwiseXormindspore.ops.Divmindspore.ops.DivNoNan
mindspore.ops.Equalmindspore.ops.FloorDivmindspore.ops.FloorMod
mindspore.ops.FusedSparseAdammindspore.ops.FusedSparseFtrlmindspore.ops.FusedSparseLazyAdam
mindspore.ops.FusedSparseProximalAdagradmindspore.ops.Greatermindspore.ops.GreaterEqual
mindspore.ops.Lessmindspore.ops.LessEqualmindspore.ops.LogicalAnd
mindspore.ops.LogicalOrmindspore.ops.Maximummindspore.ops.Minimum
mindspore.ops.Modmindspore.ops.Mulmindspore.ops.NotEqual
mindspore.ops.Powmindspore.ops.RealDivmindspore.ops.ScatterAdd
mindspore.ops.ScatterDivmindspore.ops.ScatterMaxmindspore.ops.ScatterMin
mindspore.ops.ScatterMulmindspore.ops.ScatterNdAddmindspore.ops.ScatterNdSub
mindspore.ops.ScatterNdUpdatemindspore.ops.ScatterNonAliasingAddmindspore.ops.ScatterSub
mindspore.ops.ScatterUpdatemindspore.ops.SparseApplyAdagradmindspore.ops.SparseApplyAdagradV2
mindspore.ops.SparseApplyFtrlmindspore.ops.SparseApplyFtrlV2mindspore.ops.SparseApplyProximalAdagrad
mindspore.ops.SquaredDifferencemindspore.ops.Submindspore.ops.TruncateDiv
mindspore.ops.TruncateModmindspore.ops.Xdivymindspore.ops.Xlogy
diff --git a/docs/note/source_zh_cn/operator_list_ms.md b/docs/note/source_zh_cn/operator_list_ms.md deleted file mode 100644 index 062704cbb9381e223b4c1502ce2171324f6ef526..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/operator_list_ms.md +++ /dev/null @@ -1,10 +0,0 @@ -# MindSpore算子支持 - -`Linux` `Ascend` `GPU` `CPU` `模型开发` `初级` `中级` `高级` - - - -您可根据需要,选择适用于您硬件平台的算子,构建网络模型。 - -- `mindspore.nn`模块支持的算子列表可在[mindspore.nn模块的API页面](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.nn.html)进行查阅。 -- `mindspore.ops`模块支持的算子列表可在[mindspore.ops模块的API页面](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.ops.html)进行查阅。 diff --git a/docs/note/source_zh_cn/operator_list_parallel.md b/docs/note/source_zh_cn/operator_list_parallel.md deleted file mode 100644 index b591893c9a5e3a124558a6c9d5c75f4344138b4d..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/operator_list_parallel.md +++ /dev/null @@ -1,124 +0,0 @@ -# MindSpore分布式算子支持 - -`Linux` `Ascend` `GPU` `CPU` `模型开发` `初级` `中级` `高级` - - - -- [MindSpore分布式算子支持](#mindspore分布式算子支持) - - [分布式算子](#分布式算子) - - - - - -## 分布式算子 - -| 操作名 | 约束 | -| :----------------------------------------------------------- | :----------------------------------------------------------- | -| [mindspore.ops.Abs](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Abs.html) | 无 | -| [mindspore.ops.ACos](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ACos.html) | 无 | -| [mindspore.ops.Acosh](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Acosh.html) | 无 | -| [mindspore.ops.Add](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Add.html) | 无 | -| [mindspore.ops.ApproximateEqual](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ApproximateEqual.html) | 无 | -| 
[mindspore.ops.ArgMaxWithValue](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ArgMaxWithValue.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | -| [mindspore.ops.ArgMinWithValue](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ArgMinWithValue.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | -| [mindspore.ops.Asin](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Asin.html) | 无 | -| [mindspore.ops.Asinh](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Asinh.html) | 无 | -| [mindspore.ops.Assign](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Assign.html) | 无 | -| [mindspore.ops.AssignAdd](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.AssignAdd.html) | 无 | -| [mindspore.ops.AssignSub](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.AssignSub.html) | 无 | -| [mindspore.ops.Atan](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Atan.html) | 无 | -| [mindspore.ops.Atan2](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Atan2.html) | 无 | -| [mindspore.ops.Atanh](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Atanh.html) | 无 | -| [mindspore.ops.BatchMatMul](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.BatchMatMul.html) | 不支持`transpose_a=True` | -| [mindspore.ops.BesselI0e](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.BesselI0e.html) | 无 | -| [mindspore.ops.BesselI1e](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.BesselI1e.html) | 无 | -| [mindspore.ops.BiasAdd](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.BiasAdd.html) | 无 | -| 
[mindspore.ops.BroadcastTo](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.BroadcastTo.html) | 无 | -| [mindspore.ops.Cast](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Cast.html) | Auto Parallel和Semi Auto Parallel模式下,配置策略不生效 | -| [mindspore.ops.Ceil](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Ceil.html) | 无 | -| [mindspore.ops.Concat](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Concat.html) | 输入(input_x)在轴(axis)所对应的维度不能切分,切分后,在数学逻辑上和单机不等价 | -| [mindspore.ops.Cos](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Cos.html) | 无 | -| [mindspore.ops.Cosh](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Cosh.html) | 无 | -| [mindspore.ops.Div](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Div.html) | 无 | -| [mindspore.ops.DivNoNan](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.DivNoNan.html) | 无 | -| [mindspore.ops.DropoutDoMask](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.DropoutDoMask.html) | 需和`DropoutGenMask`联合使用 | -| [mindspore.ops.DropoutGenMask](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.DropoutGenMask.html) | 需和`DropoutDoMask`联合使用,不支持配置切分策略 | -| [mindspore.ops.Elu](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Elu.html) | 无 | -| [mindspore.ops.EmbeddingLookup](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.EmbeddingLookup.html) | 同Gather | -| [mindspore.ops.Equal](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Equal.html) | 无 | -| [mindspore.ops.Erf](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Erf.html) | 无 | -| 
[mindspore.ops.Erfc](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Erfc.html) | 无 | -| [mindspore.ops.Exp](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Exp.html) | 无 | -| [mindspore.ops.ExpandDims](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ExpandDims.html) | 无 | -| [mindspore.ops.Expm1](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Expm1.html) | 无 | -| [mindspore.ops.Floor](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Floor.html) | 无 | -| [mindspore.ops.FloorDiv](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.FloorDiv.html) | 无 | -| [mindspore.ops.FloorMod](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.FloorMod.html) | 无 | -| [mindspore.ops.Gather](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Gather.html) | 仅支持1维和2维的input_params,并且input_params的最后一维要32字节对齐(出于性能考虑);不支持标量input_indices;参数在轴(axis)所在维度切分时,不支持重复计算;不支持input_indices和input_params同时进行切分 | -| [mindspore.ops.GeLU](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.GeLU.html) | 无 | -| [mindspore.ops.Greater](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Greater.html) | 无 | -| [mindspore.ops.GreaterEqual](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.GreaterEqual.html) | 无 | -| [mindspore.ops.Inv](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Inv.html) | 无 | -| [mindspore.ops.L2Normalize](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.L2Normalize.html) | 输入(input_x)在轴(axis)对应的维度不能切,切分后,在数学逻辑上和单机不等价 | -| [mindspore.ops.Less](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Less.html) | 无 | -| 
[mindspore.ops.LessEqual](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.LessEqual.html) | 无 | -| [mindspore.ops.LogicalAnd](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.LogicalAnd.html) | 无 | -| [mindspore.ops.LogicalNot](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.LogicalNot.html) | 无 | -| [mindspore.ops.LogicalOr](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.LogicalOr.html) | 无 | -| [mindspore.ops.Log](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Log.html) | 无 | -| [mindspore.ops.Log1p](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Log1p.html) | 无 | -| [mindspore.ops.LogSoftmax](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.LogSoftmax.html) | 输入(logits)在轴(axis)对应的维度不可切分,切分后,在数学逻辑上和单机不等价 | -| [mindspore.ops.MatMul](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.MatMul.html) | 不支持`transpose_a=True` | -| [mindspore.ops.Maximum](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Maximum.html) | 无 | -| [mindspore.ops.Minimum](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Minimum.html) | 无 | -| [mindspore.ops.Mod](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Mod.html) | 无 | -| [mindspore.ops.Mul](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Mul.html) | 无 | -| [mindspore.ops.Neg](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Neg.html) | 无 | -| [mindspore.ops.NotEqual](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.NotEqual.html) | 无 | -| [mindspore.ops.OneHot](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.OneHot.html) | 
仅支持输入(indices)是1维的Tensor,切分策略要配置输出的切分策略,以及第1和第2个输入的切分策略 | -| [mindspore.ops.OnesLike](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.OnesLike.html) | 无 | -| [mindspore.ops.Pow](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Pow.html) | 无 | -| [mindspore.ops.PReLU](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.PReLU.html) | weight的shape在非[1]的情况下,输入(input_x)的Channel维要和weight的切分方式一致 | -| [mindspore.ops.RealDiv](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.RealDiv.html) | 无 | -| [mindspore.ops.Reciprocal](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Reciprocal.html) | 无 | -| [mindspore.ops.ReduceMax](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ReduceMax.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | -| [mindspore.ops.ReduceMin](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ReduceMin.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | -| [mindspore.ops.ReduceSum](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ReduceSum.html) | 无 | -| [mindspore.ops.ReduceMean](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ReduceMean.html) | 无 | -| [mindspore.ops.ReLU](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ReLU.html) | 无 | -| [mindspore.ops.ReLU6](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ReLU6.html) | 无 | -| [mindspore.ops.ReLUV2](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ReLUV2.html) | 无 | -| [mindspore.ops.Reshape](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Reshape.html) | 不支持配置切分策略,并且,在自动并行模式下,当reshape算子后接有多个算子,不允许对这些算子配置不同的切分策略 | -| 
[mindspore.ops.Round](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Round.html) | 无 | -| [mindspore.ops.Rsqrt](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Rsqrt.html) | 无 | -| [mindspore.ops.Sigmoid](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Sigmoid.html) | 无 | -| [mindspore.ops.SigmoidCrossEntropyWithLogits](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.SigmoidCrossEntropyWithLogits.html) | 无 | -| [mindspore.ops.Sign](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Sign.html) | 无 | -| [mindspore.ops.Sin](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Sin.html) | 无 | -| [mindspore.ops.Sinh](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Sinh.html) | 无 | -| [mindspore.ops.Softmax](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Softmax.html) | 输入(logits)在轴(axis)对应的维度不可切分,切分后,在数学逻辑上和单机不等价 | -| [mindspore.ops.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.SoftmaxCrossEntropyWithLogits.html) | 输入(logits、labels)的最后一维不能切分;有两个输出,正向的loss只支持取[0] | -| [mindspore.ops.Softplus](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Softplus.html) | 无 | -| [mindspore.ops.Softsign](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Softsign.html) | 无 | -| [mindspore.ops.SparseGatherV2](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.SparseGatherV2.html) | 同GatherV2 | -| [mindspore.ops.Split](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Split.html) | 轴(axis)所对应的维度不能切分,切分后,在数学逻辑上和单机不等价 | -| [mindspore.ops.Sqrt](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Sqrt.html) | 无 | -| 
[mindspore.ops.Square](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Square.html) | 无 | -| [mindspore.ops.Squeeze](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Squeeze.html) | 无 | -| [mindspore.ops.Stack](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Stack.html) | 无 | -| [mindspore.ops.StridedSlice](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.StridedSlice.html) | 仅支持值为全0的mask;需要切分的维度必须全部提取;输入在strides不为1对应的维度不支持切分 | -| [mindspore.ops.Slice](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Slice.html) | 需要切分的维度必须全部提取 | -| [mindspore.ops.Sub](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Sub.html) | 无 | -| [mindspore.ops.Tan](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Tan.html) | 无 | -| [mindspore.ops.Tanh](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Tanh.html) | 无 | -| [mindspore.ops.Tile](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Tile.html) | 仅支持对multiples配置切分策略 | -| [mindspore.ops.TopK](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.TopK.html) | 最后一维不支持切分,切分后,在数学逻辑上和单机不等价 | -| [mindspore.ops.Transpose](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Transpose.html) | 无 | -| [mindspore.ops.Unique](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Unique.html) | 只支持重复计算的策略(1,) | -| [mindspore.ops.UnsortedSegmentSum](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.UnsortedSegmentSum.html) | 输入input_x和segment_ids的切分配置必须在segment_ids的维度上保持一致 | -| [mindspore.ops.UnsortedSegmentMin](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.UnsortedSegmentMin.html) | 
输入input_x和segment_ids的切分配置必须在segment_ids的维度上保持一致。注意:在segment id为空时,输出向量的对应位置会填充为输入类型的最大值。需要用户进行掩码处理,将最大值转换成0。否则容易造成数值溢出,导致通信算子上溢错误,从而引发Run Task Error | -| [mindspore.ops.UnsortedSegmentMax](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.UnsortedSegmentMax.html) | 输入input_x和segment_ids的切分配置必须在segment_ids的维度上保持一致。注意:在segment id为空时,输出向量的对应位置会填充为输入类型的最小值。需要用户进行掩码处理,将最小值转换成0。否则容易造成数值溢出,导致通信算子上溢错误,从而引发Run Task Error | -| [mindspore.ops.ZerosLike](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ZerosLike.html) | 无 | - -> 重复计算是指,机器没有用满,比如:集群有8张卡跑分布式训练,切分策略只对输入切成了4份。这种情况下会发生重复计算。 diff --git a/docs/note/source_zh_cn/roadmap.md b/docs/note/source_zh_cn/roadmap.md deleted file mode 100644 index 10bdc2bbc25a896b35ee4e93113f9b2a3e650ddc..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/roadmap.md +++ /dev/null @@ -1,91 +0,0 @@ -# 路标 - -`Linux` `Windows` `Ascend` `GPU` `CPU` `全流程` `框架开发` `中级` `高级` `贡献者` - - - -- [RoadMap](#roadmap) - - [预置模型](#预置模型) - - [易用性](#易用性) - - [性能优化](#性能优化) - - [架构演进](#架构演进) - - [MindInsight调试调优](#mindinsight调试调优) - - [MindArmour安全增强包](#mindarmour安全增强包) - - [推理框架](#推理框架) - - - - - -以下将展示MindSpore近一年的高阶计划,我们会根据用户的反馈诉求,持续调整计划的优先级。 - -总体而言,我们会努力在以下几个方面不断改进。 - -1. 提供更多的预置模型支持。 -2. 持续补齐API和算子库,改善易用性和编程体验。 -3. 提供华为昇腾AI处理器的全面支持,并不断优化性能及软件架构。 -4. 
完善可视化、调试调优、安全相关工具。 - -热忱希望各位在用户社区加入讨论,并贡献您的建议。 - -## 预置模型 - -- CV:目标检测、GAN、图像分割、姿态识别等场景经典模型。 -- NLP:RNN、Transformer类型神经网络,拓展基于Bert预训练模型的应用。 -- 其它:GNN、强化学习、概率编程、AutoML等。 - -## 易用性 - -- 补齐算子、优化器、Loss函数等各类API -- 完善Python语言原生表达支持 -- 支持常见的Tensor/Math操作 -- 增加更多的自动并行适用场景,提高策略搜索的准确性 - -## 性能优化 - -- 优化编译时间 -- 低比特混合精度训练/推理 -- 提升内存使用效率 -- 提供更多的融合优化手段 -- 加速PyNative执行性能 - -## 架构演进 - -- 图算融合优化:使用细粒度Graph IR表达算子,构成带算子边界的中间表达,挖掘更多图层优化机会。 -- 支持更多编程语言 -- 优化数据增强的自动调度及分布式训练数据缓存机制 -- 持续完善MindSpore IR -- Parameter Server模式分布式训练 - -## MindInsight调试调优 - -- 训练过程观察 - - 直方图 - - 计算图/数据图展示优化 - - 集成性能Profiling/Debugger工具 - - 支持多次训练间的对比 -- 训练结果溯源 - - 数据增强溯源对比 -- 训练过程诊断 - - 性能Profiling - - 基于图模型的Debugger - -## MindArmour安全增强包 - -- 测试模型的安全性 -- 提供模型安全性增强工具 -- 保护训练和推理过程中的数据隐私 - -## 推理框架 - -- 算子性能与完备度的持续优化 -- 支持语音模型推理 -- 端侧模型的可视化 -- Micro方案,适用于嵌入式系统的超轻量化推理, 支持ARM Cortex-A、Cortex-M硬件 -- 支持端侧重训及联邦学习 -- 端侧自动并行特性 -- 端侧MindData,包含图片Resize、像素数据转换等功能 -- 配套MindSpore混合精度量化训练(或训练后量化),实现混合精度推理,提升推理性能 -- 支持Kirin NPU、MTK APU等AI加速硬件 -- 支持多模型推理pipeline -- C++构图接口 diff --git a/docs/note/source_zh_cn/static_graph_syntax_support.md b/docs/note/source_zh_cn/static_graph_syntax_support.md deleted file mode 100644 index fcc51046dabdd39afafdee9a78f0309f5c1ce402..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/static_graph_syntax_support.md +++ /dev/null @@ -1,1178 +0,0 @@ -# 静态图语法支持 - -`Linux` `Ascend` `GPU` `CPU` `模型开发` `初级` `中级` `高级` - - - -- [静态图语法支持](#静态图语法支持) - - [概述](#概述) - - [数据类型](#数据类型) - - [Python内置数据类型](#python内置数据类型) - - [Number](#number) - - [String](#string) - - [List](#list) - - [Tuple](#tuple) - - [Dictionary](#dictionary) - - [MindSpore自定义数据类型](#mindspore自定义数据类型) - - [Tensor](#tensor) - - [Primitive](#primitive) - - [Cell](#cell) - - [运算符](#运算符) - - [算术运算符](#算术运算符) - - [赋值运算符](#赋值运算符) - - [逻辑运算符](#逻辑运算符) - - [成员运算符](#成员运算符) - - [身份运算符](#身份运算符) - - [表达式](#表达式) - - [条件控制语句](#条件控制语句) - - [单if](#单if) - - [并列if](#并列if) - - [嵌套if](#嵌套if) - - [循环语句](#循环语句) - - [for](#for) - - 
[while](#while) - - [并列while](#并列while) - - [嵌套while](#嵌套while) - - [循环嵌套条件控制语句](#循环嵌套条件控制语句) - - [if in for](#if-in-for) - - [if in while](#if-in-while) - - [函数定义语句](#函数定义语句) - - [def关键字](#def关键字) - - [lambda表达式](#lambda表达式) - - [函数](#函数) - - [Python内置函数](#python内置函数) - - [len](#len) - - [isinstance](#isinstance) - - [partial](#partial) - - [map](#map) - - [zip](#zip) - - [range](#range) - - [enumerate](#enumerate) - - [super](#super) - - [pow](#pow) - - [print](#print) - - [函数参数](#函数参数) - - [网络定义](#网络定义) - - [网络入参](#网络入参) - - [网络实例类型](#网络实例类型) - - [网络构造组件](#网络构造组件) - - [网络使用约束](#网络使用约束) - - - - - -## 概述 - -在Graph模式下,Python代码并不是由Python解释器去执行,而是将代码编译成静态计算图,然后执行静态计算图。 - -当前仅支持编译`@ms_function`装饰器修饰的函数、Cell及其子类的实例。 -对于函数,则编译函数定义;对于网络,则编译`construct`方法及其调用的其他方法或者函数。 - -`ms_function`使用规则可参考文档: - -`Cell`定义可参考文档: - -由于语法解析的限制,当前在编译构图时,支持的数据类型、语法以及相关操作并没有完全与Python语法保持一致,部分使用受限。 - -本文主要介绍,在编译静态图时,支持的数据类型、语法以及相关操作,这些规则仅适用于Graph模式。 - -> 以下所有示例都运行在Graph模式下的网络中,为了简洁,并未将网络的定义都写出来。 - -## 数据类型 - -### Python内置数据类型 - -当前支持的`Python`内置数据类型包括:`Number`、`String`、`List`、`Tuple`和`Dictionary`。 - -#### Number - -支持`int`、`float`、`bool`,不支持complex(复数)。 - -支持在网络里定义`Number`,即支持语法:`y = 1`、`y = 1.2`、 `y = True`。 - -不支持在网络里强转`Number`,即不支持语法:`y = int(x)`、`y = float(x)`、`y = bool(x)`。 - -#### String - -支持在网络里构造`String`,即支持语法`y = "abcd"`。 - -不支持在网络里强转`String`,即不支持语法 `y = str(x)`。 - -#### List - -支持在网络里构造`List`,即支持语法`y = [1, 2, 3]`。 - -不支持在网络里强转`List`,即不支持语法`y = list(x)`。 - -计算图中最终需要输出的`List`会转换为`Tuple`输出。 - -- 支持接口 - - `append`: 向`list`里追加元素。 - - 示例如下: - - ```python - x = [1, 2, 3] - x.append(4) - ``` - - 结果如下: - - ```text - x: (1, 2, 3, 4) - ``` - -- 支持索引取值和赋值 - - 支持单层和多层索引取值以及赋值。 - - 取值和赋值的索引值仅支持`int`。 - - 赋值时,所赋的值支持`Number`、`String`、`Tuple`、`List`、`Tensor`。 - - 示例如下: - - ```python - x = [[1, 2], 2, 3, 4] - - m = x[0][1] - x[1] = Tensor(np.array([1, 2, 3])) - x[2] = "ok" - x[3] = (1, 2, 3) - x[0][1] = 88 - n = x[-3] - ``` - - 结果如下: - - ```text - m: 2 - x: ([1, 88], Tensor(shape=[3], dtype=Int64, 
value=[1, 2, 3]), 'ok', (1, 2, 3)) - n: Tensor(shape=[3], dtype=Int64, value=[1, 2, 3]) - ``` - -#### Tuple - -支持在网络里构造`Tuple`,即支持语法`y = (1, 2, 3)`。 - -不支持在网络里强转`Tuple`,即不支持语法`y = tuple(x)`。 - -- 支持索引取值 - - 索引值支持`int`、`slice`、`Tensor`,也支持多层索引取值,即支持语法`data = tuple_x[index0][index1]...`。 - - 索引值为`Tensor`有如下限制: - - - `tuple`里存放的都是`Cell`,每个`Cell`要在tuple定义之前完成定义,每个`Cell`的入参个数、入参类型和入参`shape`要求一致,每个`Cell`的输出个数、输出类型和输出`shape`也要求一致。 - - - 索引`Tensor`是一个`dtype`为`int32`的标量`Tensor`,取值范围在`[-tuple_len, tuple_len)`,`Ascend`后端不支持负数索引。 - - - 该语法不支持`if`、`while`、`for`控制流条件为变量的运行分支,仅支持控制流条件为常量。 - - - 支持`GPU`和`Ascend`后端。 - - `int`、`slice`索引示例如下: - - ```python - x = (1, (2, 3, 4), 3, 4, Tensor(np.array([1, 2, 3]))) - y = x[1][1] - z = x[4] - m = x[1:4] - n = x[-4] - ``` - - 结果如下: - - ```text - y: 3 - z: Tensor(shape=[3], dtype=Int64, value=[1, 2, 3]) - m: ((2, 3, 4), 3, 4) - n: (2, 3, 4) - ``` - - `Tensor`索引示例如下: - - ```python - class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.relu = nn.ReLU() - self.softmax = nn.Softmax() - self.layers = (self.relu, self.softmax) - - def construct(self, x, index): - ret = self.layers[index](x) - return ret - ``` - -#### Dictionary - -支持在网络里构造`Dictionary`,即支持语法`y = {"a": 1, "b": 2}`,当前仅支持`String`作为`key`值。 - -计算图中最终需要输出的`Dictionary`,会取出所有的`value`组成`Tuple`输出。 - -- 支持接口 - - `keys`:取出`dict`里所有的`key`值,组成`Tuple`返回。 - - `values`:取出`dict`里所有的`value`值,组成`Tuple`返回。 - - 示例如下: - - ```python - x = {"a": Tensor(np.array([1, 2, 3])), "b": Tensor(np.array([4, 5, 6])), "c": Tensor(np.array([7, 8, 9]))} - y = x.keys() - z = x.values() - ``` - - 结果如下: - - ```text - y: ("a", "b", "c") - z: (Tensor(shape=[3], dtype=Int64, value=[1, 2, 3]), Tensor(shape=[3], dtype=Int64, value=[4, 5, 6]), Tensor(shape=[3], dtype=Int64, value=[7, 8, 9])) - ``` - -- 支持索引取值和赋值 - - 取值和赋值的索引值都仅支持`String`。赋值时,所赋的值支持`Number`、`Tuple`、`Tensor`。 - - 示例如下: - - ```python - x = {"a": Tensor(np.array([1, 2, 3])), "b": Tensor(np.array([4, 5, 6])), "c": Tensor(np.array([7, 8, 
9]))} - y = x["b"] - x["a"] = (2, 3, 4) - ``` - - 结果如下: - - ```text - y: Tensor(shape=[3], dtype=Int64, value=[4, 5, 6]) - x: {"a": (2, 3, 4), Tensor(shape=[3], dtype=Int64, value=[4, 5, 6]), Tensor(shape=[3], dtype=Int64, value=[7, 8, 9])} - ``` - -### MindSpore自定义数据类型 - -当前MindSpore自定义数据类型包括:`Tensor`、`Primitive`和`Cell`。 - -#### Tensor - -当前不支持在网络里构造Tensor,即不支持语法`x = Tensor(args...)`。 - -可以通过`@constexpr`装饰器修饰函数,在函数里生成`Tensor`。 - -关于`@constexpr`的用法可参考: - -对于网络中需要用到的常量`Tensor`,可以作为网络的属性,在`init`的时候定义,即`self.x = Tensor(args...)`,然后在`construct`里使用。 - -如下示例,通过`@constexpr`生成一个`shape = (3, 4), dtype = int64`的`Tensor`。 - -```python -@constexpr -def generate_tensor(): - return Tensor(np.ones((3, 4))) -``` - -下面将介绍下`Tensor`支持的属性和接口。 - -- 支持属性: - - `shape`:获取`Tensor`的shape,返回一个`Tuple`。 - - `dtype`:获取`Tensor`的数据类型,返回一个`MindSpore`定义的数据类型。 - -- 支持接口: - - `all`:对`Tensor`通过`all`操作进行归约,仅支持`Bool`类型的`Tensor`。 - - `any`:对`Tensor`通过`any`操作进行归约,仅支持`Bool`类型的`Tensor`。 - - `view`:将`Tensor`reshape成输入的`shape`。 - - `expand_as`:将`Tensor`按照广播规则扩展成与另一个`Tensor`相同的`shape`。 - - 示例如下: - - ```python - x = Tensor(np.array([[True, False, True], [False, True, False]])) - x_shape = x.shape - x_dtype = x.dtype - x_all = x.all() - x_any = x.any() - x_view = x.view((1, 6)) - - y = Tensor(np.ones((2, 3), np.float32)) - z = Tensor(np.ones((2, 2, 3))) - y_as_z = y.expand_as(z) - ``` - - 结果如下: - - ```text - x_shape: (2, 3) - x_dtype: Bool - x_all: Tensor(shape=[], dtype=Bool, value=False) - x_any: Tensor(shape=[], dtype=Bool, value=True) - x_view: Tensor(shape=[1, 6], dtype=Bool, value=[[True, False, True, False, True, False]]) - - y_as_z: Tensor(shape=[2, 2, 3], dtype=Float32, value=[[[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]]) - ``` - -#### Primitive - -当前支持在网络里构造`Primitive`及其子类的实例,即支持语法`reduce_sum = ReduceSum(True)`。 - -但在构造时,参数只能通过位置参数方式传入,不支持通过键值对方式传入,即不支持语法`reduce_sum = ReduceSum(keep_dims=True)`。 - -当前不支持在网络调用`Primitive`及其子类相关属性和接口。 - -`Primitive`定义可参考文档: - 
-当前已定义的`Primitive`可参考文档: - -#### Cell - -当前支持在网络里构造`Cell`及其子类的实例,即支持语法`cell = Cell(args...)`。 - -但在构造时,参数只能通过位置参数方式传入,不支持通过键值对方式传入,即不支持在语法`cell = Cell(arg_name=value)`。 - -当前不支持在网络调用`Cell`及其子类相关属性和接口,除非是在`Cell`自己的`contrcut`中通过`self`调用。 - -`Cell`定义可参考文档: - -当前已定义的`Cell`可参考文档: - -## 运算符 - -算术运算符和赋值运算符支持`Number`和`Tensor`运算,也支持不同`dtype`的`Tensor`运算。 - -之所以支持,是因为这些运算符会转换成同名算子进行运算,这些算子支持了隐式类型转换。 - -规则可参考文档: - -### 算术运算符 - -| 算术运算符 | 支持类型 | -| :--------- | :------------------------------------------------------------------------------------------------------ | -| `+` | `Number` + `Number`、`Tensor` + `Tensor`、`Tensor` + `Number`、`Tuple` + `Tuple`、`String` + `String`。 | -| `-` | `Number` - `Number`、`Tensor` - `Tensor`、`Tensor` - `Number`。 | -| `*` | `Number` \* `Number`、`Tensor` \* `Tensor`、`Tensor` \* `Number`。 | -| `/` | `Number` / `Number`、`Tensor` / `Tensor`、`Tensor` / `Number`。 | -| `%` | `Number` % `Number`、`Tensor` % `Tensor`、`Tensor` % `Number`。 | -| `**` | `Number` \*\* `Number`、`Tensor` \*\* `Tensor`、`Tensor` \*\* `Number`。 | -| `//` | `Number` // `Number`、`Tensor` // `Tensor`、`Tensor` // `Number`。 | -| `~` | `~Tensor[Bool]`。 | - -### 赋值运算符 - -| 赋值运算符 | 支持类型 | -| :--------- | :----------------------------------------------------------------------------------------------------------- | -| `=` | 标量、`Tensor` | -| `+=` | `Number` += `Number`、`Tensor` += `Tensor`、`Tensor` += `Number`、`Tuple` += `Tuple`、`String` += `String`。 | -| `-=` | `Number` -= `Number`、`Tensor` -= `Tensor`、`Tensor` -= `Number`。 | -| `*=` | `Number` \*= `Number`、`Tensor` \*= `Tensor`、`Tensor` \*= `Number`。 | -| `/=` | `Number` /= `Number`、`Tensor` /= `Tensor`、`Tensor` /= `Number`。 | -| `%=` | `Number` %= `Number`、`Tensor` %= `Tensor`、`Tensor` %= `Number`。 | -| `**=` | `Number` \*\*= `Number`、`Tensor` \*\*= `Tensor`、`Tensor` \*\*= `Number`。 | -| `//=` | `Number` //= `Number`、`Tensor` //= `Tensor`、`Tensor` //= `Number`。 | - -### 逻辑运算符 - -| 逻辑运算符 | 支持类型 | -| :--------- | 
:--------------------------------------------- | -| `and` | `Number` and `Number`、`Tensor` and `Tensor`。 | -| `or` | `Number` or `Number`、`Tensor` or`Tensor`。 | -| `not` | not `Number`、not `Tensor`、not `tuple`。 | - -### 成员运算符 - -| 成员运算符 | 支持类型 | -| :--------- | :---------------------------------------------------------------------------------------------------------------------------------------------------- | -| `in` | `Number` in `tuple`、`String` in `tuple`、`Tensor` in `Tuple`、`Number` in `List`、`String` in `List`、`Tensor` in `List`、`String` in `Dictionary`。 | -| `not in` | 与`in`相同。 | - -### 身份运算符 - -| 身份运算符 | 支持类型 | -| :--------- | :----------------------------------------- | -| `is` | 仅支持判断是`None`、 `True`或者`False`。 | -| `is not` | 仅支持判断不是`None`、 `True`或者`False`。 | - -## 表达式 - -### 条件控制语句 - -#### 单if - -使用方式: - -- `if (cond): statements...` - -- `x = y if (cond) else z` - -参数:`cond` -- 支持类型`Number`、`Tuple`、`List`、`String`、`None`、`Tensor`、`Function`,也可以是计算结果类型是其中之一的表达式。 - -限制: - -- 在构图时,如果`if`未能消除,则`if`分支`return`的数据类型和shape,与`if`分支外`return`的数据类型和shape必须一致。 - -- 当只有`if`时,`if`分支变量更新后数据类型和shape,与更新前数据类型和shape必须一致。 - -- 当即有`if`又有`else`时,`if`分支变量更新后数据类型和shape,与`else`分支更新后数据类型和shape必须一致。 - -- 不支持高阶微分场景。 - -- 不支持`elif`语句。 - -示例1: - -```python -if x > y: - return m -else: - return n -``` - -`if`分支返回的`m`和`else`分支返回的`n`,二者数据类型和shape必须一致。 - -示例2: - -```python -if x > y: - out = m -else: - out = n -return out -``` - -`if`分支更新后`out`和`else`分支更新后`out`,二者数据类型和shape必须一致。 - -#### 并列if - -使用方式: - -- `if (cond1):statements else:statements...if (cond2):statements...` - -参数:`cond1`、 `cond2`-- 与`单if`一致。 - -限制: - -- 继承`单if`所有限制。 - -- 计算图总`if`数量不超过50个。 - -- `if`数量过多会导致编译时间过长,减少`if`数量有助于提升编译效率。 - -示例: - -```python -if x > y: - out = x -else: - out = y -if z > x: - out = out + 1 -return out -``` - -#### 嵌套if - -使用方式: - -- `if (cond1):if (cond2):statements...` - -参数:`cond1`、 `cond2`-- 与`单if`一致。 - -限制: - -- 继承`单if`所有限制。 - -- 计算图`if`数量不超过50个。 - -- `if`数量过多会导致编译时间过长,减少`if`数量有助于提升编译效率。 - -示例: - 
-```python -if x > y: - z = z + 1 - if z > x: - return m -else: - return n -``` - -### 循环语句 - -#### for - -使用方式: - -- `for i in sequence` - -参数:`sequence` -- 遍历序列(`Tuple`、`List`) - -限制: - -- 图的算子数量和`for`循环的迭代次数成倍数关系,`for`循环迭代次数过大可能会导致图占用内存超过使用限制。 - -示例: - -```python -z = Tensor(np.ones((2, 3))) -x = (1, 2, 3) -for i in x: - z += i -return z -``` - -结果如下: - -```text -z: Tensor(shape=[2, 3], dtype=Int64, value=[[7, 7], [7, 7], [7, 7]]) -``` - -#### 单while - -使用方式: - -- `while (cond)` - -参数:`cond` -- 与`单if`一致。 - -限制: - -- 在构图时,如果`while`未能消除,则`while`内`return`的数据类型和shape,与`while`外`return`的数据类型和shape必须一致。 - -- `while`内变量更新后数据类型和shape,与更新前数据类型和shape必须一致。 - -- 不支持训练场景。 - -示例1: - -```python -while x < y: - x += 1 - return m -return n -``` - -`while`内返回的`m`和`while`外返回的`n`数据类型必须和shape一致。 - -示例2: - -```python -out = m -while x < y: - x += 1 - out = out + 1 -return out -``` - -`while`内,`out`更新后和更新前的数据类型和shape必须一致。 - -#### 并列while - -使用方式: - -- `while (cond1):statements while (cond2):statemetns...` - -参数:`cond1`、 `cond2`-- 与`单if`一致。 - -限制: - -- 继承`单while`所有限制。 - -- 并列`while`总数不超过50个。 - -- `while`数量过多会导致编译时间过长,减少`while`数量有助于提升编译效率。 - -示例: - -```python -out = m -while x < y: - x += 1 - out = out + 1 -while out > 10: - out -= 10 -return out -``` - -#### 嵌套while - -使用方式: - -- `while (cond1):while (cond2):statements...` - -参数:`cond1`、 `cond2`-- 与`单if`一致。 - -限制: - -- 继承`单while`所有限制。 - -- 嵌套`while`总数不超过50个。 - -- `while`数量过多会导致编译时间过长,减少`while`数量有助于提升编译效率。 - -示例: - -```python -out = m -while x < y: - while z < y: - z += 1 - out = out + 1 - x += 1 -return out -``` - -### 循环嵌套条件控制语句 - -#### if in for - -使用方式: - -- `for i in sequence:if (cond)` - -参数: - -`cond` -- 与`单if`一致。 - -`sequence` -- 遍历序列(`Tuple`、`List`) - -限制: - -- 继承`单if`所有限制。 - -- 继承`for`所有限制。 - -- `cond`为变量时,不能有`if (cond):return`、`if (cond):continue`、`if (cond):break`语句。 - -- `if`数量和`for`循环的迭代次数成倍数关系,`for`循环迭代次数过大可能会导致编译时间过长。 - -示例如下: - -```python -z = Tensor(np.ones((2, 3))) -x = (1, 2, 3) -for i in x: - if i < 3: - z += i 
-return z -``` - -结果如下: - -```text -z: Tensor(shape=[2, 3], dtype=Int64, value=[[4, 4], [4, 4], [4, 4]]) -``` - -#### if in while - -使用方式: - -- `while (cond1):if (cond2)` - -参数:`cond1`、 `cond2`-- 与`单if`一致。 - -限制: - -- 继承`单if`、`单while`所有限制。 - -- `cond2`为变量时,不能有`if (cond2):return`、`if (cond2):continue`、`if (cond2):break`语句。 - -示例: - -```python -out = m -while x < y: - if z > 2*x: - out = out + 1 - x += 1 -return out -``` - -### 函数定义语句 - -#### def关键字 - -用于定义函数。 - -使用方式: - -`def function_name(args): statements...` - -示例如下: - -```python -def number_add(x, y): - return x + y -ret = number_add(1, 2) -``` - -结果如下: - -```text -ret: 3 -``` - -#### lambda表达式 - -用于生成函数。 - -使用方式:`lambda x, y: x + y` - -示例如下: - -```python -number_add = lambda x, y: x + y -ret = number_add(2, 3) -``` - -结果如下: - -```text -ret: 5 -``` - -## 函数 - -### Python内置函数 - -当前支持的Python内置函数包括:`len`、`isinstance`、`partial`、`map`、`range`、`enumerate`、`super`和`pow`。 - -#### len - -功能:求序列的长度。 - -调用:`len(sequence)` - -入参:`sequence` -- `Tuple`、`List`、`Dictionary`或者`Tensor`。 - -返回值:序列的长度,类型为`int`。当入参是`Tensor`时,返回的是`Tensor`第0维的长度。 - -示例如下: - -```python -x = (2, 3, 4) -y = [2, 3, 4] -d = {"a": 2, "b": 3} -z = Tensor(np.ones((6, 4, 5))) -x_len = len(x) -y_len = len(y) -d_len = len(d) -z_len = len(z) -``` - -结果如下: - -```text -x_len: 3 -y_len: 3 -d_len: 2 -z_len: 6 - ``` - -#### isinstance - -功能:判断对象是否为类的实例。区别于算子Isinstance,该算子的第二个入参是MindSpore的dtype模块下定义的类型。 - -调用:`isinstance(obj, type)` - -入参: - -- `obj` -- MindSpore支持类型的一个实例。 - -- `type` -- `bool`、`int`、`float`、`str`、`list`、`tuple`、`dict`、`Tensor`、`Parameter`,或者是一个只包含这些类型的`tuple`。 - -返回值:`obj`为`type`的实例,返回`True`,否则返回`False`。 - -示例如下: - -```python -x = (2, 3, 4) -y = [2, 3, 4] -z = Tensor(np.ones((6, 4, 5))) -x_is_tuple = isinstance(x, tuple) -y_is_list= isinstance(y, list) -z_is_tensor = isinstance(z, Tensor) -``` - -结果如下: - -```text -x_is_tuple: True -y_is_list: True -z_is_tensor: True - ``` - -#### partial - -功能:偏函数,固定函数入参。 - -调用:`partial(func, arg, ...)` - -入参: - -- 
`func` -- 函数。 - -- `arg` -- 一个或多个要固定的参数,支持位置参数和键值对传参。 - -返回值:返回某些入参固定了值的函数。 - -示例如下: - -```python -def add(x, y): - return x + y - -add_ = partial(add, x=2) -m = add_(y=3) -n = add_(y=5) -``` - -结果如下: - -```text -m: 5 -n: 7 -``` - -#### map - -功能:根据提供的函数对一个或者多个序列做映射,由映射的结果生成一个新的序列。 -如果多个序列中的元素个数不一致,则生成的新序列与最短的那个长度相同。 - -调用:`map(func, sequence, ...)` - -入参: - -- `func` -- 函数。 - -- `sequence` -- 一个或多个序列(`Tuple`或者`List`)。 - -返回值:返回一个`Tuple`。 - -示例如下: - -```python -def add(x, y): - return x + y - -elements_a = (1, 2, 3) -elements_b = (4, 5, 6) -ret = map(add, elements_a, elements_b) -``` - -结果如下: - -```text -ret: (5, 7, 9) -``` - -#### zip - -功能:将多个序列中对应位置的元素打包成一个个元组,然后由这些元组组成一个新序列, -如果各个序列中的元素个数不一致,则生成的新序列与最短的那个长度相同。 - -调用:`zip(sequence, ...)` - -入参:`sequence` -- 一个或多个序列(`Tuple`或`List`)`。 - -返回值:返回一个`Tuple`。 - -示例如下: - -```python -elements_a = (1, 2, 3) -elements_b = (4, 5, 6) -ret = zip(elements_a, elements_b) -``` - -结果如下: - -```text -ret: ((1, 4), (2, 5), (3, 6)) -``` - -#### range - -功能:根据起始值、结束值和步长创建一个`Tuple`。 - -调用: - -- `range(start, stop, step)` - -- `range(start, stop)` - -- `range(stop)` - -入参: - -- `start` -- 计数起始值,类型为`int`,默认为0。 - -- `stop` -- 计数结束值,但不包括在内,类型为`int`。 - -- `step` -- 步长,类型为`int`,默认为1。 - -返回值:返回一个`Tuple`。 - -示例如下: - -```python -x = range(0, 6, 2) -y = range(0, 5) -z = range(3) -``` - -结果如下: - -```text -x: (0, 2, 4) -y: (0, 1, 2, 3, 4) -z: (0, 1, 2) -``` - -#### enumerate - -功能:生成一个序列的索引序列,索引序列包含数据和对应下标。 - -调用: - -- `enumerate(sequence, start)` - -- `enumerate(sequence)` - -入参: - -- `sequence` -- 一个序列(`Tuple`、`List`、`Tensor`)。 - -- `start` -- 下标起始位置,类型为`int`,默认为0。 - -返回值:返回一个`Tuple`。 - -示例如下: - -```python -x = (100, 200, 300, 400) -y = Tensor(np.array([[1, 2], [3, 4], [5 ,6]])) -m = enumerate(x, 3) -n = enumerate(y) -``` - -结果如下: - -```text -m: ((3, 100), (4, 200), (5, 300), (5, 400)) -n: ((0, Tensor(shape=[2], dtype=Int64, value=[1, 2])), (1, Tensor(shape=[2], dtype=Int64, value=[3, 4])), (2, Tensor(shape=[2], dtype=Int64, value=[5, 6]))) -``` 
- -#### super - -功能:用于调用父类(超类)的一个方法,一般在`super`之后调用父类的方法。 - -调用: - -- `super().xxx()` - -- `super(type, self).xxx()` - -入参: - -- `type` -- 类。 - -- `self` -- 对象。 - -返回值:返回父类的方法。 - -示例如下: - -```python -class FatherNet(nn.Cell): - def __init__(self, x): - super(FatherNet, self).__init__(x) - self.x = x - - def construct(self, x, y): - return self.x * x - - def test_father(self, x): - return self.x + x - -class SingleSubNet(FatherNet): -def __init__(self, x, z): - super(SingleSubNet, self).__init__(x) - self.z = z - -def construct(self, x, y): - ret_father_construct = super().construct(x, y) - ret_father_test = super(SingleSubNet, self).test_father(x) - return ret_father_construct, ret_father_test -``` - -#### pow - -功能:求幂。 - -调用:`pow(x, y)` - -入参: - -- `x` -- 底数, `Number`或`Tensor`。 - -- `y` -- 幂指数, `Number`或`Tensor`。 - -返回值:返回`x`的`y`次幂,`Number`或`Tensor`。 - -示例如下: - -```python -x = Tensor(np.array([1, 2, 3])) -y = Tensor(np.array([1, 2, 3])) -ret = pow(x, y) -``` - -结果如下: - -```text -ret: Tensor(shape=[3], dtype=Int64, value=[1, 4, 27])) -``` - -#### print - -功能:用于打印。 - -调用:`print(arg, ...)` - -入参:`arg` -- 要打印的信息(`int` 、`float`、`bool`、`String`或`Tensor`)。 -当打印的数据是`int`,`float`或者`bool`时,会将其包成一个`0-D`的tensor打印出来。 - -返回值:无返回值。 - -示例如下: - -```python -x = Tensor(np.array([1, 2, 3])) -y = 3 -print("x", x) -print("y", y) -``` - -结果如下: - -```text -x Tensor(shape=[3], dtype=Int64, value=[1, 2, 3])) -y Tensor(shape=[], dtype=Int64, value=3)) -``` - -### 函数参数 - -- 参数默认值:目前不支持默认值设为`Tensor`类型数据,支持`int`、`float`、`bool`、`None`、`str`、`tuple`、`list`、`dict`类型数据。 - -- 可变参数:支持带可变参数网络的推理和训练。 - -- 键值对参数:目前不支持带键值对参数的函数求反向。 - -- 可变键值对参数:目前不支持带可变键值对的函数求反向。 - -## 网络定义 - -### 网络入参 - -整网(最外层网络)入参支持`bool`、`int`、`float`、`Tensor`、`mstype.number(mstype.bool_、mstype.int、mstype.float、mstype.uint)`,以及只包含这些类型对象的`list`或者`tuple`,和`value`值是这些类型的`dict`。 - -在对整网入参求梯度的时候,会忽略非`Tensor`的入参,只计算`Tensor`入参的梯度,例如整网入参`(x, y, z)`中,`x`和`z`是`Tensor`,`y`是非`Tensor`时,在对整网入参求梯度的时候,只会计算`x`和`z`的梯度,返回`(grad_x, grad_z)`。 - 
-如果网络里要使用其他类型,可在初始化网络的时候,传入该类型对象,作为网络属性保存起来,然后在`construct`里使用。 -内层调用的网络入参无此限制。 - -示例如下: - -```python -class Net(nn.Cell): - def __init__(self, flag): - super(Net, self).__init__() - self.flag = flag - - def construct(self, x, y, z): - if self.flag == "ok": - return x + y + z - return x - y - z - -class GradNet(nn.Cell): - def __init__(self, net): - super(GradNet, self).__init__() - self.grad_all = C.GradOperation(get_all=True) - self.forward_net = net - - def construct(self, x, y, z): - return self.grad_all(self.forward_net)(x, y, z) - -flag = "ok" -input_x = Tensor(np.ones((2, 3)).astype(np.float32)) -input_y = 2 -input_z = Tensor(np.ones((2, 3)).astype(np.float32) * 2) - -net = Net(flag) -grad_net = GradNet(net) -ret = grad_net(input_x, input_y, input_z) -``` - -上面定义的Net网络里,在初始化时传入一个`str`,作为网络的属性保存起来,然后在`construct`里使用`self.flag`这个属性。 - -整网入参`x`和`z`是`Tensor`,`y`是`int`数,`grad_net`在对整网入参`(x, y, z)`求梯度时,会自动忽略`y`的梯度,只计算`x`和`z`的梯度,`ret = (grad_x, grad_z)`。 - -### 网络实例类型 - -- 带[@ms_function](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.html#mindspore.ms_function)装饰器的普通Python函数。 - -- 继承自[nn.Cell](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.Cell.html)的Cell子类。 - -### 网络构造组件 - -| 类别 | 内容 | -| :------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `Cell`实例 | [mindspore/nn/*](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.nn.html)、自定义[Cell](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.Cell.html)。 | -| `Cell`实例的成员函数 | Cell的construct中可以调用其他类成员函数。 | -| `dataclass`实例 | 使用@dataclass装饰的类。 | -| `Primitive`算子 | [mindspore/ops/operations/*](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.ops.html) | -| `Composite`算子 | 
[mindspore/ops/composite/*](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.ops.html) | -| `constexpr`生成算子 | 使用[@constexpr](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.constexpr.html)生成的值计算算子。 | -| 函数 | 自定义Python函数、前文中列举的系统函数。 | - -### 网络使用约束 - -1. 不允许修改网络的非`Parameter`类型数据成员。 - - 示例如下: - - ```python - class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.num = 2 - self.par = Parameter(Tensor(np.ones((2, 3, 4))), name="par") - - def construct(self, x, y): - return x + y - ``` - - 上面所定义的网络里,`self.num`不是一个`Parameter`,不允许被修改,而`self.par`是一个`Parameter`,可以被修改。 - -2. 当`construct`函数里,使用未定义的类成员时,不会像Python解释器那样抛出`AttributeError`,而是作为`None`处理。 - - 示例如下: - - ```python - class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - - def construct(self, x): - return x + self.y - ``` - - 上面所定义的网络里,`construct`里使用了并未定义的类成员`self.y`,此时会将`self.y`作为`None`处理。 diff --git a/docs/note/source_zh_cn/syntax_list.rst b/docs/note/source_zh_cn/syntax_list.rst deleted file mode 100644 index b2f9c6cbd7e84e9ff6f96abf393312df11fbc713..0000000000000000000000000000000000000000 --- a/docs/note/source_zh_cn/syntax_list.rst +++ /dev/null @@ -1,8 +0,0 @@ -语法支持 -=========== - -.. toctree:: - :maxdepth: 1 - - static_graph_syntax_support - index_support \ No newline at end of file diff --git a/docs/programming_guide/Makefile b/docs/programming_guide/Makefile deleted file mode 100644 index 1eff8952707bdfa503c8d60c1e9a903053170ba2..0000000000000000000000000000000000000000 --- a/docs/programming_guide/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source_zh_cn -BUILDDIR = build_zh_cn - -# Put it first so that "make" without argument is like "make help". 
-help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/programming_guide/requirements.txt b/docs/programming_guide/requirements.txt deleted file mode 100644 index 1755dcd967228348c2f9cb29bac44580af862770..0000000000000000000000000000000000000000 --- a/docs/programming_guide/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -sphinx >= 2.2.1, <= 2.4.4 -recommonmark -sphinx-markdown-tables -sphinx_rtd_theme -numpy -nbsphinx -IPython -jieba diff --git a/docs/programming_guide/source_en/_static/logo_notebook.png b/docs/programming_guide/source_en/_static/logo_notebook.png deleted file mode 100644 index 18c2e29e4b73ee428f70253feffdd855fdf0c422..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/_static/logo_notebook.png and /dev/null differ diff --git a/docs/programming_guide/source_en/_static/logo_source.png b/docs/programming_guide/source_en/_static/logo_source.png deleted file mode 100644 index 880f2bc87172daf487654c0ba4f1657c672bd2b8..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/_static/logo_source.png and /dev/null differ diff --git a/docs/programming_guide/source_en/advanced_usage_of_checkpoint.md b/docs/programming_guide/source_en/advanced_usage_of_checkpoint.md deleted file mode 100644 index b80005424cc741b1c61c8f50e33db7186a2290f2..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/advanced_usage_of_checkpoint.md +++ /dev/null @@ -1,428 +0,0 @@ -# Saving, Loading and Converting Models - -Translator: [franck23333](https://gitee.com/franck2333) - - - -- [Saving, Loading and Converting Models](#saving-loading-and-converting-models) - - [Summary](#summary) - - 
[Preparation](#preparation) - - [Advanced Usage](#advanced-usage) - - [Saving](#saving) - - [Saving CheckPoint Manually](#saving-checkpoint-manually) - - [Saving the Specified Cell](#saving-the-specified-cell) - - [Saving CheckPoint Asynchronously](#saving-checkpoint-asynchronously) - - [Saving Custom Parameter Dictionary](#saving-custom-parameter-dictionary) - - [Loading](#loading) - - [Strictly Match the Parameter Name](#strictly-match-the-parameter-name) - - [Filter Specified Prefix](#filter-specified-prefix) - - [Converting CheckPoints of Other Frameworks to MindSpore Format](#converting-checkpoints-of-other-frameworks-to-mindspore-format) - - - - - -## Summary - -In the process of model training or model loading, sometimes it is necessary to replace some optimizers or other hyperparameters -in the model file and the fully-connected layer changes in the classification function, but you do not want -to change too much, or start training the model from the beginning. In view of this situation, MindSpore provides advanced usages -of CheckPoint that only adjusts parts of the weight of the model, and applies the method to the model tuning process. - -For basic usage, please refer to [Saving Models](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#checkpoint). - -## Preparation - -This article takes the LeNet network as an example to introduce the operation methods of saving, loading and converting -models in Mindspore. - -Firstly, the following resources should be prepared: - -- MNIST dataset. - -- The pretrained model file of the LeNet network: `checkpoint-lenet_1-1875.ckpt`. - -- The data augmentation files `dataset_process.py`. For using the data augmentation method `create_dataset`, you can refer to the data agumentation method `create_dataset` defined in the official website [Implementing an Image Classification Application](https://www.mindspore.cn/tutorial/training/en/master/quick_start/quick_start.html). - -- Define the LeNet network. 
- -Execute the following codes to complete the first three preparations. - -```bash -mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test -wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate -wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate -wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate -wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate -wget https://mindspore-website.obs.myhuaweicloud.com/notebook/source-codes/dataset_process.py -N --no-check-certificate -wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/checkpoint_lenet-1_1875.zip --no-check-certificate -unzip -o checkpoint_lenet-1_1875.zip -``` - -Define the LeNet network, the specific definition process is as follows. 
- -```python -from mindspore.common.initializer import Normal -import mindspore.nn as nn - -class LeNet5(nn.Cell): - """Lenet network structure.""" - def __init__(self, num_class=10, num_channel=1): - """define the operator required""" - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16*5*5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - """use the preceding operators to construct networks""" - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x -``` - -## Advanced Usage - -### Saving - -#### Saving CheckPoint Manually - -Use `save_checkpoint` to save CheckPoint files manually. - -Application Scenarios: - -1. Saving the initial values of the network. - -2. Saving the specified network manually. - -Execute the following code, after training 100 batches of the dataset for the pretrained model `checkpoint_lenet-1_1875.ckpt`, -use `save_checkpoint` to save the model `mindspore_lenet.ckpt` manually. 
- -```python -from mindspore import Model, load_checkpoint, save_checkpoint, load_param_into_net -from mindspore import context, Tensor -from dataset_process import create_dataset -import mindspore.nn as nn - -network = LeNet5() -net_opt = nn.Momentum(network.trainable_params(), learning_rate=0.01, momentum=0.9) -net_loss = nn.loss.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') - -params = load_checkpoint("checkpoint_lenet-1_1875.ckpt") -load_param_into_net(network, params) - -net_with_criterion = nn.WithLossCell(network, net_loss) -train_net = nn.TrainOneStepCell(net_with_criterion, net_opt) -train_net.set_train() - -train_path = "./datasets/MNIST_Data/train" -ds_train = create_dataset(train_path) - -count = 0 -for item in ds_train.create_dict_iterator(): - input_data = item["image"] - labels = item["label"] - train_net(input_data, labels) - count += 1 - if count==100: - print(train_net.trainable_params()) - save_checkpoint(train_net, "mindspore_lenet.ckpt") - break -``` - -```text -[Parameter (name=conv1.weight), Parameter (name=conv2.weight), Parameter (name=fc1.weight), Parameter (name=fc1.bias), -Parameter (name=fc2.weight), Parameter (name=fc2.bias), Parameter (name=fc3.weight), Parameter (name=fc3.bias), -Parameter (name=learning_rate), Parameter (name=momentum), Parameter (name=moments.conv1.weight), -Parameter (name=moments.conv2.weight), Parameter (name=moments.fc1.weight), Parameter (name=moments.fc1.bias), -Parameter (name=moments.fc2.weight), Parameter (name=moments.fc2.bias), Parameter (name=moments.fc3.weight), -Parameter (name=moments.fc3.bias)] -``` - -The weight parameters of `mindspore_lenet.ckpt` can be seen from the above printed information, including the weight -parameters of each hidden layer, learning rate, optimization rate in the process of forward propagation and the weight of optimizer function -in back propagation. - -#### Saving the Specified Cell - -Usage: The parameter `saved_network` of the Class `CheckpointConfig`. 
- -Application Scenarios: - -- Only save the parameters of the inference network model (Not saving the optimizer's parameters halves the size of the generated CheckPoint file) - -- Save subnet parameters to be used to the Fine-tune tasks. - -Use the method `CheckpointConfig` in the callback function and specify the cell of the saved model as `network`, that is, the forward propagation LeNet network. - -```python -import os -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor - -ds_train = create_dataset(train_path) -epoch_size = 1 -model = Model(train_net) -config_ck = CheckpointConfig(saved_network=network) -ckpoint = ModelCheckpoint(prefix="lenet", config=config_ck) -model.train(epoch_size, ds_train, callbacks=[ckpoint, LossMonitor(625)]) -``` - -```text -epoch: 1 step: 625, loss is 0.116291314 -epoch: 1 step: 1250, loss is 0.09527888 -epoch: 1 step: 1875, loss is 0.23090823 -``` - -The model is saved to the model file`lenet-1_1875.ckpt` after training. Next, compare the size and specific weight between the cell of the specified saved model and the original model. 
- -```python -model_with_opt = os.path.getsize("./checkpoint_lenet-1_1875.ckpt") // 1024 -params_without_change = load_checkpoint("./checkpoint_lenet-1_1875.ckpt") -print("with_opt size:", model_with_opt, "kB") -print(params_without_change) - -print("\n=========after train===========\n") -model_without_opt = os.path.getsize("./lenet-1_1875.ckpt") // 1024 -params_with_change = load_checkpoint("./lenet-1_1875.ckpt") -print("without_opt size:", model_without_opt, "kB") -print(params_with_change) -``` - -```text -with_opt size: 482 kB -{'conv1.weight': Parameter (name=conv1.weight), 'conv2.weight': Parameter (name=conv2.weight), -'fc1.weight': Parameter (name=fc1.weight), 'fc1.bias': Parameter (name=fc1.bias), -'fc2.weight': Parameter (name=fc2.weight), 'fc2.bias': Parameter (name=fc2.bias), -'fc3.weight': Parameter (name=fc3.weight), 'fc3.bias': Parameter (name=fc3.bias), -'learning_rate': Parameter (name=learning_rate), 'momentum': Parameter (name=momentum), -'moments.conv1.weight': Parameter (name=moments.conv1.weight), 'moments.conv2.weight': Parameter (name=moments.conv2.weight), -'moments.fc1.weight': Parameter (name=moments.fc1.weight), 'moments.fc1.bias': Parameter (name=moments.fc1.bias), -'moments.fc2.weight': Parameter (name=moments.fc2.weight), 'moments.fc2.bias': Parameter (name=moments.fc2.bias), -'moments.fc3.weight': Parameter (name=moments.fc3.weight), 'moments.fc3.bias': Parameter (name=moments.fc3.bias)} -=========after train=========== -without_opt size: 241 kB -{'conv1.weight': Parameter (name=conv1.weight), 'conv2.weight': Parameter (name=conv2.weight), -'fc1.weight': Parameter (name=fc1.weight), 'fc1.bias': Parameter (name=fc1.bias), 'fc2.weight': Parameter (name=fc2.weight), -'fc2.bias': Parameter (name=fc2.bias), 'fc3.weight': Parameter (name=fc3.weight), 'fc3.bias': Parameter (name=fc3.bias)} -``` - -The weight file size of the saved model`lenet-1_1875.ckpt` is 241kB after training. 
Compared with the size 482kB of the original full model, the overall reduction is nearly half; - -The specific comparison of the parameters in the model shows that the learning rate, optimization rate and the weight parameters related to reverse optimization are reduced in `lenet-1_1875.ckpt` compared with the parameters in `checkpoint_lenet-1_1875.ckpt`, and only the weight parameters of the forward propagation network LeNet are retained. It meets the expected result. - -#### Saving CheckPoint Asynchronously - -Usage: The parameters`async_save` of the Class`CheckpointConfig`. - -Application Scenarios: The trained model has a large amount of parameters, and it can be saved during training, including the writing time that saves the CheckPoint file. - -```python -config_ck = CheckpointConfig(async_save=True) -ckpoint = ModelCheckpoint(prefix="lenet", config=config_ck) -model.train(epoch_size, ds_train, callbacks=ckpoint) -``` - -#### Saving Custom Parameter Dictionary - -Usage: Construct an object `obj_dict` and pass it to the method `save_checkpoint`. - -Application Scenarios: - -- During the training process, additional parameters (`lr`, `epoch_size`, etc.) need to be saved to CheckPoint files. - -- Resave after modifying the parameters values in CheckPoint. - -- Convert the CheckPoint files of PyTorch and TensorFlow into the CheckPoint files of MindSpore. - -There are two situations according to specific scenarios: - -1. When the CheckPoint file exists, resave it after modifying the content. - - ```python - params = load_checkpoint("./lenet-1_1875.ckpt") - - # eg: param_list = [{"name": param_name, "data": param_data},...] 
- param_list = [{"name": k, "data":v} for k,v in params.items()] - print("==========param_list===========\n") - print(param_list) - - # del element - del param_list[2] - print("\n==========after delete param_list[2]===========\n") - print(param_list) - - - # add element "epoch_size" - param = {"name": "epoch_size"} - param["data"] = Tensor(10) - param_list.append(param) - print("\n==========after add element===========\n") - print(param_list) - - # modify element - param_list[3]["data"] = Tensor(66) - # save a new checkpoint file - print("\n==========after modify element===========\n") - print(param_list) - - save_checkpoint(param_list, 'modify.ckpt') - ``` - - ```text - ==========param_list=========== - [{'name': 'conv1.weight', 'data': Parameter (name=conv1.weight)}, {'name': 'conv2.weight', 'data': Parameter (name=conv2.weight)}, - {'name': 'fc1.weight', 'data': Parameter (name=fc1.weight)}, {'name': 'fc1.bias', 'data': Parameter (name=fc1.bias)}, - {'name': 'fc2.weight', 'data': Parameter (name=fc2.weight)}, {'name': 'fc2.bias', 'data': Parameter (name=fc2.bias)}, - {'name': 'fc3.weight', 'data': Parameter (name=fc3.weight)}, {'name': 'fc3.bias', 'data': Parameter (name=fc3.bias)}] - ==========after delete param_list[2]=========== - [{'name': 'conv1.weight', 'data': Parameter (name=conv1.weight)}, {'name': 'conv2.weight', 'data': Parameter (name=conv2.weight)}, - {'name': 'fc1.bias', 'data': Parameter (name=fc1.bias)}, {'name': 'fc2.weight', 'data': Parameter (name=fc2.weight)}, - {'name': 'fc2.bias', 'data': Parameter (name=fc2.bias)}, {'name': 'fc3.weight', 'data': Parameter (name=fc3.weight)}, - {'name': 'fc3.bias', 'data': Parameter (name=fc3.bias)}] - ==========after add element=========== - [{'name': 'conv1.weight', 'data': Parameter (name=conv1.weight)}, {'name': 'conv2.weight', 'data': Parameter (name=conv2.weight)}, - {'name': 'fc1.bias', 'data': Parameter (name=fc1.bias)}, {'name': 'fc2.weight', 'data': Parameter (name=fc2.weight)}, - {'name': 
'fc2.bias', 'data': Parameter (name=fc2.bias)}, {'name': 'fc3.weight', 'data': Parameter (name=fc3.weight)}, - {'name': 'fc3.bias', 'data': Parameter (name=fc3.bias)}, {'name': 'epoch_size', 'data': Tensor(shape=[], dtype=Int64, value= 10)}] - ==========after modify element=========== - [{'name': 'conv1.weight', 'data': Parameter (name=conv1.weight)}, {'name': 'conv2.weight', 'data': Parameter (name=conv2.weight)}, - {'name': 'fc1.bias', 'data': Parameter (name=fc1.bias)}, {'name': 'fc2.weight', 'data': Tensor(shape=[], dtype=Int64, value= 66)}, - {'name': 'fc2.bias', 'data': Parameter (name=fc2.bias)}, {'name': 'fc3.weight', 'data': Parameter (name=fc3.weight)}, - {'name': 'fc3.bias', 'data': Parameter (name=fc3.bias)}, {'name': 'epoch_size', 'data': Tensor(shape=[], dtype=Int64, value= 10)}] - ``` - - After the loaded model file is converted to the list type, the model parameters can be deleted, added, modified, etc., - and manually saved with `save_checkpoint` to complete the modification of the content of the model weight file. - -2. Save the custom parameters list as a CheckPoint file. - - ```python - param_list = [] - # save epoch_size - param = {"name": "epoch_size"} - param["data"] = Tensor(10) - param_list.append(param) - - # save learning rate - param = {"name": "learning_rate"} - param["data"] = Tensor(0.01) - param_list.append(param) - # save a new checkpoint file - print(param_list) - - save_checkpoint(param_list, 'hyperparameters.ckpt') - ``` - - ```text - [{'name': 'epoch_size', 'data': Tensor(shape=[], dtype=Int64, value= 10)}, {'name': 'learning_rate', - 'data': Tensor(shape=[], dtype=Float64, value= 0.01)}] - ``` - -### Loading - -#### Strictly Match the Parameter Name - -When the weight parameters in the CheckPoint file are loaded into the `net`, the parameters with the same name in `net` and CheckPoint will be matched firstly. 
- -After the matching is completed, if there are some parameters that are not loaded in the net, it will match the parameters with the same suffix name in ckpt and net. - -For example: The parameter value named `conv.0.weight` in CheckPoint will be loaded into the parameter named `net.conv.0.weight` in net. - -If you want to cancel this kind of fuzzy matching, only adopt the strict matching mechanism, you can set it by the parameter `strict_load` in the method `load_param_into_net`. - -The default is False, which means the fuzzy matching mechanism is adopted. - -```python -net = LeNet5() -params = load_checkpoint("lenet-1_1875.ckpt") -load_param_into_net(net, params, strict_load=True) -print("==========strict load mode===========") -print(params) -``` - -```text -==========strict load mode=========== -{'conv1.weight': Parameter (name=conv1.weight), 'conv2.weight': Parameter (name=conv2.weight), 'fc1.weight': Parameter (name=fc1.weight), -'fc1.bias': Parameter (name=fc1.bias), 'fc2.weight': Parameter (name=fc2.weight), 'fc2.bias': Parameter (name=fc2.bias), -'fc3.weight': Parameter (name=fc3.weight), 'fc3.bias': Parameter (name=fc3.bias)} -``` - -#### Filter Specified Prefix - -Usage: The parameter `filter_prefix` of `load_checkpoint`. - -Application Scenarios: When loading the CheckPoint, you want to filter certain parameters that contain the specific prefix. - -- When loading the CheckPoint, do not load the parameters in the optimizer (for example:filter_prefix=’moments’). - -- Do not load the parameters of the convolutional layers (for example:filter_prefix=’conv1’). 
- -```python -net = LeNet5() -print("=============net params=============") -params = load_checkpoint("checkpoint_lenet-1_1875.ckpt") -load_param_into_net(net, params) -print(params) - -net = LeNet5() -print("\n=============after filter_prefix moments=============") -params = load_checkpoint("checkpoint_lenet-1_1875.ckpt", filter_prefix='moments') -load_param_into_net(net, params) -print(params) -``` - -```text -=============net params============= -{'conv1.weight': Parameter (name=conv1.weight), 'conv2.weight': Parameter (name=conv2.weight), 'fc1.weight': Parameter (name=fc1.weight), -'fc1.bias': Parameter (name=fc1.bias), 'fc2.weight': Parameter (name=fc2.weight), 'fc2.bias': Parameter (name=fc2.bias), -'fc3.weight': Parameter (name=fc3.weight), 'fc3.bias': Parameter (name=fc3.bias), 'learning_rate': Parameter (name=learning_rate), -'momentum': Parameter (name=momentum), 'moments.conv1.weight': Parameter (name=moments.conv1.weight), 'moments.conv2.weight': -Parameter (name=moments.conv2.weight), 'moments.fc1.weight': Parameter (name=moments.fc1.weight), 'moments.fc1.bias': -Parameter (name=moments.fc1.bias), 'moments.fc2.weight': Parameter (name=moments.fc2.weight), 'moments.fc2.bias': -Parameter (name=moments.fc2.bias), 'moments.fc3.weight': Parameter (name=moments.fc3.weight), 'moments.fc3.bias': Parameter (name=moments.fc3.bias)} -=============after filter_prefix moments============= -{'conv1.weight': Parameter (name=conv1.weight), 'conv2.weight': Parameter (name=conv2.weight), 'fc1.weight': Parameter (name=fc1.weight), -'fc1.bias': Parameter (name=fc1.bias), 'fc2.weight': Parameter (name=fc2.weight), 'fc2.bias': Parameter (name=fc2.bias), -'fc3.weight': Parameter (name=fc3.weight), 'fc3.bias': Parameter (name=fc3.bias), 'learning_rate': Parameter (name=learning_rate), 'momentum': Parameter (name=momentum)} -``` - -Using the mechanism of filtering prefixes, you can filter out the parameters that you do not want to load (in this case, it is the optimizer weight 
parameter). When performing Fine-tune, you can use other optimizers to optimize.
-
-> The above is the advanced usage of MindSpore checkpoint; all the above usages can be used together.
-
-### Converting CheckPoints of Other Frameworks to MindSpore Format
-
-Convert CheckPoint files of other frameworks to the MindSpore format.
-
-In general, the parameter names and parameter values are saved in the CheckPoint file. After invoking the loading interface
-of the corresponding framework and obtaining the parameter names and values, construct the object according to the MindSpore format,
-and then you can directly invoke the MindSpore interface to save it as a CheckPoint file in the MindSpore format.
-
-The main work is to compare the parameter names between the two frameworks, so that every parameter name in one framework's network corresponds to a parameter name in the other (a map can be used for the mapping).
-The following code only transforms the parameter format; it does not include the mapping of the corresponding parameter names. 
- -```python -import torch -from mindspore import Tensor, save_checkpoint - -def pytorch2mindspore(default_file = 'torch_resnet.pth'): - """read pth file""" - par_dict = torch.load(default_file)['state_dict'] - params_list = [] - for name in par_dict: - param_dict = {} - parameter = par_dict[name] - param_dict['name'] = name - param_dict['data'] = Tensor(parameter.numpy()) - params_list.append(param_dict) - save_checkpoint(params_list, 'ms_resnet.ckpt') -``` diff --git a/docs/programming_guide/source_en/api_structure.ipynb b/docs/programming_guide/source_en/api_structure.ipynb deleted file mode 100644 index 1d27db153c51262b56cc4355daa27159305456f9..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/api_structure.ipynb +++ /dev/null @@ -1,131 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "acquired-authorization", - "metadata": {}, - "source": [ - "# MindSpore API Overview\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_en/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_en/api_structure.ipynb)" - ] - }, - { - "cell_type": "markdown", - "id": "instant-configuration", - "metadata": {}, - "source": [ - "## Overall Architecture\n", - "\n", - "MindSpore is a deep learning framework in all scenarios, aiming to achieve easy development, efficient execution, and all-scenario coverage. Easy development features include API friendliness and low debugging difficulty. Efficient execution includes computing efficiency, data preprocessing efficiency, and distributed training efficiency. All-scenario coverage means that the framework supports cloud, edge, and device scenarios.\n", - "\n", - "The overall architecture of MindSpore consists of the Mind Expression (ME), Graph Engine (GE), and backend runtime. 
ME provides user-level APIs for scientific computing, building and training neural networks, and converting Python code of users into graphs. GE is a manager of operators and hardware resources, and is responsible for controlling execution of graphs received from ME. Backend runtime includes efficient running environments, such as the CPU, GPU, Ascend AI processors, and Android/iOS, on the cloud, edge, and device. For more information about the overall architecture, see [Overall Architecture](https://www.mindspore.cn/doc/note/en/master/design/mindspore/architecture.html).\n", - "\n", - "## Design Concept\n", - "\n", - "MindSpore originates from the best practices of the entire industry and provides unified model training, inference, and export APIs for data scientists and algorithm engineers. It supports flexible deployment in different scenarios such as the device, edge, and cloud, and promotes the prosperity of domains such as deep learning and scientific computing.\n", - "\n", - "MindSpore provides the Python programming paradigm. Users can use the native control logic of Python to build complex neural network models, simplifying AI programming. For details, see [Implementing an Image Classification Application](https://www.mindspore.cn/tutorial/training/en/master/quick_start/quick_start.html).\n", - "\n", - "Currently, there are two execution modes of a mainstream deep learning framework: a static graph mode and a dynamic graph mode. The static graph mode has a relatively high training performance, but is difficult to debug. On the contrary, the dynamic graph mode is easy to debug, but is difficult to execute efficiently. MindSpore provides an encoding mode that unifies dynamic and static graphs, which greatly improves the compatibility between static and dynamic graphs. Instead of developing multiple sets of code, users can switch between the dynamic and static graph modes by changing only one line of code. 
For example, set `context.set_context(mode=context.PYNATIVE_MODE)` to switch to the dynamic graph mode, or set `context.set_context(mode=context.GRAPH_MODE)` to switch to the static graph mode, which facilitates development and debugging, and improves performance experience.\n", - "\n", - "A neural network model is usually trained based on gradient descent algorithm, but the manual derivation process is complex and the result is prone to errors. The automatic differentiation mechanism of MindSpore based on source code transformation (SCT) uses a functional differential programming architecture and provides Python APIs at the API layer, including the expression of control flows. Users can focus on the native mathematical expression of the model algorithm without manual derivation. The sample code for automatic differentiation is as follows:\n", - "\n", - "> This example is applicable to GPU and Ascend environments." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "similar-fashion", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "12.0\n" - ] - } - ], - "source": [ - "import mindspore as ms\n", - "from mindspore import ops\n", - "\n", - "\n", - "grad_all = ops.composite.GradOperation()\n", - "\n", - "\n", - "def func(x): return x * x * x\n", - "\n", - "\n", - "def df_func(x):\n", - " return grad_all(func)(x)\n", - "\n", - "\n", - "@ms.ms_function\n", - "def df2_func(x):\n", - " return grad_all(df_func)(x)\n", - "\n", - "\n", - "if __name__ == \"__main__\":\n", - " print(df2_func(ms.Tensor(2, ms.float32)))" - ] - }, - { - "cell_type": "markdown", - "id": "noble-video", - "metadata": {}, - "source": [ - "In the first step, a function (computational graph) is defined. In the second step, automatic differentiation is performed by using a backward API provided by MindSpore, and the first derivative function (computational graph) is defined. 
In the third step, the second derivative function (computational graph) is defined. After the input is given, the second derivative of the function defined in step 1 can be obtained at the specified position. The result of the second derivative is `12`.\n", - "\n", - "In addition, the SCT can convert Python code into an intermediate representation (IR) of a MindSpore function. The IR constructs a computational graph that can be parsed and executed on different devices. Before the computational graph is executed, a plurality of software and hardware collaborative optimization technologies are used, and performance and efficiency in different scenarios such as device, edge, and cloud, are improved.\n", - "\n", - "Improving the data processing capability to match the computing power of AI chips is the key to ensure the ultimate performance of AI chips. MindSpore provides multiple data processing operators and uses automatic data acceleration technology to implement high-performance pipelines, including data loading, data demonstration, and data conversion. It supports data processing capabilities in all scenarios, such as CV, NLP, and GNN. MindRecord is a self-developed data format of MindSpore. It features efficient read and write and easy distributed processing. Users can convert non-standard and common datasets to the MindRecord format to obtain better performance experience. For details about the conversion, see [MindSpore Data Format Conversion](https://www.mindspore.cn/doc/programming_guide/en/master/dataset_conversion.html). MindSpore supports the loading of common datasets and datasets in multiple data storage formats. For example, users can use `dataset=dataset.Cifar10Dataset(\"Cifar10Data/\")` to load the CIFAR-10 dataset. `Cifar10Data/` indicates the local directory of the dataset, and users can also use `GeneratorDataset` to customize the dataset loading mode. 
Data augmentation is a method of generating new data based on (limited) data, which can reduce the overfitting phenomenon of network model and improve the generalization ability of the model. In addition to user-defined data augmentation, MindSpore provides automatic data augmentation, making data augmentation more flexible. For details, see [Automatic Data Augmentation](https://www.mindspore.cn/doc/programming_guide/en/master/auto_augmentation.html).\n", - "\n", - "The deep learning neural network model usually contains many hidden layers for feature extraction. However, the feature extraction is random and the debugging process is invisible, which limits the trustworthiness and optimization of the deep learning technology. MindSpore supports visualized debugging and optimization (MindInsight) and provides functions such as training dashboard, lineage, performance analysis, and debugger to help users detect deviations during model training and easily debug and optimize models. For example, before initializing the network, users can use `profiler=Profiler()` to initialize the `Profiler` object, automatically collect information such as the operator time consumption during training, and record the information in a file. After the training is complete, call `profiler.analyse()` to stop collecting data and generate performance analysis results. Users can view and analyze the visualized results to more efficiently debug network performance. For details about debugging and optimization, see [Training Process Visualization](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/visualization_tutorials.html).\n", - "\n", - "As a scale of neural network models and datasets continuously increases, parallel distributed training becomes a common practice of neural network training. 
However, policy selection and compilation of parallel distributed training are very complex, which severely restricts training efficiency of a deep learning model and hinders development of deep learning. MindSpore unifies the encoding methods of standalone and distributed training. Developers do not need to write complex distributed policies. Instead, they can implement distributed training by adding a small amount of codes to the standalone code. For example, after `context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL)` is set, a cost model can be automatically established, and a better parallel mode can be selected for users. This improves the training efficiency of neural networks, greatly decreases the AI development difficulty, and enables users to quickly implement model. For more information, see [Distributed Training](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_tutorials.html).\n", - "\n", - "## Level Structure\n", - "\n", - "To support network building, entire graph execution, subgraph execution, and single-operator execution, MindSpore provides users with three levels of APIs which are Low-Level Python API, Medium-Level Python API, and High-Level Python API in ascending order.\n", - "\n", - "![image](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_en/images/api_structure.png)\n", - "\n", - "- Low-Level Python API\n", - "\n", - " The first is low-level API, including tensor definition, basic operators, and automatic differential modules. Users can use the low-level API to easily define tensors and perform derivative calculation. 
For example, users can customize tensors by using the `Tensor` API, and use the `GradOperation` operator in the `ops.composite` module to calculate the derivative of the function at a specified position.\n", - "\n", - "- Medium-Level Python API\n", - "\n", - " The second is medium-level API which encapsulates low-cost APIs and provides modules such as the network layer, optimizer, and loss function. Users can flexibly build neural networks and control execution processes through the medium-level API to quickly implement model algorithm logic. For example, users can call the `Cell` API to build neural network models and computing logic, add the loss function and optimization methods to the neural network model by using the `loss` module and `Optimizer` API, and use the `dataset` module to process data for model training and evaluation.\n", - "\n", - "- High-Level Python API\n", - "\n", - " The third is high-level API. Based on the medium-level API, it provides advanced APIs such as training and inference management, mixed precision training, and debugging and optimization, facilitating users to control the execution process of the entire network and implement training, inference, and optimization of the neural network. 
For example, users can use the `Model` API, specify the neural network model to be trained and related training settings, train the neural network model, and debug the neural network performance through the `Profiler` API.\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/programming_guide/source_en/augmentation.md b/docs/programming_guide/source_en/augmentation.md deleted file mode 100644 index 29870fdabead0d7bef33d5129ffb4ef95714732d..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/augmentation.md +++ /dev/null @@ -1,557 +0,0 @@ -# Data Augmentation - - - -- [Data Augmentation](#data-augmentation) - - [Overview](#overview) - - [c_transforms](#c_transforms) - - [RandomCrop](#randomcrop) - - [RandomHorizontalFlip](#randomhorizontalflip) - - [Resize](#resize) - - [Invert](#invert) - - [py_transforms](#py_transforms) - - [Compose](#compose) - - [Usage Instructions](#usage-instructions) - - [References](#references) - - - - - -## Overview - -In a computer vision task, if the data volume is small or the scenario of the samples are simple, the training effect will be affected. In this case, you may preprocess images by performing data augmentation, so as to improve generalization of the model. - -MindSpore provides the `c_transforms` and `py_transforms` modules for data augmentation. You can also customize functions or operators to augment data. 
- -| Module | Implementation | Description | -| ------------- | ------------------------------- | ---------------------------------------------------------------------------------------------------- | -| c_transforms | Implemented based on C++. | This module provides high performance. | -| py_transforms | Implemented based on Python PIL | This module provides multiple image augmentation methods and can convert PIL images to NumPy arrays. | - -The following table lists the common data augmentation operators supported by MindSpore. For details about more data augmentation operators, see [MindSpore API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.vision.html). - -| Module | Operator | Description | -| ------------- | -------------------- | ------------------------------------------------------------------ | -| c_transforms | RandomCrop | Crops an image of a specified size at a random position. | -| | RandomHorizontalFlip | Flips the image horizontally based on the specified probability. | -| | Resize | Resizes the image to the specified size. | -| | Invert | Inverts the image. | -| py_transforms | RandomCrop | Crops an image of a specified size at a random position. | -| | Resize | Resizes the image to the specified size. | -| | Invert | Inverts the image. | -| | Compose | Performs the data augmentation operations in the list in sequence. | - -## c_transforms - -The following describes how to use common data augmentation operators of the `c_transforms` module. - -### RandomCrop - -Crops the input image at a random position. - -**Parameter description:** - -- `size`: size of the cropped image. -- `padding`: number of padded pixels. -- `pad_if_needed`: specifies whether the original image needs to be padded when it is smaller than the cropped size. -- `fill_value`: fill value used in the constant fill mode. -- `padding_mode`: padding mode. 
- -The following example uses a sequential sampler to load the CIFAR-10 dataset [1], randomly crops the loaded image to 10 in both length and width, outputs the image shapes and labels before and after cropping, and displays the cropped image. - -Download [CIFAR-10 dataset](https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz) and decompress it to the specified path, execute the following command: - -```bash -wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz -mkdir -p datasets -tar -xzf cifar-10-binary.tar.gz -C datasets -mkdir -p datasets/cifar-10-batches-bin/train datasets/cifar-10-batches-bin/test -mv -f datasets/cifar-10-batches-bin/test_batch.bin datasets/cifar-10-batches-bin/test -mv -f datasets/cifar-10-batches-bin/data_batch*.bin datasets/cifar-10-batches-bin/batches.meta.txt datasets/cifar-10-batches-bin/train -tree ./datasets/cifar-10-batches-bin -``` - -```text -./datasets/cifar-10-batches-bin -├── readme.html -├── test -│ └── test_batch.bin -└── train - ├── batches.meta.txt - ├── data_batch_1.bin - ├── data_batch_2.bin - ├── data_batch_3.bin - ├── data_batch_4.bin - └── data_batch_5.bin - -2 directories, 8 files -``` - -```python -import matplotlib.pyplot as plt -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as c_trans - -ds.config.set_seed(5) -ds.config.set_num_parallel_workers(1) - -DATA_DIR = "./datasets/cifar-10-batches-bin/train/" - -sampler = ds.SequentialSampler(num_samples=3) -dataset1 = ds.Cifar10Dataset(DATA_DIR, sampler=sampler) - -random_crop = c_trans.RandomCrop([10, 10]) -dataset2 = dataset1.map(operations=random_crop, input_columns=["image"]) - -image_list1, label_list1 = [], [] -image_list2, label_list2 = [], [] -for data1, data2 in zip(dataset1.create_dict_iterator(), dataset2.create_dict_iterator()): - image_list1.append(data1['image']) - label_list1.append(data1['label']) - print("Source image Shape :", data1['image'].shape, ", Source label :", 
data1['label']) - image_list2.append(data2['image']) - label_list2.append(data2['label']) - print("Cropped image Shape:", data2['image'].shape, ", Cropped label:", data2['label']) - print("------") - -num_samples = len(image_list1) + len(image_list2) -for i in range(num_samples): - if i < len(image_list1): - plt.subplot(2, len(image_list1), i + 1) - plt.imshow(image_list1[i].asnumpy()) - plt.title(label_list1[i].asnumpy()) - else: - plt.subplot(2, len(image_list2), i + 1) - plt.imshow(image_list2[i % len(image_list2)].asnumpy()) - plt.title(label_list2[i % len(image_list2)].asnumpy()) -plt.show() -``` - -The output is as follows: - -```text -Source image Shape : (32, 32, 3) , Source label : 6 -Cropped image Shape: (10, 10, 3) , Cropped label: 6 ------- -Source image Shape : (32, 32, 3) , Source label : 9 -Cropped image Shape: (10, 10, 3) , Cropped label: 9 ------- -Source image Shape : (32, 32, 3) , Source label : 9 -Cropped image Shape: (10, 10, 3) , Cropped label: 9 ------- -``` - -The following shows the cropped image. - -![randomcrop](./images/randomcrop.png) - -### RandomHorizontalFlip - -Randomly flips the input image horizontally. - -**Parameter description:** - -- `prob`: probability of flipping a single image. - -The following example uses a random sampler to load the CIFAR-10 dataset [1], randomly flips the loaded image in the horizontal direction with a probability of 0.8, outputs the image shapes and labels before and after the flipping, and displays the flipped image. - -Follow the steps above to download the CIFAR-10 dataset and store it as required. 
- -```python -import matplotlib.pyplot as plt -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as c_trans - -ds.config.set_seed(6) -ds.config.set_num_parallel_workers(1) - -DATA_DIR = "./datasets/cifar-10-batches-bin/train/" - -sampler = ds.RandomSampler(num_samples=4) -dataset1 = ds.Cifar10Dataset(DATA_DIR, sampler=sampler) - -random_horizontal_flip = c_trans.RandomHorizontalFlip(prob=0.8) -dataset2 = dataset1.map(operations=random_horizontal_flip, input_columns=["image"]) - -image_list1, label_list1 = [], [] -image_list2, label_list2 = [], [] -for data1, data2 in zip(dataset1.create_dict_iterator(), dataset2.create_dict_iterator()): - image_list1.append(data1['image']) - label_list1.append(data1['label']) - print("Source image Shape :", data1['image'].shape, ", Source label :", data1['label']) - image_list2.append(data2['image']) - label_list2.append(data2['label']) - print("Flipped image Shape:", data2['image'].shape, ", Flipped label:", data2['label']) - print("------") - -num_samples = len(image_list1) + len(image_list2) -for i in range(num_samples): - if i < len(image_list1): - plt.subplot(2, len(image_list1), i + 1) - plt.imshow(image_list1[i].asnumpy()) - plt.title(label_list1[i].asnumpy()) - else: - plt.subplot(2, len(image_list2), i + 1) - plt.imshow(image_list2[i % len(image_list2)].asnumpy()) - plt.title(label_list2[i % len(image_list2)].asnumpy()) -plt.show() -``` - -The output is as follows: - -```text -Source image Shape : (32, 32, 3) , Source label : 3 -Flipped image Shape: (32, 32, 3) , Flipped label: 3 ------- -Source image Shape : (32, 32, 3) , Source label : 3 -Flipped image Shape: (32, 32, 3) , Flipped label: 3 ------- -Source image Shape : (32, 32, 3) , Source label : 6 -Flipped image Shape: (32, 32, 3) , Flipped label: 6 ------- -Source image Shape : (32, 32, 3) , Source label : 9 -Flipped image Shape: (32, 32, 3) , Flipped label: 9 ------- -``` - -The following shows the flipped image. 
- -![randomhorizontalflip](./images/randomhorizontalflip.png) - -### Resize - -Resizes the input image. - -**Parameter description:** - -- `size`: target size of the image. -- `interpolation`: interpolation mode used during resizing. - -The following example loads the MNIST dataset [2], resizes the loaded image to (101, 101), outputs the image shapes and labels before and after the resizing, and displays the resized image. - -Download and decompress the MNIST dataset, store it in the `./datasets/MNIST_data/` path, execute the following command: - -```bash -mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test -wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate -wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate -wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate -wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate -tree ./datasets/MNIST_Data -``` - -```text -./datasets/MNIST_Data -├── test -│ ├── t10k-images-idx3-ubyte -│ └── t10k-labels-idx1-ubyte -└── train - ├── train-images-idx3-ubyte - └── train-labels-idx1-ubyte - -2 directories, 4 files -``` - -```python -import matplotlib.pyplot as plt -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as c_trans - -DATA_DIR = "./datasets/MNIST_Data/train/" - -dataset1 = ds.MnistDataset(DATA_DIR, num_samples=4, shuffle=False) - -resize = c_trans.Resize(size=[101, 101]) -dataset2 = dataset1.map(operations=resize, input_columns=["image"]) - -image_list1, label_list1 = [], [] -image_list2, label_list2 = [], [] -for data1, data2 in zip(dataset1.create_dict_iterator(), 
dataset2.create_dict_iterator()):
-    image_list1.append(data1['image'])
-    label_list1.append(data1['label'])
-    print("Source image Shape :", data1['image'].shape, ", Source label :", data1['label'])
-    image_list2.append(data2['image'])
-    label_list2.append(data2['label'])
-    print("Resized image Shape:", data2['image'].shape, ", Resized label:", data2['label'])
-    print("------")
-
-num_samples = len(image_list1) + len(image_list2)
-for i in range(num_samples):
-    if i < len(image_list1):
-        plt.subplot(2, len(image_list1), i + 1)
-        plt.imshow(image_list1[i].asnumpy().squeeze(), cmap=plt.cm.gray)
-        plt.title(label_list1[i].asnumpy())
-    else:
-        plt.subplot(2, len(image_list2), i + 1)
-        plt.imshow(image_list2[i % len(image_list2)].asnumpy().squeeze(), cmap=plt.cm.gray)
-        plt.title(label_list2[i % len(image_list2)].asnumpy())
-plt.show()
-```
-
-The output is as follows:
-
-```text
-Source image Shape : (28, 28, 1) , Source label : 5
-Resized image Shape: (101, 101, 1) , Resized label: 5
-------
-Source image Shape : (28, 28, 1) , Source label : 0
-Resized image Shape: (101, 101, 1) , Resized label: 0
-------
-Source image Shape : (28, 28, 1) , Source label : 4
-Resized image Shape: (101, 101, 1) , Resized label: 4
-------
-Source image Shape : (28, 28, 1) , Source label : 1
-Resized image Shape: (101, 101, 1) , Resized label: 1
-------
-```
-
-The following shows the resized image.
-
-![ctrans_resize](./images/ctrans_resize.png)
-
-### Invert
-
-Inverts the input image.
-
-The following example loads the CIFAR-10 dataset [1], defines and performs the resizing and inverting operations on the loaded image, outputs the image shapes and labels before and after the resizing and inverting operations, and displays the inverted image.
-
-Follow the steps above to download the CIFAR-10 dataset and store it as required. 
-
-```python
-import matplotlib.pyplot as plt
-import mindspore.dataset as ds
-import mindspore.dataset.vision.c_transforms as c_trans
-
-ds.config.set_seed(8)
-
-DATA_DIR = "./datasets/cifar-10-batches-bin/train/"
-
-dataset1 = ds.Cifar10Dataset(DATA_DIR, num_samples=4, shuffle=True)
-
-resize = c_trans.Resize(size=[101, 101])
-invert = c_trans.Invert()
-dataset2 = dataset1.map(operations=[resize, invert], input_columns=["image"])
-
-image_list1, label_list1 = [], []
-image_list2, label_list2 = [], []
-for data1, data2 in zip(dataset1.create_dict_iterator(), dataset2.create_dict_iterator()):
-    image_list1.append(data1['image'])
-    label_list1.append(data1['label'])
-    print("Source image Shape :", data1['image'].shape, ", Source label :", data1['label'])
-    image_list2.append(data2['image'])
-    label_list2.append(data2['label'])
-    print("Inverted image Shape:", data2['image'].shape, ", Inverted label:", data2['label'])
-    print("------")
-
-num_samples = len(image_list1) + len(image_list2)
-for i in range(num_samples):
-    if i < len(image_list1):
-        plt.subplot(2, len(image_list1), i + 1)
-        plt.imshow(image_list1[i].asnumpy().squeeze(), cmap=plt.cm.gray)
-        plt.title(label_list1[i].asnumpy())
-    else:
-        plt.subplot(2, len(image_list2), i + 1)
-        plt.imshow(image_list2[i % len(image_list2)].asnumpy().squeeze(), cmap=plt.cm.gray)
-        plt.title(label_list2[i % len(image_list2)].asnumpy())
-plt.show()
-```
-
-The output is as follows:
-
-```text
-Source image Shape : (32, 32, 3) , Source label : 7
-Inverted image Shape: (101, 101, 3) , Inverted label: 7
-------
-Source image Shape : (32, 32, 3) , Source label : 0
-Inverted image Shape: (101, 101, 3) , Inverted label: 0
-------
-Source image Shape : (32, 32, 3) , Source label : 2
-Inverted image Shape: (101, 101, 3) , Inverted label: 2
-------
-Source image Shape : (32, 32, 3) , Source label : 1
-Inverted image Shape: (101, 101, 3) , Inverted label: 1
-------
-```
-
-The following shows the inverted image. 
- -![ctrans_invert](./images/ctrans_invert.png) - -## py_transforms - -The following describes how to use common data augmentation operators of the `py_transforms` module. - -### Compose - -Receives a `transforms` list and applies the data augmentation operations in the list to dataset images in sequence. - -The following example loads the CIFAR-10 dataset [1], defines the decoding, resizing, and data type conversion operations, applies the operations to the loaded image, outputs the image shapes and labels before and after the processing, and displays the processed image. - -Follow the steps above to download the CIFAR-10 dataset and store it as required. - -```python -import matplotlib.pyplot as plt -import mindspore.dataset as ds -import mindspore.dataset.vision.py_transforms as py_trans -from mindspore.dataset.transforms.py_transforms import Compose -from PIL import Image - -ds.config.set_seed(8) - -DATA_DIR = "./datasets/cifar-10-batches-bin/train/" - -dataset1 = ds.Cifar10Dataset(DATA_DIR, num_samples=5, shuffle=True) - -def decode(image): - return Image.fromarray(image) - -transforms_list = [ - decode, - py_trans.Resize(size=(200,200)), - py_trans.ToTensor() -] -compose_trans = Compose(transforms_list) -dataset2 = dataset1.map(operations=compose_trans, input_columns=["image"]) - -image_list, label_list = [], [] -for data in dataset2.create_dict_iterator(): - image_list.append(data['image']) - label_list.append(data['label']) - print("Transformed image Shape:", data['image'].shape, ", Transformed label:", data['label']) - -num_samples = len(image_list) -for i in range(num_samples): - plt.subplot(1, len(image_list), i + 1) - plt.imshow(image_list[i].asnumpy().transpose(1, 2, 0)) - plt.title(label_list[i].asnumpy()) -plt.show() -``` - -The output is as follows: - -```text -Transformed image Shape: (3, 200, 200) , Transformed label: 7 -Transformed image Shape: (3, 200, 200) , Transformed label: 0 -Transformed image Shape: (3, 200, 200) , Transformed label: 2 
-Transformed image Shape: (3, 200, 200) , Transformed label: 1
-Transformed image Shape: (3, 200, 200) , Transformed label: 6
-```
-
-The following shows the processed image.
-
-![pytrans_compose](./images/pytrans_compose.png)
-
-## Eager Mode
-
-All the `c_transform` and `py_transform` data augmentation operators we introduced above need to be run under pipeline mode. That is, we have to
-define a `map` operator which helps us to start and execute the given data augmentation operator, and to map and transform the data of the data pipeline, for example:
-
-```python
-random_crop = c_trans.RandomCrop([10, 10])
-dataset = dataset.map(operations=random_crop, input_columns=["image"])
-```
-
-However, the pipeline code seems heavy while we sometimes just want to do a little experiment (e.g. model inference).
-Thus, MindSpore provides a simple way to execute these augmentation operators, called `Eager Mode`.
-
-To use Eager mode, you only need to use the data augmentation operator itself as an executable function, and you can write code easily as follows:
-
-```python
-import numpy as np
-from PIL import Image
-import matplotlib.pyplot as plt
-import mindspore.dataset.vision.c_transforms as C
-import mindspore.dataset.vision.py_transforms as P
-```
-
-```bash
-wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/banana.jpg
-```
-
-```python
-img_ori = Image.open("banana.jpg").convert("RGB")
-print("Image.type: {}, Image.shape: {}".format(type(img_ori), img_ori.size))
-
-# Define a Resize op from c_transform and execute it immediately
-op1 = C.Resize(size=(320))
-img = op1(img_ori)
-print("Image.type: {}, Image.shape: {}".format(type(img), img.shape))
-
-# Define a CenterCrop op from c_transform and execute it immediately
-op2 = C.CenterCrop((280, 280))
-img = op2(img)
-print("Image.type: {}, Image.shape: {}".format(type(img), img.shape))
-
-# Define a Pad op from py_transform and execute it immediately
-# Before calling Pad, you need to 
call ToPIL()
-op3 = P.ToPIL()
-op4 = P.Pad(40)
-img = op4(op3(img))
-print("Image.type: {}, Image.shape: {}".format(type(img), img.size))
-
-# Show the result
-plt.subplot(1, 2, 1)
-plt.imshow(img_ori)
-plt.title("original image")
-plt.subplot(1, 2, 2)
-plt.imshow(img)
-plt.title("transformed image")
-plt.show()
-```
-
-The output is as follows:
-
-```text
-Image.type: <class 'PIL.Image.Image'>, Image.shape: (356, 200)
-Image.type: <class 'numpy.ndarray'>, Image.shape: (320, 570, 3)
-Image.type: <class 'numpy.ndarray'>, Image.shape: (280, 280, 3)
-Image.type: <class 'PIL.Image.Image'>, Image.shape: (360, 360)
-```
-
-The following shows the processed image.
-
-![eager_mode](./images/eager_mode.png)
-
-- Augmentation operators that support running in Eager Mode are listed in the following files:
-    [mindspore.dataset.vision.c_transforms](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.vision.html#mindspore-dataset-vision-c-transforms)
-
-    [mindspore.dataset.vision.py_transforms](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.vision.html#mindspore-dataset-vision-py-transforms)
-
-    [mindspore.dataset.text.transforms](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.text.html#mindspore-dataset-text-transforms)
-
-## Usage Instructions
-
-Do not use `c_transforms` and `py_transforms` together because they apply to images in different ways and using them together will reduce the processing performance (Except for Eager Mode).
-
-(Note: The mixed use of `c_transforms` and `py_transforms` in Eager mode is not affected by differences in operating modes.)
-
-![map](./images/map.png)
-
-Using both C++ and Python will incur the cost of switching between them. You are advised not to use operators of the two modules together. However, it is acceptable to use a proper number of operators together.
-
-**Recommended usage:**
-
-- Use `py_transform` or `c_transform` separately. 
- - ![transform_c_py](./images/transform_recommended_1.png) - -- Use `py_transform` and then `c_transform`. - - ![transform_c_py](./images/transform_recommended_2.png) - -- Use `c_transform` and then `py_transform`. - - ![transform_c_py](./images/transform_recommended_3.png) - -**Not recommended:** - -- Frequent switching between `c_transforms` and `py_transforms`. - - ![transform_c_py](./images/transform_not_recommended.png) - -## References - -[1] Alex Krizhevsky. [Learning Multiple Layers of Features from Tiny Images](http://www.cs.toronto.edu/~kriz/learning-features-2009-TR.pdf). - -[2] Y. LeCun, L. Bottou, Y. Bengio, and P. Haffner. [Gradient-based learning applied to document recognition](http://yann.lecun.com/exdb/publis/pdf/lecun-98.pdf). diff --git a/docs/programming_guide/source_en/auto_augmentation.md b/docs/programming_guide/source_en/auto_augmentation.md deleted file mode 100644 index 2d29373ab14b764c8d3c159e2b95b75bfc7341eb..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/auto_augmentation.md +++ /dev/null @@ -1,151 +0,0 @@ -# Auto Augmentation - - - -- [Auto Augmentation](#auto-augmentation) - - [Overview](#overview) - - [Probability Based Auto Augmentation](#probability-based-auto-augmentation) - - [RandomApply](#randomapply) - - [RandomChoice](#randomchoice) - - [RandomSelectSubpolicy](#randomselectsubpolicy) - - [Callback Parameter based Auto Augmentation](#callback-parameter-based-auto-augmentation) - - - - - -## Overview - -MindSpore not only allows you to customize data augmentation, but also provides an auto augmentation method to automatically perform data augmentation on images based on specific policies. - -Auto augmentation can be implemented based on probability or callback parameters. - -## Probability Based Auto Augmentation - -MindSpore provides a series of probability-based auto augmentation APIs. 
You can randomly select and combine various data augmentation operations to make data augmentation more flexible. - -For details about APIs, see [MindSpore API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.transforms.html). - -### RandomApply - -The API receives a data augmentation operation list `transforms` and executes the data augmentation operations in the list in sequence at a certain probability or executes none of them. The default probability is 0.5. - -In the following code example, the `RandomCrop` and `RandomColorAdjust` operations are executed in sequence with a probability of 0.5 or none of them are executed. - -```python -import mindspore.dataset.vision.c_transforms as c_vision -from mindspore.dataset.transforms.c_transforms import RandomApply - -rand_apply_list = RandomApply([c_vision.RandomCrop(512), c_vision.RandomColorAdjust()]) -``` - -### RandomChoice - -The API receives a data augmentation operation list `transforms` and randomly selects a data augmentation operation to perform. - -In the following code example, an operation is selected from `CenterCrop` and `RandomCrop` for execution with equal probability. - -```python -import mindspore.dataset.vision.c_transforms as c_vision -from mindspore.dataset.transforms.c_transforms import RandomChoice - -rand_choice = RandomChoice([c_vision.CenterCrop(512), c_vision.RandomCrop(512)]) -``` - -### RandomSelectSubpolicy - -The API receives a preset policy list, including a series of sub-policy combinations. Each sub-policy consists of several data augmentation operations executed in sequence and their execution probabilities. - -First, a sub-policy is randomly selected for each image with equal probability, and then operations are performed according to the probability sequence in the sub-policy. - -In the following code example, two sub-policies are preset. 
Sub-policy 1 contains the `RandomRotation`, `RandomVerticalFlip`, and `RandomColorAdjust` operations, whose probabilities are 0.5, 1.0, and 0.8, respectively. Sub-policy 2 contains the `RandomRotation` and `RandomColorAdjust` operations, with the probabilities of 1.0 and 0.2, respectively. - -```python -import mindspore.dataset.vision.c_transforms as c_vision -from mindspore.dataset.vision.c_transforms import RandomSelectSubpolicy - -policy_list = [ - [(c_vision.RandomRotation((45, 45)), 0.5), (c_vision.RandomVerticalFlip(), 1.0), (c_vision.RandomColorAdjust(), 0.8)], - [(c_vision.RandomRotation((90, 90)), 1.0), (c_vision.RandomColorAdjust(), 0.2)] - ] -policy = RandomSelectSubpolicy(policy_list) -``` - -## Callback Parameter based Auto Augmentation - -The `sync_wait` API of MindSpore supports dynamic adjustment of the data augmentation policy by batch or epoch granularity during training. You can set blocking conditions to trigger specific data augmentation operations. - -`sync_wait` blocks the entire data processing pipeline until `sync_update` triggers the customized `callback` function. The two APIs must be used together. Their descriptions are as follows: - -- sync_wait(condition_name, num_batch=1, callback=None) - - This API adds a blocking condition `condition_name` to a dataset. When `sync_update` is called, the specified `callback` function is executed. - -- sync_update(condition_name, num_batch=None, data=None) - - This API releases the block corresponding to `condition_name` and triggers the specified `callback` function for `data`. - -The following demonstrates the use of automatic data augmentation based on callback parameters. - -1. Customize the `Augment` class where `preprocess` is a custom data augmentation function and `update` is a callback function for updating the data augmentation policy. 
- - ```python - import mindspore.dataset.vision.py_transforms as transforms - import mindspore.dataset as ds - import numpy as np - - class Augment: - def __init__(self): - self.ep_num = 0 - self.step_num = 0 - - def preprocess(self, input_): - return (np.array((input_ + self.step_num ** self.ep_num - 1), )) - - def update(self, data): - self.ep_num = data['ep_num'] - self.step_num = data['step_num'] - ``` - -2. The data processing pipeline calls back the custom data augmentation policy update function `update`, and then performs the data augmentation operation defined in `preprocess` based on the updated policy in the `map` operation. - - ```python - arr = list(range(1, 4)) - dataset = ds.NumpySlicesDataset(arr, shuffle=False) - aug = Augment() - dataset = dataset.sync_wait(condition_name="policy", callback=aug.update) - dataset = dataset.map(operations=[aug.preprocess]) - ``` - -3. Call `sync_update` in each step to update the data augmentation policy. - - ```python - epochs = 5 - itr = dataset.create_tuple_iterator(num_epochs=epochs) - step_num = 0 - for ep_num in range(epochs): - for data in itr: - print("epcoh: {}, step:{}, data :{}".format(ep_num, step_num, data)) - step_num += 1 - dataset.sync_update(condition_name="policy", data={'ep_num': ep_num, 'step_num': step_num}) - ``` - - The output is as follows: - - ```text - epcoh: 0, step:0, data :[Tensor(shape=[], dtype=Int64, value= 1)] - epcoh: 0, step:1, data :[Tensor(shape=[], dtype=Int64, value= 2)] - epcoh: 0, step:2, data :[Tensor(shape=[], dtype=Int64, value= 3)] - epcoh: 1, step:3, data :[Tensor(shape=[], dtype=Int64, value= 1)] - epcoh: 1, step:4, data :[Tensor(shape=[], dtype=Int64, value= 5)] - epcoh: 1, step:5, data :[Tensor(shape=[], dtype=Int64, value= 7)] - epcoh: 2, step:6, data :[Tensor(shape=[], dtype=Int64, value= 6)] - epcoh: 2, step:7, data :[Tensor(shape=[], dtype=Int64, value= 50)] - epcoh: 2, step:8, data :[Tensor(shape=[], dtype=Int64, value= 66)] - epcoh: 3, step:9, data 
:[Tensor(shape=[], dtype=Int64, value= 81)] - epcoh: 3, step:10, data :[Tensor(shape=[], dtype=Int64, value= 1001)] - epcoh: 3, step:11, data :[Tensor(shape=[], dtype=Int64, value= 1333)] - epcoh: 4, step:12, data :[Tensor(shape=[], dtype=Int64, value= 1728)] - epcoh: 4, step:13, data :[Tensor(shape=[], dtype=Int64, value= 28562)] - epcoh: 4, step:14, data :[Tensor(shape=[], dtype=Int64, value= 38418)] - ``` diff --git a/docs/programming_guide/source_en/auto_parallel.md b/docs/programming_guide/source_en/auto_parallel.md deleted file mode 100644 index b7590dbcb64dbba4295d25c993d178054cdc66a6..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/auto_parallel.md +++ /dev/null @@ -1,393 +0,0 @@ -# Parallel Distributed Training Usage - - - -- [Parallel Distributed Training Usage](#parallel-distributed-training-usage) - - [Overview](#overview) - - [Parallel Distributed Training Configuration](#parallel-distributed-training-configuration) - - [General Configuration](#general-configuration) - - [device_num](#device_num) - - [global_rank](#global_rank) - - [gradients_mean](#gradients_mean) - - [parallel_mode](#parallel_mode) - - [all_reduce_fusion_config](#all_reduce_fusion_config) - - [enable_parallel_optimizer](#enable_parallel_optimizer) - - [parameter_broadcast](#parameter_broadcast) - - [Automatic Parallel Configuration](#automatic-parallel-configuration) - - [gradient_fp32_sync](#gradient_fp32_sync) - - [auto_parallel_search_mode](#auto_parallel_search_mode) - - [strategy_ckpt_load_file](#strategy_ckpt_load_file) - - [strategy_ckpt_save_file](#strategy_ckpt_save_file) - - [full_batch](#full_batch) - - [pipeline_stages](#pipeline_stages) - - [grad_accumulation_step](#grad_accumulation_step) - - [Distributed Communication Interface](#distributed-communication-interface) - - [init](#init) - - [get_group_size](#get_group_size) - - [get_rank](#get_rank) - - [Distributed Attribute Configuration](#distributed-attribute-configuration) - - 
[cross_batch](#cross_batch) - - [fusion](#fusion) - - [layerwise_parallel](#layerwise_parallel) - - [Data Parallel](#data-parallel) - - [Automatic Parallel](#automatic-parallel) - - - - - -## Overview - -In deep learning, as the number of datasets and parameters increases, the time and hardware resources required for training increase, and finally become a bottleneck to the training. Parallel distributed training can reduce the requirements on hardware such as memory and computing performance and is an important optimization method for training. - -MindSpore provides the parallel distributed training function and supports multiple parallel modes, including data parallel and automatic parallel. - -## Parallel Distributed Training Configuration - -The parallel distributed training configuration of MindSpore is managed by `auto_parallel_context` in a centralized manner. You can customize the configuration based on the actual situation. These configurations can be classified into three types: - -- General configuration: takes effect on both data parallel and automatic parallel, for example, `device_num` and `global_rank` etc. -- Automatic parallel configuration: takes effect only in automatic parallel mode, for example, `gradient_fp32_sync` etc. - -You can use `context.set_auto_parallel_context` to configure the preceding parameters and use `context.get_auto_parallel_context` to obtain the parameters. - -### General Configuration - -#### device_num - -`device_num` indicates the number of available machines. The default value is 0. The value is of the int type and must range from 1 to 4096. If you do not set this parameter, the `Model` interface obtains the value by using the `get_group_size` method. If you set this parameter, your configuration is used. This configuration allows you to manually transfer `device_num` without using the `Model` interface. 
- -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(device_num=8) -context.get_auto_parallel_context("device_num") -``` - -#### global_rank - -`global_rank` indicates the logical sequence number of the current device. The default value is 0. The value is of the int type and must range from 0 to 4095. If you do not set this parameter, the `Model` interface obtains the value by using the `get_rank` method. If you set this parameter, your configuration is used. This configuration allows you to manually transfer `global_rank` without using the `Model` interface. - -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(global_rank=0) -context.get_auto_parallel_context("global_rank") -``` - -#### gradients_mean - -`gradients_mean` indicates whether to perform the averaging operation during reverse gradient aggregation. The value is of the Boolean type. The default value is False, indicating that only the SUM operation of AllReduce is performed for gradient aggregation. `gradients_means` affects network convergence. The setting of `gradients_means` may vary in different scenarios. Therefore, MindSpore provides this interface for users to configure parameters based on the actual situation. - -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(gradients_mean=False) -context.get_auto_parallel_context("gradients_mean") -``` - -#### parallel_mode - -`parallel_mode` indicates the parallel mode. The value is a character string. The options are as follows: - -- `stand_alone`: standalone mode. -- `data_parallel`: data parallel mode. -- `hybrid_parallel`: hybrid parallel mode. -- `semi_auto_parallel`: semi-automatic parallel mode. In this mode, you can use the `shard` method to configure a segmentation policy for an operator. If no policy is configured, the data parallel policy is used by default. 
-- `auto_parallel`: automatic parallel mode. In this mode, the framework automatically creates a cost model and selects the optimal segmentation policy for users. - -The complete examples of `auto_parallel` and `data_parallel` are provided in [Distributed Training](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_tutorials.html). - -The following is a code example: - -```python -from mindspore import context -import mindspore.ops as ops - -context.set_auto_parallel_context(parallel_mode="semi_auto_parallel") -mul = ops.Mul().shard(((2, 1), (2, 1))) -context.get_auto_parallel_context("parallel_mode") -``` - -#### all_reduce_fusion_config - -`all_reduce_fusion_config` allows users to customize the AllReduce segmentation policy by gradient aggregation. To reduce resource consumption and operator execution gaps, the framework fusions all the reverse gradient aggregation AllReduce operators into one by default. However, when the model is large, the iteration smearing time increases. You can set this parameter based on the actual network to manually tune and find the optimal segmentation policy by gradient aggregation. - -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(all_reduce_fusion_config=[20, 35]) -context.get_auto_parallel_context("all_reduce_fusion_config") -``` - -In the example, the value range of `all_reduce_fusion_config` is [20,35]. The first 20 AllReduce operators, the 20th to 35th AllReduce operators, and the remaining AllReduce operators are fused into three operators, respectively. - -#### enable_parallel_optimizer - -`enable_parallel_optimizer` is a feature under development. The default value is False. In data parallel, weight update has redundant computation among devices. Parallel optimizer shards the computation of optimizer to each device. 
For large-scale networks like Bert and GPT, this feature could reduce requirements on memory and improve the performance efficiently. - -When the `enable_parallel_optimizer` is enabled in the data_parallel mode, MindSpore will split the parameters that need to be updated into different devices, and then use the Broadcast operator to share weights between clusters after each update. It should be noted that the number of parameters should be greater than the number of machines. Currently, only the `Lamb` and `AdamWeightDecay` optimizers are supported. - -In the `auto_parallel` or `semi_auto_parallel` mode, the optimizer parallel is enabled. If one parameter which has been sliced by shard strategy still has repeated slices among devices, and the highest dimension of the shape can be divided by the number of devices, MindSpore would save parameters and update them by the smallest slice shapes. All optimizers are supported under this two modes. - -No matter which parallel mode is selected, parallel optimizer would not influence the forward and backward graph. Only the computation of weight update would be influenced. - -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(enable_parallel_optimizer=True) -context.get_auto_parallel_context("enable_parallel_optimizer") -``` - -#### parameter_broadcast - -Parameter broadcast shares the value of data parallel weights among devices, in the purpose of synchronization of weights. The default value is False and only the graph mode is supported. - -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(parameter_broadcast=True) -context.get_auto_parallel_context("parameter_broadcast") -``` - -### Automatic Parallel Configuration - -#### gradient_fp32_sync - -`gradient_fp32_sync` indicates whether gradients are aggregated based on the FP32 type. The value is of the Boolean type. 
The default value is True, indicating that gradients are aggregated based on the FP32 type. Due to the special structure of the `Ascend` AI processor, the speed of aggregating FP32 data is higher than that of aggregating FP16 data, but the precision may be affected. Therefore, MindSpore provides the `gradient_fp32_sync` interface for users to make choices based on the actual situation. - -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(gradient_fp32_sync=False) -context.get_auto_parallel_context("gradient_fp32_sync") -``` - -#### auto_parallel_search_mode - -MindSpore provides two search policies: `dynamic_programming` and `recursive_programming`. `dynamic_programming` can search for the optimal policy depicted by the cost model, but it takes a long time to search for the parallel policy of a huge network model. `recursive_programming` can quickly search out parallel policies, but the found policies may not have the optimal running performance. MindSpore provides parameters to allow users to select a search algorithm. The default value is `dynamic_programming`. - -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(auto_parallel_search_mode="dynamic_programming") -context.get_auto_parallel_context("auto_parallel_search_mode") -``` - -#### strategy_ckpt_load_file - -Specifies a path to load the segmentation information of all operators with weights in automatic parallel mode. - -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(strategy_ckpt_load_file="./") -context.get_auto_parallel_context("strategy_ckpt_load_file") -``` - -#### strategy_ckpt_save_file - -Specifies a path for storing the segmentation information of all operators with weights in automatic parallel mode. 
- -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(strategy_ckpt_save_file="./") -context.get_auto_parallel_context("strategy_ckpt_save_file") -``` - -#### full_batch - -`full_batch` allows users to determine whether to import datasets in full mode. The default value is False. That is, datasets are imported in data parallel mode. In special scenarios, the performance of full dataset import is better than that of import in data parallel mode. For example, the WideDeep network is used in uneven segmentation scenarios. Therefore, MindSpore provides the `full_batch` configurable interface. - -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(full_batch=False) -context.get_auto_parallel_context("full_batch") -``` - -#### pipeline_stages - -`pipeline_stages` is used to set parallel stage information of pipeline. It is used to show how the machines are distributed in `auto_parallel` mode in pipeline. Currently pipeline parallel is still under development. - -The following is a code example: - -```python -from mindspore import context - -context.set_auto_parallel_context(pipeline_stages=4) -context.get_auto_parallel_context("pipeline_stages") -``` - -#### grad_accumulation_step - -`grad_accumulation_step` means the steps where the gradients are accumulated. - -The following is a code example: - -```python -from mindspore import context -context.set_auto_parallel_context(grad_accumulation_step=4) -context.get_auto_parallel_context("grad_accumulation_step") -``` - -## Distributed Communication Interface - -`mindspore.communication.management` encapsulates a collection of communication interfaces used by parallel distributed training, facilitating users to configure distributed information. - -### init - -`init` enables MindSpore communication and initializes distributed training. `init` must be invoked after `context.set_context`. 
You can transfer the communication backend information to the `init`. The `init` performs initialization based on the backend information. - -- `hccl`: short for `Huawei Collective Communication Library`, used for the `Ascend` processor platform. -- `nccl`: short for `NVIDIA Collective Communication Library`, used for the `GPU` processor platform. - -If you do not configure the communication backend, MindSpore automatically configures it based on the `device_target` information in `context`. - -The following is a code example: - -```python -from mindspore import context -from mindspore.communication.management import init - -context.set_context(device_target='GPU') -init() -``` - -### get_group_size - -`get_group_size` allows users to obtain the number of devices in the cluster. Invoke `init` before using the `get_group_size` interface. - -The following is a code example: - -```python -from mindspore import context -from mindspore.communication.management import init, get_group_size - -context.set_context(device_target='GPU') -init() -group_size = get_group_size() -``` - -### get_rank - -`get_rank` allows users to obtain the ID of the current device in the cluster. Invoke `init` before using the `get_rank` interface. - -The following is a code example: - -```python -from mindspore import context -from mindspore.communication.management import init, get_rank - -context.set_context(device_target='GPU') -init() -rank_id = get_rank() -``` - -## Distributed Attribute Configuration - -### cross_batch - -In specific scenarios, the calculation logic of `data_parallel` is different from that of `stand_alone`. The calculation logic of `auto_parallel` is the same as that of `stand_alone` in any scenario. The convergence effect of `data_parallel` may be better. Therefore, MindSpore provides the `cross_batch` parameter to ensure that the calculation logic of `auto_parallel` is consistent with that of `data_parallel`. You can use the `add_prim_attr` method to configure the logic. 
The default value is False. - -The following is a code example: - -```python -import mindspore.ops as ops - -mul = ops.Mul().add_prim_attr("cross_batch", True) -``` - -### fusion - -To ensure performance, MindSpore provides the fusion function for the `AllGather` and `AllReduce` operators. Operators of the same type (of the same operator type and in the same communication domain) with the same `fusion` value will be fused together. The value of `fusion` must be greater than or equal to 0. When the value of `fusion` is 0, operators will not be fused together. Only `Ascend` backend is supported. - -There are two ways for configuration. If the communication operators are called explicitly, `add_prim_attr` could be used to configure. The following is a code example: - -```python -import mindspore.ops as ops - -allreduce1 = ops.AllReduce().add_prim_attr("fusion", 1) -allreduce2 = ops.AllReduce().add_prim_attr("fusion", 1) -``` - -`allreduce1` and `allreduce2` will be fused into one operator during execution. - -In `AUTO_PARALLEL` and `SEMI_AUTO_PARALLEL` mode, some communication operators used for parameters or gradients aggregation are inserted automatically. So the attribute should be added on a `Cell` or a `Parameter`. 
For example: - -```python -import numpy as np - -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor, Parameter -from mindspore import context - -class Net(nn.Cell): - """Net definition""" - def __init__(self): - super(Net, self).__init__() - self.fc1 = ops.MatMul() - self.fc2 = ops.MatMul() - self.p1 = Parameter(Tensor(np.ones([48, 64]).astype(np.float32)), name="weight1") - self.p1.comm_fusion = 2 - self.p2 = Parameter(Tensor(np.ones([64, 16]).astype(np.float32)), name="weight2") - - def construct(self, x, y): - x = self.fc1(x, self.p1) - x = self.fc2(x, self.p2) - return x - y - -context.set_context(mode=context.GRAPH_MODE) -context.set_auto_parallel_context(parallel_mode="auto_parallel", device_num=8) -net = Net().set_comm_fusion(2) -``` - -Here the `comm_fusion` of parameter `Net.p1` is 2, which means the attribute `fusion` is 2 for the communication operators generated for this parameter. When you need to manipulate the parameters in batches, it is recommended to call `set_comm_fusion` to set `comm_fusion` for all the parameters in the Net. The value of attribute will be overwritten when the function is called multiple times. - -> When a parameter is shared, the operators connected with the parameter should have the same data type. Otherwise, fusion would fail. - -### layerwise_parallel - -In `HYBRID_PARALLEL` mode, you need to manually split the model. You need to manually add the `layerwise_parallel` flag to the parallel parameters of the model. The framework filters out the gradient aggregation operation for the parallel parameters of the model based on the flag. - -The following is a code example: - -```python -import numpy as np -from mindspore import Parameter, Tensor - -x = Parameter(Tensor(np.ones([2, 2])), layerwise_parallel=True) -``` - -## Data Parallel - -Data parallel refers to the parallel mode in which data is segmented. Generally, data is segmented by batch and distributed to each computing unit (worker) for model calculation. 
In data parallel mode, datasets must be imported in data parallel mode, and `parallel_mode` must be set to `data_parallel`. - -For details about the test cases, see [Distributed Training](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_tutorials.html). - -## Automatic Parallel - -Automatic parallel is a distributed parallel mode that integrates data parallel, model parallel, and hybrid parallel. It can automatically establish a cost model and select a parallel mode for users. The cost model refers to modeling the training time based on the memory computing overhead and the communication overhead, and designing an efficient algorithm to find a parallel policy with a relatively short training time. In automatic parallel mode, datasets must be imported in data parallel mode, and `parallel_mode` must be set to `auto_parallel`. - -For details about the test cases, see the [Distributed Training](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_tutorials.html). diff --git a/docs/programming_guide/source_en/cache.md b/docs/programming_guide/source_en/cache.md deleted file mode 100644 index 79a7a2d2a888da35e10fe046b89059461850f95c..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/cache.md +++ /dev/null @@ -1,436 +0,0 @@ -# Single-Node Tensor Cache - - - -- [Single-Node Tensor Cache](#single-node-tensor-cache) - - [Overview](#overview) - - [Basic Cache Usage](#basic-cache-usage) - - [Cache Sharing](#cache-sharing) - - [Limitations](#limitations) - - [Cache Performance Tuning](#cache-performance-tuning) - - - - - -## Overview - -If you need to repeatedly access remote datasets or load datasets from disks, you can use the single-node cache operator to cache datasets in the local memory to accelerate dataset loading. - -The cache operator depends on the cache server started on the current node. 
Functioning as a daemon process and independent of the training script, the cache server is mainly used to manage cached data, including storing, querying, and loading data, and writing cached data when the cache is not hit. - -If the memory space is insufficient to cache all datasets, you can configure a cache operator to cache the remaining data to disks. - -Currently, the cache service supports only single-node cache. That is, the client and server are deployed on the same machine. This service can be used in the following scenarios: - -- Cache the loaded original dataset. - - You can use the cache in the dataset loading operator. The loaded data is stored in the cache server. If the same data is required subsequently, the data can be directly loaded from the cache server, avoiding repeated loading from the disk. - - ![cache on leaf pipeline](./images/cache_dataset.png) - -- Cache the data processed by augmentation. - - You can also use the cache in the `map` operator. The data processed by augmentation (such as image cropping or resizing) is directly cached, avoiding repeated data augmentation operations and reducing unnecessary computations. - - ![cache on map pipeline](./images/cache_processed_data.png) - - > You are advised to cache image data in `decode` + `resize` + `cache` mode. The data processed by `decode` can be directly cached only in single-node single-device mode. - -## Basic Cache Usage - -1. Configure the environment. - - Before using the cache service, you need to install MindSpore and set related environment variables. The Conda environment is used as an example. The setting method is as follows: - - ```shell - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{path_to_conda}/envs/{your_env_name}/lib/python3.7/site-packages/mindspore:{path_to_conda}/envs/{your_env_name}/lib/python3.7/site-packages/mindspore/lib - export PATH=$PATH:{path_to_conda}/envs/{your_env_name}/bin - ``` - - > When the cache is used, the server memory may be insufficient. 
Therefore, you are advised to increase the swap memory space of the server to more than 100 GB before using the cache. For details about how to increase the swap memory space on Ubuntu, EulerOS, or CentOS, see [related tutorials](https://help.ubuntu.com/community/SwapFaq#How_do_I_add_a_swap_file.3F). - -2. Start the cache server. - - Before using the single-node cache service, run the following command to start the cache server: - - ```shell - cache_admin --start - ``` - - If the following information is displayed, the cache server is started successfully: - - ```text - Cache server startup completed successfully! - The cache server daemon has been created as process id 10394 and is listening on port 50052 - - Recommendation: - Since the server is detached into its own daemon process, monitor the server logs (under /tmp/mindspore/cache/log) for any issues that may happen after startup - ``` - - `cache_admin` supports the following commands and options: - - `--start`: starts the cache server. The following options are supported: - - `--workers` or `-w`: specifies the number of worker threads on the cache server. By default, the number of worker threads is half of the number of CPUs. This parameter relies on the NUMA architecture of the server. The value will be adjusted automatically by the server if it's not a multiple of number of NUMA nodes in the machine. - - `--spilldir` or `-s`: specifies the disk file path for storing remaining data when the cached data size exceeds the memory space. The default value is '' (which means disabling spilling). - - `--hostname` or `-h`: specifies the IP address of the cache server. The default value is 127.0.0.1. - - `--port` or `-p`: specifies the port number of the cache server. The default value is 50052. - - `--loglevel` or `-l`: sets the log level. The default value is 1 (WARNING). If this option is set to 0 (INFO), excessive logs will be generated, resulting in performance deterioration. 
- - `--stop`: stops the cache server. - - `--generate_session` or `-g`: generates a cache session. - - `--destroy_session` or `-d`: deletes a cache session. - - `--list_sessions`: displays the list of currently cached sessions and their details. - - `--server_info`:displays the configuration parameters and active session list of current server. - - `--help`: displays the help information. - - In the preceding options, you can use `-h` and `-p` to specify a server. Users can also set environment variables `MS_CACHE_HOST` and `MS_CACHE_PORT` to specify it. If hostname and port are not set, operations are performed on the server with the IP address 127.0.0.1 and port number 50052 by default. - - You can run the `ps -ef|grep cache_server` command to check whether the server is started and query server parameters. - - You can also run the `cache_admin --server_info` command to get the full list of configuration of cache server. - - ```shell - $ cache_admin --server_info - Cache Server Configuration: - ---------------------------------------- - config name value - ---------------------------------------- - hostname 127.0.0.1 - port 50052 - number of workers 16 - log level 1 - spill dir None - ---------------------------------------- - Active sessions: - No active sessions. - ``` - - Where, the table of Cache Server Configuration lists five detailed configuration information. Active sessions shows the list of active session ID in current server if any. - - > - Before setting cache_server initialization parameters, check the available memory of the system and the size of the dataset to be loaded. If the memory of cache_server or the dataset size exceeds the available memory of the system, the server may break down or restart, the cache_server may automatically shut down, or the training process fails to be executed. - > - To enable data spilling, you need to use `-s` to set spilling path when starting cache server. 
Otherwise, this feature is default to be disabled and it will bring up a memory-only cache server. - -3. Create a cache session. - - If no cache session exists on the cache server, a cache session needs to be created to obtain the cache session ID. - - ```shell - $ cache_admin -g - Session created for server on port 50052: 1456416665 - ``` - - In the preceding command, 1456416665 is the cache session ID allocated by the server with port number 50052. - - You can run the `cache_admin --list_sessions` command to view all cache sessions on the current server. - - ```shell - $ cache_admin --list_sessions - Listing sessions for server on port 50052 - - Session Cache Id Mem cached Disk cached Avg cache size Numa hit - 1456416665 n/a n/a n/a n/a n/a - ``` - - Output parameter description: - - `Session`: specifies the cache session ID. - - `Cache Id`: specifies the ID of the cache instance in the current cache session. `n/a` indicates that no cache instance is created. - - `Mem cached`: specifies the cached data volume in the memory. - - `Disk cached`: specifies the cached data volume in the disk. - - `Avg cache size`: specifies the average size of each line of data in the current cache. - - `Numa hit`: specifies the number of NUMA hits. A larger value indicates better time performance. - -4. Create a cache instance. - - In the Python training script, use the `DatasetCache` API to define a cache instance named `test_cache`, and specify the `session_id` parameter to a cache session ID created in the previous step. - - ```python - import mindspore.dataset as ds - - test_cache = ds.DatasetCache(session_id=1456416665, size=0, spilling=False) - ``` - - `DatasetCache` supports the following parameters: - - `session_id`: specifies the cache session ID, which can be created and obtained by running the `cache_admin -g` command. - - `size`: specifies the maximum memory space occupied by the cache. The unit is MB. For example, if the cache space is 512 GB, set `size` to `524288`. 
The default value is 0. - - `spilling`: determines whether to spill the remaining data to disks when the memory space exceeds the upper limit. The default value is False. - - `hostname`: specifies the IP address for connecting to the cache server. The default value is 127.0.0.1. - - `port`: specifies the port number for connecting to the cache server. The default value is 50052. - - `num_connections`: specifies the number of established TCP/IP connections. The default value is 12. - - `prefetch_size`: specifies the number of rows obtained for each access. The default value is 20. - - > - In actual use, you are advised to run the `cache_admin -g` command to obtain a cache session ID from the cache server and use it as the parameter of `session_id` to prevent errors caused by cache session nonexistence. - > - `size=0` indicates that the memory space used by the cache is not limited manually, but cannot exceed 80% of the total system memory. Note that `size=0` may cause the out of memory error. Therefore, you are advised to set `size` to a proper value based on the idle memory of the machine. - > - `spilling=True` indicates that the remaining data is written to disks when the memory space is insufficient. Therefore, ensure that you have the write permission on the configured disk path and the disk space is sufficient to store the remaining cache data. Note that if no spilling path is set when cache server starts, setting `spilling=True` will raise an error when calling the API. - > - `spilling=False` indicates that no data is written once the configured memory space is used up on the cache server. - > - If a dataset that does not support random access (such as `TFRecordDataset`) is used to load data and the cache service is enabled, ensure that the entire dataset is stored locally. In this scenario, if the local memory space is insufficient to store all data, spilling must be enabled to spill data to disks. 
- > - `num_connections` and `prefetch_size` are internal performance tuning parameters. Generally, you do not need to set these two parameters. - -5. Insert a cache instance. - - Currently, the cache service can be used to cache both original datasets and datasets processed by augmentation. The following example shows two usage methods. - - Note that you need to create a cache instance for each of the two examples according to step 4, and use the created `test_cache` as the `cache` parameter in the dataset loading operator or map operator. - - CIFAR-10 dataset is used in the following two examples. Before running the sample, download and store the CIFAR-10 dataset by referring to [Loading Dataset](https://www.mindspore.cn/doc/programming_guide/en/master/dataset_loading.html#cifar-10-100). - - - Cache the original loaded dataset. - - ```python - dataset_dir = "cifar-10-batches-bin/" - - # apply cache to dataset - data = ds.Cifar10Dataset(dataset_dir=dataset_dir, num_samples=4, shuffle=False, num_parallel_workers=1, cache=test_cache) - - num_iter = 0 - for item in data.create_dict_iterator(num_epochs=1): # each data is a dictionary - # in this example, each dictionary has a key "image" - print("{} image shape: {}".format(num_iter, item["image"].shape)) - num_iter += 1 - ``` - - The output is as follows: - - ```text - 0 image shape: (32, 32, 3) - 1 image shape: (32, 32, 3) - 2 image shape: (32, 32, 3) - 3 image shape: (32, 32, 3) - ``` - - You can run the `cache_admin --list_sessions` command to check whether there are four data records in the current session. If yes, the data is successfully cached. - - ```shell - $ cache_admin --list_sessions - Listing sessions for server on port 50052 - - Session Cache Id Mem cached Disk cached Avg cache size Numa hit - 1456416665 821590605 4 n/a 3226 4 - ``` - - - Cache the data processed by augmentation. 
- - ```python - import mindspore.dataset.vision.c_transforms as c_vision - - dataset_dir = "cifar-10-batches-bin/" - - # apply cache to dataset - data = ds.Cifar10Dataset(dataset_dir=dataset_dir, num_samples=5, shuffle=False, num_parallel_workers=1) - - # apply cache to map - rescale_op = c_vision.Rescale(1.0 / 255.0, -1.0) - data = data.map(input_columns=["image"], operations=rescale_op, cache=test_cache) - - num_iter = 0 - for item in data.create_dict_iterator(num_epochs=1): # each data is a dictionary - # in this example, each dictionary has a key "image" - print("{} image shape: {}".format(num_iter, item["image"].shape)) - num_iter += 1 - ``` - - The output is as follows: - - ```text - 0 image shape: (32, 32, 3) - 1 image shape: (32, 32, 3) - 2 image shape: (32, 32, 3) - 3 image shape: (32, 32, 3) - 4 image shape: (32, 32, 3) - ``` - - You can run the `cache_admin --list_sessions` command to check whether there are five data records in the current session. If yes, the data is successfully cached. - - ```shell - $ cache_admin --list_sessions - Listing sessions for server on port 50052 - - Session Cache Id Mem cached Disk cached Avg cache size Numa hit - 1456416665 3618046178 5 n/a 12442 5 - ``` - -6. Destroy the cache session. - - After the training is complete, you can destroy the current cache and release the memory. - - ```shell - $ cache_admin --destroy_session 1456416665 - Drop session successfully for server on port 50052 - ``` - - The preceding command is used to destroy the cache with the session ID 1456416665 on the server with the port number 50052. - - If you choose not to destroy the cache, the cached data still exists in the cache session. You can use the cache when starting the training script next time. - -7. Stop the cache server. - - After using the cache server, you can stop it. This operation will destroy all cache sessions on the current server and release the memory. 
- - ```shell - $ cache_admin --stop - Cache server on port 50052 has been stopped successfully. - ``` - - The preceding command is used to shut down the server with the port number 50052. - - If you choose not to shut down the server, the cache sessions on the server will be retained for future use. During the next training, you can create a cache session or reuse the existing cache. - -## Cache Sharing - -During the single-node multi-device distributed training, the cache operator allows multiple same training scripts to share the same cache and read and write data from the cache. - -1. Start the cache server. - - ```shell - $ cache_admin --start - Cache server startup completed successfully! - The cache server daemon has been created as process id 39337 and listening on port 50052 - - Recommendation: - Since the server is detached into its own daemon process, monitor the server logs (under /tmp/mindspore/cache/log) for any issues that may happen after startup - ``` - -2. Create a cache session. - - Create the shell script `cache.sh` for starting Python training and run the following command to generate a cache session ID: - - ```shell - #!/bin/bash - # This shell script will launch parallel pipelines - - # get path to dataset directory - if [ $# != 1 ] - then - echo "Usage: sh cache.sh DATASET_PATH" - exit 1 - fi - dataset_path=$1 - - # generate a session id that these parallel pipelines can share - result=$(cache_admin -g 2>&1) - rc=$? - if [ $rc -ne 0 ]; then - echo "some error" - exit 1 - fi - - # grab the session id from the result string - session_id=$(echo $result | awk '{print $NF}') - ``` - -3. Pass the cache session ID to the training script. 
- - Continue to write the shell script and add the following command to pass `session_id` and other parameters when the Python training is started: - - ```shell - # make the session_id available to the python scripts - num_devices=4 - - for p in $(seq 0 $((${num_devices}-1))); do - python my_training_script.py --num_devices "$num_devices" --device "$p" --session_id $session_id --dataset_path $dataset_path - done - ``` - - > Complete sample code: [cache.sh](https://gitee.com/mindspore/docs/blob/master/tutorials/tutorial_code/cache/cache.sh) - -4. Create and apply a cache instance. - - CIFAR-10 dataset is used in the following example. Before running the sample, download and store the CIFAR-10 dataset by referring to [Loading Dataset](https://www.mindspore.cn/doc/programming_guide/en/master/dataset_loading.html#cifar-10-100). The directory structure is as follows: - - ```text - ├─cache.sh - ├─my_training_script.py - └─cifar-10-batches-bin - ├── batches.meta.txt - ├── data_batch_1.bin - ├── data_batch_2.bin - ├── data_batch_3.bin - ├── data_batch_4.bin - ├── data_batch_5.bin - ├── readme.html - └── test_batch.bin - ``` - - Create and write the Python script `my_training_script.py`. Use the following code to receive `session_id` and pass it as a parameter when defining a cache instance. 
- - ```python - import argparse - import mindspore.dataset as ds - - parser = argparse.ArgumentParser(description='Cache Example') - parser.add_argument('--num_devices', type=int, default=1, help='Device num.') - parser.add_argument('--device', type=int, default=0, help='Device id.') - parser.add_argument('--session_id', type=int, default=1, help='Session id.') - parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') - args_opt = parser.parse_args() - - # apply cache to dataset - test_cache = ds.DatasetCache(session_id=args_opt.session_id, size=0, spilling=False) - dataset = ds.Cifar10Dataset(dataset_dir=args_opt.dataset_path, num_samples=4, shuffle=False, num_parallel_workers=1, - num_shards=args_opt.num_devices, shard_id=args_opt.device, cache=test_cache) - num_iter = 0 - for _ in dataset.create_dict_iterator(): - num_iter += 1 - print("Got {} samples on device {}".format(num_iter, args_opt.device)) - ``` - - > Complete sample code: [my_training_script.py](https://gitee.com/mindspore/docs/blob/master/tutorials/tutorial_code/cache/my_training_script.py) - -5. Execute the training script. - - Execute the shell script `cache.sh` to enable distributed training. - - ```shell - $ sh cache.sh cifar-10-batches-bin/ - Got 4 samples on device 0 - Got 4 samples on device 1 - Got 4 samples on device 2 - Got 4 samples on device 3 - ``` - - You can run the `cache_admin --list_sessions` command to check whether only one group of data exists in the current session. If yes, cache sharing is successful. - - ```shell - $ cache_admin --list_sessions - Listing sessions for server on port 50052 - - Session Cache Id Mem cached Disk cached Avg cache size Numa hit - 3392558708 821590605 16 n/a 3227 16 - ``` - -6. Destroy the cache session. - - After the training is complete, you can destroy the current cache and release the memory. - - ```shell - $ cache_admin --destroy_session 3392558708 - Drop session successfully for server on port 50052 - ``` - -7. 
Stop the cache server. - - After using the cache server, you can stop it. - - ```shell - $ cache_admin --stop - Cache server on port 50052 has been stopped successfully. - ``` - -## Limitations - -- Currently, dataset classes such as `MindDataset`, `GraphDataset`, `GeneratorDataset`, `PaddedDataset`, and `NumpySlicesDataset` do not support cache. `GeneratorDataset`, `PaddedDataset`, and `NumpySlicesDataset` belong to `GeneratorOp`, so their error message is displayed as "There is currently no support for GeneratorOp under cache." -- Data processed by `batch`, `concat`, `filter`, `repeat`, `skip`, `split`, `take`, and `zip` does not support cache. -- Data processed by random data argumentation operations (such as `RandomCrop`) does not support cache. -- The same cache instance cannot be nested in different locations of the same pipeline. - -## Cache Performance Tuning - -The cache service performance can be significantly improved in following scenarios: - -- Cache the data processed by augmentation, especially when the data processing pipeline contains high complexity operations such as decode. In this scenario, you do not need to perform the data augmentation operation repeatedly on each epoch, which saves a lot of time. -- Use cache services during simple network training and inference. Compared with complex networks, simple networks require less training time. Therefore, the time performance is significantly improved when cache services are used in this scenario. - -However, we may not benefit from cache in the following scenarios: - -- The system memory is insufficient or the cache is not hit, resulting in poor cache service time performance. You can check whether the available system memory is sufficient and set a proper cache size before using the cache. -- Too much cache spilling will deteriorate the time performance. Therefore, try not to spill cache to disks when datasets that support random access (such as `ImageFolderDataset`) are used for data loading. 
-- Using cache on NLP networks such as Bert does not improve performance. In the NLP scenarios, there are usually no high complexity data augmentation operations like decode. -- There is an expected startup overhead when using cache in non-mappable datasets like `TFRecordDataset`. According to the current design, it is required to cache all rows to the cache server before the first epoch of training. So the first epoch time can be longer than the non-cache case. diff --git a/docs/programming_guide/source_en/callback.md b/docs/programming_guide/source_en/callback.md deleted file mode 100644 index 19151f34836728b403be1c91c033eb24e01d4fb5..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/callback.md +++ /dev/null @@ -1,56 +0,0 @@ -# Callback Mechanism - - - -- [Callback Mechanism](#callback-mechanism) - - [Overview](#overview) - - [MindSpore Built-in Callback Functions](#mindspore-built-in-callback-functions) - - [MindSpore Custom Callback Functions](#mindspore-custom-callback-functions) - - - - - -## Overview - -The callback function is implemented as a class in MindSpore. The callback mechanism is similar to a monitoring mode, which helps you observe parameter changes and network internal status during network training. You can also perform operations based on specified conditions. During the training, the callback list executes the callback functions in the defined sequence. The callback mechanism enables you to effectively learn the training status of network models in time and make adjustments as required, greatly improving development efficiency. - -In MindSpore, the callback mechanism is generally used in the network training process `model.train`. You can configure different built-in callback functions to transfer different parameters to implement various functions. 
For example, `LossMonitor` monitors the loss change of each epoch, `ModelCheckpoint` saves network parameters and models for re-training or inference, and `TimeMonitor` monitors the training time of each epoch and each step, terminates the training in advance, and dynamically adjusts parameters. - -## MindSpore Built-in Callback Functions - -- ModelCheckpoint - - This function is combined with the model training process, and saves the model and network parameters after training to facilitate re-inference or re-training. `ModelCheckpoint` is generally used together with `CheckpointConfig`. `CheckpointConfig` is a parameter configuration class that can be used to customize the checkpoint storage policy. - - For details, see [Saving Models](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html). - -- SummaryCollector - - This function collects common information, such as loss, learning rate, computational graph, and parameter weight, helping you visualize the training process and view information. In addition, you can perform the summary operation to collect data from the summary file. - - For details, see [Collecting Summary Record](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/summary_record.html). - -- LossMonitor - - This function monitors the loss change during training. When the loss is NAN or INF, the training is terminated in advance. Loss information can be recorded in logs for you to view. - - For details, see the [Custom Debugging Information](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/custom_debugging_info.html#mindsporecallback). - -- TimeMonitor - - This function monitors the running time of each epoch and each step during training. - -## MindSpore Custom Callback Functions - -MindSpore provides powerful built-in callback functions and allows you to customize callback functions. For special requirements, you can customize callback functions based on the Callback base class. 
The callback function records important information during training and transfers the information to the callback object through a dictionary variable cb_params. You can obtain related attributes from each custom callback function and perform custom operations. - -The following examples are used to introduce the custom callback functions: - -1. Set a time threshold to terminate the training within a specified time. When the training time reaches the threshold, the training process is terminated. - -2. Save the checkpoint file with the highest accuracy during training. You can customize the function to save a model with the highest accuracy after each epoch. - -For details, see [Custom Callback](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/custom_debugging_info.html#custom-callback). - -According to the tutorial, you can easily customize other callback functions. For example, customize a function to output the detailed training information, including the training progress, training step, training name, and loss value, after each training is complete; terminate training when the loss or model accuracy reaches a certain value by setting the loss or model accuracy threshold. When the loss or model accuracy reaches the threshold, the training is terminated in advance. 
diff --git a/docs/programming_guide/source_en/cell.md b/docs/programming_guide/source_en/cell.md deleted file mode 100644 index 6b28099963cc4bd924a63142d016c591c923d6d8..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/cell.md +++ /dev/null @@ -1,376 +0,0 @@ -# Cell Building and Its Subclasses - - - -- [Cell Building and Its Subclasses](#cell-building-and-its-subclasses) - - [Overview](#overview) - - [Key Member Functions](#key-member-functions) - - [construct](#construct) - - [parameters_dict](#parameters_dict) - - [cells_and_names](#cells_and_names) - - [set_grad](#set_grad) - - [Relationship Between the nn Module and the ops Module](#relationship-between-the-nn-module-and-the-ops-module) - - [Model Layers](#model-layers) - - [Built-in Model Layers](#built-in-model-layers) - - [Application Cases](#application-cases) - - [Loss Functions](#loss-functions) - - [Built-in Loss Functions](#built-in-loss-functions) - - [Application Cases](#application-cases-1) - - [Optimization Algorithms](#optimization-algorithms) - - [Building a Customized Network](#building-a-customized-network) - - - - - -## Overview - -The `Cell` class of MindSpore is the base class for building all networks and the basic unit of a network. When you need to customize a network, you need to inherit the `Cell` class and override the `__init__` and `construct` methods. - -Loss functions, optimizers, and model layers are parts of the network structure and can be implemented only by inheriting the `Cell` class. You can also customize them based on service requirements. - -The following describes the key member functions of the `Cell` class, the built-in loss functions, optimizers, and model layers of MindSpore implemented based on the `Cell` class, and how to use them, as well as describes how to use the `Cell` class to build a customized network. - -## Key Member Functions - -### construct - -The `Cell` class overrides the `__call__` method. 
When the `Cell` class instance is called, the `construct` method is executed. The network structure is defined in the `construct` method. - -In the following example, a simple network is built to implement the convolution computing function. The operators in the network are defined in `__init__` and used in the `construct` method. The network structure of the case is as follows: `Conv2d` -> `BiasAdd`. - -In the `construct` method, `x` is the input data, and `output` is the result obtained after the network structure computation. - -```python -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Parameter -from mindspore.common.initializer import initializer - -class Net(nn.Cell): - def __init__(self, in_channels=10, out_channels=20, kernel_size=3): - super(Net, self).__init__() - self.conv2d = ops.Conv2D(out_channels, kernel_size) - self.bias_add = ops.BiasAdd() - self.weight = Parameter(initializer('normal', [out_channels, in_channels, kernel_size, kernel_size])) - - def construct(self, x): - output = self.conv2d(x, self.weight) - output = self.bias_add(output, self.bias) - return output -``` - -### parameters_dict - -The `parameters_dict` method is used to identify all parameters in the network structure and return `OrderedDict` with key as the parameter name and value as the parameter value. - -There are many other methods for returning parameters in the `Cell` class, such as `get_parameters` and `trainable_params`. For details, see [MindSpore API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/nn/mindspore.nn.Cell.html). 
- -A code example is as follows: - -```python -net = Net() -result = net.parameters_dict() -print(result.keys()) -print(result['weight']) -``` - -The following information is displayed: - -```text -odict_keys(['weight']) -Parameter (name=weight, shape=(20, 10, 3, 3), dtype=Float32, requires_grad=True) -``` - -In the example, `Net` uses the preceding network building case to print names of all parameters on the network and the result of the `weight` parameter. - -### cells_and_names - -The `cells_and_names` method is an iterator that returns the name and content of each `Cell` on the network. - -The case simply implements the function of obtaining and printing the name of each `Cell`. According to the network structure, there is a `Cell` whose name is `nn.Conv2d`. - -`nn.Conv2d` is a convolutional layer encapsulated by MindSpore using `Cell` as the base class. For details, see "Model Layers". - -A code example is as follows: - -```python -import mindspore.nn as nn - -class Net1(nn.Cell): - def __init__(self): - super(Net1, self).__init__() - self.conv = nn.Conv2d(3, 64, 3, has_bias=False, weight_init='normal') - - def construct(self, x): - out = self.conv(x) - return out - -net = Net1() -names = [] -for m in net.cells_and_names(): - print(m) - names.append(m[0]) if m[0] else None -print('-------names-------') -print(names) -``` - -```text -('', Net1< - (conv): Conv2d - >) -('conv', Conv2d) --------names------- -['conv'] -``` - -### set_grad - -The `set_grad` API is used to construct a backward network. If no parameter is transferred for calling the API, the default value of `requires_grad` is True. This API needs to be used in the scenario where the backward network is computed. - -Take `TrainOneStepCell` as an example. Its API function is to perform single-step training on the network. The backward network needs to be computed. Therefore, `set_grad` needs to be used in the initialization method. 
- -A part of the `TrainOneStepCell` code is as follows: - -```python -class TrainOneStepCell(Cell): - def __init__(self, network, optimizer, sens=1.0): - super(TrainOneStepCell, self).__init__(auto_prefix=False) - self.network = network - self.network.set_grad() - ...... -``` - -If using similar APIs such as `TrainOneStepCell`, you do not need to use `set_grad`. The internal encapsulation is implemented. - -If you need to customize APIs of this training function, call APIs internally or set `network.set_grad` externally. - -## Relationship Between the nn Module and the ops Module - -The nn module of MindSpore is a model component implemented by Python. It encapsulates low-level APIs, including various model layers, loss functions, and optimizers. - -In addition, nn provides some APIs with the same name as the `Primitive` operator to further encapsulate the `Primitive` operator and provide more friendly APIs. - -Reanalyze the case of the `construct` method described above. This case is the simplified content of the `nn.Conv2d` source code of MindSpore, and `ops.Conv2D` is internally called. The `nn.Conv2d` convolution API adds the input parameter validation function and determines whether `bias` is used. It is an advanced encapsulated model layer. - -```python -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Parameter -from mindspore.common.initializer import initializer - -class Net(nn.Cell): - def __init__(self, in_channels=10, out_channels=20, kernel_size=3): - super(Net, self).__init__() - self.conv2d = ops.Conv2D(out_channels, kernel_size) - self.bias_add = ops.BiasAdd() - self.weight = Parameter(initializer('normal', [out_channels, in_channels, kernel_size, kernel_size])) - - def construct(self, x): - output = self.conv2d(x, self.weight) - output = self.bias_add(output, self.bias) - return output -``` - -## Model Layers - -MindSpore can use `Cell` as the base class to build the network structure. 
- -To facilitate user operations, MindSpore provides a large number of built-in model layers, which can be directly called by using APIs. - -You can also customize a model. For details, see "Building a Customized Network." - -### Built-in Model Layers - -The MindSpore framework provides abundant APIs at the layer of `mindspore.nn`. The APIs are as follows: - -- Activation layer - - The activation layer has a large number of built-in activation functions, which are often used in defining the network structure. The activation function adds a nonlinear operation to the network, so that the network can have a better fitting effect. - - Main APIs include `Softmax`, `Relu`, `Elu`, `Tanh` and `Sigmoid`. - -- Basic layer - - The basic layer implements some common basic structures on the network, such as the full connection layer, Onehot encoding, Dropout, and flat layer. - - Main APIs include `Dense`, `Flatten`, `Dropout`, `Norm` and `OneHot`. - -- Container layer - - The main function of the container layer is to implement the data structures for storing multiple cells. - - Main APIs include `SequentialCell` and `CellList`. - -- Convolutional layer - - Convolutional layer provides some convolution computation functions, such as common convolution, deep convolution, and convolution transposition. - - Main APIs include `Conv2d`, `Conv1d`, `Conv2dTranspose` and `Conv1dTranspose`. - -- Pooling layer - - The pooling layer provides computation functions such as average pooling and maximum pooling. - - The main APIs are `AvgPool2d`, `MaxPool2d`, and `AvgPool1d`. - -- Embedding layer - - The embedding layer provides the word embedding computation function to map input words into dense vectors. - - The main APIs include `Embedding`, `EmbeddingLookup` and `EmbeddingLookUpSplitMode`. - -- Long short-term memory recurrent layer - - The long short-term memory recurrent layer provides the LSTM computation function. `LSTM` internally calls the `LSTMCell` API. 
The `LSTMCell` is an LSTM unit that performs operations on an LSTM layer. When operations at multiple LSTM network layers are involved, the `LSTM` API is used. - - The main APIs include `LSTM` and `LSTMCell`. - -- Normalization layer - - The normalization layer provides some normalization methods, that is, converting data into a mean value and a standard deviation by means of linear transformation or the like. - - Main APIs include `BatchNorm1d`, `BatchNorm2d`, `LayerNorm`, `GroupNorm` and `GlobalBatchNorm`. - -- Mathematical computation layer - - The mathematical computation layer provides some computation functions formed by operators, for example, data generation and some other mathematical computations. - - Main APIs include `ReduceLogSumExp`, `Range`, `LinSpace` and `LGamma`. - -- Image layer - - The image computation layer provides some functions related to matrix computing to transform and compute image data. - - Main APIs include `ImageGradients`, `SSIM`, `MSSSIM`, `PSNR` and `CentralCrop`. - -- Quantization layer - - Quantization is to convert data from the float type to the int type within a data range. Therefore, the quantization layer provides some data quantization methods and model layer structure encapsulation. - - Main APIs include `Conv2dBnAct`, `DenseBnAct`, `Conv2dBnFoldQuant` and `LeakyReLUQuant`. - -### Application Cases - -Model layers of MindSpore are under `mindspore.nn`. 
The usage method is as follows: - -```python -import mindspore.nn as nn - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.conv = nn.Conv2d(3, 64, 3, has_bias=False, weight_init='normal') - self.bn = nn.BatchNorm2d(64) - self.relu = nn.ReLU() - self.flatten = nn.Flatten() - self.fc = nn.Dense(64 * 222 * 222, 3) - - def construct(self, x): - x = self.conv(x) - x = self.bn(x) - x = self.relu(x) - x = self.flatten(x) - out = self.fc(x) - return out -``` - -The preceding network building case shows that the program calls the APIs of the `Conv2d`, `BatchNorm2d`, `ReLU`, `Flatten`, and `Dense` model layers. - -It is defined in the `Net` initialization method and runs in the `construct` method. These model layer APIs are connected in sequence to form an executable network. - -## Loss Functions - -Currently, MindSpore supports the following loss functions: `L1Loss`, `MSELoss`, `SmoothL1Loss`, `SoftmaxCrossEntropyWithLogits`, `SampledSoftmaxLoss`, `BCELoss`, and `CosineEmbeddingLoss`. - -All loss functions of MindSpore are implemented by subclasses of `Cell`. Therefore, customized loss functions are also supported. For details about how to build a loss function, see "Building a Customized Network." - -### Built-in Loss Functions - -- L1Loss - - Computes the absolute value error of two input data for the regression model. The default value of `reduction` is mean. If the value of `reduction` is sum, the loss accumulation result is returned. If the value of `reduction` is none, the result of each loss is returned. - -- MSELoss - - Computes the square error of two input data for the regression model. The `reduction` parameter is the same as the `L1Loss` parameter. - -- SmoothL1Loss - - `SmoothL1Loss` is the smooth L1 loss function, which is used for the regression model. The default value of the `beta` threshold is 1. - -- SoftmaxCrossEntropyWithLogits - - Cross entropy loss function, which is used to classify models. 
If the tag data is not encoded in the one-hot mode, set `sparse` to True. The default value of `reduction` is none. The meaning of this parameter is the same as that of `L1Loss`. - -- CosineEmbeddingLoss - - `CosineEmbeddingLoss` is used to measure the similarity between two inputs and is used for classification models. The default value of `margin` is 0.0. The `reduction` parameter is the same as the `L1Loss` parameter. - -- BCELoss - - Binary cross entropy loss is used for binary classification. `weight` is a rescaling weight applied to the loss of each batch element. The default value of `weight` is None, which means the weight values are all 1. The default value of `reduction` parameter is none. The `reduction` parameter is the same as the `L1Loss` parameter. - -- SampledSoftmaxLoss - - Sampled softmax loss function, which is used for classification model when the number of class is large. `num_sampled` is the number of classes to randomly sample. `num_class` is the number of possible classes. `num_true` is the number of target classes per training example. `sampled_values` is the sampled candidate. The default value of `sampled_values` is None, which means UniformCandidateSampler is applied. `remove_accidental_hits` is the switch of whether to remove "accidental hits". The default value of `remove_accidental_hits` is True. `seed` is the random seed for candidate sampling with the default value of 0. The default value of reduction parameter is none. The `reduction` parameter is the same as the L1Loss parameter. - -### Application Cases - -All loss functions of MindSpore are stored in mindspore.nn. 
The usage method is as follows: - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import Tensor - -loss = nn.L1Loss() -input_data = Tensor(np.array([[1, 2, 3], [2, 3, 4]]).astype(np.float32)) -target_data = Tensor(np.array([[0, 2, 5], [3, 1, 1]]).astype(np.float32)) -print(loss(input_data, target_data)) -``` - -```text -1.5 -``` - -In this case, two pieces of tensor data are built. The `nn.L1Loss` API is used to define the loss, `input_data` and `target_data` are transferred to the loss, and the L1Loss computation is performed. The result is 1.5. If loss is set to nn.L1Loss(reduction='sum'), the result is 9.0. If loss is set to nn.L1Loss(reduction='none'), the result is [[1. 0. 2.] [1. 2. 3.]]. - -## Optimization Algorithms - -`mindspore.nn.optim` is a module that implements various optimization algorithms in the MindSpore framework. For details, see [Optimization Algorithms](https://www.mindspore.cn/doc/programming_guide/en/master/optim.html) - -## Building a Customized Network - -Both the network structure and the model layers (e.g. loss functions and optimizers mentioned above) are essentially a `Cell`. Therefore, they can be customized. - -Construct a subclass inherited from `Cell`, define the operator and model layer in the `__init__` method, and build the network structure in the `construct` method. - -Take the LeNet network as an example. Structure units such as the convolutional layer, pooling layer, and full connection layer are defined in the `__init__` method, and the defined content is connected together in the `construct` method to form a complete LeNet network structure. 
- -The LeNet network is implemented as follows: - -```python -import mindspore.nn as nn - -class LeNet5(nn.Cell): - def __init__(self): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(1, 6, 5, pad_mode="valid") - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode="valid") - self.fc1 = nn.Dense(16 * 5 * 5, 120) - self.fc2 = nn.Dense(120, 84) - self.fc3 = nn.Dense(84, 3) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x -``` diff --git a/docs/programming_guide/source_en/conf.py b/docs/programming_guide/source_en/conf.py deleted file mode 100644 index 981c5b76ce56dd91c26e0e701079f88b9c6d9339..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/conf.py +++ /dev/null @@ -1,80 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -import os -import sys -import IPython -import re -import nbsphinx as nbs - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', - 'nbsphinx', - 'sphinx.ext.mathjax', - 'IPython.sphinxext.ipython_console_highlighting' -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_static_path = ['_static'] - -# Remove extra outputs for nbsphinx extension. 
-nbsphinx_source_re = re.compile(r"(app\.connect\('html-collect-pages', html_collect_pages\))") -nbsphinx_math_re = re.compile(r"(\S.*$)") -mod_path = os.path.abspath(nbs.__file__) -with open(mod_path, "r+", encoding="utf8") as f: - contents = f.readlines() - for num, line in enumerate(contents): - _content_re = nbsphinx_source_re.search(line) - if _content_re and "#" not in line: - contents[num] = nbsphinx_source_re.sub(r"# \g<1>", line) - if "mathjax_config = app.config" in line and "#" not in line: - contents[num:num+10] = [nbsphinx_math_re.sub(r"# \g<1>", i) for i in contents[num:num+10]] - break - f.seek(0) - f.writelines(contents) \ No newline at end of file diff --git a/docs/programming_guide/source_en/context.md b/docs/programming_guide/source_en/context.md deleted file mode 100644 index b5abfabd764c6873da99fc71b90fee6b589f9f53..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/context.md +++ /dev/null @@ -1,193 +0,0 @@ -# Running Management - - - -- [Running Management](#running-management) - - [Overview](#overview) - - [Execution Mode Management](#execution-mode-management) - - [Mode Selection](#mode-selection) - - [Mode Switching](#mode-switching) - - [Hardware Management](#hardware-management) - - [Distributed Management](#distributed-management) - - [Maintenance and Test Management](#maintenance-and-test-management) - - [Profiling Data Collection](#profiling-data-collection) - - [Saving MindIR](#saving-mindir) - - [Print Operator Disk Flushing](#print-operator-disk-flushing) - - - - - -## Overview - -Before initializing the network, configure the context parameter to control the policy executed by the program. For example, you can select an execution mode and backend, and configure distributed parameters. Different context parameter configurations implement different functions, including execution mode management, hardware management, distributed management, and maintenance and test management. 
- -## Execution Mode Management - -MindSpore supports two running modes: PyNative and Graph. - -- `PYNATIVE_MODE`: dynamic graph mode. In this mode, operators in the neural network are delivered and executed one by one, facilitating the compilation and debugging of the neural network model. - -- `GRAPH_MODE`: static graph mode or graph mode. In this mode, the neural network model is compiled into an entire graph, and then the graph is delivered for execution. This mode uses graph optimization to improve the running performance and facilitates large-scale deployment and cross-platform running. - -### Mode Selection - -You can set and control the running mode of the program. By default, MindSpore is in PyNative mode. - -A code example is as follows: - -```python -from mindspore import context -context.set_context(mode=context.GRAPH_MODE) -``` - -### Mode Switching - -You can switch between the two modes. - -When MindSpore is in PyNative mode, you can switch it to the graph mode using `context.set_context(mode=context.GRAPH_MODE)`. Similarly, when MindSpore is in graph mode, you can switch it to PyNative mode using `context.set_context(mode=context.PYNATIVE_MODE)`. 
- -A code example is as follows: - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor - -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -conv = nn.Conv2d(3, 4, 3, bias_init='zeros') -input_data = Tensor(np.ones([1, 3, 5, 5]).astype(np.float32)) -conv(input_data) -context.set_context(mode=context.PYNATIVE_MODE) - -conv(input_data) -``` - -```text -Tensor(shape=[1, 4, 5, 5], dtype=Float32, value= -[[[[ 1.64782144e-02, 5.31007685e-02, 5.31007685e-02, 5.31007685e-02, 5.11828624e-02], - [ 3.00714076e-02, 6.57572001e-02, 6.57572001e-02, 6.57572001e-02, 4.35083285e-02], - [ 3.00714076e-02, 6.57572001e-02, 6.57572001e-02, 6.57572001e-02, 4.35083285e-02] - [ 3.00714076e-02, 6.57572001e-02, 6.57572001e-02, 6.57572001e-02, 4.35083285e-02], - [ 1.84759758e-02, 4.71352898e-02, 4.71352898e-02, 4.71352898e-02, 3.72093469e-02]], - [[-3.36203352e-02, -6.12429380e-02, -6.12429380e-02, -6.12429380e-02, -4.33492810e-02], - [-2.67659649e-02, -8.04031491e-02, -8.04031491e-02, -8.04031491e-02, -6.84653893e-02], - [-2.67659649e-02, -8.04031491e-02, -8.04031491e-02, -8.04031491e-02, -6.84653893e-02] - [-2.67659649e-02, -8.04031491e-02, -8.04031491e-02, -8.04031491e-02, -6.84653893e-02], - [-5.57974726e-03, -6.80863336e-02, -6.80863336e-02, -6.80863336e-02, -8.38923305e-02]], - [[-1.60222687e-02, 2.26615220e-02, 2.26615220e-02, 2.26615220e-02, 6.03060052e-02], - [-6.76476881e-02, -2.96694487e-02, -2.96694487e-02, -2.96694487e-02, 4.86185402e-02], - [-6.76476881e-02, -2.96694487e-02, -2.96694487e-02, -2.96694487e-02, 4.86185402e-02] - [-6.76476881e-02, -2.96694487e-02, -2.96694487e-02, -2.96694487e-02, 4.86185402e-02], - [-6.52819276e-02, -3.50066647e-02, -3.50066647e-02, -3.50066647e-02, 2.85858363e-02]] - [[-3.10218725e-02, -3.84682454e-02, -3.84682454e-02, -3.84682454e-02, -8.58424231e-03], - [-4.27014455e-02, -7.07850009e-02, -7.07850009e-02, -7.07850009e-02, -5.36267459e-02], - [-4.27014455e-02, -7.07850009e-02, 
-7.07850009e-02, -7.07850009e-02, -5.36267459e-02] - [-4.27014455e-02, -7.07850009e-02, -7.07850009e-02, -7.07850009e-02, -5.36267459e-02], - [-1.23060495e-02, -4.99926135e-02, -4.99926135e-02, -4.99926135e-02, -4.71802950e-02]]]]) -``` - -In the preceding example, the running mode is set to `GRAPH_MODE` and then switched to `PYNATIVE_MODE`. - -> This code is applicable to GPU environment. - -## Hardware Management - -Hardware management involves the `device_target` and `device_id` parameters. - -- `device_target`: sets the target device. Ascend, GPU, and CPU are supported. Set this parameter based on the actual requirements. - -- `device_id`: specifies the physical sequence number of a device, that is, the actual sequence number of the device on the corresponding host. If the target device is Ascend and the specification is N*Ascend (N > 1, for example, 8*Ascend), in non-distributed mode, you can set `device_id` to determine the device ID for program execution to avoid device usage conflicts. The value ranges from 0 to the total number of devices minus 1. The total number of devices cannot exceed 4096. The default value is 0. - -> On the GPU and CPU, the `device_id` parameter setting is invalid. - -A code example is as follows: - -```python -from mindspore import context -context.set_context(device_target="Ascend", device_id=6) -``` - -## Distributed Management - -The context contains the context.set_auto_parallel_context API that is used to configure parallel training parameters. This API must be called before the network is initialized. - -> For details about distributed management, see [Parallel Distributed Training](https://www.mindspore.cn/doc/programming_guide/en/master/auto_parallel.html). 
- -## Maintenance and Test Management - -To facilitate maintenance and fault locating, the context provides a large number of maintenance and test parameter configurations, such as profiling data collection, asynchronous data dump function, and print operator disk flushing. - -### Profiling Data Collection - -The system can collect profiling data during training and use the profiling tool for performance analysis. Currently, the following profiling data can be collected: - -- `enable_profiling`: indicates whether to enable the profiling function. If this parameter is set to True, the profiling function is enabled, and profiling options are read from `enable_options`. If this parameter is set to False, the profiling function is disabled and only training_trace is collected. - -- `profiling_options`: profiling collection options. The values are as follows. Multiple data items can be collected. - - - `result_path`: saving the path of the profiling collection result file. The directory spectified by this parameter needs to be created in advance on the training environment (container or host side) and ensure that the running user configured during installation has read and write permissions. It supports the configuration of absolute or relative paths(relative to the current path when executing the command line). The absolute path configuration starts with '/', for example:/home/data/output. The relative path configuration directly starts with the directory name, for example:output; - - - `training_trace`: collect iterative trajectory data, that is, the training task and software information of the AI software stack, to achieve performance analysis of the training task, focusing on data enhancement, forward and backward calculation, gradient aggregation update and other related data. 
The value is on/off; - - - `task_trace`: collect task trajectory data, that is, the hardware information of the HWTS/AICore of the Ascend 910 processor, and analyze the information of beginning and ending of the task. The value is on/off; - - - `aicpu_trace`: collect profiling data enhanced by aicpu data. The value is on/off; - - - `fp_point`: specify the start position of the forward operator of the training network iteration trajectory, which is used to record the start timestamp of the forward calculation. The configuration value is the name of the first operator specified in the forward direction. when the value is empty, the system will automatically obtain the forward operator name; - - - `bp_point`: specify the end position of the iteration trajectory reversal operator of the training network, record the end timestamp of the backward calculation. The configuration value is the name of the operator after the specified reverse. when the value is empty, the system will automatically obtain the backward operator name; - - - `ai_core_metrics` the values are as follows: - - - ArithmeticUtilization: percentage statistics of various calculation indicators; - - - PipeUtilization: the time-consuming ratio of calculation unit and handling unit, this item is the default value; - - - Memory: percentage of external memory read and write instructions; - - - MemoryL0: percentage of internal memory read and write instructions; - - - ResourceConflictRatio: proportion of pipline queue instructions. - -A code example is as follows: - -```python -from mindspore import context -context.set_context(enable_profiling=True, profiling_options= '{"result_path":"/home/data/output","training_trace":"on"}') -``` - -### Saving MindIR - -Saving the intermediate code of each compilation stage through context.set_context(save_graphs=True). - -The saved intermediate code has two formats: one is a text format with a suffix of `.ir`, and the other is a graphical format with a suffix of `.dot`. 
- -When the network is large, it is recommended to use a more efficient text format for viewing. When the network is not large, it is recommended to use a more intuitive graphical format for viewing. - -A code example is as follows: - -```python -from mindspore import context -context.set_context(save_graphs=True) -``` - -> For a detailed introduction of MindIR, please refer to [MindSpore IR(MindIR)](https://www.mindspore.cn/doc/note/en/master/design/mindspore/mindir.html). - -### Print Operator Disk Flushing - -By default, the MindSpore self-developed print operator can output the tensor or character string information entered by users. Multiple character string inputs, multiple tensor inputs, and hybrid inputs of character strings and tensors are supported. The input parameters are separated by commas (,). - -> For details about the print function, see [MindSpore Print Operator](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/custom_debugging_info.html#mindspore-print-operator). - -- `print_file_path`: saves the print operator data to a file and disables the screen printing function. If the file to be saved exists, a timestamp suffix is added to the file. Saving data to a file can solve the problem that the data displayed on the screen is lost when the data volume is large. - -A code example is as follows: - -```python -from mindspore import context -context.set_context(print_file_path="print.pb") -``` - -> For details about the context API, see [mindspore.context](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.context.html). diff --git a/docs/programming_guide/source_en/customized.rst b/docs/programming_guide/source_en/customized.rst deleted file mode 100644 index c32e781c7c56a12f091135e194808e4c4ff05179..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/customized.rst +++ /dev/null @@ -1,9 +0,0 @@ -Custom Operators -=================== - -.. 
toctree:: - :maxdepth: 1 - - Custom Operators (Ascend) - Custom Operators (GPU) - Custom Operators (CPU) diff --git a/docs/programming_guide/source_en/dataset_conversion.md b/docs/programming_guide/source_en/dataset_conversion.md deleted file mode 100644 index 475c42a6734429ac9e114ae181b6b30faf64ab09..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/dataset_conversion.md +++ /dev/null @@ -1,529 +0,0 @@ -# MindSpore Data Format Conversion - - - -- [MindSpore Data Format Conversion](#mindspore-data-format-conversion) - - [Overview](#overview) - - [Converting Non-Standard Dataset to MindRecord](#converting-non-standard-dataset-to-mindrecord) - - [Converting CV Dataset](#converting-cv-dataset) - - [Converting NLP Dataset](#converting-nlp-dataset) - - [Converting Common Dataset to MindRecord](#converting-common-dataset-to-mindrecord) - - [Converting the CIFAR-10 Dataset](#converting-the-cifar-10-dataset) - - [Converting the ImageNet Dataset](#converting-the-imagenet-dataset) - - [Converting CSV Dataset](#converting-csv-dataset) - - [Converting TFRecord Dataset](#converting-tfrecord-dataset) - - - - - -## Overview - -You can convert non-standard datasets and common datasets into the MindSpore data format (that is, MindRecord) to easily load the datasets to MindSpore for training. In addition, the performance of MindSpore is optimized in some scenarios. You can use MindRecord to achieve better performance. - -## Converting Non-Standard Dataset to MindRecord - -The following describes how to convert CV and NLP data into MindRecord and read the MindRecord file through `MindDataset`. - -### Converting CV Dataset - -This example describes how to convert a CV dataset into MindRecord and use `MindDataset` to load the dataset. - -Create a MindRecord file containing 100 records, whose samples include the `file_name` (string), `label` (integer), and `data` (binary) fields. Use `MindDataset` to read the MindRecord file. - -1. Import related modules. 
- - ```python - from io import BytesIO - import os - import mindspore.dataset as ds - from mindspore.mindrecord import FileWriter - import mindspore.dataset.vision.c_transforms as vision - from PIL import Image - ``` - -2. Generate 100 images and convert them to MindRecord. - - ```python - MINDRECORD_FILE = "test.mindrecord" - - if os.path.exists(MINDRECORD_FILE): - os.remove(MINDRECORD_FILE) - os.remove(MINDRECORD_FILE + ".db") - - writer = FileWriter(file_name=MINDRECORD_FILE, shard_num=1) - - cv_schema = {"file_name": {"type": "string"}, "label": {"type": "int32"}, "data": {"type": "bytes"}} - writer.add_schema(cv_schema, "it is a cv dataset") - - writer.add_index(["file_name", "label"]) - - data = [] - for i in range(100): - i += 1 - - sample = {} - white_io = BytesIO() - Image.new('RGB', (i*10, i*10), (255, 255, 255)).save(white_io, 'JPEG') - image_bytes = white_io.getvalue() - sample['file_name'] = str(i) + ".jpg" - sample['label'] = i - sample['data'] = white_io.getvalue() - - data.append(sample) - if i % 10 == 0: - writer.write_raw_data(data) - data = [] - - if data: - writer.write_raw_data(data) - - writer.commit() - ``` - - ```text - MSRStatus.SUCCESS - ``` - - **Parameter description:** - - `MINDRECORD_FILE`: path of the output MindRecord file. - -3. Read MindRecord using `MindDataset`. - - ```python - data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE) - decode_op = vision.Decode() - data_set = data_set.map(operations=decode_op, input_columns=["data"], num_parallel_workers=2) - count = 0 - for item in data_set.create_dict_iterator(output_numpy=True): - count += 1 - print("Got {} samples".format(count)) - ``` - - ```text - Got 100 samples - ``` - -### Converting NLP Dataset - -This example describes how to convert an NLP dataset into MindRecord and use `MindDataset` to load the dataset. The process of converting the text into the lexicographic order is omitted in this example. 
- -Create a MindRecord file containing 100 records, whose samples include eight fields of the integer type. Use `MindDataset` to read the MindRecord file. - -1. Import related modules. - - ```python - import os - import numpy as np - import mindspore.dataset as ds - from mindspore.mindrecord import FileWriter - ``` - -2. Generate 100 text samples and convert them to MindRecord. - - ```python - MINDRECORD_FILE = "test.mindrecord" - - if os.path.exists(MINDRECORD_FILE): - os.remove(MINDRECORD_FILE) - os.remove(MINDRECORD_FILE + ".db") - - writer = FileWriter(file_name=MINDRECORD_FILE, shard_num=1) - - nlp_schema = {"source_sos_ids": {"type": "int64", "shape": [-1]}, - "source_sos_mask": {"type": "int64", "shape": [-1]}, - "source_eos_ids": {"type": "int64", "shape": [-1]}, - "source_eos_mask": {"type": "int64", "shape": [-1]}, - "target_sos_ids": {"type": "int64", "shape": [-1]}, - "target_sos_mask": {"type": "int64", "shape": [-1]}, - "target_eos_ids": {"type": "int64", "shape": [-1]}, - "target_eos_mask": {"type": "int64", "shape": [-1]}} - writer.add_schema(nlp_schema, "it is a preprocessed nlp dataset") - - data = [] - for i in range(100): - i += 1 - - sample = {"source_sos_ids": np.array([i, i + 1, i + 2, i + 3, i + 4], dtype=np.int64), - "source_sos_mask": np.array([i * 1, i * 2, i * 3, i * 4, i * 5, i * 6, i * 7], dtype=np.int64), - "source_eos_ids": np.array([i + 5, i + 6, i + 7, i + 8, i + 9, i + 10], dtype=np.int64), - "source_eos_mask": np.array([19, 20, 21, 22, 23, 24, 25, 26, 27], dtype=np.int64), - "target_sos_ids": np.array([28, 29, 30, 31, 32], dtype=np.int64), - "target_sos_mask": np.array([33, 34, 35, 36, 37, 38], dtype=np.int64), - "target_eos_ids": np.array([39, 40, 41, 42, 43, 44, 45, 46, 47], dtype=np.int64), - "target_eos_mask": np.array([48, 49, 50, 51], dtype=np.int64)} - - data.append(sample) - if i % 10 == 0: - writer.write_raw_data(data) - data = [] - - if data: - writer.write_raw_data(data) - - writer.commit() - ``` - - ```text - 
MSRStatus.SUCCESS - ``` - - **Parameter description:** - - `MINDRECORD_FILE`: path of the output MindRecord file. - -3. Read MindRecord using `MindDataset`. - - ```python - data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE) - count = 0 - for item in data_set.create_dict_iterator(): - count += 1 - print("Got {} samples".format(count)) - ``` - - ```text - Got 100 samples - ``` - -## Converting Common Dataset to MindRecord - -MindSpore provides tool classes for converting common datasets to MindRecord. The following table lists part of common datasets and their corresponding tool classes. - -| Dataset | Tool Class | -| -------- | ------------ | -| CIFAR-10 | Cifar10ToMR | -| ImageNet | ImageNetToMR | -| TFRecord | TFRecordToMR | -| CSV File | CsvToMR | - -For details about dataset conversion, see [MindSpore API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.mindrecord.html). - -### Converting the CIFAR-10 Dataset - -You can use the `Cifar10ToMR` class to convert the original CIFAR-10 data to MindRecord and use `MindDataset` to load the data. - -1. Download and decompress the [CIFAR-10 dataset](https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz). Execute the following command: - - ```bash - !wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-python.tar.gz - !mkdir -p datasets - !tar -xzf cifar-10-python.tar.gz -C datasets - !tree ./datasets/cifar-10-batches-py - ``` - - ```text - ./datasets/cifar-10-batches-py - ├── batches.meta - ├── data_batch_1 - ├── data_batch_2 - ├── data_batch_3 - ├── data_batch_4 - ├── data_batch_5 - ├── readme.html - └── test_batch - - 0 directories, 8 files - ``` - -2. Import related modules. - - ```python - import os - import mindspore.dataset as ds - import mindspore.dataset.vision.c_transforms as vision - from mindspore.mindrecord import Cifar10ToMR - ``` - -3. 
Create the `Cifar10ToMR` object and call the `transform` API to convert the CIFAR-10 dataset to MindRecord. - - ```python - ds_target_path = "./datasets/mindspore_dataset_conversion/" - # clean old run files - os.system("rm -f {}*".format(ds_target_path)) - os.system("mkdir -p {}".format(ds_target_path)) - - CIFAR10_DIR = "./datasets/cifar-10-batches-py" - MINDRECORD_FILE = "./datasets/mindspore_dataset_conversion/cifar10.mindrecord" - cifar10_transformer = Cifar10ToMR(CIFAR10_DIR, MINDRECORD_FILE) - cifar10_transformer.transform(['label']) - ``` - - ```text - MSRStatus.SUCCESS - ``` - - **Parameter description:** - - `CIFAR10_DIR`: path of the CIFAR-10 dataset folder. - - `MINDRECORD_FILE`: path of the output MindRecord file. - -4. Read MindRecord using `MindDataset`. - - ```python - data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE) - decode_op = vision.Decode() - data_set = data_set.map(operations=decode_op, input_columns=["data"], num_parallel_workers=2) - count = 0 - for item in data_set.create_dict_iterator(output_numpy=True): - count += 1 - print("Got {} samples".format(count)) - ``` - - ```text - Got 50000 samples - ``` - -### Converting the ImageNet Dataset - -You can use the `ImageNetToMR` class to convert the original ImageNet data (images and annotations) to MindRecord and use `MindDataset` to load the data. - -1. Download the [ImageNet dataset](http://image-net.org/download), save all images in the `images/` folder, and use a mapping file `labels_map.txt` to record the mapping between images and labels. The mapping file contains two columns, which are the directory and label ID of each type of images. The two columns are separated by spaces. The following is an example of the mapping file: - - ```text - n01440760 0 - n01443537 1 - n01484850 2 - n01491361 3 - n01494475 4 - n01496331 5 - ``` - - The file directory structure is as follows: - - ```text - ├─ labels_map.txt - └─ images - └─ ...... - ``` - -2. 
Import the dataset conversion tool class `ImageNetToMR`. - - ```python - import mindspore.dataset as ds - import mindspore.dataset.vision.c_transforms as vision - from mindspore.mindrecord import ImageNetToMR - ``` - -3. Create the `ImageNetToMR` object and call the `transform` API to convert the dataset to MindRecord. - - ```python - IMAGENET_MAP_FILE = "./labels_map.txt" - IMAGENET_IMAGE_DIR = "./images" - MINDRECORD_FILE = "./imagenet.mindrecord" - imagenet_transformer = ImageNetToMR(IMAGENET_MAP_FILE, IMAGENET_IMAGE_DIR, MINDRECORD_FILE, partition_number=1) - imagenet_transformer.transform() - ``` - - **Parameter description:** - - `IMAGENET_MAP_FILE`: path of the label mapping file of the ImageNet dataset. - - `IMAGENET_IMAGE_DIR`: path of the folder where all ImageNet images are stored. - - `MINDRECORD_FILE`: path of the output MindRecord file. - -4. Read MindRecord using `MindDataset`. - - ```python - data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE) - decode_op = vision.Decode() - data_set = data_set.map(operations=decode_op, input_columns=["image"], num_parallel_workers=2) - count = 0 - for item in data_set.create_dict_iterator(output_numpy=True): - count += 1 - print("Got {} samples".format(count)) - ``` - -### Converting CSV Dataset - -Create a CSV file containing 5 records, convert the CSV file to MindRecord using the `CsvToMR` tool class, and then read the MindRecord file using `MindDataset`. - -1. Import related modules. - - ```python - import csv - import os - import mindspore.dataset as ds - from mindspore.mindrecord import CsvToMR - ``` - -2. Generate a CSV file and convert it to MindRecord. 
- - ```python - CSV_FILE = "test.csv" - MINDRECORD_FILE = "test.mindrecord" - - def generate_csv(): - headers = ["id", "name", "math", "english"] - rows = [(1, "Lily", 78.5, 90), - (2, "Lucy", 99, 85.2), - (3, "Mike", 65, 71), - (4, "Tom", 95, 99), - (5, "Jeff", 85, 78.5)] - with open(CSV_FILE, 'w', encoding='utf-8') as f: - writer = csv.writer(f) - writer.writerow(headers) - writer.writerows(rows) - - generate_csv() - - if os.path.exists(MINDRECORD_FILE): - os.remove(MINDRECORD_FILE) - os.remove(MINDRECORD_FILE + ".db") - - csv_transformer = CsvToMR(CSV_FILE, MINDRECORD_FILE, partition_number=1) - - csv_transformer.transform() - - assert os.path.exists(MINDRECORD_FILE) - assert os.path.exists(MINDRECORD_FILE + ".db") - ``` - - **Parameter description:** - - `CSV_FILE`: path of the CSV file. - - `MINDRECORD_FILE`: path of the output MindRecord file. - -3. Read MindRecord using `MindDataset`. - - ```python - data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE) - count = 0 - for item in data_set.create_dict_iterator(output_numpy=True): - count += 1 - print("Got {} samples".format(count)) - ``` - - ```text - Got 5 samples - ``` - -### Converting TFRecord Dataset - -> Currently, only TensorFlow 1.13.0-rc1 and later versions are supported. - -In this part of the example, TensorFlow needs to be installed in advance. If it is not installed, execute the following command to install it. For example, when this document is running as a Notebook, after the installation is complete, you need to restart the kernel to execute the subsequent code. - -```python -os.system('pip install tensorflow') if os.system('python -c "import tensorflow"') else print("TensorFlow installed") -``` - -```text -0 -``` - -Use TensorFlow to create a TFRecord file and convert the file to MindRecord using the `TFRecordToMR` tool class. Read the file using `MindDataset` and decode the `image_bytes` field using the `Decode` operator. - -1. Import related modules. 
- - ```python - import collections - from io import BytesIO - import os - import mindspore.dataset as ds - from mindspore.mindrecord import TFRecordToMR - import mindspore.dataset.vision.c_transforms as vision - from PIL import Image - import tensorflow as tf - ``` - -2. Generate a TFRecord file. - - ```python - TFRECORD_FILE = "test.tfrecord" - MINDRECORD_FILE = "test.mindrecord" - - def generate_tfrecord(): - def create_int_feature(values): - if isinstance(values, list): - feature = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values))) - else: - feature = tf.train.Feature(int64_list=tf.train.Int64List(value=[values])) - return feature - - def create_float_feature(values): - if isinstance(values, list): - feature = tf.train.Feature(float_list=tf.train.FloatList(value=list(values))) - else: - feature = tf.train.Feature(float_list=tf.train.FloatList(value=[values])) - return feature - - def create_bytes_feature(values): - if isinstance(values, bytes): - white_io = BytesIO() - Image.new('RGB', (10, 10), (255, 255, 255)).save(white_io, 'JPEG') - image_bytes = white_io.getvalue() - feature = tf.train.Feature(bytes_list=tf.train.BytesList(value=[image_bytes])) - else: - feature = tf.train.Feature(bytes_list=tf.train.BytesList(value=[bytes(values, encoding='utf-8')])) - return feature - - writer = tf.io.TFRecordWriter(TFRECORD_FILE) - - example_count = 0 - for i in range(10): - file_name = "000" + str(i) + ".jpg" - image_bytes = bytes(str("aaaabbbbcccc" + str(i)), encoding="utf-8") - int64_scalar = i - float_scalar = float(i) - int64_list = [i, i+1, i+2, i+3, i+4, i+1234567890] - float_list = [float(i), float(i+1), float(i+2.8), float(i+3.2), - float(i+4.4), float(i+123456.9), float(i+98765432.1)] - - features = collections.OrderedDict() - features["file_name"] = create_bytes_feature(file_name) - features["image_bytes"] = create_bytes_feature(image_bytes) - features["int64_scalar"] = create_int_feature(int64_scalar) - features["float_scalar"] = 
create_float_feature(float_scalar) - features["int64_list"] = create_int_feature(int64_list) - features["float_list"] = create_float_feature(float_list) - - tf_example = tf.train.Example(features=tf.train.Features(feature=features)) - writer.write(tf_example.SerializeToString()) - example_count += 1 - writer.close() - print("Write {} rows in tfrecord.".format(example_count)) - - generate_tfrecord() - ``` - - ```text - Write 10 rows in tfrecord. - ``` - - **Parameter description:** - - `TFRECORD_FILE`: path of the TFRecord file. - - `MINDRECORD_FILE`: path of the output MindRecord file. - -3. Convert the TFRecord to MindRecord. - - ```python - feature_dict = {"file_name": tf.io.FixedLenFeature([], tf.string), - "image_bytes": tf.io.FixedLenFeature([], tf.string), - "int64_scalar": tf.io.FixedLenFeature([], tf.int64), - "float_scalar": tf.io.FixedLenFeature([], tf.float32), - "int64_list": tf.io.FixedLenFeature([6], tf.int64), - "float_list": tf.io.FixedLenFeature([7], tf.float32), - } - - if os.path.exists(MINDRECORD_FILE): - os.remove(MINDRECORD_FILE) - os.remove(MINDRECORD_FILE + ".db") - - tfrecord_transformer = TFRecordToMR(TFRECORD_FILE, MINDRECORD_FILE, feature_dict, ["image_bytes"]) - tfrecord_transformer.transform() - - assert os.path.exists(MINDRECORD_FILE) - assert os.path.exists(MINDRECORD_FILE + ".db") - ``` - -4. Read MindRecord using `MindDataset`. 
- - ```python - data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE) - decode_op = vision.Decode() - data_set = data_set.map(operations=decode_op, input_columns=["image_bytes"], num_parallel_workers=2) - count = 0 - for item in data_set.create_dict_iterator(output_numpy=True): - count += 1 - print("Got {} samples".format(count)) - ``` - - ```text - Got 10 samples - ``` diff --git a/docs/programming_guide/source_en/dataset_loading.md b/docs/programming_guide/source_en/dataset_loading.md deleted file mode 100644 index 444940f7b3d4a2a815b4d4c081118dfe0d8311fb..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/dataset_loading.md +++ /dev/null @@ -1,699 +0,0 @@ -# Loading Dataset - - - -- [Loading Dataset](#loading-dataset) - - [Overview](#overview) - - [Loading Common Dataset](#loading-common-dataset) - - [CIFAR-10/100 Dataset](#cifar-10100-dataset) - - [VOC Dataset](#voc-dataset) - - [COCO Dataset](#coco-dataset) - - [Loading Datasets in Specific Format](#loading-datasets-in-specific-format) - - [MindRecord](#mindrecord) - - [Manifest](#manifest) - - [TFRecord](#tfrecord) - - [NumPy](#numpy) - - [CSV](#csv) - - [Loading User-defined Dataset](#loading-user-defined-dataset) - - [Constructing Dataset Generator Function](#constructing-dataset-generator-function) - - [Constructing Iterable Dataset Class](#constructing-iterable-dataset-class) - - [Constructing Random Accessible Dataset Class](#constructing-random-accessible-dataset-class) - - - - - -## Overview - -MindSpore can load common image datasets. You can directly use the classes in `mindspore.dataset` to load datasets. The following table lists the supported common datasets and corresponding classes. - -| Image Dataset | Dataset Class | Description | -| ---- | ---- | ---- | -| MNIST | MnistDataset | MNIST is a large handwritten digital image dataset. It has 60,000 training images and 10,000 test images and is often used to train various image processing systems. 
| -| CIFAR-10 | Cifar10Dataset | CIFAR-10 is a small image dataset that contains 60,000 32 x 32 color images of 10 categories. On average, each category contains 6,000 images, of which 5,000 images are training images and 1,000 images are test images. | -| CIFAR-100 | Cifar100Dataset | CIFAR-100 is similar to CIFAR-10, but it has 100 categories. On average, there are 600 images in each category, among which 500 images are training images and 100 images are test images. | -| CelebA | CelebADataset | CelebA is a large face image dataset that contains more than 200,000 face images of celebrities. Each image has 40 feature labels. | -| PASCAL-VOC | VOCDataset | PASCAL-VOC is a common image dataset, which is widely used in computer vision fields such as object detection and image segmentation. | -| COCO | CocoDataset | COCO is a large dataset for object detection, image segmentation, and pose estimation. | -| CLUE | CLUEDataset | CLUE is a large Chinese semantic comprehension dataset. | - -MindSpore can also load datasets in different data storage formats. You can directly use the corresponding classes in `mindspore.dataset` to load data files in the disk. The following table lists the supported data formats and corresponding classes. - -| Data Format | Dataset Class | Description | -| ---- | ---- | ---- | -| MindRecord | MindDataset | MindRecord is a self-developed data format of MindSpore. It features efficient read/write and easy distributed processing. | -| Manifest | ManifestDataset | Manifest is a data format supported by Huawei ModelArts. It describes the original files and labeling information and can be used for labeling, training, and inference. | -| TFRecord | TFRecordDataset | TFRecord is a binary data file format defined by TensorFlow. | -| NumPy | NumpySlicesDataset | NumPy data source refers to the NumPy array dataset that has been read into the memory. | -| Text File | TextFileDataset | Text File refers to common data in text format. 
| -| CSV File | CSVDataset | CSV refers to comma-separated values. Files in this format store tabular data in plain text. | - -MindSpore also supports user-defined dataset loading using `GeneratorDataset`. You can implement your own dataset classes as required. - -| Dataset Class | Description | -| ---- | ---- | -| GeneratorDataset | User defined class or function to load and process dataset. | -| NumpySlicesDataset | User defined data source to construct dataset using NumPy. | - -> For details about the API for dataset loading, see [MindSpore API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.html). - -## Loading Common Dataset - -The following describes how to load common datasets. - -### CIFAR-10/100 Dataset - -Download [CIFAR-10 dataset](https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz) and decompress it, the directory structure is as follows: - -```bash -!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz -!mkdir -p datasets -!tar -xzf cifar-10-binary.tar.gz -C datasets -!mkdir -p datasets/cifar-10-batches-bin/train datasets/cifar-10-batches-bin/test -!mv -f datasets/cifar-10-batches-bin/test_batch.bin datasets/cifar-10-batches-bin/test -!mv -f datasets/cifar-10-batches-bin/data_batch*.bin datasets/cifar-10-batches-bin/batches.meta.txt datasets/cifar-10-batches-bin/train -!tree ./datasets/cifar-10-batches-bin -``` - -```text -./datasets/cifar-10-batches-bin -├── readme.html -├── test -│ └── test_batch.bin -└── train - ├── batches.meta.txt - ├── data_batch_1.bin - ├── data_batch_2.bin - ├── data_batch_3.bin - ├── data_batch_4.bin - └── data_batch_5.bin - -2 directories, 8 files -``` - -The following example uses the `Cifar10Dataset` API to load the CIFAR-10 dataset, uses the sequential sampler to obtain five samples, and displays the shape and label of the corresponding image. - -The methods for loading the CIFAR-100 and MNIST datasets are similar. 
- -```python -import mindspore.dataset as ds - -DATA_DIR = "./datasets/cifar-10-batches-bin/train/" - -sampler = ds.SequentialSampler(num_samples=5) -dataset = ds.Cifar10Dataset(DATA_DIR, sampler=sampler) - -for data in dataset.create_dict_iterator(): - print("Image shape:", data['image'].shape, ", Label:", data['label']) -``` - -The output is as follows: - -```text -Image shape: (32, 32, 3) , Label: 6 -Image shape: (32, 32, 3) , Label: 9 -Image shape: (32, 32, 3) , Label: 9 -Image shape: (32, 32, 3) , Label: 4 -Image shape: (32, 32, 3) , Label: 1 -``` - -### VOC Dataset - -There are multiple versions of the VOC dataset, here uses VOC2012 as an example. Download [VOC2012 dataset](http://host.robots.ox.ac.uk/pascal/VOC/voc2012/VOCtrainval_11-May-2012.tar) and decompress it. The directory structure is as follows: - -```text -└─ VOCtrainval_11-May-2012 - └── VOCdevkit - └── VOC2012 - ├── Annotations - ├── ImageSets - ├── JPEGImages - ├── SegmentationClass - └── SegmentationObject -``` - -The following example uses the `VOCDataset` API to load the VOC2012 dataset, displays the original image shape and target image shape when segmentation and detection tasks are specified. 
- -```python -import mindspore.dataset as ds - -DATA_DIR = "VOCtrainval_11-May-2012/VOCdevkit/VOC2012/" - -dataset = ds.VOCDataset(DATA_DIR, task="Segmentation", usage="train", num_samples=2, decode=True, shuffle=False) - -print("[Segmentation]:") -for data in dataset.create_dict_iterator(): - print("image shape:", data["image"].shape) - print("target shape:", data["target"].shape) - -dataset = ds.VOCDataset(DATA_DIR, task="Detection", usage="train", num_samples=1, decode=True, shuffle=False) - -print("[Detection]:") -for data in dataset.create_dict_iterator(): - print("image shape:", data["image"].shape) - print("bbox shape:", data["bbox"].shape) -``` - -The output is as follows: - -```text -[Segmentation]: -image shape: (281, 500, 3) -target shape: (281, 500, 3) -image shape: (375, 500, 3) -target shape: (375, 500, 3) -[Detection]: -image shape: (442, 500, 3) -bbox shape: (2, 4) -``` - -### COCO Dataset - -There are multiple versions of the COCO dataset. Here, the validation dataset of COCO2017 is taken as an example. Download COCO2017 [validation dataset](http://images.cocodataset.org/zips/val2017.zip), [detection task annotation](http://images.cocodataset.org/annotations/annotations_trainval2017.zip) and [panoptic task annotation](http://images.cocodataset.org/annotations/panoptic_annotations_trainval2017.zip) and decompress them, take only a part of the validation dataset and store it as the following directory structure: - -```text -└─ COCO - ├── val2017 - └── annotations - ├── instances_val2017.json - ├── panoptic_val2017.json - └── person_keypoints_val2017.json -``` - -The following example uses the `CocoDataset` API to load the COCO dataset, and displays the data when object detection, stuff segmentation, keypoint detection, and panoptic segmentation tasks are specified. 
- -```python -import mindspore.dataset as ds - -DATA_DIR = "COCO/val2017/" -ANNOTATION_FILE = "COCO/annotations/instances_val2017.json" -KEYPOINT_FILE = "COCO/annotations/person_keypoints_val2017.json" -PANOPTIC_FILE = "COCO/annotations/panoptic_val2017.json" - -dataset = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", num_samples=1) -for data in dataset.create_dict_iterator(): - print("Detection:", data.keys()) - -dataset = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff", num_samples=1) -for data in dataset.create_dict_iterator(): - print("Stuff:", data.keys()) - -dataset = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint", num_samples=1) -for data in dataset.create_dict_iterator(): - print("Keypoint:", data.keys()) - -dataset = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic", num_samples=1) -for data in dataset.create_dict_iterator(): - print("Panoptic:", data.keys()) -``` - -The output is as follows: - -```text -Detection: dict_keys(['image', 'bbox', 'category_id', 'iscrowd']) -Stuff: dict_keys(['image', 'segmentation', 'iscrowd']) -Keypoint: dict_keys(['image', 'keypoints', 'num_keypoints']) -Panoptic: dict_keys(['image', 'bbox', 'category_id', 'iscrowd', 'area']) -``` - -## Loading Datasets in Specific Format - -The following describes how to load dataset files in specific formats. - -### MindRecord - -MindRecord is a data format defined by MindSpore. Using MindRecord can improve performance. - -> For details about how to convert a dataset into the MindRecord data format, see [Data Format Conversion](https://www.mindspore.cn/doc/programming_guide/en/master/dataset_conversion.html). 
- -Before executing this example, you need to download the corresponding test data `test_mindrecord.zip` and unzip it to the specified location, execute the following command: - -```bash -!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/test_mindrecord.zip -!unzip -o ./test_mindrecord.zip -d ./datasets/mindspore_dataset_loading/ -!tree ./datasets/mindspore_dataset_loading/ -``` - -```text -./datasets/mindspore_dataset_loading/ -├── test.mindrecord -└── test.mindrecord.db - -0 directories, 2 files -``` - -The following example uses the `MindDataset` API to load MindRecord files, and displays labels of the loaded data. - -```python -import mindspore.dataset as ds - -DATA_FILE = ["./datasets/mindspore_dataset_loading/test.mindrecord"] -mindrecord_dataset = ds.MindDataset(DATA_FILE) - -for data in mindrecord_dataset.create_dict_iterator(output_numpy=True): - print(data.keys()) -``` - -```text -dict_keys(['chinese', 'english']) -dict_keys(['chinese', 'english']) -dict_keys(['chinese', 'english']) -``` - -### Manifest - -Manifest is a data format file supported by Huawei ModelArts. For details, see [Specifications for Importing the Manifest File](https://support.huaweicloud.com/en-us/engineers-modelarts/modelarts_23_0009.html). 
- -In this example, you need to download the test data `test_manifest.zip` and unzip it to the specified location, and execute the following command: - -```bash -!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/test_manifest.zip -!unzip -o ./test_manifest.zip -d ./datasets/mindspore_dataset_loading/test_manifest/ -!tree ./datasets/mindspore_dataset_loading/test_manifest/ -``` - -```text -./datasets/mindspore_dataset_loading/test_manifest/ -├── eval -│ ├── 1.JPEG -│ └── 2.JPEG -├── test_manifest.json -└── train - ├── 1.JPEG - └── 2.JPEG - -2 directories, 5 files -``` - -The following example uses the `ManifestDataset` API to load a Manifest file, and displays labels of the loaded data. - -```python -import mindspore.dataset as ds - -DATA_FILE = "./datasets/mindspore_dataset_loading/test_manifest/test_manifest.json" -manifest_dataset = ds.ManifestDataset(DATA_FILE) - -for data in manifest_dataset.create_dict_iterator(): - print(data["label"]) -``` - -```text -0 -1 -``` - -### TFRecord - -TFRecord is a binary data file format defined by TensorFlow. - -The following example uses the `TFRecordDataset` API to load TFRecord files and introduces two methods for setting the format of datasets. - -Download the `tfrecord` test data `test_tftext.zip` and unzip it to the specified location, execute the following command: - -```bash -!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/test_tftext.zip -!unzip -o ./test_tftext.zip -d ./datasets/mindspore_dataset_loading/test_tfrecord/ -!tree ./datasets/mindspore_dataset_loading/test_tfrecord/ -``` - -```text -./datasets/mindspore_dataset_loading/test_tfrecord/ -└── test_tftext.tfrecord - -0 directories, 1 file -``` - -1. Specify the dataset path or TFRecord file list to create a `TFRecordDataset` object, this example uses test_tftext.tfrecord. 
- - ```python - import mindspore.dataset as ds - - DATA_FILE = "./datasets/mindspore_dataset_loading/test_tfrecord/test_tftext.tfrecord" - tfrecord_dataset = ds.TFRecordDataset(DATA_FILE) - - for tf_data in tfrecord_dataset.create_dict_iterator(): - print(tf_data.keys()) - ``` - - ```text - dict_keys(['chinese', 'line', 'words']) - dict_keys(['chinese', 'line', 'words']) - dict_keys(['chinese', 'line', 'words']) - ``` - -2. Compile a schema file or create a schema object to set the dataset format and features. - - - Compile a schema file. - - Write the dataset format and features to the schema file in JSON format. The following is an example: - - - `columns`: column information field, which needs to be defined based on the actual column name of the dataset. In the preceding example, the dataset columns are `image`, `label`, and `id`. - - When creating `TFRecordDataset`, transfer the path of the schema file. - - ```python - import os - import json - - data_json = { - "columns": { - "chinese": { - "type": "uint8", - "rank": 1 - }, - "line" : { - "type": "int8", - "rank": 1 - }, - "words" : { - "type": "uint8", - "rank": 0 - } - } - } - - if not os.path.exists("dataset_schema_path"): - os.mkdir("dataset_schema_path") - SCHEMA_DIR = "dataset_schema_path/schema.json" - with open(SCHEMA_DIR, "w") as f: - json.dump(data_json,f,indent=4) - - tfrecord_dataset = ds.TFRecordDataset(DATA_FILE, schema=SCHEMA_DIR) - - for tf_data in tfrecord_dataset.create_dict_iterator(): - print(tf_data.values()) - ``` - - ```text - dict_values([Tensor(shape=[57], dtype=UInt8, value= [230, 177, 159, 229, 183, 158, 229, 184, 130, 233, 149, 191, 230, 177, 159, 229, 164, 167, 230, 161, 165, 229, 143, 130, - 229, 138, 160, 228, 186, 134, 233, 149, 191, 230, 177, 159, 229, 164, 167, 230, 161, 165, 231, 154, 132, 233, 128, 154, - 232, 189, 166, 228, 187, 170, 229, 188, 143]), Tensor(shape=[22], dtype=Int8, value= [ 71, 111, 111, 100, 32, 108, 117, 99, 107, 32, 116, 111, 32, 101, 118, 101, 114, 121, 
111, 110, 101, 46]), Tensor(shape=[32], dtype=UInt8, value= [229, 165, 179, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 101, 118, 101, 114, 121, 111, 110, 101, - 99, 32, 32, 32, 32, 32, 32, 32])]) - dict_values([Tensor(shape=[12], dtype=UInt8, value= [231, 148, 183, 233, 187, 152, 229, 165, 179, 230, 179, 170]), Tensor(shape=[19], dtype=Int8, value= [ 66, 101, 32, 104, 97, 112, 112, 121, 32, 101, 118, 101, 114, 121, 32, 100, 97, 121, 46]), Tensor(shape=[20], dtype=UInt8, value= [ 66, 101, 32, 32, 32, 104, 97, 112, 112, 121, 100, 97, 121, 32, 32, 98, 32, 32, 32, 32])]) - dict_values([Tensor(shape=[48], dtype=UInt8, value= [228, 187, 138, 229, 164, 169, 229, 164, 169, 230, 176, 148, 229, 164, 170, 229, 165, 189, 228, 186, 134, 230, 136, 145, - 228, 187, 172, 228, 184, 128, 232, 181, 183, 229, 142, 187, 229, 164, 150, 233, 157, 162, 231, 142, 169, 229, 144, 167 - ]), Tensor(shape=[20], dtype=Int8, value= [ 84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 116, 101, 120, 116, 32, 102, 105, 108, 101, 46]), Tensor(shape=[16], dtype=UInt8, value= [ 84, 104, 105, 115, 116, 101, 120, 116, 102, 105, 108, 101, 97, 32, 32, 32])]) - ``` - - - Create a schema object. - - Create a schema object, add user-defined fields to the schema object, and pass the schema object when creating a dataset object. 
- - ```python - from mindspore import dtype as mstype - schema = ds.Schema() - schema.add_column('chinese', de_type=mstype.uint8) - schema.add_column('line', de_type=mstype.uint8) - tfrecord_dataset = ds.TFRecordDataset(DATA_FILE, schema=schema) - - for tf_data in tfrecord_dataset.create_dict_iterator(): - print(tf_data) - ``` - - ```text - {'chinese': Tensor(shape=[12], dtype=UInt8, value= [231, 148, 183, 233, 187, 152, 229, 165, 179, 230, 179, 170]), 'line': Tensor(shape=[19], dtype=UInt8, value= [ 66, 101, 32, 104, 97, 112, 112, 121, 32, 101, 118, 101, 114, 121, 32, 100, 97, 121, 46])} - {'chinese': Tensor(shape=[48], dtype=UInt8, value= [228, 187, 138, 229, 164, 169, 229, 164, 169, 230, 176, 148, 229, 164, 170, 229, 165, 189, 228, 186, 134, 230, 136, 145, - 228, 187, 172, 228, 184, 128, 232, 181, 183, 229, 142, 187, 229, 164, 150, 233, 157, 162, 231, 142, 169, 229, 144, 167 - ]), 'line': Tensor(shape=[20], dtype=UInt8, value= [ 84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 116, 101, 120, 116, 32, 102, 105, 108, 101, 46])} - {'chinese': Tensor(shape=[57], dtype=UInt8, value= [230, 177, 159, 229, 183, 158, 229, 184, 130, 233, 149, 191, 230, 177, 159, 229, 164, 167, 230, 161, 165, 229, 143, 130, - 229, 138, 160, 228, 186, 134, 233, 149, 191, 230, 177, 159, 229, 164, 167, 230, 161, 165, 231, 154, 132, 233, 128, 154, - 232, 189, 166, 228, 187, 170, 229, 188, 143]), 'line': Tensor(shape=[22], dtype=UInt8, value= [ 71, 111, 111, 100, 32, 108, 117, 99, 107, 32, 116, 111, 32, 101, 118, 101, 114, 121, 111, 110, 101, 46])} - ``` - -Comparing step compile and step create above, we can see: - -|step|chinese|line|words -|:---|:---|:---|:--- -| compile|UInt8 |Int8|UInt8 -| create|UInt8 |UInt8| - -The data in the columns in the example step compile has changed from chinese (UInt8), line (Int8) and words (UInt8) to the chinese (UInt8) and line (UInt8) in the example step create. 
Through the Schema object, set the data type and characteristics of the dataset, so that the data type and characteristics in the column are changed accordingly. - -### NumPy - -If all data has been read into the memory, you can directly use the `NumpySlicesDataset` class to load the data. - -The following examples describe how to use `NumpySlicesDataset` to load array, list, and dict data. - -- Load NumPy array data. - - ```python - import numpy as np - import mindspore.dataset as ds - - np.random.seed(6) - features, labels = np.random.sample((4, 2)), np.random.sample((4, 1)) - - data = (features, labels) - dataset = ds.NumpySlicesDataset(data, column_names=["col1", "col2"], shuffle=False) - - for data in dataset: - print(data[0], data[1]) - ``` - - The output is as follows: - - ```text - [0.89286015 0.33197981] [0.33540785] - [0.82122912 0.04169663] [0.62251943] - [0.10765668 0.59505206] [0.43814143] - [0.52981736 0.41880743] [0.73588211] - ``` - -- Load Python list data. - - ```python - - import mindspore.dataset as ds - - data1 = [[1, 2], [3, 4]] - - dataset = ds.NumpySlicesDataset(data1, column_names=["col1"], shuffle=False) - - for data in dataset: - print(data[0]) - ``` - - The output is as follows: - - ```text - [1 2] - [3 4] - ``` - -- Load Python dict data. - - ```python - import mindspore.dataset as ds - - data1 = {"a": [1, 2], "b": [3, 4]} - - dataset = ds.NumpySlicesDataset(data1, column_names=["col1", "col2"], shuffle=False) - - for np_dic_data in dataset.create_dict_iterator(): - print(np_dic_data) - ``` - - The output is as follows: - - ```text - {'col1': Tensor(shape=[], dtype=Int64, value= 1), 'col2': Tensor(shape=[], dtype=Int64, value= 3)} - {'col1': Tensor(shape=[], dtype=Int64, value= 2), 'col2': Tensor(shape=[], dtype=Int64, value= 4)} - ``` - -### CSV - -The following example uses `CSVDataset` to load CSV dataset files, and displays labels of the loaded data. 
- -Download the test data `test_csv.zip` and unzip it to the specified location, execute the following command: - -```bash -!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/test_csv.zip -!unzip -o ./test_csv.zip -d ./datasets/mindspore_dataset_loading/test_csv/ -!tree ./datasets/mindspore_dataset_loading/test_csv/ -``` - -```text -./datasets/mindspore_dataset_loading/test_csv/ -├── test1.csv -└── test2.csv - -0 directories, 2 files -``` - -The method of loading a text dataset file is similar to that of loading a CSV file. - -```python -import mindspore.dataset as ds - -DATA_FILE = ["./datasets/mindspore_dataset_loading/test_csv/test1.csv","./datasets/mindspore_dataset_loading/test_csv/test2.csv"] -csv_dataset = ds.CSVDataset(DATA_FILE) - -for csv_data in csv_dataset.create_dict_iterator(output_numpy=True): - print(csv_data.keys()) -``` - -```text -dict_keys(['a', 'b', 'c', 'd']) -dict_keys(['a', 'b', 'c', 'd']) -dict_keys(['a', 'b', 'c', 'd']) -dict_keys(['a', 'b', 'c', 'd']) -``` - -## Loading User-defined Dataset - -For the datasets that cannot be directly loaded by MindSpore, you can construct the `GeneratorDataset` object to load them in a customized method or convert them into the MindRecord data format. The following demonstrates some different methods to load user-defined datasets. For comparison, keep the generated random data the same. - -### Constructing Dataset Generator Function - -Construct a generator function that defines the data return method, and then use this function to construct the user-defined dataset object. This method is applicable for simple scenarios. 
- -```python -import numpy as np -import mindspore.dataset as ds - -np.random.seed(58) -data = np.random.sample((5, 2)) -label = np.random.sample((5, 1)) - -def GeneratorFunc(): - for i in range(5): - yield (data[i], label[i]) - -dataset = ds.GeneratorDataset(GeneratorFunc, ["data", "label"]) - -for item in dataset.create_dict_iterator(): - print(item["data"], item["label"]) -``` - -The output is as follows: - -```text -[0.36510558 0.45120592] [0.78888122] -[0.49606035 0.07562207] [0.38068183] -[0.57176158 0.28963401] [0.16271622] -[0.30880446 0.37487617] [0.54738768] -[0.81585667 0.96883469] [0.77994068] -``` - -### Constructing Iterable Dataset Class - -Construct a dataset class to implement the `__iter__` and `__next__` methods, and then use the object of this class to construct the user-defined dataset object. Compared with directly defining the generating function, using the dataset class can achieve more customized functions. - -```python -import numpy as np -import mindspore.dataset as ds - -class IterDatasetGenerator: - def __init__(self): - np.random.seed(58) - self.__index = 0 - self.__data = np.random.sample((5, 2)) - self.__label = np.random.sample((5, 1)) - - def __next__(self): - if self.__index >= len(self.__data): - raise StopIteration - else: - item = (self.__data[self.__index], self.__label[self.__index]) - self.__index += 1 - return item - - def __iter__(self): - self.__index = 0 - return self - - def __len__(self): - return len(self.__data) - -dataset_generator = IterDatasetGenerator() -dataset = ds.GeneratorDataset(dataset_generator, ["data", "label"], shuffle=False) - -for data in dataset.create_dict_iterator(): - print(data["data"], data["label"]) -``` - -The output is as follows: - -```text -[0.36510558 0.45120592] [0.78888122] -[0.49606035 0.07562207] [0.38068183] -[0.57176158 0.28963401] [0.16271622] -[0.30880446 0.37487617] [0.54738768] -[0.81585667 0.96883469] [0.77994068] -``` - -### Constructing Random Accessible Dataset Class - 
-Construct a dataset class to implement the `__getitem__` method, and then use the object of this class to construct a user-defined dataset object. This method is applicable for achieving distributed training. - -```python -import numpy as np -import mindspore.dataset as ds - -class GetDatasetGenerator: - def __init__(self): - np.random.seed(58) - self.__data = np.random.sample((5, 2)) - self.__label = np.random.sample((5, 1)) - - def __getitem__(self, index): - return (self.__data[index], self.__label[index]) - - def __len__(self): - return len(self.__data) - -dataset_generator = GetDatasetGenerator() -dataset = ds.GeneratorDataset(dataset_generator, ["data", "label"], shuffle=False) - -for data in dataset.create_dict_iterator(): - print(data["data"], data["label"]) -``` - -The output is as follows: - -```text -[0.36510558 0.45120592] [0.78888122] -[0.49606035 0.07562207] [0.38068183] -[0.57176158 0.28963401] [0.16271622] -[0.30880446 0.37487617] [0.54738768] -[0.81585667 0.96883469] [0.77994068] -``` - -If you want to perform distributed training, you need to implement the `__iter__` method in the sampler class additionally. The index of the sampled data is returned each time. 
The code that needs to be added is as follows: - -```python -import math - -class MySampler(): - def __init__(self, dataset, local_rank, world_size): - self.__num_data = len(dataset) - self.__local_rank = local_rank - self.__world_size = world_size - self.samples_per_rank = int(math.ceil(self.__num_data / float(self.__world_size))) - self.total_num_samples = self.samples_per_rank * self.__world_size - - def __iter__(self): - indices = list(range(self.__num_data)) - indices.extend(indices[:self.total_num_samples-len(indices)]) - indices = indices[self.__local_rank:self.total_num_samples:self.__world_size] - return iter(indices) - - def __len__(self): - return self.samples_per_rank - -dataset_generator = GetDatasetGenerator() -sampler = MySampler(dataset_generator, local_rank=0, world_size=2) -dataset = ds.GeneratorDataset(dataset_generator, ["data", "label"], shuffle=False, sampler=sampler) - -for data in dataset.create_dict_iterator(): - print(data["data"], data["label"]) -``` - -The output is as follows: - -```text -[0.36510558 0.45120592] [0.78888122] -[0.57176158 0.28963401] [0.16271622] -[0.81585667 0.96883469] [0.77994068] -``` diff --git a/docs/programming_guide/source_en/dtype.md b/docs/programming_guide/source_en/dtype.md deleted file mode 100644 index cc2467de729524d70a7f4b428675c9c8143819b9..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/dtype.md +++ /dev/null @@ -1,64 +0,0 @@ -# dtype - - - -- [dtype](#dtype) - - [Overview](#overview) - - [Data Type Conversion API](#data-type-conversion-api) - - - - - -## Overview - -MindSpore tensors support different data types, including `int8`, `int16`, `int32`, `int64`, `uint8`, `uint16`, `uint32`, `uint64`, `float16`, `float32`, `float64` and `bool_`, which correspond to the data types of NumPy. - -In the computation process of MindSpore, the `int` data type in Python is converted into the defined `int64` type, and the `float` data type is converted into the defined `float32` type. 
- -For details about the supported types, see . - -In the following code, the data type of MindSpore is int32. - -```python -from mindspore import dtype as mstype - -data_type = mstype.int32 -print(data_type) -``` - -The following information is displayed: - -```text -Int32 -``` - -## Data Type Conversion API - -MindSpore provides the following APIs for conversion between NumPy data types and Python built-in data types: - -- `dtype_to_nptype`: converts the data type of MindSpore to the corresponding data type of NumPy. -- `dtype_to_pytype`: converts the data type of MindSpore to the corresponding built-in data type of Python. -- `pytype_to_dtype`: converts the built-in data type of Python to the corresponding data type of MindSpore. - -The following code implements the conversion between different data types and prints the converted type. - -```python -from mindspore import dtype as mstype - -np_type = mstype.dtype_to_nptype(mstype.int32) -ms_type = mstype.pytype_to_dtype(int) -py_type = mstype.dtype_to_pytype(mstype.float64) - -print(np_type) -print(ms_type) -print(py_type) -``` - -The following information is displayed: - -```text - -Int64 - -``` diff --git a/docs/programming_guide/source_en/extension.rst b/docs/programming_guide/source_en/extension.rst deleted file mode 100644 index dacfb670d3f20721b21b64faa9c470bc91d6a0c4..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/extension.rst +++ /dev/null @@ -1,7 +0,0 @@ -Function Extension -==================== - -.. 
toctree:: - :maxdepth: 1 - - probability \ No newline at end of file diff --git a/docs/programming_guide/source_en/images/api_structure.png b/docs/programming_guide/source_en/images/api_structure.png deleted file mode 100644 index 5af4744e922b9ddadbe60832af3f62c104412233..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/api_structure.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/batch.png b/docs/programming_guide/source_en/images/batch.png deleted file mode 100644 index ee974652d361b4085033a08789a036d331c2bec8..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/batch.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/cache_dataset.png b/docs/programming_guide/source_en/images/cache_dataset.png deleted file mode 100644 index 23d2fcaae1e8fb6751ef72029cb1f8ffd8772a58..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/cache_dataset.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/cache_pipeline_en.eddx b/docs/programming_guide/source_en/images/cache_pipeline_en.eddx deleted file mode 100644 index 6980ae017c8b284d246679c97ab418519435d4db..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/cache_pipeline_en.eddx and /dev/null differ diff --git a/docs/programming_guide/source_en/images/cache_processed_data.png b/docs/programming_guide/source_en/images/cache_processed_data.png deleted file mode 100644 index 2f693178e220103d6c2ee84476fc75ad9378cfe0..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/cache_processed_data.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/concat.png b/docs/programming_guide/source_en/images/concat.png deleted file mode 100644 index 
7a28ff7826cc2a1c6334e2ff15eeaaffd6b67c06..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/concat.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/ctrans_invert.png b/docs/programming_guide/source_en/images/ctrans_invert.png deleted file mode 100644 index b73f9bd1abed0b4064d10461cc360160591ef4e3..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/ctrans_invert.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/ctrans_resize.png b/docs/programming_guide/source_en/images/ctrans_resize.png deleted file mode 100644 index e5275e371cbe0b668a0f6f1d699ea67efa09956f..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/ctrans_resize.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/eager_mode.png b/docs/programming_guide/source_en/images/eager_mode.png deleted file mode 100644 index 4be8958b7bb92244710bad82fc6b80169924648c..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/eager_mode.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/map.png b/docs/programming_guide/source_en/images/map.png deleted file mode 100644 index c37f58ec62af2cd01dc08a841514fcf4bdaf8f8e..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/map.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/pytrans_compose.png b/docs/programming_guide/source_en/images/pytrans_compose.png deleted file mode 100644 index d9e9568a685f54270d2d69031fb9c9b04feab1f0..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/pytrans_compose.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/randomcrop.png b/docs/programming_guide/source_en/images/randomcrop.png deleted file mode 100644 index 
51f98fe416895febd4dd8852e33b78c1f913440b..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/randomcrop.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/randomhorizontalflip.png b/docs/programming_guide/source_en/images/randomhorizontalflip.png deleted file mode 100644 index 2d851183a8f858c54a26b636703b9177df4ec80e..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/randomhorizontalflip.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/repeat.png b/docs/programming_guide/source_en/images/repeat.png deleted file mode 100644 index 9717ec81c52f23615e236d27e0f7c96bd6ac1155..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/repeat.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/shuffle.png b/docs/programming_guide/source_en/images/shuffle.png deleted file mode 100644 index 4464cefad03beefac6bb413da22eebeffaf8fe41..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/shuffle.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/transform_not_recommended.png b/docs/programming_guide/source_en/images/transform_not_recommended.png deleted file mode 100644 index b3b8a6f5c4d7e13af4869976fa2d52a90dc7a5de..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/transform_not_recommended.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/transform_recommended_1.png b/docs/programming_guide/source_en/images/transform_recommended_1.png deleted file mode 100644 index dd5ccead2acefc4e2ecc16fe1cca431d68fe64ba..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/transform_recommended_1.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/transform_recommended_2.png 
b/docs/programming_guide/source_en/images/transform_recommended_2.png deleted file mode 100644 index 8524dc1503ec11cdb5fc8c7c8faa9604c00d6dec..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/transform_recommended_2.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/transform_recommended_3.png b/docs/programming_guide/source_en/images/transform_recommended_3.png deleted file mode 100644 index 7180a063e3778886d81a8d8d6fdf067e6485adda..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/transform_recommended_3.png and /dev/null differ diff --git a/docs/programming_guide/source_en/images/zip.png b/docs/programming_guide/source_en/images/zip.png deleted file mode 100644 index f0052435898ae6a3546dfea9c50711ab3f303699..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_en/images/zip.png and /dev/null differ diff --git a/docs/programming_guide/source_en/index.rst b/docs/programming_guide/source_en/index.rst deleted file mode 100644 index cc6130033ba58df9e3d21949ffc8bf268777fb32..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/index.rst +++ /dev/null @@ -1,79 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 11:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore Programming Guide -================================= - -.. toctree:: - :maxdepth: 1 - - api_structure - -.. toctree:: - :maxdepth: 1 - :caption: Data Type - - dtype - tensor - -.. toctree:: - :maxdepth: 1 - :caption: Compute Component - - operators - parameter - cell - network_component - initializer - numpy - -.. toctree:: - :maxdepth: 1 - :caption: Data Pipeline - - dataset_loading - sampler - pipeline - augmentation - tokenizer - dataset_conversion - auto_augmentation - cache - -.. 
toctree:: - :maxdepth: 1 - :caption: Execution Management - - context - run - callback - -.. toctree:: - :maxdepth: 1 - :caption: Distributed Training - - auto_parallel - -.. toctree:: - :maxdepth: 1 - :caption: Advanced Usage - - train - infer - advanced_usage_of_checkpoint - performance_optimization - customized - security_and_privacy - extension - -.. toctree:: - :maxdepth: 1 - :caption: Specification Note - - Benchmarks - network_list - operator_list - syntax_list - Environment Variables diff --git a/docs/programming_guide/source_en/infer.md b/docs/programming_guide/source_en/infer.md deleted file mode 100644 index 81e7f271c1cae0d7728b6e378ce32e24466fb6c8..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/infer.md +++ /dev/null @@ -1,24 +0,0 @@ -# Inference - - - -- [Inference](#inference) - - - - - -Based on the model trained by MindSpore, it supports the execution of inferences on various platforms such as Ascend 910 AI processor, Ascend 310 AI processor, GPU, CPU, and device side. For more details, please refer to the following tutorials: - -- [Inference on the Ascend 910 AI processor](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_ascend_910.html) -- [Inference on the Ascend 310 AI processor](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_ascend_310.html) -- [Inference on a GPU](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_gpu.html) -- [Inference on a CPU](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_cpu.html) -- [Inference on the device side](https://www.mindspore.cn/tutorial/lite/en/master/quick_start/quick_start.html) - -At the same time, MindSpore offers a lightweight and high-performance module called "MindSpore Serving", which helps MindSpore developers effectively deploy online inferences in a production environment. 
For more details, please refer to the following tutorials: - -- [MindSpore Serving-based Inference Service Deployment](https://www.mindspore.cn/tutorial/inference/en/master/serving_example.html) -- [gRPC-based MindSpore Serving Access](https://www.mindspore.cn/tutorial/inference/en/master/serving_grpc.html) -- [RESTful-based MindSpore Serving Access](https://www.mindspore.cn/tutorial/inference/en/master/serving_restful.html) -- [Servable Provided Through Model Configuration](https://www.mindspore.cn/tutorial/inference/en/master/serving_model.html) diff --git a/docs/programming_guide/source_en/initializer.md b/docs/programming_guide/source_en/initializer.md deleted file mode 100644 index cd934f345ffd31fe2502207806817a392642f10c..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/initializer.md +++ /dev/null @@ -1,5 +0,0 @@ -# Initialization of Network Parameters - -No English version right now, welcome to contribute. - - \ No newline at end of file diff --git a/docs/programming_guide/source_en/network_component.md b/docs/programming_guide/source_en/network_component.md deleted file mode 100644 index 6066fe8d2863e6aff1c2a38af0fa014b1584736d..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/network_component.md +++ /dev/null @@ -1,180 +0,0 @@ -# Common Network Components - - - -- [Common Network Components](#common-network-components) - - [Overview](#overview) - - [GradOperation](#gradoperation) - - [WithLossCell](#withlosscell) - - [TrainOneStepCell](#trainonestepcell) - - - - - -## Overview - -MindSpore encapsulates some common network components for network training, inference, gradient calculation, and data processing. - -These network components can be directly used by users and are also used in more advanced encapsulation APIs such as `model.train` and `model.eval`. 
- -The following describes three network components, `GradOperation`, `WithLossCell`, and `TrainOneStepCell`, in terms of functions, usage, and internal use. - -## GradOperation - -GradOperation is used to generate the gradient of the input function. The `get_all`, `get_by_list`, and `sens_param` parameters are used to control the gradient calculation method. For details, see [MindSpore API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.GradOperation.html) -The following is an example of using GradOperation: - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import Tensor, Parameter -from mindspore import dtype as mstype -import mindspore.ops as ops - - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.matmul = ops.MatMul() - self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z') - def construct(self, x, y): - x = x * self.z - out = self.matmul(x, y) - return out - -class GradNetWrtX(nn.Cell): - def __init__(self, net): - super(GradNetWrtX, self).__init__() - self.net = net - self.grad_op = ops.GradOperation() - def construct(self, x, y): - gradient_function = self.grad_op(self.net) - return gradient_function(x, y) - -x = Tensor([[0.5, 0.6, 0.4], [1.2, 1.3, 1.1]], dtype=mstype.float32) -y = Tensor([[0.01, 0.3, 1.1], [0.1, 0.2, 1.3], [2.1, 1.2, 3.3]], dtype=mstype.float32) -GradNetWrtX(Net())(x, y) -``` - -```text -Tensor(shape=[2, 3], dtype=Float32, value= -[[1.41000009e+000, 1.60000002e+000, 6.59999943e+000], - [1.41000009e+000, 1.60000002e+000, 6.59999943e+000]]) -``` - -The preceding example is used to calculate the gradient value of `Net` to x. You need to define the network `Net` as the input of `GradOperation`. The instance creates `GradNetWrtX` that contains the gradient operation. Calling `GradNetWrtX` transfers the network to `GradOperation` to generate a gradient function, and transfers the input data to the gradient function to return the final result. 
- -All other components, such as `WithGradCell` and `TrainOneStepCell`, involved in gradient calculation use `GradOperation`. -You can view the internal implementation of these APIs. - -## WithLossCell - -`WithLossCell` is essentially a `Cell` that contains the loss function. To build `WithLossCell`, you need to define the network and loss function in advance. - -The following uses an example to describe how to use this function. First, you need to build a network. The content is as follows: - -```python -import numpy as np - -import mindspore.context as context -import mindspore.nn as nn -from mindspore import Tensor -from mindspore.nn import TrainOneStepCell, WithLossCell -from mindspore.nn.optim import Momentum -import mindspore.ops as ops - -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - - -class LeNet(nn.Cell): - def __init__(self): - super(LeNet, self).__init__() - self.relu = ops.ReLU() - self.batch_size = 32 - - self.conv1 = nn.Conv2d(1, 6, kernel_size=5, stride=1, padding=0, has_bias=False, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, kernel_size=5, stride=1, padding=0, has_bias=False, pad_mode='valid') - self.pool = nn.MaxPool2d(kernel_size=2, stride=2) - self.reshape = ops.Reshape() - self.fc1 = nn.Dense(400, 120) - self.fc2 = nn.Dense(120, 84) - self.fc3 = nn.Dense(84, 10) - - def construct(self, input_x): - output = self.conv1(input_x) - output = self.relu(output) - output = self.pool(output) - output = self.conv2(output) - output = self.relu(output) - output = self.pool(output) - output = self.reshape(output, (self.batch_size, -1)) - output = self.fc1(output) - output = self.relu(output) - output = self.fc2(output) - output = self.relu(output) - output = self.fc3(output) - return output -``` - -The following is an example of using `WithLossCell`. Define the network and loss functions, create a `WithLossCell`, and input the input data and label data. 
`WithLossCell` returns the calculation result based on the network and loss functions. - -```python -data = Tensor(np.ones([32, 1, 32, 32]).astype(np.float32) * 0.01) -label = Tensor(np.ones([32]).astype(np.int32)) -net = LeNet() -criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') -net_with_criterion = WithLossCell(net, criterion) -loss = net_with_criterion(data, label) -print("+++++++++Loss+++++++++++++") -print(loss) -``` - -The following information is displayed: - -```text -+++++++++Loss+++++++++++++ -2.302585 -``` - -## TrainOneStepCell - -`TrainOneStepCell` is used to perform single-step training of the network and return the loss result after each training result. - -The following describes how to build an instance for using the `TrainOneStepCell` API to perform network training. The import code of the `LeNet` and package name is the same as that in the previous case. - -```python -data = Tensor(np.ones([32, 1, 32, 32]).astype(np.float32) * 0.01) -label = Tensor(np.ones([32]).astype(np.int32)) -net = LeNet() -learning_rate = 0.01 -momentum = 0.9 - -optimizer = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), learning_rate, momentum) -criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') -net_with_criterion = WithLossCell(net, criterion) -train_network = TrainOneStepCell(net_with_criterion, optimizer) # optimizer -for i in range(5): - train_network.set_train() - res = train_network(data, label) - print(f"+++++++++result:{i}++++++++++++") - print(res) -``` - -```text -+++++++++result:0++++++++++++ -2.302585 -+++++++++result:1++++++++++++ -2.2935712 -+++++++++result:2++++++++++++ -2.2764661 -+++++++++result:3++++++++++++ -2.2521412 -+++++++++result:4++++++++++++ -2.2214084 -``` - -In the case, an optimizer and a `WithLossCell` instance are built, and then a training network is initialized in `TrainOneStepCell`. 
The case is repeated for five times, that is, the network is trained for five times, and the loss result of each time is output, the result shows that the loss value gradually decreases after each training. - -The following content will describe how MindSpore uses more advanced encapsulation APIs, that is, the `train` method in the `Model` class to train a model. Many network components, such as `TrainOneStepCell` and `WithLossCell`, will be used in the internal implementation. -You can view the internal implementation of these components. diff --git a/docs/programming_guide/source_en/network_list.rst b/docs/programming_guide/source_en/network_list.rst deleted file mode 100644 index 5118f160b0b99ba8edf4e7cc9f3aba11a24a15a9..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/network_list.rst +++ /dev/null @@ -1,7 +0,0 @@ -Network List -============ - -.. toctree:: - :maxdepth: 1 - - MindSpore Network List \ No newline at end of file diff --git a/docs/programming_guide/source_en/numpy.md b/docs/programming_guide/source_en/numpy.md deleted file mode 100644 index e4f45e1a0901cac6487ef8fe6336029040a05af6..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/numpy.md +++ /dev/null @@ -1,458 +0,0 @@ -# Numpy Interfaces in MindSpore - - - -- [Numpy Interfaces in MindSpore](#numpy-interfaces-in-mindspore) - - [Overview](#overview) - - [Operator Functions](#operator-functions) - - [Array Generations](#array-generations) - - [Generate a tensor filled with the same element](#generate-a-tensor-filled-with-the-same-element) - - [Generate tensors in a specified range](#generate-tensors-in-a-specified-range) - - [Generate tensors with specific requirement](#generate-tensors-with-specific-requirement) - - [Array Operations](#array-operations) - - [Manipulate the shape of the tensor](#manipulate-the-shape-of-the-tensor) - - [Tensor splitting](#tensor-splitting) - - [Tensor combination](#tensor-combination) - - [Logic 
Operations](#logic-operations) - - [Math Operations](#math-operations) - - [Sum two tensors](#sum-two-tensors) - - [Matrics multiplication](#matrics-multiplication) - - [Take the average along a given axis](#take-the-average-along-a-given-axis) - - [Exponential arithmetic](#exponential-arithmetic) - - [Interact With MindSpore Functions](#interact-with-mindspore-functions) - - [Use ms_function to run code in static graph mode](#use-ms_function-to-run-code-in-static-graph-mode) - - [Use GradOperation to compute deratives](#use-gradoperation-to-compute-deratives) - - [Use mindspore.context to control execution mode](#use-mindsporecontext-to-control-execution-mode) - - [Use mindspore.numpy in MindSpore Deep Learning Models](#use-mindsporenumpy-in-mindspore-deep-learning-models) - - - - - -## Overview - -MindSpore Numpy package contains a set of Numpy-like interfaces, which allows developers to build models on MindSpore with similar syntax of Numpy. - -## Operator Functions - -Mindspore Numpy operators can be classified into four functional modules: `array generation`, `array operation`, `logic operation` and `math operation`. For details about the supported operators on the Ascend AI processors, GPU, and CPU, see [Numpy Interface List](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.numpy.html). - -### Array Generations - -Array generation operators are used to generate tensors. - -Here is an example to generate an array: - -```python -import mindspore.numpy as np -import mindspore.ops as ops -input_x = np.array([1, 2, 3], np.float32) -print("input_x =", input_x) -print("type of input_x =", ops.typeof(input_x)) -``` - -The output is: - -```python -input_x = [1. 2. 3.] 
-type of input_x = Tensor[Float32] -``` - -Here we have more examples: - -#### Generate a tensor filled with the same element - -`np.full` can be used to generate a tensor with user-specified values: - -```python -import mindspore.numpy as np -input_x = np.full((2, 3), 6, np.float32) -print(input_x) -``` - -The output is: - -```python -[[6. 6. 6.] - [6. 6. 6.]] -``` - -Here is another example to generate an array with the specified shape and filled with the value of 1: - -```python -import mindspore.numpy as np -input_x = np.ones((2, 3), np.float32) -print(input_x) -``` - -The output is: - -```python -[[1. 1. 1.] - [1. 1. 1.]] -``` - -#### Generate tensors in a specified range - -Generate an arithmetic array within the specified range: - -```python -import mindspore.numpy as np -input_x = np.arange(0, 5, 1) -print(input_x) -``` - -The output is: - -```python -[0 1 2 3 4] -``` - -#### Generate tensors with specific requirement - -Generate a matrix where the lower elements are 1 and the upper elements are 0 on the given diagonal: - -```python -import mindspore.numpy as np -input_x = np.tri(3, 3, 1) -print(input_x) -``` - -The output is: - -```python -[[1. 1. 0.] - [1. 1. 1.] - [1. 1. 1.]] -``` - -Another example, generate a 2-D matrix with a diagonal of 1 and other elements of 0: - -```python -import mindspore.numpy as np -input_x = np.eye(2, 2) -print(input_x) -``` - -The output is: - -```python -[[1. 0.] - [0. 1.]] -``` - -### Array Operations - -Array operations focus on tensor manipulation. 
- -#### Manipulate the shape of the tensor - -For example, transpose a matrix: - -```python -import mindspore.numpy as np -input_x = np.arange(10).reshape(5, 2) -output = np.transpose(input_x) -print(output) -``` - -The output is: - -```python -[[0 2 4 6 8] - [1 3 5 7 9]] -``` - -Another example, swap two axes: - -```python -import mindspore.numpy as np -input_x = np.ones((1, 2, 3)) -output = np.swapaxes(input_x, 0, 1) -print(output.shape) -``` - -The output is: - -```python -(2, 1, 3) -``` - -#### Tensor splitting - -Divide the input tensor into multiple tensors equally, for example: - -```python -import mindspore.numpy as np -input_x = np.arange(9) -output = np.split(input_x, 3) -print(output) -``` - -The output is: - -```python -(Tensor(shape=[3], dtype=Int32, value= [0, 1, 2]), - Tensor(shape=[3], dtype=Int32, value= [3, 4, 5]), - Tensor(shape=[3], dtype=Int32, value= [6, 7, 8])) -``` - -#### Tensor combination - -Concatenate the two tensors according to the specified axis, for example: - -```python -import mindspore.numpy as np -input_x = np.arange(0, 5) -input_y = np.arange(10, 15) -output = np.concatenate((input_x, input_y), axis=0) -print(output) -``` - -The output is: - -```python -[ 0 1 2 3 4 10 11 12 13 14] -``` - -### Logic Operations - -Logic operations define computations related with boolean types. -Examples of `equal` and `less` operations are as follows: - -```python -import mindspore.numpy as np -input_x = np.arange(0, 5) -input_y = np.arange(0, 10, 2) -output = np.equal(input_x, input_y) -print("output of equal:", output) -output = np.less(input_x, input_y) -print("output of less:", output) -``` - -The output is: - -```python -output of equal: [ True False False False False] -output of less: [False True True True True] -``` - -### Math Operations - -Math operations include basic and advanced math operations on tensors, and they have full support on Numpy broadcasting rules. 
Here are some examples: - -#### Sum two tensors - -The following code implements the operation of adding two tensors of `input_x` and `input_y`: - -```python -import mindspore.numpy as np -input_x = np.full((3, 2), [1, 2]) -input_y = np.full((3, 2), [3, 4]) -output = np.add(input_x, input_y) -print(output) -``` - -The output is: - -```python -[[4 6] - [4 6] - [4 6]] -``` - -#### Matrics multiplication - -The following code implements the operation of multiplying two matrices `input_x` and `input_y`: - -```python -import mindspore.numpy as np -input_x = np.arange(2*3).reshape(2, 3).astype('float32') -input_y = np.arange(3*4).reshape(3, 4).astype('float32') -output = np.matmul(input_x, input_y) -print(output) -``` - -The output is: - -```python -[[20. 23. 26. 29.] - [56. 68. 80. 92.]] -``` - -#### Take the average along a given axis - -The following code implements the operation of averaging all the elements of `input_x`: - -```python -import mindspore.numpy as np -input_x = np.arange(6).astype('float32') -output = np.mean(input_x) -print(output) -``` - -The output is: - -```python -2.5 -``` - -#### Exponential arithmetic - -The following code implements the operation of the natural constant `e` to the power of `input_x`: - -```python -import mindspore.numpy as np -input_x = np.arange(5).astype('float32') -output = np.exp(input_x) -print(output) -``` - -The output is: - -```python -[ 1. 2.718282 7.3890557 20.085537 54.598145 ] -``` - -## Interact With MindSpore Functions - -Since `mindspore.numpy` directly wraps MindSpore tensors and operators, it has all the advantages and properties of MindSpore. In this section, we will briefly introduce how to employ MindSpore execution management and automatic differentiation in `mindspore.numpy` coding scenarios. These include: - -- `ms_function`: for running codes in static graph mode for better efficiency. -- `GradOperation`: for automatic gradient computation. 
-- `mindspore.context`: for `mindspore.numpy` execution management. -- `mindspore.nn.Cell`: for using `mindspore.numpy` interfaces in MindSpore Deep Learning Models. - -### Use ms_function to run code in static graph mode - -Let's first see an example consisted of matrix multiplication and bias add, which is a typical process in Neural Networks: - -```python -import mindspore.numpy as np - -x = np.arange(8).reshape(2, 4).astype('float32') -w1 = np.ones((4, 8)) -b1 = np.zeros((8,)) -w2 = np.ones((8, 16)) -b2 = np.zeros((16,)) -w3 = np.ones((16, 4)) -b3 = np.zeros((4,)) - -def forward(x, w1, b1, w2, b2, w3, b3): - x = np.dot(x, w1) + b1 - x = np.dot(x, w2) + b2 - x = np.dot(x, w3) + b3 - return x - -print(forward(x, w1, b1, w2, b2, w3, b3)) -``` - -The output is: - -```python -[[ 768. 768. 768. 768.] - [2816. 2816. 2816. 2816.]] -``` - -In this function, MindSpore dispatches each computing kernel to device separately. However, with the help of `ms_function`, we can compile all operations into a single static computing graph. - -```python -from mindspore import ms_function - -forward_compiled = ms_function(forward) -``` - -> Currently, static graph cannot run in command line mode and not all python types can be passed into functions decorated with `ms_function`. For details about the static graph syntax support, see [Syntax Support](https://www.mindspore.cn/doc/note/en/master/static_graph_syntax_support.html). For details about how to use `ms_function`, see [API: ms_function](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.html#mindspore.ms_function). - -### Use GradOperation to compute deratives - -`GradOperation` can be used to take deratives from normal functions and functions decorated with `ms_function`. 
Take the previous example: - -```python -from mindspore import ops - -grad_all = ops.composite.GradOperation(get_all=True) -grad_all(forward)(x, w1, b1, w2, b2, w3, b3) -``` - -To take the gradient of `ms_function` compiled functions, first we need to set the execution mode to static graph mode. - -```python -from mindspore import ops, ms_function, context - -context.set_context(mode=context.GRAPH_MODE) - -grad_all = ops.composite.GradOperation(get_all=True) -grad_all(ms_function(forward))(x, w1, b1, w2, b2, w3, b3) -``` - - For more details, see [API: GradOperation](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.GradOperation.html). - -### Use mindspore.context to control execution mode - -Most functions in `mindspore.numpy` can run in Graph Mode and PyNative Mode, and can run on `CPU`,`GPU` and `Ascend`. Like MindSpore, users can manage the execution mode using `mindspore.context`: - -```python -import mindspore.numpy as np -from mindspore import context - -# Execution in static graph mode -context.set_context(mode=context.GRAPH_MODE) - -# Execution in dynamic graph mode -context.set_context(mode=context.PYNATIVE_MODE) - -# Execution on CPU backend -context.set_context(device_target="CPU") - -# Execution on GPU backend -context.set_context(device_target="GPU") - -# Execution on Ascend backend -context.set_context(device_target="Ascend") -... -``` - - For more details, see [API: mindspore.context](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.context.html). - -### Use mindspore.numpy in MindSpore Deep Learning Models - -`mindspore.numpy` interfaces can be used inside `nn.cell` blocks as well. 
For example, the above code can be modified to: - -```python -import mindspore.numpy as np -from mindspore import context -from mindspore.nn import Cell - -context.set_context(mode=context.GRAPH_MODE) - -x = np.arange(8).reshape(2, 4).astype('float32') -w1 = np.ones((4, 8)) -b1 = np.zeros((8,)) -w2 = np.ones((8, 16)) -b2 = np.zeros((16,)) -w3 = np.ones((16, 4)) -b3 = np.zeros((4,)) - -class NeuralNetwork(Cell): - def __init__(self): - super(NeuralNetwork, self).__init__() - def construct(self, x, w1, b1, w2, b2, w3, b3): - x = np.dot(x, w1) + b1 - x = np.dot(x, w2) + b2 - x = np.dot(x, w3) + b3 - return x - -net = NeuralNetwork() - -print(net(x, w1, b1, w2, b2, w3, b3)) -``` - -The output is: - -```python -[[ 768. 768. 768. 768.] - [2816. 2816. 2816. 2816.]] -``` - -For more details on building Neural Network with MindSpore, see [MindSpore Training Guide](https://www.mindspore.cn/tutorial/training/en/master/index.html). diff --git a/docs/programming_guide/source_en/operator_list.rst b/docs/programming_guide/source_en/operator_list.rst deleted file mode 100644 index 32c5db52166ff05588edd965e94d4969243dcd6d..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/operator_list.rst +++ /dev/null @@ -1,9 +0,0 @@ -Operator List -============= - -.. 
toctree:: - :maxdepth: 1 - - MindSpore Operator List - MindSpore Implicit Type Conversion - MindSpore Distributed Operator List \ No newline at end of file diff --git a/docs/programming_guide/source_en/operators.md b/docs/programming_guide/source_en/operators.md deleted file mode 100644 index c6fab803c2690840148bf316ca31168313f080b9..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/operators.md +++ /dev/null @@ -1,803 +0,0 @@ -# Operators - - - -- [Operators](#operators) - - [Overview](#overview) - - [Operator Usage](#operator-usage) - - [mindspore.ops.operations](#mindsporeopsoperations) - - [mindspore.ops.functional](#mindsporeopsfunctional) - - [mindspore.ops.composite](#mindsporeopscomposite) - - [Combination usage of operations/functional/composite three types of operators](#combination-usage-of-operationsfunctionalcomposite-three-types-of-operators) - - [Operator Functions](#operator-functions) - - [Tensor Operations](#tensor-operations) - - [Scalar Operations](#scalar-operations) - - [Addition](#addition) - - [Element-wise Multiplication](#element-wise-multiplication) - - [Trigonometric Function](#trigonometric-function) - - [Vector Operations](#vector-operations) - - [Squeeze](#squeeze) - - [Matrix Operations](#matrix-operations) - - [Matrix Multiplication](#matrix-multiplication) - - [Broadcast Mechanism](#broadcast-mechanism) - - [Network Operations](#network-operations) - - [Feature Extraction](#feature-extraction) - - [Activation Function](#activation-function) - - [Loss Function](#loss-function) - - [Optimization Algorithm](#optimization-algorithm) - - [Array Operations](#array-operations) - - [DType](#dtype) - - [Cast](#cast) - - [Shape](#shape) - - [Image Operations](#image-operations) - - [Encoding Operations](#encoding-operations) - - [BoundingBoxEncode](#boundingboxencode) - - [BoundingBoxDecode](#boundingboxdecode) - - [IOU Computing](#iou-computing) - - [Debugging Operations](#debugging-operations) - - 
[Debug](#debug) - - [HookBackward](#hookbackward) - - - - - -## Overview - -Operators of MindSpore can be classified based on the operator usage and operator functions. The following example code runs in PyNative mode. - -## Operator Usage - -APIs related to operators include operations, functional, and composite. Operators related to these three APIs can be directly obtained using ops. - -- The operations API provides a single primitive operator. An operator corresponds to a primitive and is the smallest execution object. An operator can be used only after being instantiated. -- The composite API provides some predefined composite operators and complex operators involving graph transformation, such as `GradOperation`. -- The functional API provides objects instantiated by the operations and composite to simplify the operator calling process. - -### mindspore.ops.operations - -The operations API provides all primitive operator APIs, which are the lowest-order operator APIs open to users. For details about the supported operators, see [Operator List](https://www.mindspore.cn/doc/note/en/master/operator_list.html). - -Primitive operators directly encapsulate the implementation of operators at bottom layers such as Ascend, GPU, AICPU, and CPU, providing basic operator capabilities for users. - -Primitive operator APIs are the basis for building high-order APIs, automatic differentiation, and network models. - -A code example is as follows: - -```python -import numpy as np -import mindspore -from mindspore import Tensor -import mindspore.ops.operations as P - -input_x = mindspore.Tensor(np.array([1.0, 2.0, 4.0]), mindspore.float32) -input_y = 3.0 -pow = P.Pow() -output = pow(input_x, input_y) -print("output =", output) -``` - -The following information is displayed: - -```text -output = [ 1. 8. 64.] -``` - -### mindspore.ops.functional - -To simplify the calling process of operators without attributes, MindSpore provides the functional version of some operators. 
For details about the input parameter requirements, see the input and output requirements of the original operator. For details about the supported operators, see [Operator List](https://www.mindspore.cn/doc/note/en/master/operator_list_ms.html#mindspore-ops-functional). - -For example, the functional version of the `P.Pow` operator is `F.tensor_pow`. - -A code example is as follows: - -```python -import numpy as np -import mindspore -from mindspore import Tensor -from mindspore.ops import functional as F - -input_x = mindspore.Tensor(np.array([1.0, 2.0, 4.0]), mindspore.float32) -input_y = 3.0 -output = F.tensor_pow(input_x, input_y) -print("output =", output) -``` - -The following information is displayed: - -```text -output = [ 1. 8. 64.] -``` - -### mindspore.ops.composite - -The composite API provides some operator combinations, including some operators related to clip_by_value and random, and functions (such as `GradOperation`, `HyperMap`, and `Map`) related to graph transformation. - -The operator combination can be directly used as a common function. For example, use `normal` to generate a random distribution: - -```python -from mindspore import dtype as mstype -from mindspore.ops import composite as C -from mindspore import Tensor - -mean = Tensor(1.0, mstype.float32) -stddev = Tensor(1.0, mstype.float32) -output = C.normal((2, 3), mean, stddev, seed=5) -print("output =", output) -``` - -The following information is displayed: - -```text -output = [[2.4911082 0.7941146 1.3117087] - [0.30582333 1.772938 1.525996]] -``` - -> The preceding code runs on the GPU version of MindSpore. - -For functions involving graph transformation, users can use `MultitypeFuncGraph` to define a group of overloaded functions. The implementation varies according to the function type. 
- -A code example is as follows: - -```python -import numpy as np -from mindspore.ops.composite import MultitypeFuncGraph -from mindspore import Tensor -import mindspore.ops as ops - -add = MultitypeFuncGraph('add') -@add.register("Number", "Number") -def add_scalar(x, y): - return ops.scalar_add(x, y) - -@add.register("Tensor", "Tensor") -def add_tensor(x, y): - return ops.add(x, y) - -tensor1 = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]]).astype('float32')) -tensor2 = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]]).astype('float32')) -print('tensor', add(tensor1, tensor2)) -print('scalar', add(1, 2)) -``` - -The following information is displayed: - -```text -tensor [[2.4 4.2] - [4.4 6.4]] -scalar 3 -``` - -In addition, the high-order function `GradOperation` provides the method of computing the gradient function corresponding to the input function. For details, see [mindspore.ops](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.GradOperation.html). - -### Combination usage of operations/functional/composite three types of operators - -In order to make it easier to use, in addition to the several usages introduced above, we have encapsulated the three operators of operations/functional/composite into mindspore.ops. It is recommended to directly call the interface in mindspore.ops. - -The code sample is as follows: - -```python -import mindspore.ops.operations as P -pow = P.Pow() -``` - -```python -import mindspore.ops as ops -pow = ops.Pow() -``` - -> The above two methods have the same effect. - -## Operator Functions - -Operators can be classified into seven functional modules: tensor operations, network operations, array operations, image operations, encoding operations, debugging operations, and quantization operations. For details about the supported operators on the Ascend AI processors, GPU, and CPU, see [Operator List](https://www.mindspore.cn/doc/note/en/master/operator_list.html). 
- -### Tensor Operations - -The tensor operations include the tensor structure operation and the tensor mathematical operation. - -Tensor structure operations include tensor creation, index sharding, dimension transformation, and integration and splitting. - -Tensor mathematical operations include scalar operations, vector operations, and matrix operations. - -The following describes how to use the tensor mathematical operation and operation broadcast mechanism. - -### Scalar Operations - -Tensor mathematical operators can be classified into scalar operator, vector operator, and matrix operator. - -Scalar operators include addition, subtraction, multiplication, division, exponentiation, common functions such as trigonometric function, exponential function, and logarithmic function, and logical comparison operators. - -Scalar operators are characterized by performing element-by-element operations on tensors. - -Some scalar operators overload commonly used mathematical operators. In addition, the broadcast feature similar to NumPy is supported. - - The following code implements the exponentiation, where the base is input_x and the exponent is input_y: - -```python -import numpy as np -import mindspore -from mindspore import Tensor - -input_x = mindspore.Tensor(np.array([1.0, 2.0, 4.0]), mindspore.float32) -input_y = 3.0 -print(input_x**input_y) -``` - - The following information is displayed: - -```text -[ 1. 8. 64.] -``` - -#### Addition - -The following code implements the addition of `input_x` and `input_y`: - -```python -print(input_x + input_y) -``` - - The following information is displayed: - -```text -[4. 5. 7.] 
-``` - -#### Element-wise Multiplication - -The following code implements the element-wise multiplication: - -```python -import numpy as np -import mindspore -from mindspore import Tensor -import mindspore.ops as ops - -input_x = Tensor(np.array([1.0, 2.0, 3.0]), mindspore.float32) -input_y = Tensor(np.array([4.0, 5.0, 6.0]), mindspore.float32) -mul = ops.Mul() -res = mul(input_x, input_y) - -print(res) -``` - - The following information is displayed: - -```text -[4. 10. 18.] -``` - -#### Trigonometric Function - -The following code implements Acos: - -```python -import numpy as np -import mindspore -from mindspore import Tensor -import mindspore.ops as ops - -acos = ops.ACos() -input_x = Tensor(np.array([0.74, 0.04, 0.30, 0.56]), mindspore.float32) -output = acos(input_x) -print(output) -``` - - The following information is displayed: - -```text -[0.7377037 1.5307858 1.2661037 0.97641146] -``` - -### Vector Operations - -Vector operators perform operations on only one particular axis, mapping a vector to a scalar or another vector. - -#### Squeeze - -The following code implements the compression of a channel whose dimension of the third channel is 1: - -```python -import numpy as np -import mindspore -from mindspore import Tensor -import mindspore.ops as ops - -input_tensor = Tensor(np.ones(shape=[3, 2, 1]), mindspore.float32) -squeeze = ops.Squeeze(2) -output = squeeze(input_tensor) - -print(output) -``` - - The following information is displayed: - -```text -[[1. 1.] - [1. 1.] - [1. 1.]] -``` - -### Matrix Operations - -Matrix operations include matrix multiplication, matrix norm, matrix determinant, matrix eigenvalue calculation, and matrix decomposition. 
- -#### Matrix Multiplication - - The following code implements the matrix multiplication of input_x and input_y: - -```python -import numpy as np -import mindspore -from mindspore import Tensor -import mindspore.ops as ops - -input_x = Tensor(np.ones(shape=[1, 3]), mindspore.float32) -input_y = Tensor(np.ones(shape=[3, 4]), mindspore.float32) -matmul = ops.MatMul() -output = matmul(input_x, input_y) - -print(output) -``` - -The following information is displayed: - -```text -[[3. 3. 3. 3.]] -``` - -#### Broadcast Mechanism - -Broadcast indicates that when the number of channels of each input variable is inconsistent, change the number of channels to obtain the result. - -- The following code implements the broadcast mechanism: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np - -shape = (2, 3) -input_x = Tensor(np.array([1, 2, 3]).astype(np.float32)) -broadcast_to = ops.BroadcastTo(shape) -output = broadcast_to(input_x) - -print(output) -``` - -The following information is displayed: - -```text -[[1. 2. 3.] - [1. 2. 3.]] -``` - -### Network Operations - -Network operations include feature extraction, activation function, loss function, and optimization algorithm. - -#### Feature Extraction - -Feature extraction is a common operation in machine learning. The core of feature extraction is to extract more representative tensors than the original input. - -Convolution Operation - -The following code implements the 2D convolution operation which is one of the common convolution operations: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np -import mindspore - -input = Tensor(np.ones([10, 32, 32, 32]), mindspore.float32) -weight = Tensor(np.ones([32, 32, 3, 3]), mindspore.float32) -conv2d = ops.Conv2D(out_channel=32, kernel_size=3) -res = conv2d(input, weight) - -print(res) -``` - -The following information is displayed: - -```text -[[[[288. 288. 288. ... 288. 288. 288.] - [288. 288. 288. 
... 288. 288. 288.] - [288. 288. 288. ... 288. 288. 288.] - ... - [288. 288. 288. ... 288. 288. 288.] - [288. 288. 288. ... 288. 288. 288.] - [288. 288. 288. ... 288. 288. 288.]]] - - ... - - [[288. 288. 288. ... 288. 288. 288.] - [288. 288. 288. ... 288. 288. 288.] - [288. 288. 288. ... 288. 288. 288.] - ... - [288. 288. 288. ... 288. 288. 288.] - [288. 288. 288. ... 288. 288. 288.] - [288. 288. 288. ... 288. 288. 288.]] - - - ... - - - [[288. 288. 288. ... 288. 288. 288.] - [288. 288. 288. ... 288. 288. 288.] - [288. 288. 288. ... 288. 288. 288.] - ... - [288. 288. 288. ... 288. 288. 288.] - [288. 288. 288. ... 288. 288. 288.] - [288. 288. 288. ... 288. 288. 288.]]]] -``` - -Convolutional Backward Propagation Operator Operation - -The following code implements the propagation operation of backward gradient operators. The outputs are stored in dout and weight: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np -import mindspore - -dout = Tensor(np.ones([10, 32, 30, 30]), mindspore.float32) -weight = Tensor(np.ones([32, 32, 3, 3]), mindspore.float32) -x = Tensor(np.ones([10, 32, 32, 32])) -conv2d_backprop_input = ops.Conv2DBackpropInput(out_channel=32, kernel_size=3) -res = conv2d_backprop_input(dout, weight, ops.shape(x)) - -print(res) -``` - -The following information is displayed: - -```text -[[[[ 32. 64. 96. ... 96. 64. 32.] - [ 64. 128. 192. ... 192. 128. 64.] - [ 96. 192. 288. ... 288. 192. 96.] - ... - [ 96. 192. 288. ... 288. 192. 96.] - [ 64. 128. 192. ... 192. 128. 64.] - [ 32. 64. 96. ... 96. 64. 32.]] - - ... - - [[ 32. 64. 96. ... 96. 64. 32.] - [ 64. 128. 192. ... 192. 128. 64.] - [ 96. 192. 288. ... 288. 192. 96.] - ... - [ 96. 192. 288. ... 288. 192. 96.] - [ 64. 128. 192. ... 192. 128. 64.] - [ 32. 64. 96. ... 96. 64. 
32.]]]] -``` - -#### Activation Function - -The following code implements the computation of the Softmax activation function: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np -import mindspore - -input_x = Tensor(np.array([1, 2, 3, 4, 5]), mindspore.float32) -softmax = ops.Softmax() -res = softmax(input_x) - -print(res) -``` - -The following information is displayed: - -```text -[0.01165623 0.03168492 0.08612853 0.23412164 0.63640857] -``` - -#### Loss Function - - L1Loss - - The following code implements the L1 loss function: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np -import mindspore - -loss = ops.SmoothL1Loss() -input_data = Tensor(np.array([1, 2, 3]), mindspore.float32) -target_data = Tensor(np.array([1, 2, 2]), mindspore.float32) -res = loss(input_data, target_data) -print(res) -``` - - The following information is displayed: - -```text -[0. 0. 0.5] -``` - -#### Optimization Algorithm - - The following code implements the stochastic gradient descent (SGD) algorithm. The output is stored in result. - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np -import mindspore - -sgd = ops.SGD() -parameters = Tensor(np.array([2, -0.5, 1.7, 4]), mindspore.float32) -gradient = Tensor(np.array([1, -1, 0.5, 2]), mindspore.float32) -learning_rate = Tensor(0.01, mindspore.float32) -accum = Tensor(np.array([0.1, 0.3, -0.2, -0.1]), mindspore.float32) -momentum = Tensor(0.1, mindspore.float32) -stat = Tensor(np.array([1.5, -0.3, 0.2, -0.7]), mindspore.float32) -result = sgd(parameters, gradient, learning_rate, accum, momentum, stat) - -print(result) -``` - - The following information is displayed: - -```text -(Tensor(shape=[4], dtype=Float32, value= [ 1.99000001e+00, -4.90300000e-01, 1.69500005e+00, 3.98009992e+00]),) -``` - -### Array Operations - -Array operations refer to operations on arrays. 
- -#### DType - -Returns a Tensor variable that has the same data type as the input and adapts to MindSpore. It is usually used in a MindSpore project. - -The following is a code example: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np -import mindspore - -input_tensor = Tensor(np.array([[2, 2], [2, 2]]), mindspore.float32) -typea = ops.DType()(input_tensor) - -print(typea) -``` - - The following information is displayed: - -```text -Float32 -``` - -#### Cast - -Converts the input data type and outputs variables of the same type as the target data type. - -The following is a code example: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np -import mindspore - -input_np = np.random.randn(2, 3, 4, 5).astype(np.float32) -input_x = Tensor(input_np) -type_dst = mindspore.float16 -cast = ops.Cast() -result = cast(input_x, type_dst) -print(result.dtype) -``` - - The following information is displayed: - -```text -Float16 -``` - -#### Shape - -Returns the shape of the input data. - - The following code implements the operation of returning the input data input_tensor: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np -import mindspore - -input_tensor = Tensor(np.ones(shape=[3, 2, 1]), mindspore.float32) -shape = ops.Shape() -output = shape(input_tensor) -print(output) -``` - - The following information is displayed: - -```text -(3, 2, 1) -``` - -### Image Operations - -The image operations include image preprocessing operations, for example, image cropping (for obtaining a large quantity of training samples) and resizing (for constructing an image pyramid). 
- - The following code implements the cropping and resizing operations: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np - -BATCH_SIZE = 1 -NUM_BOXES = 5 -IMAGE_HEIGHT = 256 -IMAGE_WIDTH = 256 -CHANNELS = 3 -image = np.random.normal(size=[BATCH_SIZE, IMAGE_HEIGHT, IMAGE_WIDTH, CHANNELS]).astype(np.float32) -boxes = np.random.uniform(size=[NUM_BOXES, 4]).astype(np.float32) -box_index = np.random.uniform(size=[NUM_BOXES], low=0, high=BATCH_SIZE).astype(np.int32) -crop_size = (24, 24) -crop_and_resize = ops.CropAndResize() -output = crop_and_resize(Tensor(image), Tensor(boxes), Tensor(box_index), crop_size) -print(output.asnumpy()) -``` - -The following information is displayed: - -```text -[[[[ 6.51672244e-01 -1.85958534e-01 5.19907832e-01] -[ 1.53466597e-01 4.10562098e-01 6.26138210e-01] -[ 6.62892580e-01 3.81776541e-01 4.69261825e-01] -... -[-5.83377600e-01 -3.53377648e-02 -6.01786733e-01] -[ 1.36125124e+00 5.84172308e-02 -6.41442612e-02] -[-9.11651254e-01 -1.19495761e+00 1.96810793e-02]] - -[[ 6.06956100e-03 -3.73778701e-01 1.88935513e-03] -[-1.06859171e+00 2.00272346e+00 1.37180305e+00] -[ 1.69524819e-01 2.90421434e-02 -4.12243098e-01] -... - -[[-2.04489112e-01 2.36615837e-01 1.33802962e+00] -[ 1.08329034e+00 -9.00492966e-01 -8.21497202e-01] -[ 7.54147097e-02 -3.72897685e-01 -2.91040149e-02] -... -[ 1.12317121e+00 8.98950577e-01 4.22795087e-01] -[ 5.13781667e-01 5.12095273e-01 -3.68211865e-01] -[-7.04941899e-02 -1.09924078e+00 6.89047515e-01]]]] -``` - -> The preceding code runs on MindSpore of the Ascend version. - -### Encoding Operations - -The encoding operations include BoundingBox Encoding, BoundingBox Decoding, and IOU computing. - -#### BoundingBoxEncode - -The box of the area where the object is located is encoded to obtain more concise information similar to PCA, facilitating subsequent tasks such as feature extraction, object detection, and image restoration. 
- -The following code implements BoundingBox Encoding for anchor_box and groundtruth_box: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import mindspore - -anchor_box = Tensor([[2,2,2,3],[2,2,2,3]],mindspore.float32) -groundtruth_box = Tensor([[1,2,1,4],[1,2,1,4]],mindspore.float32) -boundingbox_encode = ops.BoundingBoxEncode(means=(0.0, 0.0, 0.0, 0.0), stds=(1.0, 1.0, 1.0, 1.0)) -res = boundingbox_encode(anchor_box, groundtruth_box) -print(res) -``` - - The following information is displayed: - -```text -[[-1. 0.25 0. 0.40546513] - [-1. 0.25 0. 0.40546513]] -``` - -#### BoundingBoxDecode - -After decoding the area location information, the encoder uses this operator to decode the information. - - Code implementation: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import mindspore - -anchor_box = Tensor([[4,1,2,1],[2,2,2,3]],mindspore.float32) -deltas = Tensor([[3,1,2,2],[1,2,1,4]],mindspore.float32) -boundingbox_decode = ops.BoundingBoxDecode(means=(0.0, 0.0, 0.0, 0.0), stds=(1.0, 1.0, 1.0, 1.0), max_shape=(768, 1280), wh_ratio_clip=0.016) -res = boundingbox_decode(anchor_box, deltas) -print(res) -``` - - The following information is displayed: - -```text -[[ 4.194528 0. 0. 5.194528 ] - [ 2.1408591 0. 3.8591409 60.59815 ]] -``` - -#### IOU Computing - -Computes the proportion of the intersection area and union area of the box where the predicted object is located and the box where the real object is located. It is often used as a loss function to optimize the model. - -The following code implements the IOU computing between `anchor_boxes` and `gt_boxes`. 
The output is stored in out: - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np -import mindspore - -iou = ops.IOU() -anchor_boxes = Tensor(np.random.randint(1.0, 5.0, [3, 4]), mindspore.float16) -gt_boxes = Tensor(np.random.randint(1.0, 5.0, [3, 4]), mindspore.float16) -out = iou(anchor_boxes, gt_boxes) -print(out) -``` - - The following information is displayed: - -```text -[[ 0. -0. 0.] - [ 0. -0. 0.] - [ 0. 0. 0.]] -``` - -### Debugging Operations - -The debugging operations refer to some common operators and operations used to debug a network, for example, HookBackward. These operations are very convenient and important for entry-level deep learning, greatly improving learning experience. - -#### HookBackward - -Displays the gradient of intermediate variables. It is a common operator. Currently, only the PyNative mode is supported. - -The following code implements the function of printing the gradient of the intermediate variable (x,y in this example): - -```python -from mindspore import Tensor -import mindspore.ops as ops -import numpy as np -from mindspore import dtype as mstype - -def hook_fn(grad_out): - print(grad_out) - -grad_all = ops.GradOperation(get_all=True) -hook = ops.HookBackward(hook_fn) - -def hook_test(x, y): - z = x * y - z = hook(z) - z = z * y - return z - -def backward(x, y): - return grad_all(hook_test)(Tensor(x, mstype.float32), Tensor(y, mstype.float32)) - -print(backward(1, 2)) -``` - -The following information is displayed: - -```text -(Tensor(shape=[], dtype=Float32, value= 2),) -(Tensor(shape=[], dtype=Float32, value= 4), Tensor(shape=[], dtype=Float32, value= 4)) -``` diff --git a/docs/programming_guide/source_en/optim.md b/docs/programming_guide/source_en/optim.md deleted file mode 100644 index 24873839428cc9ecb8d6bd1bc8a470cd56f982d3..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/optim.md +++ /dev/null @@ -1,189 +0,0 @@ -# Optimization Algorithms - - - -- 
[Optimization Algorithms](#optimization-algorithms) - - [Overview](#overview) - - [Learning Rates](#learning-rates) - - [dynamic_lr](#dynamic_lr) - - [learning_rate_schedule](#learning_rate_schedule) - - [Optimzers](#optimzers) - - [Usage](#usage) - - [Built-in Optimizers](#built-in-optimizers) - - - - - -## Overview - -`mindspore.nn.optim` is a module in the MindSpore framework for implementing various optimization algorithms, including common optimizers and learning rates. In addition, the universal APIs can integrate updated and complex methods into the module. - -`mindspore.nn.optim` provides common optimizers for models, such as `SGD`, `ADAM`, and `Momentum`. The optimizer is used to compute and update the gradient. The selection of the model optimization algorithm directly affects the performance of the final model. If the effect is poor, the problem may be caused by the optimization algorithm instead of the feature or model design. In addition, `mindspore.nn` provides the learning rate module. Learning rates are classified into `dynamic_lr` and `learning_rate_schedule`, which are both dynamic learning rates. However, the implementation methods are different. The learning rate is the most important parameter in supervised learning and deep learning. It determines whether the objective function can converge to a local minimum and when it can converge to a minimum. An appropriate learning rate can make the objective function converge to a local minimum in an appropriate time. - -> All the following examples support the CPU, GPU, and Ascend environments. - -## Learning Rates - -### dynamic_lr - -The `mindspore.nn.dynamic_lr` module contains the following classes: - -- `piecewise_constant_lr` class: computes the learning rate based on the unchanged segment. -- `exponential_decay_lr` class: computes the learning rate based on the exponential decay function. -- `natural_exp_decay_lr` class: computes the learning rate based on the natural exponential decay function. 
-- `inverse_decay_lr` class: computes the learning rate based on the inverse time attenuation function. -- `cosine_decay_lr` class: computes the learning rate based on the cosine attenuation function. -- `polynomial_decay_lr` class: computes the learning rate based on the polynomial attenuation function. -- `warmup_lr` class: improves the learning rate. - -They are different implementations of `dynamic_lr`. - -For example, the code example of the `piecewise_constant_lr` class is as follows: - -```python -from mindspore.nn.dynamic_lr import piecewise_constant_lr - -def test_dynamic_lr(): - milestone = [2, 5, 10] - learning_rates = [0.1, 0.05, 0.01] - lr = piecewise_constant_lr(milestone, learning_rates) - print(lr) - - -if __name__ == '__main__': - test_dynamic_lr() -``` - -The following information is displayed: - -```text -[0.1, 0.1, 0.05, 0.05, 0.05, 0.01, 0.01, 0.01, 0.01, 0.01] -``` - -### learning_rate_schedule - -The `mindspore.nn.learning_rate_schedule` module has the following classes: `ExponentialDecayLR`, `NaturalExpDecayLR`, `InverseDecayLR`, and `CosineDecayLR`. `PolynomialDecayLR` class and `WarmUpLR` class. They belong to `learning_rate_schedule` but are implemented in different ways. Their meanings are as follows: - -- `ExponentialDecayLR` class: computes the learning rate based on the exponential decay function. -- `NaturalExpDecayLR` class: computes the learning rate based on the natural exponential decay function. -- `InverseDecayLR` class: computes the learning rate based on the inverse time attenuation function. -- `CosineDecayLR` class: computes the learning rate based on the cosine attenuation function. -- `PolynomialDecayLR` class: computes the learning rate based on the polynomial attenuation function. -- `WarmUpLR` class: improves the learning rate. - -They are different implementations of `learning_rate_schedule`. 
- -For example, the code example of the ExponentialDecayLR class is as follows: - -```python -from mindspore import dtype as mstype -from mindspore import Tensor -from mindspore.nn import ExponentialDecayLR - -def test_learning_rate_schedule(): - learning_rate = 0.1 # learning_rate(float) - The initial value of learning rate. - decay_rate = 0.9 # decay_rate(float) - The decay rate. - decay_steps = 4 # decay_steps(int) - A value used to calculate decayed learning rate. - global_step = Tensor(2, mstype.int32) - exponential_decay_lr = ExponentialDecayLR(learning_rate, decay_rate, decay_steps) - res = exponential_decay_lr(global_step) - print(res) - - -if __name__ == '__main__': - test_learning_rate_schedule() -``` - -The following information is displayed: - -```text -0.094868325 -``` - -## Optimzers - -### Usage - -To use `mindspore.nn.optim`, you need to build an `Optimizer` object. This object can maintain the current parameter status and update parameters based on the computed gradient. - -- Building - -To build an `Optimizer`, you need to give it an iterable that contains the parameters (must be Variable objects) that need to be optimized. Then, you can set the `Optimizer` parameter options, such as the learning rate and weight attenuation. - -A code example is as follows: - -```python -from mindspore import nn - -optim = nn.SGD(group_params, learning_rate=0.1, weight_decay=0.0) -optim = nn.Adam(params=net.trainable_params()) - -optim = nn.Adam(group_params, learning_rate=0.1, weight_decay=0.0) - -``` - -- Setting options for each parameter separately - -The optimizer also allows you to set options for each parameter separately. Do not pass in the variable directly but pass in the iterable of a dictionary. Each dictionary defines a group of parameters and contains a key, which corresponds to a parameter value. Other keys should be other parameters accepted by the optimizer and will be used to optimize this group of parameters. 
- -You can pass options as keyword parameters, which are used as default values in groups where these options are not overridden. This is useful when you want to change the options of only one parameter group without changing the options of other parameter groups. -Take `SGD` as an example. When you want to determine the learning rate of each layer, run the following command: - -```python -from mindspore import nn - -optim = nn.SGD([{'params': conv_params, 'weight_decay': 0.01}, - {'params': no_conv_params, 'lr': 0.01}, - {'order_params': net.trainable_params()}], - learning_rate=0.1, weight_decay=0.0) - -``` - -This example indicates that when the parameter is conv_params, the weight attenuation is 0.01 and the learning rate is 0.1. When the parameter is no_conv_params, the weight attenuation is 0.0 and the learning rate is 0.01. The learning_rate=0.1 is used for all groups where the learning rate is not set. The same rule applies to weight_deca. - -### Built-in Optimizers - -Common deep learning optimization algorithms include `SGD`, `Adam`, `Ftrl`, `lazyadam`, `Momentum`, `RMSprop`, `Lars`, `Proximal_ada_grad`, and `lamb`. -In the `mindspore.nn.optim` module, they have corresponding class implementations. For example: - -- `SGD`: The default parameter is pure SGD. When the `momentum` parameter is set to a value other than 0, the first-order momentum is considered. After `nesterov` is set to True, the value changes to `NAG`, that is, `Nesterov Accelerated Gradient`. When the gradient is computed, the gradient of the step forward is computed. - -- `RMSprop` considers the second-order momentum. Different parameters have different learning rates, that is, adaptive learning rates. `Adagrad` is optimized. Only the second-order momentum within a certain window is considered through exponential smoothing. - -- `Adam` considers both first-order momentum and second-order momentum. It can be seen as a further consideration of the first-order momentum based on `RMSprop`. 
- -For example, the code example of `SGD` is as follows: - -```python -from mindspore import nn, Model, Tensor -import mindspore.ops as ops -import numpy as np -from mindspore import dtype as mstype -from mindspore import Parameter - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.matmul = ops.MatMul() - self.conv = nn.Conv2d(1, 6, 5, pad_mode='valid') - self.z = Parameter(Tensor(np.array([1.0], np.float32))) - def construct(self, x, y): - x = x * self.z - out = self.matmul(x, y) - return out - -net = Net() -optim = nn.SGD(params=net.trainable_params()) - -conv_params = list(filter(lambda x: 'conv' in x.name, net.trainable_params())) -no_conv_params = list(filter(lambda x: 'conv' not in x.name, net.trainable_params())) -group_params = [{'params': conv_params, 'weight_decay': 0.01}, - {'params': no_conv_params, 'lr': 0.01}, - {'order_params': net.trainable_params()}] -optim = nn.SGD(group_params, learning_rate=0.1, weight_decay=0.0) - -loss = nn.SoftmaxCrossEntropyWithLogits() -model = Model(net, loss_fn=loss, optimizer=optim) - -``` diff --git a/docs/programming_guide/source_en/parameter.md b/docs/programming_guide/source_en/parameter.md deleted file mode 100644 index 8bfa3167b80d072caf183a47eb62a8c075622b7c..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/parameter.md +++ /dev/null @@ -1,183 +0,0 @@ -# Parameter - - - -- [Parameter](#parameter) - - [Overview](#overview) - - [Initialization](#initialization) - - [Attributes](#attributes) - - [Methods](#methods) - - [ParameterTuple](#parametertuple) - - - - - -## Overview - -`Parameter` is a variable tensor, indicating the parameters that need to be updated during network training. The following describes the `Parameter` initialization, attributes, methods, and `ParameterTuple`. - -## Initialization - -```python -mindspore.Parameter(default_input, name=None, requires_grad=True, layerwise_parallel=False) -``` - -Initialize a `Parameter` object. 
The input data supports the `Tensor`, `Initializer`, `int`, and `float` types. - -The `initializer` API can be called to generate the `Initializer` object. - -When `init` is used to initialize `Tensor`, the `Tensor` only stores the shape and type of the tensor, not the actual data. Therefore, `Tensor` does not occupy any memory, you can call the `init_data` API to convert `Tensor` saved in `Parameter` to the actual data. - -You can specify a name for each `Parameter` to facilitate subsequent operations and updates. It is recommended to use the default value of `name` when initialize a parameter as one attribute of a cell, otherwise, the parameter name may be different than expected. - -To update a parameter, set `requires_grad` to `True`. - -When `layerwise_parallel` is set to True, this parameter will be filtered out during parameter broadcast and parameter gradient aggregation. - -For details about the configuration of distributed parallelism, see . - -In the following example, `Parameter` objects are built using three different data types. All the three `Parameter` objects need to be updated, and layerwise parallelism is not used. - -```python -import numpy as np -from mindspore import Tensor, Parameter -from mindspore import dtype as mstype -from mindspore.common.initializer import initializer - -x = Parameter(default_input=Tensor(np.arange(2*3).reshape((2, 3))), name='x') -y = Parameter(default_input=initializer('ones', [1, 2, 3], mstype.float32), name='y') -z = Parameter(default_input=2.0, name='z') - -print(x, "\n\n", y, "\n\n", z) -``` - -The following information is displayed: - -```text -Parameter (name=x) - -Parameter (name=y) - -Parameter (name=z) -``` - -## Attributes - -- `inited_param`: returns `Parameter` that stores the actual data. - -- `name`: specifies a name for an instantiated `Parameter`. - -- `sliced`: specifies whether the data stored in `Parameter` is sharded data in the automatic parallel scenario. - -If yes, do not shard the data. 
Otherwise, determine whether to shard the data based on the network parallel strategy. - -- `is_init`: initialization status of `Parameter`. At the GE backend, an `init graph` is required to synchronize data from the host to the device. This parameter specifies whether the data has been synchronized to the device. - This parameter takes effect only at the GE backend. This parameter is set to False at other backends. - -- `layerwise_parallel`: specifies whether `Parameter` supports layerwise parallelism. If yes, parameters are not broadcasted and gradient aggregation is not performed. Otherwise, parameters need to be broadcasted and gradient aggregation is performed. - -- `requires_grad`: specifies whether to compute the parameter gradient. If a parameter needs to be trained, the parameter gradient needs to be computed. Otherwise, the parameter gradient does not need to be computed. - -- `data`: `Parameter`. - -In the following example, `Parameter` is initialized through `Tensor` to obtain its attributes. - -```python -import numpy as np - -from mindspore import Tensor, Parameter - -x = Parameter(default_input=Tensor(np.arange(2*3).reshape((2, 3)))) - -print("name: ", x.name, "\n", - "sliced: ", x.sliced, "\n", - "is_init: ", x.is_init, "\n", - "inited_param: ", x.inited_param, "\n", - "requires_grad: ", x.requires_grad, "\n", - "layerwise_parallel: ", x.layerwise_parallel, "\n", - "data: ", x.data) -``` - -The following information is displayed: - -```text -name: Parameter -sliced: False -is_init: False -inited_param: None -requires_grad: True -layerwise_parallel: False - -data: Parameter (name=Parameter, shape=(2, 3), dtype=Int64, requires_grad=True) -``` - -## Methods - -- `init_data`: When the network uses the semi-automatic or automatic parallel strategy, and the data input during `Parameter` initialization is `Initializer`, this API can be called to convert the data saved by `Parameter` to `Tensor`. - -- `set_data`: sets the data saved by `Parameter`. 
`Tensor`, `Initializer`, `int`, and `float` can be input for setting. - When the input parameter `slice_shape` of the method is set to True, the shape of `Parameter` can be changed. Otherwise, the configured shape must be the same as the original shape of `Parameter`. - -- `set_param_ps`: controls whether training parameters are trained by using the [Parameter Server](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/apply_parameter_server_training.html). - -- `clone`: clones `Parameter`. You can specify the parameter name after cloning. - -In the following example, `Initializer` is used to initialize `Tensor`, and methods related to `Parameter` are called. - -```python -import numpy as np - -from mindspore import Tensor, Parameter -from mindspore import dtype as mstype -from mindspore.common.initializer import initializer - -x = Parameter(default_input=initializer('ones', [1, 2, 3], mstype.float32)) - -print(x) - -x_clone = x.clone() -x_clone.name = "x_clone" -print(x_clone) - -print(x.init_data()) -print(x.set_data(data=Tensor(np.arange(2*3).reshape((1, 2, 3))))) -``` - -The following information is displayed: - -```text -Parameter (name=Parameter, shape=(1, 2, 3), dtype=Float32, requires_grad=True) -Parameter (name=x_clone, shape=(1, 2, 3), dtype=Float32, requires_grad=True) -Parameter (name=Parameter, shape=(1, 2, 3), dtype=Float32, requires_grad=True) -Parameter (name=Parameter, shape=(1, 2, 3), dtype=Float32, requires_grad=True) -``` - -## ParameterTuple - -Inherited from `tuple`, `ParameterTuple` is used to store multiple `Parameter` objects. `__new__(cls, iterable)` is used to transfer an iterator for storing `Parameter` for building, and the `clone` API is provided for cloning. - -The following example builds a `ParameterTuple` object and clones it. 
- -```python -import numpy as np -from mindspore import Tensor, Parameter, ParameterTuple -from mindspore import dtype as mstype -from mindspore.common.initializer import initializer - -x = Parameter(default_input=Tensor(np.arange(2*3).reshape((2, 3))), name='x') -y = Parameter(default_input=initializer('ones', [1, 2, 3], mstype.float32), name='y') -z = Parameter(default_input=2.0, name='z') -params = ParameterTuple((x, y, z)) -params_copy = params.clone("params_copy") -print(params, "\n") -print(params_copy) -``` - -The following information is displayed: - -```text -(Parameter (name=x), Parameter (name=y), Parameter (name=z)) - -(Parameter (name=params_copy.x), Parameter (name=params_copy.y), Parameter (name=params_copy.z)) -``` diff --git a/docs/programming_guide/source_en/performance_optimization.md b/docs/programming_guide/source_en/performance_optimization.md deleted file mode 100644 index ad0876422ab54eaf7b8dbe74627429a92f274790..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/performance_optimization.md +++ /dev/null @@ -1,19 +0,0 @@ -# Performance Optimization - - - -- [Performance Optimization](#performance-optimization) - - - - - -MindSpore provides a variety of performance optimization methods, users can use them to improve the performance of training and inference according to the actual situation. 
- -| Optimization Stage | Optimization Method | Supported | -| --- | --- | --- | -| Training | [Distributed Training](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_tutorials.html) | Ascend, GPU | -| | [Mixed Precision](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/enable_mixed_precision.html) | Ascend, GPU | -| | [Graph Kernel Fusion](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/enable_graph_kernel_fusion.html) | Ascend, GPU | -| | [Gradient Accumulation](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/apply_gradient_accumulation.html) | GPU | -| Inference | [Quantization After Training](https://www.mindspore.cn/tutorial/lite/en/master/use/post_training_quantization.html) | Lite | diff --git a/docs/programming_guide/source_en/pipeline.md b/docs/programming_guide/source_en/pipeline.md deleted file mode 100644 index cc26a15ce181f6c3ef9d3c811b2e33a3db4248ec..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/pipeline.md +++ /dev/null @@ -1,294 +0,0 @@ -# Processing Data - - - -- [Processing Data](#processing-data) - - [Overview](#overview) - - [Data Processing Operators](#data-processing-operators) - - [shuffle](#shuffle) - - [map](#map) - - [batch](#batch) - - [repeat](#repeat) - - [zip](#zip) - - [concat](#concat) - - - - - -## Overview - -Data is the basis of deep learning. Good data input can play a positive role in the entire deep neural network training. Before training, data processing is performed on a loaded dataset for resolving problems such as an excessively large data volume and uneven sample distribution, thereby obtaining a more optimized data input. - -Each dataset class of MindSpore provides multiple data processing operators. You can build a data processing pipeline to define the data processing operations to be used. 
In this way, data can be continuously transferred to the training system through the data processing pipeline during the training process. - -The following table lists part of the common data processing operators supported by MindSpore. For more data processing operations, see [MindSpore API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.html). - -| Data Processing Operator | Description | -| ---- | ---- | -| shuffle | Randomly shuffles datasets. | -| map | Provides customized functions or operators for specified column data in a dataset. | -| batch | Divides datasets into batches to reduce the number of training steps and accelerate the training process. | -| repeat | Repeats a dataset to expand the data volume. | -| zip | Zips two datasets into one vertically. | -| concat | Concatenates two datasets into one horizontally. | - -## Data Processing Operators - -### shuffle - -Randomly shuffles datasets. - -> The larger the value of `buffer_size`, the higher the shuffle degree, but the more time and computing resources are consumed. - -![shuffle](./images/shuffle.png) - -The following example builds a random dataset, then shuffles it, and finally shows the shuffled data result. 
- -```python -import numpy as np -import mindspore.dataset as ds - -ds.config.set_seed(0) - -def generator_func(): - for i in range(5): - yield (np.array([i, i+1, i+2]),) - -dataset1 = ds.GeneratorDataset(generator_func, ["data"]) - -dataset1 = dataset1.shuffle(buffer_size=2) -for data in dataset1.create_dict_iterator(): - print(data) -``` - -The output is as follows: - -```text -{'data': Tensor(shape=[3], dtype=Int64, value= [0, 1, 2])} -{'data': Tensor(shape=[3], dtype=Int64, value= [2, 3, 4])} -{'data': Tensor(shape=[3], dtype=Int64, value= [3, 4, 5])} -{'data': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3])} -{'data': Tensor(shape=[3], dtype=Int64, value= [4, 5, 6])} -``` - -### map - -Applies a specified function or operator to specified columns in a dataset to implement data mapping. You can customize the mapping function or use operators in `c_transforms` or `py_transforms` to augment image and text data. - -> For details about how to use data augmentation, see [Data Augmentation](https://www.mindspore.cn/doc/programming_guide/en/master/augmentation.html) in the Programming Guide. - -![map](./images/map.png) - -The following example builds a random dataset, defines a mapping function for data augmentation, applies the function to the dataset, and compares the data results before and after the mapping. 
- -```python -import numpy as np -import mindspore.dataset as ds - -def generator_func(): - for i in range(5): - yield (np.array([i, i+1, i+2]),) - -def pyfunc(x): - return x*2 - -dataset = ds.GeneratorDataset(generator_func, ["data"]) - -for data in dataset.create_dict_iterator(): - print(data) - -print("------ after processing ------") - -dataset = dataset.map(operations=pyfunc, input_columns=["data"]) - -for data in dataset.create_dict_iterator(): - print(data) -``` - -The output is as follows: - -```text -{'data': Tensor(shape=[3], dtype=Int64, value= [0, 1, 2])} -{'data': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3])} -{'data': Tensor(shape=[3], dtype=Int64, value= [2, 3, 4])} -{'data': Tensor(shape=[3], dtype=Int64, value= [3, 4, 5])} -{'data': Tensor(shape=[3], dtype=Int64, value= [4, 5, 6])} ------- after processing ------ -{'data': Tensor(shape=[3], dtype=Int64, value= [0, 2, 4])} -{'data': Tensor(shape=[3], dtype=Int64, value= [2, 4, 6])} -{'data': Tensor(shape=[3], dtype=Int64, value= [4, 6, 8])} -{'data': Tensor(shape=[3], dtype=Int64, value= [ 6, 8, 10])} -{'data': Tensor(shape=[3], dtype=Int64, value= [ 8, 10, 12])} -``` - -### batch - -Divides datasets into batches in the training system to reduce the number of training steps and accelerate the training process. - -![batch](./images/batch.png) - -The following example builds a random dataset, and then displays the batching results of the datasets that retain and do not retain redundant data. The batch size is 2. 
- -```python -import numpy as np -import mindspore.dataset as ds - -def generator_func(): - for i in range(5): - yield (np.array([i, i+1, i+2]),) - -dataset1 = ds.GeneratorDataset(generator_func, ["data"]) - -dataset1 = dataset1.batch(batch_size=2, drop_remainder=False) -for data in dataset1.create_dict_iterator(): - print(data) - -print("------ drop remainder ------") - -dataset2 = ds.GeneratorDataset(generator_func, ["data"]) - -dataset2 = dataset2.batch(batch_size=2, drop_remainder=True) -for data in dataset2.create_dict_iterator(): - print(data) -``` - -The output is as follows: - -```text -{'data': Tensor(shape=[2, 3], dtype=Int64, value= -[[0, 1, 2], - [1, 2, 3]])} -{'data': Tensor(shape=[2, 3], dtype=Int64, value= -[[2, 3, 4], - [3, 4, 5]])} -{'data': Tensor(shape=[1, 3], dtype=Int64, value= -[[4, 5, 6]])} ------- drop remainder ------ -{'data': Tensor(shape=[2, 3], dtype=Int64, value= -[[0, 1, 2], - [1, 2, 3]])} -{'data': Tensor(shape=[2, 3], dtype=Int64, value= -[[2, 3, 4], - [3, 4, 5]])} -``` - -### repeat - -Repeats a dataset to expand the data volume. - -> The operation sequence of `repeat` and `batch` affects the number of training batches. You are advised to place `repeat` after `batch`. - -![repeat](./images/repeat.png) - -The following example builds a random dataset, repeats it twice, and finally shows the data result after repetition. 
- -```python -import numpy as np -import mindspore.dataset as ds - -def generator_func(): - for i in range(5): - yield (np.array([i, i+1, i+2]),) - -dataset1 = ds.GeneratorDataset(generator_func, ["data"]) - -dataset1 = dataset1.repeat(count=2) -for data in dataset1.create_dict_iterator(): - print(data) -``` - -The output is as follows: - -```text -{'data': Tensor(shape=[3], dtype=Int64, value= [0, 1, 2])} -{'data': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3])} -{'data': Tensor(shape=[3], dtype=Int64, value= [2, 3, 4])} -{'data': Tensor(shape=[3], dtype=Int64, value= [3, 4, 5])} -{'data': Tensor(shape=[3], dtype=Int64, value= [4, 5, 6])} -{'data': Tensor(shape=[3], dtype=Int64, value= [0, 1, 2])} -{'data': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3])} -{'data': Tensor(shape=[3], dtype=Int64, value= [2, 3, 4])} -{'data': Tensor(shape=[3], dtype=Int64, value= [3, 4, 5])} -{'data': Tensor(shape=[3], dtype=Int64, value= [4, 5, 6])} -``` - -### zip - -Zips two datasets into one vertically. - -> If the column names in the two datasets are the same, the two datasets will not be zipped. Therefore, ensure that the column name is unique.
If the number of rows in the two datasets is different, the number of rows after zipping is the same as the smaller number of rows. - - ![zip](./images/zip.png) - -The following example builds two random datasets with different samples, zips columns, and displays the data result after zipping. - -```python -import numpy as np -import mindspore.dataset as ds - -def generator_func(): - for i in range(7): - yield (np.array([i, i+1, i+2]),) - -def generator_func2(): - for i in range(4): - yield (np.array([1, 2]),) - -dataset1 = ds.GeneratorDataset(generator_func, ["data1"]) -dataset2 = ds.GeneratorDataset(generator_func2, ["data2"]) - -dataset3 = ds.zip((dataset1, dataset2)) - -for data in dataset3.create_dict_iterator(): - print(data) -``` - -The output is as follows: - -```text -{'data1': Tensor(shape=[3], dtype=Int64, value= [0, 1, 2]), 'data2': Tensor(shape=[2], dtype=Int64, value= [1, 2])} -{'data1': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3]), 'data2': Tensor(shape=[2], dtype=Int64, value= [1, 2])} -{'data1': Tensor(shape=[3], dtype=Int64, value= [2, 3, 4]), 'data2': Tensor(shape=[2], dtype=Int64, value= [1, 2])} -{'data1': Tensor(shape=[3], dtype=Int64, value= [3, 4, 5]), 'data2': Tensor(shape=[2], dtype=Int64, value= [1, 2])} -``` - -### concat - -Concatenates two datasets into one horizontally. - -> Enter the column name in the dataset. The column data type and column data sequence must be the same. - -![concat](./images/concat.png) - -The following example builds two random datasets, concatenates them by row, and displays the concatenated data result. Note that the same effect can be achieved by using the `+` operator. 
- -```python -import numpy as np -import mindspore.dataset as ds - -def generator_func(): - for i in range(2): - yield (np.array([0, 0, 0]),) - -def generator_func2(): - for i in range(2): - yield (np.array([1, 2, 3]),) - -dataset1 = ds.GeneratorDataset(generator_func, ["data1"]) -dataset2 = ds.GeneratorDataset(generator_func2, ["data1"]) - -dataset3 = dataset1.concat(dataset2) - -for data in dataset3.create_dict_iterator(): - print(data) -``` - -The output is as follows: - -```text -{'data1': Tensor(shape=[3], dtype=Int64, value= [0, 0, 0])} -{'data1': Tensor(shape=[3], dtype=Int64, value= [0, 0, 0])} -{'data1': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3])} -{'data1': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3])} -``` diff --git a/docs/programming_guide/source_en/probability.md b/docs/programming_guide/source_en/probability.md deleted file mode 100644 index 3c04817fd957fb1534ad0900768db22cadc8846c..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/probability.md +++ /dev/null @@ -1,1095 +0,0 @@ -# Deep Probabilistic Programming Library - - - -- [Deep Probabilistic Programming Library](#deep-probabilistic-programming-library) - - [Probability Distribution](#probability-distribution) - - [Probability Distribution Class](#probability-distribution-class) - - [Distribution Base Class](#distribution-base-class) - - [Bernoulli Distribution](#bernoulli-distribution) - - [Exponential Distribution](#exponential-distribution) - - [Geometric Distribution](#geometric-distribution) - - [Normal Distribution](#normal-distribution) - - [Uniform Distribution](#uniform-distribution) - - [Categorical Distribution](#categorical-distribution) - - [Cauchy Distribution](#cauchy-distribution) - - [LogNormal Distribution](#lognormal-distribution) - - [Gumbel Distribution](#gumbel-distribution) - - [Logistic Distribution](#logistic-distribution) - - [Poisson Distribution](#poisson-distribution) - - [Gamma Distribution](#gamma-distribution) - - [Beta 
Distribution](#beta-distribution) - - [Probability Distribution Class Application in PyNative Mode](#probability-distribution-class-application-in-pynative-mode) - - [Probability Distribution Class Application in Graph Mode](#probability-distribution-class-application-in-graph-mode) - - [TransformedDistribution Class API Design](#transformeddistribution-class-api-design) - - [Invoking a TransformedDistribution Instance in PyNative Mode](#invoking-a-transformeddistribution-instance-in-pynative-mode) - - [Invoking a TransformedDistribution Instance in Graph Mode](#invoking-a-transformeddistribution-instance-in-graph-mode) - - [Probability Distribution Mapping](#probability-distribution-mapping) - - [Bijector API Design](#bijector-api-design) - - [Bijector Base Class](#bijector-base-class) - - [PowerTransform](#powertransform) - - [Exp](#exp) - - [ScalarAffine](#scalaraffine) - - [Softplus](#softplus) - - [GumbelCDF](#gumbelcdf) - - [Invert](#invert) - - [Invoking the Bijector Instance in PyNative Mode](#invoking-the-bijector-instance-in-pynative-mode) - - [Invoking a Bijector Instance in Graph Mode](#invoking-a-bijector-instance-in-graph-mode) - - [Deep Probabilistic Network](#deep-probabilistic-network) - - [VAE](#vae) - - [ConditionalVAE](#conditionalvae) - - [Probability Inference Algorithm](#probability-inference-algorithm) - - [Bayesian Layer](#bayesian-layer) - - [Bayesian Conversion](#bayesian-conversion) - - [Bayesian Toolbox](#bayesian-toolbox) - - [Uncertainty Estimation](#uncertainty-estimation) - - [Anomaly Detection](#anomaly-detection) - - - - - -MindSpore deep probabilistic programming is to combine Bayesian learning with deep learning, including probability distribution, probability distribution mapping, deep probability network, probability inference algorithm, Bayesian layer, Bayesian conversion, and Bayesian toolkit. For professional Bayesian learning users, it provides probability sampling, inference algorithms, and model build libraries. 
On the other hand, advanced APIs are provided for users who are unfamiliar with Bayesian deep learning, so that they can use Bayesian models without changing the deep learning programming logic. - -## Probability Distribution - -Probability distribution (`mindspore.nn.probability.distribution`) is the basis of probabilistic programming. The `Distribution` class provides various probability statistics APIs, such as *pdf* for probability density, *cdf* for cumulative density, *kl_loss* for divergence calculation, and *sample* for sampling. Existing probability distribution examples include Gaussian distribution, Bernoulli distribution, exponential distribution, geometric distribution, and uniform distribution. - -### Probability Distribution Class - -- `Distribution`: base class of all probability distributions. - -- `Bernoulli`: Bernoulli distribution, with a parameter indicating the number of experiment successes. - -- `Exponential`: exponential distribution, with a rate parameter. - -- `Geometric`: geometric distribution, with a parameter indicating the probability of initial experiment success. - -- `Normal`: normal distribution (Gaussian distribution), with two parameters indicating the average value and standard deviation. - -- `Uniform`: uniform distribution, with two parameters indicating the minimum and maximum values on the axis. - -- `Categorical`: categorical distribution, with one parameter indicating the probability of each category. - -- `Cauchy`: cauchy distribution, with two parameters indicating the location and scale. - -- `LogNormal`: lognormal distribution, with two parameters indicating the location and scale. - -- `Logistic`: logistic distribution, with two parameters indicating the location and scale. - -- `Gumbel`: gumbel distribution, with two parameters indicating the location and scale. - -#### Distribution Base Class - -`Distribution` is the base class for all probability distributions. 
- -The `Distribution` class supports the following functions: `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, `log_survival`, `mean`, `sd`, `var`, `entropy`, `kl_loss`, `cross_entropy`, and `sample`. The input parameters vary according to the distribution. These functions can be used only in a derived class and their parameters are determined by the function implementation of the derived class. - -- `prob`: probability density function (PDF) or probability quality function (PMF) -- `log_prob`: log-like function -- `cdf`: cumulative distribution function (CDF) -- `log_cdf`: log-cumulative distribution function -- `survival_function`: survival function -- `log_survival`: logarithmic survival function -- `mean`: average value -- `sd`: standard deviation -- `var`: variance -- `entropy`: entropy -- `kl_loss`: Kullback-Leibler divergence -- `cross_entropy`: cross entropy of two probability distributions -- `sample`: random sampling of probability distribution -- `get_dist_args`: returns the parameters of the distribution used in the network -- `get_dist_name`: returns the type of the distribution - -#### Bernoulli Distribution - -Bernoulli distribution, inherited from the `Distribution` class. - -Properties are described as follows: - -- `Bernoulli.probs`: returns the probability of success in the Bernoulli experiment as a `Tensor`. - -The `Distribution` base class invokes the private API in the `Bernoulli` to implement the public APIs in the base class. `Bernoulli` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`: The input parameter *probs1* that indicates the probability of experiment success is optional. -- `entropy`: The input parameter *probs1* that indicates the probability of experiment success is optional. -- `cross_entropy` and `kl_loss`: The input parameters *dist* and *probs1_b* are mandatory. *dist* indicates another distribution type. Currently, only *'Bernoulli'* is supported. 
*probs1_b* is the experiment success probability of distribution *b*. Parameter *probs1_a* of distribution *a* is optional. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. The input parameter *probs* that indicates the probability of experiment success is optional. -- `sample`: Optional input parameters include sample shape *shape* and experiment success probability *probs1*. -- `get_dist_args`: The input parameter *probs1* that indicates the probability of experiment success is optional. Return `(probs1,)` with type tuple. -- `get_dist_type`: returns *'Bernoulli'*. - -#### Exponential Distribution - -Exponential distribution, inherited from the `Distribution` class. - -Properties are described as follows: - -- `Exponential.rate`: returns the rate parameter as a `Tensor`. - -The `Distribution` base class invokes the `Exponential` private API to implement the public APIs in the base class. `Exponential` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`: The input rate parameter *rate* is optional. -- `entropy`: The input rate parameter *rate* is optional. -- `cross_entropy` and `kl_loss`: The input parameters *dist* and *rate_b* are mandatory. *dist* indicates the name of another distribution type. Currently, only *'Exponential'* is supported. *rate_b* is the rate parameter of distribution *b*. Parameter *rate_a* of distribution *a* is optional. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. The input rate parameter *rate* is optional. -- `sample`: Optional input parameters include sample shape *shape* and rate parameter *rate*. -- `get_dist_args`: The input rate parameter *rate* is optional. Return `(rate,)` with type tuple. -- `get_dist_type`: returns *'Exponential'*. - -#### Geometric Distribution - -Geometric distribution, inherited from the `Distribution` class. 
- -Properties are described as follows: - -- `Geometric.probs`: returns the probability of success in the Bernoulli experiment as a `Tensor`. - -The `Distribution` base class invokes the private API in the `Geometric` to implement the public APIs in the base class. `Geometric` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`: The input parameter *probs1* that indicates the probability of experiment success is optional. -- `entropy`: The input parameter *probs1* that indicates the probability of experiment success is optional. -- `cross_entropy` and `kl_loss`: The input parameters *dist* and *probs1_b* are mandatory. *dist* indicates the name of another distribution type. Currently, only *'Geometric'* is supported. *probs1_b* is the experiment success probability of distribution *b*. Parameter *probs1_a* of distribution *a* is optional. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. The input parameter *probs1* that indicates the probability of experiment success is optional. -- `sample`: Optional input parameters include sample shape *shape* and experiment success probability *probs1*. -- `get_dist_args`: The input parameter *probs1* that indicates the probability of experiment success is optional. Return `(probs1,)` with type tuple. -- `get_dist_type`: returns *'Geometric'*. - -#### Normal Distribution - -Normal distribution (also known as Gaussian distribution), inherited from the `Distribution` class. - -The `Distribution` base class invokes the private API in the `Normal` to implement the public APIs in the base class. `Normal` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`: Input parameters *mean* (for average value) and *sd* (for standard deviation) are optional. -- `entropy`: Input parameters *mean* (for average value) and *sd* (for standard deviation) are optional. 
-- `cross_entropy` and `kl_loss`: The input parameters *dist*, *mean_b*, and *sd_b* are mandatory. *dist* indicates the name of another distribution type. Currently, only *'Normal'* is supported. *mean_b* and *sd_b* indicate the mean value and standard deviation of distribution *b*, respectively. Input parameters mean value *mean_a* and standard deviation *sd_a* of distribution *a* are optional. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. Input parameters mean value *mean_a* and standard deviation *sd_a* are optional. -- `sample`: Input parameters sample shape *shape*, average value *mean_a*, and standard deviation *sd_a* are optional. -- `get_dist_args`: Input parameters mean value *mean* and standard deviation *sd* are optional. Return `(mean, sd)` with type tuple. -- `get_dist_type`: returns *'Normal'*. - -#### Uniform Distribution - -Uniform distribution, inherited from the `Distribution` class. - -Properties are described as follows: - -- `Uniform.low`: returns the minimum value as a `Tensor`. -- `Uniform.high`: returns the maximum value as a `Tensor`. - -The `Distribution` base class invokes `Uniform` to implement public APIs in the base class. `Uniform` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`: Input parameters maximum value *high* and minimum value *low* are optional. -- `entropy`: Input parameters maximum value *high* and minimum value *low* are optional. -- `cross_entropy` and `kl_loss`: The input parameters *dist*, *high_b*, and *low_b* are mandatory. *dist* indicates the name of another distribution type. Currently, only *'Uniform'* is supported. *high_b* and *low_b* are parameters of distribution *b*. Input parameters maximum value *high* and minimum value *low* of distribution *a* are optional. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. 
Input parameters maximum value *high* and minimum value *low* are optional. -- `sample`: Input parameters *shape*, maximum value *high*, and minimum value *low* are optional. -- `get_dist_args`: Input parameters maximum value *high* and minimum value *low* are optional. Return `(low, high)` with type tuple. -- `get_dist_type`: returns *'Uniform'*. - -#### Categorical Distribution - -Categorical distribution, inherited from the `Distribution` class. - -Properties are described as follows: - -- `Categorical.probs`: returns the probability of each category as a `Tensor`. - -The `Distribution` base class invokes the private API in the `Categorical` to implement the public APIs in the base class. `Categorical` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`: The input parameter *probs* that indicates the probability of each category is optional. -- `entropy`: The input parameter *probs* that indicates the probability of each category is optional. -- `cross_entropy` and `kl_loss`: The input parameters *dist* and *probs_b* are mandatory. *dist* indicates the name of another distribution type. Currently, only *'Categorical'* is supported. *probs_b* is the categories' probabilities of distribution *b*. Parameter *probs_a* of distribution *a* is optional. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. The input parameter *probs* that indicates the probability of each category is optional. -- `sample`: Optional input parameters include sample shape *shape* and the categories' probabilities *probs*. -- `get_dist_args`: The input parameter *probs* that indicates the probability of each category is optional. Return `(probs,)` with type tuple. -- `get_dist_type`: returns *'Categorical'*. - -#### Cauchy Distribution - -Cauchy distribution, inherited from the `Distribution` class. - -Properties are described as follows: - -- `Cauchy.loc`: returns the location parameter as a `Tensor`. 
-- `Cauchy.scale`: returns the scale parameter as a `Tensor`. - -The `Distribution` base class invokes the private API in the `Cauchy` to implement the public APIs in the base class. `Cauchy` supports the following public APIs: - -- `entropy`: Input parameters *loc* (for location) and *scale* (for scale) are optional. -- `cross_entropy` and `kl_loss`: The input parameters *dist*, *loc_b*, and *scale_b* are mandatory. *dist* indicates the name of another distribution type. Currently, only *'Cauchy'* is supported. *loc_b* and *scale_b* indicate the location and scale of distribution *b*, respectively. Input parameters *loc* and *scale* of distribution *a* are optional. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. Input parameters location *loc* and scale *scale* are optional. -- `sample`: Input parameters sample shape *shape*, location *loc* and scale *scale* are optional. -- `get_dist_args`: Input parameters location *loc* and scale *scale* are optional. Return `(loc, scale)` with type tuple. -- `get_dist_type`: returns *'Cauchy'*. - -#### LogNormal Distribution - -LogNormal distribution, inherited from the `TransformedDistribution` class, constructed by `Exp` Bijector and `Normal` Distribution. - -Properties are described as follows: - -- `LogNormal.loc`: returns the location parameter as a `Tensor`. -- `LogNormal.scale`: returns the scale parameter as a `Tensor`. - -The `Distribution` base class invokes the private API in the `LogNormal` and `TransformedDistribution` to implement the public APIs in the base class. `LogNormal` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`:Input parameters *loc* (for location) and *scale* (for scale) are optional. -- `entropy`: Input parameters *loc* (for location) and *scale* (for scale) are optional. -- `cross_entropy` and `kl_loss`: The input parameters *dist*, *loc_b*, and *scale_b* are mandatory. 
*dist* indicates the name of another distribution type. Currently, only *'LogNormal'* is supported. *loc_b* and *scale_b* indicate the location and scale of distribution *b*, respectively. Input parameters *loc* and *scale* of distribution *a* are optional. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. Input parameters location *loc* and scale *scale* are optional. -- `sample`: Input parameters sample shape *shape*, location *loc* and scale *scale* are optional. -- `get_dist_args`: Input parameters location *loc* and scale *scale* are optional. Return `(loc, scale)` with type tuple. -- `get_dist_type`: returns *'LogNormal'*. - -#### Gumbel Distribution - -Gumbel distribution, inherited from the `TransformedDistribution` class, constructed by `GumbelCDF` Bijector and `Uniform` Distribution. - -Properties are described as follows: - -- `Gumbel.loc`: returns the location parameter as a `Tensor`. -- `Gumbel.scale`: returns the scale parameter as a `Tensor`. - -The `Distribution` base class invokes the private API in the `Gumbel` and `TransformedDistribution` to implement the public APIs in the base class. `Gumbel` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`:No parameter. -- `entropy`: No parameter. -- `cross_entropy` and `kl_loss`: The input parameters *dist*, *loc_b*, and *scale_b* are mandatory. *dist* indicates the name of another distribution type. Currently, only *'Gumbel'* is supported. *loc_b* and *scale_b* indicate the location and scale of distribution *b*. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. -- `sample`: Input parameters sample shape *shape* is optional. -- `get_dist_args`: Input parameters location *loc* and scale *scale* are optional. Return `(loc, scale)` with type tuple. -- `get_dist_type`: returns *'Gumbel'*. 
 - -#### Logistic Distribution - -Logistic distribution, inherited from the `Distribution` class. - -Properties are described as follows: - -- `Logistic.loc`: returns the location parameter as a `Tensor`. -- `Logistic.scale`: returns the scale parameter as a `Tensor`. - -The `Distribution` base class invokes the private API in the `Logistic` and `TransformedDistribution` to implement the public APIs in the base class. `Logistic` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`: Input parameters *loc* (for location) and *scale* (for scale) are optional. -- `entropy`: Input parameters *loc* (for location) and *scale* (for scale) are optional. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. Input parameters location *loc* and scale *scale* are optional. -- `sample`: Input parameters sample shape *shape*, location *loc* and scale *scale* are optional. -- `get_dist_args`: Input parameters location *loc* and scale *scale* are optional. Return `(loc, scale)` with type tuple. -- `get_dist_type`: returns *'Logistic'*. - -#### Poisson Distribution - -Poisson distribution, inherited from the `Distribution` class. - -Properties are described as follows: - -- `Poisson.rate`: returns the rate as a `Tensor`. - -The `Distribution` base class invokes the private API in the `Poisson` to implement the public APIs in the base class. `Poisson` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`: The input parameter *rate* is optional. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. The input parameter *rate* is optional. -- `sample`: Optional input parameters include sample shape *shape* and the parameter *rate*. -- `get_dist_args`: The input parameter *rate* is optional. Return `(rate,)` with type tuple. -- `get_dist_type`: returns *'Poisson'*. 
- -#### Gamma Distribution - -Gamma distribution, inherited from the `Distribution` class. - -Properties are described as follows: - -- `Gamma.concentration`: returns the concentration as a `Tensor`. -- `Gamma.rate`: returns the rate as a `Tensor`. - -The `Distribution` base class invokes the private API in the `Gamma` to implement the public APIs in the base class. `Gamma` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`: The input parameters *concentration* and *rate* are optional. -- `entropy`: The input parameters *concentration* and *rate* are optional. -- `cross_entropy` and `kl_loss`: The input parameters *dist*, *concentration_b* and *rate_b* are mandatory. *dist* indicates the name of another distribution type. Currently, only *'Gamma'* is supported. *concentration_b* and *rate_b* are the parameters of distribution *b*. The input parameters *concentration_a* and *rate_a* for distribution *a* are optional. -- `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`: The input parameter *value* is mandatory. The input parameters *concentration* and *rate* are optional. -- `sample`: Optional input parameters include sample shape *shape* and parameters *concentration* and *rate*. -- `get_dist_args`: The input parameters *concentration* and *rate* are optional. Return `(concentration, rate)` with type tuple. -- `get_dist_type`: returns *'Gamma'*. - -#### Beta Distribution - -Beta distribution, inherited from the `Distribution` class. - -Properties are described as follows: - -- `Beta.concentration1`: returns the rate as a `Tensor`. -- `Beta.concentration0`: returns the rate as a `Tensor`. - -The `Distribution` base class invokes the private API in the `Beta` to implement the public APIs in the base class. `Beta` supports the following public APIs: - -- `mean`,`mode`,`var`, and `sd`: The input parameters *concentration1* and *concentration0* are optional. 
-- `entropy`: The input parameters *concentration1* and *concentration0* are optional. -- `cross_entropy` and `kl_loss`: The input parameters *dist*, *concentration1_b* and *concentration0_b* are mandatory. *dist* indicates the name of another distribution type. Currently, only *'Beta'* is supported. *concentration1_b* and *concentration0_b* are the parameters of distribution *b*. The input parameters *concentration1_a* and *concentration0_a* for distribution *a* are optional. -- `prob` and `log_prob`: The input parameter *value* is mandatory. The input parameters *concentration1* and *concentration0* are optional. -- `sample`: Optional input parameters include sample shape *shape* and parameters *concentration1* and *concentration0*. -- `get_dist_args`: The input parameters *concentration1* and *concentration0* are optional. Return `(concentration1, concentration0)` with type tuple. -- `get_dist_type`: returns *'Beta'*. - -### Probability Distribution Class Application in PyNative Mode - -`Distribution` subclasses can be used in **PyNative** mode. - -Use `Normal` as an example. Create a normal distribution whose average value is 0.0 and standard deviation is 1.0. 
- -```python -from mindspore import Tensor -from mindspore import dtype as mstype -import mindspore.context as context -import mindspore.nn.probability.distribution as msd -context.set_context(mode=context.PYNATIVE_MODE) - -my_normal = msd.Normal(0.0, 1.0, dtype=mstype.float32) - -mean = my_normal.mean() -var = my_normal.var() -entropy = my_normal.entropy() - -value = Tensor([-0.5, 0.0, 0.5], dtype=mstype.float32) -prob = my_normal.prob(value) -cdf = my_normal.cdf(value) - -mean_b = Tensor(1.0, dtype=mstype.float32) -sd_b = Tensor(2.0, dtype=mstype.float32) -kl = my_normal.kl_loss('Normal', mean_b, sd_b) - -# get the distribution args as a tuple -dist_arg = my_normal.get_dist_args() - -print("mean: ", mean) -print("var: ", var) -print("entropy: ", entropy) -print("prob: ", prob) -print("cdf: ", cdf) -print("kl: ", kl) -print("dist_arg: ", dist_arg) -``` - -The output is as follows: - -```python -mean:  0.0 -var:  1.0 -entropy:  1.4189385 -prob:  [0.35206532 0.3989423  0.35206532] -cdf:  [0.30853754 0.5        0.69146246] -kl:  0.44314718 -dist_arg: (Tensor(shape=[], dtype=Float32, value= 0), Tensor(shape=[], dtype=Float32, value= 1)) -``` - -### Probability Distribution Class Application in Graph Mode - -In graph mode, `Distribution` subclasses can be used on the network. 
- -```python -import mindspore.nn as nn -from mindspore import Tensor -from mindspore import dtype as mstype -import mindspore.context as context -import mindspore.nn.probability.distribution as msd -context.set_context(mode=context.GRAPH_MODE) - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.normal = msd.Normal(0.0, 1.0, dtype=mstype.float32) - - def construct(self, value, mean, sd): - pdf = self.normal.prob(value) - kl = self.normal.kl_loss("Normal", mean, sd) - return pdf, kl - -net = Net() -value = Tensor([-0.5, 0.0, 0.5], dtype=mstype.float32) -mean = Tensor(1.0, dtype=mstype.float32) -sd = Tensor(1.0, dtype=mstype.float32) -pdf, kl = net(value, mean, sd) -print("pdf: ", pdf) -print("kl: ", kl) -``` - -The output is as follows: - -```python -pdf: [0.35206532 0.3989423 0.35206532] -kl: 0.5 -``` - -### TransformedDistribution Class API Design - -`TransformedDistribution`, inherited from `Distribution`, is a base class for mathematical distribution that can be obtained by mapping f(x) changes. The APIs are as follows: - -1. Properties - - - `bijector`: returns the distribution transformation method. - - `distribution`: returns the original distribution. - - `is_linear_transformation`: returns the linear transformation flag. - -2. API functions (The parameters of the following APIs are the same as those of the corresponding APIs of `distribution` in the constructor function.) - - - `cdf`: cumulative distribution function (CDF) - - `log_cdf`: log-cumulative distribution function - - `survival_function`: survival function - - `log_survival`: logarithmic survival function - - `prob`: probability density function (PDF) or probability quality function (PMF) - - `log_prob`: log-like function - - `sample`: random sampling - - `mean`: a non-parametric function, which can be invoked only when `Bijector.is_constant_jacobian=true` is invoked. 
- -### Invoking a TransformedDistribution Instance in PyNative Mode - -The `TransformedDistribution` subclass can be used in **PyNative** mode. - -```python -import numpy as np -import mindspore.nn as nn -import mindspore.nn.probability.bijector as msb -import mindspore.nn.probability.distribution as msd -import mindspore.context as context -from mindspore import Tensor, dtype - -context.set_context(mode=context.PYNATIVE_MODE) - -normal = msd.Normal(0.0, 1.0, dtype=dtype.float32) -exp = msb.Exp() -LogNormal = msd.TransformedDistribution(exp, normal, seed=0, name="LogNormal") - -# compute cumulative distribution function -x = np.array([2.0, 5.0, 10.0], dtype=np.float32) -tx = Tensor(x, dtype=dtype.float32) -cdf = LogNormal.cdf(tx) - -# generate samples from the distribution -shape = (3, 2) -sample = LogNormal.sample(shape) - -# get information of the distribution -print(LogNormal) -# get information of the underlying distribution and the bijector separately -print("underlying distribution:\n", LogNormal.distribution) -print("bijector:\n", LogNormal.bijector) -# get the computation results -print("cdf:\n", cdf) -print("sample shape:\n", sample.shape) -``` - -The output is as follows: - -```python -TransformedDistribution< -  (_bijector): Exp -  (_distribution): Normal -  > -underlying distribution: - Normal -bijector: - Exp -cdf: - [0.7558914 0.9462397 0.9893489] -sample shape: -(3, 2) -``` - -When the `TransformedDistribution` is constructed to map the transformed `is_constant_jacobian = true` (for example, `ScalarAffine`), the constructed `TransformedDistribution` instance can use the `mean` API to calculate the average value. 
For example: - -```python -normal = msd.Normal(0.0, 1.0, dtype=dtype.float32) -scalaraffine = msb.ScalarAffine(1.0, 2.0) -trans_dist = msd.TransformedDistribution(scalaraffine, normal, dtype=dtype.float32, seed=0) -mean = trans_dist.mean() -print(mean) -``` - -The output is as follows: - -```python -2.0 -``` - -### Invoking a TransformedDistribution Instance in Graph Mode - -In graph mode, the `TransformedDistribution` class can be used on the network. - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import Tensor, dtype -import mindspore.context as context -import mindspore.nn.probability.bijector as msb -import mindspore.nn.probability.distribution as msd -context.set_context(mode=context.GRAPH_MODE) - -class Net(nn.Cell): - def __init__(self, shape, dtype=dtype.float32, seed=0, name='transformed_distribution'): - super(Net, self).__init__() - # create TransformedDistribution distribution - self.exp = msb.Exp() - self.normal = msd.Normal(0.0, 1.0, dtype=dtype) - self.lognormal = msd.TransformedDistribution(self.exp, self.normal, seed=seed, name=name) - self.shape = shape - - def construct(self, value): - cdf = self.lognormal.cdf(value) - sample = self.lognormal.sample(self.shape) - return cdf, sample - -shape = (2, 3) -net = Net(shape=shape, name="LogNormal") -x = np.array([2.0, 3.0, 4.0, 5.0]).astype(np.float32) -tx = Tensor(x, dtype=dtype.float32) -cdf, sample = net(tx) -print("cdf: ", cdf) -print("sample shape: ", sample.shape) -``` - -The output is as follows: - -```python -cdf: [0.7558914 0.86403143 0.9171715 0.9462397 ] -sample shape: (2, 3) -``` - -## Probability Distribution Mapping - -Bijector (`mindspore.nn.probability.bijector`) is a basic component of probability programming. Bijector describes a random variable transformation method, and a new random variable $Y = f(x)$ may be generated by using an existing random variable X and a mapping function f. -`Bijector` provides four mapping-related transformation methods. 
It can be directly used as an operator, or used to generate a `Distribution` class instance of a new random variable on an existing `Distribution` class instance. - -### Bijector API Design - -#### Bijector Base Class - -The `Bijector` class is the base class for all probability distribution mappings. The APIs are as follows: - -1. Properties - - `name`: returns the value of `name`. - - `dtype`: returns the value of `dtype`. - - `parameters`: returns the value of `parameter`. - - `is_constant_jacobian`: returns the value of `is_constant_jacobian`. - - `is_injective`: returns the value of `is_injective`. - -2. Mapping functions - - `forward`: forward mapping, whose parameter is determined by `_forward` of the derived class. - - `inverse`: backward mapping, whose parameter is determined by`_inverse` of the derived class. - - `forward_log_jacobian`: logarithm of the derivative of the forward mapping, whose parameter is determined by `_forward_log_jacobian` of the derived class. - - `inverse_log_jacobian`: logarithm of the derivative of the backward mapping, whose parameter is determined by `_inverse_log_jacobian` of the derived class. - -When `Bijector` is invoked as a function: -The input is a `Distribution` class and a `TransformedDistribution` is generated **(cannot be invoked in a graph)**. - -#### PowerTransform - -`PowerTransform` implements variable transformation with $Y = g(X) = {(1 + X * c)}^{1 / c}$. The APIs are as follows: - -1. Properties - - `power`: returns the value of `power` as a `Tensor`. - -2. Mapping functions - - `forward`: forward mapping, with an input parameter `Tensor`. - - `inverse`: backward mapping, with an input parameter `Tensor`. - - `forward_log_jacobian`: logarithm of the derivative of the forward mapping, with an input parameter `Tensor`. - - `inverse_log_jacobian`: logarithm of the derivative of the backward mapping, with an input parameter `Tensor`. - -#### Exp - -`Exp` implements variable transformation with $Y = g(X)= exp(X)$. 
The APIs are as follows: - -Mapping functions - -- `forward`: forward mapping, with an input parameter `Tensor`. -- `inverse`: backward mapping, with an input parameter `Tensor`. -- `forward_log_jacobian`: logarithm of the derivative of the forward mapping, with an input parameter `Tensor`. -- `inverse_log_jacobian`: logarithm of the derivative of the backward mapping, with an input parameter `Tensor`. - -#### ScalarAffine - -`ScalarAffine` implements variable transformation with Y = g(X) = a * X + b. The APIs are as follows: - -1. Properties - - `scale`: returns the value of scale as a `Tensor`. - - `shift`: returns the value of shift as a `Tensor`. - -2. Mapping functions - - `forward`: forward mapping, with an input parameter `Tensor`. - - `inverse`: backward mapping, with an input parameter `Tensor`. - - `forward_log_jacobian`: logarithm of the derivative of the forward mapping, with an input parameter `Tensor`. - - `inverse_log_jacobian`: logarithm of the derivative of the backward mapping, with an input parameter `Tensor`. - -#### Softplus - -`Softplus` implements variable transformation with $Y = g(X) = log(1 + e ^ {kX}) / k $. The APIs are as follows: - -1. Properties - - `sharpness`: returns the value of `sharpness` as a `Tensor`. - -2. Mapping functions - - `forward`: forward mapping, with an input parameter `Tensor`. - - `inverse`: backward mapping, with an input parameter `Tensor`. - - `forward_log_jacobian`: logarithm of the derivative of the forward mapping, with an input parameter `Tensor`. - - `inverse_log_jacobian`: logarithm of the derivative of the backward mapping, with an input parameter `Tensor`. - -#### GumbelCDF - -`GumbelCDF` implements variable transformation with $Y = g(X) = \exp(-\exp(-\frac{X - loc}{scale}))$. The APIs are as follows: - -1. Properties - - `loc`: returns the value of `loc` as a `Tensor`. - - `scale`: returns the value of `scale` as a `Tensor`. - -2. 
Mapping functions - - `forward`: forward mapping, with an input parameter `Tensor`. - - `inverse`: backward mapping, with an input parameter `Tensor`. - - `forward_log_jacobian`: logarithm of the derivative of the forward mapping, with an input parameter `Tensor`. - - `inverse_log_jacobian`: logarithm of the derivative of the backward mapping, with an input parameter `Tensor`. - -#### Invert - -`Invert` implements the inverse of another bijector. The APIs are as follows: - -1. Properties - - `bijector`: returns the Bijector used during initialization with type `msb.Bijector`. - -2. Mapping functions - - `forward`: forward mapping, with an input parameter `Tensor`. - - `inverse`: backward mapping, with an input parameter `Tensor`. - - `forward_log_jacobian`: logarithm of the derivative of the forward mapping, with an input parameter `Tensor`. - - `inverse_log_jacobian`: logarithm of the derivative of the backward mapping, with an input parameter `Tensor`. - -### Invoking the Bijector Instance in PyNative Mode - -Before the execution, import the required library file package. The main library of the Bijector class is `mindspore.nn.probability.bijector`. After the library is imported, `msb` is used as the abbreviation of the library for invoking. - -The following uses `PowerTransform` as an example. Create a `PowerTransform` object whose power is 2. - -```python -import numpy as np -import mindspore.nn as nn -import mindspore.nn.probability.bijector as msb -import mindspore.context as context -from mindspore import Tensor, dtype - -context.set_context(mode=context.PYNATIVE_MODE) - -powertransform = msb.PowerTransform(power=2.) 
- -x = np.array([2.0, 3.0, 4.0, 5.0], dtype=np.float32) -tx = Tensor(x, dtype=dtype.float32) -forward = powertransform.forward(tx) -inverse = powertransform.inverse(tx) -forward_log_jaco = powertransform.forward_log_jacobian(tx) -inverse_log_jaco = powertransform.inverse_log_jacobian(tx) - -print(powertransform) -print("forward: ", forward) -print("inverse: ", inverse) -print("forward_log_jacobian: ", forward_log_jaco) -print("inverse_log_jacobian: ", inverse_log_jaco) -``` - -The output is as follows: - -```python -PowerTransform -forward:  [2.236068  2.6457515 3.        3.3166249] -inverse:  [ 1.5       4.        7.5      12.000001] -forward_log_jacobian:  [-0.804719  -0.9729551 -1.0986123 -1.1989477] -inverse_log_jacobian:  [0.6931472 1.0986123 1.3862944 1.609438 ] -``` - -### Invoking a Bijector Instance in Graph Mode - -In graph mode, the `Bijector` subclass can be used on the network. - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import Tensor -from mindspore import dtype as mstype -import mindspore.context as context -import mindspore.nn.probability.bijector as msb -context.set_context(mode=context.GRAPH_MODE) - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - # create a PowerTransform bijector - self.powertransform = msb.PowerTransform(power=2.) 
- - def construct(self, value): - forward = self.powertransform.forward(value) - inverse = self.powertransform.inverse(value) - forward_log_jaco = self.powertransform.forward_log_jacobian(value) - inverse_log_jaco = self.powertransform.inverse_log_jacobian(value) - return forward, inverse, forward_log_jaco, inverse_log_jaco - -net = Net() -x = np.array([2.0, 3.0, 4.0, 5.0]).astype(np.float32) -tx = Tensor(x, dtype=mstype.float32) -forward, inverse, forward_log_jaco, inverse_log_jaco = net(tx) -print("forward: ", forward) -print("inverse: ", inverse) -print("forward_log_jaco: ", forward_log_jaco) -print("inverse_log_jaco: ", inverse_log_jaco) -``` - -The output is as follows: - -```python -forward:  [2.236068  2.6457515 3.        3.3166249] -inverse:  [ 1.5       4.        7.5      12.000001] -forward_log_jacobian:  [-0.804719  -0.9729551 -1.0986123 -1.1989477] -inverse_log_jacobian:  [0.6931472 1.0986123 1.3862944 1.609438 ] -``` - -## Deep Probabilistic Network - -It is especially easy to use the MindSpore deep probabilistic programming library (`mindspore.nn.probability.dpn`) to construct a variational auto-encoder (VAE) for inference. You only need to define the encoder and decoder (a DNN model), invoke the VAE or conditional VAE (CVAE) API to form a derived network, invoke the ELBO API for optimization, and use the SVI API for variational inference. The advantage is that users who are not familiar with variational inference can build a probability model in the same way as they build a DNN model, and those who are familiar with variational inference can invoke these APIs to build a more complex probability model. VAE APIs are defined in `mindspore.nn.probability.dpn`, where dpn represents the deep probabilistic network. `mindspore.nn.probability.dpn` provides some basic APIs of the deep probabilistic network, for example, VAE. 
- -### VAE - -First, we need to define the encoder and decoder and invoke the `mindspore.nn.probability.dpn.VAE` API to construct the VAE network. In addition to the encoder and decoder, we need to input the hidden size of the encoder output variable and the latent size of the VAE network storage potential variable. Generally, the latent size is less than the hidden size. - -```python -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore.nn.probability.dpn import VAE - -IMAGE_SHAPE = (-1, 1, 32, 32) - - -class Encoder(nn.Cell): - def __init__(self): - super(Encoder, self).__init__() - self.fc1 = nn.Dense(1024, 800) - self.fc2 = nn.Dense(800, 400) - self.relu = nn.ReLU() - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - return x - - -class Decoder(nn.Cell): - def __init__(self): - super(Decoder, self).__init__() - self.fc1 = nn.Dense(400, 1024) - self.sigmoid = nn.Sigmoid() - self.reshape = ops.Reshape() - - def construct(self, z): - z = self.fc1(z) - z = self.reshape(z, IMAGE_SHAPE) - z = self.sigmoid(z) - return z - - -encoder = Encoder() -decoder = Decoder() -vae = VAE(encoder, decoder, hidden_size=400, latent_size=20) -``` - -### ConditionalVAE - -Similarly, the usage of CVAE is similar to that of VAE. The difference is that CVAE uses the label information of datasets. It is a supervised learning algorithm, and has a better generation effect than VAE. - -First, define the encoder and decoder and invoke the `mindspore.nn.probability.dpn.ConditionalVAE` API to construct the CVAE network. The encoder here is different from that of the VAE because the label information of datasets needs to be input. The decoder is the same as that of the VAE. For the CVAE API, the number of dataset label categories also needs to be input. Other input parameters are the same as those of the VAE API. 
- -```python -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore.nn.probability.dpn import ConditionalVAE - -IMAGE_SHAPE = (-1, 1, 32, 32) - - -class Encoder(nn.Cell): - def __init__(self, num_classes): - super(Encoder, self).__init__() - self.fc1 = nn.Dense(1024 + num_classes, 400) - self.relu = nn.ReLU() - self.flatten = nn.Flatten() - self.concat = ops.Concat(axis=1) - self.one_hot = nn.OneHot(depth=num_classes) - - def construct(self, x, y): - x = self.flatten(x) - y = self.one_hot(y) - input_x = self.concat((x, y)) - input_x = self.fc1(input_x) - input_x = self.relu(input_x) - return input_x - - -class Decoder(nn.Cell): - def __init__(self): - super(Decoder, self).__init__() - self.fc1 = nn.Dense(400, 1024) - self.sigmoid = nn.Sigmoid() - self.reshape = ops.Reshape() - - def construct(self, z): - z = self.fc1(z) - z = self.reshape(z, IMAGE_SHAPE) - z = self.sigmoid(z) - return z - - -encoder = Encoder(num_classes=10) -decoder = Decoder() -cvae = ConditionalVAE(encoder, decoder, hidden_size=400, latent_size=20, num_classes=10) -``` - -Load a dataset, for example, Mnist. For details about the data loading and preprocessing process, see [Implementing an Image Classification Application](https://www.mindspore.cn/tutorial/training/en/master/quick_start/quick_start.html). The create_dataset function is used to create a data iterator. - -```python -ds_train = create_dataset(image_path, 128, 1) -``` - -Next, use the infer API to perform variational inference on the VAE network. - -## Probability Inference Algorithm - -Invoke the `mindspore.nn.probability.infer.ELBO` API to define the loss function of the VAE network, invoke `WithLossCell` to encapsulate the VAE network and loss function, define the optimizer, and transfer them to the `mindspore.nn.probability.infer.SVI` API. The `run` function of the SVI API can be understood to trigger training of the VAE network. You can specify the `epochs` of the training, so that a trained network is returned. 
If you specify the `get_train_loss` function, the loss of the trained model will be returned. - -```python -from mindspore.nn.probability.infer import ELBO, SVI - -net_loss = ELBO(latent_prior='Normal', output_prior='Normal') -net_with_loss = nn.WithLossCell(vae, net_loss) -optimizer = nn.Adam(params=vae.trainable_params(), learning_rate=0.001) - -vi = SVI(net_with_loss=net_with_loss, optimizer=optimizer) -vae = vi.run(train_dataset=ds_train, epochs=10) -trained_loss = vi.get_train_loss() -``` - -After obtaining the trained VAE network, use `vae.generate_sample` to generate a new sample. You need to specify the number of samples to be generated and the shape of the generated samples. The shape must be the same as that of the samples in the original dataset. You can also use `vae.reconstruct_sample` to reconstruct samples in the original dataset to test the reconstruction capability of the VAE network. - -```python -generated_sample = vae.generate_sample(64, IMAGE_SHAPE) -for sample in ds_train.create_dict_iterator(): - sample_x = Tensor(sample['image'], dtype=mstype.float32) - reconstructed_sample = vae.reconstruct_sample(sample_x) -print('The shape of the generated sample is ', generated_sample.shape) -``` - -The shape of the newly generated sample is as follows: - -```python -The shape of the generated sample is (64, 1, 32, 32) -``` - -The CVAE training process is similar to the VAE training process. However, when a trained CVAE network is used to generate a new sample and rebuild a new sample, label information needs to be input. For example, the generated new sample is 64 digits ranging from 0 to 7. 
- -```python -sample_label = Tensor([i for i in range(0, 8)] * 8, dtype=mstype.int32) -generated_sample = cvae.generate_sample(sample_label, 64, IMAGE_SHAPE) -for sample in ds_train.create_dict_iterator(): - sample_x = Tensor(sample['image'], dtype=mstype.float32) - sample_y = Tensor(sample['label'], dtype=mstype.int32) - reconstructed_sample = cvae.reconstruct_sample(sample_x, sample_y) -print('The shape of the generated sample is ', generated_sample.shape) -``` - -Check the shape of the newly generated sample: - -```python -The shape of the generated sample is (64, 1, 32, 32) -``` - -If you want the generated sample to be better and clearer, you can define a more complex encoder and decoder. The example uses only two layers of full-connected layers. - -## Bayesian Layer - -The following uses the APIs in `nn.probability.bnn_layers` of MindSpore to implement the BNN image classification model. The APIs in `nn.probability.bnn_layers` of MindSpore include `NormalPrior`, `NormalPosterior`, `ConvReparam`, `DenseReparam`, `DenseLocalReparam` and `WithBNNLossCell`. The biggest difference between BNN and DNN is that the weight and bias of the BNN layer are not fixed values, but follow a distribution. `NormalPrior` and `NormalPosterior` are respectively used to generate a prior distribution and a posterior distribution that follow a normal distribution. `ConvReparam` and `DenseReparam` are the Bayesian convolutional layer and fully connected layers implemented by using the reparameterization method, respectively. `DenseLocalReparam` is the Bayesian fully connected layers implemented by using the local reparameterization method. `WithBNNLossCell` is used to encapsulate the BNN and loss function. - -For details about how to use the APIs in `nn.probability.bnn_layers` to build a Bayesian neural network and classify images, see [Applying the Bayesian Network](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_deep_probability_programming.html#id3). 
- -## Bayesian Conversion - -For researchers who are unfamiliar with the Bayesian model, the MDP provides the `mindspore.nn.probability.transform` API to convert the DNN model into the BNN model by one click. - -The `__init__` function of the model conversion API `TransformToBNN` is defined as follows: - -```python -class TransformToBNN: - def __init__(self, trainable_dnn, dnn_factor=1, bnn_factor=1): - net_with_loss = trainable_dnn.network - self.optimizer = trainable_dnn.optimizer - self.backbone = net_with_loss.backbone_network - self.loss_fn = getattr(net_with_loss, "_loss_fn") - self.dnn_factor = dnn_factor - self.bnn_factor = bnn_factor - self.bnn_loss_file = None -``` - -The `trainable_bnn` parameter is a trainable DNN model packaged by `TrainOneStepCell`, `dnn_factor` and `bnn_factor` are the coefficient of the overall network loss calculated by the loss function and the coefficient of the KL divergence of each Bayesian layer, respectively. -`TransformToBNN` implements the following functions: - -- Function 1: Convert the entire model. - - The `transform_to_bnn_model` method can convert the entire DNN model into a BNN model. The definition is as follows: - - ```python - def transform_to_bnn_model(self, - get_dense_args=lambda dp: {"in_channels": dp.in_channels, "has_bias": dp.has_bias, - "out_channels": dp.out_channels, "activation": dp.activation}, - get_conv_args=lambda dp: {"in_channels": dp.in_channels, "out_channels": dp.out_channels, - "pad_mode": dp.pad_mode, "kernel_size": dp.kernel_size, - "stride": dp.stride, "has_bias": dp.has_bias, - "padding": dp.padding, "dilation": dp.dilation, - "group": dp.group}, - add_dense_args=None, - add_conv_args=None): - r""" - Transform the whole DNN model to BNN model, and wrap BNN model by TrainOneStepCell. - - Args: - get_dense_args (function): The arguments gotten from the DNN full connection layer. Default: lambda dp: - {"in_channels": dp.in_channels, "out_channels": dp.out_channels, "has_bias": dp.has_bias}. 
- get_conv_args (function): The arguments gotten from the DNN convolutional layer. Default: lambda dp: - {"in_channels": dp.in_channels, "out_channels": dp.out_channels, "pad_mode": dp.pad_mode, - "kernel_size": dp.kernel_size, "stride": dp.stride, "has_bias": dp.has_bias}. - add_dense_args (dict): The new arguments added to BNN full connection layer. Default: {}. - add_conv_args (dict): The new arguments added to BNN convolutional layer. Default: {}. - - Returns: - Cell, a trainable BNN model wrapped by TrainOneStepCell. - """ - - ``` - - `get_dense_args` specifies the parameters to be obtained from the fully connected layer of the DNN model. The default value is the common parameters of the fully connected layers of the DNN and BNN models. For details about the parameters, see [MindSpore API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/nn/mindspore.nn.Dense.html). `get_conv_args` specifies the parameters to be obtained from the convolutional layer of the DNN model. The default value is the common parameters of the convolutional layers of the DNN and BNN models. For details about the parameters, see [MindSpore API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/nn/mindspore.nn.Conv2d.html). `add_dense_args` and `add_conv_args` specify the new parameter values to be specified for the BNN layer. Note that the parameters in `add_dense_args` cannot be the same as those in `get_dense_args`. The same rule applies to `add_conv_args` and `get_conv_args`. - -- Function 2: Convert a specific layer. - - The `transform_to_bnn_layer` method can convert a specific layer (`nn.Dense` or `nn.Conv2d`) in the DNN model into a corresponding Bayesian layer. The definition is as follows: - - ```python - def transform_to_bnn_layer(self, dnn_layer, bnn_layer, get_args=None, add_args=None): - r""" - Transform a specific type of layers in DNN model to corresponding BNN layer. 
-
-        Args:
-            dnn_layer_type (Cell): The type of DNN layer to be transformed to BNN layer. The optional values are
-            nn.Dense, nn.Conv2d.
-            bnn_layer_type (Cell): The type of BNN layer to be transformed to. The optional values are
-            DenseReparameterization, ConvReparameterization.
-            get_args (dict): The arguments gotten from the DNN layer. Default: None.
-            add_args (dict): The new arguments added to BNN layer. Default: None.
-
-        Returns:
-            Cell, a trainable model wrapped by TrainOneStepCell, whose specific type of layer is transformed to the corresponding Bayesian layer.
-        """
-    ```
-
-    `dnn_layer` specifies the type of DNN layer to be converted into a BNN layer, `bnn_layer` specifies the type of BNN layer that the DNN layer will be converted to, and `get_args` and `add_args` specify the parameters obtained from the DNN layer and the parameters to be re-assigned to the BNN layer, respectively.
-
-For details about how to use `TransformToBNN` in MindSpore, see [DNN-to-BNN Conversion with One Click](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_deep_probability_programming.html#dnnbnn).
-
-## Bayesian Toolbox
-
-### Uncertainty Estimation
-
-One of the advantages of the BNN is that uncertainty can be obtained. MDP provides a toolbox (`mindspore.nn.probability.toolbox`) for uncertainty estimation at the upper layer. You can easily use the toolbox to calculate uncertainty. Uncertainty means the uncertainty of the prediction result of the deep learning model. Currently, most deep learning algorithms can only provide high-confidence prediction results, but cannot determine the certainty of the prediction results. There are two types of uncertainty: aleatoric uncertainty and epistemic uncertainty.
-
-- Aleatoric uncertainty: describes the internal noise of data, that is, the unavoidable error. This phenomenon cannot be weakened by adding sampling data.
-- Epistemic uncertainty: describes the estimation inaccuracy of input data incurred due to reasons such as poor training or insufficient training data. This may be alleviated by adding training data.
-
-The APIs of the uncertainty estimation toolbox are as follows:
-
-- `model`: trained model whose uncertainty is to be estimated.
-- `train_dataset`: dataset used for training, which is of the iterator type.
-- `task_type`: model type. The value is a character string. Enter regression or classification.
-- `num_classes`: For a classification model, you need to specify the number of labels of the classification.
-- `epochs`: number of epochs for training the uncertainty model.
-- `epi_uncer_model_path`: path for storing or loading models that compute epistemic uncertainty.
-- `ale_uncer_model_path`: path used to store or load models that calculate aleatoric uncertainty.
-- `save_model`: whether to store the model, which is of the Boolean type.
-
-Before using the model, you need to train the model.
The following uses LeNet5 as an example:
-
-```python
-from mindspore.nn.probability.toolbox.uncertainty_evaluation import UncertaintyEvaluation
-from mindspore.train.serialization import load_checkpoint, load_param_into_net
-
-if __name__ == '__main__':
-    # get trained model
-    network = LeNet5()
-    param_dict = load_checkpoint('checkpoint_lenet.ckpt')
-    load_param_into_net(network, param_dict)
-    # get train and eval dataset
-    ds_train = create_dataset('workspace/mnist/train')
-    ds_eval = create_dataset('workspace/mnist/test')
-    evaluation = UncertaintyEvaluation(model=network,
-                                       train_dataset=ds_train,
-                                       task_type='classification',
-                                       num_classes=10,
-                                       epochs=1,
-                                       epi_uncer_model_path=None,
-                                       ale_uncer_model_path=None,
-                                       save_model=False)
-    for eval_data in ds_eval.create_dict_iterator():
-        eval_data = Tensor(eval_data['image'], mstype.float32)
-        epistemic_uncertainty = evaluation.eval_epistemic_uncertainty(eval_data)
-        aleatoric_uncertainty = evaluation.eval_aleatoric_uncertainty(eval_data)
-        print('The shape of epistemic uncertainty is ', epistemic_uncertainty.shape)
-        print('The shape of aleatoric uncertainty is ', aleatoric_uncertainty.shape)
-```
-
-`eval_epistemic_uncertainty` calculates epistemic uncertainty, which is also called model uncertainty. Each estimation label of every sample has an uncertain value. `eval_aleatoric_uncertainty` calculates aleatoric uncertainty, which is also called data uncertainty. Each sample has an uncertain value.
-The output is as follows:
-
-```python
-The shape of epistemic uncertainty is (32, 10)
-The shape of aleatoric uncertainty is (32,)
-```
-
-The value of uncertainty is greater than or equal to zero. A larger value indicates higher uncertainty.
-
-### Anomaly Detection
-
-Anomaly Detection can find outliers that are "different from the main data distribution". For example, finding outliers in data preprocessing can help improve the model's fitting ability.
- -MDP provides anomaly detection toolbox (`VAEAnomalyDetection`) based on the variational autoencoder (VAE) in the upper layer. Similar to the use of VAE, we only need to customize the encoder and decoder (DNN model), initialize the relevant parameters, then you can use the toolbox to detect abnormal points. - -The interface of the VAE-based anomaly detection toolbox is as follows: - -- `encoder`:Encoder(Cell) -- `decoder`:Decoder(Cell) -- `hidden_size`:The size of encoder's output tensor -- `latent_size`:The size of the latent space - -Use Encoder and Decoder, set hidden_size and latent_size, initialize the class, and then pass the dataset to detect abnormal points. - -```python -from mindspore.nn.probability.toolbox import VAEAnomalyDetection - -if __name__ == '__main__': - encoder = Encoder() - decoder = Decoder() - ood = VAEAnomalyDetection(encoder=encoder, decoder=decoder, - hidden_size=400, latent_size=20) - ds_train = create_dataset('workspace/mnist/train') - ds_eval = create_dataset('workspace/mnist/test') - model = ood.train(ds_train) - for sample in ds_eval.create_dict_iterator(output_numpy=True, num_epochs=1): - sample_x = Tensor(sample['image'], dtype=mstype.float32) - score = ood.predict_outlier_score(sample_x) - outlier = ood.predict_outlier(sample_x) - print(score, outlier) -``` - -The output of `score` is the anomaly score of the sample. `outlier` is a Boolean type, `True` represents an abnormal point, and `False` represents a normal point. 
diff --git a/docs/programming_guide/source_en/run.md b/docs/programming_guide/source_en/run.md deleted file mode 100644 index 64a57209f3904464e6ccf947be5479b6f5f60f00..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/run.md +++ /dev/null @@ -1,399 +0,0 @@ -# Running Mode - - - -- [Running Mode](#running-mode) - - [Overview](#overview) - - [Executing a Single Operator](#executing-a-single-operator) - - [Executing a Common Function](#executing-a-common-function) - - [Executing a Network Model](#executing-a-network-model) - - [Executing a Training Model](#executing-a-training-model) - - [Executing an Inference Model](#executing-an-inference-model) - - - - - -## Overview - -There are three execution modes: single operator, common function, and network training model. - -> Note: This document is applicable to GPU and Ascend environments. - -## Executing a Single Operator - -Execute a single operator and output the result. - -A code example is as follows: - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor - -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -conv = nn.Conv2d(3, 4, 3, bias_init='zeros') -input_data = Tensor(np.ones([1, 3, 5, 5]).astype(np.float32)) -output = conv(input_data) -print(output.asnumpy()) -``` - -The output is as follows: - -```python -[[[[ 0.06022915 0.06149777 0.06149777 0.06149777 0.01145121] - [ 0.06402162 0.05889071 0.05889071 0.05889071 -0.00933781] - [ 0.06402162 0.05889071 0.05889071 0.05889071 -0.00933781] - [ 0.06402162 0.05889071 0.05889071 0.05889071 -0.00933781] - [ 0.02712326 0.02096302 0.02096302 0.02096302 -0.01119636]] - - [[-0.0258286 -0.03362969 -0.03362969 -0.03362969 -0.00799183] - [-0.0513729 -0.06778982 -0.06778982 -0.06778982 -0.03168458] - [-0.0513729 -0.06778982 -0.06778982 -0.06778982 -0.03168458] - [-0.0513729 -0.06778982 -0.06778982 -0.06778982 -0.03168458] - [-0.04186669 -0.07266843 -0.07266843 -0.07266843 -0.04836193]] 
- - [[-0.00840744 -0.03043237 -0.03043237 -0.03043237 0.00172079] - [ 0.00401019 -0.03755453 -0.03755453 -0.03755453 -0.00851137] - [ 0.00401019 -0.03755453 -0.03755453 -0.03755453 -0.00851137] - [ 0.00401019 -0.03755453 -0.03755453 -0.03755453 -0.00851137] - [ 0.00270888 -0.03718876 -0.03718876 -0.03718876 -0.03043662]] - - [[-0.00982172 0.02009856 0.02009856 0.02009856 0.03327979] - [ 0.02529106 0.04035065 0.04035065 0.04035065 0.01782833] - [ 0.02529106 0.04035065 0.04035065 0.04035065 0.01782833] - [ 0.02529106 0.04035065 0.04035065 0.04035065 0.01782833] - [ 0.01015155 0.00781826 0.00781826 0.00781826 -0.02884173]]]] -``` - -> Note: Due to random factors in weight initialization, the actual output results may be different, which is for reference only. - -## Executing a Common Function - -Combine multiple operators into a function, execute these operators by calling the function, and output the result. - -A code example is as follows: - -```python -import numpy as np -from mindspore import context, Tensor -import mindspore.ops as ops - -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -def add_func(x, y): - z = ops.add(x, y) - z = ops.add(z, x) - return z - -x = Tensor(np.ones([3, 3], dtype=np.float32)) -y = Tensor(np.ones([3, 3], dtype=np.float32)) -output = add_func(x, y) -print(output.asnumpy()) -``` - -The output is as follows: - -```python -[[3. 3. 3.] - [3. 3. 3.] - [3. 3. 3.]] -``` - -## Executing a Network Model - -The [Model API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.html#mindspore.Model) of MindSpore is an advanced API used for training and validation. Layers with the training or inference function can be combined into an object. The training, inference, and prediction functions can be implemented by calling the train, eval, and predict APIs, respectively. - -You can transfer the initialized Model APIs such as the network, loss function, and optimizer as required. 
You can also configure amp_level to implement mixed precision and configure metrics to implement model evaluation. - -> Executing the network model will generate a `kernel_meta` directory under the execution directory, and save the operator cache files generated by network compilation to this directory during execution, including `.o`, `.info` and `.json` files. If the user executes the same network model again, or only some changes are made, MindSpore will automatically call the reusable operator cache file in the `kernel_meta` directory, which significantly reduces network compilation time and improves execution performance. For details, please refer to [Incremental Operator Build](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/incremental_operator_build.html) - -Before executing the network, download and unzip the required dataset to the specified directory: - -```bash -!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test -!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate -!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate -!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate -!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate -!tree ./datasets/MNIST_Data -``` - -```text -./datasets/MNIST_Data -├── test -│ ├── t10k-images-idx3-ubyte -│ └── t10k-labels-idx1-ubyte -└── train - ├── train-images-idx3-ubyte - └── train-labels-idx1-ubyte - -2 directories, 4 files -``` - -### Executing a Training Model - -Call the train API of Model to implement training. 
- -A code example is as follows: - -```python -import os -import mindspore.dataset.vision.c_transforms as CV -from mindspore.dataset.vision import Inter -import mindspore.dataset as ds -import mindspore.dataset.transforms.c_transforms as CT -import mindspore.nn as nn -from mindspore import context, Model -from mindspore import dtype as mstype -from mindspore.common.initializer import Normal -from mindspore.train.callback import LossMonitor, ModelCheckpoint, CheckpointConfig - - -def create_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1): - """ - create dataset for train or test - """ - # define dataset - mnist_ds = ds.MnistDataset(data_path) - - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - rescale_nml = 1 / 0.3081 - shift_nml = -1 * 0.1307 / 0.3081 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode - rescale_nml_op = CV.Rescale(rescale_nml, shift_nml) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = CT.TypeCast(mstype.int32) - - # apply map operations on images - mnist_ds = mnist_ds.map(input_columns="label", operations=type_cast_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=resize_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_nml_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=hwc2chw_op, num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script - mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True) - mnist_ds = mnist_ds.repeat(repeat_size) - - return mnist_ds - - -class 
LeNet5(nn.Cell): - """ - Lenet network - - Args: - num_class (int): Num classes. Default: 10. - num_channel (int): Num channels. Default: 1. - - Returns: - Tensor, output tensor - Examples: - >>> LeNet(num_class=10) - - """ - - def __init__(self, num_class=10, num_channel=1): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x - - -if __name__ == "__main__": - context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - - model_path = "./models/ckpt/mindspore_run/" - os.system("rm -rf {0}*.ckpt {0}*.meta {0}*.pb".format(model_path)) - - ds_train_path = "./datasets/MNIST_Data/train/" - ds_train = create_dataset(ds_train_path, 32) - - network = LeNet5(10) - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9) - config_ck = CheckpointConfig(save_checkpoint_steps=1875, keep_checkpoint_max=5) - ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet", directory=model_path, config=config_ck) - model = Model(network, net_loss, net_opt) - - print("============== Starting Training ==============") - model.train(1, ds_train, callbacks=[LossMonitor(375), ckpoint_cb], dataset_sink_mode=True) -``` - -```text -============== Starting Training ============== -epoch: 1 step: 375, loss is 2.2898183 -epoch: 1 step: 750, loss is 2.2777305 -epoch: 1 step: 1125, loss is 0.27802905 
-epoch: 1 step: 1500, loss is 0.032973606 -epoch: 1 step: 1875, loss is 0.06105463 -``` - -> For details about how to obtain the MNIST dataset used in the example, see [Downloading the Dataset](https://www.mindspore.cn/tutorial/training/en/master/quick_start/quick_start.html#downloading-the-dataset). -> Use the PyNative mode for debugging, including the execution of single operator, common function, and network training model. For details, see [Debugging in PyNative Mode](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/debug_in_pynative_mode.html). -> To freely control loop iterations, traverse datasets, and so on, refer to the "Customizing a Training Cycle" part of the official website programming guide "[Training](https://www.mindspore.cn/doc/programming_guide/en/master/train.html#customizing-a-training-cycle)". - -### Executing an Inference Model - -Call the eval API of Model to implement inference. To facilitate model evaluation, you can set metrics when the Model API is initialized. - -Metrics are used to evaluate models. Common metrics include Accuracy, Fbeta, Precision, Recall, and TopKCategoricalAccuracy. Generally, the comprehensive model quality cannot be evaluated by one model metric. Therefore, multiple metrics are often used together to evaluate the model. - -Common built-in evaluation metrics are as follows: - -- `Accuracy`: evaluates a classification model. Generally, accuracy refers to the percentage of results correctly predicted by the model to all results. Formula: $$Accuracy = (TP + TN)/(TP + TN + FP + FN)$$ - -- `Precision`: percentage of correctly predicted positive results to all predicted positive results. Formula: $$Precision = TP/(TP + FP)$$ - -- `Recall`: percentage of correctly predicted positive results to all actual positive results. Formula: $$Recall = TP/(TP + FN)$$ - -- `Fbeta`: harmonic mean of precision and recall. 
- -Formula: $$F_\beta = (1 + \beta^2) \cdot \frac{precision \cdot recall}{(\beta^2 \cdot precision) + recall}$$ - -- `TopKCategoricalAccuracy`: calculates the top K categorical accuracy. - -A code example is as follows: - -```python -import mindspore.dataset as ds -import mindspore.dataset.transforms.c_transforms as CT -import mindspore.dataset.vision.c_transforms as CV -import mindspore.nn as nn -from mindspore import context, Model, load_checkpoint, load_param_into_net -from mindspore import dtype as mstype -from mindspore.common.initializer import Normal -from mindspore.dataset.vision import Inter -from mindspore.nn import Accuracy, Precision - - -class LeNet5(nn.Cell): - """ - Lenet network - - Args: - num_class (int): Num classes. Default: 10. - num_channel (int): Num channels. Default: 1. - - Returns: - Tensor, output tensor - Examples: - >>> LeNet(num_class=10) - - """ - - def __init__(self, num_class=10, num_channel=1): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x - - -def create_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1): - """ - create dataset for train or test - """ - # define dataset - mnist_ds = ds.MnistDataset(data_path) - - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - rescale_nml = 1 / 0.3081 - shift_nml = -1 * 0.1307 / 0.3081 - - # define map operations - resize_op 
= CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode - rescale_nml_op = CV.Rescale(rescale_nml, shift_nml) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = CT.TypeCast(mstype.int32) - - # apply map operations on images - mnist_ds = mnist_ds.map(input_columns="label", operations=type_cast_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=resize_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_nml_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=hwc2chw_op, num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script - mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True) - mnist_ds = mnist_ds.repeat(repeat_size) - - return mnist_ds - - -if __name__ == "__main__": - context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - - model_path = "./models/ckpt/mindspore_run/" - ds_eval_path = "./datasets/MNIST_Data/test/" - network = LeNet5(10) - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - repeat_size = 1 - net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9) - model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy(), "Precision": Precision()}) - - print("============== Starting Testing ==============") - param_dict = load_checkpoint(model_path+"checkpoint_lenet-1_1875.ckpt") - load_param_into_net(network, param_dict) - ds_eval = create_dataset(ds_eval_path, 32, repeat_size) - - acc = model.eval(ds_eval, dataset_sink_mode=True) - print("============== {} ==============".format(acc)) -``` - -```text -============== Starting 
Testing ============== -============== {'Accuracy': 0.960136217948718, 'Precision': array([0.95763547, 0.98059965, 0.99153439, 0.93333333, 0.97322348, - 0.99385749, 0.98502674, 0.93179724, 0.8974359 , 0.97148676])} ============== -``` - -In the preceding information: - -- `load_checkpoint`: loads the checkpoint model parameter file and returns a parameter dictionary. -- `checkpoint_lenet-1_1875.ckpt`: name of the saved checkpoint model file. -- `load_param_into_net`: loads parameters to the network. - -> For details about how to save the `checkpoint_lenet-1_1875.ckpt` file, see [Training the Network](https://www.mindspore.cn/tutorial/training/en/master/quick_start/quick_start.html#training-the-network). diff --git a/docs/programming_guide/source_en/sampler.md b/docs/programming_guide/source_en/sampler.md deleted file mode 100644 index 6f65a6b71922dad52f66faf2dafb0327501728ab..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/sampler.md +++ /dev/null @@ -1,275 +0,0 @@ -# Sampler - - - -- [Sampler](#sampler) - - [Overview](#overview) - - [MindSpore Samplers](#mindspore-samplers) - - [RandomSampler](#randomsampler) - - [WeightedRandomSampler](#weightedrandomsampler) - - [SubsetRandomSampler](#subsetrandomsampler) - - [PKSampler](#pksampler) - - [DistributedSampler](#distributedsampler) - - [User-defined Sampler](#user-defined-sampler) - - - - - -## Overview - -MindSpore provides multiple samplers to help you sample datasets for various purposes to meet training requirements and solve problems such as oversized datasets and uneven distribution of sample categories. You only need to import the sampler object when loading the dataset for sampling the data. - -The following table lists part of the common samplers supported by MindSpore. In addition, you can define your own sampler class as required. For more samplers, see [MindSpore API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.html). 
- -| Sampler | Description | -| ---- | ---- | -| RandomSampler | Random sampler, which randomly samples a specified amount of data from a dataset. | -| WeightedRandomSampler | Weighted random sampler, which randomly samples a specified amount of data from the first N samples based on the specified probability list with the length of N. | -| SubsetRandomSampler | Subset random sampler, which randomly samples a specified amount of data within a specified index range. | -| PKSampler | PK sampler, which samples K pieces of data from the specified P categories. | -| DistributedSampler | Distributed sampler, which samples dataset shards in distributed training. | - -## MindSpore Samplers - -The following uses the CIFAR-10 as an example to introduce several common MindSpore samplers. - -Download the CIFAR-10 data set and unzip it to the specified path, execute the following command: - -```bash -!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz -!mkdir -p datasets -!tar -xzf cifar-10-binary.tar.gz -C datasets -!mkdir -p datasets/cifar-10-batches-bin/train datasets/cifar-10-batches-bin/test -!mv -f datasets/cifar-10-batches-bin/test_batch.bin datasets/cifar-10-batches-bin/test -!mv -f datasets/cifar-10-batches-bin/data_batch*.bin datasets/cifar-10-batches-bin/batches.meta.txt datasets/cifar-10-batches-bin/train -!tree ./datasets/cifar-10-batches-bin -``` - -```text -./datasets/cifar-10-batches-bin -├── readme.html -├── test -│ └── test_batch.bin -└── train - ├── batches.meta.txt - ├── data_batch_1.bin - ├── data_batch_2.bin - ├── data_batch_3.bin - ├── data_batch_4.bin - └── data_batch_5.bin - -2 directories, 8 files -``` - -### RandomSampler - -Randomly samples a specified amount of data from the index sequence. - -The following example uses a random sampler to randomly sample five pieces of data from the CIFAR-10 dataset with and without replacement, and displays shapes and labels of the loaded data. 
- -```python -import mindspore.dataset as ds - -ds.config.set_seed(0) - -DATA_DIR = "./datasets/cifar-10-batches-bin/train/" - -print("------ Without Replacement ------") - -sampler = ds.RandomSampler(num_samples=5) -dataset1 = ds.Cifar10Dataset(DATA_DIR, sampler=sampler) - -for data in dataset1.create_dict_iterator(): - print("Image shape:", data['image'].shape, ", Label:", data['label']) - -print("------ With Replacement ------") - -sampler = ds.RandomSampler(replacement=True, num_samples=5) -dataset2 = ds.Cifar10Dataset(DATA_DIR, sampler=sampler) - -for data in dataset2.create_dict_iterator(): - print("Image shape:", data['image'].shape, ", Label:", data['label']) -``` - -The output is as follows: - -```text ------- Without Replacement ------ -Image shape: (32, 32, 3) , Label: 1 -Image shape: (32, 32, 3) , Label: 6 -Image shape: (32, 32, 3) , Label: 6 -Image shape: (32, 32, 3) , Label: 0 -Image shape: (32, 32, 3) , Label: 4 ------- With Replacement ------ -Image shape: (32, 32, 3) , Label: 0 -Image shape: (32, 32, 3) , Label: 9 -Image shape: (32, 32, 3) , Label: 3 -Image shape: (32, 32, 3) , Label: 9 -Image shape: (32, 32, 3) , Label: 6 -``` - -### WeightedRandomSampler - -Specifies a sampling probability list with the length of N and randomly samples a specified amount of data from the first N samples based on the probability. - -The following example uses a weighted random sampler to obtain 6 samples based on probability from the first 10 samples in the CIFAR-10 dataset, and displays shapes and labels of the loaded data. 
- -```python -import mindspore.dataset as ds - -ds.config.set_seed(1) - -DATA_DIR = "./datasets/cifar-10-batches-bin/train/" - -weights = [1, 1, 0, 0, 0, 0, 0, 0, 0, 0] -sampler = ds.WeightedRandomSampler(weights, num_samples=6) -dataset = ds.Cifar10Dataset(DATA_DIR, sampler=sampler) - -for data in dataset.create_dict_iterator(): - print("Image shape:", data['image'].shape, ", Label:", data['label']) -``` - -The output is as follows: - -```text -Image shape: (32, 32, 3) , Label: 9 -Image shape: (32, 32, 3) , Label: 9 -Image shape: (32, 32, 3) , Label: 6 -Image shape: (32, 32, 3) , Label: 9 -Image shape: (32, 32, 3) , Label: 6 -Image shape: (32, 32, 3) , Label: 6 -``` - -### SubsetRandomSampler - -Randomly samples a specified amount of data from the specified index subset. - -The following example uses a subset random sampler to obtain 3 samples from the specified subset in the CIFAR-10 dataset, and displays shapes and labels of the loaded data. - -```python -import mindspore.dataset as ds - -ds.config.set_seed(2) - -DATA_DIR = "./datasets/cifar-10-batches-bin/train/" - -indices = [0, 1, 2, 3, 4, 5] -sampler = ds.SubsetRandomSampler(indices, num_samples=3) -dataset = ds.Cifar10Dataset(DATA_DIR, sampler=sampler) - -for data in dataset.create_dict_iterator(): - print("Image shape:", data['image'].shape, ", Label:", data['label']) -``` - -The output is as follows: - -```text -Image shape: (32, 32, 3) , Label: 1 -Image shape: (32, 32, 3) , Label: 6 -Image shape: (32, 32, 3) , Label: 4 -``` - -### PKSampler - -Samples K pieces of data from the specified P categories. - -The following example uses the PK sampler to obtain 2 samples from each category in the CIFAR-10 dataset, not more than 20 samples in total, and displays shapes and labels of the read data. 
- -```python -import mindspore.dataset as ds - -ds.config.set_seed(3) - -DATA_DIR = "./datasets/cifar-10-batches-bin/train/" - -sampler = ds.PKSampler(num_val=2, class_column='label', num_samples=20) -dataset = ds.Cifar10Dataset(DATA_DIR, sampler=sampler) - -for data in dataset.create_dict_iterator(): - print("Image shape:", data['image'].shape, ", Label:", data['label']) -``` - -The output is as follows: - -```text -Image shape: (32, 32, 3) , Label: 0 -Image shape: (32, 32, 3) , Label: 0 -Image shape: (32, 32, 3) , Label: 1 -Image shape: (32, 32, 3) , Label: 1 -Image shape: (32, 32, 3) , Label: 2 -Image shape: (32, 32, 3) , Label: 2 -Image shape: (32, 32, 3) , Label: 3 -Image shape: (32, 32, 3) , Label: 3 -Image shape: (32, 32, 3) , Label: 4 -Image shape: (32, 32, 3) , Label: 4 -Image shape: (32, 32, 3) , Label: 5 -Image shape: (32, 32, 3) , Label: 5 -Image shape: (32, 32, 3) , Label: 6 -Image shape: (32, 32, 3) , Label: 6 -Image shape: (32, 32, 3) , Label: 7 -Image shape: (32, 32, 3) , Label: 7 -Image shape: (32, 32, 3) , Label: 8 -Image shape: (32, 32, 3) , Label: 8 -Image shape: (32, 32, 3) , Label: 9 -Image shape: (32, 32, 3) , Label: 9 -``` - -### DistributedSampler - -Samples dataset shards in distributed training. - -The following example uses a distributed sampler to divide a generated dataset into three shards, obtains no more than three data samples in each shard, and displays the loaded data on shard number 0. 
- -```python -import numpy as np -import mindspore.dataset as ds - -data_source = [0, 1, 2, 3, 4, 5, 6, 7, 8] - -sampler = ds.DistributedSampler(num_shards=3, shard_id=0, shuffle=False, num_samples=3) -dataset = ds.NumpySlicesDataset(data_source, column_names=["data"], sampler=sampler) - -for data in dataset.create_dict_iterator(): - print(data) -``` - -The output is as follows: - -```text -{'data': Tensor(shape=[], dtype=Int64, value= 0)} -{'data': Tensor(shape=[], dtype=Int64, value= 3)} -{'data': Tensor(shape=[], dtype=Int64, value= 6)} -``` - -## User-defined Sampler - -You can inherit the `Sampler` base class and define the sampling method of the sampler by implementing the `__iter__` method. - -The following example defines a sampler with an interval of 2 samples from subscript 0 to subscript 9, applies the sampler to the CIFAR-10 dataset, and displays shapes and labels of the read data. - -```python -import mindspore.dataset as ds - -class MySampler(ds.Sampler): - def __iter__(self): - for i in range(0, 10, 2): - yield i - -DATA_DIR = "./datasets/cifar-10-batches-bin/train/" - -dataset = ds.Cifar10Dataset(DATA_DIR, sampler=MySampler()) - -for data in dataset.create_dict_iterator(): - print("Image shape:", data['image'].shape, ", Label:", data['label']) -``` - -The output is as follows: - -```text -Image shape: (32, 32, 3) , Label: 6 -Image shape: (32, 32, 3) , Label: 9 -Image shape: (32, 32, 3) , Label: 1 -Image shape: (32, 32, 3) , Label: 2 -Image shape: (32, 32, 3) , Label: 8 -``` diff --git a/docs/programming_guide/source_en/security_and_privacy.md b/docs/programming_guide/source_en/security_and_privacy.md deleted file mode 100644 index 1e025933775dbc54aa59611e963a62a1c18d9586..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/security_and_privacy.md +++ /dev/null @@ -1,74 +0,0 @@ -# Security and Privacy Protection - - - -- [Security and Privacy Protection](#security-and-privacy-protection) - - [Overview](#overview) - - 
[Adversarial Robustness](#adversarial-robustness) - - [Attack](#attack) - - [Defense](#defense) - - [Detector](#detector) - - [Model Security Test](#model-security-test) - - [Fuzzer](#fuzzer) - - [Differential Privacy Training](#differential-privacy-training) - - [DPModel](#dpmodel) - - [Suppress Privacy Training](#suppress-privacy-training) - - [SuppressModel](#suppressmodel) - - [Privacy Breach Risk Assessment](#privacy-breach-risk-assessment) - - [Membership Inference](#membership-inference) - - - - - -## Overview - -This document describes AI security and privacy protection. As a universal technology, AI brings huge opportunities and benefits, but also faces new security and privacy protection challenges. MindArmour is a sub-project of MindSpore. It provides security and privacy protection capabilities for MindSpore, including technologies such as adversarial robustness, model security test, differential privacy training, and privacy breach risk assessment. - -## Adversarial Robustness - -### Attack - -The `Attack` base class defines the interface for generating adversarial examples. Its subclasses implement various specific generation algorithms and allow security personnel to quickly and efficiently generate adversarial examples for attacking AI models to evaluate the robustness of the models. - -### Defense - -The `Defense` base class defines the interface for adversarial training. Its subclasses implement various adversarial training algorithms to enhance the adversarial robustness of the models. - -### Detector - -The `Detector` base class defines the interface for adversarial sample detection. Its subclasses implement various specific detection algorithms to enhance the adversarial robustness of the models. - -For details, see [Improving Model Security with NAD Algorithm](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/improve_model_security_nad.html). 
- -## Model Security Test - -### Fuzzer - -The `Fuzzer` class controls the fuzzing process based on the neuron coverage gain. It uses natural perturbation and adversarial sample generation methods as the mutation policy to activate more neurons to explore different types of model output results and error behavior, helping users enhance model robustness. - -For details, see [Testing Model Security Using Fuzz Testing](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/test_model_security_fuzzing.html). - -## Differential Privacy Training - -### DPModel - -`DPModel` inherits `mindspore.Model` and provides the entry function for differential privacy training. - -For details, see [Protecting User Privacy with Differential Privacy Mechanism](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/protect_user_privacy_with_differential_privacy.html). - -## Suppress Privacy Training - -### SuppressModel - -`SuppressModel` inherits `mindspore.Model` and provides the entry function for suppress privacy training. - -For details, see [Protecting User Privacy with Suppress Privacy Mechanism](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/protect_user_privacy_with_suppress_privacy.html). - -## Privacy Breach Risk Assessment - -### Membership Inference - -The `MembershipInference` class provides a reverse analysis method. It can infer whether a sample is in the training set of a model based on the prediction information of the model on the sample to evaluate the privacy breach risk of the model. - -For details, see [Testing Model Security with Membership Inference](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/test_model_security_membership_inference.html). 
diff --git a/docs/programming_guide/source_en/syntax_list.rst b/docs/programming_guide/source_en/syntax_list.rst deleted file mode 100644 index 598315add5db81b1f10b7b60c13dc1558cbcb654..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/syntax_list.rst +++ /dev/null @@ -1,8 +0,0 @@ -Syntax list -============ - -.. toctree:: - :maxdepth: 1 - - Static Graph Syntax Support - Tensor Index Support diff --git a/docs/programming_guide/source_en/tensor.md b/docs/programming_guide/source_en/tensor.md deleted file mode 100644 index 3158e2db3456a9b05d0830bee51d8038001385cc..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/tensor.md +++ /dev/null @@ -1,130 +0,0 @@ -# Tensor - - - -- [Tensor](#tensor) - - [Overview](#overview) - - [Tensor Structure](#tensor-structure) - - [Tensor Attributes and Methods](#tensor-attributes-and-methods) - - [Attributes](#attributes) - - [Methods](#methods) - - - - - -## Overview - -Tensor is a basic data structure in the MindSpore network computing. For details about data types in tensors, see [dtype](https://www.mindspore.cn/doc/programming_guide/en/master/dtype.html). - -Tensors of different dimensions represent different data. For example, a 0-dimensional tensor represents a scalar, a 1-dimensional tensor represents a vector, a 2-dimensional tensor represents a matrix, and a 3-dimensional tensor may represent the three channels of RGB images. - -> All examples in this document can be run in the PyNative mode. - -## Tensor Structure - -During tensor creation, the `Tensor`, `float`, `int`, `bool`, `tuple`, `list`, and `NumPy.array` types can be transferred, while `tuple` and `list` can only store `float`, `int`, and `bool` data. - -`dtype` can be specified when `Tensor` is initialized. 
When the `dtype` is not specified, if the initial value is `int`, `float` or `bool`, then a 0-dimensional `Tensor` with data types `mindspore.int32`, `mindspore.float32` or `mindspore.bool_` will be generated respectively. If the initial values are `tuple` and `list`, the generated 1-dimensional `Tensor` data type corresponds to the type stored in `tuple` and `list`. If it contains multiple different types of data, follow the below priority: `bool` < `int` < `float`, to select the mindspore data type corresponding to the highest relative priority type. If the initial value is `Tensor`, the consistent data type `Tensor` is generated. If the initial value is `NumPy.array`, the corresponding data type `Tensor` is generated. - -A code example is as follows: - -```python -import numpy as np -from mindspore import Tensor -from mindspore import dtype as mstype - -x = Tensor(np.array([[1, 2], [3, 4]]), mstype.int32) -y = Tensor(1.0, mstype.int32) -z = Tensor(2, mstype.int32) -m = Tensor(True, mstype.bool_) -n = Tensor((1, 2, 3), mstype.int16) -p = Tensor([4.0, 5.0, 6.0], mstype.float64) -q = Tensor(p, mstype.float64) - -print(x, "\n\n", y, "\n\n", z, "\n\n", m, "\n\n", n, "\n\n", p, "\n\n", q) -``` - -The following information is displayed: - -```text -[[1 2] - [3 4]] - -1 - -2 - -True - -[1 2 3] - -[4. 5. 6.] - -[4. 5. 6.] -``` - -## Tensor Attributes and Methods - -### Attributes - -Tensor attributes include shape and data type (dtype). - -- shape: a tuple -- dtype: a data type of MindSpore - -A code example is as follows: - -```python -import numpy as np -from mindspore import Tensor -from mindspore import dtype as mstype - -x = Tensor(np.array([[1, 2], [3, 4]]), mstype.int32) -x_shape = x.shape -x_dtype = x.dtype - -print(x_shape, x_dtype) -``` - -The following information is displayed: - -```text -(2, 2) Int32 -``` - -### Methods - -Tensor methods include `all`, `any`, and `asnumpy`. 
Currently, the `all` and `any` methods support only Ascend, and the data type of `Tensor` is required to be `mindspore.bool_`. - -- `all(axis, keep_dims)`: performs the `and` operation on a specified dimension to reduce the dimension. `axis` indicates the reduced dimension, and `keep_dims` indicates whether to retain the reduced dimension. -- `any(axis, keep_dims)`: performs the `or` operation on a specified dimension to reduce the dimension. The parameter meaning is the same as that of `all`. -- `asnumpy()`: converts `Tensor` to a NumPy array. - -A code example is as follows: - -```python -import numpy as np -from mindspore import Tensor -from mindspore import dtype as mstype - -x = Tensor(np.array([[True, True], [False, False]]), mstype.bool_) -x_all = x.all() -x_any = x.any() -x_array = x.asnumpy() - -print(x_all, "\n\n", x_any, "\n\n", x_array) -``` - -The following information is displayed: - -```text -False - -True - -[[ True True] - [False False]] - -``` diff --git a/docs/programming_guide/source_en/tokenizer.md b/docs/programming_guide/source_en/tokenizer.md deleted file mode 100644 index 5f62cd8c1f03f3e88d28dec50160aae98e57d8e1..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/tokenizer.md +++ /dev/null @@ -1,382 +0,0 @@ -# Tokenizer - - - -- [Tokenizer](#tokenizer) - - [Overview](#overview) - - [MindSpore Tokenizers](#mindspore-tokenizers) - - [BertTokenizer](#berttokenizer) - - [JiebaTokenizer](#jiebatokenizer) - - [SentencePieceTokenizer](#sentencepiecetokenizer) - - [UnicodeCharTokenizer](#unicodechartokenizer) - - [WhitespaceTokenizer](#whitespacetokenizer) - - [WordpieceTokenizer](#wordpiecetokenizer) - - - - - -## Overview - -Tokenization is the process of splitting a continuous character sequence into a sequence of words (tokens) according to certain specifications. Reasonable tokenization is helpful for semantic comprehension. 
- -MindSpore provides a tokenizer for multiple purposes to help you process text with high performance. You can build your own dictionaries, use appropriate tokenizers to split sentences into different tokens, and search for indexes of the tokens in the dictionaries. - -MindSpore provides the following tokenizers. In addition, you can customize tokenizers as required. - -| Tokenizer | Description | -| -- | -- | -| BasicTokenizer | Performs tokenization on scalar text data based on specified rules. | -| BertTokenizer | Processes BERT text data. | -| JiebaTokenizer | Dictionary-based Chinese character string tokenizer. | -| RegexTokenizer | Performs tokenization on scalar text data based on a specified regular expression. | -| SentencePieceTokenizer | Performs tokenization based on the open-source tool package SentencePiece. | -| UnicodeCharTokenizer | Tokenizes scalar text data into Unicode characters. | -| UnicodeScriptTokenizer | Performs tokenization on scalar text data based on Unicode boundaries. | -| WhitespaceTokenizer | Performs tokenization on scalar text data based on spaces. | -| WordpieceTokenizer | Performs tokenization on scalar text data based on the word set. | - -For details about tokenizers, see [MindSpore API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.text.html). - -## MindSpore Tokenizers - -The following describes how to use common tokenizers. - -### BertTokenizer - -`BertTokenizer` performs tokenization by calling `BasicTokenizer` and `WordpieceTokenizer`. - -The following example builds a text dataset and a character string list, uses `BertTokenizer` to perform tokenization on the dataset, and displays the text results before and after tokenization. 
- -```python -import mindspore.dataset as ds -import mindspore.dataset.text as text - -input_list = ["床前明月光", "疑是地上霜", "举头望明月", "低头思故乡", "I am making small mistakes during working hours", - "😀嘿嘿😃哈哈😄大笑😁嘻嘻", "繁體字"] -dataset = ds.NumpySlicesDataset(input_list, column_names=["text"], shuffle=False) - -print("------------------------before tokenization----------------------------") - -for data in dataset.create_dict_iterator(output_numpy=True): - print(text.to_str(data['text'])) - -vocab_list = [ - "床", "前", "明", "月", "光", "疑", "是", "地", "上", "霜", "举", "头", "望", "低", "思", "故", "乡", - "繁", "體", "字", "嘿", "哈", "大", "笑", "嘻", "i", "am", "mak", "make", "small", "mistake", - "##s", "during", "work", "##ing", "hour", "😀", "😃", "😄", "😁", "+", "/", "-", "=", "12", - "28", "40", "16", " ", "I", "[CLS]", "[SEP]", "[UNK]", "[PAD]", "[MASK]", "[unused1]", "[unused10]"] - -vocab = text.Vocab.from_list(vocab_list) -tokenizer_op = text.BertTokenizer(vocab=vocab) -dataset = dataset.map(operations=tokenizer_op) - -print("------------------------after tokenization-----------------------------") - -for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True): - print(text.to_str(i['text'])) -``` - -The output is as follows: - -```text -------------------------before tokenization---------------------------- -床前明月光 -疑是地上霜 -举头望明月 -低头思故乡 -I am making small mistakes during working hours -😀嘿嘿😃哈哈😄大笑😁嘻嘻 -繁體字 -------------------------after tokenization----------------------------- -['床' '前' '明' '月' '光'] -['疑' '是' '地' '上' '霜'] -['举' '头' '望' '明' '月'] -['低' '头' '思' '故' '乡'] -['I' 'am' 'mak' '##ing' 'small' 'mistake' '##s' 'during' 'work' '##ing' - 'hour' '##s'] -['😀' '嘿' '嘿' '😃' '哈' '哈' '😄' '大' '笑' '😁' '嘻' '嘻'] -['繁' '體' '字'] -``` - -### JiebaTokenizer - -`JiebaTokenizer` performs Chinese tokenization based on Jieba. - -Download the dictionary files `hmm_model.utf8` and `jieba.dict.utf8` and put them in the specified location. 
- -```bash -!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/hmm_model.utf8 -!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/jieba.dict.utf8 -!mkdir -p ./datasets/tokenizer/ -!mv hmm_model.utf8 jieba.dict.utf8 -t ./datasets/tokenizer/ -!tree ./datasets/tokenizer/ -``` - -```text -./datasets/tokenizer/ -├── hmm_model.utf8 -└── jieba.dict.utf8 - -0 directories, 2 files -``` - -The following example builds a text dataset, uses the HMM and MP dictionary files to create a `JiebaTokenizer` object, performs tokenization on the dataset, and displays the text results before and after tokenization. - -```python -import mindspore.dataset as ds -import mindspore.dataset.text as text - -input_list = ["今天天气太好了我们一起去外面玩吧"] -dataset = ds.NumpySlicesDataset(input_list, column_names=["text"], shuffle=False) - -print("------------------------before tokenization----------------------------") - -for data in dataset.create_dict_iterator(output_numpy=True): - print(text.to_str(data['text'])) - -# files from open source repository https://github.com/yanyiwu/cppjieba/tree/master/dict -HMM_FILE = "./datasets/tokenizer/hmm_model.utf8" -MP_FILE = "./datasets/tokenizer/jieba.dict.utf8" -jieba_op = text.JiebaTokenizer(HMM_FILE, MP_FILE) -dataset = dataset.map(operations=jieba_op, input_columns=["text"], num_parallel_workers=1) - -print("------------------------after tokenization-----------------------------") - -for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True): - print(text.to_str(i['text'])) -``` - -The output is as follows: - -```text -------------------------before tokenization---------------------------- -今天天气太好了我们一起去外面玩吧 -------------------------after tokenization----------------------------- -['今天天气' '太好了' '我们' '一起' '去' '外面' '玩吧'] -``` - -### SentencePieceTokenizer - -`SentencePieceTokenizer` performs tokenization based on an open-source natural language processing tool 
package [SentencePiece](https://github.com/google/sentencepiece). - -Download the text dataset file `botchan.txt` and place it in the specified location. - -```bash -!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/botchan.txt -!mkdir -p ./datasets/tokenizer/ -!mv botchan.txt ./datasets/tokenizer/ -!tree ./datasets/tokenizer/ -``` - -```text -./datasets/tokenizer/ -└── botchan.txt - -0 directories, 1 files -``` - -The following example builds a text dataset, creates a `vocab` object from the `vocab_file` file, uses `SentencePieceTokenizer` to perform tokenization on the dataset, and displays the text results before and after tokenization. - -```python -import mindspore.dataset as ds -import mindspore.dataset.text as text -from mindspore.dataset.text import SentencePieceModel, SPieceTokenizerOutType - -input_list = ["I saw a girl with a telescope."] -dataset = ds.NumpySlicesDataset(input_list, column_names=["text"], shuffle=False) - -print("------------------------before tokenization----------------------------") - -for data in dataset.create_dict_iterator(output_numpy=True): - print(text.to_str(data['text'])) - -# file from MindSpore repository https://gitee.com/mindspore/mindspore/blob/master/tests/ut/data/dataset/test_sentencepiece/botchan.txt -vocab_file = "./datasets/tokenizer/botchan.txt" -vocab = text.SentencePieceVocab.from_file([vocab_file], 5000, 0.9995, SentencePieceModel.UNIGRAM, {}) -tokenizer_op = text.SentencePieceTokenizer(vocab, out_type=SPieceTokenizerOutType.STRING) -dataset = dataset.map(operations=tokenizer_op) - -print("------------------------after tokenization-----------------------------") - -for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True): - print(text.to_str(i['text'])) -``` - -The output is as follows: - -```text -------------------------before tokenization---------------------------- -I saw a girl with a telescope. 
-------------------------after tokenization----------------------------- -['▁I' '▁sa' 'w' '▁a' '▁girl' '▁with' '▁a' '▁te' 'les' 'co' 'pe' '.'] -``` - -### UnicodeCharTokenizer - -`UnicodeCharTokenizer` performs tokenization based on the Unicode character set. - -The following example builds a text dataset, uses `UnicodeCharTokenizer` to perform tokenization on the dataset, and displays the text results before and after tokenization. - -```python -import mindspore.dataset as ds -import mindspore.dataset.text as text - -input_list = ["Welcome to Beijing!", "北京欢迎您!", "我喜欢English!"] -dataset = ds.NumpySlicesDataset(input_list, column_names=["text"], shuffle=False) - -print("------------------------before tokenization----------------------------") - -for data in dataset.create_dict_iterator(output_numpy=True): - print(text.to_str(data['text'])) - -tokenizer_op = text.UnicodeCharTokenizer() -dataset = dataset.map(operations=tokenizer_op) - -print("------------------------after tokenization-----------------------------") - -for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True): - print(text.to_str(i['text']).tolist()) -``` - -The output is as follows: - -```text -------------------------before tokenization---------------------------- -Welcome to Beijing! -北京欢迎您! -我喜欢English! -------------------------after tokenization----------------------------- -['W', 'e', 'l', 'c', 'o', 'm', 'e', ' ', 't', 'o', ' ', 'B', 'e', 'i', 'j', 'i', 'n', 'g', '!'] -['北', '京', '欢', '迎', '您', '!'] -['我', '喜', '欢', 'E', 'n', 'g', 'l', 'i', 's', 'h', '!'] -``` - -### WhitespaceTokenizer - -`WhitespaceTokenizer` performs tokenization based on spaces. - -The following example builds a text dataset, uses `WhitespaceTokenizer` to perform tokenization on the dataset, and displays the text results before and after tokenization. 
- -```python -import mindspore.dataset as ds -import mindspore.dataset.text as text - -input_list = ["Welcome to Beijing!", "北京欢迎您!", "我喜欢English!"] -dataset = ds.NumpySlicesDataset(input_list, column_names=["text"], shuffle=False) - -print("------------------------before tokenization----------------------------") - -for data in dataset.create_dict_iterator(output_numpy=True): - print(text.to_str(data['text'])) - -tokenizer_op = text.WhitespaceTokenizer() -dataset = dataset.map(operations=tokenizer_op) - -print("------------------------after tokenization-----------------------------") - -for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True): - print(text.to_str(i['text']).tolist()) -``` - -The output is as follows: - -```text -------------------------before tokenization---------------------------- -Welcome to Beijing! -北京欢迎您! -我喜欢English! -------------------------after tokenization----------------------------- -['Welcome', 'to', 'Beijing!'] -['北京欢迎您!'] -['我喜欢English!'] -``` - -### WordpieceTokenizer - -`WordpieceTokenizer` performs tokenization based on the word set. A token can be a single word in the word set or a combination of words. - -The following example builds a text dataset, creates a `vocab` object from the word list, uses `WordpieceTokenizer` to perform tokenization on the dataset, and displays the text results before and after tokenization. 
- -```python -import mindspore.dataset as ds -import mindspore.dataset.text as text - -input_list = ["my", "favorite", "book", "is", "love", "during", "the", "cholera", "era", "what", - "我", "最", "喜", "欢", "的", "书", "是", "霍", "乱", "时", "期", "的", "爱", "情", "您"] -vocab_english = ["book", "cholera", "era", "favor", "##ite", "my", "is", "love", "dur", "##ing", "the"] -vocab_chinese = ["我", '最', '喜', '欢', '的', '书', '是', '霍', '乱', '时', '期', '爱', '情'] - -dataset = ds.NumpySlicesDataset(input_list, column_names=["text"], shuffle=False) - -print("------------------------before tokenization----------------------------") - -for data in dataset.create_dict_iterator(output_numpy=True): - print(text.to_str(data['text'])) - -vocab = text.Vocab.from_list(vocab_english+vocab_chinese) -tokenizer_op = text.WordpieceTokenizer(vocab=vocab) -dataset = dataset.map(operations=tokenizer_op) - -print("------------------------after tokenization-----------------------------") - -for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True): - print(text.to_str(i['text'])) -``` - -The output is as follows: - -```text -------------------------before tokenization---------------------------- -my -favorite -book -is -love -during -the -cholera -era -what -我 -最 -喜 -欢 -的 -书 -是 -霍 -乱 -时 -期 -的 -爱 -情 -您 -------------------------after tokenization----------------------------- -['my'] -['favor' '##ite'] -['book'] -['is'] -['love'] -['dur' '##ing'] -['the'] -['cholera'] -['era'] -['[UNK]'] -['我'] -['最'] -['喜'] -['欢'] -['的'] -['书'] -['是'] -['霍'] -['乱'] -['时'] -['期'] -['的'] -['爱'] -['情'] -['[UNK]'] -``` diff --git a/docs/programming_guide/source_en/train.md b/docs/programming_guide/source_en/train.md deleted file mode 100644 index cfca60b3f172a73f9c0ccaa8b1de208294d320a7..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_en/train.md +++ /dev/null @@ -1,488 +0,0 @@ -# Training - - - -- [Training](#training) - - [Overview](#overview) - - [Customizing a Training 
Network](#customizing-a-training-network) - - [Customizing a Training Cycle](#customizing-a-training-cycle) - - [Conducting Inference While Training](#conducting-inference-while-training) - - [On-Device Execution](#on-device-execution) - - [Computational Graphs on Devices](#computational-graphs-on-devices) - - [Data Sinking](#data-sinking) - - - - - -## Overview - -MindSpore provides a large number of network models such as object detection and natural language processing in ModelZoo for users to directly use. However, some senior users may want to design networks or customize training cycles. The following describes how to customize a training network, how to customize a training cycle, and how to conduct inference while training. In addition, the on-device execution mode is also described in detail. - -> Note: This document is applicable to GPU and Ascend environments. - -## Customizing a Training Network - -Before customizing a training network, you need to understand the network support of MindSpore, constraints on network construction using Python, and operator support. - -- Network support: Currently, MindSpore supports multiple types of networks, including computer vision, natural language processing, recommender, and graph neural network. For details, see [Network List](https://www.mindspore.cn/doc/note/en/master/network_list.html). If the existing networks cannot meet your requirements, you can define your own network as required. - -- Constraints on network construction using Python: MindSpore does not support the conversion of any Python source code into computational graphs. Therefore, the source code has the syntax and network definition constraints. For details, please refer to [Static Graph Syntax Support](https://www.mindspore.cn/doc/note/en/master/static_graph_syntax_support.html). These constraints may change as MindSpore evolves. - -- Operator support: As the name implies, the network is based on operators. 
Therefore, before customizing a training network, you need to understand the operators supported by MindSpore. For details about operator implementation on different backends (Ascend, GPU, and CPU), see [Operator List](https://www.mindspore.cn/doc/note/en/master/operator_list.html). - -> When the built-in operators of the network cannot meet the requirements, you can refer to [Custom Operators(Ascend)](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/custom_operator_ascend.html) to quickly expand the custom operators of the Ascend AI processor. - -The following is a code example: - -```python -import numpy as np - -from mindspore import Tensor -from mindspore.nn import Cell, Dense, SoftmaxCrossEntropyWithLogits, Momentum, TrainOneStepCell, WithLossCell -import mindspore.ops as ops - - -class ReLUReduceMeanDense(Cell): - def __init__(self, kernel, bias, in_channel, num_class): - super().__init__() - self.relu = ops.ReLU() - self.mean = ops.ReduceMean(keep_dims=False) - self.dense = Dense(in_channel, num_class, kernel, bias) - - def construct(self, x): - x = self.relu(x) - x = self.mean(x, (2, 3)) - x = self.dense(x) - return x - - -if __name__ == "__main__": - weight_np = np.ones((1000, 2048)).astype(np.float32) - weight = Tensor(weight_np.copy()) - bias_np = np.ones((1000,)).astype(np.float32) - bias = Tensor(bias_np.copy()) - net = ReLUReduceMeanDense(weight, bias, 2048, 1000) - criterion = SoftmaxCrossEntropyWithLogits(sparse=False) - optimizer = Momentum(learning_rate=0.1, momentum=0.1, - params=filter(lambda x: x.requires_grad, net.get_parameters())) - net_with_criterion = WithLossCell(net, criterion) - train_network = TrainOneStepCell(net_with_criterion, optimizer) - train_network.set_train() - input_np = np.random.randn(32, 2048, 7, 7).astype(np.float32) - input = Tensor(input_np.copy()) - label_np_onehot = np.zeros(shape=(32, 1000)).astype(np.float32) - label = Tensor(label_np_onehot.copy()) - for i in range(1): - loss = train_network(input, 
label) - print("-------loss------", loss) -``` - -The output is as follows: - -```python --------loss------ [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. - 0. 0. 0. 0. 0. 0. 0. 0.] -``` - -## Customizing a Training Cycle - -Before performing a custom training cycle, download the MNIST dataset that needs to be used, and decompress and place it at the specified location: - -```bash -!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test -!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate -!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate -!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate -!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate -!tree ./datasets/MNIST_Data -``` - -```text -./datasets/MNIST_Data -├── test -│ ├── t10k-images-idx3-ubyte -│ └── t10k-labels-idx1-ubyte -└── train - ├── train-images-idx3-ubyte - └── train-labels-idx1-ubyte - -2 directories, 4 files -``` - -If you do not want to use the `Model` interface provided by MindSpore, you can also refer to the following examples to freely control the number of iterations, traverse the dataset, and so on. 
- -The following is a code example: - -```python -import os - -import mindspore.dataset as ds -import mindspore.dataset.transforms.c_transforms as CT -import mindspore.dataset.vision.c_transforms as CV -import mindspore.nn as nn -from mindspore import context, DatasetHelper, connect_network_with_dataset -from mindspore import dtype as mstype -from mindspore.common.initializer import TruncatedNormal -from mindspore import ParameterTuple -from mindspore.dataset.vision import Inter -from mindspore.nn import WithLossCell -import mindspore.ops as ops - - -def create_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1): - """ - create dataset for train or test - """ - # define dataset - mnist_ds = ds.MnistDataset(data_path) - - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - rescale_nml = 1 / 0.3081 - shift_nml = -1 * 0.1307 / 0.3081 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode - rescale_nml_op = CV.Rescale(rescale_nml, shift_nml) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = CT.TypeCast(mstype.int32) - - # apply map operations on images - mnist_ds = mnist_ds.map(input_columns="label", operations=type_cast_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=resize_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_nml_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=hwc2chw_op, num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script - mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True) - 
mnist_ds = mnist_ds.repeat(repeat_size) - - return mnist_ds - - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0): - """weight initial for conv layer""" - weight = weight_variable() - return nn.Conv2d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, padding=padding, - weight_init=weight, has_bias=False, pad_mode="valid") - - -def fc_with_initialize(input_channels, out_channels): - """weight initial for fc layer""" - weight = weight_variable() - bias = weight_variable() - return nn.Dense(input_channels, out_channels, weight, bias) - - -def weight_variable(): - """weight initial""" - return TruncatedNormal(0.02) - - -class LeNet5(nn.Cell): - """ - Lenet network - Args: - num_class (int): Num classes. Default: 10. - - Returns: - Tensor, output tensor - - Examples: - >>> LeNet(num_class=10) - """ - - def __init__(self, num_class=10): - super(LeNet5, self).__init__() - self.num_class = num_class - self.batch_size = 32 - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16 * 5 * 5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, self.num_class) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.reshape = ops.Reshape() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.reshape(x, (self.batch_size, -1)) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x - - -class TrainOneStepCell(nn.Cell): - def __init__(self, network, optimizer, sens=1.0): - super(TrainOneStepCell, self).__init__(auto_prefix=False) - self.network = network - self.weights = ParameterTuple(network.trainable_params()) - self.optimizer = optimizer - self.grad = ops.GradOperation(get_by_list=True, sens_param=True) - self.sens = sens - - def set_sens(self, value): - self.sens = value - - def 
construct(self, data, label): - weights = self.weights - loss = self.network(data, label) - sens = ops.Fill()(ops.DType()(loss), ops.Shape()(loss), self.sens) - grads = self.grad(self.network, weights)(data, label, sens) - return ops.depend(loss, self.optimizer(grads)) - - -if __name__ == "__main__": - context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - - ds_data_path = "./datasets/MNIST_Data/train/" - ds_train = create_dataset(ds_data_path, 32) - - network = LeNet5(10) - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9) - net = WithLossCell(network, net_loss) - net = TrainOneStepCell(net, net_opt) - network.set_train() - print("============== Starting Training ==============") - epoch = 10 - for step in range(epoch): - for inputs in ds_train: - output = net(*inputs) - print("epoch: {0}/{1}, losses: {2}".format(step + 1, epoch, output.asnumpy(), flush=True)) -``` - -The output is as follows: - -```text -============== Starting Training ============== -epoch: 1/10, losses: 2.3086986541748047 -epoch: 1/10, losses: 2.309938430786133 -epoch: 1/10, losses: 2.302298069000244 -epoch: 1/10, losses: 2.310209035873413 -epoch: 1/10, losses: 2.3002336025238037 -epoch: 1/10, losses: 2.3022992610931396 -... ... -epoch: 1/10, losses: 0.18848800659179688 -epoch: 1/10, losses: 0.15532201528549194 -epoch: 2/10, losses: 0.179201140999794 -epoch: 2/10, losses: 0.20995387434959412 -epoch: 2/10, losses: 0.4867479205131531 -... ... -epoch: 10/10, losses: 0.027243722230196 -epoch: 10/10, losses: 0.07665436714887619 -epoch: 10/10, losses: 0.005962767638266087 -epoch: 10/10, losses: 0.026364721357822418 -epoch: 10/10, losses: 0.0003102901973761618 -``` - -> For details about how to obtain the MNIST dataset used in the example, see [Downloading the Dataset](https://www.mindspore.cn/tutorial/training/en/master/quick_start/quick_start.html#downloading-the-dataset). 
-> The typical application scenario is gradient accumulation. For details, see [Applying Gradient Accumulation Algorithm](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/apply_gradient_accumulation.html). - -## Conducting Inference While Training - -For some complex networks with a large data volume and a relatively long training time, to learn the change of model accuracy in different training phases, the model accuracy may be traced in a manner of inference while training. For details, see [Evaluating the Model during Training](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/evaluate_the_model_during_training.html). - -## On-Device Execution - -Currently, the backends supported by MindSpore include Ascend, GPU, and CPU. The device in the "On-Device" refers to the Ascend AI processor. - -The Ascend AI processor integrates the AI core, AI CPU, and CPU. The AI core is responsible for large Tensor Vector computing, the AI CPU is responsible for scalar computing, and the CPU is responsible for logic control and task distribution. - -The CPU on the host side delivers graphs or operators to the Ascend AI processor. The Ascend AI processor has the functions of computing, logic control, and task distribution. Therefore, it does not need to frequently interact with the CPU on the host side. It only needs to return the final calculation result to the host. In this way, the entire graph is sunk to the device for execution, avoiding frequent interaction between the host and device and reducing overheads. - -### Computational Graphs on Devices - -The entire graph is executed on the device to reduce the interaction overheads between the host and device. Multiple steps can be moved downwards together with cyclic sinking to further reduce the number of interactions between the host and device. - -Cyclic sinking is optimized based on on-device execution to further reduce the number of interactions between the host and device. 
Generally, each step returns a result. Cyclic sinking is used to control the number of steps at which a result is returned.
-
-By default, the result is returned for each epoch. In this way, the host and device need to exchange data only once in each epoch.
-
-You can also use `dataset_sink_mode` and `sink_size` of the `train` interface to control the sunk data volume of each epoch.
-
-### Data Sinking
-
-The `train` interface parameter `dataset_sink_mode` of `Model` can be used to control whether data sinks. If the value of `dataset_sink_mode` is True, data sinking is enabled. Otherwise, data sinking is disabled. Sinking means that data is directly transmitted to the device through a channel.
-
-The `dataset_sink_mode` parameter can be used with `sink_size` to control the amount of data sunk by each `epoch`. When `dataset_sink_mode` is set to True, that is, the data sinking mode is used:
-
-If `sink_size` is set to the default value -1, the amount of data sunk by each `epoch` is the size of the original entire dataset.
-
-If `sink_size` is greater than 0, the raw dataset can be traversed for an unlimited number of times. Each `epoch` sinks the data volume of `sink_size`, and the next `epoch` continues to traverse from the end position of the previous traversal.
-
-The total sunk data volume is controlled by the `epoch` and `sink_size` variables. That is, the total data volume is calculated as follows: Total data volume = `epoch` x `sink_size`.
-
-When using `LossMonitor`, `TimeMonitor` or other `Callback` interfaces, if the `dataset_sink_mode` is set to False, each `step` between the Host side and the Device side interacts once, so each `step` will return a result. If `dataset_sink_mode` is True, because the data is transmitted through the channel on the Device, there is one data interaction between the Host side and the Device side for each `epoch`, so each `epoch` only returns one result.
-
-> The CPU and pynative mode cannot support dataset sink mode currently. 
- -The following is a code example: - -```python -import os - -import mindspore.dataset as ds -import mindspore.dataset.transforms.c_transforms as CT -import mindspore.dataset.vision.c_transforms as CV -import mindspore.nn as nn -from mindspore import context, Model -from mindspore import dtype as mstype -from mindspore.common.initializer import TruncatedNormal -from mindspore.dataset.vision import Inter -from mindspore.nn import Accuracy -import mindspore.ops as ops -from mindspore.train.callback import LossMonitor - - -def create_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1): - """ - create dataset for train or test - """ - # define dataset - mnist_ds = ds.MnistDataset(data_path) - - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - rescale_nml = 1 / 0.3081 - shift_nml = -1 * 0.1307 / 0.3081 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode - rescale_nml_op = CV.Rescale(rescale_nml, shift_nml) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = CT.TypeCast(mstype.int32) - - # apply map operations on images - mnist_ds = mnist_ds.map(input_columns="label", operations=type_cast_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=resize_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_nml_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=hwc2chw_op, num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script - mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True) - mnist_ds = 
mnist_ds.repeat(repeat_size) - - return mnist_ds - - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0): - """weight initial for conv layer""" - weight = weight_variable() - return nn.Conv2d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, padding=padding, - weight_init=weight, has_bias=False, pad_mode="valid") - - -def fc_with_initialize(input_channels, out_channels): - """weight initial for fc layer""" - weight = weight_variable() - bias = weight_variable() - return nn.Dense(input_channels, out_channels, weight, bias) - - -def weight_variable(): - """weight initial""" - return TruncatedNormal(0.02) - - -class LeNet5(nn.Cell): - """ - Lenet network - Args: - num_class (int): Num classes. Default: 10. - - Returns: - Tensor, output tensor - - Examples: - >>> LeNet(num_class=10) - """ - - def __init__(self, num_class=10): - super(LeNet5, self).__init__() - self.num_class = num_class - self.batch_size = 32 - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16 * 5 * 5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, self.num_class) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.reshape = ops.Reshape() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.reshape(x, (self.batch_size, -1)) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x - - -if __name__ == "__main__": - context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - ds_train_path = "./datasets/MNIST_Data/train/" - ds_train = create_dataset(ds_train_path, 32) - - network = LeNet5(10) - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9) - model = Model(network, net_loss, net_opt) - - 
print("============== Starting Training ==============") - model.train(epoch=10, train_dataset=ds_train, callbacks=[LossMonitor()], dataset_sink_mode=True, sink_size=1000) -``` - -The output is as follows: - -```python -============== Starting Training ============== -epoch: 1 step: 1000, loss is 0.110185064 -epoch: 2 step: 1000, loss is 0.12088283 -epoch: 3 step: 1000, loss is 0.15903473 -epoch: 4 step: 1000, loss is 0.030054657 -epoch: 5 step: 1000, loss is 0.013846226 -epoch: 6 step: 1000, loss is 0.052161213 -epoch: 7 step: 1000, loss is 0.0050197737 -epoch: 8 step: 1000, loss is 0.17207858 -epoch: 9 step: 1000, loss is 0.010310417 -epoch: 10 step: 1000, loss is 0.000672762 -``` - -When `batch_size` is 32, the size of the dataset is 1875. When `sink_size` is set to 1000, each `epoch` sinks 1000 batches of data, the number of sinks is `epoch` (=10), and the total sunk data volume is `epoch` x `sink_size` = 10000. - -`dataset_sink_mode` is True, so every `epoch` returns a result. - -> When `dataset_sink_mode` is set to False, the `sink_size` parameter is invalid. 
diff --git a/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png b/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png deleted file mode 100644 index 9b499805e2f8ab52dcde3fd4a7708ef753da9b84..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/_static/logo_notebook.png b/docs/programming_guide/source_zh_cn/_static/logo_notebook.png deleted file mode 100644 index f28598315f19f4be76a73ddf5dc6bbdbe4db35fd..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/_static/logo_notebook.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/_static/logo_source.png b/docs/programming_guide/source_zh_cn/_static/logo_source.png deleted file mode 100644 index 9932d67ab50871edb0c95979c4e948c812c7cdea..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/_static/logo_source.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/advanced_usage_of_checkpoint.ipynb b/docs/programming_guide/source_zh_cn/advanced_usage_of_checkpoint.ipynb deleted file mode 100644 index 4f4e056ab1ad74cdafacb52fbf7f7b7eb3384302..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/advanced_usage_of_checkpoint.ipynb +++ /dev/null @@ -1,572 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 保存、加载与转化模型\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/advanced_usage_of_checkpoint.ipynb) 
[![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_advanced_usage_of_checkpoint.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9hZHZhbmNlZF91c2FnZV9vZl9jaGVja3BvaW50LmlweW5i&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "在模型训练或者加载模型的过程中,有时需要替换掉模型文件中某些优化器或者其他超参数以及分类函数中的全连接层改动,但是又不想改动太大,或者从0开始训练模型,针对这种情况,MindSpore提供了只调整模型部分权重的CheckPoint进阶用法,并将方法应用在模型调优过程中。\n", - "\n", - "基础用法可参考:[保存加载参数](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#checkpoint)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 准备工作" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本篇以LeNet网络为例子,介绍在MindSpore中对模型进行保存,加载和转化等操作方法。\n", - "\n", - "在进行操作前,需做好如下准备好以下几个文件:\n", - "\n", - "- MNIST数据集。\n", - "- LeNet网络的预训练模型文件`checkpoint-lenet_1-1875.ckpt`。\n", - "- 数据增强文件`dataset_process.py`,使用其中的数据增强方法`create_dataset`,可参考官网[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)中定义的数据增强方法`create_dataset`。\n", - "- 定义LeNet网络。\n", - "\n", - "执行下述代码,完成前3项准备工作。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "ExecuteTime": { - "end_time": "2021-03-19T07:26:35.335788Z", - "start_time": "2021-03-19T07:26:35.312679Z" - } - }, - "outputs": [], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train 
https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!wget https://mindspore-website.obs.myhuaweicloud.com/notebook/source-codes/dataset_process.py -N --no-check-certificate\n", - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/checkpoint_lenet-1_1875.zip --no-check-certificate\n", - "!unzip -o checkpoint_lenet-1_1875.zip" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "定义LeNet网络模型,具体定义过程如下。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.common.initializer import Normal\n", - "import mindspore.nn as nn\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def 
construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x)\n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 高级用法\n", - "\n", - "### 保存\n", - "\n", - "#### 手动保存CheckPoint\n", - "\n", - "使用`save_checkpoint`,手动保存CheckPoint文件。\n", - "\n", - "应用场景: \n", - "\n", - "1. 保存网络的初始值。\n", - "2. 手动保存指定网络。\n", - "\n", - "执行以下代码,在对预训练模型`checkpoint_lenet-1_1875.ckpt`训练过100个batch的数据集后,使用`save_checkpoint`手动保存出模型`mindspore_lenet.ckpt`。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Parameter (name=conv1.weight), Parameter (name=conv2.weight), Parameter (name=fc1.weight), Parameter (name=fc1.bias), Parameter (name=fc2.weight), Parameter (name=fc2.bias), Parameter (name=fc3.weight), Parameter (name=fc3.bias), Parameter (name=learning_rate), Parameter (name=momentum), Parameter (name=moments.conv1.weight), Parameter (name=moments.conv2.weight), Parameter (name=moments.fc1.weight), Parameter (name=moments.fc1.bias), Parameter (name=moments.fc2.weight), Parameter (name=moments.fc2.bias), Parameter (name=moments.fc3.weight), Parameter (name=moments.fc3.bias)]\n" - ] - } - ], - "source": [ - "from mindspore import Model, load_checkpoint, save_checkpoint, load_param_into_net\n", - "from mindspore import context, Tensor\n", - "from dataset_process import create_dataset\n", - "import mindspore.nn as nn\n", - "\n", - "network = LeNet5()\n", - "net_opt = nn.Momentum(network.trainable_params(), learning_rate=0.01, momentum=0.9)\n", - "net_loss = nn.loss.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - "\n", - "params = load_checkpoint(\"checkpoint_lenet-1_1875.ckpt\")\n", - "load_param_into_net(network, 
params)\n", - "\n", - "net_with_criterion = nn.WithLossCell(network, net_loss)\n", - "train_net = nn.TrainOneStepCell(net_with_criterion, net_opt)\n", - "train_net.set_train()\n", - "\n", - "train_path = \"./datasets/MNIST_Data/train\"\n", - "ds_train = create_dataset(train_path)\n", - "\n", - "count = 0\n", - "for item in ds_train.create_dict_iterator():\n", - " input_data = item[\"image\"]\n", - " labels = item[\"label\"]\n", - " train_net(input_data, labels)\n", - " count += 1\n", - " if count==100:\n", - " print(train_net.trainable_params())\n", - " save_checkpoint(train_net, \"mindspore_lenet.ckpt\")\n", - " break" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "从上述打印信息可以看出`mindspore_lenet.ckpt`的权重参数,包括了前向传播过程中LeNet网络中各隐藏层中的权重参数、学习率、优化率以及反向传播中优化各权重层的优化器函数的权重。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### 保存指定的Cell\n", - "\n", - "使用方法:`CheckpointConfig`类的`saved_network`参数。\n", - "\n", - "应用场景:\n", - "\n", - "- 只保存推理网络模型的参数(不保存优化器的参数会使生成的CheckPoint文件大小减小一倍)。\n", - "\n", - "- 保存子网的参数,用于Fine-tune(模型微调)任务。\n", - "\n", - "在回调函数中使用方法`CheckpointConfig`,并指定保存模型的Cell为`network`即前向传播的LeNet网络。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 625, loss is 0.116291314\n", - "epoch: 1 step: 1250, loss is 0.09527888\n", - "epoch: 1 step: 1875, loss is 0.23090823\n" - ] - } - ], - "source": [ - "import os\n", - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor\n", - "\n", - "ds_train = create_dataset(train_path)\n", - "epoch_size = 1\n", - "model = Model(train_net)\n", - "config_ck = CheckpointConfig(saved_network=network)\n", - "ckpoint = ModelCheckpoint(prefix=\"lenet\", config=config_ck)\n", - "model.train(epoch_size, ds_train, callbacks=[ckpoint, LossMonitor(625)])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"模型经过训练后,保存出模型文件`lenet-1_1875.ckpt`。接下来对比指定保存的模型cell和原始模型在大小和具体权重有何区别。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "with_opt size: 482 kB\n", - "{'conv1.weight': Parameter (name=conv1.weight), 'conv2.weight': Parameter (name=conv2.weight), 'fc1.weight': Parameter (name=fc1.weight), 'fc1.bias': Parameter (name=fc1.bias), 'fc2.weight': Parameter (name=fc2.weight), 'fc2.bias': Parameter (name=fc2.bias), 'fc3.weight': Parameter (name=fc3.weight), 'fc3.bias': Parameter (name=fc3.bias), 'learning_rate': Parameter (name=learning_rate), 'momentum': Parameter (name=momentum), 'moments.conv1.weight': Parameter (name=moments.conv1.weight), 'moments.conv2.weight': Parameter (name=moments.conv2.weight), 'moments.fc1.weight': Parameter (name=moments.fc1.weight), 'moments.fc1.bias': Parameter (name=moments.fc1.bias), 'moments.fc2.weight': Parameter (name=moments.fc2.weight), 'moments.fc2.bias': Parameter (name=moments.fc2.bias), 'moments.fc3.weight': Parameter (name=moments.fc3.weight), 'moments.fc3.bias': Parameter (name=moments.fc3.bias)}\n", - "\n", - "=========after train===========\n", - "\n", - "without_opt size: 241 kB\n", - "{'conv1.weight': Parameter (name=conv1.weight), 'conv2.weight': Parameter (name=conv2.weight), 'fc1.weight': Parameter (name=fc1.weight), 'fc1.bias': Parameter (name=fc1.bias), 'fc2.weight': Parameter (name=fc2.weight), 'fc2.bias': Parameter (name=fc2.bias), 'fc3.weight': Parameter (name=fc3.weight), 'fc3.bias': Parameter (name=fc3.bias)}\n" - ] - } - ], - "source": [ - "model_with_opt = os.path.getsize(\"./checkpoint_lenet-1_1875.ckpt\") // 1024\n", - "params_without_change = load_checkpoint(\"./checkpoint_lenet-1_1875.ckpt\")\n", - "print(\"with_opt size:\", model_with_opt, \"kB\")\n", - "print(params_without_change)\n", - "\n", - "print(\"\\n=========after train===========\\n\")\n", - "model_without_opt = 
os.path.getsize(\"./lenet-1_1875.ckpt\") // 1024\n", - "params_with_change = load_checkpoint(\"./lenet-1_1875.ckpt\")\n", - "print(\"without_opt size:\", model_without_opt, \"kB\")\n", - "print(params_with_change)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "训练后,保存出来的模型`lenet-1_1875.ckpt`,模型权重文件大小为241kB,跟原始完整模型大小482kB相比,整体减少了将近一半;\n", - "\n", - "具体对比模型中的参数,可以看出`lenet-1_1875.ckpt`中参数相比`checkpoint_lenet-1_1875.ckpt`减少了学习率、优化率和反向优化等相关的权重参数,只保留了前向传播网络LeNet的权重参数。符合预期效果。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### 异步保存CheckPoint\n", - "\n", - "使用方法:`CheckpointConfig`类的`async_save`参数。\n", - "\n", - "应用场景:训练的模型参数量较大,可以边训练边保存,节省保存CheckPoint文件时的写入时间。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "config_ck = CheckpointConfig(async_save=True)\n", - "ckpoint = ModelCheckpoint(prefix=\"lenet\", config=config_ck)\n", - "model.train(epoch_size, ds_train, callbacks=ckpoint)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### 保存自定义参数字典\n", - "\n", - "使用方法:构造一个`obj_dict`传入`save_checkpoint`方法。\n", - "\n", - "使用场景:\n", - "\n", - "- 训练过程中需要额外保存参数(`lr`、`epoch_size`等)为CheckPoint文件。\n", - "\n", - "- 修改CheckPoint里面的参数值后重新保存。\n", - "\n", - "- 把PyTorch、TensorFlow的CheckPoint文件转化为MindSpore的CheckPoint文件。\n", - "\n", - "根据具体场景分为两种情况:\n", - "\n", - "1. 
已有CheckPoint文件,修改内容后重新保存。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "==========param_list===========\n", - "\n", - "[{'name': 'conv1.weight', 'data': Parameter (name=conv1.weight)}, {'name': 'conv2.weight', 'data': Parameter (name=conv2.weight)}, {'name': 'fc1.weight', 'data': Parameter (name=fc1.weight)}, {'name': 'fc1.bias', 'data': Parameter (name=fc1.bias)}, {'name': 'fc2.weight', 'data': Parameter (name=fc2.weight)}, {'name': 'fc2.bias', 'data': Parameter (name=fc2.bias)}, {'name': 'fc3.weight', 'data': Parameter (name=fc3.weight)}, {'name': 'fc3.bias', 'data': Parameter (name=fc3.bias)}]\n", - "\n", - "==========after delete param_list[2]===========\n", - "\n", - "[{'name': 'conv1.weight', 'data': Parameter (name=conv1.weight)}, {'name': 'conv2.weight', 'data': Parameter (name=conv2.weight)}, {'name': 'fc1.bias', 'data': Parameter (name=fc1.bias)}, {'name': 'fc2.weight', 'data': Parameter (name=fc2.weight)}, {'name': 'fc2.bias', 'data': Parameter (name=fc2.bias)}, {'name': 'fc3.weight', 'data': Parameter (name=fc3.weight)}, {'name': 'fc3.bias', 'data': Parameter (name=fc3.bias)}]\n", - "\n", - "==========after add element===========\n", - "\n", - "[{'name': 'conv1.weight', 'data': Parameter (name=conv1.weight)}, {'name': 'conv2.weight', 'data': Parameter (name=conv2.weight)}, {'name': 'fc1.bias', 'data': Parameter (name=fc1.bias)}, {'name': 'fc2.weight', 'data': Parameter (name=fc2.weight)}, {'name': 'fc2.bias', 'data': Parameter (name=fc2.bias)}, {'name': 'fc3.weight', 'data': Parameter (name=fc3.weight)}, {'name': 'fc3.bias', 'data': Parameter (name=fc3.bias)}, {'name': 'epoch_size', 'data': Tensor(shape=[], dtype=Int64, value= 10)}]\n", - "\n", - "==========after modify element===========\n", - "\n", - "[{'name': 'conv1.weight', 'data': Parameter (name=conv1.weight)}, {'name': 'conv2.weight', 'data': Parameter (name=conv2.weight)}, {'name': 
'fc1.bias', 'data': Parameter (name=fc1.bias)}, {'name': 'fc2.weight', 'data': Tensor(shape=[], dtype=Int64, value= 66)}, {'name': 'fc2.bias', 'data': Parameter (name=fc2.bias)}, {'name': 'fc3.weight', 'data': Parameter (name=fc3.weight)}, {'name': 'fc3.bias', 'data': Parameter (name=fc3.bias)}, {'name': 'epoch_size', 'data': Tensor(shape=[], dtype=Int64, value= 10)}]\n" - ] - } - ], - "source": [ - "params = load_checkpoint(\"./lenet-1_1875.ckpt\")\n", - "\n", - "# eg: param_list = [{\"name\": param_name, \"data\": param_data},...]\n", - "param_list = [{\"name\": k, \"data\":v} for k,v in params.items()]\n", - "print(\"==========param_list===========\\n\")\n", - "print(param_list)\n", - "\n", - "# del element\n", - "del param_list[2]\n", - "print(\"\\n==========after delete param_list[2]===========\\n\")\n", - "print(param_list)\n", - "\n", - "\n", - "# add element \"epoch_size\"\n", - "param = {\"name\": \"epoch_size\"}\n", - "param[\"data\"] = Tensor(10)\n", - "param_list.append(param)\n", - "print(\"\\n==========after add element===========\\n\")\n", - "print(param_list)\n", - "\n", - "# modify element\n", - "param_list[3][\"data\"] = Tensor(66)\n", - "# save a new checkpoint file\n", - "print(\"\\n==========after modify element===========\\n\")\n", - "print(param_list)\n", - "\n", - "save_checkpoint(param_list, 'modify.ckpt')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "将加载的模型文件转换成list类型后,可以对模型参数进行删除,添加,修改等操作,并使用`save_checkpoint`手动保存,完成对模型权重的内容修改操作。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
自定义参数列表保存成CheckPoint文件。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[{'name': 'epoch_size', 'data': Tensor(shape=[], dtype=Int64, value= 10)}, {'name': 'learning_rate', 'data': Tensor(shape=[], dtype=Float64, value= 0.01)}]\n" - ] - } - ], - "source": [ - "param_list = []\n", - "# save epoch_size\n", - "param = {\"name\": \"epoch_size\"}\n", - "param[\"data\"] = Tensor(10)\n", - "param_list.append(param)\n", - "\n", - "# save learning rate\n", - "param = {\"name\": \"learning_rate\"}\n", - "param[\"data\"] = Tensor(0.01)\n", - "param_list.append(param)\n", - "# save a new checkpoint file\n", - "print(param_list)\n", - "\n", - "save_checkpoint(param_list, 'hyperparameters.ckpt')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 加载\n", - "\n", - "#### 严格匹配参数名\n", - "\n", - "CheckPoint文件中的权重参数到`net`中的时候,会优先匹配`net`和CheckPoint中name相同的parameter。匹配完成后,发现net中存在没有加载的parameter,会匹配net中后缀名称与ckpt相同的parameter。\n", - "\n", - "例如:会把CheckPoint中名为`conv.0.weight`的参数值加载到net中名为`net.conv.0.weight`的parameter中。\n", - "\n", - "如果想取消这种模糊匹配,只采取严格匹配机制,可以通过方法`load_param_into_net`中的`strict_load`参数控制,默认为False,表示采取模糊匹配机制。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "==========strict load mode===========\n", - "{'conv1.weight': Parameter (name=conv1.weight), 'conv2.weight': Parameter (name=conv2.weight), 'fc1.weight': Parameter (name=fc1.weight), 'fc1.bias': Parameter (name=fc1.bias), 'fc2.weight': Parameter (name=fc2.weight), 'fc2.bias': Parameter (name=fc2.bias), 'fc3.weight': Parameter (name=fc3.weight), 'fc3.bias': Parameter (name=fc3.bias)}\n" - ] - } - ], - "source": [ - "net = LeNet5()\n", - "params = load_checkpoint(\"lenet-1_1875.ckpt\")\n", - "load_param_into_net(net, params, strict_load=True)\n", - "print(\"==========strict load 
mode===========\")\n", - "print(params)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### 过滤指定前缀\n", - "\n", - "使用方法:`load_checkpoint`的`filter_prefix`参数。\n", - "\n", - "使用场景:加载CheckPoint时,想要过滤某些包含特定前缀的parameter。\n", - "\n", - "- 加载CheckPoint时,不加载优化器中的`parameter(eg:filter_prefix=’moments’)`。\n", - "\n", - "- 不加载卷积层的`parameter(eg:filter_prefix=’conv1’)`。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "=============net params=============\n", - "{'conv1.weight': Parameter (name=conv1.weight), 'conv2.weight': Parameter (name=conv2.weight), 'fc1.weight': Parameter (name=fc1.weight), 'fc1.bias': Parameter (name=fc1.bias), 'fc2.weight': Parameter (name=fc2.weight), 'fc2.bias': Parameter (name=fc2.bias), 'fc3.weight': Parameter (name=fc3.weight), 'fc3.bias': Parameter (name=fc3.bias), 'learning_rate': Parameter (name=learning_rate), 'momentum': Parameter (name=momentum), 'moments.conv1.weight': Parameter (name=moments.conv1.weight), 'moments.conv2.weight': Parameter (name=moments.conv2.weight), 'moments.fc1.weight': Parameter (name=moments.fc1.weight), 'moments.fc1.bias': Parameter (name=moments.fc1.bias), 'moments.fc2.weight': Parameter (name=moments.fc2.weight), 'moments.fc2.bias': Parameter (name=moments.fc2.bias), 'moments.fc3.weight': Parameter (name=moments.fc3.weight), 'moments.fc3.bias': Parameter (name=moments.fc3.bias)}\n", - "\n", - "=============after filter_prefix moments=============\n", - "{'conv1.weight': Parameter (name=conv1.weight), 'conv2.weight': Parameter (name=conv2.weight), 'fc1.weight': Parameter (name=fc1.weight), 'fc1.bias': Parameter (name=fc1.bias), 'fc2.weight': Parameter (name=fc2.weight), 'fc2.bias': Parameter (name=fc2.bias), 'fc3.weight': Parameter (name=fc3.weight), 'fc3.bias': Parameter (name=fc3.bias), 'learning_rate': Parameter (name=learning_rate), 'momentum': Parameter (name=momentum)}\n" - ] 
- } - ], - "source": [ - "net = LeNet5()\n", - "print(\"=============net params=============\")\n", - "params = load_checkpoint(\"checkpoint_lenet-1_1875.ckpt\")\n", - "load_param_into_net(net, params)\n", - "print(params)\n", - "\n", - "net = LeNet5()\n", - "print(\"\\n=============after filter_prefix moments=============\")\n", - "params = load_checkpoint(\"checkpoint_lenet-1_1875.ckpt\", filter_prefix='moments')\n", - "load_param_into_net(net, params)\n", - "print(params)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用过滤前缀的机制,可以将不想载入的参数(本例为优化器权重参数)过滤掉,进行Fine-tune时,可以选用其他的优化器进行优化。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 以上为使用MindSpore checkpoint功能的进阶用法,上述所有用法均可共同使用。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 转化其他框架CheckPoint为MindSpore的格式\n", - "\n", - "把其他框架的CheckPoint文件转化成MindSpore格式。\n", - "\n", - "一般情况下,CheckPoint文件中保存的就是参数名和参数值,调用相应框架的读取接口后,获取到参数名和数值后,按照MindSpore格式,构建出对象,就可以直接调用MindSpore接口保存成MindSpore格式的CheckPoint文件了。\n", - "\n", - "其中主要的工作量为对比不同框架间的parameter名称,做到两个框架的网络中所有parameter name一一对应(可以使用一个map进行映射),下面代码的逻辑转化parameter格式,不包括对应parameter name。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "import torch\n", - "from mindspore import Tensor, save_checkpoint\n", - "\n", - "def pytorch2mindspore(default_file = 'torch_resnet.pth'):\n", - " # read pth file\n", - " par_dict = torch.load(default_file)['state_dict']\n", - " params_list = []\n", - " for name in par_dict:\n", - " param_dict = {}\n", - " parameter = par_dict[name]\n", - " param_dict['name'] = name\n", - " param_dict['data'] = Tensor(parameter.numpy())\n", - " params_list.append(param_dict)\n", - " save_checkpoint(params_list, 'ms_resnet.ckpt')" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - 
"name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/api_structure.ipynb b/docs/programming_guide/source_zh_cn/api_structure.ipynb deleted file mode 100644 index 845bd53ae9e864220aa991bfdc9542011c87af29..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/api_structure.ipynb +++ /dev/null @@ -1,159 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# MindSpore API概述\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/api_structure.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_api_structure.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9hcGlfc3RydWN0dXJlLmlweW5i&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总体架构" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore是一个全场景深度学习框架,旨在实现易开发、高效执行、全场景覆盖三大目标,其中易开发表现为API友好、调试难度低,高效执行包括计算效率、数据预处理效率和分布式训练效率,全场景则指框架同时支持云、边缘以及端侧场景。\n", - "\n", - "MindSpore总体架构分为前端表示层(Mind Expression,ME)、计算图引擎(Graph Engine,GE)和后端运行时三个部分。ME提供了用户级应用软件编程接口(Application Programming 
Interface,API),用于科学计算以及构建和训练神经网络,并将用户的Python代码转换为数据流图。GE是算子和硬件资源的管理器,负责控制从ME接收的数据流图的执行。后端运行时包括云、边、端上不同环境中的高效运行环境,例如CPU、GPU、Ascend AI处理器、Android/iOS等。更多总体架构的相关内容请参见[总体架构](https://www.mindspore.cn/doc/note/zh-CN/master/design/mindspore/architecture.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 设计理念" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore源于全产业的最佳实践,向数据科学家和算法工程师提供了统一的模型训练、推理和导出等接口,支持端、边、云等不同场景下的灵活部署,推动深度学习和科学计算等领域繁荣发展。\n", - "\n", - "MindSpore目前提供了Python编程范式,用户使用Python原生控制逻辑即可构建复杂的神经网络模型,AI编程变得简单,具体示例请参见[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)。\n", - "\n", - "目前主流的深度学习框架的执行模式有两种,分别为静态图模式和动态图模式。静态图模式拥有较高的训练性能,但难以调试。动态图模式相较于静态图模式虽然易于调试,但难以高效执行。MindSpore提供了动态图和静态图统一的编码方式,大大增加了静态图和动态图的可兼容性,用户无需开发多套代码,仅变更一行代码便可切换动态图/静态图模式,例如设置`context.set_context(mode=context.PYNATIVE_MODE)`切换成动态图模式,设置`context.set_context(mode=context.GRAPH_MODE)`即可切换成静态图模式,用户可拥有更轻松的开发调试及性能体验。\n", - "\n", - "神经网络模型通常基于梯度下降算法进行训练,但手动求导过程复杂,结果容易出错。MindSpore的基于源码转换(Source Code Transformation,SCT)的自动微分(Automatic Differentiation)机制采用函数式可微分编程架构,在接口层提供Python编程接口,包括控制流的表达。用户可聚焦于模型算法的数学原生表达,无需手动进行求导,自动微分的样例代码如下所示。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 本样例适用于GPU和Ascend环境。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "12.0\n" - ] - } - ], - "source": [ - "import mindspore as ms\n", - "from mindspore import ops\n", - "\n", - "grad_all = ops.composite.GradOperation()\n", - "\n", - "def func(x): return x * x * x\n", - "\n", - "def df_func(x):\n", - " return grad_all(func)(x)\n", - "\n", - "@ms.ms_function\n", - "def df2_func(x):\n", - " return grad_all(df_func)(x)\n", - "\n", - "if __name__ == \"__main__\":\n", - " print(df2_func(ms.Tensor(2, ms.float32)))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ 
- "其中,第一步定义了一个函数(计算图),第二步利用MindSpore提供的反向接口进行自动微分,定义了一个一阶导数函数(计算图),第三步定义了一个二阶导数函数(计算图),最后给定输入就能获取第一步定义的函数在指定处的二阶导数,二阶导数求导结果为`12`。\n", - "\n", - "此外,SCT能够将Python代码转换为MindSpore函数中间表达(Intermediate Representation,IR),该函数中间表达构造出能够在不同设备解析和执行的计算图,并且在执行该计算图前,应用了多种软硬件协同优化技术,端、边、云等不同场景下的性能和效率得到针对性的提升。\n", - "\n", - "如何提高数据处理能力以匹配人工智能芯片的算力,是保证人工智能芯片发挥极致性能的关键。MindSpore为用户提供了多种数据处理算子,通过自动数据加速技术实现了高性能的流水线,包括数据加载、数据论证、数据转换等,支持CV/NLP/GNN等全场景的数据处理能力。MindRecord是MindSpore的自研数据格式,具有读写高效、易于分布式处理等优点,用户可将非标准的数据集和常用的数据集转换为MindRecord格式,从而获得更好的性能体验,转换详情请参见[MindSpore数据格式转换](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_conversion.html)。MindSpore支持加载常用的数据集和多种数据存储格式下的数据集,例如通过`dataset=dataset.Cifar10Dataset(\"Cifar10Data/\")`即可完成CIFAR-10数据集的加载,其中`Cifar10Data/`为数据集本地所在目录,用户也可通过`GeneratorDataset`自定义数据集的加载方式。数据增强是一种基于(有限)数据生成新数据的方法,能够减少网络模型过拟合的现象,从而提高模型的泛化能力。MindSpore除了支持用户自定义数据增强外,还提供了自动数据增强方式,使得数据增强更加灵活,详情请见[自动数据增强](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/auto_augmentation.html)。\n", - "\n", - "深度学习神经网络模型通常含有较多的隐藏层进行特征提取,但特征提取随机化、调试过程不可视限制了深度学习技术的可信和调优。MindSpore支持可视化调试调优(MindInsight),提供训练看板、溯源、性能分析和调试器等功能,帮助用户发现模型训练过程中出现的偏差,轻松进行模型调试和性能调优。例如用户可在初始化网络前,通过`profiler=Profiler()`初始化`Profiler`对象,自动收集训练过程中的算子耗时等信息并记录到文件中,在训练结束后调用`profiler.analyse()`停止收集并生成性能分析结果,以可视化形式供用户查看分析,从而更高效地调试网络性能,更多调试调优相关内容请见[训练过程可视化](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/visualization_tutorials.html)。\n", - "\n", - "随着神经网络模型和数据集的规模不断增加,分布式并行训练成为了神经网络训练的常见做法,但分布式并行训练的策略选择和编写十分复杂,这严重制约着深度学习模型的训练效率,阻碍深度学习的发展。MindSpore统一了单机和分布式训练的编码方式,开发者无需编写复杂的分布式策略,在单机代码中添加少量代码即可实现分布式训练,例如设置`context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL)`便可自动建立代价模型,为用户选择一种较优的并行模式,提高神经网络训练效率,大大降低了AI开发门槛,使用户能够快速实现模型思路,更多内容请见[分布式并行训练](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_tutorials.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 层次结构\n", - "\n", - 
"MindSpore向用户提供了3个不同层次的API,支撑用户进行网络构建、整图执行、子图执行以及单算子执行,从低到高分别为Low-Level Python API、Medium-Level Python API以及High-Level Python API。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![image](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/api_structure.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- Low-Level Python API\n", - "\n", - " 第一层为低阶API,主要包括张量定义、基础算子、自动微分等模块,用户可使用低阶API轻松实现张量定义和求导计算,例如用户可通过`Tensor`接口自定义张量,使用`ops.composite`模块下的`GradOperation`算子计算函数在指定处的导数。\n", - "\n", - "\n", - "- Medium-Level Python API\n", - "\n", - " 第二层为中阶API,其封装了低阶API,提供网络层、优化器、损失函数等模块,用户可通过中阶API灵活构建神经网络和控制执行流程,快速实现模型算法逻辑,例如用户可调用`Cell`接口构建神经网络模型和计算逻辑,通过使用`loss`模块和`Optimizer`接口为神经网络模型添加损失函数和优化方式,利用`dataset`模块对数据进行处理以供模型的训练和推导使用。\n", - "\n", - "\n", - "- High-Level Python API\n", - "\n", - " 第三层为高阶API,其在中阶API的基础上又提供了训练推理的管理、混合精度训练、调试调优等高级接口,方便用户控制整网的执行流程和实现神经网络的训练推理及调优,例如用户使用Model接口,指定要训练的神经网络模型和相关的训练设置,对神经网络模型进行训练,通过`Profiler`接口调试神经网络性能。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/augmentation.ipynb b/docs/programming_guide/source_zh_cn/augmentation.ipynb deleted file mode 100644 index 395ff3663fc5eabbf28f87a2eef173476fa446db..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/augmentation.ipynb +++ /dev/null @@ -1,681 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 数据增强\n", - "\n", - 
"[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/augmentation.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_augmentation.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9hdWdtZW50YXRpb24uaXB5bmI=&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "在计算机视觉任务中,数据量过小或是样本场景单一等问题都会影响模型的训练效果,用户可以通过数据增强操作对图像进行预处理,从而提升模型的泛化性。\n", - "\n", - "MindSpore提供了`c_transforms`模块和`py_transforms`模块供用户进行数据增强操作,用户也可以自定义函数或者算子进行数据增强。\n", - "\n", - "| 模块 | 实现 | 说明 |\n", - "| :---- | :---- | :---- |\n", - "| c_transforms | 基于C++的OpenCV实现 | 具有较高的性能。 |\n", - "| py_transforms | 基于Python的PIL实现 | 该模块提供了多种图像增强功能,并提供了PIL Image和NumPy数组之间的传输方法。|\n", - "\n", - "MindSpore目前支持多种常用的数据增强算子,如下表所示,更多数据增强算子参见[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.vision.html)。\n", - "\n", - "| 模块 | 算子 | 说明 |\n", - "| :---- | :---- | :---- |\n", - "| c_transforms | RandomCrop | 在图像随机位置裁剪指定大小子图像。 |\n", - "| | RandomHorizontalFlip | 按照指定概率对图像进行水平翻转。 |\n", - "| | Resize | 将图像缩放到指定大小。 |\n", - "| | Invert | 将图像进行反相。 |\n", - "| py_transforms | RandomCrop | 在图像随机位置裁剪指定大小子图像。 |\n", - "| | Resize | 将图像缩放到指定大小。 |\n", - "| | Invert | 将图像进行反相。 |\n", - "| |Compose | 将列表中的数据增强操作依次执行。 |" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## c_transforms\n", - "\n", - 
"下面将简要介绍几种常用的`c_transforms`模块数据增强算子的使用方法。\n", - "\n", - "### RandomCrop\n", - "\n", - "对输入图像进行在随机位置的裁剪。\n", - "\n", - "**参数说明:**\n", - "\n", - "- `size`:裁剪图像的尺寸。\n", - "\n", - "- `padding`:填充的像素数量。\n", - "\n", - "- `pad_if_needed`:原图小于裁剪尺寸时,是否需要填充。\n", - "\n", - "- `fill_value`:在常量填充模式时使用的填充值。\n", - "\n", - "- `padding_mode`:填充模式。\n", - "\n", - "下面的样例首先使用顺序采样器加载CIFAR-10数据集[1],然后对已加载的图片进行长宽均为10的随机裁剪,最后输出裁剪前后的图片形状及对应标签,并对图片进行了展示。\n", - "\n", - "下载[CIFAR-10数据集](https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz)并解压到指定路径,执行如下命令:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/cifar-10-batches-bin\n", - "├── readme.html\n", - "├── test\n", - "│   └── test_batch.bin\n", - "└── train\n", - " ├── batches.meta.txt\n", - " ├── data_batch_1.bin\n", - " ├── data_batch_2.bin\n", - " ├── data_batch_3.bin\n", - " ├── data_batch_4.bin\n", - " └── data_batch_5.bin\n", - "\n", - "2 directories, 8 files\n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz\n", - "!mkdir -p datasets\n", - "!tar -xzf cifar-10-binary.tar.gz -C datasets\n", - "!mkdir -p datasets/cifar-10-batches-bin/train datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/test_batch.bin datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/data_batch*.bin datasets/cifar-10-batches-bin/batches.meta.txt datasets/cifar-10-batches-bin/train\n", - "!tree ./datasets/cifar-10-batches-bin" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Source image Shape : (32, 32, 3) , Source label : 6\nCropped image Shape: (10, 10, 3) , Cropped label: 6\n------\nSource image Shape : (32, 32, 3) , Source label : 9\nCropped image Shape: (10, 10, 3) , Cropped label: 
9\n------\nSource image Shape : (32, 32, 3) , Source label : 9\nCropped image Shape: (10, 10, 3) , Cropped label: 9\n------\n" - ] - }, - { - "output_type": "display_data", - "data": { - "text/plain": "
", - "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO29eZAk13Xe+53aq7q7et+mZ+nBYGYw2AEOQBAgCZAwV1EEZVEy6TBFKRSGF8pPfKQUomVbz5alsByhoBfRNo0QSZGWZIoSNwgiRYIEQQDEOoN19mnM1j29L9W1b1n3/VE1eU4WuqeyZ6pR3dnnF4HAnepbmTfzZN7K++VZyBgDRVEUZfPja/UAFEVRlOagE7qiKIpH0AldURTFI+iEriiK4hF0QlcURfEIOqEriqJ4BJ3QFUVRPIJO6FcIEX2MiI4TUYaIXieid7R6TMrVQ0QHiOgxIlomojEi+oVWj0m5eraKXXVCvwKI6D0A/jOAXwPQAeCdAM60dFDKVUNEAQDfBfAIgB4ADwL4cyLa19KBKVfFVrIraaTo2iGipwF8yRjzpVaPRWkeRHQjgGcBdJjajUFEPwTwnDHm37V0cMoVs5Xsqk/oa4SI/AAOAuivLd0miOgLRBRt9diUq4ZW+ezGN3sgSlPZMnbVCX3tDAIIAvgogHcAuBXAbQD+bSsHpTSFEwBmAfw2EQWJ6L0A7gUQa+2wlKtky9hVJ/S1k6v9/0+MMVPGmHkAnwfwwRaOSWkCxpgSgI8A+DkA0wA+C+AbACZaOS7l6thKdg20egCbDWPMEhFNANCXDx7EGPMqqk9vAOz3JV9t3YiUZrBV7KpP6FfGVwD8KyIaIKJuAJ9G9Q26sskhopuJKEJEMSL6LQDDAP6sxcNSrpKtYled0K+M/wjgBQCnABwH8BKAP2zpiJRm8QkAU6hqrvcDeI8xptDaISlNYEvYVd0WFUVRPII+oSuKongEndAVRVE8gk7oiqIoHqGpEzoRvZ+ITtYiKD/XzG0rrUPt6k3Urt6jaS9FayHxpwC8B1WH/RcAfNwYc6wpO1BagtrVm6hdvUkzA4vuBDBmjDkDAET0dQAPAFjxAgkGgyYciQAALMuyP/eJeB2/yMAQCvBiIhhwLiwCfr/dJiLRFv3Etspl3p/8OfPL7YgfuoqpcP8Kf06+lVJE1L5T4X3I7Tr6iO2SGKBs+8R3/T4+Hnmc1W3xuMyKqSsA4+jDjE/Nzxtj+lf80hrt2hHvNL0DgwCAYj5rf14u5sU4eHzBUMRuh8LcBgB/MGS3feJc53Npu10s5Oy2EdfRaueQxDlsa++w22Gxb2OVHePI5bLiXytfF/kcj8MS33ecc3HSy2X+bqUi+/DnABAIBESbj8NAXMNiuxXx9eVEsml2BYC+vj4zOjq62p83BZWK8/yWy2wrx/kVNvHJ+85xz3PbrPjp+nDu3DnMz8+vuJtmTugjAMbFvycAvFV2IKIHUU1diXA4jFtvfwsAIJFYtPuEfXzCe0J8mnb2ctqF/p42x477utrtdsgftNuBsMiX5edDXVxK2O1imffR3dVpt31WyW4XCuyums/zxBSJOicgS9xkWTHpdHbFuZPhPsVCkYcHHrf8Aeho52Nra+PjDgad+86JbRn5Q+bj45b7K4tJ9Tf+4H+fx+qsya49/QP4N5//n9WOJw7bfebOHrfbl
sVjGtx5nd3eueeAY8fdQzvtdiTK3zl19Gm7fX7sVbtdSvE594t9xLvZroEIX0d33vNOu33tPh5HfpmvRwA4euQlu12p8DkslvhaOHb0NbudTMzb7UKRr51Ske26uMA/Euksb6dsOV2j+/t77HZ3D18Llknxd/hSRT7H1/N3v/3Dq7Ir4LTtzp07cejQIQBvnBg3HGKGlQ8/uUzW0W1hkW3V09Ntty3xABKN8TXjD4V5F+I+q4hpfOXHt+Zx5513rvq3Zk7oK/1iOPQcY8xDAB4CgEAwaI4eOwoASMyLkyrmKerlf/RZ/DRF0QHHTjIVvgHTlnjaIX7Cy+b5RszmxE1m8YU5L5YEkQBvRz5N+cUEGQ6zcav7yPB3xI1P+V677RPWLokfimiAjzUtJt5F8bQXi/GETj7+AQAAEj9kEE8U2Tzf7eUSt/0B59gvw5rsumP3HpNcqtqjt4snI9M/yO0A/8AN77zGblsVMTMB8FX45qtk+TzklxZ4Wzm+8Ub6+LrYueNau73j2l12e9vIdrs9MMBjCgb5fJS7nDmbdmwf4r+V2Tb5PD+VJ5b4x2R+nq/HQEhe0Gz87l7eX6SNt7OcXHLsOxzh661i+BwEhf2Sy+IBpeBaQm1oV8Bp24MHD9p/l0+tm4lCdtnx78UJLmMwfpz/tpzke/med99vt+OOhzjx5C5XhM0Y6BXSzH1PANgh/r0dwGQTt6+0BrWrN1G7epBmTugvANhLRLuJKATgYwAebuL2ldagdvUmalcP0jTJxRhTJqLfAPADVGWkLxtjjq7W3wcgGqgtU8Tqf5eQWUYHWfscEFpiNObU0B0aWYGX4PkSSxpG9AlFhbYuNHRT4f6dPbzsLpe4TyjI3xXv4AA49bWC0OBKZd53TPQJtPG2IuLzMvFyzydekpWlTle3YG5v4/GmhU5YEgKrfJ+TSjqXnquxVrvCGKAm7RQLvO9slqWK0X0jYqx8rFKTBoCePqF9B/nZY+9erhx2910H7fbIIMspnZ38LrAUYEPFIuL8S51VvBzLZVg+AYCCkKpiUT7P3V0s8ey55nq7ffz4SbFh+S6G7dIZZ71WvPvFcnLGsW8DPm/y5enSEp+3XFZc5y4VlzXb9Y3fd9u1Jcjx+Yjb0+NnHf1efeYJu10SL7+D7WyfnLhX4j08D0ndXOrprTwzTU2fa4z5HoDvNXObSutRu3oTtav32JxvNhRFUZQ30LICF0QGEaouczs6eBj7Rnip0xtlr4BghZfj6UVehgKAVeHfpZzwhvCJpWxcuDYGhLyRWGb3L+Hyi54OXlqnxBvvovBkyeWdXhnS/7tduBiWiuzF4BPudEHhJWMJN8mA0FMKQrYISb/sitNXupAW3hHC0ycsvGrKwtVsObM+mUNNpYJyzfuDhL9/OMTy0rLwauodYplk5w3smQIAAzu22e2g1CWEjFQq83VxYoq9X7Jn5riPj6+Xk6+9YrfvOMAyyTvvvIOPoU5OSIol94Xz/N4wJFxHQyH23OnrZ0npwvhp7iNcJtM5vo6SST4fgaBTS4vH+TvSH166yksvrHBYnKd1pD4OYqNhwOekJKSuyXGnJ2c8xtdlrIs96WaXeF5YmLpotwd3sCutdFlz+KFfJj5lvdEndEVRFI+gE7qiKIpHaJnkEiBCd7i6+6iQHjqF50d/nINlLBFKX+dcAr8I2ZVBNQURqOIIoRaeI5YMHffzd2dnOVjDKvEeU1levmUtp/TTHhURoQUR+i+Wf/KNu1+Em+cyLB3EgrydgFj+50VwVK7klFwqYtGXSPO2Elk+B2kZnFNan99yU6mgkK3KCe0iCCPew14nt99yq93ecc1eu50qO4/p5BkOZEyK855OsG0WEiyzTE2z7BQXXi7wsbz0yF99024Hf5nPwb1vezt/HnRKaUNDLP3AsDySEMvyF1/iiNWACFJq62BbloUUVkzzMYjLzhEZCgCWuMZkVKNPFKyX13aXiHbeaqzm2TK3yNfIuXMXHN8pi
L91REQgYjppt0+8wpHCQ6N77HbXEEtrWCXFw5stTekTuqIoikfQCV1RFMUjtE5y8RP6u6pL8o4gSyaRCLd9fl67REUwUKnsFF0cDv5GJE8SQUNWkZfRFcNtI5a0JsBLrlSRvRAsi8eUFblfypYzQVEqw9u9uMjfD4qEY/E0j7U0zUvo3DJLCjv72NtjYIC9QKiDvS0KIp8JAKTTvL/lFEsu88ssKZ0b5+9b/vUxPfkI4XBVKiv52WsgF2Uvo7NJHtPLTz1vtxcXnAE9Fyc5yCYoPH/k+Sw4cqtwe7ifj292mj0b4sILJJXgZfWpsxxwMjzc5xhHMMjbGt7BeV22ifaFaZaHTr7G7YFhln7OXWB7oySyLRaFBBhwXtsy4CwcYAkyl+d+8biQ6Nzn6PEgUvbg83NxYsJun70w4fjG+Bjncunr4Gt0ex97qU1d4OvntUMv2O2D93XZ7VhcSF0tdADSJ3RFURSPoBO6oiiKR9AJXVEUxSO0TEMPBvzY1l/VqeIhdldrj7HGSUa6j4lKQXVVXQoigs4nBKzeDta12trYhS65zFpmp9AfUyLy8/xF7pMusIYeErseiTlPXyAo9OoFdksrGBHxKtypOuOsMd99PSeZSk6JajRZ0b+PNdRC1rnvdJp/m8NB7rdjiPch83/PJFlnP/eK05XravD5AojFqvuZTbBdx8ZZVz529Aj3F/q0VXC6C+ZS/F7AL3TzXIG170SK2ymRVOvcBBfUaIvyOdi/Zz/vQOjvP3vycbu9a/duxzj27edkYL29fE3JXOWdcdaufWV+V5EpyChmdp/MJdjl0bJk0RRnnvt0kvvFhQtkWLxrKhZlEjRnAYf1Q96Dq4nGLsVkI5vyH2IfshLZqs+h3KciIqllgrpU1pkAbmKGc9fPiLZlceK17QO8vxMv8DufgaFhu73vDll0gq8LnygkQ/VZu8RhiG5vmN/Wgj6hK4qieASd0BVFUTxCS90WezqqroiBIssTYbEEj4U5Gq6QEwmZ6hJTdXVxQi8ZLVa0+PeqJHJtx0Sdzsk5Xga/fp6XynMp3ocIsMQukTDsI+/giEcA2D7M2/2bw+wO9czYtN2WpekCPh5rKsHJpLJpHlNHh1iCW6JEXsS5NA+JJXiM+G9lkcVpp0h21bHIS/nHmii5+P0BdPVU3f7Gxk/Zn0+dY7fAWJCPbznD0Z3p5KxjWySSiSVEvdCEKDsXCPOx9g3yMjkq5LaR0Vvs9g5xns6+8gyPm9gupbpE93Pz7CJ6001c9/TavVw+b4dwT2y/6za7/eoJPreFPMt+haBwWwRLKbLMHABMT4tkYDKiuluWYRQJ40Sx6vWlcdbv1YqVv+HrjihLWcicz4VDZnHIL7It4X/tFIWtY0K2AoBkRpwvkdP8yDhfi1HhChoQrrFHn/6p3e4dYTmzeztfF1SWUrFzhPL8VMRc4LuKhOr6hK4oiuIRdEJXFEXxCK2TXAIBDPT0AgByi7yE9hEPKS0SS+WKvPwKiOrpAJAVybPkL1SuxMujrm5eahVFkqQzE7ykXUwK7xIRNeoX2ZPiEe4zEGDZAgAiiywl7I1zFOFUD39/JsFLuYIoy/bSKZYnfCK/dalNLBE7eVkHn9N0nZ0sT3WIUmV54QFhiuwRMtrvLOPXLAqFDF5/veoJcOL1MfvzyanX7bYlvFc6Onkc+/eOOrZ144Eb7fbUHC+Nz8/x9/uH+Jzs2sPeKR29LEnMiHJtZp6lnwvnWQ6ZE0m+RJp0AMB79rHMkknzOES+OJiiWIo/y1LO3v0syw2OcGThs89z6bPpGbZLqS7pWj7H210SycCi7bytivCKyGT5WNeXxs+Cb/DqEDhyzovrtSIiPEvCCykUEt5vjg2LKHHHznmO6O7myN+3v/M+xzhee/mE3T53liNCLRGNPuZnyTQyyrKldZJz3b/205/Z7bf+PMtv0RjLsFadAiXzdsk/lVeRsy7JS5dTZPQJXVEUxSPohK4oiuIRWii5BNHdV
12adLdz4i2fj70WEkn2gCiJoBGfVZ+ci5ecRnjJtLezV0EJ3D5+huWNTIGXqBFRET4S4u1E20Sldz8viQ+POSu0l4v8nUInSy793bxvEh4NsnxaVpSpy4hgoqLIEU5CQqp3IAiKsldGlMYKilzZ5YKoDm9dxav0y5BJJ/HsE48CAAKDHMSz58BNdjsqklEduJ7zoe/fx4nIAMDKixJfPnF+IEu28bn1+1mGKJXZlpkUB4x0CulO5ie/MMvXWqSdS44BQGecvaiu2TPKYxLPQ7kEB/SceO5l7pPjY73xfe+32zfdzJ4QuUMsubw+ds6x75hYsnd29Yq/8D2QFPdJofAmBRaZVfQCRx/pseK83hwSg/DsOT3GMkZOlOm77gDLXmFRV9G3Sr7xigjmq4hp7u573uHod+Es2/pPv/inPCYhdV2YE154Mb6u9gop9eSTh+x2v/Byue4eDjjKwimnBUXpzJA4jsWsSMJX5Hv2kgxULK1ePlKf0BVFUTzCmid0IvoyEc0S0RHxWQ8RPUpEp2v/777cNpSNh9rVm6hdtxZXIrn8GYAvAPia+OxzAH5sjPkjIvpc7d+/c/nNEFCTVygYXLFHWATPxMDeEIG63yGfKDtXEvJLOMrBJfPT7CGQnecl6jU9IthDpHmICJll/x4uNeUTncp+57jl0jfg52VTR4jH3tvNJaz27OUK4mcvcJ7lE6d4GRgKCJnEsOxULjtN5xNeOcEQj6tSkQEsIgiD3vBb/mdogl1LxTJmx6uSyG23/Jz9eTjMb/57hJPS8DaWoBYTTq+h8TGWSooVkSuFRHm/gMglbsRStCxzxIgygyKHfXsnez8siHzyvpDTA6hiHFEw4g/cbI/wcYxu22G3IyKnvw9sv5tuZI+cri6Wih7O/dCx7+kpvqZGBoSHBfF1KPO1J5Ms3wDHgabdr07kOZFOJ47AIBHU9obLTUgM4xfZ2+hvv/eI3U4m+R66e569w95177vtdlgEW8kxyWwosm5Be0cHJB964EN2e+wkS7E/+v6jPA7heXTiInu8dBNLxZE8H+Czf882DPSyZOYbZDsDQCbBxxcULlNTSc7ZvpziPvl81ebprLSxkzU/oRtjngCwWPfxAwC+Wmt/FcBH1rpdpbWoXb2J2nVr0SwNfdAYMwUAtf8PrNSJiB4kokNEdKg+65myIVmzXct1hZ6VDYkruwJO287Nza3WTdkgvKleLsaYhwA8BAA7h3pNrpaulkoy/wRPCJkMLy2Kokp92ReBJJ3lpXpStEd28OGZMn++q4+Xe3u2sTyRzfPnI/s4/0fI8I/P0jIH6kQdXgcAFlhL2CFSayYyvJy/5jr26oh3x0Sb3+IvzfFYl5bFskxIAT7jLDVWEks2obLAEstF4QjjDOy4SqRd29vjJtZerVwfFLtIiICqcA8vPbMiiCpf9xsf7eblcbgiBp+XAWDi4xJ7eESiIoWpyNNSEQFZ7b0sYYQMP8T6o05J2YSExwTxPsgS9hAl/YJtLH9F27ldLrBdFy6yh1RvG8tRD3zwfY59H3rlnN1OC8+LfIEn14LI39LV4VzWXy3StgcPHhQWFZ5mQk9ZEqURl5f4nJLf6Y0yPcfXwzOHOCXt4aOv2O3kokhBLTy8briJA84G+lk28wsbJFNsp0SCtzO63elJtW07/5b96j/9J3Z7/CIHwj33yqs8jgxfC6cnWH6JDfHnC0c4PXT2W7yvPffc7tj3UlrIwEJGKRCPV3q0VGoBWDLgqp5mPaHPENEwANT+P9ugv7I5ULt6E7WrR2nWhP4wgE/W2p8E8N0mbVdpLWpXb6J29ShrllyI6P8CuA9AHxFNAPj/APwRgG8Q0a8DuADglxptx8DAqnkryLfhUgqIRvgtcnsHyxOTc84UoWcnePkZEOv80AznacnPcJ+9Ayyz3H8fSyCvX+QlYscIL4P7ejlIaHaOl8pdXU5vCF+FtxsSwT2zc+y1EojwcmouMWW3L06xB0QwyMfaFReVe
nLCgyDg/C0moadUhPwiAy9IeAPVxxU1y66hUBjDO3e/YX/5PC8pZ5J82YW6eMlcKrM8ATi9n3JpPj8lw9uVVe7Lfm7HRCWqgV4+52aRr52ikKNIBHlEo3zdAYAwpSO9rSUC3HxBEQQlcv+kM7yslumAw+LcJMU1FY31OPb9zrfdbLdPvs65Ro4c4+V+OsmSXijolCObZdcqBkBVApDXmIwSWk5y0NeTTz9lt89PsucGAMwn2SZL4hz5hFwVKfD9Nbsgt/uk3R4dZY8i6fFyUcwJJZFnJ5fl/QJAOsX/Fs5COHAHBwe9PPaa3S6m+MaZSPA1HQvxvrd3sg3OHnrRbvvDdd5529jWy2WWiByZqgyfj0ItMPByBY3WPKEbYz6+yp/uX+u2lI2D2tWbqF23FhopqiiK4hFalsvF7/ehq6vqdF8O8DI2nWZXByPS4koH+/MXnDlU0mI5Ho3wb9TUWV4SDUZ46TIysstud23jAI9gSqxlRFDT9ls4H0NkmuWTaNnpxmWBx57JcHs4xvJNUQQ5UBsHHWxvE9WEuljiSS3w0np2hj0ISuQMasqLnA+y5ElbmJd/xZyQdUIrB3NdLYYAU0tdKlPBZlO8rA4LSSOVFMFDeWeOiqwokBwUy/qONl7e9nfzsjXew0v0/i7ehxXgALNcmMe0uIvPecFi+QslZz4US3gVVIS3jSUKV5OQXLp62EumYvG2pMdRZyePLyQicxIppyRgSmyzWw/wddHVwefgkUc4kGVuhqWJZpPLZ3H0eNULJRDg60dKGkvCoySR5nv2wlRdfpwB9hDrEeeit4/vlbnX2SbHj7Ds8eiPOOinM87f9QfYBoWiyIckggH//gdOV6qgeKSVHi8xUZD9lluvs9svPXXSbmdF+NKpBSGbCe+n7jJ7ao09e9ix70Q/35uL4loKFvnzsryHagXAU8nVq1LpE7qiKIpH0AldURTFI7RMcqlYZaRqVWICRbm0Fr8x4nVvwM//yIqlHAB0d/ASp6uNlyu5JZZcBrbxEm/k5nvt9pEJXi6eGuP23cO8lE8k+PPBPRxw5INzaV4UwR5d4lV0cpalkqioIDTcI/Zh8RI6eDMv2XPCE+Zn33vYbk+MO+Uev0NCYVlAOMagJH6/faUS1gVjgJpEERAFscWLf+zo5PFddw0HwrRHnN4lfnEtZIRXRF6kF4228XHs38vnc8cuDiDxBVliSwtJYMcwB3/tP8uu2PEep6dIj6h2FRA5cyoyh4m4VmUeoHJeBHaJ/kHpAQSWmnr7WIYDgHSWr7FMguW3kX6WJj7y8++129/5ux9hvchk0nj6+acBADnhWdMW4fvvQx96wG6XRfDb4de4MhAAdHaIa7zCMsi2Aa5AVZphaWE5w+che5plj27hOdImql+1d/P5ibTxvdjZ5ax21im8oeJxPvfRdrbhfe9+K49jnq+9I0e4ELxV4mv6QkLm2eH7MjDtjKJOLYlUzh0ihXiUPb8ujvP9n6yd82J+/QOLFEVRlBajE7qiKIpHaJnkAgCX0jtYwvvCCLnAJ/K6WKLo61KdWpBMioCbAi9HhsUS7I53vctub99/l93+1le+bLeHhNeJX1QQuniG8zoMXcMVhCO91zrG0WZEboZFXsJHK7y8LOZ46Tgv8k109bO3Te/QqN3OpXlJ6BP1oq2Q8229DCwqibwXJIrdkijAW59+t1l0tMVw79veAgC45nqWpyYvspfDiAio2LeX0wkP9TtzRPlFVZxUSub1EPlUxHG3t4klt6hW5Q/xcjYoZKBchmWr229kWWZ036hjHKWKKLQtnoHKFREQJ3KV+EWESikvUrrKvDoiMIwiwoWnLmCsIKSxgEjXbBX5fPQLmebt77jDbn/jm+wN0gwKhSLOnKvKDMuiwtPe3RycF42yDSYn+R44f5ZT5AJAexvbxGFP4cGRSwiJQtj52j0c9LOnnz2YOoQ0NjvL0ki3qCw0vMMZDJhK8r5D0smtwvNNXOzjPe/neWRRS
LozE3ys8wXeUGxZyL5C3gGAgPBuGunge6JtkL2ZLp47Z7eLtTxVpuKs2CbRJ3RFURSPoBO6oiiKR2iZ5ELgSieWWFbK/B9y9Wlyok9dLoOeXn4jPRTjZdrtB/fZ7QN3s8yyNMsST7jMS7NrRGrNitjJ0AC/MZdeC9mE822zLOhcyomKOeAl8esXOafFa0e4sOzdd/G2eofYIyeZ4qWcSPGCvtG6qjoyT0tRSCtCgloWxW4LKbGxJhKLRfGWm6uBGDfcxpJL7kaWVto6eekpTWnqCv76hMTQ08bLUJHKxfFEIqszyYAMiOurIKoX7bmWK0ZFRWriXMbpRWVEyl2QSMkslsyyWo5FMq+OCHARaW6tiki9G5Ayo/MZK7XAksD5s+N2+56332a3syWW+mIR5zlsJhXLQqaWzjmb52MJx1jecgQAjp+z212dTrnBEoF3JALKpqbHuD3JQVLk4z6//Iv/kMeU5sC0x556nPf9Kkt8vZ3smTR92nl+RrbxNbBcEgGLQb7venrZ8+am/Zy6t/gRvha+/KX/Y7dzKT62yQTPNQg4cxUVRLH09Dx7wm0T5yoU5Xugb6DqETY/6wyslOgTuqIoikfQCV1RFMUj6ISuKIriEVqmoRsDVGoudTnh5hMSroMyAZDfx1rwtUPOEmGRKP8uje7i/Mi3vJ1djIb3c17pl5/5it3euYO3NXTDTTyOftZ8AzF2W8rmWRPLJZ1V6mcmWeNcmmGt3BJuWdEO1hv7RAKg8cmX7Pbg8IjdLmeFS2eOdUTKsNsYAFhGVLYX2m40LHK0D3E7GV4frdXn8yFacx9sj3CkYFtMXGoiiZKMtqR6DV1q0SLytlISbVl9XrxHKAt13lF6T0Sftnexq5isDG9VnNGEEAm5jCi95pMbtkTSLnHdGogDFEm+SLiehcX+gpbzGastL/Ksi8jJuTOso27fz+9+5n1Cs20yFVNBsfYOIlvgSNGxs6x7f/s737TbT/30p3abjNO2M0ke59x5vm+C4qWKLKsYGuJ78GdPcD70gsi/fuz0KbudmeF3KIk53k5XrzMKeE5EbyaX+Zi6RXK3osXbffxxzm8ejfO7ru4+drmdL7Eeni3w9i+mnK7GRtyDMbFvvyjP19XLx32pxN7rpzlCtR59QlcURfEIOqEriqJ4hNa5LRIhWFtCLImISSvPy5BoTOQ6FpmNBnqdLnfjU+yOt+f299vt7Te9X/RiaaWU4uVNZwcvafr33Wq3MwFejh996QW7Xcjxd5NJZ+7q+YscDee3eHkdifBpHtnNcsrN+zjStOxnN7agnxNWBUMiUjDPS7bseWd+6YqICC2Ln+m0SGoW6+V9DIpkZc3E7/ejo7N67oxwO8wK90lTYOmoID7PpPncAkBRRLwWCnweymVel5eESxiPK0kAACAASURBVKKMkM2KpFZZUeKsLFwbO3pElGEnn/OuDk6OBACRELubWSLSFCQiP0VUc4eQ1RZmuX9eRERXRPQwQST8spw54eMi7/munew+l8vyuTIiYrWzw+nO2kz8AT86a+esJK6xZJqjIY+9/LLdnjl71m776qaaWECWa+TjNyK3uk9EjW8XMmSPSOy1lGUZ6prR/Xb7vMWSZGKRJRArzHYGgBnhPpnNWuI7LGmRuIfyJLab5Qhyn4hGrvjF8YREUkE4/a0tcR23ie+3d/Lx+UU5w0ot0tvvr5MEBfqEriiK4hF0QlcURfEIrfNyqVRQyFWXO7EwD4Mi4o2/TyQ/srgdbXcuOT78jz5st+/+ANe+jffxEnXmzHG77RfbTYjItrlznGd5MsXLr8e/8x273S4it/IFp0fB0CAv4eNi6Xt2gt/iF8W+e7aN2u19N72FNyRyoy8m2FsmK+SopZwztzIZPof5nIhAE14gRpT3O+BceTaNRCKJ7zz8fQCAFWRvhKUlXsKml9kzQeYIl/ILAMzM8Hcs4Q7TI5J4dfexdBT28znILLIcduo02z4pyhXu2M0Jufwib3W8w
ylH7d7N0YTbd3DE6u5rhAwgPBY6RPnCioyQFEvlkrie/SIk2l/nfTQ4yvJPJM7XRUkkWhMrfPT0OCMym4nf70d7TXIJiOu7uMDyz/wpvtZ3tPP9QD5nlGQqx9diXtwTFGW5KiwS8s3NcETo4edesduDHVzibWGJbb4sonLTQunIzbM8VNuj3QqIExkN8vWWFzLQnMinb/mEnBlgyUR6W/kicq6qC3E3LBdmMjzepEhQ1t0rblTb22p1D7U1PaET0Q4i+gkRHSeio0T0m7XPe4joUSI6Xft/d6NtKRuKoNrVm+g9u7VYq+RSBvBZY8wBAHcB+BQRXQ/gcwB+bIzZC+DHtX8rmwu1qzfRe3YLsSbJxRgzBWCq1k4R0XEAIwAeAHBfrdtXATwO4Hcuuy0YVExtKSMCCEi8+S2LJQmJYJlI2LmsvPUtLFeExdL52MscrLM0yW+kC6IKeGqJl3LjY8fsdtqIHNoW928XQTHxiNOjoL+bl5hTM1wurCw8MbIpXvKPO3JEH+V9p9krIxLg4y6HWWpYKDvPQVQsVWOinFU0wMv0VJaXmzKXN4CSMeZF4Ortmkyl8ehPqmXKuraz14Gx+LhfevondnuXSIjW1+uUOi5OiHMorpFYDy9Di6Ja+oyQtu6/8212+9abb7DbWWF7n8hbfvbCebt96jRfKwDw2hG+jro6OfDtFz/6C3b7nhs4EVxIZA/bPsyBbkUhucg87jI4qgRnrmtfQAQgdbGNo2JZX/GzJCALEQJNvmcJqISq+zUikCokPDGCJR7vzrgI3PI5ZdKUkET8ovSbLyRKSM6wHFpIsNdSaoHvj/kK7ztR4D6jt3Mg4fQce7kklpyJ19rb+R7OC8+hUpDHkRfBQTkR1CYDyyJi3Ib4freEzOIPOKdbX1kkdxPeV7MiiZ5wXkMgRLXP6qQbuc1V/9IAIhoFcBuA5wAM1i6cSxfQwOrfVDYyalfvorb1Plc0oRNRO4BvAvi0Mab+LcPlvvcgER0iokOZ3OqFTpXW0Ay7FouFxl9Q3nSaYdtsOtf4C0pLWbOXCxEFUb0w/sIY863axzNENGyMmSKiYQCzK33XGPMQgIcAYMdAh7n01rciclwERNJvS6w3iiJwY7DT+f7mBw8/Yrd7Blm6GJDLXVEpPhhkGaK9TVR0F8vCNiHdDA2wFJBLcWBB1M/bAYCFOfbeKImc5B2imn1ReFmcfonzoU+d4HwRhbK4cYI8JvlWvW17XQBJmwjICLOsEBHSSjd4HAdu4JJ3wItNs+vo7r3mlz7+KwCA8ACXJsumWD45/Rp7KQwPsY18PufzRTTCtilW+Jzsu5G32z3MD5bZPr4uPvSBf2C3pQSVEZKLSNGCssgVky87c27MzrIsd/7sJG83xuObnuBl/bmjp/mYRDDYmWk+fXe+96Dd3jW6zW5L7xcA8EWEd0hQSJNSMiOR86S+WACad88OjgyYRKIqdxSyfL21Ffm67B/iY1k4z5scO8eSFgDMlfi89PSwNOMT90qmwveaVWJjlbP80JAviIA6IcvOTfO9mEmzFGNKwq0KQCzM801ReN5QmO/tssjXHhJlDo2QPvLCQ6siXLeKYm4LB52ePiGR66g9xrJTVLRLYrz2/eE8BAdr9XIhAF8CcNwY83nxp4cBfLLW/iSA765lu8qGQO3qQfSe3Vqs9Qn9HgCfAPAaEV2K8f1dAH8E4BtE9OsALgD4peYNUXkTaIfa1avoPbuFWKuXy1NY3av9/lU+X2VjhEptzRsSniORgFgyirfIRuQ6qRT5LTIAzM/zcj49x+1oiaXCCngfPd0soXRtE+XlRB6Ni5O8HZkC1SfKkcmScwDgJ5Zp2iK8lBOOO/DLf4glolVkScgntIBklpedxTDLDh3bnFp1JspvxlMi30g+w4uw3jhXS+8bcHiUpI0xTbErERCueUKcOnHE/jy5LM6n9OoQQRvpulwuMp1uRKQBLmXZy2F5jrc1c4G9XL7/g+/b7aWU6J/m8
9whqrB3douq63GnlDYxwTLLQB8HE0XiLPc8+Xe8v8XTr9ptS1yrY9McKDUh8svsPcASUmfcmaeoU3hORUWpt842Ph9BEbwSiznH3tR7tkJArrZfcfmViaWEjHBmmRKBQVNlpxSUFuXXsMA28QdFDh7h+WHEPZET950RAVYhIWlcFPKn9AqhulMxtyTSUIvrzVi83WCUZaC4zOsjJGF5TctAsajwO/L5nYJIUIyXxHaNOG4S3/FdKn9ITQosUhRFUTYuOqEriqJ4hJblcgEIPqouDyNhXtIY4c3SFuXlZ5tIaZotOb0Qejt4uRIQ3y8u8xK3InJJZEVZlMFB9vaoiOX//ps54OXpn/yYt2l4SRisW/rkxNv0eIeo3C0CCvzCCyEtPCDOTom0nAk+hgKxDNG/j39/R0RFleq4+PiW5nkcobyQgUaEt07WGcDSLCrlElILVXnlse/+nf35+DTnpPGVWDp69VXhQVd3PstS0hLn7dFHHrPbIeGxdOttt9vtYohzfCRFwMmZC+x5sbDAOV6Ked7+5PQ5xzjOnuN+B2/jILb/51OfsdvPP/sMj3uZPV6SIlVwTkh3Zw6xPPTk4Sm73RZwyolBkX7VLzwvOoTksn3XqN1+4Bc/hvWCiBCoyYolITGkRSWtxSTbc1G4sJaDzqnGlEVKWuldIrxFSkYG8QgPL5EfR6aSlYE7IrbLKYfUpZ6V/5aBQtLhqiJzszj2J6tcCflFbsexfefzs6NCl6ikVRHbkreAfT+Y1d1c9AldURTFI+iEriiK4hFaJrn4CAjV3gZnxbLUL/KjVETgTlYs0/1B55IjLKp9BIP8/ZAo7twZ58+n51iKyY6wtDKwgysIXZzlt+Q33HGP3U7PscfDmVMcxAQAmTR7mgT8PN5OsUQkkdth6iJv68J54eUS5rHGB1l26hepUSnvlJ1okb/TvSQqJA2w98b2Lj7WsWPsddJMgsEQhgeHAQB7R1nOMuK4AyL/ip/k8tT5fGFEytyQzJsj8mxs28ZeJ/e97312uyPG560zwgFHx45wUNOpMc7ZMjQyarfzxjkOv5D+jpw6wds6xcFgsdEDdntykvfX3cXtAeHJEGvna3ZxmoNuFi5ywWUAmJvnazVvCe8g4fUxlWB7333/+hT/BoCKZSFdy0WUTLIUmBERpBlRAUgqCvEuZ+6hcNTpjWN/R8gS0QCfr2CI+0uZJCikHCm5WNJDxiFROOcO+Se/lESkB5olJRDpYSPsIT63ID1eeKyBulwu8vuRiEgbLI7JyGLiNcmtvpi6RJ/QFUVRPIJO6IqiKB6hZZJLIEAY7K/+npQW2CsgJ4IAMiLOxPgs8V3nsONx9t4IiRwsuQy/cY/Kt+xFbh96+mm7fc1+EfghUrfKt98xEeDir8vlEo2yLCCXoTmRKrQscju0i2Xn3bdx+tWI8JAp+8VSrsTeGrlxp+TiS/GSbSDGHh637ePUsQNdXMHp8BQX8G0m5XIZi3PV3Cd3vfVu+/O7773XbofDYhkqAyfqvAAqwsvBLwLDZJ6cXJHPycIEH9Ninr1FFuc5F8sZIbNMzrKN2wc4BwnCfC4BgEIi30eZ5cFHf/qU3d615ya7vaNHBB+JQLSY8Mgp5Dmw6EySpbv2Dqc0YRm2//QS5wHq6xu121mR0vWxnz6P9aJcLmO+dq9KG+TzfE0XhadYMCKDn5x5TOQ94XNcA8ILRbRl3FtZ5LvxySAeEVQlpRupq0gpph4pZdQHIF1CFh+XUkxAyiRivpDjqJdKnFKQ+Jv4OCJy21ySXOrvE4k+oSuKongEndAVRVE8Qsskl1CIsHNHdRnWSbzEHRvnJc2MyNNRFIWT29udw86I1LhWhZelfvF7tSiqlqTSvGTLl0QeCSPyfLSzd8LMNC/ZJ8Rb/Epd+pPBfpZ+qMJL/qUEBw2F2/g4ujpZGpFVXwpFWaaEl62ZAvcppp21adpE5ZZrRSHjbUM8pvEJlpQW5vg8NxOfj9BWW/ouJPlcv
fTqYbs9MMDndnCAA8ZKJWdQzZIo+gvh1RMQ53ZkN0slO7r5fF48xcE6mTTLJAODfG5iogCvX6TqzeacctbwMBeJnp7kAKl5kYNkeBvrgySDbgrimET1qJL0XhBSXbhuWV5cmON/+Njmg8IrpyiCcS4Tc3LVVIxBqVTbl/AECohrVMQ+ISxyoNQrGCRuYem1IhybYIn7S8obfiHF+EXglS/IYwqJMUlpQ26n/m8SYR6HxNHVxdeMvF4LQmqyhIeMlFnq9yU9ZsplcZ1Y8j5449it9ahYpCiKomwsdEJXFEXxCDqhK4qieISWaej+ACHeXdW5ckLP7R4Qbktt7C42PyPKTgm9CgACIVGqTPypIiqQl0Su8+Uca9ptwnUwn2XtNJfnSNGi2I5VkjmQnYl+0kmRnCsucijHOWI1l+M+8ws8Dll93OHqJCqDhwLChcnpWYeQ0BJHrx3l/WX5+088ccxuv3pqxYpjV42PgHAt+Vkhzxr4009zgjMjkqvFY3xMpZIzv3xeuLYFxLPHrlEuW3fjXdfb7T07WU9PjLPWPb3EtgwJe+/pZT19bo7fvdy0/0bHOG64ab/d/vqff02MiV3xSuLdSrHIbSPLtkf4+GSirdHdnKd+dvykY9/SdS8q3r8cOMBurvksj33H8PrVeg4EAujtrb6T8Yk835aMYBV5z6WWnM8765GSX7j2ORJT8feLQiv2V5z3mv25Q38X93t59RzojnGIP1WEgF8WdqtYK0d+Sg1cRoqWRHlAmZzrcm6LjiRhK+jm1fFVat9TDV1RFMXz6ISuKIriEVomuRARApHq7iNxXrr2tAt3KJFnORjlZUZyqW7YlogWi/CS0xJ5z60CL/9DMf5+UCQA8vtZ4imIZU2xJN3CxFKxzuPJiKW2JTzfgsKFCiLJUEKUv8qJUmWdIpFRQOZiFmPNwilPzMxz5OGScMtMZdi17kePc2KpmfXxWkSlUkH2kqwkxv6+D3yI+xTZxc8vZJZKnTuWceSe5mOPCCluOsFL+VSCk2Ut5ni7JBIfnXz5jN1eeIZdAq/ZzbLKHddySTjAWQ0+KuxnhNuadHX0+fn6Ejm0kBNyQkBEO+7azpJLPs3utQBwvUgq9/zhl+z25HmWZnIipNqIkoXNxu/3I14r21exZGSjdLnlc5IUUlAgWJeHXPzb4UoomkFx/ZTFuatIGULILDKnOEmX4srqvpwVIXvI68+IZ10ZsVzMiXztwv4VGd4pS2fKfdVFqcrSljFxjcqSnD4h01yKkNdIUUVRlC2ATuiKoigeoWWSS6VCSF+KdvS325+3t/HSNRjlJUmbcOvo7KyrIJ7MiTZHQ6ZFmbVSntsdIY6ejIhkXmWRlz0gkv6ExM9eMCzfWjt/D2MiglXkZHIkEwpF+Q/xLpYOFhdZMkmJJV68h8eaFYm9Tp9zLs1PvMYlzQZF3vTB7aKKvMhD3ieiVM8uOD0Qrgafj9DWXosAFuvNjn72yiiI8xwRzxQhciZwMiLSMBzjv1XyvJRPpTgBmz/Gxz2whyP69sTYy+X0WU7OBVGVPigSO12cuuAYR29f94rtYo6ljkKBpS2ZE7wgZIeSKIUXiLBdBrf12+3zU3z9AsDMBR5vPs37eP3oyzymXv6+6eb89+sB1exFQm8sloQHWoGvpZKQKn11pd+klGiE1FEU3iIF4WlCqyS8kpKElCIqwjtslRRY1X6ibcS2HPnURflDX4D7BP3OaG3uL9qOKFWn9ONQgmS5PTmviM/LNQ+7pnm5EFGEiJ4noleI6CgR/Yfa57uJ6DkiOk1Ef0VUd2cqGx61rTdRu24t1iq5FAC82xhzC4BbAbyfiO4C8J8B/BdjzF4ASwB+vbnDVN4E1LbeRO26hViT5GKq64dLa8hg7T8D4N0A/nHt868C+PcA/tfltlUsAhO1yluFBMspHf285IpEhecHqzLo6XEOO53hpWwiwe2lhZBoc38ZpFBZLXGPyM4jf/Xk0s9fl5c9J7xtRBprB
EUyqXKWE31ZIsjIEp4wiTR/LvN0LQpp6dyYU3JJLPDyv5jhLw11cvDMgV2cp1tsCi+cmW+abSuVPLKpmreJSBgWJDbgzAxLB6ePnbPbERE4BQChTpZN+kRCr219HKgll+69nSxPSYeZvAgkGxhgWWZkG8sTU9OcG/3UqeOOcYwWuZSelItSKT6ObJalkuQyy0BScrGKooyiKDN49AgnKJOJtqrj5Rz2IzdzwNNAP3/e1882jojtAs29Z2HYU6NQkN4eMh+6CLASfYp1QWPSc0QG/sgAm4gIvvIJzw9rlTJw0ouERECW3H69h0jIv3LAUl4kg5MBRLJMnRyrHIe8RrJZtnl9YJEsOye3Wy7y96X8EomsQwk6IvIT0csAZgE8CuB1AAlj7ClsAsDIat9XNi5qW2+idt06rHlCN8ZYxphbAWwHcCeAAyt1W+m7RPQgER0iokPL6fxKXZQWcqW2lXZNpdbJwV25Ypp1z8oqQ8rG5Iq9XIwxCSJ6HMBdALqIKFD7xd8OYHKV7zwE4CEAuHbnkLGC1aVmKXTQ7lOoiOVGmb0TIp28zOjqdyYy6fbxkqgny8uuxCIv4RPzvDzKZUR18LJ4F2TkW3LeTl4EjYRE5XaZ1wEAUnn+Tk78YAUNLz07fOxdUvHx0rxU4jGF20Q1cFG2rCvE27kGLEcAwE238FJ7/8232O3Ra6+123fexZPtxCRLAXiBg22AtdtW2vWancOmUlt2+8TzQqDE5youAr4OP/tTuz09w/YGABLHfuedb7Hbb38bXy/Lyyx7vPric3Y7I5bMpy6wB9CZc+fsdk6UE5MBY5E4e40AQDIpPJBEXphMkqUcuQgOiDwlnR3szbJtN0s33b3Ddntgm8hffxuXsgOAHhFYJOUBudyX3jryGq7nau/ZgcEBcymYRsosUpKQCdkdpSJ9zntFni9HHhPp/SLkTZkrRW5XyqQEmRuFJUxZ1u5y+VSMkGzkfS7HtJoUEwzK0pQrH099Lnb5/VCEr/VYmK8ZOdpLY79cbpq1ern0E1FXrR0F8A8AHAfwEwAfrXX7JIDvrmW7SutR23oTtevWYq1P6MMAvkpEflR/DL5hjHmEiI4B+DoR/QGAlwB8qcnjVNYfta03UbtuIWi1EkzrvmOiOQDnAfQBmG/Q3WtstGPeZYzpb9ytMWrXDXXMTbMrYNs2g411jG8WG8m2q9q1ZRO6PQCiQ8aYg417eoetcMxb4Rjr2QrHvBWOcSU2y3FrLhdFURSPoBO6oiiKR9gIE/pDrR5AC9gKx7wVjrGerXDMW+EYV2JTHHfLNXRFURSlOWyEJ/RNCRF9jIiOE1GGiF4none0ekzK1UNEB4joMSJaJqIxIvqFVo9JuXq2il11Qr8CiOg9qGar+zUAHQDeCeDMZb+kbHiIKIBqgM0jAHoAPAjgz4lo32W/qGxotpJdWzahE9H7iehk7dfyc60axxXyHwD8vjHmWWNMxRhz0RhzcaWORLSDiH5Se5o/SkS/Wfu8h4gereWjfpSIulf6/mZjk9v1OgDbUE0raxljHgPwMwCfqO+41ewKbGrbbhm7tmRCr0Wt/Q8AHwBwPYCPE9H1rRjLWqmN/SCA/tqFPUFEX6iFVa9EGcBnjTEHUM2h8anasX4OwI9r+ah/XPv3pmYz27XGSkkyCMCNK3y+ZewKbHrbbhm7tuoJ/U4AY8aYM8aYIoCvA3igRWNZK4Oo5pT+KIB3oFo04DYA/3alzsaYKWPMi7V2CtU8GiOoHu9Xa92+CuAj6zvsN4XNbFcAOIFqitnfJqIgEb0XwL0AYvUdt5hdgc1t2y1j11ZN6CMAxsW/N1M+5ks5RP+kZvx5AJ8H8MFGXySiUVQn/+cADBpjpoDqRQRgYF1G++ayme0KY0wJ1Rv15wBMA/gsgG+gehyrsgXsCmxi224lu7aqSPRKS6BN4T9pjFkiogmscbxE1A7gmwA+bYxJ1qfx9Aib1q6XMMa8iurTGwCAiJ4GP5m9gS1iV
2CT23ar2LVVT+gTAHaIf6+aj3mD8hUA/4qIBmovRz6N6hv0FSGiIKoXx18YY75V+3iGiIZrfx9GdUm42dnsdgUR3UzVwsoxIvotVLMV/tkqfbeKXYFNbtutYtdWTegvANhL1crjIQAfA/Bwi8ZyJfxHVI/hFKoa20sA/nCljlT9af8SgOPGmM+LPz2Mah5qwDv5qDe7XYGq58MUqjfs/QDeY4wp1HfaYnYFNr9tt4RdW5k+94MA/isAP4AvG2NWnBA3O0T0dgBPAngNwKWSKL+Lqi73DQA7AVwA8EvGmMUVN7KJULt6067A1rDtZrerhv4riqJ4BI0UVRRF8Qg6oSuKongEndAVRVE8Qqv80BELB0xnLNiwH/lc+n+6fhfg7jesUqk07uNyn26H5tbV1b1HrLsdzyaL882qPdnd3WVGhoca9ivlU662ZxWL7nbs8joJhhtf8m59jsmlJcjlNWdZlqt+bq+nk2cnm2ZXAOjr6zOjo6PN2px7XB6vcdHR7TtDn9t5Zw13Y7M4d+4c5ufnV9xxyyb0zlgQv/ruPQ37RcLubgZjuZxcKeSqXzbfeCLJ5t7g9bQiJavxjwMABPzuLo5IwN2xlsvu9vsnPzx73lVHF4wMD+Ebf9m4gPzc8Z+62t7yxDl3Ow65u5SH9jYO8PMH3F0jQTR+IAGAMEVc9VtedPcjVzLuJv63f/z3mmZXABgdHcWhFw41bXuuHTIq7vpZLs5LqZx3ta1QyN014PO5uwbc/qi7+VG64447Vh+Pq8E0yLJGRGEi+qva35+rhcwqG5zz81n8n6cmAOBGtat3ePbl0/j4Z/4boHbdcjSc0F1mWft1AEvGmGsB/BdUc4UrG5iKMXj8+AI+fPsgAByF2tUTWJUKPv+VR/DHv/MJQO265XDzhO4my5rMRPY3AO6nzZL8YIsys1xAVyyI2nsMA7WrJzg+NoHtQz0YGewB1K5bDjcTupssa3YfY0wZwDKA3voNEdGDRHSIiA5lC+50QGV9yOQttEf88qOm2HUxkVinEStumFtKYaC3U350xXYFnLadm5tbhxErzcTNhO4my5qrTGzGmIeMMQeNMQdjYf8KX1HeLFZ59XLVdu3p6rr6wSlXzCovGq/IrrXt2bbt72+aw4yyTriZ0N1kWbP7ULV+XyeADZfnQGHaI36k845VktrVAwz0xDG7sCw/UrtuIdxM6G6yrMlMZB8F8JjRJDEbmsF4GIlsCcvZElB9YlO7eoDr9oxgfHoRk7NLgNp1y9FwQq9pbL8B4Aeopor9hjHmKBH9PhF9uNbtSwB6iWgMwGewQevtKYzPR7j3ul48/OI0ANwAtasnCPj9+Myv/hw+85++Bqhdtxwty7Y40ttm/tkHV6rR6sTty/dcxl3AQL5UdtXPuNkvuQsWKJZdBj1V3AUqDfa+oRTiipRL7vb7B3/1ymFjzEFXnRtw44HrzF9/rXFgUeL0k662F66UXPUr+t29k/F3u7Gry2hCl8Ei4aC7wCKr5C4QzOdzt9+b3/v/Ns2uAHDw4EFz6PkXGvZzExwDuI+xLFvu7tnTY6cb9snlMq62dd2BA676hcOr1YZ34qPmvTO84447cOjQoRVPn+ZyURRF8Qg6oSuKongEndAVRVE8gk7oiqIoHkEndEVRFI+gE7qiKIpH0AldURTFI+iEriiK4hF0QlcURfEILStBZ5UtpBcXGveruPvNyWXdRZP53FWWQryrvWGfQCjsaluJZXelxQIurdHT4S5SNJV0FxXXTEqlMqYnGtv13NHGUX0A4Mu7iwA+Mz3rqt+d720cOLlrdJurbZVcRjD6Ii4vuqC7lNJUcbffpmMMrHLjyF2XAdSui+iOX7zgqt/ffu+Rhn2SyeWGfQDg7nl319O77n23q37hsLu5wk2d4stF4uoTuqIoikfQCV1RFMUj6ISuKIriEXRCVxRF8Qg6oSuKoniEhhM6Ee0gop8Q0XEiOkpEv7lCn/uIaJmIXq7993vrM1ylW
SQyRfzpj8bwX//2BADcoHb1BlOzS/i1T/8P/Pyv/BGgdt1yuHGUKwP4rDHmRSLqAHCYiB41xhyr6/ekMeZDzR+ish74fIQP3L4NIz0x/Ju/eOU4gE+pXTc/Ab8fv/0vH8D1+7bjxvs+o3bdYrgpQTdljHmx1k6hWoZuZL0Hpqwv8WgQIz22P3sFaldP0N8bx/X7tl/6p9p1i7EmDZ2IRgHcBuC5Ff78NiJ6hYi+T0Q3NGFsyptHCGpXL6J23WK4jhQlonYA3wTwaWNMsu7PLwLYZYxJE9EHAXwHwN4VtvEggAcBIB4NIOBvHCnmD7isxeeyzmLBZY3KgIuwzYBxVwPS/ml76gAADLFJREFUKuRc9TN+d8cwO5twt99S48jDYrkCAHsAfKIZdu1s78CTj/6k4X4XT7/asA8AWEV39hqbnnHVbyLTOGp374E3HOKKdMbdRex2dne66heNuas92tkWbNgnlysAV2lXwGnbbdu24fyFsw33TS7uawCYnnMXjfnMoedd9Tt89JWGfZKL7u6dQqnoqt8NNzWuiwwAA/19rvr5/VcXvO9qBiGiIKqT+V8YY75V/3djTNIYk661vwcgSERvOAJjzEPGmIPGmIOxUPOKpipXhlUx+NYLFwFgsVl2bYu6K5qrrB/lsoV//8dfB67SrrW/27bt6ele13ErV48bLxcC8CUAx40xn1+lz1CtH4joztp2Gyf0UFqGMQbfe3kavR1hAFjx8VbtuvkwxuCP/+d3sHN7P6B23XK4eb6/B8AnALxGRC/XPvtdADsBwBjzRQAfBfAviKgMIAfgY8a4yDKjtIyJxRyOTCTR3xECgOtrtlW7bnKOnLiAR594Bbt3DgJq1y1HwwndGPMUgMuKYsaYLwD4QrMGpaw/O3pj+Ncf3g8A+E8PnzxmjHlDGkK16+bjpgO78OO/+X0AwP0f/T216xZDI0UVRVE8gk7oiqIoHkEndEVRFI+gE7qiKIpH0AldURTFI7SspqjPR4iFG9darFzewcbGGHeRXcWyO+8sNxGKFeMuitFY7sZmAu5qT6aK7mqFWtabH7yVSWfw/LPPNOxXXnbn9pwsFFz1y12mzqLkzKHxhn2ePDzlalttAXf2D7oMovO7rDvZ4SJSdD1Ip9N48uknGvY7PznhanvzSXdRm0suonsBwNfW+P6JFNpcbWt2Yd5VvyefftJVv9HRHa76uak9ms1lV/2bPqEriqJ4BJ3QFUVRPIJO6IqiKB5BJ3RFURSPoBO6oiiKR9AJXVEUxSPohK4oiuIRdEJXFEXxCC0LLIIByEUJt8JlnOglPpcBSL0d7sqBtbU1LgeWXHYXfNAZj7vql8q7C1Q5f9HdftOFNz+wyBeOIDZ6oGG/yUl31W+6u9z1Gwi5C8qKtTeuqLQ4fd7VthYujrnqNzfvrjxe3nIXHFWquLvWm41VsZBILzfsd2HqoqvtdQ70uurX0+muClZvX3/DPnOvuwsaO37kNVf9Hv3Ro676dcbdHYObkpuLi6sH5ekTuqIoikdwW1P0HBG9RkQvE9GhFf5ORPTfiWiMiF4lotubP1Sl2Tx1ahnPjC0D1co2alePsJROIZFJA2rXLcdantDfZYy5daUKKAA+gGrV8L2oVgj/X80YnLL+vGW0AwBWrGwDteumJR6NAWrXLUezJJcHAHzNVHkWQBcRDTdp20rrULt6E7WrR3E7oRsAPySiw0T04Ap/HwEg09hN1D5zQEQPEtEhIjqUKZTXPlql6bx0Pg0AB5pl10Ihvz4DVdZEsupMcFV2BZy2TafTzR+o0lTcTuj3GGNuR3Wp9ikiemfd31d67f6GV/bGmIeMMQeNMQfbwq1zsFGq3LG7A2/dEweA02iSXcPhxt5ByvrSGWtDV1s7cJV2BZy2bW9vb/JIlWbjakI3xkzW/j8L4NsA7qzrMgFAJvzdDmCyGQNU1o9w0DZ/GWpXz+DzqV23Kg0ndCJqI6KOS20A7wVwpK7bwwB+pfb2/C4Ay
8YYdw6fSkuwKgZl9nv2Qe3qCYwxMEbtulVxo3sMAvg2EV3q/5fGmL8non8OAMaYLwL4HoAPAhgDkAXwa+szXKVZFMoVvHrBrnx0AMAfqF03PxVjkOJgPLXrFqPhhG6MOQPglhU+/6JoGwCfWsuODYBS40BRdLmMFBRPJZelaLl7bVAqNX65F3OpKU7OuSuj9vr5xlF4ADCXcvdCOdug247Oajmu47PJo8aYPwSu3q4UCCLYO9iw346eFd/BvYGIz927lljQXfm2Qr5xObMzyaOuttXe4S4C2DLu7DW95O6lY1/f6GX/funsnzzxQtPsCgDRaAw337ySF6STw6+dcLW9zg5393au4u5F+7aBxtddaSbnalvLGXcR6tnTJ1316w67m3faOhuXyCsVVz8fGimqKIriEXRCVxRF8Qg6oSuKongEndAVRVE8gk7oiqIoHkEndEVRFI+gE7qiKIpH0AldURTFI+iEriiK4hFalvLQAChT4/p52ZLlantuf5lypaKrfl3djaMAiy5rQJ6ZcJf3aDHp7lhNwF39TL//zf+9NhULxVymYT9yGdmbLrirs4qAu0jRUqXxOQ5HG0frAUCY3NX2LC7MueoHX9BVt8GRUVf9Tp54wd1+XVIqlTE5Oduw3/mzF1xtr73NXZ3NQsld1CYlG0eB5hIu03b73Nn22j3XuOq3p99dLeMOF/POM0/Wp+Zh9AldURTFI+iEriiK4hF0QlcURfEIOqEriqJ4BJ3QFUVRPIJO6IqiKB7BTQm6/UT0svgvSUSfrutzHxEtiz6/t35DVppBoWRhbHoZY9PLAHC92tUbZDMpHH7uhzj83A8BteuWw03FopMAbgUAIvIDuIhq4dl6njTGfKi5w1PWi3DQj2uHqr6xR8YXj6FaKFjtusmJtXXgLW99LwDgiR//tdp1i7FWyeV+AK8bY86vx2CUlhGH2tWLqF23GGuNFP0YgP+7yt/eRkSvAJgE8FvGmDcUZiSiBwE8CACdsRB8LqL7KnBReBSACbo7lPb2iKt+JTTud/zMKVfbyhQaR04CQCTiLtoxEnJ3rNG2mKt+R8YXewD891X+vCa7RqPtKGUa14AsXqYuosSU3UXPIuIuAtAfbnyOR3e7i/6bHXdXTxK+xhHRABBtc2f/Awf2uer3xI9xVXYF6u7Z7i6cHz/XcL9dnS5rrbq4TgCA8u5q8k5NjzXuMznvbp8+d/v85V/8h676VdKLrvo99tTjDfsULxPt7voJnYhCAD4M4K9X+POLAHYZY24B8CcAvrPSNowxDxljDhpjDsbCLcs6oAgqFQMAnWiSXUNhdz+YyvpiWRZwlXYFnLZta3OXEkFpHWuRXD4A4EVjzEz9H4wxSWNMutb+HoAgEfU1aYzKOjJTzX+RVbt6i/HzY4Dadcuxlgn941hFbiGiIaJqpiIiurO23YWrH56y3lxcygLAiutBtevmZez0MUDtuuVwpXsQUQzAewD8M/HZPwcAY8wXAXwUwL8gojKAHICPGeMynZ7SMsqVCmaTeQBIXPpM7br5KZVKmLhwFlC7bjlcTejGmCyA3rrPvijaXwDwheYOTVlvAj4fPnjLdnz3xQv2m0e16+YnGAziV//pZ/C/v/CHatcthkaKKoqieASd0BVFUTyCTuiKoigeQSd0RVEUj9Cy6B6f34/2eFfjfj53vzkllxGl4ai72n7z06mGfbLzS662dU2Pu2CbgrvAOURcRoDu3zPiqt93X3RXA9INlUoF2VzjA/H53V16FXelHZGruLN/wGocUbpru7tI0Xzanaff9XF3ATnPH37JVb/J8y4jVJtMOpXCUz/9ScN+ZNwZbSaZdtVv7vy4q35BF5eAm5qyABAacjdP/OyJJ131KyTdRageO904+jyXWz2KVZ/QFUVRPIJO6IqiKB5BJ3RFURSPoBO6oiiKR9AJXVEUxSPohK4oiuIRdEJXFEXxCDqhK4qieASd0BVFUTwCtSoNMhHNAagvXtsHwF1I1
cZlMx7DLmNMfzM25GG7ApvvOJpmV2BF226287Eam+04VrVryyb0lSCiQ8aYg60ex9XghWNoNl45J145jmbhlfPhleMAVHJRFEXxDDqhK4qieISNNqE/1OoBNAEvHEOz8co58cpxNAuvnA+vHMfG0tAVRVGUK2ejPaEriqIoV8iGmdCJ6P1EdJKIxojoc60ez5VAROeI6DUiepmIDrV6PBsBtas3UbtuTDaE5EJEfgCnALwHwASAFwB83BhzrKUDWyNEdA7AQWPMZvJpXTfUrt5E7bpx2ShP6HcCGDPGnDHGFAF8HcADLR6TcvWoXb2J2nWDslEm9BEAsnDgRO2zzYYB8EMiOkxED7Z6MBsAtas3UbtuUFpWJLqOlarKtl4LWjv3GGMmiWgAwKNEdMIY80SrB9VC1K7eRO26QdkoT+gTAHaIf28HMNmisVwxxpjJ2v9nAXwb1aXpVkbt6k3UrhuUjTKhvwBgLxHtJqIQgI8BeLjFY1oTRNRGRB2X2gDeC+BIa0fVctSu3kTtukHZEJKLMaZMRL8B4AcA/AC+bIw52uJhrZVBAN8mIqB6Xv/SGPP3rR1Sa1G7ehO168ZlQ7gtKoqiKFfPRpFcFEVRlKtEJ3RFURSPoBO6oiiKR9AJXVEUxSPohK4oiuIRdEJXFEXxCDqhK4qieASd0BVFUTzC/w/WrjEuBf60qgAAAABJRU5ErkJggg==\n" - }, - "metadata": { - "needs_background": "light" - } - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as c_trans\n", - "\n", - "ds.config.set_seed(5)\n", - "ds.config.set_num_parallel_workers(1)\n", - "\n", - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train/\"\n", - "\n", - "sampler = ds.SequentialSampler(num_samples=3)\n", - "dataset1 = ds.Cifar10Dataset(DATA_DIR, sampler=sampler)\n", - "\n", - "random_crop = c_trans.RandomCrop([10, 10])\n", - "dataset2 = dataset1.map(operations=random_crop, input_columns=[\"image\"])\n", - "\n", - "image_list1, label_list1 = [], []\n", - "image_list2, label_list2 = [], []\n", - "for data1, data2 in zip(dataset1.create_dict_iterator(), dataset2.create_dict_iterator()):\n", - " image_list1.append(data1['image'])\n", - " label_list1.append(data1['label'])\n", - " print(\"Source image Shape :\", data1['image'].shape, \", Source label :\", data1['label'])\n", - " image_list2.append(data2['image'])\n", - " label_list2.append(data2['label'])\n", - " print(\"Cropped image Shape:\", data2['image'].shape, \", Cropped label:\", data2['label'])\n", - " print(\"------\")\n", - "\n", - "num_samples = len(image_list1) + 
len(image_list2)\n", - "for i in range(num_samples):\n", - " if i < len(image_list1):\n", - " plt.subplot(2, len(image_list1), i + 1)\n", - " plt.imshow(image_list1[i].asnumpy())\n", - " plt.title(label_list1[i].asnumpy())\n", - " else:\n", - " plt.subplot(2, len(image_list2), i + 1)\n", - " plt.imshow(image_list2[i % len(image_list2)].asnumpy())\n", - " plt.title(label_list2[i % len(image_list2)].asnumpy())\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### RandomHorizontalFlip\n", - "\n", - "对输入图像进行随机水平翻转。\n", - "\n", - "**参数说明:**\n", - "\n", - "- `prob`: 单张图片发生翻转的概率。\n", - "\n", - "下面的样例首先使用随机采样器加载CIFAR-10数据集[1],然后对已加载的图片进行概率为0.8的随机水平翻转,最后输出翻转前后的图片形状及对应标签,并对图片进行了展示。\n", - "\n", - "依照上文步骤下载CIFAR-10数据集并按要求存放。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Source image Shape : (32, 32, 3) , Source label : 3\nFlipped image Shape: (32, 32, 3) , Flipped label: 3\n------\nSource image Shape : (32, 32, 3) , Source label : 3\nFlipped image Shape: (32, 32, 3) , Flipped label: 3\n------\nSource image Shape : (32, 32, 3) , Source label : 6\nFlipped image Shape: (32, 32, 3) , Flipped label: 6\n------\nSource image Shape : (32, 32, 3) , Source label : 9\nFlipped image Shape: (32, 32, 3) , Flipped label: 9\n------\n" - ] - }, - { - "output_type": "display_data", - "data": { - "text/plain": "
", - "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAADuCAYAAAAgAly4AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nOy9eZAlyVkn+POId595n5VVmXV1HV19n+pWd0uipQYhJG5Ju0JjC4PtAMNoFrCV7c4yxmCYMbCmWQYMYzQDSLOwAwwStK5BArW61Vf1Xfd9ZFXlfee7rwjfP77Pwz2rsqs6Mysz9ar8Z1b2sl74i/D4wsP9950upJSwsLCwsGg+OJvdAQsLCwuL1cFO4BYWFhZNCjuBW1hYWDQp7ARuYWFh0aSwE7iFhYVFk8JO4BYWFhZNCjuBW1hYWDQpbssJXAjxF0KIcSFETghxRgjxC5vdp1sJVr7rCyHEJ4UQJ4UQRSHEeSHE+ze7T7cChBB7hRDPCSEWhRDnhBA/vtl9uhHE7ZjII4TYD+CclLIqhNgD4HkAH5VSvrW5Pbs1YOW7fhBCPA3gvwD4WQCvA+gFACnl6Gb2q9khhAgBOAHgTwD8AYAnAXwdwL1SyjOb2bfr4bZk4FLK41LKqvov/9uxiV26pWDlu674LQD/Tkp5UErpSylH7eR9U7AHQB+A/yCl9KSUzwF4GcBnNrdb18dtOYEDgBDij4UQJQCnAIwD+NYmd+mWgpXvzYcQwgXwAIBOVvFHhBB/JISIb3bfbgGId/nuzo3uyEpw207gUspfApAG8H4AXwVQvf4vLFYCK991QTeAMICfAsn1HgD3Avg3m9mpWwSnAEwB+A0hRFgI8WGQGSWxud26Pm7bCRwAWFV6CcAWAP9is/tzq8HK96ajzJ9/KKUcl1LOAPgCgB/ZxD7dEpBS1gF8AsBHAUwA+DUAfwNgZDP7dSOENrsDPyAIwdpo1xNWvjcBUsp5IcQIyKdgcZMhpTwCYt0AACHEKwC+vHk9ujFuOwYuhOjiMKyUEMIVQnwEwKcAPLfZfbsVYOW77vhzAP+S5dwK4HMAvrHJfbolIIS4SwgRE0IkhBC/Dorw+dImd+u6uB0ZuASp838CWsAuAficlPLZTe3VrQMr3/XFbwPoAHAGQAWk5v/Opvbo1sFnAPwCyM/wIoCnjWiqH0jclnHgFhYWFrcCbjsTioWFhcWtAjuBW1hYWDQp1jSBCyGeEUKc5qSCz9+sTlkQrHzXD1a26wcr243Dqm3gnBV2BsDToFjJNwB8Skp54uZ17/aFle/6wcp2/WBlu7FYSxTKQ6CCRRcAQAjxVwA+DioIsyzisajMpFPwvEbwnSNICXAcymT1eT2RQme2hhxuw995vq9/f9UxqAXJ+L1apHwOnxV8TBhrl+Rj6ue+cQ11rFwlh7TXoGPpWMzoB52z4UYAAK5xDHy/udlZAECtUuH+aDghl5t6M1LKTqxQvkKou1kuI5gOKRm1tmSDI93dnQCAWIR6U68sBMdc6dE9+SzjMGVshyMp48JLlTjfv1b+jqvaqO+04NWzKRQKAIC5uXkAQCqp5ZdO0d9+g/qzkKsExyanF7iPSprXXsPAqmQLAMlEXLZk05idX9R9V394dG3B9+KEjXvn5yq4X77RL9Vjx3GXtPU9PTJq1RoAIBwNAwAi8SgAoLRQCNq4rno+9Dq7nnGNGo29Oj/LUIyuIeu6jZDUNzfCY1Do64sQnTMW44REHvvVYtH
4vUSlWket7gmsQrYtLa2yp69fv3wG3gu9VO+qEO+WDW+cZ7VBG/y75X593V7Lq74xru9L7rfqo7qG0Ubd06WL59TYXYK1TOD9AK4Y/x8B8PD1fpBJp/Dpn/gIFvglBYBEnCaFWIQGZpU733D0xNCeygAAog51t1gpB8diKRpYMR78aNCAdY2JpebT4C149DLEwjTJhj3dtzq3qfNkW67oSaLKg//I2QsAgPxMDgDwxL49+j54wpnLbgEAZHfpY8jRxP3tL30JADBy5jQAoGJM4YnWFgDA7PTsJf5qxfIVwgWk8UiDNY3uKR4lGX3swx8Kmvxvn/vnAIDdW2lxmj7598GxZIMmx4UyyTjeczcAoHPI6IaT4UvxIlfhqKtQRJ8nwxM+L3LwteAbdXomL774EgDgr/7bVwAAjz6i5feBx+4AAJRmafL8xj+eCo793/+J+jtboDHhCXX/5gTlsxz8Vcu2JZvGL/38z+K//s3Xdd/VH4sluo5XBwAkeozFp5Xk4/CrVtK/QpXX3GSaFtREC42BfE5PzqPnqctdu3oBAAN37gYAHPnai/oaSXqHUv0d9P8FLd/KFRp7o3X67NzXRtceqQVtwj49q5YB6ms+osd+qJ3OuXsXPXs5TxP3hdffDNrEqnUcPKJEu3LZ9vT148/+379ZQuwUfLby+ldNcubflSo9+5DrBsdIEQAkL07B+r7MBHr1+cw2ihb5y0yuHv9en1pec17pq4mf57W68fyrJGeXF/AGL9aNhm4TidCz+flPfzQQsIm1TODvTvXMRkL8IoBfBIBMKomwcJFM6PICEWaqDX6pXX4I2YRmeSluE+fJXkT1gyo3eFL2qTvJKC8I0ahuU6NJRdTpd5EwsRm/Wtcddfl2eJLxjIegDmXS1KfiAr2wM7lc0KY/RdcdGtwGAAi3twXHag49kM6ebgDAlXNnqY0x4GLcJwM3lK8pWzrqLvmZEzQn2T7zoScAAL/9m78WtNmyjV7Q3Bjn2ZQng2MtadYqqnSe04deAABUG7rfXQNU6yfHMgHoWUXimuXPT9KCXeMJrrdfE4mwS7J5392DAIDdfT9D3ahOBG3mps8DAEoLNG4WC3qA1zyerIIFm7Us4/mpxcUQ3orHbiIaxdGXD6GnLRMcd5gNV9M0qc2U5uhAm57AFx3WYqrMgON67FeKNPFUCzP0yS933VAN1dip1Ol+Hnn4cQBAeCQftDl9/CQAIM4LgV/VxyIpmgDavDQAoDBH13SM92P3nQfojzgz+ZrWMrYP7QQADL9Ni2Z0nhaXyIxm4FXfh9Ssf8Wy7enpheMI+P61P71Wd9NtBL+rVyZozIbDelz29RGR8hpK+7y2I85VXQ0m4mXuwOW2nqcXR0edTH0yW3aME5ijENBEEQBOnaTnpibsNl7A51hTB4B0On1tZ5bcw+oxAmDA+P8WAGNXN5JSflFK+YCU8oF4LHr1YYt3xw3la8p2+ffG4l2w4rEbvXaBtVgeK5ZtS2vb1Yct3iPWMoG/AWCXEGJICBEB8EkAX7s53bKAle96wsp2/WBlu4FYtQlFStkQQvwKgG8DcAH8mZTy+PV+I4RASDgIhzSbcdlJUquRahFlh0wyotvE2TkTYj1IOXQAoFhnldAhdh+NLv0EAMH2dHGVE9QzTBhgmxn4u3BE23ArdTLBRNjOXmWVp1DTJpgkqz9R7vfIpWF96iqpmzv37gMA1MtkbpibmQraOMzwRsfIdLAa+UoAruGACgk2T7zvPgDA53715wEA/X3avIEaqduxEMlmoVwKDi0UyESUbt1F9y9IDq9+/5+CNg8/QaajmUlqe/Ec7S3g+/oZjY+TilssUpv9d23Xv3/fPQCAiTFq8+zf/R0AIJPV9zE42E/XDw8BAF5/U4thscTmMbYjKvulMG3gV2nwq5GthwZycgbb7t4WfHeRTUNzFRq7sS1EPNt6WoM29TyNEb9E/Wvp6w6OHXv1IN0rm6P9cfI5RNt0ee/uDjpXJt1
O52Mbf97M8K7Sc06HyDwT7U/qQ6yZVSv0u8nFaQBAKq3Hd8ml/tfy1JE+492pHzsHADj+nVcAAHv37AcAZHv7gjZRUcaxETrvamQLQcEIjnMtn5RXOSF9w4QRcumeHDY5HTlyKDimzLQtLSQ3z1tq7wYAZWVT9ukggsJQZj02eVTYJ2bOK8o+cvX4Cuw1AKSjrkVt4jH9XuzZTfXdlO1c2bt7ekxf5fU16zXVQpFSfgu2UP+6wcp3/WBlu36wst04bGgxKyklsV9jUQkr2yKvWhFm25GI7lqUGbdycEaFZs7pJK20mSg5GF3F7kO6TcRdGmpYKLEDJqSvEeL2CWbQwlhFXdWeveTVBrEqEddMKQgb5BV7Zlyb/RoVYrU7+8mx0ttBjsMjb7watLk4fBFrg4QQPnypHXx9fXSdX/2VzwIAHnxoH/dfO6BCntIuiFnIupbJ8Fm6h86dbNJ06H6PH9NMZ5KIF+7dT0z6jReInS/Mawfv3Aw5vqIxDt+s6HutF+nvbz5PDp3pMdIIHntoZ9Dm0nnqx747B+n3huL0biFcjhlJwJ/eNW3fO0RIINweW+K4zrPjdpEjMzIRutLFqZmgTXWKjiU9Gh8x0wlXJlY3P07Oz0ycHFb5We2ElK30XAZ20zg//w6x9kRcO1OjIfr70Ev0XPbcqbWEWILGwyJrP6Ekje9Qmw4SyBWoj9XzpP11t+hz3/MwPdfvJ44CAM5evAwAuJedzgBQL1cChrlaCCGWMHDFSrU/d2kIMB0jCtzHwQEz09rxPTkxDgBob23nX6vf62sqn6mOPGZnt/HuF4r0jKenaaAPDg3qE9SXRs3IZUZjEAnmqP8a0XVtrUvuVVkG4nHtBDfDmZeDTaW3sLCwaFJsbDlZAUhXwDUuq5J0YhGy2zlsiw1i9wB4Pq10vooTNUJxshyWFYnQqqVCBhtGKE+CY8xjHGpWq7Nd0lhpVRKE+lmxpG3BpSIxyCozaZ+X8RFelQGgh0OZ9vYTcxQNbR+X3N9oilhPmKNxzLhVWTdCGlcJKRvobtXM6pf++acBAB968hEAgMsxyA3oa1WZgTtsW5WOjgiYmWK7aZ3uc16QrMcmdZzwy2++DABIpchu9+FnPggAOHn4SNDm6HFi2S1txDBbMzqUriVBjPBjP0whjo0asfzLF04GbSaniFltLbFNPqNtvIqBBEleKl7XYFrymj9WjnLVw6ELC4g72m8xEKe+37mf7MG1MNmQc4Z5OtNLbHZXK8Vxb7tjd3Cs8hDZQF94jmK6L44Tc49JbZ+uCxo7CUnPLH/6MACgJdURtBlpcDgix8nPXx4Oju3YSSwvlaFzlgqc71A0YsUFMXCvwQlmQl9/cYG0n13buwBwpCqAnXtagjaF8RhCYTP0e2UQEBBCLLH2aqatDNXMoE3KyQxcaeF3H9DbV168QGHTghmsESFuXOOq5D1lbzdousvzkwo9dg3OK7kzAfMOPq6NI1fXdYxzK6XlGpa9gmQjy8AtLCwsmhQbysCFcBCKRlAuanarlqG0SnDgNORqQ9OYMmdeOrxQRaKaIUQ5+qBQJhYh2ZZdrupszRCvnmFesSucLRiK6fOUOTVYZXnmCzobTtmVu7vI1lYNq+w6vf5VmEHPMitXaegA4PKqW+WolfExitQYNiJVakZ/VwNXCKRiEfyzT38i+O4X/tmnAADJBNvqOSpFGNZgybZv4RJT86NbgmOHT1N/T42T/bMUZjvspLYDzxaJdf71N54HAPzoBx4AADhJHemy6NMzmRqjKIs7790XHGttJfbaWKRn8g8vfh8AUKkb2bZJ+v1LrxKrHxvTiQ6Kz6ikCl9l3pmlFMRShrQauMJB1kkiG9Fc7sAWsq/uv5Pup6OPtJDF3FzQxs+TrO4apMibga06w1Rw1EF/lp7Pf/7aP9C1IjrSIe7RKzryDo2ZtEvjcs+jOtIlfd8gAOD4WdK+khltm925jSJ4ps4MAwDmWVNMe1oLC7fQc93yCGW
8Vlr0uH5jjJhs7y4a+4KTuCpGNMiW/uySJJqVQyIED9emvQA+f6fs1WLJr5ZmOUJce0wxeWVe940oLcXgNYNmtm6MnQj/LsGZxRHPYOD8/jc4WSsYX2Ym/dXZnQa7DgwAjirtoe712iS0d4Nl4BYWFhZNCjuBW1hYWDQpNtSE4ksfpXoV+apRLCdG6mMXOyHDbAIxjf0NLkKVYFNAxEjyCYonMTJcPGg+r9X8IodrRXm9qrLZZmRUhx0Jdp6G2DxTNnyK7a3kwOnoIHU/kaEEjkRKOwwddrB6BXIkmaq2z8H7hVlSX2enKMQpbxTMylfX5sRsyabwsQ88gp/8sWeC79IZchr6rkpkYlNSXNdXUHIWIVLJFxydaPLaRTJ1lSqcQJWh36dTuo0fIvmPjtF9f+Ub5NSMGNUYr0zRMVXIp+0tXZfn9DA5yapc02aaKw3u2D0UtElm6ZnMzNC1EmldiCmocbHOWwNGQi6GelJojbXra5dJ1X37FXLS3vcIORbDnjbNZUJcn6RC/Ru/cDk4luDaOll2ID+0m8wrx8+PBG2mxmgcl+dILvffSSGd0bIee527aFye5XDGviGdCFLkWiozi2S+qk7R2M2ktROyXKX7mJonB233nQ8FxxbYKX75KPV7h0fn/kjfjqBNfubcmgo5CFDdHsd0/l1Vu2Q5phlYLFRbw4rjs1nDd68OwzNNKMr0wqYUdiY6Rgiyz2bHGmjM1UPadORfVYVyORuKrnK6jH3lqjtR5hLXCKc0a68sB8vALSwsLJoUG8vAfR+lUhGukY7qMYUqqzKwnIgjjK7Fkpwez47OguFgrLBjsJ3LsSr7v2tkexQKxCTrLocCRek8uZKuvFeu0TnjXGFucUEnu/R0MvNmp1N7kisgJnU4nAqHTLBTsGSEBdaUc4JX0+QgnS/qaqbz6quv0R/TutTuShCPRXH3nm2ICH1dwWGDEizTELFiJ2TU81ZOGa4HfkUrLjg9R4z3g49TKv7uPRQCd2lUJ6q8deQYAKDGv0t2UBji0ZPDQZsZTtHOcrW8147qRJ579pIs7ruPHIGXJ+h8p87rsLT+baQBJZPEfnfs0hrEoRNUqXAxx07PZWtCrx1OKo7YY3chMafrpT/MlQnPnyVN7sXX3gIAfPQj7w/adLJD8OTJNwAAEVc7GLexg9Fjx/m920g+7SmtYR5rJQ2ln5Oy+tnhOFXQ71D7IDH3NtZ0Tlw6HRzzJH3X3Uca1oF2CsGs1PT7dXmGrn/pDUqbb0tqBj/wADm1x/dTclB1muR7alZrCdIB6msUu5RyyaMLnI9XVxE0y8Hyp0qljziaXbtBnjvJWwUcyIbBoJlxhx1VspbPZ/TLdVVoIWvYjqEpsyYf+BzlUmeo2XNxDRM37hVBuWMAQMUolx0KXb+ImmXgFhYWFk2KjU3kkRJO3UfWqKnsczD8DNusPV5xY0YqvWCbVIntpMI41p4le2wmyckdalcTw45VLtGKpnKD5ktsew5pFrNniJhGJkHfHXlHJ6JUcsSKGyXqa5QL6LcbO9sou1WLqmNu0In5HLE2ZTOr8s4+Lbt0UadKntKnz5xbtm77DeF7DRRy80gntP3V8eg+PS58pGoRh6RR6EttAMF+htyCZtehGN3TyByFRo69QjbSmVldL3q+QP1u6SJ23L+V2HLNSE4Ij9L9l9mxYCbibN22lT+J4S0sUps333kjaHPkCF23paUHALBrn9Zc9u4l9vna6+/Q/fD3N9siXi2Vce6tI7i8oNnRtgdpk4Nt+yj87rtHKQzwbUND+bn3fRgAkHToWVw6/npwrM6+mdYWYteCi2JtN9LkHU7PH+cksn+6SIw81qZ9BOffpDDPt16lVPrFce3baeMQzOgO6vfOeyihqHxFv0Nph66fFcS8c+e0nb77TnquHT30nKpRLhx1WbN85MooLVMs6r1DMo01wluhQuvePQRUhd25zISjnn7nwkoT53rgboj9X1WtASk2HIqSLIJEHuM
8Dv+tNn+JGiUDGkHYn+qiYtnGhg5X2b6X25DCYQbu8fz2zpt67B84cODaGzdgGbiFhYVFk8JO4BYWFhZNig01oYTcENrSWYQdfdkaeymqrHKXOesxEdVqdkTt5MPaRyajVcwEhyGGWP1xuaZJyjBv1MqkPpbmSZWfniGTgGfEHXW3k/q4Y4BMMiGjlsncHJkMlDM0yw7T3q6uoI2q5CDqXPckqu9Rco2JElc1jMbJzOEboYZ7DXPKapBMJfHwIw+j16g3LSWpZD6bbHKLZAqKJrWKl1JbnzXIFNLXrsP/BrfQMxhlldz3gnS2oE06RU7L7i5yyKk9Lvt7jV1WHDrnhYvk+Nrap+XW3cl7NJarfD4yQXV16lofxQr1Oxym64eMTYN376baM0eOkkqfL13rzFRq+FrMKsIH4gVg+9AdwXevH6E9UrufIPNPlitifufZfwjaRD165h97gswtex/84eDY7DRlbJbZ/DA9S+aRiWltgmmAxsrUJQo9HR+lLFSR1NnM40Uez2wuuWO/rrfSmCbT5NQIfUaepEzZ8+PngjYTgo71b6WxcHFYO2ojXDMl0821htrpXVxwdT3wWr0O5x8OY23wsXSb76XGsOB/S/ar5D0o1XZljmE2ZdNsnn/PFhRUjQqkaiszhwMOgnMbBVeq/GwaQS0TfcwJnI9LTSdL/Lm646rT73pIZYCWCroapde4fnixZeAWFhYWTYoNZeDhcAh9PT2QYe1oa7BDMscG/HSSQsSSMYOBc+JOsNIZq2CZwwiDCCJmm+mMDpVTbO7iLDGbeo2dR+1GQgon1XSkid1v29IfHEsmqC9ejVdY7nPGSOSJc/2K/CyxxUrRqLnNS60KV3KDKoRa/N2d5i4cK0c0GsXOXYNBQhIAlCsUGjkxQU6pKS7e3dFhbFk4QAw2zs7PVEazmI5OYl+eqvngUwibAx0+qWqORHlXlwxv7lyraqaxeyvJeXaU+hGJaFbhO/Tcy/xMInGSSW+/fjaTMyT3GCdyZVoN3uHS89q6hZzQJ85c4APLVQpfvaOtUa9janoKszkd5lkfJ81sqJ20p49/4qcBAP/pT/4oaPO9//E9AEBLkhjsZz75qeBYvJvO5XDoZyRFDDhf1Du+t2TonlvCNIa3cRjg2+fGgzbd22nseFm6RnFUh9mevsThfpwM1c81TbbepeX73EG63uQ7ND58ocfl4YPkHB7iHbMWOCS17Gpnrleso7HmappLq2lfXQNEjWoz1FC5PIvMavMRPS5OTVHtmEPjdP933kuhsGby3STPB1dGqE2d2a65e0+dNboEzz1l4/qDPT3cJ04AUhUxzeQb/6o6KYZzv8HtFHP3VdVV33C0+ktrjl8Ny8AtLCwsmhQbysBdx0U6k4Fv2qp49UmpnXBUze6yTpH32a7sMHN1pF531J9q0WxwlbVYXDOCBO+cI7hCYEdLmr/XmoDH1QwLbCdXbBUAwBpDG9u+PWZyEZMIqjRYZgPJpNYgwlFKolD76tWZrRQMW1cioduvCtKH3yhC+jrJZX6Owu9OH6ctCd96k+yUfVt0GvRjH6LPAU4qqRhlDlxOfGptJ8ZdqnKykpHA1NJB/Q6zvHfuJBnVy5qF5BaIRfzUTz4KAHAczSq6+2gIdveSBjPHbacWdT8qVU6dd+k5pg0WFYnSmNi7l+5p+DKxqZKRDGHaHVeLWCqG3Y/swcS0rgd+6QKxvFfffBsA8NSHn6a+7NY26FMnKMTvxDFKXro8q38/tI0rMbJvpbtGoXqRYyd0m17SLBJ9pDW9fIjOJ6ATnT6wl+zyXojHVVbfe0eJ5Hl0gsbzX37xWfrNw/uDNmkuleD1k2/C6dE0Mz9H/T7yNiUphZl1du/QGmrx8qx+AVcBCbJjN3wzxI7P56k9MVXIoB5Xee7mOHPxekOPmVKO7vfC2WEAwMI0+Q7CKR1C29ZN8j7P5Q0WuIqkMDT8hhp7rOlfHtfP76F9tFdsXwvvBsb+vExKv8sp9tF
Jlo/j6nNns9Qu2H2M/X8f+bBOBBsa0uGiy8EycAsLC4smxcam0kOi2qihvU3be33e+cbjLBuvwUHtdb3SqqgVtVdcyPAkq+SYCkc/zM7TKjozq9PkQ5ykonjFjkFiM7Oz2tsec2llTqeIwQpHp7DO56mPmSitpi5HQzRquqgSOEU3xppEulVHYVTZ1tVQWgJX3xGu1gDkDbzNN4T04ddLmBrTKc7zcySLyVFiDcMXKZrk+EltP01wanZXl2bVCvkCMaLZnNpDlHdW79QRPqkWehbbdxObbEnzM5Jafo0e+nv/vnsBAIWC9g+UyqSFSGbllTIxpWRCR8Ps5t27Q2Fi9y2tuqDU9Aw9w22DlKCyY+dWvkedaOKvcb9GgMZuRTSwxWBExTPU94sjNNYOn6IyAI8++WjQZph3FqrxPc9Ma9nv3sZ7IrLtNNlG7LarV9dkv3KeGHAvF5UamyS7u2zod2CUywn4EZJha1ZrKHdtI9t5lOt1j4zSWH7h2beCNrkE7xXLxDG9XY/dez/2IACgUiR2m2dGL9p1JBjSUTjf0VrDSiGlj1q1Cs9Ic1cvq9K2pbIzG7+rcXG0nM/vd10z8DYuiVHlpLE3XnoOAOBJ/c5u3U+RQceO0DOqLFI0TthgySF+n+++h/YGlUUdIVSeIxlUeexeGh4GANyxU2tg7fx31af3O5XS71lffwffP9232vWnu1tHYJXL198nwDJwCwsLiyaFncAtLCwsmhQba0Kp15Afv4xWQ0VJxUnFmclT6JPyUXQmtTPOYxVDqqSfmjY3THKSyegoOZTmFig0q26oU1muXXLHdgr3SnBVvHBUqzMR1tlOcrLJfFE72ua5/kV7jNSvCJtQihFtCmhvI/W+tZ3U4rrhbKmyY7BQo/MU2JkpjU2VfXdtj8KXPsrVEsYntOloeJicM6+/To6vy5dJ/Zua02Fm7S9QbY49u8g00NtjJNn00720dNG9qCSpmOH8TcVIjZy8TPd2eGIYAOAI7Sz6FIfObRmgTWdrNS3bZ//+mwCAg6+QSt/SwQ4hswgbx4jedQ/VPens1Q60SJQd0gmuxc3/93w9xo6dUOr9Gup1OAJ+XGB8Tsu3wfXRp6s0Dp5/+VUAwC9/9pNBm74BMteV2IQSNcxmdTb3HT9MZpJkF5lODhzQW85NhMhMU+YaNdPzZELpyGoz1mAf/S5XJxNAzHDa1rkezgP7yLR0534aw6MVLeAJh96PCwUyR7VoCxXKFfouzhUs29L0vg5PGtvG+R5wg7rV14OAQNgVcIydqJWzT1X6U5tVu0YSmatql/B3Xl3fd5XnE8nPRtbYVFvXwRGFPJnrKiWSqbdIv3FdwyoBwi0AACAASURBVJnKphi/RM+hYpg6Pa4R7nH7Im+oXqybYbIkZ9Vts05+iKMg1JaHIYeTpBYM067RfjlYBm5hYWHRpNhQBt6oVjB74TRKE6PBd0N3U2rv/CSthuc4pOepR7UjKMQr1Pg0se2pce1IuHJxGABQ5x1tVKhhNKoZhsvhZ9O8Y8nMLDm4TGfFIm+0XOc1rWDskFPI0+983ni4q52daZ2aqrS0t/H16feLRl3v+UVavYuKiXOIWyqltYyQWNujqFarOHfxEq5c0QzxAu/scuESOTELZWK+pbJmoq+9TA6cnvbnAQD/08/9UHDsngfJMTk9SQwxxnXcT5/W1eouzfHuRHGuclehexo1GNpCnp5JiHezyZc1w7g8Su3iCaUNEZNT5Q4A4P4HHgEAHNhPTtBEVsu9r58TTBbp+fX0EZtaWNAsP+QSazx09DWsFo7vI1WswK1p2XkcBtbVQ0kxLx0kLWJbl95w+OH3Ud+PHqJKgeWS1tpqRWJX7Zx0du7MGbpWp2ZdW3rJsTnNjnvPOQsASMT0+Jas2bnMAFvSOowtx47NhUUaA11ZTgkP6z6eP3YKADDPz6ylQzvh2vjezrxMfZs4ROGLrewsBoBwfxZXJZCvGI4AfMdgvqzB+mrjYab
ivsE5G5wUU+PdsPyGfuaznFA3McvjkN+1dKw3aNPaQZpc33bWqLMl7ovuR4T3IlhgLSXWpsdeg8tvSNZa5RTNS6+fORm0GeZwZJ+rIHa2awfzT/047Z7VzfNIo073095uqEA3gGXgFhYWFk2KDWXg0vfRKJWQK+nQmIsnKfSqxAH74xeoyM6ptA5TaummFe70BWKCKqQJAFwOM3I4lT3CLDFphOuEOYRKcKD8Vk5br5V0QaAohwuF2si2+MJbOp15hDWGGDgFn5NXnJhOSAol6feLi8QAp4w9OStlspWF2BDWxfWehbG350nWJFaLXL6Ef3z+TVw4PxZ8VypyHfC6YjGsnUS0HXZ+jmTwyovEEB9+WCf57L2DEhW8Bp3T4bri27bo358/TwyjVCLWsmvP+wAArd09unOC9xnlfSE9aPv4wBCl8mezxHDqHsmtb0j//tHHnwQAtLONWApDu4pwCFaMGFaYP/dykgUApNlevBYGngjHcP+WPTh3Wtdrv5ynkEAuOY0Eh/rd98jdQZvOGD3z7buI7amCaAAQDREbbEvSCbp4DM1eOR+0WeC9HYscYjfH2tydRqiZCo1bzHOxMl/beecrdCzJRdpUpYTpc1pT+9Aukuv97PM4V9dM1lmkH8Sq9D6depX61jmnbd49jw8tDQFcMSTgexBGmnlQO5/H3GvvkK+maiR4dewgX4HHDLZqMPA5Dv2dqtB3Qw/TuNzapn08C+P0/Pbu3gsAaN1FCVGJJcXyaKwmWEOMGGVAHPaFOWwDX2jQXHb2vH5+kzF6d8rsA6mXtf/prjtJ0xnoJ/kXufyGWbDL7olpYWFhcYtiY8vJOi5aU+lgB3IAyHNqcYE9yBneFX58VNvJRyeJLUzNE8NozehEgzinrNeFCvgnxmPuK9fgVTjqEBNLsGe5r0OfZ+tOilDJ8M4ysxPaztuYoZV6Sze17+2iz6hROEo5rvPsQQ5FtR2zK8U7rnBCTISjOWYbejU+efoU1oJcoYzvfv8Y8jnNvlQRMMUQ1C5HlYq5KwkxrKlZYr4vv6LLgv7IR6kA0J69TwAACpye3NqqWWSqhRhh1SPWspN3yOnu1gw4myRmp1iIa+yE9IEP/BCfm6OHfJKJ0QRd3RTJoQpnSTM1nimI4ESwFCdu3HO/ThX3nTVEnzAqlRqOnxjGuSt6XA48QPKRLmmNc6fJTmwQQfQMEMuu8f6rW/v0mCtxkkaDi45lE/R8kkK38TgJLT/HJU8jNK527RwM2mTZdty5jZhcxLSzc+JQpI00k61baJwPj/9T0CbEkS5330Esfe74cHDsnb94GQAwn2cbbgsxWLeiZVq6MAm/uvpENCEEQuEwisY7e/oU2ZFHp4nBHjzEu9TU9DvXzj6t/scfAwD4RtJLkZnrDGvre/vJZt8W19rb1GHyWVzkHab2svx6jVITUQ6HUuWqPSPCpMR2bckaSylP715lTu9YNQOyi9e4KFXYiKIp8HwQ4mukeC5rGJEuZtLicrAM3MLCwqJJYSdwCwsLiybFhppQHEcgnYxD1PS6kWcVR4XqZTMUoheKayfkFa4AV1cGfSP8r87VB0NcJ6XMAffzRV3pL8UmE7UjzZUrZB5JhrU69VSBwhafeoTCvj5+4N7g2AA7AVu3khqWZvOIX9IOlfI8mSCcKrdNtwTHlH+nyLVTJjlE7vCFM0GbN986hLXA8yTmCzWEDdNNgR1YPjtFYg3euaRm/pA+WsMk78kFbYI5eprU722D5IBLt/fxp3asdA2RbDMt7JDrpN1pYq4OZSvMkNwFJ5q4rlHNkOt2dPeQ2lprkHmmXNVmgLqndmXhhC7DhKJyP2TwB32YIZqxqHY8rRa1Wg0jo6OQrdq2s1glVTnGtbZ9Hot5Q4ZD+8hB1pai8bhgVLMrOuSoTcW4vnyY+ux36rET4Ve0lqBzl11KFvKMxK99d9FYnWYTo2/UWxEpGuNnCmTC8ap0zd079Y46E2NkpkhEyJE8GNcmnO8
dJhOKt5XMCw//BJnTilKb0UqFEsZD2uS4UpRKJbz1zjs4fEKH39XLHA7K9XzKqh6PwTkXCtQmy3sAeEZtojon/4FNFx6bOWaLOrxVCPrdAtdEunyE3se2PiNRjOcINeQ8Y8Nil6uqumzmEFzvv7qoQ4jLbCaVHCbsuvr9HBum2vVnTpPZcmKMxkbZCK4Ih68/di0Dt7CwsGhSbDADdxBPxFCN6MtOT3HAfY4cENkQh4MZKa91XvXqnAo9l9dOgkyCGFw2zOmsi+Q0MCveRSPkeFHVy4rs5KlX9EpX5ySIHZy4sKNb71ji7iHH3HlO9jl1hpIp7til90csl4khlUr0mZsZDo4p5+voOLGgs5coFO3Qcc04xiY1M1sNfAlUGxJmPpDypXkcolnl8gDmbichdv6WORZualbLbWae2mfaiIU0BDHnaFKzggxrTKlWYnROiNoIY+uUGrOpBmtbiawOgQsrdsznBqceS6GvoeozB5uaSK0BqDRqyVzE89X+hPoeb5SO/F7gOx6KsRz8huEEO0n35fp0/miGPnfu0ffXPcDtq3Q/tWndl4hHjDcSo77XObU6bGgPyt/cmqIHu4V3ivruW8eCNncdIGdqikNgI3Et+wXeC7OxSONyQiXRGY5WyWUH5uYome7DP/xEcGye9239+jEqx1D26Z53PXJf0ObymctwjZDYlcJveChOz0OWjDrxXJmxyNpqLUfvtQ8dupupkVbTKNE9esY9qeGn9s1Ue7VWjHe+wFqeH6HGRXYmm+GIghlwsF+QUSu8ftW+nb5K/zfuzfXVbj00ZkNSn/vcieP8c5r7Tp/mqpJmXXT/+g54y8AtLCwsmhQbW8xK+ihWS5ip6DCZMbZtjXH4WzlMK2S70bNMB4WhtbNdvGGkMztsYI5ywkOSV9OysTQtMhtfZAaobMLCWOnO8r54h4cpJCya0Ox60i8vOXbsFH0uGMkLlTDZRo8c5sJRJ3R9ZLXSjzELmOB7XihoG3qjsbZUZNd1kE6mUDc0l2D/PbVtka/qkOtrtbRQ+J+qtZ410tQzKU4V9on15IvU//myZhGVBt231yDbYnsHtc3EtK04xXb50SuqVrl+uC1dxNyrvCdmmQtd+UYbJ6RYOidOGAw8uEXFRVjLMhl4sahZ12ohIiFEtrXCG9UaSpLrpWe5rvPQTgp33LtTF5qSJWV7VvZ7zSDrHCLo+1wkjLXJSKuul6+G2NQUaW137x4EAPy3b+kwwG++8CIA4KG7KXSzp1X7HxJcXK29Qc+1K0N9nRnV5Sg6O+kZjI0MU5tZnQz2sY99AAAw57L2yckm517X43vs4iXUStevW309hEIuOjvacNYI0bwwRv2Lsoa3+8BdAIAz57StfYp9K9kiXTsa1tqNslkrBltYIJbeHtHyV3tYRtR3XLO7YVD5hlDvDO9na3DeYB7honTKBu44uo1gPq5Gs2Mw8LERknORS3RMzZIGND+vS02YexYshxtO4EKIAQD/FUAPqJzbF6WUfyCEaAPw1wAGAQwD+Bkp5fy7ncfiWvieh1JuEb7vQ/o+HN64wsr25mBiYhL/9jd/CyMjo+ol6wKsfG8GKvkyjn/nbdRKVTTqdfi8W4mV7cbivZhQGgB+TUq5F8AjAH5ZCLEPwOcBfFdKuQvAd/n/FitELJVGpr0D4XBM7UYdg5XtTUHIdfGv//Wv4jf/7f+F3/iNXweALjt2bw6EI7Dr/fvx6Gc+iMH374JXa8DKduNxQwYupRwHMM5/54UQJwH0A/g4gKe42ZcBPA/gf7/euWo+MFoRKId0Ra62fqpDLVu4ElcnqXitbYYamKb20RiZUGRDq8cNlX1VpEXeYZWlbJgklG+kwiwhxGpR1Ej3q3I9iTdPc6XCdDw4Nj5JVRDfPkumkxGuMDb+/ZeCNqe5CuD4GH0uXh4OjokwZ4dyfZcamySkG4FwKZLP9wVoPfUiWIVsHSGQiLjwQtqZFOMaMLkcO3C4wlvICJ9Uzp1omO4
3ETWqvdXY1DND5o0Kh3EWjTBEVZp9lp3Rgq//zIeeDNps51DB3AKp4TXoOjFprllRAj1H5TQKh7X8lXlEqaZL9ihmNVbvqUv/r1YrSGdSSGdSOHrsDJKpCACUscqxK1wX0ZYMpk9oMikKNH7a02SeaE9xCGlYd3COx8XIGTIxRULGVnuS5Fvm7MIwy7Uxr0PdLiyQ03BihJyWB4bI3PGtVl2vY4Szme9YpH44rrEhNJvU1IbBPb0U7lnJa1V+aoxMF8rccPCg3m7tLjZLdHDd/vPP0ZiP91JIbXliDh2ZOM47DjzfW5VsF3ML+MY/fQ3nxmeD7/Y8RBv7DmyjGvIev0MLYb1V3uQChwpHqG/uMs55tZkw+P59w9EZj/M85NMYlBwW3BbTY89V4ckc5BA2nIpxDhutsPkv5F+bjVrlEFgVfmhWXJxn04+bJTNXgueHWcOEkjPCoZfDipyYQohBAPcCeA1AN0/uapLvepff/KIQ4k0hxJvlNaTb3uqQ0lc7cRewCtl6N/BW3+6YmZkBgARWOXYbZTt23w2VUlUVn1qVbCvV2nJNLN4D3rMTUwiRAvAVAJ+TUubMMLHrQUr5RQBfBIDurk5Za9+2pKLXlg46zwDXA4gz244ktUMizIHyYQ6cDxmOoDD3o8ZB/fE01zlO6LC8mQW1mwaH03EokTnlhXlT44uXiTFFo2eDY/EkMf96g9a7GtdtyReMmgfz5Lx0OcEiHDVWce6/79E1XLXLSI1WWil9eF4VjuPCV8bE9wBTtql4VLZnk8gbDrvZoCYG9TfEVQgrRphUmEOnMgkKXcumtDPKkaRphDy6/wLXYc8V9O8XCiRTtclPjmXbYWzr4uwlFhVPci3mlHbyzaqJkXeqiXE9d2nszqIYt1B1JKQetsofKwU7P5kVmZvBppJp/N7v/i4AXFnt2E20xGT+9AQmz2t2VJyhjrV3k6Ptwac+SPcZ1mGAz33vFQDAyBkaV4/e/0BwrB6mR53j19CJcv2OUV3x8OAw1cj5IFeG7OTxveeA3lz5MGuNPx6iyo6zsxeDY1PTNAaGJ8iBHJvnqpG7dD3vNt6dZ3qGxqxT0drvJWbFW/dRKO0DnGwyy5UTG3UPb/7DCUTTUZQXy6uSbVtrSpYLU6jX9TOrcL2kQ6couUakqG+zho9e8m5QqjplyNWvTpg1QRWo4HCQgdOh66CHGvz+zhGDHhiiZLK4cQ8uJ4+pnY1kXms30+fIkTs2STXSyzP0EgjDiVnjuF5P7R5kBBDMsmO7wTVU7t5NTvDFRa2hQuhNypfDe2LgQogwaPL+SynlV/nrSSFELx/vBbC2QObbFFJKVIs5CMeBcIKFycr2JqHRaOD3f//f4/1PPAkAava18r0J8H0fR757Gt17uhGOBYuqle0G4r1EoQgAfwrgpJTyC8ahrwH4LIDf5c9nb3QuJ+Qi0dq+JMxGVdsKs11WJQSEjTC0KDP2KK+qjhEq7/K5Qq3Eerq20irWPa/Z8aSyWY9NLPn/7KwOpSrkqH25zPbeqra1DQ0NAgDuPHAPAGDHLloxz527ELQZuULpy0FN37C+x/ZWDuXKUngYZ+ZjfmoClVIerutyhF9gH1uxbD3fR7FYDpIyACDG4XuNBt1TnG17tYamMe1xuuZAD9lmu9t0GvfWLcSY9+ym+tYvfI/Sqg8f1rb/SIoSLh5939PU9h6SUVuLZtkpZtfquVWMMMA6mD1xBI6yd0uDWwR/B8xIa2AqTAucKKE4WDgcgZQSv/d7X8DgtiH8zM9+Cl/+0p+rn61Yvo1qA3Onp5Gb0iwxxFpbiPeN3Mr1wDMZHQb4ymuUnHHu6DAAYFunTtNu6yPNpjVMTHKan8U/Tutxle6lNskWfnbsl9jdr5nk994k5neYE0Ha0vr9iKgUbo80oyujFIZnJsrdMUR+pzci9H5EwprJ5kZJc+gboLDa9z1ICTyHixfx/a8cQ+/WLHb98G48fz7YQWnFsk3EYrh3zx2Ijmo
b+AyXz6ix7dnJ0/1fvqTbfOhJtpOzj+XysJZbONhTkzSzSp78Cok7twdt4l1UgmBHiqw80RrL/6//MmizdTdpNYN30bi+NKzDGC+/8zYAoFykuaNWVmNQ27kd3p/AcTghyNgHt8xWh/oCV6PM0jsYiehSE2Zly+XwXkwojwH4DICjQghVsOP/AD2gvxFC/DyAywB++j2cy8KA16ijUavAcd2ggD2ALKxsbwqOHj2G73z7HzE4OIRf+eX/FQD2CSF+BFa+a8bk5QWcOzyO1u4UrvzHl1CYKcLKduPxXqJQXsK7b3j3oZVczHEcJJPxJTVuVUSEYuARttNGjF1jQmwXDWzhRqRFmBm7r9KQ+Xdt3dp3sn2IVt0iB/NPzNBqfGlEJw4cPvQOAGCME3qm5rX3dzZHEQCDQ4MAgKeeegoA8PhjjwdtXn6JbJ0vPP8CAGBhTrN7n6M/DvSRdpDOEss9edhFpJVsxaUCrcL1enVRSjmLFco2FAqhrbMDFy5eCb4rs1ddpebWyyqtVzOsCNsPk1Gye6aTOkpCMe+tvPPJHVPE4i5c0VES/UNk337sQx8GAHQNDFJ/jGJLLl+/xsW1ijXD1slRMCG2DTruUiYOAD4PPyFVqrLBzgMbOH0XYd9DtiWM9z/xJA4deQejnBTy0WeePiGl/Bb/dEXylRJo1AXae7Rtf/cgjbGffoaSTLa1EpMdGdfa30XeQebgWbJrdzyvtZfHHie5bt9G9ugLZRqfUxU99rpLdIMnL9BuQp28P2ziin7OGda6Xj81DAB4vE+n8od4f82uNMm1xSG2OjWix2cbp/Kn24n5XRjVu/U83EfPd/oI2Xuz20lbeHBfHx78wsfpHmsevvGHL2JmZGHVspW+QCKmmWeKk/b8KvW7VOQIMiOJr4v1rSGPxpMPrVVU2B6e4YiggRD5RorGbkd51nx6+qhQ15Gv/i0AQMzr/ViniwvcH3rulYrWHkdm6DmFeXzHVGJQTM9dUR7XYY9LPYS0b6/IYzfN9epPnyA/2viotnuneCegXE7b3k3YVHoLCwuLJoWdwC0sLCyaFBtaC8V1HKQScUSj2kEZCpF5RJlMwqxCK7MJAETZGafMJWHDBBPmSnONKH2nNjWOmGp2hVSr7iypTNu3k0ll73697VZvDznjXn6ZHHUjxpZuc5xYceLkSe4zXet//tSngzY/8fGPAQBaOeTw69/4enCswOaR4jw5Zvbtputv3zYQtDnF23HVVxlu7HkeFvKLqBsOyjvu4NrcLNupaVIH5w3zkMORL/OLZf7Uv28ITpCIk8ln/90UAjc9r9W5bBep/wODgwAAwQ4YYVrd2KejnNEhqcPUIryNVr1O5wzC0NxrTSjKkucaFeECx6ajwjd5bFQNZ2PE2J9tlXAjLjKDrehu0c/s4Z0klx19ZNZIJcik8sKh4aDNHJsGOx8kU8S33jkaHNvJJpcS1xM/6VOS0GMJXat79DnaSuzbRymc8IlH2ay1RTsxe5N0/WSGnahCO5DfuUghhfu2UD+6o/S8ZwwT5cUR3u5tC93Pa2Udly051+XeHfTHoQUyL6Sr2hTgz81Bvvfo12sgpY9aOQ9pmNbUO9PZRg7hEPc7Y5THPn2UZBPJ0T3GjcfcwclMOztJTg/eQe/caEmP7+FhMkN19nO9lCl6Pz/3v3w2aFNiJ+qzr5KJdesuvd1anEMdPQ58CHPgQtYIwMjycGzjojaVsJ7XZjg+NhGi37UmqB93PvNQ0KbB9XJ+/z9+FcvBMnALCwuLJsWGM/DWZHJJKrdySCp2HYQMGsk+MU5nFczWhcHOw4rNs+NAJdJEjFAe1d7hSA+V7p3O6oSLzjaqgbyV6y0///3vB8cOHiQHktooY4pX6sOH3g7aPPNDtDnvj/4wOfO2DmgWdfYsJQV1tpODUDlNZu45ELSZnCJmUy6vrnJew2tgbnEGPb06xbq3l5iYWqXnuC65Z1RhnJknprPItcIR1r+fy3H99AKxgGq
NKz8mdCmEVIavwRs1qwpvZrp7wM04hMrUjsLspKl69Exqy6ggOlSQPuQyvEOqTWc5I9UzCGEmm72m/UoRiobROtSN0pxO5JmZoL/T91CSS5Udbi+/+HrQ5sRRYtyPf4TG196HdZXLLpAmcrJGz1xVZDx/Qoewdm4dBAA8wbtBTY9QGN2u7n1Bm3vqJMN3zpOzPdKhx57LTtAobwR+xzbqq5PWWtiVM+TYK0/Scw7XNIN8+QId23kHvRcDIdIyJk7r8O7ZuSoaRnmLlSLkOmhrScKJ6XkhP0rvQ4mTZOa4DEN9Ub8f5ybZiVggB+P2AR2+GeJQ1S19FAqbjHE5jkXDgT7Bob+s7amqmdmknhe2DtB9/39/900AwNErOsFvR5adv70cJsxJiHGDZctZuo8oJxPO+lp7jXD99jhriFm2EMRCRsXE6PXrrFsGbmFhYdGk2HAGnonFlw0DVOn1kcjScEJAs+yAgYeNEDVm8yEuuqPC10JGqFwowum0zAUVW/ONlPwk260y91JIWFurTmjJ8Mr62usHAeg96yYnJoI2VWbOnW20Gt9/r95Tc98eYl1eldrML9JqfNf+vUGbyWliVmNj2va+EkgJVGs+6kZBncUCMe4ys+t5ZgE1IztAMdYCF4VKtmnbans7yeA82+ePHacdRIK9SQFsGaJEh7Dar5KvLwzZKru2+s4xUpUlJ+W4vEuT2j1HJXAAlE0J6HDICqf/A0CE2Yvkc3LEJiJhzSLjseuzmPcC0RCIz4Ux1KkTQe6IUd97WijN/QTvsPTKc88Fbfq2EJvrSdA9fIht4QBwicnY2GtcIIrZ/bkFPXb3P/ogAOCBIdLaDr9Iz2JqSsugUKbknhnef7Ec02Mgz2Mgz/XASyzDcl4nxLSy/0c4HLp3Ru+peZn3qn35DQqDHOqkfpwe1qGG8+Hykr0iVwrHcZBIJ5DIas2us4uYfqnE+8hyXeyJGd1v0aDx2c47Ic3mtUwmJqjdzv30HsZipO3Mjx4J2oycotBIn+3MSd4dan5G39vCLIX0yRKFhtYLOs39wL30LF2H5O3weeoVzbJrYXruJbYieI6e17w4jfkG+3TOXiCtxpf6+XX36PdxOVgGbmFhYdGksBO4hYWFRZNiYzc1Fg6S0diSMMIgfFCZQvj/IWOTVGUmcfiYa5hQBIf0RbguRUSdR2vwcNihKdiz5rMJYLlsP1UGYu8e7WxqZbNCOk1q2NtvvQkASCZ05licVf8oq/SOa5iJ2ElS5z6p0D0R0aFYd3K1t29+81tYDSQAXzpwjBsv8tZ1qrZ2Tw85t0rDOtOryvfrcJ2UXFGrf2fOUG2Jt946DAA4dIQccvfcf3fQ5n0qbJOvW1ebChu1HpXBRJlO1M5DgK7cJjiUSj2SqPH8VWXBEpuuajUzK42uE+ZMUnXuaNQcI2vnKVHXwWAqiRS0epti1f3KNGfvchW53kGdCdlxgJyP2zIkX7+mf38qR+GpFXaKx6bIXFHJaPPdmWEymQwk6T4XF2kbrstGtt5CjGSey5EJIRHV4yqbILmUcyS76WkyBeSr+vmkOuh6ozPU/0Vjq8CxIpmyDp4is8Kxy2Qm+PZbekNuP+yjaoTnrRR1r4Gp2Xm4jlFN0KFx1Zohx94A1zCpy8GgjStIlmqT6xmj/lFrC72bc7PkBP37v/4S9f+4rpeysEjyauXs2hiH4E5P6mqQLfyMhU/3bW4I7tXoO4e3c6yx3Bo1Lb/ZKo35y1xT3w/peyyyySzEJpeaT/ccNszHuRs4hy0Dt7CwsGhSbKwT03WRbslelcgTWvIZiRNTCRltwM4vVzkzjToboYBxu0s+XSMRRPnMeMMEeFW1mhpVEZkJRvjT3CBha4ySN37sRz8KANgxSKwqYfSxo6ubvmMmWyrpzW9VV8KqpodDfS7X9UqdTWs2vxqEQiF0dHUgFteyyTHrUlFNDS6DKIW+rsOyzS8Qy33jzePBsYOvUqLEpcvk1MrzPT30xFNBmxQ
nowjFGnhTYhgVB1XBEsmOTpj1llk4qgaKOiZCuo1i1T7vblJdwsDpuzhncSTYSeQYNSd8rN7BppBMRvHww0M4P6yZZz5JfT/NiSSj48So7n/sAeOHNB7kDDvBdhjsaprqkTguyax1H42rkSuaSY5zdcsZrrOe45DD3ft3636MEitfXKBxPTanQ+0qYRpX53g8z7jEMqs9Olw0kiDZ1ble+57+jwfHFg5SAsvLJ+keo1GuEd+rtYQOJ4rCZT3eVwMJwDVDh9UGxfzOlnLkjDXKxAdjTPIuN0ljx32GDQAAIABJREFUw+K9vMF0gWttnxvmMTygtaN0G9fAT9J1Z2bo3hYLRqjoHL0XuTw9k2hE1/lvMGOuc+hrrUrjoVzUY394ggIHLkwS26/VdJKU4HotWdYWnTiHwhoaY/0Gm0VbBm5hYWHRpNhYG7jrIpnJIGHYjhXzVjXCIxxuYzJwwTWNpdrwQG98ECTuGLk99Buj5riUyi6rKt6xTV1cy9KVxdYo2xuw+s4uShS4736qiRwy1r8q22lrdWJTeYOBC2Y/6lO1MXcvCYfX9ig8z8f8QhGxmA47KpSYoXi0+qdSJNsWY79Rn6ukXeY6x2YJgSwzlLnZ3JLfl00b3xzZcTtZAxFB+KZmvTK4T+WLMITLGoDazCLY1MIw/YXZvp5h27DpA/E45CqdoL4Fw8DTLNT0R6wWjUodM2cmkJb6mUcKdI/RCH3WOWTvjWOapfcNElPekqCkje+8/k5wzOd0f5frpY+H2JZtZCGdvEyscFs7Pdc4h9odMWpfv3aJ7OGlFD3X13K6WuQMy6eUIyYaOk1MdG5Gh8MtcEJMP+9z+eSDjwbHohxamGPb+cAAPedMi2bLC6VpeIZWt1I4wkEiGg18Q4B+/HWP3hXXV9rrte+1z58NI7zV43c220IJTPv2ktw6eruDNqWKYs70uxNcM/z4uYtGG5LbPFcy7e/XmsdrR+kZlAskP8GjTyW+AcAC73tZ4T5GDft2lLUMqZIZ+bUISz343RuEZ1oGbmFhYdGk2FAGLoSAG4nAMVLpIxy9EUQoKEZtsmxeoVSEg2estCq5w2G78vW25PMDuzbvERkybbF0vRpHCUhj+2q1W7dKLlErfcNImqlxXetqldqUqtorr3YNcvk8ahPXkhHwX7/R1hs3QMPzMLeQQySqV/hahc7psY2uq5cYdcrQVmIO2UIvDZM9ttTQ97TAESkVZtxJJgajV/SuJFeGzwEAhgbIfqqiiHxTu8HSCBO5JJGHtSt+NCL4nVEKgduHOTknaTBwCZJlhKNDvBzdx8xFnY4ucJV6tgrMLxTwt3//IgayWnv8oUcokcPhwktteerz3rSx5+c4RZgcrJDMyj3GfpMnKU09zMkqLUPEDmcd/Qxi7ZQ48623idUneO/PmqFh1Nq55neErvvaec3OU6qAGL8yDmu4sbBRDoFZ4uJFsqU/e/5v9T0+/hQA4BxHYE1cIg0tNq99DJn+FgipWf9K4TUaWJybXfLyBr4x9e5cpSEDhiYXlFgwzsnv6AKXj/B5gDWM8a1KOigf0e5tJP+3z+golMnJ2SVXHTHGfoPnCBlcmbVIY1qN8cm74nStuOGjUjX5IlyzvLuF3s8WYzcrFaX31ttaqzNhGbiFhYVFk8JO4BYWFhZNig01oUgADSlRKGvTQZUdeokk156WKjjfMORz+Jir6l4YylKDzQMNrs0bCl2rLivziFKroqyC+4Y65am/+bpVo560Mr0o00mJHZYNIyRI1TGv8LGCYR5Rmp7PW4qVC+TgnM/pcLGFRe1UWg0EyBk0NTFvfMdhSlxEWTh0/Z4e7YhJuOQcG+dtxwp17TTJs2qtCow02IQ0M6lrRYxcJodPqUDmhGSG1EDThKLCAB1JZi4zySfYLo23dgt+JsznuNQuZppEVBgkanTflflhup+zrwRtJsZXV19mCcJhiL5enDBqqZ/+PiU4xdki2NdCZon79ukQP5VIU+Ct+vJ
GjZ4S14+OZ7hOB3vOZUabJ3I+b5It6T25wqGhbd06HK7ACTz9XD/kylldy8TzOGxwls5TrZNTNNmnt85TO+d29lPoXaOix/7rR18FALhcoC/B5k/fMGOWp6qQa6hGKCDhSD8IEwX0O1dXob9sQzFNqwoycI5fe+4Gh+qqejrOEkOLWHKt9iSN0yfv3xW0WMhR8ltugeRXKun3WvWppipgshXUN2IdY2ymTcfoU81zgBG4wbaUNJs/I0atKOFcxyYMy8AtLCwsmhZCytWvnCu+mBDTAIoAZm7U9gcMHdi4Pm+TUnbeuNlSNLFsgY2T76pkCzS1fK1s1w+bPi9s6AQOAEKIN6WUD9y45Q8OmqXPzdLPq9Es/W6Wfppolj43Sz9N/CD02ZpQLCwsLJoUdgK3sLCwaFJsxgT+xU245lrRLH1uln5ejWbpd7P000Sz9LlZ+mli0/u84TZwCwsLC4ubA2tCsbCwsGhS2AncwsLCokmxYRO4EOIZIcRpIcQ5IcTnN+q6K4UQYkAI8T0hxEkhxHEhxL/i79uEEP8ohDjLn9ffLnqD0QzytbJdXzSjfK1s1wgp5br/A5XpOg9gO4AIgMMA9m3EtVfR114A9/HfaQBnAOwD8HsAPs/ffx7Av9/svjabfK1srXytbG/uv41i4A8BOCelvCClrAH4KwAfv8FvNgVSynEp5dv8dx7ASQD9oP5+mZt9GcAnNqeHy6Ip5Gtlu75oQvla2a4RGzWB9wO4Yvx/hL/7gYYQYhDAvQBeA9AtpRwH6GEC6Nq8nl2DppOvle36oknka2W7RmzUBL5cSa0f6PhFIUQKwFcAfE5KubZSgeuPppKvle36oonka2W7RmzUBD4CYMD4/xYAYxt07RVDUG3TrwD4SynlV/nrSSFELx/vBTC1Wf1bBk0jXyvb9UWTydfKdo3YqAn8DQC7hBBDQogIgE8C+NoGXXtFELRP058COCml/IJx6GsAPst/fxbAsxvdt+ugKeRrZbu+aEL5WtmuFRvoxf0RkOf2PID/c7O9ytfp5+MgNe4IgEP870cAtAP4LoCz/Nm22X1tNvla2Vr5Wtne3H82ld7CwsKiSWEzMS0sLCyaFHYCt7CwsGhS2AncwsLCoklhJ3ALCwuLJoWdwC0sLCyaFHYCt7CwsGhS2AncwsLCoklhJ3ALCwuLJoWdwC0sLCyaFHYCt7CwsGhS2AncwsLCoklhJ3ALCwuLJoWdwC0sLCyaFHYCt7CwsGhS2AncwsLCoklxW07gQoi/EEKMCyFyQogzQohf2Ow+3Uqw8l1fCCE+KYQ4KYQoCiHOCyHev9l9uhUghNgrhHhOCLEohDgnhPjxze7TjXBbbugghNgP4JyUsiqE2APgeQAflVK+tbk9uzVg5bt+EEI8DeC/APhZAK8D6AUAKeXoZvar2SGECAE4AeBPAPwBgCcBfB3AvVLKM5vZt+vhtmTgUsrjUsqq+i//27GJXbqlYOW7rvgtAP9OSnlQSulLKUft5H1TsAdAH4D/IKX0pJTPAXgZwGc2t1vXx205gQOAEOKPhRAlAKcAjAP41iZ36ZaCle/NhxDCBfAAgE5W8UeEEH8khIhvdt9uAYh3+e7Oje7ISnDbTuBSyl8CkAbwfgBfBVC9/i8sVgIr33VBN4AwgJ8CyfUeAPcC+Deb2albBKcATAH4DSFEWAjxYZAZJbG53bo+btsJHABYVXoJwBYA/2Kz+3Orwcr3pqPMn38opRyXUs4A+AJod3SLNUBKWQfwCQAfBTAB4NcA/A2Akc3s140Q2uwO/IAgBGujXU9Y+d4ESCnnhRAjIJ+CxU2GlPIIiHUDAIQQrwD48ub16Ma47Ri4EKKLw7BSQghXCPERAJ8C8Nxm9+1WgJXvuuPPAfxLlnMrgM8B+MYm9+mWgBDiLiFETAiREEL8OijC50ub3K3r4nZk4BKkzv8JaAG7BOBzUspnN7VXtw6sfNcXvw2gA8AZABWQmv87m9qjWwefAfALID/
DiwCeNqKpfiBxW8aBW1hYWNwKuO1MKBYWFha3CuwEbmFhYdGkWNMELoR4RghxmpMKPn+zOmVBsPJdP1jZrh+sbDcOq7aBc1bYGQBPg2Il3wDwKSnliZvXvdsXVr7rByvb9YOV7cZiLVEoD4EKFl0AACHEXwH4OKggzLKIx6Iyk07B8xrBd44gJcBxKJPV5/VECp3ZGnK4DX/n+b7+/VXHoBYk4/dqkfI5fFbwMWGsXZKPqZ/7xjXUsXKVHNJeg46lYzGjH3TOhhsBALjGMfD95mZnAQC1SoX7o+GEXG7qzUgpO7FC+QohllmJlQzokJJjd2dL0KIlE1ty/XyhEhwrFOnvtrZWAEAqlVLXetdr+B7flUEMlGwCSH3n9VqBflcvcx/pmCfcoE04Rv2t1Kj/k5PTwbH5hUX6fXC95TKig76sSrYAEAm7MhYNLxmX0WQS3GnqX6VEV2vo8e1KOubVPDpmPCYR5jFToWNhvmcnol9Lz+X3ok7n9Fi+iZZU0KZWpnFZr9apr9FIcMxx6fp+g67h+/Rpqt4Oy0yNc9WWrsvvhbpvPp8p5fbWLBYW8yiWygKrkG06nZXtnV0wyWTwjoqlfVPzBR9c2pclj37pOHi3PHnzPCvG1ddfJjxf3ZOap657uutc4/TJ42rsLsFaJvB+AFeM/48AePja64tfBPCLAJBOJfDpn/gIFubmg+OJOJVxiEWiAIAq33DDuOH2VAYAEHWou8VKOTgWS1Gma4wnIPDL4xoPusaDtuDVqG2YBnhYj1PUuU2dJ9tyRU9kVUnHjpy9AADIz+QAAE/s26PvI0kT4Vx2CwAgu0sfQ44m7m9/6UsAgJEzpwEAFWMKT7TSJDU7PXuJv7qhfE3Z0v8dSHOA85BwJd1TG/fxX33mmaDFjz5N/Uy0ZwEA33v5dHDs1YOnAACf/NRPAgDe//7HAQChsJ4g4LDceeUt5mhCRqMWNInH6NlKNUT9XHBs+uJrAIDyxGEAQEucJsFiSC8ynXs/AQA4c5nO84X/5z8Hx/77s1+n9jwJUlE5LH2XBN2/lN57li2dS8s3Fgnhkbu2oRINB8e3P/QAtWulifzMWbqHxuxM0CZdI5kvXKF7rjtaLtEtJMfpE3PUqXA7ACA20B60ybeQfAujdM58kcb+XT+mK8heOUbF8qbOjtN5dmzT18/QRF9aWAAAFPO04EWlni4SPA34IDnl5/XzKU3QeyBdvu8svW/mxPFzP/Mx/PGf/rX674pl297Rid/8nT9AraZlEwrRFUK8GHn8fkajmhiFwtRG8LgSBlFQE71aANRbYc4LwTG1SIjgwDVtrj6v2U5NVYpDSaknloZHf8ei8WvOp/vGk/wSSsf9dekeH3tg/6VrDmJtE/h1qU7whZRfBPBFAOjpbJdh4SKZ0OUFIsxUG/yAXJcGbDahGUaK28R5shdRzc7KPFHEfOpOkgUVi0Z1mxoxFFGn30XCNBh9Zix0Yb4dHgSewRLVoUya+lRcoElmJqcHen+Krjs0SC9PuL0tOFZz6MXo7OkGAFw5d5bauPo+YmE9MTBuKF9TtkIIKSCXsNtgMufPGg+mxYJmiHNzdC8VnyaIxx/oCo49/dhdAICOzgH6gllyuaLlNj5KbDjCL7h6trXyYtCmAJoEMi10bOrKseDYlWMvAADu6KPft8To3MX8ZNCmWqSJ6cCBDwIAfvs3fy04li/Q5Pf33/wef6MmI0N8Usk5eLFWPHbT8aisLlYQMSbg+bdPUv9aaVwcuGcfAOCCcy5ok43QwtjaQ8/l5LHDwbHCHMmzLZMGAERYe/PjeizE03Qs35gCANx/3z30+fDjQZuz79BCq8ZZuaYJTmOaZF+coQlc8qSHpJ4Ii2UaAyF+ZZw2Q7P0SCwdCRrP0QwtVn5Zh0cfffkQyoXgmiuWbd+WrXJ4+ALy+XxwvK2dFrE5XnjUhH7XXXcFbUKOQSSwVNO7duJV3+vvXHcpKw7mb5gLwVVMXlz7t/pKnW9sbCx
oU6/TmNu+fTuAJYrpNb8Xy4juGu316uPXPXp9jAAYMP6/BcDYu7S1WDmsfNcPVrbrByvbDcRaJvA3AOwSQgwJISIAPgngazenWxaw8l1PWNmuH6xsNxCrNqFIKRtCiF8B8G0ALoA/k1Iev95vhBAICQfhkFYRXVaNauzkibIakowYaiSrfSFWJ8KGHbLIar3jkP4XjS79BADhLLWHKSeoZ5gwoJxm/F04otWzSp3UxQjb2atsZy/UtCkh2UI22yj3e+TSsD51tQgA2LmXVOw6q6xzM1NBG4dNKKNjEwBWKd+rNFXlQFG2vcUS3cfrb+rTPLy/HwAwMX4ZADA8/GpwLLdIcvv4j9POUj19ZAJ67ZVDQZvjR8gvkEySn6K3l9o4jpbN0E66Rkc3tXnt+/8UHNvO5t40P68F7ofvZoI2sRDfQI1U7P6+bHDsc7/68wCAOXZmvvIq9a1hDG3vKgV+NbKNRFz0b2lHVerS2/kcmSdOvvEy9StEF+pN6rE3lqc2ETb7NVJ6XBX4WCs7lZMxuq9of3fQpsqdn6qyfZozu+tC319rmoTodtC4ila0mafBZpoIW9ZybGLbdf+BoM3CGJmrnAQ7U9P6/ZprJX/V3BSZCzuzNCaGdvcFbWZOX4IH5WdYjWwjGNi6BaalRb2/vb1k0lPvbjym+xYKLIRLHZ7ckSXXCOzNjv6+UqH3MsYmWlf5c5Y44Pn3gZ392v4r08nCAvm6jhzR78f+fVROPMRtGoaD2+HrBedexgl6I+fnmmqhSCm/BVuof91g5bt+sLJdP1jZbhw2tJiVlJLYr7FQhpXzjlfRCLPtiBFKFWXGrRycUSPELJ0kx1gmSo4kV7H7kG4TcZeGGhZKRW6jrxHi9glm0KZH21XtOUKl2iB2KeKajQVhg+yMnRnXZr8Gh5ft7KcIld6ODgDAkTc02704fBFrgQDgSB1uBQC+8ooHYWLc1lA8wswaz56i/lZKmj2+8SY544bH/hQA8NGn9lLb4xeCNieOULnkaoXO09ZBz6GlVTPonnaKIvnaS+SwnJnUkS7bP0BOuZkZYqPT56gfbbu0EzjC3rVGjeTYcLWX/8GHSKv51V/5LPV1mDWJUR0JoiJT1lL2R/oS9XIFMqaFd/YiXSuWIEfjnTt2AQDeek0zsMUFYq7RHT0AgFCbds6HfGLHix61aUuQg3Bxdi5oc+oYBR90xEmeCf48/87BoE2WI0MmjlEEUHleOxijPo39XJm0l1AvybUyprW/qUMUxVJ06RlEu5LBMRUdlptntspRITNxI8yzPQYRur6z7XpwXQeZTGoJ21SMOZ1KLvm/ybJ19If6xvQQqg/1Bx1zjXlh6gppHp2dFJ2XYYex8PV5Ana8NMaBrsYDSgU5TE6Qs727syNo08eBC4I1H9fsv34jl9zIcpEq7wabSm9hYWHRpNjYcrICkK6Aa1xWJZfEIrTSOmold/XK4/nEfH2VAORrBpaNE/uIRIgBq5DBhhnnyiwipsLp6sSgfTNulJm/+lmxVAqOlYrEbKrMpH1eFUemdUJJzwSt5nv7d9KtNrQNWKoYVk6ECXNcdMiwwcu6EdK4CkgA/lWLdRDexP1VAYbpjGZYhRKxv8kpYn3trTqG+Oc+81PUzwgxxYygkK7WjA4D/f/b+9InOc7zvl93zz2zM3vfu1jcWBwEQUoWdVm0rsiUbSpO7LKdpJTK9SH5EFfliyr5B5JUyhV/SFVKVVZFSaUkW/EhWZJ1mJQoASIJgiBAEFjcWGDvc+67j3x4nrefd0EIIHbJpYd+f1WoWUz3TL/99jvdv+f6PWMTIwCAwiYxvBNH9wIApk9KuteNu9Rz99vfI1/x3glhobfu0fE3A5pbJ0/XoeewMHCfGXTTo2urp3+pGfzMp54BAPzbf/0HAID/9sdfC/dZyVewU0SiEQwM9SEzIil29Sb7Z3k5FguLPF6pIXBdzqMuEoNtaznCNqdeZjJs/UXId710U0uh3KD5sYbpnLt
B81y8JhZbKkUsL80DiXZrhTz83rERSmP71KcpfzwRkX3u9tHxb+SJQZY0BdUs513H+on5r6wQc799XayEuh9DvakVVWwXD8ixC5gN2/dZ0YDGjsPiP7x9232s1tE4q0ondu4rBtR/Rmpv635GT0ek9zimptbiyRPSRjPDHoJAMXA91VEx8DCP8O182jBwAwMDgw8odpWBW5aNSDyGelXYrarg62ImDS4vbrrCAupceclV1ltKheMcya3UieEE7OOqN6WYIcJP2ig/BRsN+u5IQr6nXqXPqyrPckVYm8+VjEODxHSaUWIjtvb8azCD3mBW7mhMSzHgJmetLC0SI53VMlVa2ni3C88CLI2GqL9V4F3xm8VFsRxOv/wmAMD3aN5usg8ZAOa5SOcLnyd225OjzIO0kGNcf/kXAIA4Z+3YacqkmJldCvf57k/OAQDuLfMcl8XaqG9cAgCk2rQmjozQOtj/2XE5jySV8gceWS6WNu82V+emU/T6r/757wMA1teEIf7Pr30LAFCsb1+bPxp1MD6Ww3Jb1u7YJM1HEKFr/VqZ/NXxp2XsgwWyUObPE2OuF+TzZYfGs+cIrasDeyhbp7IpWSSZCM3nsYPkV80k6VhLFwryPWyZTk/SceuOZDp4bJH+wW99HgDwqac+TGPWqh7neM7fnOV13SXxi1yWLvY6r5nLb9GaatzdCPcpthJYe1B6xmPAsfyt/u2wSlKxY96wJZBBv+ewzH9Llc3WVyVpEPNknCm2QmK2GgPLDWiZKqoYLoyJaYewg63ZbcH9jBpAoOze0BLA2yCVmOozgggebtkYBm5gYGDQoTA3cAMDA4MOxa66UPzAR63dRLkpQZ5IglLxBjkIGWUXiB6scFmEKpWifWNakU+9sdUsznL6Wr4sOiXVOqdH8fOqyW6b+YXlcB+VBqXEc+paTLGvh4JV/f1kMqeyVNyQykgwzmYz1qtQQUkuJgFKn4sPKhsUnNpYJfdCWRPMKjd3FsR8GEJFNJ7SFKdLAYCTIvfEUD+luVWLYlrfuj4LAHjhzOsAgDibnEsbMtbLc5yexkHgfINcIi3t3NaLXLCSo3nsSkiqYo0LVIolGlyRA8WftqWYxeqiwGrc5YILzYwOpICEvpt1Rf7Rb4lg1woXqvyvv/wxtgsLQMQCpkclOPvD/C0AwC2L3AuZEyQM1q35mFbvnAUA1JvkkrM010WzTG6Q9RGaj6pN5vLo4X3hPnfv0HfvPUgpiqU5CpQe6pNCmtNvkXbUKs/PwKi4QI4doqByjgPBBV7ztbLmInRp7rIgd00lL7+v9RJtO/8KuYCSLGS2f3Qs3CffaOLeTS03dRuwIMFEQvBLXrU97hMe1b0T93lQ4LP/ta2loLZA18JnsTP2xoVqo4AWVwxzFnWV0q2uE5+vX6DXB6qPvV0k9W1jtUMxLNlmP+C8dRgGbmBgYNCh2F0G7vuo1apwtDJ3j2lhXcnAciGOpQ0twaXJcQ50VrQAY4MDg30sx6oekI5WrVKpEKtrc9qWE6fvKdUkXavOutTJJB2rWKiG24YHmHlzoK4vzQqIaUmnU+mQKS46qWlpgS0OnvqsBpieou+LO78S7vPyyySrijWR2t0x+NGu2GmO0wf3HzwQ7hLnjMJl1ipfuCsFHpZF8zWyh/Y/f54knS/MSJFSxac5Ka6T5VFlzfQT01PhPoXGIh+fvu/pJ46G2/aMUXDu+lUqJnnx9HkAwJwYUHgqRsxdpYDZjrAg36XrFrCkgsWMPGbJ/J88IqmR20XbAuZtwNqYD99rjtLkdQ/QOhhhtcrNc5LiN/caFUPtGaFikcmpnnBbIkYl8Kts9b3CsrCOJVIBE1OHAQB9U8Tum6zR/uSkpIIOHKM5XFik4qXjk8LOJ/tobF6DAufreQru3uXUTgBoefRbm5qmAKdXkPl98Yc/BwAcGaJz23+QLLWfbMoFqvd2w75wC9tGECDwPTRaYrUlElvlVx+odm9t5dlbAvi4T7ec2bF
n62qdfM/hlGXFZnVdUPWez/enLcVGEZWeq9II6TVm60qmW4vptogL3pe2KCmKmoX5iOozw8ANDAwMOhS7W8gTBLDbPnK94qPz2Um0zj5rjx9CCa2U3mK/eI21vy1tW1+OfKXZsDsK7etrJbP1GrEzVRuUr/GTPiKWwJG9lIKVTdF7b77xZritUSJW7NZorPE0sYO+bmFKyn/XrXTMNWdXvkS+TtWtpskstfug+DobrIV8/eZdvFsIn+z8x/Q0sbhcVsY9c4V8tIUC+UbjEbEqPnSKGNnkHmKw6xvEdm8tiNZ3nn27IyPEAveNkSU0Nim64tUGnf/GKh3jtfNi3dy5S2NR+t2RBJd+F6QUHlzAA0+J+0uanMuWk+2qVFP2hac00ajSzq2amufjUqkOdMl1TY5PAQD6e1hkLU+stnRTUjGfGCVrY3yA/NODKWGZyQk6j4ULFBMp3KL1vVmVFMjVBTqvlM1Mmk/rzTtiBe3vI7//EywaNjUo8YM4p8Z1J+g316yQpRXzxEI5eOwUjecIyRp8/1s/CrctrJCV+g+epZjC7Q3yyb/xhsghtLqTaNa2nwYb+D4a1TIuXboUvvfMM5S6arPP3Qq2dtPa8nfwoBL0rfuozMCIL5xVNXSx+aZj8WsEcu+wg60pwL7mA1cxIY87GEV5U9yTcTie6jDF49c5831+cen29HZJgF8Gw8ANDAwMOhTmBm5gYGDQodhVF0rEiaC3K4eoLYdtsVe/yXoCda56TMUlSBNTaWdsWWSz4oJJcbAjovo/cjpbRnNvtOpk3tXyZMqvrZMZ6Gn5PkN9FGTaP0HmZ0TTMtncJJeBCobmOGA6MihuglB7oa1698k5BqyHUWNVw3iSW2dpqYbTmjtluyArTWwuJaqm2pwdOkTByEhU9olzGVpvD5nh6YQE2bo4TbLJFYxDA5QeNzkq573Gwd59ExTcGhuiY7ltSZcbGiTTPqhx30ytErdap2uhApNT43TdR/u0ptA15QLp4s+IC6dZJbdMllUCA3aLjYyKG+Ejz3BLxv/x59gu7EQMyek9oX4GAHSzBnwyQvPTqPK1d8UEHpuiMc83aMxuRNbudI6ux+r8dwEAPWlagwNTss9Cg1xJp0+/QOfF+wRVqabtGqNgaG4PuQGv56Wl2zAr48WHKbAZYdXO6Q+fCvepsrvxT7/5lwCAH3335+G2vX30+aZH1/Us678f3Hs43Of23D09u+5lkHugAAAgAElEQVSxEQQBPLeNWkVaqqnq5fu1SPRoZriKH6BholoLKpfF/ZWdAODyJ8rs5mhx42i9LaFywSj9o4imYNq21fdw03P+7rYtv2u1txV+59sbNwse4EJ5hA/FMHADAwODDsWuMvBoNILR4WEEWldzlwOSJQ5QdqUVE9QYOBfuSIcZee7UObgQZu5wgFB14waAQWYhd7hbeJvTlXr6hKX5XHjSzzoQe8alUCHNOs1ei5/qPOasVsiTZMXD8gaxxUZVAnVKYDHKgU4nVCGU6R9iTeKdgNiGs+UdAJgcJ2Y2NEjnls3KE767n86twQHaoX6Zk1iSxlfn+fK5oW8sJtZJjlu9HJqkzwUgdh2La4VYPLddGWJxubToqPu8f8Sm1/4BYrGZrJxHmy2YOisnrs4Jw1xfpyKWwSGav+HhSQCArQWoDxycwk7httvYWFmBkxYGXl6n8XSD5uPOeQp8Jy1Z32slYt4Tp2h8n37mQ+E2j+N+jQqd33KRvu/wsKR5nuLONw4XQ7WXyYp86onJcJ/BHprPXB+tx0JJAor7DpCG+zingvoBKyD2iqX1g29+AwDwk7+hxtDJtly757/0OwCAi2/SuV28QnMfzYsF4DXbcHekphkg8F34WnDa9+n7XFcFL2l96gqeYXENW++Btk1111FFNm3+/I0FSZ88P0NNqWvMhKNcKKirXaruYRPqNzQkWt81Tvl96w1KfU1wlsTEcbFOlK5Kmn9yD+pRHNz3V/BLtj4IhoEbGBgYdCh
2lYE7toOubBa+5iOqs28oozrhKM1uTTnOZ7+yzU9YpQIGAOpPdmPB5fSoRFIYQYo751isENjf3cXvC1PyWM2wwn7y1RVhGGCLoZd9316YsK+dG1R3Dn7ipsWCiMaJ7TSYibaZrVQ0n18qJftvH8EW/52KD0xP7wcA9A8QA49HpZxYKRE0msSu7bikuY3uJbrQ201sdmWJlsvxlrD0o9NkqVg2sadsNx0jqnVsuXmTUv362BIqrIt1UqvSXKTidE0cp83jkXG028RilYrjmZ++FG5bnKcCkqc/dJLOLUK+895ezbpy5Xjbhucj2Cgh3iPfu3RrFgBwb5n88EmO23RP7w336XU4rZFPZ+4VsR5+8iq1iszy2j9xgtj28aPS1D2TY1bo0j4/XiZN9YGsyCF87AT1t+QQAy5XpdhoaISYepLXboStoDt3JQ3xyltUeORXaA1MH5V+mT3cUeblrxHLLNbpWu4ZF8XF4YFBrM6LOuHjIpVK4OmnptHbI3GPQbYWYameubS+qhW5ll6bLWJmzJWqWB4l3s/jQNBigcZ99sqNcJ81LmoCz7+S0Qi031A3qzFG2fd95eLZcFu7Qj+e9QWav31s6a015Hdd4PU4wpZxl0aovbAnppJD3Gpt0LkZH7iBgYHBBxK7W0qPAE23hb5e8ff63PnG4yeNEpLx2sISVdZKMklPMz0SrIpjGpz1sMFP1fUNKZOPsBiWepbtnyKGs7EhmsoJh1hmV4b74tni68yXaYzZOLOhKHeY1oSJwMn8CX6ad/WIoFGTS+iVRo7qFmQ5YgEE7rshZhVAM26w/wCxrz1TpEmtXIbdPX3hPvv2km/UbdNcxGLih2woEbBBYs57JqYAANOHhKFluGfh5StvAAAi7B93tVL2aJIKgW5fpzJ9rdkRVtboP+USzVFfVi9kJjTr5Gt84/XXAAA//vHpcFs6QXM5PkLnuDZA191tyPF9LSNmu7CTMaRPTSKVEpY4xcGNrlNTAIBEmrZd/8X1cJ+7N2k9Zpg4ZmtCwRzuOP8hzuA5PEqWmlUTBre4REU+dos7Rrn0urgimTglLq5Z4s5DgyNiAaR76buVZRgFsdT1NdFrb1VUd3b67o9+6qPhtotX3wIA3Fmlee3n2ND4XjlGpVqE/whf7cNgWYDjACdOiMSCihOp7lkWS2MsLggjLZdo7cTZTz2/LEVw127SNdgzNQUAqG+ShRBUpUBsgOM0Fy8Qq3Y5ntb2hIEncmQJ+EWKJdy7fFHGyLGOD5+kDjy9LMPR0ITc6pzx1WezXr5273JVzwCO7fmK+WtTGfhGD9zAwMDgAwlzAzcwMDDoUOyuC6XdQnnpHnq0NJ1MkkzwddYnVhbDQFqCNB6bGoEq+mmJebyyRAGkBU4P2iyQud1uixmj0tYO76NimVQXFflE46L7EWMHy8wdCgDlq+JKyBfI7OxLcBodu1CqMQmo9PVSkKinj8zgtmb6NDkgV2nR91TYxAq0nCLf2fml8AMbRw9Ph/9/7rnPAQCmWJekh9PNBvulyCnOY3jjdWqNVtfifTOXyPx/+Sek7fHMx54GADz/pWfDfWKcUlks0/l+g1PSfK0x7jAHwjKsx9HOSbApya3CPC6iiEfJVB4ZlmKhu6wtcvolMnVnroqpPNhLqXNnI6SjYTt0jlNTkmbX3SNradvwPPjFCvyyjH2K3WQ1i861zpovPX2SXmrV6e99GVofw758fizBAfeAzOsWu/SilrhpnBwdI8tt/FYq9Jm1vLhQ2m367UyM01wOT4srYpF/H9VVWtfHT5LrI66572rsQhmdINdi75AEar/xg2/S8Zq0TzpL25Y2xUWZ7k4+OD/uHcKybCQSiS2FLapYz/Vpbqs1eo0lZG78smpOTi6UqpbKWOVWch4HBj3W/q6XRRcnwa6XoELpmz4nTnialkkjSp+vlMkF49XkB2JHWYue56bJ9zC3LT4Qm13DqpmxHpOM2CrxgXXF2f/pay4cxwQxDQwMDD6
Y2FUG7jYb2Lh9DbVlSabfe5IKG/Ir9IS7eZvY1rMflUBKhBPsl9aITawuSSBi7s4sAKDNHW1UqmE8LsEwh1nZWp6elOsbpKQW1SyBIpd3t/mZVtE65FTK9DmfGw8P9hGb6h6QYGB3Xy8fnz5f1HS980UKSlUVE+fGyZmMMMOItbNLkUymcfjAcXz0o6Ix/swzHwcAjAwR4+7OkcWR0Er4h3uJ6W6s0TV5/dwr4bbeHAWbk1yKf2+BGHnTkyKZVILmolCm77zOKYNjQxLEXZ8nhlOrU+AsFpcg3eHDdHyVNjgwRCwwGpVr891vvAgAePUMF17UhaFU6sSCbrOOeTRJ1zZfFqY0MSGMcvuwYAUOSveE+b70IrH+4SdpzIc+fojGdE1S1fLnKJhW5fmYOC7pd10BfddqkZhYd4zmOauV66/xeQQBqxImaVu9oaW6cTelAU7TXF+SNML5NZrXA2yFtXid1zUm2cdW44knSY3wu3/zw3Db6VeoG9PgOM1hLkcWc7Ylx09WG7D97dfSR6MRDNxXyKbIeCJKFszqKq29H/6tdFVaY3VMm2UrVotyX8j2028zYEvO5SQDcFAXAApKGmM/Wa2tKjFwX9MVT7Oeek8/BckbI2JZNhp0/OUNTkdki37CFetddepyORHD10rjA9Yf8JTSoq8aKMs8PMqwMQzcwMDAoEOxqww88H24tRpKmnbwnRlKU6qx32npNhU6XO0SQZ/uIXqKXrtNCfONqvi3Ha7ksbmUPcbdftIZ8W9H2a9qsVDWJD/tW1o+W1wl8/cSW33p9XPhtnm2GBLgEnwuP7cTmmgNl1gXi8yqtJ6cjTr53yI+neNgks7N0np7zrAlsV1ks1l8/gufw/iY+I4nWdyoh5l3lNPeolquYZJTIz/6iU8BAJpa0UurRlZI1OES/F5KIfMgDLzeUKLG5FM9zt12+nOytG5fJf+6bRFD2jMu1zaTJkaTytB3Th+m3o83r0lXm1/8/AIAIL9J1yuZET+ox+llhSqxmfMstnT1llh5+/ZLh5rtwnM95NeKWD4r41q7NgsAmJqm0mm7SGMZbAsD+9hTVETVzYVa1WXxHY8dIFZb4enMz/G2QH4fyjKM99L89uTIaltYEba5yAxQrT3Pl+s72DcFAOjl7lFKw1oJswHAvoN0XQeG6buf6j4ZbvvLn5NFpvS5ymX6DUxrpfgHxvfgXPQKtgvX9VAsFjWJCRF6UgVx12/Qdf3O9/463CeaJHae5FTWQkPm7WA3jc8eoN/AeC9ZSaPTImXQ4vTSGt8HalzVVivJbzd/gyy6iEvbJn/ts+G2e5tk9S1eoCKnGKcbNzQGrmJMc8tUOPXGLZmneI7W8UdOkdUct2gh2JpnAJZJIzQwMDD4QGJ35WRtBz2ZLjRdKawob9BTrMKR2yyXsy5pojOqK8hqnvzKPVnxryb5Ca3EalRXi4b2NHYbnOFgE7tOccL9aL98z+QBylDJDpLvbGNZuqq46+S7HWc/5sggvcYj4qByFEMpUCZBJK6VBWfIN2mViN3GWPJ2w5XenjPXrmIniMUiGJ3ox5OnjoXvZbgHZtgeVGn/bJHkpHMYGqWsjV/7zBfDbUw6ELUz/H3EahzNX1/nDIb9e+nz/b/3DwEAKyviB04kiS3GHfI/jgyJ7z/JzDCTJavId4mdn/nF+XCf1Q1iRC7L/1YaWkceZm0NLrX2PWKIrZZ0tbmzIH9vF36zjdrtFTia73mgm6ydxbOzAID6DPmeT/WKZXXy2BQAYGGJ1rCTFs40eYyY7vo8sUuUOH4wMCLHYOYeV6XYbMVdmpVCHN+i+ezOcc9XLcMkwoJkrQZnWsToWk6OytpvFOgYwxN03CWt56nFvSRze2jbxJNU+HX5nFyfpheg0dh+sVQk4qA7l4WrMddIRJ0LvVb4t9Osy++60iTGWy3RObpaN8tamRbvZpFeLf7NxTUp5ViC1p7dRVZolGU41i7I3N66RNbf3j66d/QeOxRuK3J
h4ZtnyMLs6aW5KmpjtNnavXiOitA2bl2WE48FPFY6jzHuvXv4iGSS5RJirT4IhoEbGBgYdCjMDdzAwMCgQ7GrLhTbttCVTsJqyXOjzAFJlaqXy7JqWlKCkHNr5GZps6YINCd/m82eCJszddaRyFclVS3DLpNSkVwwc3PkHklr6VrPViht8Vlupvr8CelYMsGqZz2T5CboYveIX9M0D/JkotpN3rerO9ymMpiqrJ2ywh2BLt4WzYxzr1/ATmBZFhLx2JbURKWxwC+hMpuvdxzhv5Vu8sCgBPyUul6MGx0365xqWRfXj9em9xIRMn/799McjU1JiuXUPjK/SwU2TT1JxVL65WXWsL47S66zlYLs40Xp+BVuXKxrRTQ4yNNosmIlu9DiWsFHvqIfb3uIRKLo7x/EJGtrAEDaYmW/X5BWduHiLABg6rmPh/ukYmSe2xwAP3RA5tdjF0CNmxgfYjdUX1xS6mx2pwz0kPvq9JukOVPXCr+GxkmLY2SI1nMLWhqbq5Q86f+VGq3BVmE13OfJpynQmpsk0/1nL0ug1lfqnjlyyxSbFCgNeiSQPb+wgFZr+y6UcqmEl154AW1Ns0alrg5z96fFWXIz+ZriYIPdZU1OTrAjWgEVC/80Ahq/x4kTRa3AzgndrvR/pbty7035XRa4uMrqp/W0sS5BaC/G7g3WMW9zwWFZmwuHuzUVKjRv9YYmBMTuuOtvUWBztZfSpN+6fivc5eRRcac8CIaBGxgYGHQodpmB20imEmjG5LBrq8SKl5mB5SLEIKNtYU1tZolt1uHeLEsxRTZFT90cl7xWOZm/oukGx2NcxMDpXVUuYmhrT8M2l97u57Lv/Vo5sXPkCADgFqd0Xb1OAbrDB6XzRr1OT+8ap96V1mfDbSr4qgJZN+5SKfiFyzPhPosrwoi2A9u2kEgkwq5FAOBxcUWEKYYq0bW0IKYd9hNkLWS9uaFSZFRxH5/ZW1mCgjWe7wgr8Vk2FzRZYkFleiiFq9YmGlgqiApks0rfWS7RtVnP0+vqhly/OuewqSIr15Ux1jjwpc5DEVNHY+lNd/tKeQpOLIqeiRFMHpIS/VuvUCAvzpIKv/mlXwUAfP4zwsAvsURBwEUanpBjLHN6qtKwd9hS7B8Xlt7ilNMSBy9feJ3Sbse1jlE9bPU0LfryhPar9llzPsprIcYWYioiFkpuiK790ARd7wNHpOtMPEv7FfJ0nb0iBWp7kzKn1UQbvv3wdLeHIb+Zx7f+9M9C1gzQegaAw4fJOrg5Qww8ogU6Y8x4dYtSwWcrvRqaa7zOt/TEVFtYooO/u9oQC9OP0bYKl8tntHuGy5IHlirE4d9JWDQEIOAuUnaLrkOjJLrpNmi/aDdZq0M9FOTf2BTvQVUrCHwQDAM3MDAw6FA8koFbljUB4H8DGAbgA/hqEAR/bFlWL4A/BTAFYBbA7wZB8NDHhR/4qDZrWNe0mhc5PWixxIJAUXrC9Wkjy/aT/6+P/eKuVsarfHxxZgBpfmLWtUdTkdl4kf3tfli6Kizixjwxi4uzVEgUTwm7XmEBIrXtrav0WtCetI0osac3L1J59b0rkrCvnt6L7ONb5nPeLFZRKxXh+z4C34fN5bbbmlvfR7Va28LAFZyI6vzBnbn1OgGVYxhwqa8n/rsai/sEMWYK/MFaURh0dZOY2VgvpZc1m8RiSg2xoDbWibFvsChYoSAsJqL6GXLxSTZD/t9cTgpVkkmyuLq5289GQQotlKRySEVUSbfvwfN9lGst3WU+SOf8+PPbqtUx+8ZltArCzhzue/rxk8QSf/O5XwMArMzNhvusrJLff3R8ioZpSbFKP/dX3CzRCFNc0t7QKPTyJsVLzl4kq+/WEllqv//cE+E+RY/mY3SQdNczmiZ8i3NB2xUad5tNgEAbh8fxg6BGFuL0ARE7++SvPgUAuLPBPuSFOf68h7sXVtBuerBiNlR4ajtzW65U8dLpV9DXJ3GjHu4gpDTplTSFHQgDVxEs1V0
+0BZ2yObtrdv0+VedtQK+D7jKPPLl/hRj2Qll4cUC+XyF12HAay40bLX7So3vOavrPP6kxKgOsV5/PKA4ye1FWvM5TXN+QEt1fhDeCQN3AfyHIAimATwD4N9ZlnUUwFcAvBAEwUEAL/D/DR4TiUwXsn39iEYTqqlrAmZu3yVYSCdjGOzJoZ+E+QfN2n13YFnA2NF+HH12EtPPH4Bbd2HmdvfxyBt4EARLQRCc57/LAGYAjAF4HsDXebevA/jSezXIDypsx0EkqnqBWso/F4OZ23cFjm2FgmXsU63DrN13BdFEBKkcWZ1OzIFNRW1mbncZjxXEtCxrCsApAK8CGAqCYAmgm7xlWYMP+SgAoOUDCw0LdS3dp3eM9ImDbjJDVOPbnl5p8pvqov3jXDkVaEEpV1U9VdlEYXOq7krFVY0towYHvyIq1SwiqVDNgFwH566xUiFXZwHA0gql95y/Qa6TeW54vPQzae11jXUslhbptXhvNtxmcfVXg/VdWuwu8CDmmOv68MkUqwDY97hzC8uGE4mF2uMAkEnz+fF02faWm1n4OUB02APNRGy0aW7r7CpI8RflNzWdF3aHJNfovds3qYrvBy9I42HVLq5vkFwEmgQM0lw0mOCKSrdFA0nFhVvEOUDtsgvK1kxll1O/Yo5ywai2e7KPYztok7srhW2uXQcWeqwYVlhrBQDqS5SO+okvfgIAsMLNjd9kBT86IK25JmvAD45K8HGY28C9VVDVyOTucLVAV7FI8zvPFcu5HlpDJ/ZKoHP2JqUxNjgt7YlucYGgRa6TeouuT5NdbJ4tP/356+TiinO6aI9WrdjH81lq0bZShQKv1bi4yLrGe+DTb3Jbc+v5HkrVMno0F0qKfyvcZxl5Th9saFHgtnJ9sO9C6k+BCLtBnBq53xIOnYfraIuPl4hq1Zji9RVo6cF8+ZBk3RVfC0IrV0ngqfsKfaGjywnGuBo6R9d9cM+RcNPek+SmdbgK/dZdmtuzZ38e7rO5ISmdD8I7DmJalpUB8OcA/jAIgtKj9tc+928syzpnWda5uibRaiAIfB+e14RNWR/vWJdTn9tKufzoD/w9he8HWKY8/bntrt12y330B/4ewvd8XP2rGcS64tju3D4obmPwzvCOGLhlWVHQzfv/BkHwF/z2imVZI/yUHQHwwDy4IAi+CuCrADA0OBC0+vYgFpVn5Xg/Pe0n+MmYZLYdS4sjP8qNQJWKXkQLRESZTbc4UT7ZxSppKRnOeoFTeJiVNzkVSL9TRrmp8Z17xKDjcdHyUHodbZeedy1+YpYrks64nqfgpcN5bNG4MHgnogKEbHLygb1WAUEQoFktwrJtWLalBvXYc3vgwIFgoH8QdU2HwfdY64GVAkMNE/3Emb2oAEygaSH7vtIZof/XOd0NGUkzQ5uu2+wSMb1zrC557epsuEuWLacWd0vqzsgPNsjQmDIj9J3tgDWaM3Ie2RQ3rub0roYr1z8W47XEwa0WM/guDgQFQYDFzQqGerK4t5pX0dfHnt/sUFcQ35fGcFl+Mn3DpIsxeZRY1d1FYs52SyzMgJsu97I+SrJHLMvTv6DirRsbFBhsV+g8BwckTbM7SkGsG1wEdfIEsbZ9U5quOHeZefEyKQdmpoTl9WdofkpMntwy61O3hbu9+jpZnQsrND2f/tWPhds+/ASd48X/Q5rst2+SlZEuWAiCALXNOgYOdqMwHwZ3H3tuIxE7gGUhmxMGPjRCjPXidZqbIgda21oBU0tZE7x445pltjFLxTC1Cv2eR1lnvvuAdCuKdNEaSfJt0ON7ycTe/eE+V9tkfUaGKcXPHhA9cZsDkyoZIsr6LXrT9cDjYiHWNNloytp/6w5ZcEGF1nqU3amqwxIA1Cs7VCO0SNfxTwDMBEHwR9qm7wD4Mv/9ZQDfftR3GWxFEARo1cqwbSfMQGGYuX0XEAQBFjfKSMaiGOnbIgpk5neHCIIA9WIDdsTGwIEefZOZ213EO2HgHwfwzwBcsix
L1Xv/RwD/GcCfWZb1LwHcA/A7j/oiO+Ig1dO3xYepnlbq6eOwgzSaEP90nBl7nJ9wttbVwuHvinDfw8FJetIOaT0DV5TPenF5y/83NiRVrVLiUldOW2o0r4Xb9u6dAgAcP0EdS/YfJP/fzZviD52fI4ZUZX9xoHWU6eMy6D7ucMOV+VhfnIPXbsB2HL0PXg7bmFvLtpFIpkLtc0Aj2qpYhxl4oCfWcSl6wM/yQHumh+/xw8X1mGl0Sal3V3aYj8VdZfaSnveHPvbpcJ+rF2jZnH+ZuqnkOaYAAJ/7LPmPP/lJKn65ev0igK3V9hPDxEhm5zgbra4pynEco83qkwmWOQh8G7VmC8VaE422i8LNOgActSzrOWxjfp2ojdxQEn19Yj0+kZoCAPQ4ZD2sLHDqaJeYOCUujOrvJnZ57YpYdnOrrFAYpbHHmEmW1yRN8y6z603ut3hojNZSy5NCp3Q3WXvpEbJ0frQm6/JzcU6HbdB1ztfJGt1clFTOF39GVtOBPB3jS7/xfLhtsoesiQj3+1RFYKXVRhg4mfnBbbRbPrY7t0EAuC4Qi0nxV457ga4XqNhN+a6dhFg3tvJBc4GZr63rtUXyJ8fy9HsoLtJvflKLv+05RPG3K6/9BABw7zrFuCYOSPn6/l8hre7h/WQRWLbclxp3iOVb7BhXAfPautxX9k+RymlsgmIWL7wk/u2YSwU8PlvNMe5pcHjPRLjP0THa51t4GQ/CI2/gQRCcBrQ75lZ85lGfN/jliERj6OKWZrUy/UC8dqsYBMEGzNzuGKl4DNPjQ3C4eexb91avBEHwfd5s5ncHsCwrLHsd2pvGymwFrbpr5naXseul9Ol0couPSKXRKQaufJqhbxNAhDMMQl94RCLJUWbsfszm/9Pneock+L1vLz0Fq5x4v8yFJXfnRXP84gUSCVrkgp7VvAQFN0rEUKb2TgEAnn32WQDAJz7+iXCfM6epZPqln1L2RWFTnsI+M5UTo/Rk7WJf38xF8W8pq2L7BckALAu5bkn8j3GvwEBlmqhaX83PrQJIir3oDNxm1hNRWQnMPhIZuTZpFmuKJeg9jzNcJrRsgZFBuhYxh2j1wp23wm2Hj5Fo2OR+8U0CwPqyUPB0nFhoTA1D05Nvt5UOOL1WuFO550kMZd9eZjT3ti9X4MBCznIwkZZzz7HG9NqbxLzHOUPk1UU5v33cIWltidbc6rysi5FR7ijDGRNt7rlaX5MsiLNc3JHlUvzUPfIJLweaHAULNaVS9Du5ZQsTfW1xlsbB8Zvb92h9nzkthWav3CBpB2eSrKn5JbFe9/SQdfE7X/wIAMBzKOPl+qzMpdtuYidxSMe2kUmlsLQgvTwTvHa7bLJgynz+6aiwdMeleWozcw00cTrV17JUpYFtsOUx2JBf2PoNmsu1N6j7lrtOMYzbWkeeJ377HwMAVlZpTrrasm0iwvrhnCWV4+DWOGTt7vXo2jTZHo5oRYisc4VUF3f44l4Iqbqs78A3XekNDAwMPpAwN3ADAwODDsWuulDIVEoiHpdAQIR1OpTLJMp+tYjW4DTOgSnlLolqLpgo6z67bHKppsYxPRjHuhxDVE6NffvIpTJ9TNqPjXCa0JkzZwCQxrHCZp7M3yszMzxmOtY//f0/CPf57ed/EwDQwymHf/1dab5aYR2Kap7MzqOH6Pj7tGDF1WukQdzeZqq8ZdmIxOIixwctlZGnQvQvNMVCVnLzVZhDbywL1QSZvjMV5bSrhJYiqQKkodYEvR/RyiomWEP7yPETAICRQSk0OXaSmsw6SXIruRaZk/mimKH5IpmhnCePuBYg7lHB6wHWzmAt5lt3pCVeQVOv3C4C14e/2kRXr7iovDgFxk4dpUDTzAqtkzXNBH6W3X135ilg2YzJ2j/ATbHbTXq9coeKoE517Q33yXGbrgi3W+vn63vp7JvhPj97mQK/J05Q+uATn/5wuO3MKgXjazYFP8uXaQ1+/w1p7TXwYboum+yCOHv5brj
tU09OAQD2j9I1/9gnaGzOsGSerBTmkF/VdK4fE729Xfgnv/ssIloruIV5CnT7aZqbGhfh9WvuvwT/VjZ5jotagLHIhVNNzhhQxTpdvqyrmxfIjfT8Z6hRcYqvzX//2tfDfZqb5L7b4PvB1JSk0I4dpt9x6SqlEPc7dMxkcR9/3GIAAA/zSURBVC7c5/Yq/X3tLrvCtGqjbJzOLR6hMa6tkQupR9unVTdNjQ0MDAw+kNh1Bt6TToeBS0ACkopdhymDWrFPgp+ellLV01hiVLF5DqKpQpqYxjLV/rbSw+ZjduVEGWygl7ScJ1ln+ac/+1m47ZVXXgUA1JhkrK4Si7l4QRq7fuGz9BT/jV//PH3PhJQ637hBqWMDfcTehkepCGP9yRPhPiurxObq9e0xGcdxkM3l4GlFOqoDj0q1DMJkoreX0tvhPppWOE9zjK9X3FG6LfLcV8FP9Y1qi2ouDQA+B/syWWLejYqkcjW58KbKNdObrAuOqORtF6tKK5xYaKBpWUcidPy+viSPh16rDfn8RkECh9uF6wbY2GyiEZMJnuAAZZuDumdeprSyaEuYYH2Fy/+5mOrE9L5w28ERWn9rd6mhtePQutKL2CrrFDQ7dYi67sSHpugz118N9/nVL9Kac3n2b52TFNjmFBdaxWgc0/vp8/9iv6SCznCXntM/pDV/JiPs+plpUpnsSnFa3/JZAEBNC1r27B3CwsUdBIhtC9lUFL4nv+sBDvTHOHhZZ73+xKZYU30cPOzlNEqrT86p3qZ1tcHpl2Uuj7/04g/CfSJsUR7ZT3ItRZaIyGgNyavLlB7c4Nf4mFhgaU5pHB8lSYRIQMcorUkw9vYc/a5vzlGANEjIPWftbp7HT1Z7N0fpxwflPLoTukDA22EYuIGBgUGHYtcZeDaRfGAaoCqvj8W2phMCwrJDBh6VYTtRlerGIkbKh651lonE6EltK/1f5ou+VpKf5sKh7CnSWe7tkbLeLJf3v3qWSpXrTMWVeBEg/SIHeom9PH1KemoePULFFKoHYr5IKU1PHJOCgZU1ekIvLorv/XHgOA5y2Sw8T5zooUCVw+fL6WqtlvgBVe9Im/eJaNdGFRYoLWTbV917BH7Yd9Pjbaq4Qus9yMmRCfb5LS8vhtt+/GPyDR/neITShE73Cgus8DFqrLvsaF1Z8iyTsMjFWck0Xf+2puncbL1jeZlfCi/wkffquDYrPRGn+4h5XZohn/G9WbqGPb62dk9M8nt07etlEaqqDdG6KrOufLlCjKw+IWNfd4lBVupUuLO6Sv8/Pi164Cc/SeXu5+4Q83v5r6SnYqRJHG2xl+bs2Eco9fUzQjKRf40KrUbZivnFiy+G237947SOD05RgdaRbi5eScj1vbO2Act9eLrbw9BqeZi9m8fKssiG2xatw94BYqce/669LkkjrHEBjx3lQhpfPt/LVkw3dxTyfGK+d1+UFE8nQ1ba3RvkCw8COmY6Ltdv/R6JSS2ykF1+WBj44T1TAIBEF333zcu0z/Cw9IMdHaX94xkW4tME9Ib7uRMPr/lUiuOAlqa74z987RoGbmBgYNChMDdwAwMDgw7F7lZiWjbS8cSWNMIwfVC5Qvj/EU00WrlJbN7maC4Ui1P6YqwmqAJuEa2tlK0Cbazj4Htb9T8ASaOLssUyfURaqvWwOd/FFVPnX6fKrXRKzLkkuyLiyu2g6Q4rjQTu6Qubo4NWTOzY46xo973vfR/bgWVZiCZiiLgyNzabYsq4bbVq/CppbnE2FxNxOpdkUlIEbR63ElUJXNWCSjPr2FXi31dDamuNk5WKpErxvLckrqcLr1MK3K07lG719NMnAQClqlQj2py2qAzLmCPXbXiYgsVququsiWJrC8APds5TmrUmbr5+G7c1FT8HrFldI9N9MU+uKTstx17kdLaxfjKTy+viPlpbY/2dEiswpmh9pbQgWonTBwt5chHdWKKA7OSYqBGurNL+17nlXyMi7oz0Io2twdWrV0vkApq
wxRWwJ0vz2zxFbpL1iKRgrhYpOJng6tBMliY605DrM5VJ45Kz/TlutFzcmN9EtSoKlG1eo4vrrJHOv680ZG5s1pmfjNL4+2NaM2GL9cB5HaiAvqUnN6gm3TVK/yyU6JxcV1yMDdaiKaxQEPNvf/A34bblOQoW93APu+kjlBY8OikJDP09FLhXrdy8QLsvcDJAhUUyCwUaR0lLdfT8h8+rYeAGBgYGHYrdDWI6Drq6c/cV8kS2vMaYAUa0fcBPL0cFM7VilUjIuJ0tr47GCEIJEC5a8Zr05LW051eE0+hi/OppLHMyQU/W3/qNLwIA9k9RYCqljbF/cIjeY7ZYq4lanBpKFKrohcasUp0AIKcFZ7YDJ+Kgp7sLtm5VsFaEUmtrc3eWmBakSXCxTzpNgZioZt2oZq9KLNznrkVuoDGdkHn7W/aFlmposTWVYV2NWFoClFfvEBtfWKFg5FvXKGjkaJoXZW6GrFhMYMnxqw2a5ybrLrc9LqbISLpW/yCd40ZhewFigNJdRwdHsK6xo5/eouKrJq+n/dNkRR15RgLYtyNkN8w5rFcekfTGyxZ9V4HXWozPeXFTUkmLBfru5H46n0P7id2trkpq5Drr2y+xEmZeEyYZP0prtZQn5l1ao8+1e0XXOlgntrpnhJoi78mKjtDNJQpa1jNsQaTpGPGsrO+jT0zjxZ9KF6LHhef7KNTq8LR2Ny22GKp1GltxldaA0tcGxHpXv1Q7kMSDZJqTIeIq9ZhTYLUU5BIXeN2cpTUXZV2fRlX0Tvr7Ka04upcC1r1p+c2Pc9D3wBRty3TRNr31R4OviaU0TbTEiba6THy9Emxl+G25fm7z4Y1EDAM3MDAw6FDsrg/ccZDOZpHSfMeKeatCkhgX7egMPNSxVpUltjzFVOGO9mClz2ia40pxT6UPKv+0bb2dpSuPsd42UrH6AU6wf+rpp+h97fnXZE3fFqvjlTUGbjHDUq9qH0srdtGZ73YQsQL0xjz4ntbPL8apV+w3tLic2rHED5fmXn8qfdDSkgQVWQhTAjk109IYMFTKk7U1jVAv9vG4umhjk3x89ZZ83uMlOM9qbzXWHC9uihqkz5bKQD/7Ex2xjnwe0yb35rRUOmRCjp8vyLXYLjzLQyGygf6UFFm0uFWR8mXHj3HKW0vm98wZipcscP/M7iGxDHr7iY27HJtIsa9+tSQdeWoZ8ou/epP829E2Xd+REfGzziwQq565R37yklbNtcSFTg6XqfurxC5/VHwj3CdXpvUxv0L++cXZ6+G2J48TU4+3aKwx7hrU1ZT5Xb++DLex/XaJFoBo4MPWLAdVCNbgtDtV0NNwZX2XyjRvV7kz1vKyzFuai6uUhagsMicq95U867FfuE5poClOJR7tlTjQ0Qm2Gg8oC1uu7WA3zUmSLdoas23Xk/Wt7h3qfmJp6c0qluSF9ydC1BYL2Ynfd2O7D4aBGxgYGHQodpWBW5YFJxaDrZXSx1QhiSoWUYxaZ9nMDlUXak97wvn8nhI6sh5ST+CHfm36TESL1jv8pGy1iEkEmj9OPSlVAYx6wrpasUiLO1o3m7RPrSm+UlXK7vD3NNhnWtMi+W334b6uR8GtV7B25WXNJw3076VMmliWWERXjhispQlNRfGQJzwLBykGHqhCKL3QQLHxMBuF9rG1GEKTxZ3mOEtiYU6yHBTpajArL3D2iat9PsUMZ5y7kzR88VH6Hl33lVWK5KuMJS8vrHuz8I577f5SWIGNhJvB6sxS+F6Dsxb6eV739xNb/fY3/598jqlXxqN1ndD6ZVbvktXmcxFYmS/FrbYwycFxYn5q7Z3Nc6eZkhSt1Fgb3OWCs9W8lHJ7FziL5Q6xzXaKsn32TEvfx+QyzdUw/xanu0RsrLfMv6979D0O19C/9srNcJ+5Yg35gvRxfFwk41E8sW94S3ZUoUCseoULtVq8ZN2E3LLq7B+v1GmtrFbkOgcVtUaVZUjnH9FaFyojfWWFto0N0fo6fmhPuE/
aoWNY/Ft3axKf2GDLSVny6v7ibBGEUwPily3C6Xw/47fUmnf1e8EjhNYNAzcwMDDoUJgbuIGBgUGHYlddKAEANwhQqYvroMkBvVSagjVOQKaKr5kODtsYjmqdpCXjuyyg7XLaUSTydpeAMmmU6yXOAUPfFReIp/7m4zabUlSgTCPlOqlxwNLVTD6lY97gbRXNPaLcOj4HveoVTvcqibJaobgzM79WyeP8z76J4ZGx8L1MN81XmgPDToxcKUEgc/R2A017J+B2ZT6di8+dhgNPa/nErcx8dd3Y9aRSNmlsZAbPs67E+oroibjsGvDYBVPmYo645gpTsjijI1R8UvNk49ISu054JReL3FS6KNdPmmhvXxMlcAPUV5uAIy6qVL9qtEv/P3uJGs9Ge7UgPaeVrt0l10V5QVxr1UVylcS5EXW8j34DbUeuQW83pfQtcCs0j91hmyuSRpjiFFTL53PPSrFLvkTn3M7Sd9fZ5eVqQd7MHnKZ7Bun4iBXC8D/9AoFNBdn6Nqx1wIexA2aHR0B1rcfKI5HbOwdSiPwZd5aQzTeMrsbW65ai9q64qBhuUHz1XA1JU1OZ1XrQqUHq3sJAKRSNE/ZbjpWN89RTHOfepx6K2tIPt9q0e9CEjHUfUbOTd3GrAe0FVZr3ucxqdaHuv6JbT9cY8YwcAMDA4MOhRXspBvp4x7MstYAVAHsXKB5d9GP3RvzniAIBh6921Z08NwCuze/25pboKPn18zte4f3/b6wqzdwALAs61wQBB/a1YPuEJ0y5k4Z5/3olHF3yjh1dMqYO2WcOv4ujNm4UAwMDAw6FOYGbmBgYNCheD9u4F99H465U3TKmDtlnPejU8bdKePU0Slj7pRx6njfx7zrPnADAwMDg3cHxoViYGBg0KEwN3ADAwODDsWu3cAty/qCZVnXLMu6aVnWV3bruI8Ly7ImLMv6iWVZM5ZlXbYs69/z+72WZf3Ysqwb/NrzqO/aTXTC/Jq5fW/RifNr5naHCILgPf8HwAFwC8A+ADEAFwEc3Y1jb2OsIwCe4r+7AFwHcBTAfwXwFX7/KwD+y/s91k6bXzO3Zn7N3L67/3aLgf8KgJtBENwOgqAF4JsAnt+lYz8WgiBYCoLgPP9dBjADYAw03q/zbl8H8KX3Z4QPREfMr5nb9xYdOL9mbneI3bqBjwGY0/4/z+/9nYZlWVMATgF4FcBQEARLAF1MAIO//JO7jo6bXzO37y06ZH7N3O4Qu3UDf5Ck1t/p/EXLsjIA/hzAHwZBsPOOAO8tOmp+zdy+t+ig+TVzu0Ps1g18HsCE9v9xAIu7dOzHhmVZUdBF+r9BEPwFv71iWdYIbx8BsPp+je8B6Jj5NXP73qLD5tfM7Q6xWzfw1wActCxrr2VZMQC/B+A7u3Tsx4JFnYb/BMBMEAR/pG36DoAv899fBvDt3R7bQ9AR82vm9r1FB86vmdudYhejuM+BIre3APyn9zuq/JBxfgJkxr0J4AL/ew5AH4AXANzg1973e6ydNr9mbs38mrl9d/+ZUnoDAwODDoWpxDQwMDDoUJgbuIGBgUGHwtzADQwMDDoU5gZuYGBg0KEwN3ADAwODDoW5gRsYGBh0KMwN3MDAwKBD8f8B6ldejIE0hy8AAAAASUVORK5CYII=\n" - }, - "metadata": { - "needs_background": "light" - } - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as c_trans\n", - "\n", - "ds.config.set_seed(6)\n", - "ds.config.set_num_parallel_workers(1)\n", - "\n", - "DATA_DIR = 
\"./datasets/cifar-10-batches-bin/train/\"\n", - "\n", - "sampler = ds.RandomSampler(num_samples=4)\n", - "dataset1 = ds.Cifar10Dataset(DATA_DIR, sampler=sampler)\n", - "\n", - "random_horizontal_flip = c_trans.RandomHorizontalFlip(prob=0.8)\n", - "dataset2 = dataset1.map(operations=random_horizontal_flip, input_columns=[\"image\"])\n", - "\n", - "image_list1, label_list1 = [], []\n", - "image_list2, label_list2 = [], []\n", - "for data1, data2 in zip(dataset1.create_dict_iterator(), dataset2.create_dict_iterator()):\n", - " image_list1.append(data1['image'])\n", - " label_list1.append(data1['label'])\n", - " print(\"Source image Shape :\", data1['image'].shape, \", Source label :\", data1['label'])\n", - " image_list2.append(data2['image'])\n", - " label_list2.append(data2['label'])\n", - " print(\"Flipped image Shape:\", data2['image'].shape, \", Flipped label:\", data2['label'])\n", - " print(\"------\")\n", - "\n", - "num_samples = len(image_list1) + len(image_list2)\n", - "for i in range(num_samples):\n", - " if i < len(image_list1):\n", - " plt.subplot(2, len(image_list1), i + 1)\n", - " plt.imshow(image_list1[i].asnumpy())\n", - " plt.title(label_list1[i].asnumpy())\n", - " else:\n", - " plt.subplot(2, len(image_list2), i + 1)\n", - " plt.imshow(image_list2[i % len(image_list2)].asnumpy())\n", - " plt.title(label_list2[i % len(image_list2)].asnumpy())\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Resize\n", - "\n", - "对输入图像进行缩放。\n", - "\n", - "**参数说明:**\n", - "\n", - "- `size`:缩放的目标大小。\n", - "- `interpolation`:缩放时采用的插值方式。\n", - "\n", - "下面的样例首先加载MNIST数据集[2],然后将已加载的图片缩放至(101, 101)大小,最后输出缩放前后的图片形状及对应标签,并对图片进行了展示。\n", - "\n", - "下载MNIST数据集并解压,存放在`./datasets/MNIST_data/`路径,执行如下命令:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── 
t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Source image Shape : (28, 28, 1) , Source label : 5\nFlipped image Shape: (101, 101, 1) , Flipped label: 5\n------\nSource image Shape : (28, 28, 1) , Source label : 0\nFlipped image Shape: (101, 101, 1) , Flipped label: 0\n------\nSource image Shape : (28, 28, 1) , Source label : 4\nFlipped image Shape: (101, 101, 1) , Flipped label: 4\n------\nSource image Shape : (28, 28, 1) , Source label : 1\nFlipped image Shape: (101, 101, 1) , Flipped label: 1\n------\n" - ] - }, - { - "output_type": "display_data", - "data": { - "text/plain": "
", - "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAADuCAYAAAAwTtAhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3daWwcZ37n8e/fusWbbLJ5SpRIijJJSbQuSqI88TiJ5HWwM4Nks5hc6xcBDCQ7QQLkRYTNBAtMAuR4kQALDDYxdgbjTbIzGyRzOHF2bEOxMUNdlGSJIimJ9303b1KSZUrPvugjpESRze4uVnXV/wM0SBb7ePrX1f3veuqpp8QYg1JKKe95ye4GKKWUsocWAKWU8igtAEop5VFaAJRSyqO0ACillEdpAVBKKY/SAqCUUh6lBeAZIvKJiDwSkYXQpc3uNrmFiGSLyA9EZFFE+kTkV+1uk9uISEVo/f07u9viFiLyNRG5ISKfich37G5PIm21uwEO9TVjzP+yuxEu9E3gMeAHaoH3RaTJGNNqb7Nc5ZvAdbsb4TLDwJ8A54FdNrcloXQLQG0KEUkBfgn4I2PMgjGmAXgP+A17W+YeIvJVYAa4aHdb3MQY831jzA+BSbvbkmhaAFb3pyISEJFLIvKa3Y1xiQPAE2NM+7JlTUC1Te1xFRFJB74B/L7dbVHJQwvA8/4A2A8UAe8A/ywiZfY2yRVSgdlnls0CaTa0xY3+GPiWMWbA7oao5KEF4BnGmGvGmHljzGfGmHeBS8CbdrfLBRaA9GeWpQPzNrTFVUSkFvg54K/sbotKLroTeH0GELsb4QLtwFYRqTDGdISWHQF0B3D8XgNKgX4RgeDW1hYRqTLGHLWxXcrhdAtgGRHJFJHzIrJTRLaKyK8BXwA+sLttyc4Yswh8H/iGiKSISD3wZeBv7W2ZK7wDlBEcWVUL/DXwPsFRKypOoc+CncAWgoV1p4i44suzFoCVthEc7jUBBIDfAb5ijNFjARLjtwkOoxsHvgv8lg4BjZ8x5oExZjR8Idjd9sgYM2F321zi68BD4ALw66Hfv25rixJE9IQwSinlTboFoJRSHqUFQCmlPMqSAiAib4hIm4h0isgFKx7DyzRf62i21tFsnSfh+wBEZAvBIX8/DwwSnJfkV4wxdxP6QB6l+VpHs7WOZutMVgxlOgl0GmO6AUTkewSH+73whRYR3RO9voAxJpcN5qvZRiWmbEPX0XzXFwC+hGZrhfC6GxMruoCKgOWHow+Glqn49IV+ar6Jp9laqw/N1ip961/lxazYAljtqNnnKrmIvA28bcHju926+Wq2MdN11zqarQNZUQAGgZJlfxcTnE97BWPMOwSPYNRNvY1ZN1/NNma67lpHs3UgK7qArgM
VIrJPRLYDXyU477tKDM3XOpqtdTRbB0r4FoAxZklEvkZw/pwtwLf1cP/E0Xyto9laR7N1JkdMBaGbelG5aYw5vtEbabZRiSlb0HyjpOuudWJed0GPBFZKKc/SAqCUUh6lBUAppTzKFSc1UMkplv1PoTNeKaUSQLcAlFLKo7QAKKWUR3m+C8juYbBe69KwO2+1tvVeH6+tr7GKdj23O0/dAlBKKY/SAqCUUh7l2i4g7WpwlkS9Hqvdj92b0dF40fNPhrar6CTjZ45uASillEe5agsgWSqw27/1JcvroP6dvmbepFsASinlUVoAlFLKo1zVBWQHt3fnKKXr+NqSuftMtwCUUsqjXLUFsPybSqKqshX36UaajfISt0xkGNcWgIh8W0TGRaRl2bJsEflIRDpCP7Pib6YCzdZKmq21NF9nircL6DvAG88suwBcNMZUABdDf6vE0Gyto9laS/N1oLjPCSwipcC/GGNqQn+3Aa8ZY0ZEpAD4xBhTuc59WNZ/4JZNNeAmkEaSZ7sRq70OFk1WFlO2ocdbs0HJMrnaWu1MQBsdt+7GayPrvsWvsePOCew3xowAhH7mWfAYXqXZWkeztZbm60C27QQWkbeBt+16fDfTbK2l+VpHs91cVmwBjIU28Qj9HF/tSsaYd4wxx+PZfImGiKx6WYsxJnJxGEdkG082L3o9NvoaWSCqbGHz1l2rbfJ67oh1Nx4O/lyImRUF4D3grdDvbwE/suAxvEqztY5may3N14Hi2gksIt8FXgN8wBjw34EfAv8A7AH6gV82xkytcz+bXlKT5Yw9y/QBx7ApWyuOq7CiHTHef0zZhh4vaXcCb+J7wNZ1N14OH0gS107guEcBJYIWgKjE9EJrAYhKzG8iLQBRsXXdjZebC4BOBaGUUh7lqqkgNiJcoder7k7+BudUmokzJOAYnwS1RDmVbgEopZRHeXYLICzeyd6W38bL35i8/Nyt4oT9c17l8H7/hNEtAKWU8igtAEop5VGe7wJaLtodwy8Svl0ybgq+iHZDWMtp+bpp3Y2FV7p+wnQLQCmlPEoLgFJKeZR2Aa1CTwPpDG7IPt5uxWjueyPckKlKHN0CUEopj9ItgHXEclYqrxwbYMVzc+tOuGRoo1e5dZ2Lhm4BKKWUR2kBUEopj9IuoFXojrLNl4TTcyuV9HQLQCmlPEoLgFJKeVTMBUBESkTkYxG5JyKtIvK7oeXZIvKRiHSEfmYlrrmJt/xEz4k64bNFJzbPC913UuW7lo1kbvGJ4l2XrdO4IVuL3te2imcLYAn4fWPMy8Ap4L+KSBVwAbhojKkALob+VvHL03wto9laayearTOt9g04lgvwI+DngTagILSsAGiL4rZmMy5Ws7j907Hk6/TMouXEbM0mrrub9R6w6HHbnZatA9fBWC831stxrUtC9gGISCnwCnAN8BtjRgimPEJo81rFbTear1U0W2stoNk6UtzDQEUkFfgn4PeMMXPR9o+JyNvA2/E+vocMRJuvZrthUWcLmm8MnkZ7Rc12c8W1BSAi2wh++P+9Meb7ocVjIlIQ+n8BML7abY0x7xhjjhtjjsfThhd5wSZlwm3ijqGZ0M9187U6Wystz9OJ2UJy52sjR2Rr5WdBMopnFJAA3wLuGWP+ctm/3gPeCv3+FsF9AypxNF/raLbW0WwdSGKthiJyFvgp0My/b+L9N4J9qf8A7AH6gV82xkytc19xleTNqug2D/96CPwnNpivk7N10HC6mLKF+PPdbOu9nha9Jp0ERwranm2067OD1s313IxnaynmApBITv6QWs7mlSKmF9rJ2TroTRbzm0gLQFRsWXdXowVgJT0SWCmlPCqpJoPzyDd9R0n0Ga00W6WcQ7cAlFLKo7QAKKWURyVVF5AVtEsiOpqTUu6jWwBKKeVRSbUFoN9ClYqP199DXn/+z9ItAKWU8igtAEop5VFaAJRSyqO0ACillEdpAVBKKY/SAqCUUh6lBUAppTzKKccBBIDF0E+
38JHY57M3xtsFgD4S3x67JfL5xJot6LobjXjWXc12bfGsu844HwCAiNxw0yn2nPZ8nNaeeDnp+TipLYngpOfjpLYkgtOej3YBKaWUR2kBUEopj3JSAXjH7gYkmNOej9PaEy8nPR8ntSURnPR8nNSWRHDU83HMPgCllFKby0lbAEoppTaRFgCllPIoRxQAEXlDRNpEpFNELtjdno0SkRIR+VhE7olIq4j8bmh5toh8JCIdoZ9ZNrRNs7W2fUmbr2ZrLafnC4AxxtYLsAXoAvYD24EmoMrudm3wORQAR0O/pwHtQBXwF8CF0PILwJ9rtu7I1g35arbezTd8ccIWwEmg0xjTbYx5DHwP+LLNbdoQY8yIMebT0O/zwD2giODzeDd0tXeBr2xy0zRbayV1vpqttRyeL+CMLqAiYGDZ34OhZUlJREqBV4BrgN8YMwLBlQHI2+TmaLbWck2+mq21HJgv4IwCsNpJOpNybKqIpAL/BPyeMWbO7vag2VrNFflqttZyaL6AMwrAIFCy7O9iYNimtsRMRLYRfJH/3hjz/dDiMREpCP2/ABjf5GZpttZK+nw1W2s5OF/AGQXgOlAhIvtEZDvwVeA9m9u0ISIiwLeAe8aYv1z2r/eAt0K/vwX8aJObptlaK6nz1Wyt5fB8g+zeUx7aE/4mwT3kXcAf2t2eGNp/luDm6R3gdujyJpADXAQ6Qj+zNVv3ZJvs+Wq23s7XGKNTQSillFc5oQtIKaWUDbQAKKWUR2kBUEopj9ICoJRSHqUFQCmlPEoLgFJKeZQWAKWU8igtAEop5VFaAJRSyqO0ACillEdpAVBKKY/SAqCUUh6lBUAppTxKC4BSSnmUFgCllPIoLQDPEJFPROSRiCyELm12t8ktRCRbRH4gIosi0iciv2p3m9xGRCpC6+/f2d0WtxCRr4nIDRH5TES+Y3d7Emmr3Q1wqK8ZY/6X3Y1woW8CjwE/UAu8LyJNxphWe5vlKt8keDpFlTjDwJ8A54FdNrcloXQLQG0KEUkBfgn4I2PMgjGmgeC5UX/D3pa5h4h8FZgheJpBlSDGmO8bY34ITNrdlkTTArC6PxWRgIhcEpHX7G6MSxwAnhhj2pctawKqbWqPq4hIOvAN4PftbotKHloAnvcHwH6gCHgH+GcRKbO3Sa6QCsw+s2wWSLOhLW70x8C3jDEDdjdEJQ8tAM8wxlwzxswbYz4zxrwLXALetLtdLrAApD+zLB2Yt6EtriIitcDPAX9ld1tUctGdwOszgNjdCBdoB7aKSIUxpiO07AigO4Dj9xpQCvSLCAS3traISJUx5qiN7VIOp1sAy4hIpoicF5GdIrJVRH4N+ALwgd1tS3bGmEXg+8A3RCRFROqBLwN/a2/LXOEdoIzgyKpa4K+B9wmOWlFxCn0W7AS2ECysO0XEFV+etQCstI3gcK8JIAD8DvAVY4weC5AYv01wGN048F3gt3QIaPyMMQ+MMaPhC8HutkfGmAm72+YSXwceAheAXw/9/nVbW5QgYoyxuw1KKaVsoFsASinlUZYUABF5Q0TaRKRTRC5Y8RhepvlaR7O1jmbrPAnvAhKRLQRHfPw8MEjwsPRfMcbcTegDeZTmax3N1jqarTNZsQVwEug0xnQbYx4D3yM42kMlhuZrHc3WOpqtA1kxlKkIWH404iBQ9+yVRORt4O3Qn8csaIfbBIwxuUSRr2a7YVFnC5pvDALAb6HZWiG87sbEigKw2kFTz/UzGWPeITh+GRHRoUjr6wv9XDdfzXbDos4WNN8Y9KHZWqVv/au8mBVdQINAybK/iwlOp6oSQ/O1jmZrHc3WgawoANeBChHZJyLbga8SnPZXJYbmax3N1jqarQMlvAvIGLMkIl8jOH3CFuDberRn4mi+1tFsraPZOpMjjgTWvr6o3DTGHN/ojTTbqMSULWi+UdJ11zoxr7ugRwIrpZRnaQFQSimP0gKglFIe5Yo5rVXyyc3NJSsri/z8/KhvEwgEmJ6eZmRkxMK
WKeUdWgCULfx+PxUVFRw/Hv3+q5aWFjo6OrQAKJUgWgCULfx+P9XV1Zw7dy7q22zbto1Hjx5x48YNC1umlHd4ugAUFBRELunpz56v3FpTU1OMjo4yMjLC+Pj4pj62XXw+H/n5+RQWFvLmm29y+PDhDW0B9Pf309nZaWELva2kpISCggLKyspW/X9PTw+jo6P09vZubsOSzP79+ykoKGDPnj1rXi/8/h8dHWVmZmaTWreSpwtATk4OlZWVHDp0iMLCwk197N7eXpqbm3n06JFnCkBmZibl5eUcOnSI2traTc9crc3v91NVVcVrr7226v8vXboEoAVgHYWFhRw6dIi6uufmulvh7t27NDc3s7i4qAUgkXJyckhLSyMjI2PN6505cyZy2b9//ya1LujGjRts27aNiYkJ2trcfcrh7Oxs0tLSOH36NGfOnOH06dMcOXJkw/eTkZFBcXHxitvOz88zPz/PwsICDx8+TGSzEyYlJYW0tDRSU1NJSUkB4OHDhyva7gQlJSWcPn2at95664XXWVhY4OrVq5vYquSQlpYWubz++uucOXOG8+fPr3mbixcvYoxheHiYgYGBNa9rFVcWgOLiYvbt20dFRcWa16usrGTv3r2RN6WyRnFxMaWlpZw8eZKysjIyMzNjup/wN9T5+fnIsv7+fnp6eujp6XFsAcjKyqK0tJT9+/fj9/sBmJiYoKenh97eXscUABW7nJwcSktL2bdvHzU1NRsa3WbnbAyuLQC1tbWcPXt2zevl5OSQnZ2tBcBiRUVF1NbWcurUKbKzs2MuAHl5eWzdunXF7W/dugUEh4hOTEwkpL2Jlp2dTUVFBSdOnODAgQMAdHd3s337dmZnZ+nri2tGX+UA4e7kEydOUF1dTU5Ojt1NioprCkBeXh4+n4/c3FzeeOMNjh49ypkzZ+xu1nMGBwcJBAJcunSJtrY2pqam7G5SwqSlpZGbm4vP51vxBgi/HhvZ4buavLw88vLyOHjw4Irl4+Pj3L9/P677tlJmZib79+/nxIkTkQx8Ph/T09N0dXXZ3jafz4fP5+Po0aOUlJSsfyP1nHCRr6uro6qqakO3FVntVAmbwzUFwOfz8fLLL3Pw4EGqqqrIy8uzu0mrGh4e5v79+1y9epXe3l6mp6ftblLCpKamsnfvXg4ePLjiQ7q6ujrS9aGcJSMjg7KyMg4ePEhtba0WAI9xTQHIzc3l4MGDnD17lqqqKsd26wwPD3Pnzh2uXLnC4uIii4uLdjcpYVJSUigtLeX48ePU19dHlmdkZDj29dgsL73kzFlXMjIy2L9/P3V1ddTW1pKammp3k9Qmck0B2L17N3l5eezbt29DO2Di0dbWxuDg4IY2469cucLdu3dd2e+7c+dOsrOzKS4uprKyclMf2wnTmq/l6dOndjdhVTt37sTn81FSUkJxcfFz/x8cHGRoaIiBgQE++eQT27usnCQrK4uioiJKSko4f/48VVVVMe3f0p3ASWpoaIimpiYaGhqivs3AwABjY2MWtsqb7OxHjVUytHl6epr29nZu3bpFS0uLrrvLhLd4a2trOXbsGH6/P6YDSnUfQAJMT0/T3d3Np59+ypYtW8jKyop7T3wgEGBmZoa5uTmysrLIzMwkKysr8v++vj4aGxv5wQ9+EG/zk5rP5yMzM5NXXnmF0tLSFRmp5DY+Pk5zczP/9m//RlNTk93NcYTw+l5XV8fJkyepq6tb96CvyclJpqenGR0djSxraWlhcHDQ1m5g1xSAQCDA/fv3efz4MSkpKZSXl8ddAEZHR+nq6qK/v5/y8nLKy8v1w20V+fn5lJeXc+bMGcrKypJmCNxmWm0fgNO7rdTq/H4/5eXl1NfXR72+j42N0dnZuWIeq56eHvr6+lYc17LZXFUAPv/8c0ZGRtizZw9paWkbHo71rLGxMVpbW7lz5w5PnjwhMzNz3YPLvKigoICamhrq6+vJyMhY9whsKzj9w3S1fQDJ0AUU5vR8N1N+fj5VVVU
bWt/Hxsa4e/cuH3zwQWTZ3Nwcs7OzWgASYWZmJjKfRm5uLuPj4zx9+hS/309BQUHU30onJycjEzS9//77NDU10d7eztzcHGNjY/T390eu29jY6MqdudEIT+xWUFDAL/zCL3DkyBEOHTqUsPsPTzy2PO+ioqLnJivz+/0cPnwYEWHXrl2Mjo465jUpLy8nPz+f119/nerqarKzs+1ukkqArKws9u/fz+HDh9e8XvizZGxsjPfff587d+7Q2Ni4Sa2MjmsKwHKDg4M8ffqU3Nxcqqur2b17d9QFYGZmhq6uLpqbm2lqamJoaIjFxUX6+/t58uTJip1gHR0dK/r0vCQ8sVtNTQ1HjhyhqKgoofc/NDRES0vLijfM0aNHefLkyYoCkJ2dzcGDB0lLS2NychLAMQWgpKSEmpoaTpw4QVFR0XM7CJPpW3Uyba04xezsLN3d3bS2tnL79m2Gh4ftbtJzXFkA2traaGtrY3p6mvr6ehYXF3nw4AEAO3bsiEzMtdqY54WFBYaGhrh//z5dXV3Mz89Hti5aW1s3+6k4TnZ2NqmpqZGJ3err66P+5v/gwQMWFhZYWFiIvB4v8vHHH3P58mV+/OMfR5bNzMywfft2fuZnfiayrLCwkMLCQo4fP87MzAwPHjzg2rVrsT25BDt48CCvv/46b775pt1NeU5WVhapqamUlZWRl5fn+eM01rN79+7IZ0ZJSUlU+wIDgQBNTU188MEHkZlUncaVBSBsZmaGzs5OtmzZEqm+Pp+P0tJSSktLVy0A4W+2T548YW5ujp6eHlcdrRuvZyd220h//8zMDL29vfT19a37bai1tdX1W1d2fqsuKiqitLSUU6dOUV5eHvP8TF6RmZnJnj172LdvH7W1taseM5GMPFEAZmZmuHfvHgClpaV89tlnkWkLnpWZmRmZsXJ8fJylpSVaWlo2u+mOFZ7Yra6ujpycnA0XgK6uLm7evMndu3fXvO7w8HCkS8et7OwCKioqisxZ7/P51v1Gm0zdVVZIT0+noqKCY8eOUVtb65r9Oa4vAOEPnbCamhrS09MpLS1d9TbhvfplZWX09fW5/ltoNHJzc8nJycHn80Umdjtx4sSG72d2dpauri6uXbumc8rbrLCwkCNHjqw7fj3Mq/sAiouL8fl8fPGLX+To0aMcO3aMl19+ec3bDA4OMjk5yZUrV+jo6LDtZC/RcHUBUImRm5tLZWUlBw8epLq6etOm2lDKbuFpTerq6ti7d29UXWXJNOGjFgC1Lp/Pt2KiPZ0wLDG8+q06mSzv8kxJSYlq3R8cHOT27dtcvXrV8RM+xlUARKQXmAeeAEvGmOMikg38X6AU6AX+szHGMSVwenqa5uZmnj59ytOnTykuLqa4uHjVyr5v3741TyrT3t7O4OAg3d3dVjY57GUAO/JNSUkhPz+fsrIyCgoKXni96elpBgcHIxOIPSs8JM6BJ26xLVurlJaWUlxc/Ny5E8Lq6+tfeBrU2dlZBgYGGBoa4qOPPqKlpSXu/THJmm14wrcXdRmvZnZ2luHh4aQ4d3IitgC+aIwJLPv7AnDRGPNnInIh9PcfJOBxEmJhYYHe3l4ePXqEz+djaWmJzMzMVQtAuKtj9+7dq97Xtm3bWFpa2qwCcC/007H5zs/P09vbS1NTE3fu3Hnu/4FAgLGxMSf2idqSrZU7VvPz86mpqeH1119f9f+lpaUv7MpbXFxkYGCA27dvc/36dcbGxhJxtKpj11svs6IL6MvAa6Hf3wU+wUEv9OzsLLOzs7S3t/P48WPGx8eZn5/n5MmTZGZmrjhgLLx18KIzWe3atQufzxc5pHtmZmYzDut2XL6BQIDZ2VmuXLlCY2Mj165dc9wRj1FyTLZpaWlkZmaSkZERU5fbF7/4RU6dOsWXvvSlDd92fn6ejo4OfvrTn/KTn/xkw7d/Acdku57wF8LwuRKiOYh0ZmYm8hnQ1dWVNCPY4i0ABvhQRAzwN8aYdwC/MWYEwBgzIiKrnpp
LRN4G3o7z8eMSnkBuaWkppgnkioqKOHz4MCMjI3R1dUUOHLOIL/Rz3Xw3O9vwpHmXL19OqpV/maizhcTlu9Y+gKysLMrKyigrK4tpzHlVVZXTxqonzeeCz+ejrKyM8vLyqAc9TE5ORj4DWlpakmb0YLwFoN4YMxx6MT8SkahPzBoqFu8AhArIpgsEAiwtLTE6OkpRURGpqakbmkCuqKiIlJQUZmdnEREmJydXzF2TYHki8oVorrjZ2Y6OjtLc3Mzly5dXzMmURKLOFhKX71pdQOEDEmM5xywET1LuoIO7ot6EccLnQnjQQ11dHdXV1VHlODk5SVtbG9euXaOlpSVp3gNxFQBjzHDo57iI/AA4CYyJSEGoyhcA4wlopyWWf1j5fL7Izsn8/Hz8fv+6WwPhk5S/9NJLZGRk4Pf72b17N+Pj41bsF5hhE/PNyMiI5PDqq69SUVFBWlraqtednJyku7s7meeL39RsIfgBX1lZyaNHj1Y9qKiyspIjR45QW1tLeXm5Vc1YYXBwkLGxMRoaGmhqakrkyV9ScPjnQvj9m5+fz/nz5zl8+PC6R/yGJ4gcHR3lww8/pKmpiaamJiu/BCZczAVARFKAl4wx86HfzwHfAN4D3gL+LPTzR4loqNUGBwcBIhPI7dixI+ruoPCBYzt27GBycpLW1lYrCkA60MIm5ZuWlkZpaSk1NTUcPXo0srXjUpuaLQTz3bdvH9u2baOwsPC5//v9foqLizd1au3x8XHu3r1LQ0MDg4ODiRyt9RCHfy6kpqZSWlpKdXU1R48epbi4eN19L+EBJa2trdy8eZOBgQFbp3aORTxbAH7gB6F+zK3A/zHG/FhErgP/ICK/CfQDvxx/M60XnkAuEAhw9uxZFhcX2bFjB6mpqetuAubm5pKbm8srr7zC559/zpMnTxK58yxsZjPzzcnJ4fDhw5w/f56f/dmfteIh1pSZmUlKSsqKD8CSkhKrujUsyTY8LLa3t5fU1FRSUlLYtWsXEJxULzs7e90phV9kYWGBxcVFFhYW+Oyzz6K6TbgNqamp7Nix47n/9/b20tDQwD/+4z/G1KY1zBH84Hfs50JWVhaHDh3i3LlznDt3LqrbhCeI/OCDD1ZMWphMYi4Axphu4MgqyyeBzf/ESJDZ2Vk6OzvZunVr5FuwQ/pSRyH5841WUVERe/bsWXFy+cOHD1NSUmLFw1mS7cDAAJ9++il+v5+9e/eyd+/eSAGIVyAQoK+vj76+PqampqK6zZ49eyLtWK0AWMkr622y0SOBnxEexjU7O4vP52Pr1q3U1NTY3SzPKSoq4siRIyumfvb7/fh8vjVu5SwDAwM8fPiQnJwcnjx5QlZWFn6/PyH3HR7B9umnn0bd5/zKK69gjInM66SUFoBnhHcMd3Z2sn37dqamptiyZUvk/9nZ2fh8PnJyclbtI8zLy4ucLm5ycpJAIEAgEHjuek633kFKMzMzkefX1NSUkJNd5OTkkJOTQ3Z2NufOnePYsWO89tpra95mamqKQCAQ2ffipOF3/f399Pf38/jx48h6kKijQ69fv86NGze4efNm1AXgyZMn5OTkrNiq8rr8/HxycnI4ffo0Bw4ciPr8vpOTk1y+fDnSbZystACsYXR0lNbW1hUn9D5w4ACVlZWR/QPP8vv9VFVVcfbsWdra2rh//35SriDrzVMzPT1NR0cH7e3tNDc3rzr1w0aFP1PvAkIAAAsRSURBVJwqKyupqalZc9qJsMnJSdrb22lra6O5uZmRkZG425Fo4S8US0tLCSsA3d3d9Pf3O3qemWSQn59PZWUl9fX1lJaWRrVlNDo6Snt7O5cuXaK3tzcp399hWgDWMDY2xuLi4opvt3Nzc+zYsYPCwsJVDxDx+/3s2rWLR48eISJMTU1x/37Uh0ckjfCH2tWrV2lqamJhYSHu+8zOzqayspIzZ85QXV39wmGny01NTdHe3s6VK1doaWlJSDsSbXp6ms8//5yRkZGE7QNYWFhgfn7
ekc83meTn51NdXc2ZM2ciZwpcz9jYGC0tLTQ0NLC4uJh0I3+W0wLwjLy8PAoLCykuLiY3N/e5/1dWVpKfn//CN3J6ejrp6eksLi7S09Pj2qGTi4uLjI+P09XVxcDAQMz3k5mZSVFREUVFRZw7d46amhqqq6tXHX/d19fH0NAQbW1tkWV3796ltbWVlpaWyFBepwnPCJnAcfUqQTIyMiguLqaioiLq24Qne+vs7LSwZZtDC8Az0tPTKS8v5/Dhwxw4cOC5//v9fgoKCqL6dprMNusMUCkpKezZs4cjR45w7NixNbMdGxujtbWVixcvrlg2Ojqa1N/ClLKLpwtA+Oxf6enpkRk/Dx06xKlTp6irq4v6ZOfLTU1NMTs7y507d+jt7WVubi7RzXaF7Oxs0tPTqauri+R9+vTp5663sLDAzMwMc3NzfPzxx1y9epUf/vCHNrRYedny9bCzs5PxcUcdyBwzTxeA8KRPZWVlkZ0/JSUlkXMCx2J8fJzOzk4aGhro6Ohw4tz3UbH6ZCV5eXmUl5dTX19PeXn5C3e+zczM0N3dTXd3N7dv33ZsN49yt/Dw8PCUJ4kY9OAEni8AlZWVnDx5MnLCh/T09BeeHyAaY2Nj3Lt3j4aGBmZmZhx9Ojg7FRQUUF1dTX19PVlZWS/Me25uju7ubhobG7l9+3bSTLKl3CVcABobG2lqanLN+9ozBaC4uBi/38+ePXsiy44ePUptbe26kz6tZ2pqivHxccbGxnj//fe5ffs2n376aSKa7Vg+n4+qqioeP35MUVHRhm9/5swZamtrOXr06JrXW1hYYGBggDt37rhyNJXafDk5Ofj9fvx+PydPnmTv3r3r3mZ+fp7+/n6amppctR56pgAsH+4VVlJSQnFxcdwjdcLTR9y7d4/bt2/HNSrGKdbbCRyernjnzp0vPLXgWvbv3++0+eqVR2RlZVFeXk5VVRVHjhyJ6QuMW7iyAIQnEls+pvfVV1/l7Nmz/OIv/mLCHmd2dpbFxUVu3LhBQ0MDDQ0Nrvnmv94+gPz8fPLz8194trRYLCws8ODBAx48eBCZ4Kyvr49AIMDDhw8T9jjKm8KfCydOnODs2bOcPXs25sn43MKVBaC4uJiSkpIVY3sPHz6c8Eo/NDREf38/V65coaOjQ/un4zQ1NfXcBGe9vb2arUqI8OdCXV0dZWVlmzrVtlO5tgDU1tZy9uzZyLKCgoJVD+yKx9DQEE1NTTQ2NjIxMeGaHUN2CU/rcPPmzciUCXNzc5qtSojw58LJkyfx+XxaAEjyApCamkpOTg5ZWVkrJnE6d+4cx48f59VXX03I4ywsLDA1NcXU1NSK891++OGH3Lhxg0uXLiXkcZzE6gPBxsfHmZycXDHNxs2bN7lx4wbXr19P2Jw5SoXt2bOH48ePr3q8yXKTk5NMTk4yNTUVmWvp7t279PT0uO64nqQuAOH5+isrK1ecNq+6ujph0+5C8FD+vr6+yKRjYU6bfTKZhCfUamxsjCzr7e2lp6dH57dRtgoEArS3t9Pe3h6ZvmN4eJje3l7XdUUmfQHYt28fx48f5+TJk5HlOTk5CZ2qIVwAbt68yeXLlyPLp6amXDsFgdUHgoWndfjwww8jy+bn53WCM2W7iYkJ7t+/z5UrVyKndg1P+ua293vSFIDwHDxFRUUrjtqtqanh0KFDVFVVJeRxJiYmGB4eZmhoKHIU7/DwMM3NzTQ3N9PS0pKQx3G6qakpWlpaIoWgsLCQwsLCuPpNl2f7r//6r7S0tCTzieSVSy0sLDA8PMy9e/dcNeZ/NUlTALKysjhw4ACHDh2KjDvPysqioKCA7OzshD1OeJrjO3fu0NHRAQR3RI6MjKzo/3e7+fl5enp6ePToET6fj6WlpcjcSbFanm1TU5Mj5+5XKmz5eUDcKmkKQF5eHocOHeKNN95I2Njz2dlZ5ubmmJ2d5dGjRwA0Nzdz9epVLl++7Jlv+6sJnxnt/v37PH78mPHxcRYWFqI6avJ
FNFuVTJ4+fWp3EyyXNAXACoFAgO7ubnp6eiJn9env76ejo8N1e/vjEQgEuHfvHktLSyt2gm+UZquSiW4BuFwgEKCtrY3Gxkb6+vqAYHfP9PS06/b2x2NiYoKlpSXGxsYi02bHQrNVylmSpgBMTk5y//59du3albApgW/dusWtW7dcM3+PVcLHQKjkNjExQUtLC9u2bVv1bFaNjY2uPv6it7eXxsbGdb/Z37p1i66uLteN+FlN0hSAqakpOjs7+eyzz+jq6krIffb39zMwMKDDDpUnhI+0fvjw4aqjW7q7u119voXBwUFEZN0uSC99LshmnfpvzUaI2N8I57tpjNnw3m/NNioxZQuab5R03bVOzOsugPv3ciillFqVFgCllPKodQuAiHxbRMZFpGXZsmwR+UhEOkI/s0LLRUT+h4h0isgdEVn7dE9qQzRf62i21tJsnSmaLYDvAG88s+wCcNEYUwFcDP0N8B+AitDlbeB/JqaZKkTztY5ma50MNFtHWrcAGGN+Ajw7BvDLwLuh398FvrJs+f82QVeBTBEpSFRjleZrIc3WOploto4U6z4AvzFmBCD0My+0vAhYPqB+MLTsOSLytojcEJEbMbbBi6LKV7ONia671tmGZutIiT4OYLU5hFcdymWMeQd4B0BE5oHY5xhIftsJbh63hv6uBW4v+/8rwF5gbJXbPpfvM9lOAItAIIHtTSbrZVsLVBJltqDr7jLRZDsIrPZtX7NNjMp4bhxrARgTkQJjzEhoU248tHwQKFl2vWJg+LlbP68tnrGsyU5ESoF/CWcgIm3Af1yWb48xJldE/oYN5hu63Q2v5htFtp8A8+i6u2EbyPYDNFtLxLulFGsX0HvAW6Hf3wJ+tGz5fwmNqDgFzIa7itSGPJvvzLLlmm98dN21jmabbIwxa16A7wIjwOcEvyX9JpBDcPRPR+hndui6AnwT6AKagePr3X/odjeiuZ4bL1Hme0vztSzbbOCGZqvZJuMl3oycMhXE2ybY96dWEW8+mu/a4slHs12bZmutuD8bnFAAlFJKbT6dCkIppTxKC4BSSnmU7QVARN4QkbbQPCEX1r+F+1g135JmG6T5WkeztY5V2a5g8x7sLQRHBuwneFBJE1Bl9551G3L4AnAUaFm27C+AC6HfLwB/Hvr9TeD/ERxZcQq4ptlqvpqt+y5WZPvsxe4tgJNApzGm2xjzGPgewTlZPMVYM9+SZhui+VpHs7WORdmuYHcBiHr+FQ+Kd74lzXZtmq91NFvrxD0P23J2F4Co5w5SEdFmptnGRvO1jmZrnZgys7sAxDr/iheMhTfhYpxvSbNdm+ZrHc3WOvFmu4LdBeA6UCEi+0RkO/BVgvOGqPjnVdFs16b5WkeztU5i51tywJ7uN4F2gnv9/9Du9tiUgSXzLWm2mq9mm7wXq7JdftGpIJRSyqPs7gJSSillEy0ASinlUVoAlFLKo7QAKKWUR2kBUEopj9ICoJRSHqUFQCmlPOr/A6+4dmXV7dYgAAAAAElFTkSuQmCC\n" - }, - "metadata": { - "needs_background": "light" - } - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as c_trans\n", - "\n", - "DATA_DIR = \"./datasets/MNIST_Data/train/\"\n", - "\n", - "dataset1 = ds.MnistDataset(DATA_DIR, num_samples=4, shuffle=False)\n", - "\n", - "resize = c_trans.Resize(size=[101, 101])\n", - "dataset2 = dataset1.map(operations=resize, 
input_columns=[\"image\"])\n", - "\n", - "image_list1, label_list1 = [], []\n", - "image_list2, label_list2 = [], []\n", - "for data1, data2 in zip(dataset1.create_dict_iterator(), dataset2.create_dict_iterator()):\n", - " image_list1.append(data1['image'])\n", - " label_list1.append(data1['label'])\n", - " print(\"Source image Shape :\", data1['image'].shape, \", Source label :\", data1['label'])\n", - " image_list2.append(data2['image'])\n", - " label_list2.append(data2['label'])\n", - " print(\"Flipped image Shape:\", data2['image'].shape, \", Flipped label:\", data2['label'])\n", - " print(\"------\")\n", - "\n", - "num_samples = len(image_list1) + len(image_list2)\n", - "for i in range(num_samples):\n", - " if i < len(image_list1):\n", - " plt.subplot(2, len(image_list1), i + 1)\n", - " plt.imshow(image_list1[i].asnumpy().squeeze(), cmap=plt.cm.gray)\n", - " plt.title(label_list1[i].asnumpy())\n", - " else:\n", - " plt.subplot(2, len(image_list2), i + 1)\n", - " plt.imshow(image_list2[i % len(image_list2)].asnumpy().squeeze(), cmap=plt.cm.gray)\n", - " plt.title(label_list2[i % len(image_list2)].asnumpy())\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Invert\n", - "\n", - "对输入图像进行反相处理。\n", - "\n", - "下面的样例首先加载CIFAR-10数据集[1],然后同时定义缩放和反相操作并作用于已加载的图片,最后输出缩放与反相前后的图片形状及对应标签,并对图片进行了展示。\n", - "\n", - "依照上文步骤下载CIFAR-10数据集并按要求存放。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Source image Shape : (32, 32, 3) , Source label : 7\nFlipped image Shape: (101, 101, 3) , Flipped label: 7\n------\nSource image Shape : (32, 32, 3) , Source label : 0\nFlipped image Shape: (101, 101, 3) , Flipped label: 0\n------\nSource image Shape : (32, 32, 3) , Source label : 2\nFlipped image Shape: (101, 101, 3) , Flipped label: 2\n------\nSource image Shape : (32, 32, 3) , Source label : 1\nFlipped image Shape: (101, 101, 3) , 
Flipped label: 1\n------\n" - ] - }, - { - "output_type": "display_data", - "data": { - "text/plain": "
", - "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAADuCAYAAAAwTtAhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nOy9eZAlyXkf9vuyqt7d5/Tc196L3cUuFsfiFAiJFAiSMgTCCooQRROSJYOmBIXpcNiCJYYZpKmQ7AgzwiFSDEMGSZBB8zJACqQQJC6CMO5d7Ik9Z3d2zp27p893VmX6j+/7KvO97lns9IHumc7fRM97ryqrKuurrMzv/sg5h4iIiIiInQez1R2IiIiIiNgaxAUgIiIiYociLgAREREROxRxAYiIiIjYoYgLQERERMQORVwAIiIiInYo4gIQERERsUMRF4AARLQ08lcQ0b/f6n7dLCCiaSL6EyJaJqKTRPRTW92nmwFEVCWiTwhNF4noMSL60a3u180CIvooET1CRD0i+u2t7s9GIt3qDmwnOOda+p2ImgAuAPjjrevRTYdfB9AHsBfAgwD+MxE94Zx7emu7dcMjBXAawHsAnALwYwD+iIjud86d2MqO3SR4BcCvAHgfgPoW92VDQTESeHUQ0YcB/CKA210k0rohC+pVAK93zr0g234XwFnn3Me2tHM3IYjoSQC/5Jz71Fb35WYBEf0KgEPOuX+01X3ZKEQV0LXxYQC/Eyf/DcNdAAqd/AVPALhvi/pz04KI9oLpHSWriFdFXABWAREdAYvTn9zqvtxEaAGYH9k2D2BsC/py04KIMgC/B+CTzrnntro/EdsbcQFYHT8D4KvOuZe3uiM3EZYAjI9sGwewuAV9uSlBRAbA74LtLB/d4u5E3ACIC8Dq+BlE7n+j8QKAlIjuDLa9AVFNsSEgIgLwCbCB/e855wZb3KWIGwBxARgBEb0TwEFE758NhXNuGcCnAfwyETWJ6F0APgDmWCPWj98AcA+A9zvnOlvdmZsJRJQSUQ1AAiAhohoR3RQelHEBWIkPA/i0cy6qJjYe/wzsRncRwO8D+LnoArp+ENFRAD8Ldq09H8Sx/MMt7trNgl8A0AHwMQA/Ld9/YUt7tEGIbqAREREROxRRAoiIiIjYoYgLQERERMQOxaYsAET0I0T0PBG9SEQxynODEem7eYi03TxE2m4/bLgNgIgSsMvfewGcAfAwgH/gnHtmQy+0QxHpu3mItN08RNpuT2yGK9NbAbzonDsOAET0B2B3v2s+6KmJujuwbxzO2XKbAy9MJEKKtXbVY0OkqRdoTEJyHJ/H8E+QfgFQyD7fZjWBiPdp38IFk12vASvbjEnkkKCN0XPKeYL7GAzYVbsoeFuapNL3xPex4M9jx69cds7txnXSd2bXLnf08CHABPcm/UbZTbui3yvYArfyuytpI3TUzgIo8lwvNtQmPJFuKpy29Pu0t0o/M0LH8Jy6T5/HULf1uckzHrov+fHMsWNroi0ANBoNNzk5sfpOGvoY+rain9exNdy34oy02tfVzjPyPNy12/rn/Gp
dpKG2AJClFQDA6TPnLwP4u7hO2lZau1xj+jBM8Fz1q/9cpb9O+6KfYXcp6O3quBbdVhleAfle7Yzrg9I9pL+O6/nTj+vYXRM2YwE4CM5MqDgD4G2vdsCBfeP4g//rJ9HPvftykfMNGlMFAHQ6ui9YCFwibZgyUzM+Ud/4WA0A0F7i42p1vtVKw9/ywnKX27R5sqomfPzQg3a8rzfgtoNBr9yVZdy3Xp/bNJtN7nswEdZq0ifH2wb9drnv3LnzAICrs+xxumt6DwCg1Zz0fVzk+33v3//tk7Lpuuh79PAhfOOLnwNVquU2qvCL6az0M++t6PeKpaDwW4qcvw9kEBaykLVnr5Ztrl6ZBQAksij2enyNcAEdyMK30OfPSvBsa7KAVypMv0ZTno3JyzZ5ztdVGmepf7a6hnY6TO+BPCOdHPh4vt8H3/uja6ItAExOTuCf/jf/OFigAB2jfpLSfWEbGvq0AfMD0sldF62ViyfAfTfSNpF3gIYWAHmH9JNCBquQs0i8mIwFCpkwGbNW9uVFsM8a+ZRJlpIVbfbuPQIA+Pn//t+cxBpo25g+jB/4l19ELfN0y+QRV+QzNSsZw4HMC30rfXJ+XFh5BiWDIfQ3wUJS8ohCr0TokAzxUHLfQociGFfaI7eC0Qqgi5S0oVVWVx2rymAOBv5e8z7PR5/5F1MnVxx4HdiMBeBV1smgEdFHAHwEAPbtGUN3UEWn2/cHCGHrdU4VMz19EADQ6fpFwoGJUKvL4E/95DDf4X2mmvFxwl3PLvoJfLnH18tkQrQ6CKzvrn61CbdpjHlur7vMD6TW4Ik/ybjN5dkLZZtM5vtGvSGfZcZpjE/x59jkDACgWmnIjVXKNlW7knSjGzBC35C2Rw4fRqXVAEzwqGV2dE5eYiOTSR5M+/rSCydPweSVZnK8TKqdZb7JpXm/uF28cEnapnKtlRLQshx/SQ6bqHnJZ6bJz42k/bIGthrfx16fn2VfzpNlWbnPyoJVq1akLY+bbs+PH1plWI5uwPcYu+Pj47JwhpLJ6ALghj5Xu9SQKracN3Sb0IzCya6QNrxNJeShBaBsrwv7ygXAoi/XlwXAeiZAmZZCF4Kgj1Ynfp3IbCZt/DjRxXf4robwqrRt7jqEqVYyLBHLlNWX63T1esGNJwmfVheJekh3XTDkuHKuD9rogm0d35MuIOEkXy4c0rUkuL2ytzQiOa0iyejOYe7eXwXwRKpkAfPjlrER2Awj8BkAh4Pfh8D5tIfgnPu4c+4tzrm3TE40NqEbNy2+J31D2u7ePfN97dwNjuseu43GTZUefjNx3bStjcWxu9nYjAXgYQB3EtGtRFQB8CEAn9mE6+xURPpuHiJtNw+RttsQG64Ccs7lRPRRAH8JloZ+83uG+5sErj6FtOK5qb6oZzpg3fXyEotunY4XUVUv3O6IoW/Zr2d90Usb0aZQwueZnfOXHYhINzYh+mURawc9L2r1pB+J2CL6wb6XX2QGZvbiEgBvBL065y/SaLB0c/DAfgDAbbfsK/cdOMw6/0qd1VVzA1ZP9LpedM7t8CO6fvo6wBVwRSjiyh5RG6jBtuh1yzZW6NcbqPoiMLCD23c7fN8LV1nf3+744ys1JrzaFUpjeCBq56LeaucsNGdB+rK2qAMHIupmAzlfIGqreqpQdU+tVu7rtvlk8+A+pqIdOn/eM53JyOhf09jFiPoGoeqHRj7DNqVeQM5xbSNiadymldu8JUGeU6AiU722PjoTqnCc2hlUYTFs0JcfQ59hD9VhwkHViDR0SHhOOe9109bCoIsaKsE51c+jmop+XtSXoQpHh5raPEzQc509clEBefWOH996DzRiX0mH1HSjBuaVpv7ScuNPGLTRl7BYsc/3Q7YVrOpMl14q96UXvr6i/VqwKQmNnHOfBfDZzTh3RKTvZiLSdvMQabv9sC0y2hUOuNq3GJsI3OmEHez3xdjT5ZVyqfArda/PHN+x59m54Euff6Tct7DA3HSzxbe4d98
hAEBrbKpsk9b4XJXaAgDgykU2XF44d6lsMzvL+zrtvvTDs6l5n1fo5WXdxxywC43IwmVXxNDcDLyQ3vHu2wEAP/FT7wIA1EWiyU3gjZOuU0tnLVyvhyJ0MjF8D3nBHHS7w15IuXD0fA9ybwPub2/gJZ9mje9vce4iAGBeJJ7BwHMxA+HK1cVVPbUCL9zSsagQg39u/c5KjSWuWsLXXVrmvnXCfojX1cIS75ubX/DnzvlciVyw0WARoN31UkonuN/1gLlxGvntpSb/O3QPVu8T8fQIbK+li6e3UMpHglGUnj66L/CKUSOwGZFEuBm3t/Lp3SNDTlgNpYl8hhzw8D5n1E06HGgruntdSIzDVKNAJbgnk+pJxVCrElTIQQvnXEjfesE+NV77s8g4GfKeGjHQl8bYgH6lh5Y/KjxieN/w9vAapQQVNrOidRCPwezy4/x58eGySbq4LuefEjEVRERERMQOxfaQAAqL+fllUKAEXlpkDr5iOMbBGHafvHzFVxX81teZ43/kYY4lOXPauxlrUFi1ylxi7XnmDiuBP7zqo/vi27+0xOdeXvY6+F7gmgoADXH5BAAjSmS1V4R+9GWbUvct7oxX/bk/90VWgc4cYH/ph97OEgGl3lUxq/rrrQXOWvSWl4c4TOUWE/GbJ/GnLzreRXYgbn1f+dZ3AQAvnTxX7nv7G+8FANxykOMVxsZYurm66O/N9ll3r66MKm2YQATQ2ICi4P5cvuxtJ3vH9wIAKuLaudjmtq1xX1SsVte4C76GunoC3tRQrYqHmXCBe/Z4G8zJkyewXhARkiQZui/lq1YEpg25AWoQm2xY1QagksBI9BO837iRtPSq7w45YR+bIFJT6FJMqntW91E39JtPMBx/EHKLo27rTo4P4zVXDZy6DhARkiwBrB+XSfsEAKDIu3I9sQUE/U6cuHcX4uKaBYXoJu8GAOQpj111W7XOvyArtPmqrncr+X19bEOcdPkshSZelPNNZF9S8H1ky+fLfdnVY3zOZbZXJfPHeUfu42xctjH1fqIEEBEREbFDsS0kAGsdur0+2gEHOj8/rzsBAIvzvFZ9/nNeD/b4d04AADoSkFWv+QhaZfTrNa05zscr1wl4LxLd1h+M6FMBpBqEIqt4yOkVEkGb5yolrBbNJ7pW+WwGUb6DnDnmb379SQDALbeztFNv+T7urqwvRsI5B9vPh7ixUh8v3a3IzjwNA6n4u+r3H3/s+XLfmZPMibzzba8HANx55y0AgMee9G16HT7u9fcc5X6IJ0O/5zk19ajqdXnfsWPHyn1T4zw0q2IPsSKlmMzr8Ht93qaRqmGgkgbmqHQnsUFDz2/v/kPYCBgyQ+kKPBetUZ76a6UOuPw9xF7LttLTZ5UoX5UgVBcvx7sgXNWq1KVBS0G0sgZ36SbVbxd2Jdte2inCgKoyBYIdabvSzrFWEBHSaobawtly29KTf8h9kSDAVYSjgNGWMZB7u5HZy6EIrdt/CAAwGL+HP+E9yLzX0zDdQ4nOjqTHCJ/t6Cyg9gLTu1Juq86ztiJbYM+edMHr9JM+e9WZhJ9Rn3ie6DovYQ+KdYpXZd8iIiIiInYk4gIQERERsUOxLVRAg0EfZ8+fxYXZQMQRG8elc2wI+eqXWT1w9pTPgZGlrN6pZCKGGq+6qWT6XURbEYfzPEwmxt8H+bAqIQlyj1C1OtQ2VCGp0VeNQ6tlo9TrFiKy2cCgVUh+m0sXWeSri1HzntfdWrZZuOrFxrWgKCwWl5ZQqfpHXZH8PD1RQak75vLAC6/LEtT10IOvAwB89xlvYD9xge/hCw+fAAB8+3nOffTiCz6uJzOsFtu9l8P5D4jmKy+C+xcXxIVFdsc8ccYHaR05wsftm2QV2FSLaaP5fxhiZHPDaggAMKLK6Pb4GWVV5nXSis+z5ExtxXHXCyKCMTSUbNWrflYGgJX9U9WBJvwaUu/wp7omUmlU9oZKNfqmEqBIor1rB4bwdpuf4WSL75MC92I1nhp
N6iauljZU22iyRdK8Q0OBXfpF+iZJ1oJnsWpy3esBOZDJ8cBe36dXJnkgnTzJLsi1Ot9/qNrTBITak0riDabFBVYnLV/5JACgcdsDAIDqLX+7bDOo7dcz8XlUXeYCF3ASHbO6yoZ0k69pj9XY5gyrrdML3y2bZDm7Xg96c3KIn5dIgtws1E2bn+O5C94BZnJsY1KQRAkgIiIiYodiW0gA1jn0Bv2h2HxrmWt54nHmKl9+SVygrDdUViVGXFM5FIVfoTXXfrvdHvodunLpV1vYod8hRnPNr+bq6c+3miSgIfJi0OktBLv4ulev8kr//LMnAAC7d/uMoZ225wzWgsJazC61kXR8n+oNpm1VMqW2O3yNz3/5sbLNiZPM1X/wx98LALj97rvLfS9cZGPvnATnXVq4zPexFASwybN47LvswnbH+x8CADSmvFvtgqRpOP8Mu5ieu3C53Dd3lbmd+2/lLLDVZGXWxH6fz1WmIA72FWrAEzdSl2hqYD9+nNuY4U8YfeZqoFUj7mhyAG+IVxYsCdllp8FzYrxdJa99XdKbV1Me+8+98BwAIE88B57J+1TUJBV6kG01KXk/zdIq1woCD63VFNrqBhq6ka4e7OQCUYbWKQJkpsCBxjLectQHb75iH5J+PsufQscs8/d28MABAL6+RtiPK1d4HnnxONPr6lOcUsEdf6Js03zTj/OXQ+/m44UOCFLB16sqPWqm1sDQPM+SLL3wBd5w/unyfhQqcRW5ZmX1/V+aY6n/8hV+HyqNaQDARNNLr4cmA3f2dSBKABERERE7FNtCAjCJQaPVQKs1Vm77yl/xCv/4o6IXLqTAS9sHQ1SqzG3Um7x6FjbQ0YmueVGCu4pi2DUv/Kb60LJIRsB0q25P7QLG+FW4r/YATQy1askkVeRqYi5/ctWp9rr8+eILZwAA+w/4Aj+79kyvcs7XDpauesg7AW2Uo5FgmG8+8QIA4M8//52yzfwitz985wkAwHIQRNST70Wf6dVs8HOziedKlqXYzpMv8PP7wQ4/v6kp7wZ7fo5tHy++zPfdD2JbKsLZTkvgVyZ56xtjXjpSd93Ll5lTUikPAIxw/gOR2NodSS7X9/TvDtYnXSmIRl00pQ+4tgSgTKm2zSpeMskH3NfUrEzhoJicYJq/cpoDiE6dYhvN0Tu9/Wj3rl0AgHqm6RI8fUqJQzl54aTDZBPKr2pcU2hpKd0gR9whbRCQZZL18ZcN6uOB7GXMNA+W2/a/kXX2Dz74IADgiSeeAgBcuexrcBw5wu3vue9+AMP31JeX+8SJNwMAHn2E0yy8+PzjZZvlcy8DAMaOcooWIwGTbuBdkN1lsYkt8nWTZW+/olmWerM+z1WJzE8LbS/99tpsq8klEWK1FhRsEq2D2jj3SfqaVj0oCNNbba65fkQJICIiImKHIi4AERERETsU20IFRCBUkgTL897I8tg32e1z4QobCmt1Vr3keVDScZldQr3bphf2NCuhlbwgWh9gOAJTk3wMi8M2FNXL5pITPPXX0MLzeZnzvkwaElxDa5DyvpkZr9JRN72lZXYJ6yzx/dS8NgDkvFvfWuBsgX5nsaxrAAAtKZM4K7l3vvAVNv5eWvBGKjWW/sVff5PPE6q+tH6s3FNX6jcPgnw21RaratS19Lf+n78CAGSpF3VzyeyaViWn0Piect/Fy1IHoMLnaVa4/+2OdwPWvE7T00zTatW7ddbqYuAWUfvECRbRF4IyhZrpdX1w7K44pAIazjhJI9vDjb42g6e9ZjDVTz0qGHqYFseHl1/i+9m9l10Xp6Z3lW0adW6TiGq0cIH6Ufso6oayZGdwZ6WKVD7DKF99R0YdcIsg7axZzf/1OpCQw0RqUQ3qhJA4U+izn5jkDMKXLga5dMTNOZPrt3t+zLw8y+OgJvmr3v3eHwAAHL3VFyt7eonPPdfn98OK00MeuHEOLvC2TJ0NAuN7oZHpMr0uiZtzr+/fwYF8nxxjN+exwE07Kfh+x6ak1rX
QveiEWYaxIYgSQERERMQOxbaQAIwxqFWaOPbciXLbubNsIFSufFDmffHs8fKybCt4ha9U/T6ff75MdhJ+CIZ5NA3KGQp4ER5nMNBAMG9IG5/kFXrfPg5aOnuGg1Pay0GwkpxTDWL79+319yh1B5YWF+U++Bq1QATodtebs55QuAxFIJVops8XX2b3y+Mn2YjaDqqGdSWgaP4YG7laY95461I2zLYln/6VKyzJpIG73eQkc+W5GF3PXGC6ZYGr78wuPueuXWPSU3/8y6eYNt98lN313vMODkjL7WLZpr/MfVSXyCzgovqSYbSbS06jtlSUyz1XemHWOxSsD9cwyKnhX8dXwG6NGnaHcokKV15W9JIxON7wIkCrwW2OHGWXx45VCS1w9RypA2BCfk/zXb2KLdEZrZrFv4ckY81m6auby/YgiNKsj780RGjUMqSpf65FrtfROiFqmPXXOnyYs+sWEkhVBMFpS5Jj7PQCS3/1Gr+7++UYALhzjts8/O0/BwB0TrOheWC9obnT4eOnmny8CyRbK5qBvtSwyPt8/UYYqNpkyTaTQVEs+3klVVfa0lNXM7aWTZAk65OuFFECiIiIiNih2BYSQFE4LMwP8MRjPhtku61pGVT3r4FcQUY+cUfsdJgT7Pe9m5ZyW3muq+fKakolU1zmUjcjO1CuvrrlwFHvovm3fvBNAIAjh1n/+md/+hUAwHefPO77UaYSdHJfXqc/GlTWldoDS8teZzmwbawHS+0evvHkcSwueEnigfvZhe47T7Er29yCVF9D4KZWcD9VbVzp+74OJMR9sa31WJm2M5O+olveV4oxZ1SXymxF7q/RGfB5milLCzO7vbvfsnB9jz7NtDx8mHXb480gXYXUCBhzPEbOXvSh8udmmYanz7MkeV6CzGb2+KCi5097vfH6MMJGj9oAVskmOaoeTwKXScKw9Kp7pie8Hcb2mQNdXuR7npcxU2t6TrRMj6CZUgKOvMz/PxJDF3ZLU2wkbrhucNhHNyIBhBKIWacEQATUkqJM0QJ4e59ettflcXr0lqNlm5akDVFdfKvh3cvfeBeP/aUlsbtJJtpW4F588nm2e+VSgcuCaTsovM1IU5L0a/yCpM4/m2XpU5JyP9Ia6/n7fU+/vtjGKjKH1cm/F2npmjtKkWtnk10rogQQERERsUOxLSSAXm+A4y+dw/EXvY7NWe6aciGtMV75Gw2fH//yJbHSCxfTH4ScmOZA51+qfgvrshajKSAkVDsMYBmb4FX87nuZw3jP33xrue/gAe5LLh42t9zKksAz3/W5vcv6pKLfPxVULRtd4pekEtncVa+bztHDejA7v4A//LMv4sF7bym37bnMXONjT58AAHQl0V238NJGX4JejORJX1z23HWny+3JsC1gQmo5m0DHWa3wcZrzX0larXvOJU2Zbmr7GB/zlZumWiwN1FPe953vchKvS5d9ZbKrS/z8d+9h76E84LAuXOVnu7DIXNu4BNGMzXhbxuzScLW3tcHBoBiyXxjhmF2ZLkFSOwRcm0/9IOMjDKBSLyA5Tyq67DT19o/L55kOTz7MElLzkCQ1C+wgRo6zybAtgr/rizFyO6FoIrporXFNQ15AKlbwh1ulolmZ7mKNIOeQ2MGQhxTJQFqS56qSUy0IpFKJwddC9jdZE++h6jRz7NYNS1t8gGgSMqZ3RZ7HpcXgGlX2Grpo2abXWfYSxJVFvn5PgrXac9zX9rKX/pVeLUlns6vmr793ku1Wh8bsUJtQglw96PT6ESWAiIiIiB2KuABERERE7FBsCxVQe7mDx77zDC5f8sZPQyJSEatAHnrbfQCA191zW9nmU3/8OQDAhfMsYh044I2IGvg0O8vuhEePSn6Qe2/31+2IIagnIl/CotfYpBfnbrlNAmx2sctnpeqDaa4ushGxIcFK45KfJcu8G2ezyeoRzcR44bzPGaIFvRW5upqGSXHS9UV8NKpVPHDnUfzkB/9Oue2LX2G3tpPn2TDal3z
vg6BWgmZI7Q9YbC2s30cSFDbeYJo066wKa4UGSFFp9LpM42rGtJmZ8UbYZpPplKZ8vokxH8ilpTyt9OnbDz8CADh3+VLZJmnwcYk8r0GQa2VO3PymW9zHNz3Az39ylw/E+/LXvomNgStdNRnD6h23iolV1THGR4IFR4sDhIz9hoy5vOdVdFcv8ziamWKazclYdtbT0BjNWa+5qoacTbkfZeziqFHXq1vSMr9+2EetY+CGjw+usP56ABwMN6T6kM+K1Om48867AHgVK+BdxkuDcaCKsprdV+9fNWFBG93WoX0AgCsZZ8I9HbiZLyxxnxYv8zNqt4MA1Tl+r5ZmWaU9EFWlDd6vVN65LNXiAV61vX83qykPTPH4PtDiefGevZ6gzao3Oq8HUQKIiIiI2KHYFhLAoF/g7KkFNKrejbAnLol79zPHeO99zLnv2eND3TWz5Pwcc0b/xd99T7lPmYbf//3PAPBZCl/3Bp8tsdoSN9K+pJQQjikMPW9I7vyOpBAIXdJchfuWgznQqWleT5tNf3zWYK7hDW9ll9FHv/Wtct+ls56bBQCNojdVzw2Mja9vpd+zaxL/4qc/iKdf8pLH1x9hCWBZuXqpdBS6pWqG1EKCw4owzYMY0lp1ps3MBN9/wCChL+6rd9zFnHeKTI71huKJcZEchJMvgkC0q2I4e/ZZzlQ6K+6OWctzuDpeTJ+HcT2oNnbvQX4G99zFXJwa6F88610/5xbWV23Nw2FVrzwaMdStkjG0LO4eNFPuOiGWvqoZtzl3+kzZ5tw55i7bPabr0kDSkvS8yyOJROUrk4WGzuGKXtrTNKiGpwFU2u0wtYNKA+oO6jPqBsbwdbqBJkmC8YlxpGExe3lH1HjblAzCYRBombl3xI2W+ydGd+l3X6SF505654JHXuGx8tgCOyVcmOP35OpV7wbakZz9vQV+Dr05P67yRQ4IdeKWTjLNJhS6s/L3XLLkDoJ95yRFyiDhsfvyHB9/6eIzZZs33e6dGdaDKAFERERE7FBsCwnAWod2uweTeM63Ju6Cr7vnTgBAJrVcT570nOxgwOxAsyn64kCPuncvB2zddRcf//RzvHpeuuxd6e7ex66dpicup6VbqD/P3Lys+sIihXm7u13mHkxV3LVawtGOey71onAKKlQ88MbXl/u+Pss66I4kOFMOOMsCCaTlXSPXgoXFZXzuy9/AF77q6/WekSRohaz/rqyJ7HWUqlNVjilJPIc1LZLXLUeYU9o1xpx4q+7bTO7jZ7IkQXrHXzgBAEgTfz97ZjiNAYE5pfkgGeCpk+wuu7DEx2c1dsdFUHu1Ch4TqUhwtx/24+f2w3z9iXGV4FiCeOGlF8o2i+2NSAaHlZx+mRZBA8Lc0PbwGHUVLQJ7kOrqd08zrcyA3YKLgXcjrNZ4jCxc5kC3SpMl45Bb9teX30O6dKcbh24hDexXToL29DxuKHptNABsZZv1JoNzDhgUVCZ9BIDxCabJl7/IyQX1HfzABz5YttH2y7JvYSmoNS62wSWRKB95gTn/R0/4sffiGZ4jrhQKS9oAACAASURBVCxLKgmRbrKg4mBHkjQOROq04RDQWtN6+1JJrFbx13BSCayvwZRB3eKu0LQ9L1XfmlIHednPC92nnsNGIEoAERERETsU20ICMMagXq/hFU0AB2B8nDk+Deuu1nhVreVeV5bJtuY4r2O9YN+8rPB33s1c/vMvcvrWl1/0ur5DRyVplATc9KUSU7PRLNuoTlR1nIY8h1QUUrlJ8vSmmYTOB1knbr39EADg/gfYW6Gz4LmA55/gPp05zRyLeiYsLniOJamur/bn5YUlfPIvv4VuJ9Cjapphy31RDqewoQ1Av0vSsUP7y33veCsHwx3ax/fWEvGmEUhH85Iu4pvf/gYAoCtBV3fe7kP2D0sKjdlZfibPPe8D6BaXOHXF1CRz8oV4xtSCZHK3SBK+O45wm+lJz0VVRMeqEti3nuAKc195zKf07eXrDwQjsB5/KM2
Dfo7o+cNAqjLFiLTJA+mgmTHN77mH7+/CcaaLnfED6wI7lmAgqTaMSGz1ICU2yjGrNoDg8uoZpHnHNKAryDOsOnQqA9pWeuP4e5RjgprAK/JdXCeIuCpaaLrQNAuz5/i5Piz2rK992+vHj7/EwXFzktSt3fEpYjriibMk0u7eB+8FABy530vmj/3F5wEAVrwSE+g778e3MzwOVWJKK0GQWMbBYWVaaJE6iLwkk4Clz1QSWea9IEisw89gUUpkVyXxH1X82L96xWsy1oMoAURERETsUMQFICIiImKHYluogAA2UKWBeK8idS4iW0tUQi4LRFTJkZGK29TMPu9GqrmDJqdYNDv6NLtUdXteBaMBSL0ui2M1UWWMj/nzaE7zRcnZH7q2VSpNOZ77ekUMzGHAzYH9rOZYbrOop7n/+dzDxsOuZCY8ftznC8oki+VaUTjCXJGUGU8BgERdYMXtU9U9oYtrXtYmYBfPH3vf3yr3PXgfB8YszovxWjUvzh9/7NiLAIBTJ9h18Z1vewcA4JYjPlgvz1k0n51jdc8r57xbbF2MnPUmj4mpJht47zjgA7luPTgufeS+9gM31mNn+Fxf+hpXO3v62CkAwHzXq33sOlUUIcJTlcbPUV1QWGfCqaFYPq1XX+3bxeOykbKhb7z6BADgVJBLpluwC3J9THLRSHHxej2snqU5m1ZeX/ukxmCTrMyW69uO3BeCzLnltTQgLMx4uj76tgfAY+cd6onPZPv0w18GAPzpn3Gu/gvnWH24vOTVPFbGoSMN7PTv85g4erQW+D28U4JAj5B3gHhB1DEnZK7oiprGDvwzKh1FyjojQR0EmceMzC9G6mcMKs2gDX83u/gaWagCknHs5MXqzbKLaZb49yMNnDLWgygBREREROxQbBMJwME5O1xvVzMiCmeSSEqEJPMrdU0qQU1O8Qq//2BQb7fDnHOlymvcG97IFaUef8Ln6p+9zIYkU2EuoCL5uwdB7vt2W7jTV5jTsIGRrNvjdp0l7tPxZ04AAJaDimCdDnOcjz7CnKgN0jx0u7rqa8ZQ/rVr2lcNy5retXFNcBZFb2m4xoEY6myh110pAQzEIH7wAHOa73rHQ+W+Gamn+tQsB7wQsZR17oLnUE68zJz/LUc48O6+e+8BMBwkp+6vzzzPz8QMVQtjw+6BA/xs7zrMn7fu9sa2hnD+c4tM08cf85LTp77IxucTUiMglypoLqgu5QbeKLd2OGhd4BKkNSiG60sM8cMlUy7Voiqe9ncckUyqS5yPvttlOjcn7yrb3D55P+97gWm+RypMhYyhy0cstGGvR7JTaLWv1WoTl5x84IJrRlxfNTCLgnFGGG5zvTh7eQkf+62v4u7cB8DNNPmZ7ZpmA3lfJDoyPlutZvnNRRq5J6jB8QFx+JjW9C2OPyvf9c4hd59nen9W5p7PVnnMdhHUVC7fIZkPCi9ZGpHmSCRcOxDNQDeo50D8Xpsaj+tq06dIgVT7SqReRlpwsFma+Xlln7xXzz7u3ZrXgigBREREROxQbAsJoFat4nV33oZvXHy83FYIF17L1GWRV1iTeykhLZgbnBJX0bzjObq26KcXrCZdktz9XlWIi6InnpyRGp5t3nnxvE8RUBSaLUtIZYPc4tK3jmMbglZlGg8Sjh0+wq6mec5cyNXL3tXV1yplTmlB7ANPP+mDPO570LunrQXOObjcDUku6ldXhu9LFItWTwOAasa0vVeSbVFw3x3J8d9sMPd0UWwfTzzj+62Vmt7x9jcCAKYlsVUR2GCe1cppwqned4+3Dxzey89r9iLXATjzMgcAnn3Zc6gdqf9wbpav9e2AG3plll1pk4S5NydupEUQzJNKJbPhumzXCVI30MDGQt/7jMpVK+d89+v2lPsOHmLp9Tvf4PFZq78TAPDQG95ftvnOMW6THOexd2gfBz5a6+lrxFXRix7X1slrUrdQCsw0OE3Giwu4/tKuoBy/Sj1BECXWmbM+tQPMtC9iTzWojyH
d60tFLk366MJILAkQfavY3T4w7m0A9775bQCAtrgg12/nFDPFCS8BvOUQSxenLzGN/1LTXgT3o7fvTSGrBckxMtVsBBJU38lz6rF+3wY2gFQSxFXrLAXX5Lfre9pePrs+22B5rbUeSESHAfwOgH3gwnIfd879n0Q0DeAPAdwC4ASAv++c26jq2zsG/d4Ap156BcefO6OTxR4AiPRdP2zex2D2JFzRh7yskbYbhNkrS/jN//glLMy3QZTiB97DObAibbcn1qMCygH8D865ewC8HcA/J6J7AXwMwBedc3cC+KL8jrhOEAEHju7B+z/0I3jfB38QAPZE+m4MiAjZ5EFU9t2JbM+tQKTthsEkhJ/40Dvwy//2Q/hXv/CP8VdfegQAaoi03ZZYswTgnDsH4Jx8XySiZwEcBPABAH9Tmn0SwJcB/MtXO1e9XsN9970OC3NeP9Prs0i0bx+LxlnChrHLF7x65vLFOWnDBpTlRa8CKga8tqlrJQq+1UHgBnjq5dNyfRYHs7qqQrwYrGJ0lvL1e90welTEeNErzUi5wYMHDwYtJM9OwW2qQem3A4dYbN+9l/O4jEl2zPEJbyi1tgexjXawFvo6zqAYSuZaCXxILYRhiX16F/fpjltvlXv0NOl2+Htd6gA8d+zbAICnnvUqvJq46+7bzxla9a6Pv+SjfQfi1vjg3Vxe7+67dpf7OkssGj/+NY7crUp+JBuUnTx7mZ/3yQusBrm8GORaEfWFK4ZVE4YApBmQZnAuBxKD/lppKxi1sZY59kcKp4euk6qSyojH1d5dXgV0+jw7DAxSVp+97v4fBwBcmPVGyK9/8zsAgD1TPHarUqNhYP34pDICeBUjsLYZ2RKWRqSy36vkMhppo6UCJiebmJxklWy9UcWBAzO4eGG2grXMC2Tw+rSBiaBE61Pn+Z09JTnBOh1+5nmgAnqTqIf+CfG7t/cH3l3uu/oOVqcZvd8xVrN0znpD87OS0+rL4sYJcUVPnJ+ftD6GenKH7075VZ4JafaAMDuqqJTHa/y83vrgG8pdF8++DAA4fYI/rUbYB4d3ltZXJ6Ts4kachIhuAfBGAN8CsFcWB10k9lzjmI8Q0SNE9Ig+xIjVsbTYBoAGXiN9Q9raQOcdsRKW00G8ZtoCw/RdDnzzI4Zx+dIcTp08DwBLWANt292N8NKKeDWs2whMRC0AnwLw8865hdca/OGc+ziAjwPAvn27Xb1RQWvMc74VyW+uBpHZK8zlPf3Ui2WbpSU2PrbKSlJ+VayJ65Yux51FXr2d9at4tcLG2nHJuNmaUIOhP09bXMgKzZiZeA5rQapOTUi/Z+6/AwCQBFxqVzJVTktA2uRt3sXzDZKHxMhjMFK8O8k8O7G42Mbv/fnXAOD0a6VvSNusNu6uVUBaz1QI9xQGlzQk+2ZLqn6ZIMgq7zMNemK4OnWGjblX5zwXlRqhkxhEZy/Oyk+/2L/rIabX7bcK/Vuebk88fgwAkIiUUhGXvJl9ft5oTfK5FzovAQDagftuIQW59d40QCksjO6cQ//yKeA6aCvHlfQ9dGivY0Nw0KD8PkL3ocT0zEE2KkynhQtP+n2aVyZl+nz5m0yLb33bS1jfff4EAOD9P8jSE1nN6hnm/H8tdyNcvkh4IZOqrqGrZTMtx5TmCxrJPNrtDvAffu1P8ZM/9cP4jV/7f18zuzo0L+w64KhzFUj8c+3nkuM/5XFZGDl137+Xd0v7xj7u4/EXfJ6ghefYUYEkU2g2zZLuY/MXyzafO8tul2fEgYBEeqc8qFhYaKZUzZcUylJaG4HnJSvnCaWrujiQ6JR32+23lvtSMeRfOsMSQDMVI3xQ9czl20ACIJZtPgXg95xzn5bNF4hov+zfD+DitY6PeHUURYE/+YMv4L4H7gAASf8V6bsRcM6hd/kEUva/jrTdQOR5gd/4tb/E299xP978lnt0c6TtNsR6vIAIwCcAPOuc+9Vg12cAfBjAv5PP//S9z+YAWMzNe6cAjUx
fWmYuOxN3zFbLSwlvfog56N37eFu/8MEgdiD1QOX35AT/ftObfU3giXHW/423JKOiuGlVKj6jYib96ItNYmzMr5kNidEizUsv9QwMBWHhogesNkSiqfp9A9FVLknNUOWWK1WCcw6f/ZOHMbN7HG//G3fjS39RVhK7bvoSEUIpwNdvHa7mVKl62jZqfC8zEkZvgjQPi0vMCV2eZ67+3Hl21awEkssbH2Dvj0ylG8P3+q63efrffbvYPqrCredeXfWQuL9O1Fk6OP4yp3K4NOttQInwL/fcdQufZ8pzaE8+z9yTlwr8PTvn0L9yEpTVkI3vRn+urDGxhrHL3L8bqgms+nTlBPWZB7YlYq5y926tGuXnwwtnmS5nxb324hLT7tgxX3Wq1eDnksqYvZaUF2JoDJT0gJxH+hi4s/p7Gh4vfK5wD9+BXuOTv/nX2L9/Cu9939vCa14/bVPA7DbAgQfLTa0Ku8YWYxygWVvmd7637FOsPC25+i/LmLuy6KX+qzJ2c3mx7zjEWoAnBz7dxJWM3/9pmYSW2iKZGz9dlm6gZTbV0HYy/CWTms697lzZBh11Y+U2X/38n5e72ovcl0JsDwsSjJoPSZkbE8K1HhXQuwD8VwCeIiKVTf8V+AH/ERH9EwCnAPzE+rq4M3H21BU888RJ7N47if/71/4MAO4loh9DpO+6YXvLyNtXQVkNnXPPA5G2G4YXj53HN7/+Ag4emsYv/S//UTdPINJ2W2I9XkBfxbW1jD90nWcDaIA3vuneckutxpzzxITU6ZW6sQ895D1F1BJfa0rFnXpwO1Ys8GWwE3d1YtxzuQuSEIqI9XnqOWQDr4NEVvGWpGQYBJFkdUlBkYiVPzF87rEg8KTX54Ckbp8lmXbXcxpa0axe5b4mkqZgMOjg8OFx/E+/+AFUK8wB/6//+nefcc59Vg69Pvo6DCmpfWow0dsK9xImwZueZM5ozy4NUQ/SRIguUtWQe3bxM3lLELT23ndz8rhUGMCpw3yeg/t8OHyrzrRJRUqqZH5fU+wye/dyzYEH3/AAAODYS94GtCASVBdsr/jcXz9W7nviWW7nShuAK3+bShONQw/CSi3k7tmn105bMGmHzQficVRyv6LDdV6H22ry9zHJ9f7KeS8ZnrssUpPsG5d89G99yzvKNgPpe5KIR5c+n6Gc/8NSQfi75PPFw0cridmQy9cmWKlvLiWIEUngjjv34eO/9bOykQOqPvJf/5t559wVXCdte/0ujp18Gqe/+f/5bQMJ6NO6A0YlTG+/ekF05sflPXZVXyd5oc7zCY2LTW4/J4mkoE6HvcTjck7sBJW6JKIMaoVrUJiXBIJayDIYKvJs8i7PM3NXAg84sZ+1U+53Z9FLB1buRet0pJnUvN7tq+lVNenfqVNYD2IqiIiIiIgdirgAREREROxQbItcQCYB6k3g8C2tcpsGGaltRfPUaHZOAMjEkGMk0+Og40XVhojPfXE71OMdeUOjBl5lonpIUzUGe3F8eZnVQ+3FlbEKdXFbrIu6qtNl8fRSkBUzrXGbXAKSgjgd1MXYVJPc94mIrN2gQPhGOHsRYTgVvBBV1SMVKSMYqoCmJiTfvKinyAT5bTRXiRiK3/QAZwrdM+kzl05VuOe7p5mmrSZfM+97Y12vI/QXtZwL6K7qJSd52sfE1ffIwX1lmzNX2HB6SbKBLgdidFnasAyG4usnSaDiKK6lwbwOEAByw+UWMawXMfIUs6pvNCakujrLA+Lc1QPlPitjvytjtyVFwd/9bl+T4Wvf/gqf24rBUFRAYdlGg2EDcbXqVWxtydWkgY5lCYMwoKkMaPN3tvLrMA1DY6iz66Nvv9/D6VMv4czJoD5Gld/1JTGUDmSgtFp+7Op7RCO5rgCgP+DxpCUd//qJRwCM5IOS43tSSjKXou6uGGoFwM8VlaBcYyaqXCuqqKtXL8t5/MuvtUgGEqezHMxrVXn+iTy3MXk/99R9PYG6lMN9eUWPrg9RAoiIiIj
YodgWEgAc4FxRVqECgDxXwxn/TiSjXrPpu6zp4zX7og1cFdWwWxGmpynuo5W2X/OWl5gLUiNLmgoXFRjrquKiqAXj0yCffF8CNnodbn/pEq/is/PBai7G66pwLpXAWFWTe2uLi+lAKg6FnFq23idEBDIGYdYHvT/lTBt1lrzGWl4C272HDXg1odv8gnfRXVpmzuj4i5x9syoGyVt2e+5cq1rVa4Oha2VBzv8i5+/tDrep1T1tNBd9ITQpJFCpH6SkUPfV8SZzQ3cc8UF23/kuB/wM5NmQcGNhmo/EbYAEIHBBrg0tom6lTkICHidj1TCdhhrSJ6R/XnoqBuIaKv2bllQhvaB+QVeiZKs1plmZ1iOURMqMnZK6o+Z3as58sanCJZqd1B9PiXcX4P9XuprqddWF2gU85XorgiVJgompadz1Ou9ccP997CjypS9+AQDw8COcEqOwYbCY5OMXkoSG7VxrX6gxWSShRt2/c8rp9zsduadhgy9/VylarxFkKZa0Je1lDT7lRvv3+fGZCm21YP2V2aCegTg3TM+wcwXJnHPlipdwE7MRuoEoAURERETsWGwLCYAIyLIU1aqv9rQsufWVixiX2rRh3vVCKu9kwt2ZgP1RF79U7QSStSlJfZvxCdap1USX3Zb84XptwNckbjT5PJ2Oz/0yN8fuYgtLLB0MtFZB5jnZjqzmZYR+sK+7MHyPC4vsKhrWRk7habJWUEIg520fe6b4vpvCPS52uXOHDnoO5eBB5j6qTb5+94rXXy7OcTDWAdHvv+l1t8lv7yaXiuRkhcewTjnEoPascG3dJZaYXMCdp+oeJ8n82hI4s9Tz95EYvr5KFW978M5y30tnOLjr61JbQROxFUGqBFtIoA/WC6kKNgIrktb4pFTrona57+qCSEYiog5yL3WqBKH0UVfPbsdzicglD35JX0l6N5TqYlgCMAHXqEOsX9rGVuasLzOdlSkhQjFS+qp69lUC0dYrAWRZin17dqETBHmdO8tBh+0203JygiWoubmFso1qElQqsYFYY4thaSZL1QXZv5c9eW4aCKf3kQR1k9XWoa6awzWVh5PwHTrICfsaNS9ljIlkvbjM96ESGQDML/CIvHyFbYkt0XrkReCenm7M1B0lgIiIiIgdirgAREREROxQbAsVkHMO/f4Ay8veyKEiVkMS7jgpSdgLiqprWbqBbOsFZdU0MK90z1JjcuLFMBJ3y07pgsWicj4Iol5VLeHUKO3F2lZNVSkshjZbbDhVoyYAXJljF7CqGDiLQIVxVQyrVcn+qIHMy4HImweFqNcCQ0A9A+6/y9co+KF3cVm8x777PADg/BL37fCB/WWbvdMcdZiI32otUF/cdoD7e8dRzkTZqog6o/BR0g4qUkte/jJ5ZCAqa3SuGN1C9VoumRcXxN2v2xexnFYaMhNRVUyOeTe5d72Nc+kfE1XQWTGymSDaeKPE6GvBOaZdkvH4sM5fuy8Dst9lumZByU3tlpavVHdn2/PPQMufUkuK3WsGyjCfv1M1h/QnYPfUDZQM06wr2TQrNU8TrQNAqxmBVU0lmTdLV8uwNCbWByKgViUszfv34Wtf/ysAwMKCGMplPPQGQa5+GU80bMMG4N02VZ1TFcP4eNO7eS+IyqxfEXdSHaeBk4kanb3mK8gALLTds4cz105N8LsUqpl++IfZpfepJzl6fW7BKyKdOlXs5/dsZjdH5V+ZD7IU97nfFy6srzRklAAiIiIidii2hQQAAAkB1YpfITVYSd0ue8I5D4LCyAMxhA0GGuzlDZV1MV7mOe/TvPCNwNXQCBejWTmVY+j2/DUqEnBBYjUbr3tXSSqrTYkkIYbKwbLnJmri9ukkSCwN2JG6cCGJcE3jkp10LHBJM5nPTLoWTE+08KEfeSf+zju9K11BfP6vffsJAMAb7+fMnZNNb3CeanDfMsmSeMtu76aomUIrkujHqvtu4nk+DbxyJdu5In2kD7iT55AHvqp94Ui7XebsuiL
luVWyINqRoC8AmJngPu7fwwFtp2dZuuyHedTtRhbLCTNlalSVSDbS99R4CWVuUfJPSXHwmSAIT43HWqBd7+riubP+cpKPXv0Q7Qi3DngutSVZXpcC54aeuErW68OZWG0oQZR3JucOosS0ohmJgZpGHnN4/FphTIJGYwz1hpcM9VlrUJX+rgcG1lLaLDsSjEuZV/RTx2yj6iXtZkXHU0WuIdqHnn+v1Z1Yg77Ce1WJfvcMu+9OSd6hQ4dvK9tMTLLL9O5pDvLbL27XAHA1K6QNzzVzS2IEDozZwVS5LkQJICIiImKHYltIAIkhjDcSDGqe212W2ruF6MDLNA25XwWzjNevTLL0WRvqd+WLfNbErTAZqnmayLWYw7hwlt0bB4Gub1rqDTck46cN6oJC3D7byxJUoi5mQQh8W4LNjHSoUfeuklOaOkE4q1aT9w2qgasj+fZrwfR4Ax9631swnnkd5beOcQqF6gS7eu4aY9qGgUrTwqw2UwnSSj3LoTpKK7n2U5FSQldA1fWTHeYeQ29B75Inv+3K4JZM9KbLmj990FuxTwPowiC9lnCEuyc1Yyv3pxu6mq7iunm9ILDrpRvifIerZS0uiZ4/cEE+d45z+x86xK63YSCZ0sFadT/k57PU9jYAksDIsuKZZrANxl5D6lpU5XPuqtelp1WxrYmUklU1qnLk5uDfkyJ4Pqp7N8mIC3ZgI1tvKggigklShEJbty/ul1CuWKX+cCoTt1eh0XCaDoZ6uyr9FoLKnrcw446OnHqxq67kXkJWjUQ14239wDY5M8PcfKpe4QnPYQ89eFfZ5uUz7OI5uZtTgNx21NsAnheX4Fmp+2tLW06gGTAbw7tHCSAiIiJih2JbSAAOFjk6qATccV1y6ltNaCVcwMAvgmWQlurmQi5KV2QNEsqUSw24kq6EYetxU1PM7VPA7bYl/H5Z9MVJ3XMBTryFOqKnHhM9bj7w/Wi1hJUWDrje8Jx4RbiHJeEQ5+f5M4hqx8SEtzmsBYaARgb0A/3horA2B8XrZ0a6eOTAVNlmWlKop1K5ygbOSOp9pZXPqPT4CZOFKcdvh3cFrihlEI08kpCDV84sk2dRqwndw2Rjcg2VAPIgmCoT2s5Mso5Vq4eZwE5UMeu3ATin/VgtZRrTp9/rD/0GgCMSHFQVrq4I2NxSepKxn4oEoEFjvFE5YDmmPDjwUmtKkNSsVNEKaK/1nzU5Xhk05UIpTNtLcrMgVUtaSpTDVcOSoJaGdevjL5uNOt725gfhgjohKvU/8uizfNWeJGMjTxsdD1lF6N8NbAgiARZltS4ZF8G4urI8TFuVasKc/5pAUqVQlf4BYPdulgAGPebqFxYl4HJxtmwzKeNyfJzHwStBXv+JKT5etSD68tkgGZ3e43oRJYCIiIiIHYq4AERERETsUGwLFRAMwVVNmf8cAEjEzYEGAIkYZoOcGSp2qRvmxPguf7yIoucunpPj1G0sKMxdFoFnEW2iwSqQhbYXGdsLLLa1mtwmDQLJ9LhGY0zOJ9ktAzfQZlNrDEifrd83kOCVuTl2Uey0B3ItX/otq6wvU40jg0FWx6DvRdSmZCZ98HYWQ28/yPfdavg2BmLsVlVOUOheg7FKw+fKxPGBMVLc7uRnmG1SjaQa8GSHLKkioguNK7kY2wIjrqp81CAZGpE1MGl6goNoGhXNG+Tv4+6jRwAAn3v+UawVaZphcvIQen3vYkklrfhTg7QC7QjqVaa9qn7CADlT3g/fw9yi3LPx46LZkgy2ooLQ7JShCqhwbOjt9Fn9WWkGDgVO1XjyUx5eSEMa4Q8ThC7QcpzTnE96Wn98teJVimtBvV7F/ffdOpTPf1qM+u94C9eg+PajT8nnE2WbuXnuXK3O9GqOTZb7NJhLA98G4iaeB+q55UJKs2ogXyrPL6gaUJEgMX0JKoFfZl3VxDKfveXtHPQ1OXO4bHP21AkAwAtnWPWTBPm/dM7Le/wOqotuLwwinQsKzK8DUQKIiIi
I2KGg1bL4fd87QXQJwDKA9cU1bw1m8P3p91Hn3O7rPUhoexLfv35uJLY1bYEbeux+P8fDesbujUhb4AYYu8A2WQAAgIgecc69Zav7cb24Ufp9o/QzxI3S5xulnyFulD7fKP0cxY3S76gCioiIiNihiAtARERExA7FdloAPr7VHVgjbpR+3yj9DHGj9PlG6WeIG6XPN0o/R3FD9Hvb2AAiIiIiIr6/2E4SQERERETE9xFxAYiIiIjYodgWCwAR/QgRPU9ELxLRx7a6P6uBiA4T0V8R0bNE9DQR/XeyfZqIPk9Ex+RzfeGPG4xI283DjUBbINJ3M3Gj0raEc25L/8Dx8i8BuA1ABcATAO7d6n6t0s/9AN4k38cAvADgXgD/O4CPyfaPAfjftrqvkbaRtpG+kbav5W87SABvBfCic+644yrafwDgA1vcpxVwzp1zzj0q3xcBPAvgILivn5RmnwTw41vTw1URabt5skyVgwAAIABJREFUuCFoC0T6biZuUNqW2A4LwEEAp4PfZ2TbtgUR3QLgjQC+BWCvc+4cwIMBwJ6t69kKRNpuHm442gKRvpuJG4i2JbbDArBa3bht65tKRC0AnwLw8865ha3uz/dApO3m4YaiLRDpu5m4wWhbYjssAGcAHA5+HwLwyhb15VVBRBn4If+ec+7TsvkCEe2X/fsBXNyq/q2CSNvNww1DWyDSdzNxA9K2xHZYAB4GcCcR3UpEFQAfAvCZLe7TChAnnf8EgGedc78a7PoMgA/L9w8D+E/f7769CiJtNw83BG2BSN/NxA1KW4+ttkKLlfzHwNbzlwD8663uzzX6+DfAIuiTAB6Xvx8DsAvAFwEck8/pre5rpG2kbaRvpO1r+YupICIiIiJ2KLaDCigiIiIiYgsQF4CIiIiIHYq4AERERETsUMQFICIiImKHIi4AERERETsUcQGIiIiI2KGIC0BERETEDkVcACIiIiJ2KOICEBEREbFDEReAiIiIiB2KuABERERE7FDEBSAiIiJihyIuABERERE7FHEBiIiIiNihiAtARERExA5FXAACENHSyF9BRP9+q/t1s4CIponoT4homYhOEtFPbXWfbgYQUZWIPiE0XSSix4joR7e6XzcLiOijRPQIEfWI6Le3uj8biXSrO7Cd4Jxr6XciagK4AOCPt65HNx1+HUAfwF4ADwL4z0T0hHPu6a3t1g2PFMBpAO8BcApckeqPiOh+59yJrezYTYJXAPwKgPcBqG9xXzYUsSLYNUBEHwbwiwBud5FI64YsqFcBvN4594Js+10AZ51zH9vSzt2EIKInAfySc+5TW92XmwVE9CsADjnn/tFW92WjEFVA18aHAfxOnPw3DHcBKHTyFzwB4L4t6s9NCyLaC6Z3lKwiXhVxAVgFRHQELE5/cqv7chOhBWB+ZNs8gLEt6MtNCyLKAPwegE86557b6v5EbG/EBWB1/AyArzrnXt7qjtxEWAIwPrJtHMDiFvTlpgQRGQC/C7azfHSLuxNxAyAuAKvjZxC5/43GCwBSIroz2PYGRDXFhoCICMAnwAb2v+ecG2xxlyJuAMQFYARE9E4ABxG9fzYUzrllAJ8G8MtE1CSidwH4AJhjjVg/fgPAPQDe75zrbHVnbiYQUUpENQAJgISIakR0U3hQxgVgJT4M4NPOuaia2Hj8M7Ab3UUAvw/g56IL6PpBREcB/CzYtfZ8EMfyD7e4azcLfgFAB8DHAPy0fP+FLe3RBiG6gUZERETsUEQJICIiImKHYlMWACL6ESJ6noheJKIY5LPBiPTdPETabh4ibbcfNlwFREQJ2OPjvQDOAHgYwD9wzj2zoRfaoYj03TxE2m4eIm23JzZDAngrgBedc8edc30AfwD29ojYGET6bh4ibTcPkbbbEJvhynQQnJhKcQbA20YbEdFHAHwEAMhU3lxr7uPtIIDClk7b876hkzjZTXDOwTrAOQdnHZxzABHYPZq3wwFO2hARDBGIwO3g5EqjEhGV1+d
Pv2aq9CS7+NrOlb10vHHojHp8kiQwxnA/jAHgYG1RnsM5K33jay9ePXHZObf7tdA3pC2AN1OScm+4Q1Chj4b+o/B2V4He/0gzOZ4IIJPwcyKCSVbyFkpDf6gcJ+dVOjkmG393jp+M8/0e7aQ+B/+p1/P0JgqPc0N96SwuvGbayn2U9M2yypt3z8yU/V6tfyvH1Oi+axJ99Mqvchp61avwlVbS/5rnHXpW5P8ffQWDsw9dy1lYZ+GsxcWLr1wG8HO4TtrWUrz54FQi5wuuIN/dyssO9VMpcq0+jmLVvY4H6Oi19Cvpr9Ven+FGa0b4rvG9+ef20qVCx+6asBkLwGq3u4K2zrmPA/g4ADTGj7h73vo/wpgEhhKePIgAZ2FtHw4OxlA5uRAl5UN2DnCWMBg45AOLQd+i3y9QFA7GmHIiygeWF4bCwVkgTRLUKxmMAZ/bWBTOwrpi6EXmCTqFIQMiIxMHwcECsAAcTAI4a1EUBYqigEkSpGkCay1sUaCwNhjAKYgSjI2No1qtIstS1GpVOFi020vIiwGKIkde9JClhDQF0pTwpU//05Ovlb4hbSnNXNIcB5wFrIMrcsDy60NkADJAkgIULkYUDDQqFyL9IwISY2BkEU2yDEmaoNpqIq1kqFSrGBvjDA9WFkQyBolJyt6nxsAYoJYYZAmQGsLAAoUDcufQyx0KZ5HnA9iCaZsPCnnoplw8nXUoLNO9sAWsDRYKIiSGF9o0TeUY7guRZyoe/dyfv2bajtL34MFD7r/92X+u2wNChYuYDU4VLnMeJb1d2I2RTxf+5ufEiyXBOcA6330ns5aDKSd+C5JzEAiGF2Ck5ZiGPHvnOQ8YGGGQCMYQDAFkdMJzMhc5kL4LRAAser0O+v0O+v0ufvX/+J9ProW2d+xJ3b/74ASsc7DWwSoT4MDPGXLPw6Nf7pj7ZmBBJAuBjMVhVs/JU1F6jU72TAcXPFOnDUk5DRnjwRIrZBleR6992yupEy5gRPIOEUySwJgEZFJQkuC//PUrJ1ec6zqwGQvAGQCHg9+HwOlUrwmCgzGWJxXjYGSAOWdhbQ64AnlecFsySJMU+hJYWQDyAVDkPEAMEcjIxGX5oSTOwjleFMgQ0oRQScg/IOsQsJhyLX6sxvEAJzjYnAcikYNJebjYooCzFtYWgHMwlKKSJTxoEyDP/QC21sHaHEXeRW4cnEvhbAEHh36vh6LIeRFyOSwRLAF2eBRdH30d909WSsDakp1y5EBkeQCTgyNe0ngeIuHQRzlLpnE59kkGZZIgSROkWYosS1GtVoWsTFe/uPAFUmOQGEI1I1QMkBmACqBwDlQ4DGTRNCZBOak6/TQyGRl+FgVPTlTwghC+TFZeQmP4+sYQkkTOSStkyuseu0oDp5ctV8uhFuHjwCjX7x+vjMdgEfAtKTg3j32eg2TaoYCj1+MJCBcKckYYWl1AjCwEpmzrZHHw4qcpx4FKy6UsQSOfuiA4pXUitF4bbZ0DBoWFsypd23IHwYFKmnvxQCdenc0Lcjyo5c60nZOGLrgWQKUUqQ39ffn7JgTSZvCfI/+mOEfSbsVdrX6zoxJE2VW5JuktOVhdwDcgFm0zFoCHAdxJRLcCOAvgQwC+Z+GPxFgkiUViEuYwwA+8oALODpAPurDOITEGhAp48KYoCoK1QJETLM+WSCgBEmKOVyYgI6xDlhASMkgSg2qSwDkrC83Ig9FVF8HK7hyKfABrLYwBKGFJwNoerLOw1vKrZYAs5YnSWoDIoSgsisLCFjmcBfK8AkOALXLk/QGccxjkPVhXALAgKnjyF95qzfR1DjYXaYXZRHmBADgLR0Ymf355HfGLBWJuj0mhr4FyYK7koB0RyBiYNEWasuSTVTJUq1XhTlnyKs8jUlyWGCREqFUIlQSoGAC5xaDg1SUpADiCTQCQ5T9ZBJgbZSnAWZYOi4LPa63lxR+AhUOeF8JgsFoqMQZpksFPkOugbTlUwslaJ+ryAWDlZL7aOYDyrdf
vQ/8HCyCUiwfTX/cFizWBZPJRiU765fSMOvknJYcPiFRFBn6BFGmp/Ad59nxH/tNLN0QOhvj5JKZcAK6bttYx81SqbYUJ83/wb4fMttyWSk7e2hGuXqWkcK0EH6MbnOf3YSiYgPUysl8XfQdmnlQwAFjaCGcUCvePrgHk27ihbSqx+CFhAZHdEpCn7Zqx4QuAcy4noo8C+Etw6PRvfs9oT2eBQRsuT2CFa2CxxyJzfTjKkaTMqSaGkGUOzhbIc4vUiACaUbnaGuLjUc57rPogONRrVaRJAkMGWUKsXsgt8jyHEy6Y1QTk+1GODVf+X61mGJ9sIEkJWc2ULLExCaqVCur1OqwtUBQWnW4H/f4A/X4f8wsddHs5Br0eiqIPZw1snoJkWSNRdSVJBmMcEqO83lrpqy++TvQAyiXFlNcjY/gvyfjFTxJW8RiWqMJ7BwhJmsGYBGmaotpoIKukqNZqSLMUaZLxREwEcjQ8HzIheeoxrP6pJg6VhMV5IwtL3xIKB5gsY3HfWtjcihhPpSTHajZREeUDFNYiSZnTB7yqwCCBSVJUsgrq9VapSgjfxbWMXZ4cqFQLqGZgmLMM1AYIJhslxtB3GtkezAou+BB1U6m+AJXnZQGPpD8U9MFPm1ROozLJEISTp1JlAkC+Uzkh+fvT2c/5G9bul8I0SxBrp62DcXl5aucsT+qhrceZgCZy73rf4DFV3o0LGDpCSYGy9SgXDvD8Qape889kZHoP9oWtaKiFdnpo/g8WZX2SwwcRq2pNCmeqcJUJuMoYkI0BlRZeg4D6qtiUfBbOuc8C+Ox1HAHkfYAMnEkAlwKGJywjHHGSQCZGQpYSisIBRcEkMxC9JT/ExFhWN1hRCVjA2RwEoFEvUEl5Ukqp4Mk/KTAwOXfFyCRsCGlKpd6YB59DkvBEWW8k2LW7hko1RaNV4QmHeNGoVCqo1WqlTWBpKUOv10W3myJNC7Q7DgtzOfo9i8IZXgAdT1CGAJMYpCkJJyVqmnXRVydhkpdVuEAdYEbtG8y1wRheAAyQyCLgnC4ArCZIkgwmSZGkKdKsijTLkKQVpg+x+ovKCUX5pHKdLK9vABgCEuLFzjogMUCS8OIBJHDGwRYWFoXwqShVSkVRwMoizSoCQpqyeqnkiB3gLMGYBFmWoVar+YlihBu7ftoqhVfM06uoCYIFIWD1iEIDdzDxD/cLQ4tBcF438lvblZN/MD87KKep05M8Fz23CRYhmfQDvVRwhHC55ITbdWVz5w8evYfroi3BISELeYVlERDdv6WhyZ7PP7IAhPugSk1RdDm1EQRqrGCtGxoWTuWMcHG+liwXHDZ0hK6M/uyunPwJ+kTcihMQHCUAeAGw6RhcNgVXnYKrTL5WUl4T2yKhEcEhcQVUR+3g+OUnwFDuJ0LDk0PFsKHQkYVJeNKqVDJkWYI0M8iyFIkxcKITttahGPQAZ1GtpkgTFn+NMygKIB8AeS5cf5YgSQxMkqBSScVbhwBYgIA05d+1RgXTu8aQVTM0WxUkKU+gSZoiS1NUKpVSAmi3M/R6PXR7PbRaGTrdHi7XO2gv5+h2CizOifFYJmCeEFEas4ZHxRroK7r3UjAmXQCYu/CGpQQm8xJAYmRyNoAOYesMLIgn/CRDmqbIMjZmp0nKtCMj+tnAFuOGXysd9obCP8eqNYtghlTJypSTv9H+847yXACQJBZpZmASKl8r59g+ZAzbj7I08yPPfe8XeX3wq8zqjzHwylqlK15y8JO/k4nEjbTzagzn1SA66ytImHYa7Y0NOqDflYPnhdWVk6PKDqyC8VIB94TVT57RWCucAwr2XSj/vMEbw4ZvnezLBYBv1umJgID7uNaTwNCsHS7KTm8+6NtrvrvAa0clN7+MBouEC34RmPaUwCV1IKnBZi0U1Rm4yiRcZQquMvFae3BNbIsFwBChSqyKKXIL5Ilw4YBJ2CicZoTEGaSwqBiLAgWydIBGo4p6o4IDB2cwNT2O6ekxtMbqSFM
DmwN5wXrgpcU5DPo95INeqYe3OWALB2sBIEFWqaDZaqJSzVCtVtBs1ZFlKSrVjI2HhpDKApGkhKxikGYGtVrm9ctpytZ6NXgCyPOilAY6nQEGucXJl2cxN9vGpYsLeOrx01ha6iHvA6WO2xovnq60JL12EIHSzOswXQF9ARLD3HqlUmMPrCSFyarle5+QQ2ocsgSlaiYvCIUlpJWGcP4VNJpNXnRTXiyUVt5ri+eC0E3Tc/JOJn5bSgIEhyLPkVtCbhO2MZgElSyDIUJKhEK8rlinLyo/uUeTlJoHscM49PuF3GuGaiVjz66Evc7Wg5IrDWbaYbWPbtNP4UhtuCi4Uv/r+XJ/AT+JeTuI0+8wcDqZODs0Efrjqeyfk/XfghdcNQor/+nknN6Wwb3iSV4XgfB+9bwjbtqj4skaYB1huccMh9rTnHoChVN8Ob8rZx0wGLJdmQ2ltZLUKfWCfrqRb2pRCjGypo5sXUX1E2zRpTP8Vcoq4pnnYGBNBpfUYBsHYCvjcNVJFGNH4dImbNaES5vXJt5rxLZYAAjsBqjjNgG7k/FDszBwMI5gACRkUUkBwKBRzdAaq2FsrIH9+8axa2YSMzMTaI3VkSQGReFQ5A6DQY7FRaDf66LXa6PIc3YfHIjRyBKMyVCpVNAaH0O1WkGlWkGrVUeWZahWU57gE4MkEY+TcnEiVKvC+Ro2grJaRe8MYmTmv0HfobAORc+hVa8iNYTT4+dBNkfbWRQFWBJQo9oKFu56icvcvBrunCOeKABRsyXixZOySidL5TCHSgpUUkKjyotemhh0exb93IFkAmWOX9Q5MpmogVgnf6KQ8+EP3c7H8kuZGDZAmtJryQVdTUrXU36PZb8TA69RdR2BElVzsPQHZ0WC0OfB6qnEEJJV4hXWg6G1emTy99v1w3mtAFaqHkaEoJHzUDlxhPYH5TKdC7hMF7ZXDn74kYTX0feQtYXDcS0ET7/w2qVdoFzA1i9dOQCFFdOvI1hn5JyeUvqalVvI919/olz8cpZYrC2PH9HIr7rNG4V1wqah84dL+Wr3oFLR8DYM76GUz2sqzPWbFDatwWV1FNVdcJVxuMokimwSLqnzn6ldm3ivEdtiATCGUK/VkBuLARVsSEwTwFhYtwwWSQcACGlGGB9PMTZex8GDU5iYHMPEZAuHDu1Dq9VEs9VAmvJt5XkuapgcvV4VeZFjMOiLuybBWZmoYJCllf+fuzcJmW1d87x+b7faiPi6vfdpb97Ua1ZWWlWUiSKIIDpxpJQDdVqiUIg6sBCsgtKJUFLoSMVBlgUqUoJOUsUORXDuVFSQyswyb5On2c3XRMRq3sbB875rrdjn3Lr37u+Ih1yHOBH7i4gVK95Y62n+z//5P1jnaFt5v3WWuq6y0ddLhOkzC0iuOGGcOGdzn4K+iPzLqRTDaiBdI2for//glvHVFS9uWh7fvubrr+75yR++5Xz2qGRI0Qg9NAZWHvmvvimlsHW9OgDiGg3GRFKKZIyEzdainRXjqAJXh46bQ8fnH93QNhVtU/PV63vuH4+8fRgYJk+InmkO+GBoa3EKQh2s8rW40kmXCzNJI5y1BmMTxkjm0HYNaIs6TdyfBBL0KWKUwhqorCGGIJlcCATvpUAmMu3yW+k1/BO2lSfG3ANCYpoGHh+haVrapv1OHEDMlNVihbcG5MJoS7qQ71Z4YoH31WV0ukWQ10yg/Dmy8GFKQTgptgXF8u+00Dw3uEVhO5SoPm34/6hML2W5ByW1tlQCCRbrV5zJt2UAz0leJQBISJajSeaKqOQc0dou51U5nszBI6ochGX4SsUAKaDGNxDOqHBEJY9CGGKo9Tx93/RvDmV5XXrvFdsa+LLOS4Qv27dm8UoL40ppot0RTUVs7ki2JdlGjL7tCP3HYDsx+PoKlIVkSOF7yAL6kE0phbVGGDFB45zDWoNSAZ8MJd21RlFXmt2u4vq65eNPrrm6Egfw8uWepmlo2hbQpJTwXmWaqMK6lJ2
BWy/KwldWGucqrHXUdYMxwkRylVsiS5CLeZogBL9gnOXY14i3GJSSFkuarYGYo1yFou8qmhrmaeLFXUf0E2++vicGcsOaOI6IHPczFjcb5QJFlggxAYGlbaZ4iEyhVFpRVZaua7i9ObDrGnZ9CyphK80UIkpPDJP0LKQYMNpgLRijcbZ0cC4fTLl8Eiw0Tp17EJQC5xzaVsxBGvViAhtjxu7FEafcLZ3J4fnqW0wUW0qm/AaXRijGiJ9nvHPScxH9h69t3tLPMXbvR/7Lvy5w6/W5JSr/1v2XHRSHsXEgZV8LFLI+XouP+Ql5xcUxFeP/DYj8/aLEz8kYyj7W92Rg5bn1lfIzK01SViAPs0PpCoyjNC8u3ylJBpkwi3GViN+jUg6kZouKARUniDMavxrnbLzXn2Czdlw+fO8VLFWRtO5nhc62b8jAlFIk5YT0ogzB7om2IdS3Au+YRoq8tiG6G7ANKIfSNZn1svnuH759TxwAGG2IOhGNxlqHs7koSZ1hCk/lDFfXO169uuHmdseLl9f0fUPf11gHSkuTlaSLKdMCJQuIOZJeDDUsEbvSGmOlA3kJgjbGozQzARuDouQE3DBoFgMr32phd0ijUOGph3zRZj6801xd7xmnQNu8YZ4iKWr8LPINMX0HhUpd0sxikOXPJf1MZHQ5OwbJEqTdxGhNXdd0fcfhqicQaFpHTInHpzPH88T5PJFSZN872qbBGIurW2YfmOfAVDp4lV7S9cUBmJjhNWjblqpuiRja+owxMcNswt6xWoMx+AxpaSUXkjFrZ/AljKLya5J44A0UEnM2+PxrqEAh8njL7tm+4vLxEk5eeoDLty2R/bfuY2uM0ma/G+NPEnhtdY9pLV5uPlsyAC7gnmV3W/xn8zbF+zvhss6fclj+jAwgIaSDpCxRN9DcoZtblG3Q2QGgVudTOncU2z4GUERUiuAamJ6Ix5o4P4E/QziKI1AJnWmml7/Bewe0eW75usvXfO833bxGsjBNUi5TOi3RdiRTkXRFaG5ItiU0txniqUmmA12DqlDJopSV76bKuf7HxgEoqqrKGL+iaRpcZTEmgTZoE+g66HrLy5c3/Naf+iFXVx0/+OEdxiqBEIwi4TM0IM1h8zwtDgAkyrTWLoa/3IwxWGNR2gj9UMklHVOEpFGle5ZtVCe0wtLxqFbPsWwFK1VIoTsmpNkrzBjtUNrQdpYf/PAj2rbhp3/4OtctNCOaKUV8DDwzAUAt3ZhJqLHlv2wJQjnWJNIV6IhOEaXBOstu33P34ppXL6746OMD0zRxc9Px9u0D7+6f+OLLr5kmz+cf77k6XKGNw7qWx+PAw8OJ1++e5GLVdikmC2NLCWvLBJw13N3csNtf0TRPPDxOTD4yF8xXSTeEVpCiIyW7GMhFf6hkMZtg1BiL1pJNLXpQgJ8nop8Zzs+7iJYAoRj/xYIs/9iQYS5x+Mvo/zKDuGCgFCewfZ4thJRfE8XoluBm69xS3v/7fqbsYzVcKjdNrl8jLZZdZaey3c/7sE/+5qXh/DkYUEr4qAm6Jag91fWPMFc/QDd7jHVLFvQ+yUcXqwzLeaG1gqcH0jgQXv8twvEr4vCWePopKj1h0kRK4SJcf3/dt8uyhHlrMrdsavMo5Rcl7UBVRLcn2pbgWkL7Qgq6tie0Av1Eu5PXoklRWECWCoNBK4vR1ZIxq++gfPW9cABaabq2xTtNmBV17TI+DLbWVBXcvWzY7SpubndcX+9oO5cjVUVMCpUyDpvE0IomzGq414LkJlpUKmv8CBdeXxjx3HyUkkT5+a+Ff75QKLX6VuMvrwUQKiRJOpBjpovqfLPWZAqrJSVpSJvGxNOTaAiFKDzoD91WeC1fjCqu6HGu3i6OTGnh4OcGrRgi5/PIw+ORvqs57xuaCmoLV7sKHVsqk7B4/Bz49NUN+/0hF4hrrNFEH3h4OhOTJqQN1JQjGK2kHuCsoqkrurZhHD1tXaF1gDkRsuEKua1zC8ktDmDjCKJ
Ki1FVSmd8OC0GKcW4+Ilv9Fl/4LZJEstfLh/l/xXZhsV45+eWxCzxjfet3wZI2z7Y9zLUpCmk2PJ7FgZNilsHkOGKTHwvuLbYvrgeTJK+D/mc5VUblyT7+rYw/4Ik9MGbsM6GBEOAZgq42eOsp6ksSguLrzjT4gy2rlVnWCgmhbIGqFC7W7RzqGEHRpOmJ8LwmhjOkAI6zavjUiwe/DI/SqvTVt90dAkN2ogxNxWpPpBsQ2xuibYmmppYZ2jHduB2YGrQFYLxi1SHVhpjLNZI02VTZbtHep5zzdv3wgEYY7m7uSNGKX6WKNxViv21Ybev+BO/9RHXNx1ta7i+rURPJw7SEKLygicp7q7aO7mnQK2Gu9ykqcws7epmgXLUaihSkPeltBiXom2y1ba5UBy92FSOQg25CoBSlhhBl3QOzX7fc3waiTEwDCceH0a+/OKEqyqsk+L0h25Kaeqqzv0QEe9jNhigTObWK8lQrFE0Rprt6soQvOftuwf+8KdfogzUjeWzly1dq2le7ZivKqZpx/nTa1JSfPTxZ/S7fTb2lp99+Zbaau4fTgwzzLPKjk+azJROOKepK03bGG6uDtzcXKMx3B4eOA0znEYmH5h8yLIOXPD4VTaoFyyLpeM5Zw4Jgo7LGsTgIUXJfZ5RYF8+L6X3HECJ/EvWuPwasDFW3wDdtxE0K4JS3pGSXvYjHa5iBKIUiwTL1kboylmbyQfRggo5m02pSHxsnMiyelk3aIGeMt20OJ2lgXCLg6jNe8jG8PL2nO08a95GxduY6J8mGnem1ZYXewkS5+SXInyh48YUl7NBKwUxB111hWpaTHeFSwrmiemrPyQc3xG//j9Ip6/QacDM7yitZyp/R4XO9QUlAnNJ5FrKL5a2qUiCpBVR1SS7J7U3xJtfJ9V74u4TorZEZVGmQxuH0g7hPipSXIv5CrFVzjkqZ2lqx35XM/so8Kp/BjSQt++FA0CxtO6LwZabNYb9ruJw1bDb1XSdo6ol6i4pb4zSwRdVWDDA8jtsDfT7Rn/rEEok/G1R/CLxvDnpf17xRZxH2vz7m19U6hogiqYabRLWWarK0fcNu11H8FDVI9ZJR7B1Hw5TKCXsmRQTMWk0RTFzrU+IwdB0bcWL2x3Oafquws+e2Qf8NDGez5yejvhrA5XDqhykWI1qJGWtK4nkUzZQXWPZdRV9VxOOM/44orUh6oDXGpNAU+Gspa4dTV3R1BV1XUlBeA7M3jPNnjlrKS0X5IZiWjo4t+u8+QEX9dMStal8jm0wm2dt31q4zf/b9GaxhVEKFFUer3F0jrJTYQtt9pf/HzNdOCGSywXarF0t6qzKSG+E1oQUULkrOxaHkrYAFIvRFr6QnCfy50TCrMdU8oe8n8vzfUMtXbzAMxaV3AcwKx7nyDvvmR6OtPqeKUYb8UU1AAAgAElEQVSaxlA5i/dhkX8PIRFCYJzGzTEVCBGcrdDG0roGpy0GUN1eYML4a6T+gBof4cGQwgjllgMJyYyyAOHFqVN+09y1a5xE+/0LqA+k9prUvSK5lmj73NlrlmhfKUPJ3Yx1iP6SwRknDsAKeURrxRwik4+Ms9w/d/teOICCkYtNz6JPKmKsYrevOBwa+l1F17miXLwwHSRrzZE+q1xzSYMLtCMic6sjIEe9+qIovDmmnNZto/uSBcjzP98JlNdvv2F5uVIWZcR6iodPOJeoa0e/6+h3LdMcqGuNsXKzz6AqKqWorF0MvkYRgnChQ1ZKdUZROcOur3n14pqmsVwdOh4ennh4eGKeJ8Zh4HR8wk81NBqTUwdtFE47lDLUTlPZ7ISVpqstu76hb2vOg8dP01Iwn43F5tPeGUtT1dR1TdPUNPUoDKxhYsoOwMeED3E1+hmCMzqtoPT298v3KddvVDYSiohWMUf+hc/+/O2SCZTPgQVG2b5wdUUXcM+mtlSi8jUj2BRyCzEhBhEgTEFYa9bQto2saKmHoPD5uyudlvbZRUE5pcz+yWtI5tu
luDKDkOdk7cr3Wrt9L0GR5atfZC4fukXgNCkex8j9ODM9PDHgmENg11f4uiL4NfoPITJNE0/HY/4tNnUVBU1dY60j9geauqGyDtftMU1PMgmml8Tja8I0wvQIPJL8SGmUE9BdkwrcXHrxQLKzpMBYkulI1Z60+xSaK1JzIPWvSLoi2o5Ca9Va6o5K64WjIDUr6apv6lYg5jQvv8PsE6OPjD4yzH9MHABAylruMSSazuAqxdW147PPr7i57bi5qWlawf2UglnBPAsvPAbEmKkomJktqVOVKZ16EXZbwILFIOdagDYsSiEpY56X1+iib1O2X0zDKhjr+oFaWcgKjKDQJlHXmr5vefnqGu8D1hmeHs+UyON9dvKvsmml6OpqiTZnY6Qr2UemeUYruNq37PqGjz664c/8vT+kbSturnf87Gdf8pOffsm7N+9I88hwfCKMO3R01FYRrCLlZjKlNa0TZU+lFdpqwqEGEq/v9szjyI/nEzFm+MK2aFPRuorDruP2esdh19F3NcdThXMGNJzOZ86TJyYRhysZWZH8FvmJImVBXu9VAUaVwm+OcClyExQA6cOzq7ItMXC6/NuqBfQePpyKVAL52LJhJi3n6AoBrdi9yhYn+MAwZgemoWkaur7n+uaa6CPJR+Z5Fp2kJdvbGMQEUZeu4i0XTLKpbU0FpTLLRmUZYhaZD1ntzbbJxpbPe0YByyfDF0PNF6fEF6cJF7/CvX2i6zpOx0eapqauG7Qxy69+Pp958+YN3s9ST5um5RdoGpEsubt9Qd/t6Lqem5sX1HVDdfsDyQgevoJpJB2/JD1BOj+Qo8XcYi5NWilFklFI16EGlRu4XEvqbqC5Jt79COoduI7UXOdrXuyQpgSmAsmJBHyicg5rKlzl2PUdSiXGweOD1AOHwS8OYAp/TBxAStKwE7Kqo6t6druaq0PL1VXDfl8JzVPFRbIgJtDaZhmHks5nihRZl0ebxUAszVxl2ziAki3kXCTTIfMlkdZjhLSoXOZP/SXMR8ZYt5HhcgFll6OlGNx2DbtdwzBMdF0lGkYhM3M+cFOQB7FkwxKL4ZNGKmsUh13H9VXHy5sDH7+6pq4sh33DeOo5P3XE4UxbWZxSolGkVNbsUUSlCLHoOAVAoDiroXKarnHsOkdba5wKBICYMFRUBtrG0bU1fd/inMkZmRTwQwySAXih9vqU1T+9z1m9yj0j0ri3ZHdxxW/LKpuiSQTLLAFFGWDy/O3nOYFvmv8VeUqb161QY4ZWNob/QlQOKYbP8yzzF5ShWjKnhomJOUx475m9Z/Yz3gt8ltLGNZaTe+WAbkCe9dwWrZ9CgrjMUtbH5VEpGucPSM8rVIakeYo15+AZvWc8nTGT5zyMzPNI01TcXN9Q1zXaWKytGIaR83lgGM4M5zOn82mB06rcXzSeR7qup+97QvT0/Y59f6B2tVCmm440NSTjCFFJLSf7S5U0yWYKqq0zpdNJj4J2JNdC/4JU9cTqWuicqoJY6mwrCcQYtehgpZBy/pXPmVyDjCkyBnFmPsA4Fx20nNU9c/teOABS1n7xnhA8VbVnl7H/q+uG/aHG2IjS4gCssVkH3qJVIiCRhkArAg0ocvXc2pxWFfhmXbSCYwoiVIZg6DVNX6LHciKnRd+/pG2wXqC/TGPGZbdmzgo0GGfoupp+13IeJrq+Zp5m5ikyP6dQWbKb/P1TXGEtk0/IQ99ye7Xnxa04AGc1fecYTkdOTz3j8UhbOZxWGMQJGKVIWhxbiCHP3AioFNEqYQ3UzhAb2HcVXW1wOkCQHgOrAs5AVzv6tqbvGpzNxWGViCkQYzZisyeiCRFCCMyTTIkjQV0HEd6D7OSF85nKIKANBIjKF1xuQDNK1v7Z23sGHVZHsEIhsDWWF46iFC+jnN8oyUALc6m8pphoaWbzWS/LZQfQUDc1MQT8xNL1Ps+zrFtMkEw56UmLiV/Si+WzLuDQBev4Zod7OZ6tCyiOdy0CP8MBIA7gFBNj8Mz
HAdSIUvBw/466rogR+r7HuYqmhfMwcjqfOT4+8vT0yP39/bL6xhiM0pxPJ/quo9/1WKeY5zPWVSgrIpKu7qBqSMbmrBOSzgKVaDANylQS3ZsKbE2srkimgaoj9S9JtiG4A0kbCfgC4lyskCDMQjkv9aiIChs3mxIxeVKKTH7OMKhi8pn0kjPZ527fCweQUiISUUaKHXev9nz62S0vXu25vu1pOkfIHZti1B0pKeoqCqNEzZnOKSeu0BqVFFeUw2ib1TrfdwDFcMN6cRbcZy1wvZ9Gx5j7ivK4qVJDuDQC289aTcA4TgQfRMV0MzTDWkPdNtRdg6vOFCVANvIAH7IppXB1kyETTUyDdCQDTVez62p+9Ouf8MlHN7y423F3qDEanIWX1z02vuSqabAayRb6jto6bB5Yo5Qol4LwGAwJFQNhHlERnE68vGlI4RbN37VkeV1b0zYVL+4cXQM6zXh/YhoTKU50raWuNDGMYsSSFmE4QCmTIZPIPHtCjEzTjNCCIfiQqZ4KZyUIOBy6rBKrRMHUGiqrcd+BFMTymyd1YbDfK7W+H3qsheESmasN9JNK0TZH/yUQQTHOkeN5olOGqtYYZYghMpyPEvWezpzPZymg+5CdSR7vWGoMKb13XBvNn7RSalegZ4ntN8DZUmJdn8kZTIxxYeF96BaS4hg0g09MXogAJTA7x0BdOQ77PSkl2rajbtocIMyM08QwTgzTtIxzrXVNbRyMMISRMAb+9vz71E3N24/v6fodbRq5DY+o8yNqCqhqD6ZGtVck16GqHt3foVwH9U4MvLYk3UiTl3Goqpf+JWWXwMsag9VaBA0z7g/SmR6iDEIKIRJ9wCaYCfjBE1PiHAIhSX6Q1mF2y8Cm52zfDweQ77URzLvb1RyuO/ZXHVVbYZ0GLxeB1rlqrhPG2NUop83sz2zQVVmpTD3c6tIvJ7zaPJZ3b7ofMxsiR+AXF3da/60vHMi3nfBrCj97zzzN0udgNIY8AEcr6UbWhb1Rhlnk1vYP3pQIvRXGk5kRxbmUtfFrbm8k+r8+NFRWSQSvEl1liftOmlAQ9kxT5LRTWC5+o2QFRYYnN5T5Qk2EtjHc3fTAy9yVHbFW6KZ9a3BWZj8HP+GNJiVPVRmclYJbjJ4YZfynVib/7jFrwkeCDwQV8MGTYsTPIsOtlciEV5WjC7WsL0qkro3oTTn3HVwCaYVsYBv1rkHAChG9F26zPWMK158V/in/Jp8PSaL5yQequMbfIQTGcWQaR9FK8jPe5xoA+TxaBAbLOb5G/yUzWI5oyWBWKE1QkLVe8O3numQLJUN7RgJAAin+x5TPGwm4Uh42H0JgniVDrKo1SxYKeGnQy9e/NtSuprU1VklfjJ8C92/vRQ9Kabr+iZ32VG7EzWdMCDjboFwP3Z04g3qHOnyMqnYS7RfUAENKWoq6eWStVgIxGqOpnMlqwWa5Fn0QWxRjwgcRiVQxkpRCh0SYxYH6mMGhXHIQ+JNfBn/+hdv3xAEklIWmcTSN4+7FgbuXB25uRWZYG4XJ56REzRFrEW3/PDTd2i1UsxawNmD/exBNPoHfZ4Fs2A7lTmspDJfBKCUaK4Zmmma0loarqq7kR7frxbSN4k6nE+fTmaZtqJzFVY6qqvPAZ9HmT6g8Ck+hlF3E7T5kUwpMnrOsdZZeVlFE9bqOu5s9n3x0wyevrulaQ2NF3dNqhd05+qbnZpeNcAg01mONFFe1UmAMdV0vUQ4InDf7KRswRW0Tdmfomh2RSIqB2WdRvjjkrtwBrTXzXBMD3Fy1jPOeTz66prk/8XSauX+c0UZhTSWsCxOXGcpSG5hlQLz3xJBAK7lPYI2omxqjcEbR1Jaubmib6oPX9mL7hlzHFk8v2WSJ9tO3vL5E5vKOhbKZ30+CKQjVcZgi5zGizYw1A4+PR8I8kRrHNE74eZJCf4wioawAtc6yBmkuXFD/8nhLcMgnj+QmGVZNyzMUQAqKG8g5Qc7EUxSo7zkeICWYQsS
HgA9FA0r4MsKCKpIgpb6TmOeJ+/t7hmFgnnzOeyTbrquarulo8vUGidmfSASe3r3l9HDP2SlUq+krQ1/dUd0d0M0Bc/gEbTuU7bHtHdo1RG3wKUgNLKRFdro0OCojLDljxPCjFIEkPRkh4XP071MkGmHOBZ1ARUJSqLBaj/QeDFBc8HO374UDIAmbwTlL21a0bUXTVlS1XSmaWoqzKmPEwn3OJ6YSwxPz/Fkx0KvBXy6+QuGUv3IZweS0vERGsJ7wpTCX1n6F4FPGWQOn45BloSuRmjBCF8tXVo6E5LjmeWYYxxztgwlGfuICvObPkRnFwnTRz+z5XntHxfhrlTBG0WaOft9VdK2jqWROr1FahrY7jbMaZ8WohzALd18FVBSWjmKdgaCMyEXGGJnnafk+Ml0NtJUml5gUakzMszSzpKggadx0BhJJGbq2Zt/X3FztCAFCPPHwOMr3yL+HhEN5YkjOAJe5sYvEdMRomQVtjaiKWgUuR2W1+y4cwM+LhsnWNy6PS0RP2oIj67m4zU9LFlD+HUKU+RaBLHMeGMeZ4TRgiNRGfqeUKa5b+ugi4ZzWxq8LkOq9r7AamHIOp4uXFJz/m987LdnQ9vg/ZEtkWOSib2ULX6VNc6d8x+A95/NpYUEVNpOMIDWYLPhorUErmOZEiDPDPDAGj/aWo6owtqWyPew/QbUHzOFjtG5RusVWe7StCEmaCqOaUVyq9ipVNKoyAUELhysWxAIp5ockjjnlSYRZ5y3XgciI9LfXjp63urJ9PxwAgi/XjaXftTRdTd2IPMJC8c4GcjvUQYyxzhl1EphAxeXkW5uFftmj2J7erElCNsoqn/UJGfJyOg4M48S7d49Ya+j7QJ0jSuvMuoPNZRxiXAbExGhFb6h8alE3zF96GdP4DAhIKair3PFsNH7WpAjOanZdzX5X07WOtnE4I8VdkydnCV3V4EzCG4X3kDuQ0CqjyoqFgeNToZpLei7o26YJTy+2Dz8nok5MKRCy8uk0DfLbmhrXNDS15frQM8+RaRLGktHioMjTygKi+BiSDJrXGKwSiMVoTZuzyr5xNI3oQFXO0DhLWzm67yoD+IYTUAt+c2E6L+/k8bfAPYu+D5s1i4nZJ7xPhKRkml2IzN4TgsiHsxj/zWcthqQcWVoi/3Kq//xibW56WuQ13ns2vfed83uWZrNnYkAFytnqOC1Pkgv6+cQK3jPPM+M4ZvZcXF5aYOIYQ5bdVsvcbwj42TPOIybBULe0qiFUV6j+JbrZo5s7tKrQqsG4TmDIMCMB+zKwUuwSpceozMpY7UAp7MeUlnnVRQa8yKREtVnnrVN+z/g/Z2nL9r1wAForrq5bXn10zatXt9zc9HStxTkFBFLMXb1qDZShGPjc7KXFsMW0cp2F+18GhKf3PPT2CNar4/LPxRjnnoCUeHocmMaZd++e+P3f+ynH08CbN/d0XcOLV9f8BogTa916kS03JcXHymV20jo/QBUIyVnqygm1TRkUz5ta1VSO3/i1j7K2kuGrr77k6UnjnOaHn13z4vbA1b6mbUyO/DNWqSrh1StFEAoDoFFGlAkr2+TpXyxr48dRHBsRbSHHsaToMzMze1EFBkfS0lAWkycGz/n0xDicMK6migENfPbRDU1dUVnL8TiLzETQWUFUpL69V/gA+17gt76xVNZQO8vVvqVtKm5urrDG4H3AT55dv5Nb99ypSgVmLFb22w19KlH/1iiX50i5UztdvH5J/9HEmBjGwHlMzLNCmRbjHK6qZbZBXdFUjpE5U33nxXGsNa0EWT9ryYW3KOnmeEuEr1URY4+s/cG5FpDKb/z+OuRmqfg8GmhCEZI0TSmlszjjtsFLYaxMd5tmz/D6Nff394zjuK5vQupCKvH1u6959/SO9rFdrj+TbcP5fMJ7D7bj5eET9OEF5vAC3X+OMjVa9eIAdIXRonMlmY4nakNUQbB7ramNkDeIHrIMdUwBFLgS4WsIWi2jLj0aGX0FZejNz+t
RfN8JPmf7XjgApaBpRQrhcNVRNy43c5VCTn5d5iwX9k15swZp0tCiBFjs/NoxumKEZUubXZBPposQQ20YHUVrJCbOp4HzceTd20e+/KO3HE9n3rx9YLcXHvv5PFJVeaoWSyy2bCJtbJe+hG1doiiTijhaLnZnJ/ChmzWau+vd0i06nh5IYRAZ6n3LYddSV0b09sk6PcpglM3XdJLZyzmS18mglME6ydq0AuErZ+gip1/r1xLHu9RnclZDHjuomJf3eT8DCZMSylaQLG3t2LU1T23FrqvwQTF7nbVuZH6ENyIaJjUhw1VfUTt57+1VT1NX7Pc7tFKM48RZjfR1xa6u6L/jDCBlJ/ArZerltLtwGmnzb6kH+YDoXCWFNhXGilaUaEbZ3N9QznW12dd7n1OeUOsHbo1/Kt9HbSCiVEx9yQPyq97/rgVeyufB8xxACSHWNH5bYN9e3yF4zudBWHahjHAva5BIMTKkATUr5uAzCcNKjVEpxkkgIx8VuA5V7VDVHmV2UthF5Jg1JjdvKYxKBJUH16sIKgq8aAWSTsmDChmW9JIdmPW7hCRDbGLuzQkpU9pLnWhNYC7W95tW5cO374UDsM7y6tU1Lz+64uWrA13vsBZQQuVLEaYpEYOMVQzBY52lbWuAS0pUgd8vNrU4hVXQilUnv2wJiGv67GdpoHl8OHE+j5yPA7//ez/m7dtHnp4G/uiPHvA+4mMkRUvTnHn75gGT2StL6J/TQJUU+/2etmnRebRkGTqvlVoi/zZ3dhZNEP3NL/RLb33X8qd/80ech5HzMHN+ukeliaa2fP7xLXc3e/q2xhmDUQanKrQ2GOOygRBsXSuF1RZjBMoxyqNUJCXPNM744AlBoh2BuWZQK88+saBHJBIx5I7gZIGA0pFplgyCKXAaIyFpxmCYp0RXw49++AJrHHXd4zIdOHjPPI1M0yTD4LXi0Btqp6gry67rRH9IObyPGCAME7XVtJVh1zzvEigRegFIEnGLhGxKpevrSSmPUi9/TGu9KT8O66sFL0YRQ0UICm0cu0PPzU3L3W3LqxcVKpzx5/vcNxEIMWWWVGbDpUvOfhF5uziufCzAAkNKMJopi2vszxJPLS8vbiNlmRa1DHH/8E0RVdG/1yw8JAWudlR1I5BJFjkszaRLUJWzqAIJ+CjMtTgm1KQyLClNbj6IZHw3BUgOUgWhJoVautcVOBepXMDxiI4eE59wjCQ9ot2AIuCMoq8LSzHJdW41rrZoq7HOLm7JJyU9GglOPjHHxHEUSugU4HFS+AhzUKsisIwxfMaaXm7fCwegtaLLg136vsE5vRR7y/kYfModjUL9qpKiaVZSmi4yADlqBZZawOXjUkxSyzzRZcswTcEb/Rzwc+D4OPD0eOLx4cQf/fQ1X38toxvfvptIKIy1VHVgGGbOw8Q8FTnZtNm/nHBVVQm+nil5SweyWtkC1lmcy6JQ+fahm3OWFzdX3D+eSPEoTqYSzP+wb9nvWjH+WmGQngSdb1lEWbB+rdDKYK0Mb5F196SgsjiZdC1KpC+OWmkQapyctwUFSlFmvaYSVQrWRAxJxL1IhHkgJMUcJDV2RnN71VJXDfvdFc46nHGE2TOOI+M4YqxCaTh0iroSuK1tWhKa86QYCcxaeglKX8N30QewRtvpAgW6fMXKmSnQzPt7EKO6Fmy3fQTC5TeIhkxN1exouo5+19H2hjAEzsc8e7ow1cpxpQuftNl+HsawOoF08brtl1uN7PY7JAokKH95Pk5dGjQ3EFOWSDZGGtuK9HsoTKEF01pTqwKpFex92fs2s1BZchwNSUMyELU0XkUhGFgVsGqENKLSEzCCGqnMiFGeyip2lRaNLAvGypztqhV5+6pasygfwUfpd3iaheVlgHFODAqGWT57zuuc8nF+d+b/e+IArDXsDx1d11A3dumQKzFFiInHxzPDeZYU/nRmd+ip63oZ1A7kEX9xcZBSAxD4ZBwnvI+czwN+DhlusQs8JLKxaSM
ZnDgfB8bJ8/b1I6fjIEXf00z0CpIV5csSJaViwHwu8MYMfWR4JCtwaW2z0RdP/r7KaJEp0HqN3J6jVxNC5M27B75+c8/rNw+M44yzVrKMuqatKqzWi4iY2sxKEBrgKlQFMrtXK0lbY5Zr8EHmLsswHqEfyvWUi9hak1Ig5eL3AmVEmKYJH2ZCkNnCs5cGopBi1v/RaOPyeMqWpq7Z9TVWaYzWjCmBDyTtBVbTYvhd7jPwMeJD5OFx5jzMDOcz52Fk1wtD5LvoA1ggk4v7DXTJVmTh8p1b2KWYBoE9zOIAQjSEKFxzkUJxuKrBGEdMmU+e1NIZr/LEhyXYYTXMl8e2ITcsT6WL+4ticclSvgX0KXZ3sbmbXT1n2/ZXXPw1xYXxE4KcV1Nm/mhd+hAUFCmL0m373rV08dXyNZoSIojHjE0nDJ4mzjRR0URNYya0mjHuLCMllafSAaMizhn6NsO4tVscgGtatBHGXDkEH3MGEMF5KfJbLUqf5xmUjoxecZx1zgRgmFdH8F3AQN8LB1BVjk8/e8n1dcfhqsE6wc4kiLbMk+enP/6aN28eeXo68frrt3zy6Sv2+wNN7ahrizaRYRwYhkHof0bTth1VJVWXh4cTp+OZn/30S56eThhjqVyTI16hPUIizrNE/t7z9o0Y/od3Z6YpME+BeU6EoFHJ0NWV6NWEEaImzJHhPDBNc3YEcj/PMylI5HG4uqau88Qf4uKAFJsagM284UyP/CZn/Jffptnzf//tH/Pjn3zBT37yR3z+2S0vbvfc3lxzd73natdh0RhVWtTzXITSSZ0pduIM5IIiZTpi5vtP08jsZ8bRizojZJxajJWxhognpDPj7Bknz/k8E7wwWErGcB4mZi+9FbMPJMQh9X3Pvun4/NNrmqpm3/WknDs/cOIYZ6wfMc6gjKZvDHVtRE54nDmePX/403ccjwPTNBG9p+92oBRt23wHZ3Dh1WwLoZeG8n0ncBHdX+xH1jqkVVZ79oY5atAV2hpc09PvrrGVvO48Triksa5m9mSNmLDUXtZPed94r9nIonJL/kOpZai4fK8ybKbk3ZJsZ2hpw5STLvk12/vQTc6j1eCtTwjdc4yRr79+LddKzrCLRtIqRleMe0LmIRQQ8hIluPB7CVTy2HSmSV9RpScO4Q2dT7QmsbMRZxNNI0q91mqqSup61inqrsK4iqY/oF2Ftg5TtZnmqSmaTCGqNVsLE6TA/eOJcUqcp8C7Y2LwireD5TxrHsfEF49poY9KtvK87XvhALRWVLVBGyRSTEnsTBIjM5xnHp9OPD6eOD6dOJ1GhmFknmbBpK0mzjMPj0ceHh5EU0Up7u5u2e3AOieRfUicngYe3h2zsZsWyMNqwSmC91mXKPDwcGQ4j5zPUx7VmDLDAjHeyaMIKGTASAwq669Mi+JmDIHpPDJNM/M442yFQmGdyc0o+bZgANmcxEAKSu6f8TtP88yXb95xGkeU0ez6luvDnqt9T2Wt8P3T1uhvb3JFq5yNSKSXSJRaTG6E87kjMwj2nN1ZLlh6mHzWaZco3/v1Ns25iSuKU4m55dHkbt22a+i6jr5rqWyeCxxFeC7GSCKAiqAjUUtvxegTHtnfuyfP8TTx7vHIcJ5JMVJbabgzxm6Gy3zodhn5F+jj253At2wLjq4KApnTfZ1vSubiJlG31a5adH+qSvorjAZDRNsqz1gOlzh4kW9m0+R4cWQZWlAF/tw+s60abCsaZf8r5LLSpL8DCmg+phCCKP5+A1KVYwqh9D3I4vkgDYtb6ZaSCbEYf3nv4s6UEAiM0exbw6GeONQnDk5z5WYqNXBwE20NTQV9KwFa3WQ2nzVUeRa2cRVVJ30CttuhjUUZg7aVLFBu5c18CHReL2VEgrtpHEpLv9M0z1idQHlaZ3DWMFEzzoFxliFJz92+Fw5AaYVzIo0ckkdHwXJjhGHwnM+jYPCPR86nIWO+E9M0Y60mWMM4jdy/e+Krr94wz8IsMcZhjKNDC3M
owfk08nh/lMg6WowSB2C0gRRkhrAXWYHTSSif85zDiLQabGnrmFEpoJlIyRAC0oY/zwKFZC7yNIycjidOxxNd12O0ResGq81F+i17lgsnhUAMKXPkP/xCGidxAMF7bG3Z73turvdc7XdUxmIoYzH1Oh6zRINLdlLu04K3xhiyhklYNGdkaEvKvQtKIpRZOiVDkhRdinVi/OfsOEKWui0t/EppnFHUdc311YG27ejbjsqIKEX02ShET0ieSCDpBCoRFJx9hBAYp8jrdyeeThPvHk7Mk8cZQ1t1GO2wtsLZ74oFJNs3AJKVg8kSVbOJODevXx8nFvkGdJbQ1riqxpmGpm1pmoaqAmcTxoBVCZbW0ZcAACAASURBVBVrjAlo4ylp5YXR/1bYQG1s/EqhFmOrRE57c1zf2JJaCsApj5IUP1Aawp7hBFLaQIprH0LJmEnk80DYBaKeK3AQGwewHHsq67H+TsJug65x1M5xaDU3zcBNrbiuAtfVQGU8h3qmrvL0utZgXTb6zmFshWt7oaTaGtfuUdZh6lY0f8oN6fYVOfCSReXGxSju1VWWMke8cyNOSRPjFETNdNYtx9FzHDxq9B++tnl7lgNQSv0B8IiQFnxK6R9QSt0C/wXw68AfAP9sSunt32k/xmiaTmCcECdUMKiomefA2zeP3L878vr1Pe/ePOamF9F/meeJaRL9/wIN/eTHX/D4eCTGiNFWTozrKxTgrGUaZo73J+Ypcn7yOROw8lqicNKzkJX3nhiTYPJbzn4p4BJBBbSaUFERZs/5fOR8PjONPufBivE08vD2gXev31LZhmmYuNN3tKbFXEBAuYidAtGP/NV/71+lcjW5CPxbec1/pfWdvOc0TVztO673N/zwBx/z2atbrncNnatwSoOWfokC+Sxzk3OzVTH+ADF4fJg5D6NAbtOZh+PAOE/MPncwJ40KXpxEvkCVEaceojiJcfKM0yxQUJTI31ipgVTWst+19F3LRx+9oK5qnHY44QUTpgmfPCHOzGkmaGmlnyKEkDgOE5OH83nmi68fGc4T9+9OqASHvqXv9vzr/+a/zWG/p6mrD17bzZUgd+9j5OnyNekbf1ersd0YykQ2Zhkf9lETk6VpO1zd09QtbVNTOUXtEEkRDCmOaOtRegalN1h8iZy/5dhKXVVtsOUspKfyPN3ivFLG0Lc5j8BAJViQ7/Gf/M2/gqvq5Zz+0LWVDMDL8Wu1oAJGG/q+l6lnSmp2wzByfHq8+Enk88m1tiIWWRq0hAHUtUIZ/uSuZ9fWvLrp+I2PA9f9wHU388neUlmZbuecwlpF0zmMq3DtDuNqtG1w3QHtapSt0M1erqk8uB5VZlLrnM1lgQ0lg6+UjpKFp0CrIHpL5QzMI8F7wOMTXCVFvet4OEcehsTT8PwqwHeRAfxjKaWvN//+y8D/klL6a0qpv5z//Zf+TjtQqhg/OYFiEAaJnz3H44nj8Yj3nkTCGk1VNdS1y1PEBC4pLeMgapDee86nM8fjibqu6Jo+M2AsVS1dfCFMJKxMAyqRzob7LBRNcQAFHolhjWqMURAVOkc+MtQ95E7fmLtqsypg0sQAp+OZhKK/2uEaR+n2JV9YpV9BoprEn/+n/zUq0/FX/8N/8f/8kPV11vLxqxfcXvfcXvXcXh/o24bauSXiLzQ7ifz1UpxebMYmbQ4h4HO35XkYOI8D53FimmcmL8Y9evBjAYERjLR2ebaxFMZDzFIQcyBmyqA20htgrKVtW5q2wWWJXqVktq3YykDRSk9AVJqkDcMoImnvnjzDmBhGz/HsmaYIeUxiXbX0/Q6lNH/j3/93+TO/9Ru8+Ht++4PWNi/Jxf37l6T8eyu/vIGHlveumHrJA2IMeA8+GGIyi+iYynNp5fwwSwCjksg+x9IMmSPey2NJS0ZyUQ/e+oUkTJlV+XMlj5bIvlQ02BzxwtjLO/5z/8S/RF1XpDTzO3/j3/iwtYVFTI1k8Tkw09pwdXVNU9dS6Peeh/t
7Hu7vUbmOVjpwrVmlIkrvjbUu99oY7q46+sbx+V3LvnXcHmpe3bbsGs2ullqSNQptTe41MiQlurdKya0oEa8LXUaFlXuJohY4r6AIer2VGoWy0gCpQ8K4CtDEMKORLK+3kdTITJSqeo5IpGz/X0BAfw74R/Pj/xT4X/mFP/Sql7EIroXINE68ffOOd2+fGIeBGALOWfq+o2lqOenKhTJPpCjMjhgi8+S5v3/EOZka9sNf62m6hv2h4/qp44Ejb74aScyk5ETfW21+DGDbrFV+3xhFcVJnKprSEqmR8ki6cWaaPNMUqIwI2YUZhrPn+DQyh68xztDuW2xloAXdlEHzbGisWYE0eqIat4v1K61v29T89p/6kTiA656Xu47WGlzuwhWnYzL7Ry8ZgDS/yKlaFDxjDDw8PnE+n/jyzRuO5yfOw8BxFOnhYfaiajgFxqeZ2lXUVcX19QFtaqq6YRwnkh8ZhomnpzPnwaOUjMZramloapuOq+sDTV3JjAAFKUbmMEpGlyJz9MzR8zR5xjkyzPD6fuZ0jry+nzgNHu8T0xRQStP3B3ZNw4vrA599/Claa/b9ToaAPOvc5T2YR87n9V8rm+3i+cWQrtkV2fhCYhwmTgNM3qArKf7GZInJkqIihoBKot3kjAVvxPhHKQ6WruK0Hg6FBn0JpKdiweVYcwYQs4zBBQCUyhGn5aiXmRjF1uVrJ24gmOesrdEKIShbiYZTxLmKTz79jKvDnq5riNHz1Zdf4v2Ic46u66QgazRNJUNgrJXmSq01VVXnGcqWjw8tu9rwqo/sKtjvaj7/5ArnNJXVVE5vAtRssF2Ntg5tZai7No6i/ZWiJ4YBlQzgM9AvktExJnwstFaBOtEaZcX4J7LjtQ60o54ifpqZzkdSgEolDibQVC0H1TOp7hct3y/cnusAEvA/KcFEfiel9NeBj1JKPwNIKf1MKfXq296olPoLwF8A+Pzzz0nI5Cpn1YIHJ8i0wSBNYCEQtHT9xVAUIANhiQ6TaNMYQ7RWRuedR47HM9576qqmrh1d3zBNE8bJN9AqoUyZS6xyC3skbTD/9SJaG/SXole+0hZGQb4Q5+gJc+Dp6cTjw5GH+yeqacI4w+l0ZhwnrHVUdSlOSapelEWVUvzN/+o/KNfUi7x0v3B9t2t7d3fDJ7cHdn3FrnE4k7tF5ZVLtJ+R/iXFF8cgr4p5AtU8z7x7kEEbr9+843Q+cZ4GztPA7ANj1uZPAZgVdVVT1TW73Z66EcrukCa8DwzDxHkYGMeIMQljJTKTMX9ZVE9pvJdOSrJPTMBcJkT5mYengfMYOY2Bh8fIeUyMYyR4lTWPKipruLs6cNi1vLi+4u7mGqM1/9y//Bezgukvv7bvr+/hcPPNK4LVcF5g/UuiqVbMpexzxWtICcYxcDwKJbDeaYyyTBMoLVLdutxypaDo2xTNmxTXPgKVlrPrW7fFqG+hqXzM0um7yQBImfmzZgebnGZ5/3/33/8OAL/1m39/+eOvvLba2EU7R/B6MabWiopu0zXc3F1jtKJtKioLdV1zOBxw2ejLHGDpHBcHoKhtJfr8StH6CZsCtX/CzjON3nO1/xhXW4zTEqSpBBmXV4pF0hnjSNoQtcBtguODmkcJqKLN8IYhGUtKRoYVIUPho7JyvemShYNoqCiUSZiqlvUdp0yHylPsNFitSeb58ftz9/APp5R+mn/M/1kp9X/9sm/MzuKvA/zZP/v3JZJMb7LOgl8r+4L3r9z6mLnmMQQplCoWB5By6mqMwZpE8JHzecS5szQYpUTdOLpdwzAMWCtGQthZaUkbA8AC9agl4S1pXUpRpmGpnAznKzvmWeMpF6SiD6QQlwL2/cMT1SwyF+fTwDhMVJXMzS1bGVNoneFf+ef/Cl294+3bN/w7v/OXXiml/pFfdW3/5J/4u9PHN3uqSsvNqGUsyNJksxGh2w5cFycnhd9p9gzDyP3DI+/e3fP6zT2n4cQ4jZznCR8LDzthlKFSFSh
NXTfs9gesM4QUAMU8B4Zx5HwemeeEc4oie93UNVVV4axDa5h9yCiUaKPEJNOhztPMeZy4fxo5naUo9vikGWcYJ+FXK6VFEqKpubu55vqw48X1gbvbG373P/uP+dEPP+f49MSv/fY/9Euv7fvr+8knP1h6/pbnN7/mcn9hXN83l/lnSCLyllJiHAOno+c8RVKlqIwVDSCVcCZlwbHVCaRyLiIOO5Ze402ZQY49H0GJaQoqtPUOmwRhS5fkG8b+4oXLLv6pf/Iv0vcdp9Mb/tv/4T8C2P3iVS3Ht66tq5oVyUosQ42sMVSNo84OoKkrrq92vLju6bqW27s7XG6mbJdeIUNlRe+qMgarNDpEpq9f408nzl+Dn87Uas/V4RNsY1GVQdcW6e6O5FbG3GuxyX6ARBRRuJTZgUqhvVmpn8YBhhQjKEvCknQuCmuDshkG1uI0VAJbVZBAKZs/RWpgIjGdawzP3J7lAFJKP833Xyqlfhf4B4EvlFKfZC//CfDlL7kvCi62PbFK9948zYzDSDAWq6SYG30i5LBwHj3TeWY4z0xnzzxHYhg4HWeGc+Ddp08oTC781Bwri1JRUrBlvNoqYzD73FRGqQUIPBJj5lfHDAcR0CairMJVRrBua4gxMo0j03nk8enIcJ6EFuolcxA2TJnVKo7HGI2rHG3fcHd7xdW+J8ye2+s9wLsPWV9nNC/7ep1drYoGT4Z5ssHXSx1ALwqeKbMvHp+OvH79hq9fv+b3/uAPeHt/z+PxSUZBxoDPnHMfxcl2Tcv+sOf6+prbuzv2hz3TnOG8h0fevrvn6WlgHL0U4I2I4O13HdfXB5yTJijvZeZCYQdNc2TynvvTyGmYOQ0Tbx4mxikyTpF5tsSgqeuaprJ0Tc3Hr2447Dp++NlHXO169l3Dy5s9n724xhnD/uWLD17bdbuc4VuokEtd6aJOUNJ/Lp/LWcHkPdOceDpVPJ1rzlONnl4R7YE+9WhlZVaDVmgChED0EZVmSJEUwiKJcGm8WSz0FvZXy/GyZrPvbWsWkxvOvuHtVKaayh+6/oqUJpqm59d/+Fu8eftF/yFra7XiRa+J0RATdP0dVd1yc3vDn/6Tv8ndizs++eQj2rZGpYhJkaqq2O33WegtN4mW9CszGqx1ci17z+ukmc0DxzdPDCHQ6p6hu6VuHa422NZmB509pxJ+luxz0ZehUMFLqlrqJSkHURLBB3Q6Z7hHo3AQDNEbsGVKWBZQTAltLMYpTLMnmgSpwscd2jRg7P+/IyGVUj2gU0qP+fE/DvxbwH8D/Hngr+X7//oX7mxJURNbiEW8nxgsKT4GiAo/BaJPpKhyo4dw9P0cmMeZafT4OTH7iNYBMJyOI00z5mHyJqfLcRMKASktna3ez5lOJlpFAhUYFq31PKkqIQVrYxTWalwljVxkDvM0TbmSDzbr1xhrs9TC5aZLpFJVGCvnU9f0nI5HgAPwv/+q66vIFMFtNEqJ8qEwJNhmACXrUZBiYhxGHh4e+err13z51VvePTww+SkXxhEKJixQgTWOvu/pdz1d3+IqxzBKQf50PHE6nXMDGIvwXeUsdV3RNLWIAGYY0OfCso+J4yCR/+uHgdMwcxxmHp483sPsWTCWnTa0lWPft3x8d8P1oefju2t2XUPjHH4eiZOi2e+etbbftm2N7Tby/nn4y0UkrciDvxNzqPCxIqSOOR4w8QqlBbs2OmDUjM6TsaL3aCVS0DHFBR5Na3VhCbDKp4mkwGUtIG0PuKQHC/a5fd37KUOBmWD2IwpF5TTzPPHjn/wtgDMfsLZGw1UjhdOQFLd313T7K168eMkPPv2U29sb7u7uRBFAi8S5BBP1Wk9LQWDiEPAKolLEzM0PaE7GctSaJ6UYUDyhOGpDygXfahMkFXXcy8FSGRtIgZILSDOL/DakIBlEnICIjp4yAlT5gNDRDSmKOq7SG+2oDB8pJyq3KVXo2JJUhaLojT1ve04G8BHwu3kxLPCfp5T+R6XU/wb
8l0qpfwH4f4B/5pfaWyoa2ZuhLkDh84qBl+Ef3ib8JCPTghfvfHw883h/4v7didPRk0q1XUW8H/niizdM00zbmDwmsKKqK1LUaOUgmXWOaZZwzegIVWWpnMO5ipIUa511ZBQo21LVlqZvePnJHde3B2xWM01E6rri6upA5SpcU2GsoWv/3/beNdaybLvr+80512PvfV5V1dXd7vvwm/AGB5BBISIGpCSgBCdSIJESBQSSlS+J+BJwFEVIfEggEhJGQeSBrcRIPEL44ACKjOPg2P4AxCEkNjjXNhjje7tvd3XVee3Hesw5Rz6MMddap6rv7e46Vem67jPv3X3O2bUfa40113j8xxj/saGulHithORaqaCJ8IurJ/yJP/3HQJTiArh4HvlqJYRhkY6pHhnnprJT/Wm8Q2VOcQjkUXnSzy8u+eLb7/Cz//jn+YUvvsN239GuWp1q1lTaNu/cFP2cnJzw+hsPOTk+pm4q9vsd5xcXvPuedmFvtzviqM1KR0cbTk9OOD055mizoqkCKScOh8GG7Yz0Y6IbEufbnl0XeXTZ0ZnXP4wajDs8909bVm3DN372De6fHHP/9Ihv+tybOljm9IimrgjO8aUvvs2//V1/GMyxeF7ZPr1/pVAJ3IB8pn/Wx4T9FxQd0CwHZNjuE5e7zK4/I3IPwhn4N8FtkDSQY2LsOrYXT0iNkFtHWAlIput29F3HOGr58qSn5Wba95kYQGaAszgH0ysX8FbhyHI3jIBlBiyvdDhc80M//L2AkCXxrd/8a3nv0S9eoYr/Y8n2dF3x237d5xkI9ATWZ29Rr084Ob3Hg3unnBxtWLdzzmi1aoAyQEnN35AdPZ6DD7yHcO3g4B3ZQ/TCE5/oQub6pGUMmffuHTEcb7jXVJxVjs85zxFwD8AmizkjkKM4UTjLE6jASuTMNJ8hQdaeIecH2yyGF6M9KymPdtYl5+gQX5MlQNiAUyOR44oYhXHsGWP3YSL80PXcBkBE/gnw6z/g+cfA7/xYH7bwwOXpR4kK7AbTJKty0aSkXakime4w0HUj3WEgjuCcls6J6PCR7fWBEDxtfWzNGxV10yDJgVTk7C2Ug1IN5DF+77ahNWy6XHDvHXXl8QFCDe26YX285v79M46ON4Y76qNdtQQfaJuW0NT44GnaViOBDywBjTy494A/+Z/9acsjJP7d//D3fvm55OtQXJHyBW65xxZRgPn9BRLy2k6fUuZwOHB9veX8/IKr6x2HfsRXDY04dAtFHNDUDU1jPEObNVUdyKL8S9vdlu12x6HrGcZoobGnbRqONitOjjdUlVZC6JjNyDBEdvuOQx/Zd4nHlx37PnF+rT0HY4QsgcoLwUPbNhwfr3n9tXu8+eCMe8cbXjs7YtVWtBVUXkPzb/r6z/ATP/g/Uei2q8/9iueT7TObeKlYl3reTcrhxktRA52tMShn6AbP/uAY0xpxJ/jqjBDWeN+Q84E4dgxpxy6dwzoQcqD1gZyTjUEcSXEByJc1hSbO/n/zWMthld+efXtJFpToYY749B0OxHFy/JB/69/8T4ABkRFh4Cf+/g8/l2xXTcWv/IY32LuGvWtI1T3Er2hXqym6HoZhSn5XxlKocKYe3YhwcI6d97zjhMdOuHRClMSYI9dEBpfo1jWpEvZHK6pVy2sh8NDDxqL9UymcrxZXLSoDJyNQZGIVVZbYAZTaXROO2m/kJCF51KSwqBHIOc+XwwVy8EoXTUBcjVAj4kgxMvR6f9x2vRKdwEiZ/MPk+ZebR248nMI+0w0jU6NR34/03Ujfj+RYEQKI82QRUoTd7kAI8Nr9I3zQSoKmaUgRJAcYHdkqKQr3fbBkS9M2tCs1AoUWwVv3cgieZhVYbVqOTrR8sWmbaRyc946mqamrmtUKfFXhSima5RZY7heH4ulpJMdoj9u0fDucL9PJmHHMhQFwk2G4SQUhgnZEHzqutzsuL6/Ybvf0MbM+siop4/bHqQFYtw3r9Yr1ZqWkcSmy2+3
YbnfsdnuGMergDQI+qHFdr1ccH60tx5In2oiuG9kfBnad1vNfXvfsh8T1LpGyIyVvNdpqaDfrlrPjNW+8dsZbr9/n9GjN/ZO1Vj55vZUMotUM3tTkdNtlF1Bd/FnWNzAgysVdhPjzvtYafkc3wL5zxLQCMwDet3hXkeLAKDtgi+8uCbmicS1j3RCTJumHMZoiKV7900R0MhslZqU+e//lwCzWlaUxKIbFTclkffYmmFQgjtuuVV3xLZ9/yNatuXZrrocVQwo4X5OzNoLGOGq5NkKslNxRfJ5sbu9hj+MqeL5M5h0R3s+RUYQxDhzSSJSRtPLkpmG3aZCmYes8PcLDnGissqqI7AO3zGJsa6nmE1caRwEM06UyuCjhkgk+WdVhThNjgTgh51oRIleRqRGn/FYxZuI4MHT9s8fxMdcrYQCGGHlyccnp6TFVc6QTvhAI/USZmm08oXiP+Ez2ieQ1WTvGSB8TCYerGqqqNnhFmfe0nl+7jAmZ0ECzqTh9cMLYCzHC0EGdM82qYcr2e4N66oAPAefT5ESL4bXiAjUN3usgiLaqIGf219dcX15xeXHF9mpPoXVuN2utdDJmlanHwOvQajmC+/fv8ZnPvjnRRxx2++cXrmNqYtO/9aYuin85MMcKFgBhTCPb/Y7zi3Menz9hd9gRJSnNreGPYxwJo2ezrmibijcevsa9s2M2mw1N3bA/dGy3e95++30ur644dMl0ZOD07JijzZqz145ZbWrECU/Od8SYOPSR86sOIeCrY1zjaIPwepOJGV6PTBHLuqlo60BbB77lG97k/tkxn3/zHqdHa9oq4IjKpZ4XtBbeWWOVkLk9jlqUOEXZSpkPUIrjp1dStL4wowbDkNjtIvt95NGjwMVVzer4iKOjY9p1i+PLpH7Hu4/+X9LwPscreOueULNhXR8zroR9N3B5vbfEvNbOFyU+wfnl+6doZOHHi/Wbi1bapBJ1T0qf6acs6JQn53cyfJ4b5dO3MATJB66rE65lxbWsGF1AnCf4yvJp1VTjH6ow7/Pg2QXP3nv+GfAoJ95LiZ978ojz3Y7u8hKGEek6+nfeJh8OcLSCtuaqWTE8uebQNAxVxVt10Chg8v3nuc4qQfs7Ly3iTQuRbWZ1eZMvtHpuBb7VKVbBa3mtwd85CzFlMo7RSOMy4GtPs25wlaNafcJVQC9q5aRMkOtNYYBUsTqn2X8d8OPVojqtOMBlxGm5W8yJmGxchTdqh1o3icPN9NKSNDT0mrBtVo3ehYNMHb5VriYKBO+Lp1wurlBqggVIWfBTKG8YvlUKjcNA3/V0h4MxlGrHYGhrXJg9KDtT7fSrKpo6q0d8eoyI6IzT7nbpfje349szYg5/OQL7b/H80STsEEf2Xce+OzCMIyIZ7yEY/puzluM6p9j/8WbD2ckJTdviQ1D4qOu5ut6x3/fEKBoseKfd2cdrVuuWUAfEwaEbOBxGDn3i8mrAVw1Hpw0uaDfmpikFrNrEFjwcrSpWdWDVBD775j0enB1z/2TNqlG8nzLNzWnlkzjD4e163oqrZpawSlWWHvOzEMuNvxdJ2ZwyQx/Z7Qb2uxWHvWd1XNPUDau2Yhy2xPiE7dWX6HfvwnHDuDombTwptTYjY6QfR9Mx886avmpaT0UAk++/PJOn31uwf/eMMVgq/5LfcAUqumUQIDh639Lnhl602Uq3rFaweT9X580ODuBh9I5dCLwPfFkyXwYe7fdcXV6Q33uf6tDh9gfG9x6R+x738AyX16RDR3cYaMWzxrOrKnpcYYqezmmR/fiQE9WQIRuMYf6X3vHOI2WOia8RgvoMGSQncu5U8ZfhOg7wnlDb9MPq1ewE/tgr5cT11TV1bVU0it+wP3T0nU57SlZxo/h5Rd1WNG1NTsI4RvN0PaEKVi9syU3vbAybTN6wD46qrtls1lYlpHNiJYuOPzSP2E3VLXny2nypAphcOGvPN0hK5w5EdrvDNINg2pUumHWHcRjp+qFE1DjnGHu
tYtrvDxOdhJaI3tIATN/B4ua3f1s4auXZnJWmue87DocDh65jTIbb+0ILLNPMg/VqxfHREScnp5yenBGqmqZe411HTo5hSMSYtYa7DoQm0Kxaqrom5sy+G+lInF/t2e+Vx6cbYRUC7fqYUFV6XxRjiybgq+A43dSsmsC6qThet7RNpSP7wLDaWQrFYwPmqpivNHj146wP/Iilsl+8cInFowok5ah02kOPc2stFgiJ4PaQIv3+Xcb+MS4fqIOwbivu3zvlaLOiCp4xjsQcFaZYeNyquz/8/CY4Z9JqTzWNfUDVz/I7lurPfTVd+DFXAnaS6UQYBNpKOY/auma92eis6KY1agfl64rOc/Cexz7wyHneHgbe3++52O8YHz3GXzzBnV/ghgG6Hul6ZBzwSfAZqgRNypzlzP2s2P/KjA6l7c4qE2UyOs7un6WcDCuS4hjMEisEdWLGOgPXh45hzLhQ6xhY77TcU+zGFY8jUEq4/QsS9ithAMZh5Etfeod9t+fQ7WnbFd4Htld7Li6uuL6+Zhh7BCHUFSdna05O1xyfaCv0ECPOO5280zYgjhAgVEwDY3yV1WrWgVBrxc6D1x+w2/aEqmMY9moAohmNCSMVDYezTccy4rRJcfgKF2oEzzAkLi+3dF3H5eWlct0MCVyF8/rIyRERttuOnLWrsLq4JotYAlsZMne7nmFI5ORw7vlDvbJ3p1B9sWn8BxYSK9f6drfl8vKKJ+fnXFxe0nUdGT3ebAohxZGUa+7fe8AbD1/jc299nocP7itOmT2XfmAcztlue8YY8VVgfbzh6HjN8b0T2rZhP0QOhwPdfuTL715zvR3wvqFqTgmrlnsP36JpG2MfjSBC5WDVqNf/4HTFpq3YtDWnm5q2CtTB4ZyNDyx9Ja5ANFptppGOfCW99rGW3uqz3z/DAyr0JYpe2DWdzYMQhD717PsdV7s9wb/GydEpR81I4x7hxoGr936S/e4RJ2th3Toe3j/iW77lG8iSGNPA9WGrNBySdNLy5DAURfPsSS5iQUqeoBzpXHgxMxjdyG08ffKIwhiTkb0ph+ddI5l388iYK8ac2WzWHNVrjlYrXnt4j7ZtaJrauKL0Wu+854tVxT/F80+d5yd313TvPaJ778tUP/MFqidP8IeDOWIZDgdEhDBkQvQcR3hrSHyuSnx9SHxDTjxwIAsOIP3pJ+dJfaqn2JFEQNRpnaFBgWx9AWRS9owZoji++PZjzq92bE7u0aw2rFYb7r/2QBu+xog3ZoQg4HNAQn4h0esrYQBEhGEcGYeRYRjxrsJ77NWvdgAAIABJREFUMapgbQQKQRn8SqXHerPSgc5aa0nKFs5bRUAp6XTicOKZOxmV9dJZBkaviRgNtGhTzQIXL2GeWGif0c/TCEM/KyVtUsKNJMn0fU/XR5thzFS9pEyYA6XKaL87WLJYj2HodUhKjJm+U3mM/aARzi3WpPyXz7nlvy+8Rkt4JaPEHuOok77KZvPWSOa1HC54z6pds1kfsVod0bYbsjhC8lSVJi9Bq4pc5bXyKVSMYybmQWGP/chhN3Kx7dnvR+o6cNx6xDiCNIkt1mUtjCnhXca7TIw1qQ4Lj2qh8GaAmqVyKufpPkgwt5LywgjceK44E8vnQdkhBaEC3+IrIYiOAvW+RwsYe45WQu1qHt7fsFnXvPH6A+7fO6MfOraHTN6Xpq+lGl+OY/wgKOornMLT+2T5tuXnLbX84ixf5BJ0JnB2Wg2jzZJWKr3IW03FEw6ic2zFsxPH3kaKglJ/tydnNDjcOAA6ytQdDmQRTh4+pD0+4ez0jK9vW76uqnjde45wNKBwTclDOks2L8tln4KGdOKfTDKa4KJFBJCykLKOhtwferbbHYSWJB7nK5KovrLyhenzFcb0Hym6+7D1ShgAVX6Rvh/puhGyYnt9PxKjburVuqUKFQ8enPGZz73J6emGdrXGh94glURKYsbAGuGdkHKgkkAjFYJXb9zViGjeYIiRfhjYHxR
2ib1W3DicNt0Ebwlgq5XPFeCpfEXdrHAI3RDphh6RAyKjjUrMeCszlFyqWzLdoTRBaecmKOwkIhgBqJJ6WaLTu5sK+vnXzc+YgIgSvhoGoI1wI8PY0w89/TAwxKRMjN7hKs1lFEbFpllx7+w+D+6/wcnxaxxtzhA8KQfWq2vqam0djYF67WlWLb5qOL/q2HU9j97fsdtrAvSwj4xROD6uObq/gqohe09yCgcMMTL2A91+TxMcdeUgReLJBgc0NhrU14DXMNkjN+razZwXpANeQBK4WJ+pZv7GfbkwAsVCOYUEs9hMWFfj2xPak0zojyFDFS5wdFR+4Os/s6Gpjvimb/ws985OuHd6zDd87k0eXz5B3k+8d2nUD67MEjbFvVBAzx7wIgYwTNoVCukF1MYkLTvuCVUrRuApyIM5GvoAYXy85TxSryEqK6YPjuCF4LKWmCZwWWdV4zw5BA443hHP48FxPTj80NKszmgeOtZnZ1RWyKCTuTJxt8OlxOeOzzirV7x+tOZXPXjAiROOEB6IUHuP+AZCQEKtHDylebKc7SJaUgAygx9BRrUFFgGUyCqJ0Efl+R8SnF9c8ei99+kjrI8HYoaz1xLZVQzJcgilMck9fTc//3olDEAxa455KMkkSq+DyqXylvkPeK9Z8W7f0x8G+k47f3NyIIXFUqAgwWK86gktLdwPHLqBrlOjMww2zzYlog2XwDC+LEJVvP5SOeJViYSoG1wHVuiAkpzHKYQuVTU5iQ13UYriOEZyGhGse3AxUENLXbWHxE/NWi8Ap3hmLfMA861ehr3EHC0CUNmop+ommoiqClShpq5r2lYHlFShtohI6PqB0SaB2ZeQcQxjJsrIxXXPdnfg/LJjf0gcusQ4qIxCE9l1Hc3+wMXVFW1bm4EciTGZ4hRc1IE3XT+w3wfaAJITlWtxFbhC0nXjdL8ClPEC5PksUr7E42f1gJXP5uyIyZGiTvxyzhHqGo+jaTJNlWlq4bhpWDeB+/eOuX9f6bxDNSc/P1zN3jyymwZxXh9ov6bzctN9cdMQzKcpN97xIpZTCzkOMCak3iEukX0kdRBTTZQWsdLqlDWRWhFZj56TwTHkkdbpzIRNu6F2lhP0jpwzfcq4ceSNpuVe3fBaXXPfe1ZkGkvazudUZL3Afig0ILMxmKki3PSQXBow/fQ68d7+3VO3a1abI5rVmqZZKXW6L9Tspa9hCe8tJf7869UwADDxeNdVPQ1rD17H9lVVZV6R4u/RWD4vzq9s1m9PHJUawjlNAmqtezEAjhjBD5nr6w7nKvp+YLvrOBx0lm1MZSKVGISXITokCIIv10JL7JyQksO5CMjECZRzIkucGtecE5zL5OgnAzAOWUdLTnDUTDs9GQAHaYIoCjXGC5T17MPNP50Dm82brBY/2rSvMSp/v9JoaERU1drb0DQNq7Zl1a61aitD349cXl5zvdvSDf1UDJkjpGEk5pEnF3u2u46Ly4FuyAxjJiU11r4bubi8JuP48rvvsVo1OoKv0QE/3gec3TvjmNl3Aw6hcok4VjTBEVyrEg5F1svzL/99OYZgCTktb1JZfK/gyckTR8cYa1JWgrCqgirAau2tvFU4ar0muY9XHB+vqKtASpGUI0nyIq/8laAdmZTVslhmqbnFlV9gxvvlqVcu5Hjzn2+c2fKZr2RsPtLKgnQjMmToM9lnYu4Yc83ge6Su8VFHMboQoGnwBE5pyNETkuMoZ1oXaQJsqoo6OBvC5MgxcjVEcjfyRhs58xWnKdFI0qYvyYwwVfqVqzpTMMx5pPknZhzzHBVmhfsKdKovMcXulQr/5N4DxNWsj09oVitWm43NNbDxqCUAWEZuL2D/viIGwCylBe3JSjJjynhfEUINQSblf3m5BYRHj4T9bmS3Hdhvo47Oy+oNYCyWArgM8aAjAt9554Lz8701Gw0M/UjfD4xjnhR3MjqKlCPOmo00b2D+kUAI2QaNaHmpoENKUh6mHASm5HPyGC2IPgS
bJYAOjQnaESzmLWRnI+MlkW34yW2WogFi3sRNuZfyTwDJmRgHxrGnHzrl+j8c2Pc9Q0xkdFpXXWibq4bNes16vaFtV3RDJKY955eX/Pwv/AKPzx/z+OKcPkbInqFP7PYju8PAe+/v2O1HrvYJm+9uMwkc3Xjg0L/L+uKC6/0lm82azXrNgwc6BOTk6MgIdSGPA1dXQu0z3YMjjtY1K5uf6ryjkjJ20y0eyx+3u4lm2GcRURVPsXjMZbbDFGcFUq4ZoufQebpuRUorvG9YbzpWzcjpkaetNal90lasmoqj44r1Wkn6dvtrDoc9wzhYA+XyTJamvXwrC+V/0wGYvdh84zyWr5QPep9zUy/DM7HDC6hQceJpR0c9RiQONBFCHHCuIvkeNwb6caWKMgR83dLg+Qw1D/H04ki+ovaeJng2bU1dVVQCLgtpFL58faDfHvhs3XICtBUcjevJCYvBU2V1jiTrZC9N5LpFVMRU5gkF6SuOZIEGdW5AU6+NgFJncSNaffR1n6t48MZA1dTTPOy6aXBO2U/JGo24vDAsv5QigBgzh33H1eVW4YKsQzH6fmQcE2nUpK0jcW4t/c5B12X6Q6IfEjlZ04Uz+tbyIqTMZaCqDwxDnLyzmGTqM3AwV125GRtOWSxpnKekJiktEE+lr84GA6WcSDGqhS+qSjyIt3F0YRoT6rwj5wWb5AQ/ecoA9tt2Vd4AIqYZrm7Gb+2nln8q7/8wjgzjQD+M09zeKZHu3FQO65z2KnR9x+EwkLPw5Pycf/b2l7jeXnO9u+bQj2QchzGzP0T2h0jXwxD9FHXlLGakLJlvsgw+s9vtWK1a+r5jvV7TnZ5ROTWerc9UTh/rBrxbk1KyCMzd1Pnlzxu1r7dXVE9DJpPn755NxIrT652Swj/REMcQBB9EB4+32SifVdmklBlj5ur6mpyt1j8Ju/2eYRimc5n/t/zGcs1mGZR/myAd+70cd6njX04D+yBv0z399FPf8ewfH29577l3+gAvGU/mdNNOxr2udVKczs0wfp4QEHEciyeKY8SR0ZxBkMwmO+qUCUnlGoeetu+RvsP3HdReH3FU58FBEO31ySlqM6H34JJeWzMAauxnGodc7qcpIjBmz9DgLVJ2PuBxhCykLKyTUNVKE1O8/ipY35MRXhIzMkaFY60E+7brFTEAjsNe6ZK31wcd7JyzGYVoRHBap7/bCReX6l1750nJk6NjHLxRQwgiEW0UQ70UHBjff99H6xysWK1amzGgnDKu4PEOU0S6eQtHh6ATirzzRIE8jAhZee4lIyRSGklpZBwGU/SBKrQayfiKUDVGgewMShWtsc9z1OCd9kIU5Z/lK0vuo6wJBFmEslC8mOKlqPI/HPbsDwd2+y3b3Y7tfs+h64lJ5eFteHzho5ec2O6uCXXFkydX7A89jx4/5h/97Bc0cQwMSRiisNsnul7oh8xuD2MKjMkxjXeUjPZcZLo84pxwcaEVYKEKHB0fcXR0xOsPXqM2XviVF4JLBBKb1uOdMCYDncqQ+4UCnPS+KX+Hf0ZezyHg2XgvPGjB2YjRxZeLzvjtBk8/OoaoJatNk2iaxOnRSFtHgmQkCRKFHh0z+sUvfZEQMpUPNFVrk9AyOWvkOJ3LDUdcDX15asol4iZoSGyPZCktiWKf95XWTSOz/MkLUv6gw3w+/7lvZr1qWK9a5d6ywgg/5WCLR6zsvCsRjgyuLJMFnY5KoyVRIfickWj7PScOObHKkSqNhFQRUsSjfQWV9ZTkpEqfPDuWy/J+HSBVFL9YA6s2e/lQ01QbXLuh2dxXpgNvZJWWFK7blZI+pkihk3ZuZhXWvJcSAeakEG1Ot0MG4BUxACIwDmrVUoqa6EvJRizadLC0bMTSDrgsi4tgHbo+gA4HE4y/FYrH6ksSRVuuY0z2ncmSlWIdeKqMnA2P9ota8qlJLMvkvaZk7XtOPfpAhWtBW6a0O3hK6OqeUAoCQQ1I0uPQmcYO70VzQ5KmkrFby5j
JBzWM0jjcRUc+zrIfGaNGAGOMhjXP0UmBPCTrEJ4YI9e7a8Q5rrY7Dt3Aru8YsmNMEJPQjZkYhW6AmBxCwAXBi+BdIknBx7JNoMozbGPfmTOMY2TotTu5rWqaIFR1afpTDzDUtU2OWqKlUhzyBV5unu6tA4BneW8m4GUBPZVmoJzdVJAgUriioKkjTZ2p3IAXpXouH5aTlhRvr5+QYse6XXP/7DWtIErzt7obPxf7xqCabPkC77D/zDHC/OIFtiw3XvFVltz4dYKbRG4lX+c9680J63XLetXaeEisk9/m/gJghQtx1AqbBZuwiODMSDQhaAlpVFbhsBm5PzqOup7N6TF1W1O1Dc36SD3x4KmMuj1Uganw367r1PthEWyp8HGS1bZmp8wEVU3drqnqZlFwMQtYdUSYPmtiWnVuKnhIOTPmTGe6KpuTfNv1ShgAgHG0UHeUiYtf+eBnbG2aWuWDKSOZ4Ve/4DzJc0medyXZWrxAhWJEmAxAKdGcOP7No6iCYZ/eT8bDGQSSzT/PIsRcuIN0aD3eU7lQKIUo1U3FAOCZNksWrbfPZlAciv+rIya8kDJFW3Lj9zLaT++UZNPWxmhGYByJaZwqgLLJ0+4qk5UjxZHt9pqUE5e7jr4f2XUHhgyDMXbuO72O/QginozH+4wPCRfLgI00eXNFbxZgo3RZj2OiDyNd1yO1QA2tr3QKnHd4X1FVjc0zflrzlMS8OQcWFbwYKoivriRVbCprrf1mgn58cFReaJtI2wiV63E5UibDIc6oTgYeP3nMYX/F6ckZ6/aI7AJR/OStP33ON5S3M6OtmXyKhAr0uDzWZ3T2pMeX+NFXl4YU5X8L+XrvWW+OJ3LB4vAXgkWlgdDvSzEyjv0EJ5ZzWUqhLgPidZoTVUrcb9bEcaRpm1npmxMRgp8cCm+FKcuopsA7guquYgTI5shkLVio6oamXROsWknfJpMxcAb5OGf3YV7cak73zJiEIWUO5gyknMzxvN16JQyACKSoyU8Q4qgnnSfsw5S3OBIelxQXS7ls6hIF2OucN24mT86q/LOU4i0hB7HqGxuebThrljx/p7jpgktQ3D54z+yR+8n3C4bpV5VntVLsrqmCKvVkM3JNtZdEp+YeMj4b6ZxzlCntuimWF/d2bqrei0tvrPiI+p+URmKMOt6xO7Df79lut+x3B7pDTxqTRUOBygeqEGjblrZpaJqGLo4Muz3vX27ZdwPXu57rQ2YYhWEUut4ZeyeUqWP4bPxHph4Fm2Jl5y3leprXZVFHSjp6MuCJLjBWgeLuHYbErhsZxsyYoc6QvVZxiCtOxOydZv8Mj+VzCphpTu5NCZfkr5ajpgxDdMSkdAIhgA+Zuoo0VaQJiYqEEzGYTY1GP3T0/Z7Ly0u22wscgfhG0pGCAmU44Q2KEpa6V48mJh2TGoKbvOlpE5jGmap2Jhe6/L2Q02Ql5qaz+Z/mMPHmnvv4y+GIMbHfd9qrszvQDwNVVfHw9Yes12vOTk+oglZFuaqZTsUEMh+n05GzSmVSwlhhfe+B8kUV5W1n432wCKBWRe3DIqKbo8pi7FypIrQBPQA1zqoYa6q6RUS0o170/d5mbyjm7wmh0mNIOhddmV0TY0xGDAeurnAh43LAvwDn5ZUwAMUKFu/a2YBkX+qvKN1zGsPHqF6LErjZdcglpHWIWK3txF9uOD6Ge2dRa+8DZfj78lFC+JyN8yM4vdmMBXOOLiwyAKqgrKEbG4qyaiqr/8+kmCf4xFq/pug4O2tkgWnQRKkc0MqCZ7nZP84qXgoG+eKWFR3FK802BU0hnXEc6fueYRgt96IcTME5quCpgo5dXK/XNO0KXzVkHH3M7PqR/ZDohswYlWk1ZT8b6Sl0FoVhvHrvhSDPLtMi9W2Kys3J5zkB7aaQ3yF03cDu0NOPUSeJVcYf5DBdXOi+mR3DF6D/J6sts0pUo1YUoHFFieL/GWelynqylR9tolcyXV6cDwyqVFgOgeAqgg/4oI2
NGRbZxtlBLbphQtKEyWnCzeXM/mmMZ+G4TxVOLD5LijNWEJHZQM8vXnjgt9i8IsJhvzOG08zjx0/YH/Y0tc7lOD09ZbNeEfzCADor5yjX1QTiHDf3TtmNxiQQoyIOBXp01gDqbe5uqb+/eTpm6Mw5nTqwrfB5qk4yxzFGnWGwNAAhqGPZmt7QhLZokjrNs9BBpsin5DdeAAL0ahgAXUXxq7V1tnvKhs6plGkq/KIGQN8HUMp3nHnaxQBoaaWWlCJWl5+hEmdD4YviKU1Yc5inN60+vFXxzFhfgZaEYNPBmjqwXq1oqoqVEdXllIlDtIgmE23aGFm7W71gZWG6YcCoYGNUr7IoxVstecprK8rfTTmMkgdRAxC1NHYYiOOI5Dwp6hC8RQAr1psjmnZFLM1fMSm3Tx/phkxKkJInZW+G2k1aRdCuVa8ceRqtJauGUuFTFOE8pObpCiSj/RDFRw/9yG7f0w+JaGMkb2DBBfSQpea/vRc15xbMyi6raEpAiZs4knDajKQ0Bjbe0SkpIYSFt6mSGlNkGEfAGQOmGoGMw6ViTBcQ0Bw4L49yGqLkUENQJsJNr1hED+6GAXAf9IE3IoaZqK/8Yh9wiyZGkcx+t6fvD/R9x5fefoerqyva1Yrjk2NAeO21B7RNPV3fsj+KTZoUOoW6pcjd4ninBiCLKW6nlT7a/e9xxig8iVVm73+eg6xyEG/PG+SslT/eIGuNXru+nxS49jkFmwWuqlgdyoA4m+0co42WnA2A2LyFF4FevhIGQFALDBo6aadpNSv/rF50KhNjrMPX28AXg47tJiief5h4y6fpa+YFZVeGjfdaumlW1ns/TekqiR/dIF4hqpitZt/hg6du1Xo39cxR0tQWXmch2Lm4qiJkbXRyOZPE4ZsaJmVnuGbb2jlZYliUr/5WaWA75+Kp9UNmtMTuaEYxuEiKI7v9gSfnV1xebTk/v+b6as9h3ys/UoDKq+JvVysSFbs+c933vHexZ9eNvHe+Y9cNjEOm64sXOR3AHHqb8hfREBkfkErAO0KuNSFt16OujPI3eBusU9FUgcqDI2lHsF3cd96/4Gp34OH9e4xj4sHpMZ99/T5NVU0usVYxiUUUCxrhW6wZOHgqWnMBcmYYRsaEGcJap8PVbro4nmyVLX4qUtDN7MD6Q0Q8IbTUjVDVrTIdClrhgsJp2S213vKmKNyo3gymm4tZSibVos5JX8viYecnCwOj6Yn5uRultUvZ3GLrFjpxwRHqlqOTE52b0basVmuqqmYYBg5lf9SVOQr+5ve6AuvMjkM5sWzjG6M5Qd57ghmFZVHtMueYFyWf5fNFrDRzSpYZ7BYTKWk5dYyRrh/su3R/V0HnGQCsVs3Eb9bUNcdHxzYdL1G4q/S+KSjB7S3AK2EAEJkuhIY6JWE6Ex6pHjHBSrmB54x6qbIoN+Pk300eO4vPgeQ0AZuM014kK4NoqHHMXlYJ/bKF2TrlKxB8oKkbqqC4f7AZAsFnVQN5JpUrlXX4uXLJhSVFp57zqml0KpZ15OY51XxL+Zaoxps3mRhTph+1K21VawJ4GCOHQ8+h6+i6XnswhoikWXlWoaKqazKeFGFIiccXe672PefXA92o/RgTA8TCMOvZlgig2CQ/eeZZBGfhPjnh/czHFOwGr0LQ0jzFIzQZbRd1u+sYhsj55Zbj9YrKe964d0awBj77lrk82Oi8b7tmQIE5sc4Ms8WYiUmm0N8hVDoTiJJdV690UcXv5ihtgiZ9wPtaR3yW43bzeMab9T9FsS+894VX/zS8M2FYYDeJe8oALF5XbsHpffPPD0q+P+/KIkRTyj4EmkZnHzRta9U5XmchxEjtqim/5q1IBObEfxnqPpcEq27JFGRB7zktGXfM3dALhlSKol+ck1VYLUfYlvMuDaHjGOm6TrnHxmhQq+bVqipQxcB6PU7RtQ96reu6wXtrCM2CL1xPRfn/kjEAzlHX9TSKcRrwIAqFiLkmhX/JT14SgMNL8eY
0oTLh7Wku1RQru3QTYZZe1qqwdaKfGypvoZm3zLzCDOUiFyucJTPEgWhsfojRQOQB7zC+ek0AIc4y99mG2cwjIUHnIag/a5wwPlDVBUtMtzIAAlwdRroh0o+Jf/bOIy6vt4gIZ2cntHXFcZtweeTy6oonFxdcbfdcbg/su4E+ZpKrSNREGq47xyFlDsOV1qEPiSfbUYe0R0cUA95LyAX6e7FBi40rMFVFOOepakGy0ewihBBYtY1dD6fdvc5R+ZI8M/IyU6vdMDLEyNtffsTQdey293nt9ISzow2btp6GhUuK4JzpxttHADfkPelRDeUBJCWd7UzEuYiXmhoHPtu+tcFErjgI6rU75/EyKxJ8ha8dvm4JTWuj8orBKQageJ+zoZXspnGrTnQiVVPVqlCKsXLoGNU852HUbhYvf4YO5+U0y84MBT0dCd1GRaln7ql9hQ81r7++wRkMcv/+fdq2mfB1hzMuKi27nuGahZ9VdsvCqUxR4bBuCMTorPnKLyIbWRxPeU4LD5bwUpn+VogSU0rs93vGUXNq+64j5czh0HN1dc3l5RXg2GyO2Gw2VMHrFjB24ZjhahcZBk2C55yVIqSxaGZx7W6zXgkD4FA+HPX25ik3OefZuzfYwBl+CswhLsZRYxOCSuI0xTm7X7hOJBeFqlaf4KcLC2aAQukZsJ9SErIFU9RbK+aEy9pNLHkkpYEYO03qNJUO9hDBuzDlA1xlGLCvCFWt91A0TiHjAQqGH6rydyhp3HMugX5MbLuR7X7g7UfnvH9+oYbNBTbrGp8STkb2Nvyl63u6ITKkrJVWeDKBJIE+OsYsXG57doeBwxA5DDa2jpolzl8O4CaZ3Wx85w3gTCZhiux8mdrWtqbwnXJBwewtm5OgXrKW4/oM19sdgcyqrui6gXXbsmlqdZqzXXcpOPGLgoDsVJZ/W9RVPEz9d6Uo96QpMelcIDtRTDkXT7rAU37OAzlvfSSaJ3DB4aaBQ7NyXsQjttMNL54UmLPoVoCo77ToIxtX/Xw2H3TCN6OKKRG8fO5FyFWK0g04V+mIzLahaSrW65UlTW+ekyJefoo8p1VgGpmNgPWHacQaHTHq7Owqa7SQl6iDmz9I9X753sUXUPaxTssbhsG66qMmf7MwDD3X19c8efIEEccwRFJGh0etVwaFe8Yo7A6Jro9srwckJ+oK8soZ2iCEF9DD+EoYAGApSWARUtmjWFtvE71UpxhbnnXtqVdo4Z9oV2POcxgI2imcp00wh9tTAGsGRuvKi39ZvFm9sGIerV5wAZI1kCUrN1XwJklWD87+pxw/5iE5d2OikG7GjDNvzflFZ+wtIoCYEl9+fMnFtuPi+sA7719wcXlFXVe8+XoiJc9uvyennt3+QD9qpDAabJEsMhmTw0Vh7CLihO1h5NDZa5NO7Jo96qX/d1P5P/Pb4kYtkJB3TGVxVdCyUzUA5m2KTAYgiyp9SgUVKJTVDRy6XqOCISLrGZJxz371rdbEtbZ8zs1es3PWX2vNisEr3OidmytWJJOWiteUv3Mz2Vuoa3WW6kZ5TRa5nZvntVBOi1LpiWCxRNlYFzFFnvp9S0jrJqjztEZlAZWUf59fc3v/lIkVIOeMD4G6VgbaqqrwwS/yOFbqLctCgqcO1yr4JOdZt2QrA7dIXpLDRyYd4gv/1+R0LKOdEsw+e6YpZaVTGQYrqhiJKbLb7djvt+x2W0B7BOpmRT9oniBUkSyOMSprwTBEco6Qk0Zy2eTuP2DTPcd6JQyAYJUv5Klqo+QF0oTPe8uJlYEQs8fvnDcGUX0ujomcdLRjuRnKhK9hTKSkz2k+2U9dwqBQRDCPU49BP0vJoIShdOw6kFJD5zKlpK6qK3zlNcnrPMk8O6UE8pqowzFa0sg5R7JGp64ftETVe6o+aLRSumSfc20PHT/293+a955c8+75lsNhR06RB2fH/PJv/Xqcd7z77iO6wzXDmNh2A7tuZDdkDskzSEUnQj8ILo6MSYeOD6N
W2eSscAWWOMMAZg3Lzau94e3LBANZLZBxz7j5BjPsv6oqvdkXBsBNF06XF4/4ZUJMGMbMzo1c73ourvY0Vc1rxxu8eYwaQfqF8nwRq3zWXLmmY/ygDgEnmUwCRpqq5mil+9UZ+eGQ4jTPwlH2t5Yilwhpc3Sso0yPjnCh0qIht/CCZS5ELLzxItowFFM2Y6pkfnWozGkxY+NmSM0VyPOpc9PLqw6XPu2U5tgtggK7Bs+vLK13AAATI0lEQVT+9vGXSGYYOjDYNhtUWqL9AtGWvp9hiLhiTz/w88retC78yQDAGJWaw0VHN3qaSljVyvSr1WpuAVGH6RoX3H+ZIE4xMowD2+2Ovh8YYma7O9B3B548fsTl1QVXVxeEqsVXFYSGy+sdznti9tS1Ol/7fU9KibaKk8fvS9VVfjEtoq+EAQCb6GWwzHT/FAdSih9SSr1YWOSbpWxTpf0CHnJTWMpTrqfxjzrrxPPqIZU8RI5Wg+M0WpByV0xGqhwHxpPvlKfdopGSlNLjtZvIEtIpJ0h6DjnZIJYYp2qkbDMGpHTIPucax8Tb713w/uWOR5dbvCQqb012khErTdsb388YdUDFEIWYIYon2jErW6hipimX0r9yuxWZl0e+8XyJ4JbvWK7po6ZruvC2HGbc/ayuF6H8zLio35dEK5yGmIzULk4Gwk1RGBaBvQCo4pkTuvmZpavUi3Y3VFbRhBUEaA7I3vk0nOKKdIQqBJq6JlTV0xp3cX/YMSH2+TIVFdShsoZGZ5QbUl58c09PJ3bzPJ/+ximEkoVNng799u6pQl8WCZte0EQok6Kf6SCKgp9FUxzJ8kfZnzcSqBqUTZ+ZBeKo/+SdUFdqfIIIyiCgpeRqEOXm/kOjrdFKqaM1cKWYlbb80HO13dF1PTklfMja4zH0dF3Hoa2pmzWgjZNIwpMIXlkGSuEDMrW13Hq9EgYgC+z7MuZPa+qn5g4r+cw5zlGnuOmGTymZ3rAaXhROUcuuJZxMYZ8lYstGIOPMi2ibhqatWW9aJYJyjuEwMPYjvYzGkS/4SqmLNZum3EN144ycykpAPYZhW52vn493jNrK3Q+2y+ykcsp0/WDZfocPAcwA3CYJvD30/NQ/fodDTHRj5OvO1hxvao7XDTIcGKRj3w3susSYYT949oNj10M/OvroNRFsiqTIv3CZCyCpUFZPbpFh3kXp2vmXuumCcJTfp1/nGn/7GL2+Trs4y+zX0pFdejLKZ+Ws1RIpi8JAw8j1/sDRurWh9ppz8SXJt4gQb7dUEzr31D1px9Y2lfWZ6CzZo82Ge/fOGPqRrh9Jw8iUyDUDoN4muCRIjmSJVFVF27Tmvc+0Gd5yIcvOW53roE1kRRGt6pamVuqMHAcrbjBZJ+WnktJUdsNh0uupk7fsSbPYUvJ000k7Ci+XRkO3igFAtKrOO6iCUFVCHZThUxu7jFnXoNjZ0VL5l9zd8posfUALIGgrPbVudFwN5uBkdAKZF5oqW6NYRsTI3MTYA0SmmR4xRrbbrWL6lkNL2dENmfPLPb/49iOaCpo6UFeeHHt2V+c8OV8xjAPOefJmQ/CBda1HGswn0KbHZOL/JQQBqQIEcVr06AUIZmOTqeoFGY1L5cqVBBg45rLL0tyl+0cFpYRrqvTl5nalNI+ECWZQbDZ4R/azMUJktrwOCNmqU/zslUyUAwXWgAmMReZNc6P1vOQq8gSLqJNy+3kAKWUud51ym4seV10pXUWOI6PAMCb6mBkT9CP0ozAkGJMjZkcSjzHozqucr5jikIVSB0rljyZbTWBzULaIBeYfNwK1qfpF5kQ+c9Snv8uNJCCiRlksCkhZR34O0Sh0ywFMRGhuUQBwy2WesMPd3F9mvLx5lyJ5ig610lImbLoEEjf3kR13UfYThUPxYpeQ0yT8aZ9NyWdLrAeDNgt7pSzeU+R+cy3+djN1xuRdy/zlc0Box3Xz3R97KSzaUxR63/c6HCh4cq5MFl47rqdzNYhqgfHbwc/
Ox2y/pgN0rjCMymQ4lNHTWG8np9FgTysfLQag9Bjk0vcR4/S92NaMSej7iHeBuraRlKNOBuy7jrquGYaettapQKGMf3Ru2gPlvF7UejUMgG7t6UYfrfELtMmn8OIDFkZbcswVxWuek7N66aIkSqXHtOHn4dl6wUsOwULxlBmGYUJzlWxJaSMUmhCd5pUF8QWWmKsPRLTBY4aotBs25/lmTdN4SBtaX6IGp1xCIuVmRXMVWeaB7M+xzBGiqKbgrYwSrTxIQeij0EcdbL89JPZdYohKWJayV5m58gkzRDFVVFBujkXCsOj26a6bn2P6PG4O1rYNnplxfVVkemNl83ozs4FYdgo777XwUr0GsmATzeIcvZhBLp25csNqPe9yT/0oxs72mtGIkDM5q7E/HDpjXB1t3usskyJT1U8FAjHK8eL5y2wUiohnuEj3Yk4RQbTr2Jgwi4Jb4uCz/Ke3L1YxElpUURqRyjd9wMvn59wHvuIjr5QS7z8+t8Rvw3q10vv0ONE0jUUASTmdsnJuiUUFc4TIYo/4WU4yH6yej1aGKQKBRhkevJsjWcGSzGY4iyOnhtYbwpAYhp5o1VkK/wl17Q0RUGqJnIS+H8l50BzEOExzRMZxAAlgBHE6y9z2/BSzLjbLLdaHGgDn3PcB/xrwnoj8GnvuAfBXgG8E/inw+0Tk3Kk79j3A7wb2wB8Qkb//4YfhcLkyWmIdSi6F/wLQm8gupIeQkirOUMo/HQSHoUCLi21ulU3tyjkrzwbWbVwv8P6sHXcx9ZTW4trXBFdRV2Fq4EqMSDLvNpQLXKs1T5k4Js0lWO1/9CzV4jS8PiVVSj/wA3+eL/zMP+Dk5Izv/qN/Gudgt7/m+773T/HkyXvcu/eQ3/f7vqtci48tXzEvxjkhOGgqT1NpN+zV9TWQudyNbLvEoYs8uewYxsy+m2kwJmpjJ3MO98aNvbxJYMJC3KwYZ8NQDLBbaC77zDzj0jk7HHnOlQCjV/gHuen5l+5Pb5Ga0oFnfvx//9/4a3/xFzk7PeUH/+r3kcm8f7HlP/iP/xi/8KV3+PxnPsN//af+8+eWLdMZZ7Sx6OnnzWu3GD67wCjCOEaeXFyoUbNIpUSLk3Kd2v0jIgoB5RSROCLeay9DTjhR6FQAfJ4UW0ojYxzAV9R1S1U3VGj5qXqT0WRekrtiWPiyn6C4x/arlaFKcvzY3/3LfPHtn2bVHvOdv+uPAtB3O37073w/u90Tjo7u8S/+C7+XVVsV+f6Zjyvbfhj4mZ/7ef1ePH0/8uYbD3ntwT289xxtVrTN3CgoWbv0XVVNxGwiDmd8PK6UGVM8fQzanG55gnMctQq7aKXWYjsLilLkpQGYIwDwjMPIdrtDRLuXfXAEElUQ6spT1w2QtDksHUhRZ2+/1r/Jahjp+wPBZXJVQa1RjnKllApFP+XDyn6/zfooEcB/D/xXwPcvnvtu4IdF5E84577b/v6jwO8Cfpk9fjPw5+znhyzBZePXdMad4bxhf868acUY/CSEgrcXb3/uCFWV66h8mGrLq8prjbMlEsv7QC2rQkQJl7Wt3jmofT2FmRiN7xSyY/CUZPo+Gz6n5E2lMU3dUBZ3tdMIoNR04/iNv/E7+O2//ffwF//Sn6Fttcvjr/+NH+CX/4pfx+/47b+Hv/W3/ho//uM/WAT1XPL1kmbHx7DhISau9xqFXO06dt1IP2SjioAsC/6ehYOrIMoCy4EpgnI3XjSH3Dc04/KtUqAOt7gxl/X9ehN758jOGV226IC14v0/5QaVIgER+KZv/ef4Hd/xW/mbf/1vGqVy5nv+u7/Ab/3238Bf+AP/Ht/z5/8Cf+rPfe+tZCsixLGz6xm46Rdr4jJbbb8qpYjgkcH2UIEqps8zuXhlx01pJPhMXTnjC4rojOuDUpknnZym782LDxlBokKbLhFcslxCMbRpuhzTl08G202fMxU+FEcKha6+9Zu/jV/5y34zP/Z3/yq4HoB/+IUf4q03v5lf/Sv
/ID/10/8r/+inf4Rv+/W/DeDseWSbUubqekep2Hr30WPGGNnudohEjo7X3D87YdU2tG3D2cmxcesYK2dO5CS4FEnO4wq8O82KMO4vKRiEKvjKWVd/CeWnKMFRRraq2OaEsuRMdkyRahZNtMdx5NAduLx4zG57yTh2VMERKkdTraHRfVBVFZPDYAUlrpS5mg7TEngtiw72uO36UAMgIj/qnPvGp57+TuA77Pf/AfgR1AB8J/D9onHl33HO3XPOvSUi73z1LynOukFBrkJcnvBT0w+zdx/mZiG39PBTsiSJblrfrKaqnCzKsqeld8ybnGIAMpkIblwkHA3bKyqv4NIy43/Jlxt80SMQPHni/J8nBenNbQMB7Pi/5Vt+DcNwjXOOplWWw5/8yb/HH/7DfxwfPN/+7d/Bn/2zf3wp948lXyeCF5WljhjIho07rnMkpsTVtmffR8YodEO2hKWyVi4Aguk63VT/dt3cDZHehHuswc3M7fRzaVmK4nbLS2MYbnbZDIBy5hTlbycIuPm5BeT0+ltvUdV+MvApZ37wb/84/+P3fg/7fuBf+R3/En/oP/ojzy1bTJ5j7PAuoIOK5KkHi+50Yy7NkKyoYda+BkcY5OKyyiOnSPAyGwCJSuoWO4M5kjU3wrJaTGQEopWTJrxXfo4y3U75Vp8GaIpaLF3SpvkmhWeRjoOve/OzXF+f6/X0itP/4pd+in/5d/5BnBv45m/81fzQ3/5+fv2v/S0A955Htiklrq720zV999H7HA4HLq+viWng5HjD8HWvc3x8xMnRhqP1xvYCBoElYkpzIGNUAmVA00RNboSRpn0IWIIZjTxUtG6qGitElctIOGtIwDxbRJ2tMY4c9jszAFvicMC3DbiGpm31WIx2ulyCAmdqw18pO1Vnt7by6PK47XreT3izXDwRecc594Y9/1ngFxev+6I998yFds59F/BdAKcnDyeq4nITC/p7qEoppZT3Td670vrqNo42izfG0ernRcmhbPyi0t+WsEmtemkIQzS7niQijEgIVh2k4buzTZWsS7JEGyknJIk9XzBp/d0HK2EzYjP7GuVyQaYoRuuZlXGwaRqcE66uLrj/4CFxTNy//5Dd7rqI7SPJdynbqmk1AhDlzylDQcaY6JM2qVzvew59JCboBzVQeVHrvXARgQIVLL7vaTDSMUU9s7LnGe9/TirqNS81zgv9jyyMQKHufvbLnn3KmVFIaUQEol2HR4+fcP/+Pbb7nvXREU/OLz6WbJ+W7/HxKTF2E3eUKvGiiBUaK7I0HaGKN8kEaTqTV4kE5twGRlAm6ECqBDLa9DbDovOc25LFfkYijoRzChEFn0mMWktfFNjyUkxyLJhHUfxLE6FlrJPhcKP+5gYADt2Wo6MWkYH1ZkXX7ckyAtTPI9u6WXG13U823QfPdrtjc7Uixp7Tk2OqKjBGjWYepqz5lrJnJJNsSpj2pRgpXpib4YIv/RhWbYSOGPVmHBzBnB4zFHbusrwl7Jpl6xHSuRaOYBFAd9hxfXnOfrsnjh11rR3HdbNSgsO6IlQVgsz6bUGLU4oGtMxc5yHXtRLH3Xa5j5JRtgjgbyxyABcicm/x7+cict859zeB/0JEftye/2Hgj4jI//khn38NfOG5z+JrfzVoePwP7e9vA/7B4t//eeAJ8Pf4mPJ1zj0CdsD7L+G4vxbWh8n224AD8KPc7d2Puz6KbL8IvAX8zjvZvpT1y0Xk5Hnf/Lwm5N0Swjnn3gLes+e/CHx+8brPAW9/hM/7goj8puc8lq/5tTCwv8n+/gLwry/k+/Mi8rpz7r/hY8rX3vcTn1b5fgTZ/ghwzd3e/djrY8j2B7mT7UtZzrmfuM37nzeN/D8Dv99+//3ADyye//edrt8CXH4o/n+3Pmg9Ld+LxfN38r3dutu7L2/dyfZrbc2tzB/8AP4SitWNqJf0h4DXgB8GftZ+PpApTc6fBf4x8JPAb/qwz7f3/cRHed0vxcdHlO//dSfflyb
bB8BP3Mn2TrZfi4/byugj5QBe9nLOfZeI/Lef9HG8quu28rmT71dft5HPnWy/+rqT7ctdt9YNr4IBuFt3627drbv1//96ASMF7tbdult36259La47A3C37tbduluf0vWJGwDn3L/qnPuCc+7njFbiU7ecc9/nnHvPOfdTi+ceOOd+yDn3s/bzvj3vnHN/xuT1/zjnfsNX+dxPvWzhTr4vc93J9uWtlyXbG+sTzmAHtDLgm9Gmkv8b+FWfdGb9E5DDbwN+A/BTi+f+S+C77ffvBv6k/f67gf8Fraz4LcDfvZPtnXzvZPtL7/EyZPv045OOAL4d+DkR+SciMgB/GeVk+VQtEflRtNN3ub4T5VnCfv4bi+e/X3T9HeCeNd08ve5ka+tOvi9v3cn25a2XJNsb65M2AF+Jf+VuPcW3BHwY39LT6062X33dyfflrTvZvrx1W9neWJ+0AfgAJi/u6lK/+vqoMruT7fOtO/m+vHUn25e3nktmn7QBeF7+lU/DereEcM/Jt3Qn26++7uT78tadbF/euq1sb6xP2gD8H8Avc859k3OuAf4dlDfkbt2eV+VOtl993cn35a072b689WL5ll6BTPfvBn4Gzfr/p5/08XxCMngpfEt3sr2T751sv3YfL0u2y8cdFcTdult36259StcnDQHdrbt1t+7W3fqE1p0BuFt3627drU/pujMAd+tu3a279Slddwbgbt2tu3W3PqXrzgDcrbt1t+7Wp3TdGYC7dbfu1t36lK47A3C37tbduluf0vX/AYdiZlaryKTRAAAAAElFTkSuQmCC\n" - }, - "metadata": { - "needs_background": "light" - } - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as c_trans\n", - "\n", - "ds.config.set_seed(8)\n", - "\n", - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train/\"\n", - "\n", - "dataset1 = ds.Cifar10Dataset(DATA_DIR, num_samples=4, shuffle=True)\n", - "\n", - "resize = c_trans.Resize(size=[101, 101])\n", - "invert = c_trans.Invert()\n", - "dataset2 = dataset1.map(operations=[resize, invert], input_columns=[\"image\"])\n", - "\n", - "image_list1, label_list1 = [], []\n", - "image_list2, label_list2 = [], []\n", - "for data1, data2 in zip(dataset1.create_dict_iterator(), dataset2.create_dict_iterator()):\n", - " image_list1.append(data1['image'])\n", - " label_list1.append(data1['label'])\n", - " print(\"Source image Shape :\", data1['image'].shape, \", Source label :\", data1['label'])\n", - " image_list2.append(data2['image'])\n", - " label_list2.append(data2['label'])\n", - " print(\"Flipped image Shape:\", data2['image'].shape, \", Flipped label:\", data2['label'])\n", - " print(\"------\")\n", - "\n", - 
"num_samples = len(image_list1) + len(image_list2)\n", - "for i in range(num_samples):\n", - " if i < len(image_list1):\n", - " plt.subplot(2, len(image_list1), i + 1)\n", - " plt.imshow(image_list1[i].asnumpy().squeeze(), cmap=plt.cm.gray)\n", - " plt.title(label_list1[i].asnumpy())\n", - " else:\n", - " plt.subplot(2, len(image_list2), i + 1)\n", - " plt.imshow(image_list2[i % len(image_list2)].asnumpy().squeeze(), cmap=plt.cm.gray)\n", - " plt.title(label_list2[i % len(image_list2)].asnumpy())\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## py_transforms\n", - "\n", - "下面将简要介绍几种常用的`py_transforms`模块数据增强算子的使用方法。\n", - "\n", - "### Compose\n", - "\n", - "接收一个`transforms`列表,将列表中的数据增强操作依次作用于数据集图片。\n", - "\n", - "下面的样例首先加载CIFAR-10数据集[1],然后同时定义解码、缩放和数据类型转换操作,并作用于已加载的图片,最后输出处理后的图片形状及对应标签,并对图片进行了展示。\n", - "\n", - "依照上文步骤下载CIFAR-10数据集并按要求存放。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Transformed image Shape: (3, 200, 200) , Transformed label: 7\nTransformed image Shape: (3, 200, 200) , Transformed label: 0\nTransformed image Shape: (3, 200, 200) , Transformed label: 2\nTransformed image Shape: (3, 200, 200) , Transformed label: 1\nTransformed image Shape: (3, 200, 200) , Transformed label: 6\n" - ] - }, - { - "output_type": "display_data", - "data": { - "text/plain": "
", - "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXcAAABpCAYAAADBa2OhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nOy9eaxvWXbf9Vl773POb7jDG+tVVXd12zTdUTc26bhDYsdYiDgDtkAZiEIIKHEUYwRyJMQfYCQLFAgS/AESiqNIBseTEBBCEBYOsQNqEpCTeMBxY7s9dXd11atXb7rTbzrT3nvxx97n/H73vldV71bcvNddd7361W+4Z9hnnX2+e63vWnttUVWu5Equ5Equ5GtLzPNuwJVcyZVcyZX89ssVuF/JlVzJlXwNyhW4X8mVXMmVfA3KFbhfyZVcyZV8DcoVuF/JlVzJlXwNyhW4X8mVXMmVfA3KFbhfyZVcyZV8DcoHCtxFZHXhFUTkLz/vdj1vEZEbIvI/i8haRL4sIn/6ebfpeYuIVCLyQ1kfSxH5RRH5jufdrucpIvK9IvLzItKKyI887/a8SCIif0pEPp+foS+IyLc97za5592A/z9FVfeGzyIyBx4A/+Pza9ELI38F6IA7wKeBnxSRX1LVX3m+zXqu4oA3gX8OeAP4TuCvi8g3qurrz7Nhz1HuAX8J+MPA9Dm35YUREfmDwH8O/CvAzwKvPN8WJZEP6gxVEfmzwH8EfEw/qEpgHOROgG9Q1d/Iv/048Jaqft9zbdwLJiLyOeAvqur/9Lzb8jxFRP4S8GFV/a7n3ZYXQUTkZ4AfUtUfet5t2ZUPFC1zQf4s8GMfZGDP8gkgDMCe5ZeAf+o5teeFFBG5Q9LVB9mbuZILIiIW+N3AbRH5LRG5KyI/ICLP3bP5QIK7iHyE5G7/6PNuywsge8DZhd/OgP3n0JYXUkSkAP5b4EdV9deed3uu5IWSO0AB/Ang20i05u8Cvv95Ngo+oOAO/Bng/1bVLz3vhrwAsgIOLvx2ACyfQ1teOBERA/w4KSbxvc+5OVfy4kmd3/+yqr6tqo+B/5IUo3mu8kEG9yurPclvAE5EPr7z2+/kin5ARAT4IZJ19i+rav+cm3QlL5io6glwF3jh6N0PHLiLyO8DPsRVlgwAqroG/ibwH4vIXES+FfgjJGv1gy5/Ffgk8C+pav1eG3+ti4g4EZkAFrAiMhGRD1TG3TvIDwN/QUReEpHrwL8D/K/PuU0fPHAnBVL/pqpe0Q5b+bdJqW0Pgf8O+Lc+4GmQiMhHgX+TxKHe35kb8a8956Y9T/l+Eg3xfcC/nj8/d275BZD/BPg5khf8eeAXgf/0ubaID3Aq5JVcyZVcydeyfBAt9yu5kiu5kq95+YqAu4j8CyLy6znv82oiTJYrvTwpVzp5Uq508nS50svl5LedlslJ/b8B/EFSFPnngH9VVX/1t/VEX2VypZcn5UonT8qVTp4uV3q5vHwlLPffA/yWqn5RVTvgvydlX3zQ5UovT8qVTp6UK508Xa70ckn5SoD7h0gFlwa5m3/7oMuVXp6UK508KVc6ebpc6eWS8pXIUZWn/PYE9yMi3wN8D8B0Unzm6z9yDUVJ/2k+jIAq70QdGSsYI4gIIqBR87FBRPLuStT0ng4ryNjEtH06vpJOk88t21YPx0KHz/lL3j7GSIyBECIxKsYYjBjEGAQhKrz80h73H64a4LvfSS+7OpnPZp/5HZ/4eG6LbLU66GLUiZ4/iJ7/rMMWCqoRjZEY0/twUN09Vv4YddTOcCfStTPoO+l8V4fD70nH7BxTt/rfaeKHX36Zu/fvv6tOLuqlKIrP3Lp18+Im4y0bv1yQJ3vQO9GR+uQRZPft4n56/l2f3EZ58tq3H7ZHFeDGjQMWizVcQie2nH1m/+WPp3496EG2RxbZbZqMV7g9qIznf+I8T7lWedofx+t72lEuJ7uP+9A3Zze/js3R68/8/Bgrn5n
Oyu3f0gbjNxl/vHi0XUx4hxM97Q9PdBjduY5zF/TE8c7rU9g29SkKzh+Hb5tV91hVbz+taV8JcL8LvLbz/cOkUqHnRFV/EPhBgE/9jjv643/lT+JjTwwRjYoxDmNLVBNoq0aiRlDFGDBGqaaGsjI4JxgrhBABsM5gjICFtvd03hMjaBCMsVgsiI4DR4yeEMOI2SCoGlDBGEfhHD4E+r7HOYu1DgGMASHQdTWb9ZpNXdN3PWU5pSonOFciUtJ1yj/65Qf8hf/gJ7p308uuTj7zuz6t//DvfRYxFnEWjM3tDWgMEHwCa41JR6ooEYmkBzhdHiFGeu8J3tOta7q6YblYUG82GGNAJA1OIaBAjBEflbpX6qAUAqWB0hoqZ7DG4Yr0MgaQpL8YFeccRVHkwS0PrjG12ftACB5VTfdJlV/6lc/z5/7df+9ddXJRL6+++op+97/x5/JAo8MN2w7oXHyNx0gf5AIYD8cQBSIDGAqKSB7Y8iCWvitCHP+uxPwKaO6faEQ09dn0kEeiKlGVtInkwVZQDGQDQMTy+PGaH/nRn9i9/PfUyfWPflr/0Pd/FmtMvieS+2YegGUL3WPrNelHB5CX1KY8fOfr1l08TAOAxuFrHkBk/KOSr23Q6rlRJe+k50bKHWNleMt9F9AYCT6goePRb/4Mf/e/+M5nfn72Dyb66W/+aDY6TDY8zIVXbookI1I0ojHdxxjjaJSkZ0tS71BzzpzaHQREZHxPuBJ3tos7L83HUFA5146hncYYdo2oiwPe8PrZv/ulL/MO8pUA958DPi4iXw+8Bfwp4N0Xf1ChDwbvhRjTzXXiMFJRuAIjFu8DUTtUA9Ypzim2UDCKj+khQgQjQhBDp0roI20f6H1IgGMNUSCOik1KjmJQwwjcwYN6wVAkILeO0Nc0rcf2grVgnaUsDM4W2MJQVMLMOOIsUtgC60pEHBotVpVPffJVgMmz6kVEsEUBRhKwjw+KQSQmCykKEgUVzRb51m4SYyCCSu603tP3PX3b09QN6/UGY1Mn0qEjAz4EfFBWvbLpoXTCzAniLE4sokpIWksDqCg+eGKI5zwXEZMHYoM1gsYE6iF4fPCIKh//2EcvpRNgfGiMGayjJ8H9PMhDGqy3A0E+0s7r4veLr4t/j+Pn8+AesocSEwhmcI8Z3Lde5A64qwVMBlnDyy/fxPvAZXRiRKgKgxgBMWg+XhyAW7fen4hiNIG/GUBf8uCmA7Sn603gvtXl0PYEctnaF0m7y3ZgGIcSGazg4TZsgWwE82EwGMZeZAueajASUAIvff03wGX6ygjmOx4luwC/05b3kHHfodl6HtSfxixs++UAzmlAjdFkI2A8+u6ZRvYg7b/rJe+eT0aj7t3ktx3cVdWLyPcCP0WapvzX3mu2o4pBizmYEkJII7YpgJIQHajBe1AKjHHpUYqK9CBeiTEQUcRKokKMofORzvd0PhIiFKXDOZeskkiypHzcGV1dGnEjbNYtm2WXQD6kG1TXNZu6xlmLKxzTyYS9vRnzacl05jB2jnUVxqS2xHws7yNBIToDadGHZ9NLfhCzhtjtBAPNpDGgIVsaISadINkaCBAjIfT0bUNbt7R1Tdd2hBgYKKsQwngO1aSXPkY6D02fzmUAQ8BKJADW9phgEZMe5BADMUZcCIQYsdYiGGJI1I8xkoEv0LQ1TbNhZLcuo5OnqemC1bMF93M9bEepT6iZ0XLfMSoHy3xrsQ+WbP5ttHn1/PF3feYnziPjOwNo7rZVFGuFa9f2ODo6e3adiKDGIUaytb69kNG7z1a4SIJ+M1wTCdMVPedJ7JgJO6fZ0m2iF3vlVoZA3mC5b72mrQbQeG6fXQYjeTwR6WukP4PmCNozuERfEQRr3Q5IppZtwVIGZ4X3RMndBl4iu3DbD9Ogkt7jDp2527bzxslgvVtjMVZ2Tpuf08GgfRf5itSFUNW/BfytZ95BBIopxhbE4FEfCFiiWjQIISjeRxAoCosJIFGzBSSEEIg
aEQtiDSKWpg80baQPSkSoqoKicogKhGRFeg8hKIJBMIQY6Zqe05Oak8dL6rqna3pUlaZpaJqGoigoy4L9vT2uXw9cO5xz69ac+X7FdFZijCfGjhA8wUd6n9oZ1QCcqervfnZFRlSzk2wGdyzz5iEBe/R9GgyD4sPW0hIiRI/3HX1b0zY1XdvS9R4lYp0dreCtdZZspqiKj0IfBQnQG+gl0qFYDVgrmJi8iYiMLmyMqeM5l3Tq+0CMqRMbm6i0um5YrVcJXBIKXE4nSTFJL+9heW0Hx3fecJd22MHE/J5AX9gCo+RBIOHl4P0Ne21fQ99Mzd0ByAvbnQf39DaZVKjqJ9796raiCB6LVbCSB2NJ+jay2/bBwh4GrNSaOLyyVR41exK6BZVdasrsavQC87Krv90WDn+Qc7/peP2jQa8gMYB6TFhh24fI5i6mfgSX6SsCRmz20odr2IL6rtov+nNPtH4wpobP73DKd4oNDvGo1KxtzE93Pg+NHtuIwYjFGJu864Gr18HgeKfWbuWFKPqjCj0GW5RgLZiARkOMlqDgiXhNPHiMlrbuaZuWo8cLFosNfe+TxeiEoiooywlIkZ1mwChF6XGF4DtP17S0TUfTdHRdTwxKjBBCpGs99aZjtWxoG0/X9oQQ8H1P3/s0mlrDZDJhb++I+WzC7dtzbt6e8aHXDrj10pyitCAQUIIoKruW0LMrRb1P+0kEY5K3EQMh9Pi+Rb1P4K4QAvRhsOrBmojB03UNTb2haRq6rqP3MVnamfuLumPBXXxpdsVJ7r51gsMAyeMJmj0ok6yMECOx6+h7n6yLkMBeAOsSndV7T993I//42yMXaZjt13NafwoCjYGrXat0gOuByoCtZb97zLyvDDp6qgz2re58HgbgbOMOg9CFS3hWMaJMipiAXTjHt28Hr51rQTIdmb5F3fVBtrpI+CE7ehovmovf3g1m5Cmf9eJfMo1liEi/QPoldvMYu3oTaR5i2pNn1Mb2TCJmvJfpGp6dioHBmNIR3IcBcPd4z3ocGNyEwYpPgD9w60CKT13oI9t+MvSRbCzJDn31DvJCgHsEOoTSFYliIIG794agSoeniykoJz0cHzUcPV7wG7/2Zd66+5Cu84QQKUrDbDZlNt9nNtujKCfYIrk1xikQqDcbVosV69Wa1aqmbVra1uP7MHoIqoLvla5LQVTvfQ4MbjNqrLU453CFcPPmjFc+dI1Pf9PX8UnzGteul5SVTTwrARXDpXoVgCqx73OgLfGEUROv3YeWvmuIfYf6QFSh9dB0SusDIQSq0lDaiO9r2npD27b0vceH5K2EOASTcycbAqAKMSpRDVGFEBUfgNJii8S7h9DTdx198ISoFGWJNQYfAqH3Wys+pmu2RnDBUhSC9x4fAl3X4X337jp4R5ELD+rW4gG21rVsaYotzTkAd9pvC/Im98Stdb0FxzTAbi1fdrbLn0YufXgg84A+WFrnLNXhZRj5braDx6W1ITAtIlYUYxSRnUwocoDyAgqnSIFJ9KFYVDNnP7Z+Cy27siUKh2303O/bgexpgD8MgYOxk687etAeCR0mdtj6HqZ+iN3cx67ehG6J6dfvQy8Ws9uK8X4+1T4fTPTzgdTRI92C+0VQ36VMdv8m+YpTn5FRZ8P9dkWmisUiQN939H0PZHpZEiG62zGGfpRDWu8qLwa4R6VtOzAJvHzvQUtEK1QtqtC2PScnNYuzmntvPebhgxPuvvk2R49PiHlkdM5SVX1+bXCuwLgUFVdJwbyurdmsNzRNS1239F1P8IEwBAIxIIYYwPtA3/uRdjhHMopC5rlPTmuCKNduHjLbP8THa1y7PgVCimUZi1h7KZ2oKqHv0TgAz9bykhBTbMKHlE2ghtPTDY9P15wuNjRtz43DObeuzykcWFNRFBbVjqgRLz0pCJj/7VgWIabBIQSIUeiDp/aBqauI0RBtAv3eByKCcQ7nCqwrkqUv2TPILoQ1W9cSEYqyYhrnxKh47y/dV5K
183Rw39lqS0UwWOAXLM7hIR8HBHasVJP3GzJiGI+XaJo8UoyZH1tufks3POEHPeVi2NmOHfC5rE5ADAgRiRtMbHKMI2dSDbQSQzZPwKgSTYEaB24PdXuoFKhYtumSkfNQtW3fGDDc/mm77c5Iors7jZKC/BJ7JLRIt8L0a0y3QHyN3TxAmiOkO0PiCqRFbOCykrJWzp2WcRAdf3s3UI/vCu6DQbGbSpwfeIxNfd7ZBOBoZga6PlOhMKlm7M3nGJvAfblcEsJqpEljzjTbzbq5jLww4N60LZie3icqxDDFiEXVUm88R8cbvvz6Y96+e8ybbz7k0cMzzs5OqOsaY0iBTlemTBrbY+06gWKO/IQY0sjoO9q2wXtP33lCCOPNKYoC54oUKIxkkBvS2bau+nhTEVTAr3vWbcd0/hhbzDCuwrgZxoItUmaNo7icUlQJXZdoDUn58pJdOBMjEiPEiIZErSwWa+6+9ZC79x5zutxw59YNXnvlNjeuzTncn2Ctw0fYdDVt4/G+x1nBWcOQnoUKPnh87/EefBB8U7PuN5R2n1ll8NYQgqfpPdYYCleAmDF7Ij0Ame5RQDSDcQIE5xyTySylRsb3MD3eQbbA/gxIOADmO2y6a4XvhDtJcZgBpHfAnXhur6cztucB+3zbeSrOvz9NbK/CmIgJLdI8RJtHaOxT/IrBKFGEkAE+BdvFlKgpYXoDZi9BsZ9A3hQpg0wv2u0717Nr68h5wumiRalCpl0UYo9owIQ2AXq/wjZHmPYU054ivkaaY+gWaGxRrZMhIpcDOBlomTFjZ3eAvXjP9Il/Qx/eZjoNVCUX9t3xVFQhUyhWHFUxYTqZMp3OCMHTNi3ERCMba9mbHXDt8HqywmOk73rqTZ0y3RhooeRlnc+W2c0Aemd5IcAdkhXXe6HvO5qmTU6MQt8bTk9q7r75iNe/9Db3751y9HjJZtWhweJchbWCcwbnSpwtETE7I2+6OSEkKzyEQAxAHoGNyBjV3x2Jh4lJ21zlQbk5m0AT0FrrxmyQ1bLh7bcfc+flQ67f3MMVgUIjE7GIvZyqk4EVRxpAGFLOBINiRVAxRJPaEkKgbVpOThY8eHRKveqo1z0v37nOK3dusrc3Zb3xHJ2saDZrQug4PJhxuDfN1xvSIOjTgBZ8IHhhvdlQr06YlMK0MjgBVY/vewrnwKa4QAokR3wImY7SrCdN2TkwpluLCOVkcmmdDPK0ji1j6sPW6d8F7B0GZnev8aHZ0i35weECg8JgBe4cdScQfX6skcxn7zLLTw4EY64zO+72+7TcrQScbgirt/DHX0BDjwafQH10zHY8CoWgQhSDzK5hDu7g9l9B5q8Si0OCmRAxOzqVC+3bIWRkC3py7hq3v0n0EDtsv8T0G0y3xLbHmG6JaU8w/QoTaoSeqBtUWgIdQf0IrpdVijEWEc1B/d3neHjJBWt9J7d9x6vVHXAf9tnKjqc28OAKpZ0wq/bZnx2yv3dA1zWs4gLfJhqqLEuqcp+i3CP0Lb5v0CCIWqxYJAdRdewnF3GIC+14Ul4IcA8xslwtabpI73u6tsf3NV2z5Oy05/7bS+69dcS9uyecnTbUG0/oFRFH4SzWQeEM1qZUycQbJ3cmhJBBJ1mkYaBYyMEMyLROkmECw7DfxfzoBPaSACvYbMEbggZWy5pHD45ZLl8hBKgmjunU4qzDXvKhVU0DnjFgTaKKiHG8NjTd7qDgo2KtY38+xYjkQPGSZa0sNoFFDQcHc5arBY8ePaRr1qA9H371FpOqxJqA0UAMmsEdeh/oushiueL4+JiyMkwqQ2mFwqbc6qAxD5hxtMyHQG1iMASvefauTZcw0DPGFpSmfE89PE3MCO7nO7c85X3XaNcdgN9mxmxBa5ykJIPVl+i8lHoLMUSsyZ4I5DiIjlyqGMn3x+xyFFu6RvMwoNu/bVuRoeNcKsozisDEBfakpdMFTfMoZUY1HcbI6DmZPLAOO2nuRzRn0J6i9THm8BQzu41
Mb6PFAWoqVNwWtIfBMNmTO4BHHkCHPPdE/4hvoK8x7RnSLnHtKaZfI90igbrfYPwa0RaDBxPR4Amhp+s62q4bPddLqUTYSYUMY1D03fDwPIl28d84hHMeDzLtBSMFKJroXVGLxPRyOCpXMasik4lhOttj//Aas9k+zWaB9hEnJc7kSYAmB73zwJHmAzxJHb2bvBDgHkPgbLHAuJTW571Sb2Bx2vP2W0vefP2E05Oa1cITosWIQY1mc0uxZog6m53odszW985rzNDYRqpVJIOmjoB6cXbarpU4lioIQ+62R4wlauLTNpvElRVFwd7ehOvXK3zXES7JL6umgc4aQZ3BGpNjczEDeqSPSusjPiiTScWdl65z49o+Dx4t2LRwsurwrFn1wmS6ZLU84/joIdE3WBOophNu3zpkVirWJnc9XbPgfaRtI+t6w9lyxcFqyrXDKbPSYU2Bc2lgS/z6Nm9+sDYGcE+aS5lMqVxEij8oGe0vKedLH+w88Odm+jFSq6OdfYEX3vWI0vc865SUiWCMQQwEkqcSY0yDrUt8aupD8bx3wJAFoTvtGwKcA2Scu8vDsDC27P1Y7gDTQrlZQjd1rCcTVn1I1JszSefsDorpJEbSbMvY9+jyiNgs0OYEd3gbe/1jyP6rBHsddbOxjaOxoyFd7zAa5ZFzTDWMERN9ynxZP8Is7mHqI6xfYUJD7Gvwa1CP4BEJIOn59CHRs3XTsalbqsLh7GX7imCtPcebJ09NdpQs57aX0YtP7+NrAHflXL8bkiu2/plgSDjkfaRtOoyucpw+giiTyYRyMme+f43Z3gFlNUFiJHpP4UqccVmP5HtEDp7uUjXvDezwooC7Js7dhABqCMFwerLm7hun3Lt7xtt3F7RNJAab6JfC4pyguzMFNQVAByAfLO8hGBLGmiq6fdB33PthMs9FYH+abAE+oJosCyXQNgZjAouzNacnKyYTy8HBlL4H31+OM4xRWTdNyjQpHM7azAKkDJb1puXoeMnRyYpN3TOdTTnc3+PajRvcvN3SP95wctYSas8mrrFnG9arM5aLNdG3WBPZP1hw68Y+r9zeY39vQigD3no6lH5Rs1w3nC5WnJwuODyYUjf77FUFVVlSulRiYLTWc4kINNs3mq2a0dJLAKAqiCZKKc1F+ceXgULZte62AVB2HpZMbWVQPb9PAgNrDNYUoNC0DXVX4zWM/cdSgDWIZcw/Ho7JQEfJYMUr28k6yQJLzdDxp22EMrfv4qD1DGJEuTWH1yYlPt5gJR/i6OgUI6cYI2M5jqIoqaoSa+zo+agqXdfRtHWKxywfEZsFcX2GzN9Arn0Us/8ylIdQHeRrCinDJSkhj2HD9UUIfbLM10fI2V1YPcCsH2P7FaItoh4NPSF4QAkaUO3pfYqJ+ZjmhYQ+4AxMrDApLwfuoxWdB2rYUkdDpk7yMjLHDuOgPCYYnPP7ni4JCgTEImIxUmBMGlTqZoP3HV3XMJlWTKcTrLWUVYkrCqpywnS6T2GESeHo25b1ekkkEAmIyYaCGcpFMFLGzyIvBLgn8FWIBtShalmvAw8frjh6vGG5bNGYrFcRHa2RkVMfMzwC3m+DpNsRmxHYR25wx0UdOsEusF9s37Bd3nvr7oWtu+q9oWkji8WKo8dnzGYFBwcz+j4Q/CXBXZW67dNDGRVXJA/FWkvvI8t1y70HJ7zx5kPOlhtee+1VDg6vMd/b59r1luM1+NMeCYpvPTEE6k1H3UaCjwiBk7MNj0+W3Loxp5hOKYh409JEj4+e1aZmtd6wWm/YbFIQ2jnHpCpxGTBV8wxVAhKTs57/S4+NpECwGoOa9BCoWpQhzevyMjy4WxZ0+2/cZgD0rb+8tdLz380OuCsmxW6s4oxLGVxNy3K1JMjg6QnBprkW1iTqxuwCQLbykl62U8RT99mpybLbyPNXNfL9lxEryvUp3DkoiVxnZT1iCjqfAMsYwVphNkvZGVVZ5aJ2qW1N03K2OOPk7Jhw+ph2vSQ
sjtDydQq/oTA+3arpXhqsYgBC8gTscFGJFxY89GukeYQ5ex2Ovgirxxi/xkrIg31EQ0z1noAQPH3fsNms2dQbxFYYN6EwMCks88oyq95/fOYJcNfBo9py7qlUx9YoGGoIDabA1ufaAn+i4LKHKhYxDiMOZywxJA/E+5a+b7DlARNbpdmmooTgETGURcWkMMzKkuXyDOccfqCDJWbnKHtGur2mrxpaRoxQTaa4wnJyXHP8eMndu2fcfXNBvfL4Xgihx9qAsZom01gBScE/1TSxp+1auq7PtVKyLsYbOgRMknu09c6yi2oGnlW2VRNlO1rutDa/DcCSKQmGkgiRs9M1D+4fU1UF0+kUW6R4wGVEVWm7Lg0aweFCLsqlcHyy4otfvs+v/8ZbvP7GQzabji4WlLM9zuoOL4IncfEaUvDVWIfagiiOPqRMiqOzmnsPF7x05zYvxQllIfQIq3bJ8WLDo8cnLJablOeuhsKUTMsJ8+kES8Q5mykK0Ki0bUvbdSOoISb1SYUwBLiJRIWgkaiXT2+DHeDeuSNPgrueA/eRtotpssxFDlqBsiiYVJau7dmsVpydnbFYL5nMpkznU6qyZFqVFIUZj5+mmGw5nzFCIzKC1zhRdbi3+dMuSTMGa83leZkSz51ixfVJSVFcwx9MuH37Nh/5yEdYrZYsl0t831FVFQeHB1y/fpP9/f0x5tT1Pev1hpOzUx4+eMDJ8TEnZ8ds6hV+sySePcCV17CHOmpaY0D6GrpT8C34FgkN0q+Q5ixRMfURpjsBaYimz2m2PhljMdWRstaCDtWeBKGgshMm1ZTSQeWgMALhcnpJNMqWVk1e1pAwsZ26P3DxKbNuW1RsTJzAp/FsaOPW/EckeXqCobAlZVFRFRXOWZqmpu87nLOUU0c1r5jMqzxkeOpuhVs6VD1WFA0dbVejGtKAvLU+GAaSbdxgm6r5bvJigLsIRVlSuJKm2fDo0ZJH2WonmlyjJdUm8V6I6jLXl60AAiGm9L40+3Gbuz1aZiMfnMspjaP0dpbYaA2q8mywo7lYU/6mkRCEzbrl7GTJ2eEey2XDdD6hmlwyWwbF+36MEyjb0gLHZyvefPuIL999xBtvHdF2ken+GQmXN0MAACAASURBVIc3T6n7QBMSHx+UsQxyKoBWoMYSVPBBWG0SwJ+sPatGmIpj0xtWdeBs3XK2WFO3fbZ0UkXIsiiYFCVWAkXhcKXDmJQbvbEGIzsPlbUp6BsjXefpQ57VGgJ9gPCPOUF1GxR9igO9a1CT+GZjTcrujwP/vOO9AWVhqaqSru1omobNJs2HqKYTyqJkOplSFpZUJij1O7l4ot125GD7u4H71m7Y5YQvpwcngWu2YeYKZrMZRubcQuj6yMOHD7l//z6b9SrHgebcun2TW7du43LsIITIpm24sVgxnx8wnz3CGsdJVNZR6eoNxrc4hgHNo9qldMV+Dc0SuhXSr1M6Y3OKNCfYkCgYMdDHSK+DZ+1RNagarGw1aIzDWaiKknlRURRKaRWiEt+fHTDKbu2h5FXpyKEPcabBu9tSiIqJSjw3Z2GgYpLBYKVAxFK6imk5paoqrDN434OALSzVdEI1qyinZZoo2Ud837JpligBKwrR03UbEvIMhonkPnRevqoCqkICId8HFidrHrx1zOnRiq7uk0ulmvk58N7Qdz0mB+NSuve2nGbMSf8aL4B7/p8OEzN2J7gw3Pz8bpJHNDx8Wytx65KlzmgoqxTdDiHgY0jlg31KFdQYcEYxEhC95IQdVbzviDFgBcRavPe0jefhoxPuvn3EybKh8eCj4Wix5kt37+Gj0PTKalMTci+Mmqz4EJWggliHRVFjWDeBL989IgRDWVjq1rNaNSxXEbEVzk3RIuKDoWkDUW2atGRSNknK6FGMGJxzzGazccA0LgWavQ+sVjUaFPWpSmffedpLUlXjPbjgecl5iCXf0PMBVpLnNmTDbMF9S9OUzjKtHBsDUQNFUTCTOdPZjKqq0jw
IK3nWY+pJwhYU8lnGnmKGekBDy0eQfxLcR2Pk/QSZgcJKysqyObNFDA5DURZjGebBg7FGsCI5oyyw6RpWbY0Xz/xgxh13h2JiOby2z1GcsDRzdDIh+g30G6hP0fqEuH6M1mepwFfokNghsUH7GvVNSlSIMZWd7j1d8Hm2d8DmKqyFywW11CEuZZiVzuEkA+tAmbx3/PCC7ALg+YlH6fvwLG+9eqORKMmjkCGQGiEaRaMmbFHNazWkeTWFnSAYJuWEvfmconD44CmKgvl8j/n+nMMbh8z2pxTTChMCtoiUAYx6vF+waRu6tmG9WeF9m/qTFYy4sTgfDIHh7Tybr4o8dxFBPdRdx9HDBW+98ZjT44au6bA2uapptB+2N7m0bCL8xjrJJIom5hmS5x/5zFsNMw7zQzfMQEw14iW7ZqkmvN3httJ0+m0PEwRnLdPplKJwdG1P3TS03hP6gO86RAPOJurmWaLbu6Kq+D7VtSlsmuHWNT0ny4a79x7z5buPOD5r2XTJQn94fMaqXWGcQ3GsWwhqkssbQirj4AMRsEWBKx1iYN3A62+e8ODhJgfaCgRDFw2u3KOapqyK3guLVcum9vhoKEtLjF0G95hmoVpLUaRgUZpabYl5fkHfJc4fDfje0zQ16+b9lh94d3l6nx/J70zHbIHdGrBWmFSG2bRgVVqss0xnM5zCdDanrCaURRoURUOiEHTMj8knPk/zaP7fwEaTIw3jLICddg5u97PWK9kVI1DZNKHMGAcaEGspTJ6YVyRP1xhDWZYUzmEE+hjoQseqXXG6WWFUcNOC6/PrTOczbty8xbRWjlrDWoT16j5xfURY3CPWjwnLB4TNCU4DdlCmIRtZaTanD5GuTxZ7DLkKq5LaZoXCZD0aR1EY1Kb+lp2jnZjF5eXpz1ymX2H0ksYECY1pFrZaRBLeaEzZb9jsjZIX4zGOwpYUxQRnLLPpjIP9PYyBumlQppSTivneHnv7B9hKcmeJiAtYC75t6dqG1WJJvdmkEigxABZrSgTHUBYjdYt4qf7xQoA7CN4ry0XN8mzDalnTdX60bIYFDmIE75W29ZmvizlYEsfp7oNLxY7FtOu47yYqjJ8yeMcoIBHnDFVVUE0qYlCaJnH5IeaoC8m6t9YxmUyoyhJIhblkdPnSjUjlOsP7mGov2XUd6rwYNk3L8emak7MNi1VD3QW6mGrihE1L3UWKssLakj46VAu8lxS8CjEXGUtpmoUbFv9Qmi6lPloxFIVQlmmmrisrDNCgdD5yfLriwaNTrh9Msbf2sEZy6eTEE1orSJELUeXaND4afDR0Xmh7pffQR2XT9qzr5n32l6c8tCO+bi12Rmttx4cbaADJvDzgDBROqAqhLITZrOTgYI8olroNGONG/2CbYfNUf+FcczTXjkkJWk/xUnaCwuNRd7yAZxURcHa7wMNYbVBT1lXwyeorXMne3h7lpEprAGiyjL33NPUmb2dwboqTgsl8jwPr0cKjZ6esHj8mLB/hV/cJ/ozQnOLbNRiHcWUq3S2DX6O5VlMg+rR4SSGCOJc9DYNTQXwGzUiaEyyJ6U6cto6c8/uRbWmA84F3JMVJhrRIzcA5ZKQQQY3F2iGtepvaGlUQXJ49n3CjqEpm0ylFWWBEUa2wRZHrN8FiscQUgEuxQQ19WmMh9ETf0/cNKetuKI+9U1U04w26NTS/qix3EDbrjocPTjg6WrJetfT9Dh+Wy/smAFbIE26sszkQmtMdwy6H+bQHb4djyTyNZvcNBWsN5cRxeDDjxs0D9vbmdG3P40cnHB8vqc/RCMmjcNYm7rmzo0utYypmrr7Y98RnZPEH8SFytKjxvWffWwITjk5r7r19yqOjNYtVS+OVLgR8CGjfoxqoqkBRRFQKokSi9vhcSsEKOGOwNtWDGbwRHxQfJNd7VMTCpCooy1zuwFhCu2Gx9tx7eMLBfoohzKaSyg/7ZKkVTghYTOih9USFpvXUbc/jowWnZ0tEoPOe41XNYrm
8bEd5dpHtrYZtfCWVrB1AOgFJ4dLs29KlmZ6FE8rSYZ0DT07b3O6TD5/yxCXTLE8Zb7Y54fk7uUrpYOvvgPvQB4fMjkteKs7IToB4CArGcTJfUZTM5nP29vaoqip5Djn7qnIVs3JKS0vvFWKknDhm0zk9Z/huw2Jzj/j4C4TNCX5zTNANvV/T9z1STLEGEIuoG2nAFIgExeaBMbVxoMSIsr3uoZ7NuXuW9PM+nJmnyC6lasZSHltPKxkkW/fDJJduKAduLUZ6QhhSHh3WVFRlxXw2ZzqdYowQQ5ezf3yqgOo9fexxlWDLROal2cPJizVoGvhcKg6BWqKmYmKQsERlZwWnHa79qwLcNSrLRcPjR2csFhu6TnNk2+xc1OCeaU51TB13rFvCdiWgba3vncJBQ42Jc/o47/OVlePw+h4fevUWr712h/2DOYuzVI2urjuauhsnOQzuXEq/DOdq1KQJLyHd2K4j4omX5Nz73nP3wTF967lxU+iZ8Oik5sHjJWfLhrYP+BgJ2qc84RjyakeKMZFI+r3zpGwXoCwchU3BzxjSvDoRi7UZtaKiEgjakaoEOspighFLbwwaa85WLY+OFhweTlEmFDamVM8YCWrxagiho+k8des5Wzes1i0np2csVxuqSYmxwtEiTY56f7K1dncYkR3+XJ4C7oO1c2FbwDnDpLJYE9HQEvqGvm9p+1Qi2QU7BrRlBOvhBO+NPLuZPOe2Pvdw7nDClwSzlFiRH/rBS8nAGEPyqmazKXt7e0wmKdcaNOf1O2TPUBUVXdfS9mlJysKVTCYly7OGuH5M3DxA6weor1FqojaEnKseTCC4iMmeZghpqUZwYAsEC8EQYvZiBtAXxRKxhPF+6IXr2tXNZWUw+iA/5ju62a7GNNyXnO4ojGsfD0AvWGx0SDQQBWsLrCmoqinz6T57szllUeL7hrquqTc1q82KTV0nD94Ik3nJbF7hipS10/tstBqhdI7COhpiKkEgBSIusw+eyHYlr6+6SUw+RB4/OuPB2ycszxpCGCyjvMKckzQLNKbp8ckaSR0kZSJu81C3AdAdwDp3Nh2fbGvTGqmzeUU1Kbl1+zofeu0lXr5zjZdfvk5VFhw9LqnXNSfHC05P10O6MqopP3dT13R9PxYhg5TZ07Y9TdPmG+yJXA7cN03Lr33hTcrC4aVA3Jz7jxa8+fYxx2cbut7jNeBjhw8+r+hm6H0qE5xK9SZaREmWupE0uzdxhhaNyYU3RjAW0IgxEaRHKYBUlrQsS2JhIaZSBes2cv/xitPlGmJH07X0IaT0tXKC94FN7Vk1nrNVw3rT0HUNoe/ZO5hTTUvO6paT9fujZWQH3J987HX8XXb2SG/p4U1bpMJghoizkdIp0Tc0fcfp8YqjhyuaUICbZH3l8+6s2DSCxYWzD2ccCAFBEE0UzbkVOM9lQjw5ID27KEYDw7J+ImnWt+/7bAAlrn0yqbBjRlNazAJjmFpLVVQ5LzsN1EYMrigoTED7FerPUD1FJKLG473SxwIvFS37xDhHKNHoaPtA0ytgicEQ8yS+4FOiQ/I0lNIolVUm1lBYpXJKYRVn0qIjIyXzPjn3bfmQ7Q0bPHUdGZ+tITjiBprB3+Z4Ulpb1eSsmLKsKIqSspxQuIrgA6f1KU29ot6s0toJfUOXUyEnk5JJ4ZiXZZrJHnpin4LNagRsejYTrZwKitmioPNNKuHtw3aOzvj6KsmWCT5wcrLi5GRNU/fJPVJQkybupEWwHd5HmnpYGHrsy+dduZ3S6SK5NsPF0TunL9rSUZaO67f2uXZjnw9/+GU+9vHXODyo2J8XiY9uCg4OZ0ym6SGP+YSas0A2w0LTymi9hxjo+j7lfTdNttwvR8vUTceX793n1vUD9g+uM980nJyteXi0YLVp8SHiSTU4QvRoTNaGDx4fA94ncBdSOV5nHc66EdhtXq4wBs31axJIWZssf5G03KFxjqoskNJgKShMJBrl+Kyl69bU9ZpNW9P5jtl
szmQ6JXqlaSOrRllvOpq2oTCBwimFr7CxoPWRpnt/2TLbwmCaVtZCn1h3ebuiTfo10ZfbTJmh4mFatShiTMD3a7pmxenxGccPV5j5LSYHc6xNGSZGYrIGzZbDvShbY1PGaGA6TxznoYz0gzDyvMNEmfcD7qnUQ57DkWM9PldBTevNGmwuQQvneVtEMGKwYlIAvBwCh4LkzBrEg+kR06fs7yD4aOm1RO2M1hzQ6ZwQS7pYUreRulNCAN95+qanazt8H1O2B1BYmDplaoW9iWFWKocTZU801+/Z0g6XTUbIO43gvluuIubZSmNCRbYGhyyaYW3ewbpP9fEDIQjOOIwI08mUyWSKMY7glcVqwenJMXW9pGvrNEHJpBhU4Qyz6YT9vTl78wnrzYbowxhcNjF50cEmehMc8/ke1WTKuj4lrBv6cQa4jNf2VZPn3nU9998+5vRkTdsGBItIxDq4cWPO7TuH7B/MWa8b3n7rIScna7rOM6kmFEXijlPVx5RdM5lV7O/P2dub5gkTPpckUDCCcRZXWGZ701T/5eY++wdTDg/nzA8ctvR4QrZeDJNZlaZtW4spE2edaNg4FtgXzi83F3wKovZ9nzizS5YstdbwoTu3+Sc++iGsm3B6tuDx6YLHp0sa32erPZwrs4BqXmtWc1DG4YoyLQ3oSqqipChtnuELwXuCTxaJNenvVWWxFsqywrmCqiyYTMpUBkHSSjlNvebk6IzTs1PWmzW9etTAQZiwFyWBexPZ1On4gnB4MOfafsX8YA/jLA8eC5u6fZ89ZgvuSWT8fTvW74Q9ZajWpwzLxBlJ+cVOAkY8MXjaesFmdUbsGwqrRPXE0IO63CfNjnX3Hi3cCeaaHGBWGSa7DxThk6D1PliZbPHv5HGTvjvn2N/fx1qX4kllMlhiSFRB2lx3BkFGXnwARJOnwEcp6MwhncxY2QkLq6y9xYcS3xUJ7IPQ9dC2aa0E33V0TU1o21xfKTDM93Q2AXxhDbPKMZ9ars8t+xXsl4FZEZhXwl6ZPctLjnqqSp8Xg9lScjqWzEDJdWNM+meGuEruNZIrSubBwTnLpCiZZKvdGEtdt5yenvHwwUNOjo8Q8RhRjDXMJxMmkyqlwxLpu47aKG3T5GfOpDWfJYG77wLG2JR94yxFUTBlTts3NG2TFlxPF5ZiRsZizbuX73ghwL3vPSfHC9arFt/nBTNQrIPDa3u89pGXuHnrWt5mxWpd03VKNanYm88zDdKimlKM5vMJL925we3bN1D1qV5F8PQ+gDHYoqCalBxe3+fatTl7BxMmU4e1IC7gtYdeKWyBGMZcYWssYouc7mcTH7YK9Lly3ZbfyyWDQ8rxTblel7M+Cmf5yKt3+MTHPsrDxyseHz1ksVyx3DRpQQziTq3p7fmGSU+JS0/WWuEchUvXUBZ2pBhS2mifgskCVVkym6YZmM6lyUlVWTCtUlqjNYKGwHK15nhR8+h4xWqzQQqDLQvsxOImBdEHmj7SdooTqMqC64f73Lm9RzWd0IWIEei695sKqTvv7/7Qy84rfU9UTFokOlEyhhTg6tsNTb1CFKrS0ZCzG2I5nvNi+uJw1DFOv2tlDu4+Q1zo/Cv9/UK/eH/08hODgphEq8znexRFiWocF42IMWLIkzmGUrW7WToX2pAqwVe0HFCbmyzNIQtjWarQeqFpPG3n6ftA33b0baRZN/R18oRi145r/RpJurcmTSrDFFTVhL2ZYd0nK/76JLBfRG6pMHUp39tesnCY5nki5/n1VA5jBPdcDmNwsraLWJ8n9IxNfX82m7E3mzOkYi+Xa87Ozjg+Pub05JjJxDGbllhnqKqSvb15WhIz9PR9S8wTLVXzwkB50ZlULDFibUFZljn4nwyswpUMqQ4D1WcGj+s9gu/vCe4i8teAfxF4qKrfkH+7AfwPwNcBrwN/UlVPJGnlvwK+E9gA36Wq/897nSMGpWsUKyXOKgGlLC37BxXXb+xz4+YhBwd7dK1nOptQVQV
d67nz8g1eefllVCPr9YZ79+6z3qxTAGNWcv3WAWUlqISk4OAT6GWgm+1Nmc8mFGWqAChmsKpKMAaVAmMjVZXKI5RlWqnGlY56s2KzXiEI1XSSOPc+8eonR2s2646v+9irGFdiLHz2p34J4BtE5HPPopf5dMJrL90idoGjozPeuv+Ik+UquWiagf1CkbNt4Dm51MY6ikyrTKuSaVlQFMIwu9LNS2Re4ayjqkqqsqAsHVWmq5zNA4SkdLm669ms1xydnLDabPAaMWXi850r+bVf+DscP/giZTXj2//QdzOZOQoT+Qd//yf4h/WS27du82e+688TfKTtau5+4XI62XbKp3yX85+HB0RgjHtuQ2TJcjcSsFYJvmXdrVmtVqxWa+pG6H1BjyC2JPgiA7w9T8Vc+LjrRwz1uH/4x36Sz/2/X2B/f8Zf/A+/CyWyWjf84H/9kxwdL7h5Y58//93fyXSWytP+jb/+We7de8RldGJEsvGRA4GZkwXBOkeVJ5wNS0Om1bGGtEC2CoOcnx5TzKjvebAU3l7tcX8TedBM2IQpi6ZgVSubTU/XeromTcLxXUvfrAnthr5eEYalIENAYkByTCcc/TJd/RixJdWH/wBdFFZ9x8Nf/b+I/YrJ7IBPfeYPUPeBrtnw87/4OV5/4x7Ap0Tkm55FJ8MM71Ty2G6pmWH9UtLncZXDXG0k4eU22GokTfhy2UBKlSZ1LE44ULHGGorCUVYlZVkwmUyYzaaoVihpQpzNC91cTGfs25a+S4Hsuq4JamjaHuNSdUlrHOLSvUr0qmVYvu/d5Fks9x8BfgD4sZ3fvg/4P1T1PxOR78vf/33gO4CP59fvBf5qfn9XCSHS1BEjJc6lokSzecXNmwfcun2NmzevUVUVy+WGyaRKEzGKnpdfucUnPvFRjBUWixU+eu7d86goReU4vLHHwY0p5cTgQ0cfeoRc911cSmF0Nme8pNxLVSWKIeIIWiJWmUzSZKVqUtJHxTjhzodf4fDaIb/8C/+I+f4+y7Mlvu8BOLw+RwR+6zfv8+nf+0ne+NJ9zk5qgF8GvvdZ9DKtKm7s7/HgaMkbd+/zpTfvcbru6DWVYdDoxxmGmjm5FGQeRvU0oWhSVcynFXvTCdOqwFpAAlEk67FCQ5p1Z42hKh2zaUVVWUpniAp9H+najtOzFUfHJ5yendH0NRHFlo7CpsDSax/9NJ/61D/Lz//M3+BwYpkUhl/63N/nd37qU/yJP/7H+Duf/d/5Pz/70/zTn/kW3nrjCzT18lI62cpFL+hcjsVTt7+4gIcxirMBZyOhqWnWC5aLJev1mrozNL2l04i1JbGv0DiFc6tpyYiL6RS7FrtgjSFo5Fu/5Rv59n/+m/hvfvgnc90Y4W//1M/xyU9+hD/8Hf8Mf/t/+1l++qd/nj/6x76Fz//KGzx6eMIrr97izTcefM+z6kSMyWtxDiHDtFZt03WMAF9OKEq30/LBgtW8qHoyGPq8FNzxouZ4ueG33vZ84dGUt04L7q722LTKqu5pmo627umbBl8v6NsVoV0TNgtiuyF267RYSJ5MaEilj42JMLmBnbxMf/rr+Lom9JF6+euU05d56Ru/leVbv8Cv/Orn2PyTn+TLiy/y4NExf/qP/35+4If/ly8/q040Rvq+PRdvkGHW2bjKVqJeUsxGs/dt84TGBPDOWpy1lGWZdGzSvJGBDgUwmfKaTCfs7c2ZzSoODvaY781SLGs8HiOwD1a3iLA4XeD7mr7vUz2pTUtRTZlMJ4hRnCuAdH+rqqIqy1RT6z049/f0dVT17wHHF37+I8CP5s8/CvzRnd9/TJP8A+CaiLzy3udIky0QwdjEh8/nM67fvMZ0Nk0VEuuG9bqh7xNfVRTFuAJ4muo759rhIdPplL4PrDctmzql9BXFNL8SjzyM4iEE2q6n73tCSOAoYlMtm7yItIhQFC5RM5VDNdD2DfP9KbO9CcZart84ZDqZ7OqM2d6Eh/dOsLbgNz9/j2/65k8
Of3smvbRdx6//1hv8yue/wJtvPWSxqmn6nsi21MKQJjqUNR4CcpL1M5tO2J/POdyfsz+fsD+fcLg34/bNQ1595TY3b15jNq8Qo9lt7FPMYlowrZKVb0TRGKjrmuVynbjUEFMFPFtgTIEVi8Vw++ZrzMoSI8Lt6xWvvDTni6//Ot/+bb+Pwka+9Zt/z/9H3Zv92ram512/rxvNnGN2a6299j5NVblcTuwYOxQWgpgghAThAiVShIQQSAghpKAQpEhINFfAHdwiEZDMHwDiBgckIiWCRLLlRBDi2E7ZLqpc7Tlnn92tbs455mi+hov3G3Oudfq9qyKOx9E6e+/VzTG/Mcb7ve/zPu/z8A9/+7e5vr3mw/d/iJuvXmtN7t0xPPQonbLyyT7i9JHU9KHypq1BCaZZFE4mK1MkJY9SQnOTIDeSkDH5qXw+/TdVBPf+fe/rUnJrtNL8/M9/jWZen75HKX7nd/6IX/3nfhGl4Fd/9Rf5nd/5Hkorfvd3f8A/+6u/NEF8X3xNkiRI4zgSU8IYzW635cc//hE//OH3efrBBxy6AyRFCJKVt4ee213Lq9s9r67vePHqmmfPX/HDHz/l97/7Y/7BH/6Y//v3n/Kt71/x/ac7nr/as9t19IeONPaoOKKTR6VRehPx5AUghvBWPpKWAJ8iRokpjHWNwCIkmc8YB8L+PUzzVcb+QH3xJzi8/CO6oHjvg+ecX17y/osrgP0XXZOEsNlOH0IXnvpTcYIyoyRL8fhx8n9QWiqishLpCRSyxrlJXZYli+WC1WrFer1mtVry6NEFX/nKVzi/OKeuSorS4ZzASveVZ62dzLEVISSGfqTvBrpDL+bxOcPXyoggWSFB3RkZqEshZDPtTz/eFHN/nFJ6CpBSeqqUusyffwf48b3vey9/7ulnX4ipzJF/a62pZzWbzTprlWj6YWAYR2IEYx1lhQhTCaeLqi7YnK24ulnSftiy33fc3OxZn69Z6BKdQCcJVCkDnnKR4zGDMUo4rSGKx6RxU1YERWEoCkM3SqlWzQtW6wZnDY+fXHLYd9xcXx93ZGMMfT8So2J727LarO6/5c9dl/bQ89t/8D0+fHbLi23LbhjwZAJGjMSQb8p8w06tROcKXFGyXq24OH/EernmbLGmcqUMJllDURtMabjb77i+vmEcZPS5LObUVcFyMZeGYhwYfZYK2AtsMXqP0lr48pl5NLcVlStoKoNNFmMUTx5VLGaO3X7HW5dr9t0eH0butne89/QD0a0uF6+1Jp97fGKZmmcPckjWSmEszOeWeaEIQ08cPNYEoYNqQ2CC8CJK58EvddR+fPDnA/R/gqwnRo5W6KiOniQ6IyZ3dy2bTUNSifXZnN22RWm4vdlxdvb6a5IQiWgfAiYGEobt9o4f//BHHA4d1lXs9h1Ns6DrOnHqGj2HYWAcPd5L5TqMA3fdyE3nedXB3WB5/8Nbnr/c0veRvs+ORjGJANw4EsaBOBxIoYPoMxxYkCwkHUjKo2LAqBGrFSDaMkwsNt8Lyyf0Ate1W8pZQxgPDElx6HoOAT54ccwvv9h9klJWoJRZGW2EYWfSvd7Y0WwkyrVWRhJNlXBKmGVFrn51BuZTFNN3Zw3zWWJYisNbXVkWi4onTx7z5Mklo+/Z7e4yoePkCjcM47GaiFlzpzvINRFxxHtDSuis4+SIcSQlTwqR0Qf6YfjHFtw/7fjkmviTvlGpvwT8JZCyJiXpshtjcE4GawqXdz3rsDFhrMNYi3EWl4ecQkyMo2Br9bxisZjz6sox9iN3N3v2u4HVWkSeYlYinATwldK5C535C8rkAvJEW9Sa7CAkD+98XmGrgovzNWfna4y1nJ1vePXs1YNlmLDZYfDEmBg/LpL1sXW5vyZVNed7T6/Y7Uc6H0XLMkVEMT0iuPnUIzhN5taVjJg/fnTOu28/YdmsWDVLCuuorMgOBB3wjAyvOl6+fEF/6IW/65Y0jWhkgKfvIzG07PZ7usNBJh2NxphCBlC
0wijNal6xqCvWTYEiYq1iuXCUTlb2MBx49vKal9e3xBj54Okzgvek4mNL//7VsAAAIABJREFU8Ln3ynq1eICn5+W+h69PdMITAp7u/adRuMKwXFqaKtDfBdIQSEGxOygCiFFEyvh6SuJkr/QR2knHKH56makpd/+NqESmaKrTN3Fi0Tx4j/mHPmHq8HPX5N133j5WEyFF8CNDN9Bu97x4ec2+7fngw1cEYHu3Zbff0/cDfT8wZF77EDxDCITCoZsGtznDNituDk959vwpoRvxrc9rkgfhkiJmFofO/R5tDEkZoi2Exx0CyXtgFDpl6lFpRI9j7oPIxCYk4tAxaHDFxPmWTSDEyGF4EMg+d01cIYNak0uY/NTUcYkZjjEoDKRJ8pvM/08kY9EY0UzSIpmglUyll0WRJ7wVpeuESacizXzOYtHQNA27nVQIsnkOsvmOnr4Xhpj3nmEYZehpL4HdGEtRONCawlmsy/0RA37sGfqBmDLWnwcnP+t40+D+TCn1Vs7a3wKe58+/B3zl3ve9C3zwSb8gpfRrwK8BFIVLCuniix6EdK117viXVYEy4Cor2uiFISmFKy1FZbGlwVnFWs/Yt0uub+f4AO2hYxgCKWlizBKzSWhEzgm1MWV+egg+N170adovGYJHmqU5OM9mM5r1QoZxSofWMtnqCnvM2mNMDP2Ac5br6ztc6XjvR8/uv/1PXJf7a1LNN+nDu5boxXHqCMdE4Z9PtoFTaakynevRo3Peefst3n7yhHeevCXuO7YUHrO2GK253d/R7vdcX19z9eoVVVExX21YrxYsmpk0rAfxRm0PHbd3W/phlKy3MFnHBOrCUheOi/Wc1byimZm8QSvKSlhQ8/mc3/nD7/DqruWH7z1FGcuzqxuCMiQ/fO6afHRd3n3nyUfnfrj/70/4aSZuNyrinGZeJxazgabaYoZXDO0Vne4IaWSMEHWBtpCUJWW1S2vMA0jmk15FNp0jbSaf0qnxNWXuq+WM29s9q82cm+sdi8UMpRXrs4brq7vXXpNf+uVfSteHiDae4XpLf2j5zne/xx9+5494+fKKq+tbkrF0/UC7b2XIxucsHERSNwZ88BTNnMVbl5yXmnpZUIzXcPchoR0Z9mN+7TyHkvUxZdJwCpyTHPe94SCjQRUk5UA5wEMpGLcuS6kEdCHWe1ox7Aa0K4nDHuMc/aHFu2Oo+kJrMmuKqSCXfkIkJ20yR5OI6GRIWEw6aUEpbUjKQhKWik7if6q1ztk7+CEwdJ7D/kDf9ZIwWDm/YRho2zZXuiLPMIy9TK3n2ZcQBNYZBk/f9SgsRlmJdWUt8wXOYZRAS34MtG1H1x2OirfGOowuP2kZjsebBvf/Ffh3gP86//nX733+P1RK/U9I0+N2gm8+75hEcyafRxE1imijcKUjmYQrDGry4kyKonbUTYkrNIVVlJVlv5+zXjVs2/441aXUpB4pjUNnHIUrsU4aZOM4MgwZV9UKnbLJdlL4MXDoRsYxkKI0NJq5DLaI3nrKWGfEFY6hH0gxcnd3YLGec319y+Zyye/9P//v9D7/zBdZlwQMSVxbVIy58TUF+HTE3KeLrYxQyy4fnfNzP/s1Hj+64PL87DgZSJQ0U5E4HFqurq65vbmh3R9YzBds1itWqyV1XaCU4LeHLO6133cotFRUTkb1S6s4aypW84qLzYzVvMAViZtbeagjiV3f87WvfY2//Zt/j9n6Ed/59h/i6gVt76GaE7fXr7Ump3vl9OcD+t4D3ORUzSiVkOEeTeESsypSuhanX1KY56h0zTiOjMERYgFasiaPET6xMYKZTr/6GN3TvU8Irv+peh9HQorim//Uz/Fbv/X7/Kt//p/h7/7WH/DNX/kGSin+yW/+LH/n//xHkrW+xpr4AM92EatHrp8/5+r5+3znu9/lW9/5Dtu7O7a3W8YQGfNcQ4yJiKYoS1whwa0MATcOzEbLxo+cp8CSyKsYuAoj2+AZw0iIuToIkxLraSlAhMO
U0jnga5QuQFmSKYjaojI3O7kgX68bVPTo7jGxf4oufpb+6vuUzSNCt2U2b7h9+QGr1ROA+Rddk6n3JLtQnoiIURzDiEQlUEzMCV8iiXR1jGIIl6IMyCWFSkZo0QlC9Gx3Le2u5XA40LYng43ucODmWuS/23bP9m4vHhN+JEbPMAwMw8A4ZjP6IJIpdTXDlvbI4hNoy4jukx/oB5l07/seIpks4Sir+jPX4ItQIf9H4F8ELpRS7wH/BRLU/2el1L8H/Aj41/O3/+8IDfK7CBXy3/2833//Ykyu38dAr7P1mZVgYZ3CFZpq5gDNaj1jtZ6Dyia7KNabmsdvn6Oe33HoRoa+pzt0RAYSAW0s06BCDIL1H1oxZgDZ6UM26U5B0d51bK93vHhxTd+PookeEn/nb/wmVy+u6buBv/Hrf4v12ZKicPRdz27bYZ3hG7/wVZyteOdrDS+e3vDsvZe/BPwPX2RdZNJ1zCIcKTf9UlbIvE99jIQYcIUo073zzlv8/J/8BuvFnHnpuL3b0e5HEmLeMPaB6+sbXr64YhwCy8WSy4tHPL58xGq5wFlLCJG77Z5nL16x3bZCDi0ss7qiqh2rpmAxszxaVmzmBcvGUpeK/+q/+Wv87rf+gLvtjr/6H//nfPNXfoXm/Al//+/9Xfru26Ad1aOvyZWqlnDYQ99+4TV5eMNwwjLuZcfqIx95NTE6YW1kOdecLT2FeU7y75HCS5T22KJkvjojVSv0PhJ2EbShmTei2W9EPvljhPL7/MfjP6cpT/jvf+1/4Q+//UN2uwN/9T/67/jX/uKf5S/8hT/Df/vX/jq/8Ru/x9nZgr/8V/48SsEv/+mf4Vu/9x7f/sMf8TprctMO/K3/6zs0aiTuXzC0V7x8dSONdmQ6d2osin6KnGwTPash8SjBZQSbNHMsTXIs20j1cs/mZsc72x0f+MQPiLxUmmsSUYVjQx/SA6tYCeoqB/keMBAkuKMtw7N/QOxeQBgY3v8/cOe/SPX4l+k++LsMz34D4yo2X/lFVGhZLOeMbc33v/1jgK8B/8IXvEOOG+1DFcV05E2J/IZg7GQT84nTbo2lcDLYZ5Rm7AcOhz2HtuXq+prtdksM8v7FH9XhCtnTQpS1jikeM/mT9pXANd4HVG7su6LCFQVKG/FgCIGYJZKP7maAMY7ZomG9PuP87JzVas2v//rf/NT3/7nBPaX0b37Kl/6lT/jeBPyVz131j78KcHJnn8rb4991Qul4tNirKpGkbZqaeVPhfSuYlVLM5o6z8yVtO7JvO/qu49C2KOMxNoiXZ0IGjLxk3btty36/O6pO+hAZh4gfI7dXO25e3nJzdUPfj2I6MIx84xe/zs+ZnyGFyO5mx8tn17S7A1obqlnBrKmoqhl1PUc7xz//r/xZvvutH/yjlNI//UXXJIYhB3Z17D+lyL1G8MkM3GhNVVVcnJ/xzttPmBUGnTztfgtpBIy817bjbrvj9nYLSUnD9eyM9XpFXVcolfDBs93tefnqhq7tszaJpZmXzOcVj84qzpYll6uC9cxRFRGjA//lf/KXub7d8/6za7713ff49g/e5wcfvqJ49HWUF335oHJ5rBJ6/YSwv3mNNZnulfQJf+ch6A33sNaE0ZHCQFNHFvMBk67xwwv8uCWpGlvMaOw5Nj0i6JZD6HDOHg0YHgbvh2fz8M8TDKSV4j/49/8i9426yeyd//Q/+zdI2YAhIUYQSsO/9W//Ob7/vQ/5wQ8++OUvuiLbw8Bv/sM/4jztWNqWmRM8va5qgpcmHqgj9z7kqrSJkUfjyJ9Imp9HU2OY6YKSgqKNaJ+D+27Pd7MP7qgdd8fuA6f7Uh0xEBSR7DgNaUDG5i0oQ1IFZvWnsOffRBe1aB5ZgXUW3/iX0aHDpi027dFpxFr4xj/xC8znNX/7f/ubv59S+vtfbFXUx4L7wwuXuVVZvkJYhQmMyDFYYyhcHioC+u7A7c01t7c3XF1dsd1tM0ZeiDyKK6kqS1EKn13
p7LkbhJAwST2chqqE4lgUFUVRYawDpRi9sGTGUVh8XdczjCNVPaOuS1brDe+++1Xeeustzs8ffeYKfCkmVI2xNPOasdsKphzkBrEaQfVSzMMGgoE5nSicQyuIfsQP0ulXKIbBZxjGkAIMh5F222KLhKvEmDf4QKf7THmMDP1IDFlHOUqXWqsIaRTq2DASgaKqmDcNi+UCGU32+GE8QgPTWLl0xQe22x3Xr26YLRrq+ez1FiWzEo4B656e8wQmTuYhWmvm9Yyz9Ya6rKUpnBQhl3BaG8Yxsdu3vLy+Zbvb4f3IoplzcbFhtZxRFhpFpGt7trs97a4jhSTsmcawmJU0M4fWI3H0HHYHXnlNu81rRaTtPa9u9rz/7Irvv3/Fi5uWro+kZBDnqwwN5Vinc8vs9Y9PCe6f8d2zecFyYVitEnU1sN8eOPQQOGO+fIuZfYebbgldgd2DMZ66rKirQpRfJ8W4T0FdPvp6073wEKW/F+Snb7xP6VT3rvdrHCpGKt9SqRab+uP8wzRkM2WYkqZC4wN1jHxDG75iDF+dL3m8WKHLgmKzwV08whYljIFNM8MtavY+8L4PfKBO1+8T+x0f+cJEAZ02tkiQlUmA96SoIIDMZiQMIglhVW59+shwGE4+tT+V49QfUCoHY4Xw8DForMh1FAVWT45VA8YkZnOHDzOME5qitYaqFtqwMYoYPYeD2H1CxBhNWRUYbdFZ2ylFxKReW6wp0MaCEm+E4zWLEaUNi+VSSBtnF6zWG842Z1xevsVmc8ZisfzMd/mlCO6Fs2zWS9qtmMpOgb2wGqOFI0uA5BV4jVWWuigwKRKGnrEfGPpB3Ib6BEmjkiGOinbXcXt1Rz3X1EnjlQwrifyoWGpBVu1LCpUQByYdUAg25oNHO8u8rlmulqzXa2IcGYeeLinMJMhlNNaKnVkIgd12x8sXV1woUeV7nSORxJYupePD8gCOmbTtkSxjtVjy1uUl83qGiorgJVDIDWUYhgPXNze8//RDbm9uGceepjnnnbcvWK/nOKtI3rO/2/PyxRWHXYdVhsWi5PxsxqI2OO3Z724Z9i1+D3fCQWLwiSFEdq3n+U3Lh8+v+fDVNbuuZwxJzBxy2ZuS6FfriQ/9uoc6MWKUihP3hI+NbEyUmoy9LxY1jx+XnG12lMWB66uOtq9wxdss1n+K+frrhFc925c7TDHgbM98NqOpy3w94r3fee9k7l8zJRlszHCZyWPvE5Qz+S4ce66cArvY/2We/mvGMZMiy9CyMC06eryXAZ5+GBiGMfuWCk6uQ+JiHHnHj/zpecPXZzM2Tx6zeverjIs56myNXa9RgyfebFmuG+brOTftgfVupCKikrDMJpXMiY8EH7kM9yvwyf4q+wFrRnSI2BQx+Xdpo9HaYU3MNnuROHj61OH7z6b9fdJxP3N/cFIYlCry3EuZpQ1kMNCZktKKQXddVuJY5Xu8P6C0p6otqJqqNkf42DqHc1pYLd7TdR0hiIyxK8Sq0RqHNULiUOjsPSEJpY8RHwIhRHFLCwFjLXVVsdlsWK7WvPPOV3h0+ZimWbBcrikL8fX9rONLEdytszx6dC4d6ENHIrBYNMznM8qihKjpu0C772n3ney0eSBjHEYxi4gquw2JJvKUoR/aA3c3GkVFWVYkIzTI3KNEZfaMUlq69jHDIFGGHrSGsiyoa5lSraoCsc0TBT6tE0VpmTUVMUaGeY0rxMBjsZpRVQ6T3Zhe95i6/VM29yC4508boykLoT9u1mvKohCoRsmmqJU0eO52d7x89ZznL5/THfYQA1p7ZnPRkones9917Hd7fD8wLx2LpmCzrjk/r7FqpGsPdPtb9nd3mEl+U8H+4GkHz1078Oq242bb0mUFwIQS2eUc1OHkavNJqopf6JgSQR4GyglLRU3uOkJ91MpSFSWzqibEW3aHjj7M0eUZzfrruPqSPhheXW95//1njH2kqiqxrVNZyz19cnn/EHKfpg3ul+DSFJ8y1/vVhhA
HFCfaCq8d2AGcUmy0Ya417TiwHzu2uz13t3f0XS8MjRioQmARIl+Pnl8g8rXNmvMnb2HeeZf+nXeJzmHmM9R8RqIl6MTWKHbW8L7WvK8NuwSKgErZF3i6pvltHSV2lVTAMgV6UqCUek1hVGRWWtaLmsrBOHR0h5a+uyMFSAYxuFFRaLOfM435RQ6ZP9BYW4gOe7OmKCpi9IzDgWHoaOZLFvMl83qOSonDYc9+d0t72NJ2O4ZxOPpJFGWJdZVoLiWhPYr9pz0OSzrnKUaf10bu+Um+YBhiDuhe4o4iw81L1qsVi+WS8/MzFosVZxePWC5XVFmR0poCbT87fH85gru1XFxIcN/vZdhluWyo6xpjLGMf2N4duL3esbtrmTeVcOOj+HHGINm68GoRm6wYs9JfR2ugrg3EWrIDpUCf9JGnpCKkKNN2CVL0aDx1aUjrhsKVzOczitKi8GgVcC5hjEPphqK0DOeeGDI10hmK0lLVjrIuKdxrZqnpXnDPn/i4SL/CWkdV1SwWC85WKyrnxFYvxiOcNY4DVzev+PDFU56/eEoInWTq6l2qymIUDN3AfrvD9x2V1axWNYtVzWZVsVoX9Ic7nu0HhvaO3e210CpdgTKW9jCyPQzcbA9c3x5ouxHv49FYLp12qXzW0/9/mqX2vV93/LUxy9kqUSDUiXEYGMbAENcoe04q3mHbl9zt7vj+j57yR997j83yjEercwrjMGjEeTZ9sfOdGDr/ON7fpxwWWKIogd0w0rZ7Edjb7cQ0ZhRV1DJGLoPnGzrwizawXFS4zZqumdFaI56/eZ5h2G05bK95v2/5MHi+DfxQabYkSB6VPCqOTKbjk+aympYgSYb8EKNRxwDrTGI5d7z95Iymduzvrnn1oiXs99lyz0wLKYywT2MhvcahlEgxzOqGs80l5+dPcNaxb7fstjcQYdms2azP0QnGoWe/u+P25hVdv+dud8eu3WOswDGNSpRlkeUFVJ6hMRQuSx4YQ/Aea8aMoU9VVWAcw7GHFzMFs65r1psN5+cXPHnymPV6w+Zsw3zeUM8birKSCsCKfMrk1vRZ98X/74eULwZXWJy36CDO6EpJV7kfeq6v7ri9vuNwODBrSoo86ASisGYRxbtgpFwnBVLyR0U5ZyV7E8qRdMmFijQZXQREcyUQ/EgMHmsUTVMxq6XhUVgn0r0qiCWbLXKzbSmVQJTa29iJoy9/V5k98LrHR9HX6TfEhEBISmG0kc6+zQNfKmtVZ/57CJ5+6Njt7tjtbzl0dwTfMVoYfEdK0h8YO0/0I/PaMt/MOT+fsV6VzGaWslRcDwkVRlQM6AxdlYWjrGuMCxSVxyfYtiNjiNiQnWbiKeKeEKaJcfJmD+xxFugYPz/aSJ1CTEJrL7jpcEu/34I64FNJShVdP+Pw4sCr61c8f3nD937wPs+evaLUFXp9IVVdzrY+HwlXx+YouYa4J+6bvyf3SqaNQj1kzmeA8PUXJEUIA6jJT0CRkkFpm3vxgrkXKbFSiaZMFGWiG3bsXj6n3+3o3/sAFaMYP1cVt0PH9e6O7zx7zo92dzyLimuV6GJARY9KAyqOqBgfnPFRXhlFwqKy1PEU2I1SoBWFhaowLJoZy2aGSYH27oa9kRWwOt/H9yuDNzjuV1vGGApXsFgsubi45Pz8sTBShg6xzhPJ3crVHNod+92OoesYh5623XF3d8d2v6coxfhEEqtRpkjLAqdkzadbMuXnMAQIIR2N4ruuPwX2kKgaCd4XFxe89fbbnJ2f8+j8nPm8Yb4QbfeiFEbNKahPVoqffnwpgjsIjpryRJrODawQRvrec+gOHNqOmBKzecliUTFvCoxNxDQIxo5k4NYonIXZzLJe11RVRdNUzOclVVVgncU6gRREMEznMeWAMdIUEVpmbkaqAq1d/rsWY4yksFZRVFlQyBUoZWU4IgJK3MqF6jvRF3+aqzU1Ug3WuOOI9KyqcEajYsw+jgP92LPf77i9vWG/v2P0LYXTzOu
S0lnp4XmPJrJqHKum4nzdcH5WsZhbrBF2i/Y1h/WS7vIRpSmk/LQW48TNR1tL7wP9mCgOA2rbse964jASCB+Jv1Pw/UmysYcL+jD8Ziw4RZz1zIqIZovvPT5Gem9p+5Jtp2jHHe9/8IL3P/iQl9e3tG2HH0MGD3Lg/QKnekRVpsz9Hlyk7gf3j5z1T+VQCVUaKBfCkY4Om2rKNEONA6rvid5jwkBSI7dF4L0y0PlIe3NLH27ohUKDrSrK1YKb6Hm23/G93Y6nw0ivLaPWuUqOGXMXff8TDXWSNp4WIcqzoBOCaQsfXCcxSEk+Mhy2dMrTH1rCOJwmxjPlVyZg33SdjnUECiU6LUVJM1+waFYUtmTft3SHgTAGUV9EE4PQo+/ubvG+ZRx7gYZSynFAJltT7m1NLmgJQ/KZzRYkuRr9SJ8JH0M/4seADwmlDfN5TeFKzi4vWW3OODs/58mTJzSLBcvlkrqqqOoaVwpN8pQM5Pf2OdXMlya4h+AZ+p5xlPHc0Q/044BJonCntaJpauq6YnPeMG8c2kZC6klB5wxRNoayUKxWFTGucK6grkqanO1ro7JSm8FqlSVfZWdNObu0FryXC6i1QyvBXmWn1CRlsVbhSo1xogc9TaCNPmRjjoRJCqPJu+2bGR8fw8LEJ+aEuWulMVbwvdIVzKoSa6TZFb2nH3q6vmffHtjtdwxjh1KR1XLJxdkZi2aBUYqoIlWlWcwbHp3NOd/MWM4tlVMoIj4qTFpAeEJZlCzmV+x2e/atWAxOtL+mLglnhrIdiewYY2QYPfEY3h6aVfxEx8fw63tfUsK2Usrj3Eg9Czi7h9QzDgW7XS0WgPuedgxcXe+5ut4xDkmabHkWQvFRGOzhccrU81moU+at1JSwHL96+t78K6Vvcr8E+ezX+9RDg2pmMK8x7YJC7QmuJxY9duwwfUccBmIYuE0jPzCeG+NpBzj0kc57Oh+ICuq6ZFkXbKPm+eh4aS0HLYSBMikGdTID0UqTskE93GueHuNPVkS0kRizDWXy4hoVoNsdePk0cucsfdfT7u7w3gPiZjaJbJxKtdc7jvLHee2V0lhdUNgSheawP3B7c8vd7R0xjJTO4UexzmwP4jIWo/hEWCsOSa6o5ZkzlsKV6CyOJsOCgzikjf4Iofrg85TqwOiFkuqKgsJVrNcbVqsNl2+9xfrsnLqeMW/mRxs/V+QPV+TZG8+UsWs9uUZ9+vGlCe5V5VifLWm8yGSuVo1MRFqFdTVgWC6XpJSYLyzNwuIKgT1S1BAle08ZHrFWMZ8V4qSCoq7ED3EqmcQBHSwSoJNVJAIxKZwz2evR5MDscLbAGIuozXtAvCQTGaePcgNpnbPG/ELCdvkJmoefdCiZ5C2KgqqsmVUzmtmMRTOncBaykcdk0h2jKAWer9ecb5b8zFe+wjtvvc2jzQWLusLUiXlpWC0Lztcly0ZTF0qEnpImJk3pLFVZyc14cclu13J9c83ddkcXA2NMDNFydwg8f7XlMEZud/uPoya5EyyB7idvkp0yM2EOSaYttnlOB6oiMqukktntR/at43avGCL4rNBXFRWb1QU+mxk3zRytTpvpFzsDOSbHoJ/KBvYFjxA9H149xV9btt3I7tDjfRAJiSwrkEJinyAqwy4oymQYUmJUiR5FpzTJGprZjIvVgj4GtgH63YDvBIqLIYJJGJfAGHRmJchmyoMm8tTslqcyMPiWoWuJfoAY8FoTO8N42GOtERXWccT78cis0YVBOYfK2j6vc5z0Osn3Gxgl8tQExWG3Z797KQNJtzdUVYGrZox9z3YcOXR7AiPoKHacRqNsSeHJei8GrRRhTLShp1UjfkxH1ou8rDz/kYQyhno+Y1bXLFcrmvmCzeaM5XLFanPGfNFQFCVlWQqkay1aW6Fr54by1N9ISAP+85rMX5LgnqhnBRcXG5Qic0ctVelES8Za6npxnMi0RcAVAVdojNW5tW7kjWectywds7qk63rGUYYhFFmXZbJ
M03ITGCOj0kKNBGUkKxYczqKUpSxqXFES4kiMIz6IKfToswN8kOBijci9KgUpS6EqYg4Wr3uoY8JyP4sXTrARzK/MtK26Zl6LRdcU3GOMhCx8VZUVj84vWDQ1P/+Nn+Nr734VqzWFMdRO0cwty0azaDSl81iTcvmpMYgiZlXNWSzh/Oycrhu4unrF1fU17TgyxIhPjuvWE6Lhg+fXmUXwSYYiWT7hDYP7kXgxZYyASie530R2WbJifF1YxdBr+oNm12r2vUZZuXecgtWyoSgaxhhIRObz6rhJpwzxAB/Lqo+sJeRcNNxj09wHik7QzHSG8vP30fjT/MLrYng+eD54+mO2bS+OSOGegfwkCYBh1IpeK3a5AZiMJRpNnyyDjqjCkmZz6vlcgkkXSGVLcjkjjYFkpLJNyR3P9TS0ewruWoFRCRU9MfT0/oDv7kSFM+s4+cEy9j3GOfHGSmIUIpPqGqsNrixEZ8V8dvPwkw6xNzxdA6MsVjuij7TbHa9evuLm5pqh7yntGqst3o9iY+l7lEk5DjhUkHvO5D6XMSZDnz5LDAS8j8J6+eh5WENdzGmWG842Gy4vL1mt1wK91DPKjKeXpQT3+4ws7/3xtdXkK3ucc/ljENyVhnljKUp1dIuxVuOccMblvUizSClptmissGK8yhxzsatKE7NBRbRNFKXBWpXhCwuIboPIe0pGJ3ob+WcTOKtx1mQMzhAjx243+aEPWe89hoSKCqu0UKCMQWft5hDkBhC1yTdYlyM0/fCnJ22JqqyZ1TPmsxl1WVE4J40oleRGMDoPiC146/JtSCPNrOJytWJdWcpCUxeaolBUBVjjwUfRPdcO7Rwpl4FJyRyAVsIGMkYRwwKj4e7Q0g4DnVfsOrGs8+MgssT33sxJB0ZlrZs3WJUc1I+V0fRbkoRTpeVrhYXKAUS6LrJvS9quZAglwZSZc66wuuCt1Rmz+TlXt9fc3F1RGclGxfP0pJ+ictNUroEEoJD4LkSrAAAfIElEQVQF3Ca8eRpxn05sQl0eNFZPi3LvbU2Nv8/HUj96eO95+fx9tts9Pj/w3ouUr5i3aKwRswmtJo/UlOcMlASmlEjG0N9UtB/8mIRi6HqGtmPsB+LEuskie9NGJMuYM/VsqD3JiAijeGTsO7rDjjD2eTNQubIUSM/ESRZEo1KSZ99qKueoC9GAepPgrtTEcpJFH4eO3d0dcYwYXbDbiUmGUomycMxmdfY1CJRVQaU1RWEIIbLfHRj7nmEYiaEnJdlUQ6Yx+hDwYQru6miqMZvNWKzXLFcrzh89YrVes9lsmM/nzOYzkTioyqz5LhBvCF5mE2LMSpRCCNFKH607p2n+zzq+HMFdQVlpilLE8YUjmmNA1mEgeRSC7SphL5OCuMhoacATSCQVjiW/UgFjVTYINnnxDMZYQETyY5ByU/Cs/HAaMEp0bYjIcNTQ431EO9lIYsrC/x7I2b/VgsUZk5kQmdXy0Xbfa6/PtEhpgn50Hl0uKIuKqqxEyN9aMeTNWO8UwMqiYrPaiCxpVbCsKyqbxIB4Jqwea0QjPoZI0I6YNAmbS2R9DyeWjcM5Q12VpBgYkxfD7kmSOHpx4UkfzSxOSoHTpv3TPhQJq5UYMFtIMdF3kUNf0PYFyRiSsYTks5pfwXp9xpMnX8M4wzAeMGkEBqbNY5qtffA6SmGNlobfxO3+CaE3YZq8/u+IIdDu7+j2O1BgrMX3Hd2hE99gFM6Jmfy0+RyZKImseSLBvnul2RqbbSY5lknBe8ZhzIYX/vizct6TjZ8mGp0zWw0a/DjQdR1+HLKHqiRBIQYZ9gr++BpGS/XjtMIZTWUMM2Nx2mA+xy/048fDngyI7d7e7/Cjx2jHMPSkPEVaFJaqLEkpK8Qal1UaHePoObQeH7os/jXivcAvIcr8SowJHyexOkVV1RRlSbNYcHHxSCiOjx6xWC5pFg1lWVJVFdZagYScRfRnAiEOmU8fs76WzcFcZxKIf8AC+rTjSxH
cSRBTyFOX09To6RCrKXl4dBac01p2ZTW5tyvBu5XWKKswNmJGxTiKcbTWCp0zMq0lG9MpSkM1yEYwTdIlBT6KzK/3ib4XZch+CBin0fY0rkwSk4+YO/s+SkPoKLif08rXpunmG34i1U04tVbpSO2cXGLqWU1Zl9jCinl2EKy963v2uz377ZYwdszqgmXtWMwsTW2oSrBW3IY0SRpzSpOiTNApHwRbNDnrVhPsJecTomQRkxOUMZq6dCyammVTURUT2wZR2OOEOkSlfoKRcnmITtn7BD3EbHoNRsmk3xgNKVl8KlDayQxDkArNOEPp5AGbzIu9H+W6GjO1B46o/rGUSnKvWSvq+slPbWKV+dh5M1cnow6QSuD4r5Rhnnz/TkH9TXozSms25+dszs5ZLBYsmoZXr17x9OkH3N7esW8PUm3mQCRI0wm7PV5DcqkfQp4W1UdIKnhPGMc8vJfvxVxVqhQz5VLnSi+hkiZ48d6NMeCspc54smSnco/6XDWrECiynou1Fm0M3ovUrTWf3zz85IWZkgspoUII+NDhR58rGUNdV5RlQVE6QvQYaygoGIaB3e7AbifV+eilh9UeBOYN+RmIefhIaYMrHXVVU9U1Z5szVqsl8/mc1XpNPZ/LYFx+f1OyOdkjxhgYxp6+7xgGqRBSSpmCXeaegzoydv74BPd7ATDGQE6iAfIwgDBcyOXkBDuIEMyUpacc9AWPjlEyb6VH/EhWl5wcJgV7RwW0jmgT0HGidImWRDeOxAT9EOkOI/vDQD96tJ1kBgzOGLGYM2CiRnsPSkriGCLGijSAVorXTjyOCyMTtROea5RCG8kyZtmzcblcMJvPcJVjHHqhJA4j3eFAu9vi+z2VgbPG8Whds1kULOYGayLGxNwQU8eqKCWF96KdY6zCktD5GsUoEsIpymTdGATXV4A1hnldcrFZ8Ph8xavrW7ZdS9uNWUNGMPgYJ1nnnyTT/Qj+nWENayLOSJD3PhKTJVHgY0FSBlKedsybclkUWKMJscf7Hj8OQpVFzB6OOPuDU41ZV0TOIsZEuF+kZDgmP4+fev4PaZJvvhZGa87Oz6mqGU8eX3J+ds77772XS/tEl0f3JzPnvKdMrbnsDTD1QdJx43PWwOTP6z1xHDnOAaucsecWnyCaChHCRzb/3AgUUbuSuqqwVoYI/eg59B37tmccB3FnSukITSQiwyiWc1q9QXJ0b11Txt5lFkV07I02OFeyWMyZzee4wtJ1B6FKW8tu57m+vqHrO0iasprR9572MEiDU8nvMFp8mMu6pp41bM42rJYrzi8uWC6XlEVBWVXYUkyz67rKZtnmCO3FlPWuugP7/VZs9rxUNEVRiaaWPqEORutcLf0xCO4KmVJVObDeh0imDELC3ISnTw01+emJciZNhwn7lIdLus7ZrsrYY1kzvU5MouvsnPgb+uAzy0T4qP0YGcdccmkR94nB53NQojIZEyH643sZRrHAMsZIZx392t1+ef/y8BfWULosf6w1MRmKwjGfzVgs5iwWM+pZgS0sw9hn+lVPGAcKm1iuKjZNyeVmzrqpaOaGwp0c2I98kyMtLz/MIW+CMaK1BLtwXJtA148MPspYtbJYrakLx/m64e3HG663W7bdAZ9ETxwkAMQ8QZw+Btt80eN+uZ2hIpWx2kJjTCDFkdFnez1tRAlxsiM84sUZW0/ZLSeMIjuRchaaHr7O8bWPGff0Ibl6JB0xdnKlc4rZ9wN5/vdEb03Te3qzzF0bTWEt0Xu69sDObTm0baYQi59u8EJFnJyTFKeGcMxwwpHZlad6jdanoJ+by/Je1b1n7dhQON5PU+/g6D+coYfCOcpChgrH0UufLEGbIqOPQqslURQGayXJiLnP8/pDgLLDTrMnMYDR7ghj1vUM5xx1XTOra/rec9fuaeZziqLgcDhwc3PLft8CmqbJcgEpZc16KIqS2XzGfDZnlTVgVqs1dV3TzBuqHMhFNz9PqmejbZ0VOmOMxJD13sc
xVzL37omUkzpjsba45736xyS4A1LipJT9HP3RZ1COTADKbzjlpsXUZI1REWLWgI8ajfglTs2NyWHp+DoxEMKIDyNHN3JzupEnRT0fIiSdrf4MEYX3IyGGo+UWcGzKSrNIM46y844ASaGEcPlay6EUVKXBWc3FasFmOT9257tRY4s5m9WSs/WCzaphVhc5KwqQRgyeWZkoZhWbxnG2rNg0BbVTGCcKfBJudC6zMzQgT72sbUxZxkGMLib9jD77bg6jF+pXlGuk0ZQONos57zy5oO1Htl3HYRzYdT0hJjG0yHoqbx7cP3rIoIvOU7paBcYIY0z46bVCkFmIrGAprx2lokqe4Dui70l+zE+FPiLuE54uQFI8BvaJTSMZr9yjMVcxEyvz47E6p80x/3kM+G8+rKOVwlnF0A/c3ryi3W+5ur7mcDhASlhj8OOIzxrjU1Pz6CV6D71QaIyWfkXhxAjEJ4EDhV2WEyfune6x8r4/xCQm0EUh8xdlWWCN4exsxXxWs29FyA6l8CGQGDA64kygKhRlqfEBRj+d2OutjfQACgyWFDVRQ13WWTdmRlVVjDnOpKjYbnc8f/6S87MzFouG/b5l3x7Y3u0IUcgRSolscT0rKIqS1XLJ2cU5q8WKs/MLFssls/k8wy1OHMuMZNu2mHp+6hjUfR6MCmEkRDEEilm2wWgNWnpCzpUUhWhjOesyMeT+Bfjk40sR3FOCYRiPUpdTwJ0c5aWkul8iJ4KPx8lSpUAZyTS0ycGDaWeTwaOg/bFLL7IDgqXFlCGHdGqGGG2oCoOLCq0LrCsJUbrmbdfS+z7jZgqCwDBhGDIeJkNEVksZ7EdpMsbXDe7AYubYzCu+/pXHvPvkkpu7LS9e3bAfDMrNWS4aFvMZzayksAaTAiZ5nPKoKjEvC1bzknVTUJdQZSxaKp2pkpA/J1hjeohSXu8YwtEvxGdt8H4cM+1L9GNO5b68/8Ials2cx5cXPHp1w/OrGw6jZwgjSkZ/JWtNb4RVffxIiURAKdG6IRkxqCAxxkSMXoyqkxI2zRSQp/sMaewlPxL9QIoaheXjOLkIxU3LlDKO7YNnwtljyPCgOgmG3adoHquOe5tDbkpMr/Lab1/OJ5DSwO3tHX3fs287DofDaSgoNwrjPfriCXFSxyxc5yltZxNFEVDjlECR1/C0IU2b86k3cbpnpjmM2WxGXUrWbqzh8eMnXD465+rqCpTIabftAVKgKmAxM3lYsaQbksgyR0V47WVRovhoSzQFJM18NmfZLKkKoRsO456+G/Bj4PrqhhfPX+R4I4KEk/Z6CJHDoc9zNU646auVaMC89YSmaVgu17hSmD0KlXsL6rg+QgwRjZ/paxMsM1WTKYrIXTaxwmhDWc0oyxlVWWfdd0ED0jFeffrxpQjux4ZhzG4o+t4bT+RAno50xVN27eXBzUFdtGbk14mjipOblgRZEExl3FhoS7mBqKQRJKYULhOEpw1ChpjGIE3WQlvQUcbTE0yccpUCJPFllAug8V4x6pB/x+sF96pwfPNPfpV3Lla8+9YlZ+s13/vxU+7u9pSzJdVcPE+Xs4qmKqhMwsSB2kbszKAocbagqUtmpWT0Bi8P5zGQ3E8tT38/6m/fe1hDiBLYh5F+EN5zSpCUwFmQg0Uu9cvCslk1nG0a1ss5d4eO/TDkuHAvsvxUDgk4kUiICoVhDJF+7Okz9FYYi1PS3U1qytwlyHk/MvYDY38Qs+ZoAUcGfKa6MTdxpd+SVGL0nsF7fBTeNrlnJOHyRH/9aHdAbtDcK5putXtX4XUT+An+8INhHHvutnf0nbA6pMqVBEYSH6SavP+aucSX4C5UXuugdBGnI6WNHLQiKYPKgX7KPqfG9tFYh4RChnBmsxnLZk5VOmYz4XNfXj7h8vIRWinadsuh7em6A0MPdQnLRqATYwtcSoQkWjmfQB//zENry6I5w7kKkiUGKIsClTRDLxOj+50YhSutuLnZst3uqWc7XGEZfMCYAus
qlFZUdcNqvaFZLri8vMwN0xWbszPKUlhrIvw/WYTC0T0tPezLTKjEicEnF0Nj8/qbI/xSZ12ZsqyF7ZRvjqlP8lnHlyS450ao0UzguclYe8w35+Q3GGM8BnXJGAPWgVU6Z2Ny45mkwCb0hGtG2SCUOgV3KeW1SP5OgxNGo2LOppIWyCck4hgIo0dHsMpM3RlUShjAKkVEoVWitBpXFARvCFFE+SfvyC96VFXBN3/+Z/jZdy5YzBuUcTx7fkUIidV6zubsjKpyVIWhcppCR0wcmTmFyv2FwoEzAsEcBW6mQYgJhpmamik3rJFsRXRBpGqaoJkQRX7Ve/8guAOnEj/TApVKVE4zr0qaeSVTv5y0zpk+3vg4VVoTnh1ixMeIxjJGRT9G+tETk8eWGpWndyf/2ZRxdz8ODIeese+P15WUjpTS/HICuWSaX0pRBliCJ6SYYZpITAGSRj8QNpd7UD2Yes2aK/dfIL2ZlqRSGudKRiebzAnLvQ8/yezGg/6wur+Z58pNq/wsRKwWNplEcE3I+uMh+3/KtRbITt/fIDIvuypLZrOSqnCslg3NYkWzWFGWDVVVy70xq9jPKjodKQuoSkfE0g8aH6c1/5ha/+ceRhsWzRrnqqPJvUrIUGM/cDi0tO2B0QeUVhy6Dh8CwzjS9UKDNbrAukhZOpbrMx4/eYvzi3MuLy9ZbzbHZqlk3yqz/eJp/XPSI2uSMnknHfF28rMmTKDJWtRmOEtw+qqsKAqHs+6YsU8IRwyfLSP+pQjuWimqwhCjCNeHjEdGopTxuXyfymGxsbIyKJFk0lAbhVKZPaN0diCSG1WhGcfA4EfGXrLPEAPGGYq6oFBgSZiMe6qosltKIHgZVvJebmifH+ipoSqqjBpdOKbhmqIsKApHtEEGDrR77cy9qQt++efe5nwxpxsjN4cBj8ZVM2azmuW8oLCKeQFNCbVNFDrgtMIoi1IRM8ktZClS6TtEJlW5h2HkNM03ZRvp+PcpIHGEymSTzROwucyMcXKQF9jFWcO8KljMZpSukCGMJHMDp4nS1ztyQcGJe543FRTeBzpGjFa0h47tdk9SkaKw9zDmdK+BKFPIISS6XhrRQoWVxvnRaxMwWhhbxkiDsPcD/eCl4WfskRkjBAB1KoTyhnmcn82Z+jHTPTKGJpjq9XF3aW5K9RmiwgdFiFo0fXJTUgKJNMVPmba6t6gTLCfX3EcYAxSFDIMlIASNj4pRkb2OIUbpaZk8C6HuwTFl4USDySbO1nPeevsxtnAcDh3KOJrlhs0g+kN3W02Mnj5oYtTiJcpEx3wjgih9O7L3Pe3+QHvoMlFTbuwYA2OQ1x59oO16AgofwUcoy5p5U7PaGJpmwebsnPNHFyxXC+ZNQ1VVGRfPNOgYjjrtR8gkpeO1TqRstJUwJmW2jLqXvQvpw+WmaVGI6mThhI0j5MAg8wbjCcL+rONLEdyVgtLJ+L/ykHw44pETQjAFdtnldL6ZdMZ5QZmclWrJrrQy96a4FMnLRFl76GjbjpASRe2IVoMRfPR4QaImehgHCeonY2oRMRtHL9N4xuRhJ4PNapIosQAsnCWaaZrMoV4Tc68Kx1cebyi15un1nv2hY4zgylo0ZQpNaRK1S1QOSpsosk+oOWYGHK34VJ6aTfmh4chy+EiATR//9P1sT93DEkN2kJloXcfmdM70rNaUzlGXMmA1CTlNUMGbtQ8/+ZDgE0lJgrvgzi3OGaqyOG4EJ/mD/I6UISUvGjwpkTmfuSqZfrvcc9NAiTGWNA6MPsq6mlMw1yY32h/um0zl+gmFOU20Hr81vSk9VB2hlZjuBXakmoyJ40Y1nc8kkTGt3ek85QgRBi+JQ+lyoHeAzwyWI5kgj+Nrc9RUKfIovbUmDxhGmnnJ40dr7naJu92BhGbWLFj0A/u2ZRhH2kNHP54SC6MnMsTnc7o/diRo9z373Y7b2zt2ux1GK5zVFM7hnCMpzeg9h65jGMWhbSIWuKJisToT7ffzC87
OzlhullRVibEnKOUkEBayEuT9Xs69zTOd7r1p7VRGKExuujpXUhZl/ntxbMoarWR4zGepg2HMm8kfg+AuGGRAK01pLTa5KR2Rjyg67VIx510yxSNkM9nOSVUpE3CiizK5nvhjVin0Jy2sDSuwhEzAhixdKy5OYq8n3WtjDGT3JucyDUmTaWaCweojJTLg/dSEkhvVWbKV1xc/coVMBMaQGHzEWEuzaGhqR1NpZqVh2RTMK01hI0bIy8fgecx41MT0EAgjvwIPA/vpSb8HRhzL7ETGCvOmZq3NrYkT/e2Ew6ZjBeWMoS5LCmtznyKhYsSoKFnUT3DcO0uAbJ0YAI81mmY+w2hNYRyKLMecA7VksBqtjOx1xqKMyZOZGeNNp98tkg8FKSa6Qy/qn5JNHAdzZL/8SGA/vh6nHUN9dO1PFdGbbHnOOX7mq+/Q930eY1fc3G65ur4Ta70EBoPovcfcrBNzm5gmf9WUT23ShpEA5aPiMGrGyWM4vxcFeUNXR8rvtGHK9GWJ1orgI4NSIns7dGhdUNcFxlSMg+Hw/7V3Ls1uHUUc//XMnKPXfUi6fhZQEApYsExlETasqXwC2JAFXyIpPgEsqWIBC7asyYaiKD4ABRsgGxNYkYqJ4zwciC3pnDnDonuOZOvasVLXlrieX5VKurKsM/ofqU9Pd0/PZ/epqprBcESbhDbmsJ0ePzsbu+7E1DQNd+/eZblY9tveaZrFEqatzrraTuPj48mEUFUaT5/Pmc3mzGdXGQyGjCdHjMdjQggkkm2hl2evUWf4MT505tzmwpaUHVO9AIew9tbp1ZSNC0BnxltoklYrNStdg9HGSNM0ltv6P4i5J0kkF3FetJWm8723kRKkqN55F8Wyzvm5vEDBKjo6kM5i95gXY8nALmpsbTAYWM8aR5MaVnGlJUgxQfAEqWyjCxW5T6AlQaK2PwhBQPT4PugydK3djf1KtvxF14uMdYvcgbxaNlpJX9N2DOqaK7OK2dGQ2US3wTs9CowGUPmozcmSzjA2G1itI7nrL5EKvw7B5Dxfzllslrs5Z2kyJ32yJ1rSKIdusPtocUCxGU0VPJPRqA/LeNsguXa6G8+HO39bNLzwcNMxm53ZZ4mSqKqKk6MjBPUsc1JUjbDHkTTRjfXJ9hXiK1yoQTRIl6uudGpdUYUhbbNg9WBJK4kk2v8b281eWz/kst3EWnMgOfKemX04aaPyRF/pQcLOXupgUPGtb7xESpHxqGY0rLj9/oeAY7lqeLBYafjBwpE5HNC1rS4e6tcc6Hnui6YEVjHRraBtIebwu2WB1QO1EIM4tHJZdNX0cEiMjc5EpKbrnJU4VwxqvVAGJ9ratqqo6xHLCKntbEFT168feZoQxKM0TcMHdz7oK7lAnaXUQRujhrGS4OsBw9GI0+mU4+kJN2/cYDY/YzabM5vO+5laDq10Kfbv2eb8k+10lSMFuRgk77Eq+bxnx9NmuTFGlstECBB86vdFdt4R/NLyJzojWDy4z2q56EM/G4Vtj+UgjDtAclqu1UUQS0aogRWLkXV96WL2iIOvdOdw9P8uVyuapiMlNXSbXQjzFdP5mtR5YupYLbXFgHirfsFWgjlBvBCChjBC8MQILRHx+cesX3LdiEAFb5qG5XKprVZjIvhAFWqcaz73RGzpgRAlmDflqLzjeDRgPPbMj0acnQ4ZDRyjgdiKzHXiK2VXQXLYz8oAYW3Y+4fZn7fX9/+2Lj3teoOvq1idd/jOa3sC57aSR5u6a1npkEFVUdlq3Tp4jscVk2HFv3b8nog46sGRVaX0z+rntvF6p58p+EH/gcQ+Xz+d7hLOjWii0/p7KnwYa5vpKqgnn6fWIoirSFTELtLEAHZBR5yVWPKQ0e4TZiaqkPoNLqwK8iHPq5M8kxixa/qwCoFrV2aaJO46jiZjblz7hGtX7vLRJ/d4/85HfHzvHp9++l9So11CfdAeTjrzNUOabKUqHbZfPG3Sltoxd/EU+tyWB7xfzzo
kaRfF4L11J42Mx2Pm8yvMzq4zmkxZLFY0ywXLxQPtfdOsLJyUSLEjxVab7VnurW3aflHgLmw6HZu5hWSz+6oaMBiNOJ3OOZ1a35fTU06nUybjMVU9ABsXXbRmg5DzTb1ht9XouQNrnrmuY+m5yRz4oA3/YrSybCs3DqG27fO0UkbD0BqvT50a97ZZ0e6owWEYdxE60alXX15o1THZuK/DHEkTYcn1cSpxQhNbliu9imLVC+p5r2ObznkqqUkpsLLG+TFqeCDljLUlY704QsghB01EJtv8VuOAgK1uzPGvtm1YrVYslw1t01HX2sw/xpZ29/2x6Zy3agGt4gkhEOohs+Mhp0cDqipR+YgQkd4TBDUlyob5YB0v2LDi54YB1Nz3LW2zdy+5kmJ9kyd5VEmrFuqq1sUXLrdiEE4mY06PhjtroqGwMTHmL3r2jvWiJuTwnP4QszcktqF3jnGnlBBX00ahSw6ocH5IqKxXt3levXGXQCLQJU/sPM4HvKv60NfaR8+164947uuU6kZJe8oyWRhLwA/WsdqnxHvHbKotsYP3nBxPmJ3OOD464b1/39Ey1mbJ/fv3aaMlQH1lhQm5LXNnnQ21B3ySRIcjormtLkXbsDr1HjtsGE5LrGvYTmdtqXMMh2Om86ucnJ4xGB6zXH5MbJYsFks+++yBGSw7TzE3r4tW3aYO02KhbQh2YsM52ZoJiSNUFePJhLOrV7l+/SbXbtzgZDrVZl5VsOq82IetchVZNu45qdm2ukm4tuXQv2OMfSljLusWlwidJrS7TqtetAFZSxUGtoLe2/mIdut64551WZeIb6z7eQzyhXZ+uWBE5D/Ared82CvA3ed8zK+mlK4+zQuLJufzguhSNDmf8vvZ5rGaHIbnDrdSSq88zwOKyJ+f9zF3pGhyPkWXbYom27zwmlzQ+u9CoVAoHBLFuBcKhcIl5FCM+y9fkGPuQtHkfIou2xRNtnnhNTmIhGqhUCgULpZD8dwLhUKhcIHs3biLyPdE5JaI/ENE3rjA9/2ViNwRkbc3npuLyO9F5B27n9nzIiI/szH8VURevqhxfBGKJtsUTc6n6LJN0cTYXE34vG/oRpX/BL4O1MBfgG9f0Ht/F3gZeHvjuZ8Cb9jjN4Cf2OPXgN+iK05eBf5YNCmaHLImRZeiyeeOd18nwQT4DvC7jb/fBN68wPf/2iMn4hZw0x7fRGthAX4B/OC81xVNiiaHqEnRpWjyebd9h2W+BA+1F3nXnntWXE8p3Qaw+2t7GseTKJpsUzQ5n6LLNkUTY9/G/bwmGvso3zmUccDhjOVQxgGHM5ZDGUfmUMZzKOOAwxnL3sexb+P+LvCVjb+/DLz3DI/3vojcBLD7O3sax5MommxTNDmfoss2RRNj38b9T8A3ReQlEamB7wNvPcPjvQW8bo9fB36z8fwPLcP9KnAvT7X2QNFkm6LJ+RRdtimaZPaR9HgkQfEa8Hc0w/3jC3zfXwO3gQa9iv4IOAP+ALxj93N7rQA/tzH8DXilaFI0OXRNii5FkyfdygrVQqFQuITsOyxTKBQKhWdAMe6FQqFwCSnGvVAoFC4hxbgXCoXCJaQY90KhULiEFONeKBQKl5Bi3AuFQuESUox7oVAoXEL+BxbRf69IrqqTAAAAAElFTkSuQmCC\n" - }, - "metadata": { - "needs_background": "light" - } - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.py_transforms as py_trans\n", - "from mindspore.dataset.transforms.py_transforms import Compose\n", - "from PIL import Image\n", - "\n", - 
"ds.config.set_seed(8)\n", - "\n", - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train/\"\n", - "\n", - "dataset1 = ds.Cifar10Dataset(DATA_DIR, num_samples=5, shuffle=True)\n", - "\n", - "def decode(image):\n", - " return Image.fromarray(image)\n", - "\n", - "transforms_list = [\n", - " decode,\n", - " py_trans.Resize(size=(200,200)),\n", - " py_trans.ToTensor()\n", - "]\n", - "compose_trans = Compose(transforms_list)\n", - "dataset2 = dataset1.map(operations=compose_trans, input_columns=[\"image\"])\n", - "\n", - "image_list, label_list = [], []\n", - "for data in dataset2.create_dict_iterator():\n", - " image_list.append(data['image'])\n", - " label_list.append(data['label'])\n", - " print(\"Transformed image Shape:\", data['image'].shape, \", Transformed label:\", data['label'])\n", - "\n", - "num_samples = len(image_list)\n", - "for i in range(num_samples):\n", - " plt.subplot(1, len(image_list), i + 1)\n", - " plt.imshow(image_list[i].asnumpy().transpose(1, 2, 0))\n", - " plt.title(label_list[i].asnumpy())\n", - "plt.show()" - ] - }, - { - "source": [ - "## Eager模式\n", - "上述介绍的关于`c_transform`、`py_transform`中数据增强算子的用法,都是基于数据管道的方式执行的。基于数据管道方式执行的最大特点是需要定义`map`算子,由其负责启动、执行给定的数据增强算子,对数据管道的数据进行映射变换。" - ], - "cell_type": "markdown", - "metadata": {} - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "random_crop = c_trans.RandomCrop([10, 10])\n", - "dataset = dataset1.map(operations=random_crop, input_columns=[\"image\"])" - ] - }, - { - "source": [ - "除此之外,MindSpore还提供了一种“即时执行”的方式调用数据增强算子,称为Eager模式。在算子的Eager模式下,不需要构建数据管道,因此代码编写会更为简洁且能立即执行得到运行结果,推荐在小型数据增强实验、模型推理等轻量化场景中使用。\n", - "\n", - "使用Eager模式,只需要将数据增强算子本身当成可执行函数使用即可,编写如下代码即可以Eager模式执行数据增强算子。" - ], - "cell_type": "markdown", - "metadata": {} - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Image.type: , Image.shape: (356, 200)\n", - "Image.type: , 
Image.shape: (320, 570, 3)\n", - "Image.type: , Image.shape: (280, 280, 3)\n", - "Image.type: , Image.shape: (360, 360)\n" - ] - }, - { - "output_type": "display_data", - "data": { - "text/plain": "
", - "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXcAAADHCAYAAADifRM/AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nOy9ebwlyVXf+T2Rmfe+pV4tvam7S1J3S91qbcigXcNgFksymNUeZAPDSIAAGwYztvF4mY+HYbXlzzA2YLBlwQBmMbIwBguz2QjLjATIQgutpQXdUu9V1d21vPUumRFx5o/IuDcyb95Xr6vrdVcX+avPrXdvZkZkRGTkiRO/c+KEqCo9evTo0ePqgnm6C9CjR48ePS4/euHeo0ePHlcheuHeo0ePHlcheuHeo0ePHlcheuHeo0ePHlcheuHeo0ePHlcheuHeo0ePi0JEVkXk10RkS0R+6ekuTxsi8l4R+eYl5/4PEfnJp7pMTzfyp7sAPXr0OBhE5H7gm1X1d56G23818CzgWlW1T8P9Lxmq+o+f7jI8Heg19x49rhKIyGEqa7cAf3opgv2Qy9VjCXrh3qPHMwAi8nPAc4FfE5FdEfl7InKriKiIvFVEHgR+t772l0TkTE2h/J6IvCTJ52dE5MdF5NdFZEdEPiAiz6/PiYj8cxF5rE57l4i8VES+F/hu4K/V936riBgR+Uci8kB9/c+KyLE6n4VyJce+UUQeEpELIvI3RORV9X02ReTHWnX+JhG5u772t0XkluTcG0TkU3U5fwyQfdrue0Tk51tlO1A5ROT5IvK7InJORM6KyC+IyPHk/MtF5CN1W/6SiPw7EfmB5PyXichH63x/X0Redkkd4FKgqv2n//SfZ8AHuB94ffL7VkCBnwXWgdX6+DcBG8AQ+GHgo0manwHOA68m0LK/ALyzPvcXgQ8BxwnC8kXATfW57wF+Psnnm4B7gecBR4D/APzcsnIlx94OrABvBCbArwI3ACeBx4DPr/P4qjr/F9Xl/EfA79fnrgO2CVRRAfxtwBIoq652m5X9EspxO/CGui2vB34P+OH63AB4APjf6nL8FaAEfqA+//I6r9cAGfCW+hkOn5L+8nR32P7Tf/rPwT77CPfn7ZPmeH3Nsfr3zwA/mZz/S8Cn6u9fBPwp8FrAtPJpC/f3AN+e/L4TqGpBvFCu5NjJ5Ng54K8lv38Z+Fv1998E3pqcM8CIQA+9GfjD5JwADz9B4X6gcnTk9VXAR+rvfx54BJDk/PsS4f6vgO9vpf+TOHAc9qenZXr0eObjofhFRDIReZuIfFpEtgkDAgRtN+JM8n1E0LxR1d8Ffgz4ceBREXmHiBxdcs+bCVprxAMEwf6srnIleDT5Pu74faT+fgvwIzWdsUmYbQhBs745zVuD1Oy61344UDlE5AYReaeIPFK3588zb8ubgUfq+0ek5bgF+K5Yh7oez6nTHTp64d6jxzMHy0K4pse/DvhK4PXAMYKmCvtw0o2MVH9UVV8BvAR4AfC/L7n0FEF4RTyXQI2kQvLJhJx9CPjrqno8+ayq6u8DpwlCEgi2gvT3ZcY/IdTjZap6FPh65m15GjhZ3z8iLcdDwA+26rCmqr94SGVtoBfuPXo8c/AogePeDxvAlEA1rAEHdgOsjYqvEZEC2CNw0W7J5b8I/G0RuU1EjtT3+Xd6+dwk3w78w2gMFpFjIvKm+tyvAy8Rkb9Se+J8J3DjZbpvGxvALrApIidpDnZ/QGif7xCRXES+kmDLiPgJ4G/UbSoisi4iXyoiG4dU1gZ64d6jxzMH/wT4R/UU/+8uueZnCRTJI8AngT98AvkfJQikC3Ue54AfWnLtTwE/RzAw3kcYCP7mE7jXvlDVXwH+KfDOmg75OPAl9bmzwJuAt9VlvAN4/+W6dwvfSzCMbhEGlf+QlLEkGFHfCmwStPr/RBhcUdU/Ar6FQHVdIBiIv+GQyrkAadJFPXr06NHjUiEiHwD
erqo//XSXpdfce/To0eMSISKfLyI31rTMW4CXAb/1dJcLeuHeo8cTgoh8sYj8iYjcKyL/4OkuT4+nHXcCf0ygbb4L+GpVPf30Fimgp2V69DggRCQj+IG/geBX/UHga1X1k09rwXr06ECvuffocXC8GrhXVT9TG9PeSXA77NHjikMv3Hv0ODhO0lyk8nB9rEePKw59tLYePQ6OroVAC7ymiHwr8K31z1ccaol6/JmHqnYuUOuFe48eB8fDNFcgPpuwUrMBVX0H8A4AEemNWj2eFvS0TI8eB8cHgTvqVZkD4GuAdz/NZerRoxO95t6jxwGhqlZEvgP4bUII159S1U88zcXq0aMTvStkjx6HiJ6W6XHYWMa597RMjx49elyF6IV7jx49elyF6IV7jx49elyF6IV7jx49elyF6IV7jx49elyF6IV7jx49elyF6IV7jx49elyF6IV7jx49elyF6IV7jx49elyF6IV7jx49elyF6IV7jx49elyF6AOH9ehxheNTd9+LiHApcaBEQtiRNG38np6Lx1R1dnw5FGYhc3wjXxFBRPC+eTwtz0HrEa9Ny5aWPd7PqQeEPDPgHcY7fuB7v483f/3X89xbbmUiplEn730j7/3q2y5v+rt9TlUxxjTyj3k752Zp0mte9apXHagtLgW95t6jxzMAqWCLWCaUVHW2hciydDPBpM28Li7Yl6M9kLQHkQPk0EjnffhrjJkJyvg9iC6Dpx5QjID3qHX40vI1X/0m/sUP/TMevOdeRqMR3vuGMHfONYTvsgEnvaZdD9VFwb+IUE5jwgeCoI/lOUz0wv1JQETeLiL/5+W+9iL53CoiKiKdsy4R+YSIfMGTvU+PKwtRIMOi8Oy6VmfSvfs6VcWrn19Hl/BaInykzhglvaStWRtjFgTjMoEfjkv9McwFffiI1MdUMJLN7mtEQyrNMD5jkBnyIuPOF72E17/hDfzdv/Xt/Osf+efc/6lPUOBR57BOKSuPrRy+qlBn8QpWBVVPe3OthZmD1gVjUetvPxtVX+c5P59q74eJPuTvMwwicitwH1Coqn16S9PjYrgcIX8/dfe9y/IGmGmBUTNMsSBMFbQWSt77mSasaFum7YNIy8TPAVLocoE518Yvnt4QtG4xA9AKlQqPQXTI0INhQpkL6nJMuccP/8B38Zn7zrC2vspnv+o1/KW//CZsNsCRId6SqcXkGVKs4E1Bjq/bUeoBZU7jzAaqOGCaRcGeDmZxMGjTP1GoG2N49atffaD2u0jb9CF/LydEJHu6y9Djzwaka+tWZUF41Bc3qBYRaaaXRQpGkKUyOhVqXSXr3la2VdRWGZcJwYthVg+pdXsVROsyGI/JAvcuZogVZWV9yJ//3M/DTkYcX1/jwx94P7/6rl9gNQNfTijLMdPpmLKcYqsp3k3xzs1okzhotmdNcb7TrldbeMfr4wymMUDs266XB71wTyAiLxKR94rIZk1vfEVy7mdE5F+JyG+IyB7whfWxH0iu+XsiclpETonIN9f0ye1J+h+ov3+BiDwsIt8lIo/Vab4xyedLReQjIrItIg+JyPc8gTrcLyKvr79/j4j8koj8vIjsiMjHROQFIvIP6/s+JCJvTNJ+o4jcXV/7GRH5662896vfUER+SEQeFJFHaxpq9Yk+gx6L0C7Jm8gFU2uYM8pAmsa+zvStvKKwidxwSqks8sqBHpkXxOwrqNK803I9YdTpMskQBKNgMBgRjHjA4T04FcRAlkNhBtxw/DquWT/GxmCFuz/yx3zo/X+AOIdaR1k6ppOKclziJjYIa+9Bg3CPhtDYDjNBbhaFc5vTT2dG8W88Bhw6LdML9xoiUgC/Bvxn4AbgbwK/ICJ3Jpd9HfCDwAbwvlb6Lwb+DvB64Hbg8y9yyxuBY8BJ4K3Aj4vIifrcHvBm4DjwpcC3ichXXWLVvhz4OeAE8BHCFnGmvu/3Af86ufYx4MuAo8A3Av9cRF5+wPr9U+AFwGfX508C332JZe7xRCC
QZWEi2WXMjNp7QxjpxQ2dDQ0/sjqzfA1ChhAHluV5tT1SsixraLIH0WLT2UegkBQRxUhGRo5BcVjMYECulpuuPcJqDn/w/g+CgzMPn+LE2gamKnnnz/6/aDVBnaWsKkajEXvbm+xdOMve1ibT0R62nKLOQi2QF42gc6Ns2l7turQNuem5XnN/6vBa4AjwNlUtVfV3gf8EfG1yzX9U1ferqlfVSSv9XwV+WlU/oaoj4Hsvcr8K+D5VrVT1N4Bd4E4AVX2vqn6svs9dwC9y8cFiGf4/Vf3tmp//JeD6uo4V8E7gVhE5Xt/311X10xrw3wgD3eddrH4Seum3AH9bVc+r6g7wjwkbSPc4BKTCootXT6+LnPpMEOlybb7t2jfTumWZdn6wAaKd/xOlJWQhLwUTOPjMg3GCGoMXz4oRsske26fPcNddH2cy3ePMY6fIMlhdHTCd7LK7fR5np1g3oaxGlNMdqnKH6XRCVU6xVYmrpnhb4p1taNneK9a6JlVTDwCxveMA1vbISY912UguJ3o/9zluBh7S1LQNDxA00IiHLpL+jw54LcC5lkF0RBhcEJHXAG8DXgoMgCFBMF8KHk2+j4GzquqS39T33RSRLwH+L4IGboA14GP1NfvV7/r62g8lL6AQNpHucRmxr1CnWxtcMFjKIo9vjFkwqorIjOZp5x/PReOs1AbadpnaWnub6mmfS3nu2fFm0TEiOKOgHqOQaYGQMWFCVhje8+7/wL//qX/N8fV1VvKcajJhY+MYu6Mpj545x9pglR/+of+Hv/ldfwfnFSOgzuGtY2K3qMY5K2vrDIermDynGA6RLMc5QWqhnfqqx7Knz6Rt5O4yKPe0zFOHU8BzJJrIA54LPJL83o8sPA08O/n9nCdRln8LvBt4jqoeA97OQSxXTwIiMgR+Gfgh4Fmqehz4jeS++9XvLGGgeImqHq8/x1T1yGGW+c8cpCkM21r2Mi77IBx35Otnmr6vhU+XLTflnU2kgeaLdC6FbjgoXdHS3edfRBgUA7x3vO41r8FgEOt4/q038dxn3wDesrqyzqBY4dP3PMx9995LOZowGU2YjKaUkxI7rXDTCZPxLtPxHlU5xlcV3lrUORSPd35mbG23R5um6apfW3s/TPTCfY4PELjuvycihQRf8S8nUBcHwbuAb5RglF3jyfHNG8B5VZ2IyKsJXP9hI84QHgdsrcW/MTm/tH71bOcnCBz9DQAiclJE/uJTUO4/M2hr223hsExYPBED5ow3Nou0QZNTVpQozLThLvhkhNZ+aU3jOsWIoAhqFC8KXimyjFuf/wIeOnOWk8+5mfOnP82K2WNj1VFOt7nxxhO84nPu5LZn38B09zzlaJPJeAdrp1hXos5iVLHlhMloj6qMNM0UbyvAhcVSSwR8e/CNGnqbjsqyrNfcnyqoagl8BfAlBE30XwJvVtVPHTD9bwI/CvxX4F7gD+pT00sozrcD3yciOwQh+q5LyOMJoebJv7O+1wXCgPLu5PzF6vf36+N/KCLbwO9Q2xB6XB5cTEgverUcHKnr38VcEwMjE4WbIjKnftppu9wFD2LI7bIfNKCezGeAUGUOW1gG4imscn4M3/k9b+Ohc2f4hq99LV/y+S/kVS+7mRuuKVjJLZOdx7n92dfy4L1/zM75h5mMLjAebTIeb+PKMW46xpcl1WTMaHebajqhHI+Zjsf4qsR7i2rTXbKxZqDl1972EOqing4D/SKmQ4KIvAj4ODDUq3Cx0dVev8sFOYRFTCISaBP1M/63IUCRTjplP6hq8KgxB3FV9CCL/HqaV9uQ2LjPRYTagsePLnrUqzgyl4NAWThMBhtOKWTAjjlCnsMH3v0v+az1BxjvOD798Hnue8xx5uwO9913PzedvJFbXnA7gyPHWDl+A3k+oCgGDDKDGEM2WMFkOVkxYLi2QT4ckuUDBsMhWZZj8hyVbDa76ZrlLBPw8e9hL2LqDaqXESLyl4FfB9YJroG/djUJvqu9fs8UqOr
MKNowcO5zvRET3NNbgjeiaWqKQpiFQUJrF8R9efEOumiZoXXZX+99cHeMYQha66yMGjAg4hkoiGaUBpwIojsMUL7wjW/EnfskgzOf4TnZkM2dU5zDoq5iLR+wtz1G1o4ysBNCNA9BjUOQEKYARcTgywlelNyAeoNVSy5DTCa1773ULvjdISLSOqWG18OmZQ5NuEvwi/4RgsfET6rq2w7rXlcQ/jrwM4AD/huBXrmacLXX74pEl8dJKgxhkY+fJ45/5p4wM9pDE61aojCKg4d2av/GLK4o7dLQY9lSt79UmLUHpbaGO8szuXeDz6bm3dHga+8N3hisQiYO8RayISvX3cFwWGCyI8CQ62+8luuPr7OzM+XU/X/KjQPHyto6PnOoqXBxIZgaxOU4qahKAXwIAZYXSFbgvMeIx9cCXRSUphbfbo+4FuGpomUORbhLWJr/48AbgIeBD4rIu1X1k4dxvysFqvrFT3cZDhNXe/2uVCwIgba7ooD6pjCeCVeaVMAsi4QaWITWLo6LMV9Uo2/kxamWNtecCvhl2npDuIvUi5W6XCk9ynx1LhJEq0cwkuO9IhRYHZAPT2CGWxRZyfXHcuS5z+LezzyMygo62aKQDOuUfMXiffAAEnVhRa06XFUhxiCmIrM+uE56pcjqMVFCADUxi4NcLHPXYHfYlPhhGVRfDdyrqp+pDZXvBL7ykO7Vo8dVjbbg9NrU/ARpLlJqpW1/2qEFUkMqKGKiXI0UjBLiti+nEQ7iAvhEtdW2D3kzbaoV14JM65gzkgEZTnOsDvH5CfxglePXHOGaowO2zj6EuF2ODCzsbqLTKWIU5x0qimo0FvuadvFQ+8Fb63DOoxaqKsSi8bXbqCbG1a72SAe2w9ba4fBomZM0F7k8DLwmvUBEvhX4VoC1tbVX3Hbb8xoZtNZOzB5l+9h+TdROU7vDMpuJxv/04nk9IajOb7SkQy4maZ+7WGm6rtell1w0vsgBMC/i4vT7ieXTfMn3e+kP6qO9zP8b4MyZM2xtbR3+23RIWKYJpue998HvvKPfNNqn7ujxt5HAXUe3xqANJ9rxQvM/MZ64raHGsqTlb1M2aZplkS5zkyN1eF71gaIR40GFsHdHBlLgTMaUgiM3nmRv635WC3jRi17AJ++5h92tPbQqOfvoPdy48VKMHEU1rEA1RrHWBu0cQlhgU+GqEiQjyzKcC3XJzSCMKnFWI8tnSvF7GmPmsHBYwr3rRWrUVlXfAbwD4KUv/Sz997/8q/g61cygQsfqOhZ9SrsNRItCp+u6BqcHiF+08kdObaFCrWllPNZlJAoZpS+GRzV28lRzWkQ6pUs7fWqUmRlpCP6+hgyjzNrU+uqi2kJ7xV1qFBLJZue9txhjQujV+vrYWdOVecvq1H42zft0l1GkucFC0xhl8N6SZRlVVZHneaMu3/It37Jvva90dHHuKdraODSfZZomriiNbe7V15r/ohEwvXfXu3YQL5iugSn93sXdz87V1JC0ygbRRBD7aqiZQYPmDegsdKTgfcb68es4awuq8YjhkWNsTyqGg3XYLXHTTcRNEHsUGdR18wAeLx4jDucFfIZ3FTiD+hyp3wEoycwwvN/1u9LVdsuOHRYOa+h4mOYKxmcTVoAuhZdFq3o6oqdGinZHjugSGDGv9K/R5ON1tqTZGIMaaQwyKTqFfotP2++laKK5kUH6gsY6L3RomUfta7cLgLWW3GQYBGPAiuIFXBbS53nemX/0121Pf5sDlyfcyi+2pzGzvNPn1KYCUgGQTl3TASu9f3slYHvzhzi4Na8B5yrAL9TzmQph+ezImOAF034n0j7ZyMs03zGYv1NZlpFlzeeUPrtl79esHFwa5dC+3yyPjjoY5opWEPz1xiR1Hw3avMPjZkZhFdjeczA4wd33neLMhT1OX5hwbrdkuHaEyc4FNh8/zVBm+yYFIZ3Ea/feod7ibIg3Y6tRKHeoADhXa+4ardKzurXjzsTPM3UR0weBO0TkNhE
ZEAJIvfsiaVrcX0D6osff7SlNW9ik6OqUUjsDmNrSbTRo7N57sjrWXZr3cgHdbRGPLwow026NMaDdzd3WhmLai70obQE6GAxwlQUjeBSnFq8W60qKosB7j7V2IY90C7B2B9RWJ+1q5/TaNERq2i7tOnbFuG53+rRcbb6y+VznXGcqjLKsfuGfAi3pMBFXi3a1n2oIFTCbMZqmUE4H12h4bc8AQxt7QggBbaZJy9ESwOkgkc5SG+9ELYzj+5aJIRMTFJD6E2SwzOigiKBwNTfJAMiMCUq593gNWnp0lVRRMoFMfNDixZNlYHWdjZMvYrJ6jD+65xE2rn8hp7YqtmzF+sqQcw/dz3T7EYx3ZN6R1RSX9+BVQT3qHbYc48oRvpqEXZysDVy7d3WYAg/qCKbd+SfFfrP0y4lDEe4afJ+/gxBe9m7gXar6iSeYx9LjqfbZ7mhtdGnbWo/mKk1BZZSGkIC6Q/mmptMlLNrae5sumZVPu4V8W6BdTCDFly8K01mZTb20uZ6mFkZYrW9trSXP8wZ9EtN2rU7s0qi6ZibpgNvW3vfLK6Zra5JdM6auAS/V2qOAAvBqawNZVbfLvk15xaP9XBp9SpnFFheZR29st6n3fkaXdykr7e/t/tFFr3Rto5dq8Av5stjX4rUCC2GJJf6TehAAspqmyagNvSIgBhWDSh2OQEFVMFq7KKogMmSwcYIXvvzVvO+DH+fMY9sMN65he1yxvrJOpiVnHvkUuTiMeDKZedjXAwg473HW4m0VPt7h1c02+FDvgmD3bQVpUQncT2ZdLhwao6+qv6GqL1DV56vqD+57LYvCIhXgKeLxtrYHTQGy0HlSoSDgggkJh6JGUCNBiHvFMP8e80r/tuq5773bVMM8YbPpU9vCwkwjSRvyFkQyVIU8HyCSETjxwDx6gma3khcMree9v/YbfP/3fz9ZlqGqjMfj2TRx2dRwXof5lLzZIeflTXeuaQ9mXVpdl/BJ671M+KTtk2XZbKCSekruvcdkzetNdhXQMh0D7Kytag0zneG0Ka+ZsKZ7sF72rrT7bTtN1yDcPh93QzUiZBK080B9zD8CSLy/ds9Y54MJ9QpcDYRM8EUMFE49RNREK6oGyFDNUKDKcm669XY2rrmWh06dYWt3QqUFz7vtNk4cXWE8epydzXOIVo1ZDEjIo54eeOfxlQ2hgJ0N4QicQzUOWh5pCfjFd/gZqrlfCtKR36vOPum59rUR+035I+US/3blIV5n59qduetYexRulyXVaLqopib87LquF6n9soSPEugGJbqoBQbCkRuP+im4Abl3VAPDG778K7hwz11897e/hTW3R5YPKCdT3GgTp37GUad1gEhbzcvdpMOa9YqBkNKdfLoEeDo4p1r3MiEf699lE4j3FEm8D9Qk/HEdnGmJ6i4iPyVhR6qPJ8euEZH/IiL31H9P1MdFRH5URO4Vkbuk3sTkqUBX4KlWPYDFQWwmYOv2SGdWbVpTJBkQW+ebvw3GZAvvRnxG3nvyPG/eP2rBQYVdnAXQ6iOtV0Ukci51vxSZUx8ieBGcmrCISA3qw0d8Bj5H/BCMpxKPWdngf37zN/CsG06wszdlbwxVWXLH7Sc5caLgEx/7EOfPnSJstxBCPNRLYevFUoJapZxUTCYTymmJq6kZW1lsVWFthXMluCDkJWmbp1LRuCKEe+xYM4HQISDbGkcbMT0woypSYR5hdPG3iMw0hsWyLXbitrBOy7RMQLensUkJFtKldUrz6Rok2uUwThByJK/AG0y+hss9D93zJ5y89jh/59u+GTfeYjLeYm9vh2qyh7OhU7a1b8+i5pF20Pa0PDWaps9j2fQ/FSDLOn37eNuQPBwOsdY2NVcH6gVnFSP5fk6lPwO0F2b9A+A9qnoH8J76N4SAcnfUn28F/tXybC8/5v3Pozii55WqUlVVw3Mppfbas2BVxde2GK8hHzGBOkifV9szKf42huBTn1CbiMxmUXHWm4VeONNkvXeJUuI
QcYSFzhawqFoy9RjCJ9P5B28RHMYQBnIcKhlIBs6RqcNohcGDBG3e4bHiIfOYbAc1GeI2yGWDV7zqc9kZjRAz5Zbbr+OxC6c489hjeHKO+E0uPPAxpufuZ1VK8poE8giWCm+mVH4C6siYhk0+qhHWOcIsQfF+AkzxOsWpgITNvIXQBlIrZF3v/OXElRFbpn5/nXPzF1519lK2O+cy7TnV7ozS0NiisPHehw6YHq9dwVKk94zXRSzzT102ZV2mWUm9l0XNbDabJBGOF0OsTxCwimGAp0SLAuNKrt3IeeFtL+D+uz/Nyz7nz/Fff+1XePUXfBHb4ynZ3mnywQorq2usrB8hM0VYgl3XMwqNdObQNqymf6NgiNe1vWRSgZGmuRja16VCfjhcbQxMWZYa3Q3LslfV3xORW1uHvxL4gvr7vwHeS4h4+ZXAz2ooxB+KyHERuUlVT1+08E8SkU4TEXxNSamXGc2Snl/W/+bPCOICD2Pmr0iwaTQH8/S5pjsLqYYVrJHf9/WiH8O8TEJNn5nAgcvMvrU4U53PHGotOSl3fD9T33GR2tYQMgUjs3unCAMZiISVprkpcLXX13j0CC944a1823e+hcc+/kFcNWFw7TG8nzIa7VFWjq2de8j9CpPJAPIVpFjFug3QVUpr8OMJxaDAVyWljDHqybKcQZ7hSo8pgMxRuQmZqScu9bshImGv1kPElSHcWeRm47H0eBttgdPu0MumqVFLD7MEcN4tCNKFpdBLyhvzjWnC4gbXKFtar6jhhnTBxUrM4kKO9j1TI2hbmEZkErjI3IHqAFt4Hv7YR/iO7/77THcnbG9vM/3QlLPvfx8vfdXLqbynKivMZFprRzAYrkJmyIthYzbVVTfn3EywtOmoVIgvE+Bdz60LaZ3THXBSuiaN21GzvI1rnwCeFQW2qp6WOj493QvzThI2MWnXa7ZA73IhtkGe5QvT+/asdlbnmp+eXSuRDmsKW1XXSJc+w/Y6k1lbHphdkMbF7TIuzkY90U88vd9s0CE+35qpkXiHuWhvkVZ4DDlQaYXJcvJc+bIv+R/4mm96E6PxNoaSwYrAwIFOGTKFvV2uPeLZ297EW6GcDpj4VVaOPQ9vCqpKsNuW/twAACAASURBVGEEw1lHLoZgtlMKkwEGdYpKVbdXMduXbP6+HLQNLw1XhnCvn0YUGMuEVxup/237BXazMD7zvFOk1ALMhfkyN8s2uoTGxQaiKAhj+ugf7tXi/aILYTt9FGJd/tuxxA5PDhgykIprjx/laFZw84ufw91/ch+33Hon9//B+yl3xoydMsBDoWydH5GbDFXPyurazKUy1iXPc6bT6cyfPdUYUzqsa8bSnrm0qZtlVFPTztDUSlNB1EUTRQF4GXnOro7QmbEmC/TkMoT8TWdPqQ2n/Z50cdlBy3Yzj5p5m8biL3pApXkum7222z4o0xovZN400VtpcZYHc66/mW8sz7zJjTBb1GREcN6HMAmJoJ+VM8oTjTS/QdVSGIPVKYMVz9e/+SuZ6HlMXjIdbUFBWARFRVGE6JDWlazgsF6pRiPU7rI19ays38Bw7QTWHwnsmCEYVe0UEWE6zRiYARjF2SmzdQixnkboittzuXFlCHdt0hDRJVFk/qC6cDGtzMucY28I89b0PmrTKY1w0J1S2kKniyPuomWMCfu1h6XOjSLNrm2/QOk9Z9e00uU6wGYecmVNM47e8XLuPfc43/Z1n83G8AK7bszXfdVf4FMfeg/HbjzJ2vpx1lePICZn+/zjFKtr+Oo4xeoq2WCImJwsy8JS7NpXPq1zqh2nFMwyodCexXQdb88CutItG6y7BoQnqLk/KjXdIiI3AY/Vx5/wwrzLhbY2jba9q5idS+vu1dWshQluez5EdQz0S1OAx0Eb5jPQeO/076w9leBNpjrzTzfpeXQmuOd9uDkoGSN4v9jP5zP3WN/A6xhjwvushEBe6eDS9d2AoBhTkHuDMIJsB8wOTraZlheYjsbYcoS
rKjbIYQJ5LliX462jyDLEjcl8SbVzASlGTEePMjEFR5/7OlSHOL8CCPnAo9ZBBj734D1ZrmTGUJlolAWTFU+0T14SrgiDKixqH8t8pdMH2KZS2kKknT8EjT69pj29j2gL9nSW0FhAkpRnWb2WnTOGWrDrzLsj1CtfCA+a3mOBskm+Zz7HG8XnlrV8wI7Lef+HP8KLbjnJy198J9cfPYF4ZTUrGO+MEIZ4V+FdFbYTKysm01G4r61Q1ZktxFo7a6s4EKbtFL93DYpp23W1cfrSp4ImrW9qOI1I961MB5b9ynIRvBt4S/39LcB/TI6/WQJeC2zpU8C3w6LgSme6UWjOztVQjd5Uc4NmdO8L55t9tz3Dafe5xrqSVgSbQII1BXuKeb7pQBvLnj7n7rrPvHxa13S+7zIfGGaWNAXIMVKxWowQs8ukLFHnWVsZsrK6ghEwahlkglpLnhuKgeApMbnDu5LcKJRjZLpLXu6yd/40ttrD2hJXOnRSQlVi3ZipDe+QWMWXwQ/e1y7DzlVU1RRnq8UKX0ZcMcI97UzR/7wdBqAtVLumkxGzTinNY+IVjDQWMLUF8H6CGpo7m6dp4/fmEvykicXPPsFDYT6QNbV/12iPLs0fZv0YJ/OP5pahKit2wBaegX+MyXiL1Rd9NrffcQsvfv4J3M4m7/r5dyG5kvnguuW9xbkqbDM23kXtmMqWYCtQh3fVQjunFEH68refUzsGTdpO7TaI39M2j8eLolhok/36Qxw0l0FEfpGwXeCdIvKwiLwVeBvwBhG5hxCyOu5D8BvAZwhbCf4ET2Es+y4h1ngWUa5FwSoaPGFmJ7sVkpimPSNqL0ZLr0sOzGcJNA7PBHA6i2izU/Nrmu9bmqYNT5ylLKEzZG7MlfATqf3hnSq5KSlkF2WX0aRifeUYK8UK29vbZEUeBiimOD/BM4VsipcJzo8wYhH1YWGUA7GOydYZyr1NcNMQfsA5qKY4N8H6Cc6W+NJC5XFlhS1LbFnirQX1eHe4+9xcGbRM6wG3PSwi2p0SFykCkNr9Kj73KNQXXB/3idOxUKxEqM7cK5OXoU3FdM0wQMMO8aozLT2cmwudrllGF/+5QF3UcUDSZduCC6v4REEzinyNcaUM1u4gu+YML5Rdtk6vcfz1d3Luk3/EkddeT2EMhReGWFw1xqKMd0uK9QHohMKszu6XejRFjS8VzGk7QXOKD4uG4642Tzn8dp1jnu14J+m5rgGoC6r6tUuK8Rc6rlXgf11a8MNEouAuvBcyvyYYFf1sUZOqr2mYJu0JTW293afTGdCy2WLUz038osH/PKSv7VdE7bk9OEXOP8mvfo8vUv0Qgyp65cRyzWig4DMzGzg0hCoQUYxxFGaP3IxR9Rw9fg2T8S5rmeHosXWgxIhhZ7JLBoirwFl2N8+Rs45WntH2mGyQ4/E4deRmi71zMHTKykaOtQU2U6BCfYEYh2GAMRleLSYLS7a8t5h6Re1h4orR3CNSTbBLI4N5x9wPMTBYinnAoYM1apsm6qKKutJEAZVlGdEf2Zh2sKz9Dahd2ksqGLteusBP1oLWK1K7qIkZ4IubOPnCF3LkyHGOHC9YKwpktIXW0SK9D3FnRD3eVThb4iobjsVImX4uQPebObVnGu1nmQqTdpr02bYHzi4evz0AxnaK99pPc3/GQBY12tnfxpL9Zlz2yHFfTFGK+XVpzcu06Nm5GB5g9r1+hjPVuSPNJci0dIjQ+H+tos+0f9XaKz14jmVGEAOKR/023u2BxtXNGcPVFUrn2Dgy5PjxdVQJjgSAKz3lXslqsU5hhhT5gGGxwiOPPIZXR14YNh8/jRttUm6dxbgJ1k2pKktVTvGTMb6aUNkpZTWlKqdUZYm3VaBlXHnomvuVIdzbQjiZGranjanwXKpVtAUe1Jbz5iDRRiqUul6ktuDuum8UJlEYhUGh6Zfd1srTvOP3Zdp8OmVWadZPNFIa4QUyOFQtKh6H4Acn+P0P38fRm25h4ld
YXxlwJIciG5BJhihhQZMtcdUIW+2hNUeoNpBlabumQcLaLo8u8edtG+ci4qrJdv3bq1HTdk9j6bSfVUxfVdWsrdsePM90tAfO+L0525trr11oKy3tgTa9V+daiyV5p8+pPsJSCqUj7YGuqz/p81bVehyZ8+2mbgd8mDmvDCpW1wxZsYowRLCYXDhybIM//IP/xoMP3EuFY2NtFeM8gzynyDOGRQYuRFsdDla46cbrQZXJeMz6WkFup5Tbj2Mn20Fo4/Clg7LEl1Osqyi1DJSnnWJtOQ8wdnB/0kvClSHcE3T51Uahlk4X99PcuzRCFWZc+0HStTXTqAWmx9scpUgz5njwuLGd2upMQC+8EAGZmDCtbEXQM4mmlkk9yGjILzcGnMdjcEbw4snqnXSUHXan1/HGb/hb/Nb77+O8nmBw/Cgf+/3fonBTjKvIsyyEG1NlMt6hGm1STUdoFQyurpyChuBIMdJd2h4LQcyWPI+07ssGUmttwy01bbcuQRDponm7h1WVcSn81YS2Zu3Vha3hJA7u3dp6mqZ7tfTiALxM6/ctBSRcl84U4rnmjKD9rNP3oh0VMkafUTEL1zdCjIhgJBjii6w+Vi+yAsHjMbIHWlLZHKGgMI6sUM7vnON5t93ETTddSz4c4ie7DIxnZ+tRtrfPsLXzKHvTLZyWPPzIg/zxh09hx0quQ7xzYMdU2+c4c/oBKi2pVMOWh6XFTsaMyl0m5YRpOaGsSqbTKdPphLKcMBmNL/qsnwyeVM8XkfuBHep1xKr6ShG5Bvh3wK3A/cBfVdULB80z9cJoaiMdVIR2d75laE/l96nXrAPFcsTgWF3eHnHASYV0+1gUPqkmmQ5a0R1tJqR8cKFqvECzOiRWK2rKiFp7NzUbKRnMXkCDYRXNHSMzpFjd4JEzZ9grdjh2pOIj//19fM6rXov3K4R1hgXiwhTVlhXVcEquSjYYoLZe8GIEaQneNiLfnnLky9xX21p6+9zMztDi99O2jQI9fUapN80zFelajGbIhvq85EGgkNWG1LmAb7pMNkNCNNrcBL03Bp2DxUEk/hWt9xFNB+esplvqe8Ww2cryd04EwiskhEB4bYWtnu0itW940hc8GOMRbxGKsK4DC7lDNSxcEg2rlTMFYyw5e6AFVemp8gyxjmKqrD33JvTIOsIK+VDQapcP/t6jfOgDm9z5gnVuuOFZSOG58eYbwGyzs7nLiWs3OL/puPH6ISuDCvW7MDpHvn4tNl9Fs4xcwVQOY0vIBDUZJstBFW8rsuxwFY/Lobl/oap+tqq+sv69LDbHUigs8Kxtg1mXtp5ORw+Ci/GH7XzSe6bcbZsmAuaxNYxpaOQRUchFOidEcmwK6JTmSAVa10ykq9whX+p47gY0SwIoraNmissyzp9/nGKwymPnRrz0JbcxHAiPnnmwXo4e8zf4ylOWE6ppifeeqqrCtNLbWfz7LlfNtGzpYqJYT2gKhjbaz7mtaaaCPrZt/B13YYqUT1d8+Wca2m017w9aC8T5wqRZH6DZh1K0KY364MWVJE0+ST5dVFHXvRbRmhEsu63Obzqjk0RmRtzcGExmMCadHdT0pHqKYooxgkrGeGeTldW8toWFODQnn30La2trKGPErzEeW/78F76I/+Utd/JZn3MNf3LvPTxy+kFMvsd1161z/MQRvHWsrg5xzgKKc5bJaBdbTkOIAR+icHrvcN7VdGf8VDhnZ/ThYeEwho5lsTmWQpi/oOlK0XbHyBDUNb0zUqRuj/OFUE16wDCnZlKKJo1LkQqOPM/D7wy8a3LCXbRB5KVNVnOfmIYHyZwrdjMhNKN8nG/4C4uEThO19VgXU4ceFanjU0hdA1FQwYgiWFRC8C+PozAXEB2Q2SHf+m3fyE+84//m7T/3g5y954956Wsqdne32Nr+GFvbBm820NWbsPYYMhlR5WHjj3x4hFyEypVkw3XMLBaIgGZ13I5FDrf98repqPb5ThqgNaOLx6uqatBlsa3TNFc
L2spAdCNJhf1+UdJi+s7V1bUHimll0PBgjM+H2OfnoQDqUs3L0QgGIB3HY5nC8fR1bs7QQ50kptb5QBYbQMJLg0drH3yDSI4YB1SIP8dwIHhbsTM9zxE5htEVTD5mbLcYb53nyOoquEkQzPYoj55+nGtODDiK4Uu/7CjjsmR3tMd0z1NNJ1hXsnr0hqDsKGg1pdzbhnyNQnJcMUDyQe3ToyA5qMc5Wy/cMmHP10PEk+35CvxnEfmQhHga0IrNAdywNHWSSVsbixpwDMcbp3kL1AyJtq/zKI8L1ygzvjoaH+MneJbMr48an3NutnORs4vaZxx8siyrl3inwinE7Ig0QZeQaRzz7U4/N3i1fZSDa9e87qaONGdEQlx6QowLZT6AWRkykLC8+s47r+Of/djfxct5VgYTiiOWjePK8fws12WPMtj+U7YeuovCnQc3xU7G2GpCWe0xnViML6jsCF95cIEKCftHho0KutwPl2l3bftFG8u49viSF0UxE+qNXa/2yfOZiK4Vuynloboo2LsG2MWZ3txXfSHUbtTQ64+p+W2RKOcl+TRSMhv0G+eb1zZnFW2HgroAybgg8f2eae+mdr+M60dqoSnZbJmViMX4EWU5onRjjpxYxasnk3Ws80yrXTaOrVOWFcYLznp2NpU/+u93s7tT4mxGkefkxiCaY0yBasburmM8HlMUGc5W5KIYV6HlGG/LsCDQVmEDDxS03l+1/tQVXXjOlxNPVnP/XFU9JSG40n8RkU8dNKEkwZVuuvnmeGxBu4vHurSwlB5pvMTOz6aZMVpdxLKYMPFv6vvbXoU519Kb5XO1O2EIpCWzODFdU+kofGadut4cREKmdQcF6F4B2xTy8/rkwecLbxY9ixDIspzMKmp2GAwmTOw5ts6do9o7x+am5fjRNXyVs1Zk7PhtdPtBNqcXWLnxFRhuwCsM1JMVQyrvyKxFisDTZlmBrdccVOKSCIIplw6BV100xC2jZ9qCKO0b6bOMM5/BYMBkMllI90xHSmvF/rMQ5AtH7DPtcLJdg+LF0Bbs6TGFRMDPr4oByJJcWp9U029q/SJx7+LZvCCcn8n3oPVHv32SgWa+X0fsa6FwgqfIPUOmiGQ8fn6bYsWzcbTAVkpmMk5ccy17Z0+TIThr2d4cMxxu8JrPfSmDocVkgnMgJqPICwZD5TP3neemG6/jwQcf4MV3Po8j66uUzqLVFF+O0aoEFayx5HkM+TBXukxtmzDmCubcVfVU/fcx4FeAV1PH5gCQZmyOdtp3qOorVfWVJ05cQ339wuq46DGSpIPM4GV5R00HhVS4pvm2r4emx0XUCmOaJv0y39DA+bDRhdabBzSMTIkgjgKoUZb6gRuawaHm9E3zBU7/xu9tl8wGXy3xPgre4bVgaLYpdYeMgltO3owBrtlYxZcTihVl6rYRM2VFLbJ3gc0HP8Z0uoObWsx4yrTapnQTpFTKsgybE1RTqjJ4APiqrD92ZuB09ebB6i2oa7RNrE8quOOnKzxBeyBP7TNxlpU+s4PaY65kdGntUNuENLioRqoitZssvEsds5+u7/sJ9hTNZxHsSNHDZZZa05DLjVB+jfc9zEDNfMvAuBcsYSDJhJnQD306cSWUEBzNmPAe5SZD1JMZx4P3fYLhwJMbOHH82RxZexZ2UiGUqPWIG5BpjjiPeDh6wjMqHyUberJVwckUMs8DD25x6lSJyh4ve+UGx67z3H7H8/jEJx8NfW86Ybx5AaZjqCq8dXjrUOfrvRIceB/cNUURiTLj8HDJwl1E1kVkI34H3gh8nOWxOQ6EtOM651ABp02f5ot5QKQ+7em2XW2B0UbK3aZafFuDng0Gvmp2zpYrXlswR+GTCp14vshy5jvLzGmZ9gDWHsxm7SX16j0T+H4xkBupNwtWDJasGLGxskM+zLmwabGTKYOVgpHdYVqNEDdlJXMUCuPdiqp0yO7DXHjoEzDepBrtMRmfZbL3GJPdHaZ7W1TTEZPRDuV4jJt
OKacTymkQ9tPRHr4qUVvhKzsT9Oot3oWwBmkdLob2s+hqh/T3ZYwI+bQjpWEiRKIGHbT1+TZvi5RY1+Aw65ttmvLSStj6PR8aDjZ5CrNXQ1IIX3P06cCOYkRr42msT2wHz2AwwJYVuYFMptx2643YSlEsJssYDI6QmZXg9uvAl4FScaWl3APMlBtvupZjx04Aqzif4bxlUMDe7g5OobKOrBgyKAqe//xrEBFGo5IL5/bIUby1eDdXbLwNfd+rnwVbO+gM6sngycwLngX8Sl3AHPi3qvpbIvJB4F0S4nQ8CLzpYhmFWUp3p4RFATfj1WcPt374qVFmSSdNPVna0/y2Jtn1e/adZGea5F6p7aCtcbd9u6NRVAj7MprcYNVjksLvp23NjrXaTHW+wUFW85be5AyzR8lyT26V6244wcBMuXDhUQZH1rDO4SqLm1QYMvJsQCUl3paw9Sj+2I3seCh8QVFYJgODaEFmy7CP60DwVU2TGI/kwQe/LMt6dmHJsiLYBJxF8qIuvAU52CrSdGCez27m7RsDi10sWugzDbN+ROt9mAXJ6qbuUjRowXqTaaFWYBr3SMmSJH1kQw4g/BvvayNBfH7LZtzxv/q3qa+t04QJSDoja9oZRARXWQZFTs4Ua7fIih2QAYjFVhYjayHmelbiy13c3i7V7g4f+cDdfPTDD/PV/9PrIDNoZslyg9cCX1UcP77G+tpRzm6e564P7/HSF99BsX6Bosg4e/4cZ06NecnLbqfyVe0E4RHnyaSchfh1qmgWZhGowWp58cZ8Erhk4a6qnwH+XMfxc3TE5tg3LxKh1JpKewgbWNfXmHYHZ/kgEI+1f3dxkF0UTor0msixS032tV+mZQux2vl64sq6mCZE8lOlsaAjTdcUXHG6nd6rPiNBcw9XGazJyM1OoJ68khfKx+/6CM9//i2cOv0wea6c3TrN3s6InGfx8//mLt70NS9mrxyxUm3ywGfu5saXfV7YgMBNGCFkfoUiH1BVFYX1SGYYDkIcmuj6VRQDfFJe8WGO3YxbEvw02u3WdilN7R8Nm0dCaVlrryrBDvO6LnMFhuaMbtlsJQ76c403aduDlmWfpu0aVOpUzFWQxQ1pZmki1aq1fUvmoYkbm3SkdIZGqq7e+Kbm9is3ZjhQkDFOcnCWlWKdLF9BdUw5OYcrt3j89Gne99u/z1/4ws/htltupshyzp17nPVjOdNqQmYy9krLeHeKtZ7cDHjpZ12PYQtrS7Ki4NjxY1x77bU4rVAqvIbAYCEAoMF7QWq2IQNEDIg/dM79ili+FzV3SDRfTQQqzDRdr82FQZGPXyacu4xPjXt38Nnxb/uFiS+YkRCS1qttlLszn8zMNPQZas+YPPUGEBDmXkHRQNSsVyxT1Fjre6ZvplYMTViFh5mg3gAZR7NNslwx0y2qQvCV4/bbbufxRz/F6uAmTj3yUY4OrsUy4KFHzvHWv3Er4x1hfe1GBuLATPDbj1AduRnyHKZTvPXYLFBTvqzIhwPKyR7FcECer5BlGZXzqJGwYGMAIhmVhywLc+mU1qqqauanntJiqdCIz7OLKosLwdLzV4ugbysLoU2gqQUrPtnovYtKROsZUD1jFOhYGXrpZWy2eyrU90fo1tqYxc+LpYlG72cCnxgsTAINOTAF3gaDZV5kqB9RDB1IQW4KjM9wVYkay2S6ze7OY0x3z/NFn/cyjEBRgPoJJ45vMC63MEYYj0rs1HHhwi7qc6xaimHBzugUK+Y4VqeMRlOObawjxqDq8FoF+5LLcOKDs0NGbXdKFcsrlHO/rJBmjItUSKeaWkSbT+1aUReva1Aprc/S4iQvQyOOtUgjr/S+qfEqvbZNFQUDsYQt8TBoQxWKxigWzs151WZ0SjE03Dnz2WARfNCNCQPRwJxnkGWUbIFbRSTjngc/ii0d48mDjC6son7AkSNrvOTFtyJuhdHuDmfPPYpSkYtjsnmOahriV6ufx6a21uKdpRxP8M5RTaaUkzG
2nOJ8haumeO8opyEMKq5e/FTbVeJiqGUcefo84vXxuafHY/unaS4hnvsVibQNIiUhNc8c+mOkCef2H2PMbJ9TYc6QhOPzpfsxz4PYJ4KZPw1O1izjvP8zF8iza+t3Of5qD9BAnmVkmZkpdJkJoQXC77kjw9xoG1yDZzQnSmYsk/F5itwGDt8M8OJR2cMYi3eO3Z1dVnJl68ImubGcfvhhLpzdxtucne0Rjzxwlr3tCTkDMgM33zzkxLUVqysFaocYdxSTCcYIa2vDsJ5EbXgO3tX2EQ/1IrPQt4NAj4uZDnuB3ZUh3HUxciA0IzGmnTt+j3FD0qn4sqlqO22XRtceTGbGytbCqjBl9HhvZ+fTazsFVHyxWr/j0f2EfFovY8zMlx3ROh9fG5C05rcVjM5eAsEzNBfwruKjH/8kRT4ky3JOnjzJSl6g1ZDjx56FlxH5YIhzjmIgvO/9D3L6kVMhzk1VYve2cdMRVTnBuQrR4AFAvWIV9WAtWEs5HTOdjLDlFLzDlhPwFl8FrUZtbRRXDTE6vJKJaQj79myurb120RTRaB2f5dWwiKlZb0WMQ7FIQymSWngkSo2CeI/x9RoQFj/zfA+2JmAmUmOYi1Y5o8at6vEED7IYU312ndSfmGcW7Ae5EcQ7DEqe1UI9zmBp0ZvZACMFmVGM8WGLPB9836flY6xtbJPJHqY6irEVWWGYSsXETcBNWGMXUzo2Vq5jNJ5yw/U3sPnYCO8ytFrl4x8aM3TPwpUWa0dUzlC5lbDXhCoXNqGywQ6QZRlCVj8LBQemIigxLsP7on6/Y8V9PWBdsvX6QLhien466sffbW29PdKlg0Bb628jFb7L0OU+F/OeCX1c4NQS7jBdaBTLDix4IHQJ9maa9ms3F+zRjdIYQyY1FeODdpAbA/Wu7vOMQ9AsX1oKM0GdBSl53ev+ElkuUCk6gcmFET5/iPUTFRSrWCnZ2Zvi1POFX3wjr3rlZwOQGxi4KX5SMp1UIeZMLbhFa7dN78LuTVWJVlPsdEo52g2CfrzLdDzC2QnlZI9yOsKVU3wVqC3nKyo7nVFW6ewsPvu27SGiHXc81eyvhvADcTVqameApgLSVm7iwNlWii4PBNQkungb9XsZR5tw81n5YllMnVXyZ05HxmxmyVs2MBxh14y4ms8imgWvM9khyxSRnGzgyIzlkx++i6ErGFQlMt5kfHaLC4+c48SRDdY3jqC54bm3P5+t7UdYP+J43etu41d/9S5GO4p3BbYKPHowlpYMimlNITEb0GJhvSrO+ZqCUdpeTDHNYc8qrwjhrjQplLahJeXPF4wwSdo2/52iPWh0XdOmbWJZ2oa9OA3s8h8OnVYa7phtLWk/o1QTzcVTaV1EQlCk8F0bs5zZqtSyYmVgMOzi/KBeXJXjXIkrdyg3t3j7j/0ybvcmvIcsDwuSqmqMLSt+891nsOWIT/3pn/Ce99xFJrZePg3ehtjUlS2DxuzKeqPvmif3Wrs8Oux0CqpU5YTJZIL3dkbLqDp8ZcFZsjh9TdzI4rOI7qXpM44afjtQWBpo6yBUw5WOYN+pBQSLCkp7gdPsHUoMp+nfgyAK7mWBaXXJ97m4r8tkTCtsgCB+blOL74rUeqyY2te9ro8kz7ahzFEh9arUUM+MzHjUnWd9bYrRsua/tyn3znD7bc9F3BjrH2dr99NYM+Ljf3of9z74MBUlzkyZ2k2M22A6LhkMJ7zylatsbU4Y7Xi8zagmHsOAzHjywYhZqDWtF5DVLaFeCcEQ3FxBIfbZmrbR/SPbXg5cEQbVFG0DWluDb8dzkWQxRNs1blm+7TxTzGOwz/NLDbJxepgeT6MeSm30BWpefUk9W6cWDcKxfKkBKR6arzydG67ixhxgJEMMeOvIiwzjx/jCMjA5VDmV22Q8ehi/N+Zb3vrF2GqK8ZDllslu8AwwOuQv/5XnsTsuOfnck9xxxwqVm2L9FHxFZkGyAiqLy7K6o+eIKE7r9rCghYAHV5aYosB
WU0SUYphhbfAiyIsCV8ftUamfr+rMq6bLDtOlBMCiu2ncmu+ZjGX9OlV6e3XHUwAAIABJREFU2oH3hCggF68/CNrCW0kVlND3tPb2UuI4ogup47vRpjVDGZlRS0Yjl9+++dy7J60vWhuFjQVy8EOEC5j8PANjwCvWT3jve3+H644N+aw/91pG0x0m5RY2q2AAn/Xy5+PHBi130WqKq8b8zm9+mtI6Pvd/vIHrrz/G9lbJZJRhbYUQggNaZzlxYhVblz+EJJ7bIdL3N6wczoJAF0W1DmjG4rqFy40rQnOHehRXyMXMdlASme+nGlelxhWZ3vuZdtzW2lMBvswgGtHm1uPAEQ10s0iOxqFUpDGr430yBeM1/CU1mO4PL4tuYeBRo7XrVz24SF22uDgJrTl2qeNXp4s8crKsIhPDIM8QzjIozkAlyIaw5U5Tjc+TlVv8i3/2LvzYIf4Y5x8F3TlBNdlkbT2j8nvs7RXk+ZDhcIDHkBcerTxuOsFVMVwrIWqkC5t8pPu/og5RF+gaH7R+51xY1TodYSd7qAtxOIKmrqibgrcYPNls+grKXBuPz61LuMfnGwXeYUfeeyrQoJZa/RyatFQU7CyZnR4Ey1K1YnWyKD7awr07/EY05HYaetOZhzFgomHVMPOMEUE0q2O0KOIzxBuQTTLZxJcOISPLhVe88lXccsdLwRjG422qvZJy21FMc+zOmL2z29z7sce5+6OnyBWUnOl4zNraCabTilOndnCahbVHRus9AgqmpaXumHUdBVEDYoLA91rbj8LG2Gk4Y+89zh4+XXhFCPfo/tQ2pMXfM22kDkoVN2Foc4lpJ2/v5Qnd1E96TXuZN4TBpB2DPRX6Mf1BDVINqOnkLE3SEdrlFSU4sSfnGzqV8aA5yJTVYkDlTpOZFVZXLT/3L36aY37A5JEHeOhjD/C1b3o9ulLw2//lV7n2Osvb/sl/RqprmYzCpgLCCGjGcJmXp8mJB5ql5UGDJmUEXFhFOVu55z1VNUVdhXqPraazPL33WFvWs5Q42HW3SXs2FjVDY8xVsc2eyWoNUPycYk76fGqfCAcXefj098Ww35AwVznmxs54JtVYa3EHJizMi5uvpLaAwpgQKiC9dzog1QqcMVnQc+s9Yf9/8t47WpLkOu/83YjIzKp6vrun7bjusRhLjIMZDuFB0ACkuDSillwa4UBnSawkUruHInXI5VntitQhlxR5lqIWNAIpLkgNRRF0WBCGcGMBjGuM6R7T3r5+vmyaiNg/IrMqq/r1TM8AQw2wcc47VS8rKzMrMvLGje9+97veOsQn4D3aNdHeoNUacbRGbAq8y0JQU1rMTM8Rt7bSTTfwRZ/YambcLLQVuu957P6n2T67kz1bd5JIzLu+Yx/f9X130RsMUCZhyyVNMtej8AOyYgCiyQtBm2YN+qpW0kKWFzgnlPJSYSwybt+Gv9O9ssn3YttrwrhXUMdmBnKof+5GhkUpNax1WDcukyJfkw/AhbD4CwWpquPAeNm4eoDz5Rj0+oMBm2Hvoz3qfaJ8qchHhbXr2u8b3cIhU0Zb8AlZepzpVkGR96F/mh/+8R8C1SZvbDC9Y4aZ7dOI7nPD3rvpblg++MFbWTy7jNAAG4eK7+Kp6JfVxDpu1AP9azgBODtmZCvKV0i/LpDSsy+sDaXHhuyYoJ5XTQ71QCouPAj1e2OtJc/z4ZioWDIXwqO/ntv52kbjrT6Gh+Jetb56sTjTZHs55mZ4TJnIu2DkLHnvMCqIukVRNHTk6smI4bubw04BdaxgzgC3iAJUhtERyinEddGyBG6AzT0mVpw4fhwKOHPyFIYUXJdBuko/36CdbtC1Kb6huetbbuJzD93H6XNLeLEUNg1QjxS0ux3yIsWrDOsLOt2CNFOhBLkqx+1QYlpAaUKCkqJKyKoms/PXOK9+LOg1YdyrGz4JmVRGvSrQrEoszVs3FPWqtleGfaRION55dbx9sk16NnVcMM/z8x6uyiANB9vFQDA
T76saM14q771m+sXj1ciQG60xEpQfNaMH1ZXXOiwUgqCVRfkE44Wp5gkiEmIj+OalFMUGayunaXVmSM+sce9vfxS7DLPzmuXFFC8aLxG9fk7ciFE6xjs78oSlWi05nC+wNh/2icIHTxvAO3JrKVwo+YceicC5ou6157giD0wel+FtHoSwapOGeBtEolwRArQTGHx9tVcPplYTcJIkrxieeK200YppZMjrbQTJjagnk/tdrHE/79wy+hudjwBHVJP4eYf1Q8lpqfvllXNWwjFj8CgX1nDyhHOFalHlqk0P8E7QWLSsomWdWAtZloH3XHr5btrry3z5oS8j6Tqut46xDpv2OXfuOBvtDbLcMygGvOM9d7D32h0UQKSD/O/6+oADT63SaUOWFRTWc+JEm3Y7ZKAWroctq5IJQRzN2fCLvcgwKzv8HsG7auKtKkm9ctjsYttrwrjD+Kw9OTAnWQ+Vp7ZZoHVyPxhPY5/EbOv4+aTRQBzayFBHpvqr4/6TmP9mhv7FfcdwnmpCEUDLaFXgy3NMMnag5BxLMIRBS8ajXYTKNUodRryA7wNNbJHR7Z5ko32WzHmiVoPv/AfvIi9SllYPMTXv2Ois41Wf3HfodsGJGU4mShnERHgnARsvvZGxuqmesvAvQw689yPuedi3TOqwBb4MAoeC3GUCiB0t4cN9Kvu+PK6S8f6u+klrPcaoqe7tN4YcQZk65N0YdjsZO6JmIAMsMvrsFcGGk0H/oZH3KHEl6j7OjhkacFXmbtT+lJgxjH3s2KUnW1c5VQpUlcRUevFG6xDPUikiKbFZomHO0Yhz8I6pqRa59Yi2tKbhve95J0V7lf65Vdaeb3P4oaNcFk3xxY89zcf/5IsktomzOe1ej7NLHZ47eJJBL6e3kXD0+Q5femCRQddgC8UVl8/Talm8y/AWtNGIGNI0x1PmGqBCEHiTJMTAGJ68Z69ee80Y98lWDdK6V143bpOGuf6d+ueT2zdrY/BNGcI1UeUB2nKQnc9jhwvTMzf9TeNjfciBD1WjyuUdgQ5WlRtQauSth89HsYQhv98GSphSCsM0iT6D8WeDF4PQb59D9RbJlxeJU0+vvcjGRoY0Lbm27Nh2Detrbe79yAt0uwn9NOP++w6BZKU+epByzdKiphMD3oXHu96/FZWxHMnn3VPnwNugzaMYFTu33uEcKF+qg9SYSs4VhKSxzSfTyW318339G/bqXgd3uW7c63BMcAxGQcqJI7wi417vzTHni4qBOMSASuihGqejcVFlw5bAM1o0Sm0SB6nNRpMOV0XH8S4Ei713eAzKdHF2EUWGFDm9bhfnPLnN8GLJB10G/TXSXhvtPY8+dIAnvvQsWT/jLfdcx+uu3U17rUeeZoj3DLoDvvjgMTbWegiW+flpisLjrMNZQUShlUNQKN/EFo6iyHHeg+hyJa3wCM6XRMmSBVS7EwhSFhn5/5Fxn3wYRYL8rWXkedhNPJdJD6a+vfKwq+/XNdUnPRpRvvadkKhUvZ/cv9K0kZqRudhsyKFngguej7fhZrsgJuQcwVNGE008CF7qvPuARZtIjzSs42dp6OM0jSdJEiw5qC7rZw4TpZ5/888+jD23Qv90Sr5moRD+43/4DP2NlDfccQXrK23ETnH3N++mGDhs4XDO4wmqdtaBQ2Fd2b/Oo7waCxaF5nA2QDfWOwobKJJeAOdxRTbksVsbgsLOBtim6ussy0aQi3Ooihs8hG7cMA5woXTur3dIBqqx6jbFpat+ON8g1jO9L875gPP5LvXzbPY+eOHls6Eqfv24EzR8NobQaPUclddPMIrVtFSHWsPqNMScdJkBGhYPMY42WvehKPAeFk+fDgZYQ5H1UVrR769i8x7eZ1x1zWXc8447cbFwrr1EJgWpDQy4SAlTzZjrrr4U8YKJMy7fm3DjzbOI5IGN48IkpTDYPEKVsa8ojkqHxJfGvbQ/MpqxhvfGecT/fSDuF2HcReT3RWRRRJ6sbdsiIp8Ukef
K14Vyu4jIb4rI8yKyX0Ruu5iLqIKHTqDwLvShEtBqWGJPUdZPRUCFOqiiquV35TyMvNsLwS+VMdgMohEM3tWDTxVbJxruB8EABRjipZOnKrrjmGRqVdavFEHz2iBRjK+qoTuPcgFrdoUdm0A0Gi+awhSIhxiF0RFGrxO7mJhFMidY1UZlTbxPsG6A767QWezyMz/3/fS6DT79iYdZW1kndTnv/YGrEB3RmmvTmgryZQ8/eAqjYlQrIVIacQNSmcUbRdOm4DReWyyWtAh1TJUOS2nnHM6HBBQtDiU2TGBKDWvSBpemCGp5LsfaFCQj8xmTpqWiwzoXsmA1fmjgq0Ljk0VLvpFaGFehLmjgSY+aiAwDzmE1dL4Q2MUYdVem3YwOXL2EZ2+YbFRi7SUHhNwWoA1eG5zSoAzeM1yZaTwaP3RkVDhZCdmVY1oE5VVY7CmNFYVToFyB8YKRmLBOyFAM0LbHrDvArKwCHh8b9j+9n127tyO5JZKELO/T2ThF58wp9j/4FT7zN48yO6WJTUZvPSNdm+ax+49y5OAG3e4AJx4VT3P1zVOYRCPK0WhZZhcilE5Aa7JCWDybglN438GK4DDEIjiXk9GgzTSFjtCkNGyOt0IhBqcKnDgKHEWJvb/a4/ViXM0PA++Z2PYvgU97768BPl3+D/BtwDXl3weA377YC5nESuF8b2FopG1FqXrxy7/QUn1SLmDk8YwnXMA4/r9ZXOAll7t+FEzUNZ6uYmTwqwmnmsTGDFU1kdXOoxGadg4jBYUUoB2SzyH6K+g4wtDG2T0M1EmK9gnWlw4Rud0ceuEIx46exJHx3HOWZw8us7ayEUoD+gKtkiCwxIDb77ycokiHfWEpH17AlauZ6p5VWHe9jmn1u+r31ntPP00DlbMsBzhk39hx8bCxeEpeatdwvqda6ftUMZjqGC+W0Pb11qp7H56BzfcJq8bzg5Ev4yyMgex+tHXsPOVrNREEyLSsYFayuirDrkpuOtQcLyEk81STR+1anSodFpcQ2TjEV5Ql932ctuCbKJugzSlcFCNGY1SGt5Zrr72TaGqKVNYpBktk62dIB+s05xa48brb2HflpTzy6AGiuImOI5pTTU6eylk8u0wjmQoGPpdQTczZ4KErU8KkDusGKOWY39KkcBnN5lQp1Cc4D9YrrJexydeVEJKMQWnVmHwNsGW8958HViY2fxfwB+X7PwC+u7b9D31oDwHzUpbce8kLmdB18d4PseY6vQvKATHWN56xzvLn89Xr34dx7nZ1zPCmJg42TJwpRobcj8sBvJxlf/BoC0JZNF96uQXiLNgCJb6smlQ3TLXfrDyiPFY5RA9wQNJoEuVrTE09QisZQKeN1R5xxynaA1ZOP4XtrLJ08jC7t09x/xfvxyWOn/zZO9h9ZYOik7N4MqXXCTCItZZu19Lr9fCqQHmIooi1zJC5BOWhkDh4Yz70UD0+MryfNey36mdb7me9P89IV5RHT6BIemeH2HsoQH4+S0aXCS6TeQj1c34jUCGh6uPJ//1wPIZShuc7IK+0Vbi6d35Uyq8M7AZ6bEi7V6osgeeD3ASUolgqePf1MXDeeB67Ph8ULD1EOiMSCy4JjospEBcRxSneHEGpFax08NkGziV4HaF1h976Mlla0O0cp98+i08HrC0eQrHGzu0z3PmGfeSskrseczs0P/SBW7j97ktI+12wQj5oY3NNkUOaZjjnA8deFXjgxIlVPANEFeSZJTIhM1spw8BHWJUE2NT7EGCl1HzyLsgtjN3P125AdYf3/jRA+bq93L4HOF7b70S57bwmIh8QkS+LyJdXV1eGD6IxZgjF1DHUure9WaDsYjtqclDVjYLS1UMzYoAMvc/Cjk0yQxrYBEPn/Fbr4ooP7CzejwpJiwhG6zFKWcAj3ShOUGfUiGC1oFSETnOOHvorxOVQLPHMxz+GzpYxqSLvHmYu1TS6TT787/+KxZNLvPNt97CxkZEVntUlz70fOcKg2+Dc4ho
OG5agdgsmmiEyTYwx5FaTSROnIhQeWxYbqBvPuqGugrzV5+KCp1Z9Zr2jmPTQy301gh9i8Xa4ffI8RkqdfgIltT4mXgoq+3ps9XE5xKQn/Oq6hz85Gbzi88pIXWbSUNdjWlCjOUpFj5xwuiZ+z/BYquKuG6z2pAKFsjiVoiQmsjM0fI/IHSFRZ0lMhImCzEYxWCF3K+C7zJgIlWakG22UTSEb4Dc8B588wpmTJzlw4CCrqx0Kp+hlbeKWIisceaqwA0+/02VtOWXQt3gPRZGTZzn9Xk6/q5ifW8Bah/MhNqZKZ8+LkHuNkwjKKlcehRMdVuPiqJNC/77G5Nc6oLqZq7DpL/G1AtlbygLZRhTYUZap1joYCQl1VCc9t/I4wfBNkG0v1IGTHiMwccyRR1/nt9ePqSaO92Ie0hBnpMLM/VCqOMsylFIkcYyR8UliaCSVDA27K0rv1hVol9FwBUY/wnX7LscXfVjL+cO/+Cg9u8B//b1fI8s7LK+d5bd+6Y9573ffSBI1sGmfZmTI+kvsvhTe/e5d5EWfZisGFEXh6fRyvPLoqIFCSH2ENw1MFA8nlyqRCgJMNNlXFUxT4b9VLdsxDZSJe1m9r0MQMgw0K+xQtyPUuhU3UotUlP1THbuaWL4BYJlRYHSkYVJfuXr8GHd8BBe+nLNUK9XJCWN4EdQpmRWDrLo+pVQIeI4z2yd+x6QUCENepFKKxAcIshDBKhVmiEIRWQv5YZQ/xyNfvJ+it4rNU7LVNf76T36f5TOHcf2Cot9h0DlDowDaFt8uePLhZ9n/pSPYFHbvuIRmMk+eadrtDnkGzx1cZ/H0gCOHNkj7ioMHznH2zCqVHVBao/U06SDmxMkucdws8z2ikl0HhRMKiULFJ9Hl75Jy3RKyq6Xqw6HNeTn35pW1V2rcz1ZwS/m6WG4/AVxW2+9S4NTFHFAmSP1Dw1slwIgMgz51z4GaPsvY+5c632a4ZEgFHfNIK8OhS5xcatd48dhmGUzyIxjBaE1kTHgg3AjWqI5Z4fOqDOp664i0wVtH06+wxT6KS/8a6PBvf+5/4fTRo9CI+IVf+xW6S0/x7re/jfZGn6UTG3zf+94BUvCXf/4wa2d6PH7ffs48N2BjeUCUGFAdROX0+hZLxCOPH6U5A0hEURSs+xYFoZakiJBIgRc9zJgdBppr7BVl9BgzibLvTInNVquyignlhn0+8uqrqkIeStZOuCdjK7qJSb/iw1dts7jL11sbU7l058OUFc3wq1mtiPfnJyOVJ9JC0Esfc2RGsaHIGChhVPF+k7T6GoVTh6Q2paSE2oI6aJHlNFNPZCHSBdpZoqKJdmcQ9WWmptZA4E13fzPaDdD5Ot2VE8zO7Gb31r3kq2dZPfMMtljCdjc48dRZnrzvGFfu2cXevQsYNOI0Lk/xzjE9NQPS5pprG4BjbWWJXqfLlVfsZn5+LiRhiSfNCzptOHWqw2DgcV6jTYLWCWfOrOAkIrUGp5tYSlluAqtGlEKbUgRPwG0CMb+a7ZUa978EfqR8/yPAX9S2/w8S2huB9Qq+udg2+aAOqXD4UZGKr0HbzIP3vqzFKYz002v0rOr15QWsJgSDrB2lYJdqeBUTYbjfRLDJGEMURSF46Apidwix55iaWsC6jHfe9mb+9L98hL7q0F47wq/9wm+y0m2jz1nIco6fPEduhbe97RYGHYvOttJdX+PsqXP00xyjZslzg4kTslRx1VVXkRceJ6WEgGogStOITaBolpTGKnBWXWsVXK1DA3ZCS6NugOq1T0NPBWS3vk9VrWcSCpi8J9YGmGtyYv5G8N6Hkho179z7UaJSoAtuHvi/2CahsscIJ5dy5VSbOLQeeephonGBoqj1kON+vtEaeeYj2Cy4aaPfFNQ7c60odFlRi4zYnEarU+DWaS+dJZIM111n/2cfp7+0zFo35Q3v+la6/TYPfvqTLJ86jfOO9X6HAwee4osPPI7NYOfOWbTxuCIHN8AVXdqr69g0FLn
xdLns8m0oDaIyvOQ4B64ssLG2McDEwvzWJjrSKJ1gHQxShVcxGQZP4O9XhemVVig1gi2qHAQYrY3+m2PuIvLHwIPAdSJyQkT+MfDLwLtE5DngXeX/AB8DDgHPA78D/MTFXkhFf6yW3kNqVwUDEKQItIwv/89jzIjb3IOvtlf4l0wkJYkraYqVVsTIiGhPoEg6ISxLc6AoX90mrJ1R4MmJw2LxyoMuOek6Rrsm5DF4g1WeQhxegRJHjGegLSbLSU2KJcO6HtuS0yT5Z/jzP/sDVjoW0pw/+t9+hb+477N84J+8n2zxONmJFd7/3m8nX+5x+NCzGKuBNQadLkopNlb73PuFLk1m0NMLGAdKeniVoHyDLz98hO1XQCuawvmURbcTgyfWghODRyEqDrx60YH1oAGl8U6XiUjl5CgCOjzMWmr9WnrwlVEa3krviWrvlVJBabOCyXwYJ04qAxOYMuKDqFZhM5TRY4buG0E4DCojW6MOVvh2bZ+6sfjqg6llXGm4KgsFKCDw5gP1texb587D44d/k9s8VFoxVUKP1hqcp9AC2uJdl4Zex0SH0HoV5Q39tTUGyydh0Ee6OTPNea7YdwM+yen3Frl67162bdtObzDAFnDjDddx553X4HWMd5CnOVgVykpurFMMHHk3oRjEIAWii6AdrzK08gTt1UDnnZ+fpZ8NmNsSo0wAWkRrLr9yB73CYyVGdBQCwipIEYgPIVWPgAQrX/XF31d7ST137/0PXuCjd2yyrwd+8pVezBA/rQnvDPm2HlRZhq1S+5uUJbhgO8/Y15aY4oYA2BDLLfcKy6swGdSPEbjVnuHceAE4yPvA8fUexApK6fL/HDF5YPV4UE5weVBztFFM4S1ND1miWMg01qwxlz9Fd3CSPG9z2cZZtqyfYlXN8EO/+NOsPreftWMvkA8sg7PLFL0BS2spUdOw3m/z+AuneN2Vu8hZwyYpb/smeGbJciVCe9ajB9Bs5TxzNmLfTdeyYHKcthRuNoghKYMyESIaXQW2yzV8ZTx1yU2OoijATNaFbvMeqniCjPqlMt7110kJZ+99qTkClHi9k9LLZESlFAGtQvlBb90QWqvgnwuNERG5DPhDYCdhRv6Q9/43RGQL8J+BK4EjwPd771clPJm/AXw70AN+1Hv/6EsNv6+61S/fV5ot9Q/rHruMYhUvwzOsYhzVMQUqyXQAtB7VXa2Cg+JrK9vqqxPGS4YBxup/N8zQrC7fW4dWQjPdoMEGee8Fnv7K57jlrjspRCDf4NSRY5irryKdLrjpvXdinSFLV8nXVojSAVu2znKus47PhfbpdZ568AXmtsxzLjnM9mSakydTBE2rZTh1fJmp5ixHjh3hiitjlJ6mKMCrjEGnS6PRxPqY3A1AG86eaaNUxPRseA4CCT/D+4jUR+SmgbWO2Ci0qp6RMhdAaZxoorLfhitYQJlXN4f0NZGhGlZ+fghB1Jdwk5DF8Dt+EgOcaGOeOsG7Lv+qCL3z5+uO1L2MITwgITlHxi7kQl03ggSq61M+BB11BcGIIOLJlVDocPwoSsIS1wZdePGKGb+BnXqK4/s/xOKBh/DPnmbpwQNce+ebYOuVLMxtpbtyBjUQdNcR5Rmq4ShUj6k5TaQSIma4+srLSLMYel3SPMKYBR5+epXC5ihnQCtSp3n2ubNs2xEKFCstbAxAVIQyMUr0EEevgqnVfVJKkabpmCqnr3l9wHn30HtfCj/V0udrsIsXhsa8vtyv6LGVlpAqsxYpYxoV3/oisc0C+Bfe+9cBbwR+UkRu4FXI4/hqWj0pSc57Pw4TSm0CfVnnUDDJbqkfS6laZrQqVw7lNYTsan9eKNVNPJpjHnxt7FBqDcUzAxSLnDjyOLt2bsPEgcve21hne2uahvN8/ON/RS45uVun31/myDMHeeKR/XQHfVQeYbqgHWzZsQXnYC7egrWardtmaDUSls92SNuaQTunFWe019scfn49lP51GWtrjkHqsVYQFVaihw6
3mZ/fiojG2tBZSuUM8owCQ4Ee9gOAlDLGlTyDK+ffSejswqHnr017TRh3GGmoVJrtEIKpkzjeZFWXSmtlsslmM0LZKiGwYEwCLZEajzoEUUc1S6v9KqhlhB1W3rur/VXnl6GHiZags44rvd8YR1QacSgkcNe9ODwZ4jK0OUdDPcFcvsKJs+uk3vH7/+FDXPe6b2LLdftIk5SVpRMUa2dIi1WKost//r0/Ie92wOWkPcsgW6brl+gWhj/8qxdYXrWsnFmiMXWSb7tnDozGeoNtQG6u4u7br6WhuhQKsoHBxjOhSIeJQY0YMsFwK0SPdH/iOB4N3Bo0Uqcwjt8fGe9HP1J1rHv29f1H4yAUNKmv3IYMGT+S/n2p2Ij3/nTleXvv28AzBOru1zyP42vRNvsloa8uJCb28s9RlX8bJchX470m41Erxl0Feat7Ndnf1VGCU+NLI1dfoRVoDf1em/bgBEvdw9x4221s33M1uRrg7IAt07v57F9/kvbicX7gu78T13d0O4sUg5Q9O6bZd+VW0syxfrZNutxBKU9jXnHq7HH+9s+epd8NTJ71tRU6GwXPP79MmubMz8yRmG3s3LlAVlYI27Z1jihKkFK8L88sUaRotx0rK30qMbDCZYiKSi67KkXEPLoacx4qxg0lVFP1zySz7NVqr5kye/UMxGqABB2R0cNbV348/6GdZElA6OHNoBM3tt+YESmdF+/yMkA3XvSjbmTCtnH65ORVeQxKMrwq0BJjbYEiRpzG2wFRkoQAk+3QVGs0OYqSNs4rtFfYXo/33P1mfuL9/4xf/tC/4dTGWZrHz9BOBujc0Tm7wsbJc0S2wT13vo3l3gC8YmVpA+UMnZWCfL3NPXdcSm4yTATMbaOV98ldxpmjy0T77uTZL3+Je27fgZYIq5us2i2kTjMVNUBUOeDDk6m1xquQ0qKUIqoN1rpBp/y8wopHprn8X4U/o81Qp8N6h5jxeIir9WuAuoLODWUQa6Slvyc/AAAgAElEQVQnH6rQ1+mUSm+e0DbZRORK4PXAw0zkcYjIS+VxjJEGROQDBM/+a9KCs1F68JXy4ERxDCiGv9n7MqZEuVrdFDYMA300GfjhPRKtw4QRcLLyvJ5IAjQXHq5yAhjGQCQEEUXwrkAoUHicaVEUKZEviBw4aZCVsngN3abon0LLKtvnu5x6fj97du2B9jJKCcXqgIOPPsHlu3fx/T/9Y+gpw+LZExif49MUv5Hz+796L8tLcPNt27n9W66hn6dk3QHGe+YWtrPzLdejzDF6KZw+vYqPt3JwDfa6Fq57ilQgac1hIqHwDSLjKUQoXAeROdZXPDfc0mDrJTFaJRjtQSyOGTqygKAwPkcR4VWERaNFI0qDDppPIdkrCZi+ECqtyZAg+aq114znPmkUKoYMSoYefGXYLw5rH2WYjnvW5w/0oadYi/gHj7QM2En9IaiVARsem0AFqxm4SIWkJFUr8mxLL8vbHpp1REUU+QoN/ySzPIpb/gK//i8/gGkvgu3z/Cc+ywe/56fonVviV3/zZ9h48iv45S69Xo+4ozj5+LMcf/Qwf/wbD9E+k3N27Rz5Rp9+e43uWo+sWIekjZ6KmJtfp5PBlx4Y4AcFOA1+lstvfAN/du+XeP03bUWbWTLxbPQ1mdI0kyTo+9SMo9YaFRl0lIzJJNTvxxCaoTZZK5k4TtDjrxgu1eQw6QFuNqEP749QSvqG4G5ReZ1Smp6LHCsiMg38GfDPvfcbL7brJtvOO7iv5XC86IlfZvOe0qBOyOyOQSmjlH/Ev6hh32ybAMqFP3GCQaNQmDKRzvkUpAAJGaviHcoF6JECgi+k8GJwosBBrDQ6NlitSGxEiwglGTo/zXy8RrOxwSA9x7ZYk588woMf/VOycydZ761z092309q1FYkLzp18AbEZxSDn2LOHeOLhR3nL3Tdz7d45zpxeI88thc+RhiIXz/4Dp/nC/U+RpZ688Mxta9FK2nzzrVOs9hbpFIYomsI
Vjm6aMiDD4+h318EkdAvDU4eOsm1hlkR5EhNWpZaIQU/KcVfi7DoK0KUa5Qooz0Ssbjz34NX23l8jxv182d467u2cG5fNlBH+99Ltpbnv9XNJiSUqpGSEbCJRUDu2SFCrqwxdXaI34HCBbiVEBOqIRasBmlVa7nG26YP0T9wHKweIBiv8zM/9PB//m8/SefwwC7t38CM/8W5ys8yBh77A7FyLyFlMBBuLp1iw09BOuOn2PeSmj9UDHrr/cdKB59FHXiDravLU4LF0lttsm21wyx1bMI2CtPC0mgt89sGD/MNvu5Yo0vSzdTIfMYjmMHa6TG0PcRCtq1ddamaEiVdNeO3De1MF9Wr3cDhxmmDQK8OujSHP82Fy17B3azCZr08O4sfONyYHrWrBRfXisEy5X0Qw7P+P9/6/lpu/5nkcX00be0hrweaqDY35kBw53PUCbfxZGsIIJdYipaMTtI5U+UoZw7J4cTiC0J8Xj4hDK493OThb2jMNRIjvo71DuwTnDM6sYPQhpuUAeuMZjn7l8/TOHkE6qww6Hf7urz/OFA2KbkFrepZchUlh7dwaz33lBZaOLdJd7aNUk737rmGQe775rTfz9nfcSpEXFKlQpBqtE+bmNJfsEPJeRtHvk1qPszCfNDmzHHHwmKI/MDSMIY4TjIQ+nJppUviEg4cXIW5idEEkgHMUtiC3CstsqKsqCpQZYurDXJWyn0VkbPU4hru/Atjs5bTXCCxTx/OACgsndF7FBbcuR0m45KGXP1x2KiqeOox72y8aeGVUQs8YhbceLQZHUQ7iwN5RyuBsKf6lHUqHIJA4j6gE0ZY8bWKUotB9xPWZEUUapRRZD792lKQ14JK0R9842r3jvLD/OS65fA9P3f9Fnn/+ebZFLd79T3+cu+95IzPTM+x/9DNcsbvJ8vNn2P/AfrbFl5OpRRZPrLFz1zaW8jaL6Smuvfky7rv/i9xx51Xccte1pAPH7Xe+juXVNQAiM8WBF+D1Wx0+yVg6kbHjqkv4808+wz1vuppGsUbe1dipnXQyjTJC3PSITjA6QSlDpBO0MXhjiLQug0pVDyqM0Xhfw1NDx6N88KodwXBoo9GYYXaq0gaUEMcJXhRedJBTLsMV1XHEQRkGwfqgZQJB1iDWIdlKGUNW5ERJHP73pZrhhUZdGBS/Bzzjvf+12kdVHscvc34exwdF5E+AN/AK8jheaRsPpI7RZ2DEdr/Atyf3r/1XOS5UHBiPEo/4Ktu07L8S5hQBW0IxgTETzis+3Muqt5UrA0o+AgqMXSeRDrZ5mqJzCrd0hrifsk1D56nDnDp1mj13X8etb3s7e7Zfhk0SMJ72xiJ5r4frdLls204So0gHHeKkT9brc8muhML1iJuGvh1Q5BbjW+A0l1++g9VVYWUtJ5EMLQmpzVg8uUx70CRzGdbHOGVwKOICCu+xvsHnHzhJo5Vwyy2XIaaP9zEKgxNHrxCcngeVI9qgdIQQaJBQc3iGAeSKxDEKKCulyMtSoa9We00Yd6FcolSFfUsDb0SwgRMZtkvdKylhmmHxijDI695+fbasvrOZka863FqLImgzazzOC5HTaHEUMkBpg7MavCGzYJ2l2YiQXoYRjbanacUxy/3nyDttlI6w68+wfW6epsp58OFH+Pnf+Cj/7nd/iX/3U7/GlVdFWK/54f/+A3zb934v//Rf/yLfH82w0d/g3NknWV08w+wlr8PNxFy+6xJcv8A3Wpw+coapuGCqEbFr+2X0B2tcf8OlrC/PQbKELTSdTp8kUmRuQG+QcdPrZ4goaDbmkVnh0PI0O7YqemsnyKIZpmbn6BSCihqgY1AGbSJEB5lepTWU74eyAZZA+VKjydk5H7KKJ2CaQKcsK1gJRMYEXJJKn6csOh5Fw2DWZIGUkcc6kifw3pPnaVhR+CJcXykNrbUO+vAXHnp3Az8MfEVEHi+3/RzBqN8rIafjGPB95WcfI9AgnydQIX/swof+2rWxMVsFhYBJDxz
qY7yCADaDYCbbkM5RcrFhtOJVUFKEXRnjwI9qggpgxSM+BEyVr75fhFiLK5BoAyWrxH4dsSmdzJL5iKSpWD15hkfue4A733A3yjRQWwyydYbO+irSHpAPNvB5irUDlLE4m/HYw49y9Q27UdbgfYGIISs81lvSfAMvnl5RsNYuePCLp7nssin27YjIbEaep+y8LKaRR+SFIokcuSRYQNkMZxoUso2iOMvVV+xkpmFRSrAuQKpFZrDSINcuZL3qchxLYAApVSMTeA8+GPVqFTl0epTCmFcXc39NGPfJFlgPlQ5FGFheRqyL+kNf/069bfb5Sy3RtQKxGucNGBvkb8UgkpO6KWINih6x5IhLmTGeKLf8p9/5Jd71jrehzDH6Ww3TRcwsc5zoHufD//tv8GMf/DFmLtnNTfuu4df/0//KyhNf4qf/+U/TuEJwrRaf/stP8J7rv59f/Vc/y7ETT9OKNY3Uc92tb6Sb9njyiSe46w23c++HP8Y73/4GnjnwNEeOHOGet96MkDPoW57af5rOSpfb3jyF8x6tHIMsJY4VylusT7AS0dAFG60rOXHwGHdcF9FenSdZuIReDoUkKJWgdILSIRtVa402cTCaqswDGEIx4/chwDBgbR5WOj4E2rxINYMHI1+Tgq2OU6dCvlgLkI8MJwDvJEAFHhQahyUo+ECR5S+axOS9v48Lu7tf8zyOV9pG/VIFQUcaLarGxPC+Pu4rj77OPBoXE5Mg+1h6maYM/EkAzqsCH67KWA1eaUhSc2gBh8NVK2RVBKMuArZAK4d3BSZeBreM931UZPHdLt32KirRaBWxZddO3vCet7K6tsqhv32Od3/Pd7LaXyGVAa7XRbIcmw3I8x4PfP5LvOn1t6JSIe81OfLCGa64dBoRB0SkfU9kmiwtrqBkiijSdLqW08sFO+djEqMhjomnDa1eSmN6CldAQRhD4MjNFPff9wKX7d7K1llomhx0TC4O6yAvWniTYFVGLM1yHEtYtYiM/UXGgNJUTmf9fgY79uom2L1mjHvd064GY+VNQ3DqRQRtpCxEO/bt4feq1wtR6V7MwBdeoVSK1h2cWHAtvNIgq8wVZ2ivnWFmqkvkBlD0iRNDZ3WFJw98lGvvibh55kp+/Wf+LT/1P/8L3GCVD/8f/57/6ad+EDcnzDYynnzkS/zdH3+W7/uZH2X7toR2w5OtLfPm22+ge/gZekvLxK0Yv2H56O/+v3zfB/8RquG47Y7X01s9wTvedxuZivjWf3APG6t91jttOu01Fk95LttzKWZfhzTto7Umy7PgVZV9lSnPxtkz7Nz7LfTXl9iiV9hYX2BmxzaWexoVJ+hoChO3QmHpOEabONAdI4OJGyEbVYVB7Ai0rwo7H9e/D2wBcSEAawMAj1IBglE66GTr6n9Vwl4Epb9wTDO8Z5NFrzNbBC6+UuF1SNdziBPEeJQTKDH9r/dW1U6txu6FA3GVQa9adU8oJ4QRBAOEGrvVc1GuV0VsWWAmGHVtklJ9M9QuDYtqwdoMVcZIjCicTYl1hrIdlF+nKV3wfTLp03AO7SEvHNrnFL02u3fs4bEHHiF1lpvecCONbAvXbb+UfG2JIk7JXIbJHYONLi5N6a91uGXf1fSWBuzcfgXtvMuefdvJXcGg1+fcuRVmp2fpdQo654THH3mWm27exTu/eRurPcEpR+FzTKNBBwOxYMUiDYVSbQbdHN2Y5pkjayhdsG/PPLHuIUCWeXxk6AEDPU3hNZG2IStVBThmKKQ3EqoPcacgMlOO7fNjVK9me40EVDdnR1Q6LPWWFyNt9TqDpTpGPRhbHW88+LR5h0qJ84uPMekCrRzmzXFU/5P49v009AF2besx3YAkCgqKa+sDtJ7hF//Hn+XG7Xvpn8j4Jz/yfvw2y6I+y1Wv2w57L0fv2E0nmmL7wg7e+aPvYUvkOJr3KU6d5BP/9+9is3Vcf50iUWgX87FPfZ5mY579jz/HoNPGdfrc+7ufRWdTuG4Pqwf
krku/n/PcwdPEjZy42SdJwmArbE6jERPFLZwVCmdIVIPd+27nt37/06wePcqOndtoLcQs9RQSNUBiTNwYeoNxHBNFEZFJztNsqbNkKsNe0Rgrzx7CgPclgyCOGgDDSkxKKZTR5SSu8SJjQelxRtKoVfruIWhVruqUDpMwI959xZr5++ATv/pt5LTUW+ifzWv7jn93pLA5NOyMMOCgNe6DZlDte54qWO3wBHqj+AzrCsTEeG/RkiN2nSndRQ1OsHj4IWY4R7H6NE984U85dvBpbJEhaZ/u0SPQyfDrlvbJVW666QZuu+0Gmh766x0++4W/5dyZ45BlmKwgXVnlwKNPcuSpI3zuL5/mD3/rPtYXO/SzPrHOyPMueZHiRZiZWqDXGaDFs2XLDFfsnUWUopFEzM06VKR44VCP7oZFXISJNVFs8AqczYgbTQo9z+HDi1x19VamGuCtJnchlpAXMaltUhgVcj7cCH6R2hhTSgUlSaVLRVcJeS7VeB7a/pejT/XK2mvGc4cyo00Hadeh7oivhDND24wlM1qy1gJEE0v86n198pjcbzaeosiO0m0/zlrvOYwM2L17Jz6P8TJD7hzKK0Cjoym0tZw8dogt2xZIUuG+L3+Kt77vHQyKJrme5jve/z7U8jn+4nf+nLf94Lcz1RK2bb2U1fU2bnAYb2a48z1vpyeeRmxAOQpX8JZ73kjnpOPx+55m5yW3surXURtNHvjkI9z4ln30N9oURZtBmjPoCVGU4LE8+MAZvum2HRTOYwtLngM6JtYR4qf4jx95iNtuXGDn5TFxM6HbWaBIIiI0cdIE0SRJgyiKUDoC0aGykzHBGy9L5znnhpm29T4MzKHgjRcueOFG6yHTyRgT0tW1GtJbw7FH9yl442aIT9a99uFyVteD6oq8KEZVrsrQoNIej6sJLn/9thGFro6fj5gx42N9tI8wGvfDfoTymQr5AhVjXVzgpeOlzCwtA6WignaMz0EyjHKYoSJoByNtnD1G4gf86Ud+h9tvvQW7speTzx5i7459XLJnF0/u/zJ2PeOhv/0cb333u9l6+QLJTI/ji88wWFljVprMXrKTa265CdNokXccKvcc+eIRnvjCc9x4w+uQnueqPVt48onnuOx1u1k/6Dl24gTX3byFXq/Po48ss3v3DFt3xFhXsLB7ijTNceJQWY80a7J1y260U8TeIC7HuQR8hESGnk+4/7GjXL9vFwtzoUDIoFOQJDF9H9GzLZxu4Z0iMZok0thhcLSeeVvKp+iSQad1YMvUMrGHDED36gZUXzMjvwp7DculCed5JJsmMFVyAiUGeyH4pY6FbWb0lVLcc/Ne3MbTDNYfZkr32LPzMjbaffRMC5EWG2speIUtuuBW6G2cYWG6BSvr9N0ar3/P9fTTsyQbTzDtV8l7p1iMlvmW730Xut9jQzuK3immp0PKcmH7RDMt8o2cvO0YrKe45TZaQeK63HBDg/v/6D66zy/xTd9zBde/YSd+9TTLx3o8/sBJlk/0mZ9vkRUD1tt9puZgkApFDkrHiAlGWmnDJx9b5Z137OGm61v4YoFubycbcZNWnJA0Z4jiFkljiihuEMUNTJSg4mZJTwwDNY7jMe32kVc8gmRCAXOFiROkHOBGx2P671WQtloRbObF1Ol+w4lDa6IoOq+IizExUrJuquPYkj77jQDLQAXNjDDzSXLBuHFn6FFWOkz18a8QlIwygUWkgtgDU0YEvC5XRuCxiHKIDMB10O4MKnsGNXgWt/EsndPPka6c4h9973/HzoUtfOSPPsI1N95Iu9vj4x/5KxbSFpdu2cX3/ugP0mGFuZ0tBtkG0unx/JMHIY4YDAbMTs+gtaffW2Hl7Em2NKe48eormG5O40zG7CVNrr3lStApSiyDdJ08V2RFgJPWVyx5D7KeIR+E35lnHVSh2DLdYmYuJ5mz5LKB05bUpkRJjFMRx06ukGXCFTunMdqQFhkmCXGFvjM40wBliHwD8Rqv8tpzoIf
yDMFzD8lKjlH80F/Anr2a7WJUITcrkP2LInJSRB4v/7699tnPSiiQfVBEvvViLsJTFsH2EGszfF8eb8h3Dh7jCEcVFZI0Km2ReqeNp7sHNkaV2CTehACtFBgXttm84Bf/1fvw+ZO0GsL8tnmUztkyO4cuHIpzzE5v4LMXkPw4kp9DySoiy/TdMj5fgU4Xl/dY6Rf0bJ8iF5reE/kMbyyxK9CqwSCHpvX00xyf5qAHgfrphK7fIO1vsNJr88ADh7jxzbtQU45nHzpAe61LVjRpr1uWlyDH8fiX+wzSmMMHE7bMXwo4kJhBnqG0Jssi1jYavP4KzZZdM+SyQD+aJo+FljFEjRlMo0XUaNBoBMOu4gSVJESxBokQZch9mIArrZeQUVz1uQJRKIlQEgd6qhOUmFAo22jEGEQUojRGhfRuJw4xAkbjRQUDLcGbpBZ/qcZB5fEoZWoGzaFcHv7E47ClkdeBzvcqL33/Ppr1PuQWSICwhrREytJ2w0kw4Ob4sqS196HknQ/VsIIHXgZNKdkcVf03H/iN3gn4HEWG8hmKNUhPki0fRLJT2PwU4k4Ty2kef+xvee65R3j+yaeYacySraccPXCCu+66iyIBiT3dtQGf/ptPcW5pCT2t2XfD5Tz1xKMcOXCIpm/wzW98C82pBT7z6S9w/Ohheu0VbL/N8aNHWFpZ4rK9u2nNKW68bS+XXnUJcVMxNZWgpy2XX7WTzHmc0zSbs2y9ZI400xR5hLgGNtUsns5IOxHWZsStnEI6YAocltZshDU5Tz1/mtOnV7hh3x4iycAJuROIYjJnKHyEE4X3jsQkmHJlGTD0auJU5fhWQ+aMGWofqWHco2J7yaS67avQLgaW+TDwfxHU8+rt1733v1rfIEF06R8CNwK7gU+JyLW+yuF/GW34UCqPrrBVH4x5JbE7ot+N0x7rnntFCzvv+KWBtyYH1WCh0eKtb72SXnqMwnXppxlab0WpOXyxgQhEkTDI2hgNG2urqDQlUhrsgI/d+wXe++3fgfKONG4xSC3OKIquIk40UOByQ8EG+AzrNIPlJWwmzG2dZ9DvIICRgrQvTDW63HLHbo6d3ODc+ipX79uFLhoM1peZ3zrDnW/cQ1q0ueW2PTzzzDGu2rcTrQd4MXgJ5fAinbDSWwXdIJ5doGchVwaiBnEyhTIxEsVEZfDUmEB/RIcSbhUsUvVn/dUJJf2NktIYKHOqDJz60jsXESgfgEoi1ktQcaSSXXaBWeB9kAuo3/86Bj9MZiLg7XX8svAOzThEJyLkef5yh95rrnkpkPJ+eFF4FOJKAy4gyuFcjjaCtTlaCXhQKqHwKSbSWGtwRRyOYwZYCkTlRC6niaKrllBRA28d8eAc6+dOk7QMM2bAdO5whaOvPZ3Vs0zFCecOn+J127cxtX0rn3v6IKeeOUiW5xzc/xg/8P5/zKNPP0N/dZ3v/OHvZtBZot9Z4/Th/czOxczFDbRpsHT6DH/3iQd405vezJvvuJ7+mubgFw+y96pLuXz3pbQ3BiyuLLN///Pceuu1aG147LGnuf76S1GRYWq2UZa9s+y5fI60X9BL01DS0YEQc/wkFLszzEKT3GZsrDimkhZz2x0pmmde6HB2yXLrTbuZafYZdNuISZDmFnrWkPsI0UEiRGuNNxlojfPTJHFSM+QRSgdqJCUUU30WjL8vqaYyNPZK/hsbd+/950Xkyos83ncBf+K9T4HDIvI8cBdBD/6CrZ5mUjcgITjncT4PDzMlH6CqRcpmBr2aJW3t/3EvUFHgvMJjILa43PE3936YO64+SS4rKAVb9+4gywxJNI8lRwqFd5Yo0tg8RZwjFs/SsTP0ijb33P52fuXn/5x3vvs2rn/jpWS9DkrFPPOVR9i940p27Znn6adf4LKrduN1DHFGXEQ4D921s+TOBHy6l/H8gRfo9GaZaTr67RbrpxRPr5zm8usMibakWU4/62J0g9Z8j1u3BMVa56axWCKTUOQ9Ot0+cwvbSH3
EyiDBG4PETaLGDNrEaG1IGg2UCbh6ZJIyE1SXYmCNUCxjItNzpA4YArioYGxFQhCpYsWIVFV3AgWyggeclqFioJKyAIhU1XnOlyCAEl/XpV5KuZrz3mO0obD1YudqbPxMZr1+PTbjgxceVqgFtpRe9i4YMYcgOsH6HCljHFpCeUKjYoq8QKsMTRF0YqQLtMmzDunGBlFzilgfo9dNmYpbrB46QL/fpphpMLd1Cw9+/im6qeXN330X2ZlVTg26dE6eoNvpc/3dd5G2Ux750iO89Qe+h2Of+ww2TvADy513vZnuYI1O/xySdfF5SuobqKkpXAZpv0dTGVbOnuOSy/ZRZJ7FUx12XgJT0yZUIbPC3MwczjoGeZsr922n34swDY8tHHluUaLoDfqkWRE0aXwe1jXec/OtW1AmQ1GQmBZLS0u4Bcd8YwenlxXPPr/M3a/fRUN3WV7tMR03aCRTDJwwsGHMeVRgxui4pPNqdMn4Crr6wWOvvHZVCeqV968MHQ6N+TB+5N0F7vjXpn01U8cHRWR/CdsslNteUYHslZUVCj9OZ6w8tuq9xzLpPY68u1E5txElTw9/Xh1bhAohLnVmCo0q4Fd/6Rco8kC93LJtC2kqNFtz5C5H6QTnFUmjhQ1qWRx47llEwfR0i4QpFhYWaLTgM599FMkS8uWEe3/7Mzz6mRU+8V8eY+2Y8NAnD6PTJkWvw2A9pVAr+KhLt9tldekMPvXk/TUip/jUp47yyCPHUUZzxdUNrti7hwLLer9LUYTKTNZXcgCeAo0yMVGUEMUNvErQjVk6RcxqX6MbszRaC0TxFFHcJI6aRCbBmBhlYnSUDANpIiFICwwN+3B1pAINEiWICjUj8Srg+ybg+yaOSihGlx52lasQPM8AG2giFaHRQ9lf/IjvPumxD+GY8t5uOg5KDn11rRcvUfHabspFiE9QLkZ5hfIOIUerFK0yFCETWPkmyk+hXYL2MYnNmPOeBa+YL5bxKw8jnYeg9yR68BwtewxdnMCqk3z8wx/BnzxE+9BjtKIel16yhRf2P02W9ZE5y45LY/JDR3j8Uw/TW+2z3Fmjdck8hRIuvWIXN9x6K262xZvf963ksee6669hfe0caX8ZrRwojSPB6Bmy3JP7nG175rj7HXey0R6wca7HoLfCoFvwwOf285XHXwAnzM022bt3FyYqcC7j5PFVnn8mpdcdUBSOXq/PoNdDCZw508M5ixKHVo4oUcSJYIygtUN7zZV759lxxRaWu/McPdHj0t1TTEeQtXO8y4lac2REZE6BSXCiqWoaVAlLqjTu1WpU1YOnaiRlPMrHkbI+SXA2N8vTeTXaK3Vrfhv41wRH+l8D/yfw42yeELLp0+W9/xDwIYD/j703D5Isu8o8f+cuz91jy4zct9oyK2tRlTYkVUkIISEJIQk1akbQQPfQdBsz9NDMWLdNm81gY9YGYzaYdZuNwcA0RhsDGKJhkFhEo2ZowxAgQWtB+1JSqUqlUi1ZlZVr7O7+lnvP/HHve+4RlSUEqsjMyvavLCsi3D08rj9/ft653/nOd+6598XqnNthEpUe196Gmjx1PKs1xKG5xbl1BISOqr0iptkhNQ3goHEYVX7lV36JRx/5bU6ctKxvbbHvyCJRPK4QQhNQA8PhJlWdbEjvuuM2jDb0BmCe8qyOK/7pf/92NkZDxvV51re2eMl9t0BwzC846K/zprd9B+vDdSpdoegfoqahKQ0SHSb2uXT+Kb7y6We45fhpXnza4xcNH/jgI7zm2w4zP1+CKD27n3GjYEaoQsSCFknb4BsinpWtQDR7GI8dpTjs/BzeL2ILj1qLL/r0sjTRWpP58Imdr1iTOk0hT5ZJunMgUwFJxii4zlhMSH4yzhW0eyrBIG7S8NQ2bYhxuGy4lKiW5CaYkvLt7p/adrHmTAnMNhoOUvbfxCRx1+UAACAASURBVJC8ErMM8mppia8GagNCxEhAJXVIigY8DYYx824L4ohYrTNev0j
fGiQEPvKhP+G2m06wZ98yly4/wk03H6AYFNRq8JXja5/+Kj1XYI473nj/XXz14S9z8kV3MNi3zPr6FvsP7qXcuMw9L7qdRx/6Cl/74hc4fuwQt7/4Xk7272GrqfBFn0O3C/uOHEaHG9wRDV/4T3/E0ZfeQhTFbjV87K8+yytf/QqO3nKI9fE63lrW1tY4sGcftojsu7RBsCVlvExvXilXKoxxVFVNWQaeOnOJqMrS0jxN2SPEIcMNxfk8zcsZCMqJ4/3UxCZKiA0EzRPBHCLzeKvsWxiwZZb4yIcf4u5bjrM8qLm4eo5+f54Dg0MMtU8TIPo+KgViC6zv552lzc19qYBqc+e2EZe6skW6mc/NVLNl28fR9iG056jf5V3l3ylzV9Vzqho0dUb8PyTqBb4JY6WdCohtEq5cGOsGLmO6x09n+NtUMu1wjisiBYooTaonOeHmO1/Knr0nsHY/hw+dpiojTRNoakPPD2iahqaJSBMpomG0OWK8VTIeVVhfUVMSinXUVNSjIYuL8+w5NuD9f/QgX3viElvVJuN4nirUnDnzDNIooy1HbEpiCU99dcz73/M4i4uLbAwvcvTggKeeuMCRE4u4OagbgzZ7KEeGqCMUl7bjxuH6SxQLi0RjOL9S8+jTa6yNC0KxRG9hH6Y3T28why9SVt/rDVLzkEvdp8ndLhejne0yaDo3znYYgcOYIilwpOgol8kgD5+MwvJ7khQymXZpuXTa7Kb14cjmbJNzq3tf1UzWJSJMV26mFTZBpyx+jcF7P+mTuAGCe5rnZVBRjKkozAiaVQbFJn23wurFT1NufoY4/jwDeRgXvsSAr/Da+w9x252OPceGvPj+0/T6jvH6EF8Kn/zgR3HDhkc+8Tn2NQPMxjoXzz+J7RvOPvEoRb/h5L1H2Tx3lt/6uffyn9//IQ7ecxt3vOIU2JpSI/25Obw17D+wl63NFUZnz/JX//EPuPDoI3iJxHpMs9HwwCe+Rl/maOqKR594BNuzLB84wNbGBmW5wfFb91Ps7bN33wFO3XELd957G8sHFhiXJWU15oknVllfqXn4y8+wZ2mRA4cb2iAJEWIDRIwRCu9QItbmJABFvMWKQ02Phn38yZ8/wumTB5hzJWuX1vEDR7Fgsb091GqIxhPzXNRIkuamAC1Ym3yUjFhsTopy4aOrIYmZ1JsikznCcWp0qLV2mz5+N/B3unSIyFGdGCZ9L9Aqad4P/L8i8rOkgupp4OPfyHO2H0wV8gT7LJFLg+k6aqUVwXRdkTuKp8+9DZ9yDtSWsgmoralFaeIeXvnKV/HQVz/CxQsbLO6vWDD7MdGwWV5AtSE2FQMH9WhMYZRyawuCovsHoDUxLnL54ZIPvO8jbG5Z6A/4jtfcxaFbhCZewNujjNdGLNl9/OXvn+EVb1silFBvbdIzNa/99qNoYzFWKNwaL3nxEUxxGa/LEC8ReoFmrsbRQ+IRxFlcL7I5hNVLazz++Ao333kfR5d7NBrpzQ1wto+SZIzibA6YKQBic7ZtXLf70ZicIDVLu6x3xCbibLoION+jDgGxNp3PmIkEMfPqiW9URPLkJu/yUIeknPHGJbpGNY2XNEJDTEXrvJYoU5m32MzFS6eZT4qZkL832fpXtiUFX/98eOHAiOIkoHGE13Xm3ZCV4TmaZoyRkuWFMVU5oueFXs9jUcbDLX7zvb/L69/y7Rw9fSvNxibh0iZf+PinWV3f4jve+EbmrWHfguNnf/r/5uWnDvGa734dayvP8OUvfo7vOvVONsZbnHvqGVxY5b5XvRi33OfcpRVOHDyAxIp6bZNmXOKahvf85nv5vu/7Hu572+sJqoyGFXHcMIiee0/fBqFAY8XJ206CRqrREEONmhqM0sSCfq/P3F6LKwLjUUloIk0ccespTzk07DtwGNWKXr+1oVKSZYJjPCoR8VRVhVibB784ak3Gg+ubF5hfuosPffxh1tYa9ljD2oULLC4tMDfvKeYWWR/VqJ3Lnac+0TD
Wd7UbY9LQDxGD8y6ZGApgDM75lJcbQawjhNxsR6oL6dSEsjYZ3TnA5vnG3xjcJQ3IfgNwQETOAD8FvEFEXkbaZzwG/DMAVf2iiPwO8CXSCLOf+MaUMooxEGOTWpltknV1lk8CzgoxBhDFUkw41widbpJpDmg6a5tujRfUlEgehhEBk4/xuv9Obr77NJfO/RHN6BK1LOK8MBquMphbJoSStY0VvIATxxhDWQ2Tl3OsqMcjBvMNr3/z/Xzms2d46tIqN532rK6NKQaH2dq6xMZKn545wIteuc54PVI3W3g3YH28TgwFX3v0LC992XHU9vEWhuMFRnGDhaUjxFDhzABrevjCM6wim1WfLz5+kUNHTnHTvQOkSLpvZxzWpQEb3hWoNSgWl+0DxFk0uybWocmFx2RfHEXwLnHuRh3Gt911Qk0apuG9o1bFtUqZ3GlKHuKh1ubinU1BHYsYwRlHUCExNa35G7hcxA1G8+4hc+5YmiatL6JozFtt4rahHJ19cFu0jTE5Q94AHapm9EGUEg0Nvic0zZD9SwXDrS3mF+ZBPRoaQGmi0DQBaxb44e/9YXxhadYqbG259NgGR5eOcu8rD1DPj2kWC8rLG9x6xwEOvfYV2FtvZv+c41tPniZIn/1+juZFp9l38jhLSwtcfPgRDtx5DyujDfTpp3no45/h0TNP8Pb/5rt51/e8CahoCkM1Diww4PHzF3jgQ59k5YJn+egT2D1jjpzs0bd9ts6v8dEPfJyt4ZDXveF+7HzFyJQ0EmlsIErDcFhz/pkNCt/H+UDQFQYDi/OOssoDRYCgDb5wXLy4xuLS3tQcJOk8s+IIwbCwfJj/8Luf5KUvP83xQyOq9S0OHd5HsWBpwoD1sk/pBG/6IBbn+0ju8WiDunM+zQg2Lp2PYnKXb9sgpojarFTKHdoxD5KBrrj/XL0dzzf+rgOyf/XrPP5ngJ/52y1je+a9zcdbW++SnKWJI+QOyfZ3pkN6d7imDtzf5CnT/q0NDThzK3uP/APspfdhhkPMXEGvWKIeD9G6YuD7DNfXCKGmpoR+SHSNAtGysFTTY8z9/UVWLnuaGJhbsgzLVWJMnOD7fv9z7D8ReNN3vogmBtYurVOV83zqE2d443fdRHSrPPLoBrecuInhEL7w+XO84c33Muj3qJoRFEtc3moYjgU/t8RNJw9AlmLheiAW74suk07Fz9RJl05KiJFk04uhMK2JV1K7eONzAM1ZeebMyZN2khZdcM4So3Sce+sTI1jEMtVglo59y9snEz1NHwppnQgzvSZCO8osfSiksywQMxnu0dJ008Xy6S5kgKIobojMveCrSf1iDMKAKiSlUoie0HhQcK5P3wvrl86zMOizunqZ33r3b3P3XSd57etexeVzK/z6b76Xm07t5133/QDBwUbZcODm4/zgP/t+1nqCrQOrT1xm/ugyo3qN9WcuoDbSX17C2B43HbuZUTOkLgOLvsfpO+/g9nvvYqupmF+Y5+mnznL40DGWFucpq4r+4oDFQ3uJdY+//suP8453fQdhOGQkIwb9eTYvRcLY8/Sjlzl69x7E1tRlSdM01GHMeFTz1JkN5vpw5OgivleDBFZXS/r9BbJ3K00MxABLS3uJUYlkjjt3k4v0+PinH+f2Ow5j68s01RaHbzmQPguhz1ZdEH0BJnVlG1dgbPpstDODrXOZY8/9GrnrNI3Xy0VSAJ2c76ppSLvIVGJ5lQI7XFf2A7mCnBP9yQzM1rvEIlEJMUxUE9PdelE7zx4g0zktL9+6r339qTxCDxqoisM0B36IRTnH41/+A/YvRgpjaZqKUV0yHA6xWNhq6KmwPlTCMPLFzz3K7fccZmy2cPsje/qOqqkYDofUdY+/+PNHePNbTvO2d91Erw/alGjoQahZ3qfc/2370ChotZ/bb7sNfGD/XOT1Jw7QUFCGgrpZZBz6RFugC3OUxqcuu9jQ63nq3FloC59O8BwEnU9e6SIpe46aDLySvLAdfO07iqX
lxhvofGCS4mVqjqoIrrDde2eMSU1LxmAxYCVJxlqHwfxciCI7/ICwbuJ7kpUz7XO2s1XDlKJqZ/cq0NVkbN6Wt7/3QsdcYdFoML6Pc328Jn5ZiwgyTjK8aBhXgcHCHsbjLaJR/tt//gMYY3jwSw9z84Fj/P1/9Fr2Hd5PjA0+9iEaGjPm8uYqC43j8sNnefBzX+Clb3kl6nu4wiK2h7GeelzjFnuY8Tq2VoZiGS0WaIz0B30aFfYdO0rtLCOt8Qb2zvV55WtewsMfv8Tauct84gMfZd+pZW4+fQRnSk6dPMbGpTFaB0ITiGXDeCsQQ2C8lQQEx4/t4dLFihAq+tbRBLh8yTE3t8nS3vlkxUubECSaEY3EPNdVjOPyilLWloVC6Dtl3755TE9pGk9VD1A/h1py97VLShjrk8zRJmVaomVsSmzEprbLrFVPIw0lJ5Q7gru01hF01AztOb/Licd1E9y7q9rU99MT7jvTKuMIIWRTKiAqoWmeZe3aXhBCrK/4AZ8OCp0m2iqNloDBxQNslHMcPPXj/Osf/4fcf98x3vT6+2Dra8h4C1c4RsFQqqUKGxgiFy48w23NEZp6LpksNYY6buF8j6Lo8YZvv5c//sMHePPbb6KOhkBEVZCeB/HM9+czXx3S/EnjCWoIWrBZFUR1BGmzc4OatEUU7/CasopCXNae266wk4qjmfeb0pw7W3SadI3pRI5oVgS03i+e1mbXSOqSDCHkYtJE8qWxDerkYG66jLt9fyfKm0S7tKZutuhNKJYoWDEoSgyBSArWTdw+y3aat2yz+GkOc7oO80JH1YDvFaixNAiWPnVVUhQDnBOapmRcbeKMUJZjTAzMFY6mGlJXNadO3YbYkhPLBzEihI0z6dIaYctY1DSsbG3hDxS86s2vJtqKWNdgLCPGFHFMD2FtXNHE1O1qe5bCz6F1wIVE8WGFskwDbGJZpV6QYDi43/Dt33GS9UtrfP6jX+HpLz/GK153kkN3DVgez+GMEEerDLciW+uRy5dGECsGgx79XsHRox7xSllXjMpI0bfUTaCqSB5Fub7TxOwem5UpUZXVlU0+91DJ0QMD9i9birm5NGhnvIdRmKPxA6I1OAvOpHkGxros6/VZ7phEAcYajOuhxqRmP0LaebaF1CwSyAJ4YFr9lcTuWS+D5N3vbuK6Ce5tgE1ZeegyNmslf/XZ6S9l80HTYABjDVaf7YucOOI4Fdjjs++f+j7GSC2KNR5pwMQROhBWyz389C+8j4X5mnf/8i9w/6lDLM/tYX3jAv05Ba2hTu3N3/aGVzMcD7FiiBrAlTQCNPNoMMhgnTe9/TR+rkHr/QjrYGqUgPcDtE5zQY3xlOopa0/ZOFT61G6AmHRB826AWNM1LEFu/Tephb91puuaimzSmotPhVRphwzkDB1jcL00hCMZtuWWaiOQg3ddB7x3We6YtrxiDaKZBmmLqjaZhbXdpF2rmUw546nkC0eR3vuQ7L00NbOCpuKqtZJa72PcdoGf3t6272+rjJnudbgRsnZI07+MCNZYYhCsjwyHw2TZG5QQtojVBjihHq1johLqhnJjA+eE4ajGzSUOmphd70XBRIx6fKNUpqbuFTQhTbZKdRKlMKkmJcbjEILWVE1F33q0bvAOymodIz16cwuMhpv0rE223CadGysbNQ989nFefOdRbr/9IOoVKlhb2WAcKvbuncPUDRqUi89s8PDDl9m/v08II2LjGY5qbrl9iUuXNynHjj3LfZb3ziGSBBaanXY1d68bNyAGi6rlsccf4tjBZfYt9RjMe4IxVOpoYh91BSpK4VzKwl3Sshvns9wxNSYZ65Jc2NiukS6Fj6zmknbGgOxgDwDVTCG3Ng/pHtXuEbuG6ya4wyTIRkDEYG360KoqTdN0j0mBOYJ1xJC3qDstCIhTu57tgf3KjVCCEcU0DURhCwd1w1IPxpUlRs+7/rv/BW1KHBEfh/zur/+f1BurvOj0PsQfoGkaHn2iZm1tiws
Xhzx29knqIbzlrS/iRXcdZm60n/FmoIzrVHGIxGWs6xO9Q71lixpkGaFHhUUTq5LlhC0nbbDWdzuVVsES8+NSULa5ADkpctqsx20VSTCRLKpMjMDSBTT//lQw7fd95r1dd1+SNwrSBKzPTo2SFQFX4MYnGXbyYW+Lou39AEFDyoQ0cfhp2MeEk2/VBtPuet17PlVY/caGqL8wUHibW1EbDJGmGVMUYyyR2JQQS/quSVp4kk+RkYiYOtNygVgmFsC61NHboAgBYsSKpzDJdjkNOwFcLgxGRdVQq+LV4HHgoalrCMqoHCOSqNJyXCFYogTquIXFMB5vcf7CJc5fWOHRQrnnJcepTU0dKprNmq3hJoPC07OO0IywrmBrExYWoT8nPH2uZHOz5sRJx9NPNfSLOZb39RAt80SwbPFMAPFYZ2lqaILh8sqIxcV9HNxX0JsruuakOliiTao0mzXo1tlkdFekzm1rs52AGKwv0rmY6z+QkkqNk+lKbbeqyBTbkuN5bvogKfTa7L2lcXYP111w1xhTxshENhRCSPNNc8YXQr2NxmmbAuyOTO3rSeF2btdFBK9CZS3BKoOqwRqljhUIaeC7BRXLOBq2WOQtP/yvQRv26hhj5lhbP8PG+BO4Zx7gplN7eYmtWVjYg5QLbKyNCOFhCreXvl9i//7DlANB4zxbowKcZw6lDA1gmKfEiEdjMsBKutjMNdsUyImCc54goKI0GvG+SIZdEbz13Wg7m2sXrcd5arR1XbYhkrJk3xt0x94VHjJ3GWNMXbGR/H64tHsiFy5zULdZzx6EnPXYlJm3TUlZVpa+N1g7PaIvYpxJzWkEJKZCVucTP5X9twF82t8doK7rZFlsnt3s9IKFSQNYmmYj05IlTSjzdLIakUBVjdJMX5qk+1al7yxEePKxZzhx5FaQQD0aY3upo1hMQczZtYQFQp2a2jRWNLUSG+j1HXUYIgi1zlHXAa0bNlcuEWro9+cZzPdoyiqFLY1UzQjRkmosuFhx8tSA5eUTFKbg/OUhj3ztGU7cvMj+PY79vb2YpkeztUEUWNzb475vPYYvYFRuITiGG4bLKxvccus8o2HA2CExN7WpuKSYi2S+3DMajXjsybPML8xz/JYDqAq1CmUUgjiCs/hikIfRuBS8JXWaOt+jtZ1OnznbncftWMg2495pHd7e13HsrWxXJA8bau+XLCS4DnXuzz8SxRJJ3ZFGBG3qJMsLMbWnxzyQWgQnLgfzfBRlu2F+wnTmvh1tINlJzVQxKTgcEIp0lSUGXK7cNnXsVCdJJQ+qltWYg9XccV78lqM01VsRTYGnKYfUdU0INSF8GzEkI6uyHONyoJxbCB2t0G/XlQvE0xendr2RqZFe1mSPeeiJdBz1TtqisQLqsSKI5ilIpEzb5GDpd3R1pqCfhh47m/xcTMclGlqDwiAtBTL5fd8WOcUSTRucU6avWncZd6rLpv2qMSTXT5kE8rjjwzN9PKaDent7W0ydzvRf6BCVXFcSRAJWDbbX58wTX+Pg/r0YEZzvE6XBaqSuxtCkeaJWPMtL+zB1ZDwqaTTglxRn+gzL5KppbSqAmxAoR0NCM8R7z8bGiP0H9qOxZqsc0++lQTDS1IzXG778pUe45567GPQtsW4IjFBtUA1o7ZFgCJUipmL5oGe0WvGZTz/NymqkHClz9wwoegNiVROrhsan3br3St2MMCawZ7kgaknRd/QGhl4/0utbokpq4lOT/dNTejzcSp3bx47vpTeYI6BUjaeOkWg8apJHjO0NMp9eYF1qvnM+FVOtsamz2mZxQp4bLJkaA2hCSHOA28z9OXKIdITTeZ52vqnTVXX3acPrI7jnFx6nsro2AKvJ2Xennkl8VSeDizGZi03xstsNo1ofbN32gd8ZFHZmeKpKEOlUlimQZvqgXS9M9l0iYJPRmXf9JN1sAr43wDZJ6xqbKn+tsb0aCZG2k7LdobTraLI+dno9k+JMzoLjdkmVtbbrDUgzTKc
CoTNdBp3aAvJQlHw8d3YFt9pc/JQPeObR2wDajg1rg7CRVrFiqKpxPnmTI16il9quwu12p9NeMO3fbxvWYtbCB53IX9v340pF9+n3eFpV80KGtXPJE5xAaCLVsKbX99x04k6apqKpSwgNTVVhohBLZf3SCg8/coY7T9/K5YtjFg/u59f//R9x8y2HOHZyL7fddTOusMSmQqzD+YqHHvgiB/YfYdDv0ZuvqOoRl588y9xij763hGFJZIyLysAV3HPnHcwNHMPNFVQsTZQ0W7WOxHLM5YsrVGN49PE15uY8y/OeY4dPMNw4x1J/gfFG5Nwzl9l7YJm5nkE1UJWB4Vjp9S1GPdI3HDw6n3eF7S59ABKzPUbiy5uqzg6glv7cAhKEKJ5RFSnVJxmp72N8gXV5TrDJFtRFMekqtW2zUlLHaK4jtcNpkpjAdyMmgY7mhO1sQffZzHVCMclkDJkM79hNXBfBXSR1RDpjp2wIFNHW+zgXKrrt/bMz2m3c+baMLWWFO4P5tjb37rHJszldjU3iOTVRGoGwXU8tQMwBqg1GGrutWFBFvKDRJL15VJxPKgcVT5Gz2RgjVvNzx8mFyKridp4oV1h71O2P6fxVJNnqdjsUkdS+3j6nJp/NyIRK6S4gKplvlI4KMsbQaOzCa5s1T/citMcQoNfrbcumpy8crXRymifvEDW7RpguG2q7Uqff5/b5dtYGtpnN5QvnCx0BUkAQgzGwur7Csbmj1PWIsoq5UzIlFdXGFqnAbil8qs8cP36Eck15+cvuYHUVvvbIJred6qPUrJ2r+eiHPsSJ4yc4dPgwn/3wIwwGjnvuPcUTT17A+QWO31qgfkRv0CNqJIQG6yqcNERVQl1jikg9NoTokq9+s4GXwOrI8JlPrdErAq+5/xZCHONdw/79ae87GCyhWEbNiLIZY3WOuUFqbCvrkJIGWyPGEBQQwUiBSqKenEvquSZGfH+A9wM2yoDYAVtlJEofbD+pu3wvD7ApMCYNfkkNR6kxqpX6Ymxq+pO0GzDGdjJGaz3WWOpQI6QZB0JSkrVQtBMQdOe9JLvfbIeVOrOfK91/nnBdBPcWbfEUyNv/7c1M3jmapsFa33Ufpmw2bAvu00E4ZcTbi3ZXyuY0X0gmqp1W3ufSzMhJEg85g8bQ8f4WEFEUTW+4UWIA43yyU7CgTcAXvbQNDKn5wmQ/6nZ30s6Ntfl4GJ0EXcm7GNteBI3pMojp15a2lmmNhS+6nUHTNJgsGeyyEGMJ2au6DfyTi8qEbwfw2ehLyEHUGposSw1h+0DynRz55NBNmpS2XaQ6DXCijJpcsDWSLAdijEllprotS29/v5PHTh2L6WD/QoZqg/UOjSk4HD16DOsMVR0RA8Yoo9GIlQsXObp3maqscF649cTNaNVQa+Tc2XUur6+zZ3mJat0DltjUPPrgeXQ8x2i9xwc+9WkOHljCYalHnnNnKjZHT3Ho6D5ESqpyiApUzRDrIt4VNCFQ1RXU8MzZS5w4cpxQjamHgfFmYLRZU1cGFB577DxLC4fozfcwvUC/sDh61KZmXJVYZ4m1JKWZSW6sKSmuk92AtP0ZyfdfRSAr56yfA1cwaoRKPVUlNPQwro93A4xzqLFY30vSRnHZqldw1ncXTqydkvBOAnvHk0/vno1gRVLj1DaaN523YWpXSaZSTRYyIKabb7BbuC6CuzJxf5w+eBiTtuYxdJljFAM7P8jy3AqY9LN9VkCfDj6TN2bSzp74sRTRk1AvdpKmZ/2t6QtSTI0NTUxt9HWYzPekMGn3YSGYgGfSsBVzYHfZXCiEgGsvNOQTvl13bvePOfhOUzbT60rBLdvySsTbJD20kgdjuOy4mIN7jJOg2NI7Eg3e+W3HtUXQlMkAOLf9mKtO76BaFYHm5hK77fmCaprZCkimu9Lk+Em3qWvdIadeb/uaOz+bKequPVZXy151N2FcaqBDLTE6ymqIt8nCQWOgrEcYE9m3vEBZbjIab+CtgVL5T//
xrzj3jPIT/9M7OHh0kSo0XFx5hijn0WiYXw7cffAo88Uelg/ewfxCnz37HWKG3PWyE2xtKbVWjDbWsK5kYX6ZEA1BPDSe0IyJDVSlopVw9sxZ5vuLfP5jTzA/WGJxeYF77jxGjeCLER//9NMsLFluDg6nW4mvVijMHuowSj79UoEZo2pR9Ukfqx5VizGK9Yp1BSHAuBa2SgtSEBtPVEslDrxPBVNX4E0a+Zi8YpKTo7Ueb10O2OTuZ4NmCbHJgoDceJPqRTqZCjedmbc694Q2obFYm1Q1qdZkAUnKtbwL223K8LoI7sC2YJ2UAanwmDTXqQDRBuCIZOegSRvAtDPk9NfJBz0XI2PY9tid/Pv099suFHgiIXmbSPvcsQs+MfNurSzT+l72RCly1tl2YuZCo/WE0AYqxYninRBiKjYaO5kVGrXB6vY1e++2vdY20E3zzumr5C3lJItuuf3pTCMdQ+k6Q1t6SVzm06cy/va4WjO9S2qDei62dsqAyfraYmwLzc/VbmljWwNI19COZ28znnYm6vSHYmcNZXrH1h6PFzrv3tTJy16MoxpXaSeGSd48kJq6VajqgBeXMtEmErUhBsOpk8cZx1WiC8QQ2bvfUzebSFjkxC0HiFrxzGNrfPWxSxw4OIcWPeYWHVJYzp85x9y+Y3jfI4TA5sYGRX+OqokYaoZbQwoxrF7Y4smvNVy6cIk7TnsUnwImAQ01D3/lIrfetsj8QsHScoE4k3T3OAiO0Ahqa5Q6d5bbrlCZ+OoexnjEBMRAHSzjKnB5raTRPMO0SI9xRWo0UudwRYEjSRmxqcPUOpuPX56MZJPjYzLDc9s/G5I+E6mulhK3bVr2aYM7kj1KW3wVyX0kSPZeacvS2wAAHIpJREFU2m7VIf+16NynOdlEpbQBf7uc0VpLXTV56xS7rHln8bH9ujNwXymQX2kd274XctXEkaZATQ8WyY1HmY+OAsYViQJxyaWu5QVTYTAXAoEif2BSMMo+5OJw3lDnDDfGBqftBSJz3nH7hWda/72zU1dEUoZi0sg5EcH1CgiToGfETSyUcwB25OKs0VbEy6QTb5q+mvjwTwd/laQiEBFiCGlyEGkAum/Nvabfn/zBSU6f6WdrLcTkz5Fq1s8eqTjt3T698+suujeAHNKaghgarBP6hUe9EJqSuhxjNOJEqKsAIXnxbKxvEquapfkBr3zd3SwvH0f7l2nKBjFzXDo7RKLBSeDyyjrRNnz5s2d47Ktb3HZyL7ecOkkMJcZ4jh45hLeOsoSyLGmqwOKeeUxP0GZMLEvwczzx+DmeelQpx1AeNxw+Okdd1pw9u4GRPj2XhAQ33VKwZ18fYip+YgxBbeoxEU+UABhE+4BLjUViwSRPdRXL1njMqITVtS2iGeCKedT1wffBOkzWpYvzSa9OargzzqeejtyI1HoZJZmjdHMFUgDONOw2rxjIXX65btWev9OWHTYH9/TcGJPO8+7dlInFxy6fl9dJcN9e3GyaKtMEeaAGOyRwkjJwoqZGn6w1SvRFPng7KJbJ85ucXaYgOD25SWR7licinX9NclnZiSSXNJq06e1vBgWxKegVRQrMbeHPykS2mQYUSw5mLr+GtCZn2iw9v0VhUoDFsy0jNdqabBmiaVILP4IhNy2FmJRIrR9+k4JykLbanz1g8gWiibkGIHnIspm09092NNmeuT2BIRWWp46OZL7cuJyFZSooTB7QPX46YKfjrlhJXbiaPj/PytBbbFMQ6YSqSkt64cshm6rCiFCONkkt70psakI1ZNDvMy5LqtEGTgzDrRELc3PUpiQ4y/5bF6jDCrXtYwpYvxz5wB+cYbS5xStecRcf+8TjDPb0uPf0Meb9OssHB5TVWQa+Twxpx7S5ukmMNeN1+C8ffIJ7X1lz8x17CGVDzxXUoyHHDu6h2ljh4KFDwCYhCsXcgEMDy7AW5pf3M+j1cf0NhAYbB9i4SbAbREkEpYm91KFMD3QOnMF5aCI0QSibknF
ZcuH8kN7CEr5/BL+wQCTNJhBncc5jbQHYPDXJY0zaeZKnJUnOxq1zyRKj7asxOTOPMSnjYqIRbSqopd3oVFbeijvSLtZ3O1jn0sUl5s+QSSwOQBf0k2PkNbb8vTp4DiokXxXb+yb8a57GZFObv7SPzQc6JXqZZ5+yILhSFrf9AvC3CwKTLHYy5m96BNzOTHOyK9kuc+yGkEy9/ra42T3OJf/zdli4SPKybncPYqayCJNcH8WQKRXTDS9Q1dRGjmJ0wu/L1N/raCtJ6pudFFcXMKdub5/nWdmz2Z5dX+nYT78n0wXcNlBPP3bne7Yzi5/eWUzekxd25h7CkLo29IoCTLK4jRqwhaUJFWVVUvgBTVXhbY+6GlHY5IjZtw5vA6KBpkkNT1GG9OYcjz5xgZUtGBmw/S2O3VKwsblFrPZQSQCzTgwDxpuWLz/4JKdOHeKV9x9iYamH1EKoGuoYUve4U47dsgc0oo1BcIlGsXN4U7O3J6hEVHsEYBw2KAqH4MC4/Bl2WFugmshXax21CnW0bFaW9c3IxZURy8u3UiwupKYiY3B5KIxYh3M+yRutI2pSU4WWIzcWaf9rg3obN4zpqL9kuyGggssTloBErUzaTUFdCtz5MUqan6qG9PvtbARyoiQtZZkUY2aXJzFdJ8E9YcKVNl0galmGVg3RPq77gCNESQGr3UJND/fIbim0M1iv9DefH+TW4h1BZTqjbF9HS8vYKYpo+mt7gUImVAhTFfk2iKpK55RIZk+SI4hgnBAD2Cxl3NnCT/ZFjygm8+xdj0HbZaqKRO2y7nZtIqnib5lcMHd+Nbngu+0ITfHj0xfAK/0+U8+98/Z2LdP3T29zpy8O0yqpFypEQFxJYJyCIWmoilY1m+MRXgyhadAYqENFNMlky7kC5wpGWxsU3hOaGueU+199D6FSjOmxtvEI41By8NBB5voF81t95uf2MK5XQQPj8Zjh2LK8dIzNNWF+MZ1Lw2FFiA0hVAieslaqyjMeNXhreOb8RU6d2p8u9jbkQrpnNDaEGBHTJygUtp/M/azBux4hKL7wRAyjWjO3Lpy/uE5/fi9Hjh7A9gYECWler8+TxIwBmeLMjct+RbnuI3mQdT5HrE/WvbFVo9FKFVOjUTq30rljxHTS4pZDl1wLaDu+02ci3RbJ0sc8tamJsfv7qb6ULEF2G9dFcP/SFx/YfPHddzx0rdfxDeIAcPFaL+IbxGytCbfs0vNeFZgGVApCE6jrMjX0xQatGga+z2hjnRgbmqZhXKUuUXGRUhu2RhXWCOVI0EYRKenNr7F8dA9rq+d5/esOMRyW1PUGobeM71uG1UWqJqDB04RA1MCHP/IkTgz3vfYmHv38V3n1t54mxIaqLgl1JIY5Pv6xs8QQeNm33Mptpy1qtqiJrG42LO9ZpqlhNIZPffICx0/s4fY7j2KMJG8katTNgxXWxg0xGta3GqqgDOaW2H/kADifEjhr8W5+aqeaBlYnW+rc0CaTnXLMkmqXvdeBZE1NG5hzY5ERrE5RgzJ5LrdtPGQWDLRmebnzuh3cYUgXiNb50Rjb+TCFoFibAn3c5ZzjugjuwEOq+sprvYhvBCLyydlan3+8kNZ6tTHaWMGY+Wxn22CloiprYqipx4FGFdR0KjITFW+hqkPySo8GGxxaBzRWHDiyRAw1c/ss9cUtVs5eYv+xm/jyVx7j9OlbCBoYDwMbGzAYGIyDm08NWLsIfhB50T0HCXWTAqR6iAZnDDffPEfTGIoeNHWDdQZMwfKeHkiyzN27d55XvabHYDBHfzBINAYBIVloV5XSNI6qMUixiMPSmAJxPnHVPmXpAZM59aR3T7y5BUMO8jlAG4vLXc7JOkCIRrBdI52d0C5Kom7a4G6T8Z4Tmwv6bcd1pn7bGb8t7dMWUqdsUZKkV0g2HiA29akgpJkHu4jrJbjPMMMMz4HNYc3ArdLa91RNSdRIU9coUFc1MUQ0BGggVhHjLWEUcKYHQQhbhssrW/T
nBYwSJBKdMNhbcPrem9EAJ08eJ8aGqjLUleGhL51j7z7PrSeXOXX6AM2tnt4g4n0fNJJDJKCIiVhfUTXCYKDUjSHUBa5YZH2zYmG+wPhEycwv9THOEIxSxQhqqRuhxDGuI4hHfYHYebxxBJJ8MWpqbLPOAD43OCVvmdbYqx3taLPPu8lTyKS9CEBuwrOtjKMrcqbH2Y6+SfbjqY62TRWTNeqa1TZIer7Um7o9+BNTFq9592DzAJ2rQRTOgvsMM1zn8HvfxMMPfoTNlfPsnetzYP8czmlSm9RbxHoTYnIErZqASMGogmfOXeDg/r0QlQe+8DBf++oz3HbHEW678xhlJQgFIZSY4FGNKDVN1RCjpygML3npcVYvB558tGT/wR7ze7YQY4nkYSpRQBzWC02M3Hz70cxhg+8vZPlwj8U9vTRM3VpEFFGhwVE1Lg2giYYYLUEM+DRrQNUirt/ZUYPgrcnmadmrRVpv9axAk+y/btLcgtY+vLOf7jzaU5bfTVGa0ranQJwonNRAaLoLCKYtiOZ6n80Z/fTFwiQdu7adIq1VN1mQ0D5edr9z+noJ7r98rRfwt8BsrbuDF9Jaryp08BJuufs2nnjoAb7y5Y/xyS98hb1znm950TH6VqEa4ZwljGusOMqoKUAaePrs08QmcPymw3hnmVscUFcNZCvpwkY0CONQE7RKU4yiwbiIcSUhwsrKmKW9PZKneQrEIppFI4FGTDb4ywZdRkCyEUesc/B0RCx1MNRBCHjqmIK7ikWdTdRFLtYn3kK6Fn2TxzCqTOiVru8iG4BNq7liFgbYlhNXMM5hJCls2t83xk79bs7CxdAafaHaXTCYCsjpaW1Hx0TVjpMPqp1tQSs/TkubZP9oUtbsJuSFriSYYYa/K0SkD/wl0CMlOr+nqj8lIrcB7wH2AZ8GflhVKxHpAb8BvAK4BPyAqj72N/yNb/oD9oXPfYk553HNmFBeZG31aT74p3/M41/6MN//997EUm8EcYsmjLFOk+cRkY3Lm6xdWmV9ZZWTt95GXQfqpk69EGoJTcTbklgFtPCMyxGGPtQ9olaoVlRjy3gU8T0QN8bJHlCDMkqdpCbSBCUEg3OLNDFZT2CTE2hUS8gXhEYtVeMIaol4ghS04yCjCEYLjGuLmJonc9kUkLPqzYhLcxvshP/uJiYZRy4/kIb9pOAdYlLLeO/ThQDbKVuSesVMOPOc3ec3Lwd7OzEDaztVjSEwKb42TZ4rYCbNeyI2SzzNpE+DyQ4hxMj3/tA/+mZPD/Q5HMhmwX2G/2ohKV2bV9VNEfHAfwH+BfA/A+9T1feIyL8HPqeqvyQi/xx4iar+DyLyg8D3quoP/A1/45v+gD34wGdxEnAWmpC57hhotlZYv/wMn/rwnxHrNcYbq1Bd4PXf9lK02YR6TDlqqKqGRkeEYAlNUo/4Yo4YHVX2c7Gmj+8JRms0loSmREODNkpd18RYU8eIMwtJxx5CTkTTVCdEiGJpgmDMAI0FTaNU0iOIy1y0JYpFxSSPqDwGkqxWsRRZcULmy6e6SI1DMTlou1b3m7tKk/0AbbbdWW+0E7tc2gjk4CviJgGYSTE0p+NdJt45n9I2aU/mnlprCTEwbT8gZBsSndiSAx3nD1Nd2Plv/r3v/7qnzzeEWXCfYYavAxGZIwX3Hwf+P+CIqjYi8hrgp1X1u0TkT/L3HxURBzwDHNSv8yF6PoL7ow8+kBvfFOM8MVY4k7p4NdZIU2G1ohmv8+E/+0Me/uKn6LmG+15+OxojxhQESppGWL005syTT1KVgboynL14gdXNDWJl0KAcv3mR73nHqwj1KHfBekKphDim1lEy8msson3E+mRdIYI4AWMZlTVpIzSHqiVKkQb35fATs+pESVRO19hjLDGmpiPnpicepWBrbVLLpC5S1/HoCkkGmXXu+RYkD+9oh7Gni0J+XtPOMGWidIFk9cvEp6qbKwAdhQNXbozr+kdaHn+6p0SgHQ4vpL8tWUb53e/
6/m/29HjO4H7NOXcReSvw8yTbtF9R1X9zjdfza8A7gPOqem++bR/wXuBW4DHgH6jqSs78fh54OzAE/omqfvoqrfMmEkVwhGR288uq+vPX6Vp3nf74JtZmgU8BtwO/CHwVWFXV1m/iDHA8f38ceBIgB/41YD+73EvQxJqgqWhnYkxNZ21jnFga1wc7T+0Wue8dP8ar3xHRWDO88DQ9ClZWt3C9LdZX1jDlefyCpxiMcbZg8fBNqE2ZZ69YgGB46qlFjKyyvKyobZDCUQh4G+j5RYItqI0BeqgWVHXi06MofYEQlZjpDtEa0ckULnA5wEk3FjNR4gpeIUouONpcLM1NiJK6RlWUoErhfG70a+cKkDtdJVtFm878zrZd45kyae/rPGA6b5j0/G1WHXIR1m1zfSQXTSGGmBUwpsv0IdkyJ94/dpObopJeD3mUYcfB7x6uaXDPH6xfBL6T9CH6hIi8X1W/dA2X9evAvyMFlxY/CfyZqv4bEfnJ/PP/CrwNOJ3/3Q/8Uv56NdAA/0pVPy0ii8CnRORPgX9yHa61BN44TX+IyH8m0R8/N0V//Ghe148CK6p6e6Y//i3wze9frwBVDcDLRGQv8AfA3Vd6WP56pU/jszJzEfkx4MeevzWmNniNeTSEREJou3MNzrSTsAyxNaczBn/gJgCWD0BolMER5fDdcG8ItKZ7sa5RjcmrXSNNk5qh0OnB9Eqsm86aW6NC03Rmc1aTDLOYclttmmaiQJl6LdODVaYDZuLZ3VQncqJAkrIlD8PIQdZLdgiN2+fnthLFbeZ5RgiSutad8Xm3M3lMu4PQtjM11InOsRYryZO9nXW600KjtanWbAzWNovbPLdYWifX3PDUSiCbrnv9Bg7uwH3AI6r6KICIvAd4J3DNgruq/qWI3Lrj5ncCb8jfvxv4IClgvhP4jbwt/5iI7BWRo6p69iqs8yxwNn+/ISIPkjLL63GtCmzmH33+p8AbgX84tdafJgX3d+bvAX4P+HciIl+P/nge1rgqIh8EXg3sFRGXs/cTwNP5YWeAm4AzmZbZA1y+wnP9Mln983zQMl3wQpLNhEpuWsouoZrZhY7GSDdam9QeCliXglrMk8ZiE9KsWhsRHB5Ndtg2YEyV6AiNWBuIMRBt8vQPMc0ZcKHIFwgIeSRkRzug+Lb9srWbIC906mh0ttJtkDc+XTi6YN1qxmWbp0sWm6C6I+DmC0nnyJg9XbIgPh0dm/xhYoiJ6lHQfJyiKt63E8Qyb24cO6/prSW1mZI2bnuN+bVJtvVV0/rI0tE13QvZRVzr4N5tczPOcPWyyb8NDrdBUFXPisihfPuV1n+cHHSvFvLF6OXAX3OdrvV6pD9E5CBQ58A+AN5M2iX8BfB9JMroR4A/zL/y/vzzR/P9f76bF5wWUZMPUeuOmVSIOcJlaBv08w8CbejveF8sSLZUNgVpB2BTQHXOoTEFbylqNMbUGKWhs5gOIaQB75qCYxtMUwafIl7U0AXKvJhsnZtv67yFupafrKrRTFtI9/jOrwm6DD75TzmCbh/M0kkZ2R4zVdqhJuTB8DljN8nUK0btlDem+7s5endPln7WzpqgvVTQBXWR1kdPs8JGOmfIaR+l6VkScZdPnWsd3L+hbe51jGu+fhFZAH4f+Jequr6z0DP90CvcdtXWuhv0x/OAo8C784XHAL+jqn8kIl8C3iMi/wfwGeBX8+N/FfgPIvIIKWP/wV1Y0xUgXWCESbBBJoXBNoDHNtK0hzBfDNopW4jkwAhJAkjyjMaAVUyMiPFEjZiQsnZVTd2vErCuB6rJqExbr/2Y83XFaMp02zcwZPoklTmV7fFMu3VKjOm5WqpCFdXJ0JdJnp8CtHSzjhMSjz55VnIgnrw+TZLKvJPpHi/pohLb7U+bhU8tdDJIKK+rfXVm6oQ1BpMLqhHNWnq6i9n0DqOTb97gwb3d5raY3gJfTzjXUhgichQ4n2+/puvP/PXvA7+lqu+7ntfa4vmkP56HtXy
etOPZefujJMpw5+1j4JuXN/wt0WaMmQNIg9hbxYaQVSPa0TKTi0C7/U+6d6Bri9e2INvFoPyzTca3yVExIhqIIYDxiA0QSXSM8cSYhtiLxo7Db0NwuhYpgiJ2+rVMqJsuEcmBU2Ps5gS3r7ebt9tdCHIb/5S7aCtVFFKGn2j9XMSVzIcbzV2hmqmtrGwhZ+M50JrMjU/XBmJ+9vZ4d7sFMwnOkndWYg0Stg9s74q1tEXw9Dp2e77v7vfAfn18AjgtIreJSEHKhN5/jdd0JbTbcXj2Nv0fS8KrgbWrwWEDZPXLrwIPqurPXudrPZgzdqbojweZ0B9XWmv7Gq4a/XG9QvNQiKhKyFlwSiJzKG212KqQm380f21ViFa2/0vKQQFJEsSI5FF+HnFFssx1PtkG+x6+6OF9H+PS/bbo4Xo9bK+H6/WxvT7ie5iijyn6ycHRFZj8T2wBJnu856/pn8NYj3Ue35vD+T7W9jC2wLgexnus9zifBss738O4AusLjPMYX+B8gSvSbWI9zhdY7zHO4VyBtR5rXW5Qchhjsa69bTI8ozUhE5OGf1ifnCaNSX42aRyx5KHkrdJmYvlrEEwEKyb/E5y1ySo4a93T30mqmudQMD5vuKaZe+ZT/0fgT0hSyF9T1S9eyzWJyG+TCpIHROQM8FPAvwF+R0R+FHiCSfb2xyRp4SMkeeE/vYpLfS3ww8AXROSz+bb/7Tpd6wuE/rg+kWiDPAc3jQ+H9v86KUpOrn85C4Vuklib4UvOPDVn3dikSgkxe/5rohgioCEixmElcemi6cLgsma7aRUzIhgMKkk9Y6boklbXHTUZhKUhM9qtq1Oa0PLjlnY8e6u06TL5HfryliCZflwMIbX1q+0oIGNSIJWcMYcmdjp5o5PipuSdTHssJ7uH6RkE7cMNqpFpr5mWvpkqf9N2tMYp1VBrG7zLg5hmTUwzzLCbeD7UMg9+8UsdbWIMiHaDCrdxw21Bb1sw1EkBQzVTEjn0xhgRl8YnRm0J+ynaIVMlqppHwsXJsHd02wByUTrZpGRaI3HwkwlgExplMgKToLkYqXn3sOPxLSEzVThNGe/UHOMpxUqMMXm+5MOSipahC7LC9sEv04VSVUVlMlx9+rhONyRFcrYeJxebndPGtl+E8gqy/DG9tvQ3vvNtb/+7nRRT0FmH6gwzXH08H8F9hhm+Hp4ruF9rzn2GGWaYYYZdwCy4zzDDDDPcgJgF9xlmmGGGGxCz4D7DDDPMcANiFtxnmGGGGW5AzIL7DDPMMMMNiFlwn2GGGWa4ATEL7jPMMMMMNyBmwX2GGWaY4QbELLjPMMMMM9yAmAX3GWaYYYYbELPgPsMMM8xwA2IW3GeYYYYZbkDMgvsMM8wwww2IWXCfYYYZZrgBMQvuM8wwwww3IK71gOwZZrjRsQk8dK0X8Q3gAHDxWi/iG8QLZa1XY523PNcds+A+wwy7i4dU9ZXXehF/E0Tkky+EdcILZ63Xep0zWmaGGWaY4QbELLjPMMMMM9yAmAX3GWbYXfzytV7AN4gXyjrhhbPWa7pOUZ0NZ59hhhlmuNEwy9xnmGGGGW5AzIL7DDPsEkTkrSLykIg8IiI/eY3X8msicl5EHpi6bZ+I/KmIfCV/Xc63i4j8Ql7350XkW67iOm8Skb8QkQdF5Isi8i+ux7WKSF9EPi4in8vr/N/z7beJyF/ndb5XRIp8ey///Ei+/9bdXuMsuM8wwy5ARCzwi8DbgBcBPyQiL7qGS/p14K07bvtJ4M9U9TTwZ/lnSGs+nf/9GPBLV2mNAA3wr1T1buDVwE/k43a9rbUE3qiqLwVeBrxVRF4N/Fvg5/I6V4AfzY//UWBFVW8Hfi4/blcxC+4zzLA7uA94RFUfVdUKeA/wzmu1GFX9S+DyjpvfCbw7f/9u4O9P3f4bmvAxYK+IHL1K6zyrqp/O328ADwLHr7e15r+3mX/0+Z8CbwR+7znW2a7/94A3iYjs5hpnwX2GGXYHx4Enp34
+k2+7nnBYVc9CCqrAoXz7dbH2TF28HPhrrsO1iogVkc8C54E/Bb4KrKpqc4W1dOvM968B+3dzfbPgPsMMu4MrZWUvFGnaNV+7iCwAvw/8S1Vd/3oPvcJtV2WtqhpU9WXACdJO7e6vs5arvs5ZcJ9hht3BGeCmqZ9PAE9fo7U8F861FEb+ej7ffk3XLiKeFNh/S1Xfdz2vFUBVV4EPkmoEe0WktXWZXku3znz/Hp5Nkz2vmAX3GWbYHXwCOJ3VEwXwg8D7r/GaduL9wI/k738E+MOp2/9xVqK8GlhrKZHdRuahfxV4UFV/9npdq4gcFJG9+fsB8GZSfeAvgO97jnW26/8+4M91l5uMZk1MM8ywSxCRtwP/F2CBX1PVn7mGa/lt4A0kp8Jz/3+7dmiDQBAEUPRPMGgKQFAAFdAGCQJDGxgSaqAD3Cl6wJIg8AgKOEmCYRB7LUDI5D+5atTPZmeBHXACOmAKPIBlZvZDYA+03zVPYJOZlx/NuQDOwA14D8db2rv738waEXPagnREuyR3mbmPiBlteT4BrsA6M18RMQaOtB1CD6wy8/7VGY27JNXjs4wkFWTcJakg4y5JBRl3SSrIuEtSQcZdkgoy7pJUkHGXpII+HCN7ieeEmykAAAAASUVORK5CYII=\n" - }, - "metadata": { - "needs_background": "light" - } - } - ], - "source": [ - "import numpy as np\n", - "from PIL import Image\n", - "import matplotlib.pyplot as plt\n", - "import mindspore.dataset.vision.c_transforms as C\n", - "import mindspore.dataset.vision.py_transforms as P\n", - "\n", - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/banana.jpg\n", - "img_ori = Image.open(\"banana.jpg\").convert(\"RGB\")\n", - "print(\"Image.type: {}, Image.shape: {}\".format(type(img_ori), img_ori.size))\n", - "\n", - "# Define a Resize op from c_transform and execute it immediately\n", - "op1 = C.Resize(size=(320))\n", - "img = op1(img_ori)\n", - "print(\"Image.type: {}, Image.shape: {}\".format(type(img), img.shape))\n", - "\n", - "# Define a CenterCrop op from c_transform and execute it immediately\n", - "op2 = C.CenterCrop((280, 280))\n", - "img = op2(img)\n", - "print(\"Image.type: {}, Image.shape: {}\".format(type(img), img.shape))\n", - "\n", - "# Define a Pad op from py_transform and execute it immediately\n", - "# Before calling Pad, you need to call ToPIL()\n", - "op3 = P.ToPIL()\n", - "op4 = P.Pad(40)\n", - "img = op4(op3(img))\n", - "print(\"Image.type: {}, Image.shape: {}\".format(type(img), img.size))\n", - "\n", - "# Show the result\n", - "plt.subplot(1, 2, 1)\n", 
- "plt.imshow(img_ori)\n", - "plt.title(\"original image\")\n", - "plt.subplot(1, 2, 2)\n", - "plt.imshow(img)\n", - "plt.title(\"transformed image\")\n", - "plt.show()" - ] - }, - { - "source": [ - "MindSpore目前可以支持Eager模式的数据增强算子包括:\n", - "\n", - "- [mindspore.dataset.vision.c_transforms](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.vision.html#mindspore-dataset-vision-c-transforms)\n", - "\n", - "- [mindspore.dataset.vision.py_transforms](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.vision.html#mindspore-dataset-vision-py-transforms)\n", - "\n", - "- [mindspore.dataset.text.transforms](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.text.html#mindspore-dataset-text-transforms)" - ], - "cell_type": "markdown", - "metadata": {} - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用注意事项\n", - "\n", - "在数据管道处理模式中,请勿混用`c_transforms`与`py_transforms`,因为两者在管道中运行的方式存在差异,混用会降低处理性能。\n", - "\n", - "(注:Eager模式混用`c_transforms`与`py_transforms`不受运行方式差异影响)\n", - "\n", - "![map](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/map.png)\n", - "\n", - "混用会引发C++与Python切换的成本,建议不要过度混用两个模块的算子,但是适量混用是可以接受的。\n", - "\n", - "**推荐的使用方式:**\n", - "\n", - "- 单独使用`py_transform`或`c_transform`\n", - "\n", - " ![tranform-c-py1](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/transform_recommended_1.png)\n", - "\n", - "- 先使用`py_transform`,再使用`c_transform`\n", - "\n", - " ![tranform-c-py2](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/transform_recommended_2.png)\n", - "\n", - "- 先使用`c_transform`,再使用`py_transform`\n", - "\n", - " ![tranform-c-py3](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/transform_recommended_3.png)\n", - "\n", - "**不推荐的使用方式:**\n", - "\n", - "- 在两种transform之间频繁切换\n", - "\n", - " 
![tranform-c-py4](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/transform_not_recommended.png)\n", - "\n", - "## 参考文献\n", - "\n", - "[1] Alex Krizhevsky. [Learning_Multiple Layers of Features from Tiny Images](http://www.cs.toronto.edu/~kriz/learning-features-2009-TR.pdf).\n", - "\n", - "[2] Y. LeCun, L. Bottou, Y. Bengio, and P. Haffner. [Gradient-based learning applied to document recognition](http://yann.lecun.com/exdb/publis/pdf/lecun-98.pdf).\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6-final" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} \ No newline at end of file diff --git a/docs/programming_guide/source_zh_cn/auto_augmentation.ipynb b/docs/programming_guide/source_zh_cn/auto_augmentation.ipynb deleted file mode 100644 index 01da43e3638da7290d095b5eb3c281399a687139..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/auto_augmentation.ipynb +++ /dev/null @@ -1,237 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 自动数据增强\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/auto_augmentation.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_auto_augmentation.ipynb) 
[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9hdXRvX2F1Z21lbnRhdGlvbi5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "MindSpore除了可以让用户自定义数据增强的使用,还提供了一种自动数据增强方式,可以基于特定策略自动对图像进行数据增强处理。\n", - "\n", - "自动数据增强主要分为基于概率的自动数据增强和基于回调参数的自动数据增强。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 基于概率的自动数据增强\n", - "\n", - "MindSpore提供了一系列基于概率的自动数据增强API,用户可以对各种数据增强操作进行随机选择与组合,使数据增强更加灵活。\n", - "\n", - "关于API的详细说明,可以参见[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.transforms.html)。\n", - "\n", - "### RandomApply\n", - "\n", - "API接收一个数据增强操作列表`transforms`,以一定的概率顺序执行列表中各数据增强操作,默认概率为0.5,否则都不执行。\n", - "\n", - "在下面的代码示例中,以0.5的概率来顺序执行`RandomCrop`和`RandomColorAdjust`操作,否则都不执行。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset.vision.c_transforms as c_vision\n", - "from mindspore.dataset.transforms.c_transforms import RandomApply\n", - "\n", - "rand_apply_list = RandomApply([c_vision.RandomCrop(512), c_vision.RandomColorAdjust()])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### RandomChoice\n", - "\n", - "API接收一个数据增强操作列表`transforms`,从中随机选择一个数据增强操作执行。\n", - "\n", - "在下面的代码示例中,等概率地在`CenterCrop`和`RandomCrop`中选择一个操作执行。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset.vision.c_transforms as c_vision\n", - "from mindspore.dataset.transforms.c_transforms import RandomChoice\n", - "\n", - "rand_choice = RandomChoice([c_vision.CenterCrop(512), 
c_vision.RandomCrop(512)])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### RandomSelectSubpolicy\n", - "\n", - "API接收一个预置策略列表,包含一系列子策略组合,每一子策略由若干个顺序执行的数据增强操作及其执行概率组成。\n", - "\n", - "对各图像先等概率随机选择一种子策略,再依照子策略中的概率顺序执行各个操作。\n", - "\n", - "在下面的代码示例中,预置了两条子策略,子策略1中包含`RandomRotation`、`RandomVerticalFlip`和`RandomColorAdjust`三个操作,概率分别为0.5、1.0和0.8;子策略2中包含`RandomRotation`和`RandomColorAdjust`两个操作,概率分别为1.0和0.2。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset.vision.c_transforms as c_vision\n", - "from mindspore.dataset.vision.c_transforms import RandomSelectSubpolicy\n", - "\n", - "policy_list = [\n", - " [(c_vision.RandomRotation((45, 45)), 0.5), (c_vision.RandomVerticalFlip(), 1.0), (c_vision.RandomColorAdjust(), 0.8)],\n", - " [(c_vision.RandomRotation((90, 90)), 1.0), (c_vision.RandomColorAdjust(), 0.2)]\n", - " ]\n", - "policy = RandomSelectSubpolicy(policy_list)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 基于回调参数的自动数据增强\n", - "\n", - "MindSpore的`sync_wait`接口支持按batch或epoch粒度在训练过程中动态调整数据增强策略,用户可以设定阻塞条件触发特定的数据增强操作。\n", - "\n", - "`sync_wait`将阻塞整个数据处理pipeline直到`sync_update`触发用户预先定义的`callback`函数,两者需配合使用,对应说明如下:\n", - "\n", - "- sync_wait(condition_name, num_batch=1, callback=None)\n", - "\n", - " 该API为数据集添加一个阻塞条件`condition_name`,当`sync_update`调用时执行指定的`callback`函数。\n", - "\n", - "- sync_update(condition_name, num_batch=None, data=None)\n", - "\n", - " 该API用于释放对应`condition_name`的阻塞,并对`data`触发指定的`callback`函数。\n", - "\n", - "下面将演示基于回调参数的自动数据增强的用法。\n", - "\n", - "1. 
用户预先定义`Augment`类,其中`preprocess`为自定义的数据增强函数,`update`为更新数据增强策略的回调函数。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset.vision.py_transforms as transforms\n", - "import mindspore.dataset as ds\n", - "import numpy as np\n", - "\n", - "class Augment:\n", - " def __init__(self):\n", - " self.ep_num = 0\n", - " self.step_num = 0\n", - "\n", - " def preprocess(self, input_):\n", - " return (np.array((input_ + self.step_num ** self.ep_num - 1), ))\n", - "\n", - " def update(self, data):\n", - " self.ep_num = data['ep_num']\n", - " self.step_num = data['step_num']" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 数据处理pipeline先回调自定义的增强策略更新函数`update`,然后在`map`操作中按更新后的策略来执行`preprocess`中定义的数据增强操作。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "arr = list(range(1, 4))\n", - "dataset = ds.NumpySlicesDataset(arr, shuffle=False)\n", - "aug = Augment()\n", - "dataset = dataset.sync_wait(condition_name=\"policy\", callback=aug.update)\n", - "dataset = dataset.map(operations=[aug.preprocess])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 
在每个step中调用`sync_update`进行数据增强策略的更新。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epcoh: 0, step:0, data :[Tensor(shape=[], dtype=Int64, value= 1)]\n", - "epcoh: 0, step:1, data :[Tensor(shape=[], dtype=Int64, value= 2)]\n", - "epcoh: 0, step:2, data :[Tensor(shape=[], dtype=Int64, value= 3)]\n", - "epcoh: 1, step:3, data :[Tensor(shape=[], dtype=Int64, value= 1)]\n", - "epcoh: 1, step:4, data :[Tensor(shape=[], dtype=Int64, value= 5)]\n", - "epcoh: 1, step:5, data :[Tensor(shape=[], dtype=Int64, value= 7)]\n", - "epcoh: 2, step:6, data :[Tensor(shape=[], dtype=Int64, value= 6)]\n", - "epcoh: 2, step:7, data :[Tensor(shape=[], dtype=Int64, value= 50)]\n", - "epcoh: 2, step:8, data :[Tensor(shape=[], dtype=Int64, value= 66)]\n", - "epcoh: 3, step:9, data :[Tensor(shape=[], dtype=Int64, value= 81)]\n", - "epcoh: 3, step:10, data :[Tensor(shape=[], dtype=Int64, value= 1001)]\n", - "epcoh: 3, step:11, data :[Tensor(shape=[], dtype=Int64, value= 1333)]\n", - "epcoh: 4, step:12, data :[Tensor(shape=[], dtype=Int64, value= 1728)]\n", - "epcoh: 4, step:13, data :[Tensor(shape=[], dtype=Int64, value= 28562)]\n", - "epcoh: 4, step:14, data :[Tensor(shape=[], dtype=Int64, value= 38418)]\n" - ] - } - ], - "source": [ - "epochs = 5\n", - "itr = dataset.create_tuple_iterator(num_epochs=epochs)\n", - "step_num = 0\n", - "for ep_num in range(epochs):\n", - " for data in itr:\n", - " print(\"epcoh: {}, step:{}, data :{}\".format(ep_num, step_num, data))\n", - " step_num += 1\n", - " dataset.sync_update(condition_name=\"policy\", data={'ep_num': ep_num, 'step_num': step_num})" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": 
"python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/auto_parallel.md b/docs/programming_guide/source_zh_cn/auto_parallel.md deleted file mode 100644 index 9f5d4d0dd93c80eda06f9ffa43220f773e837d01..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/auto_parallel.md +++ /dev/null @@ -1,398 +0,0 @@ -# 分布式并行用法 - - - -- [分布式并行用法](#分布式并行用法) - - [概述](#概述) - - [分布式并行配置](#分布式并行配置) - - [通用配置](#通用配置) - - [device_num](#device_num) - - [global_rank](#global_rank) - - [gradients_mean](#gradients_mean) - - [parallel_mode](#parallel_mode) - - [all_reduce_fusion_config](#all_reduce_fusion_config) - - [enable_parallel_optimizer](#enable_parallel_optimizer) - - [parameter_broadcast](#parameter_broadcast) - - [自动并行配置](#自动并行配置) - - [gradient_fp32_sync](#gradient_fp32_sync) - - [auto_parallel_search_mode](#auto_parallel_search_mode) - - [strategy_ckpt_load_file](#strategy_ckpt_load_file) - - [strategy_ckpt_save_file](#strategy_ckpt_save_file) - - [full_batch](#full_batch) - - [pipeline_stages](#pipeline_stages) - - [grad_accumulation_step](#grad_accumulation_step) - - [分布式通信接口](#分布式通信接口) - - [init](#init) - - [get_group_size](#get_group_size) - - [get_rank](#get_rank) - - [分布式属性配置](#分布式属性配置) - - [cross_batch](#cross_batch) - - [fusion](#fusion) - - [layerwise_parallel](#layerwise_parallel) - - [数据并行](#数据并行) - - [自动并行](#自动并行) - - - - - -## 概述 - -在深度学习中,当数据集和参数量的规模越来越大,训练所需的时间和硬件资源会随之增加,最后会变成制约训练的瓶颈。分布式并行训练,可以降低对内存、计算性能等硬件的需求,是进行训练的重要优化手段。 - -MindSpore提供了分布式并行训练的功能,它支持了包括数据并行和自动并行在内的多种并行模式。 - -## 分布式并行配置 - -MindSpore的分布式并行配置通过`auto_parallel_context`来进行集中管理,用户可根据自身需求和实际情况来进行个性化的配置。这些配置可分为三大类: - -- 通用配置:对数据并行、自动并行以及混合并行均起作用的配置,如:`device_num`、`global_rank`等。 -- 自动并行配置:仅在自动并行模式下起作用的配置,如:`auto_parallel_search_mode`、`gradient_fp32_sync`等。 - 
-用户可利用`context.set_auto_parallel_context`配置上述参数,同时可通过`context.get_auto_parallel_context`来获取上述参数。 - -### 通用配置 - -#### device_num - -`device_num`表示可用的机器数,其值为int型,默认值是0,且必须在1~4096范围内。若用户不配置,`Model`接口内部则会通过`get_group_size`方法获取,若用户进行了配置,则遵循用户的配置。这个配置可以在用户不使用`Model`接口的情况下,手动传递`device_num`。 - -代码样例如下: - -```python -from mindspore import context - -context.set_auto_parallel_context(device_num=8) -context.get_auto_parallel_context("device_num") -``` - -#### global_rank - -`global_rank`表示当前卡的逻辑序号,其值为int型,默认值是0,且必须在0~4095范围内。若用户不配置,`Model`接口内部则会通过`get_rank`方法获取,若用户进行了配置,则遵循用户的配置。这个配置可以在用户不使用`Model`接口的情况下,手动传递`global_rank`。 - -代码样例如下: - -```python -from mindspore import context - -context.set_auto_parallel_context(global_rank=0) -context.get_auto_parallel_context("global_rank") -``` - -#### gradients_mean - -`gradients_mean`表示在反向梯度进行聚合时,是否进行平均操作。其值为bool型,默认为False,即梯度聚合仅进行AllReduce的SUM操作,不做平均操作。`gradients_mean`会影响网络的收敛,不同场景,`gradients_mean`的设置可能不同。因此,MindSpore提供这个接口让用户根据实际情况来配置。 - -代码样例如下: - -```python -from mindspore import context - -context.set_auto_parallel_context(gradients_mean=False) -context.get_auto_parallel_context("gradients_mean") -``` - -#### parallel_mode - -`parallel_mode`表示并行模式,其值为字符串类型。用户可选择的模式有: - -- `stand_alone`:单机模式。 -- `data_parallel`:数据并行模式。 -- `hybrid_parallel`:混合并行模式。 -- `semi_auto_parallel`:半自动并行模式,即用户可通过`shard`方法给算子配置切分策略,若不配置策略,则默认是数据并行策略。 -- `auto_parallel`:自动并行模式,即框架会自动建立代价模型,为用户选择最优的切分策略。 - -其中`auto_parallel`和`data_parallel`在MindSpore教程中有完整样例: - -。 - -代码样例如下: - -```python -from mindspore import context -import mindspore.ops as ops - -context.set_auto_parallel_context(parallel_mode="semi_auto_parallel") -mul = ops.Mul().shard(((2, 1), (2, 1))) -context.get_auto_parallel_context("parallel_mode") -``` - -#### all_reduce_fusion_config - -`all_reduce_fusion_config`可以让用户自定义梯度AllReduce融合切分策略。出于减少资源消耗及算子执行间隙的目的,框架默认将所有反向梯度聚合的AllReduce融合成一个算子运算,但当模型较大时,这会造成迭代拖尾耗时增加。用户可结合具体网络,通过设置该参数,手动调优找到性能最好的融合切分策略。 - -代码样例如下: - -```python -from mindspore import context 
- -context.set_auto_parallel_context(all_reduce_fusion_config=[20, 35]) -context.get_auto_parallel_context("all_reduce_fusion_config") -``` - -样例中,`all_reduce_fusion_config`的值为[20, 35],将前20个AllReduce融合成1个,第20~35个AllReduce融合成1个,剩下的AllReduce融合成1个。 - -#### enable_parallel_optimizer - -`enable_parallel_optimizer`是一个开发中特性,参数默认值是False。数据并行时参数更新部分在各卡间存在冗余计算,优化器并行通过将优化器的计算量分散到各个卡上,在大规模网络上(比如Bert、GPT)可以有效减少内存消耗并提升网络性能。 - -在`data_parallel`模式下使能优化器并行,框架会将需要更新的参数进行分组到不同卡上,各自更新后再通过`Broadcast`算子在集群间做权重共享。需要注意的是参数量应当大于机器数,当前只支持`Lamb`和`AdamWeightDecay`优化器。 - -在`auto_parallel`或者`semi_auto_parallel`模式下使能优化器并行,如果经过策略切分后的参数在机器间存在重复切片,并且shape的最高维可以被卡数整除,框架会以最小切片的方式保存参数并在优化器中更新。该模式下支持所有优化器。 - -无论是哪种模式,优化器并行不会影响原有正反向网络的计算图,只会影响参数更新的计算量和计算逻辑。 - -代码样例如下: - -```python -from mindspore import context - -context.set_auto_parallel_context(enable_parallel_optimizer=True) -context.get_auto_parallel_context("enable_parallel_optimizer") -``` - -#### parameter_broadcast - -`parameter_broadcast`将数据并行参数在0号卡上的权值广播到其他卡上,达到同步初始化权重的目的。参数默认值是False,当前仅支持图模式。 - -代码样例如下: - -```python -from mindspore import context - -context.set_auto_parallel_context(parameter_broadcast=True) -context.get_auto_parallel_context("parameter_broadcast") -``` - -### 自动并行配置 - -#### gradient_fp32_sync - -`gradient_fp32_sync`表示梯度是否以float32类型进行聚合,其值为bool类型,默认为True,即梯度以float32类型进行聚合。由于`Ascend`AI处理器的特殊构造,float32类型的数据进行聚合的速度要高于float16,但可能会影响精度。因此,MindSpore提供`gradient_fp32_sync`接口,让用户自己根据实际情况去进行取舍。 - -代码样例如下: - -```python -from mindspore import context - -context.set_auto_parallel_context(gradient_fp32_sync=False) -context.get_auto_parallel_context("gradient_fp32_sync") -``` - -#### auto_parallel_search_mode - -MindSpore提供了`dynamic_programming`和`recursive_programming`两种搜索策略的算法,默认是`dynamic_programming`。`dynamic_programming`能够搜索出代价模型刻画的最优策略,但在搜索巨大网络模型的并行策略时耗时较长;而`recursive_programming`能瞬间搜索出并行策略,同时在已验证的常用网络中搜索出来的策略是最优策略,但在未经验证的某些特殊网络中可能找到次优策略。为此,MindSpore提供了参数,让用户自由选择搜索算法。 - -代码样例如下: - -```python -from mindspore import context - 
-context.set_auto_parallel_context(auto_parallel_search_mode="recursive_programming") -context.get_auto_parallel_context("auto_parallel_search_mode") -``` - -#### strategy_ckpt_load_file - -指定加载路径,加载自动并行中所有带有权重的算子的切分信息。 - -代码样例如下: - -```python -from mindspore import context - -context.set_auto_parallel_context(strategy_ckpt_load_file="./") -context.get_auto_parallel_context("strategy_ckpt_load_file") -``` - -#### strategy_ckpt_save_file - -指定存储路径,存储自动并行中所有带有权重的算子的切分信息。 - -代码样例如下: - -```python -from mindspore import context - -context.set_auto_parallel_context(strategy_ckpt_save_file="./") -context.get_auto_parallel_context("strategy_ckpt_save_file") -``` - -#### full_batch - -`full_batch`可以让用户决定数据集是否以全量导入。默认是False。即数据集以数据并行的方式导入。在特殊场景下,数据集全量导入的性能要优于数据并行方式导入,比如WideDeep网络的非均匀切分场景。因此,MindSpore提供`full_batch`可配置接口。 - -代码样例如下: - -```python -from mindspore import context - -context.set_auto_parallel_context(full_batch=False) -context.get_auto_parallel_context("full_batch") -``` - -#### pipeline_stages - -`pipeline_stages`是用来设置`pipeline`并行的`stage`信息。用来表明机器在`pipeline`并行下是如何分布的。目前`pipeline`并行仍在开发中。 - -代码样例如下: - -```python -from mindspore import context -context.set_auto_parallel_context(pipeline_stage=4) -context.get_auto_parallel_context("pipeline_stage") -``` - -#### grad_accumulation_step - -`grad_accumulation_step`指梯度累积步数。具体用法请参考[指导教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_gradient_accumulation.html) - -代码样例如下: - -```python -from mindspore import context -context.set_auto_parallel_context(grad_accumulation_step=4) -context.get_auto_parallel_context("grad_accumulation_step") -``` - -## 分布式通信接口 - -`mindspore.communication.management`中封装了分布式并行用到的集合通信接口,方便用户配置分布式信息。 - -### init - -使能MindSpore通信,并完成分布式训练初始化操作。`init`要在`context.set_context`之后调用。用户可给`init`传入通信后端信息,`init`会根据不同的后端来进行不同初始化。 - -- `hccl`:全名为`Huawei Collective Communication Library`。用于`Ascend`处理器平台。 -- `nccl`:全名为`NVIDIA Collective Communication Library`。用于`GPU`处理器平台。 - 
-若用户不配置通信后端,MindSpore会根据`context`中的`device_target`信息进行自动配置。 - -代码样例如下: - -```python -from mindspore import context -from mindspore.communication.management import init - -context.set_context(device_target='GPU') -init() -``` - -### get_group_size - -`get_group_size`可让用户获取集群数量。在用`get_group_size`接口之前,要先调用`init`。 - -代码样例如下: - -```python -from mindspore import context -from mindspore.communication.management import init, get_group_size - -context.set_context(device_target='GPU') -init() -group_size = get_group_size() -``` - -### get_rank - -`get_rank`可让用户获取当前设备在集群中的ID。在用`get_rank`接口之前,要先调用`init`。 - -代码样例如下: - -```python -from mindspore import context -from mindspore.communication.management import init, get_rank - -context.set_context(device_target='GPU') -init() -rank_id = get_rank() -``` - -## 分布式属性配置 - -### cross_batch - -在特定场景下,`data_parallel`的计算逻辑和`stand_alone`是不一样的,`auto_parallel`在任何场景下都是和`stand_alone`的计算逻辑保持一致。而`data_parallel`的收敛效果可能更好,因此MindSpore提供了`cross_batch`这个参数,可以使`auto_parallel`的计算逻辑和`data_parallel`保持一致,用户可通过`add_prim_attr`方法进行配置,默认值是False。 - -代码样例如下: - -```python -import mindspore.ops as ops - -mul = ops.Mul().add_prim_attr("cross_batch", True) -``` - -### fusion - -出于性能考虑,MindSpore提供了`AllGather`和`AllReduce`算子的融合功能,`fusion`值相同的同类算子(算子类型以及通信域相同)会融合在一起,`fusion`的值必须大于等于0,且当`fusion`值为0时,表示不融合。目前只支持`Ascend`后端。 - -`fusion`属性的配置有两种方式,如果是显式调用通信算子可以通过`add_prim_attr`方法直接为通信算子配置属性。代码样例如下: - -```python -import mindspore.ops as ops - -allreduce1 = ops.AllReduce().add_prim_attr("fusion", 1) -allreduce2 = ops.AllReduce().add_prim_attr("fusion", 1) -``` - -样例中的`allreduce1`和`allreduce2`将在执行时被融合为一个算子。 - -在`AUTO_PARALLEL`和`SEMI_AUTO_PARALLEL`模式下自动插入的用于参数或者梯度聚合的通信算子,需要通过对`Cell`或者`Parameter`设置属性的方式间接添加。例如: - -```python -import mindspore.nn as nn -from mindspore import Tensor, Parameter -from mindspore import context - -class Net(nn.Cell): - """Net definition""" - def __init__(self): - super(Net, self).__init__() - self.fc1 = ops.MatMul() - self.fc2 = ops.MatMul() - self.p1 = 
Parameter(Tensor(np.ones([48, 64]).astype(np.float32)), name="weight1") - self.p1.comm_fusion = 2 - self.p2 = Parameter(Tensor(np.ones([64, 16]).astype(np.float32)), name="weight2") - - def construct(self, x, y): - x = self.fc1(x, self.p1) - x = self.fc2(x, self.p2) - return x - y - -context.set_context(mode=context.GRAPH_MODE) -context.set_auto_parallel_context(parallel_mode="auto_parallel", device_num=8) -net = Net().set_comm_fusion(2) -``` - -样例中对参数`Net.p1`设置`comm_fusion`为2,表示作用于该参数的通信算子`fusion`属性为2。当需要批量对参数进行操作时,可以调用`set_comm_fusion`方法将网络`Net`中包含的全部参数设置`comm_fusion`属性。如果多次调用的话,属性值会被覆盖。 - -> 当参数被共享时,需要保证连接参数的多个算子混合精度一致,否则融合会失败。 - -### layerwise_parallel - -在`HYBRID_PARALLEL`模式下用户需要手动切分模型,其中对于模型并行的参数用户需要手动打上标记`layerwise_parallel`,框架会根据此标记为模型并行参数过滤掉梯度聚合操作。 - -代码样例如下: - -```python -imoprt numpy as np -from mindspore import Parameter, Tensor - -x = Parameter(Tensor(np.ones([2, 2])), layerwise_parallel=True) -``` - -## 数据并行 - -数据并行是对数据进行切分的并行模式,一般按照batch维度切分,将数据分配到各个计算单元(worker)中,进行模型计算。在数据并行模式下,数据集要以数据并行的方式导入,并且`parallel_mode`要设置为`data_parallel`。 - -具体用例请参考MindSpore分布式并行训练教程: - -。 - -## 自动并行 - -自动并行是融合了数据并行、模型并行及混合并行的一种分布式并行模式,可以自动建立代价模型,为用户选择一种并行模式。其中,代价模型指基于内存的计算开销和通信开销对训练时间建模,并设计高效的算法找到训练时间较短的并行策略。在自动并行模式下,`parallel_mode`要设置为`auto_parallel`。 - -具体用例请参考MindSpore分布式并行训练教程: - -。 diff --git a/docs/programming_guide/source_zh_cn/cache.md b/docs/programming_guide/source_zh_cn/cache.md deleted file mode 100644 index b273c9c6fa20c7d6673cf0a0980fcf84e00e2e2d..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/cache.md +++ /dev/null @@ -1,434 +0,0 @@ -# 单节点数据缓存 - - - -- [单节点数据缓存](#单节点数据缓存) - - [概述](#概述) - - [缓存基础使用](#缓存基础使用) - - [缓存共享](#缓存共享) - - [当前限制](#当前限制) - - [缓存性能调优](#缓存性能调优) - - - - - -## 概述 - -对于需要重复访问远程的数据集或需要重复从磁盘中读取数据集的情况,可以使用单节点缓存算子将数据集缓存于本地内存中,以加速数据集的读取。 - -缓存算子依赖于在当前节点启动的缓存服务器,缓存服务器作为守护进程独立于用户的训练脚本而存在,主要用于提供缓存数据的管理,支持包括存储、查找、读取以及发生缓存未命中时对于缓存数据的写入等操作。 - -若用户的内存空间不足以缓存所有数据集,则用户可以配置缓存算子使其将剩余数据缓存至磁盘。 - 
-目前,缓存服务只支持单节点缓存,即客户端和服务器均在同一台机器上。该服务支持以下两类使用场景: - -- 缓存加载好的原始数据集 - - 用户可以在数据集加载算子中使用缓存。这将把加载完成的数据存到缓存服务器中,后续若需相同数据则可直接从中读取,避免从磁盘中重复加载。 - - ![cache on leaf pipeline](./images/cache_dataset.png) - -- 缓存经过数据增强处理后的数据 - - 用户也可在`map`算子中使用缓存。这将允许直接缓存数据增强(如图像裁剪、缩放等)处理后的数据,避免数据增强操作重复进行,减少了不必要的计算量。 - - ![cache on map pipeline](./images/cache_processed_data.png) - -## 缓存基础使用 - -1. 配置环境。 - - 使用缓存服务前,需要安装MindSpore,并设置相关环境变量。以Conda环境为例,设置方法如下: - - ```shell - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{path_to_conda}/envs/{your_env_name}/lib/python3.7/site-packages/mindspore:{path_to_conda}/envs/{your_env_name}/lib/python3.7/site-packages/mindspore/lib - export PATH=$PATH:{path_to_conda}/envs/{your_env_name}/bin - ``` - - > 由于使用缓存可能会造成服务器的内存紧张,因此建议用户在使用缓存前增大服务器的交换内存空间至100GB以上,Ubuntu、EulerOS以及CentOS均可参考[相关教程](https://help.ubuntu.com/community/SwapFaq#How_do_I_add_a_swap_file.3F)了解如何增大交换内存空间。 - -2. 启动缓存服务器。 - - 在使用单节点缓存服务之前,首先需要在命令行输入以下命令,启动缓存服务器: - - ```shell - cache_admin --start - ``` - - 若输出下列信息,则表示缓存服务器启动成功。 - - ```text - Cache server startup completed successfully! 
- The cache server daemon has been created as process id 10394 and is listening on port 50052 - - Recommendation: - Since the server is detached into its own daemon process, monitor the server logs (under /tmp/mindspore/cache/log) for any issues that may happen after startup - ``` - - `cache_admin`支持以下命令和参数: - - `--start`:启动缓存服务器,支持通过以下参数进行配置: - - `--workers`或`-w`:设置缓存服务器的工作线程数量,默认情况下工作线程数量为机器CPU个数的一半。该参数需要根据NUMA架构来设置,若设置值不是机器中NUMA结点数的整数倍,则缓存服务器会对其进行自动调整。 - - `--spilldir`或`-s`:设置若缓存数据的大小超过内存空间,则溢出至磁盘的数据文件路径,默认为空(表示不启用数据溢出功能)。 - - `--hostname`或`-h`:缓存服务器的ip地址,默认为127.0.0.1。 - - `--port`或`-p`:缓存服务器的端口号,默认为50052。 - - `--loglevel`或`-l`:设置日志等级,默认为1(WARNING级别)。若设置为0(INFO级别),会输出过多日志,导致性能劣化。 - - `--stop`:关闭缓存服务器。 - - `--generate_session`或`-g`:生成一个缓存会话。 - - `--destroy_session`或`-d`:删除一个缓存会话。 - - `--list_sessions`:查看当前缓存会话列表和详细信息。 - - `--server_info`:查看当前服务器配置参数及会话列表。 - - `--help`:查看帮助信息。 - - 以上命令均可使用`-h`和`-p`参数来指定服务器,用户也可通过配置环境变量`MS_CACHE_HOST`和`MS_CACHE_PORT`来指定。若未指定则默认对ip为127.0.0.1且端口号为50052的服务器执行操作。 - - 用户可通过`ps -ef|grep cache_server`命令来检查服务器是否已启动以及查询服务器参数。 - - 用户也可通过`cache_admin --server_info`命令查看服务器的详细参数列表。 - - ```shell - $ cache_admin --server_info - Cache Server Configuration: - ---------------------------------------- - config name value - ---------------------------------------- - hostname 127.0.0.1 - port 50052 - number of workers 16 - log level 1 - spill dir None - ---------------------------------------- - Active sessions: - No active sessions. - ``` - - 其中,Cache Server Configuration表格分别列出了当前服务器的IP地址、端口号、工作线程数、日志等级、溢出路径等详细配置信息。Active sessions模块展示了当前服务器中已启用的session ID列表。 - - > - 设置cache_server初始化参数时,要先确认系统可用内存和待加载数据集大小,cache_server初始化容量或待加载数据集空间占耗超过系统可用内存时,都有可能导致机器宕机/重启、cache_server自动关闭、训练流程执行失败等问题。 - > - 若要启用数据溢出功能,则用户在启动缓存服务器时必须使用`-s`参数对溢出路径进行设置,否则该功能默认关闭。 - -3. 
创建缓存会话。 - - 若缓存服务器中不存在缓存会话,则需要创建一个缓存会话,得到缓存会话id: - - ```shell - $ cache_admin -g - Session created for server on port 50052: 1456416665 - ``` - - 其中1456416665为端口50052的服务器分配的缓存会话id,缓存会话id由服务器分配。 - - 通过`cache_admin --list_sessions`命令可以查看当前服务器中现存的所有缓存会话信息。 - - ```shell - $ cache_admin --list_sessions - Listing sessions for server on port 50052 - - Session Cache Id Mem cached Disk cached Avg cache size Numa hit - 1456416665 n/a n/a n/a n/a n/a - ``` - - 输出参数说明: - - `Session`: 缓存会话id。 - - `Cache Id`: 当前缓存会话中的cache实例id,`n/a`表示当前尚未创建缓存实例。 - - `Mem cached`: 缓存在内存中的数据量。 - - `Disk cached`: 缓存在磁盘中的数据量。 - - `Avg cache size`:当前缓存的每行数据的平均大小。 - - `Numa hit`:Numa命中数,该值越高将获得越好的时间性能。 - -4. 创建缓存实例。 - - 在Python训练脚本中使用`DatasetCache` API来定义一个名为`test_cache`的缓存实例,并把上一步中创建的缓存会话id传入`session_id`参数: - - ```python - import mindspore.dataset as ds - - test_cache = ds.DatasetCache(session_id=1456416665, size=0, spilling=False) - ``` - - `DatasetCache`支持以下参数: - - `session_id`:缓存会话的id,通过`cache_admin -g`命令来创建并获取。 - - `size`:缓存最大内存空间占用,该参数以MB为单位,例如512GB的缓存空间应设置`size=524288`,默认为0。 - - `spilling`:当内存空间超出所设置的最大内存空间占用时,是否允许将剩余的数据溢出至磁盘,默认为False。 - - `hostname`:连接至缓存服务器的ip地址,默认为127.0.0.1。 - - `port`:连接至缓存服务器的端口号,默认为50052。 - - `num_connections`:建立的TCP/IP连接数,默认为12。 - - `prefetch_size`:每次访问获取的行数,默认为20。 - - > - 在实际使用中,通常应当首先使用`cache_admin -g`命令从缓存服务器处获得一个缓存会话id并作为`session_id`的参数,防止发生缓存会话不存在而报错的情况。 - > - 设置`size=0`代表不限制缓存所使用的内存空间,但不超过系统总内存的80%。注意,设置`size`为0可能会存在机器“out of memory”的风险,因此建议用户根据机器本身的空闲内存大小,给`size`参数设置一个合理的取值。 - > - 若设置`spilling=True`,则当内存空间不足时,多余数据将写入磁盘中。因此,用户需确保所设置的磁盘路径具有写入权限以及足够的磁盘空间,以存储溢出至磁盘的缓存数据。注意,若启动服务器时未指定溢出路径,则在调用API时设置`spilling=True`将会导致报错。 - > - 若设置`spilling=False`,则缓存服务器在耗尽所设置的内存空间后将不再写入新的数据。 - > - 当使用不支持随机访问的数据集(如`TFRecordDataset`)进行数据加载并启用缓存服务时,需要保证整个数据集均存放于本地。在该场景下,若本地内存空间不足以存放所有数据,则必须启用溢出,将数据溢出至磁盘。 - > - `num_connections`和`prefetch_size`为内部性能调优参数,一般情况下,用户无需设置这两个参数。 - -5. 
插入缓存实例。 - - 当前缓存服务既支持对原始数据集的缓存,也可以用于缓存经过数据增强处理后的数据。下例分别展示了两种使用方式。 - - 需要注意的是,两个例子均需要按照步骤4中的方法分别创建一个缓存实例,并在数据集加载或map算子中将所创建的`test_cache`作为`cache`参数分别传入。 - - 下面两个样例中使用到CIFAR-10数据集。运行样例前,需参照[数据集加载](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html#cifar-10-100)中的方法下载并存放CIFAR-10数据集。 - - - 缓存原始数据集加载的数据。 - - ```python - dataset_dir = "cifar-10-batches-bin/" - - # apply cache to dataset - data = ds.Cifar10Dataset(dataset_dir=dataset_dir, num_samples=4, shuffle=False, num_parallel_workers=1, cache=test_cache) - - num_iter = 0 - for item in data.create_dict_iterator(num_epochs=1): # each data is a dictionary - # in this example, each dictionary has a key "image" - print("{} image shape: {}".format(num_iter, item["image"].shape)) - num_iter += 1 - ``` - - 输出结果: - - ```text - 0 image shape: (32, 32, 3) - 1 image shape: (32, 32, 3) - 2 image shape: (32, 32, 3) - 3 image shape: (32, 32, 3) - ``` - - 通过`cache_admin --list_sessions`命令可以查看当前会话有四条数据,说明数据缓存成功。 - - ```shell - $ cache_admin --list_sessions - Listing sessions for server on port 50052 - - Session Cache Id Mem cached Disk cached Avg cache size Numa hit - 1456416665 821590605 4 n/a 3226 4 - ``` - - - 缓存经过数据增强处理后的数据。 - - ```python - import mindspore.dataset.vision.c_transforms as c_vision - - dataset_dir = "cifar-10-batches-bin/" - - # apply cache to dataset - data = ds.Cifar10Dataset(dataset_dir=dataset_dir, num_samples=5, shuffle=False, num_parallel_workers=1) - - # apply cache to map - rescale_op = c_vision.Rescale(1.0 / 255.0, -1.0) - data = data.map(input_columns=["image"], operations=rescale_op, cache=test_cache) - - num_iter = 0 - for item in data.create_dict_iterator(num_epochs=1): # each data is a dictionary - # in this example, each dictionary has a keys "image" - print("{} image shape: {}".format(num_iter, item["image"].shape)) - num_iter += 1 - ``` - - 输出结果: - - ```text - 0 image shape: (32, 32, 3) - 1 image shape: (32, 32, 3) - 2 image shape: (32, 32, 3) - 3 image shape: (32, 32, 
3) - 4 image shape: (32, 32, 3) - ``` - - 通过`cache_admin --list_sessions`命令可以查看当前会话有五条数据,说明数据缓存成功。 - - ```shell - $ cache_admin --list_sessions - Listing sessions for server on port 50052 - - Session Cache Id Mem cached Disk cached Avg cache size Numa hit - 1456416665 3618046178 5 n/a 12442 5 - ``` - -6. 销毁缓存会话。 - - 在训练结束后,可以选择将当前的缓存销毁并释放内存: - - ```shell - $ cache_admin --destroy_session 1456416665 - Drop session successfully for server on port 50052 - ``` - - 以上命令将销毁端口50052服务器中缓存会话id为1456416665的缓存。 - - 若选择不销毁缓存,则该缓存会话中的缓存数据将继续存在,用户下次启动训练脚本时可以继续使用该缓存。 - -7. 关闭缓存服务器。 - - 使用完毕后,可以通过以下命令关闭缓存服务器,该操作将销毁当前服务器中存在的所有缓存会话并释放内存。 - - ```shell - $ cache_admin --stop - Cache server on port 50052 has been stopped successfully. - ``` - - 以上命令将关闭端口50052的服务器。 - - 若选择不关闭服务器,则服务器中已创建的缓存会话将保留,并供下次使用。下次训练时,用户可以新建缓存会话或重复使用已有缓存。 - -## 缓存共享 - -对于单机多卡的分布式训练的场景,缓存算子还允许多个相同的训练脚本共享同一个缓存,共同从缓存中读写数据。 - -1. 启动缓存服务器。 - - ```shell - $ cache_admin --start - Cache server startup completed successfully! - The cache server daemon has been created as process id 39337 and listening on port 50052 - - Recommendation: - Since the server is detached into its own daemon process, monitor the server logs (under /tmp/mindspore/cache/log) for any issues that may happen after startup - ``` - -2. 创建缓存会话。 - - 创建启动Python训练的Shell脚本`cache.sh`,通过以下命令生成一个缓存会话id: - - ```shell - #!/bin/bash - # This shell script will launch parallel pipelines - - # get path to dataset directory - if [ $# != 1 ] - then - echo "Usage: sh cache.sh DATASET_PATH" - exit 1 - fi - dataset_path=$1 - - # generate a session id that these parallel pipelines can share - result=$(cache_admin -g 2>&1) - rc=$? - if [ $rc -ne 0 ]; then - echo "some error" - exit 1 - fi - - # grab the session id from the result string - session_id=$(echo $result | awk '{print $NF}') - ``` - -3. 
将缓存会话id传入训练脚本。 - - 继续编写Shell脚本,添加以下命令在启动Python训练时将`session_id`以及其他参数传入: - - ```shell - # make the session_id available to the python scripts - num_devices=4 - - for p in $(seq 0 $((${num_devices}-1))); do - python my_training_script.py --num_devices "$num_devices" --device "$p" --session_id $session_id --dataset_path $dataset_path - done - ``` - - > 直接获取完整样例代码:[cache.sh](https://gitee.com/mindspore/docs/blob/master/tutorials/tutorial_code/cache/cache.sh) - -4. 创建并应用缓存实例。 - - 下面样例中使用到CIFAR-10数据集。运行样例前,需参照[数据集加载](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html#cifar-10-100)中的方法下载并存放CIFAR-10数据集。目录结构如下: - - ```text - ├─cache.sh - ├─my_training_script.py - └─cifar-10-batches-bin -    ├── batches.meta.txt -    ├── data_batch_1.bin -    ├── data_batch_2.bin -    ├── data_batch_3.bin -    ├── data_batch_4.bin -    ├── data_batch_5.bin -    ├── readme.html -    └── test_batch.bin - ``` - - 创建并编写Python脚本`my_training_script.py`,通过以下代码接收传入的`session_id`,并在定义缓存实例时将其作为参数传入。 - - ```python - import argparse - import mindspore.dataset as ds - - parser = argparse.ArgumentParser(description='Cache Example') - parser.add_argument('--num_devices', type=int, default=1, help='Device num.') - parser.add_argument('--device', type=int, default=0, help='Device id.') - parser.add_argument('--session_id', type=int, default=1, help='Session id.') - parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') - args_opt = parser.parse_args() - - # apply cache to dataset - test_cache = ds.DatasetCache(session_id=args_opt.session_id, size=0, spilling=False) - dataset = ds.Cifar10Dataset(dataset_dir=args_opt.dataset_path, num_samples=4, shuffle=False, num_parallel_workers=1, - num_shards=args_opt.num_devices, shard_id=args_opt.device, cache=test_cache) - num_iter = 0 - for _ in dataset.create_dict_iterator(): - num_iter += 1 - print("Got {} samples on device {}".format(num_iter, args_opt.device)) - ``` - - > 
直接获取完整样例代码:[my_training_script.py](https://gitee.com/mindspore/docs/blob/master/tutorials/tutorial_code/cache/my_training_script.py) - -5. 运行训练脚本。 - - 运行Shell脚本`cache.sh`开启分布式训练: - - ```shell - $ sh cache.sh cifar-10-batches-bin/ - Got 4 samples on device 0 - Got 4 samples on device 1 - Got 4 samples on device 2 - Got 4 samples on device 3 - ``` - - 通过`cache_admin --list_sessions`命令可以查看当前会话中只有一组数据,说明缓存共享成功。 - - ```shell - $ cache_admin --list_sessions - Listing sessions for server on port 50052 - - Session Cache Id Mem cached Disk cached Avg cache size Numa hit - 3392558708 821590605 16 n/a 3227 16 - ``` - -6. 销毁缓存会话。 - - 在训练结束后,可以选择将当前的缓存销毁并释放内存: - - ```shell - $ cache_admin --destroy_session 3392558708 - Drop session successfully for server on port 50052 - ``` - -7. 关闭缓存服务器。 - - 使用完毕后,可以选择关闭缓存服务器: - - ```shell - $ cache_admin --stop - Cache server on port 50052 has been stopped successfully. - ``` - -## 当前限制 - -- 当前`MindDataset`、`GraphDataset`、`GeneratorDataset`、`PaddedDataset`和`NumpySlicesDataset`等数据集类不支持缓存。其中,`GeneratorDataset`、`PaddedDataset`和`NumpySlicesDataset`属于`GeneratorOp`,在不支持的报错信息中会呈现“There is currently no support for GeneratorOp under cache”。 -- 经过`batch`、`concat`、`filter`、`repeat`、`skip`、`split`、`take`和`zip`处理后的数据不支持缓存。 -- 经过随机数据增强操作(如`RandomCrop`)后的数据不支持缓存。 -- 不支持在同个数据管道的不同位置嵌套使用同一个缓存实例。 - -## 缓存性能调优 - -使用缓存服务能够在一些场景下获得显著的性能提升,例如: - -- 缓存经过数据增强处理后的数据,尤其是当数据预处理管道中包含decode等高复杂度操作时。在该场景下,用户不需要在每个epoch重复执行数据增强操作,可节省较多时间。 -- 在简单网络的训练和推理过程中使用缓存服务。相比于复杂网络,简单网络的训练耗时占比更小,因此在该场景下应用缓存,能获得更显著的时间性能提升。 - -然而,在以下场景中使用缓存可能不会获得明显的性能收益,例如: - -- 系统内存不足、缓存未命中等因素将导致缓存服务在时间性能上提升不明显。因此,可在使用缓存前检查可用系统内存是否充足,选择一个适当的缓存大小。 -- 过多缓存溢出会导致时间性能变差。因此,在使用可随机访问的数据集(如`ImageFolderDataset`)进行数据加载的场景,尽量不要允许缓存溢出至磁盘。 -- 在Bert等NLP类网络中使用缓存,通常不会取得性能提升。因为在NLP场景下通常不会使用到decode等高复杂度的数据增强操作。 -- 使用non-mappable数据集(如`TFRecordDataset`)的pipeline在第一个epoch的时间开销较大。根据当前的缓存机制,non-mappable数据集需要在第一个epoch训练开始前将所有数据写入缓存服务器中,因此这使得第一个epoch时间较长。 diff --git a/docs/programming_guide/source_zh_cn/callback.md 
b/docs/programming_guide/source_zh_cn/callback.md deleted file mode 100644 index 32aecdd86613c0d457bfd755398dfec83eeaf571..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/callback.md +++ /dev/null @@ -1,56 +0,0 @@ -# Callback机制 - - - -- [Callback机制](#callback机制) - - [概述](#概述) - - [MindSpore内置回调函数](#mindspore内置回调函数) - - [MindSpore自定义回调函数](#mindspore自定义回调函数) - - - - - -## 概述 - -Callback回调函数在MindSpore中被实现为一个类,Callback机制类似于一种监控模式,可以帮助用户观察网络训练过程中各种参数的变化情况和网络内部的状态,还可以根据用户的指定,在达到特定条件后执行相应的操作,在训练过程中,Callback列表会按照定义的顺序执行Callback函数。Callback机制让用户可以及时有效地掌握网络模型的训练状态,并根据需要随时作出调整,可以极大地提升用户的开发效率。 - -在MindSpore中,Callback机制一般用在网络训练过程`model.train`中,用户可以通过配置不同的内置回调函数传入不同的参数,从而实现各种功能。例如,可以通过`LossMonitor`监控每一个epoch的loss变化情况,通过`ModelCheckpoint`保存网络参数和模型进行再训练或推理,通过`TimeMonitor`监控每一个epoch,每一个step的训练时间,以及提前终止训练,动态调整参数等。 - -## MindSpore内置回调函数 - -- ModelCheckpoint - - 与模型训练过程相结合,保存训练后的模型和网络参数,方便进行再推理或再训练。`ModelCheckpoint`一般与`CheckpointConfig`配合使用,`CheckpointConfig`是一个参数配置类,可自定义配置checkpoint的保存策略。 - - 详细内容,请参考[Checkpoint官网教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html)。 - -- SummaryCollector - - 帮助收集一些常见信息,如loss、learning rate、计算图、参数权重等,方便用户将训练过程可视化和查看信息,并且可以允许summary操作从summary文件中收集数据。 - - 详细内容,请参考[Summary官网教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/summary_record.html)。 - -- LossMonitor - - 监控训练过程中的loss变化情况,当loss为NAN或INF时,提前终止训练。可以在日志中输出loss,方便用户查看。 - - 详细内容,请参考[LossMonitor官网教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#mindsporecallback)。 - -- TimeMonitor - - 监控训练过程中每个epoch,每个step的运行时间。 - -## MindSpore自定义回调函数 - -MindSpore不但有功能强大的内置回调函数,还可以支持用户自定义回调函数。当用户有自己的特殊需求时,可以基于Callback基类,自定义满足用户自身需求的回调函数。Callback可以把训练过程中的重要信息记录下来,通过一个字典类型变量cb_params传递给Callback对象, 用户可以在各个自定义的Callback中获取到相关属性,执行自定义操作。 - -以下面两个场景为例,介绍自定义Callback回调函数的功能: - -1. 实现在规定时间内终止训练,用户可以设定时间阈值,当训练时间达到这个阈值后就终止训练过程。 - -2. 
实现保存训练过程中精度最高的checkpoint文件,用户可以自定义在每一轮迭代后都保存当前精度最高的模型。 - -详细内容,请参考[自定义Callback官网教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#id3)。 - -根据教程,用户可以很容易实现具有其他功能的自定义回调函数,如实现在每一轮训练结束后都输出相应的详细训练信息,包括训练进度、训练轮次、训练名称、loss值等;如实现在loss或模型精度达到一定值后停止训练,用户可以设定loss或模型精度的阈值,当loss或模型精度达到该阈值后就提前终止训练等。 diff --git a/docs/programming_guide/source_zh_cn/cell.ipynb b/docs/programming_guide/source_zh_cn/cell.ipynb deleted file mode 100644 index 124cbfc2e2d9685372ed3cdf0ddeb31f87fd1663..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/cell.ipynb +++ /dev/null @@ -1,579 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Cell构建及其子类\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/cell.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_cell.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9jZWxsLmlweW5i&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "MindSpore的`Cell`类是构建所有网络的基类,也是网络的基本单元。当用户需要自定义网络时,需要继承`Cell`类,并重写`__init__`方法和`construct`方法。\n", - "\n", - "损失函数、优化器和模型层等本质上也属于网络结构,也需要继承`Cell`类才能实现功能,同样用户也可以根据业务需求自定义这部分内容。\n", - "\n", - "本节内容首先将会介绍`Cell`类的关键成员函数,然后介绍基于`Cell`实现的MindSpore内置损失函数、优化器和模型层及使用方法,最后通过实例介绍如何利用`Cell`类构建自定义网络。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, 
- "source": [ - "## 关键成员函数\n", - "\n", - "### construct方法\n", - "\n", - "`Cell`类重写了`__call__`方法,在`Cell`类的实例被调用时,会执行`construct`方法。网络结构在`construct`方法里面定义。\n", - "\n", - "下面的样例中,我们构建了一个简单的网络实现卷积计算功能。构成网络的算子在`__init__`中定义,在`construct`方法里面使用,用例的网络结构为`Conv2d` -> `BiasAdd`。\n", - "\n", - "在`construct`方法中,`x`为输入数据,`output`是经过网络结构计算后得到的计算结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-08T01:01:31.855049Z", - "start_time": "2021-02-08T01:01:31.084345Z" - } - }, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "import mindspore.ops as ops\n", - "from mindspore import Parameter\n", - "from mindspore.common.initializer import initializer\n", - "\n", - "class Net(nn.Cell):\n", - " def __init__(self, in_channels=10, out_channels=20, kernel_size=3):\n", - " super(Net, self).__init__()\n", - " self.conv2d = ops.Conv2D(out_channels, kernel_size)\n", - " self.bias_add = ops.BiasAdd()\n", - " self.weight = Parameter(\n", - " initializer('normal', [out_channels, in_channels, kernel_size, kernel_size]),\n", - " name='conv.weight')\n", - "\n", - " def construct(self, x):\n", - " output = self.conv2d(x, self.weight)\n", - " output = self.bias_add(output, self.bias)\n", - " return output" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### parameters_dict\n", - "\n", - "`parameters_dict`方法识别出网络结构中所有的参数,返回一个以key为参数名,value为参数值的`OrderedDict`。\n", - "\n", - "`Cell`类中返回参数的方法还有许多,例如`get_parameters`、`trainable_params`等,具体使用方法可以参见[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.Cell.html)。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-08T01:01:31.867924Z", - "start_time": "2021-02-08T01:01:31.856066Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "odict_keys(['conv.weight'])\n", - "Parameter 
(name=conv.weight)\n" - ] - } - ], - "source": [ - "net = Net()\n", - "result = net.parameters_dict()\n", - "print(result.keys())\n", - "print(result['conv.weight'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "样例中的`Net`采用上文构造网络的用例,打印了网络中所有参数的名字和`weight`参数的结果。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### cells_and_names\n", - "\n", - "`cells_and_names`方法是一个迭代器,返回网络中每个`Cell`的名字和它的内容本身。\n", - "\n", - "用例简单实现了获取与打印每个`Cell`名字的功能,其中根据网络结构可知,存在1个`Cell`为`nn.Conv2d`。\n", - "\n", - "其中`nn.Conv2d`是`MindSpore`以Cell为基类封装好的一个卷积层,其具体内容将在“模型层”中进行介绍。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-08T01:01:31.893191Z", - "start_time": "2021-02-08T01:01:31.870508Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "('', Net1<\n", - " (conv): Conv2d\n", - " >)\n", - "('conv', Conv2d)\n", - "-------names-------\n", - "['conv']\n" - ] - } - ], - "source": [ - "import mindspore.nn as nn\n", - "\n", - "class Net1(nn.Cell):\n", - " def __init__(self):\n", - " super(Net1, self).__init__()\n", - " self.conv = nn.Conv2d(3, 64, 3, has_bias=False, weight_init='normal')\n", - "\n", - " def construct(self, x):\n", - " out = self.conv(x)\n", - " return out\n", - "\n", - "net = Net1()\n", - "names = []\n", - "for m in net.cells_and_names():\n", - " print(m)\n", - " names.append(m[0]) if m[0] else None\n", - "print('-------names-------')\n", - "print(names)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### set_grad\n", - "\n", - "`set_grad`接口功能是使用户构建反向网络,在不传入参数调用时,默认设置`requires_grad`为True,需要在计算网络反向的场景中使用。\n", - "\n", - "以`TrainOneStepCell`为例,其接口功能是使网络进行单步训练,需要计算网络反向,因此初始化方法里需要使用`set_grad`。\n", - "\n", - "`TrainOneStepCell`部分代码如下:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```python\n", - "class TrainOneStepCell(Cell):\n", - " def 
__init__(self, network, optimizer, sens=1.0):\n", - " super(TrainOneStepCell, self).__init__(auto_prefix=False)\n", - " self.network = network\n", - " self.network.set_grad()\n", - " ......\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "如果用户使用`TrainOneStepCell`等类似接口无需使用`set_grad`, 内部已封装实现。\n", - "\n", - "若用户需要自定义此类训练功能的接口,需要在其内部调用,或者在外部设置`network.set_grad`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## nn模块与ops模块的关系\n", - "\n", - "MindSpore的nn模块是Python实现的模型组件,是对低阶API的封装,主要包括各种模型层、损失函数、优化器等。\n", - "\n", - "同时nn也提供了部分与`Primitive`算子同名的接口,主要作用是对`Primitive`算子进行进一步封装,为用户提供更友好的API。\n", - "\n", - "重新分析上文介绍`construct`方法的用例,此用例是MindSpore的`nn.Conv2d`源码简化内容,内部会调用`ops.Conv2D`。`nn.Conv2d`卷积API增加输入参数校验功能并判断是否`bias`等,是一个高级封装的模型层。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-08T01:01:31.916550Z", - "start_time": "2021-02-08T01:01:31.894206Z" - } - }, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "import mindspore.ops as ops\n", - "from mindspore import Parameter\n", - "from mindspore.common.initializer import initializer\n", - "\n", - "class Net(nn.Cell):\n", - " def __init__(self, in_channels=10, out_channels=20, kernel_size=3):\n", - " super(Net, self).__init__()\n", - " self.conv2d = ops.Conv2D(out_channels, kernel_size)\n", - " self.bias_add = ops.BiasAdd()\n", - " self.weight = Parameter(\n", - " initializer('normal', [out_channels, in_channels, kernel_size, kernel_size]),\n", - " name='conv.weight')\n", - "\n", - " def construct(self, x):\n", - " output = self.conv2d(x, self.weight)\n", - " output = self.bias_add(output, self.bias)\n", - " return output" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 模型层\n", - "\n", - "在讲述了`Cell`的使用方法后可知,MindSpore能够以`Cell`为基类构造网络结构。\n", - "\n", - "为了方便用户的使用,MindSpore框架内置了大量的模型层,用户可以通过接口直接调用。\n", - "\n", - "同样,用户也可以自定义模型,此内容在“构建自定义网络”中介绍。" - ] - }, 
- { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 内置模型层\n", - "\n", - "MindSpore框架在`mindspore.nn`的layer层内置了丰富的接口,主要内容如下:\n", - "\n", - "- 激活层\n", - "\n", - " 激活层内置了大量的激活函数,在定义网络结构中经常使用。激活函数为网络加入了非线性运算,使得网络能够拟合效果更好。\n", - "\n", - " 主要接口有`Softmax`、`Relu`、`Elu`、`Tanh`、`Sigmoid`等。\n", - " \n", - "\n", - "- 基础层\n", - "\n", - " 基础层实现了网络中一些常用的基础结构,例如全连接层、Onehot编码、Dropout、平铺层等都在此部分实现。\n", - "\n", - " 主要接口有`Dense`、`Flatten`、`Dropout`、`Norm`、`OneHot`等。\n", - " \n", - "\n", - "- 容器层\n", - "\n", - " 容器层主要功能是实现一些存储多个Cell的数据结构。\n", - "\n", - " 主要接口有`SequentialCell`、`CellList`等。\n", - " \n", - "\n", - "- 卷积层\n", - "\n", - " 卷积层提供了一些卷积计算的功能,如普通卷积、深度卷积和卷积转置等。\n", - "\n", - " 主要接口有`Conv2d`、`Conv1d`、`Conv2dTranspose`、`Conv1dTranspose`等。\n", - " \n", - "\n", - "- 池化层\n", - "\n", - " 池化层提供了平均池化和最大池化等计算的功能。\n", - "\n", - " 主要接口有`AvgPool2d`、`MaxPool2d`和`AvgPool1d`。\n", - " \n", - "\n", - "- 嵌入层\n", - "\n", - " 嵌入层提供word embedding的计算功能,将输入的单词映射为稠密向量。\n", - "\n", - " 主要接口有`Embedding`、`EmbeddingLookup`、`EmbeddingLookUpSplitMode`等。\n", - " \n", - "\n", - "- 长短记忆循环层\n", - "\n", - " 长短记忆循环层提供LSTM计算功能。其中`LSTM`内部会调用`LSTMCell`接口,`LSTMCell`是一个LSTM单元,对一个LSTM层做运算,当涉及多LSTM网络层运算时,使用`LSTM`接口。\n", - "\n", - " 主要接口有`LSTM`和`LSTMCell`。\n", - " \n", - "\n", - "- 标准化层\n", - "\n", - " 标准化层提供了一些标准化的方法,即通过线性变换等方式将数据转换成均值和标准差。\n", - "\n", - " 主要接口有`BatchNorm1d`、`BatchNorm2d`、`LayerNorm`、`GroupNorm`、`GlobalBatchNorm`等。\n", - " \n", - "\n", - "- 数学计算层\n", - "\n", - " 数学计算层提供一些算子拼接而成的计算功能,例如数据生成和一些数学计算等。\n", - "\n", - " 主要接口有`ReduceLogSumExp`、`Range`、`LinSpace`、`LGamma`等。\n", - " \n", - "\n", - "- 图片层\n", - "\n", - " 图片计算层提供了一些矩阵计算相关的功能,将图片数据进行一些变换与计算。\n", - "\n", - " 主要接口有`ImageGradients`、`SSIM`、`MSSSIM`、`PSNR`、`CentralCrop`等。\n", - " \n", - "\n", - "- 量化层\n", - "\n", - " 量化是指将数据从float的形式转换成一段数据范围的int类型,所以量化层提供了一些数据量化的方法和模型层结构封装。\n", - "\n", - " 主要接口有`Conv2dBnAct`、`DenseBnAct`、`Conv2dBnFoldQuant`、`LeakyReLUQuant`等。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 
应用实例\n", - "\n", - "MindSpore的模型层在`mindspore.nn`下,使用方法如下所示:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-08T01:01:31.944015Z", - "start_time": "2021-02-08T01:01:31.917571Z" - } - }, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "\n", - "class Net(nn.Cell):\n", - " def __init__(self):\n", - " super(Net, self).__init__()\n", - " self.conv = nn.Conv2d(3, 64, 3, has_bias=False, weight_init='normal')\n", - " self.bn = nn.BatchNorm2d(64)\n", - " self.relu = nn.ReLU()\n", - " self.flatten = nn.Flatten()\n", - " self.fc = nn.Dense(64 * 222 * 222, 3)\n", - "\n", - " def construct(self, x):\n", - " x = self.conv(x)\n", - " x = self.bn(x)\n", - " x = self.relu(x)\n", - " x = self.flatten(x)\n", - " out = self.fc(x)\n", - " return out" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "依然是上述网络构造的用例,从这个用例中可以看出,程序调用了`Conv2d`、`BatchNorm2d`、`ReLU`、`Flatten`和`Dense`模型层的接口。\n", - "\n", - "在`Net`初始化方法里被定义,然后在`construct`方法里真正运行,这些模型层接口有序的连接,形成一个可执行的网络。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 损失函数\n", - "\n", - "目前MindSpore主要支持的损失函数有`L1Loss`、`MSELoss`、`SmoothL1Loss`、`SoftmaxCrossEntropyWithLogits`、`SampledSoftmaxLoss`、`BCELoss`和`CosineEmbeddingLoss`。\n", - "\n", - "MindSpore的损失函数全部是`Cell`的子类实现,所以也支持用户自定义损失函数,其构造方法在“构建自定义网络”中进行介绍。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 内置损失函数\n", - "\n", - "- L1Loss\n", - "\n", - " 计算两个输入数据的绝对值误差,用于回归模型。`reduction`参数默认值为mean,返回loss平均值结果,若`reduction`值为sum,返回loss累加结果,若`reduction`值为none,返回每个loss的结果。\n", - " \n", - "\n", - "- MSELoss\n", - "\n", - " 计算两个输入数据的平方误差,用于回归模型。`reduction`参数同`L1Loss`。\n", - " \n", - "\n", - "- SmoothL1Loss\n", - "\n", - " `SmoothL1Loss`为平滑L1损失函数,用于回归模型,阈值`beta`默认参数为1。\n", - " \n", - "\n", - "- SoftmaxCrossEntropyWithLogits\n", - "\n", - " 交叉熵损失函数,用于分类模型。当标签数据不是one-hot编码形式时,需要输入参数`sparse`为True。`reduction`参数默认值为none,其参数含义同`L1Loss`。\n", - " 
\n", - "\n", - "- CosineEmbeddingLoss\n", - "\n", - " `CosineEmbeddingLoss`用于衡量两个输入相似程度,用于分类模型。`margin`默认为0.0,`reduction`参数同`L1Loss`。\n", - "\n", - "- BCELoss\n", - "\n", - " 二值交叉熵损失,用于二分类。`weight`是一个batch中每个训练数据的损失的权重,默认值为None,表示权重均为1。`reduction`参数默认值为none,其参数含义同`L1Loss`。\n", - "- SampledSoftmaxLoss\n", - "\n", - " 抽样交叉熵损失函数,用于分类模型,一般在类别数很大时使用。`num_sampled`是抽样的类别数,`num_classes`是类别总数,`num_true`是每个用例的类别数,`sampled_values`是默认值为None的抽样候选值。`remove_accidental_hits`是移除“误中抽样”的开关, `seed`是默认值为0的抽样的随机种子,`reduction`参数默认值为none,其参数含义同L1Loss。\n", - " " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 应用实例\n", - "\n", - "MindSpore的损失函数全部在mindspore.nn下,使用方法如下所示:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-08T01:01:31.982064Z", - "start_time": "2021-02-08T01:01:31.946653Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1.5\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.nn as nn\n", - "from mindspore import Tensor\n", - "\n", - "loss = nn.L1Loss()\n", - "input_data = Tensor(np.array([[1, 2, 3], [2, 3, 4]]).astype(np.float32))\n", - "target_data = Tensor(np.array([[0, 2, 5], [3, 1, 1]]).astype(np.float32))\n", - "print(loss(input_data, target_data))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "此用例构造了两个Tensor数据,利用`nn.L1Loss`接口定义了loss,将`input_data`和`target_data`传入loss,执行L1Loss的计算,结果为1.5。若loss = nn.L1Loss(reduction=’sum’),则结果为9.0。若loss = nn.L1Loss(reduction=’none’),结果为[[1. 0. 2.] [1. 2. 
3.]]。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 优化算法\n", - "\n", - "`mindspore.nn.optim`是MindSpore框架中实现各种优化算法的模块,详细说明参见[优化算法](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/optim.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 构建自定义网络\n", - "\n", - "无论是网络结构,还是前文提到的模型层、损失函数和优化器等,本质上都是一个`Cell`,因此都可以自定义实现。\n", - "\n", - "首先构造一个继承`Cell`的子类,然后在`__init__`方法里面定义算子和模型层等,在`construct`方法里面构造网络结构。\n", - "\n", - "以LeNet网络为例,在`__init__`方法中定义了卷积层,池化层和全连接层等结构单元,然后在`construct`方法将定义的内容连接在一起,形成一个完整LeNet的网络结构。\n", - "\n", - "LeNet网络实现方式如下所示:" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-08T01:01:32.016187Z", - "start_time": "2021-02-08T01:01:31.983072Z" - } - }, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "\n", - "class LeNet5(nn.Cell):\n", - " def __init__(self):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(1, 6, 5, pad_mode=\"valid\")\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode=\"valid\")\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120)\n", - " self.fc2 = nn.Dense(120, 84)\n", - " self.fc3 = nn.Dense(84, 3)\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x)\n", - " return x" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" 
- } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/conf.py b/docs/programming_guide/source_zh_cn/conf.py deleted file mode 100644 index adbf28ca537437b7cb6165286313131ba60879e0..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/conf.py +++ /dev/null @@ -1,90 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys -import IPython -import re -import nbsphinx as nbs - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', - 'nbsphinx', - 'sphinx.ext.mathjax', - 'IPython.sphinxext.ipython_console_highlighting' -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. 
-exclude_patterns = [] - -highlight_language = 'none' - -suppress_warnings = [ - 'nbsphinx', -] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_search_language = 'zh' - -html_search_options = {'dict': '../../resource/jieba.txt'} - -html_static_path = ['_static'] - -# Remove extra outputs for nbsphinx extension. -nbsphinx_source_re = re.compile(r"(app\.connect\('html-collect-pages', html_collect_pages\))") -nbsphinx_math_re = re.compile(r"(\S.*$)") -mod_path = os.path.abspath(nbs.__file__) -with open(mod_path, "r+", encoding="utf8") as f: - contents = f.readlines() - for num, line in enumerate(contents): - _content_re = nbsphinx_source_re.search(line) - if _content_re and "#" not in line: - contents[num] = nbsphinx_source_re.sub(r"# \g<1>", line) - if "mathjax_config = app.config" in line and "#" not in line: - contents[num:num+10] = [nbsphinx_math_re.sub(r"# \g<1>", i) for i in contents[num:num+10]] - break - f.seek(0) - f.writelines(contents) \ No newline at end of file diff --git a/docs/programming_guide/source_zh_cn/context.ipynb b/docs/programming_guide/source_zh_cn/context.ipynb deleted file mode 100644 index 4e77d47097db314d250d8263463ffb71365bd7b3..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/context.ipynb +++ /dev/null @@ -1,273 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 运行管理\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/context.ipynb) 
[![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_context.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9jb250ZXh0LmlweW5i&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "初始化网络之前要配置context参数,用于控制程序执行的策略。比如选择执行模式、选择执行后端、配置分布式相关参数等。按照context参数设置实现的不同功能,可以将其分为执行模式管理、硬件管理、分布式管理和维测管理等。\n", - "\n", - "## 执行模式管理\n", - "\n", - "MindSpore支持PyNative和Graph这两种运行模式:\n", - "\n", - "- `PYNATIVE_MODE`:动态图模式,将神经网络中的各个算子逐一下发执行,方便用户编写和调试神经网络模型。\n", - "\n", - "- `GRAPH_MODE`:静态图模式或者图模式,将神经网络模型编译成一整张图,然后下发执行。该模式利用图优化等技术提高运行性能,同时有助于规模部署和跨平台运行。\n", - "\n", - "### 模式选择\n", - "\n", - "通过设置可以控制程序运行的模式,默认情况下,MindSpore处于PyNative模式。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-10T02:10:02.859770Z", - "start_time": "2021-02-10T02:10:02.856678Z" - } - }, - "outputs": [], - "source": [ - "from mindspore import context\n", - "context.set_context(mode=context.GRAPH_MODE)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 模式切换\n", - "\n", - "实现两种模式之间的切换。\n", - "\n", - "MindSpore处于PYNATIVE模式时,可以通过`context.set_context(mode=context.GRAPH_MODE)`切换为Graph模式;同样地,MindSpore处于Graph模式时,可以通过 `context.set_context(mode=context.PYNATIVE_MODE)`切换为PyNative模式。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Tensor(shape=[1, 4, 5, 5], dtype=Float32, value=\n", - "[[[[ 
1.64782144e-02, 5.31007685e-02, 5.31007685e-02, 5.31007685e-02, 5.11828624e-02],\n", - " [ 3.00714076e-02, 6.57572001e-02, 6.57572001e-02, 6.57572001e-02, 4.35083285e-02],\n", - " [ 3.00714076e-02, 6.57572001e-02, 6.57572001e-02, 6.57572001e-02, 4.35083285e-02]\n", - " [ 3.00714076e-02, 6.57572001e-02, 6.57572001e-02, 6.57572001e-02, 4.35083285e-02],\n", - " [ 1.84759758e-02, 4.71352898e-02, 4.71352898e-02, 4.71352898e-02, 3.72093469e-02]],\n", - " [[-3.36203352e-02, -6.12429380e-02, -6.12429380e-02, -6.12429380e-02, -4.33492810e-02],\n", - " [-2.67659649e-02, -8.04031491e-02, -8.04031491e-02, -8.04031491e-02, -6.84653893e-02],\n", - " [-2.67659649e-02, -8.04031491e-02, -8.04031491e-02, -8.04031491e-02, -6.84653893e-02]\n", - " [-2.67659649e-02, -8.04031491e-02, -8.04031491e-02, -8.04031491e-02, -6.84653893e-02],\n", - " [-5.57974726e-03, -6.80863336e-02, -6.80863336e-02, -6.80863336e-02, -8.38923305e-02]],\n", - " [[-1.60222687e-02, 2.26615220e-02, 2.26615220e-02, 2.26615220e-02, 6.03060052e-02],\n", - " [-6.76476881e-02, -2.96694487e-02, -2.96694487e-02, -2.96694487e-02, 4.86185402e-02],\n", - " [-6.76476881e-02, -2.96694487e-02, -2.96694487e-02, -2.96694487e-02, 4.86185402e-02]\n", - " [-6.76476881e-02, -2.96694487e-02, -2.96694487e-02, -2.96694487e-02, 4.86185402e-02],\n", - " [-6.52819276e-02, -3.50066647e-02, -3.50066647e-02, -3.50066647e-02, 2.85858363e-02]]\n", - " [[-3.10218725e-02, -3.84682454e-02, -3.84682454e-02, -3.84682454e-02, -8.58424231e-03],\n", - " [-4.27014455e-02, -7.07850009e-02, -7.07850009e-02, -7.07850009e-02, -5.36267459e-02],\n", - " [-4.27014455e-02, -7.07850009e-02, -7.07850009e-02, -7.07850009e-02, -5.36267459e-02]\n", - " [-4.27014455e-02, -7.07850009e-02, -7.07850009e-02, -7.07850009e-02, -5.36267459e-02],\n", - " [-1.23060495e-02, -4.99926135e-02, -4.99926135e-02, -4.99926135e-02, -4.71802950e-02]]]])" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import numpy as 
np\n", - "import mindspore.nn as nn\n", - "from mindspore import context, Tensor\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "\n", - "conv = nn.Conv2d(3, 4, 3, bias_init='zeros')\n", - "input_data = Tensor(np.ones([1, 3, 5, 5]).astype(np.float32))\n", - "conv(input_data)\n", - "context.set_context(mode=context.PYNATIVE_MODE)\n", - "\n", - "conv(input_data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "上面的例子先将运行模式设置为`GRAPH_MODE`模式,然后将模式切换为`PYNATIVE_MODE`模式,实现了模式的切换。\n", - "\n", - "> 本示例代码运行于GPU环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 硬件管理\n", - "\n", - "硬件管理部分主要包括`device_target`和`device_id`两个参数。\n", - "\n", - "- `device_target`: 用于设置目标设备,支持Ascend、GPU和CPU,可以根据实际环境情况设置。\n", - "\n", - "- `device_id`: 表示卡物理序号,即卡所在机器中的实际序号。如果目标设备为Ascend,且规格为N*Ascend(其中N>1,如8*Ascend),在非分布式模式执行的情况下,为了避免设备的使用冲突,可以通过设置`device_id`决定程序执行的device编号,该编号范围为:0 ~ 服务器总设备数量-1,服务器总设备数量不能超过4096,默认为设备0。\n", - "\n", - "> 在GPU和CPU上,设置`device_id`参数无效。\n", - "\n", - "代码样例如下:\n", - "```python\n", - "from mindspore import context\n", - "context.set_context(device_target=\"Ascend\", device_id=6)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 分布式管理\n", - "\n", - "context中有专门用于配置并行训练参数的接口:`context.set_auto_parallel_context`,该接口必须在初始化网络之前调用。\n", - "\n", - "> 分布式并行训练详细介绍可以查看[分布式并行训练](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_tutorials.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 维测管理\n", - "\n", - "为了方便维护和定位问题,context提供了大量维测相关的参数配置,如采集profiling数据、异步数据dump功能和print算子落盘等。\n", - "\n", - "### 采集profiling数据\n", - "\n", - "系统支持在训练过程中采集profiling数据,然后通过profiling工具进行性能分析。当前支持采集的profiling数据包括:\n", - "\n", - "- `enable_profiling`:是否开启profiling功能。设置为True,表示开启profiling功能,从`enable_options`读取profiling的采集选项;设置为False,表示关闭profiling功能,仅采集`training_trace`。\n", - "\n", - "- 
`profiling_options`:profiling采集选项,取值如下,支持采集多项数据。 \n", - " - `result_path`:Profiling采集结果文件保存路径。该参数指定的目录需要在启动训练的环境上(容器或Host侧)提前创建且确保安装时配置的运行用户具有读写权限,支持配置绝对路径或相对路径(相对执行命令时的当前路径)。 \n", - " - `training_trace`:采集迭代轨迹数据,即训练任务及AI软件栈的软件信息,实现对训练任务的性能分析,重点关注数据增强、前后向计算、梯度聚合更新等相关数据,取值on/off。 \n", - " - `task_trace`:采集任务轨迹数据,即昇腾910处理器HWTS/AICore的硬件信息,分析任务开始、结束等信息,取值on/off。 \n", - " - `aicpu_trace`:采集aicpu数据增强的profiling数据。取值on/off。 \n", - " - `fp_point`:`training_trace`为on时需要配置。指定训练网络迭代轨迹正向算子的开始位置,用于记录前向算子开始时间戳。配置值为指定的正向第一个算子名字。当该值为空时,系统自动获取正向第一个算子名字。 \n", - " - `bp_point`:`training_trace`为on时需要配置。指定训练网络迭代轨迹反向算子的结束位置,用于记录反向算子结束时间戳。配置值为指定的反向最后一个算子名字。当该值为空时,系统自动获取反向最后一个算子名字。 \n", - " - `ai_core_metrics`取值如下:\n", - "\n", - " - ArithmeticUtilization:各种计算类指标占比统计。\n", - "\n", - " - PipeUtilization:计算单元和搬运单元耗时占比,该项为默认值。\n", - "\n", - " - Memory:外部内存读写类指令占比。\n", - "\n", - " - MemoryL0:内部内存读写类指令占比。\n", - "\n", - " - ResourceConflictRatio:流水线队列类指令占比。\n", - "\n", - "代码样例如下:\n", - "\n", - "```python\n", - "from mindspore import context\n", - "context.set_context(enable_profiling=True, profiling_options= '{\"result_path\":\"/home/data/output\",\"training_trace\":\"on\"}')\n", - "\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 保存MindIR\n", - "\n", - "通过`context.set_context(save_graphs=True)`来保存各个编译阶段的中间代码。\n", - "\n", - "被保存的中间代码有两种格式:一个是后缀名为`.ir`的文本格式,一个是后缀名为`.dot`的图形化格式。\n", - "\n", - "当网络规模较大时建议使用更高效的文本格式来查看,当网络规模不大时,建议使用更直观的图形化格式来查看。\n", - "\n", - "代码样例如下:\n", - "\n", - "```python\n", - "from mindspore import context\n", - "context.set_context(save_graphs=True)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> MindIR详细介绍可以查看[MindSpore IR(MindIR)](https://www.mindspore.cn/doc/note/zh-CN/master/design/mindspore/mindir.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### print算子落盘\n", - "\n", - 
"默认情况下,MindSpore的自研print算子可以将用户输入的Tensor或字符串信息打印出来,支持多字符串输入,多Tensor输入和字符串与Tensor的混合输入,输入参数以逗号隔开。\n", - "\n", - "> Print打印功能可以查看[Print算子功能介绍](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#print)。\n", - "\n", - "- `print_file_path`:可以将print算子数据保存到文件,同时关闭屏幕打印功能。如果保存的文件已经存在,则会给文件添加时间戳后缀。数据保存到文件可以解决数据量较大时屏幕打印数据丢失的问题。\n", - "\n", - "代码样例如下:\n", - "\n", - "```python\n", - "from mindspore import context\n", - "context.set_context(print_file_path=\"print.pb\")\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> context接口详细介绍可以查看[mindspore.context](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.context.html)。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/customized.rst b/docs/programming_guide/source_zh_cn/customized.rst deleted file mode 100644 index 129b147956d9fc0e702dc68cc1e0add0f7e6d2d0..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/customized.rst +++ /dev/null @@ -1,9 +0,0 @@ -自定义算子 -=========== - -.. 
toctree:: - :maxdepth: 1 - - 自定义算子(Ascend) - 自定义算子(GPU) - 自定义算子(CPU) diff --git a/docs/programming_guide/source_zh_cn/dataset_conversion.ipynb b/docs/programming_guide/source_zh_cn/dataset_conversion.ipynb deleted file mode 100644 index bb5171ec76fe8f296bff720c2ba86f9a695ce956..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/dataset_conversion.ipynb +++ /dev/null @@ -1,862 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# MindSpore数据格式转换\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/dataset_conversion.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_dataset_conversion.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9kYXRhc2V0X2NvbnZlcnNpb24uaXB5bmI=&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "用户可以将非标准的数据集和常用的数据集转换为MindSpore数据格式,即MindRecord,从而方便地加载到MindSpore中进行训练。同时,MindSpore在部分场景做了性能优化,使用MindRecord可以获得更好的性能。\n", - "\n", - "## 非标准数据集转换MindRecord\n", - "\n", - "下面主要介绍如何将CV类数据和NLP类数据转换为MindRecord,并通过`MindDataset`实现MindRecord文件的读取。\n", - "\n", - "### 转换CV类数据集\n", - "\n", - "本示例主要介绍用户如何将自己的CV类数据集转换成MindRecord,并使用`MindDataset`读取。\n", - "\n", - "本示例首先创建一个包含100条记录的MindRecord文件,其样本包含`file_name`(字符串)、\n", - "`label`(整型)、 `data`(二进制)三个字段,然后使用`MindDataset`读取该MindRecord文件。\n", - "\n", - "1. 
导入相关模块。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-22T10:33:34.444561Z", - "start_time": "2021-02-22T10:33:34.441434Z" - } - }, - "outputs": [], - "source": [ - "from io import BytesIO\n", - "import os\n", - "import mindspore.dataset as ds\n", - "from mindspore.mindrecord import FileWriter\n", - "import mindspore.dataset.vision.c_transforms as vision\n", - "from PIL import Image" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 生成100张图像,并转换成MindRecord。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-22T10:34:03.889515Z", - "start_time": "2021-02-22T10:34:02.950207Z" - } - }, - "outputs": [ - { - "data": { - "text/plain": [ - "MSRStatus.SUCCESS" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "MINDRECORD_FILE = \"test.mindrecord\"\n", - "\n", - "if os.path.exists(MINDRECORD_FILE):\n", - " os.remove(MINDRECORD_FILE)\n", - " os.remove(MINDRECORD_FILE + \".db\")\n", - "\n", - "writer = FileWriter(file_name=MINDRECORD_FILE, shard_num=1)\n", - "\n", - "cv_schema = {\"file_name\": {\"type\": \"string\"}, \"label\": {\"type\": \"int32\"}, \"data\": {\"type\": \"bytes\"}}\n", - "writer.add_schema(cv_schema, \"it is a cv dataset\")\n", - "\n", - "writer.add_index([\"file_name\", \"label\"])\n", - "\n", - "data = []\n", - "for i in range(100):\n", - " i += 1\n", - "\n", - " sample = {}\n", - " white_io = BytesIO()\n", - " Image.new('RGB', (i*10, i*10), (255, 255, 255)).save(white_io, 'JPEG')\n", - " image_bytes = white_io.getvalue()\n", - " sample['file_name'] = str(i) + \".jpg\"\n", - " sample['label'] = i\n", - " sample['data'] = white_io.getvalue()\n", - "\n", - " data.append(sample)\n", - " if i % 10 == 0:\n", - " writer.write_raw_data(data)\n", - " data = []\n", - "\n", - "if data:\n", - " writer.write_raw_data(data)\n", - "\n", - 
"writer.commit()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**参数说明:**\n", - "\n", - "- `MINDRECORD_FILE`:输出的MindRecord文件路径。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 通过`MindDataset`读取MindRecord。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-22T10:34:07.729322Z", - "start_time": "2021-02-22T10:34:07.575711Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Got 100 samples\n" - ] - } - ], - "source": [ - "data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE)\n", - "decode_op = vision.Decode()\n", - "data_set = data_set.map(operations=decode_op, input_columns=[\"data\"], num_parallel_workers=2)\n", - "count = 0\n", - "for item in data_set.create_dict_iterator(output_numpy=True):\n", - " count += 1\n", - "print(\"Got {} samples\".format(count))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "### 转换NLP类数据集\n", - "\n", - "本示例主要介绍用户如何将自己的NLP类数据集转换成MindRecord,并使用`MindDataset`读取。为了方便展示,此处略去了将文本转换成字典序的预处理过程。\n", - "\n", - "本示例首先创建一个包含100条记录的MindRecord文件,其样本包含八个字段,均为整型数组,然后使用`MindDataset`读取该MindRecord文件。\n", - "\n", - "1. 导入相关模块。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-22T10:34:21.606147Z", - "start_time": "2021-02-22T10:34:21.603094Z" - } - }, - "outputs": [], - "source": [ - "import os\n", - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "from mindspore.mindrecord import FileWriter" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
生成100条文本数据,并转换成MindRecord。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-22T10:34:23.883130Z", - "start_time": "2021-02-22T10:34:23.660213Z" - } - }, - "outputs": [ - { - "data": { - "text/plain": [ - "MSRStatus.SUCCESS" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "MINDRECORD_FILE = \"test.mindrecord\"\n", - "\n", - "if os.path.exists(MINDRECORD_FILE):\n", - " os.remove(MINDRECORD_FILE)\n", - " os.remove(MINDRECORD_FILE + \".db\")\n", - "\n", - "writer = FileWriter(file_name=MINDRECORD_FILE, shard_num=1)\n", - "\n", - "nlp_schema = {\"source_sos_ids\": {\"type\": \"int64\", \"shape\": [-1]},\n", - " \"source_sos_mask\": {\"type\": \"int64\", \"shape\": [-1]},\n", - " \"source_eos_ids\": {\"type\": \"int64\", \"shape\": [-1]},\n", - " \"source_eos_mask\": {\"type\": \"int64\", \"shape\": [-1]},\n", - " \"target_sos_ids\": {\"type\": \"int64\", \"shape\": [-1]},\n", - " \"target_sos_mask\": {\"type\": \"int64\", \"shape\": [-1]},\n", - " \"target_eos_ids\": {\"type\": \"int64\", \"shape\": [-1]},\n", - " \"target_eos_mask\": {\"type\": \"int64\", \"shape\": [-1]}}\n", - "writer.add_schema(nlp_schema, \"it is a preprocessed nlp dataset\")\n", - "\n", - "data = []\n", - "for i in range(100):\n", - " i += 1\n", - "\n", - " sample = {\"source_sos_ids\": np.array([i, i + 1, i + 2, i + 3, i + 4], dtype=np.int64),\n", - " \"source_sos_mask\": np.array([i * 1, i * 2, i * 3, i * 4, i * 5, i * 6, i * 7], dtype=np.int64),\n", - " \"source_eos_ids\": np.array([i + 5, i + 6, i + 7, i + 8, i + 9, i + 10], dtype=np.int64),\n", - " \"source_eos_mask\": np.array([19, 20, 21, 22, 23, 24, 25, 26, 27], dtype=np.int64),\n", - " \"target_sos_ids\": np.array([28, 29, 30, 31, 32], dtype=np.int64),\n", - " \"target_sos_mask\": np.array([33, 34, 35, 36, 37, 38], dtype=np.int64),\n", - " \"target_eos_ids\": np.array([39, 40, 41, 42, 43, 44, 45, 46, 47], 
dtype=np.int64),\n", - " \"target_eos_mask\": np.array([48, 49, 50, 51], dtype=np.int64)}\n", - "\n", - " data.append(sample)\n", - " if i % 10 == 0:\n", - " writer.write_raw_data(data)\n", - " data = []\n", - "\n", - "if data:\n", - " writer.write_raw_data(data)\n", - "\n", - "writer.commit()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**参数说明:**\n", - "\n", - "- `MINDRECORD_FILE`:输出的MindRecord文件路径。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 通过`MindDataset`读取MindRecord。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-22T10:34:27.133717Z", - "start_time": "2021-02-22T10:34:27.083785Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Got 100 samples\n" - ] - } - ], - "source": [ - "data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE)\n", - "count = 0\n", - "for item in data_set.create_dict_iterator():\n", - " count += 1\n", - "print(\"Got {} samples\".format(count))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 常用数据集转换MindRecord\n", - "\n", - "MindSpore提供转换常用数据集的工具类,能够将常用的数据集转换为MindRecord。部分常用数据集及其对应的工具类列表如下。\n", - "\n", - "| 数据集 | 格式转换工具类 |\n", - "| :------- | :----------- |\n", - "| CIFAR-10 | Cifar10ToMR |\n", - "| ImageNet | ImageNetToMR |\n", - "| TFRecord | TFRecordToMR |\n", - "| CSV File | CsvToMR |\n", - "\n", - "更多数据集转换的详细说明可参见[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.mindrecord.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 转换CIFAR-10数据集\n", - "\n", - "用户可以通过`Cifar10ToMR`类,将CIFAR-10原始数据转换为MindRecord,并使用`MindDataset`读取。\n", - "\n", - "1. 
下载[CIFAR-10数据集](https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz)并解压到指定目录,执行如下命令:" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/cifar-10-batches-py\n", - "├── batches.meta\n", - "├── data_batch_1\n", - "├── data_batch_2\n", - "├── data_batch_3\n", - "├── data_batch_4\n", - "├── data_batch_5\n", - "├── readme.html\n", - "└── test_batch\n", - "\n", - "0 directories, 8 files\n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-python.tar.gz\n", - "!mkdir -p datasets\n", - "!tar -xzf cifar-10-python.tar.gz -C datasets\n", - "!tree ./datasets/cifar-10-batches-py" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 导入相关模块" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-18T02:27:04.856761Z", - "start_time": "2021-02-18T02:26:46.536793Z" - } - }, - "outputs": [], - "source": [ - "import os\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as vision\n", - "from mindspore.mindrecord import Cifar10ToMR" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 
创建`Cifar10ToMR`对象,调用`transform`接口,将CIFAR-10数据集转换为MindRecord。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MSRStatus.SUCCESS" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ds_target_path = \"./datasets/mindspore_dataset_conversion/\"\n", - "# clean old run files \n", - "os.system(\"rm -f {}*\".format(ds_target_path))\n", - "os.system(\"mkdir -p {}\".format(ds_target_path))\n", - "\n", - "CIFAR10_DIR = \"./datasets/cifar-10-batches-py\"\n", - "MINDRECORD_FILE = \"./datasets/mindspore_dataset_conversion/cifar10.mindrecord\"\n", - "cifar10_transformer = Cifar10ToMR(CIFAR10_DIR, MINDRECORD_FILE)\n", - "cifar10_transformer.transform(['label'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**参数说明:**\n", - "\n", - "- `CIFAR10_DIR`:CIFAR-10数据集路径。\n", - "\n", - "- `MINDRECORD_FILE`:输出的MindRecord文件路径。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "4. 通过`MindDataset`读取MindRecord。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Got 50000 samples\n" - ] - } - ], - "source": [ - "data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE)\n", - "decode_op = vision.Decode()\n", - "data_set = data_set.map(operations=decode_op, input_columns=[\"data\"], num_parallel_workers=2)\n", - "count = 0\n", - "for item in data_set.create_dict_iterator(output_numpy=True):\n", - " count += 1\n", - "print(\"Got {} samples\".format(count))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 转换ImageNet数据集\n", - "\n", - "用户可以通过`ImageNetToMR`类,将ImageNet原始数据(图片、标注)转换为MindRecord,并使用`MindDataset`读取。\n", - "\n", - "1. 
下载[ImageNet数据集](http://image-net.org/download),将所有图片存放在`images/`文件夹,用一个映射文件`labels_map.txt`记录图片和标签的对应关系。映射文件包含2列,分别为各类别图片目录和标签ID,用空格隔开,映射文件示例如下:\n", - "\n", - "```text\n", - "n01440760 0\n", - "n01443537 1\n", - "n01484850 2\n", - "n01491361 3\n", - "n01494475 4\n", - "n01496331 5\n", - "\n", - "```\n", - "\n", - "文件目录结构如下所示:\n", - "\n", - "```text\n", - "├─ labels_map.txt\n", - "└─ images\n", - " └─ ......\n", - "```\n", - "\n", - "2. 导入相关模块。\n", - "\n", - "```python\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as vision\n", - "from mindspore.mindrecord import ImageNetToMR\n", - "```\n", - "\n", - "3. 创建ImageNetToMR对象,调用transform接口,将数据集转换为MindRecord。\n", - "\n", - "```python\n", - "IMAGENET_MAP_FILE = \"./labels_map.txt\"\n", - "IMAGENET_IMAGE_DIR = \"./images\"\n", - "MINDRECORD_FILE = \"./imagenet.mindrecord\"\n", - "imagenet_transformer = ImageNetToMR(IMAGENET_MAP_FILE, IMAGENET_IMAGE_DIR, MINDRECORD_FILE, partition_number=1)\n", - "imagenet_transformer.transform()\n", - "```\n", - "\n", - "**参数说明:**\n", - "\n", - "- IMAGENET_MAP_FILE:ImageNet数据集标签映射文件的路径。\n", - "\n", - "- IMAGENET_IMAGE_DIR:包含ImageNet所有图片的文件夹路径。\n", - "\n", - "- MINDRECORD_FILE:输出的MindRecord文件路径。\n", - "\n", - "\n", - "4. 通过MindDataset读取MindRecord。\n", - "\n", - "```python\n", - "data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE)\n", - "decode_op = vision.Decode()\n", - "data_set = data_set.map(operations=decode_op, input_columns=[\"image\"], num_parallel_workers=2)\n", - "count = 0\n", - "for item in data_set.create_dict_iterator(output_numpy=True):\n", - " count += 1\n", - "print(\"Got {} samples\".format(count))\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 转换CSV数据集\n", - "\n", - "本示例首先创建一个包含5条记录的CSV文件,然后通过`CsvToMR`工具类将CSV文件转换为MindRecord,并最终通过`MindDataset`将其读取出来。\n", - "\n", - "1. 
导入相关模块。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "import csv\n", - "import os\n", - "import mindspore.dataset as ds\n", - "from mindspore.mindrecord import CsvToMR" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 生成CSV文件,并转换成MindRecord。" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "CSV_FILE = \"test.csv\"\n", - "MINDRECORD_FILE = \"test.mindrecord\"\n", - "\n", - "def generate_csv():\n", - " headers = [\"id\", \"name\", \"math\", \"english\"]\n", - " rows = [(1, \"Lily\", 78.5, 90),\n", - " (2, \"Lucy\", 99, 85.2),\n", - " (3, \"Mike\", 65, 71),\n", - " (4, \"Tom\", 95, 99),\n", - " (5, \"Jeff\", 85, 78.5)]\n", - " with open(CSV_FILE, 'w', encoding='utf-8') as f:\n", - " writer = csv.writer(f)\n", - " writer.writerow(headers)\n", - " writer.writerows(rows)\n", - "\n", - "generate_csv()\n", - "\n", - "if os.path.exists(MINDRECORD_FILE):\n", - " os.remove(MINDRECORD_FILE)\n", - " os.remove(MINDRECORD_FILE + \".db\")\n", - "\n", - "csv_transformer = CsvToMR(CSV_FILE, MINDRECORD_FILE, partition_number=1)\n", - "\n", - "csv_transformer.transform()\n", - "\n", - "assert os.path.exists(MINDRECORD_FILE)\n", - "assert os.path.exists(MINDRECORD_FILE + \".db\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**参数说明:**\n", - "\n", - "- `CSV_FILE`:CSV文件的路径。\n", - "\n", - "- `MINDRECORD_FILE`:输出的MindRecord文件路径。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 
通过`MindDataset`读取MindRecord。" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Got 5 samples\n" - ] - } - ], - "source": [ - "data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE)\n", - "count = 0\n", - "for item in data_set.create_dict_iterator(output_numpy=True):\n", - " count += 1\n", - "print(\"Got {} samples\".format(count))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 转换TFRecord数据集\n", - "\n", - "> 目前支持TensorFlow 1.13.0-rc1及以上版本。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "此部分示例需提前安装TensorFlow,如果未安装,执行下面的命令进行安装。如本文档以Notebook运行时,完成安装后,需要重启kernel后,执行后续代码。" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [], - "source": [ - "os.system('pip install tensorflow') if os.system('python -c \"import tensorflow\"') else print(\"TensorFlow installed\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本示例首先通过TensorFlow创建一个TFRecord文件,然后通过`TFRecordToMR`工具类将TFRecord文件转换为MindRecord,最后通过`MindDataset`将其读取出来,并使用`Decode`算子对`image_bytes`字段进行解码。\n", - "\n", - "1. 导入相关模块。" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "import collections\n", - "from io import BytesIO\n", - "import os\n", - "import mindspore.dataset as ds\n", - "from mindspore.mindrecord import TFRecordToMR\n", - "import mindspore.dataset.vision.c_transforms as vision\n", - "from PIL import Image\n", - "import tensorflow as tf" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
生成TFRecord文件。" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Write 10 rows in tfrecord.\n" - ] - } - ], - "source": [ - "TFRECORD_FILE = \"test.tfrecord\"\n", - "MINDRECORD_FILE = \"test.mindrecord\"\n", - "\n", - "def generate_tfrecord():\n", - " def create_int_feature(values):\n", - " if isinstance(values, list):\n", - " feature = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values)))\n", - " else:\n", - " feature = tf.train.Feature(int64_list=tf.train.Int64List(value=[values]))\n", - " return feature\n", - "\n", - " def create_float_feature(values):\n", - " if isinstance(values, list):\n", - " feature = tf.train.Feature(float_list=tf.train.FloatList(value=list(values)))\n", - " else:\n", - " feature = tf.train.Feature(float_list=tf.train.FloatList(value=[values]))\n", - " return feature\n", - "\n", - " def create_bytes_feature(values):\n", - " if isinstance(values, bytes):\n", - " white_io = BytesIO()\n", - " Image.new('RGB', (10, 10), (255, 255, 255)).save(white_io, 'JPEG')\n", - " image_bytes = white_io.getvalue()\n", - " feature = tf.train.Feature(bytes_list=tf.train.BytesList(value=[image_bytes]))\n", - " else:\n", - " feature = tf.train.Feature(bytes_list=tf.train.BytesList(value=[bytes(values, encoding='utf-8')]))\n", - " return feature\n", - "\n", - " writer = tf.io.TFRecordWriter(TFRECORD_FILE)\n", - "\n", - " example_count = 0\n", - " for i in range(10):\n", - " file_name = \"000\" + str(i) + \".jpg\"\n", - " image_bytes = bytes(str(\"aaaabbbbcccc\" + str(i)), encoding=\"utf-8\")\n", - " int64_scalar = i\n", - " float_scalar = float(i)\n", - " int64_list = [i, i+1, i+2, i+3, i+4, i+1234567890]\n", - " float_list = [float(i), float(i+1), float(i+2.8), float(i+3.2),\n", - " float(i+4.4), float(i+123456.9), float(i+98765432.1)]\n", - "\n", - " features = collections.OrderedDict()\n", - " features[\"file_name\"] = 
create_bytes_feature(file_name)\n", - " features[\"image_bytes\"] = create_bytes_feature(image_bytes)\n", - " features[\"int64_scalar\"] = create_int_feature(int64_scalar)\n", - " features[\"float_scalar\"] = create_float_feature(float_scalar)\n", - " features[\"int64_list\"] = create_int_feature(int64_list)\n", - " features[\"float_list\"] = create_float_feature(float_list)\n", - "\n", - " tf_example = tf.train.Example(features=tf.train.Features(feature=features))\n", - " writer.write(tf_example.SerializeToString())\n", - " example_count += 1\n", - " writer.close()\n", - " print(\"Write {} rows in tfrecord.\".format(example_count))\n", - "\n", - "generate_tfrecord()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**参数说明:**\n", - "\n", - "- `TFRECORD_FILE`:TFRecord文件的路径。\n", - "\n", - "- `MINDRECORD_FILE`:输出的MindRecord文件路径。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 将TFRecord转换成MindRecord。" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [], - "source": [ - "feature_dict = {\"file_name\": tf.io.FixedLenFeature([], tf.string),\n", - " \"image_bytes\": tf.io.FixedLenFeature([], tf.string),\n", - " \"int64_scalar\": tf.io.FixedLenFeature([], tf.int64),\n", - " \"float_scalar\": tf.io.FixedLenFeature([], tf.float32),\n", - " \"int64_list\": tf.io.FixedLenFeature([6], tf.int64),\n", - " \"float_list\": tf.io.FixedLenFeature([7], tf.float32),\n", - " }\n", - "\n", - "if os.path.exists(MINDRECORD_FILE):\n", - " os.remove(MINDRECORD_FILE)\n", - " os.remove(MINDRECORD_FILE + \".db\")\n", - "\n", - "tfrecord_transformer = TFRecordToMR(TFRECORD_FILE, MINDRECORD_FILE, feature_dict, [\"image_bytes\"])\n", - "tfrecord_transformer.transform()\n", - "\n", - "assert os.path.exists(MINDRECORD_FILE)\n", - "assert os.path.exists(MINDRECORD_FILE + \".db\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "4. 
通过`MindDataset`读取MindRecord。" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Got 10 samples\n" - ] - } - ], - "source": [ - "data_set = ds.MindDataset(dataset_file=MINDRECORD_FILE)\n", - "decode_op = vision.Decode()\n", - "data_set = data_set.map(operations=decode_op, input_columns=[\"image_bytes\"], num_parallel_workers=2)\n", - "count = 0\n", - "for item in data_set.create_dict_iterator(output_numpy=True):\n", - " count += 1\n", - "print(\"Got {} samples\".format(count))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/dataset_loading.ipynb b/docs/programming_guide/source_zh_cn/dataset_loading.ipynb deleted file mode 100644 index b494cceb826c14c0d9cb3a9e0f7bfea45c9b7f7a..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/dataset_loading.ipynb +++ /dev/null @@ -1,986 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 数据集加载\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/dataset_loading.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_dataset_loading.ipynb) 
[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9kYXRhc2V0X2xvYWRpbmcuaXB5bmI=&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "MindSpore支持加载图像领域常用的数据集,用户可以直接使用`mindspore.dataset`中对应的类实现数据集的加载。目前支持的常用数据集及对应的数据集类如下表所示。\n", - "\n", - "| 图像数据集 | 数据集类 | 数据集简介 |\n", - "| :--------- | :-------------- | :----------------------------------------------------------------------------------------------------------------------------------- |\n", - "| MNIST | MnistDataset | MNIST是一个大型手写数字图像数据集,拥有60,000张训练图像和10,000张测试图像,常用于训练各种图像处理系统。 |\n", - "| CIFAR-10 | Cifar10Dataset | CIFAR-10是一个微小图像数据集,包含10种类别下的60,000张32x32大小彩色图像,平均每种类别6,000张,其中5,000张为训练集,1,000张为测试集。 |\n", - "| CIFAR-100 | Cifar100Dataset | CIFAR-100与CIFAR-10类似,但拥有100种类别,平均每种类别600张,其中500张为训练集,100张为测试集。 |\n", - "| CelebA | CelebADataset | CelebA是一个大型人脸图像数据集,包含超过200,000张名人人脸图像,每张图像拥有40个特征标记。 |\n", - "| PASCAL-VOC | VOCDataset | PASCAL-VOC是一个常用图像数据集,被广泛用于目标检测、图像分割等计算机视觉领域。 |\n", - "| COCO | CocoDataset | COCO是一个大型目标检测、图像分割、姿态估计数据集。 |\n", - "| CLUE | CLUEDataset | CLUE是一个大型中文语义理解数据集。 |\n", - "\n", - "MindSpore还支持加载多种数据存储格式下的数据集,用户可以直接使用`mindspore.dataset`中对应的类加载磁盘中的数据文件。目前支持的数据格式及对应加载方式如下表所示。\n", - "\n", - "| 数据格式 | 数据集类 | 数据格式简介 |\n", - "| :--------- | :----------------- | :------------------------------------------------------------------------------------------------ |\n", - "| MindRecord | MindDataset | MindRecord是MindSpore的自研数据格式,具有读写高效、易于分布式处理等优势。 |\n", - "| Manifest | ManifestDataset | Manifest是华为ModelArts支持的一种数据格式,描述了原始文件和标注信息,可用于标注、训练、推理场景。 |\n", - "| TFRecord | TFRecordDataset | TFRecord是TensorFlow定义的一种二进制数据文件格式。 |\n", - "| NumPy | NumpySlicesDataset | 
NumPy数据源指的是已经读入内存中的NumPy arrays格式数据集。 |\n", - "| Text File | TextFileDataset | Text File指的是常见的文本格式数据。 |\n", - "| CSV File | CSVDataset | CSV指逗号分隔值,其文件以纯文本形式存储表格数据。 |\n", - "\n", - "MindSpore也同样支持使用`GeneratorDataset`自定义数据集的加载方式,用户可以根据需要实现自己的数据集类。\n", - "\n", - "| 数据集类 | 数据格式简介 |\n", - "| :----------------- | :------------------------------------ |\n", - "| GeneratorDataset | 用户自定义的数据集读取、处理的方式。 |\n", - "| NumpySlicesDataset | 用户自定义的由NumPy构建数据集的方式。 |\n", - "\n", - "> 更多详细的数据集加载接口说明,参见[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 常用数据集加载\n", - "\n", - "下面将介绍几种常用数据集的加载方式。\n", - "\n", - "### CIFAR-10/100数据集\n", - "\n", - "下载[CIFAR-10数据集](https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz)并解压到指定位置:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/cifar-10-batches-bin\n", - "├── readme.html\n", - "├── test\n", - "│   └── test_batch.bin\n", - "└── train\n", - " ├── batches.meta.txt\n", - " ├── data_batch_1.bin\n", - " ├── data_batch_2.bin\n", - " ├── data_batch_3.bin\n", - " ├── data_batch_4.bin\n", - " └── data_batch_5.bin\n", - "\n", - "2 directories, 8 files\n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz\n", - "!mkdir -p datasets\n", - "!tar -xzf cifar-10-binary.tar.gz -C datasets\n", - "!mkdir -p datasets/cifar-10-batches-bin/train datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/test_batch.bin datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/data_batch*.bin datasets/cifar-10-batches-bin/batches.meta.txt datasets/cifar-10-batches-bin/train\n", - "!tree ./datasets/cifar-10-batches-bin" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"下面的样例通过`Cifar10Dataset`接口加载CIFAR-10数据集,使用顺序采样器获取其中5个样本,然后展示了对应图片的形状和标签。\n", - "\n", - "CIFAR-100数据集和MNIST数据集的加载方式也与之类似。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Image shape: (32, 32, 3) , Label: 6\n", - "Image shape: (32, 32, 3) , Label: 9\n", - "Image shape: (32, 32, 3) , Label: 9\n", - "Image shape: (32, 32, 3) , Label: 4\n", - "Image shape: (32, 32, 3) , Label: 1\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train/\"\n", - "\n", - "sampler = ds.SequentialSampler(num_samples=5)\n", - "dataset = ds.Cifar10Dataset(DATA_DIR, sampler=sampler)\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(\"Image shape:\", data['image'].shape, \", Label:\", data['label'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### VOC数据集\n", - "\n", - "VOC数据集有多个版本,此处以VOC2012为例。下载[VOC2012数据集](http://host.robots.ox.ac.uk/pascal/VOC/voc2012/VOCtrainval_11-May-2012.tar)并解压,目录结构如下。\n", - "\n", - "```\n", - "└─ VOCtrainval_11-May-2012\n", - "   └── VOCdevkit\n", - "    └── VOC2012\n", - " ├── Annotations\n", - " ├── ImageSets\n", - " ├── JPEGImages\n", - " ├── SegmentationClass\n", - " └── SegmentationObject\n", - "```\n", - "\n", - "下面的样例通过`VOCDataset`接口加载VOC2012数据集,分别演示了将任务指定为分割(Segmentation)和检测(Detection)时的原始图像形状和目标形状。\n", - "\n", - "```python\n", - "import mindspore.dataset as ds\n", - "\n", - "DATA_DIR = \"VOCtrainval_11-May-2012/VOCdevkit/VOC2012/\"\n", - "\n", - "dataset = ds.VOCDataset(DATA_DIR, task=\"Segmentation\", usage=\"train\", num_samples=2, decode=True, shuffle=False)\n", - "\n", - "print(\"[Segmentation]:\")\n", - "for data in dataset.create_dict_iterator():\n", - " print(\"image shape:\", data[\"image\"].shape)\n", - " print(\"target shape:\", data[\"target\"].shape)\n", - "\n", - "dataset = ds.VOCDataset(DATA_DIR, 
task=\"Detection\", usage=\"train\", num_samples=1, decode=True, shuffle=False)\n", - "\n", - "print(\"[Detection]:\")\n", - "for data in dataset.create_dict_iterator():\n", - " print(\"image shape:\", data[\"image\"].shape)\n", - " print(\"bbox shape:\", data[\"bbox\"].shape)\n", - "```\n", - "\n", - "输出结果:\n", - "\n", - "```text\n", - "[Segmentation]:\n", - "image shape: (281, 500, 3)\n", - "target shape: (281, 500, 3)\n", - "image shape: (375, 500, 3)\n", - "target shape: (375, 500, 3)\n", - "[Detection]:\n", - "image shape: (442, 500, 3)\n", - "bbox shape: (2, 4)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### COCO数据集\n", - "\n", - "COCO数据集有多个版本,此处以COCO2017的验证数据集为例。下载COCO2017的[验证集](http://images.cocodataset.org/zips/val2017.zip)、[检测任务标注](http://images.cocodataset.org/annotations/annotations_trainval2017.zip)和[全景分割任务标注](http://images.cocodataset.org/annotations/panoptic_annotations_trainval2017.zip)并解压,只取其中的验证集部分,按以下目录结构存放。\n", - "\n", - "```\n", - "└─ COCO\n", - " ├── val2017\n", - "   └── annotations\n", - " ├── instances_val2017.json\n", - " ├── panoptic_val2017.json\n", - "    └── person_keypoints_val2017.json\n", - "```\n", - "\n", - "下面的样例通过`CocoDataset`接口加载COCO2017数据集,分别演示了将任务指定为目标检测(Detection)、背景分割(Stuff)、关键点检测(Keypoint)和全景分割(Panoptic)时获取到的不同数据。\n", - "\n", - "```python\n", - "import mindspore.dataset as ds\n", - "\n", - "DATA_DIR = \"COCO/val2017/\"\n", - "ANNOTATION_FILE = \"COCO/annotations/instances_val2017.json\"\n", - "KEYPOINT_FILE = \"COCO/annotations/person_keypoints_val2017.json\"\n", - "PANOPTIC_FILE = \"COCO/annotations/panoptic_val2017.json\"\n", - "\n", - "dataset = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task=\"Detection\", num_samples=1)\n", - "for data in dataset.create_dict_iterator():\n", - " print(\"Detection:\", data.keys())\n", - "\n", - "dataset = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task=\"Stuff\", num_samples=1)\n", - "for data in 
dataset.create_dict_iterator():\n", - " print(\"Stuff:\", data.keys())\n", - "\n", - "dataset = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task=\"Keypoint\", num_samples=1)\n", - "for data in dataset.create_dict_iterator():\n", - " print(\"Keypoint:\", data.keys())\n", - "\n", - "dataset = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task=\"Panoptic\", num_samples=1)\n", - "for data in dataset.create_dict_iterator():\n", - " print(\"Panoptic:\", data.keys())\n", - "```\n", - "\n", - "输出结果:\n", - "\n", - "```text\n", - "Detection: dict_keys(['image', 'bbox', 'category_id', 'iscrowd'])\n", - "Stuff: dict_keys(['image', 'segmentation', 'iscrowd'])\n", - "Keypoint: dict_keys(['image', 'keypoints', 'num_keypoints'])\n", - "Panoptic: dict_keys(['image', 'bbox', 'category_id', 'iscrowd', 'area'])\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 特定格式数据集加载\n", - "\n", - "下面将介绍几种特定格式数据集文件的加载方式。\n", - "\n", - "### MindRecord数据格式\n", - "\n", - "MindRecord是MindSpore定义的一种数据格式,使用MindRecord能够获得更好的性能提升。\n", - "\n", - "> 阅读[数据格式转换](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_conversion.html)章节,了解如何将数据集转化为MindSpore数据格式。\n", - "\n", - "执行本例之前需下载对应的测试数据`test_mindrecord.zip`并解压到指定位置,执行如下命令:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/mindspore_dataset_loading/\n", - "├── test.mindrecord\n", - "└── test.mindrecord.db\n", - "\n", - "0 directories, 2 files\n" - ] - } - ], - "source": [ - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/test_mindrecord.zip\n", - "!unzip -o ./test_mindrecord.zip -d ./datasets/mindspore_dataset_loading/\n", - "!tree ./datasets/mindspore_dataset_loading/" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下面的样例通过`MindDataset`接口加载MindRecord文件,并展示已加载数据的标签。" - ] - }, - { - 
"cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "dict_keys(['chinese', 'english'])\n", - "dict_keys(['chinese', 'english'])\n", - "dict_keys(['chinese', 'english'])\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "DATA_FILE = [\"./datasets/mindspore_dataset_loading/test.mindrecord\"]\n", - "mindrecord_dataset = ds.MindDataset(DATA_FILE)\n", - "\n", - "for data in mindrecord_dataset.create_dict_iterator(output_numpy=True):\n", - " print(data.keys())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Manifest数据格式\n", - "\n", - "Manifest是华为ModelArts支持的数据格式文件,详细说明请参见[Manifest文档](https://support.huaweicloud.com/engineers-modelarts/modelarts_23_0009.html)。\n", - "\n", - "本次示例需下载测试数据`test_manifest.zip`并将其解压到指定位置,执行如下命令:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/mindspore_dataset_loading/test_manifest/\n", - "├── eval\n", - "│   ├── 1.JPEG\n", - "│   └── 2.JPEG\n", - "├── test_manifest.json\n", - "└── train\n", - " ├── 1.JPEG\n", - " └── 2.JPEG\n", - "\n", - "2 directories, 5 files\n" - ] - } - ], - "source": [ - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/test_manifest.zip\n", - "!unzip -o ./test_manifest.zip -d ./datasets/mindspore_dataset_loading/test_manifest/\n", - "!tree ./datasets/mindspore_dataset_loading/test_manifest/" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下面的样例通过`ManifestDataset`接口加载Manifest文件`test_manifest.json`,并展示已加载数据的标签。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0\n", - "1\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "DATA_FILE = 
\"./datasets/mindspore_dataset_loading/test_manifest/test_manifest.json\"\n", - "manifest_dataset = ds.ManifestDataset(DATA_FILE)\n", - "\n", - "for data in manifest_dataset.create_dict_iterator():\n", - " print(data[\"label\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### TFRecord数据格式\n", - "\n", - "TFRecord是TensorFlow定义的一种二进制数据文件格式。\n", - "\n", - "下面的样例通过`TFRecordDataset`接口加载TFRecord文件,并介绍了两种不同的数据集格式设定方案。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载`tfrecord`测试数据`test_tftext.zip`并解压到指定位置,执行如下命令:" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/mindspore_dataset_loading/test_tfrecord/\n", - "└── test_tftext.tfrecord\n", - "\n", - "0 directories, 1 file\n" - ] - } - ], - "source": [ - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/test_tftext.zip\n", - "!unzip -o ./test_tftext.zip -d ./datasets/mindspore_dataset_loading/test_tfrecord/\n", - "!tree ./datasets/mindspore_dataset_loading/test_tfrecord/" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 传入数据集路径或TFRecord文件列表,本例使用`test_tftext.tfrecord`,创建`TFRecordDataset`对象。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "dict_keys(['chinese', 'line', 'words'])\n", - "dict_keys(['chinese', 'line', 'words'])\n", - "dict_keys(['chinese', 'line', 'words'])\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "DATA_FILE = \"./datasets/mindspore_dataset_loading/test_tfrecord/test_tftext.tfrecord\"\n", - "tfrecord_dataset = ds.TFRecordDataset(DATA_FILE)\n", - "\n", - "for tf_data in tfrecord_dataset.create_dict_iterator():\n", - " print(tf_data.keys())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
用户可以通过编写Schema文件或创建Schema对象,设定数据集格式及特征。\n", - "\n", - " - 编写Schema文件\n", - "\n", - " 将数据集格式和特征按JSON格式写入Schema文件。\n", - " \n", - " - `columns`:列信息字段,需要根据数据集的实际列名定义。上面的示例中,数据集有三组数据,其列均为`chinese`、`line`和`words`。\n", - "\n", - " 然后在创建`TFRecordDataset`时将Schema文件路径传入。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "dict_values([Tensor(shape=[57], dtype=UInt8, value= [230, 177, 159, 229, 183, 158, 229, 184, 130, 233, 149, 191, 230, 177, 159, 229, 164, 167, 230, 161, 165, 229, 143, 130, \n", - " 229, 138, 160, 228, 186, 134, 233, 149, 191, 230, 177, 159, 229, 164, 167, 230, 161, 165, 231, 154, 132, 233, 128, 154, \n", - " 232, 189, 166, 228, 187, 170, 229, 188, 143]), Tensor(shape=[22], dtype=Int8, value= [ 71, 111, 111, 100, 32, 108, 117, 99, 107, 32, 116, 111, 32, 101, 118, 101, 114, 121, 111, 110, 101, 46]), Tensor(shape=[32], dtype=UInt8, value= [229, 165, 179, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 101, 118, 101, 114, 121, 111, 110, 101, \n", - " 99, 32, 32, 32, 32, 32, 32, 32])])\n", - "dict_values([Tensor(shape=[12], dtype=UInt8, value= [231, 148, 183, 233, 187, 152, 229, 165, 179, 230, 179, 170]), Tensor(shape=[19], dtype=Int8, value= [ 66, 101, 32, 104, 97, 112, 112, 121, 32, 101, 118, 101, 114, 121, 32, 100, 97, 121, 46]), Tensor(shape=[20], dtype=UInt8, value= [ 66, 101, 32, 32, 32, 104, 97, 112, 112, 121, 100, 97, 121, 32, 32, 98, 32, 32, 32, 32])])\n", - "dict_values([Tensor(shape=[48], dtype=UInt8, value= [228, 187, 138, 229, 164, 169, 229, 164, 169, 230, 176, 148, 229, 164, 170, 229, 165, 189, 228, 186, 134, 230, 136, 145, \n", - " 228, 187, 172, 228, 184, 128, 232, 181, 183, 229, 142, 187, 229, 164, 150, 233, 157, 162, 231, 142, 169, 229, 144, 167\n", - " ]), Tensor(shape=[20], dtype=Int8, value= [ 84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 116, 101, 120, 116, 32, 102, 105, 108, 101, 46]), Tensor(shape=[16], dtype=UInt8, value= [ 
84, 104, 105, 115, 116, 101, 120, 116, 102, 105, 108, 101, 97, 32, 32, 32])])\n" - ] - } - ], - "source": [ - "import os\n", - "import json\n", - "\n", - "data_json = {\n", - " \"columns\": {\n", - " \"chinese\": {\n", - " \"type\": \"uint8\",\n", - " \"rank\": 1\n", - " },\n", - " \"line\" : {\n", - " \"type\": \"int8\",\n", - " \"rank\": 1\n", - " },\n", - " \"words\" : {\n", - " \"type\": \"uint8\",\n", - " \"rank\": 0\n", - " }\n", - " }\n", - " }\n", - "\n", - "if not os.path.exists(\"dataset_schema_path\"):\n", - " os.mkdir(\"dataset_schema_path\")\n", - "SCHEMA_DIR = \"dataset_schema_path/schema.json\"\n", - "with open(SCHEMA_DIR, \"w\") as f:\n", - " json.dump(data_json,f,indent=4)\n", - " \n", - "tfrecord_dataset = ds.TFRecordDataset(DATA_FILE, schema=SCHEMA_DIR)\n", - "\n", - "for tf_data in tfrecord_dataset.create_dict_iterator():\n", - " print(tf_data.values())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 创建Schema对象\n", - "\n", - " 创建Schema对象,为其添加自定义字段,然后在创建数据集对象时传入。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'chinese': Tensor(shape=[12], dtype=UInt8, value= [231, 148, 183, 233, 187, 152, 229, 165, 179, 230, 179, 170]), 'line': Tensor(shape=[19], dtype=UInt8, value= [ 66, 101, 32, 104, 97, 112, 112, 121, 32, 101, 118, 101, 114, 121, 32, 100, 97, 121, 46])}\n", - "{'chinese': Tensor(shape=[48], dtype=UInt8, value= [228, 187, 138, 229, 164, 169, 229, 164, 169, 230, 176, 148, 229, 164, 170, 229, 165, 189, 228, 186, 134, 230, 136, 145, \n", - " 228, 187, 172, 228, 184, 128, 232, 181, 183, 229, 142, 187, 229, 164, 150, 233, 157, 162, 231, 142, 169, 229, 144, 167\n", - " ]), 'line': Tensor(shape=[20], dtype=UInt8, value= [ 84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 116, 101, 120, 116, 32, 102, 105, 108, 101, 46])}\n", - "{'chinese': Tensor(shape=[57], dtype=UInt8, value= [230, 177, 159, 
229, 183, 158, 229, 184, 130, 233, 149, 191, 230, 177, 159, 229, 164, 167, 230, 161, 165, 229, 143, 130, \n", - " 229, 138, 160, 228, 186, 134, 233, 149, 191, 230, 177, 159, 229, 164, 167, 230, 161, 165, 231, 154, 132, 233, 128, 154, \n", - " 232, 189, 166, 228, 187, 170, 229, 188, 143]), 'line': Tensor(shape=[22], dtype=UInt8, value= [ 71, 111, 111, 100, 32, 108, 117, 99, 107, 32, 116, 111, 32, 101, 118, 101, 114, 121, 111, 110, 101, 46])}\n" - ] - } - ], - "source": [ - "from mindspore import dtype as mstype\n", - "schema = ds.Schema()\n", - "schema.add_column('chinese', de_type=mstype.uint8)\n", - "schema.add_column('line', de_type=mstype.uint8)\n", - "tfrecord_dataset = ds.TFRecordDataset(DATA_FILE, schema=schema)\n", - "\n", - "for tf_data in tfrecord_dataset.create_dict_iterator():\n", - " print(tf_data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "对比上述中的编写和创建步骤,可以看出:\n", - "\n", - "|步骤|chinese|line|words\n", - "|:---|:---|:---|:---\n", - "| 编写|UInt8 |Int8|UInt8\n", - "| 创建|UInt8 |UInt8|\n", - "\n", - "\n", - "示例编写步骤中的`columns`中数据由`chinese`(UInt8)、`line`(Int8)和`words`(UInt8)变为了示例创建步骤中的`chinese`(UInt8)、`line`(UInt8),通过Schema对象,设定数据集的数据类型和特征,使得列中的数据类型和特征相应改变了。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### NumPy数据格式\n", - "\n", - "如果所有数据已经读入内存,可以直接使用`NumpySlicesDataset`类将其加载。\n", - "\n", - "下面的样例分别介绍了通过`NumpySlicesDataset`加载arrays数据、 list数据和dict数据的方式。\n", - "\n", - "- 加载NumPy arrays数据" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0.89286015 0.33197981] [0.33540785]\n", - "[0.82122912 0.04169663] [0.62251943]\n", - "[0.10765668 0.59505206] [0.43814143]\n", - "[0.52981736 0.41880743] [0.73588211]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "np.random.seed(6)\n", - "features, labels = np.random.sample((4, 2)), 
np.random.sample((4, 1))\n", - "\n", - "data = (features, labels)\n", - "dataset = ds.NumpySlicesDataset(data, column_names=[\"col1\", \"col2\"], shuffle=False)\n", - "\n", - "for np_arr_data in dataset:\n", - " print(np_arr_data[0], np_arr_data[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 加载Python list数据" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[1 2]\n", - "[3 4]\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "data1 = [[1, 2], [3, 4]]\n", - "\n", - "dataset = ds.NumpySlicesDataset(data1, column_names=[\"col1\"], shuffle=False)\n", - "\n", - "for np_list_data in dataset:\n", - " print(np_list_data[0])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 加载Python dict数据" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'col1': Tensor(shape=[], dtype=Int64, value= 1), 'col2': Tensor(shape=[], dtype=Int64, value= 3)}\n", - "{'col1': Tensor(shape=[], dtype=Int64, value= 2), 'col2': Tensor(shape=[], dtype=Int64, value= 4)}\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "data1 = {\"a\": [1, 2], \"b\": [3, 4]}\n", - "\n", - "dataset = ds.NumpySlicesDataset(data1, column_names=[\"col1\", \"col2\"], shuffle=False)\n", - "\n", - "for np_dic_data in dataset.create_dict_iterator():\n", - " print(np_dic_data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### CSV数据格式\n", - "\n", - "下面的样例通过`CSVDataset`加载CSV格式数据集文件,并展示了已加载数据的`keys`。\n", - "\n", - "下载测试数据`test_csv.zip`并解压到指定位置,执行如下命令:" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/mindspore_dataset_loading/test_csv/\n", - "├── 
test1.csv\n", - "└── test2.csv\n", - "\n", - "0 directories, 2 files\n" - ] - } - ], - "source": [ - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/test_csv.zip\n", - "!unzip -o ./test_csv.zip -d ./datasets/mindspore_dataset_loading/test_csv/\n", - "!tree ./datasets/mindspore_dataset_loading/test_csv/" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "传入数据集路径或CSV文件列表,Text格式数据集文件的加载方式与CSV文件类似。" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "dict_keys(['a', 'b', 'c', 'd'])\n", - "dict_keys(['a', 'b', 'c', 'd'])\n", - "dict_keys(['a', 'b', 'c', 'd'])\n", - "dict_keys(['a', 'b', 'c', 'd'])\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "DATA_FILE = [\"./datasets/mindspore_dataset_loading/test_csv/test1.csv\",\"./datasets/mindspore_dataset_loading/test_csv/test2.csv\"]\n", - "csv_dataset = ds.CSVDataset(DATA_FILE)\n", - "\n", - "for csv_data in csv_dataset.create_dict_iterator(output_numpy=True):\n", - " print(csv_data.keys())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 自定义数据集加载\n", - "\n", - "对于目前MindSpore不支持直接加载的数据集,可以通过构造`GeneratorDataset`对象实现自定义方式的加载,或者将其转换成MindRecord数据格式。下面分别展示几种不同的自定义数据集加载方法,为了便于对比,生成的随机数据保持相同。\n", - "\n", - "### 构造数据集生成函数\n", - "\n", - "构造生成函数定义数据返回方式,再使用此函数构建自定义数据集对象。此方法适用于简单场景。" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0.36510558 0.45120592] [0.78888122]\n", - "[0.49606035 0.07562207] [0.38068183]\n", - "[0.57176158 0.28963401] [0.16271622]\n", - "[0.30880446 0.37487617] [0.54738768]\n", - "[0.81585667 0.96883469] [0.77994068]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "np.random.seed(58)\n", - "data = 
np.random.sample((5, 2))\n", - "label = np.random.sample((5, 1))\n", - "\n", - "def GeneratorFunc():\n", - " for i in range(5):\n", - " yield (data[i], label[i])\n", - "\n", - "dataset = ds.GeneratorDataset(GeneratorFunc, [\"data\", \"label\"])\n", - "\n", - "for item in dataset.create_dict_iterator():\n", - " print(item[\"data\"], item[\"label\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 构造可迭代的数据集类\n", - "\n", - "构造数据集类实现`__iter__`和`__next__`方法,再使用此类的对象构建自定义数据集对象。相比于直接定义生成函数,使用数据集类能够实现更多的自定义功能。" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0.36510558 0.45120592] [0.78888122]\n", - "[0.49606035 0.07562207] [0.38068183]\n", - "[0.57176158 0.28963401] [0.16271622]\n", - "[0.30880446 0.37487617] [0.54738768]\n", - "[0.81585667 0.96883469] [0.77994068]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "class IterDatasetGenerator:\n", - " def __init__(self):\n", - " np.random.seed(58)\n", - " self.__index = 0\n", - " self.__data = np.random.sample((5, 2))\n", - " self.__label = np.random.sample((5, 1))\n", - "\n", - " def __next__(self):\n", - " if self.__index >= len(self.__data):\n", - " raise StopIteration\n", - " else:\n", - " item = (self.__data[self.__index], self.__label[self.__index])\n", - " self.__index += 1\n", - " return item\n", - "\n", - " def __iter__(self):\n", - " self.__index = 0\n", - " return self\n", - "\n", - " def __len__(self):\n", - " return len(self.__data)\n", - "\n", - "dataset_generator = IterDatasetGenerator()\n", - "dataset = ds.GeneratorDataset(dataset_generator, [\"data\", \"label\"], shuffle=False)\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(data[\"data\"], data[\"label\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 构造可随机访问的数据集类\n", - "\n", - 
"构造数据集类实现`__getitem__`方法,再使用此类的对象构建自定义数据集对象。此方法可以用于实现分布式训练。" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0.36510558 0.45120592] [0.78888122]\n", - "[0.49606035 0.07562207] [0.38068183]\n", - "[0.57176158 0.28963401] [0.16271622]\n", - "[0.30880446 0.37487617] [0.54738768]\n", - "[0.81585667 0.96883469] [0.77994068]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "class GetDatasetGenerator:\n", - " def __init__(self):\n", - " np.random.seed(58)\n", - " self.__data = np.random.sample((5, 2))\n", - " self.__label = np.random.sample((5, 1))\n", - "\n", - " def __getitem__(self, index):\n", - " return (self.__data[index], self.__label[index])\n", - "\n", - " def __len__(self):\n", - " return len(self.__data)\n", - "\n", - "dataset_generator = GetDatasetGenerator()\n", - "dataset = ds.GeneratorDataset(dataset_generator, [\"data\", \"label\"], shuffle=False)\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(data[\"data\"], data[\"label\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "如果用户希望实现分布式训练,则需要在此方式的基础上,在采样器类中实现`__iter__`方法,每次返回采样数据的索引。需要补充的代码如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0.36510558 0.45120592] [0.78888122]\n", - "[0.57176158 0.28963401] [0.16271622]\n", - "[0.81585667 0.96883469] [0.77994068]\n" - ] - } - ], - "source": [ - "import math\n", - "\n", - "class MySampler():\n", - " def __init__(self, dataset, local_rank, world_size):\n", - " self.__num_data = len(dataset)\n", - " self.__local_rank = local_rank\n", - " self.__world_size = world_size\n", - " self.samples_per_rank = int(math.ceil(self.__num_data / float(self.__world_size)))\n", - " self.total_num_samples = self.samples_per_rank * 
self.__world_size\n", - "\n", - " def __iter__(self):\n", - " indices = list(range(self.__num_data))\n", - " indices.extend(indices[:self.total_num_samples-len(indices)])\n", - " indices = indices[self.__local_rank:self.total_num_samples:self.__world_size]\n", - " return iter(indices)\n", - "\n", - " def __len__(self):\n", - " return self.samples_per_rank\n", - "\n", - "dataset_generator = GetDatasetGenerator()\n", - "sampler = MySampler(dataset_generator, local_rank=0, world_size=2)\n", - "dataset = ds.GeneratorDataset(dataset_generator, [\"data\", \"label\"], shuffle=False, sampler=sampler)\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(data[\"data\"], data[\"label\"])" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/dtype.ipynb b/docs/programming_guide/source_zh_cn/dtype.ipynb deleted file mode 100644 index f6c1158f82a6564c530e4f9bc149ac2f201cd26a..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/dtype.ipynb +++ /dev/null @@ -1,113 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# dtype\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/dtype.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_dtype.ipynb) 
[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9kdHlwZS5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "MindSpore张量支持不同的数据类型,包含`int8`、`int16`、`int32`、`int64`、`uint8`、`uint16`、`uint32`、`uint64`、`float16`、`float32`、`float64`、`bool_`,与NumPy的数据类型一一对应。\n", - "\n", - "在MindSpore的运算处理流程中,Python中的`int`数会被转换为定义的`int64`类型,`float`数会被转换为定义的`float32`类型。\n", - "\n", - "详细的类型支持情况请参考:。\n", - "\n", - "以下代码,打印MindSpore的数据类型int32。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Int32\n" - ] - } - ], - "source": [ - "from mindspore import dtype as mstype\n", - "\n", - "data_type = mstype.int32\n", - "print(data_type)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据类型转换接口\n", - "\n", - "MindSpore提供了以下几个接口,实现与NumPy数据类型和Python内置的数据类型间的转换。\n", - "\n", - "- `dtype_to_nptype`:将MindSpore的数据类型转换为NumPy对应的数据类型。\n", - "\n", - "- `dtype_to_pytype`:将MindSpore的数据类型转换为Python对应的内置数据类型。\n", - "\n", - "- `pytype_to_dtype`:将Python内置的数据类型转换为MindSpore对应的数据类型。\n", - "\n", - "以下代码实现了不同数据类型间的转换,并打印转换后的类型。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Int64\n", - "\n" - ] - } - ], - "source": [ - "from mindspore import dtype as mstype\n", - "\n", - "np_type = mstype.dtype_to_nptype(mstype.int32)\n", - "ms_type = mstype.pytype_to_dtype(int)\n", - "py_type = mstype.dtype_to_pytype(mstype.float64)\n", - "\n", - "print(np_type)\n", - "print(ms_type)\n", - "print(py_type)" - 
] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/extension.rst b/docs/programming_guide/source_zh_cn/extension.rst deleted file mode 100644 index ffba7b0682c05c45a45ee9f9784935b35e874b33..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/extension.rst +++ /dev/null @@ -1,7 +0,0 @@ -功能扩展 -=========== - -.. toctree:: - :maxdepth: 1 - - probability \ No newline at end of file diff --git a/docs/programming_guide/source_zh_cn/images/api_structure.png b/docs/programming_guide/source_zh_cn/images/api_structure.png deleted file mode 100644 index 5af4744e922b9ddadbe60832af3f62c104412233..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/api_structure.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/api_structure.pptx b/docs/programming_guide/source_zh_cn/images/api_structure.pptx deleted file mode 100644 index 86ac94a403b260734daa6628078f81fc38da8978..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/api_structure.pptx and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/batch.png b/docs/programming_guide/source_zh_cn/images/batch.png deleted file mode 100644 index ee974652d361b4085033a08789a036d331c2bec8..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/batch.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/cache_dataset.png b/docs/programming_guide/source_zh_cn/images/cache_dataset.png 
deleted file mode 100644 index 665ed25a9a721c74c7c12bdfa5650f6ef792bf81..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/cache_dataset.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/cache_pipeline_zh.eddx b/docs/programming_guide/source_zh_cn/images/cache_pipeline_zh.eddx deleted file mode 100644 index 6e58671ea07a000641eccc20584e2ef9b1f9e242..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/cache_pipeline_zh.eddx and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/cache_processed_data.png b/docs/programming_guide/source_zh_cn/images/cache_processed_data.png deleted file mode 100644 index 11327cba87a190137070b4823546407614ff3c92..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/cache_processed_data.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/concat.png b/docs/programming_guide/source_zh_cn/images/concat.png deleted file mode 100644 index 7a28ff7826cc2a1c6334e2ff15eeaaffd6b67c06..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/concat.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/map.png b/docs/programming_guide/source_zh_cn/images/map.png deleted file mode 100644 index b92a44ffb75c47509b6e720bbd2c6ed09d634492..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/map.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/repeat.png b/docs/programming_guide/source_zh_cn/images/repeat.png deleted file mode 100644 index 9717ec81c52f23615e236d27e0f7c96bd6ac1155..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/repeat.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/shuffle.png 
b/docs/programming_guide/source_zh_cn/images/shuffle.png deleted file mode 100644 index 4464cefad03beefac6bb413da22eebeffaf8fe41..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/shuffle.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/transform_not_recommended.png b/docs/programming_guide/source_zh_cn/images/transform_not_recommended.png deleted file mode 100644 index e316dd4ebab3953a91ebb9e705c9576867e0cc6d..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/transform_not_recommended.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/transform_recommended_1.png b/docs/programming_guide/source_zh_cn/images/transform_recommended_1.png deleted file mode 100644 index e716ad9935b44f737c6638e85cfca5959e0fadad..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/transform_recommended_1.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/transform_recommended_2.png b/docs/programming_guide/source_zh_cn/images/transform_recommended_2.png deleted file mode 100644 index e2bdadceb0373d11886637e6e3315dcb37c0427a..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/transform_recommended_2.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/transform_recommended_3.png b/docs/programming_guide/source_zh_cn/images/transform_recommended_3.png deleted file mode 100644 index f858f5153ded11b14a234c4b76afc76d68476cb7..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/transform_recommended_3.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/images/zip.png b/docs/programming_guide/source_zh_cn/images/zip.png deleted file mode 100644 index 
f0052435898ae6a3546dfea9c50711ab3f303699..0000000000000000000000000000000000000000 Binary files a/docs/programming_guide/source_zh_cn/images/zip.png and /dev/null differ diff --git a/docs/programming_guide/source_zh_cn/index.rst b/docs/programming_guide/source_zh_cn/index.rst deleted file mode 100644 index d0f59747cab3a4f131c49c51f792f09356b5105d..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/index.rst +++ /dev/null @@ -1,79 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 11:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -MindSpore编程指南 -=================== - -.. toctree:: - :maxdepth: 1 - - api_structure - -.. toctree:: - :maxdepth: 1 - :caption: 数据类型 - - dtype - tensor - -.. toctree:: - :maxdepth: 1 - :caption: 计算组件 - - operators - parameter - cell - network_component - initializer - numpy - -.. toctree:: - :maxdepth: 1 - :caption: 数据管道 - - dataset_loading - sampler - pipeline - augmentation - tokenizer - dataset_conversion - auto_augmentation - cache - -.. toctree:: - :maxdepth: 1 - :caption: 执行管理 - - context - run - callback - -.. toctree:: - :maxdepth: 1 - :caption: 分布式并行 - - auto_parallel - -.. toctree:: - :maxdepth: 1 - :caption: 进阶用法 - - train - infer - advanced_usage_of_checkpoint - performance_optimization - customized - security_and_privacy - extension - -.. 
toctree:: - :maxdepth: 1 - :caption: 规格说明 - - 基准性能 - network_list - operator_list - syntax_list - 环境变量 diff --git a/docs/programming_guide/source_zh_cn/infer.md b/docs/programming_guide/source_zh_cn/infer.md deleted file mode 100644 index d60e673261820b204ba01b3d3bfa61e80ed803d3..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/infer.md +++ /dev/null @@ -1,25 +0,0 @@ -# 推理 - - - -- [推理](#推理) - - - - - -基于MindSpore训练后的模型,支持在Ascend 910 AI处理器、Ascend 310 AI处理器、GPU、CPU、端侧等多种不同的平台上执行推理。使用方法可参考如下教程: - -- [在Ascend 910 AI处理器上执行推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_910.html) -- [在Ascend 310 AI处理器上执行推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_310.html) -- [在GPU上执行推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_gpu.html) -- [在CPU上执行推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_cpu.html) -- [在端侧执行推理](https://www.mindspore.cn/tutorial/lite/zh-CN/master/quick_start/quick_start.html) - -同时,MindSpore提供了一个轻量级、高性能的服务模块,称为MindSpore Serving,可帮助MindSpore开发者在生产环境中高效部署在线推理服务。使用方法可参考如下教程: - -- [部署推理服务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_example.html) -- [基于MindSpore Serving部署分布式推理服务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_distributed_example.html) -- [基于gRPC接口访问MindSpore Serving服务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_grpc.html) -- [基于RESTful接口访问MindSpore Serving服务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_restful.html) -- [通过配置模型提供Servable](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_model.html) diff --git a/docs/programming_guide/source_zh_cn/initializer.ipynb b/docs/programming_guide/source_zh_cn/initializer.ipynb deleted file mode 100644 index 3e7a31f10194cfa123f669e4c0ec36831e2b6862..0000000000000000000000000000000000000000 --- 
a/docs/programming_guide/source_zh_cn/initializer.ipynb +++ /dev/null @@ -1,484 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 网络参数的初始化\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/initializer.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_initializer.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9pbml0aWFsaXplci5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "MindSpore提供了权重初始化模块,用户可以通过封装算子和initializer方法来调用字符串、Initializer子类或自定义Tensor等方式完成对网络参数进行初始化。Initializer类是MindSpore中用于进行初始化的基本数据结构,其子类包含了几种不同类型的数据分布(Zero,One,XavierUniform,HeUniform,HeNormal,Constant,Uniform,Normal,TruncatedNormal)。下面针对封装算子和initializer方法两种参数初始化模式进行详细介绍。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用封装算子对参数初始化 \n", - "MindSpore提供了多种参数初始化的方式,并在部分算子中封装了参数初始化的功能。本节将介绍带有参数初始化功能的算子对参数进行初始化的方法,以`Conv2d`算子为例,分别介绍以字符串,`Initializer`子类和自定义`Tensor`等方式对网络中的参数进行初始化,以下代码示例中均以`Initializer`的子类`Normal`为例,代码示例中`Normal`均可替换成`Initializer`子类中任何一个。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 字符串 \n", - "使用字符串对网络参数进行初始化,字符串的内容需要与`Initializer`子类的名称保持一致,使用字符串方式进行初始化将使用`Initializer`子类中的默认参数,例如使用字符串`Normal`等同于使用`Initializer`的子类`Normal()`,代码样例如下:" - ] - }, - { - "cell_type": "code", 
- "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[[[ 3.10382620e-02 4.38603461e-02 4.38603461e-02 ... 4.38603461e-02\n", - " 4.38603461e-02 1.38719045e-02]\n", - " [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02\n", - " 3.54298912e-02 -5.54019120e-03]\n", - " [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02\n", - " 3.54298912e-02 -5.54019120e-03]\n", - " ...\n", - " [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02\n", - " 3.54298912e-02 -5.54019120e-03]\n", - " [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02\n", - " 3.54298912e-02 -5.54019120e-03]\n", - " [ 9.66199022e-03 1.24104535e-02 1.24104535e-02 ... 1.24104535e-02\n", - " 1.24104535e-02 -1.38977719e-02]]\n", - "\n", - " ...\n", - "\n", - " [[ 3.98553275e-02 -1.35465711e-03 -1.35465711e-03 ... -1.35465711e-03\n", - " -1.35465711e-03 -1.00310734e-02]\n", - " [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02\n", - " -3.60766202e-02 -2.95619294e-02]\n", - " [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02\n", - " -3.60766202e-02 -2.95619294e-02]\n", - " ...\n", - " [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02\n", - " -3.60766202e-02 -2.95619294e-02]\n", - " [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02\n", - " -3.60766202e-02 -2.95619294e-02]\n", - " [ 1.33139016e-02 6.74417242e-05 6.74417242e-05 ... 
6.74417242e-05\n", - " 6.74417242e-05 -2.27325838e-02]]]]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.nn as nn\n", - "from mindspore import Tensor\n", - "from mindspore.common import set_seed\n", - "\n", - "set_seed(1)\n", - "\n", - "input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32))\n", - "net = nn.Conv2d(3, 64, 3, weight_init='Normal')\n", - "output = net(input_data)\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Initializer子类 \n", - "使用`Initializer`子类对网络参数进行初始化,与使用字符串对参数进行初始化的效果类似,不同的是使用字符串进行参数初始化是使用`Initializer`子类的默认参数,如要使用`Initializer`子类中的参数,就必须使用`Initializer`子类的方式对参数进行初始化,以`Normal(0.2)`为例,代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[[[ 6.2076533e-01 8.7720710e-01 8.7720710e-01 ... 8.7720710e-01\n", - " 8.7720710e-01 2.7743810e-01]\n", - " [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01\n", - " 7.0859784e-01 -1.1080378e-01]\n", - " [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01\n", - " 7.0859784e-01 -1.1080378e-01]\n", - " ...\n", - " [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01\n", - " 7.0859784e-01 -1.1080378e-01]\n", - " [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01\n", - " 7.0859784e-01 -1.1080378e-01]\n", - " [ 1.9323981e-01 2.4820906e-01 2.4820906e-01 ... 2.4820906e-01\n", - " 2.4820906e-01 -2.7795550e-01]]\n", - "\n", - " ...\n", - "\n", - " [[ 7.9710668e-01 -2.7093157e-02 -2.7093157e-02 ... -2.7093157e-02\n", - " -2.7093157e-02 -2.0062150e-01]\n", - " [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01\n", - " -7.2153252e-01 -5.9123868e-01]\n", - " [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01\n", - " -7.2153252e-01 -5.9123868e-01]\n", - " ...\n", - " [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... 
-7.2153252e-01\n", - " -7.2153252e-01 -5.9123868e-01]\n", - " [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01\n", - " -7.2153252e-01 -5.9123868e-01]\n", - " [ 2.6627803e-01 1.3488382e-03 1.3488382e-03 ... 1.3488382e-03\n", - " 1.3488382e-03 -4.5465171e-01]]]]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.nn as nn\n", - "from mindspore import Tensor\n", - "from mindspore.common import set_seed\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "set_seed(1)\n", - "\n", - "input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32))\n", - "net = nn.Conv2d(3, 64, 3, weight_init=Normal(0.2))\n", - "output = net(input_data)\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 自定义的Tensor \n", - "除上述两种初始化方法外,当网络要使用MindSpore中没有的数据类型对参数进行初始化,用户可以通过自定义`Tensor`的方式来对参数进行初始化,代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[[[12. 18. 18. ... 18. 18. 12.]\n", - " [18. 27. 27. ... 27. 27. 18.]\n", - " [18. 27. 27. ... 27. 27. 18.]\n", - " ...\n", - " [18. 27. 27. ... 27. 27. 18.]\n", - " [18. 27. 27. ... 27. 27. 18.]\n", - " [12. 18. 18. ... 18. 18. 12.]]\n", - "\n", - " ...\n", - "\n", - " [[12. 18. 18. ... 18. 18. 12.]\n", - " [18. 27. 27. ... 27. 27. 18.]\n", - " [18. 27. 27. ... 27. 27. 18.]\n", - " ...\n", - " [18. 27. 27. ... 27. 27. 18.]\n", - " [18. 27. 27. ... 27. 27. 18.]\n", - " [12. 18. 18. ... 18. 18. 
12.]]]]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.nn as nn\n", - "from mindspore import Tensor\n", - "from mindspore import dtype as mstype\n", - "\n", - "weight = Tensor(np.ones([64, 3, 3, 3]), dtype=mstype.float32)\n", - "input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32))\n", - "net = nn.Conv2d(3, 64, 3, weight_init=weight)\n", - "output = net(input_data)\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用initializer方法对参数初始化\n", - "\n", - "在上述代码样例中,给出了如何在网络中进行参数初始化的方法,如在网络中使用nn层封装`Conv2d`算子,参数`weight_init`作为要初始化的数据类型传入`Conv2d`算子,算子会在初始化时通过调用`Parameter`类,进而调用封装在`Parameter`类中的`initializer`方法来完成对参数的初始化。然而有一些算子并没有像`Conv2d`那样在内部对参数初始化的功能进行封装,如`Conv3d`算子的权重就是作为参数传入`Conv3d`算子,此时就需要手动的定义权重的初始化。\n", - "\n", - "当对参数进行初始化时,可以使用`initializer`方法调用`Initializer`子类中不同的数据类型来对参数进行初始化,进而产生不同类型的数据。\n", - "\n", - "使用initializer进行参数初始化时,支持传入的参数有`init`、`shape`、`dtype`:\n", - "\n", - "- `init`:支持传入`Tensor`、 `str`、 `Initializer的子类`。\n", - "\n", - "- `shape`:支持传入`list`、 `tuple`、 `int`。\n", - "\n", - "- `dtype`:支持传入`mindspore.dtype`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### init参数为Tensor" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "代码样例如下:" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T02:59:50.340750Z", - "start_time": "2021-02-03T02:59:49.571048Z" - } - }, - "source": [ - "```python\n", - "import numpy as np\n", - "from mindspore import Tensor\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common import set_seed\n", - "from mindspore.common.initializer import initializer\n", - "from mindspore.ops.operations import nn_ops as nps\n", - "\n", - "set_seed(1)\n", - "\n", - "input_data = Tensor(np.ones([16, 3, 10, 32, 32]), dtype=mstype.float32)\n", - "weight_init = Tensor(np.ones([32, 3, 4, 3, 3]), dtype=mstype.float32)\n", - "weight = 
initializer(weight_init, shape=[32, 3, 4, 3, 3])\n", - "conv3d = nps.Conv3D(out_channel=32, kernel_size=(4, 3, 3))\n", - "output = conv3d(input_data, weight)\n", - "print(output)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "输出如下:\n", - "\n", - "```python\n", - "[[[[[108 108 108 ... 108 108 108]\n", - " [108 108 108 ... 108 108 108]\n", - " [108 108 108 ... 108 108 108]\n", - " ...\n", - " [108 108 108 ... 108 108 108]\n", - " [108 108 108 ... 108 108 108]\n", - " [108 108 108 ... 108 108 108]]\n", - " ...\n", - " [[108 108 108 ... 108 108 108]\n", - " [108 108 108 ... 108 108 108]\n", - " [108 108 108 ... 108 108 108]\n", - " ...\n", - " [108 108 108 ... 108 108 108]\n", - " [108 108 108 ... 108 108 108]\n", - " [108 108 108 ... 108 108 108]]]]]\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### init参数为str" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "代码样例如下:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```python\n", - "import numpy as np\n", - "from mindspore import Tensor\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common import set_seed\n", - "from mindspore.common.initializer import initializer\n", - "from mindspore.ops.operations import nn_ops as nps\n", - "\n", - "set_seed(1)\n", - "\n", - "input_data = Tensor(np.ones([16, 3, 10, 32, 32]), dtype=mstype.float32)\n", - "weight = initializer('Normal', shape=[32, 3, 4, 3, 3], dtype=mstype.float32)\n", - "conv3d = nps.Conv3D(out_channel=32, kernel_size=(4, 3, 3))\n", - "output = conv3d(input_data, weight)\n", - "print(output)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "输出如下:\n", - "\n", - "```python\n", - "[[[[[0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]]\n", - " ...\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]]\n", - " ...\n", - " [[0 0 0 ... 
0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]]\n", - " ...\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]]]]]\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### init参数为Initializer子类" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "代码样例如下:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```python\n", - "import numpy as np\n", - "from mindspore import Tensor\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common import set_seed\n", - "from mindspore.ops.operations import nn_ops as nps\n", - "from mindspore.common.initializer import Normal, initializer\n", - "\n", - "set_seed(1)\n", - "\n", - "input_data = Tensor(np.ones([16, 3, 10, 32, 32]), dtype=mstype.float32)\n", - "weight = initializer(Normal(0.2), shape=[32, 3, 4, 3, 3], dtype=mstype.float32)\n", - "conv3d = nps.Conv3D(out_channel=32, kernel_size=(4, 3, 3))\n", - "output = conv3d(input_data, weight)\n", - "print(output)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```python\n", - "[[[[[0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]]\n", - " ...\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]]\n", - " ...\n", - " [[0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]]\n", - " ...\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 
0 0 0]]]]]\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 在Parameter中的应用" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[-0.3305102 1.0412874 2.0412874 3.0412874]\n", - " [ 4.0412874 4.9479127 5.9479127 6.9479127]\n", - " [ 7.947912 9.063009 10.063009 11.063009 ]\n", - " [12.063009 13.536987 14.536987 14.857441 ]\n", - " [15.751231 17.073082 17.808317 19.364822 ]]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common import set_seed\n", - "from mindspore.ops import operations as ops\n", - "from mindspore import Tensor, Parameter, context\n", - "from mindspore.common.initializer import Normal, initializer\n", - "\n", - "set_seed(1)\n", - "\n", - "weight1 = Parameter(initializer('Normal', [5, 4], mstype.float32), name=\"w1\")\n", - "weight2 = Parameter(initializer(Normal(0.2), [5, 4], mstype.float32), name=\"w2\")\n", - "input_data = Tensor(np.arange(20).reshape(5, 4), dtype=mstype.float32)\n", - "net = ops.Add()\n", - "output = net(input_data, weight1)\n", - "output = net(output, weight2)\n", - "print(output)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/network_component.ipynb b/docs/programming_guide/source_zh_cn/network_component.ipynb deleted file mode 100644 index 
75cc8f822ee3f056d16ff430ff241ad4fdfad3b2..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/network_component.ipynb +++ /dev/null @@ -1,288 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 常用网络组件\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/network_component.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_network_component.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9uZXR3b3JrX2NvbXBvbmVudC5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "MindSpore封装了一些常用的网络组件,用于网络的训练、推理、求梯度和数据处理等操作。\n", - "\n", - "这些网络组件可以直接被用户使用,同样也会在`model.train`和`model.eval`等更高级的封装接口内部进行使用。\n", - "\n", - "本节内容将会介绍三个网络组件,分别是`GradOperation`、`WithLossCell`和`TrainOneStepCell`,将会从功能、用户使用和内部使用三个方面来进行介绍。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## GradOperation\n", - "\n", - "GradOperation组件用于生成输入函数的梯度,利用`get_all`、`get_by_list`和`sens_param`参数控制梯度的计算方式,细节内容详见[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.GradOperation.html)。\n", - "\n", - "GradOperation的使用实例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-09T02:41:47.549559Z", - "start_time": "2021-02-09T02:41:46.596650Z" - } - 
}, - "outputs": [ - { - "data": { - "text/plain": [ - "Tensor(shape=[2, 3], dtype=Float32, value=\n", - "[[1.41000009e+000, 1.60000002e+000, 6.59999943e+000],\n", - " [1.41000009e+000, 1.60000002e+000, 6.59999943e+000]])" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.nn as nn\n", - "from mindspore import Tensor, Parameter\n", - "from mindspore import dtype as mstype\n", - "import mindspore.ops as ops\n", - "\n", - "\n", - "class Net(nn.Cell):\n", - " def __init__(self):\n", - " super(Net, self).__init__()\n", - " self.matmul = ops.MatMul()\n", - " self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')\n", - " def construct(self, x, y):\n", - " x = x * self.z\n", - " out = self.matmul(x, y)\n", - " return out\n", - "\n", - "class GradNetWrtX(nn.Cell):\n", - " def __init__(self, net):\n", - " super(GradNetWrtX, self).__init__()\n", - " self.net = net\n", - " self.grad_op = ops.GradOperation()\n", - " def construct(self, x, y):\n", - " gradient_function = self.grad_op(self.net)\n", - " return gradient_function(x, y)\n", - "\n", - "x = Tensor([[0.5, 0.6, 0.4], [1.2, 1.3, 1.1]], dtype=mstype.float32)\n", - "y = Tensor([[0.01, 0.3, 1.1], [0.1, 0.2, 1.3], [2.1, 1.2, 3.3]], dtype=mstype.float32)\n", - "GradNetWrtX(Net())(x, y)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "上面的例子是计算`Net`相对与`x`的梯度值,首先需要定义网络`Net`作为`GradOperation`的输入,实例创建了包含梯度运算的`GradNetWrtX`。调用`GradNetWrtX`是将网络传入`GradOperation`生成梯度函数,将输入数据传入梯度函数中返回最终结果。\n", - "\n", - "MindSpore涉及梯度计算的其他组件,例如`WithGradCell`和`TrainOneStepCell`等,都用到了`GradOperation`, 感兴趣的读者可以查看这些接口的内部实现。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## WithLossCell\n", - "\n", - "`WithLossCell`本质上是一个包含损失函数的`Cell`,构造`WithLossCell`需要事先定义好网络和损失函数。\n", - "\n", - "下面通过一个实例来介绍其具体的使用, 首先需要构造一个网络,内容如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - 
"ExecuteTime": { - "end_time": "2021-02-09T02:41:47.564738Z", - "start_time": "2021-02-09T02:41:47.551810Z" - } - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "\n", - "import mindspore.context as context\n", - "import mindspore.nn as nn\n", - "from mindspore import Tensor\n", - "from mindspore.nn import TrainOneStepCell, WithLossCell\n", - "from mindspore.nn.optim import Momentum\n", - "import mindspore.ops as ops\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "\n", - "\n", - "class LeNet(nn.Cell):\n", - " def __init__(self):\n", - " super(LeNet, self).__init__()\n", - " self.relu = ops.ReLU()\n", - " self.batch_size = 32\n", - "\n", - " self.conv1 = nn.Conv2d(1, 6, kernel_size=5, stride=1, padding=0, has_bias=False, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, kernel_size=5, stride=1, padding=0, has_bias=False, pad_mode='valid')\n", - " self.pool = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.reshape = ops.Reshape()\n", - " self.fc1 = nn.Dense(400, 120)\n", - " self.fc2 = nn.Dense(120, 84)\n", - " self.fc3 = nn.Dense(84, 10)\n", - "\n", - " def construct(self, input_x):\n", - " output = self.conv1(input_x)\n", - " output = self.relu(output)\n", - " output = self.pool(output)\n", - " output = self.conv2(output)\n", - " output = self.relu(output)\n", - " output = self.pool(output)\n", - " output = self.reshape(output, (self.batch_size, -1))\n", - " output = self.fc1(output)\n", - " output = self.relu(output)\n", - " output = self.fc2(output)\n", - " output = self.relu(output)\n", - " output = self.fc3(output)\n", - " return output" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下面是`WithLossCell`的使用实例,分别定义好网络和损失函数,然后创建一个`WithLossCell`,传入输入数据和标签数据,`WithLossCell`内部根据网络和损失函数返回计算结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-09T02:41:47.802622Z", - "start_time": "2021-02-09T02:41:47.567396Z" - 
} - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+++++++++Loss+++++++++++++\n", - "2.302585\n" - ] - } - ], - "source": [ - "data = Tensor(np.ones([32, 1, 32, 32]).astype(np.float32) * 0.01)\n", - "label = Tensor(np.ones([32]).astype(np.int32))\n", - "net = LeNet()\n", - "criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - "net_with_criterion = WithLossCell(net, criterion)\n", - "loss = net_with_criterion(data, label)\n", - "print(\"+++++++++Loss+++++++++++++\")\n", - "print(loss)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## TrainOneStepCell\n", - "\n", - "`TrainOneStepCell`功能是执行网络的单步训练,返回每次训练结果后的loss结果。\n", - "\n", - "下面构造一个使用`TrainOneStepCell`接口进行网络训练的实例,其中`LeNet`和包名的导入代码和上个用例共用。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-09T02:41:48.319959Z", - "start_time": "2021-02-09T02:41:47.804721Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+++++++++result:0++++++++++++\n", - "2.302585\n", - "+++++++++result:1++++++++++++\n", - "2.2935712\n", - "+++++++++result:2++++++++++++\n", - "2.2764661\n", - "+++++++++result:3++++++++++++\n", - "2.2521412\n", - "+++++++++result:4++++++++++++\n", - "2.2214084\n" - ] - } - ], - "source": [ - "data = Tensor(np.ones([32, 1, 32, 32]).astype(np.float32) * 0.01)\n", - "label = Tensor(np.ones([32]).astype(np.int32))\n", - "net = LeNet()\n", - "learning_rate = 0.01\n", - "momentum = 0.9\n", - "\n", - "optimizer = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), learning_rate, momentum)\n", - "criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - "net_with_criterion = WithLossCell(net, criterion)\n", - "train_network = TrainOneStepCell(net_with_criterion, optimizer) # optimizer\n", - "for i in range(5):\n", - " train_network.set_train()\n", - " res = train_network(data, 
label)\n", - " print(f\"+++++++++result:{i}++++++++++++\")\n", - " print(res)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "用例中构造了优化器和一个`WithLossCell`的实例,然后传入`TrainOneStepCell`中初始化一个训练网络,用例循环五次,相当于网络训练了五次,并输出每次的loss结果,由结果可以看出每次训练后loss值在逐渐减小。\n", - "\n", - "后续内容会介绍MindSpore使用更加高级封装的接口,即`Model`类中的`train`方法训练模型,在其内部实现中会用到 `TrainOneStepCell`和`WithLossCell` 等许多网络组件,感兴趣的读者可以查看其内部实现。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/network_list.rst b/docs/programming_guide/source_zh_cn/network_list.rst deleted file mode 100644 index 0086283c5f999b6131593dd0be63ce852df01927..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/network_list.rst +++ /dev/null @@ -1,7 +0,0 @@ -网络支持 -=========== - -.. 
toctree:: - :maxdepth: 1 - - MindSpore网络支持 \ No newline at end of file diff --git a/docs/programming_guide/source_zh_cn/numpy.md b/docs/programming_guide/source_zh_cn/numpy.md deleted file mode 100644 index d441413f6c3a72766bbf12a7ff3af4d1d09fcb6b..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/numpy.md +++ /dev/null @@ -1,464 +0,0 @@ -# MindSpore NumPy函数 - - - -- [MindSpore NumPy函数使用](#mindspore-numpy函数使用) - - [概述](#概述) - - [算子介绍](#算子介绍) - - [张量生成](#张量生成) - - [生成具有相同元素的数组](#生成具有相同元素的数组) - - [生成具有某个范围内的数值的数组](#生成具有某个范围内的数值的数组) - - [生成特殊类型的数组](#生成特殊类型的数组) - - [张量操作](#张量操作) - - [数组维度变换](#数组维度变换) - - [数组分割](#数组分割) - - [数组拼接](#数组拼接) - - [逻辑运算](#逻辑运算) - - [数学运算](#数学运算) - - [加法](#加法) - - [矩阵乘法](#矩阵乘法) - - [求平均值](#求平均值) - - [指数](#指数) - - [MindSpore Numpy与MindSpore特性结合](#mindspore-numpy与mindspore特性结合) - - [ms_function使用示例](#ms_function使用示例) - - [GradOperation使用示例](#gradoperation使用示例) - - [mindspore.context使用示例](#mindsporecontext使用示例) - - [mindspore.numpy使用示例](#mindsporenumpy使用示例) - - - - - -## 概述 - -MindSpore NumPy工具包提供了一系列类NumPy接口。用户可以使用类NumPy语法在MindSpore上进行模型的搭建。 - -## 算子介绍 - -MindSpore Numpy具有四大功能模块:张量生成、张量操作、逻辑运算和其他常用数学运算。算子的具体相关信息可以参考[NumPy接口列表](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.numpy.html)。 - -### 张量生成 - -生成类算子用来生成和构建具有指定数值、类型和形状的数组(Tensor)。 - -构建数组代码示例: - -```python -import mindspore.numpy as np -import mindspore.ops as ops -input_x = np.array([1, 2, 3], np.float32) -print("input_x =", input_x) -print("type of input_x =", ops.typeof(input_x)) -``` - -输出如下: - -```python -input_x = [1. 2. 3.] -type of input_x = Tensor[Float32] -``` - -除了使用上述方法来创建外,也可以通过以下几种方式创建。 - -#### 生成具有相同元素的数组 - -生成具有相同元素的数组代码示例: - -```python -import mindspore.numpy as np -input_x = np.full((2, 3), 6, np.float32) -print(input_x) -``` - -输出如下: - -```python -[[6. 6. 6.] - [6. 6. 
6.]] -``` - -生成指定形状的全1数组,示例: - -```python -import mindspore.numpy as np -input_x = np.ones((2, 3), np.float32) -print(input_x) -``` - -输出如下: - -```python -[[1. 1. 1.] - [1. 1. 1.]] -``` - -#### 生成具有某个范围内的数值的数组 - -生成指定范围内的等差数组代码示例: - -```python -import mindspore.numpy as np -input_x = np.arange(0, 5, 1) -print(input_x) -``` - -输出如下: - -```python -[0 1 2 3 4] -``` - -#### 生成特殊类型的数组 - -生成给定对角线处下方元素为1,上方元素为0的矩阵,示例: - -```python -import mindspore.numpy as np -input_x = np.tri(3, 3, 1) -print(input_x) -``` - -输出如下: - -```python -[[1. 1. 0.] - [1. 1. 1.] - [1. 1. 1.]] -``` - -生成对角线为1,其他元素为0的二维矩阵,示例: - -```python -import mindspore.numpy as np -input_x = np.eye(2, 2) -print(input_x) -``` - -输出如下: - -```python -[[1. 0.] - [0. 1.]] -``` - -### 张量操作 - -变换类算子主要进行数组的维度变换,分割和拼接等。 - -#### 数组维度变换 - -矩阵转置,代码示例: - -```python -import mindspore.numpy as np -input_x = np.arange(10).reshape(5, 2) -output = np.transpose(input_x) -print(output) -``` - -输出如下: - -```python -[[0 2 4 6 8] - [1 3 5 7 9]] -``` - -交换指定轴,代码示例: - -```python -import mindspore.numpy as np -input_x = np.ones((1, 2, 3)) -output = np.swapaxes(input_x, 0, 1) -print(output.shape) -``` - -输出如下: - -```python -(2, 1, 3) -``` - -#### 数组分割 - -将输入数组平均切分为多个数组,代码示例: - -```python -import mindspore.numpy as np -input_x = np.arange(9) -output = np.split(input_x, 3) -print(output) -``` - -输出如下: - -```python -(Tensor(shape=[3], dtype=Int32, value= [0, 1, 2]), - Tensor(shape=[3], dtype=Int32, value= [3, 4, 5]), - Tensor(shape=[3], dtype=Int32, value= [6, 7, 8])) -``` - -#### 数组拼接 - -将两个数组按照指定轴进行拼接,代码示例: - -```python -import mindspore.numpy as np -input_x = np.arange(0, 5) -input_y = np.arange(10, 15) -output = np.concatenate((input_x, input_y), axis=0) -print(output) -``` - -输出如下: - -```python -[ 0 1 2 3 4 10 11 12 13 14] -``` - -### 逻辑运算 - -逻辑计算类算子主要进行逻辑运算。 - -相等(equal)和小于(less)计算代码示例如下: - -```python -import mindspore.numpy as np -input_x = np.arange(0, 5) -input_y = np.arange(0, 10, 2) -output = np.equal(input_x, input_y) 
-print("output of equal:", output) -output = np.less(input_x, input_y) -print("output of less:", output) -``` - -输出如下: - -```python -output of equal: [ True False False False False] -output of less: [False True True True True] -``` - -### 数学运算 - -数学计算类算子主要进行各类数学计算: -加减乘除乘方,以及指数、对数等常见函数等。 - -数学计算支持类似NumPy的广播特性。 - -#### 加法 - -以下代码实现了`input_x`和`input_y`两数组相加的操作: - -```python -import mindspore.numpy as np -input_x = np.full((3, 2), [1, 2]) -input_y = np.full((3, 2), [3, 4]) -output = np.add(input_x, input_y) -print(output) -``` - -输出如下: - -```python -[[4 6] - [4 6] - [4 6]] -``` - -#### 矩阵乘法 - -以下代码实现了`input_x`和`input_y`两矩阵相乘的操作: - -```python -import mindspore.numpy as np -input_x = np.arange(2*3).reshape(2, 3).astype('float32') -input_y = np.arange(3*4).reshape(3, 4).astype('float32') -output = np.matmul(input_x, input_y) -print(output) -``` - -输出如下: - -```python -[[20. 23. 26. 29.] - [56. 68. 80. 92.]] -``` - -#### 求平均值 - -以下代码实现了求`input_x`所有元素的平均值的操作: - -```python -import mindspore.numpy as np -input_x = np.arange(6).astype('float32') -output = np.mean(input_x) -print(output) -``` - -输出如下: - -```python -2.5 -``` - -#### 指数 - -以下代码实现了自然常数`e`的`input_x`次方的操作: - -```python -import mindspore.numpy as np -input_x = np.arange(5).astype('float32') -output = np.exp(input_x) -print(output) -``` - -输出如下: - -```python -[ 1. 
2.718282 7.3890557 20.085537 54.598145 ] -``` - -## MindSpore Numpy与MindSpore特性结合 - -`mindspore.numpy`能够充分利用MindSpore的强大功能,实现算子的自动微分,并使用图模式加速运算,帮助用户快速构建高效的模型。同时,MindSpore还支持多种后端设备,包括`Ascend`、`GPU`和`CPU`等,用户可以根据自己的需求灵活设置。以下提供了几种常用方法: - -- `ms_function`: 将代码包裹进图模式,用于提高代码运行效率。 -- `GradOperation`: 用于自动求导。 -- `mindspore.context`: 用于设置运行模式和后端设备等。 -- `mindspore.nn.Cell`: 用于建立深度学习模型。 - -### ms_function使用示例 - -首先,以神经网络里经常使用到的矩阵乘与矩阵加算子为例: - -```python -import mindspore.numpy as np - -x = np.arange(8).reshape(2, 4).astype('float32') -w1 = np.ones((4, 8)) -b1 = np.zeros((8,)) -w2 = np.ones((8, 16)) -b2 = np.zeros((16,)) -w3 = np.ones((16, 4)) -b3 = np.zeros((4,)) - -def forward(x, w1, b1, w2, b2, w3, b3): - x = np.dot(x, w1) + b1 - x = np.dot(x, w2) + b2 - x = np.dot(x, w3) + b3 - return x - -print(forward(x, w1, b1, w2, b2, w3, b3)) -``` - -输出如下: - -```python -[[ 768. 768. 768. 768.] - [2816. 2816. 2816. 2816.]] -``` - -对上述示例,我们可以借助`ms_function`将所有算子编译到一张静态图里以加快运行效率,示例如下: - -```python -from mindspore import ms_function - -forward_compiled = ms_function(forward) -``` - -> 目前静态图不支持在命令行模式中运行,并且有部分语法限制。`ms_function`的更多信息可参考[API: ms_function](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.html#mindspore.ms_function)。 - -### GradOperation使用示例 - -`GradOperation` 可以实现自动求导。以下示例可以实现对上述没有用`ms_function`修饰的`forward`函数定义的计算求导。 - -```python -from mindspore import ops - -grad_all = ops.composite.GradOperation(get_all=True) -grad_all(forward)(x, w1, b1, w2, b2, w3, b3) -``` - -如果要对`ms_function`修饰的`forward`计算求导,需要提前使用`context`设置运算模式为图模式,示例如下: - -```python -from mindspore import ops, ms_function, context - -context.set_context(mode=context.GRAPH_MODE) - -grad_all = ops.composite.GradOperation(get_all=True) -grad_all(ms_function(forward))(x, w1, b1, w2, b2, w3, b3) -``` - - 更多细节可参考[API: GradOperation](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.GradOperation.html)。 - -### mindspore.context使用示例 - 
-MindSpore支持多后端运算,可以通过`mindspore.context`进行设置。`mindspore.numpy` 的多数算子可以使用图模式或者PyNative模式运行,也可以运行在CPU,CPU或者Ascend等多种后端设备上。 - -```python -from mindspore import context - -# Execucation in static graph mode -context.set_context(mode=context.GRAPH_MODE) - -# Execucation in PyNative mode -context.set_context(mode=context.PYNATIVE_MODE) - -# Execucation on CPU backend -context.set_context(device_target="CPU") - -# Execucation on GPU backend -context.set_context(device_target="GPU") - -# Execucation on Ascend backend -context.set_context(device_target="Ascend") -... -``` - - 更多细节可参考[API: mindspore.context](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.context.html)。 - -### mindspore.numpy使用示例 - -这里提供一个使用`mindspore.numpy`构建网络模型的示例。 - -`mindspore.numpy` 接口可以定义在`nn.Cell`代码块内进行网络的构建,示例如下: - -```python -import mindspore.numpy as np -from mindspore import context -from mindspore.nn import Cell - -context.set_context(mode=context.GRAPH_MODE) - -x = np.arange(8).reshape(2, 4).astype('float32') -w1 = np.ones((4, 8)) -b1 = np.zeros((8,)) -w2 = np.ones((8, 16)) -b2 = np.zeros((16,)) -w3 = np.ones((16, 4)) -b3 = np.zeros((4,)) - -class NeuralNetwork(Cell): - def __init__(self): - super(NeuralNetwork, self).__init__() - - def construct(self, x, w1, b1, w2, b2, w3, b3): - x = np.dot(x, w1) + b1 - x = np.dot(x, w2) + b2 - x = np.dot(x, w3) + b3 - return x - -net = NeuralNetwork() - -print(net(x, w1, b1, w2, b2, w3, b3)) -``` - -输出如下: - -```python -[[ 768. 768. 768. 768.] - [2816. 2816. 2816. 2816.]] -``` - -更多构建网络的细节可以参考[MindSpore训练指导](https://www.mindspore.cn/tutorial/training/zh-CN/master/index.html)。 diff --git a/docs/programming_guide/source_zh_cn/operator_list.rst b/docs/programming_guide/source_zh_cn/operator_list.rst deleted file mode 100644 index 4f2daa0ae22d8ec80280754c66d95ce938a36450..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/operator_list.rst +++ /dev/null @@ -1,9 +0,0 @@ -算子支持 -=========== - -.. 
toctree:: - :maxdepth: 1 - - MindSpore算子支持 - MindSpore隐式类型转换的算子支持 - MindSpore分布式算子支持 \ No newline at end of file diff --git a/docs/programming_guide/source_zh_cn/operators.ipynb b/docs/programming_guide/source_zh_cn/operators.ipynb deleted file mode 100644 index 3eb576ddc267147319bfe77933859c080c123589..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/operators.ipynb +++ /dev/null @@ -1,1096 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 算子\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/operators.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_operators.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9vcGVyYXRvcnMuaXB5bmI=&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "MindSpore的算子组件,可从算子使用方式和算子功能两种维度进行划分。以下示例代码需在PyNative模式运行。\n", - "\n", - "## 算子使用方式\n", - "\n", - "算子相关接口主要包括operations、functional和composite,可通过ops直接获取到这三类算子。\n", - "\n", - "- operations提供单个的Primitive算子。一个算子对应一个原语,是最小的执行对象,需要实例化之后使用。\n", - "\n", - "- composite提供一些预定义的组合算子,以及复杂的涉及图变换的算子,如`GradOperation`。\n", - "\n", - "- functional提供operations和composite实例化后的对象,简化算子的调用流程。\n", - "\n", - "### mindspore.ops.operations\n", - "\n", - 
"operations提供了所有的Primitive算子接口,是开放给用户的最低阶算子接口。算子支持情况可查询[算子支持列表](https://www.mindspore.cn/doc/note/zh-CN/master/operator_list.html)。\n", - "\n", - "Primitive算子也称为算子原语,它直接封装了底层的Ascend、GPU、AICPU、CPU等多种算子的具体实现,为用户提供基础算子能力。\n", - "\n", - "Primitive算子接口是构建高阶接口、自动微分、网络模型等能力的基础。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "output = [ 1. 8. 64.]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore\n", - "from mindspore import Tensor\n", - "import mindspore.ops.operations as P\n", - "\n", - "input_x = mindspore.Tensor(np.array([1.0, 2.0, 4.0]), mindspore.float32)\n", - "input_y = 3.0\n", - "pow = P.Pow()\n", - "output = pow(input_x, input_y)\n", - "print(\"output =\", output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### mindspore.ops.functional\n", - "\n", - "为了简化没有属性的算子的调用流程,MindSpore提供了一些算子的functional版本。入参要求参考原算子的输入输出要求。算子支持情况可以查询[算子支持列表](https://www.mindspore.cn/doc/note/zh-CN/master/operator_list_ms.html#mindspore-ops-functional)。\n", - "\n", - "例如`P.Pow`算子,我们提供了functional版本的`F.tensor_pow`算子。\n", - "\n", - "使用functional的代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "output = [ 1. 8. 
64.]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore\n", - "from mindspore import Tensor\n", - "from mindspore.ops import functional as F\n", - "\n", - "input_x = mindspore.Tensor(np.array([1.0, 2.0, 4.0]), mindspore.float32)\n", - "input_y = 3.0\n", - "output = F.tensor_pow(input_x, input_y)\n", - "print(\"output =\", output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### mindspore.ops.composite\n", - "\n", - "composite提供了一些算子的组合,包括`clip_by_value`和`random`相关的一些算子,以及涉及图变换的函数(`GradOperation`、`HyperMap`和`Map`等)。\n", - "\n", - "算子的组合可以直接像一般函数一样使用,例如使用`normal`生成一个随机分布:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "output = [[2.4911082 0.7941146 1.3117087]\n", - " [0.3058231 1.7729738 1.525996 ]]\n" - ] - } - ], - "source": [ - "from mindspore import dtype as mstype\n", - "from mindspore.ops import composite as C\n", - "from mindspore import Tensor\n", - "\n", - "mean = Tensor(1.0, mstype.float32)\n", - "stddev = Tensor(1.0, mstype.float32)\n", - "output = C.normal((2, 3), mean, stddev, seed=5)\n", - "print(\"output =\", output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 以上代码运行于MindSpore的GPU版本。\n", - "\n", - "针对涉及图变换的函数,用户可以使用`MultitypeFuncGraph`定义一组重载的函数,根据不同类型,采用不同实现。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor [[2.4 4.2]\n", - " [4.4 6.4]]\n", - "scalar 3\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from mindspore.ops.composite import MultitypeFuncGraph\n", - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "\n", - "add = MultitypeFuncGraph('add')\n", - "@add.register(\"Number\", \"Number\")\n", - "def add_scalar(x, y):\n", - " return ops.scalar_add(x, y)\n", - "\n", - 
"@add.register(\"Tensor\", \"Tensor\")\n", - "def add_tensor(x, y):\n", - " return ops.tensor_add(x, y)\n", - "\n", - "tensor1 = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]]).astype('float32'))\n", - "tensor2 = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]]).astype('float32'))\n", - "print('tensor', add(tensor1, tensor2))\n", - "print('scalar', add(1, 2))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "此外,高阶函数`GradOperation`提供了根据输入的函数,求这个函数对应的梯度函数的方式,详细可以参阅[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.GradOperation.html)。\n", - "\n", - "### operations/functional/composite三类算子合并用法\n", - "\n", - "为了在使用过程中更加简便,除了以上介绍的几种用法外,我们还将`operations`,`functional`和`composite`三种算子封装到了`mindspore.ops`中,推荐直接调用`mindspore.ops`下的接口。\n", - "\n", - "代码样例如下:\n" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.ops.operations as P\n", - "pow = P.Pow()" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.ops as ops\n", - "pow = ops.Pow()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 以上两种写法效果相同。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 算子功能\n", - "\n", - "算子按功能可分为张量操作、网络操作、数组操作、图像操作、编码操作、调试操作和量化操作七个功能模块。所有的算子在Ascend AI处理器、GPU和CPU的支持情况,参见[算子支持列表](https://www.mindspore.cn/doc/note/zh-CN/master/operator_list.html)。\n", - "\n", - "### 张量操作\n", - "\n", - "张量操作包括张量的结构操作和张量的数学运算。\n", - "\n", - "张量结构操作有:张量创建、索引切片、维度变换和合并分割。\n", - "\n", - "张量数学运算有:标量运算、向量运算和矩阵运算。\n", - "\n", - "这里以张量的数学运算和运算的广播机制为例,介绍使用方法。\n", - "\n", - "### 标量运算\n", - "\n", - "张量的数学运算符可以分为标量运算符、向量运算符以及矩阵运算符。\n", - "\n", - "加减乘除乘方,以及三角函数、指数、对数等常见函数,逻辑比较运算符等都是标量运算符。\n", - "\n", - "标量运算符的特点是对张量实施逐元素运算。\n", - "\n", - "有些标量运算符对常用的数学运算符进行了重载。并且支持类似NumPy的广播特性。\n", - "\n", - " 以下代码实现了对`input_x`作乘方数为`input_y`的乘方操作:" - ] - }, - { - "cell_type": "code", - 
"execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[ 1. 8. 64.]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore\n", - "from mindspore import Tensor\n", - "\n", - "input_x = mindspore.Tensor(np.array([1.0, 2.0, 4.0]), mindspore.float32)\n", - "input_y = 3.0\n", - "print(input_x**input_y)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### 加法\n", - "\n", - "上述代码中`input_x`和`input_y`的相加实现方式如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[4. 5. 7.]\n" - ] - } - ], - "source": [ - "print(input_x + input_y)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Element-wise乘法\n", - "\n", - "以下代码实现了Element-wise乘法示例:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[ 4. 10. 
18.]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore\n", - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "\n", - "input_x = Tensor(np.array([1.0, 2.0, 3.0]), mindspore.float32)\n", - "input_y = Tensor(np.array([4.0, 5.0, 6.0]), mindspore.float32)\n", - "mul = ops.Mul()\n", - "res = mul(input_x, input_y)\n", - "\n", - "print(res)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 求三角函数\n", - "\n", - "以下代码实现了Acos:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```python\n", - "import numpy as np\n", - "import mindspore\n", - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "\n", - "acos = ops.ACos()\n", - "input_x = Tensor(np.array([0.74, 0.04, 0.30, 0.56]), mindspore.float32)\n", - "output = acos(input_x)\n", - "print(output)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "输出如下:\n", - "\n", - "```text\n", - "[0.7377037, 1.5307858, 1.2661037,0.97641146]\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 向量运算\n", - "\n", - "向量运算符只在一个特定轴上运算,将一个向量映射到一个标量或者另外一个向量。\n", - "\n", - "#### Squeeze\n", - "\n", - "以下代码实现了压缩第3个通道维度为1的通道:" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[1. 1.]\n", - " [1. 1.]\n", - " [1. 
1.]]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore\n", - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "\n", - "input_tensor = Tensor(np.ones(shape=[3, 2, 1]), mindspore.float32)\n", - "squeeze = ops.Squeeze(2)\n", - "output = squeeze(input_tensor)\n", - "\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 矩阵运算\n", - "\n", - "矩阵运算包括矩阵乘法、矩阵范数、矩阵行列式、矩阵求特征值、矩阵分解等运算。\n", - "\n", - "#### 矩阵乘法\n", - "\n", - " 以下代码实现了`input_x`和`input_y`的矩阵乘法:" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[3. 3. 3. 3.]]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore\n", - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "\n", - "input_x = Tensor(np.ones(shape=[1, 3]), mindspore.float32)\n", - "input_y = Tensor(np.ones(shape=[3, 4]), mindspore.float32)\n", - "matmul = ops.MatMul()\n", - "output = matmul(input_x, input_y)\n", - "\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### 广播机制\n", - "\n", - "广播表示输入各变量channel数目不一致时,改变他们的channel数以得到结果。\n", - "\n", - "- 以下代码实现了广播机制的示例:\n", - "\n", - "```python\n", - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "\n", - "shape = (2, 3)\n", - "input_x = Tensor(np.array([1, 2, 3]).astype(np.float32))\n", - "broadcast_to = ops.BroadcastTo(shape)\n", - "output = broadcast_to(input_x)\n", - "\n", - "print(output)\n", - "```\n", - "\n", - "输出如下:\n", - "\n", - "```text\n", - "[[1. 2. 3.]\n", - " [1. 2. 
3.]]\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 网络操作\n", - "\n", - "网络操作包括特征提取、激活函数、LossFunction、优化算法等。\n", - "\n", - "#### 特征提取\n", - "\n", - "特征提取是机器学习中的常见操作,核心是提取比原输入更具代表性的Tensor。\n", - "\n", - "卷积操作\n", - "\n", - "以下代码实现了常见卷积操作之一的2D convolution 操作:" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[[[288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " ...\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 288.]]]\n", - "\n", - " ...\n", - "\n", - " [[288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " ...\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 288.]]\n", - "\n", - "\n", - " ...\n", - "\n", - "\n", - " [[288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " ...\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 288.]\n", - " [288. 288. 288. ... 288. 288. 
288.]]]]\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "import mindspore\n", - "\n", - "input = Tensor(np.ones([10, 32, 32, 32]), mindspore.float32)\n", - "weight = Tensor(np.ones([32, 32, 3, 3]), mindspore.float32)\n", - "conv2d = ops.Conv2D(out_channel=32, kernel_size=3)\n", - "res = conv2d(input, weight)\n", - "\n", - "print(res)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "卷积的反向传播算子操作\n", - "\n", - "以下代码实现了反向梯度算子传播操作的具体代码,输出存于dout, weight:" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[[[ 32. 64. 96. ... 96. 64. 32.]\n", - " [ 64. 128. 192. ... 192. 128. 64.]\n", - " [ 96. 192. 288. ... 288. 192. 96.]\n", - " ...\n", - " [ 96. 192. 288. ... 288. 192. 96.]\n", - " [ 64. 128. 192. ... 192. 128. 64.]\n", - " [ 32. 64. 96. ... 96. 64. 32.]]\n", - "\n", - " ...\n", - "\n", - " [[ 32. 64. 96. ... 96. 64. 32.]\n", - " [ 64. 128. 192. ... 192. 128. 64.]\n", - " [ 96. 192. 288. ... 288. 192. 96.]\n", - " ...\n", - " [ 96. 192. 288. ... 288. 192. 96.]\n", - " [ 64. 128. 192. ... 192. 128. 64.]\n", - " [ 32. 64. 96. ... 96. 64. 
32.]]]]\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "import mindspore\n", - "\n", - "dout = Tensor(np.ones([10, 32, 30, 30]), mindspore.float32)\n", - "weight = Tensor(np.ones([32, 32, 3, 3]), mindspore.float32)\n", - "x = Tensor(np.ones([10, 32, 32, 32]))\n", - "conv2d_backprop_input = ops.Conv2DBackpropInput(out_channel=32, kernel_size=3)\n", - "res = conv2d_backprop_input(dout, weight, ops.shape(x))\n", - "\n", - "print(res)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### 激活函数\n", - "\n", - "以下代码实现Softmax激活函数计算:" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0.01165623 0.03168492 0.08612853 0.23412164 0.63640857]\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "import mindspore\n", - "\n", - "input_x = Tensor(np.array([1, 2, 3, 4, 5]), mindspore.float32)\n", - "softmax = ops.Softmax()\n", - "res = softmax(input_x)\n", - "\n", - "print(res)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### LossFunction\n", - "\n", - " 以下代码实现了L1 loss function:\n" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0. 0. 
0.5]\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "import mindspore\n", - "\n", - "loss = ops.SmoothL1Loss()\n", - "input_data = Tensor(np.array([1, 2, 3]), mindspore.float32)\n", - "target_data = Tensor(np.array([1, 2, 2]), mindspore.float32)\n", - "res = loss(input_data, target_data)\n", - "print(res)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### 优化算法\n", - "\n", - " 以下代码实现了SGD梯度下降算法的具体实现,输出是result:" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(Tensor(shape=[4], dtype=Float32, value= [ 1.99000001e+00, -4.90300000e-01, 1.69500005e+00, 3.98009992e+00]),)\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "import mindspore\n", - "\n", - "sgd = ops.SGD()\n", - "parameters = Tensor(np.array([2, -0.5, 1.7, 4]), mindspore.float32)\n", - "gradient = Tensor(np.array([1, -1, 0.5, 2]), mindspore.float32)\n", - "learning_rate = Tensor(0.01, mindspore.float32)\n", - "accum = Tensor(np.array([0.1, 0.3, -0.2, -0.1]), mindspore.float32)\n", - "momentum = Tensor(0.1, mindspore.float32)\n", - "stat = Tensor(np.array([1.5, -0.3, 0.2, -0.7]), mindspore.float32)\n", - "result = sgd(parameters, gradient, learning_rate, accum, momentum, stat)\n", - "\n", - "print(result)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数组操作\n", - "\n", - "数组操作指操作对象是一些数组的操作。\n", - "\n", - "#### DType\n", - "\n", - "返回跟输入的数据类型一致的并且适配Mindspore的Tensor变量,常用于Mindspore工程内。\n", - "\n", - "具体可参见示例:" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Float32\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import 
numpy as np\n", - "import mindspore\n", - "\n", - "input_tensor = Tensor(np.array([[2, 2], [2, 2]]), mindspore.float32)\n", - "typea = ops.DType()(input_tensor)\n", - "\n", - "print(typea)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Cast\n", - "\n", - "转换输入的数据类型并且输出与目标数据类型相同的变量。\n", - "\n", - "具体参见以下示例:" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Float16\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "import mindspore\n", - "\n", - "input_np = np.random.randn(2, 3, 4, 5).astype(np.float32)\n", - "input_x = Tensor(input_np)\n", - "type_dst = mindspore.float16\n", - "cast = ops.Cast()\n", - "result = cast(input_x, type_dst)\n", - "print(result.dtype)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Shape\n", - "\n", - "返回输入数据的形状。\n", - "\n", - " 以下代码实现了返回输入数据input_tensor的操作:" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(3, 2, 1)\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "import mindspore\n", - "\n", - "input_tensor = Tensor(np.ones(shape=[3, 2, 1]), mindspore.float32)\n", - "shape = ops.Shape()\n", - "output = shape(input_tensor)\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 图像操作\n", - "\n", - "图像操作包括图像预处理操作,如图像剪切(Crop,便于得到大量训练样本)和大小变化(Reise,用于构建图像金字塔等)。\n", - "\n", - " 以下代码实现了Crop和Resize操作:\n", - "\n", - "```python\n", - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "\n", - "BATCH_SIZE = 1\n", - "NUM_BOXES = 5\n", - "IMAGE_HEIGHT = 256\n", - "IMAGE_WIDTH = 256\n", - "CHANNELS = 3\n", - "image = 
np.random.normal(size=[BATCH_SIZE, IMAGE_HEIGHT, IMAGE_WIDTH, CHANNELS]).astype(np.float32)\n", - "boxes = np.random.uniform(size=[NUM_BOXES, 4]).astype(np.float32)\n", - "box_index = np.random.uniform(size=[NUM_BOXES], low=0, high=BATCH_SIZE).astype(np.int32)\n", - "crop_size = (24, 24)\n", - "crop_and_resize = ops.CropAndResize()\n", - "output = crop_and_resize(Tensor(image), Tensor(boxes), Tensor(box_index), crop_size)\n", - "print(output.asnumpy())\n", - "```\n", - "\n", - "输出如下:\n", - "\n", - "```text\n", - "[[[[ 6.51672244e-01 -1.85958534e-01 5.19907832e-01]\n", - "[ 1.53466597e-01 4.10562098e-01 6.26138210e-01]\n", - "[ 6.62892580e-01 3.81776541e-01 4.69261825e-01]\n", - "...\n", - "[-5.83377600e-01 -3.53377648e-02 -6.01786733e-01]\n", - "[ 1.36125124e+00 5.84172308e-02 -6.41442612e-02]\n", - "[-9.11651254e-01 -1.19495761e+00 1.96810793e-02]]\n", - "\n", - "[[ 6.06956100e-03 -3.73778701e-01 1.88935513e-03]\n", - "[-1.06859171e+00 2.00272346e+00 1.37180305e+00]\n", - "[ 1.69524819e-01 2.90421434e-02 -4.12243098e-01]\n", - "...\n", - "\n", - "[[-2.04489112e-01 2.36615837e-01 1.33802962e+00]\n", - "[ 1.08329034e+00 -9.00492966e-01 -8.21497202e-01]\n", - "[ 7.54147097e-02 -3.72897685e-01 -2.91040149e-02]\n", - "...\n", - "[ 1.12317121e+00 8.98950577e-01 4.22795087e-01]\n", - "[ 5.13781667e-01 5.12095273e-01 -3.68211865e-01]\n", - "[-7.04941899e-02 -1.09924078e+00 6.89047515e-01]]]]\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 以上代码运行于MindSpore的Ascend版本。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 编码运算\n", - "\n", - "编码运算包括BoundingBox Encoding、BoundingBox Decoding、IOU计算等。\n", - "\n", - "#### BoundingBoxEncode\n", - "\n", - "对物体所在区域方框进行编码,得到类似PCA的更精简信息,以便做后续类似特征提取,物体检测,图像恢复等任务。\n", - "\n", - " 以下代码实现了对anchor_box和groundtruth_box的boundingbox encode:" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": 
"stream", - "text": [ - "[[-1. 0.25 0. 0.40546513]\n", - " [-1. 0.25 0. 0.40546513]]\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import mindspore\n", - "\n", - "anchor_box = Tensor([[2,2,2,3],[2,2,2,3]],mindspore.float32)\n", - "groundtruth_box = Tensor([[1,2,1,4],[1,2,1,4]],mindspore.float32)\n", - "boundingbox_encode = ops.BoundingBoxEncode(means=(0.0, 0.0, 0.0, 0.0), stds=(1.0, 1.0, 1.0, 1.0))\n", - "res = boundingbox_encode(anchor_box, groundtruth_box)\n", - "print(res)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### BoundingBoxDecode\n", - "\n", - "编码器对区域位置信息解码之后,用此算子进行解码。\n", - "\n", - " 以下代码实现了:" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[ 4.194528 0. 0. 5.194528 ]\n", - " [ 2.1408591 0. 3.8591409 60.59815 ]]\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import mindspore\n", - "\n", - "anchor_box = Tensor([[4,1,2,1],[2,2,2,3]],mindspore.float32)\n", - "deltas = Tensor([[3,1,2,2],[1,2,1,4]],mindspore.float32)\n", - "boundingbox_decode = ops.BoundingBoxDecode(means=(0.0, 0.0, 0.0, 0.0), stds=(1.0, 1.0, 1.0, 1.0), max_shape=(768, 1280), wh_ratio_clip=0.016)\n", - "res = boundingbox_decode(anchor_box, deltas)\n", - "print(res)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### IOU计算\n", - "\n", - "计算预测的物体所在方框和真实物体所在方框的交集区域与并集区域的占比大小,常作为一种损失函数,用以优化模型。\n", - "\n", - " 以下代码实现了计算两个变量`anchor_boxes`和`gt_boxes`之间的IOU,以out输出:" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[ 0. -0. 0.]\n", - " [ 0. -0. 0.]\n", - " [ 0. 0. 
0.]]\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "import mindspore\n", - "\n", - "iou = ops.IOU()\n", - "anchor_boxes = Tensor(np.random.randint(1.0, 5.0, [3, 4]), mindspore.float16)\n", - "gt_boxes = Tensor(np.random.randint(1.0, 5.0, [3, 4]), mindspore.float16)\n", - "out = iou(anchor_boxes, gt_boxes)\n", - "print(out)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 调试操作\n", - "\n", - "调试操作指的是用于调试网络的一些常用算子及其操作,例如HookBackward等, 此操作非常方便,对入门深度学习重要,极大提高学习者的学习体验。\n", - "\n", - "#### HookBackward\n", - "\n", - "打印中间变量的梯度,是比较常用的算子,目前仅支持PyNative模式。\n", - "\n", - " 以下代码实现了打印中间变量(例中x,y)的梯度:" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(Tensor(shape=[], dtype=Float32, value= 2),)\n", - "(Tensor(shape=[], dtype=Float32, value= 4), Tensor(shape=[], dtype=Float32, value= 4))\n" - ] - } - ], - "source": [ - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "from mindspore import dtype as mstype\n", - "\n", - "def hook_fn(grad_out):\n", - " print(grad_out)\n", - "\n", - "grad_all = ops.GradOperation(get_all=True)\n", - "hook = ops.HookBackward(hook_fn)\n", - "\n", - "def hook_test(x, y):\n", - " z = x * y\n", - " z = hook(z)\n", - " z = z * y\n", - " return z\n", - "\n", - "def backward(x, y):\n", - " return grad_all(hook_test)(Tensor(x, mstype.float32), Tensor(y, mstype.float32))\n", - "\n", - "print(backward(1, 2))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - 
"nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/optim.ipynb b/docs/programming_guide/source_zh_cn/optim.ipynb deleted file mode 100644 index cfcf4401e203ec997746d0c39eed8b446dd8dec7..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/optim.ipynb +++ /dev/null @@ -1,271 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 优化算法\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/optim.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_optim.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9vcHRpbS5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "`mindspore.nn.optim`是MindSpore框架中实现各种优化算法的模块,包含常用的优化器、学习率等,并且接口具备足够的通用性,可以将以后更新、更复杂的方法集成到模块里。\n", - "\n", - "`mindspore.nn.optim`为模型提供常用的优化器,如`SGD`、`ADAM`、`Momentum`。优化器用于计算和更新梯度,模型优化算法的选择直接关系到最终模型的性能,如果有时候效果不好,未必是特征或者模型设计的问题,很有可能是优化算法的问题;同时还有`mindspore.nn`提供的学习率的模块,学习率分为`dynamic_lr`和`learning_rate_schedule`,都是动态学习率,但是实现方式不同,学习率是监督学习以及深度学习中最为重要的参数,其决定着目标函数是否能收敛到局部最小值以及何时能收敛到最小值。合适的学习率能够使目标函数在合适的的时间内收敛到局部最小值。\n", - "\n", - "> 本文档适用于CPU、GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 学习率\n", - "\n", - "### dynamic_lr\n", - "\n", - "`mindspore.nn.dynamic_lr`模块有以下几个类:\n", - "\n", - "- 
`piecewise_constant_lr`类:基于得到分段不变的学习速率。\n", - "\n", - "- `exponential_decay_lr`类:基于指数衰减函数计算学习率。\n", - "\n", - "- `natural_exp_decay_lr`类:基于自然指数衰减函数计算学习率。\n", - "\n", - "- `inverse_decay_lr`类:基于反时间衰减函数计算学习速率。\n", - "\n", - "- `cosine_decay_lr`类:基于余弦衰减函数计算学习率。\n", - "\n", - "- `polynomial_decay_lr`类:基于多项式衰减函数计算学习率。\n", - "\n", - "- `warmup_lr`类:提高学习率。\n", - "\n", - "它们是属于`dynamic_lr`的不同实现方式。\n", - "\n", - "例如`piecewise_constant_lr`类代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0.1, 0.1, 0.05, 0.05, 0.05, 0.01, 0.01, 0.01, 0.01, 0.01]\n" - ] - } - ], - "source": [ - "from mindspore.nn.dynamic_lr import piecewise_constant_lr\n", - "\n", - "def test_dynamic_lr():\n", - " milestone = [2, 5, 10]\n", - " learning_rates = [0.1, 0.05, 0.01]\n", - " lr = piecewise_constant_lr(milestone, learning_rates)\n", - " print(lr)\n", - "\n", - "if __name__ == '__main__':\n", - " test_dynamic_lr()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### learning_rate_schedule\n", - "\n", - "`mindspore.nn.learning_rate_schedule`模块下有以下几个类:`ExponentialDecayLR`类、`NaturalExpDecayLR`类、`InverseDecayLR`类、`CosineDecayLR`类、`PolynomialDecayLR`类和`WarmUpLR`类。它们都属于`learning_rate_schedule`,只是实现方式不同,各自含义如下:\n", - "\n", - "- `ExponentialDecayLR`类:基于指数衰减函数计算学习率。\n", - "\n", - "- `NaturalExpDecayLR`类:基于自然指数衰减函数计算学习率。\n", - "\n", - "- `InverseDecayLR`类:基于反时间衰减函数计算学习速率。\n", - "\n", - "- `CosineDecayLR`类:基于余弦衰减函数计算学习率。\n", - "\n", - "- `PolynomialDecayLR`类:基于多项式衰减函数计算学习率。\n", - "\n", - "- `WarmUpLR`类:提高学习率。\n", - "\n", - "它们是属于`learning_rate_schedule`的不同实现方式。\n", - "\n", - "例如`ExponentialDecayLR`类代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.094868325\n" - ] - } - ], - "source": [ - "from mindspore.common import dtype as mstype\n", - "from 
mindspore import Tensor\n", - "from mindspore.nn.learning_rate_schedule import ExponentialDecayLR\n", - "\n", - "def test_learning_rate_schedule():\n", - " learning_rate = 0.1 # learning_rate(float) - The initial value of learning rate.\n", - " decay_rate = 0.9 # decay_rate(float) - The decay rate.\n", - " decay_steps = 4 # decay_steps(int) - A value used to calculate decayed learning rate.\n", - " global_step = Tensor(2, mstype.int32)\n", - " exponential_decay_lr = ExponentialDecayLR(learning_rate, decay_rate, decay_steps)\n", - " res = exponential_decay_lr(global_step)\n", - " print(res)\n", - "\n", - "\n", - "if __name__ == '__main__':\n", - " test_learning_rate_schedule()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Optimzer\n", - "\n", - "### 如何使用\n", - "\n", - "为了使用`mindspore.nn.optim`,我们需要构建一个`Optimizer`对象。这个对象能够保持当前参数状态并基于计算得到的梯度进行参数更新。\n", - "\n", - "- 构建\n", - "\n", - "为了构建一个`Optimizer`,我们需要给它一个包含可需要优化的参数(必须是Variable对象)的iterable。然后,你可以设置Optimizer的参数选项,比如学习率,权重衰减等等。\n", - "\n", - "代码样例如下:\n", - "\n", - "```python\n", - "from mindspore import nn\n", - "\n", - "optim = nn.SGD(group_params, learning_rate=0.1, weight_decay=0.0)\n", - "optim = nn.Adam(params=net.trainable_params())\n", - "\n", - "optim = nn.Adam(group_params, learning_rate=0.1, weight_decay=0.0)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 为每一个参数单独设置选项\n", - "\n", - "优化器也支持为每个参数单独设置选项。若想这么做,不要直接传入变量Variable,而是传入一个字典的iterable。每一个字典都分别定义了一组参数,并且包含一个key键,这个key键对应相应的参数value值。其他的key键应该是优化器所接受的其他参数,并且会被用于对这组参数的优化。\n", - "\n", - "我们仍然能够传递选项作为关键字参数,在未重写这些选项的组中,它们会被用作默认值。当你只想改动一个参数组的选项,但其他参数组的选项不变时,这是非常有用的。\n", - "\n", - "例如,当我们想制定每一层的学习率时,以`SGD`为例:\n", - "\n", - "```python\n", - "from mindspore import nn\n", - "\n", - "optim = nn.SGD([{'params': conv_params, 'weight_decay': 0.01},\n", - " {'params': no_conv_params, 'lr': 0.01},\n", - " {'order_params': net.trainable_params()}],\n", - " learning_rate=0.1, weight_decay=0.0)\n", 
- "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "这段示例意味着当参数是`conv_params`时候,权重衰减使用的是0.01,学习率使用的是0.1;而参数是`no_conv_params`时候,权重衰减使用的是0.0,学习率使用的是0.01。这个学习率`learning_rate=0.1`会被用于所有分组里没有设置学习率的参数,权重衰减`weight_decay`也是如此。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 内置优化器\n", - "\n", - "深度学习优化算法大概常用的有`SGD`、`Adam`、`Ftrl`、`lazyadam`、`Momentum`、`RMSprop`、`Lars`、`Proximal_ada_grad`和`lamb`这几种。\n", - "\n", - "在`mindspore.nn.optim`模块中,他们都有对应的类实现。例如:\n", - "\n", - "- `SGD`,默认参数为纯SGD,设置`momentum`参数不为0,考虑了一阶动量,设置`nesterov`为True后变成`NAG`,即`Nesterov Accelerated Gradient`,在计算梯度时计算的是向前走一步所在位置的梯度。\n", - "\n", - "- `RMSprop`,考虑了二阶动量,对于不同的参数有不同的学习率,即自适应学习率,对`Adagrad`进行了优化,通过指数平滑只考虑一定窗口内的二阶动量。\n", - "\n", - "- `Adam`,同时考虑了一阶动量和二阶动量,可以看成`RMSprop`上进一步考虑了一阶动量。\n", - "\n", - "例如`SGD`的代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore import nn, Model, Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "from mindspore import dtype as mstype\n", - "from mindspore import Parameter\n", - "\n", - "class Net(nn.Cell):\n", - " def __init__(self):\n", - " super(Net, self).__init__()\n", - " self.matmul = ops.MatMul()\n", - " self.conv = nn.Conv2d(1, 6, 5, pad_mode=\"valid\")\n", - " self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')\n", - " \n", - " def construct(self, x, y):\n", - " x = x * self.z\n", - " out = self.matmul(x, y)\n", - " return out\n", - "\n", - "net = Net()\n", - "optim = nn.SGD(params=net.trainable_params())\n", - "\n", - "conv_params = list(filter(lambda x: 'conv' in x.name, net.trainable_params()))\n", - "no_conv_params = list(filter(lambda x: 'conv' not in x.name, net.trainable_params()))\n", - "group_params = [{'params': conv_params, 'weight_decay': 0.01},\n", - " {'params': no_conv_params, 'lr': 0.01},\n", - " {'order_params': net.trainable_params()}]\n", - "optim = nn.SGD(group_params, 
learning_rate=0.1, weight_decay=0.0)\n", - "\n", - "loss = nn.SoftmaxCrossEntropyWithLogits()\n", - "model = Model(net, loss_fn=loss, optimizer=optim)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/parameter.ipynb b/docs/programming_guide/source_zh_cn/parameter.ipynb deleted file mode 100644 index 5c20a3af7cd94075316c25c9c4535017537b5477..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/parameter.ipynb +++ /dev/null @@ -1,272 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Parameter\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/parameter.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_parameter.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9wYXJhbWV0ZXIuaXB5bmI=&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - 
"`Parameter`是变量张量,代表在训练网络时,需要被更新的参数。本章主要介绍了`Parameter`的初始化以及属性和方法的使用,同时介绍了`ParameterTuple`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 初始化\n", - "\n", - "```python\n", - "mindspore.Parameter(default_input, name, requires_grad=True, layerwise_parallel=False)\n", - "```\n", - "\n", - "初始化一个`Parameter`对象,传入的数据支持`Tensor`、`Initializer`、`int`和`float`四种类型。\n", - "\n", - "`Initializer`是初始化器,可调用`initializer`接口生成`Initializer`对象。\n", - "\n", - "当使用`init`去初始化`Tensor`时,`Tensor`仅保存张量的形状和类型,而不保存实际数据,所以不会占用任何内存,可调用`init_data`接口将`Parameter`里保存的`Tensor`转化为数据。\n", - "\n", - "可为每个`Parameter`指定一个名称,便于后续操作和更新。如果在Cell里初始化一个Parameter作为Cell的属性时,建议使用默认值None,否则可能会出现Parameter的name与预期的不一致的情况。\n", - "\n", - "当参数需要被更新时,需要将`requires_grad`设置为`True`。\n", - "\n", - "当`layerwise_parallel`(混合并行)配置为`True`时,参数广播和参数梯度聚合时会过滤掉该参数。\n", - "\n", - "有关分布式并行的相关配置,可以参考文档:https://www.mindspore.cn/doc/programming_guide/zh-CN/master/auto_parallel.html 。\n", - "\n", - "下例通过三种不同的数据类型构造了`Parameter`,三个`Parameter`都需要更新,都不采用layerwise并行。如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-08T02:02:52.663940Z", - "start_time": "2021-02-08T02:02:43.200604Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Parameter (name=x) \n", - "\n", - " Parameter (name=y) \n", - "\n", - " Parameter (name=z)\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from mindspore import Tensor, Parameter\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common.initializer import initializer\n", - "\n", - "x = Parameter(default_input=Tensor(np.arange(2*3).reshape((2, 3))), name=\"x\")\n", - "y = Parameter(default_input=initializer('ones', [1, 2, 3], mstype.float32), name='y')\n", - "z = Parameter(default_input=2.0, name='z')\n", - "\n", - "print(x, \"\\n\\n\", y, \"\\n\\n\", z)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 属性\n", - "\n", - "- 
`inited_param`:返回保存了实际数据的`Parameter`。\n", - "\n", - "- `name`:实例化`Parameter`时,为其指定的名字。\n", - "\n", - "- `sliced`:用在自动并行场景下,表示`Parameter`里保存的数据是否是分片数据。\n", - "\n", - " 如果是,就不再对其进行切分,如果不是,需要根据网络并行策略确认是否对其进行切分。\n", - " \n", - "\n", - "- `is_init`:`Parameter`的初始化状态。在GE后端,`Parameter`需要一个`init graph`来从主机同步数据到设备侧,该标志表示数据是否已同步到设备。 此标志仅在GE后端起作用,其他后端将被设置为False。\n", - "\n", - "- `layerwise_parallel`:`Parameter`是否支持layerwise并行。如果支持,参数就不会进行广播和梯度聚合,反之则需要。\n", - "\n", - "- `requires_grad`:是否需要计算参数梯度。如果参数需要被训练,则需要计算参数梯度,否则不需要。\n", - "\n", - "- `data`: `Parameter`本身。\n", - "\n", - "下例通过`Tensor`初始化一个`Parameter`,获取了`Parameter`的相关属性。如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-08T02:02:52.675656Z", - "start_time": "2021-02-08T02:02:52.665991Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "name: x \n", - " sliced: False \n", - " is_init: False \n", - " inited_param: None \n", - " requires_grad: True \n", - " layerwise_parallel: False \n", - " data: Parameter (name=x)\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "\n", - "from mindspore import Tensor, Parameter\n", - "\n", - "x = Parameter(default_input=Tensor(np.arange(2*3).reshape((2, 3))), name=\"x\")\n", - "\n", - "print(\"name: \", x.name, \"\\n\",\n", - " \"sliced: \", x.sliced, \"\\n\",\n", - " \"is_init: \", x.is_init, \"\\n\",\n", - " \"inited_param: \", x.inited_param, \"\\n\",\n", - " \"requires_grad: \", x.requires_grad, \"\\n\",\n", - " \"layerwise_parallel: \", x.layerwise_parallel, \"\\n\",\n", - " \"data: \", x.data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 方法\n", - "\n", - "- `init_data`:在网络采用半自动或者全自动并行策略的场景下, 当初始化`Parameter`传入的数据是`Initializer`时,可调用该接口将`Parameter`保存的数据转换为`Tensor`。\n", - "\n", - "- `set_data`:设置`Parameter`保存的数据,支持传入`Tensor`、`Initializer`、`int`和`float`进行设置, 
将方法的入参`slice_shape`设置为True时,可改变`Parameter`的shape,反之,设置的数据shape必须与`Parameter`原来的shape保持一致。\n", - "\n", - "- `set_param_ps`:控制训练参数是否通过[Parameter Server](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_parameter_server_training.html)进行训练。\n", - "\n", - "- `clone`:克隆`Parameter`,克隆完成后可以给新Parameter指定新的名字。\n", - "\n", - "下例通过`Initializer`来初始化`Tensor`,调用了`Parameter`的相关方法。如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-08T02:02:52.696158Z", - "start_time": "2021-02-08T02:02:52.677707Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Parameter (name=Parameter)\n", - "Parameter (name=x_clone)\n", - "Parameter (name=Parameter)\n", - "Parameter (name=Parameter)\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from mindspore import Tensor, Parameter\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common.initializer import initializer\n", - "\n", - "x = Parameter(default_input=initializer('ones', [1, 2, 3], mstype.float32))\n", - "\n", - "print(x)\n", - "x_clone = x.clone()\n", - "x_clone.name = \"x_clone\"\n", - "print(x_clone)\n", - "\n", - "print(x.init_data())\n", - "print(x.set_data(data=Tensor(np.arange(2*3).reshape((1, 2, 3)))))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## ParameterTuple\n", - "\n", - "继承于`tuple`,用于保存多个`Parameter`,通过`__new__(cls, iterable)`传入一个存放`Parameter`的迭代器进行构造,提供`clone`接口进行克隆。\n", - "\n", - "下例构造了一个`ParameterTuple`对象,并进行了克隆。如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-08T02:02:52.715969Z", - "start_time": "2021-02-08T02:02:52.697174Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(Parameter (name=x), Parameter (name=y), Parameter (name=z)) \n", - "\n", - "(Parameter (name=params_copy.x), Parameter (name=params_copy.y), Parameter 
(name=params_copy.z))\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from mindspore import Tensor, Parameter, ParameterTuple\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common.initializer import initializer\n", - "\n", - "x = Parameter(default_input=Tensor(np.arange(2*3).reshape((2, 3))), name=\"x\")\n", - "y = Parameter(default_input=initializer('ones', [1, 2, 3], mstype.float32), name='y')\n", - "z = Parameter(default_input=2.0, name='z')\n", - "params = ParameterTuple((x, y, z))\n", - "params_copy = params.clone(\"params_copy\")\n", - "print(params, \"\\n\")\n", - "print(params_copy)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/performance_optimization.md b/docs/programming_guide/source_zh_cn/performance_optimization.md deleted file mode 100644 index 12cb72d2321c79559dd4bf441a5674c91883a699..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/performance_optimization.md +++ /dev/null @@ -1,19 +0,0 @@ -# 性能优化 - - - -- [性能优化](#性能优化) - - - - - -MindSpore提供了多种性能优化方法,用户可根据实际情况,利用它们来提升训练和推理的性能。 - -| 优化阶段 | 优化方法 | 支持情况 | -| --- | --- | --- | -| 训练 | [分布式并行训练](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_tutorials.html) | Ascend、GPU | -| | [混合精度](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/enable_mixed_precision.html) | Ascend、GPU | -| | [图算融合](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/enable_graph_kernel_fusion.html) | Ascend、GPU | -| | 
[梯度累积](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_gradient_accumulation.html) | GPU | -| 推理 | [训练后量化](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/post_training_quantization.html) | Lite | diff --git a/docs/programming_guide/source_zh_cn/pipeline.ipynb b/docs/programming_guide/source_zh_cn/pipeline.ipynb deleted file mode 100644 index 0e53e2728a48b15837d85c362d6dec8a49c09aa4..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/pipeline.ipynb +++ /dev/null @@ -1,395 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 数据处理\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/pipeline.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_pipeline.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9waXBlbGluZS5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "数据是深度学习的基础,良好的数据输入可以对整个深度神经网络训练起到非常积极的作用。在训练前对已加载的数据集进行数据处理,可以解决诸如数据量过大、样本分布不均等问题,从而获得更加优化的数据输入。\n", - "\n", - "MindSpore的各个数据集类都为用户提供了多种数据处理算子,用户可以构建数据处理pipeline定义需要使用的数据处理操作,数据即可在训练过程中像水一样源源不断地经过数据处理pipeline流向训练系统。\n", - "\n", - "MindSpore目前支持的部分常用数据处理算子如下表所示,更多数据处理操作参见[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, 
- "source": [ - "| 数据处理算子 | 算子说明 |\n", - "| :---- | :---- |\n", - "| shuffle | 对数据集进行混洗,随机打乱数据顺序。 |\n", - "| map | 提供自定义函数或算子,作用于数据集的指定列数据。 |\n", - "| batch | 对数据集进行分批,可以减少训练轮次,加速训练过程。 |\n", - "| repeat | 对数据集进行重复,达到扩充数据量的目的。 |\n", - "| zip | 将两个数据集进行列拼接,合并为一个数据集。 |\n", - "| concat | 将两个数据集进行行拼接,合并为一个数据集。 |" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据处理算子\n", - "\n", - "### shuffle\n", - "\n", - "对数据集进行混洗,随机打乱数据顺序。\n", - "\n", - "> 设定的`buffer_size`越大,混洗程度越大,但时间、计算资源消耗也会更大。\n", - "\n", - "![shuffle](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/shuffle.png)\n", - "\n", - "下面的样例先构建了一个随机数据集,然后对其进行混洗操作,最后展示了混洗后的数据结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'data': Tensor(shape=[3], dtype=Int64, value= [0, 1, 2])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [2, 3, 4])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [3, 4, 5])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [4, 5, 6])}\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "ds.config.set_seed(0)\n", - "\n", - "def generator_func():\n", - " for i in range(5):\n", - " yield (np.array([i, i+1, i+2]),)\n", - "\n", - "dataset1 = ds.GeneratorDataset(generator_func, [\"data\"])\n", - "\n", - "dataset1 = dataset1.shuffle(buffer_size=2)\n", - "for data in dataset1.create_dict_iterator():\n", - " print(data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### map\n", - "\n", - "将指定的函数或算子作用于数据集的指定列数据,实现数据映射操作。用户可以自定义映射函数,也可以直接使用`c_transforms`或`py_transforms`中的算子针对图像、文本数据进行数据增强。\n", - "\n", - "> 更多数据增强的使用说明,参见编程指南中[数据增强](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/augmentation.html)章节。\n", - "\n", - 
"![map](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/map.png)\n", - "\n", - "下面的样例先构建了一个随机数据集,然后定义了数据翻倍的映射函数并将其作用于数据集,最后对比展示了映射前后的数据结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'data': Tensor(shape=[3], dtype=Int64, value= [0, 1, 2])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [2, 3, 4])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [3, 4, 5])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [4, 5, 6])}\n", - "------ after processing ------\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [0, 2, 4])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [2, 4, 6])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [4, 6, 8])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [ 6, 8, 10])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [ 8, 10, 12])}\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "def generator_func():\n", - " for i in range(5):\n", - " yield (np.array([i, i+1, i+2]),)\n", - "\n", - "def pyfunc(x):\n", - " return x*2\n", - "\n", - "dataset = ds.GeneratorDataset(generator_func, [\"data\"])\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(data)\n", - "\n", - "print(\"------ after processing ------\")\n", - "\n", - "dataset = dataset.map(operations=pyfunc, input_columns=[\"data\"])\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### batch\n", - "\n", - "将数据集分批,分别输入到训练系统中进行训练,可以减少训练轮次,达到加速训练过程的目的。\n", - "\n", - "![batch](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/batch.png)\n", - "\n", - "下面的样例先构建了一个随机数据集,然后分别展示了保留多余数据与否的数据集分批结果,其中批大小为2。" - ] - }, - { - 
"cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'data': Tensor(shape=[2, 3], dtype=Int64, value=\n", - "[[0, 1, 2],\n", - " [1, 2, 3]])}\n", - "{'data': Tensor(shape=[2, 3], dtype=Int64, value=\n", - "[[2, 3, 4],\n", - " [3, 4, 5]])}\n", - "{'data': Tensor(shape=[1, 3], dtype=Int64, value=\n", - "[[4, 5, 6]])}\n", - "------ drop remainder ------\n", - "{'data': Tensor(shape=[2, 3], dtype=Int64, value=\n", - "[[0, 1, 2],\n", - " [1, 2, 3]])}\n", - "{'data': Tensor(shape=[2, 3], dtype=Int64, value=\n", - "[[2, 3, 4],\n", - " [3, 4, 5]])}\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "def generator_func():\n", - " for i in range(5):\n", - " yield (np.array([i, i+1, i+2]),)\n", - "\n", - "dataset1 = ds.GeneratorDataset(generator_func, [\"data\"])\n", - "\n", - "dataset1 = dataset1.batch(batch_size=2, drop_remainder=False)\n", - "for data in dataset1.create_dict_iterator():\n", - " print(data)\n", - "\n", - "print(\"------ drop remainder ------\")\n", - "\n", - "dataset2 = ds.GeneratorDataset(generator_func, [\"data\"])\n", - "\n", - "dataset2 = dataset2.batch(batch_size=2, drop_remainder=True)\n", - "for data in dataset2.create_dict_iterator():\n", - " print(data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### repeat\n", - "\n", - "对数据集进行重复,达到扩充数据量的目的。\n", - "\n", - "> `repeat`和`batch`操作的顺序会影响训练batch的数量,建议将`repeat`置于`batch`之后。\n", - "\n", - "![repeat](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/repeat.png)\n", - "\n", - "下面的样例先构建了一个随机数据集,然后将其重复2次,最后展示了重复后的数据结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'data': Tensor(shape=[3], dtype=Int64, value= [0, 1, 2])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [1, 2, 
3])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [2, 3, 4])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [3, 4, 5])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [4, 5, 6])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [0, 1, 2])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [2, 3, 4])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [3, 4, 5])}\n", - "{'data': Tensor(shape=[3], dtype=Int64, value= [4, 5, 6])}\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "def generator_func():\n", - " for i in range(5):\n", - " yield (np.array([i, i+1, i+2]),)\n", - "\n", - "dataset1 = ds.GeneratorDataset(generator_func, [\"data\"])\n", - "\n", - "dataset1 = dataset1.repeat(count=2)\n", - "for data in dataset1.create_dict_iterator():\n", - " print(data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### zip\n", - "\n", - "将两个数据集进行列拼接,合并为一个数据集。\n", - "\n", - "> 如果两个数据集的列名相同,则不会合并,请注意列的命名。\n", - "> \n", - "> 如果两个数据集的行数不同,合并后的行数将和较小行数保持一致。\n", - "\n", - " ![zip](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/zip.png)\n", - "\n", - "下面的样例先构建了两个不同样本数的随机数据集,然后将其进行列拼接,最后展示了拼接后的数据结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'data1': Tensor(shape=[3], dtype=Int64, value= [0, 1, 2]), 'data2': Tensor(shape=[2], dtype=Int64, value= [1, 2])}\n", - "{'data1': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3]), 'data2': Tensor(shape=[2], dtype=Int64, value= [1, 2])}\n", - "{'data1': Tensor(shape=[3], dtype=Int64, value= [2, 3, 4]), 'data2': Tensor(shape=[2], dtype=Int64, value= [1, 2])}\n", - "{'data1': Tensor(shape=[3], dtype=Int64, value= [3, 4, 5]), 'data2': Tensor(shape=[2], dtype=Int64, value= [1, 2])}\n" - ] - } - ], - "source": [ - 
"import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "def generator_func():\n", - " for i in range(7):\n", - " yield (np.array([i, i+1, i+2]),)\n", - "\n", - "def generator_func2():\n", - " for i in range(4):\n", - " yield (np.array([1, 2]),)\n", - "\n", - "dataset1 = ds.GeneratorDataset(generator_func, [\"data1\"])\n", - "dataset2 = ds.GeneratorDataset(generator_func2, [\"data2\"])\n", - "\n", - "dataset3 = ds.zip((dataset1, dataset2))\n", - "\n", - "for data in dataset3.create_dict_iterator():\n", - " print(data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### concat\n", - "\n", - "将两个数据集进行行拼接,合并为一个数据集。\n", - "\n", - "> 输入数据集中的列名,列数据类型和列数据的排列应相同。\n", - "\n", - "![concat](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/images/concat.png)\n", - "\n", - "下面的样例先构建了两个随机数据集,然后将其进行行拼接,最后展示了拼接后的数据结果。值得一提的是,使用`+`运算符也能达到同样的效果。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'data1': Tensor(shape=[3], dtype=Int64, value= [0, 0, 0])}\n", - "{'data1': Tensor(shape=[3], dtype=Int64, value= [0, 0, 0])}\n", - "{'data1': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3])}\n", - "{'data1': Tensor(shape=[3], dtype=Int64, value= [1, 2, 3])}\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "def generator_func():\n", - " for i in range(2):\n", - " yield (np.array([0, 0, 0]),)\n", - "\n", - "def generator_func2():\n", - " for i in range(2):\n", - " yield (np.array([1, 2, 3]),)\n", - "\n", - "dataset1 = ds.GeneratorDataset(generator_func, [\"data1\"])\n", - "dataset2 = ds.GeneratorDataset(generator_func2, [\"data1\"])\n", - "\n", - "dataset3 = dataset1.concat(dataset2)\n", - "\n", - "for data in dataset3.create_dict_iterator():\n", - " print(data)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": 
"python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/probability.ipynb b/docs/programming_guide/source_zh_cn/probability.ipynb deleted file mode 100644 index a6c8144f8e0a91f8377760381180ea0594805486..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/probability.ipynb +++ /dev/null @@ -1,1621 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 深度概率编程库\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/probability.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_probability.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9wcm9iYWJpbGl0eS5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore深度概率编程的目标是将深度学习和贝叶斯学习结合,包括概率分布、概率分布映射、深度概率网络、概率推断算法、贝叶斯层、贝叶斯转换和贝叶斯工具箱,面向不同的开发者。对于专业的贝叶斯学习用户,提供概率采样、推理算法和模型构建库;另一方面,为不熟悉贝叶斯深度学习的用户提供了高级的API,从而不用更改深度学习编程逻辑,即可利用贝叶斯模型。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概率分布\n", - "\n", - 
"概率分布(`mindspore.nn.probability.distribution`)是概率编程的基础。`Distribution`类提供多样的概率统计接口,例如概率密度函数`pdf`、累积密度函数`cdf`、散度计算`kl_loss`、抽样`sample`等。现有的概率分布实例包括高斯分布,伯努利分布,指数型分布,几何分布和均匀分布。\n", - "\n", - "### 概率分布类\n", - "\n", - "- `Distribution`:所有概率分布的基类。\n", - "\n", - "- `Bernoulli`:伯努利分布。参数为试验成功的概率。\n", - "\n", - "- `Exponential`:指数型分布。参数为率参数。\n", - "\n", - "- `Geometric`:几何分布。参数为一次伯努利试验成功的概率。\n", - "\n", - "- `Normal`:正态(高斯)分布。参数为均值和标准差。\n", - "\n", - "- `Uniform`:均匀分布。参数为数轴上的最小值和最大值。\n", - "\n", - "- `Categorical`:类别分布。每种类别出现的概率。\n", - "\n", - "- `LogNormal`:对数正态分布。参数为位置参数和规模参数。\n", - "\n", - "- `Gumbel`: 耿贝尔极值分布。参数为位置参数和规模参数。\n", - "\n", - "- `Logistic`:逻辑斯谛分布。参数为位置参数和规模参数。\n", - "\n", - "- `Cauchy`:柯西分布。参数为位置参数和规模参数。\n", - "\n", - "#### Distribution基类\n", - "\n", - "`Distribution`是所有概率分布的基类。\n", - "\n", - "接口介绍:`Distribution`类支持的函数包括`prob`、`log_prob`、`cdf`、`log_cdf`、`survival_function`、`log_survival`、`mean`、`sd`、`var`、`entropy`、`kl_loss`、`cross_entropy`和`sample`。分布不同,所需传入的参数也不同。只有在派生类中才能使用,由派生类的函数实现决定参数。\n", - "\n", - "- `prob`:概率密度函数(PDF)/ 概率质量函数(PMF)。\n", - "\n", - "- `log_prob`:对数似然函数。\n", - "\n", - "- `cdf`:累积分布函数(CDF)。\n", - "\n", - "- `log_cdf`:对数累积分布函数。\n", - "\n", - "- `survival_function`:生存函数。\n", - "\n", - "- `log_survival`:对数生存函数。\n", - "\n", - "- `mean`:均值。\n", - "\n", - "- `sd`:标准差。\n", - "\n", - "- `var`:方差。\n", - "\n", - "- `entropy`:熵。\n", - "\n", - "- `kl_loss`:Kullback-Leibler 散度。\n", - "\n", - "- `cross_entropy`:两个概率分布的交叉熵。\n", - "\n", - "- `sample`:概率分布的随机抽样。\n", - "\n", - "- `get_dist_args`:概率分布在网络中使用的参数。\n", - "\n", - "- `get_dist_type`:概率分布的类型。\n", - "\n", - "#### 伯努利分布(Bernoulli)\n", - "\n", - "伯努利分布,继承自`Distribution`类。\n", - "\n", - "属性:\n", - "\n", - "- `Bernoulli.probs`:返回伯努利试验成功的概率,类型为`Tensor`。\n", - "\n", - "`Distribution`基类调用`Bernoulli`中私有接口以实现基类中的公有接口。`Bernoulli`支持的公有接口为:\n", - "\n", - "- `mean`,`mode`,`var`,`sd`:可选择传入 试验成功的概率`probs1`。\n", - "\n", - "- `entropy`:可选择传入 试验成功的概率`probs1`。\n", - "\n", - "- 
`cross_entropy`,`kl_loss`:必须传入`dist`和`probs1_b`。`dist`为另一分布的类型,目前只支持此处为“Bernoulli”。`probs1_b`为分布`b`的试验成功概率。可选择传入分布`a`的参数`probs1_a`。\n", - "\n", - "- `prob`,`log_prob`,`cdf`,`log_cdf`,`survival_function`,`log_survival`:必须传入`value`。可选择传入试验成功的概率`probs`。\n", - "\n", - "- `sample`:可选择传入样本形状`shape`和试验成功的概率`probs1`。\n", - "\n", - "- `get_dist_args`:可选择传入试验成功的概率`probs`。返回值为`(probs,)`,类型为tuple。\n", - "\n", - "- `get_dist_type`:返回“Bernoulli”。\n", - "\n", - "#### 指数分布(Exponential)\n", - "\n", - "指数分布,继承自`Distribution`类。\n", - "\n", - "属性:\n", - "\n", - "- `Exponential.rate`:返回分布的率参数,类型为`Tensor`。\n", - "\n", - "`Distribution`基类调用`Exponential`私有接口以实现基类中的公有接口。`Exponential`支持的公有接口为:\n", - "\n", - "- `mean`,`mode`,`var`,`sd`:可选择传入率参数`rate`。\n", - "\n", - "- `entropy`:可选择传入率参数`rate`。\n", - "\n", - "- `cross_entropy`,`kl_loss`:必须传入`dist`和`rate_b`。`dist`为另一分布的类型的名称, 目前只支持此处为“Exponential”。`rate_b`为分布`b`的率参数。可选择传入分布`a`的参数`rate_a`。\n", - "\n", - "- `prob`,`log_prob`,`cdf`,`log_cdf`,`survival_function`,`log_survival`:必须传入`value`。可选择传入率参数`rate`。\n", - "\n", - "- `sample`:可选择传入样本形状`shape`和率参数`rate`。返回值为`(rate,)`,类型为tuple。\n", - "\n", - "- `get_dist_args`:可选择传入率参数`rate`。返回值为`(rate,)`,类型为tuple。\n", - "\n", - "- `get_dist_type`:返回“Exponential”。\n", - "\n", - "#### 几何分布(Geometric)\n", - "\n", - "几何分布,继承自`Distribution`类。\n", - "\n", - "属性:\n", - "\n", - "- `Geometric.probs`:返回伯努利试验成功的概率,类型为`Tensor`。\n", - "\n", - "`Distribution`基类调用`Geometric`中私有接口以实现基类中的公有接口。`Geometric`支持的公有接口为:\n", - "\n", - "- `mean`,`mode`,`var`,`sd`:可选择传入试验成功的概率`probs1`。\n", - "\n", - "- `entropy`:可选择传入 试验成功的概率`probs1`。\n", - "\n", - "- `cross_entropy`,`kl_loss`:必须传入`dist`和`probs1_b`。`dist`为另一分布的类型的名称,目前只支持此处为“Geometric”。`probs1_b`为分布`b`的试验成功概率。可选择传入分布`a`的参数`probs1_a`。\n", - "\n", - "- `prob`,`log_prob`,`cdf`,`log_cdf`,`survival_function`,`log_survival`:必须传入`value`。可选择传入试验成功的概率`probs1`。\n", - "\n", - "- `sample`:可选择传入样本形状`shape`和试验成功的概率`probs1`。\n", - "\n", - "- 
`get_dist_args`:可选择传入试验成功的概率`probs1`。返回值为`(probs1,)`,类型为tuple。\n", - "\n", - "- `get_dist_type`:返回“Geometric”。\n", - "\n", - "#### 正态分布(Normal)\n", - "\n", - "正态(高斯)分布,继承自`Distribution`类。\n", - "\n", - "`Distribution`基类调用`Normal`中私有接口以实现基类中的公有接口。`Normal`支持的公有接口为:\n", - "\n", - "- `mean`,`mode`,`var`,`sd`:可选择传入分布的参数均值`mean`和标准差`sd`。\n", - "\n", - "\n", - "- `entropy`:可选择传入分布的参数均值`mean`和标准差`sd`。\n", - "\n", - "- `cross_entropy`,`kl_loss`:必须传入`dist`,`mean_b`和`sd_b`。`dist`为另一分布的类型的名称,目前只支持此处为“Normal”。\n", - "`mean_b`和`sd_b`为分布`b`的均值和标准差。可选择传入分布的参数`a`均值`mean_a`和标准差`sd_a`。\n", - "\n", - "- `prob`,`log_prob`,`cdf`,`log_cdf`,`survival_function`,`log_survival`:必须传入`value`。可选择分布的参数包括均值`mean_a`和标准差`sd_a`。\n", - "\n", - "- `sample`:可选择传入样本形状`shape`和分布的参数包括均值`mean_a`和标准差`sd_a`。\n", - "\n", - "- `get_dist_args`:可选择传入分布的参数均值`mean`和标准差`sd`。返回值为`(mean, sd)`,类型为tuple。\n", - "\n", - "- `get_dist_type`:返回“Normal”。\n", - "\n", - "#### 均匀分布(Uniform)\n", - "\n", - "均匀分布,继承自`Distribution`类。\n", - "\n", - "属性:\n", - "\n", - "- `Uniform.low`:返回分布的最小值,类型为`Tensor`。\n", - "\n", - "- `Uniform.high`:返回分布的最大值,类型为`Tensor`。\n", - "\n", - "`Distribution`基类调用`Uniform`以实现基类中的公有接口。`Uniform`支持的公有接口为:\n", - "\n", - "- `mean`,`mode`,`var`,`sd`:可选择传入分布的参数最大值`high`和最小值`low`。\n", - "\n", - "- `entropy`:可选择传入分布的参数最大值`high`和最小值`low`。\n", - "\n", - "- `cross_entropy`,`kl_loss`:必须传入`dist`,`high_b`和`low_b`。`dist`为另一分布的类型的名称,目前只支持此处为“Uniform”。`high_b`和`low_b`为分布`b`的参数。可选择传入分布`a`的参数即最大值`high_a`和最小值`low_a`。\n", - "\n", - "- `prob`,`log_prob`,`cdf`,`log_cdf`,`survival_function`,`log_survival`:必须传入`value`。可选择传入分布的参数最大值`high`和最小值`low`。\n", - "\n", - "- `sample`:可选择传入`shape`和分布的参数即最大值`high`和最小值`low`。\n", - "\n", - "- `get_dist_args`:可选择传入分布的参数最大值`high`和最小值`low`。返回值为`(low, high)`,类型为tuple。\n", - "\n", - "- `get_dist_type`:返回“Uniform”。\n", - "\n", - "#### 多类别分布(Categorical)\n", - "\n", - "多类别分布,继承自`Distribution`类。\n", - "\n", - "属性:\n", - "\n", - "- `Categorical.probs`:返回各种类别的概率,类型为`Tensor`。\n", - "\n", - 
"`Distribution`基类调用`Categorical`以实现基类中的公有接口。`Categorical`支持的公有接口为:\n", - "\n", - "- `mean`,`mode`,`var`,`sd`:可选择传入分布的参数类别概率`probs`。\n", - "\n", - "- `entropy`:可选择传入分布的参数类别概率`probs`。\n", - "\n", - "- `cross_entropy`,`kl_loss`:必须传入`dist`,`probs_b`。`dist`为另一分布的类型的名称,目前只支持此处为“Categorical”。`probs_b`为分布`b`的参数。可选择传入分布`a`的参数即`probs_a`。\n", - "\n", - "- `prob`,`log_prob`,`cdf`,`log_cdf`,`survival_function`,`log_survival`:必须传入`value`。可选择传入分布的参数类别概率`probs`。\n", - "\n", - "- `sample`:可选择传入`shape`和类别概率`probs`。\n", - "\n", - "- `get_dist_args`:可选择传入分布的参数类别概率`probs`。返回值为`(probs,)`,类型为tuple。\n", - "\n", - "- `get_dist_type`:返回“Categorical”。\n", - "\n", - "#### 对数正态分布(LogNormal)\n", - "\n", - "对数正态分布,继承自`TransformedDistribution`类,由`Exp`Bijector 和`Normal`Distribution 构成。\n", - "\n", - "属性:\n", - "\n", - "- `LogNormal.loc`:返回分布的位置参数,类型为`Tensor`。\n", - "\n", - "- `LogNormal.scale`:返回分布的规模参数,类型为`Tensor`。\n", - "\n", - "`Distribution`基类调用`LogNormal`及`TransformedDistribution`中私有接口以实现基类中的公有接口。`LogNormal`支持的公有接口为:\n", - "\n", - "- `mean`,`mode`,`var`,`sd`:可选择传入分布的位置参数`loc`和规模参数`scale`。\n", - "\n", - "- `entropy`:可选择传入分布的位置参数`loc`和规模参数`scale`。\n", - "\n", - "- `cross_entropy`,`kl_loss`:必须传入`dist`,`loc_b`和`scale_b`。`dist`为另一分布的类型的名称,目前只支持此处为“LogNormal”。`loc_b`和`scale_b`为分布`b`的均值和标准差。可选择传入分布的参数`a`均值`loc_a`和标准差`sclae_a`。\n", - "\n", - "- `prob`,`log_prob`,`cdf`,`log_cdf`,`survival_function`,`log_survival`:必须传入`value`。可选择分布的参数包括均值`loc_a`和标准差`scale_a`。`Distribution`基类调用`TransformedDistribution`私有接口。\n", - "\n", - "- `sample`:可选择传入样本形状`shape`和分布的参数包括均值`loc_a`和标准差`scale_a`。`Distribution`基类调用`TransformedDistribution`私有接口。\n", - "\n", - "- `get_dist_args`:可选择传入分布的位置参数`loc`和规模参数`scale`。返回值为`(loc, scale)`,类型为tuple。\n", - "\n", - "- `get_dist_type`:返回“LogNormal”。\n", - "\n", - "#### 柯西分布(Cauchy)\n", - "\n", - "柯西分布,继承自`Distribution`类。\n", - "\n", - "属性:\n", - "\n", - "- `Cauchy.loc`:返回分布的位置参数,类型为`Tensor`。\n", - "\n", - "- `Cauchy.scale`:返回分布的规模参数,类型为`Tensor`。\n", - "\n", - 
"`Distribution`基类调用`Cauchy`中私有接口以实现基类中的公有接口。`Cauchy`支持的公有接口为:\n", - "\n", - "- `entropy`:可选择传入分布的位置参数`loc`和规模参数`scale`。\n", - "\n", - "- `cross_entropy`,`kl_loss`:必须传入`dist`,`loc_b`和`scale_b`。`dist`为另一分布的类型的名称,目前只支持此处为“Cauchy”。`loc_b`和`scale_b`为分布`b`的位置参数和规模参数。可选择传入分布的参数`a`位置`loc_a`和规模`scale_a`。\n", - "\n", - "- `prob`,`log_prob`,`cdf`,`log_cdf`,`survival_function`,`log_survival`:必须传入`value`。可选择传入分布的位置参数`loc`和规模参数`scale`。\n", - "\n", - "- `sample`:可选择传入样本形状`shape`和分布的参数包括分布的位置参数`loc`和规模参数`scale`。\n", - "\n", - "- `get_dist_args`:可选择传入分布的位置参数`loc`和规模参数`scale`。返回值为`(loc, scale)`,类型为tuple。\n", - "\n", - "- `get_dist_type`:返回“Cauchy”。\n", - "\n", - "#### 耿贝尔极值分布(Gumbel)\n", - "\n", - "耿贝尔极值分布,继承自`TransformedDistribution`类,由`GumbelCDF`Bijector和`Uniform`Distribution 构成。\n", - "\n", - "属性:\n", - "\n", - "- `Gumbel.loc`:返回分布的位置参数,类型为`Tensor`。\n", - "\n", - "- `Gumbel.scale`:返回分布的规模参数,类型为`Tensor`。\n", - "\n", - "`Distribution`基类调用`Gumbel`中私有接口以实现基类中的公有接口。`Gumbel`支持的公有接口为:\n", - "\n", - "- `mean`,`mode`,`var`,`sd`:无参数 。\n", - "\n", - "- `entropy`:无参数 。\n", - "\n", - "- `cross_entropy`,`kl_loss`:必须传入`dist`,`loc_b`和`scale_b`。`dist`为另一分布的类型的名称,目前只支持此处为“Gumbel”。`loc_b`和`scale_b`为分布`b`的位置参数和规模参数。\n", - "\n", - "- `prob`,`log_prob`,`cdf`,`log_cdf`,`survival_function`,`log_survival`:必须传入`value`。\n", - "\n", - "- `sample`:可选择传入样本形状`shape`。\n", - "\n", - "- `get_dist_args`:可选择传入分布的位置参数`loc`和规模参数`scale`。返回值为`(loc, scale)`,类型为tuple。\n", - "\n", - "- `get_dist_type`:返回“Gumbel”。\n", - "\n", - "#### 逻辑斯谛分布(Logistic)\n", - "\n", - "逻辑斯谛分布,继承自`Distribution`类。\n", - "\n", - "属性:\n", - "\n", - "- `Logistic.loc`:返回分布的位置参数,类型为`Tensor`。\n", - "\n", - "- `Logistic.scale`:返回分布的规模参数,类型为`Tensor`。\n", - "\n", - "`Distribution`基类调用`logistic`中私有接口以实现基类中的公有接口。`Logistic`支持的公有接口为:\n", - "\n", - "- `mean`,`mode`,`var`,`sd`:可选择传入分布的位置参数`loc`和规模参数`scale`。\n", - "\n", - "- `entropy`:可选择传入分布的位置参数`loc`和规模参数`scale`。\n", - "\n", - "- 
- "`Distribution` 基类调用`Poisson`中私有接口以实现基类中的公有接口。`Poisson`支持的公有接口为:\n", - "\n", - "- `mean`,`mode`,`var`,`sd`:可选择传入分布的率参数`rate`。\n", - "\n", - "- `prob`,`log_prob`,`cdf`,`log_cdf`,`survival_function`,`log_survival`:必须传入`value`。可选择传入分布的率参数`rate`。\n", - "\n", - "- `sample`:可选择传入样本形状`shape`和分布的率参数`rate`。\n",
- "- `cross_entropy`,`kl_loss`:必须传入`dist`,`concentration1_b`和`concentration0_b`。`dist`为另一分布的类型的名称,目前只支持此处为“Beta”。`concentration1_b`和`concentration0_b`为分布`b`的参数。可选择传入分布`a`的参数即`concentration1_a`和`concentration0_a`。\n", - "\n", - "- `sample`:可选择传入样本形状`shape`和分布的参数`concentration1`和参数`concentration0`。\n",
my_normal.mean()\n", - "var = my_normal.var()\n", - "entropy = my_normal.entropy()\n", - "\n", - "value = Tensor([-0.5, 0.0, 0.5], dtype=mstype.float32)\n", - "prob = my_normal.prob(value)\n", - "cdf = my_normal.cdf(value)\n", - "\n", - "mean_b = Tensor(1.0, dtype=mstype.float32)\n", - "sd_b = Tensor(2.0, dtype=mstype.float32)\n", - "kl = my_normal.kl_loss('Normal', mean_b, sd_b)\n", - "\n", - "# get the distribution args as a tuple\n", - "dist_arg = my_normal.get_dist_args()\n", - "\n", - "print(\"mean: \", mean)\n", - "print(\"var: \", var)\n", - "print(\"entropy: \", entropy)\n", - "print(\"prob: \", prob)\n", - "print(\"cdf: \", cdf)\n", - "print(\"kl: \", kl)\n", - "print(\"dist_arg: \", dist_arg)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 概率分布类在图模式下的应用\n", - "\n", - "在图模式下,`Distribution`子类可用在网络中。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "pdf: [0.35206532 0.3989423 0.35206532]\n", - "kl: 0.5\n" - ] - } - ], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore import Tensor\n", - "from mindspore import dtype as mstype\n", - "import mindspore.context as context\n", - "import mindspore.nn.probability.distribution as msd\n", - "context.set_context(mode=context.GRAPH_MODE)\n", - "\n", - "class Net(nn.Cell):\n", - " def __init__(self):\n", - " super(Net, self).__init__()\n", - " self.normal = msd.Normal(0.0, 1.0, dtype=mstype.float32)\n", - "\n", - " def construct(self, value, mean, sd):\n", - " pdf = self.normal.prob(value)\n", - " kl = self.normal.kl_loss(\"Normal\", mean, sd)\n", - " return pdf, kl\n", - "\n", - "net = Net()\n", - "value = Tensor([-0.5, 0.0, 0.5], dtype=mstype.float32)\n", - "mean = Tensor(1.0, dtype=mstype.float32)\n", - "sd = Tensor(1.0, dtype=mstype.float32)\n", - "pdf, kl = net(value, mean, sd)\n", - "print(\"pdf: \", pdf)\n", - "print(\"kl: \", kl)" - ] - }, - { - "cell_type": 
"markdown", - "metadata": {}, - "source": [ - "### TransformedDistribution类接口设计\n", - "\n", - "`TransformedDistribution`继承自`Distribution`,是可通过映射f(x)变化得到的数学分布的基类。其接口包括:\n", - "\n", - "1. 属性\n", - "\n", - " - `bijector`:返回分布的变换方法。\n", - " \n", - " - `distribution`:返回原始分布。\n", - " \n", - " - `is_linear_transformation`:返回线性变换标志。\n", - "\n", - "2. 接口函数(以下接口函数的参数与构造函数中`distribution`的对应接口的参数相同)。\n", - "\n", - " - `cdf`:累积分布函数(CDF)。\n", - " \n", - " - `log_cdf`:对数累积分布函数。\n", - " \n", - " - `survival_function`:生存函数。\n", - " \n", - " - `log_survival`:对数生存函数。\n", - " \n", - " - `prob`:概率密度函数(PDF)/ 概率质量函数(PMF)。\n", - " \n", - " - `log_prob`:对数似然函数。\n", - " \n", - " - `sample`:随机取样。\n", - " \n", - " - `mean`:无参数。只有当`Bijector.is_constant_jacobian=true`时可调用。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### PyNative模式下调用TransformedDistribution实例\n", - "\n", - "`TransformedDistribution`子类可在PyNative模式下使用。\n", - "\n", - "这里构造一个`TransformedDistribution`实例,使用`Normal`分布作为需要变换的分布类,使用`Exp`作为映射变换,可以生成`LogNormal`分布。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "TransformedDistribution<\n", - " (_bijector): Exp\n", - " (_distribution): Normal\n", - " >\n", - "underlying distribution:\n", - " Normal\n", - "bijector:\n", - " Exp\n", - "cdf:\n", - " [0.7558914 0.9462397 0.9893489]\n", - "sample:\n", - " (3, 2)\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.nn as nn\n", - "import mindspore.nn.probability.bijector as msb\n", - "import mindspore.nn.probability.distribution as msd\n", - "from mindspore import Tensor, dtype, context\n", - "\n", - "context.set_context(mode=context.PYNATIVE_MODE)\n", - "\n", - "normal = msd.Normal(0.0, 1.0, dtype=dtype.float32)\n", - "exp = msb.Exp()\n", - "LogNormal = msd.TransformedDistribution(exp, normal, seed=0, name=\"LogNormal\")\n", - "\n", - "# compute cumulative distribution function\n", 
当构造`TransformedDistribution`映射变换的`is_constant_jacobian = true`时(如`ScalarAffine`),构造的`TransformedDistribution`实例可以直接使用`mean`接口计算均值,例如:
"context.set_context(mode=context.GRAPH_MODE)\n", - "\n", - "class Net(nn.Cell):\n", - " def __init__(self, shape, dtype=dtype.float32, seed=0, name='transformed_distribution'):\n", - " super(Net, self).__init__()\n", - " # create TransformedDistribution distribution\n", - " self.exp = msb.Exp()\n", - " self.normal = msd.Normal(0.0, 1.0, dtype=dtype)\n", - " self.lognormal = msd.TransformedDistribution(self.exp, self.normal, seed=seed, name=name)\n", - " self.shape = shape\n", - "\n", - " def construct(self, value):\n", - " cdf = self.lognormal.cdf(value)\n", - " sample = self.lognormal.sample(self.shape)\n", - " return cdf, sample\n", - "\n", - "shape = (2, 3)\n", - "net = Net(shape=shape, name=\"LogNormal\")\n", - "x = np.array([2.0, 3.0, 4.0, 5.0]).astype(np.float32)\n", - "tx = Tensor(x, dtype=dtype.float32)\n", - "cdf, sample = net(tx)\n", - "print(\"cdf: \", cdf)\n", - "print(\"sample: \", sample.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概率分布映射\n", - "\n", - "Bijector(`mindspore.nn.probability.bijector`)是概率编程的基本组成部分。Bijector描述了一种随机变量的变换方法,可以通过一个已有的随机变量X和一个映射函数f生成一个新的随机变量$Y = f(x)$。\n", - "\n", - "`Bijector`提供了映射相关的四种变换方法。它可以当做算子直接使用,也可以作用在某个随机变量`Distribution`类实例上生成新的随机变量的`Distribution`类实例。\n", - "\n", - "### Bijector类接口设计\n", - "\n", - "#### Bijector基类\n", - "\n", - "`Bijector`类是所有概率分布映射的基类。其接口包括:\n", - "\n", - "1. 属性\n", - "\n", - " - `name`:返回`name`的值。\n", - " \n", - " - `is_dtype`:返回`dtype`的值。\n", - " \n", - " - `parameter`:返回`parameter`的值。\n", - " \n", - " - `is_constant_jacobian`:返回`is_constant_jacobian`的值。\n", - " \n", - " - `is_injective`:返回`is_injective`的值。\n", - "\n", - "2. 
$Y = g(X) = \frac{\log(1 + e^{sharpness \times X})}{sharpness}$
映射函数\n", - " \n", - " - `forward`:正向映射,输入为`Tensor`。\n", - " \n", - " - `inverse`:反向映射,输入为`Tensor`。\n", - " \n", - " - `forward_log_jacobian`:正向映射的导数的对数,输入为`Tensor`。\n", - " \n", - " - `inverse_log_jacobian`:反向映射的导数的对数,输入为`Tensor`。\n", - "\n", - "#### 耿贝尔累计密度函数映射(GumbelCDF)\n", - "\n", - "`GumbelCDF`做如下变量替换:$Y = g(X) = \\exp(-\\exp(-\\frac{X - loc}{scale}))$。其接口包括:\n", - "\n", - "1. 属性\n", - " \n", - " - `loc`:返回`loc`的值,类型为`Tensor`。\n", - " \n", - " - `scale`:返回`scale`的值,类型为`Tensor`。\n", - "\n", - "2. 映射函数\n", - " \n", - " - `forward`:正向映射,输入为`Tensor`。\n", - " \n", - " - `inverse`:反向映射,输入为`Tensor`。\n", - " \n", - " - `forward_log_jacobian`:正向映射的导数的对数,输入为`Tensor`。\n", - " \n", - " - `inverse_log_jacobian`:反向映射的导数的对数,输入为`Tensor`。\n", - "\n", - "#### 逆映射(Invert)\n", - "\n", - "`Invert`对一个映射做逆变换,其接口包括:\n", - "\n", - "1. 属性\n", - " \n", - " - `bijector`:返回初始化时使用的`Bijector`,类型为`Bijector`。\n", - "\n", - "2. 映射函数\n", - " \n", - " - `forward`:正向映射,输入为`Tensor`。\n", - " \n", - " - `inverse`:反向映射,输入为`Tensor`。\n", - " \n", - " - `forward_log_jacobian`:正向映射的导数的对数,输入为`Tensor`。\n", - " \n", - " - `inverse_log_jacobian`:反向映射的导数的对数,输入为`Tensor`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### PyNative模式下调用Bijector实例\n", - "\n", - "在执行之前,我们需要导入需要的库文件包。双射类最主要的库是`mindspore.nn.probability.bijector`,导入后我们使用`msb`作为库的缩写并进行调用。\n", - "\n", - "下面我们以`PowerTransform`为例。创建一个指数为2的`PowerTransform`对象。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "PowerTransform\n", - "forward: [2.236068 2.6457515 3. 3.3166249]\n", - "inverse: [ 1.5 4. 
7.5 12.000001]\n", - "forward_log_jacobian: [-0.804719 -0.9729551 -1.0986123 -1.1989477]\n", - "inverse_log_jacobian: [0.6931472 1.0986123 1.3862944 1.609438 ]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.nn as nn\n", - "import mindspore.nn.probability.bijector as msb\n", - "import mindspore.context as context\n", - "from mindspore import Tensor, dtype\n", - "\n", - "context.set_context(mode=context.PYNATIVE_MODE)\n", - "\n", - "powertransform = msb.PowerTransform(power=2.)\n", - "\n", - "x = np.array([2.0, 3.0, 4.0, 5.0], dtype=np.float32)\n", - "tx = Tensor(x, dtype=dtype.float32)\n", - "forward = powertransform.forward(tx)\n", - "inverse = powertransform.inverse(tx)\n", - "forward_log_jaco = powertransform.forward_log_jacobian(tx)\n", - "inverse_log_jaco = powertransform.inverse_log_jacobian(tx)\n", - "\n", - "print(powertransform)\n", - "print(\"forward: \", forward)\n", - "print(\"inverse: \", inverse)\n", - "print(\"forward_log_jacobian: \", forward_log_jaco)\n", - "print(\"inverse_log_jacobian: \", inverse_log_jaco)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 图模式下调用Bijector实例\n", - "\n", - "在图模式下,`Bijector`子类可用在网络中。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "forward: [2.236068 2.6457515 3. 3.3166249]\n", - "inverse: [ 1.5 4. 
7.5 12.000001]\n", - "forward_log_jacobian: [-0.804719 -0.9729551 -1.0986123 -1.1989477]\n", - "inverse_log_jacobian: [0.6931472 1.0986123 1.3862944 1.609438 ]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.nn as nn\n", - "from mindspore import Tensor\n", - "from mindspore import dtype as mstype\n", - "import mindspore.context as context\n", - "import mindspore.nn.probability.bijector as msb\n", - "context.set_context(mode=context.GRAPH_MODE)\n", - "\n", - "class Net(nn.Cell):\n", - " def __init__(self):\n", - " super(Net, self).__init__()\n", - " # create a PowerTransform bijector\n", - " self.powertransform = msb.PowerTransform(power=2.)\n", - "\n", - " def construct(self, value):\n", - " forward = self.powertransform.forward(value)\n", - " inverse = self.powertransform.inverse(value)\n", - " forward_log_jaco = self.powertransform.forward_log_jacobian(value)\n", - " inverse_log_jaco = self.powertransform.inverse_log_jacobian(value)\n", - " return forward, inverse, forward_log_jaco, inverse_log_jaco\n", - "\n", - "net = Net()\n", - "x = np.array([2.0, 3.0, 4.0, 5.0]).astype(np.float32)\n", - "tx = Tensor(x, dtype=mstype.float32)\n", - "forward, inverse, forward_log_jaco, inverse_log_jaco = net(tx)\n", - "print(\"forward: \", forward)\n", - "print(\"inverse: \", inverse)\n", - "print(\"forward_log_jacobian: \", forward_log_jaco)\n", - "print(\"inverse_log_jacobian: \", inverse_log_jaco)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 深度概率网络\n", - "\n", - "使用MindSpore深度概率编程库(`mindspore.nn.probability.dpn`)来构造变分自编码器(VAE)进行推理尤为简单。我们只需要自定义编码器和解码器(DNN模型),调用VAE或CVAE接口形成其派生网络,然后调用ELBO接口进行优化,最后使用SVI接口进行变分推理。这样做的好处是,不熟悉变分推理的用户可以像构建DNN模型一样来构建概率模型,而熟悉的用户可以调用这些接口来构建更为复杂的概率模型。VAE的接口在`mindspore.nn.probability.dpn`下面,dpn代表的是Deep probabilistic network,这里提供了一些基本的深度概率网络的接口,例如VAE。\n", - "\n", - "### VAE\n", - "\n", - 
"首先,我们需要先自定义encoder和decoder,调用`mindspore.nn.probability.dpn.VAE`接口来构建VAE网络,我们除了传入encoder和decoder之外,还需要传入encoder输出变量的维度hidden size,以及VAE网络存储潜在变量的维度latent size,一般latent size会小于hidden size。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "import mindspore.ops as ops\n", - "from mindspore.nn.probability.dpn import VAE\n", - "\n", - "IMAGE_SHAPE = (-1, 1, 32, 32)\n", - "\n", - "class Encoder(nn.Cell):\n", - " def __init__(self):\n", - " super(Encoder, self).__init__()\n", - " self.fc1 = nn.Dense(1024, 800)\n", - " self.fc2 = nn.Dense(800, 400)\n", - " self.relu = nn.ReLU()\n", - " self.flatten = nn.Flatten()\n", - "\n", - " def construct(self, x):\n", - " x = self.flatten(x)\n", - " x = self.fc1(x)\n", - " x = self.relu(x)\n", - " x = self.fc2(x)\n", - " x = self.relu(x)\n", - " return x\n", - "\n", - "\n", - "class Decoder(nn.Cell):\n", - " def __init__(self):\n", - " super(Decoder, self).__init__()\n", - " self.fc1 = nn.Dense(400, 1024)\n", - " self.sigmoid = nn.Sigmoid()\n", - " self.reshape = ops.Reshape()\n", - "\n", - " def construct(self, z):\n", - " z = self.fc1(z)\n", - " z = self.reshape(z, IMAGE_SHAPE)\n", - " z = self.sigmoid(z)\n", - " return z\n", - "\n", - "\n", - "encoder = Encoder()\n", - "decoder = Decoder()\n", - "vae = VAE(encoder, decoder, hidden_size=400, latent_size=20)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### ConditionalVAE\n", - "\n", - "类似地,ConditionalVAE与VAE的使用方法比较相近,不同的是,ConditionalVAE利用了数据集的标签信息,属于有监督学习算法,其生成效果一般会比VAE好。\n", - "\n", - "首先,先自定义encoder和decoder,并调用`mindspore.nn.probability.dpn.ConditionalVAE`接口来构建ConditionalVAE网络,这里的encoder和VAE的不同,因为需要传入数据集的标签信息;decoder和上述的一样。ConditionalVAE接口的传入则还需要传入数据集的标签类别个数,其余和VAE接口一样。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "import mindspore.ops as ops\n", - "from 
mindspore.nn.probability.dpn import ConditionalVAE\n", - "\n", - "IMAGE_SHAPE = (-1, 1, 32, 32)\n", - "\n", - "class Encoder(nn.Cell):\n", - " def __init__(self, num_classes):\n", - " super(Encoder, self).__init__()\n", - " self.fc1 = nn.Dense(1024 + num_classes, 400)\n", - " self.relu = nn.ReLU()\n", - " self.flatten = nn.Flatten()\n", - " self.concat = ops.Concat(axis=1)\n", - " self.one_hot = nn.OneHot(depth=num_classes)\n", - "\n", - " def construct(self, x, y):\n", - " x = self.flatten(x)\n", - " y = self.one_hot(y)\n", - " input_x = self.concat((x, y))\n", - " input_x = self.fc1(input_x)\n", - " input_x = self.relu(input_x)\n", - " return input_x\n", - "\n", - "class Decoder(nn.Cell):\n", - " def __init__(self):\n", - " super(Decoder, self).__init__()\n", - " self.fc1 = nn.Dense(400, 1024)\n", - " self.sigmoid = nn.Sigmoid()\n", - " self.reshape = ops.Reshape()\n", - "\n", - " def construct(self, z):\n", - " z = self.fc1(z)\n", - " z = self.reshape(z, IMAGE_SHAPE)\n", - " z = self.sigmoid(z)\n", - " return z\n", - "\n", - "encoder = Encoder(num_classes=10)\n", - "decoder = Decoder()\n", - "cvae = ConditionalVAE(encoder, decoder, hidden_size=400, latent_size=20, num_classes=10)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "加载数据集,我们可以使用Mnist数据集,具体的数据加载和预处理过程可以参考这里[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html),这里会用到`create_dataset`函数创建数据迭代器。\n", - "\n", - "直接执行下面代码,会自动进行训练集的下载与解压。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "os.system(\"wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/MNIST_Data.zip\")\n", - "os.system(\"unzip -o MNIST_Data.zip -d ./datasets\")\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "加载数据集,并进行数据增强操作。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - 
"source": [ - "import mindspore.dataset as ds\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\" create dataset for train or test\n", - " Args:\n", - " data_path: Data path\n", - " batch_size: The number of data records in each group\n", - " repeat_size: The number of replicated data records\n", - " num_parallel_workers: The number of parallel workers\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define operation parameters\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - "\n", - " # define map operations\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Resize images to (32, 32)\n", - " rescale_op = CV.Rescale(rescale, shift) # rescale images\n", - " hwc2chw_op = CV.HWC2CHW() # change shape from (height, width, channel) to (channel, height, width) to fit network.\n", - " type_cast_op = C.TypeCast(mstype.int32) # change data type of label to int32 to fit network\n", - "\n", - " # apply map operations on images\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=resize_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet 
train script\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds\n", - "\n", - "image_path = \"./datasets/MNIST_Data/train\"\n", - "ds_train = create_dataset(image_path, 128, 1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "接下来,需要用到infer接口进行VAE网络的变分推断。\n", - "\n", - "## 概率推断算法\n", - "\n", - "调用ELBO接口(`mindspore.nn.probability.infer.ELBO`)来定义VAE网络的损失函数,调用`WithLossCell`封装VAE网络和损失函数,并定义优化器,之后传入SVI接口(`mindspore.nn.probability.infer.SVI`)。SVI的`run`函数可理解为VAE网络的训练,可以指定训练的`epochs`,返回结果为训练好的网络;`get_train_loss`函数可以返回训练好后模型的loss。" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "46.33266196291671\n" - ] - } - ], - "source": [ - "from mindspore.nn.probability.infer import ELBO, SVI\n", - "\n", - "net_loss = ELBO(latent_prior='Normal', output_prior='Normal')\n", - "net_with_loss = nn.WithLossCell(vae, net_loss)\n", - "optimizer = nn.Adam(params=vae.trainable_params(), learning_rate=0.001)\n", - "\n", - "vi = SVI(net_with_loss=net_with_loss, optimizer=optimizer)\n", - "vae = vi.run(train_dataset=ds_train, epochs=10)\n", - "trained_loss = vi.get_train_loss()\n", - "print(trained_loss)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "最后,得到训练好的VAE网络后,我们可以使用`vae.generate_sample`生成新样本,需要传入待生成样本的个数,及生成样本的shape,shape需要保持和原数据集中的样本shape一样;当然,我们也可以使用`vae.reconstruct_sample`重构原来数据集中的样本,来测试VAE网络的重建能力。" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The shape of the generated sample is (64, 1, 32, 32)\n" - ] - } - ], - "source": [ - "generated_sample = vae.generate_sample(64, IMAGE_SHAPE)\n", - "for sample in ds_train.create_dict_iterator():\n", - " sample_x = Tensor(sample['image'], dtype=mstype.float32)\n", - " 
reconstructed_sample = vae.reconstruct_sample(sample_x)\n", - "print('The shape of the generated sample is ', generated_sample.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "ConditionalVAE训练过程和VAE的过程类似,但需要注意的是使用训练好的ConditionalVAE网络生成新样本和重建新样本时,需要输入标签信息,例如下面生成的新样本就是64个0-7的数字。" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The shape of the generated sample is (64, 1, 32, 32)\n" - ] - } - ], - "source": [ - "sample_label = Tensor([i for i in range(0, 8)]*8, dtype=mstype.int32)\n", - "generated_sample = cvae.generate_sample(sample_label, 64, IMAGE_SHAPE)\n", - "for sample in ds_train.create_dict_iterator():\n", - " sample_x = Tensor(sample['image'], dtype=mstype.float32)\n", - " sample_y = Tensor(sample['label'], dtype=mstype.int32)\n", - " reconstructed_sample = cvae.reconstruct_sample(sample_x, sample_y)\n", - "print('The shape of the generated sample is ', generated_sample.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "如果希望新生成的样本更好,更清晰,用户可以自己定义更复杂的encoder和decoder,这里的示例只用了两层全连接层,仅供示例的指导。\n", - "\n", - "## 贝叶斯层\n", - "\n", - "下面的范例使用MindSpore的`nn.probability.bnn_layers`中的API实现BNN图片分类模型。MindSpore的`nn.probability.bnn_layers`中的API包括`NormalPrior`,`NormalPosterior`,`ConvReparam`,`DenseReparam`,`DenseLocalReparam`和`WithBNNLossCell`。BNN与DNN的最大区别在于,BNN层的weight和bias不再是确定的值,而是服从一个分布。其中,`NormalPrior`,`NormalPosterior`分别用来生成服从正态分布的先验分布和后验分布;`ConvReparam`和`DenseReparam`分别是使用reparameteration方法实现的贝叶斯卷积层和全连接层;`DenseLocalReparam`是使用Local Reparameterization方法实现的贝叶斯全连接层;`WithBNNLossCell`是用来封装BNN和损失函数的。\n", - "\n", - "如何使用`nn.probability.bnn_layers`中的API构建贝叶斯神经网络并实现图片分类,可以参考教程[使用贝叶斯网络](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_deep_probability_programming.html#id3)。\n", - "\n", - "## 贝叶斯转换\n", - "\n", - 
"对于不熟悉贝叶斯模型的研究人员,MDP提供了贝叶斯转换接口(`mindspore.nn.probability.transform`),支持DNN (Deep Neural Network)模型一键转换成BNN (Bayesian Neural Network)模型。\n", - "\n", - "其中的模型转换API`TransformToBNN`的`__init__`函数定义如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [], - "source": [ - "class TransformToBNN:\n", - " def __init__(self, trainable_dnn, dnn_factor=1, bnn_factor=1):\n", - " net_with_loss = trainable_dnn.network\n", - " self.optimizer = trainable_dnn.optimizer\n", - " self.backbone = net_with_loss.backbone_network\n", - " self.loss_fn = getattr(net_with_loss, \"_loss_fn\")\n", - " self.dnn_factor = dnn_factor\n", - " self.bnn_factor = bnn_factor\n", - " self.bnn_loss_file = None" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "参数`trainable_bnn`是经过`TrainOneStepCell`包装的可训练DNN模型,`dnn_factor`和`bnn_factor`分别为由损失函数计算得到的网络整体损失的系数和每个贝叶斯层的KL散度的系数。\n", - "API`TransformToBNN`主要实现了两个功能:\n", - "\n", - "- 功能一:转换整个模型\n", - "\n", - " `transform_to_bnn_model`方法可以将整个DNN模型转换为BNN模型。其定义如下:\n", - "\n", - " ```python\n", - " def transform_to_bnn_model(self,\n", - " get_dense_args=lambda dp: {\"in_channels\": dp.in_channels, \"has_bias\": dp.has_bias,\n", - " \"out_channels\": dp.out_channels, \"activation\": dp.activation},\n", - " get_conv_args=lambda dp: {\"in_channels\": dp.in_channels, \"out_channels\": dp.out_channels,\n", - " \"pad_mode\": dp.pad_mode, \"kernel_size\": dp.kernel_size,\n", - " \"stride\": dp.stride, \"has_bias\": dp.has_bias,\n", - " \"padding\": dp.padding, \"dilation\": dp.dilation,\n", - " \"group\": dp.group},\n", - " add_dense_args=None,\n", - " add_conv_args=None):\n", - " r\"\"\"\n", - " Transform the whole DNN model to BNN model, and wrap BNN model by TrainOneStepCell.\n", - "\n", - " Args:\n", - " get_dense_args (function): The arguments gotten from the DNN full connection layer. 
Default: lambda dp:\n", - " {\"in_channels\": dp.in_channels, \"out_channels\": dp.out_channels, \"has_bias\": dp.has_bias}.\n", - " get_conv_args (function): The arguments gotten from the DNN convolutional layer. Default: lambda dp:\n", - " {\"in_channels\": dp.in_channels, \"out_channels\": dp.out_channels, \"pad_mode\": dp.pad_mode,\n", - " \"kernel_size\": dp.kernel_size, \"stride\": dp.stride, \"has_bias\": dp.has_bias}.\n", - " add_dense_args (dict): The new arguments added to BNN full connection layer. Default: {}.\n", - " add_conv_args (dict): The new arguments added to BNN convolutional layer. Default: {}.\n", - "\n", - " Returns:\n", - " Cell, a trainable BNN model wrapped by TrainOneStepCell.\n", - " \"\"\"\n", - "\n", - " ```\n", - "\n", - " 参数`get_dense_args`指定从DNN模型的全连接层中获取哪些参数,默认值是DNN模型的全连接层和BNN的全连接层所共有的参数,参数具体的含义可以参考[API说明文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.Dense.html);`get_conv_args`指定从DNN模型的卷积层中获取哪些参数,默认值是DNN模型的卷积层和BNN的卷积层所共有的参数,参数具体的含义可以参考[API说明文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.Conv2d.html);参数`add_dense_args`和`add_conv_args`分别指定了要为BNN层指定哪些新的参数值。需要注意的是,`add_dense_args`中的参数不能与`get_dense_args`重复,`add_conv_args`和`get_conv_args`也是如此。\n", - "\n", - "- 功能二:转换指定类型的层\n", - "\n", - " `transform_to_bnn_layer`方法可以将DNN模型中指定类型的层(`nn.Dense`或者`nn.Conv2d`)转换为对应的贝叶斯层。其定义如下:\n", - "\n", - " ```python\n", - " def transform_to_bnn_layer(self, dnn_layer, bnn_layer, get_args=None, add_args=None):\n", - " r\"\"\"\n", - " Transform a specific type of layers in DNN model to corresponding BNN layer.\n", - "\n", - " Args:\n", - " dnn_layer_type (Cell): The type of DNN layer to be transformed to BNN layer. The optional values are\n", - " nn.Dense, nn.Conv2d.\n", - " bnn_layer_type (Cell): The type of BNN layer to be transformed to. The optional values are\n", - " DenseReparameterization, ConvReparameterization.\n", - " get_args (dict): The arguments gotten from the DNN layer. 
Default: None.\n", - " add_args (dict): The new arguments added to BNN layer. Default: None.\n", - "\n", - " Returns:\n", - " Cell, a trainable model wrapped by TrainOneStepCell, whose sprcific type of layer is transformed to the corresponding bayesian layer.\n", - " \"\"\"\n", - " ```\n", - "\n", - " 参数`dnn_layer`指定将哪个类型的DNN层转换成BNN层,`bnn_layer`指定DNN层将转换成哪个类型的BNN层,`get_args`和`add_args`分别指定从DNN层中获取哪些参数和要为BNN层的哪些参数重新赋值。\n", - "\n", - "如何在MindSpore中使用API`TransformToBNN`可以参考教程[DNN一键转换成BNN](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_deep_probability_programming.html#dnnbnn)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 贝叶斯工具箱\n", - "\n", - "### 不确定性评估\n", - "\n", - "贝叶斯神经网络的优势之一就是可以获取不确定性,MDP在上层提供了不确定性估计的工具箱(`mindspore.nn.probability.toolbox`),用户可以很方便地使用该工具箱计算不确定性。不确定性意味着深度学习模型对预测结果的不确定程度。目前,大多数深度学习算法只能给出高置信度的预测结果,而不能判断预测结果的确定性,不确定性主要有两种类型:偶然不确定性和认知不确定性。\n", - "\n", - "- 偶然不确定性(Aleatoric Uncertainty):描述数据中的内在噪声,即无法避免的误差,这个现象不能通过增加采样数据来削弱。\n", - "\n", - "- 认知不确定性(Epistemic Uncertainty):模型自身对输入数据的估计可能因为训练不佳、训练数据不够等原因而不准确,可以通过增加训练数据等方式来缓解。\n", - "\n", - "不确定性评估工具箱的接口如下:\n", - "\n", - "- `model`:待评估不确定性的已训练好的模型。\n", - "\n", - "- `train_dataset`:用于训练的数据集,迭代器类型。\n", - "\n", - "- `task_type`:模型的类型,字符串,输入“regression”或者“classification”。\n", - "\n", - "- `num_classes`:如果是分类模型,需要指定类别的标签数量。\n", - "\n", - "- `epochs`:用于训练不确定模型的迭代数。\n", - "\n", - "- `epi_uncer_model_path`:用于存储或加载计算认知不确定性的模型的路径。\n", - "\n", - "- `ale_uncer_model_path`:用于存储或加载计算偶然不确定性的模型的路径。\n", - "\n", - "- `save_model`:布尔类型,是否需要存储模型。\n", - "\n", - "在使用前,需要先训练好模型,以LeNet5为例,使用方式如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 1, loss is 0.11922359\n", - "epoch: 1 step: 2, loss is 0.21615174\n", - "epoch: 1 step: 3, loss is 0.18693243\n", - "epoch: 1 step: 4, loss is 0.14668123\n", - "epoch: 1 step: 5, 
loss is 0.32135463\n", - "epoch: 1 step: 6, loss is 0.086044185\n", - "... ...\n", - "epoch: 1 step: 1872, loss is 0.07358544\n", - "epoch: 1 step: 1873, loss is 0.006983331\n", - "epoch: 1 step: 1874, loss is 0.122501254\n", - "epoch: 1 step: 1875, loss is 0.02729987\n", - "The shape of epistemic uncertainty is (32, 10)\n", - "The shape of aleatoric uncertainty is (32,)\n" - ] - } - ], - "source": [ - "import os\n", - "import mindspore.nn as nn\n", - "from mindspore import Tensor\n", - "from mindspore.common.initializer import Normal\n", - "from mindspore.nn.probability.toolbox.uncertainty_evaluation import UncertaintyEvaluation\n", - "from mindspore import load_checkpoint, load_param_into_net\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16*5*5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x)\n", - " return x\n", - "\n", - "if __name__ == '__main__':\n", - " # get trained model checkpoint_lenet-1_1875.ckpt\n", - " os.system(\"wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/checkpoint_lenet-1_1875.zip\")\n", - " os.system(\"unzip -o 
checkpoint_lenet-1_1875.zip\")\n", - " \n", - " network = LeNet5()\n", - " param_dict = load_checkpoint('checkpoint_lenet-1_1875.ckpt')\n", - " load_param_into_net(network, param_dict)\n", - " # get train and eval dataset\n", - " ds_train = create_dataset('./datasets/MNIST_Data/train')\n", - " ds_eval = create_dataset('./datasets/MNIST_Data/test')\n", - " evaluation = UncertaintyEvaluation(model=network,\n", - " train_dataset=ds_train,\n", - " task_type='classification',\n", - " num_classes=10,\n", - " epochs=1,\n", - " epi_uncer_model_path=None,\n", - " ale_uncer_model_path=None,\n", - " save_model=False)\n", - " for eval_data in ds_eval.create_dict_iterator():\n", - " eval_data = Tensor(eval_data['image'], mstype.float32)\n", - " epistemic_uncertainty = evaluation.eval_epistemic_uncertainty(eval_data)\n", - " aleatoric_uncertainty = evaluation.eval_aleatoric_uncertainty(eval_data)\n", - " print('The shape of epistemic uncertainty is ', epistemic_uncertainty.shape)\n", - " print('The shape of aleatoric uncertainty is ', aleatoric_uncertainty.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`eval_epistemic_uncertainty`计算的是认知不确定性,也叫模型不确定性,对于每一个样本的每个预测标签都会有一个不确定值;\n", - "\n", - "`eval_aleatoric_uncertainty`计算的是偶然不确定性,也叫数据不确定性,对于每一个样本都会有一个不确定值。\n", - "\n", - "uncertainty的值大于等于0,越大表示不确定性越高。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 异常检测\n", - "\n", - "异常检测(Anomaly Detection)可以找到与“主要数据分布不同”的异常值,比如在数据预处理中找出异常点,有助于提升模型的拟合能力。\n", - "\n", - "MDP在上层基于变分自编码器(VAE)提供了异常检测的工具箱(`VAEAnomalyDetection`),与VAE的使用类似,我们只需要自定义编码器和解码器(DNN模型),初始化相关参数,便可以使用该工具箱检测异常点。\n", - "\n", - "基于VAE的异常检测工具箱的接口如下:\n", - "\n", - "- `encoder`:编码器(Cell类型)。\n", - "\n", - "- `decoder`:解码器(Cell类型)。\n", - "\n", - "- `hidden_size`:编码器输出张量的大小。\n", - "\n", - "- `latent_size`:隐空间的大小。\n", - "\n", - "构建VAE模型的编码器`EncoderVAE`和解码器`DecoderVAE`,设置`hidden_size`和`latent_size`,进行类的初始化,之后传入数据集可以进行异常点的检测。" - ] - }, - { - "cell_type": "code", - 
"execution_count": 17, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "30.0681 False\n", - "33.119724 False\n", - "34.285515 False\n", - "34.41275 False\n", - "35.29742 False\n", - "... ...\n", - "33.438026 False\n", - "34.576874 False\n", - "33.033386 False\n", - "35.304153 False\n", - "35.387157 False\n" - ] - } - ], - "source": [ - "import mindspore.nn as nn\n", - "import mindspore.ops as ops\n", - "from mindspore.nn.probability.toolbox import VAEAnomalyDetection\n", - "\n", - "class EncoderVAE(nn.Cell):\n", - " def __init__(self):\n", - " super(EncoderVAE, self).__init__()\n", - " self.fc1 = nn.Dense(1024, 800)\n", - " self.fc2 = nn.Dense(800, 400)\n", - " self.relu = nn.ReLU()\n", - " self.flatten = nn.Flatten()\n", - "\n", - " def construct(self, x):\n", - " x = self.flatten(x)\n", - " x = self.fc1(x)\n", - " x = self.relu(x)\n", - " x = self.fc2(x)\n", - " x = self.relu(x)\n", - " return x\n", - "\n", - "\n", - "class DecoderVAE(nn.Cell):\n", - " def __init__(self):\n", - " super(DecoderVAE, self).__init__()\n", - " self.fc1 = nn.Dense(400, 1024)\n", - " self.sigmoid = nn.Sigmoid()\n", - " self.reshape = ops.Reshape()\n", - "\n", - " def construct(self, z):\n", - " z = self.fc1(z)\n", - " z = self.reshape(z, IMAGE_SHAPE)\n", - " z = self.sigmoid(z)\n", - " return z\n", - "\n", - "if __name__ == '__main__':\n", - "\n", - " encodervae = EncoderVAE()\n", - " decodervae = DecoderVAE()\n", - " ood = VAEAnomalyDetection(encoder=encodervae, decoder=decodervae,\n", - " hidden_size=400, latent_size=20)\n", - " ds_train = create_dataset('./datasets/MNIST_Data/train')\n", - " ds_eval = create_dataset('./datasets/MNIST_Data/test')\n", - " ood.train(ds_train, epochs=5)\n", - " for sample in ds_train.create_dict_iterator(output_numpy=True, num_epochs=1):\n", - " sample_x = Tensor(sample['image'], dtype=mstype.float32)\n", - " score = ood.predict_outlier_score(sample_x)\n", - " outlier = 
ood.predict_outlier(sample_x)\n", - " print(score, outlier)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`score`输出的是样本的异常分数;`outlier`是布尔类型,True代表是异常点,False代表不是异常点。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/run.ipynb b/docs/programming_guide/source_zh_cn/run.ipynb deleted file mode 100644 index 915484881e764a4e259d4ae961fa778eadf7220a..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/run.ipynb +++ /dev/null @@ -1,504 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 运行方式\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/run.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_run.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV9ydW4uaXB5bmI=&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "执行主要有三种方式:单算子、普通函数和网络训练模型。\n", - "\n", - "> 本文示例适用于GPU和Ascend环境。\n", 
- "\n", - "## 执行单算子\n", - "\n", - "执行单个算子,并打印相关结果。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[[[ 0.01647821 0.05310077 0.05310077 0.05310077 0.05118286]\n", - " [ 0.03007141 0.0657572 0.0657572 0.0657572 0.04350833]\n", - " [ 0.03007141 0.0657572 0.0657572 0.0657572 0.04350833]\n", - " [ 0.03007141 0.0657572 0.0657572 0.0657572 0.04350833]\n", - " [ 0.01847598 0.04713529 0.04713529 0.04713529 0.03720935]]\n", - "\n", - " [[-0.03362034 -0.06124294 -0.06124294 -0.06124294 -0.04334928]\n", - " [-0.02676596 -0.08040315 -0.08040315 -0.08040315 -0.06846539]\n", - " [-0.02676596 -0.08040315 -0.08040315 -0.08040315 -0.06846539]\n", - " [-0.02676596 -0.08040315 -0.08040315 -0.08040315 -0.06846539]\n", - " [-0.00557975 -0.06808633 -0.06808633 -0.06808633 -0.08389233]]\n", - "\n", - " [[-0.01602227 0.02266152 0.02266152 0.02266152 0.06030601]\n", - " [-0.06764769 -0.02966945 -0.02966945 -0.02966945 0.04861854]\n", - " [-0.06764769 -0.02966945 -0.02966945 -0.02966945 0.04861854]\n", - " [-0.06764769 -0.02966945 -0.02966945 -0.02966945 0.04861854]\n", - " [-0.06528193 -0.03500666 -0.03500666 -0.03500666 0.02858584]]\n", - "\n", - " [[-0.03102187 -0.03846825 -0.03846825 -0.03846825 -0.00858424]\n", - " [-0.04270145 -0.070785 -0.070785 -0.070785 -0.05362675]\n", - " [-0.04270145 -0.070785 -0.070785 -0.070785 -0.05362675]\n", - " [-0.04270145 -0.070785 -0.070785 -0.070785 -0.05362675]\n", - " [-0.01230605 -0.04999261 -0.04999261 -0.04999261 -0.04718029]]]]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.nn as nn\n", - "from mindspore import context, Tensor\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "\n", - "conv = nn.Conv2d(3, 4, 3, bias_init='zeros')\n", - "input_data = Tensor(np.ones([1, 3, 5, 5]).astype(np.float32))\n", - "output = 
conv(input_data)\n", - "print(output.asnumpy())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 由于weight初始化存在随机因素,实际输出结果可能不同,仅供参考。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 执行普通函数\n", - "\n", - "将若干算子组合成一个函数,然后直接通过函数调用的方式执行这些算子,并打印相关结果,如下例所示。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[3. 3. 3.]\n", - " [3. 3. 3.]\n", - " [3. 3. 3.]]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from mindspore import context, Tensor\n", - "import mindspore.ops as ops\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "\n", - "def add_func(x, y):\n", - " z = ops.add(x, y)\n", - " z = ops.add(z, x)\n", - " return z\n", - "\n", - "x = Tensor(np.ones([3, 3], dtype=np.float32))\n", - "y = Tensor(np.ones([3, 3], dtype=np.float32))\n", - "output = add_func(x, y)\n", - "print(output.asnumpy())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 执行网络模型\n", - "\n", - "MindSpore的[Model接口](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.html#mindspore.Model)是用于训练和验证的高级接口。可以将有训练或推理功能的layers组合成一个对象,通过调用`train`、`eval`、`predict`接口可以分别实现训练、推理和预测功能。\n", - "\n", - "用户可以根据实际需要传入网络、损失函数和优化器等初始化Model接口,还可以通过配置`amp_level`实现混合精度,配置`metrics`实现模型评估。\n", - "\n", - "> 执行网络模型会在执行目录下生成`kernel_meta`目录,并在执行过程中保存网络编译生成的算子缓存文件到此目录,包括`.o`,`.info`和`.json`文件。若用户再次执行相同的网络模型,或者仅有部分变化,MindSpore会自动调用`kernel_meta`目录下可复用的算子缓存文件,显著减少网络编译时间,提升执行性能。详细内容请参考[算子增量编译](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/incremental_operator_build.html)。\n", - "\n", - "在执行网络之前,先将所需要的数据集下载并解压缩到指定位置:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── 
t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 执行训练模型\n", - "\n", - "通过调用Model的train接口可以实现训练。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Training ==============\n", - "epoch: 1 step: 375, loss is 2.2898183\n", - "epoch: 1 step: 750, loss is 2.2777305\n", - "epoch: 1 step: 1125, loss is 0.27802905\n", - "epoch: 1 step: 1500, loss is 0.032973606\n", - "epoch: 1 step: 1875, loss is 0.06105463\n" - ] - } - ], - "source": [ - "import os\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "from mindspore.dataset.vision import Inter\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.transforms.c_transforms as CT\n", - "import mindspore.nn as nn\n", - "from mindspore import context, Model\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common.initializer import Normal\n", - 
"from mindspore.train.callback import LossMonitor, ModelCheckpoint, CheckpointConfig\n", - "\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\"\n", - " create dataset for train or test\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # define map operations\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = CT.TypeCast(mstype.int32)\n", - "\n", - " # apply map operations on images\n", - " mnist_ds = mnist_ds.map(input_columns=\"label\", operations=type_cast_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=resize_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=rescale_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=rescale_nml_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=hwc2chw_op, num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds\n", - "\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"\n", - " Lenet network\n", - "\n", - " Args:\n", - " num_class (int): Num classes. 
Default: 10.\n", - " num_channel (int): Num channels. Default: 1.\n", - "\n", - " Returns:\n", - " Tensor, output tensor\n", - " Examples:\n", - " >>> LeNet(num_class=10)\n", - "\n", - " \"\"\"\n", - "\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x)\n", - " return x\n", - "\n", - "\n", - "if __name__ == \"__main__\":\n", - " context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "\n", - " model_path = \"./models/ckpt/mindspore_run/\"\n", - " os.system(\"rm -rf {0}*.ckpt {0}*.meta {0}*.pb\".format(model_path))\n", - "\n", - " ds_train_path = \"./datasets/MNIST_Data/train/\"\n", - " ds_train = create_dataset(ds_train_path, 32)\n", - "\n", - " network = LeNet5(10)\n", - " net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - " net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9)\n", - " config_ck = CheckpointConfig(save_checkpoint_steps=1875, keep_checkpoint_max=5)\n", - " ckpoint_cb = ModelCheckpoint(prefix=\"checkpoint_lenet\", directory=model_path, config=config_ck)\n", - " model = Model(network, net_loss, net_opt)\n", - "\n", - " print(\"============== Starting Training ==============\")\n", - " model.train(1, ds_train, 
callbacks=[LossMonitor(375), ckpoint_cb], dataset_sink_mode=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 示例中用到的MNIST数据集的获取方法,可以参照[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)的下载数据集部分,下同。\n", - ">\n", - "> 使用PyNative模式调试, 请参考[使用PyNative模式调试](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/debug_in_pynative_mode.html), 包括单算子、普通函数和网络训练模型的执行。\n", - ">\n", - "> 使用自由控制循环的迭代次数、遍历数据集等,可以参照官网编程指南《[训练](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/train.html#%E8%87%AA%E5%AE%9A%E4%B9%89%E8%AE%AD%E7%BB%83%E5%BE%AA%E7%8E%AF)》的自定义循环训练部分。\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 执行推理模型\n", - "\n", - "通过调用Model的eval接口可以实现推理。为了方便评估模型的好坏,可以在Model接口初始化的时候设置评估指标Metric。\n", - "\n", - "Metric是用于评估模型好坏的指标。常见的主要有Accuracy、Fbeta、Precision、Recall和TopKCategoricalAccuracy等,通常情况下,一种模型指标无法全面的评估模型的好坏,一般会结合多个指标共同作用对模型进行评估。\n", - "\n", - "常用的内置评估指标:\n", - "\n", - "- `Accuracy`(准确率):是一个用于评估分类模型的指标。通俗来说,准确率是指我们的模型预测正确的结果所占的比例。 公式:$$Accuracy = (TP+TN)/(TP+TN+FP+FN)$$\n", - "\n", - "- `Precision`(精确率):在被识别为正类别的样本中,确实为正类别的比例。公式:$$Precision = TP/(TP+FP)$$\n", - "\n", - "- `Recall`(召回率):在所有正类别样本中,被正确识别为正类别的比例。 公式:$$Recall = TP/(TP+FN)$$\n", - "\n", - "- `Fbeta`(调和均值):综合考虑precision和recall的调和均值。\n", - "公式:$$F_\\beta = (1 + \\beta^2) \\cdot \\frac{precisiont \\cdot recall}{(\\beta^2 \\cdot precision) + recall}$$\n", - "\n", - "- `TopKCategoricalAccuracy`(多分类TopK准确率):计算TopK分类准确率。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Testing ==============\n", - "============== {'Accuracy': 0.960136217948718, 'Precision': array([0.95763547, 0.98059965, 0.99153439, 0.93333333, 0.97322348,\n", - " 0.99385749, 0.98502674, 0.93179724, 0.8974359 , 0.97148676])} ==============\n" - ] - } - ], - 
"source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.transforms.c_transforms as CT\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.nn as nn\n", - "from mindspore import context, Model, load_checkpoint, load_param_into_net\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common.initializer import Normal\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore.nn import Accuracy, Precision\n", - "\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"\n", - " Lenet network\n", - "\n", - " Args:\n", - " num_class (int): Num classes. Default: 10.\n", - " num_channel (int): Num channels. Default: 1.\n", - "\n", - " Returns:\n", - " Tensor, output tensor\n", - " Examples:\n", - " >>> LeNet(num_class=10)\n", - "\n", - " \"\"\"\n", - "\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x)\n", - " return x\n", - "\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\"\n", - " create dataset for train or test\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 
1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # define map operations\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = CT.TypeCast(mstype.int32)\n", - "\n", - " # apply map operations on images\n", - " mnist_ds = mnist_ds.map(input_columns=\"label\", operations=type_cast_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=resize_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=rescale_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=rescale_nml_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=hwc2chw_op, num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds\n", - "\n", - "\n", - "if __name__ == \"__main__\":\n", - " context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "\n", - " model_path = \"./models/ckpt/mindspore_run/\"\n", - " ds_eval_path = \"./datasets/MNIST_Data/test/\"\n", - " network = LeNet5(10)\n", - " net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - " repeat_size = 1\n", - " net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9)\n", - " model = Model(network, net_loss, net_opt, metrics={\"Accuracy\": Accuracy(), \"Precision\": Precision()})\n", - "\n", - " 
print(\"============== Starting Testing ==============\")\n", - " param_dict = load_checkpoint(model_path+\"checkpoint_lenet-1_1875.ckpt\")\n", - " load_param_into_net(network, param_dict)\n", - " ds_eval = create_dataset(ds_eval_path, 32, repeat_size)\n", - "\n", - " acc = model.eval(ds_eval, dataset_sink_mode=True)\n", - " print(\"============== {} ==============\".format(acc))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "其中:\n", - "\n", - "- `load_checkpoint`:通过该接口加载CheckPoint模型参数文件,返回一个参数字典。\n", - "\n", - "- `checkpoint_lenet-1_1875.ckpt`:保存的CheckPoint模型文件名称。\n", - "\n", - "- `load_param_into_net`:通过该接口把参数加载到网络中。\n", - "\n", - "> `checkpoint_lenet-1_1875.ckpt`文件的保存方法,可以参考[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)的训练网络部分。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/sampler.ipynb b/docs/programming_guide/source_zh_cn/sampler.ipynb deleted file mode 100644 index 81ca795435a8ec53beaf66265940d370b7fde735..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/sampler.ipynb +++ /dev/null @@ -1,388 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 采样器\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/sampler.ipynb) 
[![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_sampler.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV90b2tlbml6ZXIuaXB5bmI=&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "MindSpore提供了多种用途的采样器(Sampler),帮助用户对数据集进行不同形式的采样,以满足训练需求,能够解决诸如数据集过大或样本类别分布不均等问题。只需在加载数据集时传入采样器对象,即可实现数据的采样。\n", - "\n", - "MindSpore目前提供的部分采样器类别如下表所示。此外,用户也可以根据需要实现自定义的采样器类。更多采样器的使用方法参见[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "| 采样器名称 | 采样器说明 |\n", - "| :---- | :---- |\n", - "| RandomSampler | 随机采样器,在数据集中随机地采样指定数目的数据。 |\n", - "| WeightedRandomSampler | 带权随机采样器,依照长度为N的概率列表,在前N个样本中随机采样指定数目的数据。 |\n", - "| SubsetRandomSampler | 子集随机采样器,在指定的索引范围内随机采样指定数目的数据。 |\n", - "| PKSampler | PK采样器,在指定的数据集类别P中,每种类别各采样K条数据。 |\n", - "| DistributedSampler | 分布式采样器,在分布式训练中对数据集分片进行采样。 |" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## MindSpore采样器\n", - "\n", - "下面以CIFAR-10数据集为例,介绍几种常用MindSpore采样器的使用方法。\n", - "\n", - "下载CIFAR-10数据集并解压到指定路径,执行如下命令:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/cifar-10-batches-bin\n", - "├── readme.html\n", - "├── test\n", - "│   └── test_batch.bin\n", - "└── train\n", - " ├── batches.meta.txt\n", - " ├── data_batch_1.bin\n", - " ├── data_batch_2.bin\n", - " ├── 
data_batch_3.bin\n", - " ├── data_batch_4.bin\n", - " └── data_batch_5.bin\n", - "\n", - "2 directories, 8 files\n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz\n", - "!mkdir -p datasets\n", - "!tar -xzf cifar-10-binary.tar.gz -C datasets\n", - "!mkdir -p datasets/cifar-10-batches-bin/train datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/test_batch.bin datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/data_batch*.bin datasets/cifar-10-batches-bin/batches.meta.txt datasets/cifar-10-batches-bin/train\n", - "!tree ./datasets/cifar-10-batches-bin" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### RandomSampler\n", - "\n", - "从索引序列中随机采样指定数目的数据。\n", - "\n", - "下面的样例使用随机采样器分别从CIFAR-10数据集中有放回和无放回地随机采样5个数据,并展示已加载数据的形状和标签。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "------ Without Replacement ------\n", - "Image shape: (32, 32, 3) , Label: 1\n", - "Image shape: (32, 32, 3) , Label: 6\n", - "Image shape: (32, 32, 3) , Label: 6\n", - "Image shape: (32, 32, 3) , Label: 0\n", - "Image shape: (32, 32, 3) , Label: 4\n", - "------ With Replacement ------\n", - "Image shape: (32, 32, 3) , Label: 0\n", - "Image shape: (32, 32, 3) , Label: 9\n", - "Image shape: (32, 32, 3) , Label: 3\n", - "Image shape: (32, 32, 3) , Label: 9\n", - "Image shape: (32, 32, 3) , Label: 6\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "ds.config.set_seed(0)\n", - "\n", - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train/\"\n", - "\n", - "print(\"------ Without Replacement ------\")\n", - "\n", - "sampler = ds.RandomSampler(num_samples=5)\n", - "dataset1 = ds.Cifar10Dataset(DATA_DIR, sampler=sampler)\n", - "\n", - "for data in dataset1.create_dict_iterator():\n", - " print(\"Image 
shape:\", data['image'].shape, \", Label:\", data['label'])\n", - "\n", - "print(\"------ With Replacement ------\")\n", - "\n", - "sampler = ds.RandomSampler(replacement=True, num_samples=5)\n", - "dataset2 = ds.Cifar10Dataset(DATA_DIR, sampler=sampler)\n", - "\n", - "for data in dataset2.create_dict_iterator():\n", - " print(\"Image shape:\", data['image'].shape, \", Label:\", data['label'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### WeightedRandomSampler\n", - "\n", - "指定长度为N的采样概率列表,按照概率在前N个样本中随机采样指定数目的数据。\n", - "\n", - "下面的样例使用带权随机采样器从CIFAR-10数据集的前10个样本中按概率获取6个样本,并展示已读取数据的形状和标签。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Image shape: (32, 32, 3) , Label: 9\n", - "Image shape: (32, 32, 3) , Label: 9\n", - "Image shape: (32, 32, 3) , Label: 6\n", - "Image shape: (32, 32, 3) , Label: 9\n", - "Image shape: (32, 32, 3) , Label: 6\n", - "Image shape: (32, 32, 3) , Label: 6\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "ds.config.set_seed(1)\n", - "\n", - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train/\"\n", - "\n", - "weights = [1, 1, 0, 0, 0, 0, 0, 0, 0, 0]\n", - "sampler = ds.WeightedRandomSampler(weights, num_samples=6)\n", - "dataset = ds.Cifar10Dataset(DATA_DIR, sampler=sampler)\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(\"Image shape:\", data['image'].shape, \", Label:\", data['label'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### SubsetRandomSampler\n", - "\n", - "从指定索引子序列中随机采样指定数目的数据。\n", - "\n", - "下面的样例使用子序列随机采样器从CIFAR-10数据集的指定子序列中抽样3个样本,并展示已读取数据的形状和标签。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Image shape: (32, 32, 3) , Label: 1\n", - "Image shape: (32, 32, 3) , Label: 6\n", - "Image 
shape: (32, 32, 3) , Label: 4\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "ds.config.set_seed(2)\n", - "\n", - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train/\"\n", - "\n", - "indices = [0, 1, 2, 3, 4, 5]\n", - "sampler = ds.SubsetRandomSampler(indices, num_samples=3)\n", - "dataset = ds.Cifar10Dataset(DATA_DIR, sampler=sampler)\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(\"Image shape:\", data['image'].shape, \", Label:\", data['label'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### PKSampler\n", - "\n", - "在指定的数据集类别P中,每种类别各采样K条数据。\n", - "\n", - "下面的样例使用PK采样器从CIFAR-10数据集中每种类别抽样2个样本,最多20个样本,并展示已读取数据的形状和标签。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Image shape: (32, 32, 3) , Label: 0\n", - "Image shape: (32, 32, 3) , Label: 0\n", - "Image shape: (32, 32, 3) , Label: 1\n", - "Image shape: (32, 32, 3) , Label: 1\n", - "Image shape: (32, 32, 3) , Label: 2\n", - "Image shape: (32, 32, 3) , Label: 2\n", - "Image shape: (32, 32, 3) , Label: 3\n", - "Image shape: (32, 32, 3) , Label: 3\n", - "Image shape: (32, 32, 3) , Label: 4\n", - "Image shape: (32, 32, 3) , Label: 4\n", - "Image shape: (32, 32, 3) , Label: 5\n", - "Image shape: (32, 32, 3) , Label: 5\n", - "Image shape: (32, 32, 3) , Label: 6\n", - "Image shape: (32, 32, 3) , Label: 6\n", - "Image shape: (32, 32, 3) , Label: 7\n", - "Image shape: (32, 32, 3) , Label: 7\n", - "Image shape: (32, 32, 3) , Label: 8\n", - "Image shape: (32, 32, 3) , Label: 8\n", - "Image shape: (32, 32, 3) , Label: 9\n", - "Image shape: (32, 32, 3) , Label: 9\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "ds.config.set_seed(3)\n", - "\n", - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train/\"\n", - "\n", - "sampler = ds.PKSampler(num_val=2, class_column='label', num_samples=20)\n", - 
"dataset = ds.Cifar10Dataset(DATA_DIR, sampler=sampler)\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(\"Image shape:\", data['image'].shape, \", Label:\", data['label'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### DistributedSampler\n", - "\n", - "在分布式训练中,对数据集分片进行采样。\n", - "\n", - "下面的样例使用分布式采样器将构建的数据集分为3片,在每个分片中采样3个数据样本,并展示已读取的数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'data': Tensor(shape=[], dtype=Int64, value= 0)}\n", - "{'data': Tensor(shape=[], dtype=Int64, value= 3)}\n", - "{'data': Tensor(shape=[], dtype=Int64, value= 6)}\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "data_source = [0, 1, 2, 3, 4, 5, 6, 7, 8]\n", - "\n", - "sampler = ds.DistributedSampler(num_shards=3, shard_id=0, shuffle=False, num_samples=3)\n", - "dataset = ds.NumpySlicesDataset(data_source, column_names=[\"data\"], sampler=sampler)\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 自定义采样器\n", - "\n", - "用户可以继承`Sampler`基类,通过实现`__iter__`方法来自定义采样器的采样方式。\n", - "\n", - "下面的样例定义了一个从下标0至下标9间隔为2采样的采样器,将其作用于CIFAR-10数据集,并展示已读取数据的形状和标签。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Image shape: (32, 32, 3) , Label: 6\n", - "Image shape: (32, 32, 3) , Label: 9\n", - "Image shape: (32, 32, 3) , Label: 1\n", - "Image shape: (32, 32, 3) , Label: 2\n", - "Image shape: (32, 32, 3) , Label: 8\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "class MySampler(ds.Sampler):\n", - " def __iter__(self):\n", - " for i in range(0, 10, 2):\n", - " yield i\n", - "\n", - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train/\"\n", - 
"\n", - "dataset = ds.Cifar10Dataset(DATA_DIR, sampler=MySampler())\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(\"Image shape:\", data['image'].shape, \", Label:\", data['label'])" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/security_and_privacy.md b/docs/programming_guide/source_zh_cn/security_and_privacy.md deleted file mode 100644 index dc018d2999cbeb23921abe9237b899dea6a088e3..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/security_and_privacy.md +++ /dev/null @@ -1,72 +0,0 @@ -# AI安全与隐私保护 - - - -- [AI安全与隐私保护](#ai安全与隐私保护) - - [概述](#概述) - - [对抗鲁棒性](#对抗鲁棒性) - - [Attack](#attack) - - [Defense](#defense) - - [Detector](#detector) - - [模型安全测试](#模型安全测试) - - [Fuzzer](#fuzzer) - - [差分隐私训练](#差分隐私训练) - - [DPModel](#dpmodel) - - [隐私泄露风险评估](#隐私泄露风险评估) - - [MembershipInference](#membershipinference) - - - - - -## 概述 - -本篇主要介绍AI安全与隐私保护。AI作为一种通用技术,在带来巨大机遇和效益的同时也面临着新的安全与隐私保护的挑战。MindArmour是MindSpore的一个子项目,为MindSpore提供安全与隐私保护能力,主要包括对抗鲁棒性、模型安全测试、差分隐私训练、隐私泄露风险评估等技术。 - -## 对抗鲁棒性 - -### Attack - -`Attack`基类定义了对抗样本生成的使用接口,其子类实现了各种具体的生成算法,支持安全工作人员快速高效地生成对抗样本,用于攻击AI模型,以评估模型的鲁棒性。 - -### Defense - -`Defense`基类定义了对抗训练的使用接口,其子类实现了各种具体的对抗训练算法,增强模型的对抗鲁棒性。 - -### Detector - -`Detector`基类定义了对抗样本检测的使用接口,其子类实现了各种具体的检测算法,增强模型的对抗鲁棒性。 - -详细内容,请参考[对抗鲁棒性官网教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/improve_model_security_nad.html)。 - -## 模型安全测试 - -### Fuzzer - -`Fuzzer`类基于神经元覆盖率增益控制fuzzing流程,采用自然扰动和对抗样本生成方法作为变异策略,激活更多的神经元,从而探索不同类型的模型输出结果、错误行为,指导用户增强模型鲁棒性。 - 
-详细内容,请参考[模型安全测试官网教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/test_model_security_fuzzing.html)。 - -## 差分隐私训练 - -### DPModel - -`DPModel`继承了`mindspore.Model`,提供了差分隐私训练的入口函数。 - -详细内容,请参考[差分隐私官网教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/protect_user_privacy_with_differential_privacy.html)。 - -## 抑制隐私训练 - -### SuppressModel - -`SuppressModel`继承了`mindspore.Model`,提供了抑制隐私训练的入口函数。 - -详细内容,请参考[抑制隐私官网教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/protect_user_privacy_with_suppress_privacy.html)。 - -## 隐私泄露风险评估 - -### MembershipInference - -`MembershipInference`类提供了一种模型逆向分析方法,能够基于模型对样本的预测信息,推测某个样本是否在模型的训练集中,以此评估模型的隐私泄露风险。 - -详细内容,请参考[隐私泄露风险评估官方教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/test_model_security_membership_inference.html)。 diff --git a/docs/programming_guide/source_zh_cn/syntax_list.rst b/docs/programming_guide/source_zh_cn/syntax_list.rst deleted file mode 100644 index a4fd4ffeb92733727cc77b583950bd206456665f..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/syntax_list.rst +++ /dev/null @@ -1,8 +0,0 @@ -语法支持 -=========== - -.. 
toctree:: - :maxdepth: 1 - - 静态图语法支持 - Tensor索引支持 \ No newline at end of file diff --git a/docs/programming_guide/source_zh_cn/tensor.ipynb b/docs/programming_guide/source_zh_cn/tensor.ipynb deleted file mode 100644 index 97362ad75d070d6a4eb1800fed4dbaf509fd2973..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/tensor.ipynb +++ /dev/null @@ -1,215 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Tensor\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/tensor.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_tensor.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV90ZW5zb3IuaXB5bmI=&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "张量(Tensor)是MindSpore网络运算中的基本数据结构。张量中的数据类型可参考[dtype](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dtype.html)。\n", - "\n", - "不同维度的张量分别表示不同的数据,0维张量表示标量,1维张量表示向量,2维张量表示矩阵,3维张量可以表示彩色图像的RGB三通道等等。\n", - "\n", - "> 本文中的所有示例,支持在PyNative模式下运行。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 张量构造\n", - "\n", - "构造张量时,支持传入`Tensor`、`float`、`int`、`bool`、`tuple`、`list`和`NumPy.array`类型,其中`tuple`和`list`里只能存放`float`、`int`、`bool`类型数据。\n", - "\n", - 
"`Tensor`初始化时,可指定dtype。如果没有指定dtype,初始值`int`、`float`、`bool`分别生成数据类型为`mindspore.int32`、`mindspore.float32`、`mindspore.bool_`的0维Tensor,\n", - "初始值`tuple`和`list`生成的1维`Tensor`数据类型与`tuple`和`list`里存放的数据类型相对应,如果包含多种不同类型的数据,则按照优先级:`bool` < `int` < `float`,选择相对优先级最高类型所对应的mindspore数据类型。\n", - "如果初始值是`Tensor`,则生成的`Tensor`数据类型与其一致;如果初始值是`NumPy.array`,则生成的`Tensor`数据类型与之对应。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T02:59:50.340750Z", - "start_time": "2021-02-03T02:59:49.571048Z" - }, - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[1 2]\n", - " [3 4]] \n", - "\n", - " 1 \n", - "\n", - " 2 \n", - "\n", - " True \n", - "\n", - " [1 2 3] \n", - "\n", - " [4. 5. 6.] \n", - "\n", - " [4. 5. 6.]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from mindspore import Tensor\n", - "from mindspore import dtype as mstype\n", - "\n", - "x = Tensor(np.array([[1, 2], [3, 4]]), mstype.int32)\n", - "y = Tensor(1.0, mstype.int32)\n", - "z = Tensor(2, mstype.int32)\n", - "m = Tensor(True, mstype.bool_)\n", - "n = Tensor((1, 2, 3), mstype.int16)\n", - "p = Tensor([4.0, 5.0, 6.0], mstype.float64)\n", - "q = Tensor(p, mstype.float64)\n", - "\n", - "print(x, \"\\n\\n\", y, \"\\n\\n\", z, \"\\n\\n\", m, \"\\n\\n\", n, \"\\n\\n\", p, \"\\n\\n\", q)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 张量的属性和方法" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 属性\n", - "\n", - "张量的属性包括形状(shape)和数据类型(dtype)。\n", - "\n", - " * 形状:`Tensor`的shape,是一个tuple。\n", - "\n", - " * 数据类型:`Tensor`的dtype,是MindSpore的一个数据类型。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T02:59:50.347520Z", - "start_time": "2021-02-03T02:59:50.342826Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": 
"stream", - "text": [ - "(2, 2) Int32\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from mindspore import Tensor\n", - "from mindspore import dtype as mstype\n", - "\n", - "x = Tensor(np.array([[1, 2], [3, 4]]), mstype.int32)\n", - "x_shape = x.shape\n", - "x_dtype = x.dtype\n", - "\n", - "print(x_shape, x_dtype)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 方法\n", - "\n", - "张量的方法包括`all`、`any`和`asnumpy`,`all`和`any`方法目前只支持Ascend,并且要求`Tensor`的数据类型是`mindspore.bool_`。\n", - "\n", - "- `all(axis, keep_dims)`:在指定维度上通过`and`操作进行归约,`axis`代表归约维度,`keep_dims`表示是否保留归约后的维度。\n", - "\n", - "- `any(axis, keep_dims)`:在指定维度上通过`or`操作进行归约,参数含义同`all`。\n", - "\n", - "- `asnumpy()`:将`Tensor`转换为`NumPy`的`array`。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T02:59:50.374128Z", - "start_time": "2021-02-03T02:59:50.349665Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "False \n", - "\n", - " True \n", - "\n", - " [[ True True]\n", - " [False False]]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from mindspore import Tensor\n", - "from mindspore import dtype as mstype\n", - "\n", - "x = Tensor(np.array([[True, True], [False, False]]), mstype.bool_)\n", - "x_all = x.all()\n", - "x_any = x.any()\n", - "x_array = x.asnumpy()\n", - "\n", - "print(x_all, \"\\n\\n\", x_any, \"\\n\\n\", x_array)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/tokenizer.ipynb 
b/docs/programming_guide/source_zh_cn/tokenizer.ipynb deleted file mode 100644 index 188cdfd486053a9623700e96630be530ff1f708d..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/tokenizer.ipynb +++ /dev/null @@ -1,506 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 分词器\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/tokenizer.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_tokenizer.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV90b2tlbml6ZXIuaXB5bmI=&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "分词就是将连续的字序列按照一定的规范重新组合成词序列的过程,合理的进行分词有助于语义的理解。\n", - "\n", - "MindSpore提供了多种用途的分词器(Tokenizer),能够帮助用户高性能地处理文本,用户可以构建自己的字典,使用适当的标记器将句子拆分为不同的标记,并通过查找操作获取字典中标记的索引。\n", - "\n", - "MindSpore目前提供的分词器如下表所示。此外,用户也可以根据需要实现自定义的分词器。\n", - "\n", - "| 分词器 | 分词器说明 |\n", - "| :-- | :-- |\n", - "| BasicTokenizer | 根据指定规则对标量文本数据进行分词。 |\n", - "| BertTokenizer | 用于处理Bert文本数据的分词器。 |\n", - "| JiebaTokenizer | 基于字典的中文字符串分词器。 |\n", - "| RegexTokenizer | 根据指定正则表达式对标量文本数据进行分词。 |\n", - "| SentencePieceTokenizer | 基于SentencePiece开源工具包进行分词。 |\n", - "| UnicodeCharTokenizer | 将标量文本数据分词为Unicode字符。 |\n", - "| UnicodeScriptTokenizer | 根据Unicode边界对标量文本数据进行分词。 |\n", - "| WhitespaceTokenizer | 根据空格符对标量文本数据进行分词。 |\n", - "| WordpieceTokenizer | 
根据单词集对标量文本数据进行分词。 |\n", - "\n", - "更多分词器的详细说明,可以参见[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.text.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## MindSpore分词器\n", - "\n", - "下面介绍几种常用分词器的使用方法。\n", - "\n", - "### BertTokenizer\n", - "\n", - "`BertTokenizer`是通过调用`BasicTokenizer`和`WordpieceTokenizer`来进行分词的。\n", - "\n", - "下面的样例首先构建了一个文本数据集和字符串列表,然后通过`BertTokenizer`对数据集进行分词,并展示了分词前后的文本结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "------------------------before tokenization----------------------------\n", - "床前明月光\n", - "疑是地上霜\n", - "举头望明月\n", - "低头思故乡\n", - "I am making small mistakes during working hours\n", - "😀嘿嘿😃哈哈😄大笑😁嘻嘻\n", - "繁體字\n", - "------------------------after tokenization-----------------------------\n", - "['床' '前' '明' '月' '光']\n", - "['疑' '是' '地' '上' '霜']\n", - "['举' '头' '望' '明' '月']\n", - "['低' '头' '思' '故' '乡']\n", - "['I' 'am' 'mak' '##ing' 'small' 'mistake' '##s' 'during' 'work' '##ing'\n", - " 'hour' '##s']\n", - "['😀' '嘿' '嘿' '😃' '哈' '哈' '😄' '大' '笑' '😁' '嘻' '嘻']\n", - "['繁' '體' '字']\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.text as text\n", - "\n", - "input_list = [\"床前明月光\", \"疑是地上霜\", \"举头望明月\", \"低头思故乡\", \"I am making small mistakes during working hours\",\n", - " \"😀嘿嘿😃哈哈😄大笑😁嘻嘻\", \"繁體字\"]\n", - "dataset = ds.NumpySlicesDataset(input_list, column_names=[\"text\"], shuffle=False)\n", - "\n", - "print(\"------------------------before tokenization----------------------------\")\n", - "\n", - "for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))\n", - "\n", - "vocab_list = [\n", - " \"床\", \"前\", \"明\", \"月\", \"光\", \"疑\", \"是\", \"地\", \"上\", \"霜\", \"举\", \"头\", \"望\", \"低\", \"思\", \"故\", \"乡\",\n", - " \"繁\", \"體\", \"字\", \"嘿\", \"哈\", \"大\", 
\"笑\", \"嘻\", \"i\", \"am\", \"mak\", \"make\", \"small\", \"mistake\",\n", - " \"##s\", \"during\", \"work\", \"##ing\", \"hour\", \"😀\", \"😃\", \"😄\", \"😁\", \"+\", \"/\", \"-\", \"=\", \"12\",\n", - " \"28\", \"40\", \"16\", \" \", \"I\", \"[CLS]\", \"[SEP]\", \"[UNK]\", \"[PAD]\", \"[MASK]\", \"[unused1]\", \"[unused10]\"]\n", - "\n", - "vocab = text.Vocab.from_list(vocab_list)\n", - "tokenizer_op = text.BertTokenizer(vocab=vocab)\n", - "dataset = dataset.map(operations=tokenizer_op)\n", - "\n", - "print(\"------------------------after tokenization-----------------------------\")\n", - "\n", - "for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):\n", - " print(text.to_str(i['text']))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### JiebaTokenizer\n", - "\n", - "`JiebaTokenizer`是基于jieba的中文分词。\n", - "\n", - "下载字典文件`hmm_model.utf8`和`jieba.dict.utf8`,并将其放到指定位置。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/tokenizer/\n", - "├── hmm_model.utf8\n", - "└── jieba.dict.utf8\n", - "\n", - "0 directories, 2 files\n" - ] - } - ], - "source": [ - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/hmm_model.utf8\n", - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/jieba.dict.utf8\n", - "!mkdir -p ./datasets/tokenizer/\n", - "!mv hmm_model.utf8 jieba.dict.utf8 -t ./datasets/tokenizer/\n", - "!tree ./datasets/tokenizer/" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下面的样例首先构建了一个文本数据集,然后使用HMM与MP字典文件创建`JiebaTokenizer`对象,并对数据集进行分词,最后展示了分词前后的文本结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "------------------------before tokenization----------------------------\n", - 
"今天天气太好了我们一起去外面玩吧\n", - "------------------------after tokenization-----------------------------\n", - "['今天天气' '太好了' '我们' '一起' '去' '外面' '玩吧']\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.text as text\n", - "\n", - "input_list = [\"今天天气太好了我们一起去外面玩吧\"]\n", - "dataset = ds.NumpySlicesDataset(input_list, column_names=[\"text\"], shuffle=False)\n", - "\n", - "print(\"------------------------before tokenization----------------------------\")\n", - "\n", - "for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))\n", - "\n", - "# files from open source repository https://github.com/yanyiwu/cppjieba/tree/master/dict\n", - "HMM_FILE = \"./datasets/tokenizer/hmm_model.utf8\"\n", - "MP_FILE = \"./datasets/tokenizer/jieba.dict.utf8\"\n", - "jieba_op = text.JiebaTokenizer(HMM_FILE, MP_FILE)\n", - "dataset = dataset.map(operations=jieba_op, input_columns=[\"text\"], num_parallel_workers=1)\n", - "\n", - "print(\"------------------------after tokenization-----------------------------\")\n", - "\n", - "for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):\n", - " print(text.to_str(i['text']))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### SentencePieceTokenizer\n", - "\n", - "`SentencePieceTokenizer`是基于[SentencePiece](https://github.com/google/sentencepiece)这个开源的自然语言处理工具包。\n", - "\n", - "下载文本数据集文件`botchan.txt`,并将其放置到指定位置。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/tokenizer/\n", - "└── botchan.txt\n", - "\n", - "0 directories, 1 files\n" - ] - } - ], - "source": [ - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/botchan.txt\n", - "!mkdir -p ./datasets/tokenizer/\n", - "!mv botchan.txt ./datasets/tokenizer/\n", - "!tree ./datasets/tokenizer/" - ] - }, - { - 
"cell_type": "markdown", - "metadata": {}, - "source": [ - "下面的样例首先构建了一个文本数据集,然后从`vocab_file`文件中构建一个`vocab`对象,再通过`SentencePieceTokenizer`对数据集进行分词,并展示了分词前后的文本结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "------------------------before tokenization----------------------------\n", - "I saw a girl with a telescope.\n", - "------------------------after tokenization-----------------------------\n", - "['▁I' '▁sa' 'w' '▁a' '▁girl' '▁with' '▁a' '▁te' 'les' 'co' 'pe' '.']\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.text as text\n", - "from mindspore.dataset.text import SentencePieceModel, SPieceTokenizerOutType\n", - "\n", - "input_list = [\"I saw a girl with a telescope.\"]\n", - "dataset = ds.NumpySlicesDataset(input_list, column_names=[\"text\"], shuffle=False)\n", - "\n", - "print(\"------------------------before tokenization----------------------------\")\n", - "\n", - "for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))\n", - "\n", - "# file from MindSpore repository https://gitee.com/mindspore/mindspore/blob/master/tests/ut/data/dataset/test_sentencepiece/botchan.txt\n", - "vocab_file = \"./datasets/tokenizer/botchan.txt\"\n", - "vocab = text.SentencePieceVocab.from_file([vocab_file], 5000, 0.9995, SentencePieceModel.UNIGRAM, {})\n", - "tokenizer_op = text.SentencePieceTokenizer(vocab, out_type=SPieceTokenizerOutType.STRING)\n", - "dataset = dataset.map(operations=tokenizer_op)\n", - "\n", - "print(\"------------------------after tokenization-----------------------------\")\n", - "\n", - "for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):\n", - " print(text.to_str(i['text']))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### UnicodeCharTokenizer\n", - "\n", - 
"`UnicodeCharTokenizer`是根据Unicode字符集来分词的。\n", - "\n", - "下面的样例首先构建了一个文本数据集,然后通过`UnicodeCharTokenizer`对数据集进行分词,并展示了分词前后的文本结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "------------------------before tokenization----------------------------\n", - "Welcome to Beijing!\n", - "北京欢迎您!\n", - "我喜欢English!\n", - "------------------------after tokenization-----------------------------\n", - "['W', 'e', 'l', 'c', 'o', 'm', 'e', ' ', 't', 'o', ' ', 'B', 'e', 'i', 'j', 'i', 'n', 'g', '!']\n", - "['北', '京', '欢', '迎', '您', '!']\n", - "['我', '喜', '欢', 'E', 'n', 'g', 'l', 'i', 's', 'h', '!']\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.text as text\n", - "\n", - "input_list = [\"Welcome to Beijing!\", \"北京欢迎您!\", \"我喜欢English!\"]\n", - "dataset = ds.NumpySlicesDataset(input_list, column_names=[\"text\"], shuffle=False)\n", - "\n", - "print(\"------------------------before tokenization----------------------------\")\n", - "\n", - "for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))\n", - "\n", - "tokenizer_op = text.UnicodeCharTokenizer()\n", - "dataset = dataset.map(operations=tokenizer_op)\n", - "\n", - "print(\"------------------------after tokenization-----------------------------\")\n", - "\n", - "for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):\n", - " print(text.to_str(i['text']).tolist())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### WhitespaceTokenizer\n", - "\n", - "`WhitespaceTokenizer`是根据空格来进行分词的。\n", - "\n", - "下面的样例首先构建了一个文本数据集,然后通过`WhitespaceTokenizer`对数据集进行分词,并展示了分词前后的文本结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "------------------------before 
tokenization----------------------------\n", - "Welcome to Beijing!\n", - "北京欢迎您!\n", - "我喜欢English!\n", - "------------------------after tokenization-----------------------------\n", - "['Welcome', 'to', 'Beijing!']\n", - "['北京欢迎您!']\n", - "['我喜欢English!']\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.text as text\n", - "\n", - "input_list = [\"Welcome to Beijing!\", \"北京欢迎您!\", \"我喜欢English!\"]\n", - "dataset = ds.NumpySlicesDataset(input_list, column_names=[\"text\"], shuffle=False)\n", - "\n", - "print(\"------------------------before tokenization----------------------------\")\n", - "\n", - "for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))\n", - "\n", - "tokenizer_op = text.WhitespaceTokenizer()\n", - "dataset = dataset.map(operations=tokenizer_op)\n", - "\n", - "print(\"------------------------after tokenization-----------------------------\")\n", - "\n", - "for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):\n", - " print(text.to_str(i['text']).tolist())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### WordpieceTokenizer\n", - "\n", - "`WordpieceTokenizer`是基于单词集来进行划分的,划分依据可以是单词集中的单个单词,或者多个单词的组合形式。\n", - "\n", - "下面的样例首先构建了一个文本数据集,然后从单词列表中构建`vocab`对象,通过`WordpieceTokenizer`对数据集进行分词,并展示了分词前后的文本结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "------------------------before tokenization----------------------------\n", - "my\n", - "favorite\n", - "book\n", - "is\n", - "love\n", - "during\n", - "the\n", - "cholera\n", - "era\n", - "what\n", - "我\n", - "最\n", - "喜\n", - "欢\n", - "的\n", - "书\n", - "是\n", - "霍\n", - "乱\n", - "时\n", - "期\n", - "的\n", - "爱\n", - "情\n", - "您\n", - "------------------------after tokenization-----------------------------\n", - "['my']\n", - "['favor' '##ite']\n", - 
"['book']\n", - "['is']\n", - "['love']\n", - "['dur' '##ing']\n", - "['the']\n", - "['cholera']\n", - "['era']\n", - "['[UNK]']\n", - "['我']\n", - "['最']\n", - "['喜']\n", - "['欢']\n", - "['的']\n", - "['书']\n", - "['是']\n", - "['霍']\n", - "['乱']\n", - "['时']\n", - "['期']\n", - "['的']\n", - "['爱']\n", - "['情']\n", - "['[UNK]']\n" - ] - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.text as text\n", - "\n", - "input_list = [\"my\", \"favorite\", \"book\", \"is\", \"love\", \"during\", \"the\", \"cholera\", \"era\", \"what\",\n", - " \"我\", \"最\", \"喜\", \"欢\", \"的\", \"书\", \"是\", \"霍\", \"乱\", \"时\", \"期\", \"的\", \"爱\", \"情\", \"您\"]\n", - "vocab_english = [\"book\", \"cholera\", \"era\", \"favor\", \"##ite\", \"my\", \"is\", \"love\", \"dur\", \"##ing\", \"the\"]\n", - "vocab_chinese = [\"我\", '最', '喜', '欢', '的', '书', '是', '霍', '乱', '时', '期', '爱', '情']\n", - "\n", - "dataset = ds.NumpySlicesDataset(input_list, column_names=[\"text\"], shuffle=False)\n", - "\n", - "print(\"------------------------before tokenization----------------------------\")\n", - "\n", - "for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))\n", - "\n", - "vocab = text.Vocab.from_list(vocab_english+vocab_chinese)\n", - "tokenizer_op = text.WordpieceTokenizer(vocab=vocab)\n", - "dataset = dataset.map(operations=tokenizer_op)\n", - "\n", - "print(\"------------------------after tokenization-----------------------------\")\n", - "\n", - "for i in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):\n", - " print(text.to_str(i['text']))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": 
"3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/programming_guide/source_zh_cn/train.ipynb b/docs/programming_guide/source_zh_cn/train.ipynb deleted file mode 100644 index 9ef1e862fc42805fd37e58cb8694c7bda3465509..0000000000000000000000000000000000000000 --- a/docs/programming_guide/source_zh_cn/train.ipynb +++ /dev/null @@ -1,577 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 训练\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/train.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/programming_guide/mindspore_train.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3Byb2dyYW1taW5nX2d1aWRlL21pbmRzcG9yZV90cmFpbi5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "MindSpore在Model_zoo也已经提供了大量的目标检测、自然语言处理等多种网络模型,供用户直接使用,但是对于某些高级用户而言可能想要自行设计网络或者自定义训练循环,下面就对自定义训练网络、自定义训练循环和边训练边推理三种场景进行介绍,另外对On device执行方式进行详细介绍。\n", - "\n", - "> 本文示例适用于GPU和Ascend环境。\n", - "\n", - "## 自定义训练网络\n", - "\n", - "在自定义训练网络前,需要先了解下MindSpore的网络支持、Python源码构造网络约束和算子支持情况。\n", - "\n", - "- 网络支持:当前MindSpore已经支持多种网络,按类型分为计算机视觉、自然语言处理、推荐和图神经网络,可以通过[网络支持](https://www.mindspore.cn/doc/note/zh-CN/master/network_list.html)查看具体支持的网络情况。如果现有网络无法满足用户需求,用户可以根据实际需要定义自己的网络。\n", - "\n", - "- 
Python源码构造网络约束:MindSpore暂不支持将任意Python源码转换成计算图,所以对于用户源码支持的写法有所限制,主要包括语法约束和网络定义约束两方面。详细情况可以查看[静态图语法支持](https://www.mindspore.cn/doc/note/zh-CN/master/static_graph_syntax_support.html)了解。随着MindSpore的演进,这些约束可能会发生变化。\n", - "\n", - "- 算子支持:顾名思义,网络的基础是算子,所以用户自定义训练网络前要对MindSpore当前支持的算子有所了解,可以通过查看[算子支持](https://www.mindspore.cn/doc/note/zh-CN/master/operator_list.html)了解不同的后端(Ascend、GPU和CPU)的算子实现情况。\n", - "\n", - "> 当开发网络遇到内置算子不足以满足需求时,用户也可以参考[自定义算子](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_operator_ascend.html),方便快捷地扩展昇腾AI处理器的自定义算子。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "-------loss------ [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0. 0. 0.]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "\n", - "from mindspore import Tensor\n", - "from mindspore.nn import Cell, Dense, SoftmaxCrossEntropyWithLogits, Momentum, TrainOneStepCell, WithLossCell\n", - "import mindspore.ops as ops\n", - "\n", - "\n", - "class ReLUReduceMeanDense(Cell):\n", - " def __init__(self, kernel, bias, in_channel, num_class):\n", - " super().__init__()\n", - " self.relu = ops.ReLU()\n", - " self.mean = ops.ReduceMean(keep_dims=False)\n", - " self.dense = Dense(in_channel, num_class, kernel, bias)\n", - "\n", - " def construct(self, x):\n", - " x = self.relu(x)\n", - " x = self.mean(x, (2, 3))\n", - " x = self.dense(x)\n", - " return x\n", - "\n", - "\n", - "if __name__ == \"__main__\":\n", - " weight_np = np.ones((1000, 2048)).astype(np.float32)\n", - " weight = Tensor(weight_np.copy())\n", - " bias_np = np.ones((1000,)).astype(np.float32)\n", - " bias = Tensor(bias_np.copy())\n", - " net = ReLUReduceMeanDense(weight, bias, 2048, 1000)\n", - " criterion = SoftmaxCrossEntropyWithLogits(sparse=False)\n", - " optimizer = Momentum(learning_rate=0.1, momentum=0.1,\n", - " 
params=filter(lambda x: x.requires_grad, net.get_parameters()))\n", - " net_with_criterion = WithLossCell(net, criterion)\n", - " train_network = TrainOneStepCell(net_with_criterion, optimizer)\n", - " train_network.set_train()\n", - " input_np = np.random.randn(32, 2048, 7, 7).astype(np.float32)\n", - " input = Tensor(input_np.copy())\n", - " label_np_onehot = np.zeros(shape=(32, 1000)).astype(np.float32)\n", - " label = Tensor(label_np_onehot.copy())\n", - " for i in range(1):\n", - " loss = train_network(input, label)\n", - " print(\"-------loss------\", loss)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 自定义训练循环\n", - "\n", - "在进行自定义循环训练之前,将需要使用的MNIST数据集下载下来,同时解压缩放置指定位置:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"用户如果不想使用MindSpore提供的Model接口,也可参考以下样例自由控制循环的迭代次数、遍历数据集等。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Training ==============\n", - "epoch: 1/10, losses: 2.3086986541748047\n", - "epoch: 1/10, losses: 2.309938430786133\n", - "epoch: 1/10, losses: 2.302298069000244\n", - "epoch: 1/10, losses: 2.310209035873413\n", - "epoch: 1/10, losses: 2.3002336025238037\n", - "epoch: 1/10, losses: 2.3022992610931396\n", - "... ...\n", - "epoch: 1/10, losses: 0.18848800659179688\n", - "epoch: 1/10, losses: 0.15532201528549194\n", - "epoch: 2/10, losses: 0.179201140999794\n", - "epoch: 2/10, losses: 0.20995387434959412\n", - "epoch: 2/10, losses: 0.4867479205131531\n", - "... ...\n", - "epoch: 10/10, losses: 0.027243722230196\n", - "epoch: 10/10, losses: 0.07665436714887619\n", - "epoch: 10/10, losses: 0.005962767638266087\n", - "epoch: 10/10, losses: 0.026364721357822418\n", - "epoch: 10/10, losses: 0.0003102901973761618\n" - ] - } - ], - "source": [ - "import os\n", - "\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.transforms.c_transforms as CT\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.nn as nn\n", - "from mindspore import context, DatasetHelper, connect_network_with_dataset\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common.initializer import TruncatedNormal\n", - "from mindspore import ParameterTuple\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore.nn import WithLossCell\n", - "import mindspore.ops as ops\n", - "\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\"\n", - " create dataset for train or test\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " resize_height, resize_width = 32, 
32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # define map operations\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = CT.TypeCast(mstype.int32)\n", - "\n", - " # apply map operations on images\n", - " mnist_ds = mnist_ds.map(input_columns=\"label\", operations=type_cast_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=resize_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=rescale_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=rescale_nml_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=hwc2chw_op, num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds\n", - "\n", - "\n", - "def conv(in_channels, out_channels, kernel_size, stride=1, padding=0):\n", - " \"\"\"weight initial for conv layer\"\"\"\n", - " weight = weight_variable()\n", - " return nn.Conv2d(in_channels, out_channels,\n", - " kernel_size=kernel_size, stride=stride, padding=padding,\n", - " weight_init=weight, has_bias=False, pad_mode=\"valid\")\n", - "\n", - "\n", - "def fc_with_initialize(input_channels, out_channels):\n", - " \"\"\"weight initial for fc layer\"\"\"\n", - " weight = weight_variable()\n", - " bias = weight_variable()\n", - " 
return nn.Dense(input_channels, out_channels, weight, bias)\n", - "\n", - "\n", - "def weight_variable():\n", - " \"\"\"weight initial\"\"\"\n", - " return TruncatedNormal(0.02)\n", - "\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"\n", - " Lenet network\n", - " Args:\n", - " num_class (int): Num classes. Default: 10.\n", - "\n", - " Returns:\n", - " Tensor, output tensor\n", - "\n", - " Examples:\n", - " >>> LeNet(num_class=10)\n", - " \"\"\"\n", - "\n", - " def __init__(self, num_class=10):\n", - " super(LeNet5, self).__init__()\n", - " self.num_class = num_class\n", - " self.batch_size = 32\n", - " self.conv1 = conv(1, 6, 5)\n", - " self.conv2 = conv(6, 16, 5)\n", - " self.fc1 = fc_with_initialize(16 * 5 * 5, 120)\n", - " self.fc2 = fc_with_initialize(120, 84)\n", - " self.fc3 = fc_with_initialize(84, self.num_class)\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.reshape = ops.Reshape()\n", - "\n", - " def construct(self, x):\n", - " x = self.conv1(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.conv2(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.reshape(x, (self.batch_size, -1))\n", - " x = self.fc1(x)\n", - " x = self.relu(x)\n", - " x = self.fc2(x)\n", - " x = self.relu(x)\n", - " x = self.fc3(x)\n", - " return x\n", - "\n", - "\n", - "class TrainOneStepCell(nn.Cell):\n", - " def __init__(self, network, optimizer, sens=1.0):\n", - " super(TrainOneStepCell, self).__init__(auto_prefix=False)\n", - " self.network = network\n", - " self.weights = ParameterTuple(network.trainable_params())\n", - " self.optimizer = optimizer\n", - " self.grad = ops.GradOperation(get_by_list=True, sens_param=True)\n", - " self.sens = sens\n", - "\n", - " def set_sens(self, value):\n", - " self.sens = value\n", - "\n", - " def construct(self, data, label):\n", - " weights = self.weights\n", - " loss = self.network(data, label)\n", - " sens = 
ops.Fill()(ops.DType()(loss), ops.Shape()(loss), self.sens)\n", - " grads = self.grad(self.network, weights)(data, label, sens)\n", - " return ops.depend(loss, self.optimizer(grads))\n", - "\n", - "\n", - "if __name__ == \"__main__\":\n", - " context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - " \n", - " ds_data_path = \"./datasets/MNIST_Data/train/\"\n", - " ds_train = create_dataset(ds_data_path, 32)\n", - "\n", - " network = LeNet5(10)\n", - " net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - " net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9)\n", - " net = WithLossCell(network, net_loss)\n", - " net = TrainOneStepCell(net, net_opt)\n", - " network.set_train()\n", - " print(\"============== Starting Training ==============\")\n", - " epoch = 10\n", - " for step in range(epoch):\n", - " for inputs in ds_train:\n", - " output = net(*inputs)\n", - " print(\"epoch: {0}/{1}, losses: {2}\".format(step + 1, epoch, output.asnumpy(), flush=True))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 示例中用到的MNIST数据集的获取方法,可以参照[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)的下载数据集部分,下同。\n", - ">\n", - "> 典型的使用场景是梯度累积,详细查看[梯度累积](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_gradient_accumulation.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 边训练边推理\n", - "\n", - "对于某些数据量较大、训练时间较长的复杂网络,为了能掌握训练的不同阶段模型精度的指标变化情况,可以通过边训练边推理的方式跟踪精度的变化情况。具体可以参考[同步训练和验证模型](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/evaluate_the_model_during_training.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## on-device执行\n", - "\n", - "当前MindSpore支持的后端包括Ascend、GPU、CPU,所谓On Device中的Device通常指Ascend(昇腾)AI处理器。\n", - "\n", - "昇腾芯片上集成了AICORE、AICPU和CPU。其中,AICORE负责大型Tensor Vector运算,AICPU负责标量运算,CPU负责逻辑控制和任务分发。\n", - "\n", - 
"Host侧CPU负责将图或算子下发到昇腾芯片。昇腾芯片由于具备了运算、逻辑控制和任务分发的功能,所以不需要与Host侧的CPU进行频繁的交互,只需要将计算完的最终结果返回给Host侧,实现整图下沉到Device执行,避免Host-Device频繁交互,减小了开销。\n", - "\n", - "### 计算图下沉\n", - "\n", - "计算图整图下沉到Device上执行,减少Host-Device交互开销。可以结合循环下沉实现多个Step下沉,进一步减少Host和Device的交互次数。\n", - "\n", - "循环下沉是在On Device执行的基础上的优化,目的是进一步减少Host侧和Device侧之间的交互次数。通常情况下,每个step都返回一个结果,循环下沉是控制每隔多少个step返回一次结果。\n", - "\n", - "默认配置下是每一个epoch返回一次结果,这样每个epoch里,Host侧和Device侧只需要进行一次数据交互。\n", - "\n", - "也可以结合`train`接口的`dataset_sink_mode`和`sink_size`控制每个epoch的下沉数据量。\n", - "\n", - "### 数据下沉\n", - "\n", - "`Model`的`train`接口参数`dataset_sink_mode`可以控制数据是否下沉。`dataset_sink_mode`为True表示数据下沉,否则为非下沉。所谓下沉即数据通过通道直接传送到Device上。\n", - "\n", - "dataset_sink_mode参数可以配合`sink_size`控制每个`epoch`下沉的数据量大小。当`dataset_sink_mode`设置为True,即数据下沉模式时:\n", - "\n", - "如果`sink_size`为默认值-1,则每一个`epoch`下沉的数据量为原始的整个数据集大小;\n", - "\n", - "如果`sink_size`>0,此时原始数据集可以被无限次遍历,每个`epoch`下沉`sink_size`大小的数据量,下一个`epoch`继续从上次遍历的结束位置继续遍历。\n", - "\n", - "下沉的总数据量由`epoch`和`sink_size`两个变量共同控制,即总数据量=`epoch`*`sink_size`。\n", - "\n", - "当使用`LossMonitor`,`TimeMonitor`或其它`Callback`接口时,如果`dateset_sink_mode`设置为False,Host侧和Device侧之间每个`step`交互一次,所以会每个`step`返回一个结果,如果`dataset_sink_mode`为True,因为数据在Device上通过通道传输, Host侧和Device侧之间每个`epoch`进行一次数据交互,所以每个`epoch`只返回一次结果。\n", - "\n", - "> 当前CPU和PyNative模式不支持数据下沉。\n", - "\n", - "代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Training ==============\n", - "epoch: 1 step: 1000, loss is 0.110185064\n", - "epoch: 2 step: 1000, loss is 0.12088283\n", - "epoch: 3 step: 1000, loss is 0.15903473\n", - "epoch: 4 step: 1000, loss is 0.030054657\n", - "epoch: 5 step: 1000, loss is 0.013846226\n", - "epoch: 6 step: 1000, loss is 0.052161213\n", - "epoch: 7 step: 1000, loss is 0.0050197737\n", - "epoch: 8 step: 1000, loss is 0.17207858\n", - "epoch: 9 step: 1000, loss is 0.010310417\n", - "epoch: 10 step: 1000, loss is 
0.000672762\n" - ] - } - ], - "source": [ - "import os\n", - "\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.transforms.c_transforms as CT\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.nn as nn\n", - "from mindspore import context, Model\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common.initializer import TruncatedNormal\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore.nn import Accuracy\n", - "import mindspore.ops as ops\n", - "from mindspore.train.callback import LossMonitor\n", - "\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\"\n", - " create dataset for train or test\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # define map operations\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = CT.TypeCast(mstype.int32)\n", - "\n", - " # apply map operations on images\n", - " mnist_ds = mnist_ds.map(input_columns=\"label\", operations=type_cast_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=resize_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=rescale_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=rescale_nml_op, num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(input_columns=\"image\", operations=hwc2chw_op, 
num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds\n", - "\n", - "\n", - "def conv(in_channels, out_channels, kernel_size, stride=1, padding=0):\n", - " \"\"\"weight initial for conv layer\"\"\"\n", - " weight = weight_variable()\n", - " return nn.Conv2d(in_channels, out_channels,\n", - " kernel_size=kernel_size, stride=stride, padding=padding,\n", - " weight_init=weight, has_bias=False, pad_mode=\"valid\")\n", - "\n", - "\n", - "def fc_with_initialize(input_channels, out_channels):\n", - " \"\"\"weight initial for fc layer\"\"\"\n", - " weight = weight_variable()\n", - " bias = weight_variable()\n", - " return nn.Dense(input_channels, out_channels, weight, bias)\n", - "\n", - "\n", - "def weight_variable():\n", - " \"\"\"weight initial\"\"\"\n", - " return TruncatedNormal(0.02)\n", - "\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"\n", - " Lenet network\n", - " Args:\n", - " num_class (int): Num classes. 
Default: 10.\n", - "\n", - " Returns:\n", - " Tensor, output tensor\n", - "\n", - " Examples:\n", - " >>> LeNet(num_class=10)\n", - " \"\"\"\n", - "\n", - " def __init__(self, num_class=10):\n", - " super(LeNet5, self).__init__()\n", - " self.num_class = num_class\n", - " self.batch_size = 32\n", - " self.conv1 = conv(1, 6, 5)\n", - " self.conv2 = conv(6, 16, 5)\n", - " self.fc1 = fc_with_initialize(16 * 5 * 5, 120)\n", - " self.fc2 = fc_with_initialize(120, 84)\n", - " self.fc3 = fc_with_initialize(84, self.num_class)\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.reshape = ops.Reshape()\n", - "\n", - " def construct(self, x):\n", - " x = self.conv1(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.conv2(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.reshape(x, (self.batch_size, -1))\n", - " x = self.fc1(x)\n", - " x = self.relu(x)\n", - " x = self.fc2(x)\n", - " x = self.relu(x)\n", - " x = self.fc3(x)\n", - " return x\n", - "\n", - "\n", - "if __name__ == \"__main__\":\n", - " context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - " ds_train_path = \"./datasets/MNIST_Data/train/\"\n", - " ds_train = create_dataset(ds_train_path, 32)\n", - "\n", - " network = LeNet5(10)\n", - " net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - " net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9)\n", - " model = Model(network, net_loss, net_opt)\n", - "\n", - " print(\"============== Starting Training ==============\")\n", - " model.train(epoch=10, train_dataset=ds_train, callbacks=[LossMonitor()], dataset_sink_mode=True, sink_size=1000)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`batch_size`为32的情况下,数据集的大小为1875,当`sink_size`设置为1000时,表示每个`epoch`下沉1000个batch的数据,下沉次数为`epoch`=10,下沉的总数据量为:`epoch`*`sink_size`=10000。\n", - "\n", - "`dataset_sink_mode`为True,所以每个`epoch`返回一次结果。" - ] - 
}, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> `dataset_sink_mode`为False时,`sink_size`参数设置无效。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/install/mindspore_ascend310_install_pip.md b/install/mindspore_ascend310_install_pip.md deleted file mode 100644 index 98b18b0ba618585f52e745acd58b41ef3c828e95..0000000000000000000000000000000000000000 --- a/install/mindspore_ascend310_install_pip.md +++ /dev/null @@ -1,123 +0,0 @@ -# pip方式安装MindSpore Ascend 310版本 - - - -- [pip方式安装MindSpore Ascend 310版本](#pip方式安装mindspore-ascend-310版本) - - [确认系统环境信息](#确认系统环境信息) - - [安装MindSpore](#安装mindspore) - - [配置环境变量](#配置环境变量) - - [验证是否成功安装](#验证是否成功安装) - - [安装MindSpore Serving](#安装mindspore-serving) - - - - - -本文档介绍如何在Ascend 310环境的Linux系统上,使用pip方式快速安装MindSpore,Ascend 310版本仅支持推理。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04/CentOS 7.6/EulerOS 2.8是64位操作系统。 -- 确认安装[GCC 7.3.0版本](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz)。 -- 确认安装[gmp 6.1.2版本](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz)。 -- 确认安装[CMake 3.18.3及以上版本](https://cmake.org/download/)。 - - 安装完成后将CMake所在路径添加到系统环境变量。 -- 确认安装Python 3.7.5版本。 - - 如果未安装或者已安装其他版本的Python,可从[官网](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz)或者[华为云](https://mirrors.huaweicloud.com/python/3.7.5/Python-3.7.5.tgz)下载Python 3.7.5版本 64位,进行安装。 -- 确认安装Ascend 310 AI处理器软件配套包([Ascend Data Center Solution 21.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/ascend-data-center-solution-pid-251167910/software/252504563?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C252309113%7C251167910))。 - - 软件配套包包括[驱动和固件A300-3000 
1.0.10](https://support.huawei.com/enterprise/zh/ascend-computing/a300-3000-pid-250702915/software/252496291?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C250702915)和[CANN 5.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373/software/252504455?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C251168373)。 - - 确认当前用户有权限访问Ascend 310 AI处理器配套软件包的安装路径`/usr/local/Ascend`,若无权限,需要root用户将当前用户添加到`/usr/local/Ascend`所在的用户组,具体配置请详见配套软件包的说明文档。 - - 安装Ascend 310 AI处理器配套软件包提供的whl包,whl包随配套软件包发布,升级配套软件包之后需要重新安装。 - - ```bash - pip install /usr/local/Ascend/ascend-toolkit/latest/atc/lib64/topi-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/atc/lib64/te-{version}-py3-none-any.whl - ``` - -## 安装MindSpore - -参考[版本列表](https://www.mindspore.cn/versions)先进行SHA-256完整性校验,校验一致后再执行如下命令安装MindSpore。 - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/ascend/ascend310/{system}/mindspore_ascend-{version}-cp37-cp37m-linux_{arch}.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 -- `{arch}`表示系统架构,例如使用的Linux系统是x86架构64位时,`{arch}`应写为`x86_64`。如果系统是ARM架构64位,则写为`aarch64`。 -- `{system}`表示系统版本,例如使用的欧拉系统ARM架构,`{system}`应写为`euleros_aarch64`,目前Ascend 310版本可支持以下系统`euleros_aarch64`/`centos_aarch64`/`centos_x86`/`ubuntu_aarch64`/`ubuntu_x86`。 - -## 配置环境变量 - -安装好MindSpore之后,需要导出Runtime相关环境变量,下述命令中`LOCAL_ASCEND=/usr/local/Ascend`的`/usr/local/Ascend`表示配套软件包的安装路径,需注意将其改为配套软件包的实际安装路径。 - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. 
-export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/acllib/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/atc/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# lib libraries that the mindspore depends on, modify "pip3" according to the actual situation -export LD_LIBRARY_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore/lib"}' | xargs realpath`:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/atc/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on -``` - -## 验证是否成功安装 - -创建目录放置样例代码工程,例如`/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_single_op_sample`,代码可以从[官网示例下载](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/sample_resources/ascend310_single_op_sample.zip)获取,这是一个`[1, 2, 3, 4]`与`[2, 3, 4, 5]`相加的简单样例,代码工程目录结构如下: - -```text - -└─ascend310_single_op_sample - ├── CMakeLists.txt // 编译脚本 - ├── README.md // 使用说明 - ├── main.cc // 主函数 - └── tensor_add.mindir // MindIR模型文件 -``` - -进入样例工程目录,按照实际情况修改路径路径: - -```bash -cd /home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_single_op_sample -``` - -参照`README.md`说明,构建工程,其中`pip3`需要按照实际情况修改。 - -```bash -cmake . 
-DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -make -``` - -构建成功后,执行用例。 - -```bash -./tensor_add_sample -``` - -如果输出: - -```text -3 -5 -7 -9 -``` - -说明MindSpore安装成功了。 - -## 安装MindSpore Serving - -当您想要快速体验MindSpore在线推理服务时,可以选装MindSpore Serving。 - -具体安装步骤参见[MindSpore Serving](https://gitee.com/mindspore/serving/blob/master/README_CN.md)。 diff --git a/install/mindspore_ascend310_install_pip_en.md b/install/mindspore_ascend310_install_pip_en.md deleted file mode 100644 index 48d78669792989aacf03ce94b1ba9ea7f34bef4d..0000000000000000000000000000000000000000 --- a/install/mindspore_ascend310_install_pip_en.md +++ /dev/null @@ -1,123 +0,0 @@ -# Installing MindSpore in Ascend 310 by pip - - - -- [Installing MindSpore in Ascend 310 by pip](#installing-mindspore-in-ascend-310-by-pip) - - [Checking System Environment Information](#checking-system-environment-information) - - [Installing MindSpore](#installing-mindspore) - - [Configuring Environment Variables](#configuring-environment-variables) - - [Verifying the Installation](#verifying-the-installation) - - [Installing MindSpore Serving](#installing-mindspore-serving) - - - - - -The following describes how to quickly install MindSpore by pip on Linux in the Ascend 310 environment, MindSpore in Ascend 310 only supports inference. - -## Checking System Environment Information - -- Ensure that the 64-bit Ubuntu 18.04, CentOS 7.6, or EulerOS 2.8 is installed. -- Ensure that right version [GCC 7.3.0](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz) is installed. -- Ensure that [GMP 6.1.2](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz) is installed. -- Ensure that [CMake 3.18.3 or later](https://cmake.org/download/) is installed. - - After installation, add the path of CMake to the system environment variables. -- Ensure that Python 3.7.5 is installed. 
- - If Python 3.7.5 (64-bit) is not installed, download it from the [Python official website](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz) or [HUAWEI CLOUD](https://mirrors.huaweicloud.com/python/3.7.5/Python-3.7.5.tgz) and install it. -- Ensure that the Ascend 310 AI Processor software packages ([Ascend Data Center Solution 21.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/ascend-data-center-solution-pid-251167910/software/252504563?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C252309113%7C251167910)) are installed. - - The software packages include [Driver and Firmware A300-3000 1.0.10](https://support.huawei.com/enterprise/zh/ascend-computing/a300-3000-pid-250702915/software/252496291?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C250702915) and [CANN 5.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373/software/252504455?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C251168373). - - Ensure that you have permissions to access the installation path `/usr/local/Ascend` of the Ascend 310 AI Processor software package. If not, ask the user root to add you to a user group to which `/usr/local/Ascend` belongs. For details about the configuration, see the description document in the software package. - - Install the .whl package provided with the Ascend 310 AI Processor software package. The .whl package is released with the software package. After the software package is upgraded, you need to reinstall the .whl package. - - ```bash - pip install /usr/local/Ascend/ascend-toolkit/latest/atc/lib64/topi-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/atc/lib64/te-{version}-py3-none-any.whl - ``` - -## Installing MindSpore - -It is recommended to refer to [Version List](https://www.mindspore.cn/versions/en) to perform SHA-256 integrity verification, and then execute the following command to install MindSpore after the verification is consistent. 
- -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/ascend/ascend310/{system}/mindspore_ascend-{version}-cp37-cp37m-linux_{arch}.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -In the preceding information: - -- When the network is connected, dependencies of the MindSpore installation package are automatically downloaded during the .whl package installation. For details about dependencies, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt). In other cases, install the dependencies by yourself. -- `{version}` specifies the MindSpore version number. For example, when installing MindSpore 1.1.0, set `{version}` to 1.1.0. -- `{arch}` specifies the system architecture. For example, if a Linux OS architecture is x86_64, set `{arch}` to `x86_64`. If the system architecture is ARM64, set `{arch}` to `aarch64`. -- `{system}` specifies the system version. For example, if EulerOS ARM64 is used, set `{system}` to `euleros_aarch64`. Currently, Ascend 310 supports the following systems: `euleros_aarch64`, `centos_aarch64`, `centos_x86`, `ubuntu_aarch64`, and `ubuntu_x86`. - -## Configuring Environment Variables - -After MindSpore is installed, export runtime environment variables. In the following command, `/usr/local/Ascend` in `LOCAL_ASCEND=/usr/local/Ascend` indicates the installation path of the software package. Change it to the actual installation path. - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. 
-export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/acllib/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/atc/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# lib libraries that the mindspore depends on, modify "pip3" according to the actual situation -export LD_LIBRARY_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore/lib"}' | xargs realpath`:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/atc/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on -``` - -## Verifying the Installation - -Create a directory to store the sample code project, for example, `/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_single_op_sample`. You can obtain the code from the [official website](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/sample_resources/ascend310_single_op_sample.zip). A simple example of adding `[1, 2, 3, 4]` to `[2, 3, 4, 5]` is used and the code project directory structure is as follows: - -```text - -└─ascend310_single_op_sample - ├── CMakeLists.txt // Build script - ├── README.md // Usage description - ├── main.cc // Main function - └── tensor_add.mindir // MindIR model file -``` - -Go to the directory of the sample project and change the path based on the actual requirements. 
- -```bash -cd /home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_single_op_sample -``` - -Build a project by referring to `README.md`, modify `pip3` according to the actual situation. - -```bash -cmake . -DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -make -``` - -After the build is successful, execute the case. - -```bash -./tensor_add_sample -``` - -The following information is displayed: - -```text -3 -5 -7 -9 -``` - -The preceding information indicates that MindSpore is successfully installed. - -## Installing MindSpore Serving - -If you want to quickly experience the MindSpore online inference service, you can install MindSpore Serving. - -For details, see [MindSpore Serving](https://gitee.com/mindspore/serving/blob/master/README.md). diff --git a/install/mindspore_ascend310_install_source.md b/install/mindspore_ascend310_install_source.md deleted file mode 100644 index 1e08fa8800b5ca6f353fd5a178c15c23b7697a62..0000000000000000000000000000000000000000 --- a/install/mindspore_ascend310_install_source.md +++ /dev/null @@ -1,153 +0,0 @@ -# 源码编译方式安装MindSpore Ascend 310版本 - - - -- [源码编译方式安装MindSpore Ascend 310版本](#源码编译方式安装mindspore-ascend-310版本) - - [确认系统环境信息](#确认系统环境信息) - - [从代码仓下载源码](#从代码仓下载源码) - - [编译MindSpore](#编译mindspore) - - [安装MindSpore](#安装mindspore) - - [配置环境变量](#配置环境变量) - - [验证是否成功安装](#验证是否成功安装) - - [安装MindSpore Serving](#安装mindspore-serving) - - - - - -本文档介绍如何在Ascend 310环境的Linux系统上,使用源码编译方式快速安装MindSpore,Ascend 310版本仅支持推理。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04/CentOS 7.6/EulerOS 2.8是64位操作系统。 -- 确认安装[GCC 7.3.0版本](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz)。 -- 确认安装[gmp 6.1.2版本](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz)。 -- 确认安装[Python 3.7.5版本](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz)。 -- 确认安装[OpenSSL 1.1.1及以上版本](https://github.com/openssl/openssl.git)。 - - 安装完成后设置环境变量`export OPENSSL_ROOT_DIR=“OpenSSL安装目录”`。 
-- 确认安装[CMake 3.18.3及以上版本](https://cmake.org/download/)。 - - 安装完成后将CMake所在路径添加到系统环境变量。 -- 确认安装[patch 2.5及以上版本](http://ftp.gnu.org/gnu/patch/)。 - - 安装完成后将patch所在路径添加到系统环境变量中。 -- 确认安装[wheel 0.32.0及以上版本](https://pypi.org/project/wheel/)。 -- 确认安装Ascend 310 AI处理器软件配套包([Ascend Data Center Solution 21.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/ascend-data-center-solution-pid-251167910/software/252504563?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C252309113%7C251167910))。 - - 软件配套包包括[驱动和固件A300-3000 1.0.10](https://support.huawei.com/enterprise/zh/ascend-computing/a300-3000-pid-250702915/software/252496291?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C250702915)和[CANN 5.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373/software/252504455?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C251168373)。 - - 确认当前用户有权限访问Ascend 310 AI处理器配套软件包的安装路径`/usr/local/Ascend`,若无权限,需要root用户将当前用户添加到`/usr/local/Ascend`所在的用户组,具体配置请详见配套软件包的说明文档。 - - 安装Ascend 310 AI处理器配套软件包提供的whl包,whl包随配套软件包发布,升级配套软件包之后需要重新安装。 - - ```bash - pip install /usr/local/Ascend/ascend-toolkit/latest/atc/lib64/topi-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/atc/lib64/te-{version}-py3-none-any.whl - ``` - -- 确认安装git工具。 - 如果未安装,使用如下命令下载安装: - - ```bash - apt-get install git # ubuntu and so on - yum install git # centos and so on - ``` - -## 从代码仓下载源码 - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## 编译MindSpore - -在源码根目录下执行如下命令。 - -```bash -bash build.sh -e ascend -V 310 -``` - -其中: - -`build.sh`中默认的编译线程数为8,如果编译机性能较差可能会出现编译错误,可在执行中增加-j{线程数}来减少线程数量。如`bash build.sh -e ascend -V 310 -j4`。 - -## 安装MindSpore - -```bash -chmod +x output/mindspore-ascend-{version}-cp37-cp37m-linux_{arch}.whl -pip install output/mindspore-ascend-{version}-cp37-cp37m-linux_{arch}.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 
在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 -- `{arch}`表示系统架构,例如使用的Linux系统是x86架构64位时,`{arch}`应写为`x86_64`。如果系统是ARM架构64位,则写为`aarch64`。 - -## 配置环境变量 - -安装好MindSpore之后,需要导出Runtime相关环境变量,下述命令中`LOCAL_ASCEND=/usr/local/Ascend`的`/usr/local/Ascend`表示配套软件包的安装路径,需注意将其改为配套软件包的实际安装路径。 - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. -export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/acllib/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/atc/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# lib libraries that the mindspore depends on, modify "pip3" according to the actual situation -export LD_LIBRARY_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore/lib"}' | xargs realpath`:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/atc/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on -``` - -## 验证是否成功安装 - -创建目录放置样例代码工程,例如`/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_single_op_sample`,代码可以从[官网示例下载](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/sample_resources/ascend310_single_op_sample.zip)获取,这是一个`[1, 2, 3, 4]`与`[2, 3, 
4, 5]`相加的简单样例,代码工程目录结构如下: - -```text - -└─ascend310_single_op_sample - ├── CMakeLists.txt // 编译脚本 - ├── README.md // 使用说明 - ├── main.cc // 主函数 - └── tensor_add.mindir // MindIR模型文件 -``` - -进入样例工程目录,按照实际情况修改路径路径: - -```bash -cd /home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_single_op_sample -``` - -参照`README.md`说明,构建工程,其中`pip3`需要按照实际情况修改。 - -```bash -cmake . -DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -make -``` - -构建成功后,执行用例。 - -```bash -./tensor_add_sample -``` - -如果输出: - -```text -3 -5 -7 -9 -``` - -说明MindSpore安装成功了。 - -## 安装MindSpore Serving - -当您想要快速体验MindSpore在线推理服务时,可以选装MindSpore Serving。 - -具体安装步骤参见[MindSpore Serving](https://gitee.com/mindspore/serving/blob/master/README_CN.md)。 diff --git a/install/mindspore_ascend310_install_source_en.md b/install/mindspore_ascend310_install_source_en.md deleted file mode 100644 index 9ec67d1680621c367251c6a9d9c9645b145fca03..0000000000000000000000000000000000000000 --- a/install/mindspore_ascend310_install_source_en.md +++ /dev/null @@ -1,153 +0,0 @@ -# Installing MindSpore in Ascend 310 by Source Code Compilation - - - -- [Installing MindSpore in Ascend 310 by Source Code Compilation](#installing-mindspore-in-ascend-310-by-source-code-compilation) - - [Checking System Environment Information](#checking-system-environment-information) - - [Downloading Source Code from the Code Repository](#downloading-source-code-from-the-code-repository) - - [Building MindSpore](#building-mindspore) - - [Installing MindSpore](#installing-mindspore) - - [Configuring Environment Variables](#configuring-environment-variables) - - [Verifying the Installation](#verifying-the-installation) - - [Installing MindSpore Serving](#installing-mindspore-serving) - - - - - -The following describes how to quickly install MindSpore by compiling the source code on Linux in the Ascend 310 environment, MindSpore in Ascend 310 only 
supports inference. - -## Checking System Environment Information - -- Ensure that the 64-bit Ubuntu 18.04, CentOS 7.6, or EulerOS 2.8 is installed. -- Ensure that right version [GCC 7.3.0](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz) is installed. -- Ensure that [GMP 6.1.2](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz) is installed. -- Ensure that [Python 3.7.5](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz) is installed. -- Ensure that [OpenSSL 1.1.1 or later](https://github.com/openssl/openssl.git) is installed. - - After installation, set the environment variable `export OPENSSL_ROOT_DIR= "OpenSSL installation directory"`. -- Ensure that [CMake 3.18.3 or later](https://cmake.org/download/) is installed. - - After installation, add the path of CMake to the system environment variables. -- Ensure that [patch 2.5 or later](http://ftp.gnu.org/gnu/patch/) is installed. - - After installation, add the patch path to the system environment variables. -- Ensure that [wheel 0.32.0 or later](https://pypi.org/project/wheel/) is installed. -- Ensure that the Ascend 310 AI Processor software packages ([Ascend Data Center Solution 21.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/ascend-data-center-solution-pid-251167910/software/252504563?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C252309113%7C251167910)) are installed. - - The software packages include [Driver and Firmware A300-3000 1.0.10](https://support.huawei.com/enterprise/zh/ascend-computing/a300-3000-pid-250702915/software/252496291?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C250702915) and [CANN 5.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373/software/252504455?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C251168373). - - Ensure that you have permissions to access the installation path `/usr/local/Ascend` of the Ascend 310 AI Processor software package. 
If not, ask the user root to add you to a user group to which `/usr/local/Ascend` belongs. For details about the configuration, see the description document in the software package. - - Install the .whl package provided with the Ascend 310 AI Processor software package. The .whl package is released with the software package. After the software package is upgraded, you need to reinstall the .whl package. - - ```bash - pip install /usr/local/Ascend/ascend-toolkit/latest/atc/lib64/topi-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/atc/lib64/te-{version}-py3-none-any.whl - ``` - -- Ensure that the git tool is installed. - If not, run the following command to download and install it: - - ```bash - apt-get install git # ubuntu and so on - yum install git # centos and so on - ``` - -## Downloading Source Code from the Code Repository - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## Building MindSpore - -Run the following command in the root directory of the source code. - -```bash -bash build.sh -e ascend -V 310 -``` - -In the preceding information: - -The default number of build threads is 8 in `build.sh`. If the compiler performance is poor, build errors may occur. You can add -j{Number of threads} to script to reduce the number of threads. For example, `bash build.sh -e ascend -V 310 -j4`. - -## Installing MindSpore - -```bash -chmod +x output/mindspore-ascend-{version}-cp37-cp37m-linux_{arch}.whl -pip install output/mindspore-ascend-{version}-cp37-cp37m-linux_{arch}.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -In the preceding information: - -- When the network is connected, dependencies of the MindSpore installation package are automatically downloaded during the .whl package installation. For details about dependencies, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt). In other cases, install the dependencies by yourself. 
-- `{version}` specifies the MindSpore version number. For example, when installing MindSpore 1.1.0, set `{version}` to 1.1.0. -- `{arch}` specifies the system architecture. For example, if a Linux OS architecture is x86_64, set `{arch}` to `x86_64`. If the system architecture is ARM64, set `{arch}` to `aarch64`. - -## Configuring Environment Variables - -After MindSpore is installed, export runtime environment variables. In the following command, `/usr/local/Ascend` in `LOCAL_ASCEND=/usr/local/Ascend` indicates the installation path of the software package. Change it to the actual installation path. - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. -export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/acllib/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/atc/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# lib libraries that the mindspore depends on, modify "pip3" according to the actual situation -export LD_LIBRARY_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore/lib"}' | xargs realpath`:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/atc/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on -``` - -## Verifying the Installation - -Create a directory to store the sample code project, for example, 
`/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_single_op_sample`. You can obtain the code from the [official website](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/sample_resources/ascend310_single_op_sample.zip). A simple example of adding `[1, 2, 3, 4]` to `[2, 3, 4, 5]` is used and the code project directory structure is as follows: - -```text - -└─ascend310_single_op_sample - ├── CMakeLists.txt // Build script - ├── README.md // Usage description - ├── main.cc // Main function - └── tensor_add.mindir // MindIR model file -``` - -Go to the directory of the sample project and change the path based on the actual requirements. - -```bash -cd /home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_single_op_sample -``` - -Build a project by referring to `README.md`, modify `pip3` according to the actual situation. - -```bash -cmake . -DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -make -``` - -After the build is successful, execute the case. - -```bash -./tensor_add_sample -``` - -The following information is displayed: - -```text -3 -5 -7 -9 -``` - -The preceding information indicates that MindSpore is successfully installed. - -## Installing MindSpore Serving - -If you want to quickly experience the MindSpore online inference service, you can install MindSpore Serving. - -For details, see [MindSpore Serving](https://gitee.com/mindspore/serving/blob/master/README.md). 
diff --git a/install/mindspore_ascend_install_conda.md b/install/mindspore_ascend_install_conda.md deleted file mode 100644 index 272efdd6c3fc3bfdb5cacb4ace1b375f8721ecf1..0000000000000000000000000000000000000000 --- a/install/mindspore_ascend_install_conda.md +++ /dev/null @@ -1,169 +0,0 @@ -# Conda方式安装MindSpore Ascend 910版本 - - - -- [Conda方式安装MindSpore Ascend 910版本](#conda方式安装mindspore-ascend-910版本) - - [确认系统环境信息](#确认系统环境信息) - - [安装Conda](#安装conda) - - [添加Conda镜像源](#添加conda镜像源) - - [创建并激活Conda环境](#创建并激活conda环境) - - [安装MindSpore](#安装mindspore) - - [配置环境变量](#配置环境变量) - - [验证是否成功安装](#验证是否成功安装) - - [升级MindSpore版本](#升级mindspore版本) - - [安装MindInsight](#安装mindinsight) - - [安装MindArmour](#安装mindarmour) - - [安装MindSpore Hub](#安装mindspore-hub) - - [安装MindSpore Serving](#安装mindspore-serving) - - - - - -本文档介绍如何在Ascend 910环境的Linux系统上,使用Conda方式快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04/CentOS 7.6/EulerOS 2.8/KylinV10 SP1是64位操作系统。 -- 确认安装Ascend 910 AI处理器软件配套包([Atlas Data Center Solution 21.0.1])。 - - 确认当前用户有权限访问Ascend 910 AI处理器配套软件包的安装路径`/usr/local/Ascend`,若无权限,需要root用户将当前用户添加到`/usr/local/Ascend`所在的用户组,具体配置请详见配套软件包的说明文档。 - - 安装Ascend 910 AI处理器配套软件包提供的whl包,whl包随配套软件包发布,参考如下命令完成安装。 - - ```bash - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/topi-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/te-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/hccl-{version}-py3-none-any.whl - ``` - - - 如果升级了Ascend 910 AI处理器配套软件包,配套的whl包也需要重新安装,先将原来的安装包卸载,再参考上述命令重新安装。 - - ```bash - pip uninstall te topi hccl -y - ``` - -## 安装Conda - -下载并安装对应架构的Conda安装包。 - -- X86架构 - - 官网下载地址:[X86 Anaconda](https://www.anaconda.com/distribution/) 或 [X86 Miniconda](https://docs.conda.io/en/latest/miniconda.html) - - 清华镜像源下载地址:[X86 Anaconda](https://mirrors.tuna.tsinghua.edu.cn/anaconda/archive/Anaconda3-2020.02-Linux-x86_64.sh) -- ARM架构 - - [ARM 
Anaconda](https://github.com/Archiconda/build-tools/releases/download/0.2.3/Archiconda3-0.2.3-Linux-aarch64.sh) - -## 添加Conda镜像源 - -从清华源镜像源下载Conda安装包的可跳过此步操作。 - -```bash -conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/ -conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/main/ -conda config --set show_channel_urls yes -``` - -## 创建并激活Conda环境 - -```bash -conda create -n mindspore python=3.7.5 -conda activate mindspore -``` - -## 安装MindSpore - -参考[版本列表](https://www.mindspore.cn/versions)先进行SHA-256完整性校验,校验一致后再执行如下命令安装MindSpore。 - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/ascend/{system}/mindspore_ascend-{version}-cp37-cp37m-linux_{arch}.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 -- `{arch}`表示系统架构,例如使用的系统是x86架构64位时,`{arch}`应写为`x86_64`。如果系统是ARM架构64位,则写为`aarch64`。 -- `{system}`表示系统,例如使用的欧拉系统ARM架构,`{system}`应写为`euleros_aarch64`,目前可支持以下系统`euleros_aarch64`/`centos_aarch64`/`centos_x86`/`ubuntu_aarch64`/`ubuntu_x86`/`kylin_aarch64`。 - -## 配置环境变量 - -**如果Ascend 910 AI处理器配套软件包没有安装在默认路径**,安装好MindSpore之后,需要导出Runtime相关环境变量,下述命令中`LOCAL_ASCEND=/usr/local/Ascend`的`/usr/local/Ascend`表示配套软件包的安装路径,需注意将其改为配套软件包的实际安装路径。 - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. 
-export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on -``` - -## 验证是否成功安装 - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="Ascend") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -如果输出: - -```text -[[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 
2.]]] -``` - -说明MindSpore安装成功了。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -```bash -pip install --upgrade mindspore-ascend -``` - -## 安装MindInsight - -当您需要查看训练过程中的标量、图像、计算图以及模型超参等信息时,可以选装MindInsight。 - -具体安装步骤参见[MindInsight](https://gitee.com/mindspore/mindinsight/blob/master/README_CN.md)。 - -## 安装MindArmour - -当您进行AI模型安全研究或想要增强AI应用模型的防护能力时,可以选装MindArmour。 - -具体安装步骤参见[MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README_CN.md)。 - -## 安装MindSpore Hub - -当您想要快速体验MindSpore预训练模型时,可以选装MindSpore Hub。 - -具体安装步骤参见[MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README_CN.md)。 - -## 安装MindSpore Serving - -当您想要快速体验MindSpore在线推理服务时,可以选装MindSpore Serving。 - -具体安装步骤参见[MindSpore Serving](https://gitee.com/mindspore/serving/blob/master/README_CN.md)。 diff --git a/install/mindspore_ascend_install_docker.md b/install/mindspore_ascend_install_docker.md deleted file mode 100644 index 9917e8bf6ab439b279a542fbb348ea448458a31d..0000000000000000000000000000000000000000 --- a/install/mindspore_ascend_install_docker.md +++ /dev/null @@ -1,131 +0,0 @@ -# Docker方式安装MindSpore Ascend 910版本 - - - -- [Docker方式安装MindSpore Ascend 910版本](#docker方式安装mindspore-ascend-910版本) - - [确认系统环境信息](#确认系统环境信息) - - [获取MindSpore镜像](#获取mindspore镜像) - - [运行MindSpore镜像](#运行mindspore镜像) - - [验证是否安装成功](#验证是否安装成功) - - [升级MindSpore版本](#升级mindspore版本) - - - - - -[Docker](https://docs.docker.com/get-docker/)是一个开源的应用容器引擎,让开发者打包他们的应用以及依赖包到一个轻量级、可移植的容器中。通过使用Docker,可以实现MindSpore的快速部署,并与系统环境隔离。 - -本文档介绍如何在Ascend 910环境的Linux系统上,使用Docker方式快速安装MindSpore。 - -MindSpore的Ascend 910镜像托管在[Ascend Hub](https://ascend.huawei.com/ascendhub/#/main)上。 - -目前容器化构建选项支持情况如下: - -| 硬件平台 | Docker镜像仓库 | 标签 | 说明 | -| :----- | :------------------------ | :----------------------- | :--------------------------------------- | -| Ascend | `public-ascendhub/ascend-mindspore-arm` | `x.y.z` | 已经预安装与Ascend Data Center Solution `x.y.z` 版本共同发布的MindSpore的生产环境。 | - -> `x.y.z`对应Atlas Data Center Solution版本号,可以在Ascend Hub页面获取。 
- -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04/CentOS 7.6是64位操作系统。 -- 确认安装[Docker 18.03或更高版本](https://docs.docker.com/get-docker/)。 -- 确认安装Ascend 910 AI处理器软件配套包([Ascend Data Center Solution 21.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/ascend-data-center-solution-pid-251167910/software/252504563?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C252309113%7C251167910))。 - - 软件配套包包括[驱动和固件A800-9000 1.0.10](https://support.huawei.com/enterprise/zh/ascend-computing/a800-9000-pid-250702818/software/252727249?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C250702818)和[CANN 5.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373/software/252504455?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C251168373)。 - - 确认当前用户有权限访问Ascend 910 AI处理器配套软件包的安装路径`/usr/local/Ascend`,若无权限,需要root用户将当前用户添加到`/usr/local/Ascend`所在的用户组,具体配置请详见配套软件包的说明文档。 - - 在完成安装基础驱动与配套软件包的基础上,确认安装CANN软件包中的toolbox实用工具包,即Ascend-cann-toolbox-{version}.run,该工具包提供了Ascend NPU容器化支持的Ascend Docker runtime工具。 - -## 获取MindSpore镜像 - -1. 登录[Ascend Hub镜像中心](https://ascend.huawei.com/ascendhub/#/home),注册并激活账号,获取登录指令和拉取指令。 -2. 
获取下载权限后,进入MindSpore镜像下载页面([x86版本](https://ascend.huawei.com/ascendhub/#/detail?name=ascend-mindspore-x86),[arm版本](https://ascend.huawei.com/ascendhub/#/detail?name=ascend-mindspore-arm)),获取登录与下载指令并执行: - - ```bash - docker login -u {username} -p {password} {url} - docker pull swr.cn-south-1.myhuaweicloud.com/public-ascendhub/ascend-mindspore-{arch}:{tag} - ``` - - 其中: - - - `{username}` `{password}` `{url}` 代表用户的登录信息与镜像服务器信息,均为注册并激活账号后自动生成,在对应MindSpore镜像页面复制登录命令即可获取。 - - `{arch}` 表示系统架构,例如使用的Linux系统是x86架构64位时,{arch}应写为x86。如果系统是ARM架构64位,则写为arm。 - - `{tag}` 对应Atlas Data Center Solution版本号,同样可以在MindSpore镜像下载页面复制下载命令获取。 - -## 运行MindSpore镜像 - -执行以下命令启动Docker容器实例: - -```bash -docker run -it -v /usr/local/Ascend/driver:/usr/local/Ascend/driver \ - -v /usr/local/Ascend/add-ons/:/usr/local/Ascend/add-ons/ \ - -v /var/log/npu/:/usr/slog \ - --device=/dev/davinci0 \ - --device=/dev/davinci1 \ - --device=/dev/davinci2 \ - --device=/dev/davinci3 \ - --device=/dev/davinci4 \ - --device=/dev/davinci5 \ - --device=/dev/davinci6 \ - --device=/dev/davinci7 \ - --device=/dev/davinci_manager \ - --device=/dev/devmm_svm \ - --device=/dev/hisi_hdc \ - swr.cn-south-1.myhuaweicloud.com/public-ascendhub/ascend-mindspore-{arch}:{tag} \ - /bin/bash -``` - -其中: - -- `{arch}` 表示系统架构,例如使用的Linux系统是x86架构64位时,{arch}应写为x86。如果系统是ARM架构64位,则写为arm。 -- `{tag}`对应Atlas Data Center Solution版本号,在MindSpore镜像下载页面自动获取。 - -## 验证是否安装成功 - -按照上述步骤进入MindSpore容器后,测试Docker容器是否正常工作,请运行下面的Python代码并检查输出: - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="Ascend") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -代码成功运行时会输出: - -```text -[[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 
2.]]] -``` - -至此,你已经成功通过Docker方式安装了MindSpore Ascend 910版本。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时: - -- 根据需要升级的MindSpore版本,升级对应的Ascend 910 AI处理器软件配套包。 -- 再次登录[Ascend Hub镜像中心](https://ascend.huawei.com/ascendhub/#/home)获取最新docker版本的下载命令,并执行: - - ```bash - docker pull swr.cn-south-1.myhuaweicloud.com/public-ascendhub/ascend-mindspore-{arch}:{tag} - ``` - - 其中: - - - `{arch}` 表示系统架构,例如使用的Linux系统是x86架构64位时,{arch}应写为x86。如果系统是ARM架构64位,则写为arm。 - - `{tag}`对应Atlas Data Center Solution版本号,同样可以在MindSpore镜像下载页面自动获取。 diff --git a/install/mindspore_ascend_install_docker_en.md b/install/mindspore_ascend_install_docker_en.md deleted file mode 100644 index f0247399108d04aa4df71fcc35e623d30b9febc5..0000000000000000000000000000000000000000 --- a/install/mindspore_ascend_install_docker_en.md +++ /dev/null @@ -1,131 +0,0 @@ -# Installing MindSpore in Ascend 910 by Docker - - - -- [Installing MindSpore in Ascend 910 by Docker](#installing-mindspore-in-ascend-910-by-docker) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Obtaining MindSpore Image](#obtaining-mindspore-image) - - [Running MindSpore Image](#running-mindspore-image) - - [Installation Verification](#installation-verification) - - [Version Update](#version-update) - - - - - -[Docker](https://docs.docker.com/get-docker/) is an open source application container engine, developers can package their applications and dependencies into a lightweight, portable container. By using Docker, MindSpore can be rapidly deployed and separated from the system environment. - -This document describes how to quickly install MindSpore in a Linux system with an Ascend 910 environment by Docker. - -The Ascend 910 image of MindSpore is hosted on the [Ascend Hub](https://ascend.huawei.com/ascendhub/#/main). 
- -The current support for containerized build options is as follows: - -| Hardware | Docker Image Hub | Label | Note | -| :----- | :------------------------ | :----------------------- | :--------------------------------------- | -| Ascend | `public-ascendhub/ascend-mindspore-arm` | `x.y.z` | The production environment of MindSpore released together with the Ascend Data Center Solution `x.y.z` version is pre-installed. | - -> `x.y.z` corresponds to the version number of Atlas Data Center Solution, which can be obtained on the Ascend Hub page. - -## System Environment Information Confirmation - -- Confirm that Ubuntu 18.04/CentOS 7.6 is installed with the 64-bit operating system. -- Confirm that [Docker 18.03 or later](https://docs.docker.com/get-docker/) is installed. -- Confirm that the Ascend 910 AI processor software package ([Ascend Data Center Solution 21.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/ascend-data-center-solution-pid-251167910/software/252504563?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C252309113%7C251167910)) are installed. - - The software packages include [Driver and Firmware A800-9000 1.0.10](https://support.huawei.com/enterprise/zh/ascend-computing/a800-9000-pid-250702818/software/252727249?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C250702818) and [CANN 5.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373/software/252504455?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C251168373). - - Confirm that the current user has the right to access the installation path `/usr/local/Ascend`of Ascend 910 AI processor software package. If not, the root user needs to add the current user to the user group where `/usr/local/Ascend` is located. For the specific configuration, please refer to the software package instruction document. 
- - After installing basic driver and corresponding software packages, confirm that the toolbox utility package in the CANN software package is installed, namely Ascend-cann-toolbox-{version}.run. The toolbox provides Ascend Docker runtime tools supported by Ascend NPU containerization. - -## Obtaining MindSpore Image - -1. Log in to [Ascend Hub Image Center](https://ascend.huawei.com/ascendhub/#/home), register and activate an account, get login instructions and pull instructions. -2. After obtaining the download permission, enter the MindSpore image download page ([x86 version](https://ascend.huawei.com/ascendhub/#/detail?name=ascend-mindspore-x86), [arm version](https://ascend.huawei.com/ascendhub/#/detail?name=ascend-mindspore-arm)). Get login and download commands and execute: - - ```bash - docker login -u {username} -p {password} {url} - docker pull swr.cn-south-1.myhuaweicloud.com/public-ascendhub/ascend-mindspore-{arch}:{tag} - ``` - - of which, - - - `{username}` `{password}` `{url}` represents the user's login information and image server information, which are automatically generated after registering and activating the account, and can be obtained by copying the login command on the corresponding MindSpore image page. - - `{arch}` denotes the system architecture. For example, the Linux system you are using is x86 architecture 64-bit, {arch} should be x86. If the system is ARM architecture 64-bit, then it should be arm. - - `{tag}` corresponds to the version number of Atlas Data Center Solution, which can also be obtained by copying the download command on the MindSpore image download page. 
- -## Running MindSpore Image - -Execute the following command to start the Docker container instance: - -```bash -docker run -it -v /usr/local/Ascend/driver:/usr/local/Ascend/driver \ - -v /usr/local/Ascend/add-ons/:/usr/local/Ascend/add-ons/ \ - -v /var/log/npu/:/usr/slog \ - --device=/dev/davinci0 \ - --device=/dev/davinci1 \ - --device=/dev/davinci2 \ - --device=/dev/davinci3 \ - --device=/dev/davinci4 \ - --device=/dev/davinci5 \ - --device=/dev/davinci6 \ - --device=/dev/davinci7 \ - --device=/dev/davinci_manager \ - --device=/dev/devmm_svm \ - --device=/dev/hisi_hdc \ - swr.cn-south-1.myhuaweicloud.com/public-ascendhub/ascend-mindspore-{arch}:{tag} \ - /bin/bash -``` - -of which, - -- `{arch}` denotes the system architecture. For example, the Linux system you are using is x86 architecture 64-bit, {arch} should be x86. If the system is ARM architecture 64-bit, then it should be arm. -- `{tag}` corresponds to the version number of Atlas Data Center Solution, which can be automatically obtained on the MindSpore image download page. - -## Installation Verification - -After entering the MindSpore container according to the above steps, to test whether the Docker container is working properly, please run the following Python code and check the output: - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="Ascend") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -The outputs should be the same as: - -```text -[[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]]] -``` - -It means MindSpore has been installed by docker successfully. 
- -## Version Update - -When you need to update the MindSpore version: - -- update Ascend 910 AI processor software package according to MindSpore package version of which you wish to update. -- log in to [Ascend Hub Image Center](https://ascend.huawei.com/ascendhub/#/home) again to obtain the download command of the latest docker version and execute: - - ```bash - docker pull swr.cn-south-1.myhuaweicloud.com/public-ascendhub/ascend-mindspore-{arch}:{tag} - ``` - - of which, - - - `{arch}` denotes the system architecture. For example, the Linux system you are using is x86 architecture 64-bit, {arch} should be x86. If the system is ARM architecture 64-bit, then it should be arm. - - `{tag}` corresponds to the version number of Atlas Data Center Solution, which can be automatically obtained on the MindSpore image download page. diff --git a/install/mindspore_ascend_install_pip.md b/install/mindspore_ascend_install_pip.md deleted file mode 100644 index d3331cc969e5aab6001f2fdbed532d5c392dbad5..0000000000000000000000000000000000000000 --- a/install/mindspore_ascend_install_pip.md +++ /dev/null @@ -1,144 +0,0 @@ -# pip方式安装MindSpore Ascend 910版本 - - - -- [pip方式安装MindSpore Ascend 910版本](#pip方式安装mindspore-ascend-910版本) - - [确认系统环境信息](#确认系统环境信息) - - [安装MindSpore](#安装mindspore) - - [配置环境变量](#配置环境变量) - - [验证是否成功安装](#验证是否成功安装) - - [升级MindSpore版本](#升级mindspore版本) - - [安装MindInsight](#安装mindinsight) - - [安装MindArmour](#安装mindarmour) - - [安装MindSpore Hub](#安装mindspore-hub) - - [安装MindSpore Serving](#安装mindspore-serving) - - - - - -本文档介绍如何在Ascend 910环境的Linux系统上,使用pip方式快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04/CentOS 7.6/EulerOS 2.8/KylinV10 SP1是64位操作系统。 -- 确认安装[GCC 7.3.0版本](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz)。 -- 确认安装[gmp 6.1.2版本](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz)。 -- 确认安装Python 3.7.5版本。 - - 
如果未安装或者已安装其他版本的Python,可从[官网](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz)或者[华为云](https://mirrors.huaweicloud.com/python/3.7.5/Python-3.7.5.tgz)下载Python 3.7.5版本 64位,进行安装。 -- 确认安装Ascend 910 AI处理器软件配套包([Ascend Data Center Solution 21.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/ascend-data-center-solution-pid-251167910/software/252504563?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C252309113%7C251167910))。 - - 软件配套包包括[驱动和固件A800-9000 1.0.10](https://support.huawei.com/enterprise/zh/ascend-computing/a800-9000-pid-250702818/software/252727249?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C250702818)和[CANN 5.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373/software/252504455?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C251168373)。 - - 确认当前用户有权限访问Ascend 910 AI处理器配套软件包的安装路径`/usr/local/Ascend`,若无权限,需要root用户将当前用户添加到`/usr/local/Ascend`所在的用户组,具体配置请详见配套软件包的说明文档。 - - 安装Ascend 910 AI处理器配套软件包提供的whl包,whl包随配套软件包发布,参考如下命令完成安装。 - - ```bash - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/topi-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/te-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/hccl-{version}-py3-none-any.whl - ``` - - - 如果升级了Ascend 910 AI处理器配套软件包,配套的whl包也需要重新安装,先将原来的安装包卸载,再参考上述命令重新安装。 - - ```bash - pip uninstall te topi hccl -y - ``` - -## 安装MindSpore - -参考[版本列表](https://www.mindspore.cn/versions)先进行SHA-256完整性校验,校验一致后再执行如下命令安装MindSpore。 - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/ascend/{system}/mindspore_ascend-{version}-cp37-cp37m-linux_{arch}.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- 
`{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 -- `{arch}`表示系统架构,例如使用的Linux系统是x86架构64位时,`{arch}`应写为`x86_64`。如果系统是ARM架构64位,则写为`aarch64`。 -- `{system}`表示系统版本,例如使用的欧拉系统ARM架构,`{system}`应写为`euleros_aarch64`,目前Ascend版本可支持以下系统`euleros_aarch64`/`centos_aarch64`/`centos_x86`/`ubuntu_aarch64`/`ubuntu_x86`/`kylin_aarch64`。 - -## 配置环境变量 - -**如果Ascend 910 AI处理器配套软件包没有安装在默认路径**,安装好MindSpore之后,需要导出Runtime相关环境变量,下述命令中`LOCAL_ASCEND=/usr/local/Ascend`的`/usr/local/Ascend`表示配套软件包的安装路径,需注意将其改为配套软件包的实际安装路径。 - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. -export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on -``` - -## 验证是否成功安装 - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="Ascend") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -如果输出: - -```text -[[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 
2.]]] -``` - -说明MindSpore安装成功了。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -```bash -pip install --upgrade mindspore-ascend -``` - -## 安装MindInsight - -当您需要查看训练过程中的标量、图像、计算图以及模型超参等信息时,可以选装MindInsight。 - -具体安装步骤参见[MindInsight](https://gitee.com/mindspore/mindinsight/blob/master/README_CN.md)。 - -## 安装MindArmour - -当您进行AI模型安全研究或想要增强AI应用模型的防护能力时,可以选装MindArmour。 - -具体安装步骤参见[MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README_CN.md)。 - -## 安装MindSpore Hub - -当您想要快速体验MindSpore预训练模型时,可以选装MindSpore Hub。 - -具体安装步骤参见[MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README_CN.md)。 - -## 安装MindSpore Serving - -当您想要快速体验MindSpore在线推理服务时,可以选装MindSpore Serving。 - -具体安装步骤参见[MindSpore Serving](https://gitee.com/mindspore/serving/blob/master/README_CN.md)。 diff --git a/install/mindspore_ascend_install_pip_en.md b/install/mindspore_ascend_install_pip_en.md deleted file mode 100644 index 1f205a3c40fc3a76fb171828f7238070bd9a48f2..0000000000000000000000000000000000000000 --- a/install/mindspore_ascend_install_pip_en.md +++ /dev/null @@ -1,147 +0,0 @@ -# Installing MindSpore in Ascend 910 by pip - - - -- [Installing MindSpore in Ascend 910 by pip](#installing-mindspore-in-ascend-910-by-pip) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Installing MindSpore](#installing-mindspore) - - [Configuring Environment Variables](#configuring-environment-variables) - - [Installation Verification](#installation-verification) - - [Version Update](#version-update) - - [Installing MindInsight](#installing-mindinsight) - - [Installing MindArmour](#installing-mindarmour) - - [Installing MindSpore Hub](#installing-mindspore-hub) - - [Installing MindSpore Serving](#installing-mindspore-serving) - - - - - -This document describes how to quickly install MindSpore in a Linux system with an Ascend 910 environment by pip. 
- -## System Environment Information Confirmation - -- Confirm that Ubuntu 18.04/CentOS 7.6/EulerOS 2.8/KylinV10 SP1 is installed with the 64-bit operating system. -- Ensure that right version [GCC 7.3.0](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz) is installed. -- Confirm that [gmp 6.1.2](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz) is installed. -- Confirm that Python 3.7.5 is installed. - - If you didn't install Python or you have installed other versions, please download the Python 3.7.5 64-bit from [Python](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz) or [Huaweicloud](https://mirrors.huaweicloud.com/python/3.7.5/Python-3.7.5.tgz) to install. -- Confirm that the Ascend 910 AI processor software package ([Ascend Data Center Solution 21.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/ascend-data-center-solution-pid-251167910/software/252504563?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C252309113%7C251167910)) are installed. - - The software packages include [Driver and Firmware A800-9000 1.0.10](https://support.huawei.com/enterprise/zh/ascend-computing/a800-9000-pid-250702818/software/252727249?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C250702818) and [CANN 5.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373/software/252504455?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C251168373). - - Confirm that the current user has the right to access the installation path `/usr/local/Ascend`of Ascend 910 AI processor software package, If not, the root user needs to add the current user to the user group where `/usr/local/Ascend` is located. For the specific configuration, please refer to the software package instruction document. - - Install the .whl package provided in Ascend 910 AI processor software package. The .whl package is released with the software package. After software package is upgraded, reinstall the .whl package. 
- - ```bash - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/topi-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/te-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/hccl-{version}-py3-none-any.whl - ``` - - - If the Ascend 910 AI processor software package is upgraded, the .whl package also needs to be reinstalled, first uninstall the original installation package, and then refer to the above command to reinstall. - - ```bash - pip uninstall te topi hccl -y - ``` - -## Installing MindSpore - -It is recommended to refer to [Version List](https://www.mindspore.cn/versions/en) to perform SHA-256 integrity verification, and then execute the following command to install MindSpore after the verification is consistent. - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/ascend/{system}/mindspore_ascend-{version}-cp37-cp37m-linux_{arch}.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -Of which, - -- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about other dependency items, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)). In other cases, you need to manually install dependency items. -- `{version}` denotes the version of MindSpore. For example, when you are installing MindSpore 1.1.0, `{version}` should be 1.1.0. -- `{arch}` denotes the system architecture. For example, the Linux system you are using is x86 architecture 64-bit, `{arch}` should be `x86_64`. If the system is ARM architecture 64-bit, then it should be `aarch64`. -- `{system}` denotes the system version. For example, if you are using EulerOS ARM architecture, `{system}` should be `euleros_aarch64`. 
Currently, the following systems are supported by Ascend: `euleros_aarch64`/`centos_x86`/`ubuntu_aarch64`/`ubuntu_x86`/`kylin_aarch64`. - -## Configuring Environment Variables - -- **If Ascend 910 AI processor software is installed in a non-default path**, after MindSpore is installed, export runtime-related environment variables. `/usr/local/Ascend` in the following command `LOCAL_ASCEND=/usr/local/Ascend` denotes the installation path of the software package, please replace it as your actual installation path. - - ```bash - # control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. - export GLOG_v=2 - - # Conda environmental options - LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - - # lib libraries that the run package depends on - export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - - # Environment variables that must be configured - export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path - export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path - export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path - export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} - # Python library that TBE implementation depends on - ``` - -## Installation Verification - -- After configuring the environment variables, execute the following Python script: - - ```python - import numpy as np - from mindspore import Tensor - import mindspore.ops as ops - import mindspore.context as context - - context.set_context(device_target="Ascend") - x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) - y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) - print(ops.add(x, y)) - ``` - -- The outputs should be the same as: - - ```text - [[[ 2. 2. 2. 
2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]]] - ``` - -It means MindSpore has been installed successfully. - -## Version Update - -Using the following command if you need to update the MindSpore version: - -```bash -pip install --upgrade mindspore-ascend -``` - -## Installing MindInsight - -If you need to analyze information such as model scalars, graphs, computation graphs and model traceback, you can install MindInsight. - -For more details, please refer to [MindInsight](https://gitee.com/mindspore/mindinsight/blob/master/README.md). - -## Installing MindArmour - -If you need to conduct AI model security research or enhance the security of the model in you applications, you can install MindArmour. - -For more details, please refer to [MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README.md). - -## Installing MindSpore Hub - -If you need to access and experience MindSpore pre-trained models quickly, you can install MindSpore Hub. - -For more details, please refer to [MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README.md). - -## Installing MindSpore Serving - -If you need to access and experience MindSpore online inference services quickly, you can install MindSpore Serving. - -For more details, please refer to [MindSpore Serving](https://gitee.com/mindspore/serving/blob/master/README.md). 
diff --git a/install/mindspore_ascend_install_source.md b/install/mindspore_ascend_install_source.md deleted file mode 100644 index 4ea9c3481d7081123b543b848f9fc4a690c465b3..0000000000000000000000000000000000000000 --- a/install/mindspore_ascend_install_source.md +++ /dev/null @@ -1,204 +0,0 @@ -# 源码编译方式安装MindSpore Ascend 910版本 - - - -- [源码编译方式安装MindSpore Ascend 910版本](#源码编译方式安装mindspore-ascend-910版本) - - [确认系统环境信息](#确认系统环境信息) - - [从代码仓下载源码](#从代码仓下载源码) - - [编译MindSpore](#编译mindspore) - - [安装MindSpore](#安装mindspore) - - [配置环境变量](#配置环境变量) - - [验证是否成功安装](#验证是否成功安装) - - [升级MindSpore版本](#升级mindspore版本) - - [安装MindInsight](#安装mindinsight) - - [安装MindArmour](#安装mindarmour) - - [安装MindSpore Hub](#安装mindspore-hub) - - [安装MindSpore Serving](#安装mindspore-serving) - - - - - -本文档介绍如何在Ascend 910环境的Linux系统上,使用源码编译方式快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04/CentOS 7.6/EulerOS 2.8/KylinV10 SP1是64位操作系统。 -- 确认安装[GCC 7.3.0版本](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz)。 -- 确认安装[gmp 6.1.2版本](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz)。 -- 确认安装[Python 3.7.5版本](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz)。 -- 确认安装[OpenSSL 1.1.1及以上版本](https://github.com/openssl/openssl.git)。 - - 安装完成后设置环境变量`export OPENSSL_ROOT_DIR=“OpenSSL安装目录”`。 -- 确认安装[CMake 3.18.3及以上版本](https://cmake.org/download/)。 - - 安装完成后将CMake所在路径添加到系统环境变量。 -- 确认安装[patch 2.5及以上版本](http://ftp.gnu.org/gnu/patch/)。 - - 安装完成后将patch所在路径添加到系统环境变量中。 -- 确认安装[wheel 0.32.0及以上版本](https://pypi.org/project/wheel/)。 -- 确认安装Ascend 910 AI处理器软件配套包([Ascend Data Center Solution 21.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/ascend-data-center-solution-pid-251167910/software/252504563?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C252309113%7C251167910))。 - - 软件配套包包括[驱动和固件A800-9000 1.0.10](https://support.huawei.com/enterprise/zh/ascend-computing/a800-9000-pid-250702818/software/252727249?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C250702818)和[CANN 
5.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373/software/252504455?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C251168373)。 - - 确认当前用户有权限访问Ascend 910 AI处理器配套软件包的安装路径`/usr/local/Ascend`,若无权限,需要root用户将当前用户添加到`/usr/local/Ascend`所在的用户组,具体配置请详见配套软件包的说明文档。 - - 安装Ascend 910 AI处理器配套软件包提供的whl包,whl包随配套软件包发布,参考如下命令完成安装。 - - ```bash - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/topi-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/te-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/hccl-{version}-py3-none-any.whl - ``` - - - 如果升级了Ascend 910 AI处理器配套软件包,配套的whl包也需要重新安装,先将原来的安装包卸载,再参考上述命令重新安装。 - - ```bash - pip uninstall te topi hccl -y - ``` - -- 确认安装[NUMA 2.0.11及以上版本](https://github.com/numactl/numactl)。 - Ubuntu系统用户,如果未安装,使用如下命令下载安装: - - ```bash - apt-get install libnuma-dev - ``` - - EulerOS和CentOS系统用户,如果未安装,使用如下命令下载安装: - - ```bash - yum install numactl-devel - ``` - -- 确认安装git工具。 - - Ubuntu系统用户,如果未安装,使用如下命令下载安装: - - ```bash - apt-get install git - ``` - - EulerOS和CentOS系统用户,如果未安装,使用如下命令下载安装: - - ```bash - yum install git - ``` - -## 从代码仓下载源码 - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## 编译MindSpore - -在源码根目录下执行如下命令。 - -```bash -bash build.sh -e ascend -``` - -其中: -`build.sh`中默认的编译线程数为8,如果编译机性能较差可能会出现编译错误,可在执行中增加-j{线程数}来减少线程数量。如`bash build.sh -e ascend -j4`。 - -## 安装MindSpore - -```bash -chmod +x build/package/mindspore_ascend-{version}-cp37-cp37m-linux_{arch}.whl -pip install build/package/mindspore_ascend-{version}-cp37-cp37m-linux_{arch}.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 -- 
`{arch}`表示系统架构,例如使用的Linux系统是x86架构64位时,`{arch}`应写为`x86_64`。如果系统是ARM架构64位,则写为`aarch64`,目前Ascend版本可支持以下系统`euleros_aarch64`/`centos_aarch64`/`centos_x86`/`ubuntu_aarch64`/`ubuntu_x86`/`kylin_aarch64`。 - -## 配置环境变量 - -**如果Ascend 910 AI处理器配套软件包没有安装在默认路径**,安装好MindSpore之后,需要导出Runtime相关环境变量,下述命令中`LOCAL_ASCEND=/usr/local/Ascend`的`/usr/local/Ascend`表示配套软件包的安装路径,需注意将其改为配套软件包的实际安装路径。 - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. -export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on -``` - -## 验证是否成功安装 - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="Ascend") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -如果输出: - -```text -[[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 
2.]]] -``` - -说明MindSpore安装成功了。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -- 直接在线升级 - - ```bash - pip install --upgrade mindspore-ascend - ``` - -- 本地源码编译升级 - - 在源码根目录下执行编译脚本`build.sh`成功后,在`build/package`目录下找到编译生成的whl安装包,然后执行命令进行升级。 - - ```bash - pip install --upgrade mindspore_ascend-{version}-cp37-cp37m-linux_{arch}.whl - ``` - -## 安装MindInsight - -当您需要查看训练过程中的标量、图像、计算图以及模型超参等信息时,可以选装MindInsight。 - -具体安装步骤参见[MindInsight](https://gitee.com/mindspore/mindinsight/blob/master/README_CN.md)。 - -## 安装MindArmour - -当您进行AI模型安全研究或想要增强AI应用模型的防护能力时,可以选装MindArmour。 - -具体安装步骤参见[MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README_CN.md)。 - -## 安装MindSpore Hub - -当您想要快速体验MindSpore预训练模型时,可以选装MindSpore Hub。 - -具体安装步骤参见[MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README_CN.md)。 - -## 安装MindSpore Serving - -当您想要快速体验MindSpore在线推理服务时,可以选装MindSpore Serving。 - -具体安装步骤参见[MindSpore Serving](https://gitee.com/mindspore/serving/blob/master/README_CN.md)。 diff --git a/install/mindspore_ascend_install_source_en.md b/install/mindspore_ascend_install_source_en.md deleted file mode 100644 index 2ba8d5309cd1de94eba1f4355ba8ccd79dd821bd..0000000000000000000000000000000000000000 --- a/install/mindspore_ascend_install_source_en.md +++ /dev/null @@ -1,208 +0,0 @@ -# Installing MindSpore in Ascend 910 by Source Code - - - -- [Installing MindSpore in Ascend 910 by Source Code](#installing-mindspore-in-ascend-910-by-source-code) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Downloading Source Code from Code Repository](#downloading-source-code-from-code-repository) - - [Compiling MindSpore](#compiling-mindspore) - - [Installing MindSpore](#installing-mindspore) - - [Configuring Environment Variables](#configuring-environment-variables) - - [Installation Verification](#installation-verification) - - [Version Update](#version-update) - - [Installing MindInsight](#installing-mindinsight) - - [Installing 
MindArmour](#installing-mindarmour) - - [Installing MindSpore Hub](#installing-mindspore-hub) - - [Installing MindSpore Serving](#installing-mindspore-serving) - - - - - -This document describes how to quickly install MindSpore in a Linux system with an Ascend 910 environment by source code. - -## System Environment Information Confirmation - -- Confirm that Ubuntu 18.04/CentOS 7.6/EulerOS 2.8/KylinV10 SP1 is installed with the 64-bit operating system. -- Ensure that right version [GCC 7.3.0](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz) is installed. -- Confirm that [gmp 6.1.2](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz) is installed. -- Confirm that [Python 3.7.5](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz) is installed. -- Confirm that [OpenSSL 1.1.1 or later](https://github.com/openssl/openssl.git) is installed. - - Set system variable `export OPENSSL_ROOT_DIR="OpenSSL installation directory"` after installation. -- Confirm that [CMake 3.18.3 or later](https://cmake.org/download/) is installed. - - Add the path where the executable file `cmake` stores to the environment variable PATH. -- Confirm that [patch 2.5 or later](http://ftp.gnu.org/gnu/patch/) is installed. - - Add the path where the executable file `patch` stores to the environment variable PATH. -- Confirm that [wheel 0.32.0 or later](https://pypi.org/project/wheel/) is installed. -- Confirm that the Ascend 910 AI processor software package ([Ascend Data Center Solution 21.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/ascend-data-center-solution-pid-251167910/software/252504563?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C252309113%7C251167910)) are installed. 
- - The software packages include [Driver and Firmware A800-9000 1.0.10](https://support.huawei.com/enterprise/zh/ascend-computing/a800-9000-pid-250702818/software/252727249?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C250702818) and [CANN 5.0.1](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373/software/252504455?idAbsPath=fixnode01%7C23710424%7C251366513%7C22892968%7C251168373). - - Confirm that the current user has the right to access the installation path `/usr/local/Ascend`of Ascend 910 AI processor software package, If not, the root user needs to add the current user to the user group where `/usr/local/Ascend` is located. For the specific configuration, please refer to the software package instruction document. - - Install the .whl package provided in Ascend 910 AI processor software package. The .whl package is released with the software package. After software package is upgraded, reinstall the .whl package. - - ```bash - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/topi-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/te-{version}-py3-none-any.whl - pip install /usr/local/Ascend/ascend-toolkit/latest/fwkacllib/lib64/hccl-{version}-py3-none-any.whl - ``` - - - If the Ascend 910 AI processor software package is upgraded, the .whl package also needs to be reinstalled, first uninstall the original installation package, and then refer to the above command to reinstall. - - ```bash - pip uninstall te topi hccl -y - ``` - -- Confirm that [NUMA 2.0.11 or later](https://github.com/numactl/numactl) is installed. - - If not, for Ubuntu users, use the following command to install it: - - ```bash - apt-get install libnuma-dev - ``` - - If not, for EulerOS and CentOS users, use the following command to install it: - - ```bash - yum install numactl-devel - ``` - -- Confirm that the git tool is installed. 
- - If not, for Ubuntu users, use the following command to install it: - - ```bash - apt-get install git - ``` - - If not, for EulerOS and CentOS users, use the following command to install it: - - ```bash - yum install git - ``` - -## Downloading Source Code from Code Repository - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## Compiling MindSpore - -Run the following command in the root directory of the source code to compile MindSpore: - -```bash -bash build.sh -e ascend -``` - -Of which, - -- In the `build.sh` script, the default number of compilation threads is 8. If the compiler performance is poor, compilation errors may occur. You can add -j{Number of threads} in to script to reduce the number of threads. For example, `bash build.sh -e ascend -j4`. - -## Installing MindSpore - -```bash -chmod +x build/package/mindspore_ascend-{version}-cp37-cp37m-linux_{arch}.whl -pip install build/package/mindspore_ascend-{version}-cp37-cp37m-linux_{arch}.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -Of which, - -- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about other dependency items, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)). In other cases, you need to manually install dependency items. -- `{version}` denotes the version of MindSpore. For example, when you are installing MindSpore 1.1.0, `{version}` should be 1.1.0. -- `{arch}` denotes the system architecture. For example, the Linux system you are using is x86 architecture 64-bit, `{arch}` should be `x86_64`. If the system is ARM architecture 64-bit, then it should be `aarch64`. Currently, the following systems are supported by Ascend: `euleros_aarch64`/`centos_x86`/`ubuntu_aarch64`/`ubuntu_x86`/`kylin_aarch64`. 
- -## Configuring Environment Variables - -- **If Ascend 910 AI processor software is installed in a non-default path**, after MindSpore is installed, export runtime-related environment variables. `/usr/local/Ascend` in the following command `LOCAL_ASCEND=/usr/local/Ascend` denotes the installation path of the software package, please replace it as your actual installation path. - - ```bash - # control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. - export GLOG_v=2 - - # Conda environmental options - LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - - # lib libraries that the run package depends on - export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - - # Environment variables that must be configured - export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path - export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path - export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path - export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} - # Python library that TBE implementation depends on - - ``` - -## Installation Verification - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="Ascend") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -- The outputs should be the same as: - -```text -[[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]]] -``` - -It means MindSpore has been installed successfully. 
- -## Version Update - -Using the following command if you need to update the MindSpore version. - -- Update Online - - ```bash - pip install --upgrade mindspore-ascend - ``` - -- Update after source code compilation - - After successfully executing the compile script `build.sh` in the root path of the source code, find the whl package in path `build/package`, use the following command to update your version. - - ```bash - pip install --upgrade mindspore_ascend-{version}-cp37-cp37m-linux_{arch}.whl - ``` - -## Installing MindInsight - -If you need to analyze information such as model scalars, graphs, computation graphs and model traceback, you can install MindInsight. - -For more details, please refer to [MindInsight](https://gitee.com/mindspore/mindinsight/blob/master/README.md). - -## Installing MindArmour - -If you need to conduct AI model security research or enhance the security of the model in you applications, you can install MindArmour. - -For more details, please refer to [MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README.md). - -## Installing MindSpore Hub - -If you need to access and experience MindSpore pre-trained models quickly, you can install MindSpore Hub. - -For more details, please refer to [MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README.md). - -## Installing MindSpore Serving - -If you need to access and experience MindSpore online inference services quickly, you can install MindSpore Serving. - -For more details, please refer to [MindSpore Serving](https://gitee.com/mindspore/serving/blob/master/README.md). 
diff --git a/install/mindspore_cpu_install_conda.md b/install/mindspore_cpu_install_conda.md deleted file mode 100644 index 046094cd797166d977a0caa98a6c53282b1518e2..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_install_conda.md +++ /dev/null @@ -1,100 +0,0 @@ -# Conda方式安装MindSpore CPU版本 - - - -- [Conda方式安装MindSpore CPU版本](#conda方式安装mindspore-cpu版本) - - [确认系统环境信息](#确认系统环境信息) - - [安装Conda](#安装conda) - - [添加Conda镜像源](#添加conda镜像源) - - [创建并激活Conda环境](#创建并激活conda环境) - - [安装MindSpore](#安装mindspore) - - [验证安装是否成功](#验证安装是否成功) - - [升级MindSpore版本](#升级mindspore版本) - - [安装MindArmour](#安装mindarmour) - - [安装MindSpore Hub](#安装mindspore-hub) - - [安装MindQuantum](#安装mindquantum) - - - - - -本文档介绍如何在CPU环境的Linux系统上,使用Conda方式快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04是64位操作系统。 -- 确认安装[gmp 6.1.2版本](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz)。 - -## 安装Conda - -下载并安装对应架构的Conda安装包。 - -- 官网下载地址:[X86 Anaconda](https://www.anaconda.com/distribution/) 或 [X86 Miniconda](https://docs.conda.io/en/latest/miniconda.html) - -- 清华镜像源下载地址:[X86 Anaconda](https://mirrors.tuna.tsinghua.edu.cn/anaconda/archive/Anaconda3-2020.02-Linux-x86_64.sh) - -## 添加Conda镜像源 - -从清华源镜像源下载Conda安装包的可跳过此步操作。 - -```bash -conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/ -conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/main/ -conda config --set show_channel_urls yes -``` - -## 创建并激活Conda环境 - -```bash -conda create -n mindspore python=3.7.5 -conda activate mindspore -``` - -## 安装MindSpore - -参考[版本列表](https://www.mindspore.cn/versions)先进行SHA-256完整性校验,校验一致后再执行如下命令安装MindSpore。 - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/cpu/{system}/mindspore-{version}-cp37-cp37m-linux_{arch}.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 
在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 -- `{arch}`表示系统架构,例如使用的Linux系统是x86架构64位时,`{arch}`应写为`x86_64`。如果系统是ARM架构64位,则写为`aarch64`。 -- `{system}`表示系统,例如使用的Ubuntu系统X86架构,`{system}`应写为`ubuntu_x86`,目前CPU版本可支持以下系统`ubuntu_aarch64`/`ubuntu_x86`。 - -## 验证安装是否成功 - -```bash -python -c "import mindspore;print(mindspore.__version__)" -``` - -如果输出MindSpore版本号,说明MindSpore安装成功了,如果输出`No module named 'mindspore'`说明未成功安装。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -```bash -pip install --upgrade mindspore -``` - -## 安装MindArmour - -当您进行AI模型安全研究或想要增强AI应用模型的防护能力时,可以选装MindArmour。 - -具体安装步骤参见[MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README_CN.md)。 - -## 安装MindSpore Hub - -当您想要快速体验MindSpore预训练模型时,可以选装MindSpore Hub。 - -具体安装步骤参见[MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README_CN.md)。 - -## 安装MindQuantum - -当您想要搭建并训练量子神经网络,可以选装MindQuantum。 - -具体安装步骤参见[MindQuantum](https://gitee.com/mindspore/mindquantum/blob/master/README_CN.md)。 diff --git a/install/mindspore_cpu_install_docker.md b/install/mindspore_cpu_install_docker.md deleted file mode 100644 index 551c576604b9cb011144a828a5492f3f90154962..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_install_docker.md +++ /dev/null @@ -1,105 +0,0 @@ -# Docker方式安装MindSpore CPU版本 - - - -- [Docker方式安装MindSpore CPU版本](#docker方式安装mindspore-cpu版本) - - [确认系统环境信息](#确认系统环境信息) - - [获取MindSpore镜像](#获取mindspore镜像) - - [运行MindSpore镜像](#运行mindspore镜像) - - [验证是否安装成功](#验证是否安装成功) - - - - - -[Docker](https://docs.docker.com/get-docker/)是一个开源的应用容器引擎,让开发者打包他们的应用以及依赖包到一个轻量级、可移植的容器中。通过使用Docker,可以实现MindSpore的快速部署,并与系统环境隔离。 - -本文档介绍如何在CPU环境的Linux系统上,使用Docker方式快速安装MindSpore。 - -MindSpore的Docker镜像托管在[Huawei SWR](https://support.huaweicloud.com/swr/index.html)上。 - -目前容器化构建选项支持情况如下: - -| 硬件平台 | Docker镜像仓库 | 标签 | 说明 | -| :----- | 
:------------------------ | :----------------------- | :--------------------------------------- | -| CPU | `mindspore/mindspore-cpu` | `x.y.z` | 已经预安装MindSpore `x.y.z` CPU版本的生产环境。 | -| | | `devel` | 提供开发环境从源头构建MindSpore(`CPU`后端)。安装详情请参考 。 | -| | | `runtime` | 提供运行时环境,未安装MindSpore二进制包(`CPU`后端)。 | - -> `x.y.z`对应MindSpore版本号,例如安装1.1.0版本MindSpore时,`x.y.z`应写为1.1.0。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04是基于x86架构的64位操作系统。 -- 确认安装[Docker 18.03或者更高版本](https://docs.docker.com/get-docker/)。 - -## 获取MindSpore镜像 - -对于`CPU`后端,可以直接使用以下命令获取最新的稳定镜像: - -```bash -docker pull swr.cn-south-1.myhuaweicloud.com/mindspore/mindspore-cpu:{tag} -``` - -其中: - -- `{tag}`对应上述表格中的标签。 - -## 运行MindSpore镜像 - -执行以下命令启动Docker容器实例: - -```bash -docker run -it swr.cn-south-1.myhuaweicloud.com/mindspore/mindspore-cpu:{tag} /bin/bash -``` - -其中: - -- `{tag}`对应上述表格中的标签。 - -## 验证是否安装成功 - -- 如果你安装的是指定版本`x.y.z`的容器。 - - 按照上述步骤进入MindSpore容器后,测试Docker是否正常工作,请运行下面的Python代码并检查输出: - - ```python - import numpy as np - import mindspore.context as context - import mindspore.ops as ops - from mindspore import Tensor - - context.set_context(mode=context.PYNATIVE_MODE, device_target="CPU") - - x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) - y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) - print(ops.add(x, y)) - ``` - - 代码成功运行时会输出: - - ```text - [[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 
2.]]] - ``` - - 至此,你已经成功通过Docker方式安装了MindSpore CPU版本。 - -- 如果你安装的是`runtime`标签的容器,需要自行安装MindSpore。 - - 进入[MindSpore安装指南页面](https://www.mindspore.cn/install),选择CPU硬件平台、Ubuntu-x86操作系统和pip的安装方式,获得安装指南。运行容器后参考安装指南,通过pip方式安装MindSpore CPU版本,并进行验证。 - -- 如果你安装的是`devel`标签的容器,需要自行编译并安装MindSpore。 - - 进入[MindSpore安装指南页面](https://www.mindspore.cn/install),选择CPU硬件平台、Ubuntu-x86操作系统和Source的安装方式,获得安装指南。运行容器后,下载MindSpore代码仓并参考安装指南,通过源码编译方式安装MindSpore CPU版本,并进行验证。 - -如果您想了解更多关于MindSpore Docker镜像的构建过程,请查看[docker repo](https://gitee.com/mindspore/mindspore/blob/master/docker/README.md)了解详细信息。 diff --git a/install/mindspore_cpu_install_docker_en.md b/install/mindspore_cpu_install_docker_en.md deleted file mode 100644 index 68abe6c42297d8482b723c3686be97b91cd22918..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_install_docker_en.md +++ /dev/null @@ -1,105 +0,0 @@ -# Installing MindSpore in CPU by Docker - - - -- [Installing MindSpore in CPU by Docker](#installing-mindSpore-in-cpu-by-docker) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Obtaining MindSpore Image](#obtaining-mindspore-image) - - [Running MindSpore Image](#running-mindspore-image) - - [Installation Verification](#installation-verification) - - - - - -[Docker](https://docs.docker.com/get-docker/) is an open source application container engine, developers can package their applications and dependencies into a lightweight, portable container. By using Docker, MindSpore can be rapidly deployed and separated from the system environment. - -This document describes how to quickly install MindSpore by Docker in a Linux system with a CPU environment. - -The Docker image of MindSpore is hosted on [Huawei SWR](https://support.huaweicloud.com/swr/index.html). 
- -The current support for containerized build is as follows: - -| Hardware | Docker Image Hub | Label | Note | -| :----- | :------------------------ | :----------------------- | :--------------------------------------- | -| CPU | `mindspore/mindspore-cpu` | `x.y.z` | A production environment with the MindSpore `x.y.z` CPU version pre-installed. | -| | | `devel` | Provide a development environment to build MindSpore from the source (`CPU` backend). For installation details, please refer to . | -| | | `runtime` | Provide runtime environment, MindSpore binary package (`CPU` backend) is not installed. | - -> `x.y.z` corresponds to the MindSpore version number. For example, when installing MindSpore version 1.1.0, `x.y.z` should be written as 1.1.0. - -## System Environment Information Confirmation - -- Confirm that Ubuntu 18.04 is installed with the 64-bit operating system. -- Confirm that [Docker 18.03 or later versioin](https://docs.docker.com/get-docker/) is installed. - -## Obtaining MindSpore Image - -For the `CPU` backend, you can directly use the following command to obtain the latest stable image: - -```bash -docker pull swr.cn-south-1.myhuaweicloud.com/mindspore/mindspore-cpu:{tag} -``` - -of which, - -- `{tag}` corresponds to the label in the above table. - -## Running MindSpore Image - -Execute the following command to start the Docker container instance: - -```bash -docker run -it swr.cn-south-1.myhuaweicloud.com/mindspore/mindspore-cpu:{tag} /bin/bash -``` - -of which, - -- `{tag}` corresponds to the label in the above table. - -## Installation Verification - -- If you are installing the container of the specified version `x.y.z`. 
- - After entering the MindSpore container according to the above steps, to test whether the Docker container is working properly, please run the following Python code and check the output: - - ```python - import numpy as np - import mindspore.context as context - import mindspore.ops as ops - from mindspore import Tensor - - context.set_context(mode=context.PYNATIVE_MODE, device_target="CPU") - - x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) - y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) - print(ops.add(x, y)) - ``` - - The outputs should be the same as: - - ```text - [[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]]] - ``` - - It means MindSpore has been installed by docker successfully. - -- If you install a container with the label of `runtime`, you need to install MindSpore yourself. - - Go to [MindSpore Installation Guide Page](https://www.mindspore.cn/install/en), choose the CPU hardware platform, Ubuntu-x86 operating system and pip installation method to get the installation guide. Refer to the installation guide after running the container and install the MindSpore CPU version by pip, and verify it. - -- If you install a container with the label of `devel`, you need to compile and install MindSpore yourself. - - Go to [MindSpore Installation Guide Page](https://www.mindspore.cn/install/en), choose the CPU hardware platform, Ubuntu-x86 operating system and pip installation method to get the installation guide. After running the container, download the MindSpore code repository and refer to the installation guide, install the MindSpore CPU version through source code compilation, and verify it. - -If you want to know more about the MindSpore Docker image building process, please check [docker repo](https://gitee.com/mindspore/mindspore/blob/master/docker/README.md) for details. 
diff --git a/install/mindspore_cpu_install_pip.md b/install/mindspore_cpu_install_pip.md deleted file mode 100644 index d362aea53f27ee13f466e22394f3597519dd32b9..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_install_pip.md +++ /dev/null @@ -1,74 +0,0 @@ -# pip方式安装MindSpore CPU版本 - - - -- [pip方式安装MindSpore CPU版本](#pip方式安装mindspore-cpu版本) - - [确认系统环境信息](#确认系统环境信息) - - [下载安装MindSpore](#下载安装mindspore) - - [查询安装是否成功](#查询安装是否成功) - - [升级MindSpore版本](#升级mindspore版本) - - [安装MindArmour](#安装mindarmour) - - [安装MindSpore Hub](#安装mindspore-hub) - - [安装MindQuantum](#安装mindquantum) - - - - - -本文档介绍如何在CPU环境的Linux系统上,使用pip方式快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04是64位操作系统。 -- 确认安装[GCC 7.3.0版本](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz)。 -- 确认安装Python 3.7.5版本。 - - 如果未安装或者已安装其他版本的Python,可从[官网](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz)或者[华为云](https://mirrors.huaweicloud.com/python/3.7.5/Python-3.7.5.tgz)下载Python 3.7.5版本,进行安装。 - -## 下载安装MindSpore - -参考[版本列表](https://www.mindspore.cn/versions)先进行SHA-256完整性校验,校验一致后再执行如下命令安装MindSpore。 - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/cpu/{system}/mindspore-{version}-cp37-cp37m-linux_{arch}.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 -- `{arch}`表示系统架构,例如使用的Linux系统是x86架构64位时,`{arch}`应写为`x86_64`。如果系统是ARM架构64位,则写为`aarch64`。 -- `{system}`表示系统,例如使用的Ubuntu系统X86架构,`{system}`应写为`ubuntu_x86`,目前CPU版本可支持以下系统`ubuntu_aarch64`/`ubuntu_x86`。 - -## 查询安装是否成功 - -```bash -python -c "import mindspore;print(mindspore.__version__)" -``` - -如果输出MindSpore版本号,说明MindSpore安装成功了,如果输出`No module named 'mindspore'`说明未成功安装。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - 
-```bash -pip install --upgrade mindspore -``` - -## 安装MindArmour - -当您进行AI模型安全研究或想要增强AI应用模型的防护能力时,可以选装MindArmour。 - -具体安装步骤参见[MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README_CN.md)。 - -## 安装MindSpore Hub - -当您想要快速体验MindSpore预训练模型时,可以选装MindSpore Hub。 - -具体安装步骤参见[MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README_CN.md)。 - -## 安装MindQuantum - -当您想要搭建并训练量子神经网络,可以选装MindQuantum。 - -具体安装步骤参见[MindQuantum](https://gitee.com/mindspore/mindquantum/blob/master/README_CN.md)。 diff --git a/install/mindspore_cpu_install_pip_en.md b/install/mindspore_cpu_install_pip_en.md deleted file mode 100644 index 77c1c2636e1e5b6a937aa3e6d0e864da64c3e7c8..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_install_pip_en.md +++ /dev/null @@ -1,74 +0,0 @@ -# Installing MindSpore in CPU by pip - - - -- [Installing MindSpore in CPU by pip](#installing-mindspore-in-cpu-by-pip) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Downloading and Installing MindSpore](#downloading-and-installing-mindspore) - - [Installation Verification](#installation-verification) - - [Version Update](#version-update) - - [Installing MindArmour](#installing-mindarmour) - - [Installing MindSpore Hub](#installing-mindspore-hub) - - [Installing MindQuantum](#installing-mindquantum) - - - - - -This document describes how to quickly install MindSpore by pip in a Linux system with a CPU environment. - -## System Environment Information Confirmation - -- Confirm that Ubuntu 18.04 is installed with the 64-bit operating system. -- Confirm that [GCC 7.3.0](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz) is installed. -- Confirm that Python 3.7.5 is installed. 
- - If you didn't install Python or you have installed other versions, please download the Python 3.7.5 64-bit from [Python](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz) or [Huaweicloud](https://mirrors.huaweicloud.com/python/3.7.5/Python-3.7.5.tgz) to install. - -## Downloading and Installing MindSpore - -It is recommended to refer to [Version List](https://www.mindspore.cn/versions/en) to perform SHA-256 integrity verification, and then execute the following command to install MindSpore after the verification is consistent. - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/cpu/{system}/mindspore-{version}-cp37-cp37m-linux_{arch}.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -Of which, - -- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about other dependency items, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)). In other cases, you need to manually install dependency items. -- `{version}` denotes the version of MindSpore. For example, when you are installing MindSpore 1.1.0, `{version}` should be 1.1.0. -- `{arch}` denotes the system architecture. For example, the Linux system you are using is x86 architecture 64-bit, `{arch}` should be `x86_64`. If the system is ARM architecture 64-bit, then it should be `aarch64`. -- `{system}` denotes the system version. For example, if you are using Ubuntu x86 architecture, `{system}` should be `ubuntu_x86`. Currently, the following systems are supported by CPU: `ubuntu_aarch64`/`ubuntu_x86`. 
- -## Installation Verification - -```bash -python -c "import mindspore;print(mindspore.__version__)" -``` - -If the MindSpore version number is displayed, it means that MindSpore is installed successfully, and if the output is `No module named 'mindspore'`, it means that the installation was not successful. - -## Version Update - -Using the following command if you need to update the MindSpore version: - -```bash -pip install --upgrade mindspore -``` - -## Installing MindArmour - -If you need to conduct AI model security research or enhance the security of the model in you applications, you can install MindArmour. - -For more details, please refer to [MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README.md). - -## Installing MindSpore Hub - -If you need to access and experience MindSpore pre-trained models quickly, you can install MindSpore Hub. - -For more details, please refer to [MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README.md). - -## Installing MindQuantum - -If you need to build and train quantum neural network, you can install MindQuantum. - -For more details, please refer to [MindQuantum](https://gitee.com/mindspore/mindquantum/blob/master/README.md). 
diff --git a/install/mindspore_cpu_install_source.md b/install/mindspore_cpu_install_source.md deleted file mode 100644 index 48d44923e95dd918a2c858b383876ff6676df712..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_install_source.md +++ /dev/null @@ -1,122 +0,0 @@ -# 源码编译方式安装MindSpore CPU版本 - - - -- [源码编译方式安装MindSpore CPU版本](#源码编译方式安装mindspore-cpu版本) - - [确认系统环境信息](#确认系统环境信息) - - [从代码仓下载源码](#从代码仓下载源码) - - [编译MindSpore](#编译mindspore) - - [安装MindSpore](#安装mindspore) - - [验证安装是否成功](#验证安装是否成功) - - [升级MindSpore版本](#升级mindspore版本) - - [安装MindArmour](#安装mindarmour) - - [安装MindSpore Hub](#安装mindspore-hub) - - [安装MindQuantum](#安装mindquantum) - - - - - -本文档介绍如何在CPU环境的Linux系统上,使用源码编译方式快速安装MindSpore。 - -详细步骤可以参考社区提供的实践——[在Ubuntu(CPU)上进行源码编译安装MindSpore](https://www.mindspore.cn/news/newschildren?id=365),在此感谢社区成员[damon0626](https://gitee.com/damon0626)的分享。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04是64位操作系统。 -- 确认安装[GCC 7.3.0版本](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz)。 -- 确认安装[Python 3.7.5版本](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz)。 -- 确认安装[OpenSSL 1.1.1及以上版本](https://github.com/openssl/openssl.git)。 - - 安装完成后设置环境变量`export OPENSSL_ROOT_DIR=“OpenSSL安装目录”`。 -- 确认安装[CMake 3.18.3及以上版本](https://cmake.org/download/)。 - - 安装完成后需将CMake所在路径添加到系统环境变量。 -- 确认安装[wheel 0.32.0及以上版本](https://pypi.org/project/wheel/)。 -- 确认安装[patch 2.5及以上版本](http://ftp.gnu.org/gnu/patch/)。 - - 安装完成后需将patch所在路径添加到系统环境变量中。 -- 确认安装[NUMA 2.0.11及以上版本](https://github.com/numactl/numactl)。 - 如果未安装,使用如下命令下载安装: - - ```bash - apt-get install libnuma-dev - ``` - -- 确认安装git工具。 - 如果未安装,使用如下命令下载安装: - - ```bash - apt-get install git - ``` - -## 从代码仓下载源码 - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## 编译MindSpore - -在源码根目录下执行如下命令。 - -```bash -bash build.sh -e cpu -j4 -``` - -其中: -如果编译机性能较好,可在执行中增加-j{线程数}来增加线程数量。如`bash build.sh -e cpu -j12`。 - -## 安装MindSpore - -```bash -chmod +x build/package/mindspore-{version}-cp37-cp37m-linux_{arch}.whl -pip install 
build/package/mindspore-{version}-cp37-cp37m-linux_{arch}.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 -- `{arch}`表示系统架构,例如使用的Linux系统是x86架构64位时,`{arch}`应写为`x86_64`。如果系统是ARMv8架构64位,则写为`aarch64`。 - -## 验证安装是否成功 - -```bash -python -c 'import mindspore;print(mindspore.__version__)' -``` - -如果输出MindSpore版本号,说明MindSpore安装成功了,如果输出`No module named 'mindspore'`说明未安装成功。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -- 直接在线升级 - - ```bash - pip install --upgrade mindspore - ``` - -- 本地源码编译升级 - - 在源码根目录下执行编译脚本`build.sh`成功后,在`build/package`目录下找到编译生成的whl安装包,然后执行命令进行升级。 - - ```bash - pip install --upgrade mindspore-{version}-cp37-cp37m-linux_{arch}.whl - ``` - -## 安装MindArmour - -当您进行AI模型安全研究或想要增强AI应用模型的防护能力时,可以选装MindArmour。 - -具体安装步骤参见[MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README_CN.md)。 - -## 安装MindSpore Hub - -当您想要快速体验MindSpore预训练模型时,可以选装MindSpore Hub。 - -具体安装步骤参见[MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README_CN.md)。 - -## 安装MindQuantum - -当您想要搭建并训练量子神经网络,可以选装MindQuantum。 - -具体安装步骤参见[MindQuantum](https://gitee.com/mindspore/mindquantum/blob/master/README_CN.md)。 diff --git a/install/mindspore_cpu_install_source_en.md b/install/mindspore_cpu_install_source_en.md deleted file mode 100644 index f4bd70cbd559a2130628ef77ba5b94ec8c913b20..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_install_source_en.md +++ /dev/null @@ -1,121 +0,0 @@ -# Installing MindSpore in CPU by Source Code - - - -- [Installing MindSpore in CPU by Source Code](#installing-mindspore-in-cpu-by-source-code) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Downloading Source Code from Code Repository](#downloading-source-code-from-code-repository) - - 
[Compiling MindSpore](#compiling-mindspore) - - [Installing MindSpore](#installing-mindspore) - - [Installation Verification](#installation-verification) - - [Version Update](#version-update) - - [Installing MindArmour](#installing-mindarmour) - - [Installing MindSpore Hub](#installing-mindspore-hub) - - [Installing MindQuantum](#installing-mindquantum) - - - - - -This document describes how to quickly install MindSpore by source code in a Linux system with a CPU environment. - -## System Environment Information Confirmation - -- Confirm that Ubuntu 18.04 is installed with the 64-bit operating system. -- Confirm that [GCC 7.3.0](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz) is installed. -- Confirm that [Python 3.7.5](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz) is installed. -- Confirm that [OpenSSL 1.1.1 or later](https://github.com/openssl/openssl.git) is installed. - - Set system variable `export OPENSSL_ROOT_DIR="OpenSSL installation directory"` after installation. -- Confirm that [CMake 3.18.3 or later](https://cmake.org/download/) is installed. - - Add the path where the executable file `cmake` stores to the environment variable PATH. -- Confirm that [wheel 0.32.0 or later](https://pypi.org/project/wheel/) is installed. -- Confirm that [patch 2.5 or later](http://ftp.gnu.org/gnu/patch/) is installed. - - Add the path where the executable file `patch` stores to the environment variable PATH. -- Confirm that [NUMA 2.0.11 or later](https://github.com/numactl/numactl) is installed. - If not, use the following command to install it: - - ```bash - apt-get install libnuma-dev - ``` - -- Confirm that the git tool is installed. 
- If not, use the following command to install it: - - ```bash - apt-get install git - ``` - -## Downloading Source Code from Code Repository - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## Compiling MindSpore - -Run the following command in the root directory of the source code to compile MindSpore: - -```bash -bash build.sh -e cpu -j4 -``` - -Of which, - -- If the compiler performance is strong, you can add -j{Number of threads} in to script to increase the number of threads. For example, `bash build.sh -e cpu -j12`. - -## Installing MindSpore - -```bash -chmod +x build/package/mindspore-{version}-cp37-cp37m-linux_{arch}.whl -pip install build/package/mindspore-{version}-cp37-cp37m-linux_{arch}.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -Of which, - -- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about other dependency items, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),In other cases, you need to manually install dependency items. -- `{version}` denotes the version of MindSpore. For example, when you are installing MindSpore 1.1.0, `{version}` should be 1.1.0. -- `{arch}` denotes the system architecture. For example, the Linux system you are using is x86 architecture 64-bit, `{arch}` should be `x86_64`. If the system is ARM architecture 64-bit, then it should be `aarch64`. - -## Installation Verification - -```bash -python -c 'import mindspore;print(mindspore.__version__)' -``` - -If the MindSpore version number is displayed, it means that MindSpore is installed successfully, and if the output is `No module named'mindspore'`, it means that the installation was not successful. 
- -## Version Update - -Using the following command if you need to update the MindSpore version: - -- Update online - - ```bash - pip install --upgrade mindspore - ``` - -- Update after source code compilation - - After successfully executing the compile script `build.sh` in the root path of the source code, find the whl package in path `build/package`, use the following command to update your version. - - ```bash - pip install --upgrade mindspore-{version}-cp37-cp37m-linux_{arch}.whl - ``` - -## Installing MindArmour - -If you need to conduct AI model security research or enhance the security of the model in you applications, you can install MindArmour. - -For more details, please refer to [MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README.md). - -## Installing MindSpore Hub - -If you need to access and experience MindSpore pre-trained models quickly, you can install MindSpore Hub. - -For more details, please refer to [MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README.md). - -## Installing MindQuantum - -If you need to build and train quantum neural network, you can install MindQuantum. - -For more details, please refer to [MindQuantum](https://gitee.com/mindspore/mindquantum/blob/master/README.md). 
diff --git a/install/mindspore_cpu_macos_install_conda.md b/install/mindspore_cpu_macos_install_conda.md deleted file mode 100644 index f193bce42814bbaf6b77c0b20bfb6c2388705f6a..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_macos_install_conda.md +++ /dev/null @@ -1,74 +0,0 @@ -# Conda方式安装MindSpore CPU版本(macOS) - - - -- [Conda方式安装MindSpore CPU版本(macOS)](#conda方式安装mindspore-cpu版本macOS) - - [确认系统环境信息](#确认系统环境信息) - - [安装Conda](#安装conda) - - [添加Conda镜像源](#添加conda镜像源) - - [创建并激活Conda环境](#创建并激活conda环境) - - [安装MindSpore](#安装mindspore) - - [验证是否安装成功](#验证是否安装成功) - - [升级MindSpore版本](#升级mindspore版本) - - - - - -本文档介绍如何在CPU环境的macOS系统上,使用Conda方式快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装macOS Catalina是64位操作系统。 - -## 安装Conda - -下载并安装对应架构的Conda安装包。 - -- 官方源下载[X86 Anaconda](https://www.anaconda.com/distribution/) 或 [X86 Miniconda](https://docs.conda.io/en/latest/miniconda.html) -- 清华镜像源下载地址:[X86 Anaconda](https://mirrors.tuna.tsinghua.edu.cn/anaconda/archive/Anaconda3-5.3.1-MacOSX-x86_64.sh) - -## 添加Conda镜像源 - -从清华源镜像源下载Conda安装包的可忽略此步操作。 - -```shell -conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/ -conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/main/ -conda config --set show_channel_urls yes -``` - -## 创建并激活Conda环境 - -```bash -conda create -n mindspore python=3.7.5 -conda activate mindspore -``` - -## 安装MindSpore - -参考[版本列表](https://www.mindspore.cn/versions)先进行SHA-256完整性校验,校验一致后再执行如下命令安装MindSpore。 - -```bash -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 - -## 验证是否安装成功 - -```bash -python -c "import mindspore;mindspore.__version__" -``` - -如果输出MindSpore版本号,说明MindSpore安装成功了,如果输出`No module named 'mindspore'`说明未安装成功。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -```bash -pip install --upgrade mindspore -``` 
diff --git a/install/mindspore_cpu_macos_install_pip.md b/install/mindspore_cpu_macos_install_pip.md deleted file mode 100644 index d9d4757c0944b65d9a8a8696d7091a77d7ed4e81..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_macos_install_pip.md +++ /dev/null @@ -1,50 +0,0 @@ -# pip方式安装MindSpore CPU版本(macOS) - - - -- [pip方式安装MindSpore CPU版本(macOS)](#pip方式安装mindspore-cpu版本macOS) - - [确认系统环境信息](#确认系统环境信息) - - [安装MindSpore](#安装mindspore) - - [验证是否安装成功](#验证是否安装成功) - - [升级MindSpore版本](#升级mindspore版本) - - - - - -本文档介绍如何在CPU环境的macOS系统上,使用pip方式快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装macOS Catalina是64位操作系统。 -- 确认安装[Python 3.7.5](https://www.python.org/ftp/python/3.7.5/python-3.7.5-macosx10.9.pkg)版本。 -- 安装Python完毕后,将Python添加到系统环境变量。 - - 将Python路径添加到系统环境变量中即可。 - -## 安装MindSpore - -参考[版本列表](https://www.mindspore.cn/versions)先进行SHA-256完整性校验,校验一致后再执行如下命令安装MindSpore。 - -```bash -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 - -## 验证是否安装成功 - -```bash -python -c "import mindspore;print(mindspore.__version__)" -``` - -如果输出MindSpore版本号,说明MindSpore安装成功了,如果输出`No module named 'mindspore'`说明未安装成功。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -```bash -pip install --upgrade mindspore -``` diff --git a/install/mindspore_cpu_macos_install_pip_en.md b/install/mindspore_cpu_macos_install_pip_en.md deleted file mode 100644 index 8ac0abe17b37bad933f56d59f11442837c420f77..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_macos_install_pip_en.md +++ /dev/null @@ -1,49 +0,0 @@ -# Installing MindSpore in CPU by pip (macOS) - - - -- [Installing MindSpore in CPU by pip (macOS)](#installing-mindspore-in-cpu-by-pip-macos) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Installing MindSpore](#installing-mindspore) - - 
[Installation Verification](#installation-verification) - - [Version Update](#version-update) - - - - - -This document describes how to quickly install MindSpore by pip in a macOS system with a CPU environment. - -## System Environment Information Confirmation - -- Confirm that macOS Cata is installed with the 64-bit operating system. -- Confirm that [Python 3.7.5](https://www.python.org/ftp/python/3.7.5/python-3.7.5-macosx10.9.pkg) is installed. - - After installing, add the path of `python` to the environment variable PATH. - -## Installing MindSpore - -It is recommended to refer to [Version List](https://www.mindspore.cn/versions/en) to perform SHA-256 integrity verification, and then execute the following command to install MindSpore after the verification is consistent. - -```bash -``` - -Of which, - -- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about other dependency items, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)). In other cases, you need to manually install dependency items. -- `{version}` denotes the version of MindSpore. For example, when you are installing MindSpore 1.1.0, `{version}` should be 1.1.0. - -## Installation Verification - -```bash -python -c "import mindspore;print(mindspore.__version__)" -``` - -If the MindSpore version number is displayed, it means that MindSpore is installed successfully, and if the output is `No module named 'mindspore'`, it means that the installation was not successful. 
- -## Version Update - -Using the following command if you need to update MindSpore version: - -```bash -pip install --upgrade mindspore -``` diff --git a/install/mindspore_cpu_macos_install_source.md b/install/mindspore_cpu_macos_install_source.md deleted file mode 100644 index b9a4f0e5c1b3b37105d986c8bf66e5f65f7e2635..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_macos_install_source.md +++ /dev/null @@ -1,86 +0,0 @@ -# 源码编译方式安装MindSpore CPU版本(macOS) - - - -- [源码编译方式安装MindSpore CPU版本(macOS)](#源码编译方式安装mindspore-cpu版本macOS) - - [确认系统环境信息](#确认系统环境信息) - - [从代码仓下载源码](#从代码仓下载源码) - - [编译MindSpore](#编译mindspore) - - [安装MindSpore](#安装mindspore) - - [验证是否安装成功](#验证是否安装成功) - - [升级MindSpore版本](#升级mindspore版本) - - - - - -本文档介绍如何在CPU环境的macOS系统上,使用源码编译方法快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装macOS Catalina是x86架构64位操作系统。 -- 确认安装Xcode并配置clang version 11.0.0。 -- 确认安装[CMake 3.18.3版本](https://github.com/Kitware/Cmake/releases/tag/v3.18.3)。 - - 安装完成后将CMake添加到系统环境变量。 -- 确认安装[Python 3.7.5版本](https://www.python.org/ftp/python/3.7.5/python-3.7.5-macosx10.9.pkg)。 - - 安装完成后需要将Python添加到系统环境变量Path中。 -- 确认安装[OpenSSL 1.1.1及以上版本](https://github.com/openssl/openssl.git)。 - - 安装完成后将Openssl添加到环境变量。 -- 确认安装[wheel 0.32.0及以上版本](https://pypi.org/project/wheel/)。 -- 确认安装git工具。 - 如果未安装,使用如下命令下载安装: - - ```bash - brew install git - ``` - -## 从代码仓下载源码 - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## 编译MindSpore - -在源码根目录下执行如下命令: - -```bash -bash build.sh -e cpu -``` - -## 安装MindSpore - -```bash -pip install build/package/mindspore-{version}-py37-none-any.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 - -## 验证是否安装成功 - -```bash -python -c "import mindspore;print(mindspore.__version__)" -``` - 
-如果输出MindSpore版本号,说明MindSpore安装成功了,如果输出`No module named 'mindspore'`说明未安装成功。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -- 直接在线升级 - - ```bash - pip install --upgrade mindspore - ``` - -- 本地源码编译升级 - - 在源码根目录下执行编译脚本`build.sh`成功后,在`build/package`目录下找到编译生成的whl安装包,然后执行命令进行升级。 - - ```bash - pip install --upgrade mindspore-{version}-py37-none-any.whl - ``` diff --git a/install/mindspore_cpu_macos_install_source_en.md b/install/mindspore_cpu_macos_install_source_en.md deleted file mode 100644 index e0d82d5a4e3aa272d3708685e0427b66a85b3f24..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_macos_install_source_en.md +++ /dev/null @@ -1,86 +0,0 @@ -# Installing MindSpore in CPU by Source Code (macOS) - - - -- [Installing MindSpore in CPU by Source Code (macOS)](#installing-mindspore-in-cpu-by-source-code-macOS) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Downloading Source Code from Code Repository](#downloading-source-code-from-code-repository) - - [Compiling MindSpore](#compiling-mindspore) - - [Installing MindSpore](#installing-mindspore) - - [Installation Verification](#installing-verification) - - [Version Update](#version-update) - - - - - -This document describes how to quickly install MindSpore by source code in a macOS system with a CPU environment. - -## System Environment Information Confirmation - -- Confirm that macOS Catalina is installed with the x86 architecture 64-bit operating system. -- Confirm that the Xcode and Clang 11.0.0 is installed. -- Confirm that [CMake 3.18.3](https://github.com/Kitware/Cmake/releases/tag/v3.18.3) is installed. - - After installing, add the path of `cmake` to the environment variable PATH. -- Confirm that [Python 3.7.5](https://www.python.org/ftp/python/3.7.5/python-3.7.5-macosx10.9.pkg) is installed. - - After installing, add the path of `python` to the environment variable PATH. 
-- Confirm that [OpenSSL 1.1.1 or later](https://github.com/openssl/openssl.git) is installed. - - After installing, add the path of `Openssl` to the environment variable PATH. -- Confirm that [wheel 0.32.0 or later](https://pypi.org/project/wheel/) is installed. -- Confirm that the git tool is installed. - If not, use the following command to install it: - - ```bash - brew install git - ``` - -## Downloading Source Code from Code Repository - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## Compiling MindSpore - -Run the following command in the root directory of the source code to compile MindSpore: - -```bash -bash build.sh -e cpu -``` - -## Installing MindSpore - -```bash -pip install build/package/mindspore-{version}-py37-none-any.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -Of which, - -- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about other dependency items, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)). In other cases, you need to manually install dependency items. -- `{version}` denotes the version of MindSpore. For example, when you are installing MindSpore 1.1.0, `{version}` should be 1.1.0. - -## Installation Verification - -```bash -python -c "import mindspore;print(mindspore.__version__)" -``` - -If the MindSpore version number is displayed, it means that MindSpore is installed successfully, and if the output is `No module named 'mindspore'`, it means that the installation was not successful. - -## Version Update - -Using the following command if you need to update MindSpore version: - -- Update online - - ```bash - pip install --upgrade mindspore - ``` - -- Update after source code compilation - - After successfully executing the compile script `build.bat` in the root path of the source code, find the whl package in path `build/package`, use the following command to update your version. 
- -```bash -pip install --upgrade mindspore-{version}-cp37-cp37m-win_amd64.whl -``` diff --git a/install/mindspore_cpu_win_install_conda.md b/install/mindspore_cpu_win_install_conda.md deleted file mode 100644 index 08991b52b05b9c409ceeb2dd964d88fa89427828..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_win_install_conda.md +++ /dev/null @@ -1,80 +0,0 @@ -# Conda方式安装MindSpore CPU版本(Windows) - - - -- [Conda方式安装MindSpore CPU版本(Windows)](#conda方式安装mindspore-cpu版本windows) - - [确认系统环境信息](#确认系统环境信息) - - [安装Conda](#安装conda) - - [启动Anaconda Prompt](#启动anaconda-prompt) - - [添加Conda镜像源](#添加conda镜像源) - - [创建并激活Conda环境](#创建并激活conda环境) - - [安装MindSpore](#安装mindspore) - - [验证是否安装成功](#验证是否安装成功) - - [升级MindSpore版本](#升级mindspore版本) - - - - - -本文档介绍如何在CPU环境的Windows系统上,使用Conda方式快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装Windows 10是x86架构64位操作系统。 - -## 安装Conda - -下载并安装对应架构的Conda安装包。 - -- 官方源下载[X86 Anaconda](https://www.anaconda.com/distribution/) 或 [X86 Miniconda](https://docs.conda.io/en/latest/miniconda.html) -- 清华镜像源下载地址:[X86 Anaconda](https://mirrors.tuna.tsinghua.edu.cn/anaconda/archive/Anaconda3-2020.02-Windows-x86_64.exe) - -## 启动Anaconda Prompt - -安装Conda后,从Windows“开始”菜单打开“Anaconda Prompt”。 - -## 添加Conda镜像源 - -从清华源镜像源下载Conda安装包的可忽略此步操作。 - -```shell -conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/ -conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/main/ -conda config --set show_channel_urls yes -``` - -## 创建并激活Conda环境 - -```bash -conda create -n mindspore python=3.7.5 -conda activate mindspore -``` - -## 安装MindSpore - -参考[版本列表](https://www.mindspore.cn/versions)先进行SHA-256完整性校验,校验一致后再执行如下命令安装MindSpore。 - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/cpu/windows_x64/mindspore-{version}-cp37-cp37m-win_amd64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 
在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 - -## 验证是否安装成功 - -```bash -python -c "import mindspore;mindspore.__version__" -``` - -如果输出MindSpore版本号,说明MindSpore安装成功了,如果输出`No module named 'mindspore'`说明未安装成功。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -```bash -pip install --upgrade mindspore -``` diff --git a/install/mindspore_cpu_win_install_pip.md b/install/mindspore_cpu_win_install_pip.md deleted file mode 100644 index b0db5d7c962010bf54d35b1d6c8cc1cf9a42b835..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_win_install_pip.md +++ /dev/null @@ -1,53 +0,0 @@ -# pip方式安装MindSpore CPU版本(Windows) - - - -- [pip方式安装MindSpore CPU版本(Windows)](#pip方式安装mindspore-cpu版本windows) - - [确认系统环境信息](#确认系统环境信息) - - [安装MindSpore](#安装mindspore) - - [验证是否安装成功](#验证是否安装成功) - - [升级MindSpore版本](#升级mindspore版本) - - - - - -本文档介绍如何在CPU环境的Windows系统上,使用pip方式快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装Windows 10是x86架构64位操作系统。 -- 确认安装Python 3.7.5版本。 - - 如果未安装或者已安装其他版本的Python,则需从华为云下载[Python 3.7.5版本 64位](https://mirrors.huaweicloud.com/python/3.7.5/python-3.7.5-amd64.exe)进行安装。 -- 安装Python完毕后,将Python和pip添加到系统环境变量。 - - 添加Python:控制面板->系统->高级系统设置->环境变量。双击系统变量中的Path,将`python.exe`的路径添加进去。 - - 添加pip:`python.exe`同一级目录中的`Scripts`文件夹即为Python自带的pip文件,将其路径添加到系统环境变量中即可。 - -## 安装MindSpore - -参考[版本列表](https://www.mindspore.cn/versions)先进行SHA-256完整性校验,校验一致后再执行如下命令安装MindSpore。 - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/cpu/windows_x64/mindspore-{version}-cp37-cp37m-win_amd64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- 
`{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 - -## 验证是否安装成功 - -```bash -python -c "import mindspore;print(mindspore.__version__)" -``` - -如果输出MindSpore版本号,说明MindSpore安装成功了,如果输出`No module named 'mindspore'`说明未安装成功。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -```bash -pip install --upgrade mindspore -``` diff --git a/install/mindspore_cpu_win_install_pip_en.md b/install/mindspore_cpu_win_install_pip_en.md deleted file mode 100644 index 20297cbaaa912f97bf4491c81b1d9d99c7a19f93..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_win_install_pip_en.md +++ /dev/null @@ -1,53 +0,0 @@ -# Installing MindSpore in CPU by pip (Windows) - - - -- [Installing MindSpore in CPU by pip (Windows)](#installing-mindspore-in-cpu-by-pip-windows) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Installing MindSpore](#installing-mindspore) - - [Installation Verification](#installation-verification) - - [Version Update](#version-update) - - - - - -This document describes how to quickly install MindSpore by pip in a Windows system with a CPU environment. - -## System Environment Information Confirmation - -- Confirm that Windows 10 is installed with the x86 architecture 64-bit operating system. -- Confirm that Python 3.7.5 is installed. - - If you didn't install Python or you have installed other versions, please download the Python 3.7.5 64-bit from [Huaweicloud](https://mirrors.huaweicloud.com/python/3.7.5/Python-3.7.5.tgz) to install. -- After installing Python, add Python and pip to the environment variable. - - Add Python: Control Panel -> System -> Advanced System Settings -> Environment Variables. Double click the Path in the environment variable and add the path of `python.exe`. - - Add pip: The `Scripts` folder in the same directory of `python.exe` is the pip file that comes with Python, add it to the system environment variable. 
- -## Installing MindSpore - -It is recommended to refer to [Version List](https://www.mindspore.cn/versions/en) to perform SHA-256 integrity verification, and then execute the following command to install MindSpore after the verification is consistent. - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/cpu/windows_x64/mindspore-{version}-cp37-cp37m-win_amd64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -Of which, - -- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about other dependency items, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)). In other cases, you need to manually install dependency items. -- `{version}` denotes the version of MindSpore. For example, when you are installing MindSpore 1.1.0, `{version}` should be 1.1.0. - -## Installation Verification - -```bash -python -c "import mindspore;print(mindspore.__version__)" -``` - -If the MindSpore version number is displayed, it means that MindSpore is installed successfully, and if the output is `No module named 'mindspore'`, it means that the installation was not successful. 
- -## Version Update - -Using the following command if you need to update the MindSpore version: - -```bash -pip install --upgrade mindspore -``` diff --git a/install/mindspore_cpu_win_install_source.md b/install/mindspore_cpu_win_install_source.md deleted file mode 100644 index 2c6f4da41df47475d062678d72ac778a0352dae3..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_win_install_source.md +++ /dev/null @@ -1,85 +0,0 @@ -# 源码编译方式安装MindSpore CPU版本(Windows) - - - -- [源码编译方式安装MindSpore CPU版本(Windows)](#源码编译方式安装mindspore-cpu版本windows) - - [确认系统环境信息](#确认系统环境信息) - - [从代码仓下载源码](#从代码仓下载源码) - - [编译MindSpore](#编译mindspore) - - [安装MindSpore](#安装mindspore) - - [验证是否安装成功](#验证是否安装成功) - - [升级MindSpore版本](#升级mindspore版本) - - - - - -本文档介绍如何在CPU环境的Windows系统上,使用源码编译方法快速安装MindSpore。 - -详细步骤可以参考社区提供的实践——[在Windows(CPU)上进行源码编译安装MindSpore](https://www.mindspore.cn/news/newschildren?id=364),在此感谢社区成员[lvmingfu](https://gitee.com/lvmingfu)的分享。 - -## 确认系统环境信息 - -- 确认安装Windows 10是x86架构64位操作系统。 -- 确认安装[Visual C++ Redistributable for Visual Studio 2015](https://www.microsoft.com/zh-CN/download/details.aspx?id=48145)。 -- 确认安装了[git](https://github.com/git-for-windows/git/releases/download/v2.29.2.windows.2/Git-2.29.2.2-64-bit.exe)工具。 - - 如果git没有安装在`ProgramFiles`,需设置环境变量指定`patch.exe`的位置,例如git安装在`D:\git`时,需设置`set MS_PATCH_PATH=D:\git\usr\bin`。 -- 确认安装[MinGW-W64 GCC-7.3.0](https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win64/Personal%20Builds/mingw-builds/7.3.0/threads-posix/seh/x86_64-7.3.0-release-posix-seh-rt_v5-rev0.7z)。 - - 安装路径中不能出现中文和日文,安装完成后将安装路径下的`MinGW\bin`添加到系统环境变量。例如安装在`D:\gcc`,则需要将`D:\gcc\MinGW\bin`添加到系统环境变量Path中。 -- 确认安装[CMake 3.18.3版本](https://github.com/Kitware/Cmake/releases/tag/v3.18.3)。 - - 安装路径中不能出现中文和日文,安装完成后将`cmake.exe`的路径添加到系统环境变量Path中。 -- 确认安装[ActivePerl 5.28.1.2801版本](https://downloads.activestate.com/ActivePerl/releases/5.28.1.2801/ActivePerl-5.28.1.2801-MSWin32-x64-24563874.exe)。 -- 确认安装[Python 
3.7.5版本](https://www.python.org/ftp/python/3.7.5/python-3.7.5-amd64.exe)。 - - 安装路径中不能出现中文和日文,安装完成后需要将`python.exe`的路径添加到系统环境变量Path中,Python自带的pip文件在`python.exe`同级目录的`Scripts`文件夹中,也需要将pip文件的路径添加到系统环境变量Path中。 -- 确认安装[wheel 0.32.0及以上版本](https://pypi.org/project/wheel/)。 - -## 从代码仓下载源码 - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## 编译MindSpore - -在源码根目录下执行如下命令: - -```bash -call build.bat -``` - -## 安装MindSpore - -```bash -pip install build/package/mindspore-{version}-cp37-cp37m-win_amd64.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 - -## 验证是否安装成功 - -```bash -python -c "import mindspore;print(mindspore.__version__)" -``` - -如果输出MindSpore版本号,说明MindSpore安装成功了,如果输出`No module named 'mindspore'`说明未安装成功。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -- 直接在线升级 - - ```bash - pip install --upgrade mindspore - ``` - -- 本地源码编译升级 - - 在源码根目录下执行编译脚本`build.sh`成功后,在`build/package`目录下找到编译生成的whl安装包,然后执行命令进行升级。 - - ```bash - pip install --upgrade mindspore-{version}-cp37-cp37m-win_amd64.whl - ``` diff --git a/install/mindspore_cpu_win_install_source_en.md b/install/mindspore_cpu_win_install_source_en.md deleted file mode 100644 index 33a60fe6bb131eb1e8369d3e1ecd99a4dd83fe0c..0000000000000000000000000000000000000000 --- a/install/mindspore_cpu_win_install_source_en.md +++ /dev/null @@ -1,82 +0,0 @@ -# Installing MindSpore in CPU by Source Code (Windows) - - - -- [Installing MindSpore in CPU by Source Code (Windows)](#installing-mindspore-in-cpu-by-source-code-windows) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Downloading Source Code from Code Repository](#downloading-source-code-from-code-repository) - - [Compiling MindSpore](#compiling-mindspore) - - [Installing 
MindSpore](#installing-mindspore) - - [Installation Verification](#installing-verification) - - [Version Update](#version-update) - - - - - -This document describes how to quickly install MindSpore by source code in a Windows system with a CPU environment. - -## System Environment Information Confirmation - -- Confirm that Windows 10 is installed with the x86 architecture 64-bit operating system. -- Confirm that [Visual C++ Redistributable for Visual Studio 2015](https://www.microsoft.com/zh-CN/download/details.aspx?id=48145) is installed. -- Confirm that [git](https://github.com/git-for-windows/git/releases/download/v2.29.2.windows.2/Git-2.29.2.2-64-bit.exe) tool is installed. - - If git was not installed in `ProgramFiles`, you will need to set environment variable to where `patch.exe` is allocated. For example, when git was install in `D:\git`, `set MS_PATCH_PATH=D:\git\usr\bin`. -- Confirm that [MinGW-W64 GCC-7.3.0](https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win64/Personal%20Builds/mingw-builds/7.3.0/threads-posix/seh/x86_64-7.3.0-release-posix-seh-rt_v5-rev0.7z) is installed. - - Ensure that path of source code does not include special characters (Chinese, Janpanese characters etc.). After installing, add the path `MinGW\bin`to the environment variable PATH.For example, the installation directory is in `D:\gcc`, then you will need to add `D:\gcc\MinGW\bin` to the system environment variable PATH. -- Confirm that [CMake 3.18.3](https://github.com/Kitware/Cmake/releases/tag/v3.18.3) is installed. - - Ensure that path of source code does not include special characters (Chinese, Janpanese characters etc.). After installing, add the path of `cmake.exe` to the environment variable PATH. -- Confirm that [ActivePerl 5.28.1.2801 or later](https://downloads.activestate.com/ActivePerl/releases/5.28.1.2801/ActivePerl-5.28.1.2801-MSWin32-x64-24563874.exe) is installed. 
-- Confirm that [Python 3.7.5](https://www.python.org/ftp/python/3.7.5/python-3.7.5-amd64.exe) is installed. - - Ensure that path of source code does not include special characters (Chinese, Janpanese characters etc.). After installing, add the path of `python.exe` to the environment variable PATH. The `Scripts` folder in the same directory of `python.exe` is the pip file that comes with Python, you also need to add the path of the pip file to the environment variable PATH. - -## Downloading Source Code from Code Repository - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## Compiling MindSpore - -Run the following command in the root directory of the source code to compile MindSpore: - -```bash -call build.bat -``` - -## Installing MindSpore - -```bash -pip install build/package/mindspore-{version}-cp37-cp37m-win_amd64.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -Of which, - -- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about other dependency items, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)). In other cases, you need to manually install dependency items. -- `{version}` denotes the version of MindSpore. For example, when you are installing MindSpore 1.1.0, `{version}` should be 1.1.0. - -## Installation Verification - -```bash -python -c "import mindspore;print(mindspore.__version__)" -``` - -If the MindSpore version number is displayed, it means that MindSpore is installed successfully, and if the output is `No module named 'mindspore'`, it means that the installation was not successful. 
- -## Version Update - -Using the following command if you need to update the MindSpore version: - -- Update online - - ```bash - pip install --upgrade mindspore - ``` - -- Update after source code compilation - - After successfully executing the compile script `build.bat` in the root path of the source code, find the whl package in path `build/package`, use the following command to update your version. - -```bash -pip install --upgrade mindspore-{version}-cp37-cp37m-win_amd64.whl -``` diff --git a/install/mindspore_gpu_install_conda.md b/install/mindspore_gpu_install_conda.md deleted file mode 100644 index d2af0fc91479304cae6d6d15728f24ece205de32..0000000000000000000000000000000000000000 --- a/install/mindspore_gpu_install_conda.md +++ /dev/null @@ -1,127 +0,0 @@ -# Conda方式安装MindSpore GPU版本 - - - -- [Conda方式安装MindSpore GPU版本](#conda方式安装mindspore-gpu版本) - - [确认系统环境信息](#确认系统环境信息) - - [安装Conda](#安装conda) - - [添加Conda镜像源](#添加conda镜像源) - - [创建并激活Conda环境](#创建并激活conda环境) - - [安装MindSpore](#安装mindspore) - - [验证是否成功安装](#验证是否成功安装) - - [升级MindSpore版本](#升级mindspore版本) - - [安装MindInsight](#安装mindinsight) - - [安装MindArmour](#安装mindarmour) - - [安装MindSpore Hub](#安装mindspore-hub) - - - - - -本文档介绍如何在GPU环境的Linux系统上,使用Conda方式快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04是64位操作系统。 -- 确认安装[GCC 7.3.0版本](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz)。 -- 确认安装[CUDA 10.1](https://developer.nvidia.com/cuda-10.1-download-archive-base)。 - - CUDA安装后,若CUDA没有安装在默认位置,需要设置环境变量PATH(如:`export PATH=/usr/local/cuda-${version}/bin:$PATH`)和`LD_LIBRARY_PATH`(如:`export LD_LIBRARY_PATH=/usr/local/cuda-${version}/lib64:$LD_LIBRARY_PATH`),详细安装后的设置可参考[CUDA安装手册](https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html#post-installation-actions)。 -- 确认安装[cuDNN 7.6.X版本](https://developer.nvidia.com/rdp/cudnn-archive)。 -- 确认安装[OpenMPI 4.0.3版本](https://www.open-mpi.org/faq/?category=building#easy-build)(可选,单机多卡/多机多卡训练需要)。 -- 确认安装[NCCL 
2.7.6-1版本](https://docs.nvidia.com/deeplearning/sdk/nccl-install-guide/index.html#debian)(可选,单机多卡/多机多卡训练需要)。 -- 确认安装[gmp 6.1.2版本](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz)。 - -## 安装Conda - -下载并安装对应架构的Conda安装包。 - -- 官网下载地址:[X86 Anaconda](https://www.anaconda.com/distribution/) 或 [X86 Miniconda](https://docs.conda.io/en/latest/miniconda.html)。 -- 清华镜像源下载地址:[X86 Anaconda](https://mirrors.tuna.tsinghua.edu.cn/anaconda/archive/Anaconda3-2020.02-Linux-x86_64.sh)。 - -## 添加Conda镜像源 - -从清华源镜像源下载Conda安装包的可跳过此步操作。 - -```bash -conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/ -conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/main/ -conda config --set show_channel_urls yes -``` - -## 创建并激活Conda环境 - -```bash -conda create -n mindspore python=3.7.5 -conda activate mindspore -``` - -## 安装MindSpore - -参考[版本列表](https://www.mindspore.cn/versions)先进行SHA-256完整性校验,校验一致后再执行如下命令安装MindSpore。 - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-{version}-cp37-cp37m-linux_x86_64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 - -## 验证是否成功安装 - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="GPU") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -如果输出: - -```text -[[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 
2.]]] -``` - -说明MindSpore安装成功了。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -```bash -pip install --upgrade mindspore-gpu -``` - -## 安装MindInsight - -当您需要查看训练过程中的标量、图像、计算图以及模型超参等信息时,可以选装MindInsight。 - -具体安装步骤参见[MindInsight](https://gitee.com/mindspore/mindinsight/blob/master/README_CN.md)。 - -## 安装MindArmour - -当您进行AI模型安全研究或想要增强AI应用模型的防护能力时,可以选装MindArmour。 - -具体安装步骤参见[MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README_CN.md)。 - -## 安装MindSpore Hub - -当您想要快速体验MindSpore预训练模型时,可以选装MindSpore Hub。 - -具体安装步骤参见[MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README_CN.md)。 diff --git a/install/mindspore_gpu_install_docker.md b/install/mindspore_gpu_install_docker.md deleted file mode 100644 index 83d4abb209f88f7901035a16efbf585d8348a067..0000000000000000000000000000000000000000 --- a/install/mindspore_gpu_install_docker.md +++ /dev/null @@ -1,145 +0,0 @@ -# Docker方式安装MindSpore GPU版本 - - - -- [Docker方式安装MindSpore GPU版本](#docker方式安装mindspore-gpu版本) - - [确认系统环境信息](#确认系统环境信息) - - [nvidia-container-toolkit安装](#nvidia-container-toolkit安装) - - [获取MindSpore镜像](#获取mindspore镜像) - - [运行MindSpore镜像](#运行mindspore镜像) - - [验证是否安装成功](#验证是否安装成功) - - - - - -[Docker](https://docs.docker.com/get-docker/)是一个开源的应用容器引擎,让开发者打包他们的应用以及依赖包到一个轻量级、可移植的容器中。通过使用Docker,可以实现MindSpore的快速部署,并与系统环境隔离。 - -本文档介绍如何在GPU环境的Linux系统上,使用Docker方式快速安装MindSpore。 - -MindSpore的Docker镜像托管在[Huawei SWR](https://support.huaweicloud.com/swr/index.html)上。 - -目前容器化构建选项支持情况如下: - -| 硬件平台 | Docker镜像仓库 | 标签 | 说明 | -| :----- | :------------------------ | :----------------------- | :--------------------------------------- | -| GPU | `mindspore/mindspore-gpu` | `x.y.z` | 已经预安装MindSpore `x.y.z` GPU版本的生产环境。 | -| | | `devel` | 提供开发环境从源头构建MindSpore(`GPU CUDA10.1`后端)。安装详情请参考 。 | -| | | `runtime` | 提供运行时环境,未安装MindSpore二进制包(`GPU CUDA10.1`后端)。 | - -> **注意:** 不建议从源头构建GPU `devel` Docker镜像后直接安装whl包。我们强烈建议您在GPU `runtime` Docker镜像中传输并安装whl包。 -> `x.y.z`对应MindSpore版本号,例如安装1.1.0版本MindSpore时,`x.y.z`应写为1.1.0。 - 
-## 确认系统环境信息 - -- 确认安装Ubuntu 18.04是基于x86架构的64位操作系统。 -- 确认安装[Docker 18.03或者更高版本](https://docs.docker.com/get-docker/)。 - -## nvidia-container-toolkit安装 - -对于`GPU`后端,请确保`nvidia-container-toolkit`已经提前安装,以下是`Ubuntu`用户的`nvidia-container-toolkit`安装指南: - -```bash -# Acquire version of operating system version -DISTRIBUTION=$(. /etc/os-release; echo $ID$VERSION_ID) -curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | apt-key add - -curl -s -L https://nvidia.github.io/nvidia-docker/$DISTRIBUTION/nvidia-docker.list | tee /etc/apt/sources.list.d/nvidia-docker.list - -sudo apt-get update && sudo apt-get install -y nvidia-container-toolkit nvidia-docker2 -sudo systemctl restart docker -``` - -daemon.json是Docker的配置文件,编辑文件daemon.json配置容器运行时,让Docker可以使用nvidia-container-runtime: - -```bash -$ vim /etc/docker/daemon.json -{ - "runtimes": { - "nvidia": { - "path": "nvidia-container-runtime", - "runtimeArgs": [] - } - } -} -``` - -再次重启Docker: - -```bash -sudo systemctl daemon-reload -sudo systemctl restart docker -``` - -## 获取MindSpore镜像 - -对于`GPU`后端,可以直接使用以下命令获取最新的稳定镜像: - -```bash -docker pull swr.cn-south-1.myhuaweicloud.com/mindspore/mindspore-gpu:{tag} -``` - -其中: - -- `{tag}`对应上述表格中的标签。 - -## 运行MindSpore镜像 - -执行以下命令启动Docker容器实例: - -```bash -docker run -it -v /dev/shm:/dev/shm --runtime=nvidia --privileged=true swr.cn-south-1.myhuaweicloud.com/mindspore/mindspore-gpu:{tag} /bin/bash -``` - -其中: - -- `-v /dev/shm:/dev/shm` 将NCCL共享内存段所在目录挂载至容器内部; -- `--runtime=nvidia` 用于指定容器运行时为`nvidia-container-runtime`; -- `--privileged=true` 赋予容器扩展的能力; -- `{tag}`对应上述表格中的标签。 - -## 验证是否安装成功 - -- 如果你安装的是指定版本`x.y.z`的容器。 - - 按照上述步骤进入MindSpore容器后,测试Docker是否正常工作,请运行下面的Python代码并检查输出: - - ```python - import numpy as np - import mindspore.context as context - import mindspore.ops as ops - from mindspore import Tensor - - context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - - x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) - y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) - 
print(ops.add(x, y)) - ``` - - 代码成功运行时会输出: - - ```text - [[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]]] - ``` - - 至此,你已经成功通过Docker方式安装了MindSpore GPU版本。 - -- 如果你安装的是`runtime`标签的容器,需要自行安装MindSpore。 - - 进入[MindSpore安装指南页面](https://www.mindspore.cn/install),选择GPU硬件平台、Ubuntu-x86操作系统和pip的安装方式,获得安装指南。运行容器后参考安装指南,通过pip方式安装MindSpore GPU版本,并进行验证。 - -- 如果你安装的是`devel`标签的容器,需要自行编译并安装MindSpore。 - - 进入[MindSpore安装指南页面](https://www.mindspore.cn/install),选择GPU硬件平台、Ubuntu-x86操作系统和Source的安装方式,获得安装指南。运行容器后,下载MindSpore代码仓并参考安装指南,通过源码编译方式安装MindSpore GPU版本,并进行验证。 - -如果您想了解更多关于MindSpore Docker镜像的构建过程,请查看[docker repo](https://gitee.com/mindspore/mindspore/blob/master/docker/README.md)了解详细信息。 diff --git a/install/mindspore_gpu_install_docker_en.md b/install/mindspore_gpu_install_docker_en.md deleted file mode 100644 index f9fb55f030b449cf9fdad9baec1cb1d6a79f7ec8..0000000000000000000000000000000000000000 --- a/install/mindspore_gpu_install_docker_en.md +++ /dev/null @@ -1,145 +0,0 @@ -# Installing MindSpore in GPU by Docker - - - -- [Installing MindSpore in GPU by Docker](#installing-mindSpore-in-gpu-by-docker) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [nvidia-container-toolkit Installation](#nvidia-container-toolkit-installation) - - [Obtaining MindSpore Image](#obtaining-mindspore-image) - - [Running MindSpore Image](#running-mindspore-image) - - [Installation Verification](#installation-verification) - - - - - -[Docker](https://docs.docker.com/get-docker/) is an open source application container engine, developers can package their applications and dependencies into a lightweight, portable container. By using Docker, MindSpore can be rapidly deployed and separated from the system environment. - -This document describes how to quickly install MindSpore by Docker in a Linux system with a GPU environment. 
- -The Docker image of MindSpore is hosted on [Huawei SWR](https://support.huaweicloud.com/swr/index.html). - -The current support for containerized build is as follows: - -| Hardware | Docker Image Hub | Label | Note | -| :----- | :------------------------ | :----------------------- | :--------------------------------------- | -| GPU | `mindspore/mindspore-gpu` | `x.y.z` | A production environment with the MindSpore `x.y.z` GPU version pre-installed. | -| | | `devel` | Provide a development environment to build MindSpore from the source (`GPU CUDA10.1` backend). For installation details, please refer to . | -| | | `runtime` | Provide runtime environment, MindSpore binary package (`GPU CUDA10.1` backend) is not installed. | - -> **Note:** It is not recommended to install the whl package directly after building the GPU `devel` Docker image from the source. We strongly recommend that you transfer and install the `whl` package in the GPU `runtime` Docker image. -> `x.y.z` corresponds to the MindSpore version number. For example, when installing MindSpore version 1.1.0, `x.y.z` should be written as 1.1.0. - -## System Environment Information Confirmation - -- Confirm that Ubuntu 18.04 is installed with the 64-bit operating system. -- Confirm that [Docker 18.03 or later versioin](https://docs.docker.com/get-docker/) is installed. - -## nvidia-container-toolkit Installation - -For the `GPU` backend, please make sure that `nvidia-container-toolkit` has been installed in advance. The following is the installation guide for `nvidia-container-toolkit` for `Ubuntu` users: - -```bash -# Acquire version of operating system version -DISTRIBUTION=$(. 
/etc/os-release; echo $ID$VERSION_ID) -curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | apt-key add - -curl -s -L https://nvidia.github.io/nvidia-docker/$DISTRIBUTION/nvidia-docker.list | tee /etc/apt/sources.list.d/nvidia-docker.list - -sudo apt-get update && sudo apt-get install -y nvidia-container-toolkit nvidia-docker2 -sudo systemctl restart docker -``` - -daemon.json is the configuration file of Docker. Edit the file daemon.json to configure the container runtime so that Docker can use nvidia-container-runtime: - -```bash -$ vim /etc/docker/daemon.json -{ - "runtimes": { - "nvidia": { - "path": "nvidia-container-runtime", - "runtimeArgs": [] - } - } -} -``` - -Restart Docker: - -```bash -sudo systemctl daemon-reload -sudo systemctl restart docker -``` - -## Obtaining MindSpore Image - -For the `CPU` backend, you can directly use the following command to obtain the latest stable image: - -```bash -docker pull swr.cn-south-1.myhuaweicloud.com/mindspore/mindspore-gpu:{tag} -``` - -of which, - -- `{tag}` corresponds to the label in the above table. - -## Running MindSpore Image - -Execute the following command to start the Docker container instance: - -```bash -docker run -it -v /dev/shm:/dev/shm --runtime=nvidia --privileged=true swr.cn-south-1.myhuaweicloud.com/mindspore/mindspore-gpu:{tag} /bin/bash -``` - -of which, - -- `-v /dev/shm:/dev/shm` mounts the directory where the NCCL shared memory segment is located into the container; -- `--runtime=nvidia` is used to specify the container runtime as `nvidia-container-runtime`; -- `--privileged=true` enables the container to expand; -- `{tag}` corresponds to the label in the above table. - -## Installation Verification - -- If you are installing the container of the specified version `x.y.z`. 
- - After entering the MindSpore container according to the above steps, to test whether the Docker container is working properly, please run the following Python code and check the output: - - ```python - import numpy as np - import mindspore.context as context - import mindspore.ops as ops - from mindspore import Tensor - - context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - - x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) - y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) - print(ops.add(x, y)) - ``` - - The outputs should be the same as: - - ```text - [[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]]] - ``` - - It means MindSpore has been installed by docker successfully. - -- If you install a container with the label of `runtime`, you need to install MindSpore yourself. - - Go to [MindSpore Installation Guide Page](https://www.mindspore.cn/install/en), choose the GPU hardware platform, Ubuntu-x86 operating system and pip installation method to get the installation guide. Refer to the installation guide after running the container and install the MindSpore GPU version by pip, and verify it. - -- If you install a container with the label of `devel`, you need to compile and install MindSpore yourself. - - Go to [MindSpore Installation Guide Page](https://www.mindspore.cn/install/en), choose the GPU hardware platform, Ubuntu-x86 operating system and pip installation method to get the installation guide. After running the container, download the MindSpore code repository and refer to the installation guide, install the MindSpore GPU version through source code compilation, and verify it. - -If you want to know more about the MindSpore Docker image building process, please check [docker repo](https://gitee.com/mindspore/mindspore/blob/master/docker/README.md) for details. 
diff --git a/install/mindspore_gpu_install_pip.md b/install/mindspore_gpu_install_pip.md deleted file mode 100644 index a47340968b117046e2ba097548c687f97d607d3f..0000000000000000000000000000000000000000 --- a/install/mindspore_gpu_install_pip.md +++ /dev/null @@ -1,102 +0,0 @@ -# pip方式安装MindSpore GPU版本 - - - -- [pip方式安装MindSpore GPU版本](#pip方式安装mindspore-gpu版本) - - [确认系统环境信息](#确认系统环境信息) - - [安装MindSpore](#安装mindspore) - - [验证是否成功安装](#验证是否成功安装) - - [升级MindSpore版本](#升级mindspore版本) - - [安装MindInsight](#安装mindinsight) - - [安装MindArmour](#安装mindarmour) - - [安装MindSpore Hub](#安装mindspore-hub) - - - - - -本文档介绍如何在GPU环境的Linux系统上,使用pip方式快速安装MindSpore。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04是64位操作系统。 -- 确认安装[GCC 7.3.0版本](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz)。 -- 确认安装[CUDA 10.1](https://developer.nvidia.com/cuda-10.1-download-archive-base)。 - - CUDA安装后,若CUDA没有安装在默认位置,需要设置环境变量PATH(如:`export PATH=/usr/local/cuda-${version}/bin:$PATH`)和`LD_LIBRARY_PATH`(如:`export LD_LIBRARY_PATH=/usr/local/cuda-${version}/lib64:$LD_LIBRARY_PATH`),详细安装后的设置可参考[CUDA安装手册](https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html#post-installation-actions)。 -- 确认安装[cuDNN 7.6.X版本](https://developer.nvidia.com/rdp/cudnn-archive)。 -- 确认安装[OpenMPI 4.0.3版本](https://www.open-mpi.org/faq/?category=building#easy-build)(可选,单机多卡/多机多卡训练需要)。 -- 确认安装[NCCL 2.7.6-1版本](https://docs.nvidia.com/deeplearning/sdk/nccl-install-guide/index.html#debian)(可选,单机多卡/多机多卡训练需要)。 -- 确认安装[gmp 6.1.2版本](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz)。 -- 确认安装Python 3.7.5版本。 - - 如果未安装或者已安装其他版本的Python,可从[官网](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz)或者[华为云](https://mirrors.huaweicloud.com/python/3.7.5/Python-3.7.5.tgz)下载Python 3.7.5版本64位,进行安装。 - -## 安装MindSpore - -参考[版本列表](https://www.mindspore.cn/versions)先进行SHA-256完整性校验,校验一致后再执行如下命令安装MindSpore。 - -```bash -pip install 
https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-{version}-cp37-cp37m-linux_x86_64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 - -## 验证是否成功安装 - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="GPU") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -如果输出: - -```text -[[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]]] -``` - -说明MindSpore安装成功了。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -```bash -pip install --upgrade mindspore-gpu -``` - -## 安装MindInsight - -当您需要查看训练过程中的标量、图像、计算图以及模型超参等信息时,可以选装MindInsight。 - -具体安装步骤参见[MindInsight](https://gitee.com/mindspore/mindinsight/blob/master/README_CN.md)。 - -## 安装MindArmour - -当您进行AI模型安全研究或想要增强AI应用模型的防护能力时,可以选装MindArmour。 - -具体安装步骤参见[MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README_CN.md)。 - -## 安装MindSpore Hub - -当您想要快速体验MindSpore预训练模型时,可以选装MindSpore Hub。 - -具体安装步骤参见[MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README_CN.md)。 diff --git a/install/mindspore_gpu_install_pip_en.md b/install/mindspore_gpu_install_pip_en.md deleted file mode 100644 index fc51e6e74cbf34ff5e651b46f2d84229eeb3d2c5..0000000000000000000000000000000000000000 --- a/install/mindspore_gpu_install_pip_en.md +++ /dev/null @@ -1,102 +0,0 @@ -# Installing MindSpore in GPU by pip - - - -- [Installing MindSpore in GPU by 
pip](#installing-mindspore-in-gpu-by-pip) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Installing MindSpore](#installing-mindspore) - - [Installation Verification](#installation-verification) - - [Version Update](#version-update) - - [Installing MindInsight](#installing-mindinsight) - - [Installing MindArmour](#installing-mindarmour) - - [Installing MindSpore Hub](#installing-mindspore-hub) - - - - - -This document describes how to quickly install MindSpore by pip in a Linux system with a GPU environment. - -## System Environment Information Confirmation - -- Confirm that Ubuntu 18.04 is installed with the 64-bit operating system. -- Confirm that [GCC 7.3.0](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz) is installed. -- Confirm that [CUDA 10.1](https://developer.nvidia.com/cuda-10.1-download-archive-base) is installed. - - If CUDA is installed in a non-default path, after installing CUDA, environment variable `PATH`(e.g. `export PATH=/usr/local/cuda-${version}/bin:$PATH`) and `LD_LIBRARY_PATH`(e.g. `export LD_LIBRARY_PATH=/usr/local/cuda-${version}/lib64:$LD_LIBRARY_PATH`) need to be set. Please refer to [CUDA installation guide](https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html#post-installation-actions) for detailed post installation actions. -- Confirm that [cuDNN 7.6.X](https://developer.nvidia.com/rdp/cudnn-archive) is installed. -- Confirm that [OpenMPI 4.0.3](https://www.open-mpi.org/faq/?category=building#easy-build) is installed. (optional, required for single-node/multi-GPU and multi-node/multi-GPU training) -- Confirm that [NCCL 2.7.6-1](https://docs.nvidia.com/deeplearning/sdk/nccl-install-guide/index.html#debian) is installed. (optional, required for single-node/multi-GPU and multi-node/multi-GPU training) -- Confirm that [gmp 6.1.2](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz) is installed. -- Confirm that Python 3.7.5 is installed. 
- - If you didn't install Python or you have installed other versions, please download the Python 3.7.5 64-bit from [Python](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz) or [Huaweicloud](https://mirrors.huaweicloud.com/python/3.7.5/Python-3.7.5.tgz) to install. - -## Installing MindSpore - -It is recommended to refer to [Version List](https://www.mindspore.cn/versions/en) to perform SHA-256 integrity verification, and then execute the following command to install MindSpore after the verification is consistent. - -```bash -pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/{version}/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-{version}-cp37-cp37m-linux_x86_64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -Of which, - -- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about other dependency items, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)). In other cases, you need to manually install dependency items. -- `{version}` denotes the version of MindSpore. For example, when you are installing MindSpore 1.1.0, `{version}` should be 1.1.0. - -## Installation Verification - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="GPU") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -- The outputs should be the same as: - -```text -[[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]]] -``` - -It means MindSpore has been installed successfully. 
- -## Version Update - -Using the following command if you need to update the MindSpore version: - -```bash -pip install --upgrade mindspore-gpu -``` - -## Installing MindInsight - -If you need to analyze information such as model scalars, graphs, computation graphs and model traceback, you can install MindInsight. - -For more details, please refer to [MindInsight](https://gitee.com/mindspore/mindinsight/blob/master/README.md). - -## Installing MindArmour - -If you need to conduct AI model security research or enhance the security of the model in you applications, you can install MindArmour. - -For more details, please refer to [MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README.md). - -## Installing MindSpore Hub - -If you need to access and experience MindSpore pre-trained models quickly, you can install MindSpore Hub. - -For more details, please refer to [MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README.md). diff --git a/install/mindspore_gpu_install_source.md b/install/mindspore_gpu_install_source.md deleted file mode 100644 index 214a78b4998b15d276c8aef598c3ff7f4ad20318..0000000000000000000000000000000000000000 --- a/install/mindspore_gpu_install_source.md +++ /dev/null @@ -1,156 +0,0 @@ -# 源码编译方式安装MindSpore GPU版本 - - - -- [源码编译方式安装MindSpore GPU版本](#源码编译方式安装mindspore-gpu版本) - - [确认系统环境信息](#确认系统环境信息) - - [从代码仓下载源码](#从代码仓下载源码) - - [编译MindSpore](#编译mindspore) - - [安装MindSpore](#安装mindspore) - - [验证是否成功安装](#验证是否成功安装) - - [升级MindSpore版本](#升级mindspore版本) - - [安装MindInsight](#安装mindinsight) - - [安装MindArmour](#安装mindarmour) - - [安装MindSpore Hub](#安装mindspore-hub) - - - - - -本文档介绍如何在GPU环境的Linux系统上,使用源码编译方式快速安装MindSpore。 - -详细步骤可以参考社区提供的实践——[在Linux上体验源码编译安装MindSpore GPU版本](https://www.mindspore.cn/news/newschildren?id=401),在此感谢社区成员[飞翔的企鹅](https://gitee.com/zhang_yi2020)的分享。 - -## 确认系统环境信息 - -- 确认安装Ubuntu 18.04是64位操作系统。 -- 确认安装[GCC 7.3.0版本](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz)。 -- 确认安装[gmp 
6.1.2版本](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz)。 -- 确认安装[Python 3.7.5版本](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz)。 -- 确认安装[CMake 3.18.3及以上版本](https://cmake.org/download/)。 - - 安装完成后将CMake添加到系统环境变量。 -- 确认安装[patch 2.5及以上版本](http://ftp.gnu.org/gnu/patch/)。 - - 安装完成后将patch添加到系统环境变量中。 -- 确认安装[Autoconf 2.69及以上版本](https://www.gnu.org/software/autoconf)(可使用系统自带版本)。 -- 确认安装[Libtool 2.4.6-29.fc30及以上版本](https://www.gnu.org/software/libtool)(可使用系统自带版本)。 -- 确认安装[Automake 1.15.1及以上版本](https://www.gnu.org/software/automake)(可使用系统自带版本)。 -- 确认安装[cuDNN 7.6及以上版本](https://developer.nvidia.com/rdp/cudnn-archive)。 -- 确认安装[Flex 2.5.35及以上版本](https://github.com/westes/flex/)。 -- 确认安装[wheel 0.32.0及以上版本](https://pypi.org/project/wheel/)。 -- 确认安装[OpenSSL 1.1.1及以上版本](https://github.com/openssl/openssl.git)。 - - 安装完成后设置环境变量`export OPENSSL_ROOT_DIR=“OpenSSL安装目录”`。 -- 确认安装[CUDA 10.1](https://developer.nvidia.com/cuda-10.1-download-archive-base)按默认配置安装。 - - CUDA安装后,若CUDA没有安装在默认位置,需要设置环境变量PATH(如:`export PATH=/usr/local/cuda-${version}/bin:$PATH`)和`LD_LIBRARY_PATH`(如:`export LD_LIBRARY_PATH=/usr/local/cuda-${version}/lib64:$LD_LIBRARY_PATH`),详细安装后的设置可参考[CUDA安装手册](https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html#post-installation-actions)。 -- 确认安装[OpenMPI 4.0.3版本](https://www.open-mpi.org/faq/?category=building#easy-build)(可选,单机多卡/多机多卡训练需要)。 -- 确认安装[NCCL 2.7.6-1版本](https://docs.nvidia.com/deeplearning/sdk/nccl-install-guide/index.html#debian)(可选,单机多卡/多机多卡训练需要)。 -- 确认安装[NUMA 2.0.11及以上版本](https://github.com/numactl/numactl)。 - 如果未安装,使用如下命令下载安装: - - ```bash - apt-get install libnuma-dev - ``` - -- 确认安装git工具。 - 如果未安装,使用如下命令下载安装: - - ```bash - apt-get install git - ``` - -## 从代码仓下载源码 - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## 编译MindSpore - -在源码根目录下执行如下命令。 - -```bash -bash build.sh -e gpu -``` - -其中: - -`build.sh`中默认的编译线程数为8,如果编译机性能较差可能会出现编译错误,可在执行中增加-j{线程数}来减少线程数量。如`bash build.sh -e gpu -j4`。 - -## 安装MindSpore - -```bash 
-chmod +x build/package/mindspore_gpu-{version}-cp37-cp37m-linux_x86_64.whl -pip install build/package/mindspore_gpu-{version}-cp37-cp37m-linux_x86_64.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -其中: - -- 在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)),其余情况需自行安装。 -- `{version}`表示MindSpore版本号,例如安装1.1.0版本MindSpore时,`{version}`应写为1.1.0。 - -## 验证是否成功安装 - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="GPU") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -如果输出: - -```text -[[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]]] -``` - -说明MindSpore安装成功了。 - -## 升级MindSpore版本 - -当需要升级MindSpore版本时,可执行如下命令: - -- 直接在线升级 - - ```bash - pip install --upgrade mindspore-gpu - ``` - -- 本地源码编译升级 - - 在源码根目录下执行编译脚本`build.sh`成功后,在`build/package`目录下找到编译生成的whl安装包,然后执行命令进行升级。 - - ```bash - pip install --upgrade mindspore_gpu-{version}-cp37-cp37m-linux_{arch}.whl - ``` - -## 安装MindInsight - -当您需要查看训练过程中的标量、图像、计算图以及模型超参等信息时,可以选装MindInsight。 - -具体安装步骤参见[MindInsight](https://gitee.com/mindspore/mindinsight/blob/master/README_CN.md)。 - -## 安装MindArmour - -当您进行AI模型安全研究或想要增强AI应用模型的防护能力时,可以选装MindArmour。 - -具体安装步骤参见[MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README_CN.md)。 - -## 安装MindSpore Hub - -当您想要快速体验MindSpore预训练模型时,可以选装MindSpore Hub。 - -具体安装步骤参见[MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README_CN.md)。 diff --git a/install/mindspore_gpu_install_source_en.md b/install/mindspore_gpu_install_source_en.md deleted file mode 100644 index b6c7e5fe655b7fdb36c2aa601687fe32047c2a89..0000000000000000000000000000000000000000 --- 
a/install/mindspore_gpu_install_source_en.md +++ /dev/null @@ -1,153 +0,0 @@ -# Installing MindSpore in GPU by Source Code - - - -- [Installing MindSpore in GPU by Source Code](#installing-mindspore-in-gpu-by-source-code) - - [System Environment Information Confirmation](#system-environment-information-confirmation) - - [Downloading Source Code from Code Repository](#downloading-source-code-from-code-repository) - - [Compiling MindSpore](#compiling-mindspore) - - [Installing MindSpore](#installing-mindspore) - - [Installation Verification](#installation-verification) - - [Installing MindInsight](#installing-mindinsight) - - [Installing MindArmour](#installing-mindarmour) - - [Installing MindSpore Hub](#installing-mindspore-hub) - - - - - -This document describes how to quickly install MindSpore by source code in a Linux system with a GPU environment. - -## System Environment Information Confirmation - -- Confirm that Ubuntu 18.04 is installed with the 64-bit operating system. -- Confirm that [GCC 7.3.0](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz) is installed. -- Confirm that [gmp 6.1.2](https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz) is installed. -- Confirm that [Python 3.7.5](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz) is installed. -- Confirm that [CMake 3.18.3 or later](https://cmake.org/download/) is installed. - - After installing, add the path of `cmake` to the environment variable PATH. -- Confirm that [patch 2.5 or later](http://ftp.gnu.org/gnu/patch/) is installed. - - After installing, add the path of `patch` to the environment variable PATH. -- Confirm that [Autoconf 2.69 or later](https://www.gnu.org/software/autoconf) is installed. (Default versions of these tools built in their systems are supported.) -- Confirm that [Libtool 2.4.6-29.fc30 or later](https://www.gnu.org/software/libtool) is installed. (Default versions of these tools built in their systems are supported.) 
-- Confirm that [Automake 1.15.1 or later](https://www.gnu.org/software/automake) is installed.(Default versions of these tools built in their systems are supported.) -- Confirm that [cuDNN 7.6 or later](https://developer.nvidia.com/rdp/cudnn-archive) is installed. -- Confirm that [Flex 2.5.35 or later](https://github.com/westes/flex/) is installed. -- Confirm that [wheel 0.32.0 or later](https://pypi.org/project/wheel/) is installed. -- Confirm that [OpenSSL 1.1.1 or later](https://github.com/openssl/openssl.git) is installed. - - ensure that [OpenSSL](https://github.com/openssl/openssl) is installed and set system variable `export OPENSSL_ROOT_DIR="OpenSSL installation directory"`. -- Confirm that [CUDA 10.1](https://developer.nvidia.com/cuda-10.1-download-archive-base) is installed as default configuration. - - If CUDA is installed in a non-default path, after installing CUDA, environment variable `PATH`(e.g. `export PATH=/usr/local/cuda-${version}/bin:$PATH`) and `LD_LIBRARY_PATH`(e.g. `export LD_LIBRARY_PATH=/usr/local/cuda-${version}/lib64:$LD_LIBRARY_PATH`) need to be set. Please refer to [CUDA installation guide](https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html#post-installation-actions) for detailed post installation actions. -- Confirm that [OpenMPI 4.0.3](https://www.open-mpi.org/faq/?category=building#easy-build) is installed. (optional, required for single-node/multi-GPU and multi-node/multi-GPU training) -- Confirm that [NCCL 2.7.6-1](https://docs.nvidia.com/deeplearning/sdk/nccl-install-guide/index.html#debian) is installed. (optional, required for single-node/multi-GPU and multi-node/multi-GPU training) -- Confirm that [NUMA 2.0.11 or later](https://github.com/numactl/numactl) is installed. - If not, use the following command to install it: - - ```bash - apt-get install libnuma-dev - ``` - -- Confirm that the git tool is installed. 
- If not, use the following command to install it: - - ```bash - apt-get install git - ``` - -## Downloading Source Code from Code Repository - -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -## Compiling MindSpore - -Run the following command in the root directory of the source code to compile MindSpore: - -```bash -bash build.sh -e gpu -``` - -Of which, - -- In the `build.sh` script, the default number of compilation threads is 8. If the compiler performance is poor, compilation errors may occur. You can add -j{Number of threads} in to script to reduce the number of threads. For example, `bash build.sh -e ascend -j4`. - -## Installing MindSpore - -```bash -chmod +x build/package/mindspore_gpu-{version}-cp37-cp37m-linux_x86_64.whl -pip install build/package/mindspore_gpu-{version}-cp37-cp37m-linux_x86_64.whl -i https://pypi.tuna.tsinghua.edu.cn/simple -``` - -Of which, - -- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about other dependency items, see [requirements.txt](https://gitee.com/mindspore/mindspore/blob/master/requirements.txt)). In other cases, you need to manually install dependency items. -- `{version}` denotes the version of MindSpore. For example, when you are installing MindSpore 1.1.0, `{version}` should be 1.1.0. - -## Installation Verification - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.context as context - -context.set_context(device_target="GPU") -x = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -y = Tensor(np.ones([1,3,3,4]).astype(np.float32)) -print(ops.add(x, y)) -``` - -- The outputs should be the same as: - -```text -[[[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]], - - [[ 2. 2. 2. 2.], - [ 2. 2. 2. 2.], - [ 2. 2. 2. 2.]]] -``` - -It means MindSpore has been installed successfully. 
- -## Version Update - -Using the following command if you need to update the MindSpore version. - -- Update online - - ```bash - pip install --upgrade mindspore-gpu - ``` - -- Update after source code compilation - - After successfully executing the compile script `build.sh` in the root path of the source code, find the whl package in path `build/package`, use the following command to update your version. - - ```bash - pip install --upgrade mindspore_gpu-{version}-cp37-cp37m-linux_{arch}.whl - ``` - -## Installing MindInsight - -If you need to analyze information such as model scalars, graphs, computation graphs and model traceback, you can install MindInsight. - -For more details, please refer to [MindInsight](https://gitee.com/mindspore/mindinsight/blob/master/README.md). - -## Installing MindArmour - -If you need to conduct AI model security research or enhance the security of the model in you applications, you can install MindArmour. - -For more details, please refer to [MindArmour](https://gitee.com/mindspore/mindarmour/blob/master/README.md). - -## Installing MindSpore Hub - -If you need to access and experience MindSpore pre-trained models quickly, you can install MindSpore Hub. - -For more details, please refer to [MindSpore Hub](https://gitee.com/mindspore/hub/blob/master/README.md). diff --git a/install/third_party/third_party_cpu_install.md b/install/third_party/third_party_cpu_install.md deleted file mode 100644 index fb762174b68e99a17297519a83e04a80d5fdc338..0000000000000000000000000000000000000000 --- a/install/third_party/third_party_cpu_install.md +++ /dev/null @@ -1,383 +0,0 @@ -# 源码编译方式安装MindSpore CPU版本(含第三方依赖) - -作者:[damon0626](https://gitee.com/damon0626) - -本文档介绍如何在```Ubuntu 18.04 64```位操作系统```CPU```环境下,使用源码编译方式安装```MindSpore```。 - -## 确认系统环境信息 - -### 1. 确认安装Ubuntu 18.04是64位操作系统 - -(1)确认系统版本号,在终端输入```lsb_release -a``` - -```shell -ms-sd@mssd:~$ lsb_release -a -No LSB modules are available. 
-Distributor ID:Ubuntu -Description:Ubuntu 18.04.5 LTS -Release:18.04 -Codename:bionic -``` - -(2)确认系统位数,在终端输入```uname -a``` - -```shell -ms-sd@mssd:~$ uname -a -Linux mssd 5.4.0-42-generic #46~18.04.1-Ubuntu SMP Fri Jul 10 07:21:24 UTC 2020 x86_64 x86_64 x86_64 GNU/Linux -``` - -### 2. 确认安装GCC 7.3.0版本 - -(1)确认当前系统安装的GCC版本 - -在终端输入```gcc --version```,系统已安装版本为7.5.0 - -```shell -ms-sd@mssd:~/gcc-7.3.0/build$ gcc --version -gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0 -Copyright (C) 2017 Free Software Foundation, Inc. -This is free software; see the source for copying conditions. -There is NOwarranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -``` - -(2)如果提示找不到gcc命令,用以下方式安装 - -```shell -ms-sd@mssd:~$ sudo apt-get install gcc -``` - -(3)本地编译安装7.3.0,下载文件 - -[点此下载GCC7.3.0](http://ftp.gnu.org/gnu/gcc/gcc-7.3.0/gcc-7.3.0.tar.gz) - -(4)解压并进入目录 - -```shell -tar -xvzf gcc-7.3.0.tar.gz -cd gcc-7.3.0 -``` - -(5)运行```download_prerequesites```,运行该文件的目的是 - -> 1. Download some prerequisites needed by gcc. -> 2. Run this from the top level of the gcc source tree and the gcc build will do the right thing. - -```shell -ms-sd@mssd:~/gcc-7.3.0$ ./contrib/download_prerequisites -2020-12-19 09:58:33 URL: ftp://gcc.gnu.org/pub/gcc/infrastructure/gmp-6.1.0.tar.bz2 [2383840] -> "./gmp-6.1.0.tar.bz2" [1] -2020-12-19 10:00:01 URL: ftp://gcc.gnu.org/pub/gcc/infrastructure/mpfr-3.1.4.tar.bz2 [1279284] -> "./mpfr-3.1.4.tar.bz2" [1] -2020-12-19 10:00:50 URL: ftp://gcc.gnu.org/pub/gcc/infrastructure/mpc-1.0.3.tar.gz [669925] -> "./mpc-1.0.3.tar.gz" [1] -2020-12-19 10:03:10 URL: ftp://gcc.gnu.org/pub/gcc/infrastructure/isl-0.16.1.tar.bz2 [1626446] -> "./isl-0.16.1.tar.bz2" [1] -gmp-6.1.0.tar.bz2: 成功 -mpfr-3.1.4.tar.bz2: 成功 -mpc-1.0.3.tar.gz: 成功 -isl-0.16.1.tar.bz2: 成功 -All prerequisites downloaded successfully. 
-``` - -(6)运行成功后,进行配置 - -```shell -ms-sd@mssd:~/gcc-7.3.0/build$ ../configure --enable-checking=release --enable-languages=c,c++ --disable-multilib -``` - -> 参数解释: -> –enable-checking=release 增加一些检查 -> –enable-languages=c,c++ 需要gcc支持的编程语言 -> –disable-multilib 取消多目标库编译(取消32位库编译) - -(7)编译,根据CPU性能,选择合适的线程数 - -```shell -ms-sd@mssd:~/gcc-7.3.0/build$ make -j 6 -``` - -(8)安装 - -```shell -ms-sd@mssd:~$ sudo make install -j 6 -``` - -(9)验证,看到版本已经变更为7.3.0,安装成功。 - -```shell -ms-sd@mssd:~$ gcc --version -gcc (GCC) 7.3.0 -Copyright © 2017 Free Software Foundation, Inc. -This is free software; see the source for copying conditions. -There is NOwarranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -``` - -### 3. 确认安装Python 3.7.5版本 - -**注意:** ```Ubuntu 18.04``` 系统自带的 ```Python3```版本为```Python3.6.9```,系统自带```Python```不要删除,防止依赖错误。```Linux```发行版中, ```Debian```系的提供了```update-alternatives```工具,用于在多个同功能的软件,或软件的多个不同版本间选择,这里采用```update-alternatives```工具控制多个Python版本。 - -(1)查看系统Python版本 - -```shell -ms-sd@mssd:~$ python3 --version -Python3.6.9 -``` - -(2)[点此下载Python 3.7.5安装包](https://www.python.org/ftp/python/3.7.5/Python-3.7.5.tgz) - -(3)解压并进入目录 - -```shell -ms-sd@mssd:~$ tar -xvzf Python-3.7.5.tgz -ms-sd@mssd:~$ cd Python-3.7.5/ -``` - -(4)配置文件路径 - -```shell -ms-sd@mssd:~/Python-3.7.5$ ./configure --prefix=/usr/local/python3.7.5 --with-ssl -``` - -> 参数解释: -> --prefix=/usr/local/python3.7.5 -> 可执行文件放在/usr/local/python3.7.5/bin下, -> 库文件放在/usr/local/python3.7.5/lib, -> 配置文件放在/usr/local/python3.7.1/include, -> 其他资源文件放在/usr/local/python3.7.5下 -> -> --with-ssl:确保pip安装库时能找到SSL - -(5)安装必要的依赖 - -```shell -ms-sd@mssd:~/Python-3.7.5$ sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python3-openssl -``` - -(6)编译安装 - -```shell -ms-sd@mssd:~/Python-3.7.5$ make -j 6 -ms-sd@mssd:~/Python-3.7.5$ sudo make install -j 6 -``` - 
-(7)查看当前系统python/python3的指向 - -```shell -ms-sd@mssd:~$ ls -l /usr/bin/ | grep python -lrwxrwxrwx 1 root root 23 10月 8 20:12 pdb3.6 -> ../lib/python3.6/pdb.py -lrwxrwxrwx 1 root root 31 12月 18 21:44 py3versions -> ../share/python3/py3versions.py -lrwxrwxrwx 1 root root 9 12月 18 21:44 python3 -> python3.6 --rwxr-xr-x 2 root root 4526456 10月 8 20:12 python3.6 --rwxr-xr-x 2 root root 4526456 10月 8 20:12 python3.6m -lrwxrwxrwx 1 root root 10 12月 18 21:44 python3m -> python3.6m() -``` - -(8)备份原来的python3链接,重新建立新的python3指向以更改python3默认指向 - -```shell -ms-sd@mssd:~/Python-3.7.5$ sudo mv /usr/bin/python3 /usr/bin/python3.bak -ms-sd@mssd:~/Python-3.7.5$ sudo ln -s /usr/local/python3.7.5/bin/python3.7 /usr/bin/python3 -``` - -(9)重新建立pip3指向 - -```shell -ms-sd@mssd:~/Python-3.7.5$ sudo ln -s /usr/local/python3.7.5/bin/pip3 /usr/bin/pip3 -``` - -(10)输入验证,Python已更改为3.7.5版本 - -```python -ms-sd@mssd:~/Python-3.7.5$ python3 -Python 3.7.5 (default, Dec 19 2020, 11:29:09) -[GCC 7.3.0] on linux -Type "help", "copyright", "credits" or "license" for more information. ->>> -``` - -(11)更新```update-alternatives```python列表 - -```shell -sudo update-alternatives --install /usr/bin/python python /usr/bin/python2 100 -sudo update-alternatives --install /usr/bin/python python /usr/bin/python3 150 -sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.6 110 -``` - -(12)设置Python默认选项,选择2,默认优先级最高的选项 - -```shell -ms-sd@mssd:~$ sudo update-alternatives --config python -There are 3 choices for the alternative python (providing /usr/bin/python). - - Selection Path Priority Status ------------------------------------------------------------- - 0 /usr/bin/python3 150 auto mode - 1 /usr/bin/python2 100 manual mode -* 2 /usr/bin/python3 150 manual mode - 3 /usr/bin/python3.6 110 manual mode - -Press to keep the current choice[*], or type selection number: -``` - -### 4. 
确认安装OpenSSL 1.1.1及以上版本 - -(1)Ubuntu 18.04自带了OpenSSL 1.1.1 - -```shell -ms-sd@mssd:~/Python-3.7.5$ openssl version -OpenSSL 1.1.1 11 Sep 2018 -``` - -(2)本地编译安装请参考[Ubuntu 18.04 安装新版本openssl](https://www.cnblogs.com/thechosenone95/p/10603110.html) - -### 5. 确认安装CMake 3.18.3及以上版本 - -(1)[点此下载CMake 3.18.5](https://github.com/Kitware/CMake/releases/download/v3.18.5/cmake-3.18.5.tar.gz) - -(2)解压并进入文件目录 - -```shell -ms-sd@mssd:~$ tar -zxvf cmake-3.18.5.tar.gz -ms-sd@mssd:~$ cd cmake-3.18.5/ -``` - -(3)编译安装 - -在源码的README.rst中看到如下文字: - -> For example, if you simply want to build and install CMake from source, -> you can build directly in the source tree:: -> -> $ ./bootstrap && make && sudo make install -> -> Or, if you plan to develop CMake or otherwise run the test suite, create -> a separate build tree:: -> -> $.mkdir cmake-build && cd cmake-build -> -> $./cmake-source/bootsrap && make - -选择从源码编译安装,根据提示在终端依次输入以下命令: - -```shell -ms-sd@mssd:~/cmake-3.18.5$ ./bootstrap -ms-sd@mssd:~/cmake-3.18.5$ make -j 6 -ms-sd@mssd:~/cmake-3.18.5$ sudo make install -j 6 -``` - -(4)验证,安装成功 - -```shell -ms-sd@mssd:~$ cmake --version -cmake version 3.18.5 - -CMake suite maintained and supported by Kitware (kitware.com/cmake). -``` - -### 6. 确认安装wheel 0.32.0及以上版本 - -(1)更新pip源 - -修改 ~/.pip/pip.conf (如果没有该文件,创建一个), 内容如下: - -```shell -[global] -index-url = https://pypi.tuna.tsinghua.edu.cn/simple -``` - -(2)安装wheel 0.32.0 - -```shel -ms-sd@mssd:~$ sudo pip3 install wheel==0.32.0 -``` - -(3)查看安装情况 - -```shell -ms-sd@mssd:~$ pip3 list -Package Version ----------- ------- -numpy 1.19.4 -pip 20.3.3 -setuptools 41.2.0 -wheel 0.32.0 -``` - -### 7. 确认安装patch 2.5及以上版本 - -(1)查看patch版本,ubuntu18.04自带了2.7.6版本 - -```shell -ms-sd@mssd:~$ patch --version -GNU patch 2.7.6 -Copyright (C) 2003, 2009-2012 Free Software Foundation, Inc. -Copyright (C) 1988 Larry Wall - -License GPLv3+: GNU GPL version 3 or later . -This is free software: you are free to change and redistribute it. 
-There is NO WARRANTY, to the extent permitted by law. - -Written by Larry Wall and Paul Eggert -``` - -### 8. 确认安装NUMA 2.0.11及以上版本 - -(1)如果未安装,使用如下命令下载安装: - -```shell -ms-sd@mssd:~$ apt-get install libnuma-dev -``` - -### 9. 确认安装git工具 - -```shell -ms-sd@mssd:~$ sudo apt-get install git -``` - -## MindSpore源码安装 - -### 10. 下载MindSpore源码 - -(1)从代码仓库下载源码 - -```shell -ms-sd@mssd:~$ git clone -``` - -(2)安装依赖(根据编译过程中报错,整理如下) - -```shell -ms-sd@mssd:~$ sudo apt-get install python3.7-dev pybind11 python3-wheel python3-setuptools python3.7-minimal -``` - -(3)编译(内存占用太大,总是超内存线程被杀死,建议4G以上) - -```shell -ms-sd@mssd:~/mindspore$ sudo bash build.sh -e cpu -j 2 -``` - -(4)编译成功 - -大约需要1小时,编译成功,出现如下提示: - -```shell -CPack: - package: /home/ms-sd/mindspore/build/mindspore/mindspore generated. -success building mindspore project! ----------------- mindspore: build end ---------------- -``` - -同时在```/mindspore/build/package/```文件下生成了```mindspore-1.1.0-cp37-cp37m-linux_x86_64.whl```文件。 - -(5)pip3安装MindSpore安装文件 - -```shell -ms-sd@mssd:~/mindspore$ sudo pip3 install /mindspore/build/package/mindspore-1.1.0-cp37-cp37m-linux_x86_64.whl -``` - -(6)导入测试 - -```python3 -ms-sd@mssd:~/mindspore$ sudo python3 -Python 3.7.5 (default, Dec 19 2020, 13:04:49) -[GCC 7.3.0] on linux -Type "help", "copyright", "credits" or "license" for more information. 
->>> import mindspore ->>> mindspore.__version__ -'1.1.0' -``` diff --git a/resource/MindSpore-logo.png b/resource/MindSpore-logo.png deleted file mode 100644 index 9cdefce2d3b37a45032052fac30ad3a89c479162..0000000000000000000000000000000000000000 Binary files a/resource/MindSpore-logo.png and /dev/null differ diff --git a/resource/_static/api_source.png b/resource/_static/api_source.png deleted file mode 100644 index 5759b96201fe16606d5474f2ced87014641e8bce..0000000000000000000000000000000000000000 Binary files a/resource/_static/api_source.png and /dev/null differ diff --git a/resource/_static/logo_modelarts.png b/resource/_static/logo_modelarts.png deleted file mode 100644 index 9b499805e2f8ab52dcde3fd4a7708ef753da9b84..0000000000000000000000000000000000000000 Binary files a/resource/_static/logo_modelarts.png and /dev/null differ diff --git a/resource/_static/logo_notebook.png b/resource/_static/logo_notebook.png deleted file mode 100644 index f28598315f19f4be76a73ddf5dc6bbdbe4db35fd..0000000000000000000000000000000000000000 Binary files a/resource/_static/logo_notebook.png and /dev/null differ diff --git a/resource/_static/logo_online_experience.png b/resource/_static/logo_online_experience.png deleted file mode 100644 index 9845ddd10bc6e997be8725e841c16328f4eb9135..0000000000000000000000000000000000000000 Binary files a/resource/_static/logo_online_experience.png and /dev/null differ diff --git a/resource/_static/logo_source.png b/resource/_static/logo_source.png deleted file mode 100644 index 9932d67ab50871edb0c95979c4e948c812c7cdea..0000000000000000000000000000000000000000 Binary files a/resource/_static/logo_source.png and /dev/null differ diff --git a/resource/_static/master_api.png b/resource/_static/master_api.png deleted file mode 100644 index 3647bcec08defe99a58fa7315533c576fc7093d2..0000000000000000000000000000000000000000 Binary files a/resource/_static/master_api.png and /dev/null differ diff --git a/resource/_static/master_api_en.png 
b/resource/_static/master_api_en.png deleted file mode 100644 index 3ad3f9ba3b55197bcd186c47113b1820bb87bc19..0000000000000000000000000000000000000000 Binary files a/resource/_static/master_api_en.png and /dev/null differ diff --git a/resource/_static/master_doc.png b/resource/_static/master_doc.png deleted file mode 100644 index b4e46f4e28b84e2e4c190300c80ff5fc431700b2..0000000000000000000000000000000000000000 Binary files a/resource/_static/master_doc.png and /dev/null differ diff --git a/resource/_static/master_doc_en.png b/resource/_static/master_doc_en.png deleted file mode 100644 index 31800bcfc2fa868b2978d8aaceb97d9d74073902..0000000000000000000000000000000000000000 Binary files a/resource/_static/master_doc_en.png and /dev/null differ diff --git a/resource/api_mapping/ApplyAdadelta.md b/resource/api_mapping/ApplyAdadelta.md deleted file mode 100644 index a3bee3714a22a38b9f9772f07f3c86659c0cb224..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ApplyAdadelta.md +++ /dev/null @@ -1,84 +0,0 @@ -# 比较与torch.optim.Adadelta的功能差异 - -## torch.optim.Adadelta - -```python -class torch.optim.Adadelta( - params, - lr=1.0, - rho=0.9, - eps=1e-06, - weight_decay=0 -) -``` - -## mindspore.ops.ApplyAdadelta - -```python -class mindspore.ops.ApplyAdadelta(*args, **kwargs)( - var, - accum, - accum_update, - lr, - rho, - epsilon, - grad -) -``` - -## 使用方式 - -PyTorch: 需要将期望更新的参数放入1个迭代类型参数`params`后传入,且设置了`step`方法执行单步优化返回损失值。 - -MindSpore:需要分别传入期望更新的参数`var`,`accum`,`accum_update`,`grad`。 - -## 代码示例 - -```python -# The following implements Adadelta with MindSpore. 
-import numpy as np -import torch -import mindspore.nn as nn -from mindspore import Tensor, Parameter -import mindspore.ops as ops -from mindspore import dtype as mstype - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.apply_adadelta = ops.ApplyAdadelta() - self.var = Parameter(Tensor(np.random.rand(1, 1).astype(np.float32)), name="var") - self.accum = Parameter(Tensor(np.random.rand(1, 1).astype(np.float32)), name="accum") - self.accum_update = Parameter(Tensor(np.random.rand(1, 1).astype(np.float32)), name="accum_update") - def construct(self, lr, rho, epsilon, grad): - return self.apply_adadelta(self.var, self.accum, self.accum_update, lr, rho, epsilon, grad) - -np.random.seed(0) -net = Net() -lr = Tensor(0.001, mstype.float32) -rho = Tensor(0.0, mstype.float32) -epsilon = Tensor(1e-6, mstype.float32) -grad = Tensor(np.random.rand(1, 1).astype(np.float32)) -var, accum, accum_update = net(lr, rho, epsilon, grad) -print(var) -print(accum) -print(accum_update) -# Out: -# [[0.5480]] -# [[0.2969]] -# [[0.6028]] - -# The following implements Adadelta with torch. 
-input_x = torch.tensor(np.random.rand(1, 20).astype(np.float32)) -input_y = torch.tensor([1.]) -net = torch.nn.Sequential(torch.nn.Linear(input_x.shape[-1], 1)) -loss = torch.nn.MSELoss() -optimizer = torch.optim.Adadelta(net.parameters()) -l = loss(net(input_x).view(-1), input_y) / 2 -optimizer.zero_grad() -l.backward() -optimizer.step() -print(loss(net(input_x).view(-1), input_y).item() / 2) -# Out: -# 0.5616 -``` \ No newline at end of file diff --git a/resource/api_mapping/ApplyAdadelta_en.md b/resource/api_mapping/ApplyAdadelta_en.md deleted file mode 100644 index e9fd0577f0b0b6bc1588a3c8deccef167934340b..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ApplyAdadelta_en.md +++ /dev/null @@ -1,84 +0,0 @@ -# Function Differences with torch.optim.Adadelta - -## torch.optim.Adadelta - -```python -class torch.optim.Adadelta( - params, - lr=1.0, - rho=0.9, - eps=1e-06, - weight_decay=0 -) -``` - -## mindspore.ops.ApplyAdadelta - -```python -class mindspore.ops.ApplyAdadelta(*args, **kwargs)( - var, - accum, - accum_update, - lr, - rho, - epsilon, - grad -) -``` - -## Differences - -PyTorch: Parameters to be optimized should be put into an iterable parameter then passed as a whole. The `step` method is also implemented to perform one single step optimization and return loss. - -MindSpore: Parameters to be updated: `var`, `accum`, `accum_update`, `grad` should be passed respectively. - -## Code Example - -```python -# The following implements Adadelta with MindSpore. 
-import numpy as np -import torch -import mindspore.nn as nn -from mindspore import Tensor, Parameter -import mindspore.ops as ops -from mindspore import dtype as mstype - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.apply_adadelta = ops.ApplyAdadelta() - self.var = Parameter(Tensor(np.random.rand(1, 1).astype(np.float32)), name="var") - self.accum = Parameter(Tensor(np.random.rand(1, 1).astype(np.float32)), name="accum") - self.accum_update = Parameter(Tensor(np.random.rand(1, 1).astype(np.float32)), name="accum_update") - def construct(self, lr, rho, epsilon, grad): - return self.apply_adadelta(self.var, self.accum, self.accum_update, lr, rho, epsilon, grad) - -np.random.seed(0) -net = Net() -lr = Tensor(0.001, mstype.float32) -rho = Tensor(0.0, mstype.float32) -epsilon = Tensor(1e-6, mstype.float32) -grad = Tensor(np.random.rand(1, 1).astype(np.float32)) -var, accum, accum_update = net(lr, rho, epsilon, grad) -print(var) -print(accum) -print(accum_update) -# Out: -# [[0.5480]] -# [[0.2969]] -# [[0.6028]] - -# The following implements Adadelta with torch. 
-input_x = torch.tensor(np.random.rand(1, 20).astype(np.float32)) -input_y = torch.tensor([1.]) -net = torch.nn.Sequential(torch.nn.Linear(input_x.shape[-1], 1)) -loss = torch.nn.MSELoss() -optimizer = torch.optim.Adadelta(net.parameters()) -l = loss(net(input_x).view(-1), input_y) / 2 -optimizer.zero_grad() -l.backward() -optimizer.step() -print(loss(net(input_x).view(-1), input_y).item() / 2) -# Out: -# 0.5616 -``` diff --git a/resource/api_mapping/ApplyAdagrad.md b/resource/api_mapping/ApplyAdagrad.md deleted file mode 100644 index 85ccdb091596a4539304aeec1cec0e8bdedcfcd4..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ApplyAdagrad.md +++ /dev/null @@ -1,80 +0,0 @@ -# 比较与torch.optim.Adagrad的功能差异 - -## torch.optim.Adagrad - -```python -class torch.optim.Adagrad( - params, - lr=0.01, - lr_decay=0, - weight_decay=0, - initial_accumulator_value=0, - eps=1e-10 -) -``` - -## mindspore.nn.ApplyAdagrad - -```python -class mindspore.nn.Adagrad( - params, - accum=0.1, - learning_rate=0.001, - update_slots=True, - loss_scale=1.0, - weight_decay=0.0 -)(grads) -``` - -## 使用方式 - -PyTorch: 需要将期望更新的参数放入1个迭代类型参数`params`后传入,且设置了`step`方法执行单步优化返回损失值。 - -MindSpore:需要分别传入期望更新的参数`grads`,`params`。 - -## 代码示例 - -```python -# The following implements Adagrad with MindSpore. 
-import numpy as np -import torch -import mindspore.nn as nn -from mindspore import Tensor, Parameter -import mindspore.ops as ops -from mindspore import dtype as mstype - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.apply_adagrad = ops.ApplyAdagrad() - self.var = Parameter(Tensor(np.random.rand(1, 1).astype(np.float32)), name="var") - self.accum = Parameter(Tensor(np.random.rand(1, 1).astype(np.float32)), name="accum") - - def construct(self, lr, grad): - return self.apply_adagrad(self.var, self.accum, lr, grad) - -np.random.seed(0) -net = Net() -lr = Tensor(0.001, mstype.float32) -grad = Tensor(np.random.rand(1, 1).astype(np.float32)) -var, accum = net(lr, grad) -print(var) -print(accum) -# Out: -# [[0.5482]] -# [[1.0785]] - -# The following implements Adagrad with torch. -input_x = torch.tensor(np.random.rand(1, 20).astype(np.float32)) -input_y = torch.tensor([1.]) -net = torch.nn.Sequential(torch.nn.Linear(input_x.shape[-1], 1)) -loss = torch.nn.MSELoss() -optimizer = torch.optim.Adagrad(net.parameters()) -l = loss(net(input_x).view(-1), input_y) / 2 -optimizer.zero_grad() -l.backward() -optimizer.step() -print(loss(net(input_x).view(-1), input_y).item() / 2) -# Out: -# 0.1830 -``` diff --git a/resource/api_mapping/ApplyAdagrad_en.md b/resource/api_mapping/ApplyAdagrad_en.md deleted file mode 100644 index 2f3d349f5f8f58c0ba8a6fd6492059426d6c1716..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ApplyAdagrad_en.md +++ /dev/null @@ -1,80 +0,0 @@ -# Function Differences with torch.optim.Adagrad - -## torch.optim.Adagrad - -```python -class torch.optim.Adagrad( - params, - lr=0.01, - lr_decay=0, - weight_decay=0, - initial_accumulator_value=0, - eps=1e-10 -) -``` - -## mindspore.nn.ApplyAdagrad - -```python -class mindspore.nn.Adagrad( - params, - accum=0.1, - learning_rate=0.001, - update_slots=True, - loss_scale=1.0, - weight_decay=0.0 -)(grads) -``` - -## Differences - -PyTorch: Parameters to be optimized 
should be put into an iterable parameter then passed as a whole. The `step` method is also implemented to perform one single step optimization and return loss. - -MindSpore: Parameters to be updated: `grads`, `params` should be passed respectively. - -## Code Example - -```python -# The following implements Adagrad with MindSpore. -import numpy as np -import torch -import mindspore.nn as nn -from mindspore import Tensor, Parameter -import mindspore.ops as ops -from mindspore import dtype as mstype - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.apply_adagrad = ops.ApplyAdagrad() - self.var = Parameter(Tensor(np.random.rand(1, 1).astype(np.float32)), name="var") - self.accum = Parameter(Tensor(np.random.rand(1, 1).astype(np.float32)), name="accum") - - def construct(self, lr, grad): - return self.apply_adagrad(self.var, self.accum, lr, grad) - -np.random.seed(0) -net = Net() -lr = Tensor(0.001, mstype.float32) -grad = Tensor(np.random.rand(1, 1).astype(np.float32)) -var, accum = net(lr, grad) -print(var) -print(accum) -# Out: -# [[0.5482]] -# [[1.0785]] - -# The following implements Adagrad with torch. 
-input_x = torch.tensor(np.random.rand(1, 20).astype(np.float32)) -input_y = torch.tensor([1.]) -net = torch.nn.Sequential(torch.nn.Linear(input_x.shape[-1], 1)) -loss = torch.nn.MSELoss() -optimizer = torch.optim.Adagrad(net.parameters()) -l = loss(net(input_x).view(-1), input_y) / 2 -optimizer.zero_grad() -l.backward() -optimizer.step() -print(loss(net(input_x).view(-1), input_y).item() / 2) -# Out: -# 0.1830 -``` diff --git a/resource/api_mapping/ArgMaxWithValue.md b/resource/api_mapping/ArgMaxWithValue.md deleted file mode 100644 index 366cef38423edea54b1c727742380cd75e83ce93..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ArgMaxWithValue.md +++ /dev/null @@ -1,54 +0,0 @@ -# 比较与torch.max的功能差异 - -## torch.max - -```python -torch.max( - input, - dim, - keepdim=False) -``` - -## mindspore.ops.ArgMaxWithValue - -```python -class mindspore.ops.ArgMaxWithValue( - axis=0, - keep_dims=False -)(input_x) -``` - -## 使用方式 - -PyTorch: 输出为元组(最大值, 最大值的索引)。 - -MindSpore:输出为元组(最大值的索引, 最大值)。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# Output tuple(index of max, max). -input_x = Tensor(np.array([0.0, 0.4, 0.6, 0.7, 0.1]), mindspore.float32) -argmax = ops.ArgMaxWithValue() -index, output = argmax(input_x) -print(index) -print(output) -# Out: -# 3 -# 0.7 - -# Output tuple(max, index of max). 
-input_x = torch.tensor([0.0, 0.4, 0.6, 0.7, 0.1]) -output, index = torch.max(input_x, 0) -print(index) -print(output) -# Out: -# tensor(3) -# tensor(0.7000) -``` \ No newline at end of file diff --git a/resource/api_mapping/ArgMaxWithValue_en.md b/resource/api_mapping/ArgMaxWithValue_en.md deleted file mode 100644 index faec5c7be1802af5532f669c5a6d4d250a23d18c..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ArgMaxWithValue_en.md +++ /dev/null @@ -1,54 +0,0 @@ -# Function Differences with torch.max - -## torch.max - -```python -torch.max( - input, - dim, - keepdim=False) -``` - -## mindspore.ops.ArgMaxWithValue - -```python -class mindspore.ops.ArgMaxWithValue( - axis=0, - keep_dims=False -)(input_x) -``` - -## Differences - -PyTorch: Output tuple(max, index of max). - -MindSpore: Output tuple(index of max, max). - -## Code Example - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# Output tuple(index of max, max). -input_x = Tensor(np.array([0.0, 0.4, 0.6, 0.7, 0.1]), mindspore.float32) -argmax = ops.ArgMaxWithValue() -index, output = argmax(input_x) -print(index) -print(output) -# Out: -# 3 -# 0.7 - -# Output tuple(max, index of max). 
-input_x = torch.tensor([0.0, 0.4, 0.6, 0.7, 0.1]) -output, index = torch.max(input_x, 0) -print(index) -print(output) -# Out: -# tensor(3) -# tensor(0.7000) -``` diff --git a/resource/api_mapping/ArgMinWithValue.md b/resource/api_mapping/ArgMinWithValue.md deleted file mode 100644 index fb0a923b9d5389227505ed1a43d7c9084279037b..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ArgMinWithValue.md +++ /dev/null @@ -1,55 +0,0 @@ -# 比较与torch.min的功能差异 - -## torch.min - -```python -torch.min( - input, - dim, - keepdim=False) -``` - -## mindspore.ops.ArgMinWithValue - -```python -class mindspore.ops.ArgMinWithValue( - axis=0, - keep_dims=False -)(input_x) -``` - -## 使用方式 - -PyTorch: 输出为元组(最小值, 最小值的索引)。 - -MindSpore:输出为元组(最小值的索引, 最小值)。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# Output tuple(index of min, min). -input_x = Tensor(np.array([0.0, 0.4, 0.6, 0.7, 0.1]), mindspore.float32) -argmin = ops.ArgMinWithValue() -index, output = argmin(input_x) -print(index) -print(output) -# Out: -# 0 -# 0.0 - -# Output tuple(min, index of min). -input_x = torch.tensor([0.0, 0.4, 0.6, 0.7, 0.1]) -output, index = torch.min(input_x, 0) -print(index) -print(output) -# Out: -# tensor(0) -# tensor(0.) -``` - diff --git a/resource/api_mapping/ArgMinWithValue_en.md b/resource/api_mapping/ArgMinWithValue_en.md deleted file mode 100644 index 0ebc0936fe9edc4256d8043a99f63d5f4116d223..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ArgMinWithValue_en.md +++ /dev/null @@ -1,54 +0,0 @@ -# Function Differences with torch.min - -## torch.min - -```python -torch.min( - input, - dim, - keepdim=False) -``` - -## mindspore.ops.ArgMinWithValue - -```python -class mindspore.ops.ArgMinWithValue( - axis=0, - keep_dims=False -)(input_x) -``` - -## Differences - -PyTorch: Output tuple(min, index of min). - -MindSpore: Output tuple(index of min, min). 
- -## Code Example - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# Output tuple(index of min, min). -input_x = Tensor(np.array([0.0, 0.4, 0.6, 0.7, 0.1]), mindspore.float32) -argmin = ops.ArgMinWithValue() -index, output = argmin(input_x) -print(index) -print(output) -# Out: -# 0 -# 0.0 - -# Output tuple(min, index of min). -input_x = torch.tensor([0.0, 0.4, 0.6, 0.7, 0.1]) -output, index = torch.min(input_x, 0) -print(index) -print(output) -# Out: -# tensor(0) -# tensor(0.) -``` diff --git a/resource/api_mapping/AvgPool2d.md b/resource/api_mapping/AvgPool2d.md deleted file mode 100644 index 16b31556df4e0aede223f9fd4e826bce17d4b285..0000000000000000000000000000000000000000 --- a/resource/api_mapping/AvgPool2d.md +++ /dev/null @@ -1,53 +0,0 @@ -# 比较与torch.nn.functional.adaptive_avg_pool2d的功能差异 - -## torch.nn.functional.adaptive_avg_pool2d - -```python -torch.nn.functional.adaptive_avg_pool2d( - input, - output_size -) -``` - -## mindspore.nn.AvgPool2d - -```python -class mindspore.nn.AvgPool2d( - kernel_size=1, - stride=1, - pad_mode='valid', - data_format='NCHW' -)(input) -``` - -## 使用方式 - -PyTorch: 对输入数据的H与W维执行平均池化。使用上,仅需指定池化后数据H和W维的期望形状即可。无需用户手工计算并指定`kernel_size`、`stride`等。 - -MindSpore:需用户手工计算并指定`kernel_size`、`stride`等。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -x = np.random.randint(0, 10, [1, 2, 4, 4]) - -# In MindSpore, parameters kernel_size and stride should be calculated in advance and set for pooling. -pool = nn.AvgPool2d(kernel_size=3, stride=1) -input_x = Tensor(x, mindspore.float32) -output = pool(input_x) -print(output.shape) -# Out: -# (1, 2, 2, 2) - -# In torch, the shape of output can be set directly for pooling. 
-input_x = torch.tensor(x.astype(np.float32)) -output = torch.nn.functional.adaptive_avg_pool2d(input_x, (2, 2)) -print(output.shape) -# Out: -# torch.Size([1, 2, 2, 2]) -``` diff --git a/resource/api_mapping/AvgPool2d_en.md b/resource/api_mapping/AvgPool2d_en.md deleted file mode 100644 index fbbb4e723c870f0d041d539adaa7fe5fa9aa05c4..0000000000000000000000000000000000000000 --- a/resource/api_mapping/AvgPool2d_en.md +++ /dev/null @@ -1,53 +0,0 @@ -# Function Differences with torch.nn.functional.adaptive_avg_pool2d - -## torch.nn.functional.adaptive_avg_pool2d - -```python -torch.nn.functional.adaptive_avg_pool2d( - input, - output_size -) -``` - -## mindspore.nn.AvgPool2d - -```python -class mindspore.nn.AvgPool2d( - kernel_size=1, - stride=1, - pad_mode='valid', - data_format='NCHW' -)(input) -``` - -## Differences - -PyTorch: Performs average pooling for H and W dimensions of the input data. You only need to specify the desired shape of the H and W dimensions of data after pooling. It is unnecessary to manually calculate and specify the `kernel_size`, `stride`, etc. - -MindSpore:The user needs to manually calculate and specify the `kernel_size`, `stride`, etc. - -## Code Example - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -x = np.random.randint(0, 10, [1, 2, 4, 4]) - -# In MindSpore, parameters kernel_size and stride should be calculated in advance and set for pooling. -pool = nn.AvgPool2d(kernel_size=3, stride=1) -input_x = Tensor(x, mindspore.float32) -output = pool(input_x) -print(output.shape) -# Out: -# (1, 2, 2, 2) - -# In torch, the shape of output can be set directly for pooling. 
-input_x = torch.tensor(x.astype(np.float32)) -output = torch.nn.functional.adaptive_avg_pool2d(input_x, (2, 2)) -print(output.shape) -# Out: -# torch.Size([1, 2, 2, 2]) -``` \ No newline at end of file diff --git a/resource/api_mapping/BroadcastTo.md b/resource/api_mapping/BroadcastTo.md deleted file mode 100644 index e6ddc836f565f17e31f3ae24c4f04eba16877562..0000000000000000000000000000000000000000 --- a/resource/api_mapping/BroadcastTo.md +++ /dev/null @@ -1,49 +0,0 @@ -# 比较与torch.broadcast_tensors的功能差异 - -## torch.broadcast_tensors - -```python -torch.broadcast_tensors( - *tensors -) -``` - -## mindspore.ops.BroadcastTo - -```python -class mindspore.ops.BroadcastTo(shape)(input_x) -``` - -## 使用方式 - -PyTorch: 按照[一定的规则](https://pytorch.org/docs/stable/notes/broadcasting.html#broadcasting-semantics) -将输入的若干个tensor广播成1个tensor。 - -MindSpore:将一个给定的tensor广播成指定形状的tensor。 - -## 代码示例 - -```python -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, the parameter shape is passed to reshape input_x. -shape = (2, 3) -input_x = Tensor(np.array([1, 2, 3]).astype(np.float32)) -broadcast_to = ops.BroadcastTo(shape) -output = broadcast_to(input_x) -print(output.shape) -# Out: -# (2, 3) - -# In torch, two tensors x and y should be separately passed. -# And the final output of the tensor's shape will be determined by these inputs' shapes according to rules mentioned above. 
-x = torch.Tensor(np.array([1, 2, 3]).astype(np.float32)).view(1, 3) -y = torch.Tensor(np.array([4, 5]).astype(np.float32)).view(2, 1) -m, n = torch.broadcast_tensor(x, y) -print(m.shape) -# Out: -# torch.Size([2, 3]) -``` \ No newline at end of file diff --git a/resource/api_mapping/BroadcastTo_en.md b/resource/api_mapping/BroadcastTo_en.md deleted file mode 100644 index e71e9a7d80c1d3a0466c9661450e43c208976cb1..0000000000000000000000000000000000000000 --- a/resource/api_mapping/BroadcastTo_en.md +++ /dev/null @@ -1,49 +0,0 @@ -# Function Differences with torch.broadcast_tensors - -## torch.broadcast_tensors - -```python -torch.broadcast_tensors( - *tensors -) -``` - -## mindspore.ops.BroadcastTo - -```python -class mindspore.ops.BroadcastTo(shape)(input_x) -``` - -## Differences - -PyTorch: Broadcasts given tensors according to [Broadcasting-semantics](https://pytorch.org/docs/stable/notes/broadcasting.html#broadcasting-semantics) -. - -MindSpore:Broadcasts a given Tensor to a specified shape Tensor. - -## Code Example - -```python -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, the parameter shape is passed to reshape input_x. -shape = (2, 3) -input_x = Tensor(np.array([1, 2, 3]).astype(np.float32)) -broadcast_to = ops.BroadcastTo(shape) -output = broadcast_to(input_x) -print(output.shape) -# Out: -# (2, 3) - -# In torch, two tensors x and y should be separately passed. -# And the final output of the tensor's shape will be determined by these inputs' shapes according to rules mentioned above. 
-x = torch.Tensor(np.array([1, 2, 3]).astype(np.float32)).view(1, 3) -y = torch.Tensor(np.array([4, 5]).astype(np.float32)).view(2, 1) -m, n = torch.broadcast_tensor(x, y) -print(m.shape) -# Out: -# torch.Size([2, 3]) -``` \ No newline at end of file diff --git a/resource/api_mapping/Cast.md b/resource/api_mapping/Cast.md deleted file mode 100644 index 16491defd113433bb8440aaa1bee97a4a744a62e..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Cast.md +++ /dev/null @@ -1,51 +0,0 @@ -# 比较与torch.Tensor.float的功能差异 - -## torch.Tensor.float - -```python -torch.Tensor.float() -``` - -## mindspore.ops.Cast - -```python -class mindspore.ops.Cast(*args, **kwargs)( - input_x, - type -) -``` - -## 使用方式 - -PyTorch: 将tensor类型转成为float类型。 - -MindSpore:将输入类型转换为指定的数据类型。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, you can specify the data type to be transformed into. -input_x = Tensor(np.random.randn(2, 3, 4, 5).astype(np.float32)) -cast = ops.Cast() -output = cast(input_x, mindspore.int32) -print(output.dtype) -print(output.shape) -# Out: -# Int32 -# (2, 3, 4, 5) - -# In torch, the input will be transformed into float. 
-input_x = torch.Tensor(np.random.randn(2, 3, 4, 5).astype(np.int32)) -output = input_x.float() -print(output.dtype) -print(output.shape) -# Out: -# torch.float32 -# torch.Size([2, 3, 4, 5]) -``` \ No newline at end of file diff --git a/resource/api_mapping/Cast_en.md b/resource/api_mapping/Cast_en.md deleted file mode 100644 index 8e22b4f33ff5781572e35cdb77c5692690146a0d..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Cast_en.md +++ /dev/null @@ -1,51 +0,0 @@ -# Function Differences with torch.Tensor.float - -## torch.Tensor.float - -```python -torch.Tensor.float() -``` - -## mindspore.ops.Cast - -```python -class mindspore.ops.Cast(*args, **kwargs)( - input_x, - type -) -``` - -## Differences - -PyTorch: Changes the tensor type to float. - -MindSpore:Converts the input type to the specified data type. - -## Code Example - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, you can specify the data type to be transformed into. -input_x = Tensor(np.random.randn(2, 3, 4, 5).astype(np.float32)) -cast = ops.Cast() -output = cast(input_x, mindspore.int32) -print(output.dtype) -print(output.shape) -# Out: -# Int32 -# (2, 3, 4, 5) - -# In torch, the input will be transformed into float. 
-input_x = torch.Tensor(np.random.randn(2, 3, 4, 5).astype(np.int32)) -output = input_x.float() -print(output.dtype) -print(output.shape) -# Out: -# torch.float32 -# torch.Size([2, 3, 4, 5]) -``` \ No newline at end of file diff --git a/resource/api_mapping/Constant.md b/resource/api_mapping/Constant.md deleted file mode 100644 index b81998b3b1b49d7ffa3d2f79700a83dce9cc7c61..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Constant.md +++ /dev/null @@ -1,48 +0,0 @@ -# 比较与torch.nn.init.constant_的功能差异 - -## torch.nn.init.constant_ - -```python -torch.nn.init.constant_( - tensor, - val -) -``` - -## mindspore.common.initializer.Constant - -```python -class mindspore.common.initializer.Constant(value)(arr) -``` - -## 使用方式 - -PyTorch: 以常量`val`填充输入的tensor。 - -MindSpore:以`value`(整型或numpy数组)填充输入的numpy数组。 - -## 代码示例 - -```python -import mindspore -import torch -import numpy as np - -# In MindSpore, fill a constant array with value(int or numpy array). -input_constant = np.array([1, 2, 3]) -constant_init = mindspore.common.initializer.Constant(value=1) -out_constant = constant_init(input_constant) -print(out_constant) -# Out: -# [1 1 1] - -# In torch, fill in the input tensor with constant val. 
-input_constant = np.array([1, 2, 3]) -out_constant = torch.nn.init.constant_( - tensor=torch.tensor(input_constant), - val=1 -) -print(out_constant) -# Out: -# tensor([1., 1., 1.]) -``` \ No newline at end of file diff --git a/resource/api_mapping/Constant_en.md b/resource/api_mapping/Constant_en.md deleted file mode 100644 index dda3801912d91434456aeeb244eff99f2c2e97c3..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Constant_en.md +++ /dev/null @@ -1,48 +0,0 @@ -# Function Differences with torch.nn.init.constant_ - -## torch.nn.init.constant_ - -```python -torch.nn.init.constant_( - tensor, - val -) -``` - -## mindspore.common.initializer.Constant - -```python -class mindspore.common.initializer.Constant(value)(arr) -``` - -## Differences - -PyTorch: Fills in the input tensor with constant `val`. - -MindSpore:Fills in a constant array with `value`(int or numpy array). - -## Code Example - -```python -import mindspore -import torch -import numpy as np - -# In MindSpore, fill a constant array with value(int or numpy array). -input_constant = np.array([1, 2, 3]) -constant_init = mindspore.common.initializer.Constant(value=1) -out_constant = constant_init(input_constant) -print(out_constant) -# Out: -# [1 1 1] - -# In torch, fill in the input tensor with constant val. 
-input_constant = np.array([1, 2, 3]) -out_constant = torch.nn.init.constant_( - tensor=torch.tensor(input_constant), - val=1 -) -print(out_constant) -# Out: -# tensor([1., 1., 1.]) -``` \ No newline at end of file diff --git a/resource/api_mapping/Dense.md b/resource/api_mapping/Dense.md deleted file mode 100644 index 74ed6449ad5b87457dedda8f20e190b946a67ea0..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Dense.md +++ /dev/null @@ -1,58 +0,0 @@ -# 比较与torch.nn.Linear的功能差异 - -## torch.nn.Linear - -```python -torch.nn.Linear( - in_features, - out_features, - bias=True -) -``` - -## mindspore.nn.Dense - -```python -class mindspore.nn.Dense( - in_channels, - out_channels, - weight_init='normal', - bias_init='zeros', - has_bias=True, - activation=None -)(input) -``` - -## 使用方式 - -Pytorch:对传入数据应用线性变换,默认权重矩阵和偏移矩阵都由均匀分布初始化。 - -MindSpore:对传入数据应用线性变换,在输出数据之前可以选择应用激活函数`activation`,默认权重矩阵由标准正态分布初始化,偏移矩阵初始化为0。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -# In MindSpore, default weight will be initialized through standard normal distribution. -# Default bias will be initialized by zero. -# Default none activation used. -input_net = Tensor(np.array([[180, 234, 154], [244, 48, 247]]), mindspore.float32) -net = nn.Dense(3, 4) -output = net(input_net) -print(output.shape) -# Out: -# (2, 4) - -# In torch, default weight and bias will be initialized through uniform distribution. -# No parameter to set the activation. 
-input_net = torch.Tensor(np.array([[180, 234, 154], [244, 48, 247]])) -net = torch.nn.Linear(3, 4) -output = net(input_net) -print(output.shape) -# Out: -# torch.Size([2, 4]) -``` \ No newline at end of file diff --git a/resource/api_mapping/Dense_en.md b/resource/api_mapping/Dense_en.md deleted file mode 100644 index 787004c006d1a72802d391921cd5a2ec6f7a07bf..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Dense_en.md +++ /dev/null @@ -1,58 +0,0 @@ -# Function Differences with torch.nn.Linear - -## torch.nn.Linear - -```python -torch.nn.Linear( - in_features, - out_features, - bias=True -) -``` - -## mindspore.nn.Dense - -```python -class mindspore.nn.Dense( - in_channels, - out_channels, - weight_init='normal', - bias_init='zeros', - has_bias=True, - activation=None -)(input) -``` - -## Differences - -Pytorch: Applies a linear transformation to the incoming data. - -MindSpore: Applies a linear transformation to the incoming data, and applies the `activation` function before outputting the data. - -## Code Example - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -# In MindSpore, default weight will be initialized through standard normal distribution. -# Default bias will be initialized by zero. -# Default none activation used. -input_net = Tensor(np.array([[180, 234, 154], [244, 48, 247]]), mindspore.float32) -net = nn.Dense(3, 4) -output = net(input_net) -print(output.shape) -# Out: -# (2, 4) - -# In torch, default weight and bias will be initialized through uniform distribution. -# No parameter to set the activation. 
-input_net = torch.Tensor(np.array([[180, 234, 154], [244, 48, 247]])) -net = torch.nn.Linear(3, 4) -output = net(input_net) -print(output.shape) -# Out: -# torch.Size([2, 4]) -``` \ No newline at end of file diff --git a/resource/api_mapping/FastGelu.md b/resource/api_mapping/FastGelu.md deleted file mode 100644 index ffeea80d784762a51c15a675495037b3c00d38f9..0000000000000000000000000000000000000000 --- a/resource/api_mapping/FastGelu.md +++ /dev/null @@ -1,50 +0,0 @@ -# 比较与torch.nn.GELU的功能差异 - -## torch.nn.GELU - -```python -class torch.nn.GELU()(input) -``` - -## mindspore.nn.FastGelu - -```python -class mindspore.nn.FastGelu()(input_data) -``` - -## 使用方式 - -PyTorch: 基于高斯分布的累积分布函数。 - -MindSpore:采用与PyTorch不同的计算公式。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -def test_me(): - input_x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]), mindspore.float32) - fast_gelu = nn.FastGelu() - output = fast_gelu(input_x) - print(output) - -def test_torch(): - input_x = torch.Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]])) - gelu = torch.nn.GELU() - output = gelu(input_x) - print(output) - -if __name__ == '__main__': - test_me() - test_torch() - -# Out: -# [[-1.5419e-01 3.9922e+00 -9.7474e-06] -# [ 1.9375e+00 -1.0053e-03 8.9824e+00]] -# tensor([[-1.5866e-01, 3.9999e+00, -0.0000e+00], -# [ 1.9545e+00, -1.4901e-06, 9.0000e+00]]) -``` \ No newline at end of file diff --git a/resource/api_mapping/FastGelu_en.md b/resource/api_mapping/FastGelu_en.md deleted file mode 100644 index ff00b1ad50da20c7cfaa9f686f57d3d30649a666..0000000000000000000000000000000000000000 --- a/resource/api_mapping/FastGelu_en.md +++ /dev/null @@ -1,50 +0,0 @@ -# Function Differences with torch.nn.GEL - -## torch.nn.GELU - -```python -class torch.nn.GELU()(input) -``` - -## mindspore.nn.FastGelu - -```python -class mindspore.nn.FastGelu()(input_data) -``` - -## Differences - -PyTorch: Cumulative distribution function based on 
Gaussian distribution. - -MindSpore:Compared with PyTorch, MindSpore adopts a different calculation formula and has better performance. - -## Code Example - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -def test_me(): - input_x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]), mindspore.float32) - fast_gelu = nn.FastGelu() - output = fast_gelu(input_x) - print(output) - -def test_torch(): - input_x = torch.Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]])) - gelu = torch.nn.GELU() - output = gelu(input_x) - print(output) - -if __name__ == '__main__': - test_me() - test_torch() - -# Out: -# [[-1.5419e-01 3.9922e+00 -9.7474e-06] -# [ 1.9375e+00 -1.0053e-03 8.9824e+00]] -# tensor([[-1.5866e-01, 3.9999e+00, -0.0000e+00], -# [ 1.9545e+00, -1.4901e-06, 9.0000e+00]]) -``` \ No newline at end of file diff --git a/resource/api_mapping/Flatten.md b/resource/api_mapping/Flatten.md deleted file mode 100644 index 792bb2d3165f8c121cefaebf7c954611e71036ea..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Flatten.md +++ /dev/null @@ -1,54 +0,0 @@ -# 比较与torch.flatten的功能差异 - -## torch.flatten - -```python -torch.flatten( - input, - start_dim=0, - end_dim=-1 -) -``` - -## mindspore.ops.Flatten - -```python -class mindspore.ops.Flatten(*args, **kwargs)(input_x) -``` - -## 使用方式 - -PyTorch: 支持指定维度对元素进行展开。 - -MindSpore:仅支持保留第0维元素,对其余维度的元素进行展开。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, only the 0th dimension will be reserved and the rest will be flattened. -input_tensor = Tensor(np.ones(shape=[1, 2, 3, 4]), mindspore.float32) -flatten = ops.Flatten() -output = flatten(input_tensor) -print(output.shape) -# Out: -# (1, 24) - -# In torch, the dimension to reserve will be specified and the rest will be flattened. 
-input_tensor = torch.Tensor(np.ones(shape=[1, 2, 3, 4])) -output1 = torch.flatten(input=input_tensor, start_dim=1) -print(output1.shape) -# Out: -# torch.Size([1, 24]) - -input_tensor = torch.Tensor(np.ones(shape=[1, 2, 3, 4])) -output2 = torch.flatten(input=input_tensor, start_dim=2) -print(output2.shape) -# Out: -# torch.Size([1, 2, 12]) -``` \ No newline at end of file diff --git a/resource/api_mapping/Flatten_en.md b/resource/api_mapping/Flatten_en.md deleted file mode 100644 index 92be482601724d250767a33f9af32b9bb3fd69fa..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Flatten_en.md +++ /dev/null @@ -1,54 +0,0 @@ -# Function Differences with torch.flatten - -## torch.flatten - -```python -torch.flatten( - input, - start_dim=0, - end_dim=-1 -) -``` - -## mindspore.ops.Flatten - -```python -class mindspore.ops.Flatten(*args, **kwargs)(input_x) -``` - -## Differences - -PyTorch: Supports the flatten of elements by specified dimensions. - -MindSpore:Only the 0th dimension element is reserved and the elements of the remaining dimensions are flattened. - -## Code Example - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, only the 0th dimension will be reserved and the rest will be flattened. -input_tensor = Tensor(np.ones(shape=[1, 2, 3, 4]), mindspore.float32) -flatten = ops.Flatten() -output = flatten(input_tensor) -print(output.shape) -# Out: -# (1, 24) - -# In torch, the dimension to reserve will be specified and the rest will be flattened. 
-input_tensor = torch.Tensor(np.ones(shape=[1, 2, 3, 4])) -output1 = torch.flatten(input=input_tensor, start_dim=1) -print(output1.shape) -# Out: -# torch.Size([1, 24]) - -input_tensor = torch.Tensor(np.ones(shape=[1, 2, 3, 4])) -output2 = torch.flatten(input=input_tensor, start_dim=2) -print(output2.shape) -# Out: -# torch.Size([1, 2, 12]) -``` \ No newline at end of file diff --git a/resource/api_mapping/FloorDiv.md b/resource/api_mapping/FloorDiv.md deleted file mode 100644 index 32972c64c743ff52a8035f89e70a153c72bc8b26..0000000000000000000000000000000000000000 --- a/resource/api_mapping/FloorDiv.md +++ /dev/null @@ -1,53 +0,0 @@ -# 比较与torch.floor_divide的功能差异 - -## torch.floor_divide - -```python -torch.floor_divide( - input, - other, - out=None -) -``` - -## mindspore.ops.FloorDiv - -```python -class mindspore.ops.FloorDiv(*args, **kwargs)( - input_x, - input_y -) -``` - -## 使用方式 - -PyTorch: 结果是往0方向取整,而非真的向下取整。例如相除为-0.9,取整后的结果为0。 - -MindSpore:结果按floor方式向下取整。例如相除为-0.9,取整后的结果为-1。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, the output will be rounded toward the floor, so, after division, the output -0.33 will be rounded to -1. -input_x = Tensor(np.array([2, 4, -1]), mindspore.int32) -input_y = Tensor(np.array([3, 3, 3]), mindspore.int32) -floor_div = ops.FloorDiv() -output = floor_div(input_x, input_y) -print(output) -# Out: -# [0 1 -1] - -# In torch, the output will be rounded toward 0, so, after division, the output -0.33 will be rounded to 0. 
-input_x = torch.tensor(np.array([2, 4, -1])) -input_y = torch.tensor(np.array([3, 3, 3])) -output = torch.floor_divide(input_x, input_y) -print(output) -# Out: -# tensor([0, 1, 0]) -``` \ No newline at end of file diff --git a/resource/api_mapping/FloorDiv_en.md b/resource/api_mapping/FloorDiv_en.md deleted file mode 100644 index bfe5f84408a6e0aaf69c1d1ed5195b9d3242b1d4..0000000000000000000000000000000000000000 --- a/resource/api_mapping/FloorDiv_en.md +++ /dev/null @@ -1,53 +0,0 @@ -# Function Differences with torch.floor_divide - -## torch.floor_divide - -```python -torch.floor_divide( - input, - other, - out=None -) -``` - -## mindspore.ops.FloorDiv - -```python -class mindspore.ops.FloorDiv(*args, **kwargs)( - input_x, - input_y -) -``` - -## Differences - -PyTorch: The output will be rounded toward 0 rather than the floor. - -MindSpore: The output will be rounded exactly toward floor. - -## Code Example - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, the output will be rounded toward the floor, so, after division, the output -0.33 will be rounded to -1. -input_x = Tensor(np.array([2, 4, -1]), mindspore.int32) -input_y = Tensor(np.array([3, 3, 3]), mindspore.int32) -floor_div = ops.FloorDiv() -output = floor_div(input_x, input_y) -print(output) -# Out: -# [0 1 -1] - -# In torch, the output will be rounded toward 0, so, after division, the output -0.33 will be rounded to 0. 
-input_x = torch.tensor(np.array([2, 4, -1])) -input_y = torch.tensor(np.array([3, 3, 3])) -output = torch.floor_divide(input_x, input_y) -print(output) -# Out: -# tensor([0, 1, 0]) -``` \ No newline at end of file diff --git a/resource/api_mapping/L2Normalize.md b/resource/api_mapping/L2Normalize.md deleted file mode 100644 index 92ba178cc56a7ced6ffc937cdfac587d2573976b..0000000000000000000000000000000000000000 --- a/resource/api_mapping/L2Normalize.md +++ /dev/null @@ -1,56 +0,0 @@ -# 比较与torch.nn.functional.normalize的功能差异 - -## torch.nn.functional.normalize - -```python -torch.nn.functional.normalize( - input, - p=2, - dim=1, - eps=1e-12, - out=None -) -``` - -## mindspore.ops.L2Normalize - -```python -class mindspore.ops.L2Normalize( - axis=0, - epsilon=1e-4 -)(input_x) -``` - -## 使用方式 - -PyTorch: 支持通过指定参数`p`来使用Lp范式。 - -MindSpore:仅支持L2范式。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, you can directly pass data into the function, and the default dimension is 0. -l2_normalize = ops.L2Normalize() -input_x = Tensor(np.array([1.0, 2.0, 3.0]), mindspore.float32) -output = l2_normalize(input_x) -print(output) -# Out: -# [0.2673 0.5345 0.8018] - -# In torch, parameter p should be set to determine it is a lp normalization, and the default dimension is 1. 
-input_x = torch.tensor(np.array([1.0, 2.0, 3.0])) -outputL2 = torch.nn.functional.normalize(input=input_x, p=2, dim=0) -outputL3 = torch.nn.functional.normalize(input=input_x, p=3, dim=0) -print(outputL2) -print(outputL3) -# Out: -# tensor([0.2673, 0.5345, 0.8018], dtype=torch.float64) -# tensor([0.3029, 0.6057, 0.9086], dtype=torch.float64) -``` \ No newline at end of file diff --git a/resource/api_mapping/L2Normalize_en.md b/resource/api_mapping/L2Normalize_en.md deleted file mode 100644 index 1fc4d3089e7c03bb26fd8e721eb4afe588c333d9..0000000000000000000000000000000000000000 --- a/resource/api_mapping/L2Normalize_en.md +++ /dev/null @@ -1,56 +0,0 @@ -# Function Differences with torch.nn.functional.normalize - -## torch.nn.functional.normalize - -```python -torch.nn.functional.normalize( - input, - p=2, - dim=1, - eps=1e-12, - out=None -) -``` - -## mindspore.ops.L2Normalize - -```python -class mindspore.ops.L2Normalize( - axis=0, - epsilon=1e-4 -)(input_x) -``` - -## Differences - -PyTorch: Supports using the LP paradigm by specifying the parameter `p`. - -MindSpore:Only L2 paradigm is supported. - -## Code Example - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, you can directly pass data into the function, and the default dimension is 0. -l2_normalize = ops.L2Normalize() -input_x = Tensor(np.array([1.0, 2.0, 3.0]), mindspore.float32) -output = l2_normalize(input_x) -print(output) -# Out: -# [0.2673 0.5345 0.8018] - -# In torch, parameter p should be set to determine it is a lp normalization, and the default dimension is 1. 
-input_x = torch.tensor(np.array([1.0, 2.0, 3.0])) -outputL2 = torch.nn.functional.normalize(input=input_x, p=2, dim=0) -outputL3 = torch.nn.functional.normalize(input=input_x, p=3, dim=0) -print(outputL2) -print(outputL3) -# Out: -# tensor([0.2673, 0.5345, 0.8018], dtype=torch.float64) -# tensor([0.3029, 0.6057, 0.9086], dtype=torch.float64) -``` \ No newline at end of file diff --git a/resource/api_mapping/MatrixDiag.md b/resource/api_mapping/MatrixDiag.md deleted file mode 100644 index 95cdd518d9e3003e25ce1dfead620c1ebf2fea69..0000000000000000000000000000000000000000 --- a/resource/api_mapping/MatrixDiag.md +++ /dev/null @@ -1,68 +0,0 @@ -# 比较与torch.diag的功能差异 - -## torch.diag - -```python -torch.diag( - input, - diagonal=0, - out=None -) -``` - -## mindspore.nn.MatrixDiag - -```python -class mindspore.nn.MatrixDiag()(x) -``` - -## 使用方式 - -PyTorch: 仅支持1D和2D,如果输入是1D,则将返回一个2D的对角矩阵,除对角线外,均置0。如果输入是2D,则返回该矩阵的对角线上的值。同时,它支持通过参数`diagonal`指定对角线偏移量。 - -MindSpore:根据给定的值返回一个对角矩阵,对于k维的输入,将返回k+1维的对角矩阵。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -x1 = np.random.randn(2) -x2 = np.random.randn(2, 3) -x3 = np.random.randn(2, 3, 4) - -# In MindSpore, for the given k-dimension input, a k+1 dimension diagonal matrix will be returned. -matrix_diag = nn.MatrixDiag() -for n, x in enumerate([x1, x2, x3]): - try: - input_x = Tensor(x, mindspore.float32) - output = matrix_diag(input_x) - print('input shape: {}; output size: {}'.format( - str(n + 1), str(output.shape) - )) - except Exception as e: - print('ERROR: ' + str(e)) -# Out: -# input shape: 1; output size: (2, 2) -# input shape: 2; output size: (2, 3, 3) -# input shape: 3; output size: (2, 3, 4, 4) - -# In torch, output for 1-dimension and 2-dimension input will be returned based on different rules. -# If the dimension of the input is greater than 2, it will raise error. 
-for n, x in enumerate([x1, x2, x3]): - try: - input_x = torch.tensor(x) - output = torch.diag(input_x) - print('input shape: {}; output size: {}'.format( - str(n + 1), str(output.shape) - )) - except Exception as e: - print('ERROR: ' + str(e)) -# Out: -# input shape: 1; output size: torch.Size([2, 2]) -# input shape: 2; output size: torch.Size([2]) -# ERROR: matrix or a vector expected -``` \ No newline at end of file diff --git a/resource/api_mapping/MatrixDiag_en.md b/resource/api_mapping/MatrixDiag_en.md deleted file mode 100644 index 185a8aea67ba3306f3aa8e7f9b2728b91ed82fcb..0000000000000000000000000000000000000000 --- a/resource/api_mapping/MatrixDiag_en.md +++ /dev/null @@ -1,68 +0,0 @@ -# Function Differences with torch.diag - -## torch.diag - -```python -torch.diag( - input, - diagonal=0, - out=None -) -``` - -## mindspore.nn.MatrixDiag - -```python -class mindspore.nn.MatrixDiag()(x) -``` - -## Differences - -PyTorch: Only 1D and 2D are supported. If the input is a 1D Tensor, a 2D diagonal matrix will be returned, and all elements in the returned matrix are set to 0 except the diagonals. If the input is a 2D Tensor, the value on the diagonal of the matrix will be returned. It also supports diagonal offsets specified by parameter `diagonal`. - -MindSpore:Returns a diagonal matrix based on the given value, and k+1 dimensional diagonal matrix for k dimensional input. - -## Code Example - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -x1 = np.random.randn(2) -x2 = np.random.randn(2, 3) -x3 = np.random.randn(2, 3, 4) - -# In MindSpore, for the given k-dimension input, a k+1 dimension diagonal matrix will be returned. 
-matrix_diag = nn.MatrixDiag() -for n, x in enumerate([x1, x2, x3]): - try: - input_x = Tensor(x, mindspore.float32) - output = matrix_diag(input_x) - print('input shape: {}; output size: {}'.format( - str(n + 1), str(output.shape) - )) - except Exception as e: - print('ERROR: ' + str(e)) -# Out: -# input shape: 1; output size: (2, 2) -# input shape: 2; output size: (2, 3, 3) -# input shape: 3; output size: (2, 3, 4, 4) - -# In torch, output for 1-dimension and 2-dimension input will be returned based on different rules. -# If the dimension of the input is greater than 2, it will raise error. -for n, x in enumerate([x1, x2, x3]): - try: - input_x = torch.tensor(x) - output = torch.diag(input_x) - print('input shape: {}; output size: {}'.format( - str(n + 1), str(output.shape) - )) - except Exception as e: - print('ERROR: ' + str(e)) -# Out: -# input shape: 1; output size: torch.Size([2, 2]) -# input shape: 2; output size: torch.Size([2]) -# ERROR: matrix or a vector expected -``` \ No newline at end of file diff --git a/resource/api_mapping/Norm.md b/resource/api_mapping/Norm.md deleted file mode 100644 index 95921370f0a3f4b335e24931d000cf4a9add49ba..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Norm.md +++ /dev/null @@ -1,59 +0,0 @@ -# 比较与torch.norm的功能差异 - -## torch.norm - -```python -torch.norm( - input, - p='fro', - dim=None, - keepdim=False, - out=None, - dtype=None -) -``` - -## mindspore.nn.Norm - -```python -class mindspore.nn.Norm( - axis=(), - keep_dims=False -)(input) -``` - -## 使用方式 - -PyTorch: 支持包括L2在内的多种范式。 - -MindSpore:目前仅支持L2范式。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -# In MindSpore, only L2 norm is supported. -net = nn.Norm(axis=0) -input_x = Tensor(np.array([[4, 4, 9, 1], [2, 1, 3, 6]]), mindspore.float32) -output = net(input_x) -print(output) -# Out: -# [4.4721 4.1231 9.4868 6.0828] - -# In torch, you can set parameter p to implement the desired norm. 
-input_x = torch.tensor(np.array([[4, 4, 9, 1], [2, 1, 3, 6]])) -output1 = torch.norm(input_x, dim=0, p=2) -print(output1) -# Out: -# tensor([4.4721, 4.1231, 9.4868, 6.0828]) - -input_x = torch.tensor(np.array([[4, 4, 9, 1], [2, 1, 3, 6]])) -output2 = torch.norm(input_x, dim=0, p=1) -print(output2) -# Out: -# tensor([6., 5., 12., 7.]) -``` diff --git a/resource/api_mapping/Norm_en.md b/resource/api_mapping/Norm_en.md deleted file mode 100644 index 6ee468195cfd1ea5c78f871a07da2c532b93a6e8..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Norm_en.md +++ /dev/null @@ -1,59 +0,0 @@ -# Function Differences with torch.norm - -## torch.norm - -```python -torch.norm( - input, - p='fro', - dim=None, - keepdim=False, - out=None, - dtype=None -) -``` - -## mindspore.nn.Norm - -```python -class mindspore.nn.Norm( - axis=(), - keep_dims=False -)(input) -``` - -## Differences - -PyTorch: Multiple normalizations including L2-norm are supported. - -MindSpore: Only supports L2 norm. - -## Code Example - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -# In MindSpore, only L2 norm is supported. -net = nn.Norm(axis=0) -input_x = Tensor(np.array([[4, 4, 9, 1], [2, 1, 3, 6]]), mindspore.float32) -output = net(input_x) -print(output) -# Out: -# [4.4721 4.1231 9.4868 6.0828] - -# In torch, you can set parameter p to implement the desired norm. 
-input_x = torch.tensor(np.array([[4, 4, 9, 1], [2, 1, 3, 6]])) -output1 = torch.norm(input_x, dim=0, p=2) -print(output1) -# Out: -# tensor([4.4721, 4.1231, 9.4868, 6.0828]) - -input_x = torch.tensor(np.array([[4, 4, 9, 1], [2, 1, 3, 6]])) -output2 = torch.norm(input_x, dim=0, p=1) -print(output2) -# Out: -# tensor([6., 5., 12., 7.]) -``` \ No newline at end of file diff --git a/resource/api_mapping/ParameterTuple.md b/resource/api_mapping/ParameterTuple.md deleted file mode 100644 index 93ec09b903cc80f2eb1b8bfb6f01f7b89ddbb6ba..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ParameterTuple.md +++ /dev/null @@ -1,19 +0,0 @@ -# 比较与torch.nn.ParameterList的功能差异 - -## torch.nn.ParameterList - -```python -class torch.nn.ParameterList() -``` - -## mindspore.ParameterTuple - -```python -class mindspore.ParameterTuple() -``` - -## 使用方式 - -PyTorch: 以列表形式储存网络参数。 - -MindSpore:以元组形式储存网络参数。 \ No newline at end of file diff --git a/resource/api_mapping/ParameterTuple_en.md b/resource/api_mapping/ParameterTuple_en.md deleted file mode 100644 index dbb67cfc69d020d06fbb102480a89765d0d1f79c..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ParameterTuple_en.md +++ /dev/null @@ -1,19 +0,0 @@ -# Function Differences with torch.nn.ParameterList - -## torch.nn.ParameterList - -```python -class torch.nn.ParameterList() -``` - -## mindspore.ParameterTuple - -```python -class mindspore.ParameterTuple() -``` - -## Differences - -PyTorch: Stores parameters of network into a list. - -MindSpore:Stores parameters of network into a tuple. 
\ No newline at end of file diff --git a/resource/api_mapping/ReduceMean&AdaptiveAvgPool2d.md b/resource/api_mapping/ReduceMean&AdaptiveAvgPool2d.md deleted file mode 100644 index 627cc22afd7d5fd5f39940f4945f89a1e13d5dee..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ReduceMean&AdaptiveAvgPool2d.md +++ /dev/null @@ -1,47 +0,0 @@ -# 比较与torch.nn.AdaptiveAvgPool2d的功能差异 - -## torch.nn.AdaptiveAvgPool2d - -```python -torch.nn.AdaptiveAvgPool2d(output_size)(input) -``` - -## mindspore.ops.ReduceMean - -```python -class mindspore.ops.ReduceMean(keep_dims=False)( - input_x, - axis=() -) -``` - -## 使用方式 - -PyTorch: 对输入做自适应的平均池化,算法内部根据指定的输出大小计算出对应大小的结果。仅在输出为1*1时和MindSpore的`ReduceMean`一致。 - -MindSpore:计算指定维度数据的平均值。 - -## 代码示例 - -```python -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, mean of given axis will be computed. -input_x = Tensor(np.random.randn(1, 64, 8, 9).astype(np.float32)) -op = ops.ReduceMean(keep_dims=True) -output = op(x=input_x, axis=1) -print(output.shape) -# Out: -# (1, 1, 8, 9) - -# In torch, the corresponding results will be returned based on the input shape. 
-input_x = torch.randn(1, 64, 8, 9) -op = torch.nn.AdaptiveAvgPool2d((5, 7)) -output = op(input_x) -print(output.shape) -# Out: -# torch.Size([1, 64, 5, 7]) -``` diff --git a/resource/api_mapping/ReduceMean&AdaptiveAvgPool2d_en.md b/resource/api_mapping/ReduceMean&AdaptiveAvgPool2d_en.md deleted file mode 100644 index 9d2cb2846b2cd690c9cc431dd74c2a6c8e18ec3e..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ReduceMean&AdaptiveAvgPool2d_en.md +++ /dev/null @@ -1,48 +0,0 @@ -# Function Differences with torch.nn.AdaptiveAvgPool2d - -## torch.nn.AdaptiveAvgPool2d - -```python -torch.nn.AdaptiveAvgPool2d(output_size)(input) -``` - -## mindspore.ops.ReduceMean - -```python -class mindspore.ops.ReduceMean(keep_dims=False)( - input_x, - axis=() -) -``` - -## Differences - -PyTorch: Applies an adaptive average pooling over the inputs, and the corresponding results are calculated based on the specified output size. It is consistent with the `ReduceMean` of MindSpore only if the output is 1*1. - -MindSpore:Computes mean of the given axis. - -## Code Example - -```python -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, mean of given axis will be computed. -input_x = Tensor(np.random.randn(1, 64, 8, 9).astype(np.float32)) -op = ops.ReduceMean(keep_dims=True) -output = op(x=input_x, axis=1) -print(output.shape) -# Out: -# (1, 1, 8, 9) - -# In torch, the corresponding results will be returned based on the input shape. 
-input_x = torch.randn(1, 64, 8, 9) -op = torch.nn.AdaptiveAvgPool2d((5, 7)) -output = op(input_x) -print(output.shape) -# Out: -# torch.Size([1, 64, 5, 7]) -``` - diff --git a/resource/api_mapping/ReduceMean&std_mean.md b/resource/api_mapping/ReduceMean&std_mean.md deleted file mode 100644 index 08434c56c3dec210ade1c971baee7830a1ec8cf5..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ReduceMean&std_mean.md +++ /dev/null @@ -1,55 +0,0 @@ -# 比较与torch.std_mean的功能差异 - -## torch.std_mean - -```python -torch.std_mean( - input, - dim, - unbiased=True, - keepdim=False -) -``` - -## mindspore.ops.ReduceMean - -```python -class mindspore.ops.ReduceMean(keep_dims=False)( - input_x, - axis=() -) -``` - -## 使用方式 - -PyTorch: 计算指定维度数据的标准差和平均值。 - -MindSpore:计算指定维度数据的平均值。 - -## 代码示例 - -```python -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, only the mean of given dimension will be returned. -input_x = Tensor(np.array([[1, 2], [3, 4]]).astype(np.float32)) -op = ops.ReduceMean(keep_dims=True) -output = op(x=input_x, axis=1) -print(output) -# Out: -# [[1.5] -# [3.5]] - -# In torch, both std and mean of given dimensions will be returned. 
-input_x = torch.tensor(np.array([[1, 2], [3, 4]]).astype(np.float32)) -output = torch.std_mean(input=input_x, dim=1) -std, mean = output -print('std: {}'.format(std)) -print('mean: {}'.format(mean)) -# Out: -# torch.tensor([0.7071, 0.7071]) -# torch.tensor([1.5000, 3.5000]) -``` \ No newline at end of file diff --git a/resource/api_mapping/ReduceMean&std_mean_en.md b/resource/api_mapping/ReduceMean&std_mean_en.md deleted file mode 100644 index e86327bcc01d75eee64ac33e1417e1bcae19ee32..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ReduceMean&std_mean_en.md +++ /dev/null @@ -1,55 +0,0 @@ -# Function Differences with torch.std_mean - -## torch.std_mean - -```python -torch.std_mean( - input, - dim, - unbiased=True, - keepdim=False -) -``` - -## mindspore.ops.ReduceMean - -```python -class mindspore.ops.ReduceMean(keep_dims=False)( - input_x, - axis=() -) -``` - -## Differences - -PyTorch: Computes standard-deviation and mean of the given axis. - -MindSpore:Computes mean of the given axis. - -## Code Example - -```python -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, only the mean of given dimension will be returned. -input_x = Tensor(np.array([[1, 2], [3, 4]]).astype(np.float32)) -op = ops.ReduceMean(keep_dims=True) -output = op(x=input_x, axis=1) -print(output) -# Out: -# [[1.5] -# [3.5]] - -# In torch, both std and mean of given dimensions will be returned. 
-input_x = torch.tensor(np.array([[1, 2], [3, 4]]).astype(np.float32)) -output = torch.std_mean(input=input_x, dim=1) -std, mean = output -print('std: {}'.format(std)) -print('mean: {}'.format(mean)) -# Out: -# torch.tensor([0.7071, 0.7071]) -# torch.tensor([1.5000, 3.5000]) -``` \ No newline at end of file diff --git a/resource/api_mapping/ResizeBilinear.md b/resource/api_mapping/ResizeBilinear.md deleted file mode 100644 index 399b4ee8ad18771e794f2f1e7cffc8f6a50afadc..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ResizeBilinear.md +++ /dev/null @@ -1,55 +0,0 @@ -# 比较与torch.nn.Upsample的功能差异 - -## torch.nn.Upsample - -```python -torch.nn.Upsample( - input, - size, - scale_factor, - mode='nearest', - align_corners=None -) -``` - -## mindspore.ops.ResizeBilinear - -```python -class mindspore.ops.ResizeBilinear( - size, - align_corners=False -)(input) -``` - -## 使用方式 - -PyTorch: 有多种模式可以选择。 - -MindSpore:仅支持`bilinear`模式。 - -## 代码示例 - -```python -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, it is predetermined to use bilinear to resize the input image. -x = np.random.randn(1, 2, 3, 4).astype(np.float32) -resize = ops.ResizeBilinear((5, 5)) -tensor = Tensor(x) -output = resize(tensor) -print(output.shape) -# Out: -# (1, 2, 5, 5) - -# In torch, parameter mode should be passed to determine which method to apply for resizing input image. 
-x = np.random.randn(1, 2, 3, 4).astype(np.float32) -resize = torch.nn.Upsample(size=(5, 5), mode='bilinear') -tensor = torch.tensor(x) -output = resize(tensor) -print(output.shape) -# Out: -# torch.Size([1, 2, 5, 5]) -``` \ No newline at end of file diff --git a/resource/api_mapping/ResizeBilinear_en.md b/resource/api_mapping/ResizeBilinear_en.md deleted file mode 100644 index dd4f921dfb8d1fffbd2ef08948a94ef8cc783306..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ResizeBilinear_en.md +++ /dev/null @@ -1,55 +0,0 @@ -# Function Differences with mindspore.ops.ResizeBilinear - -## torch.nn.Upsample - -```python -torch.nn.Upsample( - input, - size, - scale_factor, - mode='nearest', - align_corners=None -) -``` - -## mindspore.ops.ResizeBilinear - -```python -class mindspore.ops.ResizeBilinear( - size, - align_corners=False -)(input) -``` - -## Differences - -PyTorch: Multiple choice of modes to resize the image. - -MindSpore:Only the mode of `bilinear` is supported. - -## Code Example - -```python -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, it is predetermined to use bilinear to resize the input image. -x = np.random.randn(1, 2, 3, 4).astype(np.float32) -resize = ops.ResizeBilinear((5, 5)) -tensor = Tensor(x) -output = resize(tensor) -print(output.shape) -# Out: -# (1, 2, 5, 5) - -# In torch, parameter mode should be passed to determine which method to apply for resizing input image. 
-x = np.random.randn(1, 2, 3, 4).astype(np.float32) -resize = torch.nn.Upsample(size=(5, 5), mode='bilinear') -tensor = torch.tensor(x) -output = resize(tensor) -print(output.shape) -# Out: -# torch.Size([1, 2, 5, 5]) -``` \ No newline at end of file diff --git a/resource/api_mapping/ScatterNdAdd.md b/resource/api_mapping/ScatterNdAdd.md deleted file mode 100644 index bdb12ff5e12823913dc780b24bfaad659a526845..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ScatterNdAdd.md +++ /dev/null @@ -1,57 +0,0 @@ -# 比较与torch.Tensor.scatter_add_的功能差异 - -## torch.Tensor.scatter_add_ - -```python -torch.Tensor.scatter_add_( - dim, - index, - src -) -``` - -## mindspore.ops.ScatterNdAdd - -```python -class mindspore.ops.ScatterNdAdd(use_locking=False)( - input_x, - indices, - update -) -``` - -## 使用方式 - -PyTorch: 给定输入tensor,更新tensor和索引tensor;将更新tensor按照索引tensor在指定的轴上加到输入tensor上。 - -MindSpore:给定输入tensor,更新tensor和索引tensor;将更新tensor按照索引tensor加到输入tensor上; -不支持通过参数自定义轴,但可通过调整索引tensor的形状来明确轴。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, no parameter for specifying dimension. -input_x = mindspore.Parameter(Tensor(np.array([1, 2, 3, 4, 5, 6, 7, 8]), mindspore.float32), name="x") -indices = Tensor(np.array([[2], [4], [1], [7]]), mindspore.int32) -updates = Tensor(np.array([6, 7, 8, 9]), mindspore.float32) -scatter_nd_add = ops.ScatterNdAdd() -output = scatter_nd_add(input_x, indices, updates) -print(output) -# Out: -# [1. 10. 9. 4. 12. 6. 7. 17.] - -# In torch, parameter dim can be set to specify dimension. 
-input_x = torch.tensor(np.array([1, 2, 3, 4, 5, 6, 7, 8]).astype(np.float32)) -indices = torch.tensor(np.array([2, 4, 1, 7]).astype(np.int64)) -updates = torch.tensor(np.array([6, 7, 8, 9]).astype(np.float32)) -output = input_x.scatter_add_(dim=0, index=indices, src=updates) -print(output) -# Out: -# tensor([1., 10., 9., 4., 12., 6., 7., 17.]) -``` diff --git a/resource/api_mapping/ScatterNdAdd_en.md b/resource/api_mapping/ScatterNdAdd_en.md deleted file mode 100644 index 98b9459fa7c8766a6c35c86c6225771f9697e5bb..0000000000000000000000000000000000000000 --- a/resource/api_mapping/ScatterNdAdd_en.md +++ /dev/null @@ -1,56 +0,0 @@ -# Function Differences with torch.Tensor.scatter_add_ - -## torch.Tensor.scatter_add_ - -```python -torch.Tensor.scatter_add_( - dim, - index, - src -) -``` - -## mindspore.ops.ScatterNdAdd - -```python -class mindspore.ops.ScatterNdAdd(use_locking=False)( - input_x, - indices, - update -) -``` - -## Differences - -PyTorch: Given an input tensor, updates the tensor and index tensor; adds the updated tensor to the input tensor based on the index tensor along the given axis. - -MindSpore: Given an input tensor, updates the tensor and index tensor; adds the updated tensor to the input tensor based on the index tensor. Setting axis is not supported. - -## Code Example - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, no parameter for specifying dimension. -input_x = mindspore.Parameter(Tensor(np.array([1, 2, 3, 4, 5, 6, 7, 8]), mindspore.float32), name="x") -indices = Tensor(np.array([[2], [4], [1], [7]]), mindspore.int32) -updates = Tensor(np.array([6, 7, 8, 9]), mindspore.float32) -scatter_nd_add = ops.ScatterNdAdd() -output = scatter_nd_add(input_x, indices, updates) -print(output) -# Out: -# [1. 10. 9. 4. 12. 6. 7. 17.] - -# In torch, parameter dim can be set to specify dimension. 
-input_x = torch.tensor(np.array([1, 2, 3, 4, 5, 6, 7, 8]).astype(np.float32)) -indices = torch.tensor(np.array([2, 4, 1, 7]).astype(np.int64)) -updates = torch.tensor(np.array([6, 7, 8, 9]).astype(np.float32)) -output = input_x.scatter_add_(dim=0, index=indices, src=updates) -print(output) -# Out: -# tensor([1., 10., 9., 4., 12., 6., 7., 17.]) -``` diff --git a/resource/api_mapping/TrainOneStepCell.md b/resource/api_mapping/TrainOneStepCell.md deleted file mode 100644 index ec44aef3ed17d8e51287d2b55c283f005e746c2a..0000000000000000000000000000000000000000 --- a/resource/api_mapping/TrainOneStepCell.md +++ /dev/null @@ -1,23 +0,0 @@ -# 比较与torch.optim.Optimizer.step的功能差异 - -## torch.optim.Optimizer.step - -```python -torch.optim.Optimizer.step() -``` - -## mindspore.nn.TrainOneStepCell - -```python -class mindspore.nn.TrainOneStepCell( - network, - optimizer, - sens=1.0 -)((*inputs)) -``` - -## 使用方式 - -PyTorch: 是`Optimizer`这个抽象类的抽象方法,需要由`Optimizer`的子类继承后具体实现,返回损失值。 - -MindSpore:是1个类,需要把`network`和`optimizer`作为参数传入,且需要调用`construct`方法返回损失值。 \ No newline at end of file diff --git a/resource/api_mapping/TrainOneStepCell_en.md b/resource/api_mapping/TrainOneStepCell_en.md deleted file mode 100644 index c48db395eaa44cd596e8f50c824262668782f02e..0000000000000000000000000000000000000000 --- a/resource/api_mapping/TrainOneStepCell_en.md +++ /dev/null @@ -1,23 +0,0 @@ -# Function Differences with torch.optim.Optimizer.step - -## torch.optim.Optimizer.step - -```python -torch.optim.Optimizer.step() -``` - -## mindspore.nn.TrainOneStepCell - -```python -class mindspore.nn.TrainOneStepCell( - network, - optimizer, - sens=1.0 -)((*inputs)) -``` - -## Differences - -PyTorch: An abstract method of the abstract class `Optimizer`, and it should be inherited and implemented by `Optimizer`'s subclass and return loss. - -MindSpore: A class, which requires `network` and `optimizer` to be passed as parameters, and loss will be returned by the `construct` method. 
\ No newline at end of file diff --git a/resource/api_mapping/Transpose.md b/resource/api_mapping/Transpose.md deleted file mode 100644 index 0f74ac0d01434798b4ea71bcccb3eb0b7b7780bc..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Transpose.md +++ /dev/null @@ -1,56 +0,0 @@ -# 比较与torch.Tensor.t的功能差异 - -## torch.Tensor.t - -```python -torch.Tensor.t(input) -``` - -## mindspore.ops.Transpose - -```python -class mindspore.ops.Transpose(*args, **kwargs)( - input_x, - input_perm -) -``` - -## 使用方式 - -PyTorch: 仅适用于1维和2维的输入。 - -MindSpore:输入的维度不限,且需要通过参数设置转置方式。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, the input tensor will be transposed based on the dimension you set. -input_tensor = Tensor(np.array([[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]), mindspore.float32) -perm = (0, 2, 1) -transpose = ops.Transpose() -output = transpose(input_tensor, perm) -print(output.shape) -# Out: -# (2, 3, 2) - -# In torch, only input of 2D dimension or lower will be accepted. -input1 = torch.randn(()) -input2 = torch.randn((2, 3)) -input3 = torch.randn((2, 3, 4)) -for n, x in enumerate([input1, input2, input3]): - try: - output = torch.t(x) - print(output.shape) - except Exception as e: - print('ERROR when inputting {}D: '.format(n + 1) + str(e)) -# Out: -# torch.Size([]) -# torch.Size([3, 2]) -# ERROR when inputting 3D: t() expects a tensor with <=2 dimensions, but self is 3D. 
-``` \ No newline at end of file diff --git a/resource/api_mapping/Transpose_en.md b/resource/api_mapping/Transpose_en.md deleted file mode 100644 index 327e3d219fca1c31260061e9364b6e8f5e833155..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Transpose_en.md +++ /dev/null @@ -1,56 +0,0 @@ -# Function Differences with torch.Tensor.t - -## torch.Tensor.t - -```python -torch.Tensor.t(input) -``` - -## mindspore.ops.Transpose - -```python -class mindspore.ops.Transpose(*args, **kwargs)( - input_x, - input_perm -) -``` - -## Differences - -PyTorch: Only applies to a 1D or 2D input. - -MindSpore: No limit for dimension of the input, and how to transpose should be set by relevant parameters. - -## Code Example - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, the input tensor will be transposed based on the dimension you set. -input_tensor = Tensor(np.array([[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]), mindspore.float32) -perm = (0, 2, 1) -transpose = ops.Transpose() -output = transpose(input_tensor, perm) -print(output.shape) -# Out: -# (2, 3, 2) - -# In torch, only input of 2D dimension or lower will be accepted. -input1 = torch.randn(()) -input2 = torch.randn((2, 3)) -input3 = torch.randn((2, 3, 4)) -for n, x in enumerate([input1, input2, input3]): - try: - output = torch.t(x) - print(output.shape) - except Exception as e: - print('ERROR when inputting {}D: '.format(n + 1) + str(e)) -# Out: -# torch.Size([]) -# torch.Size([3, 2]) -# ERROR when inputting 3D: t() expects a tensor with <=2 dimensions, but self is 3D. 
-``` \ No newline at end of file diff --git a/resource/api_mapping/Uniform.md b/resource/api_mapping/Uniform.md deleted file mode 100644 index 29b8ea92fa3b5de43feac907066dd0f828be7027..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Uniform.md +++ /dev/null @@ -1,46 +0,0 @@ -# 比较与torch.nn.init.uniform_的功能差异 - -## torch.nn.init.uniform_ - -```python -torch.nn.init.uniform_( - tensor, - a=0.0, - b=1.0 -) -``` - -## mindspore.common.initializer.Uniform - -```python -class mindspore.common.initializer.Uniform(scale=0.07)(arr) -``` - -## 使用方式 - -PyTorch: 通过入参`a`和`b`分别指定均匀分布的上下界,即U(-a, b)。 - -MindSpore:仅通过一个入参`scale`指定均匀分布的范围,即U(-scale, scale)。 - -## 代码示例 - -```python -import mindspore -import torch -import numpy as np - -# In MindSpore, only one parameter is set to specify the scope of uniform distribution (-1, 1). -input_x = np.array([1, 1, 1]).astype(np.float32) -uniform = mindspore.common.initializer.Uniform(scale=1) -output = uniform(input_x) -print(output) -# Out: -# [-0.2333 0.6208 -0.1627] - -# In torch, parameters are set separately to specify the lower and upper bound of uniform distribution. 
-input_x = torch.tensor(np.array([1, 1, 1]).astype(np.float32)) -output = torch.nn.init.uniform_(tensor=input_x, a=-1, b=1) -print(output) -# Out: -# tensor([0.9936, 0.7676, -0.8275]) -``` \ No newline at end of file diff --git a/resource/api_mapping/Uniform_en.md b/resource/api_mapping/Uniform_en.md deleted file mode 100644 index c0150b3e39f2389d620bc5f978c437da39824214..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Uniform_en.md +++ /dev/null @@ -1,46 +0,0 @@ -# Function Differences with torch.nn.init.uniform_ - -## torch.nn.init.uniform_ - -```python -torch.nn.init.uniform_( - tensor, - a=0.0, - b=1.0 -) -``` - -## mindspore.common.initializer.Uniform - -```python -class mindspore.common.initializer.Uniform(scale=0.07)(arr) -``` - -## Differences - -PyTorch: The upper and lower bounds of uniform distribution are specified by parameters `a` and `b`, i.e. U(-a, b). - -MindSpore:It only uses one parameter to specify a uniformly distributed range, i.e. U(-scale, scale). - -## Code Example - -```python -import mindspore -import torch -import numpy as np - -# In MindSpore, only one parameter is set to specify the scope of uniform distribution (-1, 1). -input_x = np.array([1, 1, 1]).astype(np.float32) -uniform = mindspore.common.initializer.Uniform(scale=1) -output = uniform(input_x) -print(output) -# Out: -# [-0.2333 0.6208 -0.1627] - -# In torch, parameters are set separately to specify the lower and upper bound of uniform distribution. 
-input_x = torch.tensor(np.array([1, 1, 1]).astype(np.float32)) -output = torch.nn.init.uniform_(tensor=input_x, a=-1, b=1) -print(output) -# Out: -# tensor([0.9936, 0.7676, -0.8275]) -``` \ No newline at end of file diff --git a/resource/api_mapping/Unique.md b/resource/api_mapping/Unique.md deleted file mode 100644 index 3bfee882b324897cf7bcb45f6465f8e6d3ca7a38..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Unique.md +++ /dev/null @@ -1,56 +0,0 @@ -# 比较与torch.unique的功能差异 - -## torch.unique - -```python -torch.unique( - input, - sorted, - return_inverse, - return_counts, - dim -) -``` - -## mindspore.ops.Unique - -```python -class mindspore.ops.Unique(*args, **kwargs)(x) -``` - -## 使用方式 - -PyTorch: 可通过设置参数来确定输出是否排序,是否输出输入的tensor的各元素在输出tensor中的位置索引,是否输出各唯一值在输入的tensor中的数量。 - -MindSpore:升序输出所有的唯一值,以及输入的tensor的各元素在输出tensor中的位置索引。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, the tensor containing unique elements in ascending order. -# As well as another tensor containing the corresponding indices will be directly returned. -x = Tensor(np.array([1, 2, 5, 2]), mindspore.int32) -unique = ops.Unique() -output, indices = unique(x) -print(output) -print(indices) -# Out: -# [1 2 5] -# [0 1 2 1] - -# In torch, parameters can be set to determine whether to output tensor containing unique elements in ascending order. -# As well as whether to output tensor containing corresponding indices. 
-x = torch.tensor([1, 2, 5, 2]) -output, indices = torch.unique(x, sorted=True, return_inverse=True) -print(output) -print(indices) -# Out: -# tensor([1, 2, 5]) -# tensor([0, 1, 2, 1]) -``` \ No newline at end of file diff --git a/resource/api_mapping/Unique_en.md b/resource/api_mapping/Unique_en.md deleted file mode 100644 index 74cace4aad03d75a334d337d5fa3ba77d56d3265..0000000000000000000000000000000000000000 --- a/resource/api_mapping/Unique_en.md +++ /dev/null @@ -1,56 +0,0 @@ -# Function Differences with torch.unique - -## torch.unique - -```python -torch.unique( - input, - sorted, - return_inverse, - return_counts, - dim -) -``` - -## mindspore.ops.Unique - -```python -class mindspore.ops.Unique(*args, **kwargs)(x) -``` - -## Differences - -PyTorch: By setting relevant parameters, determines whether to sort the output, to return indices of elements in the input corresponding to the output tensor, to return counts for each unique element. - -MindSpore: Outputs all unique elements in ascending order, and returns indices of elements in the input corresponding to the output tensor. - -## Code Example - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, the tensor containing unique elements in ascending order. -# As well as another tensor containing the corresponding indices will be directly returned. -x = Tensor(np.array([1, 2, 5, 2]), mindspore.int32) -unique = ops.Unique() -output, indices = unique(x) -print(output) -print(indices) -# Out: -# [1 2 5] -# [0 1 2 1] - -# In torch, parameters can be set to determine whether to output tensor containing unique elements in ascending order. -# As well as whether to output tensor containing corresponding indices. 
-x = torch.tensor([1, 2, 5, 2]) -output, indices = torch.unique(x, sorted=True, return_inverse=True) -print(output) -print(indices) -# Out: -# tensor([1, 2, 5]) -# tensor([0, 1, 2, 1]) -``` \ No newline at end of file diff --git a/resource/api_mapping/api_mapping.md b/resource/api_mapping/api_mapping.md deleted file mode 100644 index 48f985cce15f96baa1ca58cf940ab84787246517..0000000000000000000000000000000000000000 --- a/resource/api_mapping/api_mapping.md +++ /dev/null @@ -1,204 +0,0 @@ -# API 映射 - -由社区提供的PyTorch APIs和MindSpore APIs之间的映射。 - -| PyTorch APIs | MindSpore APIs | INFO | -|------------------------------------------------------|----------------------------------------------------------------|--------| -| torch.abs | mindspore.ops.Abs | 功能一致 | -| torch.acos | mindspore.ops.ACos | 功能一致 | -| torch.add | mindspore.ops.Add | 功能一致 | -| torch.argmax | mindspore.ops.Argmax | 功能一致 | -| torch.argmin | mindspore.ops.Argmin | 功能一致 | -| torch.asin | mindspore.ops.Asin | 功能一致 | -| torch.atan | mindspore.ops.Atan | 功能一致 | -| torch.atan2 | mindspore.ops.Atan2 | 功能一致 | -| torch.bitwise_and | mindspore.ops.BitwiseAnd | 功能一致 | -| torch.bitwise_or | mindspore.ops.BitwiseOr | 功能一致 | -| torch.bmm | mindspore.ops.BatchMatMul | 功能一致 | -| torch.broadcast_tensors | mindspore.ops.BroadcastTo |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/BroadcastTo.md)| -| torch.cat | mindspore.ops.Concat | 功能一致 | -| torch.ceil | mindspore.ops.Ceil | 功能一致 | -| torch.chunk | mindspore.ops.Split | 功能一致 | -| torch.clamp | mindspore.ops.clip_by_value | 功能一致 | -| torch.cos | mindspore.ops.Cos | 功能一致 | -| torch.cosh | mindspore.ops.Cosh | 功能一致 | -| torch.cuda.device_count | mindspore.communication.get_group_size | 功能一致 | -| torch.cuda.set_device | mindspore.context.set_context |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/set_context.md)| -| torch.cumprod | mindspore.ops.CumProd | 功能一致 | -| torch.cumsum | mindspore.ops.CumSum | 功能一致 | -| torch.det 
| mindspore.nn.MatDet | 功能一致 | -| torch.diag | mindspore.nn.MatrixDiag |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/MatrixDiag.md)| -| torch.digamma | mindspore.nn.DiGamma | 功能一致 | -| torch.distributed.all_gather | mindspore.ops.AllGather | 功能一致 | -| torch.distributed.all_reduce | mindspore.ops.AllReduce | 功能一致 | -| torch.distributions.gamma.Gamma | mindspore.ops.Gamma | 功能一致 | -| torch.distributed.get_rank | mindspore.communication.get_rank | 功能一致 | -| torch.distributed.init_process_group | mindspore.communication.init |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/init.md)| -| torch.distributed.new_group | mindspore.communication.create_group |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/create_group.md)| -| torch.div | mindspore.ops.Div | 功能一致 | -| torch.dot | mindspore.ops.tensor_dot |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/tensor_dot.md)| -| torch.eq | mindspore.ops.Equal | 功能一致 | -| torch.erfc | mindspore.ops.Erfc | 功能一致 | -| torch.exp | mindspore.ops.Exp | 功能一致 | -| torch.expm1 | mindspore.ops.Expm1 | 功能一致 | -| torch.eye | mindspore.ops.Eye | 功能一致 | -| torch.flatten | mindspore.ops.Flatten |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Flatten.md)| -| torch.flip | mindspore.ops.ReverseV2 | 功能一致 | -| torch.floor | mindspore.ops.Floor | 功能一致 | -| torch.floor_divide | mindspore.ops.FloorDiv |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/FloorDiv.md)| -| torch.fmod | mindspore.ops.Mod | 功能一致 | -| torch.gather | mindspore.ops.GatherD | 功能一致 | -| torch.histc | mindspore.ops.HistogramFixedWidth | 功能一致 | -| torch.inverse | mindspore.nn.MatInverse | 功能一致 | -| torch.lgamma | mindspore.nn.LGamma | 功能一致 | -| torch.linspace | mindspore.ops.LinSpace | 功能一致 | -| torch.load | mindspore.load_checkpoint | 功能一致 | -| torch.log | mindspore.ops.Log | 功能一致 | -| torch.log1p | mindspore.ops.Log1p | 功能一致 | -| 
torch.logsumexp | mindspore.nn.ReduceLogSumExp | 功能一致 | -| torch.matmul | mindspore.nn.MatMul | 功能一致 | -| torch.max | mindspore.ops.ArgMaxWithValue |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ArgMaxWithValue.md)| -| torch.maximum | mindspore.ops.Maximum | 功能一致 | -| torch.mean | mindspore.ops.ReduceMean | 功能一致 | -| torch.min | mindspore.ops.ArgMinWithValue |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ArgMinWithValue.md)| -| torch.minimum | mindspore.ops.Minimum | 功能一致 | -| torch.mm | mindspore.ops.MatMul | 功能一致 | -| torch.mul | mindspore.ops.Mul | 功能一致 | -| torch.nn.AdaptiveAvgPool2d | mindspore.ops.ReduceMean |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ReduceMean&AdaptiveAvgPool2d.md)| -| torch.nn.AvgPool1d | mindspore.nn.AvgPool1d | 功能一致 | -| torch.nn.AvgPool2d | mindspore.nn.AvgPool2d | 功能一致 | -| torch.nn.BatchNorm1d | mindspore.nn.BatchNorm1d | 功能一致 | -| torch.nn.BatchNorm2d | mindspore.nn.BatchNorm2d | 功能一致 | -| torch.nn.Conv2d | mindspore.nn.Conv2d | 功能一致 | -| torch.nn.ConvTranspose2d | mindspore.nn.Conv2dTranspose | 功能一致 | -| torch.nn.CrossEntropyLoss | mindspore.nn.SoftmaxCrossEntropyWithLogits | 功能一致 | -| torch.nn.CTCLoss | mindspore.ops.CTCLoss | 功能一致 | -| torch.nn.Dropout | mindspore.nn.Dropout | 功能一致 | -| torch.nn.Embedding | mindspore.nn.Embedding | 功能一致 | -| torch.nn.Flatten | mindspore.nn.Flatten |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/nn_Flatten.md)| -| torch.nn.functional.adaptive_avg_pool2d | mindspore.nn.AvgPool2d |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/AvgPool2d.md)| -| torch.nn.functional.avg_pool2d | mindspore.ops.AvgPool | 功能一致 | -| torch.nn.functional.binary_cross_entropy | mindspore.ops.BinaryCrossEntropy | 功能一致 | -| torch.nn.functional.conv2d | mindspore.ops.Conv2D | 功能一致 | -| torch.nn.functional.elu | mindspore.ops.Elu | 功能一致 | -| torch.nn.functional.log_softmax | 
mindspore.nn.LogSoftmax | 功能一致 | -| torch.nn.functional.normalize | mindspore.ops.L2Normalize |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/L2Normalize.md)| -| torch.nn.functional.one_hot | mindspore.ops.OneHot | 功能一致 | -| torch.nn.functional.pad | mindspore.ops.Pad | 功能一致 | -| torch.nn.functional.pixel_shuffle | mindspore.ops.DepthToSpace | 功能一致 | -| torch.nn.functional.relu | mindspore.ops.ReLU | 功能一致 | -| torch.nn.functional.softmax | mindspore.ops.Softmax | 功能一致 | -| torch.nn.functional.softplus | mindspore.ops.Softplus | 功能一致 | -| torch.nn.functional.softsign | mindspore.ops.Softsign | 功能一致 | -| torch.nn.GELU | mindspore.nn.GELU | 功能一致 | -| torch.nn.GELU | mindspore.nn.FastGelu |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/FastGelu.md)| -| torch.nn.GroupNorm | mindspore.nn.GroupNorm | 功能一致 | -| torch.nn.init.constant_ | mindspore.common.initializer.Constant |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Constant.md)| -| torch.nn.init.uniform_ | mindspore.common.initializer.Uniform |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Uniform.md)| -| torch.nn.KLDivLoss | mindspore.ops.KLDivLoss | 功能一致 | -| torch.nn.L1Loss | mindspore.nn.L1Loss | 功能一致 | -| torch.nn.LayerNorm | mindspore.nn.LayerNorm | 功能一致 | -| torch.nn.LeakyReLU | mindspore.nn.LeakyReLU | 功能一致 | -| torch.nn.Linear | mindspore.nn.Dense |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Dense.md)| -| torch.nn.LSTM | mindspore.nn.LSTM | 功能一致 | -| torch.nn.LSTMCell | mindspore.nn.LSTMCell | 功能一致 | -| torch.nn.MaxPool2d | mindspore.nn.MaxPool2d | 功能一致 | -| torch.nn.Module | mindspore.nn.Cell | 功能一致 | -| torch.nn.Module.load_state_dict | mindspore.load_param_into_net | 功能一致 | -| torch.nn.ModuleList | mindspore.nn.CellList | 功能一致 | -| torch.nn.MSELoss | mindspore.nn.MSELoss | 功能一致 | -| torch.nn.Parameter | mindspore.Parameter | 功能一致 | -| torch.nn.ParameterList | 
mindspore.ParameterTuple |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ParameterTuple.md)| -| torch.nn.PixelShuffle | mindspore.ops.DepthToSpace | 功能一致 | -| torch.nn.PReLU | mindspore.nn.PReLU | 功能一致 | -| torch.nn.ReLU | mindspore.nn.ReLU | 功能一致 | -| torch.nn.ReplicationPad2d | mindspore.nn.Pad | 功能一致 | -| torch.nn.Sequential | mindspore.nn.SequentialCell | 功能一致 | -| torch.nn.Sigmoid | mindspore.nn.Sigmoid | 功能一致 | -| torch.nn.SmoothL1Loss | mindspore.nn.SmoothL1Loss | 功能一致 | -| torch.nn.Softmax | mindspore.nn.Softmax | 功能一致 | -| torch.nn.SyncBatchNorm.convert_sync_batchnorm | mindspore.nn.GlobalBatchNorm | 功能一致 | -| torch.nn.Tanh | mindspore.nn.Tanh | 功能一致 | -| torch.nn.Unfold | mindspore.nn.Unfold | 功能一致 | -| torch.nn.Upsample | mindspore.ops.ResizeBilinear |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ResizeBilinear.md)| -| torch.norm | mindspore.nn.Norm |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Norm.md)| -| torch.numel | mindspore.ops.Size | 功能一致 | -| torch.ones | mindspore.ops.Ones | 功能一致 | -| torch.ones_like | mindspore.ops.OnesLike | 功能一致 | -| torch.optim.Adadelta | mindspore.ops.ApplyAdadelta |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ApplyAdadelta.md)| -| torch.optim.Adagrad | mindspore.nn.ApplyAdagrad |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ApplyAdagrad.md)| -| torch.optim.Adam | mindspore.nn.Adam | 功能一致 | -| torch.optim.Adamax | mindspore.ops.ApplyAdaMax | 功能一致 | -| torch.optim.AdamW | mindspore.nn.AdamWeightDecay | 功能一致 | -| torch.optim.lr_scheduler.CosineAnnealingWarmRestarts | mindspore.nn.cosine_decay_lr | 功能一致 | -| torch.optim.lr_scheduler.StepLR | mindspore.nn.piecewise_constant_lr | 功能一致 | -| torch.optim.Optimizer.step | mindspore.nn.TrainOneStepCell |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/TrainOneStepCell.md)| -| torch.optim.RMSprop | mindspore.nn.RMSProp | 功能一致 
| -| torch.optim.SGD | mindspore.nn.SGD | 功能一致 | -| torch.pow | mindspore.ops.Pow | 功能一致 | -| torch.prod | mindspore.ops.ReduceProd | 功能一致 | -| torch.rand | mindspore.ops.UniformReal | 功能一致 | -| torch.randint | mindspore.ops.UniformInt | 功能一致 | -| torch.randn | mindspore.ops.StandardNormal | 功能一致 | -| torch.range | mindspore.nn.Range | 功能一致 | -| torch.round | mindspore.ops.Rint | 功能一致 | -| torch.save | mindspore.save_checkpoint | 功能一致 | -| torch.sigmoid | mindspore.ops.Sigmoid | 功能一致 | -| torch.sin | mindspore.ops.Sin | 功能一致 | -| torch.sinh | mindspore.ops.Sinh | 功能一致 | -| torch.sparse.FloatTensor | mindspore.Tensor | 差异对比 | -| torch.split | mindspore.ops.Split | 功能一致 | -| torch.sqrt | mindspore.ops.Sqrt | 功能一致 | -| torch.squeeze | mindspore.ops.Squeeze | 功能一致 | -| torch.stack | mindspore.ops.Stack | 功能一致 | -| torch.std_mean | mindspore.ops.ReduceMean |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ReduceMean&std_mean.md)| -| torch.sum | mindspore.ops.ReduceSum | 功能一致 | -| torch.tan | mindspore.ops.Tan | 功能一致 | -| torch.tanh | mindspore.ops.Tanh | 功能一致 | -| torch.tensor | mindspore.Tensor | 功能一致 | -| torch.Tensor | mindspore.Tensor | 功能一致 | -| torch.Tensor.chunk | mindspore.ops.Split | 功能一致 | -| torch.Tensor.expand | mindspore.ops.BroadcastTo | 功能一致 | -| torch.Tensor.fill_ | mindspore.ops.Fill | 功能一致 | -| torch.Tensor.float | mindspore.ops.Cast |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Cast.md)| -| torch.Tensor.index_add | mindspore.ops.InplaceAdd | 功能一致 | -| torch.Tensor.mm | mindspore.ops.MatMul | 功能一致 | -| torch.Tensor.mul | mindspore.ops.Mul | 功能一致 | -| torch.Tensor.pow | mindspore.ops.Pow | 功能一致 | -| torch.Tensor.repeat | mindspore.ops.Tile | 功能一致 | -| torch.repeat_interleave | mindspore.ops.repeat_elements | 功能一致 | -| torch.Tensor.requires_grad_ | mindspore.Parameter.requires_grad | 功能一致 | -| torch.Tensor.round | mindspore.ops.Round | 功能一致 | -| torch.Tensor.scatter | mindspore.ops.ScatterNd | 功能一致 | -| 
torch.Tensor.scatter_add_ | mindspore.ops.ScatterNdAdd |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ScatterNdAdd.md)| -| torch.Tensor.sigmoid | mindspore.nn.Sigmoid | 功能一致 | -| torch.Tensor.sign | mindspore.ops.Sign | 功能一致 | -| torch.Tensor.size | mindspore.ops.Shape | 功能一致 | -| torch.Tensor.sqrt | mindspore.ops.Sqrt | 功能一致 | -| torch.Tensor.sub | mindspore.ops.Sub | 功能一致 | -| torch.Tensor.t | mindspore.ops.Transpose |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Transpose.md)| -| torch.Tensor.transpose | mindspore.ops.Transpose | 功能一致 | -| torch.Tensor.unsqueeze | mindspore.ops.ExpandDims | 功能一致 | -| torch.Tensor.view | mindspore.ops.Reshape | 功能一致 | -| torch.Tensor.zero_ | mindspore.ops.ZerosLike | 功能一致 | -| torch.transpose | mindspore.ops.Transpose | 功能一致 | -| torch.tril | mindspore.nn.Tril | 功能一致 | -| torch.triu | mindspore.nn.Triu | 功能一致 | -| torch.unbind | mindspore.ops.Unstack | 功能一致 | -| torch.unique | mindspore.ops.Unique |[差异对比](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Unique.md)| -| torch.unsqueeze | mindspore.ops.ExpandDims | 功能一致 | -| torch.utils.data.DataLoader | mindspore.DatasetHelper | 功能一致 | -| torch.utils.data.Dataset | mindspore.dataset.MindDataset | 功能一致 | -| torch.utils.data.distributed.DistributedSampler | mindspore.dataset.DistributedSampler | 功能一致 | -| torch.zeros | mindspore.ops.Zeros | 功能一致 | -| torch.zeros_like | mindspore.ops.ZerosLike | 功能一致 | -| torchvision.datasets.ImageFolder | mindspore.dataset.ImageFolderDataset | 功能一致 | -| torchvision.ops.nms | mindspore.ops.NMSWithMask | 功能一致 | -| torchvision.ops.roi_align | mindspore.ops.ROIAlign | 功能一致 | -| torchvision.transforms.CenterCrop | mindspore.dataset.vision.py_transforms.CenterCrop | 功能一致 | -| torchvision.transforms.ColorJitter | mindspore.dataset.vision.py_transforms.RandomColorAdjust | 功能一致 | -| torchvision.transforms.Compose | mindspore.dataset.transforms.py_transforms.Compose | 功能一致 | -| 
torchvision.transforms.Normalize | mindspore.dataset.vision.py_transforms.Normalize | 功能一致 | -| torchvision.transforms.RandomHorizontalFlip | mindspore.dataset.vision.py_transforms.RandomHorizontalFlip | 功能一致 | -| torchvision.transforms.Resize | mindspore.dataset.vision.py_transforms.Resize | 功能一致 | -| torchvision.transforms.ToTensor | mindspore.dataset.vision.py_transforms.ToTensor | 功能一致 | diff --git a/resource/api_mapping/api_mapping_en.md b/resource/api_mapping/api_mapping_en.md deleted file mode 100644 index 94307315f3fd4b0d92b43c757118f55f28bc675d..0000000000000000000000000000000000000000 --- a/resource/api_mapping/api_mapping_en.md +++ /dev/null @@ -1,204 +0,0 @@ -# API Mapping - -Mapping between PyTorch APIs and MindSpore APIs, which is provided by the community. - -| PyTorch APIs | MindSpore APIs | INFO | -|------------------------------------------------------|----------------------------------------------------------------|------| -| torch.abs | mindspore.ops.Abs | same | -| torch.acos | mindspore.ops.ACos | same | -| torch.add | mindspore.ops.Add | same | -| torch.argmax | mindspore.ops.Argmax | same | -| torch.argmin | mindspore.ops.Argmin | same | -| torch.asin | mindspore.ops.Asin | same | -| torch.atan | mindspore.ops.Atan | same | -| torch.atan2 | mindspore.ops.Atan2 | same | -| torch.bitwise_and | mindspore.ops.BitwiseAnd | same | -| torch.bitwise_or | mindspore.ops.BitwiseOr | same | -| torch.bmm | mindspore.ops.BatchMatMul | same | -| torch.broadcast_tensors | mindspore.ops.BroadcastTo |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/BroadcastTo_en.md)| -| torch.cat | mindspore.ops.Concat | same | -| torch.ceil | mindspore.ops.Ceil | same | -| torch.chunk | mindspore.ops.Split | same | -| torch.clamp | mindspore.ops.clip_by_value | same | -| torch.cos | mindspore.ops.Cos | same | -| torch.cosh | mindspore.ops.Cosh | same | -| torch.cuda.device_count | mindspore.communication.get_group_size | same | -| 
torch.cuda.set_device | mindspore.context.set_context |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/set_context_en.md)| -| torch.cumprod | mindspore.ops.CumProd | same | -| torch.cumsum | mindspore.ops.CumSum | same | -| torch.det | mindspore.nn.MatDet | same | -| torch.diag | mindspore.nn.MatrixDiag |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/MatrixDiag_en.md)| -| torch.digamma | mindspore.nn.DiGamma | same | -| torch.distributed.all_gather | mindspore.ops.AllGather | same | -| torch.distributed.all_reduce | mindspore.ops.AllReduce | same | -| torch.distributions.gamma.Gamma | mindspore.ops.Gamma | same | -| torch.distributed.get_rank | mindspore.communication.get_rank | same | -| torch.distributed.init_process_group | mindspore.communication.init |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/init_en.md)| -| torch.distributed.new_group | mindspore.communication.create_group |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/create_group_en.md)| -| torch.div | mindspore.ops.Div | same | -| torch.dot | mindspore.ops.tensor_dot |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/tensor_dot_en.md)| -| torch.eq | mindspore.ops.Equal | same | -| torch.erfc | mindspore.ops.Erfc | same | -| torch.exp | mindspore.ops.Exp | same | -| torch.expm1 | mindspore.ops.Expm1 | same | -| torch.eye | mindspore.ops.Eye | same | -| torch.flatten | mindspore.ops.Flatten |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Flatten_en.md)| -| torch.flip | mindspore.ops.ReverseV2 | same | -| torch.floor | mindspore.ops.Floor | same | -| torch.floor_divide | mindspore.ops.FloorDiv |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/FloorDiv_en.md)| -| torch.fmod | mindspore.ops.Mod | same | -| torch.gather | mindspore.ops.GatherD | same | -| torch.histc | mindspore.ops.HistogramFixedWidth | same | -| torch.inverse | 
mindspore.nn.MatInverse | same | -| torch.lgamma | mindspore.nn.LGamma | same | -| torch.linspace | mindspore.ops.LinSpace | same | -| torch.load | mindspore.load_checkpoint | same | -| torch.log | mindspore.ops.Log | same | -| torch.log1p | mindspore.ops.Log1p | same | -| torch.logsumexp | mindspore.nn.ReduceLogSumExp | same | -| torch.matmul | mindspore.nn.MatMul | same | -| torch.max | mindspore.ops.ArgMaxWithValue |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ArgMaxWithValue_en.md)| -| torch.maximum | mindspore.ops.Maximum | same | -| torch.mean | mindspore.ops.ReduceMean | same | -| torch.min | mindspore.ops.ArgMinWithValue |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ArgMinWithValue_en.md)| -| torch.minimum | mindspore.ops.Minimum | same | -| torch.mm | mindspore.ops.MatMul | same | -| torch.mul | mindspore.ops.Mul | same | -| torch.nn.AdaptiveAvgPool2d | mindspore.ops.ReduceMean |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ReduceMean&AdaptiveAvgPool2d_en.md)| -| torch.nn.AvgPool1d | mindspore.nn.AvgPool1d | same | -| torch.nn.AvgPool2d | mindspore.nn.AvgPool2d | same | -| torch.nn.BatchNorm1d | mindspore.nn.BatchNorm1d | same | -| torch.nn.BatchNorm2d | mindspore.nn.BatchNorm2d | same | -| torch.nn.Conv2d | mindspore.nn.Conv2d | same | -| torch.nn.ConvTranspose2d | mindspore.nn.Conv2dTranspose | same | -| torch.nn.CrossEntropyLoss | mindspore.nn.SoftmaxCrossEntropyWithLogits | same | -| torch.nn.CTCLoss | mindspore.ops.CTCLoss | same | -| torch.nn.Dropout | mindspore.nn.Dropout | same | -| torch.nn.Embedding | mindspore.nn.Embedding | same | -| torch.nn.Flatten | mindspore.nn.Flatten |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/nn_Flatten_en.md)| -| torch.nn.functional.adaptive_avg_pool2d | mindspore.nn.AvgPool2d |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/AvgPool2d_en.md)| -| torch.nn.functional.avg_pool2d | 
mindspore.ops.AvgPool | same | -| torch.nn.functional.binary_cross_entropy | mindspore.ops.BinaryCrossEntropy | same | -| torch.nn.functional.conv2d | mindspore.ops.Conv2D | same | -| torch.nn.functional.elu | mindspore.ops.Elu | same | -| torch.nn.functional.log_softmax | mindspore.nn.LogSoftmax | same | -| torch.nn.functional.normalize | mindspore.ops.L2Normalize |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/L2Normalize_en.md)| -| torch.nn.functional.one_hot | mindspore.ops.OneHot | same | -| torch.nn.functional.pad | mindspore.ops.Pad | same | -| torch.nn.functional.pixel_shuffle | mindspore.ops.DepthToSpace | same | -| torch.nn.functional.relu | mindspore.ops.ReLU | same | -| torch.nn.functional.softmax | mindspore.ops.Softmax | same | -| torch.nn.functional.softplus | mindspore.ops.Softplus | same | -| torch.nn.functional.softsign | mindspore.ops.Softsign | same | -| torch.nn.GELU | mindspore.nn.GELU | same | -| torch.nn.GELU | mindspore.nn.FastGelu |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/FastGelu_en.md)| -| torch.nn.GroupNorm | mindspore.nn.GroupNorm | same | -| torch.nn.init.constant_ | mindspore.common.initializer.Constant |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Constant_en.md)| -| torch.nn.init.uniform_ | mindspore.common.initializer.Uniform |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Uniform_en.md)| -| torch.nn.KLDivLoss | mindspore.ops.KLDivLoss | same | -| torch.nn.L1Loss | mindspore.nn.L1Loss | same | -| torch.nn.LayerNorm | mindspore.nn.LayerNorm | same | -| torch.nn.LeakyReLU | mindspore.nn.LeakyReLU | same | -| torch.nn.Linear | mindspore.nn.Dense |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Dense_en.md)| -| torch.nn.LSTM | mindspore.nn.LSTM | same | -| torch.nn.LSTMCell | mindspore.nn.LSTMCell | same | -| torch.nn.MaxPool2d | mindspore.nn.MaxPool2d | same | -| torch.nn.Module | mindspore.nn.Cell 
| same | -| torch.nn.Module.load_state_dict | mindspore.load_param_into_net | same | -| torch.nn.ModuleList | mindspore.nn.CellList | same | -| torch.nn.MSELoss | mindspore.nn.MSELoss | same | -| torch.nn.Parameter | mindspore.Parameter | same | -| torch.nn.ParameterList | mindspore.ParameterTuple |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ParameterTuple_en.md)| -| torch.nn.PixelShuffle | mindspore.ops.DepthToSpace | same | -| torch.nn.PReLU | mindspore.nn.PReLU | same | -| torch.nn.ReLU | mindspore.nn.ReLU | same | -| torch.nn.ReplicationPad2d | mindspore.nn.Pad | same | -| torch.nn.Sequential | mindspore.nn.SequentialCell | same | -| torch.nn.Sigmoid | mindspore.nn.Sigmoid | same | -| torch.nn.SmoothL1Loss | mindspore.nn.SmoothL1Loss | same | -| torch.nn.Softmax | mindspore.nn.Softmax | same | -| torch.nn.SyncBatchNorm.convert_sync_batchnorm | mindspore.nn.GlobalBatchNorm | same | -| torch.nn.Tanh | mindspore.nn.Tanh | same | -| torch.nn.Unfold | mindspore.nn.Unfold | same | -| torch.nn.Upsample | mindspore.ops.ResizeBilinear |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ResizeBilinear_en.md)| -| torch.norm | mindspore.nn.Norm |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Norm_en.md)| -| torch.numel | mindspore.ops.Size | same | -| torch.ones | mindspore.ops.Ones | same | -| torch.ones_like | mindspore.ops.OnesLike | same | -| torch.optim.Adadelta | mindspore.ops.ApplyAdadelta |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ApplyAdadelta_en.md)| -| torch.optim.Adagrad | mindspore.nn.ApplyAdagrad |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ApplyAdagrad_en.md)| -| torch.optim.Adam | mindspore.nn.Adam | same | -| torch.optim.Adamax | mindspore.ops.ApplyAdaMax | same | -| torch.optim.AdamW | mindspore.nn.AdamWeightDecay | same | -| torch.optim.lr_scheduler.CosineAnnealingWarmRestarts | mindspore.nn.cosine_decay_lr | same | -| 
torch.optim.lr_scheduler.StepLR | mindspore.nn.piecewise_constant_lr | same | -| torch.optim.Optimizer.step | mindspore.nn.TrainOneStepCell |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/TrainOneStepCell_en.md)| -| torch.optim.RMSprop | mindspore.nn.RMSProp | same | -| torch.optim.SGD | mindspore.nn.SGD | same | -| torch.pow | mindspore.ops.Pow | same | -| torch.prod | mindspore.ops.ReduceProd | same | -| torch.rand | mindspore.ops.UniformReal | same | -| torch.randint | mindspore.ops.UniformInt | same | -| torch.randn | mindspore.ops.StandardNormal | same | -| torch.range | mindspore.nn.Range | same | -| torch.round | mindspore.ops.Rint | same | -| torch.save | mindspore.save_checkpoint | same | -| torch.sigmoid | mindspore.ops.Sigmoid | same | -| torch.sin | mindspore.ops.Sin | same | -| torch.sinh | mindspore.ops.Sinh | same | -| torch.sparse.FloatTensor | mindspore.Tensor | diff | -| torch.split | mindspore.ops.Split | same | -| torch.sqrt | mindspore.ops.Sqrt | same | -| torch.squeeze | mindspore.ops.Squeeze | same | -| torch.stack | mindspore.ops.Stack | same | -| torch.std_mean | mindspore.ops.ReduceMean |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ReduceMean&std_mean_en.md)| -| torch.sum | mindspore.ops.ReduceSum | same | -| torch.tan | mindspore.ops.Tan | same | -| torch.tanh | mindspore.ops.Tanh | same | -| torch.tensor | mindspore.Tensor | same | -| torch.Tensor | mindspore.Tensor | same | -| torch.Tensor.chunk | mindspore.ops.Split | same | -| torch.Tensor.expand | mindspore.ops.BroadcastTo | same | -| torch.Tensor.fill_ | mindspore.ops.Fill | same | -| torch.Tensor.float | mindspore.ops.Cast |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Cast_en.md)| -| torch.Tensor.index_add | mindspore.ops.InplaceAdd | same | -| torch.Tensor.mm | mindspore.ops.MatMul | same | -| torch.Tensor.mul | mindspore.ops.Mul | same | -| torch.Tensor.pow | mindspore.ops.Pow | same | -| 
torch.Tensor.repeat | mindspore.ops.Tile | same | -| torch.repeat_interleave | mindspore.ops.repeat_elements | same | -| torch.Tensor.requires_grad_ | mindspore.Parameter.requires_grad | same | -| torch.Tensor.round | mindspore.ops.Round | same | -| torch.Tensor.scatter | mindspore.ops.ScatterNd | same | -| torch.Tensor.scatter_add_ | mindspore.ops.ScatterNdAdd |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/ScatterNdAdd_en.md)| -| torch.Tensor.sigmoid | mindspore.nn.Sigmoid | same | -| torch.Tensor.sign | mindspore.ops.Sign | same | -| torch.Tensor.size | mindspore.ops.Shape | same | -| torch.Tensor.sqrt | mindspore.ops.Sqrt | same | -| torch.Tensor.sub | mindspore.ops.Sub | same | -| torch.Tensor.t | mindspore.ops.Transpose |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Transpose_en.md)| -| torch.Tensor.transpose | mindspore.ops.Transpose | same | -| torch.Tensor.unsqueeze | mindspore.ops.ExpandDims | same | -| torch.Tensor.view | mindspore.ops.Reshape | same | -| torch.Tensor.zero_ | mindspore.ops.ZerosLike | same | -| torch.transpose | mindspore.ops.Transpose | same | -| torch.tril | mindspore.nn.Tril | same | -| torch.triu | mindspore.nn.Triu | same | -| torch.unbind | mindspore.ops.Unstack | same | -| torch.unique | mindspore.ops.Unique |[diff](https://gitee.com/mindspore/docs/blob/master/resource/api_mapping/Unique_en.md)| -| torch.unsqueeze | mindspore.ops.ExpandDims | same | -| torch.utils.data.DataLoader | mindspore.DatasetHelper | same | -| torch.utils.data.Dataset | mindspore.dataset.MindDataset | same | -| torch.utils.data.distributed.DistributedSampler | mindspore.dataset.DistributedSampler | same | -| torch.zeros | mindspore.ops.Zeros | same | -| torch.zeros_like | mindspore.ops.ZerosLike | same | -| torchvision.datasets.ImageFolder | mindspore.dataset.ImageFolderDataset | same | -| torchvision.ops.nms | mindspore.ops.NMSWithMask | same | -| torchvision.ops.roi_align | mindspore.ops.ROIAlign | same 
| -| torchvision.transforms.CenterCrop | mindspore.dataset.vision.py_transforms.CenterCrop | same | -| torchvision.transforms.ColorJitter | mindspore.dataset.vision.py_transforms.RandomColorAdjust | same | -| torchvision.transforms.Compose | mindspore.dataset.transforms.py_transforms.Compose | same | -| torchvision.transforms.Normalize | mindspore.dataset.vision.py_transforms.Normalize | same | -| torchvision.transforms.RandomHorizontalFlip | mindspore.dataset.vision.py_transforms.RandomHorizontalFlip | same | -| torchvision.transforms.Resize | mindspore.dataset.vision.py_transforms.Resize | same | -| torchvision.transforms.ToTensor | mindspore.dataset.vision.py_transforms.ToTensor | same | diff --git a/resource/api_mapping/create_group.md b/resource/api_mapping/create_group.md deleted file mode 100644 index 6b7746352884c73fb9cc784bcce3404f3b0e5401..0000000000000000000000000000000000000000 --- a/resource/api_mapping/create_group.md +++ /dev/null @@ -1,23 +0,0 @@ -# 比较与torch.distributed.new_group的功能差异 - -## torch.distributed.new_group - -```python -torch.distributed.new_group( - ranks=None, - timeout=datetime.timedelta(0, 1800), - backend=None -) -``` - -## mindspore.communication.create_group - -```python -mindspore.communication.create_group(group, rank_ids) -``` - -## 使用方式 - -PyTorch: 该接口传入待构建通信域rank列表,指定backend创建指定的通信域,并返回创建的通信域。 - -MindSpore:该接口传入group名字,以及待构建通信域rank列表,创建一个以传入的group名字为key的通信域,不返回任何值。 \ No newline at end of file diff --git a/resource/api_mapping/create_group_en.md b/resource/api_mapping/create_group_en.md deleted file mode 100644 index 57a7720562c0340085e77b8c9c38c7aba5cd1b88..0000000000000000000000000000000000000000 --- a/resource/api_mapping/create_group_en.md +++ /dev/null @@ -1,23 +0,0 @@ -# Comparing the Function Differences with torch.distributed.new_group - -## torch.distributed.new_group - -```python -torch.distributed.new_group( - ranks=None, - timeout=datetime.timedelta(0, 1800), - backend=None -) -``` - -## 
mindspore.communication.create_group - -```python -mindspore.communication.create_group(group, rank_ids) -``` - -## Differences - -PyTorch: This interface passes in the rank list of the communication domain to be constructed, specifies the backend to create the specified communication domain, and returns the created communication domain. - -MindSpore:The interface passes in the group name and the rank list of the communication domain to be constructed, creates a communication domain with the incoming group name as the key, and does not return any value. \ No newline at end of file diff --git a/resource/api_mapping/init.md b/resource/api_mapping/init.md deleted file mode 100644 index 9c5337861c136fdb8ae6cf68e69254793b86ca47..0000000000000000000000000000000000000000 --- a/resource/api_mapping/init.md +++ /dev/null @@ -1,27 +0,0 @@ -# 比较与torch.distributed.init_process_group的功能差异 - -## torch.distributed.init_process_group - -```python -torch.distributed.init_process_group( - backend, - init_method=None, - timeout=datetime.timedelta(0, 1800), - world_size=-1, - rank=-1, - store=None, - group_name='' -) -``` - -## mindspore.communication.init - -```python -mindspore.communication.init(backend_name=None) -``` - -## 使用方式 - -PyTorch: 该接口支持的集合通信有3种:MPI、Gloo、NCCL。该接口在初始化`backend`的同时,还提供`world_size`、`rank`和`timeout`等内容的配置。 - -MindSpore:该接口当前仅支持2种集合通信:HCCL、NCCL。而`world_size`、`rank`和`timeout`等内容的配置并不在该接口中设置,调用该接口之前,需设置相应的环境变量。 \ No newline at end of file diff --git a/resource/api_mapping/init_en.md b/resource/api_mapping/init_en.md deleted file mode 100644 index 9ad43a0dc67d41db978d7434b50b199beda3e28d..0000000000000000000000000000000000000000 --- a/resource/api_mapping/init_en.md +++ /dev/null @@ -1,28 +0,0 @@ -# Function Differences with torch.distributed.init_process_group - -## torch.distributed.init_process_group - -```python -torch.distributed.init_process_group( - backend, - init_method=None, - timeout=datetime.timedelta(0, 1800), - world_size=-1, - rank=-1, - store=None, 
- group_name='' -) -``` - -## mindspore.communication.init - -```python -mindspore.communication.init(backend_name=None) -``` - -## Differences - -PyTorch: This interface supports three kinds of collective communications: MPI, Gloo, and NCCL. It initializes `backend` and also provides configuration, such as `world_size`, `rank`, `timeout`, etc. - -MindSpore:This interface currently supports only two kinds of collective communication: HCCL and NCCL. The configuration of `world_size`, `rank` and `timeout` is not set in this interface. The corresponding environment variable needs to be set before calling this interface. - diff --git a/resource/api_mapping/nn_Flatten.md b/resource/api_mapping/nn_Flatten.md deleted file mode 100644 index 421d5fb330fca8a157d3ac0ab896a3606f9bf14c..0000000000000000000000000000000000000000 --- a/resource/api_mapping/nn_Flatten.md +++ /dev/null @@ -1,55 +0,0 @@ -# 比较与torch.nn.Flatten的功能差异 - -## torch.nn.Flatten - -```python -class torch.nn.Flatten( - start_dim=1, - end_dim=-1 -) -``` - -## mindspore.nn.Flatten - -```python -class mindspore.nn.Flatten()(input) -``` - -## 使用方式 - -PyTorch: 支持指定维度对元素进行展开,默认保留第0维,对其余维度的元素进行展开;需要同`torch.nn.Sequential`一起使用。 - -MindSpore:仅支持保留第0维元素,对其余维度的元素进行展开。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -# In MindSpore, only the 0th dimension will be reserved and the rest will be flattened. -input_tensor = Tensor(np.ones(shape=[1, 2, 3, 4]), mindspore.float32) -flatten = nn.Flatten() -output = flatten(input_tensor) -print(output.shape) -# Out: -# (1, 24) - -# In torch, the dimension to reserve can be specified and the rest will be flattened. -# Different from torch.flatten, you should pass it as parameter into torch.nn.Sequential. 
-input_tensor = torch.Tensor(np.ones(shape=[1, 2, 3, 4])) -flatten1 = torch.nn.Sequential(torch.nn.Flatten(start_dim=1)) -output1 = flatten1(input_tensor) -print(output1.shape) -# Out: -# torch.Size([1, 24]) - -input_tensor = torch.Tensor(np.ones(shape=[1, 2, 3, 4])) -flatten2 = torch.nn.Sequential(torch.nn.Flatten(start_dim=2)) -output2 = flatten2(input_tensor) -print(output2.shape) -# Out: -# torch.Size([1, 2, 12]) -``` diff --git a/resource/api_mapping/nn_Flatten_en.md b/resource/api_mapping/nn_Flatten_en.md deleted file mode 100644 index a6b8b519c2e6f2a9b337eb7271848034ca74b7fe..0000000000000000000000000000000000000000 --- a/resource/api_mapping/nn_Flatten_en.md +++ /dev/null @@ -1,55 +0,0 @@ -# Function Differences with torch.nn.Flatten - -## torch.nn.Flatten - -```python -class torch.nn.Flatten( - start_dim=1, - end_dim=-1 -) -``` - -## mindspore.nn.Flatten - -```python -class mindspore.nn.Flatten()(input) -``` - -## Differences - -PyTorch: Supports the flatten of elements by specified dimensions. This should be used together with `torch.nn.Sequential` - -MindSpore:Only the 0th dimension element is reserved and the elements of the remaining dimensions are flattened. - -## Code Example - -```python -import mindspore -from mindspore import Tensor, nn -import torch -import numpy as np - -# In MindSpore, only the 0th dimension will be reserved and the rest will be flattened. -input_tensor = Tensor(np.ones(shape=[1, 2, 3, 4]), mindspore.float32) -flatten = nn.Flatten() -output = flatten(input_tensor) -print(output.shape) -# Out: -# (1, 24) - -# In torch, the dimension to reserve can be specified and the rest will be flattened. -# Different from torch.flatten, you should pass it as parameter into torch.nn.Sequential. 
-input_tensor = torch.Tensor(np.ones(shape=[1, 2, 3, 4])) -flatten1 = torch.nn.Sequential(torch.nn.Flatten(start_dim=1)) -output1 = flatten1(input_tensor) -print(output1.shape) -# Out: -# torch.Size([1, 24]) - -input_tensor = torch.Tensor(np.ones(shape=[1, 2, 3, 4])) -flatten2 = torch.nn.Sequential(torch.nn.Flatten(start_dim=2)) -output2 = flatten2(input_tensor) -print(output2.shape) -# Out: -# torch.Size([1, 2, 12]) -``` \ No newline at end of file diff --git a/resource/api_mapping/set_context.md b/resource/api_mapping/set_context.md deleted file mode 100644 index 63f85b1857e05a4e57a7359bc8c0070a8b209fc2..0000000000000000000000000000000000000000 --- a/resource/api_mapping/set_context.md +++ /dev/null @@ -1,19 +0,0 @@ -# 比较与torch.cuda.set_device的功能差异 - -## torch.cuda.set_device - -```python -torch.cuda.set_device(device) -``` - -## mindspore.context.set_context - -```python -mindspore.context.set_context(**kwargs) -``` - -## 使用方式 - -PyTorch: 设置当前使用的`device`卡号。 - -MindSpore:不仅设置当前使用的`device`卡号,还设置模式`mode`,运行环境`device_target`,是否保存图`save_graphs`等。 \ No newline at end of file diff --git a/resource/api_mapping/set_context_en.md b/resource/api_mapping/set_context_en.md deleted file mode 100644 index 32a8b4c5a771e6e4bb855dfc8d5f8759ea08089b..0000000000000000000000000000000000000000 --- a/resource/api_mapping/set_context_en.md +++ /dev/null @@ -1,19 +0,0 @@ -# Function Differences with torch.cuda.set_device - -## torch.cuda.set_device - -```python -torch.cuda.set_device(device) -``` - -## mindspore.context.set_context - -```python -mindspore.context.set_context(**kwargs) -``` - -## Differences - -PyTorch: It is used to set the current `device`. - -MindSpore:It is not only used to set the current `device`, but also set the `mode`, `device_target`, `save_graphs`, etc. 
\ No newline at end of file diff --git a/resource/api_mapping/tensor_dot.md b/resource/api_mapping/tensor_dot.md deleted file mode 100644 index 2703f9a41881b3d53127d33aa1be43c6d38f6af5..0000000000000000000000000000000000000000 --- a/resource/api_mapping/tensor_dot.md +++ /dev/null @@ -1,54 +0,0 @@ -# 比较与torch.dot的功能差异 - -## torch.dot - -```python -torch.dot( - input, - other, - out=None -) -``` - -## mindspore.ops.tensor_dot - -```python -mindspore.ops.tensor_dot( - x1, - x2, - axes -) -``` - -## 使用方式 - -PyTorch: 计算两个相同shape的tensor的点乘(内积),仅支持1D。 - -MindSpore:计算两个tensor在任意轴上的点乘,支持任意维度的tensor,但指定的轴对应的形状要相等。当输入为1D,轴设定为1时和PyTorch的功能一致。 - -## 代码示例 - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, tensor of any dimension will be supported. -# And parameters will be set to specify how to compute among dimensions. -input_x1 = Tensor(np.array([2, 3, 4]), mindspore.float32) -input_x2 = Tensor(np.array([2, 1, 3]), mindspore.float32) -output = ops.tensor_dot(input_x1, input_x2, 1) -print(output) -# Out: -# 19.0 - -# In torch, only 1D tensor's computation will be supported. -input_x1 = torch.tensor([2, 3, 4]) -input_x2 = torch.tensor([2, 1, 3]) -output = torch.dot(input_x1, input_x2) -print(output) -# Out: -# tensor(19) -``` \ No newline at end of file diff --git a/resource/api_mapping/tensor_dot_en.md b/resource/api_mapping/tensor_dot_en.md deleted file mode 100644 index e47bc3f617f2ca978aedc7bb667e891955d22744..0000000000000000000000000000000000000000 --- a/resource/api_mapping/tensor_dot_en.md +++ /dev/null @@ -1,54 +0,0 @@ -# Function Differences with torch.dot - -## torch.dot - -```python -torch.dot( - input, - other, - out=None -) -``` - -## mindspore.ops.tensor_dot - -```python -mindspore.ops.tensor_dot( - x1, - x2, - axes -) -``` - -## Differences - -PyTorch: Calculates the dot product(inner product) of two tensors of the same shape, only 1D is supported. 
- -MindSpore:Calculates the dot product of two tensors on any axis. Support tensor of any dimension, but the shape corresponding to the specified axis should be equal. The function is the same as that of PyTorch when the input is 1D and the axis is set to 1. - -## Code Example - -```python -import mindspore -from mindspore import Tensor -import mindspore.ops as ops -import torch -import numpy as np - -# In MindSpore, tensor of any dimension will be supported. -# And parameters will be set to specify how to compute among dimensions. -input_x1 = Tensor(np.array([2, 3, 4]), mindspore.float32) -input_x2 = Tensor(np.array([2, 1, 3]), mindspore.float32) -output = ops.tensor_dot(input_x1, input_x2, 1) -print(output) -# Out: -# 19.0 - -# In torch, only 1D tensor's computation will be supported. -input_x1 = torch.tensor([2, 3, 4]) -input_x2 = torch.tensor([2, 1, 3]) -output = torch.dot(input_x1, input_x2) -print(output) -# Out: -# tensor(19) -``` \ No newline at end of file diff --git a/resource/api_updates/nn_api_updates.md b/resource/api_updates/nn_api_updates.md deleted file mode 100644 index 2c9ea8e89de30dde3f777790c830f0b0726a7f03..0000000000000000000000000000000000000000 --- a/resource/api_updates/nn_api_updates.md +++ /dev/null @@ -1,49 +0,0 @@ -# API Updates - -Compared with the previous version, the added, deleted and supported platforms change information of `mindspore.nn` operators in MindSpore, is shown in the following table. 
- -|API|Status|Support Platform|Class -|:----|:----|:----|:---- -|[mindspore.nn.ForwardValueAndGrad](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.ForwardValueAndGrad.html#mindspore.nn.ForwardValueAndGrad)|New|r1.2: Ascend/GPU/CPU|Wrapper Functions -|[mindspore.nn.TimeDistributed](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.TimeDistributed.html#mindspore.nn.TimeDistributed)|New|r1.2: Ascend/GPU/CPU|Wrapper Functions -|[mindspore.nn.BatchNorm3d](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.BatchNorm3d.html#mindspore.nn.BatchNorm3d)|New|r1.2: Ascend/GPU/CPU|Normalization Layers -|[mindspore.nn.InstanceNorm2d](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.InstanceNorm2d.html#mindspore.nn.InstanceNorm2d)|New|r1.2: GPU|Normalization Layers -|[mindspore.nn.SyncBatchNorm](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.SyncBatchNorm.html#mindspore.nn.SyncBatchNorm)|New|r1.2: Ascend|Normalization Layers -|[mindspore.nn.BCEWithLogitsLoss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.BCEWithLogitsLoss.html#mindspore.nn.BCEWithLogitsLoss)|New|r1.2: Ascend|Loss Functions -|[mindspore.nn.DiceLoss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.DiceLoss.html#mindspore.nn.DiceLoss)|New|r1.2: Ascend/GPU/CPU|Loss Functions -|[mindspore.nn.FocalLoss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.FocalLoss.html#mindspore.nn.FocalLoss)|New|r1.2: Ascend/GPU|Loss Functions -|[mindspore.nn.MAELoss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.MAELoss.html#mindspore.nn.MAELoss)|New|r1.2: Ascend/GPU/CPU|Loss Functions -|[mindspore.nn.MultiClassDiceLoss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.MultiClassDiceLoss.html#mindspore.nn.MultiClassDiceLoss)|New|r1.2: Ascend/GPU|Loss 
Functions -|[mindspore.nn.RMSELoss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.RMSELoss.html#mindspore.nn.RMSELoss)|New|r1.2: Ascend/GPU/CPU|Loss Functions -|[mindspore.nn.Conv3d](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.Conv3d.html#mindspore.nn.Conv3d)|New|r1.2: Ascend|Convolution Layers -|[mindspore.nn.Conv3dTranspose](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.Conv3dTranspose.html#mindspore.nn.Conv3dTranspose)|New|r1.2: Ascend|Convolution Layers -|[mindspore.nn.Norm](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.Norm.html#mindspore.nn.Norm)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Utilities -|[mindspore.nn.ClipByNorm](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.ClipByNorm.html#mindspore.nn.ClipByNorm)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Utilities -|[mindspore.nn.Pad](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.Pad.html#mindspore.nn.Pad)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Utilities -|[mindspore.nn.ResizeBilinear](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.ResizeBilinear.html#mindspore.nn.ResizeBilinear)|Changed|r1.1: Ascend => r1.2: Ascend/CPU|Utilities -|[mindspore.nn.Range](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.Range.html#mindspore.nn.Range)|Changed|r1.1: Ascend => r1.2: Ascend/GPU/CPU|Utilities -|[mindspore.nn.FakeQuantWithMinMaxObserver](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.FakeQuantWithMinMaxObserver.html#mindspore.nn.FakeQuantWithMinMaxObserver)|Changed|r1.1: To Be Developed => r1.2: Ascend/GPU|Quantized Functions -|[mindspore.nn.Conv2dBnFoldQuantOneConv](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.Conv2dBnFoldQuantOneConv.html#mindspore.nn.Conv2dBnFoldQuantOneConv)|Changed|r1.1: To 
Be Developed => r1.2: Ascend/GPU|Quantized Functions -|[mindspore.nn.Conv2dBnAct](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.Conv2dBnAct.html#mindspore.nn.Conv2dBnAct)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Quantized Functions -|[mindspore.nn.DenseBnAct](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.DenseBnAct.html#mindspore.nn.DenseBnAct)|Changed|r1.1: Ascend => r1.2: Ascend/GPU|Quantized Functions -|[mindspore.nn.AvgPool1d](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.AvgPool1d.html#mindspore.nn.AvgPool1d)|Changed|r1.1: Ascend => r1.2: Ascend/GPU/CPU|Pooling layers -|[mindspore.nn.AvgPool2d](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.AvgPool2d.html#mindspore.nn.AvgPool2d)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Pooling layers -|[mindspore.nn.MaxPool1d](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.MaxPool1d.html#mindspore.nn.MaxPool1d)|Changed|r1.1: Ascend => r1.2: Ascend/GPU/CPU|Pooling layers -|[mindspore.nn.LazyAdam](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.LazyAdam.html#mindspore.nn.LazyAdam)|Changed|r1.1: Ascend => r1.2: Ascend/GPU|Optimizer Functions -|[mindspore.nn.RMSProp](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.RMSProp.html#mindspore.nn.RMSProp)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Optimizer Functions -|[mindspore.nn.GroupNorm](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.GroupNorm.html#mindspore.nn.GroupNorm)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Normalization Layers -|[mindspore.nn.BatchNorm1d](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.BatchNorm1d.html#mindspore.nn.BatchNorm1d)|Changed|r1.1: Ascend/GPU => r1.2: Ascend|Normalization Layers 
-|[mindspore.nn.HSigmoid](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.HSigmoid.html#mindspore.nn.HSigmoid)|Changed|r1.1: GPU => r1.2: GPU/CPU|Non-linear Activations -|[mindspore.nn.HSwish](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.HSwish.html#mindspore.nn.HSwish)|Changed|r1.1: GPU => r1.2: GPU/CPU|Non-linear Activations -|[mindspore.nn.LeakyReLU](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.LeakyReLU.html#mindspore.nn.LeakyReLU)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Non-linear Activations -|[mindspore.nn.ELU](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.ELU.html#mindspore.nn.ELU)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Non-linear Activations -|[mindspore.nn.get_activation](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.get_activation.html#mindspore.nn.get_activation)|Changed|r1.1: To Be Developed => r1.2: Ascend/GPU/CPU|Non-linear Activations -|[mindspore.nn.Moments](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.Moments.html#mindspore.nn.Moments)|Changed|r1.1: Ascend => r1.2: Ascend/GPU|Math Functions -|[mindspore.nn.BCELoss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.BCELoss.html#mindspore.nn.BCELoss)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Loss Functions -|[mindspore.nn.L1Loss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.L1Loss.html#mindspore.nn.L1Loss)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Loss Functions -|[mindspore.nn.MSELoss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.MSELoss.html#mindspore.nn.MSELoss)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Loss Functions 
-|[mindspore.nn.ImageGradients](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.ImageGradients.html#mindspore.nn.ImageGradients)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Images Functions -|[mindspore.nn.Conv1d](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.Conv1d.html#mindspore.nn.Conv1d)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|Convolution Layers -|[mindspore.nn.GraphKernel](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/nn/mindspore.nn.GraphKernel.html#mindspore.nn.GraphKernel)|Changed|r1.1: To Be Developed => r1.2: Ascend/GPU|Cell - -> diff --git a/resource/api_updates/ops_api_updates.md b/resource/api_updates/ops_api_updates.md deleted file mode 100644 index 7186ce20bab922ca979a5f63155e5582787578dc..0000000000000000000000000000000000000000 --- a/resource/api_updates/ops_api_updates.md +++ /dev/null @@ -1,150 +0,0 @@ -# API Updates - -Compared with the previous version, the added, deleted and supported platforms change information of `mindspore.ops` operators in MindSpore, is shown in the following table. 
- -|API|Status|Support Platform|Class -|:----|:----|:----|:---- -|[mindspore.ops.PMEExcludedForce](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.PMEExcludedForce.html#mindspore.ops.PMEExcludedForce)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.PMEReciprocalForce](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.PMEReciprocalForce.html#mindspore.ops.PMEReciprocalForce)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.DihedralForceWithAtomEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.DihedralForceWithAtomEnergy.html#mindspore.ops.DihedralForceWithAtomEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.BondForce](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.BondForce.html#mindspore.ops.BondForce)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.Dihedral14CFEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Dihedral14CFEnergy.html#mindspore.ops.Dihedral14CFEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.AngleEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.AngleEnergy.html#mindspore.ops.AngleEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.Dihedral14LJAtomEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Dihedral14LJAtomEnergy.html#mindspore.ops.Dihedral14LJAtomEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.Dihedral14LJForce](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Dihedral14LJForce.html#mindspore.ops.Dihedral14LJForce)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.Dihedral14LJEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Dihedral14LJEnergy.html#mindspore.ops.Dihedral14LJEnergy)|New|r1.2: GPU|operations--Sponge Operators 
-|[mindspore.ops.AngleForceWithAtomEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.AngleForceWithAtomEnergy.html#mindspore.ops.AngleForceWithAtomEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.Dihedral14CFAtomEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Dihedral14CFAtomEnergy.html#mindspore.ops.Dihedral14CFAtomEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.Dihedral14LJForceWithDirectCF](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Dihedral14LJForceWithDirectCF.html#mindspore.ops.Dihedral14LJForceWithDirectCF)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.BondForceWithAtomEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.BondForceWithAtomEnergy.html#mindspore.ops.BondForceWithAtomEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.LJEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.LJEnergy.html#mindspore.ops.LJEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.PMEEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.PMEEnergy.html#mindspore.ops.PMEEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.LJForceWithPMEDirectForce](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.LJForceWithPMEDirectForce.html#mindspore.ops.LJForceWithPMEDirectForce)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.BondEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.BondEnergy.html#mindspore.ops.BondEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.LJForce](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.LJForce.html#mindspore.ops.LJForce)|New|r1.2: GPU|operations--Sponge Operators 
-|[mindspore.ops.BondAtomEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.BondAtomEnergy.html#mindspore.ops.BondAtomEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.BondForceWithAtomVirial](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.BondForceWithAtomVirial.html#mindspore.ops.BondForceWithAtomVirial)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.NeighborListUpdate](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.NeighborListUpdate.html#mindspore.ops.NeighborListUpdate)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.DihedralEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.DihedralEnergy.html#mindspore.ops.DihedralEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.DihedralForce](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.DihedralForce.html#mindspore.ops.DihedralForce)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.DihedralAtomEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.DihedralAtomEnergy.html#mindspore.ops.DihedralAtomEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.MDIterationLeapFrog](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.MDIterationLeapFrog.html#mindspore.ops.MDIterationLeapFrog)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.AngleForce](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.AngleForce.html#mindspore.ops.AngleForce)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.Dihedral14LJCFForceWithAtomEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Dihedral14LJCFForceWithAtomEnergy.html#mindspore.ops.Dihedral14LJCFForceWithAtomEnergy)|New|r1.2: GPU|operations--Sponge Operators 
-|[mindspore.ops.AngleAtomEnergy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.AngleAtomEnergy.html#mindspore.ops.AngleAtomEnergy)|New|r1.2: GPU|operations--Sponge Operators -|[mindspore.ops.NoRepeatNGram](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.NoRepeatNGram.html#mindspore.ops.NoRepeatNGram)|New|r1.2: Ascend|operations--Other Operators -|[mindspore.ops.Conv3DTranspose](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Conv3DTranspose.html#mindspore.ops.Conv3DTranspose)|New|r1.2: Ascend|operations--Neural Network Operators -|[mindspore.ops.SeLU](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.SeLU.html#mindspore.ops.SeLU)|New|r1.2: Ascend|operations--Neural Network Operators -|[mindspore.ops.Dropout2D](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Dropout2D.html#mindspore.ops.Dropout2D)|New|r1.2: Ascend|operations--Neural Network Operators -|[mindspore.ops.BCEWithLogitsLoss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.BCEWithLogitsLoss.html#mindspore.ops.BCEWithLogitsLoss)|New|r1.2: Ascend|operations--Neural Network Operators -|[mindspore.ops.NLLLoss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.NLLLoss.html#mindspore.ops.NLLLoss)|New|r1.2: Ascend|operations--Neural Network Operators -|[mindspore.ops.Conv3D](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Conv3D.html#mindspore.ops.Conv3D)|New|r1.2: Ascend|operations--Neural Network Operators -|[mindspore.ops.MaxPool3D](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.MaxPool3D.html#mindspore.ops.MaxPool3D)|New|r1.2: Ascend|operations--Neural Network Operators -|[mindspore.ops.Dropout3D](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Dropout3D.html#mindspore.ops.Dropout3D)|New|r1.2: 
Ascend|operations--Neural Network Operators -|[mindspore.ops.Mish](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Mish.html#mindspore.ops.Mish)|New|r1.2: Ascend|operations--Neural Network Operators -|[mindspore.ops.MatrixInverse](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.MatrixInverse.html#mindspore.ops.MatrixInverse)|New|r1.2: GPU|operations--Math Operators -|[mindspore.ops.MulNoNan](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.MulNoNan.html#mindspore.ops.MulNoNan)|New|r1.2: Ascend|operations--Math Operators -|[mindspore.ops.IndexAdd](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.IndexAdd.html#mindspore.ops.IndexAdd)|New|r1.2: GPU|operations--Math Operators -|[mindspore.ops.Randperm](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Randperm.html#mindspore.ops.Randperm)|New|r1.2: Ascend|operations--Array Operators -|[mindspore.ops.add](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Add.html#mindspore.ops.Add)|New|r1.2: same as mindspore.ops.Add|functional -|[mindspore.ops.stack](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Stack.html#mindspore.ops.Stack)|New|r1.2: mindspore.ops.Stack|functional -|[mindspore.ops.dot](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.dot.html#mindspore.ops.dot)|New|r1.2: Ascend/GPU/CPU|composite -|[mindspore.ops.batch_dot](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.batch_dot.html#mindspore.ops.batch_dot)|New|r1.2: Ascend/GPU/CPU|composite -|[mindspore.ops.matmul](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.matmul.html#mindspore.ops.matmul)|New|r1.2: Ascend/GPU/CPU|composite 
-|[mindspore.ops.MakeRefKey](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.MakeRefKey.html#mindspore.ops.MakeRefKey)|Deleted|r1.1: Ascend/GPU/CPU|operations--Other Operators -|[mindspore.ops.FusedBatchNorm](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.FusedBatchNorm.html#mindspore.ops.FusedBatchNorm)|Deleted|r1.1: CPU|operations--Neural Network Operators -|[mindspore.ops.Unpack](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.Unpack.html#mindspore.ops.Unpack)|Deleted|r1.1: To Be Developed|operations--Neural Network Operators -|[mindspore.ops.FusedBatchNormEx](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.FusedBatchNormEx.html#mindspore.ops.FusedBatchNormEx)|Deleted|r1.1: GPU|operations--Neural Network Operators -|[mindspore.ops.Gelu](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.Gelu.html#mindspore.ops.Gelu)|Deleted|r1.1: To Be Developed|operations--Neural Network Operators -|[mindspore.ops.FastGelu](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.FastGelu.html#mindspore.ops.FastGelu)|Deleted|r1.1: To Be Developed|operations--Neural Network Operators -|[mindspore.ops.Pack](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.Pack.html#mindspore.ops.Pack)|Deleted|r1.1: To Be Developed|operations--Neural Network Operators -|[mindspore.ops.TensorAdd](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.TensorAdd.html#mindspore.ops.TensorAdd)|Deleted|r1.1: To Be Developed|operations--Math Operators -|[mindspore.ops.ControlDepend](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.ControlDepend.html#mindspore.ops.ControlDepend)|Deleted|r1.1: Ascend/GPU/CPU|operations--Control Flowscontrol 
-|[mindspore.ops.GatherV2](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.GatherV2.html#mindspore.ops.GatherV2)|Deleted|r1.1: To Be Developed|operations--Array Operators -|[mindspore.ops.control_depend](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.ControlDepend.html#mindspore.ops.ControlDepend)|Deleted|r1.1: same as mindspore.ops.ControlDepend|functional -|[mindspore.ops.tensor_add](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.Add.html#mindspore.ops.Add)|Deleted|r1.1: same as mindspore.ops.Add|functional -|[mindspore.ops.pack](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.Pack.html#mindspore.ops.Pack)|Deleted|r1.1: same as mindspore.ops.Pack|functional -|[mindspore.ops.add_flags](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/mindspore/ops/mindspore.ops.add_flags.html#mindspore.ops.add_flags)|Deleted|r1.1: To Be Developed|composite -|[mindspore.ops.BasicLSTMCell](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.BasicLSTMCell.html#mindspore.ops.BasicLSTMCell)|Changed|r1.1: Ascend => r1.2: Deprecated|operations--Neural Network Operators -|[mindspore.ops.Tanh](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Tanh.html#mindspore.ops.Tanh)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.HSwish](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.HSwish.html#mindspore.ops.HSwish)|Changed|r1.1: GPU => r1.2: GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.CTCLoss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.CTCLoss.html#mindspore.ops.CTCLoss)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators 
-|[mindspore.ops.L2Normalize](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.L2Normalize.html#mindspore.ops.L2Normalize)|Changed|r1.1: Ascend => r1.2: Ascend/GPU|operations--Neural Network Operators -|[mindspore.ops.BatchNorm](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.BatchNorm.html#mindspore.ops.BatchNorm)|Changed|r1.1: Ascend => r1.2: Ascend/CPU|operations--Neural Network Operators -|[mindspore.ops.BinaryCrossEntropy](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.BinaryCrossEntropy.html#mindspore.ops.BinaryCrossEntropy)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.HSigmoid](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.HSigmoid.html#mindspore.ops.HSigmoid)|Changed|r1.1: GPU => r1.2: GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.ResizeBilinear](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.ResizeBilinear.html#mindspore.ops.ResizeBilinear)|Changed|r1.1: Ascend => r1.2: Ascend/CPU|operations--Neural Network Operators -|[mindspore.ops.Elu](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Elu.html#mindspore.ops.Elu)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.Conv2DBackpropInput](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Conv2DBackpropInput.html#mindspore.ops.Conv2DBackpropInput)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.Sigmoid](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Sigmoid.html#mindspore.ops.Sigmoid)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators 
-|[mindspore.ops.SmoothL1Loss](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.SmoothL1Loss.html#mindspore.ops.SmoothL1Loss)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.FusedSparseLazyAdam](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.FusedSparseLazyAdam.html#mindspore.ops.FusedSparseLazyAdam)|Changed|r1.1: CPU => r1.2: Ascend/CPU|operations--Neural Network Operators -|[mindspore.ops.ApplyCenteredRMSProp](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.ApplyCenteredRMSProp.html#mindspore.ops.ApplyCenteredRMSProp)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.CTCGreedyDecoder](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.CTCGreedyDecoder.html#mindspore.ops.CTCGreedyDecoder)|Changed|r1.1: To Be Developed => r1.2: Ascend|operations--Neural Network Operators -|[mindspore.ops.Acosh](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Acosh.html#mindspore.ops.Acosh)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.Pad](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Pad.html#mindspore.ops.Pad)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.Stack](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Stack.html#mindspore.ops.Stack)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.MirrorPad](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.MirrorPad.html#mindspore.ops.MirrorPad)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators 
-|[mindspore.ops.Adam](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Adam.html#mindspore.ops.Adam)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.GeLU](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.GeLU.html#mindspore.ops.GeLU)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.TopK](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.TopK.html#mindspore.ops.TopK)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.ApplyRMSProp](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.ApplyRMSProp.html#mindspore.ops.ApplyRMSProp)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.FusedSparseAdam](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.FusedSparseAdam.html#mindspore.ops.FusedSparseAdam)|Changed|r1.1: CPU => r1.2: Ascend/CPU|operations--Neural Network Operators -|[mindspore.ops.LayerNorm](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.LayerNorm.html#mindspore.ops.LayerNorm)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.LogSoftmax](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.LogSoftmax.html#mindspore.ops.LogSoftmax)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.Unstack](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Unstack.html#mindspore.ops.Unstack)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Neural Network Operators -|[mindspore.ops.BatchMatMul](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.BatchMatMul.html#mindspore.ops.BatchMatMul)|Changed|r1.1: 
Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Floor](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Floor.html#mindspore.ops.Floor)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Sinh](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Sinh.html#mindspore.ops.Sinh)|Changed|r1.1: Ascend => r1.2: Ascend/CPU|operations--Math Operators -|[mindspore.ops.ReduceAny](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.ReduceAny.html#mindspore.ops.ReduceAny)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.ACos](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.ACos.html#mindspore.ops.ACos)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.FloorDiv](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.FloorDiv.html#mindspore.ops.FloorDiv)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.LogicalOr](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.LogicalOr.html#mindspore.ops.LogicalOr)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Sin](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Sin.html#mindspore.ops.Sin)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.LogicalNot](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.LogicalNot.html#mindspore.ops.LogicalNot)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.LogicalAnd](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.LogicalAnd.html#mindspore.ops.LogicalAnd)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math 
Operators -|[mindspore.ops.Atanh](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Atanh.html#mindspore.ops.Atanh)|Changed|r1.1: Ascend => r1.2: Ascend/CPU|operations--Math Operators -|[mindspore.ops.Cos](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Cos.html#mindspore.ops.Cos)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.SquareSumAll](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.SquareSumAll.html#mindspore.ops.SquareSumAll)|Changed|r1.1: Ascend => r1.2: Ascend/GPU|operations--Math Operators -|[mindspore.ops.CumSum](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.CumSum.html#mindspore.ops.CumSum)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Cosh](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Cosh.html#mindspore.ops.Cosh)|Changed|r1.1: Ascend => r1.2: Ascend/CPU|operations--Math Operators -|[mindspore.ops.Erfc](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Erfc.html#mindspore.ops.Erfc)|Changed|r1.1: Ascend => r1.2: Ascend/GPU|operations--Math Operators -|[mindspore.ops.GreaterEqual](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.GreaterEqual.html#mindspore.ops.GreaterEqual)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Tan](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Tan.html#mindspore.ops.Tan)|Changed|r1.1: Ascend => r1.2: Ascend/CPU|operations--Math Operators -|[mindspore.ops.Asinh](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Asinh.html#mindspore.ops.Asinh)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators 
-|[mindspore.ops.SquaredDifference](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.SquaredDifference.html#mindspore.ops.SquaredDifference)|Changed|r1.1: Ascend => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Asin](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Asin.html#mindspore.ops.Asin)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Atan](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Atan.html#mindspore.ops.Atan)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Expm1](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Expm1.html#mindspore.ops.Expm1)|Changed|r1.1: Ascend => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Reciprocal](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Reciprocal.html#mindspore.ops.Reciprocal)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Minimum](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Minimum.html#mindspore.ops.Minimum)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.ReduceAll](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.ReduceAll.html#mindspore.ops.ReduceAll)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Greater](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Greater.html#mindspore.ops.Greater)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Mod](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Mod.html#mindspore.ops.Mod)|Changed|r1.1: Ascend => r1.2: Ascend/CPU|operations--Math Operators 
-|[mindspore.ops.Atan2](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Atan2.html#mindspore.ops.Atan2)|Changed|r1.1: Ascend => r1.2: Ascend/CPU|operations--Math Operators -|[mindspore.ops.Div](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Div.html#mindspore.ops.Div)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Math Operators -|[mindspore.ops.Log1p](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Log1p.html#mindspore.ops.Log1p)|Changed|r1.1: Ascend => r1.2: Ascend/GPU|operations--Math Operators -|[mindspore.ops.Print](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Print.html#mindspore.ops.Print)|Changed|r1.1: Ascend => r1.2: Ascend/GPU|operations--Debug Operators -|[mindspore.ops.TensorScatterUpdate](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.TensorScatterUpdate.html#mindspore.ops.TensorScatterUpdate)|Changed|r1.1: Ascend => r1.2: Ascend/GPU|operations--Array Operators -|[mindspore.ops.ArgMinWithValue](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.ArgMinWithValue.html#mindspore.ops.ArgMinWithValue)|Changed|r1.1: Ascend => r1.2: Ascend/CPU|operations--Array Operators -|[mindspore.ops.ResizeNearestNeighbor](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.ResizeNearestNeighbor.html#mindspore.ops.ResizeNearestNeighbor)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Array Operators -|[mindspore.ops.Squeeze](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Squeeze.html#mindspore.ops.Squeeze)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Array Operators -|[mindspore.ops.GatherD](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.GatherD.html#mindspore.ops.GatherD)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Array Operators 
-|[mindspore.ops.Gather](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Gather.html#mindspore.ops.Gather)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Array Operators -|[mindspore.ops.UnsortedSegmentSum](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.UnsortedSegmentSum.html#mindspore.ops.UnsortedSegmentSum)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Array Operators -|[mindspore.ops.GatherNd](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.GatherNd.html#mindspore.ops.GatherNd)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|operations--Array Operators -|[mindspore.ops.count_nonzero](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.count_nonzero.html#mindspore.ops.count_nonzero)|Changed|r1.1: Ascend/GPU => r1.2: Ascend/GPU/CPU|composite -|[mindspore.ops.HyperMap](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.HyperMap.html#mindspore.ops.HyperMap)|Changed|r1.1: To Be Developed => r1.2: Ascend/GPU/CPU|composite -|[mindspore.ops.gamma](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.gamma.html#mindspore.ops.gamma)|Changed|r1.1: Ascend/GPU/CPU => r1.2: Ascend|composite -|[mindspore.ops.MultitypeFuncGraph](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.MultitypeFuncGraph.html#mindspore.ops.MultitypeFuncGraph)|Changed|r1.1: To Be Developed => r1.2: Ascend/GPU/CPU|composite -|[mindspore.ops.poisson](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.poisson.html#mindspore.ops.poisson)|Changed|r1.1: Ascend/GPU/CPU => r1.2: Ascend|composite -|[mindspore.ops.laplace](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.laplace.html#mindspore.ops.laplace)|Changed|r1.1: Ascend/GPU/CPU => r1.2: Ascend|composite 
-|[mindspore.ops.GradOperation](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.GradOperation.html#mindspore.ops.GradOperation)|Changed|r1.1: To Be Developed => r1.2: Ascend/GPU/CPU|composite -|[mindspore.ops.Broadcast](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.Broadcast.html#mindspore.ops.Broadcast)|Changed|Ascend/GPU|r1.1: operations--Common Operators => r1.2: operations--Communication Operators -|[mindspore.ops.ScalarCast](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.ScalarCast.html#mindspore.ops.ScalarCast)|Changed|Ascend/GPU/CPU|r1.1: operations--Inner Operators => r1.2: operations--Array Operators -|[mindspore.ops.ReduceScatter](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.ReduceScatter.html#mindspore.ops.ReduceScatter)|Changed|Ascend/GPU|r1.1: operations--Common Operators => r1.2: operations--Communication Operators -|[mindspore.ops.ReduceOp](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.ReduceOp.html#mindspore.ops.ReduceOp)|Changed|Ascend/GPU|r1.1: operations--Common Operators => r1.2: operations--Communication Operators -|[mindspore.ops.AllReduce](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.AllReduce.html#mindspore.ops.AllReduce)|Changed|Ascend/GPU|r1.1: operations--Common Operators => r1.2: operations--Communication Operators -|[mindspore.ops.AllGather](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/mindspore/ops/mindspore.ops.AllGather.html#mindspore.ops.AllGather)|Changed|Ascend/GPU|r1.1: operations--Common Operators => r1.2: operations--Communication Operators - -> diff --git a/resource/jieba.txt b/resource/jieba.txt deleted file mode 100644 index 0c29cddcc7d9964a2363dbe0fb7b5076e59adcd6..0000000000000000000000000000000000000000 --- a/resource/jieba.txt +++ /dev/null @@ -1,31 +0,0 @@ -数据集 -损失函数 -优化器 -学习率 -静态图 -动态图 -混合精度 -图算融合 -数据增强 -梯度累积 -分布式训练 
-自定义算子 -数据并行 -模型并行 -混合并行 -二阶优化 -循环下沉 -训练可视 -模型可视化 -推理服务 -运行时 -预训练 -模型导出 -模型转换 -迁移学习 -增量训练 -环境变量 -语音 -计算机视觉 -昇腾 -AI处理器 \ No newline at end of file diff --git a/resource/release/release_list_en.md b/resource/release/release_list_en.md deleted file mode 100644 index 11e2843fd8efd8b35092b1d05d03b945d686144b..0000000000000000000000000000000000000000 --- a/resource/release/release_list_en.md +++ /dev/null @@ -1,373 +0,0 @@ -# Release List - - - -- [Release List](#release-list) - - [Related Documents](#related-documents) - - [Downloads](#downloads) - - [1.2.0](#120) - - [1.2.0-rc1](#120-rc1) - - [1.1.1](#111) - - [1.1.0](#110) - - [1.0.1](#101) - - [1.0.0](#100) - - [0.7.0-beta](#070-beta) - - [0.6.0-beta](#060-beta) - - [0.5.2-beta](#052-beta) - - [0.5.0-beta](#050-beta) - - [0.3.0-alpha](#030-alpha) - - [0.2.0-alpha](#020-alpha) - - [0.1.0-alpha](#010-alpha) - - - - - -## Related Documents - -| Category | Subcategory | Version | -| --- | --- | --- | -| Releasenotes and API Updates | | [1.2.0](https://gitee.com/mindspore/mindspore/blob/r1.2/RELEASE.md)    [1.1.1](https://gitee.com/mindspore/mindspore/blob/r1.1/RELEASE.md)    [1.0.1](https://gitee.com/mindspore/mindspore/blob/r1.0/RELEASE.md)    [0.7.0-beta](https://gitee.com/mindspore/mindspore/blob/r0.7/RELEASE.md)    [0.6.0-beta](https://gitee.com/mindspore/mindspore/blob/r0.6/RELEASE.md)   
[0.5.2-beta](https://gitee.com/mindspore/mindspore/blob/r0.5/RELEASE.md)    [0.3.0-alpha](https://gitee.com/mindspore/mindspore/blob/r0.3/RELEASE.md)    [0.2.0-alpha](https://gitee.com/mindspore/mindspore/blob/r0.2/RELEASE.md)    [0.1.0-alpha](https://gitee.com/mindspore/mindspore/blob/r0.1/RELEASE.md) | -| Installation | | [1.2.0](https://gitee.com/mindspore/docs/tree/r1.2/install)    [1.1.1](https://gitee.com/mindspore/docs/tree/r1.1/install)    [1.0.1](https://gitee.com/mindspore/docs/tree/r1.0/install)    [0.7.0-beta](https://gitee.com/mindspore/docs/tree/r0.7/install)    [0.6.0-beta](https://gitee.com/mindspore/docs/tree/r0.6/install)   
[0.5.2-beta](https://gitee.com/mindspore/docs/tree/r0.5/install)    [0.3.0-alpha](https://gitee.com/mindspore/docs/tree/r0.3/install)    [0.2.0-alpha](https://gitee.com/mindspore/docs/tree/r0.2/install)    [[0.1.0-alpha]](https://gitee.com/mindspore/docs/tree/r0.1/install) | -| Tutorials | Training | [1.2.0](https://www.mindspore.cn/tutorial/training/en/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/tutorial/training/en/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/tutorial/training/en/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/tutorial/en/r0.7/index.html)    [0.6.0-beta](https://www.mindspore.cn/tutorial/en/r0.6/index.html)   
[0.5.2-beta](https://www.mindspore.cn/tutorial/en/r0.5/index.html)    [0.3.0-alpha](https://www.mindspore.cn/tutorial/en/0.3.0-alpha/index.html)    [0.2.0-alpha](https://www.mindspore.cn/tutorial/en/0.2.0-alpha/index.html)    [0.1.0-alpha](https://www.mindspore.cn/tutorial/en/0.1.0-alpha/index.html)    [master](https://www.mindspore.cn/tutorial/training/en/master/index.html) | -| | Inference | [1.2.0](https://www.mindspore.cn/tutorial/inference/en/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/tutorial/inference/en/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/tutorial/inference/en/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/tutorial/en/r0.7/index.html)    [0.6.0-beta](https://www.mindspore.cn/tutorial/en/r0.6/index.html)   
[0.5.2-beta](https://www.mindspore.cn/tutorial/en/r0.5/index.html)    [0.3.0-alpha](https://www.mindspore.cn/tutorial/en/0.3.0-alpha/index.html)    [0.2.0-alpha](https://www.mindspore.cn/tutorial/en/0.2.0-alpha/index.html)    [0.1.0-alpha](https://www.mindspore.cn/tutorial/en/0.1.0-alpha/index.html)    [master](https://www.mindspore.cn/tutorial/inference/en/master/index.html) | -| | Mobile Phone&IoT | [1.2.0](https://www.mindspore.cn/tutorial/lite/en/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/tutorial/lite/en/r1.1/index.html)    [1.0.0](https://www.mindspore.cn/tutorial/lite/en/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/lite/tutorial/en/r0.7/index.html)    [master](https://www.mindspore.cn/tutorial/lite/en/master/index.html) | -| Docs | Python API | [1.2.0](https://www.mindspore.cn/doc/api_python/en/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/api_python/en/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/doc/api_python/en/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/api/en/r0.7/index.html)    [0.6.0-beta](https://www.mindspore.cn/api/en/r0.6/index.html)   
[0.5.2-beta](https://www.mindspore.cn/api/en/r0.5/index.html)    [0.3.0-alpha](https://www.mindspore.cn/api/en/0.3.0-alpha/index.html)    [0.2.0-alpha](https://www.mindspore.cn/api/en/0.2.0-alpha/index.html)    [0.1.0-alpha](https://www.mindspore.cn/api/en/0.1.0-alpha/index.html)    [master](https://www.mindspore.cn/doc/api_python/en/master/index.html) | -| | C++ API | [1.2.0](https://www.mindspore.cn/doc/api_cpp/en/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/api_cpp/en/r1.1/index.html)    [1.0.0](https://www.mindspore.cn/doc/api_cpp/en/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/lite/apic/en/r0.7/lite/namespacemembers.html)    [master](https://www.mindspore.cn/doc/api_cpp/en/master/index.html) | -| | Java API | [1.2.0](https://www.mindspore.cn/doc/api_java/en/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/api_java/en/r1.1/index.html)    [master](https://www.mindspore.cn/doc/api_java/en/master/index.html) | -| | Programming Guide | [1.2.0](https://www.mindspore.cn/doc/programming_guide/en/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/programming_guide/en/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/doc/programming_guide/en/r1.0/index.html)    [master](https://www.mindspore.cn/doc/programming_guide/en/master/index.html) | -| | Design&Specification | [1.2.0](https://www.mindspore.cn/doc/note/en/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/note/en/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/doc/note/en/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/docs/en/r0.7/index.html)    [0.7.0-beta (Lite)   ](https://www.mindspore.cn/lite/docs/en/r0.7/index.html)   
[0.6.0-beta](https://www.mindspore.cn/docs/en/r0.6/index.html)    [0.5.2-beta](https://www.mindspore.cn/docs/en/r0.5/index.html)    [0.3.0-alpha](https://www.mindspore.cn/docs/en/0.3.0-alpha/index.html)    [0.2.0-alpha](https://www.mindspore.cn/docs/en/0.2.0-alpha/index.html)    [0.1.0-alpha](https://www.mindspore.cn/docs/en/0.1.0-alpha/index.html)    [master](https://www.mindspore.cn/doc/note/en/master/index.html) | -| | FAQ | [1.2.0](https://www.mindspore.cn/doc/faq/en/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/faq/en/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/doc/faq/en/r1.0/index.html)    [master](https://www.mindspore.cn/doc/faq/en/master/index.html) | - -## Downloads - -### 1.2.0 - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl) | 3666923c62ebf012ce5b8ab458d3cfd279cf68ad444509ccdcfe21aa38c9d2e7 | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | cca1f78a0402aa6319d1e77ca49be78c8e0180d480def0079e0d209378eaefb1 | -| | | EulerOS-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | 1181415cc603ddeff4cfd660e736b57a3cb5eb781c9649d828dcbebb6d90cb5f | -| | | CentOS-x86 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/centos_x86/mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl) | 510ac1c470b5d5a4321f90f8c9130e76025d75a339766f16c7bc42efcee3da81 | 
-| | | CentOS-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | cb0443a05d39ffa8c36cf289a279d29700a54eb9dc150fb4ad9807a723b1ef42 | -| | | Kylin-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/kylin_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | 1181415cc603ddeff4cfd660e736b57a3cb5eb781c9649d828dcbebb6d90cb5f | -| | Ascend 310 | Ubuntu-x86 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl) | d00d24efd0ce811f0de8ea13dee19e30663e5954eba1161ecf9f51d92e58cc73 | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | 2c5d0572bba2f9e0edaa1b6076af3ecf7a23c3486b4d8c3d2abaf39e25667822 | -| | | EulerOS-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | 09c936c07297d2d16df581335f1f51651855e0eb9f1ea64b4d3b66d6978a0428 | -| | | CentOS-x86 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl) | 13590cbb66df53430773732a6a54427565fd510cd184688df186c8510302201a | -| | | CentOS-aarch64 | 
[mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | 377bac45c0e46e27afd0b6eb144eb9ea7ea13e61326d923e219fbe6577fcc61a | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.2.0-cp37-cp37m-linux_x86_64.whl) | 6efe2ce935703572ff2cc8ebaacc76104308f979dd0444769e4c6a77fc11880d | -| | CPU | Ubuntu-x86 | [mindspore-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/cpu/ubuntu_x86/mindspore-1.2.0-cp37-cp37m-linux_x86_64.whl) | 92421a45b0e5352621b6d17bcd6deafdbc9965b7ecd9f1219b83a8c02384c8d3 | -| | | Ubuntu-aarch64 | [mindspore-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/cpu/ubuntu_aarch64/mindspore-1.2.0-cp37-cp37m-linux_aarch64.whl) | 8042752a39c92fe39efc2208e236a3f989a4bb3d0ab4543b364d00fa79f11913 | -| | | Windows-x64 | [mindspore-1.2.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/cpu/windows_x64/mindspore-1.2.0-cp37-cp37m-win_amd64.whl) | 6038b1c28d574c565bf6a62a317421418960ee7df03bca9487d8f7c909ddb208 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl) | 24e83c1732caa1943aa7a5f5b2aaf350f47f04f5ba37c3fc4792231e86f5f36e | -| | | Ubuntu-aarch64 | [mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl) | c0f99217649e227b44c8e33644a1c8a3b054966c0e07541be336322d23ccc93a | -| | | EulerOS-aarch64 | 
[mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/euleros_aarch64/mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl) | 2d4991636bd6ebe2f0e22e21fb2dc44625362a9a2154168720f1db95c3b5f8a5 | -| | | CentOS-x86 | [mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/centos_x86/mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl) | a99f07c820419d4fbb35bbb04c30be70f7ece5cc77578d405318d58d414499ba | -| | | CentOS-aarch64 | [mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/centos_aarch64/mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl) | 7192be74e05a97cec81d003978d691d65ee768c8d90d5e97237524a286076b43 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl) | 24e83c1732caa1943aa7a5f5b2aaf350f47f04f5ba37c3fc4792231e86f5f36e | -| MindArmour | Ascend 910 | Ubuntu-x86
CentOS-x86 | [mindarmour-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindArmour/x86_64/mindarmour-1.2.0-cp37-cp37m-linux_x86_64.whl) | f1387b5208049c25938c320056673c7df5a7e31c13b72ca8994c2da2e139971b | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS-aarch64 | [mindarmour-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindArmour/aarch64/mindarmour-1.2.0-cp37-cp37m-linux_aarch64.whl) | ccb8356f17513588117df52dc13d8b652ea4040b90a8f32fd8d1494ec488065b | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindArmour/x86_64/mindarmour-1.2.0-cp37-cp37m-linux_x86_64.whl) | f1387b5208049c25938c320056673c7df5a7e31c13b72ca8994c2da2e139971b | -| MindSpore
Hub | | any | [mindspore_hub-1.2.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Hub/any/mindspore_hub-1.2.0-py3-none-any.whl) | 6a6ac6695b859f6d5d22a0531e5ff2c27e390793b25d74ac67fe130189cda387 | -| MindQuantum | CPU | Ubuntu-x86 | [mindquantum-0.1.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindQuantum/ubuntu_x86/mindquantum-0.1.0-py3-none-any.whl) | 94a08dec7907756d063e97205c977c0463f5461ca60f5cc6526cef9c90ab8da2 | -| MindSpore
Serving | Ascend 910
Ascend310
GPU cuda 10.1 | Ubuntu-x86 | [mindspore_serving-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Serving/ubuntu_x86/mindspore_serving-1.2.0-cp37-cp37m-linux_x86_64.whl) | 5775984408f3f93907fbb876c68c52abd2053d2382fd99cb5ad67d6d320e03c1 | -| | | Ubuntu-aarch64 | [mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Serving/ubuntu_aarch64/mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl) | 7008fc0f9feb8f40951d14726fd04c1fa17ad515026cd90ebeeec013b8da9c53 | -| | | EulerOS-aarch64 | [mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Serving/euleros_aarch64/mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl) | baea936bdf45aaa2e55cf590a058d471bb62ecbe5333c82dc2f6b6c195cd8844 | -| | | CentOS-x86 | [mindspore_serving-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Serving/centos_x86/mindspore_serving-1.2.0-cp37-cp37m-linux_x86_64.whl) | 68ecc2233302acb27b6aa5c6e79b532c89303d388b1432566bf12ec7985fd523 | -| | | CentOS-aarch64 | [mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Serving/centos_aarch64/mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl) | 377e791bd463ff192b51cbe41bab83db533d7f79723663a8562436211d417ac7 | - -### 1.2.0-rc1 - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) 
| -| | | Ubuntu-aarch64 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/centos_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/centos_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | Ascend 310 | Ubuntu-x86 | 
[mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | 
[mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore_gpu-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | CPU | Ubuntu-x86 | [mindspore-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/ubuntu_x86/mindspore-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/ubuntu_x86/mindspore-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/ubuntu_aarch64/mindspore-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/ubuntu_aarch64/mindspore-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | Windows-x64 | [mindspore-1.2.0rc1-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/windows_x64/mindspore-1.2.0rc1-cp37-cp37m-win_amd64.whl) | 
[mindspore-1.2.0rc1-cp37-cp37m-win_amd64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/windows_x64/mindspore-1.2.0rc1-cp37-cp37m-win_amd64.whl.sha256) | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | [mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/euleros_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/euleros_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/centos_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/centos_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | 
[mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/centos_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/centos_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| MindArmour | Ascend 910 | Ubuntu-x86
CentOS-x86 | [mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/x86_64/mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/x86_64/mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS-aarch64 | [mindarmour-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/aarch64/mindarmour-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindarmour-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/aarch64/mindarmour-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/x86_64/mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/x86_64/mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| MindSpore
Hub | | any | [mindspore_hub-1.2.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/Hub/any/mindspore_hub-1.2.0-py3-none-any.whl) | [mindspore_hub-1.2.0-py3-none-any.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/Hub/any/mindspore_hub-1.2.0-py3-none-any.whl.sha256) | -| MindQuantum | CPU | Ubuntu-x86 | [mindquantum-0.1.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindQuantum/ubuntu_x86/mindquantum-0.1.0-py3-none-any.whl) | [mindquantum-0.1.0-py3-none-any.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindQuantum/ubuntu_x86/mindquantum-0.1.0-py3-none-any.whl.sha256) | - -### 1.1.1 - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | 
[mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/centos_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/centos_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | Ascend 310 | Ubuntu-x86 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 
| [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_gpu-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | CPU | Ubuntu-x86 | [mindspore-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/ubuntu_x86/mindspore-1.1.1-cp37-cp37m-linux_x86_64.whl) | 
[mindspore-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/ubuntu_x86/mindspore-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/ubuntu_aarch64/mindspore-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/ubuntu_aarch64/mindspore-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | Windows-x64 | [mindspore-1.1.1-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/windows_x64/mindspore-1.1.1-cp37-cp37m-win_amd64.whl) | [mindspore-1.1.1-cp37-cp37m-win_amd64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/windows_x64/mindspore-1.1.1-cp37-cp37m-win_amd64.whl.sha256) | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | 
[mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/euleros_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/euleros_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/centos_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/centos_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | [mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/centos_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/centos_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| MindArmour | Ascend 910 | Ubuntu-x86
CentOS-x86 | [mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/x86_64/mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/x86_64/mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS-aarch64 | [mindarmour-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/aarch64/mindarmour-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindarmour-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/aarch64/mindarmour-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/x86_64/mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/x86_64/mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| MindSpore
Hub | | any | [mindspore_hub-1.1.1-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Hub/any/mindspore_hub-1.1.1-py3-none-any.whl) | [mindspore_hub-1.1.1-py3-none-any.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Hub/any/mindspore_hub-1.1.1-py3-none-any.whl.sha256) | -| MindSpore
Serving | Ascend 910
Ascend310 | Ubuntu-x86 | [mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/ubuntu_x86/mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/ubuntu_x86/mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/ubuntu_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/ubuntu_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | [mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/euleros_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/euleros_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/centos_x86/mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/centos_x86/mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | [mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/centos_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl) | 
[mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/centos_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | - -### 1.1.0 - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl) | 8dc45c9c6367a9b59a5893c896b3ebfd929544325c911f48f679b9203165d85d | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) | b49124e793127ac9d55ba8e5df109a17aafb3f09bbc4a9f7bc228bfc5b652042 | -| | | EulerOS-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) | 1c03e7941a9e247fb0e64f9ba0adbcb4fde3e815cd00dc4bc79e6a81a29e0335 | -| | | CentOS-x86 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/centos_x86/mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl) | 3affe7f5dc4c7c649221d80bf8a41f54fe64028424c422d3513c11a6507f193f | -| | | CentOS-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) |051d2fe7fa1fa95e92da9841a1cdad113561da19a5e7f9abe30322ff44d68d2e | -| | Ascend 310 | Ubuntu-x86 | 
[mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl) |fe357e5e83130938ad490563fa310e71261683cea08dede8731a915373991d5c | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) |17dc70cdf79f80db0344def06a427c93c5b03f3448a5aeb34a0b41305425e0bd | -| | | EulerOS-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) |be0881c5848696f67cbf54456babf344317f9509ad0961487588ae5e26ec2f87 | -| | | CentOS-x86 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl) |fc0c6d3cfd6688f6b7c999a4189cd06a8496ccde45db8528b57439edb12f819e | -| | | CentOS-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) |2a6856e2a7bd8db106748877bc2b4fa9d9804db265578d2d5f057a4e79073305 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.1.0-cp37-cp37m-linux_x86_64.whl) | 11386b0e156f033987f879e3b79f87e7cde0a6881063434f2c84a8564099e858 | -| | CPU | Ubuntu-x86 | [mindspore-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/cpu/ubuntu_x86/mindspore-1.1.0-cp37-cp37m-linux_x86_64.whl) | 
1a1683e9c30650284f23001a1af0ae570ca854317ec52efc698ce7da604e31b0 | -| | | Ubuntu-aarch64 | [mindspore-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/cpu/ubuntu_aarch64/mindspore-1.1.0-cp37-cp37m-linux_aarch64.whl) | e1fa3cec68aef0e6619408f81d7e9e627704c1bfbf453ed90ee6d3b6c0c8c84f | -| | | Windows-x64 | [mindspore-1.1.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/cpu/windows_x64/mindspore-1.1.0-cp37-cp37m-win_amd64.whl) | ce3f1d4504fd8236113827d435c9aa691b0200e1ffeba3db391e678ad31a7df7 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl) | 85f4a38ecaf4d6799482e2a982609c46a49471325b47699c5b01b340549ab961 | -| | | Ubuntu-aarch64 | [mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl) | adb45fa766ff5ca4ef6cbe24335ca7e87c81e9293b60ffe00fec76533115ef4e | -| | | EulerOS-aarch64 | [mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/euleros_aarch64/mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl) | 78b9a728aecc01ead3687f9469d8af228917eab285f0770316bcc214b4ae3adc | -| | | CentOS-x86 | [mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/centos_x86/mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl) | a19a126ae1daa210c78aa256262303c9ad20f9cfe2404a5af840d325a471eb30 | -| | | CentOS-aarch64 | [mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/centos_aarch64/mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl) | 
f499aa428d754dc36da303f02b6531576e9e86158b213184c392f2302f13da2b | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl) | 85f4a38ecaf4d6799482e2a982609c46a49471325b47699c5b01b340549ab961 | -| MindArmour | Ascend 910 | Ubuntu-x86
CentOS-x86 | [mindarmour-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindArmour/x86_64/mindarmour-1.1.0-cp37-cp37m-linux_x86_64.whl) | 3d8b05437dca6d648073b85909508377b7cab05f9a6f52ee712592083d611770 | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS-aarch64 | [mindarmour-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindArmour/aarch64/mindarmour-1.1.0-cp37-cp37m-linux_aarch64.whl) | bc724697cf053672198be226193cd0467c5a7f2a700d26a024bcfb318724f34a | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindArmour/x86_64/mindarmour-1.1.0-cp37-cp37m-linux_x86_64.whl) | 3d8b05437dca6d648073b85909508377b7cab05f9a6f52ee712592083d611770 | -| MindSpore
Hub | | any | [mindspore_hub-1.1.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Hub/any/mindspore_hub-1.1.0-py3-none-any.whl) |1f329f35865a4e7014461e485e8a87859160aae6cbe1033973239e26c7dee01f | -| MindSpore
Serving | Ascend 910
Ascend310 | Ubuntu-x86 | [mindspore_serving-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Serving/ascend/ubuntu_x86/mindspore_serving-1.1.0-cp37-cp37m-linux_x86_64.whl) | 4bfb3a41b9fbfd77ed09244f08ec98f8e5833e6fa27d7c214b9262c1f3568258 | -| | | Ubuntu-aarch64 | [mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Serving/ascend/ubuntu_aarch64/mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl) | 095ac95e4c338b17dd192422d8bf342c55441a79eeeeb70441ccc65746b0f2d7 | -| | | EulerOS-aarch64 | [mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Serving/ascend/euleros_aarch64/mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl) | 1695ac7a01fdcb4fad9d47a172767d56fcae4979ecced298f5e33c936e821649 | -| | | CentOS-x86 | [mindspore_serving-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Serving/ascend/centos_x86/mindspore_serving-1.1.0-cp37-cp37m-linux_x86_64.whl) | ed0cc466efad7fb717527a511611c1fb2d72db4caf0f66e6fcbde0ecf7d6e525 | -| | | CentOS-aarch64 | [mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Serving/ascend/centos_aarch64/mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl) |e6ed84cfe0ff9b51b94cd2575f62238c95a73ac386e2d09adf75d3ea74177420 | - -### 1.0.1 - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.0.1-cp37-cp37m-linux_x86_64.whl) | 23664e8ab2e0f2b1a523de96753e300d42f2438e61f7d173b17a637fd139e2d1 | -| | | Ubuntu-aarch64 | 
[mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl) | 9584a9f893ccdb93a2581c034b51045e8882ab67ce203366a212f981c68ad602 | -| | | EulerOS-aarch64 | [mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl) | a662f447e79604aec52224f9dca6c73e4127cb497250e82517e8d5d8b83332b0 | -| | | CentOS-x86 | [mindspore_ascend-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/ascend/centos_x86/mindspore_ascend-1.0.1-cp37-cp37m-linux_x86_64.whl) | 3b1f9c871b34ffbfa45d7dc55355adc0e828dbc5fb27d380ffed203644ef9155 | -| | | CentOS-aarch64 | [mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl) | e01d0c52c7cf5670368e9bac6f06f9627eb016d109a48fc77dd7debd135599c9 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.0.1-cp37-cp37m-linux_x86_64.whl) | 5c84995e9f9a3640c31df0e96f69a37fa765f4e332cd71d9347c4e8c6c1d31f1 | -| | CPU | Ubuntu-x86 | [mindspore-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/cpu/ubuntu_x86/mindspore-1.0.1-cp37-cp37m-linux_x86_64.whl) | d8e66d962f66c00d7590ef24093186c3265cca60c27ff423769a5ef48922f494 | -| | | Ubuntu-aarch64 | [mindspore-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/cpu/ubuntu_aarch64/mindspore-1.0.1-cp37-cp37m-linux_aarch64.whl) | 8a2c630550e4ff6c786b1a53635e075d0a6625605af7221275360a04cdc3db0d | -| | | Windows-x64 | 
[mindspore-1.0.1-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/cpu/windows_x64/mindspore-1.0.1-cp37-cp37m-win_amd64.whl) | f50e1de60d6777bb449802024b7ac2fd90f58fb191bfd69e56079f6dbc5fe1b3 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl) | a1f5beb078d521f40454235f9bfcec5036479ada74d2a51a233ccbce3544e7ab | -| | | Ubuntu-aarch64 | [mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl) | 057ad1daec0cf48ece5dd9174aa95498816e373b831818b6e885b24173bd9cf5 | -| | | EulerOS-aarch64 | [mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/euleros_aarch64/mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl) | e5551323f2f0a89a7eedd4eb508fffb9a71761bb1d70cc9f5f9e2e63a66af78d | -| | | CentOS-x86 | [mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/centos_x86/mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl) | 62a86fa5faa32ee196b78071940f674642278ae016c9662d1051461a0c003969 | -| | | CentOS-aarch64 | [mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/centos_aarch64/mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl) | f436c042b77e52d1f95dd0d104f24189cc7474660603561b196e49ca36b2eded | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl) | a1f5beb078d521f40454235f9bfcec5036479ada74d2a51a233ccbce3544e7ab | -| MindArmour | Ascend 910 | Ubuntu-x86
CentOS-x86 | [mindarmour-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindArmour/x86_64/mindarmour-1.0.1-cp37-cp37m-linux_x86_64.whl) | 5f6cee4c36e009bc7cf0cb65d8c5d9a01d87b00dd9e4c48fb9c836fdd4be38ab | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS-aarch64 | [mindarmour-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindArmour/aarch64/mindarmour-1.0.1-cp37-cp37m-linux_aarch64.whl) | 1bd8e174f9a83537f4a60371fa2a0effe78851c9181e2666d9e2f49cab25efce | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindArmour/x86_64/mindarmour-1.0.1-cp37-cp37m-linux_x86_64.whl) | 5f6cee4c36e009bc7cf0cb65d8c5d9a01d87b00dd9e4c48fb9c836fdd4be38ab | -| MindSpore
Hub | | any | [mindspore_hub-1.0.1-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/Hub/any/mindspore_hub-1.0.1-py3-none-any.whl) | 5a0dc560c86aa35a54f4d8e20ba6e9b2b6084a5143fb4d6d73c2f6f3e55ab49e | - -### 1.0.0 - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl) | 4682be18cffdf86346bdb286ccd9e05f33be4138415dbc7db1650d029510ee44 | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl) | 6912fcc0488f3a8fa336d9680f506b5f0c97c5d82844d8fbfd9163bbcbe3140a | -| | | EulerOS-x86 | [mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/euleros_x86/mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl) | 20fb5d35ccd7c1354084da48fa8e3cb93b6fa4843211be82a542dff775c39c0a | -| | | EulerOS-aarch64 | [mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl) | b9700fc718e28026269f4639c7a963653a485c7213eed7d534ed26f89d98a44e | -| | | CentOS-x86 | [mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/centos_x86/mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl) | 453d4ddb93e3e0ed79ac2ec16920994b387376682d07ba71f1e1387cccd57ded | -| | | CentOS-aarch64 | 
[mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl) |f2066bfd3ffdeb458c6cdcdec2eb0c47c444336c7d983134638ae2de0cec0564 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.0.0-cp37-cp37m-linux_x86_64.whl) | af2b3b7744fdd475333a81e3dfadc81be2156e67e660477f92b584807b34cb70 | -| | CPU | Ubuntu-x86 | [mindspore-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/cpu/ubuntu_x86/mindspore-1.0.0-cp37-cp37m-linux_x86_64.whl) | a0a3c81b500d442d0324d82ed49808a32fb62c9e776fe614a863345965180f7c | -| | | Ubuntu-aarch64 | [mindspore-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/cpu/ubuntu_aarch64/mindspore-1.0.0-cp37-cp37m-linux_aarch64.whl) | eb3bf9d7a40a4f7bbb3ba566b8353ff8a2f89f2fae08d770af0f7d8b9f83d3ea | -| | | Windows-x64 | [mindspore-1.0.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/cpu/windows_x64/mindspore-1.0.0-cp37-cp37m-win_amd64.whl) | d30c89941939164fc1af8e406b202c1671a1309991a957a0f950b8c71775fcc9 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl) | dd951904ef10adbb93501c3cbafa6b4d34b1e8e5c4efe4fcaa7af49f0c081041 | -| | | Ubuntu-aarch64 | [mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl) | fc02c2ba823cc23eceb89c1c4f93e103502714ce5b4b7ea020c8d744220ae260 | -| | | EulerOS-x86 | 
[mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/euleros_x86/mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl) | 2df33884fe557e1073ac7bf18fef135dd2f0a90d8dfbc1a0fe6ab223fd959e9c | -| | | EulerOS-aarch64 | [mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/euleros_aarch64/mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl) | 27bbdb4354f43b696068cc926dfa4a967e5aa48e3f9276a9501df84966bd465e | -| | | CentOS-x86 | [mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/centos_x86/mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl) | 8eab8881dd585731dfdedaec16b456fe6e80242199efbdc5703e20382b59aeab | -| | | CentOS-aarch64 | [mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/centos_aarch64/mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl) | 3f76f2ff8c809b638136748348d5860b2ef6f6412ec37db2e02d00a7bc53c91f | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl) | dd951904ef10adbb93501c3cbafa6b4d34b1e8e5c4efe4fcaa7af49f0c081041 | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86
CentOS x86_64 | [mindarmour-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindArmour/x86_64/mindarmour-1.0.0-cp37-cp37m-linux_x86_64.whl) | a139ded76899e5901889fc4e578165ef78584a127f9c264830e4e2806c30cc82 | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS aarch64 | [mindarmour-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindArmour/aarch64/mindarmour-1.0.0-cp37-cp37m-linux_aarch64.whl) | e895ba5a0d207e0cb3e93acdfaaa399a63161443371ef68d626d29542e41d940 | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindArmour/x86_64/mindarmour-1.0.0-cp37-cp37m-linux_x86_64.whl) | a139ded76899e5901889fc4e578165ef78584a127f9c264830e4e2806c30cc82 | -| MindSpore
Hub | | any | [mindspore_hub-1.0.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/Hub/any/mindspore_hub-1.0.0-py3-none-any.whl) |0cb7ea4c8cd81279bc61558e1102da14516d2ea9653269cb0519c7085df8e3c3 | -| MindSpore
Lite RT | CPU | Android-aarch32 | [mindspore-lite-1.0.0-runtime-arm32-cpu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch32/mindspore-lite-1.0.0-runtime-arm32-cpu.tar.gz) |abb28cee1b8a439c51d05a7c4521dc3f76d05ae79db4be781c932ee5f0abc774 | -| | | Android-aarch64 | [mindspore-lite-1.0.0-runtime-arm64-cpu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-runtime-arm64-cpu.tar.gz) |9ca80c1fff35008f8114b3524fc2d897dac1db247df873ea6560f3ddc548a7f3 | -| | GPU | Android-aarch64 | [mindspore-lite-1.0.0-runtime-arm64-gpu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-runtime-arm64-gpu.tar.gz) |eae1c9856ae7f647ce52dae79f826412e07bb058e6cf9031d85ab0ca72e42156 | -| MindSpore
Lite Converter | CPU | Ubuntu-x86 | [mindspore-lite-1.0.0-converter-ubuntu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/ubuntu_x86/mindspore-lite-1.0.0-converter-ubuntu.tar.gz) |baaf3e1d88416da535432949810c80e76e4189b3567b952b9d99397fcda0cad8 | -| | | Windows-x86 | [mindspore-lite-1.0.0-converter-win-cpu.zip](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/windows_x86/mindspore-lite-1.0.0-converter-win-cpu.zip) |6eae6f46ebe98697cf0a36268159d74a95ddf743ee27ec6de2088d469c753960 | -| MindSpore
Lite Minddata | CPU | Android-aarch32 | [mindspore-lite-1.0.0-minddata-arm32-cpu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch32/mindspore-lite-1.0.0-minddata-arm32-cpu.tar.gz) |d998c5eba81b254c057eae61aeacd72cee24ad75eb01be89321133e6e035a330 | -| | | Android-aarch64 | [mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz) |9f6bd53663d029b7638274fca94e47efbfa33ff7dab5dbe1cf328379e3cbbc18 | - -### 0.7.0-beta - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/ascend/ubuntu_x86/mindspore_ascend-0.7.0-cp37-cp37m-linux_x86_64.whl) | 522b80e84de1b414d3800a27d01e40f75332000e5246b24cc1aea7d9e5566ce5 | -| | | Ubuntu-aarch64 | [mindspore_ascend-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-0.7.0-cp37-cp37m-linux_aarch64.whl) | cbdb56a20860aaf1df4a8cbcc090da837ea2a5d115a173e79cd746f84263d73b | -| | | EulerOS-x86 | [mindspore_ascend-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/ascend/euleros_x86/mindspore_ascend-0.7.0-cp37-cp37m-linux_x86_64.whl) | a21f086d2467eafaffc6934030941f24043e85fbff4888e4fb7ce879e59e5094 | -| | | EulerOS-aarch64 | [mindspore_ascend-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/ascend/euleros_aarch64/mindspore_ascend-0.7.0-cp37-cp37m-linux_aarch64.whl) | b1fbe55d7a461b8aa37efec100b87bad4332be7ef954ab83c01bec5f0f5da1e8 | -| | GPU CUDA 10.1 | Ubuntu-x86 | 
[mindspore_gpu-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.7.0-cp37-cp37m-linux_x86_64.whl) | 128eab1c10574de140f3c1b6aaaf55b383cdea806dbc8de23966c8d4b4aafb55 | -| | CPU | Ubuntu-x86 | [mindspore-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/cpu/ubuntu_x86/mindspore-0.7.0-cp37-cp37m-linux_x86_64.whl) | 473de6725a344e3b6353121de66dd06c8012e7eba3af3b96cd5d8a476b3b6e64 | -| | | Ubuntu-aarch64 | [mindspore-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/cpu/ubuntu_aarch64/mindspore-0.7.0-cp37-cp37m-linux_aarch64.whl) | 6b187948994eeaa2b4817303be83c6ccea3597c2aad5355428d5eaeb273604bc | -| | | Windows-x64 | [mindspore-0.7.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/cpu/windows_x64/mindspore-0.7.0-cp37-cp37m-win_amd64.whl) | 396152fab16ce5fcb4106cf49e02989b2e19503896304b1b040932eaddfdf56f | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl) | 3f913d74643eab858bd86d1ea73eb05ee4d402f8164adfb439b6346425abfa19 | -| | | Ubuntu-aarch64 | [mindinsight-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindInsight/ascend/ubuntu_aarch64/mindinsight-0.7.0-cp37-cp37m-linux_aarch64.whl) | 73fb86732a88803b0699b47bd48aaa108b4921d0c3411e465bee27c348a68c76 | -| | | EulerOS-x86 | [mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindInsight/ascend/euleros_x86/mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl) | bd84b6b3432d34b235bf8d49ce78e5e0dbaf4b692e75fe12a7600dc313d9124c | -| | | EulerOS-aarch64 | 
[mindinsight-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindInsight/ascend/euleros_aarch64/mindinsight-0.7.0-cp37-cp37m-linux_aarch64.whl) | 4c48c96df6438b67fd7e36d96e251bf8e5a3dbcde13382edbaabfc03ae11e807 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl) | 3f913d74643eab858bd86d1ea73eb05ee4d402f8164adfb439b6346425abfa19 | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86 | [mindarmour-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindArmour/x86_64/mindarmour-0.7.0-cp37-cp37m-linux_x86_64.whl) | bd3725991f227dde57afb1d11baf694a6ae0591d68355de18465a05b161bab14 | -| | | Ubuntu-aarch64
EulerOS-aarch64 | [mindarmour-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindArmour/aarch64/mindarmour-0.7.0-cp37-cp37m-linux_aarch64.whl) | 928754efcde8c2106e1af4fb883899d8f66aa864e0ac1ba7358a291792d898a2 | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindArmour/x86_64/mindarmour-0.7.0-cp37-cp37m-linux_x86_64.whl) | bd3725991f227dde57afb1d11baf694a6ae0591d68355de18465a05b161bab14 | - -### 0.6.0-beta - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/ascend/ubuntu_x86/mindspore_ascend-0.6.0-cp37-cp37m-linux_x86_64.whl) | afea66c19beff797b99bf06bc0ed897a83fdb510d62e03663cef55a68e0f278f | -| | | Ubuntu-aarch64 | [mindspore_ascend-0.6.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-0.6.0-cp37-cp37m-linux_aarch64.whl) | d81a8d2641688032daf829f30d514e11f77f3ef98fb35ee6c7370723158c0abc | -| | | EulerOS-x86 | [mindspore_ascend-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/ascend/euleros_x86/mindspore_ascend-0.6.0-cp37-cp37m-linux_x86_64.whl) | 3ce2a21cd9b8cf58101ec342c9753a226f5fbe315f3a40da521fdf1d46e9dbef | -| | | EulerOS-aarch64 | [mindspore_ascend-0.6.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/ascend/euleros_aarch64/mindspore_ascend-0.6.0-cp37-cp37m-linux_aarch64.whl) | 55716a59295b92f13509f483c073a2b67cce89cb3e53919400b5d428d986f9f5 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.6.0-cp37-cp37m-linux_x86_64.whl) | f477dc282d503283c59a06e26cfad785c2c2a1996082671e46b4405a6fa539b1 | -| | CPU | Ubuntu-x86 | 
[mindspore-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/cpu/ubuntu_x86/mindspore-0.6.0-cp37-cp37m-linux_x86_64.whl) | 8daf749b9d7cf269208b47561844d088a7d200e10816f9437fbcce24fb844495 | -| | | Windows-x64 | [mindspore-0.6.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/cpu/windows_x64/mindspore-0.6.0-cp37-cp37m-win_amd64.whl) | c7ed48fdb808d4f65ca68654323f2e990a7aa7a99ccf0f19bc8bcc23024102f7 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl) | 6a825a529339eba95799bfaef6876ef2aedb45f3f81933f41c64e99d9af5c3fd | -| | | Ubuntu-aarch64 | [mindinsight-0.6.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindInsight/ascend/ubuntu_aarch64/mindinsight-0.6.0-cp37-cp37m-linux_aarch64.whl) | 165376a2ca5574568468d745101b16a7760f9cc0aa113372b57a31a35774fae7 | -| | | EulerOS-x86 | [mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindInsight/ascend/euleros_x86/mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl) | f02af4c6fa6ad88589ccc8c80134ad3ff9298379d3361839c1eb41350d2e12d8 | -| | | EulerOS-aarch64 | [mindinsight-0.6.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindInsight/ascend/euleros_aarch64/mindinsight-0.6.0-cp37-cp37m-linux_aarch64.whl) | dcb4560a41342fd61e29a4f6718459b247ba0e21b3e075ca4075ed4f9fec4375 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl) | 6a825a529339eba95799bfaef6876ef2aedb45f3f81933f41c64e99d9af5c3fd | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86 | [mindarmour-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindArmour/x86_64/mindarmour-0.6.0-cp37-cp37m-linux_x86_64.whl) | 18f245bdff972414010c9f53de402d790cdef9a74f94ac41e5b6341e778e93b3 | -| | | Ubuntu-aarch64
EulerOS-aarch64 | [mindarmour-0.6.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindArmour/aarch64/mindarmour-0.6.0-cp37-cp37m-linux_aarch64.whl) | 8da35bbf7e909bdce7972f7cd11aa495de2c18b9334052e60609dadd82649922 | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindArmour/x86_64/mindarmour-0.6.0-cp37-cp37m-linux_x86_64.whl) | 18f245bdff972414010c9f53de402d790cdef9a74f94ac41e5b6341e778e93b3 | - -### 0.5.2-beta - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.5.2-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/ascend/ubuntu_x86/mindspore_ascend-0.5.2-cp37-cp37m-linux_x86_64.whl) | ec4bdb6c96d9ffd2d1e465bd07ac4a8a9c0633512b4fffe9217590ad1a576ea6 | -| | | Ubuntu-aarch64 | [mindspore_ascend-0.5.2-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-0.5.2-cp37-cp37m-linux_aarch64.whl) | 8bffe9ef96d99af7238db713cc1273a63762d95e1f2d758d53e20550e2c9b2a2 | -| | | EulerOS-x86 | [mindspore_ascend-0.5.2-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/ascend/euleros_x86/mindspore_ascend-0.5.2-cp37-cp37m-linux_x86_64.whl) | 396da09b61811ab9e5f72c6ad6d68bfd757384bb7923ac50bfed80672eafcf84 | -| | | EulerOS-aarch64 | [mindspore_ascend-0.5.2-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/ascend/euleros_aarch64/mindspore_ascend-0.5.2-cp37-cp37m-linux_aarch64.whl) | 71cb819be43d3d89cc6b5e62c4e4c988e52bcbad3b3b9e7d1ed9ecc469c7043c | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-0.5.2-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.5.2-cp37-cp37m-linux_x86_64.whl) | d424840777d4751cdf1a22a8e39453a96804545ebe3f0dfb67d3aabc10fa2bd2 | -| | CPU | Ubuntu-x86 | 
[mindspore-0.5.2-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/cpu/ubuntu_x86/mindspore-0.5.2-cp37-cp37m-linux_x86_64.whl) | ef4d85704bb2588bf3208b6d62b5282db9eb792f99e8b45f571094d2ae735213 | -| | | Windows-x64 | [mindspore-0.5.2-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/cpu/windows_x64/mindspore-0.5.2-cp37-cp37m-win_amd64.whl) | 023f255a81220210679a9872261e2fe4291cdebb157029506aa6773e59e070cd | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 34b3c1a5ffbf9fa5e46dc6f295abde0308b65d76fd18d4551103ca0e222e3651 | -| | | Ubuntu-aarch64 | [mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_aarch64/mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl) | 97f92b556f8e97e250f311f5d11caace4ac5686015b099b98462d9603e2c5724 | -| | | EulerOS-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/euleros_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 5fab87c3dfda57851a9981c7567200f0f0d856462b8dd521402b085830e6554f | -| | | EulerOS-aarch64 | [mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/euleros_aarch64/mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl) | 7a157fb849f078fef6792353414737a8eccd98ba7a6fdd3c4ba3b497bc3f019f | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 34b3c1a5ffbf9fa5e46dc6f295abde0308b65d76fd18d4551103ca0e222e3651 | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86 | [mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/x86_64/mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl) | 09aa2887b0acbe9b31d07fb8d740c0bceefd6b8751aebdddd533f752f7564efc | -| | | Ubuntu-aarch64
EulerOS-aarch64 | [mindarmour-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/aarch64/mindarmour-0.5.0-cp37-cp37m-linux_aarch64.whl) | 51d2dfd9e65d6d919da36c29fa9420b68c3fb71aa33b54ec35aa5d6bb011c1a8 | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/x86_64/mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl) | 09aa2887b0acbe9b31d07fb8d740c0bceefd6b8751aebdddd533f752f7564efc | - -### 0.5.0-beta - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/ascend/ubuntu_x86/mindspore_ascend-0.5.0-cp37-cp37m-linux_x86_64.whl) | f20adcdb696316361e13fcd624d7188598b7248f77c7efc535cf193afc26f1c2 | -| | | Ubuntu-aarch64 | [mindspore_ascend-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-0.5.0-cp37-cp37m-linux_aarch64.whl) | 6b79da1ff33bc27d92835ebc40f9238c6e05a0ebd0a3307035e726b2de0eeae6 | -| | | EulerOS-x86 | [mindspore_ascend-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/ascend/euleros_x86/mindspore_ascend-0.5.0-cp37-cp37m-linux_x86_64.whl) | 34193fbd8a1181d1420386b6fa31315ac0098243dfc8965ee26a3063fedd331d | -| | | EulerOS-aarch64 | [mindspore_ascend-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/ascend/euleros_aarch64/mindspore_ascend-0.5.0-cp37-cp37m-linux_aarch64.whl) | 9ac71a08c7da451a1d8030e14ab5b239c27b42991834e40ed68486301c5ce895 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.5.0-cp37-cp37m-linux_x86_64.whl) | 4afbd886c8b7f60bfe0745e74749c5409007ff36d2f65034942a6597c5b92227 | -| | CPU | Ubuntu-x86 | 
[mindspore-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/cpu/ubuntu_x86/mindspore-0.5.0-cp37-cp37m-linux_x86_64.whl) | eec9fe7dcee83314e8c2e24b654bdfe25f6538b5fec471460bc8fd9451ee85e6 | -| | | Windows-x64 | [mindspore-0.5.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/cpu/windows_x64/mindspore-0.5.0-cp37-cp37m-win_amd64.whl) | 86fb9a4d508dcd56776a34650dea6f98905b0d1272a89af9eb3c1b9d670d06b5 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 34b3c1a5ffbf9fa5e46dc6f295abde0308b65d76fd18d4551103ca0e222e3651 | -| | | Ubuntu-aarch64 | [mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_aarch64/mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl) | 97f92b556f8e97e250f311f5d11caace4ac5686015b099b98462d9603e2c5724 | -| | | EulerOS-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/euleros_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 5fab87c3dfda57851a9981c7567200f0f0d856462b8dd521402b085830e6554f | -| | | EulerOS-aarch64 | [mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/euleros_aarch64/mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl) | 7a157fb849f078fef6792353414737a8eccd98ba7a6fdd3c4ba3b497bc3f019f | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 34b3c1a5ffbf9fa5e46dc6f295abde0308b65d76fd18d4551103ca0e222e3651 | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86 | [mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/x86_64/mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl) | 09aa2887b0acbe9b31d07fb8d740c0bceefd6b8751aebdddd533f752f7564efc | -| | | Ubuntu-aarch64
EulerOS-aarch64 | [mindarmour-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/aarch64/mindarmour-0.5.0-cp37-cp37m-linux_aarch64.whl) | 51d2dfd9e65d6d919da36c29fa9420b68c3fb71aa33b54ec35aa5d6bb011c1a8 | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/x86_64/mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl) | 09aa2887b0acbe9b31d07fb8d740c0bceefd6b8751aebdddd533f752f7564efc | - -### 0.3.0-alpha - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/ascend/ubuntu_x86/mindspore_ascend-0.3.0-cp37-cp37m-linux_x86_64.whl) | 7756a50ca3af82d06eaf456db4d062fa647a8352724ef85da6569426a6393918 | -| | | Ubuntu-aarch64 | [mindspore_ascend-0.3.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-0.3.0-cp37-cp37m-linux_aarch64.whl) | 4f613b1466ba3eafb160ebca2f8086e63fdaeee9c07a5458b4476da4fce8f90a | -| | | EulerOS-x86 | [mindspore_ascend-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/ascend/euleros_x86/mindspore_ascend-0.3.0-cp37-cp37m-linux_x86_64.whl) | 93867f72c801affec1da901e734a6d329c6d1ae3cdec1297870b46a277aa64b8 | -| | | EulerOS-aarch64 | [mindspore_ascend-0.3.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/ascend/euleros_aarch64/mindspore_ascend-0.3.0-cp37-cp37m-linux_aarch64.whl) | ecd7f3e049034d20f722073ecb87d5d8108cfc218d2594ec9771e83db5222cf8 | -| | GPU CUDA 9.2 | Ubuntu-x86 | [mindspore_gpu-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/gpu/ubuntu_x86/cuda-9.2/mindspore_gpu-0.3.0-cp37-cp37m-linux_x86_64.whl) | cd4890d3c24b47f48da48c8cc9efdf35e14f9b4a76ec66779bb24d601d2e0c25 | -| | GPU CUDA 10.1 | Ubuntu-x86 | 
[mindspore_gpu-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.3.0-cp37-cp37m-linux_x86_64.whl) | 07e7263936e1c4805fb253d596ccbeb2fccab3a48929febce85ebb7609d82c4f | -| | CPU | Ubuntu-x86 | [mindspore-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/cpu/ubuntu_x86/mindspore-0.3.0-cp37-cp37m-linux_x86_64.whl) | 38b662673af0dfc89182f5b54261aa8694b8aefdbc1e5fa2d5e06377113e8a22 | -| | | Windows-x64 | [mindspore-0.3.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/cpu/windows_x64/mindspore-0.3.0-cp37-cp37m-win_amd64.whl) | ed6b1c04d08fcfe4ac913f4593da70f78741af8e9391dce7189106b67a1393c1 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindInsight/ascend/ubuntu_x86/mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl) | 40b0697fbafa3a08393cbeda2f6286caa299a3b758beb63c9ed68f621879ef49 | -| | | Ubuntu-aarch64 | [mindinsight-0.3.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindInsight/ascend/ubuntu_aarch64/mindinsight-0.3.0-cp37-cp37m-linux_aarch64.whl) | 0005334bf15268e499d91d0a7e1bfb5abc4b5a0e10a3c4c0798da0283b28fe23 | -| | | EulerOS-x86 | [mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindInsight/ascend/euleros_x86/mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl) | e1ba11b37a0ce13c8f4f668a9479c0f97d922e4ce6128823e576c7d38298c86d | -| | | EulerOS-aarch64 | [mindinsight-0.3.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindInsight/ascend/euleros_aarch64/mindinsight-0.3.0-cp37-cp37m-linux_aarch64.whl) | 8d03e1f57b39268b4ba89c25ca88934b1a00304839f454d7bfd4747269abb359 | -| | GPU CUDA 9.2
GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindInsight/ascend/ubuntu_x86/mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl) | 40b0697fbafa3a08393cbeda2f6286caa299a3b758beb63c9ed68f621879ef49 | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86 | [mindarmour-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindArmour/x86_64/mindarmour-0.3.0-cp37-cp37m-linux_x86_64.whl) | 7a2bd6174be9e5a47e8ae6bcdd592ecdafc6e53e6f1cd5f0261fcb8337b5b337 | -| | | Ubuntu-aarch64
EulerOS-aarch64 | [mindarmour-0.3.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindArmour/aarch64/mindarmour-0.3.0-cp37-cp37m-linux_aarch64.whl) | 6d5f96cc004579d98664d018dca860d3b7f935df5b479f1192161f18a091d9c9 | -| | GPU CUDA 9.2
GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindArmour/x86_64/mindarmour-0.3.0-cp37-cp37m-linux_x86_64.whl) | 7a2bd6174be9e5a47e8ae6bcdd592ecdafc6e53e6f1cd5f0261fcb8337b5b337 | - -### 0.2.0-alpha - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/ascend/x86_ubuntu/mindspore_ascend-0.2.0-cp37-cp37m-linux_x86_64.whl) | aa1225665d05263b17bb7ec1d51dd4f933254c818bee126b6c5dac4513532a14 | -| | | EulerOS-x86 | [mindspore_ascend-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/ascend/x86_euleros/mindspore_ascend-0.2.0-cp37-cp37m-linux_x86_64.whl) | eb9a1b2a0ba32d7f7264ae344833f90a8ba2042cddf1a6a719c1a38a7ea528ea | -| | | EulerOS-aarch64 | [mindspore_ascend-0.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/ascend/aarch64_euleros/mindspore_ascend-0.2.0-cp37-cp37m-linux_aarch64.whl) | 820fb17d63341c636018d4e930151d3d2fa7ac05d4a400286c1b1aeb4cc34c6f | -| | GPU CUDA 9.2 | Ubuntu-x86 | [mindspore_gpu-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/gpu/cuda-9.2/mindspore_gpu-0.2.0-cp37-cp37m-linux_x86_64.whl) | b933f95551afc3de38ba06502ef68a5a2a50bebadcc9b92b870f8eb44f59f10a | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/gpu/cuda-10.1/mindspore_gpu-0.2.0-cp37-cp37m-linux_x86_64.whl) | e7167bad4549002f9d14b0a015abbabf56334621cf746fa60bb67df0fadb22ec | -| | CPU | Ubuntu-x86 | 
[mindspore-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/cpu/x86_ubuntu/mindspore-0.2.0-cp37-cp37m-linux_x86_64.whl) | d6702dce9dad94d1e08bedc43540ac21422e8c49d919f7abd0bb7a3aa804476f | -| | | Windows-x64 | [mindspore-0.2.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/cpu/x64_windows/mindspore-0.2.0-cp37-cp37m-win_amd64.whl) | 77151d20fe450df3697853a5309308ecc482870fd2984753b82d3db9d326fdec | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindInsight/x86_ubuntu/mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl) | 2334e833f322e0f38e04e65819214b7582527364c1e0aca79bd080a720932ca4 | -| | | EulerOS-x86 | [mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindInsight/x86_euleros/mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl) | c6c3088a499967f2fe301ea910536fdf62dd4e38edb47e144726b9a4d4a17e50 | -| | | EulerOS-aarch64 | [mindinsight-0.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindInsight/aarch64_euleros/mindinsight-0.2.0-cp37-cp37m-linux_aarch64.whl) | 6e5e03b56988968ec36c556ece06d2e5aa68e80ff475374087998e0ff360a45a | -| | GPU CUDA 9.2
GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindInsight/x86_ubuntu/mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl) | 2334e833f322e0f38e04e65819214b7582527364c1e0aca79bd080a720932ca4 | -| MindArmour | Ascend 910 | Ubuntu-x86 | [mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindArmour/x86_64/mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl) | 4146790bc73a5846e92b943dfd3febb6c62052b217eeb45b6c48aa82b51e7cc3 | -| | | EulerOS-x86 | [mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindArmour/x86_64/mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl) | 4146790bc73a5846e92b943dfd3febb6c62052b217eeb45b6c48aa82b51e7cc3 | -| | | EulerOS-aarch64 | [mindarmour-0.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindArmour/aarch64/mindarmour-0.2.0-cp37-cp37m-linux_aarch64.whl) | 5d5e532b9c4e466d89cf503f07c2d530b42216a14f193f685b9a81e190c8db44 | -| | GPU CUDA 9.2
GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindArmour/x86_64/mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl) | 4146790bc73a5846e92b943dfd3febb6c62052b217eeb45b6c48aa82b51e7cc3 | - -### 0.1.0-alpha - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/ascend/ubuntu-x86/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl) | a76df4e96c4cb69b10580fcde2d4ef46b5d426be6d47a3d8fd379c97c3e66638 | -| | | EulerOS-x86 | [mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/ascend/euleros-x86/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl) | 45d4fcb37bf796b3208b7c1ca70dc0db1387a878ef27836d3d445f311c8c02e0 | -| | | EulerOS-aarch64 | [mindspore-0.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/ascend/euleros-aarch64/mindspore-0.1.0-cp37-cp37m-linux_aarch64.whl) | 7daba2d1739ce19d55695460dce5ef044b4d38baad4f5117056e5f77f49a12b4 | -| | GPU CUDA 9.2 | Ubuntu-x86 | [mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/gpu/cuda-9.2/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl) | b6e5623135b57b8c262f3e32d97fbe1e20e8c19da185a7aba97b9dc98c7ecda1 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/gpu/cuda-10.1/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl) | 43711725cf7e071ca21b5ba25e90d6955789fe3495c62217e70869f52ae20c01 | -| | CPU | Ubuntu-x86 | 
[mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/cpu/ubuntu-x86/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl) | 45c473a97a6cb227e4221117bfb1b3ebe3f2eab938e0b76d5117e6c3127b8e5c | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindInsight/ubuntu/x86_64/mindinsight-0.1.0-cp37-cp37m-linux_x86_64.whl) | 960b6f485ce545ccce98adfb4c62cdea216c9b7851ffdc0669827c53811c3e59 | -| | | EulerOS-x86 | [mindinsight-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindInsight/euleros/x86_64/mindinsight-0.1.0-cp37-cp37m-linux_x86_64.whl) | 9f1ef04fec09e5b90be4a6223b3bf2943334746c1f5dac37207db4524b64942f | -| | | EulerOS-aarch64 | [mindinsight-0.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindInsight/euleros/aarch64/mindinsight-0.1.0-cp37-cp37m-linux_aarch64.whl) | d64207126542571057572f856010a5a8b3362ccd9e5b5c81da5b78b94face5fe | -| MindArmour | Ascend 910 | Ubuntu-x86 | [mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindArmour/x86_64/mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl) | 7796b6c114ee4962ce605da59a9bc47390c8910acbac318ecc0598829aad6e8c | -| | | EulerOS-x86 | [mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindArmour/x86_64/mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl) | 7796b6c114ee4962ce605da59a9bc47390c8910acbac318ecc0598829aad6e8c | -| | | EulerOS-aarch64 | [mindarmour-0.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindArmour/aarch64/mindarmour-0.1.0-cp37-cp37m-linux_aarch64.whl) | f354fcdbb3d8b4022fda5a6636e763f8091aca2167dc23e60b7f7b6d710523cb | -| | GPU CUDA 9.2
GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindArmour/x86_64/mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl) | 7796b6c114ee4962ce605da59a9bc47390c8910acbac318ecc0598829aad6e8c | diff --git a/resource/release/release_list_zh_cn.md b/resource/release/release_list_zh_cn.md deleted file mode 100644 index 287785bd2d3245eb8cb09a6b233c248b01ac282a..0000000000000000000000000000000000000000 --- a/resource/release/release_list_zh_cn.md +++ /dev/null @@ -1,374 +0,0 @@ -# 发布版本列表 - - - -- [发布版本列表](#发布版本列表) - - [配套资料](#配套资料) - - [下载地址](#下载地址) - - [1.2.0](#120) - - [1.2.0-rc1](#120-rc1) - - [1.1.1](#111) - - [1.1.0](#110) - - [1.0.1](#101) - - [1.0.0](#100) - - [0.7.0-beta](#070-beta) - - [0.6.0-beta](#060-beta) - - [0.5.2-beta](#052-beta) - - [0.5.0-beta](#050-beta) - - [0.3.0-alpha](#030-alpha) - - [0.2.0-alpha](#020-alpha) - - [0.1.0-alpha](#010-alpha) - - - - - -## 配套资料 - -| 类别 | 子类别 | 版本 | -| --- | --- | --- | -| 版本说明和接口变更 | | [1.2.0](https://gitee.com/mindspore/mindspore/blob/r1.2/RELEASE.md)    [1.1.1](https://gitee.com/mindspore/mindspore/blob/r1.1/RELEASE.md)    [1.0.1](https://gitee.com/mindspore/mindspore/blob/r1.0/RELEASE.md)    [0.7.0-beta](https://gitee.com/mindspore/mindspore/blob/r0.7/RELEASE.md)    [0.6.0-beta](https://gitee.com/mindspore/mindspore/blob/r0.6/RELEASE.md)   
[0.5.2-beta](https://gitee.com/mindspore/mindspore/blob/r0.5/RELEASE.md)    [0.3.0-alpha](https://gitee.com/mindspore/mindspore/blob/r0.3/RELEASE.md)    [0.2.0-alpha](https://gitee.com/mindspore/mindspore/blob/r0.2/RELEASE.md)    [0.1.0-alpha](https://gitee.com/mindspore/mindspore/blob/r0.1/RELEASE.md) | -| 安装 | | [1.2.0](https://gitee.com/mindspore/docs/tree/r1.2/install)    [1.1.1](https://gitee.com/mindspore/docs/tree/r1.1/install)    [1.0.1](https://gitee.com/mindspore/docs/tree/r1.0/install)    [0.7.0-beta](https://gitee.com/mindspore/docs/tree/r0.7/install)    [0.6.0-beta](https://gitee.com/mindspore/docs/tree/r0.6/install)   
[0.5.2-beta](https://gitee.com/mindspore/docs/tree/r0.5/install)    [0.3.0-alpha](https://gitee.com/mindspore/docs/tree/r0.3/install)    [0.2.0-alpha](https://gitee.com/mindspore/docs/tree/r0.2/install)    [0.1.0-alpha](https://gitee.com/mindspore/docs/tree/r0.1/install) | -| 教程 | 训练 | [1.2.0](https://www.mindspore.cn/tutorial/training/zh-CN/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/tutorial/training/zh-CN/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/tutorial/training/zh-CN/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/tutorial/zh-CN/r0.7/index.html)    [0.6.0-beta](https://www.mindspore.cn/tutorial/zh-CN/r0.6/index.html)   
[0.5.2-beta](https://www.mindspore.cn/tutorial/zh-CN/r0.5/index.html)    [0.3.0-alpha](https://www.mindspore.cn/tutorial/zh-CN/0.3.0-alpha/index.html)    [0.2.0-alpha](https://www.mindspore.cn/tutorial/zh-CN/0.2.0-alpha/index.html)    [0.1.0-alpha](https://www.mindspore.cn/tutorial/zh-CN/0.1.0-alpha/index.html)    [master](https://www.mindspore.cn/tutorial/training/zh-CN/master/index.html) | -| | 推理 | [1.2.0](https://www.mindspore.cn/tutorial/inference/zh-CN/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/tutorial/inference/zh-CN/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/tutorial/inference/zh-CN/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/tutorial/zh-CN/r0.7/index.html)    [0.6.0-beta](https://www.mindspore.cn/tutorial/zh-CN/r0.6/index.html)   
[0.5.2-beta](https://www.mindspore.cn/tutorial/zh-CN/r0.5/index.html)    [0.3.0-alpha](https://www.mindspore.cn/tutorial/zh-CN/0.3.0-alpha/index.html)    [0.2.0-alpha](https://www.mindspore.cn/tutorial/zh-CN/0.2.0-alpha/index.html)    [0.1.0-alpha](https://www.mindspore.cn/tutorial/zh-CN/0.1.0-alpha/index.html)    [master](https://www.mindspore.cn/tutorial/inference/zh-CN/master/index.html) | -| | 手机&IoT | [1.2.0](https://www.mindspore.cn/tutorial/lite/zh-CN/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/tutorial/lite/zh-CN/r1.1/index.html)    [1.0.0](https://www.mindspore.cn/tutorial/lite/zh-CN/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/lite/tutorial/zh-CN/r0.7/index.html)    [master](https://www.mindspore.cn/tutorial/lite/zh-CN/master/index.html) | -| 文档 | Python API | [1.2.0](https://www.mindspore.cn/doc/api_python/zh-CN/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/api_python/zh-CN/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/doc/api_python/zh-CN/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/api/zh-CN/r0.7/index.html)    [0.6.0-beta](https://www.mindspore.cn/api/zh-CN/r0.6/index.html)   
[0.5.2-beta](https://www.mindspore.cn/api/zh-CN/r0.5/index.html)    [0.3.0-alpha](https://www.mindspore.cn/api/zh-CN/0.3.0-alpha/index.html)    [0.2.0-alpha](https://www.mindspore.cn/api/zh-CN/0.2.0-alpha/index.html)    [0.1.0-alpha](https://www.mindspore.cn/api/zh-CN/0.1.0-alpha/index.html)    [master](https://www.mindspore.cn/doc/api_python/zh-CN/master/index.html) | -| | C++ API | [1.2.0](https://www.mindspore.cn/doc/api_cpp/zh-CN/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/api_cpp/zh-CN/r1.1/index.html)    [1.0.0](https://www.mindspore.cn/doc/api_cpp/zh-CN/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/lite/apic/zh-CN/r0.7/lite/namespacemembers.html)    [master](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/index.html) | -| | Java API | [1.2.0](https://www.mindspore.cn/doc/api_java/zh-CN/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/api_java/zh-CN/r1.1/index.html)    [master](https://www.mindspore.cn/doc/api_java/zh-CN/master/index.html) | -| | 编程指南 | [1.2.0](https://www.mindspore.cn/doc/programming_guide/zh-CN/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/programming_guide/zh-CN/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/doc/programming_guide/zh-CN/r1.0/index.html)    [master](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/index.html) | -| | 迁移指南 | [master](https://www.mindspore.cn/doc/migration_guide/zh-CN/master/index.html) | -| | 设计和规格 | [1.2.0](https://www.mindspore.cn/doc/note/zh-CN/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/note/zh-CN/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/doc/note/zh-CN/r1.0/index.html)    [0.7.0-beta](https://www.mindspore.cn/docs/zh-CN/r0.7/index.html)    [0.7.0-beta (Lite)   ](https://www.mindspore.cn/lite/docs/zh-CN/r0.7/index.html)   
[0.6.0-beta](https://www.mindspore.cn/docs/zh-CN/r0.6/index.html)    [0.5.2-beta](https://www.mindspore.cn/docs/zh-CN/r0.5/index.html)    [0.3.0-alpha](https://www.mindspore.cn/docs/zh-CN/0.3.0-alpha/index.html)    [0.2.0-alpha](https://www.mindspore.cn/docs/zh-CN/0.2.0-alpha/index.html)    [0.1.0-alpha](https://www.mindspore.cn/docs/zh-CN/0.1.0-alpha/index.html)    [master](https://www.mindspore.cn/doc/note/zh-CN/master/index.html) | -| | FAQ | [1.2.0](https://www.mindspore.cn/doc/faq/zh-CN/r1.2/index.html)    [1.1.1](https://www.mindspore.cn/doc/faq/zh-CN/r1.1/index.html)    [1.0.1](https://www.mindspore.cn/doc/faq/zh-CN/r1.0/index.html)    [master](https://www.mindspore.cn/doc/faq/zh-CN/master/index.html) | - -## 下载地址 - -### 1.2.0 - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl) | 3666923c62ebf012ce5b8ab458d3cfd279cf68ad444509ccdcfe21aa38c9d2e7 | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | cca1f78a0402aa6319d1e77ca49be78c8e0180d480def0079e0d209378eaefb1 | -| | | EulerOS-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | 1181415cc603ddeff4cfd660e736b57a3cb5eb781c9649d828dcbebb6d90cb5f | -| | | CentOS-x86 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/centos_x86/mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl) | 510ac1c470b5d5a4321f90f8c9130e76025d75a339766f16c7bc42efcee3da81 | -| | | 
CentOS-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | cb0443a05d39ffa8c36cf289a279d29700a54eb9dc150fb4ad9807a723b1ef42 | -| | | Kylin-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/kylin_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | 1181415cc603ddeff4cfd660e736b57a3cb5eb781c9649d828dcbebb6d90cb5f | -| | Ascend 310 | Ubuntu-x86 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl) | d00d24efd0ce811f0de8ea13dee19e30663e5954eba1161ecf9f51d92e58cc73 | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | 2c5d0572bba2f9e0edaa1b6076af3ecf7a23c3486b4d8c3d2abaf39e25667822 | -| | | EulerOS-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | 09c936c07297d2d16df581335f1f51651855e0eb9f1ea64b4d3b66d6978a0428 | -| | | CentOS-x86 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.2.0-cp37-cp37m-linux_x86_64.whl) | 13590cbb66df53430773732a6a54427565fd510cd184688df186c8510302201a | -| | | CentOS-aarch64 | [mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.2.0-cp37-cp37m-linux_aarch64.whl) | 
377bac45c0e46e27afd0b6eb144eb9ea7ea13e61326d923e219fbe6577fcc61a | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.2.0-cp37-cp37m-linux_x86_64.whl) | 6efe2ce935703572ff2cc8ebaacc76104308f979dd0444769e4c6a77fc11880d | -| | CPU | Ubuntu-x86 | [mindspore-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/cpu/ubuntu_x86/mindspore-1.2.0-cp37-cp37m-linux_x86_64.whl) | 92421a45b0e5352621b6d17bcd6deafdbc9965b7ecd9f1219b83a8c02384c8d3 | -| | | Ubuntu-aarch64 | [mindspore-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/cpu/ubuntu_aarch64/mindspore-1.2.0-cp37-cp37m-linux_aarch64.whl) | 8042752a39c92fe39efc2208e236a3f989a4bb3d0ab4543b364d00fa79f11913 | -| | | Windows-x64 | [mindspore-1.2.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindSpore/cpu/windows_x64/mindspore-1.2.0-cp37-cp37m-win_amd64.whl) | 6038b1c28d574c565bf6a62a317421418960ee7df03bca9487d8f7c909ddb208 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl) | 24e83c1732caa1943aa7a5f5b2aaf350f47f04f5ba37c3fc4792231e86f5f36e | -| | | Ubuntu-aarch64 | [mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl) | c0f99217649e227b44c8e33644a1c8a3b054966c0e07541be336322d23ccc93a | -| | | EulerOS-aarch64 | [mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/euleros_aarch64/mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl) | 
2d4991636bd6ebe2f0e22e21fb2dc44625362a9a2154168720f1db95c3b5f8a5 | -| | | CentOS-x86 | [mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/centos_x86/mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl) | a99f07c820419d4fbb35bbb04c30be70f7ece5cc77578d405318d58d414499ba | -| | | CentOS-aarch64 | [mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/centos_aarch64/mindinsight-1.2.0-cp37-cp37m-linux_aarch64.whl) | 7192be74e05a97cec81d003978d691d65ee768c8d90d5e97237524a286076b43 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0-cp37-cp37m-linux_x86_64.whl) | 24e83c1732caa1943aa7a5f5b2aaf350f47f04f5ba37c3fc4792231e86f5f36e | -| MindArmour | Ascend 910 | Ubuntu-x86
CentOS-x86 | [mindarmour-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindArmour/x86_64/mindarmour-1.2.0-cp37-cp37m-linux_x86_64.whl) | f1387b5208049c25938c320056673c7df5a7e31c13b72ca8994c2da2e139971b | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS-aarch64 | [mindarmour-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindArmour/aarch64/mindarmour-1.2.0-cp37-cp37m-linux_aarch64.whl) | ccb8356f17513588117df52dc13d8b652ea4040b90a8f32fd8d1494ec488065b | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindArmour/x86_64/mindarmour-1.2.0-cp37-cp37m-linux_x86_64.whl) | f1387b5208049c25938c320056673c7df5a7e31c13b72ca8994c2da2e139971b | -| MindSpore
Hub | | any | [mindspore_hub-1.2.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Hub/any/mindspore_hub-1.2.0-py3-none-any.whl) | 6a6ac6695b859f6d5d22a0531e5ff2c27e390793b25d74ac67fe130189cda387 | -| MindQuantum | CPU | Ubuntu-x86 | [mindquantum-0.1.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/MindQuantum/ubuntu_x86/mindquantum-0.1.0-py3-none-any.whl) | 94a08dec7907756d063e97205c977c0463f5461ca60f5cc6526cef9c90ab8da2 | -| MindSpore
Serving | Ascend 910
Ascend 310
GPU cuda 10.1 | Ubuntu-x86 | [mindspore_serving-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Serving/ubuntu_x86/mindspore_serving-1.2.0-cp37-cp37m-linux_x86_64.whl) | 5775984408f3f93907fbb876c68c52abd2053d2382fd99cb5ad67d6d320e03c1 | -| | | Ubuntu-aarch64 | [mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Serving/ubuntu_aarch64/mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl) | 7008fc0f9feb8f40951d14726fd04c1fa17ad515026cd90ebeeec013b8da9c53 | -| | | EulerOS-aarch64 | [mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Serving/euleros_aarch64/mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl) | baea936bdf45aaa2e55cf590a058d471bb62ecbe5333c82dc2f6b6c195cd8844 | -| | | CentOS-x86 | [mindspore_serving-1.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Serving/centos_x86/mindspore_serving-1.2.0-cp37-cp37m-linux_x86_64.whl) | 68ecc2233302acb27b6aa5c6e79b532c89303d388b1432566bf12ec7985fd523 | -| | | CentOS-aarch64 | [mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0/Serving/centos_aarch64/mindspore_serving-1.2.0-cp37-cp37m-linux_aarch64.whl) | 377e791bd463ff192b51cbe41bab83db533d7f79723663a8562436211d417ac7 | - -### 1.2.0-rc1 - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | 
[mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/centos_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/centos_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | Ascend 310 | Ubuntu-x86 | 
[mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | 
[mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore_gpu-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | CPU | Ubuntu-x86 | [mindspore-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/ubuntu_x86/mindspore-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindspore-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/ubuntu_x86/mindspore-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/ubuntu_aarch64/mindspore-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindspore-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/ubuntu_aarch64/mindspore-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | Windows-x64 | [mindspore-1.2.0rc1-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/windows_x64/mindspore-1.2.0rc1-cp37-cp37m-win_amd64.whl) | 
[mindspore-1.2.0rc1-cp37-cp37m-win_amd64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindSpore/cpu/windows_x64/mindspore-1.2.0rc1-cp37-cp37m-win_amd64.whl.sha256) | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | [mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/euleros_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/euleros_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/centos_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/centos_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | 
[mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/centos_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/centos_aarch64/mindinsight-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindInsight/ascend/ubuntu_x86/mindinsight-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| MindArmour | Ascend 910 | Ubuntu-x86
CentOS-x86 | [mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/x86_64/mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/x86_64/mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS-aarch64 | [mindarmour-1.2.0rc1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/aarch64/mindarmour-1.2.0rc1-cp37-cp37m-linux_aarch64.whl) | [mindarmour-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/aarch64/mindarmour-1.2.0rc1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/x86_64/mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl) | [mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/x86_64/mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl.sha256) | -| MindSpore
Hub | | any | [mindspore_hub-1.2.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/Hub/any/mindspore_hub-1.2.0-py3-none-any.whl) | [mindspore_hub-1.2.0-py3-none-any.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/Hub/any/mindspore_hub-1.2.0-py3-none-any.whl.sha256) | -| MindQuantum | CPU | Ubuntu-x86 | [mindquantum-0.1.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindQuantum/ubuntu_x86/mindquantum-0.1.0-py3-none-any.whl) | [mindquantum-0.1.0-py3-none-any.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindQuantum/ubuntu_x86/mindquantum-0.1.0-py3-none-any.whl.sha256) | - -### 1.1.1 - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | 
[mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/centos_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/centos_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | Ascend 310 | Ubuntu-x86 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 
| [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) |
-| | | CentOS-x86 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) |
-| | | CentOS-aarch64 | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) |
-| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_gpu-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) |
-| | CPU | Ubuntu-x86 | [mindspore-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/ubuntu_x86/mindspore-1.1.1-cp37-cp37m-linux_x86_64.whl) | 
[mindspore-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/ubuntu_x86/mindspore-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/ubuntu_aarch64/mindspore-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/ubuntu_aarch64/mindspore-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | Windows-x64 | [mindspore-1.1.1-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/windows_x64/mindspore-1.1.1-cp37-cp37m-win_amd64.whl) | [mindspore-1.1.1-cp37-cp37m-win_amd64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindSpore/cpu/windows_x64/mindspore-1.1.1-cp37-cp37m-win_amd64.whl.sha256) | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | 
[mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/euleros_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/euleros_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/centos_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/centos_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | [mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/centos_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/centos_aarch64/mindinsight-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| MindArmour | Ascend 910 | Ubuntu-x86
CentOS-x86 | [mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/x86_64/mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/x86_64/mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS-aarch64 | [mindarmour-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/aarch64/mindarmour-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindarmour-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/aarch64/mindarmour-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/x86_64/mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/MindArmour/x86_64/mindarmour-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| MindSpore
Hub | | any | [mindspore_hub-1.1.1-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Hub/any/mindspore_hub-1.1.1-py3-none-any.whl) | [mindspore_hub-1.1.1-py3-none-any.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Hub/any/mindspore_hub-1.1.1-py3-none-any.whl.sha256) | -| MindSpore
Serving | Ascend 910
Ascend310 | Ubuntu-x86 | [mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/ubuntu_x86/mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/ubuntu_x86/mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | Ubuntu-aarch64 | [mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/ubuntu_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/ubuntu_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | EulerOS-aarch64 | [mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/euleros_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl) | [mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/euleros_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | -| | | CentOS-x86 | [mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/centos_x86/mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl) | [mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/centos_x86/mindspore_serving-1.1.1-cp37-cp37m-linux_x86_64.whl.sha256) | -| | | CentOS-aarch64 | [mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/centos_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl) | 
[mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.1/Serving/ascend/centos_aarch64/mindspore_serving-1.1.1-cp37-cp37m-linux_aarch64.whl.sha256) | - -### 1.1.0 - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl) | 8dc45c9c6367a9b59a5893c896b3ebfd929544325c911f48f679b9203165d85d | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) | b49124e793127ac9d55ba8e5df109a17aafb3f09bbc4a9f7bc228bfc5b652042 | -| | | EulerOS-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) | 1c03e7941a9e247fb0e64f9ba0adbcb4fde3e815cd00dc4bc79e6a81a29e0335 | -| | | CentOS-x86 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/centos_x86/mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl) | 3affe7f5dc4c7c649221d80bf8a41f54fe64028424c422d3513c11a6507f193f | -| | | CentOS-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) |051d2fe7fa1fa95e92da9841a1cdad113561da19a5e7f9abe30322ff44d68d2e | -| | Ascend 310 | Ubuntu-x86 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ascend310/ubuntu_x86/mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl) 
|fe357e5e83130938ad490563fa310e71261683cea08dede8731a915373991d5c | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ascend310/ubuntu_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) |17dc70cdf79f80db0344def06a427c93c5b03f3448a5aeb34a0b41305425e0bd | -| | | EulerOS-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ascend310/euleros_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) |be0881c5848696f67cbf54456babf344317f9509ad0961487588ae5e26ec2f87 | -| | | CentOS-x86 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ascend310/centos_x86/mindspore_ascend-1.1.0-cp37-cp37m-linux_x86_64.whl) |fc0c6d3cfd6688f6b7c999a4189cd06a8496ccde45db8528b57439edb12f819e | -| | | CentOS-aarch64 | [mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/ascend/ascend310/centos_aarch64/mindspore_ascend-1.1.0-cp37-cp37m-linux_aarch64.whl) |2a6856e2a7bd8db106748877bc2b4fa9d9804db265578d2d5f057a4e79073305 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.1.0-cp37-cp37m-linux_x86_64.whl) | 11386b0e156f033987f879e3b79f87e7cde0a6881063434f2c84a8564099e858 | -| | CPU | Ubuntu-x86 | [mindspore-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/cpu/ubuntu_x86/mindspore-1.1.0-cp37-cp37m-linux_x86_64.whl) | 1a1683e9c30650284f23001a1af0ae570ca854317ec52efc698ce7da604e31b0 | -| | | Ubuntu-aarch64 | 
[mindspore-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/cpu/ubuntu_aarch64/mindspore-1.1.0-cp37-cp37m-linux_aarch64.whl) | e1fa3cec68aef0e6619408f81d7e9e627704c1bfbf453ed90ee6d3b6c0c8c84f | -| | | Windows-x64 | [mindspore-1.1.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindSpore/cpu/windows_x64/mindspore-1.1.0-cp37-cp37m-win_amd64.whl) | ce3f1d4504fd8236113827d435c9aa691b0200e1ffeba3db391e678ad31a7df7 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl) | 85f4a38ecaf4d6799482e2a982609c46a49471325b47699c5b01b340549ab961 | -| | | Ubuntu-aarch64 | [mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl) | adb45fa766ff5ca4ef6cbe24335ca7e87c81e9293b60ffe00fec76533115ef4e | -| | | EulerOS-aarch64 | [mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/euleros_aarch64/mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl) | 78b9a728aecc01ead3687f9469d8af228917eab285f0770316bcc214b4ae3adc | -| | | CentOS-x86 | [mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/centos_x86/mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl) | a19a126ae1daa210c78aa256262303c9ad20f9cfe2404a5af840d325a471eb30 | -| | | CentOS-aarch64 | [mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/centos_aarch64/mindinsight-1.1.0-cp37-cp37m-linux_aarch64.whl) | f499aa428d754dc36da303f02b6531576e9e86158b213184c392f2302f13da2b | -| | GPU CUDA 10.1 | Ubuntu-x86 | 
[mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.1.0-cp37-cp37m-linux_x86_64.whl) | 85f4a38ecaf4d6799482e2a982609c46a49471325b47699c5b01b340549ab961 | -| MindArmour | Ascend 910 | Ubuntu-x86
CentOS-x86 | [mindarmour-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindArmour/x86_64/mindarmour-1.1.0-cp37-cp37m-linux_x86_64.whl) | 3d8b05437dca6d648073b85909508377b7cab05f9a6f52ee712592083d611770 | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS-aarch64 | [mindarmour-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindArmour/aarch64/mindarmour-1.1.0-cp37-cp37m-linux_aarch64.whl) | bc724697cf053672198be226193cd0467c5a7f2a700d26a024bcfb318724f34a | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/MindArmour/x86_64/mindarmour-1.1.0-cp37-cp37m-linux_x86_64.whl) | 3d8b05437dca6d648073b85909508377b7cab05f9a6f52ee712592083d611770 | -| MindSpore
Hub | | any | [mindspore_hub-1.1.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Hub/any/mindspore_hub-1.1.0-py3-none-any.whl) |1f329f35865a4e7014461e485e8a87859160aae6cbe1033973239e26c7dee01f | -| MindSpore
Serving | Ascend 910
Ascend310 | Ubuntu-x86 | [mindspore_serving-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Serving/ascend/ubuntu_x86/mindspore_serving-1.1.0-cp37-cp37m-linux_x86_64.whl) | 4bfb3a41b9fbfd77ed09244f08ec98f8e5833e6fa27d7c214b9262c1f3568258 | -| | | Ubuntu-aarch64 | [mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Serving/ascend/ubuntu_aarch64/mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl) | 095ac95e4c338b17dd192422d8bf342c55441a79eeeeb70441ccc65746b0f2d7 | -| | | EulerOS-aarch64 | [mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Serving/ascend/euleros_aarch64/mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl) | 1695ac7a01fdcb4fad9d47a172767d56fcae4979ecced298f5e33c936e821649 | -| | | CentOS-x86 | [mindspore_serving-1.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Serving/ascend/centos_x86/mindspore_serving-1.1.0-cp37-cp37m-linux_x86_64.whl) | ed0cc466efad7fb717527a511611c1fb2d72db4caf0f66e6fcbde0ecf7d6e525 | -| | | CentOS-aarch64 | [mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.1.0/Serving/ascend/centos_aarch64/mindspore_serving-1.1.0-cp37-cp37m-linux_aarch64.whl) |e6ed84cfe0ff9b51b94cd2575f62238c95a73ac386e2d09adf75d3ea74177420 | - -### 1.0.1 - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.0.1-cp37-cp37m-linux_x86_64.whl) | 23664e8ab2e0f2b1a523de96753e300d42f2438e61f7d173b17a637fd139e2d1 | -| | | Ubuntu-aarch64 | 
[mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl) | 9584a9f893ccdb93a2581c034b51045e8882ab67ce203366a212f981c68ad602 | -| | | EulerOS-aarch64 | [mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl) | a662f447e79604aec52224f9dca6c73e4127cb497250e82517e8d5d8b83332b0 | -| | | CentOS-x86 | [mindspore_ascend-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/ascend/centos_x86/mindspore_ascend-1.0.1-cp37-cp37m-linux_x86_64.whl) | 3b1f9c871b34ffbfa45d7dc55355adc0e828dbc5fb27d380ffed203644ef9155 | -| | | CentOS-aarch64 | [mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.0.1-cp37-cp37m-linux_aarch64.whl) | e01d0c52c7cf5670368e9bac6f06f9627eb016d109a48fc77dd7debd135599c9 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.0.1-cp37-cp37m-linux_x86_64.whl) | 5c84995e9f9a3640c31df0e96f69a37fa765f4e332cd71d9347c4e8c6c1d31f1 | -| | CPU | Ubuntu-x86 | [mindspore-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/cpu/ubuntu_x86/mindspore-1.0.1-cp37-cp37m-linux_x86_64.whl) | d8e66d962f66c00d7590ef24093186c3265cca60c27ff423769a5ef48922f494 | -| | | Ubuntu-aarch64 | [mindspore-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/cpu/ubuntu_aarch64/mindspore-1.0.1-cp37-cp37m-linux_aarch64.whl) | 8a2c630550e4ff6c786b1a53635e075d0a6625605af7221275360a04cdc3db0d | -| | | Windows-x64 | 
[mindspore-1.0.1-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindSpore/cpu/windows_x64/mindspore-1.0.1-cp37-cp37m-win_amd64.whl) | f50e1de60d6777bb449802024b7ac2fd90f58fb191bfd69e56079f6dbc5fe1b3 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl) | a1f5beb078d521f40454235f9bfcec5036479ada74d2a51a233ccbce3544e7ab | -| | | Ubuntu-aarch64 | [mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl) | 057ad1daec0cf48ece5dd9174aa95498816e373b831818b6e885b24173bd9cf5 | -| | | EulerOS-aarch64 | [mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/euleros_aarch64/mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl) | e5551323f2f0a89a7eedd4eb508fffb9a71761bb1d70cc9f5f9e2e63a66af78d | -| | | CentOS-x86 | [mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/centos_x86/mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl) | 62a86fa5faa32ee196b78071940f674642278ae016c9662d1051461a0c003969 | -| | | CentOS-aarch64 | [mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/centos_aarch64/mindinsight-1.0.1-cp37-cp37m-linux_aarch64.whl) | f436c042b77e52d1f95dd0d104f24189cc7474660603561b196e49ca36b2eded | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindInsight/ascend/ubuntu_x86/mindinsight-1.0.1-cp37-cp37m-linux_x86_64.whl) | a1f5beb078d521f40454235f9bfcec5036479ada74d2a51a233ccbce3544e7ab | -| MindArmour | Ascend 910 | Ubuntu-x86
CentOS-x86 | [mindarmour-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindArmour/x86_64/mindarmour-1.0.1-cp37-cp37m-linux_x86_64.whl) | 5f6cee4c36e009bc7cf0cb65d8c5d9a01d87b00dd9e4c48fb9c836fdd4be38ab | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS-aarch64 | [mindarmour-1.0.1-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindArmour/aarch64/mindarmour-1.0.1-cp37-cp37m-linux_aarch64.whl) | 1bd8e174f9a83537f4a60371fa2a0effe78851c9181e2666d9e2f49cab25efce | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.0.1-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/MindArmour/x86_64/mindarmour-1.0.1-cp37-cp37m-linux_x86_64.whl) | 5f6cee4c36e009bc7cf0cb65d8c5d9a01d87b00dd9e4c48fb9c836fdd4be38ab | -| MindSpore
Hub | | any | [mindspore_hub-1.0.1-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/Hub/any/mindspore_hub-1.0.1-py3-none-any.whl) | 5a0dc560c86aa35a54f4d8e20ba6e9b2b6084a5143fb4d6d73c2f6f3e55ab49e | - -### 1.0.0 - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/ubuntu_x86/mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl) | 4682be18cffdf86346bdb286ccd9e05f33be4138415dbc7db1650d029510ee44 | -| | | Ubuntu-aarch64 | [mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl) | 6912fcc0488f3a8fa336d9680f506b5f0c97c5d82844d8fbfd9163bbcbe3140a | -| | | EulerOS-x86 | [mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/euleros_x86/mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl) | 20fb5d35ccd7c1354084da48fa8e3cb93b6fa4843211be82a542dff775c39c0a | -| | | EulerOS-aarch64 | [mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/euleros_aarch64/mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl) | b9700fc718e28026269f4639c7a963653a485c7213eed7d534ed26f89d98a44e | -| | | CentOS-x86 | [mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/centos_x86/mindspore_ascend-1.0.0-cp37-cp37m-linux_x86_64.whl) | 453d4ddb93e3e0ed79ac2ec16920994b387376682d07ba71f1e1387cccd57ded | -| | | CentOS-aarch64 | [mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/ascend/centos_aarch64/mindspore_ascend-1.0.0-cp37-cp37m-linux_aarch64.whl) 
|f2066bfd3ffdeb458c6cdcdec2eb0c47c444336c7d983134638ae2de0cec0564 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-1.0.0-cp37-cp37m-linux_x86_64.whl) | af2b3b7744fdd475333a81e3dfadc81be2156e67e660477f92b584807b34cb70 | -| | CPU | Ubuntu-x86 | [mindspore-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/cpu/ubuntu_x86/mindspore-1.0.0-cp37-cp37m-linux_x86_64.whl) | a0a3c81b500d442d0324d82ed49808a32fb62c9e776fe614a863345965180f7c | -| | | Ubuntu-aarch64 | [mindspore-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/cpu/ubuntu_aarch64/mindspore-1.0.0-cp37-cp37m-linux_aarch64.whl) | eb3bf9d7a40a4f7bbb3ba566b8353ff8a2f89f2fae08d770af0f7d8b9f83d3ea | -| | | Windows-x64 | [mindspore-1.0.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindSpore/cpu/windows_x64/mindspore-1.0.0-cp37-cp37m-win_amd64.whl) | d30c89941939164fc1af8e406b202c1671a1309991a957a0f950b8c71775fcc9 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl) | dd951904ef10adbb93501c3cbafa6b4d34b1e8e5c4efe4fcaa7af49f0c081041 | -| | | Ubuntu-aarch64 | [mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/ubuntu_aarch64/mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl) | fc02c2ba823cc23eceb89c1c4f93e103502714ce5b4b7ea020c8d744220ae260 | -| | | EulerOS-x86 | [mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/euleros_x86/mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl) | 
2df33884fe557e1073ac7bf18fef135dd2f0a90d8dfbc1a0fe6ab223fd959e9c | -| | | EulerOS-aarch64 | [mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/euleros_aarch64/mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl) | 27bbdb4354f43b696068cc926dfa4a967e5aa48e3f9276a9501df84966bd465e | -| | | CentOS-x86 | [mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/centos_x86/mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl) | 8eab8881dd585731dfdedaec16b456fe6e80242199efbdc5703e20382b59aeab | -| | | CentOS-aarch64 | [mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/centos_aarch64/mindinsight-1.0.0-cp37-cp37m-linux_aarch64.whl) | 3f76f2ff8c809b638136748348d5860b2ef6f6412ec37db2e02d00a7bc53c91f | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindInsight/ascend/ubuntu_x86/mindinsight-1.0.0-cp37-cp37m-linux_x86_64.whl) | dd951904ef10adbb93501c3cbafa6b4d34b1e8e5c4efe4fcaa7af49f0c081041 | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86
CentOS x86_64 | [mindarmour-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindArmour/x86_64/mindarmour-1.0.0-cp37-cp37m-linux_x86_64.whl) | a139ded76899e5901889fc4e578165ef78584a127f9c264830e4e2806c30cc82 | -| | | Ubuntu-aarch64
EulerOS-aarch64
CentOS aarch64 | [mindarmour-1.0.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindArmour/aarch64/mindarmour-1.0.0-cp37-cp37m-linux_aarch64.whl) | e895ba5a0d207e0cb3e93acdfaaa399a63161443371ef68d626d29542e41d940 | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-1.0.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/MindArmour/x86_64/mindarmour-1.0.0-cp37-cp37m-linux_x86_64.whl) | a139ded76899e5901889fc4e578165ef78584a127f9c264830e4e2806c30cc82 | -| MindSpore
Hub | | any | [mindspore_hub-1.0.0-py3-none-any.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/Hub/any/mindspore_hub-1.0.0-py3-none-any.whl) |0cb7ea4c8cd81279bc61558e1102da14516d2ea9653269cb0519c7085df8e3c3 | -| MindSpore
Lite RT | CPU | Android-aarch32 | [mindspore-lite-1.0.0-runtime-arm32-cpu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch32/mindspore-lite-1.0.0-runtime-arm32-cpu.tar.gz) |abb28cee1b8a439c51d05a7c4521dc3f76d05ae79db4be781c932ee5f0abc774 | -| | | Android-aarch64 | [mindspore-lite-1.0.0-runtime-arm64-cpu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-runtime-arm64-cpu.tar.gz) |9ca80c1fff35008f8114b3524fc2d897dac1db247df873ea6560f3ddc548a7f3 | -| | GPU | Android-aarch64 | [mindspore-lite-1.0.0-runtime-arm64-gpu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-runtime-arm64-gpu.tar.gz) |eae1c9856ae7f647ce52dae79f826412e07bb058e6cf9031d85ab0ca72e42156 | -| MindSpore
Lite Converter | CPU | Ubuntu-x86 | [mindspore-lite-1.0.0-converter-ubuntu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/ubuntu_x86/mindspore-lite-1.0.0-converter-ubuntu.tar.gz) |baaf3e1d88416da535432949810c80e76e4189b3567b952b9d99397fcda0cad8 | -| | | Windows-x86 | [mindspore-lite-1.0.0-converter-win-cpu.zip](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/windows_x86/mindspore-lite-1.0.0-converter-win-cpu.zip) |6eae6f46ebe98697cf0a36268159d74a95ddf743ee27ec6de2088d469c753960 | -| MindSpore
Lite Minddata | CPU | Android-aarch32 | [mindspore-lite-1.0.0-minddata-arm32-cpu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch32/mindspore-lite-1.0.0-minddata-arm32-cpu.tar.gz) |d998c5eba81b254c057eae61aeacd72cee24ad75eb01be89321133e6e035a330 | -| | | Android-aarch64 | [mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz) |9f6bd53663d029b7638274fca94e47efbfa33ff7dab5dbe1cf328379e3cbbc18 | - -### 0.7.0-beta - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/ascend/ubuntu_x86/mindspore_ascend-0.7.0-cp37-cp37m-linux_x86_64.whl) | 522b80e84de1b414d3800a27d01e40f75332000e5246b24cc1aea7d9e5566ce5 | -| | | Ubuntu-aarch64 | [mindspore_ascend-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-0.7.0-cp37-cp37m-linux_aarch64.whl) | cbdb56a20860aaf1df4a8cbcc090da837ea2a5d115a173e79cd746f84263d73b | -| | | EulerOS-x86 | [mindspore_ascend-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/ascend/euleros_x86/mindspore_ascend-0.7.0-cp37-cp37m-linux_x86_64.whl) | a21f086d2467eafaffc6934030941f24043e85fbff4888e4fb7ce879e59e5094 | -| | | EulerOS-aarch64 | [mindspore_ascend-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/ascend/euleros_aarch64/mindspore_ascend-0.7.0-cp37-cp37m-linux_aarch64.whl) | b1fbe55d7a461b8aa37efec100b87bad4332be7ef954ab83c01bec5f0f5da1e8 | -| | GPU CUDA 10.1 | Ubuntu-x86 | 
[mindspore_gpu-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.7.0-cp37-cp37m-linux_x86_64.whl) | 128eab1c10574de140f3c1b6aaaf55b383cdea806dbc8de23966c8d4b4aafb55 | -| | CPU | Ubuntu-x86 | [mindspore-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/cpu/ubuntu_x86/mindspore-0.7.0-cp37-cp37m-linux_x86_64.whl) | 473de6725a344e3b6353121de66dd06c8012e7eba3af3b96cd5d8a476b3b6e64 | -| | | Ubuntu-aarch64 | [mindspore-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/cpu/ubuntu_aarch64/mindspore-0.7.0-cp37-cp37m-linux_aarch64.whl) | 6b187948994eeaa2b4817303be83c6ccea3597c2aad5355428d5eaeb273604bc | -| | | Windows-x64 | [mindspore-0.7.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindSpore/cpu/windows_x64/mindspore-0.7.0-cp37-cp37m-win_amd64.whl) | 396152fab16ce5fcb4106cf49e02989b2e19503896304b1b040932eaddfdf56f | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl) | 3f913d74643eab858bd86d1ea73eb05ee4d402f8164adfb439b6346425abfa19 | -| | | Ubuntu-aarch64 | [mindinsight-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindInsight/ascend/ubuntu_aarch64/mindinsight-0.7.0-cp37-cp37m-linux_aarch64.whl) | 73fb86732a88803b0699b47bd48aaa108b4921d0c3411e465bee27c348a68c76 | -| | | EulerOS-x86 | [mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindInsight/ascend/euleros_x86/mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl) | bd84b6b3432d34b235bf8d49ce78e5e0dbaf4b692e75fe12a7600dc313d9124c | -| | | EulerOS-aarch64 | 
[mindinsight-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindInsight/ascend/euleros_aarch64/mindinsight-0.7.0-cp37-cp37m-linux_aarch64.whl) | 4c48c96df6438b67fd7e36d96e251bf8e5a3dbcde13382edbaabfc03ae11e807 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.7.0-cp37-cp37m-linux_x86_64.whl) | 3f913d74643eab858bd86d1ea73eb05ee4d402f8164adfb439b6346425abfa19 | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86 | [mindarmour-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindArmour/x86_64/mindarmour-0.7.0-cp37-cp37m-linux_x86_64.whl) | bd3725991f227dde57afb1d11baf694a6ae0591d68355de18465a05b161bab14 | -| | | Ubuntu-aarch64
EulerOS-aarch64 | [mindarmour-0.7.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindArmour/aarch64/mindarmour-0.7.0-cp37-cp37m-linux_aarch64.whl) | 928754efcde8c2106e1af4fb883899d8f66aa864e0ac1ba7358a291792d898a2 | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.7.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.7.0-beta/MindArmour/x86_64/mindarmour-0.7.0-cp37-cp37m-linux_x86_64.whl) | bd3725991f227dde57afb1d11baf694a6ae0591d68355de18465a05b161bab14 | - -### 0.6.0-beta - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/ascend/ubuntu_x86/mindspore_ascend-0.6.0-cp37-cp37m-linux_x86_64.whl) | afea66c19beff797b99bf06bc0ed897a83fdb510d62e03663cef55a68e0f278f | -| | | Ubuntu-aarch64 | [mindspore_ascend-0.6.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-0.6.0-cp37-cp37m-linux_aarch64.whl) | d81a8d2641688032daf829f30d514e11f77f3ef98fb35ee6c7370723158c0abc | -| | | EulerOS-x86 | [mindspore_ascend-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/ascend/euleros_x86/mindspore_ascend-0.6.0-cp37-cp37m-linux_x86_64.whl) | 3ce2a21cd9b8cf58101ec342c9753a226f5fbe315f3a40da521fdf1d46e9dbef | -| | | EulerOS-aarch64 | [mindspore_ascend-0.6.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/ascend/euleros_aarch64/mindspore_ascend-0.6.0-cp37-cp37m-linux_aarch64.whl) | 55716a59295b92f13509f483c073a2b67cce89cb3e53919400b5d428d986f9f5 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.6.0-cp37-cp37m-linux_x86_64.whl) | f477dc282d503283c59a06e26cfad785c2c2a1996082671e46b4405a6fa539b1 | -| | CPU | Ubuntu-x86 | 
[mindspore-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/cpu/ubuntu_x86/mindspore-0.6.0-cp37-cp37m-linux_x86_64.whl) | 8daf749b9d7cf269208b47561844d088a7d200e10816f9437fbcce24fb844495 | -| | | Windows-x64 | [mindspore-0.6.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/cpu/windows_x64/mindspore-0.6.0-cp37-cp37m-win_amd64.whl) | c7ed48fdb808d4f65ca68654323f2e990a7aa7a99ccf0f19bc8bcc23024102f7 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl) | 6a825a529339eba95799bfaef6876ef2aedb45f3f81933f41c64e99d9af5c3fd | -| | | Ubuntu-aarch64 | [mindinsight-0.6.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindInsight/ascend/ubuntu_aarch64/mindinsight-0.6.0-cp37-cp37m-linux_aarch64.whl) | 165376a2ca5574568468d745101b16a7760f9cc0aa113372b57a31a35774fae7 | -| | | EulerOS-x86 | [mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindInsight/ascend/euleros_x86/mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl) | f02af4c6fa6ad88589ccc8c80134ad3ff9298379d3361839c1eb41350d2e12d8 | -| | | EulerOS-aarch64 | [mindinsight-0.6.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindInsight/ascend/euleros_aarch64/mindinsight-0.6.0-cp37-cp37m-linux_aarch64.whl) | dcb4560a41342fd61e29a4f6718459b247ba0e21b3e075ca4075ed4f9fec4375 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.6.0-cp37-cp37m-linux_x86_64.whl) | 6a825a529339eba95799bfaef6876ef2aedb45f3f81933f41c64e99d9af5c3fd | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86 | [mindarmour-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindArmour/x86_64/mindarmour-0.6.0-cp37-cp37m-linux_x86_64.whl) | 18f245bdff972414010c9f53de402d790cdef9a74f94ac41e5b6341e778e93b3 | -| | | Ubuntu-aarch64
EulerOS-aarch64 | [mindarmour-0.6.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindArmour/aarch64/mindarmour-0.6.0-cp37-cp37m-linux_aarch64.whl) | 8da35bbf7e909bdce7972f7cd11aa495de2c18b9334052e60609dadd82649922 | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.6.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindArmour/x86_64/mindarmour-0.6.0-cp37-cp37m-linux_x86_64.whl) | 18f245bdff972414010c9f53de402d790cdef9a74f94ac41e5b6341e778e93b3 | - -### 0.5.2-beta - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.5.2-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/ascend/ubuntu_x86/mindspore_ascend-0.5.2-cp37-cp37m-linux_x86_64.whl) | ec4bdb6c96d9ffd2d1e465bd07ac4a8a9c0633512b4fffe9217590ad1a576ea6 | -| | | Ubuntu-aarch64 | [mindspore_ascend-0.5.2-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-0.5.2-cp37-cp37m-linux_aarch64.whl) | 8bffe9ef96d99af7238db713cc1273a63762d95e1f2d758d53e20550e2c9b2a2 | -| | | EulerOS-x86 | [mindspore_ascend-0.5.2-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/ascend/euleros_x86/mindspore_ascend-0.5.2-cp37-cp37m-linux_x86_64.whl) | 396da09b61811ab9e5f72c6ad6d68bfd757384bb7923ac50bfed80672eafcf84 | -| | | EulerOS-aarch64 | [mindspore_ascend-0.5.2-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/ascend/euleros_aarch64/mindspore_ascend-0.5.2-cp37-cp37m-linux_aarch64.whl) | 71cb819be43d3d89cc6b5e62c4e4c988e52bcbad3b3b9e7d1ed9ecc469c7043c | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-0.5.2-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.5.2-cp37-cp37m-linux_x86_64.whl) | d424840777d4751cdf1a22a8e39453a96804545ebe3f0dfb67d3aabc10fa2bd2 | -| | CPU | Ubuntu-x86 | 
[mindspore-0.5.2-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/cpu/ubuntu_x86/mindspore-0.5.2-cp37-cp37m-linux_x86_64.whl) | ef4d85704bb2588bf3208b6d62b5282db9eb792f99e8b45f571094d2ae735213 | -| | | Windows-x64 | [mindspore-0.5.2-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.2-beta/MindSpore/cpu/windows_x64/mindspore-0.5.2-cp37-cp37m-win_amd64.whl) | 023f255a81220210679a9872261e2fe4291cdebb157029506aa6773e59e070cd | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 34b3c1a5ffbf9fa5e46dc6f295abde0308b65d76fd18d4551103ca0e222e3651 | -| | | Ubuntu-aarch64 | [mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_aarch64/mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl) | 97f92b556f8e97e250f311f5d11caace4ac5686015b099b98462d9603e2c5724 | -| | | EulerOS-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/euleros_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 5fab87c3dfda57851a9981c7567200f0f0d856462b8dd521402b085830e6554f | -| | | EulerOS-aarch64 | [mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/euleros_aarch64/mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl) | 7a157fb849f078fef6792353414737a8eccd98ba7a6fdd3c4ba3b497bc3f019f | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 34b3c1a5ffbf9fa5e46dc6f295abde0308b65d76fd18d4551103ca0e222e3651 | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86 | [mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/x86_64/mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl) | 09aa2887b0acbe9b31d07fb8d740c0bceefd6b8751aebdddd533f752f7564efc | -| | | Ubuntu-aarch64
EulerOS-aarch64 | [mindarmour-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/aarch64/mindarmour-0.5.0-cp37-cp37m-linux_aarch64.whl) | 51d2dfd9e65d6d919da36c29fa9420b68c3fb71aa33b54ec35aa5d6bb011c1a8 | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/x86_64/mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl) | 09aa2887b0acbe9b31d07fb8d740c0bceefd6b8751aebdddd533f752f7564efc | - -### 0.5.0-beta - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/ascend/ubuntu_x86/mindspore_ascend-0.5.0-cp37-cp37m-linux_x86_64.whl) | f20adcdb696316361e13fcd624d7188598b7248f77c7efc535cf193afc26f1c2 | -| | | Ubuntu-aarch64 | [mindspore_ascend-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-0.5.0-cp37-cp37m-linux_aarch64.whl) | 6b79da1ff33bc27d92835ebc40f9238c6e05a0ebd0a3307035e726b2de0eeae6 | -| | | EulerOS-x86 | [mindspore_ascend-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/ascend/euleros_x86/mindspore_ascend-0.5.0-cp37-cp37m-linux_x86_64.whl) | 34193fbd8a1181d1420386b6fa31315ac0098243dfc8965ee26a3063fedd331d | -| | | EulerOS-aarch64 | [mindspore_ascend-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/ascend/euleros_aarch64/mindspore_ascend-0.5.0-cp37-cp37m-linux_aarch64.whl) | 9ac71a08c7da451a1d8030e14ab5b239c27b42991834e40ed68486301c5ce895 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.5.0-cp37-cp37m-linux_x86_64.whl) | 4afbd886c8b7f60bfe0745e74749c5409007ff36d2f65034942a6597c5b92227 | -| | CPU | Ubuntu-x86 | 
[mindspore-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/cpu/ubuntu_x86/mindspore-0.5.0-cp37-cp37m-linux_x86_64.whl) | eec9fe7dcee83314e8c2e24b654bdfe25f6538b5fec471460bc8fd9451ee85e6 | -| | | Windows-x64 | [mindspore-0.5.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/cpu/windows_x64/mindspore-0.5.0-cp37-cp37m-win_amd64.whl) | 86fb9a4d508dcd56776a34650dea6f98905b0d1272a89af9eb3c1b9d670d06b5 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 34b3c1a5ffbf9fa5e46dc6f295abde0308b65d76fd18d4551103ca0e222e3651 | -| | | Ubuntu-aarch64 | [mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_aarch64/mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl) | 97f92b556f8e97e250f311f5d11caace4ac5686015b099b98462d9603e2c5724 | -| | | EulerOS-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/euleros_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 5fab87c3dfda57851a9981c7567200f0f0d856462b8dd521402b085830e6554f | -| | | EulerOS-aarch64 | [mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/euleros_aarch64/mindinsight-0.5.0-cp37-cp37m-linux_aarch64.whl) | 7a157fb849f078fef6792353414737a8eccd98ba7a6fdd3c4ba3b497bc3f019f | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindInsight/ascend/ubuntu_x86/mindinsight-0.5.0-cp37-cp37m-linux_x86_64.whl) | 34b3c1a5ffbf9fa5e46dc6f295abde0308b65d76fd18d4551103ca0e222e3651 | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86 | [mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/x86_64/mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl) | 09aa2887b0acbe9b31d07fb8d740c0bceefd6b8751aebdddd533f752f7564efc | -| | | Ubuntu-aarch64
EulerOS-aarch64 | [mindarmour-0.5.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/aarch64/mindarmour-0.5.0-cp37-cp37m-linux_aarch64.whl) | 51d2dfd9e65d6d919da36c29fa9420b68c3fb71aa33b54ec35aa5d6bb011c1a8 | -| | GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindArmour/x86_64/mindarmour-0.5.0-cp37-cp37m-linux_x86_64.whl) | 09aa2887b0acbe9b31d07fb8d740c0bceefd6b8751aebdddd533f752f7564efc | - -### 0.3.0-alpha - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/ascend/ubuntu_x86/mindspore_ascend-0.3.0-cp37-cp37m-linux_x86_64.whl) | 7756a50ca3af82d06eaf456db4d062fa647a8352724ef85da6569426a6393918 | -| | | Ubuntu-aarch64 | [mindspore_ascend-0.3.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/ascend/ubuntu_aarch64/mindspore_ascend-0.3.0-cp37-cp37m-linux_aarch64.whl) | 4f613b1466ba3eafb160ebca2f8086e63fdaeee9c07a5458b4476da4fce8f90a | -| | | EulerOS-x86 | [mindspore_ascend-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/ascend/euleros_x86/mindspore_ascend-0.3.0-cp37-cp37m-linux_x86_64.whl) | 93867f72c801affec1da901e734a6d329c6d1ae3cdec1297870b46a277aa64b8 | -| | | EulerOS-aarch64 | [mindspore_ascend-0.3.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/ascend/euleros_aarch64/mindspore_ascend-0.3.0-cp37-cp37m-linux_aarch64.whl) | ecd7f3e049034d20f722073ecb87d5d8108cfc218d2594ec9771e83db5222cf8 | -| | GPU CUDA 9.2 | Ubuntu-x86 | [mindspore_gpu-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/gpu/ubuntu_x86/cuda-9.2/mindspore_gpu-0.3.0-cp37-cp37m-linux_x86_64.whl) | cd4890d3c24b47f48da48c8cc9efdf35e14f9b4a76ec66779bb24d601d2e0c25 | -| | GPU CUDA 10.1 | Ubuntu-x86 | 
[mindspore_gpu-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.3.0-cp37-cp37m-linux_x86_64.whl) | 07e7263936e1c4805fb253d596ccbeb2fccab3a48929febce85ebb7609d82c4f | -| | CPU | Ubuntu-x86 | [mindspore-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/cpu/ubuntu_x86/mindspore-0.3.0-cp37-cp37m-linux_x86_64.whl) | 38b662673af0dfc89182f5b54261aa8694b8aefdbc1e5fa2d5e06377113e8a22 | -| | | Windows-x64 | [mindspore-0.3.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/cpu/windows_x64/mindspore-0.3.0-cp37-cp37m-win_amd64.whl) | ed6b1c04d08fcfe4ac913f4593da70f78741af8e9391dce7189106b67a1393c1 | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindInsight/ascend/ubuntu_x86/mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl) | 40b0697fbafa3a08393cbeda2f6286caa299a3b758beb63c9ed68f621879ef49 | -| | | Ubuntu-aarch64 | [mindinsight-0.3.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindInsight/ascend/ubuntu_aarch64/mindinsight-0.3.0-cp37-cp37m-linux_aarch64.whl) | 0005334bf15268e499d91d0a7e1bfb5abc4b5a0e10a3c4c0798da0283b28fe23 | -| | | EulerOS-x86 | [mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindInsight/ascend/euleros_x86/mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl) | e1ba11b37a0ce13c8f4f668a9479c0f97d922e4ce6128823e576c7d38298c86d | -| | | EulerOS-aarch64 | [mindinsight-0.3.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindInsight/ascend/euleros_aarch64/mindinsight-0.3.0-cp37-cp37m-linux_aarch64.whl) | 8d03e1f57b39268b4ba89c25ca88934b1a00304839f454d7bfd4747269abb359 | -| | GPU CUDA 9.2
GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindInsight/ascend/ubuntu_x86/mindinsight-0.3.0-cp37-cp37m-linux_x86_64.whl) | 40b0697fbafa3a08393cbeda2f6286caa299a3b758beb63c9ed68f621879ef49 | -| MindArmour | Ascend 910 | Ubuntu-x86
EulerOS-x86 | [mindarmour-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindArmour/x86_64/mindarmour-0.3.0-cp37-cp37m-linux_x86_64.whl) | 7a2bd6174be9e5a47e8ae6bcdd592ecdafc6e53e6f1cd5f0261fcb8337b5b337 | -| | | Ubuntu-aarch64
EulerOS-aarch64 | [mindarmour-0.3.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindArmour/aarch64/mindarmour-0.3.0-cp37-cp37m-linux_aarch64.whl) | 6d5f96cc004579d98664d018dca860d3b7f935df5b479f1192161f18a091d9c9 | -| | GPU CUDA 9.2
GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.3.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindArmour/x86_64/mindarmour-0.3.0-cp37-cp37m-linux_x86_64.whl) | 7a2bd6174be9e5a47e8ae6bcdd592ecdafc6e53e6f1cd5f0261fcb8337b5b337 | - -### 0.2.0-alpha - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore_ascend-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/ascend/x86_ubuntu/mindspore_ascend-0.2.0-cp37-cp37m-linux_x86_64.whl) | aa1225665d05263b17bb7ec1d51dd4f933254c818bee126b6c5dac4513532a14 | -| | | EulerOS-x86 | [mindspore_ascend-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/ascend/x86_euleros/mindspore_ascend-0.2.0-cp37-cp37m-linux_x86_64.whl) | eb9a1b2a0ba32d7f7264ae344833f90a8ba2042cddf1a6a719c1a38a7ea528ea | -| | | EulerOS-aarch64 | [mindspore_ascend-0.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/ascend/aarch64_euleros/mindspore_ascend-0.2.0-cp37-cp37m-linux_aarch64.whl) | 820fb17d63341c636018d4e930151d3d2fa7ac05d4a400286c1b1aeb4cc34c6f | -| | GPU CUDA 9.2 | Ubuntu-x86 | [mindspore_gpu-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/gpu/cuda-9.2/mindspore_gpu-0.2.0-cp37-cp37m-linux_x86_64.whl) | b933f95551afc3de38ba06502ef68a5a2a50bebadcc9b92b870f8eb44f59f10a | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore_gpu-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/gpu/cuda-10.1/mindspore_gpu-0.2.0-cp37-cp37m-linux_x86_64.whl) | e7167bad4549002f9d14b0a015abbabf56334621cf746fa60bb67df0fadb22ec | -| | CPU | Ubuntu-x86 | 
[mindspore-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/cpu/x86_ubuntu/mindspore-0.2.0-cp37-cp37m-linux_x86_64.whl) | d6702dce9dad94d1e08bedc43540ac21422e8c49d919f7abd0bb7a3aa804476f | -| | | Windows-x64 | [mindspore-0.2.0-cp37-cp37m-win_amd64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/cpu/x64_windows/mindspore-0.2.0-cp37-cp37m-win_amd64.whl) | 77151d20fe450df3697853a5309308ecc482870fd2984753b82d3db9d326fdec | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindInsight/x86_ubuntu/mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl) | 2334e833f322e0f38e04e65819214b7582527364c1e0aca79bd080a720932ca4 | -| | | EulerOS-x86 | [mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindInsight/x86_euleros/mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl) | c6c3088a499967f2fe301ea910536fdf62dd4e38edb47e144726b9a4d4a17e50 | -| | | EulerOS-aarch64 | [mindinsight-0.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindInsight/aarch64_euleros/mindinsight-0.2.0-cp37-cp37m-linux_aarch64.whl) | 6e5e03b56988968ec36c556ece06d2e5aa68e80ff475374087998e0ff360a45a | -| | GPU CUDA 9.2
GPU CUDA 10.1 | Ubuntu-x86 | [mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindInsight/x86_ubuntu/mindinsight-0.2.0-cp37-cp37m-linux_x86_64.whl) | 2334e833f322e0f38e04e65819214b7582527364c1e0aca79bd080a720932ca4 | -| MindArmour | Ascend 910 | Ubuntu-x86 | [mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindArmour/x86_64/mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl) | 4146790bc73a5846e92b943dfd3febb6c62052b217eeb45b6c48aa82b51e7cc3 | -| | | EulerOS-x86 | [mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindArmour/x86_64/mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl) | 4146790bc73a5846e92b943dfd3febb6c62052b217eeb45b6c48aa82b51e7cc3 | -| | | EulerOS-aarch64 | [mindarmour-0.2.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindArmour/aarch64/mindarmour-0.2.0-cp37-cp37m-linux_aarch64.whl) | 5d5e532b9c4e466d89cf503f07c2d530b42216a14f193f685b9a81e190c8db44 | -| | GPU CUDA 9.2
GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindArmour/x86_64/mindarmour-0.2.0-cp37-cp37m-linux_x86_64.whl) | 4146790bc73a5846e92b943dfd3febb6c62052b217eeb45b6c48aa82b51e7cc3 | - -### 0.1.0-alpha - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore | Ascend 910 | Ubuntu-x86 | [mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/ascend/ubuntu-x86/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl) | a76df4e96c4cb69b10580fcde2d4ef46b5d426be6d47a3d8fd379c97c3e66638 | -| | | EulerOS-x86 | [mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/ascend/euleros-x86/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl) | 45d4fcb37bf796b3208b7c1ca70dc0db1387a878ef27836d3d445f311c8c02e0 | -| | | EulerOS-aarch64 | [mindspore-0.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/ascend/euleros-aarch64/mindspore-0.1.0-cp37-cp37m-linux_aarch64.whl) | 7daba2d1739ce19d55695460dce5ef044b4d38baad4f5117056e5f77f49a12b4 | -| | GPU CUDA 9.2 | Ubuntu-x86 | [mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/gpu/cuda-9.2/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl) | b6e5623135b57b8c262f3e32d97fbe1e20e8c19da185a7aba97b9dc98c7ecda1 | -| | GPU CUDA 10.1 | Ubuntu-x86 | [mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/gpu/cuda-10.1/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl) | 43711725cf7e071ca21b5ba25e90d6955789fe3495c62217e70869f52ae20c01 | -| | CPU | Ubuntu-x86 | [mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/cpu/ubuntu-x86/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl) | 
45c473a97a6cb227e4221117bfb1b3ebe3f2eab938e0b76d5117e6c3127b8e5c | -| MindInsight | Ascend 910 | Ubuntu-x86 | [mindinsight-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindInsight/ubuntu/x86_64/mindinsight-0.1.0-cp37-cp37m-linux_x86_64.whl) | 960b6f485ce545ccce98adfb4c62cdea216c9b7851ffdc0669827c53811c3e59 | -| | | EulerOS-x86 | [mindinsight-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindInsight/euleros/x86_64/mindinsight-0.1.0-cp37-cp37m-linux_x86_64.whl) | 9f1ef04fec09e5b90be4a6223b3bf2943334746c1f5dac37207db4524b64942f | -| | | EulerOS-aarch64 | [mindinsight-0.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindInsight/euleros/aarch64/mindinsight-0.1.0-cp37-cp37m-linux_aarch64.whl) | d64207126542571057572f856010a5a8b3362ccd9e5b5c81da5b78b94face5fe | -| MindArmour | Ascend 910 | Ubuntu-x86 | [mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindArmour/x86_64/mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl) | 7796b6c114ee4962ce605da59a9bc47390c8910acbac318ecc0598829aad6e8c | -| | | EulerOS-x86 | [mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindArmour/x86_64/mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl) | 7796b6c114ee4962ce605da59a9bc47390c8910acbac318ecc0598829aad6e8c | -| | | EulerOS-aarch64 | [mindarmour-0.1.0-cp37-cp37m-linux_aarch64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindArmour/aarch64/mindarmour-0.1.0-cp37-cp37m-linux_aarch64.whl) | f354fcdbb3d8b4022fda5a6636e763f8091aca2167dc23e60b7f7b6d710523cb | -| | GPU CUDA 9.2
GPU CUDA 10.1
CPU | Ubuntu-x86 | [mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl](https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindArmour/x86_64/mindarmour-0.1.0-cp37-cp37m-linux_x86_64.whl) | 7796b6c114ee4962ce605da59a9bc47390c8910acbac318ecc0598829aad6e8c | diff --git a/resource/statement/en/legal_statement.md b/resource/statement/en/legal_statement.md deleted file mode 100644 index 48e4cb535e7b670654a3f8adfec3d63e64f22c37..0000000000000000000000000000000000000000 --- a/resource/statement/en/legal_statement.md +++ /dev/null @@ -1,24 +0,0 @@ -# Legal Statement - -## Rules and Conditions for Accessing the Website of MindSpore Open Source Project - -[MindSpore open source project website] is operated by MindSpore open source project. Its domain name is . The following rules apply to all visitors or users accessing this website. MindSpore open source project may, without notice to you, at any time revise these terms according to laws, regulations, and market changes. The right to access this website is authorized by MindSpore open source project according to the following terms. If you do not agree to these terms, please do not use this website. Your use of services provided by will be deemed as your acknowledgment of the whole Statement. MindSpore open source project has the right to take legal and fair remedies for violations of these conditions. 
- -## Disclaimer - -Although MindSpore open source project has attempted to provide accurate information on the website (), including but not limited to the texts, pictures, data, opinions, suggestions, web pages, and links, the project does not guarantee the accuracy, completeness, fullness or reliability of these materials and content, and explicitly states that it does not take responsibility for errors or omissions in these data and content, or make any express or implied warranty on these materials and information, including but not limited to guarantee concerning relevant ownerships, free from infringement upon third-party rights, quality or computer viruses. -MindSpore open source project may change the information on the website () at any time without notice. You shall periodically visit this website to obtain the latest version information. Mention of products or services that are not related to MindSpore open source project is for information purposes only and constitutes neither an endorsement nor a recommendation. MindSpore open source project does not provide any statement, guarantee or authorization for any of the products or services appearing on the website (). All products and services are governed by the sales agreement of MindSpore open source project. -Whereas some of the services provided on the website () are bulletin board system (BBS) service, the relevant information about or posted by its members on the website (including but not limited to usernames, company names, contact persons and details, related images and videos) is all provided by the members, who shall be full responsible for any information they provide. 
-Any entity or individual who considers the content on the website () (including but not limited to the commodity information posted by the members of the website) suspected of infringing their legal interest shall make an infringement statement in writing to MindSpore open source project, together with their identification, ownership certification, particular URL and detailed proof of infringement. MindSpore open source project will remove the content alleged to be infringing by law upon receiving the foregoing legal document. - -## Copyrights - -All materials and content on the website () are protected by law. Other than quotes from other sources, all copyrights belong to MindSpore open source project. Without a prior written permission of MindSpore open source project or other parties concerned, no person or organization shall reproduce, distribute, reprint, or play any content of this website () in any form; link to or transmit the content through hyperlinks; upload the content to other servers using the method of images; store the content in information retrieval systems; or use the content for any other commercial purposes. For non-commercial and personal use, the content of the website may be downloaded or printed on condition that the content is not modified and all rights statements are reserved. - -## Trademarks - -All logos and trademarks used on this website () belong to MindSpore open source project, except trademarks, logos and service marks of other companies with indications. No content provided on the website () shall be deemed as a grant of the approval or right to use any trademark or logo aforesaid by implication, no objection or other means without prior written consent of MindSpore open source project or such third party which may own the mark. Without prior written permission, no one may use the name, trademark or logo of MindSpore open source project in any way. 
- -## Third-Party Links - -This website () may contain links to third party sites. Access to any other Internet site linked to this website is at the user's own risk and MindSpore open source project is not responsible for the accuracy or reliability of any information, data, opinions, suggestions or statements made on these sites. MindSpore open source project provides these links merely as a convenience and the inclusion of such links does not imply an endorsement, recommendation, or advertisement. diff --git a/resource/statement/en/privacy_policy.md b/resource/statement/en/privacy_policy.md deleted file mode 100644 index b7bff51f103489c3effea03e68240237ecc9c7ba..0000000000000000000000000000000000000000 --- a/resource/statement/en/privacy_policy.md +++ /dev/null @@ -1,49 +0,0 @@ -# Privacy Policy - -Please carefully read and understand the Privacy Policy before using MindSpore and other services on this website. - -To ensure the proper running of MindSpore and provide better services for you, we will collect the following personal data: - -1. You can subscribe to the latest MindSpore news on this website. To fulfill the business function, you need to agree us to collect and use your personal and sensitive personal information, including your email address. - -2. To participate in community development, you need to sign the Contribution License Agreement (CLA), which contains the following personal information: - - Your name or account - - Your address - - Your email address - - Your phone number - - Your fax number - - Your ID on Gitee - -3. The community provides a mailing list for developers to communicate and collaborate with each other. When you join a mailing list, we will collect your email address. - -4. 
To learn about the running status of the MindSpore community, we will collect your information when you access the website, including but not limited to your IP address, browser type, used language, access date and time, software and hardware features, web page you accessed, device model, device identity code, operating system, resolution, telecom carrier, and download clicks. - -Your information is securely saved and protected. Protection methods include, but are not limited to, data encryption. - -MindSpore does not share, transfer, or disclose your information without your prior consent unless otherwise required by laws and regulations. - -According to applicable laws and regulations, you may have rights to access, modify, delete, deny, limit, or transfer the above information. To exercise your rights, contact us through the contact information provided in this document. - -The above information is stored in the People's Republic of China. - -**You must ensure that your use of MindSpore is in compliance with laws and regulations. MindSpore only provides the standard service upon your request and is not responsible for the legal compliance of your use.** - -**You understand that the preceding information is mandatory for providing services. If you do not provide or disallow us to collect and use the above information, we will not be able to provide the services.** - -**How do we use cookies?** - -To ensure the normal operation of our website, sometimes we may store small data files named cookies on computers or portable devices. A cookie is a text-only file stored on computers or mobile devices by network servers. The content of a cookie can only be retrieved or read by the server that created it. Each cookie is unique to your web browser or mobile application. Cookies usually contain identifiers, site names, and some numbers and characters. Websites can store data about user preferences and other information using cookies. 
- -MindSpore enables cookies for the same purposes of most website or Internet service providers, that is, to improve user experience. Specifically, the purposes are as follows: - -1. Saving settings With cookies, the website can save settings, such as search result records, to acquire returned search results and optimize browser services. - -2. Authentication When you visit this website, a unique ID is generated to identify you. If the website does not use cookies with the authentication function, it will consider its users as new visitors each time they visit the website. For example, if you log in to the website and switch to another web page, the website will no longer recognize you and you will be regarded as a new visitor. - -Other third parties, such as Google and Baidu, will set cookies on our websites. They use these cookies to receive IP addresses, information about your browser configurations, and access information, but do not collect your personal identity information. In this way, they can learn how you interact with the website. You can visit the websites of the third parties to learn how they use cookies. - -You can manage or delete cookies based on your preferences. For details, see [AboutCookies](https://www.aboutcookies.org/). You can clear all cookies stored on your computer, and most web browsers can be configured to block cookies. However, if you do so, you may have to manually change your user settings every time you visit our website. - -If you have any questions, contact us at contact@mindspore.cn. 
- -Last update: March, 2020 diff --git a/resource/statement/zh_cn/legal_statement.md b/resource/statement/zh_cn/legal_statement.md deleted file mode 100644 index 4e22e26da7c8059b01526b079acad091e28122ab..0000000000000000000000000000000000000000 --- a/resource/statement/zh_cn/legal_statement.md +++ /dev/null @@ -1,24 +0,0 @@ -# 法律声明 - -## 规则及MindSpore开源项目网址访问条件 - -【MindSpore开源项目网站】指由MindSpore开源项目运营的网站,域名为。以下规则适用于所有访问MindSpore开源项目网站的用户或浏览者,MindSpore开源项目保留根据国家法律法规及市场行情等变化修改这些规则的权利。访问MindSpore开源项目网站的权利由MindSpore开源项目根据下列条款授予。如果您不同意下列任何条款、请停止使用本网址。如果您使用服务,您的使用行为将被视为对本声明全部内容的认可。对于违反这些规则的行为,MindSpore开源项目有权采取法律和公平的补救措施。 - -## 不承诺责任声明 - -MindSpore开源项目网站所载的材料和信息,包括但不限于文本、图片、数据、观点、建议、网页或链接,MindSpore开源项目力图在网站上提供准确的材料和信息,但MindSpore开源项目并不保证这些材料和内容的准确、完整、充分和可靠性,并且明确声明不对这些材料和内容的错误或遗漏承担责任,也不对这些材料和内容作出任何明示或默示的、包括但不限于有关所有权担保、没有侵犯第三方权利、质量和没有计算机病毒的保证。 -MindSpore开源项目可以在没有任何通知或提示的情况下随时对MindSpore开源项目服务网站上的内容进行修改,为了得到最新版本的信息,请您定时访问MindSpore开源项目网站。MindSpore开源项目在MindSpore开源项目网站上所提及的非MindSpore开源项目产品或服务仅仅是为了提供相关信息,并不构成对这些产品、服务的认可或推荐。MindSpore开源项目并不就网址上提供的任何产品、服务或信息做出任何声明、保证或认可,所有销售的产品和服务应受MindSpore开源项目的销售合同和条款的约束。 -鉴于提供的部分服务属于电子公告牌(BBS)服务,上关于其会员或其会员发布的相关信息(包括但不限于用户名称、公司名称、 联系人及联络信息,相关图片、视讯等)的信息均是由会员自行提供,会员依法应对其提供的任何信息承担全部责任。 -任何企业或个人认为网页内容(包括但不限于会员发布的商品信息)可能涉嫌侵犯其合法权益,应该及时向MindSpore开源项目提出书面权利通知,并提供身份证明、权属证明、具体链接(URL)及详细侵权情况证明。MindSpore开源项目在收到上述法律文件后,将会依法尽快移除相关涉嫌侵权的内容。 - -## 著作权说明 - -MindSpore开源项目网站所载的所有材料或内容受版权法的保护,所有版权拥有,但注明引用其他方的内容除外。未经MindSpore开源项目或其他方事先书面许可,任何人不得将MindSpore开源项目网站上的任何内容以任何方式进行复制、经销、翻印、传播、以超级链路连接或传送、以镜像法载入其他服务器上、存储于信息检索系统或者其他任何商业目的的使用,但对于非商业目的的、用户使用的下载或打印(条件是不得修改,且须保留该材料中的版权说明或其他所有权的说明)除外。 - -## 商标 - -MindSpore开源项目网站上使用和显示的所有商标、标志皆属MindSpore开源项目所有,但注明属于其他方拥有的商标、标志、商号除外。MindSpore开源项目网站所载的任何内容不应被视作未经MindSpore开源项目或其他方书面许可,以暗示、不反对或其他形式授予使用前述任何商标、标志的许可或权利。未经事先书面许可,任何人不得以任何方式使用MindSpore开源项目名称及MindSpore开源项目的商标、标记。 - -## 第三方链接 - 
-MindSpore开源项目网站可能保留有与第三方网站或网址的链接,访问这些链接将由用户自己作出决定,MindSpore开源项目并不保证这些链接上所提供的任何信息、数据、观点、图片、陈述或建议的准确性、完整性、充分性和可靠性。MindSpore开源项目提供这些链接仅仅在于提供方便,并不表示MindSpore开源项目对这些信息的认可和推荐,也不是用于宣传或广告目的。 diff --git a/resource/statement/zh_cn/privacy_policy.md b/resource/statement/zh_cn/privacy_policy.md deleted file mode 100644 index e17a9cea9e8ac3aedc2cc46321720d5e480a5069..0000000000000000000000000000000000000000 --- a/resource/statement/zh_cn/privacy_policy.md +++ /dev/null @@ -1,49 +0,0 @@ -# 个人信息保护政策 - -请您在使用本网站的各项服务以及我们的MindSpore产品前,仔细阅读并了解本《个人信息保护政策》。 - -为了MindSpore网站的健康运行,以便我们更好地为您服务,我们将收集您的以下个人数据: - -1. 您可以在本网站订阅MindSpore最新资讯。为了完成该业务功能所需,您同意授权我们收集和使用您的个人信息或个人敏感信息,包括电子邮箱地址。 - -2. 如果您参与社区开发,我们需要您签署贡献者协议(CLA), 协议会包含以下个人信息: - - 您的姓名或账户 - - 您的地址 - - 您的电子邮件地址 - - 您的电话号码 - - 您的传真号码 - - 您在码云(Gitee)的ID - -3. 社区也提供邮件列表,方便开发者相互交流和协作,当您注册邮件列表时,我们会收集您的邮件地址信息。 - -4. 为持续了解MindSpore社区的运行状况,我们将收集您浏览本网站期间包括但不限于您的IP地址、浏览器的类型、使用的语言、访问日期和时间、软硬件特征信息及您需求的网页记录、设备型号、设备识别码、操作系统、分辨率、电信运营商,下载点击等数据。 - -我们将会为您的上述信息采取相关安全保障措施,包括但不限于加密,请您放心。 - -对于您提供的上述信息,MindSpore不进行对外共享、转让、公开披露,除非依据您的指示或法律法规的强制性要求。 - -根据适用的法律法规,您可能有访问、更改、删除、拒绝、限制或移植您上述信息的权利。对于您权利的行使,请通过文末的方式联系我们。 - -上述所有信息均存储于中华人民共和国境内。 - -**您应确保,您使用本产品(或服务)符合法律法规的要求。我们仅按照您的指示,为您提供标准MindSpore产品(或服务),并不对您使用本产品(或服务)的合法合规性负责。** - -**您理解上述信息均是为您提供订阅服务之必需。如您选择不提供或不同意我们收集、使用以上这些信息,将导致您无法正常使用我们的产品(或服务),我们将无法为您订阅服务。** - -**我们如何使用Cookies** - -为确保网站正常运转,我们有时会在计算机或移动设备上存储 Cookie 的小数据文件。Cookie 是一种网络服务器存储在计算机或移动设备上的纯文本文件。Cookie 的内容只能由创建它的服务器检索或读取。每个 Cookie 对您的网络浏览器或移动应用程序都是唯一的。Cookie 通常包含标识符、站点名称以及一些号码和字符。借助于 Cookie,网站能够存储用户偏好等数据。 - -我们启用Cookie的目的与大多数网站或互联网服务提供商启用 Cookie 的目的一样,即改善用户体验。具体而言,有以下目的: - -(1)保存设置。借助于 Cookie,网站能够保存设置,例如搜索结果记录,便于返回搜索结果,从而优化浏览器服务。 - -(2)认证。当您访问本网站时,我们会为您生成一个单独的ID,以标识您的身份。如果网站不使用具有该功能的 Cookie,那么在用户每一次打开网页时,该网站都会将其视为新访客。例如,如果您登录本网站后转到另一个网页,本网站就不会识别出您,而您会被视为新访客。 - -其它第三方会将Cookie设置在我们网站上,例如谷歌、百度。它们使用Cookie接收IP地址、您浏览器配置相关的信息和访问信息,但不收集您的个人身份信息。它们使用Cookie以了解您和网站的互动情况。您可以通过访问它们的网站以了解这些第三方如何适用Cookie。 - -您可根据自己的偏好管理或删除 
Cookie。有关详情,请参见[AboutCookies](https://www.aboutcookies.org/)。您可以清除计算机上保存的所有 Cookie,大部分网络浏览器都设有阻止 Cookie 的功能。但如果您这么做,则需要在每一次访问我们的网站时亲自更改用户设置。 - -如您有任何疑问,可通过contact@mindspore.cn联系我们。 - -最近更新时间:2020年3月 diff --git a/tools/link_detection/README_CN.md b/tools/link_detection/README_CN.md deleted file mode 100644 index c2be9e6e409f7926daaf6e5034c5525da6b120c1..0000000000000000000000000000000000000000 --- a/tools/link_detection/README_CN.md +++ /dev/null @@ -1,34 +0,0 @@ -# 链接检查工具 - -## 简介 - -此工具可以检查用户指定目录里所有文件的链接,将所有链接分为三类,并且将检查结果分别写入三个文件,如下所示: - -1. 响应的状态码不是200的链接,写入`400.txt`文件中。 -2. 脚本执行过程中请求出现异常的链接,写入`exception.txt`文件中。 -3. 对于安装包的链接,因为请求非常耗时,所以不发请求,直接写入`slow.txt`文件中。 - -## 使用说明 - -该工具所依赖的操作系统为Windows操作系统,执行环境为Python环境,具体使用步骤如下所示: - -1. 打开Git Bash,下载MindSpore Docs仓代码。 - - ```shell - git clone https://gitee.com/mindspore/docs.git - ``` - -2. 进入`tools/link_detection`目录,安装执行所需的第三方库。 - - ```shell - cd tools/link_detection - pip install requests - ``` - -3. 在`link_detection`目录下执行如下命令,在输入需要检测目录的绝对路径后,开始进行检测,完成后会在当前目录下新建`404.txt`、`exception.txt`、`slow.txt`三个文件。 - - ```shell - python link_detection.py - ``` - - > 检测目录的绝对路径全使用英文,并且使用Linux的绝对路径方式,例如:`/d/master/docs`。 diff --git a/tools/link_detection/link_detection.py b/tools/link_detection/link_detection.py deleted file mode 100644 index b2ff4f5cc344244b8cf2d2512dc6286e9881fea3..0000000000000000000000000000000000000000 --- a/tools/link_detection/link_detection.py +++ /dev/null @@ -1,101 +0,0 @@ -import subprocess -import re -import requests -import urllib3 -from concurrent.futures import ThreadPoolExecutor -from threading import Lock - -def get_all_file(check_path): - ''' - get all the files in the directory. - ''' - cmd = 'find %s -type f' %check_path - res = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE) - all_file_list = res.stdout.read().decode('utf-8').split('\n') - del all_file_list[-1] - return all_file_list - -def get_all_link(all_file_list): - ''' - get all the links in all the files. 
- ''' - re_rule = "(https:\/\/)([\w\-\.,@?^=%&:/~\+#]*[\w\-\@?^=%&/~\+#])?" - for i in all_file_list: - i = i.split('/', 1)[1].replace('/', ':/', 1) - try: - with open(i, 'r', encoding='utf-8') as f: - data = f.read() - link_list = [] - urls = re.findall(re_rule, data, re.S) - if urls: - for url in urls: - link_list.append(url[0]+url[1]) - if link_list: - dic[i] = link_list - except Exception: - continue - -def get_status(addr): - ''' - Request the link and write different results to different files. - ''' - try: - link_path, link_addr, file_404, file_exception, mutexA, mutexB = addr[0], addr[1], addr[2], addr[3], addr[4], addr[5] - response = requests.get(link_addr, headers=headers, verify=False, timeout=5) - print(link_addr) - print(response.status_code) - if response.status_code != 200: - mutexA.acquire() - file_404.write('链接所在路径: %s' %link_path) - file_404.write('\n') - file_404.write('链接地址:%s' %link_addr) - file_404.write('\n') - file_404.write('链接的状态码:%s' %response.status_code) - file_404.write('\n\n\n\n\n') - mutexA.release() - except Exception : - print('exception!') - mutexB.acquire() - file_exception.write('链接所在路径: %s' %link_path) - file_exception.write('\n') - file_exception.write('链接地址:%s' %link_addr) - file_exception.write('\n\n\n\n\n') - mutexB.release() - -def multi_threading(): - ''' - open multithreading to finish tasks concurrently, do not send a request to the download link, write it directly. 
- ''' - for i in dic: - link_list = list(set(dic[i])) - for j in link_list: - if j.endswith('.whl') or j.endswith('.gz'): - f3.write('链接所在路径: %s' %i) - f3.write('\n') - f3.write('链接地址:%s' %j) - f3.write('\n\n\n\n\n') - continue - pool.submit(get_status, (i, j, f1, f2, mutexA, mutexB)) - pool.shutdown() - f1.close() - f2.close() - f3.close() - -def main(): - all_file_list = get_all_file(check_path) - get_all_link(all_file_list) - multi_threading() - -if __name__ == '__main__': - check_path = input('请输入您要检测的绝对路径:').strip() - dic = {} - urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) - headers = { - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36'} - pool = ThreadPoolExecutor(500) - f1 = open('./404.txt', 'w', encoding='utf-8') - f2 = open('./exception.txt', 'w', encoding='utf-8') - f3 = open('./slow.txt', 'w', encoding='utf-8') - mutexA = Lock() - mutexB = Lock() - main() diff --git a/tools/pic_detection/README_CN.md b/tools/pic_detection/README_CN.md deleted file mode 100644 index a3cf658bc44bc75dede5f6d86a1f649209912092..0000000000000000000000000000000000000000 --- a/tools/pic_detection/README_CN.md +++ /dev/null @@ -1,29 +0,0 @@ -# 图片检查工具 - -## 简介 - -此工具可以检查用户指定目录里所有图片的使用情况,会检查出没有使用的图片,并且将没有使用的图片删除。 - -## 使用说明 - -该工具所依赖的操作系统为Windows操作系统,执行环境为Python环境,具体使用步骤如下所示: - -1. 打开Git Bash,下载MindSpore Docs仓代码。 - - ```shell - git clone https://gitee.com/mindspore/docs.git - ``` - -2. 进入`tools/pic_detection`目录。 - - ```shell - cd tools/pic_detection - ``` - -3. 
在`pic_detection`目录下执行如下命令,在输入需要检测目录的绝对路径后,开始进行检测,最后将没有使用的图片删除。 - - ```shell - python pic_detection.py - ``` - - > 检测目录的绝对路径全使用英文,并且使用Linux的绝对路径方式,例如:`/d/master/docs`。 diff --git a/tools/pic_detection/pic_detection.py b/tools/pic_detection/pic_detection.py deleted file mode 100644 index a69cd53134a2d62380208a05284407a346d194d7..0000000000000000000000000000000000000000 --- a/tools/pic_detection/pic_detection.py +++ /dev/null @@ -1,88 +0,0 @@ -import subprocess -import os - -def get_images_dierctory(check_path): - ''' - get all images directory. - ''' - cmd = 'find %s -type d -name images' %check_path - res = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE) - dir_list = res.stdout.read().decode('utf-8').split('\n') - del dir_list[-1] - return dir_list - -def get_all_pic(dir_list): - ''' - get all the images in the images directory. - ''' - for dir in dir_list: - res = subprocess.Popen('ls %s'%dir, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE) - pic_list = res.stdout.read().decode('utf-8').split('\n') - del pic_list[-1] - for i in pic_list: - pic_all.add(i) - -def get_use_pic(check_path): - ''' - get all the useful pictures. - ''' - cmd1 = 'find %s -type f -name "*.md"' %check_path - cmd2 = 'find %s -type f -name "*.ipynb"' %check_path - cmd3 = [cmd1, cmd2] - for i in cmd3: - res = subprocess.Popen(i, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE) - file_list = res.stdout.read().decode('utf-8').split('\n') - del file_list[-1] - for j in file_list: - j = j.split('/', 1)[1].replace('/', ':/', 1) - with open(j, 'r', encoding='utf-8') as f: - data = f.read() - for k in pic_all: - if k in data: - use_pic.add(k) - -def get_use_eddx(): - ''' - get all the useful eddx files. - ''' - for i in filter_pic: - if i.endswith('eddx'): - if i.split('.')[0] in ' '.join(use_pic): - use_eddx.add(i) - -def get_useless_pic_path(check_path): - ''' - get the absolute path of all useless pictures. 
- ''' - for i in useless_pic: - cmd = 'find %s -type f -name %s' %(check_path,i) - res = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE) - data = res.stdout.read().decode('utf-8').split('\n') - del data[-1] - for j in data: - path.append(j) - -def del_useless_pic(): - ''' - delete all useless pictures. - ''' - for i in path: - os.system('rm -rf %s' %i) - - -if __name__ == '__main__': - check_path = input('请输入您要检测的绝对路径:').strip() - pic_all = set() - use_pic = set() - use_eddx = set() - path = [] - dir_list = get_images_dierctory(check_path) - get_all_pic(dir_list) - get_use_pic(check_path) - filter_pic = pic_all.difference(use_pic) - get_use_eddx() - useless_pic = filter_pic.difference(use_eddx) - get_useless_pic_path(check_path) - print('没有用的照片:', path) - del_useless_pic() - print('删除成功') diff --git a/tutorials/Makefile b/tutorials/Makefile deleted file mode 100644 index 1eff8952707bdfa503c8d60c1e9a903053170ba2..0000000000000000000000000000000000000000 --- a/tutorials/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source_zh_cn -BUILDDIR = build_zh_cn - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
-%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/tutorials/inference/Makefile b/tutorials/inference/Makefile deleted file mode 100644 index 1eff8952707bdfa503c8d60c1e9a903053170ba2..0000000000000000000000000000000000000000 --- a/tutorials/inference/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source_zh_cn -BUILDDIR = build_zh_cn - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/tutorials/inference/requirements.txt b/tutorials/inference/requirements.txt deleted file mode 100644 index 13396200381cc893c7a1ee023cbc0341eeea9f87..0000000000000000000000000000000000000000 --- a/tutorials/inference/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -sphinx >= 2.2.1, <= 2.4.4 -recommonmark -sphinx-markdown-tables -sphinx_rtd_theme -jieba diff --git a/tutorials/inference/source_en/_static/logo_notebook.png b/tutorials/inference/source_en/_static/logo_notebook.png deleted file mode 100644 index f28598315f19f4be76a73ddf5dc6bbdbe4db35fd..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_en/_static/logo_notebook.png and /dev/null differ diff --git a/tutorials/inference/source_en/_static/logo_source.png b/tutorials/inference/source_en/_static/logo_source.png deleted file mode 100644 index 9932d67ab50871edb0c95979c4e948c812c7cdea..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_en/_static/logo_source.png 
and /dev/null differ diff --git a/tutorials/inference/source_en/conf.py b/tutorials/inference/source_en/conf.py deleted file mode 100644 index 0a00ad8da18607c9f0ac88017972211d04c763c0..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/conf.py +++ /dev/null @@ -1,59 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys - - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'recommonmark', - 'sphinx_markdown_tables', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. 
See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_static_path = ['_static'] \ No newline at end of file diff --git a/tutorials/inference/source_en/images/distributed_servable.png b/tutorials/inference/source_en/images/distributed_servable.png deleted file mode 100644 index e6c76259c4e6cfe60a58a06173984ea322db2813..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_en/images/distributed_servable.png and /dev/null differ diff --git a/tutorials/inference/source_en/images/matmul_without_batch.png b/tutorials/inference/source_en/images/matmul_without_batch.png deleted file mode 100644 index 4d5873d13e4ebc13f47144433469ac20c33dee8a..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_en/images/matmul_without_batch.png and /dev/null differ diff --git a/tutorials/inference/source_en/images/resnet_example.png b/tutorials/inference/source_en/images/resnet_example.png deleted file mode 100644 index f747ae6c48c406586a418ef890f81517ee5849b2..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_en/images/resnet_example.png and /dev/null differ diff --git a/tutorials/inference/source_en/images/resnet_with_batch.png b/tutorials/inference/source_en/images/resnet_with_batch.png deleted file mode 100644 index e9b1b11093835a8780c7e8c758f6fb767a3baacd..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_en/images/resnet_with_batch.png and /dev/null differ diff --git a/tutorials/inference/source_en/index.rst b/tutorials/inference/source_en/index.rst deleted file mode 100644 index a2a499e8f3c96ac7eabaeda4d34d1bea516749a3..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/index.rst +++ /dev/null @@ -1,37 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 09:00:00 2020. 
- You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Inference Using MindSpore -================================= - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Inference Model - - multi_platform_inference - multi_platform_inference_ascend_910 - multi_platform_inference_ascend_310 - multi_platform_inference_gpu - multi_platform_inference_cpu - On-Device Inference - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Inference Service - - serving_example - serving_distributed_example - serving_grpc - serving_restful - serving_model - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Application Practice - - nlp_tprr \ No newline at end of file diff --git a/tutorials/inference/source_en/multi_platform_inference.md b/tutorials/inference/source_en/multi_platform_inference.md deleted file mode 100644 index b5e8a82269f9dbdf48d880e7a32b0b3c90abed27..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/multi_platform_inference.md +++ /dev/null @@ -1,123 +0,0 @@ -# Inference Model Overview - -`Linux` `Ascend` `GPU` `CPU` `Inference Application` `Beginner` `Intermediate` `Expert` - - - -- [Inference Model Overview](#inference-model-overview) - - [Model Files](#model-files) - - [Inference Execution](#inference-execution) - - [Introduction to MindIR](#introduction-to-mindir) - - [Networks Supported by MindIR](#networks-supported-by-mindir) - - - - - -MindSpore can execute inference tasks on different hardware platforms based on trained models. - -## Model Files - -MindSpore can save two types of data: training parameters and network models that contain parameter information. - -- Training parameters are stored in the checkpoint format. -- Network models are stored in the MindIR, AIR, or ONNX format. - -Basic concepts and application scenarios of these formats are as follows: - -- Checkpoint - - Checkpoint uses the Protocol Buffers format and stores all network parameter values. 
- - It is generally used to resume training after a training task is interrupted or executes a fine-tune task after training. -- MindSpore IR (MindIR) - - MindIR is a graph-based function-like IR of MindSpore and defines scalable graph structures and operator IRs. - - It eliminates model differences between different backends and is generally used to perform inference tasks across hardware platforms. -- Open Neural Network Exchange (ONNX) - - ONNX is an open format built to represent machine learning models. - - It is generally used to transfer models between different frameworks or used on the inference engine ([TensorRT](https://docs.nvidia.com/deeplearning/tensorrt/api/python_api/index.html)). -- Ascend Intermediate Representation (AIR) - - AIR is an open file format defined by Huawei for machine learning. - - It adapts to Huawei AI processors well and is generally used to execute inference tasks on Ascend 310. - -## Inference Execution - -Inference can be classified into the following two modes based on the application environment: - -1. Local inference - - Load a checkpoint file generated during network training and call the `model.predict` API for inference and validation. For details, see [Inference Using a Checkpoint File with Single Device](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_ascend_910.html#checkpoint). - -2. Cross-platform inference - - Use a network definition and a checkpoint file, call the `export` API to export a model file, and perform inference on different platforms. Currently, MindIR, ONNX, and AIR (on only Ascend AI Processors) models can be exported. For details, see [Saving Models](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html). - -## Introduction to MindIR - -MindSpore defines logical network structures and operator attributes through a unified IR, and decouples model files in MindIR format from hardware platforms to implement one-time training and multiple-time deployment. 
- -1. Overview - - As a unified model file of MindSpore, MindIR stores network structures and weight parameter values. In addition, it can be deployed on the on-cloud Serving and the on-device Lite platforms to execute inference tasks. - - A MindIR file supports the deployment of multiple hardware forms. - - - On-cloud deployment and inference on Serving: After MindSpore trains and generates a MindIR model file, the file can be directly sent to MindSpore Serving for loading and inference. No additional model conversion is required. This ensures that models on different hardware such as Ascend, GPU, and CPU are unified. - - On-device inference and deployment on Lite: MindIR can be directly used for Lite deployment. In addition, to meet the lightweight requirements on devices, the model miniaturization and conversion functions are provided. An original MindIR model file can be converted from the Protocol Buffers format to the FlatBuffers format for storage, and the network structure is lightweight to better meet the performance and memory requirements on devices. - -2. Application Scenarios - - Use a network definition and a checkpoint file to export a MindIR model file, and then execute inference based on different requirements, for example, [Inference Using the MindIR Model on Ascend 310 AI Processors](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_ascend_310_mindir.html), [MindSpore Serving-based Inference Service Deployment](https://www.mindspore.cn/tutorial/inference/en/master/serving_example.html), and [Inference on Devices](https://www.mindspore.cn/lite/docs/en?master). - -### Networks Supported by MindIR - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
AlexNetBERTBGCF
CenterFaceCNN&CTCDeepLabV3
DenseNet121Faster R-CNNGAT
GCNGoogLeNetLeNet
Mask R-CNNMASSMobileNetV2
NCFPSENetResNet
ResNeXtInceptionV3SqueezeNet
SSDTransformerTinyBert
UNet2DVGG16Wide&Deep
YOLOv3YOLOv4
diff --git a/tutorials/inference/source_en/multi_platform_inference_ascend_310.rst b/tutorials/inference/source_en/multi_platform_inference_ascend_310.rst deleted file mode 100644 index d7889e109f223b4335846ea5489339a4cde58a7b..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/multi_platform_inference_ascend_310.rst +++ /dev/null @@ -1,14 +0,0 @@ -Inference on Ascend 310 -=============================== - -Ascend 310 is a high-efficiency and highly integrated AI processor for edge scenes. It supports to perform inference on MindIR format and AIR format models. - -MindIR format can be exported by MindSpore CPU, GPU, Ascend 910, and can be run on GPU, Ascend 910, Ascend 310. There is no need to manually perform model conversion before inference. MindSpore needs to be installed during inference, and MindSpore C++ API is called for inference. - -AIR format can only be exported by MindSpore Ascend 910 and only Ascend 310 can infer. Before inference, the atc tool in Ascend CANN needs to be used for model conversion. MindSpore is not required for inference, only Ascend CANN software package is required. - -.. 
toctree:: - :maxdepth: 1 - - multi_platform_inference_ascend_310_mindir - multi_platform_inference_ascend_310_air diff --git a/tutorials/inference/source_en/multi_platform_inference_ascend_310_air.md b/tutorials/inference/source_en/multi_platform_inference_ascend_310_air.md deleted file mode 100644 index 21e05103e2b0ff7067320a944be6c7628643f2c8..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/multi_platform_inference_ascend_310_air.md +++ /dev/null @@ -1,256 +0,0 @@ -# Inference on the Ascend 310 AI Processor - -`Linux` `Ascend` `Inference Application` `Beginner` `Intermediate` `Expert` - - - -- [Inference on the Ascend 310 AI Processor](#inference-on-the-ascend-310-ai-processor) - - [Overview](#overview) - - [Preparing the Development Environment](#preparing-the-development-environment) - - [Hardware Preparation](#hardware-preparation) - - [Software Package Preparation](#software-package-preparation) - - [Preparing the SD Card](#preparing-the-sd-card) - - [Connecting the Atlas 200 DK to the Ubuntu Server](#connecting-the-atlas-200-dk-to-the-ubuntu-server) - - [Configuring the Python Environment](#configuring-the-python-environment) - - [Installing the Development Kit](#installing-the-development-kit) - - [Inference Directory Structure](#inference-directory-structure) - - [Exporting the AIR Model](#exporting-the-air-model) - - [Converting the AIR Model File into an OM Model](#converting-the-air-model-file-into-an-om-model) - - [Building Inference Code](#building-inference-code) - - [Performing Inference and Viewing the Result](#performing-inference-and-viewing-the-result) - - - - - -## Overview - -Ascend 310 is a highly efficient and integrated AI processor oriented to edge scenarios. The Atlas 200 Developer Kit (Atlas 200 DK) is a developer board that uses the Atlas 200 AI accelerator module. 
Integrated with the HiSilicon Ascend 310 AI processor, the Atlas 200 allows data analysis, inference, and computing for various data such as images and videos, and can be widely used in scenarios such as intelligent surveillance, robots, drones, and video servers. - -This tutorial describes how to use MindSpore to perform inference on the Atlas 200 DK based on the AIR model file. The process is as follows: - -1. Prepare the development environment, including creating an SD card for the Atlas 200 DK, configuring the Python environment, and updating the development software package. - -2. Export the AIR model file. The ResNet-50 model is used as an example. - -3. Use the ATC tool to convert the AIR model file into an OM model. - -4. Build the inference code to generate an executable `main` file. - -5. Load the saved OM model, perform inference, and view the result. - -> You can obtain the complete executable sample code at . - -## Preparing the Development Environment - -### Hardware Preparation - -- A server or PC with the Ubuntu OS is used to prepare a bootable SD card for the Atlas 200 DK and deploy the development environment. -- An SD card with a capacity of at least 16 GB. - -### Software Package Preparation - -The following five types of scripts and software packages are required for configuring the development environment: - -1. Entry script for SD card preparation: [make_sd_card.py](https://gitee.com/ascend/tools/blob/master/makesd/for_1.0.9.alpha/make_sd_card.py) - -2. Script for preparing a bootable SD card: [make_ubuntu_sd.sh](https://gitee.com/ascend/tools/blob/master/makesd/for_1.0.9.alpha/make_ubuntu_sd.sh) - -3. Ubuntu OS image package: [ubuntu-18.04.xx-server-arm64.iso](http://cdimage.ubuntu.com/ubuntu/releases/18.04/release/ubuntu-18.04.5-server-arm64.iso) - -4. 
Driver package and running package of Atlas 200 DK: - - - `Ascend310-driver-*{software version}*-ubuntu18.04.aarch64-minirc.tar.gz` - - - `Ascend310-aicpu_kernels-*{software version}*-minirc.tar.gz` - - - `Ascend-acllib-*{software version}*-ubuntu18.04.aarch64-minirc.run` - -5. Package for installing the development kit: `Ascend-Toolkit-*{version}*-arm64-linux_gcc7.3.0.run` - -In the preceding information: - -- For details about the first three items, see [Creating an SD Card with a Card Reader](https://support.huaweicloud.com/intl/en-us//usermanual-A200dk_3000/atlas200dk_02_0011.html). -- You are advised to obtain other software packages from [Firmware and Driver](https://ascend.huawei.com/en/#/hardware/firmware-drivers). On this page, select `Atlas 200 DK` from the product series and product model and select the required files to download. - -### Preparing the SD Card - -A card reader is connected to the Ubuntu server through a USB port, and the SD card is prepared using the script for SD card preparation. For details, see [Procedure](https://support.huaweicloud.com/intl/en-us/usermanual-A200dk_3000/atlas200dk_02_0011.html#section2). - -### Connecting the Atlas 200 DK to the Ubuntu Server - -The Atlas 200 DK can be connected to the Ubuntu server through a USB port or network cable. For details, see [Connecting the Atlas 200 DK to the Ubuntu Server](https://support.huaweicloud.com/intl/en-us/usermanual-A200dk_3000/atlas200dk_02_0013.html). - -### Configuring the Python Environment - -Install Python and GCC software. For details, see [Installing Dependencies](https://support.huaweicloud.com/intl/en-us/usermanual-A200dk_3000/atlas200dk_02_0016.html#section4). - -### Installing the Development Kit - -Install the development kit software package `Ascend-Toolkit-*{version}*-arm64-linux_gcc7.3.0.run`. For details, see [Installing the Development Kit](https://support.huaweicloud.com/intl/en-us/usermanual-A200dk_3000/atlas200dk_02_0017.html). 
- -## Inference Directory Structure - -Create a directory to store the inference code project, for example, `/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/acl_resnet50_sample`. The `inc`, `src`, and `test_data` [sample code](https://gitee.com/mindspore/docs/tree/master/tutorials/tutorial_code/acl_resnet50_sample) can be obtained from the official website, and the `model` directory stores the exported `AIR` model file and the converted `OM` model file. The `out` directory stores the executable file generated after building and the output result directory. The directory structure of the inference code project is as follows: - -```text -└─acl_resnet50_sample - ├── inc - │ ├── model_process.h // Header file that declares functions related to resource initialization/destruction - │ ├── sample_process.h // Header file that declares functions related to model processing - │ ├── utils.h // Header file that declares common functions (such as the file reading function) - ├── model - │ ├── resnet50_export.air // AIR model file - │ ├── resnet50_export.om // Converted OM model file - ├── src - │ ├── acl.json // Configuration file for system initialization - │ ├── CMakeLists.txt // Build script - │ ├── main.cpp // /Main function, which is the implementation file of image classification - │ ├── model_process.cpp // Implementation file of model processing functions - │ ├── sample_process.cpp // Implementation file of functions related to resource initialization and destruction - │ ├── utils.cpp // Implementation file of common functions (such as the file reading function) - ├── test_data - │ ├── test_data_1x3x224x224_1.bin // Input sample data 1 - │ ├── test_data_1x3x224x224_2.bin // input sample data 2 - ├── out - │ ├── main // Executable file generated during building - │ ├── result // Directory for storing the output result -``` - -> The output result directory `acl_resnet50_sample/out/result` must be created before inference. 
- -## Exporting the AIR Model - -Train the target network on the Ascend 910 AI Processor, save it as a checkpoint file, and export the model file in AIR format through the network and checkpoint file. For details about the export process, see [Export AIR Model](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#export-air-model). - -> The [resnet50_export.air](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com:443/sample_resources/acl_resnet50_sample/resnet50_export.air) is a sample AIR file exported using the ResNet-50 model. - -## Converting the AIR Model File into an OM Model - -Log in to the Atlas 200 DK environment, create the `model` directory for storing the AIR file `resnet50_export.air`, for example, `/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/acl_resnet50_sample/model`, go to the directory, and set the following environment variables where `install_path` specifies the actual installation path: - -```bash -export install_path=/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1 -export PATH=/usr/local/python3.7.5/bin:${install_path}/atc/ccec_compiler/bin:${install_path}/atc/bin:$PATH -export PYTHONPATH=${install_path}/atc/python/site-packages/te:${install_path}/atc/python/site-packages/topi:$PYTHONPATH -export LD_LIBRARY_PATH=${install_path}/atc/lib64:$LD_LIBRARY_PATH -export ASCEND_OPP_PATH=${install_path}/opp -``` - -Take `resnet50_export.air` as an example. Run the following command to convert the model and generate the `resnet50_export.om` file in the current directory. 
- -```bash -/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/atc/bin/atc --framework=1 --model=./resnet50_export.air --output=./resnet50_export --input_format=NCHW --soc_version=Ascend310 -``` - -In the preceding information: - -- `--model`: path of the original model file -- `--output`: path of the converted OM model file -- `--input_format`: input image format - -For detailed information about ATC tools, please select the corresponding CANN version in the [Developer Documentation(Community Edition)](https://ascend.huawei.com/en/#/document?tag=developer), and then search for the chapter of "ATC Tool Instructions". - -## Building Inference Code - -Go to the project directory `acl_resnet50_sample` and set the following environment variables: - -```bash -export DDK_PATH=/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1 -export NPU_HOST_LIB=/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/lib64/stub/ -``` - -> The `include` directory of the `acllib` package in the `CMakeLists.txt` file must be correctly specified. Otherwise, an error indicating that `acl/acl.h` cannot be found is reported. The code location of the `include` directory is as follows. If the location is inconsistent with the actual installation directory, modify it. - -```text -... -#Header path - - include_directories( - - ${INC_PATH}/acllib_linux.arm64/include/ - - ../ - - ) -... -``` - -Run the following command to create a build directory: - -```bash -mkdir -p build/intermediates/minirc -``` - -Run the following command to switch to the build directory: - -```bash -cd build/intermediates/minirc -``` - -Run the `cmake` command: - -```bash -cmake ../../../src -DCMAKE_CXX_COMPILER=aarch64-linux-gnu-g++ -DCMAKE_SKIP_RPATH=TRUE -``` - -Run the `make` command for building: - -```bash -make -``` - -After building, the executable `main` file is generated in `acl_resnet50_sample/out`. 
- -## Performing Inference and Viewing the Result - -Copy the generated OM model file `resnet50_export.om` to the `acl_resnet50_sample/out` directory (the same path as the executable `main` file) and ensure that the input data sample is ready in the `acl_resnet50_sample/test_data` directory. Then, you can perform inference. - -Note that the following environment variables must be set. Otherwise, the inference fails. - -```bash -export LD_LIBRARY_PATH=/home/HwHiAiUser/Ascend/acllib/lib64/ -``` - -Go to the `acl_resnet50_sample/out` directory. If the `result` directory does not exist in the current directory, run the `mkdir result` command to create one and run the following command to perform inference: - -```bash -./main ./resnet50_export.om ../test_data -``` - -After the execution is successful, the following inference result is displayed. The `top5` probability label is displayed, and the output result is saved in the `.bin` file format in the `acl_resnet50_sample/out/result` directory. 
- -```text -[INFO] acl init success -[INFO] open device 0 success -[INFO] create context success -[INFO] create stream success -[INFO] get run mode success -[INFO] load model ./resnet50_export.om success -[INFO] create model description success -[INFO] create model output success -[INFO] start to process file:../test_data/test_data_1x3x224x224_1.bin -[INFO] model execute success -[INFO] top 1: index[2] value[0.941406] -[INFO] top 2: index[3] value[0.291992] -[INFO] top 3: index[1] value[0.067139] -[INFO] top 4: index[0] value[0.013519] -[INFO] top 5: index[4] value[-0.226685] -[INFO] output data success -[INFO] dump data success -[INFO] start to process file:../test_data/test_data_1x3x224x224_2.bin -[INFO] model execute success -[INFO] top 1: index[2] value[0.946289] -[INFO] top 2: index[3] value[0.296143] -[INFO] top 3: index[1] value[0.072083] -[INFO] top 4: index[0] value[0.014549] -[INFO] top 5: index[4] value[-0.225098] -[INFO] output data success -[INFO] dump data success -[INFO] unload model success, modelId is 1 -[INFO] execute sample success -[INFO] end to destroy stream -[INFO] end to destroy context -[INFO] end to reset device is 0 -[INFO] end to finalize acl -``` diff --git a/tutorials/inference/source_en/multi_platform_inference_ascend_310_mindir.md b/tutorials/inference/source_en/multi_platform_inference_ascend_310_mindir.md deleted file mode 100644 index 1ca30bb2fb694610364aa6e9b9c84238a7e15bc4..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/multi_platform_inference_ascend_310_mindir.md +++ /dev/null @@ -1,335 +0,0 @@ -# Inference Using the MindIR Model on Ascend 310 AI Processors - -`Linux` `Ascend` `Inference Application` `Beginner` `Intermediate` `Expert` - - - -- [Inference Using the MindIR Model on Ascend 310 AI Processors](#inference-using-the-mindir-model-on-ascend-310-ai-processors) - - [Overview](#overview) - - [Preparing the Development Environment](#preparing-the-development-environment) - - [Exporting the 
MindIR Model](#exporting-the-mindir-model) - - [Inference Directory Structure](#inference-directory-structure) - - [Inference Code](#inference-code) - - [Introduce to Building Script](#introduce-to-building-script) - - [Building Inference Code](#building-inference-code) - - [Performing Inference and Viewing the Result](#performing-inference-and-viewing-the-result) - - - - - -## Overview - -Ascend 310 is a highly efficient and integrated AI processor oriented to edge scenarios. The Atlas 200 Developer Kit (Atlas 200 DK) is a developer board that uses the Atlas 200 AI accelerator module. Integrated with the HiSilicon Ascend 310 AI processor, the Atlas 200 allows data analysis, inference, and computing for various data such as images and videos, and can be widely used in scenarios such as intelligent surveillance, robots, drones, and video servers. - -This tutorial describes how to use MindSpore to perform inference on the Atlas 200 DK based on the MindIR model file. The process is as follows: - -1. Prepare the development environment, including creating an SD card for the Atlas 200 DK, configuring the Python environment, and updating the development software package. - -2. Export the MindIR model file. The ResNet-50 model is used as an example. - -3. Build the inference code to generate an executable `main` file. - -4. Load the saved MindIR model, perform inference, and view the result. - -> You can obtain the complete executable sample code at . - -## Preparing the Development Environment - -Refer to [Inference on the Ascend 310 AI Processor](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_ascend_310_air.html#preparing-the-development-environment) to install the device, and then refer to [Installation Guide](https://www.mindspore.cn/install/en) to install MindSpore. 
- -## Exporting the MindIR Model - -Train the target network on the Ascend 910 AI Processor, save it as a checkpoint file, and export the model file in MindIR format through the network and checkpoint file. For details about the export process, see [Export MindIR Model](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#export-mindir-model). - -> The [resnet50_imagenet.mindir](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/sample_resources/ascend310_resnet50_preprocess_sample/resnet50_imagenet.mindir) is a sample MindIR file exported using the ResNet-50 model, whose BatchSize is 1. - -## Inference Directory Structure - -Create a directory to store the inference code project, for example, `/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_resnet50_preprocess_sample`. The directory code can be obtained from the [official website](https://gitee.com/mindspore/docs/tree/master/tutorials/tutorial_code/ascend310_resnet50_preprocess_sample). The `model` directory stores the exported `MindIR` model files and the `test_data` directory stores the images to be classified. The directory structure of the inference code project is as follows: - -```text -└─ascend310_resnet50_preprocess_sample - ├── CMakeLists.txt // Build script - ├── README.md // Usage description - ├── main.cc // Main function - ├── model - │ └── resnet50_imagenet.mindir // MindIR model file - └── test_data - ├── ILSVRC2012_val_00002138.JPEG // Input sample image 1 - ├── ILSVRC2012_val_00003014.JPEG // Input sample image 2 - ├── ... // Input sample image n -``` - -## Inference Code - -### Data-preprocessing by CPU operators - -Inference sample code: . - -Using namespace of `mindspore` and `mindspore::dataset`. 
- -```c++ -namespace ms = mindspore; -namespace ds = mindspore::dataset; -``` - -Set global context, device target is `Ascend 310` and device id is `0`: - -```c++ -auto context = std::make_shared(); -auto ascend310_info = std::make_shared(); -ascend310_info->SetDeviceID(0); -context->MutableDeviceInfo().push_back(ascend310_info); -``` - -Load mindir file: - -```c++ -// Load MindIR model -ms::Graph graph; -ms::Status ret = ms::Serialization::Load(resnet_file, ms::ModelType::kMindIR, &graph); -// Build model with graph object -ms::Model resnet50; -ret = resnet50.Build(ms::GraphCell(graph), context); -``` - -Get informance of this model: - -```c++ -std::vector model_inputs = resnet50.GetInputs(); -``` - -Load image file: - -```c++ -// Readfile is a function to read images -ms::MSTensor ReadFile(const std::string &file); -auto image = ReadFile(image_file); -``` - -Image preprocess(CPU operators): - -```c++ -// Create the CPU operator provided by MindData to get the function object - -// Decode the input to RGB format -std::shared_ptr decode(new ds::vision::Decode()); -// Resize the image to the given size -std::shared_ptr resize(new ds::vision::Resize({256})); -// Normalize the input -std::shared_ptr normalize(new ds::vision::Normalize( - {0.485 * 255, 0.456 * 255, 0.406 * 255}, {0.229 * 255, 0.224 * 255, 0.225 * 255})); -// Crop the input image at the center -std::shared_ptr center_crop(new ds::vision::CenterCrop({224, 224})); -// shape (H, W, C) to shape (C, H, W) -std::shared_ptr hwc2chw(new ds::vision::HWC2CHW()); - -// // Define a MindData preprocessor -ds::Execute preprocessor({decode, resize, normalize, center_crop, hwc2chw}); - -// Call the function object to get the processed image -ret = preprocessor(image, &image); -``` - -Execute the model: - -```c++ -// Create outputs vector -std::vector outputs; -// Create inputs vector -std::vector inputs; -inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(), - 
image.Data().get(), image.DataSize()); -// Call the Predict function of Model for inference -ret = resnet50.Predict(inputs, &outputs); -``` - -Print the result: - -```c++ -// Output the maximum probability to the screen -std::cout << "Image: " << image_file << " infer result: " << GetMax(outputs[0]) << std::endl; -``` - -### Data pre-processing by Ascend 310 operators - -Dvpp module is a hardware decoder embedded in Ascend 310 AI chip which has a better performance on image processing compare with CPU operators. Several transforms applied on JPEG format image are supported. - -Using namespace of `mindspore` and `mindspore::dataset`. - -```c++ -namespace ms = mindspore; -namespace ds = mindspore::dataset; -``` - -Set global context, device target is `Ascend 310` and device id is `0`: - -```c++ -auto context = std::make_shared(); -auto ascend310_info = std::make_shared(); -ascend310_info->SetDeviceID(0); -context->MutableDeviceInfo().push_back(ascend310_info); -``` - -Load image file: - -```c++ -// Readfile is a function to read images -ms::MSTensor ReadFile(const std::string &file); -auto image = ReadFile(image_file); -``` - -Image preprocess(Ascend 310 operators): - -```c++ -// Create the CPU operator provided by MindData to get the function object - -// Decode the input to YUV420 format -std::shared_ptr decode(new ds::vision::Decode()); -// Resize the image to the given size -std::shared_ptr resize(new ds::vision::Resize({256})); -// Normalize the input -std::shared_ptr normalize(new ds::vision::Normalize( - {0.485 * 255, 0.456 * 255, 0.406 * 255}, {0.229 * 255, 0.224 * 255, 0.225 * 255})); -// Crop the input image at the center -std::shared_ptr center_crop(new ds::vision::CenterCrop({224, 224})); -``` - -Image preprocess (Ascend 310 operators, 130% performance increasing compare to CPU operators). - -Explicitly specify the computing hardware as Ascend 310. 
- -```c++ -// Define a MindData preprocessor, set deviceType = kAscend310, device id = 0 -ds::Execute preprocessor({decode, resize, center_crop, normalize}, MapTargetDevice::kAscend310, 0); - -// Call the function object to get the processed image -ret = preprocessor(image, &image); -``` - -Load mindir file: Ascend 310 operators must bind with Aipp module, insert Aipp module for model graph compiling. - - ```c++ -// Load MindIR model -ms::Graph graph; -ms::Status ret = ms::Serialization::Load(resnet_file, ms::ModelType::kMindIR, &graph); -// Build model with graph object -ascend310_info->SetInsertOpConfigPath(preprocessor.AippCfgGenerator()); -ms::Model resnet50; -ret = resnet50.Build(ms::GraphCell(graph), context); - ``` - -Get input information of this model: - -```c++ -std::vector model_inputs = resnet50.GetInputs(); -``` - -Execute the model: - -```c++ -// Create outputs vector -std::vector outputs; -// Create inputs vector -std::vector inputs; -inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(), - image.Data().get(), image.DataSize()); -// Call the Predict function of Model for inference -ret = resnet50.Predict(inputs, &outputs); -``` - -Print the result: - -```c++ -// Output the maximum probability to the screen -std::cout << "Image: " << image_file << " infer result: " << GetMax(outputs[0]) << std::endl; -``` - -## Introduce to Building Script - -The building script is used to building applications: . 
- -Add head files to gcc search path: - -```cmake -option(MINDSPORE_PATH "mindspore install path" "") -include_directories(${MINDSPORE_PATH}) -include_directories(${MINDSPORE_PATH}/include) -``` - -Find the shared libraries in MindSpore: - -```cmake -find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib) -file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*) -``` - -Use the source files to generate the target executable file, and link the MindSpore libraries for the executable file: - -```cmake -add_executable(resnet50_sample main.cc) -target_link_libraries(resnet50_sample ${MS_LIB} ${MD_LIB}) -``` - -## Building Inference Code - -Go to the project directory `ascend310_resnet50_preprocess_sample` and set the following environment variables: - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. -export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/acllib/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/atc/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# lib libraries that the mindspore depends on, modify "pip3" according to the actual situation -export LD_LIBRARY_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore/lib"}' | xargs realpath`:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/atc/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on 
-``` - -Run the `cmake` command, modify `pip3` according to the actual situation: - -```bash -cmake . -DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -``` - -Run the `make` command for building. - -```bash -make -``` - -After building, the executable `main` file is generated in `ascend310_resnet50_preprocess_sample`. - -## Performing Inference and Viewing the Result - -Log in to the Atlas 200 DK developer board, and create the `model` directory for storing the MindIR file `resnet50_imagenet.mindir`, for example, `/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_resnet50_preprocess_sample/model`. -Create the `test_data` directory to store images, for example, `/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_resnet50_preprocess_sample/test_data`. -Then, perform the inference. - -```bash -./resnet50_sample -``` - -Inference is performed on all images stored in the `test_data` directory. 
For example, if there are 9 images whose label is 0 in the [ImageNet2012](http://image-net.org/download-images) validation set, the inference result is as follows: - -```text -Image: ./test_data/ILSVRC2012_val_00002138.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00003014.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00006697.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00007197.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009111.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009191.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009346.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009379.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009396.JPEG infer result: 0 -``` diff --git a/tutorials/inference/source_en/multi_platform_inference_ascend_910.md b/tutorials/inference/source_en/multi_platform_inference_ascend_910.md deleted file mode 100644 index f9c0cdf494daea0a5f1a0ba19e6e259263ccb8f7..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/multi_platform_inference_ascend_910.md +++ /dev/null @@ -1,335 +0,0 @@ -# Inference on the Ascend 910 AI processor - -`Linux` `Ascend` `Inference Application` `Beginner` `Intermediate` `Expert` - - - -- [Inference on the Ascend 910 AI processor](#inference-on-the-ascend-910-ai-processor) - - [Inference Using a Checkpoint File with Single Device](#inference-using-a-checkpoint-file-with-single-device) - - [Distributed Inference With Multi Devices](#distributed-inference-with-multi-devices) - - [Use C++ Interface to Load a MindIR File for Inferencing](#use-c-interface-to-load-a-mindir-file-for-inferencing) - - [Inference Directory Structure](#inference-directory-structure) - - [Inference Code](#inference-code) - - [Introduce to Building Script](#introduce-to-building-script) - - [Building Inference Code](#building-inference-code) - - [Performing Inference and Viewing the 
Result](#performing-inference-and-viewing-the-result) - - - - - -## Inference Using a Checkpoint File with Single Device - -1. Use the `model.eval` interface for model validation. - - 1.1 Local Storage - - When the pre-trained models are saved in local, the steps of performing inference on validation dataset are as follows: firstly creating a model, then loading the model and parameters using `load_checkpoint` and `load_param_into_net` in `mindspore.train.serialization` module, and finally performing inference on the validation dataset once being created. The method of processing the validation dataset is the same as that of the training dataset. - - ```python - network = LeNet5(cfg.num_classes) - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.Momentum(network.trainable_params(), cfg.lr, cfg.momentum) - model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}) - - print("============== Starting Testing ==============") - param_dict = load_checkpoint(args.ckpt_path) - load_param_into_net(network, param_dict) - dataset = create_dataset(os.path.join(args.data_path, "test"), - cfg.batch_size, - 1) - acc = model.eval(dataset, dataset_sink_mode=args.dataset_sink_mode) - print("============== {} ==============".format(acc)) - ``` - - In the preceding information: - `model.eval` is an API for model validation. For details about the API, see . - > Inference sample code: . - - 1.2 Remote Storage - - When the pre-trained models are saved remotely, the steps of performing inference on the validation dataset are as follows: firstly determining which model to be used, then loading the model and parameters using `mindspore_hub.load`, and finally performing inference on the validation dataset once being created. The method of processing the validation dataset is the same as that of the training dataset. - - ```python - model_uid = "mindspore/ascend/0.7/googlenet_v1_cifar10" # using GoogleNet as an example. 
- network = mindspore_hub.load(model_uid, num_classes=10) - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.Momentum(network.trainable_params(), cfg.lr, cfg.momentum) - model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}) - - print("============== Starting Testing ==============") - dataset = create_dataset(os.path.join(args.data_path, "test"), - cfg.batch_size, - 1) - acc = model.eval(dataset, dataset_sink_mode=args.dataset_sink_mode) - print("============== {} ==============".format(acc)) - ``` - - In the preceding information: - - `mindspore_hub.load` is an API for loading model parameters. Please check the details in . - -2. Use the `model.predict` API to perform inference. - - ```python - model.predict(input_data) - ``` - - In the preceding information: - `model.predict` is an API for inference. For details about the API, see . - -## Distributed Inference With Multi Devices - -Distributed inference means using multiple devices for prediction. If data parallel or integrated save is used in training, the method of distributed inference is the same as the above description. It is noted that each device should load one same checkpoint file. - -This tutorial would focus on the process that the model slices are saved on each device in the distributed training process, and the model is reloaded according to the prediction strategy in the inference stage. In view of the problem that there are too many parameters in the super large scale neural network model, the model cannot be fully loaded into a single device for inference, so multiple devices can be used for distributed inference. - -> Distributed inference sample code: -> -> - -The process of distributed inference is as follows: - -1. Execute training, generate the checkpoint file and the model strategy file. - - > - The distributed training tutorial and sample code can be referred to the link: . 
- > - In the distributed inference scenario, during the training phase, the `integrated_save` of `CheckpointConfig` interface should be set to `False`, which means that each device only saves the slice of model instead of the full model. - > - `parallel_mode` of `set_auto_parallel_context` interface should be set to `auto_parallel` or `semi_auto_parallel`. - > - In addition, you need to specify `strategy_ckpt_save_file` to indicate the path of the strategy file. - -2. Set context and infer prediction strategy according to the prediction data. - - ```python - context.set_auto_parallel_context(full_batch=True, parallel_mode='semi_auto_parallel', strategy_ckpt_load_file='./train_strategy.ckpt') - network = Net() - model = Model(network) - predict_data = create_predict_data() - predict_strategy = model.infer_predict_layout(predict_data) - ``` - - In the preceding information: - - - `full_batch`: whether to load the dataset in full or not. When `True`, it indicates full load, and data of each device is the same. It must be set to `True` in this scenario. - - `parallel_mode`: parallel mode, it must be `auto_parallel` or `semi_auto_parallel`. - - `strategy_ckpt_load_file`: file path of the strategy generated in the training phase, which must be set in the distributed inference scenario. - - `create_predict_data`: user-defined interface that returns prediction data whose type is `Tensor`. - - `infer_predict_layout`: generates prediction strategy based on prediction data. - -3. Load checkpoint files, and load the corresponding model slice into each device based on the prediction strategy. - - ```python - ckpt_file_list = create_ckpt_file_list() - load_distributed_checkpoint(network, ckpt_file_list, predict_strategy) - ``` - - In the preceding information: - - - `create_ckpt_file_list`: user-defined interface that returns a list of checkpoint file paths in order of rank id. 
- - `load_distributed_checkpoint`: merges model slices, then splits it according to the prediction strategy, and loads it into the network. - - > The `load_distributed_checkpoint` interface supports that predict_strategy is `None`, which is single device inference, and the process is different from distributed inference. The detailed usage can be referred to the link: - > . - -4. Execute inference. - - ```python - model.predict(predict_data) - ``` - -## Use C++ Interface to Load a MindIR File for Inferencing - -Users can create C++ applications and call the MindSpore C++ interface to perform inference on MindIR models. - -### Inference Directory Structure - -Create a directory to store the inference code project, for example, `/home/HwHiAiUser/mindspore_sample/ascend910_resnet50_preprocess_sample`. The directory code can be obtained from the [official website](https://gitee.com/mindspore/docs/tree/master/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample). The `model` directory stores the exported `MindIR` model files and the `test_data` directory stores the images to be classified. The directory structure of the inference code project is as follows: - -```text -└─ascend910_resnet50_preprocess_sample - ├── CMakeLists.txt // Build script - ├── README.md // Usage description - ├── main.cc // Main function - ├── model - │ └── resnet50_imagenet.mindir // MindIR model file - └── test_data - ├── ILSVRC2012_val_00002138.JPEG // Input sample image 1 - ├── ILSVRC2012_val_00003014.JPEG // Input sample image 2 - ├── ... // Input sample image n -``` - -### Inference Code - -Inference sample code: . - -Using namespace of `mindspore` and `mindspore::dataset`. 
- -```c++ -namespace ms = mindspore; -namespace ds = mindspore::dataset; -``` - -Set global context, device target is `Ascend910` and device id is `0`: - -```c++ -auto context = std::make_shared(); -auto ascend910_info = std::make_shared(); -ascend910_info->SetDeviceID(0); -context->MutableDeviceInfo().push_back(ascend910_info); -``` - -Load mindir file: - -```c++ -// Load MindIR model -ms::Graph graph; -ms::Status ret = ms::Serialization::Load(resnet_file, ms::ModelType::kMindIR, &graph); -// Build model with graph object -ms::Model resnet50; -ret = resnet50.Build(ms::GraphCell(graph), context); -``` - -Get input information of this model: - -```c++ -std::vector model_inputs = resnet50.GetInputs(); -``` - -Load image file: - -```c++ -// Readfile is a function to read images -ms::MSTensor ReadFile(const std::string &file); -auto image = ReadFile(image_file); -``` - -Image preprocess: - -```c++ -// Create the CPU operator provided by MindData to get the function object - -// Decode the input to RGB format -std::shared_ptr decode(new ds::vision::Decode()); -// Resize the image to the given size -std::shared_ptr resize(new ds::vision::Resize({256})); -// Normalize the input -std::shared_ptr normalize(new ds::vision::Normalize( - {0.485 * 255, 0.456 * 255, 0.406 * 255}, {0.229 * 255, 0.224 * 255, 0.225 * 255})); -// Crop the input image at the center -std::shared_ptr center_crop(new ds::vision::CenterCrop({224, 224})); -// shape (H, W, C) to shape (C, H, W) -std::shared_ptr hwc2chw(new ds::vision::HWC2CHW()); - -// Define a MindData preprocessor -ds::Execute preprocessor({decode, resize, normalize, center_crop, hwc2chw}); - -// Call the function object to get the processed image -ret = preprocessor(image, &image); -``` - -Execute the model: - -```c++ -// Create outputs vector -std::vector outputs; -// Create inputs vector -std::vector inputs; -inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(), - image.Data().get(), 
image.DataSize()); -// Call the Predict function of Model for inference -ret = resnet50.Predict(inputs, &outputs); -``` - -Print the result: - -```c++ -// Output the maximum probability to the screen -std::cout << "Image: " << image_file << " infer result: " << GetMax(outputs[0]) << std::endl; -``` - -### Introduce to Building Script - -The building script is used to building applications: . - -Add head files to gcc search path: - -```cmake -option(MINDSPORE_PATH "mindspore install path" "") -include_directories(${MINDSPORE_PATH}) -include_directories(${MINDSPORE_PATH}/include) -``` - -Find the shared libraries in MindSpore: - -```cmake -find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib) -file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*) -``` - -Use the source files to generate the target executable file, and link the MindSpore libraries for the executable file: - -```cmake -add_executable(resnet50_sample main.cc) -target_link_libraries(resnet50_sample ${MS_LIB} ${MD_LIB}) -``` - -### Building Inference Code - -Go to the project directory `ascend910_resnet50_preprocess_sample` and set the following environment variables: - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. 
-export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/lib64:${LOCAL_ASCEND}/driver/lib64/common:${LOCAL_ASCEND}/driver/lib64/driver:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# lib libraries that the mindspore depends on, modify "pip3" according to the actual situation -export LD_LIBRARY_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore/lib"}' | xargs realpath`:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on -``` - -Run the `cmake` command, modify `pip3` according to the actual situation: - -```bash -cmake . -DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -``` - -Run the `make` command for building. - -```bash -make -``` - -After building, the executable `main` file is generated in `ascend910_resnet50_preprocess_sample`. - -## Performing Inference and Viewing the Result - -Log in to the Ascend 910 server, and create the `model` directory for storing the MindIR file `resnet50_imagenet.mindir`, for example, `/home/HwHiAiUser/mindspore_sample/ascend910_resnet50_preprocess_sample/model`. -Create the `test_data` directory to store images, for example, `/home/HwHiAiUser/mindspore_sample/ascend910_resnet50_preprocess_sample/test_data`. -Then, perform the inference. 
- -```bash -./resnet50_sample -``` - -Inference is performed on all images stored in the `test_data` directory. For example, if there are 9 images whose label is 0 in the [ImageNet2012](http://image-net.org/download-images) validation set, the inference result is as follows: - -```text -Image: ./test_data/ILSVRC2012_val_00002138.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00003014.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00006697.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00007197.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009111.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009191.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009346.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009379.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009396.JPEG infer result: 0 -``` diff --git a/tutorials/inference/source_en/multi_platform_inference_cpu.md b/tutorials/inference/source_en/multi_platform_inference_cpu.md deleted file mode 100644 index a4da624d62403c70730094936b72384a16495775..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/multi_platform_inference_cpu.md +++ /dev/null @@ -1,25 +0,0 @@ -# Inference on a CPU - -`Linux` `CPU` `Inference Application` `Beginner` `Intermediate` `Expert` - - - -- [Inference on a CPU](#inference-on-a-cpu) - - [Inference Using a Checkpoint File](#inference-using-a-checkpoint-file) - - [Inference Using an ONNX File](#inference-using-an-onnx-file) - - - - - -## Inference Using a Checkpoint File - -The inference is the same as that on the Ascend 910 AI processor. - -## Inference Using an ONNX File - -Similar to the inference on a GPU, the following steps are required: - -1. Generate a model in ONNX format on the training platform. For details, see [Export ONNX Model](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#export-onnx-model). - -2. 
Perform inference on a CPU by referring to the runtime or SDK document. For details about how to use the ONNX Runtime, see the [ONNX Runtime document](https://github.com/microsoft/onnxruntime). diff --git a/tutorials/inference/source_en/multi_platform_inference_gpu.md b/tutorials/inference/source_en/multi_platform_inference_gpu.md deleted file mode 100644 index eed722ab58bd55d860cb09e7f77419891b40b1c1..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/multi_platform_inference_gpu.md +++ /dev/null @@ -1,23 +0,0 @@ -# Inference on a GPU - -`Linux` `GPU` `Inference Application` `Beginner` `Intermediate` `Expert` - - - -- [Inference on a GPU](#inference-on-a-gpu) - - [Inference Using a Checkpoint File](#inference-using-a-checkpoint-file) - - [Inference Using an ONNX File](#inference-using-an-onnx-file) - - - - - -## Inference Using a Checkpoint File - -The inference is the same as that on the Ascend 910 AI processor. - -## Inference Using an ONNX File - -1. Generate a model in ONNX format on the training platform. For details, see [Export ONNX Model](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#export-onnx-model). - -2. Perform inference on a GPU by referring to the runtime or SDK document. For example, use TensorRT to perform inference on the NVIDIA GPU. For details, see [TensorRT backend for ONNX](https://github.com/onnx/onnx-tensorrt). 
diff --git a/tutorials/inference/source_en/nlp_tprr.md b/tutorials/inference/source_en/nlp_tprr.md deleted file mode 100644 index 616157de1610e1a82c145266662b1ef2c1987459..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/nlp_tprr.md +++ /dev/null @@ -1,268 +0,0 @@ -# Multi-hop Knowledge Reasoning Question-answering Model TPRR - -Translator: [longvoyage](https://gitee.com/yuanyanglv) - -`Linux` `Ascend` `Model Development` `Expert` - - - -- [Multi-hop Knowledge Reasoning Question-answering Model TPRR](#multi-hop-knowledge-reasoning-question-answering-model-tprr) - - [Overview](#overview) - - [Preparation](#preparation) - - [Installing Dependent Software](#installing-dependent-software) - - [Preparing Data](#preparing-data) - - [Loading Data](#loading-data) - - [Defining the Network](#defining-the-network) - - [Setting Model Parameters](#setting-model-parameters) - - [Defining the Model](#defining-the-model) - - [Inference Network](#inference-network) - - [Running Script](#running-script) - - [Reference](#reference) - - -   - -## Overview - -TPRR(Thinking Path Re-Ranker) is an open-domain knowledge based multi-hop question-answering model proposed by Huawei, which is used to realize multi-hop knowledge reasoning question-answering. In traditional question-answering, as long as the sentences related to the original question is found by the model, the answer can be found. It requires multiple "jumps" to find the answer for multi-hop knowledge reasoning question. Specifically, the model needs to use knowledge from multiple related documents to infer the correct answer for the given question. There are three modules in TPRR model: Retriever, Reranker and Reader. 
According to the given multi hop question, Retriever selects the candidate document sequence containing the answer from millions of Wiki documents, Reranker selects the best document sequence from the candidate document sequence, and finally Reader parses the answer from multiple sentences of the best document to complete the multi-hop knowledge reasoning question-answering. TPRR model uses conditional probability to model the complete reasoning path, and introduces the negative sample selection strategy of "thinking" in the training. It ranks first in Fullwiki Setting of international authoritative HotpotQA evaluation, and ranks first in the joint accuracy, clue accuracy and other two indicators. Compared with the traditional multi-hop question-answering model, TPRR only uses pure text information and does not need additional entity extraction technology. MindSpore hybrid precision feature is used to speed up TPRR model from framework. Combined with Ascend, it can achieve significant performance improvement. - -This tutorial will mainly introduce how to build and run a multi-hop knowledge reasoning question-answering model TPRR with MindSpore on Ascend. - -> You can download the complete sample code here: -. - -The sample code directory structure is as follows: - -```shell -. 
-└─tprr - ├─README.md - ├─scripts - | ├─run_eval_ascend.sh # Launch retriever evaluation in ascend - | └─run_eval_ascend_reranker_reader.sh # Launch re-ranker and reader evaluation in ascend - | - ├─src - | ├─build_reranker_data.py # build data for re-ranker from result of retriever - | ├─config.py # Evaluation configurations for retriever - | ├─hotpot_evaluate_v1.py # Hotpotqa evaluation script - | ├─onehop.py # Onehop model of retriever - | ├─onehop_bert.py # Onehop bert model of retriever - | ├─process_data.py # Data preprocessing for retriever - | ├─reader.py # Reader model - | ├─reader_albert_xxlarge.py # Albert-xxlarge module of reader model - | ├─reader_downstream.py # Downstream module of reader model - | ├─reader_eval.py # Reader evaluation script - | ├─rerank_albert_xxlarge.py # Albert-xxlarge module of re-ranker model - | ├─rerank_and_reader_data_generator.py # Data generator for re-ranker and reader - | ├─rerank_and_reader_utils.py # Utils for re-ranker and reader - | ├─rerank_downstream.py # Downstream module of re-ranker model - | ├─reranker.py # Re-ranker model - | ├─reranker_eval.py # Re-ranker evaluation script - | ├─twohop.py # Twohop model of retriever - | ├─twohop_bert.py # Twohop bert model of retriever - | └─utils.py # Utils for retriever - | - ├─retriever_eval.py # Evaluation net for retriever - └─reranker_and_reader_eval.py # Evaluation net for re-ranker and reader -``` - -The overall execution process is as follows: - -1. Prepare HotpotQA Development dataset, load processing data; -2. Set TPRR model parameters; -3. Initialize the TPRR model; -4. Load the dataset and model CheckPoint and perform inference, check the results and save the output. - -## Preparation - -### Installing Dependent Software - -1. Install MindSpore - - Before practicing, make sure that MindSpore has been installed correctly.If not, you can install it through [the MindSpore installation page](https://www.mindspore.cn/install/en). - -2. 
Install transformers - - ```shell - pip install transformers - ``` - -### Preparing Data - -The data used in this tutorial is the preprocessed [en-Wikipedia](https://github.com/AkariAsai/learning_to_retrieve_reasoning_paths/tree/master/retriever) and [HotpotQA Development datasets](https://hotpotqa.github.io/). Please download the [preprocessed data](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/tprr/data.zip) first. - -## Loading Data - -Store the downloaded data in the scripts directory. The Retriever module loads the data files preprocessed by wiki and HotpotQA, and retrieves relevant documents from the data according to the given multi-hop question. The source code of data loading is in the file `src/process_data.py`. - -```python -def load_data(self): - """load data""" - print('********************** loading data ********************** ') - # wiki data - f_wiki = open(self.wiki_path, 'rb') - # hotpotqa dev data - f_train = open(self.dev_path, 'rb') - # doc data - f_doc = open(self.dev_data_path, 'rb') - data_db = pkl.load(f_wiki, encoding="gbk") - dev_data = json.load(f_train) - q_doc_text = pkl.load(f_doc, encoding='gbk') - return data_db, dev_data, q_doc_text -``` - -Retrieved results of the Retriever module are saved in the scripts directory. According to the results, the Reranker module uses a custom DataGenerator class loading the data files preprocessed by wiki and HotpotQA to generator the reordering results and save them in the scripts directory. According to the reordering results, the Reader module also uses a custom DataGenerator class loading data files preprocessed by wiki and HotpotQA to extract answers and evidence. The source code of custom DataGenerator class is in the file `src/rerank_and_reader_data_generator.py`. 
- -```python -class DataGenerator: - """data generator for reranker and reader""" - def __init__(self, feature_file_path, example_file_path, batch_size, seq_len, - para_limit=None, sent_limit=None, task_type=None): - """init function""" - self.example_ptr = 0 - self.bsz = batch_size - self.seq_length = seq_len - self.para_limit = para_limit - self.sent_limit = sent_limit - self.task_type = task_type - self.feature_file_path = feature_file_path - self.example_file_path = example_file_path - self.features = self.load_features() - self.examples = self.load_examples() - self.feature_dict = self.get_feature_dict() - self.example_dict = self.get_example_dict() - self.features = self.padding_feature(self.features, self.bsz) -``` - -## Defining the Network - -### Setting Model Parameters - -The user can customize parameters such as topk and onehop_num in the model. Topk represents the number of candidate one-hop documents after Retriever sorting. The larger the topk, the more candidate documents. The recall rate will increase and more noise will be introduced, the accuracy rate will decrease; Onehop_num represents the number of one-hop candidate documents as two-hop candidate documents. The larger onehop_num, the more documents to be selected for the second hop. The recall rate will increase and more noise will be introduced, the accuracy rate will decrease. 
- -```python -def ThinkRetrieverConfig(): - """retriever config""" - parser = argparse.ArgumentParser() - parser.add_argument("--q_len", type=int, default=64, help="max query len") - parser.add_argument("--d_len", type=int, default=192, help="max doc len") - parser.add_argument("--s_len", type=int, default=448, help="max seq len") - parser.add_argument("--in_len", type=int, default=768, help="in len") - parser.add_argument("--out_len", type=int, default=1, help="out len") - parser.add_argument("--num_docs", type=int, default=500, help="docs num") - parser.add_argument("--topk", type=int, default=8, help="top num") - parser.add_argument("--onehop_num", type=int, default=8, help="onehop num") - parser.add_argument("--batch_size", type=int, default=1, help="batch size") - parser.add_argument("--device_num", type=int, default=8, help="device num") - parser.add_argument("--vocab_path", type=str, default='../vocab.txt', help="vocab path") - parser.add_argument("--wiki_path", type=str, default='../db_docs_bidirection_new.pkl', help="wiki path") - parser.add_argument("--dev_path", type=str, default='../hotpot_dev_fullwiki_v1_for_retriever.json', - help="dev path") - parser.add_argument("--dev_data_path", type=str, default='../dev_tf_idf_data_raw.pkl', help="dev data path") - parser.add_argument("--onehop_bert_path", type=str, default='../onehop.ckpt', help="onehop bert ckpt path") - parser.add_argument("--onehop_mlp_path", type=str, default='../onehop_mlp.ckpt', help="onehop mlp ckpt path") - parser.add_argument("--twohop_bert_path", type=str, default='../twohop.ckpt', help="twohop bert ckpt path") - parser.add_argument("--twohop_mlp_path", type=str, default='../twohop_mlp.ckpt', help="twohop mlp ckpt path") - parser.add_argument("--q_path", type=str, default='../queries', help="queries data path") - return parser.parse_args() -``` - -### Defining the Model - -Define the Retriever module and load the model parameters. 
- -```python -def evaluation(): - model_onehop_bert = ModelOneHop() - param_dict = load_checkpoint(config.onehop_bert_path) - load_param_into_net(model_onehop_bert, param_dict) - model_twohop_bert = ModelTwoHop() - param_dict2 = load_checkpoint(config.twohop_bert_path) - load_param_into_net(model_twohop_bert, param_dict2) - onehop = OneHopBert(config, model_onehop_bert) - twohop = TwoHopBert(config, model_twohop_bert) -``` - -Define the Reranker module and load the model parameters. - -```python - reranker = Reranker(batch_size=batch_size, - encoder_ck_file=encoder_ck_file, - downstream_ck_file=downstream_ck_file) -``` - -Define the Reader module and load the model parameters. - -```python - reader = Reader(batch_size=batch_size, - encoder_ck_file=encoder_ck_file, - downstream_ck_file=downstream_ck_file) -``` - -## Inference Network - -### Running Script - -Run the shell script in the scripts directory to start the inference process. Run the script with the following command: - -```shell -sh run_eval_ascend.sh -sh run_eval_ascend_reranker_reader.sh -``` - -After the inference is completed, the result is saved to the log file in `scripts/eval/` directory, and the evaluation result can be checked in the corresponding log file. - -Evaluation results of the Retriever module: val represents the number of questions found in the correct answer document, count represents the total number of questions, and PEM represents the accuracy of the top-8 documents after the problem-related documents are sorted. 
- -```python -# match query num -val:6959 -# query num -count:7404 -# one hop match query num -true count: 7112 -# top8 paragraph exact match -PEM: 0.9398973527822798 -# top8 paragraph exact match in recall -true top8 PEM: 0.9784870641169854 -# evaluation time -evaluation time (h): 1.819070938428243 -``` - -The following is Reranker and Reader module evaluation results, total_top1_pem represents the accuracy of the exact matching of the top-1 path after reordering, joint_em represents the joint accuracy of the predicted answer and the exact match of the evidence, joint_f1 represents the combined f1 score of the predicted answer and the evidence. - -```python -# top8 paragraph exact match -total top1 pem: 0.8803511141120864 -... - -# answer exact match -em: 0.67440918298447 -# answer f1 -f1: 0.8025625656569652 -# answer precision -prec: 0.8292800393689271 -# answer recall -recall: 0.8136908451841731 -# supporting facts exact match -sp_em: 0.6009453072248481 -# supporting facts f1 -sp_f1: 0.844555664157302 -# supporting facts precision -sp_prec: 0.8640844345841021 -# supporting facts recall -sp_recall: 0.8446123918845106 -# joint exact match -joint_em: 0.4537474679270763 -# joint f1 -joint_f1: 0.715119580346802 -# joint precision -joint_prec: 0.7540052057184267 -# joint recall -joint_recall: 0.7250240424067661 -``` - -## Reference - -1. Yang Z , Qi P , Zhang S , et al. HotpotQA: A Dataset for Diverse, Explainable Multi-hop Question Answering[C]// Proceedings of the 2018 Conference on Empirical Methods in Natural Language Processing. 2018. -2. Asai A , Hashimoto K , Hajishirzi H , et al. Learning to Retrieve Reasoning Paths over Wikipedia Graph for Question Answering[J]. 2019. 
diff --git a/tutorials/inference/source_en/serving_distributed_example.md b/tutorials/inference/source_en/serving_distributed_example.md deleted file mode 100644 index 541ed0a7cb27d4b470746e0af45703e95d57cc2c..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/serving_distributed_example.md +++ /dev/null @@ -1,268 +0,0 @@ -# MindSpore Serving-based Distributed Inference Service Deployment - -Translator: [xiaoxiaozhang](https://gitee.com/xiaoxinniuniu) - -`Linux` `Ascend` `Serving` `Intermediate` `Senior` - - - -- [MindSpore Serving-based Distributed Inference Service Deployment](#mindspore-serving-based-distributed-inference-service-deployment) - - [Overview](#overview) - - [Environment Preparation](#environment-preparation) - - [Exporting a Distributed Model](#exporting-a-distributed-model) - - [Deploying the Distributed Inference Service](#deploying-the-distributed-inference-service) - - [Usage Restrictions](#usage-restrictions) - - [Starting Master and Distributed Worker](#starting-master-and-distributed-worker) - - [Starting Agent](#startup-agent) - - [Executing Inference](#executing-inference) - - - - - -## Overview - -Distributed inference means that multiple cards are used in the inference phase, in order to solve the problem that too many parameters are in the very large scale neural network and the model cannot be fully loaded into a single card for inference, multi-cards can be used for distributed inference. This document describes the process of deploying the distributed inference service, which is similar to the process of deploying the [single-card inference service](https://www.mindspore.cn/tutorial/inference/en/master/serving_example.html), and these two can refer to each other. 
- -The architecture of the distributed inference service is shown as follows: - -![image](images/distributed_servable.png) - -The master provides an interface for client access, manages distributed workers, and performs task management and distribution; Distributed workers automatically schedule agents based on model configurations to complete distributed inference; Each agent contains a slice of the distributed model, occupies a device, and loads the model to perform inference. - -The preceding figure shows the scenario where rank_size is 16 and stage_size is 2. Each stage contains 8 agents and occupies 8 devices. rank_size indicates the number of devices used in inference, stage indicates a pipeline segment, and stage_size indicates the number of pipeline segments. The distributed worker sends an inference request to the agent and obtains the inference result from the agent. Agents communicate with each other using HCCL. - -Currently, the distributed model has the following restrictions: - -- The model of the first stage receives the same input data. -- The models of other stages do not receive data. -- All models of the latter stage return the same data. -- Only Ascend 910 inference is supported. - -The following uses a simple distributed network MatMul as an example to demonstrate the deployment process. - -### Environment Preparation - -Before running the example, ensure that MindSpore Serving has been correctly installed. If not, install MindSpore Serving by referring to the [MindSpore Serving installation page](https://gitee.com/mindspore/serving/blob/master/README.md#installation), and configure environment variables by referring to the [MindSpore Serving environment configuration page](https://gitee.com/mindspore/serving/blob/master/README.md#configuring-environment-variables). 
- -### Exporting a Distributed Model - -For details about the files required for exporting distributed models, see the [export_model directory](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html#id4), the following files are required: - -```text -export_model -├── distributed_inference.py -├── export_model.sh -├── net.py -└── rank_table_8pcs.json -``` - -- `net.py` contains the definition of MatMul network. -- `distributed_inference.py` is used to configure distributed parameters. -- `export_model.sh` creates `device` directory on the current host and exports model files corresponding to `device`. -- `rank_table_8pcs.json` is a json file for configuring the multi-cards network. For details, see [rank_table](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html#id4). - -Use [net.py](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/export_model/net.py) to construct a network that contains the MatMul and Neg operators. - -```python -import numpy as np -from mindspore import Tensor, Parameter, ops -from mindspore.nn import Cell - - -class Net(Cell): - def __init__(self, matmul_size, transpose_a=False, transpose_b=False, strategy=None): - super().__init__() - matmul_np = np.full(matmul_size, 0.5, dtype=np.float32) - self.matmul_weight = Parameter(Tensor(matmul_np)) - self.matmul = ops.MatMul(transpose_a=transpose_a, transpose_b=transpose_b) - self.neg = ops.Neg() - if strategy is not None: - self.matmul.shard(strategy) - - def construct(self, inputs): - x = self.matmul(inputs, self.matmul_weight) - x = self.neg(x) - return x -``` - -Use [distributed_inference.py](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/export_model/distributed_inference.py) to configure the distributed model. 
Refer to [Distributed inference](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_ascend_910.html#id1)。 - -```python -import numpy as np -from net import Net -from mindspore import context, Model, Tensor, export -from mindspore.communication import init - - -def test_inference(): - """distributed inference after distributed training""" - context.set_context(mode=context.GRAPH_MODE) - init(backend_name="hccl") - context.set_auto_parallel_context(full_batch=True, parallel_mode="semi_auto_parallel", - device_num=8, group_ckpt_save_file="./group_config.pb") - - predict_data = create_predict_data() - network = Net(matmul_size=(96, 16)) - model = Model(network) - model.infer_predict_layout(Tensor(predict_data)) - export(model._predict_network, Tensor(predict_data), file_name="matmul", file_format="MINDIR") - - -def create_predict_data(): - """user-defined predict data""" - inputs_np = np.random.randn(128, 96).astype(np.float32) - return Tensor(inputs_np) -``` - -Run [export_model.sh](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/export_model/export_model.sh) to export the distributed model. After the command is executed successfully, the `model` directory is created in the upper-level directory. The structure is as follows: - -```text -model -├── device0 -│ ├── group_config.pb -│ └── matmul.mindir -├── device1 -├── device2 -├── device3 -├── device4 -├── device5 -├── device6 -└── device7 -``` - -Each `device` directory contains two files, `group_config.pb` and `matmul.mindir`, which represent the model group configuration file and model file respectively. 
- -### Deploying the Distributed Inference Service - -For details about how to start the distributed inference service, refer to [matmul_distributed](https://gitee.com/mindspore/serving/tree/master/example/matmul_distributed), the following files are required: - -```text -matmul_distributed -├── agent.py -├── master_with_worker.py -├── matmul -│ └── servable_config.py -├── model -└── rank_table_8pcs.json -``` - -- `model` is the directory for storing model files. -- `master_with_worker.py` is the script for starting services. -- `agent.py` is the script for starting agents. -- `servable_config.py` is the [Model Configuration File](https://www.mindspore.cn/tutorial/inference/en/master/serving_model.html). It declares a distributed model with rank_size 8 and stage_size 1 through `declare_distributed_servable`, and defines a method `predict` for distributed servable. - -The content of the model configuration file is as follows: - -```python -from mindspore_serving.worker import distributed -from mindspore_serving.worker import register - -distributed.declare_distributed_servable(rank_size=8, stage_size=1, with_batch_dim=False) - - -@register.register_method(output_names=["y"]) -def predict(x): - y = register.call_servable(x) - return y -``` - -#### Starting Master and Distributed Worker - -Use [master_with_worker.py](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/master_with_worker.py) to call `start_distributed_servable_in_master` method to deploy the co-process master and distributed workers. 
- -```python -import os -import sys -from mindspore_serving import master -from mindspore_serving.worker import distributed - - -def start(): - servable_dir = os.path.dirname(os.path.realpath(sys.argv[0])) - distributed.start_distributed_servable_in_master(servable_dir, "matmul", - rank_table_json_file="rank_table_8pcs.json", - version_number=1, - worker_ip="127.0.0.1", worker_port=6200, - wait_agents_time_in_seconds=0) - master.start_grpc_server("127.0.0.1", 5500) - master.start_restful_server("127.0.0.1", 1500) - - -if __name__ == "__main__": - start() -``` - -- `servable_dir` is the directory for storing a servable. -- `servable_name` is the name of the servable, which corresponds to a directory for storing model configuration files. -- `rank_table_json_file` is the JSON file for configuring multi-cards network. -- `worker_ip` is the IP address of the distributed worker. -- `worker_port` is the port of the distributed worker. -- `wait_agents_time_in_seconds` specifies the duration of waiting for all agents to be registered, the default value 0 means it will wait forever. - -#### Starting Agent - -Use [agent.py](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/agent.py) to call `startup_worker_agents` method to start 8 agent processes on the current host. Agents obtain rank_tables from distributed workers so that agents can communicate with each other using HCCL. 
- -```python -from mindspore_serving.worker import distributed - - -def start_agents(): - """Start all the worker agents in current machine""" - model_files = [] - group_configs = [] - for i in range(8): - model_files.append(f"model/device{i}/matmul.mindir") - group_configs.append(f"model/device{i}/group_config.pb") - - distributed.startup_worker_agents(worker_ip="127.0.0.1", worker_port=6200, model_files=model_files, - group_config_files=group_configs, agent_start_port=7000, agent_ip=None, - rank_start=None) - - -if __name__ == '__main__': - start_agents() -``` - -- `worker_ip` is the IP address of the distributed worker. -- `worker_port` is the port of the distributed worker. -- `model_files` is a list of model file paths. -- `group_config_files` is a list of model group configuration file paths. -- `agent_start_port` is the start port used by the agent. The default value is 7000. -- `agent_ip` is the IP address of an agent. The default value is None. The IP address used by the agent to communicate with the distributed worker is obtained from rank_table by default. If the IP address is unavailable, you need to set both `agent_ip` and `rank_start`. -- `rank_start` is the start rank_id of the current server, the default value is None. - -### Executing Inference - -To access the inference service through gRPC, the client needs to specify the IP address and port of the gRPC server. Run [client.py](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/client.py) to call the `predict` method of matmul distributed model, execute inference. 
- -```python -import numpy as np -from mindspore_serving.client import Client - - -def run_matmul(): - """Run client of distributed matmul""" - client = Client("localhost", 5500, "matmul", "predict") - instance = {"x": np.ones((128, 96), np.float32)} - result = client.infer(instance) - print("result:\n", result) - - -if __name__ == '__main__': - run_matmul() -``` - -The following return value indicates that the Serving distributed inference service has correctly executed the inference of MatMul net: - -```text -result: -[{'y': array([[-48., -48., -48., ..., -48., -48., -48.], - [-48., -48., -48., ..., -48., -48., -48.], - [-48., -48., -48., ..., -48., -48., -48.], - ..., - [-48., -48., -48., ..., -48., -48., -48.], - [-48., -48., -48., ..., -48., -48., -48.], - [-48., -48., -48., ..., -48., -48., -48.]], dtype=float32)}] -``` diff --git a/tutorials/inference/source_en/serving_example.md b/tutorials/inference/source_en/serving_example.md deleted file mode 100644 index 8dfdf99afd9c511523870ceed99113075df9cf11..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/serving_example.md +++ /dev/null @@ -1,261 +0,0 @@ -# MindSpore Serving-based Inference Service Deployment - -`Linux` `Ascend` `Serving` `GPU` `Beginner` `Intermediate` `Expert` - - - -- [MindSpore Serving-based Inference Service Deployment](#mindspore-serving-based-inference-service-deployment) - - [Overview](#overview) - - [Preparing the Environment](#preparing-the-environment) - - [Exporting the Model](#exporting-the-model) - - [Deploying the Serving Inference Service](#deploying-the-serving-inference-service) - - [Lightweight Deployment](#lightweight-deployment) - - [Cluster Deployment](#cluster-deployment) - - [Inference Execution](#inference-execution) - - - - - -## Overview - -MindSpore Serving is a lightweight and high-performance service module that helps MindSpore developers efficiently deploy online inference services in the production environment. 
After completing model training on MindSpore, you can export the MindSpore model and use MindSpore Serving to create an inference service for the model. - -The following uses a simple `Add` network as an example to describe how to use MindSpore Serving. - -### Preparing the Environment - -Before running the sample network, ensure that MindSpore Serving has been properly installed. To install MindSpore Serving on your PC, go to the [MindSpore Serving installation page](https://gitee.com/mindspore/serving/blob/master/README.md#installing-serving) and configure environment variables on the [MindSpore Serving environment configuration page](https://gitee.com/mindspore/serving/blob/master/README.md#configuring-environment-variables). - -### Exporting the Model - -Use [add_model.py](https://gitee.com/mindspore/serving/blob/master/example/add/export_model/add_model.py) to build a network with only the Add operator and export the MindSpore inference deployment model. - -```python -import os -from shutil import copyfile -import numpy as np - -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -import mindspore as ms - -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") - - -class Net(nn.Cell): - """Define Net of add""" - - def __init__(self): - super(Net, self).__init__() - self.add = ops.Add() - - def construct(self, x_, y_): - """construct add net""" - return self.add(x_, y_) - - -def export_net(): - """Export add net of 2x2 + 2x2, and copy output model `tensor_add.mindir` to directory ../add/1""" - x = np.ones([2, 2]).astype(np.float32) - y = np.ones([2, 2]).astype(np.float32) - add = Net() - output = add(ms.Tensor(x), ms.Tensor(y)) - ms.export(add, ms.Tensor(x), ms.Tensor(y), file_name='tensor_add', file_format='MINDIR') - dst_dir = '../add/1' - try: - os.mkdir(dst_dir) - except OSError: - pass - - dst_file = os.path.join(dst_dir, 'tensor_add.mindir') - copyfile('tensor_add.mindir', dst_file) - print("copy 
tensor_add.mindir to " + dst_dir + " success") - - print(x) - print(y) - print(output.asnumpy()) - - -if __name__ == "__main__": - export_net() -``` - -To use MindSpore for neural network definition, inherit `mindspore.nn.Cell`. (A `Cell` is a base class of all neural networks.) Define each layer of a neural network in the `__init__` method in advance, and then define the `construct` method to complete the forward construction of the neural network. Use `export` of the `mindspore` module to export the model file. -For more detailed examples, see [Implementing an Image Classification Application](https://www.mindspore.cn/tutorial/training/en/master/quick_start/quick_start.html). - -Execute the `add_model.py` script to generate the `tensor_add.mindir` file. The input of the model is two 2D tensors with shape [2,2], and the output is the sum of the two input tensors. - -### Deploying the Serving Inference Service - -Start Serving with the following files: - -```shell -test_dir -├── add/ -│ └── servable_config.py -│ └── 1/ -│ └── tensor_add.mindir -└── master_with_worker.py -``` - -- `master_with_worker.py`: Script file for starting the service. -- `add`: Model folder, which is named after the model name. -- `tensor_add.mindir`: Model file generated by the network in the previous step, which is stored in folder 1 (the number indicates the version number). Different versions are stored in different folders. The version number must be a string of digits. By default, the latest model file is started. -- [servable_config.py](https://gitee.com/mindspore/serving/blob/master/example/add/add/servable_config.py): [Model configuration file](https://www.mindspore.cn/tutorial/inference/en/master/serving_model.html), which defines the model processing functions, including the `add_common` and `add_cast` methods. 
`add_common` defines an addition operation whose input is two pieces of float32 data, and `add_cast` defines an addition operation whose input is data with its type converted to float32. - -Content of the configuration file: - -```python -import numpy as np -from mindspore_serving.worker import register - - -def add_trans_datatype(x1, x2): - """define preprocess, this example has one input and one output""" - return x1.astype(np.float32), x2.astype(np.float32) - - -# when with_batch_dim is set to False, only 2x2 add is supported -# when with_batch_dim is set to True(default), Nx2 add is supported, while N is viewed as batch -# float32 inputs/outputs -register.declare_servable(servable_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False) - - -# register add_common method in add -@register.register_method(output_names=["y"]) -def add_common(x1, x2): # only support float32 inputs - """method add_common data flow definition, only call model servable""" - y = register.call_servable(x1, x2) - return y - - -# register add_cast method in add -@register.register_method(output_names=["y"]) -def add_cast(x1, x2): - """method add_cast data flow definition, only call preprocess and model servable""" - x1, x2 = register.call_preprocess(add_trans_datatype, x1, x2) # cast input to float32 - y = register.call_servable(x1, x2) - return y -``` - -MindSpore Serving provides both lightweight deployment and cluster deployment. In lightweight deployment mode, the master and worker nodes are deployed in the same process. In cluster deployment mode, the master and worker nodes are deployed in different processes. If there is only one worker node, you can consider lightweight deployment, that is, deploy the master node in the process where the worker node is located. If there are multiple worker nodes, you can deploy them in a cluster and use one of them as the master node to manage all worker nodes. You can select the deployment mode based on the actual requirements. 
- -#### Lightweight Deployment - -The server calls a Python API to start the inference process shared by both master and worker nodes. The client directly connects to the inference service and delivers an inference task. -Run the [master_with_worker.py](https://gitee.com/mindspore/serving/blob/master/example/add/master_with_worker.py) script to deploy lightweight service: - -```python -import os -from mindspore_serving import master -from mindspore_serving import worker - -def start(): - servable_dir = os.path.abspath(".") - worker.start_servable_in_master(servable_dir, "add", device_id=0) - master.start_grpc_server("127.0.0.1", 5500) - -if __name__ == "__main__": - start() -``` - -If the server prints the `Serving gRPC start success, listening on 0.0.0.0:5500` log, the Serving has loaded the inference model. - -#### Cluster Deployment - -The server consists of the master and worker processes. The master process manages all worker nodes in the cluster and distributes inference tasks. The cluster deployment is as follows: - -Master deployment: - -```python -import os -from mindspore_serving import master - -def start(): - servable_dir = os.path.abspath(".") - master.start_grpc_server("127.0.0.1", 5500) - master.start_master_server("127.0.0.1", 6500) -if __name__ == "__main__": - start() -``` - -Worker deployment: - -```python -import os -from mindspore_serving import worker - -def start(): - servable_dir = os.path.abspath(".") - worker.start_servable(servable_dir, "add", device_id=0, - master_ip="127.0.0.1", master_port=6500, - worker_ip="127.0.0.1", worker_port=6600) - -if __name__ == "__main__": - start() -``` - -The lightweight and the cluster deployment modes use different APIs to start a worker. The `start_servable_in_master` API is used in lightweight deployment mode, while the `start_servable` API is used in cluster deployment mode. 
- -### Inference Execution - -The client can access the inference service through either [gRPC](https://www.mindspore.cn/tutorial/inference/en/master/serving_grpc.html) or [RESTful](https://www.mindspore.cn/tutorial/inference/en/master/serving_restful.html). The following uses gRPC as an example. -Execute [client.py](https://gitee.com/mindspore/serving/blob/master/example/add/client.py) to start the Python client. - -```python -import numpy as np -from mindspore_serving.client import Client - - -def run_add_common(): - """invoke servable add method add_common""" - client = Client("localhost", 5500, "add", "add_common") - instances = [] - - # instance 1 - x1 = np.asarray([[1, 1], [1, 1]]).astype(np.float32) - x2 = np.asarray([[1, 1], [1, 1]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - # instance 2 - x1 = np.asarray([[2, 2], [2, 2]]).astype(np.float32) - x2 = np.asarray([[2, 2], [2, 2]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - # instance 3 - x1 = np.asarray([[3, 3], [3, 3]]).astype(np.float32) - x2 = np.asarray([[3, 3], [3, 3]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - result = client.infer(instances) - print(result) - - -def run_add_cast(): - """invoke servable add method add_cast""" - client = Client("localhost", 5500, "add", "add_cast") - instances = [] - x1 = np.ones((2, 2), np.int32) - x2 = np.ones((2, 2), np.int32) - instances.append({"x1": x1, "x2": x2}) - result = client.infer(instances) - print(result) - - -if __name__ == '__main__': - run_add_common() - run_add_cast() -``` - -Use the `Client` class defined by `mindspore_serving.client`. The client defines two cases to call two model methods. In the `run_add_common` case with three pairs of float32 arrays, each pair of arrays are added up. In the `run_add_cast` case, two int32 arrays are added up. If the results of the two cases are displayed as follows, the Serving has properly executed the `Add` network inference. 
- -```shell -[{'y': array([[2. , 2.], - [2., 2.]], dtype=float32)},{'y': array([[4. , 4.], - [4., 4.]], dtype=float32)},{'y': array([[6. , 6.], - [6., 6.]], dtype=float32)}] -[{'y': array([[2. , 2.], - [2., 2.]], dtype=float32)}] -``` diff --git a/tutorials/inference/source_en/serving_grpc.md b/tutorials/inference/source_en/serving_grpc.md deleted file mode 100644 index 6ac0bcd7063cf952ca76a0172b8d3b96389aeafe..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/serving_grpc.md +++ /dev/null @@ -1,158 +0,0 @@ -# gRPC-based MindSpore Serving Access - -`Linux` `Ascend` `GPU` `Serving` `Beginner` `Intermediate` `Expert` - - - -- [gRPC-based MindSpore Serving Access](#grpc-based-mindspore-serving-access) - - [Overview](#overview) - - [add](#add) - - [ResNet-50](#resnet-50) - - - - - -## Overview - -The gRPC API is provided to access the MindSpore Serving. In the Python environment, the [mindspore_serving.client](https://gitee.com/mindspore/serving/blob/master/mindspore_serving/client/python/client.py) module is provided to fill in requests and parse responses. The gRPC server (a worker node) supports only the Ascend platform. The client running does not depend on a specific hardware environment. The following uses `add` and `ResNet-50` as examples to describe how to use the gRPC Python API on clients. - -## add - -This example comes from [add example](https://gitee.com/mindspore/serving/blob/master/example/add/client.py). The `add` Servable provides the `add_common` method to add up two 2x2 tensors. The code of the gRPC Python client is as follows. One gRPC request includes three pairs of independent 2x2 tensors. 
- -```python -from mindspore_serving.client import Client -import numpy as np - - -def run_add_common(): - """invoke Servable add method add_common""" - client = Client("localhost", 5500, "add", "add_common") - instances = [] - - # instance 1 - x1 = np.asarray([[1, 1], [1, 1]]).astype(np.float32) - x2 = np.asarray([[1, 1], [1, 1]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - # instance 2 - x1 = np.asarray([[2, 2], [2, 2]]).astype(np.float32) - x2 = np.asarray([[2, 2], [2, 2]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - # instance 3 - x1 = np.asarray([[3, 3], [3, 3]]).astype(np.float32) - x2 = np.asarray([[3, 3], [3, 3]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - result = client.infer(instances) - print(result) - - -if __name__ == '__main__': - run_add_common() -``` - -Export the model, start the Serving server, and run the preceding client code by referring to the [MindSpore Serving-based Inference Service Deployment](https://www.mindspore.cn/tutorial/inference/en/master/serving_example.html). After the code runs properly, the following information is displayed. To facilitate display, the format is adjusted. - -```python -[{'y': array([[2., 2.], [2., 2.]], dtype=float32)}, - {'y': array([[4., 4.], [4., 4.]], dtype=float32)}, - {'y': array([[6., 6.], [6., 6.]], dtype=float32)}] -``` - -Details are described as follows: - -1. Build `Client`. - - When `Client` is built, the IP address and port number of Serving are indicated, and the Servable's name and method it provides are given. Servable indicates a single model or a combination of multiple models (not supported yet) and provides different services in various methods. - - In the preceding `add` example, Serving runs on the `localhost`, the gRPC port number is set to `5500`, and `add` Servable is run to provide the `add_common` method. - -2. Add instances. 
- - Each gRPC request includes one or more independent instances which do not affect each other's result. - - For example, the `add_common` method provided by `add` Servable provides the function of adding two 2x2 tensors. That is, an instance contains two 2x2 tensor inputs and one 2x2 tensor output. A request may include one or more such instances, and one result is returned for each instance. The preceding `add` example provides three instances, so it is expected that three results will be returned. - - The input parameters of the `Client.infer` API can be a list or tuple consisting of one or more instance inputs, or a single instance input. Each instance input consists of the input name and value. The value can be in the following format: - - - `numpy array`: represents a tensor. For example, np.ones((3,224), np.float32). - - `numpy number`: represents a scalar. For example, np.int8(5). - - `python bool int float`: represents a scalar. Currently, int is regarded as int64, and float is regarded as float64. For example, 32.0. - - `python str`: represents a character string. For example, "this is a text". - - `python bytes`: represents binary data. For example, image data. - - In the preceding example, `x1` and `x2` are the input parameters of the `add_common` method provided by `add` Servable. Each input value is specified when an instance is added. - -3. Obtain the inference result. - - Use `Client.infer` to enter one or more instances. - The return results may be in the following format: - - - If all instances are correctly inferred, the following result is returned: - - ```shell - [{'y': array([[2., 2.], [2., 2.]], dtype=float32)}, - {'y': array([[4., 4.], [4., 4.]], dtype=float32)}, - {'y': array([[6., 6.], [6., 6.]], dtype=float32)}] - ``` - - - If certain errors occur in all instances , a dict containing `error` is returned. 
In the example, `add_common` is changed to `add_common2`, and the returned result is as follows: - - ```shell - {'error', 'Request Servable(add) method(add_common2), method is not available'} - ``` - - - If inference errors occur in certain instances, the error instances return a dict containing `error`. In the example, an input `dtype` of instance2 is changed to `np.int32`, and the returned result is as follows: - - ```shell - [{'y': array([[2., 2.], [2., 2.]], dtype=float32)}, - {'error': 'Given model input 1 data type kMSI_Int32 not match ...'}, - {'y': array([[6., 6.], [6., 6.]], dtype=float32)}] - ``` - - Each instance returns a dict. The key value comes from the Servable method definition. In this example, the `add_common` method provided by `add` Servable has only one output, which is `y`. The value is in the following format: - - | Serving Output Type | Client Return Type | Description | Example | - | ---- | ---- | ---- | ---- | - | Tensor | numpy array | Tensor array | np.ones((3,224), np.float32) | - | Scalar:
int8, int16, int32, int64,
uint8, uint16, uint32, uint64,
bool, float16, float32, float64 | numpy scalar | Converts data format from scalar to numpy scalar. | np.int8(5) | - | String | python str | Converts output format from character string to python str. | "news_car" | - | Bytes | python bytes | Converts output format from binary to python bytes. | Image data | - -## ResNet-50 - -This example comes from [ResNet-50 example](https://gitee.com/mindspore/serving/blob/master/example/resnet/client.py). `ResNet-50` Servable provides the `classify_top1` method to recognize images. In the `classify_top1` method, input the image data to obtain the output character string, perform operations such as decoding and resizing on images, and then perform inference. The classification label with the highest score is returned through post-processing. - -```python -import os -from mindspore_serving.client import Client - - -def run_classify_top1(): - client = Client("localhost", 5500, "resnet50", "classify_top1") - instances = [] - for path, _, file_list in os.walk("./test_image/"): - for file_name in file_list: - image_file = os.path.join(path, file_name) - print(image_file) - with open(image_file, "rb") as fp: - instances.append({"image": fp.read()}) - result = client.infer(instances) - print(result) - - -if __name__ == '__main__': - run_classify_top1() -``` - -The preceding `classify_top1` method requires users to input `image` (the image binary data) in each instance. -If the execution is properly completed, the following information is displayed: - -```shell -[{'label': 'tabby, tabby cat'}, {'label': 'ox'}] -``` - -If the ResNet-50 model is not trained, there may be other unknown classification results. 
diff --git a/tutorials/inference/source_en/serving_model.md b/tutorials/inference/source_en/serving_model.md deleted file mode 100644 index b2c5ba4b70efd8cbc23c6a24bd27c67032cfb157..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/serving_model.md +++ /dev/null @@ -1,249 +0,0 @@ -# Servable Provided Through Model Configuration - -`Linux` `Ascend` `GPU` `Serving` `Beginner` `Intermediate` `Expert` - - - -- [Servable Provided Through Model Configuration](#servable-provided-through-model-configuration) - - [Overview](#overview) - - [Concepts](#concepts) - - [Preprocessing and Post-processing](#preprocessing-and-post-processing) - - [Methods](#methods) - - [Instances](#instances) - - [Model Configuration](#model-configuration) - - [Preprocessing and Post-processing Definition](#preprocessing-and-post-processing-definition) - - [Model Declaration](#model-declaration) - - [Method Definition](#method-definition) - - - - - -## Overview - -MindSpore Serving supports only the Ascend 310 and Ascend 910 environments. - -MindSpore Serving Servable provides the inference services of the following types: One inference service comes from a single model, and the other one comes from a combination of multiple models (this is being developed). Models need to be configured to provide the Serving inference service. - -The following describes how to configure a single model to provide Servable. All Servable configurations are for single-model Servables and the Serving client is referred to as the client. - -ResNet-50 is used as an example to describe how to configure a model to provide Servable. For details about the sample code, see the [ResNet-50 Example](https://gitee.com/mindspore/serving/tree/master/example/resnet/). - -## Concepts - -### Preprocessing and Post-processing - -A model provides the inference capability. Its input and output have fixed data type, data length, and shape. 
- -If data sent from the client cannot directly meet model input requirements, the data needs to be preprocessed and converted into the qualified data. -If the model output cannot be directly provided for the client, post-processing is required to convert the output into the required output data. - -The following shows the `resnet50` Servable data flowchart. The image data is transmitted from the client to the Serving through a network. The Serving performs preprocessing, inference, and post-processing, and returns the result to the client. - -![image](images/resnet_example.png) - -For a ResNet-50 inference model, the data sent by the client is images in JPG or PNG format, and the image classification is expected to be returned. The input of a ResNet model is the tensor generated by operations such as image `Decode`, `Resize`, and `Normalize`. The output is the score tensor of each category. The image needs to be converted into a tensor that meets the model input requirements during preprocessing. **Name of the top 1 category** or **Names of the top 5 categories and their scores** are returned after post-processing. - -The provided preprocessing may vary according to the composition, structure, or type of data input from the client in different scenarios. The provided post-processing may also vary according to the model output requirements. For example, in the preceding `resnet50` Servable, two post-processing methods are provided for the following two scenarios: **Name of the top 1 category** and **Names of the top 5 categories and their scores**. - -### Methods - -The preceding `resnet50` Servable provides the `classify_top5` and `classify_top1` methods. The input of `classify_top5` is `image`, and the output is `label` and `score`. The top 5 category names and scores are returned. The preprocessing of `classify_top1` is the same as that of `classify_top5`, but the post-processing is different. The input is `image`, and the output is `label`. 
The top 1 category name is returned. - -One Servable can provide one or more methods. The Servable name and the method name are marked with a service provided by the Serving. Each method preprocesses the data provided by the client, performs model inference and optional post-processing on the model inference result, and returns the required result to the client. - -A Servable is used to: - -- Specify optional preprocessing and post-processing. -- Define a data flow between method input, preprocessing, model, post-processing, and method output. The former data value can be used as the latter data input. For example, the value of method output may come from the method input, preprocessing, model, or post-processing. -- Specify a method name for the client to specify a method to be used. -- Specify the input and output names of a method for the client to specify the input and obtain the output. - -### Instances - -Each request includes one or more independent instances which do not affect each other's result. For example, a category is returned for an image, and three categories are returned for three independent images. - -## Model Configuration - -Take the ResNet-50 model as an example. The model configuration file directory is as follows: - -```shell -resnet50 -├── 1 -│ └── resnet_classify.mindir -├── 2 -│ └── resnet_classify.mindir -└── servable_config.py -``` - -- `resnet50`: a directory, which is named after the Servable name. - -- `servable_config.py`: configures Servable, including preprocessing and post-processing definitions, model declaration, and method definition. - -- `1` and `2`: directories, which indicate models of the `1` and `2` versions. The model version is a positive integer starting from `1`. A larger number indicates a later version. - -- `resnet_classify.mindir`: a model file. When the Servable is started, the model file of the corresponding version is loaded. 
- -### Preprocessing and Post-processing Definition - -The following is an example to define preprocessing and post-processing: - -```python -import mindspore.dataset as ds -import mindspore.dataset.transforms.c_transforms as TC -import mindspore.dataset.vision.c_transforms as VC - -def preprocess_eager(image): - """ - Define preprocess, input is image numpy, return preprocess result. - Return type can be numpy, str, bytes, int, float, or bool. - Use MindData Eager, this image processing can also use other image processing library, likes numpy, PIL or cv2 etc. - """ - image_size = 224 - mean = [0.485 * 255, 0.456 * 255, 0.406 * 255] - std = [0.229 * 255, 0.224 * 255, 0.225 * 255] - - decode = VC.Decode() - resize = VC.Resize([image_size, image_size]) - normalize = VC.Normalize(mean=mean, std=std) - hwc2chw = VC.HWC2CHW() - - image = decode(image) - image = resize(image) - image = normalize(image) - image = hwc2chw(image) - return image - -def postprocess_top1(score): - """ - Define postprocess. This example has one input and one output. - The input is the numpy tensor of the score, and the output is the label str of top one. - """ - max_idx = np.argmax(score) - return idx_2_label[max_idx] - - -def postprocess_top5(score): - """ - Define postprocess. This example has one input and two outputs. - The input is the numpy tensor of the score. The first output is the str joined by labels of top five, and the second output is the score tensor of the top five. - """ - idx = np.argsort(score)[::-1][:5] # top 5 - ret_label = [idx_2_label[i] for i in idx] - ret_score = score[idx] - return ";".join(ret_label), ret_score -``` - -The preprocessing and post-processing are defined in the same format. The input parameters are the input data of each instance. If the input data is a text, the input parameter is a str object. If the input data is of other types, such as Tensor, Scalar number, Boolean, and Bytes, the input parameter is a **numpy object**. 
The instance processing result is returned through `return`, which can be **numpy**, or a single data object or a tuple consisting of **bool, int, float, str, or bytes of Python**. - -The input sources and output usage of preprocessing and post-processing are determined by the [Method Definition](https://www.mindspore.cn/tutorial/inference/en/master/serving_model.html#id9). - -### Model Declaration - -The sample code for declaring the `resnet50` Servable model is as follows: - -```python -from mindspore_serving.worker import register -register.declare_servable(servable_file="resnet50_1b_imagenet.mindir", model_format="MindIR", with_batch_dim=True) -``` - -The input parameter `servable_file` of `declare_servable` indicates the model file name. `model_format` indicates the model type. Currently, the Ascend 310 environment supports both `OM` and `MindIR` model types. The Ascend 910 environment supports only the `MindIR` model type. - -If the 1D model input and output is not the `batch` dimension, you need to change the value of `with_batch_dim` from the default value `True` to `False`. - -Set `with_batch_dim` to `True` if models contain the `batch` dimension, such as image and text processing models. Assume that `batch_size=2` and the current request has three instances of images which will be split into two batches for model inference. For the first batch, two images are inferred to return two results. For the second batch, the remaining image is copied and inferred to return one result. Finally, three results are returned. - -![image](images/resnet_with_batch.png) - -Set `with_batch_dim` to `False` if models do not involve or consider the `batch` dimension. For example, the input and output are matrix multiplication models of 2D tensors. Each instance of the request performs an independent inference task. 
- -![image](./images/matmul_without_batch.png) - -If a model has one data input with `batch` dimension information and one model configuration information input without `batch` dimension information, you need to set `with_batch_dim` to `True` and set an extra parameter `without_batch_dim_inputs` to specify the input information that does not contain the `batch` dimension information. -For example: - -```python -from mindspore_serving.worker import register -# Input1 indicates the input shape information of the model, without the batch dimension information. -# input0: [N,3,416,416], input1: [2] -register.declare_servable(servable_file="yolov3_darknet53.mindir", model_format="MindIR", - with_batch_dim=True, without_batch_dim_inputs=1) -``` - -For distributed model, the only difference compared with non-distributed single model configuration is declaration, you need to use `declare_distributed_servable` method, `rank_size` is the number of devices used in the model, `stage_size` is the number of stages in the pipeline. - -```python -from mindspore_serving.worker import distributed -from mindspore_serving.worker import register - -distributed.declare_distributed_servable(rank_size=8, stage_size=1, with_batch_dim=False) -``` - -### Method Definition - -An example of the method definition is as follows: - -```python -from mindspore_serving.worker import register - -@register.register_method(output_names=["label"]) -def classify_top1(image): - """Define method `classify_top1` for servable `resnet50`. - The input is `image` and the output is `label`.""" - x = register.call_preprocess(preprocess_eager, image) - x = register.call_servable(x) - x = register.call_postprocess(postprocess_top1, x) - return x - - -@register.register_method(output_names=["label", "score"]) -def classify_top5(image): - """Define method `classify_top5` for servable `resnet50`. - The input is `image` and the output is `label` and `score`. 
""" - x = register.call_preprocess(preprocess_eager, image) - x = register.call_servable(x) - label, score = register.call_postprocess(postprocess_top5, x) - return label, score -``` - -The preceding code defines the `classify_top1` and `classify_top5` methods in Servable `resnet50`. The input parameter of the `classify_top1` method is `image` and the output parameter is `label`. The input parameter of the `classify_top5` method is `image` and the output parameters are `label` and `score`. That is, the input parameters of the Servable method are specified by the input parameters of the Python method, and the output parameters of the Servable method are specified by `output_names` of `register_method`. - -In the preceding method definition: - -- `call_preprocess` specifies the preprocessing used and its input. - -- `call_servable` specifies the input of model inference. - -- `call_postprocess` specifies the post-processing and its input used. - -- `return` specifies the data returned by the method and corresponds to the `output_names` parameter of `register_method`. - -The method definition cannot contain branch structures such as if, for, and while. Preprocessing and post-processing are optional and cannot be repeated. Model inference is mandatory, and the sequence cannot be disordered. - -When a user uses a service provided by a Servable method on the client, the user needs to specify the input value based on the input parameter name and identify the output value based on the output parameter name. 
For example, the method `classify_top5` accessed by the client is as follows: - -```python -from mindspore_serving.client import Client - -def read_images(): - # read image file and return - -def run_classify_top5(): - """Client for servable resnet50 and method classify_top5""" - client = Client("localhost", 5500, "resnet50", "classify_top5") - instances = [] - for image in read_images(): # read multi image - instances.append({"image": image}) # input `image` - result = client.infer(instances) - print(result) - for result_item in result: # result for every image - label = result_item["label"] # result `label` - score = result_item["score"] # result `score` - print("label result", label) - print("score result", score) - -if __name__ == '__main__': - run_classify_top5() -``` - -In addition, one request may include multiple instances, and multiple requests in queue for processing also have multiple instances. If multiple instances need to be processed concurrently by using, for example, multiple threads in customized preprocessing or post-processing (for example, the MindData concurrency is used to process multiple input images during preprocessing), MindSpore Serving provides `call_preprocess_pipeline` and `call_postprocess_pipeline` for registering such preprocessing and post-processing. For details, see [ResNet-50 sample model configuration](https://gitee.com/mindspore/serving/blob/master/example/resnet/resnet50/servable_config.py). 
diff --git a/tutorials/inference/source_en/serving_restful.md b/tutorials/inference/source_en/serving_restful.md deleted file mode 100644 index 27f1e70b87c1c8da28c1dd0ff96fbbcc0a537451..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_en/serving_restful.md +++ /dev/null @@ -1,249 +0,0 @@ -# RESTful-based MindSpore Serving Access - -`Linux` `Serving` `Ascend` `GPU` `Beginner` `Intermediate` `Expert` - - - -- [RESTful-based MindSpore Serving Access](#restful-based-mindspore-serving-access) - - [Overview](#overview) - - [Request Method](#request-method) - - [Request Format](#request-format) - - [Base64 Data Encoding](#base64-data-encoding) - - [Response Format](#response-format) - - - - - -## Overview - -MindSpore Serving supports both `gRPC` and `RESTful` request modes. The following describes the `RESTful` request. - -`RESTful` is an API designed and developed based on `HTTP`. It manages and accesses resources through `URI` and features high scalability and clear structure. The lightweight `RESTful` can directly transmit data through `HTTP`, and has become the most popular `Web` service access mode. Users can directly interact with services in `RESTful` mode. - -For details about how to deploy `Serving`, see [MindSpore Serving-based Inference Service Deployment](https://www.mindspore.cn/tutorial/inference/en/master/serving_example.html). - -Use the `master.start_restful_server` API to start the `RESTful` service. Alternatively, you can use `master.start_grpc_server` to start the `gRPC` service. - -> `RESTful` clients do not depend on specific hardware platforms. Currently, the Serving server supports only `Ascend 310` and `Ascend 910` hardware environments. - -## Request Method - -Currently, only RESTful request of the `POST` type is supported. 
The request format is as follows: - -```text -POST http://${HOST}:${PORT}/model/${MODEL_NAME}[/version/${VERSION}]:${METHOD_NAME} -``` - -In the preceding information: - -- `${HOST}`: specifies the IP address to be accessed. -- `${PORT}`: specifies the port number to be accessed. -- `${MODEL_NAME}`: specifies the name of a model in the request. -- `${VERSION}`: specifies the version number. The version number is optional. If it is not specified, the latest model version is used by default. -- `${METHOD_NAME}`: specifies the method name of the request model. - -If the `curl` tool is used, the RESTful request method is as follows: - -```text -curl -X POST -d '${REQ_JSON_MESSAGE}' http://${HOST}:${PORT}/model/${MODEL_NAME}[/version/${VERSION}]:${METHOD_NAME} -``` - -For example, request for the `predict` method of the `LeNet` model to perform digital image inference: - -```text -curl -X POST -d '{"instances":[{"image":{"b64":"base64-encoded-string"}}]}' http://127.0.0.1:1500/model/lenet/version/1:predict -``` - -In the preceding information, `base64-encoded-string` indicates the character string generated after the digital image is encoded using `base64`. The character string is long and is not listed explicitly. - -## Request Format - -RESTful supports the `Json` request format. `key` is fixed at `instances`, and `value` indicates multiple instances. - -Each instance is represented by a `Json` object in `key-value` format. In the preceding information: - -- `key`: specifies the input name, which must be the same as the input parameter name of the method provided by the request model. If they are different, the request fails. - -- `value`: a specific value. Currently supported `value` types: - - - Scalar: `str`, `bytes`, `int`, `float` and `bool` - - `bytes` is supported after `base64` encoding. - - - Tensor: a one-level or multi-level array consisting of `int`, `float`, and `bool` - - A tensor uses the array format to indicate data and dimension information. 
- -The `int` type supported in `Json` is `int32`, indicating the range, and the supported `float` type is `float32`, indicating the range. - -Request format: - -```text -{ - "instances":[ - { - "input_name1":||, - "input_name2":||, - ... - }, - { - "input_name1":||, - "input_name2":||, - ... - } - ... - ] -} -``` - -Example: - -```text -{ - "instances":[ - { - "tag":"one", - "box":[[1,1],[2,3],[3,4]], - "image":{"b64":"iVBOR...ggg==="} - }, - { - "tag":"two", - "box":[[2,2],[5,5],[6,6]], - "image":{"b64":"iVBOR...QmCC", "type":"bytes"} - } - ] -} -``` - -In the preceding information, `iVBOR...ggg===` is the omitted character string of the image number `0` after `base64` encoding. `iVBOR...QmCC` is the omitted character string of the image number `1` after `base64` encoding. The character strings encoded in different images may be different. The preceding description is for reference only. - -### Base64 Data Encoding - -The `bytes` type needs to be encoded using `base64`. `base64` can indicate the `bytes` type as well as other scalar and tensor data. In this case, the binary data of scalar and tensor is encoded using `base64`, the data type is specified using `type`, and the dimension information is specified using `shape`. - -- `type`: This parameter is optional. If it is not specified, the default value is `bytes`. - - The value can be `int8`, `int16`, `int32`, `int64`, `uint8`, `uint16`, `uint32`, `uint64`, `float16`(or `fp16`), `float32`(or `fp32`), `float64`(or `fp64`), `bool`, `str`, or `bytes`. - -- `shape`: This parameter is optional. If it is not specified, the default value is `[1]`. 
- -Example: - -If the `base64` encoding is used to indicate a tensor of `int16` type, with `shape` 3*2 and the value `[[1,1],[2,3],[3,4]]`, the expression is as follows: - -```json -{ - "instances":[ - { - "box":{"b64":"AQACAAIAAwADAAQA", "type":"int16", "shape":[3,2]} - } - ] -} -``` - -`AQACAAIAAwADAAQA` is a character string obtained after the binary data format of `[[1,1],[2,3],[3,4]]` is encoded using `base64`. - -**The supported types in request are as follows:** - -| Supported Type | Example | Remarks | -| ------ | -------- | ---------------- | -| `int` | 1, [1, 2, 3, 4] | The default value is `int32`, indicating the range. | -| `float` | 1.0, [[1.2, 2.3], [3.0, 4.5]] | The default value is `float32`, indicating the range. | -| `bool` | true, false, [[true], [false]] | `bool` type | -| `string` | "hello" or
{"b64":"aGVsbG8=", "type":"str"} | Direct representation or representation specified by `type`. | -| `bytes` | {"b64":"AQACAAIAAwADAAQA"} or
{"b64":"AQACAAIAAwADAAQA", "type":"bytes"} | If `type` is not specified, the default value `bytes` is used. | -| `int8`,`int16`,`int32`,`int64`,
`uint8`,`uint16`,`uint32`,`uint64`,
`float16`,`float32`,`float64`,`bool` | {"b64":"AQACAAIAAwADAAQA", "type":"int16", "shape":[3,2]} | The base64 encoding is used to indicate the data specified by `type`. | - -## Response Format - -The response format is the same as the request format. The information in the `Json` format is returned. The response format is as follows: - -```text -{ - "instances":[ - { - "output_name1":||, - "output_name2":||, - ... - }, - { - "output_name1":||, - "output_name2":||, - ... - } - ... - ] -} -``` - -1. If all instances in a request are successfully processed, the response format is as follows: - - Example: `LeNet` requests to recognize numbers `0` and `1`. - - ```json - { - "instances":[ - { - "result":0 - }, - { - "result":1 - } - ] - } - ``` - -2. If certain instances are faulty, the response format is as follows: - - Example: `LeNet` requests to recognize the digit `0` and an incorrect digit image. - - ```json - { - "instances":[ - { - "result":0 - }, - { - "error_msg":"Preprocess Failed" - } - ] - } - ``` - -3. If all instances in a request fail, the response format is as follows: - - Example: `LeNet` requests to recognize two incorrect digital images. - - ```json - { - "instances":[ - { - "error_msg":"Preprocess Failed" - }, - { - "error_msg":"Time out" - } - ] - } - ``` - -4. If a system error or other parsing error occurs, the return value is in the following format: - - For example, the value of `LeNet` is an invalid JSON character string. 
- - ```json - { - "error_msg":"Parse request failed" - } - ``` - -**The response data is represented as follows:** - - | Serving Output Type | RESTful json Data Type | Description | Example | - | ---- | ---- | ---- | ---- | - | `int8`, `int16`, `int32`, `int64`, `uint8`, `uint16`, `uint32`, `uint64` | json integer | All types of integer data are represented as JSON integer | 1,[1,2,3,4] | - | `float16`, `float32`, `float64` | json float | All types of float data are represented as JSON float | 1.0,[[1.2, 2.3], [3.0, 4.5]] | - | `bool` | json bool | Bool data is represented as json bool | true,false,[[true],[false]] | - | `string` | json str | String data is represented as json string | "news_car" | - | `bytes` | base64 object | Bytes data is represented as a base64 object | {"b64":"AQACAAIAAwADAAQA"} | diff --git a/tutorials/inference/source_zh_cn/_static/logo_notebook.png b/tutorials/inference/source_zh_cn/_static/logo_notebook.png deleted file mode 100644 index f28598315f19f4be76a73ddf5dc6bbdbe4db35fd..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_zh_cn/_static/logo_notebook.png and /dev/null differ diff --git a/tutorials/inference/source_zh_cn/_static/logo_source.png b/tutorials/inference/source_zh_cn/_static/logo_source.png deleted file mode 100644 index 9932d67ab50871edb0c95979c4e948c812c7cdea..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_zh_cn/_static/logo_source.png and /dev/null differ diff --git a/tutorials/inference/source_zh_cn/conf.py b/tutorials/inference/source_zh_cn/conf.py deleted file mode 100644 index 0c819a8b0622e1914ff199e5bd29a591595470b3..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/conf.py +++ /dev/null @@ -1,63 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. 
For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys - - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'recommonmark', - 'sphinx_markdown_tables', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. 
-# -html_theme = 'sphinx_rtd_theme' - -html_search_language = 'zh' - -html_search_options = {'dict': '../../resource/jieba.txt'} - -html_static_path = ['_static'] \ No newline at end of file diff --git a/tutorials/inference/source_zh_cn/images/distributed_servable.png b/tutorials/inference/source_zh_cn/images/distributed_servable.png deleted file mode 100644 index e6c76259c4e6cfe60a58a06173984ea322db2813..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_zh_cn/images/distributed_servable.png and /dev/null differ diff --git a/tutorials/inference/source_zh_cn/images/matmul_without_batch.png b/tutorials/inference/source_zh_cn/images/matmul_without_batch.png deleted file mode 100644 index 4d5873d13e4ebc13f47144433469ac20c33dee8a..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_zh_cn/images/matmul_without_batch.png and /dev/null differ diff --git a/tutorials/inference/source_zh_cn/images/resnet_example.png b/tutorials/inference/source_zh_cn/images/resnet_example.png deleted file mode 100644 index f747ae6c48c406586a418ef890f81517ee5849b2..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_zh_cn/images/resnet_example.png and /dev/null differ diff --git a/tutorials/inference/source_zh_cn/images/resnet_with_batch.png b/tutorials/inference/source_zh_cn/images/resnet_with_batch.png deleted file mode 100644 index e9b1b11093835a8780c7e8c758f6fb767a3baacd..0000000000000000000000000000000000000000 Binary files a/tutorials/inference/source_zh_cn/images/resnet_with_batch.png and /dev/null differ diff --git a/tutorials/inference/source_zh_cn/index.rst b/tutorials/inference/source_zh_cn/index.rst deleted file mode 100644 index 585925818dbeb870a121fa65a1aedeec07c6d184..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/index.rst +++ /dev/null @@ -1,37 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 09:00:00 2020. 
- You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -使用MindSpore进行推理 -================================= - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 推理模型 - - multi_platform_inference - multi_platform_inference_ascend_910 - multi_platform_inference_ascend_310 - multi_platform_inference_gpu - multi_platform_inference_cpu - 端侧推理 - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 推理服务 - - serving_example - serving_distributed_example - serving_grpc - serving_restful - serving_model - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 应用实践 - - nlp_tprr diff --git a/tutorials/inference/source_zh_cn/multi_platform_inference.md b/tutorials/inference/source_zh_cn/multi_platform_inference.md deleted file mode 100644 index f3733313285420d1f16a581e0b04d39908b6e697..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/multi_platform_inference.md +++ /dev/null @@ -1,123 +0,0 @@ -# 推理模型总览 - - `Linux` `Ascend` `GPU` `CPU` `推理应用` `初级` `中级` `高级` - - - -- [推理模型总览](#推理模型总览) - - [模型文件](#模型文件) - - [执行推理](#执行推理) - - [MindIR介绍](#mindir介绍) - - [MindIR支持的网络列表](#mindir支持的网络列表) - - - - - -MindSpore可以基于训练好的模型,在不同的硬件平台上执行推理任务。 - -## 模型文件 - -MindSpore支持保存两种类型的数据:训练参数和网络模型(模型中包含参数信息)。 - -- 训练参数指的是Checkpoint格式文件。 -- 网络模型包括MindIR、AIR和ONNX三种格式文件。 - -下面介绍一下这几种格式的基本概念及其应用场景。 - -- Checkpoint - - 采用了Protocol Buffers格式,存储了网络中所有的参数值。 - - 一般用于训练任务中断后恢复训练,或训练后的微调(Fine Tune)任务。 -- MindIR - - 全称MindSpore IR,是MindSpore的一种基于图表示的函数式IR,定义了可扩展的图结构以及算子的IR表示。 - - 它消除了不同后端的模型差异,一般用于跨硬件平台执行推理任务。 -- ONNX - - 全称Open Neural Network Exchange,是一种针对机器学习模型的通用表达。 - - 一般用于不同框架间的模型迁移或在推理引擎([TensorRT](https://docs.nvidia.com/deeplearning/tensorrt/api/python_api/index.html))上使用。 -- AIR - - 全称Ascend Intermediate Representation,是华为定义的针对机器学习所设计的开放式文件格式。 - - 它能更好地适应华为AI处理器,一般用于Ascend 310上执行推理任务。 - -## 执行推理 - -按照使用环境的不同,推理可以分为以下两种方式。 - -1. 
本机推理 - - 通过加载网络训练产生的Checkpoint文件,调用`model.predict`接口进行推理验证,具体操作可查看[使用Checkpoint格式文件执行推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_910.html#checkpoint)。 - -2. 跨平台推理 - - 使用网络定义和Checkpoint文件,调用`export`接口导出模型文件,在不同平台执行推理,目前支持导出MindIR、ONNX和AIR(仅支持Ascend AI处理器)模型,具体操作可查看[保存模型](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html)。 - -## MindIR介绍 - -MindSpore通过统一IR定义了网络的逻辑结构和算子的属性,将MindIR格式的模型文件与硬件平台解耦,实现一次训练多次部署。 - -1. 基本介绍 - - MindIR作为MindSpore的统一模型文件,同时存储了网络结构和权重参数值。同时支持部署到云端Serving和端侧Lite平台执行推理任务。 - - 同一个MindIR文件支持多种硬件形态的部署: - - - 云端Serving部署推理:MindSpore训练生成MindIR模型文件后,可直接发给MindSpore Serving加载,执行推理任务,而无需额外的模型转化,做到Ascend、GPU、CPU等多硬件的模型统一。 - - 端侧Lite推理部署:MindIR可直接供Lite部署使用。同时由于端侧轻量化需求,提供了模型小型化和转换功能,支持将原始MindIR模型文件由Protocol Buffers格式转化为FlatBuffers格式存储,以及网络结构轻量化,以更好的满足端侧性能、内存等要求。 - -2. 使用场景 - - 先使用网络定义和Checkpoint文件导出MindIR模型文件,再根据不同需求执行推理任务,如[在Ascend 310上执行推理任务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_310_mindir.html)、[基于MindSpore Serving部署推理服务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_example.html)、[端侧推理](https://www.mindspore.cn/lite/docs?master)。 - -### MindIR支持的网络列表 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
AlexNetBERTBGCF
CenterFaceCNN&CTCDeepLabV3
DenseNet121Faster R-CNNGAT
GCNGoogLeNetLeNet
Mask R-CNNMASSMobileNetV2
NCFPSENetResNet
ResNeXtInceptionV3SqueezeNet
SSDTransformerTinyBert
UNet2DVGG16Wide&Deep
YOLOv3YOLOv4
diff --git a/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_310.rst b/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_310.rst deleted file mode 100644 index f6a503f1f9844c79b2bb0efa8bbf36c35f5230fd..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_310.rst +++ /dev/null @@ -1,14 +0,0 @@ -Ascend 310 AI处理器上推理 -=============================== - -Ascend 310是面向边缘场景的高能效高集成度AI处理器,支持对MindIR格式和AIR格式模型进行推理。 - -MindIR格式可由MindSpore CPU、GPU、Ascend 910导出,可运行在GPU、Ascend 910、Ascend 310上,推理前不需要手动执行模型转换,推理时需要安装MindSpore,调用MindSpore C++ API进行推理。 - -AIR格式仅MindSpore Ascend 910可导出,仅Ascend 310可推理,推理前需使用Ascend CANN中atc工具进行模型转换,推理时不依赖MindSpore,仅需Ascend CANN软件包。 - -.. toctree:: - :maxdepth: 1 - - multi_platform_inference_ascend_310_mindir - multi_platform_inference_ascend_310_air diff --git a/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_310_air.md b/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_310_air.md deleted file mode 100644 index 3031b2b5e7651cbf30962ca0e107ba9c82b4810a..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_310_air.md +++ /dev/null @@ -1,256 +0,0 @@ -# Ascend 310 AI处理器上使用AIR模型进行推理 - -`Linux` `Ascend` `推理应用` `初级` `中级` `高级` - - - -- [Ascend 310 AI处理器上使用AIR模型进行推理](#ascend-310-ai处理器上使用air模型进行推理) - - [概述](#概述) - - [开发环境准备](#开发环境准备) - - [硬件准备](#硬件准备) - - [软件包准备](#软件包准备) - - [制作SD卡](#制作sd卡) - - [连接Atlas 200 DK开发板与Ubuntu服务器](#连接atlas-200-dk开发板与ubuntu服务器) - - [配置Python环境](#配置python环境) - - [安装开发套件包](#安装开发套件包) - - [推理目录结构介绍](#推理目录结构介绍) - - [导出AIR模型文件](#导出air模型文件) - - [将AIR模型文件转成OM模型](#将air模型文件转成om模型) - - [编译推理代码](#编译推理代码) - - [执行推理并查看结果](#执行推理并查看结果) - - - - - -## 概述 - -Ascend 310是面向边缘场景的高能效高集成度AI处理器。Atlas 200开发者套件又称Atlas 200 Developer Kit(以下简称Atlas 200 DK),是以Atlas 200 AI加速模块为核心的开发者板形态的终端类产品,集成了海思Ascend 310 AI处理器,可以实现图像、视频等多种数据分析与推理计算,可广泛用于智能监控、机器人、无人机、视频服务器等场景。 - -本教程介绍如何在Atlas 200 
DK上使用MindSpore基于AIR模型文件执行推理,主要包括以下流程: - -1. 开发环境准备,包括制作Atlas 200 DK的SD卡 、配置Python环境和刷配套开发软件包。 - -2. 导出AIR模型文件,这里以ResNet-50模型为例。 - -3. 使用ATC工具将AIR模型文件转成OM模型。 - -4. 编译推理代码,生成可执行`main`文件。 - -5. 加载保存的OM模型,执行推理并查看结果。 - -> 你可以在这里找到完整可运行的样例代码: 。 - -## 开发环境准备 - -### 硬件准备 - -- 一个操作系统为Ubuntu的服务器或PC机,用于为Atlas 200 DK制作SD卡启动盘和开发环境部署。 -- 一张SD卡,建议容量不低于16G。 - -### 软件包准备 - -配置开发环境需要的脚本和软件包如下5类,共7个文件。 - -1. 制卡入口脚本:[make_sd_card.py](https://gitee.com/ascend/tools/blob/master/makesd/for_1.0.9.alpha/make_sd_card.py) - -2. 制作SD卡操作系统脚本:[make_ubuntu_sd.sh](https://gitee.com/ascend/tools/blob/master/makesd/for_1.0.9.alpha/make_ubuntu_sd.sh) - -3. Ubuntu操作系统镜像包:[ubuntu-18.04.xx-server-arm64.iso](http://cdimage.ubuntu.com/ubuntu/releases/18.04/release/ubuntu-18.04.5-server-arm64.iso) - -4. 开发者板驱动包与运行包: - - - `Ascend310-driver-*{software version}*-ubuntu18.04.aarch64-minirc.tar.gz` - - - `Ascend310-aicpu_kernels-*{software version}*-minirc.tar.gz` - - - `Ascend-acllib-*{software version}*-ubuntu18.04.aarch64-minirc.run` - -5. 
安装开发套件包:`Ascend-Toolkit-*{version}*-arm64-linux_gcc7.3.0.run` - -其中, - -- 前3项可以参考[Atlas 200 DK 开发者套件使用指南](https://support.huaweicloud.com/usermanual-A200dk_3000/atlas200dk_02_0011.html)获取。 -- 其余软件包建议从[固件与驱动](https://ascend.huawei.com/#/hardware/firmware-drivers)中获取,在该页面中选择产品系列和产品型号为`Atlas 200 DK`,选中需要的文件,即可下载。 - -### 制作SD卡 - -读卡器通过USB与Ubuntu服务器连接,通过制卡脚本制作SD卡。具体操作参见[操作步骤](https://support.huaweicloud.com/usermanual-A200dk_3000/atlas200dk_02_0011.html#section2)。 - -### 连接Atlas 200 DK开发板与Ubuntu服务器 - -Atlas 200 DK开发者板支持通过USB端口或者网线与Ubuntu服务器进行连接。具体操作参见[连接Atlas 200 DK开发者板与Ubuntu服务器](https://support.huaweicloud.com/usermanual-A200dk_3000/atlas200dk_02_0013.html)。 - -### 配置Python环境 - -安装Python以及gcc等软件,具体操作参见[安装依赖](https://support.huaweicloud.com/usermanual-A200dk_3000/atlas200dk_02_0016.html#section4)。 - -### 安装开发套件包 - -安装开发套件包`Ascend-Toolkit-*{version}*-arm64-linux_gcc7.3.0.run`,具体操作参见[安装开发套件包](https://support.huaweicloud.com/usermanual-A200dk_3000/atlas200dk_02_0017.html)。 - -## 推理目录结构介绍 - -创建目录放置推理代码工程,例如`/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/acl_resnet50_sample`,其中`inc`、`src`、`test_data`可以从官网示例下载[样例代码](https://gitee.com/mindspore/docs/tree/master/tutorials/tutorial_code/acl_resnet50_sample)获取,`model`目录用于存放接下来导出的`AIR`模型文件和转换后的`OM`模型文件,`out`目录用于存放执行编译生成的可执行文件和输出结果目录,推理代码工程目录结构如下: - -```text -└─acl_resnet50_sample - ├── inc - │ ├── model_process.h //声明资源初始化/销毁相关函数的头文件 - │ ├── sample_process.h //声明模型处理相关函数的头文件 - │ ├── utils.h //声明公共函数(例如:文件读取函数)的头文件 - ├── model - │ ├── resnet50_export.air //AIR模型文件 - │ ├── resnet50_export.om //转换后的OM模型文件 - ├── src - │ ├── acl.json //系统初始化的配置文件 - │ ├── CMakeLists.txt //编译脚本 - │ ├── main.cpp //主函数,图片分类功能的实现文件 - │ ├── model_process.cpp //模型处理相关函数的实现文件 - │ ├── sample_process.cpp //资源初始化/销毁相关函数的实现文件 - │ ├── utils.cpp //公共函数(例如:文件读取函数)的实现文件 - ├── test_data - │ ├── test_data_1x3x224x224_1.bin //输入样本数据1 - │ ├── test_data_1x3x224x224_2.bin //输入样本数据2 - ├── out - │ ├── main //编译生成的可执行文件 - │ ├── 
result //输出结果目录 -``` - -> 输出结果目录`acl_resnet50_sample/out/result`需先创建好再执行推理操作。 - -## 导出AIR模型文件 - -在Ascend 910的机器上训练好目标网络,并保存为CheckPoint文件,通过网络和CheckPoint文件导出对应的AIR格式模型文件,导出流程参见[导出AIR格式文件](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#air)。 - -> 这里提供使用ResNet-50模型导出的示例AIR文件[resnet50_export.air](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com:443/sample_resources/acl_resnet50_sample/resnet50_export.air)。 - -## 将AIR模型文件转成OM模型 - -登录Atlas 200 DK开发者板环境,创建`model`目录放置AIR文件`resnet50_export.air`,例如`/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/acl_resnet50_sample/model`,并进入该路径下,设置如下环境变量。其中,`install_path`需指定为实际安装路径。 - -```bash -export install_path=/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1 -export PATH=/usr/local/python3.7.5/bin:${install_path}/atc/ccec_compiler/bin:${install_path}/atc/bin:$PATH -export PYTHONPATH=${install_path}/atc/python/site-packages/te:${install_path}/atc/python/site-packages/topi:$PYTHONPATH -export LD_LIBRARY_PATH=${install_path}/atc/lib64:$LD_LIBRARY_PATH -export ASCEND_OPP_PATH=${install_path}/opp -``` - -以`resnet50_export.air`为例,执行如下命令进行模型转换,在当前目录生成`resnet50_export.om`文件。 - -```bash -/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/atc/bin/atc --framework=1 --model=./resnet50_export.air --output=./resnet50_export --input_format=NCHW --soc_version=Ascend310 -``` - -其中: - -- `--model`:原始模型文件的路径。 -- `--output`:转换得到的OM模型文件的路径。 -- `--input_format`:输入数据格式。 - -ATC工具详细资料可在[昇腾社区开发者文档](https://ascend.huawei.com/#/document?tag=developer)中选择相应CANN版本后,查找《ATC工具使用指南》章节查看。 - -## 编译推理代码 - -进入工程目录`acl_resnet50_sample`,设置如下环境变量: - -```bash -export DDK_PATH=/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1 -export NPU_HOST_LIB=/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/lib64/stub/ -``` - -> `CMakeLists.txt`文件中`acllib`包的`include`的目录需要指定正确,否则会报`acl/acl.h`找不到的错误。`CMakeLists.txt`文件中指定`include`目录的代码位置如下,如果与实际安装目录不符,需要修改。 - -```text -... 
-#Header path - - include_directories( - - ${INC_PATH}/acllib_linux.arm64/include/ - - ../ - - ) -... -``` - -执行如下命令创建编译目录: - -```bash -mkdir -p build/intermediates/minirc -``` - -然后切换至编译目录: - -```bash -cd build/intermediates/minirc -``` - -执行`cmake`命令: - -```bash -cmake ../../../src -DCMAKE_CXX_COMPILER=aarch64-linux-gnu-g++ -DCMAKE_SKIP_RPATH=TRUE -``` - -再执行`make`命令编译即可。 - -```bash -make -``` - -编译完成后,在`acl_resnet50_sample/out`下会生成可执行`main`文件。 - -## 执行推理并查看结果 - -将生成的OM模型文件`resnet50_export.om`拷贝到`acl_resnet50_sample/out`目录下(和可执行`main`文件同路径),并确认`acl_resnet50_sample/test_data`目录中已经准备好输入数据样本,就可以执行推理了。 - -值得注意的是,需要设置如下环境变量,否则会导致推理不成功。 - -```bash -export LD_LIBRARY_PATH=/home/HwHiAiUser/Ascend/acllib/lib64/ -``` - -进入到`acl_resnet50_sample/out`目录下,如果当前目录下`result`目录不存在,需要执行`mkdir result`命令创建该目录,然后执行如下命令进行推理。 - -```bash -./main ./resnet50_export.om ../test_data -``` - -执行成功后,可以看到推理结果如下,打印了`top5`的概率标签,并且输出结果会以`.bin`文件的格式保存在`acl_resnet50_sample/out/result`目录中。 - -```text -[INFO] acl init success -[INFO] open device 0 success -[INFO] create context success -[INFO] create stream success -[INFO] get run mode success -[INFO] load model ./resnet50_export.om success -[INFO] create model description success -[INFO] create model output success -[INFO] start to process file:../test_data/test_data_1x3x224x224_1.bin -[INFO] model execute success -[INFO] top 1: index[2] value[0.941406] -[INFO] top 2: index[3] value[0.291992] -[INFO] top 3: index[1] value[0.067139] -[INFO] top 4: index[0] value[0.013519] -[INFO] top 5: index[4] value[-0.226685] -[INFO] output data success -[INFO] dump data success -[INFO] start to process file:../test_data/test_data_1x3x224x224_2.bin -[INFO] model execute success -[INFO] top 1: index[2] value[0.946289] -[INFO] top 2: index[3] value[0.296143] -[INFO] top 3: index[1] value[0.072083] -[INFO] top 4: index[0] value[0.014549] -[INFO] top 5: index[4] value[-0.225098] -[INFO] output data success -[INFO] dump data success -[INFO] unload model success, modelId 
is 1 -[INFO] execute sample success -[INFO] end to destroy stream -[INFO] end to destroy context -[INFO] end to reset device is 0 -[INFO] end to finalize acl -``` diff --git a/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_310_mindir.md b/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_310_mindir.md deleted file mode 100644 index 668d8b837aa067f5eef682968ba48700625abd5b..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_310_mindir.md +++ /dev/null @@ -1,333 +0,0 @@ -# Ascend 310 AI处理器上使用MindIR模型进行推理 - -`Linux` `Ascend` `推理应用` `初级` `中级` `高级` - - - -- [Ascend 310 AI处理器上使用MindIR模型进行推理](#ascend-310-ai处理器上使用mindir模型进行推理) - - [概述](#概述) - - [开发环境准备](#开发环境准备) - - [导出MindIR模型文件](#导出mindir模型文件) - - [推理目录结构介绍](#推理目录结构介绍) - - [推理代码介绍](#推理代码介绍) - - [构建脚本介绍](#构建脚本介绍) - - [编译推理代码](#编译推理代码) - - [执行推理并查看结果](#执行推理并查看结果) - - - - - -## 概述 - -Ascend 310是面向边缘场景的高能效高集成度AI处理器。Atlas 200开发者套件又称Atlas 200 Developer Kit(以下简称Atlas 200 DK),是以Atlas 200 AI加速模块为核心的开发者板形态的终端类产品,集成了海思Ascend 310 AI处理器,可以实现图像、视频等多种数据分析与推理计算,可广泛用于智能监控、机器人、无人机、视频服务器等场景。 - -本教程介绍如何在Atlas 200 DK上使用MindSpore基于MindIR模型文件执行推理,主要包括以下流程: - -1. 开发环境准备,包括制作Atlas 200 DK的SD卡 、配置Python环境和刷配套开发软件包。 - -2. 导出MindIR模型文件,这里以ResNet-50模型为例。 - -3. 编译推理代码,生成可执行`main`文件。 - -4. 
加载保存的MindIR模型,执行推理并查看结果。 - -> 你可以在这里找到完整可运行的样例代码: 。 - -## 开发环境准备 - -参考[Ascend 310 AI处理器上使用AIR进行推理#开发环境准备](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_310_air.html#id2)安装设备环境,然后参考[安装指导](https://www.mindspore.cn/install)安装MindSpore。 - -## 导出MindIR模型文件 - -在Ascend 910的机器上训练好目标网络,并保存为CheckPoint文件,通过网络和CheckPoint文件导出对应的MindIR格式模型文件,导出流程参见[导出MindIR格式文件](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#mindir)。 - -> 这里提供使用BatchSize为1的ResNet-50模型导出的示例MindIR文件[resnet50_imagenet.mindir](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/sample_resources/ascend310_resnet50_preprocess_sample/resnet50_imagenet.mindir)。 - -## 推理目录结构介绍 - -创建目录放置推理代码工程,例如`/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_resnet50_preprocess_sample`,可以从官网示例下载[样例代码](https://gitee.com/mindspore/docs/tree/master/tutorials/tutorial_code/ascend310_resnet50_preprocess_sample),`model`目录用于存放上述导出的`MindIR`模型文件,`test_data`目录用于存放待分类的图片,推理代码工程目录结构如下: - -```text -└─ascend310_resnet50_preprocess_sample - ├── CMakeLists.txt // 构建脚本 - ├── README.md // 使用说明 - ├── main.cc // 主函数 - ├── model - │ └── resnet50_imagenet.mindir // MindIR模型文件 - └── test_data - ├── ILSVRC2012_val_00002138.JPEG // 输入样本图片1 - ├── ILSVRC2012_val_00003014.JPEG // 输入样本图片2 - ├── ... 
// 输入样本图片n -``` - -## 推理代码介绍 - -### 使用CPU算子数据预处理 - -推理代码样例: 。 - -引用`mindspore`和`mindspore::dataset`的名字空间。 - -```c++ -namespace ms = mindspore; -namespace ds = mindspore::dataset; -``` - -环境初始化,指定硬件为Ascend 310,DeviceID为0: - -```c++ -auto context = std::make_shared(); -auto ascend310_info = std::make_shared(); -ascend310_info->SetDeviceID(0); -context->MutableDeviceInfo().push_back(ascend310_info); -``` - -加载模型文件: - -```c++ -// Load MindIR model -ms::Graph graph; -ms::Status ret = ms::Serialization::Load(resnet_file, ms::ModelType::kMindIR, &graph); -// Build model with graph object -ms::Model resnet50; -ret = resnet50.Build(ms::GraphCell(graph), context); -``` - -获取模型所需输入信息: - -```c++ -std::vector model_inputs = resnet50.GetInputs(); -``` - -加载图片文件: - -```c++ -// Readfile is a function to read images -ms::MSTensor ReadFile(const std::string &file); -auto image = ReadFile(image_file); -``` - -图片预处理(使用CPU算子): - -```c++ -// Create the CPU operator provided by MindData to get the function object - -// Decode the input to RGB format -std::shared_ptr decode(new ds::vision::Decode()); -// Resize the image to the given size -std::shared_ptr resize(new ds::vision::Resize({256})); -// Normalize the input -std::shared_ptr normalize(new ds::vision::Normalize( - {0.485 * 255, 0.456 * 255, 0.406 * 255}, {0.229 * 255, 0.224 * 255, 0.225 * 255})); -// Crop the input image at the center -std::shared_ptr center_crop(new ds::vision::CenterCrop({224, 224})); -// shape (H, W, C) to shape (C, H, W) -std::shared_ptr hwc2chw(new ds::vision::HWC2CHW()); - -// // Define a MindData preprocessor -ds::Execute preprocessor({decode, resize, normalize, center_crop, hwc2chw}); - -// Call the function object to get the processed image -ret = preprocessor(image, &image); -``` - -执行推理: - -```c++ -// Create outputs vector -std::vector outputs; -// Create inputs vector -std::vector inputs; -inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(), - 
image.Data().get(), image.DataSize()); -// Call the Predict function of Model for inference -ret = resnet50.Predict(inputs, &outputs); -``` - -获取推理结果: - -```c++ -// Output the maximum probability to the screen -std::cout << "Image: " << image_file << " infer result: " << GetMax(outputs[0]) << std::endl; -``` - -### 使用Ascend 310算子数据预处理 - -Dvpp模块为Ascend 310芯片内置硬件解码器,相较于CPU拥有对图形处理更强劲的性能。支持JPEG图片的解码缩放等基础操作。 - -引用`mindspore`和`mindspore::dataset`的名字空间。 - -```c++ -namespace ms = mindspore; -namespace ds = mindspore::dataset; -``` - -环境初始化,指定硬件为Ascend 310,DeviceID为0: - -```c++ -auto context = std::make_shared(); -auto ascend310_info = std::make_shared(); -ascend310_info->SetDeviceID(0); -context->MutableDeviceInfo().push_back(ascend310_info); -``` - -加载图片文件: - -```c++ -// Readfile is a function to read images -ms::MSTensor ReadFile(const std::string &file); -auto image = ReadFile(image_file); -``` - -图片预处理(使用Ascend 310算子): - -```c++ -// Create the Dvpp operator provided by MindData to get the function object - -// Decode the input to YUV420 format -std::shared_ptr decode(new ds::vision::Decode()); -// Resize the image to the given size -std::shared_ptr resize(new ds::vision::Resize({256})); -// Normalize the input -std::shared_ptr normalize(new ds::vision::Normalize( - {0.485 * 255, 0.456 * 255, 0.406 * 255}, {0.229 * 255, 0.224 * 255, 0.225 * 255})); -// Crop the input image at the center -std::shared_ptr center_crop(new ds::vision::CenterCrop({224, 224})); -``` - -图片预处理(使用Ascend 310算子, 性能为CPU算子的2.3倍),需显式指定计算硬件为Ascend 310。 - -```c++ -// Define a MindData preprocessor, set deviceType = kAscend310, device id = 0 -ds::Execute preprocessor({decode, resize, center_crop, normalize}, MapTargetDevice::kAscend310, 0); - -// Call the function object to get the processed image -ret = preprocessor(image, &image); -``` - -加载模型文件: 若使用Ascend 310算子,则需要为模型插入Aipp算子。 - -```c++ -// Load MindIR model -ms::Graph graph; -ms::Status ret = ms::Serialization::Load(resnet_file, 
ms::ModelType::kMindIR, &graph); -// Build model with graph object -ascend310_info->SetInsertOpConfigPath(preprocessor.AippCfgGenerator()); -ms::Model resnet50; -ret = resnet50.Build(ms::GraphCell(graph), context); -``` - -获取模型所需输入信息: - -```c++ -std::vector model_inputs = resnet50.GetInputs(); -``` - -执行推理: - -```c++ -// Create outputs vector -std::vector outputs; -// Create inputs vector -std::vector inputs; -inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(), - image.Data().get(), image.DataSize()); -// Call the Predict function of Model for inference -ret = resnet50.Predict(inputs, &outputs); -``` - -获取推理结果: - -```c++ -// Output the maximum probability to the screen -std::cout << "Image: " << image_file << " infer result: " << GetMax(outputs[0]) << std::endl; -``` - -## 构建脚本介绍 - -构建脚本用于构建用户程序,样例来自于: 。 - -为编译器添加头文件搜索路径: - -```cmake -option(MINDSPORE_PATH "mindspore install path" "") -include_directories(${MINDSPORE_PATH}) -include_directories(${MINDSPORE_PATH}/include) -``` - -在MindSpore中查找所需动态库: - -```cmake -find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib) -file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*) -``` - -使用指定的源文件生成目标可执行文件,并为目标文件链接MindSpore库: - -```cmake -add_executable(resnet50_sample main.cc) -target_link_libraries(resnet50_sample ${MS_LIB} ${MD_LIB}) -``` - -## 编译推理代码 - -进入工程目录`ascend310_resnet50_preprocess_sample`,设置如下环境变量: - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. 
-export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/acllib/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/atc/lib64:${LOCAL_ASCEND}/driver/lib64:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# lib libraries that the mindspore depends on, modify "pip3" according to the actual situation -export LD_LIBRARY_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore/lib"}' | xargs realpath`:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/atc/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on -``` - -执行`cmake`命令,其中`pip3`需要按照实际情况修改: - -```bash -cmake . 
-DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -``` - -再执行`make`命令编译即可。 - -```bash -make -``` - -编译完成后,在`ascend310_resnet50_preprocess_sample`下会生成可执行`main`文件。 - -## 执行推理并查看结果 - -登录Atlas 200 DK开发者板环境,创建`model`目录放置MindIR文件`resnet50_imagenet.mindir`,例如`/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_resnet50_preprocess_sample/model`。 -创建`test_data`目录放置图片,例如`/home/HwHiAiUser/Ascend/ascend-toolkit/20.0.RC1/acllib_linux.arm64/sample/acl_execute_model/ascend310_resnet50_preprocess_sample/test_data`。 -就可以开始执行推理了: - -```bash -./resnet50_sample -``` - -执行后,会对`test_data`目录下放置的所有图片进行推理,比如放置了9张[ImageNet2012](http://image-net.org/download-images)验证集中label为0的图片,可以看到推理结果如下。 - -```text -Image: ./test_data/ILSVRC2012_val_00002138.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00003014.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00006697.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00007197.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009111.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009191.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009346.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009379.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009396.JPEG infer result: 0 -``` diff --git a/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_910.md b/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_910.md deleted file mode 100644 index d7f35b0fbac5c36695f2638c63cf16871602559f..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/multi_platform_inference_ascend_910.md +++ /dev/null @@ -1,334 +0,0 @@ -# Ascend 910 AI处理器上推理 - -`Linux` `Ascend` `推理应用` `初级` `中级` `高级` - - - -- [Ascend 910 AI处理器上推理](#ascend-910-ai处理器上推理) - - [使用checkpoint格式文件单卡推理](#使用checkpoint格式文件单卡推理) - - [分布式推理](#分布式推理) - - 
[使用C++接口推理MindIR格式文件](#使用c接口推理mindir格式文件) - - [推理目录结构介绍](#推理目录结构介绍) - - [推理代码介绍](#推理代码介绍) - - [构建脚本介绍](#构建脚本介绍) - - [编译推理代码](#编译推理代码) - - [执行推理并查看结果](#执行推理并查看结果) - - - - - -## 使用checkpoint格式文件单卡推理 - -1. 使用`model.eval`接口来进行模型验证。 - - 1.1 模型已保存在本地 - - 首先构建模型,然后使用`mindspore.train.serialization`模块的`load_checkpoint`和`load_param_into_net`从本地加载模型与参数,传入验证数据集后即可进行模型推理,验证数据集的处理方式与训练数据集相同。 - - ```python - network = LeNet5(cfg.num_classes) - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.Momentum(network.trainable_params(), cfg.lr, cfg.momentum) - model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}) - - print("============== Starting Testing ==============") - param_dict = load_checkpoint(args.ckpt_path) - load_param_into_net(network, param_dict) - dataset = create_dataset(os.path.join(args.data_path, "test"), - cfg.batch_size, - 1) - acc = model.eval(dataset, dataset_sink_mode=args.dataset_sink_mode) - print("============== {} ==============".format(acc)) - ``` - - 其中, - `model.eval`为模型验证接口,对应接口说明:。 - > 推理样例代码:。 - - 1.2 使用MindSpore Hub从华为云加载模型 - - 首先构建模型,然后使用`mindspore_hub.load`从云端加载模型参数,传入验证数据集后即可进行推理,验证数据集的处理方式与训练数据集相同。 - - ```python - model_uid = "mindspore/ascend/0.7/googlenet_v1_cifar10" # using GoogleNet as an example. - network = mindspore_hub.load(model_uid, num_classes=10) - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.Momentum(network.trainable_params(), cfg.lr, cfg.momentum) - model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}) - - print("============== Starting Testing ==============") - dataset = create_dataset(os.path.join(args.data_path, "test"), - cfg.batch_size, - 1) - acc = model.eval(dataset, dataset_sink_mode=args.dataset_sink_mode) - print("============== {} ==============".format(acc)) - ``` - - 其中, - `mindspore_hub.load`为加载模型参数接口,对应接口说明:。 - -2. 
使用`model.predict`接口来进行推理操作。 - - ```python - model.predict(input_data) - ``` - - 其中, - `model.predict`为推理接口,对应接口说明:。 - -## 分布式推理 - -分布式推理是指推理阶段采用多卡进行推理。如果训练时采用数据并行或者模型参数是合并保存,那么推理方式与上述一致,只需要注意每卡加载同样的checkpoint文件进行推理。 - -本篇教程主要介绍在多卡训练过程中,每张卡上保存模型的切片,在推理阶段采用多卡形式,按照推理策略重新加载模型进行推理的过程。针对超大规模神经网络模型的参数个数过多,模型无法完全加载至单卡中进行推理的问题,可利用多卡进行分布式推理。 - -> 分布式推理样例代码: -> -> - -分布式推理流程如下: - -1. 执行训练,生成checkpoint文件和模型参数切分策略文件。 - - > - 分布式训练教程和样例代码可参考链接:. - > - 在分布式推理场景中,训练阶段的`CheckpointConfig`接口的`integrated_save`参数需设定为`False`,表示每卡仅保存模型切片而不是全量模型。 - > - `set_auto_parallel_context`接口的`parallel_mode`参数需设定为`auto_parallel`或者`semi_auto_parallel`,并行模式为自动并行或者半自动并行。 - > - 此外还需指定`strategy_ckpt_save_file`参数,即生成的策略文件的地址。 - -2. 设置context,根据推理数据推导出推理策略。 - - ```python - context.set_auto_parallel_context(full_batch=True, parallel_mode='semi_auto_parallel', strategy_ckpt_load_file='./train_strategy.ckpt') - network = Net() - model = Model(network) - predict_data = create_predict_data() - predict_strategy = model.infer_predict_layout(predict_data) - ``` - - 其中, - - - `full_batch`:是否全量导入数据集,为`True`时表明全量导入,每卡的数据相同,该场景中必须设置为`True`。 - - `parallel_mode`:并行模式,该场景中必须设置为自动并行或者半自动并行模式。 - - `strategy_ckpt_load_file`:训练阶段生成的策略文件的文件地址,分布式推理场景中该参数必须设置。 - - `create_predict_data`:用户需自定义的接口,返回推理数据。与训练阶段不同的是,分布式推理场景中返回类型必须为`Tensor`。 - - `infer_predict_layout`:根据推理数据生成推理策略。 - -3. 导入checkpoint文件,根据推理策略加载相应的模型切片至每张卡中。 - - ```python - ckpt_file_list = create_ckpt_file_list() - load_distributed_checkpoint(network, ckpt_file_list, predict_strategy) - ``` - - 其中, - - - `create_ckpt_file_list`:用户需自定义的接口,返回按rank id排序的CheckPoint文件名列表。 - - `load_distributed_checkpoint`:对模型切片进行合并,再根据推理策略进行切分,加载至网络中。 - - > `load_distributed_checkpoint`接口支持predict_strategy为`None`,此时为单卡推理,其过程与分布式推理有所不同,详细用法请参考链接: - > . - -4. 
进行推理,得到推理结果。 - - ```python - model.predict(predict_data) - ``` - -## 使用C++接口推理MindIR格式文件 - -用户可以创建C++应用程序,调用MindSpore的C++接口推理MindIR模型。 - -### 推理目录结构介绍 - -创建目录放置推理代码工程,例如`/home/HwHiAiUser/mindspore_sample/ascend910_resnet50_preprocess_sample`,可以从官网示例下载[样例代码](https://gitee.com/mindspore/docs/tree/master/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample),`model`目录用于存放上述导出的`MindIR`模型文件,`test_data`目录用于存放待分类的图片,推理代码工程目录结构如下: - -```text -└─ascend910_resnet50_preprocess_sample - ├── CMakeLists.txt // 构建脚本 - ├── README.md // 使用说明 - ├── main.cc // 主函数 - ├── model - │ └── resnet50_imagenet.mindir // MindIR模型文件 - └── test_data - ├── ILSVRC2012_val_00002138.JPEG // 输入样本图片1 - ├── ILSVRC2012_val_00003014.JPEG // 输入样本图片2 - ├── ... // 输入样本图片n -``` - -### 推理代码介绍 - -推理代码样例: 。 - -引用`mindspore`和`mindspore::dataset`的名字空间。 - -```c++ -namespace ms = mindspore; -namespace ds = mindspore::dataset; -``` - -环境初始化,指定硬件为Ascend 910,DeviceID为0: - -```c++ -auto context = std::make_shared(); -auto ascend910_info = std::make_shared(); -ascend910_info->SetDeviceID(0); -context->MutableDeviceInfo().push_back(ascend910_info); -``` - -加载模型文件: - -```c++ -// Load MindIR model -ms::Graph graph; -ms::Status ret = ms::Serialization::Load(resnet_file, ms::ModelType::kMindIR, &graph); -// Build model with graph object -ms::Model resnet50; -ret = resnet50.Build(ms::GraphCell(graph), context); -``` - -获取模型所需输入信息: - -```c++ -std::vector model_inputs = resnet50.GetInputs(); -``` - -加载图片文件: - -```c++ -// Readfile is a function to read images -ms::MSTensor ReadFile(const std::string &file); -auto image = ReadFile(image_file); -``` - -图片预处理: - -```c++ -// Create the CPU operator provided by MindData to get the function object - -// Decode the input to RGB format -std::shared_ptr decode(new ds::vision::Decode()); -// Resize the image to the given size -std::shared_ptr resize(new ds::vision::Resize({256})); -// Normalize the input -std::shared_ptr normalize(new ds::vision::Normalize( - {0.485 * 255, 0.456 * 
255, 0.406 * 255}, {0.229 * 255, 0.224 * 255, 0.225 * 255})); -// Crop the input image at the center -std::shared_ptr center_crop(new ds::vision::CenterCrop({224, 224})); -// shape (H, W, C) to shape (C, H, W) -std::shared_ptr hwc2chw(new ds::vision::HWC2CHW()); - -// // Define a MindData preprocessor -ds::Execute preprocessor({decode, resize, normalize, center_crop, hwc2chw}); - -// Call the function object to get the processed image -ret = preprocessor(image, &image); -``` - -执行推理: - -```c++ -// Create outputs vector -std::vector outputs; -// Create inputs vector -std::vector inputs; -inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(), - image.Data().get(), image.DataSize()); -// Call the Predict function of Model for inference -ret = resnet50.Predict(inputs, &outputs); -``` - -获取推理结果: - -```c++ -// Output the maximum probability to the screen -std::cout << "Image: " << image_file << " infer result: " << GetMax(outputs[0]) << std::endl; -``` - -### 构建脚本介绍 - -构建脚本用于构建用户程序,样例来自于: 。 - -为编译器添加头文件搜索路径: - -```cmake -option(MINDSPORE_PATH "mindspore install path" "") -include_directories(${MINDSPORE_PATH}) -include_directories(${MINDSPORE_PATH}/include) -``` - -在MindSpore中查找所需动态库: - -```cmake -find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib) -file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*) -``` - -使用指定的源文件生成目标可执行文件,并为目标文件链接MindSpore库: - -```cmake -add_executable(resnet50_sample main.cc) -target_link_libraries(resnet50_sample ${MS_LIB} ${MD_LIB}) -``` - -## 编译推理代码 - -进入工程目录`ascend910_resnet50_preprocess_sample`,设置如下环境变量: - -```bash -# control log level. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, default level is WARNING. 
-export GLOG_v=2 - -# Conda environmental options -LOCAL_ASCEND=/usr/local/Ascend # the root directory of run package - -# lib libraries that the run package depends on -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/lib64:${LOCAL_ASCEND}/driver/lib64/common:${LOCAL_ASCEND}/driver/lib64/driver:${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# lib libraries that the mindspore depends on, modify "pip3" according to the actual situation -export LD_LIBRARY_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore/lib"}' | xargs realpath`:${LD_LIBRARY_PATH} - -# Environment variables that must be configured -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE operator implementation tool path -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP path -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:${PATH} # TBE operator compilation tool path -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # Python library that TBE implementation depends on -``` - -执行`cmake`命令,其中`pip3`需要按照实际情况修改: - -```bash -cmake . 
-DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -``` - -再执行`make`命令编译即可。 - -```bash -make -``` - -编译完成后,在`ascend910_resnet50_preprocess_sample`下会生成可执行`main`文件。 - -## 执行推理并查看结果 - -登录Ascend 910环境,创建`model`目录放置MindIR文件`resnet50_imagenet.mindir`,例如`/home/HwHiAiUser/mindspore_sample/ascend910_resnet50_preprocess_sample/model`。 -创建`test_data`目录放置图片,例如`/home/HwHiAiUser/mindspore_sample/ascend910_resnet50_preprocess_sample/test_data`。 -就可以开始执行推理了: - -```bash -./resnet50_sample -``` - -执行后,会对`test_data`目录下放置的所有图片进行推理,比如放置了9张[ImageNet2012](http://image-net.org/download-images)验证集中label为0的图片,可以看到推理结果如下。 - -```text -Image: ./test_data/ILSVRC2012_val_00002138.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00003014.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00006697.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00007197.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009111.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009191.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009346.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009379.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00009396.JPEG infer result: 0 -``` diff --git a/tutorials/inference/source_zh_cn/multi_platform_inference_cpu.md b/tutorials/inference/source_zh_cn/multi_platform_inference_cpu.md deleted file mode 100644 index ef7a6a82521a8e5610a202849b881f8d02e939ea..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/multi_platform_inference_cpu.md +++ /dev/null @@ -1,25 +0,0 @@ -# CPU上推理 - -`Linux` `CPU` `推理应用` `初级` `中级` `高级` - - - -- [CPU上推理](#cpu上推理) - - [使用checkpoint格式文件推理](#使用checkpoint格式文件推理) - - [使用ONNX格式文件推理](#使用onnx格式文件推理) - - - - - -## 使用checkpoint格式文件推理 - -与在Ascend 910 AI处理器上推理一样。 - -## 使用ONNX格式文件推理 - -与在GPU上进行推理类似,需要以下几个步骤: - -1. 
在训练平台上生成ONNX格式模型,具体步骤请参考[导出ONNX格式文件](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#onnx)。 - -2. 在CPU上进行推理,具体可以参考推理使用runtime/SDK的文档。如使用ONNX Runtime,可以参考[ONNX Runtime说明文档](https://github.com/microsoft/onnxruntime)。 diff --git a/tutorials/inference/source_zh_cn/multi_platform_inference_gpu.md b/tutorials/inference/source_zh_cn/multi_platform_inference_gpu.md deleted file mode 100644 index f88a523328f23fe842d6aa13993a94e023e00eec..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/multi_platform_inference_gpu.md +++ /dev/null @@ -1,23 +0,0 @@ -# GPU上推理 - -`Linux` `GPU` `推理应用` `初级` `中级` `高级` - - - -- [GPU上推理](#gpu上推理) - - [使用checkpoint格式文件推理](#使用checkpoint格式文件推理) - - [使用ONNX格式文件推理](#使用onnx格式文件推理) - - - - - -## 使用checkpoint格式文件推理 - -与在Ascend 910 AI处理器上推理一样。 - -## 使用ONNX格式文件推理 - -1. 在训练平台上生成ONNX格式模型,具体步骤请参考[导出ONNX格式文件](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#onnx)。 - -2. 在GPU上进行推理,具体可以参考推理使用runtime/SDK的文档。如在Nvidia GPU上进行推理,使用常用的TensorRT,可参考[TensorRT backend for ONNX](https://github.com/onnx/onnx-tensorrt)。 diff --git a/tutorials/inference/source_zh_cn/nlp_tprr.md b/tutorials/inference/source_zh_cn/nlp_tprr.md deleted file mode 100644 index 60eea2fefc187254a2918d0f1d0b0045ddf27930..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/nlp_tprr.md +++ /dev/null @@ -1,265 +0,0 @@ -# 多跳知识推理问答模型TPRR - -`Linux` `Ascend` `模型开发` `高级` - - - -- [多跳知识推理问答模型TPRR](#多跳知识推理问答模型TPRR) - - [概述](#概述) - - [准备环节](#准备环节) - - [安装软件依赖](#安装软件依赖) - - [准备数据](#准备数据) - - [加载数据](#加载数据) - - [定义网络](#定义网络) - - [设置模型参数](#设置模型参数) - - [定义模型](#定义模型) - - [推理网络](#推理网络) - - [运行脚本](#运行脚本) - - [引用](#引用) - - -   - -## 概述 - -TPRR(Thinking Path 
Re-Ranker)是由华为提出的基于开放域多跳问答的通用模型,用以实现多跳知识推理问答。传统问答中,模型只需要找到与原文中问题相关的句子就可以找到答案。多跳知识推理问答中的问题,需要多次“跳转”才能找到答案。具体来说,给定一个问题,模型需要通过知识从多个相关的文档中推理得到正确回答。TPRR模型分为三个模块:Retriever(检索器)、Reranker(重排器)、Reader(阅读器)。其中Retriever根据给定多跳问题,在百万wiki文档中筛选出包含答案的候选文档序列,Reranker从候选文档序列中筛选出最佳文档序列,最后Reader从最佳文档的多个句子中解析出答案,完成多跳知识推理问答。TPRR模型利用条件概率对完整的推理路径进行建模,并且在训练中引入“思考”的负样本选择策略,在国际权威的HotpotQA评测Fullwiki Setting中荣登榜首,并且在联合准确率、线索准确率等四项指标均达到第一。相比于传统的多跳问答模型,TPRR仅利用纯文本信息而不需要额外的实体抽取等技术,使用MindSpore混合精度特性对TPRR模型进行框架加速,结合Ascend,能获得显著的性能提升。 - -本篇教程将主要介绍如何在Ascend上,使用MindSpore构建并运行多跳知识推理问答模型TPRR。 -> 你可以在这里下载完整的示例代码: - 。 - -示例代码目录结构如下: - -```shell -. -└─tprr - ├─README.md - ├─scripts - | ├─run_eval_ascend.sh # Launch retriever evaluation in ascend - | └─run_eval_ascend_reranker_reader.sh # Launch re-ranker and reader evaluation in ascend - | - ├─src - | ├─build_reranker_data.py # build data for re-ranker from result of retriever - | ├─config.py # Evaluation configurations for retriever - | ├─hotpot_evaluate_v1.py # Hotpotqa evaluation script - | ├─onehop.py # Onehop model of retriever - | ├─onehop_bert.py # Onehop bert model of retriever - | ├─process_data.py # Data preprocessing for retriever - | ├─reader.py # Reader model - | ├─reader_albert_xxlarge.py # Albert-xxlarge module of reader model - | ├─reader_downstream.py # Downstream module of reader model - | ├─reader_eval.py # Reader evaluation script - | ├─rerank_albert_xxlarge.py # Albert-xxlarge module of re-ranker model - | ├─rerank_and_reader_data_generator.py # Data generator for re-ranker and reader - | ├─rerank_and_reader_utils.py # Utils for re-ranker and reader - | ├─rerank_downstream.py # Downstream module of re-ranker model - | ├─reranker.py # Re-ranker model - | ├─reranker_eval.py # Re-ranker evaluation script - | ├─twohop.py # Twohop model of retriever - | ├─twohop_bert.py # Twohop bert model of retriever - | └─utils.py # Utils for retriever - | - ├─retriever_eval.py # Evaluation net for retriever - └─reranker_and_reader_eval.py # Evaluation 
net for re-ranker and reader -``` - -整体执行流程如下: - -1. 准备HotpotQA Development数据集,加载处理数据; -2. 设置TPRR模型参数; -3. 初始化TPRR模型; -4. 加载数据集和模型CheckPoint并进行推理,查看结果及保存输出。 - -## 准备环节 - -### 安装软件依赖 - -1. 安装MindSpore - - 实践前,确保已经正确安装MindSpore。如果没有,可以通过[MindSpore安装页面](https://www.mindspore.cn/install)安装。 - -2. 安装transformers - - ```shell - pip install transformers - ``` - -### 准备数据 - -本教程使用的数据是预处理过的[en-Wikipedia](https://github.com/AkariAsai/learning_to_retrieve_reasoning_paths/tree/master/retriever)和[HotpotQA Development数据集](https://hotpotqa.github.io/)。请先下载[预处理数据](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/tprr/data.zip)。 - -## 加载数据 - -下载后的数据放到scripts目录下。Retriever模块加载wiki和HotpotQA预处理的数据文件,通过给定的多跳问题从文档数据中检索出相关文档,加载数据部分在源码的`src/process_data.py`脚本中。 - -```python -def load_data(self): - """load data""" - print('********************** loading data ********************** ') - # wiki data - f_wiki = open(self.wiki_path, 'rb') - # hotpotqa dev data - f_train = open(self.dev_path, 'rb') - # doc data - f_doc = open(self.dev_data_path, 'rb') - data_db = pkl.load(f_wiki, encoding="gbk") - dev_data = json.load(f_train) - q_doc_text = pkl.load(f_doc, encoding='gbk') - return data_db, dev_data, q_doc_text -``` - -Retriever检索得到的结果保存在scripts目录下,Reranker模块根据该结果,使用自定义的DataGenerator类加载wiki和HotpotQA预处理的数据文件,得到重排序结果,并将其保存在scripts目录下。Reader模块根据重排序结果,同样使用自定义的DataGenerator类加载wiki和HotpotQA预处理的数据文件,提取答案和证据。自定义的DataGenerator类在源码的`src/rerank_and_reader_data_generator.py`脚本中。 - -```python -class DataGenerator: - """data generator for reranker and reader""" - def __init__(self, feature_file_path, example_file_path, batch_size, seq_len, - para_limit=None, sent_limit=None, task_type=None): - """init function""" - self.example_ptr = 0 - self.bsz = batch_size - self.seq_length = seq_len - self.para_limit = para_limit - self.sent_limit = sent_limit - self.task_type = task_type - self.feature_file_path = feature_file_path - self.example_file_path = example_file_path - self.features 
= self.load_features() - self.examples = self.load_examples() - self.feature_dict = self.get_feature_dict() - self.example_dict = self.get_example_dict() - self.features = self.padding_feature(self.features, self.bsz) -``` - -## 定义网络 - -### 设置模型参数 - -模型参数中用户可以自定义设置topk及onehop_num等参数。topk表示Retriever排序后候选一跳文档个数,topk越大,候选文档越多,召回率提高但会引入更多噪声,准确率下降;onehop_num表示一跳候选文档作为二跳待选文档的数目,onehop_num越大,二跳待选文档越多,召回率提高但会引入更多噪声,准确率下降。 - -```python -def ThinkRetrieverConfig(): - """retriever config""" - parser = argparse.ArgumentParser() - parser.add_argument("--q_len", type=int, default=64, help="max query len") - parser.add_argument("--d_len", type=int, default=192, help="max doc len") - parser.add_argument("--s_len", type=int, default=448, help="max seq len") - parser.add_argument("--in_len", type=int, default=768, help="in len") - parser.add_argument("--out_len", type=int, default=1, help="out len") - parser.add_argument("--num_docs", type=int, default=500, help="docs num") - parser.add_argument("--topk", type=int, default=8, help="top num") - parser.add_argument("--onehop_num", type=int, default=8, help="onehop num") - parser.add_argument("--batch_size", type=int, default=1, help="batch size") - parser.add_argument("--device_num", type=int, default=8, help="device num") - parser.add_argument("--vocab_path", type=str, default='../vocab.txt', help="vocab path") - parser.add_argument("--wiki_path", type=str, default='../db_docs_bidirection_new.pkl', help="wiki path") - parser.add_argument("--dev_path", type=str, default='../hotpot_dev_fullwiki_v1_for_retriever.json', - help="dev path") - parser.add_argument("--dev_data_path", type=str, default='../dev_tf_idf_data_raw.pkl', help="dev data path") - parser.add_argument("--onehop_bert_path", type=str, default='../onehop.ckpt', help="onehop bert ckpt path") - parser.add_argument("--onehop_mlp_path", type=str, default='../onehop_mlp.ckpt', help="onehop mlp ckpt path") - parser.add_argument("--twohop_bert_path", type=str, 
default='../twohop.ckpt', help="twohop bert ckpt path") - parser.add_argument("--twohop_mlp_path", type=str, default='../twohop_mlp.ckpt', help="twohop mlp ckpt path") - parser.add_argument("--q_path", type=str, default='../queries', help="queries data path") - return parser.parse_args() -``` - -### 定义模型 - -定义Retriever模块并加载模型参数。 - -```python -def evaluation(): - model_onehop_bert = ModelOneHop() - param_dict = load_checkpoint(config.onehop_bert_path) - load_param_into_net(model_onehop_bert, param_dict) - model_twohop_bert = ModelTwoHop() - param_dict2 = load_checkpoint(config.twohop_bert_path) - load_param_into_net(model_twohop_bert, param_dict2) - onehop = OneHopBert(config, model_onehop_bert) - twohop = TwoHopBert(config, model_twohop_bert) -``` - -定义Reranker模块并加载模型参数。 - -```python - reranker = Reranker(batch_size=batch_size, - encoder_ck_file=encoder_ck_file, - downstream_ck_file=downstream_ck_file) -``` - -定义Reader模块并加载模型参数。 - -```python - reader = Reader(batch_size=batch_size, - encoder_ck_file=encoder_ck_file, - downstream_ck_file=downstream_ck_file) -``` - -## 推理网络 - -### 运行脚本 - -调用scripts目录下的shell脚本,启动推理进程。使用以下命令运行脚本: - -```shell -sh run_eval_ascend.sh -sh run_eval_ascend_reranker_reader.sh -``` - -推理完成后,结果保存到scripts/eval/目录下的log文件中,可以在对应log文件中查看测评结果。 - -Retriever模块测评结果:其中val表示找对答案文档的问题数目,count表示问题总数目,PEM表示问题相关文档排序后top-8文档的精确匹配的准确率。 - -```python -# match query num -val:6959 -# query num -count:7404 -# one hop match query num -true count: 7112 -# top8 paragraph exact match -PEM: 0.9398973527822798 -# top8 paragraph exact match in recall -true top8 PEM: 0.9784870641169854 -# evaluation time -evaluation time (h): 1.819070938428243 -``` - -Reranker和Reader模块测评结果,其中total_top1_pem表示重排序之后top-1路径的精确匹配的准确率,joint_em表示预测的答案和证据的精确匹配的联合准确率,joint_f1表示预测的答案和证据的联合f1 score。 - -```python -# top8 paragraph exact match -total top1 pem: 0.8803511141120864 -... 
- -# answer exact match -em: 0.67440918298447 -# answer f1 -f1: 0.8025625656569652 -# answer precision -prec: 0.8292800393689271 -# answer recall -recall: 0.8136908451841731 -# supporting facts exact match -sp_em: 0.6009453072248481 -# supporting facts f1 -sp_f1: 0.844555664157302 -# supporting facts precision -sp_prec: 0.8640844345841021 -# supporting facts recall -sp_recall: 0.8446123918845106 -# joint exact match -joint_em: 0.4537474679270763 -# joint f1 -joint_f1: 0.715119580346802 -# joint precision -joint_prec: 0.7540052057184267 -# joint recall -joint_recall: 0.7250240424067661 -``` - -## 引用 - -1. Yang Z , Qi P , Zhang S , et al. HotpotQA: A Dataset for Diverse, Explainable Multi-hop Question Answering[C]// Proceedings of the 2018 Conference on Empirical Methods in Natural Language Processing. 2018. -2. Asai A , Hashimoto K , Hajishirzi H , et al. Learning to Retrieve Reasoning Paths over Wikipedia Graph for Question Answering[J]. 2019. diff --git a/tutorials/inference/source_zh_cn/serving_distributed_example.md b/tutorials/inference/source_zh_cn/serving_distributed_example.md deleted file mode 100644 index f5db9de14f60427b6ee753c0ee7c4e3ce0c5ec32..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/serving_distributed_example.md +++ /dev/null @@ -1,267 +0,0 @@ -# 基于MindSpore Serving部署分布式推理服务 - -`Linux` `Ascend` `Serving` `中级` `高级` - - - -- [基于Mindspore Serving部署分布式推理服务](#基于mindspore-serving部署分布式推理服务) - - [概述](#概述) - - [环境准备](#环境准备) - - [导出分布式模型](#导出分布式模型) - - [部署分布式推理服务](#部署分布式推理服务) - - [使用限制](#使用限制) - - [启动Master与分布式Worker](#启动master与分布式worker) - - [启动Agent](#启动agent) - - [执行推理](#执行推理) - - - - - -## 概述 - -分布式推理是指推理阶段采用多卡进行推理,针对超大规模神经网络模型参数个数过多、模型无法完全加载至单卡中进行推理的问题,可利用多卡进行分布式推理。本文介绍部署分布式推理服务的流程,与[单卡推理服务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_example.html)部署流程大致相同,可以相互参考。 - -分布式推理服务的架构如图所示: - -![image](images/distributed_servable.png) - 
-master提供客户端访问的接口,管理分布式worker并进行任务管理与分发;分布式worker根据模型配置自动调度agent完成分布式推理;每一个agent包含一个分布式模型的切片,占用一个device,加载模型执行推理。 - -上图展示了rank_size为16,stage_size为2的场景,每个stage包含8个agent,占用8个device。rank_size表示推理使用的device的个数,stage表示流水线的一段,stage_size表示流水线的段数。分布式worker向agent发送推理请求并从agent获取推理结果。agent之间使用HCCL通信。 - -当前对分布式模型有以下限制: - -- 第一个stage的模型接收相同的输入数据。 -- 其他的stage的模型不接收数据。 -- 最后一个stage的所有模型都返回相同的数据。 -- 仅支持Ascend 910推理。 - -下面以一个简单的分布式网络MatMul为例,演示部署流程。 - -### 环境准备 - -运行示例前,需确保已经正确安装了MindSpore Serving。如果没有,可以参考[MindSpore Serving安装页面](https://gitee.com/mindspore/serving/blob/master/README_CN.md#%E5%AE%89%E8%A3%85),将MindSpore Serving正确地安装到你的电脑当中,同时参考[MindSpore Serving环境配置页面](https://gitee.com/mindspore/serving/blob/master/README_CN.md#%E9%85%8D%E7%BD%AE%E7%8E%AF%E5%A2%83%E5%8F%98%E9%87%8F)完成环境变量配置。 - -### 导出分布式模型 - -导出分布式模型需要的文件可以参考[export_model目录](https://gitee.com/mindspore/serving/tree/master/example/matmul_distributed/export_model),需要如下文件列表: - -```text -export_model -├── distributed_inference.py -├── export_model.sh -├── net.py -└── rank_table_8pcs.json -``` - -- `net.py`为MatMul网络定义。 -- `distributed_inference.py`配置分布式相关的参数。 -- `export_model.sh`在当前机器上创建`device`目录并且导出每个`device`对应的模型文件。 -- `rank_table_8pcs.json`为配置当前多卡环境的组网信息的json文件,可以参考[rank_table](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html#id4)。 - -使用[net.py](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/export_model/net.py),构造一个包含MatMul、Neg算子的网络。 - -```python -import numpy as np -from mindspore import Tensor, Parameter, ops -from mindspore.nn import Cell - - -class Net(Cell): - def __init__(self, matmul_size, transpose_a=False, transpose_b=False, strategy=None): - super().__init__() - matmul_np = np.full(matmul_size, 0.5, dtype=np.float32) - self.matmul_weight = Parameter(Tensor(matmul_np)) - self.matmul = ops.MatMul(transpose_a=transpose_a, transpose_b=transpose_b) - self.neg = ops.Neg() - if strategy is not None: - self.matmul.shard(strategy) - - 
def construct(self, inputs): - x = self.matmul(inputs, self.matmul_weight) - x = self.neg(x) - return x -``` - -使用[distributed_inference.py](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/export_model/distributed_inference.py), 配置分布式模型。可以参考[分布式推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_910.html#id1)。 - -```python -import numpy as np -from net import Net -from mindspore import context, Model, Tensor, export -from mindspore.communication import init - - -def test_inference(): - """distributed inference after distributed training""" - context.set_context(mode=context.GRAPH_MODE) - init(backend_name="hccl") - context.set_auto_parallel_context(full_batch=True, parallel_mode="semi_auto_parallel", - device_num=8, group_ckpt_save_file="./group_config.pb") - - predict_data = create_predict_data() - network = Net(matmul_size=(96, 16)) - model = Model(network) - model.infer_predict_layout(Tensor(predict_data)) - export(model._predict_network, Tensor(predict_data), file_name="matmul", file_format="MINDIR") - - -def create_predict_data(): - """user-defined predict data""" - inputs_np = np.random.randn(128, 96).astype(np.float32) - return Tensor(inputs_np) -``` - -使用[export_model.sh](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/export_model/export_model.sh),导出分布式模型。执行成功后会在上一级目录创建`model`目录,结构如下: - -```text -model -├── device0 -│   ├── group_config.pb -│   └── matmul.mindir -├── device1 -├── device2 -├── device3 -├── device4 -├── device5 -├── device6 -└── device7 -``` - -每个`device`目录都包含两个文件`group_config.pb`和`matmul.mindir`,分别表示模型分组配置文件与模型文件。 - -### 部署分布式推理服务 - -启动分布式推理服务,可以参考[matmul_distributed](https://gitee.com/mindspore/serving/tree/master/example/matmul_distributed),需要如下文件列表: - -```text -matmul_distributed -├── agent.py -├── master_with_worker.py -├── matmul -│   └── servable_config.py -├── model -└── rank_table_8pcs.json -``` - -- `model`为存放模型文件的目录。 -- 
`master_with_worker.py`为启动服务脚本。 -- `agent.py`为启动agent脚本。 -- `servable_config.py`为[模型配置文件](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_model.html),通过`declare_distributed_servable`声明了一个rank_size为8、stage_size为1的分布式模型,同时定义了一个分布式servable的方法`predict`。 - -模型配置文件内容如下: - -```python -from mindspore_serving.worker import distributed -from mindspore_serving.worker import register - -distributed.declare_distributed_servable(rank_size=8, stage_size=1, with_batch_dim=False) - - -@register.register_method(output_names=["y"]) -def predict(x): - y = register.call_servable(x) - return y -``` - -#### 启动master与分布式worker - -使用[master_with_worker.py](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/master_with_worker.py),调用`start_distributed_servable_in_master`方法部署共进程的master和分布式worker。 - -```python -import os -import sys -from mindspore_serving import master -from mindspore_serving.worker import distributed - - -def start(): - servable_dir = os.path.dirname(os.path.realpath(sys.argv[0])) - distributed.start_distributed_servable_in_master(servable_dir, "matmul", - rank_table_json_file="rank_table_8pcs.json", - version_number=1, - worker_ip="127.0.0.1", worker_port=6200, - wait_agents_time_in_seconds=0) - master.start_grpc_server("127.0.0.1", 5500) - master.start_restful_server("127.0.0.1", 1500) - - -if __name__ == "__main__": - start() -``` - -- `servable_dir`为servable存放的目录。 -- `servable_name`为servable的名称,对应一个存放模型配置文件的目录。 -- `rank_table_json_file`为配置当前多卡环境的组网信息的json文件。 -- `worker_ip` 为分布式worker的ip地址。 -- `worker_port`为分布式worker的port。 -- `wait_agents_time_in_seconds`设置等待所有agent注册完成的时限,默认为0表示会一直等待。 - -#### 启动Agent - -使用[agent.py](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/agent.py),调用`startup_worker_agents`方法会在当前机器上启动的8个agent进程。agent会从分布式worker获取rank_table,这样agent之间才能利用HCCL进行通信。 - -```python -from mindspore_serving.worker import distributed - - -def start_agents(): - """Start all the worker agents in current 
machine""" - model_files = [] - group_configs = [] - for i in range(8): - model_files.append(f"model/device{i}/matmul.mindir") - group_configs.append(f"model/device{i}/group_config.pb") - - distributed.startup_worker_agents(worker_ip="127.0.0.1", worker_port=6200, model_files=model_files, - group_config_files=group_configs, agent_start_port=7000, agent_ip=None, - rank_start=None) - - -if __name__ == '__main__': - start_agents() -``` - -- `worker_ip`为分布式worker的ip地址。 -- `worker_port`为分布式worker的port。 -- `model_files`为模型文件路径的列表。 -- `group_config_files`为模型分组配置文件路径的列表。 -- `agent_start_port`表示agent占用的起始端口,默认为7000。 -- `agent_ip`为agent的ip地址,默认为None。agent与分布式worker通信的ip默认会从rank_table获取,如果该ip地址不可用,则需要同时设置`agent_ip`与`rank_start`。 -- `rank_start`为当前机器起始的rank_id,默认为None。 - -### 执行推理 - -通过gRPC访问推理服务,client需要指定gRPC服务器的ip地址和port。运行[client.py](https://gitee.com/mindspore/serving/blob/master/example/matmul_distributed/client.py),调用matmul分布式模型的`predict`方法,执行推理。 - -```python -import numpy as np -from mindspore_serving.client import Client - - -def run_matmul(): - """Run client of distributed matmul""" - client = Client("localhost", 5500, "matmul", "predict") - instance = {"x": np.ones((128, 96), np.float32)} - result = client.infer(instance) - print("result:\n", result) - - -if __name__ == '__main__': - run_matmul() -``` - -执行后显示如下返回值,说明Serving分布式推理服务已正确执行MatMul网络的推理: - -```text -result: -[{'y': array([[-48., -48., -48., ..., -48., -48., -48.], - [-48., -48., -48., ..., -48., -48., -48.], - [-48., -48., -48., ..., -48., -48., -48.], - ..., - [-48., -48., -48., ..., -48., -48., -48.], - [-48., -48., -48., ..., -48., -48., -48.], - [-48., -48., -48., ..., -48., -48., -48.]], dtype=float32)}] -``` - diff --git a/tutorials/inference/source_zh_cn/serving_example.md b/tutorials/inference/source_zh_cn/serving_example.md deleted file mode 100644 index 428ad0b8aad5114ce3c429df9000868cb789c9a4..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/serving_example.md +++ 
/dev/null @@ -1,261 +0,0 @@ -# 基于MindSpore Serving部署推理服务 - -`Linux` `Ascend` `GPU` `Serving` `初级` `中级` `高级` - - - -- [基于MindSpore Serving部署推理服务](#基于mindspore-serving部署推理服务) - - [概述](#概述) - - [环境准备](#环境准备) - - [导出模型](#导出模型) - - [部署Serving推理服务](#部署serving推理服务) - - [轻量级部署](#轻量级部署) - - [集群部署](#集群部署) - - [执行推理](#执行推理) - - - - - -## 概述 - -MindSpore Serving是一个轻量级、高性能的服务模块,旨在帮助MindSpore开发者在生产环境中高效部署在线推理服务。当用户使用MindSpore完成模型训练后,导出MindSpore模型,即可使用MindSpore Serving创建该模型的推理服务。 - -本文以一个简单的Add网络为例,演示MindSpore Serving如何使用。 - -### 环境准备 - -运行示例前,需确保已经正确安装了MindSpore Serving。如果没有,可以通过[MindSpore Serving安装页面](https://gitee.com/mindspore/serving/blob/master/README_CN.md#%E5%AE%89%E8%A3%85),将MindSpore Serving正确地安装到你的电脑当中,同时通过[MindSpore Serving环境配置页面](https://gitee.com/mindspore/serving/blob/master/README_CN.md#%E9%85%8D%E7%BD%AE%E7%8E%AF%E5%A2%83%E5%8F%98%E9%87%8F)完成环境变量配置。 - -### 导出模型 - -使用[add_model.py](https://gitee.com/mindspore/serving/blob/master/example/add/export_model/add_model.py),构造一个只有Add算子的网络,并导出MindSpore推理部署模型。 - -```python -import os -from shutil import copyfile -import numpy as np - -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -import mindspore as ms - -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") - - -class Net(nn.Cell): - """Define Net of add""" - - def __init__(self): - super(Net, self).__init__() - self.add = ops.Add() - - def construct(self, x_, y_): - """construct add net""" - return self.add(x_, y_) - - -def export_net(): - """Export add net of 2x2 + 2x2, and copy output model `tensor_add.mindir` to directory ../add/1""" - x = np.ones([2, 2]).astype(np.float32) - y = np.ones([2, 2]).astype(np.float32) - add = Net() - output = add(ms.Tensor(x), ms.Tensor(y)) - ms.export(add, ms.Tensor(x), ms.Tensor(y), file_name='tensor_add', file_format='MINDIR') - dst_dir = '../add/1' - try: - os.mkdir(dst_dir) - except OSError: - pass - - dst_file = os.path.join(dst_dir, 'tensor_add.mindir') - 
copyfile('tensor_add.mindir', dst_file) - print("copy tensor_add.mindir to " + dst_dir + " success") - - print(x) - print(y) - print(output.asnumpy()) - - -if __name__ == "__main__": - export_net() -``` - -使用MindSpore定义神经网络需要继承`mindspore.nn.Cell`。Cell是所有神经网络的基类。神经网络的各层需要预先在`__init__`方法中定义,然后通过定义`construct`方法来完成神经网络的前向构造。使用`mindspore`模块的`export`即可导出模型文件。 -更为详细完整的示例可以参考[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)。 - -执行`add_model.py`脚本,生成`tensor_add.mindir`文件,该模型的输入为两个shape为[2,2]的二维Tensor,输出结果是两个输入Tensor之和。 - -### 部署Serving推理服务 - -启动Serving服务,以Add用例为例,需要如下文件列表: - -```shell -test_dir -├── add/ -│   └── servable_config.py -│  └── 1/ -│   └── tensor_add.mindir -└── master_with_worker.py -``` - -- `master_with_worker.py`为启动服务脚本文件。 -- `add`为模型文件夹,文件夹名即为模型名。 -- `tensor_add.mindir`为上一步网络生成的模型文件,放置在文件夹1下,1为版本号,不同的版本放置在不同的文件夹下,版本号需以纯数字串命名,默认配置下启动最大数值的版本号的模型文件。 -- [servable_config.py](https://gitee.com/mindspore/serving/blob/master/example/add/add/servable_config.py)为[模型配置文件](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_model.html),其定义了模型的处理函数,包括`add_common`和`add_cast`两个方法,`add_common`定义了输入为两个普通float32类型的加法操作,`add_cast`定义输入类型为其他类型,经过输入类型转换float32后的加法操作。 - -模型配置文件内容如下: - -```python -import numpy as np -from mindspore_serving.worker import register - - -def add_trans_datatype(x1, x2): - """define preprocess, this example has two input and two output""" - return x1.astype(np.float32), x2.astype(np.float32) - - -# when with_batch_dim is set to False, only 2x2 add is supported -# when with_batch_dim is set to True(default), Nx2 add is supported, while N is viewed as batch -# float32 inputs/outputs -register.declare_servable(servable_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False) - - -# register add_common method in add -@register.register_method(output_names=["y"]) -def add_common(x1, x2): # only support float32 inputs - """method add_common data flow definition, only call model 
inference""" - y = register.call_servable(x1, x2) - return y - - -# register add_cast method in add -@register.register_method(output_names=["y"]) -def add_cast(x1, x2): - """method add_cast data flow definition, only call preprocess and model inference""" - x1, x2 = register.call_preprocess(add_trans_datatype, x1, x2) # cast input to float32 - y = register.call_servable(x1, x2) - return y -``` - -MindSpore Serving提供两种部署方式,轻量级部署和集群部署。轻量级部署master和worker在一个进程中,集群部署方式master和worker部署在不同的进程中。当只有一个worker节点时,用户可以考虑轻量级部署,即将master部署在这个worker所在进程中;当worker节点有多个,为了充分利用资源,可以考虑集群部署方式,选取一台机器作为master,管理所有的worker节点。用户可根据需要进行选择部署。 - -#### 轻量级部署 - -服务端调用Python接口直接启动推理进程(master和worker共进程),客户端直接连接推理服务后下发推理任务。 -执行[master_with_worker.py](https://gitee.com/mindspore/serving/blob/master/example/add/master_with_worker.py),完成轻量级部署服务如下: - -```python -import os -from mindspore_serving import master -from mindspore_serving import worker - -def start(): - servable_dir = os.path.abspath(".") - worker.start_servable_in_master(servable_dir, "add", device_id=0) - master.start_grpc_server("127.0.0.1", 5500) - -if __name__ == "__main__": - start() -``` - -当服务端打印日志`Serving gRPC start success, listening on 0.0.0.0:5500`时,表示Serving服务已加载推理模型完毕。 - -#### 集群部署 - -服务端由master进程和worker进程组成,master用来管理集群内所有的worker节点,并进行推理任务的分发。部署方式如下: - -部署master: - -```python -import os -from mindspore_serving import master - -def start(): - servable_dir = os.path.abspath(".") - master.start_grpc_server("127.0.0.1", 5500) - master.start_master_server("127.0.0.1", 6500) -if __name__ == "__main__": - start() -``` - -部署worker: - -```python -import os -from mindspore_serving import worker - -def start(): - servable_dir = os.path.abspath(".") - worker.start_servable(servable_dir, "add", device_id=0, - master_ip="127.0.0.1", master_port=6500, - worker_ip="127.0.0.1", worker_port=6600) - -if __name__ == "__main__": - start() -``` - 
-轻量级部署和集群部署启动worker所使用的接口存在差异,其中,轻量级部署使用`start_servable_in_master`接口启动worker,集群部署使用`start_servable`接口启动worker。 - -### 执行推理 - -客户端提供两种方式访问推理服务,一种是通过[gRPC方式](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_grpc.html),一种是通过[RESTful方式](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_restful.html),本文以gRPC方式为例。 -使用[client.py](https://gitee.com/mindspore/serving/blob/master/example/add/client.py),启动Python客户端。 - -```python -import numpy as np -from mindspore_serving.client import Client - - -def run_add_common(): - """invoke servable add method add_common""" - client = Client("localhost", 5500, "add", "add_common") - instances = [] - - # instance 1 - x1 = np.asarray([[1, 1], [1, 1]]).astype(np.float32) - x2 = np.asarray([[1, 1], [1, 1]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - # instance 2 - x1 = np.asarray([[2, 2], [2, 2]]).astype(np.float32) - x2 = np.asarray([[2, 2], [2, 2]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - # instance 3 - x1 = np.asarray([[3, 3], [3, 3]]).astype(np.float32) - x2 = np.asarray([[3, 3], [3, 3]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - result = client.infer(instances) - print(result) - - -def run_add_cast(): - """invoke servable add method add_cast""" - client = Client("localhost", 5500, "add", "add_cast") - instances = [] - x1 = np.ones((2, 2), np.int32) - x2 = np.ones((2, 2), np.int32) - instances.append({"x1": x1, "x2": x2}) - result = client.infer(instances) - print(result) - - -if __name__ == '__main__': - run_add_common() - run_add_cast() -``` - -使用`mindspore_serving.client`定义的`Client`类,客户端定义两个用例,分别调用模型的两个方法,`run_add_common`用例为三对float32类型数组相加操作,`run_add_cast`用例计算两个int32数组相加操作。执行后显示如下返回值,三对float32类型相加结果合集和一对int32类型的相加结果,说明Serving服务已正确执行Add网络的推理。 - -```shell -[{'y': array([[2. , 2.], - [2., 2.]], dtype=float32)},{'y': array([[4. , 4.], - [4., 4.]], dtype=float32)},{'y': array([[6. , 6.], - [6., 6.]], dtype=float32)}] -[{'y': array([[2. 
, 2.], - [2., 2.]], dtype=float32)}] -``` diff --git a/tutorials/inference/source_zh_cn/serving_grpc.md b/tutorials/inference/source_zh_cn/serving_grpc.md deleted file mode 100644 index 1ea28a23be2c4829305dd130b1cf53ef717c60b3..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/serving_grpc.md +++ /dev/null @@ -1,158 +0,0 @@ -# 基于gRPC接口访问MindSpore Serving服务 - -`Linux` `Ascend` `GPU` `Serving` `初级` `中级` `高级` - - - -- [基于gRPC接口访问MindSpore Serving服务](#基于grpc接口访问mindspore-serving服务) - - [概述](#概述) - - [add样例](#add样例) - - [ResNet-50样例](#resnet50样例) - - - - - -## 概述 - -MindSpore Serving提供gRPC接口访问Serving服务。在Python环境下,我们提供[mindspore_serving.client](https://gitee.com/mindspore/serving/blob/master/mindspore_serving/client/python/client.py) 模块用于填写请求、解析回复。gRPC服务端(worker节点)当前仅支持Ascend平台,客户端运行不依赖特定硬件环境。接下来我们通过`add`和`ResNet-50`样例来详细说明gRPC Python客户端接口的使用。 - -## add样例 - -样例来源于[add example](https://gitee.com/mindspore/serving/blob/master/example/add/client.py) ,`add` Servable提供的`add_common`方法提供两个2x2 Tensor相加功能。其中gRPC Python客户端代码如下所示,一次gRPC请求包括了三对独立的2x2 Tensor: - -```python -from mindspore_serving.client import Client -import numpy as np - - -def run_add_common(): - """invoke Servable add method add_common""" - client = Client("localhost", 5500, "add", "add_common") - instances = [] - - # instance 1 - x1 = np.asarray([[1, 1], [1, 1]]).astype(np.float32) - x2 = np.asarray([[1, 1], [1, 1]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - # instance 2 - x1 = np.asarray([[2, 2], [2, 2]]).astype(np.float32) - x2 = np.asarray([[2, 2], [2, 2]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - # instance 3 - x1 = np.asarray([[3, 3], [3, 3]]).astype(np.float32) - x2 = np.asarray([[3, 3], [3, 3]]).astype(np.float32) - instances.append({"x1": x1, "x2": x2}) - - result = client.infer(instances) - print(result) - - -if __name__ == '__main__': - run_add_common() -``` - 
-按照[入门流程](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_example.html) 导出模型、启动Serving服务器,并执行上述客户端代码。当运行正常后,将打印以下结果,为了展示方便,格式作了调整: - -```python -[{'y': array([[2., 2.], [2., 2.]], dtype=float32)}, - {'y': array([[4., 4.], [4., 4.]], dtype=float32)}, - {'y': array([[6., 6.], [6., 6.]], dtype=float32)}] -``` - -以下将对其中的细节进行说明。 - -1. 构造`Client`。 - - 构造`Client`时,指示Serving的ip和端口号,并给定Servable名称和它提供的方法。这里的Servable可以是单个模型,也可以是多个模型的组合,多个模型组合提供Servable当前尚未支持,一个Servable可以通过提供多种方法来提供不同的服务。 - - 上面的`add`样例, Serving运行在本地(`localhost`),指定的gRPC端口号为`5500`,运行了`add` Servable,`add` Servable提供了`add_common`方法。 - -2. 添加实例。 - - 每次gRPC请求可包括一个或多个实例,每个实例之间相互独立,结果互不影响。 - - 比如:`add` Servable提供的`add_common`方法提供两个2x2 Tensor相加功能,即一个实例包含两个2x2 Tensor输入,一个2x2 Tensor输出。一次请求可包括一个、两个或者多个这样的实例,针对每个实例返回一个结果。上述`add`样例提供了三个实例,预期将返回三个实例的结果。 - - `Client.infer`接口入参可为一个或多个实例输入组成的list、tuple或者单个实例输入。每个实例输入由输入的名称和输入的值组成python字典,值可以是以下格式: - - - `numpy array`:用以表示Tensor。例如:np.ones((3,224), np.float32)。 - - `numpy number`:用以表示Scalar。例如:np.int8(5)。 - - `python bool int float`:用以表示Scalar, 当前int将作为int64, float将作为float64。例如:32.0。 - - `python str`:用以表示字符串。例如:"this is a text"。 - - `python bytes`:用以表示二进制数据。例如:图片数据。 - - 上面的add样例,`add` Servable提供的`add_common`方法入参名为`x1`和`x2`,添加每个实例时指定每个输入的值。 - -3. 
获取推理结果。 - - 通过`Client.infer`填入一个或多个实例。 - 返回可能有以下形式: - - - 所有实例推理正确: - - ```shell - [{'y': array([[2., 2.], [2., 2.]], dtype=float32)}, - {'y': array([[4., 4.], [4., 4.]], dtype=float32)}, - {'y': array([[6., 6.], [6., 6.]], dtype=float32)}] - ``` - - - 针对所有实例共同的错误,返回一个包含`error`的dict。将例子中Client构造时填入的`add_common`改为`add_common2`,将返回结果: - - ```shell - {'error', 'Request Servable(add) method(add_common2), method is not available'} - ``` - - - 部分实例推理错误,出错的推理实例将返回包含`error`的dict。将instance2一个输入的`dtype`改为`np.int32`,将返回结果: - - ```shell - [{'y': array([[2., 2.], [2., 2.]], dtype=float32)}, - {'error': 'Given model input 1 data type kMSI_Int32 not match ...'}, - {'y': array([[6., 6.], [6., 6.]], dtype=float32)}] - ``` - - 每个实例返回一个dict,key的值来自于Servable的方法定义,例如本例子中,`add` Servable提供的`add_common`方法输出仅有一个,为`y`。value为以下格式: - - | Serving输出类型 | Client返回类型 | 说明 | 举例 | - | ---- | ---- | ---- | ---- | - | Tensor | numpy array | tensor array | np.ones((3,224), np.float32) | - | Scalar:
int8, int16, int32, int64,
uint8, uint16, uint32, uint64,
bool, float16, float32, float64 | numpy scalar | Scalar格式的数据转为numpy scalar | np.int8(5) | - | String | python str | 字符串格式输出转为python str | "news_car" | - | Bytes | python bytes | 二进制格式输出转为python bytes | 图片数据 | - -## ResNet-50样例 - -样例来源于[ResNet-50 example](https://gitee.com/mindspore/serving/blob/master/example/resnet/client.py),`ResNet-50` Servable提供的`classify_top1`方法提供对图像进行识别的服务。`classify_top1`方法输入为图像数据,输出为字符串,方法中预处理对图像进行解码、Resize等操作,接着进行推理,并通过后处理返回得分最大的分类标签。 - -```python -import os -from mindspore_serving.client import Client - - -def run_classify_top1(): - client = Client("localhost", 5500, "resnet50", "classify_top1") - instances = [] - for path, _, file_list in os.walk("./test_image/"): - for file_name in file_list: - image_file = os.path.join(path, file_name) - print(image_file) - with open(image_file, "rb") as fp: - instances.append({"image": fp.read()}) - result = client.infer(instances) - print(result) - - -if __name__ == '__main__': - run_classify_top1() -``` - -`ResNet-50` Servable提供的`classify_top1`方法需要用户提供输入`image`,上面例子中,每个实例的输入`image`为图像的二进制数据。 -正常结束执行后,预期将会有以下打印: - -```shell -[{'label': 'tabby, tabby cat'}, {'label': 'ox'}] -``` - -如果Resnet50模型未训练,可能有其他未知分类结果。 diff --git a/tutorials/inference/source_zh_cn/serving_model.md b/tutorials/inference/source_zh_cn/serving_model.md deleted file mode 100644 index 1db3cb93e641c6ad28faab43069580209a1dab2e..0000000000000000000000000000000000000000 --- a/tutorials/inference/source_zh_cn/serving_model.md +++ /dev/null @@ -1,249 +0,0 @@ -# 通过配置模型提供Servable - -`Linux` `Ascend` `GPU` `Serving` `初级` `中级` `高级` - - - -- [通过配置模型提供Servable](#通过配置模型提供servable) - - [概述](#概述) - - [相关概念](#相关概念) - - [预处理和后处理](#预处理和后处理) - - [方法](#方法) - - [实例](#实例) - - [模型配置](#模型配置) - - [预处理和后处理定义](#预处理和后处理定义) - - [模型声明](#模型声明) - - [方法定义](#方法定义) - - - - - -## 概述 - -MindSpore Serving当前仅支持Ascend 310和Ascend 910环境。 - -MindSpore Serving的Servable提供推理服务,包含两种类型。一种是推理服务来源于单模型,一种是推理服务来源于多模型组合,多模型组合正在开发中。模型需要进行配置以提供Serving推理服务。 - 
-本文将说明如何对单模型进行配置以提供Servable,以下所有Servable配置说明针对的是单模型Servable,Serving客户端简称客户端。 - -本文以ResNet-50作为样例介绍如何配置模型提供Servable。样例代码可参考[ResNet-50样例](https://gitee.com/mindspore/serving/tree/master/example/resnet/) 。 - -## 相关概念 - -### 预处理和后处理 - -模型提供推理能力,模型的每个输入和输出的数据类型、数据长度、Shape是固定的。 - -如果客户端发来的数据不能直接满足模型输入要求,需要通过预处理转化为满足模型输入的数据。 -如果模型的输出不直接提供给客户端,需要通过后处理转化为所需的输出数据。 - -以下图是`resnet50` Servable数据流程图,描述了图像数据从Serving客户端通过网络传输到Serving,Serving进行预处理、推理和后处理,最后向Serving客户端返回结果: - -![image](images/resnet_example.png) - -针对Resnet50推理模型,客户端发来的数据为jpg、png等格式的图片,预期返回图片的分类。Resnet模型输入为经过图片`Decode`、`Resize`、`Normalize`等操作产生的Tensor,输出为每个类别的得分Tensor。需要通过预处理将图片转化为满足模型输入的Tensor,通过后处理返回**得分最大的类别名称**或者**前5类别名称及其得分**。 - -在不同的场景下,如果来自客户端的数据输入组成、结构或类型不同,可以提供不同的预处理。如果对模型的输出也有不同的要求,可以提供不同的后处理。比如上述`resnet50` Servable,针对返回**得分最大的类别名称**还是**前5类别名称及其得分**这两种场景提供了两个后处理。 - -### 方法 - -上述的`resnet` Servable提供了`classify_top5`和`classify_top1`两个方法(`Method`)。`classify_top5`输入为`image`,输出为`label`和`score`,返回前5的分类名称和得分。`classify_top1`预处理和`classify_top5`一致,而后处理不同,输入为`image`,输出为`label`,返回最大得分的分类名称。 - -一个Servable可提供一个或多个方法,Servable的名称和方法的名称标记了Serving提供的一个服务,每个方法对客户端提供的数据进行可选的预处理,接着进行模型推理,对模型的推理结果进行可选的后处理,最后将需要的结果返回给客户端。 - -Servable包含如下内容: - -- 指定可选的预处理和可选的后处理; -- 定义方法输入、预处理、模型、后处理、方法输出之间的数据流,前者可作为后者的输入。比如方法输出的值可来源于方法输入、预处理、模型或后处理; -- 指定方法名,使客户端可以通过方法名指定使用的方法; -- 指定方法的输入和输出名称,使客户端可以通过名称来指定输入、获取输出。 - -### 实例 - -每次请求可包括一个或多个实例,每个实例之间相互独立,结果互不影响。比如一张图片返回一个分类类别,三张独立的图片独立返回三个分类类别。 - -## 模型配置 - -以Resnet50模型为例,模型配置文件目录结果如下图所示: - -```shell -resnet50 -├── 1 -│   └── resnet_classify.mindir -├── 2 -│   └── resnet_classify.mindir -└── servable_config.py -``` - -- 目录`resnet50`指示Servable的名称。 - -- 通过`servable_config.py`配置Servable,其中包括预处理和后处理定义、模型声明、方法定义。 - -- 目录`1`和`2`表示版本`1`和版本`2`的模型,模型版本为正整数,从`1`开始,数字越大表示版本越新。 - -- `resnet_classify.mindir`为模型文件,Servable启动会加载对应版本的模型文件。 - -### 预处理和后处理定义 - -预处理和后处理定义方式例子如下: - -```python -import mindspore.dataset as ds -import mindspore.dataset.transforms.c_transforms as TC -import 
mindspore.dataset.vision.c_transforms as VC - -def preprocess_eager(image): - """ - Define preprocess, input is image numpy, return preprocess result. - Return type can be numpy, str, bytes, int, float, or bool. - Use MindData Eager, this image processing can also use other image processing library, likes numpy, PIL or cv2 etc. - """ - image_size = 224 - mean = [0.485 * 255, 0.456 * 255, 0.406 * 255] - std = [0.229 * 255, 0.224 * 255, 0.225 * 255] - - decode = VC.Decode() - resize = VC.Resize([image_size, image_size]) - normalize = VC.Normalize(mean=mean, std=std) - hwc2chw = VC.HWC2CHW() - - image = decode(image) - image = resize(image) - image = normalize(image) - image = hwc2chw(image) - return image - -def postprocess_top1(score): - """ - Define postprocess. This example has one input and one output. - The input is the numpy tensor of the score, and the output is the label str of top one. - """ - max_idx = np.argmax(score) - return idx_2_label[max_idx] - - -def postprocess_top5(score): - """ - Define postprocess. This example has one input and two outputs. - The input is the numpy tensor of the score. The first output is the str joined by labels of top five, and the second output is the score tensor of the top five. 
- """ - idx = np.argsort(score)[::-1][:5] # top 5 - ret_label = [idx_2_label[i] for i in idx] - ret_score = score[idx] - return ";".join(ret_label), ret_score -``` - -预处理和后处理定义格式相同,入参为每个实例的输入数据。输入数据为文本时,入参为str对象;输入数据为其他数据类型,包括Tensor、Scalar number、Bool、Bytes时,入参为**numpy对象**。通过`return`返回实例的处理结果,`return`返回的数据可为**numpy、Python的bool、int、float、str、或bytes**单个数据对象或者由它们组成的tuple。 - -预处理和后处理输入的来源和输出的使用由[方法定义](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_model.html#id9)决定。 - -### 模型声明 - -`resnet50` Servabale模型声明示例代码如下所示: - -```python -from mindspore_serving.worker import register -register.declare_servable(servable_file="resnet50_1b_imagenet.mindir", model_format="MindIR", with_batch_dim=True) -``` - -其中`declare_servable`入参`servable_file`指示模型的文件名称;`model_format`指示模型的模型类别,当前Ascend310环境支持`OM`和`MindIR`两种模型类型,Ascend910环境仅支持`MindIR`模型类型。 - -如果模型输入和输出第1维度不是`batch`维度,需要设置参数`with_batch_dim=False`,`with_batch_dim`默认为`True`。 - -设置`with_batch_dim`为`True`,主要针对处理图片、文本等包含`batch`维度的模型。假设`batch_size=2`,当前请求有3个实例,共3张图片,会拆分为2次模型推理,第1次处理2张图片返回2个结果,第2次对剩余的1张图片进行拷贝做一次推理并返回1个结果,最终返回3个结果。 - -![image](images/resnet_with_batch.png) - -设置`with_batch_dim`为`False`,主要针对不涉及或不考虑`batch`维度的模型。比如输入输出为二维Tensor的矩阵乘模型。请求的每个实例将单独作一次推理任务。 - -![image](./images/matmul_without_batch.png) - -另外,对于一个模型,假设其中一个输入是数据输入,包括`batch`维度信息,另一个输入为模型配置信息,没有包括`batch`维度信息,此时在设置`with_batch_dim`为`True`基础上,设置额外参数`without_batch_dim_inputs`指定没有包括`batch`维度信息的输入信息。 -例如: - -```python -from mindspore_serving.worker import register -# Input1 indicates the input shape information of the model, without the batch dimension information. 
-# input0: [N,3,416,416], input1: [2] -register.declare_servable(servable_file="yolov3_darknet53.mindir", model_format="MindIR", - with_batch_dim=True, without_batch_dim_inputs=1) -``` - -对于分布式模型`distributed_servable`,与非分布式单模型配置相比仅声明方法不同,需要使用`declare_distributed_servable`,其中入参`rank_size`表示模型推理使用的device个数,`stage_size`表示流水线的段数,可以参考[部署分布式推理服务](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_distributed_example.html)。 - -```python -from mindspore_serving.worker import distributed -from mindspore_serving.worker import register - -distributed.declare_distributed_servable(rank_size=8, stage_size=1, with_batch_dim=False) -``` - -### 方法定义 - -方法定义的例子如下: - -```python -from mindspore_serving.worker import register - -@register.register_method(output_names=["label"]) -def classify_top1(image): - """Define method `classify_top1` for servable `resnet50`. - The input is `image` and the output is `label`.""" - x = register.call_preprocess(preprocess_eager, image) - x = register.call_servable(x) - x = register.call_postprocess(postprocess_top1, x) - return x - - -@register.register_method(output_names=["label", "score"]) -def classify_top5(image): - """Define method `classify_top5` for servable `resnet50`. - The input is `image` and the output is `label` and `score`. 
""" - x = register.call_preprocess(preprocess_eager, image) - x = register.call_servable(x) - label, score = register.call_postprocess(postprocess_top5, x) - return label, score -``` - -上述代码在Servable `resnet50`定义了`classify_top1`和`classify_top5`方法,其中方法`classify_top1`入参为`image`,出参为`label`,方法`classify_top5`入参为`image`,出参为`label`和`score`。即,Servable方法的入参由Python方法的入参指定,Servable方法的出参由`register_method`的`output_names`指定。 - -另外方法定义中: - -- `call_preprocess`指示了使用的预处理及其输入。 - -- `call_servable`指示了模型推理的输入。 - -- `call_postprocess`指示了使用的后处理及其输入。 - -- `return`指示了方法的返回数据,和`register_method`的`output_names`参数对应。 - -方法定义不能包括if、for、while等分支结构,预处理和后处理可选,不可重复,模型推理必选,且顺序不能打乱。 - -用户在客户端使用Servable某个方法提供的服务时,需要通过入参名称指定对应输入的值,通过出参名称识别各个输出的值。比如客户端访问方法`classify_top5`: - -```python -from mindspore_serving.client import Client - -def read_images(): - # read image file and return - -def run_classify_top5(): - """Client for servable resnet50 and method classify_top5""" - client = Client("localhost", 5500, "resnet50", "classify_top5") - instances = [] - for image in read_images(): # read multi image - instances.append({"image": image}) # input `image` - result = client.infer(instances) - print(result) - for result_item in result: # result for every image - label = result_item["label"] # result `label` - score = result_item["score"] # result `score` - print("label result", label) - print("score result", score) - -if __name__ == '__main__': - run_classify_top5() -``` - -另外,一次请求可包括多个实例,且多个排队处理的请求也将有多个实例,如果需要在自定义的预处理或后处理中通过多线程等并法方式处理多个实例,比如在预处理中使用MindData并发能力处理多个输入图片,MindSpore Serving提供了`call_preprocess_pipeline`和`call_postprocess_pipeline`用于注册此类预处理和后处理。详情可参考[ResNet-50样例的模型配置](https://gitee.com/mindspore/serving/blob/master/example/resnet/resnet50/servable_config.py) 。 diff --git a/tutorials/inference/source_zh_cn/serving_restful.md b/tutorials/inference/source_zh_cn/serving_restful.md deleted file mode 100644 index 61b672849c2db29cc42b8128cb235550a762a48c..0000000000000000000000000000000000000000 --- 
a/tutorials/inference/source_zh_cn/serving_restful.md +++ /dev/null @@ -1,249 +0,0 @@ -# 基于RESTful接口访问MindSpore Serving服务 - -`Linux` `Serving` `Ascend` `GPU` `初级` `中级` `高级` - - - -- [基于RESTful接口访问MindSpore Serving服务](#基于restful接口访问mindspore-serving服务) - - [概述](#概述) - - [请求方式](#请求方式) - - [请求输入格式](#请求输入格式) - - [base64数据编码](#base64数据编码) - - [请求应答格式](#请求应答格式) - - - - - -## 概述 - -MindSpore Serving支持`gPRC`和`RESTful`两种请求方式。本章节介绍`RESTful`类型请求。 - -`RESTful`是一种基于`HTTP`协议的网络应用程序的设计风格和开发方式,通过`URI`实现对资源的管理及访问,具有扩展性强、结构清晰的特点。基于其轻量级以及通过`HTTP`直接传输数据的特性,`RESTful`已经成为最常见的`Web`服务访问方式。用户通过`RESTful`方式,能够简单直接的与服务进行交互。 - -部署`Serving`参考[快速入门](https://www.mindspore.cn/tutorial/inference/zh-CN/master/serving_example.html) 章节。 - -通过`master.start_restful_server`接口启动`RESTful`服务;另外,可通过`master.start_grpc_server`启动`gRPC`服务。 - -> `RESTful`客户端不依赖特定硬件平台,Serving服务端当前仅支持`Ascend310`和`Ascend910`硬件环境。 - -## 请求方式 - -当前仅支持`POST`类型的RESTful请求,请求格式如下: - -```text -POST http://${HOST}:${PORT}/model/${MODLE_NAME}[/version/${VERSION}]:${METHOD_NAME} -``` - -其中: - -- `${HOST}`:指定访问的IP地址; -- `${PORT}`:指定访问的端口号; -- `${MODLE_NAME}`:请求的模型名称; -- `${VERSION}`:表示版本号。版本号是可选的,若未指定具体版本号,则默认使用模型的最新版本。 -- `${METHOD_NAME}`:表示请求模型的具体方法名称。 - -如果使用`curl`工具,RESTful请求方式如下: - -```text -curl -X POST -d '${REQ_JSON_MESSAGE}' http://${HOST}:${PORT}/model/${MODLE_NAME}[/version/${VERSION}]:${METHOD_NAME} -``` - -例子:请求`LeNet`模型的`predict`方法进行数字图片的推理,请求如下: - -```text -curl -X POST -d '{"instances":{"image":{"b64":"babe64-encoded-string"}' http://127.0.0.1:1500/model/lenet/version/1:predict -``` - -其中:`babe64-encoded-string`表示数字图片经过`base64`编码之后的字符串。由于字符串比较长,不显式列出。 - -## 请求输入格式 - -RESTful支持`Json`请求格式,`key`固定为`instances`,`value`表示多个实例。 - -每个实例通过`key-value`格式的`Json`对象来表示。其中: - -- `key`:表示输入名称,需要与请求模型提供的方法的输入参数名称一致,若不一致,则请求失败。 - -- `value`:表示具体的值。当前支持的`value`类型: - - - 标量:`str`、`bytes`、`int`、`float`、`bool`。 - - `bytes`:通过`base64`编码方式支持。 - - - 张量:`int`、`float`、`bool`组成的一级或多级数组。 - - 张量通过数组格式表示数据和维度信息。 - 
-`Json`中支持的`int`类型:是`int32`表示的范围,`float`类型:是`float32`表示的范围。 - -请求格式: - -```text -{ - "instances":[ - { - "input_name1":||, - "input_name2":||, - ... - }, - { - "input_name1":||, - "input_name2":||, - ... - } - ... - ] -} -``` - -例子: - -```text -{ - "instances":[ - { - "tag":"one", - "box":[[1,1],[2,3],[3,4]], - "image":{"b64":"iVBOR...ggg==="} - }, - { - "tag":"two", - "box":[[2,2],[5,5],[6,6]], - "image":{"b64":"iVBOR...QmCC", "type":"bytes"} - } - ] -} -``` - -其中:`iVBOR...ggg===`是图片数字`0`经过`base64`编码之后的省略字符串。`iVBOR...QmCC`是图片数字`1`经过`base64`编码之后的省略字符串。不同图片编码出来的字符串可能不同,上述是示意说明。 - -### base64数据编码 - -`bytes`类型需要通过`base64`编码进行表示。`base64`除了可以表示`bytes`类型,也可以表示其他标量和张量数据,此时将标量和张量的二进制数据通过`base64`进行编码,并额外通过`type`指定数据类型,通过`shape`指定维度信息: - -- `type`:可选,如果不指定,默认为`bytes`。 - - 支持`int8`、`int16`、`int32`、`int64`、`uint8`、`uint16`、`uint32`、`uint64`、`float16`(或`fp16`)、`float32`(或`fp32`)、`float64`(或`fp64`)、`bool`、`str`、`bytes`。 - -- `shape`:可选,如果不指定,默认为`[1]`。 - -例子: - -如果要用`base64`编码表示:`int16`的数据类型,`shape`为3*2,值是`[[1,1],[2,3],[3,4]]`的张量,则表示如下: - -```json -{ - "instances":[ - { - "box":{"b64":"AQACAAIAAwADAAQA", "type":"int16", "shape":[3,2]} - } - ] -} -``` - -其中`AQACAAIAAwADAAQA`:是`[[1,1],[2,3],[3,4]]`的二进制数据格式经过`base64`编码后的字符串。 - -**请求支持的类型总结如下:** - -| 支持的类型 | 例子 | 备注 | -| ------ | -------- | ---------------- | -| `int` | 1,[1,2,3,4] | 默认`int32`表示范围 | -| `float` | 1.0,[[1.2, 2.3], [3.0, 4.5]] | 默认`float32`表示范围 | -| `bool` | true,false,[[true],[false]] | `bool`类型 | -| `string` | "hello"或者
{"b64":"aGVsbG8=", "type":"str"} | 直接表示或者指定`type`方式表示 | -| `bytes` | {"b64":"AQACAAIAAwADAAQA"} 或者
{"b64":"AQACAAIAAwADAAQA", "type":"bytes"} | 如果不填`type`,默认为`bytes` | -| `int8`,`int16`,`int32`,`int64`,
`uint8`,`uint16`,`uint32`,`uint64`,
`float16`,`float32`,`float64`,`bool` | {"b64":"AQACAAIAAwADAAQA", "type":"int16", "shape":[3,2]} | 利用base64编码,表示指定type的数据 | - -## 请求应答格式 - -应答格式与请求格式保持一致。返回`Json`格式信息。应答格式如下: - -```text -{ - "instances":[ - { - "output_name1":||, - "output_name2":||, - ... - }, - { - "output_name1":||, - "output_name2":||, - ... - } - ... - ] -} -``` - -1. 多实例请求后,如果多实例全部成功处理,则响应格式如下: - - 例子:`LeNet`请求识别数字`0`和数字`1`。 - - ```json - { - "instances":[ - { - "result":0 - }, - { - "result":1 - } - ] - } - ``` - -2. 如果部分实例出错,则响应格式如下: - - 例子:`lenet`请求识别数字`0`和一个错误数字图片。 - - ```json - { - "instances":[ - { - "result":0 - }, - { - "error_msg":"Preprocess Failed" - } - ] - } - ``` - -3. 如果请求全部失败,则响应格式如下: - - 例子:`lenet`请求识别两张错误数字图片为例。 - - ```json - { - "instances":[ - { - "error_msg":"Preprocess Failed" - }, - { - "error_msg":"Time out" - } - ] - } - ``` - -4. 出现系统性或者其他解析等错误,则返回格式: - - 例子:`lenet`传入非法`Json`字符串。 - - ```json - { - "error_msg":"Parse request failed" - } - ``` - -**应答数据表示如下:** - - | Serving输出类型 | RESTful json中数据类型 | 说明 | 举例 | - | ---- | ---- | ---- | ---- | - | `int8`, `int16`, `int32`, `int64`, `uint8`, `uint16`, `uint32`, `uint64` | json integer | 整型格式的数据表示为json整型 | 1,[1,2,3,4] | - | `float16`, `float32`, `float64` | json float | 浮点格式的数据表示为json浮点数 | 1.0,[[1.2, 2.3], [3.0, 4.5]] | - | `bool` | json bool | bool类型数据表示为json bool | true,false,[[true],[false]] | - | `string` | json str | 字符串格式输出表示为json str | "news_car" | - | `bytes` | base64 object | 二进制格式输出转为base64对象 | {"b64":"AQACAAIAAwADAAQA"} | diff --git a/tutorials/lite/Makefile b/tutorials/lite/Makefile deleted file mode 100644 index 1eff8952707bdfa503c8d60c1e9a903053170ba2..0000000000000000000000000000000000000000 --- a/tutorials/lite/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. 
-SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source_zh_cn -BUILDDIR = build_zh_cn - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/tutorials/lite/requirements.txt b/tutorials/lite/requirements.txt deleted file mode 100644 index ea17a9e73613ddd99cc31690ddcf283d9a721450..0000000000000000000000000000000000000000 --- a/tutorials/lite/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -sphinx >= 2.2.1, <= 2.4.4 -recommonmark -sphinx-markdown-tables -sphinx_rtd_theme -jieba \ No newline at end of file diff --git a/tutorials/lite/source_en/_static/css/bootstrap.min.css b/tutorials/lite/source_en/_static/css/bootstrap.min.css deleted file mode 100644 index 35722284f98a1189566e9200862b02aac7cbec50..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/_static/css/bootstrap.min.css +++ /dev/null @@ -1,6 +0,0 @@ -/*! - * Bootstrap v3.3.7 (http://getbootstrap.com) - * Copyright 2011-2016 Twitter, Inc. - * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) - *//*! 
normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */html{overflow-y: hidden!important;font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}dfn{font-style:italic}h1{margin:.67em 0;font-size:2em}mark{color:#000;background:#ff0}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{height:0;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;font-size:1em}button,input,optgroup,select,textarea{margin:0;font:inherit;color:inherit}button{overflow:visible}button,select{text-transform:none}button,html input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}input{line-height:normal}input[type=checkbox],input[type=radio]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}fieldset{padding:.35em .625em .75em;margin:0 2px;border:1px solid 
silver}legend{padding:0;border:0}textarea{overflow:auto}optgroup{font-weight:700}table{border-spacing:0;border-collapse:collapse}td,th{padding:0}/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{*,:after,:before{color:#000!important;text-shadow:none!important;background:0 0!important;-webkit-box-shadow:none!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}blockquote,pre{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table td,.table th{background-color:#fff!important}.table-bordered td,.table-bordered th{border:1px solid #ddd!important}}@font-face{font-family:'Glyphicons Halflings';src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix) format('embedded-opentype'),url(../fonts/glyphicons-halflings-regular.woff2) format('woff2'),url(../fonts/glyphicons-halflings-regular.woff) format('woff'),url(../fonts/glyphicons-halflings-regular.ttf) format('truetype'),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular) format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons 
Halflings';font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.g
lyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot
:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"
}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{c
ontent:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before{content:"\e227"}.glyphicon-btc:before{content:"\e227"}.glyphicon-xbt:before{content:"\e227"}.glyphicon-yen:before{content:"\00a5"}.glyphicon-jpy:befo
re{content:"\00a5"}.glyphicon-ruble:before{content:"\20bd"}.glyphicon-rub:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}:after,:before{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:rgba(0,0,0,0)}body{font-family:"Helvetica 
Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#333;background-color:#fff}button,input,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#337ab7;text-decoration:none}a:focus,a:hover{color:#23527c;text-decoration:underline}a:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.carousel-inner>.item>a>img,.carousel-inner>.item>img,.img-responsive,.thumbnail a>img,.thumbnail>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{display:inline-block;max-width:100%;height:auto;padding:4px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role=button]{cursor:pointer}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-weight:400;line-height:1;color:#777}.h1,.h2,.h3,h1,h2,h3{margin-top:20px;margin-bottom:10px}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small{font-size:65%}.h4,.h5,.h6,h4,h5,h6{margin-top:10px;margin-bottom:10px}.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 
small{font-size:75%}.h1,h1{font-size:36px}.h2,h2{font-size:30px}.h3,h3{font-size:24px}.h4,h4{font-size:18px}.h5,h5{font-size:14px}.h6,h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}@media (min-width:768px){.lead{font-size:21px}}.small,small{font-size:85%}.mark,mark{padding:.2em;background-color:#fcf8e3}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#777}.text-primary{color:#337ab7}a.text-primary:focus,a.text-primary:hover{color:#286090}.text-success{color:#3c763d}a.text-success:focus,a.text-success:hover{color:#2b542c}.text-info{color:#31708f}a.text-info:focus,a.text-info:hover{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:focus,a.text-warning:hover{color:#66512c}.text-danger{color:#a94442}a.text-danger:focus,a.text-danger:hover{color:#843534}.bg-primary{color:#fff;background-color:#337ab7}a.bg-primary:focus,a.bg-primary:hover{background-color:#286090}.bg-success{background-color:#dff0d8}a.bg-success:focus,a.bg-success:hover{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:focus,a.bg-info:hover{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:focus,a.bg-warning:hover{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:focus,a.bg-danger:hover{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}ol,ul{margin-top:0;margin-bottom:10px}ol ol,ol ul,ul ol,ul 
ul{margin-bottom:0}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;margin-left:-5px;list-style:none}.list-inline>li{display:inline-block;padding-right:5px;padding-left:5px}dl{margin-top:0;margin-bottom:20px}dd,dt{line-height:1.42857143}dt{font-weight:700}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[data-original-title],abbr[title]{cursor:help;border-bottom:1px dotted #777}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #eee}blockquote ol:last-child,blockquote p:last-child,blockquote ul:last-child{margin-bottom:0}blockquote .small,blockquote footer,blockquote small{display:block;font-size:80%;line-height:1.42857143;color:#777}blockquote .small:before,blockquote footer:before,blockquote small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;text-align:right;border-right:5px solid #eee;border-left:0}.blockquote-reverse .small:before,.blockquote-reverse footer:before,.blockquote-reverse small:before,blockquote.pull-right .small:before,blockquote.pull-right footer:before,blockquote.pull-right small:before{content:''}.blockquote-reverse .small:after,.blockquote-reverse footer:after,.blockquote-reverse small:after,blockquote.pull-right .small:after,blockquote.pull-right footer:after,blockquote.pull-right small:after{content:'\00A0 \2014'}address{margin-bottom:20px;font-style:normal;line-height:1.42857143}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{padding:2px 4px;font-size:90%;color:#c7254e;background-color:#f9f2f4;border-radius:4px}kbd{padding:2px 4px;font-size:90%;color:#fff;background-color:#333;border-radius:3px;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.25);box-shadow:inset 0 -1px 0 
rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;font-weight:700;-webkit-box-shadow:none;box-shadow:none}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857143;color:#333;word-break:break-all;word-wrap:break-word;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.container-fluid{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.row{margin-right:-15px;margin-left:-15px}.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{position:relative;min-height:1px;padding-right:15px;padding-left:15px}.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{float:left}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.666
66667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:auto}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:auto}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media 
(min-width:768px){.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:auto}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:auto}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media 
(min-width:992px){.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:auto}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:auto}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media 
(min-width:1200px){.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:auto}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:auto}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}table{background-color:transparent}caption{padding-top:8px;padding-bottom:8px;color:#777;text-align:left}th{text-align:left}.table{width:100%;max-width:100%;margin-bottom:20px}.table>tbody>tr>td,.table>tbody>tr>th,.table>tfoot>tr>td,.table>
tfoot>tr>th,.table>thead>tr>td,.table>thead>tr>th{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>td,.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>td,.table>thead:first-child>tr:first-child>th{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>tbody>tr>td,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>td,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>thead>tr>th{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>tbody>tr>td,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>td,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border:1px solid #ddd}.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}table col[class*=col-]{position:static;display:table-column;float:none}table td[class*=col-],table 
th[class*=col-]{position:static;display:table-cell;float:none}.table>tbody>tr.active>td,.table>tbody>tr.active>th,.table>tbody>tr>td.active,.table>tbody>tr>th.active,.table>tfoot>tr.active>td,.table>tfoot>tr.active>th,.table>tfoot>tr>td.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>thead>tr.active>th,.table>thead>tr>td.active,.table>thead>tr>th.active{background-color:#f5f5f5}.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr.active:hover>th,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover{background-color:#e8e8e8}.table>tbody>tr.success>td,.table>tbody>tr.success>th,.table>tbody>tr>td.success,.table>tbody>tr>th.success,.table>tfoot>tr.success>td,.table>tfoot>tr.success>th,.table>tfoot>tr>td.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>thead>tr.success>th,.table>thead>tr>td.success,.table>thead>tr>th.success{background-color:#dff0d8}.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr.success:hover>th,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover{background-color:#d0e9c6}.table>tbody>tr.info>td,.table>tbody>tr.info>th,.table>tbody>tr>td.info,.table>tbody>tr>th.info,.table>tfoot>tr.info>td,.table>tfoot>tr.info>th,.table>tfoot>tr>td.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>thead>tr.info>th,.table>thead>tr>td.info,.table>thead>tr>th.info{background-color:#d9edf7}.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr.info:hover>th,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover{background-color:#c4e3f3}.table>tbody>tr.warning>td,.table>tbody>tr.warning>th,.table>tbody>tr>td.warning,.table>tbody>tr>th.warning,.table>tfoot>tr.warning>td,.table>tfoot>tr.warning>th,.table>tfoot>tr>td.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>thead>tr.warning>th,.table>thead>tr>td.warning,.table>t
head>tr>th.warning{background-color:#fcf8e3}.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr.warning:hover>th,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover{background-color:#faf2cc}.table>tbody>tr.danger>td,.table>tbody>tr.danger>th,.table>tbody>tr>td.danger,.table>tbody>tr>th.danger,.table>tfoot>tr.danger>td,.table>tfoot>tr.danger>th,.table>tfoot>tr>td.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>thead>tr.danger>th,.table>thead>tr>td.danger,.table>thead>tr>th.danger{background-color:#f2dede}.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr.danger:hover>th,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover{background-color:#ebcccc}.table-responsive{min-height:.01%;overflow-x:auto}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>td,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>thead>tr>th{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:inherit;color:#333;border:0;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px;font-weight:700}input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=checkbox],input[type=radio]{margin:4px 0 0;margin-top:1px\9;line-height:normal}input[type=file]{display:block}input[type=range]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type=file]:focus,input[type=checkbox]:focus,input[type=radio]:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}output{display:block;padding-top:7px;font-size:14px;line-height:1.42857143;color:#555}.form-control{display:block;width:100%;height:34px;padding:6px 12px;font-size:14px;line-height:1.42857143;color:#555;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color ease-in-out .15s,-webkit-box-shadow ease-in-out .15s;-o-transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control::-ms-expand{background-color:transparent;border:0}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#eee;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}input[type=search]{-webkit-appearance:none}@media screen and (-webkit-min-device-pixel-ratio:0){input[type=date].form-control,input[type=time].form-control,input[type=datetime-local].form-control,input[type=month].form-control{line-height:34px}.input-group-sm input[type=date],.input-group-sm input[type=time],.input-group-sm input[type=datetime-local],.input-group-sm input[type=month],input[type=date].input-sm,input[type=time].input-sm,input[type=datetime-local].input-sm,input[type=month].input-sm{line-height:30px}.input-group-lg input[type=date],.input-group-lg input[type=time],.input-group-lg input[type=datetime-local],.input-group-lg 
input[type=month],input[type=date].input-lg,input[type=time].input-lg,input[type=datetime-local].input-lg,input[type=month].input-lg{line-height:46px}}.form-group{margin-bottom:15px}.checkbox,.radio{position:relative;display:block;margin-top:10px;margin-bottom:10px}.checkbox label,.radio label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox input[type=checkbox],.checkbox-inline input[type=checkbox],.radio input[type=radio],.radio-inline input[type=radio]{position:absolute;margin-top:4px\9;margin-left:-20px}.checkbox+.checkbox,.radio+.radio{margin-top:-5px}.checkbox-inline,.radio-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;font-weight:400;vertical-align:middle;cursor:pointer}.checkbox-inline+.checkbox-inline,.radio-inline+.radio-inline{margin-top:0;margin-left:10px}fieldset[disabled] input[type=checkbox],fieldset[disabled] input[type=radio],input[type=checkbox].disabled,input[type=checkbox][disabled],input[type=radio].disabled,input[type=radio][disabled]{cursor:not-allowed}.checkbox-inline.disabled,.radio-inline.disabled,fieldset[disabled] .checkbox-inline,fieldset[disabled] .radio-inline{cursor:not-allowed}.checkbox.disabled label,.radio.disabled label,fieldset[disabled] .checkbox label,fieldset[disabled] .radio label{cursor:not-allowed}.form-control-static{min-height:34px;padding-top:7px;padding-bottom:7px;margin-bottom:0}.form-control-static.input-lg,.form-control-static.input-sm{padding-right:0;padding-left:0}.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-sm{height:30px;line-height:30px}select[multiple].input-sm,textarea.input-sm{height:auto}.form-group-sm .form-control{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm select[multiple].form-control,.form-group-sm textarea.form-control{height:auto}.form-group-sm 
.form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-lg{height:46px;line-height:46px}select[multiple].input-lg,textarea.input-lg{height:auto}.form-group-lg .form-control{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg select[multiple].form-control,.form-group-lg textarea.form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.3333333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:2;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.form-group-lg .form-control+.form-control-feedback,.input-group-lg+.form-control-feedback,.input-lg+.form-control-feedback{width:46px;height:46px;line-height:46px}.form-group-sm .form-control+.form-control-feedback,.input-group-sm+.form-control-feedback,.input-sm+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .checkbox,.has-success .checkbox-inline,.has-success .control-label,.has-success .help-block,.has-success .radio,.has-success .radio-inline,.has-success.checkbox label,.has-success.checkbox-inline label,.has-success.radio label,.has-success.radio-inline label{color:#3c763d}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;background-color:#dff0d8;border-color:#3c763d}.has-success 
.form-control-feedback{color:#3c763d}.has-warning .checkbox,.has-warning .checkbox-inline,.has-warning .control-label,.has-warning .help-block,.has-warning .radio,.has-warning .radio-inline,.has-warning.checkbox label,.has-warning.checkbox-inline label,.has-warning.radio label,.has-warning.radio-inline label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;background-color:#fcf8e3;border-color:#8a6d3b}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .checkbox,.has-error .checkbox-inline,.has-error .control-label,.has-error .help-block,.has-error .radio,.has-error .radio-inline,.has-error.checkbox label,.has-error.checkbox-inline label,.has-error.radio label,.has-error.radio-inline label{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;background-color:#f2dede;border-color:#a94442}.has-error .form-control-feedback{color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-static{display:inline-block}.form-inline 
.input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .form-control,.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .checkbox,.form-inline .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .checkbox label,.form-inline .radio label{padding-left:0}.form-inline .checkbox input[type=checkbox],.form-inline .radio input[type=radio]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .checkbox,.form-horizontal .checkbox-inline,.form-horizontal .radio,.form-horizontal .radio-inline{padding-top:7px;margin-top:0;margin-bottom:0}.form-horizontal .checkbox,.form-horizontal .radio{min-height:27px}.form-horizontal .form-group{margin-right:-15px;margin-left:-15px}@media (min-width:768px){.form-horizontal .control-label{padding-top:7px;margin-bottom:0;text-align:right}}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:11px;font-size:18px}}@media (min-width:768px){.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;padding:6px 12px;margin-bottom:0;font-size:14px;font-weight:400;line-height:1.42857143;text-align:center;white-space:nowrap;vertical-align:middle;-ms-touch-action:manipulation;touch-action:manipulation;cursor:pointer;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background-image:none;border:1px solid transparent;border-radius:4px}.btn.active.focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn:active:focus,.btn:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}.btn.focus,.btn:focus,.btn:hover{color:#333;text-decoration:none}.btn.active,.btn:active{background-image:none;outline:0;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none;opacity:.65}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default.focus,.btn-default:focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default:hover{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active.focus,.btn-default.active:focus,.btn-default.active:hover,.btn-default:active.focus,.btn-default:active:focus,.btn-default:active:hover,.open>.dropdown-toggle.btn-default.focus,.open>.dropdown-toggle.btn-default:focus,.open>.dropdown-toggle.btn-default:hover{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{background-image:none}.btn-default.disabled.focus,.btn-default.disabled:focus,.btn-default.disabled:hover,.btn-default[disabled].focus,.btn-default[disabled]:focus,.btn-default[disabled]:hover,fieldset[disabled] .btn-default.focus,fieldset[disabled] .btn-default:focus,fieldset[disabled] .btn-default:hover{background-color:#fff;border-color:#ccc}.btn-default 
.badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#337ab7;border-color:#2e6da4}.btn-primary.focus,.btn-primary:focus{color:#fff;background-color:#286090;border-color:#122b40}.btn-primary:hover{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active.focus,.btn-primary.active:focus,.btn-primary.active:hover,.btn-primary:active.focus,.btn-primary:active:focus,.btn-primary:active:hover,.open>.dropdown-toggle.btn-primary.focus,.open>.dropdown-toggle.btn-primary:focus,.open>.dropdown-toggle.btn-primary:hover{color:#fff;background-color:#204d74;border-color:#122b40}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{background-image:none}.btn-primary.disabled.focus,.btn-primary.disabled:focus,.btn-primary.disabled:hover,.btn-primary[disabled].focus,.btn-primary[disabled]:focus,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary.focus,fieldset[disabled] .btn-primary:focus,fieldset[disabled] .btn-primary:hover{background-color:#337ab7;border-color:#2e6da4}.btn-primary 
.badge{color:#337ab7;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success.focus,.btn-success:focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success:hover{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active.focus,.btn-success.active:focus,.btn-success.active:hover,.btn-success:active.focus,.btn-success:active:focus,.btn-success:active:hover,.open>.dropdown-toggle.btn-success.focus,.open>.dropdown-toggle.btn-success:focus,.open>.dropdown-toggle.btn-success:hover{color:#fff;background-color:#398439;border-color:#255625}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{background-image:none}.btn-success.disabled.focus,.btn-success.disabled:focus,.btn-success.disabled:hover,.btn-success[disabled].focus,.btn-success[disabled]:focus,.btn-success[disabled]:hover,fieldset[disabled] .btn-success.focus,fieldset[disabled] .btn-success:focus,fieldset[disabled] .btn-success:hover{background-color:#5cb85c;border-color:#4cae4c}.btn-success 
.badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info.focus,.btn-info:focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info:hover{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active.focus,.btn-info.active:focus,.btn-info.active:hover,.btn-info:active.focus,.btn-info:active:focus,.btn-info:active:hover,.open>.dropdown-toggle.btn-info.focus,.open>.dropdown-toggle.btn-info:focus,.open>.dropdown-toggle.btn-info:hover{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{background-image:none}.btn-info.disabled.focus,.btn-info.disabled:focus,.btn-info.disabled:hover,.btn-info[disabled].focus,.btn-info[disabled]:focus,.btn-info[disabled]:hover,fieldset[disabled] .btn-info.focus,fieldset[disabled] .btn-info:focus,fieldset[disabled] .btn-info:hover{background-color:#5bc0de;border-color:#46b8da}.btn-info 
.badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning.focus,.btn-warning:focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning:hover{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active.focus,.btn-warning.active:focus,.btn-warning.active:hover,.btn-warning:active.focus,.btn-warning:active:focus,.btn-warning:active:hover,.open>.dropdown-toggle.btn-warning.focus,.open>.dropdown-toggle.btn-warning:focus,.open>.dropdown-toggle.btn-warning:hover{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{background-image:none}.btn-warning.disabled.focus,.btn-warning.disabled:focus,.btn-warning.disabled:hover,.btn-warning[disabled].focus,.btn-warning[disabled]:focus,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning.focus,fieldset[disabled] .btn-warning:focus,fieldset[disabled] .btn-warning:hover{background-color:#f0ad4e;border-color:#eea236}.btn-warning 
.badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger.focus,.btn-danger:focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger:hover{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active.focus,.btn-danger.active:focus,.btn-danger.active:hover,.btn-danger:active.focus,.btn-danger:active:focus,.btn-danger:active:hover,.open>.dropdown-toggle.btn-danger.focus,.open>.dropdown-toggle.btn-danger:focus,.open>.dropdown-toggle.btn-danger:hover{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{background-image:none}.btn-danger.disabled.focus,.btn-danger.disabled:focus,.btn-danger.disabled:hover,.btn-danger[disabled].focus,.btn-danger[disabled]:focus,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger.focus,fieldset[disabled] .btn-danger:focus,fieldset[disabled] .btn-danger:hover{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{font-weight:400;color:#337ab7;border-radius:0}.btn-link,.btn-link.active,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:active,.btn-link:focus,.btn-link:hover{border-color:transparent}.btn-link:focus,.btn-link:hover{color:#23527c;text-decoration:underline;background-color:transparent}.btn-link[disabled]:focus,.btn-link[disabled]:hover,fieldset[disabled] .btn-link:focus,fieldset[disabled] .btn-link:hover{color:#777;text-decoration:none}.btn-group-lg>.btn,.btn-lg{padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.btn-group-sm>.btn,.btn-sm{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-xs>.btn,.btn-xs{padding:1px 
5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type=button].btn-block,input[type=reset].btn-block,input[type=submit].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition-timing-function:ease;-o-transition-timing-function:ease;transition-timing-function:ease;-webkit-transition-duration:.35s;-o-transition-duration:.35s;transition-duration:.35s;-webkit-transition-property:height,visibility;-o-transition-property:height,visibility;transition-property:height,visibility}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px dashed;border-top:4px solid\9;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown,.dropup{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;font-size:14px;text-align:left;list-style:none;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,.175);box-shadow:0 6px 12px rgba(0,0,0,.175)}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 
20px;clear:both;font-weight:400;line-height:1.42857143;color:#333;white-space:nowrap}.dropdown-menu>li>a:focus,.dropdown-menu>li>a:hover{color:#262626;text-decoration:none;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:focus,.dropdown-menu>.active>a:hover{color:#fff;text-decoration:none;background-color:#337ab7;outline:0}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{color:#777}.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{text-decoration:none;cursor:not-allowed;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{right:0;left:auto}.dropdown-menu-left{right:auto;left:0}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.42857143;color:#777;white-space:nowrap}.dropdown-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:990}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{content:"";border-top:0;border-bottom:4px dashed;border-bottom:4px solid\9}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{right:0;left:auto}.navbar-right .dropdown-menu-left{right:auto;left:0}}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;float:left}.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:2}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar .btn,.btn-toolbar 
.btn-group,.btn-toolbar .input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-bottom-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-right:8px;padding-left:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-right:12px;padding-left:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn .caret{margin-left:0}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 
5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-left-radius:0;border-top-right-radius:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-top-right-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{display:table-cell;float:none;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle=buttons]>.btn input[type=checkbox],[data-toggle=buttons]>.btn input[type=radio],[data-toggle=buttons]>.btn-group>.btn input[type=checkbox],[data-toggle=buttons]>.btn-group>.btn input[type=radio]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*=col-]{float:none;padding-right:0;padding-left:0}.input-group 
.form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group .form-control:focus{z-index:3}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn,textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn,textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn{height:auto}.input-group .form-control,.input-group-addon,.input-group-btn{display:table-cell}.input-group .form-control:not(:first-child):not(:last-child),.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm{padding:5px 
10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type=checkbox],.input-group-addon input[type=radio]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn-group:not(:last-child)>.btn,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:first-child>.btn-group:not(:first-child)>.btn,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle{border-top-left-radius:0;border-bottom-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:active,.input-group-btn>.btn:focus,.input-group-btn>.btn:hover{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:2;margin-left:-1px}.nav{padding-left:0;margin-bottom:0;list-style:none}.nav>li{position:relative;display:block}.nav>li>a{position:relative;display:block;padding:10px 15px}.nav>li>a:focus,.nav>li>a:hover{text-decoration:none;background-color:#eee}.nav>li.disabled>a{color:#777}.nav>li.disabled>a:focus,.nav>li.disabled>a:hover{color:#777;text-decoration:none;cursor:not-allowed;background-color:transparent}.nav .open>a,.nav .open>a:focus,.nav .open>a:hover{background-color:#eee;border-color:#337ab7}.nav .nav-divider{height:1px;margin:9px 
0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid transparent;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:focus,.nav-tabs>li.active>a:hover{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0}}.nav-tabs.nav-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border-bottom-color:#fff}}.nav-pills>li{float:left}.nav-pills>li>a{border-radius:4px}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:focus,.nav-pills>li.active>a:hover{color:#fff;background-color:#337ab7}.nav-stacked>li{float:none}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li{float:none}.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media 
(min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border-bottom-color:#fff}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}@media (min-width:768px){.navbar{border-radius:4px}}@media (min-width:768px){.navbar-header{float:left}}.navbar-collapse{padding-right:15px;padding-left:15px;overflow-x:visible;-webkit-overflow-scrolling:touch;border-top:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1)}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar-collapse{width:auto;border-top:0;-webkit-box-shadow:none;box-shadow:none}.navbar-collapse.collapse{display:block!important;height:auto!important;padding-bottom:0;overflow:visible!important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse{padding-right:0;padding-left:0}}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:340px}@media (max-device-width:480px) and (orientation:landscape){.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:200px}}.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:-15px;margin-left:-15px}@media 
(min-width:768px){.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:0;margin-left:0}}.navbar-static-top{z-index:1000;border-width:0 0 1px}@media (min-width:768px){.navbar-static-top{border-radius:0}}.navbar-fixed-bottom,.navbar-fixed-top{position:fixed;right:0;left:0;z-index:1030}@media (min-width:768px){.navbar-fixed-bottom,.navbar-fixed-top{border-radius:0}}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;height:50px;padding:15px 15px;font-size:18px;line-height:20px}.navbar-brand:focus,.navbar-brand:hover{text-decoration:none}.navbar-brand>img{display:block}@media (min-width:768px){.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;padding:9px 10px;margin-top:8px;margin-right:15px;margin-bottom:8px;background-color:transparent;background-image:none;border:1px solid transparent;border-radius:4px}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}@media (min-width:768px){.navbar-toggle{display:none}}.navbar-nav{margin:7.5px -15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;-webkit-box-shadow:none;box-shadow:none}.navbar-nav .open .dropdown-menu .dropdown-header,.navbar-nav .open .dropdown-menu>li>a{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:focus,.navbar-nav .open .dropdown-menu>li>a:hover{background-image:none}}@media 
(min-width:768px){.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}}.navbar-form{padding:10px 15px;margin-top:8px;margin-right:-15px;margin-bottom:8px;margin-left:-15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1)}@media (min-width:768px){.navbar-form .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .form-control-static{display:inline-block}.navbar-form .input-group{display:inline-table;vertical-align:middle}.navbar-form .input-group .form-control,.navbar-form .input-group .input-group-addon,.navbar-form .input-group .input-group-btn{width:auto}.navbar-form .input-group>.form-control{width:100%}.navbar-form .control-label{margin-bottom:0;vertical-align:middle}.navbar-form .checkbox,.navbar-form .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.navbar-form .checkbox label,.navbar-form .radio label{padding-left:0}.navbar-form .checkbox input[type=checkbox],.navbar-form .radio input[type=radio]{position:relative;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}.navbar-form .form-group:last-child{margin-bottom:0}}@media (min-width:768px){.navbar-form{width:auto;padding-top:0;padding-bottom:0;margin-right:0;margin-left:0;border:0;-webkit-box-shadow:none;box-shadow:none}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-left-radius:0;border-top-right-radius:0}.navbar-fixed-bottom 
.navbar-nav>li>.dropdown-menu{margin-bottom:0;border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-right:15px;margin-left:15px}}@media (min-width:768px){.navbar-left{float:left!important}.navbar-right{float:right!important;margin-right:-15px}.navbar-right~.navbar-right{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default .navbar-brand:focus,.navbar-default .navbar-brand:hover{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a{color:#777}.navbar-default .navbar-nav>li>a:focus,.navbar-default .navbar-nav>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:focus,.navbar-default .navbar-nav>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:focus,.navbar-default .navbar-nav>.disabled>a:hover{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:focus,.navbar-default .navbar-toggle:hover{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:focus,.navbar-default .navbar-nav>.open>a:hover{color:#555;background-color:#e7e7e7}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-default .navbar-nav .open 
.dropdown-menu>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-default .btn-link{color:#777}.navbar-default .btn-link:focus,.navbar-default .btn-link:hover{color:#333}.navbar-default .btn-link[disabled]:focus,.navbar-default .btn-link[disabled]:hover,fieldset[disabled] .navbar-default .btn-link:focus,fieldset[disabled] .navbar-default .btn-link:hover{color:#ccc}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#9d9d9d}.navbar-inverse .navbar-brand:focus,.navbar-inverse .navbar-brand:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-text{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a:focus,.navbar-inverse .navbar-nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:focus,.navbar-inverse .navbar-nav>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:focus,.navbar-inverse .navbar-nav>.disabled>a:hover{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:focus,.navbar-inverse .navbar-toggle:hover{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse 
.navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:focus,.navbar-inverse .navbar-nav>.open>a:hover{color:#fff;background-color:#080808}@media (max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#9d9d9d}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .btn-link{color:#9d9d9d}.navbar-inverse .btn-link:focus,.navbar-inverse .btn-link:hover{color:#fff}.navbar-inverse .btn-link[disabled]:focus,.navbar-inverse .btn-link[disabled]:hover,fieldset[disabled] .navbar-inverse .btn-link:focus,fieldset[disabled] .navbar-inverse .btn-link:hover{color:#444}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:4px}.breadcrumb>li{display:inline-block}.breadcrumb>li+li:before{padding:0 5px;color:#ccc;content:"/\00a0"}.breadcrumb>.active{color:#777}.pagination{display:inline-block;padding-left:0;margin:20px 0;border-radius:4px}.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;margin-left:-1px;line-height:1.42857143;color:#337ab7;text-decoration:none;background-color:#fff;border:1px solid 
#ddd}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-top-left-radius:4px;border-bottom-left-radius:4px}.pagination>li:last-child>a,.pagination>li:last-child>span{border-top-right-radius:4px;border-bottom-right-radius:4px}.pagination>li>a:focus,.pagination>li>a:hover,.pagination>li>span:focus,.pagination>li>span:hover{z-index:2;color:#23527c;background-color:#eee;border-color:#ddd}.pagination>.active>a,.pagination>.active>a:focus,.pagination>.active>a:hover,.pagination>.active>span,.pagination>.active>span:focus,.pagination>.active>span:hover{z-index:3;color:#fff;cursor:default;background-color:#337ab7;border-color:#337ab7}.pagination>.disabled>a,.pagination>.disabled>a:focus,.pagination>.disabled>a:hover,.pagination>.disabled>span,.pagination>.disabled>span:focus,.pagination>.disabled>span:hover{color:#777;cursor:not-allowed;background-color:#fff;border-color:#ddd}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px;line-height:1.3333333}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-top-left-radius:6px;border-bottom-left-radius:6px}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-top-right-radius:6px;border-bottom-right-radius:6px}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px;line-height:1.5}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-top-left-radius:3px;border-bottom-left-radius:3px}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-top-right-radius:3px;border-bottom-right-radius:3px}.pager{padding-left:0;margin:20px 0;text-align:center;list-style:none}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:focus,.pager li>a:hover{text-decoration:none;background-color:#eee}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager 
.previous>span{float:left}.pager .disabled>a,.pager .disabled>a:focus,.pager .disabled>a:hover,.pager .disabled>span{color:#777;cursor:not-allowed;background-color:#fff}.label{display:inline;padding:.2em .6em .3em;font-size:75%;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25em}a.label:focus,a.label:hover{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.btn .label{position:relative;top:-1px}.label-default{background-color:#777}.label-default[href]:focus,.label-default[href]:hover{background-color:#5e5e5e}.label-primary{background-color:#337ab7}.label-primary[href]:focus,.label-primary[href]:hover{background-color:#286090}.label-success{background-color:#5cb85c}.label-success[href]:focus,.label-success[href]:hover{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:focus,.label-info[href]:hover{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:focus,.label-warning[href]:hover{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:focus,.label-danger[href]:hover{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:middle;background-color:#777;border-radius:10px}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.btn-group-xs>.btn .badge,.btn-xs .badge{top:0;padding:1px 5px}a.badge:focus,a.badge:hover{color:#fff;text-decoration:none;cursor:pointer}.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#337ab7;background-color:#fff}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding-top:30px;padding-bottom:30px;margin-bottom:30px;color:inherit;background-color:#eee}.jumbotron .h1,.jumbotron h1{color:inherit}.jumbotron 
p{margin-bottom:15px;font-size:21px;font-weight:200}.jumbotron>hr{border-top-color:#d5d5d5}.container .jumbotron,.container-fluid .jumbotron{padding-right:15px;padding-left:15px;border-radius:6px}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron,.container-fluid .jumbotron{padding-right:60px;padding-left:60px}.jumbotron .h1,.jumbotron h1{font-size:63px}}.thumbnail{display:block;padding:4px;margin-bottom:20px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:border .2s ease-in-out;-o-transition:border .2s ease-in-out;transition:border .2s ease-in-out}.thumbnail a>img,.thumbnail>img{margin-right:auto;margin-left:auto}a.thumbnail.active,a.thumbnail:focus,a.thumbnail:hover{border-color:#337ab7}.thumbnail .caption{padding:9px;color:#333}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:700}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 
0}}@-o-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f5f5f5;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.1);box-shadow:inset 0 1px 2px rgba(0,0,0,.1)}.progress-bar{float:left;width:0;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#337ab7;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress-bar-striped,.progress-striped .progress-bar{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;background-size:40px 40px}.progress-bar.active,.progress.active .progress-bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 
75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 
25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.media{margin-top:15px}.media:first-child{margin-top:0}.media,.media-body{overflow:hidden;zoom:1}.media-body{width:10000px}.media-object{display:block}.media-object.img-thumbnail{max-width:none}.media-right,.media>.pull-right{padding-left:10px}.media-left,.media>.pull-left{padding-right:10px}.media-body,.media-left,.media-right{display:table-cell;vertical-align:top}.media-middle{vertical-align:middle}.media-bottom{vertical-align:bottom}.media-heading{margin-top:0;margin-bottom:5px}.media-list{padding-left:0;list-style:none}.list-group{padding-left:0;margin-bottom:20px}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-left-radius:4px;border-top-right-radius:4px}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}a.list-group-item,button.list-group-item{color:#555}a.list-group-item .list-group-item-heading,button.list-group-item .list-group-item-heading{color:#333}a.list-group-item:focus,a.list-group-item:hover,button.list-group-item:focus,button.list-group-item:hover{color:#555;text-decoration:none;background-color:#f5f5f5}button.list-group-item{width:100%;text-align:left}.list-group-item.disabled,.list-group-item.disabled:focus,.list-group-item.disabled:hover{color:#777;cursor:not-allowed;background-color:#eee}.list-group-item.disabled .list-group-item-heading,.list-group-item.disabled:focus .list-group-item-heading,.list-group-item.disabled:hover .list-group-item-heading{color:inherit}.list-group-item.disabled .list-group-item-text,.list-group-item.disabled:focus .list-group-item-text,.list-group-item.disabled:hover 
.list-group-item-text{color:#777}.list-group-item.active,.list-group-item.active:focus,.list-group-item.active:hover{z-index:2;color:#fff;background-color:#337ab7;border-color:#337ab7}.list-group-item.active .list-group-item-heading,.list-group-item.active .list-group-item-heading>.small,.list-group-item.active .list-group-item-heading>small,.list-group-item.active:focus .list-group-item-heading,.list-group-item.active:focus .list-group-item-heading>.small,.list-group-item.active:focus .list-group-item-heading>small,.list-group-item.active:hover .list-group-item-heading,.list-group-item.active:hover .list-group-item-heading>.small,.list-group-item.active:hover .list-group-item-heading>small{color:inherit}.list-group-item.active .list-group-item-text,.list-group-item.active:focus .list-group-item-text,.list-group-item.active:hover .list-group-item-text{color:#c7ddef}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success,button.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading,button.list-group-item-success .list-group-item-heading{color:inherit}a.list-group-item-success:focus,a.list-group-item-success:hover,button.list-group-item-success:focus,button.list-group-item-success:hover{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,a.list-group-item-success.active:focus,a.list-group-item-success.active:hover,button.list-group-item-success.active,button.list-group-item-success.active:focus,button.list-group-item-success.active:hover{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info,button.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading,button.list-group-item-info 
.list-group-item-heading{color:inherit}a.list-group-item-info:focus,a.list-group-item-info:hover,button.list-group-item-info:focus,button.list-group-item-info:hover{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,a.list-group-item-info.active:focus,a.list-group-item-info.active:hover,button.list-group-item-info.active,button.list-group-item-info.active:focus,button.list-group-item-info.active:hover{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning,button.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading,button.list-group-item-warning .list-group-item-heading{color:inherit}a.list-group-item-warning:focus,a.list-group-item-warning:hover,button.list-group-item-warning:focus,button.list-group-item-warning:hover{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,a.list-group-item-warning.active:focus,a.list-group-item-warning.active:hover,button.list-group-item-warning.active,button.list-group-item-warning.active:focus,button.list-group-item-warning.active:hover{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger,button.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading,button.list-group-item-danger 
.list-group-item-heading{color:inherit}a.list-group-item-danger:focus,a.list-group-item-danger:hover,button.list-group-item-danger:focus,button.list-group-item-danger:hover{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,a.list-group-item-danger.active:focus,a.list-group-item-danger.active:hover,button.list-group-item-danger.active,button.list-group-item-danger.active:focus,button.list-group-item-danger.active:hover{color:#fff;background-color:#a94442;border-color:#a94442}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0,0,0,.05);box-shadow:0 1px 1px rgba(0,0,0,.05)}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-left-radius:3px;border-top-right-radius:3px}.panel-heading>.dropdown .dropdown-toggle{color:inherit}.panel-title{margin-top:0;margin-bottom:0;font-size:16px;color:inherit}.panel-title>.small,.panel-title>.small>a,.panel-title>a,.panel-title>small,.panel-title>small>a{color:inherit}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.list-group,.panel>.panel-collapse>.list-group{margin-bottom:0}.panel>.list-group .list-group-item,.panel>.panel-collapse>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel>.list-group:first-child .list-group-item:first-child,.panel>.panel-collapse>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-left-radius:3px;border-top-right-radius:3px}.panel>.list-group:last-child .list-group-item:last-child,.panel>.panel-collapse>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.panel-heading+.panel-collapse>.list-group 
.list-group-item:first-child{border-top-left-radius:0;border-top-right-radius:0}.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.list-group+.panel-footer{border-top-width:0}.panel>.panel-collapse>.table,.panel>.table,.panel>.table-responsive>.table{margin-bottom:0}.panel>.panel-collapse>.table caption,.panel>.table caption,.panel>.table-responsive>.table caption{padding-right:15px;padding-left:15px}.panel>.table-responsive:first-child>.table:first-child,.panel>.table:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child,.panel>.table:first-child>thead:first-child>tr:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child{border-top-left-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child 
td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child{border-top-right-radius:3px}.panel>.table-responsive:last-child>.table:last-child,.panel>.table:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child 
td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:3px}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive,.panel>.table+.panel-body,.panel>.table-responsive+.panel-body{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child td,.panel>.table>tbody:first-child>tr:first-child th{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-respon
sive>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th{border-bottom:0}.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}.panel>.table-responsive{margin-bottom:0;border:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:4px}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading{border-bottom:0}.panel-group .panel-heading+.panel-collapse>.list-group,.panel-group .panel-heading+.panel-collapse>.panel-body{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#333;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ddd}.panel-default>.panel-heading 
.badge{color:#f5f5f5;background-color:#333}.panel-default>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#337ab7}.panel-primary>.panel-heading{color:#fff;background-color:#337ab7;border-color:#337ab7}.panel-primary>.panel-heading+.panel-collapse>.panel-body{border-top-color:#337ab7}.panel-primary>.panel-heading .badge{color:#337ab7;background-color:#fff}.panel-primary>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#337ab7}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse>.panel-body{border-top-color:#d6e9c6}.panel-success>.panel-heading .badge{color:#dff0d8;background-color:#3c763d}.panel-success>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse>.panel-body{border-top-color:#bce8f1}.panel-info>.panel-heading .badge{color:#d9edf7;background-color:#31708f}.panel-info>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse>.panel-body{border-top-color:#faebcc}.panel-warning>.panel-heading .badge{color:#fcf8e3;background-color:#8a6d3b}.panel-warning>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ebccd1}.panel-danger>.panel-heading 
.badge{color:#f2dede;background-color:#a94442}.panel-danger>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ebccd1}.embed-responsive{position:relative;display:block;height:0;padding:0;overflow:hidden}.embed-responsive .embed-responsive-item,.embed-responsive embed,.embed-responsive iframe,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-16by9{padding-bottom:56.25%}.embed-responsive-4by3{padding-bottom:75%}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px rgba(0,0,0,.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,.15)}.well-lg{padding:24px;border-radius:6px}.well-sm{padding:9px;border-radius:3px}.close{float:right;font-size:21px;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;filter:alpha(opacity=20);opacity:.2}.close:focus,.close:hover{color:#000;text-decoration:none;cursor:pointer;filter:alpha(opacity=50);opacity:.5}button.close{-webkit-appearance:none;padding:0;cursor:pointer;background:0 0;border:0}.modal-open{overflow:hidden}.modal{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;display:none;overflow:hidden;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transition:-webkit-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out;-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);-o-transform:translate(0,-25%);transform:translate(0,-25%)}.modal.in .modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);-o-transform:translate(0,0);transform:translate(0,0)}.modal-open 
.modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #999;border:1px solid rgba(0,0,0,.2);border-radius:6px;outline:0;-webkit-box-shadow:0 3px 9px rgba(0,0,0,.5);box-shadow:0 3px 9px rgba(0,0,0,.5)}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{filter:alpha(opacity=0);opacity:0}.modal-backdrop.in{filter:alpha(opacity=50);opacity:.5}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,.5);box-shadow:0 5px 15px rgba(0,0,0,.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1070;display:block;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:12px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;filter:alpha(opacity=0);opacity:0;line-break:auto}.tooltip.in{filter:alpha(opacity=90);opacity:.9}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 
5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{right:5px;bottom:0;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-right .tooltip-arrow{bottom:0;left:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-left .tooltip-arrow{top:0;right:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-right .tooltip-arrow{top:0;left:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px rgba(0,0,0,.2);line-break:auto}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 
14px;margin:0;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover>.arrow{border-width:11px}.popover>.arrow:after{content:"";border-width:10px}.popover.top>.arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,.25);border-bottom-width:0}.popover.top>.arrow:after{bottom:1px;margin-left:-10px;content:" ";border-top-color:#fff;border-bottom-width:0}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,.25);border-left-width:0}.popover.right>.arrow:after{bottom:-10px;left:1px;content:" ";border-right-color:#fff;border-left-width:0}.popover.bottom>.arrow{top:-11px;left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,.25)}.popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,.25)}.popover.left>.arrow:after{right:1px;bottom:-10px;content:" ";border-right-width:0;border-left-color:#fff}.carousel{position:relative}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>a>img,.carousel-inner>.item>img{line-height:1}@media all and (transform-3d),(-webkit-transform-3d){.carousel-inner>.item{-webkit-transition:-webkit-transform .6s ease-in-out;-o-transition:-o-transform .6s ease-in-out;transition:transform .6s 
ease-in-out;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-inner>.item.active.right,.carousel-inner>.item.next{left:0;-webkit-transform:translate3d(100%,0,0);transform:translate3d(100%,0,0)}.carousel-inner>.item.active.left,.carousel-inner>.item.prev{left:0;-webkit-transform:translate3d(-100%,0,0);transform:translate3d(-100%,0,0)}.carousel-inner>.item.active,.carousel-inner>.item.next.left,.carousel-inner>.item.prev.right{left:0;-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;bottom:0;left:0;width:15%;font-size:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6);background-color:rgba(0,0,0,0);filter:alpha(opacity=50);opacity:.5}.carousel-control.left{background-image:-webkit-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.5)),to(rgba(0,0,0,.0001)));background-image:linear-gradient(to right,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1);background-repeat:repeat-x}.carousel-control.right{right:0;left:auto;background-image:-webkit-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-webkit-gradient(linear,left top,right 
top,from(rgba(0,0,0,.0001)),to(rgba(0,0,0,.5)));background-image:linear-gradient(to right,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1);background-repeat:repeat-x}.carousel-control:focus,.carousel-control:hover{color:#fff;text-decoration:none;filter:alpha(opacity=90);outline:0;opacity:.9}.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{position:absolute;top:50%;z-index:5;display:inline-block;margin-top:-10px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{left:50%;margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{right:50%;margin-right:-10px}.carousel-control .icon-next,.carousel-control .icon-prev{width:20px;height:20px;font-family:serif;line-height:1}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;padding-left:0;margin-left:-30%;text-align:center;list-style:none}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;cursor:pointer;background-color:#000;background-color:rgba(0,0,0,0);border:1px solid #fff;border-radius:10px}.carousel-indicators .active{width:12px;height:12px;margin:0;background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6)}.carousel-caption .btn{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{width:30px;height:30px;margin-top:-10px;font-size:30px}.carousel-control .glyphicon-chevron-left,.carousel-control 
.icon-prev{margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{margin-right:-10px}.carousel-caption{right:20%;left:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.btn-group-vertical>.btn-group:after,.btn-group-vertical>.btn-group:before,.btn-toolbar:after,.btn-toolbar:before,.clearfix:after,.clearfix:before,.container-fluid:after,.container-fluid:before,.container:after,.container:before,.dl-horizontal dd:after,.dl-horizontal dd:before,.form-horizontal .form-group:after,.form-horizontal .form-group:before,.modal-footer:after,.modal-footer:before,.modal-header:after,.modal-header:before,.nav:after,.nav:before,.navbar-collapse:after,.navbar-collapse:before,.navbar-header:after,.navbar-header:before,.navbar:after,.navbar:before,.pager:after,.pager:before,.panel-body:after,.panel-body:before,.row:after,.row:before{display:table;content:" "}.btn-group-vertical>.btn-group:after,.btn-toolbar:after,.clearfix:after,.container-fluid:after,.container:after,.dl-horizontal dd:after,.form-horizontal .form-group:after,.modal-footer:after,.modal-header:after,.nav:after,.navbar-collapse:after,.navbar-header:after,.navbar:after,.pager:after,.panel-body:after,.row:after{clear:both}.center-block{display:block;margin-right:auto;margin-left:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none!important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-lg,.visible-md,.visible-sm,.visible-xs{display:none!important}.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block{display:none!important}@media 
(max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table!important}tr.visible-xs{display:table-row!important}td.visible-xs,th.visible-xs{display:table-cell!important}}@media (max-width:767px){.visible-xs-block{display:block!important}}@media (max-width:767px){.visible-xs-inline{display:inline!important}}@media (max-width:767px){.visible-xs-inline-block{display:inline-block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table!important}tr.visible-sm{display:table-row!important}td.visible-sm,th.visible-sm{display:table-cell!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-block{display:block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline{display:inline!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline-block{display:inline-block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table!important}tr.visible-md{display:table-row!important}td.visible-md,th.visible-md{display:table-cell!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-block{display:block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline{display:inline!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline-block{display:inline-block!important}}@media (min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table!important}tr.visible-lg{display:table-row!important}td.visible-lg,th.visible-lg{display:table-cell!important}}@media (min-width:1200px){.visible-lg-block{display:block!important}}@media (min-width:1200px){.visible-lg-inline{display:inline!important}}@media (min-width:1200px){.visible-lg-inline-block{display:inline-block!important}}@media (max-width:767px){.hidden-xs{display:none!important}}@media (min-width:768px) and 
(max-width:991px){.hidden-sm{display:none!important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none!important}}@media (min-width:1200px){.hidden-lg{display:none!important}}.visible-print{display:none!important}@media print{.visible-print{display:block!important}table.visible-print{display:table!important}tr.visible-print{display:table-row!important}td.visible-print,th.visible-print{display:table-cell!important}}.visible-print-block{display:none!important}@media print{.visible-print-block{display:block!important}}.visible-print-inline{display:none!important}@media print{.visible-print-inline{display:inline!important}}.visible-print-inline-block{display:none!important}@media print{.visible-print-inline-block{display:inline-block!important}}@media print{.hidden-print{display:none!important}} -/*# sourceMappingURL=bootstrap.min.css.map */ \ No newline at end of file diff --git a/tutorials/lite/source_en/_static/css/lite.css b/tutorials/lite/source_en/_static/css/lite.css deleted file mode 100644 index bcafc0151498ecd27f5933f837c0f81caba71c34..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/_static/css/lite.css +++ /dev/null @@ -1,117 +0,0 @@ -.doc-filter-btn { - border: 1px solid #BFBFBF; - margin-right: 0.5rem; - font-size: 0.7rem; - color: #444444; - background-color: white; - width: 9.5rem; - height: 1.7rem; - text-align: left; - position: relative; - -} -.doc-stage-detail button{ - margin-bottom: 0.5rem; -} -button.doc-btn{ - background-color: transparent; - outline: none; -} -.doc-btn-color{ - border: 1px solid #379BE6; - color: #379BE6; -} -.doc-btn-hover{ - border: 1px solid #379BE6; - color: #379BE6; -} -.doc-article-list{ - margin-top: 1.1rem; -} -.doc-article-item{ - padding:2.5rem 2.5rem; - margin-bottom: 1.3rem; - border:1px solid #e5e5e5; - border-radius:0.5rem; - width: 1140px; - box-shadow: 0 0 30px 2px rgba(199,196,196,0.50) -} -.doc-article-item a{ - display:block; - text-decoration:none!important; -} 
-.doc-article-head{ - color: #444444; - font-size:0.9rem; - font-weight:bold; - margin-bottom:0.8rem; - text-align:left; -} -.doc-article-desc{ - font-size:0.7rem; - color:#444444; -} -.doc-footer nav ul li a{ - font-size: 0.7rem; -} -.doc-footer nav ul li span{ - font-size: 0.7rem; -} -.doc-title{ - font-size: 1.6rem; - color: #444444; - font-weight: bold; - margin-bottom: 2.2rem; -} -.doc-filter{ - font-size: 0.7rem; - color: #666666; -} -.doc-delete{ - font-size: 0.7rem; - color: #379BE6; - float: right; -} -.doc-condition{ - margin-bottom: 2rem; -} -.doc-label-choice{ - font-size: 0.7rem; - margin-bottom: 0.53rem; -} -.doc-os{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-hardware{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-user{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-stage{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-language{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-label-content{ - margin-bottom: 2.2rem; -} -div.col-sm-10{ - padding-left: 3.8rem; -} -.container{ - margin-top: 1rem; - margin-left: -15px; -} -#all{ - border: none; - background-color: transparent; - outline: none; -} - diff --git a/tutorials/lite/source_en/_static/img/choice.png b/tutorials/lite/source_en/_static/img/choice.png deleted file mode 100644 index 5fb06488a24489616b937778c06af9e8d409046b..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/_static/img/choice.png and /dev/null differ diff --git a/tutorials/lite/source_en/_static/js/lite.js b/tutorials/lite/source_en/_static/js/lite.js deleted file mode 100644 index 32d67bfea9f1fa50edb76d044591e7c3bcd59649..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/_static/js/lite.js +++ /dev/null @@ -1,253 +0,0 @@ -$(function() { - $("button.doc-btn").hover(function(){ - - //移入事件 - $(this).addClass('doc-btn-hover') - },function(){ - //移出事件 - $(this).removeClass('doc-btn-hover'); - }) - // 每页显示数 - var curNum = 8 - // 计算总数 - var all = 
$('.doc-article-list').children('div.doc-article-item').length; - - var list = [] - - - - $('button.doc-btn').click(function() { - $('.doc-article-item').removeClass('OUO'); - var id_val = $(this).attr('id') - if (id_val !== 'all') { - if ($('#all').hasClass('doc-btn-color')) { - $('#all').removeClass('doc-btn-color').find('img').remove(); - list.splice(list.indexOf('all_exist'), 1); - $('.doc-article-item').removeClass('all_exist'); - } - } else { - $('button.doc-btn-color').each(function() { - var tag = $(this).attr('id'); - $('.' + tag).removeClass(tag + '_exist'); - list.splice(list.indexOf(tag + '_exist'), 1); - }); - - $('button.doc-btn-color').removeClass('doc-btn-color').find('img').remove(); - } - if ($(this).hasClass('doc-btn-color')) { - $(this).removeClass('doc-btn-color').find('img').remove(); - $('.' + id_val).removeClass(id_val + '_exist'); - list.splice(list.indexOf(id_val + '_exist'), 1); - - } else { - if(id_val == 'all'){ - $(this).addClass('doc-btn-color'); - $('.' + id_val).addClass(id_val + '_exist'); - list.push(id_val + '_exist'); - }else{ - $(this).addClass('doc-btn-color').append(''); - $('.' 
+ id_val).addClass(id_val + '_exist'); - list.push(id_val + '_exist'); - } - - } - - if(list.length > 0){ - var os_list = []; - var hardware_list = []; - var user_list = []; - var stage_list = []; - var language_list = []; - var all_list = []; - - $('.doc-article-item').addClass('hidden'); - var str = 'OUO'; - for(var i=0;i 0){ - for(var i=0;i -1){ - os_count += 1; - } - } - }else{ - os_count = 'empty'; - } - - if(hardware_list.length > 0){ - for(var i=0;i -1){ - hardware_count += 1; - } - } - }else{ - hardware_count = 'empty'; - } - - if(user_list.length > 0){ - for(var i=0;i -1){ - user_count += 1; - } - } - }else{ - user_count = 'empty'; - } - - if(stage_list.length > 0){ - for(var i=0;i -1){ - stage_count += 1; - } - } - }else{ - stage_count = 'empty'; - } - - if(language_list.length > 0){ - for(var i=0;i -1){ - language_count += 1; - } - } - }else{ - language_count = 'empty'; - } - - if(all_list.length > 0){ - for(var i=0;i -1){ - all_count += 1; - } - } - }else{ - all_count = 'empty'; - } - - - if(((os_count >0 && os_count <= os_list.length) || os_count=='empty') && ((hardware_count >0 && hardware_count <= hardware_list.length) || hardware_count=='empty') && ((user_count >0 && user_count <= user_list.length) || user_count == 'empty') && ((stage_count >0 && stage_count <= stage_list.length) || stage_count == 'empty') && ((language_count >0 && language_count <= language_list.length) || language_count=='empty')){ - $(this).removeClass('hidden').addClass(str); - } - }); - - }else{ - $('.doc-article-item').addClass('hidden'); - } - - var hidden_num = $('.doc-article-list').children('.doc-article-item.hidden').length; - var all_article = all - hidden_num - // 计算总页数 - var len = Math.ceil((all - hidden_num) / curNum); - // 生成页码 - var pageList = '
  • ' + 'Total ' + all_article + ' Result(s)' + '
  • ' + '
  • '; - // 当前的索引值 - var iNum = 0; - - for (var i = 0; i < len; i++) { - pageList += '
  • ' + (i + 1) + '
  • ' - } - pageList += '
  • ' - // 首页加亮显示 - if (all_article > 0){ - $('#pageNav').html(pageList).find('li').eq(2).addClass('active'); - }else{ - $('#pageNav').html('
  • ' + 'Total ' + all_article + ' Result(s)' + '
  • '); - } - - // 标签页的点击事件 - $('#pageNav').find('li.doc-data').each(function() { - $(this).click(function() { - $(this).addClass('active').siblings('li').removeClass('active'); - iNum = $(this).index() - 2; - if(iNum > 0){ - $('li.pre').removeClass('disabled'); - }else{ - $('li.pre').addClass('disabled'); - } - if(iNum+1 == len){ - $('li.nex').addClass('disabled'); - } - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - - }); - }); - if(iNum == 0){ - $('li.pre').addClass('disabled'); - } - - if(iNum+1 == len){ - $('li.nex').addClass('disabled'); - } - // 向前页点击时间 - $('li.pre').click(function(){ - if(iNum > 0){ - iNum -= 1; - if(iNum == 0){ - $(this).addClass('disabled'); - } - $('li.nex').removeClass('disabled'); - $('#pageNav').find('li.doc-data').eq(iNum).addClass('active').siblings('li').removeClass('active'); - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - } - - }); - - // 向后页点击事件 - $('li.nex').click(function(){ - if(iNum+1 < len){ - iNum += 1; - if(iNum+1 == len){ - $(this).addClass('disabled'); - } - $('li.pre').removeClass('disabled'); - $('#pageNav').find('li.doc-data').eq(iNum).addClass('active').siblings('li').removeClass('active'); - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - } - }); - - // 首页的显示 - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = 0; i < curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show(); - } - - if ($('button.doc-btn-color').length == 0) { - $('#all').trigger('click'); - } - }); 
- - - $('#all').trigger('click'); - - }); - diff --git a/tutorials/lite/source_en/_static/logo_notebook.png b/tutorials/lite/source_en/_static/logo_notebook.png deleted file mode 100644 index f28598315f19f4be76a73ddf5dc6bbdbe4db35fd..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/_static/logo_notebook.png and /dev/null differ diff --git a/tutorials/lite/source_en/_static/logo_source.png b/tutorials/lite/source_en/_static/logo_source.png deleted file mode 100644 index 9932d67ab50871edb0c95979c4e948c812c7cdea..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/_static/logo_source.png and /dev/null differ diff --git a/tutorials/lite/source_en/architecture_lite.md b/tutorials/lite/source_en/architecture_lite.md deleted file mode 100644 index 6e87d12800183d35bae4e26da2ce5799ff2621be..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/architecture_lite.md +++ /dev/null @@ -1,39 +0,0 @@ -# Overall Architecture (Lite) - -`Linux` `Windows` `On Device` `Inference Application` `Intermediate` `Expert` `Contributor` - - - -MindSpore Lite is an ultra-fast, intelligent, and simplified AI engine that enables intelligent applications in all scenarios, provides E2E solutions for users, and helps users enable AI capabilities. - -MindSpore Lite is divided into two parts: offline module and online module. The overall architecture of MindSpore Lite is as follows: - -![architecture](./images/MindSpore-Lite-architecture.png) - -- Offline module: - - - **3rd Model Parsers:** converts third-party models to a unified MindIR. Third-party models include TensorFlow, TensorFlow Lite, Caffe 1.0, and ONNX models. - - - **MindIR:** MindSpore device-cloud unified IR. - - - **Optimizer:** optimizes graphs based on IR, such as operator fusion and constant folding. - - - **Quantizer:** quantization module after training. 
Quantizer supports quantization methods after training, such as weight quantization and activation value quantization. - - - **benchmark:** a tool set for testing performance and debugging accuracy. - - - **Micro CodeGen:** a tool to directly compile models into executable files for IoT scenarios. - -- Online module: - - - **Training/Inference APIs:** the unified C++/Java training inference interface for the device and cloud. - - - **MindRT Lite:** lightweight online runtime, it supports asynchronous execution. - - - **MindData Lite:** used for the device-side data processing. - - - **Delegate:** agent for docking professional AI hardware engine. - - - **Kernels:** the built-in high-performance operator library which provides CPU, GPU and NPU operators. - - - **Learning Strategies:** device-side learning strategies, such as transfer learning. diff --git a/tutorials/lite/source_en/conf.py b/tutorials/lite/source_en/conf.py deleted file mode 100644 index b472aa71f0899d61ef358f7388dcadfe8a2c7706..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/conf.py +++ /dev/null @@ -1,64 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -# import os -# import sys - - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore Lite' -copyright = '2020, MindSpore Lite' -author = 'MindSpore Lite' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'recommonmark', - 'sphinx_markdown_tables', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_static_path = ['_static'] - -def setup(app): - app.add_stylesheet('css/bootstrap.min.css') - app.add_stylesheet('css/lite.css') - app.add_javascript('js/lite.js') diff --git a/tutorials/lite/source_en/faq.md b/tutorials/lite/source_en/faq.md deleted file mode 100644 index a521fa183ed789fe507b3b747f46191135cb40f4..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/faq.md +++ /dev/null @@ -1,44 +0,0 @@ -# FAQ - - - -
    - -**Q:How many log levels are supported by MindSpore Lite? How can I set the log level?** - -A:Currently MindSpore Lite supports 4 log levels, including DEBUG, INFO, WARNING and ERROR. Users can set log level by set environment parameter GLOG_v. This environment parameter ranges from 0 to 3, which represents DEBUG, INFO, WARNING and ERROR. The default log level is WARNING or ERROR. For example, if the user sets GLOG_v to 1, MindSpore Lite will print the log of INFO level or higher. -
    - -**Q: What are the limitations of NPU?** - -A: Currently NPU only supports system ROM version EMUI>=11. Chip support includes Kirin 9000, Kirin 9000E, Kirin 990, Kirin 985, Kirin 820, Kirin 810, etc. For specific constraints and chip support, please see: - -
    - -**Q: Why does the static library after cutting with the cropper tool fail to compile during integration?** - -A: Currently the cropper tool only supports CPU libraries, that is, `-e CPU` is specified in the compilation command. For details, please refer to [Use clipping tool to reduce library file size](https://www.mindspore.cn/tutorial/lite/en/master/use/cropper_tool.html) document. - -
    - -**Q: Will MindSpore Lite run out of device memory, when running model?** - -A: Currently the MindSpore Lite built-in memory pool has a maximum capacity limit 3GB. If a model is bigger than 3GB, MindSpore Lite will throw error. - -**Q: How do I visualize the MindSpore Lite offline model (.ms file) to view the network structure?** - -A: Model visualization open-source repository `Netron` supports viewing MindSpore Lite models (MindSpore >= r1.2), which can be downloaded in the [Netron](https://github.com/lutzroeder/netron). - -
    - -**Q: Does MindSpore have a quantized inference tool?** - -A: [MindSpore Lite](https://www.mindspore.cn/lite/en) supports the inference of the quantization aware training model on the cloud. The MindSpore Lite converter tool provides the quantization after training and weight quantization functions which are being continuously improved. - -
    - -**Q: Does MindSpore have a lightweight on-device inference engine?** - -A:The MindSpore lightweight inference framework MindSpore Lite has been officially launched in r0.7. You are welcome to try it and give your comments. For details about the overview, tutorials, and documents, see [MindSpore Lite](https://www.mindspore.cn/lite/en). - -
    diff --git a/tutorials/lite/source_en/image_classification_lite.md b/tutorials/lite/source_en/image_classification_lite.md deleted file mode 100644 index f4c0e707a8da2b73fe8a6ebb1a916b8ca757dff6..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/image_classification_lite.md +++ /dev/null @@ -1,40 +0,0 @@ -# Image Classification Model - - - -## Image classification introduction - -Image classification is to identity what an image represents, to predict the object list and the probabilities. For example,the following table shows the classification results after mode inference. - -![image_classification](images/image_classification_result.png) - -| Category | Probability | -| ---------- | ----------- | -| plant | 0.9359 | -| flower | 0.8641 | -| tree | 0.8584 | -| houseplant | 0.7867 | - -Using MindSpore Lite to realize image classification [example](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/image_classification). - -## Image classification model list - -The following table shows the data of some image classification models using MindSpore Lite inference. - -> The performance of the table below is tested on the mate30. 
- -| Model name | Size(Mb) | Top1 | Top5 | F1 | CPU 4 thread delay (ms) | -|-----------------------| :----------: | :----------: | :----------: | :----------: | :-----------: | -| [MobileNetV2](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms) | 11.5 | - | - | 65.5% | 14.595 | -| [Inceptionv3](https://download.mindspore.cn/model_zoo/official/lite/inceptionv3_lite/inceptionv3.ms) | 90.9 | 78.62% | 94.08% | - | 92.086 | -| [Shufflenetv2](https://download.mindspore.cn/model_zoo/official/lite/shufflenetv2_lite/shufflenetv2.ms) | 8.8 | 67.74% | 87.62% | - | 8.303 | -| [GoogleNet](https://download.mindspore.cn/model_zoo/official/lite/googlenet_lite/googlenet.ms) | 25.3 | 72.2% | 90.06% | - | 23.257 | -| [ResNext50](https://download.mindspore.cn/model_zoo/official/lite/resnext50_lite/resnext50.ms) | 95.8 | 73.1% | 91.21% | - | 138.164 | -| [GhostNet](https://download.mindspore.cn/model_zoo/official/lite/ghostnet_lite/ghostnet.ms) | 15.0 | 73.9% | 91.40% | - | 9.959 | -| [GhostNet600](https://download.mindspore.cn/model_zoo/official/lite/ghostnet_lite/ghostnet600.ms) | 40.4 | 80.2% | 94.90% | - | 52.243 | -| [GhostNet_int8](https://download.mindspore.cn/model_zoo/official/lite/ghostnet_lite/ghostnet_int8.ms) | 15.3 | 73.6% | - | - | 31.452 | -| [VGG-Small-low_bit](https://download.mindspore.cn/model_zoo/official/lite/low_bit_quant/low_bit_quant_bs_1.ms) | 17.8 | 93.7% | - | - | 9.082 | -| [ResNet50-0.65x](https://download.mindspore.cn/model_zoo/official/lite/adversarial_pruning_lite/adversarial_pruning.ms) | 48.6 | 80.2% | - | - | 89.816 | -| [plain-CNN-ResNet18](https://download.mindspore.cn/model_zoo/official/lite/residual_distill_lite/residual_distill_res18_cifar10_bs_1_update.ms) | 97.3 | 95.4% | - | - | 63.227 | -| [plain-CNN-ResNet34](https://download.mindspore.cn/model_zoo/official/lite/residual_distill_lite/residual_distill_res34_cifar10_bs_1_update.ms) | 80.5 | 95.0% | - | - | 20.652 | -| 
[plain-CNN-ResNet50](https://download.mindspore.cn/model_zoo/official/lite/residual_distill_lite/residual_distill_res50_cifar10_bs_1_update.ms) | 89.6 | 94.5% | - | - | 24.561 | diff --git a/tutorials/lite/source_en/image_segmentation_lite.md b/tutorials/lite/source_en/image_segmentation_lite.md deleted file mode 100644 index be4f2854edabf824275a0c43f31720a97d67b166..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/image_segmentation_lite.md +++ /dev/null @@ -1,19 +0,0 @@ -# Image Segmentation Model - - - -## Image Segmentation introduction - -Image segmentation is used to detect the position of the object in the picture or a pixel belongs to which object. - -Using MindSpore Lite to perform image segmentation [example](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/image_segmentation). - -## Image segmentation model list - -The following table shows the data of some image segmentation models using MindSpore Lite inference. - -> The performance of the table below is tested on the mate30. 
- -| Model name | Size(Mb) | IoU | CPU 4 thread delay (ms) | -|-----------------------| :----------: | :----: | :-----------: | -| [Deeplabv3](https://download.mindspore.cn/model_zoo/official/lite/deeplabv3_lite/deeplabv3.ms) | 18.7 | 0.58 | 120 | diff --git a/tutorials/lite/source_en/images/MindSpore Lite architecture.pptx b/tutorials/lite/source_en/images/MindSpore Lite architecture.pptx deleted file mode 100644 index fe109a157970ae5bdb6977ba4037699de99a2fe3..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/MindSpore Lite architecture.pptx and /dev/null differ diff --git a/tutorials/lite/source_en/images/MindSpore-Lite-architecture.png b/tutorials/lite/source_en/images/MindSpore-Lite-architecture.png deleted file mode 100644 index 0b8afedb9e3a62d723faa916520c35171c0482fc..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/MindSpore-Lite-architecture.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/after_transfer.png b/tutorials/lite/source_en/images/after_transfer.png deleted file mode 100644 index cb066922a36214a940741f4c2bca96ec35ec7d19..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/after_transfer.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/before_transfer.png b/tutorials/lite/source_en/images/before_transfer.png deleted file mode 100644 index ba2fe024d6382a1bad7b0f6cc4f2623e4815c2cf..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/before_transfer.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/classification_apk.png b/tutorials/lite/source_en/images/classification_apk.png deleted file mode 100644 index 30e78acf6566c3747c53c420fdc9ae95f30a93c9..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/classification_apk.png and /dev/null differ diff --git 
a/tutorials/lite/source_en/images/image_classification_result.png b/tutorials/lite/source_en/images/image_classification_result.png deleted file mode 100644 index a7cc49f582440e31b6b5b14dbba5131bfed2a4b4..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/image_classification_result.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/lite_codegen.png b/tutorials/lite/source_en/images/lite_codegen.png deleted file mode 100644 index 14283ccde9b250b0c0eb981643f9215f1202cfe1..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/lite_codegen.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/lite_quick_start_app_result.png b/tutorials/lite/source_en/images/lite_quick_start_app_result.png deleted file mode 100644 index a7cc49f582440e31b6b5b14dbba5131bfed2a4b4..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/lite_quick_start_app_result.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/lite_quick_start_home.png b/tutorials/lite/source_en/images/lite_quick_start_home.png deleted file mode 100644 index c48cf581b33afbc15dbf27be495215b999e1be60..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/lite_quick_start_home.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/lite_quick_start_project_structure.png b/tutorials/lite/source_en/images/lite_quick_start_project_structure.png deleted file mode 100644 index ade37a61ef97a479401240215e302011c014824c..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/lite_quick_start_project_structure.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/lite_quick_start_run_app.PNG b/tutorials/lite/source_en/images/lite_quick_start_run_app.PNG deleted file mode 100644 index 2557b6293de5b3d7fefe7f6e58b57c03deabb55d..0000000000000000000000000000000000000000 Binary files 
a/tutorials/lite/source_en/images/lite_quick_start_run_app.PNG and /dev/null differ diff --git a/tutorials/lite/source_en/images/lite_quick_start_sdk.png b/tutorials/lite/source_en/images/lite_quick_start_sdk.png deleted file mode 100644 index 1fcb8acabc9ba9d289efbe7e82ee5e2da8bfe073..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/lite_quick_start_sdk.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/lite_runtime.png b/tutorials/lite/source_en/images/lite_runtime.png deleted file mode 100644 index de9c88208a721874c531f52fd855186c86d30671..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/lite_runtime.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/lite_segmentation_quick_start_install.png b/tutorials/lite/source_en/images/lite_segmentation_quick_start_install.png deleted file mode 100644 index e2782bd727cc3b8f82286519a9bd73ef1fc53767..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/lite_segmentation_quick_start_install.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/object_detection.png b/tutorials/lite/source_en/images/object_detection.png deleted file mode 100644 index ad5425c86393a9367701166796df42c9e4702988..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/object_detection.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/posenet_detection.png b/tutorials/lite/source_en/images/posenet_detection.png deleted file mode 100644 index db253e597caa3c8c825b466ef2bc0ce7893d1411..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/posenet_detection.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/segmentation1.png b/tutorials/lite/source_en/images/segmentation1.png deleted file mode 100644 index 85456fdecf884d17707a5440676299c6a2af10ca..0000000000000000000000000000000000000000 
Binary files a/tutorials/lite/source_en/images/segmentation1.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/segmentation2.png b/tutorials/lite/source_en/images/segmentation2.png deleted file mode 100644 index e872283bf0adf4aa13d100078630ba7e1f4f1bb1..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/segmentation2.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/segmentation3.png b/tutorials/lite/source_en/images/segmentation3.png deleted file mode 100644 index d5eeb242a4833dfd46614faa7ee0e70930923769..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/segmentation3.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/segmentation4.png b/tutorials/lite/source_en/images/segmentation4.png deleted file mode 100644 index 8b6559cf1ea92c121c306523354f8674ed30f08f..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/segmentation4.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/segmentation5.png b/tutorials/lite/source_en/images/segmentation5.png deleted file mode 100644 index bf733f051d49e0d0f6c7a7169ca012a3bc738df9..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/segmentation5.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/segmentation6.png b/tutorials/lite/source_en/images/segmentation6.png deleted file mode 100644 index 0954855aa38370cdf10e3685b70fc8da667c8dea..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/segmentation6.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/segmentation7.png b/tutorials/lite/source_en/images/segmentation7.png deleted file mode 100644 index 872e9d7f125d101cd1bbcfc022401281ebfc574f..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/segmentation7.png and /dev/null differ diff --git 
a/tutorials/lite/source_en/images/segmentation_apk.png b/tutorials/lite/source_en/images/segmentation_apk.png deleted file mode 100644 index 0d8c4daad80a4bf30854e85825195cebf022a61a..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/segmentation_apk.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/side_infer_process.png b/tutorials/lite/source_en/images/side_infer_process.png deleted file mode 100644 index 4fe154283a29dfb3442882e203421e6d4e58ba8a..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/side_infer_process.png and /dev/null differ diff --git a/tutorials/lite/source_en/images/side_train_sequence.png b/tutorials/lite/source_en/images/side_train_sequence.png deleted file mode 100644 index d403ce000b655a425f6b61d95e15067ed467385a..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_en/images/side_train_sequence.png and /dev/null differ diff --git a/tutorials/lite/source_en/index.rst b/tutorials/lite/source_en/index.rst deleted file mode 100644 index e3e293642e64ca149dc2819d2ede22ecd1bd800d..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/index.rst +++ /dev/null @@ -1,405 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Aug 17 09:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Using MindSpore on Mobile and IoT -======================================= - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Quick Start - :hidden: - - quick_start/quick_start_cpp - quick_start/quick_start_java - quick_start/quick_start - quick_start/image_segmentation - quick_start/train_lenet - quick_start/train_lenet_java - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Obtain MindSpore Lite - :hidden: - - use/downloads - use/build - -.. 
toctree:: - :glob: - :maxdepth: 1 - :caption: Inference on Devices - :hidden: - - use/converter_tool - use/post_training_quantization - use/data_preprocessing - use/runtime - use/micro - use/asic - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Training on Devices - :hidden: - - use/converter_train - use/runtime_train - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Other Tools - :hidden: - - use/benchmark - use/cropper_tool - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: References - :hidden: - - architecture_lite - operator_list_lite - operator_list_codegen - model_lite - C++ API - Java API - faq - -.. raw:: html - -
    -
    -
    -
    - - -
    - Filter - -
    - -
    -
    -
    -
    -
    Environment
    -
    -
    - - - -
    -
    - -
    - - -
    -
    -
    -
    User
    -
    -
    - - - -
    -
    -
    - -
    -
    -
    -
    Stage
    -
    -
    - - - - - - - - - - - -
    -
    -
    - -
    -
    -
    -
    Application Specific Integrated Circuit
    -
    -
    - -
    -
    -
    -
    -
    -
    -
    Programming Language
    -
    -
    - - -
    -
    -
    - -
    -
    - - - -
    - - - - - - - - - - - - - - - - - - - -
    - -
    - -
    -
    -
    - \ No newline at end of file diff --git a/tutorials/lite/source_en/model_lite.rst b/tutorials/lite/source_en/model_lite.rst deleted file mode 100644 index d012998345df75509daa0aba56db98cf7744fae8..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/model_lite.rst +++ /dev/null @@ -1,12 +0,0 @@ -Model List -=================== - -.. toctree:: - :maxdepth: 1 - - image_classification_lite - object_detection_lite - posenet_lite - image_segmentation_lite - style_transfer_lite - scene_detection_lite \ No newline at end of file diff --git a/tutorials/lite/source_en/object_detection_lite.md b/tutorials/lite/source_en/object_detection_lite.md deleted file mode 100644 index 4bc6f25dd3501b06e199648f7ccea90b713dbbd6..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/object_detection_lite.md +++ /dev/null @@ -1,26 +0,0 @@ -# Object Detection Model - - - -## Object dectectin introduction - -Object detection can identify the object in the image and its position in the image. For the following figure, the output of the object detection model is shown in the following table. The rectangular box is used to identify the position of the object in the graph and the probability of the object category is marked. The four numbers in the coordinates are Xmin, Ymin, Xmax, Ymax; the probability represents the probility of the detected object. - -![object_detectiontion](images/object_detection.png) - -| Category | Probability | Coordinate | -| -------- | ----------- | ---------------- | -| mouse | 0.78 | [10, 25, 35, 43] | - -Using MindSpore Lite to implement object detection [example](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/object_detection). - -## Object detection model list - -The following table shows the data of some object detection models using MindSpore Lite inference. - -> The performance of the table below is tested on the mate30. 
- -| Model name | Size(Mb) | mAP(IoU=0.50:0.95) | CPU 4 thread delay (ms) | -|-----------------------| :----------: | :----------: | :-----------: | -| [MobileNetv2-SSD](https://download.mindspore.cn/model_zoo/official/lite/ssd_mobilenetv2_lite/ssd.ms) | 16.7 | 0.22 | 25.4 | -| [GhostNet-SSD](https://download.mindspore.cn/model_zoo/official/lite/ssd_ghostnet_lite/ssd.ms) | 25.7 | 0.24 | 24.1 | diff --git a/tutorials/lite/source_en/operator_list_codegen.md b/tutorials/lite/source_en/operator_list_codegen.md deleted file mode 100644 index b772f55c6f26bd55d6820af6c17db8f2bd9c099d..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/operator_list_codegen.md +++ /dev/null @@ -1,65 +0,0 @@ -# Codegen Operator List - -`Linux` `Ascend` `Device` `Inference` `Beginner` `Intermediate` `Expert` - - - -This article lists the operators supported by MindSpore Lite Codegen. - -| Operation
      | CPU
    FP32 | CPU
    Int8 | CMSIS
    Int8 | TensorFlow Lite
    operators supported | Caffe Lite
    operators supported | Onnx Lite
    operators supported |TensorFlow
    operators supported | -|-----------------------|:--------------:|:--------------:|:-----------------:|---------------------------------|--------------------------|-------------------------------------------------|-------------------------------------------------| -| Abs | ✅ | | | Abs | | Abs | | -| Add | ✅ | ✅ | ✅ | Add | | Add, Int8Add | Add, AddV2 | -| AddN | ✅ | | | AddN | | | | -| AvgPool | ✅ | ✅ | ✅ | MeanPooling | Pooling | AveragePool,
    GlobalAveragePool,
    Int8AveragePool | | -| BatchNorm | | ✅ | ✅ | | BatchNorm | BatchNormalization | | -| BiasAdd | ✅ | | | | | BiasAdd | BiasAdd | -| Cast | ✅ | ✅ | ✅ | Cast, QUANTIZE,
    DEQUANTIZE | | Cast | Cast | -| Ceil | ✅ | | | Ceil | | Ceil | | -| Concat | ✅ | ✅ | ✅ | Concat | Concat | Concat | ConcatV2 | -| Conv2d | ✅ | ✅ | ✅ | Conv2D | Convolution | Conv,
    Int8Conv,
    ConvRelu,
    Int8ConvRelu | Conv2D | -| Cos | ✅ | | | Cos | | Cos | | -| DetectionPostProcess | | ✅ | | Custom | | | | -| Div | ✅ | ✅ | ✅ | Div, RealDiv | | Div | Div, RealDiv | -| Eltwise | ✅ | | | | Eltwise | Sum, Max[3] | | -| Equal | ✅ | | | Equal | | Equal | Equal | -| ExpandDims | ✅ | | | ExpandDims | | | ExpandDims | -| Floor | ✅ | | | flOOR | | Floor | | -| FloorDiv | ✅ | | | FloorDiv | | | | -| FloorMod | ✅ | | | FloorMod | | | | -| FullConnection | ✅ | ✅ | ✅ | FullyConnected | InnerProduct | | | -| Greater | ✅ | | | Greater | | Greater | Greater | -| GreaterEqual | ✅ | | | GreaterEqual | | | GreaterEqual | -| Less | ✅ | | | Less | | Less | Less | -| LessEqual | ✅ | | | LessEqual | | | LessEqual | -| Log | ✅ | | | Log | | Log | | -| LogicalAnd | ✅ | | | LogicalAnd | | And | LogicalAnd | -| LogicalNot | ✅ | | | LogicalNot | | Not | | -| LogicalOr | ✅ | | | LogicalOr | | Or | | -| MatMul | ✅ | ✅ | | | | MatMul | MatMul | -| Maximum | ✅ | | | Maximum | | | Maximum | -| MaxPool | ✅ | ✅ | ✅ | MaxPooling | Pooling | MaxPool,
    GlobalMaxPool | | -| Minimum | ✅ | | | Minimum | | Min | Minimum | -| Mul | ✅ | ✅ | ✅ | Mul | | Mul | Mul | -| Neg | ✅ | | | Neg | | Neg | | -| NotEqual | ✅ | | | NotEqual | | |NotEqual | -| ReLU | ✅ | ✅ | ✅ | Relu | ReLU | Relu | Relu | -| ReLU6 | ✅ | ✅ | ✅ | Relu6 | ReLU6 | Clip[1] | Relu6 | -| Reshape | ✅ | ✅ | ✅ | Reshape | Reshape | Reshape,Flatten | Reshape | -| Resize | | ✅ | | ResizeBilinear,
    NearestNeighbor | Interp | | | -| Round | ✅ | | | Round | | Round | Round | -| Rsqrt | ✅ | | | Rsqrt | | | | -| Sigmoid | ✅ | ✅ | ✅ | Logistic | Sigmoid | Sigmoid | Sigmoid | -| Sin | ✅ | | | Sin | | Sin | | -| Softmax | ✅ | ✅ | ✅ | Softmax | Softmax | Softmax | | -| Sqrt | ✅ | | | Sqrt | | Sqrt | | -| Square | ✅ | | | Square | | | | -| SquaredDifference | ✅ | | | SquaredDifference | | | | -| Squeeze | ✅ | | | Squeeze | | Squeeze | Squeeze | -| Sub | ✅ | ✅ | ✅ | Sub | | Sub | Sub | - -[1] Clip: Only support converting clip(0, 6) to Relu6. - -[2] Pow: Only support the form where the exponent is a single constant. - -[3] Sum and Max: Only support 2 inputs. diff --git a/tutorials/lite/source_en/operator_list_lite.md b/tutorials/lite/source_en/operator_list_lite.md deleted file mode 100644 index 8494dee1ded0204324e60504826c38e4743681ed..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/operator_list_lite.md +++ /dev/null @@ -1,182 +0,0 @@ -# Lite Operator List - -`Linux` `On Device` `Inference Application` `Beginner` `Intermediate` `Expert` - - - -This article lists the operators supported by MindSpore Lite. - -| Operation
      | CPU
    FP16 | CPU
    FP32 | CPU
    Int8 | CPU
    UInt8 | GPU
    FP16 | GPU
    FP32 | NPU
      | TensorFlow Lite
    operators supported | Caffe Lite
    operators supported | Onnx Lite
    operators supported | TensorFlow
    operators supported | -|-----------------------|:----------:|:----------:|:----------:|:-----------:|:----------:|:-------------------:|:----------:|----------|---------|---------|---------| -| Abs | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Abs | | Abs | Abs | -| Add | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Add | | Add, Int8Add | Add, AddV2 | -| AddGrad | | ✅ | | | | | | | | | | -| AddN | | ✅ | | | | | | AddN | | | | -| Assert | | ✅ | | | | | | | | | Assert | -| Argmax | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Argmax | ArgMax | ArgMax | Argmax | -| Argmin | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | Argmin | | | ArgMin | -| AvgPool | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | MeanPooling | Pooling | AveragePool,
    GlobalAveragePool,
    Int8AveragePool | AvgPool | -| AvgPoolGrad | | ✅ | | | | | | | | | | -| BatchNorm | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | | BatchNorm | BatchNormalization | | -| BatchNormGrad | | ✅ | | | | | | | | | | -| BatchToSpace | | ✅ | ✅ | ✅ | ✅ | ✅ | | BatchToSpace,
    BatchToSpaceND | | | BatchToSpace,
    BatchToSpaceND | -| BiasAdd | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | | | BiasAdd | BiasAdd | -| BiasAddGrad | | ✅ | | | | | | | | | | -| Broadcast | | ✅ | | | | | | BroadcastTo | | Expand | | -| Cast | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Cast,
    QUANTIZE,
    DEQUANTIZE | | Cast | Cast | -| Ceil | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Ceil | | Ceil | Ceil | -| Concat | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Concat | Concat | Concat | ConcatV2 | -| ConstantOfShape | | ✅ | | | | | | | | ConstantOfShape | | -| Conv2d | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Conv2D | Convolution | Conv, Int8Conv,
    ConvRelu,
    Int8ConvRelu | Conv2D | -| Conv2dGrad | | ✅ | | | | | | | | | | -| Conv2dTranspose | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | DeConv2D | Deconvolution | ConvTranspose | Conv2DBackpropInput | -| Conv2dTransposeGrad | | ✅ | | | | | | | | | | -| Cos | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Cos | | Cos | Cos | -| Crop | ✅ | ✅ | ✅ | ✅ | | | | | Crop | | | -| CropAndResize | | ✅ | | | | | ✅ | | | | CropAndResize | -| CumSum | | ✅ | | | | | | | | | Cumsum | -| CustomExtractFeatures | | ✅ | | | | | | ExtractFeatures | | | | -| CustomNormalize | | ✅ | | | | | | Normalize | | | | -| CustomPredict | | ✅ | | | | | | Predict | | | | -| DeDepthwiseConv2D | | ✅ | ✅ | ✅ | | | | | Deconvolution | | | -| DepthToSpace | | ✅ | ✅ | ✅ | ✅ | ✅ | | DepthToSpace | | DepthToSpace | | -| DepthwiseConv2dNative | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | DepthwiseConv2D | Convolution | | DepthwiseConv2dNative | -| DetectionPostProcess | | ✅ | ✅ | ✅ | | | | Custom | | | | -| Div | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Div, RealDiv | | Div | Div, RealDiv | -| DivGrad | | ✅ | | | | | | | | | | -| Eltwise | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | Eltwise | Sum, Max[3] | | -| Elu | | ✅ | | | | | | | Elu | Elu,
    NonMaxSuppression | NonMaxSuppressionV3 | -| EluGrad | | ✅ | | | | | | | | | | -| Equal | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Equal | | Equal | Equal | -| Exp | | ✅ | | | ✅ | ✅ | | Exp | Exp | Exp | Exp | -| ExpandDims | ✅ | ✅ | ✅ | ✅ | | | ✅ | ExpandDims | | | ExpandDims | -| Fill | | ✅ | | | | | | Fill | | | Fill | -| Flatten | ✅ | ✅ | | | | | | | Flatten | | | -| Floor | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | flOOR | | Floor | Floor | -| FloorDiv | ✅ | ✅ | | | ✅ | ✅ | ✅ | FloorDiv | | | FloorDiv | -| FloorMod | ✅ | ✅ | | | ✅ | ✅ | ✅ | FloorMod | | | FloorMod | -| FullConnection | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | FullyConnected | InnerProduct | | | -| FusedBatchNorm | ✅ | ✅ | ✅ | ✅ | | | ✅ | FusedBatchNorm | | | FusedBatchNorm,
    FusedBatchNormV3 | -| GatherNd | | ✅ | ✅ | ✅ | ✅ | ✅ | | GatherND | | | GatherNd | -| Gather | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Gather | | Gather | GatherV2 | -| Greater | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Greater | | Greater | Greater | -| GreaterEqual | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | GreaterEqual | | | GreaterEqual | -| GRU | ✅ | ✅ | | | | | | | | | | -| HardTanh | ✅ | ✅ | | | | | | | | | | -| HashtableLookup | | ✅ | | | | | | HashtableLookup | | | | -| HSigmoid | | ✅ | | ✅ | | | | | | | | -| Hswish | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | HardSwish | | | | -| HswishGrad | | ✅ | | | | | | | | | | -| InstanceNorm | ✅ | ✅ | | | | | | InstanceNorm | | | | -| InvertPermutation | | ✅ | | | | | | | | | InvertPermutation | -| L2Norm | | ✅ | ✅ | | | | | L2_NORMALIZATION | | | | -| LayerNorm | | ✅ | ✅ | | | | | | | | | -| LeakyReLU | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | LeakyRelu | | LeakyRelu | LeakyRelu | -| LeakyReLUGrad | | ✅ | | | | | | | | | | -| Less | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Less | | Less | Less | -| LessEqual | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | LessEqual | | | LessEqual | -| LRN | | ✅ | | | | | | LocalResponseNorm | | Lrn, LRN | | -| Log | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Log | | Log | Log | -| LogGrad | ✅ | ✅ | | | | | | | | | | -| LogicalAnd | ✅ | ✅ | | | ✅ | ✅ | ✅ | LogicalAnd | | And | LogicalAnd | -| LogicalNot | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | LogicalNot | | Not | LogicalNot | -| LogicalOr | ✅ | ✅ | | | ✅ | ✅ | ✅ | LogicalOr | | Or | LogicalOr | -| LshProjection | | ✅ | | | | | | LshProjection | | | | -| LSTM | ✅ | ✅ | | | | | | | | LSTM | | -| MatMul | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | | MatMul | MatMul,
    BatchMatMul | -| MatMulGrad | | ✅ | | | | | | | | | | -| Maximum | ✅ | ✅ | | | ✅ | ✅ | ✅ | Maximum | | | Maximum | -| MaximumGrad | | ✅ | | | | | | | | | | -| MaxPool | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | MaxPooling | Pooling | MaxPool,
    GlobalMaxPool | MaxPool | -| MaxPoolGrad | | ✅ | | | | | | | | | | -| Merge | ✅ | ✅ | | | | | | | | | Merge | -| Minimum | ✅ | ✅ | | | ✅ | ✅ | ✅ | Minimum | | Min | Minimum | -| MinimumGrad | | ✅ | | | | | | | | | | -| Mul | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Mul | | Mul | Mul | -| MulGrad | | ✅ | | | | | | | | | | -| Neg | ✅ | ✅ | | | ✅ | ✅ | ✅ | Neg | | Neg | | -| NegGrad | | ✅ | | | | | | | | | | -| NotEqual | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | NotEqual | | | NotEqual | -| OneHot | | ✅ | | | ✅ | ✅ | | OneHot | | OneHot | OneHot | -| Pad | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Pad, MirrorPad | | Pad | MirrorPad,
    Pad | -| Pow | | ✅ | ✅ | ✅ | ✅ | ✅ | | Pow | Power | Pow[2] | Pow | -| PowGrad | | ✅ | | | | | | | | | | -| PReLU | | ✅ | | | ✅ | ✅ | | PRELU | PReLU | PRelu | | -| RandomStandardNormal | | ✅ | | | | | | | | | RandomStandardNormal | -| RandomUniform | | ✅ | | | | | | | | | RandomUniform | -| Range | | ✅ | | | | | | Range | | | Range,
    RaggedRange | -| Rank | | ✅ | | | | | | Rank | | | Rank | -| Reciprocal | ✅ | ✅ | ✅ | | | | ✅ | | | | | -| ReduceAll | | ✅ | | | | | | | | | All | -| ReduceASum | | ✅ | | | ✅ | ✅ | | | Reduction | | | -| ReduceMax | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | ReduceMax | | ReduceMax | Max | -| ReduceMean | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | Mean | Reduction | ReduceMean | Mean | -| ReduceMin | | ✅ | ✅ | ✅ | ✅ | ✅ | | ReduceMin | | ReduceMin | Min | -| ReduceProd | | ✅ | ✅ | ✅ | ✅ | ✅ | | ReduceProd | | ReduceProd | Prod | -| ReduceSum | | ✅ | ✅ | ✅ | ✅ | ✅ | | Sum | Reduction | ReduceSum | Sum | -| ReduceSumSquare | | ✅ | ✅ | ✅ | | | | | Reduction | ReduceSumSquare | | -| ReLU | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Relu | ReLU | Relu | Relu | -| ReLUGrad | ✅ | ✅ | | | | | | | | | | -| ReLU6 | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Relu6 | ReLU6 | Clip[1] | Relu6 | -| ReLU6Grad | ✅ | ✅ | | | | | | | | | | -| Reshape | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Reshape | Reshape | Reshape,
    Flatten | Reshape | -| Resize | | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ResizeBilinear,
    NearestNeighbor | Interp | | ResizeBilinear,
    ResizeBicubic,
    ResizeNearestNeighbor | -| ResizeGrad | | ✅ | | | | | | | | | | -| Reverse | | ✅ | | | | | | reverse | | | ReverseV2 | -| ReverseSequence | | ✅ | | | | | | ReverseSequence | | | ReverseSequence | -| Round | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Round | | Round | Round | -| Rsqrt | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Rsqrt | | | Rsqrt | -| Select | | ✅ | | | | | | | | | Select | -| Selu | | | | | | | | | | | Selu | -| Scale | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | Scale | | | -| ScatterNd | | ✅ | | | | | | ScatterNd | | | | -| Shape | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Shape | | Shape | Shape | -| Sigmoid | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Logistic | Sigmoid | Sigmoid | Sigmoid | -| SigmoidGrad | ✅ | ✅ | | | | | | | | | | -| Sin | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Sin | | Sin | Sin | -| Size | | ✅ | | | | | | | | | Size | -| Slice | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Slice | Slice | Slice | Slice | -| SkipGram | | ✅ | | | | | | SKipGram | | | | -| Softmax | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Softmax | Softmax | Softmax | Softmax | -| SoftmaxGrad | | ✅ | | | | | | | | | | -| Softplus | | ✅ | | | | | | | | | Softplus | -| SpaceToBatch | | ✅ | ✅ | ✅ | ✅ | ✅ | | SpaceToBatch | | | | -| SpaceToBatchND | | ✅ | ✅ | ✅ | ✅ | ✅ | | SpaceToBatchND | | | SpaceToBatchND | -| SpaceToDepth | | ✅ | | | ✅ | ✅ | | SpaceToDepth | | SpaceToDepth | | -| SparseToDense | | ✅ | | | ✅ | ✅ | | SpareToDense | | | | -| Split | ✅ | ✅ | ✅ | ✅ | | | ✅ | Split, SplitV | | Split | Split, SplitV | -| Sqrt | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Sqrt | | Sqrt | Sqrt | -| Square | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Square | | | Square | -| SquaredDifference | ✅ | ✅ | | | ✅ | ✅ | ✅ | SquaredDifference | | | SquaredDifference | -| Squeeze | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | Squeeze | | Squeeze | Squeeze | -| StridedSlice | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | StridedSlice | | | StridedSlice | -| Stack | ✅ | ✅ | | | ✅ | ✅ | | Stack | | | Pack | -| Sub | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Sub | | Sub | Sub | -| SubGrad | | ✅ | | | | | | | | | | -| Swish | ✅ | ✅ | | | | | | | | | | -| Switch | ✅ | ✅ 
| | | | | | | | | Switch | -| Tanh | ✅ | ✅ | | | ✅ | ✅ | ✅ | Tanh | TanH | Tanh, Sign | Tanh | -| TanhGrad | | ✅ | | | | | | | | | | -| TensorListFromTensor | ✅ | ✅ | | | | | | | | | TensorListFromTensor | -| TensorListGetItem | ✅ | ✅ | | | | | | | | | TensorListGetItem | -| TensorListReserve | ✅ | ✅ | | | | | | | | | TensorListReserve | -| TensorListSetItem | ✅ | ✅ | | | | | | | | | TensorListSetItem | -| TensorListStack | ✅ | ✅ | | | | | | | | | TensorListStack | -| Tile | ✅ | ✅ | | | | | ✅ | Tile | Tile | Tile | Tile | -| TopK | | ✅ | ✅ | ✅ | | | | TopKV2 | | TopK | TopKV2 | -| Transpose | ✅ | ✅ | ✅ | | ✅ | ✅ | ✅ | Transpose | Permute | Transpose | Transpose | -| UniformReal | | ✅ | | | | | | | | | | -| Unique | | ✅ | | | | | | Unique | | | | -| Unsqueeze | ✅ | ✅ | ✅ | ✅ | | | ✅ | | | Unsqueeze | | -| Unstack | | ✅ | | | | | | Unstack | | | | -| Where | | ✅ | | | | | | Where | | | Where | -| ZerosLike | | ✅ | | | | | | ZerosLike | | | ZerosLike | -| Other operators supported by the converter.[4] | | | | | | | | | | Loop, Dropout, If | Dropout, Enter,
    Exit, If,
    IsFinite,
    LinSpace,
    LoopCond,
    NextIteration,
    StatelessIf,
    StatelessWhile,
    While | - -[1] Clip: Only support converting clip(0, 6) to Relu6. - -[2] Pow: Only support the form where the exponent is a single constant. - -[3] Sum and Max: Only support 2 inputs. - -[4] Operators supported by [converter](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html) but do not require specific implementation. Generally, such operators are optimized by the conversion tool, such as being merged or replaced by other operators. diff --git a/tutorials/lite/source_en/posenet_lite.md b/tutorials/lite/source_en/posenet_lite.md deleted file mode 100644 index d3f6bc307ba7527c1f1f2aa1dbee2fd1fd910538..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/posenet_lite.md +++ /dev/null @@ -1,15 +0,0 @@ -# Posenet Model - - - -## Posenet introduction - -Under the detection of photo cameras, posenet model can identify the facial features and body posture of the human body in different positions. - -The output of using the bone detection model is as follows: - -The blue marking points detect the distribution of facial features of the human body and the skeletal trend of upper and lower limbs. During this infernece, the probability score is 0.98/1, and the inference time is 66.77ms. - -![image_posenet](images/posenet_detection.png) - -Using MindSpore Lite to realize posenet [example](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/posenet). 
diff --git a/tutorials/lite/source_en/quick_start/image_segmentation.md b/tutorials/lite/source_en/quick_start/image_segmentation.md deleted file mode 100644 index 1b16ba26f00f979394aaed88e02848d5aea972ab..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/quick_start/image_segmentation.md +++ /dev/null @@ -1,309 +0,0 @@ -# Android Application Development Based on Java Interface - -`Android` `Java` `Whole Process` `Model Conversion` `Model Loading` `Inference Application` `Data Preparation` `Beginner` `Intermediate` `Expert` - - - -- [Android Application Development Based on Java Interface](#android-application-development-based-on-java-interface) - - [Overview](#overview) - - [Selecting a Model](#selecting-a-model) - - [Deploying an Application](#deploying-an-application) - - [Running Dependencies](#running-dependencies) - - [Building and Running](#building-and-running) - - [Detailed Demo Description](#detailed-demo-description) - - [Demo Structure](#demo-structure) - - [Configuring MindSpore Lite Dependencies](#configuring-mindspore-lite-dependencies) - - [Downloading and Deploying the Model File](#downloading-and-deploying-the-model-file) - - [Writing On-Device Inference Code](#writing-on-device-inference-code) - - - - - -## Overview - -It is recommended that you start from the image segmentation demo on the Android device to understand how to build the MindSpore Lite application project, configure dependencies, and use related Java APIs. - -This tutorial demonstrates the on-device deployment process based on the image segmentation demo on the Android device provided by the MindSpore team. - -## Selecting a Model - - Select an image segmentation model. - -> Click [here](https://download.mindspore.cn/model_zoo/official/lite/mobile_segment_lite/segment_model.ms) to download the Android image segmentation model. -> -> This example describes how to use Java APIs. 
- -Scan the QR code below or directly download the [APK file](https://download.mindspore.cn/model_zoo/official/lite/apk/segmentation/image_segmentation.apk) corresponding to this sample, and deploy it on an Android device. - -![apk](../images/segmentation_apk.png) - -## Deploying an Application - -The following describes how to build and execute an on-device image segmentation task on MindSpore Lite. - -### Running Dependencies - -- Android Studio 3.2 or later (Android 4.0 or later is recommended.) -- Android SDK 26 or later (installed by Android Studio by default) -- JDK 1.8 or later (installed by Android Studio by default) - -### Building and Running - -1. Load the sample source code to Android Studio and install the corresponding SDK. (After the SDK version is specified, Android Studio automatically installs the SDK.) - - ![start_home](../images/lite_quick_start_home.png) - -2. Connect to an Android device and runs the image segmentation application. - - Connect to the Android device through a USB cable for debugging. Click `Run 'app'` to run the demo on your device. - - ![run_app](../images/lite_quick_start_run_app.PNG) - - For details about how to connect the Android Studio to a device for debugging, see . - - Android Studio can identify the mobile phone only when USB debugging mode is enabled on the mobile phone. For Huawei phones, enable USB debugging mode by choosing Settings > System & updates > Developer options > USB debugging. - -3. On the Android device, click Continue. After the installation is complete, you can view the local album and the segmentation result of the image taken by the camera. - - ![install](../images/lite_segmentation_quick_start_install.png) - - The running result is shown in the following figure. (A portrait in the album is used as an example.) - - ![result1](../images/segmentation1.png) - - Select a portrait from an album. 
- - ![result2](../images/segmentation2.png) - - Select a background image from the nine images to replace and segment the portrait background. - - - - - - - -

    Figure 1 White background

    Figure 2 Blue background

    Figure 3 Oil painting background
    - -## Detailed Demo Description - -The Android demo for on-device image segmentation uses the Java layer. Therefore, you must have basic Android development knowledge. - -### Demo Structure - -```text -app -├── src/main -│ ├── assets # Resource file -| | └── model # Model file -| | └── segment_model.ms # Stored model file -│ | -│ ├── libs # Binary archive file of the Android library project -| | └── mindspore-lite-version.aar # MindSpore Lite archive file of the Android version -│ | -│ ├── java # Application code at the Java layer -│ │ └── com.mindspore.imagesegmentation -│ │ ├── help # Image processing -│ │ │ └── ... -│ │ └── ... Android page display and logic processing -│ │ -│ ├── res # Resource files related to Android -│ └── AndroidManifest.xml # Android configuration file -│ -│ -├── build.gradle # Other Android configuration file -├── download.gradle # Downloading the files that the project depends on -└── ... -``` - -### Configuring MindSpore Lite Dependencies - -Related library files are required for Android to call MindSpore Android AAR. You can use MindSpore Lite [source code](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html) to generate the `mindspore-lite-{version}-inference-android.tar.gz` library file package (including the `mindspore-lite-{version}.aar` library file) and decompress it. - -> version: version number in the output file, which is the same as the version number of the built branch code. - -In this example, the MindSpore Lite version file is automatically downloaded using the `app/download.gradle` file during the build process and stored in the `app/libs` directory. - -Note: If the automatic download fails, manually download the related library file [mindspore-lite-{version}-inference-android.tar.gz](https://www.mindspore.cn/tutorial/lite/en/master/use/downloads.html), decompress it, and save it to the corresponding directory. 
- -### Downloading and Deploying the Model File - -Download the model file from MindSpore Model Hub. The on-device image segmentation model file used in this demo is `segment_model.ms`, which is automatically downloaded during app building using the `app/download.gradle` script and stored in the `app/src/main/assets` project directory. - -Note: If the download fails, manually download the model file [segment_model.ms](https://download.mindspore.cn/model_zoo/official/lite/mobile_segment_lite/segment_model.ms). - -### Writing On-Device Inference Code - -The inference code and process are as follows. For details about the complete code, see [src/java/com/mindspore/imagesegmentation/TrackingMobile](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/lite/image_segmentation/app/src/main/java/com/mindspore/imagesegmentation/help/TrackingMobile.java). - -1. Load the MindSpore Lite model file and build the context, session, and computational graph for inference. - - - Load a model file. Create and configure the context for model inference. - - ```java - // Load the .ms model. - Model model = new Model(); - if (!model.loadModel(Context, "segment_model.ms")) { - Log.e(TAG, "Load Model failed"); - return; - } - ``` - - - Create a session. - - ```java - // Create and init config. - MSConfig msConfig = new MSConfig(); - if (!msConfig.init(DeviceType.DT_CPU, threadNum, CpuBindMode.MID_CPU)) { - Log.e(TAG, "Init context failed"); - return; - } - - // Create the MindSpore lite session. - LiteSession session = new LiteSession(); - if (!session.init(msConfig)) { - Log.e(TAG, "Create session failed"); - msConfig.free(); - return; - } - msConfig.free(); - ``` - - - Load the model file and build a computational graph for inference. - - ```java - // Compile graph. - if (!session.compileGraph(model)) { - Log.e(TAG, "Compile graph failed"); - model.freeBuffer(); - return; - } - - // Note: when use model.freeBuffer(), the model cannot be compile graph again. 
- model.freeBuffer(); - ``` - -2. Convert the input image into the Tensor format that is input to the MindSpore model. - - Convert the image data to be detected into the Tensor format that is input to the MindSpore model. - - ```java - List inputs = session.getInputs(); - if (inputs.size() != 1) { - Log.e(TAG, "inputs.size() != 1"); - return null; - } - - float resource_height = bitmap.getHeight(); - float resource_weight = bitmap.getWidth(); - - ByteBuffer contentArray = BitmapUtils.bitmapToByteBuffer(bitmap, imageSize, imageSize, IMAGE_MEAN, IMAGE_STD); - - MSTensor inTensor = inputs.get(0); - inTensor.setData(contentArray); - ``` - -3. Run the session and execute the computational graph. - - ```java - // Run graph to infer results. - if (!session.runGraph()) { - Log.e(TAG, "Run graph failed"); - return null; - } - ``` - -4. Process the output data. - - - Obtain information such as the dimension, number of batches, and number of channels based on the output data obtained by the tensor. - - ```java - // Get output tensor values. - List tensorNames = session.getOutputTensorNames(); - Map outputs = session.getOutputMapByTensor(); - for (String tensorName : tensorNames) { - MSTensor output = outputs.get(tensorName); - if (output == null) { - Log.e(TAG, "Can not find output " + tensorName); - return null; - } - float[] results = output.getFloatData(); - float[] result = new float[output.elementsNum()]; - - int batch = output.getShape()[0]; - int channel = output.getShape()[1]; - int weight = output.getShape()[2]; - int height = output.getShape()[3]; - int plane = weight * height; - ``` - - - Convert the NCHW format to the NHWC format and put it in `float[] result`. - - ```java - for (int n = 0; n < batch; n++) { - for (int c = 0; c < channel; c++) { - for (int hw = 0; hw < plane; hw++) { - result[n * channel * plane + hw * channel + c] = results[n * channel * plane + c * plane + hw]; - } - } - } - ``` - -5. 
Perform inference and post-processing on the input tensor based on the model. - - - Convert the `float[] result` data into the ByteBuffer data format. - - ```java - ByteBuffer buffer = ByteBuffer.allocate(4 * result.length); - FloatBuffer floatBuffer = buffer.asFloatBuffer(); - floatBuffer.put(result); - return buffer; - ``` - - - Convert the ByteBuffer data format into Bitmap. - - Based on the inferred data, compare the coordinates of each pixel in the bitmap. If the coordinate data corresponds to PERSON, the color of the coordinate point remains unchanged. Otherwise, change the color to transparent, as shown in the following figure. - - ```java - Bitmap.Config conf = Bitmap.Config.ARGB_8888; - Bitmap maskBitmap = Bitmap.createBitmap(imageWidth, imageHeight, conf); - Bitmap scaledBackgroundImage = - BitmapUtils.scaleBitmapAndKeepRatio(backgroundImage, imageWidth, imageHeight); - int[][] mSegmentBits = new int[imageWidth][imageHeight]; - inputBuffer.rewind(); - for (int y = 0; y < imageHeight; y++) { - for (int x = 0; x < imageWidth; x++) { - float maxVal = 0f; - mSegmentBits[x][y] = 0; - for (int i = 0; i < NUM_CLASSES; i++) { - float value = inputBuffer.getFloat((y * imageWidth * NUM_CLASSES + x * NUM_CLASSES + i) * 4); - if (i == 0 || value > maxVal) { - maxVal = value; - if (i == PERSON) { - mSegmentBits[x][y] = i; - } else { - mSegmentBits[x][y] = 0; - } - } - } - maskBitmap.setPixel(x, y, mSegmentBits[x][y] == 0 ? colors[0] : scaledBackgroundImage.getPixel(x, y)); - } - } - ``` - - - - - - -

    Figure 1 Before inference

    Figure 2 After inference
    - -6. Combine the image after inference with the selected background image. - - ```java - MainActivity.this.imgPreview.setDrawingCacheEnabled(true); - MainActivity.this.imgPreview.setBackground(isDemo ? getDrawable(IMAGES[selectedPosition]) : customBack); - MainActivity.this.imgPreview.setImageBitmap(foreground); - MainActivity.this.imgPreview.setDrawingCacheEnabled(false); - ``` diff --git a/tutorials/lite/source_en/quick_start/quick_start.md b/tutorials/lite/source_en/quick_start/quick_start.md deleted file mode 100644 index 3b69b70e631b9becee52ab6fce4ede4cb762d971..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/quick_start/quick_start.md +++ /dev/null @@ -1,431 +0,0 @@ -# Android Application Development Based on JNI Interface - -`Android` `C++` `Whole Process` `Model Converting` `Model Loading` `Inference` `Data Preparation` `Beginner` `Intermediate` `Expert` - - - -- [Implementing an Image Classification Application](#implementing-an-image-classification-application) - - [Overview](#overview) - - [Selecting a Model](#selecting-a-model) - - [Converting a Model](#converting-a-model) - - [Deploying an Application](#deploying-an-application) - - [Running Dependencies](#running-dependencies) - - [Building and Running](#building-and-running) - - [Detailed Description of the Sample Program](#detailed-description-of-the-sample-program) - - [Sample Program Structure](#sample-program-structure) - - [Configuring MindSpore Lite Dependencies](#configuring-mindspore-lite-dependencies) - - [Downloading and Deploying a Model File](#downloading-and-deploying-a-model-file) - - [Writing On-Device Inference Code](#writing-on-device-inference-code) - - - - - -## Overview - -It is recommended that you start from the image classification demo on the Android device to understand how to build the MindSpore Lite application project, configure dependencies, and use related APIs. 
- -This tutorial demonstrates the on-device deployment process based on the image classification sample program on the Android device provided by the MindSpore team. - -1. Select an image classification model. -2. Convert the model into a MindSpore Lite model. -3. Use the MindSpore Lite inference model on the device. The following describes how to use the MindSpore Lite C++ APIs (Android JNIs) and MindSpore Lite image classification models to perform on-device inference, classify the content captured by a device camera, and display the most possible classification result on the application's image preview screen. - -> Click to find [Android image classification models](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite) and [image classification sample code](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/image_classification). -> -> In this example, we explain how to use C++ API. Besides, MindSpore Lite also supports Java API. Please refer to [image segmentation demo](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/image_segmentation) to learn more about Java API. - -We provide the APK file corresponding to this example. You can scan the QR code below or download the [APK file](https://download.mindspore.cn/model_zoo/official/lite/apk/label/Classification.apk) directly, and deploy it to Android devices for use. - -![apk](../images/classification_apk.png) - -## Selecting a Model - -The MindSpore team provides a series of preset device models that you can use in your application. -Click to download [image classification models](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms) in MindSpore ModelZoo. -In addition, you can use the preset model to perform transfer learning to implement your image classification tasks. 
- -## Converting a Model - -After you retrain a model provided by MindSpore, export the model in the [.mindir format](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#export-mindir-model). Use the MindSpore Lite [model conversion tool](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html) to convert the .mindir format to a .ms model. - -Take the mobilenetv2 model as an example. Execute the following script to convert a model into a MindSpore Lite model for on-device inference. - -```bash -call converter_lite --fmk=MINDIR --modelFile=mobilenetv2.mindir --outputFile=mobilenetv2 -``` - -## Deploying an Application - -The following section describes how to build and execute an on-device image classification task on MindSpore Lite. - -### Running Dependencies - -- Android Studio 3.2 or later and Android 4.0 or later is recommended. -- Native development kit (NDK) 21.3 -- [CMake](https://cmake.org/download) >= 3.18.3 -- Android software development kit (SDK) 26 or later -- JDK 1.8 or later - -### Building and Running - -1. Load the sample source code to Android Studio and install the corresponding SDK. (After the SDK version is specified, Android Studio automatically installs the SDK.) - - ![start_home](../images/lite_quick_start_home.png) - - Start Android Studio, click `File > Settings > System Settings > Android SDK`, and select the corresponding SDK. As shown in the following figure, select an SDK and click `OK`. Android Studio automatically installs the SDK. - - ![start_sdk](../images/lite_quick_start_sdk.png) - - (Optional) If an NDK version issue occurs during the installation, manually download the corresponding [NDK version](https://developer.android.com/ndk/downloads) (the version used in the sample code is 21.3). Specify the NDK location in `Android NDK location` of `Project Structure`. - - ![project_structure](../images/lite_quick_start_project_structure.png) - -2. 
Connect to an Android device and run the image classification application. - - Connect to the Android device through a USB cable for debugging. Click `Run 'app'` to run the sample project on your device. - - ![run_app](../images/lite_quick_start_run_app.PNG) - - For details about how to connect the Android Studio to a device for debugging, see the Android Studio documentation. - - The mobile phone needs to turn on "USB debugging mode" for Android Studio to recognize the phone. In general, Huawei mobile phones turn on "USB debugging mode" in Settings -> System and Update -> Developer Options -> USB Debugging. - -3. Continue the installation on the Android device. After the installation is complete, you can view the content captured by a camera and the inference result. - - ![result](../images/lite_quick_start_app_result.png) - -## Detailed Description of the Sample Program - -This image classification sample program on the Android device includes a Java layer and a JNI layer. At the Java layer, the Android Camera 2 API is used to enable a camera to obtain image frames and process images. At the JNI layer, the model inference process is completed in [Runtime](https://www.mindspore.cn/tutorial/lite/en/master/use/runtime.html). - -> The following describes the JNI layer implementation of the sample program. At the Java layer, the Android Camera 2 API is used to enable a device camera and process image frames. Readers are expected to have the basic Android development knowledge. - -### Sample Program Structure - -```text -app -│ -├── src/main -│ ├── assets # resource files -| | └── model # model files -| | └── mobilenetv2.ms # stored model file -│ | -│ ├── cpp # main logic encapsulation classes for model loading and prediction -| | |── ... 
-| | ├── mindspore-lite-{version}-android-{arch} #MindSpore Lite version -| | ├── MindSporeNetnative.cpp # JNI methods related to MindSpore calling -│ | └── MindSporeNetnative.h # header file -│ | -│ ├── java # application code at the Java layer -│ │ └── com.mindspore.classification -│ │ ├── gallery.classify # implementation related to image processing and MindSpore JNI calling -│ │ │ └── ... -│ │ └── widget # implementation related to camera enabling and drawing -│ │ └── ... -│ │ -│ ├── res # resource files related to Android -│ └── AndroidManifest.xml # Android configuration file -│ -├── CMakeList.txt # CMake compilation entry file -│ -├── build.gradle # Other Android configuration file -├── download.gradle # MindSpore version download -└── ... -``` - -### Configuring MindSpore Lite Dependencies - -When MindSpore C++ APIs are called at the Android JNI layer, related library files are required. You can use MindSpore Lite [source code compilation](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html) to generate the MindSpore Lite version. In this case, you need to use the compile command of generate with image preprocessing module. - -In this example, the build process automatically downloads the `mindspore-lite-{version}-android-{arch}.tar.gz` by the `app/download.gradle` file and saves in the `app/src/main/cpp` directory. - -> version: Version number of the .tar package, which is the same as the version of the compiled branch code. -> -> arch: Operating system arm64 or arm32. - -Note: if the automatic download fails, please manually download the relevant library files [mindspore-lite-{version}-android-{arch}.tar.gz](https://www.mindspore.cn/tutorial/lite/en/master/use/downloads.html). After decompression, copy the `mindspore-lite-{version}-android-{arch}` folder to the directory of `src/main/cpp`. 
- -```text -android{ - defaultConfig{ - externalNativeBuild{ - cmake{ - arguments "-DANDROID_STL=c++_shared" - } - } - - ndk{ - abiFilters'armeabi-v7a', 'arm64-v8a' - } - } -} -``` - -Create a link to the `.so` library file in the `app/CMakeLists.txt` file: - -```text -# ============== Set MindSpore Dependencies. ============= -include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp) -include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/inference) -include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/inference/include) -include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/inference/minddata/include) - -add_library(mindspore-lite SHARED IMPORTED) -add_library(minddata-lite SHARED IMPORTED) - -set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION - ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/inference/lib/libmindspore-lite.so) -set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION - ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/inference/minddata/lib/libminddata-lite.so) -# --------------- MindSpore Lite set End. -------------------- - -# Link target library. -target_link_libraries( # Specifies the target library. - mlkit-label-MS - - # --- mindspore --- - minddata-lite - mindspore-lite - - # --- other dependencies.--- - -ljnigraphics - android - - # Links the target library to the log library - ${log-lib} - ) -``` - -### Downloading and Deploying a Model File - -In this example, the build process automatically downloads the `mobilenetv2.ms` by referring to the `app/download.gradle` file and saves in the `app/src/main/assets/model` directory. - -Note: if the automatic download fails, please manually download the relevant library files [mobilenetv2.ms]( https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms) and put them in the corresponding location. 
- -### Writing On-Device Inference Code - -Call MindSpore Lite C++ APIs at the JNI layer to implement on-device inference. - -The inference process code is as follows. For details about the complete code, see [MindSporeNetnative.cpp](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/lite/image_classification/app/src/main/cpp/MindSporeNetnative.cpp). - -1. Load the MindSpore Lite model file and build the context, session, and computational graph for inference. - - - Load model file: - - Read the model file in the Java layer of Android and convert it into a ByteBuffer object `model_buffer`, which is transferred to the C++ layer by calling JNI. Finally, the `model_buffer` is converted to a char array `modelBuffer`. - - ```cpp - // Buffer is the model data passed in by the Java layer - jlong bufferLen = env->GetDirectBufferCapacity(model_buffer); - if (0 == bufferLen) { - MS_PRINT("error, bufferLen is 0!"); - return (jlong) nullptr; - } - - char *modelBuffer = CreateLocalModelBuffer(env, model_buffer); - if (modelBuffer == nullptr) { - MS_PRINT("modelBuffer create failed!"); - return (jlong) nullptr; - } - ``` - - - Build context, session, and computational graph for inference: - - Build context and set session parameters. Create a session from context and model data. - - ```cpp - // To create a MindSpore network inference environment. 
- void **labelEnv = new void *; - MSNetWork *labelNet = new MSNetWork; - *labelEnv = labelNet; - - mindspore::lite::Context *context = new mindspore::lite::Context; - context->thread_num_ = num_thread; - context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND; - context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false; - context->device_list_[0].device_type_ = mindspore::lite::DT_CPU; - - labelNet->CreateSessionMS(modelBuffer, bufferLen, context); - delete context; - ``` - - - Based on the model file `modelBuffer`, the computational graph for inference is constructed. - - ```cpp - void MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx) { - session_ = mindspore::session::LiteSession::CreateSession(ctx); - if (session_ == nullptr) { - MS_PRINT("Create Session failed."); - return; - } - - // Compile model. - model_ = mindspore::lite::Model::Import(modelBuffer, bufferLen); - if (model_ == nullptr) { - ReleaseNets(); - MS_PRINT("Import model failed."); - return; - } - - int ret = session_->CompileGraph(model_); - if (ret != mindspore::lite::RET_OK) { - ReleaseNets(); - MS_PRINT("CompileGraph failed."); - return; - } - } - ``` - -2. Convert the input image into the Tensor format of the MindSpore model. - - Cut the size of the image `srcbitmap` to be detected and convert it to LiteMat format `lite_norm_mat_cut`. The width, height and channel number information are converted into float format data `dataHWC`. Finally, copy the `dataHWC` to the input `inTensor` of MindSpore model. 
- - ```cpp - if (!BitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) { - MS_PRINT("BitmapToLiteMat error"); - return NULL; - } - if (!PreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) { - MS_PRINT("PreProcessImageData error"); - return NULL; - } - - ImgDims inputDims; - inputDims.channel = lite_norm_mat_cut.channel_; - inputDims.width = lite_norm_mat_cut.width_; - inputDims.height = lite_norm_mat_cut.height_; - - // Get the MindSpore inference environment which was created in loadModel(). - void **labelEnv = reinterpret_cast<void **>(netEnv); - if (labelEnv == nullptr) { - MS_PRINT("MindSpore error, labelEnv is a nullptr."); - return NULL; - } - MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv); - - auto mSession = labelNet->session(); - if (mSession == nullptr) { - MS_PRINT("MindSpore error, Session is a nullptr."); - return NULL; - } - MS_PRINT("MindSpore get session."); - - auto msInputs = mSession->GetInputs(); - if (msInputs.size() == 0) { - MS_PRINT("MindSpore error, msInputs.size() equals 0."); - return NULL; - } - auto inTensor = msInputs.front(); - - float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_); - // Copy dataHWC to the model input tensor. - memcpy(inTensor->MutableData(), dataHWC, - inputDims.channel * inputDims.width * inputDims.height * sizeof(float)); - ``` - - The following code adjusts the size of the input image and shows the detailed data processing algorithm. 
- - ```cpp - bool PreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) { - bool ret = false; - LiteMat lite_mat_resize; - LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr; - ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 256, 256); - if (!ret) { - MS_PRINT("ResizeBilinear error"); - return false; - } - LiteMat lite_mat_convert_float; - ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0); - if (!ret) { - MS_PRINT("ConvertTo error"); - return false; - } - LiteMat lite_mat_cut; - ret = Crop(lite_mat_convert_float, lite_mat_cut, 16, 16, 224, 224); - if (!ret) { - MS_PRINT("Crop error"); - return false; - } - std::vector<float> means = {0.485, 0.456, 0.406}; - std::vector<float> stds = {0.229, 0.224, 0.225}; - SubStractMeanNormalize(lite_mat_cut, lite_norm_mat_cut, means, stds); - return true; - } - ``` - -3. The input tensor is inferred according to the model, and the output tensor is obtained and post-processed. - - - The graph and model are loaded and on-device inference is performed. - - ```cpp - // After the model and image tensor data is loaded, run inference. - auto status = mSession->RunGraph(); - ``` - - - Get the tensor output `msOutputs` of MindSpore model. The text information `resultCharData` displayed in the APP is calculated through `msOutputs` and classification array information. - - ```cpp - auto names = mSession->GetOutputTensorNames(); - std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs; - for (const auto &name : names) { - auto temp_dat = mSession->GetOutputByTensorName(name); - msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *>{name, temp_dat}); - } - std::string resultStr = ProcessRunnetResult(::RET_CATEGORY_SUM, ::labels_name_map, msOutputs); - - const char *resultCharData = resultStr.c_str(); - return (env)->NewStringUTF(resultCharData); - ``` - - - Perform post-processing of the output data. 
Obtain the output object `outputTensor` through `msOutputs`, and parse it with the thing category array `labels_name_map` to obtain the training score array `scores[]` of each element. Set the credibility threshold value to `unifiedThre`, and count the credibility threshold value according to the training data. Above the threshold, it belongs to this type. On the contrary, it is not. Finally, a corresponding category name and corresponding score data `categoryScore` are returned. - - ```cpp - std::string ProcessRunnetResult(const int RET_CATEGORY_SUM, const char *const labels_name_map[], std::unordered_map msOutputs) { - // Get the branch of the model output. - // Use iterators to get map elements. - std::unordered_map::iterator iter; - iter = msOutputs.begin(); - - // The mobilenetv2.ms model output just one branch. - auto outputTensor = iter->second; - - int tensorNum = outputTensor->ElementsNum(); - MS_PRINT("Number of tensor elements:%d", tensorNum); - - // Get a pointer to the first score. - float *temp_scores = static_cast(outputTensor->MutableData()); - float scores[RET_CATEGORY_SUM]; - for (int i = 0; i < RET_CATEGORY_SUM; ++i) { - scores[i] = temp_scores[i]; - } - - const float unifiedThre = 0.5; - const float probMax = 1.0; - for (size_t i = 0; i < RET_CATEGORY_SUM; ++i) { - float threshold = g_thres_map[i]; - float tmpProb = scores[i]; - if (tmpProb < threshold) { - tmpProb = tmpProb / threshold * unifiedThre; - } else { - tmpProb = (tmpProb - threshold) / (probMax - threshold) * unifiedThre + unifiedThre; - } - scores[i] = tmpProb; - } - - // Score for each category. - // Converted to text information that needs to be displayed in the APP. 
- std::string categoryScore = ""; - for (int i = 0; i < RET_CATEGORY_SUM; ++i) { - categoryScore += labels_name_map[i]; - categoryScore += ":"; - std::string score_str = std::to_string(scores[i]); - categoryScore += score_str; - categoryScore += ";"; - } - return categoryScore; - } - ``` - - ​ diff --git a/tutorials/lite/source_en/quick_start/quick_start_cpp.md b/tutorials/lite/source_en/quick_start/quick_start_cpp.md deleted file mode 100644 index 5def293c59a83ba2cbaff0aadca76ec4aab7535f..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/quick_start/quick_start_cpp.md +++ /dev/null @@ -1,269 +0,0 @@ -# Experience C++ Simple Inference Demo - -`Linux` `Windows` `x86` `C++` `Whole Process``Inference Application` `Data Preparation` `Beginner` - - - -- [Experience C++ Simple Inference Demo](#experience-c-simple-inference-demo) - - [Overview](#overview) - - [Building and Running](#building-and-running) - - [Linux x86](#linux-x86) - - [Windows](#windows) - - [CMake Integration](#cmake-integration) - - [Model Loading](#model-loading) - - [Model Build](#model-build) - - [Model Inference](#model-inference) - - [Memory Release](#memory-release) - - - - - -## Overview - -This tutorial provides a MindSpore Lite inference demo. It demonstrates the basic on-device inference process using C++ by inputting random data, executing inference, and printing the inference result. You can quickly understand how to use inference-related APIs on MindSpore Lite. In this tutorial, the randomly generated data is used as the input data to perform the inference on the MobileNetV2 model and print the output data. The code is stored in the [mindspore/lite/examples/quick_start_cpp](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/examples/quick_start_cpp) directory. - -The MindSpore Lite inference steps are as follows: - -1. 
Load the model: Read the `.ms` model converted by the [model conversion tool](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html) from the file system, import the model by using [mindspore::lite::Model::Import](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#import), parse the model, and create the `Model *`. -2. Create and configure context: Create and configure [context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#context) to save some basic configuration parameters required by a session to guide graph build and execution. -3. Create a session: Create [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) and configure the [context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#context) obtained in the previous step to the session. -4. Build a graph: Before performing inference, call the `CompileGraph` API of [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) to build a graph. In the graph build phase, subgraph partition and operator selection and scheduling are performed, which takes a long time. Therefore, it is recommended that with one [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) created, one graph be built. In this case, the inference will be performed for multiple times. -5. Input data: Before the graph is executed, data needs to be filled in the `Input Tensor`. -6. Perform inference: Use `RunGraph` of the [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) to perform model inference. -7. Obtain the output: After the graph execution is complete, you can obtain the inference result by `outputting the tensor`. -8. 
Release the memory: If the MindSpore Lite inference framework is not required, release the created [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) and [Model](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#model). - -![img](../images/lite_runtime.png) - -> To view the advanced usage of MindSpore Lite, see [Using Runtime to Perform Inference (C++)](https://www.mindspore.cn/tutorial/lite/en/master/use/runtime_cpp.html). - -## Building and Running - -### Linux x86 - -- Environment requirements - - - System environment: Linux x86_64 (Ubuntu 18.04.02LTS is recommended.) - - Build dependency: - - [CMake](https://cmake.org/download/) >= 3.18.3 - - [GCC](https://gcc.gnu.org/releases.html) >= 7.3.0 - -- Build - - Run the [build script](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/quick_start_cpp/build.sh) in the `mindspore/lite/examples/quick_start_cpp` directory to automatically download the MindSpore Lite inference framework library and model files and build the demo. - - ```bash - bash build.sh - ``` - - > If the MindSpore Lite inference framework fails to be downloaded by using this build script, manually download the MindSpore Lite model inference framework [mindspore-lite-{version}-linux-x64.tar.gz](https://www.mindspore.cn/tutorial/lite/en/master/use/downloads.html) whose hardware platform is CPU and operating system is Ubuntu-x64, and copy the `libmindspore-lite.a` file in the decompressed lib directory to the `mindspore/lite/examples/quick_start_cpp/lib` directory. Also copy the files from `inference/include` to the `mindspore/lite/examples/quick_start_cpp/include` directory. - > - > If the MobileNetV2 model fails to be downloaded, manually download the model file [mobilenetv2.ms](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_imagenet/mobilenetv2.ms) and copy it to the `mindspore/lite/examples/quick_start_cpp/model` directory. 
- > - > After manually downloading and placing the file in the specified location, you need to execute the build.sh script again to complete the compilation. - -- Inference - - After the build, go to the `mindspore/lite/examples/quick_start_cpp/build` directory and run the following command to experience MindSpore Lite inference on the MobileNetV2 model: - - ```bash - ./mindspore_quick_start_cpp ../model/mobilenetv2.ms - ``` - - After the execution, the following information is displayed, including the tensor name, tensor size, number of output tensors, and the first 50 pieces of data. - - ```shell - tensor name is:Default/head-MobileNetV2Head/Softmax-op204 tensor size is:4000 tensor elements num is:1000 - output data is:5.26823e-05 0.00049752 0.000296722 0.000377607 0.000177048 8.02107e-05 0.000212864 0.000422286 0.000273189 0.000234105 0.00099807 0.0042331 0.00204993 0.00124968 0.00294458 0.00139795 0.00111545 0.000656357 0.000809457 0.00153731 0.000621049 0.00224637 0.00127045 0.00187557 0.000420144 0.000150638 0.000266477 0.000438628 0.000187773 0.00054668 0.000212853 0.000921661 0.000127179 0.000565873 0.00100394 0.000300159 0.000282677 0.000358067 0.00215288 0.000477845 0.00107596 0.00065134 0.000722132 0.000807501 0.000631415 0.00043247 0.00125898 0.000255094 8.2606e-05 9.91917e-05 0.000794512 - ``` - -### Windows - -- Environment requirements - - - System environment: 64-bit Windows 7 or 64-bit Windows 10 - - Build dependency: - - [CMake](https://cmake.org/download/) >= 3.18.3 - - [MinGW GCC](https://sourceforge.net/projects/mingw-w64/files/ToolchainstargettingWin64/PersonalBuilds/mingw-builds/7.3.0/threads-posix/seh/x86_64-7.3.0-release-posix-seh-rt_v5-rev0.7z/download) = 7.3.0 - -- Build - - - Download the library: Manually download the MindSpore Lite model inference framework [mindspore-lite-{version}-win-x64.zip](https://www.mindspore.cn/tutorial/lite/en/master/use/downloads.html) whose hardware platform is CPU and operating system is Windows-x64. 
Copy the `libmindspore-lite.a` file in the decompressed `inference/lib` directory to the `mindspore/lite/examples/quick_start_cpp/lib` project directory, and change the include directory to the `mindspore/lite/examples/quick_start_cpp/include` project directory. (Note: The `lib` and `include` directories under the project need to be created manually) - - - Download the model: Manually download the model file [mobilenetv2.ms](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_imagenet/mobilenetv2.ms) and copy it to the `mindspore/lite/examples/quick_start_cpp/model` directory. - - - Build the demo: Run the [build script](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/quick_start_cpp/build.bat) in the `mindspore/lite/examples/quick_start_cpp` directory to automatically download related files and build the Demo. - - ```bash - call build.bat - ``` - -- Inference - - After the build, go to the `mindspore/lite/examples/quick_start_cpp/build` directory and run the following command to experience MindSpore Lite inference on the MobileNetV2 model: - - ```bash - call mindspore_quick_start_cpp.exe ../model/mobilenetv2.ms - ``` - - After the execution, the following information is displayed, including the tensor name, tensor size, number of output tensors, and the first 50 pieces of data. 
- - ```shell - tensor name is:Default/head-MobileNetV2Head/Softmax-op204 tensor size is:4000 tensor elements num is:1000 - output data is:5.26823e-05 0.00049752 0.000296722 0.000377607 0.000177048 8.02107e-05 0.000212864 0.000422286 0.000273189 0.000234105 0.00099807 0.0042331 0.00204993 0.00124968 0.00294458 0.00139795 0.00111545 0.000656357 0.000809457 0.00153731 0.000621049 0.00224637 0.00127045 0.00187557 0.000420144 0.000150638 0.000266477 0.000438628 0.000187773 0.00054668 0.000212853 0.000921661 0.000127179 0.000565873 0.00100394 0.000300159 0.000282677 0.000358067 0.00215288 0.000477845 0.00107596 0.00065134 0.000722132 0.000807501 0.000631415 0.00043247 0.00125898 0.000255094 8.2606e-05 9.91917e-05 0.000794512 - ``` - -## Configure CMake - -The following is the sample code when integrating `libmindspore-lite.a` static library through CMake. - -> When CMake integrates the `libmindspore-lite.a` static library, the `-Wl,--whole-archive` option needs to be passed to the linker. -> -> In addition, the build option for stack protection `-fstack-protector-strong` is added during the build of MindSpore Lite. Therefore, the `ssp` library in MinGW needs to be linked on the Windows platform. -> -> In addition, the support of processing .so file is added during the build of MindSpore Lite. Therefore, the `dl` library needs to be linked on the Linux platform. 
- -```cmake -cmake_minimum_required(VERSION 3.18.3) -project(QuickStartCpp) - -if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.3.0) - message(FATAL_ERROR "GCC version ${CMAKE_CXX_COMPILER_VERSION} must not be less than 7.3.0") -endif() - -# Add the directory to include search path -include_directories(${CMAKE_CURRENT_SOURCE_DIR}) - -# Add the directory to link search path -link_directories(${CMAKE_CURRENT_SOURCE_DIR}/lib) - -file(GLOB_RECURSE QUICK_START_CXX ${CMAKE_CURRENT_SOURCE_DIR}/*.cc) -add_executable(mindspore_quick_start_cpp ${QUICK_START_CXX}) - -target_link_libraries( - mindspore_quick_start_cpp - -Wl,--whole-archive mindspore-lite -Wl,--no-whole-archive - pthread -) - -# Due to the increased compilation options for stack protection, -# it is necessary to target link ssp library when Use the static library in Windows. -if(WIN32) - target_link_libraries( - mindspore_quick_start_cpp - ssp - ) -else() - target_link_libraries( - mindspore_quick_start_cpp - dl - ) -endif() -``` - -## Model Loading - -Read the MindSpore Lite model from the file system and use the `mindspore::lite::Model::Import` function to import the model for parsing. - -```c++ -// Read model file. -size_t size = 0; -char *model_buf = ReadFile(model_path, &size); -if (model_buf == nullptr) { - std::cerr << "Read model file failed." << std::endl; - return -1; -} -// Load the .ms model. -auto model = mindspore::lite::Model::Import(model_buf, size); -delete[](model_buf); -if (model == nullptr) { - std::cerr << "Import model file failed." << std::endl; - return -1; -} -``` - -## Model Build - -Model build includes context configuration creation, session creation, and graph build. - -```c++ -mindspore::session::LiteSession *Compile(mindspore::lite::Model *model) { - // Create and init context. - auto context = std::make_shared(); - if (context == nullptr) { - std::cerr << "New context failed while." 
<< std::endl; - return nullptr; - } - - // Create the session. - mindspore::session::LiteSession *session = mindspore::session::LiteSession::CreateSession(context.get()); - if (session == nullptr) { - std::cerr << "CreateSession failed while running." << std::endl; - return nullptr; - } - - // Build a graph. - auto ret = session->CompileGraph(model); - if (ret != mindspore::lite::RET_OK) { - delete session; - std::cerr << "Compile failed while running." << std::endl; - return nullptr; - } - - // Note: when use model->Free(), the model can not be compiled again. - if (model != nullptr) { - model->Free(); - } - return session; -} -``` - -## Model Inference - -Model inference includes data input, inference execution, and output obtaining. In this example, the input data is randomly generated, and the output result is printed after inference. - -```c++ -int Run(mindspore::session::LiteSession *session) { - auto inputs = session->GetInputs(); - - // Generate random data as input data. - auto ret = GenerateInputDataWithRandom(inputs); - if (ret != mindspore::lite::RET_OK) { - std::cerr << "Generate Random Input Data failed." << std::endl; - return ret; - } - - // Run Inference. - ret = session->RunGraph(); - if (ret != mindspore::lite::RET_OK) { - std::cerr << "Inference error " << ret << std::endl; - return ret; - } - - // Get Output Tensor Data. 
- auto out_tensors = session->GetOutputs(); - for (auto tensor : out_tensors) { - std::cout << "tensor name is:" << tensor.first << " tensor size is:" << tensor.second->Size() - << " tensor elements num is:" << tensor.second->ElementsNum() << std::endl; - auto out_data = reinterpret_cast(tensor.second->MutableData()); - std::cout << "output data is:"; - for (int i = 0; i < tensor.second->ElementsNum() && i <= 50; i++) { - std::cout << out_data[i] << " "; - } - std::cout << std::endl; - } - return mindspore::lite::RET_OK; -} -``` - -## Memory Release - -If the MindSpore Lite inference framework is not required, release the created `LiteSession` and `Model`. - -```c++ -// Delete model buffer. -delete model; -// Delete session buffer. -delete session; -``` diff --git a/tutorials/lite/source_en/quick_start/quick_start_java.md b/tutorials/lite/source_en/quick_start/quick_start_java.md deleted file mode 100644 index 983c97512c04e54f3f6b95406528432b1cefe7fb..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/quick_start/quick_start_java.md +++ /dev/null @@ -1,188 +0,0 @@ -# Experience Java Simple Inference Demo - -`Linux` `x86` `Java` `Whole Process` `Inference Application` `Data Preparation` `Beginner` - - - -- [Experience Java Simple Inference Demo](#experience-java-simple-inference-demo) - - [Overview](#overview) - - [Building and Running](#building-and-running) - - [Model Loading](#model-loading) - - [Model Build](#model-build) - - [Model Inference](#model-inference) - - [Memory Release](#memory-release) - - - - - -## Overview - -This tutorial provides an example program for MindSpore Lite to perform inference. It demonstrates the basic process of performing inference on the device side using [MindSpore Lite Java API](https://www.mindspore.cn/doc/api_java/en/master/index.html) by random inputting data, executing inference, and printing the inference result. You can quickly understand how to use the Java APIs related to inference on MindSpore Lite. 
In this tutorial, the randomly generated data is used as the input data to perform the inference on the MobileNetV2 model and print the output data. The code is stored in the [mindspore/lite/examples/quick_start_java](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/examples/quick_start_java) directory. - -The MindSpore Lite inference steps are as follows: - -1. Load the model: Read the `.ms` model converted by the [model conversion tool](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html) from the file system and import the model using the [loadModel](https://www.mindspore.cn/doc/api_java/en/master/model.html#loadmodel). -2. Create and configure context: Create a configuration context [MSConfig](https://www.mindspore.cn/doc/api_java/en/master/msconfig.html#msconfig) to save some basic configuration parameters required by a session to guide graph build and execution, including `deviceType` (device type), `threadNum` (number of threads), `cpuBindMode` (CPU binding mode), and `enable_float16` (whether to preferentially use the float16 operator). -3. Create a session: Create [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) and call the [init](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#init) method to configure the [MSConfig](https://www.mindspore.cn/doc/api_java/en/master/msconfig.html#msconfig) obtained in the previous step in the session. -4. Build a graph: Before building a graph, the [compileGraph](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#compilegraph) interface of [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) needs to be called to build the graph, including subgraph partition and operator selection and scheduling. This takes a long time. 
Therefore, it is recommended that with one [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) created, one graph be built. In this case, the inference will be performed multiple times. -5. Input data: Before the graph is executed, data needs to be filled in the `Input Tensor`. -6. Perform inference: Use the [runGraph](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#rungraph) of the [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) to perform model inference. -7. Obtain the output: After the graph execution is complete, you can obtain the inference result by `outputting the tensor`. -8. Release the memory: If the MindSpore Lite inference framework is not required, release the created [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) and [Model](https://www.mindspore.cn/doc/api_java/en/master/model.html#model). - -![img](../images/lite_runtime.png) - -> To view the advanced usage of MindSpore Lite, see [Using Runtime to Perform Inference (Java)](https://www.mindspore.cn/tutorial/lite/en/master/use/runtime_java.html). - -## Building and Running - -- Environment requirements - - System environment: Linux x86_64 (Ubuntu 18.04.02LTS is recommended.) - - Build dependency: - - [Git](https://git-scm.com/downloads) >= 2.28.0 - - [Maven](https://maven.apache.org/download.cgi) >= 3.3 - - [OpenJDK](https://openjdk.java.net/install/) >= 1.8 - -- Build - - Run the [build script](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/quick_start_java/build.sh) in the `mindspore/lite/examples/quick_start_java` directory to automatically download the MindSpore Lite inference framework library and model files and build the Demo. 
- - ```bash - bash build.sh - ``` - - > If the MindSpore Lite inference framework fails to be downloaded, manually download the MindSpore Lite model inference framework [mindspore-lite-{version}-linux-x64.tar.gz](https://www.mindspore.cn/tutorial/lite/en/master/use/downloads.html) whose hardware platform is CPU and operating system is Ubuntu-x64. Decompress the package and obtain the `libmindspore-lite.so` file in the `lib\jar` directory. Copy `libmindspore-lite-jni.so` and `libmindspore-lite-java.jar` to the `mindspore/lite/examples/quick_start_java/lib` directory. - > - > If the MobileNetV2 model fails to be downloaded, manually download the model file [mobilenetv2.ms](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_imagenet/mobilenetv2.ms) and copy it to the `mindspore/lite/examples/quick_start_java/model/` directory. - > - > After manually downloading and placing the file in the specified location, you need to execute the build.sh script again to complete the compilation. - -- Inference - - After the build, go to the `mindspore/lite/examples/quick_start_java/target` directory and run the following command to experience MindSpore Lite inference on the MobileNetV2 model: - - ```bash - export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:../lib/ - java -Djava.library.path=../lib/ -classpath .:./quick_start_java.jar:../lib/mindspore-lite-java.jar com.mindspore.lite.demo.Main ../model/mobilenetv2.ms - ``` - - After the execution, the following information is displayed, including the tensor name, tensor size, number of output tensors, and the first 50 pieces of data. 
- - ```shell - out tensor shape: [1,1000,] and out data: 5.4091015E-5 4.030303E-4 3.032344E-4 4.0029243E-4 2.2730739E-4 8.366581E-5 2.629827E-4 3.512394E-4 2.879536E-4 1.9557697E-4xxxxxxxxxx MindSpore Lite 1.1.0out tensor shape: [1,1000,] and out data: 5.4091015E-5 4.030303E-4 3.032344E-4 4.0029243E-4 2.2730739E-4 8.366581E-5 2.629827E-4 3.512394E-4 2.879536E-4 1.9557697E-4tensor name is:Default/Sigmoid-op204 tensor size is:2000 tensor elements num is:500output data is:3.31223e-05 1.99382e-05 3.01624e-05 0.000108345 1.19685e-05 4.25282e-06 0.00049955 0.000340809 0.00199094 0.000997094 0.00013585 1.57605e-05 4.34131e-05 1.56114e-05 0.000550819 2.9839e-05 4.70447e-06 6.91601e-06 0.000134483 2.06795e-06 4.11612e-05 2.4667e-05 7.26248e-06 2.37974e-05 0.000134513 0.00142482 0.00011707 0.000161848 0.000395011 3.01961e-05 3.95325e-05 3.12398e-06 3.57709e-05 1.36277e-06 1.01068e-05 0.000350805 5.09019e-05 0.000805241 6.60321e-05 2.13734e-05 9.88654e-05 2.1991e-06 3.24065e-05 3.9479e-05 4.45178e-05 0.00205024 0.000780899 2.0633e-05 1.89997e-05 0.00197261 0.000259391 - ``` - -## Model Loading - -Read the MindSpore Lite model from the file system and use the `model.loadModel` function to import the model for parsing. - -```java -boolean ret = model.loadModel(modelPath); -if (!ret) { - System.err.println("Load model failed, model path is " + modelPath); - return; -} -``` - -## Model Build - -Model build includes context configuration creation, session creation, and graph build. - -```java -private static boolean compile() { - MSConfig msConfig = new MSConfig(); - // You can set config through Init Api or use the default parameters directly. - // The default parameter is that the backend type is DeviceType.DT_CPU, and the number of threads is 2. - boolean ret = msConfig.init(DeviceType.DT_CPU, 2); - if (!ret) { - System.err.println("Init context failed"); - return false; - } - - // Create the MindSpore lite session. 
- session = new LiteSession(); - ret = session.init(msConfig); - msConfig.free(); - if (!ret) { - System.err.println("Create session failed"); - model.free(); - return false; - } - - // Compile graph. - ret = session.compileGraph(model); - if (!ret) { - System.err.println("Compile graph failed"); - model.free(); - return false; - } - return true; -} -``` - -## Model Inference - -Model inference includes data input, inference execution, and output obtaining. In this example, the input data is randomly generated, and the output result is printed after inference. - -```java -private static boolean run() { - MSTensor inputTensor = session.getInputsByTensorName("2031_2030_1_construct_wrapper:x"); - if (inputTensor.getDataType() != DataType.kNumberTypeFloat32) { - System.err.println("Input tensor shape do not float, the data type is " + inputTensor.getDataType()); - return false; - } - // Generator Random Data. - int elementNums = inputTensor.elementsNum(); - float[] randomData = generateArray(elementNums); - byte[] inputData = floatArrayToByteArray(randomData); - - // Set Input Data. - inputTensor.setData(inputData); - - // Run Inference. - boolean ret = session.runGraph(); - if (!ret) { - System.err.println("MindSpore Lite run failed."); - return false; - } - - // Get Output Tensor Data. - MSTensor outTensor = session.getOutputByTensorName("Default/head-MobileNetV2Head/Softmax-op204"); - - // Print out Tensor Data. 
- StringBuilder msgSb = new StringBuilder(); - msgSb.append("out tensor shape: ["); - int[] shape = outTensor.getShape(); - for (int dim : shape) { - msgSb.append(dim).append(","); - } - msgSb.append("]"); - if (outTensor.getDataType() != DataType.kNumberTypeFloat32) { - System.err.println("output tensor shape do not float, the data type is " + outTensor.getDataType()); - return false; - } - float[] result = outTensor.getFloatData(); - if (result == null) { - System.err.println("decodeBytes return null"); - return false; - } - msgSb.append(" and out data:"); - for (int i = 0; i < 10 && i < outTensor.elementsNum(); i++) { - msgSb.append(" ").append(result[i]); - } - System.out.println(msgSb.toString()); - return true; -} -``` - -## Memory Release - -If the MindSpore Lite inference framework is not required, release the created `LiteSession` and `Model`. - -```java -// Delete session buffer. -session.free(); -// Delete model buffer. -model.free(); -``` diff --git a/tutorials/lite/source_en/quick_start/train_lenet.md b/tutorials/lite/source_en/quick_start/train_lenet.md deleted file mode 100644 index 5268b893eb4319537a8beb9135cb8ff213e73a26..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/quick_start/train_lenet.md +++ /dev/null @@ -1,403 +0,0 @@ -# Implement Device Training Based On C++ Interface - -`Linux` `C++` `Android` `Whole Process` `Model Export` `Model Converting` `Model Training` `Beginner` `Intermediate` `Expert` - - - -- [Overview](#overview) -- [Environment Preparing](#environment-preparing) - - [Dataset](#dataset) - - [Install MindSpore](#install-mindspore) - - [Download and Install MindSpore Lite](#download-and-install-mindspore-lite) - - [Connect Android Device](#connect-android-device) -- [Train and Eval](#train-and-eval) -- [Details](#details) - - [Folder Structure](#folder-structure) - - [Model Exporting](#model-exporting) - - [Model Transferring](#model-transferring) - - [Model Training](#model-training) - - [Loading 
Model](#loading-model) - - [Dataset Processing](#dataset-processing) - - [Execute Training](#execute-training) - - [Execute Evaluating](#execute-evaluating) - - - - - -## Overview - -Here we will demonstrate the code that trains a LeNet model using MindSpore Training-on-Device infrastructure. The code segments that are given below are provided fully in [train_lenet](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/examples/train_lenet/). - -The completed training procedure is as follows: - -1. Constructing your training model based on MindSpore Lite Architecture and Export it into `MindIR` model file. -2. Converting `MindIR` model file to the `MS` ToD model file by using MindSpore Lite `Converter` tool. -3. Loading `MS` model file and executing model training by calling MindSpore Lite training API. - -Details will be told after environment deployed and model training by running prepared shell scripts. - -## Environment Preparing - -Ubuntu 18.04 64-bit operating system on x86 platform is recommended. - -### DataSet - -The `MNIST` dataset used in this example consists of 10 classes of 28 x 28 pixels grayscale images. It has a training set of 60,000 examples, and a test set of 10,000 examples. - -> Download the MNIST dataset at . This page provides four download links of dataset files. The first two links are training dataset and training label, while the last two links are test dataset and test label. - -Download and decompress the files to `/PATH/MNIST_Data/train` and `/PATH/MNIST_Data/test` separately. - -The directory structure is as follows: - -```text -└─MNIST_Data - ├─test - │ t10k-images.idx3-ubyte - │ t10k-labels.idx1-ubyte - │ - └─train - train-images.idx3-ubyte - train-labels.idx1-ubyte -``` - -### Install MindSpore - -MindSpore can be installed by source code or using `pip`. Refer [MindSpore installation guide](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_pip_en.md#) for more details. 
- -### Download and Install MindSpore Lite - -Use `git` to clone the source code, the command in `Linux` is as follows: - -```shell -git clone https://gitee.com/mindspore/mindspore.git -cd ./mindspore -``` - -The `mindspore/lite/examples/train_lenet` directory relative to the MindSpore Lite source code contains this demo's source code. - -Go to the [MindSpore Lite Download Page](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html) to download the mindspore-lite-{version}-linux-x64.tar.gz and mindspore-lite-{version}-android-aarch64.tar.gz. The mindspore-lite-{version}-linux-x64.tar.gz is the MindSpore Lite install package for the x86 platform; it contains the converter tool `converter_lite`, which this demo uses to convert the `MINDIR` model to `.ms`, the format supported by MindSpore Lite. The mindspore-lite-{version}-android-aarch64.tar.gz is the MindSpore Lite install package for Android; it contains the training runtime library `libmindspore-lite.so`, which this demo uses to train the model. After downloading these two files, you need to rename the mindspore-lite-{version}-linux-x64.tar.gz to mindspore-lite-{version}-train-linux-x64.tar.gz and rename the mindspore-lite-{version}-android-aarch64.tar.gz to mindspore-lite-{version}-train-android-aarch64.tar.gz. Then put the renamed files in the `output` directory relative to the MindSpore Lite source code (if there is no `output` directory, you should create it). 
- -Suppose these packages are downloaded in the `/Downloads` directory. The `Linux` commands for the operations above are as follows: - -```bash -mkdir output -cp /Downloads/mindspore-lite-{version}-linux-x64.tar.gz output/mindspore-lite-{version}-train-linux-x64.tar.gz -cp /Downloads/mindspore-lite-{version}-android-aarch64.tar.gz output/mindspore-lite-{version}-train-android-aarch64.tar.gz -``` - -You can also [compile from source](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html) to generate the training package for the x86 platform, mindspore-lite-{version}-train-linux-x64.tar.gz, and for the Android platform, mindspore-lite-{version}-train-android-aarch64.tar.gz. These packages will be directly generated in the `output` directory and you should make sure that both packages exist in the `output` directory. - -### Connect Android Device - -Turn on the 'USB debugging' mode of your Android device and connect it with your PC by using the `adb` debugging tool (run `sudo apt install adb` in the Ubuntu OS command line). - -## Train and Eval - -Enter the target directory and run the training bash script. The `Linux` command is as follows: - -```bash -cd /mindspore/lite/examples/train_lenet -bash prepare_and_run.sh -D /PATH/MNIST_Data -t arm64 -``` - -`/PATH/MNIST_Data` is the absolute MNIST dataset path on your machine, and `-t arm64` represents that we will train and run the model on an Android device. - -The script `prepare_and_run.sh` does the following work: - -1. Exports the `lenet_tod.mindir` model file. -2. Calls the converter tool mentioned in the last section to convert the `MINDIR` file to the `ms` file. -3. Pushes the `lenet.ms` model file, the MNIST dataset and the related library files to your `Android` device. -4. Trains, saves and infers the model. - -The model will be trained on your device and print training loss and accuracy value every epoch. The trained model will be saved as 'lenet_tod.ms' file. 
The 10 epochs training result of lenet is shown below (the classification accuracy varies in devices): - -```bash -======Training Locally========= -1.100: Loss is 1.19449 -1.200: Loss is 0.477986 -1.300: Loss is 0.440362 -1.400: Loss is 0.165605 -1.500: Loss is 0.368853 -1.600: Loss is 0.179764 -1.700: Loss is 0.173386 -1.800: Loss is 0.0767713 -1.900: Loss is 0.493 -1.1000: Loss is 0.460352 -1.1100: Loss is 0.262044 -1.1200: Loss is 0.222022 -1.1300: Loss is 0.058006 -1.1400: Loss is 0.0794117 -1.1500: Loss is 0.0241433 -1.1600: Loss is 0.127109 -1.1700: Loss is 0.0557566 -1.1800: Loss is 0.0698758 -Epoch (1): Loss is 0.384778 -Epoch (1): Training Accuracy is 0.8702 -2.100: Loss is 0.0538642 -2.200: Loss is 0.444504 -2.300: Loss is 0.0806976 -2.400: Loss is 0.0495807 -2.500: Loss is 0.178903 -2.600: Loss is 0.265705 -2.700: Loss is 0.0933796 -2.800: Loss is 0.0880472 -2.900: Loss is 0.0480734 -2.1000: Loss is 0.241272 -2.1100: Loss is 0.0920451 -2.1200: Loss is 0.371406 -2.1300: Loss is 0.0365746 -2.1400: Loss is 0.0784372 -2.1500: Loss is 0.207537 -2.1600: Loss is 0.442626 -2.1700: Loss is 0.0814725 -2.1800: Loss is 0.12081 -Epoch (2): Loss is 0.176118 -Epoch (2): Training Accuracy is 0.94415 -...... -10.1000: Loss is 0.0984653 -10.1100: Loss is 0.189702 -10.1200: Loss is 0.0896037 -10.1300: Loss is 0.0138191 -10.1400: Loss is 0.0152357 -10.1500: Loss is 0.12785 -10.1600: Loss is 0.026495 -10.1700: Loss is 0.436495 -10.1800: Loss is 0.157564 -Epoch (10): Loss is 0.102652 -Epoch (10): Training Accuracy is 0.96805 -Eval Accuracy is 0.965244 -===Evaluating trained Model===== -Eval Accuracy is 0.965244 -``` - -> If the Android device is not available on your hand, you could also exectute `bash prepare_and_run.sh -D /PATH/MNIST_Data -t x86` and run it on the x86 platform. 
- -## Details - -### Folder Structure - -The demo project folder structure: - -```bash -train_lenet/ - ├── model - │ ├── lenet_export.py - │ ├── prepare_model.sh - │ └── train_utils.py - │ - ├── scripts - │ ├── eval.sh - │ └── train.sh - │ - ├── src - │ ├── net_runner.cc - │ ├── net_runner.h - │ └── utils.h - │ - ├── README.md - ├── README_CN.md - └── prepare_and_run.sh -``` - -### Model Exporting - -Whether it is an off-the-shelf prepared model, or a custom written model, the model needs to be exported to a `.mindir` file. Here we use the already-implemented [LeNet model](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/lenet). - -Import and instantiate a LeNet5 model and set the model to train mode: - -```python -import numpy as np -from mindspore import context, Tensor -import mindspore.common.dtype as mstype -from mindspore.train.serialization import export -from lenet import LeNet5 -from train_utils import TrainWrap - -n = LeNet5() -n.set_train() -context.set_context(mode=context.PYNATIVE_MODE, device_target="CPU", save_graphs=False) -``` - -Set MindSpore context and initialize the data and label tensors. In this case we use a MindSpore that was compiled for CPU. We define a batch size of 32 and initialize the tensors according to MNIST data -- single channel 32x32 images. - -The tensors does not need to be loaded with relevant data, but the shape and type must be correct. Note also, that this export code runs on the server, and in this case uses the CPU device. However, the Training on Device will run according to the [context](https://www.mindspore.cn/tutorial/lite/en/master/use/runtime_train_cpp.html#creating-contexts) - -```python -BATCH_SIZE = 32 -x = Tensor(np.ones((BATCH_SIZE, 1, 32, 32)), mstype.float32) -label = Tensor(np.zeros([BATCH_SIZE]).astype(np.int32)) -net = TrainWrap(n) -``` - -Wrapping the network with a loss layer and an optimizer and `export` it to a `MindIR` file. 
`TrainWrap` is provided in the example as: - -```python -import mindspore.nn as nn -from mindspore.common.parameter import ParameterTuple - -def TrainWrap(net, loss_fn=None, optimizer=None, weights=None): - """ - TrainWrap - """ - if loss_fn is None: - loss_fn = nn.SoftmaxCrossEntropyWithLogits(reduction='mean', sparse=True) - loss_net = nn.WithLossCell(net, loss_fn) - loss_net.set_train() - if weights is None: - weights = ParameterTuple(net.trainable_params()) - if optimizer is None: - optimizer = nn.Adam(weights, learning_rate=0.003, beta1=0.9, beta2=0.999, eps=1e-5, use_locking=False, use_nesterov=False, weight_decay=4e-5, loss_scale=1.0) - train_net = nn.TrainOneStepCell(loss_net, optimizer) - return train_net -``` - -Finally, exporting the defined model. - -```python -export(net, x, label, file_name="lenet_tod", file_format='MINDIR') -print("finished exporting") -``` - -### Model Transferring - -To convert the model simply use the converter as explained in the [Convert Section](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_train.html#creating-mindspore-tod-models), the command is: - -```bash -./converter_lite --fmk=MINDIR --trainModel=true --modelFile=lenet_tod.mindir --outputFile=lenet_tod -``` - -The exported file `lenet_tod.ms` is under the folder `./train_lenet/model`. - -### Model Training - -The model training progress is in [net_runner.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/train_lenet/src/net_runner.cc). 
- -The main code continues as follows: - -```cpp -int NetRunner::Main() { - // Load model and create session - InitAndFigureInputs(); - // initialize the dataset - InitDB(); - // Execute the training - TrainLoop(); - // Evaluate the trained model - CalculateAccuracy(); - - if (epochs_ > 0) { - auto trained_fn = ms_file_.substr(0, ms_file_.find_last_of('.')) + "_trained.ms"; - // Save the trained model to file - session_->SaveToFile(trained_fn); - } - return 0; -} -``` - -#### Loading Model - -`InitAndFigureInputs` creates the TrainSession instance from the `.ms` file, then sets the input tensors indices for the `.ms` model. - -```cpp -void NetRunner::InitAndFigureInputs() { - mindspore::lite::Context context; - context.device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND; - context.device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false; - context.device_list_[0].device_type_ = mindspore::lite::DT_CPU; - context.thread_num_ = 2; - - session_ = mindspore::session::TrainSession::CreateSession(ms_file_, &context); - MS_ASSERT(nullptr != session_); - loop_ = mindspore::session::TrainLoop::CreateTrainLoop(session_); - - acc_metrics_ = std::shared_ptr(new AccuracyMetrics); - - loop_->Init({acc_metrics_.get()}); - - auto inputs = session_->GetInputs(); - MS_ASSERT(inputs.size() > 1); - auto nhwc_input_dims = inputs.at(0)->shape(); - MS_ASSERT(nhwc_input_dims.size() == 4); - batch_size_ = nhwc_input_dims.at(0); - h_ = nhwc_input_dims.at(1); - w_ = nhwc_input_dims.at(2); -} -``` - -#### Dataset Processing - -`InitDB` initializes the MNIST dataset and loads it into the memory. MindData has provided the data preprocessing API, the user could refer to the [C++ API Docs](https://www.mindspore.cn/doc/api_cpp/en/master/session.html) for more details. 
- -```cpp -int NetRunner::InitDB() { - train_ds_ = Mnist(data_dir_ + "/train", "all"); - - TypeCast typecast_f("float32"); - Resize resize({h_, w_}); - train_ds_ = train_ds_->Map({&resize, &typecast_f}, {"image"}); - - TypeCast typecast("int32"); - train_ds_ = train_ds_->Map({&typecast}, {"label"}); - - train_ds_ = train_ds_->Shuffle(2); - train_ds_ = train_ds_->Batch(batch_size_, true); - - if (verbose_) { - std::cout << "DatasetSize is " << train_ds_->GetDatasetSize() << std::endl; - } - if (train_ds_->GetDatasetSize() == 0) { - std::cout << "No relevant data was found in " << data_dir_ << std::endl; - MS_ASSERT(train_ds_->GetDatasetSize() != 0); - } - return 0; -} -``` - -#### Execute Training - -The `TrainLoop` method is the core of the training procedure. We first display its code then review it. - -```cpp -int NetRunner::TrainLoop() { - struct mindspore::lite::StepLRLambda step_lr_lambda(1, 0.7); - mindspore::lite::LRScheduler step_lr_sched(mindspore::lite::StepLRLambda, static_cast(&step_lr_lambda), 1); - - mindspore::lite::LossMonitor lm(100); - mindspore::lite::ClassificationTrainAccuracyMonitor am(1); - mindspore::lite::CkptSaver cs(1000, std::string("lenet")); - Rescaler rescale(255.0); - - loop_->Train(epochs_, train_ds_.get(), std::vector{&rescale, &lm, &cs, &am, &step_lr_sched}); - return 0; -} -``` - -#### Execute Evaluating - -To eval the model accuracy, the `CalculateAccuracy` method is being called. Within which, the model is switched to `Eval` mode, and the method runs a cycle of test tensors through the trained network to measure the current accuracy rate. 
- -```cpp -float NetRunner::CalculateAccuracy(int max_tests) { - test_ds_ = Mnist(data_dir_ + "/test", "all"); - TypeCast typecast_f("float32"); - Resize resize({h_, w_}); - test_ds_ = test_ds_->Map({&resize, &typecast_f}, {"image"}); - - TypeCast typecast("int32"); - test_ds_ = test_ds_->Map({&typecast}, {"label"}); - test_ds_ = test_ds_->Batch(batch_size_, true); - - Rescaler rescale(255.0); - - loop_->Eval(test_ds_.get(), std::vector{&rescale}); - std::cout << "Eval Accuracy is " << acc_metrics_->Eval() << std::endl; - - return 0.0; -} -``` - -In the given example, the program runs a fixed number of train cycles. The user may easily change the termination condition, e.g., run until a certain accuracy is reached, or run only at night time when device is connected to a power source. - -Finally, when trainining is completed, the fully trained model needs to be saved. The `SaveToFile` method is used for this purpose. diff --git a/tutorials/lite/source_en/quick_start/train_lenet_java.md b/tutorials/lite/source_en/quick_start/train_lenet_java.md deleted file mode 100644 index 31b58c1eb1b06f3eb7788cc0fcf1aeb97eba1c34..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/quick_start/train_lenet_java.md +++ /dev/null @@ -1,5 +0,0 @@ -# Implement Device Training Based On Java Interface - - This tutorial is being translated, please stay tuned... - - diff --git a/tutorials/lite/source_en/scene_detection_lite.md b/tutorials/lite/source_en/scene_detection_lite.md deleted file mode 100644 index ee78f8e8cb1d825b25f1d991b4383ff58ab3b83a..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/scene_detection_lite.md +++ /dev/null @@ -1,19 +0,0 @@ -# Scene Detection Model - - - -## Scene dectectin introduction - -Scene detection can identify the type of scene in the device's camera. - -Using MindSpore Lite to implement scene detection [example](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/scene_detection). 
- -## Scene detection model list - -The following table shows the data of some scene detection models using MindSpore Lite inference. - -> The performance of the table below is tested on the P30. - -| Model name | Size(Mb) | Top1 | CPU 4 thread delay (ms) | -|-----------------------| :----------: | :----------: | :-----------: | -| [MobileNetv2](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms) | 11.3 | - | 11.5 | diff --git a/tutorials/lite/source_en/style_transfer_lite.md b/tutorials/lite/source_en/style_transfer_lite.md deleted file mode 100644 index 016047942e9a5e6ed33fe52359ae819f421bd60e..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/style_transfer_lite.md +++ /dev/null @@ -1,17 +0,0 @@ -# Style Transfer Model - - - -## Style transfer introduction - -The style transfer model can change the artistic style of the user's target image according to the standard image built in this demo, and display it in the App image preview interface. Users can save the style transfer result or restore the original form of the target image. - -Using demo to open the target image: - -![image_before_transfer](images/before_transfer.png) - -Selecting the first standard image from left to perform the style transfer, as shown in the figure: - -![image_after_transfer](images/after_transfer.png) - -Using MindSpore Lite to realize style transfer [example](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/style_transfer). diff --git a/tutorials/lite/source_en/use/asic.rst b/tutorials/lite/source_en/use/asic.rst deleted file mode 100644 index 007f4257e71cf6e599fa5316651602637602f3a2..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/asic.rst +++ /dev/null @@ -1,7 +0,0 @@ -Application Specific Integrated Circuit Integration Instructions -================================================================ - -.. 
toctree:: - :maxdepth: 1 - - npu_info \ No newline at end of file diff --git a/tutorials/lite/source_en/use/benchmark.rst b/tutorials/lite/source_en/use/benchmark.rst deleted file mode 100644 index 9e986b94af6c0ecc2dc3149b3e647f00b7a5ff75..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/benchmark.rst +++ /dev/null @@ -1,8 +0,0 @@ -Benchmark Tool -======================== - -.. toctree:: - :maxdepth: 1 - - benchmark_tool - benchmark_train_tool \ No newline at end of file diff --git a/tutorials/lite/source_en/use/benchmark_tool.md b/tutorials/lite/source_en/use/benchmark_tool.md deleted file mode 100644 index 9d306623aaa4775dd4ff0e861d68032aa4829baa..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/benchmark_tool.md +++ /dev/null @@ -1,284 +0,0 @@ -# benchmark - -`Windows` `Linux` `Environment Preparation` `Benchmark Testing` `Intermediate` `Expert` - - - -- [Performing Benchmark Testing](#performing-benchmark-testing) - - [Overview](#overview) - - [Linux Environment Usage](#linux-environment-usage) - - [Environment Preparation](#environment-preparation) - - [Parameter Description](#parameter-description) - - [Example](#example) - - [Performance Test](#performance-test) - - [Accuracy Test](#accuracy-test) - - [CPU Performance Test](#CPU-performance-test) - - [Windows Environment Usage](#windows-environment-usage) - - [Environment Preparation](#environment-preparation-1) - - [Parameter Description](#parameter-description-1) - - [Example](#example-1) - - [Performance Test](#performance-test-1) - - [Accuracy Test](#accuracy-test-1) - - - - - -## Overview - -After model conversion and before inference, you can use the Benchmark tool to perform benchmark testing on a MindSpore Lite model. It can not only perform quantitative analysis (performance) on the forward inference execution duration of a MindSpore Lite model, but also perform comparative error analysis (accuracy) based on the output of the specified model. 
- -## Linux Environment Usage - -### Environment Preparation - -To use the Benchmark tool, you need to prepare the environment as follows: - -- Compilation: Install build dependencies and perform build. The code of the Benchmark tool is stored in the `mindspore/lite/tools/benchmark` directory of the MindSpore source code. For details about the build operations, see the [Environment Requirements](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#environment-requirements) and [Compilation Example](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#compilation-example) in the build document. - -- Run: Obtain the `Benchmark` tool and configure environment variables. For details, see [Output Description](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#output-description) in the build document. - -- Add the path of dynamic library required by the inference code to the environment variables LD_LIBRARY_PATH. - - ````bash - export LD_LIBRARY_PATH=${PACKAGE_ROOT_PATH}/inference/lib:${LD_LIBRARY_PATH} - ```` - - ${PACKAGE_ROOT_PATH} is the compiled inference package path after decompressing. - -### Parameter Description - -The command used for benchmark testing based on the compiled Benchmark tool is as follows: - -```bash -./benchmark [--modelFile=] [--accuracyThreshold=] - [--benchmarkDataFile=] [--benchmarkDataType=] - [--cpuBindMode=] [--device=] [--help] - [--inDataFile=] [--loopCount=] - [--numThreads=] [--warmUpLoopCount=] - [--enableFp16=] [--timeProfiling=] - [--inputShapes=] [--perfProfiling=] - [--perfEvent=] -``` - -The following describes the parameters in detail. - -| Parameter | Attribute | Function | Parameter Type | Default Value | Value Range | -| ----------------- | ---- | ------------------------------------------------------------ | ------ | -------- | ---------------------------------- | -| `--modelFile=` | Mandatory | Specifies the file path of the MindSpore Lite model for benchmark testing. 
| String | Null | - | -| `--accuracyThreshold=` | Optional | Specifies the accuracy threshold. | Float | 0.5 | - | -| `--benchmarkDataFile=` | Optional | Specifies the file path of the benchmark data. The benchmark data, as the comparison output of the tested model, is output from the forward inference of the tested model under other deep learning frameworks using the same input. | String | Null | - | -| `--benchmarkDataType=` | Optional | Specifies the calibration data type. | String | FLOAT | FLOAT, INT32, INT8 or UINT8| -| `--cpuBindMode=` | Optional | Specifies the type of the CPU core bound to the model inference program. | Integer | 1 | 2: medium core
    1: large core
    0: not bound | -| `--device=` | Optional | Specifies the type of the device on which the model inference program runs. | String | CPU | CPU or GPU or NPU | -| `--help` | Optional | Displays the help information about the `benchmark` command. | - | - | - | -| `--inDataFile=` | Optional | Specifies the file path of the input data of the tested model. If this parameter is not set, a random value will be used. | String | Null | - | -| `--loopCount=` | Optional | Specifies the number of forward inference times of the tested model when the Benchmark tool is used for the benchmark testing. The value should be a positive integer. | Integer | 10 | - | -| `--numThreads=` | Optional | Specifies the number of threads for running the model inference program. | Integer | 2 | - | -| `--warmUpLoopCount=` | Optional | Specifies the number of preheating inference times of the tested model before multiple rounds of the benchmark test are executed. | Integer | 3 | - | -| `--enableFp16=` | Optional | Specifies whether the float16 operator is preferred. | Boolean | false | true, false | -| `--timeProfiling=` | Optional | Specifies whether to use TimeProfiler to print every kernel's cost time. | Boolean | false | true, false | -| `--inputShapes=` | Optional | Specifies the shape of input data, the format should be NHWC. Use "," to segregate each dimension of input shape, and for several input shapes, use ":" to segregate. | String | Null | - | -| `--perfProfiling=` | Optional | Specifies whether to use PerfProfiler to print every kernel's CPU performance data (PMU readings), it is disabled when timeProfiling is true. Only aarch64 CPU is supported. | Boolean | false | true, false | -| `--perfEvent=` | Optional | Specifies what CPU performance data to measure when PerfProfiling is true. 
When set as CYCLE, the number of CPU cycles and instructions will be printed; when set as CACHE, cache reference times and cache miss times will be printed; when set as STALL, CPU front-end stall cycles and back-end stall cycles will be printed. | String | CYCLE | CYCLE/CACHE/STALL | - -### Example - -When using the Benchmark tool to perform benchmark testing on different MindSpore Lite models, you can set different parameters to implement different test functions. The testing is classified into performance test and accuracy test. - -#### Performance Test - -The main test indicator of the performance test performed by the Benchmark tool is the duration of a single forward inference. In a performance test, you do not need to set benchmark data parameters such as `benchmarkDataFile`. But you can set the parameter `timeProfiling` as True or False to decide whether to print the running time of the model at the network layer on a certain device. The default value of `timeProfiling` is False. For example: - -```bash -./benchmark --modelFile=./models/test_benchmark.ms -``` - -This command uses a random input, and other parameters use default values. After this command is executed, the following statistics are displayed. The statistics include the minimum duration, maximum duration, and average duration of a single inference after the tested model runs for the specified number of inference rounds. - -```text -Model = test_benchmark.ms, numThreads = 2, MinRunTime = 72.228996 ms, MaxRuntime = 73.094002 ms, AvgRunTime = 72.556000 ms -``` - -```bash -./benchmark --modelFile=./models/test_benchmark.ms --timeProfiling=true -``` - -This command uses a random input, sets the parameter `timeProfiling` as true, and other parameters use default values. After this command is executed, the statistics on the running time of the model at the network layer will be displayed as follows. In this case, the statistics are displayed by`opName` and `optype`. 
`opName` indicates the operator name, `optype` indicates the operator type, and `avg` indicates the average running time of the operator per single run, `percent` indicates the ratio of the operator running time to the total operator running time, `calledTimess` indicates the number of times that the operator is run, and `opTotalTime` indicates the total time that the operator is run for a specified number of times. Finally, `total time` and `kernel cost` show the average time consumed by a single inference operation of the model and the sum of the average time consumed by all operators in the model inference, respectively. - -```text ------------------------------------------------------------------------------------------ -opName avg(ms) percent calledTimess opTotalTime -conv2d_1/convolution 2.264800 0.824012 10 22.648003 -conv2d_2/convolution 0.223700 0.081390 10 2.237000 -dense_1/BiasAdd 0.007500 0.002729 10 0.075000 -dense_1/MatMul 0.126000 0.045843 10 1.260000 -dense_1/Relu 0.006900 0.002510 10 0.069000 -max_pooling2d_1/MaxPool 0.035100 0.012771 10 0.351000 -max_pooling2d_2/MaxPool 0.014300 0.005203 10 0.143000 -max_pooling2d_2/MaxPool_nchw2nhwc_reshape_1/Reshape_0 0.006500 0.002365 10 0.065000 -max_pooling2d_2/MaxPool_nchw2nhwc_reshape_1/Shape_0 0.010900 0.003966 10 0.109000 -output/BiasAdd 0.005300 0.001928 10 0.053000 -output/MatMul 0.011400 0.004148 10 0.114000 -output/Softmax 0.013300 0.004839 10 0.133000 -reshape_1/Reshape 0.000900 0.000327 10 0.009000 -reshape_1/Reshape/shape 0.009900 0.003602 10 0.099000 -reshape_1/Shape 0.002300 0.000837 10 0.023000 -reshape_1/strided_slice 0.009700 0.003529 10 0.097000 ------------------------------------------------------------------------------------------ -opType avg(ms) percent calledTimess opTotalTime -Activation 0.006900 0.002510 10 0.069000 -BiasAdd 0.012800 0.004657 20 0.128000 -Conv2D 2.488500 0.905401 20 24.885004 -MatMul 0.137400 0.049991 20 1.374000 -Nchw2Nhwc 0.017400 0.006331 20 0.174000 -Pooling 
The accuracy test performed by the Benchmark tool aims to verify the accuracy of the MindSpore model output by setting benchmark data (the default input and benchmark data type are float32).
- -```text -InData0: 139.947 182.373 153.705 138.945 108.032 164.703 111.585 227.402 245.734 97.7776 201.89 134.868 144.851 236.027 18.1142 22.218 5.15569 212.318 198.43 221.853 -================ Comparing Output data ================ -Data of node age_out : 5.94584e-08 6.3317e-08 1.94726e-07 1.91809e-07 8.39805e-08 7.66035e-08 1.69285e-07 1.46246e-07 6.03796e-07 1.77631e-07 1.54343e-07 2.04623e-07 8.89609e-07 3.63487e-06 4.86876e-06 1.23939e-05 3.09981e-05 3.37098e-05 0.000107102 0.000213932 0.000533579 0.00062465 0.00296401 0.00993984 0.038227 0.0695085 0.162854 0.123199 0.24272 0.135048 0.169159 0.0221256 0.013892 0.00502971 0.00134921 0.00135701 0.000383242 0.000163475 0.000136294 9.77864e-05 8.00793e-05 5.73874e-05 3.53858e-05 2.18535e-05 2.04467e-05 1.85286e-05 1.05075e-05 9.34751e-06 6.12732e-06 4.55476e-06 -Mean bias of node age_out : 0% -Mean bias of all nodes: 0% -======================================================= -``` - -To set specified input shapes (such as 1,32,32,1), use the command as follows: - -```bash -./benchmark --modelFile=./models/test_benchmark.ms --inDataFile=./input/test_benchmark.bin --inputShapes=1,32,32,1 --device=CPU --accuracyThreshold=3 --benchmarkDataFile=./output/test_benchmark.out -``` - -#### CPU Performance Test - -The main test indicator of the CPU performance test performed by the Benchmark tool is the readings of CPU Performance Monitor Unit(PMU) of a single forward inference, including the number of CPU cycles and instructions, cache reference times and cache miss times, front-end stall cycles and back-end stall cycles. In a performance test, you do not need to set benchmark data parameters such as `benchmarkDataFile`. But you can set the parameter `perfProfiling` as True or False to decide whether to print the CPU performance data of the model at the network layer on a certain device, and set `perfEvent` as `CYCLE`/`CACHE`/`STALL` to decide what CPU performance data to measure. 
In this case, the statistics are displayed by `opName` and `optype`.
- -```text ------------------------------------------------------------------------------------------ -opName cycles(k) cycles(%) ins(k) ins(%) -Add_Plus214_Output_0 1.53 0.006572 1.27 0.002148 -Conv_Convolution110_Output_0 91.12 0.390141 217.58 0.369177 -Conv_COnvolution28_Output_0 114.61 0.490704 306.28 0.519680 -Matmul_Times212_Output_0 8.75 0.037460 15.55 0.026385 -MaxPool_Pooling160_Output_0 3.24 0.013873 8.70 0.014767 -MaxPool_Pooling66_Output_0 11.63 0.049780 35.17 0.059671 -Reshape_Pooling160_Output_0_reshape0 0.91 0.003899 1.58 0.002677 -nhwc2nchw_MaxPool_Pooling160_Output_0_post8_0 1.77 0.007571 3.25 0.005508 ------------------------------------------------------------------------------------------ -opType cycles(k) cycles(%) ins(k) ins(%) -Add 1.53 0.006572 1.27 0.002148 -Conv2D 205.73 0.880845 523.85 0.888856 -MatMul 8.75 0.037460 15.55 0.026385 -Nhwc2nchw 1.77 0.007571 3.25 0.005508 -Pooling 14.87 0.063654 43.87 0.074437 -Reshape 0.91 0.003839 1.58 0.002677 - -Model = test_benchmark_2.ms, NumThreads = 1, MinRunTime = 0.104000 ms, MaxRunTime = 0.179000 ms, AvgRunTime = 0.116000 ms - ------------------------------------------------------------------------------------------ -``` - -When `perfEvent` is set as `CACHE`, the columns will be `cache ref(k)`/`cache ref(%)`/`miss(k)`/`miss(%)`, which indicate cache reference times / cache reference ratio / cache miss times / cache miss ratio(to all cache misses, not to cache references); when `perfEvent` is set as `STALL`, the columns will be`frontend(k)`/`frontend(%)`/`backend(k)`/`backend(%)`, which indicate CPU front-end stall cycles / front-end stall cycles ratio / back-end stall cycles / back-end stall cycles ratio. 
For example: - -```bash -./benchmark --modelFile=./models/test_benchmark_2.ms --perfProfiling=true --numThreads=1 --perfEvent="CACHE" -``` - -```bash -./benchmark --modelFile=./models/test_benchmark_2.ms --perfProfiling=true --numThreads=1 --perfEvent="STALL" -``` - -## Windows Environment Usage - -### Environment Preparation - -To use the Benchmark tool, you need to prepare the environment as follows: - -- Compilation: Install build dependencies and perform build. The code of the Benchmark tool is stored in the `mindspore/lite/tools/benchmark` directory of the MindSpore source code. For details about the build operations, see the [Environment Requirements](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#id1) and [Compilation Example](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#id3) in the build document. -- Add the path of dynamic library required by the benchmark to the environment variables PATH. - - ````bash - set PATH=%PACKAGE_ROOT_PATH%\inference\lib;%PATH% - ```` - - %PACKAGE_ROOT_PATH% is the decompressed package path obtained by compiling. - -### Parameter Description - -The command used for benchmark testing based on the compiled Benchmark tool is as follows. The parameters are the same as those used in the Linux environment, and will not be repeated here. - -```bat -call benchmark.exe [--modelFile=] [--accuracyThreshold=] - [--benchmarkDataFile=] [--benchmarkDataType=] - [--cpuBindMode=] [--device=] [--help] - [--inDataFile=] [--loopCount=] - [--numThreads=] [--warmUpLoopCount=] - [--enableFp16=] [--timeProfiling=] - [--inputShapes=] -``` - -### Example - -When using the Benchmark tool to perform benchmark testing on different MindSpore Lite models, you can set different parameters to implement different test functions. The testing is classified into performance test and accuracy test. The output statistics are the same as those in the Linux environment, and will not be repeated here. 
- -#### Performance Test - -- Use a random input and default values for other parameters. - -```bat -call benchmark.exe --modelFile=test_benchmark.ms -``` - -- set `timeProfiling=true`, use a random input and default values for other parameters. - -```bat -call benchmark.exe --modelFile=test_benchmark.ms --timeProfiling=true -``` - -#### Accuracy Test - - The input data is set by the `inDataFile` parameter, and the calibration data is set by the `benchmarkDataFile` parameter. - -- Set the accuracy threshold to 3%. - -```bat -call benchmark.exe --modelFile=test_benchmark.ms --inDataFile=.test_benchmark.bin --benchmarkDataFile=test_benchmark.out --accuracyThreshold=3 -``` - -- Run on the CPU. - -```bat -call benchmark.exe --modelFile=test_benchmark.ms --inDataFile=test_benchmark.bin --benchmarkDataFile=test_benchmark.out --device=CPU -``` - -- Set specified input shapes. - -```bat -call benchmark.exe --modelFile=test_benchmark.ms --inDataFile=test_benchmark.bin --benchmarkDataFile=test_benchmark.out --inputShapes=1,32,32,1 -``` diff --git a/tutorials/lite/source_en/use/benchmark_train_tool.md b/tutorials/lite/source_en/use/benchmark_train_tool.md deleted file mode 100644 index c8802849e337dd9013af10a3ffc848884dfb60e7..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/benchmark_train_tool.md +++ /dev/null @@ -1,153 +0,0 @@ -# benchmark_train - -`Linux` `Environment Preparation` `Benchmark Testing` `Intermediate` `Expert` - - - -- [Performing Benchmark Testing](#performing-benchmark-testing) - - [Overview](#overview) - - [Linux Environment Usage](#linux-environment-usage) - - [Environment Preparation](#environment-preparation) - - [Parameter Description](#parameter-description) - - [Example](#example) - - [Performance Test](#performance-test) - - [Accuracy Test](#accuracy-test) - - - - - -## Overview - -The same as `benchmark`, you can use the `benchmark_train` tool to perform benchmark testing on a MindSpore ToD (Train on Device) model. 
It can not only perform quantitative analysis (performance) on the execution duration of the model, but also perform comparative error analysis (accuracy) based on the output of the specified model.
- -### Parameter Description - -The command used for benchmark testing based on the compiled `benchmark_train` tool is as follows: - -```bash -./benchmark_train [--modelFile=] [--accuracyThreshold=] - [--expectedDataFile=] [--warmUpLoopCount=] - [--timeProfiling=] [--help] - [--inDataFile=] [--epochs=] - [--exportFile=] -``` - -The following describes the parameters in detail. - -| Parameter | Attribute | Function | Parameter Type | Default Value | Value Range | -| ----------------- | ---- | ------------------------------------------------------------ | ------ | -------- | ---------------------------------- | -| `--modelFile=` | Mandatory | Specifies the file path of the MindSpore Lite model for benchmark testing. | String | Null | - | -| `--accuracyThreshold=` | Optional | Specifies the accuracy threshold. | Float | 0.5 | - | -| `--expectedDataFile=` | Optional | Specifies the file path of the benchmark data. The benchmark data, as the comparison output of the tested model, is output from the forward inference of the tested model under other deep learning frameworks using the same input. | String | Null | - | -| `--help` | Optional | Displays the help information about the `benchmark_train` command. | - | - | - | -| `--warmUpLoopCount=` | Optional | Specifies the number of preheating inference times of the tested model before multiple rounds of the benchmark test are executed. | Integer | 3 | - | -| `--timeProfiling=` | Optional | Specifies whether to use TimeProfiler to print every kernel's cost time. | Boolean | false | true, false | -| `--inDataFile=` | Optional | Specifies the file path of the input data of the tested model. If this parameter is not set, a random value will be used. | String | Null | - | -| `--epochs=` | Optional | Specifies the number of training epochs and print the consuming time. | Integer | 0 | >=0 | -| `--exportFile=` | Optional | Specifies the path of exporting file. 
-./benchmark_train --modelFile=./models/test_benchmark.ms --epochs=10
`opName` indicates the operator name, `optype` indicates the operator type, and `avg` indicates the average running time of the operator per single run, `percent` indicates the ratio of the operator running time to the total operator running time, `calledTimess` indicates the number of times that the operator is run, and `opTotalTime` indicates the total time that the operator is run for a specified number of times. Finally, `total time` and `kernel cost` show the average time consumed by a single inference operation of the model and the sum of the average time consumed by all operators in the model inference, respectively. - -```text ------------------------------------------------------------------------------------------ -opName avg(ms) percent calledTimess opTotalTime -conv2d_1/convolution 2.264800 0.824012 10 22.648003 -conv2d_2/convolution 0.223700 0.081390 10 2.237000 -dense_1/BiasAdd 0.007500 0.002729 10 0.075000 -dense_1/MatMul 0.126000 0.045843 10 1.260000 -dense_1/Relu 0.006900 0.002510 10 0.069000 -max_pooling2d_1/MaxPool 0.035100 0.012771 10 0.351000 -max_pooling2d_2/MaxPool 0.014300 0.005203 10 0.143000 -max_pooling2d_2/MaxPool_nchw2nhwc_reshape_1/Reshape_0 0.006500 0.002365 10 0.065000 -max_pooling2d_2/MaxPool_nchw2nhwc_reshape_1/Shape_0 0.010900 0.003966 10 0.109000 -output/BiasAdd 0.005300 0.001928 10 0.053000 -output/MatMul 0.011400 0.004148 10 0.114000 -output/Softmax 0.013300 0.004839 10 0.133000 -reshape_1/Reshape 0.000900 0.000327 10 0.009000 -reshape_1/Reshape/shape 0.009900 0.003602 10 0.099000 -reshape_1/Shape 0.002300 0.000837 10 0.023000 -reshape_1/strided_slice 0.009700 0.003529 10 0.097000 ------------------------------------------------------------------------------------------ -opType avg(ms) percent calledTimess opTotalTime -Activation 0.006900 0.002510 10 0.069000 -BiasAdd 0.012800 0.004657 20 0.128000 -Conv2D 2.488500 0.905401 20 24.885004 -MatMul 0.137400 0.049991 20 1.374000 -Nchw2Nhwc 0.017400 0.006331 20 0.174000 -Pooling 
The accuracy test performed by the Benchmark tool aims to verify the accuracy of the MindSpore model output by setting benchmark data (the default input and benchmark data type are float32).
- -```text -InData0: 139.947 182.373 153.705 138.945 108.032 164.703 111.585 227.402 245.734 97.7776 201.89 134.868 144.851 236.027 18.1142 22.218 5.15569 212.318 198.43 221.853 -================ Comparing Output data ================ -Data of node age_out : 5.94584e-08 6.3317e-08 1.94726e-07 1.91809e-07 8.39805e-08 7.66035e-08 1.69285e-07 1.46246e-07 6.03796e-07 1.77631e-07 1.54343e-07 2.04623e-07 8.89609e-07 3.63487e-06 4.86876e-06 1.23939e-05 3.09981e-05 3.37098e-05 0.000107102 0.000213932 0.000533579 0.00062465 0.00296401 0.00993984 0.038227 0.0695085 0.162854 0.123199 0.24272 0.135048 0.169159 0.0221256 0.013892 0.00502971 0.00134921 0.00135701 0.000383242 0.000163475 0.000136294 9.77864e-05 8.00793e-05 5.73874e-05 3.53858e-05 2.18535e-05 2.04467e-05 1.85286e-05 1.05075e-05 9.34751e-06 6.12732e-06 4.55476e-06 -Mean bias of node age_out : 0% -Mean bias of all nodes: 0% -======================================================= -``` - -To set specified input shapes (such as 1,32,32,1), use the command as follows: - -```bash -./benchmark_train --modelFile=./models/test_benchmark.ms --inDataFile=./input/test_benchmark.bin --inputShapes=1,32,32,1 --device=CPU --accuracyThreshold=3 --benchmarkDataFile=./output/test_benchmark.out -``` diff --git a/tutorials/lite/source_en/use/build.md b/tutorials/lite/source_en/use/build.md deleted file mode 100644 index 8c110760be6d1f2c66a266c92184f35eb2514981..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/build.md +++ /dev/null @@ -1,526 +0,0 @@ -# Building MindSpore Lite - -`Windows` `Linux` `Android` `Environment Preparation` `Intermediate` `Expert` - - - -- [Building MindSpore Lite](#building-mindspore-lite) - - [Linux Environment Compilation](#linux-environment-compilation) - - [Environment Requirements](#environment-requirements) - - [Compilation Options](#compilation-options) - - [Compilation Example](#compilation-example) - - [Inference Output Description](#inference-output-description) - - 
[Description of Converter's Directory Structure](#description-of-converters-directory-structure) - - [Description of Runtime and Other tools' Directory Structure](#description-of-runtime-and-other-tools-directory-structure) - - [Training Output Description](#training-output-description) - - [Description of Training Runtime and Related Tools' Directory Structure](#description-of-training-runtime-and-related-tools-directory-structure) - - [Windows Environment Compilation](#windows-environment-compilation) - - [Environment Requirements](#environment-requirements-1) - - [Compilation Options](#compilation-options-1) - - [Compilation Example](#compilation-example-1) - - [Output Description](#output-description) - - [Description of Runtime and Related Tools' Directory Structure](#description-of-runtime-and-related-tools-directory-structure) - - [Docker Environment Compilation](#docker-environment-compilation) - - [Environmental Preparation](#environmental-preparation) - - [Download the docker image](#download-the-docker-image) - - [Create a container](#create-a-container) - - [Enter the container](#enter-the-container) - - [Compilation Options](#compilation-options-2) - - [Compilation Example](#compilation-example-2) - - [Output Description](#output-description-1) - - - - - -This chapter introduces how to quickly compile MindSpore Lite, which includes the following modules: - -Modules in inference version: - -| Module | Support Platform | Description | -| --- | ---- | ---- | -| converter | Linux, Windows | Model Conversion Tool | -| runtime(cpp, java) | Linux, Windows, Android | Model Inference Framework(Windows platform does not support java version runtime) | -| benchmark | Linux, Windows, Android | Benchmarking Tool | -| cropper | Linux | Static library crop tool for libmindspore-lite.a | -| minddata | Linux, Android | Image Processing Library | - -Modules in training version: - -| Module | Support Platform | Description | -| --------------- | ---------------- | 
-| runtime(cpp)    | Linux, Android   | Model Train Framework(java is not supported)     |
-> - When compiling the NPU operator, you need to download [DDK V500.010](https://developer.huawei.com/consumer/cn/doc/development/hiai-Library/ddk-download-0000001053590180), and set the directory where the compressed package is decompressed to the environment variable `${HWHIAI_DDK}`.
Otherwise, full compilation is performed. | None | No | -| -j[n] | Sets the number of threads used during compilation. Otherwise, the number of threads is set to 8 by default. | Integer | No | -| -e | In the ARM architecture, select the backend operator. Otherwise, all operator of the framework is compiled at the same time. | cpu, gpu, npu | No | -| -h | Displays the compilation help information. | None | No | -| -n | Specifies to compile the lightweight image processing module. | lite_cv | No | -| -A | Language used by mindspore lite, default cpp. If the parameter is set to java, the AAR and JAR for Linux x86 are compiled. | cpp, java | No | -| -C | If this parameter is set, the converter is compiled, default on. | on, off | No | -| -o | If this parameter is set, the benchmark and static library crop tool are compiled, default on. | on, off | No | -| -t | If this parameter is set, the testcase is compiled, default off. | on, off | No | -| -T | If this parameter is set, MindSpore Lite training version is compiled, i.e., this option is required when compiling, default off. | on, off | No | -| -W | Enable x86_64 SSE or AVX instruction set, default off. | sse, avx, off | No | - -> - When the `-I` parameter changes, such as `-I x86_64` is converted to `-I arm64`, adding `-i` for parameter compilation does not take effect. -> - When compiling the AAR package, the `-A java` parameter must be added, and there is no need to add the `-I` parameter. By default, the built-in CPU and GPU operators are compiled at the same time. -> - The compiler will only generate training packages when `-T` is opened. -> - Any `-e` compilation option, the CPU operators will be compiled into it. - -### Compilation Example - -First, download source code from the MindSpore code repository. 
- -```bash -git clone https://gitee.com/mindspore/mindspore.git -``` - -Then, run the following commands in the root directory of the source code to compile MindSpore Lite of different versions: - -- Debug version of the x86_64 architecture: - - ```bash - bash build.sh -I x86_64 -d - ``` - -- Release version of the x86_64 architecture, with the number of threads set: - - ```bash - bash build.sh -I x86_64 -j32 - ``` - -- Release version of the x86_64 architecture, with the testcase compiled: - - ```bash - bash build.sh -I x86_64 -t on - ``` - -- Release version of the ARM 64-bit architecture in the incremental compilation mode, with the number of threads set: - - ```bash - bash build.sh -I arm64 -i -j32 - ``` - -- Release version of the ARM 64-bit architecture, with the built-in CPU operators compiled: - - ```bash - bash build.sh -I arm64 -e cpu - ``` - -- Release version of the ARM 64-bit architecture, with the built-in CPU and GPU operators compiled: - - ```bash - bash build.sh -I arm64 -e gpu - ``` - -- Release version of the ARM 64-bit architecture, with the built-in CPU and NPU operators compiled: - - ```bash - bash build.sh -I arm64 -e npu - ``` - -- Compile ARM64 with image preprocessing module: - - ```bash - bash build.sh -I arm64 -n lite_cv - ``` - -- Compile MindSpore Lite AAR and JAR for Linux x86, AAR compiles the built-in CPU and GPU operators at the same time, but JAR only compiles the built-in CPU: - - ```bash - bash build.sh -A java - ``` - -- Compile MindSpore Lite AAR and JAR for Linux x86, with the built-in CPU operators compiled: - - ```bash - bash build.sh -A java -e cpu - ``` - -- Release version of the x86_64 architecture, with the benchmark, cropper and converter compiled: - - ```bash - bash build.sh -I x86_64 - ``` - -- Release version of the x86_64 architecture, with the converter compiled and train on device enabled: - - ```bash - bash build.sh -I x86_64 -T on - ``` - -### Inference Output Description - -After the compilation is complete, 
go to the `mindspore/output` directory of the source code to view the file generated after compilation. The file is divided into the following parts. - -- `mindspore-lite-{version}-inference-{os}-{arch}.tar.gz`: Contains model inference framework runtime (cpp), and related tools. -- `mindspore-lite-maven-{version}.zip`: Contains model reasoning framework AAR package. - -> - version: Version of the output, consistent with that of the MindSpore. -> - os: Operating system on which the output will be deployed. -> - arch: System architecture on which the output will be deployed. - -Execute the decompression command to obtain the compiled output: - -```bash -tar -xvf mindspore-lite-{version}-inference-{os}-{arch}.tar.gz -unzip mindspore-lite-maven-{version}.zip -``` - -#### Description of Converter's Directory Structure - -The conversion tool is only available under the `-I x86_64` compilation option, and the content includes the following parts: - -```text -mindspore-lite-{version}-inference-linux-x64 -└── tools - └── converter - ├── converter # Model conversion tool - │ └── converter_lite # Executable program - └── lib # The dynamic link library that converter depends - ├── libglog.so.0 # Dynamic library of Glog - └── libmslite_converter_plugin_reg.so # A dynamic library of plugin registry -``` - -#### Description of CodeGen's Directory Structure - -The codegen executable program is only available under the `-I x86_64` compilation option, and only the operator library required by the inference code generated by codegen is generated under the `-I arm64` and `-I arm32` compilation options. 
- -- When the compilation option is `-I x86_64`: - - ```text - mindspore-lite-{version}-inference-linux-x64 - └── tools - └── codegen # Code generation tool - ├── codegen # Executable program - ├── include # Header files of inference framework - │ ├── nnacl # nnacl operator header file - │ └── wrapper - ├── lib - │ └── libwrapper.a # MindSpore Lite CodeGen generates code dependent operator static library - └── third_party - ├── include - │ └── CMSIS # ARM CMSIS NN operator header files - └── lib - └── libcmsis_nn.a # ARM CMSIS NN operator static library - ``` - -- When the compilation option is `-I arm64` or `-I arm32`: - - ```text - mindspore-lite-{version}-inference-android-{arch} - └── tools - └── codegen # Code generation tool - └── operator_library # Operator library - ├── include # Header files of inference framework - │ ├── nnacl # nnacl operator header file - │ └── wrapper - └── lib # Inference framework library - └── libwrapper.a # MindSpore Lite CodeGen generates code dependent static library - ``` - -#### Description of Runtime and Other tools' Directory Structure - -The inference framework can be obtained under `-I x86_64`, `-I arm64` and `-I arm32` compilation options, and the content includes the following parts: - -- When the compilation option is `-I x86_64`: - - ```text - mindspore-lite-{version}-inference-linux-x64 - ├── inference - │ ├── include # Header files of inference framework - │ └── lib # Inference framework library - │ ├── libminddata-lite.so # The files of image processing dynamic library - │ ├── libmindspore-lite.a # Static library of infernece framework in MindSpore Lite - │ └── libmindspore-lite.so # Dynamic library of infernece framework in MindSpore Lite - └── tools - ├── benchmark # Benchmarking tool - │ └── benchmark # Executable program - ├── codegen # Code generation tool - │ ├── codegen # Executable program - │ ├── include # operator header file - │ ├── lib # operator static library - │ └── third_party # ARM CMSIS NN static 
library - ├── converter # Model conversion tool - └── cropper # Static library crop tool - ├── cropper # Executable file of static library crop tool - └── cropper_mapping_cpu.cfg # Crop cpu library related configuration files - ``` - -- When the compilation option is `-I arm64` or `-I arm32`: - - ```text - mindspore-lite-{version}-inference-android-{arch} - ├── inference - │ ├── include # Header files of inference framework - │ ├── lib # Inference framework library - │ │ ├── libminddata-lite.so # The files of image processing dynamic library - │ │ ├── libmindspore-lite.a # Static library of infernece framework in MindSpore Lite - │ │ └── libmindspore-lite.so # Dynamic library of infernece framework in MindSpore Lite - │ └── third_party - │ └── hiai_ddk # NPU library, only exists in arm64 package - └── tools - ├── benchmark # Benchmarking tool - │ └── benchmark - └── codegen # Code generation tool - ├── include # operator header file - └── lib # operator static library - ``` - -- When the compilation option is `-A java`: - - ```text - mindspore-lite-maven-{version} - └── mindspore - └── mindspore-lite - └── {version} - └── mindspore-lite-{version}.aar # MindSpore Lite runtime aar - ``` - - ```text - mindspore-lite-{version}-inference-linux-x64-jar - └── jar - ├── libmindspore-lite-jni.so # Dynamic library of MindSpore Lite inference framework - ├── libmindspore-lite.so # MindSpore Lite JNI dynamic library - └── mindspore-lite-java.jar # MindSpore Lite inference framework jar package - ``` - -> - Compile ARM64 to get the inference framework output of cpu/gpu/npu by default, if you add `-e gpu`, you will get the inference framework output of cpu/gpu, ARM32 only supports CPU. - -### Training Output Description - -If the `-T on` is added to the MindSpore Lite, go to the `mindspore/output` directory of the source code to view the file generated after compilation. The file is divided into the following parts. 
- -- `mindspore-lite-{version}-train-{os}-{arch}.tar.gz`: Contains model training framework, performance analysis tool. - -> - version: Version of the output, consistent with that of the MindSpore. -> - os: Operating system on which the output will be deployed. -> - arch: System architecture on which the output will be deployed. - -Execute the decompression command to obtain the compiled output: - -```bash -tar -xvf mindspore-lite-{version}-train-{os}-{arch}.tar.gz -``` - -#### Description of Training Runtime and Related Tools' Directory Structure - -The MindSpore Lite training framework can be obtained under `-I x86_64`, `-I arm64` and `-I arm32` compilation options, and the content includes the following parts: - -- When the compilation option is `-I x86_64`: - - ```text - mindspore-lite-{version}-train-linux-x64 - ├── tools - │ ├── benchmark_train # Training model benchmark tool - │ ├── converter # Model conversion tool - │ └── cropper # Static library crop tool - │ ├── cropper # Executable file of static library crop tool - │ └── cropper_mapping_cpu.cfg # Crop cpu library related configuration files - └── train - ├── include # Header files of training framework - ├── lib # Inference framework library - │ ├── libminddata-lite.so # The files of image processing dynamic library - │ ├── libmindspore-lite-train.a # Static library of training framework in MindSpore Lite - │ └── libmindspore-lite-train.so # Dynamic library of training framework in MindSpore Lite - └── third_party - └── libjpeg-turbo - ``` - -- When the compilation option is `-I arm64` or `-I arm32`: - - ```text - mindspore-lite-{version}-train-android-{arch} - ├── tools - │ ├── benchmark # Benchmarking tool - │ ├── benchmark_train # Training model benchmark tool - └── train - ├── include # Header files of training framework - ├── lib # Training framework library - │ ├── libminddata-lite.so # The files of image processing dynamic library - │ ├── libmindspore-lite-train.a # Static library of training 
framework in MindSpore Lite - │ └── libmindspore-lite-train.so # Dynamic library of training framework in MindSpore Lite - └── third_party - ├── hiai_ddk # NPU library, only exists in arm64 package - └── libjpeg-turbo - ``` - -## Windows Environment Compilation - -### Environment Requirements - -- System environment: Windows 7, Windows 10; 64-bit. - -- Compilation dependencies are: - - [CMake](https://cmake.org/download/) >= 3.18.3 - - [MinGW GCC](https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win64/Personal%20Builds/mingw-builds/7.3.0/threads-posix/seh/x86_64-7.3.0-release-posix-seh-rt_v5-rev0.7z/download) = 7.3.0 - -> - The compilation script will execute `git clone` to obtain the code of the third-party dependent libraries. -> - If you want to compile 32-bit Mindspore Lite, please use 32-bit [MinGW](https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win32/Personal%20Builds/mingw-builds/7.3.0/threads-posix/dwarf/i686-7.3.0-release-posix-dwarf-rt_v5-rev0.7z) to compile. - -### Compilation Options - -The compilation options of MindSpore Lite are as follows: - -| Parameter | Parameter Description | Mandatory or Not | -| -------- | ----- | ---- | -| lite | Set this parameter to compile the MindSpore Lite project. | Yes | -| [n] | Set the number of threads used during compilation, otherwise the default is set to 6 threads. | No | - -### Compilation Example - -First, use the git tool to download the source code from the MindSpore code repository. - -```bat -git clone https://gitee.com/mindspore/mindspore.git -``` - -Then, use the cmd tool to compile MindSpore Lite in the root directory of the source code and execute the following commands. - -- Compile the Windows version with the default number of threads (6 threads). - -```bat -call build.bat lite -``` - -- Compile the Windows version with the specified number of 8 threads. 
- -```bat -call build.bat lite 8 -``` - -### Output Description - -After the compilation is complete, go to the `mindspore/output` directory of the source code to view the file generated after compilation. The file is divided into the following parts. - -- `mindspore-lite-{version}-inference-win-x64.zip`: Contains model inference framework and related tool. - -> version: Version of the output, consistent with that of the MindSpore. - -Execute the decompression command to obtain the compiled output: - -```bat -unzip mindspore-lite-{version}-inference-win-x64.zip -``` - -#### Description of Runtime and Related Tools' Directory Structure - -The content includes the following parts: - -```text -mindspore-lite-{version}-inference-win-x64 -├── inference -│ ├── include # Header files of inference framework -│ └── lib -│ ├── libgcc_s_seh-1.dll # Dynamic library of MinGW -│ ├── libmindspore-lite.a # Static library of infernece framework in MindSpore Lite -│ ├── libmindspore-lite.dll # Dynamic library of infernece framework in MindSpore Lite -│ ├── libmindspore-lite.dll.a # Link file of dynamic library of infernece framework in MindSpore Lite -│ ├── libssp-0.dll # Dynamic library of MinGW -│ ├── libstdc++-6.dll # Dynamic library of MinGW -│ └── libwinpthread-1.dll # Dynamic library of MinGW -└── tools - ├── benchmark # Benchmarking tool - │ └── benchmark.exe # Executable program - └── converter # Model conversion tool - ├── converter - │ └── converter_lite.exe # Executable program - └── lib - ├── libgcc_s_seh-1.dll # Dynamic library of MinGW - ├── libglog.dll # Dynamic library of Glog - ├── libmslite_converter_plugin_reg.dll # A dynamic library of plugin registry - ├── libssp-0.dll # Dynamic library of MinGW - ├── libstdc++-6.dll # Dynamic library of MinGW - └── libwinpthread-1.dll # Dynamic library of MinGW -``` - -> Currently, MindSpore Lite is not supported on Windows. 
- -## Docker Environment Compilation - -### Environmental Preparation - -#### Download the docker image - -```bash -docker pull swr.cn-south-1.myhuaweicloud.com/mindspore-build/mindspore-lite:ubuntu18.04.2-20210323 -``` - -> - Before downloading the image, please make sure docker has been installed. -> - Docker image does not currently support Windows version compilation. -> - Third-party libraries that compile dependencies have been installed in the image and environment variables have been configured. - -#### Create a container - -```bash -docker run -tid --net=host --name=docker01 swr.cn-south-1.myhuaweicloud.com/mindspore-build/mindspore-lite:ubuntu18.04.2-20210323 -``` - -#### Enter the container - -```bash -docker exec -ti -u 0 docker01 bash -``` - -### Compilation Options - -Refer to [Linux Environment Compilation](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#linux-environment-compilation) - -### Compilation Example - -Refer to [Linux Environment Compilation](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#linux-environment-compilation) - -### Output Description - -Refer to [Linux Environment Compilation](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#linux-environment-compilation) diff --git a/tutorials/lite/source_en/use/converter_tool.md b/tutorials/lite/source_en/use/converter_tool.md deleted file mode 100644 index 1405774b49a7dba6718482fa3f423615234ea918..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/converter_tool.md +++ /dev/null @@ -1,207 +0,0 @@ -# Converting Models for Inference - -`Windows` `Linux` `Model Converting` `Intermediate` `Expert` - - - -- [Converting Models for Inference](#converting-models-for-inference) - - [Overview](#overview) - - [Linux Environment Instructions](#linux-environment-instructions) - - [Environment Preparation](#environment-preparation) - - [Parameter Description](#parameter-description) - - [Example](#example) - - [Windows Environment 
Instructions](#windows-environment-instructions) - - [Environment Preparation](#environment-preparation-1) - - [Parameter Description](#parameter-description-1) - - [Example](#example-1) - - - - - -## Overview - -MindSpore Lite provides a tool for offline model conversion. It supports conversion of multiple types of models. The converted models can be used for inference. The command line parameters contain multiple personalized options, providing a convenient conversion method for users. - -Currently, the following input formats are supported: MindSpore, TensorFlow Lite, Caffe, TensorFlow and ONNX. - -The ms model converted by the conversion tool supports the conversion tool and the higher version of the Runtime framework to perform inference. - -## Linux Environment Instructions - -### Environment Preparation - -To use the MindSpore Lite model conversion tool, you need to prepare the environment as follows: - -- [Compile](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html) or [download](https://www.mindspore.cn/tutorial/lite/en/master/use/downloads.html) model transfer tool. - -- Add the path of dynamic library required by the conversion tool to the environment variables LD_LIBRARY_PATH. - - ````bash - export LD_LIBRARY_PATH=${PACKAGE_ROOT_PATH}/tools/converter/lib:${LD_LIBRARY_PATH} - ```` - - ${PACKAGE_ROOT_PATH} is the decompressed package path obtained by compiling or downloading. - -### Parameter Description - -MindSpore Lite model conversion tool provides multiple parameters. -You can enter `./converter_lite --help` to obtain the help information in real time. - -The following describes the parameters in detail. - -| Parameter | Mandatory or Not | Parameter Description | Value Range | Default Value | -| -------- | ------- | ----- | --- | ---- | -| `--help` | No | Prints all the help information. | - | - | -| `--fmk=` | Yes | Original format of the input model. 
| MINDIR, CAFFE, TFLITE, TF, or ONNX | - | -| `--modelFile=` | Yes | Path of the input model. | - | - | -| `--outputFile=` | Yes | Path of the output model. The suffix `.ms` can be automatically generated. | - | - | -| `--weightFile=` | Yes (for Caffe models only) | Path of the weight file of the input model. | - | - | -| `--quantType=` | No | Sets the quantization type of the model. | PostTraining: quantization after training
    WeightQuant: only do weight quantization after training | - | -| `--bitNum=` | No | Sets the quantization bitNum when quantType is set as WeightQuant, now supports 1 bit to 16 bit quantization. | \[1, 16] | 8 | -| `--quantWeightSize=` | No | Sets a size threshold of convolution filter when quantType is set as WeightQuant. If the size is bigger than this value, it will trigger weight quantization. | \[0, +∞) | 0 | -| `--quantWeightChannel=` | No | Sets a channel number threshold of convolution filter when quantType is set as WeightQuant. If the number is bigger than this, it will trigger weight quantization. | \[0, +∞) | 16 | -| `--configFile=` | No | 1) Profile path of calibration dataset when quantType is set as PostTraining. 2) Profile path of converter. It contains the parameters `plugin_path` and `disable_fusion`, both of which are optional. The former one is the third-party library path. If there are more than one, please use `;` to separate. The default value of the latter one is `off`. Fusion optimization will be turned off when the value is set to `on`. | - | - | - -> - The parameter name and parameter value are separated by an equal sign (=) and no space is allowed between them. -> - The Caffe model is divided into two files: model structure `*.prototxt`, corresponding to the `--modelFile` parameter; model weight `*.caffemodel`, corresponding to the `--weightFile` parameter. -> - In order to ensure the accuracy of weight quantization, the "--bitNum" parameter should better be set to a range from 8bit to 16bit. -> - PostTraining method currently only supports activation quantization and weight quantization in 8 bit. - -The following describes how to use the conversion command by using several common examples. - -- Take the Caffe model LeNet as an example. 
Run the following conversion command: - - ```bash - ./converter_lite --fmk=CAFFE --modelFile=lenet.prototxt --weightFile=lenet.caffemodel --outputFile=lenet - ``` - - In this example, the Caffe model is used. Therefore, the model structure and model weight files are required. Two more parameters `fmk` and `outputFile` are also required. - - The output is as follows: - - ```text - CONVERTER RESULT SUCCESS:0 - ``` - - This indicates that the Caffe model is successfully converted into the MindSpore Lite model and the new file `lenet.ms` is generated. - -- The following uses the MindSpore, TensorFlow Lite, TensorFlow and ONNX models as examples to describe how to run the conversion command. - - - MindSpore model `model.mindir` - - ```bash - ./converter_lite --fmk=MINDIR --modelFile=model.mindir --outputFile=model - ``` - - > The `MindIR` model exported by MindSpore v1.1.1 or earlier is recommended to be converted to the `ms` model using the converter tool of the corresponding version. MindSpore v1.1.1 and later versions, the converter tool will be forward compatible. - - - TensorFlow Lite model `model.tflite` - - ```bash - ./converter_lite --fmk=TFLITE --modelFile=model.tflite --outputFile=model - ``` - - - TensorFlow model `model.pb` - - ```bash - ./converter_lite --fmk=TF --modelFile=model.pb --outputFile=model - ``` - - - ONNX model `model.onnx` - - ```bash - ./converter_lite --fmk=ONNX --modelFile=model.onnx --outputFile=model - ``` - - In the preceding scenarios, the following information is displayed, indicating that the conversion is successful. In addition, the target file `model.ms` is obtained. - - ```text - CONVERTER RESULT SUCCESS:0 - ``` - -- If running the conversion command is failed, an [errorcode](https://www.mindspore.cn/doc/api_cpp/en/master/errorcode_and_metatype.html) will be output. 
- -## Windows Environment Instructions - -### Environment Preparation - -To use the MindSpore Lite model conversion tool, the following environment preparations are required. - -- [Compile](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html) or [download](https://www.mindspore.cn/tutorial/lite/en/master/use/downloads.html) model transfer tool. - -- Add the path of dynamic library required by the conversion tool to the environment variables PATH. - - ````bash - set PATH=%PACKAGE_ROOT_PATH%\tools\converter\lib;%PATH% - ```` - - %PACKAGE_ROOT_PATH% is the decompressed package path obtained by compiling or downloading. - -### Parameter Description - -Refer to the Linux environment model conversion tool [parameter description](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html#parameter-description). - -### Example - -Set the log printing level to INFO. - -```bat -set GLOG_v=1 -``` - -> Log level: 0 is DEBUG, 1 is INFO, 2 is WARNING, 3 is ERROR. - -Several common examples are selected below to illustrate the use of conversion commands. - -- Take the Caffe model LeNet as an example to execute the conversion command. - - ```bat - call converter_lite --fmk=CAFFE --modelFile=lenet.prototxt --weightFile=lenet.caffemodel --outputFile=lenet - ``` - - In this example, because the Caffe model is used, two input files of model structure and model weight are required. Then with the fmk type and output path two parameters which are required, you can successfully execute. - - The result is shown as: - - ```text - CONVERTER RESULT SUCCESS:0 - ``` - - This means that the Caffe model has been successfully converted to the MindSpore Lite model and the new file `lenet.ms` has been obtained. - -- Take MindSpore, TensorFlow Lite, ONNX model format and perceptual quantization model as examples to execute conversion commands. 
- - - MindSpore model `model.mindir` - - ```bat - call converter_lite --fmk=MINDIR --modelFile=model.mindir --outputFile=model - ``` - - > The `MindIR` model exported by MindSpore v1.1.1 or earlier is recommended to be converted to the `ms` model using the converter tool of the corresponding version. MindSpore v1.1.1 and later versions, the converter tool will be forward compatible. - - - TensorFlow Lite model`model.tflite` - - ```bat - call converter_lite --fmk=TFLITE --modelFile=model.tflite --outputFile=model - ``` - - - TensorFlow model `model.pb` - - ```bat - call converter_lite --fmk=TF --modelFile=model.pb --outputFile=model - ``` - - - ONNX model`model.onnx` - - ```bat - call converter_lite --fmk=ONNX --modelFile=model.onnx --outputFile=model - ``` - - In the above cases, the following conversion success prompt is displayed, and the `model.ms` target file is obtained at the same time. - - ```text - CONVERTER RESULT SUCCESS:0 - ``` - -- If running the conversion command is failed, an [errorcode](https://www.mindspore.cn/doc/api_cpp/en/master/errorcode_and_metatype.html) will be output. 
diff --git a/tutorials/lite/source_en/use/converter_train.md b/tutorials/lite/source_en/use/converter_train.md deleted file mode 100644 index ac1709e65b3e85b29e4954c5f75d64f2a5b403ae..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/converter_train.md +++ /dev/null @@ -1,71 +0,0 @@ -# Creating MindSpore Lite Models - -`Linux` `Environment Preparation` `Model Export` `Model Converting` `Intermediate` `Expert` - - - -- [Creating MindSpore Lite Models](#creating-mindspore-lite-model) - - [Overview](#overview) - - [Linux Environment](#linux-environment) - - [Environment Preparation](#environment-preparation) - - [Parameters Description](#parameters-description) - - [Example](#example) - - - - - -## Overview - -Creating your MindSpore Lite(Train on Device) model is a two step procedure: - -- In the first step the model is defined and the layers that should be trained must be declared. This is being done on the server, using a MindSpore-based [Python code](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#export-mindir-model). The model is then exported into a protobuf format, which is called MINDIR. -- In the seconde step this `.mindir` model is converted into a `.ms` format that can be loaded onto an embedded device and can be trained using the MindSpore Lite framework. The converted `.ms` models can be used for both training and inference. - -## Linux Environment - -### Environment Preparation - -MindSpore Lite model transfer tool (only suppot Linux OS) has provided multiple parameters. The procedure is as follows: - -- [Compile](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html) or [download](https://www.mindspore.cn/tutorial/lite/en/master/use/downloads.html) model transfer tool. - -- Add the path of dynamic library required by the conversion tool to the environment variables LD_LIBRARY_PATH. 
- - ````bash - export LD_LIBRARY_PATH=${PACKAGE_ROOT_PATH}/tools/converter/lib:${LD_LIBRARY_PATH} - ```` - - ${PACKAGE_ROOT_PATH} is the decompressed package path obtained by compiling or downloading. - -### Parameters Description - -The table below shows the parameters used in the MindSpore Lite model training transfer tool. - -| Parameters | required | Parameter Description | Value Range | Default Value | -| --------------------------- | -------- | ------------------------------------------------------------ | ----------- | ------------- | -| `--help` | no | Prints all the help information. | - | - | -| `--fmk=` | yes | Original format of the input model. | MINDIR | - | -| `--modelFile=` | yes | Path of the input model. | - | - | -| `--outputFile=` | yes | Path of the output model. The suffix `.ms` can be automatically generated. | - | - | -| `--trainModel=true` | yes | Training on Device or not | true, false | false | - -> The parameter name and parameter value are separated by an equal sign (=) and no space is allowed between them. - -If running the conversion command is failed, an [errorcode](https://www.mindspore.cn/doc/api_cpp/en/master/errorcode_and_metatype.html) will be output. - -### Example - -Suppose the file to be converted is `my_model.mindir` and run the following command: - -```bash -./converter_lite --fmk=MINDIR --trainModel=true --modelFile=my_model.mindir --outputFile=my_model -``` - -If the command executes successfully, the `model.ms` target file will be obtained and the console will print as follows: - -```bash -CONVERTER RESULT SUCCESS:0 -``` - -If running the conversion command is failed, an [errorcode](https://www.mindspore.cn/doc/api_cpp/en/master/errorcode_and_metatype.html) will be output. 
diff --git a/tutorials/lite/source_en/use/cropper_tool.md b/tutorials/lite/source_en/use/cropper_tool.md deleted file mode 100644 index bb7d736f1cf4b6212c96d4042fffe0ff64a83963..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/cropper_tool.md +++ /dev/null @@ -1,72 +0,0 @@ -# Static Library Cropper Tool - -`Linux` `Environment Preparation` `Static Library Cropping` `Intermediate` `Expert` - - - -- [Use Cropper Tool To Reduce Library File Size](#use-cropper-tool-to-reduce-library-file-size) - - [Overview](#overview) - - [Environment Preparation](#environment-preparation) - - [Parameter Description](#parameter-description) - - [Example](#example) - - - - - -## Overview - -MindSpore Lite provides the `libmindspore-lite.a` static library cropping tool for runtime, which can filter out the operators in the `ms` model, crop the static library files, and effectively reduce the size of the library files. - -The operating environment of the library cutting tool is x86_64, and currently supports the cropping of CPU operators, the compilation command is `bash build.sh -I arm64 -e cpu`, `bash build.sh -I arm32 -e cpu`, and `bash build.sh -I x86_64 -e cpu`. - -## Environment Preparation - -To use the Cropper tool, you need to prepare the environment as follows: - -- Compilation: The code of the Cropper tool is stored in the `mindspore/lite/tools/cropper` directory of the MindSpore source code. For details about the build operations, see the [Environment Requirements](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#environment-requirements) and [Compilation Example](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#compilation-example) in the build document to compile version x86_64. - -- Run: Obtain the `cropper` tool and configure environment variables. For details, see [Output Description](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#output-description) in the build document. 
- -## Parameter Description - -The command used for crop the static library based on Cropper is as follows: - -```bash -./cropper [--packageFile=] [--configFile=] - [--modelFile=] [--modelFolderPath=] - [--outputFile=] [--help] -``` - -The following describes the parameters in detail. - -| Parameter | Attribute | Function | Parameter Type | Default Value | Value Range | -| ------------------------------------- | -------- | ------------------------------------------------------------ | -------- | ------ | -------- | -| `--packageFile=` | Mandatory |The path of the `libmindspore-lite.a` to be cropped. | String | - | - | -| `--configFile=` | Mandatory | The path of the configuration file of the cropper tool. The file path of `cropper_mapping_cpu.cfg` needs to be set to crop the CPU library. | String | - | - | -| `--modelFolderPath=` | Optional | The model folder path, according to all the `ms` models existing in the folder for library cropping. `modelFile` or `modelFolderPath` parameters must be selected. | String | - | - | -| `--modelFile=` | Optional | The model file path is cut according to the specified `ms` model file. Multiple model files are divided by `,`. `modelFile` or `modelFolderPath` parameters must be selected. | String | - | - | -| `--outputFile=` | Optional | The saved path of the cut library `libmindspore-lite.a`, it overwrites the source file by default. | String | - | - | -| `--help` | Optional | Displays the help information about the `cropper` command. | - | - | - | - -> The configuration file `cropper_mapping_cpu.cfg` exists in the `tools/cropper` directory in the `mindspore-lite-{version}-linux-x64` package. - -## Example - -The Cropper tool obtains the operator list by parsing the `ms` model, and crop the `libmindspore-lite.a` static library according to the mapping relationship in the configuration file `configFile`. 
- -- Pass in the `ms` model through the folder, and pass the folder path where the model file is located to the `modelFolderPath` parameter to crop the `libmindspore-lite.a` static library of arm64-cpu. - -```bash -./cropper --packageFile=/mindspore-lite-{version}-android-aarch64/inference/lib/libmindspore-lite.a --configFile=./cropper_mapping_cpu.cfg --modelFolderPath=/model --outputFile=/mindspore-lite/lib/libmindspore-lite.a -``` - -This example will read all the `ms` models contained in the `/model` folder, crop the `libmindspore-lite.a` static library of arm64-cpu, and the cropped `libmindspore-lite.a` static library will be saved to `/mindspore-lite/lib/` directory. - -- Pass in the `ms` model by file, pass the path where the model file is located to the `modelFile` parameter, and crop the `libmindspore-lite.a` static library of arm64-cpu. - -```bash -./cropper --packageFile=/mindspore-lite-{version}-android-aarch64/inference/lib/libmindspore-lite.a --configFile=./cropper_mapping_cpu.cfg --modelFile=/model/lenet.ms,/model/retinaface.ms --outputFile=/mindspore-lite/lib/libmindspore-lite.a -``` - -In this example, the `libmindspore-lite.a` static library of arm64-cpu will be cropped according to the `ms` model passed by `modelFile`, and the cropped `libmindspore-lite.a` static library will be saved to `/mindspore-lite/lib/` directory. diff --git a/tutorials/lite/source_en/use/data_preprocessing.rst b/tutorials/lite/source_en/use/data_preprocessing.rst deleted file mode 100644 index ee14ed88452a9c8a7e0f53916528ea45b99a5648..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/data_preprocessing.rst +++ /dev/null @@ -1,7 +0,0 @@ -Data Preprocessing -=================== - -.. 
toctree:: - :maxdepth: 1 - - image_processing diff --git a/tutorials/lite/source_en/use/downloads.md b/tutorials/lite/source_en/use/downloads.md deleted file mode 100644 index 12e035b12aa076bda480677f0d29883294e94639..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/downloads.md +++ /dev/null @@ -1,50 +0,0 @@ -# Downloading MindSpore Lite - -`Windows` `Linux` `Android` `Environment Preparation` `Beginner` `Intermediate` `Expert` - - - -- [Downloading MindSpore Lite](#downloading-mindspore-lite) - - [1.2.0](#120) - - [1.1.0](#110) - - - - - -Welcome to MindSpore Lite. We provide functions such as model conversion, model inference, image processing, etc. that support multiple operating systems and hardware platforms. You can download the version package suitable for the local environment and use it directly. - -## 1.2.0 - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| Inference runtime (cpp), training runtime (cpp), inference aar package, and benchmark/benchmark_train tools. | CPU | Android-aarch32 | | 7d073573385a69bff53542c395d106393da241682cd6053703ce21f1de23bac6 | -| Inference runtime (cpp), training runtime (cpp), inference aar package, and benchmark/benchmark_train tools. | CPU/GPU | Android-aarch64 | | 7f8400f0b97fa3e7cbf0d266c73b43a2410905244b04d0202fab39d9267346e0 | -| Inference runtime (cpp), training runtime (cpp), inference jar package, and benchmark/benchmark_train/codegen/converter/cropper tools. | CPU | Ubuntu-x64 | | 3b609ed8be9e3ae70987d6e00421ad4720776d797133e72f6952ba6b93059062 | -| Inference runtime (cpp) and benchmark/codegen/converter tools. 
| CPU | Windows-x64 | | bf01851d7e2cde416502dce11bd2a86ef63e559f6dabba090405755a87ce14ae | -| Inference runtime(cpp) | CPU | OpenHarmony | | a9987b25815cb69e0f630be1388486e8d727a19815a67851089b7d633bd2f3f2 | - -## 1.1.0 - -### Inference - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore Lite Converter | CPU | Ubuntu-x64 | | d449e38a8493c314d1b5b1a127f62269192da785b012ff892eda775dedca3d82 | -| | CPU | Windows-x64 | | 5e50b7701b97ebe784095f2ba954fc6c377eb157fbc9aaeae2497e38cc4ee212 | -| MindSpore Lite Runtime (include image processing) | CPU/GPU/NPU | Android-aarch64/Android-aarch32 | | a19de5706db57e97a5f04ef08e0e383f8ea497c70bb60e60d056b31a603c0243 | -| | CPU | Ubuntu-x64 | | 176256c2fbef775f1a44aaeccae0c4eea6a60f41fc0baece5479dcb378155f36 | -| | CPU | Windows-x64 | | 30b5545245832a73d84732166f360c77cd09a7a4fe1fb922a8f7b80e7df326c1 | - -### Train - -| Module Name | Hardware Platform | Operating System | Download Links | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore Lite Converter | CPU | Ubuntu-x64 | | f95a9db98c84ec3d97f88383ecc3832582aa9737ed287c33703deb0b419acf25 | -| MindSpore Lite Runtime (include image processing) | CPU | Android-aarch64/Android-aarch32 | | a6d8152f4e2d674c52af2c379f7d07858d30bc0dceef1dbc366e6fa16a5948b5 | -| | CPU | Ubuntu-x64 | | 1290f0adc790adc9edce654b9a629a9a323cfcb8453eb6bc19b779ef726282bf | - -> - Ubuntu-x64 Package is compiled in an environment where the GCC version is greater than or equal to 7.3.0, so the deployment environment requires the GLIBC version to be greater than or equal to 2.27. -> - Android-aarch32 does not support GPU and NPU. -> - MindSpore Lite also provides `libmindspore-lite.a` static library [cropper tool](https://www.mindspore.cn/tutorial/lite/en/master/use/cropper_tool.html#) for Runtime, which can crop the static library files, and effectively reduce the size of the library files. 
-> - After the download of MindSpore Lite is completed, SHA-256 integrity verification is required. diff --git a/tutorials/lite/source_en/use/image_processing.md b/tutorials/lite/source_en/use/image_processing.md deleted file mode 100644 index 6b567882ee516f0d79a864095801acee27e81894..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/image_processing.md +++ /dev/null @@ -1,153 +0,0 @@ -# Preprocessing Image Data - -`Windows` `Linux` `C++` `Data Preparation` `Beginner` `Intermediate` `Expert` - - - -- [Preprocessing Image Data](#preprocess-image-data) - - [Overview](#Overview) - - [Import image preprocessing function library](#import-image-preprocessing-function-library) - - [Initialize the image](#initialize-the-image) - - [Usage example](#usage-example) - - [Optional image preprocessing operator](#optional-image-preprocessing-operator) - - [Resize image](#resize-image) - - [Usage example](#usage-example-1) - - [Convert the image data type](#convert-the-image-data-type) - - [Usage example](#usage-example-2) - - [Crop image data](#crop-image-data) - - [Usage example](#usage-example-3) - - [Normalize image data](#normalize-image-data) - - [Usage example](#usage-example-4) - - - - - -## Overview - -The main purpose of image preprocessing is to eliminate irrelevant information in the image, restore useful real information, enhance the detectability of related information and simplify data to the greatest extent, thereby improving the reliability of feature extraction, image segmentation, matching and recognition. Here, by creating a LiteMat object, the image data is processed before inference to meet the data format requirements for model inference. 
- -The process is as follows: - -## Import image preprocessing function library - -```cpp -#include "lite_cv/lite_mat.h" -#include "lite_cv/image_process.h" -``` - -## Initialize the image - -Here, the [InitFromPixel](https://www.mindspore.cn/doc/api_cpp/en/master/dataset.html#initfrompixel) function in the `image_process.h` file is used to initialize the image. - -```cpp -bool InitFromPixel(const unsigned char *data, LPixelType pixel_type, LDataType data_type, int w, int h, LiteMat &m) -``` - -### Usage example - -```cpp -// Create the data object of the LiteMat object. -LiteMat lite_mat_bgr; - -// Initialize the lite_mat_bgr object. -// The image data pointer passed in by the user (The data in the Bitmap corresponding to the Android platform). -InitFromPixel(pixel_ptr, LPixelType::RGBA2GRAY, LDataType::UINT8, rgba_mat.cols, rgba_mat.rows, lite_mat_bgr); -``` - -## Optional image preprocessing operator - -The image processing operators here can be used in any combination according to the actual situation. - -### Resize image - -Here we use the [ResizeBilinear](https://www.mindspore.cn/doc/api_cpp/en/master/dataset.html#resizebilinear) function in `image_process.h` to resize the image through a bilinear algorithm. Currently, the supported data type is unit8, the supported channels are 3 and 1. - -```cpp -bool ResizeBilinear(const LiteMat &src, LiteMat &dst, int dst_w, int dst_h) -``` - -#### Usage example - -```cpp -// Initialize the image data. -LiteMat lite_mat_bgr; -InitFromPixel(rgba_mat.data, LPixelType::RGBA2BGR, LDataType::UINT8, rgba_mat.cols, rgba_mat.rows, lite_mat_bgr); - -// Create a resize image data object. -LiteMat lite_mat_resize; - -// Resize the image. -ResizeBilinear(lite_mat_bgr, lite_mat_resize, 256, 256); -``` - -### Convert the image data type - -Here we use the [ConvertTo](https://www.mindspore.cn/doc/api_cpp/en/master/dataset.html#convertto) function in `image_process.h` to convert the image data type. 
Currently, the conversion from uint8 to float is supported. - -```cpp -bool ConvertTo(const LiteMat &src, LiteMat &dst, double scale = 1.0) -``` - -#### Usage example - -```cpp -// Initialize the image data. -LiteMat lite_mat_bgr; -InitFromPixel(rgba_mat.data, LPixelType::RGBA2BGR, LDataType::UINT8, rgba_mat.cols, rgba_mat.rows, lite_mat_bgr); - -// Create the converted data type object. -LiteMat lite_mat_convert_float; - -// Perform conversion type operations on the object. Currently, the supported conversion is to convert uint8 to float. -ConvertTo(lite_mat_bgr, lite_mat_convert_float); -``` - -### Crop image data - -Here we use the [Crop](https://www.mindspore.cn/doc/api_cpp/en/master/dataset.html#crop) function in `image_process.h` to crop the image. Currently, channels 3 and 1 are supported. - -```cpp -bool Crop(const LiteMat &src, LiteMat &dst, int x, int y, int w, int h) -``` - -#### Usage example - -```cpp -// Initialize the image data. -LiteMat lite_mat_bgr; -InitFromPixel(rgba_mat.data, LPixelType::RGBA2BGR, LDataType::UINT8, rgba_mat.cols, rgba_mat.rows, lite_mat_bgr); - -// Create the cropped object. -LiteMat lite_mat_cut; - -// The image is cropped by the values of x, y, w, h. -Crop(lite_mat_bgr, lite_mat_cut, 16, 16, 224, 224); -``` - -### Normalize image data - -In order to eliminate the dimensional influence among the data indicators and solve the comparability problem among the data indicators through standardization processing is adopted, here is the use of the [SubStractMeanNormalize](https://www.mindspore.cn/doc/api_cpp/en/master/dataset.html#substractmeannormalize) function in `image_process.h` to normalize the image data. - -```cpp -bool SubStractMeanNormalize(const LiteMat &src, LiteMat &dst, const std::vector &mean, const std::vector &std) -``` - -#### Usage example - -```cpp -// Initialize the image data. 
-LiteMat lite_mat_bgr; -InitFromPixel(rgba_mat.data, LPixelType::RGBA2BGR, LDataType::UINT8, rgba_mat.cols, rgba_mat.rows, lite_mat_bgr); - -// The mean value of the image data. -// The variance of the image data. -std::vector means = {0.485, 0.456, 0.406}; -std::vector stds = {0.229, 0.224, 0.225}; - -// Create a normalized image object. -LiteMat lite_mat_bgr_norm; - -// The image data is normalized by the mean value and variance of the image data. -SubStractMeanNormalize(lite_mat_bgr, lite_mat_bgr_norm, means, stds); -``` diff --git a/tutorials/lite/source_en/use/micro.md b/tutorials/lite/source_en/use/micro.md deleted file mode 100644 index 0bd223f66309f1de236d84224bfb0e08f07bd859..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/micro.md +++ /dev/null @@ -1,288 +0,0 @@ -# Perform Inference on the Microcontroller - - `Linux` `IoT` `C++` `CodeGen` `Beginner` `Intermediate` - - - -- [Perform Inference on the Microcontroller](#perform-inference-on-the-microcontroller) - - [Overview](#overview) - - [Obtaining CodeGen](#obtaining-codeGen) - - [Parameter Description](#parameter-description) - - [Instructions](#instructions) - - [Using CodeGen to Perform inference on STM Boards](#perform-inference-on-the-stm-microcontroller) - - [More Details](#more-details) - - - - - -## Overview - -MindSpore Lite provides a code generator tool, namely CodeGen, which could have runtime compiling and computational graphs building done offline. Only necessary codes and information are kept in the generated program, thereby minimizing the size of the generated inference program. CodeGen supports operators in NNACL and CMSIS, and generates inference programs running on x86/ARM64/ARM32A/ARM32M platforms. - -Here is the process of using CodeGen: - -1. Use the [MindSpore Lite Converter](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html) to convert the pre-trained model into a `*.ms` file. - -2. 
Use CodeGen and input the `*.ms` file to automatically generate the inference code. - -![img](../images/lite_codegen.png) - -## Obtaining CodeGen - -You can obtain CodeGen by any of the following ways: - -1. Download pre-compiled [Release Package](https://www.mindspore.cn/tutorial/lite/en/master/use/downloads.html) from MindSpore. -2. [Build](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html) from the source. - -> Currently the code generator is only available on Linux x86_64. - -## Parameter Description - -Here is the detailed description of parameters: - -| Parameter | Mandatory or Not | Parameter Description | Value Range | Default value | -| --------------- | ---------------- | -------------------------------------- | -------------------------- | -------------- | -| help | No | print help information | - | - | -| codePath | Yes | path of the generated code | - | ./(current dir)| -| target | Yes | target platform for the generated code | x86, ARM32M, ARM32A, ARM64 | x86 | -| modelPath | Yes | the path to the input model | - | - | -| supportParallel | No | generate parallel codes or not | true, false | false | -| debugMode | No | generate debug codes or not | true, false | false | - -> The input model should be converted into .ms file using MindSpore Lite Converter. -> -> debugMode is not available when the filesystem is not supported. -> -> Please check the [API Document](https://www.mindspore.cn/doc/api_cpp/en/master/index.html) to get the detailed API description. -> -> The following 3 interfaces are currently not supported: -> 1. `virtual std::unordered_map GetOutputs() const = 0;` -> 2. `virtual Vector GetOutputsByNodeName(const String &node_name) const = 0;` -> 3. `virtual int Resize(const Vector &inputs, const Vector> &dims) = 0;` - -## Instructions - -The example starts with a pre-trained classification model for the MNIST dataset. 
- -```bash -./codegen --modelPath=./mnist.ms --codePath=./ -``` - -After successful execution, CodeGen would generate a folder named mnist at the specified path. The structure of the project file is shown as follows: - -```text -mnist -├── benchmark # Benchmark model for debugging -│ ├── benchmark.cc -│ ├── calib_output.cc -│ ├── calib_output.h -│ ├── load_input.c -│ └── load_input.h -├── CMakeLists.txt -└── src # source files - ├── CMakeLists.txt - ├── mmodel.h - ├── net.bin # binary model weights - ├── net.c - ├── net.cmake - ├── net.h - ├── session.cc - ├── session.h - ├── tensor.cc - ├── tensor.h - ├── weight.c - └── weight.h -``` - -## Using CodeGen to Perform inference on STM Boards - -This guide takes the deployment on STM32F746 as an example to show how the pre-complied model is built and deployed on Cortex-M platform. More information about Arm Cortex-M could be found in their [Official Web Site](https://developer.arm.com/ip-products/processors/cortex-m). - -### STM32F746 Compile Dependencies - -The generated program compilation and deployment need to install the following tools on Windows: [J-Link](https://www.segger.com/), [STM32CubeMX](https://www.st.com/content/st_com/en.html) and [GNU Arm Embedded Toolchain](https://developer.arm.com/tools-and-software/open-source-software/developer-tools/gnu-toolchain/gnu-rm) to perform Cross-compilation. 
- -- [STM32CubeMX Windows Version](https://www.st.com/content/ccc/resource/technical/software/sw_development_suite/group0/0b/05/f0/25/c7/2b/42/9d/stm32cubemx_v6-1-1/files/stm32cubemx_v6-1-1.zip/jcr:content/translations/en.stm32cubemx_v6-1-1.zip) >= 6.0.1 - -- [GNU Arm Embedded Toolchain](https://developer.arm.com/tools-and-software/open-source-software/developer-tools/gnu-toolchain/gnu-rm/downloads) >= 9-2019-q4-major-win32 - -- [J-Link Windows Version](https://www.segger.com/downloads/jlink/) >= 6.56 -- [GCC](https://gcc.gnu.org/releases.html) >= 7.3.0 -- [CMake](https://cmake.org/download/) >= 3.18.3 - -### STM32F746 Project Construction - -- The structure of the project files that needs to be managed as follows: - - ```bash - ├── mnist # generated inference code by CodeGen - ├── include # API header files (needs to be managed) - └── operator_library # operator source code (needs to be managed) - ``` - -> API header files could be found in the [Release Package](https://www.mindspore.cn/tutorial/lite/en/master/use/downloads.html) provided by the MindSpore team. -> -> You need to obtain the source code corresponding to the target platform because the pre-compiled static library is not provided since the Cross compilation on Cortex-M platform is complicated. The corresponding project file structure is provided in the example and you could follow the instructions shown below to copy the source code and finish the compilation. - -- Use Codegen to compile [MNIST handwriting number identification model](https://download.mindspore.cn/model_zoo/official/lite/mnist_lite/mnist.ms), generate corresponding inference codes for STM32F46. The command is as follows: - - ```bash - ./codegen --codePath=. 
--modelPath=mnist.ms --target=ARM32M - ``` - -- The generated project file structure is shown below: - - ```bash - ├── mnist # root of the generated code - ├── benchmark # generated benchmark code - └── src # generated model inference code - ``` - -- The file structure of the prepared static operator library is shown below: - - ```bash - ├── operator_library # operator library - ├── include # header files of operator library - └── nnacl # operator source code provided by MindSpore team - └── wrapper # operator source code provided by MindSpore team - └── CMSIS # CMSIS source code provided by Arm - ``` - - > `arm_nnfunctions.h` needs to be added when using CMSIS v5.7.0 Softmax operator. - -#### Project Compiling - -1. Environment testing - - When programs needed for Cross-compilation are installed, add them to the Windows PATH one by one, and test them with the following instructions: - - ```bash - gcc -v # Check GCC - arm-none-eabi-gdb -v # Check Cross compiler - jlink -v # Check J-Link - make -v # Check Make - ``` - - If all success, the environment preparation is done. - -2. Generate the initialization codes run on the STM32F746 board. ([detailed code example](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/micro/example/mnist_stm32f746)) - - - start STM32CubeMX, new project and choose STM32F746IG. - - Choose `Makefile` and `generator code`. - - Launch `cmd` on the generated project root, execute `make` to test whether the initialization code compilation is successful. - - ```bash - # make success result - arm-none-eabi-size build/test_stm32f746.elf - text data bss dec hex filename - 3660 20 1572 5252 1484 build/test_stm32f746.elf - arm-none-eabi-objcopy -O ihex build/test_stm32f746.elf build/test_stm32f746.hex - arm-none-eabi-objcopy -O binary -S build/test_stm32f746.elf build/test_stm32f746.bin - ``` - -#### Compiling Model - -1. 
Copy operator library source code and header files provided by MindSpore team to the project folder generated by STM32CubeMX. - -2. Copy model inference code generated by CodeGen to the project folder generated by STM32CubeMX. - - ```bash - ├── .mxproject - ├── build # compile output folder - ├── Core - ├── Drivers - ├── mnist # cortex-m7 model inference code generated by CodeGen - ├── Makefile # modify makefile to organize mnist && operator_library source code - ├── startup_stm32f746xx.s - ├── STM32F746IGKx_FLASH.ld - └── test_stm32f746.ioc - ``` - -3. Modify makefile, organize operator library source code and generated inference code, check [example](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/micro/example/mnist_stm32f746) to get detailed information about makefile. - - ```bash - # C includes - C_INCLUDES = \ - -ICore/Inc \ - -IDrivers/STM32F7xx_HAL_Driver/Inc \ - -IDrivers/STM32F7xx_HAL_Driver/Inc/Legacy \ - -IDrivers/CMSIS/Device/ST/STM32F7xx/Include \ - -Imnist/operator_library/include \ # Added, header files for operator library - -Imnist/include \ # Added, header files of model inference code - -Imnist/src # Added, source code of model inference code - ...... - ``` - -4. Add code in `Core/Src/main.c` to call inference API. 
The code to be added is shown below: - - ```cpp - while (1) { - /* USER CODE END WHILE */ - SEGGER_RTT_printf(0, "***********mnist test start***********\n"); - const char *model_buffer = nullptr; - int model_size = 0; - session::LiteSession *session = mindspore::session::LiteSession::CreateSession(model_buffer, model_size, nullptr); - Vector inputs = session->GetInputs(); - size_t inputs_num = inputs.size(); - void *inputs_binbuf[inputs_num]; - int inputs_size[inputs_num]; - for (size_t i = 0; i < inputs_num; ++i) { - inputs_size[i] = inputs[i]->Size(); - } - // here mnist only have one input data,just hard code to it's array; - inputs_binbuf[0] = mnist_inputs_data; - for (size_t i = 0; i < inputs_num; ++i) { - void *input_data = inputs[i]->MutableData(); - memcpy(input_data, inputs_binbuf[i], inputs_size[i]); - } - int ret = session->RunGraph(); - if (ret != lite::RET_OK) { - return lite::RET_ERROR; - } - Vector outputs_name = session->GetOutputTensorNames(); - for (int i = 0; i < outputs_name.size(); ++i) { - tensor::MSTensor *output_tensor = session->GetOutputByTensorName(outputs_name[i]); - if (output_tensor == nullptr) { - return -1; - } - float *casted_data = static_cast(output_tensor->MutableData()); - if (casted_data == nullptr) { - return -1; - } - for (size_t j = 0; j < 10 && j < output_tensor->ElementsNum(); j++) { - SEGGER_RTT_printf(0, "output: [%d] is : [%d]/100\n", i, casted_data[i] * 100); - } - } - delete session; - SEGGER_RTT_printf(0, "***********mnist test end***********\n"); - ``` - -5. Launch `cmd` as admin and run `make` to compile. - - ```bash - make - ``` - -### STM32F746 Project Deployment - -Deploy executable files to the board using J-Link and perform inference. 
- -```bash -jlinkgdbserver # start jlinkgdbserver set target device as STM32F746IG -jlinkRTTViewer # start jlinkRTTViewer set target devices as STM32F746IG -arm-none-eabi-gdb # start arm-gcc gdb service -file build/target.elf # open debugging file -target remote 127.0.0.1 # connect jlink server -monitor reset # reset board -monitor halt # halt board -load # load executable to board -c # perform model inference -``` - -## More Details - -### [Linux_x86_64 platform compile and deploy](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/micro/example/mnist_x86) - -### [Android platform compile and deploy](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/micro/example/mobilenetv2) - diff --git a/tutorials/lite/source_en/use/npu_info.md b/tutorials/lite/source_en/use/npu_info.md deleted file mode 100644 index e77c095625306e8e65ceb726225e087d7f04cfd2..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/npu_info.md +++ /dev/null @@ -1,94 +0,0 @@ -# NPU Integration Information - -`NPU` `Android` `Linux` `Environment Preparation` `Operators Supported` `Intermediate` `Expert` - - - -- [NPU Integration Information](#NPU-Integration-Information) - - [Steps](#Steps) - - [Environment Preparation](#Environment-Preparation) - - [Build](#Build) - - [Integration](#Integration) - - [Supported Chips](#Supported-Chips) - - [Supported Operators](#Supported-Operators) - - - - - -## Steps - -### Environment Preparation - -Besides basic [Environment Preparation](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html), HUAWEI HiAI DDK, which contains -APIs (including building, loading models and calculation processes) and interfaces implemented to encapsulate dynamic libraries (namely libhiai*.so), -is required for the use of NPU. Download [DDK](https://developer.huawei.com/consumer/en/doc/development/hiai-Library/ddk-download-0000001053590180) -and set the directory of extracted files as `${HWHIAI_DDK}`. 
Our build script uses this environment viriable to seek DDK. - -### Build - -Under the Linux operating system, one can easily build MindSpore Lite Package integrating NPU interfaces and libraries using build.sh under -the root directory of MindSpore [Source Code](https://gitee.com/mindspore/mindspore). The command is as follows. -It will build MindSpore Lite's package under the output directory under the MindSpore source code root directory, -which contains the NPU's dynamic library, the libmindspore-lite dynamic library, and the test tool Benchmark. - -```bash -bash build.sh -I arm64 -e npu -``` - -For more information about compilation, see [Linux Environment Compilation](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#linux-environment-compilation). - -### Integration - -- Integration instructions - - When developers need to integrate the use of NPU features, it is important to note: - - - [Configure the NPU backend](https://www.mindspore.cn/tutorial/lite/en/master/use/runtime_cpp.html#configuring-the-npu-backend). - For more information about using Runtime to perform inference, see [Using Runtime to Perform Inference (C++)](https://www.mindspore.cn/tutorial/lite/en/master/use/runtime_cpp.html). - - - Compile and execute the binary. If you use dynamic linking, please set environment variables to dynamically link libhiai.so, libhiai_ir.so, and libhiai_ir_build.so. - For example, - - ```bash - export LD_LIBRARY_PATH=mindspore-lite-{version}-inference-android-{arch}/inference/third_party/hiai_ddk/lib/:$LD_LIBRARY_PATH - ``` - - For more information about compilation, please refer to [Compilation Output](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#description-of-runtime-and-other-tools-directory-structure) - with compilation option `-I arm64` or `-I arm32`. - -- Using Benchmark testing NPU inference - - Users can also test NPU inference using MindSpore Lite's Benchmark tool. 
-For the Benchmark tool location, see [Compilation Output](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#description-of-runtime-and-other-tools-directory-structure). -Pass the build package to the `/data/local/tmp/` directory of an Android phone equipped with NPU chips -(For supported NPU chips, see [Chipset Platforms and Supported HUAWEI HiAI Versions](https://developer.huawei.com/consumer/en/doc/development/hiai-Guides/mapping-relationship-0000001052830507#ZH-CN_TOPIC_0000001052830507__section94427279718).)and test NPU inference using the Benchmark tool -on the phone, as shown in the example below: - - - Test performance - - ```bash - ./benchmark --device=NPU --modelFile=./models/test_benchmark.ms --timeProfiling=true - ``` - - - Test precision - - ```bash - ./benchmark --device=NPU --modelFile=./models/test_benchmark.ms --inDataFile=./input/test_benchmark.bin --inputShapes=1,32,32,1 --accuracyThreshold=3 --benchmarkDataFile=./output/test_benchmark.out - ``` - -For more information about the use of Benchmark, see [Benchmark Use](https://www.mindspore.cn/tutorial/lite/en/master/use/benchmark_tool.html). - -For environment variable settings, you need to set the directory where the libmindspore-lite.so -(under the directory `mindspore-lite-{version}-inference-android-{arch}/inference/lib`) and NPU libraries -(under the directory `mindspore-lite-{version}-inference-android-{arch}/inference/third_party/hiai_ddk/lib/`) are located, to `${LD_LIBRARY_PATH}`. -The directory is specified in [Compilation Output](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html#description-of-runtime-and-other-tools-directory-structure) -with compilation option `-I arm64` or `-I arm32`. - -## Supported Chips - -For supported NPU chips, see [Chipset Platforms and Supported HUAWEI HiAI Versions](https://developer.huawei.com/consumer/en/doc/development/hiai-Guides/mapping-relationship-0000001052830507#ZH-CN_TOPIC_0000001052830507__section94427279718). 
- -## Supported Operators - -For supported NPU operators, see [Lite Operator List](https://www.mindspore.cn/tutorial/lite/en/master/operator_list_lite.html). \ No newline at end of file diff --git a/tutorials/lite/source_en/use/post_training_quantization.md b/tutorials/lite/source_en/use/post_training_quantization.md deleted file mode 100644 index f845a46be164e40cf7e5c573835ac1a5beb693b5..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/post_training_quantization.md +++ /dev/null @@ -1,141 +0,0 @@ -# Optimizing the Model (Quantization After Training) - -`Windows` `Linux` `Model Converting` `Model Optimization` `Intermediate` `Expert` - - - -- [Optimizing the Model (Quantization After Training)](#optimizing-the-model-quantization-after-training) - - [Overview](#overview) - - [Weight Quantization](#weight-quantization) - - [Parameter Description](#parameter-description) - - [Procedure](#procedure) - - [Partial Model Accuracy Result](#partial-model-accuracy-result) - - [Full Quantization](#full-quantization) - - [Parameter Description](#parameter-description-1) - - [Procedure](#procedure-1) - - [Partial Model Accuracy Result](#partial-model-accuracy-result-1) - - - - - -## Overview - -Converting a trained `float32` model into an `int8` model through quantization after training can reduce the model size and improve the inference performance. In MindSpore Lite, this function is integrated into the model conversion tool `conveter_lite`. You can add command line parameters to convert a model into a quantization model. - -MindSpore Lite quantization after training is classified into two types: - -1. Weight quantization: quantizes a weight of a model and compresses only the model size. `float32` inference is still performed during inference. -2. Full quantization: quantizes the weight and activation value of a model. The `int` operation is performed during inference to improve the model inference speed and reduce power consumption. 
- -Data types and parameters required for the two types are different, but both can be set by using the conversion tool. For details about how to use the conversion tool `converter_lite`, see [Converting Training Models](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html). After the tool configuration is completed, you can enable quantization after training. - -## Weight Quantization - -Quantization of 1 to 16 bits is supported. A smaller number of quantization bits indicates a higher model compression ratio and a large accuracy loss. You can use the [Benchmark tool](https://www.mindspore.cn/tutorial/lite/en/master/use/benchmark_tool.html) to evaluate the accuracy and determine the number of quantization bits. Generally, the average relative error (accuracyThreshold) is within 4% which is small. The following describes the usage and effect of weight quantization. - -### Parameter Description - -Generally, the weight quantization conversion command is as follows: - -```bash -./converter_lite --fmk=ModelType --modelFile=ModelFilePath --outputFile=ConvertedModelPath --quantType=WeightQuant --bitNum=BitNumValue --quantWeightSize=ConvWeightQuantSizeThresholdValue --quantWeightChannel=ConvWeightQuantChannelThresholdValue -``` - -Parameters of this command are described as follows: - -| Parameter | Attribute | Function Description | Parameter Type | Default Value | Value Range | -| -------- | ------- | ----- | ----- |----- | ----- | -| `--quantType=` | Mandatory |Set this parameter to WeightQuant to enable weight quantization. | String | - | WeightQuant | -| `--bitNum=` | Optional | Number of bits for weight quantization. Currently, 1 to 16 bits are supported. | Integer | 8 | \[1, 16] | -| `--quantWeightSize=` | Optional | Set the threshold of the convolution kernel size for weight quantization. If the size of the convolution kernel is greater than the threshold, the weight is quantized. 
Recommended value: 500 | Integer | 0 | \[0, +∞) | -| `--quantWeightChannel=` | Optional | Set the threshold of the number of convolution channels for weight quantization. If the number of convolution channels is greater than the threshold, the weight is quantized. Recommended value: 16 | Integer | 16 | \[0, +∞)| - -You can adjust the weight quantization parameters based on the model and your requirements. -> To ensure the accuracy of weight quantization, you are advised to set the value range of the `--bitNum` parameter to 8 bits to 16 bits. - -### Procedure - -1. Correctly build the `converter_lite` executable file. For details about how to obtain the `converter_lite` tool and configure environment variables, see [Building MindSpore Lite](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html). -2. Take the TensorFlow Lite model as an example. Run the following command to convert the weight quantization model: - - ```bash - ./converter_lite --fmk=TFLITE --modelFile=Inception_v3.tflite --outputFile=Inception_v3.tflite --quantType=WeightQuant --bitNum=8 --quantWeightSize=0 --quantWeightChannel=0 - ``` - -3. After the preceding command is successfully executed, the quantization model `Inception_v3.tflite.ms` is obtained. The size of the quantization model usually decreases to one fourth of the FP32 model. 
- -### Partial Model Accuracy Result - - | Model | Test Dataset | FP32 Model Accuracy | Weight Quantization Accuracy (8 bits) | - | -------- | ------- | ----- | ----- | - | [Inception_V3](https://storage.googleapis.com/download.tensorflow.org/models/tflite/model_zoo/upload_20180427/inception_v3_2018_04_27.tgz) | [ImageNet](http://image-net.org/) | 77.60% | 77.53% | - | [Mobilenet_V1_1.0_224](https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_1.0_224.tgz) | [ImageNet](http://image-net.org/) | 70.96% | 70.56% | - | [Mobilenet_V2_1.0_224](https://storage.googleapis.com/download.tensorflow.org/models/tflite_11_05_08/mobilenet_v2_1.0_224.tgz) | [ImageNet](http://image-net.org/) | 71.56% | 71.53% | - -> All the preceding results are obtained in the x86 environment. - -## Full Quantization - -In scenarios where the model running speed needs to be improved and the model running power consumption needs to be reduced, the full quantization after training can be used. The following describes the usage and effect of full quantization. - -### Parameter Description - -Generally, the full quantization conversion command is as follows: - -```bash -./converter_lite --fmk=ModelType --modelFile=ModelFilePath --outputFile=ConvertedModelPath --quantType=PostTraining --bitNum=8 --configFile=config.cfg -``` - -Parameters of this command are described as follows: - -| Parameter | Attribute | Function Description | Parameter Type | Default Value | Value Range | -| -------- | ------- | ----- | ----- |----- | ----- | -| `--quantType=` | Mandatory | Set this parameter to PostTraining to enable full quantization. | String | - | PostTraining | -| `--configFile=` | Mandatory | Path of a calibration dataset configuration file | String | - | - | -| `--bitNum=` | Optional | Number of bits for full quantization. Currently, 1 to 8 bits are supported. 
| Integer | 8 | \[1, 8] | - -To compute a quantization parameter of an activation value, you need to provide a calibration dataset. It is recommended that the calibration dataset be obtained from the actual inference scenario and can represent the actual input of a model. The number of data records is about 100. -The calibration dataset configuration file uses the `key=value` mode to define related parameters. The `key` to be configured is as follows: - -| Parameter Name | Attribute | Function Description | Parameter Type | Default Value | Value Range | -| -------- | ------- | ----- | ----- | ----- | ----- | -| image_path | Mandatory | Directory for storing a calibration dataset. If a model has multiple inputs, enter directories where the corresponding data is stored in sequence. Use commas (,) to separate them. | String | - | The directory stores the input data that can be directly used for inference. Since the current framework does not support data preprocessing, all data must be converted in advance to meet the input requirements of inference. | -| batch_count | Optional | Number of used inputs | Integer | 100 | (0, +∞) | -| method_x | Optional | Input and output data quantization algorithms at the network layer | String | KL | KL, MAX_MIN, or RemovalOutlier.
    KL: quantizes and calibrates the data range based on [KL divergence](http://on-demand.gputechconf.com/gtc/2017/presentation/s7310-8-bit-inference-with-tensorrt.pdf).
MAX_MIN: computes the data quantization parameters based on the maximum and minimum values.
    RemovalOutlier: removes the maximum and minimum values of data based on a certain proportion and then calculates the quantization parameters.
    If the calibration dataset is consistent with the input data during actual inference, MAX_MIN is recommended. If the noise of the calibration dataset is large, KL or RemovalOutlier is recommended. -| thread_num | Optional | Number of threads used when the calibration dataset is used to execute the inference process | Integer | 1 | (0, +∞) | -| bias_correction | Optional | Indicate whether to correct the quantization error. | Boolean | false | True or false. After this parameter is enabled, the accuracy of the converted model can be improved. You are advised to set this parameter to true. | - -> For a multi-input model, different input data must be stored in different directories. In addition, names of all files in each directory must be sorted in ascending lexicographic order to ensure one-to-one mapping. For example, a model has two inputs input0 and input1, and there are two calibration datasets (batch_count=2). The data of input0 is stored in the /dir/input0/ directory. The input data files are data_1.bin and data_2.bin. The data of input1 is stored in the /dir/input1/ directory. The input data files are data_a.bin and data_b.bin. The (data_1.bin, data_a.bin) is regarded as a group of inputs and the (data_2.bin, data_b.bin) is regarded as another group of inputs. - -### Procedure - -1. Correctly build the `converter_lite` executable file. -2. Prepare a calibration dataset. Assume that the dataset is stored in the `/dir/images` directory. Configure the `config.cfg` file. The content is as follows: - - ```python - image_path=/dir/images - batch_count=100 - method_x=MAX_MIN - thread_num=1 - bias_correction=true - ``` - - The calibration dataset can be a subset of the test dataset. Each file stored in the `/dir/images` directory must be pre-processed input data, and each file can be directly used as the input for inference. -3. Take the MindSpore model as an example. 
Run the following command to convert the full quantization model: - - ```bash - ./converter_lite --fmk=MINDIR --modelFile=lenet.mindir --outputFile=lenet_quant --quantType=PostTraining --configFile=config.cfg - ``` - -4. After the preceding command is successfully executed, the quantization model `lenet_quant.ms` is obtained. Generally, the size of the quantization model decreases to one fourth of the FP32 model. - -### Partial Model Accuracy Result - - | Model | Test Dataset | method_x | FP32 Model Accuracy | Full Quantization Accuracy (8 bits) | Description | - | -------- | ------- | ----- | ----- | ----- | ----- | - | [Inception_V3](https://storage.googleapis.com/download.tensorflow.org/models/tflite/model_zoo/upload_20180427/inception_v3_2018_04_27.tgz) | [ImageNet](http://image-net.org/) | KL | 77.60% | 77.40% | Randomly select 100 images from the ImageNet Validation dataset as a calibration dataset. | - | [Mobilenet_V1_1.0_224](https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_1.0_224.tgz) | [ImageNet](http://image-net.org/) | KL | 70.96% | 70.31% | Randomly select 100 images from the ImageNet Validation dataset as a calibration dataset. | - | [Mobilenet_V2_1.0_224](https://storage.googleapis.com/download.tensorflow.org/models/tflite_11_05_08/mobilenet_v2_1.0_224.tgz) | [ImageNet](http://image-net.org/) | MAX_MIN | 71.56% | 71.16% | Randomly select 100 images from the ImageNet Validation dataset as a calibration dataset. | - -> All the preceding results are obtained in the x86 environment, and `bias_correction=true` is set. diff --git a/tutorials/lite/source_en/use/runtime.rst b/tutorials/lite/source_en/use/runtime.rst deleted file mode 100644 index 49466637b51350fa689d28927a760aa4b73b859b..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/runtime.rst +++ /dev/null @@ -1,8 +0,0 @@ -Executing Model Inference -================================= - -.. 
toctree:: - :maxdepth: 1 - - runtime_cpp - runtime_java \ No newline at end of file diff --git a/tutorials/lite/source_en/use/runtime_cpp.md b/tutorials/lite/source_en/use/runtime_cpp.md deleted file mode 100644 index cd570c209dbbe319e08377e5330a3fa0c4c95908..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/runtime_cpp.md +++ /dev/null @@ -1,592 +0,0 @@ -# Using C++ Interface to Perform Inference - -`Windows` `Linux` `Android` `C++` `Inference Application` `Model Loading` `Data Preparation` `Intermediate` `Expert` - - - -- [Using C++ Interface to Perform Inference](#using-c-interface-to-perform-inference) - - [Overview](#overview) - - [Loading a Model](#loading-a-model) - - [Creating a Configuration Context](#creating-a-configuration-context) - - [Configuring the Number of Threads](#configuring-the-number-of-threads) - - [Configuring the CPU Backend](#configuring-the-cpu-backend) - - [Configuring the GPU Backend](#configuring-the-gpu-backend) - - [Configuring the NPU Backend](#configuring-the-npu-backend) - - [Creating a Session](#creating-a-session) - - [Building a Graph](#building-a-graph) - - [Inputting Data](#inputting-data) - - [Executing Inference](#executing-inference) - - [Obtaining Output](#obtaining-output) - - [Releasing Memory](#releasing-memory) - - [Advanced Usage](#advanced-usage) - - [Optimizing the Memory Size](#optimizing-the-memory-size) - - [Core Binding Operations](#core-binding-operations) - - [Resizing the Input Dimension](#resizing-the-input-dimension) - - [Parallel Sessions](#parallel-sessions) - - [Sharing a Memory Pool](#sharing-a-memory-pool) - - [Calling Back a Model During the Running Process](#calling-back-a-model-during-the-running-process) - - [Simplified CreateSession API Invocation Process](#simplified-createsession-api-invocation-process) - - [Viewing Logs](#viewing-logs) - - [Obtaining the Version Number](#obtaining-the-version-number) - - - - - -## Overview - -After the model is converted into a `.ms` 
model by using the MindSpore Lite model conversion tool, the inference process can be performed in Runtime. For details, see [Converting Models for Inference](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html). This tutorial describes how to use the [C++ API](https://www.mindspore.cn/doc/api_cpp/en/master/index.html) to perform inference. - -To use the MindSpore Lite inference framework, perform the following steps: - -1. Load the model: Read the `.ms` model converted by using the model conversion tool from the file system, import the model by using [mindspore::lite::Model::Import](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#import), parse the model, and create the `Model *`. For details, see [Converting Models for Inference](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html). -2. Create a configuration context: Create a configuration [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#context) to save some basic configuration parameters required by a session to guide graph build and execution. -3. Create a session: Create [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) and configure the [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#context) obtained in the previous step to the session. -4. Build the graph: Before performing inference, call the `CompileGraph` API of [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) to build the graph. In the graph build phase, graph partition and operator selection and scheduling are performed, which takes a long time. Therefore, it is recommended that with the [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) created each time, one graph be built. In this case, the inference will be performed for multiple times. -5. Input data: Before the graph is exed, data needs to be filled in to the `Input Tensor`. -6. 
Perform inference: Use `RunGraph` of the [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) to perform model inference. -7. Obtain the output: After the graph execution is complete, you can obtain the inference result by `outputting the tensor`. -8. Release the memory: If the MindSpore Lite inference framework is not required, release the created [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) and [Model](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#model). - -![img](../images/lite_runtime.png) - -> For details about the calling process of MindSpore Lite inference, see [Simplified MindSpore Lite C++ Demo](https://www.mindspore.cn/tutorial/lite/en/master/quick_start/quick_start_cpp.html). - -## Loading a Model - -When MindSpore Lite is used for model inference, the `.ms` model file converted by using the model conversion tool needs to be read from the file system and created from the memory data by using the [mindspore::lite::Model::Import](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#import) static function. `Model` holds model data such as weight data and operator attributes. For details, see [Converting Models for Inference](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html). - -The [Model](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#model) instance returned by the `mindspore::lite::Model::Import` function is a pointer created through `new`. If the instance is not required, release it through `delete`. - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L325) demonstrates how to read a MindSpore Lite model from the file system and parse the model by using `mindspore::lite::Model::Import`: - -```cpp -// Read model file. 
-size_t size = 0; -char *model_buf = ReadFile(model_path, &size); -if (model_buf == nullptr) { - std::cerr << "Read model file failed." << std::endl; -} -// Load the .ms model. -auto model = mindspore::lite::Model::Import(model_buf, size); -delete[](model_buf); -if (model == nullptr) { - std::cerr << "Import model file failed." << std::endl; -} -``` - -## Creating a Configuration Context - -The context saves some basic configuration parameters required by the session to guide graph build and execution. If you use `new` to create a [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#id2) and do not need it any more, use `delete` to release it. Generally, the [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#id2) is released after the [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) is created. The parameters contained in [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#id2) are defined as follows: - -- [thread_num_](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#thread-num): MindSpore Lite has a built-in thread pool shared by processes. During inference, `thread_num_` is used to specify the maximum number of threads in the thread pool. The default value is 2. -- [allocator](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#allocator): MindSpore Lite supports dynamic memory allocation and release. If `allocator` is not specified, a default `allocator` is generated during inference. You can also use the [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#context) method to share the memory allocator in multiple [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#id2). For details about the calling method, see the usage of [Sharing a Memory Pool](#sharing-a-memory-pool). - -- [device_list_](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#device-list): MindSpore Lite supports heterogeneous inference. 
The backend configuration information for inference is specified by `device_list_` in [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#id2). By default, the [DeviceContext](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#devicecontext) of the CPU is stored. During graph build, operator selection and scheduling are performed based on the backend configuration information in `device_list_`. Currently, only CPU and GPU heterogeneity or CPU and NPU heterogeneity is supported. When the GPU's [DeviceContext](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#devicecontext) is configured, GPU-based inference is preferentially used. When the NPU's [DeviceContext](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#devicecontext) is configured, NPU-based inference is preferentially used. - -> `device_list_[0]` must be `DeviceContext` of the CPU, and `device_list_[1]` must be `DeviceContext` of the GPU or `DeviceContext` of the NPU. Currently, the CPU, GPU, and NPU cannot be set at a time. - -### Configuring the Number of Threads - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L109) demonstrates how to configure the number of threads: - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "New context failed while running." << std::endl; -} -// Configure the number of worker threads in the thread pool to 2, including the main thread. -context->thread_num_ = 2; -``` - -### Configuring the CPU Backend - -When the backend to be executed is the CPU, `device_list_[0]` is the `DeviceContext` of the CPU by default after `Context` is created. You can directly configure the `enable_float16_` and `cpu_bind_mode_` attributes in [CpuDeviceInfo](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#cpudeviceinfo). 
- -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L102) demonstrates how to create a CPU backend, set the CPU core binding mode to large-core priority, and enable float16 inference: - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "New context failed while running." << std::endl; -} -// CPU device context has default values. -auto &cpu_device_info = context->device_list_[0].device_info_.cpu_device_info_; -// The large core takes priority in thread and core binding methods. This parameter will work in the BindThread interface. For specific binding effect, see the "Run Graph" section. -cpu_device_info.cpu_bind_mode_ = HIGHER_CPU; -// Use float16 operator as priority. -cpu_device_info.enable_float16_ = true; -``` - -> Float16 takes effect only when the CPU is of the ARM v8.2 architecture. Other models and x86 platforms that are not supported are automatically rolled back to Float32. - -### Configuring the GPU Backend - -If the backend to be exed is heterogeneous inference based on CPUs and GPUs, you need to set [DeviceContext](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#devicecontext) for both CPUs and GPUs. After the configuration, GPU-based inference is preferentially used. [GpuDeviceInfo](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#gpudeviceinfo) contains the `enable_float16_` public attribute, which is used to enable Float16 inference. - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L120) demonstrates how to create the CPU and GPU heterogeneous inference backend and how to enable Float16 inference for the GPU. - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "CreateSession failed while running." << std::endl; -} -// If GPU device context is set. 
The preferred backend is GPU, which means, if there is a GPU operator, it will run on the GPU first, otherwise it will run on the CPU. -DeviceContext gpu_device_ctx{DT_GPU, {false}}; -// GPU use float16 operator as priority. -gpu_device_ctx.device_info_.gpu_device_info_.enable_float16_ = true; -// The GPU device context needs to be push_back into device_list to work. -context->device_list_.push_back(gpu_device_ctx); -``` - -> Currently, the backend of GPU is based on OpenCL. GPUs of Mali and Adreno are supported. The OpenCL version is 2.0. -> -> The configuration is as follows: -> -> CL_TARGET_OPENCL_VERSION=200 -> -> CL_HPP_TARGET_OPENCL_VERSION=120 -> -> CL_HPP_MINIMUM_OPENCL_VERSION=120 - -### Configuring the NPU Backend - -When the backend to be exed is heterogeneous inference based on CPUs and GPUs, you need to set the CPU's and NPU's [DeviceContext](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#devicecontext). After the configuration, the NPU's inference is preferentially used. The [NpuDeviceInfo](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#npudeviceinfo) contains the public attribute `frequency_`, which is used to set the NPU's frequency. - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L137) shows how to create the CPU and NPU heterogeneous inference backend and set the NPU frequency to 3. It can be set to 1 (low power consumption), 2 (balanced), 3 (high performance), 4 (extreme performance) and other values will be changed to 3. - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "CreateSession failed while running." << std::endl; -} -DeviceContext npu_device_ctx{DT_NPU}; -npu_device_ctx.device_info_.npu_device_info_.frequency_ = 3; -// The NPU device context needs to be push_back into device_list to work. 
-context->device_list_.push_back(npu_device_ctx); -``` - -## Creating a Session - -When MindSpore Lite is used for inference, [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) is the main entry for inference. You can use [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) to build and execute graphs. Use the [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#id2) created in the previous step to call the [CreateSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#createsession) method of the [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) to create the LiteSession. - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L275) demonstrates how to create a `LiteSession`: - -```cpp -// Use Context to create Session. -auto session = session::LiteSession::CreateSession(context.get()); -// After the LiteSession is created, the Context can be released. -... -if (session == nullptr) { - std::cerr << "CreateSession failed while running." << std::endl; -} -``` - -> The [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) instance returned by the function is a pointer that is created using `new`. If the instance is not required, you need to release it using `delete`. -> -> After the [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) is created, the [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#context) created in the previous step can be released. 
- -## Building a Graph - -Before executing a graph, call the [CompileGraph](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#compilegraph) API of [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) to build the graph and parse the [Model](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#model) instance loaded from the file for graph partition and operator selection and scheduling. This takes a long time. Therefore, it is recommended that with the [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) created each time, one graph be built. In this case, the inference will be performed for multiple times. - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L282) demonstrates how to call `CompileGraph` to build a graph. - -```cpp -// Assume we have created a LiteSession instance named session and a Model instance named model before. -auto ret = session->CompileGraph(model); -if (ret != RET_OK) { - std::cerr << "Compile failed while running." << std::endl; - // session and model need to be released by users manually. - ... -} -``` - -## Inputting Data - -Before executing a graph, obtain the input [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) of the model and copy the input data to the input [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) of the model using `memcpy`. 
In addition, you can use the [Size](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#size) method of [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) to obtain the size of the data to be filled in to the tensor, use the [data_type](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#data-type) method to obtain the data type of the tensor, and use the [MutableData](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mutabledata) method of [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) to obtain the writable pointer. - -MindSpore Lite provides two methods to obtain the input tensor of a model. - -1. Use the [GetInputsByTensorName](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#getinputsbytensorname) method to obtain the tensor connected to the input node from the model input tensor based on the name of the model input tensor. The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L169) demonstrates how to call `GetInputsByTensorName` to obtain the input tensor and fill in data. - - ```cpp - // Pre-processing of input data, convert input data format to NHWC. - ... - // Assume that the model has only one input tensor named 2031_2030_1_construct_wrapper:x. - auto in_tensor = session->GetInputsByTensorName("2031_2030_1_construct_wrapper:x"); - if (in_tensor == nullptr) { - std::cerr << "Input tensor is nullptr" << std::endl; - } - auto input_data = in_tensor->MutableData(); - if (input_data == nullptr) { - std::cerr << "MallocData for inTensor failed." << std::endl; - } - memcpy(in_data, input_buf, data_size); - // Users need to free input_buf. - ``` - -2. Use the [GetInputs](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#getinputs) method to directly obtain the vectors of all model input tensors. 
The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L150) demonstrates how to call `GetInputs` to obtain the input tensor and fill in data. - - ```cpp - // Pre-processing of input data, convert input data format to NHWC. - ... - // Assume we have created a LiteSession instance named session. - auto inputs = session->GetInputs(); - // Assume that the model has only one input tensor. - auto in_tensor = inputs.front(); - if (in_tensor == nullptr) { - std::cerr << "Input tensor is nullptr" << std::endl; - } - auto *in_data = in_tensor->MutableData(); - if (in_data == nullptr) { - std::cerr << "Data of in_tensor is nullptr" << std::endl; - } - memcpy(in_data, input_buf, data_size); - // Users need to free input_buf. - ``` - -> The data layout in the input tensor of the MindSpore Lite model must be `NHWC`. For more information about data pre-processing, see step 2 in [Writing On-Device Inference Code](https://www.mindspore.cn/tutorial/lite/en/master/quick_start/quick_start.html#writing-on-device-inference-code) in Android Application Development Based on JNI Interface to convert the input image into the Tensor format of the MindSpore model. -> -> [GetInputs](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#getinputs) and [GetInputsByTensorName](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#getinputsbyname) methods return vectors that do not need to be released by users. - -## Executing Inference - -After a MindSpore Lite session builds a graph, you can call the [RunGraph](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#rungraph) function of [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) for model inference. - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L347) demonstrates how to call `RunGraph` to perform inference. 
- -```cpp -auto ret = session->RunGraph(); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "RunGraph failed" << std::endl; -} -``` - -## Obtaining Output - -After performing inference, MindSpore Lite can obtain the inference result of the model. MindSpore Lite provides three methods to obtain the output [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) of a model. - -1. Use the [GetOutputsByNodeName](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#getoutputsbynodename) method to obtain the vector of the tensor connected to the model output [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) based on the name of the model output node. The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L184) demonstrates how to call `GetOutputsByNodeName` to obtain the output tensor. - - ```cpp - // Assume we have created a LiteSession instance named session before. - // Assume that model has an output node named Default/head-MobileNetV2Head/Softmax-op204. - auto output_vec = session->GetOutputsByNodeName("Default/head-MobileNetV2Head/Softmax-op204"); - // Assume that output node named Default/Sigmoid-op204 has only one output tensor. - auto out_tensor = output_vec.front(); - if (out_tensor == nullptr) { - std::cerr << "Output tensor is nullptr" << std::endl; - } - // Post-processing your result data. - ``` - -2. Use the [GetOutputByTensorName](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#getoutputbytensorname) method to obtain the corresponding model output [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) based on the name of the model output tensor. The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L212) demonstrates how to call `GetOutputsByTensorName` to obtain the output tensor. 
- - ```cpp - // Assume we have created a LiteSession instance named session. - // We can use GetOutputTensorNames method to get all name of output tensor of model which is in order. - auto tensor_names = session->GetOutputTensorNames(); - // Assume we have created a LiteSession instance named session before. - // Use output tensor name returned by GetOutputTensorNames as key - for (auto tensor_name : tensor_names) { - auto out_tensor = session->GetOutputByTensorName(tensor_name); - if (out_tensor == nullptr) { - std::cerr << "Output tensor is nullptr" << std::endl; - } - // Post-processing the result data. - } - ``` - -3. Use the [GetOutputs](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#getoutputs) method to directly obtain the names of all model output [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) and a map of the [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) pointer. The following [sample code](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L242) demonstrates how to call `GetOutputs` to obtain the output tensor. - - ```cpp - // Assume we have created a LiteSession instance named session. - auto out_tensors = session->GetOutputs(); - for (auto out_tensor : out_tensors) { - // Post-processing the result data. - } - ``` - -> The vector or map returned by the [GetOutputsByNodeName](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#getoutputsbynodename), [GetOutputByTensorName](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#getoutputbytensorname), and [GetOutputs](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#getoutputs) methods does not need to be released by the user. - -## Releasing Memory - -If the MindSpore Lite inference framework is not required, you need to release the created LiteSession and model. 
The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L361) demonstrates how to release the memory before the program ends. - -```cpp -// Delete model buffer. -// Assume that the variable of Model * is named model. -delete model; -// Delete session buffer. -// Assume that the variable of Session * is named session. -delete session; -``` - -## Advanced Usage - -### Optimizing the Memory Size - -If the memory is greatly limited, you can call the [Free](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#free) API to reduce the memory usage after the [Model](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#model) is compiled into the [CompileGraph](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#compilegraph) by the graph. Once the [Free](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#free) API of a [Model](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#model) is called, the [Model](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#model) cannot build graphs. - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L340) demonstrates how to call the `Free` API of `Model` to release `MetaGraph` to reduce the memory size. - -```cpp -// Compile graph. -auto ret = session->CompileGraph(model); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "Compile failed while running." << std::endl; -} - -// Note: when use model->Free(), the model can not be compiled again. -model->Free(); -``` - -### Core Binding Operations - -The built-in thread pool of MindSpore Lite supports core binding and unbinding. By calling the [BindThread](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#bindthread) API, you can bind working threads in the thread pool to specified CPU cores for performance analysis. 
The core binding operation is related to the context specified by the user when the [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) is created. The core binding operation sets the affinity between the thread and the CPU based on the core binding policy in the context. - -Note that core binding is an affinity operation and may not be bound to a specified CPU core. It may be affected by system scheduling. In addition, after the core binding, you need to perform the unbinding operation after the code is performed. - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L346) demonstrates how to bind the large core first when performing inference. - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "New context failed while running." << std::endl; -} -// CPU device context has default values. -auto &cpu_device_info = context->device_list_[0].device_info_.cpu_device_info_; -// The large core takes priority in thread and core binding methods. This parameter will work in the BindThread -// interface. For specific binding effect, see the "Run Graph" section. -cpu_device_info.cpu_bind_mode_ = mindspore::lite::HIGHER_CPU; - -... - -// Assume we have created a LiteSession instance named session. -session->BindThread(true); -auto ret = session->RunGraph(); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "RunGraph failed" << std::endl; -} -session->BindThread(false); -``` - -> There are three options for core binding: HIGHER_CPU, MID_CPU, and NO_BIND. -> -> The rule for determining the core binding mode is based on the frequency of CPU cores instead of the CPU architecture. -> -> HIGHER_CPU: indicates that threads in the thread pool are preferentially bound to the core with the highest frequency. 
The first thread is bound to the core with the highest frequency, the second thread is bound to the core with the second highest frequency, and so on. -> -> MID_CPU: indicates that threads are bound to cores with the third or fourth highest frequency preferentially, which is determined based on experience. When there are no such cores, threads are bound to cores with the highest frequency. - -### Resizing the Input Dimension - -When MindSpore Lite is used for inference, if the input shape needs to be resized, you can call the [Resize](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#resize) API of [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) to reset the shape of the input tensor after a session is created and a graph is built. - -> Some networks do not support variable dimensions. As a result, an error message is displayed and the model exits unexpectedly. For example, the model contains the MatMul operator, one input tensor of the MatMul operator is the weight, and the other input tensor is the input. If a variable dimension API is called, the input tensor does not match the shape of the weight tensor. As a result, the inference fails. - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L368) demonstrates how to perform [Resize](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#resize) on the input tensor of MindSpore Lite: - -```cpp -// Assume we have created a LiteSession instance named session. -// Compile graph. -auto ret = session->CompileGraph(model); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "Compile failed while running." << std::endl; -} -... 
-auto inputs = session->GetInputs(); -std::vector resize_shape = {1, 128, 128, 3}; -// Assume the model has only one input,resize input shape to [1, 128, 128, 3] -std::vector> new_shapes; -new_shapes.push_back(resize_shape); -session->Resize(inputs, new_shapes); -``` - -### Parallel Sessions - -MindSpore Lite supports parallel inference for multiple [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession). The thread pool and memory pool of each [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) are independent. However, multiple threads cannot call the [RunGraph](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#rungraph) API of a single [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) at the same time. - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L463) demonstrates how to infer multiple [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) in parallel: - -```cpp -int RunSessionParallel(const char *model_path) { - size_t size = 0; - char *model_buf = ReadFile(model_path, &size); - if (model_buf == nullptr) { - std::cerr << "Read model file failed." << std::endl; - return -1; - } - // Load the .ms model. - auto model = mindspore::lite::Model::Import(model_buf, size); - delete[](model_buf); - if (model == nullptr) { - std::cerr << "Import model file failed." << std::endl; - return -1; - } - // Compile MindSpore Lite model. - auto session1 = CreateSessionAndCompileByModel(model); - if (session1 == nullptr) { - std::cerr << "Create session failed." << std::endl; - return -1; - } - - // Compile MindSpore Lite model. - auto session2 = CreateSessionAndCompileByModel(model); - if (session2 == nullptr) { - std::cerr << "Create session failed." 
<< std::endl; - return -1; - } - // Note: when use model->Free(), the model can not be compiled again. - model->Free(); - - std::thread thread1([&]() { - GetInputsByTensorNameAndSetData(session1); - auto status = session1->RunGraph(); - if (status != 0) { - std::cerr << "Inference error " << status << std::endl; - return; - } - std::cout << "Session1 inference success" << std::endl; - }); - - std::thread thread2([&]() { - GetInputsByTensorNameAndSetData(session2); - auto status = session2->RunGraph(); - if (status != 0) { - std::cerr << "Inference error " << status << std::endl; - return; - } - std::cout << "Session2 inference success" << std::endl; - }); - - thread1.join(); - thread2.join(); - - // Get outputs data. - GetOutputsByNodeName(session1); - GetOutputsByNodeName(session2); - - // Delete model buffer. - delete model; - // Delete session buffer. - delete session1; - delete session2; - return 0; -} -``` - -MindSpore Lite does not support multi-thread parallel execution of inference for a single [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession). Otherwise, the following error information is displayed: - -```cpp -ERROR [mindspore/lite/src/lite_session.cc:297] RunGraph] 10 Not support multi-threading -``` - -### Sharing a Memory Pool - -If there are multiple sessions, you can configure the same `allocator` in [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#id2) to share the memory pool and reduce the memory size during running. The maximum memory size of the memory pool is `3 GB`, and the maximum memory size allocated each time is `2 GB`. 
- -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L535) demonstrates how to share the memory pool between two [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession): - -```cpp -auto context1 = std::make_shared(); -if (context1 == nullptr) { - std::cerr << "New context failed while running." << std::endl; -} -auto session1 = mindspore::session::LiteSession::CreateSession(context1.get()); -if (session1 == nullptr) { - std::cerr << "CreateSession failed while running." << std::endl; -} -auto ret = session1->CompileGraph(model); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "Compile failed while running." << std::endl; -} -auto context2 = std::make_shared(); -if (context2 == nullptr) { - std::cerr << "New context failed while running." << std::endl; -} - -// Use the same allocator to share the memory pool. -context2->allocator = context1->allocator; - -auto session2 = mindspore::session::LiteSession::CreateSession(context2.get()); -if (session2 == nullptr) { - std::cerr << "CreateSession failed while running " << std::endl; -} -ret = session2->CompileGraph(model); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "Compile failed while running " << std::endl; -} -``` - -### Calling Back a Model During the Running Process - -MindSpore Lite can pass two [KernelCallBack](https://www.mindspore.cn/doc/api_cpp/en/master/mindspore.html#kernelcallback) function pointers to [RunGraph](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#rungraph) to call back a model for inference. Compared with common graph execution, callback execution can obtain additional information during the running process to help developers analyze performance and debug bugs. 
Additional information includes: - -- Name of the running node -- Input and output tensors before the current node is inferred -- Input and output tensors after the current node is inferred - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L633) demonstrates how to define two callback functions as the pre-callback pointer and post-callback pointer and pass them to the [RunGraph](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#rungraph) API for callback inference. - -```cpp -// Definition of callback function before forwarding operator. -auto before_call_back = [&](const std::vector &before_inputs, - const std::vector &before_outputs, - const mindspore::CallBackParam &call_param) { - std::cout << "Before forwarding " << call_param.node_name << " " << call_param.node_type << std::endl; - return true; -}; -// Definition of callback function after forwarding operator. -auto after_call_back = [&](const std::vector &after_inputs, - const std::vector &after_outputs, - const mindspore::CallBackParam &call_param) { - std::cout << "After forwarding " << call_param.node_name << " " << call_param.node_type << std::endl; - return true; -}; - -auto ret = session->RunGraph(before_call_back, after_call_back); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "Inference error " << ret << std::endl; -} -``` - -### Simplified CreateSession API Invocation Process - -Create a [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) by invoking the static method [CreateSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#createsession) of the [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) based on the created [Context](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#id2) and the read model buffer and buffer size. 
When this API is used to create a session, the model is loaded and the graph is built internally. You do not need to call the [Import](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#import) and [CompileGraph](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#compilegraph) APIs again. - -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L425) demonstrates how to call the simplified CreateSession API to create a [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession). - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "New context failed while running" << std::endl; -} - -// Use model buffer and context to create Session. -auto session = mindspore::session::LiteSession::CreateSession(model_buf, size, context); - -if (session == nullptr) { - std::cerr << "CreateSession failed while running" << std::endl; -} -``` - -### Viewing Logs - -If an exception occurs during inference, you can view logs to locate the fault. For the Android platform, use the `Logcat` command line to view the MindSpore Lite inference log information and use `MS_LITE` to filter the log information. - -```shell -logcat -s "MS_LITE" -``` - -### Obtaining the Version Number - -MindSpore Lite provides the [Version](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#version) method to obtain the version number, which is included in the `include/version.h` header file. You can call this method to obtain the version number of MindSpore Lite. 
- -The following sample code from [main.cc](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L712) demonstrates how to obtain the version number of MindSpore Lite: - -```cpp -#include "include/version.h" -std::string version = mindspore::lite::Version(); -``` diff --git a/tutorials/lite/source_en/use/runtime_java.md b/tutorials/lite/source_en/use/runtime_java.md deleted file mode 100644 index 3892ee0b0ed8cab16f59e93d7f8b8866119d85d0..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/runtime_java.md +++ /dev/null @@ -1,370 +0,0 @@ -# Using Java Interface to Perform Inference - -`Android` `Java` `Inference Application` `Model Loading` `Data Preparation` `Intermediate` `Expert` - - - -- [Using Java Interface to Perform Inference](#using-java-interface-to-perform-inference) - - [Overview](#overview) - - [Referencing the MindSpore Lite Java Library](#referencing-the-mindspore-lite-java-library) - - [Linux X86 Project Referencing the JAR Library](#linux-x86-project-referencing-the-jar-library) - - [Android Projects Referencing the AAR Library](#android-projects-referencing-the-aar-library) - - [Loading a Model](#loading-a-model) - - [Creating a Configuration Context](#creating-a-configuration-context) - - [Configuring the CPU Backend](#configuring-the-cpu-backend) - - [Configuring the GPU Backend](#configuring-the-gpu-backend) - - [Creating a Session](#creating-a-session) - - [Building a Graph](#building-a-graph) - - [Inputting Data](#inputting-data) - - [Executing Inference](#executing-inference) - - [Obtaining the Output](#obtaining-the-output) - - [Releasing the Memory](#releasing-the-memory) - - [Advanced Usage](#advanced-usage) - - [Optimizing the Memory Size](#optimizing-the-memory-size) - - [Core Binding Operations](#core-binding-operations) - - [Resizing the Input Dimension](#resizing-the-input-dimension) - - [Parallel Sessions](#parallel-sessions) - - [Viewing Logs](#viewing-logs) - - 
[Obtaining the Version Number](#obtaining-the-version-number) - - - - - -## Overview - -After the model is converted into a `.ms` model by using the MindSpore Lite model conversion tool, the inference process can be performed in Runtime. For details, see [Converting Models for Inference](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html). This tutorial describes how to use the [Java API](https://www.mindspore.cn/doc/api_java/en/master/index.html) to perform inference. - -If MindSpore Lite is used in an Android project, you can use [C++ API](https://www.mindspore.cn/doc/api_cpp/en/master/index.html) or [Java API](https://www.mindspore.cn/doc/api_java/en/master/index.html) to run the inference framework. Compared with C++ APIs, Java APIs can be directly called in the Java class. Users do not need to implement the code at the JNI layer, which is more convenient. To run the MindSpore Lite inference framework, perform the following steps: - -1. Load the model: Read the `.ms` model converted by the model conversion tool introduced in [Converting Models for Inference](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html) from the file system and import the model using the [loadModel](https://www.mindspore.cn/doc/api_java/en/master/model.html#loadmodel). -2. Create a configuration context: Create a configuration context [MSConfig](https://www.mindspore.cn/doc/api_java/en/master/msconfig.html#msconfig) to save some basic configuration parameters required by a session to guide graph build and execution, including `deviceType` (device type), `threadNum` (number of threads), `cpuBindMode` (CPU core binding mode), and `enable_float16` (whether to preferentially use the float16 operator). -3. 
Create a session: Create [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) and call the [init](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#init) method to configure the [MSConfig](https://www.mindspore.cn/doc/api_java/en/master/msconfig.html#msconfig) obtained in the previous step in the session. -4. Build a graph: Before building a graph, the [compileGraph](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#compilegraph) API of [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) needs to be called to build the graph, including graph partition and operator selection and scheduling. This takes a long time. Therefore, it is recommended that with [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) created each time, one graph be built. In this case, the inference will be performed for multiple times. -5. Input data: Before the graph is performed, data needs to be filled in to the `Input Tensor`. -6. Perform inference: Use the [runGraph](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#rungraph) of the [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) to perform model inference. -7. Obtain the output: After the graph execution is complete, you can obtain the inference result by `outputting the tensor`. -8. Release the memory: If the MindSpore Lite inference framework is not required, release the created [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) and [Model](https://www.mindspore.cn/doc/api_java/en/master/model.html#model). - -![img](../images/lite_runtime.png) - -> For details about the calling process of MindSpore Lite inference, see [Experience Java Simple Inference Demo](https://www.mindspore.cn/tutorial/lite/en/master/quick_start/quick_start_cpp.html). 
- -## Referencing the MindSpore Lite Java Library - -### Linux X86 Project Referencing the JAR Library - -When using `Maven` as the build tool, you can copy `mindspore-lite-java.jar` to the `lib` directory in the root directory and add the dependency of the JAR package to `pom.xml`. - -```xml - - - com.mindspore.lite - mindspore-lite-java - 1.0 - system - ${project.basedir}/lib/mindspore-lite-java.jar - - -``` - -> Add the paths of `libmindspore-lite.so` and `libmindspore-lite-jni.so` to `java.library.path`. - -### Android Projects Referencing the AAR Library - -When `Gradle` is used as the build tool, move the `mindspore-lite-{version}.aar` file to the `libs` directory of the target module, and then add the local reference directory to `repositories` of `build.gradle` of the target module, add the AAR dependency to `dependencies` as follows: - -> Note that mindspore-lite-{version} is the AAR file name. Replace {version} with the corresponding version information. - -```groovy -repositories { - flatDir { - dirs 'libs' - } -} - -dependencies { - implementation fileTree(dir: "libs", include: ['*.aar']) -} -``` - -## Loading a Model - -Before performing model inference, MindSpore Lite needs to load the `.ms` model converted by the model conversion tool from the file system and parse the model. The [Model](https://www.mindspore.cn/doc/api_java/en/master/model.html#model) class of Java provides two [loadModel](https://www.mindspore.cn/doc/api_java/en/master/model.html#loadmodel) APIs to load models from `Assets` or other file paths. - -The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L217) reads the `mobilenetv2.ms` model file from `Assets` to load the model. - -```java -// Load the .ms model. 
-Model model = new Model(); -String modelPath = "mobilenetv2.ms"; -boolean ret = model.loadModel(this.getApplicationContext(), modelPath); -``` - -> Only the `AAR` library supports the API for loading model files from `Assets`. - -The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/quick_start_java/src/main/java/com/mindspore/lite/demo/Main.java#L128) reads the model file from the `modelPath` path to load the model. - -```java -Model model = new Model(); -boolean ret = model.loadModel(modelPath); -``` - -## Creating a Configuration Context - -Create the configuration context [MSConfig](https://www.mindspore.cn/doc/api_java/en/master/msconfig.html#msconfig) to save some basic configuration parameters required by the session to guide graph build and execution. - -MindSpore Lite supports heterogeneous inference. The preferred backend for inference is specified by `deviceType` of [MSConfig](https://www.mindspore.cn/doc/api_java/en/master/msconfig.html#msconfig). Currently, CPU and GPU are supported. During graph build, operator selection and scheduling are performed based on the preferred backend. - -MindSpore Lite has a built-in thread pool shared by processes. During inference, `threadNum` is used to specify the maximum number of threads in the thread pool. The default value is 2. - -MindSpore Lite supports inference in float16 operator mode. After `enable_float16` is set to `true`, the float16 operator is preferentially used. - -### Configuring the CPU Backend - -If the backend to be performed is a CPU, you need to configure `DeviceType.DT_CPU` in [init](https://www.mindspore.cn/doc/api_java/en/master/msconfig.html#init) after `MSConfig` is created. In addition, the CPU supports the setting of the core binding mode and whether to preferentially use the float16 operator. 
- -The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L59) demonstrates how to create a CPU backend, set the CPU core binding mode to large-core priority, and enable float16 inference: - -```java -MSConfig msConfig = new MSConfig(); -boolean ret = msConfig.init(DeviceType.DT_CPU, 2, CpuBindMode.HIGHER_CPU, true); -``` - -> Float16 takes effect only when the CPU is of the ARM v8.2 architecture. Other models and x86 platforms that are not supported are automatically rolled back to float32. - -### Configuring the GPU Backend - -If the backend to be performed is heterogeneous inference based on CPU and GPU, you need to configure `DeviceType.DT_GPU` in [init](https://www.mindspore.cn/doc/api_java/en/master/msconfig.html#init) after `MSConfig` is created. After the configuration, GPU-based inference is preferentially used. In addition, if enable_float16 is set to true, both the GPU and CPU preferentially use the float16 operator. - -The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L69) demonstrates how to create the CPU and GPU heterogeneous inference backend and how to enable float16 inference for the GPU. - -```java -MSConfig msConfig = new MSConfig(); -boolean ret = msConfig.init(DeviceType.DT_GPU, 2, CpuBindMode.MID_CPU, true); -``` - -> Currently, the GPU can run only on Android mobile devices. Therefore, only the `AAR` library can be run. - -## Creating a Session - -[LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) is the main entry for inference. You can use [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) to build and perform graphs. 
Create [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) and call the [init](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#init) method to configure the [MSConfig](https://www.mindspore.cn/doc/api_java/en/master/msconfig.html#msconfig) obtained in the previous step in the session. After the [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) is initialized, the [MSConfig](https://www.mindspore.cn/doc/api_java/en/master/msconfig.html#msconfig) can perform the release operation. - -The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L86) demonstrates how to create a `LiteSession`: - -```java -LiteSession session = new LiteSession(); -boolean ret = session.init(msConfig); -msConfig.free(); -``` - -## Building a Graph - -Before building a graph, the [compileGraph](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#compilegraph) API of [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) needs to be called to build the graph, including graph partition and operator selection and scheduling. This takes a long time. Therefore, it is recommended that with the [LiteSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#litesession) created each time, one graph be built. In this case, the inference will be performed for multiple times. - -The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L87) demonstrates how to call `CompileGraph` to build a graph. 
- -```java -boolean ret = session.compileGraph(model); -``` - -## Inputting Data - -MindSpore Lite Java APIs provide the `getInputsByTensorName` and `getInputs` methods to obtain the input tensor. Both the `byte[]` and `ByteBuffer` data types are supported. You can set the data of the input tensor by calling [setData](https://www.mindspore.cn/doc/api_java/en/master/mstensor.html#setdata). - -1. Use the [getInputsByTensorName](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#getinputsbytensorname) method to obtain the tensor connected to the input node from the model input tensor based on the name of the model input tensor. The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L151) demonstrates how to call the `getInputsByTensorName` function to obtain the input tensor and fill in data. - - ```java - MSTensor inputTensor = session.getInputsByTensorName("2031_2030_1_construct_wrapper:x"); - // Set Input Data. - inputTensor.setData(inputData); - ``` - -2. Use the [getInputs](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#getinputs) method to directly obtain the vectors of all model input tensors. The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L113) demonstrates how to call `getInputs` to obtain the input tensors and fill in the data. - - ```java - List inputs = session.getInputs(); - MSTensor inputTensor = inputs.get(0); - // Set Input Data. - inputTensor.setData(inputData); - ``` - -> The data layout in the input tensor of the MindSpore Lite model must be `NHWC`. 
For more information about data pre-processing, see [Implementing an Image Segmentation Application](https://www.mindspore.cn/tutorial/lite/en/master/quick_start/image_segmentation.html#id10). - -## Executing Inference - -After a MindSpore Lite session builds a graph, it can call the [runGraph](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#rungraph) function of [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) to perform model inference. - -The following sample code demonstrates how to call `runGraph` to perform inference. - -```java -// Run graph to infer results. -boolean ret = session.runGraph(); -``` - -## Obtaining the Output - -After performing inference, MindSpore Lite can output a tensor to obtain the inference result. MindSpore Lite provides three methods to obtain the output [MSTensor](https://www.mindspore.cn/doc/api_java/en/master/mstensor.html) of a model and supports the [getByteData](https://www.mindspore.cn/doc/api_java/en/master/mstensor.html#getbytedata), [getFloatData](https://www.mindspore.cn/doc/api_java/en/master/mstensor.html#getfloatdata), [getIntData](https://www.mindspore.cn/doc/api_java/en/master/mstensor.html#getintdata) and [getLongData](https://www.mindspore.cn/doc/api_java/en/master/mstensor.html#getlongdata) methods to obtain the output data. - -1. Use the [getOutputMapByTensor](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#getoutputmapbytensor) method to directly obtain the names of all model output [MSTensor](https://www.mindspore.cn/doc/api_java/en/master/mstensor.html#mstensor) and a map of the [MSTensor](https://www.mindspore.cn/doc/api_java/en/master/mstensor.html#mstensor) pointer. 
The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L191) demonstrates how to call `getOutputMapByTensor` to obtain the output tensor. - - ```java - Map outTensors = session.getOutputMapByTensor(); - - Iterator> entries = outTensors.entrySet().iterator(); - while (entries.hasNext()) { - Map.Entry entry = entries.next(); - // Apply infer results. - ... - } - ``` - -2. Use the [getOutputByNodeName](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#getoutputsbynodename) method to obtain the vector of the tensor connected to the model output [MSTensor](https://www.mindspore.cn/doc/api_java/en/master/mstensor.html#mstensor) based on the name of the model output node. The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L175) demonstrates how to call `getOutputByTensorName` to obtain the output tensor. - - ```java - MSTensor outTensor = session.getOutputsByNodeName("Default/head-MobileNetV2Head/Softmax-op204"); - // Apply infer results. - ... - ``` - -3. Use the [getOutputByTensorName](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#getoutputbytensorname) method to obtain the model output [MSTensor](https://www.mindspore.cn/doc/api_java/en/master/mstensor.html#mstensor) based on the name of the model output tensor. The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L182) demonstrates how to call `getOutputByTensorName` to obtain the output tensor. - - ```java - MSTensor outTensor = session.getOutputByTensorName("Default/head-MobileNetV2Head/Softmax-op204"); - // Apply infer results. - ... 
- ``` - -## Releasing the Memory - -If the MindSpore Lite inference framework is not required, you need to release the created LiteSession and Model. The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L204) demonstrates how to release the memory before the program ends. - -```java -session.free(); -model.free(); -``` - -## Advanced Usage - -### Optimizing the Memory Size - -If there is a large limit on the running memory, call the [freeBuffer](https://www.mindspore.cn/doc/api_java/en/master/model.html#freebuffer) function of [Model](https://www.mindspore.cn/doc/api_java/en/master/model.html#model) after the graph build is complete to release the MetaGraph in the MindSpore Lite Model to reduce the running memory. Once the [freeBuffer](https://www.mindspore.cn/doc/api_java/en/master/model.html#freebuffer) of a [Model](https://www.mindspore.cn/doc/api_java/en/master/model.html#model) is called, the [Model](https://www.mindspore.cn/doc/api_java/en/master/model.html#model) cannot be built again. - -The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L241) demonstrates how to call the `freeBuffer` interface of `Model` to release `MetaGraph` to reduce the memory size during running. - -```java -// Compile graph. -ret = session.compileGraph(model); -... -// Note: when use model.freeBuffer(), the model can not be compiled. -model.freeBuffer(); -``` - -### Core Binding Operations - -The built-in thread pool of MindSpore Lite supports core binding and unbinding. By calling the [BindThread](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#bindthread) API, you can bind working threads in the thread pool to specified CPU cores for performance analysis. 
The core binding operation is related to the context specified by the user when the [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html) is created. The core binding operation sets the affinity between the thread and the CPU based on the core binding policy in the context. - -Note that core binding is an affinity operation and may not be bound to a specified CPU core. It may be affected by system scheduling. In addition, after the core binding, you need to perform the unbinding operation after the code is performed. - -The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L164) demonstrates how to bind to cores with the highest frequency first when performing inference. - -```java -boolean ret = msConfig.init(DeviceType.DT_CPU, 2, CpuBindMode.HIGHER_CPU, true); -... -session.bindThread(true); -// Run Inference. -ret = session.runGraph(); -session.bindThread(false); -``` - -> There are three options for core binding: HIGHER_CPU, MID_CPU, and NO_BIND. -> -> The rule for determining the core binding mode is based on the frequency of CPU cores instead of the CPU architecture. -> -> HIGHER_CPU: indicates that threads in the thread pool are preferentially bound to the core with the highest frequency. The first thread is bound to the core with the highest frequency, the second thread is bound to the core with the second highest frequency, and so on. -> -> Mediumcores are defined based on experience. By default, mediumcores are with the third and fourth highest frequency. Mediumcore first indicates that threads are bound to mediumcores preferentially. When there are no available mediumcores, threads are bound to small cores. 
- -### Resizing the Input Dimension - -When using MindSpore Lite for inference, if you need to resize the input shape, you can call the [resize](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#resize) API of [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html) to reset the shape of the input tensor after creating a session and building a graph. - -> Some networks do not support variable dimensions. As a result, an error message is displayed and the model exits unexpectedly. For example, the model contains the MatMul operator, one input tensor of the MatMul operator is the weight, and the other input tensor is the input. If a variable dimension API is called, the input tensor does not match the shape of the weight tensor. As a result, the inference fails. - -The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L164) demonstrates how to perform [resize](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#resize) on the input tensor of MindSpore Lite: - -```java -List inputs = session.getInputs(); -int[][] dims = {{1, 300, 300, 3}}; -bool ret = session.resize(inputs, dims); -``` - -### Parallel Sessions - -MindSpore Lite supports parallel inference of multiple [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html). The thread pool and memory pool of each [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) are independent. However, multiple threads cannot call the [runGraph](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#rungraph) API of a single [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html#litesession) at the same time. 
- -The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L220) demonstrates how to infer multiple [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html) in parallel: - -```java -session1 = createLiteSession(false); -if (session1 != null) { - session1Compile = true; -} else { - Toast.makeText(getApplicationContext(), "session1 Compile Failed.", - Toast.LENGTH_SHORT).show(); -} -session2 = createLiteSession(true); -if (session2 != null) { - session2Compile = true; -} else { - Toast.makeText(getApplicationContext(), "session2 Compile Failed.", - Toast.LENGTH_SHORT).show(); -} -... -if (session1Finish && session1Compile) { - new Thread(new Runnable() { - @Override - public void run() { - session1Finish = false; - runInference(session1); - session1Finish = true; - } - }).start(); -} - -if (session2Finish && session2Compile) { - new Thread(new Runnable() { - @Override - public void run() { - session2Finish = false; - runInference(session2); - session2Finish = true; - } - }).start(); -} -``` - -MindSpore Lite does not support multi-thread parallel execution of inference for a single [LiteSession](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html). Otherwise, the following error information is displayed: - -```bash -ERROR [mindspore/lite/src/lite_session.cc:297] RunGraph] 10 Not support multi-threading -``` - -### Viewing Logs - -If an exception occurs during inference, you can view logs to locate the fault. For the Android platform, use the `Logcat` command line to view the MindSpore Lite inference log information and use `MS_LITE` to filter the log information. 
- -```shell -logcat -s "MS_LITE" -``` - -### Obtaining the Version Number - -MindSpore Lite provides the [Version](https://www.mindspore.cn/doc/api_java/en/master/lite_session.html) method to obtain the version number, which is included in the `com.mindspore.lite.Version` header file. You can call this method to obtain the version number of MindSpore Lite. - -The following sample code from [MainActivity.java](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L215) demonstrates how to obtain the version number of MindSpore Lite: - -```java -import com.mindspore.lite.Version; -String version = Version.version(); -``` diff --git a/tutorials/lite/source_en/use/runtime_train.rst b/tutorials/lite/source_en/use/runtime_train.rst deleted file mode 100644 index 6225beb4ab8877ab67422965ef9699b44df1be4c..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/runtime_train.rst +++ /dev/null @@ -1,7 +0,0 @@ -Executing Model Training -================================= - -.. 
toctree:: - :maxdepth: 1 - - runtime_train_cpp \ No newline at end of file diff --git a/tutorials/lite/source_en/use/runtime_train_cpp.md b/tutorials/lite/source_en/use/runtime_train_cpp.md deleted file mode 100644 index 780eac2425f95bacba01cb293f0a19690ea89632..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_en/use/runtime_train_cpp.md +++ /dev/null @@ -1,557 +0,0 @@ -# Using C++ Interface to Perform Training - -`Linux` `Android` `C++` `Model Training` `Model Loading` `Data Preparation` `Intermediate` `Expert` - - - -- [Using C++ Interface to Perform Training](#using-c-interface-to-perform-training) - - [Overview](#overview) - - [Session Creation](#session-creation) - - [Reading Models](#reading-models) - - [Creating Contexts](#creating-contexts) - - [Creating Sessions](#creating-sessions) - - [Creating TrainLoop](#creating-trainloop) - - [Example](#example-1) - - [Data Processing](#data-processing) - - [Data Reading Pipeline](#data-reading-pipeline) - - [Data Preprocessing Pipeline](#data-preprocessing-pipeline) - - [Example](#example-2) - - [Execute Training](#execute-training) - - [Training](#training) - - [Evaluating](#evaluating) - - [Others](#others) - - [Session Mode Switching](#session-mode-switching) - - [Obtaining Input Tensors](#obtaining-input-tensors) - - [Obtaining Output Tensors](#obtaining-output-tensors) - - [Execute Training or Evaluating](#execute-training-or-evaluating) - - [Execute Session](#execute-session) - - [Execute Callback](#execute-callback) - - [Saving Model](#saving-model) - - - - - -## Overview - -The principal procedures of lite training is as follows: - -1. Design the network and export the `MindIR` model file by using the cloud side APIs. -2. Transfer the `MindIR` file to .ms model file. -3. Train, evaluate and save `ms` model files. - -> The model structure is saved in the transferred `ms` model file which will be load to the device platform for training. 
- -A sequence diagram explaining the train sequence is shown in the image below: - -![img](../images/side_train_sequence.png) - -In this diagram the drawn objects represents: - -- `OS`: The operator system of user. -- `User`: The operations of the user. -- `MindData`: Load data from the storage and perform pre-processing (e.g., reading an image, rescaling it to a given size and converting it to bitmap) during the model training. -- `ToD`: The training mechanism of MindSpore Lite. -- `MS Lite`: A software module provided by MindSpore Lite, that provides flatbuffer DeSerialization into a network of nodes and interconnecting tensors. It performs graph compilation and calls the graph executor for train and inference. -- `CreateTrainSession`: Create the object of the class `TrainSession`. -- `CreateTrainLoop`: Create the object of the class `TrainLoop`. -- `InitDataset`: The user self-defined functions which can load and process dataset. -- `train_loop`: The object of the class `TrainLoop`. -- `Train`: The member function of the class `TrainLoop`, which receives the vector of off-the-shelf or user self-defined callbacks objects. -- `Callbacks`: Execute the off-the-shelf or user self-defined callback functions. - -## Session Creation - -[TrainSession](https://www.mindspore.cn/doc/api_cpp/en/master/session.html#trainsession) is the main entrance of the MindSpore Lite framework. We can compile and execute graph models through `TrainSession` class. - -### Reading Models - -A Model file is flatbuffer-serialized file which was converted using the [MindSpore Model Converter Tool](https://www.mindspore.cn/tutorial/lite/en/master/use/converter_tool.html). These files have a `.ms` extension. Before model training and/or inference, the model needs to be loaded from the file system and parsed. 
Related operations are mainly implemented in the [`TrainModel`](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#trainmodel) class which holds the model data such as the network structure, tensors sizes, weights data and operators attributes. - -> In MindSpore Lite the user is not allowed to access the training model object, since it is being used by `TrainSession` during training. All interactions with training model object including instantiation, compilation and deletion are handled within `TrainSession`. - -### Creating Contexts - -[`Context`](https://www.mindspore.cn/doc/api_cpp/en/master/lite.html#context) is a MindSpore Lite Object which contains basic configuration parameters required by the sessions to guide graph compilation and execution. It allows to define the device to run the model, e.g., CPU or GPU, the number of threads used for training and inference and the memory allocation scheme. -Currently, only single threaded CPU device is supported by `TrainSession`. - -Once the `TrainSession` is created with the `Context` object, it is no longer needed and can be deleted. - -### Creating Sessions - -There are two methods to create a session: - -- The first API allows MindSpore Lite to access the filesystem and read the model from a file, parse it, compile it and produce a valid TrainSession object. The `Context` described above is passed to the TrainSession as a basic configuration. The static function has the following signature `TrainSession *TrainSession::CreateSession(const string& filename, const Context *context, bool mode)`, where `filename` is the model's file name, context is the `Context` and mode is the initial training mode of the session (Train/Eval). On Success, a fully compiled and ready to use `TrainSession` instance is returned by the function, this instance must be freed using `delete` on the termination of the process. 
- -- The second API is similar to the first but uses an in-memory copy of the flatbuffer in order to create the `TrainSession`. The static function has the following signature `TrainSession *TrainSession::CreateSession(const char* model_buf, size_t size, const Context *context, bool train_mode = false)`, where `model_buf` is a pointer to the in-memory buffer and `size` is its length. On Success, a fully compiled and ready-to-use `TrainSession` instance is returned by the function. If needed, the buf pointer can be freed immediately. The returned `TrainSession` instance must be freed using `delete` when no longer needed. - -### Creating TrainLoop - -User can create the object of the class `TrainLoop` by using the function `CreateTrainLoop` to call MindData APIs. We recommend the function `CreateTrainLoop`. The member function `CreateTrainLoop` of the class `TrainLoop` whose prototype is as follows: - - `TrainLoop *CreateTrainLoop(session::TrainSession *train_session, lite::Context *context, int batch_size = -1)` - -The following codes show ho to create a training session based on the multi-threads CPU by using the class `TrainLoop`. 
- -```cpp -#include "include/train_session.h" -#include "include/context.h" - -int CreateSession() { - mindspore::lite::Context context; - context.device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND; - context.device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false; - context.device_list_[0].device_type_ = mindspore::lite::DT_CPU; - context.thread_num_ = 2; - // Create Session - session_ = mindspore::session::TrainSession::CreateSession(ms_file_, &context); - MS_ASSERT(nullptr != session_); - loop_ = mindspore::session::TrainLoop::CreateTrainLoop(session_, &context); - acc_metrics_ = std::shared_ptr(new AccuracyMetrics); - loop_->Init({acc_metrics_.get()}); - return 0; -} -``` - -> Refer [Train a LeNet](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/train_lenet/src/net_runner.cc) for more details. - -## Data Processing - -### Data Reading Pipeline - -The class `Dataset` and its extension class (e.g., `MnistDataset` and `AlbumDataset`) have provided abundant data procssing API. Users only need to specify the dataset path and set the data processing operations for the model training by using the shared pointers from the related API. Reading pipeline will decode and load dataset during model training. Refer [Dataset](https://www.mindspore.cn/doc/api_cpp/en/master/dataset.html) for more detials. - -### Data Preprocessing Pipeline - -The class `TensorTransform` has provided abundant data preprocssing API and has the same function as the cloud side, (e.g., Dimension reshaping, data type casting and one-hot coding). The users only need to create the objects of the extension classes of `TensorTransform` and transfer them to the function `Map`. Refer [Vision](https://www.mindspore.cn/doc/api_cpp/en/master/vision.html) for more detials. 
- -### Example - -The following codes show how to read and process dataset by using the class `Dataset` and `TensorTransform`: - -```cpp -#include "include/datasets.h" -#include "include/context.h" - -int DataSetPipeline() { - train_ds_ = Mnist(data_dir_ + "/train", "all"); - TypeCast typecast_f("float32"); - Resize resize({h_, w_}); - train_ds_ = train_ds_->Map({&resize, &typecast_f}, {"image"}); - TypeCast typecast("int32"); - train_ds_ = train_ds_->Map({&typecast}, {"label"}); - train_ds_ = train_ds_->Shuffle(2); - train_ds_ = train_ds_->Batch(batch_size_, true); - if (verbose_) { - std::cout << "DatasetSize is " << train_ds_->GetDatasetSize() << std::endl; - } - if (train_ds_->GetDatasetSize() == 0) { - std::cout << "No relevant data was found in " << data_dir_ << std::endl; - MS_ASSERT(train_ds_->GetDatasetSize() != 0); - } - return 0; -} -``` - -## Execute Training - -MindSpore has provided some off-the-shelf callback classes for users (e.g., `accuracy_metrics`, `accuracy_monitor`, `ckpt_saver`, `classification_train_accuracy`, `loss_monitor` and `metrics`). The function `Train` and `Eval` of the class `TrainLoop` can set the model to the training or evaluation mode separately, specify the methods of the data processing and monitor the session status. 
- -### Training - -Create the objects of the off-the-shelf functions and call the Train function of the class TrainLoop to training: - -```cpp -int Train() { - struct mindspore::lite::StepLRLambda step_lr_lambda(1, 0.8); - mindspore::lite::LRScheduler step_lr_sched(mindspore::lite::StepLRLambda, static_cast(&step_lr_lambda), 1); - mindspore::lite::LossMonitor lm(100); - mindspore::lite::ClassificationTrainAccuracyMonitor am(1); - mindspore::lite::CkptSaver cs(1000, std::string("lenet")); - Rescaler rescale(255.0); - loop_->Train(epochs_, train_ds_.get(), std::vector{&rescale, &lm, &cs, &am, &step_lr_sched}); - return 0; -} -``` - -### Evaluating - -Also call the `Eval` function of the class `TrainLoop` to evaluate model. - -```cpp -float Eval() { - test_ds_ = Mnist(data_dir_ + "/test", "all"); - TypeCast typecast_f("float32"); - Resize resize({h_, w_}); - test_ds_ = test_ds_->Map({&resize, &typecast_f}, {"image"}); - TypeCast typecast("int32"); - test_ds_ = test_ds_->Map({&typecast}, {"label"}); - test_ds_ = test_ds_->Batch(batch_size_, true); - Rescaler rescale(255.0); - loop_->Eval(test_ds_.get(), std::vector{&rescale}); - std::cout << "Eval Accuracy is " << acc_metrics_->Eval() << std::endl; - return 0.0; -} -``` - -> With TrainSessions, a network can be used for both inference and training. These two modes differ in several aspects: -> -> - The input of the network: Running inference requires only the data, while running training requires both data and labels. -> - The output of the network: Running inference returns the predicted values in the output, while running in training mode returns the loss. -> - In training mode, the weights of the layers are updated in each Run, while in inference mode they are static. -> - Some layers behave differently in inference vs. training mode, e.g., updating the accumulated batch mean and variance in Batch Normalization layers. 
- -## Others - -### Session Mode Switching - -The functions `Train` and `Eval` in the class `TrainSession` are called by the functions `Train` and `Eval` in the class `TrainLoop` . User can switch session mode by calling the two functions directly, the prototypes are as follows: - -```cpp -/// \brief Set model to train mode -/// \return STATUS as an error code of compiling graph, STATUS is defined in errorcode.h -virtual int Train() = 0; - -/// \brief Set model to eval mode -/// \return STATUS as an error code of compiling graph, STATUS is defined in errorcode.h -virtual int Eval() = 0; -``` - -The following sample code shows how to set a `TrainSession` object to train mode. - -```cpp -// Assuming session is a valid instance of TrainSession -auto ret = session->Train(); -if (ret != RET_OK) { - std::cerr << "Could not set session to train mode" << std::endl; - return -1; -} -auto ret = session->Eval(); -if (ret != RET_OK) { - std::cerr << "Could not set session to eval mode" << std::endl; - return -1; -} -``` - -### Obtaining Input Tensors - -Before graph execution, whether it is during training or inference, the input data must be filled-in into the model input tensors. -MindSpore Lite provides the following methods to obtain model input tensors: - -1. Use the `GetInputsByTensorName` method to obtain model input tensors that are connected to the model input node based on the tensor name. - - ```cpp - /// \brief Get MindSpore input Tensors of model by the tensor name. - /// - /// \param[in] tensor_name Define tensor name. - /// - /// \return MindSpore Lite MSTensor. - virtual mindspore::tensor::MSTensor *GetInputsByTensorName(const std::string &tensor_name) const = 0; - ``` - -2. Use the `GetInputs` method to directly obtain the vectors of all model input tensors. - - ```cpp - /// \brief Get input MindSpore Lite MSTensors of model. - /// - /// \return The vector of MindSpore Lite MSTensor. 
- virtual std::vector GetInputs() const = 0; - ``` - - If the model requires more than one input tensor (this is certainly the case during training, where both data and labels serve as inputs of the network) it is the user's responsibility to know the inputs order or their tensorName. This can be obtained from the Python model. - Alternatively, one can deduce this information from the sizes of the input tensors. - -3. Copying Data - - After model input tensors are obtained, the data must be copied into the tensors. The following methods allows to access the size of the data, it's shape, the number of elements, the data type and the writable pointer. See also detailed description in the [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) API documentation. - - ```cpp - /// \brief Get byte size of data in MSTensor. - /// - /// \return Byte size of data in MSTensor. - virtual size_t Size() const = 0; - - /// \brief Get shape of the MindSpore Lite MSTensor. - /// - /// \return A vector of int as the shape of the MindSpore Lite MSTensor. - virtual std::vector shape() const = 0; - - /// \brief Get number of element in MSTensor. - /// - /// \return Number of element in MSTensor. - virtual int ElementsNum() const = 0; - - /// \brief Get data type of the MindSpore Lite MSTensor. - /// - /// \note TypeId is defined in mindspore/mindspore/core/ir/dtype/type_id.h. Only number types in TypeId enum are - /// suitable for MSTensor. - /// - /// \return MindSpore Lite TypeId of the MindSpore Lite MSTensor. - virtual TypeId data_type() const = 0; - - /// \brief Get the pointer of data in MSTensor. - /// - /// \note The data pointer can be used to both write and read data in MSTensor. - /// - /// \return The pointer points to data in MSTensor. - virtual void *MutableData() const = 0; - ``` - -4. Example - - The following sample code shows how to obtain the entire graph input `MSTensor` from `LiteSession` and enter the model input data to `MSTensor`. 
- - ```cpp - // Assuming session is a valid instance of TrainSession - auto inputs = session->GetInputs(); - - // Assuming the model has two input tensors, the first is for data and the second for labels - int data_index = 0; - int label_index = 1; - - if (inputs.size() != 2) { - std::cerr << "Unexpected amount of input tensors. Expected 2, model requires " << inputs.size() << std::endl; - return -1; - } - - // Assuming batch_size and data_size variables holds the Batch size and the size of a single data tensor, respectively: - // And assuming sparse labels are used - if ((inputs.at(data_index)->Size() != batch_size*data_size) || - (inputs.at(label_index)->ElementsNum() != batch_size)) { - std::cerr << "Input data size does not match model input" << std::endl; - return -1; - } - - // Assuming data_ptr is the pointer to a batch of data tensors - // and iassuming label_ptr is a pointer to a batch of label indices (obtained by the DataLoder) - auto *in_data = inputs.at(data_index)->MutableData(); - auto *in_labels = inputs.at(label_index)->MutableData(); - if ((in_data == nullptr)|| (in_labels == nullptr)) { - std::cerr << "Model's input tensor is nullptr" << std::endl; - return -1; - } - - memcpy(in_data, data_ptr, inputs.at(data_index)->Size()); - memcpy(in_labels, label_ptr, inputs.at(label_index)->Size()); - // After filling the input tensors the data_ptr and label_ptr may be freed - // The input tensors themselves are managed by MindSpore Lite and users are not allowed to access them or delete them - ``` - -Note: - -- The data layout in the model input tensors of MindSpore Lite must be NHWC (bathc size, height, weight and channel). -- The Tensors returned by `GetInputs` and `GetInputsByTensorName` methods shuold not be released by users. - -### Obtaining Output Tensors - -After each execution of the graph, the user might want to read the model's outputs, whether it is the loss in the case of training mode, or the predicted output in the case of evaluation mode. 
- -MindSpore Lite provides the following methods to obtain the model's output `MSTensor`. - -1. Use the `GetOutputsByNodeName` method to obtain the output tensors that belong to a certain node: - - ```cpp - /// \brief Get output MindSpore Lite MSTensors of model by node name. - /// - /// \param[in] node_name Define node name. - /// - /// \return The vector of MindSpore Lite MSTensor. - virtual std::vector GetOutputsByNodeName(const std::string &node_name) const = 0; - ``` - - The following sample code shows how to obtain the output `MSTensor` from `LiteSession` using the `GetOutputsByNodeName` method. - - ```cpp - // Assume that session is a vlaid TrainSession instance - // Assume that model has a output node named output_node_name_0. - auto output_vec = session->GetOutputsByNodeName("output_node_name_0"); - // Assume that output node named output_node_name_0 has only one output tensor. - auto out_tensor = output_vec.front(); - if (out_tensor == nullptr) { - std::cerr << "Output tensor is nullptr" << std::endl; - return -1; - } - ``` - -2. Use the `GetOutputByTensorName` method to obtain an output tensor, based on the tensor name. - - ```cpp - /// \brief Get output MindSpore Lite MSTensors of model by tensor name. - /// - /// \param[in] tensor_name Define tensor name. - /// - /// \return Pointer of MindSpore Lite MSTensor. - virtual mindspore::tensor::MSTensor *GetOutputByTensorName(const std::string &tensor_name) const = 0; - ``` - - The following sample code shows how to obtain the output `MSTensor` from `LiteSession` using the `GetOutputByTensorName` method. 
- - ```cpp - // Assume that session is a vlaid TrainSession instance - // We can use GetOutputTensorNames method to get the names of all the output tensors of the model - auto tensor_names = session->GetOutputTensorNames(); - // Use output tensor name returned by GetOutputTensorNames as key - for (auto tensor_name : tensor_names) { - auto out_tensor = session->GetOutputByTensorName(tensor_name); - if (out_tensor == nullptr) { - std::cerr << "Output tensor is nullptr" << std::endl; - return -1; - } - } - ``` - -3. Use the `GetOutputs` method to obtain all the output tensors, ordered by their tensor name: - - ```cpp - /// \brief Get output MindSpore Lite MSTensors of model mapped by the tensor name. - /// - /// \return The map of output tensor name and MindSpore Lite MSTensor. - virtual std::unordered_map GetOutputs() const = 0; - ``` - - After model output tensors are obtained, you need to enter data into the tensors. Use the `Size` method of `MSTensor` to obtain the size of the data to be entered into tensors, use the `data_type` method to obtain the data type of `MSTensor`, and use the `MutableData` method of `MSTensor` to obtain the writable pointer. - - ```cpp - /// \brief Get byte size of data in MSTensor. - /// - /// \return Byte size of data in MSTensor. - virtual size_t Size() const = 0; - - /// \brief Get data type of the MindSpore Lite MSTensor. - /// - /// \note TypeId is defined in mindspore/mindspore/core/ir/dtype/type_id.h. Only number types in TypeId enum are - /// suitable for MSTensor. - /// - /// \return MindSpore Lite TypeId of the MindSpore Lite MSTensor. - virtual TypeId data_type() const = 0; - - /// \brief Get the pointer of data in MSTensor. - /// - /// \note The data pointer can be used to both write and read data in MSTensor. - /// - /// \return The pointer points to data in MSTensor. 
- virtual void *MutableData() const = 0; - ``` - - The following sample code shows how to obtain the output `MSTensor` from `LiteSession` using the `GetOutputs` method and print the first ten data or all data records of each output `MSTensor`. - - ```cpp - // Assume that session is a vlaid TrainSession object - auto output_map = session->GetOutputs(); - // Assume that the model has only one output node. - auto out_node_iter = output_map.begin(); - std::string name = out_node_iter->first; - // Assume that the unique output node has only one output tensor. - auto out_tensor = out_node_iter->second; - if (out_tensor == nullptr) { - std::cerr << "Output tensor is nullptr" << std::endl; - return -1; - } - // Assume that the data format of output data is float 32. - if (out_tensor->data_type() != mindspore::TypeId::kNumberTypeFloat32) { - std::cerr << "Output of lenet should in float32" << std::endl; - return -1; - } - auto *out_data = reinterpret_cast(out_tensor->MutableData()); - if (out_data == nullptr) { - std::cerr << "Data of out_tensor is nullptr" << std::endl; - return -1; - } - // Print the first 10 float data or all output data of the output tensor. - std::cout << "Output data: "; - for (size_t i = 0; i < 10 && i < out_tensor->ElementsNum(); i++) { - std::cout << " " << out_data[i]; - } - std::cout << std::endl; - // The elements in outputs do not need to be free by users, because outputs are managed by the MindSpore Lite. - ``` - -Note that the vectors or map returned by the `GetOutputsByNodeName`, `GetOutputByTensorName` and `GetOutputs` methods do not need to be released by users. - -### Execute Training or Evaluating - -#### Execute Session - -Whether a `TrainSession` object is in the training mode or in eval mode, the way to make it execute, i.e., to run the data through the graph, is to call the `RunGraph` method. - -```cpp -/// \brief Run session with callbacks. 
-/// -/// \param[in] before Define a call_back_function to be called before running each node. -/// \param[in] after Define a call_back_function called after running each node. -/// -/// \note RunGraph should be called after CompileGraph. -/// -/// \return STATUS as an error code of running graph, STATUS is defined in errorcode.h. -virtual int RunGraph(const KernelCallBack &before = nullptr, const KernelCallBack &after = nullptr) = 0; -``` - -Prior to run each graph, the user must make sure that the data is properly loaded to the input tensors. - -#### Execute Callback - -MindSpore Lite framework allows the user to set two callback functions that will be called before and after running each node. Such functions can assist the developer in tracing the network, debugging it and measuring how long it took run each node. The callback parameters are as follows: - -- The current input tensors of the running node -- The current output tensors of the running node -- Name and type of the running node - -While the node name and type will be the same before and after running the node, the output tensors will differ between the two callbacks invocations. -For some operators, also the input tesnors will vary. - -```cpp -/// \brief CallBackParam defines input arguments for callback function. -struct CallBackParam { - std::string node_name; /**< node name argument */ - std::string node_type; /**< node type argument */ -}; - -/// \brief KernelCallBack defined the function pointer for callBack. -using KernelCallBack = std::function inputs, std::vector outputs, const CallBackParam &opInfo)>; - -``` - -The following sample code demonstrates how to define two callback functions, the first will be called before running each layer, and the second after running it. 
- -```cpp -// Assuming session is a valid instance of TrainSession and that data was assigned to the input tensors - -// Definition of a callback function that will be called before forwarding operator -bool before_callback(const std::vector &inputs, const std::vector &outputs, - const mindspore::CallBackParam &call_param) { - std::cout << call_param.node_name << std::endl; - std::cout << "Before forwarding: input size is " << inputs.size() << std::endl; - return true; -}; - -// Definition of callback function that will be called after forwarding operator -bool after_callback(const std::vector &inputs, const std::vector &outputs, - const mindspore::CallBackParam &call_param) { - std::cout << "After forwarding: output size is " << outputs.size() << std::endl; - return true; -}; - -// Hand over the callback functions to RunGraph when performing the training or inference -ret = session_->RunGraph(before_callback, after_callback); -if (ret != RET_OK) { - MS_LOG(ERROR) << "Run graph failed."; - return RET_ERROR; -} -``` - -### Saving Model - -The function `CkptSaver` calls the function `SaveToFile` actually. The user can also call `SaveToFile` directly to save the trained model. - -```cpp - /// \brief Save the trained model into a flatbuffer file - /// - /// \param[in] filename Filename to save flatbuffer to - /// - /// \return 0 on success or -1 in case of error - virtual int SaveToFile(const std::string &filename) const = 0; -``` - -You can load the saved model to do re-training or inference. - -> Please use [benchmark_train](https://www.mindspore.cn/tutorial/lite/en/master/use/benchmark_train_tool.html) to measure the performance and accuarcy of the trained models. 
diff --git a/tutorials/lite/source_zh_cn/_static/css/bootstrap.min.css b/tutorials/lite/source_zh_cn/_static/css/bootstrap.min.css deleted file mode 100644 index 35722284f98a1189566e9200862b02aac7cbec50..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/_static/css/bootstrap.min.css +++ /dev/null @@ -1,6 +0,0 @@ -/*! - * Bootstrap v3.3.7 (http://getbootstrap.com) - * Copyright 2011-2016 Twitter, Inc. - * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) - *//*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */html{overflow-y: hidden!important;font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}dfn{font-style:italic}h1{margin:.67em 0;font-size:2em}mark{color:#000;background:#ff0}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{height:0;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;font-size:1em}button,input,optgroup,select,textarea{margin:0;font:inherit;color:inherit}button{overflow:visible}button,select{text-transform:none}button,html input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html 
input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}input{line-height:normal}input[type=checkbox],input[type=radio]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}fieldset{padding:.35em .625em .75em;margin:0 2px;border:1px solid silver}legend{padding:0;border:0}textarea{overflow:auto}optgroup{font-weight:700}table{border-spacing:0;border-collapse:collapse}td,th{padding:0}/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{*,:after,:before{color:#000!important;text-shadow:none!important;background:0 0!important;-webkit-box-shadow:none!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}blockquote,pre{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table td,.table th{background-color:#fff!important}.table-bordered td,.table-bordered th{border:1px solid #ddd!important}}@font-face{font-family:'Glyphicons Halflings';src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix) format('embedded-opentype'),url(../fonts/glyphicons-halflings-regular.woff2) 
format('woff2'),url(../fonts/glyphicons-halflings-regular.woff) format('woff'),url(../fonts/glyphicons-halflings-regular.ttf) format('truetype'),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular) format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons Halflings';font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{c
ontent:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.glyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-rig
ht:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left
:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:b
efore{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{content:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content
:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before{content:"\e227"}.glyphicon-btc:before{content:"\e227"}.glyphicon-xbt:before{content:"\e227"}.glyphicon-yen:before{content:"\00a5"}.glyphicon-jpy:before{content:"\00a5"}.glyphicon-ruble:before{content:"\20bd"}.glyphicon-rub:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}:after,:before{-webkit-box-sizing:border-b
ox;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:rgba(0,0,0,0)}body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#333;background-color:#fff}button,input,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#337ab7;text-decoration:none}a:focus,a:hover{color:#23527c;text-decoration:underline}a:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.carousel-inner>.item>a>img,.carousel-inner>.item>img,.img-responsive,.thumbnail a>img,.thumbnail>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{display:inline-block;max-width:100%;height:auto;padding:4px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role=button]{cursor:pointer}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-weight:400;line-height:1;color:#777}.h1,.h2,.h3,h1,h2,h3{margin-top:20px;margin-bottom:10px}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small{font-size:65%}.h4,.h5,.h6,h4,h5,h6{margin-top:10px;margin-bottom:10px}.h4 .small,.h4 
small,.h5 .small,.h5 small,.h6 .small,.h6 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-size:75%}.h1,h1{font-size:36px}.h2,h2{font-size:30px}.h3,h3{font-size:24px}.h4,h4{font-size:18px}.h5,h5{font-size:14px}.h6,h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}@media (min-width:768px){.lead{font-size:21px}}.small,small{font-size:85%}.mark,mark{padding:.2em;background-color:#fcf8e3}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#777}.text-primary{color:#337ab7}a.text-primary:focus,a.text-primary:hover{color:#286090}.text-success{color:#3c763d}a.text-success:focus,a.text-success:hover{color:#2b542c}.text-info{color:#31708f}a.text-info:focus,a.text-info:hover{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:focus,a.text-warning:hover{color:#66512c}.text-danger{color:#a94442}a.text-danger:focus,a.text-danger:hover{color:#843534}.bg-primary{color:#fff;background-color:#337ab7}a.bg-primary:focus,a.bg-primary:hover{background-color:#286090}.bg-success{background-color:#dff0d8}a.bg-success:focus,a.bg-success:hover{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:focus,a.bg-info:hover{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:focus,a.bg-warning:hover{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:focus,a.bg-danger:hover{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}ol,ul{margin-top:0;margin-bottom:10px}ol ol,ol ul,ul ol,ul 
ul{margin-bottom:0}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;margin-left:-5px;list-style:none}.list-inline>li{display:inline-block;padding-right:5px;padding-left:5px}dl{margin-top:0;margin-bottom:20px}dd,dt{line-height:1.42857143}dt{font-weight:700}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[data-original-title],abbr[title]{cursor:help;border-bottom:1px dotted #777}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #eee}blockquote ol:last-child,blockquote p:last-child,blockquote ul:last-child{margin-bottom:0}blockquote .small,blockquote footer,blockquote small{display:block;font-size:80%;line-height:1.42857143;color:#777}blockquote .small:before,blockquote footer:before,blockquote small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;text-align:right;border-right:5px solid #eee;border-left:0}.blockquote-reverse .small:before,.blockquote-reverse footer:before,.blockquote-reverse small:before,blockquote.pull-right .small:before,blockquote.pull-right footer:before,blockquote.pull-right small:before{content:''}.blockquote-reverse .small:after,.blockquote-reverse footer:after,.blockquote-reverse small:after,blockquote.pull-right .small:after,blockquote.pull-right footer:after,blockquote.pull-right small:after{content:'\00A0 \2014'}address{margin-bottom:20px;font-style:normal;line-height:1.42857143}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{padding:2px 4px;font-size:90%;color:#c7254e;background-color:#f9f2f4;border-radius:4px}kbd{padding:2px 4px;font-size:90%;color:#fff;background-color:#333;border-radius:3px;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.25);box-shadow:inset 0 -1px 0 
rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;font-weight:700;-webkit-box-shadow:none;box-shadow:none}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857143;color:#333;word-break:break-all;word-wrap:break-word;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.container-fluid{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.row{margin-right:-15px;margin-left:-15px}.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{position:relative;min-height:1px;padding-right:15px;padding-left:15px}.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{float:left}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.666
66667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:auto}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:auto}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media 
(min-width:768px){.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:auto}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:auto}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media 
(min-width:992px){.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:auto}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:auto}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media 
(min-width:1200px){.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:auto}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:auto}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}table{background-color:transparent}caption{padding-top:8px;padding-bottom:8px;color:#777;text-align:left}th{text-align:left}.table{width:100%;max-width:100%;margin-bottom:20px}.table>tbody>tr>td,.table>tbody>tr>th,.table>tfoot>tr>td,.table>
tfoot>tr>th,.table>thead>tr>td,.table>thead>tr>th{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>td,.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>td,.table>thead:first-child>tr:first-child>th{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>tbody>tr>td,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>td,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>thead>tr>th{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>tbody>tr>td,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>td,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border:1px solid #ddd}.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}table col[class*=col-]{position:static;display:table-column;float:none}table td[class*=col-],table 
th[class*=col-]{position:static;display:table-cell;float:none}.table>tbody>tr.active>td,.table>tbody>tr.active>th,.table>tbody>tr>td.active,.table>tbody>tr>th.active,.table>tfoot>tr.active>td,.table>tfoot>tr.active>th,.table>tfoot>tr>td.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>thead>tr.active>th,.table>thead>tr>td.active,.table>thead>tr>th.active{background-color:#f5f5f5}.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr.active:hover>th,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover{background-color:#e8e8e8}.table>tbody>tr.success>td,.table>tbody>tr.success>th,.table>tbody>tr>td.success,.table>tbody>tr>th.success,.table>tfoot>tr.success>td,.table>tfoot>tr.success>th,.table>tfoot>tr>td.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>thead>tr.success>th,.table>thead>tr>td.success,.table>thead>tr>th.success{background-color:#dff0d8}.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr.success:hover>th,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover{background-color:#d0e9c6}.table>tbody>tr.info>td,.table>tbody>tr.info>th,.table>tbody>tr>td.info,.table>tbody>tr>th.info,.table>tfoot>tr.info>td,.table>tfoot>tr.info>th,.table>tfoot>tr>td.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>thead>tr.info>th,.table>thead>tr>td.info,.table>thead>tr>th.info{background-color:#d9edf7}.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr.info:hover>th,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover{background-color:#c4e3f3}.table>tbody>tr.warning>td,.table>tbody>tr.warning>th,.table>tbody>tr>td.warning,.table>tbody>tr>th.warning,.table>tfoot>tr.warning>td,.table>tfoot>tr.warning>th,.table>tfoot>tr>td.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>thead>tr.warning>th,.table>thead>tr>td.warning,.table>t
head>tr>th.warning{background-color:#fcf8e3}.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr.warning:hover>th,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover{background-color:#faf2cc}.table>tbody>tr.danger>td,.table>tbody>tr.danger>th,.table>tbody>tr>td.danger,.table>tbody>tr>th.danger,.table>tfoot>tr.danger>td,.table>tfoot>tr.danger>th,.table>tfoot>tr>td.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>thead>tr.danger>th,.table>thead>tr>td.danger,.table>thead>tr>th.danger{background-color:#f2dede}.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr.danger:hover>th,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover{background-color:#ebcccc}.table-responsive{min-height:.01%;overflow-x:auto}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>td,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>thead>tr>th{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:inherit;color:#333;border:0;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px;font-weight:700}input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=checkbox],input[type=radio]{margin:4px 0 0;margin-top:1px\9;line-height:normal}input[type=file]{display:block}input[type=range]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type=file]:focus,input[type=checkbox]:focus,input[type=radio]:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}output{display:block;padding-top:7px;font-size:14px;line-height:1.42857143;color:#555}.form-control{display:block;width:100%;height:34px;padding:6px 12px;font-size:14px;line-height:1.42857143;color:#555;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color ease-in-out .15s,-webkit-box-shadow ease-in-out .15s;-o-transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control::-ms-expand{background-color:transparent;border:0}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#eee;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}input[type=search]{-webkit-appearance:none}@media screen and (-webkit-min-device-pixel-ratio:0){input[type=date].form-control,input[type=time].form-control,input[type=datetime-local].form-control,input[type=month].form-control{line-height:34px}.input-group-sm input[type=date],.input-group-sm input[type=time],.input-group-sm input[type=datetime-local],.input-group-sm input[type=month],input[type=date].input-sm,input[type=time].input-sm,input[type=datetime-local].input-sm,input[type=month].input-sm{line-height:30px}.input-group-lg input[type=date],.input-group-lg input[type=time],.input-group-lg input[type=datetime-local],.input-group-lg 
input[type=month],input[type=date].input-lg,input[type=time].input-lg,input[type=datetime-local].input-lg,input[type=month].input-lg{line-height:46px}}.form-group{margin-bottom:15px}.checkbox,.radio{position:relative;display:block;margin-top:10px;margin-bottom:10px}.checkbox label,.radio label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox input[type=checkbox],.checkbox-inline input[type=checkbox],.radio input[type=radio],.radio-inline input[type=radio]{position:absolute;margin-top:4px\9;margin-left:-20px}.checkbox+.checkbox,.radio+.radio{margin-top:-5px}.checkbox-inline,.radio-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;font-weight:400;vertical-align:middle;cursor:pointer}.checkbox-inline+.checkbox-inline,.radio-inline+.radio-inline{margin-top:0;margin-left:10px}fieldset[disabled] input[type=checkbox],fieldset[disabled] input[type=radio],input[type=checkbox].disabled,input[type=checkbox][disabled],input[type=radio].disabled,input[type=radio][disabled]{cursor:not-allowed}.checkbox-inline.disabled,.radio-inline.disabled,fieldset[disabled] .checkbox-inline,fieldset[disabled] .radio-inline{cursor:not-allowed}.checkbox.disabled label,.radio.disabled label,fieldset[disabled] .checkbox label,fieldset[disabled] .radio label{cursor:not-allowed}.form-control-static{min-height:34px;padding-top:7px;padding-bottom:7px;margin-bottom:0}.form-control-static.input-lg,.form-control-static.input-sm{padding-right:0;padding-left:0}.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-sm{height:30px;line-height:30px}select[multiple].input-sm,textarea.input-sm{height:auto}.form-group-sm .form-control{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm select[multiple].form-control,.form-group-sm textarea.form-control{height:auto}.form-group-sm 
.form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-lg{height:46px;line-height:46px}select[multiple].input-lg,textarea.input-lg{height:auto}.form-group-lg .form-control{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg select[multiple].form-control,.form-group-lg textarea.form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.3333333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:2;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.form-group-lg .form-control+.form-control-feedback,.input-group-lg+.form-control-feedback,.input-lg+.form-control-feedback{width:46px;height:46px;line-height:46px}.form-group-sm .form-control+.form-control-feedback,.input-group-sm+.form-control-feedback,.input-sm+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .checkbox,.has-success .checkbox-inline,.has-success .control-label,.has-success .help-block,.has-success .radio,.has-success .radio-inline,.has-success.checkbox label,.has-success.checkbox-inline label,.has-success.radio label,.has-success.radio-inline label{color:#3c763d}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;background-color:#dff0d8;border-color:#3c763d}.has-success 
.form-control-feedback{color:#3c763d}.has-warning .checkbox,.has-warning .checkbox-inline,.has-warning .control-label,.has-warning .help-block,.has-warning .radio,.has-warning .radio-inline,.has-warning.checkbox label,.has-warning.checkbox-inline label,.has-warning.radio label,.has-warning.radio-inline label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;background-color:#fcf8e3;border-color:#8a6d3b}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .checkbox,.has-error .checkbox-inline,.has-error .control-label,.has-error .help-block,.has-error .radio,.has-error .radio-inline,.has-error.checkbox label,.has-error.checkbox-inline label,.has-error.radio label,.has-error.radio-inline label{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;background-color:#f2dede;border-color:#a94442}.has-error .form-control-feedback{color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-static{display:inline-block}.form-inline 
.input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .form-control,.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .checkbox,.form-inline .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .checkbox label,.form-inline .radio label{padding-left:0}.form-inline .checkbox input[type=checkbox],.form-inline .radio input[type=radio]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .checkbox,.form-horizontal .checkbox-inline,.form-horizontal .radio,.form-horizontal .radio-inline{padding-top:7px;margin-top:0;margin-bottom:0}.form-horizontal .checkbox,.form-horizontal .radio{min-height:27px}.form-horizontal .form-group{margin-right:-15px;margin-left:-15px}@media (min-width:768px){.form-horizontal .control-label{padding-top:7px;margin-bottom:0;text-align:right}}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:11px;font-size:18px}}@media (min-width:768px){.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;padding:6px 12px;margin-bottom:0;font-size:14px;font-weight:400;line-height:1.42857143;text-align:center;white-space:nowrap;vertical-align:middle;-ms-touch-action:manipulation;touch-action:manipulation;cursor:pointer;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background-image:none;border:1px solid transparent;border-radius:4px}.btn.active.focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn:active:focus,.btn:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}.btn.focus,.btn:focus,.btn:hover{color:#333;text-decoration:none}.btn.active,.btn:active{background-image:none;outline:0;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none;opacity:.65}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default.focus,.btn-default:focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default:hover{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active.focus,.btn-default.active:focus,.btn-default.active:hover,.btn-default:active.focus,.btn-default:active:focus,.btn-default:active:hover,.open>.dropdown-toggle.btn-default.focus,.open>.dropdown-toggle.btn-default:focus,.open>.dropdown-toggle.btn-default:hover{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{background-image:none}.btn-default.disabled.focus,.btn-default.disabled:focus,.btn-default.disabled:hover,.btn-default[disabled].focus,.btn-default[disabled]:focus,.btn-default[disabled]:hover,fieldset[disabled] .btn-default.focus,fieldset[disabled] .btn-default:focus,fieldset[disabled] .btn-default:hover{background-color:#fff;border-color:#ccc}.btn-default 
.badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#337ab7;border-color:#2e6da4}.btn-primary.focus,.btn-primary:focus{color:#fff;background-color:#286090;border-color:#122b40}.btn-primary:hover{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active.focus,.btn-primary.active:focus,.btn-primary.active:hover,.btn-primary:active.focus,.btn-primary:active:focus,.btn-primary:active:hover,.open>.dropdown-toggle.btn-primary.focus,.open>.dropdown-toggle.btn-primary:focus,.open>.dropdown-toggle.btn-primary:hover{color:#fff;background-color:#204d74;border-color:#122b40}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{background-image:none}.btn-primary.disabled.focus,.btn-primary.disabled:focus,.btn-primary.disabled:hover,.btn-primary[disabled].focus,.btn-primary[disabled]:focus,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary.focus,fieldset[disabled] .btn-primary:focus,fieldset[disabled] .btn-primary:hover{background-color:#337ab7;border-color:#2e6da4}.btn-primary 
.badge{color:#337ab7;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success.focus,.btn-success:focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success:hover{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active.focus,.btn-success.active:focus,.btn-success.active:hover,.btn-success:active.focus,.btn-success:active:focus,.btn-success:active:hover,.open>.dropdown-toggle.btn-success.focus,.open>.dropdown-toggle.btn-success:focus,.open>.dropdown-toggle.btn-success:hover{color:#fff;background-color:#398439;border-color:#255625}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{background-image:none}.btn-success.disabled.focus,.btn-success.disabled:focus,.btn-success.disabled:hover,.btn-success[disabled].focus,.btn-success[disabled]:focus,.btn-success[disabled]:hover,fieldset[disabled] .btn-success.focus,fieldset[disabled] .btn-success:focus,fieldset[disabled] .btn-success:hover{background-color:#5cb85c;border-color:#4cae4c}.btn-success 
.badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info.focus,.btn-info:focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info:hover{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active.focus,.btn-info.active:focus,.btn-info.active:hover,.btn-info:active.focus,.btn-info:active:focus,.btn-info:active:hover,.open>.dropdown-toggle.btn-info.focus,.open>.dropdown-toggle.btn-info:focus,.open>.dropdown-toggle.btn-info:hover{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{background-image:none}.btn-info.disabled.focus,.btn-info.disabled:focus,.btn-info.disabled:hover,.btn-info[disabled].focus,.btn-info[disabled]:focus,.btn-info[disabled]:hover,fieldset[disabled] .btn-info.focus,fieldset[disabled] .btn-info:focus,fieldset[disabled] .btn-info:hover{background-color:#5bc0de;border-color:#46b8da}.btn-info 
.badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning.focus,.btn-warning:focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning:hover{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active.focus,.btn-warning.active:focus,.btn-warning.active:hover,.btn-warning:active.focus,.btn-warning:active:focus,.btn-warning:active:hover,.open>.dropdown-toggle.btn-warning.focus,.open>.dropdown-toggle.btn-warning:focus,.open>.dropdown-toggle.btn-warning:hover{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{background-image:none}.btn-warning.disabled.focus,.btn-warning.disabled:focus,.btn-warning.disabled:hover,.btn-warning[disabled].focus,.btn-warning[disabled]:focus,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning.focus,fieldset[disabled] .btn-warning:focus,fieldset[disabled] .btn-warning:hover{background-color:#f0ad4e;border-color:#eea236}.btn-warning 
.badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger.focus,.btn-danger:focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger:hover{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active.focus,.btn-danger.active:focus,.btn-danger.active:hover,.btn-danger:active.focus,.btn-danger:active:focus,.btn-danger:active:hover,.open>.dropdown-toggle.btn-danger.focus,.open>.dropdown-toggle.btn-danger:focus,.open>.dropdown-toggle.btn-danger:hover{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{background-image:none}.btn-danger.disabled.focus,.btn-danger.disabled:focus,.btn-danger.disabled:hover,.btn-danger[disabled].focus,.btn-danger[disabled]:focus,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger.focus,fieldset[disabled] .btn-danger:focus,fieldset[disabled] .btn-danger:hover{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{font-weight:400;color:#337ab7;border-radius:0}.btn-link,.btn-link.active,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:active,.btn-link:focus,.btn-link:hover{border-color:transparent}.btn-link:focus,.btn-link:hover{color:#23527c;text-decoration:underline;background-color:transparent}.btn-link[disabled]:focus,.btn-link[disabled]:hover,fieldset[disabled] .btn-link:focus,fieldset[disabled] .btn-link:hover{color:#777;text-decoration:none}.btn-group-lg>.btn,.btn-lg{padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.btn-group-sm>.btn,.btn-sm{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-xs>.btn,.btn-xs{padding:1px 
5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type=button].btn-block,input[type=reset].btn-block,input[type=submit].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition-timing-function:ease;-o-transition-timing-function:ease;transition-timing-function:ease;-webkit-transition-duration:.35s;-o-transition-duration:.35s;transition-duration:.35s;-webkit-transition-property:height,visibility;-o-transition-property:height,visibility;transition-property:height,visibility}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px dashed;border-top:4px solid\9;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown,.dropup{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;font-size:14px;text-align:left;list-style:none;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,.175);box-shadow:0 6px 12px rgba(0,0,0,.175)}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 
20px;clear:both;font-weight:400;line-height:1.42857143;color:#333;white-space:nowrap}.dropdown-menu>li>a:focus,.dropdown-menu>li>a:hover{color:#262626;text-decoration:none;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:focus,.dropdown-menu>.active>a:hover{color:#fff;text-decoration:none;background-color:#337ab7;outline:0}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{color:#777}.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{text-decoration:none;cursor:not-allowed;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{right:0;left:auto}.dropdown-menu-left{right:auto;left:0}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.42857143;color:#777;white-space:nowrap}.dropdown-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:990}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{content:"";border-top:0;border-bottom:4px dashed;border-bottom:4px solid\9}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{right:0;left:auto}.navbar-right .dropdown-menu-left{right:auto;left:0}}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;float:left}.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:2}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar .btn,.btn-toolbar 
.btn-group,.btn-toolbar .input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-bottom-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-right:8px;padding-left:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-right:12px;padding-left:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn .caret{margin-left:0}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 
5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-left-radius:0;border-top-right-radius:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-top-right-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{display:table-cell;float:none;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle=buttons]>.btn input[type=checkbox],[data-toggle=buttons]>.btn input[type=radio],[data-toggle=buttons]>.btn-group>.btn input[type=checkbox],[data-toggle=buttons]>.btn-group>.btn input[type=radio]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*=col-]{float:none;padding-right:0;padding-left:0}.input-group 
.form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group .form-control:focus{z-index:3}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn,textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn,textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn{height:auto}.input-group .form-control,.input-group-addon,.input-group-btn{display:table-cell}.input-group .form-control:not(:first-child):not(:last-child),.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm{padding:5px 
10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type=checkbox],.input-group-addon input[type=radio]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn-group:not(:last-child)>.btn,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:first-child>.btn-group:not(:first-child)>.btn,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle{border-top-left-radius:0;border-bottom-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:active,.input-group-btn>.btn:focus,.input-group-btn>.btn:hover{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:2;margin-left:-1px}.nav{padding-left:0;margin-bottom:0;list-style:none}.nav>li{position:relative;display:block}.nav>li>a{position:relative;display:block;padding:10px 15px}.nav>li>a:focus,.nav>li>a:hover{text-decoration:none;background-color:#eee}.nav>li.disabled>a{color:#777}.nav>li.disabled>a:focus,.nav>li.disabled>a:hover{color:#777;text-decoration:none;cursor:not-allowed;background-color:transparent}.nav .open>a,.nav .open>a:focus,.nav .open>a:hover{background-color:#eee;border-color:#337ab7}.nav .nav-divider{height:1px;margin:9px 
0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid transparent;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:focus,.nav-tabs>li.active>a:hover{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0}}.nav-tabs.nav-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border-bottom-color:#fff}}.nav-pills>li{float:left}.nav-pills>li>a{border-radius:4px}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:focus,.nav-pills>li.active>a:hover{color:#fff;background-color:#337ab7}.nav-stacked>li{float:none}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li{float:none}.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media 
(min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border-bottom-color:#fff}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}@media (min-width:768px){.navbar{border-radius:4px}}@media (min-width:768px){.navbar-header{float:left}}.navbar-collapse{padding-right:15px;padding-left:15px;overflow-x:visible;-webkit-overflow-scrolling:touch;border-top:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1)}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar-collapse{width:auto;border-top:0;-webkit-box-shadow:none;box-shadow:none}.navbar-collapse.collapse{display:block!important;height:auto!important;padding-bottom:0;overflow:visible!important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse{padding-right:0;padding-left:0}}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:340px}@media (max-device-width:480px) and (orientation:landscape){.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:200px}}.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:-15px;margin-left:-15px}@media 
(min-width:768px){.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:0;margin-left:0}}.navbar-static-top{z-index:1000;border-width:0 0 1px}@media (min-width:768px){.navbar-static-top{border-radius:0}}.navbar-fixed-bottom,.navbar-fixed-top{position:fixed;right:0;left:0;z-index:1030}@media (min-width:768px){.navbar-fixed-bottom,.navbar-fixed-top{border-radius:0}}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;height:50px;padding:15px 15px;font-size:18px;line-height:20px}.navbar-brand:focus,.navbar-brand:hover{text-decoration:none}.navbar-brand>img{display:block}@media (min-width:768px){.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;padding:9px 10px;margin-top:8px;margin-right:15px;margin-bottom:8px;background-color:transparent;background-image:none;border:1px solid transparent;border-radius:4px}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}@media (min-width:768px){.navbar-toggle{display:none}}.navbar-nav{margin:7.5px -15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;-webkit-box-shadow:none;box-shadow:none}.navbar-nav .open .dropdown-menu .dropdown-header,.navbar-nav .open .dropdown-menu>li>a{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:focus,.navbar-nav .open .dropdown-menu>li>a:hover{background-image:none}}@media 
(min-width:768px){.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}}.navbar-form{padding:10px 15px;margin-top:8px;margin-right:-15px;margin-bottom:8px;margin-left:-15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1)}@media (min-width:768px){.navbar-form .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .form-control-static{display:inline-block}.navbar-form .input-group{display:inline-table;vertical-align:middle}.navbar-form .input-group .form-control,.navbar-form .input-group .input-group-addon,.navbar-form .input-group .input-group-btn{width:auto}.navbar-form .input-group>.form-control{width:100%}.navbar-form .control-label{margin-bottom:0;vertical-align:middle}.navbar-form .checkbox,.navbar-form .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.navbar-form .checkbox label,.navbar-form .radio label{padding-left:0}.navbar-form .checkbox input[type=checkbox],.navbar-form .radio input[type=radio]{position:relative;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}.navbar-form .form-group:last-child{margin-bottom:0}}@media (min-width:768px){.navbar-form{width:auto;padding-top:0;padding-bottom:0;margin-right:0;margin-left:0;border:0;-webkit-box-shadow:none;box-shadow:none}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-left-radius:0;border-top-right-radius:0}.navbar-fixed-bottom 
.navbar-nav>li>.dropdown-menu{margin-bottom:0;border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-right:15px;margin-left:15px}}@media (min-width:768px){.navbar-left{float:left!important}.navbar-right{float:right!important;margin-right:-15px}.navbar-right~.navbar-right{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default .navbar-brand:focus,.navbar-default .navbar-brand:hover{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a{color:#777}.navbar-default .navbar-nav>li>a:focus,.navbar-default .navbar-nav>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:focus,.navbar-default .navbar-nav>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:focus,.navbar-default .navbar-nav>.disabled>a:hover{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:focus,.navbar-default .navbar-toggle:hover{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:focus,.navbar-default .navbar-nav>.open>a:hover{color:#555;background-color:#e7e7e7}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-default .navbar-nav .open 
.dropdown-menu>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-default .btn-link{color:#777}.navbar-default .btn-link:focus,.navbar-default .btn-link:hover{color:#333}.navbar-default .btn-link[disabled]:focus,.navbar-default .btn-link[disabled]:hover,fieldset[disabled] .navbar-default .btn-link:focus,fieldset[disabled] .navbar-default .btn-link:hover{color:#ccc}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#9d9d9d}.navbar-inverse .navbar-brand:focus,.navbar-inverse .navbar-brand:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-text{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a:focus,.navbar-inverse .navbar-nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:focus,.navbar-inverse .navbar-nav>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:focus,.navbar-inverse .navbar-nav>.disabled>a:hover{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:focus,.navbar-inverse .navbar-toggle:hover{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse 
.navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:focus,.navbar-inverse .navbar-nav>.open>a:hover{color:#fff;background-color:#080808}@media (max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#9d9d9d}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .btn-link{color:#9d9d9d}.navbar-inverse .btn-link:focus,.navbar-inverse .btn-link:hover{color:#fff}.navbar-inverse .btn-link[disabled]:focus,.navbar-inverse .btn-link[disabled]:hover,fieldset[disabled] .navbar-inverse .btn-link:focus,fieldset[disabled] .navbar-inverse .btn-link:hover{color:#444}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:4px}.breadcrumb>li{display:inline-block}.breadcrumb>li+li:before{padding:0 5px;color:#ccc;content:"/\00a0"}.breadcrumb>.active{color:#777}.pagination{display:inline-block;padding-left:0;margin:20px 0;border-radius:4px}.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;margin-left:-1px;line-height:1.42857143;color:#337ab7;text-decoration:none;background-color:#fff;border:1px solid 
#ddd}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-top-left-radius:4px;border-bottom-left-radius:4px}.pagination>li:last-child>a,.pagination>li:last-child>span{border-top-right-radius:4px;border-bottom-right-radius:4px}.pagination>li>a:focus,.pagination>li>a:hover,.pagination>li>span:focus,.pagination>li>span:hover{z-index:2;color:#23527c;background-color:#eee;border-color:#ddd}.pagination>.active>a,.pagination>.active>a:focus,.pagination>.active>a:hover,.pagination>.active>span,.pagination>.active>span:focus,.pagination>.active>span:hover{z-index:3;color:#fff;cursor:default;background-color:#337ab7;border-color:#337ab7}.pagination>.disabled>a,.pagination>.disabled>a:focus,.pagination>.disabled>a:hover,.pagination>.disabled>span,.pagination>.disabled>span:focus,.pagination>.disabled>span:hover{color:#777;cursor:not-allowed;background-color:#fff;border-color:#ddd}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px;line-height:1.3333333}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-top-left-radius:6px;border-bottom-left-radius:6px}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-top-right-radius:6px;border-bottom-right-radius:6px}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px;line-height:1.5}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-top-left-radius:3px;border-bottom-left-radius:3px}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-top-right-radius:3px;border-bottom-right-radius:3px}.pager{padding-left:0;margin:20px 0;text-align:center;list-style:none}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:focus,.pager li>a:hover{text-decoration:none;background-color:#eee}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager 
.previous>span{float:left}.pager .disabled>a,.pager .disabled>a:focus,.pager .disabled>a:hover,.pager .disabled>span{color:#777;cursor:not-allowed;background-color:#fff}.label{display:inline;padding:.2em .6em .3em;font-size:75%;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25em}a.label:focus,a.label:hover{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.btn .label{position:relative;top:-1px}.label-default{background-color:#777}.label-default[href]:focus,.label-default[href]:hover{background-color:#5e5e5e}.label-primary{background-color:#337ab7}.label-primary[href]:focus,.label-primary[href]:hover{background-color:#286090}.label-success{background-color:#5cb85c}.label-success[href]:focus,.label-success[href]:hover{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:focus,.label-info[href]:hover{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:focus,.label-warning[href]:hover{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:focus,.label-danger[href]:hover{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:middle;background-color:#777;border-radius:10px}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.btn-group-xs>.btn .badge,.btn-xs .badge{top:0;padding:1px 5px}a.badge:focus,a.badge:hover{color:#fff;text-decoration:none;cursor:pointer}.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#337ab7;background-color:#fff}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding-top:30px;padding-bottom:30px;margin-bottom:30px;color:inherit;background-color:#eee}.jumbotron .h1,.jumbotron h1{color:inherit}.jumbotron 
p{margin-bottom:15px;font-size:21px;font-weight:200}.jumbotron>hr{border-top-color:#d5d5d5}.container .jumbotron,.container-fluid .jumbotron{padding-right:15px;padding-left:15px;border-radius:6px}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron,.container-fluid .jumbotron{padding-right:60px;padding-left:60px}.jumbotron .h1,.jumbotron h1{font-size:63px}}.thumbnail{display:block;padding:4px;margin-bottom:20px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:border .2s ease-in-out;-o-transition:border .2s ease-in-out;transition:border .2s ease-in-out}.thumbnail a>img,.thumbnail>img{margin-right:auto;margin-left:auto}a.thumbnail.active,a.thumbnail:focus,a.thumbnail:hover{border-color:#337ab7}.thumbnail .caption{padding:9px;color:#333}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:700}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 
0}}@-o-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f5f5f5;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.1);box-shadow:inset 0 1px 2px rgba(0,0,0,.1)}.progress-bar{float:left;width:0;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#337ab7;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress-bar-striped,.progress-striped .progress-bar{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;background-size:40px 40px}.progress-bar.active,.progress.active .progress-bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 
75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 
25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.media{margin-top:15px}.media:first-child{margin-top:0}.media,.media-body{overflow:hidden;zoom:1}.media-body{width:10000px}.media-object{display:block}.media-object.img-thumbnail{max-width:none}.media-right,.media>.pull-right{padding-left:10px}.media-left,.media>.pull-left{padding-right:10px}.media-body,.media-left,.media-right{display:table-cell;vertical-align:top}.media-middle{vertical-align:middle}.media-bottom{vertical-align:bottom}.media-heading{margin-top:0;margin-bottom:5px}.media-list{padding-left:0;list-style:none}.list-group{padding-left:0;margin-bottom:20px}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-left-radius:4px;border-top-right-radius:4px}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}a.list-group-item,button.list-group-item{color:#555}a.list-group-item .list-group-item-heading,button.list-group-item .list-group-item-heading{color:#333}a.list-group-item:focus,a.list-group-item:hover,button.list-group-item:focus,button.list-group-item:hover{color:#555;text-decoration:none;background-color:#f5f5f5}button.list-group-item{width:100%;text-align:left}.list-group-item.disabled,.list-group-item.disabled:focus,.list-group-item.disabled:hover{color:#777;cursor:not-allowed;background-color:#eee}.list-group-item.disabled .list-group-item-heading,.list-group-item.disabled:focus .list-group-item-heading,.list-group-item.disabled:hover .list-group-item-heading{color:inherit}.list-group-item.disabled .list-group-item-text,.list-group-item.disabled:focus .list-group-item-text,.list-group-item.disabled:hover 
.list-group-item-text{color:#777}.list-group-item.active,.list-group-item.active:focus,.list-group-item.active:hover{z-index:2;color:#fff;background-color:#337ab7;border-color:#337ab7}.list-group-item.active .list-group-item-heading,.list-group-item.active .list-group-item-heading>.small,.list-group-item.active .list-group-item-heading>small,.list-group-item.active:focus .list-group-item-heading,.list-group-item.active:focus .list-group-item-heading>.small,.list-group-item.active:focus .list-group-item-heading>small,.list-group-item.active:hover .list-group-item-heading,.list-group-item.active:hover .list-group-item-heading>.small,.list-group-item.active:hover .list-group-item-heading>small{color:inherit}.list-group-item.active .list-group-item-text,.list-group-item.active:focus .list-group-item-text,.list-group-item.active:hover .list-group-item-text{color:#c7ddef}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success,button.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading,button.list-group-item-success .list-group-item-heading{color:inherit}a.list-group-item-success:focus,a.list-group-item-success:hover,button.list-group-item-success:focus,button.list-group-item-success:hover{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,a.list-group-item-success.active:focus,a.list-group-item-success.active:hover,button.list-group-item-success.active,button.list-group-item-success.active:focus,button.list-group-item-success.active:hover{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info,button.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading,button.list-group-item-info 
.list-group-item-heading{color:inherit}a.list-group-item-info:focus,a.list-group-item-info:hover,button.list-group-item-info:focus,button.list-group-item-info:hover{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,a.list-group-item-info.active:focus,a.list-group-item-info.active:hover,button.list-group-item-info.active,button.list-group-item-info.active:focus,button.list-group-item-info.active:hover{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning,button.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading,button.list-group-item-warning .list-group-item-heading{color:inherit}a.list-group-item-warning:focus,a.list-group-item-warning:hover,button.list-group-item-warning:focus,button.list-group-item-warning:hover{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,a.list-group-item-warning.active:focus,a.list-group-item-warning.active:hover,button.list-group-item-warning.active,button.list-group-item-warning.active:focus,button.list-group-item-warning.active:hover{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger,button.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading,button.list-group-item-danger 
.list-group-item-heading{color:inherit}a.list-group-item-danger:focus,a.list-group-item-danger:hover,button.list-group-item-danger:focus,button.list-group-item-danger:hover{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,a.list-group-item-danger.active:focus,a.list-group-item-danger.active:hover,button.list-group-item-danger.active,button.list-group-item-danger.active:focus,button.list-group-item-danger.active:hover{color:#fff;background-color:#a94442;border-color:#a94442}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0,0,0,.05);box-shadow:0 1px 1px rgba(0,0,0,.05)}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-left-radius:3px;border-top-right-radius:3px}.panel-heading>.dropdown .dropdown-toggle{color:inherit}.panel-title{margin-top:0;margin-bottom:0;font-size:16px;color:inherit}.panel-title>.small,.panel-title>.small>a,.panel-title>a,.panel-title>small,.panel-title>small>a{color:inherit}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.list-group,.panel>.panel-collapse>.list-group{margin-bottom:0}.panel>.list-group .list-group-item,.panel>.panel-collapse>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel>.list-group:first-child .list-group-item:first-child,.panel>.panel-collapse>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-left-radius:3px;border-top-right-radius:3px}.panel>.list-group:last-child .list-group-item:last-child,.panel>.panel-collapse>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.panel-heading+.panel-collapse>.list-group 
.list-group-item:first-child{border-top-left-radius:0;border-top-right-radius:0}.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.list-group+.panel-footer{border-top-width:0}.panel>.panel-collapse>.table,.panel>.table,.panel>.table-responsive>.table{margin-bottom:0}.panel>.panel-collapse>.table caption,.panel>.table caption,.panel>.table-responsive>.table caption{padding-right:15px;padding-left:15px}.panel>.table-responsive:first-child>.table:first-child,.panel>.table:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child,.panel>.table:first-child>thead:first-child>tr:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child{border-top-left-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child 
td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child{border-top-right-radius:3px}.panel>.table-responsive:last-child>.table:last-child,.panel>.table:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child 
td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:3px}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive,.panel>.table+.panel-body,.panel>.table-responsive+.panel-body{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child td,.panel>.table>tbody:first-child>tr:first-child th{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-respon
sive>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th{border-bottom:0}.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}.panel>.table-responsive{margin-bottom:0;border:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:4px}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading{border-bottom:0}.panel-group .panel-heading+.panel-collapse>.list-group,.panel-group .panel-heading+.panel-collapse>.panel-body{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#333;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ddd}.panel-default>.panel-heading 
.badge{color:#f5f5f5;background-color:#333}.panel-default>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#337ab7}.panel-primary>.panel-heading{color:#fff;background-color:#337ab7;border-color:#337ab7}.panel-primary>.panel-heading+.panel-collapse>.panel-body{border-top-color:#337ab7}.panel-primary>.panel-heading .badge{color:#337ab7;background-color:#fff}.panel-primary>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#337ab7}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse>.panel-body{border-top-color:#d6e9c6}.panel-success>.panel-heading .badge{color:#dff0d8;background-color:#3c763d}.panel-success>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse>.panel-body{border-top-color:#bce8f1}.panel-info>.panel-heading .badge{color:#d9edf7;background-color:#31708f}.panel-info>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse>.panel-body{border-top-color:#faebcc}.panel-warning>.panel-heading .badge{color:#fcf8e3;background-color:#8a6d3b}.panel-warning>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ebccd1}.panel-danger>.panel-heading 
.badge{color:#f2dede;background-color:#a94442}.panel-danger>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ebccd1}.embed-responsive{position:relative;display:block;height:0;padding:0;overflow:hidden}.embed-responsive .embed-responsive-item,.embed-responsive embed,.embed-responsive iframe,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-16by9{padding-bottom:56.25%}.embed-responsive-4by3{padding-bottom:75%}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px rgba(0,0,0,.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,.15)}.well-lg{padding:24px;border-radius:6px}.well-sm{padding:9px;border-radius:3px}.close{float:right;font-size:21px;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;filter:alpha(opacity=20);opacity:.2}.close:focus,.close:hover{color:#000;text-decoration:none;cursor:pointer;filter:alpha(opacity=50);opacity:.5}button.close{-webkit-appearance:none;padding:0;cursor:pointer;background:0 0;border:0}.modal-open{overflow:hidden}.modal{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;display:none;overflow:hidden;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transition:-webkit-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out;-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);-o-transform:translate(0,-25%);transform:translate(0,-25%)}.modal.in .modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);-o-transform:translate(0,0);transform:translate(0,0)}.modal-open 
.modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #999;border:1px solid rgba(0,0,0,.2);border-radius:6px;outline:0;-webkit-box-shadow:0 3px 9px rgba(0,0,0,.5);box-shadow:0 3px 9px rgba(0,0,0,.5)}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{filter:alpha(opacity=0);opacity:0}.modal-backdrop.in{filter:alpha(opacity=50);opacity:.5}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,.5);box-shadow:0 5px 15px rgba(0,0,0,.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1070;display:block;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:12px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;filter:alpha(opacity=0);opacity:0;line-break:auto}.tooltip.in{filter:alpha(opacity=90);opacity:.9}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 
5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{right:5px;bottom:0;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-right .tooltip-arrow{bottom:0;left:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-left .tooltip-arrow{top:0;right:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-right .tooltip-arrow{top:0;left:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px rgba(0,0,0,.2);line-break:auto}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 
14px;margin:0;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover>.arrow{border-width:11px}.popover>.arrow:after{content:"";border-width:10px}.popover.top>.arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,.25);border-bottom-width:0}.popover.top>.arrow:after{bottom:1px;margin-left:-10px;content:" ";border-top-color:#fff;border-bottom-width:0}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,.25);border-left-width:0}.popover.right>.arrow:after{bottom:-10px;left:1px;content:" ";border-right-color:#fff;border-left-width:0}.popover.bottom>.arrow{top:-11px;left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,.25)}.popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,.25)}.popover.left>.arrow:after{right:1px;bottom:-10px;content:" ";border-right-width:0;border-left-color:#fff}.carousel{position:relative}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>a>img,.carousel-inner>.item>img{line-height:1}@media all and (transform-3d),(-webkit-transform-3d){.carousel-inner>.item{-webkit-transition:-webkit-transform .6s ease-in-out;-o-transition:-o-transform .6s ease-in-out;transition:transform .6s 
ease-in-out;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-inner>.item.active.right,.carousel-inner>.item.next{left:0;-webkit-transform:translate3d(100%,0,0);transform:translate3d(100%,0,0)}.carousel-inner>.item.active.left,.carousel-inner>.item.prev{left:0;-webkit-transform:translate3d(-100%,0,0);transform:translate3d(-100%,0,0)}.carousel-inner>.item.active,.carousel-inner>.item.next.left,.carousel-inner>.item.prev.right{left:0;-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;bottom:0;left:0;width:15%;font-size:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6);background-color:rgba(0,0,0,0);filter:alpha(opacity=50);opacity:.5}.carousel-control.left{background-image:-webkit-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.5)),to(rgba(0,0,0,.0001)));background-image:linear-gradient(to right,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1);background-repeat:repeat-x}.carousel-control.right{right:0;left:auto;background-image:-webkit-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-webkit-gradient(linear,left top,right 
top,from(rgba(0,0,0,.0001)),to(rgba(0,0,0,.5)));background-image:linear-gradient(to right,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1);background-repeat:repeat-x}.carousel-control:focus,.carousel-control:hover{color:#fff;text-decoration:none;filter:alpha(opacity=90);outline:0;opacity:.9}.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{position:absolute;top:50%;z-index:5;display:inline-block;margin-top:-10px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{left:50%;margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{right:50%;margin-right:-10px}.carousel-control .icon-next,.carousel-control .icon-prev{width:20px;height:20px;font-family:serif;line-height:1}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;padding-left:0;margin-left:-30%;text-align:center;list-style:none}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;cursor:pointer;background-color:#000;background-color:rgba(0,0,0,0);border:1px solid #fff;border-radius:10px}.carousel-indicators .active{width:12px;height:12px;margin:0;background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6)}.carousel-caption .btn{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{width:30px;height:30px;margin-top:-10px;font-size:30px}.carousel-control .glyphicon-chevron-left,.carousel-control 
.icon-prev{margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{margin-right:-10px}.carousel-caption{right:20%;left:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.btn-group-vertical>.btn-group:after,.btn-group-vertical>.btn-group:before,.btn-toolbar:after,.btn-toolbar:before,.clearfix:after,.clearfix:before,.container-fluid:after,.container-fluid:before,.container:after,.container:before,.dl-horizontal dd:after,.dl-horizontal dd:before,.form-horizontal .form-group:after,.form-horizontal .form-group:before,.modal-footer:after,.modal-footer:before,.modal-header:after,.modal-header:before,.nav:after,.nav:before,.navbar-collapse:after,.navbar-collapse:before,.navbar-header:after,.navbar-header:before,.navbar:after,.navbar:before,.pager:after,.pager:before,.panel-body:after,.panel-body:before,.row:after,.row:before{display:table;content:" "}.btn-group-vertical>.btn-group:after,.btn-toolbar:after,.clearfix:after,.container-fluid:after,.container:after,.dl-horizontal dd:after,.form-horizontal .form-group:after,.modal-footer:after,.modal-header:after,.nav:after,.navbar-collapse:after,.navbar-header:after,.navbar:after,.pager:after,.panel-body:after,.row:after{clear:both}.center-block{display:block;margin-right:auto;margin-left:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none!important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-lg,.visible-md,.visible-sm,.visible-xs{display:none!important}.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block{display:none!important}@media 
(max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table!important}tr.visible-xs{display:table-row!important}td.visible-xs,th.visible-xs{display:table-cell!important}}@media (max-width:767px){.visible-xs-block{display:block!important}}@media (max-width:767px){.visible-xs-inline{display:inline!important}}@media (max-width:767px){.visible-xs-inline-block{display:inline-block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table!important}tr.visible-sm{display:table-row!important}td.visible-sm,th.visible-sm{display:table-cell!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-block{display:block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline{display:inline!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline-block{display:inline-block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table!important}tr.visible-md{display:table-row!important}td.visible-md,th.visible-md{display:table-cell!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-block{display:block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline{display:inline!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline-block{display:inline-block!important}}@media (min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table!important}tr.visible-lg{display:table-row!important}td.visible-lg,th.visible-lg{display:table-cell!important}}@media (min-width:1200px){.visible-lg-block{display:block!important}}@media (min-width:1200px){.visible-lg-inline{display:inline!important}}@media (min-width:1200px){.visible-lg-inline-block{display:inline-block!important}}@media (max-width:767px){.hidden-xs{display:none!important}}@media (min-width:768px) and 
(max-width:991px){.hidden-sm{display:none!important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none!important}}@media (min-width:1200px){.hidden-lg{display:none!important}}.visible-print{display:none!important}@media print{.visible-print{display:block!important}table.visible-print{display:table!important}tr.visible-print{display:table-row!important}td.visible-print,th.visible-print{display:table-cell!important}}.visible-print-block{display:none!important}@media print{.visible-print-block{display:block!important}}.visible-print-inline{display:none!important}@media print{.visible-print-inline{display:inline!important}}.visible-print-inline-block{display:none!important}@media print{.visible-print-inline-block{display:inline-block!important}}@media print{.hidden-print{display:none!important}} -/*# sourceMappingURL=bootstrap.min.css.map */ \ No newline at end of file diff --git a/tutorials/lite/source_zh_cn/_static/css/lite.css b/tutorials/lite/source_zh_cn/_static/css/lite.css deleted file mode 100644 index eabdfd9b2349b8ca46e6c661eaacfdfd43f1276a..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/_static/css/lite.css +++ /dev/null @@ -1,117 +0,0 @@ -.doc-filter-btn { - border: 1px solid #BFBFBF; - margin-right: 0.5rem; - font-size: 0.7rem; - color: #444444; - background-color: white; - width: 5.0rem; - height: 1.7rem; - text-align: left; - position: relative; - -} -.doc-stage-detail button{ - margin-bottom: 0.5rem; -} -button.doc-btn{ - background-color: transparent; - outline: none; -} -.doc-btn-color{ - border: 1px solid #379BE6; - color: #379BE6; -} -.doc-btn-hover{ - border: 1px solid #379BE6; - color: #379BE6; -} -.doc-article-list{ - margin-top: 1.1rem; -} -.doc-article-item{ - padding:2.5rem 2.5rem; - margin-bottom: 1.3rem; - border:1px solid #e5e5e5; - border-radius:0.5rem; - width: 1140px; - box-shadow: 0 0 30px 2px rgba(199,196,196,0.50) -} -.doc-article-item a{ - display:block; - 
text-decoration:none!important; -} -.doc-article-head{ - color: #444444; - font-size:0.9rem; - font-weight:bold; - margin-bottom:0.8rem; - text-align:left; -} -.doc-article-desc{ - font-size:0.7rem; - color:#444444; -} -.doc-footer nav ul li a{ - font-size: 0.7rem; -} -.doc-footer nav ul li span{ - font-size: 0.7rem; -} -.doc-title{ - font-size: 1.6rem; - color: #444444; - font-weight: bold; - margin-bottom: 2.2rem; -} -.doc-filter{ - font-size: 0.7rem; - color: #666666; -} -.doc-delete{ - font-size: 0.7rem; - color: #379BE6; - float: right; -} -.doc-condition{ - margin-bottom: 2rem; -} -.doc-label-choice{ - font-size: 0.7rem; - margin-bottom: 0.53rem; -} -.doc-os{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-hardware{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-user{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-stage{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-language{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-label-content{ - margin-bottom: 2.2rem; -} -div.col-sm-10{ - padding-left: 3.0rem; -} -.container{ - margin-top: 1rem; - margin-left: -15px; -} -#all{ - border: none; - background-color: transparent; - outline: none; -} - diff --git a/tutorials/lite/source_zh_cn/_static/img/choice.png b/tutorials/lite/source_zh_cn/_static/img/choice.png deleted file mode 100644 index 5fb06488a24489616b937778c06af9e8d409046b..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/_static/img/choice.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/_static/js/lite.js b/tutorials/lite/source_zh_cn/_static/js/lite.js deleted file mode 100644 index c152bb3502cf4c9c76da44160657670e41cf05a4..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/_static/js/lite.js +++ /dev/null @@ -1,253 +0,0 @@ -$(function() { - $("button.doc-btn").hover(function(){ - - //移入事件 - $(this).addClass('doc-btn-hover') - },function(){ - //移出事件 - $(this).removeClass('doc-btn-hover'); - }) - // 
每页显示数 - var curNum = 8 - // 计算总数 - var all = $('.doc-article-list').children('div.doc-article-item').length; - - var list = [] - - - - $('button.doc-btn').click(function() { - $('.doc-article-item').removeClass('OUO'); - var id_val = $(this).attr('id') - if (id_val !== 'all') { - if ($('#all').hasClass('doc-btn-color')) { - $('#all').removeClass('doc-btn-color').find('img').remove(); - list.splice(list.indexOf('all_exist'), 1); - $('.doc-article-item').removeClass('all_exist'); - } - } else { - $('button.doc-btn-color').each(function() { - var tag = $(this).attr('id'); - $('.' + tag).removeClass(tag + '_exist'); - list.splice(list.indexOf(tag + '_exist'), 1); - }); - - $('button.doc-btn-color').removeClass('doc-btn-color').find('img').remove(); - } - if ($(this).hasClass('doc-btn-color')) { - $(this).removeClass('doc-btn-color').find('img').remove(); - $('.' + id_val).removeClass(id_val + '_exist'); - list.splice(list.indexOf(id_val + '_exist'), 1); - - } else { - if(id_val == 'all'){ - $(this).addClass('doc-btn-color'); - $('.' + id_val).addClass(id_val + '_exist'); - list.push(id_val + '_exist'); - }else{ - $(this).addClass('doc-btn-color').append(''); - $('.' 
+ id_val).addClass(id_val + '_exist'); - list.push(id_val + '_exist'); - } - - } - - if(list.length > 0){ - var os_list = []; - var hardware_list = []; - var user_list = []; - var stage_list = []; - var language_list = []; - var all_list = []; - - $('.doc-article-item').addClass('hidden'); - var str = 'OUO'; - for(var i=0;i 0){ - for(var i=0;i -1){ - os_count += 1; - } - } - }else{ - os_count = 'empty'; - } - - if(hardware_list.length > 0){ - for(var i=0;i -1){ - hardware_count += 1; - } - } - }else{ - hardware_count = 'empty'; - } - - if(user_list.length > 0){ - for(var i=0;i -1){ - user_count += 1; - } - } - }else{ - user_count = 'empty'; - } - - if(stage_list.length > 0){ - for(var i=0;i -1){ - stage_count += 1; - } - } - }else{ - stage_count = 'empty'; - } - - if(language_list.length > 0){ - for(var i=0;i -1){ - language_count += 1; - } - } - }else{ - language_count = 'empty'; - } - - if(all_list.length > 0){ - for(var i=0;i -1){ - all_count += 1; - } - } - }else{ - all_count = 'empty'; - } - - - if(((os_count >0 && os_count <= os_list.length) || os_count=='empty') && ((hardware_count >0 && hardware_count <= hardware_list.length) || hardware_count=='empty') && ((user_count >0 && user_count <= user_list.length) || user_count == 'empty') && ((stage_count >0 && stage_count <= stage_list.length) || stage_count == 'empty') && ((language_count >0 && language_count <= language_list.length) || language_count=='empty')){ - $(this).removeClass('hidden').addClass(str); - } - }); - - }else{ - $('.doc-article-item').addClass('hidden'); - } - - var hidden_num = $('.doc-article-list').children('.doc-article-item.hidden').length; - var all_article = all - hidden_num - // 计算总页数 - var len = Math.ceil((all - hidden_num) / curNum); - // 生成页码 - var pageList = '
  • ' + '共' + all_article + '条' + '
  • ' + '
  • '; - // 当前的索引值 - var iNum = 0; - - for (var i = 0; i < len; i++) { - pageList += '
  • ' + (i + 1) + '
  • ' - } - pageList += '
  • ' - // 首页加亮显示 - if (all_article > 0){ - $('#pageNav').html(pageList).find('li').eq(2).addClass('active'); - }else{ - $('#pageNav').html('
  • ' + '共' + all_article + '条' + '
  • '); - } - - // 标签页的点击事件 - $('#pageNav').find('li.doc-data').each(function() { - $(this).click(function() { - $(this).addClass('active').siblings('li').removeClass('active'); - iNum = $(this).index() - 2; - if(iNum > 0){ - $('li.pre').removeClass('disabled'); - }else{ - $('li.pre').addClass('disabled'); - } - if(iNum+1 == len){ - $('li.nex').addClass('disabled'); - } - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - - }); - }); - if(iNum == 0){ - $('li.pre').addClass('disabled'); - } - - if(iNum+1 == len){ - $('li.nex').addClass('disabled'); - } - // 向前页点击时间 - $('li.pre').click(function(){ - if(iNum > 0){ - iNum -= 1; - if(iNum == 0){ - $(this).addClass('disabled'); - } - $('li.nex').removeClass('disabled'); - $('#pageNav').find('li.doc-data').eq(iNum).addClass('active').siblings('li').removeClass('active'); - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - } - - }); - - // 向后页点击事件 - $('li.nex').click(function(){ - if(iNum+1 < len){ - iNum += 1; - if(iNum+1 == len){ - $(this).addClass('disabled'); - } - $('li.pre').removeClass('disabled'); - $('#pageNav').find('li.doc-data').eq(iNum).addClass('active').siblings('li').removeClass('active'); - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - } - }); - - // 首页的显示 - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = 0; i < curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show(); - } - - if ($('button.doc-btn-color').length == 0) { - $('#all').trigger('click'); - } - }); 
- - - $('#all').trigger('click'); - - }); - diff --git a/tutorials/lite/source_zh_cn/_static/logo_notebook.png b/tutorials/lite/source_zh_cn/_static/logo_notebook.png deleted file mode 100644 index f28598315f19f4be76a73ddf5dc6bbdbe4db35fd..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/_static/logo_notebook.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/_static/logo_source.png b/tutorials/lite/source_zh_cn/_static/logo_source.png deleted file mode 100644 index 9932d67ab50871edb0c95979c4e948c812c7cdea..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/_static/logo_source.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/architecture_lite.md b/tutorials/lite/source_zh_cn/architecture_lite.md deleted file mode 100644 index 2b492c3f5a6a77cdfb5b75e6fd550e87f3eaed06..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/architecture_lite.md +++ /dev/null @@ -1,39 +0,0 @@ -# 总体架构 - -`Linux` `Windows` `端侧` `推理应用` `中级` `高级` `贡献者` - - - -MindSpore Lite是一款极速、极智、极简的AI引擎,使能全场景智能应用,为用户提供端到端的解决方案,帮助用户使能AI能力。 - -MindSpore Lite 分为离线模块和在线模块两个部分,其框架的总体架构如下所示: - -![architecture](./images/MindSpore-Lite-architecture.png) - -- 离线模块: - - - **3rd Model Parsers:** 将第三方模型转换为统一的MindIR,其中第三方模型包括TensorFlow、TensorFlow Lite、Caffe 1.0和ONNX模型。 - - - **MindIR:** MindSpore端云统一的IR。 - - - **Optimizer:** 基于IR进行图优化,如算子融合、常量折叠等。 - - - **Quantizer:** 训练后量化模块,支持权重量化、激活值量化等训练后量化手段。 - - - **benchmark:** 测试性能以及调试精度的工具集。 - - - **Micro CodeGen:** 针对IoT场景,将模型直接编译为可执行文件的工具。 - -- 在线模块: - - - **Training/Inference APIs:** 端云统一的C++/Java训练推理接口。 - - - **MindRT Lite:** 轻量化的在线运行时,支持异步执行。 - - - **MindData Lite:** 用于端侧数据处理。 - - - **Delegate:** 用于对接专业AI硬件引擎的代理。 - - - **Kernels:** 内置的高性能算子库,提供CPU、GPU和NPU算子。 - - - **Learning Strategies:** 端侧学习策略,如迁移学习。 diff --git a/tutorials/lite/source_zh_cn/conf.py b/tutorials/lite/source_zh_cn/conf.py deleted file mode 100644 index 
88925d3d6f26b5216c2c406566a4abbb50a1fe18..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/conf.py +++ /dev/null @@ -1,68 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys - - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore Lite' -copyright = '2020, MindSpore Lite' -author = 'MindSpore Lite' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'recommonmark', - 'sphinx_markdown_tables', -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. 
-# -html_theme = 'sphinx_rtd_theme' - -html_search_language = 'zh' - -html_search_options = {'dict': '../../resource/jieba.txt'} - -html_static_path = ['_static'] - -def setup(app): - app.add_stylesheet('css/bootstrap.min.css') - app.add_stylesheet('css/lite.css') - app.add_javascript('js/lite.js') \ No newline at end of file diff --git a/tutorials/lite/source_zh_cn/faq.md b/tutorials/lite/source_zh_cn/faq.md deleted file mode 100644 index ec32e712ede9bad7e30518fb8c5aeb8a4284c3fd..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/faq.md +++ /dev/null @@ -1,45 +0,0 @@ -# FAQ - - - -
    - -**Q:MindSpore Lite支持的日志级别有几种?怎么设置日志级别?** - -A:目前支持DEBUG、INFO、WARNING、ERROR四种日志级别,用户可以通过设置环境变量GLOG_v为0~3选择打印的日志级别,0~3分别对应DEBUG、INFO、WARNING和ERROR,默认打印WARNING和ERROR级别的日志。例如设置GLOG_v为1即可打印INFO及以上级别的日志。 - -
    - -**Q:NPU推理存在什么限制?** - -A:目前NPU仅支持在系统ROM版本EMUI>=11、芯片支持包括Kirin 9000、Kirin 9000E、Kirin 990、Kirin 985、Kirin 820、Kirin 810等,具体约束和芯片支持请查看: - -
    - -**Q:为什么使用裁剪工具裁剪后的静态库在集成时存在编译失败情况?** - -A:目前裁剪工具仅支持CPU的库,即编译命令中指定了`-e CPU`,具体使用请查看[使用裁剪工具降低库文件大小](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/cropper_tool.html)文档。 - -
    - -**Q:MindSpore Lite推理是否会耗尽手机全部内存?** - -A:MindSpore Lite内置内存池有最大容量限制,为3GB,如果模型较大,超过最大容量限制,运行将会异常退出。 - -
    - -**Q:MindSpore Lite的离线模型MS文件如何进行可视化,看到网络结构?** - -A:模型可视化开源仓库`Netron`已经支持查看MindSpore Lite模型(MindSpore版本 >= r1.2),请到Netron官网下载安装包[Netron](https://github.com/lutzroeder/netron)。 - -
    - -**Q:MindSpore有量化推理工具么?** - -A:[MindSpore Lite](https://www.mindspore.cn/lite)支持云侧量化感知训练的量化模型的推理,MindSpore Lite converter工具提供训练后量化以及权重量化功能,且功能在持续加强完善中。 - -
    - -**Q:MindSpore有轻量的端侧推理引擎么?** - -A:MindSpore轻量化推理框架MindSpore Lite已于r0.7版本正式上线,欢迎试用并提出宝贵意见,概述、教程和文档等请参考[MindSpore Lite](https://www.mindspore.cn/lite) diff --git a/tutorials/lite/source_zh_cn/image_classification_lite.md b/tutorials/lite/source_zh_cn/image_classification_lite.md deleted file mode 100644 index dae9812fc6742df7536b3ef957fd5aafeaa9d343..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/image_classification_lite.md +++ /dev/null @@ -1,40 +0,0 @@ -# 图像分类模型 - - - -## 图像分类介绍 - -图像分类模型可以预测图片中出现哪些物体,识别出图片中出现物体列表及其概率。 比如下图经过模型推理的分类结果为下表: - -![image_classification](images/image_classification_result.png) - -| 类别 | 概率 | -| ---------- | ------ | -| plant | 0.9359 | -| flower | 0.8641 | -| tree | 0.8584 | -| houseplant | 0.7867 | - -使用MindSpore Lite实现图像分类的[示例代码](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/image_classification)。 - -## 图像分类模型列表 - -下表是使用MindSpore Lite推理的部分图像分类模型的数据。 - -> 下表的性能是在mate30手机上测试的。 - -| 模型名称 | 大小(Mb) | Top1 | Top5 | F1 | CPU 4线程时延(ms) | -|-----------------------| :----------: | :----------: | :----------: | :----------: | :-----------: | -| [MobileNetV2](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms) | 11.5 | - | - | 65.5% | 14.595 | -| [Inceptionv3](https://download.mindspore.cn/model_zoo/official/lite/inceptionv3_lite/inceptionv3.ms) | 90.9 | 78.62% | 94.08% | - | 92.086 | -| [Shufflenetv2](https://download.mindspore.cn/model_zoo/official/lite/shufflenetv2_lite/shufflenetv2.ms) | 8.8 | 67.74% | 87.62% | - | 8.303 | -| [GoogleNet](https://download.mindspore.cn/model_zoo/official/lite/googlenet_lite/googlenet.ms) | 25.3 | 72.2% | 90.06% | - | 23.257 | -| [ResNext50](https://download.mindspore.cn/model_zoo/official/lite/resnext50_lite/resnext50.ms) | 95.8 | 73.1% | 91.21% | - | 138.164 | -| [GhostNet](https://download.mindspore.cn/model_zoo/official/lite/ghostnet_lite/ghostnet.ms) | 15.0 | 73.9% | 91.40% | - | 9.959 | -| 
[GhostNet600](https://download.mindspore.cn/model_zoo/official/lite/ghostnet_lite/ghostnet600.ms) | 40.4 | 80.2% | 94.90% | - | 52.243 | -| [GhostNet_int8](https://download.mindspore.cn/model_zoo/official/lite/ghostnet_lite/ghostnet_int8.ms) | 15.3 | 73.6% | - | - | 31.452 | -| [VGG-Small-low_bit](https://download.mindspore.cn/model_zoo/official/lite/low_bit_quant/low_bit_quant_bs_1.ms) | 17.8 | 93.7% | - | - | 9.082 | -| [ResNet50-0.65x](https://download.mindspore.cn/model_zoo/official/lite/adversarial_pruning_lite/adversarial_pruning.ms) | 48.6 | 80.2% | - | - | 89.816 | -| [plain-CNN-ResNet18](https://download.mindspore.cn/model_zoo/official/lite/residual_distill_lite/residual_distill_res18_cifar10_bs_1_update.ms) | 97.3 | 95.4% | - | - | 63.227 | -| [plain-CNN-ResNet34](https://download.mindspore.cn/model_zoo/official/lite/residual_distill_lite/residual_distill_res34_cifar10_bs_1_update.ms) | 80.5 | 95.0% | - | - | 20.652 | -| [plain-CNN-ResNet50](https://download.mindspore.cn/model_zoo/official/lite/residual_distill_lite/residual_distill_res50_cifar10_bs_1_update.ms) | 89.6 | 94.5% | - | - | 24.561 | diff --git a/tutorials/lite/source_zh_cn/image_segmentation_lite.md b/tutorials/lite/source_zh_cn/image_segmentation_lite.md deleted file mode 100644 index f04b0acc967f1bf68b7e1c19588217bd43588619..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/image_segmentation_lite.md +++ /dev/null @@ -1,19 +0,0 @@ -# 图像分割模型 - - - -## 图像分割介绍 - -图像分割是用于检测目标在图片中的位置或者图片中某一像素是输入何种对象的。 - -使用MindSpore Lite实现图像分割的[示例代码](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/image_segmentation)。 - -## 图像分割模型列表 - -下表是使用MindSpore Lite推理的部分图像分割模型的数据。 - -> 下表的性能是在mate30手机上测试的。 - -| 模型名称 | 大小(Mb) | IoU | CPU 4线程时延(ms) | -|-----------------------| :------: | :-------: | :------: | -| [Deeplabv3](https://download.mindspore.cn/model_zoo/official/lite/deeplabv3_lite/deeplabv3.ms) | 18.7 | 0.58 | 120 | diff --git 
a/tutorials/lite/source_zh_cn/images/MindSpore-Lite-architecture.png b/tutorials/lite/source_zh_cn/images/MindSpore-Lite-architecture.png deleted file mode 100644 index 0b8afedb9e3a62d723faa916520c35171c0482fc..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/MindSpore-Lite-architecture.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/after_transfer.png b/tutorials/lite/source_zh_cn/images/after_transfer.png deleted file mode 100644 index cb066922a36214a940741f4c2bca96ec35ec7d19..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/after_transfer.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/before_transfer.png b/tutorials/lite/source_zh_cn/images/before_transfer.png deleted file mode 100644 index ba2fe024d6382a1bad7b0f6cc4f2623e4815c2cf..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/before_transfer.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/classification_apk.png b/tutorials/lite/source_zh_cn/images/classification_apk.png deleted file mode 100644 index 30e78acf6566c3747c53c420fdc9ae95f30a93c9..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/classification_apk.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/image_classification_result.png b/tutorials/lite/source_zh_cn/images/image_classification_result.png deleted file mode 100644 index a7cc49f582440e31b6b5b14dbba5131bfed2a4b4..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/image_classification_result.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/lite_codegen.png b/tutorials/lite/source_zh_cn/images/lite_codegen.png deleted file mode 100644 index 14283ccde9b250b0c0eb981643f9215f1202cfe1..0000000000000000000000000000000000000000 Binary files 
a/tutorials/lite/source_zh_cn/images/lite_codegen.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/lite_quick_start_app_result.png b/tutorials/lite/source_zh_cn/images/lite_quick_start_app_result.png deleted file mode 100644 index a7cc49f582440e31b6b5b14dbba5131bfed2a4b4..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/lite_quick_start_app_result.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/lite_quick_start_home.png b/tutorials/lite/source_zh_cn/images/lite_quick_start_home.png deleted file mode 100644 index c48cf581b33afbc15dbf27be495215b999e1be60..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/lite_quick_start_home.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/lite_quick_start_install.png b/tutorials/lite/source_zh_cn/images/lite_quick_start_install.png deleted file mode 100644 index 37391feec840a6131c10f4ff97a77fad9e494d21..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/lite_quick_start_install.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/lite_quick_start_project_structure.png b/tutorials/lite/source_zh_cn/images/lite_quick_start_project_structure.png deleted file mode 100644 index ade37a61ef97a479401240215e302011c014824c..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/lite_quick_start_project_structure.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/lite_quick_start_run_app.PNG b/tutorials/lite/source_zh_cn/images/lite_quick_start_run_app.PNG deleted file mode 100644 index 2557b6293de5b3d7fefe7f6e58b57c03deabb55d..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/lite_quick_start_run_app.PNG and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/lite_quick_start_sdk.png 
b/tutorials/lite/source_zh_cn/images/lite_quick_start_sdk.png deleted file mode 100644 index 1fcb8acabc9ba9d289efbe7e82ee5e2da8bfe073..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/lite_quick_start_sdk.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/lite_runtime.png b/tutorials/lite/source_zh_cn/images/lite_runtime.png deleted file mode 100644 index ee696c569d51be4f72c82d749014d944b4edfd3d..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/lite_runtime.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/lite_segmentation_quick_start_install.png b/tutorials/lite/source_zh_cn/images/lite_segmentation_quick_start_install.png deleted file mode 100644 index c08c2551449b2ce0503a8c05f9973a1c97dcb632..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/lite_segmentation_quick_start_install.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/object_detection.png b/tutorials/lite/source_zh_cn/images/object_detection.png deleted file mode 100644 index ad5425c86393a9367701166796df42c9e4702988..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/object_detection.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/posenet_detection.png b/tutorials/lite/source_zh_cn/images/posenet_detection.png deleted file mode 100644 index db253e597caa3c8c825b466ef2bc0ce7893d1411..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/posenet_detection.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/segmentation1.png b/tutorials/lite/source_zh_cn/images/segmentation1.png deleted file mode 100644 index 85456fdecf884d17707a5440676299c6a2af10ca..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/segmentation1.png and /dev/null differ diff --git 
a/tutorials/lite/source_zh_cn/images/segmentation2.png b/tutorials/lite/source_zh_cn/images/segmentation2.png deleted file mode 100644 index e872283bf0adf4aa13d100078630ba7e1f4f1bb1..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/segmentation2.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/segmentation3.png b/tutorials/lite/source_zh_cn/images/segmentation3.png deleted file mode 100644 index d5eeb242a4833dfd46614faa7ee0e70930923769..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/segmentation3.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/segmentation4.png b/tutorials/lite/source_zh_cn/images/segmentation4.png deleted file mode 100644 index 8b6559cf1ea92c121c306523354f8674ed30f08f..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/segmentation4.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/segmentation5.png b/tutorials/lite/source_zh_cn/images/segmentation5.png deleted file mode 100644 index bf733f051d49e0d0f6c7a7169ca012a3bc738df9..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/segmentation5.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/segmentation6.png b/tutorials/lite/source_zh_cn/images/segmentation6.png deleted file mode 100644 index 0954855aa38370cdf10e3685b70fc8da667c8dea..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/segmentation6.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/segmentation7.png b/tutorials/lite/source_zh_cn/images/segmentation7.png deleted file mode 100644 index 872e9d7f125d101cd1bbcfc022401281ebfc574f..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/segmentation7.png and /dev/null differ diff --git 
a/tutorials/lite/source_zh_cn/images/segmentation_apk.png b/tutorials/lite/source_zh_cn/images/segmentation_apk.png deleted file mode 100644 index 0d8c4daad80a4bf30854e85825195cebf022a61a..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/segmentation_apk.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/train_sequence.png b/tutorials/lite/source_zh_cn/images/train_sequence.png deleted file mode 100644 index d403ce000b655a425f6b61d95e15067ed467385a..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/train_sequence.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/images/visual_mnist.png b/tutorials/lite/source_zh_cn/images/visual_mnist.png deleted file mode 100644 index b37a36dff99f2157b5a16c49e8715ef1d2636625..0000000000000000000000000000000000000000 Binary files a/tutorials/lite/source_zh_cn/images/visual_mnist.png and /dev/null differ diff --git a/tutorials/lite/source_zh_cn/index.rst b/tutorials/lite/source_zh_cn/index.rst deleted file mode 100644 index cd2f5f95a3523fb9423810b6a978f35cd870d734..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/index.rst +++ /dev/null @@ -1,445 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Aug 17 09:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -在手机或IoT设备上使用MindSpore -================================= - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 快速入门 - :hidden: - - quick_start/quick_start_cpp - quick_start/quick_start_java - quick_start/quick_start - quick_start/image_segmentation - quick_start/train_lenet - quick_start/train_lenet_java - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 获取MindSpore Lite - :hidden: - - use/downloads - use/build - -.. 
toctree:: - :glob: - :maxdepth: 1 - :caption: 端侧推理 - :hidden: - - use/converter_tool - use/post_training_quantization - use/data_preprocessing - use/runtime - use/micro - use/asic - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 端侧训练 - :hidden: - - use/converter_train - use/runtime_train - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 其他工具 - :hidden: - - use/benchmark - use/cropper_tool - use/visual_tool - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 参考文档 - :hidden: - - architecture_lite - operator_list_lite - operator_list_codegen - model_lite - C++ API - Java API - faq - -.. raw:: html - -
    -
    -
    -
    - - -
    - 筛选条件 - -
    - -
    -
    -
    -
    -
    环境
    -
    -
    - - - - - -
    -
    - -
    - -
    -
    -
    -
    用户
    -
    -
    - - - -
    -
    -
    - -
    -
    -
    -
    阶段
    -
    -
    - - - - - - - - - - - - - - -
    -
    -
    - -
    -
    -
    -
    专用芯片
    -
    -
    - -
    -
    -
    -
    -
    -
    -
    编程语言
    -
    -
    - - -
    -
    -
    - -
    -
    - - - -
    - - - - - - - - - - - - - - - - - - - - - -
    - -
    -
    - 可视化工具 -
    -
    - Netron是一个基于Electron平台开发的神经网络模型可视化工具,支持MindSpore Lite模型,可以方便地查看模型信息。 -
    -
    -
    -
    -
    - -
    - -
    -
    -
    - diff --git a/tutorials/lite/source_zh_cn/model_lite.rst b/tutorials/lite/source_zh_cn/model_lite.rst deleted file mode 100644 index 98053efb0b09a6258009c46b7a18e0d3cc891469..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/model_lite.rst +++ /dev/null @@ -1,12 +0,0 @@ -模型支持 -=================== - -.. toctree:: - :maxdepth: 1 - - image_classification_lite - object_detection_lite - posenet_lite - image_segmentation_lite - style_transfer_lite - scene_detection_lite \ No newline at end of file diff --git a/tutorials/lite/source_zh_cn/object_detection_lite.md b/tutorials/lite/source_zh_cn/object_detection_lite.md deleted file mode 100644 index 71a7a223e19046f4b7f213a3991f55135e5a97d3..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/object_detection_lite.md +++ /dev/null @@ -1,26 +0,0 @@ -# 目标检测模型 - - - -## 目标检测介绍 - -目标检测可以识别出图片中的对象和该对象在图片中的位置。 如:对下图使用目标检测模型的输出如下表所示,使用矩形框识别图中目标对象的位置并且标注出目标对象类别的概率,其中坐标中的4个数字分别为Xmin,Ymin,,Xmax,,Ymax;概率表示反应被检测物理的可信程度。 - -![image_classification](images/object_detection.png) - -| 类别 | 概率 | 坐标 | -| ----- | ---- | ---------------- | -| mouse | 0.78 | [10, 25, 35, 43] | - -使用MindSpore Lite实现目标检测的[示例代码](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/object_detection)。 - -## 目标检测模型列表 - -下表是使用MindSpore Lite推理的部分目标检测模型的数据。 - -> 下表的性能是在mate30手机上测试的。 - -| 模型名称 | 大小(Mb) | mAP(IoU=0.50:0.95) | CPU 4线程时延(ms) | -|-----------------------| :----------: | :----------: | :-----------: | -| [MobileNetv2-SSD](https://download.mindspore.cn/model_zoo/official/lite/ssd_mobilenetv2_lite/ssd.ms) | 16.7 | 0.22 | 25.4 | -| [GhostNet-SSD](https://download.mindspore.cn/model_zoo/official/lite/ssd_ghostnet_lite/ssd.ms) | 25.7 | 0.24 | 24.1 | diff --git a/tutorials/lite/source_zh_cn/operator_list_codegen.md b/tutorials/lite/source_zh_cn/operator_list_codegen.md deleted file mode 100644 index b6ae6ba937fcb5d59bc39629c1e12b395a428a62..0000000000000000000000000000000000000000 --- 
a/tutorials/lite/source_zh_cn/operator_list_codegen.md +++ /dev/null @@ -1,65 +0,0 @@ -# Codegen算子支持 - -`Linux` `Ascend` `端侧` `推理应用` `初级` `中级` `高级` - - - -本文列举MindSpore Lite Codegen支持的算子。 - -| 操作名
      | CPU
    FP32 | CPU
    Int8 | CMSIS
    Int8 | 支持的TensorFlow Lite算子 | 支持的Caffe Lite算子 | 支持的Onnx Lite算子 |支持的TensorFlow算子 | -|-----------------------|:--------------:|:--------------:|:-----------------:|---------------------------------|--------------------------|-------------------------------------------------|-------------------------------------------------| -| Abs | ✅ | | | Abs | | Abs | | -| Add | ✅ | ✅ | ✅ | Add | | Add,
    Int8Add | Add, AddV2 | -| AddN | ✅ | | | AddN | | | | -| AvgPool | ✅ | ✅ | ✅ | MeanPooling | Pooling | AveragePool,
    GlobalAveragePool,
    Int8AveragePool | | -| BatchNorm | | ✅ | ✅ | | BatchNorm | BatchNormalization | | -| BiasAdd | ✅ | | | | | BiasAdd | BiasAdd | -| Cast | ✅ | ✅ | ✅ | Cast, QUANTIZE,
    DEQUANTIZE | | Cast | Cast | -| Ceil | ✅ | | | Ceil | | Ceil | | -| Concat | ✅ | ✅ | ✅ | Concat | Concat | Concat | ConcatV2 | -| Conv2d | ✅ | ✅ | ✅ | Conv2D | Convolution | Conv, Int8Conv,
    ConvRelu,
    Int8ConvRelu | Conv2D | -| Cos | ✅ | | | Cos | | Cos | | -| DetectionPostProcess | | ✅ | | Custom | | | | -| Div | ✅ | ✅ | ✅ | Div, RealDiv | | Div | Div, RealDiv | -| Eltwise | ✅ | | | | Eltwise | Sum, Max[3] | | -| Equal | ✅ | | | Equal | | Equal | Equal | -| ExpandDims | ✅ | | | ExpandDims | | | ExpandDims | -| Floor | ✅ | | | flOOR | | Floor | | -| FloorDiv | ✅ | | | FloorDiv | | | | -| FloorMod | ✅ | | | FloorMod | | | | -| FullConnection | ✅ | ✅ | ✅ | FullyConnected | InnerProduct | | | -| Greater | ✅ | | | Greater | | Greater | Greater | -| GreaterEqual | ✅ | | | GreaterEqual | | | GreaterEqual | -| Less | ✅ | | | Less | | Less | Less | -| LessEqual | ✅ | | | LessEqual | | | LessEqual | -| Log | ✅ | | | Log | | Log | | -| LogicalAnd | ✅ | | | LogicalAnd | | And | LogicalAnd | -| LogicalNot | ✅ | | | LogicalNot | | Not | | -| LogicalOr | ✅ | | | LogicalOr | | Or | | -| MatMul | ✅ | ✅ | | | | MatMul | MatMul | -| Maximum | ✅ | | | Maximum | | | Maximum | -| MaxPool | ✅ | ✅ | ✅ | MaxPooling | Pooling | MaxPool,
    GlobalMaxPool | | -| Minimum | ✅ | | | Minimum | | Min | Minimum | -| Mul | ✅ | ✅ | ✅ | Mul | | Mul | Mul | -| Neg | ✅ | | | Neg | | Neg | | -| NotEqual | ✅ | | | NotEqual | | |NotEqual | -| ReLU | ✅ | ✅ | ✅ | Relu | ReLU | Relu | Relu | -| ReLU6 | ✅ | ✅ | ✅ | Relu6 | ReLU6 | Clip[1] | Relu6 | -| Reshape | ✅ | ✅ | ✅ | Reshape | Reshape | Reshape,Flatten | Reshape | -| Resize | | ✅ | | ResizeBilinear,
    NearestNeighbor | Interp | | | -| Round | ✅ | | | Round | | Round | Round | -| Rsqrt | ✅ | | | Rsqrt | | | | -| Sigmoid | ✅ | ✅ | ✅ | Logistic | Sigmoid | Sigmoid | Sigmoid | -| Sin | ✅ | | | Sin | | Sin | | -| Softmax | ✅ | ✅ | ✅ | Softmax | Softmax | Softmax | | -| Sqrt | ✅ | | | Sqrt | | Sqrt | | -| Square | ✅ | | | Square | | | | -| SquaredDifference | ✅ | | | SquaredDifference | | | | -| Squeeze | ✅ | | | Squeeze | | Squeeze | Squeeze | -| Sub | ✅ | ✅ | ✅ | Sub | | Sub | Sub | - -[1] Clip:仅支持将clip(0, 6)转换为Relu6。 - -[2] Pow:仅支持指数为单个常数。 - -[3] Sum与Max:仅支持输入个数为2。 diff --git a/tutorials/lite/source_zh_cn/operator_list_lite.md b/tutorials/lite/source_zh_cn/operator_list_lite.md deleted file mode 100644 index 02ffa9fc770d98dbb19d82306d033a62fa86a09e..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/operator_list_lite.md +++ /dev/null @@ -1,182 +0,0 @@ -# Lite算子支持 - -`Linux` `Ascend` `端侧` `推理应用` `初级` `中级` `高级` - - - -本文列举MindSpore Lite支持的算子。 - -| 操作名
      | CPU
    FP16 | CPU
    FP32 | CPU
    Int8 | CPU
    UInt8 | GPU
    FP16 | GPU
    FP32 | NPU
      | 支持的TensorFlow Lite算子 | 支持的Caffe Lite算子 | 支持的Onnx Lite算子 |支持的TensorFlow算子 | -| --------------------- | :------------: | :------------: | :------------: | :-------------: | :------------: | :------------: | :---------: | ------------------------------- | ------------------------ | ----------------------------------------------- | ----------------------------------------------- | -| Abs | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Abs | | Abs | Abs | -| Add | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Add | | Add, Int8Add | Add, AddV2 | -| AddGrad | | ✅ | | | | | | | | | | -| AddN | | ✅ | | | | | | AddN | | | | -| Assert | | ✅ | | | | | | | | | Assert | -| Argmax | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Argmax | ArgMax | ArgMax | Argmax | -| Argmin | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | Argmin | | | ArgMin | -| AvgPool | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | MeanPooling | Pooling | AveragePool,
    GlobalAveragePool,
    Int8AveragePool | AvgPool | -| AvgPoolGrad | | ✅ | | | | | | | | | | -| BatchNorm | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | | BatchNorm | BatchNormalization | | -| BatchNormGrad | | ✅ | | | | | | | | | | -| BatchToSpace | | ✅ | ✅ | ✅ | ✅ | ✅ | | BatchToSpace,
    BatchToSpaceND | | | BatchToSpace,
    BatchToSpaceND | -| BiasAdd | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | | | BiasAdd | BiasAdd | -| BiasAddGrad | | ✅ | | | | | | | | | | -| Broadcast | | ✅ | | | | | | BroadcastTo | | Expand | | -| Cast | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Cast, QUANTIZE,
    DEQUANTIZE | | Cast | Cast | -| Ceil | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Ceil | | Ceil | Ceil | -| Concat | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Concat | Concat | Concat | ConcatV2 | -| ConstantOfShape | | ✅ | | | | | | | | ConstantOfShape | | -| Conv2d | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Conv2D | Convolution | Conv, Int8Conv,
    ConvRelu,
    Int8ConvRelu | Conv2D | -| Conv2dGrad | | ✅ | | | | | | | | | | -| Conv2dTranspose | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | DeConv2D | Deconvolution | ConvTranspose | Conv2DBackpropInput | -| Conv2dTransposeGrad | | ✅ | | | | | | | | | | -| Cos | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Cos | | Cos | Cos | -| Crop | ✅ | ✅ | ✅ | ✅ | | | | | Crop | | | -| CropAndResize | | ✅ | | | | | ✅ | | | | CropAndResize | -| CumSum | | ✅ | | | | | | | | | Cumsum | -| CustomExtractFeatures | | ✅ | | | | | | ExtractFeatures | | | | -| CustomNormalize | | ✅ | | | | | | Normalize | | | | -| CustomPredict | | ✅ | | | | | | Predict | | | | -| DeDepthwiseConv2D | | ✅ | ✅ | ✅ | | | | | Deconvolution | | | -| DepthToSpace | | ✅ | ✅ | ✅ | ✅ | ✅ | | DepthToSpace | | DepthToSpace | | -| DepthwiseConv2dNative | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | DepthwiseConv2D | Convolution | | DepthwiseConv2dNative | -| DetectionPostProcess | | ✅ | ✅ | ✅ | | | | Custom | | | | -| Div | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Div, RealDiv | | Div | Div, RealDiv | -| DivGrad | | ✅ | | | | | | | | | | -| Eltwise | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | Eltwise | Sum, Max[3] | | -| Elu | | ✅ | | | | | | | Elu | Elu,
    NonMaxSuppression | NonMaxSuppressionV3 | -| EluGrad | | ✅ | | | | | | | | | | -| Equal | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Equal | | Equal | Equal | -| Exp | | ✅ | | | ✅ | ✅ | | Exp | Exp | Exp | Exp | -| ExpandDims | ✅ | ✅ | ✅ | ✅ | | | ✅ | ExpandDims | | | ExpandDims | -| Fill | | ✅ | | | | | | Fill | | | Fill | -| Flatten | ✅ | ✅ | | | | | | | Flatten | | | -| Floor | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | flOOR | | Floor | Floor | -| FloorDiv | ✅ | ✅ | | | ✅ | ✅ | ✅ | FloorDiv | | | FloorDiv | -| FloorMod | ✅ | ✅ | | | ✅ | ✅ | ✅ | FloorMod | | | FloorMod | -| FullConnection | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | FullyConnected | InnerProduct | | | -| FusedBatchNorm | ✅ | ✅ | ✅ | ✅ | | | ✅ | FusedBatchNorm | | | FusedBatchNorm,
    FusedBatchNormV3 | -| GatherNd | | ✅ | ✅ | ✅ | ✅ | ✅ | | GatherND | | | GatherNd | -| Gather | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Gather | | Gather | GatherV2 | -| Greater | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Greater | | Greater | Greater | -| GreaterEqual | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | GreaterEqual | | | GreaterEqual | -| GRU | ✅ | ✅ | | | | | | | | | | -| HardTanh | ✅ | ✅ | | | | | | | | | | -| HashtableLookup | | ✅ | | | | | | HashtableLookup | | | | -| HSigmoid | | ✅ | | ✅ | | | | | | | | -| Hswish | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | HardSwish | | | | -| HswishGrad | | ✅ | | | | | | | | | | -| InstanceNorm | ✅ | ✅ | | | | | | InstanceNorm | | | | -| InvertPermutation | | ✅ | | | | | | | | | InvertPermutation | -| L2Norm | | ✅ | ✅ | | | | | L2_NORMALIZATION | | | | -| LayerNorm | | ✅ | ✅ | | | | | | | | | -| LeakyReLU | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | LeakyRelu | | LeakyRelu | LeakyRelu | -| LeakyReLUGrad | | ✅ | | | | | | | | | | -| Less | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Less | | Less | Less | -| LessEqual | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | LessEqual | | | LessEqual | -| LRN | | ✅ | | | | | | LocalResponseNorm | | Lrn, LRN | | -| Log | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Log | | Log | Log | -| LogGrad | ✅ | ✅ | | | | | | | | | | -| LogicalAnd | ✅ | ✅ | | | ✅ | ✅ | ✅ | LogicalAnd | | And | LogicalAnd | -| LogicalNot | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | LogicalNot | | Not | LogicalNot | -| LogicalOr | ✅ | ✅ | | | ✅ | ✅ | ✅ | LogicalOr | | Or | LogicalOr | -| LshProjection | | ✅ | | | | | | LshProjection | | | | -| LSTM | ✅ | ✅ | | | | | | | | LSTM | | -| MatMul | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | | MatMul | MatMul,
    BatchMatMul | -| MatMulGrad | | ✅ | | | | | | | | | | -| Maximum | ✅ | ✅ | | | ✅ | ✅ | ✅ | Maximum | | | Maximum | -| MaximumGrad | | ✅ | | | | | | | | | | -| MaxPool | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | MaxPooling | Pooling | MaxPool, GlobalMaxPool | MaxPool | -| MaxPoolGrad | | ✅ | | | | | | | | | | -| Merge | ✅ | ✅ | | | | | | | | | Merge | -| Minimum | ✅ | ✅ | | | ✅ | ✅ | ✅ | Minimum | | Min | Minimum | -| MinimumGrad | | ✅ | | | | | | | | | | -| Mul | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Mul | | Mul | Mul | -| MulGrad | | ✅ | | | | | | | | | | -| Neg | ✅ | ✅ | | | ✅ | ✅ | ✅ | Neg | | Neg | | -| NegGrad | | ✅ | | | | | | | | | | -| NotEqual | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | NotEqual | | | NotEqual | -| OneHot | | ✅ | | | ✅ | ✅ | | OneHot | | OneHot | OneHot | -| Pad | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Pad, MirrorPad | | Pad | MirrorPad, Pad | -| Pow | | ✅ | ✅ | ✅ | ✅ | ✅ | | Pow | Power | Pow[2] | Pow | -| PowGrad | | ✅ | | | | | | | | | | -| PReLU | | ✅ | | | ✅ | ✅ | | PRELU | PReLU | PRelu | | -| RandomStandardNormal | | ✅ | | | | | | | | | RandomStandardNormal | -| RandomUniform | | ✅ | | | | | | | | | RandomUniform | -| Range | | ✅ | | | | | | Range | | | Range,
    RaggedRange | -| Rank | | ✅ | | | | | | Rank | | | Rank | -| Reciprocal | ✅ | ✅ | ✅ | | | | ✅ | | | | | -| ReduceAll | | ✅ | | | | | | | | | All | -| ReduceASum | | ✅ | | | ✅ | ✅ | | | Reduction | | | -| ReduceMax | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | ReduceMax | | ReduceMax | Max | -| ReduceMean | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | Mean | Reduction | ReduceMean | Mean | -| ReduceMin | | ✅ | ✅ | ✅ | ✅ | ✅ | | ReduceMin | | ReduceMin | Min | -| ReduceProd | | ✅ | ✅ | ✅ | ✅ | ✅ | | ReduceProd | | ReduceProd | Prod | -| ReduceSum | | ✅ | ✅ | ✅ | ✅ | ✅ | | Sum | Reduction | ReduceSum | Sum | -| ReduceSumSquare | | ✅ | ✅ | ✅ | | | | | Reduction | ReduceSumSquare | | -| ReLU | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Relu | ReLU | Relu | Relu | -| ReLUGrad | ✅ | ✅ | | | | | | | | | | -| ReLU6 | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Relu6 | ReLU6 | Clip[1] | Relu6 | -| ReLU6Grad | ✅ | ✅ | | | | | | | | | | -| Reshape | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Reshape | Reshape | Reshape,Flatten | Reshape | -| Resize | | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ResizeBilinear,
    NearestNeighbor | Interp | | ResizeBilinear,
    ResizeBicubic,
    ResizeNearestNeighbor | -| ResizeGrad | | ✅ | | | | | | | | | | -| Reverse | | ✅ | | | | | | reverse | | | ReverseV2 | -| ReverseSequence | | ✅ | | | | | | ReverseSequence | | | ReverseSequence | -| Round | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Round | | Round | Round | -| Rsqrt | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Rsqrt | | | Rsqrt | -| Select | | ✅ | | | | | | | | | Select | -| Selu | | | | | | | | | | | Selu | -| Scale | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | Scale | | | -| ScatterNd | | ✅ | | | | | | ScatterNd | | | | -| Shape | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Shape | | Shape | Shape | -| Sigmoid | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Logistic | Sigmoid | Sigmoid | Sigmoid | -| SigmoidGrad | ✅ | ✅ | | | | | | | | | | -| Sin | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Sin | | Sin | Sin | -| Size | | ✅ | | | | | | | | | Size | -| Slice | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Slice | Slice | Slice | Slice | -| SkipGram | | ✅ | | | | | | SKipGram | | | | -| Softmax | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Softmax | Softmax | Softmax | Softmax | -| SoftmaxGrad | | ✅ | | | | | | | | | | -| Softplus | | ✅ | | | | | | | | | Softplus | -| SpaceToBatch | | ✅ | ✅ | ✅ | ✅ | ✅ | | SpaceToBatch | | | | -| SpaceToBatchND | | ✅ | ✅ | ✅ | ✅ | ✅ | | SpaceToBatchND | | | SpaceToBatchND | -| SpaceToDepth | | ✅ | | | ✅ | ✅ | | SpaceToDepth | | SpaceToDepth | | -| SparseToDense | | ✅ | | | ✅ | ✅ | | SpareToDense | | | | -| Split | ✅ | ✅ | ✅ | ✅ | | | ✅ | Split, SplitV | | Split | Split, SplitV | -| Sqrt | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Sqrt | | Sqrt | Sqrt | -| Square | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Square | | | Square | -| SquaredDifference | ✅ | ✅ | | | ✅ | ✅ | ✅ | SquaredDifference | | | SquaredDifference | -| Squeeze | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | Squeeze | | Squeeze | Squeeze | -| StridedSlice | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | StridedSlice | | | StridedSlice | -| Stack | ✅ | ✅ | | | ✅ | ✅ | | Stack | | | Pack | -| Sub | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Sub | | Sub | Sub | -| SubGrad | | ✅ | | | | | | | | | | -| Swish | ✅ | ✅ | | | | | | | | | | -| Switch | ✅ | ✅ 
| | | | | | | | | Switch | -| Tanh | ✅ | ✅ | | | ✅ | ✅ | ✅ | Tanh | TanH | Tanh, Sign | Tanh | -| TanhGrad | | ✅ | | | | | | | | | | -| TensorListFromTensor | ✅ | ✅ | | | | | | | | | TensorListFromTensor | -| TensorListGetItem | ✅ | ✅ | | | | | | | | | TensorListGetItem | -| TensorListReserve | ✅ | ✅ | | | | | | | | | TensorListReserve | -| TensorListSetItem | ✅ | ✅ | | | | | | | | | TensorListSetItem | -| TensorListStack | ✅ | ✅ | | | | | | | | | TensorListStack | -| Tile | ✅ | ✅ | | | | | ✅ | Tile | Tile | Tile | Tile | -| TopK | | ✅ | ✅ | ✅ | | | | TopKV2 | | TopK | TopKV2 | -| Transpose | ✅ | ✅ | ✅ | | ✅ | ✅ | ✅ | Transpose | Permute | Transpose | Transpose | -| UniformReal | | ✅ | | | | | | | | | | -| Unique | | ✅ | | | | | | Unique | | | | -| Unsqueeze | ✅ | ✅ | ✅ | ✅ | | | ✅ | | | Unsqueeze | | -| Unstack | | ✅ | | | | | | Unstack | | | | -| Where | | ✅ | | | | | | Where | | | Where | -| ZerosLike | | ✅ | | | | | | ZerosLike | | | ZerosLike | -| 转换工具支持的其他算子[4] | | | | | | | | | | Loop, Dropout, If | Dropout, Enter,
    Exit, If,
    IsFinite,
    LinSpace,
    LoopCond,
    NextIteration,
    StatelessIf,
    StatelessWhile,
    While | - -[1] Clip:仅支持将clip(0, 6)转换为Relu6。 - -[2] Pow:仅支持指数为单个常数。 - -[3] Sum与Max:仅支持输入个数为2。 - -[4] [转换工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html)支持,但不需要具体实现的算子,一般这类算子在转化工具中被优化而消失,如被融合掉或者使用其他算子代替。 diff --git a/tutorials/lite/source_zh_cn/posenet_lite.md b/tutorials/lite/source_zh_cn/posenet_lite.md deleted file mode 100644 index 31616b2a16f4d29ab8cb9e765f92706b46b975dd..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/posenet_lite.md +++ /dev/null @@ -1,15 +0,0 @@ -# 骨骼检测模型 - - - -## 骨骼检测介绍 - -骨骼检测可以识别摄像头中,不同姿势下人体的面部五官与肢体姿势。 - -使用骨骼检测模型的输出如图: - -蓝色标识点检测人体面部的五官分布及上肢、下肢的骨骼走势。此次推理置信分数0.98/1,推理时延66.77ms。 - -![image_posenet](images/posenet_detection.png) - -使用MindSpore Lite实现骨骼检测的[示例代码](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/posenet)。 diff --git a/tutorials/lite/source_zh_cn/quick_start/image_segmentation.md b/tutorials/lite/source_zh_cn/quick_start/image_segmentation.md deleted file mode 100644 index b23c1e0cc4ef9719cfa6ff0618987e6675a96120..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/quick_start/image_segmentation.md +++ /dev/null @@ -1,309 +0,0 @@ -# 基于Java接口的Android应用开发 - -`Android` `Java` `全流程` `模型转换` `模型加载` `推理应用` `数据准备` `初级` `中级` `高级` - - - -- [基于Java接口的Android应用开发](#基于java接口的android应用开发) - - [概述](#概述) - - [选择模型](#选择模型) - - [部署应用](#部署应用) - - [运行依赖](#运行依赖) - - [构建与运行](#构建与运行) - - [示例程序详细说明](#示例程序详细说明) - - [示例程序结构](#示例程序结构) - - [配置MindSpore Lite依赖项](#配置mindspore-lite依赖项) - - [下载及部署模型文件](#下载及部署模型文件) - - [编写端侧推理代码](#编写端侧推理代码) - - - - - -## 概述 - -我们推荐你从端侧Android图像分割demo入手,了解MindSpore Lite应用工程的构建、依赖项配置以及相关Java API的使用。 - -本教程基于MindSpore团队提供的Android“端侧图像分割”示例程序,演示了端侧部署的流程。 - -## 选择模型 - - 选择图像分割模型。 - ->你可以在这里找到[Android图像分割模型](https://download.mindspore.cn/model_zoo/official/lite/mobile_segment_lite/segment_model.ms)。 -> -> 本示例中讲述了Java API的应用方法。 - 
-我们提供了本示例对应的APK文件,你可扫描下方的二维码或直接下载[APK文件](https://download.mindspore.cn/model_zoo/official/lite/apk/segmentation/image_segmentation.apk),并部署到Android设备后使用。 - -![apk](../images/segmentation_apk.png) - -## 部署应用 - -接下来介绍如何构建和执行MindSpore Lite端侧图像分割任务。 - -### 运行依赖 - -- Android Studio >= 3.2 (推荐4.0以上版本) -- Android SDK >= 26 (Android Studio默认安装) -- JDK >= 1.8 (Android Studio默认安装) - -### 构建与运行 - -1. 在Android Studio中加载本示例源码,并安装相应的SDK(指定SDK版本后,由Android Studio自动安装)。 - - ![start_home](../images/lite_quick_start_home.png) - -2. 连接Android设备,运行图像分割应用程序。 - - 通过USB连接Android设备调试,点击`Run 'app'`即可在你的设备上运行本示例项目。 - - ![run_app](../images/lite_quick_start_run_app.PNG) - - Android Studio连接设备调试操作,可参考。 - - 手机需开启“USB调试模式”,Android Studio才能识别到手机。 华为手机一般在`设置->系统和更新->开发人员选项->USB调试`中打开“USB调试模式”。 - -3. 在Android设备上,点击“继续安装”,安装完即可查看到本地相册以及设备摄像头拍照的头像图片进行分割推理的结果。 - - ![install](../images/lite_segmentation_quick_start_install.png) - - 运行结果如下图所示(以选取相册某张头像图片为例)。 - - ![result1](../images/segmentation1.png) - - 选取相册带有头像图片。 - - ![result2](../images/segmentation2.png) - - 选择九宫格中不同的背景图片,即可对人像的背景就行替换分割。 - - - - - - - -

    图1 白色背景

    图2 蓝色背景

    图3 油画背景
    - -## 示例程序详细说明 - -本端侧图像分割Android示例程序使用Java层,需读者具备一定的Android开发基础知识。 - -### 示例程序结构 - -```text -app -├── src/main -│ ├── assets # 资源文件 -| | └── model # 模型文件 -| | └── segment_model.ms # 存放的模型文件 -│ | -│ ├── libs # Android库项目的二进制归档文件 -| | └── mindspore-lite-version.aar # MindSpore Lite针对Android版本的归档文件 -│ | -│ ├── java # java层应用代码 -│ │ └── com.mindspore.imagesegmentation -│ │ ├── help # 图像处理 -│ │ │ └── ... -│ │ └── ... Android页面展示以及逻辑处理 -│ │ -│ ├── res # 存放Android相关的资源文件 -│ └── AndroidManifest.xml # Android配置文件 -│ -│ -├── build.gradle # 其他Android配置文件 -├── download.gradle # 工程依赖文件下载 -└── ... -``` - -### 配置MindSpore Lite依赖项 - -Android调用MindSpore Android AAR时,需要相关库文件支持。可通过MindSpore Lite[源码编译](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html)生成`mindspore-lite-{version}-inference-android.tar.gz`库文件包并解压缩(包含`mindspore-lite-{version}.aar`库文件)。 - -> version:输出件版本号,与所编译的分支代码对应的版本一致。 - -本示例中,build过程由`app/download.gradle`文件自动下载MindSpore Lite版本文件,并放置在`app/libs`目录下。 - -注: 若自动下载失败,请手动下载相关库文件[mindspore-lite-{version}-inference-android.tar.gz](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html),解压后将其放在对应位置。 - -### 下载及部署模型文件 - -从MindSpore Model Hub中下载模型文件,本示例程序中使用的终端图像分割模型文件为`segment_model.ms`,同样通过`app/download.gradle`脚本在APP构建时自动下载,并放置在`app/src/main/assets`工程目录下。 - -注:若下载失败请手工下载模型文件[segment_model.ms](https://download.mindspore.cn/model_zoo/official/lite/mobile_segment_lite/segment_model.ms)。 - -### 编写端侧推理代码 - -推理代码流程如下,完整代码请参见 [src/java/com/mindspore/imagesegmentation/TrackingMobile](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/lite/image_segmentation/app/src/main/java/com/mindspore/imagesegmentation/help/TrackingMobile.java)。 - -1. 加载MindSpore Lite模型文件,构建上下文、会话以及用于推理的计算图。 - - - 加载模型文件:创建并配置用于模型推理的上下文。 - - ```java - // Load the .ms model. 
- Model model = new Model(); - if (!model.loadModel(Context, "segment_model.ms")) { - Log.e(TAG, "Load Model failed"); - return; - } - ``` - - - 创建会话。 - - ```java - // Create and init config. - MSConfig msConfig = new MSConfig(); - if (!msConfig.init(DeviceType.DT_CPU, threadNum, CpuBindMode.MID_CPU)) { - Log.e(TAG, "Init context failed"); - return; - } - - // Create the MindSpore lite session. - LiteSession session = new LiteSession(); - if (!session.init(msConfig)) { - Log.e(TAG, "Create session failed"); - msConfig.free(); - return; - } - msConfig.free(); - ``` - - - 加载模型文件并构建用于推理的计算图。 - - ```java - // Compile graph. - if (!session.compileGraph(model)) { - Log.e(TAG, "Compile graph failed"); - model.freeBuffer(); - return; - } - - // Note: when use model.freeBuffer(), the model can not be compile graph again. - model.freeBuffer(); - ``` - -2. 将输入图片转换为传入MindSpore模型的Tensor格式。 - - 将待检测图片数据转换为输入MindSpore模型的Tensor。 - - ```java - List inputs = session.getInputs(); - if (inputs.size() != 1) { - Log.e(TAG, "inputs.size() != 1"); - return null; - } - - float resource_height = bitmap.getHeight(); - float resource_weight = bitmap.getWidth(); - - ByteBuffer contentArray = BitmapUtils.bitmapToByteBuffer(bitmap, imageSize, imageSize, IMAGE_MEAN, IMAGE_STD); - - MSTensor inTensor = inputs.get(0); - inTensor.setData(contentArray); - ``` - -3. 运行会话,执行计算图。 - - ```java - // Run graph to infer results. - if (!session.runGraph()) { - Log.e(TAG, "Run graph failed"); - return null; - } - ``` - -4. 对输出数据进行处理。 - - - 通过Tensor获取的输出数据得到其维度,批处理数,通道数等信息。 - - ```java - // Get output tensor values. 
- List tensorNames = session.getOutputTensorNames(); - Map outputs = session.getOutputMapByTensor(); - for (String tensorName : tensorNames) { - MSTensor output = outputs.get(tensorName); - if (output == null) { - Log.e(TAG, "Can not find output " + tensorName); - return null; - } - float[] results = output.getFloatData(); - float[] result = new float[output.elementsNum()]; - - int batch = output.getShape()[0]; - int channel = output.getShape()[1]; - int weight = output.getShape()[2]; - int height = output.getShape()[3]; - int plane = weight * height; - ``` - - - 将NCHW格式转为NHWC格式,放入到`float[] result`。 - - ```java - for (int n = 0; n < batch; n++) { - for (int c = 0; c < channel; c++) { - for (int hw = 0; hw < plane; hw++) { - result[n * channel * plane + hw * channel + c] = results[n * channel * plane + c * plane + hw]; - } - } - } - ``` - -5. 对输入Tensor按照模型进行推理,进行后处理。 - - - 将`float[] result`数据转换成ByteBuffer数据格式。 - - ```java - ByteBuffer buffer = ByteBuffer.allocate(4 * result.length); - FloatBuffer floatBuffer = buffer.asFloatBuffer(); - floatBuffer.put(result); - return buffer; - ``` - - - 将ByteBuffer数据格式转成Bitmap。 - - 通过推理出来的数据在Bitmap每个像素坐标进行比对。如果坐标数据等于PERSON,坐标点颜色不变。反之,则改成透明色(如下图所示)。 - - ```java - Bitmap.Config conf = Bitmap.Config.ARGB_8888; - Bitmap maskBitmap = Bitmap.createBitmap(imageWidth, imageHeight, conf); - Bitmap scaledBackgroundImage = - BitmapUtils.scaleBitmapAndKeepRatio(backgroundImage, imageWidth, imageHeight); - int[][] mSegmentBits = new int[imageWidth][imageHeight]; - inputBuffer.rewind(); - for (int y = 0; y < imageHeight; y++) { - for (int x = 0; x < imageWidth; x++) { - float maxVal = 0f; - mSegmentBits[x][y] = 0; - for (int i = 0; i < NUM_CLASSES; i++) { - float value = inputBuffer.getFloat((y * imageWidth * NUM_CLASSES + x * NUM_CLASSES + i) * 4); - if (i == 0 || value > maxVal) { - maxVal = value; - if (i == PERSON) { - mSegmentBits[x][y] = i; - } else { - mSegmentBits[x][y] = 0; - } - } - } - maskBitmap.setPixel(x, y, mSegmentBits[x][y] == 
0 ? colors[0] : scaledBackgroundImage.getPixel(x, y)); - } - } - ``` - - - - - - -

    图1 推理前

    图2 推理后
    - -6. 将推理后的图片与选择的背景图片相结合。 - - ```java - MainActivity.this.imgPreview.setDrawingCacheEnabled(true); - MainActivity.this.imgPreview.setBackground(isDemo ? getDrawable(IMAGES[selectedPosition]) : customBack); - MainActivity.this.imgPreview.setImageBitmap(foreground); - MainActivity.this.imgPreview.setDrawingCacheEnabled(false); - ``` diff --git a/tutorials/lite/source_zh_cn/quick_start/quick_start.md b/tutorials/lite/source_zh_cn/quick_start/quick_start.md deleted file mode 100644 index ee936fbf521fb27eaeabb6ee9583372e2ee0402e..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/quick_start/quick_start.md +++ /dev/null @@ -1,433 +0,0 @@ -# 基于JNI接口的Android应用开发 - -`Android` `C++` `全流程` `模型转换` `模型加载` `推理应用` `数据准备` `初级` `中级` `高级` - - - -- [基于JNI接口的Android应用开发](#基于jni接口的android应用开发) - - [概述](#概述) - - [选择模型](#选择模型) - - [转换模型](#转换模型) - - [部署应用](#部署应用) - - [运行依赖](#运行依赖) - - [构建与运行](#构建与运行) - - [示例程序详细说明](#示例程序详细说明) - - [示例程序结构](#示例程序结构) - - [配置MindSpore Lite依赖项](#配置mindspore-lite依赖项) - - [下载及部署模型文件](#下载及部署模型文件) - - [编写端侧推理代码](#编写端侧推理代码) - - - - - -## 概述 - -我们推荐你从端侧Android图像分类demo入手,了解MindSpore Lite应用工程的构建、依赖项配置以及相关API的使用。 - -本教程基于MindSpore团队提供的Android“端侧图像分类”示例程序,演示了端侧部署的流程。 - -1. 选择图像分类模型。 -2. 将模型转换成MindSpore Lite模型格式。 -3. 
在端侧使用MindSpore Lite推理模型。详细说明如何在端侧利用MindSpore Lite C++ API(Android JNI)和MindSpore Lite图像分类模型完成端侧推理,实现对设备摄像头捕获的内容进行分类,并在APP图像预览界面中,显示出最可能的分类结果。 - -> 你可以在这里找到[Android图像分类模型](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite)和[图像分类示例代码](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/image_classification)。 -> -> 本示例中讲述了C++ API的应用方法,此外MindSpore Lite还支持Java API。关于Java API的使用请参考[图像分割demo](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/image_segmentation)。 - -我们提供了本示例对应的APK文件,你可扫描下方的二维码或直接下载[APK文件](https://download.mindspore.cn/model_zoo/official/lite/apk/label/Classification.apk),并部署到Android设备后使用。 - -![apk](../images/classification_apk.png) - -## 选择模型 - -MindSpore团队提供了一系列预置终端模型,你可以在应用程序中使用这些预置的终端模型。 -可下载[MindSpore Model Zoo中图像分类模型](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms)。 -同时,你也可以使用预置模型做迁移学习,以实现自己的图像分类任务。 - -## 转换模型 - -如果预置模型已经满足你要求,请跳过本章节。 如果你需要对MindSpore提供的模型进行重训,重训完成后,需要将模型导出为[.mindir格式](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#mindir)。然后使用MindSpore Lite[模型转换工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html)将.mindir格式转换成.ms格式。 - -以mobilenetv2模型为例,如下脚本将其转换为MindSpore Lite模型用于端侧推理。 - -```bash -call converter_lite --fmk=MINDIR --modelFile=mobilenetv2.mindir --outputFile=mobilenetv2 -``` - -## 部署应用 - -接下来介绍如何构建和执行mindspore Lite端侧图像分类任务。 - -### 运行依赖 - -- Android Studio >= 3.2 (推荐4.0以上版本) -- NDK 21.3 -- [CMake](https://cmake.org/download) >= 3.18.3 -- Android SDK >= 26 -- JDK >= 1.8 - -### 构建与运行 - -1. 
在Android Studio中加载本示例源码,并安装相应的SDK(指定SDK版本后,由Android Studio自动安装)。 - - ![start_home](../images/lite_quick_start_home.png) - - 启动Android Studio后,点击`File->Settings->System Settings->Android SDK`,勾选相应的SDK。如下图所示,勾选后,点击`OK`,Android Studio即可自动安装SDK。 - - ![start_sdk](../images/lite_quick_start_sdk.png) - - (可选)若安装时出现NDK版本问题,可手动下载相应的[NDK版本](https://developer.android.com/ndk/downloads?hl=zh-cn)(本示例代码使用的NDK版本为21.3),并在`Project Structure`的`Android NDK location`设置中指定NDK的位置。 - - ![project_structure](../images/lite_quick_start_project_structure.png) - -2. 连接Android设备,运行图像分类应用程序。 - - 通过USB连接Android设备调试,点击`Run 'app'`即可在你的设备上运行本示例项目。 - - ![run_app](../images/lite_quick_start_run_app.PNG) - - Android Studio连接设备调试操作,可参考。 - - 手机需开启“USB调试模式”,Android Studio才能识别到手机。 华为手机一般在`设置->系统和更新->开发人员选项->USB调试`中打开“USB调试模式”。 - -3. 在Android设备上,点击“继续安装”,安装完即可查看到设备摄像头捕获的内容和推理结果。 - - ![install](../images/lite_quick_start_install.png) - - 识别结果如下图所示。 - - ![result](../images/lite_quick_start_app_result.png) - -## 示例程序详细说明 - -本端侧图像分类Android示例程序分为JAVA层和JNI层,其中,JAVA层主要通过Android Camera 2 API实现摄像头获取图像帧,以及相应的图像处理等功能;JNI层在[Runtime](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/runtime.html)中完成模型推理的过程。 - -> 此处详细说明示例程序的JNI层实现,JAVA层运用Android Camera 2 API实现开启设备摄像头以及图像帧处理等功能,需读者具备一定的Android开发基础知识。 - -### 示例程序结构 - -```text -app -├── src/main -│ ├── assets # 资源文件 -| | └── model # 模型文件 -| | └── mobilenetv2.ms # 存放的模型文件 -│ | -│ ├── cpp # 模型加载和预测主要逻辑封装类 -| | ├── .. -| | ├── mindspore-lite-{version}-android-{arch} # MindSpore Lite版本 -| | ├── MindSporeNetnative.cpp # MindSpore调用相关的JNI方法 -│ | └── MindSporeNetnative.h # 头文件 -| | └── MsNetWork.cpp # MindSpore接口封装 -│ | -│ ├── java # java层应用代码 -│ │ └── com.mindspore.classification -│ │ ├── gallery.classify # 图像处理及MindSpore JNI调用相关实现 -│ │ │ └── ... -│ │ └── widget # 开启摄像头及绘制相关实现 -│ │ └── ... 
-│ │ -│ ├── res # 存放Android相关的资源文件 -│ └── AndroidManifest.xml # Android配置文件 -│ -├── CMakeList.txt # cmake编译入口文件 -│ -├── build.gradle # 其他Android配置文件 -├── download.gradle # 工程依赖文件下载 -└── ... -``` - -### 配置MindSpore Lite依赖项 - -Android JNI层调用MindSpore C++ API时,需要相关库文件支持。可通过MindSpore Lite[源码编译](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html)生成`mindspore-lite-{version}-android-{arch}.tar.gz`库文件包并解压缩(包含`libmindspore-lite.so`库文件和相关头文件),在本例中需使用生成带图像预处理模块的编译命令。 - -> version:输出件版本号,与所编译的分支代码对应的版本一致。 -> -> arch:操作系统,arm64或arm32。 - -本示例中,build过程由`app/download.gradle`文件自动下载MindSpore Lite版本文件,并放置在`app/src/main/cpp/`目录下。 - -注: 若自动下载失败,请手动下载操作系统为Android-aarch64/Android-aarch32的MindSpore Lite 模型推理框架相关库文件[mindspore-lite-{version}-android-{arch}.tar.gz](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html),解压后将`mindspore-lite-{version}-android-{arch}`的文件夹拷贝到`src/main/cpp`目录下。 - -```text -android{ - defaultConfig{ - externalNativeBuild{ - cmake{ - arguments "-DANDROID_STL=c++_shared" - } - } - - ndk{ - abiFilters'armeabi-v7a', 'arm64-v8a' - } - } -} -``` - -在`app/CMakeLists.txt`文件中建立`.so`库文件链接,如下所示。 - -```text -# ============== Set MindSpore Dependencies. 
============= -include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp) -include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/inference) -include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/inference/include) -include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/inference/minddata/include) - -add_library(mindspore-lite SHARED IMPORTED) -add_library(minddata-lite SHARED IMPORTED) - -set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION - ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/inference/lib/libmindspore-lite.so) -set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION - ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/inference/minddata/lib/libminddata-lite.so) -# --------------- MindSpore Lite set End. -------------------- - -# Link target library. -target_link_libraries( # Specifies the target library. - mlkit-label-MS - - # --- mindspore --- - minddata-lite - mindspore-lite - - # --- other dependencies.--- - -ljnigraphics - android - - # Links the target library to the log library - ${log-lib} - ) -``` - -### 下载及部署模型文件 - -从MindSpore Model Hub中下载模型文件,本示例程序中使用的终端图像分类模型文件为`mobilenetv2.ms`,同样通过`app/download.gradle`脚本在APP构建时自动下载,并放置在`app/src/main/assets/model`工程目录下。 - -注:若下载失败请手工下载模型文件[mobilenetv2.ms](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms)。 - -### 编写端侧推理代码 - -在JNI层调用MindSpore Lite C++ API实现端侧推理。 - -推理代码流程如下,完整代码请参见[MindSporeNetnative.cpp](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/lite/image_classification/app/src/main/cpp/MindSporeNetnative.cpp)。 - -1. 
加载MindSpore Lite模型文件,构建上下文、会话以及用于推理的计算图。 - - - 加载模型文件: - - 在Android的Java层读取模型文件,转换成ByteBuffer类型文件`model_buffer`,通过JNI调用传输到C++层。最终将`model_buffer`转换成char类型文件`modelBuffer`。 - - ```cpp - // Buffer is the model data passed in by the Java layer - jlong bufferLen = env->GetDirectBufferCapacity(model_buffer); - if (0 == bufferLen) { - MS_PRINT("error, bufferLen is 0!"); - return (jlong) nullptr; - } - - char *modelBuffer = CreateLocalModelBuffer(env, model_buffer); - if (modelBuffer == nullptr) { - MS_PRINT("modelBuffer create failed!"); - return (jlong) nullptr; - } - ``` - - - 构建上下文、会话以及用于推理的计算图: - - 构建上下文,设置会话参数。通过上下文和模型数据创建会话。 - - ```cpp - // To create a MindSpore network inference environment. - void **labelEnv = new void *; - MSNetWork *labelNet = new MSNetWork; - *labelEnv = labelNet; - - mindspore::lite::Context *context = new mindspore::lite::Context; - context->thread_num_ = num_thread; - context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND; - context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false; - context->device_list_[0].device_type_ = mindspore::lite::DT_CPU; - - labelNet->CreateSessionMS(modelBuffer, bufferLen, context); - delete context; - ``` - - 基于模型文件`modelBuffer`构建用于推理的计算图。 - - ```cpp - void MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx) { - session_ = mindspore::session::LiteSession::CreateSession(ctx); - if (session_ == nullptr) { - MS_PRINT("Create Session failed."); - return; - } - - // Compile model. - model_ = mindspore::lite::Model::Import(modelBuffer, bufferLen); - if (model_ == nullptr) { - ReleaseNets(); - MS_PRINT("Import model failed."); - return; - } - - int ret = session_->CompileGraph(model_); - if (ret != mindspore::lite::RET_OK) { - ReleaseNets(); - MS_PRINT("CompileGraph failed."); - return; - } - } - ``` - -2. 
将输入图片转换为传入MindSpore模型的Tensor格式。 - - - 将待检测图片`srcBitmap`进行尺寸裁剪并转换为LiteMat格式`lite_norm_mat_cut`。对其宽高以及通道数信息转换成float格式数据`dataHWC`。最终把`dataHWC`拷贝到MindSpore模型的Tensor输入`inTensor`中。 - - ```cpp - if (!BitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) { - MS_PRINT("BitmapToLiteMat error"); - return NULL; - } - if (!PreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) { - MS_PRINT("PreProcessImageData error"); - return NULL; - } - - ImgDims inputDims; - inputDims.channel = lite_norm_mat_cut.channel_; - inputDims.width = lite_norm_mat_cut.width_; - inputDims.height = lite_norm_mat_cut.height_; - - // Get the MindSpore inference environment which created in loadModel(). - void **labelEnv = reinterpret_cast(netEnv); - if (labelEnv == nullptr) { - MS_PRINT("MindSpore error, labelEnv is a nullptr."); - return NULL; - } - MSNetWork *labelNet = static_cast(*labelEnv); - - auto mSession = labelNet->session(); - if (mSession == nullptr) { - MS_PRINT("MindSpore error, Session is a nullptr."); - return NULL; - } - MS_PRINT("MindSpore get session."); - - auto msInputs = mSession->GetInputs(); - if (msInputs.size() == 0) { - MS_PRINT("MindSpore error, msInputs.size() equals 0."); - return NULL; - } - auto inTensor = msInputs.front(); - - float *dataHWC = reinterpret_cast(lite_norm_mat_cut.data_ptr_); - // Copy dataHWC to the model input tensor. 
- memcpy(inTensor->MutableData(), dataHWC, - inputDims.channel * inputDims.width * inputDims.height * sizeof(float)); - ``` - - 调整输入图片的尺寸,以及数据处理详细算法。 - - ```cpp - bool PreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) { - bool ret = false; - LiteMat lite_mat_resize; - LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr; - ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 256, 256); - if (!ret) { - MS_PRINT("ResizeBilinear error"); - return false; - } - LiteMat lite_mat_convert_float; - ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0); - if (!ret) { - MS_PRINT("ConvertTo error"); - return false; - } - LiteMat lite_mat_cut; - ret = Crop(lite_mat_convert_float, lite_mat_cut, 16, 16, 224, 224); - if (!ret) { - MS_PRINT("Crop error"); - return false; - } - std::vector means = {0.485, 0.456, 0.406}; - std::vector stds = {0.229, 0.224, 0.225}; - SubStractMeanNormalize(lite_mat_cut, lite_norm_mat_cut, means, stds); - return true; - } - ``` - -3. 对输入Tensor按照模型进行推理,获取输出Tensor,并进行后处理。 - - - 图和模型加载完成,执行端侧推理。 - - ```cpp - // After the model and image tensor data is loaded, run inference. 
- auto status = mSession->RunGraph(); - ``` - - - 获取对MindSpore模型的Tensor输出`msOutputs`。通过`msOutputs`以及分类数组信息,计算得到在APP中显示的文本信息`resultCharData`。 - - ```cpp - auto names = mSession->GetOutputTensorNames(); - std::unordered_map msOutputs; - for (const auto &name : names) { - auto temp_dat =mSession->GetOutputByTensorName(name); - msOutputs.insert(std::pair {name, temp_dat}); - } - std::string resultStr = ProcessRunnetResult(::RET_CATEGORY_SUM,::labels_name_map, msOutputs); - - const char *resultCharData = resultStr.c_str(); - return (env)->NewStringUTF(resultCharData); - ``` - - 输出数据的后续处理。通过`msOutputs`获取输出对象`outputTensor`,并和事物类别数组`labels_name_map`解析得到每个元素的训练的得分数组`scores[]`。 设置可信度阀值为`unifiedThre`,根据训练数据统计可信度阀值。高于阀值,归属于这个类型。反之,则不是。最终返回一个对应事物类别名称和对应得分的数据`categoryScore`。 - - ```cpp - std::string ProcessRunnetResult(const int RET_CATEGORY_SUM, const char *const labels_name_map[], std::unordered_map msOutputs) { - // Get the branch of the model output. - // Use iterators to get map elements. - std::unordered_map::iterator iter; - iter = msOutputs.begin(); - - // The mobilenetv2.ms model output just one branch. - auto outputTensor = iter->second; - - int tensorNum = outputTensor->ElementsNum(); - MS_PRINT("Number of tensor elements:%d", tensorNum); - - // Get a pointer to the first score. - float *temp_scores = static_cast(outputTensor->MutableData()); - float scores[RET_CATEGORY_SUM]; - for (int i = 0; i < RET_CATEGORY_SUM; ++i) { - scores[i] = temp_scores[i]; - } - - const float unifiedThre = 0.5; - const float probMax = 1.0; - for (size_t i = 0; i < RET_CATEGORY_SUM; ++i) { - float threshold = g_thres_map[i]; - float tmpProb = scores[i]; - if (tmpProb < threshold) { - tmpProb = tmpProb / threshold * unifiedThre; - } else { - tmpProb = (tmpProb - threshold) / (probMax - threshold) * unifiedThre + unifiedThre; - } - scores[i] = tmpProb; - } - - // Score for each category. - // Converted to text information that needs to be displayed in the APP. 
- std::string categoryScore = ""; - for (int i = 0; i < RET_CATEGORY_SUM; ++i) { - categoryScore += labels_name_map[i]; - categoryScore += ":"; - std::string score_str = std::to_string(scores[i]); - categoryScore += score_str; - categoryScore += ";"; - } - return categoryScore; - } - ``` diff --git a/tutorials/lite/source_zh_cn/quick_start/quick_start_cpp.md b/tutorials/lite/source_zh_cn/quick_start/quick_start_cpp.md deleted file mode 100644 index ab6e217839a5f36f358266f4def9561a14ea5718..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/quick_start/quick_start_cpp.md +++ /dev/null @@ -1,265 +0,0 @@ -# 体验C++极简推理Demo - -`Linux` `Windows` `X86` `C++` `全流程` `推理应用` `数据准备` `初级` - - - -- [体验C++极简推理Demo](#体验c极简推理demo) - - [概述](#概述) - - [构建与运行](#构建与运行) - - [模型加载](#模型加载) - - [模型编译](#模型编译) - - [模型推理](#模型推理) - - [内存释放](#内存释放) - - - - - -## 概述 - -本教程提供了MindSpore Lite执行推理的示例程序,通过随机输入、执行推理、打印推理结果的方式,演示了C++进行端侧推理的基本流程,用户能够快速了解MindSpore Lite执行推理相关API的使用。本教程通过随机生成的数据作为输入数据,执行MobileNetV2模型的推理,打印获得输出数据。相关代码放置在[mindspore/lite/examples/quick_start_cpp](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/examples/quick_start_cpp)目录。 - -使用MindSpore Lite执行推理主要包括以下步骤: - -1. 模型加载:从文件系统中读取由[模型转换工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html)转换得到的`.ms`模型,通过[mindspore::lite::Model::Import](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#import)导入模型,进行模型解析,创建得到 `Model *`。 -2. 创建配置上下文:创建配置上下文[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#context),保存会话所需的一些基本配置参数,用于指导图编译和图执行。 -3. 创建会话:创建[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)会话,并将上一步得到的[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#context)配置到会话中。 -4. 
图编译:执行推理之前,需要调用[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的`CompileGraph`接口进行图编译。图编译阶段主要进行子图切分、算子选型调度等过程,该阶段会耗费较多时间,所以建议[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)创建一次,编译一次,多次推理。 -5. 输入数据:图执行之前需要向`输入Tensor`中填充数据。 -6. 执行推理:使用[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的`RunGraph`进行模型推理。 -7. 获得输出:图执行结束之后,可以通过`输出Tensor`得到推理结果。 -8. 释放内存:无需使用MindSpore Lite推理框架时,需要释放已创建的[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)和[Model](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#model)。 - -![img](../images/lite_runtime.png) - -> 如需查看MindSpore Lite高级用法,请参考[使用Runtime执行推理(C++)](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/runtime_cpp.html)。 - -## 构建与运行 - -### Linux X86 - -- 环境要求 - - - 系统环境:Linux x86_64,推荐使用Ubuntu 18.04.02LTS - - 编译依赖: - - [CMake](https://cmake.org/download/) >= 3.18.3 - - [GCC](https://gcc.gnu.org/releases.html) >= 7.3.0 - -- 编译构建 - - 在`mindspore/lite/examples/quick_start_cpp`目录下执行[build脚本](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/quick_start_cpp/build.sh),将自动下载MindSpore Lite推理框架库以及文模型文件并编译Demo。 - - ```bash - bash build.sh - ``` - - > 若使用该build脚本下载MindSpore Lite推理框架失败,请手动下载硬件平台为CPU、操作系统为Ubuntu-x64的MindSpore Lite 模型推理框架[mindspore-lite-{version}-linux-x64.tar.gz](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html),将解压后`inference/lib`目录下的`libmindspore-lite.a`文件拷贝到`mindspore/lite/examples/quick_start_cpp/lib`目录、`inference/include`目录里的文件拷贝到`mindspore/lite/examples/quick_start_cpp/include`目录下。 - > - > 若MobileNetV2模型下载失败,请手动下载相关模型文件[mobilenetv2.ms](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_imagenet/mobilenetv2.ms),并将其拷贝到`mindspore/lite/examples/quick_start_cpp/model`目录。 - > - > 通过手动下载并且将文件放到指定位置后,需要再次执行build.sh脚本才能完成编译构建。 - -- 执行推理 - - 
编译构建后,进入`mindspore/lite/examples/quick_start_cpp/build`目录,并执行以下命令,体验MindSpore Lite推理MobileNetV2模型。 - - ```bash - ./mindspore_quick_start_cpp ../model/mobilenetv2.ms - ``` - - 执行完成后将能得到如下结果,打印输出Tensor的名称、输出Tensor的大小,输出Tensor的数量以及前50个数据: - - ```shell - tensor name is:Default/head-MobileNetV2Head/Softmax-op204 tensor size is:4000 tensor elements num is:1000 - output data is:5.26823e-05 0.00049752 0.000296722 0.000377607 0.000177048 8.02107e-05 0.000212864 0.000422286 0.000273189 0.000234105 0.00099807 0.0042331 0.00204993 0.00124968 0.00294458 0.00139795 0.00111545 0.000656357 0.000809457 0.00153731 0.000621049 0.00224637 0.00127045 0.00187557 0.000420144 0.000150638 0.000266477 0.000438628 0.000187773 0.00054668 0.000212853 0.000921661 0.000127179 0.000565873 0.00100394 0.000300159 0.000282677 0.000358067 0.00215288 0.000477845 0.00107596 0.00065134 0.000722132 0.000807501 0.000631415 0.00043247 0.00125898 0.000255094 8.2606e-05 9.91917e-05 0.000794512 - ``` - -### Windows - -- 环境要求 - - - 系统环境:Windows 7,Windows 10;64位。 - - 编译依赖: - - [CMake](https://cmake.org/download/) >= 3.18.3 - - [MinGW GCC](https://sourceforge.net/projects/mingw-w64/files/ToolchainstargettingWin64/PersonalBuilds/mingw-builds/7.3.0/threads-posix/seh/x86_64-7.3.0-release-posix-seh-rt_v5-rev0.7z/download) = 7.3.0 - -- 编译构建 - - - 库下载:请手动下载硬件平台为CPU、操作系统为Windows-x64的MindSpore Lite模型推理框架[mindspore-lite-{version}-win-x64.zip](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html),将解压后`inference/lib`目录下的`libmindspore-lite.a`拷贝到`mindspore/lite/examples/quick_start_cpp/lib`工程目录、`inference/include`目录里的文件拷贝到`mindspore/lite/examples/quick_start_cpp/include`工程目录下。(注意:工程项目下的`lib`、`include`目录需手工创建) - - 模型下载:请手动下载相关模型文件[mobilenetv2.ms](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_imagenet/mobilenetv2.ms),并将其拷贝到`mindspore/lite/examples/quick_start_cpp/model`目录。 - - - 
编译:在`mindspore/lite/examples/quick_start_cpp`目录下执行[build脚本](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/quick_start_cpp/build.bat),将能够自动下载相关文件并编译Demo。 - - ```bash - call build.bat - ``` - -- 执行推理 - - 编译构建后,进入`mindspore/lite/examples/quick_start_cpp/build`目录,并执行以下命令,体验MindSpore Lite推理MobileNetV2模型。 - - ```bash - call mindspore_quick_start_cpp.exe ../model/mobilenetv2.ms - ``` - - 执行完成后将能得到如下结果,打印输出Tensor的名称、输出Tensor的大小,输出Tensor的数量以及前50个数据: - - ```shell - tensor name is:Default/head-MobileNetV2Head/Softmax-op204 tensor size is:4000 tensor elements num is:1000 - output data is:5.26823e-05 0.00049752 0.000296722 0.000377607 0.000177048 8.02107e-05 0.000212864 0.000422286 0.000273189 0.000234105 0.00099807 0.0042331 0.00204993 0.00124968 0.00294458 0.00139795 0.00111545 0.000656357 0.000809457 0.00153731 0.000621049 0.00224637 0.00127045 0.00187557 0.000420144 0.000150638 0.000266477 0.000438628 0.000187773 0.00054668 0.000212853 0.000921661 0.000127179 0.000565873 0.00100394 0.000300159 0.000282677 0.000358067 0.00215288 0.000477845 0.00107596 0.00065134 0.000722132 0.000807501 0.000631415 0.00043247 0.00125898 0.000255094 8.2606e-05 9.91917e-05 0.000794512 - ``` - -## 配置CMake - -以下是通过CMake集成`libmindspore-lite.a`静态库时的示例代码。 - -> 集成`libmindspore-lite.a`静态库时需要将`-Wl,--whole-archive`的选项传递给链接器。 -> -> 由于在编译MindSpore Lite的时候增加了`-fstack-protector-strong`栈保护的编译选项,所以在Windows平台上还需要链接MinGW中的`ssp`库。 -> -> 由于在编译MindSpore Lite的时候增加了对so库文件处理的支持,所以在Linux平台上还需要链接`dl`库。 - -```cmake -cmake_minimum_required(VERSION 3.18.3) -project(QuickStartCpp) - -if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.3.0) - message(FATAL_ERROR "GCC version ${CMAKE_CXX_COMPILER_VERSION} must not be less than 7.3.0") -endif() - -# Add directory to include search path -include_directories(${CMAKE_CURRENT_SOURCE_DIR}) - -# Add directory to link search path -link_directories(${CMAKE_CURRENT_SOURCE_DIR}/lib) - -file(GLOB_RECURSE QUICK_START_CXX 
${CMAKE_CURRENT_SOURCE_DIR}/*.cc) -add_executable(mindspore_quick_start_cpp ${QUICK_START_CXX}) - -target_link_libraries( - mindspore_quick_start_cpp - -Wl,--whole-archive mindspore-lite -Wl,--no-whole-archive - pthread -) - -# Due to the increased compilation options for stack protection, -# it is necessary to target link ssp library when Use the static library in Windows. -if(WIN32) - target_link_libraries( - mindspore_quick_start_cpp - ssp - ) -else() - target_link_libraries( - mindspore_quick_start_cpp - dl - ) -endif() -``` - -## 模型加载 - -模型加载需要从文件系统中读取MindSpore Lite模型,并通过`mindspore::lite::Model::Import`函数导入模型进行解析。 - -```c++ -// Read model file. -size_t size = 0; -char *model_buf = ReadFile(model_path, &size); -if (model_buf == nullptr) { - std::cerr << "Read model file failed." << std::endl; - return -1; -} -// Load the .ms model. -auto model = mindspore::lite::Model::Import(model_buf, size); -delete[](model_buf); -if (model == nullptr) { - std::cerr << "Import model file failed." << std::endl; - return -1; -} -``` - -## 模型编译 - -模型编译主要包括创建配置上下文、创建会话、图编译等步骤。 - -```c++ -mindspore::session::LiteSession *Compile(mindspore::lite::Model *model) { - // Create and init context. - auto context = std::make_shared(); - if (context == nullptr) { - std::cerr << "New context failed while." << std::endl; - return nullptr; - } - - // Create the session. - mindspore::session::LiteSession *session = mindspore::session::LiteSession::CreateSession(context.get()); - if (session == nullptr) { - std::cerr << "CreateSession failed while running." << std::endl; - return nullptr; - } - - // Compile graph. - auto ret = session->CompileGraph(model); - if (ret != mindspore::lite::RET_OK) { - delete session; - std::cerr << "Compile failed while running." << std::endl; - return nullptr; - } - - // Note: when use model->Free(), the model can not be compiled again. 
- if (model != nullptr) { - model->Free(); - } - return session; -} -``` - -## 模型推理 - -模型推理主要包括输入数据、执行推理、获得输出等步骤,其中本示例中的输入数据是通过随机数据构造生成,最后将执行推理后的输出结果打印出来。 - -```c++ -int Run(mindspore::session::LiteSession *session) { - auto inputs = session->GetInputs(); - - // Generate random data as input data. - auto ret = GenerateInputDataWithRandom(inputs); - if (ret != mindspore::lite::RET_OK) { - std::cerr << "Generate Random Input Data failed." << std::endl; - return ret; - } - - // Run Inference. - ret = session->RunGraph(); - if (ret != mindspore::lite::RET_OK) { - std::cerr << "Inference error " << ret << std::endl; - return ret; - } - - // Get Output Tensor Data. - auto out_tensors = session->GetOutputs(); - for (auto tensor : out_tensors) { - std::cout << "tensor name is:" << tensor.first << " tensor size is:" << tensor.second->Size() - << " tensor elements num is:" << tensor.second->ElementsNum() << std::endl; - auto out_data = reinterpret_cast(tensor.second->MutableData()); - std::cout << "output data is:"; - for (int i = 0; i < tensor.second->ElementsNum() && i <= 50; i++) { - std::cout << out_data[i] << " "; - } - std::cout << std::endl; - } - return mindspore::lite::RET_OK; -} -``` - -## 内存释放 - -无需使用MindSpore Lite推理框架时,需要释放已经创建的`LiteSession`和`Model`。 - -```c++ -// Delete model buffer. -delete model; -// Delete session buffer. 
-delete session; -``` diff --git a/tutorials/lite/source_zh_cn/quick_start/quick_start_java.md b/tutorials/lite/source_zh_cn/quick_start/quick_start_java.md deleted file mode 100644 index 68851640c468b316a41f160399011341f74643d5..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/quick_start/quick_start_java.md +++ /dev/null @@ -1,188 +0,0 @@ -# 体验Java极简推理Demo - -`Linux` `X86` `Java` `全流程` `推理应用` `数据准备` `初级` - - - -- [体验Java极简推理Demo](#体验java极简推理demo) - - [概述](#概述) - - [构建与运行](#构建与运行) - - [模型加载](#模型加载) - - [模型编译](#模型编译) - - [模型推理](#模型推理) - - [内存释放](#内存释放) - - - - - -## 概述 - -本教程提供了MindSpore Lite执行推理的示例程序,通过随机输入、执行推理、打印推理结果的方式,演示了利用[MindSpore Lite Java API](https://www.mindspore.cn/doc/api_java/zh-CN/master/index.html)进行端侧推理的基本流程,用户能够快速了解MindSpore Lite执行推理相关Java API的使用。本教程通过随机生成的数据作为输入数据,执行MobileNetV2模型的推理,打印获得输出数据。相关代码放置在[mindspore/lite/examples/quick_start_java](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/examples/quick_start_java)目录。 - -使用MindSpore Lite执行推理主要包括以下步骤: - -1. 模型加载:从文件系统中读取由[模型转换工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html)转换得到的`.ms`模型,通过Model的[loadModel](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html#loadmodel)导入模型。 -2. 创建配置上下文:创建配置上下文[MSConfig](https://www.mindspore.cn/doc/api_java/zh-CN/master/msconfig.html#msconfig),保存会话所需的一些基本配置参数,用于指导图编译和图执行。主要包括`deviceType`:设备类型、`threadNum`:线程数、`cpuBindMode`:CPU绑定模式、`enable_float16`:是否优先使用float16算子。 -3. 创建会话:创建[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession),并调用[init](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#init)方法将上一步得到的[MSConfig](https://www.mindspore.cn/doc/api_java/zh-CN/master/msconfig.html#msconfig)配置到会话中。 -4. 
图编译:在图执行前,需要调用[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)的[compileGraph](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#compilegraph)接口进行图编译,主要进行子图切分、算子选型调度。这部分会耗费较多时间,所以建议[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)创建一次,编译一次,多次执行。 -5. 输入数据:图执行之前需要向输入Tensor中填充数据。 -6. 执行推理:使用[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)的[runGraph](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#rungraph)进行模型推理。 -7. 获得输出:图执行结束之后,可以通过输出Tensor得到推理结果。 -8. 释放内存:无需使用MindSpore Lite推理框架的时候,需要释放已创建的[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)和[model](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html#model)。 - -![img](../images/lite_runtime.png) - -> 如需查看MindSpore Lite高级用法,请参考[使用Runtime执行推理(Java)](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/runtime_java.html)。 - -## 构建与运行 - -- 环境要求 - - 系统环境:Linux x86_64,推荐使用Ubuntu 18.04.02LTS - - 编译依赖: - - [Git](https://git-scm.com/downloads) >= 2.28.0 - - [Maven](https://maven.apache.org/download.cgi) >= 3.3 - - [OpenJDK](https://openjdk.java.net/install/) >= 1.8 - -- 编译构建 - - 在`mindspore/lite/examples/quick_start_java`目录下执行[build脚本](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/quick_start_java/build.sh),将自动下载MindSpore Lite推理框架库以及文模型文件并编译Demo。 - - ```bash - bash build.sh - ``` - - > 若MindSpore Lite推理框架下载失败,请手动下载硬件平台为CPU、操作系统为Ubuntu-x64的MindSpore Lite 框架[mindspore-lite-{version}-linux-x64.tar.gz](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html),解压后将`inference/lib/jar`目录下的`libmindspore-lite.so`、`libmindspore-lite-jni.so`以及`libmindspore-lite-java.jar`拷贝到`mindspore/lite/examples/quick_start_java/lib`目录。 - > - > 
若MobileNetV2模型下载失败,请手动下载相关模型文件[mobilenetv2.ms](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_imagenet/mobilenetv2.ms),并将其拷贝到`mindspore/lite/examples/quick_start_java/model/`目录。 - > - > 通过手动下载并且将文件放到指定位置后,需要再次执行build.sh脚本才能完成编译构建。 - -- 执行推理 - - 编译构建后,进入`mindspore/lite/examples/quick_start_java/target`目录,并执行以下命令,体验MindSpore Lite推理MobileNetV2模型。 - - ```bash - export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:../lib/ - java -Djava.library.path=../lib/ -classpath .:./quick_start_java.jar:../lib/mindspore-lite-java.jar com.mindspore.lite.demo.Main ../model/mobilenetv2.ms - ``` - - 执行完成后将能得到如下结果,打印输出Tensor的名称、输出Tensor的大小,输出Tensor的数量以及前50个数据: - - ```shell - out tensor shape: [1,1000,] and out data: 5.4091015E-5 4.030303E-4 3.032344E-4 4.0029243E-4 2.2730739E-4 8.366581E-5 2.629827E-4 3.512394E-4 2.879536E-4 1.9557697E-4xxxxxxxxxx MindSpore Lite 1.1.0out tensor shape: [1,1000,] and out data: 5.4091015E-5 4.030303E-4 3.032344E-4 4.0029243E-4 2.2730739E-4 8.366581E-5 2.629827E-4 3.512394E-4 2.879536E-4 1.9557697E-4tensor name is:Default/Sigmoid-op204 tensor size is:2000 tensor elements num is:500output data is:3.31223e-05 1.99382e-05 3.01624e-05 0.000108345 1.19685e-05 4.25282e-06 0.00049955 0.000340809 0.00199094 0.000997094 0.00013585 1.57605e-05 4.34131e-05 1.56114e-05 0.000550819 2.9839e-05 4.70447e-06 6.91601e-06 0.000134483 2.06795e-06 4.11612e-05 2.4667e-05 7.26248e-06 2.37974e-05 0.000134513 0.00142482 0.00011707 0.000161848 0.000395011 3.01961e-05 3.95325e-05 3.12398e-06 3.57709e-05 1.36277e-06 1.01068e-05 0.000350805 5.09019e-05 0.000805241 6.60321e-05 2.13734e-05 9.88654e-05 2.1991e-06 3.24065e-05 3.9479e-05 4.45178e-05 0.00205024 0.000780899 2.0633e-05 1.89997e-05 0.00197261 0.000259391 - ``` - -## 模型加载 - -首先从文件系统中读取MindSpore Lite模型,并通过`model.loadModel`函数导入模型进行解析。 - -```java -boolean ret = model.loadModel(modelPath); -if (!ret) { - System.err.println("Load model failed, model path is " + modelPath); - return; -} -``` - -## 模型编译 - 
-模型编译主要包括创建配置上下文、创建会话、图编译等步骤。 - -```java -private static boolean compile() { - MSConfig msConfig = new MSConfig(); - // You can set config through Init Api or use the default parameters directly. - // The default parameter is that the backend type is DeviceType.DT_CPU, and the number of threads is 2. - boolean ret = msConfig.init(DeviceType.DT_CPU, 2); - if (!ret) { - System.err.println("Init context failed"); - return false; - } - - // Create the MindSpore lite session. - session = new LiteSession(); - ret = session.init(msConfig); - msConfig.free(); - if (!ret) { - System.err.println("Create session failed"); - model.free(); - return false; - } - - // Compile graph. - ret = session.compileGraph(model); - if (!ret) { - System.err.println("Compile graph failed"); - model.free(); - return false; - } - return true; -} -``` - -## 模型推理 - -模型推理主要包括输入数据、执行推理、获得输出等步骤,其中本示例中的输入数据是通过随机数据构造生成,最后将执行推理后的输出结果打印出来。 - -```java -private static boolean run() { - MSTensor inputTensor = session.getInputsByTensorName("2031_2030_1_construct_wrapper:x"); - if (inputTensor.getDataType() != DataType.kNumberTypeFloat32) { - System.err.println("Input tensor shape do not float, the data type is " + inputTensor.getDataType()); - return false; - } - // Generator Random Data. - int elementNums = inputTensor.elementsNum(); - float[] randomData = generateArray(elementNums); - byte[] inputData = floatArrayToByteArray(randomData); - - // Set Input Data. - inputTensor.setData(inputData); - - // Run Inference. - boolean ret = session.runGraph(); - if (!ret) { - System.err.println("MindSpore Lite run failed."); - return false; - } - - // Get Output Tensor Data. - MSTensor outTensor = session.getOutputByTensorName("Default/head-MobileNetV2Head/Softmax-op204"); - - // Print out Tensor Data. 
- StringBuilder msgSb = new StringBuilder(); - msgSb.append("out tensor shape: ["); - int[] shape = outTensor.getShape(); - for (int dim : shape) { - msgSb.append(dim).append(","); - } - msgSb.append("]"); - if (outTensor.getDataType() != DataType.kNumberTypeFloat32) { - System.err.println("output tensor shape do not float, the data type is " + outTensor.getDataType()); - return false; - } - float[] result = outTensor.getFloatData(); - if (result == null) { - System.err.println("decodeBytes return null"); - return false; - } - msgSb.append(" and out data:"); - for (int i = 0; i < 10 && i < outTensor.elementsNum(); i++) { - msgSb.append(" ").append(result[i]); - } - System.out.println(msgSb.toString()); - return true; -} -``` - -## 内存释放 - -无需使用MindSpore Lite推理框架时,需要释放已经创建的`LiteSession`和`Model`。 - -```java -// Delete session buffer. -session.free(); -// Delete model buffer. -model.free(); -``` diff --git a/tutorials/lite/source_zh_cn/quick_start/train_lenet.md b/tutorials/lite/source_zh_cn/quick_start/train_lenet.md deleted file mode 100644 index 8b924bf8d2f9bbe9676a122306a51cace7b36dc6..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/quick_start/train_lenet.md +++ /dev/null @@ -1,397 +0,0 @@ -# 基于C++接口实现端侧训练 - -`Linux` `C++` `Android` `全流程` `模型导出` `模型转换` `模型训练` `初级` `中级` `高级` - - - -- [基于C++接口实现端侧训练](#基于c接口实现端侧训练) - - [概述](#概述) - - [准备](#准备) - - [下载数据集](#下载数据集) - - [安装MindSpore](#安装MindSpore) - - [下载并安装MindSpore Lite](#下载并安装MindSpore-Lite) - - [连接安卓设备](#连接安卓设备) - - [模型训练和验证](#模型训练和验证) - - [示例程序详解](#示例程序详解) - - [示例程序结构](#示例程序结构) - - [定义并导出模型](#定义并导出模型) - - [转换模型](#转换模型) - - [训练模型](#训练模型) - - - - - -## 概述 - -本教程基于[LeNet训练示例代码](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/examples/train_lenet),演示在Android设备上训练一个LeNet。 - -端侧训练流程如下: - -1. 基于MindSpore构建训练模型,并导出`MindIR`模型文件。 -2. 使用MindSpore Lite `Converter`工具,将`MindIR`模型转为端侧`MS`模型。 -3. 
调用MindSpore Lite训练API,加载端侧`MS`模型,执行训练。 - -下面章节首先通过示例代码中集成好的脚本,帮你快速部署并执行示例,再详细讲解实现细节。 - -## 准备 - -推荐使用Ubuntu 18.04 64位操作系统。 - -### 下载数据集 - -示例中的`MNIST`数据集由10类28*28的灰度图片组成,训练数据集包含60000张图片,测试数据集包含10000张图片。 - -> MNIST数据集官网下载地址:,共4个下载链接,分别是训练数据、训练标签、测试数据和测试标签。 - -下载并解压到本地,解压后的训练和测试集分别存放于`/PATH/MNIST_Data/train`和`/PATH/MNIST_Data/test`路径下。 - -目录结构如下: - -```text -MNIST_Data/ -├── test -│   ├── t10k-images-idx3-ubyte -│   └── t10k-labels-idx1-ubyte -└── train - ├── train-images-idx3-ubyte - └── train-labels-idx1-ubyte -``` - -### 安装MindSpore - -你可以通过`pip`或是源码的方式安装MindSpore,详见[MindSpore官网安装教程](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_pip.md#)。 - -### 下载并安装MindSpore Lite - -通过`git`克隆源码,进入源码目录,`Linux`指令如下: - -```bash -git clone https://gitee.com/mindspore/mindspore.git -cd ./mindspore -``` - -源码路径下的`mindspore/lite/examples/train_lenet`目录包含了本示例程序的源码。 - -请到[MindSpore Lite下载页面](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html)下载mindspore-lite-{version}-linux-x64.tar.gz以及mindspore-lite-{version}-android-aarch64.tar.gz。其中,mindspore-lite-{version}-linux-x64.tar.gz是MindSpore Lite在x86平台的安装包,里面包含模型转换工具converter_lite,本示例用它来将MINDIR模型转换成MindSpore Lite支持的`.ms`格式;mindspore-lite-{version}-android-aarch64.tar.gz是MindSpore Lite在Android平台的安装包,里面包含训练运行时库libmindspore-lite.so,本示例用它所提供的接口在Android上训练模型。下载完成后,需要将mindspore-lite-{version}-linux-x64.tar.gz重命名为mindspore-lite-{version}-train-linux-x64.tar.gz,将mindspore-lite-{version}-android-aarch64.tar.gz重命名为mindspore-lite-{version}-train-android-aarch64.tar.gz,最后将重命名后的文件放到MindSpore源码下的`output`目录(如果没有`output`目录,请创建它)。 - -假设下载的安装包存放在`/Downloads`目录,上述操作对应的`Linux`指令如下: - -```bash -mkdir output -cp /Downloads/mindspore-lite-{version}-linux-x64.tar.gz output/mindspore-lite-{version}-train-linux-x64.tar.gz -cp /Downloads/mindspore-lite-{version}-android-aarch64.tar.gz output/mindspore-lite-{version}-train-android-aarch64.tar.gz -``` - 
-您也可以通过[源码编译](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html)直接生成端侧训练框架对应的x86平台安装包mindspore-lite-{version}-train-linux-x64.tar.gz以及Android平台安装包mindspore-lite-{version}-train-android-aarch64.tar.gz,源码编译的安装包会自动生成在`output`目录下,请确保`output`目录下同时存在这两个安装包。 - -### 连接安卓设备 - -准备好一台Android设备,并通过USB与工作电脑正确连接。手机需开启“USB调试模式”,华为手机一般在`设置->系统和更新->开发人员选项->USB调试`中打开“USB调试模式”。 - -本示例使用[`adb`](https://developer.android.google.cn/studio/command-line/adb)工具与Android设备进行通信,在工作电脑上远程操控移动设备;如果没有安装`adb`工具,可以执行`apt install adb`安装。 - -## 模型训练和验证 - -进入示例代码目录并执行训练脚本,`Linux`指令如下: - -```bash -cd mindspore/lite/examples/train_lenet -bash prepare_and_run.sh -D /PATH/MNIST_Data -t arm64 -``` - -其中`/PATH/MNIST_Data`是你工作电脑上存放MNIST数据集的绝对路径,`-t arm64`为执行训练和推理的设备类型。 - -`prepare_and_run.sh`脚本做了以下工作: - -1. 导出`lenet_tod.mindir`模型文件; -2. 调用上节的模型转换工具将`lenet_tod.mindir`转换为`lenet_tod.ms`文件; -3. 将`lenet_tod.ms`、MNIST数据集和相关依赖库文件推送至你的`Android`设备; -4. 执行训练、保存并推理模型。 - -Android设备上训练LeNet模型每轮会输出损失值和准确率;最后选择训练完成的模型执行推理,验证`MNIST`手写字识别精度。端侧训练LeNet模型10个epoch的结果如下所示(测试准确率会受设备差异的影响): - -```bash -======Training Locally========= -1.100: Loss is 1.19449 -1.200: Loss is 0.477986 -1.300: Loss is 0.440362 -1.400: Loss is 0.165605 -1.500: Loss is 0.368853 -1.600: Loss is 0.179764 -1.700: Loss is 0.173386 -1.800: Loss is 0.0767713 -1.900: Loss is 0.493 -1.1000: Loss is 0.460352 -1.1100: Loss is 0.262044 -1.1200: Loss is 0.222022 -1.1300: Loss is 0.058006 -1.1400: Loss is 0.0794117 -1.1500: Loss is 0.0241433 -1.1600: Loss is 0.127109 -1.1700: Loss is 0.0557566 -1.1800: Loss is 0.0698758 -Epoch (1): Loss is 0.384778 -Epoch (1): Training Accuracy is 0.8702 -2.100: Loss is 0.0538642 -2.200: Loss is 0.444504 -2.300: Loss is 0.0806976 -2.400: Loss is 0.0495807 -2.500: Loss is 0.178903 -2.600: Loss is 0.265705 -2.700: Loss is 0.0933796 -2.800: Loss is 0.0880472 -2.900: Loss is 0.0480734 -2.1000: Loss is 0.241272 -2.1100: Loss is 0.0920451 -2.1200: Loss is 0.371406 -2.1300: Loss is 0.0365746 -2.1400: Loss is 0.0784372 
-2.1500: Loss is 0.207537 -2.1600: Loss is 0.442626 -2.1700: Loss is 0.0814725 -2.1800: Loss is 0.12081 -Epoch (2): Loss is 0.176118 -Epoch (2): Training Accuracy is 0.94415 -...... -10.1000: Loss is 0.0984653 -10.1100: Loss is 0.189702 -10.1200: Loss is 0.0896037 -10.1300: Loss is 0.0138191 -10.1400: Loss is 0.0152357 -10.1500: Loss is 0.12785 -10.1600: Loss is 0.026495 -10.1700: Loss is 0.436495 -10.1800: Loss is 0.157564 -Epoch (10): Loss is 0.102652 -Epoch (10): Training Accuracy is 0.96805 -Eval Accuracy is 0.965244 -===Evaluating trained Model===== -Eval Accuracy is 0.965244 -``` - -> 如果你没有Android设备,也可以执行`bash prepare_and_run.sh -D /PATH/MNIST_Data -t x86`直接在PC上运行本示例。 - -## 示例程序详解 - -### 示例程序结构 - -```text - train_lenet/ - ├── model - │ ├── lenet_export.py - │ ├── prepare_model.sh - │ └── train_utils.py - | - ├── scripts - │ ├── eval.sh - │ └── train.sh - │ - ├── src - │ ├── net_runner.cc - │ ├── net_runner.h - │ └── utils.h - │ - ├── README.md - ├── README_CN.md - └── prepare_and_run.sh -``` - -### 定义并导出模型 - -首先我们需要基于MindSpore框架创建一个LeNet模型,本例中直接用MindSpore model_zoo的现有[LeNet模型](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/lenet)。 - -> 本小结使用MindSpore云侧功能导出,更多信息请参考[MindSpore教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/index.html)。 - -```python -import numpy as np -from mindspore import context, Tensor -import mindspore.common.dtype as mstype -from mindspore.train.serialization import export -from lenet import LeNet5 -from train_utils import TrainWrap - -n = LeNet5() -n.set_train() -context.set_context(mode=context.PYNATIVE_MODE, device_target="CPU", save_graphs=False) -``` - -然后定义输入和标签张量大小: - -```python -BATCH_SIZE = 32 -x = Tensor(np.ones((BATCH_SIZE, 1, 32, 32)), mstype.float32) -label = Tensor(np.zeros([BATCH_SIZE]).astype(np.int32)) -net = TrainWrap(n) -``` - -定义损失函数、网络可训练参数、优化器,并启用单步训练,由`TrainWrap`函数实现。 - -```python -import mindspore.nn as nn -from mindspore.common.parameter import ParameterTuple - -def 
TrainWrap(net, loss_fn=None, optimizer=None, weights=None): - """ - TrainWrap - """ - if loss_fn is None: - loss_fn = nn.SoftmaxCrossEntropyWithLogits(reduction='mean', sparse=True) - loss_net = nn.WithLossCell(net, loss_fn) - loss_net.set_train() - if weights is None: - weights = ParameterTuple(net.trainable_params()) - if optimizer is None: - optimizer = nn.Adam(weights, learning_rate=0.003, beta1=0.9, beta2=0.999, eps=1e-5, use_locking=False, use_nesterov=False, weight_decay=4e-5, loss_scale=1.0) - train_net = nn.TrainOneStepCell(loss_net, optimizer) - return train_net -``` - -最后调用`export`接口将模型导出为`MindIR`文件保存(目前端侧训练仅支持`MindIR`格式)。 - -```python -export(net, x, label, file_name="lenet_tod", file_format='MINDIR') -print("finished exporting") -``` - -如果输出`finished exporting`表示导出成功,生成的`lenet_tod.mindir`文件在`../train_lenet/model`目录下。完整代码参见`lenet_export.py`和`train_utils.py`。 - -### 转换模型 - -在`prepare_model.sh`中使用MindSpore Lite `converter_lite`工具将`lenet_tod.mindir`转换为`ms`模型文件,执行指令如下: - -```bash -./converter_lite --fmk=MINDIR --trainModel=true --modelFile=lenet_tod.mindir --outputFile=lenet_tod -``` - -转换成功后,当前目录下会生成`lenet_tod.ms`模型文件。 - -> 更多用法参见[训练模型转换](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_train.html)。 - -### 训练模型 - -模型训练的处理详细流程请参考[net_runner.cc源码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/train_lenet/src/net_runner.cc)。 - -模型训练的主函数为: - -```cpp -int NetRunner::Main() { - // Load model and create session - InitAndFigureInputs(); - // initialize the dataset - InitDB(); - // Execute the training - TrainLoop(); - // Evaluate the trained model - CalculateAccuracy(); - - if (epochs_ > 0) { - auto trained_fn = ms_file_.substr(0, ms_file_.find_last_of('.')) + "_trained.ms"; - // Save the trained model to file - session_->SaveToFile(trained_fn); - } - return 0; -} -``` - -1. 
加载模型 - - `InitAndFigureInputs`函数加载转换后的`MS`模型文件,调用`CreateSession`接口创建`TrainSession`实例(下述代码中的`ms_file_`就是转换模型阶段生成的`lenet_tod.ms`模型)。 - - ```cpp - void NetRunner::InitAndFigureInputs() { - mindspore::lite::Context context; - context.device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND; - context.device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false; - context.device_list_[0].device_type_ = mindspore::lite::DT_CPU; - context.thread_num_ = 2; - - session_ = mindspore::session::TrainSession::CreateSession(ms_file_, &context); - MS_ASSERT(nullptr != session_); - loop_ = mindspore::session::TrainLoop::CreateTrainLoop(session_); - - acc_metrics_ = std::shared_ptr(new AccuracyMetrics); - - loop_->Init({acc_metrics_.get()}); - - auto inputs = session_->GetInputs(); - MS_ASSERT(inputs.size() > 1); - auto nhwc_input_dims = inputs.at(0)->shape(); - MS_ASSERT(nhwc_input_dims.size() == 4); - batch_size_ = nhwc_input_dims.at(0); - h_ = nhwc_input_dims.at(1); - w_ = nhwc_input_dims.at(2); - } - ``` - -2. 数据集处理 - - `InitDB`函数预处理`MNIST`数据集并加载至内存。MindData提供了数据预处理API,用户可参见[C++ API 说明文档](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html) 获取更多详细信息。 - - ```cpp - int NetRunner::InitDB() { - train_ds_ = Mnist(data_dir_ + "/train", "all"); - - TypeCast typecast_f("float32"); - Resize resize({h_, w_}); - train_ds_ = train_ds_->Map({&resize, &typecast_f}, {"image"}); - - TypeCast typecast("int32"); - train_ds_ = train_ds_->Map({&typecast}, {"label"}); - - train_ds_ = train_ds_->Shuffle(2); - train_ds_ = train_ds_->Batch(batch_size_, true); - - if (verbose_) { - std::cout << "DatasetSize is " << train_ds_->GetDatasetSize() << std::endl; - } - if (train_ds_->GetDatasetSize() == 0) { - std::cout << "No relevant data was found in " << data_dir_ << std::endl; - MS_ASSERT(train_ds_->GetDatasetSize() != 0); - } - return 0; - } - ``` - -3. 
执行训练 - - 首先创建训练回调类对象(例如`LRScheduler`、`LossMonitor`、`ClassificationTrainAccuracyMonitor`和`CkptSaver`)数组指针;然后调用`TrainLoop`类的`Train`函数,将模型设置为训练模式;最后在训练过程中遍历执行回调类对象对应的函数并输出训练日志。`CkptSaver`会根据设定训练步长数值为当前会话保存`CheckPoint`模型,`CheckPoint`模型包含已更新的权重,在应用崩溃或设备出现故障时可以直接加载`CheckPoint`模型,继续开始训练。 - - ```cpp - int NetRunner::TrainLoop() { - struct mindspore::lite::StepLRLambda step_lr_lambda(1, 0.7); - mindspore::lite::LRScheduler step_lr_sched(mindspore::lite::StepLRLambda, static_cast(&step_lr_lambda), 1); - - mindspore::lite::LossMonitor lm(100); - mindspore::lite::ClassificationTrainAccuracyMonitor am(1); - mindspore::lite::CkptSaver cs(1000, std::string("lenet")); - Rescaler rescale(255.0); - - loop_->Train(epochs_, train_ds_.get(), std::vector{&rescale, &lm, &cs, &am, &step_lr_sched}); - return 0; - } - ``` - -4. 验证精度 - - 训练结束后调用`CalculateAccuracy`评估模型精度。该函数调用`TrainSession`的`Eval`方法,将模型设置为推理模式。 - - ```cpp - float NetRunner::CalculateAccuracy(int max_tests) { - test_ds_ = Mnist(data_dir_ + "/test", "all"); - TypeCast typecast_f("float32"); - Resize resize({h_, w_}); - test_ds_ = test_ds_->Map({&resize, &typecast_f}, {"image"}); - - TypeCast typecast("int32"); - test_ds_ = test_ds_->Map({&typecast}, {"label"}); - test_ds_ = test_ds_->Batch(batch_size_, true); - - Rescaler rescale(255.0); - - loop_->Eval(test_ds_.get(), std::vector{&rescale}); - std::cout << "Eval Accuracy is " << acc_metrics_->Eval() << std::endl; - - return 0.0; - } - ``` diff --git a/tutorials/lite/source_zh_cn/quick_start/train_lenet_java.md b/tutorials/lite/source_zh_cn/quick_start/train_lenet_java.md deleted file mode 100644 index 4baa4169b6b35a29bc706df064f6ee9587f3de90..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/quick_start/train_lenet_java.md +++ /dev/null @@ -1,235 +0,0 @@ -# 基于Java接口实现端侧训练 - -`Android` `Java` `全流程` `模型加载` `模型训练` `数据准备` `初级` `中级` `高级` - - - -- [基于Java接口实现端侧训练](#基于java接口实现端侧训练) - - [概述](#概述) - - [准备](#准备) - - [环境要求](#环境要求) - - 
[下载MindSpore并编译端侧训练Java包](#下载MindSpore并编译端侧训练Java包) - - [下载数据集](#下载数据集) - - [部署应用](#部署应用) - - [运行依赖](#运行依赖) - - [构建与运行](#构建与运行) - - [示例程序详细说明](#示例程序详细说明) - - [示例程序结构](#示例程序结构) - - [编写端侧训练代码](#编写端侧训练代码) - - - - - -## 概述 - -本教程通过构建并部署Java版本的LeNet网络的训练,演示MindSpore Lite端侧训练Java接口的使用。 首先指导您在本地成功训练LeNet模型,然后讲解示例代码。 - -## 准备 - -### 环境要求 - -- 系统环境:Linux x86_64,推荐使用Ubuntu 18.04.02LTS - -- 软件依赖 - - - [Git](https://git-scm.com/downloads) >= 2.28.0 - - - [Maven](https://maven.apache.org/download.cgi) >= 3.3 - - - [OpenJDK](https://openjdk.java.net/install/) >= 1.8 - -### 下载MindSpore并编译端侧训练Java包 - -首先克隆源码,然后编译MindSpore Lite端侧训练Java包,`Linux`指令如下: - -```bash -git clone https://gitee.com/mindspore/mindspore.git -cd mindspore -bash build.sh -A java -ecpu -Ton -j8 -``` - -更详细的编译说明,请参考[编译MindSpore Lite](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html)章节。 -本教程使用的示例源码在`mindspore/lite/examples/train_lenet_java`目录。 - -### 下载数据集 - -示例中的`MNIST`数据集由10类28*28的灰度图片组成,训练数据集包含60000张图片,测试数据集包含10000张图片。 - -> MNIST数据集官网下载地址:,共4个下载链接,分别是训练数据、训练标签、测试数据和测试标签。 - -下载并解压到本地,解压后的训练和测试集分别存放于`/PATH/MNIST_Data/train`和`/PATH/MNIST_Data/test`路径下。 - -目录结构如下: - -```text -MNIST_Data/ -├── test -│   ├── t10k-images-idx3-ubyte -│   └── t10k-labels-idx1-ubyte -└── train - ├── train-images-idx3-ubyte - └── train-labels-idx1-ubyte -``` - -## 部署应用 - -### 运行依赖 - -在准备阶段,我们已经成功编译出MindSpore Lite端侧训练Java包。假设您的MindSpore源码路径为`/codes/mindspore`,对应编译出的Java包在`/codes/mindspore/output`目录。解压Java包并拷贝相关文件到示例程序目录。命令如下: - -```shell -cd /codes/mindspore/output -tar xzf mindspore-lite-${version}-train-linux-x64-jar.tar.gz -mkdir ../mindspore/lite/examples/train_lenet_java/lib -cp mindspore-lite-${version}-train-linux-x64-jar/jar/* ../mindspore/lite/examples/train_lenet_java/lib/ -``` - -### 构建与运行 - -1. 首先进入示例工程所在目录,使用maven构建本示例。命令如下: - - ```shell - cd /codes/mindspore/mindspore/lite/examples/train_lenet_java - mvn package - ``` - -2. 
运行示例程序,命令如下: - - ```shell - cd /codes/mindspore/mindspore/lite/examples/train_lenet_java/target - java -Djava.library.path=../lib/ -classpath .:./train_lenet_java.jar:../lib/mindspore-lite-java.jar com.mindspore.lite.train_lenet.Main ../resources/model/lenet_tod.ms /PATH/MNIST_Data/ - ``` - - > ../resources/model/lenet_tod.ms是示例工程中预置的LeNet训练模型,您也可以参考[训练模型转换](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_train.html),自行转换出LeNet模型。 - > - > /PATH/MNIST_Data/是MNIST数据集所在路径。 - - 示例运行结果如下: - - ```text - MindSpore Lite 1.2.0 - ==========Loading Model, Create Train Session============= - batch_size: 32 - ==========Initing DataSet================ - train data cnt: 60000 - test data cnt: 10000 - ==========Training Model=================== - step_500: Loss is 0.05553353 [min=0.010149269] max_accc=0.9543269 - step_1000: Loss is 0.15295759 [min=0.0018140086] max_accc=0.96594554 - step_1500: Loss is 0.018035552 [min=0.0018140086] max_accc=0.9704527 - step_2000: Loss is 0.029250022 [min=0.0010245014] max_accc=0.9765625 - step_2500: Loss is 0.11875624 [min=7.5288175E-4] max_accc=0.9765625 - step_3000: Loss is 0.046675075 [min=7.5288175E-4] max_accc=0.9765625 - step_3500: Loss is 0.034442786 [min=4.3545474E-4] max_accc=0.97686297 - ==========Evaluating The Trained Model============ - accuracy = 0.9770633 - Trained model successfully saved: ../resources/model/lenet_tod_trained.ms - ``` - -## 示例程序详细说明 - -### 示例程序结构 - -```text -train_lenet_java -├── lib -├── pom.xml -├── resources -│   └── model -│   └── lenet_tod.ms # LeNet训练模型 -├── src -│   └── main -│   └── java -│   └── com -│   └── mindspore -│   └── lite -│   ├── train_lenet -│   │   ├── DataSet.java # MNIST数据集处理 -│   │   ├── Main.java # Main函数 -│   │   └── NetRunner.java # 整体训练流程 - -``` - -### 编写端侧推理代码 - -详细的Java接口使用请参考。 - -1. 
加载MindSpore Lite模型文件,构建会话。 - - ```java - MSConfig msConfig = new MSConfig(); - // arg 0: DeviceType:DT_CPU -> 0 - // arg 1: ThreadNum -> 2 - // arg 2: cpuBindMode:NO_BIND -> 0 - // arg 3: enable_fp16 -> false - msConfig.init(0, 2, 0, false); - session = new TrainSession(); - session.init(modelPath, msConfig); - ``` - -2. 切换为训练模式,循环迭代,训练模型。 - - ```java - session.train(); - float min_loss = 1000; - float max_acc = 0; - for (int i = 0; i < cycles; i++) { - fillInputData(ds.getTrainData(), false); - session.runGraph(); - float loss = getLoss(); - if (min_loss > loss) { - min_loss = loss; - } - if ((i + 1) % 500 == 0) { - float acc = calculateAccuracy(10); // only test 10 batch size - if (max_acc < acc) { - max_acc = acc; - } - System.out.println("step_" + (i + 1) + ": \tLoss is " + loss + " [min=" + min_loss + "]" + " max_accc=" + max_acc); - } - } - ``` - -3. 切换为推理模式,执行推理,评估模型精度。 - - ```java - session.eval(); - for (long i = 0; i < tests; i++) { - Vector labels = fillInputData(test_set, (maxTests == -1)); - if (labels.size() != batchSize) { - System.err.println("unexpected labels size: " + labels.size() + " batch_size size: " + batchSize); - System.exit(1); - } - session.runGraph(); - MSTensor outputsv = searchOutputsForSize((int) (batchSize * numOfClasses)); - if (outputsv == null) { - System.err.println("can not find output tensor with size: " + batchSize * numOfClasses); - System.exit(1); - } - float[] scores = outputsv.getFloatData(); - for (int b = 0; b < batchSize; b++) { - int max_idx = 0; - float max_score = scores[(int) (numOfClasses * b)]; - for (int c = 0; c < numOfClasses; c++) { - if (scores[(int) (numOfClasses * b + c)] > max_score) { - max_score = scores[(int) (numOfClasses * b + c)]; - max_idx = c; - } - } - if (labels.get(b) == max_idx) { - accuracy += 1.0; - } - } - } - ``` - - 推理完成后,如果需要继续训练,需要切换为训练模式。 - -4. 
保存训练模型。 - - ```java - session.saveToFile(trainedFilePath) - ``` - - 模型训练完成后,保存到指定路径,后续可以继续加载运行。 diff --git a/tutorials/lite/source_zh_cn/scene_detection_lite.md b/tutorials/lite/source_zh_cn/scene_detection_lite.md deleted file mode 100644 index b2b6302fc8c818c97a8dd0e63d831ee5a74f0d9e..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/scene_detection_lite.md +++ /dev/null @@ -1,19 +0,0 @@ -# 场景检测模型 - - - -## 场景检测介绍 - -场景检测可以识别设备摄像头中场景的类型。 - -使用MindSpore Lite实现场景检测的[示例代码](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/scene_detection)。 - -## 场景检测模型列表 - -下表是使用MindSpore Lite推理的部分场景检测模型的数据。 - -> 下表的性能是在P30手机上测试的。 - -| 模型名称 | 大小(Mb) | Top1 | CPU 4线程时延(ms) | -|-----------------------| :----------: | :----------: | :-----------: | -| [MobileNetv2](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms) | 11.3 | - | 11.5 | diff --git a/tutorials/lite/source_zh_cn/style_transfer_lite.md b/tutorials/lite/source_zh_cn/style_transfer_lite.md deleted file mode 100644 index bce25b0fc8ffb6f2c767d4bf30adfa0636993715..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/style_transfer_lite.md +++ /dev/null @@ -1,17 +0,0 @@ -# 风格迁移模型 - - - -## 风格迁移介绍 - -风格迁移模型可以根据demo内置的标准图片改变用户目标图片的艺术风格,并在App图像预览界面中显示出来。用户可保存风格迁移结果,或者恢复图片的原始形态。 - -使用demo打开目标图片: - -![image_before_transfer](images/before_transfer.png) - -选择左起第一张标准图片进行风格迁移,效果如图: - -![image_after_transfer](images/after_transfer.png) - -使用MindSpore Lite实现风格迁移的[示例代码](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/style_transfer)。 diff --git a/tutorials/lite/source_zh_cn/use/asic.rst b/tutorials/lite/source_zh_cn/use/asic.rst deleted file mode 100644 index 416f00b65682651b8443d72fc2b588fa77cfdc3c..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/asic.rst +++ /dev/null @@ -1,7 +0,0 @@ -专用芯片集成说明 -================= - -.. 
toctree:: - :maxdepth: 1 - - npu_info \ No newline at end of file diff --git a/tutorials/lite/source_zh_cn/use/benchmark.rst b/tutorials/lite/source_zh_cn/use/benchmark.rst deleted file mode 100644 index a56ae2dbbe5054290fea0cae999658a849da41f6..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/benchmark.rst +++ /dev/null @@ -1,8 +0,0 @@ -基准测试工具 -======================== - -.. toctree:: - :maxdepth: 1 - - benchmark_tool - benchmark_train_tool \ No newline at end of file diff --git a/tutorials/lite/source_zh_cn/use/benchmark_tool.md b/tutorials/lite/source_zh_cn/use/benchmark_tool.md deleted file mode 100644 index e2a9585623f2f9dbbb2f6d67107c988bc70aeec1..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/benchmark_tool.md +++ /dev/null @@ -1,284 +0,0 @@ -# benchmark - -`Windows` `Linux` `环境准备` `基准测试` `中级` `高级` - - - -- [使用Benchmark进行基准测试](#使用benchmark进行基准测试) - - [概述](#概述) - - [Linux环境使用说明](#Linux环境使用说明) - - [环境准备](#环境准备) - - [参数说明](#参数说明) - - [使用示例](#使用示例) - - [性能测试](#性能测试) - - [精度测试](#精度测试) - - [CPU性能测试](#CPU性能测试) - - [Windows环境使用说明](#Windows环境使用说明) - - [环境准备](#环境准备-1) - - [参数说明](#参数说明-1) - - [使用示例](#使用示例-1) - - [性能测试](#性能测试-1) - - [精度测试](#精度测试-1) - - - - - -## 概述 - -转换模型后执行推理前,你可以使用Benchmark工具对MindSpore Lite模型进行基准测试。它不仅可以对MindSpore Lite模型前向推理执行耗时进行定量分析(性能),还可以通过指定模型输出进行可对比的误差分析(精度)。 - -## Linux环境使用说明 - -### 环境准备 - -使用Benchmark工具,需要进行如下环境准备工作。 - -- 编译:Benchmark工具代码在MindSpore源码的`mindspore/lite/tools/benchmark`目录中,参考构建文档中的[环境要求](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#id1)和[编译示例](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#id3)执行编译。 - -- 运行:参考构建文档中的[编译输出](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#id4),获得`benchmark`工具。 - -- 将推理需要的动态链接库加入环境变量LD_LIBRARY_PATH。 - - ```bash - export LD_LIBRARY_PATH=${PACKAGE_ROOT_PATH}/inference/lib:${LD_LIBRARY_PATH} - ``` - - 其中${PACKAGE_ROOT_PATH}是编译得到的包解压后的根目录。 - -### 参数说明 - 
-使用编译好的Benchmark工具进行模型的基准测试时,其命令格式如下所示。 - -```bash -./benchmark [--modelFile=] [--accuracyThreshold=] - [--benchmarkDataFile=] [--benchmarkDataType=] - [--cpuBindMode=] [--device=] [--help] - [--inDataFile=] [--loopCount=] - [--numThreads=] [--warmUpLoopCount=] - [--enableFp16=] [--timeProfiling=] - [--inputShapes=] [--perfProfiling=] - [--perfEvent=] -``` - -下面提供详细的参数说明。 - -| 参数名 | 属性 | 功能描述 | 参数类型 | 默认值 | 取值范围 | -| ----------------- | ---- | ------------------------------------------------------------ | ------ | -------- | ---------------------------------- | -| `--modelFile=` | 必选 | 指定需要进行基准测试的MindSpore Lite模型文件路径。 | String | null | - | -| `--accuracyThreshold=` | 可选 | 指定准确度阈值。 | Float | 0.5 | - | -| `--benchmarkDataFile=` | 可选 | 指定标杆数据的文件路径。标杆数据作为该测试模型的对比输出,是该测试模型使用相同输入并由其它深度学习框架前向推理而来。 | String | null | - | -| `--benchmarkDataType=` | 可选 | 指定标杆数据类型。 | String | FLOAT | FLOAT、INT32、INT8、UINT8 | -| `--cpuBindMode=` | 可选 | 指定模型推理程序运行时绑定的CPU核类型。 | Integer | 1 | 2:表示中核
    1:表示大核
    0:表示不绑定 | -| `--device=` | 可选 | 指定模型推理程序运行的设备类型。 | String | CPU | CPU、GPU、NPU | -| `--help` | 可选 | 显示`benchmark`命令的帮助信息。 | - | - | - | -| `--inDataFile=` | 可选 | 指定测试模型输入数据的文件路径。如果未设置,则使用随机输入。 | String | null | - | -| `--loopCount=` | 可选 | 指定Benchmark工具进行基准测试时,测试模型的前向推理运行次数,其值为正整数。 | Integer | 10 | - | -| `--numThreads=` | 可选 | 指定模型推理程序运行的线程数。 | Integer | 2 | - | -| `--warmUpLoopCount=` | 可选 | 指定测试模型在执行基准测试运行轮数前进行的模型预热推理次数。 | Integer | 3 | - | -| `--enableFp16=` | 可选 | 指定是否优先使用float16算子。 | Boolean | false | true, false | -| `--timeProfiling=` | 可选 | 性能验证时生效,指定是否使用TimeProfiler打印每个算子的耗时。 | Boolean | false | true, false | -| `--inputShapes=` | 可选 | 指定输入维度,维度应该按照NHWC格式输入. 维度值之间用‘,'隔开,多个输入的维度之间用‘:’隔开 | String | Null | - | -| `--perfProfiling=` | 可选 | CPU性能验证时生效,指定是否使用PerfProfiler打印每个算子的CPU性能,当timeProfiling为true时无效。目前仅支持aarch64 CPU。 | Boolean | false | true, false | -| `--perfEvent=` | 可选 | CPU性能验证时生效,指定PerfProfiler打印的CPU性能参数的具体内容,指定为CYCLE时,会打印算子的CPU周期数和指令条数;指定为CACHE时,会打印算子的缓存读取次数和缓存未命中次数;指定为STALL时,会打印CPU前端等待周期数和后端等待周期数。 | String | CYCLE | CYCLE/CACHE/STALL | - -### 使用示例 - -对于不同的MindSpore Lite模型,在使用Benchmark工具对其进行基准测试时,可通过设置不同的参数,实现对其不同的测试功能。主要分为性能测试和精度测试。 - -#### 性能测试 - -Benchmark工具进行的性能测试主要的测试指标为模型单次前向推理的耗时。在性能测试任务中,不需要设置`benchmarkDataFile`等标杆数据参数。但是,可以设置`timeProfiling`选项参数,控制是否输出在某设备上模型网络层的耗时,`timeProfiling`默认为false,例如: - -```bash -./benchmark --modelFile=./models/test_benchmark.ms -``` - -这条命令使用随机输入,其他参数使用默认值。该命令执行后会输出如下统计信息,该信息显示了测试模型在运行指定推理轮数后所统计出的单次推理最短耗时、单次推理最长耗时和平均推理耗时。 - -```text -Model = test_benchmark.ms, numThreads = 2, MinRunTime = 72.228996 ms, MaxRuntime = 73.094002 ms, AvgRunTime = 72.556000 ms -``` - -```bash -./benchmark --modelFile=./models/test_benchmark.ms --timeProfiling=true -``` - -这条命令使用随机输入,并且输出模型网络层的耗时信息,其他参数使用默认值。该命令执行后,模型网络层的耗时会输出如下统计信息,在该例中,该统计信息按照`opName`和`optype`两种划分方式分别显示,`opName`表示算子名,`optype`表示算子类别,`avg`表示该算子的平均单次运行时间,`percent`表示该算子运行耗时占所有算子运行总耗时的比例,`calledTimess`表示该算子的运行次数,`opTotalTime`表示该算子运行指定次数的总耗时。最后,`total time`和`kernel 
cost`分别显示了该模型单次推理的平均耗时和模型推理中所有算子的平均耗时之和。 - -```text ------------------------------------------------------------------------------------------ -opName avg(ms) percent calledTimess opTotalTime -conv2d_1/convolution 2.264800 0.824012 10 22.648003 -conv2d_2/convolution 0.223700 0.081390 10 2.237000 -dense_1/BiasAdd 0.007500 0.002729 10 0.075000 -dense_1/MatMul 0.126000 0.045843 10 1.260000 -dense_1/Relu 0.006900 0.002510 10 0.069000 -max_pooling2d_1/MaxPool 0.035100 0.012771 10 0.351000 -max_pooling2d_2/MaxPool 0.014300 0.005203 10 0.143000 -max_pooling2d_2/MaxPool_nchw2nhwc_reshape_1/Reshape_0 0.006500 0.002365 10 0.065000 -max_pooling2d_2/MaxPool_nchw2nhwc_reshape_1/Shape_0 0.010900 0.003966 10 0.109000 -output/BiasAdd 0.005300 0.001928 10 0.053000 -output/MatMul 0.011400 0.004148 10 0.114000 -output/Softmax 0.013300 0.004839 10 0.133000 -reshape_1/Reshape 0.000900 0.000327 10 0.009000 -reshape_1/Reshape/shape 0.009900 0.003602 10 0.099000 -reshape_1/Shape 0.002300 0.000837 10 0.023000 -reshape_1/strided_slice 0.009700 0.003529 10 0.097000 ------------------------------------------------------------------------------------------ -opType avg(ms) percent calledTimess opTotalTime -Activation 0.006900 0.002510 10 0.069000 -BiasAdd 0.012800 0.004657 20 0.128000 -Conv2D 2.488500 0.905401 20 24.885004 -MatMul 0.137400 0.049991 20 1.374000 -Nchw2Nhwc 0.017400 0.006331 20 0.174000 -Pooling 0.049400 0.017973 20 0.494000 -Reshape 0.000900 0.000327 10 0.009000 -Shape 0.002300 0.000837 10 0.023000 -SoftMax 0.013300 0.004839 10 0.133000 -Stack 0.009900 0.003602 10 0.099000 -StridedSlice 0.009700 0.003529 10 0.097000 - -total time : 2.90800 ms, kernel cost : 2.74851 ms - ------------------------------------------------------------------------------------------ -``` - -#### 精度测试 - -Benchmark工具进行的精度测试主要是通过设置标杆数据来对比验证MindSpore Lite模型输出的精确性。在精确度测试任务中,除了需要设置`modelFile`参数以外,还必须设置`benchmarkDataFile`参数。例如: - -```bash -./benchmark --modelFile=./models/test_benchmark.ms 
--inDataFile=./input/test_benchmark.bin --device=CPU --accuracyThreshold=3 --benchmarkDataFile=./output/test_benchmark.out -``` - -这条命令指定了测试模型的输入数据、标杆数据(默认的输入及标杆数据类型均为float32),同时指定了模型推理程序在CPU上运行,并指定了准确度阈值为3%。该命令执行后会输出如下统计信息,该信息显示了测试模型的单条输入数据、输出节点的输出结果和平均偏差率以及所有节点的平均偏差率。 - -```text -InData0: 139.947 182.373 153.705 138.945 108.032 164.703 111.585 227.402 245.734 97.7776 201.89 134.868 144.851 236.027 18.1142 22.218 5.15569 212.318 198.43 221.853 -================ Comparing Output data ================ -Data of node age_out : 5.94584e-08 6.3317e-08 1.94726e-07 1.91809e-07 8.39805e-08 7.66035e-08 1.69285e-07 1.46246e-07 6.03796e-07 1.77631e-07 1.54343e-07 2.04623e-07 8.89609e-07 3.63487e-06 4.86876e-06 1.23939e-05 3.09981e-05 3.37098e-05 0.000107102 0.000213932 0.000533579 0.00062465 0.00296401 0.00993984 0.038227 0.0695085 0.162854 0.123199 0.24272 0.135048 0.169159 0.0221256 0.013892 0.00502971 0.00134921 0.00135701 0.000383242 0.000163475 0.000136294 9.77864e-05 8.00793e-05 5.73874e-05 3.53858e-05 2.18535e-05 2.04467e-05 1.85286e-05 1.05075e-05 9.34751e-06 6.12732e-06 4.55476e-06 -Mean bias of node age_out : 0% -Mean bias of all nodes: 0% -======================================================= -``` - -如果需要指定输入数据的维度(例如输入维度为1,32,32,1),使用如下命令: - -```bash -./benchmark --modelFile=./models/test_benchmark.ms --inDataFile=./input/test_benchmark.bin --inputShapes=1,32,32,1 --device=CPU --accuracyThreshold=3 --benchmarkDataFile=./output/test_benchmark.out -``` - -#### CPU性能测试 - -Benchmark工具进行的CPU性能测试主要的测试指标为模型单次前向推理CPU性能参数(目前只支持aarch64 CPU),包括周期数和指令数、缓存读取次数和缓存未命中次数、CPU前端和后端等待时间。在CPU性能测试任务中,不需要设置`benchmarkDataFile`等标杆数据参数。但是,可以设置`perfProfiling`与`perfEvent`选项参数,控制输出在某设备上模型网络层的哪些CPU性能参数,`perfProfiling`默认为false,`perfEvent`默认为`CYCLE`(CPU周期数和指令数)。由于多线程的读数波动较大,建议设置线程数为1。使用方法如下: - -```bash -./benchmark --modelFile=./models/test_benchmark_2.ms --perfProfiling=true --numThreads=1 -``` - 
-这条命令使用随机输入,并且输出模型网络层的周期数/指令数信息,其他参数使用默认值。该命令执行后,会输出如下CPU性能参数统计信息,在该例中,该统计信息按照`opName`和`optype`两种划分方式分别显示,`opName`表示算子名,`optype`表示算子类别,`cycles(k)`表示该算子的平均CPU周期数(以k为单位,受CPU频率影响),`cycles(%)`表示该算子CPU周期数占所有算子CPU周期数的比例,`ins(k)`表示该算子的指令数(以k为单位),`ins(%)`表示该算子的指令数占所有算子指令数的比例。最后会显示当前模型、线程数、最小运行时间、最大运行时间、平均运行时间用做参考。 - -```text ------------------------------------------------------------------------------------------ -opName cycles(k) cycles(%) ins(k) ins(%) -Add_Plus214_Output_0 1.53 0.006572 1.27 0.002148 -Conv_Convolution110_Output_0 91.12 0.390141 217.58 0.369177 -Conv_COnvolution28_Output_0 114.61 0.490704 306.28 0.519680 -Matmul_Times212_Output_0 8.75 0.037460 15.55 0.026385 -MaxPool_Pooling160_Output_0 3.24 0.013873 8.70 0.014767 -MaxPool_Pooling66_Output_0 11.63 0.049780 35.17 0.059671 -Reshape_Pooling160_Output_0_reshape0 0.91 0.003899 1.58 0.002677 -nhwc2nchw_MaxPool_Pooling160_Output_0_post8_0 1.77 0.007571 3.25 0.005508 ------------------------------------------------------------------------------------------ -opType cycles(k) cycles(%) ins(k) ins(%) -Add 1.53 0.006572 1.27 0.002148 -Conv2D 205.73 0.880845 523.85 0.888856 -MatMul 8.75 0.037460 15.55 0.026385 -Nhwc2nchw 1.77 0.007571 3.25 0.005508 -Pooling 14.87 0.063654 43.87 0.074437 -Reshape 0.91 0.003839 1.58 0.002677 - -Model = test_benchmark_2.ms, NumThreads = 1, MinRunTime = 0.104000 ms, MaxRunTime = 0.179000 ms, AvgRunTime = 0.116000 ms - ------------------------------------------------------------------------------------------ -``` - -当`perfEvent`参数被指定为`CACHE`时,列标题会变为`cache ref(k)`/`cache ref(%)`/`miss(k)`/`miss(%)`,分别代表算子缓存读取次数/缓存读取占比/缓存未命中次数/缓存未命中次数占比;当`perfEvent`参数被指定为`STALL`时,列标题会变为`frontend(k)`/`frontend(%)`/`backend(k)`/`backend(%)`,分别代表CPU前端等待时间/CPU前端等待时间占比/CPU后端等待时间/CPU后端等待时间数占比。使用方法如下: - -```bash -./benchmark --modelFile=./models/test_benchmark_2.ms --perfProfiling=true --perfEvent="CACHE" -``` - -```bash -./benchmark --modelFile=./models/test_benchmark_2.ms --perfProfiling=true 
--perfEvent="STALL" -``` - -## Windows环境使用说明 - -### 环境准备 - -使用Benchmark工具,需要进行如下环境准备工作。 - -- 编译:Benchmark工具代码在MindSpore源码的`mindspore/lite/tools/benchmark`目录中,参考构建文档中的[环境要求](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#id9)和[编译示例](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#id11)执行编译。 -- 将推理需要的动态链接库加入环境变量PATH。 - - ```bash - set PATH=%PACKAGE_ROOT_PATH%\inference\lib;%PATH% - ``` - - 其中%PACKAGE_ROOT_PATH%是编译得到的包解压后的根目录。 - -### 参数说明 - -使用编译好的Benchmark工具进行模型的基准测试时,其命令格式如下所示。参数与Linux环境下使用一致,此处不再赘述。 - -```bash -call benchmark.exe [--modelFile=] [--accuracyThreshold=] - [--benchmarkDataFile=] [--benchmarkDataType=] - [--cpuBindMode=] [--device=] [--help] - [--inDataFile=] [--loopCount=] - [--numThreads=] [--warmUpLoopCount=] - [--enableFp16=] [--timeProfiling=] - [--inputShapes=] -``` - -### 使用示例 - -对于不同的MindSpore Lite模型,在使用Benchmark工具对其进行基准测试时,可通过设置不同的参数,实现对其不同的测试功能。主要分为性能测试和精度测试,输出信息与Linux环境下一致,此处不再赘述。 - -#### 性能测试 - -- 使用随机输入,其他参数使用默认值。 - - ```bat - call benchmark.exe --modelFile=test_benchmark.ms - ``` - -- 使用随机输入,`timeProfiling`设为true,其他参数使用默认值。 - - ```bat - call benchmark.exe --modelFile=test_benchmark.ms --timeProfiling=true - ``` - -#### 精度测试 - -输入数据通过`inDataFile`参数设定,标杆数据通过`benchmarkDataFile`参数设定。 - -- 指定了准确度阈值为3%。 - -```bat -call benchmark.exe --modelFile=test_benchmark.ms --inDataFile=.test_benchmark.bin --benchmarkDataFile=test_benchmark.out --accuracyThreshold=3 -``` - -- 指定模型推理程序在CPU上运行。 - -```bat -call benchmark.exe --modelFile=test_benchmark.ms --inDataFile=test_benchmark.bin --benchmarkDataFile=test_benchmark.out --device=CPU -``` - -- 指定输入数据的维度。 - -```bat -call benchmark.exe --modelFile=test_benchmark.ms --inDataFile=test_benchmark.bin --benchmarkDataFile=test_benchmark.out --inputShapes=1,32,32,1 -``` diff --git a/tutorials/lite/source_zh_cn/use/benchmark_train_tool.md b/tutorials/lite/source_zh_cn/use/benchmark_train_tool.md deleted file mode 100644 index 
d2fbdda13a83c74393217d69923dc481047dd6f5..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/benchmark_train_tool.md +++ /dev/null @@ -1,147 +0,0 @@ -# benchmark_train - -`Linux` `环境准备` `基准测试` `中级` `高级` - - - -- [使用benchmark_train进行基准测试](#使用benchmark进行基准测试) - - [概述](#概述) - - [Linux环境使用说明](#Linux环境使用说明) - - [环境准备](#环境准备) - - [参数说明](#参数说明) - - [使用示例](#使用示例) - - [性能测试](#性能测试) - - [精度测试](#精度测试) - - - - - -## 概述 - -与`benchmark`工具类似,MindSpore端侧训练为你提供了`benchmark_train`工具对训练后的模型进行基准测试。它不仅可以对模型前向推理执行耗时进行定量分析(性能),还可以通过指定模型输出进行可对比的误差分析(精度)。 - -## Linux环境使用说明 - -### 环境准备 - -使用`benchmark_train`工具,需要进行如下环境准备工作。 - -- 编译:`benchmark_train`工具代码在MindSpore源码的`mindspore/lite/tools/benchmark_train`目录中,参考构建文档中的[环境要求](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#id1)和[编译示例](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#id3)编译端侧训练框架。 - -- 配置环境变量:参考构建文档中的[编译输出](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#id5),获得`benchmark_train`工具,并配置环境变量。假设您编译出的端侧训练框架压缩包所在完整路径为`/path/mindspore-lite-{version}-train-{os}-{arch}.tar.gz`,解压并配置环境变量的命令如下: - - ```bash - cd /path - tar xvf mindspore-lite-{version}-train-{os}-{arch}.tar.gz - export LD_LIBRARY_PATH=/path/mindspore-lite-{version}-train-{os}-{arch}/train/lib:/path/mindspore-lite-{version}-train-{os}-{arch}/train/third_party/libjpeg-turbo/lib:${LD_LIBRARY_PATH} - ``` - -benchmark_train工具所在完整路径为`/path/mindspore-lite-{version}-train-{os}-{arch}/tools/benchmark_train/benchmark_train`。 - -### 参数说明 - -使用编译好的`benchmark_train`工具进行模型的基准测试时,其命令格式如下所示。 - -```bash -./benchmark_train [--modelFile=] [--accuracyThreshold=] - [--expectedDataFile=] [--warmUpLoopCount=] - [--timeProfiling=] [--help] - [--inDataFile=] [--epochs=] - [--exportFile=] -``` - -下面提供详细的参数说明。 - -| 参数名 | 属性 | 功能描述 | 参数类型 | 默认值 | 取值范围 | -| ----------------- | ---- | ------------------------------------------------------------ | ------ | -------- | ---------------------------------- | -| `--modelFile=` | 
必选 | 指定需要进行基准测试的MindSpore Lite模型文件路径。 | String | null | - | -| `--accuracyThreshold=` | 可选 | 指定准确度阈值。 | Float | 0.5 | - | -| `--expectedDataFile=` | 可选 | 指定标杆数据的文件路径。标杆数据作为该测试模型的对比输出,是该测试模型使用相同输入并由其它深度学习框架前向推理而来。 | String | null | - | -| `--help` | 可选 | 显示`benchmark_train`命令的帮助信息。 | - | - | - | -| `--warmUpLoopCount=` | 可选 | 指定测试模型在执行基准测试运行轮数前进行的模型预热推理次数。 | Integer | 3 | - | -| `--timeProfiling=` | 可选 | 性能验证时生效,指定是否使用TimeProfiler打印每个算子的耗时。 | Boolean | false | true, false | -| `--inDataFile=` | 可选 | 指定测试模型输入数据的文件路径。如果未设置,则使用随机输入。 | String | null | - | -| `--epochs=` | 可选 | 指定循环训练的轮次,大于0时会执行训练EPOCHS次,并输出耗时数据。 | Integer | 0 | >=0 | -| `--exportFile=` | 可选 | 导出模型的路径。 | String | null | - | - -### 使用示例 - -在使用`benchmark_train`工具进行模型基准测试时,可通过设置不同的参数,实现对其不同的测试功能。主要分为性能测试和精度测试。 - -#### 性能测试 - -`benchmark_train`工具进行的性能测试主要的测试指标为模型单次训练的耗时。在性能测试任务中,请设置`epochs`为大于1的数值,不需要设置`expectedDataFile`等标杆数据参数。但是,可以设置`timeProfiling`选项参数,控制是否输出在某设备上模型网络层的耗时,`timeProfiling`默认为false,例如: - -```bash -./benchmark_train --modelFile=./models/test_benchmark.ms --epochs=10 -``` - -这条命令使用随机输入,循环10次,其他参数使用默认值。该命令执行后会输出如下统计信息,该信息显示了测试模型在运行指定推理轮数后所统计出的单次推理最短耗时、单次推理最长耗时和平均推理耗时。 - -```text -Model = test_benchmark.ms, numThreads = 1, MinRunTime = 72.228996 ms, MaxRuntime = 73.094002 ms, AvgRunTime = 72.556000 ms -``` - -```bash -./benchmark_train --modelFile=./models/test_benchmark.ms --epochs=10 --timeProfiling=true -``` - -这条命令使用随机输入,并且输出模型网络层的耗时信息,其他参数使用默认值。该命令执行后,模型网络层的耗时会输出如下统计信息,在该例中,该统计信息按照`opName`和`optype`两种划分方式分别显示,`opName`表示算子名,`optype`表示算子类别,`avg`表示该算子的平均单次运行时间,`percent`表示该算子运行耗时占所有算子运行总耗时的比例,`calledTimess`表示该算子的运行次数,`opTotalTime`表示该算子运行指定次数的总耗时。最后,`total time`和`kernel cost`分别显示了该模型单次推理的平均耗时和模型推理中所有算子的平均耗时之和。 - -```text ------------------------------------------------------------------------------------------ -opName avg(ms) percent calledTimess opTotalTime -conv2d_1/convolution 2.264800 0.824012 10 22.648003 -conv2d_2/convolution 0.223700 0.081390 10 2.237000 -dense_1/BiasAdd 0.007500 0.002729 
10 0.075000 -dense_1/MatMul 0.126000 0.045843 10 1.260000 -dense_1/Relu 0.006900 0.002510 10 0.069000 -max_pooling2d_1/MaxPool 0.035100 0.012771 10 0.351000 -max_pooling2d_2/MaxPool 0.014300 0.005203 10 0.143000 -max_pooling2d_2/MaxPool_nchw2nhwc_reshape_1/Reshape_0 0.006500 0.002365 10 0.065000 -max_pooling2d_2/MaxPool_nchw2nhwc_reshape_1/Shape_0 0.010900 0.003966 10 0.109000 -output/BiasAdd 0.005300 0.001928 10 0.053000 -output/MatMul 0.011400 0.004148 10 0.114000 -output/Softmax 0.013300 0.004839 10 0.133000 -reshape_1/Reshape 0.000900 0.000327 10 0.009000 -reshape_1/Reshape/shape 0.009900 0.003602 10 0.099000 -reshape_1/Shape 0.002300 0.000837 10 0.023000 -reshape_1/strided_slice 0.009700 0.003529 10 0.097000 ------------------------------------------------------------------------------------------ -opType avg(ms) percent calledTimess opTotalTime -Activation 0.006900 0.002510 10 0.069000 -BiasAdd 0.012800 0.004657 20 0.128000 -Conv2D 2.488500 0.905401 20 24.885004 -MatMul 0.137400 0.049991 20 1.374000 -Nchw2Nhwc 0.017400 0.006331 20 0.174000 -Pooling 0.049400 0.017973 20 0.494000 -Reshape 0.000900 0.000327 10 0.009000 -Shape 0.002300 0.000837 10 0.023000 -SoftMax 0.013300 0.004839 10 0.133000 -Stack 0.009900 0.003602 10 0.099000 -StridedSlice 0.009700 0.003529 10 0.097000 - -total time : 2.90800 ms, kernel cost : 2.74851 ms - ------------------------------------------------------------------------------------------ -``` - -#### 精度测试 - -`benchmark_train`工具进行的精度测试主要是通过设置标杆数据来对比验证MindSpore Lite训练后的模型输出的精确性。在精确度测试任务中,除了需要设置`modelFile`参数以外,还必须设置`inDataFile`、`expectedDataFile`参数。例如: - -```bash -./benchmark_train --modelFile=./models/test_benchmark.ms --inDataFile=./input/test_benchmark.bin --accuracyThreshold=3 --expectedDataFile=./output/test_benchmark.out -``` - -这条命令指定了测试模型的输入数据、标杆数据(默认的输入及标杆数据类型均为float32),同时指定了模型推理程序在CPU上运行,并指定了准确度阈值为3%。该命令执行后会输出如下统计信息,该信息显示了测试模型的单条输入数据、输出节点的输出结果和平均偏差率以及所有节点的平均偏差率。 - -```text -InData0: 139.947 182.373 153.705 138.945 108.032 
164.703 111.585 227.402 245.734 97.7776 201.89 134.868 144.851 236.027 18.1142 22.218 5.15569 212.318 198.43 221.853 -================ Comparing Output data ================ -Data of node age_out : 5.94584e-08 6.3317e-08 1.94726e-07 1.91809e-07 8.39805e-08 7.66035e-08 1.69285e-07 1.46246e-07 6.03796e-07 1.77631e-07 1.54343e-07 2.04623e-07 8.89609e-07 3.63487e-06 4.86876e-06 1.23939e-05 3.09981e-05 3.37098e-05 0.000107102 0.000213932 0.000533579 0.00062465 0.00296401 0.00993984 0.038227 0.0695085 0.162854 0.123199 0.24272 0.135048 0.169159 0.0221256 0.013892 0.00502971 0.00134921 0.00135701 0.000383242 0.000163475 0.000136294 9.77864e-05 8.00793e-05 5.73874e-05 3.53858e-05 2.18535e-05 2.04467e-05 1.85286e-05 1.05075e-05 9.34751e-06 6.12732e-06 4.55476e-06 -Mean bias of node age_out : 0% -Mean bias of all nodes: 0% -======================================================= -``` diff --git a/tutorials/lite/source_zh_cn/use/build.md b/tutorials/lite/source_zh_cn/use/build.md deleted file mode 100644 index da8a2e51d74e8852d34530633865fae4d235901f..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/build.md +++ /dev/null @@ -1,525 +0,0 @@ -# 编译MindSpore Lite - -`Windows` `Linux` `Android` `环境准备` `中级` `高级` - - - -- [编译MindSpore Lite](#编译mindspore-lite) - - [Linux环境编译](#linux环境编译) - - [环境要求](#环境要求) - - [编译选项](#编译选项) - - [编译示例](#编译示例) - - [端侧推理框架编译输出](#端侧推理框架编译输出) - - [模型转换工具converter目录结构说明](#推理模型转换工具converter目录结构说明) - - [代码生成工具codegen目录结构说明](#代码生成工具codegen目录结构说明) - - [Runtime及其他工具目录结构说明](#推理Runtime及其他工具目录结构说明) - - [端侧训练框架编译输出](#端侧训练框架编译输出) - - [训练Runtime及配套工具目录结构说明](#训练Runtime及配套工具目录结构说明) - - [Windows环境编译](#windows环境编译) - - [环境要求](#环境要求-1) - - [编译选项](#编译选项-1) - - [编译示例](#编译示例-1) - - [端侧推理框架编译输出](#端侧推理框架编译输出) - - [Runtime及配套工具目录结构说明](#Runtime及配套工具目录结构说明-1) - - [Docker环境编译](#docker环境编译) - - [环境准备](#环境准备) - - [下载镜像](#下载镜像) - - [创建容器](#创建容器) - - [进入容器](#进入容器) - - [编译选项](#编译选项-2) - - [编译示例](#编译示例-2) - - [编译输出](#编译输出) - - - - - -本章节介绍如何快速编译出MindSpore 
Lite。 - -推理版本包含模块: - -| 模块 | 支持平台 | 说明 | -| ------------------ | ----------------------- | --------------------------------- | -| converter | Linux, Windows | 模型转换工具 | -| runtime(cpp、java) | Linux, Windows, Android | 模型推理框架(Windows平台不支持java版runtime) | -| benchmark | Linux, Windows, Android | 基准测试工具 | -| cropper | Linux | libmindspore-lite.a静态库裁剪工具 | -| minddata | Linux, Android | 图像处理库 | -| codegen | Linux | 模型推理代码生成工具 | - -训练版本包含模块: - -| 模块 | 支持平台 | 说明 | -| --------------- | -------------- | --------------------------------- | -| converter | Linux | 模型转换工具 | -| runtime(cpp) | Linux, Android | 模型训练框架(暂不支持java) | -| cropper | Linux | libmindspore-lite.a静态库裁剪工具 | -| minddata | Linux, Android | 图像处理库 | -| benchmark_train | Linux, Android | 性能测试和精度校验工具 | - -## Linux环境编译 - -### 环境要求 - -- 系统环境:Linux x86_64,推荐使用Ubuntu 18.04.02LTS -- runtime(cpp)编译依赖 - - [CMake](https://cmake.org/download/) >= 3.18.3 - - [GCC](https://gcc.gnu.org/releases.html) >= 7.3.0 - - [Android_NDK](https://dl.google.com/android/repository/android-ndk-r20b-linux-x86_64.zip) >= r20 - - [Git](https://git-scm.com/downloads) >= 2.28.0 -- converter编译依赖 - - [CMake](https://cmake.org/download/) >= 3.18.3 - - [GCC](https://gcc.gnu.org/releases.html) >= 7.3.0 - - [Android_NDK](https://dl.google.com/android/repository/android-ndk-r20b-linux-x86_64.zip) >= r20 - - [Git](https://git-scm.com/downloads) >= 2.28.0 - - [Autoconf](http://ftp.gnu.org/gnu/autoconf/) >= 2.69 - - [Libtool](https://www.gnu.org/software/libtool/) >= 2.4.6 - - [LibreSSL](http://www.libressl.org/) >= 3.1.3 - - [Automake](https://www.gnu.org/software/automake/) >= 1.11.6 - - [Libevent](https://libevent.org) >= 2.0 - - [OpenSSL](https://www.openssl.org/) >= 1.1.1 -- runtime(java)编译依赖 - - [CMake](https://cmake.org/download/) >= 3.18.3 - - [GCC](https://gcc.gnu.org/releases.html) >= 7.3.0 - - [Android_NDK](https://dl.google.com/android/repository/android-ndk-r20b-linux-x86_64.zip) >= r20 - - [Git](https://git-scm.com/downloads) >= 2.28.0 - - 
[Android SDK](https://developer.android.com/studio?hl=zh-cn#cmdline-tools) - - [Gradle](https://gradle.org/releases/) >= 6.6.1 - - [OpenJDK](https://openjdk.java.net/install/) >= 1.8 - -> - 当安装完依赖项`Android_NDK`后,需配置环境变量:`export ANDROID_NDK=${NDK_PATH}/android-ndk-r20b`。 -> - 当安装完依赖项Gradle后,需将其安装路径增加到PATH当中:`export PATH=${GRADLE_PATH}/bin:$PATH`。 -> - 通过`Android command line tools`安装Android SDK,首先需要创建一个新目录,并将其路径配置到环境变量`${ANDROID_SDK_ROOT}`中,然后通过`sdkmanager`创建SDK:`./sdkmanager --sdk_root=${ANDROID_SDK_ROOT} "cmdline-tools;latest"`,最后通过`${ANDROID_SDK_ROOT}`目录下的`sdkmanager`接受许可证:`yes | ./sdkmanager --licenses`。 -> - 编译AAR需要依赖Android SDK Build-Tools、Android SDK Platform-Tools等Android SDK相关组件,如果环境中的Android SDK不存在相关组件,编译时会自动下载所需依赖。 -> - 编译NPU算子的时候需要下载[DDK V500.010](https://developer.huawei.com/consumer/cn/doc/development/hiai-Library/ddk-download-0000001053590180),并将压缩包解压后的目录设置为环境变量`${HWHIAI_DDK}`。 - -### 编译选项 - -MindSpore Lite提供编译脚本`build.sh`用于一键式编译,位于MindSpore根目录下,该脚本可用于MindSpore训练及推理的编译。下面对MindSpore Lite的编译选项进行说明。 - -| 选项 | 参数说明 | 取值范围 | 是否必选 | -| -------- | ----- | ---- | ---- | -| -I | 选择适用架构,若编译MindSpore Lite c++版本,则此选项必选 | arm64、arm32、x86_64 | 否 | -| -d | 设置该参数,则编译Debug版本,否则编译Release版本 | 无 | 否 | -| -i | 设置该参数,则进行增量编译,否则进行全量编译 | 无 | 否 | -| -j[n] | 设定编译时所用的线程数,否则默认设定为8线程 | Integer | 否 | -| -e | 编译某种类型的内置算子,仅在ARM架构下适用,否则默认全部编译 | cpu、gpu、npu | 否 | -| -h | 显示编译帮助信息 | 无 | 否 | -| -n | 指定编译轻量级图片处理模块 | lite_cv | 否 | -| -A | 指定编译语言,默认cpp。设置为java时,则编译AAR包和Linux X86的JAR包。 | cpp、java | 否 | -| -C | 设置该参数,则编译模型转换工具,默认为on | on、off | 否 | -| -o | 设置该参数,则编译基准测试工具、静态库裁剪工具,默认为on | on、off | 否 | -| -t | 设置该参数,则编译测试用例,默认为off | on、off | 否 | -| -T | 是否编译训练版本工具,默认为off | on、off | 否 | -| -W | 启用x86_64 SSE或AVX指令集,默认为off | sse、avx、off | 否 | - -> - 在`-I`参数变动时,如`-I x86_64`变为`-I arm64`,添加`-i`参数进行增量编译不生效。 -> - 编译AAR包时,必须添加`-A java`参数,且无需添加`-I`参数,默认同时编译内置的CPU和GPU算子。 -> - 开启编译选项`-T`只生成训练版本。 -> - 任何`-e`编译选项,CPU都会编译进去。 - -### 编译示例 - -首先,在进行编译之前,需从MindSpore代码仓下载源码。 - -```bash -git clone 
https://gitee.com/mindspore/mindspore.git -``` - -然后,在源码根目录下执行如下命令,可编译不同版本的MindSpore Lite。 - -- 编译x86_64架构Debug版本。 - - ```bash - bash build.sh -I x86_64 -d - ``` - -- 编译x86_64架构Release版本,同时设定线程数。 - - ```bash - bash build.sh -I x86_64 -j32 - ``` - -- 编译x86_64架构Release版本,同时编译测试用例。 - - ```bash - bash build.sh -I x86_64 -t on - ``` - -- 增量编译ARM64架构Release版本,同时设定线程数。 - - ```bash - bash build.sh -I arm64 -i -j32 - ``` - -- 编译ARM64架构Release版本,只编译内置的CPU算子。 - - ```bash - bash build.sh -I arm64 -e cpu - ``` - -- 编译ARM64架构Release版本,同时编译内置的CPU和GPU算子。 - - ```bash - bash build.sh -I arm64 -e gpu - ``` - -- 编译ARM64架构Release版本,同时编译内置的CPU和NPU算子。 - - ```bash - bash build.sh -I arm64 -e npu - ``` - -- 编译ARM64带图像预处理模块。 - - ```bash - bash build.sh -I arm64 -n lite_cv - ``` - -- 编译MindSpore Lite AAR和Linux X86_64 JAR版本,MindSpore Lite AAR同时编译内置的CPU和GPU算子,JAR只编译内置的的CPU算子。 - - ```bash - bash build.sh -A java - ``` - -- 编译MindSpore Lite AAR和Linux X86_64 JAR版本,只编译内置的CPU算子。 - - ```bash - bash build.sh -A java -e cpu - ``` - -- 编译x86_64架构Release版本,编译模型转换、基准测试和库裁剪工具。 - - ```bash - bash build.sh -I x86_64 - ``` - -- 编译x86_64架构Release版本,模型转换、基准测试、库裁剪工具和端侧运行时 (Runtime) 训练版本工具。 - - ```bash - bash build.sh -I x86_64 -T on - ``` - -### 端侧推理框架编译输出 - -执行编译指令后,会在`mindspore/output/`目录中生成如下文件: - -- `mindspore-lite-{version}-inference-{os}-{arch}.tar.gz`:包含模型推理框架runtime(cpp)和配套工具。 - -- `mindspore-lite-maven-{version}.zip`:包含模型推理框架runtime(java)的AAR。 - -> - version: 输出件版本号,与所编译的分支代码对应的版本一致。 -> - os: 输出件应部署的操作系统。 -> - arch: 输出件应部署的系统架构。 - -执行解压缩命令,获取编译后的输出件: - -```bash -tar -xvf mindspore-lite-{version}-inference-{os}-{arch}.tar.gz -unzip mindspore-lite-maven-{version}.zip -``` - -#### 模型转换工具converter目录结构说明 - -仅在`-I x86_64`编译选项下获得(推理和训练的目录结构相同)内容如下: - -```text -mindspore-lite-{version}-inference-linux-x64 -└── tools - └── converter - ├── converter # 模型转换工具 - │ └── converter_lite # 可执行程序 - └── lib # 转换工具依赖的动态库 - ├── libglog.so.0 # Glog的动态库 - └── libmslite_converter_plugin_reg.so # 注册插件的动态库 -``` - -#### 
代码生成工具CodeGen目录结构说明 - -仅在`-I x86_64`编译选项下获得codegen可执行程序,在`-I arm64`和`-I arm32`编译选项下只生成codegen生成的推理代码所需要的算子库。 - -- `-I x86_64`编译选项下获得codegen,内容如下: - - ```text - mindspore-lite-{version}-inference-linux-x64 - └── tools - └── codegen # 代码生成工具 - ├── codegen # 可执行程序 - ├── include # 推理框架头文件 - │ ├── nnacl # nnacl 算子头文件 - │ └── wrapper - ├── lib - │ └── libwrapper.a # MindSpore Lite CodeGen生成代码依赖的部分算子静态库 - └── third_party - ├── include - │ └── CMSIS # ARM CMSIS NN 算子头文件 - └── lib - └── libcmsis_nn.a # ARM CMSIS NN 算子静态库 - ``` - -- `-I arm64`或`-I arm32`编译选项下获得codegen,内容如下: - - ```text - mindspore-lite-{version}-inference-android-{arch} - └── tools - └── codegen # 代码生成工具 - ├── include # 推理框架头文件 - │ ├── nnacl # nnacl 算子头文件 - │ └── wrapper - └── lib # 推理框架库 - └── libwrapper.a # MindSpore Lite CodeGen生成代码依赖的部分算子静态库 - ``` - -#### Runtime及其他工具目录结构说明 - -推理框架可在`-I x86_64`、`-I arm64`、`-I arm32`和`-A java`编译选项下获得,内容如下: - -- 当编译选项为`-I x86_64`时: - - ```text - mindspore-lite-{version}-inference-linux-x64 - ├── inference - │ ├── include # 推理框架头文件 - │ └── lib # 推理框架库 - │ ├── libminddata-lite.so # 图像处理动态库文件 - │ ├── libmindspore-lite.a # MindSpore Lite推理框架的静态库 - │ └── libmindspore-lite.so # MindSpore Lite推理框架的动态库 - └── tools - ├── benchmark # 基准测试工具 - │ └── benchmark # 可执行程序 - ├── codegen # 代码生成工具 - │ ├── codegen # 可执行程序 - │ ├── include # 算子头文件 - │ ├── lib # 算子静态库 - │ └── third_party # ARM CMSIS NN算子库 - ├── converter # 模型转换工具 - └── cropper # 库裁剪工具 - ├── cropper # 库裁剪工具可执行文件 - └── cropper_mapping_cpu.cfg # 裁剪cpu库所需的配置文件 - ``` - -- 当编译选项为`-I arm64`或`-I arm32`时: - - ```text - mindspore-lite-{version}-inference-android-{arch} - ├── inference - │ ├── include # 推理框架头文件 - │ ├── lib # 推理框架库 - │ │ ├── libminddata-lite.so # 图像处理动态库文件 - │ │ ├── libmindspore-lite.a # MindSpore Lite推理框架的静态库 - │ │ └── libmindspore-lite.so # MindSpore Lite推理框架的动态库 - │ └── third_party - │ └── hiai_ddk # NPU库,只存在于arm64包 - └── tools - ├── benchmark # 基准测试工具 - │ └── benchmark - └── codegen # 代码生成工具 - ├── include # 算子头文件 - └── 
lib # 算子静态库 - ``` - -- 当编译选项为`-A java`时: - - ```text - mindspore-lite-maven-{version} - └── mindspore - └── mindspore-lite - └── {version} - └── mindspore-lite-{version}.aar # MindSpore Lite推理框架aar包 - ``` - - ```text - mindspore-lite-{version}-inference-linux-x64-jar - └── jar - ├── libmindspore-lite-jni.so # MindSpore Lite推理框架的动态库 - ├── libmindspore-lite.so # MindSpore Lite JNI的动态库 - └── mindspore-lite-java.jar # MindSpore Lite推理框架jar包 - ``` - -> - 编译ARM64默认可获得cpu/gpu/npu的推理框架输出件,若添加`-e gpu`则获得cpu/gpu的推理框架输出件,ARM32仅支持CPU。 - -### 端侧训练框架编译输出 - -如果添加了`-T on`编译选项,会生成端侧训练转换工具和对应Runtime工具,如下: - -`mindspore-lite-{version}-train-{os}-{arch}.tar.gz`:模型训练框架runtime。 - -> - version: 输出件版本号,与所编译的分支代码对应的版本一致。 -> - os: 输出件应部署的操作系统。 -> - arch: 输出件应部署的系统架构。 - -执行解压缩命令,获取编译后的输出件: - -```bash -tar -xvf mindspore-lite-{version}-train-{os}-{arch}.tar.gz -``` - -#### 训练Runtime及配套工具目录结构说明 - -训练框架可在`-I x86_64`、`-I arm64`、`-I arm32`编译选项下获得对应不同硬件平台的版本,内容如下: - -- 当编译选项为`-I x86_64`时: - - ```text - mindspore-lite-{version}-train-linux-x64 - ├── tools - │   ├── benchmark_train # 训练模型性能与精度调测工具 - │   ├── converter # 模型转换工具 - │   └── cropper # 库裁剪工具 - │   ├── cropper # 库裁剪工具可执行文件 - │   └── cropper_mapping_cpu.cfg # 裁剪cpu库所需的配置文件 - └── train - ├── include # 训练框架头文件 - ├── lib # 训练框架库 - │   ├── libminddata-lite.so # 图像处理动态库文件 - │   ├── libmindspore-lite-train.a # MindSpore Lite训练框架的静态库 - │   └── libmindspore-lite-train.so # MindSpore Lite训练框架的动态库 - └── third_party - └── libjpeg-turbo - ``` - -- 当编译选项为`-I arm64`或`-I arm32`时: - - ```text - mindspore-lite-{version}-train-android-{arch} - ├── tools - │   └── benchmark_train # 训练模型性能与精度调测工具 - └── train - ├── include # 训练框架头文件 - ├── lib # 训练框架库 - │   ├── libminddata-lite.so # 图像处理动态库文件 - │   ├── libmindspore-lite-train.a # MindSpore Lite训练框架的静态库 - │   └── libmindspore-lite-train.so # MindSpore Lite训练框架的动态库 - └── third_party - ├── hiai_ddk # NPU库,只存在于arm64包 - └── libjpeg-turbo - ``` - -## Windows环境编译 - -### 环境要求 - -- 系统环境:Windows 7,Windows 10;64位。 - -- 编译依赖 
- - [CMake](https://cmake.org/download/) >= 3.18.3 - - [MinGW GCC](https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win64/Personal%20Builds/mingw-builds/7.3.0/threads-posix/seh/x86_64-7.3.0-release-posix-seh-rt_v5-rev0.7z/download) = 7.3.0 - -> - 编译脚本中会执行`git clone`获取第三方依赖库的代码。 -> - 如果要编译32位Mindspore Lite,请使用32位[MinGW](https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win32/Personal%20Builds/mingw-builds/7.3.0/threads-posix/dwarf/i686-7.3.0-release-posix-dwarf-rt_v5-rev0.7z)编译。 - -### 编译选项 - -MindSpore Lite提供编译脚本build.bat用于一键式编译,位于MindSpore根目录下,该脚本可用于MindSpore训练及推理的编译。下面对MindSpore Lite的编译选项进行说明。 - -| 参数 | 参数说明 | 是否必选 | -| -------- | ----- | ---- | -| lite | 设置该参数,则对MindSpore Lite工程进行编译 | 是 | -| [n] | 设定编译时所用的线程数,否则默认设定为6线程 | 否 | - -### 编译示例 - -首先,使用git工具,从MindSpore代码仓下载源码。 - -```bat -git clone https://gitee.com/mindspore/mindspore.git -``` - -然后,使用cmd工具在源码根目录下,执行如下命令即可编译MindSpore Lite。 - -- 以默认线程数(6线程)编译Windows版本。 - -```bat -call build.bat lite -``` - -- 以指定线程数8编译Windows版本。 - -```bat -call build.bat lite 8 -``` - -### 端侧推理框架编译输出 - -编译完成后,进入`mindspore/output/`目录,可查看编译后生成的文件。文件分为以下几种: - -- `mindspore-lite-{version}-inference-win-x64.zip`:包含模型推理框架runtime和配套工具。 - -> version:输出件版本号,与所编译的分支代码对应的版本一致。 - -执行解压缩命令,获取编译后的输出件: - -```bat -unzip mindspore-lite-{version}-inference-win-x64.zip -``` - -#### Runtime及配套工具目录结构说明 - -Runtime及配套工具包括以下几部分: - -```text -mindspore-lite-{version}-inference-win-x64 -├── inference -│   ├── include # 推理框架头文件 -│   └── lib -│   ├── libgcc_s_seh-1.dll # MinGW动态库 -│   ├── libmindspore-lite.a # MindSpore Lite推理框架的静态库 -│   ├── libmindspore-lite.dll # MindSpore Lite推理框架的动态库 -│   ├── libmindspore-lite.dll.a # MindSpore Lite推理框架的动态库的链接文件 -│   ├── libssp-0.dll # MinGW动态库 -│   ├── libstdc++-6.dll # MinGW动态库 -│   └── libwinpthread-1.dll # MinGW动态库 -└── tools - ├── benchmark # 基准测试工具 - │   └── benchmark.exe # 可执行程序 - └── converter # 模型转换工具 - ├── converter - │   └── converter_lite.exe # 可执行程序 - └── lib - ├── 
libgcc_s_seh-1.dll # MinGW动态库 - ├── libglog.dll # Glog的动态库 - ├── libmslite_converter_plugin_reg.dll # 注册插件的动态库 - ├── libssp-0.dll # MinGW动态库 - ├── libstdc++-6.dll # MinGW动态库 - └── libwinpthread-1.dll # MinGW动态库 -``` - -> 暂不支持在Windows进行端侧训练。 - -## Docker环境编译 - -### 环境准备 - -#### 下载镜像 - -```bash -docker pull swr.cn-south-1.myhuaweicloud.com/mindspore-build/mindspore-lite:ubuntu18.04.2-20210323 -``` - -> - 下载镜像前,请确保已经安装docker。 -> - docker镜像暂不支持Windows版本编译。 -> - 镜像里已安装好编译依赖的第三方库并且配置好环境变量。 - -#### 创建容器 - -```bash -docker run -tid --net=host --name=docker01 swr.cn-south-1.myhuaweicloud.com/mindspore-build/mindspore-lite:ubuntu18.04.2-20210323 -``` - -#### 进入容器 - -```bash -docker exec -ti -u 0 docker01 bash -``` - -### 编译选项 - -参考[Linux环境编译](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#linux) - -### 编译示例 - -参考[Linux环境编译](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#linux) - -### 编译输出 - -参考[Linux环境编译](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#linux) diff --git a/tutorials/lite/source_zh_cn/use/converter_tool.md b/tutorials/lite/source_zh_cn/use/converter_tool.md deleted file mode 100644 index 24fcd43b498d49f770d77d7cab143a1f1a0f1e38..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/converter_tool.md +++ /dev/null @@ -1,206 +0,0 @@ -# 推理模型转换 - -`Windows` `Linux` `模型转换` `中级` `高级` - - - -- [推理模型转换](#推理模型转换) - - [概述](#概述) - - [Linux环境使用说明](#linux环境使用说明) - - [环境准备](#环境准备) - - [参数说明](#参数说明) - - [使用示例](#使用示例) - - [Windows环境使用说明](#windows环境使用说明) - - [环境准备](#环境准备-1) - - [参数说明](#参数说明-1) - - [使用示例](#使用示例-1) - - - - - -## 概述 - -MindSpore Lite提供离线转换模型功能的工具,支持多种类型的模型转换,转换后的模型可用于推理。命令行参数包含多种个性化选项,为用户提供方便的转换途径。 - -目前支持的输入格式有:MindSpore、TensorFlow Lite、Caffe、TensorFlow和ONNX。 - -通过转换工具转换成的`ms`模型,支持转换工具配套及更高版本的Runtime推理框架执行推理。 - -## Linux环境使用说明 - -### 环境准备 - -使用MindSpore Lite模型转换工具,需要进行如下环境准备工作。 - -- 
[编译](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html)或[下载](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html)模型转换工具。 -- 将转换工具需要的动态链接库加入环境变量LD_LIBRARY_PATH。 - - ```bash - export LD_LIBRARY_PATH=${PACKAGE_ROOT_PATH}/tools/converter/lib:${LD_LIBRARY_PATH} - ``` - - ${PACKAGE_ROOT_PATH}是编译或下载得到的包解压后的路径。 - -### 参数说明 - -MindSpore Lite模型转换工具提供了多种参数设置,用户可根据需要来选择使用。此外,用户可输入`./converter_lite --help`获取实时帮助。 - -下面提供详细的参数说明。 - -| 参数 | 是否必选 | 参数说明 | 取值范围 | 默认值 | -| -------- | ------- | ----- | --- | ---- | -| `--help` | 否 | 打印全部帮助信息。 | - | - | -| `--fmk=` | 是 | 输入模型的原始格式。 | MINDIR、CAFFE、TFLITE、TF、ONNX | - | -| `--modelFile=` | 是 | 输入模型的路径。 | - | - | -| `--outputFile=` | 是 | 输出模型的路径,不需加后缀,可自动生成`.ms`后缀。 | - | - | -| `--weightFile=` | 转换Caffe模型时必选 | 输入模型weight文件的路径。 | - | - | -| `--quantType=` | 否 | 设置模型的量化类型。 | WeightQuant:训练后量化(权重量化)
    PostTraining:训练后量化(全量化) | - | -| `--bitNum=` | 否 | 设定训练后量化(权重量化)的比特数,目前支持1bit~16bit量化 | \[1,16] | 8 | -| `--quantWeightSize=` | 否 | 设定参与训练后量化(权重量化)的卷积核尺寸阈值,若卷积核尺寸大于该值,则对此权重进行量化 | \[0,+∞) | 0 | -| `--quantWeightChannel=` | 否 | 设定参与训练后量化(权重量化)的卷积通道数阈值,若卷积通道数大于该值,则对此权重进行量化 | \[0,+∞) | 16 | -| `--configFile=` | 否 | 1)可作为训练后量化(全量化)校准数据集配置文件路径。2)可作为转换器的配置文件路径,内含参数`plugin_path`与`disable_fusion`,均为非必选,前者为第三方库加载路径,如有多个请用`;`分隔,后者默认值为`off`,当为`on`时会关闭融合优化。 | - | - | - -> - 参数名和参数值之间用等号连接,中间不能有空格。 -> - Caffe模型一般分为两个文件:`*.prototxt`模型结构,对应`--modelFile`参数;`*.caffemodel`模型权值,对应`--weightFile`参数。 -> - 为保证权重量化的精度,建议`--bitNum`参数设定范围为8bit~16bit。 -> - 全量化目前仅支持激活值8bit、权重8bit的量化方式。 - -下面选取了几个常用示例,说明转换命令的使用方法。 - -- 以Caffe模型LeNet为例,执行转换命令。 - - ```bash - ./converter_lite --fmk=CAFFE --modelFile=lenet.prototxt --weightFile=lenet.caffemodel --outputFile=lenet - ``` - - 本例中,因为采用了Caffe模型,所以需要模型结构、模型权值两个输入文件。再加上其他必需的fmk类型和输出路径两个参数,即可成功执行。 - - 结果显示为: - - ```text - CONVERTER RESULT SUCCESS:0 - ``` - - 这表示已经成功将Caffe模型转化为MindSpore Lite模型,获得新文件`lenet.ms`。 - -- 以MindSpore、TensorFlow Lite、TensorFlow和ONNX模型为例,执行转换命令。 - - - MindSpore模型`model.mindir` - - ```bash - ./converter_lite --fmk=MINDIR --modelFile=model.mindir --outputFile=model - ``` - - > 通过MindSpore v1.1.1之前版本导出的`MindIR`模型,建议采用对应版本的转换工具转换成`ms`模型。MindSpore v1.1.1及其之后的版本,转换工具会做前向兼容。 - - - TensorFlow Lite模型`model.tflite` - - ```bash - ./converter_lite --fmk=TFLITE --modelFile=model.tflite --outputFile=model - ``` - - - TensorFlow模型`model.pb` - - ```bash - ./converter_lite --fmk=TF --modelFile=model.pb --outputFile=model - ``` - - - ONNX模型`model.onnx` - - ```bash - ./converter_lite --fmk=ONNX --modelFile=model.onnx --outputFile=model - ``` - - 以上几种情况下,均显示如下转换成功提示,且同时获得`model.ms`目标文件。 - - ```text - CONVERTER RESULT SUCCESS:0 - ``` - -- 如果转换命令执行失败,程序会返回一个[错误码](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/errorcode_and_metatype.html)。 - -> 训练后量化示例请参考。 - -## Windows环境使用说明 - -### 环境准备 - -使用MindSpore Lite模型转换工具,需要进行如下环境准备工作。 - -- 
[编译](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html)或[下载](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html)模型转换工具。 -- 将转换工具需要的动态链接库加入环境变量PATH。 - - ```bash - set PATH=%PACKAGE_ROOT_PATH%\tools\converter\lib;%PATH% - ``` - - ${PACKAGE_ROOT_PATH}是编译或下载得到的包解压后的路径。 - -### 参数说明 - -参考Linux环境模型转换工具的[参数说明](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html#id3)。 - -### 使用示例 - -设置日志打印级别为INFO。 - -```bat -set GLOG_v=1 -``` - -> 日志级别:0代表DEBUG,1代表INFO,2代表WARNING,3代表ERROR。 - -下面选取了几个常用示例,说明转换命令的使用方法。 - -- 以Caffe模型LeNet为例,执行转换命令。 - - ```bat - call converter_lite --fmk=CAFFE --modelFile=lenet.prototxt --weightFile=lenet.caffemodel --outputFile=lenet - ``` - - 本例中,因为采用了Caffe模型,所以需要模型结构、模型权值两个输入文件。再加上其他必需的fmk类型和输出路径两个参数,即可成功执行。 - - 结果显示为: - - ```text - CONVERTER RESULT SUCCESS:0 - ``` - - 这表示已经成功将Caffe模型转化为MindSpore Lite模型,获得新文件`lenet.ms`。 - -- 以MindSpore、TensorFlow Lite、ONNX模型格式和感知量化模型为例,执行转换命令。 - - - MindSpore模型`model.mindir` - - ```bat - call converter_lite --fmk=MINDIR --modelFile=model.mindir --outputFile=model - ``` - - > 通过MindSpore v1.1.1之前版本导出的`MindIR`模型,建议采用对应版本的转换工具转换成`ms`模型。MindSpore v1.1.1及其之后的版本,转换工具会做前向兼容。 - - - TensorFlow Lite模型`model.tflite` - - ```bat - call converter_lite --fmk=TFLITE --modelFile=model.tflite --outputFile=model - ``` - - - TensorFlow模型`model.pb` - - ```bat - call converter_lite --fmk=TF --modelFile=model.pb --outputFile=model - ``` - - - ONNX模型`model.onnx` - - ```bat - call converter_lite --fmk=ONNX --modelFile=model.onnx --outputFile=model - ``` - - 以上几种情况下,均显示如下转换成功提示,且同时获得`model.ms`目标文件。 - - ```text - CONVERTER RESULT SUCCESS:0 - ``` - -- 如果转换命令执行失败,程序会返回一个[错误码](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/errorcode_and_metatype.html)。 diff --git a/tutorials/lite/source_zh_cn/use/converter_train.md b/tutorials/lite/source_zh_cn/use/converter_train.md deleted file mode 100644 index b205078cf399a4be82c1085ee7b22cbf4c4c23e6..0000000000000000000000000000000000000000 --- 
a/tutorials/lite/source_zh_cn/use/converter_train.md +++ /dev/null @@ -1,74 +0,0 @@ -# 训练模型转换 - -`Linux` `环境准备` `模型导出` `模型转换` `中级` `高级` - - - -- [训练模型转换](#训练模型转换) - - [概述](#概述) - - [Linux环境](#linux环境) - - [环境准备](#环境准备) - - [参数说明](#参数说明) - - [模型转换示例](#模型转换示例) - - - - - -## 概述 - -创建MindSpore端侧模型的步骤: - -- 首先基于MindSpore架构使用Python创建网络模型,并导出为`.mindir`文件,参见云端的[保存模型](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#mindir)。 -- 然后将`.mindir`模型文件转换成`.ms`文件,`.ms`文件可以导入端侧设备并基于MindSpore端侧框架训练。 - -## Linux环境 - -### 环境准备 - -MindSpore Lite 模型转换工具提供了多个参数,目前工具仅支持Linux系统,环境准备步骤: - -- [编译](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html)或[下载](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html)模型转换工具。 -- 将转换工具需要的动态链接库加入环境变量LD_LIBRARY_PATH。 - - ```bash - export LD_LIBRARY_PATH=${PACKAGE_ROOT_PATH}/tools/converter/lib:${LD_LIBRARY_PATH} - ``` - - ${PACKAGE_ROOT_PATH}是编译或下载得到的包解压后的路径。 - -### 参数说明 - -下表为MindSpore Lite训练模型转换工具使用到的参数: - -| 参数 | 是否必选 | 参数说明 | 取值范围 | 默认值 | -| --------------------------- | -------- | ------------------------------------------- | ----------- | ------ | -| `--help` | 否 | 打印全部帮助信息 | - | - | -| `--fmk=` | 是 | 输入模型的原始格式 | MINDIR | - | -| `--modelFile=` | 是 | MINDIR模型文件名(包括路径) | - | - | -| `--outputFile=` | 是 | 输出模型文件名(包括路径)自动生成`.ms`后缀 | - | - | -| `--trainModel=true` | 是 | 是否是训练模式;如果要训练模型,必须为true | true, false | false | - -> 参数名称和数值之间使用等号连接且不能有空格。 - -### 模型转换示例 - -假设待转换的模型文件为`my_model.mindir`,执行如下转换命令: - -```bash -./converter_lite --fmk=MINDIR --trainModel=true --modelFile=my_model.mindir --outputFile=my_model -``` - -转换成功输出如下: - -```text -CONVERTER RESULT SUCCESS:0 -``` - -这表明 MindSpore 模型成功转换为 MindSpore 端侧模型,并生成了新文件`my_model.ms`。如果转换失败输出如下: - -```text -CONVERT RESULT FAILED: -``` - -程序会返回的[错误码](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/errorcode_and_metatype.html)和错误信息。 diff --git a/tutorials/lite/source_zh_cn/use/cropper_tool.md b/tutorials/lite/source_zh_cn/use/cropper_tool.md deleted file 
mode 100644 index 6ae0c3bb81bbb753c97f61bebcdb9e89229a0e46..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/cropper_tool.md +++ /dev/null @@ -1,72 +0,0 @@ -# 静态库剪裁工具 - -`Linux` `环境准备` `静态库裁剪` `中级` `高级` - - - -- [使用裁剪工具降低库文件大小](#使用裁剪工具降低库文件大小) - - [概述](#概述) - - [环境准备](#环境准备) - - [参数说明](#参数说明) - - [使用示例](#使用示例) - - - - - -## 概述 - -MindSpore Lite提供对Runtime的`libmindspore-lite.a`静态库裁剪工具,能够筛选出`ms`模型中存在的算子,对静态库文件进行裁剪,有效降低库文件大小。 - -裁剪工具运行环境是x86_64,目前支持对CPU算子的裁剪,即编译方式为`bash build.sh -I arm64 -e cpu`、`bash build.sh -I arm32 -e cpu`、`bash build.sh -I x86_64 -e cpu`中的`libmindspore-lite.a`静态库。 - -## 环境准备 - -使用MindSpore Lite裁剪工具,需要进行如下环境准备工作。 - -- 编译:裁剪工具代码在MindSpore源码的`mindspore/lite/tools/cropper`目录中,参考构建文档中的[环境要求](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#id1)和[编译示例](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#id3)编译x86_64版本。 - -- 运行:参考构建文档中的[编译输出](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#id4),获得`cropper`工具。 - -## 参数说明 - -使用裁剪工具进行静态库的裁剪,其命令格式如下所示。 - -```bash -./cropper [--packageFile=] [--configFile=] - [--modelFile=] [--modelFolderPath=] - [--outputFile=] [--help] -``` - -下面提供详细的参数说明。 - -| 参数 | 是否必选 | 参数说明 | 参数类型 | 默认值 | 取值范围 | -| ------------------------------------- | -------- | ------------------------------------------------------------ | -------- | ------ | -------- | -| `--packageFile=` | 是 | 需要裁剪的`libmindspore-lite.a`文件路径。 | String | - | - | -| `--configFile=` | 是 | 裁剪工具配置文件的路径,裁剪CPU库需要设置`cropper_mapping_cpu.cfg`文件路径。 | String | - | - | -| `--modelFolderPath=` | 否 | 模型文件夹路径,根据文件夹中存在的所有`ms`模型进行库裁剪。`modelFile`和`modelFolderPath`参数必须二选一。 | String | - | - | -| `--modelFile=` | 否 | 模型文件路径,根据指定的`ms`模型文件进行库裁剪,多个模型文件采用`,`分割。`modelFile`和`modelFolderPath`参数必须二选一。 | String | - | - | -| `--outputFile=` | 否 | 裁剪完成的`libmindspore-lite.a`库的保存路径,默认覆盖源文件。 | String | - | - | -| `--help` | 否 | 打印全部帮助信息。 | - | - | - | - -> 
配置文件`cropper_mapping_cpu.cfg`存在于`mindspore-lite-{version}-linux-x64`包中的`tools/cropper`目录。 - -## 使用示例 - -裁剪工具通过解析`ms`模型得到算子列表,并根据配置文件`configFile`中的映射关系来裁剪`libmindspore-lite.a`静态库。模型文件传入方式包括文件夹、文件两种: - -- 通过文件夹的方式传入`ms`模型,将模型文件所在的文件夹路径传递给`modelFolderPath`参数,对arm64-cpu的`libmindspore-lite.a`静态库进行裁剪。 - -```bash -./cropper --packageFile=/mindspore-lite-{version}-android-aarch64/inference/lib/libmindspore-lite.a --configFile=./cropper_mapping_cpu.cfg --modelFolderPath=/model --outputFile=/mindspore-lite/lib/libmindspore-lite.a -``` - -本例将读取`/model`文件夹中包含的所有`ms`模型,对arm64-cpu的`libmindspore-lite.a`静态库进行裁剪,并将裁剪后的`libmindspore-lite.a`静态库保存到`/mindspore-lite/lib/`目录。 - -- 通过文件的方式传入`ms`模型,将模型文件所在的路径传递给`modelFile`参数,对arm64-cpu的`libmindspore-lite.a`静态库进行裁剪。 - -```bash -./cropper --packageFile=/mindspore-lite-{version}-android-aarch64/inference/lib/libmindspore-lite.a --configFile=./cropper_mapping_cpu.cfg --modelFile=/model/lenet.ms,/model/retinaface.ms --outputFile=/mindspore-lite/lib/libmindspore-lite.a -``` - -本例将根据`modelFile`传入的`ms`模型,对arm64-cpu的`libmindspore-lite.a`静态库进行裁剪,并将裁剪后的`libmindspore-lite.a`静态库保存到`/mindspore-lite/lib/`目录。 diff --git a/tutorials/lite/source_zh_cn/use/data_preprocessing.rst b/tutorials/lite/source_zh_cn/use/data_preprocessing.rst deleted file mode 100644 index d80161faccf15deedbd6afb8df91d0220484ff67..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/data_preprocessing.rst +++ /dev/null @@ -1,7 +0,0 @@ -预处理数据 -========== - -.. 
toctree:: - :maxdepth: 1 - - image_processing diff --git a/tutorials/lite/source_zh_cn/use/downloads.md b/tutorials/lite/source_zh_cn/use/downloads.md deleted file mode 100644 index 029c8c27ae7d480bf7caccd85b7f9f0af23a1abf..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/downloads.md +++ /dev/null @@ -1,50 +0,0 @@ -# 下载MindSpore Lite - -`Windows` `Linux` `Android` `环境准备` `初级` `中级` `高级` - - - -- [下载MindSpore Lite](#下载mindspore-lite) - - [1.2.0](#120) - - [1.1.0](#110) - - - - - -欢迎使用MindSpore Lite,我们提供了支持多种操作系统和硬件平台的模型转换、模型推理、图像处理等功能,你可以下载适用于本地环境的版本包直接使用。 - -## 1.2.0 - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| 推理runtime(cpp)、训练runtime(cpp)、推理aar包、以及benchmark/benchmark_train工具。 | CPU | Android-aarch32 | | 7d073573385a69bff53542c395d106393da241682cd6053703ce21f1de23bac6 | -| 推理runtime(cpp)、训练runtime(cpp)、推理aar包、以及benchmark/benchmark_train工具。 | CPU/GPU | Android-aarch64 | | 7f8400f0b97fa3e7cbf0d266c73b43a2410905244b04d0202fab39d9267346e0 | -| 推理runtime(cpp)、训练runtime(cpp)、推理jar包、以及benchmark/benchmark_train/codegen/converter/cropper工具。 | CPU | Ubuntu-x64 | | 3b609ed8be9e3ae70987d6e00421ad4720776d797133e72f6952ba6b93059062 | -| 推理runtime(cpp)以及benchmark/codegen/converter工具。 | CPU | Windows-x64 | | bf01851d7e2cde416502dce11bd2a86ef63e559f6dabba090405755a87ce14ae | -| 推理runtime(cpp) | CPU | OpenHarmony | | a9987b25815cb69e0f630be1388486e8d727a19815a67851089b7d633bd2f3f2 | - -## 1.1.0 - -### 推理 - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore Lite 模型转换工具(Converter) | CPU | Ubuntu-x64 | | d449e38a8493c314d1b5b1a127f62269192da785b012ff892eda775dedca3d82 | -| | CPU | Windows-x64 | | 5e50b7701b97ebe784095f2ba954fc6c377eb157fbc9aaeae2497e38cc4ee212 | -| MindSpore Lite 模型推理框架(Runtime,含图像处理) | CPU/GPU/NPU | Android-aarch64/Android-aarch32 | | a19de5706db57e97a5f04ef08e0e383f8ea497c70bb60e60d056b31a603c0243 | -| | CPU | Ubuntu-x64 | | 
176256c2fbef775f1a44aaeccae0c4eea6a60f41fc0baece5479dcb378155f36 | -| | CPU | Windows-x64 | | 30b5545245832a73d84732166f360c77cd09a7a4fe1fb922a8f7b80e7df326c1 | - -### 训练 - -| 组件 | 硬件平台 | 操作系统 | 链接 | SHA-256 | -| --- | --- | --- | --- | --- | -| MindSpore Lite 模型转换工具(Converter) | CPU | Ubuntu-x64 | | f95a9db98c84ec3d97f88383ecc3832582aa9737ed287c33703deb0b419acf25 | -| MindSpore Lite 模型训练框架(Runtime,含图像处理) | CPU | Android-aarch64/Android-aarch32 | | a6d8152f4e2d674c52af2c379f7d07858d30bc0dceef1dbc366e6fa16a5948b5 | -| | CPU | Ubuntu-x64 | | 1290f0adc790adc9edce654b9a629a9a323cfcb8453eb6bc19b779ef726282bf | - -> - Ubuntu-x64版本包是在GCC版本大于等于7.3.0的环境编译得到的,所以该版本包的部署环境要求GLIBC版本大于等于2.27。 -> - Android-aarch32的版本包不支持GPU和NPU。 -> - MindSpore Lite还提供对Runtime的`libmindspore-lite.a`[静态库裁剪工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/cropper_tool.html#),用于剪裁静态库文件,有效降低库文件大小。 -> - MindSpore Lite下载完成后,需要进行SHA-256完整性校验。 diff --git a/tutorials/lite/source_zh_cn/use/image_processing.md b/tutorials/lite/source_zh_cn/use/image_processing.md deleted file mode 100644 index b6d9b5b87e4f8f55c8b702cbe25ce74f6e06e352..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/image_processing.md +++ /dev/null @@ -1,153 +0,0 @@ -# 预处理图像数据 - -`Windows` `Linux` `C++` `数据准备` `初级` `中级` `高级` - - - -- [预处理图像数据](#预处理图像数据) - - [概述](#概述) - - [导入图像预处理函数的库](#导入图像预处理函数的库) - - [对图像进行初始化](#对图像进行初始化) - - [使用示例](#使用示例) - - [可选的图像预处理算子](#可选的图像预处理算子) - - [对图像进行缩放操作](#对图像进行缩放操作) - - [使用示例](#使用示例-1) - - [对图像数据类型进行转换](#对图像数据类型进行转换) - - [使用示例](#使用示例-2) - - [对图像数据进行裁剪](#对图像数据进行裁剪) - - [使用示例](#使用示例-3) - - [对图像数据进行归一化处理](#对图像数据进行归一化处理) - - [使用示例](#使用示例-4) - - - - - -## 概述 - -图像预处理的主要目的是消除图像中无关的信息,恢复有用的真实信息,增强有关信息的可检测性和最大限度地简化数据,从而改进特征抽取、图像分割、匹配和识别的可靠性。此处是通过创建LiteMat对象,在推理前对图像数据进行处理,达到模型推理所需要的数据格式要求。 - -流程如下: - -## 导入图像预处理函数的库 - -```cpp -#include "lite_cv/lite_mat.h" -#include "lite_cv/image_process.h" -``` - -## 对图像进行初始化 - 
-这边使用的是`image_process.h`文件中的[InitFromPixel](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/dataset.html#initfrompixel)函数对图像进行初始化操作。 - -```cpp -bool InitFromPixel(const unsigned char *data, LPixelType pixel_type, LDataType data_type, int w, int h, LiteMat &m) -``` - -### 使用示例 - -```cpp -// Create the data object of the LiteMat object. -LiteMat lite_mat_bgr; - -// Initialize the lite_mat_bgr object. -// The image data pointer passed in by the user (The data in the Bitmap corresponding to the Android platform). -InitFromPixel(pixel_ptr, LPixelType::RGBA2GRAY, LDataType::UINT8, rgba_mat.cols, rgba_mat.rows, lite_mat_bgr); -``` - -## 可选的图像预处理算子 - -此处的图像处理算子,用户可以根据实际情况任意搭配使用。 - -### 对图像进行缩放操作 - -这边利用的是`image_process.h`中的[ResizeBilinear](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/dataset.html#resizebilinear)函数通过双线性算法调整图像大小,当前仅支持的数据类型为uint8,当前支持的通道为3和1。 - -```cpp -bool ResizeBilinear(const LiteMat &src, LiteMat &dst, int dst_w, int dst_h) -``` - -#### 使用示例 - -```cpp -// Initialize the image data. -LiteMat lite_mat_bgr; -InitFromPixel(rgba_mat.data, LPixelType::RGBA2BGR, LDataType::UINT8, rgba_mat.cols, rgba_mat.rows, lite_mat_bgr); - -// Create a resize image data object. -LiteMat lite_mat_resize; - -// Resize the image. -ResizeBilinear(lite_mat_bgr, lite_mat_resize, 256, 256); -``` - -### 对图像数据类型进行转换 - -这边利用的是`image_process.h`中的[ConvertTo](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/dataset.html#convertto)函数对图像数据类型进行转换,目前支持的转换是将uint8转换为float。 - -```cpp -bool ConvertTo(const LiteMat &src, LiteMat &dst, double scale = 1.0) -``` - -#### 使用示例 - -```cpp -// Initialize the image data. -LiteMat lite_mat_bgr; -InitFromPixel(rgba_mat.data, LPixelType::RGBA2BGR, LDataType::UINT8, rgba_mat.cols, rgba_mat.rows, lite_mat_bgr); - -// Create the converted data type object. -LiteMat lite_mat_convert_float; - -// Perform conversion type operations on the object. The currently supported conversion is to convert uint8 to float. 
-ConvertTo(lite_mat_bgr, lite_mat_convert_float); -``` - -### 对图像数据进行裁剪 - -这边利用的是`image_process.h`中的[Crop](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/dataset.html#crop)函数对图像进行裁剪,目前支持通道3和1。 - -```cpp -bool Crop(const LiteMat &src, LiteMat &dst, int x, int y, int w, int h) -``` - -#### 使用示例 - -```cpp -// Initialize the image data. -LiteMat lite_mat_bgr; -InitFromPixel(rgba_mat.data, LPixelType::RGBA2BGR, LDataType::UINT8, rgba_mat.cols, rgba_mat.rows, lite_mat_bgr); - -// Create the cropped object. -LiteMat lite_mat_cut; - -// The image is cropped by the values of x, y, w, h. -Crop(lite_mat_bgr, lite_mat_cut, 16, 16, 224, 224); -``` - -### 对图像数据进行归一化处理 - -为了消除数据指标之间的量纲影响,通过标准化处理来解决数据指标之间的可比性问题,这边利用的是`image_process.h`中的[SubStractMeanNormalize](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/dataset.html#substractmeannormalize)函数对图像数据进行归一化处理。 - -```cpp -bool SubStractMeanNormalize(const LiteMat &src, LiteMat &dst, const std::vector &mean, const std::vector &std) -``` - -#### 使用示例 - -```cpp -// Initialize the image data. -LiteMat lite_mat_bgr; -InitFromPixel(rgba_mat.data, LPixelType::RGBA2BGR, LDataType::UINT8, rgba_mat.cols, rgba_mat.rows, lite_mat_bgr); - -// The mean value of the image data. -// The variance of the image data. -std::vector means = {0.485, 0.456, 0.406}; -std::vector stds = {0.229, 0.224, 0.225}; - -// Create a normalized image object. -LiteMat lite_mat_bgr_norm; - -// The image data is normalized by the mean value and variance of the image data. 
-SubStractMeanNormalize(lite_mat_bgr, lite_mat_bgr_norm, means, stds); -``` diff --git a/tutorials/lite/source_zh_cn/use/micro.md b/tutorials/lite/source_zh_cn/use/micro.md deleted file mode 100644 index df4f27ee3f312e4d4e719b601c05b04e58c8e3cd..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/micro.md +++ /dev/null @@ -1,288 +0,0 @@ -# 在微控制器上执行推理 - - `Linux` `IoT` `C++` `模型代码生成` `推理应用` `初级` `中级` - - - -- [在微控制器上执行推理](#在微控制器上执行推理) - - [概述](#概述) - - [获取codegen](#获取codegen) - - [参数说明](#参数说明) - - [使用步骤](#使用步骤) - - [使用CodeGen在STM开发板上执行推理](#使用CodeGen在STM开发板上执行推理) - - [更多详情](#更多详情) - - - - - -## 概述 - -相较于移动终端,IoT设备上系统资源有限,对ROM空间占用、运行时内存和功耗要求较高。MindSpore Lite提供代码生成工具codegen,将运行时编译、解释计算图,移至离线编译阶段。仅保留推理所必须的信息,生成极简的推理代码。codegen可对接NNACL和CMSIS算子库,支持生成可在x86/ARM64/ARM32A/ARM32M平台部署的推理代码。 - -代码生成工具codegen的使用流程如下: - -1. 通过MindSpore Lite转换工具[Converter](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html),将训练好的模型文件转换为`*.ms`格式; - -2. 通过自动代码生成工具codegen,输入`*.ms`模型自动生成源代码。 - -![img](../images/lite_codegen.png) - -## 获取codegen - -自动代码生成工具,可以通过两种方式获取: - -1. MindSpore官网下载[Release版本](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html)。 -2. 从源码开始[编译构建](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html)。 - -> 目前模型生成工具仅支持在Linux x86_64架构下运行。 - -## 参数说明 - -详细参数说明如下: - -| 参数 | 是否必选 | 参数说明 | 取值范围 | 默认值 | -| --------------- | -------- | -------------------------------| -------------------------- | --------- | -| help | 否 | 打印使用说明信息 | - | - | -| codePath | 是 | 生成代码的路径 | - | ./(当前目录)| -| target | 是 | 生成代码针对的平台 | x86, ARM32M, ARM32A, ARM64 | x86 | -| modelPath | 是 | 输入模型文件路径 | - | - | -| supportParallel | 否 | 是否生成支持多线程的代码 | true, false | false | -| debugMode | 否 | 是否以生成调试模式的代码 | true, false | false | - -> 输入模型文件,需要经过MindSpore Lite Converter工具转换成.ms格式。 -> -> os不支持文件系统时,debugMode不可用。 -> -> 生成的推理接口详细使用说明,请参考[API文档](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/index.html)。 -> -> 以下三个接口暂不支持: -> 1. 
`virtual std::unordered_map GetOutputs() const = 0;` -> 2. `virtual Vector GetOutputsByNodeName(const String &node_name) const = 0;` -> 3. `virtual int Resize(const Vector &inputs, const Vector> &dims) = 0;` - -## 使用说明 - -以MNIST分类网络为例: - -```bash -./codegen --modelPath=./mnist.ms --codePath=./ -``` - -执行成功后,会在codePath指定的目录下,生成名为mnist的文件夹,内容如下: - -```text -mnist -├── benchmark # 集成调试相关的例程 -│   ├── benchmark.cc -│   ├── calib_output.cc -│   ├── calib_output.h -│   ├── load_input.c -│   └── load_input.h -├── CMakeLists.txt -└── src # 源文件 - ├── CMakeLists.txt - ├── mmodel.h - ├── net.bin # 二进制形式的模型权重 - ├── net.c - ├── net.cmake - ├── net.h - ├── session.cc - ├── session.h - ├── tensor.cc - ├── tensor.h - ├── weight.c - └── weight.h -``` - -## 使用CodeGen在STM开发板上执行推理 - -本教程以在STM32F746单板上编译部署生成模型代码为例,演示了codegen编译模型在Cortex-M平台的使用。更多关于Arm Cortex-M的详情可参见其[官网](https://developer.arm.com/ip-products/processors/cortex-m)。 - -### STM32F746编译依赖 - -模型推理代码的编译部署需要在Windows上安装[J-Link](https://www.segger.com/)、[STM32CubeMX](https://www.st.com/content/st_com/en.html)、[GNU Arm Embedded Toolchain](https://developer.arm.com/tools-and-software/open-source-software/developer-tools/gnu-toolchain/gnu-rm)等工具来进行交叉编译。 - -- [STM32CubeMX Windows版本](https://www.st.com/content/ccc/resource/technical/software/sw_development_suite/group0/0b/05/f0/25/c7/2b/42/9d/stm32cubemx_v6-1-1/files/stm32cubemx_v6-1-1.zip/jcr:content/translations/en.stm32cubemx_v6-1-1.zip) >= 6.0.1 - -- [GNU Arm Embedded Toolchain](https://developer.arm.com/tools-and-software/open-source-software/developer-tools/gnu-toolchain/gnu-rm/downloads) >= 9-2019-q4-major-win32 - -- [J-Link Windows版本](https://www.segger.com/downloads/jlink/) >= 6.56 -- [GCC](https://gcc.gnu.org/releases.html) >= 7.3.0 -- [CMake](https://cmake.org/download/) >= 3.18.3 - -### STM32F746工程构建 - -- 需要组织的工程目录如下: - - ```bash - ├── mnist # codegen生成的模型推理代码 - ├── include # 模型推理对外API头文件目录(需要自建) - └── operator_library # 模型推理算子相关文件(需要自建) - ``` - -> 
模型推理对外API头文件可由MindSpore团队发布的[Release包](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html)中获取。 -> -> 在编译此工程之前需要预先获取对应平台所需要的算子文件,由于Cortex-M平台工程编译一般涉及到较复杂的交叉编译,此处不提供直接预编译的算子库静态库,而是用户根据模型自行组织文件,自主编译Cortex-M7 、Coretex-M4、Cortex-M3等工程(对应工程目录结构已在示例代码中给出,用户可自主将对应ARM官方的CMSIS源码放置其中即可)。 - -- 使用codegen编译[MNIST手写数字识别模型](https://download.mindspore.cn/model_zoo/official/lite/mnist_lite/mnist.ms),生成对应的STM32F46推理代码。具体命令如下: - - ```bash - ./codegen --codePath=. --modelPath=mnist.ms --target=ARM32M - ``` - -- 生成代码工程目录如下: - - ```bash - ├── mnist # 生成代码的根目录 - ├── benchmark # 生成代码的benchmark目录 - └── src # 模型推理代码目录 - ``` - -- 预置算子静态库的目录如下: - - ```bash - ├── operator_library # 平台算子库目录 - ├── include # 平台算子库头文件目录 - └── nnacl # MindSpore团队提供的平台算子库源文件 - └── wrapper # MindSpore团队提供的平台算子库源文件 - └── CMSIS # Arm官方提供的CMSIS平台算子库源文件 - ``` - - > 在使用过程中,引入CMSIS v5.7.0 Softmax相关的CMSIS算子文件时,头文件中需要加入`arm_nnfunctions.h`。 - -#### 代码工程编译 - -1. 环境测试 - - 安装好交叉编译所需环境后,需要在Windows环境中依次将其加入到环境变量中。 - - ```bash - gcc -v # 查看GCC版本 - arm-none-eabi-gdb -v # 查看交叉编译环境 - jlink -v # 查看J-Link版本 - make -v # 查看Make版本 - ``` - - 以上命令均成功返回值时,表明环境准备已完成,可以继续进入下一步,否则请务必先安装上述环境。 - -2. 生成STM32F746单板初始化代码([详情示例代码](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/micro/example/mnist_stm32f746)) - - - 启动 STM32CubeMX,新建project,选择单板STM32F746IG。 - - 成功以后,选择`Makefile` ,`generator code`。 - - 在生成的工程目录下打开`cmd`,执行`make`,测试初始代码是否成功编译。 - - ```bash - # make成功结果 - arm-none-eabi-size build/test_stm32f746.elf - text data bss dec hex filename - 3660 20 1572 5252 1484 build/test_stm32f746.elf - arm-none-eabi-objcopy -O ihex build/test_stm32f746.elf build/test_stm32f746.hex - arm-none-eabi-objcopy -O binary -S build/test_stm32f746.elf build/test_stm32f746.bin - ``` - -#### 编译模型 - -1. 拷贝MindSpore团队提供算子文件以及对应头文件到STM32CubeMX生成的工程目录中。 - -2. 
拷贝codegen生成模型推理代码到 STM32CubeMX生成的代码工程目录中。 - - ```bash - ├── .mxproject - ├── build # 工程编译输出目录 - ├── Core - ├── Drivers - ├── mnist # codegen生成的cortex-m7 模型推理代码 - ├── Makefile # 编写工程makefile文件组织mnist && operator_library源文件到工程目录中 - ├── startup_stm32f746xx.s - ├── STM32F746IGKx_FLASH.ld - └── test_stm32f746.ioc - ``` - -3. 修改makefile文件,组织算子静态库以及模型推理代码,具体makefile文件内容参见[示例](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/micro/example/mnist_stm32f746)。 - - ```bash - # C includes - C_INCLUDES = \ - -ICore/Inc \ - -IDrivers/STM32F7xx_HAL_Driver/Inc \ - -IDrivers/STM32F7xx_HAL_Driver/Inc/Legacy \ - -IDrivers/CMSIS/Device/ST/STM32F7xx/Include \ - -Imnist/operator_library/include \ # 新增,指定算子库头文件目录 - -Imnist/include \ # 新增,指定模型推理代码头文件 - -Imnist/src # 新增,指定模型推理代码源文件 - ...... - ``` - -4. 在工程目录的Core/Src的main.c编写模型调用代码,具体代码新增如下: - - ```cpp - while (1) { - /* USER CODE END WHILE */ - SEGGER_RTT_printf(0, "***********mnist test start***********\n"); - const char *model_buffer = nullptr; - int model_size = 0; - session::LiteSession *session = mindspore::session::LiteSession::CreateSession(model_buffer, model_size, nullptr); - Vector inputs = session->GetInputs(); - size_t inputs_num = inputs.size(); - void *inputs_binbuf[inputs_num]; - int inputs_size[inputs_num]; - for (size_t i = 0; i < inputs_num; ++i) { - inputs_size[i] = inputs[i]->Size(); - } - // here mnist only have one input data,just hard code to it's array; - inputs_binbuf[0] = mnist_inputs_data; - for (size_t i = 0; i < inputs_num; ++i) { - void *input_data = inputs[i]->MutableData(); - memcpy(input_data, inputs_binbuf[i], inputs_size[i]); - } - int ret = session->RunGraph(); - if (ret != lite::RET_OK) { - return lite::RET_ERROR; - } - Vector outputs_name = session->GetOutputTensorNames(); - for (int i = 0; i < outputs_name.size(); ++i) { - tensor::MSTensor *output_tensor = session->GetOutputByTensorName(outputs_name[i]); - if (output_tensor == nullptr) { - return -1; - } - float *casted_data = 
static_cast(output_tensor->MutableData()); - if (casted_data == nullptr) { - return -1; - } - for (size_t j = 0; j < 10 && j < output_tensor->ElementsNum(); j++) { - SEGGER_RTT_printf(0, "output: [%d] is : [%d]/100\n", i, casted_data[i] * 100); - } - } - delete session; - SEGGER_RTT_printf(0, "***********mnist test end***********\n"); - ``` - -5. 在工程跟目中目录使用管理员权限打开`cmd` 执行 `make`进行编译。 - - ```bash - make - ``` - -### STM32F746工程部署 - -使用J-Link将可执行文件拷贝到单板上并做推理。 - -```bash -jlinkgdbserver # 启动jlinkgdbserver 选定target device为STM32F746IG -jlinkRTTViewer # 启动jlinkRTTViewer 选定target devices为STM32F746IG -arm-none-eabi-gdb # 启动arm-gcc gdb服务 -file build/target.elf # 打开调测文件 -target remote 127.0.0.1 # 连接jlink服务器 -monitor reset # 重置单板 -monitor halt # 挂起单板 -load # 加载可执行文件到单板 -c # 执行模型推理 -``` - -## 更多详情 - -### [Linux_x86_64平台编译部署](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/micro/example/mnist_x86) - -### [Android平台编译部署](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/micro/example/mobilenetv2) - diff --git a/tutorials/lite/source_zh_cn/use/npu_info.md b/tutorials/lite/source_zh_cn/use/npu_info.md deleted file mode 100644 index 2c9816ecee730465bb0dd89a366cd4215467fd2b..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/npu_info.md +++ /dev/null @@ -1,84 +0,0 @@ -# 集成NPU使用说明 - -`NPU` `Android` `Linux` `环境准备` `算子支持` `中级` `高级` - - - -- [集成NPU使用说明](#集成NPU使用说明) - - [使用步骤](#使用步骤) - - [环境准备](#环境准备) - - [编译构建](#编译构建) - - [集成使用](#集成使用) - - [芯片支持](#芯片支持) - - [算子支持](#算子支持) - - - - - -## 使用步骤 - -### 环境准备 - -在基本的[环境准备](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html)之外,使用NPU需要集成HUAWEI HiAI DDK。 -DDK包含了使用NPU的对外接口(包括模型构建、加载,计算等),以及封装成动态库的接口实现(名为libhiai*.so)。 -下载[DDK](https://developer.huawei.com/consumer/cn/doc/development/hiai-Library/ddk-download-0000001053590180), -并将压缩包解压后的目录设置为环境变量`${HWHIAI_DDK}`。构建脚本将使用这个环境变量寻找DDK。 - -### 编译构建 - 
-在Linux环境下,使用MindSpore[源代码](https://gitee.com/mindspore/mindspore)根目录下的build.sh脚本可以构建集成NPU的MindSpore Lite包,命令如下, -它将在MindSpore源代码根目录下的output目录下构建出MindSpore Lite的包,其中包含NPU的动态库,libmindspore-lite动态库以及测试工具Benchmark。 - -```bash -bash build.sh -I arm64 -e npu -``` - -有关编译详情见[Linux环境编译](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#linux)。 - -### 集成使用 - -- 集成说明 - - 开发者需要集成使用NPU功能时,需要注意: - - 在代码中[配置NPU后端](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/runtime_cpp.html#npu), - 有关使用Runtime执行推理详情见[使用Runtime执行推理(C++)](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/runtime_cpp.html)。 - - 编译执行可执行程序。如采用动态加载方式,参考[编译输出](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#runtime)中编译选项为`-I arm64`或`-I arm32`时的内容, - 配置好环境变量,将会动态加载libhiai.so, libhiai_ir.so, libhiai_ir_build.so。例如: - - ```bash - export LD_LIBRARY_PATH=mindspore-lite-{version}-inference-android-{arch}/inference/third_party/hiai_ddk/lib/:$LD_LIBRARY_PATH - ``` - -- Benchmark测试NPU推理 - - 用户也可以使用MindSpore Lite的Benchmark工具测试NPU推理。 -编译出的Benchmark位置见[编译输出](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#runtime)。 -将构建包传到具有NPU芯片(支持的芯片详情见[芯片与HUAWEI HiAI Version版本映射关系](https://developer.huawei.com/consumer/cn/doc/development/hiai-Guides/mapping-relationship-0000001052830507#ZH-CN_TOPIC_0000001052830507__section94427279718)) -的Android手机的`/data/local/tmp/`目录下,在手机上使用Benchmark工具测试NPU推理,示例如下: - - - 测性能 - - ```bash - ./benchmark --device=NPU --modelFile=./models/test_benchmark.ms --timeProfiling=true - ``` - - - 测精度 - - ```bash - ./benchmark --device=NPU --modelFile=./models/test_benchmark.ms --inDataFile=./input/test_benchmark.bin --inputShapes=1,32,32,1 --accuracyThreshold=3 --benchmarkDataFile=./output/test_benchmark.out - ``` - -有关Benchmark使用详情,见[Benchmark使用](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/benchmark_tool.html)。 - -有关环境变量设置,需要根据[编译输出](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html#runtime)中编译选项为`-I arm64`或`-I 
arm32`时的目录结构, -将libmindspore-lite.so(目录为`mindspore-lite-{version}-inference-android-{arch}/inference/lib`)和 -NPU库(目录为`mindspore-lite-{version}-inference-android-{arch}/inference/third_party/hiai_ddk/lib/`)所在的目录加入`${LD_LIBRARY_PATH}`。 - -## 芯片支持 - -NPU芯片支持见[芯片与HUAWEI HiAI Version版本映射关系](https://developer.huawei.com/consumer/cn/doc/development/hiai-Guides/mapping-relationship-0000001052830507#ZH-CN_TOPIC_0000001052830507__section94427279718)。 - -## 算子支持 - -NPU算子支持见[Lite 算子支持](https://www.mindspore.cn/tutorial/lite/zh-CN/master/operator_list_lite.html)。 diff --git a/tutorials/lite/source_zh_cn/use/post_training_quantization.md b/tutorials/lite/source_zh_cn/use/post_training_quantization.md deleted file mode 100644 index 65947f1157fb60ae28aee1f9940dddf6a8ff738c..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/post_training_quantization.md +++ /dev/null @@ -1,141 +0,0 @@ -# 优化模型(训练后量化) - -`Windows` `Linux` `模型转换` `模型调优` `中级` `高级` - - - -- [优化模型(训练后量化)](#优化模型训练后量化) - - [概述](#概述) - - [权重量化](#权重量化) - - [参数说明](#参数说明) - - [使用步骤](#使用步骤) - - [部分模型精度结果](#部分模型精度结果) - - [全量化](#全量化) - - [参数说明](#参数说明-1) - - [使用步骤](#使用步骤-1) - - [部分模型精度结果](#部分模型精度结果-1) - - - - - -## 概述 - -对于已经训练好的`float32`模型,通过训练后量化将其转为`int8`,不仅能减小模型大小,而且能显著提高推理性能。在MindSpore Lite中,这部分功能集成在模型转换工具`conveter_lite`内,通过增加命令行参数,便能够转换得到量化后模型。 - -MindSpore Lite训练后量化分为两类: - -1. 权重量化:对模型的权值进行量化,仅压缩模型大小,推理时仍然执行`float32`推理; -2. 
全量化:对模型的权值、激活值等统一进行量化,推理时执行`int`运算,能提升模型推理速度、降低功耗。 - -训练后量化在两种情况下所需的数据类型和参数设定不同,但均可通过转换工具设定。有关转换工具`converter_lite`的使用方法可参考[转换为MindSpore Lite模型](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html)。在此基础之上进行配置,启用训练后量化。 - -## 权重量化 - -支持1~16之间的任意比特量化,量化比特数越低,模型压缩率越大,但是精度损失通常也比较大。可以结合使用[Benchmark工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/benchmark_tool.html)进行精度评估,确定合适的量化比特数;通常平均相对误差(accuracyThreshold)满足4%以内,精度误差是比较小的。下面对权重量化的使用方式和效果进行阐述。 - -### 参数说明 - -权重量化转换命令的一般形式为: - -```bash -./converter_lite --fmk=ModelType --modelFile=ModelFilePath --outputFile=ConvertedModelPath --quantType=WeightQuant --bitNum=BitNumValue --quantWeightSize=ConvWeightQuantSizeThresholdValue --quantWeightChannel=ConvWeightQuantChannelThresholdValue -``` - -下面对此命令的量化相关参数进行说明: - -| 参数 | 属性 | 功能描述 | 参数类型 | 默认值 | 取值范围 | -| -------- | ------- | ----- | ----- |----- | ----- | -| `--quantType=` | 必选 | 设置为WeightQuant,启用权重量化 | String | - | 必须设置为WeightQuant | -| `--bitNum=` | 可选 | 设定权重量化的比特数,目前支持1bit~16bit量化 | Integer | 8 | \[1,16] | -| `--quantWeightSize=` | 可选 | 设定参与权重量化的卷积核尺寸阈值,若卷积核尺寸大于该值,则对此权重进行量化;建议设置为500 | Integer | 0 | \[0,+∞) | -| `--quantWeightChannel=` | 可选 | 设定参与权重量化的卷积通道数阈值,若卷积通道数大于该值,则对此权重进行量化;建议设置为16 | Integer | 16 | \[0,+∞) | - -用户可根据模型及自身需要对权重量化的参数作出调整。 -> 为保证权重量化的精度,建议`--bitNum`参数设定范围为8bit~16bit。 - -### 使用步骤 - -1. 正确编译出`converter_lite`可执行文件。该部分可参考构建文档[编译MindSpore Lite](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/build.html),获得`converter_lite`工具,并配置环境变量。 -2. 以TensorFlow Lite模型为例,执行权重量化模型转换命令: - - ```bash - ./converter_lite --fmk=TFLITE --modelFile=Inception_v3.tflite --outputFile=Inception_v3.tflite --quantType=WeightQuant --bitNum=8 --quantWeightSize=0 --quantWeightChannel=0 - ``` - -3. 
上述命令执行成功后,便可得到量化后的模型`Inception_v3.tflite.ms`,量化后的模型大小通常会下降到FP32模型的1/4。 - -### 部分模型精度结果 - - | 模型 | 测试数据集 | FP32模型精度 | 权重量化精度(8bit) | - | -------- | ------- | ----- | ----- | - | [Inception_V3](https://storage.googleapis.com/download.tensorflow.org/models/tflite/model_zoo/upload_20180427/inception_v3_2018_04_27.tgz) | [ImageNet](http://image-net.org/) | 77.60% | 77.53% | - | [Mobilenet_V1_1.0_224](https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_1.0_224.tgz) | [ImageNet](http://image-net.org/) | 70.96% | 70.56% | - | [Mobilenet_V2_1.0_224](https://storage.googleapis.com/download.tensorflow.org/models/tflite_11_05_08/mobilenet_v2_1.0_224.tgz) | [ImageNet](http://image-net.org/) | 71.56% | 71.53% | - -> 以上所有结果均在x86环境上测得。 - -## 全量化 - -针对需要提升模型运行速度、降低模型运行功耗的场景,可以使用训练后全量化功能。下面对全量化的使用方式和效果进行阐述。 - -### 参数说明 - -全量化转换命令的一般形式为: - -```bash -./converter_lite --fmk=ModelType --modelFile=ModelFilePath --outputFile=ConvertedModelPath --quantType=PostTraining --bitNum=8 --configFile=config.cfg -``` - -下面对此命令的量化相关参数进行说明: - -| 参数 | 属性 | 功能描述 | 参数类型 | 默认值 | 取值范围 | -| -------- | ------- | ----- | ----- |----- | ----- | -| `--quantType=` | 必选 | 设置为PostTraining,启用全量化 | String | - | 必须设置为PostTraining | -| `--configFile=` | 必选 | 校准数据集配置文件路径 | String | - | - | -| `--bitNum=` | 可选 | 设定全量化的比特数,目前支持1bit~8bit量化 | Integer | 8 | \[1,8] | - -为了计算激活值的量化参数,用户需要提供校准数据集。校准数据集最好来自真实推理场景,能表征模型的实际输入情况,数量在100个左右。 -校准数据集配置文件采用`key=value`的方式定义相关参数,需要配置的`key`如下: - -| 参数名 | 属性 | 功能描述 | 参数类型 | 默认值 | 取值范围 | -| -------- | ------- | ----- | ----- | ----- | ----- | -| image_path | 必选 | 存放校准数据集的目录;如果模型有多个输入,请依次填写对应的数据所在目录,目录路径间请用`,`隔开 | String | - | 该目录存放可直接用于执行推理的输入数据。由于目前框架还不支持数据预处理,所有数据必须事先完成所需的转换,使得它们满足推理的输入要求 | -| batch_count | 可选 | 使用的输入数目 | Integer | 100 | (0,+∞) | -| method_x | 可选 | 网络层输入输出数据量化算法 | String | KL | KL、MAX_MIN、RemovalOutlier。
    KL:基于[KL散度](http://on-demand.gputechconf.com/gtc/2017/presentation/s7310-8-bit-inference-with-tensorrt.pdf)对数据范围作量化校准。
    MAX_MIN:基于最大值、最小值计算数据的量化参数。
    RemovalOutlier:按照一定比例剔除数据的极大极小值,再计算量化参数。
    在校准数据集与实际推理时的输入数据相吻合的情况下,推荐使用MAX_MIN;而在校准数据集噪声比较大的情况下,推荐使用KL或者RemovalOutlier | -| thread_num | 可选 | 使用校准数据集执行推理流程时的线程数 | Integer | 1 | (0,+∞) | -| bias_correction | 可选 | 是否对量化误差进行校正 | Boolean | false | true、false。使能后,能提升转换后的模型精度,建议设置为true | - -> 对于多输入模型,要求不同输入数据分别存放在各自不同的目录,同时各自目录中的所有文件的文件名按照字典序递增排序后,能够一一对应。例如:模型有两个输入input0、input1,校准数据集共2组(batch_count=2);input0的对应数据存放在/dir/input0/目录,输入数据文件名为:data_1.bin、data_2.bin;input1的对应数据存放在/dir/input1/目录,输入数据文件名为:data_a.bin、data_b.bin,则认为(data_1.bin, data_a.bin)构成一组输入,(data_2.bin, data_b.bin)构成另一组输入。 - -### 使用步骤 - -1. 正确编译出`converter_lite`可执行文件。 -2. 准备校准数据集,假设存放在`/dir/images`目录,编写配置文件`config.cfg`,内容如下: - - ```python - image_path=/dir/images - batch_count=100 - method_x=MAX_MIN - thread_num=1 - bias_correction=true - ``` - - 校准数据集可以选择测试数据集的子集,要求`/dir/images`目录下存放的每个文件均是预处理好的输入数据,每个文件都可以直接用于推理的输入。 -3. 以MindSpore模型为例,执行全量化的模型转换命令: - - ```bash - ./converter_lite --fmk=MINDIR --modelFile=lenet.mindir --outputFile=lenet_quant --quantType=PostTraining --configFile=config.cfg - ``` - -4. 
上述命令执行成功后,便可得到量化后的模型`lenet_quant.ms`,通常量化后的模型大小会下降到FP32模型的1/4。 - -### 部分模型精度结果 - - | 模型 | 测试数据集 | method_x | FP32模型精度 | 全量化精度(8bit) | 说明 | - | -------- | ------- | ----- | ----- | ----- | ----- | - | [Inception_V3](https://storage.googleapis.com/download.tensorflow.org/models/tflite/model_zoo/upload_20180427/inception_v3_2018_04_27.tgz) | [ImageNet](http://image-net.org/) | KL | 77.60% | 77.40% | 校准数据集随机选择ImageNet Validation数据集中的100张 | - | [Mobilenet_V1_1.0_224](https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_1.0_224.tgz) | [ImageNet](http://image-net.org/) | KL | 70.96% | 70.31% | 校准数据集随机选择ImageNet Validation数据集中的100张 | - | [Mobilenet_V2_1.0_224](https://storage.googleapis.com/download.tensorflow.org/models/tflite_11_05_08/mobilenet_v2_1.0_224.tgz) | [ImageNet](http://image-net.org/) | MAX_MIN | 71.56% | 71.16% | 校准数据集随机选择ImageNet Validation数据集中的100张 | - -> 以上所有结果均在x86环境上测得,均设置`bias_correction=true`。 diff --git a/tutorials/lite/source_zh_cn/use/runtime.rst b/tutorials/lite/source_zh_cn/use/runtime.rst deleted file mode 100644 index f221f4421703d63341b064fd72d773bbe0a80cb1..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/runtime.rst +++ /dev/null @@ -1,8 +0,0 @@ -执行推理 -======================== - -.. 
toctree:: - :maxdepth: 1 - - runtime_cpp - runtime_java \ No newline at end of file diff --git a/tutorials/lite/source_zh_cn/use/runtime_cpp.md b/tutorials/lite/source_zh_cn/use/runtime_cpp.md deleted file mode 100644 index 4a25857987939155348a2b71647665e41c4a9b42..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/runtime_cpp.md +++ /dev/null @@ -1,592 +0,0 @@ -# 使用C++接口执行推理 - -`Windows` `Linux` `Android` `C++` `推理应用` `模型加载` `数据准备` `中级` `高级` - - - -- [使用C++接口执行推理](#使用c接口执行推理) - - [概述](#概述) - - [模型加载](#模型加载) - - [创建配置上下文](#创建配置上下文) - - [配置线程数](#配置线程数) - - [配置使用CPU后端](#配置使用cpu后端) - - [配置使用GPU后端](#配置使用gpu后端) - - [配置使用NPU后端](#配置使用npu后端) - - [创建会话](#创建会话) - - [图编译](#图编译) - - [输入数据](#输入数据) - - [执行推理](#执行推理) - - [获取输出](#获取输出) - - [内存释放](#内存释放) - - [高级用法](#高级用法) - - [优化运行内存大小](#优化运行内存大小) - - [绑核操作](#绑核操作) - - [输入维度Resize](#输入维度resize) - - [Session并行](#session并行) - - [共享内存池](#共享内存池) - - [回调运行](#回调运行) - - [CreateSession简化版接口调用流程](#createsession简化版接口调用流程) - - [查看日志](#查看日志) - - [获取版本号](#获取版本号) - - - - - -## 概述 - -通过[MindSpore Lite模型转换工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html)转换成`.ms`模型后,即可在Runtime中执行模型的推理流程。本教程介绍如何使用[C++接口](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/index.html)执行推理。 - -使用MindSpore Lite推理框架主要包括以下步骤: - -1. 模型加载:从文件系统中读取由[模型转换工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html)转换得到的`.ms`模型,通过[mindspore::lite::Model::Import](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#import)导入模型,进行模型解析,创建得到 `Model *`。 -2. 创建配置上下文:创建配置上下文[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#context),保存会话所需的一些基本配置参数,用于指导图编译和图执行。 -3. 创建会话:创建[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)会话,并将上一步得到的[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#context)配置到会话中。 -4. 
图编译:执行推理之前,需要调用[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的`CompileGraph`接口进行图编译。图编译阶段主要进行子图切分、算子选型调度等过程,该阶段会耗费较多时间,所以建议[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)创建一次,编译一次,多次推理。 -5. 输入数据:图执行之前需要向`输入Tensor`中填充数据。 -6. 执行推理:使用[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的`RunGraph`进行模型推理。 -7. 获得输出:图执行结束之后,可以通过`输出Tensor`得到推理结果。 -8. 释放内存:无需使用MindSpore Lite推理框架时,需要释放已创建的[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)和[Model](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#model)。 - -![img](../images/lite_runtime.png) - -> 快速了解MindSpore Lite执行推理的完整调用流程,请参考[体验MindSpore Lite C++极简Demo](https://www.mindspore.cn/tutorial/lite/zh-CN/master/quick_start/quick_start_cpp.html)。 - -## 模型加载 - -通过MindSpore Lite进行模型推理时,需要从文件系统读取[模型转换工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html)转换得到的`.ms`模型文件,并通过[mindspore::lite::Model::Import](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#import)静态函数从内存数据中创建,`Model`将持有权重数据、算子属性等模型数据。 - -`mindspore::lite::Model::Import`函数返回的[Model](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#model)实例是一个通过`new`创建的指针,不再需要时,用户需要通过`delete`释放。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L325)演示如何从文件系统读取MindSpore Lite模型,并通过`mindspore::lite::Model::Import`进行模型解析的功能: - -```cpp -// Read model file. -size_t size = 0; -char *model_buf = ReadFile(model_path, &size); -if (model_buf == nullptr) { - std::cerr << "Read model file failed." << std::endl; -} -// Load the .ms model. -auto model = mindspore::lite::Model::Import(model_buf, size); -delete[](model_buf); -if (model == nullptr) { - std::cerr << "Import model file failed." 
<< std::endl; -} -``` - -## 创建配置上下文 - -上下文会保存会话所需的一些基本配置参数,用于指导图编译和图执行,如果用户通过`new`创建[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#id2),不再需要时,需要用户通过`delete`释放。一般在创建完[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)后,[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#id2)即可释放。其中[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#id2)所包含的参数定义如下: - -- [thread_num_](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#thread-num):MindSpore Lite内置一个进程共享的线程池,推理时通过`thread_num_`指定线程池的最大线程数,默认为2线程。 -- [allocator](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#allocator):MindSpore Lite支持动态内存分配和释放,如果没有指定`allocator`,推理时会生成一个默认的`allocator`,也可以通过[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#context)方法在多个[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#id2)中共享内存分配器,具体调用方式可参考[共享内存池](#共享内存池)的使用方式。 - -- [device_list_](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#device-list):MindSpore Lite支持异构推理,推理时的后端配置信息由[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#id2)中的`device_list_`指定,默认存放CPU的[DeviceContext](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#devicecontext)。在进行图编译时,会根据`device_list_`中不同的后端配置信息进行算子选型调度。目前仅支持两种异构,CPU和GPU异构或者CPU和NPU异构。当配置GPU的[DeviceContext](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#devicecontext)时,优先使用GPU推理;当配置NPU的[DeviceContext](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#devicecontext)时,优先使用NPU推理。 - -> `device_list_[0]`必须是CPU的`DeviceContext`, `device_list_[1]`是GPU的`DeviceContext`或者NPU的`DeviceContext`。暂时不支持同时设置CPU, GPU和NPU三个`DeviceContext`。 - -### 配置线程数 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L109)演示如何配置线程数的方式: - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "New context failed while running." 
<< std::endl; -} -// Configure the number of worker threads in the thread pool to 2, including the main thread. -context->thread_num_ = 2; -``` - -### 配置使用CPU后端 - -当需要执行的后端为CPU时,`Context`创建后`device_list_[0]`默认是CPU的`DeviceContext`,可直接配置[CpuDeviceInfo](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#cpudeviceinfo)中`enable_float16_`以及`cpu_bind_mode_`等属性。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L102)演示如何创建CPU后端,同时设定CPU绑核模式为大核优先并且使能Float16推理: - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "New context failed while running." << std::endl; -} -// CPU device context has default values. -auto &cpu_device_info = context->device_list_[0].device_info_.cpu_device_info_; -// The large core takes priority in thread and core binding methods. This parameter will work in the BindThread interface. For specific binding effect, see the "Run Graph" section. -cpu_device_info.cpu_bind_mode_ = HIGHER_CPU; -// Use float16 operator as priority. -cpu_device_info.enable_float16_ = true; -``` - -> Float16需要CPU为ARM v8.2架构的机型才能生效,其他不支持的机型和x86平台会自动回退到Float32执行。 - -### 配置使用GPU后端 - -当需要执行的后端为CPU和GPU的异构推理时,需要同时设置CPU和GPU的[DeviceContext](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#devicecontext),配置后将会优先使用GPU推理。其中[GpuDeviceInfo](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#gpudeviceinfo)包含`enable_float16_`公有属性用于使能Float16推理。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L120)演示如何创建CPU与GPU异构推理后端,同时GPU也设定使能Float16推理: - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "CreateSession failed while running." << std::endl; -} -// If GPU device context is set. The preferred backend is GPU, which means, if there is a GPU operator, it will run on the GPU first, otherwise it will run on the CPU. 
-DeviceContext gpu_device_ctx{DT_GPU, {false}}; -// GPU use float16 operator as priority. -gpu_device_ctx.device_info_.gpu_device_info_.enable_float16_ = true; -// The GPU device context needs to be push_back into device_list to work. -context->device_list_.push_back(gpu_device_ctx); -``` - -> 目前GPU的后端是基于OpenCL,支持Mali、Adreno的GPU,OpenCL版本为2.0。 -> -> 具体配置为: -> -> CL_TARGET_OPENCL_VERSION=200 -> -> CL_HPP_TARGET_OPENCL_VERSION=120 -> -> CL_HPP_MINIMUM_OPENCL_VERSION=120 - -### 配置使用NPU后端 - -当需要执行的后端为CPU和NPU的异构推理时,需要同时设置CPU和NPU的[DeviceContext](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#devicecontext),配置后将会优先使用NPU推理,其中[NpuDeviceInfo](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#npudeviceinfo)包含`frequency_`公有属性用来设置NPU频率。 - -下面示例[代码演示](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L137)如何创建CPU与NPU异构推理后端,同时NPU频率设置为3,频率值默认为3,可设置为1(低功耗)、2(均衡)、3(高性能)、4(极致性能),设置其他值将被改为3: - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "CreateSession failed while running." << std::endl; -} -DeviceContext npu_device_ctx{DT_NPU}; -npu_device_ctx.device_info_.npu_device_info_.frequency_ = 3; -// The NPU device context needs to be push_back into device_list to work. 
-context->device_list_.push_back(npu_device_ctx); -``` - -## 创建会话 - -使用MindSpore Lite执行推理时,[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)是推理的主入口,通过[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)可以进行图编译、图执行。采用上一步创建得到的[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#id2),调用[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的静态[static LiteSession *CreateSession(const lite::Context *context)](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#createsession)方法来创建[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L275)演示如何创建`LiteSession`的方式: - -```cpp -// Use Context to create Session. -auto session = session::LiteSession::CreateSession(context.get()); -// After the LiteSession is created, the Context can be released. -... -if (session == nullptr) { - std::cerr << "CreateSession failed while running." 
<< std::endl; -} -``` - -> 函数返回的[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)实例是一个指针,通过`new`创建,不再需要时,需要用户通过`delete`释放。 -> -> 创建完[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)后,上一步创建得到的[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#context)即可释放。 - -## 图编译 - -在图执行前,需要调用[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的[CompileGraph](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#compilegraph)接口进行图编译,进一步解析从文件中加载的[Model](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#model)实例,主要进行子图切分、算子选型调度。这部分会耗费较多时间,所以建议[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)创建一次,编译一次,多次执行。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L282)演示调用`CompileGraph`进行图编译。 - -```cpp -// Assume we have created a LiteSession instance named session and a Model instance named model before. -auto ret = session->CompileGraph(model); -if (ret != RET_OK) { - std::cerr << "Compile failed while running." << std::endl; - // session and model need to be released by users manually. - ... -} -``` - -## 输入数据 - -在图执行前,需要获取到模型的输入[MSTensor](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mstensor),将输入数据通过`memcpy`拷贝到模型的输入[MSTensor](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mstensor)。同时,可以通过[MSTensor](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mstensor)的[Size](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#size)方法来获取Tensor应该填入的数据大小,通过[data_type](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#data-type)方法来获取Tensor的数据类型,通过[MSTensor](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mstensor)的[MutableData](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mutabledata)方法来获取可写的指针。 - -MindSpore Lite提供两种方法来获取模型的输入Tensor。 - -1. 
使用[GetInputsByTensorName](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getinputsbytensorname)方法,根据模型输入Tensor的名称来获取模型输入Tensor中连接到输入节点的Tensor,下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L169)演示如何调用`GetInputsByTensorName`获得输入Tensor并填充数据。 - - ```cpp - // Pre-processing of input data, convert input data format to NHWC. - ... - // Assume that the model has only one input tensor named 2031_2030_1_construct_wrapper:x. - auto in_tensor = session->GetInputsByTensorName("2031_2030_1_construct_wrapper:x"); - if (in_tensor == nullptr) { - std::cerr << "Input tensor is nullptr" << std::endl; - } - auto input_data = in_tensor->MutableData(); - if (input_data == nullptr) { - std::cerr << "MallocData for inTensor failed." << std::endl; - } - memcpy(input_data, input_buf, data_size); - // Users need to free input_buf. - ``` - -2. 使用[GetInputs](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getinputs)方法,直接获取所有的模型输入Tensor的vector,下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L150)演示如何调用`GetInputs`获得输入Tensor并填充数据。 - - ```cpp - // Pre-processing of input data, convert input data format to NHWC. - ... - // Assume we have created a LiteSession instance named session. - auto inputs = session->GetInputs(); - // Assume that the model has only one input tensor. - auto in_tensor = inputs.front(); - if (in_tensor == nullptr) { - std::cerr << "Input tensor is nullptr" << std::endl; - } - auto *in_data = in_tensor->MutableData(); - if (in_data == nullptr) { - std::cerr << "Data of in_tensor is nullptr" << std::endl; - } - memcpy(in_data, input_buf, data_size); - // Users need to free input_buf. 
- ``` - -> MindSpore Lite的模型输入Tensor中的数据排布必须是`NHWC`。如果需要了解更多数据前处理过程,可参考基于JNI接口的Android应用开发中[编写端侧推理代码](https://www.mindspore.cn/tutorial/lite/zh-CN/master/quick_start/quick_start.html#id10)的第2步,将输入图片转换为传入MindSpore模型的Tensor格式。 -> -> [GetInputs](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getinputs)和[GetInputsByTensorName](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getinputsbytensorname)方法返回的vector不需要用户释放。 - -## 执行推理 - -MindSpore Lite会话在进行图编译以后,即可调用[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的[RunGraph](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#rungraph)进行模型推理。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L347)演示调用`RunGraph`执行推理。 - -```cpp -auto ret = session->RunGraph(); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "RunGraph failed" << std::endl; -} -``` - -## 获取输出 - -MindSpore Lite在执行完推理后,就可以获取模型的推理结果。MindSpore Lite提供三种方法来获取模型的输出[MSTensor](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mstensor)。 - -1. 使用[GetOutputsByNodeName](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getoutputsbynodename)方法,根据模型输出节点的名称来获取模型输出[MSTensor](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mstensor)中连接到该节点的Tensor的vector,下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L184)演示如何调用`GetOutputsByNodeName`获得输出Tensor。 - - ```cpp - // Assume we have created a LiteSession instance named session before. - // Assume that model has a output node named Default/head-MobileNetV2Head/Softmax-op204. - auto output_vec = session->GetOutputsByNodeName("Default/head-MobileNetV2Head/Softmax-op204"); - // Assume that output node named Default/head-MobileNetV2Head/Softmax-op204 has only one output tensor. 
- auto out_tensor = output_vec.front(); - if (out_tensor == nullptr) { - std::cerr << "Output tensor is nullptr" << std::endl; - } - // Post-processing your result data. - ``` - -2. 使用[GetOutputByTensorName](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getoutputbytensorname)方法,根据模型输出Tensor的名称来获取对应的模型输出[MSTensor](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mstensor),下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L212)演示如何调用`GetOutputsByTensorName`获得输出Tensor。 - - ```cpp - // Assume we have created a LiteSession instance named session. - // We can use GetOutputTensorNames method to get all name of output tensor of model which is in order. - auto tensor_names = session->GetOutputTensorNames(); - // Assume we have created a LiteSession instance named session before. - // Use output tensor name returned by GetOutputTensorNames as key - for (auto tensor_name : tensor_names) { - auto out_tensor = session->GetOutputByTensorName(tensor_name); - if (out_tensor == nullptr) { - std::cerr << "Output tensor is nullptr" << std::endl; - } - // Post-processing the result data. - } - ``` - -3. 使用[GetOutputs](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getoutputs)方法,直接获取所有的模型输出[MSTensor](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mstensor)的名称和[MSTensor](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/tensor.html#mstensor)指针的一个map,下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L242)演示如何调用`GetOutputs`获得输出Tensor。 - - ```cpp - // Assume we have created a LiteSession instance named session. - auto out_tensors = session->GetOutputs(); - for (auto out_tensor : out_tensors) { - // Post-processing the result data. 
- } - ``` - -> [GetOutputsByNodeName](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getoutputsbynodename)、[GetOutputByTensorName](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getoutputbytensorname)和[GetOutputs](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getoutputs)方法返回的vector或map不需要用户释放。 - -## 内存释放 - -无需使用MindSpore Lite推理框架时,需要释放已经创建的LiteSession和Model,下列[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L361)演示如何在程序结束前进行内存释放。 - -```cpp -// Delete model buffer. -// Assume that the variable of Model * is named model. -delete model; -// Delete session buffer. -// Assume that the variable of Session * is named session. -delete session; -``` - -## 高级用法 - -### 优化运行内存大小 - -如果对运行时内存有较大的限制,可以在[Model](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#model)被图编译[CompileGraph](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#compilegraph)以后,调用[Free](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#free)接口来降低内存占用。一旦调用了某个[Model](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#model)的[Free](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#free)接口,该[Model](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#model)就不能再进行图编译了。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L340)演示如何调用`Model`的`Free`接口来释放`MetaGraph`减少运行时内存大小。 - -```cpp -// Compile graph. -auto ret = session->CompileGraph(model); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "Compile failed while running." << std::endl; -} - -// Note: when use model->Free(), the model can not be compiled again. 
-model->Free(); -``` - -### 绑核操作 - -MindSpore Lite内置线程池支持绑核、解绑操作,通过调用[BindThread](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#bindthread)接口,可以将线程池中的工作线程绑定到指定CPU核,用于性能分析。绑核操作与创建[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)时用户指定的上下文有关,绑核操作会根据上下文中的绑核策略进行线程与CPU的亲和性设置。 - -需要注意的是,绑核是一个亲和性操作,不保证一定能绑定到指定的CPU核,会受到系统调度的影响。而且绑核后,需要在执行完代码后进行解绑操作。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L346)演示如何在执行推理时绑定大核优先。 - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "New context failed while running." << std::endl; -} -// CPU device context has default values. -auto &cpu_device_info = context->device_list_[0].device_info_.cpu_device_info_; -// The large core takes priority in thread and core binding methods. This parameter will work in the BindThread -// interface. For specific binding effect, see the "Run Graph" section. -cpu_device_info.cpu_bind_mode_ = mindspore::lite::HIGHER_CPU; - -... - -// Assume we have created a LiteSession instance named session. 
-session->BindThread(true); -auto ret = session->RunGraph(); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "RunGraph failed" << std::endl; -} -session->BindThread(false); -``` - -> 绑核参数有三种选择:大核优先、中核优先以及不绑核。 -> -> 判定大核和中核的规则其实是根据CPU核的频率进行区分。 -> -> 绑定大核优先是指线程池中的线程从频率最高的核开始绑定,第一个线程绑定在频率最高的核上,第二个线程绑定在频率第二高的核上,以此类推。 -> -> 对于中核优先,中核的定义是根据经验来定义的,默认设定中核是第三和第四高频率的核,当绑定策略为中核优先时,会优先绑定到中核上,当中核不够用时,会往大核上进行绑定。 - -### 输入维度Resize - -使用MindSpore Lite进行推理时,如果需要对输入的shape进行Resize,则可以在已完成创建会话`CreateSession`与图编译`CompileGraph`之后调用[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的[Resize](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#resize)接口,对输入的tensor重新设置shape。 - -> 某些网络是不支持可变维度,会提示错误信息后异常退出,比如,模型中有MatMul算子,并且MatMul的一个输入Tensor是权重,另一个输入Tensor是输入时,调用可变维度接口会导致输入Tensor和权重Tensor的Shape不匹配,最终导致推理失败。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L368)演示如何对MindSpore Lite的输入Tensor进行[Resize](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#resize): - -```cpp -// Assume we have created a LiteSession instance named session. -// Compile graph. -auto ret = session->CompileGraph(model); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "Compile failed while running." << std::endl; -} -... 
-auto inputs = session->GetInputs(); -std::vector resize_shape = {1, 128, 128, 3}; -// Assume the model has only one input,resize input shape to [1, 128, 128, 3] -std::vector> new_shapes; -new_shapes.push_back(resize_shape); -session->Resize(inputs, new_shapes); -``` - -### Session并行 - -MindSpore Lite支持多个[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)并行推理,每个[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的线程池和内存池都是独立的。但不支持多个线程同时调用单个[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的[RunGraph](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#rungraph)接口。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L463)演示如何并行执行推理多个[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的过程: - -```cpp -int RunSessionParallel(const char *model_path) { - size_t size = 0; - char *model_buf = ReadFile(model_path, &size); - if (model_buf == nullptr) { - std::cerr << "Read model file failed." << std::endl; - return -1; - } - // Load the .ms model. - auto model = mindspore::lite::Model::Import(model_buf, size); - delete[](model_buf); - if (model == nullptr) { - std::cerr << "Import model file failed." << std::endl; - return -1; - } - // Compile MindSpore Lite model. - auto session1 = CreateSessionAndCompileByModel(model); - if (session1 == nullptr) { - std::cerr << "Create session failed." << std::endl; - return -1; - } - - // Compile MindSpore Lite model. - auto session2 = CreateSessionAndCompileByModel(model); - if (session2 == nullptr) { - std::cerr << "Create session failed." << std::endl; - return -1; - } - // Note: when use model->Free(), the model can not be compiled again. 
- model->Free(); - - std::thread thread1([&]() { - GetInputsByTensorNameAndSetData(session1); - auto status = session1->RunGraph(); - if (status != 0) { - std::cerr << "Inference error " << status << std::endl; - return; - } - std::cout << "Session1 inference success" << std::endl; - }); - - std::thread thread2([&]() { - GetInputsByTensorNameAndSetData(session2); - auto status = session2->RunGraph(); - if (status != 0) { - std::cerr << "Inference error " << status << std::endl; - return; - } - std::cout << "Session2 inference success" << std::endl; - }); - - thread1.join(); - thread2.join(); - - // Get outputs data. - GetOutputsByNodeName(session1); - GetOutputsByNodeName(session2); - - // Delete model buffer. - delete model; - // Delete session buffer. - delete session1; - delete session2; - return 0; -} -``` - -MindSpore Lite不支持多线程并行执行单个[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的推理,否则会得到以下错误信息: - -```cpp -ERROR [mindspore/lite/src/lite_session.cc:297] RunGraph] 10 Not support multi-threading -``` - -### 共享内存池 - -如果存在多个Session的情况,可以通过在[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#id2)中配置同一个`allocator`,实现共享内存池来减少运行时内存大小。其中,内存池的内存总大小限制为`3G`,单次分配的内存限制为`2G`。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L535)演示如何在两个[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)间共享内存池的功能: - -```cpp -auto context1 = std::make_shared(); -if (context1 == nullptr) { - std::cerr << "New context failed while running." << std::endl; -} -auto session1 = mindspore::session::LiteSession::CreateSession(context1.get()); -if (session1 == nullptr) { - std::cerr << "CreateSession failed while running." << std::endl; -} -auto ret = session1->CompileGraph(model); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "Compile failed while running." 
<< std::endl; -} -auto context2 = std::make_shared(); -if (context2 == nullptr) { - std::cerr << "New context failed while running." << std::endl; -} - -// Use the same allocator to share the memory pool. -context2->allocator = context1->allocator; - -auto session2 = mindspore::session::LiteSession::CreateSession(context2.get()); -if (session2 == nullptr) { - std::cerr << "CreateSession failed while running " << std::endl; -} -ret = session2->CompileGraph(model); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "Compile failed while running " << std::endl; -} -``` - -### 回调运行 - -MindSpore Lite可以在调用[RunGraph](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#rungraph)时,传入两个[KernelCallBack](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/mindspore.html#kernelcallback)函数指针来回调推理模型,相比于一般的图执行,回调运行可以在运行过程中获取额外的信息,帮助开发者进行性能分析、Bug调试等。额外的信息包括: - -- 当前运行的节点名称 -- 推理当前节点前的输入输出Tensor -- 推理当前节点后的输入输出Tensor - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L633)演示如何定义了两个回调函数作为前置回调指针和后置回调指针,传入到[RunGraph](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#rungraph)接口进行回调推理。 - -```cpp -// Definition of callback function before forwarding operator. -auto before_call_back = [&](const std::vector &before_inputs, - const std::vector &before_outputs, - const mindspore::CallBackParam &call_param) { - std::cout << "Before forwarding " << call_param.node_name << " " << call_param.node_type << std::endl; - return true; -}; -// Definition of callback function after forwarding operator. 
-auto after_call_back = [&](const std::vector &after_inputs, - const std::vector &after_outputs, - const mindspore::CallBackParam &call_param) { - std::cout << "After forwarding " << call_param.node_name << " " << call_param.node_type << std::endl; - return true; -}; - -auto ret = session->RunGraph(before_call_back, after_call_back); -if (ret != mindspore::lite::RET_OK) { - std::cerr << "Inference error " << ret << std::endl; -} -``` - -### CreateSession简化版接口调用流程 - -CreateSession简化版接口[static LiteSession *CreateSession(const char *model_buf, size_t size, const lite::Context *context)](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#createsession)是基于[Context](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#id2)以及读入的模型buffer和buffer的size来创建[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)。使用该接口来创建会话会在内部进行模型加载和图编译,无需再次调用[Import](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#import)接口和[CompileGraph](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#compilegraph)接口。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L425)演示如何调用CreateSession简化版接口创建[LiteSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#litesession)的流程: - -```cpp -auto context = std::make_shared(); -if (context == nullptr) { - std::cerr << "New context failed while running" << std::endl; -} - -// Use model buffer and context to create Session. 
-auto session = mindspore::session::LiteSession::CreateSession(model_buf, size, context); - -if (session == nullptr) { - std::cerr << "CreateSession failed while running" << std::endl; -} -``` - -### 查看日志 - -当推理出现异常的时候,可以通过查看日志信息来定位问题。针对Android平台,采用`Logcat`命令行工具查看MindSpore Lite推理的日志信息,并利用`MS_LITE` 进行筛选。 - -```shell -logcat -s "MS_LITE" -``` - -### 获取版本号 - -MindSpore Lite提供了[Version](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#version)方法可以获取版本号,包含在`include/version.h`头文件中,调用该方法可以得到当前MindSpore Lite的版本号。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_cpp/main.cc#L712)演示如何获取MindSpore Lite的版本号: - -```cpp -#include "include/version.h" -std::string version = mindspore::lite::Version(); -``` diff --git a/tutorials/lite/source_zh_cn/use/runtime_java.md b/tutorials/lite/source_zh_cn/use/runtime_java.md deleted file mode 100644 index 2d21d56768d6b1abca9e4167da535c0fc4897604..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/runtime_java.md +++ /dev/null @@ -1,370 +0,0 @@ -# 使用Java接口执行推理 - -`Android` `Java` `推理应用` `模型加载` `数据准备` `中级` `高级` - - - -- [使用Java接口执行推理](#使用java接口执行推理) - - [概述](#概述) - - [引用MindSpore Lite Java库](#引用mindspore-lite-java库) - - [Linux X86项目引用JAR库](#linux-x86项目引用jar库) - - [Android项目引用AAR库](#android项目引用aar库) - - [加载模型](#加载模型) - - [创建配置上下文](#创建配置上下文) - - [配置使用CPU后端](#配置使用cpu后端) - - [配置使用GPU后端](#配置使用gpu后端) - - [创建会话](#创建会话) - - [图编译](#图编译) - - [输入数据](#输入数据) - - [执行推理](#执行推理) - - [获得输出](#获得输出) - - [释放内存](#释放内存) - - [高级用法](#高级用法) - - [优化运行内存大小](#优化运行内存大小) - - [绑核操作](#绑核操作) - - [输入维度Resize](#输入维度resize) - - [Session并行](#session并行) - - [查看日志](#查看日志) - - [获取版本号](#获取版本号) - - - - - -## 概述 - -通过[MindSpore Lite模型转换工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html)转换成`.ms`模型后,即可在Runtime中执行模型的推理流程。本教程介绍如何使用[JAVA接口](https://www.mindspore.cn/doc/api_java/zh-CN/master/index.html)执行推理。 - -Android项目中使用MindSpore Lite,可以选择采用[C++ 
API](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/index.html)或者[Java API](https://www.mindspore.cn/doc/api_java/zh-CN/master/index.html)运行推理框架。Java API与C++ API相比较而言,Java API可以直接在Java Class中调用,用户无需实现JNI层的相关代码,具有更好的便捷性。运行MindSpore Lite推理框架主要包括以下步骤: - -1. 模型加载:从文件系统中读取由[模型转换工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html)转换得到的`.ms`模型,通过Model的[loadModel](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html#loadmodel)导入模型。 -2. 创建配置上下文:创建配置上下文[MSConfig](https://www.mindspore.cn/doc/api_java/zh-CN/master/msconfig.html#msconfig),保存会话所需的一些基本配置参数,用于指导图编译和图执行。主要包括`deviceType`:设备类型、`threadNum`:线程数、`cpuBindMode`:CPU绑定模式、`enable_float16`:是否优先使用Float16算子。 -3. 创建会话:创建[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession),并调用[init](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#init)方法将上一步得到的[MSConfig](https://www.mindspore.cn/doc/api_java/zh-CN/master/msconfig.html#msconfig)配置到会话中。 -4. 图编译:在图执行前,需要调用[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)的[compileGraph](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#compilegraph)接口进行图编译,主要进行子图切分、算子选型调度。这部分会耗费较多时间,所以建议[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)创建一次,编译一次,多次执行。 -5. 输入数据:图执行之前需要向输入Tensor中填充数据。 -6. 执行推理:使用[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)的[runGraph](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#rungraph)进行模型推理。 -7. 获得输出:图执行结束之后,可以通过输出Tensor得到推理结果。 -8. 
释放内存:无需使用MindSpore Lite推理框架的时候,需要释放已创建的[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)和[model](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html#model)。 - -![img](../images/lite_runtime.png) - -快速了解MindSpore Lite执行推理的完整调用流程,请参考[体验Java极简推理Demo](https://www.mindspore.cn/tutorial/lite/zh-CN/master/quick_start/quick_start_java.html)。 - -## 引用MindSpore Lite Java库 - -### Linux X86项目引用JAR库 - -采用`Maven`作为构建工具时,可将`mindspore-lite-java.jar`拷贝到根目录下的`lib`目录,并在`pom.xml`中增加jar包的依赖。 - -```xml - - - com.mindspore.lite - mindspore-lite-java - 1.0 - system - ${project.basedir}/lib/mindspore-lite-java.jar - - -``` - -> 运行时需要将`libmindspore-lite.so`以及`libminspore-lite-jni.so`的所在路径添加到`java.library.path`。 - -### Android项目引用AAR库 - -采用`Gradle`作为构建工具时,首先将`mindspore-lite-{version}.aar`文件移动到目标module的`libs`目录,然后在目标module的`build.gradle`的`repositories`中添加本地引用目录,最后在`dependencies`中添加AAR的依赖,具体如下所示。 - -> 注意mindspore-lite-{version}是AAR的文件名,需要将{version}替换成对应版本信息。 - -```groovy -repositories { - flatDir { - dirs 'libs' - } -} - -dependencies { - implementation fileTree(dir: "libs", include: ['*.aar']) -} -``` - -## 加载模型 - -MindSpore Lite进行模型推理时,需要先从文件系统中加载模型转换工具转换后的`.ms`模型,并进行模型解析。Java的[model](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html#model)类提供了2个[loadModel](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html#loadmodel)接口,使其可以从`Assets`或其他文件路径中加载模型。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L217)将从`Assets`读取`mobilenetv2.ms`模型文件进行模型加载。 - -```java -// Load the .ms model. 
-Model model = new Model(); -String modelPath = "mobilenetv2.ms"; -boolean ret = model.loadModel(this.getApplicationContext(), modelPath); -``` - ->只有`AAR`库才支持从`Assert`加载模型文件的接口。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/quick_start_java/src/main/java/com/mindspore/lite/demo/Main.java#L128)将从`modelPath`路径读取模型文件进行模型加载。 - -```java -Model model = new Model(); -boolean ret = model.loadModel(modelPath); -``` - -## 创建配置上下文 - -创建配置上下文[MSConfig](https://www.mindspore.cn/doc/api_java/zh-CN/master/msconfig.html#msconfig),保存会话所需的一些基本配置参数,用于指导图编译和图执行。 - -MindSpore Lite支持异构推理,推理时的主选后端由[MSConfig](https://www.mindspore.cn/doc/api_java/zh-CN/master/msconfig.html#msconfig)的`deviceType`指定,目前支持CPU和GPU。在进行图编译时,会根据主选后端进行算子选型调度。 - -MindSpore Lite内置一个进程共享的线程池,推理时通过`threadNum`指定线程池的最大线程数,默认为2线程。 - -MindSpore Lite支持Float16算子的模式进行推理。`enable_float16`设置为`true`后,将会优先使用Float16算子。 - -### 配置使用CPU后端 - -当需要执行的后端为CPU时,`MSConfig`创建后需要在[init](https://www.mindspore.cn/doc/api_java/zh-CN/master/msconfig.html#init)中配置`DeviceType.DT_CPU`,同时CPU支持设置绑核模式以及是否优先使用Float16算子。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L59)演示如何创建CPU后端,同时设定CPU绑核模式为大核优先并且使能Float16推理: - -```java -MSConfig msConfig = new MSConfig(); -boolean ret = msConfig.init(DeviceType.DT_CPU, 2, CpuBindMode.HIGHER_CPU, true); -``` - -> Float16需要CPU为ARM v8.2架构的机型才能生效,其他不支持的机型和x86平台会自动回退到Float32执行。 - -### 配置使用GPU后端 - -当需要执行的后端为CPU和GPU的异构推理时,`MSConfig`创建后需要在[init](https://www.mindspore.cn/doc/api_java/zh-CN/master/msconfig.html#init)中配置`DeviceType.DT_GPU`,配置后将会优先使用GPU推理。同时是否优先使用Float16算子设置为true后,GPU和CPU都会优先使用Float16算子。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L69)演示如何创建CPU与GPU异构推理后端,同时GPU也设定使能Float16推理: - -```java -MSConfig msConfig = new MSConfig(); -boolean ret = 
msConfig.init(DeviceType.DT_GPU, 2, CpuBindMode.MID_CPU, true); -``` - -> 目前GPU只能在Android手机端侧运行,所以只有`AAR`库才能支持运行。 - -## 创建会话 - -[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)是推理的主入口,通过[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)可以进行图编译、图执行。创建[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession),并调用[init](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#init)方法将上一步得到[MSConfig](https://www.mindspore.cn/doc/api_java/zh-CN/master/msconfig.html#msconfig)配置到会话中。[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)初始化之后,[MSConfig](https://www.mindspore.cn/doc/api_java/zh-CN/master/msconfig.html#msconfig)将可以进行释放操作。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L86)演示如何创建`LiteSession`的方式: - -```java -LiteSession session = new LiteSession(); -boolean ret = session.init(msConfig); -msConfig.free(); -``` - -## 图编译 - -在图执行前,需要调用[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)的[compileGraph](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#compilegraph)接口进行图编译,主要进行子图切分、算子选型调度。这部分会耗费较多时间,所以建议[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)创建一次,编译一次,多次执行。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L87)演示调用`CompileGraph`进行图编译。 - -```java -boolean ret = session.compileGraph(model); -``` - -## 输入数据 - -MindSpore Lite Java接口提供`getInputsByTensorName`以及`getInputs`两种方法获得输入Tensor,同时支持`byte[]`或者`ByteBuffer`两种类型的数据,通过[setData](https://www.mindspore.cn/doc/api_java/zh-CN/master/mstensor.html#setdata)设置输入Tensor的数据。 - -1. 
使用[getInputsByTensorName](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#getinputsbytensorname)方法,根据模型输入Tensor的名称来获取模型输入Tensor中连接到输入节点的Tensor,下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L151)演示如何调用`getInputsByTensorName`获得输入Tensor并填充数据。 - - ```java - MSTensor inputTensor = session.getInputsByTensorName("2031_2030_1_construct_wrapper:x"); - // Set Input Data. - inputTensor.setData(inputData); - ``` - -2. 使用[getInputs](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#getinputs)方法,直接获取所有的模型输入Tensor的vector,下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L113)演示如何调用`getInputs`获得输入Tensor并填充数据。 - - ```java - List inputs = session.getInputs(); - MSTensor inputTensor = inputs.get(0); - // Set Input Data. - inputTensor.setData(inputData); - ``` - -> MindSpore Lite的模型输入Tensor中的数据排布必须是`NHWC`。如果需要了解更多数据前处理过程,可参考[基于Java接口的Android应用开发](https://www.mindspore.cn/tutorial/lite/zh-CN/master/quick_start/image_segmentation.html#id9)的对输入数据进行处理部分。 - -## 执行推理 - -MindSpore Lite会话在进行图编译以后,即可调用[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)的[runGraph](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#rungraph)执行模型推理。 - -下面示例代码演示调用`runGraph`执行推理。 - -```java -// Run graph to infer results. 
-boolean ret = session.runGraph(); -``` - -## 获得输出 - -MindSpore Lite在执行完推理后,可以通过输出Tensor得到推理结果。MindSpore Lite提供三种方法来获取模型的输出[MSTensor](https://www.mindspore.cn/doc/api_java/zh-CN/master/mstensor.html),同时支持[getByteData](https://www.mindspore.cn/doc/api_java/zh-CN/master/mstensor.html#getbytedata)、[getFloatData](https://www.mindspore.cn/doc/api_java/zh-CN/master/mstensor.html#getfloatdata)、[getIntData](https://www.mindspore.cn/doc/api_java/zh-CN/master/mstensor.html#getintdata)、[getLongData](https://www.mindspore.cn/doc/api_java/zh-CN/master/mstensor.html#getlongdata)四种方法获得输出数据。 - -1. 使用[getOutputMapByTensor](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#getoutputmapbytensor)方法,直接获取所有的模型输出[MSTensor](https://www.mindspore.cn/doc/api_java/zh-CN/master/mstensor.html#mstensor)的名称和[MSTensor](https://www.mindspore.cn/doc/api_java/zh-CN/master/mstensor.html#mstensor)指针的一个map。下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L191)演示如何调用`getOutputMapByTensor`获得输出Tensor。 - - ```java - Map outTensors = session.getOutputMapByTensor(); - - Iterator> entries = outTensors.entrySet().iterator(); - while (entries.hasNext()) { - Map.Entry entry = entries.next(); - // Apply infer results. - ... - } - ``` - -2. 使用[getOutputByNodeName](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#getoutputsbynodename)方法,根据模型输出节点的名称来获取模型输出[MSTensor](https://www.mindspore.cn/doc/api_java/zh-CN/master/mstensor.html#mstensor)中连接到该节点的Tensor的vector。下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L175)演示如何调用`getOutputByTensorName`获得输出Tensor。 - - ```java - MSTensor outTensor = session.getOutputsByNodeName("Default/head-MobileNetV2Head/Softmax-op204"); - // Apply infer results. - ... - ``` - -3. 
使用[getOutputByTensorName](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#getoutputbytensorname)方法,根据模型输出Tensor的名称来获取对应的模型输出[MSTensor](https://www.mindspore.cn/doc/api_java/zh-CN/master/mstensor.html#mstensor)。下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L182)演示如何调用`getOutputByTensorName`获得输出Tensor。 - - ```java - MSTensor outTensor = session.getOutputByTensorName("Default/head-MobileNetV2Head/Softmax-op204"); - // Apply infer results. - ... - ``` - -## 释放内存 - -无需使用MindSpore Lite推理框架时,需要释放已经创建的LiteSession和Model,下列[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L204)演示如何在程序结束前进行内存释放。 - -```java -session.free(); -model.free(); -``` - -## 高级用法 - -### 优化运行内存大小 - -如果对运行时内存有较大的限制,图编译结束之后,调用[Model](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html#model)的[freeBuffer](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html#freebuffer)函数,释放MindSpore Lite Model中的MetaGraph,用于减小运行时的内存。一旦调用某个[Model](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html#model)的[freeBuffer](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html#freebuffer)后,该[Model](https://www.mindspore.cn/doc/api_java/zh-CN/master/model.html#model)就不能再次图编译。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L241)演示如何调用`Model`的`freeBuffer`接口来释放`MetaGraph`减少运行时内存大小。 - -```java -// Compile graph. -ret = session.compileGraph(model); -... -// Note: when use model.freeBuffer(), the model can not be compiled. 
-model.freeBuffer(); -``` - -### 绑核操作 - -MindSpore Lite内置线程池支持绑核、解绑操作,通过调用[bindThread](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#bindthread)接口,可以将线程池中的工作线程绑定到指定CPU核,用于性能分析。绑核操作与创建[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html)时用户指定的上下文有关,绑核操作会根据上下文中的绑核策略进行线程与CPU的亲和性设置。 - -需要注意的是,绑核是一个亲和性操作,不保证一定能绑定到指定的CPU核,会受到系统调度的影响。而且绑核后,需要在执行完代码后进行解绑操作。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L164)演示如何在执行推理时绑定大核优先。 - -```java -boolean ret = msConfig.init(DeviceType.DT_CPU, 2, CpuBindMode.HIGHER_CPU, true); -... -session.bindThread(true); -// Run Inference. -ret = session.runGraph(); -session.bindThread(false); -``` - -> 绑核参数有三种选择:大核优先、中核优先以及不绑核。 -> -> 判定大核和中核的规则其实是根据CPU核的频率而不是根据CPU的架构,对于没有大中小核之分的CPU架构,在该规则下也可以区分大核和中核。 -> -> 绑定大核优先是指线程池中的线程从频率最高的核开始绑定,第一个线程绑定在频率最高的核上,第二个线程绑定在频率第二高的核上,以此类推。 -> -> 对于中核优先,中核的定义是根据经验来定义的,默认设定中核是第三和第四高频率的核,当绑定策略为中核优先时,会优先绑定到中核上,当中核不够用时,会往小核上进行绑定。 - -### 输入维度Resize - -使用MindSpore Lite进行推理时,如果需要对输入的shape进行Resize,则可以在已完成创建会话`CreateSession`与图编译`CompileGraph`之后调用[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html)的[resize](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#resize)接口,对输入的Tensor重新设置shape。 - -> 某些网络是不支持可变维度,会提示错误信息后异常退出,比如,模型中有MatMul算子,并且MatMul的一个输入Tensor是权重,另一个输入Tensor是输入时,调用可变维度接口会导致输入Tensor和权重Tensor的Shape不匹配,最终导致推理失败。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L164)演示如何对MindSpore Lite的输入Tensor进行[resize](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#resize): - -```java -List inputs = session.getInputs(); -int[][] dims = {{1, 300, 300, 3}}; -bool ret = session.resize(inputs, dims); -``` - -### Session并行 - -MindSpore 
Lite支持多个[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html)并行推理,每个[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)的线程池和内存池都是独立的。但不支持多个线程同时调用单个[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#litesession)的[runGraph](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html#rungraph)接口。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L220)演示如何并行执行推理多个[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html)的过程: - -```java -session1 = createLiteSession(false); -if (session1 != null) { - session1Compile = true; -} else { - Toast.makeText(getApplicationContext(), "session1 Compile Failed.", - Toast.LENGTH_SHORT).show(); -} -session2 = createLiteSession(true); -if (session2 != null) { - session2Compile = true; -} else { - Toast.makeText(getApplicationContext(), "session2 Compile Failed.", - Toast.LENGTH_SHORT).show(); -} -... 
-if (session1Finish && session1Compile) { - new Thread(new Runnable() { - @Override - public void run() { - session1Finish = false; - runInference(session1); - session1Finish = true; - } - }).start(); -} - -if (session2Finish && session2Compile) { - new Thread(new Runnable() { - @Override - public void run() { - session2Finish = false; - runInference(session2); - session2Finish = true; - } - }).start(); -} -``` - -MindSpore Lite不支持多线程并行执行单个[LiteSession](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html)的推理,否则会得到以下错误信息: - -```bash -ERROR [mindspore/lite/src/lite_session.cc:297] RunGraph] 10 Not support multi-threading -``` - -### 查看日志 - -当推理出现异常的时候,可以通过查看日志信息来定位问题。针对Android平台,采用`Logcat`命令行工具查看MindSpore Lite推理的日志信息,并利用`MS_LITE` 进行筛选。 - -```shell -logcat -s "MS_LITE" -``` - -### 获取版本号 - -MindSpore Lite提供了[Version](https://www.mindspore.cn/doc/api_java/zh-CN/master/lite_session.html)方法可以获取版本号,包含在`com.mindspore.lite.Version`头文件中,调用该方法可以得到当前MindSpore Lite的版本号。 - -下面[示例代码](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/runtime_java/app/src/main/java/com/mindspore/lite/demo/MainActivity.java#L215)演示如何获取MindSpore Lite的版本号: - -```java -import com.mindspore.lite.Version; -String version = Version.version(); -``` diff --git a/tutorials/lite/source_zh_cn/use/runtime_train.rst b/tutorials/lite/source_zh_cn/use/runtime_train.rst deleted file mode 100644 index 2853e2dfa22b6d016cbfe1ea7ca079dcdee2e663..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/runtime_train.rst +++ /dev/null @@ -1,7 +0,0 @@ -执行训练 -======================= - -.. 
toctree:: - :maxdepth: 1 - - runtime_train_cpp \ No newline at end of file diff --git a/tutorials/lite/source_zh_cn/use/runtime_train_cpp.md b/tutorials/lite/source_zh_cn/use/runtime_train_cpp.md deleted file mode 100644 index 9a36cf517a2f03810a61a51bff0aef2cb66be438..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/runtime_train_cpp.md +++ /dev/null @@ -1,566 +0,0 @@ -# 使用C++接口执行训练 - -`Linux` `Android` `C++` `模型训练` `模型加载` `数据准备` `中级` `高级` - - - -- [使用C++接口执行训练](#使用c接口执行训练) - - [概述](#概述) - - [创建会话](#创建会话) - - [读取模型](#读取模型) - - [创建上下文](#创建上下文) - - [创建会话](#创建会话) - - [创建迭代训练](#创建迭代训练) - - [使用示例](#使用示例-1) - - [数据处理](#数据处理) - - [数据输入流](#数据输入流) - - [数据预处理流](#数据预处理流) - - [使用示例](#使用示例-2) - - [执行训练](#执行训练) - - [训练](#训练) - - [推理](#推理) - - [其他](#其他) - - [会话模式切换](#会话模式切换) - - [获取输入张量](#获取输入张量) - - [获取输出张量](#获取输出张量) - - [执行训练或推理](#执行训练或推理) - - [执行会话](#执行会话) - - [执行回调](#执行回调) - - [保存模型](#保存模型) - - - - - -## 概述 - -端侧训练主要步骤: - -1. 使用云侧接口设计模型并导出`MindIR`模型文件。 -2. 将`MindIR`模型文件转换为`ms`模型文件。 -3. 
在设备端训练、验证和保存`ms`模型文件。 - -> 转换得到的 *.ms 模型文件包含模型结构,.ms模型文件将被载入设备端进行训练。 - -下面的时序图展示了训练详细流程: - -![训练流程图](../images/train_sequence.png) - -图中的名词说明: - -- `OS`:程序运行的操作系统。 -- `User`:用户调用`TrainLoop`类的函数。 -- `MindData`:在训练中加载数据并预处输入理数据的系列接口(例如读取图像,缩放至指定大小,转换为bitmap格式)。 -- `ToD`:MindSpore Lite在设备端的训练机制。 -- `MS Lite`:MindSpore Lite架构,它能为模型节点和内联张量提供flatbuffer反序列化的功能、执行图编译并调用图执行器进行训练。 -- `CreateTrainSession`:创建`TrainSession`类的对象。 -- `CreateTrainLoop`:创建`TrainLoop`类的对象。 -- `InitDataset`:用户自定义函数,加载并预处理数据。 -- `train_loop`:迭代训练`TrainLoop`类的对象。 -- `Train`:`TrainLoop`类的成员函数,接受现有的或用户自定义的回调对象。 -- `Callbacks`:执行现有的或用户自定义的回调函数。 - -MindSpore Lite架构引入了`MindData`数据处理接口。首先,`MindData`简化了训练流程,创建会话、加载数据、预处理、训练和保存模型一个函数搞定;其次,它可在训练中加载并处理数据,这极大地降低了移动端的资源消耗。 - -用户依次执行上图中`User`列的函数即可启动模型训练。首先调用`CreateSession`函数创建训练会话对象,并创建`TrainLoop`类对象;然后依次执行`InitDataset`、`Train`、`Eval`即可完成训练。`ToD`和`MindData`列为模型训练中调用MindSpore Lite底层函数。 - -> 更多C++API说明,请参考[API文档](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/index.html)。 - -## 创建会话 - -MindSpore Lite训练框架中的[TrainSession](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#trainsession)是训练的主入口,通过`TrainSession`我们可以进行编译和运行图模型。 - -### 读取模型 - -模型文件是一个flatbuffer序列化文件,它通过MindSpore模型转换工具得到,其文件扩展名为`.ms`。在模型训练或推理之前,模型需要从文件系统中加载并解析。相关操作主要在[`TrainModel`](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#trainmodel)类中实现,该类具有例如网络结构、张量大小、权重数据和操作属性等模型数据。 - -> 在MindSpore Lite中训练模型将被`TrainSession`占用,所以你不能直接改变它。所有与训练模型的交互操作,包括实例化、编译和删除操作将在`TrainSession`中处理。 - -### 创建上下文 - -[`Context`](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/lite.html#context)是一个MindSpore Lite对象,它包含了`TrainSession`用来加载模型文件、引导图编译和执行的基础配置参数。它能够让你指定模型运行的设备类型(例如CPU或GPU),模型训练和推理时使用的线程数量,以及内存分配策略。目前`TrainSession`只支持单线程的CPU设备。 - -如果用户通过`new`创建`Context`,不再需要时,需要用户通过`delete`释放。一般在`TrainSession`对象创建完成后,`Context`对象即可释放。 - -### 创建会话 - -有两种方式可以创建会话: - -- 第一种直接读取文件系统上的训练模型文件,然后反序列化,编译并生成有效的`TrainSession`对象。上述`Context`将作为一个基本配置传递给`TrainSession`。该静态函数原型如下: - - `TrainSession 
*TrainSession::CreateSession(const string &filename, const Context *context, bool mode)` - - 其中`filename`是模型文件名,`context`是指向Context的对象指针,`mode`表示当前会话是否为训练模式。成功创建后,函数返回一个已全部编译并可使用的`TrainSession`,该实例必须在当前会话结束前使用`delete`释放。 - -- 第二种使用flatbuffer的内存拷贝创建`TrainSession`。静态方法如下: - - `TrainSession *TrainSession::CreateSession(const char *model_buf, size_t size, lite::Context *context, bool train_mode = false)` - - 其中`model_buf`是一个指向内存缓冲区的常量指针,`size`是缓冲区长度。成功创建后,函数返回一个完整编译并且可以使用的`TrainSession`实例。`model_buf`指针在函数调用完成后,可以被立即释放以节省资源。`train_mode`为是否将模型设置为训练模式。一旦`TrainSession`实例不再被使用,它必须使用`delete`释放。 - -### 创建迭代训练 - -用户可通过`CreateTrainLoop`函数创建的`TrainLoop`类对象来调用`MindData`接口函数,所以我们更推荐`CreateTrainLoop`函数。`CreateTrainLoop`原型如下: - - `TrainLoop *CreateTrainLoop(session::TrainSession *train_session, lite::Context *context, int batch_size = -1)` - -下面示例代码演示了如何使用`TrainLoop`类在CPU多线程上创建训练会话: - -```cpp -#include "include/train_session.h" -#include "include/context.h" - -int CreateSession() { - mindspore::lite::Context context; - context.device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND; - context.device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false; - context.device_list_[0].device_type_ = mindspore::lite::DT_CPU; - context.thread_num_ = 2; - // Create Session - session_ = mindspore::session::TrainSession::CreateSession(ms_file_, &context); - MS_ASSERT(nullptr != session_); - loop_ = mindspore::session::TrainLoop::CreateTrainLoop(session_, &context); - acc_metrics_ = std::shared_ptr(new AccuracyMetrics); - loop_->Init({acc_metrics_.get()}); - return 0; -} -``` - -> 参见[训练一个LeNet](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/train_lenet/src/net_runner.cc)获取完整代码。 - -## 数据处理 - -### 数据输入流 - -`Dataset`类及其扩展类(例如`MnistDataset`和`AlbumDataset`)为用户提供了丰富的数据处理API,用户只需要指定数据集的路径,通过接口函数返回对应类型的共享指针来设定训练中执行的数据处理操作,输入流会在训练过程中加载并解析数据。API说明详见[Dataset](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/dataset.html)。 - -### 数据预处理流 - 
-`TensorTransform`类其扩展类(例如`TypeCast`和`OneHot`)为用户提供了丰富的数据预处理API,其功能与云侧Python接口相同,例如维度重塑、数据类型转换和独热编码等,用户只需要创建`TensorTransform`扩展类的对象并传递给Map函数, Map会在训练过程中顺序调用预处理函数处理已加载的数据。API说明详见[Vision](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/vision.html)。 - -### 使用示例 - -下述代码展示了如何使用`Dataset`类和`TensorTransform`类读取和处理数据: - -```cpp -#include "include/datasets.h" -#include "include/context.h" -#include "include/transforms.h" - -int DataSetPipeline() { - train_ds_ = Mnist(data_dir_ + "/train", "all"); - TypeCast typecast_f("float32"); - Resize resize({h_, w_}); - train_ds_ = train_ds_->Map({&resize, &typecast_f}, {"image"}); - TypeCast typecast("int32"); - train_ds_ = train_ds_->Map({&typecast}, {"label"}); - train_ds_ = train_ds_->Shuffle(2); - train_ds_ = train_ds_->Batch(batch_size_, true); - if (verbose_) { - std::cout << "DatasetSize is " << train_ds_->GetDatasetSize() << std::endl; - } - if (train_ds_->GetDatasetSize() == 0) { - std::cout << "No relevant data was found in " << data_dir_ << std::endl; - MS_ASSERT(train_ds_->GetDatasetSize() != 0); - } - return 0; -} -``` - -示例中用户可通过Mnist函数返回的`MnistDataset`类共享指针调用`Dataset`类和`TensorTransform`类的现有函数来定义训练数据处理流程。 - -## 执行训练 - -MindSpore为用户提供了现有的回调类:`accuracy_metrics`、`accuracy_monitor`、`ckpt_saver`、`classification_train_accuracy`、`loss_monitor`和`metrics`。`TrainLoop`类的`Train`和`Eval`函数分别将模型设置为训练和验证模式,指定数据预处理方法并监测会话状态。 - -### 训练 - -创建现有回调类对象并调用`TrainLoop`类的`Train`函数进行训练: - -```cpp -int Train() { - struct mindspore::lite::StepLRLambda step_lr_lambda(1, 0.8); - mindspore::lite::LRScheduler step_lr_sched(mindspore::lite::StepLRLambda, static_cast(&step_lr_lambda), 1); - mindspore::lite::LossMonitor lm(100); - mindspore::lite::ClassificationTrainAccuracyMonitor am(1); - mindspore::lite::CkptSaver cs(1000, std::string("lenet")); - Rescaler rescale(255.0); - loop_->Train(epochs_, train_ds_.get(), std::vector{&rescale, &lm, &cs, &am, &step_lr_sched}); - return 0; -} -``` - -### 推理 - -同样,我们调用`TrainLoop`类的`Eval`函数进行推理: - -```cpp -float 
Eval() { - test_ds_ = Mnist(data_dir_ + "/test", "all"); - TypeCast typecast_f("float32"); - Resize resize({h_, w_}); - test_ds_ = test_ds_->Map({&resize, &typecast_f}, {"image"}); - TypeCast typecast("int32"); - test_ds_ = test_ds_->Map({&typecast}, {"label"}); - test_ds_ = test_ds_->Batch(batch_size_, true); - Rescaler rescale(255.0); - loop_->Eval(test_ds_.get(), std::vector{&rescale}); - std::cout << "Eval Accuracy is " << acc_metrics_->Eval() << std::endl; - return 0.0; -} -``` - -> 推理和训练模式的不同点: -> -> - 网络输入:训练需要数据和标签,而推理只需要数据。 -> - 网络输出:训练返回损失值,而推理返回预测标签值。 -> - 每一轮训练都会更新网络的各层权重值,但推理不会。 -> - 网络的某些层在训练和推理具有不同的输出,例如在批量标准化 (Batch Normalization) 层中更新批次累计均值和方差。 - -## 其他 - -### 会话模式切换 - -`TrainLoop`类中的`Train`和`Eval`函数实际上调用的是`TrainSession`类中的`Train`和`Eval`函数,用户也可以直接调用`TrainSession`的方法来切换模型训练和验证模式,函数原型如下: - -```cpp -/// \brief Set model to train mode -/// \return STATUS as an error code of compiling graph, STATUS is defined in errorcode.h -virtual int Train() = 0; - -/// \brief Set model to eval mode -/// \return STATUS as an error code of compiling graph, STATUS is defined in errorcode.h -virtual int Eval() = 0; -``` - -下述代码展示了如何将一个当前训练会话设置为训练或验证模式: - -```cpp -// Assuming session is a valid instance of TrainSession -auto ret = session->Train(); -if (ret != RET_OK) { - std::cerr << "Could not set session to train mode" << std::endl; - return -1; -} - -auto ret = session->Eval(); -if (ret != RET_OK) { - std::cerr << "Could not set session to eval mode" << std::endl; - return -1; -} -``` - -### 获取输入张量 - -在图执行之前,无论执行训练或推理,输入数据必须载入模型的输入张量。MindSpore Lite提供了以下函数来获取模型的输入张量: - -1. 使用[`GetInputsByTensorName`](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getinputsbytensorname)方法,获取连接到基于张量名称的模型输入节点模型输入张量。 - - ```cpp - /// \brief Get input MindSpore Lite MSTensors of model by tensor name. - /// - /// \param[in] tensor_name Define tensor name. - /// - /// \return MindSpore Lite MSTensor. 
- virtual mindspore::tensor::MSTensor *GetInputsByTensorName(const std::string &tensor_name) const = 0; - ``` - -2. 使用[`GetInputs`](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getinputs)方法,直接获取所有模型输入张量的向量。 - - ```cpp - /// \brief Get input MindSpore Lite MSTensors of model. - /// - /// \return The vector of MindSpore Lite MSTensor. - virtual std::vector GetInputs() const = 0; - ``` - - 如果模型需要1个以上的输入张量(例如训练过程中,数据和标签都作为网络的输入),用户有必要知道输入顺序和张量名称,这些信息可以从Python对应的模型中获取。此外,用户也根据输入张量的大小推导出这些信息。 - -3. 拷贝数据 - - 一旦获取到了模型的输入张量,数据需要拷贝到张量中。下列方法可以获取数据字节大小、数据维度、元素个数、数据类型和写指针。详见 [MSTensor](https://www.mindspore.cn/doc/api_cpp/en/master/tensor.html#mstensor) API 文档。 - - ```cpp - /// \brief Get byte size of data in MSTensor. - /// - /// \return Byte size of data in MSTensor. - virtual size_t Size() const = 0; - - /// \brief Get shape of the MindSpore Lite MSTensor. - /// - /// \return A vector of int as the shape of the MindSpore Lite MSTensor. - virtual std::vector shape() const = 0; - - /// \brief Get number of element in MSTensor. - /// - /// \return Number of element in MSTensor. - virtual int ElementsNum() const = 0; - - /// \brief Get data type of the MindSpore Lite MSTensor. - /// - /// \note TypeId is defined in mindspore/mindspore/core/ir/dtype/type_id.h. Only number types in TypeId enum are - /// suitable for MSTensor. - /// - /// \return MindSpore Lite TypeId of the MindSpore Lite MSTensor. - virtual TypeId data_type() const = 0; - - /// \brief Get the pointer of data in MSTensor. - /// - /// \note The data pointer can be used to both write and read data in MSTensor. - /// - /// \return The pointer points to data in MSTensor. 
- virtual void *MutableData() const = 0; - ``` - - 以下示例代码展示了如何从`LiteSession`中获取完整的图输入张量和如何将模型输入数据转换为`MSTensor`类型。 - - ```cpp - // Assuming session is a valid instance of TrainSession - auto inputs = session->GetInputs(); - - // Assuming the model has two input tensors, the first is for data and the second for labels - int data_index = 0; - int label_index = 1; - - if (inputs.size() != 2) { - std::cerr << "Unexpected amount of input tensors. Expected 2, model requires " << inputs.size() << std::endl; - return -1; - } - - // Assuming batch_size and data_size variables holds the Batch size and the size of a single data tensor, respectively: - // And assuming sparse labels are used - if ((inputs.at(data_index)->Size() != batch_size*data_size) || - (inputs.at(label_index)->ElementsNum() != batch_size)) { - std::cerr << "Input data size does not match model input" << std::endl; - return -1; - } - - // Assuming data_ptr is the pointer to a batch of data tensors - // and iassuming label_ptr is a pointer to a batch of label indices (obtained by the DataLoder) - auto *in_data = inputs.at(data_index)->MutableData(); - auto *in_labels = inputs.at(label_index)->MutableData(); - if ((in_data == nullptr)|| (in_labels == nullptr)) { - std::cerr << "Model's input tensor is nullptr" << std::endl; - return -1; - } - - memcpy(in_data, data_ptr, inputs.at(data_index)->Size()); - memcpy(in_labels, label_ptr, inputs.at(label_index)->Size()); - // After filling the input tensors the data_ptr and label_ptr may be freed - // The input tensors themselves are managed by MindSpore Lite and users are not allowed to access them or delete them - ``` - - > - MindSpore Lite模型输入张量的数据维度必须为NHWC(批次数,高度,宽度和通道数)。 - > - 用户不能主动释放`GetInputs`和`GetInputsByTensorName`函数返回的张量。 - -### 获取输出张量 - -MindSpore Lite提供下列方法来获取模型的输出张量: - -1. 
使用[`GetOutputByNodeName`](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getoutputbynodename)方法获取一个确定节点的输出张量。 - - ```cpp - /// \brief Get output MindSpore Lite MSTensors of model by node name. - /// - /// \param[in] node_name Define node name. - /// - /// \return The vector of MindSpore Lite MSTensor. - virtual std::vector GetOutputsByNodeName(const std::string &node_name) const = 0; - ``` - - 下列代码为使用`GetOutputsByNodeName`方法从当前会话中获取输出张量: - - ```cpp - // Assume that session is a vlaid TrainSession instance - // Assume that model has a output node named output_node_name_0. - auto output_vec = session->GetOutputsByNodeName("output_node_name_0"); - // Assume that output node named output_node_name_0 has only one output tensor. - auto out_tensor = output_vec.front(); - if (out_tensor == nullptr) { - std::cerr << "Output tensor is nullptr" << std::endl; - return -1; - } - ``` - -2. 使用[`GetOutputByTensorName`](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getoutputbytensorname)方法,依据张量名称获取输出张量。 - - ```cpp - /// \brief Get output MindSpore Lite MSTensors of model by tensor name. - /// - /// \param[in] tensor_name Define tensor name. - /// - /// \return Pointer of MindSpore Lite MSTensor. - virtual mindspore::tensor::MSTensor *GetOutputByTensorName(const std::string &tensor_name) const = 0; - ``` - - 下列代码为使用`GetOutputsByTensorName`方法从当前会话中获取输出张量: - - ```cpp - // Assume that session is a vlaid TrainSession instance - // We can use GetOutputTensorNames method to get the names of all the output tensors of the model - auto tensor_names = session->GetOutputTensorNames(); - // Use output tensor name returned by GetOutputTensorNames as key - for (auto tensor_name : tensor_names) { - auto out_tensor = session->GetOutputByTensorName(tensor_name); - if (out_tensor == nullptr) { - std::cerr << "Output tensor is nullptr" << std::endl; - return -1; - } - } - ``` - -3. 
使用[`GetOutputs`](https://www.mindspore.cn/doc/api_cpp/zh-CN/master/session.html#getoutputs)方法,根据张量名称排序的所有输出张量。 - - ```cpp - /// \brief Get output MindSpore Lite MSTensors of model mapped by tensor name. - /// - /// \return The map of output tensor name and MindSpore Lite MSTensor. - virtual std::unordered_map GetOutputs() const = 0; - ``` - - 获取模型输出张量后,用户需要将数据导入张量中。使用`MSTensor`的`Size`方法获取将要导入张量中的数据大小,使用`data_type`方法获取 `MSTensor`的数据类型,并且使用`MutableData`方法写指针。 - - ```cpp - /// \brief Get byte size of data in MSTensor. - /// - /// \return Byte size of data in MSTensor. - virtual size_t Size() const = 0; - - /// \brief Get data type of the MindSpore Lite MSTensor. - /// - /// \note TypeId is defined in mindspore/mindspore/core/ir/dtype/type_id.h. Only number types in TypeId enum are - /// suitable for MSTensor. - /// - /// \return MindSpore Lite TypeId of the MindSpore Lite MSTensor. - virtual TypeId data_type() const = 0; - - /// \brief Get the pointer of data in MSTensor. - /// - /// \note The data pointer can be used to both write and read data in MSTensor. - /// - /// \return The pointer points to data in MSTensor. - virtual void *MutableData() const = 0; - ``` - - 下列代码展示了如何使用`GetOutputs`方法从会话中获取输出张量,并打印前10个数据或每个输出张量的数据记录。 - - ```cpp - // Assume that session is a vlaid TrainSession object - auto output_map = session->GetOutputs(); - // Assume that the model has only one output node. - auto out_node_iter = output_map.begin(); - std::string name = out_node_iter->first; - // Assume that the unique output node has only one output tensor. - auto out_tensor = out_node_iter->second; - if (out_tensor == nullptr) { - std::cerr << "Output tensor is nullptr" << std::endl; - return -1; - } - // Assume that the data format of output data is float 32. 
- if (out_tensor->data_type() != mindspore::TypeId::kNumberTypeFloat32) { - std::cerr << "Output of lenet should in float32" << std::endl; - return -1; - } - auto *out_data = reinterpret_cast(out_tensor->MutableData()); - if (out_data == nullptr) { - std::cerr << "Data of out_tensor is nullptr" << std::endl; - return -1; - } - // Print the first 10 float data or all output data of the output tensor. - std::cout << "Output data: "; - for (size_t i = 0; i < 10 && i < out_tensor->ElementsNum(); i++) { - std::cout << " " << out_data[i]; - } - std::cout << std::endl; - // The elements in outputs do not need to be free by users, because outputs are managed by the MindSpore Lite. - ``` - - > 用户无需手动释放 `GetOutputsByNodeName`、`GetOutputByTensorName`和`GetOutputs`函数返回的数组或是哈希表。 - -### 执行训练或推理 - -#### 执行会话 - -无论`TrainSession`对象是训练或推理模式,图计算都是调用`RunGraph`方法。 - -```cpp -/// \brief Run session with callbacks. -/// -/// \param[in] before Define a call_back_function to be called before running each node. -/// \param[in] after Define a call_back_function called after running each node. -/// -/// \note RunGraph should be called after CompileGraph. -/// -/// \return STATUS as an error code of running graph, STATUS is defined in errorcode.h. -virtual int RunGraph(const KernelCallBack &before = nullptr, const KernelCallBack &after = nullptr) = 0; -``` - -在执行图计算前,用户需要确保数据被正确地导入了输入张量中。 - -#### 执行回调 - -MindSpore Lite框架允许用户设置两个在每个节点计算前后调用的回调函数。这两个函数能够帮助用户跟踪、调试网络,并测量各节点的计算时间。回调参数如下: - -- 计算节点的当前输入张量。 -- 计算节点的当前输出张量。 -- 计算节点的名称和类型。 - -尽管节点计算前后的名称和类型一致,两个回调函数的输出张量却不同。对于某些计算操作,输入张量也不同。 - -```cpp -/// \brief CallBackParam defines input arguments for callback function. -struct CallBackParam { - std::string node_name; /**< node name argument */ - std::string node_type; /**< node type argument */ -}; - -/// \brief KernelCallBack defined the function pointer for callBack. 
-using KernelCallBack = std::function inputs, - std::vector outputs, const CallBackParam &opInfo)>; -``` - -以下代码为如何在执行训练前后使用回调函数: - -```cpp -// Assuming session is a valid instance of TrainSession and that data was assigned to the input tensors - -// Definition of a callback function that will be called before forwarding operator -bool before_callback(const std::vector &inputs, - const std::vector &outputs, - const mindspore::CallBackParam &call_param) { - std::cout << call_param.node_name << std::endl; - std::cout << "Before forwarding: input size is " << inputs.size() << std::endl; - return true; -}; -// Definition of callback function that will be called after forwarding operator -bool after_callback(const std::vector &inputs, - const std::vector &outputs, - const mindspore::CallBackParam &call_param) { - std::cout << "After forwarding: output size is " << outputs.size() << std::endl; - return true; -}; - -// Hand over the callback functions to RunGraph when performing the training or inference -ret = session_->RunGraph(before_callback, after_callback); -if (ret != RET_OK) { - MS_LOG(ERROR) << "Run graph failed."; - return RET_ERROR; -} -``` - -### 保存模型 - -MindSpore的`CkptSaver`类实际调用的是`SaveToFile`函数,当然你也可以直接调用`SaveToFile`来保存模型,`SaveToFile`原型如下: - -```cpp - /// \brief Save the trained model into a flatbuffer file - /// - /// \param[in] filename Filename to save flatbuffer to - /// - /// \return 0 on success or -1 in case of error - virtual int SaveToFile(const std::string &filename) const = 0; -``` - -保存的模型可继续用于训练或推理。 - -> 请使用[benchmark_train](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/benchmark_train_tool.html)进行训练模型性能和精度评估。 diff --git a/tutorials/lite/source_zh_cn/use/visual_tool.md b/tutorials/lite/source_zh_cn/use/visual_tool.md deleted file mode 100644 index cd97fce97d7bab5e144055a667099dd3832d3a9f..0000000000000000000000000000000000000000 --- a/tutorials/lite/source_zh_cn/use/visual_tool.md +++ /dev/null @@ -1,59 +0,0 @@ -# 可视化工具 - -`Windows` 
`Linux` `Mac` `可视化` - - - -- [可视化工具](#可视化工具) - - [概述](#概述) - - [功能列表](#功能列表) - - [使用方式](#使用方式) - - [开发调试](#开发调试) - - - - - -## 概述 - -[Netron](https://github.com/lutzroeder/netron)是一个基于[Electron](http://www.electronjs.org/)平台开发的神经网络模型可视化工具,支持许多主流AI框架模型的可视化,支持多种平台(Mac、Windows、Linux等)。`Netron`支持MindSpore Lite模型,可以方便地查看模型信息。如下图所示,使用`Netron`加载`.ms`模型后,可以展示模型的拓扑结构和图、节点的信息等。 - -![img](../images/visual_mnist.png) - -## 功能列表 - -- 支持加载`.ms`模型,要求MindSpore版本>=1.2.0; -- 支持查看子图; -- 支持拓扑结构和数据流`shape`的展示; -- 支持查看模型的`format`、`input`和`output`等; -- 支持查看节点的`type`、`name`、`attribute`、`input`和`output`等; -- 支持结构化的`weight`、`bias`等数据的查看与保存; -- 支持可视化结果导出为图片保存。 - -## 使用方式 - -ms模型的支持代码已经合入官方库。`Netron`的下载地址为 , 作者不定期更新并发布Release版本。用户按照以下方式安装`Netron`,将模型拖入窗口即可打开。 - -- macOS: 下载`.dmg`文件或者执行`brew cask install netron` - -- Linux: 下载`.AppImage`文件或者执行`snap install netron` - -- Windows: 下载`.exe`文件或者执行`winget install netron` - -- Python服务器:执行`pip install netron`安装Netron,然后通过`netron [FILE]`或`netron.start('[FILE]')`加载模型 - -- 浏览器:打开 - -## 开发调试 - -### 使用开发版本 - -步骤1:通过`git clone https://github.com/lutzroeder/netron`克隆一份源码 - -步骤2:进入`netron`目录,执行`npm install` - -步骤3:执行`make build`进行编译,在./dist路径下将生成可执行程序 - -### 使用Javacript调试模型 - -在调试模型时,在`netron`文件夹下,先在./test/models.json中添加调试模型的信息,然后使用node.js调试./test/model.js脚本即可。 diff --git a/tutorials/notebook/README.md b/tutorials/notebook/README.md deleted file mode 100644 index 2fe3ff89138bf2319db9dc7194706cdf2363086b..0000000000000000000000000000000000000000 --- a/tutorials/notebook/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# MindSpore的教程体验 - -## 环境配置 - -### Windows和Linux系统配置方法 - -- 系统版本:Windows 10,Ubuntu 16.04及以上 - -- 软件配置:[Anaconda](https://www.anaconda.com/products/individual),Jupyter Notebook - -- 语言环境:Python3.7.X 推荐 Python3.7.5 - -- MindSpore 下载地址:[MindSpore官网下载](https://www.mindspore.cn/versions),使用Windows系统用户选择Windows-X86版本,使用Linux系统用户选择Ubuntu-X86版本 - -> MindSpore的[具体安装教程](https://www.mindspore.cn/install/) - -### Jupyter Notebook切换conda环境(Kernel Change)的配置方法 - -- 
首先,增加Jupyter Notebook切换conda环境功能(Kernel Change) - - 启动Anaconda Prompt,输入命令: - - ```bash - conda install nb_conda - ``` - - > 建议在base环境操作上述命令。 - - 执行完毕,重启Jupyter Notebook即可完成功能添加。 - -- 然后,添加conda环境到Jypyter Notebook的Kernel Change中。 - - 1. 新建一个conda环境,启动Anaconda Prompt,输入命令: - - ```bash - conda create -n {env_name} python=3.7.5 - ``` - - > env_name可以按照自己想要的环境名称自行命名。 - - 2. 激活新环境,输入命令: - - ```bash - conda activate {env_name} - ``` - - 3. 安装ipykernel,输入命令: - - ```bash - conda install -n {env_name} ipykernel - ``` - - > 如果添加已有环境,只需执行安装ipykernel操作即可。 - - 执行完毕后,刷新Jupyter notebook页面点击Kernel下拉,选择Kernel Change,就能选择新添加的conda环境。 - -## notebook说明 - -| 教  程  类  别 | 教  程  名  称 | 文  件  名  称 | 内  容  描  述 -| :----------- | :----------- | :------- |:------ -| 快速入门 | 手写数字分类识别入门体验教程 | [mindspore_quick_start.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/quick_start/quick_start.ipynb) | - CPU平台下从数据集到模型验证的全过程解读
    - 体验教程中各功能模块的使用说明
    - 数据集图形化展示
    - 了解LeNet5具体结构和参数作用
    - 学习使用自定义回调函数
    - loss值与训练步数的变化图
    - 模型精度与训练步数的变化图
    - 使用模型应用到手写图片的预测与分类上 -| 快速入门 | 线性拟合 | [mindspore_linear_regression.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/quick_start/linear_regression.ipynb) | - 了解线性拟合的算法原理
    - 了解在MindSpore中如何实现线性拟合的算法原理
    - 学习使用MindSpore实现AI训练中的正向传播和方向传播
    - 可视化线性函数拟合数据的全过程。 -| 基础使用 | 加载图像数据集 | [mindspore_load_dataset_image.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/use/load_dataset_image.ipynb) | - 学习加载图像数据集
    - 学习处理图像数据集
    - 学习增强图像数据集 -| 基础使用 | 加载文本数据集 | [mindspore_load_dataset_text.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/use/load_dataset_text.ipynb) | - 学习加载文本数据集
    - 学习处理文本数据集
    - 学习文本数据集分词 -| 基础使用         |  保存模型   |     [mindspore_save_model.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindspore_save_model.ipynb)          | - 了解不同平台用于训练的模型类型
     - 学习如何用不同策略保存训练模型
     - 学习如何将模型导出为不同的文件类型,用于不同平台上的训练 -| 基础使用 | 加载模型用于推理或迁移学习 | [mindspore_load_model_for_inference_and_transfer.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindspore_load_model_for_inference_and_transfer.ipynb) | - 了解预训练模型的方法
    - 学习在本地加载已有的模型进行推理的方法
    - 学习在本地加载模型并进行迁移学习的方法 -| 处理数据 | 转换数据集为MindRecord | [mindspore_convert_dataset.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/advanced_use/convert_dataset.ipynb) | - 展示将数据集转换为MindRecord
    - 展示读取MindRecord数据集 -| 数据处理 | 优化数据准备的性能 | [mindspore_optimize_data_processing.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/advanced_use/optimize_data_processing.ipynb) | - 数据加载性能优化
    - shuffle性能优化
    - 数据增强性能优化
    - 性能优化方案总结 -| 应用实践 | 自然语言处理应用 | [mindspore_nlp_application.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindspore_nlp_application.ipynb) | - 展示MindSpore在自然语言处理的应用
    - 展示自然语言处理中数据集特定的预处理方法
    - 展示如何定义基于LSTM的SentimentNet网络 -| 应用实践 | 计算机视觉应用 | [mindspore_computer_vision_application.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindspore_computer_vision_application.ipynb) | - 学习MindSpore卷积神经网络在计算机视觉应用的过程
    - 学习下载CIFAR-10数据集,搭建运行环境
    - 学习使用ResNet-50构建卷积神经网络
    - 学习使用Momentum和SoftmaxCrossEntropyWithLogits构建优化器和损失函数
    - 学习调试参数训练模型,判断模型精度 -| 调试网络 | 模型的训练及验证同步方法 | [mindspore_evaluate_the_model_during_training.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindspore_evaluate_the_model_during_training.ipynb) | - 了解模型训练和验证同步进行的方法
    - 学习同步训练和验证中参数设置方法
    - 利用绘图函数从保存的模型中挑选出最优模型 -| 调试网络 | 使用PyNative进行神经网络的训练调试体验 | [mindspore_debugging_in_pynative_mode.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindspore_debugging_in_pynative_mode.ipynb) | - GPU平台下从数据集获取单个数据进行单个step训练的数据变化全过程解读
    - 了解PyNative模式下的调试方法
    - 图片数据在训练过程中的变化情况的图形展示
    - 了解构建权重梯度计算函数的方法
    - 展示1个step过程中权重的变化及数据展示 -| 调试网络 | 自定义调试信息体验文档 | [mindspore_custom_debugging_info.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindspore_custom_debugging_info.ipynb) | - 了解MindSpore的自定义调试算子
    - 学习使用自定义调试算子Callback设置定时训练
    - 学习设置metrics算子输出相对应的模型精度信息
    - 学习设置日志环境变量来控制glog输出日志 -| 调试网络 | MindInsight的溯源分析和对比分析 | [mindspore_lineage_and_scalars_comparison.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindinsight/mindspore_lineage_and_scalars_comparison.ipynb) | - 了解MindSpore中训练数据的采集及展示
    - 学习使用回调函数SummaryCollector进行数据采集
    - 使用MindInsight进行数据可视化
    - 了解数据溯源和模型溯源的使用方法
    - 了解对比分析的使用方法 -| 调试网络 | MindInsight训练看板 | [mindinsight_dashboard.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindinsight/mindinsight_dashboard.ipynb) | - 了解完整的MindSpore深度学习及MindInsight可视化展示的过程
    - 学习使用MindInsight对训练过程中标量、直方图、图像、计算图、数据图和张量信息进行可视化展示
    - 学习使用Summary算子记录标量、直方图、图像、计算图、数据图和张量信息 -| 调试网络 | 应用自动数据增强 | [mindspore_enable_auto_augmentation.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindspore_enable_auto_augmentation.ipynb) | - 了解自动数据增强原理
    - 在CIFAR-10数据集上实现自动数据增强 -| 优化训练性能 | 混合精度 | [mindspore_mixed_precision.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindspore_mixed_precision.ipynb) | - 了解混合精度训练的原理
    - 学习在MindSpore中使用混合精度训练
    - 对比单精度训练和混合精度训练的对模型训练的影响 -| 优化训练性能 | 应用梯度累积算法 | [mindspore_apply_gradient_accumulation.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindspore_apply_gradient_accumulation.ipynb) | - 了解梯度累积训练算法的原理
    - 学习在MindSpore中自定义训练函数,并在其中实现前向传播和反向传播的计算过程构建,实现梯度累积计算
    - 了解梯度累积训练对模型训练的影响 -| 模型安全和隐私 | 模型安全 | [mindspore_improve_model_security_nad.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/notebook/mindspore_improve_model_security_nad.ipynb) | - 了解AI算法的安全威胁的概念和影响
    - 介绍MindArmour提供的模型安全防护手段
    - 学习如何模拟攻击训练模型
    - 学习针对被攻击模型进行对抗性防御 -| 编程指南 | MindSpore API概述 | [mindspore_api_structure.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/api_structure.ipynb) | - 了解MindSpore的总体架构
    - 介绍MindSpore的设计理念
    - 介绍MindSpore的层次结构 -| 编程指南 | dtype | [mindspore_dtype.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/dtype.ipynb) | - 了解MindSpore支持的数据类型
    - 介绍MindSpore数据类型转换的接口 -| 编程指南 | Tensor | [mindspore_tensor.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/tensor.ipynb) | - 了解tensor的构造
    - 介绍tensor的属性 -| 编程指南 | 算子 | [mindspore_operators.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/operators.ipynb) | - 学习算子的使用方式
    - 介绍算子的功能 -| 编程指南 | Parameter | [mindspore_parameter.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/parameter.ipynb) | - 介绍Parameter的初始化
    - 介绍Parameter的属性
    - 介绍Parameter的方法 -| 编程指南 | Cell构建及其子类 | [mindspore_cell.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/cell.ipynb) | - 介绍Cell的关键成员函数
    - 介绍nn模块与ops模块的关系
    - 了解模型层
    - 了解MindSpore的损失函数
    - 学习MindSpore优化算法
    - 学习构建自定义网络 -| 编程指南 | 优化算法 | [mindspore_optim.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/optim.ipynb) | - 介绍各种类型学习率的类
    - 介绍Optimizer的构建及使用方式 -| 编程指南 | 常用网络组件 | [mindspore_network_component.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/network_component.ipynb) | - 介绍GradOperation的使用方法
    - 介绍WithLossCell的使用方法
    - 介绍TrainOneStepCell的使用方法 -| 编程指南 | 数据集加载 | [mindspore_dataset_loading.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/dataset_loading.ipynb) | - 了解常用数据集加载
    - 学习特定格式数据集加载
    - 学习自定义数据集加载 -| 编程指南 | 采样器 | [mindspore_sampler.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/sampler.ipynb) | - 介绍MindSpore采样器
    - 学习如何自定义采样器 -| 编程指南 | 数据处理 | [mindspore_pipeline.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/pipeline.ipynb) | - 学习MindSpore数据处理算子的使用方法 -| 编程指南 | 数据增强 | [mindspore_augmentation.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/augmentation.ipynb) | - 介绍MindSpore提供的数据增强操作
    - 介绍c_transforms使用方法
    - 介绍py_transforms使用方法 -| 编程指南 | 分词器 | [mindspore_tokenizer.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/tokenizer.ipynb) | - 介绍MindSpore提供的分词器 -| 编程指南 | MindSpore数据格式转换 | [mindspore_dataset_conversion.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/dataset_conversion.ipynb) | - 介绍非标准数据集转换为MindRecord
    - 介绍常用数据集转换为MindRecord -| 编程指南 | 自动数据增强 | [auto_augmentation.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/auto_augmentation.ipynb) | - 介绍基于概率的自动数据增强
    - 介绍基于回调参数的自动数据增强 -| 编程指南 | 运行管理 | [mindspore_context.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/context.ipynb) | - 介绍执行模式的选择和切换
    - 介绍硬件管理的方式
    - 介绍分布式管理的用法
    - 介绍维测管理的使用方式 -| 编程指南 | 运行方式 | [mindspore_run.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/run.ipynb) | - 介绍单个算子的执行
    - 介绍普通函数的执行
    - 介绍网络模型的执行 -| 编程指南 | 训练 | [mindspore_train.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/train.ipynb) | - 学习自定义训练网络
    - 学习自定义训练循环
    - 学习如何边训练边推理
    - 介绍On-device执行 -| 编程指南 | 深度概率编程库 | [mindspore_probability.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/probability.ipynb) | - 介绍概率分布类
    - 介绍概率分布映射
    - 学习深度概率网络构建
    - 学习概率推断算法
    - 学习贝叶斯网络的构建及转换
    - 学习MindSpore中贝叶斯工具箱的使用方法 -| 编程指南 | 保存、加载与转化模型 | [mindspore_advanced_usage_of_checkpoint.ipynb](https://gitee.com/mindspore/docs/blob/master/docs/programming_guide/source_zh_cn/advanced_usage_of_checkpoint.ipynb) | - 学习MindSpore中模型保存、加载的进阶方法。
    - 学习保存、加载模型中指定Cell的权重参数。
    - 学习对模型中的参数和数据进行添加、删除和修改等操作。
    - 学习将其他框架中的模型转化为MindSpore框架中的模型的方法。 diff --git a/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_bnnlenet5.ipynb b/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_bnnlenet5.ipynb deleted file mode 100644 index 6d1bca13c1002470fc670480b7d40863477f734f..0000000000000000000000000000000000000000 --- a/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_bnnlenet5.ipynb +++ /dev/null @@ -1,351 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 深度概率编程\n", - "\n", - "## 概述\n", - "\n", - "深度学习模型具有强大的拟合能力,而贝叶斯理论具有很好的可解释能力。MindSpore深度概率编程(MindSpore Deep Probabilistic Programming, MDP)将深度学习和贝叶斯学习结合,通过设置网络权重为分布、引入隐空间分布等,可以对分布进行采样前向传播,由此引入了不确定性,从而增强了模型的鲁棒性和可解释性。MDP不仅包含通用、专业的概率学习编程语言,适用于“专业”用户,而且支持使用开发深度学习模型的逻辑进行概率编程,让初学者轻松上手;此外,还提供深度概率学习的工具箱,拓展贝叶斯应用功能。\n", - "\n", - "本章将详细介绍深度概率编程在MindSpore上的应用。在动手进行实践之前,确保,你已经正确安装了MindSpore 0.7.0-beta及其以上版本。\n", - "\n", - "> 本例适用于GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 环境准备\n", - "\n", - "设置训练模式为图模式,计算平台为GPU。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore import context\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target=\"GPU\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据准备\n", - "\n", - "### 下载数据集\n", - "下载MNIST数据集并解压到指定位置,执行如下命令:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p 
./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义数据集增强方法\n", - "\n", - "MNIST数据集的原始训练数据集是60000张$28\\times28$像素的单通道数字图片,本次训练用到的含贝叶斯层的LeNet5网络接收到训练数据的张量为`(32,1,32,32)`,通过自定义create_dataset函数将原始数据集增强为适应训练要求的数据,具体的增强操作解释可参考官网快速入门[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dataset as ds\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define some parameters needed for data enhancement and rough justification\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # according to the parameters, generate the corresponding data enhancement method\n", - " c_trans = [\n", - " 
CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR),\n", - " CV.Rescale(rescale_nml, shift_nml),\n", - " CV.Rescale(rescale, shift),\n", - " CV.HWC2CHW()\n", - " ]\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # using map to apply operations to a dataset\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=c_trans, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - "\n", - " \n", - " # process the generated dataset\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size)\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义深度神经网络\n", - "\n", - "在经典LeNet5网络中,数据经过如下计算过程:卷积1->激活->池化->卷积2->激活->池化->降维->全连接1->全连接2->全连接3。 \n", - "本例中将引入概率编程方法,将卷积1和全连接1两个计算层改造成贝叶斯层,构造成含贝叶斯层的LeNet5网络。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.common.initializer import Normal\n", - "import mindspore.nn as nn\n", - "from mindspore.nn.probability import bnn_layers\n", - "import mindspore.ops as ops\n", - "from mindspore import dtype as mstype\n", - "\n", - "\n", - "class BNNLeNet5(nn.Cell):\n", - " def __init__(self, num_class=10):\n", - " super(BNNLeNet5, self).__init__()\n", - " self.num_class = num_class\n", - " self.conv1 = bnn_layers.ConvReparam(1, 6, 5, stride=1, padding=0, has_bias=False, pad_mode=\"valid\")\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = bnn_layers.DenseReparam(16 * 5 * 5, 120)\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, self.num_class)\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", 
- " self.flatten = nn.Flatten()\n", - "\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x) \n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本例中将卷积层1和全连接1两个计算层换成了贝叶斯卷积层`bnn_layers.ConvReparam`和贝叶斯全连接层`bnn_layers.DenseReparam`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义训练网络\n", - "\n", - "定义训练网络并进行训练。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 1, loss is 2.3022718\n", - "epoch: 1 step: 2, loss is 2.3022223\n", - "epoch: 1 step: 3, loss is 2.3028727\n", - "epoch: 1 step: 4, loss is 2.3034232\n", - "epoch: 1 step: 5, loss is 2.3019493\n", - "epoch: 1 step: 6, loss is 2.3017588\n", - "... 
...\n", - "epoch: 1 step: 1866, loss is 0.097549394\n", - "epoch: 1 step: 1867, loss is 0.082386635\n", - "epoch: 1 step: 1868, loss is 0.027000971\n", - "epoch: 1 step: 1869, loss is 0.026424333\n", - "epoch: 1 step: 1870, loss is 0.19351783\n", - "epoch: 1 step: 1871, loss is 0.02400064\n", - "epoch: 1 step: 1872, loss is 0.3389563\n", - "epoch: 1 step: 1873, loss is 0.004886848\n", - "epoch: 1 step: 1874, loss is 0.020785151\n", - "epoch: 1 step: 1875, loss is 0.33145565\n" - ] - } - ], - "source": [ - "from mindspore.nn import TrainOneStepCell\n", - "from mindspore import Tensor, Model\n", - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor\n", - "from mindspore.nn.metrics import Accuracy\n", - "from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits\n", - "import os\n", - "\n", - "\n", - "lr = 0.01\n", - "momentum = 0.9\n", - "model_path = \"./models/ckpt/probability_bnnlenet5/\"\n", - "\n", - "# clean old run files\n", - "os.system(\"rm -f {0}*.meta {0}*.ckpt\".format(model_path))\n", - "network = BNNLeNet5()\n", - "criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - "optimizer = nn.Momentum(network.trainable_params(), lr, momentum)\n", - "model = Model(network, criterion, optimizer, metrics={\"Accuracy\": Accuracy()} )\n", - "\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=1875, keep_checkpoint_max=16)\n", - "ckpoint_cb = ModelCheckpoint(prefix=\"checkpoint_lenet\", directory=model_path, config=config_ck)\n", - "\n", - "ds_train_path = \"./datasets/MNIST_Data/train/\"\n", - "train_set = create_dataset(ds_train_path, 32, 1)\n", - "model.train(1, train_set, callbacks=[ckpoint_cb, LossMonitor()])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "训练完成后会在对应的路径上生成`.ckpt`为后缀的权重参数文件和`.meta`为后缀的计算图文件。 \n", - "其路径结构为:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - 
"text": [ - "./models/ckpt/probability_bnnlenet5/\n", - "├── checkpoint_lenet-1_1875.ckpt\n", - "└── checkpoint_lenet-graph.meta\n", - "\n", - "0 directories, 2 files\n" - ] - } - ], - "source": [ - "!tree $model_path" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 验证模型精度\n", - "\n", - "载入验证数据集,并验证含有贝叶斯层的LeNet5网络模型的精度。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'Accuracy': 0.9730568910256411}\n" - ] - } - ], - "source": [ - "ds_eval_path = \"./datasets/MNIST_Data/test/\"\n", - "test_set = create_dataset(ds_eval_path, 32, 1)\n", - "acc = model.eval(test_set)\n", - "print(acc)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "模型精度大于0.95,证明模型效果良好。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结\n", - "\n", - "本例使用了深度概率编程在经典LeNet5深度神经网络中应用,含有贝叶斯层的LeNet5网络和原本的LeNet5网络的训练体验过程极其相似,有心的用户可以对比两者在训练收敛效率,稳定性等方面的不同,是否体现了概述中深度概率编程的优点。 \n", - "当然深度概率编程近年来最激动人心的是在CVAE以及GAN等生成网络中的应用,这使我们在拥有了以假乱真的数据生成能力,接下来一篇就以CVAE网络体验介绍深度概率编程的另一种应用。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_cvae.ipynb b/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_cvae.ipynb deleted file mode 100644 index 129eb22180e870e8660f68b189ad4ec6f039404d..0000000000000000000000000000000000000000 --- 
a/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_cvae.ipynb +++ /dev/null @@ -1,429 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 深度概率编程CVAE\n", - "\n", - "## 概述\n", - "\n", - "本例采用MindSpore的深度概率编程方法应用于条件变分自编码器(CVAE)模型训练。 \n", - "整体流程如下:\n", - "\n", - "1. 数据集准备\n", - "2. 定义条件变分自编码器网络;\n", - "3. 定义损失函数和优化器;\n", - "4. 训练生成模型。\n", - "5. 生成新样本或重构输入样本。\n", - "\n", - "> 本例适用于GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据准备\n", - "\n", - "### 下载数据集\n", - "\n", - "本例使用MNIST_Data数据集,执行如下命令进行下载并解压到对应位置:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据增强\n", - "\n", - 
"将数据集增强为适应CVAE网络训练要求的数据,本例主要是将原始图片像素大小由$28\\times28$增强为$32\\times32$,同时将多张图片组成1个`batch`来加速训练。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.common.dtype as mstype\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\"\n", - " create dataset for train or test\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - "\n", - " # define map operations\n", - " resize_op = CV.Resize((resize_height, resize_width)) # Bilinear mode\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - "\n", - " # apply map operations on images\n", - " mnist_ds = mnist_ds.map(operations=resize_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " mnist_ds = mnist_ds.batch(batch_size)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义条件变分自编码器网络\n", - "\n", - "变分自编码器的构成主要分为三个部分,编码器,解码器和隐空间。 \n", - "其中: \n", - "编码器(Encoder)主要作用是将训练数据进行降维,压缩,提取特征,形成特征向量,存储在隐空间中。 \n", - "解码器(Decoder)主要作用是将训练数据因空间分布的参数进行解码,还原生成出新的图像。 \n", - "隐空间主要作用是将模型的特征按照某种分布特性进行存储,属于编码器和解码器中间的桥梁。 \n", - "本例中条件变分自编码器(CVAE)是在变分自编码器的基础上增添标签训练,在后续随机采样生成图片的过程中,可以施加标签指定生成该条件的图片。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", 
- "import mindspore.nn as nn\n", - "from mindspore import context, Tensor\n", - "import mindspore.ops as ops\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE,device_target=\"GPU\")\n", - "IMAGE_SHAPE=(-1,1,32,32)\n", - "image_path = os.path.join(\"./datasets/MNIST_Data\",\"train\")\n", - "\n", - "class Encoder(nn.Cell):\n", - " def __init__(self, num_classes):\n", - " super(Encoder, self).__init__()\n", - " self.fc1 = nn.Dense(1024 + num_classes, 400)\n", - " self.relu = nn.ReLU()\n", - " self.flatten = nn.Flatten()\n", - " self.concat = ops.Concat(axis=1)\n", - " self.one_hot = nn.OneHot(depth=num_classes)\n", - "\n", - " def construct(self, x, y):\n", - " x = self.flatten(x)\n", - " y = self.one_hot(y)\n", - " input_x = self.concat((x, y))\n", - " input_x = self.fc1(input_x)\n", - " input_x = self.relu(input_x)\n", - " return input_x\n", - "\n", - "class Decoder(nn.Cell):\n", - " def __init__(self):\n", - " super(Decoder, self).__init__()\n", - " self.fc2 = nn.Dense(400, 1024)\n", - " self.sigmoid = nn.Sigmoid()\n", - " self.reshape = ops.Reshape()\n", - "\n", - " def construct(self, z):\n", - " z = self.fc2(z)\n", - " z = self.reshape(z, IMAGE_SHAPE)\n", - " z = self.sigmoid(z)\n", - " return z" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义优化器和损失函数\n", - "\n", - "定义条件变分自编码器的损失函数,将图像与label关联。 \n", - "损失函数采用ELBO函数,此函数用于计算解码图像和原图像的差值,并通过对比两个图像的差值,以及图像分布的均值之差来计算两图的损失情况。 \n", - "优化器采用`nn.Adam`来最小化损失值。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.nn.probability.dpn import ConditionalVAE\n", - "from mindspore.nn.probability.infer import ELBO, SVI\n", - "\n", - "class CVAEWithLossCell(nn.WithLossCell):\n", - " \"\"\"\n", - " Rewrite WithLossCell for CVAE\n", - " \"\"\"\n", - " def construct(self, data, label):\n", - " out = self._backbone(data, label)\n", - " return self._loss_fn(out, label)\n", - "\n", - "# define the encoder and decoder\n", - 
"encoder = Encoder(num_classes=10)\n", - "decoder = Decoder()\n", - "# define the vae model\n", - "cvae = ConditionalVAE(encoder, decoder, hidden_size=400, latent_size=20,num_classes=10)\n", - "\n", - "# define the loss function\n", - "net_loss = ELBO(latent_prior='Normal', output_prior='Normal')\n", - "# define the optimizer\n", - "optimizer = nn.Adam(params=cvae.trainable_params(), learning_rate=0.001)\n", - "net_with_loss = CVAEWithLossCell(cvae,net_loss)\n", - "vi = SVI(net_with_loss=net_with_loss,optimizer=optimizer)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "参数解释:\n", - "\n", - "- num_classes:类别数量,本例中为0-9个数字,共计10个种类。\n", - "- ConditionalVAE:条件自编码器模型,将编码器,解码器,压缩大小,隐空间维度和类别数量等变分自编码器网络初始化。\n", - " - `encoder`:编码器网络。\n", - " - `decoder`:解码器网络。\n", - " - `hiddend_size`:数据压缩后的大小,本例为400。\n", - " - `latent_size`:隐空间的向量维度,向量维度越大,分别的特征维度越多,图像特征越清晰,本例中可调节维度大小为20。\n", - " - `num_classes`:类别数量。\n", - "- ELBO:变分自编码器的损失函数。\n", - " - `latent_prior`:隐空间初始化分布,本例中隐空间的参数遵循正态分布。\n", - " - `output_prior`:输出权重的初始化分布,本例中其权重参数初始化分布遵循正态分布。\n", - "- nn.Adam:优化器。\n", - "- CVAEWithLossCell:本例重建了`nn.WithlossCell`函数,使得生成的数据,附带标签(label)。\n", - "- SVI:模型函数,类似MindSpore中的Model,此函数为变分自编码器专用模型函数。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 训练生成模型\n", - "\n", - "生成训练数据,将调用上述代码中`vi`的训练模式,对模型进行训练,训练完成后打印出模型的loss值。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "39.697325069173175\n" - ] - } - ], - "source": [ - "# define the training dataset\n", - "ds_train = create_dataset(image_path, 32, 1)\n", - "# run the vi to return the trained network.\n", - "cvae = vi.run(train_dataset=ds_train, epochs=5)\n", - "# get the trained loss\n", - "trained_loss = vi.get_train_loss()\n", - "print(trained_loss)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 样本重建\n", - "\n", - 
"先定义可视化绘图函数`plot_image`,用于样本重建和条件采样生成数据的可视化。\n", - "\n", - "使用训练好的模型,查看重建数据的能力如何,这里取一组原始数据进行重建,执行如下代码:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The shape of the reconstructed sample is (32, 1, 32, 32)\n", - "\n", - "=============The Original Images=============\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADcCAYAAADTE3J+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAABkxklEQVR4nO29d5xV1bn//97l9MOc6cPA9IEZikoRkCLFimIJdg03seBVr+Vef7Hkq/l+NYkmRqO55ho111gwCSFYiAUV0AgIIl3pZWB6Y5jeTt17//440w5TGIY5+wxmv18vXy/Ze+0zz957rc9e61nPs5agaRoGBgYGBvogRtoAAwMDg38lDNE1MDAw0BFDdA0MDAx0xBBdAwMDAx0xRNfAwMBARwzRNTAwMNARua+Tl4g3DKl4ss/Vd4Xezhm2DpzebD1T7ATD1tPh+2DrmWInGD1dAwMDA10xRNfAwMBAR/p0LxgYGBgMJr75U2jINKHYgqNvQQFbtUrU3zZH2DL9METXwKAX5NQU/KlxqBYJ87FmlP2HI23SGY02YwKFC0XOPSuPdHstAPV+O+uPjibqbxE2Tkf+5URXThmJZjWDcIKfW9MQVI1AQRGcCetRCAJyZjqaKHS/Fzjz7mcI0jxxBGXzRJToANE740mMtOiKElKUExLiejwteHxozS2gqd3LHK9BaWwGVdHB0BMNC9bVgzfauHf259zq2k2i5MCvKezzBThYn6irOVJCAoLTjiZLJy0reHxoDY0ojY2D9vfDJrqCLCNYLD2fVBS0QAAtEAjXn++VygVpuBMFtBPvXAPRB+mvt6B5PKCqEbe1JwRZRrDZEOw2Cm8agdrLI+56P0pNbWQaWxuiw9H9oKah+QNofp/+BvUDwWKhZpzMdRdu4qJh+7ibW9FXGk40SEBKiMMzIY2KmeYei1irwFXoRwxo3cqkrHVh2pWPUt+gh7WdiBJSQhyFN43gVwuWcbXjGHYxWB+OKW6W1s2mbuNwnOTrY47VSuPcLOqzJRTbycvbKjUSdjbD1j2DZkNYRFe02xFSkmke2/MX2dwQwFJUE+yF6czkxbv51Yg1JErdhaDA38zNpY9gaVKgrXMYSVtPpOO5jovDO0xi5T3PkWly9lq+/X7iVh2JjPAKAoLZjHvuOFRTaG9c8mpYK5rRdh3Q16Z+IozJonWESoq5LnggwoMFKS6WlqkZlNwQIP+SV3os81JdOi/tnkfAJ3UrM065l8ySGNBTdEUJOTGe6kuyutTVzo/Bfl8M7+08l5ynN+lmUmDqWAJ31PDXsUuZ2FunsAsPVkxh7V+mMXzr4NkwqKIrWq0ITgfKqJHkL3SQ9+NXeyz3+LFz+PCd80l/uW5Qu+39oazVxX7fMHymRhIkCxbB1HEu0+Rky7OhNkfS1q4Isgw5GeTd7OryXHsXXOi8n7n1d2H/shW1pSX8hgIIAqLNhhgTjRoXxauv/J6xZntIkTWtJu5efys5i/Ux6VQ5+KCdt+a8xgyrl+VNyQiekw9Fw0WwdzaK1lvryZ+yvNdyD8QU8cDct3W0rA/aeubVl2S1tanQuurV/FQG4nV
/rhnPH+aZEWuI76HTpReDKrqViyeTffNhfp32KtEiQM839mTiDsbcWs7PR17H6Pu3DKYJJ+d6D8/JF1P476O49voNPJ3Y97AhorZ2wTdvAsW3q2ya8zy9PdehgpyZTuFNI3jn7hcQ0cgxWSNt0oB5omoqK9+ZSe4f9qBGyIbKxZMZv2g/L6d9BvRjTDwEaK8DK+95jp46B0PhuUaKQRVdxQIp9npyTH2LgkUwMVxuQLPq72dUampBEMhcamPLhinMjZoBgCaCb5jIsl89HzJkj6StAMVPzCRhVgWXJn/NBc79PbpFhhLajAkcvNHGLxcsZ4zJgiSceaHgeW9P5vkZ7zLJ0sIHqozsBrWpKWL2KBaItzTjEjsFt8DfzC2PP8x1j33OXdF7Q86dWMbcrJKxrwK1vFIXe7vWgZ7cXwsOLaDi3QwyPi0lEMHnGikGTXRr7pxB1KWV3BCzla45FxWBZuYufQTJIzDz8t08nryKbJOTceY6bjx3Ox//v5mkPqWfTwcATUMpKUOuOIYsdQ5vnHY7V6U8imKGmQs6bQWg16S+8FH8xEwuuXobd8ZvIF3WsAoyJ+azZK66E3OZCdEv4Enzcf3kHfx2+Lf6G0swBrNwoci9sz/nGkcFktDzhM9QZ9qoQs6zluMS+3bf6MGJ7apZ9bCiOYXnltxL2po8/u68lLfi56P1MEqXfJC2Jg+ttRXV69VtMjgwzISU3Mo1jgq6+nABZu2+Fs+KJJLXHUMpLdfFnt54vzmKX+6/At/OGBBAsWlsXPR82Ds2gyK6DYumI15TzWOjPmWC2UdFIMB7TeN5Zf8cfB6ZnGV1CK1etrrP4b8WDOOJ9I+YZLZxS/QWvp6VNRgmnDI9RiS0tpK+3I4miWwcn0Vlop1sU8/XhxPRaqVy8WQuumoH9yesI1O2YhJCW1WD6mbejttJf1fAXlhNc24MpfESTsmrv8HtNmWamDT+CLe6dnfMULez4NACrhu+k6ucR0mUHKTKDYzPLqPmh9OHTGB8+3O/MXYF0WLkoyl7aldv1Z/Lm59fQO7yMgLV1Qz/xAxmUzB08AQERSVQXa1ryKAwaTwVM8xcnL0XuxgquAsOLcDzjySS1h8HTcN92WQasmRdkyPWrT2HmemjkE0K3lInCdth5LYqfCOiyL9Gn8Y+KDWr5goPL+R8wlxrPU7Ryn6/nyX55zH8dQuiX0PbfwQ1ECB1pYkCKZMnLlnILzM/IEsOkBtdRelgGDEYaMG4Vm3GBJx2N1YhQHHAx6cN87CW6NdrE5wOsm8+zEOJ/yRNtocM0RVNpTDQys9Kr8a5xIV9ax5KbR3eKdOJz67myqjvaO9deDU/T1RNxVzvByW87hFpfC4NORpz4vJCegrtNhzcm8oOZy3z7EdIlCBVFrlz5AYevmL4kAmMb3/uF9qPYBPsbPX62VCehe14ZLyOXdtVrRrgjbrz+POmWWSsCdCamwi5iUjbjqKUlQ+JWGxpfC5Fl7vInFfI4vivOLEeHtybSqwCzWNiaR4hUX+ej0vG7WFrRTrW2qkA2LYdRamtC9v9ZH3Qgt9pQpNMmBrdyMcbQZKoPsvKD+dswCWGv50PiujOy85jpvU4TtFBg+pmmzuH+iOxxK8Ofr06Ht/xOuL3RnM4OYXP4iZwX+y2wfjzg07BQjt3ZmwiVfbzeWs6H+6eQOZGnXqQgoAgy/wi9YMQf1iD6qY8oHHIn8hndbM4/Ndckj77FsXjQUpIoDFL4OaUfZxr6aw0xxUvK9+dSUZJKUqYh5YVF8QxemIRs+2HaW9sdUorH7eksfKdmWg5XnLslQxr65E5RSvnW48xNztvaHx0e3juf6qajXtLPIl76/Sd7GlLJlg4ZhezrdU4RTvrPVZW5E8g9lsJTdY4fkcrAAliNvbCBvAHwhLI31/klJEUXRlH9vx8nkz7qKMe1imtrG4dyXvrpyMmeGm9zI9b0EiPreOhEVu43lnJ9niJu/79R533s60wfCGOm3djAqS
oKARXFIHkGI6f6yTt+vy2SfXw93ZPW3RFhwOT2Plwdvls/KXwPGL3hA53ugYla3Yfx3xRHFNEGvxWoPl0zRg0RIeDSy78lsXR3xEvOdjWnEnULgumL3T2O3fBq/nZ6Inh1dIL2HcwFVupTOqrm8DhQHQ4aJybhTqumQn24pBrDvldpL9xhMCxqrDbWH+ul/9O/6yjsXk1Pzt9w3hy40LGLckn86M6/i1qX0RDdfpCkCQ0px2xrYvQrHpYd3Q0qZu9qDrHEou2YOLLjxzrsQjBJurXZGRRpTkd6mYp5M9YCkBm42LMZXGIAQFrFcTvbkXasl/3pJPKBWlM/MF+fjFyZcc8iFfzs8vn5GdbF2JqFPj55R+0JUd07U2amGWFfV3uJ8ubhmWHL6yJHMq4DI5PctAwWmPk+Eo+Gr0qbH/rRE5LdAVTMPA93boOU9sQeEXdFNxrEhn+ZheREgQC53UGJbtEP5+35vBkyVXsOJJODtWndRODRU/3MxTY4jXx2N5rcCx3kbMsOHoQLJaOpAPtzuMsz13WEezt1xSO+AM8V3g5UqA1IjZ7tABbWkaR+rFI07Q0Uq1Huz1TFVAjMUN5IqKEFB9H9YzhWIVgn/bDlpFo5VZMjTrFNrcjCIhRw3jn7hcYb7bRPmpY6Ghm4dS/Q3AUjqIF7SyY/0bHpS/VpfPimsvJacrWPelk8uLdbfGvnaMzjxZgS+s44tdY+fvTv+2WHNETBfPfYFz+vWQWhDeRo2Kmk0sWbeaF5J1h+xu9MXDRFQSkuBhefuV/2ipHL/GDgoCUmMDY3+3hycSviJHsPFgxK5jl8eKmISO4/b4fnakINHP71w+Q8ZaI/GWn4Aq5mSckHQQFV9FUjvi9/OTojXBRKZFK/nWJNh6PP8Tjrx7qcrTzmSqaSo0iUN7iAiIYNtRLEP+v/noTuUvLCeQXRs62XmhVfRQEgm82U5Y6eo4PxBRx9tWvsTjhVnL/KzY4RI8gLtHGT+Py+OmzeZwskWeoYhd9BGxBd8RguW3C251rE9zbNmzh6aSNxEj2k19jEMLcpY+Q81s3pq92AUH3h+eic/jdR2/2mHSwwSNz5cb7kG6MTA+3vwwVO+XMdIruGMXfn/5tRO3oL9VKC49WzObR6Qt5dPpCHj82kzql8xnOtgb4bPYfUN4x2tpg8GTiDn566zsc/PXYQfvNQY+L+a/4dZx9dymbb8kGwCbVscB+DKc4dLKS2hMOcqNDfZ02qZbME1Ye+q/4dfgWyaxzziRVjxxxTUOpqeO+//hPNFlg1MFjaCXlqFPHU7DQziUXfku6dV23tFqAF+syeGn1ZeS83aB7L2fMi60sbriLBy/5jAdiTr5OhYKA6pUi3hvTRAHVTMik5bhX7iXjHzW6JROcjAWHFlC8OoO4fQEEVcPU4Ec8FvwI73twItdETUITBarPksm+LJ/Xs95nhKMBPaJgpbhYeM/KI8PfxdXPNt5+P7YqjZo5PvIvfePkF0UIi2DiKkcx5kve4fEl1zL6th2n/ZsDF11NQ21s4qY/PsRjty/nGkcFdtFMmmznpmGF/MB5FAAJAac4tL66nhQ/92d+yYW20GopIWA/wdY02c4DCV8i/kDjS0GfRA7N78O2fh8IAqrXi/fiSRT+QOCe879gcfR3bb7RUPfHQxWT+XjNeWSvdKPtPxp2G7vZvP8o9vIplPpigL5F9/3mKH66/VpSPxoCfnNB6Jb4EntQgbJKVI9Hf3u6tCulzf0Zc1glbddxtOK2+qooqG0hVfK2Ax0JPnHieArPi8ElmrklfjN3v3I7Y356IHzZdIKAYDLxSNqqHmPJe6I9OSJtw3E0k0RjVky3MlqE3Py9JXLFSHZm2kqYnF00KI6w0+rpqm436cvLeCLxBn6R7ObiUYe4LX4D0yzWDg9Og+pm0rab8fpl7hu3nhuGHRwEs0+P6F0m3hx1PhmZHzDN0neIiCSIjDJZuDP+K76bNVInC0Ft7RwyNmTITBp/hNv
bIip6wib5UeVghY3Ecoma30fs/gDvfzGDjzPPQpJU0mPqWJnzWbey37amY95vx/n1oYj5nHuiPeEkqaAZ1R0BwW2jvV1pUttHqakFtb4Bzds9bLHrh0HyaiiKiEUwMdXSwE/mruYzaw6EOdU2QWrBJJx8DiQkOUJVqZ0aj+Oc2m5lYg6raE3hncCMORzgwy/P47PMcQBomhCSyLVxXGhylF0QcJk8kRfd9mSCUe+48DtNrJ05mW3T0piVHFwbU9UEqrzDcC5xEe1WefuB6Uwfp38v7ESS11ZTFJXOfbMWsSB1H5PsRSx09B62ZhIkUmRPxBI5AnaBTEdNn+mJV7t2snrsWJq+i8W1UUfjuuDcVU5WTRz+KCuKVaRgXDTkdJ5/vzmKb1vTeefgJGILVJTqmsgY2oYUE4M7Ow5PavAjVa0o2P/qQizMQ/FFcJ3ftnbVjpyagnrumFPawSJKtHKh4yCrpDHhtPSUOFCYTKzWmRzRONvN82M/AToTKCrezSB55zHUMMcad62r7bQncilTx3ckR50s2mIgDI5Pty3gOKN6HHVHY1mbEB88roHk00j67FtUj4fCm86laUzkfbvK/sOkmWXqimN5N2suy8a1kDr9TyGJBYf9LVQqDjxq8FNXq6RwsD4R5xCKKe7KNIuJWcn5rE2IxxUhGwIlpQglpZhFCTk5idaE9JDzv9x/Bb5vY4g9ohKztyHiq0tpacOpnGbixsmbUDQVjyYRvaGQQIT9zCcy5HawGCDjM8spjommRhE7kiMWOppDkmgyPi1FKS0P6zoRUkICyBJydROmSgWtuRXl+PHgxH9mOnk32rgncyOpsh8w49cUSgPyoLX/QZ1IU7/bj+s7ujX6SDeunmi3NTYhgcY5WdwffQvfTHi/4/wzFZfxdUEW/ta28YVfxFYq67bCfW94NT9NarAXdqKrwSQoqHIw3jiSOzJIUU7cY5OpnR06HPbtjCHrr8EwrKFQJzzJTsQJDTyb9B1K5LNoe2TI7WDRA8cVB17NG7I2dU/05Gpqz1j7+RfXkfuHPbqsOtY8K5O6HJmAHeRWcB1VGLbG07EbS9cdLtpDMF+rvnDQdriI/KoeEUY5fpxhq1ppCYyHLuuXb/nkbEYPoThNBZE6pZUt3hjerZ6KTfLzXPKGkOyekZY6WkeoCGOz0HZH0HeeEEfF+ZZuuxcIQ1TYhjLtO1ikWU7uijlxAsqvhXmiUtPQ/H6eKVzAi9nvMMok9msyrZ12wX1s07Xk3L9Ftw9xyQ8U3przOvNsKju8Pu4/eAstytgTdmMJtqviQCsvHb+YdR9OHrTopYiIbjgCjr/PSF4obY3m7cZx/M/a+Yx+YCtyUiIFmxXGd3E5/Ud0HrELmvm5+TpG3x8ZWwVZRnNYCVhDFbbA34zop3PvOYN+0b6DxTybyprW3nuSotVKwC5iMQWH5W7Nx0FfMlqYF8JRamrholoeWns9L2a9S7Zs69cayl7Nz8ctafz8i+vIieDmAOdazMERbkf/IDSJ4/fV8/h66WRSXxy8qKWIiO5Q2Y3hTGH4Gztp/budNeJocj37e+0RDIXV+Hvb4eKWxx8mbU1ecKlBg0Gn6+4S1YrKzysvJP9KF0rVcX0MuN7Dgpfu4+3pbzKrH9M2Q6GuRoqIiG6kd2Poipwykoor07jpvi8ibUqwtzJ1LBnPBydKCh8ajbz9YDAsqC00SLTbUedMJPuFvd0SObxDYJcDTRYxW7zdIi0sjSpaa+uQWIKwHdu2oySI2WQ1LSb/0jfIlCWyV9Zy4MGJnc99CHGetZH/nfs2yzefR1mrC/EGNwf/O4MpWUXcGLeCC+1HcIlOqpUW3IoJRYeFjtpRamoZ/WwS/3f4XahmkdYEqdfEB2PniEgyBNY60axmPAkC98TspmvCgd4B2lJCAo1zswjcUcMzI9YAcL1rXEfguzQ+l4oL4miY5iExvpFfJK3vlsgRaYI7Rwg8PG59pE3pF0ptHfbCBkylwV2r7aK
ZXySt5xrXxJAdRSJJ6gcSd1l/xHNT3mehA+baWpk8cjW1Kix46T5+O+U9zrOWEy3K2ITI1gdt/1GseTIIAs5oF8OKkxmbf2+3cjGHVZJ3Rm7niNBn2ns0wkMVk/ls1VQyNg3uhyFiojtkdg5oasF1VGXRkWt7nF3VC8FppyFL4i9jl3ZEJRQvEHGMn4Doh5YUlVGTivjv9M+IFt3d1rFY0pjIh7smkp7nj4T5QNvOEeOOtCXAhPZ0Sy4TyK7PQf7uSER74iFoGtTUE787lnGb/q3jcFp5q25b25wM59dHkSbmsmtcGgsd+7EIJiySiRhR5c3pS5hgdodsK1QRaOaN+ilsWHc2mXyjq62a39cRNaO6PVhbWsgsjO1esD3ZI0LP+MRneiILDi2gpD4aZUc0qRs9iAcGN9pGV9G1FFv4pGECWaYNpMrykNg5QK1vIGZHNWXvZvLgHVM6jtuPaeDTUcACCqZGjSW1szoOmeLdNNtNoAqYHD4UTeSD+sk9Xv7hrokk/VPGsauESMlFT0kc7UHvpng3nngbwyzmiC4qdiJqfQPR2yowNyR0HBOL8yObHNEFpbqGqHyNvx2cQl2ge092xQn/LmyOY/feDEaviHA8uaoE18MN4/KMA+Vkz7Ti3QxiiwPYi2sRiitQBrmToKvojvjax3tJU1GnCsx37cGjmkiKi+xL0bxe1KOFjPD6WGua1nF8+LdNqA36RVZoDY0k7Gxm7V86bbC2/RfETBVOqkjt8fr0w34cu0sIlJaF29RekVs19taPYI2rc1v7ykA8K9+ZidUN9rImNO/QELN2NK+XQEER5i4ZYJGfaQgldk8DghrF2oRpJy0rt2pk5Ptg656Tlv1Xpq9nmvJJCUpZBWqYeuK6iq5pzXYyhCl8WjmdFamTh0zCgRYIECgsZviLxSHH9ZxVVRobYesehm8d+G9EekDsKvBTvCGNuwtvDR7QQPBIwRnqtt7Cv9pM9WDQW9KRwcDp65mGux3p7tM1r95O2mq9/6qBHvT2bg2hNTDoZAisrWdgYGDwr4MQ7owVAwMDA4NOjJ6ugYGBgY4YomtgYGCgI4boGhgYGOiIIboGBgYGOmKIroGBgYGOGKJrYGBgoCOG6BoYGBjoiCG6BgYGBjpiiK6BgYGBjhiia2BgYKAjhugaGBgY6IghugYGBgY6YoiugYGBgY4YomtgYGCgI4boGhgYGOiIIboGBgYGOmKIroGBgYGOGKJrYGBgoCOG6BoYGBjoiCG6BgYGBjrS5xbsl4g3DKldKz9X3xV6O2fYOnB6s/VMsRMMW0+H74OtZ4qdcBLRNTAwMDgVfPOnUDHLhDfVB34RW4mJ1Kc2RdqsIYUhugYGBoOG6NeQ3AJCi4zgF5A8kbZo6GGIroHBGYw2YwL+KBPWyhaE4kqUurqI2mPNO8ZIdxz+KBOComFq9IWcl8bn4k1yInkCmEpqCJSURsjSyGGIrighRTkhIa7H04LHh9bQiNLYqLNhPdCbrcdrUBqbQVUiY9eZgCAgZ6ajiQIIAjQ2o9Y3oHm9kbZsYLTdz8EbbcgjWjFviyFljQYRFt1ASSlCSSnmXs5XXBBH/WQfQrOVhG2pxH1jQvD4CJSW6WpnX8gpI9Gs5mA96YqmIagagYIi0AbuQg6v6IoSos3adxlFQfXoPwYRZBlBlhHjYnGPH0HFzJ6ribUK4ve4kXcejpit7UhRTnwTsymbF/pMU9a6MO3KR6lviJBl/UOwWBAkqXtlBtSWlrD+bdFmo/CmEahmQIDY/SrR2yqCDagvBAHBbEaQQ5uK6vZE9CPXfj+/WrCMqx3HmCwsxrPHiXl3xEzqF/XnenlrzlvUq3aeTLuKopRkHOUaMW9HQHR7ebcVV6bhSRDQpBPKayD6IP31FpSa2gG///CIriAgSBJiXCzN0zN6L6aAucGPuPG70/pyDAQpdSTe9DjqR1mone0l/5JXeiz3Ul06/73uMlJd4yNmawfxsZTNtXLgrlBbx6r
3klUSA0NVdNtEVhibjWe4A8USKrqiX8P6+S40v6+nqwfl74sx0bxz9wuMN9sAGLPxR5gbEjCfRHSl2BjUzBG0jLSHHHduKjithndaCAJi1LAu99Nbv3IIogkoCCx0NHPBlDf5bOwIHtt0LTFv629Kb+/2pvu+4J6Y3bhEW7drCvzN3Fz6CHGrjgz4/Q+q6IpWK4LTgSDLaE47NTOGs+XZV3stX6208PPKC8m/MgGl6riuYlZyzUguWbSZF5J39lnugZgiHrjmf6m+OnK2tiOoGqIv+OIzTc6O44pdQ3Vag889ENDdrj4RBKTEBARB4NCjZt6e/r/MsnaGhyuaykG/l0enLyRQeSw8JpjNqHFRiJz6O3NPzabq31vZN+O1kOPTH7mH2DWgVFdH7iN8JuIROR6IwqtVscUTxWMbryNn8faImNLbuw3SXXABMk1Otjz7KnPr78L+ZeuARmiDmhxRuXgy9n8IPLf5A174Yil/f/q3fZaPlxz8Jnk9P/5qG1JiwmCaMugMBVsDBUWkv57Hzf/3kZDjGxc9z+FHbPjnTIiIXb3SJri3bdjCM5s/ZNvsV5hu0dcE0eHAc9E5/O6jN8kxncTVdQos+/XzFC8ejZyeOmi/+a/AmJ8e4Km3b+Gp45MjbUrEGJSebt7bk5mSXcSNcSuYZz9ChhzsrkvCyTXdKVq50lHBpk/qcCsOvn39HJI+LSJQVj4YpvVI8RMzueTqbdwf/xXgpEF183LtRFb/bG5oucvhJ3NX80BMUTdbDzw4EXn7QX19vJqG5vFgaQod0iRKDkyWAJqss6L1gDZjAgUL7cybF3Qu2qQ6FtiPYRPM/aoP4UA1CYw1209e8BTINDlRTYAY+aTOzNWLSX9HxLb9KEN9KlVtakLyglsxRdqUiHFaoitFRXHgN2N4dsZyZlrLiBZlbIL9lBuXU7Tyi6T1KGjMSZwA5vC+EE+Kn4td+0iT7Rz2t/BE6VXkvZ1L4pehsxCj6nN4a88Cfp+tMfysKjaes6LD1mtcE5GlEz3t4Udzu3Huqybnz//BxkXPkyg5Os/1mgOjD775UyhcKHLP+V+wOPo7ACQEnGLPgteseljRnMJzS24ktfE7/Qw9DSoCzcxd+giSRyBlnRutVt9oAcFiQRibzaFHzYyQgm4Nc6kZe/5xFJ1tOVWKn5iJJ8XP1HGHuDZmOy1q5DsJvTFr97Uc25uIEh3g+sk7+O3wbwfttwckuoIsI6WOpPCWkTx90XLm2ytxic4+r1lwaAEl9dHMTsnntvgNTLOECmuMFGyYmkSPs9uDikklSvQgCSKVioOdxamM/qSIwAn+GdOuoyQXDCN2dBKFQiKc02mrJkZG4bRAAK2skswVDupvgcQ23Q/3I+uLhkXT8cSJNJ7r4baJm7g9+jviu3wMIPj+rxu+k6ucRzs+FH5NZWdLBunLywi43YNulxQVhX9SNiXzQ48/VDEZcdcwrBW1qH1cL0waT+V5Ji5IOwoE/c/1qkjmP5qR6lo6Q/V0RJAkPMMdvDn9T0SJQXeJ6BcQ/IEh71tOmFXB/Zlfcra5gpKAi98WXUb0jqEzCdigupm343a8fhl3tR1JAMkewCkNbljhgEXXmx7HNTdu4HpnJRYh1OncoLrZ5bOxom5Kx7GKdzOILQ6w6tIJ5FxUyTRL4WkZPlC0GROIjW9imOihOOBjRd08pEMOAmW7upVVGhuhsRGLIDDsaBov1aV3uBoihiAg2G00ZTowDWBiaFBNkWV88ybgu6mO84YXc4HrIOfbSkiUnCiaSnGgld9XzwOC7/+tq61MzC0mUQrWkfWeRD4+cDajCgavFxFinyuK6rNsPDzv45DjK3aeS+Y2H0JxRZ/X150VxbBpx1kc/xXtEQIqAnJxVdgm/U6KIKBYBOZY4UxZr6q9nlyWvLGtg2bns+ZUCnamkLOuOmIuEVOjD3eZkw9anCx0NNOqKjQfiEH2CDgn1DNpeCmTo4qZbT9M+/v3an6eqJqKud4
Pip4hY5KEzyXzdOIeILTH2qC6WetO4Dd5l+Fek9hxPOWTEpSyCpw50yjxxAKFA/rTp0vBQjt3ZmwiVfbzeWs6H+6eQObGvr9kWkMj8Xvc/Pe6y3jgmv8FoDVBwhnt0j1eUzCbUTOS8SyqIzYC7o2uCRqaw0rx7Spvn7WMKRYFi2ACnPg1hSN+Ly8dv5ivlwYnTFI+KeHA9ASaRlsBlfKAxutls0n8JHxDTM1qxpMA90WXhBy3lpiwHSkncJIQO3eCwCXDj3KuZej0xs5EBJuN4vlmJtiLsApBySn2xuIoE1D2H46YXaaSGhK2pfKL9CtZOPXv2EWJYeNq8QVk7sndwA3DDraNyjrf/3HFy8p3Z5JRUooywEihQQ0Z82p+NnpieOrgAsR/xDH8zc6FLlSrFcFiQTWDLPY1qAsv8+btZnHb8HdbcyZRuyyYvuh7QQ6lsRF552FSYsfDNcFj1bN9RBUlY2lp0TUpQZBlWkba2TnlNbqGteg1smxP0Ci9wIpi0dg0p92vLOLV/Hi0AAV+kdeqL2Tdh5NJfTH4bFWrFUhAFILvviTgYt/RkeQs26yP4V1QTRqaxRT0yQzxIfn3AUEQUKIDOIQwxWEPkEBJKXGbzRRkD4ep4BJt7JyyvEuJUBeZoqk0qBIZbx49rZHOoIrudq/E/923EOvyaKKWbQ5xNAbOG4tvmImW9ABplppef0M4Q9pAwfw3GJd/L5kFQycpQZdn15agcfDf2xM0ghVT0VS2eE2sbZrAmoox1G0cTurTnR+zwNSxJMY3EiV4AZ0mUDSNnjwwvhQfrRnR2KtiggHuZzhDvc2oXi/p/4At07OZZNmNRTChakKP70Z3eqkjPaGi4ddO36UzqKJ77+5FOP/mIvqrAkhKDDk3+nd7eDLxq44JM4PBoyLQjN8rIwQiU4sL/M20ahK3bbiPzLcFnF/uwEl+SJmM5w/zzIg13SbYIkHB/DcY4/gRyWoW5tVntugW+JsR/YAaudFjXwgWC0JuJq++8vu2sD0bDaqbKu8wJN9QUN2To2gqjaqHooDEQd9wtNMcHQ2q6H415U1qJym0dktahkxZwt5L6JDB6TF36SPkLKtD238kIp2HWx5/mLhvKhlTfQS1tXVIdGD+Vbjl8YdJW5NHoLo60qZ0Q3Q4cM8dx6uv/D4kMWXO9juIejuKpM++7TN6ZKiwwSNz5+b7yH2oHE3Tghmpp8Ggiq5LtOESg1+GE+krdrfA38wtjz+MuVklY18FannlYJrVIwsOLaDi3QxSPinhZO5wOWUkFVemcdN9X3QcG/fKvWT8o0YXW/uyA0ByC4jNHgI6pABrpRVkLoW5O+/qOBa7+ShKXV2/U5BfrMvgpdWXMWZJfVgbnVZaQcb7Fsap97L/P0LXq3hjytusHTOOVf/fOGpPcIVA9wQaCNbhTFkie2UtbiXYY1+/9hwyP2iBzZFZacbSqKK1tg5Z33RPiSk+v4zcGtnFo04FBQHFKxE4VjUozzksC970NzmiMzj+XtLW5KG1tqJ6vWFZP6A9kePR+DcYJpopqo0hociPUtZ32BC0z4IL3BOzm/bJq9iDCpRV6lpxerJDb1SPB6GwBHtFVccxpY9Gf+JzByjxxGIvF9EO5Pd4zWDaKpUdI+ZQdLdzUywKZ5m/5cphu1iWdB7vZJ8bcv6WiRu5LfYb0uRQwbCL5o5EHoDpaaPxO0386+ZX9YyUkEDj3Cy0O0N7hbN2X4tjlRP7obKTdnZ0QRDgJDHuiiYG/b6D9GEbkOhqgQD2slbGbfo3AP40+c9dQob6T0hwfDgXDhEEsFl5eN5nTDY3YRHsqKqIEND6FHhh0njqzoqiYZSAfVINLtHWEUCdVNAcDBfTEyG43FzX1Y8WHFpAzGEVrSm8SyN2RQsE+vdh7OG5AyiIiAHCt6pYF9TWVoblNzNp282sO/etjmdnEUxYBBPjzQoPxG3k0jl7Qq7LNTWQJNl67EC0z0s8VDEZ8z47lqq+kyz+FRG
cdhqyJP6Su4yuE6fH9iYyalcTSkWE4pxPpLGZ2P1qh5a1c9+49V1CxgaXgYmuz4dUUE7Sn7IAuP/eHzJleAkOuXu8q1308WTijh4FWUGjyW89+Zqmg4AgCMyzHz6liby6s6JouKqZW3J3MMFeDEC1omD/qwuxMA/Fp18IjBQTgzs7Dk9q6N8sXpVB2q7jqENhkfWuCAJSXCz187JY4FhBjBQcom/1+tlQnoXtuD4ypfl8SIUV2JeOpnaigusEDTUJEimykxT5xFjrvjMsX6zL4OM155G+0X3SJIvBQrRaEdJHUjNuaO89IMXE0JqbSHOuj4mW0EgVU6OIVNtMYIgsHq/WNxC9rQJzQwKaLFA1yYQmw86UdC5zHOjI+BxMBvb2NA2lprZj5tcaN51tcbHdF/0FAnZ44J5NJMuhouvXFEoDMgfrE3Gibyplf5ASEmjMErgldwdPJuwHgr5qjyYRvaGQgM6hRlracCqnmbhxcqjvMW5/AK24fGjtgCBKSAlxtEzN6JbE8aeqebi3xJO4t06f3mFbXXWtz2dl83hyLcGFlFLlBlJlEad4aiuPKZrKBo/MS6svI3ulG2n3UZSmpnBY3g3BFUXdxFiyLwuvW+Z06a2u7vD6kFsZcCZXONC8XgIFRZgLipCiXXhvzsJq9zE5qohhYUr1H5RPZtTfNhPV0wlBQE5KpPYuieQuhxVN5Yjfy2vVF1K3cXi38CI96MuTIVqtNM7NQh3X3NHDBQigUKk4TztkZCB4kp2IExp4Nuk73f/2qSJFOfGcnUrJDQHypyyn3f/crHpYd3Q0qZu9qLsO6GeQpoHbw3+vvQzNFmzwZ48q5Y4RGznf2vsw1ypI3VZHa1Q93Ln5PnKW1KMdyEfVwUXSQZST+hyRb0av0u9vngLtO4O0ZA7DNLmuW12978APic5X0Fpa9bNJlhHaettd175t3zmG9g6BKEJ8LCtnv9xl4i884Y3hH6f0kKpaHGjlpeMXBzOWnu47G2wwURBQNBVJEHtfIEYQCJw3lsAdNSwfu7RjeOTV/BzxB3iu8HKkgH6V5owkIY6K8y3dduP4sGUkWrkVU6N+/ud2lMZGRt+/pePfVYum8/CCZOZm5/V6zUzXES60H+lYMF7RVMoVgdyHyiOz9sIpBPJHAmFMFp5kJzVjZbJjuq94Znk1FvuXe1HCvDVTO6LdjpCSTPPYuODuJP/cjdbmEmzfOcbnapNAAbzDJKxCL+OvQXzuYRXd3lbs/331PL5e2pkiqgeaprHfm0yWXIFTsGI2BQg47Fit1s4IhLZFt8d2SeTwan6OK14O+V08V3g5XFQ65NcsjSSCLKM5rASs3Wvpr/56E7lLywnkF+pv2Am4lm7GtRT62ov29z+5lv392F3EIMjBB+28NecN5tkiP60oyDLkZJB3s4uDP3qZw34PP7n6DqTj9aBpFCwaybXXb2hbP6Yrob58r+anMhCP4Bk8527YRLe3wOiI0BbQ/Oc5U9n8aS1PJn7FV1Pe5D8SFnAwaRIJr34TssvBAvuxjjVgn6iaysp3Z5L+xhGkQKshuCfBN28CxberbJrzPOEanhkYnIyu9VASHOSYrPzuozdR2+LDYkWFWMnCiQt2ncgTVVNZ+c5Mcv+wZ9DmIMLa0w3Hiv0DRtNQjlXhVmwoaMSIdp5K+Zh//uchtt2WCXTuctA+udKeQJHxaSmBCO2LNhQRrVYCU8eS8XxwhajCh0Yjbz9I3XUT8d1Ux9tnLesWahOJZJLTJXVFKV/6pnP1DYl8NET9qEOJMS+28ou/38GT5p7j9B3fHEXRKcxSk0XMFm9HPZQEkbFme0fiVn9yCULa/yBOlg7t2JMwoHZZsCJDtvPDYflc6wz69brucjBr97V4ViSRvO4YSmm5IbhdkSR8LhPPjFgDwPWucciShDteZPrwYqZYFNrXem3faSH7g1q0o0VnTBYSgFJaTlTRcArrYoBgQx0haRz6XTKjn41F239Ul1jjDvoRyB9
JtP1HsebJva6o31cSzWBjP1SFffUIzndey8ZzVnQc72/iVrf2P4joLrpLGhP5cNdE0vP8ev9pANatPYfXFlRzZ8xOEiUHTsHaLSJzwaEFeP6RRNL646hFpUNvh106V7lPKm/R3T4tEMBW2cqcLXcDkFbeGkyYkMApebEIJryany1eE7d//QA5y+rgSOEZJbjQlgRU3MTxHTE8kjaJ3w7/lijRypvTl/DzpDux5Mn6im4Pgfxppfq//97Q/D59n0cfKOWVxH8XxZExnTu+nIyuO0fYVw0jaUN42r/uorusbBqxW0w4dhVHJA0w64MW/hQ1l63jM8hw9rzEZMW7GSSvPRZ84EMk/tXU6MNbMIwHM6agagJV3mE4l+ifpAHdk2PE4vxuNni0AGubJpDxlhhciGeICMOpIhRXMmKDjQ9t0/HP75xMkdyK7it7dQ3kb6enZ28QjL+VK+uIynPyYMWUk5bv2qai3Sr2g2UoFcfC0v7DJ7qahuTVWNMa6qg+nJ9M9iEvgdKysP3pPtm8m8yoKZR8l0WBPbvHIu27XAwloTCV1JCyNpm1pdNAA8mnkfTZtyiR6D2ekBzTPrkot2rsrR/BGtceapUY1lSMwfnljqEc5XRSlLo6zDsVMlvTWVs1reN4SnHJgHcOGChdA/k77NPVgjMLraGRhJ3NrP3LtH4U7mxTqscT1g5h2ERX8wewVjRz9/pbQ47H7JQxVzVEtLKY1mwnvo/zQ0dqOwmUlGIpKWV4l2ORD8wJxVXgp3hDGncX3gp+EVupHJHEl8FGaWyErXsYvrXz2FCsIwah9PTeToYebSqMoutD23WAnMXdzxlf5+8n5tXbSVsdaSsMDIY2Z8Z2ogYGBgbfE4RIrCNgYGBg8K+K0dM1MDAw0BFDdA0MDAx0xBBdAwMDAx0xRNfAwMBARwzRNTAwMNARQ3QNDAwMdMQQXQMDAwMdMUTXwMDAQEcM0TUwMDDQEUN0DQwMDHTEEF0DAwMDHTFE18DAwEBHDNE1MDAw0BFDdA0MDAx0xBBdAwMDAx0xRNfAwMBARwzRNTAwMNARQ3QNDAwMdMQQXQMDAwMdMUTXwMDAQEf63IL9EvGGIbVr5efqu0Jv5wxbB05vtp4pdoJh6+nwfbD1TLETjJ6ugYGBga702dM1MDhdpKgoDvxmDFhVEDSit1lIXleNsv9wpE0zMIgIhugahAVBlpFSR1J4y0ievmg5w+V6AO5UbiduvxNpf2TtMzCIFIboGpw22owJmMprUSqr0LxeRKsVIT2FsksTuebGDVzvrMQimHi/OQqxSUb0uiNtsoEBAKLVipicRGtuYv/K+1VM63ehBQID/punJ7qCgJyZjiYKIPTqN+5+mceH1tCI0th4Wn/eIMK0vf+DN9qI3Z1CwmYran4xgiuKunPjSbs+n6cT9wAmAH65/woSdoCppJqBV9l/TUSrFcEVBVHO/l2gaQiqRqCgCHnkCDSrGZpaUOsb0Lze8Bp7hiBarQhZaVSeH4dnfv+0yNNqJneb/bS0a+CiK0pICXEU3jQC1XJql1qrIH53K9KW/Wh+34BN+D4jOhynfpGioAUCp/UVPhVEm43Cm0bwqwXLeCbzMqq1eBI8PjSbhdrxAt+MXtVRtkF1498WQ8rXZQRKy3Sx70xHtFpBkgAQ0kdSNzGW+px+zn1rIPog/fUWKq5KwxMv4DqiErOzGvVoYXjriCAgmM0Ich/yonNd7QkxOYnK8+NwXFfJjrP/EXKuWfXg11QUNCQELIKMRZA56PfyaPwtCK2tA7Z9QKIryDJSfBzVl2Sx8p7nyDT18+vbxkt16by45nJymrLRdh0YiAmnzsl64trQiTgRLBbcc8ehmvo/egAwNwSwFNUQKCgKk2VdEATEqGEsu/t3jDWZ8I35nF+edT2OigQ0WcCfEtqberl2IrbjGvj84betP5zCyKwDPeuIIBA4byy+YSY0CWrGyWRflh/yIesLRVMpDLTyw9JHuOW+NdwVvZd
b8q6j4r0MRnh9BAqLw2a6FBuDmjmClpH2XsvoWld7oTU3Ec/8xg7BVTS149yK5hR2tmTQ5LcyzORhqrOARcNqsAsK1bOSiWtqQampBVU55b97SqIryDKi0wHxsVTPGM6WZ18FTk1wAR6IKeLsq19jccKt5P5XbND4MCKYzEhxMX2W0fz+sNvRHwSLBSE3k1df+T1jzb1X2p54/Ng5fPjO+aS/XKeL60bTNPZ7k4EKnltyI7l/L6V1TBLH72glf8bSkLKrfzaXxC93E2hpCbtdJ0OKigKbFeEUhVe3OiIISIkJjP3dHp5M/IoY6dTqAYBXC+DTRDb95mUkQQRsfJr7KQ/eMYW1pmkMfzF8ouuemk3Vv7eyb8ZrvZbRu66ejFbVR0GgU0CfW3Ij6X8vJVBYjDQqk9WLprHo7lfINDnZ8uyrzK2/C/uXragDqM+nJLq+eRMoulVl5eyXsQoqAxHcdmZbA3w2+w888M5NcFH4KrLocOCeO46XX/mfXsscVxw8U7ggrHb0h3ZbX33l9+SYrKd8/ZOJOxhzazk/H3kdo+/fEgYLu6BpKFXH+fOcqQiCQGrjdwTcbhiTFN6/Owgc+M0YHp73GfPs/Q9b062OtAnubRu2sMB+DKd46oJbrbTwy2PzyLsjm9999OYpf7z1QNe6ehKqlRZ+XnkhBVdFdxzrqM9h4JREV5NFLDYvOSZr29czlAWHFlC8OoO4fd19HQt+vZZ7YnbjEm0ASIJItAgjHA2UD9D43mhYNJ3qKzxckJ2HSVRIt65jjKl3x/Mok5eXRi3nt1su7Tj27evnkPRpEYGywbaud1S3B/u2Qm7/2U9Y9uvnT9ltYxFMDJcb0KynPuQZEJqGcqwqOFTXNBoWTcd3Ux2vnbWM9rybAn8ztzz+MLGbj6K4PfrY1Qd5b0/m2RnLucRWQZTY/w9bgtQUlrraE4IgMMZ8DJtg7rVM+3M1N6sIaug5xSzQlCJxwVtbSe/LrxoG+qoD7bYWXw4/mbuaf4s6gPmSd3h8ybWMvm2HrnaeiFsxEThW1XmgzZWkzZjAoRtt/GrBMmBw6vMpvxFB0LoJbuaqOzGXmYjbp5G26zhacfeqWeSJw6+F1g4REBl8P5k7XuSC7DyeGbEGAJMgIgm2XstLiGTKYkd5gP+8zcye6LGkfuLUL5BfVVBqaoldA1elPorSe5sLYeaC3TyevIrsdpEegLvytGiroO54kenDi5liUWhWfaxoTuG5JfeStiZvwP6vwaI9SePZGcuZb6/EdYo9SJdo5pb4zdz9yu2M+ekB1KamMFkaRKJ7Ozvsb+GZisvY/OnZSD5IW5OH1tra7VrB6YDZmTyZ+BX2AfSUB0rNnTOQrz3Or3M+7rEOtNvqGDeBUl8MMZKdmbYSJmcXEd6n2TP2Q1VEL09m7o5HsFVpxGnfdCsTGGZCSm7lascxFE2mVZOI+6aSwGnU59P6DDaobubtuJ30dwXshdVQ29AtJEW0WqlcPJnrnSuwClLH8YpAM2/UT2HDurPJpPvNng6aBFGym3ipMwKg3Vavv/OW02Lr+OGILfw4qhqTIIWUfyrlYy4em41vm86B/KqCUl1N+nI7mtS/meqN47OoTLSTbQqzbSdBk8ApealVvLxVfy5vfn4BucvLCFRXR3aiUhDAZuXheZ9xia2iQ3C3ev28cXwOG4qze7xsXvoRbovfwDSLCYtgYqqlgZ/MXc1n1hwIs+j2hISGQ/IRcGgE7FD649Gkvl+KUlreMZMuRUXhHzOSsgu1AfmCT4faszX+T+Ymzrc2UKsEeq0Doh8CqkhFoJk/109h99ejyaRaV1sBlPJKXOvdRG+3g9fXLYxRmDSeihlmLs7eiyQIfO0VuW3jfYypPnJaHYhTEl1Tow9vwTAezJiCqglUeYfhXOLCvjUPpbau86G2BRy3jEkkYBcZv2g/F9qPYBM6K0GJYuH9golkrmgesPG9YTuusfLIWShtw5uutka71Q47qzLT+OXsRKK
mvctCR6gd2SYn1mFeFIsFqdtfCDOa1u9ZXW3GBJx2N1YhAPSzaxwGpPG5tKSopFprKVEsvFswiezlLRGdne5A0yAQ4J/VY7jMcYDVrTHsdafwWclY3FvjSdrac9jiqismkHNRJdMshSiaSq2qsrYmJ2I99iRJ5sa4LVgu7owA+dIznaji4diLmxAbW/FmxFE038LD81Z2u36r18+G8ixsx9Vu504XbcYEYrJqOddaiFO0st/vP2kdCKcG9AfN6w26x3pAGp9L0eUuMucVsjj+KzyawtqmCWQuEVB7GF2cCqcmuiU1pKxNZm3pNNBA8mkkffYtiifo25CiohBcUQSSY6ia6MR7WSNmOcDLaZ/hEoND34pAMyWKhY8aJtOYH03i1s2ndQM9EbunAUGNYm3CtOCBLraqXWw1N2TgH+bkj8lzWTjmk0G3Qw8KFtq5M2MTqbKfSIpuxQVxjJ5YxGz7YWoUB/XVThK3bo+YPSeitbRSuGwSL9xxMasOjMN62EpUgUri3jrUHsIWpYQELNNH0xAIdhTcmo9/tuZQuGwUSS3fhtdWTePLljGUBcoZZ64jSbJhEiScopU5VpiTvLOj7JU3JFFcH83xHTHYjkXTlAXXzv+G+6JLQn7zsL+FJwpuxPt1fPCeB9nmrvWwQVXY5s7ptX3LrRp760cQYzqb+mPDSK6tRsjKCIrzEAjdlFNGUnRlHNnz83ky7SPOMUvs82msqRiD88sdp+0QPSXRDZSUYikpZXiXYyqdQdz+SdlUn2WjMVtl5PiuAced/tSPW3L4a/F5lBQkEJ0nIlqtHUI4WKjf7cf1HbhOPOFwdCQdtNvamqzS6u9ZrCRJRTWLnYkKijLotp4u8+btZnH0dyGuEb0RrVYapnn4fcYnnGsxs6aPjoAgy8Gg+bagf72eqerxkPDqN6yLn0nadj/2bYdQqmt6FZ/mWZkoY5uZYA+GVnk0hS0NWSS8+s2gC1YImgZuD/+99jI0m8KNk7dzS/QWUmRPR5C+XeysrytzPgPgodTJbKtO5+LEAp5N+i7kJxtUN0+UXkXZp+mkfRqexYa61sPNHoX3yiYTlSf2mOTjOKaSvy2VvBGJCH6RgkXJiD5I+33VafciTxfR4aDiyjQm/mA/vxi5kmyTk9JAM0vrZlO3cThO8k/7b5x2GjDQEcRdfAU8PPfjbl/ZrnxQOZGadcmk7g8gKAECU8cibvwueDKMXznBZA5JOCi57OS2psXUUXSWizhxPIIC5gZ/0NYh8DUGggkKJ05dR4DA1LEkxjcSJXiBtiiRnh6RICCljsSbHofPJUfkmaY+tQmAkzkISn6g8NbUvzLPpv/zVRobO8KoPv5/M/l6Vha50VUdQfo3O48DhEy0vZC8E7r0gDt+S1N5qXYSeW/nkvZJYVijcdqf1GZ3NkWHh5NSFqDl4vHdygmqRvI3Cq0JFmpne9m78NVgptdbCyMquu0acdN9X7RFWgVH53t9cby381xynt40KH9nwKLbNeFg9CkEcX+a+ynkBv+/Mz4uuNiEUlMXnrRgQUCKi+HlV/6H8ebeoxj6Y2v+lQkoVccjL7xt8Zw2qRZJ93CFUDKeP8wzI9YQLznwan4qA/EInlBPuBQXi2AyUbBoJNdev4GnE/cMvWc6BGn/SJQCclYGn980jYl3v4CI1mvoZjuKpnLY7+Gfj84m8atdBMIoaGWtLvb7huEzNfIf0Xk8eE0hXNO/a5Wh8Nq7aYSNBtVNtaJw0JvRrT6fDgMS3RMTDjJlaUChKfGSg+eSN1CwOdjvuP+e/8T61b6IDzF6Il5y8Jvk9az8Kpk/z5naqwNeFwYhgD5cPFE1lZXvzCT3D3tCh+HvWXkkbRW5pveJlSyAaWg90zOAQEERab+v4tEl16AmRPeZ+NAuuD+58g4sebtRw73IzfUenpMvpvDfR3V8VM905my/A+fSKFzr8sn17B80t9KAd45QTQJjTBbGmCwhPqZTxS6aO37nkue+oupHE5B
Hjhjw74UTp2jlSkcFoz6pQz1/YtCXHSH6E0AfCbyqjOymI45ViotFWjuCFzLfY4bVS7LsxCJ0xrYNpWfaTt7bk3l+1rtMsgRTPHd4fVy+63aKHx4VMZu0GRPI/810UtYKpHzYQO5bR06a+KAiINbUo/nCv6iUUlOLUnWczKVlfPL6bK7Ou6xf1zWobp6pGcd9//GfKDV1YbayO3LKSKrvmkHaZjvZK2vJlDt7tD6/jLlRQTl+fFDjsk/Lp9vX0GYgv3Nf7Hfs/fEI9kSFNylh1u5rObY3EVPjye33pPm4fvIOfjs8OGPtFK38Imk98x/LwPzmBKLWH0GprgmLnT0hWCwIY7M59KiZVEkNeQfvN0fx0+3XkvrRENmFSRAQTCYeSVvFKJMFU1uc9vvNUbxWMocWv5mN56zoeKbXuCYiS7oH6HVj2qhCzrOWd/j06lUbNdXDiN++L7yTaL3gmz+FwoUi95z/BYujvwNAQujf6FLT9HPbaBpKSRnJn5s4XpPB2NH3dityYiKPX1Mp8cRgW78PVecVB6XxuRRdGcfEH+znmRFrQp7prN3X4ljlxH6obNCXIR20HMGHKibzVcUoWjz973lJkkp6TF3HDKxLtIUnKUHT0DwervrqPiw2P47VTkZ914RYf/LFKtzZcXx4fDrMp0N4YyQ7r437Kz/K+v9w7XCAnqIrSXiGO3hz+p9C0liXNCby1PYrSPzEgvPrvJNOFIWb9p0jCn44klzT+5gEZ4ed/1swm6qaKHJHHusoHyPZg+syR5D2RJ4bY1cQLQabxlavn98UXoVrqyUikSu++VMovEbkthkbub2XKJWHKibjlL1cEfUd0yyRzZDRAgHUwhJi6huJ2RZqa8k1ycii0jG8bk+QWvvlRDJbBzdB6mQIk8ZTdJmL7Mvy+cXIlcRLnSn3Cw4twPOPJJI2HkepONbHrwyMgYmuomBuCPBgxZSOQ5+tmkr8Lg1nQ/++C6pZpDlZYv9MG+R0Hg9XUoLa3BIMbDZZsB8qCz5Mux0tbTjeJAemRh9s3t3tOltNPSPJ5r3Ec/nt5Z3xmRMtFgI2QNSvVylarQgZKVRNMjHHCu3eofebo3hq2xUkrLIQu6GEgI4fgRNJs9TSlKkSfflkGjJkrr1hQ5sPt9NOqcSKkOHmrpSvImZnTwhOB9k3Hw5J5NnYkkvBzhRy1lXr/iHTZkygcKHI4plfcVv0dhK7CINX8/NE1VS8qszH+88mNamOSfYisEQm0aArHUkHJ+iVZ5KLK2J2kSDJFAeaeaPuPP6y8XxG65wc0Z74kH5JIU+mfdTR625/phXvZpC89hhqUWlYFnwfkOhqgQCWwmrW/mVax7GMTU2IBwpP6vvomkDRlOLkrEx9FrTWAgHkL4OLagTa7PBNzKRsthV1fDOBMjtjqjoDtKWEBASnHU2WEP0aYkOEc2whuCPDxFiyLwuNFfzl/itIWG0hbn1JxBcIP99xiCMzE/k6PZP0mLqQnSNeK5mDfZcNQQP7xOaOLEBFU9ngkZG8asQiGES7HWXUSH6d9mrHQkMVgWa+qhmNKw99N9LssiPHvbM/51bX7hDBrVNa+bgljZXvzER2g5atkJJdT6pcS3uCTKPq4fOWcWhK5EMK2+9n4ZhdzLZW4xTtfNgSx593TifzHwHYqt+k24mJD+dags+r6zPN+LQztTpkx44uu3GcTj0dsOgGCou7rcnZ1+vtLYHioxMWZW5WPQT8EkKY40iUcRkUX2rhmss28WTCVv7Rksyvj99E+ustaB4PjXOzqM+WUGzgi1IZOX7whxmnhCBAXPSQ3JGhKWDB01YJp1lMTBu5GUZ2z0Rq8ZtxJ6so0QEuTz4CgF9TOOL3svib+8ipbEbz67+TgGi3w6g08hc6iO4ycFnedBa79qeTma+vr7HrjhxXO45hFzuH6XVKK6tbR/LzL64j9w97QFE49Mcx3DV8XYeAeDU/W7wx/M/a+eR6Ir8DaPv9/MixHosg49X8rKs
fQ/QOC6YvBif2tV929JD4AMHntdM3jCc3LmTcknzU5pbg3InFErpjR9tuHKebxBHedd+6LBLd3wSKFc0paOVWTI3hXey6YqaTqy7d3Ja9Y2bRsBpm3vMcN5c+gqVJQbvzOH/NXcZES+9LQgo6dsqk2BhaM11DckeGrXmZbIxP5XJHecfSnT2x8ZwVcE7nv/2awmG/j58cvZHc/yqKyCLy7YJbuDCWvB+/AnQK3B/3zSbzHyqmL3RcdlAQEGOieefuF9riRTvnSNoF97FN15Jz/xZUQUCdM7FbYkqD6mN59TRG378lIhN/IfRwP195YGtFGvYa/axr343lxMQHAI8WYEvLKFI/FmmalhZyXdcdOxRNHZQkjvCIriAg2myIUcM6DvUngaLA38xzS+4lt23Fdr1pXxW+k94Ft8DfjOgHVH0qjntq9pDdkWH0bTt47JUb4KJ3ucZZFRIS1huKpnLE7+UnR2+Ei0ojMvEnyDLkZHDkZleb4HZSEWjG75URAvq6OwSzGTUuqtuSp17Nz8ctafz8i+vIuX9Lr7tLKJpKjSJQ3uKCiCyYGEpP93PX9h+R/IYF8+rBX3elRxu67cYS2jFwiTYejz/E468e0sWesIiunJlO4U0jeOfuFzqO9SeB4pbHHyZtTV5wGbghzplkqx6M+ekBnnrgFvbe2L/A+A0emcXf3Bfs4epgX0/45k2g+HaVTXOep2sPF2Du0kfIWVaHtv9IGFZ87pm+dg4JSTrpIzlmKDzXdk53J5Tviw0nMrCMNKuVwNSxZDzf8+RClFzMD50bQnZr6Cmmt0F183LtRFb/bC5AcDX2mlpdJlNSV5Sye8cEpmafR80cH/mXvtFn+UjaeibsyKA2NZGxrIwtX09hbtSMk5YXfSo5lS0R3ZdOk0XMFi+JJ4RhjXvlXrI/qIUjYd41twdUk9Bjlll70gmKgjpnIrkv7G0T3FAhURBQvVLE9/uTU0ZScWUaN933Rcj9ZK5eTPo7IrbtR3X7KPT2TE9GTzvhCAENa82u07JnYD1dScLnMoXstBByum01JKmXbKn3m6P45f4r8G+LwXZcI/HLYKiW0tqq2+y1UlqOXFlF4pFYhhUnMza/eyB3VwQF3W0tfmImnhQ/E3ILuGfkOqZYFEBE0dRBWcF+sFFKypArjvUvwUHTIjJp1k4w4UDg4XHrO45VBJqZu/SRDsEdSivKXRuzndLrotk5eQyJ8Y38Iml9t/TvoZQco1nNeBIE7onZTdfhvLnUjD3/eHD97QjyUMVkVuw8F2tJ766wmMNq951wNO20kzgG7F7QJAa0nGB7EH/CKgspm8qDK7ZHwB+pBQLBQO6KY1hbWsgsjO2zvKBqutuaMKuC+zO/5GxzBamyiEWwts1MmwZlBfvBpv2Zngk0ZJqYNO4INww7CDhQNJV6VSTzH82QXzykBBdgstnDU6kfcWyEk2jR3W1uZKglxyAIaBIhE6sLDi0gbp8GtQ26da60QAB7WSvjNv1b6Invosjc6sOaX9H7xU0t3XbCGQwGLTmiv3y4ayJJ/5SHREwpENwep74B6hsibUk3ymtcWLP8pMoiTtFKg+pmoyeGx/ZeMygr2P8rE7ALZDpqSJSCglsYaOVnxdcgHS0LjmIiwSC0q0gnxwBIMTG4s+PwpAZ7hCFJBzuPoeq45brm8yEVlJP0p6yQ49aKWoTiimDb15lBS47oL+mH/Th2DxHBHeIkrLTyVNSVlGd9TZa5ijxfKm8enYFjuQv5y826TfB8n2mPE36x6hJddoXoi+9Nu0qMoy7HRHpqOWtaTVQG4rslHeiGpqHU1GJeHerjjmQo3aAmR/SXM2MAGnmi/raZGtsMfnv2VajDAoiNMrF7BKKW6Zun/n1EUKDeb2efL8Br1Rey4aNJpL66KaKN8XvTriQRQYGiknjuLr4VwSOR+4c9BCKwmedQJLzJEQanTdwb3xAXaSO+h9iqVdYfHc3B+kTqNg4ndZB2BTAIpkwn7j9MYpd
jEU/SGEIYomvwL4lr6WZcbXkmg7HvlYFBfxE0Y4sUAwMDA92IfECfgYGBwb8QhugaGBgY6IghugYGBgY6YoiugYGBgY4YomtgYGCgI4boGhgYGOiIIboGBgYGOmKIroGBgYGOGKJrYGBgoCOG6BoYGBjoiCG6BgYGBjpiiK6BgYGBjhiia2BgYKAjhugaGBgY6IghugYGBgY6YoiugYGBgY4YomtgYGCgI4boGhgYGOiIIboGBgYGOmKIroGBgYGO9Lkb8CXiDUNq18rP1XeF3s4Ztg6c3mw9U+wEw9bT4ftg65liJxg9XQMDAwNdMUTXwMDAQEcM0TUwMDDQkT59ugZDC9FqRUxOojU3sdcyklfBfKwZZf9hHS0zMDDoL4boihJSlBMS4k7pMq2kHNXjCZNR3RGtVoSsNCrPj8Mzv7HXcu4mK4798aQ1thAoLdPNvjMd0WpFSB3R7bjg8aE1NKI09v7Mhzpyykg0qxkEQZf7Ea1WBFcURDlB0xBUjUBBEWgaUkICgtOOJks9Xqt3u4oE/5KiK1gsCJIEgoAY7cI9fgQVM82n9BuZf9UQCkvQAoEwWRmKmJxE5flxOK6rZMfZ/+i13GF/Cz8ffSWH68YQ9ydDdPuDaLfDqDQKro3tds5WqZGwsxm27omAZd0RZBnBYgFAbWnp1zWVC9JwJwposj73IyYnUTctmbqxImgg+iD99RY0j4fGuVnUZ0sotp6v1btdRQJ9RFdoi57QhkZUhzAmC0+yE8Ui0JogUTvbS/4lr5zSb8zdcRf2iirdKkfLmEQ88xv7FFyAHJOD/0lbyU8X+yl9XRgyz3yo0i64hQtjOXBX9zrwYMUU1v5lGsO3RsC4E2jvjTePi0f0a1j/uRvN6+37IkFgyp3f8avkL4iXHLrcT8uYRBpubOLAjKUAFPibubn0ESxNCtqdx/lr7jImtn04uqJoKhduv0fXdhUJwi+6goCUmIAgCKiNTahud59CIEVFoWkamtsdtgd/8EE7b815g3k2NSy/P9iIVisBu4jF1P/nYZP8SIkJKFXHDeHtBUGWISeDIze7yPtxzx9du+gjYAvWy0i7GISUZAp+OJy9d/2Bw34PP7n6DjhU0LvwtrU9m1SLRK9ho4NKT3U10+Rky7OvdinVs+Ae9nsQA9r3vr6GN3qh7aXftmELz2z+kJL/nIicntrnJQd+M4Yj/+8s/HMmhNW0M4nKxZPJeOgQaycv6Vf5eMnBb5LX8+OvtiElJoTXuDMY37wJ5D1i4etFz/da5snEHfz01nc4+OuxOlrWN5IgkmOy8ruP3sR7wTnB3vqJdGl7TydtJEbqoUwYONW6Cp2C+5Mr78Cydjdqa2v4DBwChK2nK1qtBM4bS+4Le1lgP4ZNMPPY7cvZeUM6jQFHr9c9FPcWHs3E7svSWFU+jtqNw0l9etOg2ZX39mSen/EukywtQC+OpSGGYoF4SzMusbu94165l5hDCuUXaPxk7moeiCkCwCaYGWepQBDC18OR4mLhPSvJtkZEoX+9k/VrzyHzgxbYvDtsdvWHhkXT8d1Ux9tnLSNR6r0+WgQTw+UGNKuio3Xd0WZM4NCNNn61YBkQFN6xZjuqLHS6705AEATGmINtD2DBoQVUvJtByiclhGvw3rWuNqhuXq6dyOqfzT3pdUJAw5rXD3dJhBCtVgJTx5LxfPeooLVHckhYaSFq2eZ+/VZYRFdKSKBxbhaBO2r4RdJ6nGLwK3uNo4IF9hIUem+gw0QziqYx11bDKMsxHiu8YVBtmzaqkPOs5bhEZ8exZtXDhy0jeXrpTf3+ncwDFag6VJCaO2cQdWklN8RspevApCLQzNylj5D9QS1CaQUNWeMp9cUARR1lpD6e86nimz+FilkmvKm+zt+3KHya9TKxpzBeem1BNe+Pn0Bt9bngF7GVmEh9avA+qv2h5s4ZyNce59c5HzPFotCvAZ8+o/NeCQwzISW3crXjGNBl0rcHuwSLBWFsNoceNZMqqUh
C8P6KamNIKPKjlFWExcYT66pVkJnpyOOtW2Z0L1xsI/kbBccX+4L/1jRd2lN/KH5iJp4UP5g63Y+CpJEY38gzI9Z0K/+IJvJt/NlE9fP3wyK6gtNOQ5bEX8YuDRnW2EUz9rYKUxFo5r2m8bx6YHavv+OutxK9e3BNHCZ7sZ7QM/BrKtuaM8n8W/8ro1ZaEXZnf8Oi6YjXVPPYqE+ZYPYBVryany1eE7d//QA5y+rgSCGKx4Poh4AaPm9RQ6aJjPOLeSR9Vccxk6CQLds6GnV/uD16B/OH7aFetdGiWlhVfw7f1Mxk+Js7dQsVqj1b4/9kbuJ8awMWwdpxvEF1M2/H7Xj9wTo3L/0It8VvAEy62NUbwqTxVMwwc3H2XuxisP2025pU3hJSD6WoKPwTssm/zsJb0/9IlBi8v4cqJqPtdGErqUUNU72tPVvj/6Rv7qirFsHEJEsLfzzvL93Kfpo7gRWuKWS6x2Jasz0s9vQXQZaRUkZQek0KAJdcvY2LXftwiJ0fAQmNaNFNfA+jIpfJjdZzBFyPDLroSjExtOYm0pzr6zZD+X5zFN+2ptOsWChsjmPX/nQyPuh9Mkvyqpirqgn3wE4UBGLkVrzpsZjW7xoyM6c1V3h4IecT5lrrcYpWGlQ3Gz0xPLb3GjLeEtH2H9HFVml8Lg05Gj9O2sdFNgVFUykOtPL76nmsOMXfmuosYLatiHMtZhStmfHmL/jpLU5al9tBB9HVZkwgJquWc62FONsESdFUCgOt/Kz0apxLXES7VRoyZL66NJtLo/diFyLXA5PG51J0uYvMeYUsjv+K9l5utaJg/6sLsTAPxdc5+hBcUVSfY+Mnl37MHCu09+JX7DyXzG0+hOLw9HJ7eq4ALtHGRbbuLXi06SuiZnpYop1PTuM5EXM3iVYrQnoK5ZckcvG/Bd0D98d/RZps76Ez0X0CcCAMquhKUVH4JmZSNk/mxsnfdDv/Wskcjn6bgu2YiLlBIzPfh+mLHX3+5qAJriAgZ6YTbSrqNpNrFWQuGLafv9xxHqOrRyO2tjWyxmbU+oaI+ZmS4xpIk+uwCMGeVnlA49XSC3AsdyF/uXkQnQd9UzM5lujRNZxtLQHArfn4vDWHtX+Zdsq/9dHECdw66RvuitlKsuwkQ7bzi9SPedS0cJCtPoG293/wRhv3ZG4kVfbTLmBeLcAO70i+/TKXrLw6BK+P4xOHkxVXQ6pcS4tmxmT3I6eM1CfhpM1WTRQoviqO7Pn5PJn2EedagvYqmopPE3GWuCE2Gik2uuNST2oMjdkq90WXhPyktcSE7Ug5gfqGsNja9bk2qArlAY2SgKvHS1LlBlJlmbtitmKd7ec1z0WMqcroSKAIN10TNJQYB8fPdZJ6Qz4vJO9sK+Hs8boG1c0WT3cnwr76ZOTW/ts9aKIrWCz4J2RTNN/CtfO/4dmk77qVafGbceUJJK2rQj1aqGuPUrTZKLxpBD9yrMcihN62RTAxxwobZr/EnJJHkD1BUY4+pBKzs1p3W9up3pjM67FzuDP+K1JkD7u8Kew7mErOCQ57wWJBNYEshicETjGDJGo0qTaqlWMcU0TWVI9j+Iun7ouNnT+FJZ7ziZ/T1E0Ywkn7+//VgmVc7TiGXQwdJvo1GVERKLw+mCAx87LdPJ68imyTk8P+FqakFXP48vAmnAiyjGCzIdiDtqqWUDvakQSRaFElf6ED2RMqEL4olZHjj3X7bdWkodqtiFYrWiAwaPVZtNko/cEInrz8Ha53liMJZr5wD+PV0gvYd3Rkj9ecPaqUu0eu42JbEw/E7CPlilp+ffwm0n5fFfbIBdFqDUnQCD6vSj4avSqkXLPqwa+pHfNPrZrGJncqj228rttvWkvMJOf3v2M2eKI7Npuj11t46JKVvTamjeesYIFlARVyBiO8PgKFxYP1509inIAYNYx37n6B8WYbIRMRXUiWneT9uDOecMGhBVS8l8EInz9oq87xg6lPbeJLZvLdrJHkRldxqD4
RW5ncbbZaGJtN6wiVNEtNyHG/Njg+3rg3vqHBM52HF9zA3Ow8GvxWdhxJJ4fqU/4t8+rtZAhTeSVhDve1Bc+HnZO8f7toZtGwGhZ1S44IClpIwsmfwmNiR+LD+Hi8wyRW3vMcmR1C273ndWJdPRm+FB/Hz4shOt6OpbB60NqeYLMy/MpiplqLkQQTW7wmHtt7DY7lrm6dg3aqFk3n/964kOiz/8Ysq8jNzuNMvPsFHn1rYdhFNzB1LIE7avjr2KU9Jmi0s6I5hZ0tGTT5g66Sjjq/+PT9z4MiulJcLIcfNfOX6a8wy9p3Q/8091MevGMKa03TGP6iTqI7QD7N/ZTH7zyHD13nk/GmB83vR6mp1dWG9pn9MlkmylmDy9oCSaEL3hzq4dm7NR8Hfclog/ShcC3djGsplALQPCDBNeid9sSHA3e3C3/PQ9yBUjD/DZjfJcNukNqeUlMLF9Xy0NrreTRtFXdsvI3MtwXkL3sPn3It3YyvZip3/fuP2KfXh7eNjOcP88yINT1OiLVT4G/muSX3kv730i4fp8Gr84PT033PyqdZL5Mtnxlxr6fCk4k7eOCeTRxaHMUzhQvgIn1Ftx3fvAkU3ary0ezumVMjJK1jlhqgWmnh55UXkn+lK5iRZmAQbq738Jx8MWPcR1BbW3WbbwgHtzz+MGlr8ghUh6djMSiiO9LeQKxIyGxfT4HRvntreHXM3/iv+HX4Fsmsc84c1MSHgdKguvlj3Tn849mLWPar57sM64L+3mTZhCQ0McLRQLmOduW9PZkp2UW4TB7SbV9zgXM/Y0zdh0QnzrK6RCv/mfglD/79BqTrY3TvnfdFe1LCa2ct40xZznmH18c9+24n+jd2RL6LiA0v1mXwP2vnk7o6VM4W/Hot98TsDkmc6SspwdzoJ6Vw8JMjlJraoNurnyMr27ajJIjZZDUtJu+SMPlsBoilUUVrbQ2bO/G0RFeKiuLAb8bwaPwbDGuLH3y/OYpf7r8C/7YYbMc1Er/sDAUpuSGXxlwLZ8smLo3ey2cp55ye9f1F01Abm7jpjw+h9ODOFRSwVWskrT7CVSmP8tjty7nGUdEREwlBeRB1+n63P9dnZyxnprUMqyBgEoLB5pJw8phRkyCRKVt5MetdFrx0H6OfTULbfxTN7zvpteHGHS8yfXhx/5MSBoMu77+nd9tOe8LJL6/9e9tkW7BMvWqjpnoY8dv3EbbVOgShW6JD5qo7MZeZEP0CtuMa2XvcyN/mgSgiOh3sfzqFGY48rF0mhg/7W3ii9Cry3s4NaXsdKApKuCaFT0GklNo67IUNmEpPbUnV0+Xb189hTuIErOfV8MTYT1joaNb178PpiK4ggM3Kw/M+Y7K5CYtgZ0ljIk9tv4KEVRZSNpWD10egbfm5hkXTOSe9gOFSCzt8Zl4uvmDQEx/6QnW7SV9ehiZ1b+iCqgVtra4mfbmdJxJvoPSSf3J79HcdKaJWQeI8Vz47710Y3kD+Ls/1ElsFMdLAfHsmQSJbtvHm9CXcfts9ZL0/DnnXUdSmpkE2+NTQJHBKXiyCqSPR47aN9zGm9UhY/277+z/x3bYn6byyfw4+j0zOsjp2zM9gvr2sI5FH0UQ0RQhv8kZjM7H7VcZt+reOQ+nvCtgLq8EfQPD4UBsa0bxepNSRFN4ykqfPX84EsxuLYGNJYyLLyqZxuDgJ5z4LaZ8UdrS9oYicMpLqKXE4zgnfKEyKikIZl0HlDCeiF4a/uZOkz4rBbOKoeQS70tNY6Ngftr/fG6eleoIgMM9+mBjJzvvNUTy1LSi4cetLOuIZBVnGN28CvpvquGfkOkZIEiubcinYmULOuvAnPnSgtS2kfBICBUWMesfFO6MncfFZ+0hsyzSxCWYush9mzS2Hwx7I3/W5dqVBdXPAZ2abO6tjjYW+kASROVZ48NLPeOvwApILoyIuul3xaAHWNk0gc4kQ/kVO2t7/qHdc/NF6Ad+MzyLDWdMtScczYhgxcitiGNes6Am
1voHobRWYGzoXKLJtP4pSWxfSgxStVrzpcVxz4waud1ZiEYJuhWVl06j4NI3sHR7MVdUEyvRxhLW3b00WsR+qQimvPGlcu5yaQvW8VGouc/P82E/CZ5sriuOTHFzwb1up9jopPDYBuVVF0MAXq+CSI7OwzmmJrqZpfNkyhrJAOT/dfi2Jn1qI3dApuKLdDjkZFN+u8vZZy5hiUahVAnxVMxpXHkN3S5nNu6mtPpcm1QptA0pJEPUL5D+BikAzJYqFPZ5MPqs+i92lI3lg7ts9lqlX7EiCSrTo7gimn2nP4zWHAD308vVESkjAPwyi5OAHy6+pFLtjkb/cod/Ey+bdZEZNoeS7LArs2citnUk6UrSLQy9lMdd5IGTIrgea10ugoAhzl45Be4dEiopCcEWhWc1oNjP1oyw8nbiH9vTkw/4WDhcNJ3uHB2ndTt06Ml3bt8XqIXp5Mq71bpRjVX1e1zxxBMcv8vHUuR+z0NGMEqaXr1nNuJMEXkzeToPq5oJbk/AFZDQNFqQdYbb9ML2Fj4aTgdcsTQO3h/9eexmaTSH1IxHn13kEamoRLBZEux0tNYkjN7vYNOd5EiUHzaqPt+rPZffeDDLyI+9f7A3RakWQNERhaKy3+3FLDn8tPo/SYzFobgmhhxWvljedxV/yp1FbFYUgq8THN/HpOUsAgpNA+QpaS2SXzGuelYkytpkJ9siGCprWbCf+hGOCLEN8LCtnv8xYs512f7OiqbRqFvBH7oOljMvg+CQH7iQB1aThHxlsO9VK0H3wROlVOPebdUmZb0eQZYSUZPK6tO+xu+4lenvfS0gKFgs142RumLCVRcOCceUBFCoV56CFN/aES7Sxc8ryE452Cq5fU2hQg50BIVxfgTZO63OuNDUx+v4tnf8WBKS4WNSMZBqynNSOEdsWhw76RVc0p/DmmgsY/U5LZLc/OcnQMXDeWBLjG4kSvAxWvvXp8EHlRGrWJRN7XGvb5eLNbmX+uG82I163EL9me8cydD/97XwAon9jR96+CyXCe0+V/EDhral/HXqLx4sSYlwsNTOGYz3hQ1scaGVN/UxspZHb2apippNLFm3ukqYazJj6eeWFuBUTR97KJe2zQt1cChAUz+axcW0JGr3HvHa77oREHr+mcMQf4LnCy5ECg9wp0DRONoRSNJUACvl+P89XBtuLqdEPSvg+XwOuSYLJjBgb3bEzgRQVBTYr9fOy8CyqY+eUP4aUbw84zg0JONYfKS4WwdR3BMDo3+3hycSvdFv4+WR8mvsp5PZ+viLQjN8rIwSCNUz1eBA3fEvp9OB5ke/CN+v+PUC0WWmentG2u0HoxOXvq+fx9dLJpA4g5TlcNKseVrYkB+Owj1URxzdhWx/3VFDsGqrTiiDLPaYZn5hEpWgqR/xefnL0RriodNB76YKqIfqC2tM1DBSC8yPVioJHk6hUnDxXeA1cFEz9CXd7GZDoig4H7jnjuOfF9/jznKkox6o48JsxPDzvMxY4VhArSZy4QHi4A477zXtWHklbRYLU+8xupixhF4eG4PaHuUsfIWdZXXDVsUgbYxBWhnLiy8ZFzzNj5P1kvDUB+cseFrI6IYlqg0dm8Tf3kftfRWFxiwQKikh/vYWbSx85YbsgmLP9DpxLo4jeUIimaUiBVt1cMwPu6apmgSsdFWz6pA634uChuLeYamnoFuJU4G/mlscfJvaL/GAAdYT3Pxppb2CcuYm4HnZhaKen9WEbVDcv1U7ii5/OxlqzK5wmnhLjXrmX7A9q4UhkFuUZKEMh4eBk6LHTQl+0J8fcGLeCC+1HaO+FuxXTSSerIkGi5OCtWW/x1YQxFLm7x98+MvxdMmUrkiDyYl0GL62+jJy3G8KXvKNpKDW1xK06wtz6u0JOJZe3IBYdIdD+t3XUpdNyVDlFK79IWo+CxjDRjEUI9g4P+1t4puIyNn96NpIP0tbkBR+sGtktT9oR6VlYeyMk4Hz9LtRwJhmcYiB/u+DqtQD4QCl
+Yia3TNzIKFMj4NQn4aAftO9yot3ZvdcY7p0WTsa0UYU8OXIlqbKITbB31MMjb+USR/elU/VE83px7q8m58//wcZFz3fEs59n8TPB/C1+rftbjRGDC94/VDGZj9ecR/ZKN9r+o+E1VFVQamqxfxnqL9YCgeA6xBHoBA5IdLVAAHtZa0ggd1fcTRac+y1k/qMCQVGDLoUzcIfPHgPOddg0r2sg/y+S3cim0I+VpgkdgfxnguACJMyq4OaYrSRJwRGGLgkH/aBjl5PcZZw4aaqqIkJAi9gIYpjsJUHScIpWtnr9/LzwBko+yQhOmkXEok60QACtpJzsZRZmpNyP2RLg/vHruGHYwT73nHuoYjIrV51H2udepN1Hw9uBaUdVUIdQosjARNfnQyooJ+lPWT2e79jx4UjBaRkXDtYeHc0jmojL5D5p2Q93TSR2i4nsQ15dA867BvL7nSY0qfvEn+jXdNs5YjAor3FR6I8lS67FJJzC3ibhJqBgatRYUjur+6kCJ6bGyIXZda2rG8qz8H4dT9qnOtbDk6B6PAj78sh4cwKqycLz11zO52PHkeGs6fWaz1ZNJW2NB9OufJQhlKijJwNzL7T5Ssyre/fFDA1HQncSVlr5Nu7sfu1plH7Yj2N3MYHSssjcz+bdfe7OdSaNHRJWWnl95BxGp69grP7x6L2iNTSSsLO5x10wRh72YyqpjlivsmtdtR1XSdxbN+QSirRAoGPSLFObQklGMOmkNzI2NSEeKPyXFVwI4xbsQ5Wov23u966dQMSHcd8Xov62mT3zppI3Ip4ck/6LjPSG0tgIW/cwfGvP5yP5/k+sq0M97K+npJOeGOr3EW7OjLX1DL4X2EpMrKk/m+LBDoI3MDiD+Jfr6RpEjtSnNvFVy0wsi/zMd0UwI9HAIIII4cx3NjAwMDAIxXAvGBgYGOiIIboGBgYGOmKIroGBgYGOGKJrYGBgoCOG6BoYGBjoiCG6BgYGBjry/wM3HdjwVnH3DAAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "============The Reconstruct Images=============\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADcCAYAAADTE3J+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAADFcklEQVR4nOz9d5ikyXXeC/4iPptf+ixf1VXV3o93GAAz8IAAEJBAEpREcmVWnnf1aLXLfXa1e3evVvfeFVfau1cryl2K4pUoUqKRRBGGIAzhBpgBxjTGte+uNtXlK7PSZ34uYv/4MqtrGjOD6emq6gGZ7/P0dE9lVdb5IiPeOHHOe04IrTUDDDDAAAPsDuTdNmCAAQYY4E8SBqQ7wAADDLCLGJDuAAMMMMAuYkC6AwwwwAC7iAHpDjDAAAPsIgakO8AAAwywizDf7MWPyM++o/RkX1W/K97otYGtbx9vZOuPi50wsPVO8MfB1h8XO2Hg6Q4wwADvBIg35Kg/dnhTT3eAAQYYYFsgBAiJMAyEZSJME601xDE6CNBKg47vtpW7ggHpDjDA60EaCJkQBVr1SEHBO72Cs0duwO7a+0aeqpAIy0Rm0jBUpDtToDNiEbkCI9C41Rh3pYO5sE5c2UCH0d0d563PsUM27DzpvtGH8U6fvD8u6I9vb6EJmfz/jw1JvFMgROKFOQ4ylyUeHyIYSRHbEhkqnNUOcnENVa0lxKDeAV7ZFpuF5yEyHjrloE2JiBSi3UW3Ouh2Gx2E6Dje3jmxZW0Lw9jydQmyZ1vKhdEhaidL1GcNWrMxcqiLYSjCjoWoWHhLOUpnPTKvWOh6E9Vu7w759sfPNMGykGkPHBuURnc66E43Gbco3FY7do50ew/E1g8DoEcG70hS6B+Bfhw8nC3jK0wTYZlgmpt2E0XJhAmC3mK7y/ZvGVutera8U8ZVCIRpJd7YcJHqA6OUTwr8iQiZClFNC+9anpEXPdIvL6CqNVS7fXds7xGdMC1kyoWpMdr7CjQnTfyiIEqBNkAG4FTBrSiy17uY601Eq4NutRNSC4I7t//Wz7P/NUOCZSEcG0ZKVO8dYulDMXv3LfOJ4WtM2FV8ZbHgF7jUGOHSxAhlmcZdL2ICUisU3R7xbuPmtmUOCttGZDNQyhMOZ+iM2rQmDGIbzI4msxTjXWthLJdRG1WU72/b5729pNt7KGlbyUN5KUi5yWtRDGGIDkN0EEIQbP+gvh174WasyXWQpQKqmEUbAtkOEBt14o0q2vfvnp19yCQeJrMZRC6LTjmolEWUsohTBsoSKFMgFBidGLsWYNxYSyZNEN4d70wkE9woFZO5oDV0uuh2B9XpbrsX8Xbs6xOFSHt0Z4osf8rnL9/3DPemrgMwF4zypUMnuepNs3djGCMIEb6/e5vZFo9MuA4inyMeLdCcTLF+j4nxUJUPTF/kiLdMWvp0lUUt9pjrDHO6MsHcK2Pk5lJkFmO8663EY69U0WFwRzYBm4Tb/1tIhdYCKUQynpM5yvcKPv7AK3wof4Zpq4wrYrraYNEtMmS1aIc2SxkPZcotby/Q/d9zx5vDFl5KpRCFHOF4geZsito+SXt/yN69y/zVqVMoLTnbnuDrVw5hvJhn5Ace6VdN9PLqnY3XFmwf6QqBdJIJwXCR7kSG1oRNtyRAgNXSpMoKuxphbXQxNhroSpW42dpdMrhlY8BxEK6DzqXp7Mmy9G4b74EyjhWxfG2IoeeGGf1
eBX1lPvFu7gZ6xCULedTMGGsnM1Tu08jRLkOFJqVUhbSMcc2QlBFiCsVCO8+FxTGGv7SXoW/Oo9fW0cEuepbSQNoWspAnnh7lxnuztCY1VkuQntcULvs41yuolbW75zVugdYaoojukMXP3/sUP5N/gZFeqOaEvYo1EfP/mRwnSlsYKu6dgHbY5q1zNZ9Dj5Vo7suxftJk7P0L/MLMF3hvap6StLGEgSSxVyV0RahjKuMB/3byYX537gFWXy4wbGbItX1Es3XnG55WW/7dI18tEiukQGXTlE84TDyyyM8NPc1+s40rJCGaqoppyzZZo0u965C9CvaNCmqtvH2ns60nV8NAjo8STA+x8kiK8PEGP3fkm3w69yL7TUgJG6MXoosLV6iPf53fPnmI//fBj7MnNUX2KZ+4XNkWrtoe0hUCmcmgj+5l6V1Zmo+3+Zljz/FnCi8wJH0cAW0tmAtLnOlO8fX1I5w+M834dyYpff0K0crqjsdu+t6sTKcQ+RydI2OUj9nUj0RMH1jjZ6e/w0fS59ljOpgYKDT+PSFnPmbwixd+hujX7iX3X05t2253O7bLTAZ1Yh9zn0jz5//0t/hbpecoSheJQKEJdYzi5gKI0fhasbZP8vOZv4y/MIbtB0k8cqc9y97mK4eHqD+6h+ivrfOrx36FPSZYGPg6Yj6WfKlxD//6lfcw9R9G8b5x+u4Rr9aAgjhGtdrIUOOI6DXfYgmwRAyBxD19jWijtjuOQo9w5cQYtYfGWXw//Ox7n+ZnCs9xxDJwhAVkfujH+gE9iSArYyasKqYRE3UFZleBUq+Nwb4dvNlnJSQ4DsFomvqRmL+/72scsTpYwkBpTVdrqsrhnD/Bl1ZOEH1riD3fXiZeWtmesMdr7FQgEgcrLmaZ/4jLez76Mr8w9nWOWeBsIds+DCEpGh4/n7uM++7P8f/k0xxcncJ8voHqvkNIV6ZSRPcf5NonXI6+e46/MfVNHnPK5KWLJZJJEeqYEbnBcbvMu72LfK10gn9beBdWa5b015qoVms7TLmJrUmGlIvIpOkcHGH5cQf3kTJ/avpZ3pW5xCFrnXEDPGnhiJsT2AAsYfCAHfM/H/5tfvGvfhYuH4YXTu9qRtgYHWH94weIf6rMb578Ve61DSyRfo2dfQ9nKzyhcUTAXz30Xf791CcZOitf65nsBKSBUczTfWAfVz9j8i8+9m/5QKqJI1Kb3+JokyMyppT/AcUHWvxS5VMcPTcKc9fvXqhJ68Sz6vrkXlrhX3/vSdS7BE9mzpETPgtxif+6fD97viqI18u7djITPe9s5cOTdD9e55dOfoEn3AXy0sbEINZq06u9FQpFqGPOhyb/8NTHmfhPNmPn1hAbdVSrje50dmYeC4lMuaiZMZYed/jZ9zzFw84yBpKGilmJbV7x93CqOct3l/bR+sEQs0810POL20+4PXuQAuHYhHkHcazJR4qnmTQCLJH6IcLdCksY3O/Mc2zfIqvHZhk9l0ZEETqK3vBn3grunHRlMjFufDDFvkev89Njz7PfrGD1HibUMTXV5UZk0tUOMYKq8gBIez7t0RSZlAs76ekYBiqfTqQqx1s8OXWZe715Ro0GrlBIYSCRxFq95kOItcLXIeU4y2o9w2yjy7YvtzeRqBj5HJUP7yf+qTL/47Hf54ilsIT9mu/xdUhXRxgILGFgctODUVrzcnMPbjlGd7s7G4PsEW54cpYbH7L5W+/7Co+71dcQbp8k2ipkPba42h1GBAJsa2dsul1oBZ0u2fMWn5u6h41xj5zZ5Zurh1j7oylmvnWO+A4X3FuGNDCGS6x8aAr/EzX+1pGnuMde6h3PY9oqpKY058Jh5oMhKlGGUBsYQiHRNGOHUxvTXP/GLAe+3MCYu45qNBNdbLxDm4YQyLSHOjLL/IezPPjJM/z5wrMUpEmMZi7M8O9W38t3ru0nWvLwFiQjl2KMjRZK70C4RifaXx0oVKOJfXoe76sH+Vf59yH3foNHnAVGDBN
HWEjEzfBCb542VMByXGK9ncbovvY5f8jW/jp+C89wx6QrLJPWsRG6swFH8isUjDa+NrgUCuajPJ8v3893r+9Da8GhsTXuzS8watcJtUHKDul44ocVDtsBrZO4WxQhOl1kuY7VyRL5Jmer47Qih9lUmRl7nVGzwYjRoCQDClJiCYklEpvaOubV7jTd61lEbX17J8atmsotH6YwTfTMBGsPwV+YeZWTdvk1BBbqmA3V5WKYIsZiRLYZMWKy0kYiCXXMcgxfef4ejl3dQHd3MBEoBNJ1YGyY8nEX52iN4+4CSmt8HQLQ1RG+VlQVLMdpXurM8vTafrxFiWh3f8Qv2AX050tvA2j7Nmfr47RDm/nT4+x/poOq1XfHFiEwchnq75ql+sEuf+nAKR5JzZGWyfgtxh7fah7jf331cayzHu66xuodFGO7p14IIb0csffiMnp5DdXp3CTbHdp4hW0jJkZZeSiD8dgGPzPyLCOGAgwqccyvLL+f7750mMycSX5N49RjnGoyP4Rp7lyyWmt0EKDqdYZfbnFjaJL/xwOf5n17L/GB/DkO2SsUZIAnwOqRp4GgrTVXg2Eq1QxT1ThRBKk7H7s7I10hkCmXzpBJKtfAkRHz4RAvtPfx9Pp+Lt8YIfOyS3FR0ZqQnLnfJHPAJ2928GRA3unScpIPa0ege7E630fWIX2lQem7BW6MzHDdnuHbpkYboCVEhYjMaIsPzVzg/blzHLLWyMqYqjI53ZzEW5Jof5vjua93tNmUBJm0Z3IUDlV4LH2ZrEw2gVDHhDrmUqT4z7VHudEpcji9wsPeHFlZw9MahCIk5kwwzvh3JKxVkiPRTi02w0DmczQPFGjsgxOlMqE2WY7BUAFtZbIcF6grl0qUYT3KcqE1ytJGjnRLJ8qWnQ59vJXnkIJorEDjUMQjo8uYQnG1XCI9L7GvlYl2w8vtydfiwzMsfAh+9sTzfDT7CpNGQkhryuZbzWP86+efYOTbFoWLbcyNNqKTvK5NAywTYgXV+k1d8S5oXo1igfKjI9Te0+W/Ofh97rFX8URCMfNxhkvVYZxVE2dDY/iayBFE4zbKKpGKFfr6DipCtEaHEcaVZSatKWpLGb558H6+tvcoUyNVDuXXOJpZ4gnvApNmBwtNQxncCEqoDRun4m/bON55eMFxCD2wzJhKkOZau8QrS5PosxlGLkLp5Q1EJ0A8OELjuMSWEXkjUQGkLR8tAblDddf9WJ3SKKWRN1YYjRRx1kle7+9aUhDkbTojOb548hGeu3+GT06e5oi7REO5nKuO4pT1Hcdy3oq9m0k/26Y1ZnD/yCLTZhULA4WiqyOuRYJ/vvoB/ujSETzPJzvdJU4lBK5QoCU1FfOFyn0UX6ygm62dO1JCEr4ZK1E9aCJmmuzNlAm0wcVwmGvBCCthjmbsECkDhSBUBhU/TRwZiJi7rlwANhUii+/N8sR9r/LB4jmu+CM8F8ySKWt0tbbjvx/YTPYuPpblfQ+/wqfzp9hvBljCoK1iutrkYnuUzDmbwoUW1kIF3eoVP2idaGR7m7nudHYmTvp65ts2wZFJVh/TfOb4S3wsc5oRw0Qi8XVEJc7ghyba1PglQXtcEOY0cUphNi2KYxMMtdqocmXniFfFqEoV6wIMV4qkynkq1RTzBy2CGYOs1UWmFf1gV0ubXGqN4KwamJUW6s3W/23Ye4eerkSYJpEnUEpytVlifr2AcTbDyMsx2QtVWFxFuA5hapR0qcMBb51xq0ZLJcRn+EAQ3pEZb4p+XEcr4o0astVG9sMZ/YEyDFK2hed5eKvjLMRjfNWIKZfSNGOH5fU84w0Napu9sTfz7myL7rDggLeGJ2JCoK0UK7Hky80TfOWVE7jXbYITMUWzTU52sXsL19cRV6MM3zx9hGNLlxON7g4uPJlyaezL0jgQc+/ECnvddZSWvNqd5jvlA9R9l5zTpeS0yVldUkZAxvIxrQhlvgManfSLI8ZGyH10mb8z/jU8ERFriVICp65
QnW0OgbxepaaQSbFLJkPtQZ8/O/ws+82ArLSJtcYSCgNNK7KRPgilN4tjdKwQ/fmkVVJVFe/S6aHn5S4+lOLkvXP8ZPH5RK3SC9HFWtNVFlnXpz7TJZYwXGxwuLjKmNOgEqR5at9+nMZeMs+QaIh3SGWjoxBdqyOlxC55GL4JTszefIV7vBuMGz6eNAi1oqst5qpDpJc0otroebp3O7wAYBpEKQi7FotxnnjRI39dk77RTgyFxAs6Bh+Zvsx93nUKsk05yrDazuKuJyV3O44e+Spf/dCxXhgGhCGEEe5Vh8y+UdaPp1nwClT9FKpuYXZ24Hj2Ju8nTBO/qMmbbUIEDRVTUSZPtw/wH+ceJnPeBgWjpRonUzcYMzo4vUne0IpnWocofd/qSbF2cPEJgchkqB40GN2/yuOlOfba67SUw7VuiaV6DoA92SrHM0sMWw0AFqwiVzMlOk72Zmx7N9ULtxTGyFyG8rsn+DdH/2f2mQa+hrT0UR0TpxomC27bbbhlHkqREGg+w7G9S+y3KnjCQiKRAlwUngzJW126oxq/6IDKItsuwg8QUW/8lEJ3/V6mfQcdmr7dhkG0d4zuo81eIr2NK5zN3EJXawJtcKy4zGMjVzmUWuGos8i00cQV0NUwN5Lnr4d/gb31aZxXNGpjY2dOllonmmzHpjNq09ivePzwHH9p7DvcY2+QlzaGEEBEV1lUamkmV+Oe2mN71tEdk662LYSCqGkRYeHWJaav0IZAp1PoYo75P1Xgofec408VXmbcrNHVFlf8ERauDHP4YivxInbriNnXZcJN4TRsdjwSQtAtCSZzDYbtFkoLMDQyZPs93TeDlyL2FEtBgTm7BMBLnRl+b/4+Os8PkV3RbByHD42e56i9QkFKDAQxmsUoxecX7mH45da27c5vBlXI0tob84HRa9yXuk5Wdoh1gYzhc2JkmbTp83juMkftJdyeBva6WeV0doLz1uiO2va66JdQ9+RE0nHQE6Ps/YULHLZsJIK2DpkPS9irJvbiBvEObFzi1rBabwOICikOZq5hoTGE2JQEWsIgK3zuz17nO4f3U1vM4mWTjVbEvQSsArOjcNbayBsggmDHw2IilWLtvjQf2P8iD7nzZGUSVlAkuYWGkhhC89HCaQ7Zq4wbMRlhYYnUptZ8xGjxzx//D/ztxf8t+5vjyG4X3WjsjL2GQTReYP0+yeOPneUXJ77MrBnjSWdT/ZOMvUKFErOjkirabcI2SMYksQsYGiMV0R0VNDomWnhw0GP9fvi5j3yLj2ZfYcRIPNq5MMPzlRly503MpQ2iYJcLDuBm6a8hQfaaxWTSNI+WGPnAIj81cYqs0eVSd4wXc3uIXWdnVBava5pAGxJ31eB3zj3AcyOz+LHJtasjlJ43mZjzEQrWH7QYs2p4IgYkoVZUlOJ7nQMsnx7lyPx1op1OUAlJnHVIjTd5LDvHfqsCgIHm3dlLpPM+40adMSPczAyHWhNTZY9X5bQL7JYMq29yv8lJr80gI0NUHijy72f+zaYGei0W/O61BymcA5Z2oHjn9d6vVzklYsVcY4jGiEmoY6SQSEQvxAB77TUe2XONp999gFrZRlsa3Bg3E2BZEc1aCvtqnumv2ZgvNHdWKigEcqhI5YGYJ/MXGDESWWM//7Aex8xHRSwRvYZwHWFuSrT6WvPHnA2efN8rnH/2BLkrLjSbO6DbFQjHYf2+NKOPLfN3J77CQUvjCGczHAJJIc+0WWd4uEFrbBjXS0HX35bT2LaEF8KcYnKqwmx2A3NfTPdeCyk0xzLLvC9zjlmzjisgBkINq1GW+Y0C2XWFbu+ilws3iyZsG5nLonMZVNYlLLisPujw8E+9wt8d/ypZEdHVknGzytmZcV6+5yjZl4ZR17o7J47fIiETjRbjzwY0b6RZt9OYXZjZiHHW2xgNnzjr4JQdvlw+wQF7ldBs0FYm3+sc4t9dehfjz2hUubIzdm41WQq6ww6ThQrTVpkRKbCEZET67Le
WiLXGFRKQxGgMkrJwA43SAiPgpie2HXX2bwH9El5p2+jpMdYeK2L95CpFmUjyYq34e9c+Q/T5YcafWd6x8m8dRa8Nc4jktGWu1jlzbopTkzOkU3PkZYyBoKUVK7FNNU7TDB1sJ8RXDqQiDs8s84GRC+yxKyyGBb625xjlG9OMXR9CL67sTIy0V324/NEpfvrR7/GIe51MLxyS6PNjXvQnebpxkFG7gS1iXLGCY8RYGGx1YQwhcYTJY/k5nj1wL7kzBUS1tiMVoMJL0dgLPzF+gVkzxBHuawgXkk1gRAp+du9z/NOPfBAZHaL0zatEK2t3vP7vPLxgGWhTc7K0xEcLp5m2ymRFiC1UUj5Jon1rqGSXNgTYIsYwFFqIxMvZpcUGvexwNovaO8n6PTk2joGe7XBgfJm/Of4yfyp9lrwUmx9CWtb4xNDLvHzfJJ3vj+CsJJrH5OG3weatCZV+1llpVL1B6uV5UhddtG2B0Yv/xQoMSZS18A92kUJzIRjnVGcv3y4f4qWL0xRO2eReXnnzbOs2wt4IuL5e5OLkONPGJQpbQpVtDTEKXyfXlLhCU1WSl/0pnl2dJb2gb3q6u7X5qhhiAVIQ5VM0ZuCnxi+i0BjAudDn7Lf3s++5eo+wdnAc+4oVrdAxqE4XuV7hyP+a5ZfWfpp/+kCZB0YXKNktqqHHxdoI86tF3NMpkOAqaKclthHjyYARo86IUUeNSn7l+B5GnkkjDIne7kfoN18aH2XjpOKh9FWy4mbPh3UV8EowzB/VjvNyeZJSqs1L5h4ezl/jiLvIfrPCpCnwhL251gwhUFokWmPb7Nm9A9wgZaKaIjl1KRSx7ulzhdwMJTnC5FOZVyk82uZfDL2P1dQ+Rj4XEleqr9t34q3ijkhXSIG2DBAwZLWYMjeYNHzSQmIJ82YPgFhQUSlKsovqxacKqS61IZG0I9xFCNOEiRFWH8ux8WjI40cu857iJaasDabNSk8gLZOdWAhCoZixKsyUNqgNT+P0Y4HbcWzf6tlu7YML6DBC1RuIbhcsG+HYSVexvEd3LMXiEyZHp69xLLPMUlDg6fX9XLg4Se6sSfFSgKg3e+8d7+imppXGWmtivDLK7ww/jDGlOO4skBYRDeWwHOfxhE9a+mRlQE0JLoSjfG3jOCtXSxya6ySFG7ut0+3J8qK0SeyCJRP9M8Cnvvm/Y983AuTVJeKdLCrpo0+8gI7jhHjnFtj7hSkap4u8lC2hbNBG8j1DTU16OUQboCyB1bJ41ZoiZ3Xxij7TVpmi2cKZbhKOeNg3nEQhtF2JSiE2G5NH4wXMkS7ZXugwJMbXiuXY4Xx3kvO1URaWiiz4wxjZkHPZUVJ2yMnSEp8u/YD3uht4JHH0UMfUYg8jABGpnalSA3TXJ70g+MbyYR5PX+KkXe5xlkRq2UukJZvAmGHyQW+OwuE2//1nPkF0bg/GqVZPEXTLnH2LVWl3xHhaaWQnxGhJ2som3tIDoKtjKgrmoxzfbh6laLa4372OJ32kUBTcDmt5kp6buwnDIM44dIcEuVKLaW+jl/iRtLRNSHKc6Q+8Alrapua7WC2VqBy2mSA2+/eStMbrQ8cxhAIhJDg2OmXjD7lUD1ikj1U4lltmwq5yuTvKjWoeZ9XAW1NYVT+RC+2U/nkrtILyBiMvlpjLTvPLbY9jQ6sMO02UFkihOe4tstdaI0ayHOX5cuUenjp3iKEfGFg3ykQ7LGl7PQjLBNdBWUkrzGU/R1uHKB0y8kc27vlrxLX6LjW22VoK3vN4603kpRsU1nNo20JbJpjyZoPyZif5OcvEWfcQkcd3o4PIE4oPF8/gioDDI2usDe/FNs3EQdLbs/lutpnMpGlNpZgdXSQtknUTakVLaSpxhgW/wGojA0GvvDaQbCzm2RCabmhy0FvluLWOZcRYwqCqIp6p7Mdb1ohmG3ZKW+77FC6FLLw4zj+xPsxHR88yaW1QMNpMmjUmjRhPWBgiOfGWJNxnL/MTM6f5+vh
7yVgWIk4+p7fDBXfmZmqF3KjjLZd4ZWOSQ6lZus4irghZi3M80zzIt5cPsHJ5mMmDa7gzIYecZQw0OatL7Ordr7uPY6QfkVrXbFzL84X4BM/nZ5jw6nygeI4ROUdeJtpChaIcC75ZP8bamREOXavfPGpuJ0kYBqLv6WiRFG305EPCMsG20OkUYcmjPWbRGdMcyNVJGSFK945ksUTEAqF08sfYcsbfSULTGtVokj63xrg1Sm1lmGdHh4jSCnIhB/asMeVsEFoGoTL4fuMA37p8iPwph+EXG6hKdff7/AqxSXQyUNhVk5fLk6yNCv67G5+i9NJGItLfzQTfVvlYn3jbbUQQJCTXT7L1CTpOqi2RAruWZliMEWVSvDIyyYO560xbFR4uXuP3SvspOL0k8HaQ2JaOfRgG3ZLkRHYdTyYngj4FSRSmVKSdADUsyKZ8YiWp1pO+Kzk3+f6GNinoiJCYp7tTvHR6loNXuuh6c8cKenQc412tMZwvcSWY4ZenJsgU29wzusSnh18k685jGTFGjx4lEkvAsNlM+lUb8g3aDL013CHpatRGleKFkKuzk/zbrstUtkagDK5VinSvZcldlsxcCVl43xgvFPcyYjawRJSQrqOTHXwXoaMIY73G0KsOTtWlM5RnqZhnbk+E80DEPe48SoeEIqahYr7f3c/nLt7D+DMacW0Jtc2ZYCFFspB6ygihNRi9MIjjIPqEO5L0J+6MCCJPUfdd1oIMWaOLJWJy6S7lYppuUeBlbAzX2TYbfxR0EKCXVsn5IenreYKhFH7BpL7XpVJKYfSm6EJY4rsr+7HPpBh5uYOcWyBu3aUexVJCFGM1I5wNi5W1PE/v3c+rf3iEvUsXiXeyYOetQiVVkH3yF+K1J5d+8YOIFdZKCqfi0ujauCJk3KxipBT/cShpKLTZFHyboLVGKEXsCIasFgaamESJ4AoYNxscSy3SGknm4f7UGl1lsRwkuu1Ru8G0VaGrDdZUTFsZ/Mtr72f0GQP76mLSZ3uHuqChNaLeInfFxQhStBcs2hMFLj4UUS5mCHsn9rj3+0NiasrgbHsCuxYl4677f7bYuCVM9Ga4Y8ZTXZ/0S4vskVNUrwxz0RvGakJpISZ9rYGxUkWHIcWRA1x6cJhG3mXcrFGyWqhMjHb6V8zsjrej45h4vYzZ7lCccyimXKLRHOWTaX4wNUVjxE1CDFozF2X4naWHsZ/PkP/+daJafWcmgpSb/U211ohe131cB5VNEQ55NCdt2mOSIK/RElY3shTcDtPuBkWrxQMjN/heaNLwCzg1G2sjjVgWif54p6F1cgVMFCFrdVLLGezxIn4uh2tFlMwmAFe7w6xcLzFxSWFfKxPXm3fpNgt5U3OtNUKBDiW/u/QQ488GiT70bt2BtiXGv1XOePP1vqebdAvbzAXYFmFWMFZocNRZYtZskxYh3RGFTvU83e1aZ7oXZuv6mC1NW9lJaE4HuL2cyLgR85h7lf32KmkRUJLBZiFEV0tCJF1t0NUWF8MszzQPsfr1KWa/v9I7ZezApteLRSdjIRCRwgiSz185mtl8hXGzhid6KhuSsvqGinnFn+YrF45x+HoN9WZN1nejyxgqJl5eIf1Ml8zpLNqQiG6Q1PsHAXEQIiwTI9CYUuFJH0/6TNhVUqUOcdZFSrF7eRSt0b5PHEaIRlJvbwYhqckUXamxREygNQ0l+Ur9Hs6/NMO+57uJ/Gq7F2L/SKnUTbI1JJgmOpsG06AzlaUxY9IZFUSeBgHaVkS+idKCUavOuFll1l5nyGrxZeMoG/4Q3rKHfeuC3Un0Ozkp3SuzLtKaEnxy4jyH7GVaymHJz+OsmKSXO+jGzh0ffxSETBaf9lyCoo1fFKSLHS7Mj3H0SmXXVB9vZBuQ9IG2kwRqnyQwZBJ6UgrdaCJihTAkMp+jdiRP/V6f/2bm+5y0G2Slg8LHGO+gbfOHyfsOoJWGWKG7XYoXOpyrjXE1PUzB7eLqCEtKMphkTNgvQkx
MjF5L0lgrImK6OmIlDjgTFPla9QRffO4+jv2XVfSNpUQdtENerrBtZNpD59J0R1LUZ01qx2Ledf8F/ubEN9hrNsnKpNUrJDHqxdjhc+v3U/qqCwuX77ifxbac7XUUoao1RLOVyDGUSo5F/XuTLJPQE+TtDgXZZkh2OGCvMlGoE6VGsA1j1wXyqDi5WmRLldkjY9cZN1r4Gk750/zW6YcZfRacswtEO1GqrBVa9Y47SiWLyzSTfhalNP6QQ33apDkLYTECJ0Zaikzax7UiPj32Eo+6VygZIaGGcbPGsNXkd4wH8V8ewt7NCroehCERxTwbJ3JMPnGDj2dfZo/Z4XxocaE6gressdbb6M4O6p1/FAwDkU4TDmdo7DFp7YuY9DqoH+QRjda2tO+7XWzGbHtqHjExiipm6IynCD1JlBKEnkAbkF2Iycw1MFYq6IzH2rvHaP/pOv/DiT/kE948RSNNqGNiDXEkEc0OersSwP2KTi3QYYS51uBqpciVoVFGzQZpq4xHUj13q/YVEkkWGkIdsBhl+Z3VR3j5c8c49oUy6vLVHS3kEJaJHC4R7ClRO+BSOQFj9yzzs3uSBj3JtT1JzFmRtCVdU5pfX38vP/jKMfZ/eY6o0XgHdBnrQb9Jr0mRTtPYC/fkFpky6+RlTEO2GXJb3BiexDHNXeuG9FrDkltL47ECy++S/P3Sc7hCsxw7fH3jGPaZFPmLDVS1tnO29dpPakjIVwpIpYhTJvUZk+6wICxE5MYbHCitM5mqM+7UeG/6AgetOgVpIkkqgLKyRded53fFA3hL/u41POlBmBZyfJT190zQ+ckq/7/9v8cxOwCSY+TiYonppRhRSY5odwU9Qb/OeHRGHVpTMDZT4WhhlW8cKKJGi4jyBvr1CGoHiguSv+Vm8yiRSSPSHvV7R1l8QlA6VOHB0XlOpheZtsu4IqQae7zcnuZbKwexpOJnp77ER9Jn2WcaOMLdbL7/dHeW/HddhNLbGs/d7D0chIhKldFfK/HrH32S0w9P8PNjz/Cgs4rb66P7emhqn693JvnvT3+S1O/nmf3yHNHyyo5WzQnDQHoe3f3DLD3uYD68wc/te5mPZl9hv9mmZDi9648SbzzUMStxxP+y/iRf/saDHPz9KvHa9vTT3t4s1taJ2o9NWSZq7zjp+yo8nO5X14AUimGnxdk9kmIuuyP3y78p+h9EPkdtX4bC8TLTZh1XCGwUpoyRMYgwTo5mO6F11Rp6fXJROklFhAIcC2VJOmOC4GCHh2bneTA/z35nlWmrzLjRZswwcbbUrvs9qc6r3WmWr5coVqqoWxqj7ySEZWNMjlF59yTrH+vyD4//AQetLq6waauQOX8Ma9XCrvro6C55uCQtCMVwiebhEuWTBva9G/zEnlcZNhus7M1SPTxDbjGbhMf646Z0T1GwA15YvxzdMhPbLItgqkhnSJI/UOEv7X+G96QusceMNmVMsW7w8fQif3PoaWINBZlUc1nCIOrdKvFSkOGXznyMoWtRcvvydm/AKuncp2oNvOeucmhllGsvHObvvOcAT9x3jp8d+T7H7TLD0sbp9dTt6ICXAptfPP/ztL46xtS368jL53YuVwI31RaOgyjmqe2z6ewP+OTUHO9JX2DSaJOV5mtuXIlIijt+o/oo/+X7DzP7rehmEn0bsDPSgT7hGgYy5dIaS5FP1Qm1ia+Ti/4aymW5k8Vq6ps9DYQkObrsAkkYBrKQxz80xtoDkj8zdQFPaLoarkZDfGd+P96KRnSC3lXQO5Ts64UYEn1uT8EQxRh+jNEBKTWTqRr3edfYb1YoSIUnjc0rRvoXU7Z1zFyU58srx8mdtRCN9u7cWEuitJC5DMHeYconBe8/eJGj9gqWkMRasxzD55bvpXgG7MVacnXQXTjC973ceChLe8SgOxZzsrhB0WxhiRjPDFjOS/KpVK+YQPdOITHbf09TzyQpNnsuYMikATmAglrdo61sLKE2bzORCEwBDiaWjPB7pWZ
tHRKqgKqSnA9H+Q8rj2F8o4C73rjZIWsHnAYdhaiNDUS7zfBiluKrQ8ztP8b/8fBJOse6vPvgHAfSayz7Ob52/ijFb7uUTrcpXbuKKleIfX/nCdcwNhPVTk1hlC2ut4tUchlCq0qoFZaIUT2Z6LUo4J+ufogvPncfE9+WpE9vr5pie0m337WrfxmcbSOKBYKcpNtOcdEfo2Q0yQmfcpyh0k1jtbkZb9qNbFrfw82k0RNDNKdsgmJMK3JYjB2qscc360fpLGco1ntayLcgA7lTaKURveox3epgNAOsRopW26QSpGkphxBJiMLXCghQWhOjaSjNYuzxjcZxLlwbZ/pKhG42d348+5M6lYLRIeozLuFwRNr0WYvTuKIOKL7WOs6lK2PMrkWIjo9WalvF+rcDHcfIaovMgkPk2pxt7+OV4T2gwCxbzFwJksw8JAqHXlx8Rzaw3jFdCJ38TiHQzTbWqkW6YNGcc/n94fuwpmPe7V1k2ujgCokUgrBX6bkYZ1mLctRVivUwy8X2KKeW9+C/XGDm+RbGjTXiTnfnkpa6J2uLY7TvIzaq5G5kyZ0pEDyT5crIES54x7A6mv1LPvbcNVS5khTE7GRMf+tVWPRUQe0O3nJA9mqKV7Iz/Cvf5aGh6xxKrTBtl8nKLs+3D/Ebc4/QeX6I6ZciMmfXUOWNbR2/7SPdW4ipn4nXveKH2kaaF2qz5I0OQ0aTs50pFis5xmsx+P5Nz2c3FqHoKQSsZPczGwYvrO/hZHovV/wRvjx3jPQVA7fSRfjhzsuudH9hyySh2GphlBvk5tP4QzbPZmZQWvBw/hp77DJp6ZMWAV1tUY4zXPOHudge5elr+8ietklfqexau8y+zlhbBtoAEUrmmsOccvey1us49p8XHiBzzsZdqyclv3F8dzzdnnJFrJXxwghnPUvxUoowbSAjjdX0cS4soxrNRBvbDyv0/t4Zm25WNulYge8jw4C0FIyYJcrhOP/00Af54tQ9nCgsMWHXiLVkOcjxYnkPS+U8Ud1GRALZFdg1SWpFM3wpwJpbJt6o7kp7z03yjSLwfcTGBuacQa5X4KPDCB2ERLsZQuyvqxjo+iilsOfLDDOEU3MoX57k80MTRBmF9mKkHWPMu5ROa0YuNDAXyqhqLVlL27hBbB/p9rKaOgZ6jS8IQmS7S2o1xL3o8Jw5SzuyGXJanK2MIS6nSa20btbe75KmFK3QfoCx0fN4HId1Rvkt4xEW1gu4pzyGTofYi/Uktte/G2mn7Nn8d9xz+DWslUmfNRDRMPX1NKcmjvLM8CFkNsRyIrKeT9u36DQdaFqYdUnmOgy92kHMr6B2oun269iu4xgdBMh6m/RShu6czWljmvnxAqV0mzA2WP/+OFOnfIylCqrV6t3Xtkuf960mRxFxrY5odRCr67imiWsYyZwIQqLdnIvA5s0mW6ZXHATIICTXaJNaHqZ9xmV9bJqvDE0TeRoRg9kSpBc1U+sRVi1IqhAjheyGyHob3WgS92Sbuz7OPQImipLfv/VZd9GGm/+O0SrRNeulFaxag6HrGUoZD+XZxK6JsiXKNEjdqMDKOrrZIu558Ntt9zYn0vreau8hoxC9uoYbBExGe6iUXc7t3YeyNO6aZPzVCHN+fddr73UUoVtJbbfdaDG6mCd3Lc9aeZLSkqb0gwqyXEU1kw1hVxN8kNzl1GrB3HVSiyt4z7oI206awqddopxLd9gj24iw6gEi9hF+jKw1UZUN4p28zv5WaI3yfVhdxwsj7I0hclc9OkMFKl4RGcC+71TgygJx/0bau0C2t9qsw2BH2gZuC3rFJqrdRi6vkDEMsn2tLr35+3qOgJAoSBKovfe563gn2NCHitF+nMSRq9XNLyf9ihPEu7Dh7mwNbr8QYXUNq9Fk/NUUwnGSSdNOuktFcXx39JoqTvqkdjqIjQ3sOcmep82ky1MYJRP3bk+Yvo239HOVgLclnKN7f9TdsrdPEl0fsV4m+2qPJFTiPard3rT
+GGHTY3xL33z3VCE/drjFE95N7E7jg96ivJU83hHQN2/5fcd6Pq+HdyKJ9TyJd6BlAwzwjoHYldr8AQYYYIABANjF4vwBBhhggAEGpDvAAAMMsIsYkO4AAwwwwC5iQLoDDDDAALuIAekOMMAAA+wiBqQ7wAADDLCLGJDuAAMMMMAuYkC6AwwwwAC7iAHpDjDAAAPsIgakO8AAAwywixiQ7gADDDDALmJAugMMMMAAu4gB6Q4wwAAD7CIGpDvAAAMMsIsYkO4AAwwwwC5iQLoDDDDAALuIAekOMMAAA+wiBqQ7wAADDLCLGJDuAAMMMMAuYkC6AwwwwAC7iDe9Dfgj8rPvqFsrv6p+V7zRawNb3z7eyNYfFzvhNmyVBsIyEUKgY4WOY9Bq229X/uPw+cOPj60/LnbCbl3BPsAAdxvSAK0QUiAdJ/laFAGg4/5/fswgDYQUICTo/gbyjuKeAV4HA9J9I4gtG9U7bSILkdjUt/GdZt87DUIgDAO0QNg2WCbE8Ws/4/6YvsX36xPdro9973cLKZCZNDgOwrLQ3S663UF1fVDvkA3k9ebnO3ld7RIGpNufxJaJdBxE2gPHBilBKfADdBCguz7a9++eN9EnDsNA2DbCtkAItB9AGKKjCK30O2fBvRnEG5y8dmJc++MmBUKYCC+FcN2EdI0g+ZyDAB1GwBuQ6BaiQ0iQApTuOce7RLy95xC2jSwWiCeHaE6liG2JFuCWQ9zFBsbKOqpWR/e8+F3H1rEyDIRpIgwDrTXCkGCaNzesKEJ3fVQQ3v1527fb6IWfTBMM4+brWqFjdXOt3QEP7Czp3rq43kk7Wy+2J1MuIpNBDeVoTWdpjRlEnkBoMHyNXdek1kPstRbGWhW1Ud39SSKNZDK4DjLtofNZwtEsMowx6l1ErYluNFG+jw65+xP4jdB7joQAk7mhtQalk0nd+7v3wp3/PiEQpoWwLUTKRbgu0Z4hIs9CxgqjHmBUm+hqHdVu/zDx3roQbSshjVihgwACdod4+4Rrmoh0mmhqiBsfytLeFyLcAN018K44DJ0xyAQhotO9i87Ba8dLpFKQchFao7IpopyLso2b479WhcoGqtPZXXtv3RxsG1nIozMpooJHlDZRtoQ+hSkwfIVd6WzygA6CtzXO20u6t3qNXmrTa9TVOrrTSXaL/sLqYVc9tN4Elp6HKBUIp0o0Zl1qByTxySb3TC4y6jZpRjZL7TzX14tECx7ZqyWKF7J4Zy1YWUMF7I7NPaKSKReRy6KG83Sm0lSOWNg1jVNPkb3qYCyCiGOIY7S+jaPyLkGYJjKbTU4SZs+DiGJ0GEIQon0fwmhLaPUOiaxPVK6DzGWJJ0q0ptNUjhqEWY3ZEXjLLrkrLqnLIPre7q2E25vLpFzIptGug4hjZKONqtWh64MWuzOHpUSYBlHGxr+nzacOn+aIt8zZ1iRf5F4yCyaZrd7ZbkMIhBQJ4ToOwrHBsVHZFNqUtGYzNPYYhGkwfPBWHAoXJLLTScZ/pzaK14SREsHWa5yYYg5/Mkf1gE13SNAdUahsjLAUyJ49GnTHJLVQoHQ2S+5FB1bWodO5bbu3j3RlQgyyWCAeKdCaSlPbb9IZ1SgbRk5NkL3axqi0EH4AsQIjGQDd7qLq9WTS7+TE7Xk+Mp2CiVGq9wyx+pBg78M3+MXpb/GYu0hBmljCINYaX0esHdScCcb4YuU+vvaDE8yYE6S7PqJSRe90TG/LJCblovMZuuNparMW7SlFd1hgNQzshou39g6NFIkkcSXHR2kfGaU1YaGsZNHZTYXZirHqIdZiBVWp9khMoZXkToh38yie9ognh1h7IEPrw03eM3OFrNXlXG2M85cmUaaNs+bBKj/kDAjD2Aw56WKOzkyeICvRUuCWc7hXbcRGDd3pJhsG7EycV+tkYQcBqtnCWWkir5Zo7XeYtspcNYYxGgap9Qiq9cQLvwux5k1v3LY2CRetEW0fXUjT2GNQPxRDLkSHkjBrk1p3SM3
dfM7ttin5u0e0sn9ykYhMGkaHaBwusPqgwcEnr/J/nf4Ke80ajgAFdLUg1MnPSjQhkvmowD+89Akq/36Moe/GqOWwd0J767y1LStVWDbGcInGYzMsvE/y5OOn+Ruj3+SI5ZORSaZ45c91+Hp7L/9p5WEuro4QdE2EodFKIBdc9v5BF+vMDVS1ho7CHdrx+gOeoX60xNL7NJ9517P8xaGnOWhKPJnZ8r3gaBNPxpTkCuMj34YH4Olr95GeKyCarWQh7PDRUiudnHDiGMIIsxPhVg3UdYk2wGppnHI3Wfhx3JsAd9nL3eolZjM033OApT8b8OkjpzjuLZI1OgA04hTXgyE+f+0kxu/tYeS7NmJ1HdXpgop6i+UOiDfloocK1PenaX24yT+4/3PMmBUAvuceYH6jAOSQ1Sax7//Q7xGGRGQzxBMlmnvTlE8Y+Pt8tC/JnXMY1iUcy0RWG8kprh+f3AklhNZJLLHVRl5fZO/nXZ47Ps392Xm+tXSQ7JwkdaOKbjR783KXIWRCuKkUwnWSMExvLoowQlmS9rjGm2qyt5R8BudS49RWXLxzafRGbWft0wodgzAAw0JkMzQOF1h4UvJnP/BdfmHoacaMFBIPAIVGcXMTlkgUiv1mlfSh/8pffvKvkLtSxKzWIAhvy5S3R7pb3HVhWnDyEJd+Msc977/Ifzf5DR60G+SkiyG8ze/bY2b4ycwNHnLnuTpVpBxlcGXIkNEk1Ca/9MDHqf7Hg4x+5RrxyuqOJQJEKoV/cIzlxyWP3n+eTxReYtyIsYRF3PN0FBpfh4Ra0dWKtoYYwbH0Ek89sB//+1mcVRfh+zurNNIaBMkJoNNF1BpYhiQDiNihMSNpjws2jmUoao0ERLWGDt4CUW238kEaSNtC5HPo8SEaB3OUTxiMvXeRX9z3G5ywV8lLgSsMJBKj9/u7+gp/pfh9/tuRT/Cqc4Kx71jIG0sodfvHttc8nmkicllae3PU9kuGci0echYAWI49zrYm8K9kGb8QoDdqryUqITCyWfTMJBsnC5RPCDjc4t2zV7gns8B6mOG3nEeQvsOQymJHMUTJpoj273go3xRagVIIrWmsp7k6PUQ3NLEijYgUWt0dRYUwjCR+a1vJBhEEiaMQK5ACs5YidlM8OX2Z9+fPYqD5RvoYf7BxP+PfTu2MXX2Fz5Y8gVYaISXac+kWDPSQz153na1Bmf76h4RsLWEgEUgMpJBMmg0yoy0Q9uuGS38Ubp90twSghWnC0f2c+5sef+3xr/MTuZeYNTWecDDEa4vd2irgpcDma42TXG4PkzN9nsyfY79ZIy0F/+Dgf+UXPvVzBHNjWI0mcbO17aEGIQUik6Y5ZROVQiSaq8EIBdkhMDq4QtDVmlP+KH9YvZe5xhDt0MaQirQVUHJaDOValE9kmbyeQ7QT72ZHmbe3QxMAzRZSCEzbRIzatGZjirMbrM7kCNNZhi0Dcw5Upfrmp4Wtkqc7hRDIVAoxNU71wVHW7xNkTlb407NP8Xj6IoesDUYME4m9SbQAoY7p6pia0pwLh3nq9GH2XwgQaxVUp3tnpx2RJEe0bRF6kqCgOVxYwxPQ1VBVHpfqIzhliVXtJsm8vlcNSM9D79/D4gcKdB5r8olDZ3gie4FD9ippETEf5fjayBF8ewTdl5qpJJ6+0xCGgcxmWL0vzU8+8H2Oe4t8195P1xFo+5aM+y5CGDJ5fj9JiGqtk/9XCiwLLAMx3uXR7Bz32EvYQhHnBd+YOJgQ806FFW+dQzpRIIhyleIZmzCd5Zdz72foRJMHnUVcAefDHK909xMjOOQsc4+9TqkXdgRoK5Pmehqj1YLw9rxcuF3S3XJsFLaNHC5x7i/m+MUnvsgHvfOMG2D13nKr13gp9Pm/z3+aU6cO4q5KoozGPVbl8dwl0lKQlzbHrRafPfADPn/ofYxdLyZe5Fvx2G4HhoHKZ+iWkqzkYivPs9Y+1qMsGaPLepjl22sHufG9KbJXwaknv7uTFqy
NCU4f8Tmxb5HF4xEjLxewak1EGO1sbFdroHc0iiU6ihBhjF+QHD95lSeGLjE3NsxXxAkM32OkVkA0mglp/Yj3vNNEFYDMZFAn9nHjgxnEwzU+MXueJ7IXOGqvMGIoPHEzRt5HqGOqKmIuyvCd5hF+8+LDTH7FxL28gGq2toe8pEyOthqibMwhb5VeMIhGnGK1kcFugAhjRC92DokkTw6XqB3M0jgQ8+j0Dd6fO8dxe4URKbCESVe3iJXArmvsSgcarURSGEU762WKxNHRQwUq98f8ZPF5AB4cucEfjY4QZey7pgHdJNmYJMzV8wC10olyIWVxeGKVo/YSQ4ZGIijINlJqRLu7u7ZGEarewFw0KV50WBzL8y+y7+e9I5dZ7Bb4xuVDxHUbIxfw8Ox1/tzoszzoLJMVCikEp4M9ZM9ayHqbOL79dXSbpCs3M+lMjLL4gRE+9eSzfCR9rjchk7hHU0V0taKqJE939vM/PPUpSs+b7LsUIGPFxkGXjWmXrOzgCImJgSsMjqUW+O0xgcp5iBWDbZ2+IhHGq4xNnAICyWIlR73r8JI5RaPj0F5Jkz9nMv1yF3upjuj6ICUq59GeyRKlHMaONWgdXGH9xBQT6yVkGKBa7Fwc+nWgbZP2mOAzYz/gfvc6C26B+X1Frl7ZSynjIN9IB/uaN7lDZYBpJbG7fVPMfyhD/r0rfHLqNA94Vxk36kg0LaVpEQFJqCgtBRJoa83VKMMf1u7l9y7dR+YPMuR/sJJ46EG4PXHpOEZECXmLdMQeu4wtBGHvfaPIwA6SIzkk4QgA4TpozyXISLQbUrLbFGQbT2gcYQHQ0oLKepaZ1RhZbaLanZt27ySERKQ9ulNZjhxZYL/ZJgaezJ3nS+MnCfMWprxL7VTiOFmvryf/A5ozKf708EUmzQ6eMAm1oqo8WnUX3VzaPTv7SckwQnc62JUu3qLF3LVR1ptp2m0HMZ/CDiBQNjdKBebyo0ybFTACWrHkV6+/l8JcEl9/O6fFt066W+UgqRTtmTyN93T42dIzm4Qbo6mpmLkwx/fbB/j66hEuvbKHfV8ISc0tJUbmMoj9LikvYMqs4oqbJtgiRlugpeQt0MbtQUiEbaEcAy3AaEtCXDbqDkQCd9Vk5IqmdKaBcWMN3WqjoijxfLo+bsrCamYoWG2eHN3g3x8bJ38lR7rWRHR9dCx3NMywqSd0HPxSis7+gHen5hg3ICvXOJZb5mJmNtFAws5Jb7ZuvGPDrD9QhAfqfHTyHPemruOKkLU4SzX2WAyLLPgF9rplxqwq42YNA0U5zvBs6wC/P3cP1veyjDy9il5cSYir3wvhTqFUQrwKLDciZ3SxEICmqy2UEghFMk6W2QsRqCTMEEaYHY3oGNRDF2Az5qdQnPP34MzbuKuJPvqmXnMb7H4TCMNAeB7NCYufGrlAVppIJPvtVaxMQOykkmfYbfSJrL/pbD35iUSTvf6A4PH0RfIyiemHxJzu7MG+6qLa7btgs0rsFQJlCYShUVoghCZKK2JXILxkPa+EOa6Gw1RVi+80j7D8nSn2Xq2i2523tdHenqfbW/Q6l6E5afHo3stMGz5WjzjbKuZMMMTvVR7kaxeO4r2Y4sAzbczTV1BhhLBMdKpEZ0xw79gi02aIxMUQklhrlsM8Vh1kN0DtRHxMGoRpEwRIXyAiA6HAbApyVxX5y22M+VVUvdHTbKokNtg1EX6MNiBj+pxI3SA/U6M1XsSbS8GGAdx+bOctQ8hNAbfOpWlM2zx06CLTpsQRFjFdilY7ea5I7UwScmuhgJ1kf8PRHNUjcHJ0hRm7DMBCWORCd4IXq3s4tzhGXLUp7qlx3+giR9PLSKG41hnm6aW9qJfzTD7fQd9YShQLcJO4bqcs91ZonRxvlUZLsKwYSXI0VEBLOai4J1c0DIQ0ECJC9xQCstVJCmIqDmudDIE2MIRAoejqmO/WD5G+AUalmYQV+nNlxwskJDrlEOQE+5zVXoJHYhOhlESGd1G
58iYhK+E67H/0OofMJq5wiLVmLZY8vb6f4VfiJOm267aCcGy6Iy7NvYoD06vsSVfZ8D3mM3nC2MC1IkqpNp3Y4mx3ko3Q4/devZ99T/mIpfW3XXJ9G55uTxKS9giHM7THBSezi3gy8QFCrViJLZ5pHeTrl46Q/X6K0VNtrLnlhHBdB4p5GofytI91+amRF8hLG4kg1gpfK75eOUrueozYqL9WqL4NEFIgHJvOkIkyQcQgQ4HRBW9Fk5vrYC5UUI0mOow2s9mCngcjIUpB0WwxajSYLWxwpVBCOzscReufMAwD4diERY/GrODnx79HStgYQmIgqEUpzJbAqHWIw20m3X4pbV9yZ5nolEOYMQkLMbYR01YOl/xxzrYmeH55msalAoUzArOrqR4e4pljNp0pC1MoLlWHqc0VGb2gsC+v9uRaO+ChmQaRK7DNCEVfbwm+SjxdLQGzR75xnMy5MEIbErvcwWo4tEJ78+dCragoeG55hsxKjGj2TkOwO2TXi1PLSGOgN2VMZeWhKg52Pbg7crE+Xo9wDQNGhvi/zP5XSoaDRNLWPi/601y6NM7xF5aJ7obawjTRpTy1fRbjJ5b57OQLzFhlFsIiK8U8G5FHJ7Y3f+TVxiTPX51l9Ks27vnrxP1k9dvAW2aMfmhB59I0Zl3aBwKOpxY2X+9qxUJc4mxjnHjDwWxrorSJWcgiMh7RSJbqIY+1J0P+2gPf5YOpZUySo5tCczWyOfXMYQ6fKRNvVLc3m9nLZqtSls6IIPISKZbsgF0HbzXGXG+ga/XXOeImyRVlmyhb48mAGMGI22ROALtxmhMyeQbHJsjb+CMxjznLGCLRFTe04lRlmvSihvXq9hOYkDd7Dki5KTbXUmB0JPONAk/pg6y2s1xfKuHMuQxfUuQvt4hdE2U5rI27dMcsbCOiE1gYbYFTi9DN5mulYdshZRMCpECnHPyCpOR1iHWyObkCSmYTxw1RVgotBKLvpfbGTQchRAplQSnVZtRoAskcvxoWqVYyDFXD3e1v0JNhiWqD3NUhlqM8ijKhjvlK/R6ylw2sxdrOnBDvAMK2KT8+xsNOGxObiJjlGH79xuOUTpmopZW7Zpc/nqV+UPG/2fMij7pXGDNC9lsV2sqkqlKU4wxrUZbL3VHmqkN4p1KUXlgjXl2/oxzObXm6mCZxzqU1IZmeLpOVHQwEMZquBleEHM6scml6mGqnSGvaRsQjdEdjstN1PjB9mj9TOMVJu0FOJto8haamuvy9y3+e2T8MYHFle48b/WOxbdOZzuKXNHEqOXaKGGQETsWHaiPpXXDr7iUkwrIICjbRpM+UVSHWEikUhg+iT9K7ASGI0hJ7rM2wcVPbeDHMc+X0JAfOJZV9O+J1aQVKookhipDNNqnFJtm5IitqjPVgDHddML6kSC91sFabiHoLw3NhdoyDB5f5yMgZmrFLEJu8nM0hFNt+oulDGAbEChloHCNir7WORJKVJg+61zk6usLZ4RxxzsZw3aQUuXdCEI6DP5qmMxVzb36BkhGiEIRAJc6gY0GYNrFdF9EvStmF0msdx6hqjfS5Vb5VOcyfzZ6jrTX/+WuPc+CZZhIXf6eRbjqN+bMrm3Krhgr4XON+rj81w/4/nCfq7q5yYROWiV80UfkIS8RYQmEJQRZNyYwY1nUKMiniucwotWaK0pKCpdU7Tpq/ddLtS8Asg8iDnNPloj/OPfYFHCHJSsF+s8ZQ/nkeP36R8uEMBaPNkNHEFRGeiBgyNHlpY5LqxXEVTeXzb6r34//LCXKnLhDXm9s+eZMERIrKEQv35AZhZNCpu9CwsFoao+lD+MO198IwkOkUanyI2j6LsdEKFjG2iCn7adBs8cx2yOXtJSkEFirj0Zw0eGJ2bnMSx1rxj699kvGnwTp7nfhHbVhvx5NUPVIRGoGRkEwUIWsthl92KV40sCtdZK2NaHeTPgpRhIqTQ339AHx27AwnnAVa2qaRd3kpswfd13JvbVW5qe++wxh
pr0hA9vZQA40lTCxhM220+Zmx5/mfHimyEA8zNLQHb7GDeaOM9lw6+4osP2bz6APn+Ez+BfLSQGlNF03W6JAbatEZLuDl0ohuN7EzCHbe8+1740HI1VqJa5HF11vHmPxOjHHxBnGne/erEbdAmCYMF/iVo7+OSVKZ+rxf4l899QEOfLNLvLCLqoXXGNYrC9ZAJFgJc8xHBSxRwQJCkkIjT0aMm1WGrSYqlmSudxNJ4x2O8VsnXcNAuC7KkhgdOH1lkpLT4ri7wJTRJCsFWSlIy4hhYx3fWt+8C8gV4AqJJ21MjM3CiYiY+Vjyv5x6gmPfv0HcaGx7WKGf+NEjJVrTioeGV8lbXU6tTlFfHUKGJLIhpRFSJOQCiYfrODAxyvoDedrva/K39j5DwWhTjT2aoYNTU4iuj9rpia7VpnY18uBYeolYKwwhKasOl34wzcHLTVTjR2xYb0VK9oY29BMlIiG0KEI3mljnOlgqRne6ydG272n14r9YJv5UyJhZo2S08bSPJWKIJIa/ZaPaUnSzLdKrXvzTCDXNwHnNSxnp8GHvBodO/AaXD49wLRhmPczy0sYUpmyTF01+euQMH/TOM21KJAZ+T/YWapMwMpA5QZRPYbXTN7P3O026vTHSnkvR7WCg+XcX3sXketAr0nkLG/+dJChvx1TLRh6c5dJfHGafmaz5tgr4au0ExVcMnEsLRHfLK9cKwgjvRpvCy1l+03yM7+w5wMniEh8unGbEqFOSXUIkDZViPcwQtU3MZhu1DXPzrcd0ewvWbIfkrlvI0OE74hB73CqHU8scdxYoyAALjSHAEjcbRnS1ZtY0X0O4ALHWXA1LFL7nJK3StvtD6MUihW3jj6YZPbrG4cwqlog5a41Rp7fb9RbNpj603/Yxm6E9k6d2BJ6YneNeZx5XxHS1xcXFUWbWk36g7LQ+ExISMW6SpkJjAF9tz5CdkxjrdaLbSaC9ncXXK6MkTrwtRLQphL9VMtWvcY+LWdLFDqNmAwtFW1usBDlkw8DwX3tMSwoUtglKIboBTlUxv1Ri+UCOw7rRK+cU5KXLEStkv7lEnFok1Jp2KfnRsnIYkj6lnuY1JKalFWuxzaudPcSxJHJB95JwbJVL7SSERLgOcTHNnvQqllC06y5axJubzF1Hr8ERR/Zx9SeK/J1Pfx5PJgmpdRXwpSvHGbsSoTaqO2fvW2iUroMAc7VG6YyN4dusDU/yxelRbpwo8LMT3yM2aygtWY2yzHeKyLqJCLeHn94y6Wqt0d0uxnqDbKRwyw6Gb/PbxkMMlZq8e/wKe911SkYTV4aE2mQtymKJmAP2KrNm44fe09cRc8EopbNJg/Cdie1JsC3CnMFEuk7ebBMqEz8yMdsCuxYi6q1eo5KtpGGgcxka0xbMtnkwdw1XRIRaMh8O4Z5O4a5UYTd6l/a87siziR2QIrEz1opvVo+SWYrRjeabezqv5+W+LeJVr5UjbxXC39oW0fOoHcnynj0vMW7UcYViTqU5XxvFXZMYrS2hECF7dfHbQLw9yZjudHBXO6QuZPmje49z3H6KvIxxRaJvdYSFI2CrKlyhceIOvoaqUj2ZmEFZZZgLRnl+Y5ag4pKtaoxulDS570vGdhJ9RyDt0dzj8UThAiUZs2eiQpgfxjLfwlLeaS9XCIxCgeC+fdz4oMN7PvoyP5k5CyQJ38UoRXQhi7u4kaz3N7Ozj9u194faOL5+mErHCt1o4lw3GK6nCfMO9RWbl9NTfGi4QE52CbTBepRjuZXD2ZCIzvaobN56eCGO0X6AqDUxOj5G2WK0lsXsZmlNpPjcVAlyIam0j2koolgShgZD+RY/P/ssj7tVnC2/LtYqybrXZ3Culbf/qNEv7ZSJ1C1MSaRIpDarkcdGLU1uTeOutBNd7hbiFFIgUi7BVJ7GXjg+ucyUtUFXm8QIvrhyD0NnI+RadefkTlsfxUpuPAjzNkFekZVJ8qGjA85ujJEq94+Xb9Jr4Ye+9jZ7L2wpS07
+/3WajvcKUfTkMMtPaP5vpR8wbIQoYM4f49rKEKVFjWh2blYdarVJvNuhedVxEvIwV2uUznr8p+ce5sATqzyYusqk4ZMWEkfcrKfv5xhqqst6bHE5HKGhXGItiZFsRGlONyc4uzxG5opJZilE1tqbjV12tBpty60ROpehMWXwiHuNrDT51NTL/O7IR0m7zo9+nx32hI1slu4jB7jxAYsHnzzPL45/ZTPhG2vFC929FC6AXKu+8Xp/TVwf3lZsv6cnv3nDxy2FK331TRAiNmoYzTZGJYU2hllvmYTawBIRgTaoxSnWm2mcMuBvT4L/rXu6SiPCEK0UutvTPFbrDFcL5CcKBAWLIGsTek7ScjAGx4TybJq50RFU9jxby8wUmrXY5rkbs+zbuL79E6LfUQhASmJH0I0tGrHLldYQLLpkb8QJcW6Nh/XKhRkdonLUgUMtHi1eJSs71JVLV1uce3mGoxc3kjaUO5R930RfU5jxaI+ayPEOU9YGAG0ds7qR5UDNT2ROt4M72Si2CuFv9Z6EQNoWcmyE5XcV+My7vs999jppIVlTmlONGeS8S2YhSHSuWxffdt4aoVVyvVK1Rvacw6RZ4h97H+XxA1f4cOkMx50FRqRPtt/5TEuaOuSr7Rmea+7jlY1J2qGFZ4WkrYBISVaaGcL1FMUFhbPWRbQ6qCBMCjF2EMIwEn20YxMNZWjOaCaNJDH4kfQZ/u3ExxhOp3YtXvu6Npomeu8ki++1uP+JC/wfJr/MrHlzU/N1xG/PP0z+ShdVb7y+nVtvc+jh7fRVlraVrOHe+2zKQJXe/JroNwZSutfPJEJZArMQMGzWGZJtDDSxlnTaNrmKQne3J1F5W8p+3W/u0K+vFhLh+1jrFSzLJmMmmtZEFyvR6RQb9SILTxaIb+mkoFDMRyXU+Uzipe0UVNIKT0aaajfFjW6Ri+UR0guC1HLntf1H+4qFoRKV+4fYeCTkUwfOcjI1jyViqnGalzvTjD4LLK/1+r7uQmcp2yYczVLfK3ho9jqz5gbgEGhNvJJCtBpvrs98owl+JxOo/7O3EK4wLeRQifr9YxifXucXhr9NSdqExCxGGV5Y3kPmGjjLLXS7k8RDd0Tilng4qtVBLqxQqLewmlM8/67jnL5/nPftucSD6avstdfJCZ8YeLF7gP/vmQ/RvZbFXZNoCWtFRVyKsFIh0lBJQxcBIuy1WOzNdbTaGdnY1jaqjoM/5JA9vIEjkhzJpBnQ2ROj8t6bvMnOQzgOG/cWyD5Y5ufHnmG/GWAJd/P1pThg7bsT7J9fTE6Hb/Q+coun27u9Wau3XmLfvxFis80kPVlnrw/H5imq58xgmmCZxENZKkct3n/gDPe7NyjJCEsoPBmgQondiLeNp94a6fbvs4pjRP9qEq2AOCGsnjGbO1S/cint4Y1lmHKrWK9JoCm6OuKiP0bpjN45T6F3mZxud8gsBFyvZjClor6aYWRDI9vBpt1a9XoKFPJsvHsP5U93+Lljp3hv5gKlTXG8xW+++ihHnllOpG27QbiGgcikqe91iY62+dTwS0yayTj7GjJXerGm28UOkIMwLWQhT+veKW58IuYLJ36dGTOFRNBUId9oHqNzrsCeuRC5UUcFO1xBpTU6DIijENFu4z3bZe/qBOs3inzu5MM8tX8/948sMuHWmGsN88zZA0x9ySC90EZLgV9yaEwbNGILPR3h2BFBIaQ95pK9amGkXIjizQPc7d4g8JYfo39isy3aIwZ/49BTOMLEEBJHSNKTDaKci2kYd+1CSpFJU98neWJ0ngNWGU9Ym9WmETG/Xn2MPV9vo9crPxyK2XLDQxLXv8kHtx226ZXM63QKlfXQjoGWAoRA2RKzumWtmJLYMQjzFhuHLSY/eY3//djXmDUFCTVGZI0uKIHVjN48Dn0beGuk2z9O0ptTWxfslkl2U27Vi40qTWvC4n25c7i9SdJHJY55pTFFZmGHY6I6uUTQXmkSlYu0Ml2EG9MdsujsyZI
KQmS1AWGAKORZ+fAU9/3VV/js8HPMmhukpSLWcC3K8Wvz7+HgP4mIF5bQ4S7UiwuBcBzCmWFqh+Ch2evc4yzgChOFYi1OkVmMk2PP3UQ/yZPL0X5kL9c+LfiX7/91Dlt2r62j4lyY5jdffZTRH2i8i+s3L/jcjeNwr6dCXNlAdn1Ga6Okl0conxjm63sLCDfGueow+2yI9+wliCJENov083SLOZSlGS020FrQsS0iD8K8hbXhJF5U33u63RDPW4SQAiyLeChL/QB8JH1+sxrRwiCOJWI31BNvZmPaI0xrMkZCTEkKMrl9YS4M+U+/9T5mz51Pmtts7a8BP5RfuLVD2W3DNFGFNO09Hq1Rg8ZeCIci0iNtWmUP2Uh6rihHQy5kbLTCZ6dO8xcKzzNl3Lw5AiKWgjzWmoV9fW3b8k63URzx1j/UzW5kuQz1vZK9VgUwNnvs+jpiTTkstApYzWBHkxBaaUQcIxpt8meG6U6avOvAFeozLhceGUVfH8OujeOXFPc+epl/tuefccTye565pK01r4Q5/vG1P0X71yYpnj/zowsQtgsiucupO2ITlFRyYkAR6qSj/dVwGLPTk3DdDWy55JPRIaoPj7H0sZD/82N/yGPOBlbv5pAN1eGXrn2W4tdcCj9YRS2vvu1mIXcErVGdLsZqmbRlIuICqRUTMMhf7WK/Oo9uttCxQhoGIsyAAG1qMnbATHqDF+NJWmYGP2/guiZGsyfV2imPvZebkEIQ5B2CyRBvS27E1xGddQ+j1d751pJvZmatztCr43x+70mGrBYfzJyhZHRZjtP8j1c+w+zvr6NqjdeE8l6jy95KvHe6EUcRyjZoDxk0p4F97c1Ta1JFq4kRuCIiK0MKEvLSxhE3r+uKdMhKbPOl68cZekmjVta2zUHY/m4tfW2s4xCP5Okc9MmKCEkSX1Fo2jqkGhdohxbprIPlOski3KFOTVpriCK8NcVy08GZiPh48VV+fqKJfEjhipAps8qsGZKXLhIXhaapfE75Jf7x1Y+x9qU97Hl2qbdT787klrYFQwXawwai0GXcqeGKhHQbOuK3lh8ltdJJEgH9wo7dsG1ri8feBX8bDw6z8l7N+49d4H73OlIIfB0Sa80ftGZZ+K97mfr+OnpptXdx4l3aKLRCdbrI5XXSfoi7kgaZ6JxVu30z1GWaKM8myAl0NsKSMa3YJooNlKWJrd7thX6QJGp2Ujam1c0GTIYi5OYlAaeCLIVXzUSnvTO//a2ZGIQUzrcIshl+de39/O7MAwxnWlxeGGHi8xb5G2e3EO4WdQEkjQT6r93p/JUCTDPx/AWEOU0mFXAstcgRq0xBSiwhN6+MkrhIxA/ddNPVEb+18W7850pMPL+cJNu3CdtHultb/7kOIpehO5ZifLyCIrktwBCCro6oKPhBZy/LC0UOBWFyXXP/+vBtkgwBrw2LBAHeko97JsWrxQkOeascsFcZMRpkZUhJ0iNcQURMTQV8vb2Hf3T+o8TfGGLqG9XdrW0XAuE6qFRvswoM5rsl5lJ5hmSby+E4L52f4VirltyLBW+qS9xOuzYJN51C5DIE41n8gkDbydjUlctK3CXUES90p/l//c5n2fetatJX421cWb3t6CkbqDcwghCEQPdvrNAq6aZnWSjLAA0EEj82yZo+Ra9DfSwNFyyk32t605ck7eQzxTH2WovU+RJff2w/fzp9lZZW/J9O/xWGTnfRlerd28h69hnrdYbOmNh1h87lEiuyxMQNReGF5ZvJsz5HWOZmMnfbCLdnB4AIY5y6xilL/HGLS90xpq0ylmiSFQJLCBLq/WE5ZVN1+ZXqSX7/K+9i9ptt9OLKtp7K7px0t5KtZSJSLiKXJZwsUpu1mEq1mY9ySOoAtLXB6WCSb60fInXNxqzWNyt6tpVwt0JpCCOslTojL5usMcyvrr+XfXuOcKKwxLRbYdZe57C1SoBkLhjld1Ye5sWX9jN0SjL6Yg0xd4O409ldslBJss9bi+m
et/l8eD9/NHwYw1A0Njwmv2YkbTDjLZnZncTr6X1lkowAkA2Dl9cmsOXD5MwuV1pD/ODqNDNPR8jlcnKa2fped5N4+6XM3e7No61hJDZZvax3rLAbGnvdZD5fQGnBRjuFrJk49aTiTUfRzWrGHYSOIuRKhcnvpvhHxk/zy4+uU1nOM/ZNA+fi/F3vu6DDCF1vYF+HYtUjn7IQkUJWW+j1yk0ylEkHuKQTnNz2sIxWGnwfY6NF+oaJlik2yPAb8aM8PbafT0+8xAF7lXGjTl76uILNu/skcCMy+eerH+Jb37iXPV8LsM4vJGO7jbgz0t2qq+tdVClsG+25hFmLKAORklwORqkqj3rssh7lOFWf4eyVSUav6V412JbO+zsRXohjlO8jV9dJByH2RpHW5RTrE3v48vAUYUEhCgGjw3UaHZf2UobcOYN9Z3zcuXXU6vruT2qtE5srNdKXDcxOhs51kyiVdOfK1xW5FxZQtfrmwt9x+zZ1uUkPBh2EiDBCBgqrBU5FUlko8K1O0iRcLaTIXxKkLq8kXfZj9VpyuovEq/uJrziRfemtfSOUQvsBZrlFxjXR0qbZyXC95GF0BcPnwFtsJ9WIQXjT0905Yzc7jFmnY2aro1SuDrF/IcA9v0C8Xn7bvV23zcQ4RjVbiCBEVOsYUiQn117ope8YaCV7fQnpjXu/onGb5kE/fFTZwPIDChtpUqs5WnMuyyMz/E9792CMdBkt1RnzGhTtDo4RESqDjSDFi9enSb2YYvaZNtbFRVS1tu25hzv3dLVKBlIkGWKCANHxMZshds1koZbnO94hGqHDeidDueXRXM6QO2+Sv9xOjpvRDnbe14mMRweKOAgQrQ7GWpn8RY9CxkPl00R5hyBn0SmNUmgpJhe6WPPrqFqd+C4eh3UQoCpVZKdLaj1FyrET7yCK0Z0OcbW2+7b1fpeOYwiCpM2jH2O1FE7FAEzitQx2S5C9pshdacH6RnLD7+uR024Tr95ytUwcJzIl+OHsedxARBFu18fayJG54RLmTKxmhDtXRnR8VLN1k1B2YcPTQYCq1hCdLsMrFVS9QXQ3EpKvB5WsMX1r/49b17WOe2rTW5X724T+OMUxtDqwXsa+YeO4LiKbJhrN0Rlz6RbHuJIZ52IKlJm0eDVbMHvex700j1qvJI7WDoztnZHupkA+Rqs42W27PrLTxe50KYkJ5icLfOtGFqsmsZoCu6qZXo7xrtWQN1bRrfbueWmADoNE7tVqwRpJBRVJF7SUvKlBjrY+393CFp0pjcZrvv5OgFaJxloGEXYtQihIlQUy0jgbEc5KExZ6muZbF9+ddDy7Y8N7G7EWJNUO/NCY6khAECKaLcR6Bde2cQ2JbrZu3hYBu7vp9aRvOop2NaH7lqFvQ6e8k7b3xmnzf30/WT9ra3BF4BkGXr8LnpQ352KvZWm0w5/p9qoXeoOu2m1Up4Oxts7+l7zk4r9euV1y5EjCCfE7YdK8ZuO4u6a8Id4J43Qr+klK30deXyW17pISIpGvRclNq6rT7SkVXsf+d8IzvZkNfWLuOxPvNJJ7J9ny44QthKzvUkRm5y746gvS6/Ud+xUD3GVo/Sfj872V4O5WLPrW08GAeH8sIfTggxtggAEG2DXIH/0tAwwwwAADbBcGpDvAAAMMsIsYkO4AAwwwwC5iQLoDDDDAALuIAekOMMAAA+wiBqQ7wAADDLCLGJDuAAMMMMAuYkC6AwwwwAC7iAHpDjDAAAPsIgakO8AAAwywixiQ7gADDDDALmJAugMMMMAAu4gB6Q4wwAAD7CIGpDvAAAMMsIsYkO4AAwwwwC5iQLoDDDDAALuIAekOMMAAA+wiBqQ7wAADDLCLGJDuAAMMMMAuYkC6AwwwwAC7iDe9Dfgj8rPvqFsrv6p+V7zRawNb3z7eyNYfFzthYOud4I+DrT8udsJOXsH+TsbdukL7jzO2Xg/+eleW34rB+A/wJxR/ckj31oW/lXjfjDAGeGsQEiHFzX9
bJiLlIlIpdDqFCKPkNaXAD9CdDqrVQUfhYMwHeGdgl3jgTwbp9gZTmBbCdZKvKZUMrFIAaK0hjtFxPCCB24UQCCkQpgmWhXBsRDZDOFFg44hHe0KgJVgtSK0p0kshzlIDubKOqtXRUXS3n2CAP2kQ4qZzYNsIL5XMX8d+rWPQ6aLDCHTCE9vBDXdGuj3DgZtezhZoddNAIXvfKwX0vp4QnNo5kpMGwjAQroMcKqJyHnHaQcQK6UeIMIYwQnSTASYI0UGAjqIB+d4GhGEkEzflIrIZVCFDd9Rj46BN9YGQqekysRasVXJ05l1Cz6ags7gdH9HpDsb6dvF64ZrXvH5zTW6uwZ1cZ28F75QQkxDJfHUcZC6LGi3S2ZOhMWUSZgTaABmC1dK4Gwp3PcRabyNrTXSzhWq3b5Lw27T/7ZFu33DbRjgOwrHBssAy0aaRfEsQojtdhBBgWeish/JstBRIP0LW2+hqPXmIINj2D0CYJiKVQuZzxCMFNo5kaY/KZFB9MDsa09eYHY1dj7DLHUSri2y2UfVGQsDv5KPv1p26N6F1rBICU/Hu2UBvrG0Lkc0QjRdo7UlRnzVonAh44tgFHstfYSkocMqd5mwwQVBxiF2JNuSPJpA/yRC3hGtsG2FbYJqJ4xIG6FglnpmUCEOCaSIsC/onuihGd310t5v8vdtzeitXmCYYBmiFDkJ0GO3efO2tF2lbiLQH4yM09+epHDFpHg6Z3bvETLZCoEzqgct6O81iOYux5OAtumRv5Mlcb2MsllH1BrrTedvOwu2RrhAI00KmXEQuixrK0Z7M4BcMwrQgTAuUDUKB1dA4NY02IEwL2mOCoKhQJlh1QeZGgfzlYVJzZdTiMsr3t28ySAOZzcLkKM39ear7TerHIrLjNdotlziQ4BsIX2K2JHbNwS3bOLUs3nKAPW+jyhvQUXffC+sTm2Eki8+QN4/wmTTRaJ44ZSIChVltw2p5d4/sQoKU4DiojEd7IsXGYYPwnhafOnSGnyi8SMFos2gVWQsynBMTmC1wKj5U6+jgHbax3boJ3CVvLPmse6e03lrzR9P4JYvIFaDBaivMtsLwk6OvsiWRJ/GzkiAnQCTORXo5JnWjhbG0jqrWtnetvckzCNtGeh4MFVDFDEHOJrYlVjvC3OhgVBrJXO10dna+9ogfw0Dkc6jpUSons6w/qDh6z1X+3MSzPOTOU5IxDS1oK5PlOMflYJQXGzO8sLKHpbki+XMZhi2JdQ3iIEg2Pn37G8ZbJ11pIG0LWSwQzY5SPZSmehQeev85Hs5f44i7yJBsIYWiGnssRkXWoix5o8MJ5wazZpusTLzghop53h/nl699kBuf38Oez2n04vK2ebwy7cHkKOuPlFh7MuTw7HX+7MhF8mab9TCLIRSN2GWpm2O1k6UbWURKUm2nWLmSZfyZCXIvW4jlNeJm620N7NvGVpI1jMSL9DxENo3KpogzDkHOwi8abByRjL5riQP5dS7Xhll8eZyJp0vknp0nXlndeeLVGmGI3mZsEuVd6jMJ4f7c8ef4idyLjBkBBqB0jUCZmEs2hUsBxpVlVLV2d+O5fXKTN8NkSJGcHKTseWF658Ngt9gDIFMuspAnnhxi9Z4M5YcUJ09e42dGXuGos4QnfRrK5TvNI1xsjVIPXVwjZNar8FD6KofsFVwRcy4Y41/feIK5788w8d00mVeW0MuraN/fsWcQto3M5dATQzQO5lm/1yA81GF2bA0DuLZWRFwuUjhfJH+pjTm3RLxW3lmPV0iEbaPHSqw9mKXyeMAnT77Knx96hpO2jydsLGFQ0iFdHZGVZUaMBkedRR7JjfHM6AG+PXQQhMf4egqxYSR89TZwW56uSHuooQLN6RTrD2r+wge/zV8uPktJmljC2Py+UDcIdY2uVrhC4kkLEw+jN6EyQvGB1Bre3i/xd5/8GVqXRvHqDVRdb8vxR7gurX15agchXeiQtbtM2htYImbaKtPVFpUogycDxpwGUmg
m7CquCKkd8viN/Y/QGRpl9Hs2xpUF4kZj5xbcLZ6VMK3kKJnNwHCR1v48i+8xGLp3jSfGz3F/+jqH7GVGpE9WCjxhYQhBvEezcjTgP37sIX71W+/n6L/MEJ+9uONEoeMYoTU4NtWDKer3+/zcsVN8Jn+KcSPGEyYxmq62+O7VfQy9ovEurBFXawmZ3Q30icFxEF4KXAedckBKlGcT28lclkGMbAeIZjuJ5zVbO0dWfdN6CUk5NkLjnlHmPw6ffOgUP116jiNWnby0cYQJGER0OWQ9SzVv0lYWrojIy5CSYeAJG4nJQavMYwd/m98ZvZdfLnyYaWOCzNNd4nJlR0hOZjJE9x3g6kdTPPqR0/y9sd/iqNXC6/FDjKZ9OGblXRYv+1P85sK7WPjcQab+nU9cq+/ofBWmSZR2CPKCkdE6R7xl0iJEaQ0CfB2yEvtUlUklziGFoiA7PORe5YC9woPZ6/wT8UGGX8xgLPQ2yB31dFWMqjeRwyXaIwZ777nBx7KvMCztZNFrjULR1TENpVlTDvPhEONmlSNWh7x0QatN4nWFybjZYN9QhbVSDm+7YntCoLtdnI0As+VhGTFFu8MVf4Si2WJRF1j281xsjrLWTqO1YCa3Qcls8e7MRSYNn888+CJPH9vHP/jep5j44jEKT10lXi/vjFfW97QMIyGBbIZ4cojlR7LU393hz598hg9kz7DXrJGXArc3eSXJuEskEoEUmknT4a8XT7H/o6v8PeuzHP+He4iuzW+/zT/0DIK4mKY1JZia2OCwu4SBJtCaWEe8EhT5ZwsfwnwlQ/ZqC12t95IRP2KB9T2/7fAye+9lDJXw751h9SGH5jGfwzMrHMitkjW7ABgohq0mMYJQmbSVzVxrmOfnp0k9k2HP5xaIrt3YfsLqe922jRwqUXtgjJWf8vl7D3yF93sXGTMknkghERhCEmtFrDXLscMr3WkADtirjBnhpteWPI9kwjD4iewrnLpvhlOLx0lfG0HU62h/e59BptPUPnmC5p+r8W/u/TXuswMcYSFJAaBIOAIJY4Q85M7TnbT5R48Nw5eGoVbfVns2oZPTimq3sZarOFWPtdUc38gephKlGbYaLPhFvnDtBNELRVIrmtgR1A/HHL/nOn9h8mkOW6scd2/wyL5rLI4eJGOZ0Hl75tyWp6vjGNHu4tQV1Y4LQE0lLnZVSS6Gw/zO2qN89/QhsmctrKamMyb4yZ96ir9eeoZhaWNhEOqYigr4Xucwpy/u4eDVLtoPbsoy7gS9xSlihdmBZstloZ3nerPIWitNx7fprqdw1gzMtiB24LmZAgcfXCMrQkrSZkQIRtJXcR//L/y31p9BG/softdClSs3g//w+vbeLjlohVYSQZyQuu+jTEmQh5mxyqYHHmhJQym6IiH+UIMrwBI3iRigrTXlOIPRkuC/vePP7dmvQUpizybMaXJOl1CblFWKtVhSVR7/Zf1BLn95P5NPd7GuraE6nbf+Wd/JnNgSqhGmiZwY4+qfmyL93jV+es9pHvCuMmNukJchhkjGFKClTZROflYKTTXn8nx+P79mP05zbgxvpf8M25z8lQJhW6hihoUPa/7mPd/lXak5ShIMREJYSCId0lYhT/sl/v65T7M+XwCpOXhwmb8980d8OFV9zcnTEJIxQ/KB4jm+O3UYf9TDnXOSuOQ2PYOwbJofO4n/cxX+0bHf54jVwRI2CoWvYxoq4qnuFGtRDgBLxLSVzXcqB5FXUwi/vC12vCG0SiSh7Q7Z6yF+3uHl8gFeMg7gVCS5OcX4XAdrMXFSVDaNt1rgjJ7hS24Td+gFFJJuZCEUN8NRbwO3l0jTCt1uk14KmbtR4IUDewnceRrK5d8tv4cXnjvEyPNw5EwDY70GWqOG8/zH0ns4+PEVPujN4QpBVcHTnf386tx7GPqeiX19mbjrb1/SKo4xGj5ONU2j4nDVLuF3LMSyi7MhGF7SuDWFDDWdIQO/ZJI3OwwZund0g4yUPObO8+7
9l3n2+AnSN4axlUL7frJBqN6HGPf1eyqR59zucUNrICFewgg6XazFCqlVj7VGhoudMQAKRhuASpThfHuc5W6Wfekyj2TmuN9Z3FyYDWUw1xkhtSJRjeadj+VbgDBNOmM2UUYhhaYSpznnT3KxM8Yzq/soPz3Onqc62FfXksxv/FYJV9++umHr9/dOEcI0EfkcG49O4L1nnb+473s85F5lzOiQlgILSVcrYqBvWV6GuAIMIUiLJg13kT2FKvV0NsnAv82j5ZvCMMCy8UfTPHLPZd6bPk9JRhhIYpJTQ6gVa7Hg+929/IMv/ySlVwRTVYWfM7ikJnhheB8fTL3wQ29tYTBi1rHyPn4hhWttr0RfHt7HjU/G/O19zzNrbgCwGPl8vzvNPzr/UfxnhsjOK/ycpDWjYW+L6eEq1Y6LXRMQ7XDCWutkfQYhTsUne8PEbghkAJnFgNRcBdY3UJ3OpkpEWaC9mJLVAiDQBnMbJSbWum87ngu3Tboa3fWxyx3S1wr89vzDnMrP8kp5gubTI8w+F5A6v4JaKye7qGFgxDGGnyfQJl0t6Go4E4zxhbV72Xh1mH3nOuh686ZHsw0lujoIkPUW6aUs3TmbdjeDtypJrWjsVozVVMhAEackQVZgjrW4L3WdjLA2wx9oRVoK9nllnhqN6Yw7mNUcwg+QjVZCHFEEPV1vQnnx23PMesSLFolGuFancNnn+oUcfxAcJ+MdIFaSVstFrTu4qxKrDT+YUZy6Z5q/MfMt7ncWyQpFV9tUQw+jy+4lqWyLzpBEeyGRklxojVMLXV5dniA+l2XihRD7RgXdaCQ66F2K5fbDNiKVguEijWnJA0PLTFtlPBmigHIsqCiXtTjH1WCYUBubyd9ps40FeAKysstoqkF1p8qJ+moFz6V2wOZ92SVKsostRBITV4quhjXl8FxnP79y4b2MPC/IXe0mmvMpDxFIJK+/dhSKtnKII4mMdLLBb5fpls3ykyXec+wsD6auYgjNmTDNb5XfxVe+dT+TTynGL6wh2l3ikTzlTpZyJkW30MQ2Y1oeYBo/8vdsC+IY0Qmx6zEyklhNhbvQhEoV1Wz1nseku69E5aTg8L5ljnjLGEKxFuboXChgLi8QR9HbPoXd/hSKY2S9TX4ux3JhnBvpUVKLBuPP+aTOLaO2xD6l4xDuHaN07xqH7WVcoWkog6vBMGdXx8heAXOllniPavt2OR3H6GYLd6FJycjirRp4qwFW1UcbEm0IwrxFbdaidjLkUwfPctwu4wjvh94rY3SR6ZBuwcXLOhimTLL23QC9ddC3wf5+tlzRxbmyxtizk9SXMgRuBiOAQl2TqijctTZGN6JyMsvVkSFujA9xwFrDkgFdbdKKbcy23habfiR6ygVlJh5mzXfx42FWalmii1mGX9F4c1V0vYHu+jdtEhJudWK3I7y0adeW458hUSkLZUM18JgPhwi1STX2ONeZ4Ln1WeZXi+iajbYVk9NlPj31Cu9Ln2OPmQTujJ4PbATAdm8afS2raaJdh25J4MoQhSDQSVChpSQV5TIfDnG2NUl9MUu2rRBKo2yDblFC0WfCriJfp3lgqBUvt6cRyy7uHXpqt9pujI+y8WjAh0tnGDOaVJXNH9bu5Usv3MuBLwbYL15BtduJ1LSYJcwKZMlnX65CoAxWSsO908Mu9USRgAYZaoxujOj6ySakFcJx0NPjLDxhMfvIDT498RJTVoVylOEb5SOMvKDR1dodzYHbJl0dK3SrTeZqC2WkiW2J3Yxx1troMEw8C8NAZjMERya59nGXf3b4P3DcbmAhaaGpRR7tWopCWSE6PlqpXvVMf7LcYeJE6+RDXqvgBSGeZSLqrcRTz2fwxzPU9lpUHwj48D1n+fmhpxkznJtebg9drTHQ2G5EkBPEnplUs3kOon8ciuMkzHCnBLf5vAodRqi1MrkXID1fIHYMhNLIToTsBoh2F2KFM5NGSI0jQyyhejZb1AMXu7GL+lLTQIYaupJGx6U
pNJ1Vj6HLkLvchLVKIszvx9v7BR2WleiOSeYVYZh4+reO5dskY600wgC0RnZC3LLm9PUJ/MhECM1iPUdzPkd2zmDyWozVjPELJmv3j/GU22HWXicvF3CFoqUcVjtZrEa8WTq+7TAMtGUiI1jwCyykcnSNFqE2qCqPapz8ARBeTHPSIcikiB2oH4RDU6scsFcwbgnJhDrmWmTwhWsnKJwD+0al56ltwxwRku6hMR4/PMdxJxmr+cjjucoshVdNnLPXEtmlFIhigfX789TuD/jwgYu8K3eZlTDPD4am0Y5157b8KGiVzEGZOF56s1dI70TkOMjRYRaeLHDkyTn+4sTTTJkb///2/jzIsiQ77wN/7nd9+xJ7REZG5Fq5VNa+VzW6G+gF3QQIogHQSI0AEKQ4hDgj0SRRNJs/NJSNRiPMUMORGc0oUoRASAJBAI21gWajN/RSS9delZWV+xaZkbFvb393dZ8/7n2RkbXmEhHdIN9nllaVkRHx/Pp1P378nO98h7mowjfW7+et1w9y3xtLiWbIPYRC79zT1Qp8H3O5TsEy8Cs2YU7iD2WxLQPhRWjXpLYvx/ynNf/z536DT2Y6SFwUClf5hNqAQGJ6evOBiRXCEmmc9K6e5dZhBgG63kAEIQAqCBCugx4s0ZqwqT0c8DMPv80vVl/ioBVj4tzy8xExXppMybo+7XIev2IiYo3YtI8a3YsVbZeXppO4sOp2YWkF2WpjuO7N65fSEIbg2PgFyeTQBkeceaqyl2AzWe9mydR3kY4lJYYP0pMEvkkcS9wlk/JlD2NhHdVJFmlSfmmCbSVCOPkMyrURcYxseehmG5pNiNUt83lH66EXB9Yq8XbjpCJLrtWonM8RZTJcnZvEbAmyi5qp2ZDM9XVYT7wXt1omcoaZv7/I2nCejjYxREhHOyw0iozUgtuPSd8FhFJkljUvzO/HkRF7nfWESaGNhBJmtDmam6d2OMNLYj9aCSw34tDoCl8YPs240STWJnHqmSs0C3GXf1t7jviVCoNv1lHLq9sW4hGGwcZhm1+oXGDU8JGAQtIKbKxWslFkLgODVVafHqX7M3X+s0Mv80z2IiXpc8WqMlw9jsrndiZO/l4oBZFK9rGZFJSoYgYjjMAyWXtimIG/eoO/P/Ed9lvrLMY5vlE7zkuvHWHfn4XoGwsJrfUecFcRKh0rRBghYkVr3KC5H0RsIyIHZWnCsYDPHX+HXx/+NgctB0skp1gvO+zKEJyYIJ+UB4swRHS9pIzY88HfhqSa1qggRGiNECIZc8alO5Zj7QR84vgFfqHyKgetmLx4v5cb6piasrFEzESxwak9RdZDm+IVQX4OZMNOqsK63STMsFWxbDs8CK1Rvo+IFSIIk89yEi4pQqAqBVp7BX9t5BzTZp2ckJt82Ho7Q3nN297r+sdAmaBtTRxL9JpDflZjz9WTAogwSrzbbAYqJcKRIt0RB68qCXMCs6MpXgvJzJgIFUO7k+y9zeTkXc5nygwhilDtDs7lZUb8AZRjYDZ9jI02utneLJFFCqSQyGiIjB1SNjpkRYSFJtAGzUaGiVoTpdX2z22aXRddn+I1n6XvD/D7Bx8nM9jBtUPGiw2eql7lwcw1isLnqcxlvjAwRqgNLBEzZDQYNlrkpCIkTsYIrMQRv1l7kj/85tNMv9BFXr1B3PW2yctN2BbdUcGoVccVAglUjRb7iuu8tXeE8oExYsdg8SmHT/7sm/yfB7/HlBnjCBOFxNd1BjIdOrkyphQ7u2S1RkcRRquD1XQJDJugYBAU8pjDWTYOW0x96Qr/14lvc8DaoKksfm/tSb79/Qc59Psd5Omr21LNd3dpgZ4ylxA0P93hyPgSnxi4yIPudaatGkNSkJXWB8ZIDQH7nBWOTi9w/sm9BIVBcksVMksesWviXFtDra7DPdQ2b0InV/VevbdwXdpjJqUja3yxeor9ZgdXOO/7scTgRoTaZtJe44tDp9iTrfHm6B6WBwfxLrtUHEn2Qgj
N1q0n9HbGpHRaLOKlfxcyqb03DPzBDP7hLj+eP0NVSiwhCXXMclTE23AR3eaHpFS2HyqXwRsUiFKAkBqjI3AaCtHx0LBZ7BFPjbD2YJ6N45rM3iaD+TZCaBZrRdR3CpidAlatkRwsWsPdOj23HIDpGohjVBxj9oSNooh4a2io5xn7Pn5J8HB5mWlrhYJUhBrmwwrGvAPrN7Y1/9Abr1Y6ibM2mjiXIsbCIbxLNlrmaE4YnD5RZOSBJp8rhOyzFBYGB60bKK2J0TSVJkTQVpK20nS0wblgjH964SfQXx3gwOtN5MxCkizaTssmJWhoxC6B1hSkwaTh80sjLyF/UvPGiUn+wfFv8vnceUYMG0dYGOmeC3VMQXYZclrMhmr75/WDoHSiIGZKuoMG3WFBUNIEIzF/94lv81PFkwzJiJqS/B/rT/Ptrz7K4S+voS7NoLaJYnfnMV2lkwokpYhyJk9MXebnh17nhL3IoGGQFe4tHMGt6OgATwuO2Av8V3u/zuzYAPM/XqEeZ3h1dYobKxUyb0ww8moV68Jcwou9F8ObVppsekzplUprgRRqs6gDERPpmFDHhFoxHwtmowHW4jxlo80jmRmeylyhVs3wyuQBfn3oWbSRxd4oYnS6iChCB2rL1XabDW8cI8IQbAukgyrl8QYsHt13kawMaWpFrBQryuGd1iTWqokIo8To9uJ7O5SgEIZB+0CR9r6Iw+MrGFJxPh6huZghc2AYazFDNJhn9YEMGw9FHDx4g5+oznLAXWbASChtV8aG+JfBj1G+YmKbJtowktcGd88IgVtCDToG3e0iguDWcNAt86JACoKyoGx1sESyXpra5HRrgspZoOvd/PbtfNdaJWGLdgfd7mC2OxSKebRt4ayXiV2XUxNj1AazGFYTR5hYGCihCHVMk4iaspkJB3mpeYivnH6Aqd82GD27iO5cQnc6qO24QfaQFnPoIKB0SXG6u4dH3VlyQpOVBg/aa0yN/zsYh0lT4mwp7NiKQEuutytYK03UTnu6kKgcmiZRxqA9JghOdHhyeobnyhf5mfx5DCFoK/he5xBf+daT7PtGG3VpZltFue5c8CbVBMC2CMomK908loiwREKaAojfM3O+jrgRh7ztj+Mpi2GzybhZ55BVx0qd4dnya3xt+EH+jXyMtXaOkXoV0Wqju97dx3l66ls9Kclul/JFj5lzVf544BEYgmlrFQONp00uB2Oc6kxytT3AYrvIaK7Bk5WrPJm9zJDRYsQIGS++DSfgX819Bn/QJbeSQXS6aYXVDnEN0wSAME1UtUDrQJHVhwS/VD3Hepylo0La2uY7zWP8+YVjVC4mKm8YxhYGww5pBwhJZ8jg8KFZfm78TXLS52xlnNfGp5h5popWOcYHaux1Fvjp8g32OStkpY8tEi9nQHbISZ/D40usjE+RPZ8mVJRKkm2mmby/e60A01vlRD94HoRhoPeM0R1V7M+skBMRdWXwWneal25MM7KQeMg7Zhm0Qkdq07ERvo/IuFhZB6vl0Oy4zEcVPF3DEvFmFWhHx9SVxUw4yPfrR/jauWOM/jub7Nk51Mpa8sw7oRWd3iIGvjfLVw49w9lPj/L5odNMWmsYQuEpm1AbrKgNDpktCtIkg71ZUefrkCtRlUtXRjnWnN15KqFIdTXSOVAODJRbPFm6yuOZqxhCYCFoaoM/W36A6mmwri4R3U715B3gDo1uonIljERZyuwqLr67h9/NPsFs6TL77WUGjDZu6iHUlMNX6w/xwvIBFt8cRcRgHGnyC4fe4meLb1JIdRkkEkt4fLpwhstTg7w2cj9xwcU0jJTVcIdP9SGEetX1sK8uM/Gdcd5uHOUH+w+QLXUxDUUYGXTXMhgtA7smQcHc/ir2kZgT7g1cs0VWGrhac9Sdw9nfoH2mSOZ6BrGRkPC13iHKS4/DmXHxB7M09hrEe7qE2uScP4anLd5s7OX504epvmpRPd1Ctzqg9K2aqjsAYZl0RgSfq17nUXeGggw
54czxueIp1vfmiXXi2RgpuyLQBqE2MdC4ImFdZKXPoNtiISvQKbNB9ZgOqUD6PXm7H/X35EOSOXYc/NEcxx+e4VF3hqyImY+zvNWawp8p4CzWksP1o37XNmBTm3oLS8LwwW/brEQFVmJBqG9qQKwrg7m4xCVvlEvNQXTNxuwqCMKbRTz3Ehv/IGxh28Qrq0z/UZG101P8+sh+/AoEJUWcVchAorIxTx+/xH8z8VWmzGizKnUpjvhB6xDFd6007LHTgkKJQh+mkdyAFYSxJEbgaZNAByDgTW8vZ07t5cBVL5F5/aDFdw/6wLdvdHterpTJ4JXCroUMvmnySuM4L1SPog0NGkQoMLoSZ0OQv6Fw6or98w2UY7K8VOSPzQd44Ngs4+YCAJJEuGXCaHEgu8qLJUWUMTA/QBj99sa6pXUMNz1d4hhVb5A7ZzLRqdI9ZxPkiigbTAmFLphdhdUOiTKSdWFzfniI5YECsZ2UKVpCMmHWODC4xrVyCe2aybzsIIQUCX8wl8GrmvhVTbXSJkZwtj3JW6sTrLw7zMhJTflsI9H89LyP/8XbMTbTJCxo9jprjBhBqiSnmdQd2mZrk2NaVw6LcYlQJ0vOlQEFGeCKGKUlncjG8FNvtLd4lUbvdL/qLbrEcniQ2c/Y/NrYS0yl/NyayvLuxhi56xK50SLaaanPdC0J00xi4aUi/kAWvyww3ZB6lOVMMEpBdmmqDKNmjabKMBsMsBbm8GMTbWm8ikEh40L9ZrHPjkAnsWhx6Trl5QKlfBaddYhzNrFrgojxqiav5qapjzn42kehqKmIl70p/uTqCYbf9ZNE5k5CiM1krirlCIoGcVZjmzGhNvC0ha9hKbb487X7KZ03sJYaqA+SHt16409DVYmNub3b5B0Y3bTrQ8/ARDFmrUv5oia/YBHbSaWL4cVIP8boBMhaC71R37zWmBmXIWuKaxNlvjF+nCP2EnmhMQVYwiArIGv4aFOjjVtDA7c/zt7hIDYnpOftJWWAAXqjhh2EWAsZdMZGOSbKNsAQiEAhvYio5OCsG9QbOVajIr6GAiCRVGXAfYUlLlT2Eecs5E4KcfcodbaFyliEOUkwoDg6sIivLE6ujbP29jBjryoK59YTPd3Nbgy7wF6QAm3pNGQgcNMyagRYaYwcqWhqRaCTtZOTPmXZ2bwRNZTLmaVRqhsK4Qeb12EAlNw5T32Lbq0sl1h/eoxPffodnnTnqRoOTRWwGJa5sVJh4nKEbn6I17MtY5E32x0ZEuG66EqR7mSJ2gGL5sGIg0MbGEJxJRiiEzucrE9wpLDEoNWiGbuE2qDidFga7NCaLFIeKWGs7LCmAWzy4lXXQ6waCENiGAamZYLjYOwfw7QUMr3tNFXEmaDCv5l/Ev1yGffyDaJ7pGF9JLYIqVMq0B3P0xqXhEMBY7kGWRkQapOmsni+c5jXrkwxMRshmu3Nn9+anBW2jcy4YNkQRUm8N4puO4RzZ9KOW41L2kfIXIuxFsIkI+x56LSnkNaK6H3tegTWQoPCVZeX56f52WqZKbN+y2csB0XMtsT07oEPmWqiagko+b6NouOEayxMAwyZGE0piC0zLR0VaEMk1T6hpBm7BJvXZEFOCvY664QFjTJTcv92X9+2QBgGwrKIcxZ+WZAZbfFI8TobUY75hQpjJzWFd1fQiyuJx7CV47rDGWEdK2QgaKpMkpQkOZgUKlFAE5owHY4tYnLSpyA9CjKpiFqJM5z3x4jPF8jOJ0kkHd4skBByB+hZcFMMRwpkLoOaHGbpcyG/Nfp1Rowk6aOAC94oYtYld3kV1d5+kZveWIRlIlwnKVl2bILJAdoTDs09ktahkMMHF3hq8Colo8tSWORic4i3r04yP1TixMACVbtN0fQ4VFjBNmJebTm0z2conTPRQnL3VJDbRMov1ypGRzeFhiTgjTg8MXmOsgyIScIh324e59zpSQ681EUtr+7s7SE90GQuS3dvmY3DFs3DEdN7V3ikPMuEtYF
EUVMZvrVyFOuqi7Oe0OqS4p0kz9DzlEU+R1wpgATZ9BJZgG43KQC6Df2Vu68k39rELYzQnW5Szrs1O7xFtlCkJwRaY7U0G/UMy1EBSIxucuWQvLY6RW5WYK20kwq3u91wUiKUQqPTktA4uab3hMEzyTUjqmQI8yZRRuKVJE5DYTctgoIkKAucXEDB6Mn+9RKFGikU0hdIP05OuZ2EEOisS3fIpj2p+InJKxxyFvlBeAj7hk3xUhNW1pKXviUZIaT4kCz99kEHAZklwdn2OCvZixRkjCMSwUmE2lTryomIUaOOJWKyIsIQmqayeMef5E/nHmDglMaaXUs0a7d46VrJnduQ6WYUlTIrjxT4R098hT2mgyUMYq1YiwWvrk5RugwsLKPDnVFtEz1Zz3IJVc7jD2ZYesKhsyfCGWzz7J5ZnitfZNzaYD3Oc6U7xMW1IYwlh1Urz0Yhwx53gzG7Rk767HNW6OyzuTq1n5K9C5Ve78WmVygR5RKLTxn8w8HXqaaRjpmowrduHKZySmJfWiDq3qVG4u2g1zTVdVCDFdaPOjQe8nn04DU+M3CWY+4cReHjaZPrUZUrqwM4G4nTpQu5m7dY1yEaKuKNZAjyEr8ksNqQn3dxFk3EOsnNOvh4j/3OpB21TsyOaaIzDt5YHiNUWGkJJ6nIjRAapJksaNdBuG5iODIO0VCB7qCkXGmnJ4wkIqauAp7vHmLurTGmT3dhee3uPV2lIb3K9MIMm2pTqXC1qhboTuRpTZh0RgX+gEJXfMw5h8ySQZSDzoGAZydmud+dpSAVEjNNAFi8uHGQyjmw5zZSsvkOVinZNuFwgY1DBvsemuVXBl+gJH1+ABSugbHeSmNPN8cgpNiVVkM6CBg66fGNJ4/wcP4a1dwlhgwDEwOJgSkMBg2NrxVVmWyuEKgpk3PBGH8w/wirL4wx/foCamU1eY5bmAo7OK9SIHJZvH2DBF+o86XCBRyRA5KKxJP+BNfOjnL4reamGMqOjMM0EcUC4USV+oEM6/fDg09fYMBpc39unkPOIkNGE0jKvNuxTWshj6khkwkYdZvc5y4waa2REyEN7bBaKXB2aF8SsvhhQBrIUoH6o2P8nb/yLZ5xV8gKi7oKONmZonapyv4zHmonhct74SPTRJRLNA+VqJ8I+fkH3+Sny2+x32yRE5IQTVOFnPYzhIGJcKG518EpDSJDTZg3aExJ2ic8Du+Zw9aS1aUBuJ4htm1K5HGiOLF/t4HbfyPphhaGROSzdKcrLD5pY3Rh4LRJZtZCuk5i6U0TnXWJSxm64xlaYwZaCIxA4w0KvBNdfnHqFPvNOh0t6CjNn7cP82t/8dNMvhBjz6xu8Xju8IXoNLCtt4RCpEjCDGnPKV0u0J4usHHIpLU/orKnzkMDywCs7ckx3ygyWarz5MAMT+Quc8DaICckvo5oasV3O/fz+vePsP/dJnptI4k/7mBogcEKS09kmfrcDP+v6T9iyoyJ0QxaTdyNJMzzXj2AXevtpjX2qWvknr+P3xp8ksl9azzpNsgL42anEBxcEePpiLqKuRIW+V7rCL9z4VFKf5xj30tzxLNzHzzmHe4kwOgQKw85/J3D36YkbSChPC7FPv/kwmcZfUkgzl5F7dRtJtWgiIfKrDyUJf7sBv/Jgdf5fP5dqkaIRaKZbCAI0cAKh7LLvDm+B8eK+MToZT5fOsWkWScn0zUQQzN2cdbEzlLcPuKZjHwO/4Fp/F/Z4FfKb1OSiQzAijL5xuJRSucF9tXlnfVySUNzuSzB1AC1gwbP3n+ev1I6yTGrTUkmYSRfR3giYCksMVhpsjhl4w0ljVPlkMdT0xf5peEXGTDaDBkB67HFb5Se40/bDxEuWMSuTBgRYXRb1MY7OwZ7fFfXpjNk0p2IEJmI1mEDVBGzXkGGEJYUA/s2+KnJNzjoLuGKkG/Xj3G5MUjO8jlUWGGfs8z5cIAYwddrJ/jWHz3Oge91sC8tJM3zgrsMLYiUIqWSTXJL/6t
0U6uMhV8y8IY15YkGDw/PcTi3xKDZpKlc1Ihk0GwwYW0wZLSRQE0pmtrka80H+RevfpJ93wmR15ZuJq12AkIgKxXmfmqU6Z+5wv8w9UcctEzMNMZ0wp3ltyoyuUIaRhKDEnrX2+DEa+uM/+FlblgH+B9+6ov84/1/yv12k4K0MTFQaOoq4FyY409rD/OViycw384z+mZA9swN4sXl3W0A2muwWinTOFym9aDHM9mLSBL+6Jrq8t8vfhbxxwNUvnuFqNPZwbFIhGMTlhy8IXh4aJGncxfZY0YUpLPZGQQSHYWsFTJSfoMvPvgONZWhanQoywiLJN/gac3lcIg/vvAAUy8kbYZ2GzKTIXzwAFd+weCPjv8fVKSLRNDREV9v3c/SC+NMvVInXtn5WC4yCWsGBYvW4ZBDuWWkUHha4+oQSxiEqc7KlLPKw4NzXLZ9DKk4WlzkycJlTtgLDBkaVxjEGMxGFs3IRbYN7IbG6CasBX2bz3Lndw8h0bZJUBSYxYDHp65RtTvcl11kyGxQNjpUjRZD0qe8WZ6qOGCtMFMe4K3OFFc7A5xtPMZ6N8vcfJXiOzYTr3ewFutJbLh3Ot/NC0kD+jcLI27Gc3WkUO0OxlqT3LyDV3WolQpcyQ6QMUJCxyDWklbsYIkYW8RpgkgyG1b5xvIxrnx/mn0vBmTOLqLad+mN3w6EwCgUqP34ASpfmOcf7/0KBy0TR9yM0Q0ZbdrjgnC0jNX1UO1OEuYhjeXvVkNNrYmWV5n4bUX45h7+wWO/SuvRLs8euEzV7vDiwn7qpwbILgjy8zF7F32spUVY3SButXcsVvphEIaBzOdQI1Wakwb7JhLqYqhjQmL+tHWA7/+7h5l+o0a8Udv5wyBOBFi0gE5k44rkHfYMbu/GYJDQKx3DoipjQtopn1nS1or1WHIuGOF/W3gG+/U8Rn09KfgQkkSlaYebawqBdBw4uJf55zL8zadeYNyMkFgoNFdDyW+ef4rq2RhjcS0pw94F6CjG8GLyFx1eOzTFmF3DZpbQ6JKTyRhiLXBFyIjdYNEq4BoRRdNDaclSnGddRbSVw5VgmD9YeIS570wy8W5E7koDud5MEsC3SXu7484RxDHCC7GbmnjN4cZAGfLg5EOmrVVGjC5lKcluOaUlEZaImQkG+frcUZauV3GWDZw1wfh8THahgz1fQ28k7aG3hQmwhbyN2EI7CgJ0rYF73aRqVhHK4lowyspEnpFiE8eIkEJzzaziGiFKS643K8zODZC5YjP6Rkjm4gpqfSMtDdyBq5s0kBkXdWiSxec0/+n4KSbNEDPtNQXJFXg2KuPUwGj7t8a/pUBogcbY8UTaJlRMvLaOddJnz40K4ctlZoaPcNmRVOY8hlaWEy0Gz0uYLqn4+2575b24PgNlmgcLNPcrHs2vU4uzLMkNXvMn+O9f+yJTL4eIuRXUNop9fxi072OttsjNuVyrVZgdG2BIzmOJIKHgfcirMxD4KNpKMx9nea27ny/PPsL6y6NMvO4ha82dC4u8F71u4UODbBwr4x3v8mzhAo6QKDQbyuPf1p7FfL5E8fx6Ej7caa2FXouedpvMpRXGGyUW16b5J4emUXs8RgfrjOUamFIx2ywzPzsAscDaMIjyijdH9lDMeUSxpNHKoDYcnGWD8kXFxFwXa6EBGw2U56Xr+fZob3fG0+0Rt9tdCjMe3cEsC51R5ibKLHULnK+MciJ3gyP2wmYrFICZyObLG8/wh2ceInMqw9iMIrMSYNU9ZKObbMZ2G9X10hDADig49UIMKlEako0W7g2LqsrjbFh0h0rMDhbRJihLI2JAgAwFdk0wtKjJLoXkzi6h12voINz+RbOlp5cs5KlP5zEHPQrSo600eREl/Sm0ZlUF/OuFz+GuK0S01eDKVG5Cs+M0ofdCxcStNqLrYayuk3fspHS5eTPR9+G6B7sAaaTtwQuEIyXaowaqGLDm53ixfZgrVo1/dflZiq+7uDfWYYc7/wLJnAQBYmW
d6vk8sxNV/nnuU/zynpd4wJmjlHZ97jFnQjRtpakrK8m2B8PMeIOcrY1yYWaUwhmbsZM+zqVlVL1xK5toBxNWvdipGijiVQSGGXPeG2dAtsnKkN+rPcXv/eAJ9p0JkGsN1G4ctqluifZ8WFzGqNUZXq9SPVMkqNgExWGuu0k7LNPTTDWSW6tQIWHewCvnCZ0CRqAZqSvc9Qh7tYFcTRsvdL3kVn6HFX93lkhLDZauN7Avx4xGw4RFm9aYy/LwHr4yOMEfjT7E2EiNqcIGVbuDr0xenp8ieLfEyGlN4UoTc6WRlKmGwWajR51qqO6EgtOmviqAFslCFAK5Cq7n4yy5RAUHb8hBWQJlgBFoZKyRvsZqhpgND9nsJrXsQXiT1rTdpZW9sEiskLEmbNmc645xyFkkpEGsBSsqywutE5z8wSGmbviIdhqSUQpUKtTckx/c8jt3BSpGa4VuhdDeUg212wb2w2AY4NhEOZMoQ1Lu3SzRjmxKdpfGmQEmLoXIVgcVxztb3g2bhkE1W9iXlxl3xlhu7+HXnvhJHhm/wd7MOhWrjZVW7tXjDFc7A1ypD7K8USBadbEaEmdNMDanKFxtYs6u3DS4uxkrh2RfhRDWXL62dJzr5SqOjPi9Nx9j9HmJe3UV3QvLwfYLRL1vQEm4UXlxws1vdzHml8g6DlnTvFVEv0cWME3IuBQzDlrKRMa23UW3Wqh299ZCjrsY+x13A+4ZG9VqIVbXcQyJWypCqUBczeENubSHR3i3PEpsg4ygdDWmcG4DsbSGbreJ36v01GuzfQ8dNj964DcnZrMljtKIIIB2B2GaWLaFtZRPypylQPhh4nUHYcIXDkJUFKHeU3ywE2PVUYhut8lea1M8XeJPMye4MjnIkNNiyStwda2KP1Ng77dCnEtLSSucIHj/waXf89/dwubn7rKnfRsQQqBNA2WJpP4+kGzUc7S6DotmgcIMZObb6J0qhPggaI0OI+KVVdw3PSbnBlmfr/D2nmO8ltNoK4kwCA0yEFhNcNdS72slSKo/m15iGJot4l5yd7vbxH/U+Hti8c0OuaUc/mWTK94El8vDaF8y8n2DyuvLsLR6a8Xkbq5NrdFhkOQQOp0Pdka21hakLYR0HKN6obBdlXbsJai2fikM0CHgebC0DEKQMQwyaQECkHjGYZQKP3/IgDd/7y5QW95bOeP7m8ZerNc2q9mU1u+pq+79/C5cjbVGdbsYl2eZCCIaN8pcm9jPjAHZZcXElS7WlavEq2s7rwXw7xvSDaVMAQqMrkRpG88yEYFk70yEXKklcbrwA1oH7RRUjA4U8do6rK1TPm9QMc3ECeCms9DD1lj4ZiDphxW2gWT8vo9YXSd3wcBqFfEvW4RZC3ctJvvaxUTQ/kdpvX7QOHQqJRpF6B2SRd1e5rROwg9EEfpu4mE/ZI9Mp57Bj8SS0Jq42YSzbfKXTAppPL13ZYx+VBbuXyKIXtvxOMaphWSXDISWKBPshiS3qMidXUqMw3Zqz94utt7Iomj3ujlvE3QUEdcbiHYX64aFbRjo9HYY75bXvZ3YoXf/QypX+UuM3epYCje9cj/+0TgI/pJD+z5xEECjhXFtjuorqRepdRJKEjKJ1/UPtLvH1it8Hx8IcbuE3j766KOPPu4dO61W2kcfffTRxxb0jW4fffTRxy6ib3T76KOPPnYRfaPbRx999LGL6BvdPvroo49dRN/o9tFHH33sIvpGt48++uhjF9E3un300Ucfu4i+0e2jjz762EX0jW4fffTRxy6ib3T76KOPPnYRfaPbRx999LGL6BvdPvroo49dRN/o9tFHH33sIvpGt48++uhjF9E3un300Ucfu4i+0e2jjz762EX0jW4fffTRxy6ib3T76KOPPnYRfaPbRx999LGL+MhuwJ+Vv/Aj1bXym+rL4sP+rT/Wu8eHjfUvyzihP9Z7wb8PY/3LMk7ot2Dvo48+/kOCEAjbRgiB1hriGK00Qgq00qDVze/9oE7pQnz
w1+8A22d0RWrY+y3d/8NE//3vLvrzfdcQQiAyGYQUidENQnQYIaRCK3mr4X0vtL4591u/dgfYHqMrBIh0sHd7EvQeRKRhZq36C+rDIATCMMAwEOm86Vih4/iHN2/9d9XHjzqEQDoOwrZBClA6+dPbQ1s93Y9bz2JrOuzO9ty9GV1pJJtfpgYzHXSy+W9jEKmxFoaBsMzEiNgWQHL6dLvoKLqnId4xegcIJCdhb3J7L0mrO3s52zgmYZnIbBYGykRDRYKqjZYCGWqsZohR95DNNrrVRrc7qCDsH153inv0YnYc73VOPmzDb91bhgQpQSl0FN3+/txubHUWDOPm15VKrvoqHVO6tzb32b2OVQiEaSEMichkwDQRhkTH6R6O41u//6M+rzevW22DTkMTKv7wn9uCuze60kDa1i2nxuYLVZqPXQxSJJNvmgjXQeRzaMdGZx1QCtnsomsN4nrjth/mbp+jtzCFbYNhgG0hXAft2GCZaEMgYg1hhPAD8AO056HDKLme9LzMnRhnGoOSxSJ6bIDmwRK1AwbtyRhj0EcDsWci6y7uSpbMUoXCXETmeh1jYQXVau/o2D4QP2qG6nawZWP2DICOY+7Ui9mOcXzw17fsmdRg6TBCx7A5xi0GWdpWcoUu5tFZF20ZiI6PqDVQtfruOTOpoRW2jchmkn2ey6BsEySISCG6AUIpRBRDFKPDMLEhYYBqd9FhcE+fj5CI1FYJ1wHTTP8pQt/FvhBSIMzESdxEFKFDbmuf3Z3R3TKROE7ytd7EKH3L992yYLeedKaJyLiIXBZVyuEN5wiLJpErsDoKZy2DZRpI30d1u9u78HuGLJ+DSom4mico2oQFE2VC7Aj8oiTKQ2yDliBiMLtgNzXuuiK76GPWuoiOh2620K02yt/mDSqTG4AxNEj32BhLj9oYT27wKwdf4ZnsRQoyINSStraYiyq809nLC8sHmLk0TPndQUZec5CXbiSGdzs93vd5W1twLyGmHwZ6azmTQZZL6IyDDEJ0u4vudJIbVxTu6o3mli9tMbTCcRKj4TrQ9VCNZnow8D6DK8sl4slhGvtz+GWBMgSZNUXxUhYZhMSNxo4+h+jt8VwWqmXCkSLdUYfOoCQoC+LUbMgAnJrG7ILV0bhrIUY3QoYKWe8gV9aJ6/fmNGzepE0TbCuZq9S4J+83Sj3qj9kjvffTs33prRxARBEKDx18/D67S6Mrb4YUoigZdBjeGld8L3oeZepFimwWVS3QmcjTHjFpjwv8AYUMBe6aSWbJoGgKnGYb5fmkR/q9QwhkPo/YO876QxVWHoNDD87yc6Pv8HjmCqE2CLWJJSIsEWOgMUgm0dMml8Nhfnfxcc6+vI/89SyFGxHZ63nk/ApivXZvp/J7xiksE6Naof7UHua+GPNfPvlVfr5wmkEjg0RgiAyxVig0vr3EM+4cP1t6g1N79/DHxx/mYukAUxtlhO9vj9e2NfSyNazUg9I3r1rsYvjlbiENZC6LHKzSPTDIxn0OfgWsJpSuRWSvtTGWN1C1Oqrr7eyNa8sNUKeOS8+jEq6DyOXQpTzBUA6/YpGbaSE8Dzz/PY4NCNsmnhxm9jMFeKzOcLFFO7BZuF4hcosMrVeg2dz+d5M+g7QtRD4HwwM076uw8Kxg38Nz/Mdjb3LIWcQVIQChNqmpLCc7e3mjtpczc6Nk38iSWbFxGjGuKbG6PqLTQft3Mfe9g6gXGoyi5JYaK/B9dBDcfi4k/V2b7ySXvWnAAcIICSilP9YG3J3R1QodRqjbjXGmBkTmc4hSkXigQGciQ33apHEkYvrAPPcX1hm0W1zrVDm9NErtfBG7beE4NsIw7uoa8EGQmQzxif1c/EWHv/Psd/ml8uuMbRqx5OQKdYjqGQ1E+gdiHXPUvsHARItfe9jlem4EERvYDQenlkG02tvqFUnHwT8yzvLPd/n/PfqHfDqzQl5kMT7Aw7SEQVVKciKiLK9QHu/wf3+ySvByBXt1HREE93ZuSeP
9ISHLQvtB4kGoGMIkvCTiOLmipxBCfPSBDLtunIVpYowMU3tmkqW/FvA/PvG7POIskhWCutK85k3y24tPcub1aUZf0pRenyeand9Zwws3PVcA0mt5uUQ0Wqa9J0N7xEDEGquZwZnh/fOpFVgm7T1ZDv3kZf7+xF8QYjAbDPDHmYe4Pj9FtZpHzBo7EmKQtoUcHKD18ASznxX83HOv8rerLzJlmlgiuY4rFLHWhPh0VINJc50pZ5ULS59HmWB6GrseIb0IDIkwTbTv392AhEwZCgFImeSJtoaPbucGuDVE4jiIYp54qEScs4gdA8OPMTe6SJJqs7i2I0ZXg44TQ/hhV8ktpx6Hp5n/dJXG0YjccJvp6ipPlxbY66xRNjqMmjXGjSauUHhVyexoiW/uu58/mHgYLSco/tnatniQwjSJHzzEwn8d8j+f+B2ec+vkZf6W74m1oqMDOunmsoTAEhIDQYwm1IoYwfxaiaFXJaWLHcyVRhJiCLbxGiokIpdl6XGXT0y/wzF7iaxwbzG4oY7ZUB7N1DNyBLhCkBUwatQZLzZoV4rYjgOt9t0PxUpDMYMVwvES7VGHoCiQIRiBJswKEGC1NVZbIUONsgXdqkFrjwAJVgNKMzGZRS8Jy3QTD01nHLRhIJbW0M3m7iT/pIHct5crf3OUn/3ZF/gvB39ARWYwRLIWKlIxklvg8enf5+XRKf758U8yNz7J+O94xCsrOzOmdE/dAsNIcgpZl9ZkhvVjBt5YhFkzcOoWbqGQHvSp8Uyz82rvGDf+Ssz/MvVHTJqSGE3HXiU77vPfHhqhvTdP4WKWeDu93fQGqQ/v5fpPlBj87By/eeBPeMT2yAgXAIUm1DEhyXOGWuFpWIkL/Pna/YQ3cgxejime2UA0O8n68INkX90NdHLj0nF6mG11Vj7MSdwaArVtZLGAGizhjeXxqgZhTlA/BNnDNe4bXKBkeSx5BU5d2sPw9yoMfvMq1D86dHPvlLEP82xtG2N4iI1n96B/eYX/fPoPOeQsYqQepIHGEjGuiKnKGEMIJAJXKwyrxrOFi1zeN8jZo4cofvVDiztuH9LAGBzgwt8z+af3/wFPuzUywrnlWzoq4GwI/3r1U1zvVLBlxKjbZNypUTI7AJzvjPKnbz7E2LcNym+vwNoGuuslsaH3ZkHvBVqB6xDmNWWrg5EuWEiMbUuHPN8d459f/xRN32E83+CJygxP5i4xarTwtEXR9mhrkhCQ0ne3waSBsWeM9pFhWntM2hMCbzKgPNiiGxncN7SMKRXLnQLz60XCroWdDTkxPs/nBs9wxJnHFSEXg1G+vPgYJy/sJTOTw25ClIXORAyFiMLJKmMvNDGuzqOarSRJuUPJP3NqD5f+1ghf+MlX+fsDL1GRt94efB1RUxFNZWGJiKnCBq9ODaPHBmB1ddfiu0IIRD5He1+JjSMG+niTEyMrrHWzLDNC6VIFsb6xedMUpoUcG2H+6RL/z0/8LgctB5ne0gwZcMRZYGpsjdrEBIWMm4QYtmmsMp8nfuAAs5/NsvcT1/kvpr7JCauDk+6xiJil2GcmyrMe52nELqtRkdOtcV6a3Yf7nQIH32xhzq0nib40XLnJhrpb9BKMekvI6z1j3wyJuA6iVKT58BgbB026Ixo17DM2UuNY5TojToO9zhpF2WXaWqVqeBhoasrmK+WH+d/9Zxl4KfOxQ9reirStD7BnjPnPj/HIL77D3x7+PpNmB1cIAq1pKklTW8QIJBojjYusK+goi+tRhXe7e7i6MYC7wt2fdFuHZpmsfH4/f+/Rb/GYs0hWZDY3WqhjluIu/2rjSb78+5+kMKMROvHgzlUF3oAmLsTIriR/XbL/pI97ZQldq6M9P2FtvLeaZRugbYsoqzGloq1NWjqkHQecDEb5Hy9/jtZXRynMxaiq5NTxAer3u5QmOuTcGSwRMei0WYr0+ykxd4hwrEz9gEXjoMKdbPD06DxPlK8CsN9eZj3Oc8kbYSY/AMB0do3
n8hd40F6jajhIJNPmdQYmWvxZ9iFeHN5HfSOHtBT371ng/uI8f149yrJXZaQ7hIzV9if/UkjHYePJMQYfXeIXKq9SlTaQ3HAiYm5EPs939+Mpi6z0UUims2u8MdmhO1HAPWttX9z+oyASepO/f4i14xbB0Q6fnrrC48Wr+Mri6+4x6q/upTCTRabhHFnI0z46TPO5Dj+euYYlbt7iLAxsFBkzZN3kntfELUM1LcTIICsPZyk8vsL/aeJl7rPWsIS56dneiOD36k/y7YX7WFgtoVoWRlvirEoGLsQU312GpRXU1v0E23Pr6f18779bko7CMJD5HHrvKCuPlll9LOa+I3P8WPU6e+x1ykaHstFhyGhSlQGugBgINVjpr3FFTKgMzIYBzY+/UW57GXDP4K58YpSRn7vGPxj9FlOmxsAkRuPpmKU4z+VgmFAbTNprrKsOoTZ529vLle4QpzbGuTY/QOacy9hZ796NmTSQ5RLrn/P4fP5dqtLe9AB6Bvc3a4/x5d//JJNfb2I0PbRhoPI2QdnBLxuEWQOrrSleaSKvL930xrbGtLfbAzINlKsxUDSVjadjXuoc4p+9+WmGv+Yw/tYKwg9xJyr41Qzr+7IoLZFCkRUxBdNDqHsfk4gUygSVjxgvN9ifW+WQs0hZdijIAFvExI4kKwMUgkGzSUF2yUoDE4OImJqCmWCQdmxTzXXIOgEVt8snBi7ySGaGlZECz48OEJVdrBXr4wd1t89SKrLysOCXx88wZXYAG19HtHTIa/4A/9O1L+FHJodKKzxevMqkvUY25zOzZ4DT9x1l4kWXeKeNbpoDUfvHmX/OxXlsnZ/be44fK5xjwqjT0A7nc6O84UwhHBtCB2HZRAfHWXrM4m8ce5mqcestLtQx16Mqs7UymQ0N2+DI9MYqMy7BZIXaIwF/d/IdHnVnKcvUoSFmNY55sXuYr904xvq5AbJLEquhUyZQSHamlhjcVntHnJcPHHaam5CVMv5948x+xmHwsSV+deIUz+XOM2503mdg3dRY12LBtahCoA2aKsM1f5Bvzh2hcI3bOsy2tQxY2DZybITVp0fxf6bGfzv9Ffab4AgbhaKjAs4FFb7dPM6b65PUPZfRfJOi5VELMpydHYUVB3dFMrigKVz3cS4tE92L4UgXsB4d4OeOvcUeM8JJvdxYK3wdciks8oczDzL2ko85vw6GBMdGdgWWEBhekgAw6z5yfiW9/kQ7uziERAuBNjW+MpkNB1iMSvzOtceofM+l+vwsutlEOw7SL6BMqGQ8RqwaORFhCYUrt2djGbUO2eUc7ZrJWjvLXK5MLZNjQLYJtWQ5KrAaFajHGdqRw0aYJUZiiSuUZZc3vT382dqDvDE3SeCZuNmAfQPrPFq5zjF3jkmzwZDdRFkaodlWL+wWCIEeHSB7X43HsldxhSAkZj2Oeb47zf/nzOeJ3y7h7/eZLq5RNVtMmjUMNOuVC/zgyCH2lIqwC3QrY3iI+SdL5J9a4RenX+WZ7EVGjAADiGNBN06LiLIuwpDEw2WWnshReHKFny29gblla8daUVMRf157AO98idEZb9uSaMIwENkMtYMOR/dd47HsFaoyxhLJ5/tasRhnebc9wdp6HmddYtc0Tl3j1CKcNQ/RaKOCcNcMLkIiHAc5PEjr/hHmP2HwqU++w18beINj1ipVw8AVDrHWdHSIrzWrsUWApKMcXmof4ptLR6h1MrQ7DmHHwrlhMzoTQrxTlLEPeg7TwhgcYOOJMdZ+0uM3Hvi3PGDHOCK5vvk6Zj4y+W7zKF+7dpT21RJ2XbLhDoIAsysYvKzJrEY4Gx2MehdRa6I2avf8IqTj0NmT5ycKpylI+5b4nadjFqMSGysFBqIYXcwRlTLEWRNlJkURRqgwOhHGRvMWD3fHY3vpMFeCPJf8Ed5tjrN8eYAD5z10pwNKI2ybzkSWzt6Izw7OMm2tUpIxoQZHRCjz/dzPO4JWUG9SmMnTHcixkS/xrtSUrC7trIO
B4nu1w1ypD9L2bbzAQmvIugFfLx0niA3OXx0jM2OTWdVYWUHzsEl2ZJHjmRtMGHVcoQm1gdkWGO0AevHxbZ5fYRjUj5Z4avwkk2YNgLqKORmM8q9nn8X8bonCqmL5sOJIbonD1jLjRnIAnHBnGZ9eRVXycGOHeMg92pjj0L5/jOazXf7B9Kt8JneWUQMsYeLpmBjBip+EDqKBPNqWrB116T7V5j/f/zz7zQgj3XeQxFOvRHm+cfEIQyc19pUV4nAbjG6PQphxaeyDTxYXKRtJ7iNOaZZtpVmJi6wHORAQZzVhXiAjMH2JsgwM07i10AB2bm8JkTAshgdpPDTK/I8JfvrHXuPvDXyfAUNTkkk4TKHwdURdaS6GA7zZmeZiZ5irjQGuzQ6Su2Bj1zXlLpi+xq4nHvvtsCy2TXtBFvO0H5xg8Qsh//SJL/OYHZDdEi/r6JjTwV6+v3SQzqUSpcsCd10hI42zESGDGGupgej6SbWXH6TxnXtgBPQykY5Dc8Jk0qwjcd73bZaIcQo+a8fy2A2H7rAkzIIRgl3TZFcVmUhjxAqU2p3TOP0M4UtWvTx5M2C+VcKqSUSsEK4LtiKcHGD5Mcnj91/gi6WTjBs+OSFpo8gaPlFGvp9Pe6dDabWxFusUrzsEJZM1t8DLcpqzziiRllyZGca9YWO2wfFBKFACrpkDuKuawxfbyE4LbZs09+dp7ZdMZdc5bC1TkiGeFsx2K2SXNLLWIvb9HZljYZqsPCx4onCVgoxRCNZji9fb+7h2YZTRJUV7TPLo9HV+PH+GKTPeXMOjRocj5WWuVQ9jfMzn3N3gbtKSZKXMwjMmf+P4y/x47hyjBjjCxBACT8eE2qAbWUSuoH4oS2dUED7W5G8deZUfz14iL28mc2Kt6KiQk92D2KezlE+vo9bW7/xQ+xCBHWFIVDFLOBCRMUI8beFpH0MrAq1ZVzbLURGlBbm8R2uPIMzb+BWBXzbJZSSlqIwRJbSunb5BCjMpHGkfG2b+xwRfeO4t/v7g95gyk/dsYqDQdFTIYgznw1G+tnGCv7h0H2ImQ/EqTM1HZOZqCD+pnBNhBJ0uqt1JqGkfg20xusK0iA5Pcu1n4B8+8Q0+k1klK93Nf1doagoueSOsNXOYbYnha0xfYdcjnGvr0OmmGctU9ScItsfbMQxE1qUzLrDF+19mVlgcsxf5laMv89XS/QSxwaAdkLMCFppFVm+UCS6biNjGXHHAspJ42G7EnbwQwxdIockZPmW3y42qojWZIa+H0LZk4ekMD3/qHL86+l2O2U3yqYcTElI2OkTuvTM/dBCCH+CsBWQXDcK8zVK3ympbYviC6jUoX/KxNjxkECUHk0oTeLUGuuslHMeBMlrmMQe6fLb4LuNmBAguhhneurGHyUs+ut5MN972ezqiVCRzX40D9vJmfM7TJrUwizY09QMGQ5+a5x9O/Dn3WRFZkWT/FRpLQNnqcDlrJDel7SrW2RxcwkeVxQL+oRHGn5rnc8VTDBkKR1gYQhBrjSJh/uzNbXDtvj2EwyHH98/xN8de5Rn3GuPmTcYCJHtvRWneae3BXdPIeps4CO6MGfJBpck9L9eyCAayiEgy0xngojOKspdxRUhDuVwJhnmnPclSt0DWDnEHI/ySiQbC0GRpKUtQyDNgSkzPS+K6t2G47gpCIHMZgkPjzD9r8ujjF/ilwRfYY1g44mYewVcB87Hg+c5B/mz5Ac6c3svQK5LiNQ97ZhXd7qRJOJmErIIgZTAFNxOAH4FtMbqyXOLSl7L83af/gi/lz5IR2Vv+XaFwhaZgeNhWRLMS01IGUdak0orR7Q66001J9HHy56PKie8AQgi0ZSJ9qCmbEZ3EOTepNEIwasDPF9/i54tvESNYj11iBLPhAC+PHeBbw/ex6paQUZXsRgPanbubqDsauERnHaJyxOOVazyXP89DuTIDTpvnywdZXM2giyF//aGX+OvlV9lvRmSlvXk1QicevNVWt5Zm3w20Qrc
Tb7ciBHbTISgYuBsKsxvjrHqY15eT0EuvMGIr+RyQcYwQFZp7JY9NzvKQUyMrDNZVxHdax7DfyGPPL6G73Z050ISAwQoPjcwxZLRx0pBLQQYczC5x8NACDz1xg79dfZFJU+IIOy2YkSgdJ8R3obGaO6NZIAwDkc8R7h9l9jMO/8nYuwzILtZmYY7G1xFeug8+Ub7Avi+u8nB2hgmjzrgZUZIOZuqHx+kc+jpkJc5wqTGIjHj/Nf528F45w61iOqaJjBXOksGLpw5xenSUsWIDpQXzjSLNxQL2qoEMBFFWEw2G5Ac6HB1a4nB+mfXpHN8dPciyW2SsPYycW0Y19I6UXgvDgIlRrv6MwzPPnuY/GnqFScPHEjdvBqGOqauAP2s+zK+fehb3rSx73w3JXl6FtQ3iRgtkwofWtrVZfAHclsGFbTC6wjTpPDHNgUdm+UzhXaqGc0vMNNZJ9UlbSfKGx7MTV7lerjBbK9O4WiK3aJKxLHTcQpOSmLdZolB0fYbfCvlfV3+M/8vQdxgxPNy0OibUihBNQQosBE2tsERMViimrVXcYkjpQJevGCdYb1fJvesim1ZC4t9BMRRhmSw9XeLRoxf5ZP4cB60Gh6w6J5w5fnnoRQACbVCUHuNGgCVMZBoEjrWmqTRfXz+O1brHcIhOjKfqeshaA0tK8lESR7RnNyBMSsBVo3nzdpL+3M2HESAlqpihNR3xbOUSjpB4OuZMMMDvXn6E4df9hPMc7YyXC+BNFDiSe5uSjHFT/ugeM+Jn8u/yqf3nycqIgtBptlohMdIya4WvYckvYHQCkGJ7zwVpIHMZ1NQoC8/kcI9vcMydwxKKEIh1SKg1HQ0rcYY1laMgPb5QeAdLKMoywhE3jalCo1CEOuZapFmMyqy1s7gh6LSqkDtNpL2HbiWkSNXLBCJQDL0TEVw2kFGZdqeI1YwYr/sY9WV0s518n2kSTg0x/4kyl5+LeLw8w6DVZHkkzxsnMmRWylS6ASIIoau2t2JOCGSlwtxnBrj/8cv8x0M/4CGnRkUm1aihjgl1zLoK+KPmcf7Fq5+k+orFwKk21sxSotrX9VL7BNq2EVGUOEfxnXGJ79noymyW1pjJuNtOCfxqc/MnjIUkNnIqGKOjbPZlVsgZPkoLzlWyNCcciqU8rG/cSsHainvYhDqKUBs1sqfgnV97kJ/6/AlO3DfLw+VZBs0WMYJW7OIpC1+ZVMwOhlAcdecoSo+C7HI8c4P6ngx/cuwhwh9UMTfqiChKBY93JtMu8zlak3CksETVSDjOBgJLRpRlgzCdkkD3uMaKmAClNW2tmIvzzLXLmN1kfEIKtBbv2zy3Pbdp7broeJiAiGJ0vZGU/kZRonj1QbeTHqulWmHhiSL7D89x3Jkj1Iqm0syFFbpXijgLawnneRsobh+I1BHoKPsWiryBICcFHa1YjHMsAgaKQ1aXfHrl7OiQmajEqeVxxpoe8XaOMWXXiFKR9t48rQMRVSNmNhigGWcoGx1iBLU4x1JYoh5nyBseVaONZ5oUZFJ+GmqF0mGyBtJfXVPwfOcwL9UP0FwokPdSTQchuKcnSEVfkIkyn1nrkGv75CBRMmt3k3cZBMRRosTX02uxgJGMyVxugG9kj/FXRk/xePka18aqeNVBVD6DdOwkxLBdydS0Uq/xyf14z7T44vAppq3a5m0HUlulQ04Gg/zGpacpnbSpnulgXV9B1Ru3xJpFL0eSPpMQIgkz3KajePdGtxf4H6wS5gTz7RLngjFccQNXdJOFomzmo2GWoyJvNKcoW10GrSbDdgO/YLI6lKOVH4Io3kxQbfem00pDEKA2apReEWSWhlidmOar+X3EjkDZJIpHAkQEUQ78/R5PHJzhE5WLTNsrDBgtHs9f5ey+Udbun2Tkej7RMgjCewp9fCikgchmCcsx2VRJzNMRtkjKkg3AQhOSeGQx0NSKWEGIoKMsZoJBVlo5hqQAywbDuym52cOdxCa1Sp633UlEQ3pCR5C
8OykQGKBvjf8J00QUCnSOj7HxQMxPVGdxRUhdaRbjLC83DpCblYhmBxXtbBLFbIW8uTHJbPEkrkiu7p5WLMUWb3hTvFA/xIXaEH9j8g2GjDNYMnm+jk68xdp8kfHGte0do9abIbAgL8kONxjMdnipfoBakGWlk2OjmcVvORBIhBszNbHKz0+8iSsiXKHxNczHNvNRhUac5FI8bdNRNi/X9vPm7B7yl01y811Eq4O6J9GjnkZvqn3t2InxDRPtDdFKQoXK9zdb4STzlRhe3W5jr7TJzdlcXRxkuVpkzK5zoLLKGWcwYezcTQjkw5CKGonxEea+EPOlg+9y2F4kKzRKa1rap60VoYamNjnZmcJ/vcrwpRBrqYFutzfDZLfYpl5CPQat9S1aIx+HuzO6PS3dXJZwtAQCbqyW+ZPsQ7yW2UegTJa9PK3QwY9NKk4HUyqG7BYF6SGFpmV1sI0YGYHoeKh4h8TBddqCIwhQq2tY3S6Va9mkpt1Ma9ut9CUrhcrarB/L8Ur3IOp+gTsQcsBeZtSs8czgFf63ByYYfjEPtTpCRtvv6PaI8dUCmCntRts0VUROqs32zQY3Wzn7Gjxt4GmDUBs0VFJi6fsWMlYIM5WiUzoZ710YDa10EkoASL0QrXWiKyrTCHlvr/SuoCmHU1dL1PdbFMZqVMwODZXEzC8Hw5xaGyM/Fyex3J3i56YwNzqcOzPJ1yoPUstfxCDxtF9uHOC7lw/BbAYtoTNhp8pyid5GW0kueKPkr5ioZmvbD1mtNQLQEnJukkQ6tTxObaGIs2SSWYZSI6mS7A6YXGMQf8wiKxJlq8U4y1frD/HG+l5WO1mUkoSxge9ZRDWb4kWT6tkQe24jybDfA11MSJEoBeZziIyLKuWIii5GJ0C2knnphQjfF2oSoGOFCGMMD+KuwVqYY9BqkTMCDI9E5CaK7j0PAZuFGxyYZP7HynzhgTd5pnCJqtFBAetKUVM2i3Fx8ybxp3P3k11IKGAiCJNVIARaSKAno9kThdebfda2fub283S3XIfUniHqB7NEGVAbDm/qvbwlNFHXRLSSX61zEd7EBg8PzjFm18hKn6bKsOgXWd4oUF7SSaeDrQpU2+05poZXByEibiF6iTApN+ONqFQTwrYZ8MfxqiUujg3xeDnHERFRFiEPZq8zfmiFqFTANIxbX8R2QUik49Ady4OlaMUO63EeA0VIgIXCFoqsADvNaMcImsqmoVw62qEWZ1kKiwRtO4mPpQkPrPjmIaTvINbbq1vXItmwaW+pZLzi5lXTSK+d6deEZaELWcJqFm9QMJZvY8mIjnYIlcGNYICVtQL7VsKboYWd4mdqBas1Bl8b5N9knuSrA8eJtaC5nsO9blOZ0dhNxcZ9BiWjgyPAEpI4raD8/vJBBs6EicTjdkNpRKwwQs1G26XRdgkWs5QvGOQWY9zVEKMTohwTLV1ahmLc2iAnEy28mXCIr10/RvNSGXtDImKQMeRbkFlT5G90sOY30Gsb956oNAxExoVykWAoT3fUwStLMus22RsSs51oX98s492iNpfGdVXeJSwIpBOjtMRTFvXQJbccJ4US3jZQBtObuCzkWT9aRP34Bj9Zfodpa52CiAg1rCmH2XCAs944lztDXKwNsXpymAEvXYcyXc+GkRQ9sCW0oFQSountA6XZbFn2Mbgzo9t7EMdBD1VZfahI/TAoS4ECVbMhEpjd5E/sauKBmIl8naPZBSasDRqxy8XuCD+Yn8Y8k6N0qYPqdHa22OA96kI6Ulv+euui0FGEsbiGUyvSCU0MNDkRUpIh0+Yajw1d553cg5jbeQXaHIBIXqpt0R4xcXKtxHONXbLCx0DjighbKNzeMwkIlaSh3ISEHuVZj3Jcbg1irlhIr5t8nxQ3aT58QLLrNpB0TU2vVFtuJaJncE0zMe6mCaaRdAJxLJRtoI0k+6+0JNAGgTZYDgvoDRuz2bopJL1T0BpVqzP46hpWp4pfqGBFUFmLyd6oIRs
dVCHD+tEKo1YdV4iUCRJzypvk2oVRjp5ZJNopaccoxuoogoUcZltQnoXKBR97rYvwQ7Rj4Q+4tMcFRycXOWInMpRNpXmzNUXzQoWBdyCzFiIDhdBg1X2MlTq646Ha7W2hYQohEI5DVMrQ2J9h4yhERUXmhoEMM+RXt5RwvzeZahgwWKF+MEdzWjE6VKdodukom4trQ4xe7yRVlj39522gi+p8lsY+ya8eeolD1iplmayxjgalJU3lMu+XObk8Tm22TOUKiDhJOOqe8Hla2blpcLXavOndjUd+F55uUoHSnSyw9njM0OQG3cCi27VRkQQNcRmU1DiZkJ+cPs8ni+cZNWvUVJbL/gjfu3GQ6PUK4y94WBfnE97gDhvczZ5GPUFjAK1uTuRW2BZhXjBYaLPHXqNqhBSExBIhe5113na2NM/cgfEK16UzJpio1imZXVwZJn9EREGGFKTAScss0RExgo52WApLLAQlFrwSZ5dHyM0JpB/ePI23YY7fK0Si4ySWl3jTpIY9/aMUwg8xuhZmy2G9k6EZu4TaJNaCVT+PvWEgWz5qh0MLADoMUJevUVpaTTZTHCfVb0GAMgykGiDMlxk16pvslrbSvNaYovyuRC0s7eDgNFYzJn/dxF3VFK/52AsNhOejXQd/OMvacQvxZI1fGX+RKTPGEgY3lOCbs/dRPgeVs01krY1IvTLdaG5m3LerijIxNAplG7QmBOOPzlN2upzM7qG9apO96n4Ir1diVCvUHhpi+UnYe3yBZ4euMGg1mfMrtC+WMeeuEbe721aNKIRI5q6qeCRzlYJUWOnYHDRZGVKQyc2l0cziLhoYvsYI03J0cdOrBVKPN6WHpfmMXvv29C87k0hLmru5dEZMnjxxnoeLs9SjDLUoy7KXRwrNZGaDQ5klpu1VDllrFKRgLRac9vfwtRvHCF6pMvF8F+vsdeL12o6X026Kb6f97reit4jSvyBsC3//EI0TAf/R+Ls85MxTlYkAs6VjBs207n67jUSP+2iZqHKB9uGAhyo3eCBznWlrnbKMyAqBJQxcYW5Wzvg6QmlBI3ZZCotcbQ9wcW2I8GKR0pJK2gmFYWIY77WabuvPblFu0lEIQWJkiaI03JB0lhBSYkqJ1c7SbLvUwixtx8FXFkvdAs46iEZrZ0MLWx/B95NDfmvXaSGRNmjTwDrYZMToYgknFUPK8PbSBCPveklyaLshxCbJXkaKzLLGrcWY9TQRZVuEowVWHrSxPrHGP7rvGzzjzpOVyfhe7B4kfqVC9UwbY2E9qeZUOvFqU1bJtobt4qTNjYwUyoYHqnNMu2vMt4q0RoYSwSLXSbqVbPYBEBj5HPVnp1j6WZ+fP/o2T+cvUZZJfH/WqzJ4UieVctvJz5Uy6cMWC6rSwyBhq0ghUCgsFK4MsGWE1onms+lrrJZCeiEiTRhvDSF8kD6EVndWZn+H4YWkAkUVs7THBF8aeoOHnHkMNJ5OPANLpIUQQqbeWCJ2M6slX1l6EO/bQ0x+r468OLu9IsrvG+sWArdtJf+tltG2hc7YCcVDiPSlaETHByHwpsrc+NsB/+9H/oRPZeYpSRuTpArJ0xEXvDHsjWD7Kua2jtVKVI/q95X40oOv8QuV1xg3u+lc2ljC2FLUkZD2YzRrKsvp7h5eXNrP3HwVd8Zm9GRE4dTyTfnJu7wKbWIrQf69z6x1qn+bJhXklkVomgjDILNWoalu0rbm/Ao31stUFlVS4bObeJ9guAIpiat5vnTw7ZsKWVrxtreX9tUS1pkLxDu1Vg0D7dpEmaRHX5iV2DmLqFQhKJusnjDJPbnKrx74Ps9kZimlpcnXIs3/97XPMvVmkFCbeg0nt7IGdmLMsULEGmVp8obPc7nzWNMx/9J/jo2lIsMbQ0kuIY3NikyGxueO8uT/7TV+eeAlRlM9i0BrzoeCb83ex57vXCfy/e0bb3oLMzZaVE9XOB8Oc8haYcRQSMRmEnourHJyfQKWHYbe8bDWu4gwTmh
vrTbau3nQfniniTs71O7c0zVNlG2iTXBFSEFostLASI2BxNrUx5VIQh2zEkf8NzN/nbXfmGLyhTni+cWdDSlshRSb4sTt+4ZYfcAiqGjCYozIRVhuhG1HWIbJ3lKNvzHyXT6RmWHEsDfVyCCpVLkRwW+/8AxHr99Ipnk7ykG3HA4y4xIPV1h+VHJfdpERo7t5eCW19+8/UZdiyW+tPMN33z5K4aLJ6Lwit+jhXF1FLa/ezFRvpbzc7Wb8qJ/RaaJNi/fdAkQcowwQUhFqyUaU42p7AH8hS/6Gd0d0m21Hj/qYzbBxLM+nC2ewhEwLCwx+c+ZpBt8S6J1IoG1FOgdCQ3dQsvpQhniPR77Q5Nmx63y28i4n7AUkUFcB16IMv/jyf8rBfxFjnrtK3GzeStDfoTnVccI0MVYbFC/nqUVJ9eknc+eZOrHCb488xSsPH2TgzSqFGxFe1WDxsyG//6l/xlELLGEBFgrFtSjgt1aeofK/5ImXLm4/KySMUOsbVN8p8o9e/zn+1vGXeTB7DQNNWzm8293DH159kPgHFfa94mGfnt0sGlG9wyvtbv6RrX3ucNx3RRnTUiADeLV9gGlrnSEistLA2ixBTAbhETAfaf67uZ+m808mGHjjMvF6bfe6q25BOFKiO2TSOZZwcJ8qX+GQs8iAbJOVIa6IKQhNQZo4IrPZzwkSg7sQd/nfN56j+rZMNCL0+68Zdwux5WAIhjLEkx45efOE7R1isVabkpQAS3GX/27ur/LWX9zHxNuK3GwTY6MNteZm4qSHHaHjvQdaaQRb2qKkyTudz9IdkpQLXQyhWQkKrHbziFgQuwamab6/eGM3kXYDbu0RGGg6OsbTmje8gywtlplaiz44TrlN0FGEbHVw1jzCXJbusEE0FvDQ1A0OFVY44C5Tlh062qQeGTzfOcy//NrnOPwbq+irs6lA0O7Nm44idKNJ5VyHb1w8wrPFi5xw5jhuL/OPJ/6M+dEClz89Qit22WOv8ZQ7x4SRfU/TAJ/fXH+GF//ifg69dZVoJ2L6Kkb7PnJ2kT3/ej+/c+In+PUHPAqlLp2OA9czlC7C8PkO9swKqtm8eSNMw047oZd950Y3jpGdEHdd8ydXT6CmBZ8qnKVsdCiIEFsoPG2wEud4pXOAf/nmJxj/ikXxtSuojY0daYb3sRACGaYvtWkRKUnVSLRSh2SUeuoGjrA2r++9LrvJAgn4d+2jfPnkoxx6pwUp8Xt7xiYTT1ckCT6zFeKcy/Hro89Rm8jykHuNSbNDSSax3EjHxFqzFAf8Pxa+wNnfO8L0K0mbE91qJ5nftEHk5sKBHTe4tzzPluRk0vraILahG1jMd0oEymCxVsCuS7QhEv3iHqtiJ4pNPnSsKRsnmyXcM0B3OqChXGqqyZVwgO/VDmOuWpidHYjlbkUcoztdjLUmbt6iucdAbFgsdwq4RhKzXw0LKASvb0xx5gf7OfCHLfTV2STO/FHztd3zmdLBdNfDXGmQ/8EY/6z8af7qnlM8k7vIuNnkkFVnv1kHoCAFJXnrjfF61OVfrH2CP3j+SQ583SNe39hRz1y12mTOLjK+XqZ5PUeUcchGSbt3d66FWFlH1Rtpaf9W9BLQ25swvzOjq9XmqZyfK9B+u8y/XXyKF/YfoOp2sI2ISEmWOgUWV0qY113G3laUXp4lrtW3t4fY7UIlze2M1QbZ5QzZWYu3ypO4RkRYMTnuzFE1PLJC44pkfIrEW/c0LMUZnu88yG9deoLKD2zMuQXiXg32diyU3rU8CKDVxrqu2fMdWF8a5386NI6cbnPf6DL3FZYomV06sc2VziCvXtzHwPdtJl5Zg/ll4k7ng2N5O1SmfAtkWqFkmUmV0lY6XbVMOJxHWdCtZ5izSsRK4Dcdcu+9sd8mz3FbIEQi85dxEaUCQdlGmIpv1E6wmJ/l7dZe3pifxK6JpPuGvLNkyW2jZ8SCAJptrBWX3JKNX5XMzVdZzuY55YwhhKbVyOBcdpl4JcS
4Mk/8cd0fNtvSbLfhVeggQGzUGX6zxEJmmP/1gWd4eXIfT1RmOOwuMGrWcUWIp0MUHhLwtOZMUOE3lj7La68dZuJ7GuvcXBJq3EmopBbAiCKKYYw2ZRLGb3uIRitRNotvrrutjKYP4hvfK+7Y09VhhK43yV51GKGMXzbYuDLGShbQCaXNbsLIqiI352HPrhGvru1+SCEl9OtIQVehVhQZy2RQDFBvZ3h54SivTu5l39A6B4qrVK02VbNNR9kJGyPM0o5s5tslrs0NUHzHYfCtZiLssp3Sg2nAX6cEeTodjHqDkbkyA+9WaE9kuDG8jyul/SgTZAR2XTN9MSDz7hVUrb47HXTfi/f0mdo0uJaNMI00cywJhwp0h2y0CdozaLRdVCwRbQPDI73KbV+o5vbHn3KLLRNtmShboAPJ6dooy36esysjdBfzlBogAnXzmXdijlNnBs9D1prkrzl0B3MIZaOlTQjIACqrmtJln8z5JVSr/fHvvJf83PZiI73pQVqXFxjTo2ysZrm4Zz/vjkxhVHwqpTbDuRZjmQaHc4u0YpeLrWFev74X42yOiZMRhXeWdp691LvppbFouUyyNuM40WXpMRS2rr/33g63GXfo6Sb0INVqI64H5DYaZAs5SuczxBkLoTSyGyE7fsIvbLWJe/qYP4xYXfqZOkqu2+L6HLlmm8xshYGhLJ3hLMuDeW6U9hJlNNpK9BcML2kvLuKkrfjEsqJwuYa4sYTqdre/S22aTdfp79W+j2q1EPNLFE47FF0X4TrJtT1KrqKq0yH6Yc1rL/knxU3jtclsUKDTCj/LJM6YRI5ASxCRIOxa6FBidmQyx5pbWRW79TxapSWpESIIsZoxRtNisVZkdqWCXnTJrEiyywqz4e3sLa33zEqh/QBrqU5uwcVdFxiBRkYasxVjr3kYi2uojVp60N7GXO1YhV/incdrG5iex9BylWo1T1i0CUoOfjHDbHGIKwX4TukBDB+cDcHI1TjJPSyso3rO2E6hV2zEFhvQSyzH8c1uw1vzHVuT1UIiZFrBuY2Kgnce000Nrw4DVKeDWE2SEJaTtgeJInQQonpxxZ2q4LlTaI32faLFJVhaxhCSohQUDSOJp6alwDpOxVvS0tlebbUOgoTAv2tGQaPDIFmUPUrVbnuzH4T3GNzN+G2vQidWici7YSC0RgYKq6MxfDDaklibGIHAagsMXyNivWPdlD8SPQeioxBa49gW+WujdL08mbrAXdFkVyKyN1qwtLo7joNMwyt+QP5aGxFEyJaXFHB4HrrTJU75tz8SSNdoXAug0ULMCGwhsaWg0CsFt22wU+H/nlZ2FBH3mAE7PKfJugo3jfstuhMfRv/S4maITG2/4b07wZstH6zjRIRceP5mtddH0it+FLDpWZIcEh/0Lbs9pg/D+zilPxrYLAlWEr11fEKie3J3WmGtthAqR5hzkYEkzBvIELKLmvIlLxFh+WGtF50a/FYLMeMz/tt1RD4HUibC+mliUu2CkdNKI8JEYIUgRGzUAIi3isf8qO4nSJgCW2tnev/T/viW5DuGNPSFFrfSJW/nR3uOmFDp9uuxcj6gQOgOcW96uluSNVrFH06e7+PfH/QOASFuLsZbkIZIABFFyPU6Vten7BUon5coKzHKMlIYay3U4vJN72M3mQtb0fPYVtdgdW33Px9AxSgvBs9DWDebSv7IOzA/ylA3Q3a34L1dMHr/2/NuU2Fy3ht++KgCoTuA+KES0/voo48+/gPDDvFg+uijjz76+CD0jW4fffTRxy6ib3T76KOPPnYRfaPbRx999LGL6BvdPvroo49dRN/o9tFHH33sIv7/lcp4tpHYJSsAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "\n", - "def plot_image(sample_data,col_num=4,row_num=8,count=0):\n", - " for i in sample_data:\n", - " plt.subplot(col_num,row_num,count+1)\n", - " plt.imshow(np.squeeze(i.asnumpy()))\n", - " plt.axis(\"off\")\n", - " count += 1\n", - " plt.show()\n", - "\n", - "sample = next(ds_train.create_dict_iterator(output_numpy=True, num_epochs=1))\n", - "sample_x = Tensor(sample['image'], dtype=mstype.float32)\n", - "sample_y = Tensor(sample['label'], dtype=mstype.int32)\n", - "reconstructed_sample = cvae.reconstruct_sample(sample_x, sample_y)\n", - "print('The shape of the reconstructed sample is ', reconstructed_sample.shape)\n", - "print(\"\\n=============The Original Images=============\")\n", - "plot_image(sample_x)\n", - "print(\"\\n============The Reconstruct Images=============\")\n", - "plot_image(reconstructed_sample)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "对比原图片,CVAE生成的图片能明显对应上原始图片,但还稍显模糊。说明训练效果已经达到但还有提升空间。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 条件样本采样\n", - "\n", - "在隐空间中进行条件采样,本例使用条件为`(0,1)`,对应生成`(0,1)`的图像数据,同时将采样生成的数据进行可视化。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The shape of the generated sample is (32, 1, 32, 32)\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAV0AAADcCAYAAADTE3J+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAC050lEQVR4nOz9ebDdyXXfCX4y87ff/b79PTzsSwFVqJXFIoss7qR2WbIlS2p5PHbYbXdrojtiph12xERPu+3omJmIsWV7NO1uj9tty/tYmylRMiVRXEQWyWLtRAGFHXjA27e7L78tc/743QegilVcqt69KFn3G1EEiAu8e375y/zmyXO+56QwxjDGGGOMMcZoIO+3AWOMMcYYf5owJt0xxhhjjBFiTLpjjDHGGCPEmHTHGGOMMUaIMemOMcYYY4wQY9IdY4wxxhghrO/24aflz76n9GR/qH9NvN1nY1vfOd7O1j8pdsLY1neD/xxs/ZNiJ4w93THGGGOMkeK7erpjjLHvkAoh7zoBRhswGsZFOmP8KcGYdP+kQdxzahEyIzClsv+fpu9pEhOWhXBdhOOAFKANJoowUYxJ4veWzeJNp8P3km1jDA/3vvchvfPhka4QILLohZDZ74WSIO9GNEyaZkSRpu+dST2we89mpEAMXoQxJiOKNB0tsd1rk1IZeXkuwvMwvguuk9kSxYhOD9Pponv99waRCYFQChkEcGCW7uESvQkLbYHdNQQbEc5yDXbq6Fbr/m4aUiEdG+H7iMAHS0GqIUkwYYTp9TBJ8t6er3sw+v6M5Zucgru/Fd/xZ/fNxnvxFmsLY0DrbL0PgZ/2l3TF3YF9A0EoBa6LCDyMY2fEqzWyH2HaHXS3i4kT0Om+mvMD2z6we4/UcF2EbYNtYSyFTDXECabbRbc7o9kw9uxSCqEkwnEQgY+ploimcnRmXaJiNu5Oa0Bit2uIjW3o6ftPEEIifB99bJGNp0vUH0rIzzawVUqtFSCXPKrnZyhfziOvLmN6PXQUA6Pd1IRSGdnOTdM7WqVfVWglsEKD00xxd/qonRam0US3O9l8fS+QhW1l89VxwLIyxwYwe5tFkmSbxbDI7d41P5ij2Hb2q1J3P1cqW1NKYZREJCn0+uhOF9MP74+DcO/acmxksfAGB8b0eph+CLCv62j/Pd29hxhMBlwX4TqksxW6CwFhUWEkWH2DV0vwbjeQa5to3cPc52OxsC2k6yIKeUwxhw4c0sAhLlpEBYURmd3+Zoh9cxPTaqPDcOgbhpAiO44rBbYFnktcDWge8th6UuPMdJDSsLMdULjsMW0qOK3OYMLcx41sYLsIfLYeL5J+usYvHDrPSX8dW6RsxCW+fOAkrxUOIXSO6mYe0hSRakwK2f8M28DBwnNdxOIcmx+aYOf9CYWpBgDtho9adwlWCxSWA/I3c8jlLXS9kb33UW4O99gs9hyaQgFKeXBsjGOROgpjSYQ2yG6E6EWIjW10GO5/+EkIhGVna92xEbkcJuejcx5J3iF1JUYJjBIkviQsSeKcwCiweoZgS5O70UbeWkM3mpgkefc2/QC2w2B+KonwXPR0hTTvoh2JvdtDbss7m9d+Yv9I980emWVlHlk+IKnm2fhAgdaTPeamtgHY2C0ibvpMvVyl3Auh1983U96Z/QObJ6tEixX6Ew6JJ4gKgu6soH8gRgUJaSTxrwfMf3UW9+oGsgkaMNHwFp/RBkF6N3YrJdpRdGcFP/n0i/xo6VVyMuRcf5F/duBD7ESTzC/5sFu/78dgow0Ucuw8mfJfHXuBp4JrBDJEG0lOhrzuzfFaMUbFNqQ6i/HuhW9GBOnYyKkJVj8+yYO/eIG/Of95pmRC18DNpMQL3aN8dec4568coPJSkSklUVGcebzJ6Ox8A4RE5HMkJxfozbhoRUZuriD1AAN2N8Cta/JxgtytZ6SWpgMv/V1saHuk7zgI14WJMrqco7sQ0Fqw6M0awukUu9wn50dM5js8WVnmidxNTjgbpAi
uRLP8/9af5OKzRzj8OQt17jqm3R7dfDUGhBhsQgYhBMax6M24dKYl5esSv9kFvf/vd/9I1xgy+lHZ8WJwBI4ncmw/7JN8rMFfPv4iH8xdoSD71NOAFx46yv9+6GmCtSrW+ib3jR6EGHhkAd2TU+yesgFQkSEqCvozCfOLOxwp7rLRK3DVzNB/3cZZcaHdHq5t946r1llM2bZIPUn0RJu/VH2W47bBFw6PONc4/MA2/zf+DL3LU7hrG5hE3N/TgxS0H5riM4+f46ngGlXVBUCjsUWCNoL8tz2KF7bRu7XMIxuhvUIpRKFA69FZ3vcXX+XvLXyBvHABh4SUSdXmhP0yH8xd4fOlh/n37vtQYZ6pWgWRJPcthCOUhKkqOw/6pK5ARQYjIAkEcRFSxxCsSzCQc51swx54bEIKjHmH82JPfTI4kotinubZaerHFe3jMceP3+bjU5d5PLjJvNWgLBMKQuIKC1so5IByHnHWefrwr/Mfqo/xv6lPcWJ7GnG9N9qxNAZMionBdHuo7SZyxsdIgYw0JNnGtEfM+4V99XSl7yNLRfRUme5cntaiTRIIGg8kPDG1wUlvjRN2gxnlYhHyqPsSpce6/Mqnf4KjrwcQxaM5Ur7ZbtdFzk6z/kML7D4Vc/LQLbY6ORqNAJNKgkKfkpt54hKDs2ITrHah1sgSVvGQj0VCDia5gygV6B0qUztl89iBa0hhkEiUkPg4POLs8NMHX+XfPPFJDr1cgDD8rs8+1AkuFWpmmts/avhb1ZeZUW2UMMRGUjceL3cP88VXzvDAH9XRV25i4mh4trwFhO2gZqbY/cgiZ/7b1/h/zP8BeeEDoDGkxhAbTWrAEzEP+Kt8+oHX+eqlx6hWcqjd+uhDOIP5Kg4d4PaPT+F/bAsF7NbzpA0b1ZWIVKBCMBLyKxGiOQiDRfswviZzAIQQ4LrEBybYelThPlLjLx15lQ/nL7FoNSkIgyck9oBsLRTqniSaQnLAkvxM8WVuf6LK128+zuzmNmmzzf0I2Qjfo3tyits/LCgt7nJ7vsJcbo7ceQmb2+he7z0W090jrokq6WSJcCagO20RFwSJB8IILm1P8znrEerlHI96S8yrLrGBWatBfLSHKBURzfZo47p7ds9M0XxsjsZHe/zFB58nNZIvxSeotW3shiLccnh9KweWRjUsjvx+D/v6OmmrPfyEihB3M+rFPPFchdoJh/CDLZ6pXGFKJijhkhpNQsquViyHleyfKpkR9tttZENOAMpcQP3pRf7LD3yZBZXFSLdSn9vxBN9sH+N3Xj/Lkd/UcGVptIS7997nZ9n+8Bzxz+zyf5n9Q0rSQwlJajQN3edCnOOF7gO81DzIWrdIyenRiV3CSU1cdrFsO4u1j9Bu4TjImSnWPjHFgz91kQ+Vr/F6d44vdk4g2h7+usDbNbgNjb8ZYl1eQbdaWXLy3pDNu333g8RYVLYJ52M+On+TI+4mtkipa4cto0iRlGUfT0RMSUEgbSyyEJnGEJsUCTyUW+Z3H3uYmW8sIK8soXt9Rkm80nPRB+e4+ecE//0zn2XWrvOvJp/mlfQB5sMp/DDCxMm+zdF9IV2hFKJUJJmr0D6cozMriQrZZzIFd0sR75T46k7AtQOTXJia52ywTE6GXOrPYTkpOvBGP4FVFgoxrkNUkEyW20zbTVbCCvWOj7dhEawZRALaVqhQUboRYr9+i7TZHknGVVg2olSEyQr92TzNQw7N45pTUzscdrboG4hNSophV0c82zvJF66eYvpyionjodr2Xe1WClkusfZhwYfzlwhkQmwk60mZ59tH+IMbD5B/3se/sETS7Y7QsOxEJg4tsPX+SbY/EfI/nPwS88ogEaRGs5l2+dfNR/inr30I9XqeYM0gE1ifEURnuxjLkPgSPBdhWe8+Rvp9256pV9KpErXHE/7s1IuUZZe1qETUsymsCSqXY7zVLqrWwtQbWdx5v4/sA9WEsS1SR2LlQubdBhrJalxhKylyrT/Fdpj
HlQkH/BofzF3lqL3LlBRIIdDG0DKapaTIclTFq/TpHM5TWM9niotRJSmlQlbKbDxR5K984Ev8aO4qSgjUzLP8D++bYGd3ivn1MqJWx+zTcvr+SHdP9vE2AyAcBzNTpXksR/OwJCobjAS7JXB2DE7LIDR0dx22Nmf47FyF5+cOspBv0Ah94r4FVpIFs/fnub6/5wHQGhHF2B3N2k6BlbkKzcSn1/Io1CC3kWK1swXlNCLkzfUs0zqK2JMQyJyPmanSPlaieUjRWdR4B1vk7ZC+dugbSd+k9E3Cc/15/u3t9+O+nKNwafeO3GXoYYS3Mt1xiA9MMHNyiwnZwxOGvhGsxhW+uXWY9GKB2fMherc28pONOLLI1vurbH8w4ScePMeT3hK2kCSkdHXMb7ZP84+//gmmnrUoX+lg1XsYKQlnc6zkfZJKSq+qyJXziE4PkSRDTaTeMV8KhGMTlVzykx0OWrvYIiVFQsMmt6oJbjZhu4Zutff1SPwWxiC0QWiDMYJ26rIaVdiJc7zenOVWrUKv6yCkoVTosjVb4GOl1zlpbyKFoasdNtMCl8I5vt1YwLETWvMW+WoJGUZ3ZaRD3sxkLiA+PE3to31+vPAqVeUCcMbZ4ePzV/iNA5MkFT871fT3J9n/7j1dIbJ4yIECzcOS3oEU46aIrsLaUBSWU7ztPmiDt+sSrVp0V122FmZYnZgAy6B2LWDE6oW9+JIxEEZ4OzFyzWPnVC77846F2zC4tRir3kfEKaLWJK3VRiZtEZYN8zPUHyqze1oQH+ozOdGi5PVpRh7rSYkFq4Ym4lw4zz+9/Qxrz82x+HwfsbY50Lvu/bARVlhJhcjl6BzwOFO5RkGmeEIQG8mV3jSraxUmboJ3q47ejzjj94O9o/ncDFtPVtn+QMIzZy/xQ+VzlGRK1wginfC13iJ//4VPM/dFRfnVbVjfxiQJwrHxwjLFqzNsPwW9GUFvLkfQ6SPDkHTYBHFHl2sT5xV5L6QgIySGzbCAt6nIrXRhu5ZpiaMhn8LSFLRGhQZddzjfmMNTMcutMtvbBWjYiESgLcNu2+ZaMMm8O0dsLDrapZbkWItK3GxPcGO3CkB/wRDOFfBaXUQUZUqLd5rw+z4gLAsxO8Xm4wE/89DXOWSluINQXUkqTnrrpOWEuGBhv3n9vAvsA+lm0pX2nEVvIcWb6aC1IOrkCDYNwa0mspZl+FXNwfMd/M0Af9uhO2UT58HqAaOS3uyFFfZIV2tMHGO1QqyOhysTXJkgYoHVM6h2hGz1MiH3noc7IjtluUTtkSobT8HMyU0eqq4jheZyY5rbtTIvBYeIjaKbuvz28lkaz02z8GyIc2EZ3Wx/p/TqzkYz3LEWSkEpT+ug4mRug4KQSCHoG4v1fhFZs/F3NTRaoxnPwTtXlTLth2bYeirlqYeu8UPV8yyoBi0tuZ2UuBAu8I/PfYSZ/+RQeW4NvbmN2UtExjHCsiisVNlJFWHV0D5g4dbyqGYb0etjhqXVviPHlOA6JL6k6IQEIiVGcKM5QbBmsDeamXY8iodbaGQyxYaIE+xWjL8ccLk8A0DasLGaCpmCtiG1DGiRFcPEAbGZZblXYblTZquTo93xiPsWQbFPNB/ROuDgbOQR7Q4mihlaklIIZD5H79gEzSdCfqz0CoEcqJaERBlBToYIN0U7Fvt5ge/3T7pvF1qQAl3I0Z8UqGrIQqVBo+9R6+fJr0TInSam0wFtoJ3JTJxuiNXO4dY8woqFTAwijNCjOGYO4mJ3yvwGf2ZsRTiV8nh+CYBf91NAIeL0bnVKFI3wKCxJTsyz8WHDBx69zIcqV1FoXm4f5NZ6FbXi8ZXkON/yD9KuBZRfcjj4Qht16TZpo3k3ufcW5dgYMTxvXQiE55JW87SPpLwvuI4rLDQZ0WsjEBpkbAZqleGHaIRSCNclOTjN+gcUT569zDOVK5RVh7r2uRVX+fWN93Hu0iIHf1uQf/Emulb/juSTiGLseohq5UnKKZ05i8J
tl2CYCbV7RPwohXFsYl9wuLBDTgrqGla2y8xvplBvZXN0qIRrMglVkmD6faxal+KSx27gIVOBk4BRkPiGJJ8i8gnFYo/DuV1CbbPVzXO5Ns3mdhEaNjISEGjciYRgqkVnfpLSNQ97yx7eMzDIQ1XKNA9ZPHTkBlOyi8S983mKoal9TCSzubqP+ZHvj3S/x8LQeYc4byjke5ScHjudALspcLa7mH4/y0ZqkxVNpClCSqRSOJZEpAbVzyYMI4iTCjsr2iBJIE6ywc8HtA4F/Lmnn+PHczfYSgWFmTb9SpmCZyONwaR6dF4uIH2P28/keP+jF/lzUy8yodpcjma52pzCu+BTXNIkl32ctsfsah9naR29tU36Zp2rMSC4W6I5KMEeWkxaSETg05v3mTyyyyNOE1f4JKQUZMSx/DYvVg4TFSxy1pD7Le15iJaFrJTZfCzPzPvWebi4gkJzM5riVjjBF1dOEv3RJKe+2kRcWiLtvUkvune0TFNkmOBvCtqzKamv0JbIiH1Y+Yh7+ioI28K4DnFe8JPVl8kLmzox6Y6LU48gDDNCHDaMzo7+UYxsdsiv5DDSxUiIioLeFCTVhPJMi8l8h1PFTVwZc609xXqnwMZ6GWvbxmoLEJDmU/JuRKwlUcUQVRycvaZIQ4KwLHQxICoKAiuioV0kd79vN035RuMY7qqNu9XOKtP2CfujXohTVChod12uM0FtpcTMkkbuNLPa6j0dqxR3u2IpiZFZiaBV72P6/eFPGDHoq+APSNeKEb5P/+gk60/Dfz/9dUoyR0mm/Pih8/z6oQ9RuuFirYpspxtlwmdqguSJFj8//S0ecdbpGsVzSY4bK5McuJCQu9ZEtjqYThfT7ZHeqeQy3/mz9vpdDP7/MO0WSiFyAd0JxcFCnYJ0suMakikZ8liwxB9OnaJfnrjTJ2CY2OupkM5VqT2oORK0aKUe23Gei40ZLl1eYO5LkumvXCPdqaGTgUfzHeOYaaW1a5F6YLkJGAeZmDeemvbb/nsaL2FZpAWX7pzhSXcHV/hMDUJhMk5HXslHmmKaLaydAsXEoG1Ja9GlOwt2IWI636bg9LFkyhdun6Ld8tChQrYtRAraMaS+oTjT5uHqKleaU6xXY1J3UHm5z0UJb4BSiCjB2zG8vHyAb5aP8aR7PXsso/kPzcf46tceZPHZGHVjnXQfHa59Id00cHB3QV/O0UvyTN42lF9vohvNuxVGe8ck30dXC8QTAYmncJoxst1FD6Hc7m0Rx5k9lRKtM5Msf0rwNz/5O5RkJoy3heKTxfP81vFHaF/JE1yxh7ao3gpCKXafnucXTv0xJ+wtclLQTw2pkZhIYbdTxPoWaav93bue7RWsTE9mzxvFmG4P0+sNz3bHJlqoUHvQ8BcmruCKu8dEJQRV1abohdSGe3ocGCMRjg0TZWoP5PEXmlhS89zWYVa2y8hrPgde1hS/dZt0e+etQy73xFOF6xCXXboHEmbLbdarLlFBElhqaI9gtEFIjXB8zOwUW4/nOPOha0xIHyUkgbTRvka7CqWGZ8db2bWXTJONNsjs/QauJHVsep0cN3MBqie4uWGorCZUgLCs6E1IwgkIJ1PcmS6Pzy7zcP42AEvFCombyzq8DfUBDMZWpC5YVoonYjQGBezoHv/iNz7N8d9tIa8uo1utfd3M9oV07bU6VUeSLklUT+NsdxAbu5lk5R7IICCdnaBzOE93SpJ6ArtlMb0mswk/7F3amKyCLIqRvofwPbQlMI5m1m7c+Wup0RRkjOcMvJ4oHn7V2R6EQPg+W0/AaX8FT6TUteBSPM1LjUVUQwE6IwIhMG93qJUq08rOTtN5YAq7GWNvtrI46jDN9z3ivIUONFNWi9ToOwUHHW24GU+xdHuSQ1didGuIJdRCZKGkfI50skDroKBX93mueQR7zWHivKF8oYm8PShyudeTeVMcXFgWIhdginlSV4Kf4qoU1ZHY3TQrFx3WKU2nGBQCMK4iKsIj5eU7H7d
0hF3pE5Y9bGcUO9k9SFOMUlmSsReBEHgbAqvjULqhkFGK1ewjd1vZRi8V/swE6lSJsKJwZ7r85PFz/HTpRQoyQqF51j2C0Ayl58G9MGmKjLN3nvMipqwWGk1s4FcbDzP57TSTh7Za+57/2J/wQqeHs9XJft+LEO0uutt7Q82yGFSwhNM+zcOK9qLGuBpvfchHiTdrVHWKMRqTWIg0xeppVEuxEldITRMlJKFJ6GqPXmhT7Zpswozq2CYkMp8jONYgJ0M6xmIrzfFs6wSvrc3hb0jsRpglS6SEVGe19FrCIFl1pwNVLiA8WCUJJFZPDsZieM8hLBtRLNCdschNN5m16nc+q+keV+IK//b2+yl+2yG4vJadgoaIvaZLSc4GAQjwr7tULqYULzUQK5v3qDzuzhHhOJkCA+5IzUQ+R1rwiPIS5UbcWp2gcEvi7PYyhcOwFSGOTeLbpB64Irnjla2mKjtECu6qU0aFQYOrbJMSYClEGGN3w6x1YxRj2m3SXj/zioVEAt5kgFGKk9NbfKZ4juN2Hw0s2jskWqJik/UxHqbplkVSDgjLgkBl5JsaA0Lzv7z4UU6s9jDd7lDyOPtCuiaOM1kVZP1mo2gQAx0M3J7n4Hv0Ji06BzS5ow1cK6Xembjbl3YYeBsiN2mK6fbxtvoUbhb40s4p/mrpOhjompiOKRJ1HeyOHkhXRgMhBfgenp3QSn22RMyF/gFeqR0gXs1RWjfI3sAekbWlM0YghAFUFjffa3JeKtKZd1CRQcT6jYlKMSDpfdzohOeSThToVwWloIcn7o7broY/bj/A7QuzHLwUw059uInJASHsVU2pPshmVmGYX+oiN2tZvuFNcVBhWajpKVAy82DTFCwL47kYJZEppE2HwmWL6sUIe72enZ6G+CxZa0+F0AarC6thGY0mNYKbcZW4ZyNDs+8tCL8n9vIzroMJXIytsnaS/Qh6fUy3d7eZPgAppttFhimpb3i4tMIJu0FBuqTGMK3aRJGFCgehCxhaYY8s5OlOuCT57Gd3tDPwdA32LRfVaKGTZCjfvT/p40Fzb4S4owq4k+3bi+UqiS749KYlcqHNozMr9FObl/XEQDo04hZ52kAYonZa5NYCXt+coX8kwcbQN4a+djA9heqlI1FV3IGQGCWpt3xW4wr1NOCV1iJLm1W8TYm/E2fjtac3VuruBJVZpp4B6SaTBVJH4NZSrHaE6IXoKMYYk3nH+8kTUiHzOboTHmHVcDxoUZAR4BCblNWkwNc2j1G8IvFv7aJHVfo7eG8yBmdX4tVTVKOXJW5T/QZpnVAKtbhA4/FZANx6gtWKkFEWuxSpxupqSuctJl/r4y7tYnZqA093mH0sJBiN7CW4dcOl5jT9uYTQRFwNjyN2bexWf6SSxqwPrUJ4HmmlQDThYSyJbUmsJM10y/c6U3t5nTTFOJKkmPKQv0xVWlgopDAUZI80zRRNWQewIVX5SQWBT5yTaAVhYtFIc6QYrsYKuy0QYXx3Xe0zhqfZedP1IcJxiKo+3TnD6blNzhZWONdawGmI0epf92B0VmkURtgdTa/jEho9aMSR7XyyL1FxPPT40pvtwhiSjYDzh+fxVczF+jTJrkeuDlY3a/KNbWfRXJXF+zKyVVnIQQiM7xJVXNymxtvqIWutTC8dx0OJPwrbwhTzdGYsormYR0orlGUCOIQm5nK0yNKNKQ7dHHi58XC8iHth0hQZxVj9FJnYOE2welnZt94jXLhDuHKiysYn59l5fwKpwF91ya045NYSvM0usp/gNGJmnu9jLW2imy1MGI5GSpgkyH6E3TasNYo0dMpW6vBK6wD+psRqhUMjie/AXtc714VSns6RPL0JiRHg1RW+q3AAEceDEul7qg6VIqzYuNUeh+1tbJGFcfbkWjpSqFAPtepTSJHdXmGy3jDd0GYtKtHSKS/2j6FChjqW+0e6e9dzyOw4IPYaA+95vL5He95BLHZ5uLRCICPWuiVyawZzHxqYG20QWoM
QaFtgOQkdbSjILDK6nRRRfYEM05EqF4w2iE6P3G3Jy0cWKHohW7UCVktmPVOVQBf8LEkURhjfxbg2qWdhlARtkIkmydtoR5JbaqM2G1lpaK/PUO6lktkdaMlknvYBwfHDG3w0f5GSVGgMXZPytfpxCpdt/JU6pjuCGLnJwkKm3cHezeO0XBIfZKgHfYkHyUgFDJrzND98hOBn1/mvD36DRhrwB5unuXRxAfOyhdOwkO0Qe6MJW7uk7c5orpjZi8PrLM4pNMSxYjXxWU9LnN+exd80iG44ouIi8YbkYjxTYue0oj+tQRrshiRY9yiULXLXLeTaTnbLRhIPNNwB7TnF4kSdquoj8e786NtJEbXpYNeb2Z10Q5SQijjB6mqstkWv47IRFtlObVaiCqpvhpcYZR9J1zg2JudhlECEKbLbhzDCdDpZYcFMlcYxycJkHVukXOnNcGN9koOr8f70+fx+8aaL6LAt4kAyVW6zRwMtbXO+PY+3LVCtED0KwfkedIpuNJk4H3N7vkJzJiTtKyyRCc87szbd6RJuI8XuJPQnHHoVibGyI7QKs+ZCqSMo3exnV8vcqcUfDtEJ24KJMq2DHr0jER+evMa81cLGRqNZTSy+ev4kR85FiLX97U36tjCZlM50uojVbapC0HigiNVL79pszB3CbT0xz9G/8Tr/08LvUZKKrk456m7yD5NPsbE+T3HJxtpMMRvb6E53tPf53dHpKlJHUMz1aWqPzaTI7maRhUbmvWd/dwTNjZTKlD8TRRpHfZKzbc7MbhFYEe3YZb1V4PZKifL5KpPnfOxXI3RncBqamaB9ED5SXiMQd+1sm5B/s/0M0y8Y1Mo2aX94yUmjDabTw90J8bcUvXmbnBXSNxbtZFCVZlt3Q3f7PJ77o14IfMKFMt0ZhygvMBaoPhSWI9xbNWSSUjtTpn8gpuJ2aSYeFxszyCUP//wSyagSAG+++dO2MJ5DnBNMOCEtbSOJOR/N88ULD3DoYgybuyOtRAPQvR65l29RmT9KTbqo6T7iUER71qLVs0AavBWbJLBIpmIQCUQSkWQ3BaiuZOY5jX1xJfMy9iN883aLWSrU9BTt01NsPyY4fWyVR4JbFAYLqqtj/vedj3Lk3xu8l2+M9i6sQQiJbhdZa+E0cqSeQpVy2a0HUhLNFdl4v8dnfu6b/J2Zr5OXeSBrWn7C3uJYcZvl4ixGCtjeRY/ySpm9O8gG/ZTjkk9YFcwFHdaTErUkh7Vl42/3Md3+aMILQiJdFyol2seK1M7An3/gZT5VOE9B9kkRxEax8kCF33/sLH/85bOc2J5Frm4iSgV2Hi3jna7zdOEK3iDf09R9/nXzNM/+9iMcefYm6U5tJKcImWhkAkiYdlqspyUW3DpRWaCLAXLbHUojo31peGNKebYe8Wg+ErEwv0vR7dMIPZZ2SjgXZ7E60D6cUphuo43kpd1Fll+c59ivNUk3NvfhMb6XjXvJvCzxJIS4W6GVpLhNzaUbc3x55hRl1eUfXPwk5Rcc/KVdTLc7+iSfMaQ7Naa/so5MZ9l+zMM90GJxss5Crs5Bv4YnY1wZU5B9NuISW1GB1xszXL88y+QrhuK3tzCN5v4R7t6v9xS6QHa/mK4WaB60SIoJUhjqacBq6rClNV/pnOaFv/cE5W9dIG13Rush7vUJiGJMrYG/kiMp+bSPl+iXJe0DAvlIg7/14G/zZ/PL5OXdo66FwhMhK90ShRsS/+Xrg54WIyRcx0HmcwjPw5QL9OY8OvOGhaCBJ2M2o0J2GmsPud/CvWYNbvZOKzmahxTOqQZH3C3OOC0q0kMi0BhCZ4NH3VUe/PFV/rH6IQ59Pk9n1mbzg4YfXrjBrNWgawzLUcJnm+/j1/7txzj8G2uDIpUhE67RmH4f2e4j4xzGS6lYHU7YWwQipPdgj+5refLbBWQUocP9Tei9M9K999plx0YHDmEVTh1e46dmX+Gos4kjUnaO5nnxzGEutmbY7BYwwGvL8ziv+xx
4PsqE6SMo/b2T0Bs0u8nuth/E9OIEp57gLbn82sLjVLwevQtl5m8OKuUG/27U1wiZJMZsbDPxkosRFbbtHP7ELk8WlzjjLTOlMl30elKgkQZcb09khPuCovxaDfZugN2XW1/v0X+Ke0pTyWRiWkqcpiF30+JS4zB/Z2KBXLVHr+tQ/aLH1B9dHT3h7sFkNwubKEJu1lBKkc67NI/B5BPr/NKRr/BDwS38wTU9e0hI+WzrYa4/d5BjX9lF1xsj93BluYQ+MEU04dObtKiflEw+uMmjhdsoDHLveC5llpG/V8kyLEiBcB2SgkO/ajhWqbFo72APEmFZyXeWGJtXEZ/KX+D8M/N8NT2LEVBYaLAbBXy29jg97fDC5iKNlyY58kdN2N4dmbeePUt2O3F1usms1SAl6yyWL/RpL3h42xNYaYrZ3t3XjeAHJ917CVdmVT+JZ4HIylSlMHgyRhtJPQ2IjeLKzhTdpSL+uqSyYSguhXjXt9Dtzki8yDeQhGMj7KysVwiBsS1kavC3DKs3Jll1NaU1gdOIM53mnnRl1DBZUYZc2WRSSlRU5HLjMNdPTPDUgSXO5FfRRvJyc5EXbh7CuuYxc9lQvthCrG5lkqz9Iok7WX55J4ly56M4QRiDV0tRsSRZE2jbBmMzuZqQe+02aa12fwgX2LvY0yQJutVGKUXet+hXAtYPldg4UKJvzJ1iA4DQxHy2M8k//v3PcOTzfVhaGU3e4Z7wl1ASJsr0ZwO6UxadOUF/PmbeibgVVulqhxd2DuLWDKI35FaO9yJNs+5inRh/0+XqxiS3Zyc4bNexRXxXhYAhJWtW3kkdMODWBP3XyjyfL/EtyyB7kmBNMHcxRm010P1w3y+BfCvsja92LIyCfuiwGleYterspHm0yZ7B3Nun5L57uvfAxAlWvU/xms/1wgL/cyvPYrlOnCrWWgU6bY/gVZ/pFU2wEWLv9u9cJWKGJD7+DhsH9esIOehVoLNJbVkgBKoTk1+1smbFTUNxKcbabmftHEccz32D3WmKbneQyxtUo4Rgo0D7YoHnZ8/yxzMPIlKB3RFUbhnyKzHeWhu5WfvOstZ3bcjeO9JZ5Vuqs8TOIKOuthsEicbYGW0JrRH9GNFoZTHl+ziGe9gLM+hmC3tFMWFLUs/nf1XP0Drj8anCa5ywO7S04fOd0/zyH/wYh34/wb6wTNrZxw3s+8GeJ5ZqVD9rFG71BPaOxXVmuLVZpVjo0n5lgvnlwYksGp6u9F6YNMX0+libTaqv2yRBnr9vfYobx6f4YP4KC6qBK1JaxuZCeIjPbT3MS68cY/Ii+LsJRpGV3gtQcYrdTHHXW1njplG0ArgHshtSWE6JXijyy61P8cjRZa7XqsTnSkwsJ1g77aFUo/7gpLsX0zODBRjFyI0dqq8pnHaeztUKN4oVhAG7DdWuoXy5i13rIZodTK+HDiNMNFxJyBvsJTtiIgYlvXtFBfagFDhJyXVjVD+Pu91D7bYx9WZGuiPQk343200UoRstRBjh7dTxbubQpYBwKkBog4w19lYX2Whj2h30MG3eu7J6EJrZm4x6t4Zot7N436ARiokidJKMxHP5gezuafS2xklSZpJpdnoB/6L+Yb507CRPT13nxd2DXHtpkcP/KcI9dws9Si/93rlqNGK3gWsMVieHW3cJNhRxziJ1bBI34MC3Q7zr25jBrcQjuULKmEybvL2LHyfMJtPUagX+3fKH+K3Fh6nkssrUWsenu5kjd9PiwKWE3I06otNHDNa8kSL7fZpmtvdG0GVw7xG0wfRDxG6D3GWwW0Xayw6Xbhwjt2aYuJ3gX9vJrj3q7L/j9c483b0XO5jIulZDhiGljQLFnI9xMw9SRAkijAcJnRgTx3eqgO5oRUeBe+zVob6rYNjrxt+2EUrib7lZm8Q7E3j0V0G/le0mjjBJjOj1EI0mctPBv+Xe8TZNv4+O4+xCvxEtvHtj3LrXg/49fRT23uv9Hrs3w5g7Y2SiGKvfZ6Y
2SX6lTP3oPL82P0f+luD4NxrIa7fvTxx6b4PQkO7sIjsd1I5HzvMIfBccO3N6tIa1TdJObzR64XtNTBLSVgvR6+N0u8wsl6hcqdCZLRD6RWQC1bZmfivCXt+GWiMj1TS9oyV+c/9hc2+xyrChMx4QqUZ0uzi7DSZu5ShfKqB2WphmK3NghnQDx7tXLwwmctpsQqt1h9DeQKrvpcV3z6QGshs+9y6cuw8XOH7f2COMJMnsfau+uPfRIx91ovFdYbCRpbs1RLNNcNsl/2IOk/Nhp45utUhH3cfgraDTLDbf7b633vfgu00cke7swm4Ndf0WxTd8ni2w9O16n8Abn2nUzXr2nJk4gk4HtrbghiAZfDZM7H8ZsNk7Hr1Hyeu74U+SzX+SbH2vwphMJdKOs4IHeG+cbt4K70Wb9vBON9033HDyHti0RzTG+0u67+WJMcYYb4V7Qk/v6ZPOGP/ZQIyyr8AYY4wxxp92jDiQMsYYY4zxpxtj0h1jjDHGGCHGpDvGGGOMMUKMSXeMMcYYY4QYk+4YY4wxxggxJt0xxhhjjBFiTLpjjDHGGCPEmHTHGGOMMUaIMemOMcYYY4wQY9IdY4wxxhghxqQ7xhhjjDFCjEl3jDHGGGOEGJPuGGOMMcYIMSbdMcYYY4wRYky6Y4wxxhgjxJh0xxhjjDFGiDHpjjHGGGOMEGPSHWOMMcYYIcakO8YYY4wxQoxJd4wxxhhjhPiutwF/Wv7se+rWyj/Uvybe7rOxre8cb2frnxQ7YWzru8F/Drb+SbET9vsK9jHG+G4Qb5qH45uox3iv4t65us/zdP9JVwgQEqEUQkmE54KQYDQm1ZgowsQJ6HTfv/pPDYRAKJWNqxxMDm0GY5y+N8nsXpvhvWurEAjLRtjZ0jBpms1Xo997tr5XsLfm9+aieFPUUr5ps92bq4Nf31Pj+hbvnzTd17m6f6QrFdKxEaUiVEvEk3m6sw69SYmRAqtrcFsabzvG2eogdhuYThfd62OS+P4PvLg7YYRtIV0XfC/bNJQCraEfYvp9TD9ER/FoJ8yAtIRlITwXUShgijmMEAitEd0+ptPDdLuYMHxvENreBPZcROBDMY9xnbv2NlroVuv+2jogjL25qw/OEFZd4oJCaHAaCc5GB7ldQ9cb2Xt/rzgMe2RnWwhrsJSNwcTJYEyHPD/v+X4ZBNk7dh2MpbLPpMDYCu3ZaN8CDSLVqE6EbHSy99/u3P/1LwTCcZD5HCIISCdLRFWP1JHI1GC1Y6ztNmztorvdd70J7w/pSoX0PeTUBP1jU9ROOrSOgHWkzanpTRyVcLtVZr1WgBWf/G2Pwu0yueUuam0XvVtD98P7t+vtEZrjIBwbUa0Qz5YIJ1z6ZUXqAQacliHYiHA226jldXQYjsZr3yMvx0YW8ujJCt1DBVoLFkaC1YNgK8Ff7SA3dtG1wc58P7E3kcslzFSV7qEC7TmL1BOo0BBspeRu5JC3uH/EOxhX6XswO0Xj0Sk2HxckMxFevkscK8yGR+FGlcqlPMFlDza2srl6v4lXCKTrIkpFRC7AKAnGIOIE0w8hDDFRnBHwMEhtb824LiIIoFoiKfnERYfUV2hLoC2IA0lYFfSmDMYCGYK/laN0vUDuso2IosG7v0/jec88jU/M0170aB2S9KY1xjWIWODsOhRuBVQuBFgrO+hmC9PrvWNP/d2TrhDZThsERAeqbD3i0ntflx868To/UnmVw1YNgB3tcz2a5qWTh/jmxmFWrlcpXi4wec7GeT1G3DnGjXjw7/EgZaWMnixRP12ifkLSn0mR1R65IMRSKVutAG77lC67TH9NIzd3Bjsfw1uEe/bZFiKfQ89UaZwusXNWYI51sCxNr+HhLTlULhUpJRrR6UIU39+JPNjEzOwE24+X2XlMkz9YRwnDbiOgdctl0ilRbnQQdybwCO3ds9FzYXaKnaemaf9Uk//u9Jc5693GFilbaYFnWyf53SMPsl6pMJdO4rW7GVEYcf+8M6l
QxTzm8ALN4wWivESFBrursTopVi9FtUNks4vp9tD1xl3vDPbHbiGzU1c+lxFuOSAp2IQlRRxIUhdSVxBWITwS8ujRW5wobLEb5fjW2kG2ihWcVhF7fevd2/JuHkOp7FQ7UWbzyYDmwyFnj65wqrABwG6U40pjitsLk2iVZ9KA0hqdphAngyn7g43rPoYXBMaWpB5MlNt8uHiZM/Y2ZSlJMVRlmwXV5qizyalgnS8VT/Gifxin5TJ1w70T5xkpBscjhEQ4DnqyxNpHKvQ+3OaxA8sseHUm7A6TdgtPROymeVYeqPDZgw/j1ScovRgh0xQNQyZeiXBsqJZoniqx9jHNjzzxbT5SvMSU1WQ9KfNbJx/jldJxrH6Rwk4d0e2OfDjfAKWQuYDWkQK7n+jz1x/9Gqe8NXaSPOe6B/hi8QSNVoXSK3b29++TsSLw6R2psPsjPf7VY/+Sh5wYV9iAJDZNTtjf4LC3zT9RH6a5MoF/Xg02iPsXDpG+R/ToMa79F4qfed+36KQuz64coblawKk5WG2Bv+3jbxfxtiPsCwm63clik/uxue1tWH4WMtKBg+pGWexWG5yWJPUE/ZIiKgnK1TY/NnWOD/g3sIXmm6VD/N3OT9B73cGG+/bu74RHAp/egSLNsxE/88hL/HDp2+RERFN71HXAydw0n5dn2Li9QLjsEqxl4cZ3epp896Q7iCERxQht0BZ4VkI6kADHGPrG0DeC1AgmZI8n/evMzjSoOD2+1DjLxMt5WN9816a8E9sRZJPF9+geLND7cJu/+uCzzFgNNJJAhkyoNmXZA+CaPcWl+RmWp45QdJ3s5+jhLkChJKJUpPlAldWPaf7WR36XH89fYkq5SCRtfZsHDqzxvzof58vpw3jbs6hGE5MkQ7Xr7e3Nwk3p3CQb71P8rfd9jk8GlwHYUj4pkovFGW7kywCD+PiISWzwfcL3aC3a/NWzX+EhJ8YXDuqeRNCMSnm/d4NL87P8p9nJ+x4nF46DODDH9b8k+OzHfoUpmfBqNMFmP8+LtQDTUBgLEl+gosHRV2f/mf1wbPYcFcAkCaLeQrW7YFnIehtbKYxro3Muqu/Tn7BxrJRngmscUDag+Fhwky8du8rzJ85SetaHVuvdD8w7fRzbwlSK7J5yOH3kJj9WepXjdpOuEUihiVEEMsK3YrRjcHf6mHY7W1t7oYU3q3K+B/bN0xWuQ7/qEBcMSmo24hLXrRLrSZnlqMr13iS+ijnhb3DM2WTWqvMT1ZcJn1acu/YQc8s50tqIwwuDwRJKIQKf1oKiWuxQVR08GaONRBtJPc3R0S5d7bIaVzh37QDHX+/D9m6WCNxLWgzDRMtGzkxRf2qB1U+n/I0PfZ6fK1yhKAOUkKRGYwtJQcY8WrjF148dZvdMkZmlyv2JPcoszsfCLGsfKfHYxy9y0lnHFdkhrCAjyqqDFAarJxCtzv3xdIRA+D6941PsfCjip4uv4AoPgNRoeiaia1J2UsGtpMJGWMRIwLbvn2cmFWphjqt/YYrf+dgvc8gSxEZyK57g5aVFiq+4FG+nqL7G6qVYrQh5Y3V/Y+bGABqMwEQRxDE4TpZ09j2MbWGUyv6qgrgIz8xeoyzBFtmfB0LweOEWX5k/gynkYFPel1CYUAqRy9FfLNF4MOFHy8t0jMP1JE9Le3S0y1ZS5PXuHFc3JilfA7W8RdruvNGhGXlMFzKpiOuQ+BmJbbVzfKtxmJv9CVqJx4vrB2jXAwAOzu/w0ZkrPBHcoKy6/JmJV7j2k5OEF4/gPH8Z3emOniiEyBIRAopOyFpc5lY0wa1elVoY0E8tWpHL5m4Rs+Zx/LdD7PO30M32cDOvUqEmKtTfP8/qJzR/5vGXeSa4QiDtO96YxtA3Ketpjiu9GXptF09COltB1urobnf0Cot8jmgqR3feUHW6eCKmpSUxkt00YDmaYGm3QvGmRjea98V7FEohJyrUTzh87PRrlAfOrcbQ0H2+0pvjc7uP8Nr2HDs7edS
6S2nNgG0NJJCjJwnpe4SHqpz6yA2mlMYWDrs64Z/f/CATX/CYfG4Ldhuwd+xNsrDCKJKUwrYxroPOuxgl0a5Fv6KIjvX4dPE1StK5Q7r2HdngwOmRYvT72CCBRqVId9rCLvUIVEQr9bFJiY1FK/W5FU7wzbVDOK/kmXx+m3Sn9q5PkPvj6QqJLuXoTknSyZCZQpuS3aMe+1ypT9FeKhGsSuKCYdUrsloqc9pzmVYtDtvb/DdHvsTf/uTPc/zmBMQxOmI0Soa98EKaInohTsuw2wu43a9QiwIubM7S2Q6w6hbelmB6SVO8UEfcWs0SaMOczEKg8jn6Zw6w/kHBB89e5odL55iSCaBIjUZjqOk+NxOH57rH+ObGYaw1F6eVHXlkuZRlsEcpyRloMlU3Jn/L46vLR/lI8RKH7W2k0NR1wM3+BN2tHHOrUeaNjxpCIPM5mo/N0Xi6z/9x6lmCASGEJuZznSP8P7/9Q/B6AX8DptpZksqpJxjbun8kYVskviKwIiJjiE3K9bjIzivTHLnQhpUNdK9/xxM32uy/A3Ov0seyMMYgCnl0tUBS8kmCTC4WlhS1U4KPnbjCGaeGK/JAdooIjeaV1iLBmkS0OpmdI0YWk/YwvoNIIV33+drMMSZnW1RVm4Ls0Zc2obao7+aZv5LC8jomjt71d++Pp+vY9BYKdBY1hxZ2eLSyTMXqcq07Rb0d4NQlMiYjOCMItSJFYouUgow5665SenSb3tEJvGYL0WjdzQoOnXj3ijZi3LqmmWa78E4/R2c9R+GaRX5Zk1vt46zUSFfW0FE0fM9BKZiZZO1pl+OP3OInJ1/hhL1DIBWpMcRE7OqEi1GFb3WP8bsrD7J5ZZLiqsDqaYyAdLqC7PbeqC2E4Y9pFKPqXQrLPitLRdZPljjqbOKgkWjqsY/VUNi7TfR+s9defO27PKOwbJIHj7D6EcFfe/RrnHFa2MJBo9lIE/796pOoVwpUL6Z4OxEiMRhBlrMo+hnZjFriJiS4Lokv6SYODa1wRMrFcI5gVaBqnUzCeG+oa2gOQZbYFZ4HxqArRfrzBXqTitQWpB50DkD18U3+7OQLVKVz559qDBfjHF+9cYzZiwmm28vsfau46BAdmqywSIHJNlRv0+L6xiTefMyC1cQWGlsMNqyWRbDaz06N+4D9kYwFAY1jNu6RBk9N3eTh4DapEayGZaLQwk0gdUHbYIygmzh0tEvfWBSI8UTKx+ev8MXFD+JfzSEGL2IkJzgzSC7oFJkYqrkuU06bl8MDeOsW5WsJuavNO+J4E47AMxMC4bp0T04gHmvw03Mv84i7QlVmG0LbxKyniq93T/Fc8yjPrx6kf61I8bYg2NSoUGNsRVx28fqTyPXtbMLsZa+HuZmlKSaKkK0O3mYebzOHJ2LKMvMQHJHST22snkC2++j7EFpQExWWnwl46v2v8+eLL1OSWUKyayKe7y9y6co8c9c0uVsdZCfEKIUOMoF/7Fu4pSJiZ3ekico9zywOJJ3YYUf7BKJNV7vI5B41xSgIV8msYMhzEUIQT/i05yz6k4IkgHAqZfroDj938EXO2NvYwgcyL3c77fHPN38E+1yO/JWdLC585+eqOwqIYVesCZWtJRGn2O0Ut6boNh0W7R1mVDaGfdOnl9rYdYm90SDZJ+37PpCuhKkKzaOaD86t8FiwxLxdYz0p0UkddNtGJCAMyBiits1ap8jN/CRl1SEnYgKZcNJb5/cmBCbwshc6akhFVFR8oLLMEXeLbvgwwYYhWOog1rdIW+27E2SY2KuQKpfYfMzmM4cv8QH/OlPKYAtFSydcSfJ8vvEwn1t6kNZ6AX/ZorJi8GsJqqcRqSH1FGHZQltlcr0wk7aFIYJ0qJuZ0QaSBBNFqFaIjHMcdrapSugbgy0SotRChkCvPzxD3g5SET6wAO9v8Ndmv8K85WKhCE3CdpryL1aepnTeJn+rg6x3EKkGP4tTRgWbqChxdiYRrfbovN1BaMH4LqkHrdC
lpT20ajNv1wgrApPzMg/83hPNkG0ylgLbIizb9CcF4YQhnkhYWNzhR+fP8wH/Gp4AjSY2UNN9fq9znC+/dJpDr8awsZ1tXAPClaVCtvaTQYFHFGOS/X8WoVT2PUYj+hFWJ8Zp22AbjloNStInNimKlKV2lWBNYHZr750yYCEF3UMlnMUOD+TXqao2ABtxmde25/BWLdyaQUVgdQQytlm1qjxvH0IKQy4fMksTWyQkAWjXRgqBHmWcR0hE4LF7RvLp0nk8ERP2bUo1g9pt3iXcEXllQgrSuSrW+2p8rHiRSRVjC0XfpNxOXX6z9j4++9JjlM7ZTNcMbjPB3Y2RcYoRAu0q4oJFryroVyy8zSKy10ckyfAr1YweeLtZwUt/wnDK3qEgXZSOsUVKP7VQIcPZxL7HO5KOzdrjHp869G3OOC0s/EHyLOL32g9y86uHWHy5h73RgDACx8bYiiRn0ZlVdGcFVr9AYcmFfjj8hNpeNaKVqQKMgG6YHdcLUnDWXaV7KCaaCHBvOYgwHK5JRmdx3FQjkhTj2MS5LF8TF1NyE11OVzY47q1n79pAS0c0tOHzndP8o29/nNmvSXIX1tDdTIa5V6RiFqZJih6qG6HWa+idXcwwDhNKgZSQaky3h2o4WH2fxYUdZlSW8NNodrTPlZVpDl0OM53zPuHdk67j0JmzqBY6uDKmrgN2ozx/tP0AzZcnmDqf4m9HyDBFu4repEOj43A1mSXRkoLq4wXX2U6KpJ7BuOpug4xReRGOTTJT5tM//jxPuTXWB5NWxgbCQYOeER6DhWWxe6bApxefY9HaxRECbQxbqeArnQf47MuPsvi7gtyNGiLRECdZ+adtYQIX40jCoqQ3LbD60J/2ye0GWfxsBDDaINIUpGTi4a073qQUCbFRtCIXu2MgHr2OWORztE4mPJq7hSskCSkNHfGH3cP88tc+wwO/tovY2Mk8MCXBd0nKHu0DDs3jYA52adcCioGPaLWG71TuHedtCyTshRlP2DtUpIdNiD/RIy4EuNaImgZqk2l0kxRjSWI/i+OiDLZK0UawlRTxRMxNY9HRDr+38zAv/fEp5r+akLuwit6pgdZZQs73YLLK7kMl6qcE/qbPzHMS2enAfofzhEAIkRU3RBEiTRFdF6Hhrx76KsEg/hyblD9qPYp/3sc7d5V0H0NJ7550fY+wLNCxxUpYYT0s8dX1Y7S/McWR32+hlrfQzRYYg/I9nGKB3EqJ+kaOpXCWl4MWB5xd+trOSPcHFBq/awiJLOTZOZPnb05/iaIMCGTK6YV1NgpHfmDh87uz5W5pamtRsODW8QarrGU0z/cP888vfpBDvynIvXwL0xkE9qXAWFaW2LAUqe3Tm5aEx/v0ewqnbhEsZfG3oW4d9wrFtcYIwf/h8HNYKJSQdNOUK+Esq8tVDt1KRhOueROE54GTMWVXp2yblF9vPsa/+LVPc+ZXb5OubmCMzsjOc5HaEBVtGidg6tENzlQ2+OLGWeZKedjegSHHdYUUmWcmBCLNToxpKplXClso8tKltxXg7sZZM6ZhV8sZg0lThM5ONKIfY/UNVldipKLu5HhZLHCzXaXe82ien6B4HWae3eXEzvWsLHkwZnuNm6JDk6x8LGDymTU+WlnnD148S+m6R3Brr0Ph/rrumaeeYozJniFJ0Qo+EdwEMpXF9QT+5Tc+xPFne6RbO/v6/e+OdIWA6QlaJxJmrZSLjRmub07gvpjn4JeaiEtLJO323eqfKEJEMZYx5IoO7VWHV2cXKNk9rrcmUf23aQ03RMhcQHRqnv5P1ZlTWcGBQvJfzD7H/3jsKNVXyrAxwvpwpRD5PP25lJLq0jEWpAnX4gl+dflpnK8UyV1YJt3ezSa/FIPOY0Caol2bzrxD80zMU8duEqWKV3vHKC4VCOqtzHMYcohBSIHIBfTn8/yZ/OsokSc1mufCWf7ptQ9RedEmd3F1X72H788wQTpVhr7kfHcBieY/bj7Gpf90gqP/8ib
J6tpdwhICEUuMpehNKJJDfU6Wt6g6HUwlRufc7Mg/5LDTXoxcCEk8GdA4LviJY6+Rl1khR2hiJr+lcK7eIu313l4iJobXK8IKDf6GIb8kiPMuJnWJ1lOmLzeYuXWetN0hHRwJ9uKpspBHH5pl8/1F4s80+OWz/45DVo2Wsdl5KMeVSycJbpYR9QYm3GfSjZPsNC0kIueRzlbYekwyo+4m/H5l45PMfFViv3aTdJ9ld++cdAfi4s7xCv5Ul5mgze1mCXE1x8SFGLVeI30rDWaaQj/E6iRYPYdurFhqV9lo5bOYb5QMnRTuQCpktczuKY+/fvILbyj/POuuYh5o0zlaJLhk7fuLf0sMEgrGdwG4FU1QVl062uE3N57g1osLHLwQYt4uvmRZpEWHzpzg1LFVnigt4YmE83NzxAUfLDV8b1dIhO9jDsyw9kGHgsySoptpl8/ufJTeNydZfLGF3twefSc0IRHG4G5Z/Ob5R/mNxvupvio59LUt0s3t7yAlYwxCa4yEXL7PmfwqtTiH6StEL87m6bDDTjrN+nrEEaqbICOYtNukRqOE5Gv9EvnVGP3dyr6HdVozBiyFu5ug+gqnFaPaEarWwTSamE6XNAzfMEZGG6RrwcwkG0+VCH5ynX988tc47UTYWDR0yJHcDhfyZBubUvs7X/cq6lBZP5OpCXbPFnnfRy8iycapbUK+8NppTtzofe9Y7vchUXwz3hnpDo7BspBn430WJ6e2mXA7XOxN49YETi3ChNEb5R5icExSalC9pkh8cNyYRujRbvrMXNeo3fZABzv8DKz0PaKDk9Qe0TzpX4es/QaxSZEYAi8icYPhExVkYypFNk62hUgEr7dm6WubS80ZXn39EBOXwd7tv/FIK+SdcTW+S3faoXM44cHSGqfcNbrafcN33Pk3Q8i2CMtCFgqYg7NsfqDMgY/cxhPZFHsunOXZm0eYuqKxVne/YzGOAkIpjBDkVg3Jrk9uQ1O81IT17ayA5A1/eSBhuoewuqnLze4EuRsWstYcmacuBl4ZWmN3YKk/QWgSbBS/9Owv8sDt5ls7ON/xg/bH273brHxQCNNPsLoxqtFD1Ac9cvvhWxblCCkQvkf3YJHGmZT/+tA3OWWH5Acl2ErEuDJBOwZtS9TeO9jPuSIkMp+DuWnqD0+w9XTC3535+h2n64UwT+6Kg7W1S/rdHIN3uJm9Q9LNunKZ+SnUQw3eX7mJxPCSe4DYGhiz55Lv9TcY9C0VhQLxfJXWokN/RlO0UhodH2vZpXijg2m2MOnwCXevlWN70SW/0KAgYvZIV6MJjaIX2uSjEetIZdYXVXUFt5oVuonD5fUp/GWL3GaC7A4WlxQIBi0flUJ4HvFEnvaCorKww4Jbpyj7aCRR3cXqakgGcaxhbGhSISsV9MFpag8WqZ3V/Lmpy0iy/hBXwhnStQB/K8oahtyHfr/CtjCWxGkZvJoht9JHbdVJ3yx6v6fdp/EdUgfi2OJcc56XlxZZfC3OeqqO6hkGN6/Idkh+VfPlpePsznwBgMkvubC5NPp+EAPNOKlGaIMIU0Q/ysj2To/cN62dgeMligVaBy0mj2zzfu/GG8raU2OoxwEyFMhYZ5cH7KvdClUqkp44QO10np1HDE89eI2j9i6QA+Br7VMEawbR7r79uL7BgfnBbHxHpCtkJt5vHSvx4cVv87B/i9hYHCof5ny1QlS2saxBjbowCNtCVcro2Qk6B/I0D1u0DmusuS6pEfSaHtOvg7VybwOZIVd8OQ66WqA7LTlQaqBE9n2p0bR0xK1kinDXx2kmGVGNCjojR7cuqLd9epFNvOvht0EkAzus7GYLY0xWGTRoS9k64tM6onn/xBZ51SeQITfjSZxtC7vVz04fw5DiDUqWzdwE7SN5WgcF/lybGbsBZMe1lbCCuyuxWgM1yKghsth34ipkAnY76zlr+v03hrPuaWwuqmX60znioiDqW3x7eQHvvE9waQ3d643UUzdxgmy2yd3OsXu
uyAuPzPIvVj9E9dUmutX+/mzZlz66dxOlACiJURJhDZKoUtw9Gd7rocrMQZDFIv3DEzSPwU/OXmNSxUj8Oz++a+Bmp4rbICtM2U8uEAJVzJOeOMD2o3nqZwyzD2zyTOUKnjCkRtPUfb68eQJ/J/3eid57c08/gDf+g5Pu3qA7Nu0FxROFJQ7bu3S1zWPl27x2YJ72vEdQzCMH4ndRKtJ+dJ7aCYvOQY0112a+3MK1ElbrRdSOTeW1ZnYdyig6Y8msq1g0EdCfNCzmaijMne5Sy4nFN9vHcTcsrNZoZFZwV2olohh3x9Buu8S2hepKMKBdgQ5cVJjLZGJpCr5HWs7TOp5n90FB5egOR4NtPBHhoDnXOYC/IbAavew2gSFsaGLgvYSTOXpVSVw0lN2YdFDltZoYVnsl7DaI6P60mwRAKbSrENogI42I3zQWA2JQlTJ6ukLnYJH6MYvurEaHCrXiUb2YYtY2R+jlikwbmySYThd7vc7kOY9/eONTbP7xPEfWrpF8t03sHcQcvy8Mft6ehhlf4cYBMspi3VLKO2MkHAcR+NmaO1hl8wmX4ultni5cxbvniJ4azVbqc2VjismNFNFo31E67AeEUjA3zc7ZjHDLx3Z5YvI2C3aN2EBoEs7FATdvTHO8MYjZf69Q3L1e7vdJvD846d7zQ1MHyqpDQSR4KuWR4BbPLxzi8pHDFJeqeI6N9m2ax/KsfSbhiZOXebK8xKTVoqtdvt0+wEqtRG5FIG+vD7yHIR+ThMjuwyrk6U47RFMJh7xdIBv0XZ3wrf5Jvrh2Em8bZDcefjwX7gT49xaXv6tROzZpTmJpQepDv6SwpnxsW2bi9DAlngxoHXTZfRDKD+7wkbmrHPa2mbZadIzNF26foricImvtQRx1CBU+loXJ+aSuxAyKCVMtaKce22nK7aTCSruE6pk7TfbvC6TICDcxiD07ZBZGYNADWFTKdM5MUzth0zqaYs+0ybkxrVqAtykoXK6T9vqj9XLTNGtbEkXQ7pK70eb2l+aZOndP74Lv+gP20daBZIw0xcQxpIY4r4h9gVECVwmk72bkK7IK02gmT1i2CYuSxgnIP7LNLx55gTPO+t2OY0BT9/li+zHUa3kK1xtZ2f0+bm7CdekeLdM4Cf7BFieq28w6TSKj2NEuKRG/U3+K4IaN6vezUIgUb39LyDssuX5H4QWjDYQhuTVNbCw04AnDorXLJyYvsfJgibWoitWeoDdjOPP+G/y/Fn+PRauLzeAIkZTYiIsksWLuXIhujOaerL37kJKZEp05STDZoqD6pAjaJuZ6XOTzWw+yeXWCuQ2N6IfZ5Bmi5OYOBhNaN1rkbrbJHyjRm1GkLkRFA0agbRuxYKNig9U3NI4q2qdDHjm6zIcmrnLKXWNCtcmJmG/2jpJ8q0Lh4jZ6tza8Io9B2baMNaoPVltQr+fYjvPcTEq81l9kfbvERJ9MDQBDS+a9LUxWjGG1IuKcRepKkpKPlaRI18F4DtFihe2zHvZntvnM/BWOeZukSF7vzPOF6CR2x4HVzftyP5rRBlIN/T6y1aVyOY+/0ctCX/eh1aSJM8IXW7twskS/KjHSJs4rhM6TOoJ+RdA4aVh8aJ2K2yewIh4qrPKQv8wJe4spZVBINDorUOnN8U+/8RFO/mEbcfnWoGPa/oUWRBBQP2ojDnY4OrHDtNsiNBZX+7MA7CZ5fufqQ+S3Bw2OHCdr/xhFdzuh7YkD3oVd7yyRZrKuXIVbfS73Z3naW8ITgkNWzE8Xvs3TZ69w63SVvrZZsGu8z20TCAdJQEKKp2PqssdGWCT/lQDnG6+i96Fl2veEVKipSaIj0+ye8WieSnhkcpuS6tDVFn1jeKl3mFdePcr0c4LixTrUm6ON6RqDSWLUjWWqEx470qVzwBBPJsQzhk6aHcdkR6Fdw8lTt/jE9CUe8W8xbzUoy+w49lI4y9975dMc/FaIub2WTeAhkYVQWbcmFWuctsatK8KGzfXuJIE6wQu
1Q9jXPfKrMaLZYZhN378XkoJDe0HRrwqMtJFpDq2gt5Dw1CNX+Xvz/4kDVoItJLHRbKUCT8R81TlK9UKftF4fvdFv0q2LVOPtxKSuwva87M62SL/x7w+z8c3eqSyKoN3B3whpHgpoHRaEFYGa6/Ho4jI/OnmOJ72lrPcvgv7AJiUEkqyvbophIwn5d4338a//48c5/a82MLdX0UNSt3QWDXPVJgeDGrZIudWr0kkcNuMC39w4THozj9M0WX9t30MkKcKxs06Evd6+7G3vWKdr4gR7eYd/+fwHeeyjS7zPXacsLQIlmVEpDztbKCEG1Uh3A+XaGFpG8xv19/HHX3iY45+9QdIbctx00ERGlYr0T82y+YRL50CKP9XFs2IaaY7L8TSxsfhnlz7I7NcExWtt5G4LHUaj0w3vwRjSZhvvxevMdA6yqXM0T0is6R75IKTohZwobXHI3+FT+fPMqB45mU3kroFXwmn+0c1PMvU7Hv7r30M0vx/mRhGy08PeUuQSg9AuqaN4LjjKxsECN5cnmb4C7mYXE0b3zzuLImSkaR2B+cdX+fjMZR4JbnHY2uGAlVCSHrZwAZfUaBKR0tIhX28chy9XsF99jXSUG7BU2YY2gLCtrJjnQJXVZzyMgsnpoxQu1RG7jcFNDkkWotpTEAwLe2GGMMS6sMS0OMTSj/scfniVPzv/Mh/yr3LASihIB1fYpEaTJ+sBHJPS0inXE4uvd0/wb268H/kfJjj2xyukK/vTs/Y7MGhHmV8S3C5PstXMKs+SWKG14IVUYHoW+U2BkYao6mBUGasdIJtdqDWyfjD78P7foaebDbhptpn94gK/cvgT/NLBL/Okt0pVWthCIZFIxBsKDmKTciPp8z+t/ijPf/4hjv36Dun27tD7Zt65TTcX0J536E8aTD4lSSQ3G1V8FVOyp7jcnCa6VsSrxchOVrklhMAICYyYeHWKbrWwXr/JXGuOYKvM7pkczeOS6Xyb48Emp7w1CjJCCehow652eL53hF9d+gD935lh/mu3sqYhw1x8g0SICUNER2ElKfkwQaYBKnJY2ZwnVxf4OzEijLO4qlKjVzDsaUo7EaoXMB20+Gj+ImecFiXpYOG/Ya4CdHXMr7ce4UvPPcSpL+zuWz/V78tWIbMYcxBkIn7HJp4ts/5Yjs5H2vzC6a8wbTf5w4+f4dUri3i3Jyhd05SudrGWd0i3tofvLOyFw1ot7PNLHNYHWWke4N99xGLiSJuSvI0nUiSS0MS0dEJdS57vH+Rz24/wwtXD5M+5zH6jg3Xhwndeg7PP0M0WlSsR4JDkbNBgazASZAIyApkYtCWIc9mtxk7LxtMa0Wjumx3vvCLNaHS3S/Wb62y6C/yND/95fuTh1/ixyisctXbJSU0gBN6AgLsm5uv9Kf7OxV8g+eIkh77cwNy4/Z2i9H2GUCq72NGxs0kiweoKTM0i6Sg26h4bKxVkR+FtSqavarzlZnYMjuP7ehQ2aYru9JArG5SNweqVqNcDLp1YwJKafsUm8jNPqJ7meK2zwB/eeAD7GwUWvl5H79ZGcumjMQZhBjHHQbzLaqf42xIZiSzksBsiwvi+3BJwx85Uo7bqlK4WeeX0AuerCxy1XydvDNY9OvfUaNbSLv+09hT/+ovPcPS3I1haGU3/XKnuFEOIIMAsTBFWfTrzNrVTgvyj2/zS0W/x6dzreCJlwa7xm87jPOsdo9f0KV4XWQ+GUTVpMlnzG91qYV24yaHGLI2r0/ztR36e3NldfvTgBR7yl7keTvNK8wCv3D6AvBJQuAmHVmL829uwtkXabA83Vm40ph/iX9nCapdJgkyzjTGINEusylQTlm0QoJVAW1keR8RZ17z94oF3QboGE0Xo9U2mvwKF2xN865uP8aW5x4kqGp1LUfmYYr5HlFh0aj7+dYfqxZTi69uDOOPotI4mTqDbo3grxOpn97lpJRAaVGRwOhrVS/CXW7BTx4RhlixIkvt35fbeiaLXQ2xsk09
S3N0CuTWXaxtHOD+ziDfZI4kVSd9C7dgUbgomv91DLG9kcbFhJ32MybpODbLVApDG4PUTnIZDXHCwmyFqt5016AnD/Re8f592mjRF1xtUXq2TOhX+/vaP8tunH7kTEz9q79LVFq+Ei/zyxU+iv1HhyHN9nNeWSDsj8HLvEdwLJRGuQ+pZhFWLzqwkmo6ZDLp0U5fYSPpG0dc22/08bLn4mxqr1s3KxEec7DNJQtpsI6PblOstCtcqdL9V4vNTH+azFYEKwWkY5jcTvM1mVircyWw1Q1LVvNHALFeit3ex+yG2Y98t8NA6068rib3pZrcZexZGSWQ3RrR7+3pbzLtreGMMutfD3F7F3dph5vUCppgjLXikOZs45xDnPILEMFNPcJc3sxt0253R9KfdI600Rcgku5LnygbOqo9xrWySpwYRxYhB4YBpNAfxMM0butffLxiNjmJku4NIEpxmm8pGDn+nQr9qEZbymWKgb3CaCd5mF7W6k/UAHlGZqkli6JPJiMII0c5Kk6VUeJ6bJSCiONvABnKj+zKmOkX3+silFab6IYWVCbbOL/LPDi6SzIfMTDeotwPiWzmmn4fyt7dheZ10VCS2l6BKyeZer4fabhEoibY8kDZXwgWWF0rsHM5xozPB+bU55Pk8cxdSCpcbsL6dbbb3AzrN+CCKEDu75K845H0PkQsyYuuHmG72efrm9TUqr7zXI42iN8TK7xZ6ZH12hWNj2XamWtIa3e3tazjs3TfgNAYTR5g4QrfbiI0s+G8P/hNK3um/qfekF6PchQfftbfGk9X1O6Wz2eeZhEnvTYJR3331vWAMmBQdaohiRK+PaLbwWx28fJA1selHWYlvFEGvT9rrDz1s82YbTRxl3zmIi96tz5ejubfr+4VOs3na6+FubDN3voCeKNKfy9ObmmK6oQluNbKTwrBve34rDN630ZDWGshOF6vWoLJaoHitSH/KozNT5HcPfwB3F6Zvp+Qv7yLWt9Ct9v0j3Hvsv7O5hiG0O4jdevbRe2AemCTJbqb4bn0T7pnDw9gU9rfr8WDASZLRFBS8E+gUE6bvXfveDnuLMUwxYZgldYS8S27wRq/8PoVD9hQJ9/Nw8D2xRwytFrrVgtU1nHPgDjZi/V7ZeHWK7qfQ78POLtwAVwhcIZmw717Ncz/umfue2LPJpJj7oGv+nvhuYzbkOTyiVvNj7Dvu8YjGeJfYK2kddX/fd4J7Nt8x/mRidN3CxxhjjDHGQIy02mqMMcYY4085xp7uGGOMMcYIMSbdMcYYY4wRYky6Y4wxxhgjxJh0xxhjjDFGiDHpjjHGGGOMEGPSHWOMMcYYIcakO8YYY4wxQoxJd4wxxhhjhBiT7hhjjDHGCDEm3THGGGOMEWJMumOMMcYYI8SYdMcYY4wxRogx6Y4xxhhjjBBj0h1jjDHGGCHGpDvGGGOMMUKMSXeMMcYYY4QYk+4YY4wxxggxJt0xxhhjjBFiTLpjjDHGGCPEmHTHGGOMMUaI73oF+6flz76nbq38Q/1r4u0+G9v6zvF2tv5JsRPGtr4b/Odg658UO2Hs6Y4xxhhjjBTf1dP9UwkhYHwt/Z8uCIFQCpQCwMQJ6PQ+G/WfMd7La0wIEBLpe4h8DiElJkkxnQ4mijBp+q5tHy7pioGHLSRCZg+D0RhtwOj3zsAPBhpASJENLLx37NvDwE4hBcKyQMo7E9jEyWBCvIfG9c24Z5wx+u6fj8peIRCOk/2nJCiFsG0IfNJqnv60j1OPsJe20PUGutf/k0O+b5rDd9bae2UuS4WwrWzsRbbGTJJkG9x7Zc4O5ofM52BmkqTkI4xBNftIJdH1xoC73t2c2D/SHXgLwnEQvoco5Ekni/SnfOKCIvEyAra7Gm87xllrwvYuutHEJMm+mfH92oqQdydB4CNyAcZSiDjBdHsQhpgovktm92vx7Y2r6yLyOSgViKcL9KddUlsgtMHqGZxGjNXoIZtdTKeH6XTQ/fD+TOjBZiuUyha
/HEzmXIDJB+A6GCEQcYIII+iHmDBCtzv7v3HsvWulEEoiDsyhSwFRySOsWvRLkqgkCCuGuKRxt3NMfdsl/3qA2twZjk37BamQnoucniQ6NEFnziUsCVJXYAQ4LYPb1BRfryN26uhmK5sTo5rLUiEdGzk5QXR4iv60S5TLNga7q3FrCc52B7lVR9fq6Cge/TgP1pcMAsREhf6RSVqLDu1FQVQyCA3ubp7iUpXSxSbq5so9c+Kd2bk/pLv38itl0vkJ2osB7XlF+4BBH+gzVa1R8XpIYVhvFVhZLVG4PM3UKyXcb98k3dkdzUDfQwbCdZGFPKZaIprO051xCMsCb1fj7SQ4mx1krYlptdFhiIkZPfEKgbBsZDEP0xP0F0s0D9k0joE+0Md2EnQqibs21o6Ht+UTbFTIrcd4txuotU10pzda0rh3jC0L4blgWVApER4o051x6JczYpApOE2Dt5vi7oZY19Ywrfa+HePuJVw5cAR6h6t05rJF1T2YkJ9pMFtscTBXw1cx5+uzrNrzWO0Sfj9EJEm2KaQA7xHivWdexKcPsvlEQPN0wvzhTR6vbDDltJDCcLUzxfX6BNdfmKJ0rUr5cgd1Yx29sztcR2ePyAoFzOIMW49X2H7M4Mx1KOd7pFqy0QxIt12ClSrlqyWKFwqolQ10GEKa7s/7/35MVQrh+zA7xfZT02w/bigerfGx+ZsccGuExuJ8c46Xrh+kN1FmVmvk7Y13tRm/O9LdG9x8DmamaDw0wfYjEnG6zZMHlvho+TJnvdvMqpBgsBj7xnD1wSL/28Mf4VtTpznamEU0Wpg4elemfN8mDwZZ5nPo6QrdxQLNQxadRYM40qG5lCNYcalckQRpCuFg4aUpxow4FiUkwrFhokLrgSrbZxXmoRY/dOQyp4J1SqoDQEv7rEVlXmvMc2F1lvq1gMrrE1RfFsjVjQHxwkhIw5i7YSUA20EEHv3FCrunXdqLhmQmIlfqI1XKdtNHrXgUrgfM7BQRUTw4Wbz7YxxkR21hW+C66MkSYcWiMy/ongg5fXiNpyeu85C/zLxVQxvJ+fwC//cjP0L8soM/iPG+pzA4Aqv5WdoPzXD7M4KffPpb/FT5JY7aTUpSYaNQQhBPpvQPpnzu2BH+yY1nWP7jGRbUPKrdGSrp7hGuPrbAyseKTP7QCn/j0Jc5Zm/hiZTYSNbTIhfDOb5eO8bzl48QliaZetYgN3cwvR7A8IlXCBjwV+9gmc1nEn7mfS/w6eJrLFoNHKHpGIsP5kr8Ue4Mv54+QX61RLHRQYThIFQq+UHX1Tsn3T3CLZdIj86z/sEC6Ucb/JWT3+TTuQscsBICYWMLhSRAibtCiTmVcPTA5/iff7jOHy59kJnL7vBJdy/mNSAyk/NJcw5xTpL4oG1DKejTPCCJ2gEYEP0I3evvn+f1A9qbbRAeyWSe5uGMcP/iA9/iw/lLTMhsYgYiRQP9QLFVyPHSxGF+d+ohbgbz2N0KxVYX0Q/vxvZGAWPufl+3i3Bs4qIirEIyFTM/V+NkeYucing9N8ONdApx1UGEEbrXuxvn2y8IAVJAavB2E/pVSa9hs9XJ0yj52CKhLCMUhlm7DoDV1dDtZe8+iffPlncDIZCui5ybYfVHF3jwFy/wj+Z+nyO2JhDOd6wzV9jkgR/N3eDq3Az/5tAE/SmHvONApzM0G4XrYg7NsfzJIj//i1/kr5RfYFL5SCzAQmM4bho84uzwTHCZr1ZO8k8Kz5Bbr5BrdzFhOBzb3spcpTCFHI1jDmdP3eDnK8+xaMW4QhIbgWsSYtWiYnUhEcjEYDrde3IoPzgnvDPSvSd+mx5f4MZP5fjox1/lv5z+MqfsZDAB7DdMgHuhhGRO+fz58rf4rY89zOxv+dBuj8AL02AEptdHAHYYU+oUcJp52jWL3s4kyoXcqsFf62CarSyuO2rCHUAoCdUym0/kSD7c4K+d+gZPB1eYt3rYQArs+WIFqanKJvPWOY66m/wH/0le7D1
AbrmCbLUQqcYkenSZY2OyRIk2CKWwWylOw6LfVcSpIqciFr1dYiO50Z6jfDVE79SyuN5+hXGMyb4/TTHdHrLewrYVTsvGbkgabY/tKM9WUmRWNQlE5v2ZRCISjdkbp0FSKtu0B8nWvWTw4HtGAWHZyKlJtj46z6f+yjf4pYmvMqUsbPHdl3FHG37n5kNMfVORf20d3R4S4Q5s5MgiSz9R5v/0C7/DXy5eI5D5N/wdBUgEtlJ4IobgMq8fmuObJx8juJlHDJQCQ4eQCNclmcxTP234W3Pf4JAVEwibmMyZCQ1spQUudmbwl2zyr9wifZcn83fh6UpELsfG+/N86pMv8V9NfZlDlhkQbna8jO85HkrEd5BwQcYcmdqFUgE2t96xKd8XBgvDaANxAvQhThD9kFyjQ3DLJ5zN01p00BZEVR+vnr/HSxx1gF8iSkU2n5km/EiLnz/+Mg95t7FFylbqcCWaYTWu0Nc2x711Dlq7VFUfTxjOOBv8pZmv4X4y4ZWVs8w1JpHrW+geI38WIQU4NtrKkjvG1Uzn2hzytynIPo3YJ3dL4V65TdrrDSVubpIEkgTjOsQVj+60JJxKKHoxrdhlOapSVl2qqs1WUiQo9mkdLOKtVZBhiKaPEAKRz6GPzJPkbGSUYm21YLeexfeGnYWXClnMEx6fZusDKSf8DW6neZaSbE1VVZdZlVKSDhbqzlpr6z6/cP4vMf0PPOxXL5A228PLTQiBmp3m5k9U+cxPfYufK1wkkLk3/JXYpGg0qTHZrxgCkfBwfpkvTT9KUvKxNgYJWEZwOktTGLyy3STPcmLhioSGdtnROW5HEzzXPMpXz5/k2LN90s2td30qf+ekazTCc+nOGh7O3SYnEkARmpi+SdlKBdeTKjtJnlmrwQm7RlUpvMGu3NUx1+MqNzYnOBHuvquH+EHtNkkmpREyymK23S6iE+ACtZNV2gchLrhMMoWXatjZHSgBRndEF0qRHJuj9pDhY4s3OO2vEKM4H85zrrvI681ZrmxMkQ/6nKgu8IHydc64K0yrNrbQzFotPlW5wEufOUBzY5JSnCC2d+8mhUbxLDKLl6VH59l6zKF7ICU32aVo94m1xVpa5tz6HJMXEtLN7eHEGY0GskUsigXCqkWcA/wUx0pJtKKbOkRGMSF75Nxl/tyxV/h3Jz5CYbmA3+4ibQemKqx8ZpIP/eJL/GTlZVbiCr+x/jhXnjvF3DdSCi+ukK5vDO1UJKTIZFfa4K1Z/H8ufZSCF7K2UUbUHIw0FA42+dmjL/OTxVeoyoTYwKf++L/hxD+MEBcvk3a7Q91spe/TfHKB6GyXTxQvYAtJajRq8GtT91lKFNfiKVrawxMxBdXDEzETqg1HukQVB9txsnDQsDGQ1KlWiL+e43ObD5NOS2yR0tUOV7szXG1NcmV1muJ5B/fGbZJ98MDfOekqBUqSHAg57Gwjga5O2Uhtvtg5y7++/iS9FybwNw39KYH35A7/7ckv8aS3RCBStrTLHzXPIK4HwO7gCDcCIthL9BidfZ3OJqEwmmgqR+uZHo8sLnOjXmXdn2TOTONEESKK9jXM+L0gSwXWnspx6tGbfKLyOhMDL+xcd5E/uPUA3Utl/A1B6OR57niR2pGA9pTHaW+VKatJWfY5667wc0df4lcf/QRubRJPa0wt05+aYSsxhEB6LubwAjf/TJ7JJ9Z5uFBj2mux4NYIZMRLzUNEV4vkX7xBMoyY/iDLLxwbYVu0zk6z85CCB1scrTTwrZgpr81Jf51j9hZVmdI3KbZI0RaZd25nCozegSLmYzX+rzNfYEq5xF6Dp/3r/PbkI/x/D3yY6WCR8m/tAAwvSRVH2GtNZp63qO9W6AAzOxpvN0XGms5ciX955uP82pnHmMx3WHp5gZO/WoOrNzNVwDBPN1Ih5mfYOaM4e2CFCdVGG0MiUkKdsJZG/O2VH+f52weJah4AdinkgfkNfmjqPI96Szxz5BrPn34Yb20C2Q/RI1AxmCR
B1VtULxY5f3iRqtvhSLCDxNBJHVqRi27ZeDsG0+7siy3vmHSFUpicT6XSRqLpGsVSkuM3a0/wO+ceZuLrDgdfbaHqXeK5Istygs9VHiaatJhQbZaiSb66fozcMtnEti1MNKJj751Y3UDeJAQiF7B91uMvPPhlnspd42J1jl+pf5z2LZeJ6+5dUf8IICyL5OQBek+1+bGZcxy2t4iNlZFufZ7upTKTrxrcRkJUUCS+w1V3CiU1cVVx0ltH2VvMqi7vC67z788+Tm21zEQ0gQNZjHOYictBwkfMz7D2oTKPfPQSf3bqJaqqDUDf2NyOJ7jWnKTyOqTbQzjp7MmqfA9RKRHPV1l/SlF9dJMPzVzHlQmxUUzabeatGgUZo4SgqyUXWnPkVgTeVh/R6YFlEecUT8wuM6eyZJUrbI7aMT9ZeJXrJyf56qXHKNs2DINwB/PUxAmy2ca/ZWN3fBLfQvVT7FoPESVYLQ+rH9DcqbBlKhz/agtz6fpIlEHSc+memqR3JOJMcR1PJMQYdtOQ26nL3136Wa598QjF2wanrUk8SftAjvPpHKeL6zwTXOGZ8mW+dPYUnet5ivVi5uj0+sN3xpIEq6tRbQtXpiw4NRQajWAnzLFqGYQ2WShiH/AOE2mDAHTJI+c06RublaTIl1un+b1LD1F+3mXy5SZyaSPbnS2F0/RZaZd4zV/AFimXWjNs3Koyv61BSmQQoOGNmetR6PQGwv1kpkzzfX1+pvQiM0ozpVr8x5lHaZTms8qvUbm5MpPbrH4gx4+e+BZPeDcpyIj11GYjLnJzu0rxOhSvdxGpAe3h7kpSz+OSmCFMLRoVH/KQE6tMqA5Pzd3iy4fLeDUPq11ANNvDy14zkAxVyjTPTtP8QI9fmvsSp+wmSgg62nA7zfPN9nGWlqY4da45NFIQKgspRIcn2TntMf34Bj+xcI4j7iaxsYiNoiD7eDKmbxR1nfJs7xjfOHeCIxci1OoOptPNCmckzLmNN/x8C0VZhsy7DRLfIIRA6yHM2YHqJlOFaGS3jwX0T5boVxU5AVY9RKRZ4ZHV1Ti1CHHuCnoUUkwhkKUitZM2s/MbHHR3sIWmpQ23kyL/Yff9XPnaYeaej/HXOog4JSl5QEBvzqGTuCgMJ5x1TixusDW7SL6UQzRbiDAc6tITQoBtE+cUaU7zROEmj3pLaCMpqy6pkVyrThAHxTun4neLd0S6e8mRqORgq5SW9lnXZf544zj2FZ/y9Ri13cykH1KQFj2iAgTAcrfMbj/HrdUJcjcsnFYMlkJUSijXQXe6mUwnThhJwmcQ66s/kOenH3qOI5bCFg4zqkfZ7VGXQJJmCbhhY3AkZ26K5OkmP1w6x4zqESPoaJernSmS2zlKN2LUdgtcB6tv49YtQBD1fK43Z9mcy5McUHilmMP2Nidz63xp8QSd1YDcmovt2MNTMQiB8H3S+Qm2HpP82OnXeMRpk5f+IIES0tUuz+8eovyKjbxye3jpEqUwhYD6UY/6acNfXnyRU+4qjkjpG5t0kPDdTfPcjCZppDl+5dxHmfuSxH99NSsiSDXSsREapHjr8brZm8CtiUztsN8MsacUUjIrNrEUxncJZ3OsfVCSlhNyVz1KNxzceoIKU7zNHmp5i2RE0ith2cSHZ2gdT3m8sjkILQjWdcBX2g/w+QtnmH9FE9xqItpd0AalBFbfQ6QCV2aSvCnV40Rxi1tTB0lzDta9ZePDglKYvE9nRjK5uMNHgyvMW4LQaEpyHVskvDJxgFuF0r595TsPLwhBEkiqTo9W6nGtP83aVoniJtiNCJI0K//0PLrzPuFkiqtSVtslNtbKBNccSjc0qp+icy66EiDSItZmM4s7drsDr3e4Rwvh2KSzFbaeMPz1ya/eybYqoJM4WD2TibVH4ekKiQh8Wg9U+MsPfIFT9g45KVhPFZfCOb69Nk/pssC7WYNGG1HIYbUdgk2F1xCka5JwRdHZLPMlfZzi0R4TxTZTVosjMzvcnvC
J8xaObQ/vEQZi89ZiDh5o83PV5yhKDyUkodH0DbzaO8jVS3M88LUaaas1VFt03qMzLygdrvGIv4QnYvrGppO61NOARhpwsz/Budo8S2sTHPhNi8I3b5Du1O7qc1ONjA291EZjuLdkomvg1c15ijfTfTt+vuEZLBvhuRnp+j56okznWJH1Dyr+zz/2OY45m/z9o5/hxksHKF2xKd4S2OstdLM1sgpEmc+xdTZg+sgWD+TWKcgefWNxLZrmy5snCF73CFY6iG4fUg1KgpSkrkDnUqadFp5IcQUseruEE5rUVVjD2MTeYHvWCiApB7SOwn939FmO2jYWikAYXBEB2xzNb3Mtd2TfknvvqiItdQV5O2QjLnG+MYepZyQljAHHRngeplSgOylBaFa2y6TbLoWbitKNlGCth0g0UdWjN2HRn5AUb3rkrljIDY3WvaEnfITv0TkQ8PCjNzhieXf+vG9gvVmg2NCYMBpdSWIhz/ZZxUdzF8lJQWQMt5MyX9g4jTlXZOK1LuzUMFFGCEprgkY381wthQ4cgk2fLV3ixfJBHvBXqao2j1dvc3Vult6EReC7WUx+v+OPgwofkw/oTiqOTu3wiBOhRDausUm5FE/wr668n5mvS7h6a7jjqhRpYBFOaj44s0xOZAUQq2mel7uHeK0xz1K9Qn2zQP6yzbFvdLG/fZGk3bkrqxokXYU2NBOf2KTY4i7tRkbS6bmUWjrTlu7n8wiBzPmIXA5ch7Sap3Eyz/pHNJ/94X/AadtGo5GHPs//W32KS+lhcpsiKzIZhc6VQW5ncYbaI5pPT65wyNnGkzF1HXC5P8et9SpTyxqr3s3+gW1lnvp0QGdOUp6pc8DZJRjw2YzdwOQSjCVAm+GdMPdOEJ5HOOninWjwM4XLuGIgcTMaG0VBRhStPto2d7rQvVu845guSiETaEY+15hkuVECA2FF0J3zycUpCkgm8/i7muSqIl0O8DcNpZt9nNUmaE0yWWDzMYfuYkppcZeNV6rMR2X8Ti/TyI6gGCh1JCcKm29YTF2j6Pccqv39C6B/VwiBcGySqSLmwRYFERMZw0bq8I32CS5fn2Puos46YHW6mFRj+iGi0xl0HMtmrVSKfL2MtqfYel+O2FjkZMgRdwuv3CfO5cEeUnO5vS5XqUZoKDs9fOHc+fh2ovkfL/8EwW8XqX5+IGEaMrQl0aWEGbdJiqClPV7oHOF3bj5E52qJwnXJ4esxwflb6K1t0rfK8guJ0HClMUU4n+CabPxCk7CaFohDC5Ho/T8KCwm2gynliSdy1E947Hw05H/50L/hYWfPQVCcdWo8Xb3O+fIiQNbwaBThMJk1itl5pEJhoc6iV6Oq2jik3E7zvFRbxL3kk1/pI8IYY1uYwKU/E9A4atM+mvCp2duccNYJpKI12OhEx0L1wswpGJKnK1TW9cwUcnSnFKcmN6lI/w1/R6P5/7f33kGWZud53++cL958+3ZO0z25d2ZzTlgEEgAJFAmSkGiJpGlLVsmSLMouy6qybFm2qiy5ZNllS1ZiiaFIU0VSjAKJDBDERmDjzM5OztO5+/bN4Yvn+I/vdk/PAgtid/v2gq77VG1Nbd8O5373nPe84Xmft64MLrXGSK8ll8BepOXe/enbbqmUktiCmp8stNtxEJEAAdoAbUi0FJi1Lvn1OtkbGZRjIr0Q2fQQUUw8lGHrnjSlj63yt2dfZt4u88dT9/Nl90FmmcR9LVH76ps3pDW665Fe93l+/RBMvLHz0mJURK+4OOXu/rTQColIuXhjKR6euYQrFKGGU94B/uP1eyietiicTVTZdtSYSJodhBUncnk66f6SrQ52I2aj7WKJiFGjTduqMlFssJnPoU3Zn0OpVcK8GMnROAj35Zd2SPqhjvmfl36M8A9HmfjTZaJKrf/Rg2kSpQ2slE+oDRbDYdrK5nM37iZ4s8j4GUXh3BYsrxG/XY9gtxJdIYdXMnCATaWxROJF3ow0v7z2DM41F7vepzSJihF
egD9cZOshxf/y2Od42q0Dt6OyphZc745gbxqk1trodru/4kw7Leop9PwUWz/i8VcPvc6MvYWnLTbDPH9SvYvzF2eYORsnDlbXS1J5pQzNAxa1Bc38kXWeKlxmVPoYu02R0P0dr7D92aZT+AeGqNyr+btjr93RvLWlulwOU/zSxod5/VtHmTu/d/nx91ZI64WykSsIlcSUMUIqjEBgNTVWKwnHkDJJnAchhh9iWGbiNSqNzqbozGSo3B/ztw68xMfSV3AFBMUzfHn2LjqjLqlUCkS9r3ld7ftYqw0qr49Rv6dLVji0tM9L7ZNYTYkM4z2rWn4vbKcW6gdNPpldBaCpTc50ZvAuFpi9FCA2K+gw7LWhyts/1zO4yU3cKz4qDUJzwKowKiM6RpOUGaIlyaGRYu+dCCER6TTdyRThbMBd7vIOOX417vLqqSMcOdtFbZT7r9jW09iIbUEcS5a7Rd40ZnmteoDWzQLjFzSFczVYXuvxlm8L9exIlNoWojRE99AwjTnJUadLU1l4MkQBr3gH+daVg4xe08hqC7XXD1TFqHoDaZqIGLTU1OMMMbf3Y6wVz3UO8/VX7+bAtyKMK8vEXn8LaNtSiEyOsfp0iUfnzzPnlHFFyGI4zNfLC5x68xBTzwpy58pQqaPDACElypRoAZiaIafDqNkg14vSFLASDGFXDYxOBKpP+VwhkYUc3v3zLH/U5qnHzvKYuwgk7cq+DvlXlUf59ZeeYvw5ybE3KrBeTogB4t0L3Lwd70vwRvUix0CZoAVWQ5DairFrPrIbInq6riidhLTbN4xjozIOrUmD4wu3eDx1nXHDJiSmaHRIpX0iNwWG7I9x2AWtNKLdJXsTliI4YsV8rTPOH986SWZZI+udvT9M3wXCkKhMis6UZtyqYwjwlMGal8etCOyKd5sDKiRCqkSE2zSTDRDHOxEIpklQMDk4us6hbaqWtqn7LmYXRLj72O4dpOugZsaoHTY5MJVUsbfxzzefofiWxFqpEu9DvlEYBpgGWoKQybvtxDYXlifIXZNkb3UQlXpPX/a2xoIwDIyZSXTKQWUcuuMpakdNugsex/PrGGgMBBWlealxGGvRIb0RIbygL9GD7vFDjW6MVbM535mknjtLoeeUfbWb4h9/7TMc+KIic3qZuN7sv5frOIiREvV7hqk/6vOR0kVysstmlOcbW8c5deYQI69KcteaicHtdcKJnEQojeEDMaTNkGHZxuixSGINbzWnSG2CbHn9iTCFwMhnaT5zhOWPwOMPXeDnRl9i1LhtCv9N7Si/87sf5sifdrGvrKKqtSQKEnKXw/PeDe/7aAPWaEOwXc8zreQByVgn+a0w2hEBR0pE2DMYlgmGJCw4tKfhieHrjBsKS9g7xi1thwQWd0oE9gmid8saPmzGGUaNJr+98Qi1yyVmVyJEs70/+TFAWwbhUMyo2cACDDQ50ye2QDkmhm0nxjWMEqOSchGmmYiu94o4IuWiijla0wYfH7lGTkjaWrEWFdlqZMg2dX8MhDSQE2NsnczRPB7y9NBa0tmFROmYL1w7yfitKNHL3Y90Te9zFQoECd2rHTvEXQOrozG8CB3FySUVq553ayFHSpQ/NEWYgSgt8IY18YEuTx66zl2pFSyR7NFrYYmXVubJLIGz3kG3O/2rtAuJ2Y1JrQueXTzMRwrnGUlvolD8za/+deY/r0ifWSYub/VfEU1IhOsQjhfYOmFw/6FrHLPXAFjRQ1zeGiVz0yB/w8PYrCfeodbJPs2miF2TMCOgGHI4s0lBJl55qBV1ZfHKzTkmlmNEo42Kor0vTGazND+2wNKnYz597xl+dOg0C3YVSzhAEjn885d+mCPPdrHO3SSuN3b2qzB6798gEc7aT5UxrTVEMSLSSKGxZYRrh7TTmiAjcVwT2RSJSlbKTap+PU1aZNLJ1h21CCYiDjob5ORukRyDWEmEZh84ukljhC5k6Y4LLBHxij/Ma9cPkLsucTdaaM/r7xreDlORlx6
WEKRFxFxqi69NR9QPpSjGYxjVDkJrVM4lyjsY3QhzvQ6tTmJochm8mRztGc0TmctIIajHBue7U/jlFCM1Bd09fk8976G9MErlpGB2vszBVCJgpFC85DmoS1nc9UbSYbRfUBoZazSQMX1iLTDTEWHaJsra2MUc0jSSfZZO4R8osf6oi3d/B9sJMQ3FbLbNyeIqD2evM21VycmYEMHVYIzmao6ZdYVRaaC63f5oLvQ4utILyawpNi/l+a2RRzl54HP89Qs/y/wfKlKnbhFXaonB7fOZ2U69BEUbby7g6dLVpGagTWIk3a5NpgVmOwQ/SGijKTcRsZ/K05q2aR3QTI3VOOhsYgiNpzUx8FznLuwzaTI36snlvMeOgTAMovuPsPQjir/84Mt8unCKKaNDblcu90LoM/q8hX1p6bbB3aURLXpF6Pej+fzuje52KBuGmF0wpGLYaVPPpKiOFOiMm5ieg9BgpGy0KRFBhGx5EITolEMwnqU1JcmP1pm2qpg95mOoFcvhENV6hpGWTr6/j16mdBzkxBjVe0t07+tSkh7faJ3AvOWSW46R9YQlAPRdElFrjQhjRNfAEhEGgpxU3OWucOjYGtejSbqjWex6BiR4wwJlQWpdM3TZxKqkQQiCUor6QRPrUJOjVhVfCxajEt/emie1bJLa6O65bJ4wDJgco7JgYR5t8MjITaasGp62WI9b/IvlT1G4BEa1jdovXV+lQSlkoIl9A6PnHMyMVlmaSlHrOKRKo8nrrqA9ZlA/GfPRh97kycIVAGIkGekzYdYoyi6WUKSFoKk059tTOBsm7mYH3Wonxc29Rm+uGIaBiDVWW+GWTd5cnOFXMk8R/8YYqZcvoprN7/S6+rVfe41RUUaSLTU57q5QkDFSaSwRY5iKKA1+ySHVKSYMpVKG1oxLc1bSnVC4h5rcP7xM0ejQUSZNBKE2+KXLTzL2Rohc2ezLfDqZy3Hrwyn+wsMv8dniq0wZAa6QOGKbjRLyK1tPUXqrmYwRe/sz7U2cQSvwFEjxngYbvDdPV2m0H+A0FKZUTDl1HBnRPOCwTokoZdIeTyN0itgWuBVNqhxhdmNi16BxwKR1KObp8WUOmXUgjUJTU4oXGkcxbrhkVoO+amoK00SOj1J7eILVH474hfu+SUYqymGW1LrA3QwQQZh4w5bZ/w45pRGej1M2aCsHKXxywuCEvcZfnX2BlwpHOH9inFrXJWOH3FfYZLWb58LNScKsS2bNRijoliSNY4qfOnQWV8BKZPJs8zhXLk8ycU1hbSQawXsZCgvHoXOwSGs+5v6xdebdLQwUa1GBG8EIp187zMGbfuL5wP6IG2mFjpI9Z2zZXG6M8tHRSzwxcp0vLqTYGs4ifANtadyhLg9MLfE3Jr/BrNEiIwWe1oS9j3rbDzIEGEgW4wyvbs6SvaWxNls9fYC970STtoXM59CFLEEphTdkEDsQ1y3+4KVHuOv5lYR1sds47BpR1I/ZfkIkIkCRI8g4ARNGg4yQIBVjZoODI1ucP5wCbNypEsqC1hwEUwHj43VmUx3msxXmUmXayuFqOMpWnOWt9jTGl4qkzy4S1+p7nyaRCZ84/+QGP1J4k1kzJC1MJBJDCGKtqaiA33/1IU6srdxZd9ge2OA4CNd533bpveV0tUL7PunVLn5skDZ8CmaH4ek2KyMFlheKdCOL6UyNQJm8uTrF1s0sRscECcFYxD3HFvmZ0W8zZTooNC3l821vlq+cvpvp1xXu9XJy2/UjTyYEslig/vAUK5+M+K8f/xqfyb6FBVSCNHY9yfkhBDgOEtBC9KhafTIWWiG6PpklzY1wlMfcBhYGU2ZMUd7kSfcmnVGDGEFGREhgJU7z+/mH+WL2BOvLWcyuICxGnFxY5MeKb7CpTL7QvJfffONRxp+TDJ2uwMYWei+r20Igc1nqhyys8RYTqSYxgsWwRCd2+OLyCYZPC8y6nzxPy0JE0f6MPgpCrEqX4kWHq84Un/5YIvDOAWhNOwx
bbQ46Gxyz15kzu4wYKSChQIY6JiQm1AoFxL0QeFNpfrfyCPVvjzH3ZhPWN9+/c/C22sV2CC8nxugeGaE9YdGZEHgljXIVIpRMPq9RW9Xb+fHt8HdbVc22E53fvdb4lRIskyglyFkhllA9TzHiuLXBfz79Aq8UDnHxxDjNwOFQbovHCtcYMxsE2mAzytNRNqEyueRNUI9SnKlOsfqNGQ7+4dXbnYB7nct1HSr3D/FTs1/jkFknLcwdXn6sNR0d8nx3lqmvycTLVW+7xGwbkc/d8WvfaxT+ngtpOo4xKm1urZVgCubtMqOpBsWCT07GSSFIJB7D5pTNKycO8lpznmbkMJeu8NHceR5zqng66XM+5Rf5H77y08x9SZE5t4YqV/a+wwd2kun+ffOsPiX4qftf5yPpi2SkoKZgo5vDaShEGPduOIne7kTpI4tBK43udMnfDHitOcdPZC/jShMLm7TBTs47gY1CU5Qe8dDrALyYOUjHt1kYqvLJ0bM83zrONzaPce3MNMf/fRt54ya6p8i/p3J5QhIemqA5p1kY32TcblCP0qwHeVY6BRovjTFajpBB1FP+MpNBldBX2b4kGvORG1VGXpXEVoHn7z/CXxh7jcezVxgzmowaXUpSkpYW5q5RN7FWyYHUoNB4OiHJXwjG+YPyg7z6tbs4+HsVxK2VpF39/b6PXlVcmCayWEDnMgTTRa7/uE3+cI3xXJNpMyDSBhvtLOs3S2SW/N75ULd/h2ViDBVRI0OIZhv61HyiLYPYFeRsP2FzCEFWOMyJmAljlY+kVohHkq9LwEDgaUVFGZwF3uwc4FJrjKvVYao3h5j7Y8Xcc6eJ+qH3uzPEM0ftKDyduUjJMHpjxCShjvF1xFJk8g/f+DGOPHudeHdRVMhEeyOdSoTwoyghCcTqPYvWv0dPt5fX3aww/I0xvjR1kuNzq8yaHUrSxBIOErljKMaNmHHjIg+mbuyIF2dEyGIs2YwzvNQ+yi+/+AxHftvHurzyHQ0AewnpOOiFeVYfd5g4scY9maUdRaTNOM3FG5McroaIjn+bJ6h3jW7pF7RCdTq4Nyp884sPcOpnX+VBp0bubZMAtmEAjjAZlR1m3Qrj6VHWyXKjOsT/ce0TpC85jL4ZcvzSBnp5DbVtbGEPDW7iQfhpE6Hg+laJbmThGBGtwGGlXCTdBhkqtBDg2omGQE/YiG63f9qzKkZ5PjquItsdpqpNrhjH+F8/Os5nD5/iE7kzpAVYItmp3/HjKOoqYDF2eKV7iC9vnuStt+aYeFZw8HwVbi4n89zet8FN0ldGaQh/YYrFjzkEYxGfefgN/mnpRUoywNOSunLYiHOc96b57ehB/JEC6W02C0mKR46PEg/nkn1bDnpTEfZ43yqF8CMMT7NUL7AWZ5kxW6SFjSPMnfwoJBfW7f9Joquz3Rl+7/L98FaO4bdiJl5fI15a7Z8aWo9tQToFAhbDYWbNRVRPvKipFeeCYf7ZjU8y++8sVKV225juGksGoP0A7fvJfLT34Y2/d09XaXQQUDrf5vLZKb5SuIeJkecpyAinZ3C3DYXSGlskYfGFcIgL3SludUtcrQ+zfnmEkdclx861MG6uo3cb3H7cepk03rCLNqHaSvNy8xASRVOl+I2bjzL6rIW9tIbo7grB90NLd/si26ox/WyJv3n45/jHj/whj7mLjBomKew7DG+sE6Pw5fbd/PrVx2idLZG9CflNxfhGgL28gq5UdwZr9kdRTCaGN9LYdUF3Ncu1rTRIDVpgVkzcLU2YMzCHUsgw2bxG00dovbdpju8GFaMDlRTvfJ+p32wTvTbD508+w68/9BRP3HOZnx1/iQWrTEEKXGHg6ZiV2OBaOMJ/2HiEb1+fx76UonhFcfxsHbG8cbtwtgd7dHs8fHhoguVnXEoPr/NTs6f4eOYcBy2FKxxirQkJmFEbjBlNVqcLfH3hUTIXxxDVBsJ1iCeGaI+4IMDd9PpWANaxQno+qYpi+foQX5m9h9n
SC4wbAW4vR3pnVJakaq5FJv9y/Yf45rdPMvESFN6qINY2k3lj/aK5bRtNISCOKZ3T/PLS0yyOn2PUbLISDPHNzaNceXOGuc+HON+6gNo2pjsG10rYV3FSI9BBj275gTRHaIUOQszFMnNfnOaL8QPUHk/x82MvcL9TIyssrJ4UcF0FvOIP87nKAzx36zDeSgZ308Dd1MxfC3GXa7C2iW53+mNwt3NmImkcsJoh+esmzTjPl9bu44upuzHLFkPnYPhUDWqNZNOa5k5BZj/0F3QcJ97uhVVmfnuG/2nlL3HgwWV+YvI0j6WvMGF0sEjEeK5FBf6fpc9y7sVDlM5q5hY9rHIH0Uo4o2p7km2/QvjtZxrHWDWPwjUbu26gbAMRg9nRpLYU6dUuyjEwuklRUgsST6yPffV3YHsycRwTV2oYZwLGb+UZfaXAytQR/uHMAp1JgT8aY450EQKClo29apFdhNmlCHeljrFZSyKw7Zl5e7xHlSWJ0prpbJ1Rs4kUeqdpYDvVkRGanAwomF2CgsafLeIIgXItwryN0GA1QoytJqrd7stnr+MYXW+QvZplJF/kd8zHaD7q8oniWxztTd9wt1u/0WzFgtP+NP/k/I8gvjHE4dc62DfLqFp9byKFP3PBvfPbalN8s8bSl2b518cmMNIRcd0ie93kwJmQ1FtLRF3vO9gKO1raPSfzjoGk7xHvrzkiClFbFdJvCebDCc5eO8HfOLpAbq7OydE1Jt06tTDNm+Uptq6WyN6SFFYU49UYq+ljND3kVgPd6SZJ/z5s5jvXrMD3MdfrDAUxmRWbOGUglMCqd7E2W1CuJER32FEV2p5q2/9pxckHq7YqZF+H+eoYzTen+MXpGf75iCbOxSA1sm3gbkjyNxQHr7cx1+voVgd8HxVFu8ZD91mPWCtUEGKubVEMY3I5B21IRKx6HYle0hQjk0OozeR5io6H6jMd8M519v6OjhOxIM9HVKqkF10yZzOoXAaVtQmzFkiBDBRmvYFs9i6xTpe46/Xlmeo4RnU97NUGQ+dHeT17mOVDBZ4cu85PDb1KSXoYQtNUFlfDSb5eO8FXzp5g+CZYNW+n6ciqeAitkY0OulLtX3SjFarrIde2GD4lsJs5/nTtQb44cw+58Raj2TYZK6Ad2qw3crTXM6QXTYbPRmQvbcJamXhbtrXfreDbF24QJGmR5XWmvmnhXUihLBurFeOsVxFrW8TbaYXdUBrCEB3L5LLZozW/P7kprVG+j94s47Q7TN4sMnKqQHu6wPmRIc6kwAggVVYcvuVhbbURzU4ildhLSsdBknva83zj29a5DdXuItUWst4iZW4b1ST81HGc5Gy256bJ3kjuPbjd3s1ale+jN8pYzRbDV9OU8llUPkWUsUCA0fYwKi2o1tFdj7g3bWPHiPXb2PbWmfwbE1driHYHY1vtTG2PX1doKZMNbxg7l5gKgoS21u9D992gYrRWSUjr+Yh6IymWGBJnl3SfjqJkCsT2c+3XM+1dtKxuMPJtQXq9SGtqjC9OjfO5u+5lqNBGa0G95RJtpUgtGcxcjMleqyNWy8lzlAIpjcQg+gG620eRpm3HoFZH+j6Fap7MYhF/1MUbKlBLF6kaIAMo1hWT6wHW+hZsJGykvk9N/m7rjZMhBCKKMC7dInMz6T7D99GeT/zd9mLPqUT3bITSe7Zf37/Gn06qxHEQIJpNzKVVim85FC07UblXaufNqW2PEW4bsf0caw7oMCCOY8Suyu4dRnXXevZzEOUd0DpZZxRCq40oVxCGxNr2GOP49tC+3vd/kNC+/2fTpnbnxffz0H037LowtCJJe0TcXuN+702tiZtNxOUO7jWDdMpFlIboHBvFLw4jFEw1Y9xyJ2mtrdRQXS/JP77D7+v3enfOfKuNLFdI2zZp00y6UEkuLYIwMdBR1P80wp+x3u3POu5dst/XZ6x1Xwq9eyes2lugjiLY79bZdwsVf3AG9d1gZ7PE+6Ir/L7wZx2ofRxf/66
xywh/kGvQUQRRlGj61urY129i71qfBvrE9Xhv2H3m+zhzb0+h31vr7l6iT2rWAwwwwPvGBxzBDNAf7N9c8QEG+AAhTHNfVOsGGODPgug76X+AAQYYYIAdDDzdAQYYYIB9xMDoDjDAAAPsIwZGd4ABBhhgHzEwugMMMMAA+4iB0R1ggAEG2EcMjO4AAwwwwD5iYHQHGGCAAfYRA6M7wAADDLCPGBjdAQYYYIB9xMDoDjDAAAPsIwZGd4ABBhhgHzEwugMMMMAA+4iB0R1ggAEG2EcMjO4AAwwwwD5iYHQHGGCAAfYRA6M7wAADDLCPGBjdAQYYYIB9xMDoDjDAAAPsIwZGd4ABBhhgHzEwugMMMMAA+4jvOYL94/Iv/kBNrfyq+p13HOc6WOt7xzut9c/LOmGw1veD/z+s9c/LOmHg6Q4wwAAD7CsGRneAAf68QryjMzXADzC+Z3rhPWN7MwiJsHp/QmnQCt37F/0DEg3s3rg/KGt6txDiB3vtfx7WByDe5oNo1fv3B2jtu/erkCD4wVznAO+IvTW6QiAMA2HbiJSLyGZQuQxCKUTXh66H9jx0EKLDCB3HH5wBFiJZp20jHBthmugoQnc9dBRBHP/gXRDbEAJhWgjLRJgmGMbtg6c0Wutk/XGcXHZSJF/fr+e9vQ8cB2Hbydd669FRhA6jD/a57l5fOo3IpFD5NHHWSV6OFDKIER0f0fXRng++j+p66Cjc/3ULsePACNtGpFMI10k+9yBMzpQfoIMgebYq3t/1vRO2120YIAVCCJASVLJX79inP2hnbDf22GnYO6PbMwQyk0IUC8SjBdozaboliVCQ2YhwNzxktYVsd9HdbmJ8g2D/H7o0kCkXOVREjRQIii6xa4DWuKstjHo7OWhdL1lfFP1gbIxdl5rM59C5DCqbQqUtlCXRUiSeD2B4MUbTQ7a85GD6AbrTQft+/97L7ks3nYKhAv7sEEJrjG6EUWkjqg10u43yfNAfgHEQAuk4yOES8WSJzkSazqhBd0wQFDVagIzB6AqsJrhVTaoc4W50MdaqqEq1t/Y+Xxq7o0Uj2a8il0UXcwRjGVpTNsoUOA1FasPHXK8jqnVUq40OPmBHQRpI20KkUohMGp1ywDRQaRttSIQGEcZIP0S0uuh6A9Xp9PeMvVM0sw2tkte2/93+MSne9m3v3xHbQ6MrEa6DGCrizw1TPeZQP66JSwEoQeeKTeGaQWbFxKrYiJYF7U7ys/tpeIVAZtKI8RE6R0ZozJl0RwTK0SgLCpeGyKznsOshZrWDrLfQzdYH5+XsXrphINNpmB6nM1ekM2biDQv8IU2UAuUotK1BaGTbwt1Mk13W5G752BstxCa7PPg9Nni7vEeZz6HGhmgdyrP2WBIC2zWXocsp8udNxHKACIId53w/IWwbOVyie9ckW/c4NI5G5KZqHBqqMpOukZIBUmi6sc2al+NqZYTNxQK5K3mGz7mkzmp0eSvxKOmTcZPGzmFPIjEHMVQgnCjQOpCiMS/x7u2AFqiqTeFCmpHTArvrI9odPrAdKg2k6yCHisRjQ3Sn0nRGTYK8QFkQ5CF2ktUZvsCuQ3pDkb/Wxbq+jqrWUL7ftzMmDOO7G12tgO3XDIQhE48cvjP92Ise30/UuDdGd8fDsYhLWWpHHCqPhXzinrMcT6+zGhT4yvAC5UKRIO+SXTZxN21kzUKIJNzQStO3Tbx7qbaNmBqn+uAIlbsF+lCHUqFNN7DQQM0o0p6ycGoWmXWH9Goaa9VGbG6hW2Ff1/Y9IRODq47OsvyxAt4DHQ5PrPDE8HWOuOvkZJeM9MlJDxtFQzushEP8n5c/TuXzw4y1Q+RWHwsvQiJME5nPEc+MUjueoXyf4KPPvMmR9AanGzO8/NJx7EYOt1yFVrt/a3kn9J5hcGictccceKDBJw5c5d7sEtNWlZzsYomYCaNNWmgUsDlj88rRg/z28YdZGZpmrjGC0W6j4vbeO+q9Ay6kAMNIzlQmA4UswUS
BxqEU1QUwjzX42aNvMGnVONeZ4o+MB8gtWdjXBTr+gLxcaWBkM+iD06w/XGTrsYgP3XORp4tXmLc2ARg22uRkSKwFHW1yIxzhK7WTfOWNu5n6+jzF526gy5W9d256aQ4M486vb3ut299myMQ+5LK30yBxnKRBlE7SNkGYfD3Y9lvefT59zzxdYZkwNszmQ3laP9Tmv7vnG3w8c4GiBE9r7kkv8u9zj3NpfJzmsk1qPUdmPUPuegZjyUBXqv31HiDxctNpvLki1QWJOthhuJAc/k7HQdxIMXQRwiwoG1qTBpHjkhNF7E4X1e58YCGxkc/iP3iE6z9p8k8+8Zs85i4CYAiwgLQ0sDCwhEFCSonp2CvkFj7HL2z9DG41Q7Geg0432Th7vUQpwLIg5dKZSrF1t2DhkRv8WOkNFuxNHktf5dqJYZpnR0ldTkFl/4kzwjJhtMTWSRceaPAzx17licxlMiLA0xaetsiIAAtNRkgcYTIiYdy4yMR8nX8Wf4LGhTFKixnoeslB3Ov9KmTyn9JgCoQh0ZaJsg0iF6K84tjIFgedTRbsVdLS5/O5u4mtXu78AwkfBDLlEi/MceUvZfiJD3+bvzXyLJOGjSUMJAKFRiEAG4lEoThubfCIu8IzH7nEPzB/ksKpDKJaQ8eSvtiB7SgPvvM5CQmxQhgGOpsGQ0IUI6IYoXViaKP4tqHuebpabe/j73+9e2J0peOg7j7Mjc9kueeZy/yd6a9xj9UhL1MANJTHmNnk3qFlUmbI2niOWitFo2OzVstQuJhl7NVRjFvr6HojyaHudRGrl3PW0+Ns3mvjT4Wk7Ii2b9NpO9iXUpTOxxihpjlvwt1NDpSqVL0U18+NcOCrszjPN1Gdzt6s512s2ygWqf7ocdo/Xed37vs17rLAEikUiqYKSIIjgSW2jW6CLA5Pu1X+x0e/wD+p/AR2s0Sm6yd5an8PLw/ZyzMX8oTjBZozJsahJh8eucSCvUlJQtFu8t8e+Sr//Uc+i1ufIFdrENf2MV0jBDKXpfzkGPGP1PibR1/kkdQ1pFBcC8Z4vnGML1++i6hjcuTgOp+dfJ0HUzcYlT4AU2aVH58+w7/98Ecpnisgmy2U0re9nff7Pra9MUhCV0DEEu0HiEYbRwhybgG/ZHFlbITmaIq2tpm2qjxz5AovH7mH3JUibJYhit7fWt7t0k2L+N4jrPy9iP/rnl/nQ26ZvExj9N5PqGN8HVJRERJ2LjRLGJQkPOne5OcffolvHn6S9EYZEUXJc93LApZWd/pLu3+vEAgpkNkM7aePcuszCjMdEdVs3DWTzIomu9LL61da6HYHYpUYaal20hJJ4frPPlfv2+gK00TfdZhrfyHLxz5yir8y8jzHLZ+sdDCEJNaKEM1GlONsfZK1Zo5ISXJpn+nxDe4rLrH5VI4XPnkQ+3OHGf3WFmK9jG53UEG4t56lFETDyUVgVkyCRhajIyiswNAlH6vi0Z3OEB7r8rPHXueh9HUUkt8dephXOyc49EYG9tnoSseh9cxRNj7l87+f+DyHzAhLJFX2UMd0tCbUIGWMJQxiLXY2uyEkaWxOOsuYUx28Ypq0YyW3+V4vVAh0GGK0fVJbKRpLGc7MTnOfe4uiUyPWmmGjxeR4jdbUBLnhItQb+xY5CMNAzU2w+aGQn547z3FnhS2V4dnmAn946V5Sz2eZvhKCgMrsDP9sYZqJuzZ4cuw6B5wKAM3YxR3uUr2nwHCliYii7zzM7xW65zXv/lIUoTsdhO8jPI90GDGqhllN5fj6+AL3zd6kKD0+XLzInx5aoDudIXPdJQ6Cfb3MjPFRLv1ll7+78AUedDZISweFJtIhTRXwul/kN8sf4dT6NAeHtvjo8CUeTF1n1uxgASGCgtGlftAi84YL7S6whzWe7+P3CNsmWjhA/F+V+d8OfgOA5xvHeHb5EPVsEYSJ4TuIMEY02zsMjJ3i27vA+zO6QmCMDHPjRws8/cyZOwyuRBDqmJbyedEb5xd
vPMPK9RGMjkQ5Gm+ky/HSBg+lr3O0UOazpRz/wPkJVlPjjL/sIG+tI+sNlLd3h1IYBiLS2M1tGpXA3dIUr3jYi1sgBN178/zQ0TP8aO5NZswuoYZa6Rwvzh/a/9BNCMTsFMsfkfz8Pd/mQWcFRyRhZKhjrkXwXOcuZq0tjlplLBGRFXfmrSSCgvSYH6mwPpxFZ1MJzWyvoTX4PrLRIbOaonjR5bn0ApOPNViwn8cVglGjzd2lVf50fBxVzCCk2J9HKgTCcVh/NM8TCxd4KnsJW8S81DrKH166l/Q3s0y8WEOW62BIUrcy5JZy1K5N8AdzYzDpM1RoM52rc3xsgzMPH6RwaQij271Nf9qLQvDbfn6H8hfHoBRCCFKLFoXREuuPZskLn5KMWLBXmZrdoj05TtZ1k8tsnyAMA+/4BCfuvcVD7g0yQhJrTUf73IwMfmXrI3z1jx+hdDYm72muzZd44+55HjpxnR8fPcW0VcXTFpUoQ5gDHDtJqcRy/1J5QiLzOa59Ks2/PvobnLDqdDR42uKUO40XgVtR2JttxMomqtn67qmKvqYXeqGQTLm0HzxA5qlN/vLItzhieaRlYhR8HVFRAd/2pvhH5z5N9MoQI6saEYNfkjQMl2hWMmy0OGRZTJkt/suDz/GPHvxx3EqGoXYR4Sf8yD258Xp5GaPlYzddRAR2W5NZ9rEXt9DNFiKfo/yI4qeHX07ei7AJiZkw6zjpMAkp9hHCtqk+PMb0iXU+mjtHsVdR7eiAm5Hgl8ofYqVb4OnSFSbMOqEOUELztnIBtlBkLJ8gD1HOwXp7QeH9QqtePh5otrBWTIqGJMw6XD05QlwCSwhyImLOrRDlNLFr7njkfcU2p3lyjNpjAT9UOs+EWedaMMZXVhcwT2UZPdVGLK2jOt2kgNVJvEqrkSOz7tCaTlGdcmnOOXzs4GUqd61TuXuCkc4oYg1otfvDCOl5v1ol+Ub8ANnxMD1F1vYpGSE5aVCQPkNulyVXQD8u1HdC7zKrHrP5aOk6JcMDBB0dcjOy+LWtp/jylx5m/kstzKUtMCRWcxSEy+vuAXKWzz3ZZUJtUAkzRBmNzqSSYlas0PvE5RaWiRob4sCTSzxoN0lLB0sFSBSVVprMsiCz3EVu1ojrzaTQ93b0vZDWI2rL0hBrj1n8lQOnWLCr5GQS9m4bhS+37uOXzj5F7qsZxq56mDUfbUk602n8IQMvsnBFhCNsTAwedG8xMVWlOzJOPutgmGbPfd+bzaxjhdHsYjdzmJ7AqYRYa3V0swVCEJey/Mhjp7nPblCQLhIBvRvPb9voINiTdXy/MEpDbDwK/8XkOWaNFpYwUSgqccwfNx7ipbWD5F0PX1mE2iDUmlDHybpJ0gsKjacNlJYoC5QtkyLBXkLvotCopFziSElmYoR6kMIV4AgTVwSUzBbKVmhznwppQiLzWdoLo3x04RwnnWVsFBe9SVZuDjN9KcZa3Er4rbG67VUCth9gNjLYjSxG16TmponnBU+MXed37x4nvZEn4/mIIEzykP24k7fTDlokXrUQhGnJfG6LnJC4wsQVPlJotCT5bPfwzHxPCIlIuXQmYcxqYKDxtGY9tvjjxv380Zl7mX8+wri4iPJ9RDqF2fRxag6ybPPW8CTd2MIUilqQIrYhKqYwt9yEUtivgtod7yEprjeOFfhr03+yE6Ur4HT7AMHNLGPXQszVKqre2BNmxbs3uttJ55RLNDlE5sEyT2QukxOyl2OMuRBm+K2tx/nia/cy8U1J6Vsr6FYHVIywbTJ6lMaBHPXAZTu7aAiJK2Lytk/bIgn/9xg6jqHTxezEaAFWzUN0PDQgclk2H87zL8a+zpBM7eSjVe+AOjcctO/v+ZreEdKge88MR+5d4unsRXK959HRMdeiAl9dX6Dt2Uzn6qRlQIyko8HSISExFgZGL8WzFmfxYhOhQBviO6kzewEVo7VAK42kg3RdrLbCMSLS0ugZ3QhLxDsNHPsBYZmIQp6tu03
+TulNxo0udWVxuT2Gs2aRXmmjW+2d4q3QItmRngdRhPR8UmGMMou0ZwzakcO92SXyh2s0rpZIrWeR9VbCZug3pESnHLpjggeyt0hLCxMDY/t5Cm7zS/uNnh0QjkPsgtKCmrLZ1Abf6h7m96/fR/E1h/SlFVS3C1IiLIs4ZaEFmC1JeS1Pq+vg2iFCaLSp8UsO1rKDbhkIGfX/7hASUcxTvlfymHsDS6SJtWIzlnxj+SiFS4L0tSqqUus1xbz/C+C9e7qpFK35DJ+de5lZo0WMoBJHnA3G+PW1Jzn9wlGO/lEX89xN4lZ7p9ND2BbGlotTz9LyHWIEsVY9IyeItESGIL0I7Qd7m0dVMdoPkFHyO0WQtPsK1yWYKWF9ZpODpntH2FtTEc9tHWH0VLRTVd4PyEya5Q9Z/O2JM0wbLUDg6ZjNWPJ6d57Vap7hfJuZdI2c0aWjHBa1ZE1EGGhyMiAtYkIEN4JRYp10BqJ00o7ZD2yHw1GE7tHS7i8ukRY2hpBIIYiRiFggQ9X/HHmv1TsaydG5y+N+Z4WSYdDUmvVuDrsBRrPH5lC7DpPSydeCpIlDCoFTSWG2M3ixiStCTo6u8fL0EH7JIb3mJHnIPtO4hWkS5V3aM4oHUzcwdyWSJImnq01jH3PlMvEUfbjqjSGF5pY/zFeWF/BPDzFz1kNX68m32hY6lyEsWCgTrCaIWzZxyqKeU8ihAG1AmJFo29q3y0MYBnEpi3l3g4ldvsgZf4rGpSHmL/lQrqC73T1rr35PRlfYFrpUoHKX5MPZ87gCKgq+1Z3nF288Q/1PJzjyxSpcuUXc6SSHUQigt+ggxPQ0lY5DTaWI8Ih0TEebtAMbw9eIIEKrvQ8tdBAguxFCk/DuLAs1lKW64PL/nvhXWCK9872+jjgdjHD22jQnTq8T7WNFWMxOUniwzFFnDUnCdW4qyeVwlFdq85imYjpbZ8Kp04xTrIcFVoMCa14eL7IYdVs8lLsBwOXuOH5kggIZ97QZ+iVCo3ViwHob9IdzZ3dobEprWrGLCEWP49pfCMNAuC7eeIoPHz+/wxuFkDA2kCGIMEqeB+ziXcYQJ9xjze09Y7XBiy0MoXgwf4sXJg/jDVuksilE3d67+sN3fTMSbIugaGOMesybAYbIEGtFW0naUY+na5nv3OraD2iNUxP88dW7sawFmuUM6as2Y+cj7KVaomNiWQjXRaWT9KPV0Ri9TJ2W4A0btOYc7IZEaHVnF1ifIWyLoOAyX1oh20uPKjRfrZ4kd11iL9cTPZY93K/v2ugKw0Ck03Tm8pj31yhKnxg45U/xb65/mNbXx5n9Wg0uXr+zpW/3ZowirJbCr6RYDIdp2tcBuBzMsFnJMV7XCC+4fRj2EDoIMVcqYBoQK7Rr05nLU3kq4Jjl7nxfqGNW44B/evVHGX3WQq1v7vlavit62gDrHxrmZOksAJ6WdLTgajjMV2p3c3p5Gq3hVmOIjBlQ9dO8tTyJfS5NdjF5ZlfHBV+/+ziTYzXSVkit62J6IL1eV00/LxCt0J6PWw6YMptABoCmVlSiDIYvkN4+cEkNAwpZGnMmj+Wv4QgT1Utnpa2AchpU1knaQ+E76D9aSXbIIBKiNBzPrTNqNkhLn2yxS5ix0bZ5+3f0C1ohTBPlCIRUFHoFa0NIVuIcYWygTVCO1ZfU3Dsuq9Nl+K2AdjlLejNivBFiNtvIehtdb0IYgmGgO0khyrUMzI4FGmQQoy2JETgoW2LXNSLWO0Z3Lw3dO0FYJlHaYNRt7dRCFIoXbh1kbClCNFqo7XrFHuHdGV0hErpRMUfjgMl94yu4IibUcKo9R/nMGAfe8BHXloi/260vkg4bXAdtgggEV7xxVtxbeNrgG/UFrGsu6Y2e2EwfwnkdhajyVqJ+lXJRYwVqh0w+dfL0HWmF1bjL31/8DNWvTTL7whpqn4powrQQczPoT1d4JH8TT1tci0psRVm+1TzC164cx301gxbgd7KckmOgYXIlJnutity
oJps8n6F1qcjWXRMYH1lNtlO8nbbpc25aa3QQYtQ9Osok7m3Yc8Ewf3TjbvJXwag06Guypteajm0R5GDCqmMIiQEYJPoKACgQvfzkHbQvaewIzVAq0pnJ0JlU3JNZ4pBZYTEqMJJtUy4WiTM25l4XJ3dD651QPnIkU6UGktuNB8+2TnLz6hgTSwrpBWi7V/Ttd2SmFbrbJX12lfRlC91sJ6JKUUS8W6WvB9HpIJstnHQqYVkIgXYsivUUdjOLX5QoU6CdRD1PSIGO+vgepIEYKtKYMzmc3tw5/7HWBGtp7EaYFFf3GO/S6Ca53Hg4S3sGHi7cwBLgaUHB7KINfcf3vj0LLkwLkcslCmTjBmLII2d4rEU5mirFppfFqQrMZpJP60sIqnVSSY0V0jDQlkGUASkUoU4aDFajFr9cfYw3XjzGgdd82Nzal3yusGyMiTGWPjXGXzv8eWbtLbaiLG+05nh2+RD+2SIjb2pyN1uIsLceBSKOkVsNVKNJHATJhg1D0osWjQMF6l2Xbtcm39bIjp/c3P2GFCDhUjjGXXaZm1HAs837aS7nmdmI0e1O/5+pYaBSFmFBUzJaO8yOGEG5k8GuJ8/jOyIqIRJ2TjYDY8M07yqx/ojk8YcvcNRewxWKCbOJKRUiBhGpvtMJhSFRpRyNeclnxy/vfL0cd/nG2jHSiyapDR/RaCfPdR8YDFppiBWq3kAIgfL97ynbqeMY1fUQUZQ4bykXUg5h0UUbgIYwIxJPdzcPtk+QrkM4NUTjiOLe1K2d2tLNKAIBYdbESbuIZnNPm4neldFNVKRswqxFUIqZsqrkelX+ObuMGg3ojtq4hTwyim5L4NFTdyoWUFOjVBeyVE9o7p9bYs4uYwhFjOBKZQS3opFtH7Y/vH5A77qBlcbw4UpzlPq4R1pY/N9bT/PFm3eRWRTYle5OUaifEKaJMVJi68Mz5D65xpPpy4Ta4EYwwour80SvDTHxRkTmWg1R7ZHfDSNJk0Qxutm6fVHt2vCxA+3lPHbZILsUIlqdhBbV1zeTpEiC4TShNvF0xIvdg1xqjWFVJHbdT4qkfV2DTIpbUiIiqMVpFHU6OuJGOMz6SpHpDZWwV3YEl3o/ui1PWcjTPliksmCQuqvKp0feZMLoUJSSldCm1k1heEmY3NdnKg1ELkd7Lk/7eMAD6Zs7L/1+6y6WLowzdSnGXUzojzqMEi9R7594/E4jB7zz39ymFkJysQE67dCcdQhygjgFuUWF6Ph9v5CFlQhf1Y6mSM01yEgfRcJx/5POcRAQOwJt7T3v+V3FRMKQYJooRyLzIUXZwREmOSmYtbaYGa/SmJOEs8OIXDbR1DQthJ14cOHCNOtPFNh4Kube+6/zE2NvMG+VsUTMZpSnc7lIdjVENDvfWVHeawiJcGyUbSAiuF4eZikyWY8Dvrl6hOZaDruhEX54u+WvjxCmSTw1zPrHIv76/LPMmiGjRpdRs0nXt3EqkFrvIuottOejwxCEQGXTYFtgJ89ZuE4iIO86KNtEBpC/YDD8Vkz6ViM5lH19rj1x+HyO1pRNTnbxtWI9LFAPUphdgfTjfXmmqOTzs5qCa8EYTRWwHite6xzEXrNIlYPkWUa7vDPRU/hyXaKxPI05k86RgKemrzNvbZKTAikEZ7xZtso53FrPSPRR70C6DmpmjOpRk0MHNhg1kks31DG/fOVJSm8KsldbUKnfySXvpST6WpiS4rY6l5WkBL7n39MJa0UYBuSztOey1I9C/biiO66x6xHC63NqRAhksYB/oER7UmAZMYvh8I5GxKnmAYh7im/bQwH2EO+Jp6uFwE0F5KWHIQRpLEaNLvcPL/FHB0epr6QpNUoIQAQh2Bbd4+NsPGAT3N/m00cu8IniGY5aZQDqyuGF6hGKl8BZaaFbrdsHoU8QhkxUsQyB4Wu8zRQ3omHe6s6wUc5jNgxMvyfpJnuE834RtXuGyhtN86G7LvKh1A2ywsaSEbPWFqVcm1o+R5izMSw
zmRqQcgknCnSmXKxWDmc9g1FvJxGCFAntRkBuKcbdCrE3WrCxhep6fX6uiXxiPF6keUAyZjRZiw2asUsntBI6YLQPBlcn1DXZ8nC3NGeaM6xkLrASFfj21jxuWWDWex2Pu7wvhEQ6Dgzlac2maByCI3Pr3J+91WvkkdRVzJ9UFnCuO6TXfESzjQr7ZHSFQGQydKYztGcUzxTWyYgQMDkfhrRPlzhwsYuxtoVq9xo8tEouDqGhx53ux94VUtyWn7RM6FqJZGcQfu8mAiERmTTegSHKd5uoo23mRmrcvDSB2Y52DTbozz4RpgVDefySibKg0UzxemuOT6avcTNKcaUxguH1aI27Pfg9wrsyulpriCJErNFa4IoIiYkUkBMB92UWeWH6IM25EdIbWVJhhPB8VCHD5r02xhNV/saRb/OxzHlmzAgDQUUpAgy+deUgh674yHKVeLuI1k8VecsCy0QLgeGD1TC46E3ypZUT6I6JiEEZ7ErqBwmdqB95MiHBcfALBo/kbzJqmDjCxBCCCaPN46M3+IO7ipQ9lyF3HLMb0x2xqB8y6E4orIZFdjFPdiWNu9FN0jNKY3RC8ue7iGoj6bjy/P4KsW97ucU8ndkMnQMRaRmyEhVoRC7dIKlaa0MgtwWl+5h31GEEnS6pLcW56jjXhkdYDIa5uVUiW9eIbrAr3JU7ZH/GhukcKVE9bpA6UuOx4RtMW5WdRp5T/hivXDrI5AWFs1RDbfPQ+wEhoZBNaiATXeZSZSyh8HXI5xr3M3xWYy9Vks93OyW3PSLHFOgoQhD3Qfs30acVKRc1UiDOOhjtALllJ5Mgutsph/g7fk6mXOLpEcr3JJrGj04tcV9+kV88PYnRDRN2Tb/SCz0ZSpVzUWZy9nXN5kx1ipulFM91jrHRyGK2BYbXk3PcY7w7TzeO0WGI1YroltNsxFnuooNEJgbCqjFXqHJ6uIQ3bGLX00jPpjuVoXNfl//m6It8Knu2x5d08XUIJNJ6+ddcnBvLqEazt3n6nFowTbRjoQ2B0ImBvdQep+3bYCnilMQblgSjGdxqPlGUCiN0n+ZPibSLXxCMWzUcYSUtvFozKgU/VjyFc0/EV0sL3Do0hNE2UdMejxy6yZHMJm/Vpzh9bQb/rMOwhpQXIUIfWa4n42X2aR6d6B3CeDhHa9IgN1mjqWxqcZpKkMEPTbAgTpkYttVfEn8vf6g9D3fT58rqEOemp6lGafyORSHUO2vWhoE0BSKVguEirbtH2bwvkfd8cvo6x9xVXJHk9StxzG+sP0HxVZvC+QpsVpLL7G259L2CMAxULo03IpgoNRg3k2aDmor4vev3M351FzULboufp1NJcW93HnXPFtULvU0Tkc3QPpCjM2pgei7ptRTOagpZrqGbTVTAbcO7PexgeIjqiRzNBzx+5shpjrmrKCTpFYms90ZK9fF5knJBSmSoMbtgdCSbzSxn/WnW/AJexybTAtOLd3RzP7AZaVppdNfDWm9QemOM33ngUe6Z/DJpaeBpjYEma/molCK2DaKMBWmL6lGLHzvxBh/LXGDSsElLe4dG1FQWv7n8KBMvNlDlyh3Ft77CkAlzIWUQ5AVyJhEzL6a7WBMx4YhB3SogYoeRYBQ7ilDVWl+6jraFk7tjgmHjNl9QIshKhwecNoeGX+TThVMsHh3GEjHDRotZM8ntvZaexouf5trqAbQpEFoj2l3iSnV/W5dFktuLUxZhRjBXrCWslChPJ7IAiN1eVTiThnqzv+vp8YWttTrZ85OcW5hEaYEODCJXEJUyWMEQRhCCYxNOFqkeS1F+NOauhRvcX1xi2qmSkQGuCIkR/IfGA5z5ynEOfnMLltdQ7W5fQ2GkQDsGcQqKbhdbxMQIbkZpuqeHMMoryd+X8nYB0LETWmaztWsqy95CWGZywY4WqB41ac8otKmx6jbZxRGKV/K4l9ehVu+J/yd5XFks0D4xRvkBzQ8tXOQT+TMMyy6/WH6G0vkw6f4K9rgTdfe
6HSdJz0UKq60w2xLpg5SKSpwhRqIiieGD9OO+5Orfnaer4uQQr24w/nXFa+JefvsXlvlI+iIxBrU4jSE0Ih3hDZtYbRNlCRoLEZ8qnGbGTIRPtlFTEb9Ze5rWv52mcP0ycbuzL5NMk2R/YnT9gqQ9rZkfq5CzPH5u5tt4OhGQeXboKG8YBzE8l9FmCaq1vq1JuSZhTlOUXQxxm/huAAWRIisUk4bmYWcDoEdvc/B0xJjRxI9NzK7AbMeIWpN4s9zX4s53QIiEJqYTD0VoKHcyXPAnCbVBJ7KJY4mMQEQ6Cdv6fblqnYTXWzVG3hzmpXsPMj1Ww8iENOdNYjeD2UmjTIgygsZCyMnjN/iFiVfIG4mWgkSRkT5bcZZfWfkQK796iEPfXCZeWduXqcbCNImtpPGiG1k0VIqVqMCzzeMMndfoTjf5PtsCy0ZkUui0i+h4SWS2nePdwzVuG3eGh6gfzRI+0eTnj73GuFVnKSjxenWW81enKL0yy/izKeT1RXSskPks3n0HWPy4wUcef4v/dPQF5s0Wr3hT/MnvPMLc+SXing5GXyANxNQ4wUQBf9jCKxgERUGUVYy6PtUwQ9bwIZA4NYVR7ex5Nxq8h0KajiJ0u4NYXGHiD9r8RvSj/NKnnuLe8RVylsdqJw9KENsQ5CTtScFDd1/hsFXF3SW+vR53+eXqo3z+t55k7sUbxI3W/ni4O29EgZT4BYlxsMmnJ87wscwFSjKiokyayiY9GtA86nJra4ahizZGvw6XVogo4Xw2lUuso++QPdw2wGAQa4UUyciTUCte785z6+wks6+HuOeWiDbKH8gY7m1NBxFrzLamXMtizGk6KtlmoW+S9sDw1b5dCDqOUZ0O6bdWKLwwx+JjJeamt8hPrwFgipipVIN7s4s85N5g1gwJei3XK3GOq8E4z1aP8cKZo8x8WTL60nXifszxeifEMWa9S3rV5dqtMb5g3cN4qsH56gRWRyXGTwhwnWSMfNpGxBoZhP3xwHvsDplJ050rsvkw/O0Tz/Gf5N7qdcldpzP8LZbmTb782N38yseeoPCFB8jf8tk66VJ/1OMnT77MZ4ZeZ1h2+Wr7CP/45U9x/EtV4tX1/lHFpIExXKL68BibDwpUWqFFDK6iNNrgibHrzLtlXmkcJHvVIn+t3WODhHt+lt4bCU3F6EChKjXG/ugq/rVZrk4v0BkXKBuyHlitZLpuUNTMpGt42qCpAjytuRZl+a2tH+bLz9/P0a83UFv7uIl7XXUim8Ebc2nPaO6fWuGos0ZJRuSkgUGEK2LGzAbDbpvrdkId6ZdegI5jZMfH3RKc8g7whHsZ4/tg83VUyH9sHebffOGTzH8pwDlzi7ha/UAM7s6omTDEaPo49RRqy+Hr5QXGUw0q3TTKS7absgTCsvZPIyBOeMwjb3YIihlW3AJTB+o8WrzOCWeZWbPOqKEpSBcwKMddKsrl1zae4rm3jlN40+LwGQ/nwi3ireq+ToXWUYRc3WL0dQvDy3Ju/RBvpjW565LJlWaSfigV8Cdy+EPJ802VA4zNGBVFfSlIC9NE5zK0xy1m713hQ+lLFKTdK/4m00oKMubI0Bl+8vFTfOu+Ob5ZW2DBCDmSXqdodLjgT/FWe5ovXDrJ2FdsxNKNhAGy1891O4+cShEfnGDzITj8wCKH82VClbgxM26VOadMJcqy6WVxyxqz3oWwP2mO98781Tppqa3UcM5onMUcxaEMcSbJ3YlQoWwDLW2+cnCBA06FcavOeljgT8rHOf/GHDNfV8gbq8T91gLYjV7VVVsmYVoSpzS2jHFFuKPZFJPkmmtxmvVOLumSq3uofnrilRql88P86uUn+Pj95zli0ZPtu22YEqlJTUcHXAkNfq/2OP/h608y/wUf+/wSqlbfVyW03RBSJNQ600xoeIHGrhgsNws0Q4fyZh57w8RqaWTUnyLJd0VPgEf5PtatMmOvmmxGGV7yDlGbT9EZdlDpGyhqgMdmLPha+yS/evV
xvJeHmX0rInOtglivENfq+2pwoVdH8TyMcoPCNRMjdAhTAqeRGFPt2EnXXd4gtgVWR2F0InSr3SsC9aerU0Qxpq9p+dsiMXeeje2BlFNGwOOpm4Ta5HR7ltcbB2iFDkvNIpuLQwydNhg6U03mjvXhfG2nQmQhT5ixiIdDCk6XktXGlSGesmjFDi83D7HSKXD64gEO3goQ9RYqCPuSD39/7Ra9yqhqNJC+j9lsY7rOjiybNg1KukBF5/hXi59AuQrpSdwNyeSlmMyF3uiL/R6DE8fJdIC1gPzlFC+kj9KJLD49eoZQG6yHBa52Rji/NUHlcomxGyppSuhXU4HWqFab9KUyrT+a4OeN/4y/d+yrnHBWyYgIS0Csoa4sLgQTfLV6km9cOkbuNZeDp7pYF5YTg7vPBmEHu0ZcCykhiEiVA4qXHCrOMM0A8psCt6JIbcXY5U4Stu3X596b3qqqNVKXJZPtEu2bLsvT8/zqxBz/bjTEzfukXZ/KWoHUTYvCVcXo1Sbmeq1HgfL2R8/gu6HnqdtLUKym0E4v0VRNeNkSSK1IHMdE+hFGubfePl3AOo7RnS7ZG21uvjbCr008xc8Nv8i82SUrLIxemqmjQlZiwbe6h/mNpce4sTSCaJkYHYldE4ysawrXfMR6BRX1x8sFdsZTWVsd3BslXhXzXCyOYZsxfmTQbrmotokIJKOvStzFTXS73beC3vvvceuNalFKJ7SqdidpPAAwDJwwYqyRJX8zndCz4hi74WFuNGBjq//0sLdDxehIoBpN7FsGo1EJp57i/PJR3pidR3QlZktiNwR2TTO9GpO52kDVG301EjqM0Otlxp6TbKox/v4jf5GRAzVKqQ6GVHRCm3IrQ3sjQ+a6ycyFiOzZVdhKeKIfmMGFnctXhCG62+1NXQgZqqexW1msZozVDJCdIGm77XQTYZT98sq36WNdD9Y3MesNiotZCoUsYSlNULQIMxliK8vB9RBnrYLcTJ5rHASJ0esnb/x7rl312rtVMhm4KhEiiSh2mCktA6PexDTNZK2dbvJav6Q7w8STNhc3mXzR4Qvph7nx6DAfG7nAIXuDjPRpqhTnutN8c/Mol65Nkj9nMb6hsLoaw4swuzFmzceoNFDNVn/2Qi/K0UEAjSbCD5h8IUPrhk2QdwgMMCIY6uqe3KQm/9YmrG/uYqX8oHm60KtWx2itbo+O3paWExLh+chGk8yKfftHooQ/qfeLHvb2JfeKgToIMBsthldzFC/mCUouZsvDaPjIjpd01nS7uyYT9/HQqRjV7iBvLTPWbDN0aYLGfImt9DBagOHDUFMxuRlgrW5CuZJ4M/tQQf++1x+AiBV0vSSsK5vk1tPJMwyjxJvpKU/tuxHr7VPlxeD7iFYbUd7CWrSxLBOkkeRHO110EBDt1v74IJ/t9oUR9yQ5e0g4zu+kcdDn/aCS4qSOIlKnFAfbU9xcOcS/nDpInIvB0IhAYlUkuVtw6FqAc3M1kWsNw509QE+fpa9nS+36zDsdnG97pAp5dNq9rSmtFMIP0UGQRIxh1Ne6yN6pOfQ2NdxpR3UU7hxCpEAIkXS2fZDeA/Tob3EiQVmtwk2Btf0Su4Zh76thUCjfR62tIzfLFF+Wd7y2fanFH7SBfSeoeKd5ZGeFjf2bTPt9o0cl01GUjOX5QceOJvVtQ/AB+Cp3Qmu07xOvbyA3Npl8xU7O+LaucBgmz7h3xu8wYW/XZtiP/bz9mTebqFZr///+LvR/dOj2CJeYnflTOzqbP0jG4wdhLbvWsK8c2wEGeD/oGeDv+wR90GftA/77+zOveZfh/aDf8AADDDDABwnRj5E4AwwwwAADfHfs4wS7AQYYYIABBkZ3gAEGGGAfMTC6AwwwwAD7iIHRHWCAAQbYRwyM7gADDDDAPmJgdAcYYIAB9hH/H37YptEikQmZAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "# test function: generate_sample\n", - "sample_label = Tensor([i for i in range(0,2)]*16, dtype=mstype.int32)\n", - "# test function: generate_sample\n", - "generated_sample = cvae.generate_sample(sample_label, 32, IMAGE_SHAPE)\n", - "# test function: reconstruct_sample\n", - "print('The shape of the generated sample is ', generated_sample.shape)\n", - "plot_image(generated_sample,4,8)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在条件为`(0,1)`特征采样中,生成的图片有的看起来像其他的数字,说明图像在特征分布中,其他数字的部分特征与`(0,1)`的特征出现了交叉,而随机采样正好采样到了这些交叉特征,导致`(0,1)`图片出现了其他数字的特征。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} \ No newline at end of file diff --git a/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_toolbox.ipynb b/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_toolbox.ipynb deleted file mode 100644 index 902691b73eb6628e3bf765a8dfb4a6b162b13cbe..0000000000000000000000000000000000000000 --- a/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_toolbox.ipynb +++ /dev/null @@ -1,365 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用不确定性估计工具箱\n", - "\n", - "## 概述\n", - "\n", - "贝叶斯神经网络的优势之一就是可以获取不确定性,MDP在上层提供了不确定性估计的工具箱,用户可以很方便地使用该工具箱计算不确定性。不确定性意味着深度学习模型对预测结果的不确定程度。目前,大多数深度学习算法只能给出预测结果,而不能判断预测结果的可靠性。不确定性主要有两种类型:偶然不确定性和认知不确定性。 \n", - "\n", - "- 偶然不确定性(Aleatoric 
Uncertainty):描述数据中的内在噪声,即无法避免的误差,这个现象不能通过增加采样数据来削弱。\n", - "- 认知不确定性(Epistemic Uncertainty):模型自身对输入数据的估计可能因为训练不佳、训练数据不够等原因而不准确,可以通过增加训练数据等方式来缓解。\n", - "\n", - "不确定性估计工具箱,适用于主流的深度学习模型,如回归、分类等。在推理阶段,利用不确定性估计工具箱,开发人员只需通过训练模型和训练数据集,指定需要估计的任务和样本,即可得到任意不确定性和认知不确定性。基于不确定性信息,开发人员可以更好地理解模型和数据集。\n", - "\n", - "本例将使用MNIST数据集和LeNet5网络模型示例,进行本次体验。\n", - "\n", - "1. 数据准备。\n", - "2. 定义深度学习网络。\n", - "3. 初始化不确定性评估工具箱。\n", - "4. 使用不确定性评估工具箱评估偶然不确定性。\n", - "5. 使用不确定性评估工具箱评估认知不确定性。\n", - "\n", - "> 本例适用于GPU和Ascend环境" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据准备\n", - "\n", - "### 下载数据集\n", - "\n", - "下载MNIST_Data数据集,并将其解压到指定位置,执行如下命令:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据增强\n", - "\n", - "定义数据集增强函数,并将原始数据增强为适用于LeNet网络的数据。" - ] - }, - { - 
"cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "from mindspore.dataset.vision import Inter\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\"\n", - " create dataset for train or test\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define some parameters needed for data enhancement and rough justification\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # according to the parameters, generate the corresponding data enhancement method\n", - " c_trans = [\n", - " CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR),\n", - " CV.Rescale(rescale_nml, shift_nml),\n", - " CV.Rescale(rescale, shift),\n", - " CV.HWC2CHW()\n", - " ]\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # using map to apply operations to a dataset\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=c_trans, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - "\n", - " # process the generated dataset\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size)\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义深度学习网络\n", - "\n", - "本例采用LeNet5深度神经网络,在MindSpore中实现如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": 
[ - "import mindspore.nn as nn\n", - "from mindspore.train.serialization import load_checkpoint, load_param_into_net\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x) \n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 初始化不确定性工具箱\n", - "\n", - "初始化不确定性工具箱的`UncertaintyEvaluation`功能,准备如下:\n", - "\n", - "1. 准备模型权重参数文件。\n", - "2. 将模型权重参数文件载入神经网络中。\n", - "3. 将训练数据集增强为适用于神经网络的数据。\n", - "4. 
将上述网络和数据集载入到`UncertaintyEvaluation`中。\n", - "\n", - "MindSpore中使用不确定性工具箱`UncertaintyEvaluation`接口来测量模型偶然不确定性和认知不确定性,更多使用方法请参见[官方文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.nn.probability.html#module-mindspore.nn.probability.toolbox)。\n", - "\n", - "### 准备模型权重参数文件\n", - "\n", - "本例已经准备好了对应的模型权重参数文件`checkpoint_lenet.ckpt`,本参数文件为[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)中训练完成5个epoch后保存的权重参数文件,执行如下命令进行下载:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/models/checkpoint_lenet.ckpt" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 完成初始化\n", - "\n", - "将需要进行不确定性测量的DNN网络与训练数据集载入,由于不确定性测量需要贝叶斯网络,所以当第一次调用初始化完成的不确定性测量工具时,会将DNN网络转成贝叶斯网络进行训练,完成后可传入对应的数据进行偶然不确定性或认知不确定性进行测量。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "from mindspore import context, Tensor\n", - "from mindspore.nn.probability.toolbox.uncertainty_evaluation import UncertaintyEvaluation\n", - "from mindspore import dtype as mstype\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "# get trained model\n", - "network = LeNet5()\n", - "param_dict = load_checkpoint('checkpoint_lenet.ckpt')\n", - "load_param_into_net(network, param_dict)\n", - "# get train\n", - "ds_train = create_dataset('./datasets/MNIST_Data/train')\n", - "evaluation = UncertaintyEvaluation(model=network,\n", - " train_dataset=ds_train,\n", - " task_type='classification',\n", - " num_classes=10,\n", - " epochs=1,\n", - " epi_uncer_model_path=None,\n", - " ale_uncer_model_path=None,\n", - " save_model=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用不确定性评估工具箱评估偶然不确定性\n", - "\n", - "### 转换成贝叶斯训练测量\n", - "\n", - 
"首先将验证数据集取出一个`batch`,进行偶然不确定性测量,首次调用时会将原本深度神经网络转换为贝叶斯网络进行训练。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 1, loss is 0.004005958\n", - "epoch: 1 step: 2, loss is 0.07655292\n", - "epoch: 1 step: 3, loss is 0.0005438468\n", - "... ...\n", - "epoch: 1 step: 1872, loss is 0.04286071\n", - "epoch: 1 step: 1873, loss is 0.01260218\n", - "epoch: 1 step: 1874, loss is 0.012557915\n", - "epoch: 1 step: 1875, loss is 0.19752459\n" - ] - } - ], - "source": [ - "ds_test = create_dataset(\"./datasets/MNIST_Data/test\")\n", - "batch_data = next(ds_test.create_dict_iterator())\n", - "eval_images = batch_data[\"image\"]\n", - "eval_labels = batch_data[\"label\"]\n", - "epistemic_uncertainty = evaluation.eval_epistemic_uncertainty(eval_images)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`evaluation.eval_epistemic_uncertainty`:认知不确定性测量接口,第一次调用时会使用训练数据对DNN模型进行转换成贝叶斯训练。 \n", - "`eval_images`:即偶然不确定性测试使用的`batch`图片。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 打印认知不确定性" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[5 9 9 6 1 6 3 5 6 3 9 2 3 6 8 1 0 3 7 9 4 2 4 5 1 0 6 3 0 5 0 0]\n", - "(32, 10)\n" - ] - } - ], - "source": [ - "print(eval_labels)\n", - "print(epistemic_uncertainty.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "认知不确定性内容为32张图片对应0-9的分类模型的不确定性值。\n", - "\n", - "取前面两个图片打印出对应模型的的偶然不确定性值。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0.03323262 0.5226982 0.82968014 0.7946823 0.61467606 1.1574581\n", - " 0.75548947 0.10916232 0.31576157 0.52857774]\n", - "[1.6137205 0.18264882 0.5027047 3.1947389 
3.2517838 0.50852066\n", - " 4.5504684 1.165394 1.4876759 3.0724404 ]\n" - ] - } - ], - "source": [ - "print(epistemic_uncertainty[0])\n", - "print(epistemic_uncertainty[1])" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_transforms.ipynb b/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_transforms.ipynb deleted file mode 100644 index aa170a57dbc127b2fa6d2b9d81860bb7d47c513c..0000000000000000000000000000000000000000 --- a/tutorials/notebook/apply_deep_probability_programming/apply_deep_probability_programming_transforms.ipynb +++ /dev/null @@ -1,208 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 深度概率编程之贝叶斯层转换\n", - "\n", - "## 概述\n", - "\n", - "对于不熟悉贝叶斯模型的深度神经网络(Deep Neural Networks,简称DNN)研究人员,MDP提供了高级APITransformToBNN,支持深度神经网络(Deep Neural Networks)模型一键转换成贝叶斯神经网络(Bayes Neural Networks,后续简称BNN)模型。目前在LeNet,ResNet,MobileNet,VGG等模型上验证了API的通用性。本例将会介绍如何使用transforms模块中的TransformToBNNAPI实现DNN一键转换成BNN。 \n", - "\n", - "整体流程如下:\n", - "\n", - "1. 定义DNN模型;\n", - "2. 定义损失函数和优化器;\n", - "3. 
转换整个模型;\n", - "\n", - "> 本例适用于GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义DNN模型\n", - "\n", - "本例用到的深度神经网络(DNN)模型为LeNet5,定义完成后,打印其神经层的名称。由于转换层面上主要卷积层和池化层,本例也针对性的展示这两种计算层的转换信息。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x) \n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "对于经典的DNN模型LeNet5网络卷积层有两层conv1,conv2,全连接层为3层:fc1,fc2,fc3。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义损失函数和优化器\n", - "\n", - "本例中使用损失函数为交叉熵损失函数`nn.SoftmaxCrossEntropyWithLogits`,优化器为`Adam`函数即`nn.AdamWeightDecay`。\n", - "\n", - "由于需要将进行整个模型的BNN转换,所以需要将DNN网络,损失函数和优化器关联成一个完整的计算网络,即`train_network`。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", 
- "text": [ - "['conv1.weight',\n", - " 'conv2.weight',\n", - " 'fc1.weight',\n", - " 'fc1.bias',\n", - " 'fc2.weight',\n", - " 'fc2.bias',\n", - " 'fc3.weight',\n", - " 'fc3.bias']\n" - ] - } - ], - "source": [ - "import pprint\n", - "import numpy as np\n", - "from mindspore.nn import WithLossCell, TrainOneStepCell\n", - "from mindspore.nn.probability import transforms\n", - "from mindspore import context\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE,device_target=\"GPU\")\n", - "\n", - "network = LeNet5()\n", - "lr = 0.01\n", - "momentum = 0.9\n", - "criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - "optimizer = nn.AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001)\n", - "#optimizer = nn.Momentum(network.trainable_params(), lr, momentum)\n", - "net_with_loss = WithLossCell(network, criterion)\n", - "train_network = TrainOneStepCell(net_with_loss, optimizer)\n", - "\n", - "DNN_layer_name = [i.name for i in network.trainable_params()]\n", - "pprint.pprint(DNN_layer_name)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "上述打印信息即为当前未转换的卷积层和全连接层名称。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 转换整个模型\n", - "\n", - "转换整个模型使用到了`transforms`中的`TransformToBNN`API,一键转换完成后打印出模型中BNN的名称。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['conv1.weight_posterior.mean',\n", - " 'conv1.weight_posterior.untransformed_std',\n", - " 'conv2.weight_posterior.mean',\n", - " 'conv2.weight_posterior.untransformed_std',\n", - " 'fc1.weight_posterior.mean',\n", - " 'fc1.weight_posterior.untransformed_std',\n", - " 'fc1.bias_posterior.mean',\n", - " 'fc1.bias_posterior.untransformed_std',\n", - " 'fc2.weight_posterior.mean',\n", - " 'fc2.weight_posterior.untransformed_std',\n", - " 'fc2.bias_posterior.mean',\n", - " 
'fc2.bias_posterior.untransformed_std',\n", - " 'fc3.weight_posterior.mean',\n", - " 'fc3.weight_posterior.untransformed_std',\n", - " 'fc3.bias_posterior.mean',\n", - " 'fc3.bias_posterior.untransformed_std']\n" - ] - } - ], - "source": [ - "bnn_transformer = transforms.TransformToBNN(train_network, 60000, 0.000001)\n", - "train_bnn_network = bnn_transformer.transform_to_bnn_model()\n", - "BNN_layer_name =[i.name for i in network.trainable_params()]\n", - "\n", - "pprint.pprint(BNN_layer_name)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "上述打印信息即整体转换成贝叶斯网络(BNN)后的卷积层和全连名称。 \n", - "\n", - "如果想要进行单个贝叶斯层的转换可参考[官网教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_deep_probability_programming.html#id16)。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/enable_graph_kernel_fusion/images/graph1.png b/tutorials/notebook/enable_graph_kernel_fusion/images/graph1.png deleted file mode 100644 index de4827bf7e6437ceb9e6728856d479c3c002ca9a..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/enable_graph_kernel_fusion/images/graph1.png and /dev/null differ diff --git a/tutorials/notebook/enable_graph_kernel_fusion/images/graph2.png b/tutorials/notebook/enable_graph_kernel_fusion/images/graph2.png deleted file mode 100644 index 1b7743e19c4e97f70d4ba4edbeb7dc53d461bc7a..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/enable_graph_kernel_fusion/images/graph2.png and /dev/null differ diff --git 
a/tutorials/notebook/enable_graph_kernel_fusion/images/graph3.png b/tutorials/notebook/enable_graph_kernel_fusion/images/graph3.png deleted file mode 100644 index 8fc0a0216a3db84a11c540d469871efd0539dd8e..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/enable_graph_kernel_fusion/images/graph3.png and /dev/null differ diff --git a/tutorials/notebook/enable_graph_kernel_fusion/images/graph4.png b/tutorials/notebook/enable_graph_kernel_fusion/images/graph4.png deleted file mode 100644 index 50112b6b610a78718938751629e84a9178b32839..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/enable_graph_kernel_fusion/images/graph4.png and /dev/null differ diff --git a/tutorials/notebook/enable_graph_kernel_fusion/mindspore_enable_graph_kernel_fusion.ipynb b/tutorials/notebook/enable_graph_kernel_fusion/mindspore_enable_graph_kernel_fusion.ipynb deleted file mode 100644 index c8535fd9a35f726aa9cffa8e81d3959fd17528e7..0000000000000000000000000000000000000000 --- a/tutorials/notebook/enable_graph_kernel_fusion/mindspore_enable_graph_kernel_fusion.ipynb +++ /dev/null @@ -1,319 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使能图算融合\n", - "\n", - "## 概述\n", - "\n", - "图算融合是MindSpore特有的网络性能优化技术。它可以通过自动分析和优化现有网络计算图逻辑,并结合目标硬件能力,对计算图进行计算化简和替代、算子拆分和融合、算子特例化编译等优化,以提升设备计算资源利用率,实现对网络性能的整体优化。相比传统优化技术,图算融合具有多算子跨边界联合优化、与算子编译跨层协同、基于Polyhedral的算子即时编译等独特优势。另外,图算融合只需要用户打开对应配置后,整个优化过程即可自动完成,不需要网络开发人员进行其它额外感知,使得用户可以聚焦网络算法实现。\n", - "\n", - "图算融合的适用场景包括:\n", - "\n", - "- 对网络执行时间具有较高性能要求的场景;\n", - "- 通过拼接基本算子实现自定义组合算子,并希望对这些基本算子进行自动融合,以提升自定义组合算子性能的场景。\n", - "\n", - "接下来,以自定义组合算子开启图算融合为例来体验使能图算融合。\n", - "\n", - "> 本文档适用于GPU环境。\n", - "\n", - "## 整体流程\n", - "\n", - "1. 准备环节。导入公共模块。\n", - "2. 构造简单`MyNet`网络。对比算子融合前后计算图。\n", - "3. 
自定义组合算子。构造一个简单网络`MyNet`和自定义算子`MyOp`,对比算子融合前后计算图。\n", - "\n", - "## 准备环节\n", - "\n", - "导入执行以下代码导入所需模块。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import numpy as np\n", - "import mindspore.context as context\n", - "from mindspore import Tensor\n", - "from mindspore.nn import Cell\n", - "import mindspore.ops as ops" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 构造简单`MyNet`网络\n", - "\n", - "当前图算融合优化默认为关闭状态,我们只需在训练脚本中为`context`指定参数`enable_graph_kernel=True`即可启用图算融合。\n", - "\n", - "为了说明图算融合优化场景,构造了一个简单网络`MyNet`, 包含一个乘法和加法计算。在打开图算融合进行优化之后,这两个计算便会自动合成一个融合算子。\n", - "\n", - "为了对比开启图算融合前后计算图的差异,分别执行以下两段代码,记录两次计算的计算图。其中`graphs_path1`和`graphs_path2`分别为开启图算融合前后进行计算保存的计算图路径。\n", - "\n", - "1. 关闭图算融合时进行计算,设置`enable_graph_kernel=False`:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "result: [[2. 2. 2. 2.]\n", - " [2. 2. 2. 2.]\n", - " [2. 2. 2. 2.]\n", - " [2. 2. 2. 
2.]]\n" - ] - } - ], - "source": [ - "graphs_path1 = \"./log/enable_graph_kernel_fusion/graph1\"\n", - "os.system(\"rm -rf {}\".format(graphs_path1))\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "# save graph ir to view fusion detail.\n", - "context.set_context(save_graphs=True, save_graphs_path=graphs_path1)\n", - "# enable graph kernel optimization.\n", - "context.set_context(enable_graph_kernel=False)\n", - "\n", - "class MyNet(Cell):\n", - " def __init__(self):\n", - " super(MyNet, self).__init__()\n", - " self.add = ops.Add()\n", - " self.mul = ops.Mul()\n", - "\n", - " def construct(self, x):\n", - " a = self.mul(x, 2.0)\n", - " res = self.add(a, 1.0)\n", - " return res\n", - "\n", - "x = np.ones((4, 4)).astype(np.float32) * 0.5\n", - "net = MyNet()\n", - "result = net(Tensor(x))\n", - "print(\"result: {}\".format(result))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 开启图算融合时进行计算,设置`enable_graph_kernel=True`:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "result: [[2. 2. 2. 2.]\n", - " [2. 2. 2. 2.]\n", - " [2. 2. 2. 2.]\n", - " [2. 2. 2. 2.]]\n" - ] - } - ], - "source": [ - "graphs_path2 = \"./log/enable_graph_kernel_fusion/graph2\"\n", - "os.system(\"rm -rf {}\".format(graphs_path2))\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "# save graph ir to view fusion detail.\n", - "context.set_context(save_graphs=True, save_graphs_path=graphs_path2)\n", - "# enable graph kernel optimization.\n", - "context.set_context(enable_graph_kernel=True)\n", - "\n", - "net = MyNet()\n", - "result = net(Tensor(x))\n", - "print(\"result: {}\".format(result))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 
查看计算图\n", - "\n", - " 在当前工作目录下执行`mindinsight start --summary-base-dir ./log/enable_graph_kernel_fusion`,其中`./log/enable_graph_kernel_fusion`为保存的所有计算图的主目录。启动MindInsight可视化工具查看计算图,训练看板中`graph1`目录保存的为图算融合前的计算图,`graph2`目录保存的为图算融合后的计算图(参考[MindInsight计算图可视化教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/dashboard.html#id5))。\n", - "\n", - " ![未开启图算融合](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/enable_graph_kernel_fusion/images/graph1.png)\n", - "\n", - "
    图1: 未使能图算融合计算图。
    \n", - "\n", - " ![开启图算融合](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/enable_graph_kernel_fusion/images/graph2.png)\n", - "\n", - "
    图2: 开启使能图算融合计算图。
    \n", - "\n", - " 根据该计算图的结果所示,其中图1为未使能图算融合时的对应计算图,图2为使能图算融合后的对应计算图。可以看到该网络中的加法和乘法被融合成一个算子。\n", - "\n", - "## 自定义组合算子\n", - "\n", - "基于图算融合技术,用户可以很方便地实现高性能的自定义组合算子。其主要流程为:\n", - "\n", - "1. 在脚本中用基本算子组合的方式实现自定义算子定义和使用;\n", - "2. 打开图算融合配置;\n", - "3. 图算融合对自定义组合算子中的基本算子自动进行算子融合,并生成高性能融合算子。\n", - "\n", - "相比其它自定义算子方式,这种方式具有对框架无侵入、简单易用等优点。\n", - "\n", - "构造一个简单网络`MyNet`,并在其中使用了自定义算子`MyOp`。分别执行以下两段代码,记录两次计算的计算图。其中`graphs_path3`和`graphs_path4`分别为开启图算融合前后进行计算保存的计算图路径。\n", - "\n", - "1. 关闭图算融合时进行计算,设置`enable_graph_kernel=False`:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "result: [[-0.015104 -0.015104 -0.015104 -0.015104]\n", - " [-0.015104 -0.015104 -0.015104 -0.015104]\n", - " [-0.015104 -0.015104 -0.015104 -0.015104]\n", - " [-0.015104 -0.015104 -0.015104 -0.015104]]\n" - ] - } - ], - "source": [ - "graphs_path3 = \"./log/enable_graph_kernel_fusion/graph3\"\n", - "os.system(\"rm -rf {}\".format(graphs_path3))\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "# enable graph kernel optimization.\n", - "context.set_context(save_graphs=True, save_graphs_path=graphs_path3)\n", - "context.set_context(enable_graph_kernel=False)\n", - "\n", - "class MyOp(Cell):\n", - " \"\"\" my first custom OP composited by basic OPs \"\"\"\n", - " def __init__(self):\n", - " super(MyOp, self).__init__()\n", - " self.sub = ops.operations.Sub()\n", - " self.mul = ops.operations.Mul()\n", - "\n", - " def construct(self, x, y):\n", - " a = self.sub(x, y)\n", - " return self.mul(a, x)\n", - "\n", - "class MyNet(Cell):\n", - " def __init__(self):\n", - " super(MyNet, self).__init__()\n", - " self.mul = ops.operations.Mul()\n", - " self.pow = ops.operations.Pow()\n", - " self.my_op = MyOp()\n", - "\n", - " def construct(self, x, y):\n", - " a = self.mul(x, 2.0)\n", - " b = self.pow(a, 3.0)\n", - " res = self.my_op(b, y)\n", 
- " return res\n", - "\n", - "x = np.ones((4, 4)).astype(np.float32) * 0.2\n", - "y = np.ones((4, 4)).astype(np.float32) * 0.3\n", - "net = MyNet()\n", - "result = net(Tensor(x), Tensor(y))\n", - "print(\"result: {}\".format(result))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 开启图算融合时进行计算,设置`enable_graph_kernel=True`:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "result: [[-0.015104 -0.015104 -0.015104 -0.015104]\n", - " [-0.015104 -0.015104 -0.015104 -0.015104]\n", - " [-0.015104 -0.015104 -0.015104 -0.015104]\n", - " [-0.015104 -0.015104 -0.015104 -0.015104]]\n" - ] - } - ], - "source": [ - "graphs_path4 = \"./log/enable_graph_kernel_fusion/graph4\"\n", - "os.system(\"rm -rf {}\".format(graphs_path4))\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "# enable graph kernel optimization.\n", - "context.set_context(save_graphs=True, save_graphs_path=graphs_path4)\n", - "context.set_context(enable_graph_kernel=True)\n", - "\n", - "net = MyNet()\n", - "result = net(Tensor(x), Tensor(y))\n", - "print(\"result: {}\".format(result))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 在MindInsight中查看生成的计算图。MindInsight训练看板中`graph3`目录保存的为图算融合前的计算图,`graph4`目录保存的为图算融合后的计算图。\n", - "\n", - " ![未开启图算融合](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/enable_graph_kernel_fusion/images/graph3.png)\n", - "\n", - "
    图3: 未使能图算融合计算图。
    \n", - "\n", - " ![开启图算融合](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/enable_graph_kernel_fusion/images/graph4.png)\n", - "\n", - "
    图4: 开启使能图算融合计算图。
    \n", - "\n", - " 根据该计算图的结果所示,其中图3为未使能图算融合时的对应计算图,图4为使能图算融合后的对应计算图。可以看到不仅自定义算子`MyOp`中的基本算子进行了融合,并且与其他算子也进行了更大范围融合。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结\n", - "\n", - "以上便完成了图算融合的体验过程,我们通过本次体验全面了解了如何开启图算融合模式,理解了如何生成高性能的融合算子。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/mindinsight/images/caculate_graph.png b/tutorials/notebook/mindinsight/images/caculate_graph.png deleted file mode 100644 index 460e56104705421002cf5d6112a84322b1ace391..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/caculate_graph.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/data_function.png b/tutorials/notebook/mindinsight/images/data_function.png deleted file mode 100644 index 123d4cefd29b84cf261ccab8c8a62f63b06aea43..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/data_function.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/data_lineage_page.png b/tutorials/notebook/mindinsight/images/data_lineage_page.png deleted file mode 100644 index 19fbb50c14bb28005835cb283e32501e6e9f12ea..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/data_lineage_page.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/detailed_information_page_of_model_lineage.png b/tutorials/notebook/mindinsight/images/detailed_information_page_of_model_lineage.png deleted file mode 100644 index 
af9973f4f6404cb80ed1dce41d8f3e120babea3b..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/detailed_information_page_of_model_lineage.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/graph_sidebar.png b/tutorials/notebook/mindinsight/images/graph_sidebar.png deleted file mode 100644 index 4b9b6097aa62fdf426d6fc62ee1dd55f8086aeb9..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/graph_sidebar.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/histogram.png b/tutorials/notebook/mindinsight/images/histogram.png deleted file mode 100644 index f15c27645e66375b7ab81f931f5f225b0a653d8b..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/histogram.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/histogram_func.png b/tutorials/notebook/mindinsight/images/histogram_func.png deleted file mode 100644 index 15437442b436b8d1783ad36a816b05df4f156c06..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/histogram_func.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/histogram_panel.png b/tutorials/notebook/mindinsight/images/histogram_panel.png deleted file mode 100644 index b822ca43783fc7e7a5afe59f6917e1de074f0594..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/histogram_panel.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/image_function.png b/tutorials/notebook/mindinsight/images/image_function.png deleted file mode 100644 index 01768a4282378c6803422c00d148957836559626..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/image_function.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/image_panel.png b/tutorials/notebook/mindinsight/images/image_panel.png deleted 
file mode 100644 index 451a3fea4ecfabb224f0f1cc90ca5e5d617d9bf1..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/image_panel.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/mindinsight_homepage_for_lineage.png b/tutorials/notebook/mindinsight/images/mindinsight_homepage_for_lineage.png deleted file mode 100644 index 9e0dc2fc6184a3bee5c3d9784d4875186d9b69f3..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/mindinsight_homepage_for_lineage.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/mindinsight_homepage_for_scalars_comparison.png b/tutorials/notebook/mindinsight/images/mindinsight_homepage_for_scalars_comparison.png deleted file mode 100644 index e4495498b20a73d78aa3de250989bc746ba74261..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/mindinsight_homepage_for_scalars_comparison.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/mindinsight_panel.png b/tutorials/notebook/mindinsight/images/mindinsight_panel.png deleted file mode 100644 index d93921429b05195b38b491040e9abcc99fb4994d..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/mindinsight_panel.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/mindinsight_panel2.png b/tutorials/notebook/mindinsight/images/mindinsight_panel2.png deleted file mode 100644 index 355d4ce219bb2e2766eef80928838f4a11976c46..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/mindinsight_panel2.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/model_lineage_page.png b/tutorials/notebook/mindinsight/images/model_lineage_page.png deleted file mode 100644 index 9d32ada663f0526d83ba2399688cf4005f2d8543..0000000000000000000000000000000000000000 Binary files 
a/tutorials/notebook/mindinsight/images/model_lineage_page.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/multi_scalars_select.png b/tutorials/notebook/mindinsight/images/multi_scalars_select.png deleted file mode 100644 index c4a9616e729d04be9bd5739972b0418131026c5b..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/multi_scalars_select.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/optimization_target_page_of_model_lineage.png b/tutorials/notebook/mindinsight/images/optimization_target_page_of_model_lineage.png deleted file mode 100644 index 3c31da28789964279226e603f4b4ce4a9a3b7878..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/optimization_target_page_of_model_lineage.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/scalar_panel.png b/tutorials/notebook/mindinsight/images/scalar_panel.png deleted file mode 100644 index 55f49fb639f126fa823f84cd4b0e2945f85c86f9..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/scalar_panel.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/scalar_select.png b/tutorials/notebook/mindinsight/images/scalar_select.png deleted file mode 100644 index 07a7850969f2193cec615012384030f3af07209c..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/scalar_select.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/scalars_comparison_page.png b/tutorials/notebook/mindinsight/images/scalars_comparison_page.png deleted file mode 100644 index b39483385813272c9b746252bb9e36a7499b6a49..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/scalars_comparison_page.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/tensor.png b/tutorials/notebook/mindinsight/images/tensor.png 
deleted file mode 100644 index 581e5895ffa77280cfb48245f38709107bf8c7cb..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/tensor.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/images/tensor_func.png b/tutorials/notebook/mindinsight/images/tensor_func.png deleted file mode 100644 index df549a8e628bdc8d2ffef9dd814e5a8cf0f3ba34..0000000000000000000000000000000000000000 Binary files a/tutorials/notebook/mindinsight/images/tensor_func.png and /dev/null differ diff --git a/tutorials/notebook/mindinsight/mindinsight_dashboard.ipynb b/tutorials/notebook/mindinsight/mindinsight_dashboard.ipynb deleted file mode 100644 index 329a30bce6377e49d0763475774c8080d6503b3e..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindinsight/mindinsight_dashboard.ipynb +++ /dev/null @@ -1,844 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# MindInsight训练看板\n", - "\n", - "通过MindSpore可以将训练过程中的标量、图像、参数分布直方图、张量、计算图和数据图记录到summary日志文件中,并通过MindInsight提供的可视化界面进行查看。\n", - "\n", - "- 通过查看特定的标量数值随着训练步骤的变化趋势,比如查看每个迭代的损失值、正确率、准确率这些标量的变化过程,追踪神经网络在整个训练过程中的信息,帮助用户了解模型是否过拟合,或者是否训练了过长时间。可以通过比较不同训练中的这些指标,以帮助调试和改善模型。\n", - "\n", - "- 通过查看训练过程中的图像数据,用户可以查看每个步骤所使用的数据集图像。\n", - "\n", - "- 参数分布直方图支持以直方图的形式呈现Tensor的变化趋势,用户可以查看训练过程中每个训练步骤的权重、bias和梯度参数变化信息。\n", - "\n", - "- 张量可视能够帮助用户直观查看训练过程中某个步骤的Tensor值,Tensor包括权重值、梯度值、激活值等。\n", - "\n", - "- 计算图的生成是通过将模型训练过程中的每个计算节点关联后所构成的,用户可以通过查看计算图,掌握整个模型的计算走向结构,数据流以及控制流的信息。对于高阶的使用人员,能够通过计算图验证计算节点的输入输出是否正确,并验证整个计算过程是否符合预期。\n", - "\n", - "- 数据图展示的是数据预处理的过程,在MindInsight可视化面板中可查看数据处理的图,能够更加直观地查看数据预处理的每一个环节,并帮助提升模型性能。\n", - "\n", - "接下来是本次流程的体验过程。\n", - "\n", - "## 整体流程\n", - "\n", - "1. 下载CIFAR-10二进制格式数据集。\n", - "2. 对数据进行预处理。\n", - "3. 定义AlexNet网络,在网络中使用summary算子记录数据。\n", - "4. 训练网络,使用 `SummaryCollector` 记录损失值标量、权重梯度、计算图和数据图参数。同时启动MindInsight服务,实时查看损失值、参数直方图、输入图像、张量、计算图和数据图的变化。\n", - "5. 完成训练后,查看MindInsight看板中记录到的损失值标量、直方图、图像信息、张量、计算图、数据图信息。\n", - "6. 
相关注意事项,关闭MindInsight服务。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 准备环节\n", - "\n", - "### 下载数据集\n", - "\n", - "本次流程使用CIFAR-10二进制格式数据集,下载地址为:。\n", - "\n", - "CIFAR-10二进制格式数据集包含10个类别的60000个32x32彩色图像。每个类别6000个图像,包含50000张训练图像和10000张测试图像。数据集分为5个训练批次和1个测试批次,每个批次具有10000张图像。测试批次包含每个类别中1000个随机选择的图像,训练批次按随机顺序包含剩余图像(某个训练批次包含的一类图像可能比另一类更多)。其中,每个训练批次精确地包含对应每个类别的5000张图像。\n", - "\n", - "执行下面一段代码下载CIFAR-10二进制格式数据集到当前工作目录,如果已经下载过数据集,则不重复下载。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "********Checking DataSets Path.*********\n", - "*****Downloading CIFAR-10 DataSets.*****\n", - "*********data_batch_1.bin is ok*********\n", - "*********data_batch_2.bin is ok*********\n", - "*********data_batch_3.bin is ok*********\n", - "*********data_batch_4.bin is ok*********\n", - "*********data_batch_5.bin is ok*********\n", - "**********test_batch.bin is ok**********\n", - "*Downloaded CIFAR-10 DataSets Already.**\n" - ] - } - ], - "source": [ - "import os, shutil\n", - "import urllib.request\n", - "from urllib.parse import urlparse\n", - "\n", - "\n", - "def callbackfunc(blocknum, blocksize, totalsize):\n", - " percent = 100.0 * blocknum * blocksize / totalsize\n", - " if percent > 100:\n", - " percent = 100\n", - " print(\"downloaded {:.1f}\".format(percent), end=\"\\r\")\n", - "\n", - "def _download_dataset():\n", - " ds_url = \"https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz\"\n", - " file_base_name = urlparse(ds_url).path.split(\"/\")[-1]\n", - " file_name = os.path.join(\"./datasets\", file_base_name)\n", - " if not os.path.exists(file_name):\n", - " urllib.request.urlretrieve(ds_url, file_name, callbackfunc)\n", - " print(\"{:*^40}\".format(\"DataSets Downloaded\"))\n", - " shutil.unpack_archive(file_name, extract_dir=\"./datasets/cifar-10-binary\")\n", - "\n", - "def _copy_dataset(ds_part, dest_path):\n", - 
" data_source_path = \"./datasets/cifar-10-binary/cifar-10-batches-bin\"\n", - " ds_part_source_path = os.path.join(data_source_path, ds_part)\n", - " if not os.path.exists(ds_part_source_path):\n", - " _download_dataset()\n", - " shutil.copy(ds_part_source_path, dest_path)\n", - "\n", - "def download_cifar10_dataset():\n", - " ds_base_path = \"./datasets/cifar10\"\n", - " train_path = os.path.join(ds_base_path, \"train\")\n", - " test_path = os.path.join(ds_base_path, \"test\")\n", - " print(\"{:*^40}\".format(\"Checking DataSets Path.\"))\n", - " if not os.path.exists(train_path) and not os.path.exists(test_path):\n", - " os.makedirs(train_path)\n", - " os.makedirs(test_path)\n", - " print(\"{:*^40}\".format(\"Downloading CIFAR-10 DataSets.\"))\n", - " for i in range(1, 6):\n", - " train_part = \"data_batch_{}.bin\".format(i)\n", - " if not os.path.exists(os.path.join(train_path, train_part)):\n", - " _copy_dataset(train_part, train_path)\n", - " pops = train_part + \" is ok\"\n", - " print(\"{:*^40}\".format(pops))\n", - " test_part = \"test_batch.bin\"\n", - " if not os.path.exists(os.path.join(test_path, test_part)):\n", - " _copy_dataset(test_part, test_path)\n", - " print(\"{:*^40}\".format(test_part+\" is ok\"))\n", - " print(\"{:*^40}\".format(\"Downloaded CIFAR-10 DataSets Already.\"))\n", - "\n", - "download_cifar10_dataset()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载数据集后,CIFAR-10数据集目录(`datasets`)结构如下所示。\n", - "\n", - "```shell\n", - " $ tree datasets\n", - " datasets\n", - " └── cifar10\n", - " ├── test\n", - " │   └── test_batch.bin\n", - " └── train\n", - " ├── data_batch_1.bin\n", - " ├── data_batch_2.bin\n", - " ├── data_batch_3.bin\n", - " ├── data_batch_4.bin\n", - " └── data_batch_5.bin\n", - "\n", - "```\n", - "\n", - "其中:\n", - "- `test_batch.bin`文件为测试数据集文件。\n", - "- `data_batch_1.bin`文件为第1批次训练数据集文件。\n", - "- `data_batch_2.bin`文件为第2批次训练数据集文件。\n", - "- `data_batch_3.bin`文件为第3批次训练数据集文件。\n", - "- 
`data_batch_4.bin`文件为第4批次训练数据集文件。\n", - "- `data_batch_5.bin`文件为第5批次训练数据集文件。\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据处理\n", - "\n", - "好的数据集可以有效提高训练精度和效率,在加载数据集前,会进行一些处理,增加数据的可用性和随机性。下面一段代码定义函数`create_dataset_cifar10`来进行数据处理操作,并创建训练数据集(`ds_train`)和测试数据集(`ds_eval`)。\n" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "from mindspore import dtype as mstype\n", - "\n", - "\n", - "def create_dataset_cifar10(data_path, batch_size=32, repeat_size=1, status=\"train\"):\n", - " \"\"\"\n", - " create dataset for train or test\n", - " \"\"\"\n", - " cifar_ds = ds.Cifar10Dataset(data_path)\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - "\n", - " resize_op = CV.Resize(size=(227, 227))\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " normalize_op = CV.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))\n", - " if status == \"train\":\n", - " random_crop_op = CV.RandomCrop([32, 32], [4, 4, 4, 4])\n", - " random_horizontal_op = CV.RandomHorizontalFlip()\n", - " channel_swap_op = CV.HWC2CHW()\n", - " typecast_op = C.TypeCast(mstype.int32)\n", - " cifar_ds = cifar_ds.map(operations=typecast_op, input_columns=\"label\")\n", - " if status == \"train\":\n", - " cifar_ds = cifar_ds.map(operations=random_crop_op, input_columns=\"image\")\n", - " cifar_ds = cifar_ds.map(operations=random_horizontal_op, input_columns=\"image\")\n", - " cifar_ds = cifar_ds.map(operations=resize_op, input_columns=\"image\")\n", - " cifar_ds = cifar_ds.map(operations=rescale_op, input_columns=\"image\")\n", - " cifar_ds = cifar_ds.map(operations=normalize_op, input_columns=\"image\")\n", - " cifar_ds = cifar_ds.map(operations=channel_swap_op, input_columns=\"image\")\n", - "\n", - " cifar_ds = 
cifar_ds.shuffle(buffer_size=1000)\n", - " cifar_ds = cifar_ds.batch(batch_size, drop_remainder=True)\n", - " cifar_ds = cifar_ds.repeat(repeat_size)\n", - " return cifar_ds\n", - "\n", - "ds_train = create_dataset_cifar10(data_path=\"./datasets/cifar10/train\")\n", - "ds_eval = create_dataset_cifar10(\"./datasets/cifar10/test\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 抽取数据集图像\n", - "\n", - "执行以下一段代码,抽取上步创建好的训练数据集`ds_train`中第一个`batch`的32张图像以及对应的类别名称进行展示。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The 32 images with label of the first batch in ds_train are showed below:\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAsYAAAHdCAYAAAAekb9wAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAABYlAAAWJQFJUiTwAAEAAElEQVR4nOz96a9k2ZrmCf3WsAeb7Ux+/Lh7jPdG3rw5VFNdhVJUQ1PVQrSgoPlQ4gOivyGQ+CP4xh8AEjRCCAES8BEVNIgugbqKrq6qrMxb1TnczDtFxI3w8czH5j2tgQ9rbTM7x91jcD8ecbPyvCELO262bdu2vdde61nPet7nFd577uIu7uIu7uIu7uIu7uIu/rqH/L4P4C7u4i7u4i7u4i7u4i7u4jch7oDxXdzFXdzFXdzFXdzFXdwFd8D4Lu7iLu7iLu7iLu7iLu4CuAPGd3EXd3EXd3EXd3EXd3EXwB0wvou7uIu7uIu7uIu7uIu7AO6A8V3cxV3cxV3cxV3cxV3cBXAHjO/iLu7iLu7iLu7iLu7iLoA7YHwXd3EXd3EXd3EXd3EXdwHcAeO7uIu7uIu7uIu7uIu7uAvgDhjfxV3cxV3cxV3cxV3cxV0Ad8D4Lu7iLu7iLu7iLu7iLu4CuAPGd3EXd3EXd3EXd3EXd3EXwB0wvou7uIu7uIu7uIu7uIu7AEDfxk6EEP429vNvanjvxbf9zK2d0xT4GDgAesAl8Bx4Bmx9Q5pI/sF//9/nP/yf/I8Y7HWYV1f8r/9X/xH/yf/1X+Dqb3AoGfBjYFfAiQ/fcfVtDlTAg0/gg48hSUCqcHzeg7XgbPg7hv8X/+9vfU7hrq1+XXyvbfXf0HiTcwp35/Xr4m3bqgR2gAeDHv/lv/v3+Hf+W3+f+w8eIpoGu2ooZw31yqF1D4/ms1/8CX/2r/8zjF1y+P59Pv6d3+Vv/rv/TR598nv84s//nD/6z/8p/5f/8/+BX5/8+qXv7Wc5/53/9n/AP/gf/od0ez0mVxP++F/8c/5f//D/xq+ehO1Trfh3/s4f8Pf/u/89fve3/wbvP/wIYSXnp+f85I/+Jf/H/9P/lj/74ud0pOJ3Pv4h//W/+9/g7/29fx/vLH/6Jz/hT3/yz/nTn/whny8LNNAhDPAKaE9UDSyABhgA/UxyXjvaLv7u/r/9eNP7/wf/s8ceKRFaI5WOF9EDAoHcX
NRXfeeN99qhlPbZb7/u43se5z14cM7j2mcXXvfx2XlwHqzzWO/xzm3ex4cvaDfa/ttu/9td/9va+OzBVFAvYHECJz+Diy+gKcFMwfwhoRW/+Xn9JnErwBgebP1981hfdc+I1/z9qn9vv/ZVn3vVd93c/lX7eU2I1xybEK/cRADOzcEfv+I4vsewwAooCSDZAO7lzTKl2Rvv0+2mHJ9/yp9//of85c9/imu+wW8RhF64Ai49TIDltzxOmcBgBJ2c9Ul1HrwL/5Qy3Ej+7c7tf/Xv/gdA7AwInUDoC7Z6i3XENvN1TfjmWyJ0Xeu2Enui2OcA7lrHtIl4PPHZeffSd/v4+zfH/vLBbf+2G+/w/OmvOTv58vUHfxd38dckHKFbXJQlL54+5pd/+mcsL6YMOj06SQfhUpIkRSqFdeCFxwsPSqCyFJ1nSK0QQiC1RmcpKklf+h4BHPQH7I1GJCgSkdDLe4xHY/b3djg5eYrD0xv06ORdFvOC5aLEGtBC4J1HeIeWAgEoPMJYpBNomYJwZCohlQK11a84Nl1AO/ppwjCggXuZ4ODBPuJkwvNV/Zs0at0FoBONkBKpNUKp9fXxiOvjy1aI+H9PO060n9mMB36rjfh2DIngWPrwSSE80nuc8HjpcQ6cjMDYBVAshEd4jxfgvcNJ8F7giLi3BcQijuGiHWwdceMImG0EUBKkA9NANYHZU7j4Jcz/EpgTQEV9+yf6FXFLwPh/vHWR2gsmgHhS2isiWxQpNtuJrdfk9mfZPLfbiThLuvH5AFDjhcJvNkcghEBIGTovIRFSIISM74HEI9vPQNhexIYXO6L1axKEkEgZDlUKUDIwD0LC5flnnDz+XxIu4m9IOAJgbeLDEMDyjV4wTRLyPMf5iqcnv+Bf/fSfcvx09vUYXwADhRgoaDy+NLDw4bu+TagMul1IdJxFWtYIvp2BCF4J6r9N/G/+d/97oO0QXLhH11Po7R+7DYoleEEgRuJDsO582u381qGG2Npf6G3WwNd799L7a8jrwyzdO7f5Dt92ZO56Z8Zmtr/Zpu3o/LVf5L3jP/6//z/4j/4X/3OMKd7iLN7FXfzVD0m4Va1zTM/O+fLnv6SZ1xweHLI72qfbHZFlGVKKwIwJ8BKEUOg0JckyVGTypFaoNEUlLw+pCYKD0Yh+p4t0AokkTTJ6nR7D4YDd8RCVKvrDIUmSsZitWC4KrPVIJbDO45xD4jb4orFgQYsUITyJ0iRCorZ+F2wAcfuaIgBjCez2Mx4cPaCqBWerk+8IctzFNw2tJUJIlFYBGIt29NnGSNsR8QpiM5a0lM+1MSK+2+JUHyZf6/EDjxQBBEsRxhcnPM47rBcRX3mss2Ad1lU4U0YSSwIaKRK8VPgIxrwkztRceBY2AGK2Hm1DpYFmBqszKB4DX/DtAcXbxS0B4wHrW7AFriICY1y4k2lf33p/GxjLFmGKzb7aECK8/4rPifi3FB4hXAS77W4i+JUSKWUEtWIDdAWxI/EB6K73RwDRYmtbKZARHEsBKh6SEsTXBYvZgN842baEbpKihcdZy6JygSa5EQ0Nx1df8rPP/hU///ynfPn4lPor26JAJpqkm5Pv9FCpwnpD5VeUegXafru2bGuYL6DfD7ONdc++hYQ9cXL15tzGaDRadxJrAPoK1jU0rzCcCCQCFdqxaLkYh8etAaiLYNU5h3cW6wzOGrx3sd1plM7QKjBK14Gx3zqm8Nwe1+bYwlKVX4PhLUBMS0pvv399f+02/X4/tPG7uIu/5nEoBHtZwmDYp9fpUS9WTM/OSZyEGszQ0xsI0twHaRdh4JdKk6YZaZKFvt97pBAkOkEq9dL3WDyrumRVl5R1iSxTqrqiKAvqusIag041UirqqmY2mbFcrDC1QSUKYxrqqqYqKxzQeEdTGVzj8Dawcd4KhJMkQpATxrUM1kC5jRYcJ0CSaNIkI09zEiGo33I17i5uN4R3AWNIj1IyrFh4cIH+u7Ya0BKEYo2dfAC81
9ZCW8JEBEAch1LnWe+73asTcQImAvMrcOBM4HekAgHOlLhqRjU7xk6eg61A55ANoXsP0dlDyBSURniJlwJvG7AGmhWUUygX4JpAhKkMdAYUoDwoRWit333cEjCOU49ttnfNwm2zfpFT81ufa5lf77gujNm+SeXW/uS1z7UsnfcuXlS/vuAiHk8LMIRw4OWavLs2e8KH2Y33CCHwuI2OR2yDMbHBbGuKsKX7fvM6Fq0zdjsP6CUZXtQ8MU8oqpcRq8Hw/PJL/uIXjl89+SUvXtS4MPkLEzokUqZIJVBakXe6dPo98jwjSzM8jqqqKH1OoruUeUFjVuDNNztQV8PVBXSy8EiTOBmK3fq6aUleElB9i2ia8Nudi23F3wTGm0lTkDyICIrDv4VweGHwGLyzOOdw1mFdeBhjMKahaUrqusA5i1YJSZLR6Yzp5COkkAFUt4udfpu1DtEe1xoA+5ugeHvbCLRv7mfNTm+2de4tKfe7+F6jtwu9AQGjEXpD6eM4AggFPgErwchWC0hkaeJKl4xzz7gId3Mq34KntTZVbFY9jYPGgWkHzS0eo90GHwgD1RIPInzOuI3scM1kinDMQm325xz4BpyJ3YfbWqHjumb2TUMDHx/c52B3h7zfxXtJ0xhW0ylTNMJKvFMgNF5IkjQFwtggVYLWGYlOA4HiPEoItFIo+SpgDLOqZFmWlHWFrAqKsqBYLSlWBVVdkeQpeE9VVsymc5aLFXXdoKTCNIa6rqiaer2/pjHYxuFsIHWc8eAECZIOG2DcLrJt9TZsLpnAOUBIVBz77uI3KFyNEJpEJCQ6MMXOe6xvr2W8C1oScI1NtnBPgLRsyBGxhmst5BLe47y4Nk6ICIidtzjfgGvwpgpNRHUQSLyZY1bH2LOfwdM/g2oJ+QCG9+Hgt/FKIdIxMk2BcINbb/DeQDOHxQtYngZA7Q2kfch3wo+TLuYaxc/+lWSMt+UQ1+QR6/9t3Y0ts9w+bn5uDTu3Pndj+2u3dpwjrWUP248AcgJLvM0ux8+KzZ6kEDc+t5FdyNewxlKETin8O0g2ftNiMN7l4/d+RH/YBWWgknwx+ZK6btaDpU4Vo70RTZNw/HzB4lwiS0Wee8xIIhpNInIkGutsWFJxlrosccZQ6wq8p2ks1jmUT8hShcsktlqGUe5rw8PlE5ifQ5pDfwh7+zAeh5F8PfrGg37D2AbE62/eGg+2ZcHeW5yzNE1DXTcYU9GYEutqvDdY22CMoakNddPQNDVVVVFVBUW5pCznOGfJsw79/pj79z/m6PAj8qyLVG1b8VvHtDkXvk2CiEB3ext/A0Szfu3l914G0dflFXfxVyu6A9g5BGxcFpeQekhc7MwT8CkYBbWMIDbmtbT9o1IBGCsZ555sr12E/yViK3ErbmAjIK5cUPq10oLNEu+6SwmMpAiAXQK1g8qEzxO30QK0imSsDvsxPqyw2ibMlV0TxkwlQcuwz4QAut8mOjrlhz/8Mbs7I3SiqCtDUdQBlBtLsVqSd1bk3YIkzxAKrG2w1iJUXEFC4Y3HNhbhBVonQVrxivCJwimBFR7rLTZOoI0x2MZiGkNTNZSrguV8znK+YLVaIoDaNBhnMW7N+22d+EAHOecxYWUby0Y6cZNqsoQFwwJ4frmk+Oxzjq8WLO8mzL9x0UxeoDo5ve4+o26KQVIbqL2gcTLwVbAewDY8Xfx3e19GzBT4FR/HC7F+3/mw6uHbnsA7rGlwpqCcn7OanlAurzDlEi8UyeA+OhtQzU6oL76AX/8EvvwjqGegOzB4BMsSrMLtfUiS9wLL7MFbj6UBUwQd8eIFrM6hmkI2gN49UHlUVyxj59Dllcvc7zBuBxjLFuzCNdDbdrfCb3pl4JpW+CYo3ga+a/wrt/THm+8I4LUFshspRZArR9C8BsdyC+QG0CtpZRR+S4KxDYLjdnIjq5BCXNMYS+HXgFmuAdxvSAjBvfv3+eTjHzEY9HBYbAXlTDDLphhjyPKUvZ0dxqMhA
s3VqaeZj+l7yDvgdGBPsNDUDYvFkroqqQvHUmxkKoHZjcuMWY7WKXmnSy0ETXH1zY7X25CNWi9gOQm9fLcXZ45xpWBzp79RBAnD9ZBye3+h3QRZhKU2JYvFlPn8iuVqxmo1o25KnGtomoaqrqjKiqIoKIuC5WrJcrVktZqxXE3w3tDvDdnfPeJ3f8cy6O2glCKVeViZaCUPrl30ag90A4pbsPvSNnfx1zJ8TOjWhEcmAkDWPuAlF29HLwAJwoFtu2MfV7q2m5qIsn42jO+6H4VrCV1CRJBK+B4nNrejF5t+MSE8ZETL0sduPN5+UkCqQ0qBiOv9tv2Slo2ObLaPx7Huc786If8bRb8z4ONPfodRvwfOhNWuomS5KlgsVtRVRVUXVHVJ1lRILWlMQ2MMQmm8E+AEzjicMQgPiVYRGG9PNcI/ZZ4hUo0TLgBjZ3DW4qzDGBdAcVGRqBXLZM5yMWO5WCBF+N6gcd78aik0QigQkUW0YIyn9oFba9dVbz4MARQ3QFk7jl+cUfq3Tt24i3cQ1cWXZMMBo4OMo/6QygkWNRQWCicw8aJ5366KtxRwuweB27o3W22yi0sH62S5ONb4eOM753FNha0mLC4+Zfb4z3DnT2BxBSqnefA76PEDzOQ5nHwKj/8YVr8GXGhg5VlYrtIDyAaonQcoLQMDLT0WA64IQHp1BhefwuwJJH3o34fODiQ9aEwc93cIrfa7y4u5Rcb4FQxweHNr6rq9zasT6V75OSFugO8WwG70vdcY4vXubwBcWpC89fl2eYwWe7f723xWtgl7a83yNjBugXLLRP/mhEgUO/v77B0c0u91sM6yt3vF4e6MYW8XtCTPUvrdLkmqaVxNU1VoN2CQJjjp8emm12xqg9IavUwoy5KmqrCNxQoLQiKURoswMwyaO41PMpqiVd5/i/AGJmcw3YV+D9K4MBhlBe8m2smUxNqGVTFnNjvnxcljTo6fMJtfslrNaZoK5x3OWhpjqOuasgzAeFUUFMWKopizKic4Z+h2+kxnl4xGexwcPEQpxXC4S5JkoUN7BeBtlWHXmOLXgOI12+e3/3UX/yaGdxunIy82XWMrp1jfIkTQ6jcscCtrF46gSHLg9YZJtDEvpu1mZQTA6+m+vA6ybPz+Nc6OQLyVPEi4dtvLdhsiax2Z6/ViEJvuXkSph4/AXsrNQ9wCMDbGslqWDLsDOvmAROdolWItLBcFZVlweXlBZSxOenb0Lt5bpFQomaBlghI6qK2MQwKJTkj0K4ZUHxL8lssFl+fndNKMalUyn05YrVasrKVeLFmVDct5SbkynD465epqghCSsqrwQpCnHQTQFRm93ggvFPPFEussy7KiNh6PXMNys/XcXoY2/7r9+5uYDn27kLw+z6ZdU/g2X/q6K/1vfj/nl6eotKLnd9mRQ5bWY6zF2gRLhhIJXoSZZWCAJaJlhAlAeHPGN65I7RDaSp+c3wj7AHCGZnnO/OxT5r/+Y9wv/nO4+BLqZWBzZyeY3fdheQFXT2H1hOvjewHTL+DkS9j7AGlKdJrhpcQpidUJVqdhuQgH1QyqY6gkFFfQuQ/DRwEod3ZC51SVBA/Y7+a63xJjvHUjiBuPl9jfLXB8ExCLluK4uZ/N++t/tm9tkcktQG6T79YAtwXQa/AMQvoIZje3shDXPyel2JJKbN7bgOJN8p2U4gbzePuRsOnsvknobspwZ4es30PnGcI2dPt9RuMxnaaH0hKtNFoncWCSeAQyk2QywdQ1pm4QCJI0AyTD4ZiirLiaTJlOp9R1jbMOhEBrhdYJUgRNuPBtEmOK92+wFNKs4OIs3MGDOCq6ly3MbjXihMi4msn0jGfPPuWXv/wzfvXZT5nPrwJbHH9vu4KA91RNSJBpmprG1DTNiqpeYJ2hLOc0pubZ8y+4d/A+aZKT5z20Tl4rbdiWTLysg37Vxnz1NrBWoP11ihvc3V/58G7DGPsotvVxfIFN39hqjttlde/CXHOz/go+ibeTC
Pah1gUwjQChQ9+WxH5PbXMWsGaVDRvw3b7dkgxxs2va1pa0UOu+s13O3Rpat/t+CajYB7eg+BYW5mblgp/95c8Rv+V579EjpNIopUPynICiKjmfThHn56AE3X4HISBNU7KsQ5JkaJEgvADrkEKSJglpkoQVpxutrioqLk7P+PLzz+hmOa5uOD85Yb5YBIDqPEVdMasrZquCh08/4OL8nCRJKcoSoRTj8R7Dk+fc6x8w3j/AK8Xp5SVNUzOZL6isR8iclBmCcG0agimRD6fxW40f3zbSbJ/f/1t/n9FwSJ4qpAqElLGOqjZMFxWnl3OmsxVlFSQk6yRkIVgvHRBmRUIppFIIqaJ8JZIDzobEZhtYd2/txhoM2DSOdqljPcLz1Q2nbYGvOkPbDfK76UdTMyEznp65pG9ymrJCzgsUXZJkhNQDvOyBkuFciXZV1a+ZYOtbT2KxHlN8tF3bAOM2lTyqll1JMXnC5IufYH/+T+D5n4KrwkEZ4NkUzn4eEubtCnz18sG7CVw+hdkZVHNkloPOSNMEYfuU3RG2M4KsH8+nA2qwJ7C0kOxCdgDDMSQ7cHoJ7phrHdg7jFtijG/8fbPtbLfHbcu2Vz2ugegbn93elC2N7xq0tq9fZx22XSiC5KK1XvPXgLbc/swNULx5T1wDxWtXCiFCAsM7umm2T8U36dhEIth5cI+9vX06nZw01Rjr6XQyBv0ui+WKummoTINrPEorlE5IUg1JANFVWVKyQkpJ3umidIZ1UFYGIQKXVFYl1li892tZhZLhJvUiSC3qvE9dWL69gN7DYgpZDnkGUoeR/G3W/bZP3rVLtX3tPE1dcnl5zJOnv+LTz/+cX/ziT1gVi9jGQgOXUpFojZJhubNuGqyxGGswpqRuDE0DzhqWS8fh/lNOj56zu3ufe/cegO+yxbe9dKAbO53XMMVrbVmrSfYv/y5/ffu/blKMG0PkX/24YU7rZRz3Y8fQKtjWmFJsuTM6wEbmmA3n4NpxKcY6CS4C79aWct03bjXZ7ez2jYxtczzrBR6/4ULWw4OPt7OIMo54LOvPtD/CXwfGt4FNGm84PnnOo8Mj3EPQKkElljTLyDod0qJktlrRrJaUVYGxDVJJ8rxDp9Mjz7okOg0yFWPCClma0u31kFJib2h2TdUwObvgWfoFnTQD77k4P6Mori8Pe6AwFZPJhOUq9NFeQNbpcnB4n0eTKft799k/eojudJgXBVVVUjuPzLv0x/dobENjVljXYL2jNb5s2eN3Ffv3f4d/8D/4n/Lo6B79TkKSSpSWGOsoSsP5pODTZxN+fTzn+KrkfF5Sm3B0HoEXCicUxku8kKRpQpYm5KkiSxUqShqttUH6UlWsyoZV2VA3jsa4YJsbZ3A+NiLRzq6+Ahw7WmvMdpZ3s8cQN/axFX7rhowfvQ0UoO0KUUE1fc70ZMHp5SVPLy5pkjFy8JBkeEQ+OCJNdtBaoFSUAHqHdQLjJbK9x1x7XwWAvF51an2HRfuawdiCavqM5vlP4fTnG1C8/r0FVF8nayigPIXFJbaagR+SqAwlNdKl2DTDKk1YTmnYAF4HfhEQfTqEbAy5gatfBUb5rxYwjikXLaVwA8xee22NRLfB8PZrN7eFtf1DbHEtK7zN3ArhrwHhl8noYJsl8GvWYVv50YJpGTvjjXZ4W0scvkcJgRYxQzoC4paVflfh+ebW1lkv4/4PH/Kj3/59Hty7RydN0EqgnKCjFR2tOVvMefL8Oaax9LtdxsMRewd7jIZDJB5nDYVaIXwAu51uD6VT6sbhaegPgnyiMSYk5NmQkOdtXMTxIOJJ9gim3mHLK15Gpi3Ufw10qQsoVlD2Qa9FVW92EonJd1toabPYselMrQ0s7/n5U548+yXHp19yOT1DCOj3+iipsaYdalzsaCzWhCS9uq6pKktRQF2GJKIiqzk/vGQ2vaIsFoHxeMXvXs/qIzp4lSYav+U+Ebd3a1bZI7xY3wPXT1fs9L9HmChRdNCkw
qGEo3CWknfX3d2WdlKxtQx5S/t8k2gn5C1ubEFsMNnfAqnEPs5v5AprcMzmzpORqZU+Jr5FoKqiHrjtJ6UEoTa8Bu2kLQ6465W3eD+1DPBL8GJ94CEpUNgboFhtBm3XYpR4/GLrd9zGNfCuAVxg25IErTwdMWBPS9Juh3zQp24ahsM+Uni0lvT6PfrdIf3egDzthN9RNeA8WZqxu7dPlnZZlbPrX1ZblhcTTq0nSVKEgMvLSxr7cssXCJQKjKvOEhKZMN7d4f7DR9R1w97+fe4fvUd/OA5JfVaSD/vs3L9PmiTs7B2wWs5YLa+YTE5w1XKduvQu267zgsYpjNc4kSCVJE0EeQqdNCFRGuMkSmV0sgWdbElZVRhraJyndoLKSUonaKK9lxNRIiAUSaLRSQJAkiQoneJEgfESJx1eu6iRb/s+H8+nQrReJq8gr2Iv3hpw3pjJtREb5zYw9pHB3uIjhAfVAuS3PJ+J8zTFgidPPuXZs4LPvvgVx08eIwb36D/8XY4+/H3e+9gz6CWkKkNpTW09lXFAmGQgJEoSJh5eBmCMxDuxHmc84d6zzmJKA/WCZn4KF0+gWbzh0TuwM6immHqKc/sk6RhQCCtQwoBZBOlEPeWllpn0YLAfrN+qCpJOBMbfTdyexvi1ABg2LWcLfX4TlrhlDLYampA+gmIflvfYJM21jhFr2cU1pjgcx4YtfvkRWGFxHSjLzesb6UWQYQSALG4wxt9fCAS74x0++u0f8MPf+20ePHif3dEO0jtsbWiKFa6qwDQsplNOnj+jrmvGwx0SITg82GfUHeDxwdbMS5wDqSTdXh8pE0RZ47ykk4XlK+tcWIp1DmsM1lqcsTjngzabYFBfW8OiKcAWgEClfXrDPZzzIZlvecYrYYxvAmucpIE5Vuq6dOdbRpuV2/aRzrUMlwckzhmsrVgsp5xdPOPZ8885O3/ObFaSppIsNXglME34/aaxCAFV1TpSGKrKUa1CFcuWJG+sYz5dUVUV1hqcjzzeFkkRDxC+Sj6xZpBftnJr99LKM9olXX8DRH8/IUjYZV8dcDAY0etYMl0xX55zOb/gpKlZfU9H9rpQhCrqY2AkNolLJ/67TAO5HlIENwd3rW+Kb0aw2lp/t9bwMoyTa3a5JV3bST8i6IWtD5pT6wJgVTFprpU/yJadjpihXbwRbGQRLWg3raUbm23aKXB7nA42AzMBd3hYV4l1sE68e2kF8i3DA01TUFVLqqpAZxqVJHQSRZpn5L0enX6fum7oDfpxpRESrUnTlDTNghTKOpo6UBZ5lnH/8D4P7r/H42efY0wNeDpJxjDrIY1jNZmhkwSpgz550B0gKo2L1o8CQaIzRsMRnU5O3slJsgQv4cH77yN1yu7+PfYPjtBpivMCmWlQ0B30sPePaFYF8/mM6fSS3skXJMe/ZjGf0DQNKx9KQr+LiaixjmVlWdaOJHVIDUoKMi3JE4XsK2orQWikEGjhWRSCqq5ZNZalAWFC4TNj3dr9yFiBteBdYI2VVOBD/kXZGJQyIfFUiI2aop2A0QJj+UpQ3G7qEGstLtee22iRhmST/B1mom2fKjxIH3KZbgPCuWrFsiy4urxgPnvK+ac/x88XoD+juLgiU4oPj44YiT20r8BIfOOoG48QCVJkSKmDeYCUOBQOFQCyFNG6LfxSJwXGeBAG6WqEraKN2psevQca8BXeV0hpSXRw7vKNQEsbnCnKeXi+Fiqwxf1dSHugVsGxYpEThEHvPm7RlYJXgN2WutgGv4KXkSm8EiRvTc4EbIHS6CSBj8UUHFIqSDQSFQFty5pdd6fYuFRsdMfbbPHGwWKjIRVyy7KtfbDRLMstII4Qb9GY3i5GvR3+5n/pD/jgtz7m3qP75N0etnGcTy+YXV4wuzzn6vKSy4sLLs4uaao6Lq0olEzJsx55p89qFXw2HYrecEyWZXR6PQQCZkvqxlDVNbPpHOstUiuSNCXv5EipaOqQve2cwzmL0BKdpOh8g
CnD3/cffcjBwQHz2ZzzszPq1YRXapUgzFpPiyD87/Qhfbns6jcN3/pWRXZNiNAphqVgT9PULJcTLi9POT8/4eL8lNlsTlnCYuqYnk3i9d5M17zzWOOxBpwFv1W0b/PFglT3GA/36XWHKJVEVuwV1nFRFnG96t31v7clFn7zxppJFvHHbSrkEQeT7yv/PCHlkH7nfQZ79xgMNd2OZVCcMZo8oX/2mOfLGVPv3+ly7zcJQQDDH2bwXl8x6mryXFM4z6Q0pGc1n9fvdln6daFlcHPwIuSuZApSsbFF255kCRElEC37GhGqIjpLqPAQYXzEWKhqqKMQVdjINreJem36h4zJfjI4YQSWbLPQvMYmEQBLWGfHb7++ZuBjl9ku97Z655vFIdtdhOXytzuPHrhcTXjy5DPybs6D997j3oNDOllG08TEQ6UxxqESCdZRLudMLs9xTUOxfEBTrTBNTmJTlBD08i6f/OAT/u6/++/x9NknTGcTnG3Y6fbp5xmuKrFVRZpndPp9vJSUxlA1FU1ZYuoapRI6eZcf/fh3eXh0yO54hMpzOv0BCMX+4QMGwzHj8Q46zRBKYqyhrsLnfW2xVc1itWQ+n3F1/pzL0y+ZHT9hefyM6cU5J7MFF1XNwkPl/a2BZOM8y8oxLx1SORrnqGpHN9P08gypNL2eYl+kWBRCJ0wXKxZFQVo16AZU7fCVgyYUTRKA8w5jobEGbS0Q6xdIiVIKnWickKDaZOXrIQiM83Vg3JII4W8nAjvdEnmhG95mLCIg2TLvXteVa7/TE0tBxZXptzyfx88+pTEritULmtmLUAALwFT4s6e4yQk7fsmBKljOz5kvVxSVZdV4fNpDdUboLCdLEpRWWDQGHZ69Zp2cJ8K9JrEkGrr9HvsPf0Dz2/8VlpWBy8+5PpUSwCiCJhNs1V4a8DTke7C7R29nj26vh9YiFA1JNGmSIpGhwC3J9X3LfejvQdYN8kmdwPgIFj+A6k9e8V23H7fEGLc9ptj0WGvQexMcvwIMX/vc1r4IH5HxEip8MKfHI7F4b7BNjW1qVJKSyA5Sycjsyi3WGNbexGvwez2JrnWYuAmMX/Xv8Eu2PZLFZv8IXr41330IBA8O3+ff+pt/wNEHD8kHOVVVcXl+xenJCU8++4wXT75kuViwKgomiwXWGoTMkTJF6Zwk7ZIkXZyvKCpDkiX0en16/R69XhfnPFVtYTqjKAoury5x3pNmKcPRkGw0Is9zyrqmrCrqusbWDqRCaU2nN4Ben8FwxMcf/4CDgwNePHtOuVyxSPqYOjAsrw4bbsBlDcWbA2O3jRz8dvMLvEFdl0xnl5xfHHN+fsLl5TmLeUVdgJu3Pqzbx/jNrrYQil53h73dIwaDHZTU0ZHC8XJfvqUE3gK765dewyavwa9vS5xfB8ubz30fLTQjFbtk3Qd0d96nv9dhMFSI+pzd3V3GowG7L37Fs8tzHjf+eytPmwHvKfjxvuJ3frjPg6NDsrwLKmFaVJxNl9S/esLFFzMuvoc5RqIg3wbGMvoFs5FFsNXlKhnGlhaAogLjnMjwSOWmtpSxUDgoXQCntMBYhYFTtIx0ZKtVlF1ItgqMxG57G/g6NsOAh7X84lpEcG5tmzC09Tva99u/uZ0WfOEMXx4/RihBkkvuHe2RpV3AIIQiTXK8VzS2oqpWFIsFk4tTXFOzWk6p6iWduoezBqUTulmHH3z4MaPRDrPFjLJc0tQV0lp8WTG9PGc+mdDt9djZ3yPv9RCJxnlHWSxp6posy+h1u+zfO+Lw6JDeeITMuzgh6Q1GNHVNp9Ol2+2S5jk6SwPjHxPRhHU441iVFatixXx6wfzqlOnpM6bPn3Jx/JTj4xecnp1xMS+4mK84mV6yaqq3PqfGehaVY1ZavLSUxrFUhr5xeJnQySVpphlqSYPCqQSdpchlgigaRO2gchhtsKWhsRZrQ0MIzLHFGAsiWNQhBFIpklTjZACoN4mGtaQPFWsdhBa03Yd6fGBQY
14M8dV2ZWMzXIjgcXYN8rqQ+Bc/orwM7lftC28Rl09/gW9WUF+AuzENr+ao5YQdWXNPl7xYnXN1ekpVNFSNR/V2SEcluRvQcQnaJRiR0KCpSRA+wcqg5fZbmiglBZ1uh3uPPkFLwReFp/yTEsqnW79Hg9wNfq65D1ZrZn7j6DMY7KL3Dhjt7tPt9UIhO++RaUKTZiiV4EQKYgD+ktCTjCH/GEb3oNONOisDew+h+n14+kvCmse7jdtLvttmfNcMcvv6zecbzLIErlWZk+s/A2ER6rco6dFSAA5TlTTlkmZ+BfWcJhvhd++hEx2BntrkIombIHbjf7wuB92yxi07LDcgevsRmMINC70t2XjHphRfGVIoRju77B3cY2//Hlk/ZzqZcGIuOD4540//4qecnnyJc3YbduG9wBhH3VjK2gR9kk7J+wOEFFgkzkuETBHOUlcN8+mcq8srri7OQSr6gyGDwZA87zAYDJFlEcpAqgSEom4cFDVShfPb7fbY2T/g8OghzsJqWbBallydF3j/NQvqvoli/TeN653Vpg8NWuFVseDs7AUvjp9wdXXBcrWiLMAVL330W4RgvHvIhx/8iKMHHzAa7oZOwV1fsNvWArf49SYoXm93g0W++Rt9zEy+RiF+r+HRQpJ3O3R3xwwOhox2MhLXQRSa8UgwHDi6zzzi2QVPK/+dyhUyYF/ABz3BJ+/1+ejjB3zw/gfsHdxDZR280PSWJfpqxpOFJ3vxKyi+22pMAKmCTAf+QEVgq0UAxcB6JaTVDXsC0+t15Lzi9okIn83azxNAqbAgXBj/XaxI13oi+8gOC7/pm1tArnyUXcTvlBHUKtYEGxAmlu18Yru7bPffAvtttjmwdxvW+aW8qDeMJfC8qeidn/D+7D180yC9A2NwxtJSMK2XVSj9bBGiwbfVwFyNt00cLyS9vIfc1YyHI4xtMHVFU6wo5wsSJKnQDEZDDg7v0x8NSfMchKcsVzRNFVbnOh06vQHdbodEyeDuoBNSLfG+i9YJSZKSZGlwG1Ii5CJ4Fwo1OOgYQ78eMNwZUN3bozi6z/L9D5lPrphcXTK5mjCZzbm8mvHsyRMeP/mS5Wr5VufTOMeiNExXhsYLpLQIanqrhkXlGXQsOknxQrMoDUXjKKynsFB5gRUKlEAlkDqBMAIjLMqHvB4ZLVPXIUApgdYaLz3Kb1bg1snIIQM8ssbh89vyszU4lgKvNg11zQZvTfDW5trxy0PvKreWPlopRTuhe7uG6uePebVSHxAJnTRnkKfs9HLc7hCJobesmJcNIkvRHUGiTXCWcY7a1QR5ZBjTQWAROBN8vBvnESoFKdEC+oMdevfuU46P4PiETZZTA+4FFEmoImReNWbXYAuUdHTzlG6WhLLQRO14r8Ng7x7To4+w5RIuHYge9B7Cg99GHL1HtrsHxuHqDibxOFfCiz7YvyrAWG4ay3VZBVxDuNfkEq9hjonewbQFONrLJ0iEINUCaz1lvaKZXUD1FDiF4ghTdPG9PlKG5IXWA7YFw9f8iCNjHEqWbsBxyw7LG6D3dYzxddD8/SHjRGmGwwGdfo/ecEh/0KOuDNZ6Ts4vOD17hr0564ydQGMdZd2wLCuKukZlOcM8o65r6rqkrBp6BmzjWMwLLi+nXF5c0lRzEJoqWrllWZdef4BXGicUQiYIoSkrC7LAYcELpErp9UfsHdzHNJ7VomQxWzGfXdJU373SNHSQwTpouZxxcvKU58+/ZDK5oCwMdcFbrZv3+vv8+Md/kx/96Pd5+PBDRqMdhNy0T+AlL+ONVOJVQN6/EjDz8tZb4Po6EPmuI0xsDVlP0dvpMjgYMdrvk5OTVBozFgzHjt5IkvY+o/fknM+nltk7BvUSuAd8MoAfvdfl0QcPePD+R+zdf8B4d5/uYITMejiZIJcFZnhF//kclT2DYvpuD+4VkSrIY70bJaO2F9a64lY2IfwmG52txDntN9XjsgiMExkkGs4AJgJjEYCx0/EhN5rgF
gS3hT8EG1DctrG2y0ewTgJs80hbX90197YFnCNB+JLcwsXPrdOfbhEcT6uSalVAbaFx2Kahrk1MVIqaDgdaCrJUkqYCrRyCWLPaGrzUCBQKQaZTUp0iJFhTUwiNMh7TK/HWMR7vsL93wGg8Js9zhBSUdYExFUmWkeUZQmq8lAjvkM4ivSJVMtqWyaCIjbZlYaXS0eqghQzjgUpTOt0cNx5i7x1imwbT1NRVTVWWLGdz5lcTjp8/49mTL1kt3g5wOOtYVoZpUbOyHusNxlR0EsF0aRjmFVmWo1TCojTMSsNk1TApDKWxGCdpvEAoiU4JwFcQpToSpXVcEZY4AsMppSRJVGjvxGIVzm6B48gYCxXpLLleWWu38QBKxGKCbWO8AZw9sAV529WMjb4nPLX3we3E60UuMhkw6O/Q7/UYjQZknYzR7ojZfMVsWdJ4iZMJjsAIG2fAWrxxOOtCCfE2+a4oqOZzytpA2kWkHYQSaCXo9ntM9newF11ottfxCvDFV4yLBuoJylV0E0E3UeAUXoSKe2rYxR09RJoVV85gpYLuGA4/Ijn6AYOj9+kO9/HGYuuKupdQUFLo7ndiTPHuS0ILbop4oRVo3mCP1/IEL5HIdSfY6omVFGjpsE2NWVxC9Qw4Bi6BPsKZoPyJDfqaHGKrxPM2wxsOy2+yqtds8Y3kvS0gLNtbTARu+5rG+HuKTKdheS0NnXKW5PS6PXbGu+zuHZCkXay57iUsZIfuYJ/dnR329vcZ7+wy2tmhOxiQ5BmL+YzL83NMXTO5moZypcsCIQTD4QgpHEqndAdDsiynqWuWywJjLVJqdBIE/klakiQpJiblGWupG0NdGxCKNOuEwc98B8L6m51Wy2SZhqapmM2uOD17zsnpcxaLKU3jsF+l8PiaEFLz8NFH/PjHv8ejR+8z7I9Ikiw4eGwv5bUs8Zo9fn3yHVvbXVd13Fj7Ywtc+yAj8Tfe/65CIRl1cnb3+xzcH7F/f4edgyFdNSC3PcwqYznVdAY6ZJfLBvP5JY/nYeHsXagWNPBAwu8dwG9/0uGT3/ot9h/8kNHhh/RGB+S9IUmnh0g7WBRFuiBpEsiHWHk7Xee3CUGoFNcyxuvkOc+6rxV+I6kQYdV5o++90e1qDXkagHaqAsYTMeHORmDcaDAaGsHaP5koq1jTIWKTfCe2WOr2O30LFiJou8Y8s8EWXmzs5QRRFx3311bFa6UbtzXLS4Bxp0s364REuqrGNA3WGLxXUSnqsCYW8zHRp9zUNG1JZ9sEr11C1TDb2CDZEoGNs8aC82iV0u30yLMOWukA5kxI3sU4hJeBVfOxrIoVOFvhqiJcMyWQWqHSLirrIKTAW0ldN6xWc+qqwNlw0TvdAZ3uEK01UihkqknSPJw2TyhOtFdSHS7Zv3ePB48eUVVvV3bXuwCMVWFQjadxlto2pNKzKg3TtCZLKpTSFCbqkZuGRW2obWhUzosgVxMCJYNUQiLQQpIkCYlOUEpGa1AbJiUmThWEDMUqrMI5t2Z8xXoVuqXaNv1i2+cGKcUWgef9lgTtFcB4/ddWC/Ygvduya3tXHW1KOnzI7s49Bt0evU5OnmfkeR5WEtIVtfVYFNaLcE6tp2oslTHUcYW4cY7GelTjMRiaesl8vmDVeKwIYHpydhoS9r91SESny6ibsteBbh4cq4SUSC2xacLYjRmLIzJzxZkokN0R3XtH9Pd3GOxk5D2JcAJvoMo7TOouT3XyneTf3b7GGG6wwWyB4u33rrPEG9AZZoTKb7kPCoWULlqkeVxd4IoT4AtgRkj9d0gVwKqzBisUWmuUUhEMy+ss7w0rNgkoES7cpjBIC3Y3zhMtMG6dMLblFvI2nOffMLpJRjfNSGRgLXDQ7fR57/0P+Bt/Y8HTx1/ysz//Y6xZEXqLlMHuIx49eo/D/X0eHj3ggw8/5MH7jxju7tIZ9Dg/O0NLzYunz3j+9AXzyQTrLYPhiJ2dMVqFxSLjPMY6ppMZ09mCrNsj6
XSCz2+SkmU5eZpjGkNZVRRlxcXFFd3uKXVR0hjLYj7H2rdbyvsmsdHzxtWEuFjbNDWL5Zyrq3NOz15wfn7MslhgvX+rGWqeD/j444/5rd/6hIODfZSSa9DrtqzY1g4T6474NXrgbVy7ZoP91nv+5df81rLgq+zfvoPIRcrB3pCjhzs8eG+Xvfu7jHdH9DPoygNMOWI2ydFdSdEsWK0uMfWK7LhksoC5gSv/7Z2wXxUC6APv5/Dj9+BHP0r48KP3efTBJ/T3f0A2+oC0t0eS91BJDioNoLBSGLWi8prKfg+zC0J19CS5zj144sAdgel6FmE3bKtXEWwSNLxSBO1x3oFBJwBkbzZg1RJY4kZDpSMD3YCNdO921T0lAuMs2YDy1ltZuA3Ybu8j76NjRXyvBclKBPba+2gbFw/es6U73vrdbxsCONQZn7z/A/b2D3FOUJYVjWkisLJh6dc5atNQVCWzxQIvBcuipKxruk2DbQxSGIQQNFXDsgge5jaWi7Z1iW1qBJ4sy5BCUJVVKBSEw7lodQkknS5p15OkGWmiEWaFXVxg6gUWh9cJ3d1DeruHgT3VitVsyuPHv+b87ISqrJBCcv/oPe4/fI8874QiGfGMSSHDOCnDCmqn3yPJM8b7ey95L3/bcM6xrC2ibEA5jHfUzqNwlJVlKiqSeCyVE1QWSusovAsTubVUogWe4TgTKUmUIk00aZagdMjPsNYh6xovZGivQoaiFiqc01ZOIdcr1K0wkzVjvGkMMflObvevkVWlfe06MA6vXG+JIkpuNnu57X5CQHbEzv0fsr/3gEFvQCfVNC78nixNyLIU5cMN7qVCCIUjrAw35vqjNo55lpJKjzMNZyfHPH9+RrVc4IoFfnEC85NosfQtQvfoH73Ho/0RR13o5DXeerRWocgHgjLJmadDemaXLnuIpEtvJ6U7cnS7K/JcBq9wL1glnnzheaFvL1n0Kw//Vvby2qId8f2XADEbMI3YKtccE+f8NjO7SSRqqhXl/Ipi9hjcL4BPCQ2vCxSYYspq1kPnjjSPTIYKwCf4F7eWa1vlndede7R/25JFvMqFQrbAeP1MLDEdLFG+L2BsrQ1sR1lhagMW8jRnb3efjz76mH/7b/8BQmguL84piwW9wQ6HDx9y/94h9/b2uH9wjwcPHnDv4B7DvR2yXpembjjvnOK9ZzafM58vGO2M2NkdMeh16OYZdVMzX644v7jk4vyCZVnSG47oDoPmOE2TkDmcJqhS433JarXixfNn1GUFjWG1mDGfnPDdmHdf77BaAFqUBVdXF5xfnHBxccJkesGqWGHCnOvNQij29g95/8P3ee/99xiPR4HlWXfa7ZFsmNw1S/GKMtEv/5LrwPkm67wGxXEAcK8D299BBM2aZnevy73DITsHQwbjAf0soa8Fps5IOh5LyWxyzOzqGcXiknJZ4kxYtZub2wHGKfC+hh89hB98IvjgBzvcf+8Rew8fkY0foQYPkdkOKs0RMgngqDZUbsa8MMxXNVXzPXhSiOBWqDVrJnZbSukJz8EGYgskb/ulxf5OS8gS6HVg0IVeHgb1xEOuoDRQOSgic2zsFrPb7lcS8gZiH9jqjQXRtm2rfxWOjR0b1xc32kP2hAHJtvuJwLlliuMK9trG7W0jAQ46XXZ3d8iyhKouML6i8TUOgZBBZG2soWpqZvM5pxeXFE3DdDZntSrodUqavA4yPelwrsHZBts0NLXFGgPOIYQgzTKUCsyn9566roLFpTPhvpcSKzOscnjhURLcYsny+DHl5JjaO1yaMixLjJB0rCVtaqaXF5w+f86zZ09Yzhd4B1XRgJN0er1Q0U/KCDQFSmvSVJN3O3R6HTqdPgOdhN/7FuG9p3IOZaKVpfc0LtRTNd6ihUN6iwAaLzBOhOp8wuOFREp/3RpVBvdhrSSJVmgtUSq83lZpDY5RxL8jCeaCkwXehXE/TgtCcl1cyriRARqW+CPPK2Lf6rf7VtgS/6zjGmMMCOc3M
p93QkIMEJ0HpL1dkqyH1hq8o64qlsuC+WLBfFmC1Og8RycZQim0DDIcpT2JBes8xnga46Icy7JaFkjbUF2cY58/g9UJuAXBnPLbjc26v897773Po90BB5kh1Ss8Bq0UeZIisBTpiiQvWXUtRV/iqEn8FUlZAhfQ9Ei7Q7IkBzsnMxcI93arGt/4+G9lLy0wvuZPHN+LyyJtBkY7I9zMCTf/rZudjEN+3NY7izMVi6vnNPM/A/+XwFM2YnABnONWv6QoV8jhe7AT1ETByULhpSRRKSrZVNBplwCViL7EkmtgeA2OYa07bh9qDYq/CylFHOm+IiblgufPjzk/uWC4c0BvsINOEpSwjEc7/P7v/w0ODw+pqxpnbMxQl3SyjGG3z3g4ZDQekWUpzjrKomS1XLJaFTSNQSeawc6IB+895PDwAIzBVgWmaZBCYhrDdDrj8vISeXFB1u2xf++Q/YODIGsJmXd4D8v5jC8Xc56Jn4OpMHVBUc3e0bm7Hs61N3hobUKEScViPuPk+CnHx0+YTM5YLmcUy6gvfqP+TdAb7vH+Rx/x8OEj9vZ3yTt5YDqcXet+oe2EIyjeXsJz16Gx3L6vbsS1hJOw0w0zHZmgjafxdx/GVzhWZF3PcJzRH6TknVBCVyuFFD26vR2Gw31Goz36vRF17XlxDs9WQQt6W8OMI0gS+j3odDRJ2kMkfVwygHSIzMfIbAhKBzbKempTM5nNeXF8wunpGVX93XTQL4UiJNJFqti1Lg42Mqzx8rZNpfUVbuUMSkIngUEO4xzGCQwT6CXhnIxSqIZwOYeLOTQNEG0IXWRxRZRUBOkZiC3AK9rvg7XdlogAWrsg0TCR/XVy63e5rcVFB9KGR8smry3d2Pgjv210gUw5qmbGYpWAKFBJgpMOqVKSRCG1xtiGsio5u7zk8XRBr2x4/+yCowcLuvmKPFvFAlAOJSW9TkqaJDSNx1kXltdxEdRtxh7vHFZKnAuMHlKjsw467aJ0ghBQrhacP/k10xefUePxeYdZZRhYT3c0I+sOmS8XFMsFTVkxn82oigohEprK0+n1QjGMSNpIKVFakXcydg/32RcHdIQEmbx1gSqPwAiBIRQm2qzOSZwUQbe+nqQTJmlAIjxetDLHMNMSW7k+IWcobGy9w9gGZz2NdSFprK5AJggdS3qruDrsHRK3JtfYAsZrbfHNEDfaVtudirYNvgoYxw/CFiiOT7fa3XaAA7zrMF1WTBYLlmXBfDHn5OyCF6fnnF9NmcyXZJ0ug/EuebcXJmQ6DedVrP29cF7gHCSJZjAYsbtTMxwOSdIEW83BtRhLE+iEdi2pHUMFqOxG5TpAJOw+ep8ffvgeR+MOA5bIpsTZBulAe4U1JeXkhPnFCcXkmHpxSlkWNC54/Hvp6ORdDvYeMejvsVwsuXz2DFtNb/OEvjbeETBmCwyHaGdWrT53/apoQfEGkPobKxbOGly9wqyegP/XwAuuN9+CoDMGnMIVI5puH+Et1tQkKkEnCTqTqDRFy1AFp5VRKBFt4LYY41YisQHH28B4U/Fu267tXUkpFBpH85UMYu0aXpy84MWzZ+zcO2S0dw8ZMxh6vT4//OQTfvhbn5BnGWmSMp1OObs4p6lqEqnIs5S8E5bdmqahLkvmszmL+RxjDHm3S5ImHB4dcf/wHqvphNlFhRISrUIzqquKankFy5piGr57OBqDlKgkQWoVbHeWU4yZ8V2Vd9yO4JIrgs8vMnhkGsN8PuH4+DEnJ0+YTi8oViWrZSgF/yYhdcqDh+/x8cc/4P6DI4ajoC2+lvCxOahrlmqtXuLm9fZbd86117eTRLZfY2Nf1DJz31c0vmJVXiFkRacryDuSPFMkWiFRSJ+SZ326nSH97oBO1qGpHOcV3DQCuo3IkgCM87yLTvvIpIeTXbzuQtJDJl2QCuscja1YFiXnF5c8ffqMs5NjjPnuHSkgsq6xiJeNtmrGRelDm4BHlCUINo4SBIY0FdBNYJTDMIWei
g8dqq6rbtgfAoo62LfJVlMciQQpI3jd5j9EYHVbS7dWH9zWVZAyst0iJO3Zdtv2uCOIlj7sW/ktm7f4Ox2sK+q9wq72W0cPkKJhsbxAqoa67gb7MB0mS91uRiIzjGkoq4rL2YyFs9RVyeXVhOWyoIr+w7UGLwwqSVEqQUqNkgLvBBIX2NBIogQZgMWJQMNLFyQASI3UKVJnSCVBWExdsby64Or4ORUOl+WsZMZC5vRWNZ1BgY3V89I0Q0vNyhYs5kukOCPvLEiTFKVUXC0IGs9Ot4POU/qjITrroBK/EY2/acQ2sC7rvQakAqQKrH9c0fLeI3yAaGpNpgWG3kkJ0R1KylBzoD0262xo88YFzXdd0VRVsLuVSdBhy8B+S2xgqGMOEULgIiv+bQmCdjXGvbYTbQk/sdHZvwNQDGMwmuVixcVsxnSx4Gqa8+z5Cz7/4gknZ5dcTWf0hiP27y3pj0ZknS5pmqG0CpptlSB1AoTzLWQoWtPt9ej3uuRZQulXhJ7XEkBxH9iLv3MOooF8BLtjuDyB4mxzJtIO+/cOONgf0c8Eqp5FfVSD84ZaWspizuXpY05Pn3F5PmF+MWU6u2I5v8BUi4Db8ozF0cfs7jxgtaq4PDnFm3cvt4TbdKVYL9VtscZxih8gyLYkYYs5bhnZeCMgN8A4JHM4qnJFNTnFm8+AU17mC+r46AIjdN5BCsfi6hxf1+jugN5whFYKJXpoFZhLEW9K1TpUyBYMbwFktoFvyzJvgWS5pT9+R35tluYVv/lmeM6vXvDZpz+jvzOiNxyzs78fjlurAIizlG6vS55loBRFUzOfzqjLCmstKkkRWlM3DcvliquzCyYXV1hj2ds/oDcYkPf7NN5jXLA3S9KMYZqzu1Owu7PH9Ooc06zAGVbTc5z/hDzv4pVmWRbYpo6eh68DxW0vaL/Bb/724ZwBJMIrNuWfV0ym5xyfPub8/CnL5YSydKEa5huumHd7Iz786Ad89PEnjHf2UDpFxOQQ/xVSiRuEw1fGGkO/buNtWcX3DI0rDE/Pn3B5fkxdzMEOSWSoxYT1eGMQxqCMRTYG0TT4xgaXgluOHBgPYDRO6Q9HdLsjkqSLQOGdiCgsoEprLNPJnOPnL3j65Rc8+fxTzs9efK3M5V2FV9EtIoJLQyzlzEaigA+FP7RmXf1OCegq6GvYSWFHh95SGHA1+CTsFxcAtyRMHjouODIJDakLlm4+Wrq1rLCT8XRxnTG+vtwRcY+IxUAi0+3d5jNrQK8iIy2AJhxPU4WkeAuwXZr6LaIGposCdfyM+eJyXWkuzTv0+rtI1UPqPtZZGmuojIka7cBWeoJVWJIo6qpgsbwKmk6ZImQGIkeiUdKjZHtWNidlXSLCx0pvwqJlRaZLtEyQSpB1OgwP7tNUc6bFkpVzVA58sUJ2a7I+9PpjdnYPePjIMJlMWMwXIDRSJdHaTSPEBvUKIUiyhG63R6LTmIfTprq/RYgorVGxvIWPhTi2xslwlUOnJa5lc8YBX8XlBaHXMhwvQjJYmzzsPFhCjoa1DaYpg6WbCn75QmVIHZL2pBfIax6DLTD+dj+tBcbtJPPVPz/+vkgI3h4wFsCAcOcUUMwwE8nVxSUnJ+dkyvH46Qu+fPKck9NzJpMZ3cGMxbKkPxpF1jgPkkal0GlGkuZInaxBstIJq/kC4WuyTMSkgPYHtPgqh84edBJErhC5Q6gae3V643gdi9WUk+Nn1FXNojcEPC5eq6ZZMJud8fTZZxyfPGE+K1jNSprpBb5Yxuo+YCU8OZnwov8prvHYouQlP+d3FLfHGN8ExW0ziVY9m2S1bWcI1ozsRvcrcNLjhUcID9bRVAt8cQI84fVIRQC7kO2R9TqAwS3OwF7R1AeUWtLv9cOSnlYh8cG3FfDi/bjOIXzZhk2IjQOFlC2Y3jDIgYF+Vxrjb3aHzcsZn376lwx2dhnt3cNBSHzrdJAilMgSURgoZJgpWudZrApwDpVmCKVZLpdMJ1Mml
1fMJzN0mrJ3cMjO3i5eQN3UNDH7Oc0ykqxDWTXs7+1z8vwJsyaw96ZaUDc1XT0iSxJkkkbniVdfwzwbMBwe4kXOZHJMU5/f0vnbhHVB4xb8PoPOb7laMJmcc3r2mPOL5yyXC5qCtxC0Csa7B3z44Q/44MOPGY13g5wnDgqvXcZjPWzw6mv+Mri9phm+iaoFsZN+/R6/yzhfXHB28pxiPsHt7aKFCcygcfi6gLqEqlg/XNXcCjN4MwYKxrsw2gnFZjrdIYnOEIhgZ9QYnDJIoTG1YTadcvziBc8ef8nTLz9j/pZ+r28TXoGRseQyG2DsI5PaTielDG4TQoZCHFpAN0olRgr6ArSN1e58cLpoZRdt1bkkgmcbZRDGhe1dBMihGlm4TdbT2Ng8r4GCdniI1dzbYiC+1R1HzALhWRNBuo9V8BowCyiW4ffofI1v3ioaYNZ47EXBfFGQ5YJON6HfH2KdoNM9IOs0GGtoTCg40d5t1oEQkjRNyFLNZDrl4uoU40DIFJX0SJIhic7RyiOlDzZizq3tRNsEOIugNhaLIIuSgFTnSJIAjO8d0diG8uqc1WqBlZraBDmW0imjnT0ODu/R7XQpi4LVasWqKCmqOkw2lELQTpxCUSGlJIPhkCzNAghviam3CCGCxZdSwa8WCAC4JZVkTFWLhJfwsNb6ChBKIFpgfO0CeywBRK9X3CIGtbahqUq8syRKItI02A8mCuHbleggc9kk4H37+HrGOP6w6HV8fQr0ttEWpwdYhnnFUoU6BafnSN/w+OkxXz55zunJGbPJlKzTZTqd0R+O6PT6oRiMUmidkHc6pHmHJM3DCkeSopOEsqxwdUGW+NAZXAsHqoPYuU/vwRGD8YC6OGF59ZibecjeWaaTC548+YLFomQ43AsiDFtTFjOWiwsuzp/y7MkvWJ0e42sX9VX2pa/0q5pmdfs44Ovi9uzatiUULaiMy2AhUW0LcMoNWyzjv9fm263hZayr60yNW12CfwJcvOYA+sAnJP1P0L09mqbCLOdgzwjLAWPSPCXNdJg9EsRrLQgOcojIbG8d29qdoj1Wuc0ct3ZvYp0s8H0m3wE4HC/OntL56Z/ghGL/3pekWYc0D96YaZahlUYpiTGWuqkxjcE4Q5JlJEkKHlbFiqIskTphvL9H1ukw3tlhMBjinMU0Gu0MtYhFJKRG6SRkkG79fNcs+fXP/5zBvYd0x2Ouri5xxau1xEIo7t17n8H4HpPp6p1pYZ1zMZPYYZ1jtZpxdXXC5eUxV5dnzGYTlsvmWyfhbkeS9zl68JCjBw/Z379HnudrqYTbcp/4trFxrgif99sDxbXXNn9//3B4E6WpODt+wvT0GQe7fRikeKEwZY0pFjTFhOX0mPnVKZOLcy4n9a0X+ZDATg92D1OGB0M6gxydC5AVtplSr85xLkHnNbrTD+XNyyWmKqhWc1aLeXAq+Z7Cy+gW4QOwayJ4hDAm2/jQAuoWZEZSzvhg1bsoIstsoG6g34cHhzAchm2NC8l3zgTiKIuJdc5HUE54NERpRUTFLbvv237ft6t+rIFMS5LEIljrbX1E9d6BqaEooVhBMQ+AuCihceH7hgay7O3PZR/oK8hSQaJD1S8lN25GEFaUGlNT1RVNLCwUTycIgdYJWmuquuDq6hzjBEnaI+9IlOwhk3ZoDD825DjI9UqpFZLgf9GqNy3O1XincF6wampeTFe8uFhwOV9RGMPB/oDDB++xd/89dvcfsrO3T28wJs+zUAK426NT11R1HSZJW/kvoX9wCASdPCdLA2DaVGt785BK0clSsjTBrhGkj3n2Lcm0WVyWAM4jnNwsJcjQQLz0xJT59dYCj9xaJmu8xZuKYnmFEgrXlAgJea9HnnbBt2O3i9hiC6fwipHaf3Vv+Y2B8doS7tucva/Z7zrDwoFIScZDunnKYjbjeb3g2ZPHPPvi11ydPscWFyylZvK8h8576E6PvDugNxjS7w9CUnyeI1WCUAqVZmRxjJrP5
mAqZNrBFT1Cq8xAHSL3H9E/us/uvft0Bz0uTlc0JgGfsU3pYA2Lq0tePH9CU7uQhOqCfGK5vGI+O2d68Yzi+AWU348k7evinblSCCnXnWB7072uaIaMVebC/xw4g/d1EGE3Jb4+Bx7DK4dJDfwOyeDf5uiD36GxlpMnn+NWz4BzoEEo6A/6dLp59NG04ZZrmWrZstvXQfE2i70BxnLLv3iTfBe0UN8vMAYobM2nX/6Cq9mC3mAPmWQbY3QpsLXBGkOv02c83mE4HtHfGTHa2yFLEvCeqq6pm4a81+X+oBcq2g1HdLKcsCSS0WhJnSrquqGsTOwExI3ewFNcPqGYXZLsPcQUC7Cvhjp5d4/77/8AIVJOz2dY847qnrXWZcLgnGWxvOLs/BnnF8dMppfMZyuKBfg3vF+F0hwcHvHee+9z//59xuMxaZKEFYr16fkmPeZWR7M+dA9+i23eTtTzW6D4JlCO//p+W2ZInLk4e87kxWPKe0P8OMNKqFcrytWMcnHF5PIxl+fPuDg943xhb8WFYjsyATt7MD7sMtgfkA0SZGrwLDDNBcYIqqVB5yvSwQ61F/imQPgG11QY830Vqya0HxmWGBsXmN4mJsTJSIYZGUBvI4MEYp2YZ0LvqWwAmRRQFLAqYG93wxi3muXax/LnMngcJ7HxWB9BuQj7b6vueQO125JH+AhnNjghsGhR2qHZyD5c5EGcBVtDNYf5FCZTWFVcY6SMB1u+vZRCArvdjFEnSCGkiomJnTx6DWcoKXCuoWlKqqqgbitutoBehKITWivqumIyucR5SbcHWvdQUpAkQUohor2Gd9srkZvripTRRzf0TYgGj2S2WvH58QWfPzlmWS5IsoQHvREffPxDDo7epz8+IM2DO4ETAqkFqdSoPKfT3veizZnZYnE90R9YRfeNtwfGQmm6nUDAVHGWJFoZQ7uCvM2hxUlRW5zGt35/ccU4pCyCj45PYcwNfhQSh6TB2ZLV7ALhPabqkSSK8d4+ecIGpIoAjhEb54n1wnYbLd7m9b1z64zy1RExgBe3CIw9QcqgwiPvs//gHnvjHuVqwex8zvPHX3L14nNsdUrQH4Gz59SlpJ4oViJj2rlHf+eQLM9QSYL3oSCKzjLyXh+dZljrsGWJ6nVxxSNQKao7Jh3vM7h3yHj/Hv3RDkKnOHJspcF2CbVDI5vkHHY64VIlCDQCTVmtmEzPWM4uqBfnuOklVL+ZoBhutcDHhjFeZ5TCWmrQJq+9Dhh7Qva8MTVVOcNVS4T0SFeBPSUU8nhVSztA5b/H+N7HDMd7XF5d4KoJcAasgBSZhApAaaoRODxmDXDXFswyzi7XrLB8GRy3/yYk7W2yZuUWMP7+o3IVzy++QF48j0tHYcLhcWsf20F6yEcffBKse3rdwCDXDVXTIJQKNj+JRqU6mKcbQ2EWwdEDT13XlFVNWVasiorVakVd18Fg/lp4MAuas8dh9HzlNZTk/TFpp4c1QULzrjSc3oIXDiEaalMym59zevqUi8tjlss5RWGDjOINv77b6/H+Bx/w6NEjxuMdsjwPv+ca2/u6g9v63W0ynX/Fe4Tju+ZesQWSr2229aUbu7bvJzxwfnnCk0//hJ1+TVdNSFJJ3ZTMZhMuzk559uWv+cWf/5Sf/fKUZ83t893Gw7SEq2XJ5XyKVRajLHkGaRqWeHE11iyxbkqDxlUrlFsi3OoV7fvVIdhUabvNCBrXCH7jw/uY1NZSsQQ2t4j64XoBooClg4ELTGkP0Gpj1zbowqC3cX1YVuGzxgRtr3cBr+E3vy0hssFRdyxdAOrWBTbaugCWnQcjAph2MYXAiwCIjQnHVy+gWobHcgWzEgr78vXPBAz70Ovx1uF9SOKyzkVJnY9uCRJIkOKCurHBs3h1dW1StJ5wChBSkSYZeacLQtPt9cjzDKVASk+WJSSJoGkUTRNW66w1eCSJFkitQWhSrcnzlE6ekmgd5TECdIruDhj0evSGffbuP2T34JDhz
g55r49KMtaibhWMoPVa3tgecQSEiHXim/A+2IutUSp8A+T32pBKkacJSRLqiK/lkDdKwbX8r0DElYUoMpMb/z+PxIT0OZDBrUIKgRYOJRzCeagdvl5SzM/xxuKqLv1eB+kqMs0Gl0TWek0PrJ1it37r9a71tUI2f+2cvmqjOHu91YgFX+hBMqRzsM/9gyG5FkyuLjg7ecbJ00+x1TEv5+648PANdlUza+r4Wg3e4J1FSI3QOVLHiotIbBMlLlmXbJjT6SqUL2lWEyol8CqlWK3CshQdYAhkIDVkHUR/iMpHGJsymxVMJicsTn6NX56DKW8ne/Ydxq0W+HjZ91esl1HkNTAc549rtjh4u1rTUC6X2Mk5VFcgJVZZ8MeE+lc3Iwf5I0b3fovh+BBEQrEqwFwSXCpSoItKO+R5itbBrB3vNybn2zNY8TIolnLz22QsKx2NviKwvl5qGiHe+ep1Gp+/mrtyOMrXHktl50ilyDoddNQWWx/aa7/XpzfoI1QovblaLJheXVCtCtIsRUnBcjlnvphSlRVl2TCZXLFcLjDuNbNA9xUMsAxJIk1V4VzQh0ml3onOPhQJcHhvqao5k+kJx6dfcnV1QlWtMDVvVf55vLPHhx9+xKNH79Hv94Lnpo++mt+yYbyu+l0gjl1krjbFQrxreeL1lutttvf5fcbVYs4v/uJfkslTqL6gO+iA9pxenPPp51/y85895ee/mHNW+Le5DC+F0gGsNcDjc9j/rEQmz7l3v8N+s8N4bBhrSDBIV2CqCXXTofEprhZoP0X5VeSwvjokQZvbI4DHW4vIrpr4sPEZH3yJxVainbWwqKGcwewUminkFkYSPt6D+wcw6gVgfG8PHj2EnV1Ah32eXYK7gFUJy0UAx8pH5574Xe3qdxq1BTqyzY2BqgJbBTnEchUY6EaCT0ClYfwUIuiHZxcwvYBlEbyTb06fM6CTQrcDwwGMdyB9SymFA6ZFRVmuLZlD4p8uySdL+v0FxaqiP5pRGsNqucRsV+b0oYiEcw4hFf3hmPtHHyClIsv7aN0uLxvyToder4MxhqqqWC4WLBYFwgsyqUmzFJFmyDQj72R0sizAOWfI8g737h+SdDI6gy7D3R0efvgheX+A1EmQHuKQSiJu6me3cZ/fFA7ChUl0sJhraVtPWyHuTUMIQapDIQ6Ew7WgWMDmirYpajbYB+KDnZ2QCKVBarxQQWKFxHoVLNakRAlBIkB7D9bhRY2rZpjpKaYx+KpPvTNG+JJEuZhH065my63zsclvige+zUC8HhTDV/pntwmHtzcd1mz0xTl0Dxk+esDDo132R11sNef87JinX35KtXzB17s81fjmjIActsYEFzirTc/Wov8cljUul9SuYDkRiCSjO9hBZF1Wk1XQZskc2AnVeXbG5Lu79Ed75J0+tqxYLKcsTs7xk+M3X4r9juNWgLGU7fLEDUYYQplLboDmdRU61qJ/Zw1NXWNXCygvwJ+FDAzjCOzvqy76A5L+J+zsP6TbHeFxmLoCPyXokTtAgjML6npFlmiE0EilAzPcssNbwL3VPLegeM0kX9NCE+zFt+UVkTF+eQH89mNIaMSXb7EPIQRpJyfJQyUUYxyNtZiYbS2lpK4q5vMZV+dnnL04ploVDIYDsk7OqliyXM6pG4MxluVqyXIxw5hvL86VMfmjaWq8Dwb0Smc486rJ0NuFh1AOtVmxWF5xNTnh7Pwpk+k5VVVi3+K+lUqxd3DAe++9z73DQ/K8A2tw+y1ahY/ds7/+WnjaFAO5Zsnmti3brtMf3vu1MuN7xsU4C2cvzvl1OsesHpP3M0gEp5M5nz6e8svPLKffQEXTlnC3/uvPrE5hby9hPmlYFVAZuLiC41MDeg4apE7IsgQvG7Rd4q2mMQmNS7AuR9QV0q2Q3wAYCwIg7rKZxN5GeCIbazdWbe2lJQJWGcfmxgeAWsZH3YRkOZWATSDrwe4e3BvBwS4c7ASNse5EnTKwqmG+2mh+W2Cs4
ve046cnAssIkH3UJ5sGRBksD8smMMBWgkhYSxechfll+J6SzbVUBLDfS2FnCMMdGIwFWceTpCGJ721jRZSjbB1/UkNSW1bFIpa4v8IgWNYNjdmMQYIwuXbRPqPbG7J/EDzz0zTDOSjLBik9SRp8g42RCOEpS4l1JlhGCtBaolONzhPyLCfP8uCEUQU7wcPD+4z3dxnsjBns7jAYj0nzHI+naYKPr7UWY22wgYsFRaQQKKXRiY65JRopQ3WY4Ge7YUBvxVBJCLSUpDqQRDZqhTfmvpuQXiC9DbUGfBw/dUgId0LhhAx+t0TvYSFRwpMikNZhTQXNArO6wk3PoCypsy6r8QhTzpG+Rqs87FPE/ldsH+qWnEWIiApvlky6Hq7Fvde2ud4BOb/lLfeWIfUHIBRedRDpgPHDB3z4wQPGPYWm5vLqlBePP2M5+SpTgptxHRS/OtqzsILmnGYmaITEVmE8LtM+IhngTAJNEvRPaEg6oazzcJ/R7kM6vTHLyRVl6UIG7/c9+HyLuB1g3CopbsokEJsJY/xfsGULYFgQHR6EpPEOWxWwmm8BW0Hooiev+NYMmX7EYHxImnaRUgcrLucIXewVQWN8jll2OX2xi7/3iOFwD6WCT2Sob9/KPuQ12zUZ2e62qIIQbLHeWwwzW2B/vXTz7kICXZlQudag6U1C0Ovv0N0ZIdKUsqpxYolOE9K8gemM5XzJyfOnfPHZp1yenlIsZmitOXx4xM7BAdYHII2U6EzivWMxu6R5A22wQILzmKYCqRE6QaYDKC+57WmGEAJrDYvVjMurEy4uj7m8OmaxvKJqzFux1Hmvz+HhAw6jtljpUDUL+La4eNPfbgHgbcC8hsDRMHZbSvESK+y39uPdO5OpfFVkwFjDUAfQ8+xpxYsXJxgCOClMYDhnX9N8kjSwhr0MUhmTtGJilicyqjYUpthwVDDoa7raMrtwdHVgH52BagWrec0qL1ilc9ANmdNgFaYRGKsxroNdWYRdoeXXnzsLVPFx22H9RgfcWpoFyVoAi95vPIJJIB3AWIAfBXeKvoJsB/wAVAfyPOiLpQVh498Z7IwDmF2UcDGDMlql2bAquynqtQWOfZRQeBdAbT9ViL5DKQ+LoB1eLQNQt8TEPYKcQgNDESzmOjn0BoL+UNAfSXoj0LlBZ2EwuVG07I0j1i6hYSMPMQTo0FiormrSWY1Q4fXKtT/ZI3zIFPQ+aGGTrMOAwNpqrTHG4n0ZiZ9YBt61/s7x7o3JeNZaZHSs8M6FIcwFJXanM+TwfoZQnrzfJet30XmOkiJYX60M8/mcy8tLptMpq8WSqq5QSpEkCf1+n+FwxGi0w2i0Q68/RCdp8LEVAiv8uurs7WBjj1YCqWSw8ZPiFSyrDyIJJ5Deo5xFyeCvjApssZcBFDuCLCOQURbtHN5WmGpKPTulmp7iZidQLEBqZoMBy+kFTbUiSRPSJIvE8Eb43q5Wt+Tdq1Z5X2pfYtufud3mRlEl2mTB2wHGH//W7yJ0iugNSHoDdoddxv2UcnHF+ckLvvjVXzA9/+xbsrDflrpbYVetRCOMZd6cEVxHEtZsNhoqi5spVnnGcHhAmqQw2gOdURQzymoG1VO+j/oF3zZuBRirOOsSImp1WxAsQlNZL59s6XJFgKSxgpwHa3BVAWZKYIjbpYGaV8soRvR3PmQ42ifRGUrosH88m+7NAQ24z1hcvke3N2A42EEpHXQ1UbfUAvW1ywQRFLduFdcA8DYzzuYGk5vM33cZKZIsG2GaAmGWbzRAKJmys7fPYG+MSDWziwn2asKyKJjM5oGJqxo+/8XP+PQXf8pyeYXwkvFwD51q0m4XnSZBSy6DG2ddlSyWl7wJWPfOUZYls/kUlWZYBDIbcE3Qf2shsNYyn084vzjm4vKYyfSM+XJBVW4N+N8ypE65d/iAo6MH7O8fMBgMUFJhrVu3m2sX62v6p5Zlbv/bJNptPnfNlWLrse6ob3zHdhW87zI08IM+PLoHV
R2W5k+v4NyFq7stMvmq9pwlcHQEhwfQzyHVYcm+qmKrk5Kq8iwWnuNTmC03O80zT7enGOeOxAUrM1NBuYRl2rBIVuRK43RB4wQYT1M5rEvwcoArBMqVZPKbDS0lode67VS9Vl9sImMsw8+OZenjwxO8+0WYPHTyIHOQQC6C3VmdQxUBn7WBFXZ16MvzDgwsrBroTQJY9SLqgU04b87E7422aj7kTOOaANKzTJJkCb3cI7WltobFKqzaGxPraYkAoHUSrkevB4MhjHYUo72M/kiT9iw6N1hMALFNPNZbGFvb7At14xnC8ZVxKNEmvH4NfrggrHbO4b0g0XkgXGRYaTQm+KUjPFrrMNrF97ROSKMWRKtQlW7b59c7gfNh3SHLUob5iLSjUXmCyhTGWeqmYjqdcnlxwfPnz/nyyy948fwFk8mE1WqJ1po8z9nd3ePg4JCjo4c8fPg+9+4/YLS7S38wCvqi1g3Ks/Xr3yxEnJwlOjRIJwVOvQyMw8QiegxbizQxeV3LaNcm8TIAZLcmnEB5hzIOQ0NVL3GrCXY1ATMDLsBZ6st9ltNLTLlC9PskOoxRbQfcYpBwPcT6uDcH+ZpzcAMYh4e7tlK3kSDcDkH2yQ/fR6cZ2WiXrN9H+hpXLVicTTl5/pjJ+ZffkTThVVN8x2b6r4F+6EjmhjrVFDv3seOGNO+hugOKuuCsKrHHQPmMt9IrfgdxO8BYAlFasG54wsW21mqXwkxKtBU/4vjsnMNbi2uWeDMHfwH8GviM0BW9RpspRnTzXTLdDQBbSLRKQl1tOkSDobjxJa65wtoKqRRKJXhatrgFwJskwWtSELH12P63ZH2D0cotbuNkfkUoJF3Rxyd9nBNgVq8+N18TiUwYDYfs7O9TFk2wennyBcZVgTby4K2hXF1iozZYkCD1Id1+n53dUG7aes9quWR6dcnJs2cU5ZvVKHNuxfTqKYvFKSrro/JdjAFEF3z1Rr/xdWE9lHXD1eSS45PnXE3OWZULiqWj/Kq6I18Tw51dfvDDT3jw8CH9fh+dBDSxYYfaNTixhaw2QLdlgKHtUlvz/w0o9pGhaik6T8DA3t2UU2wY5vjl8SkwUrfNwn9d9IGeDKDI1QHILlwo8/yqI2kBy/alyCXcG8PDPdjflXQyQaoFzonA7tWesoImU/T7Cudr5iuztgRzTUWSQrcL1GAKWFRh+VzjSMUSYTxT5xCVwa4M9cohpWS8v49IR2TK0c1Bzr++mThgxu2vH1kf2PEmMrMqAl6xNTavE4pdAK5JEkBoeywLCccxoa6poKjgoQ263b6NkgwdtMDIAEZXy+BgUZRQVwEXdrvh0dSBsGuqoBlWErp9R94rQQfys5MHyUYvD/uxHtIepF3QXdAdSZp50syTpBadrTA6HGNdb5INXcsWvyUpJ4GjjiSLzh2CYNcWzpunbhyzJkxu2rW5djTxgDU1tqmxxmINCB00sEppkjQhSz1ZliPwJFlCojXaJaQ6JU0y+v0hCEjSFJ2GantCJwiRhKNz4b5GKaTO0WmKzjVoRzGZcXF+yl/+9Kf8F//6v+BnP/sZj58+YTKdUjf12qtdRVlHt9Pl8OCQjz/6hB//7u/zN/7m3+K3fvy7dAZj0qxHW8whkEpvLt6WQpAnijwJ2gy/BsY374LAN0rnaUszhhUPj1B+7Wfs5bbFmkBZgQas9FjpQ8XGLEEmGSG1ZYUvp1TTGWa5QozNmr0O5ZCjAdzWqjbr+yJglK+6X/1LD3EjD+QGKH7LbrZcLkmtIc1zSKBYzZhPznj++AtOnn3xhs5N74IYMax786bBXUou1Oc4Y9m9/z6D/SN2948QCK56Xcrjn8HkMdg53/VY9E3j1hjjFvsGYByYYy88XrjNcO9d0EB5gfKhEXrbYG1JXV7hm0uC+8QTvo4pFKJPmgzQKhjzt6WJ86wDogdes5njl3g3x7kGKRVKpVgbb4Q4kEg2colrOuktQBxAPht2uM1AWQv7b2em+KpQJ
OyIEZ2sj1U5RjrelFFNdUJ/0Gd3b4+TF2ecnTxncvVLvmq493iSvEN/OGK8u4tSirppWEynXJ6ccXH2HP/GSyQOXImtyqBjKh0i3QGRg0+5zUXpxjiKquLy6oLjk2dMJheUq4pyGTH4G4SQkoN7h/zwhz/kwYOHdLt9pFRhEI9Ads1KxNJJTrJBtrBhesWmrbFmit0mwc63gFesQd82MG4/E93zt8A1OGeD3Og77Is6hEKi2kG9gnIFZRWsw151GJoAorMMigYqGwpT7A7h/j7cH2t2BjmJtmjtETIkqMwWNVdNjUokg3xAVS15/HRBbYi6V08igvuCBabTIBVIZcgZUTTYZoErLPUMyml4dFLLR791xfgwIUs8/Z5AnfuXTO1fF7d9qg1hmb+O0gfdziLaSx+bWNuOhAjEoFJhE+dhZoJ+uCjCBKUug1fxMJI+688E90ZMHRRukyksloH1b7u5NA2g+eo8XFvnwnf1Gug5yDqQ5tDrwqAf3q8NOA3pEJKhRnUzVCf0Zc4V1MZT1lG60IRHExMNpSK6PbzdeVTAwd6Abp7gvUdKiVprUQzLxZLmvGJhN3KL9lo6oDE1xjTRYcIhhQMhQ4ELoVGJIrsxLiqlEYmg0+kBYZWxLe/mhcQjMAaMEeBispzQCJkiVYZQEkPFdHrF489+yR/+03/C//cf/xNOLi83q0lb0WAo64rZYsbx2TE//9XP+eUvfk5VlnQ7Xe49+JDxXgrRli7UDnhzYKykoJNI8kRGYCxxamtGth4aRXRlC7/ZRa4s5tghdZRVxLKNXgSArIwgdYSS6EpQJZpRt0PaG1KuzgGHNwVmscKtCqS1JGsmOjpktNIRGU1kt4Dx9uj9MpTfPK/lFFEWc/3Mi+sbvkVMry7JshScwaxSppMLzk9f8Pzx51TF+dt/wa1GQ5hGllAbzLHholqi04zx/iHjnT06/TG90S7ngzGzZ3u447+E5jftd4S4peQ7GQFkbGhxhthmvRprqOuGuiyxi3DiVN6l0+1g6hX1coJdPgfza+AvCUU5vjq8X7IqV2S2IdUCmWi01HTyAUofYJsx13yPrcdZECiUVGs/Wbm+/zYln68l2631SKxlIFJu5BeilWG0Hd07CUmHnI5I8KZhsahY2vKNgah3DikkaZaTd7okac43AfTGQ+M8Vd1gmiWzyZRf/exn/PLnf8b81qrTODBzPBrcFbet1Jwvp8zmEy4nF1xenjGbTSmWFvM6+vIbRJrlHD044uGjR+zs7pEkybXecwNm478R62SP9jXwm9W8gHbx62pPLoDi9b5EZCsIc4oIil0UMfr1Kotbs8TORTvE5rvNCu4SlGhlEZjilYGFf/10TgOjLoz3A4hzVpAn0M894yGMcknmG+plQ9WECl5SCKq5o7j0rOoCp2ouJ57WWa2Xw/6OZHek6GkJeUMvdTQVoUxx1EI2IqEwjunCsziH1SUMUti/VzPcK0kzxWCUkqgqAO7vIVyUULTV6dqCXs4FDbHTIcHNRg2yJwBpu9XUvGFdTMO5YOG28sFSzcf+UGvI0lCeGQfNCmaXQZ4i48TFNgFUFwuYLqC0oRdJLLACkUCnA8MejMcwHIV86qUJ1nGFglobjLY0ssBbh7UBOBcm9Ddtol/rxCFE2Mc3kHp/ZRjglyczUhkmqjLmk4h4kivjmdiQoHdzjcUBq7qiriusDQC5LEqqqozJdwlKq/W42KZ0rccNKdeWpsgI0qRESI2SGUKmeDQORfBKTvBCUqxWXM1O+ZN/9RP++f/vP+Wf/rN/zvHFxTfutqqm4uef/Yz6H9ZcXVzxt/7gv8bv/1t/m16qEPUC7xo++sHffuNzqrWi10no5UmQHrSgVm6Y2HZlLOTkSZyXkaQCqQUqkahEIhMVuLS4DyclCkHSTsq0punk7O3ssHf/iOOyxs4VSb7PqNulpxS5FGQyeDuj2xXeVvIh14AYWLvVvQ4Yb479xvPrbCpuAeudnx6DNbxoVjhXs
ipXLOslpWlb5W9atGVqHNgaNzHMzh9QHE3pjUNiqR/t4Mz7OGuZVQWc1YScst+suDVgDKzBY8ustgkhtqmpV1PM1RUszsGvMKt7FP4AW87xi+fgPwV+TvBa+Aatyl8xX56Tjh7QkeCVRsiELO+Td++znB4BJ4QGpMBJvJEIoQNrHIHGWp+3BsYtk9z6LrNmjzeOFWIDitkCx++oJHRKRocULTwrs2DGivolU6NvHtZbvBMoldDpDRjv3OP4+QBrr77yc4UJnp6z6YzZ5IrjZ8/4xc//jMXi+I2O47Xha7AzXl3Q5e1iMr1gMj3nanLG5dUF89mCcukDWHiTEILR3i4PHj7k/oMjRqNRZIsjAI6xyXde6yjWbHHcDS2IDnIHdwMQtyA57MNHLeI6oSdaR7WfIz6cC1WHnLNY46ibiu8q+S5jq5BpLCFcsFmeflVoAf0eHOwrOp0+adJHuhppCzpJTVc5RG1ZXXgWM1DCoSXM5zC9gkkBU2uZ2o0G92AMR/cG7PQ1qW9IhEbf8xjTMJk1rCogU/i8R7HwTOqKqwUsDJQOFnOHsSVpnjEYd0l1xfL7AsZ+40gRLzeOUN5Zy0D6ubZX9xFEr+s1x9YXJ1QtwzyTQW/sovGykpAmkGdB1619sF6bL2FmA5DWOWCi7/AC5jZMYVsXjqqGtA7FQfaH8PAB3H8ENoXzCk5W8Hwe3CiaymMKH/yQo/9x5cLB5klg9J3cWJ86//a9rAdeNNdpPXHj/a+KVV1T1QWNbWhM0PteXFzirF3rjDf5Nn4rqVuuVxyFkME2VCl0kpJmOb3umF5vhEq6iCQPeTAqwXvHfDrhya8/55//Z/+Uf/Sf/mMuZ99euma95dPHv+LyP75kOS/Y7Y/Z7aeI1QXelvB33xwYJyoC445e93ZOEKWG21xBKActvMQ6gdFRKqEDKNapRmodqt9JjxMCh0Q6SRLHXp0k+G6P/d09jh4+wjnJ5fMeo50jDkZjBllGN1FkSiAiwywiAbZJqt+wu8EkQGzsnL9RvK4nFbcDjE9e0NgZxs0i2fFXJeIyj/GUl2csJhekaUZnqMizlMF4l8YYiuWSZlnC8pfcfi7R28WtAOO2fcktr0CBw9kaU69YXp1hLp9BdUpQ3jmwKVLs4hOB1Qk029rgbzLqXFFN/pjTxlKUhv29h+yP9hCiQek2Y/J6OBc0wVKqqMEUCLmlOeLG87oD27Jk2wLF163e5Dsr8JGQkEgVJhm2oqF5q/vOe3DW4wwMh2N+/Lu/R1Ou+OyzP8Ta5Ws+ZZlPTnn65AnW1Jw8f8bxs1+zWJy+xZG87gBN8Hh6B3Fy/oSryxMm0zMWiymzScEbSqMB6PT7PHr/Qw4PjxiNQkEPhIi+xe06qr2mXvMugl7ncNauC2+4mPkn8NFOcOuL/IZ5bsukRkOKNRscHhbvLN5bvLVYFyodWmsxxlAW5TVA/q5Csrmb2wHREBi4113ZnOBasVzBk2eWbreg1xWk0pKIGtu1ZLlAOlgt4OoMaAJr2tQBvHm3EVANgJ6G+13o+QpZVDR1g9CQ9UNmfqIN1J7FynFVFpw8a3j2LPjvaqDjYLGA6WxFpQVKpdGe8vtZ/mtZYC8iOI5AUhMkBsRiG60ErJVPbDtYCKIbBFCIcD1KAhitaqjKAGgSD90MdkYwHkLvPJwX4UMCXkE4785AVwYJTDeFfgf6w+CL/MF78MH7MDqAfAQzH5wuLms4W8HpfFNKWkQG3BOcNaSI1ffchjGG8BtvWvbeyrn9FtvWxlCbGmMqjClZLGdcXJxS1zWi9dBrb39axV3r2y83K5BKopOELM3IOz3cnidJUnKdEkwaBAjPYrngV7/8FX/8R/+MP/yjf/lGoHjzOz0Xs3P+5R/9Mx4dPeKjeyOGsiARb5fRqJQgTxWdTEUZSCvr2SSmh8mNC64ezmOcoraBvVVaBOs6rVCpCsBYuOBOIQIwThOFylKwOdr22BkN2ZmPKZcFvraMR2OG/Q7dT
JMngkw5pBJ4LdiuUrtOno/SNsHGcvmbA+Ov2HJtnfnm4azBuuqvGCjeDoOvC+rVnKZakpscoRK0UqR5l3SwTzN6CMUFuOf8JkkqbgkYt5KDzXIFCOpyRTE/oT7/HJq/JDC4GhgBhk6eodMuVS+nmGTYZQfcr4Av+PoldAP+p5jFGVerE8rp75N8/GNcs8TYGaG731KFeRuqN60BbEg1kBGARJ74BiiWG5Y4sgAyapNknBVeKxf9rkpCyyiEFOAa89bNxyMxxlNXNfcf3OMP/s7f4f7BPv/Pfzjji8f/ilcv03ia8oQnn1suTl8wnzz7ChD9ttHCqNuPF8efMpteMJuds1rNWU7tmyf2CsH+4REff/xDDg/v08l7KBlkFBv3h9ABe0nsLD3OWqxtaJqGpq5pmmbjPxrbWpKGJB6lwnJqENsRdcStlCJKKJwLpTydxRqDs+2joWkMxtQYE4BxUSy+kyIfsXgpLVmZsPGKedXw2xXwwS4kGj47DZpWKWq0rMkUDDJ4cAgqT+lowbJomE0DkLM+gOo+AQw3BKZ6V8LeCHYUiMuSZUwSSzKodwp0V9M0CmM8Z5eWk6sVz09gHtvDTvwN8yWcnBlMt6C2+jdjFVNE9tiCaCIwFiA0m0JZYou1i+8roqY6vmYJxTdKC8saZgvoZCATwMMgD2zvLLK7/lk450UdKroqFVwsjnZgMAhWertjuLcf3EMOj2DvABYCzir4YgK/eA5fXsD5HOZl8JlO03BcCYGxbouIbBcysa2mwW/ImO8rrPc0JpQIb5qCspyzWFxRFEVYvfFuLddDuLCCqlSoHNpKDaVEtcA4y+n1BnQ6Hex4jJAOnUqkBovh/PKCn/zkJ/wn/+gf8fjF81v5DU9Pv+Qf/+P/D1cfPeSTwx6jztvBAakEWSLItFwD4nbpuMUF4ZT4uAQAtVPIRuIJUgydKJJUoRIN0kW2WGB9eC3PUxJyhLaksmE469Lv5vQ6OUU3o9tNyXNNngpyDR3tEFoidGCOW2JrI/kM6H0tpbiF1QjYXiF88+h2OvhyQWW/euVUvPQQ194RKDwCS1x9ZOtGeqehwjU0BaZeYZouWINtgp496/Qpdw6xkyNYTQgJfL8ZcWvAuJUftBIK7zxVsaC+egbNLwgyiRVhyBoiVEq306Pb62DckGWnz3w6pp7u4atd8J8RvIy/ahZrgefg5hQXpzzhEoGjXPwSeMbmwguQKTpJkFIFX0kRsp/aTNXWQq4F+c46jG3w3gVzdK1J01A9TxCSB0L7a4F00Im9kx5bgPWGyjU02LduzmFAdbjGkirNaNzHPHrA0dEjnjz9C6x7HafnqKtz6mrGb9rSxzeN4+PPWS5nzGaXFKuKN6hHso4kyzl68Ij33/+QnZ1gGyiQmwpTAFiMNRhbU5YrqrIIhvxNg6lr6qbG1A3GGpxzKB0GgDTLyPKMLMvJsg5JkqGkRoq4TBnBdwDFwdzfGIM1DaYJz9bWmCYAcGNMBMbFdzIvd0RLrvZcEQsqvGJbAYzT4FrQODCnkTWMwK+yMK8DeJOqYZgLzi/gqgpdaUMAsd34UIQl/fsd2BuG9t5MgmuCKUB0wHYA4SkryWwBT57Ds+nGqxbCMWiCA8RiCY6G2le477GcaVTUAOEcWxsY21A+GESU+LV1BoJGk7XmExGvidholVcGphVcLqE7Cecvy0ICXiJhbyeA48UyfO7yIoBkRLBa29sPMon9/VCMY28Hjg4COO4MQfdgcglPz+HTM/jsDJ5dBFeQykBmw+pCpsArQnWzyDaaKBUysahJzHH73oHxvK758tlThsNdqrri9OSU58+eU5ZlXA1qZU8ugjBQSsaJ7jYwVmitydKMXrcfS1TD/qFlT2u8FhSu4unTJ/zkX/0xv/z1r7C3ZLloXMNffPaniMUZffch7A7ean9SChItyZIAdEXUUrZE0zpcXPawDmUkIgn0v05kYIyT8AiloVtgLNEoOk6TihSZGBI69LoZ3TShk
2nyTNPtpAz6OYNBRi8X5MohZKhXLqWKhbg2hNY6PIHp5ya4jK+sNUjfLALsfLtGurO3i7ooOF/OvrLPbsuzp0i0CFhFRFlnkF75sOLiZVQBGxwWj+HNE+a/SSQoLL6a0yyvKBMJKqVqDLYySAE662A7I1iNCWtQvwmsw61rjFtgGWYmzWIC5a+BT9ksoCpgnzTfZdAdkWZdjPVoOSTvHFCMH1IsP6ZePsXMfwbmU0Khjq9iEOfg/zXz8882/762fYrO9xgMhyQ62uFE67gggWo1xJtM1bqsmUwmVFVFlmV0/v/s/XmcZdtW14l+x1ztbqOPjOxOf85tuPdyEUQQEaRsEFERrZJSOsW2qnxqiU8Rsa4Ipe9pSanYYFeowLMpxbKKHvXSCBcuwu3Ouff0ebLP6GN3q51zvj/mWhErIyObkxkRGZln/z6flTtj7dWOPddcvznmb4zRbtHv9QkC30UvV3rRvYC8vaIlhw1jShKdkpBx77RmN+3BrcezjkgpLJPhDhvXLnH18ltMkgmeF92BGMNubuhHFKvrl0iSCcPRNumkeKBc4/3ZBc6eOcfKqdN0Oz2MFUpdsZLqZViWJcPxNtvbG1y9cpFrVy+TJRNMWeyWJ7dVOWcLLil9FOCFIUEY0u3PsrC4zOzMAt1Ojzju7HqJS+08xGVZUpSO/BZ5QVkWjhiX7v9lUaK1k1PkeXIsUgqN0xIb3BCqwJHig+ciXM7cSeLShAXKkeH922xMIH3N0lWWvHDHqz3QCuctPgWcUTAbw8yMy3W8U6Uk831HlOMetPsBGQGj9YRLVyyXd27NOezhCFoQOq9mqi1pMqE8ynfJXaDLKoevdiTRVHJyU+mpreC6Pq+ydUUwd+WU4jzIWipPbAnbKVwfOo+7LSFLXKnoOHJNJQ5dCWlrnP772nXY3HJZK+KO8wqfOw9LizBbyS7m+y4v8djAVgIX1uGTF91swNrQBeCVFqgCBbMcCCr+IZX0w1YBmMYFEJZlVbbZ43AqtT0AJqXmv77yGpevrbE0O0eSpIzHY4w17L0FqnIPlcfUqzzEu9JDpZyUQrmA8CiMmHnrMnOLb/DsC+/jhc/SdGfnSU3JZz7zEp955SXKBynNeQAynXFh/RrvGi7Tm3kwYiwCvueW3aqyVQ3xXY+xxellDFgRJHDE2AJ+IPiBIvCd3liqyFJHjAVfhJbxCMRz1WtLjyhQBL57tgNfaLcD5hd6LC72iToBgVdiKTEalPj4Xoin6sBIUxHISqJmG1RW9uRIdWpAWwdM32P3+aC97JnzT9DxFcmb64zs7TOiGxyb8VVMEAQuVShVzvqypDRlVdiJipnpXXmGd5trrr3PVRjDfcBDVEBAhhlvkCmDycbghRTGZdUpC4vN0oqOtcHE3F5od7w4pMp3FRGgzt6gnGejGOMKdezs2yMmCGMCFRBIjHiuw4iU0O0toU+dIy2eY3P1KcY3XoXRLwMvc2d5hcFVuzsIbXpzp5idnSMMospj7FVSCoNSdjfLhEsIYJhMxqzeuM54PKbb6zEzM+OuMQogCKopGW+PFKtau/TA5rwFpS0oyRvN+e4QNymJ3S2hsAdtciaTETpP2N64wRuvvMzFty6wvb2FqHspYnvyK9fcDusb18nSjMFgQDa5/9GpF7ZYOnWalZXTLC4u02p1sEZwhe5sJYmAvMzZ3tniypW3eOnFT/Dypz9FMhqC0XgibqDlebveoyAKCKKQIIrwo5i5+UWyLKUsXX5ppVx6KecBLpxHuKwlGaWTZ5QFpiwcGS7dUuuPi+JeSoIeDix74ZN3s3RmXI7cdteVAZ4kB+8z1jDRe8UY6knBrDqXApZCOD0LQey8oqMcbAqtWVich9ZsgN/psTm2TEYT1oa3kmLBeTF7EfRmodODIqUq+nFf5nhwWEeMdVV5rvYeu+DLvV+1Hg6bWuNaeYoNjfpVtqruVsCWdiK3qASTuYC6XsulWGu3nLRiputkE3Xu4rltZ99OD06tw
NlzsLjgpBTddkWqFWxuwtUdeGMVPnMVLm26QY+ur7f2ChfO5l59vVUGiroEdlHdt1/xGO9owjneFhJtuLC9xcXtrZueKDngs17U/vVSBYThHDS+d4l21GVrOCHqzTA7GTOYjPnMSy+yub12JPexU2Rc2dyi3es+0HGUcoOW0N+TIypP3eSddZU6Heu04jJRqMBFIriMFM7r7NfHEMGIwljBRxGZWikkmMDlMg/86jMQ2q2QufkuCwtdxAMrBaV2UjNPIsJQ4SvB2/Ucu1gQi6U5ESRid7XILvWlG6255+wAcnzQe/8Bu9nZ+TlUmTF7bZZJsnrbPtQRXqkKo0hVKt5gypJSF5S2oMA0c0bchGpy6RZJBggaoaSu6CeVMMP1vNldEgAEnsXXCWa4QVak5OMBeCFaPEqrKI3C5hpM7jwi5ta4sIeFw5FS1KlPaimFAqxX5Rjdf4oxcIHJzhIbN+boz54ijPsU2jBMhmRlCiFYVaCNgnAB5GlcktnruNdflZz0nqBQwdOcPvMMywvLtOM2nh+irA9ofKXxRFeBKZY8z0mShK2NVa5deZPJaMj80il8T+N5GqtTWu0OrbhFHMUEfoQfCJ7IA+fWvB2KfQOCXa3WHRB4LeLWDKVo0mQLU+7pdywFN268yerVtyhLzS9+5L9wY+0ixnjowygpdYIxHIxIk5StjZTiASRNnV6fU8unmJ2dp9Pp4vuhIyi6ynVdRXTkacbG2hpvvfUmb7zxKm+8/hrZJEGsK50axz7tdkyr1SaOIrJcoRKPTr9PvxVS6pzNzXWsUXj4KAmx1qK1doS4yCiKnLJwRLkonXfY1kF4xmB0pS0Tw14liOPBvT6lBkBB1Ib5Oad73U5vbecRMI/TJAfiPJJjHFHOcKG9gwzaO+BNXIaG3IOgB/1lxfITc0SdLqkRbDIg1zfLJ2rEwJMxPPcMLD2jiBdj1I2c9ZGbAnxYcSJl7soz58alNasJpoZKg16/KB3/UGrPK1RWkoS8esTzwi0WaBtQBZgU8pHL99xtOZLbabnUbb7v9MNRDGcK50mP285LPDfn8hS3YpdreFTAIIXXrsOnLsFr15yneJhRvbjdNQjs5q/1lfMOG1uV+LaOEBflnqfcrziJ9/DULLdgf/N5W5dmd/+pRjkZFBnFp36F5TNPsjQYcGN9jVdefpGiPOw6ig6ltXzyrYvc2LydY+ne4KQUbtkNLtwNbnf3aGu3ccU5fV/hR57LJe0rPE/wPYvn2TrFc6WVr8pHW43oAqNdqjwRTRQoWnFApx3R7cX0ezG9TkBZZuR56mbQdIkvhrAVEHg+Ino3k5SIolFjtLoZ2T33TRmD1G6oyM04AmK8tb7KZDjAIFRjxQNhgRyDLsfYUqqB8d7d3ElNXJPlJvZu5SaL3LTP3f1/mrLYJtc5UqT4+QSJxlgvROOIcaaFojQw2oZyk4MrHD8cHJKUohpfVCM8z8ONK/wQ94pp/qwaeAuje2yudtGFZmbekuQ5G+uXyLMdJPSQ0HNTAdqCmgf9LuAczrczwGmIN7n7q7dHu/MMpxZP0+v08apM9x61l9tUonuNtSV5OmK4s8PG+nWGW5cw5ZDBjtDpBFiTkaVDZvqzyNw8QaAQCfCURWyJtcU9XM+DI2SvJuDBEOKgw9z8AlG/zfpGzOa112k+HqPRKtcvv8loNOHq9ZfRJsU196ru+Qkv2Xi/mIwnjEcZ+Q4P5Phut1rMzszQbnfw/RARVWlPTSXJceQ4z1M2N9e5cuUiV65cZLwz2v3hCiDLNKXOUcrg+1Dm2lUFa8dEUYCnYDjYpswMcdQhDNqIuLLWeZFTFClFWbgKXLqk1Ho33dtuD25tIw/3g1rwaGBxHkYvgv6c+39x3aVMA9eDxMD5EM70oBs5ycVwBNsTR8TG1pHkLQve2HkfVQQsKNqLAfPnljj11JOowGdzZwfZTpxWQtlbHqa+wBMr8Oy7fZaeWSKYnSGVTS6vbuKr8qFJ4YpyjxiXe
i+Fmdg977Gh8hbrKpCtkh5ou1dSWldeWlvCjoWgBDJn99KH3tjlf+62nfd4pgfzM9Dtwey8C2AMQpfWLYqc3CKIHHnODGxN4PIWfOYKfOoCvLkOW2NXtKX5rq2Jsa9Ae3vp6OquP82rgiBV3uXAc+TqJBHjo8DqaIM33nyNcTLhwqULXLryJkc5GtvMUrayB4sbUZ4QBoowEOoASed0rSUL7GbWEXGzata6wDprAc+lr/M88JTBqwZ1tSzDagCNMS5+wugMJYYgUMRRQDsOXLq4Tkg79sgSjTEpeZ5iywIVKkLpErokyi5WqBo87hYmralgNYtcy4vraze1t/h2xHh34wO2eZtYu3KZPEvIdHkXalwT3EM46T0c4V7PYtDkZghJRlmMUWkLqwJKPLRVFJqqHv2QOyfxPH4cosa4OUoUBB/lBzgfz35X6gT4GHm+xvrqc2xvP4MxGbp4DVjDpnUOWJ9aVu5eiz2Uv4JSOWUu3Dn5U42EyegCr3zmY8wurBPFswRhjAp8VxwAjdgSXWQUWcZoOGQwGLC9veVGqX6XwBfybExZTBgNPZQYZme7KCkpTUKZTEjGY67feAujj37Uc3f6rXZHjUEQ4Pu3TlGUOmNt9SppmqNN7ZFuTn4/nsgTQz60t86dv01sr9/g4uuvsry4RK/Tpz+7QKszg+f5KCV4CoxnKcqcosgwpiSKQ4KWj85LN23sCVEktNoBQRhiEUbjlPE4ozezQLczQ6vdYXtzhyyfMB7tsBO3UMo9tqUuKMt8N6NFsyS0II0Baz1oBc8LuLc5h+OFxmlR7aCqbhbD8jzMJK4qWyeA+RaszLgCIFXVWbR4FDZmPBSGAyiGOcWoYF1bOu2A3kKXuWdPM//sWU6dWWT21BzWluj2BqdUh3eZmDJe5eOvJzflJhZAAvDaLdrzK3ROnWYxn+PUoMXMG5fY3no4zNjYvcp3pXaEUSpZhWcr/bBht36MZo9IGqleoHWGB02dbpSRB6G4CoUa6CbQDiqvcctVytM48trzXDU7X7ksIr6qPHsVWR9lLtDu5WvwylV44wasjpyO2NaTfZbd4EDFXmBgHb+sKx1iWrr96krmGncfJ0FKcZQw1jAej9hcX+PixTcZJvvliIePB+0R6sIwfvXbKPbIZ01EbdX1KAQxgvUUptpBPA/xFOK5vsuTanBbBcvhCdZXaN/lcFfKpXcLAw8RQ6kziiKhKBOMyfAoiDyN9V1BkZYPsWcJ1F4ZZxd871S6jsU7YuykIOxKXJAqLabdI/c3Yb/D4RC62GtXX6Y0msxkhxBy/3DgYkxyN9VVDrlVYAR7uYtODg6nJLS3J+GuczOigooYhxzseM+BN7H2KkX2Mo7urXNrYJfgyPEK8BxR6xxx7LG9voa19yLWzjHlJ7lyoeT6lbOE7SWiTo+wHRMGgXtYS0MyGpOOR+TphLJIQEI6/Xk6nQ5KhDKfkGYZWpd0uxGizqA8TVGOmYzHrN24zuXLL2Pt0RPLu7+SPYqyIE1T4jzHlLdqgSyW7e3VqurQyWqURwmduuVBkaYJr778EnEY43kBZ594jlNnFVEUVXphi+cZ8jzB2pLA95hfmAMpMHmO2KrT98H3FJ4XURaW4TBjMs5JJppue5Zuv8NkNCZNMiaTHXZ2fHw/wvMCbFXiuU4N1SxSsxsMqvY0fgIHDpIeJurWp4G1sctz2+s6TevKeVjsxSz2uyzNdOn4hshMkCIhT3OsUXQWz9JZeJLC9pikIVfeXOXlX3mZzY0h3vIKC08+yakPfhZPf+Dd9Gdj4sCg8yHS28SbWaGz8jRz568wLn+RT74+3n22SluRMhvgdZboLj3NIsucLWZY/sw2l7Z3eBjJKUzl8S3KPZJpjVtncETTwO4cqW5KD3xHkmvSjMFlvbQun/EAJ6coCqevbgl0oooYF1AoyBUU9cy/dp5qhSMvBkdod0Zw8QZ8+hK8eg0ubrqsImV9XXV3FLjF0pB9eI4YG+2uI8srQ
u2koI4YN+Qhjy8sZVEwGOywMdw4cEr7pEFV2l3fc3rcPe00N6VxBVBWOXmFVRjrVKviqarUoSOpvlgnUaxlip5AoCi1D9YjCxRB6BGGCtBk2YRJMiRJh+TFmJCcVkWKA08RhYrYFzzlAtOstVUmiipnvFK7fG1X/y17wZNaLNqYqvx2/XvsuYhv+oUO4ecalA+g9TuROByP9nHgENO17f3fTUF4eHEMXh90xO0D5zLg4h2O7hQ0TjZxnWyiKFKNta/huvK7Xh0u6/0qOi9I8g2SQYx4vsuvaAusKbG6mqujjRf06PQ6zC/0CYKQ7a1thsMhIoo4jqrAAIM2rizocLTN+sZ1trevcjIyNhSk5SblxoRxsk2eH/yAFXm+lzT/HQIxAXLfiYtvxmgy4hOf/GV2RiOeeOYSZ594hjCMsBjnDQ4VeTHh+vXLTJIx1mh836MoPXRRYrH4ClJdkkwmTIYFk1GGtXD18jVef/1NTp1aZDQaYo0lzxOybOK0btalGTRVBFbt1VBK4SlXxMZT3m5xGlENYiyyJ/J8yGheRVk6p0JROs/TudMBZ586zflTy5xenCcWTTncpBgPyMYJ1nrMnHmG2XPvwgYLpLqL7V7lyo4lbW3SOnOOztPPMPP0e5h76t3EsUA5hrxLO+xi4zmKaIde4tHqfgaR8e4FGarCHtspo1HGjFF0+jOcfsLn/FMLfPq1HUaHW638nmCgSgfI3ojCcFPmCQW7VfEwe5kdROPyHNf7NkLOS+tcDKry+ka5C8ZrB84DnFun1y5lLyAuSaGTuEC7MK7iZxSsDuDimtMXX9qEzVHloa7KS+92kVX1F1F1JgPqaugu+K5aGhXO0eKu72S03qOFsS5Y1tiTM8V8J3gIoVIESsDIXlBXNVu15yusBu/WeYytVc4LawzWloCqJJkKX+rCHE5SYX0PCQMsEaGOCQPfBXmpEqHAmgxTTjDFCKtTpEwJLfjiESlN6Fk8z5WYtsbW02kunVztQKgvtEorZ63rq5UCqWQYBzmanDdc6pvc+/8UbwtHINd+2zgkYqwa/6dqZIqg1YbWAoz63BuJvRMS4AJGX8DoulTA7eDjfB51yv86hj0DJmAKrCmAFLsbzBcAMyjv3Zx78gkWlpbwvYDhcMRotEE6WifunmJh8TT9fg8RQ5YnpMmE8XiH4WCLNNni4eXha2qQLFBS6hHl8A7SDhHsYx5stx+BF+OpnPKQ9EzjyYhPv/Qx3nzjVfpzCyAKrTXKU8StiCgKCCJBlGY8GTAcbTMZpRTjElEQtRS6tCQjc1PTGQ+HfPTnP8Lpcyv0ZzrMzs6gyxxrSwRTTU8Knr05VaLyFJ7y8DwfT1X5LBvpkjzPP4FCigYs5BMY7oCxAd2ZeeaWTzF/apHQapIwII9aRJ0SJGDm1Hm6S+cgWiSiz1zWYun6COlts3D6NLNPnKd1agWv38d6Bp1aEA/f7+DRZrxRsLYDO2N1iwd4OIDVKymLV6/RXTlLPLfIyulZnnxmgeX5C4yuHf+zvnuN9Y9Yk0ivcrY5eSeZZa/MeSWbsCVYr5KqVJrjemobC7k4jbYG0gL8CSSe0wWXgPahECflSAsnb2kFrjhHWKW0kwCujeGtNXhrs6psV8/Q1GUQC/YmAnFe4jBw16+rx1JZN9cYijtn2ST6+thjSI8ddTkMpyBQu176kwxPhEgUkSin0bW157VBjCsZhZNeKqxSWM9DFwU6K9zMV1iVjFY+SgV4qsqEUBNjQsSDEk0U+gRKE3gloe8+PZtBMcKk2+hshPJDwqhDKB0CZfB8x09sVZrPVtcCqgrIA2ssutSUeUmZF2ijCVs+QTuoay1xs1h+TxpgbS1hezBiPIdLdfl4ixtvRt0t1NashyBvJ93CYeBQS0K7/+81+iju4s+sUE7OgrnGg4mrNXeWTfg4IrwI3hlUsIwXdFEqpMwmmHKI1SNczuNa6J2yV20lAzJEztLpB7Q7PuPRhMFggyzZALbx1DLdX
pe4FWOsIc9S0jQhz1xKNM/zH6K/+M7i/IMwTsaU+p1FjJeXzhAFbVavXafID8flZ60hSYYkya1lWkUJUTemP9smSSck45QysXsBeHXC3gN+up3NbSaTEctnFlDK0Ol06VWFQCy1l1iqACblUg56exUaXZXGWulXe0IeDS9GlkCegohPELVo9fouaMYavCDCFhalIqL+EirqYYIOhhZ+Z4bu8hmKoEtncYFwbgaigEyXFMZQlhZlfUI/IrMFN7Yy3ri0yerm+KbBggfYAoqRYbK9zWR7g85sn/m5PmfPzvH0UxHX1hKSY45RNbU2uPaiVhct4uIIvWo2uNCNSnG7O7t9re/IsbWNHrnyQmfO2YeyoCqNb6qrgL2KoOYZjMcQ+RBWulIvrPI9x7CewdVNV9kuS3A+jEqDDOwlnq7uQ6gq8ZkqGwVOp+rjtOSBaqR3q4IGbwlbeczgiSLwfUQJnvLuuWuviUWziRwXbFFg0gSytpNRVO1ojzPu5TK21pWtd1qegjxJGG8PyLIcghAvjmj3erR7PQLPc8fRGlNkWF0AGilzvDIjLFPatmQmUHQ88IoMPRpgR+tIukPQ6iBWU8Yt0nSEsm5wbCzo0lCWLj4DA55SBJ7Clpo8KciSjDRNMcYws9BnRs3gRx572Srczbk89LKbDsYaW0ncLG6I9/bRxjXz2+V/fxwR4Fyau057qv4AN54+Ln51uFKKZjUZK8TxDP35p9gefAAzvAJcPozTNeADs4h/Fi86Q9g5S9w7Rau7QLs1QxTE+MonzzKyNCVLR+TZkDzdIU+3SYcvY8qPsud9LtDlZVZvvMVguMnm6jrpaBujE8DD8z38KqtFWWi00aRJDkYxP79MNnmKKxdDHk5VuLf/hh7nJyc9ynHhc37V5zMej7h25QqXL13i+pXL6ENOmt+ENZZ0kJCnKaa0t/Zwd+nxirTkxtU1lLK0Wj1mZhYJfI3vV3TXzVXu5g3dVbwZg7ZOHlSp6JxGs3zwkuLHgbKA4XrG9tom45UlSmOIo5Cg00G8wEWB2QgtEaNRwUQPGBUJW9sJ1vOIOm3whSxP2NleBTXB85yrx1MBURBzY2vMi6+8xX/95RfZ3NrZPbfgQn3nQpiLISwTzGgTKRaJvRmWlzo8//wsb7yZ8Ob147WLrSfLmi4U5YilX2V3EAFfOyK7e0Ow62G2lfa4DmhrMigrUPhACBJBkTsiXFaZIvIURgNYi/ckmXiObAeRS9+WWFjfcXmHKXFvsyaRbRJk7bbTVbaJOvNEO6g0x7XMopaPGGcD/Sg04vuEAHNxh263TZJmu6nO7rZPC+gKtCsdeGLcPO29vBn2/J33j3S4w86Na8TWEofhXsGs6gS2ms0w1pKXGaVOUeJSpo62trh28QpbG1vk1iBhyKlz51g5e44oiBwxLgpsniBljo/G5Al24zrRYIu5ssCGEX3lE4wT8vVN9M4aJt0m7PUISoM1gslKTBBj8ShKw3ickEwSyjzDFAWR7xGHIbbUJKMJ6SghTVKUp3jimad40nuadr/jquh6CoW4vPJao+uoVwt5XpBlLjCaZ2buy541IY6r/z/uLiyFm99vs6fyanZROY8oMd4L8nGPWRT28GZjsqUR42TN1WNl4zBOiSuT9H5ac+/j9JPvZmbxNN3+IlFrhiBo43sxgQrxxMNojS4K0mRAMt4mTbZJkx0uvhGwdeNlXNBfjXW21l8EYnS2hiO5XWABowvSLGGSTEDc1GuWloDP3GwXXZzj6qUQa4+GGHs8/g/HUeM973k/RZHz7NPPc/XaVf7LT/8Ml956FWuPdkxu8vt/k5epYe36FjP9VWZ6C+SZJY5zfN/H84QwDIiiCD8IqhyTbsZGodlLzO5QlMWJ0RffCcbCeEuzcW2NjVNzLJ+aR6QH1iBBgPJjxLbIjE86LhjllkGqyHND3GkRxDFhqFCeIUuHDDbHKM93IlsVoVTCW1dX+fTLr3Pj6uVGCe89kjHfg4UexKIhG
yHlhMArmZ/r8NTTp3ni/CYXb2THS9KqTBK7HYE4GYXvVR7jWmcs7BW/bBLj6lr1flJcJyf1XQAfAdjIyRbK1Om+y6o64dbQlYKuA/kKBYXnpBTtljvGoKg8uyV77sv9bqAq+M8UTi5hq+sKvcpb7LMbpCrSuM7jnlc9ZrREMdftoZSQF9k95ZZvA2d8oRP5hJFHqUvSoiRIYNPenlBIte+8r2h5D0YHxoMNLr/2GmaS0O108T0PqeqR10GhWqC0miQfk+VjAt/SCizb62tceO01rl25xiAZo5XiyeeeYzB8gVYUozSoMkfylEDntMTglxmTjVXs+g1aScG8gVZhYHtIWhQUw210tkNWGjyrSCYZQ9kkw0MbIc1ytrYH7OwMySZDynRCHPh0WjG2KEmGI9JxQpkXhGFEku0ggWF2fp643SYIfJdGzliyLKcsCsQqxAqTyYThcERRFHzJF7xwX/ZMqt8nxD0PQ06wBO4QoHD36XPzhNjDuOdDk1LsVbepPxW+F+J7XfqzT5DtvJ9ycwj2I+zJFx7kpKeYXfk8Vs69wKnTTxB3Z/DjFsqPXU10qSaRrZseEc8jT3M21tZJ0x08leMHLZAzYHfY6zpydPYSe3N/irqG92gQ8NrLGf2508wvrNBqdfFUQBxHhGGXOO5zlPHSU1L84Oi25zCmpBV1CIKYnQ8MKbXm2uU3jpwc3xb3IPpNRwVvvHaBwU6CYy2KIAxptWMWFxc4e/Ys/f4MWpcYYwmDiCiMnKZYXGU9BMri5OWnrnjYbgbtev5Gj+DKa9t4fIpsvMH84jztOKbT6tKJ54njeYzyEC+iHYZEcZd5FVKKB0q5QgGiQadYnZLmJeM0ZWtng9W1bT7z8qtcePXTmH2FE0JgNoSlU7B0yqM106UVt/BViLUeYdRhdvEUS6euEAU3mBxN3YUDIdUbw9aEt/KoRp6THJiqSlzZzIDUFOvVXVqtvNofYG8a/6+zRvgul/C2hknmvNG+8we4AirWSSCkBC+tsxBUeuiDcktabpJS2AJSU92ThtJzBT2iwGmWtTgvNyV7RPsxZgiJNVzdXKW0MEjGpPrODUyAjkA78AjEImWBsgYf90zF3J4Yt4EnWyHnVpbotDsPdN1ra1f40R/7Sc6snGGm2yMKAgJcZhzrKbSCHEtmCpJsRJqPaUVCv+2TDYfcuHyNG1evcXX1OuNszEsvfYLllRVCP0AZiLH0laHvwVyg6IimGI7IRmOsiiDsYxPNtgQUs318KVFeRFpAMRhxdecab97YZHMwZpLkDMcJO8Mhw9GQLBmh00mVcs4DrdF5gTWGQHl0Ol2url/ltUtvMrewSLfXJ4wiPCVobUiThCIrCL0Q3/MZ7gzZ3NgkTTO+4Wt/233Zc8zuWPURyudw/yiBNfaS8zYT/dZj4uPCoeUxrvU2sisoUngqRElMt71EsfQCW0WGHoyBT/BgNbEFPzjL+SefZ2n5PN3+AuL7rmSycZMOVkogch4m6yrrjEcjrl+/RpZs0+64BJpB5ymKyQRMswBGsyfXuLFaCTYlHe+QpRPyXFhYUPT7cwR+hzDsEoUd55F63FvwI4wodBM1cdwiilp81mdp4ijmP/3kkO2tG0dyTk+B57usC6bZtDyIOoLneWQTjc7tHdvOZJByaXixcvg68Z5SHjPz82RpxsrpFfIsRxtDr9uj15shCmMCP3ApFUUoy5OQNcVxrg6uE4xwHto2zjuYS+UU1bB+DXQxYTx4g4WlS8zPd5mbW2ZhNmemLwQtD78V0+kFxL0Zws4MKm6hAlfdijIlGWwx3t6kGGdkO0OuXbzCpz79Ci+++GmGm+s3XZcCZpQrb7x4RrFwepZwboGwM4MftLA2xI969OeWmV9aph2vMcmPb0ClcKnXCsBlIXGEOPCdxzgrXTU7XZNHaXw2hafNz9q7DDd5om9KI2/BlE5zXGeM2PUw1/tpl64U67pBz2OPyDaPDXtuoSpLxW5tpCrDRppBHkLQd
vplU/COEVtaYKgL0o1raHv3RG0e0BYh9BRiS0zh5FPK7v18txt7zweKc6cWeObJ8/S6vQe67rwY8vO/8DN0W0vMdvt0goBYeXi+wvoepQeJ1SSmYJKNyPIxvZbHfC9GypLx9pD1tRtsjjewGDa313jjjZerWWiLjzArsOIJp3zFvAJVGtCGMOoSzZ6mzIXSeGRFQX+uQztuU1jDYDTh4ptv8fFPfppLV68zSROKssAY3dAU3R7rWxtcW7vBi6+8TH9ukf7sHHHcQimF1ZrJ2BHjTtymFbbY3tzmxo3rpOmD8Jy9WNV3Cgxujr6ebArYi9k9TpfO4eQx9qFWKe0G4llDkQ8psm2S8Q5ZMsAahatelwKXgB3u73YtZf4qr734Ya5eOEvcnkf8AONB1O7Sn1tgpr/ITHeRuDVPGHgQ+HRaAWEgDHdGTEbbwBgv8gjbZ8hHF7l9pguLI/IdxJtjZuEM5594kqWls3Q7s3Q6XdqtkCzZQiSY8uITjDTLqswpgCi63T7LK6fo9npHRozrVFk3hdripsCDUIjCiCiCLC2Y7OR37KObU/5Yi9ElW2urfDxJeGNmBq01nudz9ux5nnjiKWZmZmnXniChmpZ9+C20NkedO2ZGYK4N3S6ELVexLlNuCXvQmrVEUUGRDthe02TbCdvxNv3ZM/TnEwIj+GFM3IoJvBjlKUxZkmUFk41NVi+/xaUrV3jr6lUuXLrE1YuXSdd38G1VSkgg9mCpDU8uwHNPK04/N8/8E08QzpwhnD1NNHcaP14kNBPCdknU6RFEbz/o9UHgua4MW+w5f6lsWZekN7rysMKenOKgn7xOnVa77GvU29bro+oEGbseYJuzN/cpjWNVbzSLS+22C2ksNfbrj5uiQnH3kXvV1xl7c8t+Y5/HGMU9zmBpYGIsaZYT+24GwZWnd5/7ig3efA5tyNOULEkIvQed7bSkWUaajdkeWrqBz0zoEwQe1lfkYhjpgklViMPolCT0yYcpSlvySc44uflardE3SdIL3ABNSlcl0xNBBT428EiMJk3GDHc2SFoereU+4fwcOk+RgcbXijiHVgplqjHVSM3cw5SdtZYsTciuXWNzY0zQ3sQPIpeBCzDaVcqb6c0y04HBIGVrkFPk7yRae3jQuFp4gut+6nH0ceFwPMaNMkR1mhlrNFm6xWBjjfFgk3y0g8lGoGaA94BZwjnON4At3MTB23lhb5IMf5Jk2MZJtkMQH/FX6C28izPnXyA+5zHfnqEdefheQL/Xot+P2VxP0cnrwAAvOEu7O0M+irl7ObSYxVPP8Py73sfTz7yP+bnThGEb3wsQSgY711DK5x2WAe2Rwng0QVSVtF1ZlPLotDtEcfvIzmlxU9siLhuAaXrqAD/wabU6WGu5VtwgH7/9LmAyGjIZ1VkxhCzJaLd7BEFIFEV4vucipU9I4zS4J76N82jNxHBqAVaWYWEJ+vNQxIosCNGBj/YDJknBYGvMeGeH7dEI0assndrGnEmILfTCAAmch0rKkjzNmOxssfbWZS68/Glefu0lXnn9LVY3J0xSQ8vA2bbTEfc7Qr/nZoEWlwLOnp1l5YnzzK08QTzzFGH/HKozj4m7JGaLIErxozYqPF6G5vuOGBuzVzq5llXUWTMt7OU5rr3FzUA8aeQ6rolp/SbYv089H19VprvJhVW7dWpvdFNPfBD2E+M6W0WdTbOeL633N2CDytGc4HwT/r5jTOE8zECncLMH7VgotaXU7ie5U+6diYGtnSG91TXS8YN5N/cUyyGlhoEBZYXYOuKYYRgWOWWeVSM3RVZaBpnGM1AWHsZ0UZQICS2xtH0YlxlJ5f3u42Z0+m3od3zCsIOEXcZEDKxHolP0aItsEnM6DogW59GDEcEko+d3ON2aw+tqRrZNUpaUQUCqFJvpkNyMcY3yTn1kic13yPOUvG644oPESNzFjzzCVkShutggrWo4THG/sDg3qs/xFvU5VI1x3WOVRUGajFm9+gqT67+ML
S6C3cb1pM2cwimut8u4Py9WVi1b7k8LtniDwY3XSXdeZ7B+jZ3zq8zOLtBqddhYv85kskaRXgNeB7bIhzcoJyF31j0rkAW6sx/guRfez1NPPc/Kyjm63Xl8L0QQjM6J4xYij39NpkcZaTpxyeN9N7uhdY4xerd88lHC2iooqv7bQJ4bfC/Fr7TAhyNztgwH22xubNLvz9Dt9fDDwGWoOEHFAnLcsDgCeqXTr2aF6wmCEMKeot3rYKI22o9oTVJXhpiEUWkoxgnlaJXhDYvKx6Tba0TdRbz2AkbFTJKM4WDA9Suvc+3yawzWrqDKCQsdWJmDmY7i/HLE6cUevU5Mpx3TmYnozMXMLswwv7hMe+4MUfcpgs45JOqjvZgg8wiigSPG/vE+71XtJJTsOVerTFM3p+dqSihqqURDPtGUP1D/f793uUmSFbcS25pY1x7juzWt5rFrQg17mud6m6b2uSbuzYDDWwt5vuOR4uZfwwK81KK1k9XU09K3wwRYS0rC1XU68c4DXoXCCaTagGAIyFWAKEEDmSkpda0ncyJza0GXUkV8KgIV0sKn5eWcXow5tRgyHqyzsbWBV5YsBIrZQIgDQbVaxHMrtOeWoRCGo5wsyRkmYxjvUNgSPwwQz8MYha9C+q0Z6EHf75NrSx6GTJRgdiasj0YYk1TWTHHcotIG7UJwI8XKGVcnE/Pa2KBHpmaY0CH1AkxLwJt6jA8D9QzjceFwPMZVPlWlPIwx5HnKzvYa4+u/BPl/5HjjKXOwl8iTa1x982XWrr6LuH2GTmeevEwZjzbR+es4b7WriGfuqOboEbbezelzn8fTz7yfZ555L3Nzp+i0ZhwpFlUlYffwg+CmYidTnDzkhSPGSguIpSgyJpMxWh+fgmm38FzpilnoPCNNSwShSA9nwkiXBVubmywuLbFQzmNtWAUXniyRZgpcAyggX3c5cxMDIw0trYlDS9QKiDodZtpteu2YfGbCaCZhspWQjxPGGxfZvn6NonyRQocUNibTPpO8IM0LynyMsQlxz/DCM4qZ+RZzCx0W5mZYnp9noT9PFLUJgxZ+K8JrR4TtFlG7S9BewIvOINEKKughXkQQasJwEz9oIep4n3dTpb2q018Z61KXaVuVha5IrNhqO4XzReyLYtkNZqubQ01UawJs9i0HeYOb3uP93OFu3X1NqpvBgE3iXRPyZg7k+rg5x/uWfARgcanZPOOSP9VmGnPnJ76kysk0zmk9cBRpTYy77v9hQNnyUZ5QWkuhS6e7NBFQgCoQMXhYAvFQEuIraEnIXMfy3heWec+zSxTpNltrV8jGI6TU2LygSFJS5TG3fJbZJ5/HJAVbq1uY1VXG4wHeYJs8nYAu0UVJnmsgIIr69Psh/bYLGh0JYAxd22Mks6R5iS1LbDmpnHnrOP5SowWchtY5iFvOuyHKVbiJIsqoxYiYwi8xnQ5EJ8cR8SjjkQy+Q1XpoZRgrVDqgjQZQXGJh5dkpAQuUWQ3KLIuw61Z9mqSXufuZvYRdZaZ+c/lmec/l+effx9Li2eYmz1FELZRKnTeYQERi4iqSvBOifFJRqkz9/4VwWIoioKyLIjiqBrYHe3jZ5rlbCtyrEvQdaGPQ3tULBvr1+l0u8zOzRCEPr7fTDtwcpDgMpwPNOxsulRfmxm0U0srG9EdQ3dO04t9OpSEAXTbiiBXjHPNyFhGg5KdzYTBCEYJjEtIresFIh86XZiZg6VFxcoTPU6dOcXyqWUW55aY7c8TeB081cL6AcYPnGbB9xCviw1aWBWCClEqxPcjwjAkjCL8Y5ZS1CR4N3lERRC1cbmGa3K8W41WNZbaGws3e4Prt05Tv1tvqxvLfk9tM9tFnWmiDqG/F47V0BPvSirqCcX63DXxrj3aNUmfeoxvQQlsszfeKbm3EPd65iZ8YJvWitC4SkAdUHg+eMoVu5C6ukyIkOFJTqQKWqKJ/IDAbxH5Hh3PZ6kL51cWePaJ0ygzx2h5htFgyGg4Zmdnw
Pr6FkmhMd05wqXTRJOMKAd/OHJ9e1li8xyTpuRJRpJkJFlJVkJhPQRxMhOtSUpLWlh0SaVzqxtwXZ2miQhkAXpnodfbi6b2PfB8tHgkVmGVhdBAMG2oh4VHT2Ncl5wVccnIrcWak/ICzoHNaoG7uzMUIku0uu/h/JO/mqef/SxOLZ1nfn6FVqtH4LdRhEiteJHdsMNGttgpTiqs0dV73O4KMYMgYGXlNGtr19lYvX6kbfe2Uokj6D/zbMzFC6/Rn+0TRgG9vpviPInIcb6ZgXHFIWYmwHWwr+Z0epv0Z7ZY7gtn+rAQWyILobG0QugsQjeEbgDtCLYGEKWOJFoPwjZEXWi1QNuSpByR2FlSZclbAbrXwfO7iMRYFNpajOiqbDz4qoUvAdYv8WgjaoIfaFotj043uDcP6SGhNKAqQinigvHEq4pv5K7i3YFzHzUBrUO8DXt+giYJ3u+xhZs9xhYn9mx6eGvi7LMXqFfvczc0Pdl23/paYFiwl+AUHvs8xg+CAiepuF285e1Qx1U+OOqzutGZLgUrgu/7VRC8hxi/SnhiaHmGtmdphYo4CmiHPh1lmG1r2lGJNQlB4DE3N08YtsAfMNGKcpAxKTNG1mNiPEoJ8IOYbrvDXK9PN/IJtaEcjEkGIwY7Y9a3B9zY2GS0PaHMSjJtGGEZWM0gGWPMGBfyVcsoDnKSeDji3wG/t9ehewpEYVGNAZ89qd3tFHfBoaVro9IZ14tSzYiOk4Q7dRcRynuO0+d/LS+8+1fx7LPv5dTSeQLfFQyp66nXNS53A63F1X939zx9Ek4ytC6xuIGbsVX1OFHML8zz9LPPkqUThtvbD/syDw15NubKpUvMzvaJIu+m8u0nETlVSG6VwssOQF0HT1kWY8sLC/DEPMy3Ya4DC32Y6ULHh5bvZjSVD1EC+CCBI8VRD1QHJBa0GBJTMNYFY13SshqNJlDaRcGXJdrkGDJQAaER957zS8TLQcYoP8MPIYo9p/c9JmJsjMsZ7NmqMpzvZnKtqUixqThjk+DWMoTbLU1db41mV9YUMCuctDLYt18tiahJc60JvrMPYi+ipj52M8ivlmoUONlq/To5KDfyFLu4X9M8uEkb0weWSt8DRlmMZwmVJfIhEiGwEGBp+dAJoR0J7VhoB9BSll5s8SWjyMe0wj6tbh/x2yTGJ0gNqj1Bl4rEwM4kJS80xhh85dEOImIlFMMJW9fXWFvd4frVdd66fIPL124wGA8pTEqJrsZYhj3v8N2sUPEaraq0KxUfUL5b9ucGn+KRxOEE3ylX+lGUoKxHEEZErTZDr++ytR9rBrq3iyqSVpYJoydYWnkfz7/wAc6dfY65/hniaBalfJQ0enAFiHX5mz1xKWPA1bafSilONJIkwVqXnUFr1wkaYymKEt8L8IN7rWtfTRU8AlXk1levcuFCh16vdXO6txOM5uvJWCdBuTEGfwJ2FSYdyGZBrUC05CqxdbsK40HuGyQDfAjaivnlFnNLbVQrRAcBNgzACxkNxtjyEuONTbqtDp24g48g1qBtTkmCBCFxZ0SrmyJegfglpR1R6Al5MSHN8mNvArYKhFeqCoivJQbVO1pUpSGuiWnd/Urj72axjP2ShoM0vx573uC6FFedlaLpbS7Zk0PcreuvPcy38yc0yXqdU7n2UE+72ROKAsjdb5dbN2IzGl1m6LDACyyxr4kkJ5Scni/0Y592YAjtmCA3KEkorSZNPEaTFn7cxZcQG4X4bU1nRrNYGFRrhC5Lrly8QJoWTEYTJoMdxts7jK3hM/Y1Ll68xsVrm7x+fZUbw00SM8beMgp8m7AJbK3BeOjuz/eh13d6LRW4h9IYl4rouEbMUxwqDslj7Ll+TQSlLGEY0e70UMEcpow42cR4FuU9T3/2aZZOPc3KylMsLz5BO57DmoAs0YgylSdco8SA56r5KF/h4UaKUhU5OeEOuXc8JskEawyl1hitMcaijSFJUoxx6dvuBBFFqzNLrz+D7/sMB
wPGo210eaekSA8Xusy5cfUyTz517pEhxgfBADcs+CnkqauM59K0QX/Bo93rYtuKSZhhck3pKcJOzOKTK5x/YgW/3UYrj3GWsT0cMBmNSLe22TIFs+02s502se8RKLAqJ5cEFbewukB5ghf7eHFAaRKyMiHJE5K0OF5ibKvMFHV2igYxNqaqOqcqjXFNgvcHzdXranLc1Ag3vcM1wa1Jbp1tv06Z1hSz1jrl5ny83zjmfu9xrWeuk5TW9e6b29WfhpulFLVXeooTBoOTIbRwUgrtqvWUCVbtUGQTdFhiQ4Mo7So2BjEd2kQ6R+U5tszJbUoZGnYiRdCKsWEXG2pQPoX1UWFMtz+LxuPG2harq5eZjBOyLCdLEtJ0gtUFV9dX0dZyIxkzMjl3L5VyLxB3j8Waq4VOCRKDGFeqMagSb1sDRV1p59FH3QU0E8g0a/fsn4C6l7uutz+Jkz+HJ6WAyl0hBFFMq9snmHmCLHkKeJmTS463MfpT7GxeYjR4kSsXT9PpnKHbOUWvt0Sr1XV5bwUQR5I9z8MLQqK4RbvVod3u0ut0uXb1AkUxeNg3NMUdMB6OMNagtUbXxFhr0jQlz+/u/bPWkmcpo6FCeR5ZkqAfgc4vTcYMBoNjzb5xFChwKsAeLh1VpiFDoNUlXlgB8WjHYwabO6ytDxmv5+QqgLjH0kqfmYUlgo4QREMm8TaTnTXy4TZ6NGEyHjv5he+qrfkxeF6IrwXPei62QCK0KchyyyTJSZJjtmf1ZrI48muFvZRtpnohNd9SsPemqiUPTWLcfLM1vcT1G9Db9//676aEoklgm9UkajVdLa1IuZX0No/b9Co0M2McdOypB+IEwuCC7evk15WTwWyDuUZZbrOZFmxjUAIBQsfz6PkekYBog1iDYPADYW59h7nVbdq9VaL2W1g8siwjy1KSLGU0nnB9fZO1wQ5JWVBag8FiGgT48MesbRzxr6lTLa+sJRXiyneWuIHBCckbfy9oPvb1o7d/XNysrVOrnISbx6n1fs07P+h3aO530sjxIRX42OuJrQihxFj6zKw8zfrkV2OGPtg3cK+0k+axssAEayeUxTXK4iUmo4A12oj0EemC1GF1rrmIBIhq4QcztOJF+jNLLC2ukEy20HpKjE8yhqMh1lq0Nhit0ZWkoshzsizHmua49yBYymJCWTxoMvzjhdaa8XD4SHuM4eZ4rFKg8KAIfaQ3T7x8Gs/zaQXb6FHO5RvbvHUFNgfrjAufdzFDZ77LbG+Gfq8knwzY9lsMjUe5s04x2qKQkiiEsBsSeh0CIiJpE0gPX/qI6mNMSZ4rJokmTY63SxdxEgoL6IY+uNRu2f11D9IQ1x7ZWgJRe5Jr722NJhGuvbWW22esaDappmzDZ6/md1pt2wzyb8o36vP6jf3rc5h9+560t+gUFSwu9K/ypFJ5T80mLrS23B2jaQsFlklpWCuL3SawOy4rIJ7s0FkdodRFDF6VkcWgrUYbS2ENuTXHyCgEpF8ttZ64BD9ywQ2e5x7OKuYKa+8QbX28aD5mB12Rwj2m9aTQ/ke7OX5tzqk2j1vDNL6r96uP1xzrNieq9hPph43DKQld966qHjV5eKrF8qkzePJBtm/Mkm0/hcmvgNlxI0i2OZlVwA114RBrt+7gQRSK3CMZR2xudLl8cQEosfZOhUKmeNhIJonzKmgXrGG0wRhNUZQURdUedxMNP0awlrIsq0I8jzZGwFXANzBnQXd6+HNLtJZXiDyfGQnpbk7wvFXyTLOzY7h2Y0h/bpvZ+W2wPp1WiOe16HYX8UtIS8jTHK8YYtME61uk3UJZD19ifK+D53XwVBulUjwVoyQ4/kw0qtE89xKr7FbBu+ntV3uFc/aC4Zr64vrN1HzT1aS3cb7deqwHBeo1XUz137WLyWOPGB+Uob/ppVaNdfVnfZz6fCUHv4mnOEHYwGVP7gIxyvoYBtxtxnj/REI9fhsXe6WbTwaqhmqt0zL5EUSR0xnXDdOaihA3a
eDDQx0WUE/KNEMDatRKpeZj2Fz2Tx4119t9S/3Y7p+EorE93Ey2a2VW0fhu/zk1N2eUPkocCjEObVnJKLwqp7EgfkC0uMxcr0N65jzjyWeTpiOKYkwy3mRw/SXywadBv8XJlVncCc0oljFleePIziSqVaWOarpj9l/LFPeCNE9d8J02LremcRkqyrKgLPIqo0qA0SdXM3z/eDxYRe2XumBgWQtmZpZwcYn20jLK88itx/LWmDOnrrCzsUMQQpaUrF7fIPRfZ7Q9YHFhltmZDu2oz/xyh1R8UiuYwTVIMoqihFLjaUBCfL+N57fwvBZh0KIVduhEHcKojkI7HtT6YltWv+T+CPj675oE1wVGM/be0wfxjHqf2tnn6iHsEeOmy6eZhaLmAzVxrt9k9dxrWC37i5s2t5V965r30STG+/ef4gRC44jxBIVP17YoSUm4/VuqSoCGcJhp444INgObsjvqC1quTKfyqmdLO32T0Q1y/HCxP0SgjomtlzqZjHBrQeyDiPD+z6Z3uf6N60e77irqpT5XHVbQlGg0E+j4gC9u8QREhImBkTkcpfjdcCjEOB1uuQpwvo94VSCaUvieR6s9w2xvtrKwxaBJiwk3rj7P1QsvsPn6j4L+DCehAZ1YSHPCAQ5+K9ytuTT3OcxtHy0UZQHWYozFWoOtcm5rU6KNRpTghxF58njlhBKliOIYXT6Kg9CDMQE2CxgbRSo+pQrwUJQlWC3EUczszAC/06LTXyCMumgN6SRlFIwIBMJZRTsOiaM20ulTFAOKIsAojRaLFoNRBpStgmuFUIV0wjbduEsrDHCpno4HVRjHrte4XtyX3Opyyzm4sm0TNSmu5zPrXMe1+6d+S+wP4mtqiOvz16TY52Y3VPNtCntZJpouo2b6tuZ5mnrj/frpKU4oLC0K5tAoLGNgi4NJ7xyKeQIKNJuU7DS+q8dFd0KbDhMmHP27yskund9SAS3QHujI5Ur0q5GlhyPFCvYq7Tw81CS0+Sg2ZRHN8Wg9noabu5Omp3f/o7w/w2LdLTRTptexED4QKFcFwlqwCGKtI8yBRxSFtFoRrXZIFIZ4fgiiSNOC9Z0hsr7x6BDjGxdfd2TYCxDfHdKKIopi2nGHuN0iimP8KEQFAe2wy8qpJ2lHMS8nY3YuZ2Df5HEiIocJq9P9a+7nKEe07aMFJe4hVdjdokWo+iE3iALP9xA/wOqimjKD3QTAlr3560cIrXaXxaUltjY2H/alHBoMMBhbrl7dZGFpnV5vDqs1F9+4yKULV9gapEgUcur805x//gWWFheYnWkTSEkx2SEbbzPIdyg8Q6AzfApU6COdDjYsMC2hDHNKNaIwO/jlCK9I8I2h7bXoR116UfRQ7l3EBdyVZi8QT7yGk8qyFx1zt0px+9OiNSUTB82f1oS5fgPufyPW2uKgOmbtggobxwpwbsL6Ddr0INfXVLOiYN81PnyuMcUdUOtVF4BTvqUVCsYKVzPDm+bWKfwFL6DtRWzlY/bP00V4aPRtmUFb2jzRf4oLgzdJ7XHEfQzZa4AtKEsYGRfwYATiyHmQsa4anv9w06fsel+51SMr+7Zrut+a0ohmNsb93uJ6LFs/2oFA5EEcKqLIxwtCvDDCC0P8MMLzQ5QfUlrFINEMk5IkTcmLnM7sDIvLp1g+tcjy8jz9bgeLkKYFly9fY+PV12F9i+PgiYdCjK9fehNRHsoPEM/HWItBaLU69Dp9uv0Zuv0Z2r0eUadFGMfEMxGz/Rl0lvNqaRhc/wmwFw7jcqaY4rYIQh9rbCWjcEU+rBGM0Sit8H2PMApAWlgbAy7rilJq17tcFAVlnjvifJ/wlIe17vxHDc8POH3uHAsLC4wGj48G3gLbE7h8cUC3ewXfC8nSjNdfeZNrV7eYpCVe1KK3sMITz72HM6eXmZtpUSbbbFx9k8nmmCwZosuUdpWfXyIPz+tgogLpABEYr6BkQqnH+GUC2uJbCPGJlXegJH1/BPeho3pLmYpo1
OEdFm4OsmtmcrgTDsowUUsr9i9NPXET9bxoAETgBaBznLe6FjrWYseaPPvcfOzazVS7qPa9pQVQdsqNTyrmgbZAJLDow0rPo9d2JdTntyYkmxnX2WtmXaAtgraaMeaWAsz2Lr90qEJ63R7BOCItj4MYZzhyrKv/V4Q4qSJiyxLi0qVt8zynezpiND27+1GPj5tEt5lYZn/miOakUDPcQOEe4+ZkUHO8TLUu8oBQ4cURXitG4jZe3MVvtQlbHVQQgReRF5B6KUOTMCwGZMUETYcomKXfXoLZU6h+D601hRqzbbfYyMAc05N/KMR4uH0NxEPElZ2yuB56EkQMo026/RlmFxaZLRaYVYuEUYASD/FiFhdPUz7/AV5Nd5hsDdgr3TzFFIePfreHxbrAO+uC77TRBFlAELglDEOKosAYs5vFQURwFfNcEFuWZWRphs4zXDWFtwd9jGl85haXOHfuHO12m6P2dAfcOuN+lNgu4cLFksn4Im++tspoormymjFMnLNmfjHn7DBlnBm0BITtPp1OTBgK6UKPYrRBOd7CZkOyfIzvKcJ2C6/Vgp6P15vFj7vgRRgsRZmQJAnb22tsbd5gMJjcQor3B6scNqT6pyrA6Thy09NaE+K6iR0kTzgIzbdlMxsE3OwRro9VB/U135Y1Md7/tq7dVPVxmpKLpjzC7v1f2JuJVlLtXoIppnUTTiJ84LnlkPl2QOx79NsRs7027Sgk8D36W2Nyu8rMTkZi9oouXypTxtycza/G+C7xRzt6hxevfpLkWLzF4HzhMXvapARQ7h2QJJA64kfchk6nCso7WtztUai7gwc5dt0N1GELt0UJlAaZJECCsEWd1UtEkCp62Ficc8ruUfad7QtcuvgJftkPCcMAz/OreCBNmqVVrYDjefAP6VdbA+tjbQgmpB5DaK1IUo8sGVCWGUoZ2p0W9LtY4yGi6M8sEAc+eZryyse30NnP4cT7U0xx+Oj3+24kXBFjrTVlWVKEOXmeE8UxRZ5TFIXLdWwMWOu8xdVSliVhGBKGIUkakKcptriTiPPhIYxinnr6aVZOrxBFMcYcHWX1gXMB5IXzqQw5eoto4FoCq5cNcmVCaW8O5c22Cq6u7rC2MWDxVM6y9en2O3T6MSafY7Jzg/H2dcbrV0i3xihfCFshYTdA+h28zjwqnkF5bSyKUqeMxpusbVxide0yO4OD9cXNTGOHiZqEq4o8qspL7FWfuukpruc/a7nCnSre1mS1jimucyzVN9Nk+7UrqRkpVXt/G9II2zRAM4Rd9h2vdgI1gvqUAk85ZxzipmgDceWwS3NfY9EpjhiBLzz51ArL833aUUCr5dOKfSJfESjF7NIsUafD6SsDNteGbAwmvGU0V7n/8HuLZXJsmaAEqKaRgL1R6Aj3cI3B+lCGMJ51uqaHJLV62Lgpy0jdadmmD/rgvawpKPKCYv/UwTHjkIhxnfA6BqnrhlL1XiXW5JTFhDwbUxYJWudQWsS4uUDxfLr9WdqzzzBcfQPs64dzWVNMsQ9RHGNxEUumURq6DEOK0qVsK8uCsix3i4BYU3uYqwwWuqQsyl0inaYpyXhMkaZYc3KC25Tns7i8wvzCAmEYVfmbj+76LC4G5e3M4B8G6ujqg044TAyf+MRrEIQMx0MmkxFnzywyNxvSiRVee4auB9gMzBjJNsizAdaPidodlLQJg1m8eA7xY7TVbI3WuXD5VV6/8Drbw1t9MU1572HDli7gfTdiuyEWLGtPbx1aXnuRm+HhB/0wTfJcb1enRqsJc1P6AHsa4GY8cHPRVfq4Zgq5ZkBfMwKoGdZeHddWc7Wq8ozXNaRqPfUUJw95aXnl8g22xjv0uxEzMy1mZtv0ex36cRcviuiqENvqIe1N7JUNysGQsNSsmz2BwslFrQcK2Et41pwXqkejhWuoYwOT4KADTXHCcUjEeA7Eh7CNF8UgygUS5wkmH6N8hVBidIrWGbrMsOgqq0mBLjL8IGRh6TTp4AmK5
AIn/RGZ4tFEEDY7qorwGouJQspS75Jht5SuEIjRleTCVERaU1Z5j+M8J8sy4igiSRLS0ZgiT7EPObG7iOLsufM8+fTT9HpdRAStS8ry6J4rDWybk5dyaWMz5SM/9wk2NjcZDAe88MJTPHF+idMrsyzMtujOx4hNUOUOyfo6yeYGRRnjteaJejFBMEvYWkArIc8TNkcbvHrxFV598wbJMXs2aimB+BD4TseLgC4rP0SdiaLOjdT0BJt9nzWaIeo1ma2TstTkOGZPH1znaNqfbo3G/5vkusb++rHNBKXNKnzN4D7Zk4lY23grTMnxiYMGXryW0VrN6MZwakVx5lyH094pTLdFr93B82PiqE2MpY3m6eUuL4hiczTiwsaAy+OSbXsS597g5sy7++uT16O++iHUQAr24QbfTXF/OCRiHIPyUGFEEMeIuDB/LZrCZNVo32DrEolGY4xgxOlHtNaIUrTaLfywR5HUveUUUxwuPG+vo7I4AqmUxRhBROF5Cq0VWnsY7e1WxqsJsdEepdIocSkJndCzJtgGW2pMqSkfch5kz/dYWFyk3+8TBIGb2rb2yAn7UXlKHxTJuOTatVVurK6xsDTL3FybuayLVT5+HBBGMXkUkSooi9zp4EoD1kNJiOfHWNFYZcmKlMFwh8G4PP46MFXtAKHS3Vbk0TaD1Gri2ySgzZDzeqlRb7PvPLvfNcPZFTd7og86Fo19mtvv1xw3dcXNbBoNfXLz0I9eLph3HkoLwxKSEfgDQztJ6BcZGYZO4CES4Gkfvx0RdCNi1Wau3aY9alNQkKZDxuVekcNmGuuHj2ajrRt1M5StvtKmQH86gnsUIY96idgppphiiimmmGKKKaY4DEz9/FNMMcUUU0wxxRRTTMGUGE8xxRRTTDHFFFNMMQUwJcZTTDHFFFNMMcUUU0wBTInxFFNMMcUUU0wxxRRTAFNiPMUUU0wxxRRTTDHFFMAREGMR+ZCIPLapLkTkfxWRHxeRDRGxIvKNx3Tex9auIvJ5IvIPReQzIjIRkYsi8v0i8vQxnPtxtuuTIvJ/ichbIpKIyLqI/JSIfMURn/extel+iMifr/qBnz3i8zzWNq1seNDywSM+72NtVwAReY+I/Jvq+U9E5GUR+ZNHeL7H1qb1vd1mSY/j3Ed5jocJEXlCRP5Z9f5PROQVEfkOEekc+7Ucdro2ETkHnLPWfuRQD3xCICJD4GPAG8DXA3/AWvu9x3Dex9auIvI3gC8Evh94ETgLfBuwDHzQWnvpCM/9ONv1s4D/GfgwcBnoA38Y+G3A77bW/rsjOu9ja9MmROQZ4BO4GvavWmt/3RGe67G2afXC/17ge/Z99Qlr7eQIz/u42/XzgP+E6wP+KbADPA90rbV/84jO+djatL63fas7wI8CP2it/e+O+tyPqV07wK/gKqZ8CLgI/GrgLwP/wVr7e4/1eqZ5jN8eRERZa42IPAe8yjER48cZIrJkrV3bt+5J4E3gO6y1f+nhXNnjBxHxcXb9mLX2tz/s63mUISI/BlwA3gX4R0mMH3dUxPg7rbV/8WFfy+MCEVHAp4CXrbW/62Ffz+MKEfk64J8DX2mt/aGHfT2PIkTkNwM/BvwWa+2PN9b/NeCbgf5RDpD341ikFNU0w3eIyJ+ppnUnIvJDIrJcLf9aRHZE5JKI/Ll9+y6JyPdUbvVJtc0PiMjZA87931fT8amIfFJEfoeIfFhEPnzAMf+BiFwRkaza54/cy/3Zh1Tr93G2635SXK17C1jDeY+PDI+zXQ+CtbbEeY3K+9n/XvBOsKmI/D7gVwHf8raMc594J9j0YeAxt+uXAu8BjsQzfDs85jY9CN8A3MARuyPDY27XsPoc7Fu/za2F548erkzs4S04N7jdt84CbwE/hJvG/YM4A/wo8F+Avwj8Rtw0mgW+orHvu4C/Bfxu4NcDXwN8FOepiRvb/SZcHcZ/D3wFrrG+AVwFPtzYrg+8jHPV/+HqvH8dV7/xT7yN+3yuutZvP
GwbvpPt2jjee6pr/uapXR/MrriOxQdWgL+Eq7j630xten82BeZwL8I/UP39YeBnp+30gWxqgQ0gAya46f8vPkqbPu52xT3rtjrXR3DV2leBvw20pjZ98HcVcL7a73+bttUHaqsx8ArwU8B7gS7wZcA14O8dtW1vuZ5j/PFewU031uv+ZrX+LzbW+dWD+3/c4fhe1Rgt8Lsa638ON20kjXWfW23X/PG+DUiB5/cd9x8B681rvMt9nhRi/FjZtXG9P1Vd89zUrg9mV+BvVMe1wBD46qlN79+mwD8GfqY+Dw+XGD8uNv0XwO8Fvhj4WuDjOCL3pVO73p9dgX9QHW8T+HacB/mbcQOPH5za9FDeVd9SHf8DR9lO3wl2xcUU/Qx77ypb7auO2rb7l+NM1/YT1k3j1vhM9bk7/VB9/xrux9mFiPxxEfm4iIxwU8AXq6/eVX3vAZ8H/FtbWbg63n/F6Smb+HLgF4A3RcSvl+o6FnCjlUcJj6Ndvxv4tcDXWmu33sZ+h4nHya7/Oy6Q4bcDPwL8gIh85T3sd9h45G0qIl+MC7r9483zPEQ88jatjvl11tp/Za39GWvt9wG/DueR+o67WuBo8DjYtX6/f5+19i9Zaz9srf0buICmrxKR99zZBIeOx8Gm+/H1wK9Yaz/xNvY5bDzydhWRGPhXOHL8dcCXAH8WN1j+u3e1wCHDP8Zz7Sc4+R3Wx/UfIvIncFM/fxNnqC3cA/+RxnaLuGjG1QPOe2Pf38s4b29xm+tcuO0dnEw8VnYVJ7b/I8A32IYI/yHgsbGrtfYyLisFwP9T6cL+BvD/3G3fQ8bjYNPvAf4JcFlEZqt1PuBVfyfW2uwO+x82Hgeb3gJr7VBEfgj4prez3yHicbDrRvX5E/vW/zjw14DPAT59h/0PG4+DTXchIp8PvBv4U/ey/RHicbDrN+FmNJ6z1r5erftpEdkB/qGI/ANr7cfvsP+h4jiJ8f3ia4D/aK39M/UKuTW/7Trux1g+YP9T7I2CwHUWq8CfvM35Xr7/S32kcOLsKiLfCvw5nB7pX9xt+xOKE2fXA/BLPPzO/O3gJNn0PdXyxw74bgv40zgP/UnHSbLpnXASvPJvByfJri/e5VofSiD5feAk2bSJb6jO+QP3uP1Jw0my6/uBrQYprvGL1ed7cPKqY8GjQIzb3Bqp+Aeaf1hrtYj8EvC7ReRDtctfRD4XeJqbf7wfBf4EcNFae9Ao6J2CE2VXEfl/4aZNv9Va+91vd/8ThBNl1/0Ql8Lp1wH7O6CTjJNk099wwLr/HafP+xO46cpHASfJprdARPrAV7L3YnxUcJLs+iO4YMbfAvzfjfVfXn3+0ts83sPCSbIp1XFDHLH8EXtAVqVHBCfJrteBORF5zlrb7EN/TfV55W0e74HwKBDjHwX+nIj8BVwn+WXA7zlgu/8FN0X0gyLyD3FTAB/CGbw5Mv4unG7lZ0Tku3CjmA5uSuSLrbW/804XIyJfAizhIvwBPq/S52Ct/T/v5wYfEk6MXUXka3Dk4keB/yQiX9D4emCtfel+bvAh4STZ9UPAPC46+TquzX4T8PnA77vvOzx+nBibWms/vH+diGzjAktu+e4E48TYVES+Gadp/M84XfGTuCCxFeD33/8tPhScGLtaazdE5K8C3yYiA1ymj8/DZav4Z/sIyEnGibFpA1+J61v/2f3c0AnBSbLr9+KKUf2wiHwnjnB/Hi6g77/i3mHHhkeBGH87MIubooxx2Qp+Cy5dyC6stT8hIr8f9yP+IM5z82dwncBOY7sdEfm11fo/h8uTu437Ef/tPVzPX8YJw2v8j9UCx51r78Fwkuz65TjbfTl73owaP4XTHj0qOEl2/WWcZOJrgBlcR/ZxXCd1rB3NA+Ik2fRxwUmy6cvA76qWGZwX678A32StfdQ8xifJrvX1DIH/ATfYuIZLofVX7vP+HgZOmk3BySg2Of44jcPEibGrtfZC5RD7EG7meBG4BPxDXOGfY5X9PNaV7
8SVUHwNZ9hHqSM40Zja9WgwtevhY2rTw8fUpkeDqV0PH1ObHg0ed7s+NsRYRFq46MqfxAnGnwH+3ziB+GdZa689xMt7ZDG169FgatfDx9Smh4+pTY8GU7sePqY2PRq8E+36KEgp7hUap0n7blxakDEuWfR/+zj+cMeIqV2PBlO7Hj6mNj18TG16NJja9fAxtenR4B1n18fGYzzFFFNMMcUUU0wxxRQPguOsfDfFFFNMMcUUU0wxxRQnFo8MMRaRD1cVu6aY4qFBRL5KRP7nh3TuD4mIFVdmc4r7QGXDL3vY1/GwcC9tSES+tNrmSx/0PPe7/0nF9Pl/+Gi0z994D9vaKm3lY43KJh+q8tSfWFS/x13LvO/ne4fRJ70dnGgjTjHFCcRX4fItTvFo4n/B5euc4vb4ZeALq88pbsZXMX3+HyV8IfCPH/ZFHAO+FNe3PS6c7n+oloeCd/TI804Qkchamz3s6zhOvBPv+agwteUUjyqstQPgI3fbbtrGb4+pbU4GrLV3bcdTnDw87KJeJ3J0ISJfIyKfEZFMRF4Ukd91wDZLIvIPRORKtd1nROSPHLDd0yLy/SKyVm33sf3Ha0xRvU9EfkxcJbt/fYS3+NBxu3sWkdMi8s9FZL2y1ydE5GsP2P83isiviEgqIq+JyB8Ske8VkQvHfzfHAxH5Xlxi97OV7ayIXGhM83y1iPwjEVkDbtT7HGSTg6RBVZv+eyJyqbL9JRH5FyIS3eGavlxERiLy3Sd9Gu1BISKfLSI/KCIbIpKIyMsi8i3Vd79ZRH5YRK6JyEREPiUif0ZEvMb+9dT+tzZ+vw89jHs5AXiPiPznylbXROTb6/Zz0LRl1V5/VkR+e/XcZ1QeHRH5HBH5maovuCIi38ajVezonjB9/o8PIvJC9ayvVu3qooj8G7lZRtKu7nu9Wr5PRGb3HeemZ7zx3nv/7dr/cUBEnqt+2zervuwNEfn7IjK3b7sDJaRVu/ve6v8fwnmLAYq6bTa2ves7XUS+sdrv14rIvxaRoYjcaPSvX14992MR+ai4ktDN/UVE/nTVJ+eVTb9bXGn3Ay5fvlVELlf3/tMi8sF7ue8DDvTVIvKR6nfcrtrIE3fb7244cR5jcbqhHwB+CFddZQn4W0CAq6BCZeyfBVq4Silv4iq2/H1xI/W/U213HvgFYBVX3WUNV7Lw34rIV1lr/8O+0/9fwD8B/j/cXOrwcUbznlu46jdzwF/AVZ75WuBfiEjbWvsPAUTkvbjf5xdxVdVCXOnGGR5vu/0VXHv81cDvqNZluPsG+DvAjwBfh6skdM+oOsSfw5UZ/Q7gE8Ay8Dtx9r3F+yQiX4+bJvx2a+1ddVuPMkTk84EP45LK/2ngMvA88IFqk2eA/4j7DVJcOdEP4X6vP19t84XAz+PKj35Pte7yUV/7CcW/B/4p8Fdxfee34Z7dD91hnxeAv417Dt4ANkVkEVdq+DqONGbAnwUe+OV0AjF9/o8PPwRsAX8clzv3LPAV3OzM+1u4ynO/D1dS/P+LSy32Dfdw/H/P22//h4kzuPfrn8Ld5zO4d+4P4/qpt4N/DJwDvgn4dTgbACAiHe7hnd7APwP+Oa7i3H8L/K/VYOMrgO8ERjg7/3sRedZam1f7fSfwLcDfBf5v4L245+WzReRL9lWu+3pcyef/CYhwFfj+o4g8b63dvNebFpE/Bvx94P+ojtHD/X4/JSIfsNYO7/VYt8Bae6IWXCnQlwDVWPcFgAU+XP39bbiX3/P79v1HuIfIr/7+JzgyvLBvu58APtb4+0PV8f/kw77/Y7TzLfeMa6gW+NJ92/4kbnDhVX//QGXXdmOb09VvcuFh39sR2+17gcv71n1pZbcfvM32t9gER/I+3Pj723Ed2ufcw2/m4xKsF8Afetg2OSa7/zSuU2/fw7ZS2ehbcS+dZl9ige942PfzEO1Yt6E/v2/9P8KVDp5ttOcvbXz/YRxx+OC+/b4TyIHzjXWdqh+2D/t+j8B+0+f/6
G28WN3n77jN97W9/9m+9d9dvYOksc4CHzrAhrdt/w/pnn0cqbXNNrC/nTTWXwC+96C2sW+7e32nf2O13V/ad02rVTt7urH+d1Tbfkn19zxu4Pa9+87xtft/x+rvdaDTWPdUdY6/crv7Zl+fBHRx5aj/6b5zPo3rj/7Ug/weJ2rqRdy0568G/k/bGGFYpxO60Nj0y3Ge4DdFxK8X4MdwCajf29juh4GdA7b77APc/D94FPd1wtG8518PXLHWfnjfNt+H85TUdv0C4IettZN6A+sSff/cEV7no4AHaT+/GfiotfZX7mHb7wL+MvB7rLWPfWCJiLSBLwK+v9nm9m1zWkS+R0TewnWMBc7zNovzvE1xM/ZLxf4l7mXzvjvsc8Fa+7F9674Q+Ii19lK9wlo7xnmN3mmYPv+Hgw3cjMRfE5E/LCLP32a7H9r39ydxHshT93CO+2n/hwYRCUXkL4iTgCa4/upnqq/fdYinutd3eo0fqf9jrS1xM3SvWGvfbGzzmerzfPX5BbhZje/bd6x/CZTAl+xb/8NVH1Gf5wIupuHteMq/EOgD37+P212qru/Xv41j3YITRYxxI8WASp+1D811y7gbL/Yt/6b6fqGx3dcfsN1f37ddjceyistd0LzneQ62wfXG9+C8w6sHbHfQ7/ZOwoO0nwXufVr/vwc+hRv1vxMwh+urDrRPpQ38D8BX4sjwl+EG2N9ZbfK2prXfIdj/rNZ/n73DPge179MHHOug478TMH3+DwHWuf5+E/BLOKnDK5UG94/v23T/tHstN7mX5/1+2v9h4q/ivLzfB/w24POBr66+O8z+6l7f6TW29v2d32Yd7F1nfYybzlMR640DznG7/uLt2L52dvwkt/K793Mrt3tbOGka43XcjR004jsFvFX9fwNHzP7kbY7zcmO7n8HpZw/C1X1/P3Z5N+8BzXve5ODR6krje3APwEFeuHsZqT/OOKj9pLjR9H4s4NpnjVpHdy/4b4AfB35ERL7CWjt6W1f56GELN41/O/s8i9MUf521dtdrISK//Riu7VHFKZxXrvk3wBVu/144qH1f4/b99TsN0+f/kGCtfQP4ehER4LNxkoC/Jy6QMTmEU9yp/R8Hvgb457ahDReR7gHbpTjP6H7sJ5u3w72+0x8E9TFWgBfrlZUHd+GAc9yuv3g7tq+fnW9snrOB+9cXc8I8xtZaDXwU+D3NCFER+TU4HUqNHwXeDVy01v7SAcuwsd0HgBdvs900nc7N+CngnIh80b71vw83EKlTqHwE+IpqihtwU9m46e7HHRkuSPFe8RZwSkSW6hUi8iy3dlY/Dny+iHz2PRzzRZzm6nncy/GgDvWxQSWf+Fnga0XkINvX7bCoV4hIAPz+A7bNeXu/3+OK/27f31+DC6z55Ns8zs8DX1AFOgO7AT+P66Bk+vwfI6zDx9jLHX1YUofDav/3izaN/qrCHzhgu7eAF0Rkd3AlIr8eF2jWRM1l9rfNe32nPwg+gutXv2bf+t+LG2R/eN/6r6j6CABE5CmcHOPn38Y5fw5Hfp+7Dbd7+W4HuBNOmscYXNqRH8dFPX4PTgfzl9lz/YPTWP1e4GdE5LtwHuIOjix/sbX2d1bb/SVc5oSfFpHvxumU53AP1zPW2j949LfzSOF7cV74fyci34qb2vv9uGmtP1oNXMBNV/8e4MdE5G/gdF3fhpsOeZyzUoDrSOarab1fwo3o74R/g4vO/T4R+Zs4udC34DxETXwXrrP6SXGVgT5Zbfs7gT9m90XYWms/LS6d1n/G/Q5fvn+bxwzfjOvkf15E/jdc23wG+CAue81bwHeKiMa9cP70bY7zEvDbRORHcZ7oq9ba/TNH7wT84cr58FFcVP4fwgUp7Tgn3T3ju3Bp235cXNqoOivFYXj1TiKmz/8RQ0Q+gMs48a9wGlcP5xkscRlQ9pPC+8Ft2/8hHPte8KPAN4jIJ3H3+NXArz1gu38J/BHgn4pLz/Y0bpCw/zprgvtnRORHA
G2t/SXu/Z1+37DWblZ98reIyBgX1/UeHE/4WW7Vgie4/uKv47jDXwYGuGfgXs85EJE/C/zdatD5IzibnMVpmj9srf2BB7mpE7fgNFQv4zrZF4Hfxa1RinOVId/EjVZWcbKJP7XvWOdw6UyuVNtdw2Wl+NrGNh/igIjOx3m53T3jNIP/AtdxZ7i0QV97wP6/CfhYtc0bwB/FBZ/8ysO+tyO2Wwf4/+FIlcUNtr60+v9vvM0+X4XTBCbAx3GBNje152q7ZVyanGtVW72ES58T3e43w3mNLuNG2/2HbZ8jtv3n4IK6titbfgb4c9V3H8R1wpPKHt+Oe9lZ4KnGMb4I+K84QnNTxPo7YWm0offhSFWCczr8FarsHdw+K8XP3uaYv6rqe9Oqn/023MvOPuz7PQL7TZ//o7fxcnXfr1TP8yZuUPxb9rXP37hvv2884Hm/XVaK27b/Y7rHRRzp3aqW78fFRVjgG/dt+0eBV6tr/Tngc7k1K4WHS5W2inNO2cZ3d32nN2z33L71tzz3uNl7SyMjCi4T0J/G8baaZ/3d/W2y2u87canjLuP6jJ/h1mw3Nz0fHNAnVeu/ovodB1VbeRWXhu+9D/L7SHXwKaZ4IFTTea8BP2St/aaHfT1TTDHFFFNM0YTsFcMIrAsOm2KKW3ASpRRTPAIQkb+DG71exSUr/5M4L/7fepjXNcUUU0wxxRRTTHG/mBLjKe4XMS7bxync1Mkv4qa2PvFQr2qKKaaYYoopppjiPjGVUkwxxRRTTDHFFFNMMQUnLF3bFFNMMcUUU0wxxRRTPCxMifEUU0wxxRRTTDHFFFMwJcZTTDHFFFNMMcUUU0wBHFLwnYjcVqjs+TG+GGyZU9jjq7kswJNRh88/d44nZ7qcbsd0bEakJ0SzC0Tn30XruffRff/n0Hnu3fhBhBIhmQwZDbcpixylFMZYsjQjLwqsBbHgeUJZlvz9v/u3+ckf+Xd3vRZr7dvKmA97No1bT/Gbfus3k+VtbqylXL60ydbqGqYc4NIaZrh6BhpXeTRgb7xT4NIEJo1tbWOh2jbAFeKZZ6/6pMGli92g3TrFl33x5/LrPu/dnFs5xdLsPJ5S7hi2xFqNtQXG5BibYNUQK2OssRgjFIVQlFCWmlLnlDqjKFMKnVCUGXmZUpQFRVGQZzlZnpJlOUWeU+QZpigxZUmRleRZidGG//Dzv/C2bdq060GIgxZnZnp46SatsqQvMB8Ly6fnmV1Y5MKrb/KJ6zkCPNUR5s7MkrUXSYM+wyxjmKSkWUqWZaRpRpJkaG3x/RjPD1HWIlgCBUHgEUUxcauD54XkhaEoDabS/PueTxD4RK027V4Xg7C5vcVoOCIKQ+I4Rmcl6TjBpBrJwBOPqB0joc8wGzFIhmRZSmkzFJZIPERKEjMiwxz4LD5IW62hULxr6Ryf/Z5F3vVcB4vP5WuG62uazW3NaFQgeozoMTujMev5iPFu4SaAiMXZF/jyr/gdfPlXfiXnnnySmdk5gjDEWIsBDNa1Ymux1mCMwRiNLgymsFCCIAiAWLTVDLc22bhxg4/94s/zEz/873np4i9ibik+dfi4H5vCndvqFIfTVt2b4otx1WxTXD8quH5RqsW4v+UpeO49RO9Z5slTMcPVTa795C/C+CLx+z+fr/66LyKKY65upAx2huTjLQIKTs32iMMWr1/Lef16gReEzPQ6DEYpG5fWsXnO3BOznF/uYHVBWeSEKqUdZHS7LWbml1BRj83EYzsRkjRlNB5x9a2r5G9cJOrCr3rfIu97don24ilSv8PPvnSDl16+wan5iC96pk/fy1jbXOOTr6/y1i+twc4Grh7CiL33h8XaXz4Emx4e3C+hEBRKKZR4WECbEmNdL2CxqOppr554wGIwt+nlHgwdIn7rB76S3/AVv4UP/poPcv6Zc/iBwvcUnheytr7J7/2DX8PHX/pFdyXT5/9IcL92vRcceVYKXaY8cGmV+4AFVrMxr1+/QmBO0Q6WEd8DL
0SyHHXtIp7nky0sEMwvoeYWCDtdoriFLgtyz0eXJcaUeL5PiGCNwVigehFrffTtNks3+MgvfpQs6zIZaMo8A5viigAZXNcRVp9R4//giLCpPkscOS7Z6/wF1wTK6v/jxnqNy5ddIGiUtSixBJ4lDDWeaBw9KbGmxNoCbQqsLbBSYNEYBcaI69RE4SlBleAKa1nXdRl3LE2JUDSW3C22qEh3gTEaY8pd8njYEFEoQjAevlcy3ws4P9/h9JklZmf7jC97RJU189SiRwO6nTaz3RkWu20yHTOYpOyMJ2xs4whyocFoxFo8z8f3FEoJKKG0iizXiMoxGowBpRSe5+H7Pr7vY61hkkwotCFNUkpdEkqIUopSWTQlRhV4ocELAuLZFn4ckA49bGnIs4yCCYLGWB+x+rak+LBgMOyMd0jSEM9XxO0uS6aN9oSkGDMe54SeIfI0qi1YG7pB0W7RxJK8SJiMR4yHQ9JJQrvdQ5SPMVTk2FZ34WhyWeYUeUGe5KSTHFNaQj/AU4rSFBRlSjIakecTPE/R68wQqxkmZn8BsineedDVYtglwbt9K7i3iYAK8Ja6LK30mJ0JEAzXzz2DvdIjXFigTHJCgV6oMbFlM4EkN2xPCuJcocSy2PNod2Jm59oksxFhKJR5wcp8zOJMhKdcn7i9s8W1jSHtFFSroB8aWn6ACRXZxJJnJd1+hH33Mqe6mrOLPr2WptMRIs+nHRiUnmDTEeVoBB3otQwzsyESKtzwUuP6/vpdcrJ4WIjHM90nWFk4w9zSIv3ZWbr9Hkp5DMcjhuMRk3RClqV0ohaz3T4KyNOM4XDI+uYa1zavcSW5TnmIBVkLNNujHdY31rixeoOoF9FuRURxSBjEpGmK1tMUyY8yHut0bRPgzfGI9lZIrzuDdELED/DyApVcxwPyM+dIl88QRiGq1yUIQ+JWG0FIbYIyBt/z8ZTCaIM2Bl1ajDFYc/TVj62dsHbl42D7uOI2fmPx9i1Nj7HFdXjgOsACR4yLalHVMYJqW1VZTBr7TIAMTIHoEmU1vlcQBQVKmV1PsbUlxhaIKTA2w5JjbVH1tQqrPDwP59kTg5YSRYHYAmyONRlGZ+gyR+sMU7pFlzllmVEUznucZyVFpo9sQCLioVSASEAYwtLSIk+dXWRlaYZuy+N6pGg7i5BqSAaadm9Ab6aHF8XghXT9kNizmLJgMkkpjUWJI8Oe7xEEAa4SKRhRFAZEG7DOw+kpwfc8fN/DVx7aGtI0IcsLsjxDa4O1YEWqYUmJJUf5FomFoCuEbR+/VDAUTPXbWyw5GuF4Xn+5MWhTIJLRiiPm5jsUeGxtw3BHExlNC4MnAhIw3PHId58nQ1kmTEZjRsMRySShKEo832A06CYxFg22IJ2MGY+GjAZjhjtjytzQjmOCICDNxiTZiCJNKJKUJBnh+T6R12NituChDN2nODmoSeH+J6MixOA+lUfQ9um1PKIQep2A+KklkjDGa0Vcv75NpxOAr8iylDzPSNOcIjN4ZOSFhzE+voppRZY48hDbIs8DWoEgGOLIIwx8sjSg1MIkt0zygiAv0KVgMpgMxwy2x3Riy+kzXVa6Bd0ox5oUXeYUJkdnCSQjClKG20JsAohilFfflzlgOTnEWCE833uK3/zrfyvPv/ddnHnyHIunlpmfnyeKQpIkYTKZMB6NSCcJvV6Phbl5yqJkbX2d69evc/HiJV59+WV+/qM/zWuDt8gP6TnXGDZ31rlx7TpLV5eI2hH9fo9ur0Mrdg4Mo6d9yqOMx5oYgxMDvDYYIDdWyRdm8WbahJ6H0hneeAd1+U1Up0cU+MQzs0gYEfgBNtRoXWKNwfOUI3Vau/ewFUSOzIu/DxrsZfZkDj2gxR6hrQlwyR7hrelPgpsuG+C8wTn1lNke6mnDmkwXjWMmwBCjh5TZCF2MENtC+QqlDMYUlZSiAFsgUoLJwGYYm6O1UJZCWSp0IRRFSZ5n5EVClo/IijFpNiHJErI8I8sy8uZnm
pFlKWmSkKYpaVKQTAp0eTQduMJJGCRs0Z2Z4YlnXuBdz52lZSfo4Q1aVjODm3zcBiZjmFwYMtx8lU4noNWJ8OI2c1GXYK5DL4rYmRRM0qLyDCuUr7DiYcRzHmrx3DSgceRYKbXrsxcRsBZdaMqyxGiNtYaydHYsyhJjQKOw1qKMR1Z6KO2DCfFo4UmCtm6moG4ZRz2c86XFmaWn6fdDyrygmIwIVMpMCKfnc7wyoxjlFBONZyxxIPR8n0FeVK8ui9YZySRlMknJ0gJdGoyxaGMx1mJFQMBqgy5zNtdXuXzpAqvXV9lc3yZPS7rtDmHgMxxtMRhskGcJOs/ZuLHOpcsXGRXbx2CNKU4+akdD7TVuyijqxYMyJX3rGpejMXbeI7AarxiCGbP1WsHPvarxQh8VhUgL4pZGiWYy0hQjgx6BzRT+Up+FJ+fodnysLsjygvHYYEvh1GLM+aUWS22f8+8/TWosm5nh4rV1trYKdjZzRhsj9M6IyQzoFUXRM4yCkjj0Ka8YBmXMK6+toq9ssdPOebW0XO/45F7I9dUSuzNibzaxdpCcHGKsEM63TvO57/81vP9zP4fzTz/JwtIivZk+7XYL3/eJ4hbdTg8zt4g1hjCKiFstR5bzgn5R8HQYMdPvEyPMf+yjfGrzFbZt/sDXpzFc277EhVdeo9vv46mQ+aVF5uYtvZ5iMikpy6PtVzwgVIKHYIHcGspjlKo+7njsibEBbpiSYmMND2E+btFpKUcFszFcfQsRRTw7T372CcK+T+D5ELjpXWOr0bQ1aAWiLNYoREmtCTgGbFWfAY4Y157hmgwX1f/tvsURW0eKM/YkFEG1hOzJL8LqHPW0WonT3E2wdkSZDdH5EGsjPM8iSjvJREWKqSUUUmB0icFQGqEooMiFMoc8L8jzlCybkOYj0nzEJBmTpGPSLCVNM/I8J89zR4jThDRJGE8SxmP38hhncFR9jgIC30e12vTn5zj/7As8/95nyK6/xvrWJVpGMwusAdcriw5ymFnXLGxq5qOU3sKY7orQ686x2OsxKmB1Y4fNwYgSwYpQikIrH1E+vhfsEmMxjvzVt+eIcSXh0Xs3XZYlWZpRliXagLaK0lik9MgLhZ8rrA7xiQmlhWGCRRNUutz9Q6PDRchi/2meffazmF9IMfoq2WSbMBjTkpylniK0wqYq2SoMnraEHvQDn1EesEOJxqJNTpamJJOULCsota50xAZtAREnxtEFWTJh7cZVXn7pk1x4801Wr6+TJjm9dpcwCNjausHW1nXyInHbZxlpnqJJj9QSUzwq8HD9Ye05bWqMa6eB7yRsl64wLNe4dsqjG1vsIIPhBK7vUOZDSvFBWhBFpCsxfhvyGwnspFBqsFCs9bi+OY+3GBBGJTovya9rGENyvkernOH8u0/zgRfOsj7RfPil67x5YZ3hpQH2+hAmCZQ5pq3YGAeM+op2YBEUk3xAPlbo9RHsjNBhwZWRRYUKbX3sTgn5COcksdW919K5k4FT4SJf8IEv4nO+4PN59rPezcLiIu2WG+SCoEuDQhGGMWE7IAwCDFBYQ2YgNxYvjFg53eP0qWXm223OLSzS+k+Kn13/FMkhDIY39BavX3yZVreHqIjJWJOnHuWiT1EenfMGYF4Jn3tumafPrTDb66GN5vLV67x1bY03tkasTQnyA+OxJ8bguroNnfHa1jqn2m0iOhi/kvSPh5SrV5lcvoBaXKF79inaC8t4QUQYlSDWET1TIuKjPMFa8H0PzzsuYlxP6dWeYMNeYF1WLTk3T4nV5DbHdX59IERJmyjq0G536XQ6+EGAtZbRKGFjYw1jBriXRITzTCf4IlgzIU83MVqB5Iiqjm2dbMLYAkOJsRotCo1g8NBWOf2sFkyZo4uUskgo8oQ8m5BnY7J0RDKZMJkkTJIJySRhkiQkk5xJUjKawDiHxBxt9y3K0m57tOI+S2eXWTx3mrnTK+wMr6KMIRChr6BrnEUznD8+x+mDbQLZtZLx6
Dpxb4ug3yHwY/qmRAWa1GhyayiUpRCFF4SEcYTnBdjSYkpNUToSKMoNvjwUvh8QWBDloY1BKUEbQ1lqtDaUuZMYKGMo4xKjcrwiIzY5Vmk8LVg8VCVAkEZrOUx4apZzK5/NB9//OXzgA++hE+5g8wtkw9fY3nqZdLyDMZqyhNBX9HseEzGMS03LM8wrgzJuKOeJwZeUshiT5SPSbIIXeFhrMRa0sZRlwXi0zc7mdV75zEt84lc+ypuvv8b2zhZlWRB4MZ4o0mKb0iRHcMfHiZrA3AsOei3ead+3u/2d9t9/rIYU4bbf3es13ct3bxd1nEUtUzuIGEfVoiBN4OKYnesFO6qAMocyA131xdYHW0KSYy5n5IFAkoOtA58FSgXXFXpLSCQHbSBz5Dvb0FxpZ4Tk7OwMubJT8qkXNxhf3IHBGPSEXefFSLBveqQ+pFJdtw5Ae2Aqh0lSYlJdPfUKrME9/bDnLa4dJg+fTrWIeHb5eZ56/gVml5YQ5ZOmBWUxxBhDUZZgLYHvE/gBnudkaZM0ZTAaMxiNGA6HKIFgYY7ZdpszZ88Qi7B1fZXVX1jj09k1HtRvnGN4PX2L4adSrq6v8dTTL/DUs8/zzDPPErfblMXRvanGxrK5M2S+ExGKC0CejEaMJinjKSk+FLwjiDG4xnKjSLmwtUkvUvj9mEgpvCzD315ncukNTLuPiE/UmyPo9ghDjQjkWYoxBuUJnvKwFoLQR3nHle2u9u763Cx3GOF0wDVBbpLimkx7QAslc/Rb86ysLHP63DKnTs2ztDyH5wlJknDxrcv8wi8M2dy5AszgSQtrnYq1HYYIKVm2hTaApCAFqLTSGLvgOK0KtAVjI4xEaHyM9bFWsEawpqi0xJkLyiwyyiIlzyek6Yjx2HVqg52U4dAwTmFcQmqPh9IopWl3hMXFGVaeWGJuZZHOwizjKMIag+8JXXE++7iybm39+hU/0OBvGzo7CbNRQruriNseYRiTiGKiFRmGXAle0KLdjvDDNmVhKHKNZCk2z1AiTmrhWYIwAE+hjUVrTak1ZVFgtEUXFp1bp73VBjPJwVOERQJMCPyMWCwaD60UBgi1Ii01Q3t4GRlEWjz37BfyZb/ht/Lud38WTzz1FEW2wdbaG1x+I+TS2hW2rhfEgaEVQRzCTA88A8UEYgVzPoQFRBasZ4j9lLIckKQDJskA8Z0O2xhLUWiSyYT1G1e4evlNXvrEx/n0i7/M9uAGdWspT44T7AHRB36z+2zOUtn9/9mvHW32AR5ItWDAVrNKFlx/UgfoNuIVJKy2O+jpa85M1cFrOXuZHWrSFbP3tDR1vDUhlX3HNPuOvac7d2gGix0W9hPEppTC9Z/Qqb5PQI8gacrTSvbiNeprBIoSivq+m8elItRldQzjji8KRgkbVyfsXF/jE8aQj0rMsKiI7v7AaQvagtaN9WHjOir72bJxDbAnofAax3r4UooQn6c7z/Dsc+/l1NnzhHGHcZIxSdzsWJ45eZUFut0OrThys2dZxtrGJtdX18myDM9XzPS7tHyPfhzRn5lxsrbP+WwGW1vkn/4vvFxuPHALKtFcza+yfnGTyxtXWdvaxBjL6TPnyYujy3aTAZ/cmXB1eIml+BqewNVJwYbdiyp6WGiGqz7KeMcQY4Acy2vDbVq+IgwWaQdtlCnx8gy7sYa98Bphp0+ysIwRUIFCvKrzcBFPYAXleQShj+97x3TlTS9xhqNjJbdqh+sO3kkjlNchjvrMdOc5tbjM2dPLnD27zJlzS8zOden1Y8oyZ2d7i2S8jpIt4DKQ4nst2mFE5C9wai6kHYM1I/LckuUTvFCDFFgpKwlFlZ2i0ruilLtq6xwiZWnRZYE1JViNNaUj0zpHly5FUb0UhaEoICuOxrN5e5QEUUFvJqQ/3yHuRUigyDGMs4xhqhmaPdX2hD3/Uu1nD6i8yRaKFDqZIR4agrhE+dAKFGGYYwLwgxZRWOJHjihnSiFWYY0jf6YsMSIo8Qh8D
2UMSursKBYPSyCW0AelLJ1As9DOmWsDYYKJE0pTkBvI8MhQFMajLDwmk5J0UlIcQhfmefPMzZ3j/R/4IO/+rPdy5uwT9Gbn2dkuKaTH1jjm4nW4esHiC7RCmO/DXB+sBuVBpwPtALo5tFLIvRyTXOfGlVfAF9Iip92dQcQFwSZJxmgw4MbVi1x+6zVe+tTH2Bms8mh7hW+HJQi+FPyZisPYaooCXN9ExYVrkloTqLpPCEAFEPjgK0ekjN7bvyzcdAea3VbsxxBG1Thcu/6vJuViK45XkVddVsdIcf1RTbCrNJBeG6yqrq++2Jp0NmMiaHxf632b+9SzYEdBjOtlH4HFB38elk9BnsHOFhQG55Sw7OmTa69yfU8daM9CK4JR6va1VVo01YGZGSgyGK0CmVvXb4PNMVsT8nQCJqnOETSurzmwqB0z1WBnd7DRHHBY3PugOSip7dsceDxcKiPAaf80zz/1XlbOnafTm0FbYXNrwPrqKlcuXWQ0GmG0pdfv8dzzz3H69ApJkrC5scknX3yRX/74rzBOJsx0+zz95BN0fI+VxXl6nTZhHHPuqfO857Pfz/rWBjcuf5TNB/YbO+SkXBq/wvqL17m8dpEnzj7P5mD1UI59+3PCNWNZmxQuE8c97eUyV9Up7eyuM22K/TgkYlxPQZ18I2+Ygk9vbTLXbjHfbuEpHx+B4RB19S3Sfp/B/CwtKYnmF1Fh6OLfTVlpi5XTNwUBQRBwPFNQtd43xXWGtURiB6cjrj0AHZA2fjzH7Pw8p5eXOb9ylrOnVlhZWubU8iKLizPMzfUQVfL/Z+8/lyzJsuxM8DtMyeXGzXmQjMysqkShIAC6BxBptMyItMw7zev0v3kASHeP9MhAADRmuqpAqrIyM7iTcDdudrmyQ+bHUTW77kHSw8PcI6KqdqaG2TW/RFWv6jnrrL322o1bs1hc0ZQe3Jyy/IoI944Joc9oeJd7h/vsjRPGvWj7VlYN86Uk64FJBShHkHHCCtKC00hSQvBRhxygsZ6qdrjGErxD4hG46GbhbSxyDB4hAlKCMZCYyB7W7t2tgkOwBFFgepD2NEJ5KlezqAouVgVHc8fjAI+BC16eUjqn6G4aW7ebCdCvoF8F+hJ6PUcyWqN7Cq0ztIxsWqUMSRAI7fEqUFhLWTuC1OgkQykVUSS+zRsElPBkSSBJAn3tmeSOg92G7ZHHhArhKxpvKT2snWZlE1a1piwlc+Vp6oYLW/5gcPzn//R/ZHd3n/d/8SHD0RAhPOvljOnVJefnFxyfzHl2UvN8Fa6v1N4U9jPYHcLOGIZ9MD4SY70lLCtLefWYz35rOTm7YPv5GelgghASay3L+ZL5dMrp0VNOjx5zOX1K+AnpJG83NCQ5SB1BqHfXelVEC+LCJhvYudB0AFUSvRNlZBibGlwdXyd1m17vGEUNOgHTbiFEljmE9rPazxABWttBmqat1+0+z3PDPMv4OqEisHYduH2VGe72fxP8bV6Xm6/bBIW3ER1r2n12x6R6IEE8POC9/9uvKBYlp384wX9ioGi4cQJKNo63fb/hHfb+h1+zf2/Mk8/OWX5+DmdTKJeIB4fc+9fvs74quPw/PobFFPYm6Hs97Okcjn38jnHEuXVIHFVqbkiQjUXMSwuMLrrzKjf2q1s4ldxojOFGRvKupIFfj5SUR7sf8ujDj9ja2UMoQ1nUFOuCv/6r/8x/+qt/x2w9JdUZH9z7BVvbE9577xFlWbJarfjt3/0N//mz/0ATKhQJJ6cf8GB/l9/8+iNCS3KNdrZ576MPODs548nFCxbF41shBmIECjfjD0f/J58d/w02FLf0vt8drz83CqQcMxzsopUCAovllLqZ8rqw+nXiNpHQ5pL5XSPLWwLGGTds5k87AnDuG55Mp+wMcsgTdKYR5RrpGsqjpzAe4ZVEmIRkaxshIkvsrMc2DqUESmmUeleEe+dF3NmpdQMcgAFhECYjHWyxtb3DncM9HtzZ4+Gdezw4vM/h3
h22JztsjbcYDnN6fUNZLZgvLK4JGF3h7AzrZ+171jTuGesCvNc4lzCfr8CvEbpmXQV29/ts7Q4xuUImAi9jEwWPR0oHIlA1FfNlw+y8YH65BhfopQlCuOvCurIsqauKuqmxtsG3dmS0c67i3Sw9AFzw1H6Fs2tcvaJYzVgkgsvpJUfTBU9XgS+Ak42z34XlZbM7D9elXX3i1OY8qBUkPpCxJBUWI9aQjUllTh0SpPQI5RHS44QDIUgTidYaZwNOCrwArwQqSBKh6GuYJJ7tnmN3UjHug2GF9AWNdxROsGo080awKAVLJemhUE0PMxfMbMkK/8aDz907ewxGI6R0LNdXeF+jlebi/Ijz0xNOTy+4WCyvB3EPzD2s1jCrofKwN4RhCqmBXg5Swnm5ZLr8nMW64mq+RqYjnBdUVcNivmQ+u2Ixe45zl/wcFuVvHhK0igDTvSoxaP9ddOD4VYDcsZoqMsVCtKC4jH/3HRhsgabQoFt2WbWASnaSC3EjwRABlIxf1EugWG98Zgu2vG/ZZvUN+7kBir8xD7spadgE0LcZmyzs5mMJKqH3wRb/439/n/PzFf+hqplO1/BVCX4AWQ69LK7gK9uy4oL0F3f4N//ml/z64Tb/bvyEv5RQpQKuDL1fHfAv/tlDXpws+cuPzwlVwNzd5vBBn+cF+OMKhAG9BSoF3QdlIuPcVFCuW81yBmoASQI9CdrHNFvVtGsIAVkKowy0gNpHvfNqAa4ArSHTMWUjXgXW7zbGcpvDO4/Yv3uP/miClIa6rlivSk6OT3h2+YQyzJEkpMcZx8fHXJyfU5QlRVmwXC1oQkXAYymZr6+oyjI2BVEKoRS9QZ/dwwPufvCI9z//gGdfnnIcVrd8JOGdgeLvFxmj8QGHd+5itCT4QHbV4/zcUFUXxPFgMwxRQlRz0xjs3UUCjLlxFF9xQzy9i7glZLfmx07FfJ+wwBfLOYNjkLtb9PUwNlTwgnp6Rvj890gho9Y4H2K0QWeS1XpNVRWYoJBStH607wq21cRLo0vb9UAM0b2M4d6Q/TtbPLq3x6N7+xzuTtjfGrI92mIy2GGQT8izAVmWkGWgtUU7S2I8StY4u6ApI6DpIrDkfP4xxccvGBiFDhW5aRj93rG3D7/+swG//rOHjHdHpIMMmUi89AghY7Y2BKbzOU+fnfPsi2OePz4lVZoHd+8wHOasqzmrYs66LFiXa4qyoCwr1oWnKKCqYgb3XcIdG2C5WnJ1dsJ5L+d8kJHNR7z46glPjud8XsFzvnmV3rHG3TTeqQc707s1Ld8ToqaW0qOma/KtNen4nJCmeNMjIyVXCVlqyJRBGEU20BitCTbS5xKJFpqeFgwSzcg0TIxnZBr6uiFTltAs8E1D4yPwXAfLkoqFUizSjFFISZ0nEYb+KnBV11Thzc724yd/S94bsFqdc37+jJ3tA8aDCVcX55wff8XV2RFlPfva6xxwZmF6BqMLOMhgbwDDNovfA2oa6vqC4lJR2JRF4VhXDXVVYu2KKGL5+wyK4SXtrRSgWqDYMcahZQWlaLFwx3bajde6+Lzrf2tlFrJlhL2IoFXrCHhFC3qFjJ8XOvlGO9ZJEcG6kvHvVNx00RTcMJmi1ce6m8eICMDlJhgLr+ie4UY+oG5kG91z3tqYu8mep6BTets97uwNqMsKQQNbCWQPMYlgdDdlsiVZXC5ZnC1oSo/3moe/mvDPPxzy4CDls88tv88L6h1J6I8Y7fTJjEYhEEoQEk1vq8/B7pjzo4oiWSO2Ruz86g7DcYYUliACVkjWpePy43P80ysYDUnf22e0l9AbWQIll+cL1pclvlLgUtK7I+6+PyHLFKuiZnaxZvF4ir8qSO4O2Xs0xgiLq9eEH6kphSFhL7/HZHePrD/EJDlKpWgZSHTKeLjFONmFSjJUI7Ik4+nTpwhgOBphtOLB4X2endxlVc9IRcoHex9y5/CQfr+PSROEFIhEI7KEfDxi584ddl7scFKufkbI5U1Dk
GUj9vf3OLx7B61krJkxBu8DFxeeur7ghjlWIEaodIyrC/BT4iz29qLL/3T5mjGRTArcCMM6/613EbcEjH9+E9M0eD6bzdnONIfjHgmSJHjEfEooC0ySUR88JBntYLbGqDSnqGqcc21hlCaEd1V8t1k5nSLFgCybMNkfc/Bwi3sPtrl/f4f3Hxzw3r09tkd9RnlCZjKMyFEiB5EgpUAbjxQOJRuM8YhQUayuWMxPoi/xRgQ8y3rGsr7ZCzmD3hGsihlp/oKDpmGwM8TkBqFAG0meeYIPXM3nPHvxnN9//CWfP1kwNIpESZTap6xL6rqO/rytN61zkXiqK6jqCFTf5ZXlPVG7enXONNVc9Q3ZvM/Ji694cVlzFL476dT5hHRlPF05U9X+vbuxPbHoLFlDvwFdOcxgjeitSYwhNT0yM6BvhogskA08iXEIZ1HekmpPZjzDNDBOAyPlGYqGPFQoXyNthW0s1kUiq2mg10BmA7mzZKxIjUXkCoUiTxJ6dUr9hif7yy/+FpNknJ0948XRIffuPOJg7y7L2YKTo+dcXB5hw6uMxMvn7cLDcg2LGu4OYHsAhFiUR1hQVQ67VqyWDcU10Pr7P6XFaFPrsgVtm1l/aNnYFkQKAU6C7+RWm+eoA5abBXrtSzupwyZYDcTHSsbXxcra+AIpI6PcsY2h24dunOqAcZf278CsAtWy0aLdXx+inKMrSrgGpi2A1urm+cG/DNBvPbqpuGW/s5zhVs7WIOEkkQjlMXsZ9+/s8Givz8NtzyStefYVfPm0ZLqGwqd8dL/He3spk4FAUxDqBSo3yK0hW5MewhMtGLWEPKE37rO9NSIbrSn6aya/vMv//X/6U+5uJ8yvzmmamnQwYBU0/5/xgGdpzvjehP/uL+7x/p4iVwuW6zl/+MrzxYmgCD2EGfPLh1v884+2yBI4na15crLgdz3FfFbzF7/c41/9yR5JKFgvLrHNj5PxnYhdDrYfMJhskWY9tE6QUqGUITUp+7v7fHT3V1zNLjCJxhP4/Se/45PHH/P+vQ/44IMP2Nvd51/96b+mKguMlNy/d5e7d++S5TlKawIeLwVBKUy/x3h/l63RHqp8hv0ZjCM/hH4TaCbDMbt7O+zt7yGlxDuH9yHapFYVdlrj/bJ9RYJJRwwnW6zXKeXCQugkPLcffWCHeHwVcQTpc2NI2wmz3hXagn9gxXebESUVnrOqYS0UmVAkAaS1SOewF6esvvg9QinUL35NdngXZVLSNEMgcM7j3LuanHcQ7JHqLSbDffb39rlzf4877+2yf3fM1nbO1lbO3taAvcmAfqbJjUBLEN5FlkMo2p4ICOHRymMMVPWKF1895vGTz7F/pJNfJ+BYWPj9l7C1M2W+dgx316SDDJUqegPP7s4OifLMV2vOLi85Ol8xtyCIq9TtnR0cI6yraHxDbRvmiwXTqylCXLJeR3eLrszmXQ1bIoB2AVUViOUl1VnCYpmyOrukWr8eSO9UgK8mu7tSyY6Puq4jb4BLyFeQ9kH3G4aDOb28ZisLyMyTphUmk2hRY2RDqhsy05BKSyos2lb4oqasLMIGZFd/1YCvwJZQd5sDJwPIkizAWEvyyZDdZBunszc6b+v1EtZLFvMrTk6+4vT0Kw7272NLy+JqzuXi9YriKuArG6WY4zkMZcwCj7YDd7dLikYxOnMcXTqm9ue4HH/TsFAXUeIQfFw9trZVkd3RLS5u3QhEt5BO29e3IFVsXpXtNOObqF0PLWPsHEgXQfC1oqBtJhS6zd8UANJAWcfiu81CMZlE9tnZuHXAWLYgV7VAvNPydsWD13pn4r8pGd9HtyA82q+0APq2ogP0m+dHgRzA/oRBX+PqCoIn6afcGeX86z+/w4e7KfX0mNnZBbPLM84uLlg0CVaNma8tZ7Oay1XDf/7dGZd/cwSHh0x+MSTRhtmiYl05GPUR2tAfjzC9AWY4hElBb9Jna5LjQs0nX52zXK359a8esXcw4Rfv7VF5w6/vD
/jXf7bFWK25OF5wcXnC7GLGYmWZ7I05uLvLZGC4uloxnc/47NkppxdLlrVEJylZKMjqBZmqUarG/QieBgrNbnqHrd09kiyP46IPaB/QUtLLM95/9B6DXsrz4+c8e/GUPzz5AydFrCn44vRTHj//M/7sF3/GX/zFP2MyHCGCYzQccHhwByEkjXU431BUDUVd4zyYNKeXj9Bo7DvjId88fggwVsrQ6+fkvZw8zxFC0NQNJkkwJiHLcop0SFUqAgEpDb1ej36eoUTA2yH1uibOYLc7G6fABNhqH2+YGl7nyNfcWAy8q7gVYPyuxAS3HTVwXltmXpAGQRJAe48JHnt1zvqLjwlCkGztYHbvoFVCnvVw1lGWFf4ttSZ+OSSZecj+1kPuHe7z4P4h9x8ccu/hAYcP9qPO1wQSA3kiyI3AqEAiYqFWCM11uQrXqXKPkBajAlWx5NnTL3l++v2qaC8L+MPHjkUxY7xfkg1TTG4YbXkIewwGOVezJacXM65WPvIwAZIsZ3dvH6kDQXocHuscl5eXGJNT1Y6rqwtCW0r1LsuppIDEQ+osqlhQX8JKSoqLFbZ5vRVrt+rdJPS6cESP3s7YKtCaK3kIBcgaMgupDGhTxPbQpkQridGQJoEkcaTGkmoLrsHXnqYKlDNoVhFoKx8lhQaoCigWsF5GTe/KQmnAJqAS6Gee8SCQ7oyQ6eQHnT/vHeV6yfNnX3B+dowIEtcEavv6Or5AW2bqYerhQQUHBt47cHjhGZh4vqoLWP0cB503ChstTkIawaRtVzydljeEVk7RMrCiZVpDynWBnRRAszEGdFdo07K9If4MKrLNvn0sZGQ1xYac4ppsbhnkuo7gugOYykTdqzHgmgjifesyIUQLjLuiQW72GXipELD7bK1aiYcC1z5P3uayqNNGb4TIYHtM8miLYS5ZzJcUZYVJNXd2+/yzX2/zwbbi4//2jKcXZxwdn3B6dEHDENFLOZ83vLisKBvH008u4OQMsh0GeQ8pNZfzglXVIEc9xKhPPhwSkh6m14PJgGyQoY3galrwu8+OWczWjA/2ufsw4d7BGKThNw9yfvN+H79Yc/nllKvjY05PlqwKzf59xYN7E3xR8+T5MZ99/ozT3z8hzJewv4u+t4NfG+qpx6QOKSrkj7DUTMnZndxhvLODTlKcC7HDp3coKUiylPGjB/zig0d8+uWnXC2mLO2c0IL40s355Phvubv7kF9+9Kf88sMPCL5BSsh6CSEI6tpS25p1UbIuamrnUSal1xtihKG8RdvKtxU/ZKgzOiXPM7IsJUmSdm0b0EqjtSFNMwb9IVonsahbSYaDHv08xWiBxLOQgaoQeB8zfyF04oY3D81GL18jEAQSGx2dSl5uT/auy6pvBRj/XOenAHy1LvjtV8d8tLuNnozQQmJ8gywK/PkRdZqwPniAHG6jRyNMlqMTTwgBrbtK3rd3BpTs8ee//qe8//A+hwdbHBxMODjYYnt3wnh7SG/QQ8mAUoFEBRIRUMGhQiCEgPUuznEEgqjBOoKIkNP7hvl8yvMXX7Gqvt96zANPzmC2DozPSpJBBRr64xXHJ5bBYItPfv+UJ18tKVsiIiCQOkGZFI/FuoYgFUJKtMnIsj69/ojeYEG+rmiqd7tKNAoGuWCQSjIFoVpTNp5yEaj869+crzbm3gxPPKZuFdwVFWRE7OCXUe6JA1mvEet1rIHIRXxSGggGrI7Pb6oIfFdXULWiZukhM7GuplnDeg7LJSyKCCYrAZWCxoDPIDcr9HhJkr4ZY/xqBO8p18s//sQ/EhVw1MDONLLeaRroi8gkp8Rz9w8qnGvZ2k51t8GsBtmCxnbr2OTOscJ7CDYWcF2r9jaB0CZgVi3gbaUVibnJZXriPjh/o46QGkx/A/TqWLzXscPatZ/dgt6uurbd9VjE1742JDfMtBCtxdwGMJZt2utWpRQSSEH0IVXQU8jdPoMP9rj3cIwv5vyn/zzj6UXFyVFJrgLr5QKXa7A1w
nvK0mEXDoQnKE9hHY1rkCKgUgO6jxymTAbgfcHRbMWisCSD2KjCNpaziwVlXUMKNtScTedcTpcsykDVCK5WFRezGeezgsv5isWyR1V4RLXCulhP0DSS0Ci01PQTzfF0xadHM86+vCS8mMYBYzhCC4cr1yynFjM0ZH2FMu8+gWzo0xuMyfp9lI6svXMW2wi00Wij6fd6DPKM0/MxUidfg+821MyWK9brEusdRiu0kYSWLa7qmqIoWSxKlquKpglokzEcTBjIAQu3fufH/X3jza92Ra83JM9zpBA4WxNdajxKSrIkYTjok6bREzoQLUF7vZx+rwcE7NaQshizWkxanT1UTc3F1RHOv/korkUx1QABAABJREFU3M2PDVA14fr3NdFva82PUfYX4x+slKKLpff87dk5QigmowmZlqR4ZN0gm3MaJVh/+QnkQ/rvfxQrZtuCkyQxf/wDfmAMetv8q//LP+PRwwO2JxnjccpkHCUdSoN0FhUUGtnOXQHZMi7Bh7aFrsOFBhsCja1wro5zloT5bMrJ+QVvQn5XAU6WcLEGqaIVlzIFn3/+JVn/GVcnNav5zfMDgNT4oCjqkrJaQ9vhzTqBMil5b8hoNGZdXFBZR7mBRt82n5FowajfY9iTZMYTXE2xbCjWEby+LkjvpBLdsulVQN0moK+1x50W2RPJNTmPDLKfg05BZuCyQMjBZdDomN22LvYQWK1gPoWiXVrLAIMM+tkGMC5vVt91gNrGzZYQtKM3ukDLn94Sdw0czWF6BZM+iBISC/qnt6tvMVrg6P1GIdtN9ufauzjAy9ZjbY4j+JbRfbWF/Kbh4KbdZgeoVUwtBN/KNUQLjls5AyJqk5Mk2oko1eLdTh/cvgfhZh98KwPpWpwL2gI/GYG0bJ0wvONlexp1A4yhlXHcViiQOez3kQ+HHLw34YO7Q+5t5+Q68F8//Yr/+oevKGeBUGecBMvjLzO2m5RqXaKEwlpNWMt46npQO4f3Nf1U0t8bMN/dJTvIGfcsV6uCk7MLrFdMJtv0M81quWJ6UbNclCBr1uWaJ0fnzJYVdTB4JZguK56+OOfxyRUvTmaM5JA/3SnIQ0lpHU4aHNl19zvhHJeLgoujKeHFPBZu4MF6jHQ0VcHsqiCVA7LeGK2SWzynrxOCRI1IB0NM3llSery1OAkkEmUkJlEkxqC1QSiFeAW2BGCxLjg6PmVnZ8JolNPrZyRJgpCS1apiuVyzmBesFhVNHUh0xmi4xU6yw0lxhv/Z0nvfFQKjh4xGE7I8BwJVWaClhgBGCfI0Qcnh9X0XgkMQSFNDmhjSNCFJNLZpKJYrmrpGCkFV1XzxOOH5yZM3BsddHU63LOlKeLt56seUyv2DB8YAC+/47PKK3ckWatjHGI0kYGyNXyyoXjyBtIfKc8z2Nkm/h1QSqd6+96MQHqUbtK4xRpKlirxnSE0g+FiIooJAeImzjkCrKQ6xULD2lsZZmuBpvMPaGucb0jQhSxOKsmBdfHth1OuE3ZhTbQPV2kddwKtXthA4FJUNLJYli+UCqTUq0ThrsV4idUraH9IblMyXc6S7meZfLSW67TDa0OtP0D2Bp2a5sBRLOCvhjJsb+HXjWsLyDX9vuEkVTYmkcEYLURwY1+qDWxCsTQTFOo1OTkLHPgO1i0Vr8yUUrZ+NJjpyubZbbVHFdgQdQ93poCugCRCmwPMZ6eynx5wEYGbh6gqoYXEF0+XbrpH+qUULDJHtRaVBprzkYezgZY9fGS8a2RbGeXcjlRB13EK4YX2vf/FcM6gkkQ2+9vRtgbWwIFqXCyHankIqyh46/fGmXDe04LbzNA6txdumw0THEnc6ZLlp7cYGIA43Xs63Gg5sSaglTSlZzBqerhR1WfPZ3z2n+PhFbNssBsyrhv9dFTw+1NTLBZcXM06fr2DZ+s1XU87zF/y7UWCQK6bPL6GsqY6v+DhxrBYl5fMZCMXVTsEiTbHLGr+qCW0zlYurOf/l9IKm8tjjApznq
Ws4f3bK8mKBu1ryX857+PklQ+04O5/y/GTN4sgSVg3PPjnBB8HR8yvc5+cw79oRAfOS+cmMzxaBy8QzOlsxPF5jEsP/45bP6neHIlUD0n6fJMtQJkEqBRKCCAgl0alBJtGdJ80zJlvbbE0esD6fE0IBKIzYJk97rIoVl1eXWPrUvkev10NpQ1HVrMuKqm5oXLy+tcnoj0bsDg8YFs+Y8cMzXD+10HrA7u5dtra3yfMcvKeuCkSSkWhDlqWMJyMa6wgIfPBtEy4Xa3AJhBALXaWUmCQhMYZ+3gOgbixFVXB2+Yw36TbgieO446ZlWbds/7GXKT9LYGxo2/IKWId4Un/oyTyrC/7uxTH6zgGDvR1SnUTnzCbQHL8gOI8cDmFnm97uIdIkBPH2O98V5YzPv/gtgjtU1Qgpd+gPd0m1QKIQQYGPHryuKbG2xPvYprlxNZWtqG1N7S3WR5ZISAhhiDEa6zzNN/TOHeWKdeUi6H2T+IbXCQEuCIqqYTpbcnU1Q6cGk2Ug2pbGaJK0T9Yr0WaFKN11L78OUL6tSE1K2t+GXLCuC6qm4GpZ8dTBKd//1v9jwHgFXHJTJmWIg0Te/rQBVNNuAupVi1M0eNUyvw6WNczbLoGdUZYo4g5bC0W4aTjSWftXG1vRwPTII+VP04e8DHA5j/Vn51dw7P6hyShECxZ1LKBLEsjzCHwR0ddwWUDVNSdvJQhZFmUNQW6AUAuqiFsgAuVG3vSN6OzSZAoqizIKqVopRAWhjlodFVo2GG4AbwvAfYgXaDs23fT7aOURUkURvPNtIV37E9vKJ5LY0EQntCgp7lxoosVKWbS+ybcYfgUXl4SF4/wJnCcgvIzF+KuqLS7UgCV8NefJ+VOeprFaIDQNFO2CgBpWa5rfnfNfv/oUoSV+Fu1h3OKM8y9aP+k6Mu7u6TOckBtJgLjY8RJWinjsNrLuzbFlptrmLt6zeCH5j3/QSB3wjSVUHkoBXrKez/j0959B4aJ3cdjIdy3muE9XTI1nqgJCCERXmf0OQ2LomQFZLyfJckyaoIyJXu5aIlONylNkaghGk40G3H1wnz9f/gX54zEX01NSmTHJJty7e4DHM13MaGjnPe9I05yyrqhcgw0epEAaDUKRD4bs7R1yb3oPW3/J6p0K9952aPb2HvHg4QNGoz5pmuCDoypLjNKoLGOQDOj1+9RNQ1lWlFVJCFEC1NQW7xwsQxxzQsC7QC/PGY9TBoM++8WaxXLObHFB3cz/+C59Q3QF9j+1+FkC4xTYM7CdwWoJsxBZtyVvLtL2wNPFlIFJ2BtPyE2GVAmqqRHzK4KtKHd3YXsHCSTbB2/PMWgjarvidx//Nc49omkOybKKrW1NohTSC4TzBK/wDqq6oK4LfGgIOKxvaFyJDQ2xzC2gdDQ8V0aitEYgvlbgrSV8+HCPJy8uuFzcHhQVUiFUgguCqnasihrtIRUapTVSGkyqGMgE5yWzWcG6vEKGgJHg67frUtHLewy37pBmEreeMzue8WK94Jg3b13zbfvqiSB1xg0/F6fdeA13j6/7hwWQNpJ1HRnXAdsVkXmuuTHKkiHOhV2fq04K0mz87KQcljbT/q4rHF4zPNH4QDSwcj9+mu3dh4I0A92LwDPLYNCPwFEIqJv4xS/bgjmpIE1bYNy6RHQsrfDxb7qTSxhoVBSeW4iSDE/XOCjqe2UExrj4JYhwA6ICUd5R+7YHR8dMa/A66p47ObQQN+L7Tgd97Y/sW7K7ddTQrRtFaFlyZyOgtHXUydrbXCK3gDYsYrVqVQGOcN01rutu195NoYZiTSg2c1ivPMc5wlVDQBCXuknrJtJVILQSk2bTkOr6bo/H6jrv59ZdxBYxBdQVC9aBcOHb21bevBYJ5YJY3NHRCt2/hbjAWHXtEjwBS/gReDpFTpb20WmGSiIzrBIDwiGMaK89hVcSpyXZcMD9hw8gNTx8/32WqxWhaVA2MMhzklRRNiVuZbHCIYwmSIkNjtACYh1AK
k+wkA/6THZ2ODy7S3225Cgcs/qpDoLfM4wZMdk5YDDaIs0MUkLTNHgXzem0SUjTDK0NVV2BmNPYaJ+6XK1YLhcsl4vWLz5ml4TQjEYTEJIDIpGVJQajDPUb3o5d2cJPLX6WwNgS2bRg4tgvqji2xuHszaMOji+nFxzOdsiTqNpVUmFsCcWc+vljgkmidtRkuNtmLb4hQvB88fR3lOU5Qv6SwUiws9dHBIWwNaFJCU7GOcM1uNCADDETqTzSKFKjSBKFSWMba5MkpEmGVobEpKjrVq8xEqUYDicoNeM2OVqtE7LegDQbYNIeSmdIZZAqxSQZaZqgtUZKRX+wRdMAKGxZ4ZsG70pqF94aazwcb7F35z3yXsri8oLHnx9zVJ4x/5YJwxDlD4IITL/vtNIQF3Ott8B1YZ7neipFc6MW3SR0vkmn3JZVYLnRNncOGJ0ytVOgduDabWw/dvrq28IQcZJJYFhDYW8aof+DCJHBeAz5iCgOTGJHM6WiVMI28WIZJtw4SbSuENLEx160F4FvV1iu1RWaWHWqRMvCdlrgFuAqGdln34I3L1rw2zphBBE7rZVFZIR1C2i6q6qTNtNqiEPrROH9hla5LcrTMkoyUgWpiO/X2HZ/Wt/BUMd9V7cJYDr5SGtL91L3PsPNErVl3K/Bajd9Cl5uy/yqLKV7D0f8osTG+3XHv6kNb8/XdZVC97fN1tNdsxQ2Xrv52d0xbHpLs/G3zpXEcwOe320YckySghJ4IRBaRdmElLFTuRTU3qEJKCHJhkPuPXzA1t4OdVPTNDXNek1TFDRlRVPXOO+oXYOoSvq2ISegtCZTEpd6EutxtcdVnqqo6I2GjLa2uFPdI1kIHvsj1j9rcCyIjb8OKL1iXli8UKRa4Z1CSRnn27RH2stJkhRRaoq6xi8XLFYrXhwfc3byjKq4IoRNKkpwdmY4O33Owd4hUgjqqqR5QxykiVZtU356rPHPEhh7IkFR+dghK5OQreLw88PUsrBwNV9cXDLKMrJ+Sq41NAJRl4TTF3hrSUdbJIcP8fbdrLKdtxyfH/PsRcrBnTFX8320zJBNAzYhOIn3giACQQZUotBKYRKNSiDNNf1eFgsS0vZmQGBrj9ZJ28HvJjJt6PUmSPn8Vo/DmJQ0G5CkPbTpoUyGVBopU9K0z2A4IO/1SNOUUVHQ1JENX81mrOdLfKOwZUHzhh3a/liMRzsc3nmPfDhAmxE++4RZeEL1DQPlELiTwnYmECHwd/Po8vh9wxLBcTdNdtNgQwTdm6wxvEzUdS0culKqbsHQNRHpfm70M7qeurv32SjdeuMreYjG4Yl5iQ3Z6i2FIeIwAeQpjDxc+h9+r/9sQuawPYHB1st2Z7ItcPM2DoJNyyyKFhx3TTFky9xWIQ6cwd8wyyqNDUG0j2ykK8BVUXxuo6YWlbYewhqcij7H+Btc1TQQ2u6nRkGio4RD6JsLTOgomL+2aWsL8vA3zhSJglxBIqMnX5c/8S4K5jvPZelvMe3fgdQOHLaM9TW47LYO7HdXt9j4KV55v02Qu/n75rjVjbmbBZCb2zdF50vdvd+rABleBtrd52z+/iow7j7rXS+LBUbk6CQhSIGXcWEkjUYqhdbgW2CsvEPhSXoZ+8MeWgm0FMjgKZYLVvMZF5cXnJ2fs1iusFWD9T4WlCuJNgYlVXRpch5XORplKXs5eb/HcDJCcUg/S1ifrngSrn6yJMF3h0bqXfrbD+mNJniVUzpFFgypNGgVMFpjkhypDAiFC9A4T900LFdrzs7POT3+iqo44+vXRCCEmtn8OYvlOVqlGJXSuDcbiTu7thJ+cgrvnx0wVkTAUHo4ncX5wIQIAm7DPM0Djy9OMd7Tf3Sf4WQASYYUAekcLGf45Qy/XhLq8gd+2uuH855FtWJerFmuC4a9mlwkJAlIqZBKxxW3kZjMYLIEnUiUhiSR5GkU2xuTonVK03iaumibSL08y2QmR6scIb7t8nizs6x1itIpUiYtUxyrh
lGarD9gZ++QwXCA1ppiXVAWDd7Bpc5QwSAwKB1tZd5GjMfb3L37iMnODsN8zCd3/8B48Annl/OXpBQKeDBO+NWH++xuZwhbsPr/Ped3xZutfAM3kojL9nFB7P7TaY83mWN4eZrdnII7kNtV/HYSz03eq+so1L3XH5uO/1hsMSL6Tsf/WSwNlgpPQ/jBzG4BnK5jg5JcQBoi03DO7TINP9W0HiqH3W0YbUcwvNlkI7jI4mofgWwH1lTL9qpOCiFb7Y2PmlWXtcA4iextXUUZwXoF1Rks1lGbmu7B8H7U/CoDaY94FTVRm9ztQ7c8a2TUF2sTwbFrr0zXXomhtXIzstX7bOxnKuMmQvRt3nhb8PF5QoNsnTJu5+SCnEAYxvN63b+y4ObqGrTbJhDtChJfjU0gvAlKN0H0JtP76l3Ysudfi2+7Ozc/q3scNn7fBPBdbEg5fjRgLFFEdtgTO5/GpooCk2iSNLoVBeGpXR2vKwVpltPLMnqJRksoE0GqAz40VLaOQLiyaG3IB32ylmgxxuCcx9YNtWjA1egkzpNJnoEbIL1jdDnCNFPqnxU01ig9JhvcYbx7j8neHZIsRypFv5eyPewx7KXkiSTRghAcZbXmanrBarViOptyfnHBydER58ePsc2UP3Y9eF9R++qNJRQQh6M5N5nXH8ua7ZviZwWMJbGHdk5cYSwCpC6e1C5h9UOBMUAdLI+nF9zd2WZ/e4JMetHf2Jaoqoy+WMWa0Ly7r9IDha1ZVxVF1WCdQ+SQpipWi6YJOjPoLMHkKUmeoY1ESo+WAqMlidJonSFlQggVPoB1Ab8xaEoEg3SM0T2k+Lod3UgN6CUjTorj7w15lE4R0lxr9wKK0BrMpXmf7d09hqMRIQSMWVOVDc6CsIpQg1Epvd6g7Th4uyEQkTE+uM/B4QH9rM/h4QMm27tk0wXVhhA7l/DgwT1+9U/+nL07E6RfcnH8v3D08Yrz8P2viC6huiZev21tOyXQ49of4BrYdtMafJ2b6pjjzSmvmzJ73CR0NV+ftt8UagzlECEEqrX8sa6m9hUlNQUVNZYGj8XTNW75PueoJrqCNB72iff7Vrv/59yO13UOTCSc+p+g1FpnsDOGyYTr7nRNEyUUtrnJ7vsW7AhugLFpwSYyMsYVUCXRo1ipKLfwTTy5Kw/VAlmf4FdnYBdg12CGbUHchlOFbIVrtmMoN3IWrmVXQ6sRRrVFeRZE0jLCMgJ2iPugDRgRN9tKM2oXWerQSkZUtJlC3cYo34WC7W0Q42h/You4nyyJ03ZNXIbtEMFxN9u8CnI3weirwqfNEHwzQN4ExJvv1T3ePN5Xj/1VdnpTmvHq8zedS159z3cJS0QkXqQk4PHB44UHCSY1pLnGB4fz0e/eV6C1QoicNNX0spTMSBLpkcJR25qiqRHakFYWqQz94ZC83yfLcpLEYK2lKisIEmc9OtHoxJBkKdL1kM4xTMfkjaH+WRTiKYzZYjh+wHD3kNHuAaPJLoPhFkJpPNDLDFujnMkgY5QbUuW5uDjl9PSI4+Njjo6OOD2/YHY1pS7OCP7dORMFYo3NCDgk3m0zfhrj708eGHeJLkNk0CYqZutoa0ACEUB0pQ+3xfqsfMMfjo/ppykfjQbk+QBlNUE4lNIkUvD2XYw3QoI0miTv0R9OGEwmDPM+/TTe9DoxiEQjjURlApUHpAYlBEqKqC2SsnV4Cngf2rbW4aUiwoEcsrt1l8Fgm8S83PAhFZoP7z5iZ/eQ+W+nrJvvcxMJglCsipLL6Yyr2YzpfI5QitQ5dsoGHyTWBor1mtVyRV1btE7Y2tohNzlNXWFt03bdueUQAq1jJyClNELIFrBusi4xtJRkvQH5YMJwvEuiRzx8f59ff/Ulf1hGsPZ9pphuKrPcOKx0/o49ImjrtVunad6c9jrrtw5MV9wQbdfFeO3j7obfVEN2LPOb3jelL1FoEqnQOkEhMUERgkagMVgcvuWTaxosxfdcVtXAVXsuOoZBE
3HUFJi/wYJkMywxC/WTZIx1bDpB3vr4egFWQCNacXkLdJx6We6qiJpfKVuFgIveit3AKXWUPUjia4NHXi7pVacs7QtgCmEQjaNHSSz4U6ptp1jAqoS0hLWGMo0f2AHyrt1z42JxYGh3TNjoRah0C3ZVZJDTVmYhQ9zHoFqiM4k6aSnjewsfNca3BeKEZvs3E/L9e9jZBC73EJeX+IsTyvlzCn9Kc212mBCvvLzdUm6WrZsSi28CzZ2EYVPWwCuPvw8w/iamd/PL3xRLvfq8zeff5iLj+4S4Xoz7dlekECgtSVJFmhmcD7HzOQGERWw0ponrLYnOUnIGjD2gDINhRd04hNAMBn16vT5aK6SUVKHCigYhRbRbNRKhJEEIgoySI6N79MiY/wQ4482x+kbsKJAqQadj+qM77N55n+H2PiLNkUlGmvXRSYKQiiAExsRjJ3i8t9S+Ybmcc3lxztXlBYv5gqoosc4T/K3pk147OnemXWCPG9Kz7VX1o8VPHhgPgbGAXgJ5IklzATKgC09awrKKJ7FTgsnvfrvvFS8WU7KvUkbvPeJwso03CQGLSnNSrUnkJrx4uyEkmCylPxwx2dlja3eXUZaSJxqlBVJHix8vHcJYvBQI6ZBCEK59QcGHQPAO613UYflwvfcaw53+Aw727jEabpOa3kv7MM4GfPjRB9y5f4+/e/Jb1pffAxiLqImcL5cYJTk9v+D84gKpDXkd9U111SBEycXFFYvZHNs0hCCYbO2Q791BEPDe8rbsQEIIsXijrinLknVRUJYV4ZXP01KidIJQBqkzsn6fe+99QHlyTvk3C+b2zVwsOqeKrj/8nDhQDIiZkm5q7Z4LN1rjDhh3xXidLZsmTuWmffxqiU03OXVTzpvEjAWahL5TpDIBoQhSIJxAIeiWkJEttti2998a+73unIZY5DgkSip6OYx6sKfhk1OY/wBU2xAB9o89GX5jyNBisBBBLUTg2PnzBRFlFM7f1FpJWhBJfFLrR4poPYBtuJE0GNVqgAMpSwbVGQVHOK5A3IN+gJ0ctsaRNV6v47aYw9JD30aJBj46SQgRwW1tW2/fLrMmW3PtOgJjrWORoE4gESA6MByIneg8yIxoHSdasB/idksDvUol/9d/MeIXv74T3X3mNfZ0zvKLFxz/l4Sjxw0n7jlXnLLGETBEBvkOcTrvxE6vxqugdhM0wzdLH7q/fxPj+yrruxmvSic2QXH3ft8kmNqcv941YwwuBJz3URkkQCqB1gKTSNJM4bxCNZLgo5+ulg4ZHN47XPBYIcAYEiUZ64S0N8DWDtt4hJAYk2C0wXsfC9NlQ+eEIpRAKHk9JzoPPkiUTukzQrOk+ZGXyb/SG0BVSFQ+QPW3yXbv0zt4xHD3HuOdfaTJmS2jxBIk1lskIeqqPbimpBIiOhq5gsuLMy5OT1m2c6xWijzvU3iHq0vefCZ4MyzUebtsCdgWcRxvl+U/2nj8kwbGObClYZQL8lyRZ5oskygl2JoIGhuYzirmS0vSPn9aRn/X2wiH5+nsnIPzEQc72+ymhkmWQ5ajlESJdzeYCCUwOiFJ2gK6NEMZhdQi2pu22cbQTogBF1lhKRFe4IJHyIAInhAEzkdNl5AC0Q7Sueyxt7VLlqY0VYNtXl6zWe9Ylyvmqyll8/0E90IqlFZUTcVitWi32IZYpynee4RUSKmRQqG0ia4ZSrMznrA7mqCkIHj3NaB6KxHA+0BVVsymM85Ozzg+PuXs6ooqhOvkoxGwMx7QG40xSYY2GWmWsnv4EPercy6OfsvxkeM5r8c+bnI8cANSNyURnWFUB3A3Gd7u982mHSU3nsVd05AeX2sifA2KO1eKN50GLlkjKFl4R69yyPbdGyoaqhYap+hWo+kwG0fy/b7LrtgwDREvKhExXV/Cwv+wu/FNXisQhLc9BsgQ2V7jIui8zsCH9suTkUVWobXviORBBJgtYPXdWOVAWUgCZA7y0JqEB9AWb1dUzRTPFTADVcU03
UEG270WGEtYCxi6KL+oRbR8kwKSrC32a4HxdAmzVQTKXsT9y1Tcx0Dcp8TH4xOdPlnHC1eJWPinkvbLDi0L3n75txDelpw8+z29zCJDQNYBWQSsWZMdSPbrHfpTWFY5J82K81BREYhLNMFNfieB6+u6y9G0x/dSV8FXGeVXxUyvAtcOUL96vN3fN2NTWPXq3bypNX41rfBjAGNPHSrquo7uTj4K+pSUKCXRSqKUQiuDCCCDJE0SjBaIthGFbQRKSaRUGAMKhdMebwMCEe1IpcQ1FuFdXAa0+vxuvYgUUeccAo1zKGXoyxG5v6D5kdsI/frhPYKQWCEJOkWN9zGTfdRoHzXcRSQ9rHMU6yvOLqfMFssoaVM6elMDqREMUkUvlfRShcSyWqxwPqB0Qp4LtM5InUcJxXxaEfwV3fUjuClF/e7ROkWoAcFbCN+P7/XAc2AV4hq889z/MUmKnyQwFsBQwDhRDEcpiRFoFUhSwaAnGfYSev0BWifMFiXzeQHOIb3j7MsFs/L2TmkVGj69OGH/coewv03eG+KzPEoS3qEaRgpJIhOUiDZy3kdfQhkEGokWEkSEJIQAznc/gJiC9HikiGyEDyGmlIxCqTiI95Me460h3lsuz89YrV6uFZ1XKz7+9GOenXzFfLX4XvuvjcZkCi8slS0p6oKqrqJ/sQCjNVkWjcOd9WRZDyViy8o7e/sc7u1ipMRZG7vx3HoEnHOs1muWqzVPnzzj8ZMnnK7X1003xnnC9mTEgwcP2NrbJclztMkwSY/R9l3EewWPfnHG2dULyvL1JBWbmY5vSo5uwBkqbhwrOrlEZ+/WAeoOGBfcWLS1yqOvTc3d+3YD3pueVd/+d8WCNRURLoLf+FRBgiZHtrC5iZzGxqe+3qd3bhsNsRjPO6h1xIw9BOu3D1MBGDKgLwbIoLngguptTqLCEbu82FZf2357wUPSWqcpGQGyabVmMkTG1Tmo61aI3V5NSYiuD4N2Uz5KHkJDVa5o3IzQ9Ug0AfZSuJvBlokNP1Ye1h4q1/YzV9AYMAbRHyLTHCMkynnqqyX2ckmoWus2FeJ7BgfrKnrv+UBsQCHafW9dLby8+V366Iah/K0C42Ar/r//r/+Vv/6P/wHcEi0laTZgMNhme3jA1p/tsa0fcTdo9p5fcvT4iKPZOVfhCstzYrlsj5jTmRDzO4abIr0up7MZHVP7TbrgV8HxqwzzNwHlV4H2JtD1rzyPjX3r3Dh+jPDUrCjLgrqs8dYhfECFmGVSQiKVQUiFkYpEGpKWLFEEfF1TO4sxBm1MvIa8Q7aNbAQSiW/HVR+L50NAhIDwXdfYgBTR1997T13VCCEYpAPGRZ8lxY/KGb/30Z/gpcLqDJf20cNd5HCH0ktWteXi4ozjkzPOzi+Yz+eURYlOMtI8j6SYbUi0YJgbhv2U0bBHL0txjSPLRmjTx7lAYxuqqiLRBkmgXEiS5pycwBZR4uCJUrZnfLP7UtLfYjjaw1c1y/kpjb36XsfadWXtZoQfO3P3kwTGmYD9QU5vkJLmCYn0GGHJ0kA/k0yGCTvbQ3q9AYtlzWxYYq3DWcvwpIzdAG4xzssFv/3qK3SasLV/QKOSaBf6Dr8+JQRaaJRoV/ot6LXOI50kqJZJEuKaVQohPvaIFsIHRAuMnQ8EIZBKIXUsmkqNRhm4mp5zPj1junr54rZ4npwfIS8Fzn+/IcOkhn4/pTdIyYxhOOrTNAWj4YT9vV32dnbYGo8ZjbfIsz5NYzFSkqcJh/s7HOzuoIRoW1a+nfMuBDRVzWq95vz8gsurKS4EjBDsjgbs7+2yu7/LweEhg/EEneZIk6F0j3SwA3s1+48+4NHzS2aPSxb+uy3FuqlJbfxtM+m5eYa76bWTSHTgt+TlhGnBDWiGG0DcFacaNhy0uP0BKLQuyuGVIwitU8XNBN8ZxHUA4fUYq26BkBIxVlW3zT+ALTQe9wOms9c7I31G/KL/K8a9bWrnKee/o7K3a234UqgAW
YjsruRGFiFbvbAhao6DbOURqj3NLZsru6YbcfEcdb0y0uwD2mK+OrYWXC/wYUZU+nkQJWRNZIfHPoLqdggS2pOrBlcW1H6BSHOyyYj+OGfUy8m0YjEdMLtasSgtvmnBuvFRUjFbw6KKDHSpYtpLtY1FfKuslF3hXetdbFx04BC3d+XayxPsZbfMhAWCizTndP899h/9ijt37nG4vUfe2+IwDWSPJcNLz3kzZ9VecTfXcJfL6ZzIOxC6qf34JmDcxbcxwa/+vvnz2wD45me9Cpa7ZbHf2N4lHAl41hTVmrqscLUluHhdCwfSB6SMxeCJlGRGo6VE4ME1sTbGSURwSALBebx17WGIeJ07TwgSQpRgSDyKEOftth26EBIhZfxrCChl6OV9RuWQ03BJ9SNC47sf/SlBaVzSx6UDyIY4k3Nyfsn84gVPnz7hyeefsLw6JrhIkVRiyCoZxOOzFrSk6GdUwz7eTmA8Jk1z+oN+7K7uHU1dkyQFidZkSYIdZPSXgdH6koPGc9CuaU+4kbO9fKWI2ANgZ5t6uaYpF98bGHfxUyi8g58oMDbaMJiMSA1o5Uh1IDOaXgp5ArnRDLOEYS9Feom3UDeOxjqSpFNT3l644Pji/DlZknL33gMeeHB0BSbvJiQKGTQyqJZ1k9cYGFTUHPo42IrQblIhhEKKyEm6doD03mGdj6yxkteMsQ+WxfqKx8+f8Hx+Qh2+7sViw5uV7WepZmtrwN37B0xGQ0ajjMX0kO3JDgcHd/jgg4949OABg+E4Dno+IIUg0ZLhIGfYTyF4bC3fCjAWQmCMQSpJ09RUdRXZBGC7P+DhB++ztb0dCzpGY3TWR5gMoTKCyvGqwush+fYdDt6/w92rJzy/8n/Ua3fzSDrAuvl4M/nZgdlrxpSbRhfdSnsTFMONvWGPiIGGsv33ljh81Z3izaLzg4nFSHECcm2JXSeE7UBC58G1eY++/gXV6amz9t0KItMQC/Iatokc3vc3UkyAbaKy7du/NUnKTvKQgwe/YDzepm4CR19ccTl9i8A4UYgtgxxHDa53LoIIF+Ip97RSiQg6hYlgLHgRO9opEcFwk0Rw3AcGApEYci2Rc0vdFNTzOayuiNxQAwhoprA8g/ICEwxSBirRgFuhF2ckl0dUl8eEy1PCaIhzJSEN6Mke2WRE6AkYaSgcy9IhhKeXBhSOcj2kWjb4qWurJxPQrXSi82qm1RanHpXETZsoC7udEMD7xKvoCd1dF6o162e/5/HRF3w1GDPaP2Bn95CD7QPGW/uMlvscXjScHS05ny9Y+CWWC2JdfU5Uwm8R77pNgLz5ua9qiMPGv/Ed/9797JbVHSjezP8k3DQh0Ruv78pyu5ZA3Siy5l2XOwUKinpJvS6wVU1obJTcNA5fx+YvQTgaY1HW4lWsWBAiyh+QqvVPd/jG4erYFlTL2CQqtHKCAJEdxqFFiLWhIRDa5bsQAq0VSZJA5pA+kKkemdVUP6I7xfYHf4bQCeR9gskpGsdsXTJbLPjs88/4/OPfUs6e8VJFS6jb1vAJiNg8S8oorzAmI80HDPrDa/KpKAuEkCgpImE1HCCaMXqdYpYvUMUxrqyoSiirth/PqyEMw8GIfp4RygL50yxh/l7x0wTGSUreH5CICukbUi3ppZo8k2QpZElCnib0s5S6CiRKxs6i4Qbk3XbY4Hh8ccrjsymPFmse1DXevbsL4BrsIpBCxJVuO7iKDWAsgkAEGcHzBjiOtjjyZTcKQLYVugA+NCzXU04W3wyKXyu+xRYkMYrRsMfB4Q53Dg/YngyoVhU723sc7t/lzuE9Dg/26fdHCKmvU+5SBIwOaO3x1iLw+O/JVr9uaK3b6yeglWQyGbN7OebRw4e8/9EvyXs5EEh6fWSStineyHQ5DFblmNEuW/feY/fogu3ZnAv/7dNNeGV7VcPludHUdt9Gx5huFtl1wPjbOCNNnCJ7Cno9KF20rRXuZb7pTSPlYZywUK0LRoOhpsBQUOIRB
FKiUrv+lr18veiOv+amoLDipsgwb3+eEpmN1wtBBMX3iRfwEd98RgQZu0xGdxjt7jMcb+Es9E62YPo2+Pc2MsXWjiEZaYLz1E2grj0+gJIRKMRFsYiFuKrVnTtBve7kxSF+4d7BUMOOIVOKXe/xi5Kr1Zz64hKqKTcLgwD2Ci6+gukdxE6GTiW1awjNCrU4Izn9kvr5Yzh6Bls7NIMedneMEyPCQKLyhLSn6BUeu/Yo4RmlgUR6qsazKjzTgYXL1kRQp5BGyYbQAonHiEA/CwzzQJ4GssRze8O8BD4ggsMpcVHQRQBbYqcll/NTFosjwp/9hruHjxg/2GNQGPIvZkw+11xeNEztnDk1DSUv37Gdn0xnuPjqMvTVbMmrDO+rz918XieU6sBtNyKMuXH27wBz1T5v0W5dArv7/W31FP22aKjckqoosGVFaKL9n28svpZ40VZaNAJ0bFolW3clrTVSa7zwsaC3tjRljQwS2boydPyJlAIpJEr4tsN5e88g2v9Hq0mTGGgcwgRSlZFZ86O6U0ze+zVSG1Tew0vN5dWUeXXCYrXm+MULytlzvl7m3bnX90GqOD60ue3QNvuROkElKS60V5MQaKNRicYYgxIQyoywHkKRU62fsV6VrJYBOw/IhusMNIBOeoxHQ3pZSqVl7Or+M4+fJDDWSqFkIJGBRApSLTFKY3QSm1fkPZTpEYShqpfMFgvKBhovKeq3B1YX5Zy//Lu/YTIZsLc1ZDWfvbXPejWC9wTnW7Y0tKoJ0Xata7cQKWSxyRqHltkOMoJpAVJ6vIwaYyHF9Tjtg6OqS2r/A5iDbzn9wVmUFAwHPXZ3thhkGb5x7G7tsbd7yGg0wWgV015IpJAIAkJEFw1nHbauqary7S1IBCSpYTwZ8ei9B/z35b/k4aP77B7us3/vkHVZcXF5hQux4YqzFtdU2FpjraUOEEyGGW7RnwwY6zn9OnJI3xadNnhTA7wZFVGrvCROrwk3wLCbfr8JVHfRFeY5iIkFGTvrlu6GbV1zo1V+k3jQvw8+4K0jOIsMS0Tw9MmwZDRoLCkWgaPCUlGzxlO1V7LmRhW9Gd3ED126txtzu6LDjhHPiU4VgogBU74PMA7EM3FBVNB921QoSUkQdcVyeoqwBcILbHH5Ha/54aF6mkd7Cb1RireOynqqRuMAqbv7O4AXhCDwSEoPaxcohacpG+rZHH9+CssZuCEkE2SSkjlDc3GO/fIxPH0CbspLV4K/gM/+Fnp9mnSIu5sTigaKFXpxSnL1GH32BSyeAzV+Pme9rrgsLUXp4m5ZT+nBSYkNgaumRgob6yQUMGgBsUohzZCZJk8VQw0D39APliwJZAkkmURnCqlua/aVRCfVfeJd8HfE6+CV79MHmuMzvpj/Hzwf/Q15b0yejskY0NsZsD0ZcxAOmK09l8uK2aKgcovo7HHdQrrTIWfcsMgd+N3UEG92NhG8rFnedLPo2OAVcTF3RMyXOOCD2EmwqSGcEZ3AL4l3RTdy/BgSis3wNGFFXVX4pgFnwTlcY2kq8KEihCb2o9ESozVa34C3zrow2ADOInxcgsfCmiiN6OY+ZCzs01phjEZrh1YS02qYayGRISqTlRRoIXg7FNvrx+jgkCAkQShq67Ao1k2gcgqvcxAjCF2pdmdHs5F1kApPoKhK/ByQisY65vMFWZbjnKOpSoQIGCNJE4PUGqUNpAOQilpLnO6xlHNWlITQkCw9pfWEUCNEYLJ1yGg0JNOKLE1IzW11lPjx4icHjAUC1bY51crHAmYlUVKjdIpO+qi0DyqncYplUXM1XVA0gUYkrOq3lw4KBF5cfsV//e3f8ovDHaaXF2/ts7722T7KCzp5FERgHLUU0Y5CtKtDgYCWOY4McuvT2EoDhBR4FeLrN/LoHkfjKvxbuKCD8ygBvSxjazzG9/qIINjZ3mN3ex8tk2ibY5t4jDK0uxZ5UE9DVZUURYm1lr1b30MI3pEYxWQ8JEs1k8mQ1XpN2s/QW
cqz4xOuliuausF5h3M1vikItcC6hsYHgknRgwm9yYRJdsS4Diz4dtDZTUvfBUodcerrtLUd57r5LX3bN9a9dk2Um1oPpY1/W7Zb1+PrTYHx/f19vPU0ZU1TFfimxjdrlErQOsPJjFqk1F7gbE3jampbUwWLI7TNTWY4Ljf2InLAsnVuDq2CWlFcP+oKC3tEdUC/7f/gbZTkfkvy4lti1b7jtxfRSSQZglCuWZy/wC0zEqloysvvfc6+TyR9xaOdhMHI4J2ksoHSRVmx13HSFx6CCzRNoGo8Sxe1mqZxBFGyXF+yeP4pnLyAeg/0HchHGHLs8Qn2yy9g/jm89B0ALGHxt/DxiLD/Hq63C84iqyVmfUYye4JZPEWEU0KRwnJNXVima8u8cNFMwnq8kzg0znuqxkKo2vW8QvQMcpCjsxyT5QxzzU4G28ozctD3MXMUV0EG0Uuj1dathACdA/fADolLrL8kguNXIoBfVRSrioILkAIz7rP78H3e++hPuX/3Q+6sJcvTNYuPXzB7csWFO+UCR4MitjGACMQ3BUybeuNumdzV5muiLOPlBeLNqNGVRT1ut+76DZCPwF2A+4wbwPzTChdKmrrCW0twMaPhbENdO7yv8L6KToRK4pKogVUC0KotpHMIPDL4ttguevbyiievbLMrxmiM9hhtMUpilEJLFW0lA21JsEBuTI0/FrxTvT7Oe5raUVQN81XB5WzNqrJ4kYDZgloSZ4Zu4dRVmTgEDm9rgq2x5ZKmWjGfTTFJijEJUgjAkySGXi+j1+vhfCAPeYsNenijsG7AKl2yqObYvKBvPJmLGnCTSA4OdhkNY+fAPE3IjOGduPW8xfjJAeMArMqay9kK2RckvWg3Zm1AOUGOonSSq3WN9JbLxZr5umRZOta2ZFa83XRQCI7Pn33Ov//3ik+/evJWP2szkiQhyzJ06xMa7RhFHKx9wIk4KEjZgmIp2/2FEGIVbiSUI4DGQQg+Vue2SDuE8Has0ACjFInWcSBSEudiAWG8fRw+WJwFLzwhhOieEWKhhAgN+IaiWLNarbDN2/mOQ3DRuUNBmiVsbY8YjHKk1jghMFrH6Sh4hAhoBYkOZMbjETQhQTIklZ7J3j77O59yvmg4C98Ot74PGO24ge8bM+KUOayihewytE0xiPxR5zL7pt98NzkpFaK9bhPwrkEgCdIilSdREqU1TguMkyS1IWs8NgRsiNfAkjXxTEUe2DCizxCuVWsFkgWaKeBouLGxG2Yw2dJIpcjLgJs3XNaB13fa9vBHnh0lKQ3CrnErj/UNWmeR6XqLoURgoAMD4/HSoyVIJygJ1MLG4krZehq4tpekCNFOsq5oLs6pvvwc/uav4Pz38GwHPr9DuXvIenxA/eQYf/pb4GO+DggDcAnzZ4jpBb1qhpYNqbxiLC7piykNS2Y02AD4BFyGr1L8OkGIgHAOrTSJ1lgFVZPgfXTI0UoySBXDTGMShUoD/cwzyWBiAuMgyYOmbhcDpZeUhcfdZrpWShBpC4z3gXu8rPjv7o5XFPw+0FwtOV7/jtnZCc8fPmFrfMgk32P8QcZO9pC759s8n654UdfMMXiW3LDCXfuejmHrlrEXwHG79YEPiXrlNXEpe0m8gztd8ax9vAl8j2Jx43WJ7k8zBCBCPNdCeoRsc0FBtllRFS1GW2KoS/3HH3GeEyqyvVqHdkCNMiNERwzRgl6JFgotJYkQpEKSK01PG6oQCHUNTYPyLjYPasWKP1b87//b/4ZznrIFxS+OTnn61THPvvic+fFn0HS1AJuzSPd7TWg6V6k4stdLQb1ss8ttplkgEEpFEiPJSJIck+RIZVCyYxoiYVU3NT54lDIkScJoPGA8HjCZDMizFFu2s9w/SineRgSWVcnJlUWTkidZZEa8g9qRWYGqPatyhavWXMyXLMuG6bLiau1YNW9f97toCv7jx7+l8O9mBS6FYDgY0B/0SZK0TReHFvCC3zDiEuJaaIEQrbbIB3zwBBFQq
h1MAnjn8c7dUND8MID0bSGAzBhSbVBCXt9o3nmquqQs12iZtIOhwjuPEJE5FiGAbwiuZr1aspjPqX9Ig/ZvjYAPFu8bghAgPMbEogWkpPaRnbHe4wlIFZ2regkMUgFa45TCJwqZpzT7h8wP+1y+mPKk+i4e8u1HC234kqg37XrUT3mTIrWvR1Us6WSsHod1DVWoERYa59FeorIUZXQsaGzdVbQKeB+N/n3dUIdeq+jTSDJ6DJmIbaQweCGAihD6eO9puMLSlvsZGO702L6zT6INZbEimCkvjtesb+liVkCfgKFEeoWoA1IrlMwiS/UWQ+DRwWKCjedXCKQUBOeprMV6TyqjbaNzYF0svBMB3LpgeXxM/fHHcPJ/gv8ULjRcjnDpeyx2fombXRKa3xLrzr/phHmw5yTLC/abKXlSkqszcn2F1ksKWaMIWNEqvX0fqhzWCYhY9JRmmizXeCFZS4ezAk1DrmAv0+z2FTIJBGVJEscgkQwTGGpBisIV0Kw98zJwVTVUDuK3fysnuCXcBIQesXlHpwlOieC0U64/IwLWmzs6VI71i1OenpxzvDvm0W/+gg/vf8TBn3xEViZsfzJl99NLnpxccuJmlNfKeMdNv9auMG4KfAV8RmSCU1B59H0uT4mtD464Wc5+W3RM8utERmxW0vtjT7z1UKhWFuEROm7IOK9KKVBoooTKR5mgikA4dklsQZ1WIDxeCILtQLRHyEgEtfAvyiSEwghFgiQTip429LRhESCUFdQ1MngMgSRW7/xoPPv/83/+n3HeU9YNRVmyms+oiimhmfN6BgOvYqGN3GS4+RFszLI1FRTXVSebzikdmNZImZFmQ5LEMBz0Odzfp9eL/tK2LPA+km0/b774JwmMY2ONq7JGXDqMCGR5ijQGjCOtPB6HL9fU6wWrVUPtBMvScr7wt9bc44/F8h2BYoC9nR3u3bvHZDwhz7IILkOUnbSGyi1IDgQZrtFtxLuBgMD7OHAIfATMPrT+oTcnTLQT7ttIH2kp0VIhQ5RVRHsdS1NXERgriwgS7wTORaYgTzNSE9uteu9p6ob1ak1VvUlfue+OQGxgYl0Tz08IiNApd1tGvatjFnHQ1lpiTNy0EGgfCEIjFfRGI7b3J+xNpmydfP820W8aXXnPq2coEMHwvP23rtzmNvapWC+iTEfptnsgrYOoIwQLTYMwDqXi4kK2Yp0AkVEktO0RJPWGhZXGkJocrROCkPhgcF7Q1DO8v8K3x5tkgv7WhPGd98m0oJgdsVgtIrH0Aw5QsNF6GshxQEliRkx2tphsHzAa7XL6uOSzF09/2Id9537EgtsgwBIN8WwI1N5R2wbn/XWjA4RCtWBSeKh8g706h7OvwB9xXcoZCigXrF+sCWFJBH3fsf9+Tn9xyu7iOZmaYupjxOJLbHGMdev4jbopXD2Ho8dR/2ktItGxk1lIyPMUkUjyLGpmdbBkIrCTJWynCikDQViUh9RLdBA4ISmloJCwEpKl9yyqQGVv8Vx3WOAaGB8S9cBd8/EBkdndBXaI4PgZEXhuABTnqU+vePzf/pr5xSknDx6yO9gnGaYM38u4I3qo85KjakHBKfHKGrWfUxLZ4LP2fTvgXYL7PfgXG3+/DfKn0z1PiAuB+8QFwLsMQSp6ZGlCYjTaSJTuCkhFLLQTrSAq2GgtqmJWSMpYWK6Ujj7GyhOExWHx1uEJG71wWlgs4mWphGy9kgVGShIlkSEQnMXb5trrWCNJuG2Pq9ePj3/779vfXkdwd1vRjczf8FkBvCso1w1SQlGMsLahqQNNZVkt5hSrBbYqeJdWtm8jfpLAGOLXclk51PmKSd/RG/YQxmOKOBk0i4JmVeC8xwtDUQvm9ucs9/7mMMbwm9/8hoO9fYajIb08Q8nOsUFGkNxJjVsg4INHBIEMrd6Y0LLG4IKIVjVdpyEhW61RLE6Q4u2wXxIRm5A4T7CulXB4bFNTFWusaiBI6sqyXJYoqdnfP6DXyxBeg7dRn
9i2ar71COCspW6alv0M4G00hjcdY9ktHiIIkVK1tlIqWmd518pTAibNGe8dsLt/xNZpRRLerE309wlNnOJ6RL7p1aVbV7BWEKfg2xrw1+UCIRQ6yVFCRe9QEkI0UkJ6h7INQmoC8Rx5G/Cuy3w4CBWyLc8LeEK08UdK2TJCAusV1huiJZxEE6vMTQ75eMDo8BGp9Eg/RWn/g2vsNREedeDYEwjU5KMe9z78kHvv/5KdwwfMByl/efSXuDd1cvljISVBKiySMkjWLrByULpA43yUAAkIMgKL1BhSocAHKmkRqymsz/h6Sn1NFT5rofYfm3QLRqsXbF30kM0RfvWU8uRzllcrlraV+PhTOP7r+F7Lc5i9jxwNSIY9EnqYXp806ZP2BhidoXzAiKgNTyRoPNLFJg1CCGwQVEHSKMnCSxZICgRNAPuKhvSHxeaskRHlFJ05ouTGFm3Eja3fB8TFxOP25/r6rZrzOScXf8fp7z7GDPrs3f8F79/7JemvDPsHW1x9ck6x/JL4fXRscacX/qZYQPh+DZW+OUbAAfGqHtMq82lLV3nXcECQMdRbDAYDsjwlSQzaKLRWaNN2QZWaqPuTaC1RKlqxiXb8VdpgTAI+yidEiItG8NHqWrSSC3XjchQL1lsWWcbxXIhWlug8wTm892gSkh9VZfyuXUJeJ2pCmLJe1pyeGfLckKdxfi6XM2aXZzTlnJQbf5SfY/xkgTHEkzqtA1I06NRT155l0VBWDruscOsKpMQHQWF/vJXd24xe3udPfv0n9Hs9CIEszdGqTUnL2LWnE0/IDiRvinxELL6LmotAED5Ooq5zufA3aZXg31JXOWJXIwTCB/A+6sqCx9maulJIaQlBUKwrptMFUmoG/T6Dfg8lImNsraWsKoq3AYwJWGtp6ib2QvA+5pdCQIVAUArXDpjex/PkQ8D6QOPj/tG2jlZSkGQ9hjuHbB1O2P7khH71doFxRuS5Psqhn0oup56zV57TAWO43cKSxpZIqRFOIxTXSbSAx9MgUEhbg5CE4NrCxdB2ZQyEYLEU2KiabV8LNbHIUXmPUCo6LgRFCMk1ME40ZAmkfU0+2iIRATvLUUrwQ+XyHXeiibDBAggYbg3Ze3if+7/8iP2HH/BkNUX+v9NYOPoWIgRB4yXeSZYNrB0UHiov8CGCt9AV3Oq2pW4AHTw9X6NtQeNjR8JXw72myEfS0F+e0j9N8OtnFLMnzC8WHJeRx2xVjFD8Hp4tYH4Ml89J9/YYHewyUNvk/S3yZJtcC4wQBCcJQSCco7YOrCWxFmTAG0mTSYpcsUoVa6tZexXdLW47TSuJVKKUEFIQOSDAv1ro1vmfbLV/e0DUIz/lRp/dLkcDhNpSX844qv6OspiRmRzRSNb2KTFfA+8uUb8F/HfAn4BoW3sLYtWm9dHJ4S01Tvq20AwZpVsMRgN6vR5pmkTLTC2RSkaiRqo4fyFQSsTHrT42tM05pFTRiUnF+Uw6QWi/thC6a2Xj2m8Bc5DE+VGKlliKhJFz0U1FkZCikTQ/W4D35tFJKOBGE9+FJ4QV08tjtBLkqUYFi1svKedXhKak1756+erbfo9P71T3P0b8pIExtOUFQaFbBqRYxzZXsm7Aebz1VM6zbt6Gl8KPH4P+gIf3H6CkYLVatf3iNUprlFIoqRAitCvjyP7GQaVlNEU3+cSLO2p2A95ZmqbBNjbqt4HaNpTV7fs2Rp1zrAyOnWpje04fAsE5rK0RwhK8oK4q6qqAIJjNrjBGkZrYBrSuKuq6pq5vfwkUAGst1jaxuNF7grOE4LAhLiaKsqSqKkLbPrQua8qypjA6MvEyoGglI/0BYfuQ/uEhk/Epk9PAlNtfQRticvdRKni03+PBg12SNOXsrz5nvnAvgfGuVKfHTV+IGT8cHCsJUgaUdK2FYDST8xRYagIO4dK201KDC/baWTXqBx0Fq7atcrc3FRUr1k2BDwKlTazQbgI+GDQZmiWZgZ4OpLKIXR2lRsgUJ
VpLpx9wcJYIdTqf6LR1VZlsjxnvbDPa3Wa0N6Y3GbST9dsJawOLKiB0YFFBGQS1EBHHyDQyrTLKqnwgWiM2DlmW5HXBUAeqrI+vt3lTHkfgUasrxFFCWF5QLdcc2W8SYFRgv4CLY+TsD4zPH3KneI+Juk+vdwcdDmFeYOWYtU8og6LRHq88WWPp1Q1GCmRusH3DcmxYDRJKHJXXWB/P8+15pYqY8UkUWB0px5ZRjGAxLuDb0sZ269JzOZGB7RHvwhMiSD5jk6bxq5KLTz+7rR3+niGJeaQ/BX4BcrvFOx3YDzf2LbfWNOX19sswotcfMxiO6PX7JGka/YllV5QdgW2nEBZBEoLEexE32zq86TYLeqOJuc6cEloZoQs4F7DOYb2/WVxJEQcwpaK9mRBYH/BBIkkx5KgfCRhn+T7W1dh6ybtpvmKQOkcnfbROkNIQgsc1DXU5x/tNK8uAa6acHlcoqVB4tLckvsYQrhtOvWl0/jAX/Di8+U8eGHvaBa2DsqhZry02ODJXYnyND9F+qnB/H2ExZFnKzs52lByUJYTYoMM2ljp4fGzjE5/cpiC7dH+3snY+UNcNjbWtl7GgLEsWiznnZ6cUZUyxrpqasFq+lQVG8AHZgmPVyptliOyxtxaEiL6mTUldldRVg7WW2fSKYb9PP89YLBbUdY2zb2cd6ZzFWYeQIjLCzrWLhuicUNcVTdMQnKOpG6o6AuMyiY0+hIyOG4lW+DCErX36h4/YuvMZ+xcrzt0395n/viGJgHgE3NHwYMvw3geH3H3vQ3YPDhFBcnyx4ui3z/nyJiFw7Y66RZzKu3Kf1Td9yPfZH+la1sW1gKUmUOOpcDQEJCrUBKew1Diaa2As2gm6oX7FJtBjqalCjbZpLMoMAu9lWxYzQMuCVDsyDZoS16yRqg8ywZicRLxu8dG3hyV+ZzmQS+gNevQGfdJejs4TdKZRqbhNpPa1cE3gahWzFMsqcupeR517InRkWAEbYiFjCBZVFMjFirxYs2MCYZixWu5Q+DnhW67C77K3E4Bq5ohTha+nVLjvcHwOCFb07Jdsn52yG86YiFN64j5cnlKpfdZiC0+PUqSsE0GVBNLaUhUNeWJIx33CVp+GHo1wWJ3gRAAvUZ37zm2FkrGNtm6BYlvDEbFwZOIj2Apt5s1z4xs7AbEN3ANRgv8Y+G9EicWPyTMK4gjxCPgNcA/0BEwKoYKw6VzeMhbvdAo1JHJMbziiPxyS93PSJEEphRBRtIQPeBm/CEkExcHHwlLvAk4EbBNopI+LYu9byWAsTPceCB7vPBaHtZ6msTTOYoPHiYBvMwVCS2SiQStCIwhSoVyCIUezakesdxsffvhrFoslR0df0dTnvL3rSYAc0RtsMxzFRYpsXa1sHW1SpxeBsljxMkAPBL/G+hu/fEnMqxh+GNvbB3baW+6Cd7Ms2IyfPDAOwMrWnF0tqAksrIuWISLEhpciFmpVfz9xMbaxLOfLeHFeTfHeY0zCcj5rB2mHDw7nXJQCWNtWhgZ8m+KvG0tRVFRVm6YOgdVqxWw65S//+q+5bBuVlN5R1q9vcPW9ok3VyUD0qW4HQKklUsXGBBCPxzUVZycnPH36FVeXU/b39rh35y7j8Yh+v0+vl7+VXez0riJoBKItaIIgBS74KFFpW4m60DEQ8ZwrHcGR0hKTJAQdLcbygw/Y//Uz3p//jvpJxZc+ppe+bRXccR5wMzcLorKxR2zpPDGSrVyzu5Wxdzhh+2Cf8f5dRlsHDIbbuMayc3CXe1+ccrRsrpPlXe+rMXBPQBoTDTzjjxmVfXc0tkRJg5KmncBEW0ceeQOBJnomKCSGDlZ0CngAg0PQtHrXeCYkBi1SEpORZnm08atrvE0xYYSRa7SaowTU5YrZ5ROyfBdjUobb+xzkZ5wvmx8ssappwbHXeN+jqAKXV1Oy0xOclpydHuHd2xNyNSvL8/MSnycsyyYW0WpJogS5hESDUQKlAeURWNRyBZdX9BdzD
kVD1oNz5Tn1/hslPRlRORs2jnfzGpVAEkpUPcVRXnsqfFOkwB5RZDAMK8T55xSLF9gveoRshE22qNItbD6B3gj6CSE3FI2lLhtG4zHpowf0BodIv40JE4oQKJFYaXBGfM2n9s2jZQx1u/kuz04s3FChBcYSvIqgOFYQx586A6mhtuAKomvEc34cUNy6eqc70H8POf4IX05gmkKTgtEtKywgRN1+vAFvLDvfXeT0kjG94YCsl5OaBKVVdFtpJRCEyIhFxUfAa9V2fPV47/CuxvlAU9uYscKBtwTXRJlgiAsc6xuEDdTWUVYNZVVRNQ11C5C9BJ0asn6OtxZJzJ4qK1Ehjl4/RnzxyV9HEO9q3t6qJZYYCz1AmAyPoqotVVVRFQVNVWCbAtcseB14GitAYvyQO2AJ9NpDTvhuBf7biJ88MAaoCJw31csql9AO3AEk7u+tBqgoCo6PjqjKkrOzU+azGeW6xBhNU1fYpoqVoU1DURYURXktkWiaCNrKqmK1XLMuowzAe8+6WLNYr5iulvh3MCh67xEuAmMtouWOkAqpIjBuG/oRvKWpCo5ePOff/4f/yOOTU3YGA3756D3+yW9+w2/+/J8wHI3fyj6GdkGBJDLuAIgIjF3XJVDGSbFN1TkfcK1mWoe2XakxBBHT+dnee+z9uqDxCcr+N/pflbwIMem6qb+SxPTRBJhICEJgEWgp6SWGUS9hdzJke2fIYGvMcGvMaHeL0c4WJu+DShE6x+icpqgY7d7lzs4XjJYXm5b/1MBAwP0xmGVMRXZup296FTiaCB5CQLTFSgGDICDRKDK0SJAiIXjV/nvMbAgEQQhSL0lwVNetaRWShEQlpGlGnuV4PEIFXJ2gXB8txmi1RklLU62ZXT6hGTkm/S3y8R6Hk5zzZcMRP4xxCLRNUkJCZVMWi4qTk1OcMSyKJS+ePsH7t5fwC6XlxXmFy0qa0rYMlyJVgloJMg0qESgD0liMaOjNlsjLK9LFjMQXpLqiUQXTpv5GYDwBPgJ2RYbWPT5trviEcH2Ndh7Opk1Cdz3ZXo0e0XX3fST7GFZUzENFUVZQToEXcfEkNMJkJMMBbtLHD3qULjYyyA7vkB6sGAtHFhypA4NHyXhPeKUJt9cTOmqLdWcB1jWHaCUG15XNREreA6EBUYESkIgINusZ0UbtS95+me2rkQN3QX+IeP9POPinH7F15x7peJdPPr1i/VdfwemyzSy69kZvvWwF7d/CO9QYCyQj8nxMNhhECYUx1yxlZHs9IWy4J3mPd7HwDiTSgROBunZIWaNEwMiAFNFTvVUXxqJqbwneUTUNRd1Q1DVl01C5BustQQZ0mpAP+gQXi9IrW6JLgbTE2hheHR83G7SEV/71ds5jUf/QXN53RcIN/MvB5HhpqJynbiqW8xmunEH4Lhrn5RBEYGy4ke29aayJi3Pf7inwnYvx246fBTCG7z4hf19BMcDl9JJ/+2//LXVdMV/MSUzCYDBESoFtaqxtIjC2lqqxNLbBOd8WibXMZmgfd408foTjCK1ncmg3Wpsu0drqdPUvVVlycnLMx3/4A58fHVM6z+pqysXit1jnuHvvHoPB4O3sIx1r7GP6X8ZiDy+jtY/suii0v2utMYnBJElkOiC2r/YuFoXohGwwZuvue7H4sChI/R+QRzVrH2/+7todA+8JeG/L8OC9u8hsRO1TpElIeym9Qc5gNGAw7KPzFJ1lZIMe+bCHULol5BUIDcqRD0ds7e8yfH7JyYa1VUH8XCsjHsiJZlRdFfGbxO72LkImGDVAihSTJKRVSggWIQJKJRjVR8gk+kC3HqNIgQ2O2lpcCb3G4hDY6yYIgSAcXjY4UeNdQ2OXWLdGB49SCTLJSLIlWgHNlGKZUlWC9XxNEyw9ASb88FScBZY0TJdz3LOvmC6XPHv6hN5wwO++/Dv8D2mj/sfCBYoCcALKmJkIRtIYgWulsXUTQHvSLKANmMbTryvU/JL6xZdUz/+AL49Q38CfJ
0R29wMhuDMekuRD6uMZ58FRtseeAgMCw2tleLxeXhCvp+4KGwATAeME+gIuyxvXX9/++x6eLWqyQY2+M8duG+pJRiOgCYHJATz4aI9sP+fIzSnPT2j8NgU7yN4+6WAfaW4rayRAqHgzdO3OaFFVBxyFbiuBavBzCC8gPIW6gHo3FrOFF0T/4VdLXt929IhyiV/A4BfkH37EBx8+Ytgf4qThSyOj5UciWwlIW2T4EqizLSh+VzOpQpC3QDjgXQS4RVG37qNR0hJZYxHt15wlMQapVFtLE6J8og0pAo0IKBlai+P2OUS3Idt4iqpmXZYUVU3R1BTrgqquot1hauiNhlHYJQKqXsNCgo162Wg+ebP/8bx3jVlu9jk+bnjzsrMfHh1ra0Rcy1V8U2fThnhEMhabSklQKnqkK8hGY3zep1qc45tzXgeSeiLxUnPT0PxNwxKLejv70R/SmfVN4mcDjP+hhnWWv/nk9z/7wkLfaoldY3HWRhN3eSOAFUiEgGK94tnTp3z8xRNKd3MrrKzl948f8y9OTtje2X4r+xiI84PwIfrrKonWsSDD4Vq2QiBErPw3xpCmKWmW4b2PGmXnaJoGqQRCGdK8x2TvkDxJSL0jM5rVf/otL45rLoiDiCKmnj8cS/7Jv/yIX//Lf4Me7FB6gxcK3Xp7GqORSmK9o/YeaRSkGo9oLfAii+1FIB30GO3vMcy+RC7rjX5IcBlgWkHp4zDeg7a/3JvFnfv3CUETfIb3BteUODtCCodWAiEVBENoO9gFAkIJhIR1WbBcrWlw5KFHbR2WkqhTtlgq6rBGWEtjS9bVFGtLjJRIowjpkLQnMMbh/Yrl8ozLyjOblayL6prFeF0fk+58dBq5rhVDZI0bLtwlV1dz3FRcV8Rf2duw0vqO6ACxl1DqyGwGHVl6HS2mymBxMhYqDaQk9YGRq3GLc06/+oTZ/BklHt0eXzfRBOK1976A97Y0d+7kaKl5cRoYuKj1WxHVqiNgS8S/jQSMQiw9+zzEIjxBBMNpD8zQE7DMTuBpuPHMvksE4YfbsPNLGH8IYrchTBpCBiERDHam7D08o1KK1Rfw1QvDojxk5u/T2/P0kzHprQFjIgCW4qYAQoRoWSDaTSURTNZXEC6A3wOfQNd7MaREIPQupu74DSoGuOs79xDUHdjeYWfUYz8VUC2ZLS1uPgNfgQ6RCe+OtSsW9a222LlWMvIuIkOSxsyDc9RNBMWLucRaj1JxvBNExrhpLLapyRKH1rEjm1TRds23JJAgGjxqJTA6Ng+KuDtgG0ddNayLkuVqzboqKeuKsigpy4rGOVSSkI+HeKDxFrVOwEgoA5ros17RfcOam6Yvmq+PnF2HwncfQ+ADDQejlJ3JABvgfFlyvqo5KS1Tv+n701Z6hCjV8FIShEAaw9ZwzKDf5+zFcy6e1+Ber16j61va1bP8kOjAtWl/f5cY6B+B8c8gfu6gGMDahnJdUKzXlOsCqeMkJHxs+S2EIniYz2c8ffqE09nXwcZ0tebx48cMBm/HiD4QOwTiRds1SbWemRIlBKo1lvfBt17Gsm2xLSHE0jHf2v0I6RAojFaYfo9UC0T5CGzNwckJu5fPuaxjR7ro+gt5Jhnv7rL78BFmvEcVNEGIrskTUsUixqIukVVNwEeAGQISQXABb2Or3bTXY7S7y9Ykxyxv0ueO+JknLYXXEAeBAW8+GDjpo8uJrVoZT4m1BQKHkjEl6r3AeRF9x0Ms9gnCsSoLFuWS0pbUoaFpQTEELGecNQWX1iCFwocGFwoCnsT1cXWPWeGZFYIBDdpY1suSZ6dLTq8svnS4dt7XvB6DkRLBXsJNywXfvjZHMJIJPiiK4NqFUM0Pd0z+I9F4WDhIHVQ+grdaYI1gqT1KeBrn8DIujoy1NOUKFa6Q4gTNCTmehOjQ2xGjnng9HEj4oA9bE4viHLsWmODZaj/eEsHsnRz290Fm4JPYEmIe4P4avihAGLi3DQdj2E4hFB63gOUqT
pgJ0BNw9w784p/DzkcwfgRqAqJNW4g0oHsXZMPfczr9itVJzfHHmgv/S9YJJNkh2gdSZW7p5LauFEpGaYQmguKY/mkdKWyrLS5igZ4tuHGdqHin0gmRkWb32cnvU9QZq1JiwwCfphjRsLueM7gQ1F4g1h5RFEADsgbfgG2iDaV3N64bLsQL4Z1NND2USEGEKAEsCpbLjCACjXUkicYYRausoKlrmrqCALn1WB/QApBRxuZsdFuygHdxrLGS6HQUaJtQWIqqpChLyrKgLCvKusI6i9QKYxJkP9aTlK5GLVJEEscdExQZFkH81t2NZwbX4PL6cedL/e6jK7fc1oKDYcad/S2USTisGqZFzcm85GRRcrZaM60biut6yxVh9YK6miPSPmo8Ic9Ttre3WM2vuBSvf0Td4kFwOwVzXTFf4N26U/wjMP7HeCdRWctqsWA5W7CaLdCZRCRAEzW8SmlCEFxdXvL0+TGF+zr7UnvHZ19+iZJvym1+d4RWchIA4WMRiBAyth0VAq2i64Rr2aTIMIdrf+PYErltvx0cSrgW1MYubn5ri6a+z+6jh9x5csr0ecOKm9V1UXqqpqEREqkVTiiEjF2gtBLXE4XTHi9bv03vkSJ23kME6mAJEtJeymBnm93DLUZHM85u7FW5AJ77OOB0Z7JjSd8kTq9OcFZQlpKyDNTNitqt8aEiYGPLZ0SU9Vw38IgTSff4m6Oh4YrmVQkfYKnwTU1y1SBDzf7YsT2C1czy/GTB2cYomhMLy6747sG1a+gxou0QzA1r7IA7UvNw/wChBhS1oGk83jZ8un7MpX+LrHHtYFpDWsXflQLjQEEjLY1oPWhVYF15wrpmJq5owhFp7wWD7RWmhHEO4wH08rip9iBTDz0LogjMztdMZ1D72OdtT0Bu4HAC7z+Cg4eQb0MyBjJwCVw5OG8gGBj2YzFgaODiOGBOwH8Sz+c28HAHfv0/wZ/+DzC8C/kuiJR48bU5a+ssZXnC/PyUrz4NfP7fBGXfow7uMr5boaQmSW6pHTREQGzardOOhhBBpKuhWcfCOtXAOIPpMLL4tw6ANktvv+WeMCX5/cDBwxGqHlBNFYu5ZLqqSVfnTF6s0DbDZX2UzJGNB9EAa7AzKGdQzaIk5Dqd3nX3u63FxndFrKZIROxoWjcVi9WaJDWx86i1ZFlClpnWtxiauqGuGqRsveQ78ptYm2K9j1k+aL3OBUIEvO9khQ5nLWVdU9masq4pyoqmqRFSYLKULM1IjMELWFYFup+jsgQlE1Jn8FQouuyTo74WKHTu8F0OJuGb1fdvP7oxq6wDdVnh6oYsz9jqjRjuGnac5mEdOLu85PjsnE9PLrh0bVmbX0G9IjQZLpHIUJMZCa4h+NfvG9DVsdxWpMRxY0Fslv6PGuN/jL9X0TjLerVitViwmC/QdQTGXsYMnlQKguDs7Iyz2TenoQLw9PiYpnxTNexrhLiZ7FqVa9Q/y9YfWikIYIxGaxNtyiAWkkl5naIMBIK3BBH1yoKAzjLyyQ6Tw7sc3h9yfn7JSRVZSUcc0KqqxjqHDhH8ShmZLKHj5wsByiuklfjgYqemENntSKK0jHGWMphM2L2zz53Pv2J6Za9B4ZKo+9wiTocdz/Gmy43Pjx7jAjSO6AFKjX/rFJSnZMlR4SkbsFX0GS4XUDQvf3Kb8CbnBhh3EKSDH53EoFsgdA1zu2poCRz+/9n773hJsvSuE/4+55yISHtN+Wo3PdPTM6ORRxIg2EVaISEtCO+EMBr2s2KBd3kXELuAQDDIAAvvIjwIswghzCKxErBCIKTVyI+k0fie9qa6qstenzdNRBzz/nFOZOa9fau7zL1VdbvzV5+ozBsZGebJEyd+5zm/53n6GY8+chaVn2RSG2wdsFXF+oUBG8MjJMa1g+0Sikl8rxQYk6b9LYhr8h9CWVHVY2zvOnn3CqsP3cC836IegTOn4PQp6Peh241KTxxMrgcGL8H2q5EQWw3LpyLJ7azAymlYP
QMnz8DKGeidjuQ464BqQ6WSXl7FWY2yhBs3YM1BvQROQe7hdA6PfBY8+ivg4S+BYgWyLlMe4T1YB1sbsHYDLrwQePlZWHsh4E+ukRdbTCYl1gfcYZKPhhgbiZ2RqOhRDRbcEKrrUK5DW0OnBcoewRNaEf3yHaJBXoOp1n4O3jGpN9mSGyx1alqqjYhQlxYZO4aXAtfWhEm7y0bRYTL0sFXDaAD1FrhtCDvMkjRq4nAQIkE+ahTkLNHK2uhMYZ1jNB7TGsa+VKViGyLE8tAK6jrG0GQuzQAl/bdoBS4G74akJ44JNmIQtXUO66I+2TnLuKoYlyVlXWG9AxGyIgb3NgVGqmBpjXYpum3ybpu81cIP2wg1BQEHGAxDfMrNMhNYRFTcL2Ic0tkMPOwMK3aHI/Juh04vp9NbYjnvYsk4sdzjRL/NeDxme3N3b1MOE+qda2xcbWMIbK+9Bv7+lk5r+uN7adUFMV7gnsA6z2QyZjgcsrOzg4wDXjmcBFwKWnA+8NrlK+xObu7X2yxLBteuHck5KqVjyVGJVZdizmhLUDFMEFIZ7lzR6XTodju0UkS1T5kplIrkOYRAVVYxxZiABCE4h1aKXq/H6TOrnOlvslSG2aSsC9TjEb6a4OuS4B0+RM+xEpNiZtS0+p6z0cMiPpJw8dHbFgJkeU6/3+f0mTM8cq7Hle0tbqT+2xLlFAVRKeeIU9132v1tHWn09BvBMwGuWvCb0DeJIBM9DA1qYiGTplCHIWrxDJEeTIjfKYidb0kk0vMEWWs4dbLNqTOn0a2zVD7HWcFXlle2rsDwlaO7TOdgUENVQhUL0KB0JMY6eay8jdPlbozoHbonXuPsw5d59LFtwhOBzMJyH/o9aOVCYbrUO5rxmmW4PWJ7HBhU0DoDy0vQeUdc2ieg1YdWb7bkPTBdYhYMExM65CHOyAcP9SZsDuHVa3BjK87Y9xU8/Ag8/CT0z4PpR+mFn0ulOy5hOIQLF+BTn4AP/wK88Cz4MbBVUQ132R6P2ZpM8JOSQ3t86TAbGaomhVkAVceo/Oo18C/BWMcTd1c53EGfIQbQfT5INx4/PJ4C+m4Q4/OT9skGJq9e4+Xrm+hWl1Z/FehS+oxQBzZ3RpgwxoqnFIcNdST4U+9wY/CMODQ+QSxOcpJ7Q4z7dE3Ur7ZbLZQW6qqiLEvanSJJ0WKVUx9ikLBzFkdMq+YVoBWSGbQ2MXwsyd/wgeBJAb4upjqzcYlZmyaMJmWUuhlNnrXo9rp0u91pP17WFZ1+l06/S7vXptPrIFVFVgsqFRrpEdjBcp0J1YGe/fsjpXDEWTEAM/JkGwNc0eJEq0O/7cmAPFOcWOrR0nDt6jUubO+yve8SfD3gxqWnWb/yIt7e38LOE6KneH8G5aPGghgvcE9gQ2A8njAeR52xVw4vljp4ajwhBOrasba+Sf0GxVqOUmukRGIVH61TAYGAd9FD50XFKkwqao2LVkGr3aYoMvJMY71DnEeSu8P7WATEWpvSlqW0PyJ0um1OnT7F6ZMXWV2vGCWt18RCNRpjxyN8OcEbgwSDDQYJHpGY0si5KKNoFlwgOJAQzzcAeZaTZYYTJ07w0EOnOPHKNmvjWSndpDzck1rnflQYOgwEYkDhje3oFe4SqcQ80R8SO7ucSIpPpb830udNCI0jdsaa2eRyDWQZLPcNS/0eWW8Vq7qEoAmlY/nVU8jlRnd4BPAWdksoNdQV0xy7ihhUJRZcCWoCso3prLNUXOb06as8dnpAkUGRQSuDPANDhnbL7FzyjIZjSoShC1Q5rL4Dzr0bTn4OnHh/lEyoPCZmQMcEDpKlRA2NZy9A5uOpTYYwHMPla/DiBbi+GU83z+HUo3DiUchXogQjNKktQiTI29twYw2eexY+8otx2V5LNhg72B6wszNgbWeXujVMv/QhYJ4UTxM2hBgcTAV+HbgUvci7bfCHmeu9C7wX+GxovwuKInquXRW1zJN1qC8DF4hFQ4ZxdmBU4kYlw8EWt
FIZ6xqGTXqtEN6AnzVFzpeIivpTICugDlGeciByNKfod5bp9nq0O0WUprmYWSkG0oUpOfapXKr3Do+PMjWBoBTKGHSWoSXmLw42yom8dfgQCzJZ76lShdeqKhmXJeOqRERoFQWtXofe8hL9fp88yxClaJdjuv0enX6PTr/LeKmLqj12kJF5ReYVk1CRMWHCgHUerBigxumhPeTbJao9QHd66FaXjo6VBXutjH62xLkTy5y4eJXt8oDpj2Bj4a03gRJDCHaPDeZzntztxIonOjXu9bNpQYzfxjjR7lKYjM3RLqWzR3qD1wTGZUyNo4zGZIagMjIBJx7rPEoqMnV/m6RSkiKfo8LUp87WBpe8GB6dqgcarcgyQ54ZsESi6gMSoje3qmtsXRNc9OoarTESMMawvLTEqZNtzpmKSR07gLGH3cEuuzubqF4P1ekCAesdzlZMSo2IRC9IHTutWEI1oDRg4wPB+TjQiIQkZ2VlidW2Ro/tdNStSQQxg1EdSeRx7gxq4GoZfWA50Q+2xszL0KQSEmae4YyZZ7gkDhZKIiHuAssKOhoGNk7x+9GYarCOzk6Q95ZAtXHiMVmzx6MixjVsj2PO3LqauVklSSoaMbRx0B6xFDY5JZssqx06uiIzabM4CYIEgapmtL7LtZfHbG0FspPQewjOvhfOvAe6j0J2NgbaSfouSWkQJManTdNYWbAVbK7Ba5fgmWfgI78AH/sobCQXlgN8FmUXOxNobUUFSKggOKhLuHgJnnsRPv4J+MhH4Pp8nQzn4No16kuXufLqVbYnGTGU8DAgiRQ3xDhEuYq0wPVBdcEZoAR3J5IZzSyDgWZvcrsngMchOwN5kUoTA5LHWQF0JL12FfwZYv6PASmzNtTjuLxp3pXmjj85W9QqqJV4nZLNMlUcCQRYpaNPsdRfotdr02nn5CYGL0ceP7t/JN1O3oVEiAMOjw0eiwcj6MJgVKyOF6yPOYi1AyVRyOVtLBudZiUtnto7TJaRd9qx4l6qYqm1QQTyTptOv0tvuUd/uUc17JMHTW3aUHmk8hS1ZskJte/hqNnGP1ApYxvPcacKFGsDjMnAQzUu8f0eWbdN2xh6RrOaaV4t3e0TWN2i1eqx2ulSb99gWI1QRCFQQWzClhjPcrehqfdDyHGcn4UL3AUyUXzp5/8yzp09x0svvcRzL7/I5d2tIyPHFhiWJVVd4wlo0WijCVowKqAqh1cBlbSy9wMhpGQ8RqGVxtmA9xbvLZXz2CqmYzNGpW0DWsWUbtbamDfaO0IIWOuo6wpb1di6IjhHbjIKrdDa0O/3OXWyx9nONoPt2JGNAwx3xuxurVMsr9Ay0S1nk6QxBrGFpI+NxT+M0eggKfOdj8d3DufT+WWG/tISq92cfGNGjDNiyq0zy7C7Hcnxce8M1omP35PEtGF5iNNwI2YEudHhpQye04T0u8zybwoxhdnSUgxYcxuwW4PdHTPavEJWnCLvnkEbg8KhjT7aC/MVDIbxB3J1Sq0VUs7qdnTj+gBZQFZLVhiyHEYUrkRbAWXwwWNTsgVxCj2u2blacuWFwHgAy6fh5GPRU7z6bpA+uDa4lOK3cVDDXMrbFKNWlTAewNXX4DOfgl/6JfilX4DLryWvMFAHGLlIim9sgM2ASZRJ+BrKETz9LHzkY/CJT8P163Myiwa763DxCuPzVxlPcg4HMkvLNhd7F9O0tcD3QS1FY7xh8XRDbHnQZFWJO+sB74DssVglT0w0mveR8GZLYDrp+ALi4+cIaBN1LHkfwlkI74ze6moA9TbYF4BneX1qsPlBmmJaAIQnoXgE8hOQ9RPxVmBtylZxlPQuR3GO5c5J+ktL9LoFrbZGEeLISFKchsSZO0kV8JpKo56Yk7/2Dhs8QQsqNzGZiIKgPTiPS8RYE4MnvUhUWYhEchw8Rgl5p027n0q7F7EUNUDebtHud+kt9Vla6lMPlymlRWlK7KiiHpcYp9AOZLREXnsusc3aA0WNYx+3DrRGD
nN9E3EBV5aoekKXFfrdDl0jrLYy8t3qltNZAkjWZfWxd3Om32M100wuw/aVixjvWdXQTk28dDFG+Cr3vuTN3eK4PwsXuEN44pSV0Zput8NSu8O13a0j0/F4YHM05KWXXqKsJzFdm0AwMSuFt4FyUvPKxSv3vC56g6qqmYwnKKXJMkPwjuAdtfdM6opxOWY8GeNcze7ugN1BQSYOb3PKqmRSVvj0cHHOxWIrdZySCt7jRKiDil7KoqDTbbHUhv52fORWwHhUsb2xRvvEaYpeH42PqUZ98hz7kCQfCpemSwMxLXRIWrq6rrHORVlHgDwr6LUz2kSS2DyuuxKn1lsaevX9Chk5PNTEmfleDif60J/AyiRmTdgiXnuTXKnRFTfSigGz7BO7wEhitoS8H2PexjswntSsXX+ekGfobk5bnUYrjVE7HKGQAhjD1mbMijB9AAtNMReUiV5OLYSwwq4Xro4VL6y3KP0a3WyXlh6TicUERzYxsKtYfzVn68oWLlS0T0cpxRZgK8FP4kDMZFBoyHUMbsxkJjvxiU9trMG11+CZz8DHPg4f/yRcvjIjxQC7FTz7IlQGXrgO7R7YHfC76SHk4KUL8NSLURJzYDFOX8FOBYMQ874dGpqDyexlSo5zMMtQnyXepWP2blQQW9BDwONpXVNHUgFdyM9A/0Ry2+sU0RnS/rO4bpoabu48mqp7SgANKgM6UeRd9mBo40CJNWZ+tQ6zBFchHl9WQJ2B1jnonIxEXBXTinI0Gt0j1QScoKNP019ept3v0erltFoGfMxtr0yGaI2k0tySKhuKT0RZFErrmCDNx5m82P01ep5Y+MMohTKKIAFVV0gZq5hqozFGx6JMmSHPM7TWOO+p6oqMDG0MJjN0Ol2WlpYYrZ4gVJZJPmFcTBjt7BIU1KMJk3LMwO6wwy6jB4wUN9ghycdGllZrSFEYbLdAhUCeGbrtDiv9Hq2NEeNbrXoohu7ph3n0sXewkmd0XU053KTYymm5CaeWMrpFhvUwLh2yVWJtLAT0IElO3gz3hRgf4aTjArcIHwLXrl+nVRSMhiMEIUPFaaojwqa1PP3Sy1y88lqcEgsh5dGP3gHr4Prw/o0tJ2XJcDSCpEFTKkYT1bamnER99Gg4otSK7e0dNgqNtyVlK6d2ltrZPQ3b+ZhOLYQYmOeci4EiRAlE0SrodoSeBFohErtybNleX2P53A6r/hxaJBHgAKl6ICkwMPg0xRhAuYCva8qypKqqVCEq5g3WWUa3nbEksB3iY3wVaDX1C3T0KR13YgyR2Pb6cP5hWClhdQeWb8CGjWW4N9I2Q2akuCB6VxrlaA0MQ5w9152YOcFUMJzA6PoOIX+a9rImzx8mb3Uw6qiVhpPoLRU7F3hnYnCYmoDRMUeaykCdYTM8wou752lffZTN3Q3OZjusmgEtdin8GNm1uG3LjVdPs7s+IuTbtKttdFWxu2VQrRzXA9+taRdb9AtH38QAup5OaX+TqmMygutX4dlnIyH+6MfhpVeiE3LP7xLghUs5lzdaZJ8cI6qmHkauW6goq90pI9e7OXysSjPOoDxkPWxy0s6q3aVFGSiWYfJo1I9wnWnyOXUSzCnITkbybPrx9wlN6q6UgDwzkOlZE5nmSW4O3vQbcwS9Kbbhk0eVdG5KR+mDSmUUJnl0u6s4M4BuRf1LlsdFZylKMo/yDDExgs2lQDzv0v6jnvdo0EfzMMv9U/SWl6OntltQtAzO1jApUXmOZAYxGpXFJQRBvKAkZlwxxtDEfTjnsEmTDClrkFJIk1RNQE+iXZRRmKDJsow8z8izHGNMzOZRVzhbEWjTMrGaabvVot9folw9iXKBcTFmVAwRDbWvGIwtO+UOa2GLddxteVvvJQJxoNv1sDwp6VcV3nlECUVe0O12WV5aom3W2axufuOJ7iOmjXclebfHQ489zkPnztHyjmy0S6vXpb3SpReEs6ttuq2cygrDsSP4beqtknV/55VV7wfuCzFekOL7j
wBcuHqZ0e4uk6piZzykPCJSrIGe1iwZQ0vH4LHK1pTWYWOg9bTC2P3yFgP42lFNSowognVkJubRdC6mRWsq9+GgnlSU45IxAeoyTvcFAIkOnvgWJWAyE8tLo2IAfIj6apW3KLqKQju0jY/SURnY3thgd2ebqqzQ7fgAcM4TvI9FuVwMSEF8TFXkY4Uo76IOuq5qfFMko6whKLI8p6ug72CFGHaTmZiO1adp8iMWBNwTlMAk1S6A6KRbziG3M0Lc6Ikzom+txQHXLoCJzrXWaiTR6zdgfQCjy9vk3U8jXGVpuY+vXzviq7LACEIOQYM3s3PEpiz4GfTa0OpQZ102vebSKCNjlTwfkWVDKgZoP6IaWka7nqt14Kry1GHExnAbc22MLQW3BqFtUd2S1f5Fzi5d4WRrzKqGlQyW8jjLMBrB1ja8+Dw89RQ89Qy8eil6kQ9C7R3buxPYvdO73MF4Fy5vgFu+w30cgOkDSZh31kayrKG9DNXDUPbBPxIJZnYC2qsxTUfRSd7c5LmX+cC3kOQRSf7i0+ckwXdIDPlADzkxANDPsWhJ2gGdR09wnmo0apOWRISLPEY8plmjWKUmxBEKzOQcKhX5UOGIkg+0gIfp5mdZXl2hu7JEa6lLq9Miyw1SVjgvBGMgy5DcoIsMU2Rp8O9iGjcjmMzMlXi22DrVQ4v5KkGrRJCFEHKyLCczGeJjIGVhMkKrTatoYZROVfUqgndopQlZjhYhNzmddpfl5ROoAHm2m7zLFltP2N02OCzbDzApbuCIUoYw9Ez8Nj5At1XQLdqMy4qAoOUNXCKS0119hHavy3gypNUuWOn1aBmDntS4ukaFQKfVoivQaRW0C4PgqbQn03HQe9ycLgspxdsYG5Mhm5PhXufFIUID71g+wdmVFZb7XfLcEILD2orxZMhoPGI0HjMclmxN7n8nE5zHlTWlj2m4fBYLeniV4nGSlldSwHqoPZWUiIseWqWilxaiRk6UkOWadqegKLIYbGRjGJirM7xpoXsZWdsRBnFwMKphe2vA7vY24/EE1ariACL4lLItCjt9qrRHE7gSJKZxcw5ra6oqRnoHOyG4gMoMLQVdF72kPYlOpAkxIOpe16I/KlTAjSEs3YheS19D18CpXsyYsB7itU6InXVTWLiRljaOw4JIqk0b2u2YLGBzHV4Ctncgv7CN8juIVdQjf8SD/cCskEA6Q3Ex9DybxOocKwpOTWA5Q9oFdQY7ocWO19Sqj1M1YzWm8iXbmWKz0Fzr5lxZzRlMLPX2iHI8YaJqrC7R2YiiGPDIqfO88/xrnO+/xInsKicKx2oHWiba4foafPpp+MSn4IUXogf55miELHcKD2495nNbP8Qh9NRRm9zEDbENSe7QWQJTzPTdosDkkYgGiaTXNeGd8y0htSjVRJLN6YebY+k5z/BUfjHXI897kiVl1VaNfCBVRhGJs0iNGLyRX1ifjhtm5DgwE4w3cYDN66GzlzbwMG15iBPLJ+ivLtFd7dFa7pG3YoYEJ5pQe4JRYDJUlmGKgqKdR2IcAlorTKYQCYTg0+C/oi7LlNs99bchIBKzCpEH8rygyAuwHh9ijEfWMWRZjkERaks9LrHWkpucUDi01ojStIo2fnk1VjgVjQRPsBXUEwZrLbTo+xIUdieogEsBtkaOncubcawWhGFlGYzGhAN1SwCarHOGM+cfotUqGO5uoZXEKqLjMeVgB7e1QT6Z0FEGo1tAhrMhyRJHjEcVY3d/HV53ggUxfhvjqAhxAyOK8ydPcerk6jRgzdoSvKCMIcsz8tpRG0cujizcX5F+dMB5rK8I1qGcQXJDMAoVQkwbGyRld5I48+hD9MSEqJWG6JRRaQpPTKxKVxgdi5gosLak1gXkHXS3wHQmqEH8LSYeBtslu4NtyvGErBP1wi4QC32k8tOEGAktEqOgRNQex5NzjqqqEO9QXqGznNwI7TrQIzkX+6B7hlwHutrj6uM/lxOAGzV016ayW4puLEbRH
kPHzZSiI6K2uKGdDT/IiLxhNIaNAQwVXBvBlZS940aAixvQMQGjHbu7Rz0L1rCauTy0DSGTEnQVIw1bY8gDbVVhvML6QOlg4jRjDQ5FGQo2dc5a1mK932L7dIvNHdipagaDEjsZQTkADMY46lNLsFky6GzRDWssZY7VHnSKWOp5Ywueew5efhkG20dqhHTtOzB+LVZxORTs7wXnhX7JE5m3IGsxnQIKxHvQ+UiWrZ2t27OfRFJD4zKL0qwZAU+Bf/NkXMLMy6uYia2nnyWyPpVO6NnfDbFtPNHez5aGEE911DJLUxfSdd2qzvSWsIxwjjZnOdE6yeryMv1+l06vQ6vbJssLRGlUFQiqisF0WY5pFRSdFu1ue+ph10owmYopMOtYGtq7WM2OYFBKEQgEFX9LEUEpTZ5lFHmOr2q8qglkqEwQhKossbZmOBxR13XMaOGhKHK0jlVNs6JF23tsXWOrimo0YpxFKUauMtQxEoUGYl/3Yh2oLm+yO7GI1gxrPxdzmbTskiGqhW71Of3Qo5w7d54iNwzbOeBY6nYptGFoLZPxhCwE2p0u7cyTZYCv0vNnwqgKDLh3FesOCwtifMc4PjfF/UIZPB9/5UXalwxKolYslsqIKc0IAZVm8VqiOS2K676+byNxozRG6VTe2cXKBSEgKchNELSKgW8xI4ShlQlFJjEjRcoG4Z1PUlCFeMtEB5Q4WqZDoXJ03kIVXbJ2D9VuI61tjMR8sBYYjjzj4Yi6nBCsQ4jeEFfVMZeyCCrlM/JAUILoeCvH4JKcurbYusmenCNZgTZCTqCdQ38Vlk53WD55Bi0KKvcGnoPjhW3gNTfLaVxMIneoiJ7gFrPMFNvMJDxN8Y+CmEXh4nV4bQdu2Ei2B6l3t8B1B8vrcaZ97QiL3kU0hKYhxynqjRCrsFkf86VVFcVkg1Mjx5J42sqD0uyqgsxliNI4DLXrQNbGrBhaSmgvKexA4QYVI7awdhNl18mqdcqNTa6Wm1yvL1JulYiNFfG6qfQzwGtXYHBYNmhGJzedvqiIRS8O0wc11+7nuOMUPhHNhkM33uSUmYY0ixM1yPNajOZtyucxnw2t8ezOH3P+uDL3RkL6fO47zXk3MohGtzyPZnvZLxGRZF9JJ+UP2PedQoA+GQ/T4SQ9vcRyr0O/26LbLugUBa28QJkMUl73AIhSZEVGu9Om1++z3O9EeZh1iAREYrGlqUO8mSnzMU9PkEjwnItaYwIorcmKHFvXuDpm6wkhUJYTBoNddnd3GY1G1FXNdm+Lrc1NOt0unU6bopWT5zmiDTov0HmBBcZVjUdot3r0yozxsfEbR1TASzZw5foOBdBWgtMd+t0zhFYfU3TJijZ5ntNuFZw+fYqzZ86S55p6pYdSgeVen1xHLdVY3aAoupxcbbGSeQo7wo4GKAFXe4Z2VlzpOGFBjG8bzRyUcH8y7B0vDL1jWL1+vNg8B3IiUWkrQ2FyTO2owv25jWKGAYVtIrWJEdHxOTj7W0msgqRFUh5jha0t3vqYKs1afKp4551ClEPhydoZ0mqhdUbIW5i8g847mLZQmEArZYaoKijHE6pygq9rREfvRG1rnIt6OKVjCi6XNHjBBJTE2kwmRWA3UdwigsnbFB1DZ1zRXYLeas7y6bOsnnuUdrtPbnKUeiuojCN1XCOS4opYK2EUIn8MxE6voQBl2kYxyzKriN7kl22UtI54PecYAmsVyBpsHvk8YUNgGt928hwHlTTHAaoA4xK9s4Fmg1CVVJVju6VReYsdnZOrAq0KxqrLSDp4I5gTnk47g7yFUhZd32BSXgd3A1XdYFzvUg9GjLeHjNeik1qpmHls9QSsLsH26BA9QquJCK5xgN+h+XUm7K1teBgIewmqEMX3DRMOfkaQp19J71XyCgfmvpwwT2ibz6dl59+EjDYfNVKM6bnJ7PgyR9pl3/HnZRX7pRJNomDmvnMoxDhHWKZDj54UtHNDK1fkRsiVkIkiE5Wy7MR8785ZC
DH1ZKvVotvp0Ov1CD7ErD7eEoLF2hjzAZH0Ts82xAxL3vppfydJzmaMJstyXG6xFdi6Zrg75MqVK6zduMFwOKQsS1qpLHS316Xf79NfXmJpeZlWkVFXFZULTCrHcFxSu4AxBR0pkFAdO/dYIPZpI2DHB071lnjXuz+HlXOPYYoWxuQIAa2g1+2ystwjyzWEGmMU3VYHnGNw4zqgyAvD0uoqy8YhI2FUjXHOMy49O+Hw79R7gQUxvi0Is0TtlgUxvnM0jpdJWra9RaqjzInx5tDJCxxC9GpnxmCMpg7RC9wE4UkI1DamRXNW4jSg0mR59Mx4H3B1ybgukQoQjxJNpiyZihXybBUIQZMVHbr9jJVeRbUZn8UmAyUBV46pqxEq74A2MQglSPREK8HXjrIqY1CeUsmLLIQQvdeCgNJIULR6y5w+f4Jusc5qP6O3skRn9TTdE+foL52m21tNhSreGnDMyiCsERM4pHIJU1mlY9YOG7pZp88a3+wboQY2J/ei44+/IyGl9RITNa5FAe2cqEEawUbBaGB5+foYnXsoHCavKIylZWJ6vpYSvMrwWU62VFCsFuSmQ97q0ek7zGiN7eF1hpPrTOoNJIBRBRYh6Al4hzcB2wK9rOieNehVUKuejQ1HteVn2cruBD6PMzVqMgsUa2xAHzgD7XMxj95hYb7qnW/kDWHqTJ1KHqTpneYIMXMkdc+5Nm/nXLVBJe/u/hMI00wzMRvF3PenXmZ5PbGdJpiWOT7ctOhEfDXJk82MQDfp4polkLzIh1HgoyRwhR0mlOEk/ckqaseRtRVZ25C1c3SmqQOU1jHcHbG7M8SomClJaU2W57RaLYJzWK2xNdjaYbTCmCIVV9Ioo6MzQATnYtCx2BrnXRzkOxdTuGlNnuXYScV4OOTatWs8/+xzvPLKK2xsrzMsdxGlMNqQmYw8z1nqrXD69FlOnjzFUr9Hy2gGG1sMRyW7wzFbu0OGx5AU70cQQ3v1NI++6wkeeed7yPMCrU10BAWXbKcxRtCK6HTJMurxBG3yaQEsnAMVA9ArG9gaVlwdBq5wPFnSghjfFjIOrQzpAnuwX+l3P6C1niPGUZagtcG6OnokvItlSh3T6nPOanymp98VYhBcVZeUVYUPDqU1WufkuiaTGnE1rrQ4L2R5m95Sl9WTFcFGD2YoIFPgqxJbTshMloL7BJGoqWtSsdVVFTVyIXpKtFapbLSjST8lKNr9Vc4+8jj16jJtI3Q7Bd2VM7RXztA7+QgrJx4iLzr3+Re4eySVHClM6cBwL82s4t3+Trv53q0cp8mbfPRBoynfrc9jGhGTQ6cD7Q60upEYVUPYzaGusa7EioAKlHrC0FSgKkRKlKoQHZBc6D3U5/RjK6ysLNPJPFnmGGfbDPQm1m4RxrsEaVMVJyBvw5KKxswcui+0Ti+x9NAySxLoTmryjS0uX72BfW0DNuetMhW/spfdHXDH1wVIK7qk3ZhZkqcWyGlYPg+PPgK9pcMzr6hZ9KVK5ygwzXYwTTOj9hbBUOnapJFShAOVFJEUJ+Y6f8nTjBQy0x7P64CnX5fXrZp6jqcBd825z+mjmw2bkofezxp3c74hpKA8xeH1wJbAGhN2cWEHN5zgbli88qDjjFzpPOOyZjyZMB5X5Hms6KlEMCbmGQ5OI1KB17ha4oxeFgtDxYA7lYKdiSk1y0hUnfcYbdBeUD6glCIzGucsg8GAK69d5pnnnuaFq88wCbuEqSxnZmRBU7zcY7lzhodOPczZk2eQqsSOhmzu7LJWbbNzLCnfXohu0T91nkfe8U6efPJJ8izOHHpvcc7ibI2rK5QSitygszhrs8sAlKa2lsnEM94dUuQQypLtUcm17YqLLsoojiMWxPiWoYje4ihAX+CthyLPaLdbUb/mfcqLCUqiN8GYDK0NSohVz5JUIQSPoGMHnGfJcRNieh9no3QiKMrSIm5MqEtcOaKaWJQuaHeXWFoZ4m2FFQi5odcpUNQoX5MbhWnlW
BtwLj5AhaizU0qjdcznGZ+r8UEpomIeZtEgms7SCq1WgdRjxJUYBZ2Vs2TdVVR7BYoVQnbviLFQkNGlYou7VaAJURe8IvHurEPM33kzwtoU+LibieOKmBPZci8irrMYKUk7Jg/Oc2i1I1lVeeQzdQbjHCYdqPvMbJry8sbaYTgJ0fNpYHM7MFiryJbGFC0FHsY7jnpL4zdVdLe385iSrL+KXulilAEceQbS6jDy3UjAVU2tCrz4VHQkxJQgtED1AD3zTCLJak0Y5BwxmUgMKut0QfdgvAWhBn0Wls7Dw+eRR86heofoMSbsJYrz6dYaT/CUnM6RfM+MQL9Onz/vGW7eJ4IssFdwnMh4YK+HufnsAK78OkzP7QByu0f+0Szz1+yPwDMRgDE1Ezb8Dba3X+HK7hmWrz/G0vIZSueiXjcoEIOIZTw6g0u5iX2IKSmttYQQcxhroyhaBcbopGqJudyds3jnKcvoLa5rG/trYoB0qC12UrGxscHFS5d4/oXneen6s4zD1gHn3LyzTNwWk8EW1wcvkV0oMGgyFM5X1JQcf38xrJ48z6PveAenz55laXlpqusGRQgKGwLWOVSALBhMiMVXRIRJVbO2NWB7vMXm1Ut0lENsxfZwzEujihscP21xgwUxvmUUadHEjvzekeO+MiiJlc5c8PjgqVPg1QKHAyFKKaI3VuFwVHWNdTVuWp40dcbEV+8dVeVRWFzmyUJAUGgVA+Dyoo3UcXK+qhxuMmLXlbhqjC0HUA3RVlBZm6zVodWzBG3QrQ55kePqEluNk95LYUPMaRwDTgJ1XccHSDMb6mlOMgYCOhd5iBJarQ7LJ06QKbDDHVxdojrLONVm4g2qBOPuRYsSWixzojhLv7/CKxvPUPo78ysoYj7mczmc7sFqP5YnvrYOk/qNPbl3ywPuNvHY7aEFnRVQ7XiBxsQ8tTrlz3A+uq8rDXWLKDlovLMqkdHkJ2+KRTjgmmDXAlaXjE3yVjqBup1KII+IldR6qKUVlldW6OY5voolzp3SbI5iv1RazfZQ43dN/H6eUpl1l6G7EotOOBPrnu/a6OEOa8ThxZB4ASoKwZ1O9bhz2OlAVUdP8ZmH0OfOsnzuNK1O7/DMu4cgNh7jObK7J0huzju8R9fLzGO8nyxPsz00coq0/3lH+tSbfJDW96CWeqvDujnSPd1Nk7fNz94f2ZRdAGocm4zcFqOty1zbOkEq8gx0EVml08kYDh7HWp+qhjpsVVOVJUoJWZZRtHLanXZM82Yd1ro4a1d7rI3EuK5rtKrQotAyI8bVeML169d5+aWXee7VZxi6rdu4Akvl7bHyDwtzFSrZk89mity0eM8T7+Zd73onp06epNVqMRoOmZRjnPcE75iUJZPRCK2SPUUwGIL3jMYT1gbbDAZXeQE/jYSwxLv5OGNBjG8ZE+JEd9PU7g0tFeD08gnaWZ5G0TGP46is2K1KSm9xeHzq2Y7/GPb+IAA/+gs/z3MXLlA7l0o7h5RtSfABdoYjtgYDBOi227QLE+WQjYRB65TCR/DBY22N842HVwhOwAe8s3hXI75CE6Olq6qmdgGkikFQaojJLpG3P02r20dnBbX1scNK+T2jvKMhys1VxAdms01IU6kmy2gVOUYJti7x1iKmQEyBNm0y00NUztf87q86UjsXdDnffpiTp07T6nW4sp1T3uGtVAAngTMtWO5GZUHtoAyHU2WpmV2///NDGrIiFnUIIRZ4UHpu2r0hvo1IpM3MnWnSugkz/3ZDilJJYRegssyuOCNatx2/Lz5KMcwIlVu08uAc3sNuCaNxyWg0ph6MoHLRw3yij+ouIZ0+0u4TpMC5LEZBbtewtQs7S1Avg4xAl7HudDtHrSzTPn0KlRcMBwN8WcPSaWT1PMXqCfJOF120OTRoFY/t85TlQeYyTTSkMUQ7NYG5N/UkN2hsPLcPR9Qwu0bfm753EwXEbFcHSCuA2e8lc17h/V7jeRlGIuR7tmmu4148zwIww
O9R5bcJQZiMJlSloy49w1FNUYypqwn1eEKeG2i3yEJTzEjhXUhkODAeOYYDy+5OxaQsYz5jAlpAp2w+k+GQi69e5rkXXuD67msc0SjggUAXOCVwpoB2rgnaMHKwNrHs1J46BIII7zz3EJ/95Lt5/NGHOX36BO1eL3npa8RanI3yvCAxw1KWFxRFEXNGI5S1ZVKPj7Ra7v3CghjfMpre6942AkFYXl6m2+7Mze4FauuYTCxlVVN7m1KF1ThXU7uKKgUhuDQtFZKH+cgcA28BXNvc5Nrm5i1tu717lCFXzcCrhq0Jx0updZPp3ISeXuL82fN0V3oE5RG5/dbY5IXJiFYajiPX2BnBxhBes4ej+20opiMOie9fjm2ZkTUFcRrfMS0PrSSWHM4zsBm4PG6DSVfQI4o/6rmlYqaQbkg0zKybEttlAEN8ZRnubiLO0DKaTGm8D1SVZ3d7B3t9IxrIdGDlFL1H3sWpc48guo0jp7SaSaUoJ2CXPG51Qtg+BaNNMCW0akxX0+q36C936a+sgNHc2N5mZ1QinVWy3klUscJIYrGcQ7NtpqGTQ9FUFJSZVzcQA4tskw+4kVuk7TSzILz9pZ6Vj4SzqXhXB6h8zME8SgOSKbFNv2uTAWPu9KZlqpsVzTokBQumc5npJPZtm/Y9x9WnspZAXCme+/NkSMkSvcLWisnYs7M9jg6gakxtJ3SKAh8EEY0QnRF17Skry3hUMRqWbG1WbG1UjEbjmN/YO4zEtKC2LBnt7vDSSxd55cZL2GNVnPj2sAK8S8PjJw2PPdpn9eRJiu5JLDkb6yM2t4aMJhV1EB5+3+fxxLsf56FzZ1g9dYKs3U/djFBVFVWKkbG2ot0qWF5eotNuU9VVDP720VHzVsSCGD/gUKJodzp0ul0CTKf6vSd5EFMVtODxDTGuJ5TVBFvWuNrirE1TT57a1lTBUoUwlWMsiPICdwOVEqDFLMkBzxuUPxPQmca6isl4hPW3N0HZTNdBfKRuE1P6mjpyhU0PO3d2Ga+DTcc7LeAzuFDdJ82cqEh6dTMUkFjYQWsQk1JseLAOfCvWxA6Np7iO66iZ0fuSvQmDmyuFGaFOiRSFmCN5YqmMUGpNpnKMzmIiA3F4vwt2CKqAfofi9GnOPPQwZ84+xrgUBiNL5aM2UWeC9DSS1dStAqolVMvRbnu6XUOvl1O0MnQrow6BTHKKjiPrnaDon8JJm9rnuHBYqQVLePWnYfOVZOs5otkQSpcq2zVaXNjrqFeJFDeyFQ/T8pjTKnoeah+nNOo6asX9HDF+HamdJ7fzHuO0fZMbmbnzvBkxnt4x6fybtHM+Zh6IRLx5EvyluzHmHcADW4xGG6zfWOPK5VV2dwe0OxmEOMPR73WxVrCVUI7B6AxbB6rKMRpNGA5LNjdLNtZLBoMRk9EQW5ZIsARvqSdjRoNtXnj1FXbd+j2+vnsHA5wBHulp3vnICd79xDnOnD9Pb/VhlOkzHJQMByXjsqJynt6jT7D62CP0Tp2g0+8R8hZVVVHXNuaCRlIqPU+73aK/tESR59gdl2JdYsD6WxELYvygQ5iW+4x9nEwLPBjR6NQJB/EQDCFk1NZQ1FnMp2s9wXqCCzjrsbWlqitsXVO5mto5ah9LDjsCLkT118K7vMCt4on+e7HOMypLttwm5RsQ44Hd4tLlSwRqxnaHsd29rWMF9ga7lcRAO0V0ejVCgcOAJZLsM204dRKuvwbDfcx4XwHfo4FKWSiyLthWJEomBlWCRMIV1IyQaQWuBDcB23gxGxfrXFDe1OdumKsXPLcuj6UYdxXkhtDJCJLhxeBFIZmQKWidUkzyLsr06aw8xNLyQ7S6K0xq4fragPXr29hK40MO0izEVILdHp2eptfTtFoKUyjGtmSwtsvY1tRaI+02vZWTrJx9BEebcaWx/lY1tm8GC1sfgq3D2t+bYT/xvRO80bnu3++t6JXvNzbZHX2az3ymxWQ8oNvt0Gm3aLUKWu2CEydWmAwDu
11LqxijVYb3gnMwmdSMxyUbG2PW10esr2+ztbHB7s429WRMXY6w9ZhxucPV0Sscf/XrzTGNJDCaVtGh3Vqm01ml11+laK+wvCRpjBfV3erkw2TnzqD6PUQrbGjIrkqZlDStVociL+h0WnS6PQTSbHVJkWesLJ2gazQtEyirMYPxiHIq7Tu+WBDjBxzee66ur9FKU/dKYg5bghCaqTvVOBU8IjFhunWW4N1Uy9bElrjgsBLwRuFVTAOWeY9O2lUf/HSKxBHJ8mGSjQXeevjc938Ou6MRly5fYXdjh/INGkvFmNeGr+IpcdR7VPF7YpFuA0ep+HcABlZWoXcNhnMO7hYx+G/AEadskywKqItuyheqIDNplBBA6qjtzaJ3BwXUEot+xHJgxEcm7KXxjSBFsTecsPEYF9ELnbJThLpFbTPGVVQWiBaCCFk7g3yZvFih1z9Hq7VK6TTbGyPWL29QXVyDkEPRh7wDuY/nn8cywFkqA2xywSvPcFyzuTnGV2PoL6E7LVRnmdbKKRxt3EShDjUVyHFzAdzOuR6H6woErnJt7RfZHVwhy7oUeZd+d5WV5ROcPXea3YdKlvp9ijwnMwWIJniNtZ6qtKxvDrh+Y4crl29w+coldnavUbsdXNglMCYwJLBxvy/0SKGATCDPM/KsQ573KPI+rbxHt7tElrUwWY4oDcZQ905RLZ+gbrWplOBdvP8lBaFrY8iyDGM07XaLVrtDXZXUlaWclLSKgofOnaVfnOeh08vYyYjLr73GlevXWd/dZVjXydF2/LAgxg84PIGXrly836exwAI3xbmHH2ZjY51LVy9ThTeQUQAQqPaVxFDM8goLMzXsgyDzCcDWGDZugJ8jYwVRz7csMdjvaImxgawVSaW3TPPXOg++gtpCWcG4BFuSSnxFT7IFQlPsOmOWVWc+Vr15nSfMBVPCWAJbGnRGOYGSOpJx7dNuc8hajPWEweYa+CFhrAlDCBtD2J5A7wS0T0KrFbNqKMBbXOnZHQfQHl3GQJ/dkcVXkvTSGc4aBiMw2zWOLHqM3WEUo1jgwYHHhysMJtdgEutQXl3votVJll55jEceeicnT52k2+lSFG20LtAqR8Tgg7C+vsGVK1e4cOl5BuNPA9dIdS85vknDbh0CLAPLmaLT6ZHlHYQCbxW2rLBViclbSJ6jshzJClyrjZiCIAYfkic5xSSJjqlHM63J8oyiyFE6psmzNsY09Xo9Hn7kYc6fWuHJdz2CchXXX3uN61evsHZjjbWNdQbDXQajMZujEcPa3YPUloeDBTFeYIEF7gp5kYMSRvUu9g5C1NrESOpmEr/x391OUsQmZKyJ0w/MyPWeAP/bPjvYquHijagqIB2nTfK1GpCjnlJpKt1leUxl1sAn8ltamFQwmURS7GpwNulim2LXOZHhNmVN5kugzPvck1YW0veSRnYIIDHVWm2jTENbyDx029AzhDDGjcawG6LmpPYx/RsZZCuQt1J+YqJ+tCqhKqlHlm1qlFYEBFfZmNqNlCmi0owGHmdKHApbG8KhVGlb4MFD0xYtgQnWb7AxeI2dFy7RuXSabmeZVquLMS2yrI0xLbTK2Nze4MbaawzHTwMXeTuQ4XkUwAlguZ3R6XTJsjaEPI6PJxVVMcF0ehitUFksAU/WwusMLwrvJRaw8j5WIFQKoxVZltHKc7Iszi6HANbGCrDdTpfi/HmeeOfDfM5nv5eWgs1HHmbj+lW2NtbY2txgdzhgd7DLtRs3uHx1jWevrTGoH3x6vCDGCyywwF3he//jP2dSjxmWO9zJxNkwLXeDN5JT3C1nnQAX5/pyR9Q1bxFLTR/5Izj4KJVQMdp+mh2htjHNWpoCRScSrAVClgKsUkq2WsCX6Woa7/G8h7gZjswPLZrFxX1NSPvy0QimjsfWRSTu3sCug4GP2RcIxEd2B7J2JPZ5BoUGatAVVFXkvwQQhVI5khuCa6rMFVArwk7FpN6Bepxqggjw3qO2/AL3HXFUZt0z7
AyfY2fYDNympQpjKsxpQfcHn3QdNoSYubyvIG8Z0BoHVD5gvcel8s742H94QLQmaJ2qB84GmbF6qo6JHpUiywwmSSqyLKPKMkLw1HVF8BajoMgM3XZBv1VgvKWVZywt9ThxYpXxeMh4POKRRx7mia0dWp/4NL/wwstU/n7PBb4xFsR4gQUWuCus7V6536dw33BP/FLeRZlEyMHaVNrXR6+wTRkTlEqShuTlVSoSZVvDZJzIdcaMFMfqVgd7jJvcH026/uS7dyljhdPxXEIdH7a2gKoDXsWsC67RK6dHdrYMnSUwGeQG6eaINnhdgq7BCBiFyXOyrEvloKaIxF8UVBIlIoMd2PWwbRPxXuDthYOHvw82xTp6dImyrk4u6NzgVKAOntJbKu9mcRzegY9xHUFpgkrEWAniYm5ipWJOfq1UlFFoHT3HRtNptahaJRCoypK6KjG+AlshrsaYFp1+H6U1RSun2+9SVxW2LgnWEawlzwuubGzx4tqDrfdeEOMFFlhggQcaPqVjSynZnJt5jl3y5pJ0x6Rcx1onLa9Ecqwaock88W1kGc3fYW6beWFK2ib4pHVpCleQcvdm0Vvs1ZxUudln1B9j8pRdRxCj0Rp8pqPnV0l8OCuNMgYtitqEuD9CKorhokxk18JOCeX9L7uywAL3G8KsJq/RoFJA7DTDVKrUOstlHQcWQeJ2Mc2mTJMGNhmvtFJonUhz87cxGK2RAN46vHOE4GKQf/AxU1ZmKFotQrCIBPIsw7kCI0IGnD1zhhP9Pi+vbTzQYhcJr6vxvsACCyywwAILLLDAAm8/LCIYFlhggQUWWGCBBRZYgAUxXmCBBRZYYIEFFlhgAWBBjBdYYIEFFlhggQUWWABYEOMFFlhggQUWWGCBBRYAFsR4gQUWWGCBBRZYYIEFgCMixiLyQRF5y6a7EJF3isj3i8iWiAxF5MdF5IsPcf9vdfv9ZRH5ERFZF5EgIh94g22/UUSeEZFSRJ4VkT98h8dc2DRu9w0i8u9E5ELa7rvv8rhve7uKyHkR+Ssi8pHUJ9wQkR8TkV9zh8d829s0bffPRORpEdkRkV0R+YSI/DER0QdtfwvHXdj19d/5VSLi0/a3nb51YdPpdh9Kn+9f/vgdHndh19m2qyLyN0Xk1cQDLt3tc+vNcFQe438CfOkR7fu+QkROAj8NfA7wPwFflz76cRH5rEM6zFvWfgl/jFhV9/95o41E5BuB7wL+HfA1wPcBf19E/sgdHHNh04jfBzwB/Fdg5xCOu7ArfBHwu4F/D/xO4APEOnEfEpGvvYNjLmwa0Qb+DtGmvw34UeBvAX/jDo+7sOscRCQj9q/X7uKYC5vO8EmiLeaXf3OHx13YlUiKiXzrK4E/D3wV8KeAwVGe3CKP8W1CRP488EHgvSGEF9O6LvAS8BMhhN91H0/vWEBEVAjBi8i7geeBPxhC+O592xjgMvDDIYRvmFv/fwK/CTgfQqjv4Wk/0LgVm85vl95fAn40hPCBe3qyxwi32FZXgN0Qgp1bZ4CngGshhDvyHL9Vcatt9Sbf/dfA14YQ+kd5jscRt2tXEflm4OuJA7pvBrL5NrzAbfWrHwJMCOG/uceneCxxG3b9h8B/D3xuCOEwHDm3hHsmpUju8m8XkW9K07gjEfkhETmTln8rItsiclFE/vS+754Wke8SkefS9y6KyL8SkYcPOPbvkTj1PhGRT4nIb0rTHB86YJ//UEReS+75Z0TkD93C5f1K4PmGFAOEEIbATwFfeyfTUQdcw1vZfjTE7E3wpcBp4Hv3rf8XwEngtjqghU1vb7tbxcKuEELY2k8o0t8fB1533m+GhU3fEOvEGtW3jYVd9xznCaIH7o8Cd+xgWNj0aLCw69Th+AeAf3IvSTHc+5LQvx/4NPFmPAv8TeB7gD7ww8A/Ik6b/VUR+VQI4T+l750gTk3+WeAG8BDwTcDPiMj7QggTABH5KuBfAv8B+JNEYvU3gRbwXHMSIrJEdM+3id7fl
4GvBv6BiBQhhL/zBtfggOqA9WXa3xPAs7doj9vFW8F+t4rPTq+f3rf+qfT6fuDHD+E4byeb3ku8re0qIjlxcPfJQ9zt286mIiLE+tQ94NcC3wD8tcPaf8Lbzq7APwS+L4TwkyLyFYe43wZvR5t+oYhsAx3gaeBvhRD+6SHuH95edv2itP9rIvL9wG8g8q8fBf5ECOHlQzjGwQghHPpCNFTYty4QDWvm1v2NtP7Pz60zwHXgn73B/jXwaPrub51b/7PERiNz674obfehuXXfQmwkT+7b7z8G1ubP8YBj/zVgBJycW6eI0wEB+NKF/W5uv33bvzvt+wMHfPbN6bPWvvUmrf+WhU1vz6YHbHsJ+O5FWz1cu6bt/zLggf92YdM7tynwtWmbkOz5lxdt9e7sSowz2ADOzNvlVve/sOmBn38r8I3AlwG/mRgXs+d6F3a9PbsS47cCMRbm/yLqi78euJCW/p3Y9laWe52u7b+GvVOOz6TX/9KsSJ+/QPzRphCRPyIxKnmXOJX2avrovelzDXwx8O9Csmra3y8RRzPz+Brg54GXRcQ0SzqPk0Rv5M3wD4lE+HtE5AkROQ/8beCd6fOjnHp5K9jvQcPCpkeDt61dReTrgT8DfFsI4acOcddvR5v+FPAlxOCbvwr8KRH5jkPcP7yN7CoiJ4hk6ptDCNfvdn9vgLeNTdOx/0II4R+HEH4ihPDvQwi/HfhB4M+JSO8wjpHwdrJrw09fAr4uhPBfQwj/CvhdwGPEAd6R4F5LKTb3/V29wfpW84eI/DEi+fwbwP+atlfAh+e2OwVkxJHSfuyPuj1DHKncTFt18mYXEEJ4SUR+L/D3iI0P4KPAdxKjJa/c7LuHgGNvv9tAc02r7LXpifS6cQjHmD9Og7eyTe8l3pZ2FZHfCHw38E9DCH/xMPfN29CmIYRt4CPpzx8TkQr4FhH5+yGE1w7pMG8nu347sT/9txKDRmF2rssiMgkxZuZu8Xay6c3wr4HfAnwu8HOHtM+3k13X0+uP7SPqPy8iO8AXHsIxDsS9JsZ3iq8jGuebmhUi8s5926wRf6QzB3z/LLPREUSDXwf+l5sc7w01wiGEfyciPwi8B6hCCC+KyD8ALoYQXn2j794nPFD2u0U0WuLPZi8xbkainzmEY9wNjqNNjwOOrV1F5NcSUwr+ADGV44OCY2vTA/AR4gP9ncBhEeM7xXG06/uBz2NGOuaxRsxQ8VsO4Th3iuNo0zdDePNNjhzH0a5PvcnnRzY7f1yIcYfX51z9g/N/hBCciHwE+O0i8sFmhCEiX0TsROd/1P9MzKP36p1OJ4UQHFFgj4g8RMxj+tfvZF/3AA+c/W4BP0e8UX8vUWzfoNHH/cwRHfdWcRxtehxwLO0qIl9KJBU/Bvy+cJ+i2W+CY2nTm+DLiETjpXt83INwHO36x4GVfes+QAxq/EruLqfxYeA42vRm+L3AGPjUPT7uQTh2dg0hXErn81UiInPn86XAEvCLR3FcOD7E+D8Df1pi3sVfAL4C+B0HbPcXgR8BfkBE/hFxauCDwFX2ji6+k0hkf0pEvpM4uukC7yMGy/zmm52IxKTofw34CWJD+2xipOdTwP9x55d4pHhg7AcgIl9GjHY9l1Z9cdI9EUL4/vRai8i3EAt6vEYkx18B/A/AHwshHJQZ5F7i2Nk0bfd+Zl73NvAOEWnO+ydCCDfe/NKPFMfOriLyPuCHiAO5vw58kYhM9xFC+PAtXvtR4Tja9DcQH9z/kfhA7hPzmf4h4LtCCJdv4/qPCsfOriGEjx/wvS9Pb38i3P88xsfOpiLy3xJjCv5v4BVgmTjQ+E3Anzkkacrd4tjZNeHPEHXL3y8i/yR95zuI2up/dUtXficIRxDRx80jKr9937oPpPXv3rf+Q8BPz/3dBv4BMc3IgFgt5Z3pux/c992vJ/5IJZGs/lbgY8AP7NtulfjjvkzU41wnBnr88Te5NpOOfy0d40WibquzsN+b22/u/MJBywHb/
k/ESNySmPnjjy5seuc2ZRaBftDy5Qu73r5d567tltr0wqa3ZNP3EYnGxXQu14jpoX4voG7Xpgu7vrldONysFG8bmxK1tj9MlPaUwC4xu8PvuZN2urDr67b974ne4QlRwvE9wNk7te2tLG/5ynci8ggxSO47Qgjfdr/P57hhYb/Dx8KmR4OFXQ8fC5seDRZ2PXwsbHo0eDva9S1FjEWkTYy6/FHitOa7gP+NKBz/7BDCUWaMOPZY2O/wsbDp0WBh18PHwqZHg4VdDx8Lmx4NFnaNOC4a41uFI2pW/i4xXUhTqvl3vl1+0LvEwn6Hj4VNjwYLux4+FjY9GizsevhY2PRosLArbzGP8QILLLDAAgsssMACC9wp7nXluwUWWGCBBRZYYIEFFngg8bYjxiLyQRH5ivt9HkcNEfktIvIn79OxPygiIZWIfEvgrXhNDwoWbfX+Q0S+PNnhK29h2yAiH7wHp/XAQES+W0ReOeR9Pp5s+YHD3O/bASLyIRH50P0+j+OGW+nv5vqCL7/b49zp9+833nbEmJin7y1PjInVi+4L2VhggdvEb2HRVo8TvhT4J/f7JO4xvo2YtmqBBd7q+CjxHv/o/T6R+4W3tZdkARCRIoRQ3u/zeLtj8Tu8ORY2ejAQ7n/BknuOEMKLb7ZNKv5kwyJwZ4FjjBDCDvCm9/hbuT8+Vh5jEfl8EfkBEVkXkbGIPCsifzZ99utE5D+JyBURGYnIp0Xkm0REz32/6bD+XJoqeEtOCYrIdxMr7zw8d52vzE2R/DYR+ccicoNUAvRmU4UHTVmJyGkR+fsiclFEyvT6L0SkeINz+hoR2RWRvysix6rd7cM7ReSH0rVcEJG/MH89IvLe1Ea3Uhv9sIh8zfwO5qazPkdE/kuq+PNv02dfLSI/KyLb6RjPishf2Pf9zxeR/yAim+kYPyOx+tKxw6Kt3juIyHtS27wuIhMReVVEvm/ftGonXfdaWr5XRFb27WdPvznXnj9XRH489b9XRORbH3T7ici7U3t4Od1LL4nIPxCR1X3b7WlzMpNB/FER+WsicplYEGFFRD6QPvs1IvKDqS2ti8jfk5gO643O50tE5PtF5JLMnnF/ef/3Ulv/aRH5ShH5qMyeea/zah/3/kJEvk5Enkn371M3ucY37XfTdr8n7WsiIp8Skd90UL/xNsBn3exelQOkFHPt7TeKyMdEpAT+aPrsC0Xkp5JNX5NYsVYOOuhxwbHxGIvILydWS3kB+BPAJeBJ4PPSJu8Cfgz4O8QKKV9MrB5zmlhWEOL0wM8B3w18V1p36ajP/T7g24jX/SXEspQQO+3l9P7vECv1/H6gdTs7Tg+MnwVOECv+fRI4A/xmIE/H2f+dP0Ccev3WEMK33+a1PGj4AeCfESv+/EbgLxGrc/0zEXmIWJ1rAPzPwDbw/wF+SES+NoTww/v29e+Bfwr874AXkXcB/wH4fuBbidWEniS2bQBE5JcR0+d8DPhGYAT8YeBHReRXhRB+6Sgu+gixaKv3Dj8EbAJ/hJij9GHg17PXQfK3iFWxvh54L/DXiCmcvuEW9v+DwP8J/BXgq4FvIZaR/eBhnPwR4SHi/fvHibZ5F/DNwH8iPi/eDH+OWJXrDwGa+Oxp8L3EAe/fB3458BeIZXM/8Ab7ewz4OPEZNQA+O33vXcDX7dv2CeLv9VeIv+c3Ad8nIu8LIbwAx7+/kKh5/1fEtvtNxL7ibwEZsTobt9rvishXAf+S2Mf+ybSvv0nsV567Zxf1YOAHuf179T3A3yb22S8BGyJyCvh/iSWjv4HYp/6vxHZ8fHGUZfUOcwF+ktiBvWnpZeJoxRA7rU3mSohyQFnFt+JC7Fgv7Vv35en6f+Am279ywPoPAR+a+/tbiQ/KL3yDY38wHccQk4PXwP94v21yl/ZsrukP7lv/KeBH0vv/H2CZK89JfFg+C3z0gH39L/v29TvS+qU3OI8fA54G8n3HeBr4wfttpzu07aKtHr2NT6Xr/E03+byx9
z/ft/7vEsmezK3bU0Z2zoZ/Zt93/zGRrKzc7+u/DTsZ4L9J1/OFc+v3tDng8bTNR+dtkz77QPrsH+5b/+dSe3zPvn184Cbn0jzHfh+RtJyc++xDqa0+ObfuTNr/N8+tO9b9BfAzwGfY+wz/lcluH0p/32q/+7PAp/e15S+a39dbfbmVe3WuL/jyfe3NA1+w73vfQXTgPDq3rkscqIX7fb13ujzQ01wNRKQD/GrgX4YQRjfZ5ryIfJeIXCD+UDXRS7RC7DAWmOEH7uK7vw74xRDCx25h2+8kelR/RwjhrRKs80P7/v40s9HxrwE+HJK3BiCE4IB/DXyBiCzt++7+3+HjxHb7b0Tkd4jInnabplO/DPg+oofZpGlwIVYq+jV3fFUPLhZt9XCwTvTy/FUR+UYRefIm2+1v358CCmLlqzfDv933978BesDn3M6J3kuISC4i35ym18fE+++n0sfvvYVd/GBIbOAAHGQPRfQe3+x8lkTkfxeRF4netxr4F8R7fP9v9nwI4fnmjxDCdeA6qT867v2FRBnklwDfH0LwzfoQNe6vzG36pv1u2tcXA/9u/vcK0WP+8pFeyIOJO7lXXwkhfHzfui8l2v5isyKEMAT+42Gc5P3CsSDGwCrxXA+UPSRtzH8AvpZIhr+CeEN9R9rktqZg3wa4mwo2J7l1+cnvIRLHH72L4z1o2Nj3d8msfZ3gYNteJT6MVvet37Nt6ti/mtjW/wVwNWnlvmxu/5o47VXvW/5nYPVB13TeARZt9RCQyMBXAR8hTp8+l/S0f2Tfpge1b7i1PvTaTf5++HbO9R7jrxC9aN8L/AYiaf1t6bNbueY3ap93Yo9/RpQ6/G3i7/UlRFnAQeez/7eC1/dHx7m/OEWUTOy3I/vW3Uq/2+zr+pvs6+2CO2mbB9n4/AH7Omj/xwrHRWO8SXTj3+xHe4I4Gvz9IYTvbVaKyG+8B+d2HHGQh2NC1F3ux0mit6lBo028Ffxa4EeAHxaRXx9C2L2tszx+2CCW09yPc0Sbb+5b/7rfIYTw48CPSwwO+9VEOcAPicjjwBbxPvh7wPccdALznpW3CBZt9ZAQQngJ+AMiIsDnE8nR35cYVDY+hEOcJXql5/8GeO0Q9n1U+Drge8KcnlxEerfx/TfKQHEWeGrf33ATe4hIi6h//2AI4W/Nrf/c2zifeWxxvPuLNSKJP2i24ixwIb2/lX53mPZ10OzxWeDVuz3ZY4Y3uldvxgsPautXuPnvc2zxII8Wp0jyiZ8Gft9Nono76bVuVkhMnfN7D9i2At4wMvgtgpLbu84LwFkROd2sEJEneP104o8Av1xEPv8W9vkUUa/0JJFw3M4D5zjiJ4BfmUgsMJ0O/N3Ax0JMg3NLCCGUIYT/lxj81AXemaaofopIaj4aQvjI/uUwL+YeYtFW7yFCxMeZ5Y4+LKnD79r399cBu0Q5xoOKDnPPjYQ/eEj7PsgeHvj5m2xfED28+8/nA3dy8OPeXyQ5xC8Cv0P2Zv75FUR9doM37XfTvj4C/PY0MGy2+yLgnUd5HQ8oDute/Tmi7R9tVohIlxiYfmxxXDzGAH+KeAP8nIj8H8Qp0ncBX0CMVr0AfIeIOGLH8idusp/PAL9BRP4zcSR5OYRw+YjP/X7gM8CJNFX6EfZGSx+E7yNGm36viPwN4tTTnyWO2ufxncSI9R8VkW8n3kiniJ6OPxxCGMxvHEJ4OqV9+XHgv4jI1+zf5i2E7yQ+xP6riPxFYIeY0uY9xGnaN4SI/GGiXu4/EQNNm9/gMnGaHyKZ+UmiLf8pccR+CvhlgA4h/Jn9+z0GWLTVI4aIfB4xmv//Imb20cS2aolR5f1DOMw3JgLzi0RJ0P9I9H5uH8K+jwr/GfgGEfkU0S6/DfhVh7TvXy8if500QCMWl/qeeV3wPEII2yLyYeCbROQKsT3/D9ydFOW49xd/kWi/HxSR7yJmkvhLRJlEg1vtd5t9/YCI/COiHT6Y9vUge86PAje9V+fGD
beC7yTa+kckpnBsslIcxgzU/cP9jv67nQX4QqKoe4to+GeAP50++wKiV3lEJM3fSvyxA/D43D5+NfBLxIfvnujqt9JC9DL+ayL5D8RghS9P77/yJt/5LUQCNgY+QQxe+hD7InaJ01H/iNjJVkQS98+BIn3+wXQcM/edJ9Pv8nO8QdaFB3U56JrS+u9mb7T6e4mpcLZTG/sw8DW3uK8vJaZwu0jsYK4QSeB79233WcRgietpu0tEjf2vv992WrTVB3NJdvjnxLRUI+L0808AX50+P9DezDIsPD637mZZKT6HOKgYE8nGtzGXTeBBXIjk6N+ktrdJTOf1JezLFnHAff542uZ1GUzmbPZr0v28m+z994D2Afv4wL51P0zMEHCdmBXkN3BwloCfPuDYrwDfvW/dse4viPr/Z9O5P0WsQLjnXucW+t203dcfsK+PcUD2m7ficiv3KjfPSvG69pY+a1ICTohSjG8hDl7C/b7eO10kXdgCCyywwAIL3DaSp+gvAlkIwd7n07nvEJEPEIPongxzmRIWePAgIo8QZwq+I4Twbff7fBZ4MHCcpBQLLLDAAgsssMACt40Un/Q3iJln1ohSzP+NOIPyVkzRuMAdYkGMF1hggQUWWGCBtzocMVPF3yVmsGmCE39nCOFu0kIu8BbDQkqxwAILLLDAAgsssMACHJN0bQsssMACCyywwAILLHDUWBDjBRZYYIEFFlhggQUWYEGMF1hggQUWWGCBBRZYAFgQ4wUWWGCBBRZYYIEFFgAOKSuFiCwi+N4AIYTbKiUDC5u+Ge7EpjCzq+kW/Oqv+2oeOn+epe4yy90VljortFtt8tygjUGMBi04V+OcxdcVoa4JriY4x2C0zYVrF7ixcY22VqwUBY+dfYh3PfwOTqyeoN1pk+U5CgU+4CdD3HgXO96iHm3h6xKCB1GgM1CGEFQsSO8DEgLKx0V8QLwnAF5pnGis87ja09nQrFwq6F/MyF8Bt265sLLBS6c3eeXsiAunS67JDhvbm2zubrE53GE0GOHWqpi0aHh3dl201TfGnbbVZz/20QAgIigRNIIRhSiF0oINgco7KuewwREkkJucIiswImhARKGUIKIQJShRhPSPZpG4nQgopabvkdgWp/HZ6dUHP33fQIXZdiEEvPd457DeYZ2lrCom5ZjB5habV6/x6nMv8NGf+zCf/MQnuLCxwY73fMG7HuW3/s7fxhf88l/OuUfeQX95BWcdzllCgPX1Df7En/qzfORjH7ljuy7a6hvjTmz6u//CPwpCbDtaKURplCiUUiit0M17peL66d+CEj3X5iS2dRXbatMOm/UgoOLppb9u9aLwByUZCOD9rKhD3CYQ8ATvCcERvI9tOQSCD4QQ3zft23uP9y7ux/vZ9mkB+Bff/ofu6lm1wMG40371VrBI17bA2xKtTpv3PfI458+dpdvu0m336Lb65HmOyQzaaNCxY/YhdoLO1riqxNU11lbkBurqDG2lyLF0jGKpyMnFo1xJqMG5Cus8vqpx5Qg7HuAmQ1w5RCRgshxlCsRkIBprLd5WBOdQiQgjGlEaRBNCwHpH7SvsZIwdjQgD0HWBywqylRyMIixD/2SPs6stwoqlp/qcbK2wvbTLzniXtZ1NLspFqmoca0C97csyPLiInEBAhACIFoJWVHXF9nCXndEuu5Mx1jlOrKxyevUE7bxIhCR+b0osAJmnFGnf0zUhHW/vx4AQhFQVShKxTqcVAPHxPRKJg7NUtmQ0HjMcDllfu8GNa1e4cuECrz33Ii8+/RyfefFlLo3HlOlYz792jU9+4imWT5+lt3KSpZWVSJCCQkLgNkvVLnCPMG1ZIoSmLUlqG0yb7ozgKhUJrjSvMv1MZhvHLyuZHSGR4ukB5l4gTMd5rz9BQSGzAd50YDj//fg2hHR/iAAqXYcgwYOEuI/gZxdIQ+A9CIT957+gtscSC2K8wNsS/XaHd509z7mzZ2gVBa2iTZG3MSZDG43SClEAkQQEH
7B1ha0yqqqiqg1GAqFepaMF7StysfTbBZkSlHdQlTignpSRxCZCHOoSb0tMlmPyHJ21EW0ICNQ1rq7wtkKcRSsFpoUoBUrhg1DVJeVkSD3Yot7Zot71hLpFpVuYfhfVbuP7Bd2VNieXBN0P9IzjRLdmUE8YTsac6m5QVSWXBhcJ235BjB9QNKR0ymslEQSjqUrH1u6Aa+trrG9vUtoa6xzdTodMa7IsT6RDzZ7jzX4RZoxmtvYAzhwf9kGmD/kgiYQk0oD45C6OdMM6y2i8y2Cww8bGJhvr61x65WUuvPQ8rzz9LC8/+xIXNnfZCmEPb9gqKz7xqac5+8hjPPKOd3Lm3Hmg8XL7Kbla4MFCQ4ohNhP2kWPmyeIeUqxg6ilmjhQza+fN3/NEWOaO3Pwd5lb5vecXprMi0xWRR08HdUzbM9NBX9qxqDToU0xT2wpxoEZ0nITQHCXtcO5UF7z4eGJBjI8his4q3aVTBH/QbRfX3TQ9dXMTB08IPo1+HQAiBhGTOgOVepLZdOv89/cdbh/2dWa3gT3nfbOLOITexiihnys6GeTakymLkQojAR0E8WraaTZTZ+IdeIsEiwqeTIRunmM6HSQYDI520SbLstiR+oCrK8rhDuVwFz8Z4e0EQojqCZ2jig661Y1Tedbi6ppqtIuvJgRXoXWGKTwmB3ROCFCOBwy3bjDZWqPaXkOPPRPXoeW75NkSWbaEzpcQrchVRkcZKDKyIqfwBb28RaaEq5ur3OhdY9KaxNpPCzxgkJnHqvEYxzlrRCvKumZ9c4NLl1/j2voNyrqiZTJOrazQzgztLIse47Sr/buese20qvEME4lzJDlqL4n27PEYQ4jcRYFzjqqq2Nre4tULF7j4ygUuvvoql165wGsXXuHypcvcWB+wVTkmB1ytB56/coXVX/gojz/xJKfPnafX7ZEXRbp+tSAaDyqEJH+YDeIE2UN6mzY8Wz/vIZ4N3qbfSft9U1IMUzIugddFTk3b63QgNj/jkQZ8itgAJSRyPDuu0Pzd3BlCED3H98P0WuLs/mwgIIs6EccSC2J8zKBNh9/6+/+/fPlXfjXeWbyfn/tshsJEvRN7eez0URY8zlmcq/E2ei8FIcs6mLyD6Bx0nqaG4hSSxPFwvNF92DMaDvOaw+kDLE5Fhdskx/F8k+4rvX/d0/AwiLFAYTyFWLQEdAAVAuJqCII4mfZvUTcW8NbhrSdYD96i8bSNJm8VgEbhKVotTFaAkii/qCrK0ZDJzha+nhBsjTIGnbdA56i8jcrb+Lom+BpblVTDXVw5wrsSbXIyDz4IKo/TgZPhDrsbVxlvXKPcWkNqz9h0KPQSLV3SMo52pimkQCtDS2kwCm0UOZ4qyxA8J3tL9DrtSIwX7o0HDg2pnZKHxoOlouettjVb21tcu36Ny9euMqlLTq2sMNx9iLrXg45HKdmrE5a9r2r2ZvpBIHnT0j+UII3HWMLUKzblLBJABeqqZDDc4fLVSzz11Cf55Mc+xvOfeZaXL1xla1hS+sA+Z97rUHnPMy+9xEsvvMyT73sfxmTk7Xaa497n9l7gwYBSM+469zpPiKfLHrI871Ge3/aAn3nvGO51f8/vl32bNg6k6fOEhuyG6XFJ6ojmeRm7w2arOZeyAEHF9RKmxDg6l2ZajoZST+UfCxwrvCWIsTY5AN77KUkTEbTJ0KaYG/E1M35hKkmaEsm5/YW9/83I2p7tw9wOm62aCJSjYxjK5LzziSf5nM/7vEi8fLqjZ2c6I5Zz1zU9ex8/976OxNhVBBeJsTGdOK2vIjEO88S4IccpKKxBmLNTRPJsoRJBVtNTC7dil7RdCGmEv/8rB3rJ7wDBIvUIbAvEEILFhwpRsdPzc3NznoDz4Bw4F3DW450n2OQ9BpQyGC2YrEAZg4SA83XUF6ftbVXjbInyHqMzjPPUziF1TT0ZUQ0HTIZDqtGYuhoRbInSFc4G6qpGsjE+BAaba+ysr1Fub1ENxygfsMbhMvBFh
lct0CUhc6Ci1VUQdAAtcbiixVMooW00YiAomomDBR4gSOMJa/6FuCiETGvarRa9dpte0cKEQKF0Go76RGD97B6EvR62sJ9tRMw8xs2DXU09cuBRIohqPHYe52rqcsLVa1d44cUX+fSnPs1HPvzzfPqTz3BjY8jY3d49uzses3Ztjd2dXVZPnEyDgUBojrnAA4d5726cbNzrLd7j9Z2+mbqWmc5gyOxpMiPS81+fOSzmtfL73809oeLAbu4x2UglZP8zSaYOYyAOGn2jtxCJz8Dpd/aShuk1znnMw8LZcGxxrImxaM0T7/tcvuy/+2pCEK5du87OzoC6Kun1+rz3ve/jHY8/jjE5BMF5j7MB50IkKs5jbaB2gdqD9YJzMVLVuYBzcXvrPHXz6hzWBqz1OOupa49zLi0Wa2ustTgbo7Hf3Edy21eN1hqtdew4/Ov3H+Y9rsyR1wBBRbKpPGgFQQshRL+RUjlKZ5EoKpUCCRQonzqF+HqzS5r1eYrQyDFoIttnU1m3WoZ8r1QkzDxWh4Dganw5wFcFEgyiDc6l2yFJJRo/uUfwqBk59gGf2lCwDoJHlEGbPMojtAEXI5W981PvufeR4KoAwVhMXVFVJd57yt0B5WCLyWBAOR5jqwneliBCXVYoMySYHOcD2+s32N5Ypx4N8TZ6u72rIYzBjIESTI3PHcpEe3mXBjlYgq0ItkJ7T0sUyoDTLIjxAwal5h628/9CfNDnJmOl3+f06gnseMJkVLDcalNojVKABLywz9uaaHazzxBJc/Owb7zFTUBUQCGiYfpNEBX2bD+pS3Z2NnnllZf42Z/9GX72Zz7M05+5wGBc39F1196ytbnFcHdMZV3SmhJHdXfpMf7v3hFfFaDTOi2CaEEp0CraRUn8W023Jt3Fe5zrUwLkQnQ6eKL06qYD+ORhbC5Jpd14ojWjsC1QE6iBCijT+mZ7iF2hVTE0IB4zrvTJNxPS+QRm3XVI2zkPmxuwvXXndtxzScJURkGKy9gjlZgnwTD3oCA6XRLZnVchzNurWTF927RdtXfDA1tGmumQqVRi+gENC38dOU7fC41EgkiKp+RYIkegceBM7SCoIHhpvMwLHFcca2JMgKLd5sy5MzgXmJQTRAnOOc6dO8+X/Iov4Yu+6ItpdzponeE9U2LsbCTFlfXUdaDyULm9XkGblqr21NZTWkdlPbb21FUkxXXtqOtIgmtrqeqKqq6pbU1d22nKlkNDM+WkVDOQPdgw8xNA04djIPgUXY4iiI55ltI5xrQ5yVOUInKb3mg6Qt93zJmnYM5vnTqT6DmeuqgI+Fmww5shxFNo+q/p8LtZd5fwtsYOB9hhAUUBxoDWMUWPrZDgok5YaTwKLzoR3dg+rAv4JHFA6Rj1jCIEpoTZWYfzMUhDmQyVFejg4nuT4ZxntDtAApTDXarRADspsQ48Bq8geIstKyirmKbNBSaTktprrGR4gYBHhYAN4LUh5Bm+MNhCITogweFqj7WWOlRYWxLqCYUIy0Wbdkez23EL78aDhn16TEVMdSWiCGn214aADR7rPVVtGZUVu+MJ/crSR5GpRGrDTCc5dWSltpNuMBq6KyHgg4qR+KkTUJImlSVSOIKnthVlOeLyaxd5+aXn+cVf+EV+6sd/ik8/e5GyvvN+zwcYj0vKsor9ZyJcN3Fw3xa+5dc35FbIRCHGYJRGZRlKF5gsR8jIMoNSgtZxNgj0dGCiZj1d9GRL6gt8vMe8D3hvmZ9abxDjzwKiAtqEyPXTVpZATY2lpqRmEizj4BkGSx3TJaDSoMRJTCYzlkiObYiLqyNJd3Xs1utm38mfYS3UFj78s/DzP3d3tpy/ptkb2UNRm+670a9P/01/y+Y5EQcMYY5Ez7zN0yPNBnfTY73ZyTFt7NOQmfQaEikPTcaTZsKVOEM6HU+GeKTQSIgkjtU8cfDq/cw7Pg1UlT0HX+CY4VgT4+Adz33q4/yAC9SVZf3GNeqyRJTmHe96kkcffYgn3
v0uUEK7BSLRkxIC+OTQ1Aq8Bk3ARHcm4qNLNaiAl4BWHiceLR4tLq4TjxdHEEfQPnpodYje5uBnHtLDdhhPL/7WNpt5cdODMUWU753ekpkShDAfgH7woecuKqQp2b2dY7NdGrAHphKX6Mm+RY9x8nw35GDW4dw9fG2pBgPqbjuOhowBraLEpBoj3kYCqzOCMoQUTOedx9UuziB4BaaIhDfpkK218YnkLc5aggugNDpvkSvQrRyUQekM5x27W1vU5QRXTnB1Fdufygg6AwFfj6lHA1w1IRBnNCwZqnMC0WMohwRXEbwjmAJVdDGdPtJu49sZGKJ22VlKW1K7itpHYtzRilO9PqdXuox3d3D54dh2gcOBim7fffpMRVBxFmNiHVvjETd2dri2s81gZ0C+sUxvfZv28glWg6alc0iBtrMMxpF++CnTnCkppzPE3sUZE4kyG5+2EgJKPN5bxuNdNtbXeOpTn+Jnf/In+bmf+wjPvXqd8jalEwfBpRk537CThLvdc1BRaKLEoI0CZfBGI4n8EqJk1geHlowQVJpJMsTM0AodM5OD+Dj1hgMc3rsUk+BSiseGlkaoFM8cFCgRcBDyOYdC3AsVnso7rHP4EPsTFSBoqEOc3Wm8xfP8EoAUTx0UeAFJzp7p7zrX5R8e9o1Y5vfdzPI1JHO6tUra+fltZW6j2T7j9am5cw7zn9zWWU6/kxwsIc6JEOf10n0wN4gM+77f6P2VEvAqzcLNp4Kb23r/9S1wbHCsiTFAXY559hO/mIjojLC9/HzJ00+/n/d91vsIBPTJ0zEoKjSjU6bTIkoCSgJaGkG9T9NaHiUeJQ6l0msiyF55tPIE5fEhvobg0dpjmgC1EIn1vcaeWLzZy/TGj73UXEdx4MA2zD6b32B/DzDvAZa9qxutcDPdNPNJ3QISiZ4es+lkDokce+ex4wl2PI5ebKOTzrYiTAbga4wpUFkKRNQZzocombEB6wSHQamcgEqzERa8wxLAOnxd42ubHlQGVXTRqj2d7qyHQ3YHA8rRMMpUAK00SmeJlBuCzpHaEazH2fjAVXmb3PSQfIzNMqQuMd5iijam20N3ukirwGcKnzSgVVVSliMqW+KpCb6iJbDa7nCu32d3dZdBcVSjuAXuBGoaqR8JcVMcwQKlc2yPx1zb2ubi+hoXb9xga2uHkROGFioxZJ0lzgWhXWTkRiCJguJt1NDcdDOFOe9eElnG2y4QxBNEUEm3HPA4a9na2uDihZf55C99lJ/8iQ/z/OV1DoETEwgMR0MGg12qqp4jVbfgIXwTiMQMF0prUBq0jqkQRSOipwV2ZppRFRkpGSFowOAbYown+mQhhMg+vQ+pGEScRSSkjD8ABnSSrrkQZ4QMM3mEx+PwWCzWOWyo8SEgKZWiBayJR5wXqewxeSMZUNNDTzfw6f3hP5Jk77v0gE1+ppipQfZu3wR3Ns+Y5ped/s57uLbsIfLNbOQtt4UpCWZvloi005kTOZHiZKCoMU40PHmQG5VMfI1PMx9CykYRjT8NLNxnmwWOD449MYY4Qt+P0WCbT33ik7z7ySdRStHrdmkniUAIcTItpoZN01rJyxt87PyVeHR8BOCJnmKtPFpFz3BohF3NSDzeIlNPsQrgkIMr7hwC3njEP+tI4k3fZHiYbRHmIvOiNIOZjCKN2BuHkpqS43TH77um10157fn4zq4/7Hs/dSJwSF1Nkjy42iJSESxRG1aPcaNtgqujZjgrUHkbydpYFNYHXNAgGWLiZzorIARsVWFTAKi3DlfXSYMcPUl5kZPlOd7VVOMJk7JmNCqpJxVFkZNneQxsEoXOWuhWB+M6aJWh8y62muBsTaEzgsqoywnVuIW4igxHkRXkSyvoToeQG7wEaldTVXHKezLepa4niHKgPIV4lgvD6W6H0XKb3WJ8GJZd4JAwS38lqVqYQURTO8tONebq1hYvX73KMxdf4+KlS2zd2OClC5f59Gde4uKVdXZHNZ/13id47OHTn
FzpYVTApJyszdM7NN6uJlYPSURUoRqvnmqC7WZ3XmUrrl25zFOf+CS/9PMf5eUrG4dCihvc2FzjyuXLPLHzbrx3saLaIdz4eZ4hIhhJEgqlUyVBjVYm2lwLRmu0NhiVIaEAX4CfSSqA6C0WCCqg49TMdIYrJMUJxP5K60iKlVaJlIPoZNJG7ByIM0zJA914RhsdMiQ9cSN0Toy6eSs+KuOSeiZqn5vdO8hCJNdm7pCHgim/nT0jAiotKU5j/vEhs2tREq9yfy7jmT55H1k+8Phv0jBC40+JM8F7XOfp85krPXqzac5c+XhdDlwI1N5jbYwpap718QZq9PjxnTTZWxbp2o4l3hLE+EAEx2uvvszTTz/Dysoqq6urdLo9QhAy06Ld6mBMjkiIQWiNh1KF2MNIlFIEFfAqeoG9ngula6b4U3cwC5iOXgYlHieK4O/XiLEZic/Odz73aPN/Mz2rYgQFguATOd6jttjzzbmjTEnx/HXORuKH3S0oUW++0S1g2qc5j7c1WI/H4sshdjTE12X03GYFuuXRheBVhhND0Bp0gc7aaFNE/bqd4KpxCrxMxNh58D4OvLTGKI1kLQiC9SW1S0E7aHTWIm+1oiTCBUQZTNZC8qhx1KagLofYaoLKDEobbCunzgvwFqMCWZaTd/roVoHTBi+Cc57a15RVyXgyxlYjlLIo5REtFCL0jXCqXdAxC4/xg4QopZgnC7EoQlladoYjrm5s8NLlKzz/6kW2L18m3NhgUjt2RDMcTpJcx6K1IzNn6ORCYVSclVA66pKtx9YOW1lwAYVGK4XWGdoYVKaR3KBMJIyCR7xlMNjh4oULfPqjn+DpF16lvMNsMQdOVgEbgy1effUCW1tbWOdQJvYxd1vgIxY9AY2J16oVWhuU0pjMRFKUxSC8TGdkOkdcIsYNpQxNoEMN2hK0wloBE30lokAZQYXottUm1bXQCqNNjA+JhS4JKu0uPVlEEsE1AeWIS0zKQRqzRERVx9SGpLOrfVSF1SE93OcYcLBpG/b21neNqeOkIccNlZcoQUsOl5jlSKbEePrl5vEh8XuqcbnObRNXHdD3T72zB19RoCHisUhTKm2XHEUy48Sh+Xv2jG883j4EXAhUzjGpLVXtqK0l+MgBFEwJsZak4fdNCekFMT6OeMCIcfSKKF1g8hxCrDZTV2O8q9ibDmFuSH4TVJOaq1eu89RTT7M7HKKVpqocZ06f5b3v+yzOnXsIrQ2ZUjRlyeNAb95T06wkeU4CRgQrglMBqxRWC7WNXgGlBWXBasEpd3jpxfbjLnc7G4UrTCK3IeikuGo6r310+BZ70xkhntdZ3Ob5zdH4vXsIhzChGvtg0Tq+CVFTKK7E1VUs/VzXhNpBZVFeoclQRYYUBZJ1MFkLTI5SiuAstpxQj3epq5raujQNrVFKYSQWZAhInGpFobKcVm8JYwx4S5EZjEA1HlFNJjFrRjVKXivQRYYiQ4mLno/g0EZT9DopoEfQxqCzAjGGYDK0yqOn0QekqqOX2XnscBdX7yLBErCM7YRgJ2i/KH/3ICH4OJj1Pk7jah0QpRmNJ2ztDLh6bY2XX77A9kuvEHYGUDW/n2fryiU+9vGPgR8jYUA1epTTy11We12KrMCojO3BkPXNLTY2tthc22QyriiyFkXRptWKS9Fv0+q1KToFeStDSaCejFm/coVPf/TTfPJjT7E+Kt/wOm6Gjmnx+JlHWN9ZY2O4Qz0nhdutxjz3wvP8smtXGY/HKN2ZDhDuBjHDRhNgovHaRB+wzlDkUXucAh0zyRAxYLIkp0je3JACVYMF7RBxGO1wIRAMEDS1i9K7EHSS60kk4NqQZQaTa0THWSqPo8bhcWjjyRKBE+dQDqhjVhlNJNM6Eck9SolZnaZ0nft6X5mt19nMq3wYSMqNuYMznY1oZA9Togyv69dl38K+93eD6RMsRE9wnPCMlokTvjM5hA8x85T1MV+98y7OIBOwzrE7qRiMS8raxj7ehyRLS
bmLkpPNSAyqjC1tQYyPIx4AYqzI8iV6/dOcOHmWk6dOs7SyTLvTYTIpGe6OGQwG7A4GOOvQxuCcYzwaMhkPqesR1o6x1QBCyfxt54NifW2LZ/yzvHrxVcpJyXB3xJPveQ+tdoel5XicPCtQAXS6a+N9raYjwSYARVxAqYBTgUwJNr23WtBaqG2UJCgNuhacyOFnpbgjHOyXESVoURgdOzDnFa4ZzkdB1lQvNiv7etC+3wiH3TEcjq9DRBATiasA+JiXODgbq9CFQG1rXHAoVaB1TZ7H2QadtyEr4g8dAt7W1JMxk+GAqiypawfKkBUtsqKYTkWH4KOngYDKcjp5jllZRYtHuRpfT8Bb6smYYCvcJCBZRpZniElRokHj64pga4zJyVotlMkREzWTQWIGDTEGpXM0Gu0FldeIybFBMRqNmOysEeqSEDyVDvhU8HCBBwchxIDOurY459EmINownkzY3R2xtr7J2sXLhPWt192bYbTN5eefxsiYXqvCuAH12dP41VVaeQcthitXrvPSyxe48MpFLrxyiZ2dIZ1On253iW6vT7fXZ+nEEssnl+mv9ukudTFamOzucu3iRT75kU/wwtWNO8ryJwjve+y9fMWv+7Vcv/oazz3zaT7+/LNMXCT3gcDlG1e4fu064/GEVruYzWLdFdIjT0VirADJDEoyNEUcAAeNEY0Kmow0eDakrieJTF0cVNZYwCHKoTNHcJGAKa1wXkcPcrpeUYbM5GTGkBlNUIHaW6y3lMFRYqmCxxED57yekd5G/WIAq5l6PT0gjteXdA97XvY0Dy2HX3di7+xg48bdN+3YbBde/zvuJ8evP72bnPAt+F5k5jYGdCxwFQLOz5FiH4M9K+uo6phhqrYuVjyVmHFkazhhazihrGzMT+/SMzF4gjhEAkYJmRIKrSiMRh9ulOMC9wj3gRgLSufkrR79pZOcOHmO8+cfYWVllVa7hc40tasp65JQxajfldUVzp49T2YyjDGEQNJNlkzKMaPJkO3NdXa217D1GCWCyTOWVk7SLgq2tra5dPkK2xs3mAy22NxY58Tp05jCcOrUaVZXVsizPOrJJJKYJgpYhegJjoaKuiEhJO1wSJ7NmEvW+RSt6hopwS2Hmt02bmW/r5c/7CXITdYIHxqtV0i5L1OXGxo5Rph1LoGk0Z51+IQmnVvcexP/EXtBz6y8iuDCtIZeWpfEjTC1lqQ9NfubBmY02x8Cop48PdRER+8xRdIYZkhWRX2wBynaSJaniPaAC45g6/hESsS4HA8Z7u5QjsfUlcXkRZw6zTOCj/KKibWIjNF5QVa0yIuc3Bg0Hj8ZUltJU7p5ypNJrGDoYuqn4KNEo5oMqScDtMkobJes3cVIF1EqFn0JHucVXscE9UorTNbCtLqYYozoNkEKkBoJDi1QmOQUW+CBQVVV00wnIYAyGVpp8qyg0+5ixODH9cEDVlHoXouio/F+zGh3g20TUKMh5aBksLHLs0+/wCeffo5Xrl5jfTiitA6torY20xmZyWm1C7rdLqsnljlz9gy9Xgc3GbN59SofffoFhnc48M+U4bHHH+fJz30/D73jPKunlhkMhzx98ZXpHe7m9Jw+NAzoEIiGInlBIDSpIlAIBkKGtlkaNCc2LG4msYu0Nc4SYYnChKYfCeg4VsYohSHOwAFEeZ1GgjAej1hfH7G5M+baesn1Xcd2GdipAuM6UDso2tDtw9IKrKxAewmkDSoW2Yy7TN20pJFJQ07jGc4rlY9u4vINsV8ycZPf7m5/0TeVK6SPY22CQGktk8pSpXblQ1OzwFFbR+0iKbZuLzEeTaroLXYu1kLwPhUVa/hAzB6iBIwScrGHnP1jgXuFe0iMBZMtceb8E7z7vZ/N4+98B48++jDLy0t02m2qumRze4Pr69e4ePkGV65dYbgzxo4dZ08/zCOnH2F5aQVjYnELldLu1HXFpJowGG6zO9hCSaDbbaNFGA5HbG1t88rFS9y4fIV69ypgufzKM
/z8L5xBZ5rHH38Hjz78MCdWT7K8tIpRGaSgE5EYmKdCQIeoPgr42KmmdWiP8zGrBULKdRvz3Np089wPHOT8O6j72BNl69M1JhIcgwjmCXBK8RT2dnPx0pvqeLOS84RUFGQ6gFCEEKO6Z/mNY3ceZL7W3Oy1ocQypcmHhBAIriZ4h2iN0nnMQBHiFJpyFrEO5QKYDMkKxGh8cARbEnNPRG22tzWTyYjRYIfxaEhVVrTbPVrtLuId3lpC0nI65+n0lyjaBXlmyHON8lCVAbyLxDhv4V2axgvgrSN4h6sdtqwZD3cYD66jjCZvdWjZVVoiGJGY5sqBUx6vXcxqoXN0nmPaPbKyJuvskE1GBK2QMMYYh8/Cghg/YKgmZRyI+YBI7JOMMXQ7XVZdoN/uo/xN3PytFqceOcv58yfotBSu2mV3y1HeuMFrz7/GS89c4KMvXODSaLQnw0HtfEyCSwrEHMQXeVnIs4xWliPBU9cVQ3vn0ptu0eHcYw9x/p2PIPYsp06tsrO+xebmFld3t/BApg1G61m10j3yrjtEk5JQ5kPaIjGOjNMQggGfxeInQaGCQlPhUs6I4F3yFFtiCY6aZsCuBLIMCDpqZXVy7SZdsnU1W1tbXLiww9MveT79KrywDes17HioQtQHtzSsFPDOc/C5nwPveDf0cuhksEcEa/e6CgIpj3E6u9C83i+CdoBneO+6OOA5SGJxwNcPRpg5VW5+GoL1nknp2C0rdkYTxmUdcz8H8L4p0hWlFG5KetPiUyEvP9OCN83Ge6IjwgvBxb66yUolb0baF3ggceTEWETTaq2wvPIQj73jPbzvsz+bd7/3PZw7d5qTp1bwvmY83OXy1U2uXL3Ecy88y/PPP8P6tSsxY0DQeBt47OFHabVysizDZAajDUoprHfUtkun12JlpUueCUv9HhI8N66t4a1jqddns7uMHW8T3ABXjXnxuWdAwcWLr/D4O97BZ733/bz3yffR7fRRKiMEiWnYpkskKsHHimbBNyPGMK2U51O1POtmN9mhE+PmIUGYve63+UHf2fdZQ0yDEIPtgCAy9YTH1E6C8o03uJmbm/fr7lP7+vRfc46+IvgKwaHwBHKCdAkS5+3jflzyxDSa8eTlDPO0eP8U2913NsGDr2uCc2BShSMd85rqWKWD4Dw4j28q+XmPq8tIUFO5ayVRY+ySrpiUtscnPbG1FlyFtyVVVWGtI+AwuUFjEZuhvaMe7VKNh9iqjkS6uUQlcdqX6EGrbcl4tMPO9joigTxvUdeWYFoUYqYdtPMW51XMuqoDSmuyvCBvd8jbPep2H6cC3gaClOCrRSf+gKF2FlJGCm1in6d1RlvnrKqMc6fPcOLUWbZfe5lZbq4IVWQUObhqxNaNEX49cHXsqDfHvPjcZV5Y2+GG87d8JwUCZV1R1tWhXNup5VXOPHSGE6dOkAn0WgWf+wWfz87aJi9fuMCgnHD6/HnOnT9HnpkUdHv37M4m7aej6U8CjoAm4FLazkCI2QtUkpAZPw2utsQgbJ2m4iP2WlGIGQwqH6hrj7UwnpRsb21z4/qYjz1V8ouvBJ4dwHULo9ftAXYtrFm4+BK8uAFfeA0+5wvhnd2Yex8ilW+uQxGbgE6z+xKIsS4+rnepRF7wafsj9tccrHA4aO3M9XGwU1lu62cPc//Pr/QhxMDUScXOqGQwrhhXNmWOEryP3mGf5EtRXhFlEt679MCITqP5Y/j5xafFpSJPLhl8gWOHIyXGojKefP+v4PM//4s5e/ohTp85y+nTp1k9sUKrbdAmcOP6Gq9eeImnPvMZPvbxj3Lx5ReoRgPmG/fViy+w/q4nWVldIdAikGFt7KAdxIIaOFTS+qpEtPJM0+91eOihh8k7S1zq9Vi/cgE73mJ4/TJP7azz4lM9zpx/hHJccebUWYwpyPNYZsk7F0s/O5/KSbtURtqn6ngBW3tsHajTErMS2GmZ6HDoHuPGvZtew97ik/s72Cl1DnPkltQRKYn5L
pPgzIcwTWszFTwEUqWnhg77WVc2LfsskTAHR/CRXDvvCHYEdhdFhYhDVBdvMpQqcHOCQWmmJyHJNVLqG2a+4nlSLK+7yjuwog8EWxOcjYn5g5oeLQgxhVNy+OBDJJrO4XyFE4NVJmp5IXaACHneiZ6izKLzHC8SyfBoQD3ZpbIV1lusLyFUuFGXusjRQD0eUY/H1MmrLNqgszx6sosiejSqXUo7YTQaMNgeE1zA6DG1DahWH0yboFo4yfE+6tu18qmUt8JkmryVU3Q61GWPSaixboKzk1ipa0GMHyjY4NGiMXlOkbcwpkArQ8vk6HaXdzzyKJ/1/vdz7fJrjG5cnJHjrIUpNHa4zXq9wW41gkHJeKtmMPKs1YHDobd3hkJpHnn4PKfPnqC/1MEgZATe81nvIVeGz1vbYFRX9FaWeO/73kOn3UYbNb3/7wYuZlXDeNmjjXYSUMqhTZQWoT1ex2wccX6c6LFHY32IqR9s7Oci1Z55zwNQOs/OuGZnYv//7P3XmmRZeqaJvUttYdJVyNQlUAJoNLoxPSTnhMNb4JXw0nhC8oQzDznsJtGNhqoqVKEqtQjlytzkFkvxYK1t7pGZJcMjMhLI/3ksPcPd3MR222t/6/8/wWbjOb/Y8atfLfmHD+G/LeAi/mGksC7CRws4/zsIBu6/A+MMjj1pCZUehEtrtQqgs+bbe7DZ2cK79L5jtnRTtx3//nWdYfgSFv46hJvvMODiryEf70Hz8K0b///VJSu7T+xbPym8w/tI0/cstg2rxtJYj/UQ99S+7J6BTLuGkDgqA4MnDi8/DhkH6Yq19+rfv5d8FRHJ3em7aOhvZ71UYGyqA/78P/wn/uo//jXT0YzxeMJkXFNVhhh7mt2G02ePeP+DX/Ozn/0Dn/zmn/H2qwrnrlnw6PEXTKZTxuMRZVkhZRI4iMGoHcfgZGBdnzp+waOVYjadJn5lYahGNadPPqdZPMY3a7bNii92Wz774Y9ZLBZMJlOkUEih8jjFZ1Cc+G7OhxQr7SLOxX1s9ACSvfP7MXgM/qXYtcRhhYuBQd2xh67xxqVD8NyJub+wxAHPSpAiRQ+T+KxBpNjm1DnJMHQ/xhyWg5CBa2SIWRqCPGJIVAQfekK3AnsBdMkezPQIOQZZ5VciEih+zg1B5pcYuE5HunaiGED7Cx/DEPF9j7cWpc3186RYKuKwORDJrD/kJC7rIhaBEwovVOIjk2J6TVkThSL0fUrRi5G+7+h3G9r1gqbb0ruOZlNhd1e0owmT0QijNN5afG8Tr83HxEMGjNYIAZ5I27dstksuL1ecPo0EC1rCvF8h61MwNao+RBRF7oSQr442EeWFRGmFrgpMXWP7LXQqWbp1Ke76u3qNSkqEVokGU1YoaRBCobXBlAV3jo744ffe5dGPf8SHwbNbnlGMx8xPjqmqiJEtu4srTs8adk0CWa9D/+qgnvDOe+9wfHLIqCoSiSHUPHjzDWajKU3b0nuPKg2HJ0cUpUEJ+aWB+59YMvkAWxsxMTcFBAQZCNoTpCXq3HLVAZRmbzGQN85KaGJ0KK3AK7wf+rZpbfXBs2tazq52PFu0nF85Hj22/LdfwX/fwOpPeNkrC7/+EP6HU9D3YDx5Hm8K8sXcgQkJCPsAyiXQ7P11L0WRwPOt1g0AGa//+dvudqOuvbSjgL0yLyaQLIbVPtMlUghHuk+6+zUd5rkr3XPdotTs8FHsqRPxplfxYDotUpR38C5RIYb2UJRc251eX4Ei6ZiG/LKHgWHM1934pevvd/XtqZdLpZAF66bhsydP0DyjLkqOj6bMZjUxdLTNio8/fp8P3n+fR59++rWgGCAGy2cff4B1gfl8zmQ6oa5r6rqmqGqKskRr0CrQx0CwHcFZttuGtu1xMY2857MJRfEm43HF08c1q4szXLMGJF3Xs1hcJh6zKjCm2tMkgs9dY5/ody4vNM7lbHqbIkwTtyiNXUQc6AMvoQsXyVSOIZAkn4Q3UuYgLyyZmxj39
Iv8AEImoaBMTho+hszzDYk2QOYEC4FnEMIForBEXNpph7RoxDAsICIr6Xu82+HaC0L7mFJ0CAMKR9RTYiyJmPw8FmJ+hqj2CyAxr0di4EvHLy1JL1YhBmzXYLsGISUxeGTI5vsqCXL2oozeYnuLcwHrA51zdNbhhULXB+hyipEaUyUOctcnGoZPHwz6dkOzuWC1uGC92mEKxXpSM5nOmB4cUNVjiIoYRJ42BEzpKD2UUVACzrasFhecP33K55/v+PxR+gwaAXd2AcwzUIbxsaEyE4YF3LmUooWSRKWJ0aOUpCg0ttAYLelDoG8Sfv6uXp8yVYVSGlUUSGMQN3LSRIzUpebu0YwfvveQsXEsr+5wfOeYO3eOse2a5cVTPr+4Yrn7qmnBN1n37t7nne+9x9H8gGLAk0oyGtcYpRl7j4/JwLeoiuygEBkyE16ketIR1Fokr1+ZIpaDCiDtdRSdTl7iKLffVA7LjpQCYzTOD3HAgmEw6LzDWsdqvebRkxWfPA48uYDPn8D7mz1l+0+qxxfw6fvpnC+zD/LwmjQ5byRCTpHGZ1Acbeo93Ey+U7e5QxpAYRTX/vDcAIp8dcW+6Wkc8oMMFBAhApIkZA85CjqIpIFJFmoRJUVyFMpkX5kfU+6DQmQOqErWezoI6koyG4NUPW3nsdYSbQfOkhI8PF23o2s3BBSYGagyXWvjAMKHq5C68f/73vHz71vcxpXqu/om6qUC4xAFpxcXBAH9tqXUiof3jzk+HOPsjs1mwa//5X3e/5dfsV1f/o5HimyXj/i4bRnNjpkfHjKfz5nOpozGE+rxiFFlqCuFloHOW1zf0+w6+tYRZfKiHI8qDuYzppMx46ridDLj/PQUgiP4wGKxYD67pDAVdR2BZLfjQ0icIx8ZsK93pM6xh+Dz+CaEPSgWWWJ268A48lx3NoSAlCJFWcYsGMh3lUKilMgb8JjpDok/JfeuGulBkz9u4r8K1GA3f6PDlJMBo8+UibxYBPEctzbGQPAdwW2w/SVu9xShegpUshSLJ0QmBAwhKkQMyBiIQaXxfxapiLygDR7S+0ShW+rAhxixfZuBsSAGg/ISoTUiO/InSkjAdR2265Ni2Tm6rmO32+KFpoiGSo8T5z1fVGNwaUPlFMH19M2a3eqSi6crzk8jUlqqccvsaMXh3Q2T+RGmGCNVRXCe4AMFmqA9sbcIrejbhuXyirPTFV88g89sGqVqYLuA4lFPUZ+iygOK8RFRDjznQPAWlEQWZdruSNBGY0y26osxbfjaWzm039UtlS6KJDTWyXg2eNLi4xxCQqEERwdj3n54wqwKtM0BB4eHzOZzNldXnAXHqfqM8EJw7HZLIjm5+4AHb77JbDpB5017kAJRlZRVledSOUHyhvvPl2ljf0pFsmODyHSEPHCMwoNWCOGSNaIUCBmyt5nPCDruvYKTEFLuAzWcF+k6kGlkXd+z2QSWK1hdwWabntNE/mQay6aHX/4K6hqmB1DV6ftJMpga3IMlhXOZR5xVeCFcA1Q53Pe2KsqBTZepCfxWRHjd1xV7RySR54PEJPaWuVs8DCoH+bYPkd4mWqNWEq1AySSAjvlaFmISqnoCLgwan9S8sW2P7zpC2xK6nth1YFuE75EiIAm4bkNsVsm1R1Ygiz3SvW7NyESpiDeFr/65bvn+uv9dw/hbWS8VGLvdBR/+6pd8XlV06w1aSe7eu8PhwZjV8pzFxVMWZ2fY/g+5Ilt894z1+Zrd5orFYs54OqUsK8qq4uhoyt2TA+pSI/AEa+maHtt7hAloJVDCYJRkXFXY2SESxbieEIKnKmsWlwsKU9F3nun0gLoeURQVA+IRcohOvnZr2G/B89hdingNiHO++q1XfG5vmhejiPc+ibvyiqKVSl1h1JcAdSTElNwjZUDpFHlNCMgQCShiVAQkPuZoz9wNCN5AULl7nBUd+BxvKhHRIWiIfksIK4JbggwoWaJkj6cnxo4oIiEmR4bgI7b39B2EkI6x0
gpRgVJF7lanS9ptdYxjjDjbYbstEY+32e3EGIQpkvl/SB1j26eNVrp19M2ObrPFyQJZH+256HhP1zZ0zW7vIBBsx26z4uJszaePI19s0qsvr+B45XkzrLhHwWReMxoXiFKikOhqjBlPMGWBNBLfNGwaz+nS87S7vrh64FmA6gzK8Q4zOseM5shyTtB1nno4BAoZs8hwcBYJERk9SkQKCeI1cDX/rq5r4Fs659KGvA+EPqIKjaoUzu6ojGBUSC67HcvzZ1ydn6FNkdfFEYcndykeXdD614FEkUoqhTEpbU6KFBcsRdoED6SpdM4LriHP7SCMfm9JLFEIZPYyVoj079wJBIi5PT2kzl1XJAV9hP33tcrWaDE57xzNan7wdsfh3LNuYLWC/3QOjy7glxfw6Q6uvuQo8fvKRfj7j9LTT4/g3gMYTUAPPOPc+MRlER7XF/jBtm0AxrcdCZ3CsFITZvD/lwwd4ETNG5wcEkUiInKAjSOmJo5MlDSlUvoiQ2JeDmSJiKQNEQEv0/VYK4lRApFpj855euvZNT2L5ZrNZoPrLa7ruVotOb9csNtusV0HMVCUhrouGNclo7ogSoGLiqCL9LwIpMr85yizwC7kDvLgPBFykEe6pbDb/MH4rmX8rayXeymMDdtn77OFNN8BNpefo02F7ZbE8MfObiPELb5p2bZLdlcThDKooqDZ3cWoSJiOUCIQvafvepwNKTksJi2yFJHCGGaTCZUpOTo4IsRIWRmurpbEIGh3PQcHOw4Oj5hMZpRVjTJlWqjFwM+NeWubbmK4Eff8XIDnNpW3VF/dkccMWgPOJ8C7551l3+fhPjEOSVppxqakQ3mLkn4/qvQofJRpvB8Ho3pDCJoYTKJQSJueQ/UpT14FpBIQLSE2wJbg1zi7IiqRY2Y9QfRE7L6zEEIEB33r2K6TP6QykqJMRviUw5u9Pta3dQyd6+i7LcH3eKXSRbIwyLJCKLPnlnvn8dbhuwbXbrHbLe1mQ1Ajir7He4+NHuc72t2Grtklv9miwNuO7WbN+bnn0RY2+fm3AbYrUKc2AdpxoNYl2tRIXVLWY8rxGGMUSjqCWLPp4WIX2X7pauqApx3MngUm8wvGswPMRCDq4WLiUwxtZM/fSxT1JKFWRAqV6JTf1etTAwXKWkfwFrvrsE2H0ApdKXbrK/A9wvdsry548tknrNZbOht58+13+cGf/ZiDOw8Yjz6nXf8pzNaXVFFc6wfI42+uu4TJT/162nbtfnML4NhcdyBBIxE5oUyiUgYee5lffl1SJBJL2E/QMjAebB7yS0sexunRjg4mzA4ELloQae1sfcG603zwxZZffLTk//63Lb9Y/HHYaRfgl1/AO58UFKOaslYotUH4nmgTdQKfQKmJCUwnAfV1MJ7kdi/8e9eiPejd94quMz6G+5K7xhGSNWaeegoJWiONzJOBdKxvuiT5GDNHOOUMEKEQKb47ZvvNvuvZ7HouLlc8enzK6bMzuu2Gfrvh8uwZzeljYreE0CYOf32EPjxkenLIyfEB1XSOGh0gdE2Q6ShljXqejA56mvSNNPFMZIp07WfPtQ6308P5rr6BevmXwvj84Ci4Db3b/JY7/6HlIe6IcYQZjzk4OOTw8JCqqlAq2WqFvV1a4j6ImEBA8A4hFIVJ8Z99b3HeIZCEEGjbliVLnAu0bcdovGE0mVLXI6Q2ycotz/aFjAgV9nQAEQJCDPbq+faylP43Fpt443tfVvAO9xhA8QCSg3N41+LDFuIWRZ+39hKkQUiNEhohFEqUREqCqAhSEaXZC/eQMjk4SImUCu/ANo7d1ZbNxYL28gw9rzmajFFIfFSIoEAYIhpcCsvom5Zm0+BDpCgLtIIYqrQ8ijRse+69vmCFCLZv6VvwN4ScypWo4JC62HO4o/cI5xB2B/0G+gacJWDxtsN2DS50hH5Dt9vSNTtMYYiuxHUNq2XL6RK2X3rxXYTFFu7sHAdeIFSJqSeU1ZSqHlGNRmgFInbIc
oweHaInE8T56iutph3wZAWzpzvGk8eMXKSIEl3UKSpaKaRUecFO494Ufd1nW6FbOrDf1a2Vc5YYwblI31k2iytWl1e0XUPjGi4uz/ji8Wd89vEHfPgv/8IXj07xNlGrNqNz1kfnuK5nVI1YrNf84eZsL68CkV27o2kanPtSYySLrUQkTedi3IurYv7viy6nvUw29DJGfCYjSUAJgRKp8ytFWsu0VMgcHf98zzoFITl3kygn8pQu0QlKo6l1SVQFQVb01rC5DJxf9Xz+aMtvPux4tv7TTrtFB//17y3OGUS8Q5gaiu0zdA867m32U3dz4BjnJxpA8RAKchtlvd9vKGSmSMjM+023/DORgrBCCIhsd9q0LbvdFmKkzImASgikZG+bNnDpfIh0Pvt6S4XSilGhGRUq0TF8oGl7NtuWxdWWzWZN27ZJTJdt+GLw0LfAAuiJ/QK7mbHs36YsS8z4iFExRlYjrNS0zrFc7mibPsV5a4PWiYImpUyx0DKSxx7JzGKYKGf+/Hf17atvcY8oIsuCO/fv88477zCf1tRlGuW7LuTd8eDekEYeMQS8cygl0cpADHS+x3Yu27wZnHfsdju6zrJabSirK6azGZPplNF4TFWPULpIqUg3wXHwCBEQe09en/2Pb//MGNS6z38vj7Nk2tEmjJtlawOFYk+9II3/+w7aNb47R/otSiuU1inMwhRIXaC1QagapCcqgRc6CROyhVFatJIvsaQkOkG3iSwvtlw9W7A7O6UKR4STu8ioEcEk3hYlREPwLaHv6NuW3WaV7IQYEypDyle9KWsYxqsvXiFC37d0rU1jPikRSqJ8jYkRZcrc1YiI4BDBItwW+i3C9YiYRqne9thmi++3uHZJ3zbYrkdQ4W1J3zUslz0X7dePTXc97LqIDRKhS8p6xmh6SFWPKKsSKTzeaVTVUs3vMDleYZ5Z/LZ57nEisAhwfhY5mFwShQJdoqRAFVO00gg5OK1EvHc4l+gh3sbhNPmuXqOy1hFCoO8dzbbh/OKU08ePeXb6lNPzZzx+9ojHj77gyeOnXF7t6H0a/k5FpF8uWTx5jMdQ1yPKoqbpd9/0WwIiu92W7WaD7e11bgfsu8bkTnn6fnh+4vWCz25Nom5JF9EEwCHRKAlKRqSSSC32NDSl9b6jff0OBDkU87nvAiBzwIcUCJMEYC4Y1q3ig8/W/N3PF/xvP2/4p8X19OiPrQh8fBqR/+SYT+eoewVje0EdHaVMvQ3neU4PE0OiTwxEEXmLaszO+iRyCxGpIkqoZHWpIkomDreMIKNAqZC93z2ut1wtV1xcnONtR60FRpD9gpO7U/Aeho53BBtIVp9SI7SmrkrqskRrhRCK3jq2u47NtqVpdyADZW3QtcK6lt5u8W4H3RVp1rYB3+AvRzT33wIMo3KErmoaBNuu5fzsis3pGlXXjGZTJtOKycRglLqebiSJTXKMgjRFvr1D/F294voGgLFKuZZCZ9UD6YrsG+DrXSm+vhLYrUcj7ty5w2RcIrG4bkfouz0YDPHaazB4j8USQ1r40so2qFtV8uUN4GJSFkNH0zS0bcNms2I8GTMejxFap5AKqZAqdVWJkehJ4R45iz18ySXiduvG48a4X6RF5nld/2igTtwAxyIpq1UO2QjWEvuWqARRK6SzCFegjEEYg9A9QvegOlANUhZ5EzCQ7wQiVsioCbYgbAPNomVzsWJ9ccnxuCC6gAgKETUyGIQoIGpi6Aje4fsG163Tx8ELiEVqdQwW9oO445aWmxCgb1o6nflvKqlxdExWdNJZovNE1xNsAu9da2l3jtZGmgixcui+R3YNtlnTb6/wzhGixNuObrtie7VkufTsfsvHIEqBLKaU4yNGsxMmB3eYzo+pqhpTGMDjbEPvJfOTlqN7PePHW9rtI74ME3pg0cL5k4CL57iYwkeUAKXShicJXjzBW0L2cA6JHp1CAL6r16ZSGldK3HLeYZ2l7VouFxd88tknfPrpxzx+csqmS
W0ADRwZyYPZCKMUq6fPWPeBy8bSu2/Sufj52jsXDICXa/7rsHbtoWi82XZ78bXU+0AMAoXDBZUJEZ4QTOaUKmImVyD0c766UkiCCMRMBblm7GZUlFfhIFI31Hew3vV8drrj15/0/L/+fsfffOp51MKLGsB44NPznl/8/AK/MdytFUe1Y6qzViCDYpf7NDJe84oHB4vbqo+/OE1NmYGOJiRKJM2GUjptNjJvWOWfB++xfc/i4oKzZ0/ZXTwjrM6J3ZJom+wxn1PkMk1hWP/TxDb9faQxqMIgVYFQhohKgvgg8AGi1JRZoN/2HdEYGM+S0thL0qopQBqsT7Qla3t0sJS6oJSR0HXE1QrX9mysI/gRQowojCB4RwwOH5JQO/hsV5ojpeM3ksX9Xb1ovUJgLBHlEeM773J4/yFRSVwMtH0Cn93pM7j6DGLz+x8qV3A9Asl4NGYyKiF2tNHTyW3OwLhhvi1EchjwFi/BK4GI6WQtihKlVF7sxB6sB+9w1tK0O66uBKNRQV2XuChpPciioJ6OqeqaQmmMVImXGySDScXLGKVcUyKe/3d6v2ksLiAJ3LLALl5fiRAidUJ0XeFDhWtLgjV7/2URLNInuoW0HVI3yGKbNgRaglZIrRLVghJClTh6UeB6SdxG7KqjudqwuVrSncwINiBiOuZEDcKkrwGitwTfEPw6H3+DoEZgSbv6HAuLzBuNFwfHIULbhASMJUgdESYQRQtCIfoO17bYpqXbOJoNtA20HTgJsQKNRXYdomvpdhu6zSrJd0xNsI5mecHqbMlyE/ltk0tTV4yPHjA7epODk7c4Or7DbH5EUZVok+zVbN8QVcWdJrJcO44+O2fx7JTwJbATgVWEZ1fQO4+1z4i2T+IcrTBlnYSkwaXfjQ5BIIRr68FvppJAVGT7pX3S1B4u/dusGPJ5LkAogS4MxajEE1mu15xfXrHNoLgW8HAy4odv3OWtu3d59uyUf/nkEY86yx++or6aMkVBURRIJW+A4wx89v7pw4Qo1y19DKyzKCTSa5SHLgqMEBgZsD5RwogKj87XgyEnL22etdQ46xBGE10khOHnaToXCLjo6PrIZhf4+FHD//a3K/7XX8LPltzq36KLkV988gi7q/n+mxJ/RyEqjzYgcuIdMQnxhk6xzl9v09D6X37xUaK+KJVuMnVvZZ5ASqUy3U5dA+OQOsa7i3O6p0+Jz34J3WekLcOL/LFvzh/S36+Xc9ajezCaJkuPyRx0Cd1dsF16uoNjvIBts2W1ukSoyHh2wEQLShnYhh52PaFr2LoRIU7RhSQ4l7yP99auybUpOE+034UmfVvr1QFjWVDe/x7v/OSnvPX227gQaLqW1XbD1XrNMyL95gLsH7F0REckYrTGGAMhYGVaAlKkYyDEgdJATnX0eAdORCQ6CSxE4gp551L3OKtfQ/BY6+hsh/OOrtXsjGLTWRbbHiskelRR1gXjomBclYxGM0bVFKNLlNDJcuxWa8+2yzyyfBkZ+LA3540hEmUghOxLHK9/WyqFliWyHBPLaYpG9i6tpnIYZ0IYWg7YnNzswQeilwSlkFRIxkh6VJQo65B2Bf2GaHcE2xBdD9HfsFoTScSAyKMni6BD0uZ1rU7OFfT5bwwxDgb/t9MxjjFRhTuTRDMyd1Ui/T47pdv2NCtYX8L6CjYdbHPQVTWDiXaYrsO4fh/mMlwAoutpmhXbZUfvvn6pF1IwOzrh6N6bHN1/i5N7b3F4dMx0NseUBUpJQnD0fQuqYNt6TlYNd++c8Gg8Ybf8qsWhA9YR1BqkDJjiKlEyygrqKZgaHywhuNSNuWmB9U2s4cIwmb/NnXtvMj2YI6Xk6uqK9XJF2+xwfYv3LTF0xGAJPn0mXo+oipdbafpzTX+KQhBl3pgaQ1HVVHU6H++Mah6e3OHO4QmzasLnzSPOevfagWKA9WbNxcUF6/WKtm1ROjnApN18BsbPbf5v77x3+bogvENGjcHSA4WLWJnit6NM4E5JT
YhpLQw4JDEJ8bTGOYvWGm8jPiZKBqRrRvCwWjsePWn4+S8a/j8/g3/Y/uk2bb+rVs7yq6cOUdRU4zG1WjNRcW/ddpM+ce2Cza2e6+7TJ6nVr3W6SZUWVa1zQEpOSJUDr0LkaD4HqyUsNtCtuJ0jdHO6kP8u4RQ2K9gdwvgumDI9d7D5OT2sT+m6DWeLM9ZPpkwOjjg4uYMLiub0GawuE49YFcQ4ZkefqDLOZc5KZN8NCz4HHfjvgPG3tF4dMA4e51ocnqgBHxEyIGVEZwu0P/ZsjTGNGa23OK8RIQVxhP3tOpY5EiBbjqUUO5v4rFmUpL1EOYnRCq2HfbVnUB9HPN4FugCXl2s+e7JguVzhui0CizKScjTm3lvv8vY73+fk8ISDySGFKW79UCYQHAkxZGB5TdsIeXQjSMrdENJxzgeM3KABKYnSIIoJanQHoccQXOLOioCUnhhdcq/AEfHJCN05Ii1BdERhUVKhZIlWU4Q8gxAx8nOq4py6auhGgrIElXNLBUk2nfyQQeCQwmK0oyxs8rUULTG2hNCnqGIhcxjI1wkL/8RjGKHv0k2Z7AWa6RrO9rgedivYLODyHC7aJNdYk+JUD5fga0/ddYyDQxUl1fQoiU2ExO2aZBfYQUU60b7MVBjNZrz17ru88c57PHzrbe6/+Saz+SHjySRPMBIX2NoWIQXb3Y7Dozn3H5zwxpsPeCp6dqsNIXeEDOm5ZH6uXQPrpcdUF1DWUM/Qo4jr+0yjSOTD5Gv8vHr8VZQujnj41p/zl3/117z73rscnxwSpeTx46c8fXLGerVjt2uQCooCrO3ZbjZs1kvWVwt2m0va3QXB7fjXCJSVVkSXznfnXJqu2Z5iNObhO+8wOTig33X4tkd2FtU5Ts/XnH52ys+ePmPxB1yUpTIISCPhV7Qz+uzxp/z3//7fuHPvgLv3T5hNpxSmTE4EN6hvac2GWzvpgc5aNCJRuoAeTR8DXYhIO7TnJRKNRyOjQmiHkxGNRxIRUiRqkvfJochHPD3EQHCRvhOcPmn52T8s+ZtfeH71kkDxUJsQ+fgKjvs5x1icaVD2eVCcySEvp1bbBHoHYDzclEoTquTXxiCiu/ZtA9AwOwL3HjQWWPJyzuUWwlNYL0hHYqDpXT9XRNJfSHoUK1XyWI8QsiTamLLE5Qzqg/RYvknXJOczGN533m78+ztQ/G2tV0il8Ph2w8422GAZ2nKDvZkUww7vj6lBROTwLjlPhCwuCn74/5AAZFaPxijw3tF1FucjSiZelM6gOJZmL2RLYRkDQPQ54COwXm1YnF5gr75gOJETI1myvTyjaVqat74H9yPj0fjWj+T+whEGDvPQNQ7JgouskCWdmzKfnwL27kIxCpAKoWtkLRFmkt07fDK5xxJCT/QdxB4RLIQ20x4Czjf4sEaqgDaSwlQIfU6wESkvqMoFo1GPazVFpRDSE6OFaDNFwuaFwyLoUaLHSIsLgehbvG3wrsN7i0DvqXwx3g6Ai4Dt020QTuyjU0MSLm+yOf9lC6fAJQlwChLtoFxH5rsGZztMUWLKKjmTeEdoJSGkxX9Sw2ELlz4zpoWgGI146913ePu9DIrfeIOT+/eZTpMDSmqeeZyz2F4T8cw2Uw4P5zx8eJft6i0mdWBx+oR+tYbWI+x151vkpkzXwXbTYdZLzHZJISXWdim63DmCTx8OnSklr6p0ccKf/9X/zF/9h//IT376E956601O7h4TQuSjj7/g0+PHrK627LYdk2nJ0fEEqaBtO7bbhtVqy+JywWeffswXn7zP4uwDorviXxNAVtlLO0awzrFrWlabLSjF8d17HN+9hxGGZrnl0Qcf8/T8MzaXF2yaDefxdxPGy3LC9OCY4wf3UCKyW1ywvDxnvdngwss9hk3f8E8//xnvvPcGb7/7Bjx8g6Oj40ytSJv88ByN5jZC4FPZEFKYBA5B2kz2CHRMHu4y3HRSUNnXOKC1whHQWeUgh0Q2IgpNQBKioO9gs4o8/
qLjZ7/y/N1VWjdedq16yaKr2YUxTnSUIqTBX7wGyM/VbW6C+xaQCQQbndSHxqfusMgrprjxpIP7kZJgimTGLN+GqxI2H0N8xEs7j0UkuVo9D4pTDdNRB74Dv7rxuVPgj5OVkMqLrDSD8OWaNO8zMBbJ5SjHNn7rqgIOgAtenA//baxXCIwDcXPFerlgvVlT6gRAtUxKVPWnoJ0YiD5kCkH6QMZsA+Nj2HdU0789EoGQMqlHiQRv8c6BIINijVIQTIpLToEZFut6nHdILbPHpUz82P2JdP0e/foJj362ZfHklOX3f8LhweHtHL7nniUtwoMFW0qFSrZxRJ+7q0OHNWXwDePIKAY+hcicX0kUOscva2IM2W4mpLAO3SOiQ0WHCB3BjrF2hHOSxnqE7zDRY31LR4e3jhBWmLJhNJGEOKaoKyypW+NFS4g7pDToGAiux/cdvmuw7Taly1mI0VCUc4qiR5clyiRbOLi9JdPZNPUa1rQcXIiziWZhuxSTPDCdB55wJHEFVxtYLbaMZkum80NMVaOI4CRmPGUcAlQ79KGn3gXG28hVB6P5AXfuv8F7P/g+7773Nvcf3OXgaM5kWjMalZSlIW36Yhb/SMpSM5lWHB3PeOedhxgduX9vxvLyLpuLJ2zPn9BebXBtJPZQGCjKlI6lyxRO3rU7vFTYvsc6R9/32Dal3u0pvS+9BEV1l7d/8B/50U9+yt379ynKEh8CXdtjrWO72bFdN2zWDc2uQylFP4koo7BWEWKFKQvGB2Pu2BKh50wOHrK9ekzfXODsBmd3ONcSg/uj35igIL4w1/HFK0UFpfFzCHC1XPHo0SNc8JR1DSiavuXxo2f84y/+maurs99rySZVwcmdN/nJX/wF85MZemQIocdud3TLFdvzKy6fnvPJ00d04eWpMZ9dnvNf/79/w/HRAf5/+GvG0ymVNtdGl3stQY73uCV80fc2d081moDDkLbqDiMsLlhsBsQiP78Jnug9mkCfVt8E8WJMRsE+QFQ4X7BYWL74fMe//GbHzxfwVYnsyymFITQ9tmmxZcDH5GOcYeCecbuvW31RuakVXDoeyf9u+PMxCLSB687x/tdcpmgrmB4mwLpxwDm/H5J9+UMhknZF1Qhd3dDUKKQuGR/c4+TBWyitaNZLVotT1uefEfpVElrKMuOIniT8vvn8HthC2CJHx0zfOKGeH6U136T1mizwJ1uOjuoKpb9dxl8SeBP4My14Y1zxj+uWfw7xtYqUfxX1av9qdsPu8pzl1YLpZEJlCiQRLQT6TwTGhEyVyKOLARiH/L3UgfCEkPnDQiZvSplGddalOOhYaqQq8EESY4onTmPsHuc6nPcoaZAy2V5JWfD1w6lItEu2T37JB23PeH7yokftSw8vUnQy4jp9hxyVHAPCe6IET0jvdViUYha05EaMiMn+Jr2DBI59TKk9Ivv6SjwKhyRdFESwONUixAjXQxsCxA1OdljfIP2O6Bp8bDGFox4bopyiqoouCGRvIe6IYYtQBUp4om0JXYNrdtjdlra3oCMxllT1jqpqkWKEUamHE28JFkcyzSyj3RjTmuwd2DZR3V2XO8h8FYxbYOVgufCM5kuKesxYmRTBrSRSK3Q9pXKBiQsc9J7DnWPTR2ZHd7n7xtu8+c47vPPuW9y9d4eDgymj2lCWGlPIvEFhWG8pCsl4XHJ0PEPpyPxgxGp5h/XyDRbPHnH6+YzFs89plwtc01MUgqos0FkoKaSm71ssEecSnaJvLV1DCgSQt9tE+m1VjR7wl3/9f+J7P/whb7z1NpPZFB8C6/WGEAJdZ1lcLlgurlitGpqdRQhBVRVoY+g6h/XgRYUqRsyPSsrREd///o8Ylw4tLSF0bDdXPH36mLNnT1heXbBeXdE2K7xt+Cqp5WaV1OMD2mZBCN+sk0NkoBApfIDVcsWTx4+RWnN4fIcQJIurDR989CmXV+f8PrSjiyk/+Ol/4D/8D/+ev/jLnxCUY9VcYfsW4QMmC
sYUNOdr/p//t/+Ff/jw5/iX1LkLMfKbjz7m7t/8LYdHJ7z13veQZZ141CJNtGLMgQkM6Z4vXn3foxEkeZ0isUwFBoujx+mkCxFCXC8KOcDIikiyd8vUhEjyYQ8BF6CxitNTx2/eX/GLTxwfhd/9SbuNEqKmKmYc1AXadrBrsUWKuh54FGkul7rjL2coNHT3Q+6YynTc8vAyNZIyhSKZHN+4f16EpYJqnHltJWyfgfuU503tBDAGpuk+OgtElIKiRNYjZodHnNy7z2w6JfqAiJGqMkzGNe+99QY//N7b1FXFbtfwxeNn/N0//IyPPvqIsqqZjCfYvmW7XrI4e8b24gtit8ggOY3hRBE5PBrxg++9wf033mJ+cEg9GqFkWqyjT5qmyWTEwXxGUd4+lfJl1h3gf64VP33zLm/cv8f8/c/YPL3kI77pNsGrrVcLjENP/+wRj8cj5vNDZuMJWkpwIXXa/oRZriAlzpE7oZG4p1L4kDjG3qXOb4qbTAhAa4E3edwd82OQuMghOzs43+N8j3U9PnhMSOKXQZz1Oz8qsaO7/A3d4tM/9Wj99oeOQ4cvEf5j3iB426cuoA/YCEIbyqpGG3NNsxA52clotEr+nckcPdILkSkWmiiTbZGP+XsxtVaDLPFaIKqAEgVCNCjVIfwG7ArCBmkaVNiCkbhW0bgRq52iixahVyhToquIUiW+XdDvruh3S9rdEmsDqlJ429I3W9rtBq1HlOV4CG373cf9Dz2GJCeGm1PjOOgmskPD0E0eTPFvpkcBbIGLDZSnLePpFk4i2lToskbI9FkJUWBdYOwiEwd9FMwO73B8/wH3H9zj+OSQ2WxCVZpsGh8Reyg+0IsCSkFdF8CUolBMxhXz+YjNaswo+3cLEVlJaNQlVVlS12OMKVLXQhm8MlgXaHYNzXpFd9Vjr9L1syxfPpVCyAn33vgRb7z1JvWooml3+ODZbDZUZcmorgg+0LY7jIHppGBUGSaTklGlMYWmqgxRKISqQBisHeHdjOlYcTQvmYwMVaWxtuXs7JSzs1Muzs85Pz/j9Nkzzk+fsVycs9suk9tHdJmnL0Ek7+5I8lH9pisiIcr9JthnH3YfIl3bsto0fPTxFzx5/Dm/e44iMKMpb//ZT/npf/wp99+5h54oXPTooBCyQASopGFeTqnNmOnxHeTHBh++aqFZqIKyKGm6BvcCXeXWWj7/7BEfvP8hd998yIO33+bg6AhTFPtGhyRN3JHDyf9i537bRLSISUcxZOCFRKnTaGRQpLhSef18CoRPCXhg0vY8BlQMKCHxPnK57Pj8fMF//9kz/svPev5ul0h2L1rDrO/r3rVAce/uT3jr5JADdc6xfspcBHSbGqcY9uTiwVTu5dSXetLep1GbytdloZL4bng3Udx4U0OzZuCTyyQSDsB6R7JwtVyD4ntQHlxzmIVIXw9mVHeOuHPvLm/ev8fhbJ5sKoVkNCqYjCse3jvh4YN71GVJby3TwyPKyYR3f/ADVBbwd23DZr3m/OKCJ0+fsLy8pNvtiN4zmc85PrnDG2+9xZtvv8vxnTtMJzOqukarHHIlBErCeFwznY0pzLerY3wAHFeKWampJZzUBe9KweMQX0sh78uqV/9X216y+fgD2oMT2sMjpuMJo6pOHUFtiN1+/vL7K1OVxA0yf+LfhqwODns6hLM68cKkyspilYQeIYVyKAVSDuA4OQw473DO4r3Fe0/QmiCHx/46jtKXy+bd5u3VQGca4iaTEM8TnMV2HV2zo+k6dn2PLkqYzxFlhc+ddblXB5eoAgopkQSCiEQHwYvBv2yfGui9SLzDGIlSE03qmFf1IWBRWITbQLMgsASxwfsVXmq6IKEfIbea0lqEvKIoImPvKIoa21zS7S5pt1e0uxU+CCpTg++w3ZZ2t6KuJ+BnN/haL77E74HxsKYP388TQZ8dwwbhSkE6WW7CpQBcBSgXkeNVAz5iTEVZ16jCIFRyPXE+4gLMpUHokun8gIPjE
45Pjjk8mDMe1RSFTuEw0UOQ+41MDOl7UkJVFRgjqGtNPykZjwyj2iCjp++6RJFod7i+p6hr6smUsqwwpsILwc55mt0ucXQvV3SXEbuEUgKj1IB5WSVEyb03/px3vvcek8mI3nZsz3cIqdDKUBYFoyoZ9QcfGI8N82mF0YayLCirkqIsUCbdtC6RSiXfaO+ZTyqODsfMZ1MmszFCCVbLK64WC5aLSy4Xl5yfPuP09CnnZ2dcnl9ydbVku92xaxqsTRvgvm+w3fY1AcZ5MpS1PFJKtNE4H+i6ltPzMx59+jG2GSCYeO63AWShOX7zTd770Y947/vf5423HmIqzVVzRQgOl3eAwgtciPQmdVCtknzZM1wIyf17b/LvfvoT3nrjIR9/+Dm/+eAjnpw/+loA/fsqAOeXC95//0NGhzN2zvJ9o5nOZkSfpmBKSrTKNn63sCFumgFop1cQYw/BpyAcoZKd5IAmh6fTiihk5urK1AgNkoBHaHBO8OTZmn98/5z/9y97/ssOrl74laZXMSXh24avBoKMRyf8xU/+E9+7WzPd/Zxx84zKRnQOMR1wqC6+hmN8qzW0hvPNDzzdmGMG82uRQxee54GxEGkhti6BZmWgmsD2CMIOaPOd51AcwmiWXS1yh1pEiAZdzyinB5STA0bTOUZrSmMYjQvGk5piOsHKIvHGlWB6cMiP6gnvfu8HWO+xztO2Hbu2YbPd8v3Nht2uoe96iJGD+ZTjgznTyZhxPaKqKsrCYEyRE/EMRaEpC0NdG6q6QKmXe+RvsyQwAYyI+L6l2awxIvCgVBw3ji++6Rf4Cusb2M4E2K5x0tAozawsmVQHyKMDmu0D1qeRsF1C6PjdAFlSjQ6YTacoJZE38PSeVxx8Nsl3eG9xXoITKZAjx07ub0oglEhf9xqBoRN9w1szewWnLtM3J/SxvaXdrunbbfaltbS7DW2zxTpLIDAajZCjAqVBeIf32cECwa5dslsEjFLUxqB0gRMFURRZYBv2rWkxKG2J6cIhFcqkzruIHoFDyBpBQZAjhNph44SoFFYoRCzpwxx8gQweH9eEaNFKYpsruuYC69bE2GU7H4uQNjtT7HBug7UrtBiBMunvdwv13Pqc1+xh9PhlQfFv+yQ6SHZsgdx1KDBFhSoKUHkU7gISgSlrytGE6XzObD5lMh1RVQZtJEKE/FlNG6kkcHRp4pHHc0oKpFQIUlfLGUFpBGUpqeuSajSmHE1pRg26LFDVCFOPqcoRNkLX9oRdz27XslhYumUSVY88yG3iJb+sini6dsduu2G73SBk2phKpUGnkXknIIYUqpD8xRVlkS42RguMFhRV4lsXhcIYg8AgEIzrkunkBkdbQFmW1KOavqsZtTWj8YjxeETXTvDZY1RKQQyBvtvR7Vb03eYbp1AMFXxev4IjRI8qNNVkxGa7ZdNuWe9WON+k83A8Qc8PkSJkoaxFEji8c8T3f/wj3nznbQ6PZugxuNhhXRJFDUlz3kfW2zWfffaMZ5895Zfv/xL3JbBbjWf84C9+zE/+6qe8/dYbHD+8z8mDB/zTP/2STz57n65fc/NMUdpQlTV1VbFcXmG/JmQkApvNmidPHjM7PuDg5BAXbALGQlJoTVUYYt7chxdU+nddZhhkDJcS2Txa9ijRIYOGoCCI7L4VCEoTlEYJiYgCGcReNN66wGLb8Xe/vuD/8fcNf7u6HVAMcAi8RaLrbiN8QQLHwxEYjY+Yn5xweCg4KgpqIxFrkF1u1hdAHfMAAQAASURBVJI39gNd+6XV0F0QkN2f9lzjQD7IIb+Q7FIRM70CrrnIIf+uyDM6M4PuPqlr7EGMoJxCMUr39eHaEi0odo3jbLkjigVX2w4tk8NUVRdUo5JRWTIqC4rs+KOkSMmuQtH2lrbv6DpLZ3u63tL1Dusk0Zv0VlpJv7Kc79YosUkTBAFKSow2mPxZLUtNYRRFoVFS8D++/Z9e5sG/tZIkP/Sq0BijMVoyrgqOxxWH7YYv/
g1xKV4BMB4k/5ljFEj/zur9yhhODg9AHVGPK85PjlicnbG9OMdtV0SXSZA3Hk+oktHsDg/fepe7d08ojNmrXm+GXaToW58pFQ7hkm+ukBopdF5hwvCoKJnGISqro5XKST1KpVFUHB4/ZrXtqwfGgxCl6xrOzk5ZLS4hpo5xs13TNRvKQjGdVJhKUtFRC4mLDhcS0LLOcrW44vLiHKU0R4dHTGaHFKM5qpwQpCWgky2RzOA3JMGJlAIpFEGksdFgoyZ0jZYQdQm6Q4YJ0WicMGipwYwRRgAdNrQ06wXe7hAhWd94tsgiJBP4IiCNRZoeVIMNK9peU0iHUROEvD0ENzhS3Gx4XB/odBtcSn9bDzHZnWmKskQXBiVl2k8FUqqWT2l6WimqskgxpnVBVag0DcTl7l2fPrs+XZCdSxs6nwMvpEgcHu8srm+x3Za+W+HsDkKPUgJdjjGjA6SRKda7qND1mBgFMnZ4sWHXWK6W0PlrQ0LpoHyZhMjoWJz9mg+JGKk5ODpiMp1RFiVFWWIKQ6E1Ssr9ZsA5j5Q+BX9IgQqS4CUxJK/owmgKU1AYg1YJaG+3Wza7Db3tWa1WLK+WXF1ecnl5zsV5olZcLa7YrNesr1YsFguWywu6bpm6h69Red/jnMtULoepCiaHc7a+Y3W1oKVFTDViajj53jvce+MhIjii7TASCiOZzaYc3jmhnmms2LHadZmXKZGqQKkCHyNdsDw5Peef/ubvOf3gA2yzfv7FCMH8/l2O3jhGTwtiBSdvnTA5nDO7c8iD9x/w2acfc3b2hBg89x68wdvvvMPJyTGT8Zh//sef8fOf/yNNdz2QlQhmsznaaJrdlqvLc86ePaZttwQESirqqmYyGhGJuOwV/iK1axIwHh5lOP21sEjdI7whekUMKSXPO4/XGi9MEm/71LkXWuCINN7y6bMt/+svt/znK7it0G0D/NjAe6Nk9bbsQLXwGFgBDo0pRwRhCcKjCsfIp9eXTYn2DZ6XjoufA8aRvQ/G4O8rfKJT6Jw4ctOpQWX6hSAB4sHfOEaoRqndnTd6aJ3VxCbdV5A5zRE8uPMVz9YN5+Y0USazqlpqjTQaJXVyoRIi/bqSSGOQQmOtxfc2ice921u+Dq5PQoiEB3T+fZEacNGl95RS/iRSJ/xgdJp0SCH4v/yfvz3AuJIwrkZMxhMm0yktgum2YbTYptCvb/pFvqJ6icBYIPSI8fE9xvODxBsj0Gw39G2HKSomkyl3Dg+4f/cO1aji+M4RV/fucvbsiPOnh5yfnrI6OyO0a8AiZEE9O+Ho5C7HJ2kUPZ9N05jIZ4rDEM34FRFeyN6tASE9SqZozxBSNw7JPnkrVVo6E57PHekcoRyvtdMv7/D99sOKEMlr9+rqimdPn9C3LV2zY7W8ZLdecjAb8eDeIbgOIyO+H6cAE++x1tK1HZ9/8jHvv/8+Shnefvc9HrzxNod3BBNtUuJfAOc8zluctfi+R0nJdDpnPJ4iTYHUKo17Y+quCy0SXzE6KGqi9gQdiVoiTI0sPUpovHW0bUvbLZFYNI4gI6pQKFNSjdKtqDWmECAszu1QvkDHwan3xStkEOoGbQXJiaLroGvzV5/GmF/ent34cyBF6hYrY1IEqkjuJ1Fl9YuICKmo64rJdMRkUjMelZTZBSWSEhljDwKPwOYo4Ow1jEcIj1ZATNMBbztcv8O1a3y/hWjRWlKPJ/iYALcxEl3VyKJKF0wV8SjaNrCx15rrYRBa3spR/V3Vc3X+Pr/6Rc/J3Td58PBNZgcHTKYTqlENOYHSOU8ISRjbW4fpFcYoyt5Q9yXelclvO1j6LJJq247trmG73bJrtqzXaxaLK66urlhcXrC6umS7WbHdbvAeyrLGW8dyeUXbLnj5Mqk/vkJ0eN/TdTuabksUAV0bZKkIKqBryfTeFFOXvPHuMfceHqUI866lNIpRVSYKSm0QqqdzO1znEcYgTYEuarSM+BhpfMfFesGzLz7BfRkUQ+oqG
8fGrll1S2Z2xHxyyMHxAdW0YH5S88a7Rzx9+hRnHQ8ePuTunXuUVQ0Rju/fof5w8hwwNsZwfPeEg+MDJpMapSJtswGZkiKV0smuMXf/g48v3DFudtcXvYEmZQCtAkJZMC3BCYILuMJTGI9TBqd8cqqIqWkSZWDdtnx+fsXff3DB35z5WwPFAMfA2yM4nMHFLiUY577pvkJwdN0VtvfIsMVIm+Kgs0nCvl66qnaYuzmuuwzq+meRRBFzJBN479hn8In4vJ+oiLlzTLJyq+rMbbP5YXUCxUrl6WK+v/ewbmC1xceYmgk+34b7Iq8v2YoEypVJP7Mu3fwNm6IB7EuRMf9gxUa+CHvo+/T6hgvB8B4ytfPbVOmvJjDKUJoi6V4albRF/4a6xXDbwFhoqvE9hNYoo3n4zrv86Cc/5t7du9SjCu8sp2fPWFxeEkPAKM3Dhw959+03qMdjem9Zrdc8HY+ZFAU4S7Nc4pihlWJ+eMTDN97g+OQ48XvKikIrgu0TPzam4A7nfQ76SDtJEZPvZIzJ9xg8Pm+tvQ/Z3UEn4V6MOH99YXYu3U9IjVAqie8Gg8hvSKcpRYqrbtsdl5eXnJ2ecnb6jGePH7FanHJ4MOfddx7y5sMH3L9/j4ODOVonjrUPnqZp+fk//5q//dv/SlGUbDuL0CXleMZkfkCMAds7Li4XnD475XJxyfpqTVmU/OAHP+Sdd95jPJ1Ryix+iIKAwotkJWeFwWmJN2Oi2RELQTQVqoK6KBFeIYVFSU+wLdH3SAyKkrKeMDu4w2hyhCrGSDNCqiJxo+EGwfrFKgLWQ9ckoYqWIAK0O1itYNunC9GOFOyx4+v/2pIsWhvsiUTqRChjkGWNMgVJ7GmYzafMD+eMp1NGkxFFVWK0ghhwtqfvHd6JJP7zaSMnRESb1CQJHqRIHWPv+mRxt1vjuw0ydlSl5OD4gHp2mNfpmKccpCj0EPEu0HfJzmmowVb/JTIp9hVjx9Xlb1guPuGLz445OHrI3fsPODw5Zj6bU1V1diBIm4vBL1YSKQpNXVVMRjXT2YSiMFxdXHL27JTHTx7z+OkjFldnbDcrunZN13UE16WUv+f86DTz4/c4PDrJE4/Xsw8iZMCHnu1uxdXVBZvdit71KCOZzifoWjJ3I2QhqUfg/RJvO7xtkdFgZYkQJYGGKAW9TWsjSiFM6jgW5RjvBG1vcWyg/C1zkRA4//hT/iFu6N0KXUSEiphSoirPaC65I6eMDxXtrqXrOj754gOWyzVXlysef/6Yq9XiuYeczCbcf/sBb73zkHv3jpkcTKjHNVGRfLYBHx0u2EyLiy/cMW7W+aKXh5hSZAgnIShL1IKgA653FBkYF7qglx6jDFJKbO/Y7nZ88uiU//KLK/7zZeT0Fj9CR8A7CmQBpw18tICPQ/KUvX4ax2b1lGb5MbYUeHGOjzt0jAk78tUh2Mur4VUN18QsVZaD6I60ZkcS8Bza2YOa2udXOPhl7l/0AJIHQQ3gsg4jkqkaN+7zFW5c7iork/yVA9fXDi2uBZ0DGBakdXwQAUqZ/q3yh8VasPm9pkXp+r0REqf6uabZK/PAvJVK5xs4G4k+HeemabhYbbkIL6lbPLi/vGZ1q8D48Ogd/v1f/x8wlUJqwbvvvcePf/IT7t27x2Q8wtqex48/59npU7q2xfvA8fExD+/fp6prnPesN2t0CPiuZXlxweVkhEQync64c+cub7zxBvODeeYRxzSytv0+n865lFDn8hgsOJ/CPTK9wjtHiBGRP9UxAlKlx8txoYP62+VbCOnECHkjGLJA7xv50MeY/YpTst+uaXjy9Ckfvf8+y6cfEP2Cq7OKi/MnfP7oAXfv3efw6JBRVVOUJTEmRfs//PyXfPHoQ6TUlKMZ04MjJgdHjOczfBQ0Tc+Tx4/4zfsf8Pnnjzg7PWcynqJUyeHhHUxRUZQVYphYZUseT8TJdAu6SPGbWhBVgTKKqq7QaAgWESO222G7loBF4DHlnPH0D
rP5McgSpCEKDaJAycFb9HbK+ZR8J7ZgI2Bh08ClSwl3LalTvOV66b9BZd//Wyj2Zu5CKaTWmLLCjMeYskJrSWE009mY2aymHpWUdeIWIyAGj/UO2wd2jadtHM46oneYQjKaFFSVJupkM2X7Ftc29Lst3WaF223AW0ojMfWEoEZpFOwCfdvR7xqs77A2eRe3bfha+7lXaeQeY0+7e8LT5pyrxSNmB3eYHx4zGc8QKtkiAtllxqdjoTSjqmY6GTOfz1BK8tnHn/Dxh+/z7OwT2n6ZLpq/txy23yGVSQ40r+3FKxCipeu27HYrur4l4qnqAl1NmYuaKD2ogFCRSEuIDfiWKDTBeYJ0yWxRpOTA3tu0se8Vzu3woSEGhbUOIRuKmcL9Flf/2HoWn5/x6M6IBw+PmU5rJtOKoCyqdkyMYXZ8yGa95eMPP+ezx5/x+WdPWDy5IvT575JPX11o7r/zgPvv3OfNd9/g4cO7KCOxwdH2PS5aonMYZ1KzI6Rc9RcV4LWbPL2/4dYwAGN0IKoOryzeeLyL+CLiZcCZSCc8QgqurlY8+uKKv/t4w/9yHnn2Qq/o+TLAAwlVAWc2AeMPQ6JPfLl8vyBuPoWJJKpzgmwStntF1otfrRurpFA5DlqnC8QAPuFGRxb2DhWDA4+ImSZx8x3cIIMM2NPHa4A8gF2Rn3e4UMvcudYmCSgiWaxHArzDS/Yh/VtmR5Ic0IGWuau8R8CpOzw8V7j5OiPXSXqea/Ld67np/rqKQBtg2zp2bU/dalbbhmfbnsXv/e0/oQpNcTjH7hriZvdaLcO3BoylKPjhe3/G//S/+x8pxgXCwNHRCffu3udgPqeuKyBQFYq7d45w1uFDoKpKRvUI7z2b7RZv0+ieGJiMx9y/dx8pFKN6xHQ6TWlQfgjviIisXo4in2MDP9E62q7Ddx26UJR1kYTGGfwGnyKhkRIlAyEUOSgkOVUkjqfH9hbvY9ahFWiTrMRwXx5svapKHrRKwHw+5+DggBAi2+Ul0S8ATwxblhcfsbz8nI9+U6PNCFVUSF0ihSQEz/bqC2Lo8cHy6eefMjo4wknDctegdIF1ng8++oRf/PJfePTZF+wWl4znx7z7vR/xw23LZO4Yx0AyfxuUHslnOODx0RGxCNnnhU4TSXM+ISXGHFJVCkmDIHkfW9chxBytDzHmCKkLhDRIZZCqIApDFLfrxhl99ituoXPpAnRB6qD2fPUv/OVz1wNBCIQuUEWFGdWUozHlaEw1HmOKAiU9WkaM7MFtiDYQ+0CILrl/RIntI03rubzYsLjY0nUd3vdMpxV37x2hZY30GVCuFmyWF6wvTlmfn9O2uyQANzVFIZBVgfWC3uYNomjoestms2F5tWTbvEa0gWhpt09pt884fawQIo1KhZCDbCCLXSNSSJQ0aF1QViOklKxXF/T9mj/2AuRtS7Pb4fotr9WKfKOct8To0UZQjwxTVxHEBB+rdL0vBGWlEDrSu5a+b5MNn1JoIagKgzYaoRWBiA6gQ8CFQAgWJSJSBogKEwLzUeCtdw84M4521eEanxNE2XdqqwPFZKwRocd2a5ptgQ+O1m4IwSOVpgsbdnbJzq7AWPRcUBRjxuMJhSmQAg4OD/jJj3/E/XfuMjkaI6sUQuOjp3ENy/USlwWoSiumbor6Ckfgj69dk/DO4JSgyMOewYzCRIRJYrwYNCIaYoj0IdBsVyyXG371myX/vyeBX/q0ab7NssC/BNANiOY6uPjLpYAHteWhfsJRkBRxg9B+T70daiA4CF4mb/JmbzqTU9QQC53b1yE/+6B21/k+N4FlEvKk70vFXgUdZZ7Qwr7VT75vzF+FSNeZAWSHmBLqYgStrr0oB8P2m1qS4WdptHbzBwlU2NzJtu5Gd9pn4d/QIBtuQ6d4OBYv2QPzd9Qsf31eEpvqyw0eSK/6gsjHqy2xlKyd5bPzDY9svPXPOQLKmWE20yw7R/+aLcG3dq7UZsQP3/0+//4v/x2jeY0oB
FoXFKqkKDJfRUsm4xLinb3Lg3OJ97parfHO0TYNXdvgrGVc19y/dw8hJMYUFEUyy+77Pv12TBGeIiZCf5Rib9HW95Zm12LbFlNqKlejdO4Sh4CziW8spQKdFOAxJMJ9EBLvAt46bG+xfbov0RG9wnVNFgW++t1gjIG+bxHAfDbjYH6IEApvtzy/hEaIPd72eLu8oQp5nkMNke3yGf/y61/TWMf55RXVaISQivc//ISPf/Nr+sVjiGu2oeXJ2QXLTcNhZwkxZeohYlZ5pxUnRE+gB9EhRJdcJKKBWBBjWji1hliWEFsiDb3bEtkRxQwp52g9R+kCpQuKokSZAhegsz65Ad3GsSTT03poXVJ8X5GoE3+o+VQEUAJZlOhqRDGaUE7H1OMxVT2iMBJCg4w9+A7XSGxokdESTJX8hYPK0c2W82cXPHp0wW63xbue45MZZSmoikCUARkatoszlmdPWZ4/ZXV+inU9uigwY0EhA6aUSK+IItKoDheh7XtWyxVXl1e09jVbhYD0eXXEHGX8da9wEAr2PexekNDpXMt6eUkMr687p/eWGAOmUIzHFZ4RWgeQKQWwHhVMpjXaCJp2w263Zm00GymRMVBohdYKoRQuBko8DY7eJbqYDBblQAhFEFBMJJP37vDu/VlS53c9Tduz65I/ureOuqo4mY8xwuH7Lc1G4KJjZ3cEAlJpGrul9WscDdVMUUxnnByf8OD+Aw4PDxmPx8ymU44ODpnP5pRFgZN53Q6OXddwtV7QdaltXZUVve0ptX7hLUzbZlcKlZuamUoxGCEMkepaeoTsEEhaIraxPH224tNHW/7Lk8i/+JfHSh8+57+rTgr44WHkzXLNAVAMCPhL+c83lTB7l7pbr6ylQKebNNfAV6vccR14ZmJvB7rvJg8OTwO3ZfjdgSYxfH8ICBmAaRxoFqQ/oMzPM3SRh8ABJdPvp2SvfGDC9UG5eU0cwHXM7yuQ+M3eXQv9BnAcckd4nyZ7s+SNx331NQH+nNTc+RXPi0KnwANSKuNNwBuAM+A3bUt/FlnuOj5eNjzi9qeJZiY4PNBMqpDWpFt+/Bet2+sYA0YItJRorRE6GaJH7/G9pYsBb+Q+In2/s80gtes62qah3TV0bYdzDqU1o9E4p9UppEw84URvSB96GbmOelYSZy3drmW72bJaLXH9Dl1IRtMRBSmGWiCyoC7ZX8mshvXWI6Il5kCQwSM1WEvwHic8OIHdronu6/ZhL79idGxWSzarFV3b4X0gPid2+L2P8DXfatmcfsivNyseP37CaDqnqGqePT2lvzqDmHTQwW54/PSMTx4/YTIfMz8cUwtNxGVQnCgpRIuIFi0CpY6YHFoxJBTGbJQvRERphQ4Fymik1iBGuFDgvEYog0YjhEZJk7jj2VnjhY8j+aIRrqlmsO8T/MElgdHYUE/n1JMDyvGMoq7RpkCISLANfneJb5dE24L3FOWIcjRHFyOELrFRsdk5Lpcdn35+zmdfXLDd7nDecnI5I/iG7WLKSFm0b1hdPOHq7Amb5YLdegVCUIxG1M5DMSbqGi9KYkhODS5EmrZlubhicbG5tY3Ft7m827JZfsLrPOqMwSNEoDCSUV0QfIEMhhgDUgnG2nBQ1pSlodeGVheMXUrUFdEnYGxS8mEU0LkxvbNY57A+IJXKn9P0ifc+aTRstqtq+56mS7feeoKPGGWYTyZMK4OwLburji5Ydr4hKIEuDDY4qrHm5P6cGAVKGw7mc06OJ0wmJVWlqSqIqqdxGxwWFQzeB6y1bPsdu25L31mabkTbN1hvMerFXSnabZ6aZwpsKZM7WzQQBDQtbBuwl5Ft07FpO847uOjhaQsL/9v1Bq+qxgL+7AjensOBhprrVDslE312AMQyZpe08CWYdqtvYESiLpjUJR6+mkxh0JlrPLhR7E2WZVY/k/4gRiUu8Jc7xnL43QyKQ6YtxAGoDq9DXANXyB3rG5SIoU168z43+c6I5JyhVALgUuUuskw3L
xN9YkDOIXeYvb/+6ocucn4938AnZYLgR0R+TJp8PoV9OEcJ/AD4CTAH/pnnQfMl8Evgqu243/Wcxsjvz9T840pImNaRsWox3qJewwvSrQHjEDzNZst6ucRJhywlSmgMSbAgdfIIljnBUZEsT3pr2XUd69WK9XrNZrOhaRpsn0bwRVleB3jkE8Xl0I7gAyrbrCmlkFHhraXd7TJwXAKW7a6i6w5QSqO0RAqFVoIoyfZsChEFwTms94kd5APBZbsZH4jOErDY3tNv1xC/mU5TDJ7V1YLF5QVt72h2u8S1li/6p+xxu8csPj9jocYIMyH2HYQEigEILefPnvL+Z59xfGfO/fuHSFkghUsb8aCS3MD3yGApRKDWInVlsqI3+IgXkRAFUSRBoy4KdC/RhQRV4ryitxGpIGhBDCILggfu9+1QWBzXmhBJurgUQEXqGP8hp6uRMJ1PmMyPGc2OqMZzTGWQKhJ9T9+uaS+e0S6e0G/X9G2DKcZU40N0NUGaij5ILtcdp4sdn3x+ySePFmx2HS54jk8ntNsFl8cjpqqljFvWF09YX5zStS3WJn/bejqls4GgR3hZQzHByzLRinwSXF4trthsXyMaxTder9+CfLNitHtgXNeaYDX0Cu+Sh2vhA3WUjFQBUuNkQd15xtYjCBRaYorkliK1JIhBXBzwPiKlSUEpUgABHx3WdvR9y65vabqO1lo67zM1U6W1EwE+0jQ7drstO9+xiz2xUBRVhVSK2eGI0aRCa4M2xd7fVcseFyy7tsH6Hbqv0abGmCrhDOfZ9jta1+K8o7cdne2SeDQG4gteoptNloZlDGZ1mvKHCE7A1sHVDh4v4bMGzkLqqn3zcS+pDhX8aJ6A8Z0ayggiU22FvKboDr7+3iUjiOEN7OeEt/nRL8YJeJoMiAdeY2GgqtKBHsDt0F2NZKVXFq2pbMVWFvl+ueM6UCRkFrMM3VqRob+N6TZoBdxAcSB1nssiAW6trt0r9r78XHeSh26yD9d85MHFwul08y7Zxg30i4E77TLNwg4AOZ2fe+D+CmtOxb+f3uVHYst77YJnfeBjEp1Ck7yxB2B8AJRIfgOcZfZ+INEJd8CzmNLu2lt+jVrC3EAZLNHZ5Hz7mtWtAePGbfmHn/8dxeGUYlaBAS1N6tLmbazUAmMURaGoTUGhNb21bNuWi4sLHj9+wuXFBU3T4EPg8PCYo6NjlEyewz4kQNw0DVeLK5qmYTJKY7m6qjACgvO4rsd11xxgYwomkymj8QilJAKxF7EmT0KZvGdlShzzzuUkS4M0AhMFwals8WZTfPU31DMIMbBYXHL67BlNZ3n69Blnz57iu9ugxyf6Ba4nuqvr7+2ro7v4nPd//RvuHE+5ezLhYFKg6FNHWEqiSI4CwQdc06BcxOh8MYqe6HuCyAEpMqKlRhuT7OFCTO4ZeXcvlUBriZSRGJN1WcihLbfwTvcaDh2vB4ElCRgPYaS/r8aF4OBozmgyRZkSoXQaTYfsGrFd0ayu2C0WbNdL2u0GoSqKaoM0I4Iu2fWRx+drHp9teHTW8PS8oWnTez0/Pefi9CknU8lJbZnrDt+ssVu7j7Q2Zc+46/ERZDmGYoysIBQR2/f01tK0Het1wy0027+rV1Ted4lOEdJGUwaLCn2yZHOeIAK9UWjv9kElJkRqpVEiJru+wV9VqXReqbTRDFEipUHKBIyFjEQczqUEwM52tLbDhoCLCW1JoRFR5jXWsloqViFQBEmFSWKaukIZk51FSBM5pXJjLqV0Op8mcX2MWB9QwaO8TSl/PtCHHllIjDIII4kiRTAPGpAXOqb2ejrvY6JPnQJWpnHxOqQ0y01I5/+LrvK33TNsA3y0grMdTBWMNNQSKgG1glkJkyJFvJsKRiWMCihzE3bod+5ucx0oqwQw98A4P4kx16BUZKVjDsVINNx8EdYxgecyA9kBDA8ivD0FYgDGGRQPADmHcEFMuwCZnaqLInes5fMAeLjvc4bPA80jUy8Gg
O38DRu3DKYj7MduQl2T1mUEnUG3D88LDl9BTRjz0+kb/NX3HvBu95Q7p2vUouftmMBtS7rGDWybEckBZcpXu8IOXo7gDjACigDtGrY72N428r6Fur2OMYH3v/gFj/6vXyBUWsykUAh0pjkIhBEUJkW8TkZjJvU4jc6aHZeXF6yvLnA25VmacsTb7/0Z0+kMrXRylPCJcrFYXPHJRx+xWl5wcucB77zzDkoptEq7wsGJAhRCag4Oj7l37yGjcX3DpxgyqWLPxZdErLP0XY/N1kbRe6gSj1lKh3MdVXUF629mTEKMXF5e8OjJYzbblo8+/pSzL34D4eq2n+hrvhfAPuLiwzG/Oppw57DmcKwRYQfRpiQzrairMYWpkxl+AFlLTAAdAzF0OVkspY4ZI1GqSN5EIiCFwhiB0lCWkqpOi1wIPTE6YvSEW4rrHRopA53CxGtgXJH4Wb/rLyyBw6nm8OiIsq6zN73HK8C1uN2abr1it92xazq2u5bttiXEHrFu8Rj6qLnY9Hz0xYIvTndsG3A2K6MJbLee7SV8ISPTAk4qmEioVHrx3kNRgfMO5AJdz1DVDOklwUaa1tFZS9t2tK+T6O67+r3V200Oc9niug2+W+PbDa7rU6BPr4j9jl2R/LOVFMntJaQIcZl3fj46ohOIQqEyD1SJ1CAggowy4wOJLhSFMVRFxSR6yI0DISUpcRHwAdtbNvWI9WRCFyy9CAijMGWJUArrk3jZhZDW0RgI0eNw9HkT7GMSAQYLfXBpwh0jQUaqyQgtFHVRoY3KzbvsOf9CBzUDQ1Jn7IoEGCy3Oz+QwMNSUWnFR9v+1h67idA4OL1xKt+Uvg0Tr1rAXMMbU3j7PhzNk9OFyaYNy9sExip3hJVO4+AB1A6it0jm+97gUQ5Cu8GVYk+9yFZvN8HxTW4wuTs80BZ0vBbfDZHTPgvi1I3vDQBV62uAfDMBJcQb3OUIIQsZ2i5Fm3p/bd0WMmAeNgMD9eMmozG8WmA8ZsJfzH7Ef3z3Ln/+xoS7FxumF+nYf49EpfiE9Jk/B57l/18TnnNdehVlgNjCYgVL+3rO7W5VqBrxbHcXf9idRaI0RFIX4csQpPOe7XaT7yqReYcXIzjnaJotfbtm18zpnU2xz84RrM/pNR6QCKEoypqyqimKihBudHsjCUBn1bsPHttZ2qal7xLPOfgU50uEGHt627Brvkkle2C1XnJ6ds5623J6eoprLl7h62mI2095/PEJH98/YDkz6NgSQ4cPFikldTmmNiOM0BhhmM0ncDjFWYPQASFjWje1oqomlOU0O3/0RGRa85wkREOICuccXWdpe0/bB9wtnUku33xeQ1Xeydak3XQDv1MUUAs4PCiZzCZorRI3s9MIJNHukp3abkOza9jtOjabhtVyjXUpCa9zgq0VPF1YPnzU0/62J8tr/VWT7OSOBJzUqUHhHRSWdC0xPWZyhayvkEESysi29SkNbrOh778Dxt+mst0qUWH6hmBbCD0KT8Tjosdby851NGKgk0mMkhRKEJUgeJF47jEkPr+RKKOQMrm9IAoiBVKpRDFTYo9BpJRopdFKobVJVDWuwbR3nrEpmNY1Nnq8iAilUCa5inTO0ztHby29zambwSVbQmXpQ8AhsELghcTLdB4GQGiNVppClVSqpixKtL4RGPEClch1CQw7Xo5FoSCNrN8uBbtXoL+6Ob8cwogWEZ5Y+HQBH7RwZwrzCYzrhEkf36rNwAAqIXVh5T5dcf/zQOruDt73e5FRPkADiN7TKAZwLK5BL6T/D5lLPISDDMJvlQE3RX7eG/QKH268LnUN4PdCutzlJSYADwkAW5t+P4TUHZZcc4tl5iHLyLV4Kr/eEEgdkpd/XRbAQ33Cj998h+89mPPmXDK9lMgmMAJOSJ3hD0ld4Cf531sSp3jz0l/h89U6eLaEtXs9QTG8kkjo31IxZA/Rry8pNaN6RFkWFIUh+phPoch0OuXo5A5IyfzwkKIoiDGmVLeuw
/Y9PlrAESPsmoblckXTNNkmLok4gk+0iSSyS4libdPQ7HbpcQYBXiCP8Htc6OndbQzZ/vTaNg1Xm9SFbPueV//x2tCcP+Xzzx/TH48ZlRERO/p+h7MdwSbelfQCLRR37p7w4M371OMqdX1EQCiBMZrxeMZkNCMEibMerTT1uMb7CUI5rB9nUNlhvcBFRbwlbXWfb3rg5RnQDiqXVL0N6cL5dX9pDRyWMJvXGJO8VrebVbLAswrhGuyuoW17ms6ya1tW6xVXiyaFJUVoLKwaeHLFbwfFXyoHbCPM83VgoLKJBliDXm+IoytkEITSs95aFosNi8sF9rV0o/iuflu1zTo55TiLxFMajRmNcEbjeptDYXpsb2lD6k4ZJSm0ShhBpnXLe0uIjpgV9FKZ9GGXBVFmYKwU2kiMSdSmojCUpqQqCqo8kRYqUSOElEgjYTTCGEMkc3+lSDQiwPqQaBg+4kJIIuGQ+O7OO1zw9ERsku5iBbiYushRpoaGkgWlrhkXY+qyRIb03C9SZ/zhK3fBXlaG5+u9hG+Wyr8zJ02e3l85VnxzF/8I7CJ8soNPd6BOoVCpydncpg1A12ZQGLOmbnCPyC9i71UsszAtdyGUulYE7jm7YRAhZXCZQefgfDGI71y4pjOQO8YxpPsNorsBsMsMjAfQLmX6MA9t9pjpGCpeCwONTnyUQOqWZOZF4uBkYB2H95rBuRpoGdzoYL/8nVGJ4k415fBgzKwoKK3Dnzdst5Yd129z8OZ/xPX17Sm3F2P+h1YXkzXq61zfHDD+2hrGdopqNGU8HqFVyiYPQxdDa+pRzcnJMaY0TCcTjFE5ra2j7zt6l5bc5OkbWS4uefzoMUpK+j51gn3weO9wtsdZS/SW4JMi27nhQvLNcYl/V1mfooodIJQGWYN/lV1sR9g94fHHB3TbAw7mJVpa+m5Du1uzW61pN1uiCygUb7z5kN63jKYjet/jY0BIMIVhOpkxm8xRQhEDFGXJuBnR9FNaZymrMatVy2rVEqVBmVF6z7dQlsQj1KQ1TBWgC6i79MMupGN807F6GFceKrh7LJnMJiitcLZnt14RfUuwGhE6bLOjazp2u4b1dsdq1XJ1me0wgV2XhD7rP9QbLtcA6E2+PtgAtgGnQK06QrVAOkEsHVfrjvOzFcvF5nX8KH9Xv6Nc3+y7WUJECq2RUuGNxhlL1yp8CNg8IQs+IGLyFXf5gu99n9e0dPPBMdhlRakJ0iBk4iFrozFFQVmW1FXFqBoxqipc6SiLksIYtNape5w5y4Us91hIDPoCBqoEiTccZZrMDXaYweODp4+BnoAlYEXExpDifAFP4kCXuqYuJtRlhe+TJuRF6g85BSTwEPgzCW9Okijv0sJ/3n19d02QOnB/lrVlG1JX7vmkum+2IuAiuGFMdpvle0BD1Oz9MMQNlyRx46sgA+gMjpHXoHMAxjIkQDl0cfeWbLlTnAXx+y708LsR9h58WrMHxsgs1rsBXBHXIrx9yE9+rMEyrizz64vXgDyELOTLUaRxANzi+r0JrkV3LxkXKwR35Yx7R0ccHkwYjQpM7LFtYJu5xZJ0jRs+ixsSlaIhdYxfl8/o61SvCTAWVOMpJyf3mExnyW1CaWaTCX3XpghcH/ZuFNa2aCOZjCpKo1DRE33AuYB1HTYMOV4R8GzXT/jw/fP8eR1OtJu3b1NJZod3uffGW1hrMdUEF2D16DfE9gtenrvmlyou2D75BbvLOaezOaaSEC3etnTbLb5pIYgUzrHqKJctZQ9N19DbxOgTQjIebRlXS7TSKCEo60RNqMdjTHEJskw7352nGk2Zzw8pivJW3sIAjCVpLdYqCVbKKZQ9yDWMeuhjuq+WqXNWVzCeK2Z3pozGE4gR13d4BDH2ScEcLbZtabY7lqsrri4XrBae7SpNAkWRnH+EBvH7yMxfKg80XRLi9LmZIS20EcQ4EKo1ygli0XN+sebxoxW79avMtfuubqPKIvm8BueJLqSOrRAoo
TDKoFXiFtfVODezkl2mkql75kMW0/UdvU0BINZ2ifcbIy6KfZiRd5G+F8imYacU2+wbXxUVVVlSmAJjDEan0JAEkCVKKoxWlFpjtEZpA1Im1xkSLyNZY5KpGBGRNSOWgCPiiFgRMyiOhCAICIQ0aFNTlGNqU9I4/yWNyO3VIL6tSaPnN/PXMuOl0iUV/832g8j3v0cKU/ARfkOia3zbriovVGWdOrRF5ghrnTqu4iY94ka3VqlrkBvjtSJwAMbYzM/Nj7/vOt/gC9+kUexBqby+wbU7BFxzigeag83PEbLSJITczc7d5CBTlHQhIbp0Px9SfHXMqX7EDLgje8/krzhSvLxPQoXh+/VDfvzwe7z37jsc3JlTTGuiCPj5PcIXHyDoqUgbtzdJn9+KdLgGOtF39dV6LYDxaDrnz370Y/78z/+co4MjLs4vuVosk/dms0uJSCGlIHmXRnFSQV0XGCEQ3iZ/Yx+wvk/xoc99ICMx9P9KFivJ7Pg+9998G2cd04MjRtM5X5zc44tf/j1h/QF/eDzFi1SEuCS2K9r2nFZmmx0CuJ50yiWO8GLrKZYW08Bmu6Nt2+RFHCJlsaE0BYWWaCUZjWum8wlFVeOCpLeStgfnFMdHJzy4rxiN6lt5BwMwFiRBQCVBjqAeJzV/tXMctJEY0oJajQvq2QhTV0hToKoRZjwm+hS6Ep0jegPOQHT0XcNus2F5tWCxaFkvYbcF8nUjZvqGkPxRflCBZCsVuBYICqDrQawgVB3GLcH0nJ9tWF3479oC38KqqzK5u2DxwSGCQMp0E0Kk0COTNommKCi0Sd7skPy+XeoW930CxW3X0HUNre3prKV3HuESrcF7TwiB3ieamZASlR1jjCkw2qC0xmiDMRqTu8fGGOqiYFSV1GVFWWZXCqEIIvEvhQAVU0amEmLf9dUi4YxAtoiNaUYXAoQsnJZFjS5GFLrEyi55zt9iGRKwvQMcA2OZJkIqv76uzZMjn+4zJNFVJK7/u2M4GcPjS3jf/hsExZCAsRSZp5F98PIGaS+eUzdAMiIBzTD4zA0iuKETHEnKbXnNU74ZJ00GxdwAxtygLUSuecDO51jn3EkeurnOQm/ZO1wMIFkMNAwBUicLhZu+xwMlZOhMy7xzsn16POeuOc2DkO8llELy7vhN/tNf/jU/+sH3mI5rJuOaYjYGpQknDwlqivQXGNKm7vv5d3vS5/i7Vslvr9cCGCc3A01RVpRliRKS4D0+pll2ECloOC2cASFAa5kmHyHgvUsUDCWRhUKZPzTs4ltYAlRZYaoJQllqFMdRgSqwQXD+0Qx7+WuIC17NEh2BHYTd14CvtMO3K8vT0y1CaVyzS/7IPu38d9oidYfSEmUE1aZjtO2RuqDtA70VCFmjzQRTeo6soHC38/cdaBIAVcwd3AJkXWGqEWYSIQcUaF1S1iPK8SRf+CVBCFyUe96kj4LoDPiSGHra3YrtasF6vWWzhs0u2yQFkJv0fP0wyfsj6+u0M7sIegdqC0XsiTqyWXrid22Bb2UpkUGwkkgtiS6m8wYAgZSSoigSpaEo9nHLImZNhC/w3uJchfMW5zqs6+mdpbc2CeGyd3vqLjucTT7hIcR0Tc+dPh8Dtu9pu7QVE4LETZaSUmtqY6iqiqqqk5exLtPmUZt0/qDQSBRyb5EZJcniUQqIEpFBc3LM0AhpEKJACoMStxOva/LXYbx8REoBm5MSwXQ+F/NpCv7asWIC3M+/OwXmJRwdgijhbAGP+TcIiiEpgJFpFDYk2cXMDw4uCdRMTOA0DF3bTEMQWcC2T4oT14Ba3RC0ceOxB07xMDyImSNiM5g2Nn0dRHM6A/aYBYExsLd5C+EGJQIgpNc8GO0N7yXm51Qqd4X79F5MFtgNbAzyr+5zx19OGQyH02Nmx8eMDw8oC0NRFVSjGXVR404eEuqHdJsrdnh60uf3HtduLN/Vb6/XAhi73tI0PbtdS1U0bDZbtpsNQgqkUQhFWjwBocT+s
z04VERrU0hEpdGFRhU5g/0Vegi+uhIgNFHo5H5jYDSVyKKkHE25uP8WH/7skO3nfwvh7Bt+rbkN2vW4Z+u02Lk+j68gvRdPkIKgBdYI2p1ltU4LW2hT0iDjI6r5iM4pAgXI6lZenSONkwJJZJMOrUaYEaqeUZR1cjQpa6qqRhdVSgqTkkASe253O7qmwVqH84HoCggR7xqazYL11QXrVc9mkzjFOzK26a7X0ttyTorAzkKxy+ElytK/hh6R39UfViLvNJUSyKhSZ9f5dI2WMYcTyZQeaRRSZ9+IGLNnsSbGlJQX8Xns6/Eh4KPfd4qdT2mS1jlsFvNZ6+idx3qP9YGutzRdR9/3dH3WZeQOrwG0EJRFQT0aUdU1RT2iqEaYsko3ZdBCZwtPmUJFdOIpS62RWmGkRkuFERIlFEpoBPn+USKiTLvJF6j/wA2qK6kzLEnnyzr//5dvjqzBIrlNKKAuoBxDWyQx7Gf+Ol3s31zZHkIWzO1jbd1111ZI8EXqIg9WbQMNYaBVDGBYqNSplUNet7imWQwd5ptCuMFRou9h16SfmSKL4/KdjSKluORudqYdowY6h7+mWRAh2hs0jgyUQ6ZZSJHe07CwhjJbuA0cZ8Fe+arVDU7z7VYpKsp6jDAFTiqi0ghVUBRj6npGe3Qff/wO280TFpyzJv15ZqTP86t2ovi21TcPjIXk+N6bvPn2u9y5e49JVbOZbmg2W6KICC2TKb28npS0XcfV1RVXl5dsl1f0XUtRVkwPD+l6x26z/lcKilPFKAjIxOGTCm1KRspQFDWTyYymafhos8RfDDrUb6o8+95mb0mn5k0ZW/6jBhIA7gQ0iqBNWhi9AFkhp4pxPaauapQ0iFv62EaumegOQIPQBpRB6BJdjSlGE+p6TFXVKG2QUuVJXdjrQoJzeNvjXPZZFWD7DbvNFbv1ht06WWJuQgLGXyX63F7ZkDrTyqWGhv8D52VCgyx+//2+q1dXtmvTMhYC0YXUzbVJ1S+EQClFVAqEx9pACC75su/lEzf0FCIiVcyTbYFAoZQgaEUIScQXvMcVBd45rPPp5gN9BsZV29F2HV3X0dse7zxh8HoPHusdtC19COjeoZoOU5TookTrAq0KpMjuAlJmUCwxRUlRFlSmpDbJB1kpkTjJA1jKa8WLniv382c82oRlhrFyZrY+J1Qa5lKDmegAySNZuNTARQMXHi7/LQfnBJe7rzJTJGTixgy+wJA5ujd+Zy++g313VpksXsv9+jDwirnmFg/8YrgBZnNnWatrXu/NMVwIWXWYfx+RKRs3x3U3qBrDeTOcQ0Na3iDgCy7TJMid4yFVb2h15Mdw8aUB411s+OzsMZNPDnAxMh/VHE2mqFCg54Yz5/hMSz4TkscxdYmH69wlycv4u57Jb69bBcZCGHQ5xdtdCnHYg9PfvpzNDx/yv/+f/o/85V/+Bffv3UHGmJxThMDaHk9IOteBgxYjbdPz5NETLp5+QQx5Trxesri8AAQx/OtmzwQfCT7mqVLqfUghKUyJEpoH9+6ye/f7PNl8Rug+/4ZfbUe6/Az9mKFXc7MnM4zKZM6l14nToMeI0YSD+YQ37h5zPJ9RGsMLOjZ9pTzJLgotQRdJrZ95ZELoRNMREAn4EAkhJjFo3ye7P58cTrx3KdWvC9h2Q7db0W4d7TZ1cje8GrFD03LNEfkDkIQwML5bUIy/Q8avU+02G4gxJb45j3fJ9gwxcI1l6hrndDk1xOhmjmXwKWEyRg8iJrCprjnKQoivuElJASr7CJcFhCjwCHyIKbDD+0S5cA5nLc7Z5ATUtZlSFLAh0nQdoe1AaoTSKF2gTQnyht2iAKUU9ahmNB4zrUaEOiJKhZYFWlyL90AyGHa+SPWGpLXimi4xnCIDKB6+wnWDEq59zxtg3cOz/vXlFIuUCk+/hHib1mxfW0PetE+3kGkVQqY1VcjExY05KCPkLq7Q16BV60TcJtwArQM4zdcJkW3SJOx3f0NnVuXo6
YGjPNRAWt93MQauzI3/F1kbs99IDveN178bbvw8hGvBoHe5o8z175K72869tAadpeeTzcdsftnw+Okpx7MZ94+OaFY97X3L5xdn/GOz4NdxxyNSe2o4e3pub0r5r7VuFRgXZs79t77PervganFO6Pv8gXJc77vTLkqrgno048//4t/z7/78p7z39juMRzV92zCZTJhMJnR9R+8cQUSiFDjn6fqOzXrL1cXpNSjO9eV//2utxMMTOY0qorIhuVIKYSJ3jg4Jbz9g8/g+yyeP+OaVV0Nv9st1s2PwJcAcK4gKbSIHI83JtGRapajbeMu5xpG8WGS+WxyiSIeOFblTFSKRwf86pJQ77xNwCTFFVbs0tu77HbbrsG2aur1yBfAfuB7rAurjgvGdO+h69HJf03f1R9VumyZfwfl08wGfL+ZCiT1XVwqZY50lIqv3gw9En7rIKSkyJFCs5D4MRA6JdkIm2pqQGWyLPSAVQqOFRClNoUzCBaR45gEcd31L25d0vaW1FmEdvfUE6wkxEKzFI/EZIEUEMYq0dkmfRH3G0iuH1R6rAz6m8zGJCdPakGKmXwwchwxorLzB6OJ65fnK/blevbak7f2CxNH8Zt3sf3eJkMSDVryC1xgy2cSJ5w/iILgjc3hDTJQH55JAuSiuu8E69+mNzvcN19PESALaKqfj6bwBHIJC9m4XMvsbZ/A97Pq8z0B9eGEDgS13j3PD+tqtav/GbnCMh8fN/OO8+bwG3cPDxfR4ziWKiX95196A47R7zPLJhvnpjMfTI67WLU8uFnzx9At+fvWIRzS/M6Tqu/r6ul0qhYgUZYHYCaLtwLekT4vCmDFVVVFWJZPxiHt37/HwwUPeffdd7k1HiG7Htt2y3W1pNhucTeM85wNCK6RQWNdzdbXiyaPHuO7fLKOL0hhGRZG8S0Ma+IncfVEC5HSEOp5x9eCE3VmJda/rsfpdEwUHsaQSHSPRUdGiQ0uwBvsSNkBdhN6Dz7zJ6y62SDG1PmTAkL6XgDBfuViHEBK1wtkMTnJ66a2/4hcvU8LBnQoznyPKMVF91zF+nWq32RJDDiGyNgVjeEfMF/CECzKYFTeBsST6mNcHTwgui5Zj1jQlYCxkWlelzFxfIbMdWupGJ8BskLK4DvbIfNAEa9OGcC8C1AYTA3WIeB+TRimmW3ocgxBp4ymyCE8KSVkm7+TSJOF1jGBjRMaQaKviukH3wiivSH2amNHu4Kx180I40CqGp+tJG9tn+fZtGEGHDranr+jJ+i4BU+/StK8MiZAt3TXdQWb6QtOmiGVtwPTs0apSyVanMLkDrFKHWMi0gLoebAc2W/oU+pqzLG50afeOFUOJa4eMTCm6pnsMFmy5M4287s/gr2kZ19/MYD1/qAdgfJOOcZO8HvLrf6kV6Lji1K+5uDrj880zxp9OWbdLtvYyaQu+qz+6bplKAdoUSClhMPomIpBUheFgPuPo6JC7d0743rvv8u7bbzGpaoxrac53NNaxbVq2qxVd19FmSyGkQCjF1dWKx4+ecHn+jNd3r/6SS0CpFSOjcVIQvM9rT7I7UyIiCwmV5MFBzfmo4unqdQXGv6scIu7QoUG6LaFf47qSICRC3z6AsxF6mzpySayUaDshxn0DI42gZYq0zgu+GEa9mUsWYwox8Nn2KuRGyeu2PAkF0yPFwZ07hGpCKw3+Jaqov6s/vtqmIwSPsx3O9liXqAshpNRQAUgl9+BYiMTL3W/ccphG8Ndd44RrRQLGAyiWCq00Qso8+Y2pEy0VUhq0KlFK55tC6pSUJ/LnH5noF1IItBBp4pKpD0MeA0IjhCZFEqTfHWggWqp9cIhSSTgdRJ6wiBuw5DaWfEHKoYhpQu8zp8I7UOF5HjFkz3ASZeKMbwcofuUVsqAh3IhOFoPwLIPFIaaz66Gz6WC7yD4aWuUOgvVQVVCqBKalSI9p+wRApUpJRr6A4ibvGJ4jxojc1RVJ5Elh8s9CBsX5A7b3Tobn290DL
ULcsIMbRHbhBjjOwFhw7au8H36K/XXh5ZfH07ByDav1U/7N4qNbqlsFxkobRnXNbDan3R3RrJe4viHi2e5WONfT7rZ0zZaRlkwUXHlHt1oSEah6jFcFm96x7SyrnBjWtA27puH87JSr8yc4e6tB79+6UiJiREAQEqcVkDGgo0QGj91csnzyCZtnnxDbzbV/+jf8uv/YitHSNCsuLk9BFUyPHaND0NXtj/wD0PYR29s9PcLnKNsQQSiNzF6uMUQiEh0iSvdIlczsYz7SUgik1KmTp/x+8vY6lSmgrAyqqAhS433Evnbw/d92WS+JIWA9OVoZfFRJfJvtzRLIVPvO3ECvjCLsU3STd2tuVggQg68rKUgDofC5ixtiIISIECGbBki8dKgo0BFUjP9/9v470LZlLeuEf29VjTFmXGmnE+4JNxMFCX4iNtB+2tDXT0zYoiJc22ybaRWk1YuA2tqCAbRFJBta7Ia2PwRp0EuDiIKkCzdx0zk7h5VnGKFC/1E15pxr7bXzWnuvfc589hlnrjnmiDVqVL311vM+LzoZuyKRyjWbbZ5ZsQrVGtKqpWtkiIqGcfDRoI6ea41JBrHWJnKlTYboaBgt+uzaPA6Pgjo1hsGkyXQ1M39mMVatYphW0XbaqeEqiskTp6SdVrQFtuDF9Qte1BknmMQtVsljXCTvsJon4NApZ7XoWKmaNGppj9Um0KgaUFMocugU0fBtZVrno7FEtUhG7SwddFj4vjAdIa2hDTNJNwWgOUDXMC4a9U0dU9CGpLzRytD5RGIPx9nwL5J9FpnxR+G09TZPH47XY6xiStHhcIi35xhnhvHeHnU9wXtH3UzY3q2o6ymrecZQBfxoj52rV9A6Y3jhGcxwjQmafevZ3Nljc2uHra1Nxvvb2HrEk+fLPlnEgalDeYcKluAd0nKjghBczXT7JpsXP8TO5Y/i6oYOMUOT8JSlgAyOqp6wtX2LygrrwXCm6HM8Ym2HTgWUNdR1g7Mxxa6zFutcNB6URpmcLM8itQKFcQFtKpQy0TgJyVBRgtY5ymgkGcanCSJQdMDkOVpn1CHy9xv/1NSM1wdUTkARxBHEJSpDmDuoVPTcRs9uS4Nop3ZDCryL3uXg5/SLlmccub5z7/DMOnRxsB0EREcKhSjBKiEkCS3RajZrQiDpHkftY6VCDA5EFgzeDFEZoKLNFKKqhjbRMDbaoNLgU3SOMmaub9sGGCc1jkdBSMF3NoDPYp4Hr6NNZCBJjoHLIBRQB832ds6+y5hryixxAFolreJsTlvwCwoN7YBJqZQARMeRebZg0KqkYpGUlhCJfOS6Jg7o0nGcT3JpdfzsFNDrzpdWTu3ACEci4bqtTy03ecZTZk7HOKx+oUheX81ciUKIaatV5N8FS6w9Eo1416Q62waWPxqEnKFZJ2Q5zmimviI0ZVLEaFPOJGNYSXwebTmbNBBolTLaoEafONM2xCq9cIgljtkwLqf7bG1tUeSaleEqZ1bXyLSOgWLEBjnLNb1Oh2c21ji/OsTu7bLWG2KdRbo9xg42tza5srXNrc0t9vZ3aeoJp28y+skghEBVlkwmExpbY62dBQeIb/D1hK3rr7Jz7RUme7sp/1wU9+4QA0b2n+gd3Ad0hiq66KyHzvuYvE/IM7wSbCsLdQKoLdSVpanLlHa8wLkm8jOVjkaCNqgAWge0MbOO3+iUDpeYNSyYIuoeFzXGRLrdaYHWrdRnasx9iNPtJxgossSDY3XjWXxweFvhXU3wLZ89UhXmwXJR+izSelpjJBnHiRJEUvQJbecvJK4yM1oDkKgXftZHilKp3mu0SoZsUq1op4klCBJkZhhHKTkT6RVpP1HRU+iZ981KCVqpmYEf3zGD6JbXnM24zTLjeB5D7528xAemvRfQ+Jh4p7awaw2briZQ8RS5FB4vDlNxZwkxSDSE5MENbQZOlzy1KhpmtJ5c5nrGBGgmUedSJSpEkDndorGRylBbUNXcm6v1vIK1x7EN1CFKy
Ck1D5yz6Th1Ez9tw9xYljTjInPVCZdUNayPS1VFz3VTxwZeSTSKXZsg5FEN42iQq2IVWV3H5xprhEA3GcWtvrKbCzwZBVlS6jAqaQ4mo9jaxANvyzANPBoby6fVLbyXQ/p1gGM1jG0z5tq165w/d4ZzZ1Z49plneP6Z5zh75gyrq0N6/S5ZrsmMwniPcg3N3ohyZ5e9/X229ke8cu0qm1tbvPKxD1PWU173T+gwAkwmE3b2d6mbVmg/Rsv6aowd7XDr6ivsbV5j6j2O+G5kxCQWG8QhxuRJ3sMdIZD1yFfX6A2HmKyHznoo1UWZHrrTwUvMwnUSaByUZaAupzSdApMV5K7GhxB57kqD0tGR0E77tovRZC6L/YNShNDB5T1MMcZkDn1KyIlCDAbPssjvhMSL9jaqaixxanD2wkvRkE1eIQkGiLMTcVGJAikz47idkZ6RcpN9EtWnPN57rG1wLip4R+fZ3BMbQkhUoTBP0SzJeE51PTNZGlRFKFEYMcnh1hrHzDjIIjLzQLvkwfYhzO3SNtCvlWfTJhrGOouUpDYo8JikrwLpxIuUVJL2d4DaRDm2bQs7NuCetFNmRod5spdxR4TknV3MEud9Sq2cMsO5RY8l87+b5NHVKnp1W++x8lDvwXg/epZ1P3poGxuN0tYj60LiLCcjMEsGtMjc22ubaETOdI/T4tzcK11XcYlpdWOAn068ZGehaaIRXjXJMA7RoK7L9H6SqBitZTkfwD40+mcgywkrK9TDPrUCJx6UiXJCWhYM4NYr3hrDaUASPLMI8KaO91E1UNlk2Ot4HyZlDnQh3tsp6a+eFI45wYenmpbs7u7iXY13AVd7JpOSs9MNVlaHdHs5Ra7BWUIarQWERhms0nhRcaraL337R8Ozu7/LtVubVHVF01R41+Cbknq0TbVzg91XP8zOZMyUOEuiiBJDJVFy6FSqPIsG00cNV+msrtEZrJDlPXTWBSkQVaDyAh8CjT2Zjqp1ApRVQ7cuyV1DCD7S1FLQkdIaWo9xMo5jBjKDMT6GmiqD4Al5n7zYI+9MKEZQhidfo9sMrKLa6cQ2PkUR5Elf3RKLGK4/Q2TueqKv2CBkcYDGfFAjLBrGEp9t/DGlWY7Puc1yV5ZTqqoE3ELCj5YWEetAazxaHzPkwdwwVkajVOw6RIRcZxRZgRIVAwMTbaN1GooInoDHo4KLXvBI5IjHbVNEk04qCiUGUYYgmiCSHFnJ4/2oaGfGAwQN2PkMdGNht4FrRN3xcBrErk57gEioYkBcG8gWiH9rlXJst+s9syx3LV3B++SN9QsBfERqQ1VFQ86nkRbJw+wFMEmuJNVb5+f0CmCWgU635/LzY7QSb8HPDeOqip9aQ5aM9pYOUlUxaNC23mIXpxVsA6FhxkWYGcWt+1wfLqkHw3PnoCjorq3SHw5xWmhUwBpwBkKmCJkhGIVXcdwQgk8ayklHuWnidTbN3EZ3xHJMjh7EtiPnZBg/2mW/FnDMhnH0Qu1sbrF5bY+LH5UkFN+ll7SJhys9hsMe3U5BJ8/pmJxcZxgVuZlNCPQHA1aHq9zavnk8DeFrCoGt3S1evX49ZqCqS2w9oZnuM92+SrV1hWZ0dUF2LCKmPjmF7auY6BEoekinT94fkvVWMJ0Buuhjsi6iCkTliMqxHvwJGcbex/ajrKCqarquIYhPmUrjlLLSOtI4tY/TwK23TmmUjslAtM4QCXjbpyh6dLoTehqmdp5340lhPtEuKcA6RMmszKDDIzbkSxwruoMNgGQUC6IMkriMIQjeR0m2QEh1ca5LDERvsZJIZQBwDl/XeBS19YRgmcfttcF8MuMhi1L40NC4Bi8hpmlGI6FVwYhcZWN6qLyPUTopYcQltNeN0KajDiFO/UZjP51TqRgfNfM4S7rraMQ4Ypprl+ggj4TEMSYZxcEnkp6ODrfawVZojeIl7g8jIjG7iEvrLbVppDHLgCeRw2VMDNSbUblIrvoQvZZV8nJWQDCJ1zJKB
q0BlSTdjERvqUok8cZCqKN3lDAP7HOJTztLEqLmKaPr5EWtm2hMmmR4uxCNYWdhOo3e1taoty4ZxYnGEC1N5mRdH881i359SJxbJev1WD9/gXNnzqI7OVJorIZGe6wCpwUrgSo4Km+p6pq6qrDTEj9NHvCQOrZ24OIWDPzWC17ZJyC2f3pxzHJtmk7RoalGeDvBW0cDTCewswMkb1pvuMLKyoDhcEC/26NbdBn0+vT7A0IIZJ2ClZVVtve2sbOctqfSrHsCCNR1zd6kpKorqioaxeV4i/rmdZhc4061+3SVngHTgU4Xul1M0SEruuRFD5N3EdNBTBEXnaMkB3Schj2hl7cBbo5gt4Kr2xX9azfpD8f0h7cYDC+SFznGZJEyZy1NVTOeTKnKKY1roua2KJQyBGdpminlZMJoBKU7HW1OCDAZQ91Y9re30dmYIAq3HIKeOuhOP5kYrcEaKRStc0e8R5Jh3GoCt5SEkCLioyc5GsZaORwKVTeIqfFex6C+BcNYLSRNEJ2k3zSRo0yc0RNRBFEYpUEbVFags04cNNqo6CJJxioa2yoZwg6VMvFFCkVLtUi0ixCzSkqSOQRoCRetL+6R2QQLM+ltHF/r4AzENmDCaWsrTztuEg3BIi2d+Bl0DFSbFXoyFKVVj0hGWp14ucEx5+EQPZ8upAxydTyHLiBLdId57vOFZBuJRxt8MprVXKVCkhHdpnCWZLDPUkAnw1H5eTKPpoGqBF8nCoNOx09BgTM+f7u0hnLrnn14w7j/wjOsDIa8/IaXePHZ5yi6XXRhKGmYuIqpbyi9pXQN46ZiXJWMJxMm3jOtajwhUSia6PWeVtHInzZQNlCWaV0yik8rVecJ4FgNY6Vz1tdWKXJhe8tSTjY52MQEvLdY29DYmsZWTCuPdyWBGjHEKTolZJ0u2uRzw1gUs7zrr2sIpujSG/TRdYY2Qi2WYAvqVsT8VKKdxtJxxJ91odNFioKsk5PlBXlWJB3s1Pm7gFUeTQCdusjbshMdL+rkRBjVsLlfwfWKSERJd9G2c6eAFvGwCB6aEpqyhtMwVbzEkQgpo6WfGcaxI5/lTFAB0SkArv09RfC32eJiUo9obCjtMVrokBF0D+vqGFyaOKIxSUgysNMUtVaKXJu5eoXWaK1itjqTkZmMIs9ROcTpZBel3lScUo/X6mPOhaDTfK9eeHdkTh1OFuoCmSNtErWRj+V9a6t7cvKJjXFT3sV3v1zOJD8E2kItie18BqwQs3yY9L2Is4MqGceNjZze6QjsLlEtujUo+yBJRykE5sZmMvRwoNIDtMmaazWPQ4gc4yBRnF756JnO9Tywr1VtUIA38ZhlMhZb73Y7fWgTlQM992y3UnC3LerQ56N5jN/+to9jY2WVT37j23jrG16kKArQwuZ0j6ujTbYme+xNx+xOxlS2JJQ1zWhCub9Psz+C0RjG4+gJGU9gMo2GcdlAaeNS+1PKrXyyOGYqRaxUWabpdDqUE8VhNQmlDb1Bn8FwQH/QJTMqcu+NRIFsJagsw+QFmSmoqqhZHD0ly3BJEPJOj8FwhayuyDJFrT3ipkw7HexYE+VjThNMzGVv8hjQYDIoCsgLTJaR5xl5FjtZZTKURB6vT5HDMuvsk0TOExzaHlP8zxJL3BO/+IsfOCCaIBLSwkx1ipaqcAiRTSHzJBwJIcTgu7pusE1FY+uFoMs2410KxCMG7LlEi2h/N8agM0Oe5WQmizrEiTc6S5GeaA8tPSMgeB/1l2dSsa0Rs4C5gka6guQ9VErY291lN049PjSS8zvGI/lIgVXJeVg3KefDI53h9Y5ANJS3iebFgmEcCmjy5LG10QvrR0QqRlvykrbtMjcwW2dIoie4DMoc6iydMixoFMvCFEDqJ7I89j0z5YsUaNEqZrRUgyapTvikneyTF3tRdsPC3DN8uKa069v+6dFUKV5+6SXOrqzxtje+mY97w8vkmcEHz8Xt60x9xaScMnbgy4ZmNKXc3We6u0ezt
wv7e7A/iobxeBKN42kJ0zqleWU5ArwLjtUwdnbExz7y84QQ05AeZcCsrp/n0z7tMzl3/gyrqwOMBkJMc2otTCYVnm329qforOUrhZn3ZAmh2+uzsrpK3dTUVY7tajqZp5lssz3dwo9PU2bAFCghOk5F6Qx0DioHlRGUIaQsWSCzVPNK1Kw9Ct4TbExmIrOI3yWWeG3jK7/8zx/4LosSCizMXtzhfTj695A0hz3BRyP2XpiTbJJ3uk0/vZBxb2a+tFPSab+50d56huXowWU49GXx+0xEwLG/vXXP670beulUjjjL7xS4Nm6MSAJYJ8ZkLGeWHwWtl7c6evUdEYie53vIIhzm1dztmM2D9Benpd8ElRm8wKScsL2zjUJomppLt67wsasXuXTrGje3t9jc2WZ3b5fJ/h52tA+jEYxH8XMyiQZx2USD+GizbIlDOGaPMTh39/TDq6sbfPzHfTJvePF5zpxZxxgIWMbjMdtbu9y4cYu9/XLOByJ5CE94Cv3pgdDp9hgOBzS2oakNrqvoGE8zOUs92mR/ujmXkHniaCVrdJxKbQ1kNc8WF5Ck3hPQai7jBIlG5oEU1x5mgQ1LLPHaxvbNa0/6El5zMMlO0ylwXycaa5WC8IRoGI+Ik/tLvBbwdNoNla0Zl1NubG2iKkuwnrKc8pHrV3j/lY9x8cZVtrc2me5s48eJNjGdQDmNHuJJGekSS2P4gXHshvG9cPXSK/z4j/0n3vimN/Lc888xXOnR6WbUVcX2zg7Xr9/k1UtXefXVi+zv3GQ+FHzS8fynBQFnHdY6nHM45+P3FA2+6LF58kje4lnQBcwzD8XfQ6JMeAkE5/CiUdqDT1m6EvdRSZyODeFpbeaWWGKJJ40Pby54jMP875JoCJdEyuVylnmJJ433/OIv0TEZF3srrOZdgnVUVcnlrVtcvnWV0fYmYW8XRvvJK1wm2bkmpqpeVuKHxmM3jKtqj5/7mR/lg+9/LxeeeSPnnznP2XMbiA6MxyNu3rzBxz7yYfa3rxA1Apc4jLouKadTrLM0TYktS6bTKeW0pKnLObfqiSIFYUgegy7aeV1p/5d4jCHgXZjrlIojOA8qRNoYoJMkWkBwIdItllhiiSUeFB8Icx0BmA+yX/vzkUIkkkBUs1/itOOVn/rPiCg+YgqMNtFxZBvq8T5htBfpEmUZZeWaNs85r/WK/Fjw2A1jgOArxqOrXLqYxbTmQJZppuWY3b0Rk9He0ii+IwLe+egx9tFrbK3FWotzDu+OCgp4Ekj0iTbvPSwE8C5wvlJMQ/CBIGHB4x3SLm0A0ZyDHB6EMrbEEksskfD61WAxRKUIiH7xpcLTaUe4eYOAUCtDjZoHBZZTmI6jUbxUlTgRyCMLpi+xxBJLLLHEEkssscRrAMsopiWWWGKJJZZYYokllmBpGC+xxBJLLLHEEksssQSwNIyXWGKJJZZYYoklllgCWBrGSyyxxBJLLLHEEkssATyiYSwi7xJ57YpnichfE5EfEpFNEQki8s4jtvm89Nudll/9EOd93Zdr2q4nIl8tIh8UkamIXBSR7xSRlx/inMsyZVam3yAil0WkEpH3iMjveYTzvmbLVUQ+Q0S+WUTeLyITEXlVRP6piLzxiG2ViHyliHxMREoR+XkR+e0Ped5lmcZt/6yI/F8icjXV6Xc94rlf9+UqIm8Tkb8rIr8gIqNUtv9aRD7lIc+7LFORoYj8SxH5kIiMRWRHRP6ziHzJI5z7dV+uR+z3xakduHTS1/ioHuNvAT7rOC7klOJPAF3g/3+XbX6GWAaHl/cC14CfeojzLss14luAPwf8Y+AdwP8EfA7wIyIyeMBzLss04v8A/nvgbwC/CfgPwHc/QiP+Wi7XLwY+Efh7wH8LfAXwacBPi8gLh7b9GuBdwDembX8S+B4RecdDnHdZphF/EDgPfN8xnXtZrvDfAP818B3E9/+PAeeAnxSRT3+I8y7LFHJiOo2/Dnwh8LuB9wHfJSJ/5iHPvSzXB
YjIGvB3iDbVySOEsFzusAAqfb6FKKz7zvvc7yWiPPPfetL3cBqX+ylXohq9Bf7aofVfkPb5/Cd9H6dpuc8y/bVH/UY0pq8A+knfx2lagHNHrGvf7b+6sO48MTXnVx/a9keAX3jS93Galvst07S+rdMm1dt3PenrP63LA9TVsySZ1oV1q8A28J1P+j5O0/IgdfUO+/9H4D1P+j5O2/Iw5Qp8M/BvgW8HLp30NR47lSK5ur9WRL5cRF5JrvLvF5HzafmXIrKbpsX/wqF9z4nIP0pT55O0zT8TkeePOPfvSq74Mk0Hf6GIvFtE3n3EMf/Xhanj94vIH7qf+wvhoVPI/V5iKovveJidl+UKxDzSGtg7tH4nfT5Q3V2WKQAtrecHDq3/QeDZhd/vG6/lcg0h3Dxi3SvATWDxej6f6DX67kObfzfwyXKPKcIj7mtZpjxS+3skluUKIYRbIVkaC+t2gQ9yqPzvB8syvSs2ecjEzMtyPXCezwa+BPgf7nXs48JJZb77vcAvEqdpLhBd4N8JDImd8jcDvwP4GyLynhDCv0n7bRDT8nwlsZCeA74c+A8i8nEhhBJARH4D8E+Bfw38WeJU0N8BOsQXnLTdCvDjxCnmdwEfJXZi/1BEihDC3z+Ru4cvBX4mhPCLx3zc1025hhD2ReS7gD8pIv+JSEl5CfhbwM8TvXHHgddNmTJPd3U4AViVPj+JSK04Drwmy1VEPp7oIX7fwupPJJbhhw5t/kvp8xPSeR8Vr6cyfZx4XZeriGwQ3/1ve5Dj3wOvuzIVESE6c1aB357O8/sf5Pj3gddVuYpIlu7pb4UQPiTymNLePqJL/F3xEAfWBWIBmoV1X5/W/08L6wxwA/i2uxxfAy+kfX/rwvqfIFYOWVj36Wm7dy+s+0vEyvDWQ8f9x8CtxWu8x33eN5WCyAsKwJ9cluujlWu6zm9K27TLT3LEVMyyTO9dpkSedgD+20PrvzWt/8plud71Xg3wo+ma1xfWfzNw7S7P4vcuy/TByvSIbR6ZSrEs1ztu+0+BCfCWZZk+fJkCf5x5P1UDf2xZVx+tXIlxRR8COun7t3PaqRR3wf8dQlicQnh/+vy37Yr0+4eID2cGEfmjEiO6R8RpiFfTT29Pv2vgM4D/PaSSSsf7L9zulfkC4D8BHxUR0y7pOs4QPTnHjS8jZi//Zydw7NdbuX4tcQrlfwQ+lzhaPgP8gIj0j+kcr6cy/SHiiPzvichnici6iPx+4Hel349z6vq1WK7fCPwa4EtCCNsPsN9xYVmmJ4PXbbmKyFcSg8X+eAjh8IzHo+D1WKb/G/CZxICybwH+voj84Qc4/v3gdVOuIvIW4KuIdbN8gOM9Mk6KSnG40tR3Wd9pv4jInyBGKn49UY1gm8gl/cmF7c4CGXF0cRjXD30/T/TaNHe4zjN3vIOHgIgUwH8HfH8I4dZxHjvhdVOuIvKJxGjVPxBC+CcL6/8TcdT8B4C/+6jn4XVUpiEEKyJfRBy0/cTCdXwl8A3A1Uc9xwJeU+UqIn8D+EPAl4UQfujQz9vAmojIYodCnL4E2Lqfc9wHXk9l+jjxuixXEfkjwF8jehu/9X6O/QB43ZVpiNzZlj/7gyLSA/4XEfnWEMKdzv+geD2V698D/h1RMWUtrcvjbrIGVCGE6f2c50FxUobxw+KLgR8JIXx5u0JuD1y5RXwY54/Y/wLzURBE8vsN4E/d4XwfePhLPRJfCKzzkEF3J4insVw/OX0ekLsLIfyyiOwAH38M53gUPI1lSgjhvcCnStSC7hMHGb8t/Xxc/OJHwakrVxH5KuAvAH8ihPBdR2zyS0ABvJmDPOPWa/Lee53jhPE0lunTgKe2XEXk9wL/APjbIYSvu9dxHyOe2jI9Aj9NnEG+AJy49u498DSW6ycQ44qO8s5vEx1jf/pe53kYnDbDuMftKgS/b/FLCMGJyE8Dv11E3tV6aCRqML6Rgw/vB4n6rq+GEI4aBR03voxYub7/MZzrQfA0l
murV/irgF9oV4rI24A14PIJnfd+8TSW6eK1fSxdS0bkxv1QCOHDJ33e+8CpKlcR+ZNESs9XhRC+8Q6b/SCxQ/k9wFcvrP8S4BdDCIenIR83nsYyfRrwVJariPxWYqDdt4QQ/scHPc8J46ks0zvgc4ERR3tgHzeexnL9Yha83glfQeQ8/w5OcLBx2gzjHwT+goj8ReA/A78O+KIjtvsrRL7k94rINxOnAN5FNKYWeZLfAPxO4MdE5BuIo5g+8HHAfxVC+M13uxgR+VxiVOYzadVnJH4OIYR/dWjb86SozGOcNjkuPI3l+mNE9Ym/LSLrxNH3i0Qy/i5P3iv/NJZpyyl8hahb/CJRAudF4LPv98ZPGKemXEXki4kR2T8I/Ds5mMVyL3nfCSHcEJGvB75SRPaJSX9+Z7r2L3yguz8ZPHVlmrb9DOBl5tKMn5CoQAD/JoQwueednyyeunIVkc8B/jmxbf32Q9tVIYSfvb9bPzE8jWX6h4lSlz9MNNbOECmVXwR8RQih5snjqSvXEMJPHrHvO4n19N33vONHQXiEyD3uHDn5tYfWvTOtf8uh9e8Gfnzhexf4h0Sezj4x8cAbOSIimRgw8AGiTNIvAb8V+Fngew9tt058iB8l8m5uEI2uP30f9/duDioizJYjtv0z6bdPf5QyXZbrge3OAH8b+GVgClwkBji8fVmmD12mX5uOXxF5Y98BvLCsq0fe27ffqUxZiNBO22rioO2VdD2/AHzRskwfqUzvtu3Ly3J98HJty+AOy8eWZfpQZfprgH9DjNGoiLOZPwz8xod5/5fles99T1yVQtLJnnqIyBuI/L6vCyF8zZO+ntcKluV6/FiW6clgWa7Hj2WZngyW5Xr8WJbpyeD1WK5PpWEsIl1idOUPEzm9bwL+PJEg/okhhOOMrn/dYFmux49lmZ4MluV6/FiW6clgWa7Hj2WZngyW5Rpx2jjG9wtH5FJ+I3G6fUx04f+O18uDOyEsy/X4sSzTk8GyXI8fyzI9GSzL9fixLNOTwbJceUo9xkssscQSSyyxxBJLLHHcOKnMd0ssscQSSyyxxBJLLPFUYWkYL7HEEkssscQSSyyxBKfEMBaR3yIif/YJnftdIhIk5vl+3UJEPi+Vw6+/j22DiLzrMVzWUw8RebeIvPtJX8dphIh8u4h87JiP+XKqn+88zuM+TVi2aY8X91PeC+3r5z3qeR52/9cKln3V7Uhl8i4RORU23Z2QnsfX3sd2B/rN43h/HgSnpeH8LcCvJ0ZDLnH68Vk8+RSXSzz9+BpiWs8llnit42eI7eaTTg/+esPrpa/6PGJyjq/lYCKOpxV/7Eme/LQYxvcFESlCCNWTvo7XO8IRGWmWWOJBEe4jBbXElNU2LKOETw2W7fCDI4SwB9yz3VyW7fFi2Vc9nQgLGTCfBJ64211Evh34MuD55CoPIvKxBdf5bxORfywiN4mZuu44BXvUtLWInBORfyAiF0WkSp/fJSLFXa7pC0RkJCLfeNqnJh4EIvI2EfleEbkhIqWIvCoi33NoCrCX7vtWWr5bRNYOHefA9NTCVOIni8i/F5GJiFwVkb/6Wiq/e0FEvlhE3p/q2S+JyG89Ypu3p2ewIyJTEflJEfmCI7b7XelYpYi8R0S+8GmhZYjIW9I79tF0jx8RkX8oMbX34nYH3uMFGsQfE5G/KSJXiNmX1kTknem3zxGR70vv56aIfJNE7c27Xc9nisi/EpFL6Xo+ICJ/7fB+qXx/XER+vYj8TKrHv3iH5/gpIvKvRWQ7HfM/iMh/9UgFd/x4o4h8fyqrV0TkLy++j/dTFxfe7U8SkX8rMc34v0y/fb6I/ISI7KZzfEBE/vKh/Z+GcjoufPyd2j85Yip4ob79JhH5WRGpSJ4yEfmVIvJj6f2/LCJ/CZAncVNPAvIa76seoI08ss2XaCN9e/r7XURvMUCT7i8sbPusiHxnKqNKRH5BRL7k0PHa9vXXiMi/FJF9EbkuIl+Zfv+CVEfHI
vJTIvLph/YXEfkzqQ2oU5l+o4isHH378lUyb4//HxH51Pu57yMO9NtSuzVJ7dj3iMiL99rvXjgNHuOvAc4Bnwl8YVpXAavp778P/ADwe4HOgxw4VbKfADaIUwy/AJwHfjOQp/Mc3udLgW8B/moI4Z5cmKcM3w9sA3+UKN79PPAODg6Q/i4xXeTvBt4O/E2ituGX3cfxvw/4VuCvA58P/CXitM67juPiTzMk8t3+GbGMv5xYp/8ukBHTayIizwE/TkzJ+ceBXeB/AL5fRP5/IYQfSNv9BuCfAv8a+LPpWH+HWP8/+Nhu6uHxHDF9958m1rc3AX+RmDb1s+5j/68Cfgr4Q8RUy+XCb99NNMz+AfCrgL8M9ImpUe+EF4GfI6YT3Qc+Me33JuCLD237ZuJz++vEd+TLge8RkY8LIXwIQEQ+jajt+bPAHwQmwB8BflhEfk0I4b/cxz0+Dnwv8G3EtK2/Cfhq4nP5tvutiwv4P4F/AvzPgBeRNxHr578C/ioxJexbiWUKPFXldFz4Ph68/Xsb8PeI/eBHgC0ROQv8O+Aasd2tgD9HrMevF7zW+6pHbSMX8S3AG4DfD/xaYhkAICJ94EeJ6Zv/YjrnlwDfJSK9EMI3HzrWdwDfCXwz8DuAv5YGG+8Avg4YEcv5+0TkzSGEOu33dcBXAt8E/F/AJxDr9KeIyOeGEBbpHV8KvEpsdwpi+/EjIvLWEMLW/d60iPwRYprrb0vHGBKf34+KyK8IIezf77Fuw0nnnL6fhSPyXxM5M4FD+bkXtv/YEevfzcEc5n+VWEl+5V3O/a50HkPM8NIAf+BJl8kJlPHZdJ9feIff2/L+jkPrv5FomMjCugP51RfK8CsO7fuPiR3v2pO+/8dQvv+ByB9UC+t+NQv534H/BbAs5LUnGn4fAH5mYd1PAL94qMw/nfvIJX8al/Ru/dp0/b9yYf2B9xh4OW3zM4v3nn57Z/rtfz20/qvSO/62Q8d45x2uRdL1fAmxIzyz8Nu70/v/1oV159Px/+LCuh8B3gfkh57j+4DvOwXl3b6Pv+/Q+vcAP/SAdbE91p86dKwvSutX7nIdp7qcTqC879j+MW9fP+9QffPApx7a7+uIA40XFtb1iQZieNL3+xjK83XXV3HnNvLdHNHmAx8Dvv2I+zKHtvvjh+tdWv/DwA1Ap+/vTNv95UPXdIPYJr5xYf0Xpm0/N33fIA7evv3QOb7k8HNM328B/YV1L6dzfM2d7vvw+wMMiIP5bz10zjemd+dPP8rzeBqmub/3Efb9b4CfCiH87H1s+w1Ej8oXhRC+5RHOeVqxSfRI/A0R+YMi8tY7bPf9h76/hziqu3Af5/iXh77/C2IF/qQHudCnDSKiiTMe/yosjIxD5Ld9bGHTzwF+MiTPY9rGAf8c+FQRWUnH+gzgfw/pTU/b/Rfgoyd6I8cEEclF5C9KpIJMiY3ej6Wf334fh/i+xXs/hKPqmCJ6j+90PSsi8j+LyIeJDXgDfBfRSD78HvxyCOGX2y8hhBvEzuHFdKwu8LnA9xA9pyZN7wqxs/mc+7i/x4XD7/IvMvc63rMuHtr3cDv8c8Ry/Bci8kUicn7xx6esnI4LD9P+fSyE8HOH1n0W8dlcbFeEEMZET9zrAa/5vuoY2sj7xecAl0MI7z60/ruJM5GfcGj9bKYohGCBDwEfDCEs9j3vT58vpM9fTZyB/+5Dx/oXxMH35x5a/29SfW7P8zEi//5BPOWfBawA/7RtW1L7cjFd3yO1L0+DYfwoaQjPcP8Rqb+L2HH88COc79QiGRq/Afhp4vTRBxOv6Y8e2vTwVEZLN7kfGsv1O3x//kGu9SnEWSJl4vD9c2jdBkfX52tEg2F94Vg37nGs04y/TvRgfDfwG4lG629Lv91PPbrbO/8wdezbiFP4f4/4DnwmkTZw1PUcNZVXLWy3QfR6/iViZ7a4/HFg/XFyFe+Bo97lxfu4V11cxIFtk0H9+cQ+5LuAa4nr13aCT1M5HRcep
m4e9QyePeJYRx3/NYnXSV/1qG3k/eJu73n7+yK2D32v77AODrYlHD5PMqw3jzjHner2g5R9OxD/YW5vXz6ZaPs9NE4Dx/heOMpzVBJHKIdxhvggWrTcpPvB/xf4IeAHROQdIYTRA13lU4AQwkeALxURAT6F2EH9A4kBUNNjOMUF4kh/8TvA5WM49mnGLeILeZSn4gLwSvp7i5iH/jCeIdbzbWJu+ob5i3/4WK8+6sU+Bnwx8J1hgaMvIoMH2P9O3mKIZfBLh77DHeqYiHSIMQXvCiH83YX1n/wA17OIHeL09zcRuXi3IRzk051W3E9dXMRtzySE8O+Bfy8xkPmzidS17xeRl3ntlNOD4G7t35362qPq+lXu3Ja8LvA66Kvut40siZ7RwzhsbN4JWxztgX5m4fdHRXuMZ1hom5MH98wR57hT3X6Qsm/tvHdysD9o8fD8Yk6Px7gC7hpZfgivABdE5Fy7QkTezO0V4IeAXyUin3Ifx/wlIo/lrUTj+EE68qcKIeLniIFdcHzTR//doe9fTCTrv+eYjn8qkaagfwr4IjkY9f//IfKnWvwo8KuT4dBuo4HfCfxsCGEvHeungd+eOoV2u08n8qeeBvSIxv0ift8xHfuoOuaB/3SH7Qui5/Lw9bzzYU6epgB/jNhZ/0wI4acPLw9z3CeAe9bF+z1QCKEKIfw7YlBOn8hHfK2U04PguNq//0h8Nu1UdRtE9Zse7fKePryG+6r7bSNfAd4mIjNHoIh8DjHQbBGtt/ywHfWjwBtE5LMPrf/dxFnJ45BF+0miF/lwIPPvJA4I331o/TtSfQaiGhGRjvEfH+CcP0E0ft9yVNsSQvjAA97DAZwWj/F7gY00VfLTHIxCPwrfQ4x4/G4R+Xri9PNXEj13i/gGYgX4YYnZVt6Ttv3NwB8Jh6IWQwjvkyin8++BfysiX3B4m6cVIvIriFG8/xuRN6SJxoElRkAfftEeBn8wGYY/RZxm/QNET93uMRz7tOOvEAdi3yci/4jI3/pq5lNWEOvjO4H/W0T+CrBHlGd6G3E67fCxvldEvplYZ9+VjvU0eNl+EPgyEXkPsa79NuDXHNOx3yEif4s06CWW1Xcu8oIXEULYFZGfBL5cRK4S24j/nkebMv2zwP9DbCP+CdHDdxb4NGIwy1c8wrEfF+63Lh6JFBH+OcQo+ovM2+ArREoavDbK6UFwx/ZvYYx7P/gG4rP4IYlSXK0qxXF4Sk89Xid91f22kf+CqM7zrRLl2d5IfK8OX2dr4H65iPwA4NLg89uBPwX8HyLyVURq6e8hUlX+cHLEPBJCCFsi8reBrxSRMbFN+HiiEtiPczsXfEqs23+L6Lj4amL78w0PcM49EflzwDclB+kPEMvkeSKn+d0hhH/2KDf1xBeil+GfE6fvAjFg6fPS37/+Dvv8FmIDPAV+nhho924ORXASp6S/mdgo18RG/DuAItwhmpPoNb5EHMHcMer6aVpSOXwHUe5rQpze+FHg88PBqM9ff2i/d6b1Ly+su1Ok7ycRBxVTohH3NSyoNLzWFyJP/QPEjuyXgN96uE4SZzW+j/gSl8TR9hcccazffcSxfpYjVFpO20I0fv5Fep+3idJzn8khtQjurEpxmyrMQj38HKJ02CjV4W8Cukcc452H1v0A0cNwgxi9/hs5WiXgx48498e4PeL649M93kjP6BJRvuwdp6D8b2vT7lDe96yLdznWZ6XncDHd/1Wiw+LtT0s5nUB537H9486qFLfVt/RbK3VXEqeY/xLRgAhP+n4fQ3m+5vsq7rONTNv+YeCX07X+BFGh6ECbRBw8fFN6z/xiPSFy1r+L6BSoiLK1X3KHsnvLofW31VGOaKeJcQl/hthn1ak9+CYO2U9pv68jSsddSvX7x7hdmeXd3EWVYmH9O9Jz3Et15ZeJMnyf8CjPR9LBl1jioSFzgfEsRML9EscMEXkD0bPwdSGEr3nS1/O4ISLvJAbRvTUsKCksscQSS9wvln3VE
veD00KlWGKJJRKS1NXXEyNubxHF3/88cUT8WpQSXGKJJZZYYolTgaVhvMQSpw+OGOH7jcSo3jaQ6XeEEB5FvnCJJZZYYokllrgLllSKJZZYYoklllhiiSWW4PTItS2xxBJLLLHEEkssscQTxdIwXmKJJZZYYoklllhiCZaG8RJLLLHEEkssscQSSwDHFHwnIoeIyhou/EbYeDN4AyED0iKACiAhfUL63/xvISrWLfKfRUCp9DnbgCjZ55ln1pS4jaR9RMXrER2/IwvbtKLrAUnHkvT3YTn2ENrLUfEzxOOEtD9+sQgkXpN4kED4yb/wQOru8XYPlylELeyGpyPHw8kihPDAZQp3KteI3/x5v4sv/Z1fivU13jd47/HOUU4rJuMJe9evsHP5VcQ5BoMh/TPnKJ5/AX32PGPvmNQ1dryPHe9QXrvE/isfJlz+KL3NK3TGuygXkADTEEV4x0SZCUd8O3LiE+6IRuVDfHfIR0bb/JQdMQZeys7wSZ/8q/mkz/os3v7pn0FvZQ1UBtYTyopgG5z3BB9wzuFsgwsB7xzOO6x32OBpiEr5Fo/Hg/dIetf+yld86THV1SVaPGxdvSASHFHoMwBnEc6zQocCjUbQOAyBDp4u0MXoHJPlZFmHPO+iJIdgCEGwztM4RxXquLiK2tfYYAkhxNoQYtvXNV16eZ+u6VFkPbKsh8n6ZMUA0xtQDAb0hwN6gy7BTmkmO0z3Nxnv3KSepJxIIihlUFpjtEZrjajkiwngvSeEgEZQqFgHfcBZR9M0eO8QEQQhhMDWdIsf2fw/sSnJ18OU653q6mDlDH/z6/8RX/iFvwXnPN6DKIUoRSBqmzofsN7jvMe5gLWOunLUtaNpHHVt2d3a5NbNG1y/coVLFy+xeXOTyaSknFY0tcc2jrrx1I3DWnBOUdcN+/sjRqMdgr9ElICtjrrMBSjy4gXe8tZP5lM+9Vfwqb/yk3nr297M+WfOsrGxRpYZjNEorVBKxXIUQUSlru9on9iF1du6vocu0yUiTqKvWuLhy/V+cEKqFMK8mzfMjGIMc4O2XRZw5H0GZo7toJKhu4gjDOP2M6SlNY5RM+NbWgM5nWNmEEdx5yPvqN2qtad9kHSEaODHQEZJ24dFa3qJpwCiQImQiRCUgNJYCdS+wZcjqv0dJrtb5EqhhwOKTCPB461FaU1WFIgK6DyALwl2TDANnb7Q2clQdUNoLGpaoStLxwVWiEaqT58lsB0ce9UO29UuVwmM0vV5YzC9LkFrGueorY2dtycN9DQoDeIJAbxKRjFgg6d2Hhs8FklLIKY9EoIPt72OSzxZeOYtqUqfQiDg09A9toXxuydgcV6lRLNCCIKWADi8FxprqW3DyI/ZD2P22GfEHpYST0NIA25B061XGdbrrKl1Vot1Bn2DyQYYk1F0CjrdDnlRoE1GXU8oy4rxeMxotE852YstrCiUMmitUUrNFhE1d34AWhRGFCoQDWPnsU2Dcw5Rc8O4sQ3hJCupzP0vACKxxU/ZEtKA01M3lqpsKKc1k0lNWdY4G2gax7Ur13n1ox/hA+9/L+/9pV9g88YlGmvxzgM5SgpEeijdw5g+Wd5DJAO6aO2xfo04ZN7k7g4QT11d5P3v22Pz5nVu3rjFzZs3+YRPejtvfsvLrK4O6fV7GEzsnpSglCIWukAISOtAWmKJJQ7g5AxjydKi02kM0XObDGJZMGZVMlIXbeW5zZqcvGr+OWOAtEbzQgMye9kX3/hkFC8YxK0nYn7FntYHLHdofMPCr+GA4zoQJCx4uuVIr/OjY9GYX+IkIBLSwEehBJQH5RrcdEy1v8No+ybdooM6fw5jhBA83kXDWGc5kgm+A6JqFBUUgc7AUOz0kbIiVA35tKKYlLhpha9qmqqhsg2j4NgnsElMCXSdEG0c0kRL0aFY6ROMYlqVhKzAZKBQ6EA0ioHgJRrFEnAEHB4boPHQ+IATcCp6qi3gfcD7cLJGxxIPDLPwGZdoFoeZyRyfV
2wRooPABwtewFqgwQmoED2cU1szDhM22WaHLaZs4xkTa8LBZz9ilym7TP2EurQEMWT5gJ4S8iyn0+mQ5znaGHwIVHXNZDJlNBpRjkczA1hrg1L6gMdSKwUqtcSiMEqTKY0EQULAW4e1Fuc9Ku0XQqBu6pOvoypNZqrU8Uj0YnufPMS1ZTKtGI+mjPan7I9KppOKEARnPTeubfLqq5d47y+9l1c/+tOEsLdwcElPsg9sIHIWY85gTD/eVYgGcvx9j3t7jT3ebXP9+s8z/o8j9kf7WFujFDzz7AXOnj1Dp9PBGIPJDJkxaC2pK4z97TxT9eGOd4klXr84OR1jBbG3bn1hLb2A+KkS1UBxhBeYAx6FA9QHNfeUzLY7MOOwSJEg/R0bOUn0iraRnjM4JDaGPh4reo4PXk6A2EDL3DAmeYXbTIczT/OiYXysHuPW4/7I6c2XOALOR8qBDgHBo4In+AaaEl+NGe9tcfPGVfq9Luefu8Cab9DEqWCPoBACCiRDd4d0zgV0r0N3bZ1itA/TGlfWhLKORnFZ4cuaZjRlurtH2BuxV41QfkygPvCUtRjWz5xneGYDGwI3bt2iVzb0+kOKrMBgosEB+BBw3mO9x7ZTwInt4wN4mfsYA8RtrcX7pWF8mrCKwhNY9JN6fPo7OgjUgboXvYJaabTRGB2dASF4mqZhN2xzg1tscwXHiLt7JGscN9lhl1HYZDQp8SEj763QI6CzHGUMguCDx9qGuq6pqoqyKlGiUEqjtUUpk9hmPvksYvsbPcjRKDZKxzbXBYL3OO8ggFIapQTvPZV9DIZxCASZU0qCV1jrqWvHaFKxN5qytzdmd3fM/t6Y/f0p1bTBmAytDNNp9B6X0ykhTA4fnOjO3wH2CeEWTXOWplklzq4KMWt5w4NN39SM9j/Mz//shPF4j5s3r/OWt76FN77pZdbX1xkMBgyHQ4bDAd1eF600Soc4merntMTFYdYSS7yecXKGsQTQieYQwgLfN60TD+Kiodt6g+98sLlhPOMGt1btYUN4YXsWDysHd01/zL56idN2M9rzwYappVKQLje0O/o4bR1a6sTs9gPq2GkUGdH7XbOc9z5+RMPYosQjwYO3iK2RpiLUE6ajHbZHt2hsj+l0H29r8uBQgAvQuIBXCpRBd/roPCMfrtDbeIa8agjTCjdtoKoJVUNIRnIzmqA2d3Fb23R3N+ns3SKf3sCECXXyDvaLFc489xzDM2dwApvb21SNi4ZuN5CbHKMzIFZD55JRHNpPcKjEpPezafPI9Qw0yUO3xOnBkB4Wx4SS+gD9TBAMCkNAJ/NYgVIoY9DaYHTkl4YA1jrKMGWLW2xzGcf+A1xFjeU6twhk1YDe/gb9M2dZCT7OlngiXcdZbFNTNTUVFRIU2mm0M2gVt7XBpuFYvAMlBiMaowyZaPCB4F1qSyOtSSuDUgrvPbVr2dYniTDzyocgBB+pE9Npw97ehM2tfba29tne3mN3Z8R4NKEqG7qdHkXRYTq1EBTBW+7uwHBEr/AY6KWljSEp77HvUagpp6/yvl/c4eaNy1y5co3trV2ef/55zp07x/kL5xE0WmdkuaCRhcnX0P63xBJLcKKGcQqUk3BwWWzgZ3TgcNC4jSsP/jk7RrJIZ58w51swo1LI4q6EOVUizNdKyzkWSXyy+SXTHnbxiiQkg1gIijl/WeZe49n0pvjjN4xVD3xG9MBPbr/AJR4JkUfoQYdoGLua0FQYsXSN0C80/UyRKXBNRTkdo8uSrKrxVvDK442AEYLWBFXgdU5dKIIRgrGEvEEai2oc0jhUY5FpTXe4j1vfY3XvFtO9W9i9W+SjLcauwiGsnznHmWefozsc4kShbSDvFGhjQCm8aFxoOafglY484xDwKhrKLnkg8XFkpwAkoIJCgr5tlmSJJ4uKCoejTpSagugjzsjI6aDo4DEEyQgqB52hdIbSBkQRvKesK0b1lGvhBjd5Bcf0Ia4k4LjBFf9fGN8YsVXt8sJ4l
/W1DQaDPs10RLBNGky6xFz3uBiRgfIKj8PjcLiZ11uFBhs0xmscesaflvRPBw0u4L1gg6Ph5A3j1ij2iX7S1IH9vYrd3QnbW2O2tkfs7IzY3Z2wtzdhf2/EdFqSmQmZMdT1GO8FrTtEJ4a9xxkd0Us8Yt5rPcgAVTMPQg84t8v1q+9lPB5x6+Yub37zm3j5jW/kzW+u0DrHmIK8AyYL0RGk4kxo210vhaqWWOJEOcZqZjRCy2tKvx2mQtzGnZUjVrWGdJjvLgc/W86wyHy1ACrMlSaicSyzywrttbaX2h5uRpE4eFlq5nae3VD6zaXvbTDgzEY/PvRWwfag9On8rabB8WNtPaPb0QTr2dqpqZt77/O0ow2wQQXEO1xT4eoJOjT0cljpF2wM+wTRWFszHu2jeyPIxzTKUocMXxikyAiZgFEErXFSUJoMCIh2aBswAYwPZB5M4+isloTJmPXJNjLZYlDtcaEZM21KSlvT6ffYeOY5iv4QZaLhY7IOed5B6wwRgxeTpjaieYGo6PUS8MrjJLFTfUBDGl8GTAho2vq7xGnBPg2e6D/0QJ9AhiKnoJA+ynTxkuNVBigCOs3KCc4HGufYr8fcCLe4weWHNIpbBDybbIf/wmjnJlvjTV448xLPPfMcuViwDVoCGo9ErZM0y9Yabu3icEkPRVBYVBqwqdQz+OjZTIxqwUMQGmpqypOnUkiacQmeuglMp5btnX1u3Nhjd3vC7m7kFo/HJZNRxf7ehNH+CO9jO2xMQCtPlvWI3eu9DOMWiwE2D4JV4kziPjBNx6gZ7X2E975nmxvXb7G7M4FgWBmu0+uuUNSBrMhjnK4CbRTaKERJOtYSS7y+cXIe45m12npzFz3Gi9sc4Sk+bFEu8ojbqZ82cngW5Db3JCtRM5/w3Dhueb9yYEwciF612e7tXkKc2lvYcnYZoT1ynIuaBesJzBQybrvXR4VA3oXOAOoSZtPex28c57nwyR9/nnNrPWgaPvixm7z/I2Psa53aLJEvWZU1k8kOk63rTHZuYSd7VONddrauMZ6MqJ2nefVjbO5N6V3ZxgyuMPUZZcgp1lYYnN2gvz6kvzYg7/UJRiOSXjXROBO5voEYoKSyQDBdVNEl7+V06wJxq3RCSdVUlE2FyQydlRXEZOgswxiNNhlKRRlCF5KyRIsgEBQiGlEpoFDCrGrq2Xg14L3HOIP41/oDfrqwydyN0JKoDEJGTlH00cWQoHMaK4wmEyZujzElU6aUoabGMmbEhBGO+piuqqLhI1xpJtgbEwoFG8Me3Qy6eU43L3C2SK4BhcYkqkernqFSex3wKfYkirWp5FaIxnX7l0t/RYLGyc+StWxb6yyjccXuTsnmZqRPjPZKJpOG8WTK3t4+29s73Lq1yc72NmU5pWlqet2C4aBDXVuijkh5glcrxHmEITFobzctrfe4ZG93zNbmPjdvbHPlyi207mCKLM00BURDUWQUnQytNR/3/IsneL1LLPF04OQM47BImbiTUXzE3wd4xO06f5DjoJJBLHMv8IwekdQm1IIXOTHwUicz8yu3TLJkLyzwkiVKHYkSCD7JsEUjWeY2dDxyUOkvEHykWIgnSBvadIwwBvIOdPowaUW+IDa+x+fSffZcwae+7Tzn1vvgGvqdwHhS8soVx3HGZwmJYi7zvxWAijXFRondYz3n3aCUxjtPub/P7s2rXP3IB7l1+aPY6R623OPm1jZXxyVjIIwvwfXriPplgvSYoqnJWB88z4svvZkX3vIyz7/pJbqmi/cOp5uoFpAo920QXFCglBC0wmUZQQok7yNOIcFgbE7HdmJwksmxaUwYiIZwkJBUJfxsrBTZPoJGRSNYBC8kMwN0CKhE7wfw1mOc5qFlITv/dTrxof3bGZ4ZR2m2MjUFiwPKwzsuLhxsOg7Uh/k08izMXhZnc2T+2wJH9/bzyjxwYBZAcASnKrTn8vM2rj1Oswn1ezkunfF94rPuAQMgI3JDM5VTdAdkvVXEd
BhNasb7m1wMl9nkFhVl8tmeFByBq2z6jK3ddYb5s6z1h5hBn3o6RAWPd7F4gheCCzhcciCERKcI+JnZG0sshY4mqoXD0prPjoqSCeMTvKeEEK+laiy7uyNuXN9la3PC3u6UqnRY66mqiu2dba5fv861a1e5deMG1XQPZ8dkeZ+VlTUm4z3mAXUn1YC1JZej1Soi61h3g0jL6KLUKv3eGbQqGI8qLl+6zmRcz4JvEQ8a+v0Ow5U+eW74db9qaRifRuQmevIjxcfdsUbN57IFkn410ioOhRl/f9FtGELABz+fIReS/rWO53J3aUtOsHorAa0VSgTrHHe7jOPGCRnGh3nEdym5ttOEmThFWPQQL27HfNtoBLeG8VxiTdL6RbaFpP0O9oMtpzjEkCSJ3jUlCiUagiSDWKVPZqoTiwYxPnmnQ5Qi8snA9gDqmE1jY6DIoVNA1QFXEQ1il5aHrzmt3dIrhLe/tMqLF1ZY7xtCA5M1w4sbils3HPuPaH+LQC+PS7+ATINJL1fwqamXaBSXNUwrGFdzs/8QgeVYYXQM9HG2ZrS7w5WLr/Lqh96PbyaId+wG2CaGy9SA9Q3ONwT2Zh4yu+1Yy1eozpwhjCvUqiOEGpybzTYEYba9eI0KUS3VK4sz4DA4n8UECEbQPkOJJhiDBZT3qOAIXpBgCV5wTghhXuu1KLzEuhtCwPsYYY8PsT8M8zw7zjm0F7x/yJL91C9Lpz00oFXEEygB3f52F6NzhkWjdp4QInrB0+dspWcmN9bGIKhUS2ThXMHP91loc2anChIrXvK0HzSo221aY9ilv9uRSDrv1Q/BB1+BBwpuuzdaEa8c0CiMMhSdLnmvj5gulZ1gCYwZM+WwEsJJwWO5zo3JFdZ2O6wNC4yCLM/odDsEF41i7wKu8amOaYSWaBHL1s9kMuO3lpvc+pctDktDRZU4xid8VyFgG8d0WrOzs8+tW9vs7pTs79dUZUNdN9y8dZNLl17h0sWLbN+6TFNeJ3pqp9Rln83qbBpkFsSnd1ze+sMwQAetegwGqxijGI87VPUErTKKokOedalrz+atXayFq1duxkQ/3oEKKCMMV/tsbKxSFEsaxWmEEuFNL7yIVorpZMrO7g67kzGO0Irg0oriFlroFgVZ3sN0ekimwQS8anBMcb7GO4W3GvEG8QYbLLWf0PiYEAqt6PdX6fSG7O7tsXtzm9Acsi0ETCF0ipxgPb62NDaqHx0HOpliOOyysTak08nYvLXJza0J1f0ykx4RJ0ilWOz4ZMFrLAuWzfy32arUn91evq03pzV+BfGCkoBq9/eH9Ynj6GjWn6b/R6dSSI6lkPpthRGNlgwtOa2ofOt9E5lJvSdPSAwK8S5GLs8c5K2cnPjZlOGxwehoSWodO3+nU7nMec0PChG4cG7AGy6sIL6il8Obnlsnw1OPRtSjXaqtG8h+Q889DHEjozXatYI3nO/y4vmCXuHJlcU2JU3lKadQllDV0DRga6ACcfEIC4PZEzOMizyj2ylwWYbygeneHlvlGIUnT3ehiN1dzmINFywZgT7PZed5ZnWD9aJPF0E3dZR8S/WilQlMg3aCSCxPFyWqLJZGLLU4yhDNBq0NaI1ViiaN7J2LcnLiHSE4nPOEoGelpIkj7RCijrF3Dmsd4jzaK1QyjqNMoSAuicg8DLpF/FQLw5aZDSxpKkDNpwcW38/Fz6PYR8LcEG4T9hxoWpLCTbvzbApCDlaUsPiH3F6JZo5n4aDhzcLs1+Lfi0mF0jGLLsdZO/vEurZO9Bh3MGgUWmuyzJBlGZiMPM/p6x5Du8Ieu8nX+jhQshle5dJuTq/Q9DqGuokeyZjlTuMFlHiMz/DeoRqVVDsFhcYlrZQ4C+KThODco2yxNDRY7In6wFs455hOK/b3xuzs7LG9vcNo1DAZN+zs7LK5ucmrr7zKqx/9IJO9VyBsM2eBA1SEMCL6+dsneNyGsRCHSmfIzAV6vVW63U6ceQoDtM5RSQ6vriybt
7bY3xuRX7mB0goXfHxGuSYrMtbWhuzvjiiK/Jivc4ljwWwsLwvNWmTv90xOkRnEW1RwFFkcEBVFERNOZZqgHU5JDH/1UJYWa2ucbQheQIHJQStF7dJck4qzNvMZskNofQ/EGU9tFCo4lIMmHLnHkTjK4awgDrK1kBlFnmmKjqEo5jPJJ40TVKWAltM4dwCFBY/S3GM722Vm+x5OjpE6q9kGiZUmggoBzVx3eNHR3CpFtDJr7aRdpGE4RHw6f1QhEB8gOLy3SJCZypzWBqUVyZFMEI9TIQnBq1iRksEStY4jYSPe+jEbxiYZxTOtndYwfojDGeHFN6zwaz/9Tbzp+TNMJ2NsPWW1n4FtGO1tsX/9BtuXRlRbYDx0eAA9DAHTLxDfEKqKC+uGT3zTKi9e6KGpCXbCeNSwv+eRJlb4qgZbJuM4zLN/Zdye4/C4oY2ik2fQLejlGeI9lpD4nUInDY0scYQugCZD0wcGiKxyfuUcF1ZXWe0UFBJQto6vwYKRGOKOMXeAB4LHWxf1hHXASqDBUuNjfdMKpRWNAiVRo7iByMewqd56Fq4qzWCgaNP8eu8JzqMdZJ7kbU7yhP523e4HglFtC3lgRmeuPa7mxrEc9vknQ3PR3px5f9vfWTCKDxvGwjz5T5gbxIunObzP7NoWzt+W4cwjnR6OdxBs7CBUm1lQQBabznRwbe40qn8oXGCeDqKLkJOjMRilMSnlb9CKLMsYFEM27Dq77LLLNif3liwiYLnGVZ9RbArrRR+NJVNCoTtkSkWDV+lUxAYRwZU+eYoT7S09eD/zG8d/Nv1zJAm3k78dbOOYTEp2d0dsbe1w6+YW43HFeFxz7epVLl58hasXP0Q1/jDMclIeRk1sJaKP/3hhgDXgDEav0e8PKYoclXhRxkTCTYgaotjGU9UTgh/PZj5j+yNknYxOt8A2juACRVEc87UucRwIBOq6RolQliV1E4NyNZBlJj43r5FgyUyWErnoaCqkgOvY/CoEDb6maSrq2uMsmFzRKTTaCBI8wYP3FmcbrLOzenPoovAuYBubHCyt9c4DNT2a2916sfmODs/WkWmMptNRREl0w5G5L44RJ+gxZsFr1HZy8xGPpBKceXcX+qpZH3bbl0hXAIUKEhfxqQ9Mhdh2su2OCwk9goSovRkavK1woSLYGu8apnUgVAE/bgjjiuCS10U0Ki/iUhhUniGZQUyGMQXGdNE6R1Q0WOXgjR9rcy4qEDSgVfQazyZSHvws/UHOr/ykZ/mklzd4w5mCYTal1/fYQqNoKMcjbly8ysUPjHj1KlwsYzDQ/SgoayJVoiig0x9H1YM+nF91rOYTaBrGkxHjSc14FJiMYDxJyxSmLvpgbFpakshJd41NPUVj2Vgd4J55hpeefYHJzVvkQdPXXZxtmNopTbCpqzYYBhg1JGRdpOgxWO3QyRu0lCAVXhXUoqgSJUdcsq1cqq/eRYkrG5OLWJ8mAggo5UEUYsAroRZmElgaPXsndAiYYFGJhBXLKSpSuBBw3sXMdi6gPYiPVKCgFBqFJWAJNA8bxGmSQT7zCDN731Oe7WQctxKOKTXtohe3NYpbY/WAtOMdPMbALIHP/JVLbvD29/S/GX94cT8WPCJh3vgE4sisLqEaQ7kXR2vdPnSHkPdAFcSsnvMSP+AxPwYMk9FoaDsLTU5Mw0ya0fI+emdXhysE9zxSKq5yjU1uPRbqAVSUfIxfbm7RaQYMGbAhq6zn66x2V1DoNEALkGg9rZYxyWssrYrPwr/D31X6d7II1I1lb3fCrZvbXL16g1devcitm9ts3trm+pVX2d/5GMHd4N4xHZ6oU3x89SG2922gnWBdyWjkmUxNyjQY63mbcTDTBpNl5LrAaI0xBqU1yijEaHSmyfKM/qBPvz8gy5dUitOIAGxtb+Odo2xqGh/JRhYYlSW2adDJw6qVIQNsU+OaBmUEkyskA5QjhOgg8c7jHDQWgvJkPnpvvI9eZRcszlucs0f73Tz4G
srGzS7yYfrno+a2ZpaTRJpcEI/ONN1eh65k5Fk3SlKeIE42813bSSWPrah5ENzcOJ47gtv1LPZ/wvxLiFH8kmKYhaQ2EfyC99cveJxbEyEa0t47fLA4N8U1I2wzxtdTfD2FqYVpDbs74DaZRxMbHF2c9ED3YqfY7UGvh+qv0Otv0OkO0aqDkjzZ5K3L6nibRaViKt/ESo9eqzD3Et4vBmtdPvsz3sg7fu3beXYoTDavUk12MdpgtVBOp4z2drny6pj3XoSLllni2APXw+3vTAY814Xnnk22jgtkCjoaBr1A5kZM94XNTcf2NkwShaJsoLIwDfFcbbfzOPxeLapyTHAl6/0unQsXuP7CS9S3RuS+oG+GuLKhGk0oJxPKZooFsqyP7vegmxO6OXqYYVQFjAkywNGhJmOqheBSnXUBrV00ip0F5wjO4r2jSY5K0WCUIiSt0aBC9CCHFDIRVPL4ClmAjneY9DRicJ/gvGCdp3HRMMZHr3/ULVZ4Y8g01BKPbR9Wd9u0ihsyr4oHEvKoBSpF61U+1JK29u8ibYHFv9tVrZHbnof5wHvRMIaDDcr8wuZ/h4VBdEr9Oz+lh3oK4y3Yuw71PqyegSwkOlMnjnAWy0wdr+HWpQM4fMqCqFDkUmBMRiBmLAx4tNIMhyt0sg7dUZ/18Rku1pe5xEWm7B7rNR0Ni2OHMbtM6FCF8+ja0DM9jI589xDaANGWLhFHQW0AUBtQevCfmhnFpPs/SQSgrhp2d/a5fnWTSxev8tGPfJTLFy+xv3UZ11yFB+Jwlzx6D5CmlyiYJwIxxNZ4inXTtlNIyjdZ4hcLKsvpdAqKTkGn00lpvAt0ppE08yhK0ekUdHsdzAkbG0s8PHYnoyPt0yp4GusxQM8oMmPwPuBsha1qjBY6/RxTaLyJ9IjgYk/uHVFpyoFzsf1zHpwD5yxOO5xzd+2Ejz2HGanGq7jEwEEwmaHX71EUPQb91UgjO0GcKMdYUtT9bBaZVi5trivceoph/nlbWzJbn/YTH2kUkLwNHiUpAIdo/Hrv8C6OeLy1hMZhmwpfT/DlPpRb0IwBD1qhewM6qz1KB26vgrDPfDyTQyjAdsH1IAxAhpA7Ml3QzTsokyEq4BzYpBZwvHVG0ErhtSa0HGMRYqCQ4X55bN1+xmd8ygt8xic8wwvnOnSlxpqYABZrsXXD3q0trl/Z4+KlwCUb/R5H4Q4DSZoaqt342LwH56OhWxnY3/S4ADt7sDuFykMdYjNv0108aELU48Lu1iY7t25w7pnzdIxm0Omx2luF2qBCBx90MmIbHCUOi3EOaSzkiT7jwNaWpsoIzZDgujgB5yUaUu3chgdJgV4hRH6w92BDTIPgNUlOScA5vJqXd1ACyqJ8HPCF1Ji19mDUYVW4IPF4wWOdwztH4yAKYmlqCRjx1K5h6izWPyQ3VeuDnuL2hb+bYdxGWrYIJLmNA5wHDm60OFqerzqwsGAUL8waLaxc+GwN4WQYqwXDuBEgFaytoKliT0LyfrezNiHMr1k/+CD1bqioaZ+6JkWUt7xxlzq44KLfVWtUUTAMQxDFeGfKtt9hyj5Hv6kngUBgyg7X6YQOw3JAP+ujsjirFb3Fc6eILBAkDl/hoVDpmZl8YlcewLrAaDzl2tWbfPhDr/DeX3w/H/nAe6gmlyHsHXGV93Xkh7wiQ6RiFMyD+NTCkqF1TpYdXIzJyEyWsh8asjwnL3KMiZ575xvKyRTrHS4Eggi9bo9hPSAzS47xacXdap4n9ptiPXnjKIzFVRVVWUbnla0wHU3IBafBWXdgks4tNMU+gLVA1eCaElvax9YZt/E7/S6srOb0hl2KIsNohcm6iFasr53h3NkLdDrdE72WEzOMRQJ6wVsTnUkhcUfavjN5DRKpfN67zT1FM8+vgKg47akSNSJJ2QM2HSvSJHA1zpY01ZSmKnGjdo5+H5pdcNsQdommmALVY7j6Jl5+6Tm2VgLXXtml3i8Tv9ARR/4VMIUwjWoQt
saQ08/XGfQib9kTqAMEF6I2sgoHnFuPCq0VXhusMbFjnhFDO8zNyrvtD5/wljP8us98iTc9N6SbNbhqGocWQdFUY8q9PTav7HLxw56L+w8eX++ALQdszRl2LsSr8wCSfB0hluiilsYR/sHHiq2dW9y8doU3DAr6QcjEUJgOVRkoa0s1aSinlomvmTDB0eA8FFOPkhoxGShNEENWGny1BrYPoggSvb9KdHRMeuKgJrRvhY/BRtbifMArH0fLyuOVIyxOqyiXBpQBkcTf9D5GFBM9by7E/GPREeBwwdLYJkYQY7CSYcSjRdO4hso1uIfVMc5aj/HC0o54D1Ap2lFwOyW00NzPmBP3emEO/X4Ue6E1isPi9zsgJONWJV3A9vy6pX0sGPbaxCgVk0X6iFZzwxoi1/oYjbd93AF/oSVOhTrnaKxFW4tohxKdVHEEk2d06NAZdSjqgjbW4XEiULLDJiN3FkOG0TmQVH5C6xxRhFZN5FB7385EzYcv85m/k7tmaJxnb3/C5cvX+eAHPsRHP/geqvEHOR4pzLZfuxcyWlY5dFAqR1QePe6uleXU5FmH4XCF1dVV+oMB3V5MSZ2ZDBEdPX7WxVlapbDOUk5LxuMxe6N9RpNRHCwHYbCywsbGGTpF5xjuc4nHgTbmZrHFroHKOrrOYZuGMlU3VzpM45AsJs51hybMWjnU1pFlLbgmUFPiq8enkdYB1nqwup4zXFul6PViv6c1eVHQ7fd47rnnePnFlxkOhid6LScafDfXEoboQZaZVu08MUeYG8Vt398GBSWJtbnKUzSMhcgVDt5hXY13Nc5WcfqgmeDLMb4cEaoRVCOoJuCSUTszx1o4dOZ4+cVzfPav+iSm5YRXL53j5o3r7GxvMdrbo5pOaeoa0QaVFUhWoHLDcE1xbk0zWFFYB3UTEBfSlHRL6Ti+bkkrgzcGmyXDOJXfYinfDavDnLe9YZ2Xz/U4O8jIjaeqIx/Nec9oZ4dbV/e4ecWztQU74eF8JBaYhDSKZe4Ntsy9w0/SAL4TnK2oRyOq/RGZ0uAsSsAFR2kttW9oxM/E8SyOkhLnPXldkDUe5QzaeXRToaopMp2gnKDyQDAetInSaS66SAVFEINXAesDDVWc3rKxlILITL5QSVJUEZ9UVTQ6DY4sHufndSHmH2vl2mIQWfAWH3xUvRYQ7xAXO03r6ln2rgeG0e1pjzCOjzKMmVGjbvMa3xNy+1e5ffWMIhEO/b64Xevtba+lLb8gUGTQ64FfBWWhGcLKmcgxLjpxMKAOGcb6eA23ihShnS67omYaSqhKfFliVCcme9EpiYYkxYo8pzAdirqHwuBOJDtm4onf4aGV7HKTm2iXUbgumTYopdHao53Ghag8oWb9QDSAVVI3Dkm0TacHqGhVgU4G3numZc3u3ogb1ze5dvUG1WST4zGKDXA2Has1bhcHBW0FNUSDuAd0EdUlLzoURYcQAuNRlGU0JqfX63Hm7BmeffZZev0eEKk11bSkLCvKckpZVok+6GmahqosmU4nTKa72Ha2FKEuzxK8p9vtHcO9LnHSUEDHZDjvmaQ2WyFkRqNN7Mudn4si1sTuRNISUragttnyHuo6qlU2FVClCb3w+IziDFgxsLGiWFnt0Rt2UXlOVTfY4MhyQ3/YZ319lXPnzrCy8pQaxkIyjA/NsCqYKTjN+6hwYL9oECu0kuQdjhQJxBGCjRGTrsY2JVU5wU7HuPEujHeh3I5eYd8yVe9thvWHPT7lV7yVd3z+52Ayxa1bN7h67QqXL1/m6tVr3Lxxg52dXUQptDH4EI2O7nDAxtkOeS9jWmpGE3B1oAyOgE+RmsdoGGtDMBlVZqIxIjA3O+/egGstvPH5DZ7d6NKVhpycXHcIBjIzxVvH1vURFz/iuXkL9pvYMT/wNRI9xTAPoIP43GuOJtufFhSiMC5Q708Yh0BdlXgamlAzdRanwXdApgbtchyOCkuFZWAht5rc66g76zx5WaPHU4wD4x1O1
3id4YPCekVQOcbkIBmNEhodsFbhggJrEZe8mAKiwox3pVVAJRHioCRNpfkDXjZBkuEZjQ3lHDo4PJ5GYCoeHyLv2TtHsHYhm+IDwqj2pLcbxq0xvCij1uJAdO09cJQ3+SiD+LYNwtFGcbuiNY5nbnxJtkoGahV6XWjWwFsoenHRJkVQtheWDmzS/R3TK78QFgjAlIaMEb7u46ZDCl2RSTGjlkXntmBQ5HlOd9JFk+Ee6k2+G4SYirghpiG+3fAONNziGgVdVt0qJovBYCJgrUU7PXMctGF1sabGRNFtuJ1OyiptXryTgneeyXjK7s6Izc0ddrf3CfdFLWp7NZ0+2z5ncYs1BoM341ygaSqsmxDChNgiLoYWBxa75BAEpXO6vT5aCSF4plOh2+3RH/Y4e+4ML770AsYYtre3uXnzBpcvX+LWrWvU1QjrplEqMg3lo7pAO7M4b5ldZdnbLWjsSektL/GoaGuFBzpK08k7MdlHOSWI0O8P6Pa6iG0I5ZQQwmzmZbEP1s1CM7zAViunRGWotko+ZvSB1T5srHboD3tknQwnwjQ4rIszH91Oh16vS6+b0+ucrILKCVIpkmG8SPNrZ4IP0CSYUylaIzq0Dy5EveBQ40OFdSXWTnH1BFuO8OUIN9mH6R7UO2D3OagpeX8wHcXZ8yu8+c0vsLG2Qlm9yM3r13nllVd49dWLXL58hVs3bxGCR5RgnaV2lrw7ZLg2QGUd9kYZGkU9bZgkPp0/rh4yFhLKZDFyODdzPWN7KDL+Duj3c156foMLG0OGnZxu0SHv9AkuDj58UzPZd+xuwW4ZecUP6ytpX8YmLa3X6zR6iRfR1TldZfBlQ9M0hKaC0FDZCXvllMYGghMaSipGTBkzocLhWPcdwmgF6GOkh3Ry2N+PHtOqgk5ByAq8zvGS41ROSIOboBWlV9QhJhjwgZj4IwXmRQ59wGvQykeZ7KS8EBI1pSYqSywGtyoEQtI59n7mIqgJlCI4BBcAF/WQHzaSQjI1p/4uGqGLPOPEiZ9TpsLtpzvSyA0HPh7i6hau54hz+MBBz3FarzPIc/A9CCuJbqE4oERxOJunPl4qRTvj0nZwUwKGCcIU1dQoZ9HOEUz0vSJRkskoQ5EXdFWPzHeo7ygr9rAwwArRKL5zT9owYp9dbGhQImRZFqf1bfRiSuLXa4l1NQSLQ+OwSeHY0ubB0yjmDOXjb0m8c+ztjdjd3Wc8qWJMgKykWUbLfAC0KJOpiRPAHVAdRBtCswXcZG6OCNqscebseQiaqrJMJlMm0zHWVgRqCIvaOwKiEZWjTYHJCrQxZFrR6/UwRqFVy9mOs5KNbRiP96ND5+pHqMpXiG6N++kHHbCHrcdU7czPEqcOuTIQAi54lCh88Cil6Hd7ZJ0Oa2fP0uv3GW9tsl9OITDT3m8jFWZDLweStQ7HCNdyHZ+AUZwBawWsDYVBv6DINaiAdQ3O1lgXZ0rzPCfLTHSYqpP1Zp+YYayVQqsYMRT7neiRaR/OXLlJUGqRZxynmYNzOOuwTU1VjWiqPdxkByabMN2EZid6hUPbMD98Y1nWJZt7W2xt3mLYLRgUPcL6GepJCbUjR7PS7eN9bMCsq6l9jSl69FZWQA3oFAalYDqF/f0G7wJe3Qdl8r4h6E4HKXJUZvBFHqd6mwp8Seyk7mzKrq/0OLM2YGN9nfUzF1gZDjB5gbMe1zTU0ykiAWPii7TLw/lnWppB6z/JmXcj7W+nFcNiwFp3QAfwTYMKDsEynu5yY7TJ2FfUeEpKxtSM8UyI5bROxfPNPue3C85NVzBNRSd4srqi6RTURYErOvi8hy8GSL5CCJ7aN9RKKL2m8ipmUpQM0QrlFWAjpSN4nIuBi8YHjEqJZgRqNfcBCQEJAfEeCQ5va1wTRfa0MXijsaKp0VEKL8RgWP0I709eZLNADh9aXn1gUSqxlZM64DAOh97ahd8WhefuhMQwORoL633rxJXb38eg/JwOERZl5NLGAVLawIPe7
pZDfYKG8YT5BHtONHUyGnKaGMMQUiBnW+ZClONSUdu0l/XoVgPGbPIo7ePtyDEU2Dt4i+fwVExxeLQ2FEUnyUH56FhPI6M4M0gMlrZN5McGi58pGAcUMUHBwZC940PwDbdubjIcPoOSjPWNc2hlKKcXaCMkYj8WZxlCCCAKpQzKRFk0rRSjHYOtLLBFmxJIJI80EpUhYhDRaJ3RuAYfHH6m1yrkWU6Wxzm3EBwSPE1tCRp6vS6DQY/9vT22t7dwzjEej0ECe3u73Lx5map8ldgXPAiiotMxi6oscZwQiZQYosFYTRu6WcHK6iob585x/vk30B8MuOwd+7duIsQhW9uHt8Zxm6TKqAxtGsSF250ajxl9gbOrsDI0ZEYIrqZqaqbWU9c1TjQigjaaEDxlNUWrk73YE/QYSwy+k7CggrSQuGNhBjKk/3vvCd7jaourG+y0xE6nNJOtaBCXN8DeIgp6Hd+Iwfqa3fEOm1ubnF9fZ9DpMCh6rPVXqFZjZKbyQt1Msa7Geo31Gp0XdLoGrxRVreh1AkUWMMqhfTTWj89rrFB5gclztNH43MRp3rKCaYf4KI82jPNC8+z5AWdXu6yuDBgOV+n1utEzEaApp9TTCYQ4S2x5OBrFIhazxLVKy+1E3mn1HPfygn7RIfcW6xwmeCRYymqfLX+LHRomxG6npYm0tXATMHgypvTKhv6WUCsITUWV51RFhi16+O4A1ffolYIQsmgUS2ASNBUGQaOUjoF1KokSBlKmOo9xgUx7jA8zdkEThArBhphlTIIH34BraMopTTlGC3S7XZACJxobDDalMA+QMuA9XLkVhZmxEaJnIqR3PU3vi5oNfmcNcKoQM5/rgqd57oddmOu7E5LjVg6uOvDHYTt3ET7ROXw46MGOl6kiHeq2cffCxbaDgABWq2Ot21Pm71AaIs3qnFIGraKRJRJlt1SieimjyTsdup0O3apHfAOPj8QUU42YlIvu7pWmoYxcYq3J8hjc5X1AKZ2ys0li2QS8ddjG4lyDd1HX23mfDGih9v44PQ0H4L3j2rXrDIbP0NSWbrdPnheEcJY2BUHUfo2Ldw4fojErKurHKoHgHHu2SjEEe0BBnvejt1zivJlzAWPcnE+oQOn47LrdLt1OB2tt4gSPmI5HKOUpijWKomA3eKbTfabTfbY2b4AEnC1xbosHk5ObQxQo/YQsoyXuiRA8NvjZ2+YA7R1FnrO2vs75CxcYrK6wt7WFZDmiS7IQ404Mi/kABG1yyPsoKVEuxsjMQgYeMxSw2oGzZzP6gw6iobI1ZVkzqS21A/JOtCe1IYSQ1DZO9mJPzDBWPqCdB+/jJJjo2XRf7Esc3lqcr7G+ihSJakIox4TxGKYjKPfBjcGPiN3EyZhVkjr2yXjC3u4emTa4pmE0LhmPp2xv73Ht+k32dnfZH+/R2AoXGrJOl97qHrpYpWo6jCaGclrhXBkb0XCM7bgIojKU1mgtNFqlTHiG+Bgzorl2sHxEC889t8qbnhtwfkXR0TX1dA9bj2jqhs0bV9jfuUk5mUTns53zhB+1KxViKEnG3PPl4bGkHXgY+CTiaPBoCXTwFMGifSQqtB7vnMiJajPixQnIMCu3LgFjK5rRLo0tmWjN2GiqrKApenTONgy0QULASY1VOV4KvMoISkfeb3CElj8uDUp5jAgZYSaI0BqZNsTAvWhweFSIknK+KanKfSajnRjAJUOM6iIqx+gc8SpqJtsYXyYPaRhnmZl5Lm/39CaP8R3SNIf5Zsx4ycwH0u2/2YYLXtuZeXrAfk4ekLAw8l4YgIfb/I3x2P7AxcyuPMmwh+gwPnB+WdwwHkNrnByfcbE4eT8X7+rRY51+sUq/u0rWGWDyDkYptFaYPCpm6CyPiUCia/bYrml+Zfc2igE8jiZYPAptOmitCUFj8oaY6TzpZoSAdx7bxJlC5yw2fTofPcVl45GqVbM45jsKjlc+9lEa1+XyxVtsb+3hQ5opIH4G7/GuJXTE+
qy0QilNlmUYo1E6o9NbxzVdCDVF0eWZC29gfX3IeFwymeyzs7PDeLwdFZTQtNkgRWnyvEfR6Ub5ONtQV2Nss4NgqcoRnU6Puq4RyfBun7q+ydwf+LDuDCGkTK9LnE6UKanHIlySpBUCWgsmyygGQ7rrF5hKwXha4uqYEMSL4E0HX/SxgwGh16Mqx7iwFdW67HHHIdwfMoH1ddi4MMQoQzmpsNMp1aihrgJOQaYyVAAlhhAU1gbqE1bLODlVihBQ3oGLXkwvgZA0TD0B5yqcnVLX+9hyH6bbUUx/ugXNPoRWPeKE31YNZligjKEuK0b7ozht1yTO2d6Im5tbXL58nevXb3Dr1i2qpsLjKfo9hmu7dAbriBniQpfpNHoTQoDgJMlsHQcEUTHVY6YVpU68TaXjTczICwc7jcFqn4974zleutDnzCBg/JjxXoxSnkymbN28xv7uJuXEUZVRqiUHBtyu3/HgVxy9XR3mzLyWe3waKRXWxqlcraN3tsBRYDHUM4ajJ97TORTnzJBnBqsM85wb21e53owZAj2EzNfYkaWZwITAWGCscuqsy0oQev0BojVIhZeCYBowBTYorFI0wdEER0h8SyUBIxqjVEya0GYmFom0HRc5Szp4VLBRa7mpmU7HjPd3yXSgyD0qc4jpkmmP9oqQMh5LA/Kwam15bEZCO8UMc7sxEA2IQ+9B6xWe28OtN3luGPvkyZ0n/ThkXIeDB4wD7uT9TVPThz3JR6Y35aDeweyakqRcEJUM4+T1PvII0Bh1rIZx+1ZntO9RQZ91+maDYX+DXn8dU/TQWY7ogNKCzgxBKUxLuZpd3XGinZS9d/vs8dTB4VEoU5BlHZCCHEeWg8ni8w0u4JzH2hAN4yZKFzZNg01JBnLlkOqk5vsD169cZH9asHtrFzuZMq+lrXHc3qsCyRIXOFIp8qKg6ORkOmdtfYMiz+h1Cwb9HivDFYw2VNWU6XSX0fgy3l8jGrRtkiYIXlHaDuWkx5yANgV2CVjKcpuy7CHSx5gO3j0K6W0RsX47+/hUCJZ4MBzZaoWAtzFdvUjAZIbOcI3+2eeoQpdRGGG1RSkVZSa7Q0J3gO30sEVBM9qGJo8JBtjifnMhHBcUMMhh/ZzhzIUNfO2xVQ2NxY0DrgTVAdOL0qlG5SgyvNc4f7K8n5MzjH2DsyW+mWKdpfFRlN43NaGpCNV+zCZVbSdt4dGCMfwY0RN65zZY39hguDIEEfZHI6qyYn9/xGg8ZTKtmJYNZWWp6kBtBS+aUIFMGpowwRQx1WZmclZWOtROqP1cAeo4EBCMzuh1OkyNwTY1lCNgRJxCu73sOt0+K8MeGQ12f5OtySY7CpSKnopyPIrpI23MgNvUUSzomXS0zUe4XkskvQRi5w7zTv7hJvxOFi5ETrttHEynqLLENDXG1ijiNe8QjRXBs0HDcxfO8/aXXmT3xnluXbtCNdojTMbUbop1nmnaZx9oEJwa4Xb7hNGZmLwj6ZTqrIvOcqzWOIHKO6bO4l2ccYmcfYPWBuUV4iOHX2sdsz8CEnzUtvUNoS7x9RRrS7yr4syFq5BGY8QhTRkpCE6BVdCoh+5fM90asy1H4nZiw6KJeviVUMkgluRVjtzjsOCTlNsPexuid3fmYZ5tPreiZ7EOt13F3Bi/7RRBRXEPL3PD+Ih3OhCojaI+RqLes4BB6KLp0mPAGYZynpX+eQbDDbq9VXTRQWUmcqU1kYcnQlZ0KDpdjMqOvUmNShJwPxUm4KiDI4jB5AM63QGZswRxZHlAZ22K2oC3gZgM0mOtxVqLaRqstYQQKFSD7J1ch9g0NVVdgzZIpx958SrRaRYWURqT5bMUy8YYipRIo1MYik7GcNBlfXVIrxvpI+VkgnMV48kW3l9nrhDfeupaZYt2fTtXsBiZEVuTEHKapgOzCIdHhcPXe5TNcUjTLfE4oUXo5BnDQZ/1jTV2KxhOFFOzQdmdokMM2lRFl4npMjUFlRhsgBCGMDUwCswDa
R8PBOgIrKzAmWcGXHjxBaT29IsuHZOh/TbjiSNfGdI/+yzPnnuWsxsXWFvfYGUwIMtOznSFEzOMPd43NM0EW+5RVVN8XcF0EhUkml1wu8QX+0nlOUtYzVl75jxnz59j/cwZMlHs7ewy2h8xGU+ZTqfUdYPzAZEMY7p4yfEqenCtFWgcQVkyZel2evTNkMYJU+tjqsXjQIidhtYZg26f/WwPW9dQ7xEb06OmQoS804tcuWqb0eY21WRMWVr6gy5rGxu4piG4GF9UVdC4mHlmox+TA+74h+9XHdGf0RApFS3ho8v9pCN5/Ih6n1WcERiN8OUUXVdkzqKJTUdrrF0HzvmG1UGXT/iEt6Le8gKT3S1e+eUP8qH3vY9b+xUT4pBll7a7Cxhf4/c2CXs3CeIJOkd0js67mLyg1kJQ0DhLWTc0HrRqPVMZYnJQiqANeZaTk5MFjQkK5T1iLWIrqEpCNcHbCoWLkf+hQbkpTMs4v+AF4xXiDMpnSHi4qPTWMD6gRz7jL4TFjwXMV6ikWqFUpFpFJZrkMZ4dWOaHbQ92wLZtDdJIlls0cmPGtUQpCK2y+L3fS0n7xmuSmYd+RlZOMRTtDVZG7mG8PxjeyBoaQ86AQq1QmA26vQ1W1s4zGK5R9IfovIBM4SXgVUCZWIZ5t0On0ydXnbmi/zFC32cYnCdQE/A6I+sM6fRXo162cujcoYwnWIezPokzSAwI9q3HuI4zOc6Thxo54SQf2uT0V7toZciygizL0xlbEn3kR+dZFg1jpVA6fs/yjE4no9PNWVsbcO7MGkWeMdrb41ZT0TQTquoWHKkS4jlqxu/oq0xis4+EVieo1RDaIgafLvG0QEFUK+l2WF9b4ez5s+z5HitVj3FRUQ4cKuuwsbGB6Q3YdhpvoawcYVpD3U3TuSWwDY8xS2YO9Duwsi5sPHuGZ196kcwJG4NVBnmXzMJoVDE4+wZWn3mJM8+/xNnzz7OyfobBYPi0poQO1JNb1HZK2L8G1R64EnybZOMJG8MtFKhhzsrqKoPhkF6vTxaEqqhoGodzQlZMCSiaxlE3Pi4uidDXDqoKmZTorMJkE/KixGRTKhsYlw3NsU1PeSajMd2yIddx6m7SKaKslD2641Mm55lzK5xf7VLYbeqqZlrWTEtHp98n7/QwuaeYTCl6Db2Bo19C0QXTBbXDgeRkD4OG+LpZ5l7jxaC104TaN0zKkmk5QaYTMu8ZaMNQKwaJe93CAa/6hvd89AM8s97jmY1VVjoZw2GXjTOrIJayqilqSx2i57ztzvRoG/noB1H9FSbG4Do98pU19HBIVhgKo7DO4xqHQYHKCDrDuxzb5FgxOGWojaXIHEY0mRe08yhXI7ZE2wrVyt00JbmAChmZD9A0hLohTGr81BGsiimR/MMZUEUWO9Tb7OIU9Xab+gSwuEaIAUyiUqJgnQILiQbfzP97wLo+yryVRKWIvyQ/+tyTfSD67uD+RwXvRbs3JI+xP0ClmF9Ke2wwWg5woB8VL/XeiChDlvUx2QBdrJIVK3QH63T6Q/JuF5VFTrGTqG6gjEKU0Ov1WVlb5WzvHK/uDqnYOaarEhTFA+kKe9GovCDvDegMVvCJ2K4yh2iLtw7VOIINBAvK+NkgD22QpkE5j6k6J2oYd3urPPPMBUQMRmfkRZcsy8GHGAzYWGzTQGAeTEqkCWmtZ8GPWZ5RFAWdbg+jI1d4d2+Xm7euYu3Nu5Tb4wx+W6TB1Jw+N8USB9BqNi7AiLDS77G+usLq6gqD4ZBsF0JeoXpdCp1T9FdZO38e3e0znVhkUuNHNTQl6BJkDyTKjEZz9fFEABUK1tbg7IWC9bMXGKyfJbOgGmG0tQ+SEXB0e0M2Ns5z9uwznD33LP3hKkWnh9ZPqcc47F5OkfE3OJ7sQScAA7pf0E/pNE2W0zUFIppOd8CkV9JYMOYydWWZTEpG45KyrpNxbKPcDoGAIYhBqS4iHcoqSqB5d0zR4MEx2dpCb5xjo5+RdTvIx
jphfwTbu0QvxKKZoBie3eDjXt7g5fMDwk5B02SozoAiFwZnnmHjwvM4ZyltYFxaztZ7qI6nbmAyhak/HgPWE+cG2nf7FAyJjsTEVexPxwwnY4pqSlcLZ/o9znS6rFdjehxMkb0N/NutG1x+94/wqatDPunlF+h3Cy684SznL6xhq4qtW1uYq5vULnCDSKsYec/WziZhZ5NaBGMMZ86ss37hPEWvi+kUZAjdAI0YnO7Q+JyyaSilpvEZNmRYY2l0jRaFRjDBY7AY31CEhiw4rK2pqxF5EJTPKXxAVRP8/ojxjR3GW3vYJuC9ilJUD4FOvmAYH8BBusLdn/tcslEkEaiT3KEPMbDwTvzgmYkbouKBc56Q9D5FWv3kNsveArVDWhZpmEsSp0yDrUlNgGAlyujBnAOdaBsz/nOIgWTHiRdeflOU99JdlO6C7iGmR9YZkHe66CJHGTPTvlQS0EajjGZI9G6/9MzL3Bzf4JL9CDVT/D0l1u4GQbFCj5VkGN/Pm6wQKTBFl2IwoBgOo6IDDqUtiMWrBsQSlCOokBLbeFAqzdAIQVlEGzgxw1hx/pk38Na3vDnqpqLI8y7GZDS1pa4qqsmU6XRCXbVe7Eh1ChBjPzJDCBYlnrKTU5UlNZ7NzU0uX7rE9tYr3NkAbakTjwuntRVe4iiYXGPLg+9toRQba2ucP3eO1dU1ik4PF0ZMygYbCorBKsO1sww3ziFFFx1GuLJttzz4AlwBvks0jPvMAzmB+37HHwxCTCy6ccbw7HPPsrr+LKYYIOJBTyitYmu/Yn+vZj0Y8v4Kw7WzrJ89T97tx3D3Y3RAHIWTM7ubGzx6+NYJo4Bi0GPQ7WJQ1NMaG2r2dvfY29tnb2/Mq69c4n3v/2Xe+773sb2zy2QSOdM+ZbcLwaWK1j4oHYMpQiuudFwVKxC2bzLZfZa13hm63R6d1RWmZ9ZhtAPNhJmou2RkK0Pe9PJZXj5bcKavmNZdpnYVKQJWDMVwHdXpEZoaU3Qoen1WNhrQY/Z2YFoer1c3cKprAgAT27Czv0exs42UY1YyTeYta0XGG7ThJWdnbO72qe4AP2Ut9eY2Upc8f26NjTNDBt0Ovd4QJYGt7T22xjU7zAXX2zmTOgR00yA3b9JM9uj2CopBl26vy7Dbx2c9KueY0iDOYH2GkS5egauhEU+DgHhU8GRYcixBWyTEdOnBWZRoOpmilwmhmWL3tsl2NjGjETXhDgz1+0M3O5hkZv7/ObM4HOkNk5mZ2hqXEBI9IakAtBrChGj8KR15nyIcNLajlWutA9vgnEspiKOur2rTNc+M4wXucXudM8N4gRHt47AXH45QpUjptgMQJFJKjrHBPnvheWKgV0FQBUEKvOqgsi4m76BMjhhD0NFkDRLQifNqTJzef+GFF5iWU/pX+9yor3KLKzTsPtT1GIYMOY/BUFMR7ku3JiarUHkH04mLcw4VXBTpF4dXNSI1XkXj2HuPcgHRGi9pIsMpTJYvDHCOGzF72KDfZX9/grUOrYQiyxEfcE0kjjhnqeuKqqpomhrnHMEnOTqjcS4qiue5pt8r8K7hxo0bXL16Eed27nL+p6GFXOJJIcs1tnIH6n4/yziztsaZ9XW63S5Ka3wQahsIOqPoDugMVil6A7zOCaqk8RBsgMbHTsgaUD1QZ6IkFV3mOtithlureFKn71EfZz7T8GCOz5AO3e/3WF17BqTD1s6Uam/MzpUbfPTSDT5yZURZec5OKpwYsk6P/soqJutQW/fIM9n3wgn6o09JeFX7bANzHbK2Hz+jWVtbZb3o4qcVN65c4/rV67zvAx/k1VcvcePGTS6+eokrl16hrJ70/QTYvYbbfgF57jzdTofVXoVb26CeWNjqRuM4VJj1nLe/pceveLHLuqmQxtHr9yl6XaomUDtPMJrNvRGumjAtpwiBPM/o9XJsXVOOoaeitu1ppD2cBEpnubm9RXXxVXZHO5zPM1aKjBUlfOKZDfqjES/WNR+0lleIK
qWB2DS8CnT3p1wZT1m9fJ2z/Yxnzg4pVEZhhPMCe2Huce6lfafEdVdcwO6VDPdKzqldnlsxnHvhAllfmODZc5amFMomQ3UMnV6PWhSVF6rgqHyDDw2Zb2ioCdoSVI3yDVmu6Pc7rKz2GWpF6Wr8eJcVX7MicCs8WuhFJztodC6ax9F8PBh4N/slcS5EJEZY+6QV6yMFpJpOor520nTOigzd6ZFlOZI0fENKsRnP4BFnsW4KdRNFuUOMC8izlDQhKca0VzGTaRNJXmSV1HNiMCMqJdGQaD4rRVIjSEQNn1LteiE3x+vJKHpDAjF1eCCLxrEqUKaDMgVisuQxjuoPiEfrOBgoVAE9QZI+bq/Tg48EdpvNOJC6i3UpZNErkxTI43xYjzM8S5cee+wxZo/7E3RUoDPEZMxymhOQ0EqdBbwyiCi8bvDaolyUplLWRR+DFrT3FGUH4eF48PdGYDre5+qVy1x69TJl2XDhwnNsbJylaRrKsmQ8GjPe36csS+qmwbqG4KLOchuEVzcl1lYILtIo6pIrl6+wu3vpPsrr+LSml3htQeuDczQGONPtcXZ9nZXBAC0xq6L1ICbHqA55t4vJMnwI1E1NXVc05RRGE9ibwKQE52OisN4bQM6CVOBrqMoYdCQadBGN5nocP6UAMRAm4PcgXCGSBe8fpYNAjkiPa9d3uXh1l5uXr3L5Q5e49Mom1/csRuDMtS1eGk+pvEeMAWMIDtwJWyUnS9R43Fh05rQ6R4Z5ts2CaJGkGTl9puDMyirDrKAeT7n0yiV+7md/nnf/2I9z8dKrVNMJ7rioEMeB5hZhezMKxGcFvU6Xejhk+3wgFJ2Yds9VnH/R8GlvyXn7Wc/QlIhz5P0VdKeHLmuYllTlmP3xLr6a4KopPjiUjsZHt9vQ7wdWMsjt6dUdPm7UznFzf5ednesM7ZS6FJ5VmjP9HhfWBjzXP8vbA/zyzU1+djzmA8Tkr2VaLgE3PZjKc6GqeFPZcK6fgfd0lbDqAhukFJjMxZiuEWkZrXGqPZzfs/T3d+lnOZlvcHXG/lTImwxRXaQ7jM8lBBrno+KLb3C+xPsSpWqUlGShiglBco0pCpTErJKUFV3vKZRi7ByWB8+X1aKTJS9e0g4+SKCYN+bh0BJ/jBxk5z3eNtimoq4mVOWYan+PerSPNkLeMeS+T2YchekjIUPICBI1WKOiRQCxNKEm+BqFw2ghl0DHRG+uVyHJKYa0EI1iBaIWDON0efiAD8mLPMviRzKMA+ITD9kLhTneqfCsM8QjeIk0LVQBOkd0gSRvsdIGlGDwIGGmVGKMIcsyOkWHXm+A1pobN65ycbvDlKO1gBWGAWdY4wIiBUE0PsSkzAU5K7qP9TVbfhPLPnczrlsIBmM66MwgRiFGUMm41dqgtcIrhQh4JXgRglExA7fReEUcnIRA0esgSp+QY9Uz2t/m8sWLvPrh92LrKdPRmGpa4n2griomkynj8YiqrqJSho/eYogBedpomqZDY0sIFiVQTcfcuH4ZZ7dO4qKXeJ0gHJK36gNnBkPWV1YYdLuIKGrr8WhM3iHTPbIipilvrKW0jqossdMy2gmTSTR+XRN5DSsb0VmsHfgKdrZhZw/yHgzW4iz4uILaJhkhD24SpXXHBuxluM82AaCysLtv2dyasjO+xdUbm1x55SZbmxU2vd9NgBv/b3t/HmhJdpV3or+1d4znnDvlWJVVJZWkkoQAMYPBA/Bs3NBgY4wHBtMGv7b9PLSf3Xa/9kDT1mOw/V7zjN3Q3Tb0czO63Z7w0GCMMZYAMxiBACGpqlSqysqqyunOZ4ppD/3Hjrj35K2blTcz762c4pOiTt44Ma4Tsffaa3/rW7tzpmVN43ygjGmFF8GdMB//wXWMhXD1nfL9wRL2mn1HuRPO7da3+0VpxFKakirF9voGlzdf4QM/87N8/OMfC1O49x1msPMqu5s7YSToLFqBEh/0U7MMiXPW1iLOrUWsjhpiwCmP10HTtJxP2
N5YZzreZj4bIzgGaYRSQmUqGlOjE83ymufCGcv2ZbhoH41YRuVhowlyZ7uAweOdYTgZ8/h0zEgJwzQidpBFEW9xsOssEzwFYY5kHbhCcHZfmztOzytOE3ShFfAWhDUdcXYwYpBGIJZr0ynDoubZ9joiAsXS7swxso6VFHxCbBJylSE6D85RPMCpjNppxCZgQJkKZRowM4wZY0xJYWtUZIm3h0xUitQRSkY4O2VuSzba676zCXZIOm7tLTjKHZ/XEzjDzjtM09DUDcW8YDabMZ2OmU62qeZjIlORuIbRMGWgB4ycZeghw4JL8ETYJswKetUWvPEWH2usSlESoaOYNNYkOji1ThRWHKbxWO/2eM1aq0C3aDnGSHDgvJOg+GC75kTwOIKumEVsjcKhdUKmDSK3Vmo4KlzU1rwTDSpuI68JKoohivcoIqLCtQZxD0ErRaSD5jkiDCVnZXmZ5eEyo+1lJmy0NRwhJiIlIyEnZ5U1Oc9SfhqfRNhI4ZzFGQvGQm2oXEXJlKPOLyhylrMBw0FClgpZAk4rPIISjXgJWuyt8gqRx+0xZ4Q4SVBa4/GkZR745yeE8fY6k3FFU1wEanbWFc56lIpwzlHVJVVZ7GnH7km44bGisI3GugZjKrwzONswn06YzS4f2V49ehyG+bS6Ifd4SSlWh0MGWUKsNU4UvnWK0wE4GYCOaazDFSWzsmI+m+HqEnwDkQPdAEUQRmgICXm712C2DuUOmBmoEcRnQQZgW0lPacMd4kAb0EmINtuoLcZ262e9dvDRF3Z4df1XmRUNZWk5GIP0gBFBohgvYL1BuZC6r+S4WtnD8eA5xp1D3PHFM/brDi86w579GqoN+zSYBSpwFGnyOCYCtjY3ufjRl3nppU/cp04xgIfqOsXGFuOlJTLlEK3a+I+DNEEPMpaXY5ZHisGwRozHi8GLxhrDbHeLzSuX2N6cMJ5Y0gROnU3I8oSiKmlqR56kDFdSHrswpqo843W4/gjkatQIW4TSmymhDYiAJ4DYQ249+bwh18La2ik+KV/GlBXz+YyLszEv4BkTJpVqgrOZAufaY1wQ4UK6zBMrZ3hi7RynlwYsDRRbW68RPfscaWPZJrC5FNBMHaWfUuo5RkUonZElQ0TaCoipR6cRhohZk1I1iriakpQN2kyg3qIqC8qqprYGmwyZJkssNQkDvUrpLIaKDQJX+k5nBtKOv/sGLmGnDOG9x7qQuNqYhqacM59M2dneZXtri8nWJvXOOn6+g840p0YpabLEyDtWRDFESLzB+wTnNGXlcbXHao2PY3QUk2mNjxMUEUpFJJEijjrKhsKgwATunGrLVUeRoKPWMW6pFqrlGzuvwvvVRZnbilOYCm8qlHiyyJNHZl++7RjgVBauR+mgzhDFrQpFjNLxXhlo1RZQ0XsllhVaFIIiUoLECYM8Z2k0Ylkts+tGjClISDjDWVY4zZAVBtEq+dIK8fISNlYYTesYG6rplFm9zZyidaqPdp8xA5YHA0aDmCwNwSkfCb5N9vROwCtEorYmeSgx6Lzf0xGOoggE0iwL250Qitkm3m/T0QCdfY2djSjM0EiEc02Ipt0wjdvawasgNVdo5rXGNBVNVVLOtvFu58Su+dFBSui8F5PDHh34hVkSAUZRzPJoQB7HaKXwKEQ0OslJBzGNH+BUHKLITcV4PmM+n+OrNu8rITjGbgZ2DDMDxQZsPw9+mz0buy2orhIcrpTQI0q7xNDEEGvI2qn4agjNNvjDJAlvRFlDuf7GvY5XGok0jiClCkHO8WAE/bhxcq3MIfIid3SMLsIbs+/4hrq7YUna7zV7/DWc3897swv7dSK07XrrDJU31BjmVcFkNsWY+1RBo4OfY3a3KesLLJ0eMlzKwXkmWmNURJxmkCgmCFMihomQSIX3jqacMt1eZ/vKmPVrju3d8DyL1AyXG8rC09RgsoomsYjyrK7CqW1Ybx7+PGajFEUc4bRCjKNkv6jHJrBCaBoS61na2iZLZ
kgcUcWCbyk7tQ+P3FWCg7xHsQBe9p7HyzGPV3Me297k6eVTvPNtT5KfPsMTb93EbGyyMTOMm/BYY2BnF67iGEsNqkbnc5Yyz/KpjHi4ilobsuMGDHc9O40hqiv0dEw020Fmu0xmNXUFbjrGqSl2aYCxKU2yhgzBTD3ebKPv4tdNo0UO0+vR8YeNMdRt1K0oZsxnU3Z2dtja3Ga8vk597TrsbEI1BW3J3/4W3vnEWdZWh6TDCJ16nJ9TlBXeRzSNYjy2TCaGBo2LYlSaorOcOMvI4pQszUIVQxUcLY/QSEiQjYwP0VYV6BFaXMtztkGeyzm8tbgmSIqJhGp62lu0tyhv0L4hixQrgwy9HMq1H9fsio8SkLa6pdb4KMLpCNEKqzvJsBBZldZB1hIiyN5LcGqdw1qD95APhpxKz1IUczzCkCGnOMsSa+R6iWQwIspyJErDdGnL7dZKAp/bExKOj9ywC5mkrGQRg9gTU6F9EaZlJcL7BC8RWilsFKG0xzpPyCwN9hfVyvSJoOLoRLPRvTuo7FOB38LbIZ6E/dRZz76CxJ6cSVjnQ8GcelazW5c4s8V9q8r0ICBNUMkyUXQGWzhseY07n9t6gLHASdPAKI4YpAmxCsQkrzU6SlBa4bXgrcYSkpHrsqQsylZq0EIEcSo0dgaT16C8BqoAu9k6xYvwHK6bvdDmNwqa7gK7ZL3jgfOexjbM5mO2t67hnWI2KTG15bd+9mce23kO4uQc45C3cXRvqosEd05slyjXJUAmB77r1nXtk1Ztso0H0+zLNNqFY3dc43Zd0zRMbcXcNZTO0NjX1yO/exx3Q25gtoNpLPnyCmujlFRr8jSmsGAlotbBkV1yQXA+0gW22Kac7jLd3GbnimP9GmyUkE8hjqGuPVUVTFcknjRt0ATH+cwALu4+/FxjI0KVZegkBmNwhG5wG7hMGGut0Rbfdo64LJESVC6cW1JEcUwyM6wVlped5xJwkcDd7ZareH7TNwzqHd6xMeELU+Hdn/I0S5/2SbxHpuy+dpmdVzaYbjXMi9AFvAxseRhaWJsZ1qY7rPlTnBkqVs+tsNvkjMqC68ZipzP89g56sgtFjbLhvHpek27MSXEoleOyIUiMixKiXUPejO/42U+6iHEXSOjg2zLa1mGcwTYl5XSX8e4OO9sb7GxtsHV9nfL6dbh6BbZfA7MT3tvRKmujZ/j0d7+D0Shm7gomTcG4KpjXBms0ZSlsblRsbFRUjeBUhM4GZMtLjJaWWBkNiZZApQkJHh2FCLBWHmlLb4eEO0GJQ3BB0aKusHWNaZpQqbM2iA0qBToSEgWZ9qTKk2jHKE04t6TJVxMSrY6PdqRDxDjwpVrnWGu8aqvxSafUEfQ/glOs2si4CxVHjaGpa5qmIc0GnFo9T9N4IjMkJWVZVsnjIVGeo7MEF2mcC8+6bQVBxIb+9Ebt5qNAyCVmKVXk2hC5OcooQj2/NOhCo7FKYXWEIQTcvTU4Iyhrca3qh0hIJLxZSe7jwcFfzhN4k5YwNenYd3I73t7BYuJtNMdX2Fqxn6Lb43ahTuU8+Z73MozPwDxh9+oO1y/XGNMVBnt0EOVg2vz/FBilMWkUofGI82il0XECGqxYjBeMg9pYyqqmrEqsMeA9sRZGIoybCbZ8GdwrQVr3tsj7i5kjb+wIR63PfCftovOOui6Y7G6xfjVhPqvYvLpNWVTwNX/4Do54NJxsxPiw5OfF4FLXjnQ84az9dxfV7SLFnYPcKUx0x+iiwo4wDaeb0Ho3BI+mbD87/nFXbs2Gv+3AMK4KCmeQJCJfGqJ1FCoz3bdwMN/Ejsd4hDTPMYMEU0bYCmqvqBxslI61SnNupJAoVJCaT3aZjkvGO7BewmuAqmHyKqxstk26QJpBnsEghSyCSIWf5mF3jIki/GgIqyNcWWBdGCdvE2zVCZuM2B+jGcAUHmUso5HweKzIXcRqUbNGoGQ8x35iWyd8UwEfwnLpykUen17j7FrGuTMx573lz
JkVRlmDHRvicUVRGQaEV2HoYTirSLe2WFrd4czalLRu2NnexWxex29vwe4YihLTqk3kgMIzLBvyGtQggXSAS2NcFuHtDLUzJ/J35tJlXdJZqMmM86FyWdPxh4uC2XzGZDJmd3eb3e0ttjevM9+8jl1fh43rML4G/irhKUvAJwyHCU8+dprBMGG73EXNwYjDimB1SNBCCZVxTOcGX5UQ18zmDcWswqxU0DTEEeSZRnwDItimoZnPqZu6rWwnJFFEojWRs0TW4H2Dkwa0CZQVFEmkSCJNnijyWMjjkHA3GmScPr1MUu0ca0UmlbQcYy2gdUg8iTqnWPAqFC/ZTwxkL6JqrKGqKqqypCxLprMpxll0mjAcrOFnKVolZNkQiTS1gLcuFAsShfMtZdwB1odqdN4FFY8jO8Y+zHDNd6gmGxS5IPUslECXAbFeJtIeFQWHX+1FvhWqlZ9TTvbis0ovcuXeLHSRsoWpRuBGx7jr5LoOrfsO7jyl9RHHQFh9+2O8+53vIqpyptcKfDxhR45vRuZBQj7QzCqLs5ALDLOUWAlNWTKfTPCjglLVzMua6bxiasDpmLqqKYuSsqhCpVwTFH+KaoabbILb4aR79kPl54+IpqmYTna4dvkVZpNNrr52nRc/fonJ7pRv/xt/6/gu8gBOzjE+2H4mBEWIxTaji+TGhGFQ2w/sSeZ16KpWLtIhunN0peSFvY55b13T7rfoGFfsO9SxZTwvKJwjHQ44ff4saZ63XJbjwglEC+pN3OZ1qnmB9Us43xGpwbmIovZsTx27ucKEMSVVVTKd7DKbWGYlbLAvHTZpIG6C87sELE1hKYMyD85xUwfnaszDze5SUUy0soKcOoOfjmnGFQXQ5ZNH7IuaDAnO8ZyWU9yA2TYMFOTpMqtxzvmm5BwVQxzPwp6OcQcLXHeW67tT2J2SvAzvyoSvePsF3nXhKc6/bcS7xlOevnyFqzu7TMqK0jiiuUdd2gReIWJIVMfoi5sk1zZI5+vE1ZTa+z2neEh4xZaNZWShiWKaQYYVTWMVTbWKmW5hmlvzwg5D2uoYB3UGR2McxtRUsynj3V22trZY31xne3uL2XiHcncbt7MJOxuwuw3VdjuF1zkRocxgkscsLw0YDTN85ECHRIxB3eAkoqg883LG1iRiVs3w4wJmc+ysZjqe0UxnmGJGpB1ZKqRxBM5RFgWT8S5FMQ/cYVEsDwasDAYMk4hcC3EiaNHEOiKJI7I4Jos1WazJE80gi8iSiDRNyLOE4WCAm+2SJsfnGOskbyPGbdRYC15LeIaUYz+6IK3cnAq0Fe+oq4b5rGA2nTKbT9ndHTOZTamsQeUZeTJARQk6Tqmbmsl0SmMbkjwnERUUJJS0ba/DGofxppVJOrpjXPpttjdeYzmDyM2ohktonRFHy4yyBsl8GInHcUtf9KAklAcHvArKIQJ7695cdFPJ3T0vOsbd94tTo2rhu67j6nFbEIjPZTzx2HnOrS7TbFnKpoBiirYzHkWbJmmESS3VHJZixTDPEe+ZjcewsQHpElOTsrVTs7VTMLU5EitMYyhnBfOixDUG6hq3s0m5fR12uloTJ4u7CTOW84Lx1gZ1sYMxFc9+9CUuvrJNbU42eHlyjnFHeeg4vssEr6uLIncD7S5i3EmrdVjkAxv2qRl24TvbftfZ6OCxOzpF138crLlReKqyofFwZmWFM3qFs+fPMR3vHKclTgC7+M1LTLfeTXl2BWcNCosWhZLA5yxrS2MFpWPiNEZFGlq6nuVGlenFKOYU2LFwbtbOruRhgxWCAzh+s2/1TYTXoTwzy2v406eYV9eQytEQ7KUJgwcINkvYH8N17kLiIKkKliRlmZghBoVjGXiewDe+WVNUe/h44fmN13ZYcgPOPHGKtbWzaJ1wbnWX6WzCfD4PUlHesVLX5LvbTCcNxeVX2d3dIqEmwzEEzmpobHDIa2DmZpjJBnWiMYmHRHCRg1HO8NRZsnrpjuwWS+CzBoWJink5Z
zafsbW5yfr6da5dvcLm1ctU25tQzqGcwXwK8wnUU8KT5UFa6oBKkOGAfJQxGKQsDXMkgjSJGeWDoMOtI+aNp6nHTOea+bxhurEL0xk4i48U5WzARjlC+xKaKbFWeNMwn0zY2dxkPp1ivUXFEefOnEafO8fKmVXOnllmeZiSxJo0jkjTmCyJyVoHOW0d5EgLkVLBFWpq1HQSuEjHBLnBMQ6LtNxfsG18UlC+dYpRrbxc4OrWxlI2hnlZU9YN1gs6SUnzjDTKQqlxUZSTCfPxmHlZknpP6ixJmhDHEcpZvLUYazBYDG1BoyPCMGMyvcbOdYj9nHo0Io4GpOkqMrAwdNAM8VmKTTQmUqGgh+/SNYOU22I57pPDokO7iM45PgyH8QUXM8B73Al0BJFYxLa0pmKMLcY417YVjxJa1ZwkFgyeYRoxHOYogcnODjPROJUxncPGhmN7xzLHoeIYZy3VvKQuK7AWyjJIsV29AtWYfc4q7Jedun9grWU+GzPembO1tcFLl6ZUb8K46OQc45jQPjTs2z7mRoe14fUOctcGdTkOsO/Yds5w5wh3+y7SXTjw78XI9cHf3AW9akXEY+fO8+TTp3jlxYtcfuUSVXk/EwcszC4yefUVds6ukdmaSCmSNjknVYZMGvIIRoOUlWWFn5+iHp9ltLpOlpqbtvOW4BwnwKCBQQSZhjNZkDEc31/vzbHCepjrCJcG7cZyZcp0a0JqQqS4G49NCUoTS+yP6Rbptc41OBoUgXbxlnabAcGxfo6bxzwq4D/szHht9wUuvXKVt6+ssDoasro05PGzZ1gapIgN3FejFYXWvLZ5lcvza/wmlpowlvmCDD71U86ir064dLlkw8MVP8dNPgHVOmq6SrY0YDjMWMlTzr/jKRJ1h82BNdi6YjzZZXd3h92dbbZ3trh+7SobVy4zv3oZf/01KMYhmUzr8L6nCuIcbAyDDFZXkDzFm5poacjq6ogoVuRZQpbEnBot4bwLJZvjiMJ4UtlErKKa7PDiqxNstRHSnb2HeUY9ybm8fY2tSxniGlxV0uxuY7au46tp+EWjlNkTb0N/0jM8OXo3T55+jAvnT5NlCXGs0TiUQJSkRHGC9hBZh5kXVLu7VLtjyvGYV37tVynHx6dXGyWDLlTaOsbh3x6Hbhs/5YNzjBecD66k8xavLOgGFTXoxJDkMJKUaGBxcR4WBGuhcB6r15n7iqIs0bUwyoYsjQZoPN7UNNbQEJzj24kBCTWF3WK8A5EUNMUSSTIkzwqoGkxVINkQBkN8EuPjCHRQXUE0gkYkLJxwlEjUELzG+51Dvr2dubKHuJF8M+ChfK3glfwTLOsBfgK7kzHzcpf64Sf0vR4enHE4G56rKIkZjIaIVuzsbDGbTKjmhsnylCuzAeNZTqUUkiR457Fl3U5pto7xeAzFJkEB5jSoFVAWzBb42yvUcdJQCrw3TMY7XL82pX6TJgtOzjGO2rJpi9SJzrPoKA1dNcEuitsN2G/GA7/T9uZm+znBGY+gOH3qFO968h1c/vRP5SMf+TCfeP6FE5cEuSv4KfWVl9l64gJnhsJSEmNF46wiFohxJAryNGI0SpAzZ3HVmOvnXmN5ZRN9i9BvFwWNJPgsowg2t3kzZl7uGSxCoSJMOsCNVphXJcp6op0ZiQ3UhJIQNZ8QnOPThGIdnZBN026zx9YBzgB5AqmD3EHhQkLdzbr5EvhN79mYTnjnbMo7BkM+5bGzPHb2FO9429Pk4pCm5uruFi9uXmNnd4PXjOUK+9T8yUhx+lOf5NrgKvONK+xUXdduoN6CzR2m04TZaED++OOceeztrC4t35Hd5uNd5mXBtY1rXNu4xubGOtsb19m9cgV75TXYvALmOoEqMQA1hOEgVFyKsvDenz3Nqbe/nbXlJcbjbRLlOX9mlSSNyNOUVMeBA6w1qqUTjMua7cGUy6kjc1Nkfh0m10In4DzMYtiJsNc8M7GhhratwG4RCDJdKxtRvDjmSizM3nKGP
IIzKzlLSzmpFnw1w5sGyTNUnuJKg5lUVOubbLx4ie2XX2V87Sov/OaHqMrj61iiuIugt4sOn048CgcuOMYCtH+2GtEWrx0qsejMEVtIJUalntQLTZxhoxTjwBpDXBYQJzQ4SiZ4V+OKCq0MsVJ4Y6lsRUlNTcVRE5/CpKGjYcy8gXi7xlUFWTrE5iW+rmmqOSofIdUA0hxJUnScEccZOkoQlSAqBokQ6+6OsHgL5INVkCHzyZRHcbr+vkIBWy9t85I8R2JS6rFl0uy0A7NHED4kwnoC1z7KM7yCyWTMelExnRl2BnOucZbCn8bHMZiMoDJgCS+7Df+ua/AVSAKjNcjaCOYYqI4uxXg3uCFH+w22i2OF1kJT18yKN2/IeYI6xipUKehS+7cIobbOOe6c39tRrjhuOA/WgLPEkWa0NODpp5/iMz7r06hNzdVXr2KNIcsHaB0zGe/i75vEPIffvsjOxccYveMJzqycpqksrqlxGJxqpaZMTRQNOPvYBZZHA+bjCZvXf4X8asHkDfq3BFjO4exZyFIoG0jGh+dTPizwXjCS0CRDTL6GXzIE+rZCJlMm3rFNeJR3CY+z4cbc0Y7KXrJfyU4BcQ2PESLIOfCbwIvtsQ77GRxBCeOK9/zibMrjn5jylTubfLU2XDi1wihRvHL9FT74sRf4tWsFV/3+7+KA9crxscuX+NDLYy5Vh8S7vMOXJfOy5NJkzGlfoM6euSO7feijv868Kri2dZ2trXWq3S3szk6YsptvB4vonD3NxUTD0gBW19oqSga1ssyptRWePneW5InzrAxSPvmtb2FlNCJNEmKlSaKYNI5pjGF9c5uXX7vGhz70EX7+g7/JJz78Eczl58AWhFh+At4E3dlmyr46dNb+cotWb6C5zNalZX7jubOsPX6euTW8/cyI05HDbV3DFzPyx54kPfcE050Zm5eu8vKv/gYf/5mf57UXn2U+3eGlcpv6DhMYD4PTafiHtBxiL4GC2zrA+IXJMAHwoRiJckisifIolIaOcqKqpraOxkIlEbVEKO8x1pCPlhgtrzCebDFrNqnZZNvPcbMpCREKRelnjNmlZpejOsah0GjU1rkzGFNTVzrMrkjgyBk7R9UDpMyRZIBKcuJ0hM9GJMkgzCjoDKVTMCdb72q4cpokXaWYXsH7R1AS7D6Dm3peff4aOlJoo6iteWj7njeEQBylNLrB46icZ1rXEAc9eGcrpuMttqaWsXJ4rWCQh0iwjgllneMwW5dZWFmF+kJwiNcGoH0o7lHakO/BneWaHAWpwOooYjSMsN7QGMt45plWh495B8OMpeVlZrMRS1tzip3gtp00TjD5TvYdX9ivm3s/IQj9gTMo5cnSmMcunOPTPv1TSbOESy+/gjWOp9/2DFk25D/89Ad46eMfO9GoxW3BbGJfe4nqwhmSfIh2BW5aYLzBKNtKNVUoBqydOU/y2AWmuxOuXdlk6dee5fruze9jqGFtFc6cDTPfbjcErB5mxxgEKzEmGmJzF0bXjUCt8SZiWmwz955dQsS4IsxuD9pluf27E0+Zt9ukBHfsLPAU8JTAe2O4ZOHjFl4AXuFw/nbHLLoI/PjWmCefe5b3vOU8Z1eGfPTSK/zC1YJfc4FHvLjPZgG//twmH7566yD/vDK89vIl1HT9DmwGH/rYr9PUJfV4Ez/eavnD01By1NUw0JCvhQeorsIFLg1hbS3wZW1NNMjJ04RTSyPeeu40bzlzmgtnzrIyHJLGMcordKSJk5iqbli/vs2zH/sEv/zzH+K5X/p5zOQTBK5yZ4Gl1qJjbozNd1NUB1HiJpd58YVLRGfP0xiDfnIF4hr32ovIZAc3nyNodq5u8/KHn+fXf/o/8J8+9H5eMtO9ktrH+W541SbySfef4BzvrVrctnWMvQTnmCgikhiiDBXnRJlBNwZtHN4Kzgl4i0SWbDBieWWF3Z0hmzsW2KZily2fEJERk1BRUbPNjdkJN4cAK6SMSImJQrvhLbapM
QpqZcFXGDNF6gxVZKh4iEqGpHmJaprAQbYeSQS8DhHjE2t9hKWV04yWz3L98ipN+WbIrPVc5FvBlR6HpXlUI8WEpyRNcmodMoFK49gtSyDCK4/SPtQoaBoaicNgUtbC9KQWUFErTKDDqPrU6TCjnypkKcV7C/MZVAqKBswVQlvaFbO5++dTAUOBC6cz3vL2J1k9tULdzJnMxrx8ZYPqckV9YLwtAqPlEUurK5T1KU5Pd6ltxc7k5J3jk1WluN+fZQuuCrqlTVlQlXOcNSSR5tzZM6wsLzMYjHjH258hTQdsbW5x7fKrzKf3SwqagdkVqvE4VOtzDm8arDdUYigqmM4r5mWD8ZosHZIun2J45jzD0Quwe3jkJxd461lYOw0+Cu/KrIbqfv897xph8tdLBJKCHrRyHS5IO9gKV82YEKgQ3aRHl7Q4Y78+kyYkLHbUhpgQKc6AgYfVOmginyFQMj7aLttvcHVXPHzgpWu8cHWD5VTziZ2K33SheEj306SEqLQ38NIrQf/4VvDA9YnBzia33PYwTOfbrXZ4CdoF7rCkkEgoE5olqLUVJFLYnV0oylACLdQzBwVNXfPi5deoTE0UCedXV0CEOIqIVIR4j1YK3WomF0XJzvaE7a0dzLxz2A4mFhwWX1zMvl1EBskp6ijl6rjitY0xm7njVFqjdndQk2309Ss0ccb6a5tcefETvHrpBV4xUzbvyGpHwIHyx0KozNfpQ3fj8z2mWqtIYb3HOI+1nsZ4rKNdBOskTOQ5j/PhU0SRZjl5nqN3NfgGmLXJdhElKeEJKzhqox6hGDBs3eIIIUEkRnSEaI3saTE78A3WKrzEeImxUYVt6lBYRVu8C3PISoLy8clAiOOM4XCJdLBGU17hZMs4d9qkXQvyMOv99LgbiAhpnFNnNZGqMc6xPZkQkTPIM07nKVWlqKqYosooGx+Sp2wDLiZwidvpJdGQ5+Hfw5RkdYQToSlKGJ2H0eOwuwWznVAW2o4JvZsn9C6e0NMVhOe2kwRb5Mx2CE6gBpY1nFmOOHf+NKfPnGN5bRXnKrLpkOu7BV5eH74RIEkyRqMV8DXOl0h0Gfdazfxo4/M7xsk5xl2i3P0MC75sMGVFOZ8xHY+ZT6eUZUmeZTzx+AXOP/44b3/b24njlE+88AK/+Ru/wQvPT+6fqLEdU493ME2NaxrENDjXUGGYV57JvGQyL5lXDVEOLs6Jlk+ztJojrzWvcxESgWfOwDufCRrGZQGTGezOYG4e7tiGoFBehZE1UeBgqRwSH0YLjQErYGYUeF4mNBFbhMp4WwQnd43AO16FvdLSHYVe2Hdeu+KNuYLIB2f6WR+OddirMwd+zniSiUFNDEV7/m7bAaH09Gp7LVf80ZmSM6C84765Cg5xHkE6BJcFilJTgSlRwyFnz50j0pqrkcZu74Qyor4lougIPyuYXbvCC9cuE0VwZnmJM6trKFFEWuOtQ9rSx977Vh+5pK6aQNm64clU7GtAJtyoJXvYExyDugDnn4bVU0yNsDkuGe8K86wirioiUzPZ3mBiHNde2eDaKxe5vnv9hFVaDsiTiSCi2rLU7vU5xx6scxhraRoTdKSbBmMamsZQG0djHcYJ1imss1hnwHnSJCHPc2JJKDzszzN0xKDbQ0JOzpAY3f4vRukElcToOEbHGh1rVKTwosM0L4DzeBuUdZwLpV9bqelQ6e+2r+ToUCoiy3KGS2tMt0fgjy+R8kYkhBkNzf4Mxkk5xl01LN2eozjBc/U4CYgokjgnzy15NscAm+Nd8ghOP3aKwdISsYuJqpjpZsRrOxGIC7JStgZlwt/dVGaagNbI2gqDs6dxUcq4dvjawJNNCFzs7MD2Fmzvwng3jKwlCX6PmYGbgN8hzMgJN1ZbCwNrmCAyZTkynF1JOH1mlZVTp0gHI+J0gNY5Xml0fOVwd0qEKIoZjpbIB4p0IKBqiuYK0QmrAJycY1zb+z9/wYEvDa5usLXBNBYcJDomHaScOX2a8
2fPcmptlUhHnD93hieeuMCliy9RV/cLL6Sm3rnOdLyDQhilioiITEOkwTQNs9mc8XgCSjGvGpyKOHduxNInxkzK/Y41Bk4nsDoKXP3dGUzGMJ7AZAqFfbgdY5xHWVA+wpG0UWMTnD5lQC+Fz5YoYfBcJfCNNwmR27cCTxOc1YgQF1qsTbOoAts1J0sOzknoshKCpNt19jnMfmF5I+Zj1V5HxT6l/3Zwp+PYKAmV2ZQkIQDsHeIMRTnFFR7JInQSobVG0iQ0zCrIzEgSk+Y59cTjtmrcVsnLr77Gh1dXeGztFO+68CR5FIcofJuD5r0LutzTCcVkF/zBae/FUflRXCkfnPT5DMZjXJRQp45JXrM1MESzBlU6Il2A2WVnN0jnlbZ+08b+LZEiOMZtmQ3XDgh8Gz52rVNsWoe4W6yxNNaEIh3WY73Ceo81hsY0WBuiPnGUkOkhEzfCs8OdO1AxKUM0GaotheOJ8VGMVzFeRXgVhc5Za5RKQBJEUrROUCpCJApqFEqhlEZpjY70iUq2OefxKOI4QamUk0sn6YbJXffbOcjHiU5cckRwwhUhAn6do9JhetwfCG2/JooysnyIcoa6sTTOEScJS0tLqGiANSmXneNa3bpermqlbl1wjFsaVldJU+cjkuVT+HRI1AiNlVCO3VhY3YWVHVgdw+40+HMEHXiqAqoZFLtQTVqnW9qpUR+ULeavQlPiERoPReOYlQ1qMqdW2+SzmiQRqmrOeFpgb/quCVpHDAYj0swxnW+yvLWOO+HKhyfoGD8Ao1IPvnS4xoIFhSaLc1ZGK4gohtmQVMdgLRZPnqWcP3uGldVTrF+7fK+vvoXHTK6xs3GdM2srnF1K2gx+w3LeoHxNMZuyu71B05Ts7u5QNQ0XLqzx2e/Z5fp6QVk5jAn0o0TDvISXXwmFPWYFVFVI9C8faq8YcA5tIFMRpYDRLjQqvgGrgqyYHbA/fTQHHAXwEsExXmefW9xJbi8TmBi0e3Wa0TOC81oSgtLnCIHp8zpoSW/7wGXu9KN3eOOxpiU4xCcV57oZTo9yRGviKEZrjXIh6XMDy7ScYeuGnfkcpTXGubaKm4A4kkHChTOn2M0TNqe7MNlhdu0aH44j3nH+Ap/3zLtZitOgFxwEbXHOUJRzJuMdivF19rnFHQ5qg9wKBvwV2JzDfEzcVChdMYmXuDpQ+LJGrCfXnlRZZtaFukEiB2O6JwIBRLU0CkLiXRc1ds4FCkX776ZuaJoaYwy2MVhrMc7ub9fSLZwTjOuiymE7pSJGyQq7zTkq1rmzqm2CZkTCAGkrNnkEJxGWCINGE6rbRSpGogSlU5SkKJUjKkNHOVqnKBWjdISKInQcoeMYkZOzeFXXVHUTBhoHaCzHi06WqTvHcXbDXfmhsAgZWqftb14Swh89HiR476kbh1IRaTYkMhWCQUQhSpMkCWo4wjDkVGkYzA3jSsCVIYcKT6jVI2FmxmtQCSpJSfIRLl9GG01j25lSA8gQZAnSAlZKaFohBWvBFFDPoZhC1SYyKx+mPSMH5RZc3g45Ot4wNVCNDbvFDsnmHJ1tEicJWSJ413BtfQd/iLvoPVgb2rUkScjyZZaXlhgOE6rmQXWMHxCY7ZIrl67w6pnXeHJ0HoVCOU2apSQ6RjyURYl1FmcNeRqqXK3fR2lovh5T7q4TrSSsDnIGMSRiGWqDLadMdz1aO6bTnM2NTca7O6Sx8PQTI06NLNNpxbzwVHXIl5rPYKeAwgRpsbaC9sNf3NQ6pLboltpgjATqhANcO5reK+HYxX9D9MUTnNjn2WdjTggR3scIsm4JoStcrEzeDZRbYR1yAjf5tEDhg0O8S4jzGG5MsrtdnNQTu5QlrT5xFD4FvA9RPpQGZ5jPZ8GpMU3LKfEgHh1plgYZWguTM2eoFWBqxltbbI0nlFWNsYGnFhxBS2MaimLOeLyLKQ4jnnQJDreTOFKD34C5ob5s2KrGXJysMFnO0MoSx4q1BFbziFqnSJ6Rx
ClJPTmxnGK/RyIOvGK8x3qPsxbbLov/ttbuRYld+53zrRPdkZK9D5FRH8TzrTNYZ/EeojhhNFplpTrPdXOZMLi43ScmYcgqA5aIyPacYNEqlLTWGqc1XkcQp0GeTedEKkckQ6mMKB6goyDXpqMEFUftok+QY+yp65qqbMLM4YlS5TzBtq1KCzHhpTiOYJJmvxpWDWJxvsD7jhJzv8x09jgqvPNMpkWoU4BGVIxSsjdgTJKEdJDjoxFr05qVYcXcWEzRUj5jIBYkjvE6Bh96IuV9eDejmEhiRGm8bZ9Dp0OCkY4Dv1KbVmVMwCWQ+ZB70/gQOBLbRo5t2I+gOtO1H8ZD7QXbOKwpQSqSGPCGojj8uY9jzWg0Isty0jRCKSHNMvI8YVifrN7yI+8Yu3HD87/0Eex6wfiVbR4//zirq2ucOXsab6GpDdubO8zmU7bWN5lOphSz+6z6jquh2CTxawyTNUZxQ+ImxM02ZbnJ5qyhLMZEacbGxgbXr11nd3uL6e6E6cQym0NVh5kWZ2A6h50mOHddhPL+GQacIJzFFTXGQ2MN1GWo1NbxWJVAvDDqJgrZvd7t5elUwMcJJbc/AVwg0CueIDjHa4SJza60dHuUvchj7QM9t5sI7dLI6nbfu1FYPbF8fqVorGVSFMwbg3cW7y1NXQdn2TmYzVupQ9ftBALGNcyqOVmc8NSTFxifWmV7exMtEEcR1gd6gPI+BEyVpyxLZrMZk91d7KFlrDvnYJG4chR4YIemmPPKq6+ycW3IUrbMyvIKZ8+fRpZPM0hW0cuawZmS0dIqo9kGE07GtradX7yBPuA91jmctRhjMMa2lAiDtSZEitv9VLuvEsHjwnRlG5pxPgwePA6lhCiKSNOcleU1TFUy2XmKgi7x5uhIWeY051hRA4ZRSqTCK6IjQcdCFCuiRBOlMTpNiLKMWOdEeoAiQyQjijOiNEcnGSpJ0UmCxHGYaThBKkVVVMwmBcW8wrmTTLyDGzVtOj78nQxEDjtuxw+Xbix0DMftce/g2VjfJIpSYi2sZhFLaUqSDYjjUHQoTjJIMpZyYTmzbOxOMdNJ0CzWDpdGMBihsgGOFJRFygm+mCBxQqxylBHspILxHDZ3YHsn9IG2ClQzadPNVVty2BRh8Q2IAW9D+zLehvJGWcfRMOP8+bPE2YDKgHUeLQ1NNWNebmAOMgxEOP/YeZ5++zs4d/5xYE4xLxAvDLIM1+QnavFH3jEGaKYVz3/keXZe2+KdTz/De97zyWgVsby8TJI2FOWM7e1ttre2KYqCur7Pqlx4C+U2qpkQ+4KUisxtI9U6s8k15rZgOtlGooTNjU021reZjutQmXcGs3nIK9NJcCVmZj8Pde8U9+jW3lQ4oPZYa3BNExqVxoSSeGiIIkiiIH/jdEhG0BlIGXZu+9KGEOHdIKhXvAK8jSDV9iTBWX6MfRm3RdGmxa6xKxTpCM70MqEbPbmErzucplbQGMukmFPPZqAVEoWkuXy0jK1L6tkE55rAb4taXc1YY51ldz5DjxSnlpcYDQfUtsHWNUqFBLGmaUK02Tmct1RlSVmWgef/Ot3gxTKbin1axVGfYA9UeCrmzQ7zZpPt4nHqeMDyBeGsHhANFcnanMHyMoOriuiEGG/W3Ujqv4E+4RzGGJrGtMl1TesUh++UksDpVkLQO+6Gtj50Xt7jcYh4RAUeX5pkyHAFVxu2J09Q2h08L3B09nnECmc5NTjHKMkZxDFtoBilHEoHBzlJNHEWEWcxUZoS6wGRDFCSARlRlKKTDJ2k6DhBohiJdDjQicFTlxWzaUFZlK0yx0liMWrczSUdd/jhPm21F4vWdAMdBRIJKhK6XExXWlxlwyu+6Dc9ElGaG9FUE5qqoFYJuRqhlgckaYaOE6I4IktiVJIwTCzDWNDNDHauwGwS8mLSGJZOhYIekoHO8cMcN1lBa02SOWKjsJNtuL4FV9dhfSMU/PBtNTbdgLah6pdygcNsW8qamDACc8B0F
hL08CApWR5z4akLvO3tT5MNRswqS103iK+ZT3cYzwp2N29URMqynHe845289a1v49z5xxjvXmW86/AOsjjD5cNDrHR86B3jFs5arm5eY3N7i5devsRnXfssdBTxlihiOp2wvb1LVTdEcUQc32c8Le9xxS7F9hUmuUdnDaLH+Pl1iu1rmLJGJVt40Wxtl2xteZp2Lr8qoJgFygRl6AJr/0i2PSApoodosWhfY5wOkWFJQ6NQF4Ea0BDmhhwtvUKDVGG6ye634I4wwPgEgQ6xQUiO22iXU4TUmJRAodAE33pOiNY37d+d65cSHOSuTs7xYkSIad8+jAKDx7ZTatnSEudOneLsaMSpLMWahs35mGk5p3aW2lmKxlE2lroxbGxus721SxQneA91MSVLYqq6DtPbVYXxEGkhajRVVRFHEcsrq+jkNLbqZnC6bP9RkNrLNKJX8LO0lR26lSxW1m6z+OQ31KZmd14xLSrqqsIrg1GC0XKiegLOmH1Zto5H7FulBufaaHGzFyXuluA8K5xze9Fm5zzOBp5xp2SnRFASEnHQINpjVUMSDRhGK6T2POXeU3srKDLOsBY9xtLSKZayAYM0C79ZJK1T7NDt3zpWxHEYPEUqIyJHyBDSoF4RpWHwpKNQF1bUHY/bjgrbNBRFRVOVnKxU294ZCYSr7s1/yKBAYkEPYpLRiHx1leHKafKlVZI0JU5itHLgHZHyxBriOHwqDPPdHaY7O8zGE4rxHLwljQUljqaumIwN61d9EKV56OGBBussRZXgvEJUhKhQLl0rTaQE7RpcOaXeuAzbz4YCBDiYaxgv45JToEeQjKhdSZl6BtSkqw0DH1EW67B7Ba5egmuXwE5bx9js0d9A2mS+mr02VQRUDCpr+8AgWBqvjHj729b4lHe/nXe9+50MhkvM6zALGIllNt0GBR/64K/TlPuD0TwfcPbcY5w5c5aV5TXm0y2a2mCMw6MRdbKua+8YH0DjGl7deAX7q44nnnqS0fIS48k2W9tbFGWFUpo4Tu71Zb4OtphQbF9hV5fokSNKC1yxyXirpJyCl5rGws4YdnZbVkDccopdO0Xv92NDx8V4e5AgkqD0AK0ciiQ4xT4Ko+TEhkgxXeNA6yDroBUpLiQeuAJ859bub9apV2wRosnXaIt9EJLuOvm2gv26bG1ztMfUStrtMo47r1wBT4I8cUd7O2nzOSJB0ogLZ07xKW95mrecOsVjS0uAY6cYMy5nTKqScVGwMSlY351xeX2DancbM50Fbicesghzeo2q6RzjEu08RglRozB1TRrHrK2tka9cYHp9K+zHiFByOoNhznBtQD5ImU5OUe7OQrlornE4GSVBcQbHJq+XdzPMi4p5WVLXBcSeBo+RkxXaMiY8Q84HnmEXKcZ5vHdYGzqY4BDvy5sFdQWDs/u0A0+gT3Quv1KCahUfpI0oO+1xUUYcVQzTVZbr8zR+ht0rV+MWlk4nBYSYjFOc5S2sDR9jtHSapcGQYZYRJTqUdY2EKPIo5UO9AfGIBMdd+wTlM8SnKJ+iOodYx3tSbqFwidtLrD8JWGsoixpvuzfvVjiO8MFipsFDgiGkj8ecvnCKJx5/nCefeDtPPfFOLpx/mrOnH2Nl+VRX8oWmqajrGc5UYGvEN3garKmYj3eZ7m4zm+wwn4zBNySJYF3JzmSdV157lZ/7uevsXr/XN/xmwlE3Dd4L0oXWlaCV4AXENthygtu9Am6dG1onW0CxBaQgyzipqQZCHnuyBKxKUbOruJ1LsPkcmOcJA7eaMNOkwgLscdj3EtE12CH7IqURJKusPPkY7/6kp3jve9/NJ3/KexgtrVLUIQk4T4TpZIvd8S4XL17i+qv7P2SWDVhbO83q6mmWRsus64S6spjGI0RodbI+WO8Y3wTXtq7ygZ/7ANe3rmJdQ91UFGXFdDZndtLq0ncAaxpMPcGUgtUe42t8VeGqEOgsKyiqfZUJR2AAzHxw2Gr2SyI8ZM30keEcVE3r8
BgflFUa1yYtEargGR0WG9RKQr+m9kfTvivnMWWvQWn/OydQK3YJhTy6QsUVcJ5AlUjZT9Trihd38dCSQKUYtf8+PodMhTPfodeRKkU+GHJ+ZZmlPOeZx57gnY89wenBgFESA5azzZDS1BS2YV43bE9L1sdTPpoP+biD0hiYbwXt42gY8s1ciIjiHTrSJFGYrcnTnDxOyXXU5tgbwlDCAXXQUS5qbB4ypb1poOkoFTd7ug2OMYfLZlmsraltTeObvU4dUXvvzIlwjNvCFtBGfPe0fR2+VZfwbQkoJeCD2C8irfRYq0KxiDCLrdDt9qICRSXyQOKwcUYWpWhisjhnNF5iy5yj8RMUNUKJYo60tlR4MlYYyXlWssdYWTpLPlghzjN0mhInKvCKYyFqo9KIa53i8ASLSxGfIC7B2xgvEV401gviPdhwz421uMPS148J3gvOtslER2oFH7k5tVtjFQZv0bztred46twFLpx6nAtnH+P82gqnRimj2JEzx5qGpirx1RxbzrCmRJwJ1Chl8K5G1TMSJkhSkY08SmvSPKZxnkY0etPhTlLY+j6FR4UgjY5amcyg2BPhiXDE3oDtBrIH0YTFF7Ab0WyPqIcpaM/cCu7iS/Dyi1C+QAgiHPYeHNbidRQ2Q+j/BLRmMMhZW13m9Ooqp1dXGa2sUZlwjDxVjAYxTz31Fp5661vZur6NacvfpdkAraNQ1KhdlIpI4pzRYBljsmOw5M3RO8Y3gfOWZ1/4GJdeu0icRERxBCJY65neN5Xv9uE8OFPhzRTXCE4bfOPwNuh8l1OYFlC1KhMlQX5txn5sxPNoPxDeOcraYXGBW1w2UNYEwWsPtWmdZQHbOcd+33EG9otLRARXeMZiA2UINIpp+23N/rgbgmN8in0HuJNpC8zXcLSl9nOx1tvdISI0mHc24BsozcpoxDNPPcEzFy5wYfUM55dWUd5h6xLrDGuDDK/AaY31MK1qdqYFg3SAsY7n6xI73gwjOHKUFpS32CYI38VxQp5lJGlKWdRkOia2Hmk6K7YJIRThfpoB1W6DmIJ6WrWyQrvc3GKOm7O3Hd4bGt9gxeAklGoRidAiexMIxw3X0nI639bvKUrsO70eH+pOiSBa48XhvGDFhW0XxHgFQCmUUkSiEA1RDBohQhF5hQw9bsmyPFzh9OpZHp88wWSyS11NcWaGN1NopmDnaDwKTxQNibNlktEq6dIqaTYiTiJUqgNvNFEtd1QhyuGxeLEgvq12HYdMdx+DRHg0zqvwWlkfouHG0NgGd6LOqGptfZLzAA8xElh7JuZtT57lHecucHa4xkqcMKor1HiLqq5o4phtJZTljPl8QlFMKYoZ3tZBp1x5dOQRbbGmxDYVIo5IPEkWo2QAqqYxY2bljPst3edNgWhERyjdUSmkLR7liXEkYlG3zHqwYDZw4+vU2ym1KZnOK3jpOZh8At4wpfiN3sEuKyZQtpJYMcxiBnlMniUMsoTYabwIeaLQYnn88cd55p3v4rVXr3H10isoFZEPRuAFax22cQiKOEoY5CPiSHC2l2u7Z3DOMp1NXy+Teh9CKRAs3pSY2lM6h699J2OIjiCO2nwyggtwWErS/V6T5USxF6H0QYOxsUEHsi23TWNCXWzTOspO2qmlTp6mIzx0zkg33fT61rsErrCvZNrVClqlLencrhP2lUm7SauUEF3eIvyOHXHjdsTJbkQFXCKoMN/B3ls71HWDHQ6xWU5thVnZ4GxDWc7x3gYlgjgmStJQ1KGoUNM50XROMp2jixJbN2BqqGv8bMbWa1d4Ln+W8anTLC+NGOYD0jRne3OXixdf5tVLl5hOrnJjVKOz3BhflhRV2nLhjhoFPIgg0aeyjDiJ2uIaEn76qJ3q9yczx2IXtDq9DxH0Lloc1AYc0tIqhFbVbS+ZSSHiUbp14lWIDotSOK1xSuNVKBiilSbTMZFolFOIhXqloi4qyqKgLArqaoqpJ9hyiqvG+LpAe4dCUHECSQ7JA
ElyVJIQJUGFQkegouCEiwYR1zrFliBLLcExJgEbnGLvBGy4H4dDMGEuoDL4E4wYQzcYaeijwbcPSeHsas6FpWXOZTmrOmbkIG8scVEjVuGiCieepphhiim2nOGqOd4Z1KLSnLOhpLFvQiJpLMSZJsk11kagJaR5PILTm9ZWVLXBedrCN1E7S2TxrgFXI0dq62p8XVLPdqmqErM7hekr3Fl6tyX0YEIXNZYoJU9j8jQmjSMSLaSRRhMhSpHGgibjqSee4NM//TOwRvjwr3+ENM741E99L29561tZWl4JRX50RJxkDPIlfJZyqPDxMaJ3jB94CKI9aRKSRbGGumwLNTZBfk0iGK1AOgK7ATvV8QgDPXRwdk8QPdAkbEgk6P5dN6HqiW3LuLoFftWeY+xgb/qna+kPR0ngHltCLPMKIVq8RuAbL6qbZuxHk6t232m7TBY+x+wXBTl60+EXjnb72Lr4MkUcYy9fYXN5iXOnT3NmbS1oQjQ1WivSLCHNM7I8R+uY8WTOxtYur7zyKpsvXcSsXw06gdbCrMBe3+RS+THcpausraywsrxMng9I0pzJzoSP/NpH+fhzH6Y2G29wT1WbOHI30Ei6xOjUKQaDAQoJtAYFLolxEuFOKHHKVYHr3CXg+Y464bvfNvxb4UFkj7SjcIhSRNJVlgudp4o0KooQrduqcxFKa5IoJotSYhWhUYgVTG1o6oaqqqjKiqaaY+opppwG57ieI86hPHgd4aMYq2KsRDgleOWQllOsFYhqqUbKozrN084x9hokxovCeQEveBPUM8R5xIbod1M0+BPMtAoRastDmQh3U3SCkaEYy91AASMHA+vJjCdTjtRbUrEk2hNrjegYrzVRokmJaeKcJg9lqlWbpKm0R8RSmYLKFCjtiRNFNkzJlwb4piRKB0En/RGkUhhXMi9mGGsQrdFxhBePsTWmLrDVHDnyYN3TVCVmPA2ln/3k1rvc/MoIPVkJrKHTVQZZRJZGJLFCa4i0R7eD9Fh7oizmqbc8Sba0zJNPvZXP+dzPJ1IJj58/z5m1NUajGCVB9CBNM7TygZJ1wr977xg/sOhiiRGiw1SyrfdjHeKD5G6mIBuEiHHdAKp3im8K6eSs/P78NYRosTX7DvKeHu/BTlrYL/7cOcYt3+oQi3uCE1sROMfXCU7xKoEuMSQ4xDH7jnEn8bYCnCE0RZbwuxftcTYJZaWvEcgRJz0LsPnSyyhvWbcNH1fCyvIKaytraK3w3pGkCUtLI5aWl1hZWSWJU66vb3L5ynUuvfYa6xuXcdUsRF5FYDrDbG5zmVe5Loo0W2K0copsMECpiPl0xsZrF2mqm3HgjhMKleYsLS+RZxlaNHiHNY7GeUrkxN4lU+wXjwkupA+cqYVOQUSC8kS7eMC3EmzSRpN0FKPjmCgOn6GanEbrmEhHxHFCliQkOkZ5jfKCNTaUkm6Cg2yakqYqMHXghLq6BGcRDxbBKoXxQm0DW9ticNImU+5xituBo7hAo5D2Vtw+j3BvbEmrtexCRrByYEqzX/TkRNANch/mbrHrNzotnAH7hevvrtpfJLAcxyxHMUtRzEApUg+R82gbiogqQgnwKFakKsb4DONyEIfSIcgjODwNuo5CgqiGKNVkSUIaD7A+KJkg8cnWYblv4ZiVM2ZVQWFq5rbGNVAbR1HvUje7+CMGBLx1+LIKmq0HtIfvDF02zC6+yfGmDNQpb7CuxtgSZxtEafBCFCnW1pZZOXWaJy48xew9FQpFnuVECpp6xnSyifMOYw3GehCNVicrUfMwtwAPIbrGLGGfdVriGks1haKEuKu2CyyNYHgahlmgUFQlFPVxKxo8RJBWq9j5tqBHOzTtHGZZzMpfzM6HfW+l61wt+45xx+E9HB3PeEZwbLuuKm+XjH3O8ZDw66cL33UFQxTht50SItFXCeSIrfa4M04mFmbWXwRvqbHM8GxvKF5eKN2rVEScDBiN1jh79jHyfMClVy6yvXUFaw5JEmlN1
abSUc+uM9m6gqgYjw0FbfxBWbWTgiA6IkkTkiQliRJ8U2DnJcW8YHaCU3rNJExpSkt5EAEVwqyBU9hSI8ILrwKfCkAEUb6Nvqkw3ao1URSFRUfh73Z9LIoECQVntEd5IRaF05BowSYaazQ2S7AmxzUjnKn3EgGt8xjvMdZTW4fxDkODwYTpXR8oEN47/N4709rNL6hA+XArHr83U+qcx0vQL/X1yVakE9HoeIgpT7OfBfCgohtsdMnAXRGRDGSISNePhKqVvo3U3w2yDM4sDzm9vMyplWUyH6FM+E0dFuss4sMgKYo0PtJor4ldFAZPun0unMEY0DYiMjFaIHKa2KXENkEZCzbGGX3SM+r3LWbllI3JDld2l4hGkNca4xt2y2uUzQbWHKVOrcObGuoI6qqlhB2XHtUcO3mVrfVlJrMnmZYzpuUEF1mqusaLZ5hlDAZD8sFamA1MEgZ5gqBI4gjvDLO5h3nDvJqwvr1BWZQoiUO28Qmid4wfJEQXIDobpvvNmOD2NOBhXoZnxRBG7lqCpneawnAkmB1PXe2XeO5xCPYib21Ubi+rKkzr4lpu8YJU1Y2OsVr4ruuYug5poerbTdA5yOOFoyUEB3jEfpGPYbussq+F3EWYWzlqThMKiXT0ii1C0t8VQiS54BjdytdFJ9wNXFBrDbYoKYtttreuEUUDqmqDozfADvz0Dai8iyVSjhud8oMKzikKW1vqacF8VlGfYM9cz6fBGW4T5rQIXlTrKAe5NYkUgt7X+m0XpRxKBVk2aaXZtAiRUsQSaBYRiijsjXYOJRYl+362FyFSGu81Lta4JMG5DG8t3pn9Cny2i+Y4EuswztLQhGRFazA2lKi2rSPdOcbOhU/ZG3h6VEu3cM7h8WBDgqFzDlfZE+UWKhWRpEMqfa6lVHRpsrdbXvzNRjcY7+aWhkCOqBylMnSUEkVJoNPoGCUpSBwSHB0Y67DGvk7B5HaRZ4pTowGnlpdYW1oicgpbGpyPwnMaRBRQWrWJmO2z5TQiDqUceIutBbwjchrrIrSF2Goiq9FGI43GNxrbyCObI1nWJa9cu0Iy9BRMGC1pRBk2JuvMqk2sPYphPK6e440FM+WYewWwNdOddeblnFk9Z1JOKV3BZLqD9YbVpRXW/CniLGcpPoVKI1wSRA60FoyxVI3HUTOe7XL1+lVmswqtsyBVd4LoHeMHCdaCnYHfJWgV7OsSNMC2DwyfyIcIYj4PEm1J6tmZwNYkBEM1j3iS3c1gqlDtBA9mgU/ctP82pu2YFx3j7gXtIqSL33cdVsp+UtitdUsX2cqdzsKY/cjxkH1neAU4S3CAV9r9um48ITjIZ4GnCU73DoFm8XHg+VteyXHDY+0Ua9+Iy3w74mcanZxlafUCggT5J29BgzE19XwHbyfc3fRgjZ3vsL07YTweMh0kNJM5460Z87nF+ZjDkiuPA64uAz1CKbxWe86vqCDP5HRw1AWHiEZ53UZbPd7plpPs9pJHfevIekJingSXM0xzWodXFq8VTulQlAwISg1CS2NGKRWeT6UQ53DaIdainEY7R+TAeEtEhHYRxjRoq7GmO7drEwkdztmQ7OYs4kKhh/B+hQRH72idYo+0lf5OkkohEpGlOVW+RjUXcJ3+S8X+u+sO+eze1sNmkmB/4HYY/epIV8Y+xSMCiRGJQRKUSlAqRUUDoihH6wE6zoh0jG5nBrRWiEirZCJYo7BWYYzH+OCMhuIxd+sYC6M8YzTIGAxzYmJc5sO1xgNUkoWKhnEcGDLe4h1oHwrNxFqBFxpvUVZw7bOmnCe2HtWEwZGtG0xlaGr3yEaMnfNcvbrD9s6YFy4mrJ2JGC4L42nN7qQ+Yj6wx5ut9t8n4xHYxuBQVMaxO53hfM3G5hWMqZmtnaYxliw/xak1QYlu5RhdyDNwDmstxbzi2tUNPv7xlzAWVlbOEid9SegeHfyc0EhvcbPOvpt+NsC4gnkRJJl2JrAzC831yZYVfoBhKphPQ6TY2
eAc2zo4xnUdMhn3HN6us9PsTz/5hfWw36El7b8T9mvbHdIJLR52ocHvXOqGEL+K2iN1TnLnGK8unLGjXqwSeMtdARFL4DI/3x7rlds00YlCMojWoNnhxiIbh0Ol53nne7+Az/uczyWOE8p5QWMbvBi2dnZ47vkXufziC7jiZe7ceXXQ7DDe2OT6IGGoPXYyZXdjwrwweHL21Ug6QszxwJomDBO0CkoNWoOKUK6NHHtB0UbbxOPF45XGS0jUw7mQBOcc3trgaCjd0hmC84lvK+K1+saujU4rLSgV5i3EK3wrURdoGjo41aJCgpxSKKeJCBJy1js0GuU0xmgaG+GMwdoQ+e2q+FlrsdKx5FuJNO9wtNxkWl+5laYz5iSpFIGukqQZo6VVojilqdewpsa5JmT7+9YZ3nPgD3OMF+6lPe7+TFKnpb3Vfr4RIiBH9DJRshrk8JIBcZIHSk+SEEVxW4k1IY4SdBSjJArRtDZZ0zmHs47GGJqmpqotVRnoL95brA2a7cZ00fw7Nh9ZrhjkKVmWkmcpSZThidBRgkpyVJyi4gRRUahqWVm884gPVeJjpREvaGsQo7FKhefSB46yMhbfNJiqoSkbTGkf2Yhxh6p0XLtcsr4OyUpgw9h9+fwj4HYcYgGVtjk43Xv7xnDW4dBUxjGezimrCVeuXaOuC+rGoHTC6dNFYC4C1gQ5R7xgTENVNsymBVevrvPSi5dRUYzSSwxHfYGPHnvotAVvHQFzwMRBaWBJQrXGOIO8rXFwMmWFH3B4F154z36S3V5nuN9Z70MWls45Vod83yXidaJqN4mK+sNXH9ykc5JLwhNREty+FfbTalYINAthn6/cTbYusy8LdxzQyTmcrfB2Bhxmp6NBRTnR8hr1Vg2+ZJ+GUnBYAy44BsOcs4+dZXlpCWc7HqvlyrVrXLm6weXbikDfbNsxzcaLXJxeYf2lhMR5otpTVo4wNEkJTvGtnJ3bg7e2Tajz4BWCIN4ELWLxIAprPEo0ogTv24IjHc1Cqf0KdzokrKiFRUv7fTcak86ZCurIzkk4Dy2lr9VM7cpMe/GID86xU4Hy4PEo30WFW1sqwSmNtkFqjlaH2VqLs60sYmRxkcFHFtsYxAgYISglC851/MeTgm+j2GFwG5Q8knBvSfIMAAAns0lEQVROF+FcEmYjuoiW6+hVnVPcvbzdjFD3HHX0FtlvXxiyz/rv0qU76lXgAOtomWSwwmC0Qj5YIsuHJElKHCdoHRFp3f6O4VpFtW2MD5H2UA0xDCZMY0IFybqmKhuq0lFVDtOExZo2sfhuegTpUjLCAEMpRZqmxOmANB8SD0bEaY6KQtn3yWTKeAKmrvC2aWcjJDjGSugmSBQds823zn7Qta6bhqpqHtmI8UG4BsrOPTix6WAP7vbauLo0TCclk3FJkqRh0KwykkSjVIazEVVpmewWRNrQNKGPVMozL6ZcvbLOq69cYWN9m/msZnk1Z2l5hdW10ydziy16x/iBwu1FvQoHVRPa5SyHpaWQW+ZLkEdRGP0o8LRc4m7xB3jFXVT4YCfdxWph30FedJg7Sbd2OvQY0uC6LnidEIPuXLQBwSnu1EcW2VgRe9Xtj03p6C1PP4OxhqKYU5dz6npOU8+xdecoHw06ThiOcuodHQqoyCoqP4UrrrT0oRvhzIyimGG9JU5j4iQh0gqFZz6fh4hpc7Nqdq87Oyp9S9AKbrbYrzdIuAd/DVvAuAAQIhIiVtHpKYwDmisct2PsfJhSDHcUFH3DbyagAr2guzzRCvF+zylWWlBaobVuFShahYo24S5so4Kz3CVKttX0nPeIDc+5V4Tz69Z1E7Ugs9Y6uXtXGBxjnEU7T4THR+CtoMThtbuhGErHUSYO2uCuMbjaYKIaqTvqiAVRNMbsJ8OeUNDY2pq6KqjriqauA9Wj1Y7G21YtobV5JxwNIUy3d00LeQbSkrW7Qgxt4qR3A/BnwNcgDYJHqaAekqQDsnxAludkWR6cyyQji
mNUK72HhOegK/RiTNC0dRZsWxDFNE2IEtehrHpd19RVTVMZmsZhG9+O9Rej3XdnWKEdDLUVG5M4YTQaMVxZI19eJR2MiHSMsx6tNU3TUHiL7YIRBHuFZ/hG89I2w9aDdZa6qalK0zvGizhaEPdNRV3WbG9N2N2eEScpWarIs1W0FgbZCKVyyrlle3OMVhG2pVuJOMaTbS5dfJWXPnGJzfUdrPXkgwFnzp7l7LlzJ3rdvWP8EKMBtsYwzKGYQlNAWcC86aPFh2IxALzImOg0WP3iRouO8cEOpXOKYb/TEfZLF6ftd8dTSMASqDFj9hP2tgjR5C5G3RUQiQnx12sc36T/uXNnWy6oxVhLWZbM53M2r19lPl3H27Z64C3u1buGaroLbgIY8EXY1x/u2Ho74+Xnf52fyQecPXeOwWBIlqaksWZ9fYPrVy7jzRtVcFqADFl98l0AzLauU+08D/5mFvIYKgzb7fjG88aVou4MdVOHcq9KhQjm3ginVabwgU6xx7vdV21rI3cEJ0raL2EvWhscYBV0iPGtv+nbMWAb++3GhWKDc9IevHPwOgdwb6LDd8dol8Xj+Zb73F6r9/slrlX7rokKzrxyGhU5tG9JHwLae0QvvlfHj7p8ls2rV26MDLN/n4dP6Rxct/C3J3h1LkS9fdd2yOK2rV9tBWcUttaU09aRVrqNBketKonccBa827f1go1xDu9tu4R/O2+Dw3rDPR28j7uAh7r21E2NaRq8ce0MTjtYAuhmMiQM4Gjl+PZnEdgbnFlnscaEaHaX+Kw83kBjGqqqpCrNXV/2Q4X7sFOvq5KPP/ciZVlx+sway8s5WR6HJZ2zmU1ZvzzmYv4qgoRZJB8KAE2nu1x65WUuvvQJrl6/jLEWrRRxFBHH8Yled+8YP7DopHhuXr7UAldboVxrAq1iTKBR9O3JIVAqhNTNQvEO1UZ99pyLxeIdEOzf/QaLU6JdJ7ZIHO7YwUJwUbtSHcc3zO9UKTrHt6ObzQl0CtWuv0Zwno8Do9EACOoHXmj1b2tWV1cY7z5GOS8wdY0xBmMcxjSYZoY1Bd539gNTTTF1Ab7jF0/xVcPNZ0oc852X+NDPXkVFOSrKiKKMOM4wxlBMulj6raHSZc6eP0OWxszWBlx6dpt6equhQw3+evvv43+jiqJEiQR5tSgijiJu4NgKiFeoBS3lThbNudYRcqFIjRcTWEKdg6qjUEVPh7LWi1HITjEiJL15lDiUDxFh5d0NdIrueny7r3UmqFJ4Q+Nt+M2twdrw707izXm/V8VPAdrThQPD9UGozKck6NtG4fNkMcW7Oytyc1s4xCft3OkHtpKbh2LuqMqSumloTENdV8xnc5yKqK2QVQ1ZPkCUtNUUS0zTYI1FKWn5547GtvrZTUNdN3hv0aoNLCjbOsUFVXkfeoL3Evdhp+6d5dInPs7Vy6+SDwcsLY9YPbXC0mgQ+PFaoQltmDUGY+rgGGOpmpKdnS12dnbZ3Z2F8tC+pqwK5rOTLUfcO8YPHLpqap1z9sYt6diBbWd4O17qffj+3B8Q3eoJ2YXQG90ccui49xzdzjG23Oj8LibbyIG/O03RTsZNt9+XHDcxrIthvkhQKllhn2dcs89wPA6I6LZsp0a0Ioo9cZqTZgNWV9dwJjhpznmMhbqxTKdTJpMJZTGmrsYYU2GNwdlFOoLn1vQEj3dzbD3fK3AT3Orb4RaD0jF5njIcZMTac22wRD09yjFO7m36wMv/NpRsbss2K6XRosO/pZWPUx0fNg5V7VSododKEBWjVILoGFFxcGiV2jtGtHesBUe3vR/nXOvzdlPbXfSy1VQGbozetpE+67DeYrEYAofYOYd1Ifrn2oQw2oQ68HTaGqr1Dn2bGBaUEhzOGayxTIptardzYvbucXcoZ46yrGjqhqYxVFWNVwVWFLWF2lgaa9FRRFkW1HVN09SBvuTbnsw7jLU01tAYszeYcpqgc2wE0zRt8t6j1JPdXnt2P8HZmnJaU
0532b6uuJJmJGkaqmHi8MbstRO+k2mUdlbJ+jDRIRAl4FzNZLIbKEUniN4xfqCgCNPwCcGRunW0sZtm73EEyGKEWLih8kCnbdzNGe85xHAjtaJzhhelmxYd5IgbRdQ7B7pmn/hwfKiAywT94oMs6OM602xWB7OojhcapocFHyqpaY2KhEggERXUNJaGnDqzRtOUGFNQlXPms4Ld3QnT8TbedJqanjuTW7u9TsTWU2aTMd42VOWcpr73JPwXig/e5h5yyL8PRlkPi7oets4f+t2tYrb+Df66/Y79VrSFHvcTmgrKMvCZjWmomwanKqzSJBJhlcaLQumIqqoxTRjwOGfQCgyCuHaWwdg2gdDuD8K8YB2hkExtMI9S5e6HBd7RlHOa8vaK56gEsmEoh72xcY3p7GSL7/SO8QOJLvW0VyM+diyU1wXYS7y7wSM42EHfLFu+k29aJC9rbnSeO65URBjoHKVi0e1jb6r2BI49mzV7+UdeETRulRBpRazDdJlohWhNFAVd1VzaEsfe4mmoyoLpdMZoecb2zhnGkznVfI5rZmB32SeFnAy8mbG9sU6RD6jKgqY4fs7wyeMg1/Vkz9CjxyIaA3VlMKbBWkdjDc4YXN3gowZfVyAapQ1VXQc+qbNYG9JKO/65ICgRtFLoSIdWU2vQGica5xymNti7rV78QKGj5y224DcbKB7sjxYHuQ/W4FIU6BjyJVheUejIM5mOmc1PNnDRO8YPFBwPdpnSBwBebgz2drKkh+bJHWxkFikVi5HiziVd1DxdlEfqGrKYB5Hs0ljPXkVZT0iW6YpCeEuXUe5ciPgo1VY78x6PxTuLtYKOMoZLMVG6xNJqQ1HWlEXBfDqhno/xrg4HaqP43lu8mYM/BqfZF2y++nFEkkDnMBu3sXOnVW05maLbPXrc/wi0dodvqTdaayKliLRCC6huIOzAeYvzDhFFFAlxFJFnGZESTBTTJDFZnDBIU7x3KCUYcRRiUOLaiPK9vuM3ETIIVD+/2Oe02bG4/W7HdzTLg3M73T6WMI9457KaJw3RkKSQD4XBUBgMI0bLGcPhCO8HNE2O8z2VokePNwemhOl6SL5r6lDwwzRhjtAddL46Z7ZrqA5KWjj29XM6R3jRKbbc6BzDfae1c0Q03Rhgr/hfcHoD+0RapzhQt4NTTJsp3yVjWZRSxFFCmmvyUZCkMiYk4dRlRVWVGNslOQYemjENs8mE+XRMU8yxTRkEPf2i7Y8Kj6+vtMoBtxNXF0SfYmXlNLiSnZ2Lt7Fvjx4PD6yB6xsll5d3yfQmo2lNHKfEaYZOU6IkDTrGophPZxTTAucdSimyNGGUD0jiCEyDa0qqoqIuQvIdOEpXM24Krl7bZnuzeaSoFEk2QOkk8PJ9y7+l1QXHI3R5Al1D3Eo77qnS+KBigsM7ExZvcL5T/ei2OWpEuevLjte5TnM4fS5mdXXA0vKAfJCQ5wn5YEiW5ZRVxHQmNM3JJuL2jnGPHh3qy3DlGvvO7s04kkd9Ke/PEflxIzis7Knx78UzlGBVlzgWNHO1Vojq1BNC1Mi7IOjuESJAt7JOXaOvY02mUjxJW9jCB+UFa0mSiGyQBY3WusE0wdm2TYUpZzhTtIOao0RI7mSaMSJOMgZ5hmn6LPkejy7qBt7/S2P+4y9/mDj+SCjYQUjWpEvwbClqYUDcvmutvKCSUOo8zAb5tqT5/jvpCVUVm8bTGP+ItK4BokLFSdX6rsH99XsiNUE4qbNd2752zmubQ+7Ft1KLOuzgNeLiUCXzoFO8Z9yFvnAx7gO8vvrjwf5xsZ88mhOd5RF5NiLSAxQpuBhnU7xNcTZFSUySKPQJK9T0jnGPHnvouNtH2a5HB2Nap1O6bOI2ots6uLp1jgNnMDjHgQ6xoMNqbVCtMBYdGZQoGhOy2/E+cM1a+S5RKpxOIM+yoGnZ8RMlyHo556irirIsKeYldVHQ1DXWNPhWEsjbGlxX7uROoEANSZIYpcA/sFpbPXocD
7yH2nrqR4rncPLY16ruUhHbqHDrrHb60EFNxgSqijN47/Z0zTsN8qD6At6FapmhOuP+ICWcsDtzW+hK3MKs4L68Y7d9KPgDvqV37BdeaQdDXaXIvVLSr6+0GMcRWbKMtzlVofHWU1eeugoFaYwBFSm0TkMfcoLoHeMePXrcFXxXgU1coBb7ILMlXdR3L+jg9xpyEd+WG/bd//ejHBa8OKyxGBPKxWoUvi2HFZzQtpRxFLWlj4PDrbUmihQiYExDU9cURc18Xu9lzHdZ8818ji+3uPOKdUEOTWsJnOl+wNSjR48TQOcYd2Fb2SsWI3t0iUClcN0OQQ/Y+wWiX3Cmne+c2MWcmBv/SUe/cLROs9pXZiKUm5c9hzxE+/eL+7Rl5X04kBKF961Dblygu1ETuM77bWYUpWiVgo9xFpqmE4ryNLEnaiBukzKVPlmOsexVTerRo0ePHj169OjR4xHGycaje/To0aNHjx49evR4QNA7xj169OjRo0ePHj160DvGPXr06NGjR48ePXoAvWPco0ePHj169OjRowfQO8Y9evTo0aNHjx49egB36RiLyPtE5KGUtRCRzxGR7xWRZ0VkLiKXRORHRORth2x7UUT8IctX3eG5e7vub/+EiPwDEbkqIpWIvCQif/MOzvvI21REvukmz2m3PHYH537k7dpue1pE/q6IvCgiRfucfo+InL2D8/Y2Dduead/99damvyQiX3oX536Y7fpWEfmXIvJya6sNEfmAiHz5IdtmIvI/iMiVdttfEJEvvMPz9jYN2/4NEflJEdls29JvustzP/J2vV1f4Viv8W7k2kTkSeBJ7/0vHt8l3R8Qke8EvgD4EeAjwBPAtwDngM/w3r+ysO1F4FngfQcO85z3fvsOzt3bNWz7NPAfgZeA/xG4BjwNPOO9/5bbPO8jb9PWSXvHwd2Bfw286L3/vDs4d29XEQF+DngX8N8DHwM+GfhW4AXgC/xtNLS9TUFEUuCXgTPANwNXgf8S+H3A7/bev/8Ozv0w2/VTgL8IvB94FVgG/gTwFcAf8N7/84Vtf6Rd//8CXgT+LPCfE57TX7vN8/Y2DdtOgF8j2POPAn/Me//9d3HuR96ut+MrHDv2K6r0y+ICnD1k3VsJktffemD9ReCH7/U1PwjLbdr1J4D/BMT3+rrv5+V2bHrIdr+DoLL+Z+/1fdxvy1HtSnCIPfAnD2z7p9r1777X93K/LLdh029obffFC+sE+A3gP93r+3gQFkIBr1eAf72w7tNbu/6xA9s9B/yre33N9/tymE3b9ar9fKa17zfd62t9kJabPKt33K/d7XLsVIp2GuHbReQvtaHyuYj8mIica5d/LCK7IvKKiPzlA/ueFZG/LyLPt/u9IiL/UESeOOTcX9eG2EsR+bCIfKWIvF9E3n/IMf+eiLwmYSr+WRH5k7e6N+/9+iHrXgbWCSOXE0NvVxCRdwBfCny397651XFvhd6mN8U3EsoQ/e+3Os9h6O0KQNJ+jg9svtN+3lY729sUgM8HCr8QGfahZ/xJ4HMPu/Zb4WG262Hw3htglxvr3H8l0AD/x4Ht/hHwpRIi9UdGb9O99XdaV/5Q9Ha9tz7Y3Xr576NtrxbWeeBl4McIofH/O6HD+AnCtPh/B3wJ8Pfbbb98Yd93A38X+APAFwJfS5hOuwhkC9v9bsKo4V8AX07o3F8ELgPvX9humTASvkQI1X8J8D8QCnX/uTu43/e01/zfHFh/kfCjzgl1Dn8R+KrernduV8J0lAf+IPDvWrtuAz8InO5temfP6oFt8va5/Wf9s3pXz6oAHyBM930OMAI+D/go8OO9Te/Ipn8X2D1k2+9ot/3S3q6H3qMiRN8eI9B6auB3LXz/jwgUv4P7/eH2/j6lt+nt2fTAtscSMe7tevS24iSWu9v55j/e80C0sO5vt+v/u4V1EXAd+N/e4PgaeKrd9/cvrP954DdpOdLtus9ut1v88b4FKIF3Hjju9wEbi9d4hHuNCJ3fd
WDtwHffTXDkfgfBkXt/ey3f0Nv1zuwK/JX2vGPgfwJ+J/AngU3gg7RTV71Nb+9ZPbDd17XX8ZV38pz2dr3huyHwz9vzd8v/CeS9Te/o/f8z7Xnfc2D7n27Xf11v10Ov4TsXnr8J8NUHvv9J4BcP2e9L2n1+R2/T27PpgW1P2jF+JO26cF+37NeOYzkpubZ/50NovMOz7ee/7Va0379A+HH2ICJ/WkR+XUSmhLD6pfard7ffa0JU5p/51lrt8X6FkKS1iC8Dfgl4SUSibmmv4zQhQeao+B7gtxKc3e3FL7z3f857/4Pe+5/13v9T4HcRnLfbVk+4BR4lu3bP5vu993/We//T3vvvJXSYn02gWRwHHiWbHsQ3EhqZH7+NYx8Vj5pdv48w/f+ngC9qPz8H+Kciclzt7KNk039I6GB/QETeK0Gh4q8Rol0QolrHhYfJrn8H+Fzg9wL/BviHIvJ7jrDfcaO36cngUbbrUfu1u0Z0Qsc9eNH1G6zPuj9E5M8R1Af+NiFjdpvgIP3iwnZngJjQoR/EtQN/nyOM4G7GUT190ztYgIj8LUK08hu99z95q+2991ZE/gnw/xGRx733V45yniPgUbLrZvv57w6s77b7TMLLdLd4lGy6uN3jhCjRdx9oaI8Lj4xdReQrCNH3L/He//t29c+IyIuE5/X3Av/yKOe5BR4Zm3rvd0Tkq4EfICTcAXyCEEn7NuC42lR4iOzqvX+VkOkP8H+2vNDvJMxe0F7jWw/Z9VT7uXWrcxwRj5JN30w8kna9XR/sbnFSjvGd4muBf++9/0vdCnm9Zt0G4cc4d8j+59kfBUFwrq4Df/4m53vuVhckIt8M/GUCb+aHbrX9IfC33uTE8SDa9SO3OMSxJjvcAR5Emy7iGwjTaT9wq+O+yXgQ7fre9vOXD6z/T+3nezgex/hO8SDaFO/9z0pIwn2G8Kw+T+jUC+BXbnWONwH3nV0PwQeBv7Dw90eA3y8iA+/9fGH9JxOcqRfu4BzHiQfRpg8CHli7HoMPdtu43yrfDXj9COSPLf7hvbcEA/4BEZFuvYh8NnDwh/4J4JOAS977Dx6yTN7oYkTk/wl8O/DN3vvvOepNtFMKX9Oe9+pR9ztBPIh2/UWCdulBysSXtZ8HnZA3Gw+iTRfxR4Hf8LepW/om4EG0a/eOH9SB/i3t52tvdI43AQ+iTbvr8t77j3vvn23v408AP+S9n73Rfm8S7iu7HkRL4fnthEh7h39NiAr+oYXtuv7qJ7331e2c4wTwINr0QcADadc79cHuFvdbxPgngL/ccsn+EyHh6g8est1fJ0xR/qiIfC9hCuB9hA5qMZL4XYQX/mdF5LsIo5gh4Qf9Hd7733ezCxGRryVwYH4C+GkR+fyFr8fe+4+2230dQXT+xwk6fOcJgumfRZhevR/wwNnVe29E5K8A3y8if4+Q2PQMISv9/YQknHuJB86mC9t/FvCpwF/i/sODaNd/Tnguf1BEvo3A+/uk9hpfAX70qDd/QngQbYqECpe/QohkPUOIFjfAXz3qjZ8w7ie7vo9Ah/iP7XEfIxRE+Tzg67vtvPcfEpH/A/g7IhITuKN/muD4/JHbu/0TwQNn03bbLwLOttsAfE7L5cWHvKN7jQfOrrfbrx0r/MlkTn77gXXf1K5/5sD69wM/t/B3DvwvBJ26CYFr8rZ23/cd2PfrCT9GRTs9BHwI+NED260RfsSXCFNF14GfBf7CLe7t+7kxw3xxWczO/HyCk3aN0GjvAD/FHcgJ9Xbdt+vC9v8FIUu2IvAKvxsY9Ta9K5v+3fZZPX+nz2hv1xvtSkh0+f+35yjbz+8Dnuhtesc2/QcEDmLdfn43cKp/Vg+9t68k9EPX23O8DPwr4Lcdsm1O4JpebZ/VX2KhkEpv0zuy6ftv9lz3dr0zu3Kb/dpxLndVEvp+goQSii8A3+G9/7Z7fT0PC3q7Hj96m54MerseP3qbngx6ux4/epueDB5Fuz6QjrGIdCPenyJMs
70d+G8JNIZP8cenAvFIobfr8aO36cmgt+vxo7fpyaC36/Gjt+nJoLdrwP3GMT4qLIGX8j0EWZAZIYT/hx6VH+6E0Nv1+NHb9GTQ2/X40dv0ZNDb9fjR2/Rk0NuVBzRi3KNHjx49evTo0aPHceN+k2vr0aNHjx49evTo0eOe4L5wjEXkq0TkL96jc79PRHyr5fjIQkTe31ae6XFMEJEvbp+tLznCtr6VsXnk0L//Dz5aO/7Oe30dx4n2/X2fHF9J7xNB+/x++xG2u6GNX2ifvvgEL++OcZR38zjuoTvPne5/v6JvV+8c98sL/1XAPfkBe/S4T/AFwP96ry/iHuGr6N//Bx1/naCN+jDhiwn3db/0k3eLP9MuDxN+ldB2/uq9vpD7EF9F367eER4ob15EUn/vK/P06HHs8N7/4r2+hvsd/fvfo8edw59kQYR7BO/9mFAl9Q3Rtx03R2+b1+Oej4RF5PuBbwSeaEPvXkQuLkyRfLWIfJ+IrBOKaCAi3y8iFw851uvoACJyVkT+ZxF5RUSq9vOHRCR9g2v6MhGZisj33O/TaHcCEflaEXm2tcdHROT3H7LNu0XkR0VkR0QKEflFEfmyQ7b7uvZYpYh8WES+8lGiZYjIu1o7XW9tcElE/smBKaRB+yxttMsPi8jqgePcQKVYmIp6r4j8BxGZi8gVEfnWh+mZ7N//ew8R+fT2Gd5s3/XnROSvtt/9ZyLy4+2zNxeR3xSRvyQiemH/bhr6mxd+w/fdg/t4pv1tX2rv40UR+V9EZO3Adoe2T+1z9/3tv99HiBYDNN19LWz7uIj8YPs+VyLyGyLyDQeO903tfr9VRP6xiExE5NqCbb9MRD4kIjMR+WUJpXcX9xcR+a/b36Nuf4PvEZHlw29fvllEXm3v/WdE5DOOct+HHOir2/Z+3rb//0RE3nKr/U4Q77lZGyiHUCna+/w5Efm9rX0r2ki5iHymiPyshLb6NRH5FkAOO+mDjL5dvTvcDxHjbyOUUvxcQkUUCNVQVtp/fzfwbwgV0LLbOXDbIP48ofzgtwO/AZwjlHBO2vMc3OePEqa0v9V7f0ve1oMGCXzXfwj8GKEk8FlCJbSYUO0GEbkA/ByhQs5/BewSylz/mIj8Hu/9v2m3+93AjxCq1vzF9lh/h/A7Pf+m3dS9xY8B24SyqhvAE8CXc+Og8+8SKg19PfBu4P9LkMX5xiMc/18QKoD9TeBLgW8hlOZ833Fc/H2A/v2/hxCRzyNUyXoB+K8JFebeCXxau8nbgX9P+B1K4HMIz95Z4K+023wB8AuESlV/v1336klf+yG4QCjB/RcI7+Tbgb8G/DjhGm8H/yvwJKFU7W8nvK8AiMgQ+ACh8tdfa8/5DcAPicjAe/+9B471A8APAt8L/CHgb0gYGH85oZT4lNAm/AsReYf3vm73+w5C+ev/CfjXwCcT3pdPF5Ev8t4vluj9o8AlQnudAt8K/HsReaf3fuuoNy0if4pQIe1/a4+xRPi9PyAin+a9nxz1WMeIf8Htt4HvAv5Hgr1eBLZE5Ayh4tpVQttbEcqM30un/6TQt6t3g5Msq3fUhdCgvnpg3RcTSv/96E22v3jI+vdzY7nmbyU0aJ/5Bud+X3ueiCBk3QB//F7b5ARt/R+BjwJqYd3ns1BmEfhOwLBQZhLQBMf5VxfW/TyhXLMsrPts3oSSjffDQqgj74GvvMn33TP8AwfWfw/ByVi02w2lOReey79yYN/vIwxYVu/1/R+jHfv3/97Z/mcIjt3gCNtKa6dvJjiei23I68rV3uulvdbf3l7bZy6sv+E5WVh/Efj+w56NA9v9V+36Lz6w/qcIZW51+/c3tdv99weu6Xr7nL1tYf1Xttt+Ufv3KYKD8f0HzvENB9uc9u8NYLiw7un2HN92s/teeMe+uP17RAiC/IMD53wboeTvX3iTf79btoEH72HhPh3wGQf2+472Pp5aWDdsbefv9fN6Avb7fvp29Y6W+zqc3eJH72Lf/wz4Z
e/9h46w7XcB/2/gD3rvH8okqHb683OBf+oXog0+8FsvLmz6hcAveu9fWNjGAv878Bkistwe63OAf+bbN6Hd7lcItdMfBWwSohF/S0T+hIi88ybb/diBvz9MiOqcP8I5/vGBv/8RoQP71Nu50AcY/ft/QhCRAfDbgB/x3s9vss3jIvL3ReRlglPREKJEq4Qo0X0DEUlE5K9JoHYVhGv92fbrdx/jqb4QeM17//4D63+YEKX75APr/033D++9IUTnn/feL7aTz7afT7Wfn0+Ivv3wgWP9I0LQ4osOrP9x7/1s4TwXCdzb24mUfwGwDPyIiETdQhg4PUu473uBO2kDL3rvf+3Aui8g9GuvdCtam/3r47jIBwx9u/oGeBAc47uptnKao0/pfR0h+vlTd3G++x1nCJSJa4d8t7juFIfb/SoharS2cKzrtzjWQ4t2QPC7gQ8Spvmel8Br/NMHNj04ldlNNR1lCuugLbu/n7ida32A0b//J4c1Qh9wqI1aHuC/An4PwRn+nYSB9Xe0m9zWFOybgL9JiFT9MPAVwOcBX91+d5zX+kbtY/f9IrYP/F3fZB3sX2d3jBvO0zrWm4ec42Zt+u20E91A56cIg4rF5b2E9+le4E7awMN+n8cPOdZhx38U0Lerb4AHwTE+TF+wJIymD+Lgi9txPo+C30XgGv0bERkd/fIeKGwQGrnDIpWL67YIZSEP4jHC77G9cKzDokZHiYQ+FPDev+i9/6OESNFnEjhs/7OI/OfHdIqDtuz+fu2Yjn+/o3//Tw7bhCnnm9noHYRZob/svf8+7/3Peu8/yALf9j7D1wI/6L3/du/9T3vvfxnYOWS7mz0/B53Nm+GN2sfu+7tFd4wbztNGcE8fco6btem3005stp/fRBgAHVz+5G0c6zhxJ23gYe3GlUOOddjxHwX07eob4H5xjCsgv43tXwbOi8jZboWIvIPXT5f9JPB5IvLpRzjmRwj8m3fygP2IR0VLh/hl4A8uZoWKyG8hcNI6fAD4fBF5emEbDXwN8CHv/bg91geBPyAisrDdZxM4aY8UfMCvsa8beVxUhz984O+vJSTrfPiYjn8/oH//7wFa+sTPAd8gIofZf9B+Nt0KEYmBP3LItjW39xueBAYsXGuLP3bIdi8D7xKRPSdARL6QkGi2iG5m5+B9fQB4UkR+24H1X0+YQTsOWbRfJNj0aw+s/xoCb/P9B9Z/eZsUCEDbdn8+ISnyqPh5Anf3Ge/9Bw9ZnrvNezguHFcb+AuEfq2jq3SJlL/37i7vvkXfrt4h7hfH+KPAKRH50yLyuSLy3lts/08II54fFpEvFZE/AvxLwkhmEd9F4ID+lIj8eRH5nSLyh0XkR0TkYCOI9/5jhB/xHcC/PWybhwB/HfgkQgb0V4jINxE4XFcXtvkuQqTl34nI14vI7yHwsN5FSLxZPNanAD8qIl/eZp7+k/ZYixnTDyVE5NMkyAj9KRH5EhH5UkJWviFEjo8Df6LlTf5uEflO4I8D3+m93z2m498P6N//e4f/hhAR+gUR+S9E5P8mIv+liHw38DFCZ/kdIvIHReT3Af/uJsf5KPAV7XP6Oa2yzZuNnwC+UUT+jASZub8H/NZDtvtHhHv+B+17+ycI7+3Bd6pzcP+SiPwWEfmc9u/vBz4O/HMR+eMSZKh+iECr+pY2aHBX8EFJ4v8H/HER+Tvt/fx54O8RBjMH8xYK4CclVDv7GoItxoR34KjnHBNUGv6qiPw9Efl9EuS9/oiIfK+IfP3d3tcd4rjawO8CZgQ7fY2IfBXBySuO93LvG/Tt6p3iXmf/tXlbQ0Ji1zbhh7nIfvbkl9xkn68icFcK4NcJhPD3cyDbmDDV/72EaZSakEjwA0DqD2RPLuzzTgKH5heA5XttnxOw99cRFCYqwoju9x+0HWGU+C8InUVJiGB82SHH+vpDjvUhDsl6fdiW9tn6AYI03ZwwvfkB4Evb7w99htnPVn96Yd3NVCk+FfgP7XN+lSDDo
0763t5kO/bv/721/2cSBr47rT2fJdAnAD6D4IjNW5t8K8ExOfj8/jbgV9q24oZn+U28jzMEp3e7XX6EQAHwwDcd2Pb/QXBuC0Kk9LN5vSqFJkilXScM9P3Cd48DP0RwGiqCZNU3HDhH954/c2D9+4GfO7Du6XbbP76wTggSes+1z+6V9nqWD+zrCbzvv9b+RiUh6fAzDjnv+xf+7t6xLz6w3ZcT2pxx+7t/nCCX9slv8u/ZvZs3bQMPu4fD7Lvw3We1tikJVIxvISSH+ZO8l3ux0Lerd7xIe8E9ehwLRORJQtb1d3jvv+1eX8+DCtkvMBD7kHDTo0ePHj169Dhh3A8FPno8oGh5iX+bkHG6QRDU/28JUYYHRpqlR48ePXr06NEDese4x93BErKmv4fA2ZsRpqn+kPf+buRgevTo0aNHjx493nT0VIoePXr06NGjR48ePbh/VCl69OjRo0ePHj169Lin6B3jHj169OjRo0ePHj3oHeMePXr06NGjR48ePYDeMe7Ro0ePHj169OjRA+gd4x49evTo0aNHjx49gN4x7tGjR48ePXr06NED6B3jHj169OjRo0ePHj2A3jHu0aNHjx49evTo0QPoHeMePXr06NGjR48ePYDeMe7Ro0ePHj169OjRA+gd4x49evTo0aNHjx49gN4x7tGjR48ePXr06NED6B3jHj169OjRo0ePHj0A+L8AOP3Z7In7Z58AAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "from matplotlib import pyplot as plt\n", - "import numpy as np\n", - "\n", - "label_list = [\"airplane\", \"automobile\", \"bird\", \"cat\", \"deer\", \"dog\", \"rog\", \"horse\", \"ship\", \"truck\"]\n", - "print(\"The 32 images with label of the first batch in ds_train are showed below:\")\n", - "ds_iterator = ds_train.create_dict_iterator()\n", - "next(ds_iterator)\n", - "batch_1 = next(ds_iterator)\n", - "batch_image = batch_1[\"image\"].asnumpy()\n", - "batch_label = batch_1[\"label\"].asnumpy()\n", - "%matplotlib inline\n", - "plt.figure(dpi=144)\n", - "for i,image in enumerate(batch_image):\n", - " plt.subplot(4, 8, i+1)\n", - " plt.subplots_adjust(wspace=0.2, hspace=0.2)\n", - " image = image/np.amax(image)\n", - " image = np.clip(image, 0, 1)\n", - " image = np.transpose(image,(1,2,0))\n", - " plt.imshow(image)\n", - " num = batch_label[i]\n", - " plt.title(f\"image {i+1}\\n{label_list[num]}\", y=-0.65, fontdict={\"fontsize\":8})\n", - " plt.axis('off') \n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用Summary算子记录数据\n", - "\n", - "在进行训练之前,需定义神经网络模型,本流程采用AlexNet网络。\n", - "\n", - "MindSpore提供了两种方法进行记录数据,分别为:\n", - "\n", - "- 通过Summary算子记录数据。\n", - "- 通过 `SummaryCollector` 这个callback进行记录。\n", - "\n", - "下面为在AlexNet网络中使用Summary算子记录输入图像和张量数据的配置方法。\n", - "\n", - "- 使用 `ImageSummary` 记录输入图像数据。\n", - "\n", - " 1. 在 `__init__` 方法中初始化 `ImageSummary`。\n", - " \n", - " ```python\n", - " # Init ImageSummary\n", - " self.image_summary = ops.ImageSummary()\n", - " ```\n", - " \n", - " 2. 在 `construct` 方法中使用 `ImageSummary` 算子记录输入图像。其中 \"Image\" 为该数据的名称,MindInsight在展示时,会将该名称展示出来以方便识别是哪个数据。\n", - " \n", - " ```python\n", - " # Record image by Summary operator\n", - " self.image_summary(\"Image\", x)\n", - " ```\n", - " \n", - "- 使用 `TensorSummary` 记录张量数据。\n", - "\n", - " 1. 
在 `__init__` 方法中初始化 `TensorSummary`。\n", - " \n", - " ```python\n", - " # Init TensorSummary\n", - " self.tensor_summary = ops.TensorSummary()\n", - " ```\n", - " \n", - " 2. 在`construct`方法中使用`TensorSummary`算子记录张量数据。其中\"Tensor\"为该数据的名称。\n", - " \n", - " ```python\n", - " # Record tensor by Summary operator\n", - " self.tensor_summary(\"Tensor\", x)\n", - " ```\n", - "\n", - "当前支持的Summary算子:\n", - "\n", - "- [ScalarSummary](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ScalarSummary.html): 记录标量数据\n", - "- [TensorSummary](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.TensorSummary.html): 记录张量数据\n", - "- [ImageSummary](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ImageSummary.html): 记录图片数据\n", - "- [HistogramSummary](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.HistogramSummary.html): 将张量数据转为直方图数据记录\n", - "\n", - "以下一段代码中定义AlexNet网络结构。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import TruncatedNormal\n", - "import mindspore.ops as ops\n", - "\n", - "def conv(in_channels, out_channels, kernel_size, stride=1, padding=0, pad_mode=\"valid\"):\n", - " weight = weight_variable()\n", - " return nn.Conv2d(in_channels, out_channels,\n", - " kernel_size=kernel_size, stride=stride, padding=padding,\n", - " weight_init=weight, has_bias=False, pad_mode=pad_mode)\n", - "\n", - "def fc_with_initialize(input_channels, out_channels):\n", - " weight = weight_variable()\n", - " bias = weight_variable()\n", - " return nn.Dense(input_channels, out_channels, weight, bias)\n", - "\n", - "def weight_variable():\n", - " return TruncatedNormal(0.02)\n", - "\n", - "\n", - "class AlexNet(nn.Cell):\n", - " \"\"\"\n", - " Alexnet\n", - " \"\"\"\n", - " def __init__(self, num_classes=10, channel=3):\n", - " super(AlexNet, 
self).__init__()\n", - " self.conv1 = conv(channel, 96, 11, stride=4)\n", - " self.conv2 = conv(96, 256, 5, pad_mode=\"same\")\n", - " self.conv3 = conv(256, 384, 3, pad_mode=\"same\")\n", - " self.conv4 = conv(384, 384, 3, pad_mode=\"same\")\n", - " self.conv5 = conv(384, 256, 3, pad_mode=\"same\")\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = ops.MaxPool(ksize=3, strides=2)\n", - " self.flatten = nn.Flatten()\n", - " self.fc1 = fc_with_initialize(6*6*256, 4096)\n", - " self.fc2 = fc_with_initialize(4096, 4096)\n", - " self.fc3 = fc_with_initialize(4096, num_classes)\n", - " # Init TensorSummary\n", - " self.tensor_summary = ops.TensorSummary()\n", - " # Init ImageSummary\n", - " self.image_summary = ops.ImageSummary()\n", - "\n", - " def construct(self, x):\n", - " # Record image by Summary operator\n", - " self.image_summary(\"Image\", x)\n", - " x = self.conv1(x)\n", - " # Record tensor by Summary operator\n", - " self.tensor_summary(\"Tensor\", x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.conv2(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.conv3(x)\n", - " x = self.relu(x)\n", - " x = self.conv4(x)\n", - " x = self.relu(x)\n", - " x = self.conv5(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.flatten(x)\n", - " x = self.fc1(x)\n", - " x = self.relu(x)\n", - " x = self.fc2(x)\n", - " x = self.relu(x)\n", - " x = self.fc3(x)\n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用 `SummaryCollector` 记录数据\n", - "\n", - "下面展示使用`SummaryCollector`来记录标量、直方图信息。\n", - "\n", - "在MindSpore中通过`Callback`机制,提供支持快速简易地收集损失值、参数权重、梯度等信息的`Callback`, 叫做`SummaryCollector`(详细的用法可以参考API文档中[mindspore.train.callback.SummaryCollector](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.train.html#mindspore.train.callback.SummaryCollector))。`SummaryCollector`使用方法如下: \n", - "\n", - "`SummaryCollector` 提供 `collect_specified_data` 
参数,允许用户自定义想要收集的数据。\n", - "\n", - "下面的代码展示通过 `SummaryCollector` 收集损失值以及卷积层的参数值,参数值在MindInsight中以直方图展示。\n", - "\n", - "\n", - "\n", - "\n", - "```python\n", - "specified={\"collect_metric\": True, \"histogram_regular\": \"^conv1.*|^conv2.*\",\"collect_graph\": True, \"collect_dataset_graph\": True}\n", - "summary_collector = SummaryCollector(summary_dir=\"./summary_dir/summary_01\", \n", - " collect_specified_data=specified, \n", - " collect_freq=1, \n", - " keep_default_action=False, \n", - " collect_tensor_freq=200)\n", - "```\n", - "\n", - "- `summary_dir`:指定日志保存的路径。\n", - "- `collect_specified_data`:指定需要记录的信息。\n", - "- `collect_freq`:指定使用`SummaryCollector`记录数据的频率。\n", - "- `keep_default_action`:指定是否除记录除指定信息外的其他数据信息。\n", - "- `collect_tensor_freq`:指定记录张量信息的频率。\n", - "- `\"collect_metric\"`为记录损失值标量信息。\n", - "- `\"histogram_regular\"`为记录`conv1`层和`conv2`层直方图信息。\n", - "- `\"collect_graph\"`为记录计算图信息。\n", - "- `\"collect_dataset_graph\"`为记录数据图信息。\n", - "\n", - "  程序运行过程中将在本地`8080`端口自动启动MindInsight服务并自动遍历读取当前notebook目录下`summary_dir`子目录下所有日志文件、解析进行可视化展示。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 导入模块" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import mindspore.nn as nn\n", - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor\n", - "from mindspore.nn import Accuracy\n", - "from mindspore.train.callback import SummaryCollector\n", - "from mindspore import load_checkpoint, load_param_into_net\n", - "from mindspore import Tensor, context, Model\n", - "\n", - "device_target = \"GPU\"\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=device_target)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义学习率\n", - "\n", - "以下一段代码定义学习率。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", 
- "\n", - "\n", - "def get_lr(current_step, lr_max, total_epochs, steps_per_epoch):\n", - " \"\"\"\n", - " generate learning rate array\n", - "\n", - " Args:\n", - " current_step(int): current steps of the training\n", - " lr_max(float): max learning rate\n", - " total_epochs(int): total epoch of training\n", - " steps_per_epoch(int): steps of one epoch\n", - "\n", - " Returns:\n", - " np.array, learning rate array\n", - " \"\"\"\n", - " lr_each_step = []\n", - " total_steps = steps_per_epoch * total_epochs\n", - " decay_epoch_index = [0.8 * total_steps]\n", - " for i in range(total_steps):\n", - " if i < decay_epoch_index[0]:\n", - " lr = lr_max\n", - " else:\n", - " lr = lr_max * 0.1\n", - " lr_each_step.append(lr)\n", - " lr_each_step = np.array(lr_each_step).astype(np.float32)\n", - " learning_rate = lr_each_step[current_step:]\n", - "\n", - " return learning_rate\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 执行训练" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Training ==============\n", - "epoch: 1 step: 1, loss is 2.3037791\n", - "epoch: 1 step: 2, loss is 2.3127236\n", - "epoch: 1 step: 3, loss is 2.3156757\n", - "epoch: 1 step: 4, loss is 2.2910595\n", - "epoch: 1 step: 5, loss is 2.3042145\n", - "epoch: 1 step: 6, loss is 2.3150084\n", - "epoch: 1 step: 7, loss is 2.2808924\n", - "epoch: 1 step: 8, loss is 2.3073373\n", - "epoch: 1 step: 9, loss is 2.308782\n", - "epoch: 1 step: 10, loss is 2.2957213\n", - "\n", - "...\n", - "\n", - "epoch: 10 step: 1550, loss is 0.54039395\n", - "epoch: 10 step: 1551, loss is 0.25690028\n", - "epoch: 10 step: 1552, loss is 0.26572403\n", - "epoch: 10 step: 1553, loss is 0.4429163\n", - "epoch: 10 step: 1554, loss is 0.25716054\n", - "epoch: 10 step: 1555, loss is 0.38538748\n", - "epoch: 10 step: 1556, loss is 0.12103356\n", - "epoch: 10 step: 1557, 
loss is 0.16565521\n", - "epoch: 10 step: 1558, loss is 0.4364005\n", - "epoch: 10 step: 1559, loss is 0.428179\n", - "epoch: 10 step: 1560, loss is 0.42687342\n", - "epoch: 10 step: 1561, loss is 0.6419081\n", - "epoch: 10 step: 1562, loss is 0.5843237\n", - "Epoch time: 115283.798, per step time: 73.805\n", - "============== Starting Testing ==============\n", - "============== {'Accuracy': 0.8302283653846154} ==============\n" - ] - } - ], - "source": [ - "network = AlexNet(num_classes=10)\n", - "net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - "lr = Tensor(get_lr(0, 0.002, 10, ds_train.get_dataset_size()))\n", - "net_opt = nn.Momentum(network.trainable_params(), learning_rate=lr, momentum=0.9)\n", - "time_cb = TimeMonitor(data_size=ds_train.get_dataset_size())\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=1562, keep_checkpoint_max=10)\n", - "ckpoint_cb = ModelCheckpoint(directory=\"./models/ckpt/mindinsight_dashboard\", prefix=\"checkpoint_alexnet\", config=config_ck)\n", - "model = Model(network, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()})\n", - "\n", - "summary_base_dir = \"./summary_dir\"\n", - "os.system(f\"mindinsight start --summary-base-dir {summary_base_dir} --port=8080\")\n", - "\n", - "# Init a SummaryCollector callback instance, and use it in model.train or model.eval\n", - "specified = {\"collect_metric\": True, \"histogram_regular\": \"^conv1.*|^conv2.*\", \"collect_graph\": True, \"collect_dataset_graph\": True}\n", - "summary_collector = SummaryCollector(summary_dir=\"./summary_dir/summary_01\", collect_specified_data=specified, collect_freq=1, keep_default_action=False, collect_tensor_freq=200)\n", - "\n", - "print(\"============== Starting Training ==============\")\n", - "model.train(epoch=10, train_dataset=ds_train, callbacks=[time_cb, ckpoint_cb, LossMonitor(), summary_collector], dataset_sink_mode=True)\n", - "\n", - "print(\"============== Starting Testing ==============\")\n", - 
"param_dict = load_checkpoint(\"./models/ckpt/mindinsight_dashboard/checkpoint_alexnet-10_1562.ckpt\")\n", - "load_param_into_net(network, param_dict)\n", - "acc = model.eval(ds_eval, callbacks=summary_collector, dataset_sink_mode=True)\n", - "print(\"============== {} ==============\".format(acc))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## MindInsight看板\n", - "\n", - "在本地浏览器中打开地址:`127.0.0.1:8080`,进入到可视化面板。\n", - "\n", - "![](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/mindinsight_panel.png)\n", - "\n", - "在上图所示面板中可以看到`summary_01`日志文件目录,点击**训练看板**进入到下图所示的训练数据展示面板,该面板展示了标量数据、直方图、图像和张量信息,并随着训练、测试的进行实时刷新数据,实时显示训练过程参数的变化情况。\n", - "\n", - "![](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/mindinsight_panel2.png)\n", - "\n", - "### 标量可视化\n", - "\n", - "标量可视化用于展示训练过程中标量的变化趋势,点击打开训练标量信息展示面板,该面板记录了迭代计算过程中的损失值标量信息,如下图展示了损失值标量趋势图。\n", - "\n", - "![](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/scalar_panel.png)\n", - "\n", - "上图展示了神经网络在训练过程中损失值的变化过程。横坐标是训练步骤,纵坐标是损失值。\n", - "\n", - "图中右上角有几个按钮功能,从左到右功能分别是全屏展示,切换Y轴比例,开启/关闭框选,分步回退和还原图形。\n", - "\n", - "- 全屏展示即全屏展示该标量曲线,再点击一次即可恢复。\n", - "- 切换Y轴比例是指可以将Y轴坐标进行对数转换。\n", - "- 开启/关闭框选是指可以框选图中部分区域,并放大查看该区域,可以在已放大的图形上叠加框选。\n", - "- 分步回退是指对同一个区域连续框选并放大查看时,可以逐步撤销操作。\n", - "- 还原图形是指进行了多次框选后,点击此按钮可以将图还原回原始状态。\n", - "\n", - "![](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/scalar_select.png)\n", - "\n", - "上图展示的标量可视化的功能区,提供了根据选择不同标签,水平轴的不同维度和平滑度来查看标量信息的功能。\n", - "\n", - "- 标签选择:提供了对所有标签进行多项选择的功能,用户可以通过勾选所需的标签,查看对应的标量信息。\n", - "- 水平轴:可以选择“步骤”、“相对时间”、“绝对时间”中的任意一项,来作为标量曲线的水平轴。\n", - "- 平滑度:可以通过调整平滑度,对标量曲线进行平滑处理。\n", - "- 标量合成:可以选中两条标量曲线进行合成并展示在一个图中,以方便对两条曲线进行对比或者查看合成后的图。\n", - " 标量合成的功能区与标量可视化的功能区相似。其中与标量可视化功能区不一样的地方,在于标签选择时,标量合成功能最多只能同时选择两个标签,将其曲线合成并展示。\n", - "\n", - "### 直方图可视化\n", - "\n", - "\n", - 
"直方图用于将用户所指定的张量以直方图的形式展示。点击打开直方图展示面板,以直方图的形式记录了在迭代过程中所有层参数分布信息。\n", - "\n", - "![](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/histogram_panel.png)\n", - "\n", - "如下图为`conv1`层参数分布信息,点击图中右上角,可以将图放大。\n", - "\n", - "![](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/histogram.png)\n", - "\n", - "下图为直方图功能区。\n", - "\n", - "![](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/histogram_func.png)\n", - "\n", - "上图展示直方图的功能区,包含以下内容:\n", - "\n", - "- 标签选择:提供了对所有标签进行多项选择的功能,用户可以通过勾选所需的标签,查看对应的直方图。\n", - "- 纵轴:可以选择步骤、相对时间、绝对时间中的任意一项,来作为直方图纵轴显示的数据。\n", - "- 视角:可以选择正视和俯视中的一种。正视是指从正面的角度查看直方图,此时不同步骤之间的数据会覆盖在一起。俯视是指偏移以45度角俯视直方图区域,这时可以呈现不同步骤之间数据的差异。\n", - "\n", - "### 图像可视化\n", - "\n", - "图像可视化用于展示用户所指定的图片。点击数据抽样展示面板,展示了每个一步进行处理的图像信息。\n", - "\n", - "下图为展示`summary_01`记录的图像信息。\n", - "\n", - "![](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/image_panel.png)\n", - "\n", - "通过滑动上图中的\"步骤\"滑条,查看不同步骤的图片。\n", - "\n", - "![](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/image_function.png)\n", - "\n", - "上图展示图像可视化的功能区,提供了选择查看不同标签,不同亮度和不同对比度来查看图片信息。\n", - "\n", - "- 标签:提供了对所有标签进行多项选择的功能,用户可以通过勾选所需的标签,查看对应的图片信息。\n", - "- 亮度调整:可以调整所展示的所有图片亮度。\n", - "- 对比度调整:可以调整所展示的所有图片对比度。\n", - "\n", - "### 张量可视化\n", - "\n", - "张量可视化用于将张量以表格以及直方图的形式进行展示。\n", - "\n", - "![](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/tensor_func.png)\n", - "\n", - "上图展示了张量可视化的功能区,包含以下内容:\n", - "\n", - "- 标签选择:提供了对所有标签进行多项选择的功能,用户可以通过勾选所需的标签,查看对应的表格数据或者直方图。\n", - "- 视图:可以选择表格或者直方图来展示tensor数据。在直方图视图下存在纵轴和视角的功能选择。\n", - "- 纵轴:可以选择步骤、相对时间、绝对时间中的任意一项,来作为直方图纵轴显示的数据。\n", - "- 视角:可以选择正视和俯视中的一种。正视是指从正面的角度查看直方图,此时不同步骤之间的数据会覆盖在一起。俯视是指 偏移以45度角俯视直方图区域,这时可以呈现不同步骤之间数据的差异。\n", - "\n", - "![](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/tensor.png)\n", - "\n", - "上图中将用户所记录的张量以表格的形式展示,包含以下功能:\n", 
- "\n", - "- 点击表格右边小方框按钮,可以将表格放大。\n", - "- 表格中白色方框显示当前展示的是哪个维度下的张量数据,其中冒号\":\"表示当前维度的所有值,可以在方框输入对应的索引或者:后按Enter键或者点击后边的打勾按钮来查询特定维度的张量数据。 假设某维度是32,则其索引范围是-32到31。注意:可以查询0维到2维的张量数据,不支持查询超过两维的张量数据,即不能设置超过两个冒号\":\"的查询条件。\n", - "- 拖拽表格下方的空心圆圈可以查询特定步骤的张量数据。\n", - "\n", - "### 计算图可视化\n", - "\n", - "点击计算图可视化用于展示计算图的图结构,数据流以及控制流的走向,支持展示summary日志文件与通过`context`的`save_graphs`参数导出的`pb`文件。\n", - "\n", - "![graph.png](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/caculate_graph.png)\n", - "\n", - "上展示了计算图的网络结构。如图中所展示的,在展示区中,选中其中一个算子(图中圈红算子),可以看到该算子有两个输入和一个输出(实线代表算子的数据流走向)。\n", - "\n", - "![graph-sidebar.png](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/graph_sidebar.png)\n", - "\n", - "上图展示了计算图可视化的功能区,包含以下内容:\n", - "\n", - "- 文件选择框:可以选择查看不同文件的计算图。\n", - "- 搜索框:可以对节点进行搜索,输入节点名称点击回车,即可展示该节点。\n", - "- 缩略图:展示整个网络图结构的缩略图,在查看超大图结构时,方便查看当前浏览的区域。\n", - "- 节点信息:展示选中的节点的基本信息,包括节点的名称、属性、输入节点、输出节点等信息。\n", - "- 图例:展示的是计算图中各个图标的含义。\n", - "\n", - "### 数据图可视化\n", - "\n", - "数据图可视化用于展示单次模型训练的数据处理和数据增强信息。\n", - "\n", - "![data-function.png](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/data_function.png)\n", - "\n", - "上图展示的数据图功能区包含以下内容:\n", - "\n", - "- 图例:展示数据溯源图中各个图标的含义。\n", - "- 数据处理流水线:展示训练所使用的数据处理流水线,可以选择图中的单个节点查看详细信息。\n", - "- 节点信息:展示选中的节点的基本信息,包括使用的数据处理和增强算子的名称、参数等。\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 单独记录损失值标量\n", - "\n", - "\n", - "为了降低性能开销和日志文件大小,可以单独记录关心的数据。单独记录标量、参数分布直方图、计算图或数据图信息,可以通过配置`specified`参数为相应的值来单独记录。单独记录图像或张量信息,可以在AlexNet网络的`construct`方法中使用`ImageSummary`算子或`TensorSummary`算子来单独记录。\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 关闭MindInsight服务\n", - "\n", - "在终端命令行中执行以下代码关闭MindInsight服务。\n", - "\n", - "```shell\n", - "mindinsight stop --port 8080\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 注意事项和规格\n", - "1. 
为了控制列出summary文件目录的用时,MindInsight最多支持发现999个summary文件目录。\n", - "2. 不能同时使用多个 `SummaryRecord` 实例 (`SummaryCollector` 中使用了 `SummaryRecord`)。\n", - "\n", - " 如果在 `model.train` 或者 `model.eval` 的callback列表中使用两个及以上的 `SummaryCollector` 实例,则视为同时使用 `SummaryRecord`,导致记录数据失败。\n", - "\n", - " 自定义callback中如果使用 `SummaryRecord`,则其不能和 `SummaryCollector` 同时使用。\n", - "\n", - " 正确代码:\n", - " ```\n", - " ...\n", - " summary_collector = SummaryCollector('./summary_dir')\n", - " model.train(2, train_dataset, callbacks=[summary_collector])\n", - "\n", - " ...\n", - " model.eval(dataset, callbacks=[summary_collector])\n", - " ```\n", - "\n", - " 错误代码:\n", - " ```\n", - " ...\n", - " summary_collector1 = SummaryCollector('./summary_dir1')\n", - " summary_collector2 = SummaryCollector('./summary_dir2')\n", - " model.train(2, train_dataset, callbacks=[summary_collector1, summary_collector2])\n", - " ```\n", - "\n", - " 错误代码:\n", - " ```\n", - " ...\n", - " # Note: the 'ConfusionMatrixCallback' is user-defined, and it uses SummaryRecord to record data.\n", - " confusion_callback = ConfusionMatrixCallback('./summary_dir1')\n", - " summary_collector = SummaryCollector('./summary_dir2')\n", - " model.train(2, train_dataset, callbacks=[confusion_callback, summary_collector])\n", - " ```\n", - "3. 每个summary日志文件目录中,应该只放置一次训练的数据。一个summary日志目录中如果存放了多次训练的summary数据,MindInsight在可视化数据时会将这些训练的summary数据进行叠加展示,可能会与预期可视化效果不相符。\n", - "4. 当前 `SummaryCollector` 和 `SummaryRecord` 不支持GPU多卡运行的场景。\n", - "5. 目前MindSpore仅支持在Ascend 910 AI处理器上导出算子融合后的计算图。\n", - "6. 在训练中使用Summary算子收集数据时,`HistogramSummary` 算子会影响性能,所以请尽量少地使用。\n", - "7. 
为了控制内存占用,MindInsight对标签(tag)数目和步骤(step)数目进行了限制:\n", - " - 每个训练看板的最大标签数量为300个标签。标量标签、图片标签、计算图标签、参数分布图(直方图)标签、张量标签的数量总和不得超过300个。特别地,每个训练看板最多有10个计算图标签、6个张量标签。当实际标签数量超过这一限制时,将依照MindInsight的处理顺序,保留最近处理的300个标签。\n", - " - 每个训练看板的每个标量标签最多有1000个步骤的数据。当实际步骤的数目超过这一限制时,将对数据进行随机采样,以满足这一限制。\n", - " - 每个训练看板的每个图片标签最多有10个步骤的数据。当实际步骤的数目超过这一限制时,将对数据进行随机采样,以满足这一限制。\n", - " - 每个训练看板的每个参数分布图(直方图)标签最多有50个步骤的数据。当实际步骤的数目超过这一限制时,将对数据进行随机采样,以满足这一限制。\n", - " - 每个训练看板的每个张量标签最多有20个步骤的数据。当实际步骤的数目超过这一限制时,将对数据进行随机采样,以满足这一限制。\n", - "8. 由于`TensorSummary`会记录完整Tensor数据,数据量通常会比较大,为了控制内存占用和出于性能上的考虑,MindInsight对Tensor的大小以及返回前端展示的数值个数进行以下限制:\n", - " - MindInsight最大支持加载含有1千万个数值的Tensor。\n", - " - Tensor加载后,在张量可视的表格视图下,最大支持查看10万个数值,如果所选择的维度查询得到的数值超过这一限制,则无法显示。\n", - "\n", - "9. 由于张量可视(`TensorSummary`)会记录原始张量数据,需要的存储空间较大。使用`TensorSummary`前和训练过程中请注意检查系统存储空间充足。\n", - "\n", - " 通过以下方法可以降低张量可视功能的存储空间占用:\n", - "\n", - " 1)避免使用`TensorSummary`记录较大的Tensor。\n", - "\n", - " 2)减少网络中`TensorSummary`算子的使用个数。\n", - "\n", - " 功能使用完毕后,请及时清理不再需要的训练日志,以释放磁盘空间。\n", - "\n", - " 备注:估算`TensorSummary`空间使用量的方法如下:\n", - "\n", - " 一个`TensorSummary数据的大小 = Tensor中的数值个数 * 4 bytes`。假设使用`TensorSummary`记录的Tensor大小为`32 * 1 * 256 * 256`,则一个`TensorSummary`数据大约需要`32 * 1 * 256 * 256 * 4 bytes = 8,388,608 bytes = 8MiB`。`TensorSummary`默认会记录20个步骤的数据,则记录这20组数据需要的空间约为`20 * 8 MiB = 160MiB`。需要注意的是,由于数据结构等因素的开销,实际使用的存储空间会略大于160MiB。\n", - "10. 
当使用`TensorSummary`时,由于记录完整Tensor数据,训练日志文件较大,MindInsight需要更多时间解析训练日志文件,请耐心等待。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结\n", - "\n", - "本次体验流程为完整的MindSpore深度学习及MindInsight可视化展示的过程,包括了下载数据集及预处理过程,构建网络、损失函数和优化器过程,生成模型并进行训练、验证的过程,以及启动MindInsight服务进行训练过程可视化展示。读者可以基于本次体验流程构建自己的网络模型进行训练,并使用`SummaryCollector`以及Summary算子记录关心的数据,然后在MindInsight服务看板中进行可视化展示,根据MindInsight服务中展示的结果调整相应的参数以提高训练精度。\n", - "\n", - "以上便完成了标量、直方图、图像和张量可视化的体验,我们通过本次体验全面了解了MindSpore执行训练的过程和MindInsight在标量、直方图、图像、张量、计算图和数据图可视化的应用,理解了如何使用`SummaryColletor`记录训练过程中的标量、直方图、图像、张量、计算图和数据图数据。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} \ No newline at end of file diff --git a/tutorials/notebook/mindinsight/mindspore_lineage_and_scalars_comparison.ipynb b/tutorials/notebook/mindinsight/mindspore_lineage_and_scalars_comparison.ipynb deleted file mode 100644 index 61110f04a18855960415b97c5f9e5f85f3b97ab1..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindinsight/mindspore_lineage_and_scalars_comparison.ipynb +++ /dev/null @@ -1,1233 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#
    MindInsight的溯源分析和对比分析体验" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "在模型调参的场景下,需要多次调整模型超参并进行多次训练,这个过程,往往需要手动记录每次训练使用参数以及训练结果。为此,MindSpore提供了自动记录模型参数、训练信息及训练结果评估指标的功能,并通过MindInsight进行可视化展示。本次体验会从MindInsight的数据记录、可视化效果、如何方便用户在模型调优和数据调优上做一次整体流程的体验。\n", - "\n", - "下面按照MindSpore的训练数据模型的正常步骤进行,使用`SummaryCollector`进行数据保存操作,本次体验的整体流程如下:\n", - "\n", - "1. 数据集的准备,这里使用的是MNIST数据集。\n", - "\n", - "2. 构建一个网络,这里使用LeNet网络。\n", - "\n", - "3. 记录数据及启动训练。\n", - "\n", - "4. 启动MindInsight服务。\n", - "\n", - "5. 溯源分析的使用。\n", - "\n", - "6. 对比分析的使用。\n", - "\n", - "\n", - "> 本文档适用于GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据集准备" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据集下载" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "执行如下命令,进行数据集下载并解压,将解压后的数据集移动到指定位置。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test 
https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据集处理" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "数据集处理对于训练非常重要,好的数据集可以有效提高训练精度和效率。在加载数据集前,我们通常会对数据集进行一些处理。\n", - "
    我们定义一个函数`create_dataset`来创建数据集。在这个函数中,我们定义好需要进行的数据增强和处理操作:\n", - "\n", - "1. 定义数据集。\n", - "2. 定义进行数据增强和处理所需要的一些参数。\n", - "3. 根据参数,生成对应的数据增强操作。\n", - "4. 使用`map`映射函数,将数据操作应用到数据集。\n", - "5. 对生成的数据集进行处理。\n", - "\n", - "具体的数据集操作可以在MindInsight的数据溯源中进行可视化分析。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore.common import dtype as mstype\n", - "import mindspore.dataset as ds\n", - "\n", - "def create_dataset(data_path, batch_size=16, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\" create dataset for train or test\n", - " Args:\n", - " data_path (str): Data path\n", - " batch_size (int): The number of data records in each group\n", - " repeat_size (int): The number of replicated data records\n", - " num_parallel_workers (int): The number of parallel workers\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define some parameters needed for data enhancement and rough justification\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # according to the parameters, generate the corresponding data enhancement method\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR)\n", - " # if you need to use SummaryCollector to extract image data, do not use the following normalize operator operation\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # using map method to apply operations to a dataset\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, 
input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=resize_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_nml_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " \n", - " # process the generated dataset\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义LeNet5网络" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本例采用的网络模型为LeNet5网络,对于手写数字分类表现得非常出色,网络模型定义如下。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.ops as ops\n", - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " def __init__(self):\n", - " super(LeNet5, self).__init__()\n", - " self.batch_size = 32 \n", - " self.conv1 = nn.Conv2d(1, 6, 5, pad_mode=\"valid\")\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode=\"valid\")\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, Normal(0.02), Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, Normal(0.02), Normal(0.02))\n", - " self.fc3 = nn.Dense(84, 10)\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - " self.image_summary = ops.ImageSummary()\n", - " 
self.tensor_summary = ops.TensorSummary()\n", - "\n", - " def construct(self, x):\n", - " self.image_summary(\"image\", x)\n", - " self.tensor_summary(\"tensor\", x)\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x)\n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 记录数据及启动训练" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore 提供 `SummaryCollector` 接口来记录训练过程中的信息。\n", - "\n", - "为了更好的体验溯源分析和对比分析的效果,这里将调整学习率(`learning_rate`)、迭代次数(`epoch_size`)、batch数量(`batch_size`)来多次训练模型,并使用`SummaryCollector`保存对应的数据。\n", - "\n", - "`learning_rate`取值分别为0.01和0.05。\n", - "\n", - "`epoch_size`取值分别为2和5。\n", - "\n", - "`batch_size`取值分别为16和32。\n", - "\n", - "每次调整一个参数进行训练,总共分2x2x2=8组参数。\n", - "\n", - "`SummaryCollector`的更多用法,请参考[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.train.html#mindspore.train.callback.SummaryCollector)。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "================= The Situation 1 =================\n", - "== learning_rate:0.01, epoch_size:2, batch_size:16 ==\n", - "================ Starting Training ================\n", - "epoch: 1 step: 125, loss is 2.296706\n", - "epoch: 1 step: 250, loss is 2.2627764\n", - "epoch: 1 step: 375, loss is 2.3244872\n", - "epoch: 1 step: 500, loss is 2.3250148\n", - "epoch: 1 step: 625, loss is 2.2620986\n", - "epoch: 1 step: 750, loss is 2.3121898\n", - "epoch: 1 step: 875, loss is 2.3026366\n", - "epoch: 1 step: 1000, loss is 2.2881525\n", - "epoch: 1 step: 1125, loss is 2.3082426\n", - "epoch: 1 step: 1250, loss is 2.2710335\n", - "epoch: 1 step: 1375, loss is 2.270731\n", - "epoch: 1 step: 1500, loss is 
0.9243496\n", - "epoch: 1 step: 1625, loss is 0.6205256\n", - "epoch: 1 step: 1750, loss is 0.24102339\n", - "epoch: 1 step: 1875, loss is 0.22378448\n", - "epoch: 1 step: 2000, loss is 0.29994896\n", - "epoch: 1 step: 2125, loss is 0.17046359\n", - "epoch: 1 step: 2250, loss is 0.0100224735\n", - "epoch: 1 step: 2375, loss is 0.48139822\n", - "epoch: 1 step: 2500, loss is 0.06742601\n", - "epoch: 1 step: 2625, loss is 0.0052048573\n", - "epoch: 1 step: 2750, loss is 0.3211555\n", - "epoch: 1 step: 2875, loss is 0.31865978\n", - "epoch: 1 step: 3000, loss is 0.01020497\n", - "epoch: 1 step: 3125, loss is 0.50366527\n", - "epoch: 1 step: 3250, loss is 0.009847044\n", - "epoch: 1 step: 3375, loss is 0.090079\n", - "epoch: 1 step: 3500, loss is 0.011234925\n", - "epoch: 1 step: 3625, loss is 0.10852169\n", - "epoch: 1 step: 3750, loss is 0.10376892\n", - "epoch: 2 step: 125, loss is 0.02567438\n", - "epoch: 2 step: 250, loss is 0.04415862\n", - "epoch: 2 step: 375, loss is 0.23834515\n", - "epoch: 2 step: 500, loss is 0.0027683496\n", - "epoch: 2 step: 625, loss is 0.03725496\n", - "epoch: 2 step: 750, loss is 0.0075114276\n", - "epoch: 2 step: 875, loss is 0.18687367\n", - "epoch: 2 step: 1000, loss is 0.037377894\n", - "epoch: 2 step: 1125, loss is 0.06776899\n", - "epoch: 2 step: 1250, loss is 0.4728808\n", - "epoch: 2 step: 1375, loss is 0.0015088853\n", - "epoch: 2 step: 1500, loss is 0.11711723\n", - "epoch: 2 step: 1625, loss is 0.102257855\n", - "epoch: 2 step: 1750, loss is 0.0031716113\n", - "epoch: 2 step: 1875, loss is 0.061183207\n", - "epoch: 2 step: 2000, loss is 0.019151682\n", - "epoch: 2 step: 2125, loss is 0.021350795\n", - "epoch: 2 step: 2250, loss is 0.106557846\n", - "epoch: 2 step: 2375, loss is 0.3371804\n", - "epoch: 2 step: 2500, loss is 0.283691\n", - "epoch: 2 step: 2625, loss is 0.009455755\n", - "epoch: 2 step: 2750, loss is 0.20017545\n", - "epoch: 2 step: 2875, loss is 0.009389517\n", - "epoch: 2 step: 3000, loss is 0.04983216\n", - 
"epoch: 2 step: 3125, loss is 0.03779413\n", - "epoch: 2 step: 3250, loss is 0.0079332255\n", - "epoch: 2 step: 3375, loss is 0.2990877\n", - "epoch: 2 step: 3500, loss is 0.01983778\n", - "epoch: 2 step: 3625, loss is 0.09528342\n", - "epoch: 2 step: 3750, loss is 0.008239745\n", - "================ Starting Testing ================\n", - "============ Accuracy:{'Accuracy': 0.9797} ============\n", - "\n", - "\n", - "================= The Situation 2 =================\n", - "== learning_rate:0.01, epoch_size:2, batch_size:32 ==\n", - "================ Starting Training ================\n", - "epoch: 1 step: 125, loss is 2.2925339\n", - "epoch: 1 step: 250, loss is 2.3008182\n", - "epoch: 1 step: 375, loss is 2.3030884\n", - "epoch: 1 step: 500, loss is 2.2976336\n", - "epoch: 1 step: 625, loss is 2.3130703\n", - "epoch: 1 step: 750, loss is 2.300329\n", - "epoch: 1 step: 875, loss is 2.2933068\n", - "epoch: 1 step: 1000, loss is 2.3033197\n", - "epoch: 1 step: 1125, loss is 2.2951014\n", - "epoch: 1 step: 1250, loss is 2.295413\n", - "epoch: 1 step: 1375, loss is 2.3133006\n", - "epoch: 1 step: 1500, loss is 0.85154563\n", - "epoch: 1 step: 1625, loss is 0.47536567\n", - "epoch: 1 step: 1750, loss is 0.26820138\n", - "epoch: 1 step: 1875, loss is 0.4543515\n", - "epoch: 2 step: 125, loss is 0.32313684\n", - "epoch: 2 step: 250, loss is 0.22960262\n", - "epoch: 2 step: 375, loss is 0.046680164\n", - "epoch: 2 step: 500, loss is 0.05865948\n", - "epoch: 2 step: 625, loss is 0.0072424933\n", - "epoch: 2 step: 750, loss is 0.086514264\n", - "epoch: 2 step: 875, loss is 0.11134705\n", - "epoch: 2 step: 1000, loss is 0.020027155\n", - "epoch: 2 step: 1125, loss is 0.12832528\n", - "epoch: 2 step: 1250, loss is 0.055560835\n", - "epoch: 2 step: 1375, loss is 0.028572561\n", - "epoch: 2 step: 1500, loss is 0.19585766\n", - "epoch: 2 step: 1625, loss is 0.14577985\n", - "epoch: 2 step: 1750, loss is 0.23607145\n", - "epoch: 2 step: 1875, loss is 0.0840621\n", - 
"================ Starting Testing ================\n", - "============ Accuracy:{'Accuracy': 0.9787} ============\n", - "\n", - "\n", - "================= The Situation 3 =================\n", - "== learning_rate:0.01, epoch_size:5, batch_size:16 ==\n", - "================ Starting Training ================\n", - "epoch: 1 step: 125, loss is 2.328815\n", - "epoch: 1 step: 250, loss is 2.3232577\n", - "epoch: 1 step: 375, loss is 2.2851524\n", - "epoch: 1 step: 500, loss is 2.2837648\n", - "epoch: 1 step: 625, loss is 2.3328993\n", - "epoch: 1 step: 750, loss is 2.2846725\n", - "epoch: 1 step: 875, loss is 2.3305407\n", - "epoch: 1 step: 1000, loss is 2.3256888\n", - "epoch: 1 step: 1125, loss is 2.3163714\n", - "epoch: 1 step: 1250, loss is 2.2763608\n", - "epoch: 1 step: 1375, loss is 2.3155422\n", - "epoch: 1 step: 1500, loss is 1.2162496\n", - "epoch: 1 step: 1625, loss is 0.53659093\n", - "epoch: 1 step: 1750, loss is 0.23527911\n", - "epoch: 1 step: 1875, loss is 0.105321795\n", - "epoch: 1 step: 2000, loss is 0.19657795\n", - "epoch: 1 step: 2125, loss is 0.5824721\n", - "epoch: 1 step: 2250, loss is 0.38761842\n", - "epoch: 1 step: 2375, loss is 0.10887136\n", - "epoch: 1 step: 2500, loss is 0.2810255\n", - "epoch: 1 step: 2625, loss is 0.9004075\n", - "epoch: 1 step: 2750, loss is 0.13873589\n", - "epoch: 1 step: 2875, loss is 0.010646933\n", - "epoch: 1 step: 3000, loss is 0.073572345\n", - "epoch: 1 step: 3125, loss is 0.25893953\n", - "epoch: 1 step: 3250, loss is 0.028899945\n", - "epoch: 1 step: 3375, loss is 0.3362317\n", - "epoch: 1 step: 3500, loss is 0.02972875\n", - "epoch: 1 step: 3625, loss is 0.0014936002\n", - "epoch: 1 step: 3750, loss is 0.18348369\n", - "epoch: 2 step: 125, loss is 0.0075014555\n", - "epoch: 2 step: 250, loss is 0.08570729\n", - "epoch: 2 step: 375, loss is 0.12431516\n", - "epoch: 2 step: 500, loss is 0.18875955\n", - "epoch: 2 step: 625, loss is 0.01166816\n", - "epoch: 2 step: 750, loss is 0.45471027\n", - "epoch: 2 
step: 875, loss is 0.07407855\n", - "epoch: 2 step: 1000, loss is 0.47525182\n", - "epoch: 2 step: 1125, loss is 0.02400005\n", - "epoch: 2 step: 1250, loss is 0.010517514\n", - "epoch: 2 step: 1375, loss is 0.02913664\n", - "epoch: 2 step: 1500, loss is 0.25256392\n", - "epoch: 2 step: 1625, loss is 0.21558005\n", - "epoch: 2 step: 1750, loss is 0.013623273\n", - "epoch: 2 step: 1875, loss is 0.020157713\n", - "epoch: 2 step: 2000, loss is 0.00023730143\n", - "epoch: 2 step: 2125, loss is 0.04196192\n", - "epoch: 2 step: 2250, loss is 0.22700204\n", - "epoch: 2 step: 2375, loss is 0.15068744\n", - "epoch: 2 step: 2500, loss is 0.18599582\n", - "epoch: 2 step: 2625, loss is 0.11737528\n", - "epoch: 2 step: 2750, loss is 0.003812017\n", - "epoch: 2 step: 2875, loss is 0.008812527\n", - "epoch: 2 step: 3000, loss is 0.035302274\n", - "epoch: 2 step: 3125, loss is 0.18453324\n", - "epoch: 2 step: 3250, loss is 0.0103479475\n", - "epoch: 2 step: 3375, loss is 0.009817297\n", - "epoch: 2 step: 3500, loss is 0.032968633\n", - "epoch: 2 step: 3625, loss is 0.0034950136\n", - "epoch: 2 step: 3750, loss is 0.0057869614\n", - "epoch: 3 step: 125, loss is 0.27577397\n", - "epoch: 3 step: 250, loss is 0.007953547\n", - "epoch: 3 step: 375, loss is 0.21745506\n", - "epoch: 3 step: 500, loss is 0.0020471578\n", - "epoch: 3 step: 625, loss is 0.009939543\n", - "epoch: 3 step: 750, loss is 0.032122627\n", - "epoch: 3 step: 875, loss is 0.03780477\n", - "epoch: 3 step: 1000, loss is 0.0076191444\n", - "epoch: 3 step: 1125, loss is 0.007801161\n", - "epoch: 3 step: 1250, loss is 0.0006592998\n", - "epoch: 3 step: 1375, loss is 0.07005897\n", - "epoch: 3 step: 1500, loss is 0.016776687\n", - "epoch: 3 step: 1625, loss is 0.18362688\n", - "epoch: 3 step: 1750, loss is 0.080620855\n", - "epoch: 3 step: 1875, loss is 0.6229161\n", - "epoch: 3 step: 2000, loss is 0.0055219308\n", - "epoch: 3 step: 2125, loss is 0.0009366708\n", - "epoch: 3 step: 2250, loss is 0.16341054\n", - "epoch: 3 
step: 2375, loss is 0.0015036274\n", - "epoch: 3 step: 2500, loss is 0.013156504\n", - "epoch: 3 step: 2625, loss is 0.0027046946\n", - "epoch: 3 step: 2750, loss is 0.0009584853\n", - "epoch: 3 step: 2875, loss is 0.22423576\n", - "epoch: 3 step: 3000, loss is 0.05459709\n", - "epoch: 3 step: 3125, loss is 0.00039554507\n", - "epoch: 3 step: 3250, loss is 0.010483981\n", - "epoch: 3 step: 3375, loss is 0.032579858\n", - "epoch: 3 step: 3500, loss is 0.000750014\n", - "epoch: 3 step: 3625, loss is 0.00826493\n", - "epoch: 3 step: 3750, loss is 0.049514227\n", - "epoch: 4 step: 125, loss is 0.015774932\n", - "epoch: 4 step: 250, loss is 0.06803825\n", - "epoch: 4 step: 375, loss is 0.00016382817\n", - "epoch: 4 step: 500, loss is 0.078078\n", - "epoch: 4 step: 625, loss is 0.14985096\n", - "epoch: 4 step: 750, loss is 0.12369352\n", - "epoch: 4 step: 875, loss is 0.021003578\n", - "epoch: 4 step: 1000, loss is 0.0004177717\n", - "epoch: 4 step: 1125, loss is 0.03918505\n", - "epoch: 4 step: 1250, loss is 0.16861053\n", - "epoch: 4 step: 1375, loss is 0.19486608\n", - "epoch: 4 step: 1500, loss is 0.024210513\n", - "epoch: 4 step: 1625, loss is 0.00055875443\n", - "epoch: 4 step: 1750, loss is 0.021766845\n", - "epoch: 4 step: 1875, loss is 0.04386355\n", - "epoch: 4 step: 2000, loss is 0.6126808\n", - "epoch: 4 step: 2125, loss is 0.00016478299\n", - "epoch: 4 step: 2250, loss is 0.045052838\n", - "epoch: 4 step: 2375, loss is 0.009033074\n", - "epoch: 4 step: 2500, loss is 0.083323196\n", - "epoch: 4 step: 2625, loss is 0.0013404265\n", - "epoch: 4 step: 2750, loss is 0.00039283157\n", - "epoch: 4 step: 2875, loss is 0.023555582\n", - "epoch: 4 step: 3000, loss is 0.03309316\n", - "epoch: 4 step: 3125, loss is 0.00038078718\n", - "epoch: 4 step: 3250, loss is 0.0011988003\n", - "epoch: 4 step: 3375, loss is 0.1094174\n", - "epoch: 4 step: 3500, loss is 0.4831129\n", - "epoch: 4 step: 3625, loss is 4.419859e-05\n", - "epoch: 4 step: 3750, loss is 0.000106370804\n", 
- "epoch: 5 step: 125, loss is 0.031739432\n", - "epoch: 5 step: 250, loss is 0.0023120884\n", - "epoch: 5 step: 375, loss is 0.19174251\n", - "epoch: 5 step: 500, loss is 0.115054466\n", - "epoch: 5 step: 625, loss is 0.00063831004\n", - "epoch: 5 step: 750, loss is 0.011749344\n", - "epoch: 5 step: 875, loss is 0.00043033107\n", - "epoch: 5 step: 1000, loss is 0.1209258\n", - "epoch: 5 step: 1125, loss is 0.085516274\n", - "epoch: 5 step: 1250, loss is 0.011499016\n", - "epoch: 5 step: 1375, loss is 0.0013453395\n", - "epoch: 5 step: 1500, loss is 0.1783311\n", - "epoch: 5 step: 1625, loss is 0.000960443\n", - "epoch: 5 step: 1750, loss is 0.00059457694\n", - "epoch: 5 step: 1875, loss is 0.08647974\n", - "epoch: 5 step: 2000, loss is 0.0013335779\n", - "epoch: 5 step: 2125, loss is 0.02167116\n", - "epoch: 5 step: 2250, loss is 0.0005232549\n", - "epoch: 5 step: 2375, loss is 0.016557036\n", - "epoch: 5 step: 2500, loss is 0.046004463\n", - "epoch: 5 step: 2625, loss is 0.00019306582\n", - "epoch: 5 step: 2750, loss is 0.0066435235\n", - "epoch: 5 step: 2875, loss is 0.0028824392\n", - "epoch: 5 step: 3000, loss is 0.24145652\n", - "epoch: 5 step: 3125, loss is 0.063728176\n", - "epoch: 5 step: 3250, loss is 0.0018528743\n", - "epoch: 5 step: 3375, loss is 0.005786577\n", - "epoch: 5 step: 3500, loss is 0.0063151703\n", - "epoch: 5 step: 3625, loss is 9.56385e-05\n", - "epoch: 5 step: 3750, loss is 0.005796452\n", - "================ Starting Testing ================\n", - "============ Accuracy:{'Accuracy': 0.9819} ============\n", - "\n", - "\n", - "================= The Situation 4 =================\n", - "== learning_rate:0.01, epoch_size:5, batch_size:32 ==\n", - "================ Starting Training ================\n", - "epoch: 1 step: 125, loss is 2.3141277\n", - "epoch: 1 step: 250, loss is 2.299097\n", - "epoch: 1 step: 375, loss is 2.2934532\n", - "epoch: 1 step: 500, loss is 2.3099198\n", - "epoch: 1 step: 625, loss is 2.305512\n", - "epoch: 1 step: 
750, loss is 2.3175468\n", - "epoch: 1 step: 875, loss is 2.3058007\n", - "epoch: 1 step: 1000, loss is 2.3117945\n", - "epoch: 1 step: 1125, loss is 2.3218691\n", - "epoch: 1 step: 1250, loss is 2.3033545\n", - "epoch: 1 step: 1375, loss is 2.2944286\n", - "epoch: 1 step: 1500, loss is 0.5123912\n", - "epoch: 1 step: 1625, loss is 0.14081886\n", - "epoch: 1 step: 1750, loss is 0.137348\n", - "epoch: 1 step: 1875, loss is 0.332155\n", - "epoch: 2 step: 125, loss is 0.029168233\n", - "epoch: 2 step: 250, loss is 0.2399086\n", - "epoch: 2 step: 375, loss is 0.18014185\n", - "epoch: 2 step: 500, loss is 0.2132935\n", - "epoch: 2 step: 625, loss is 0.0040447153\n", - "epoch: 2 step: 750, loss is 0.13248429\n", - "epoch: 2 step: 875, loss is 0.16978796\n", - "epoch: 2 step: 1000, loss is 0.042082515\n", - "epoch: 2 step: 1125, loss is 0.043927424\n", - "epoch: 2 step: 1250, loss is 0.15354133\n", - "epoch: 2 step: 1375, loss is 0.06834163\n", - "epoch: 2 step: 1500, loss is 0.045728613\n", - "epoch: 2 step: 1625, loss is 0.016941896\n", - "epoch: 2 step: 1750, loss is 0.05370252\n", - "epoch: 2 step: 1875, loss is 0.011741843\n", - "epoch: 3 step: 125, loss is 0.00913367\n", - "epoch: 3 step: 250, loss is 0.15724385\n", - "epoch: 3 step: 375, loss is 0.067094825\n", - "epoch: 3 step: 500, loss is 0.061788365\n", - "epoch: 3 step: 625, loss is 0.0050505553\n", - "epoch: 3 step: 750, loss is 0.0023197087\n", - "epoch: 3 step: 875, loss is 0.028508047\n", - "epoch: 3 step: 1000, loss is 0.039646797\n", - "epoch: 3 step: 1125, loss is 0.1460342\n", - "epoch: 3 step: 1250, loss is 0.0054985345\n", - "epoch: 3 step: 1375, loss is 0.3982027\n", - "epoch: 3 step: 1500, loss is 0.010748535\n", - "epoch: 3 step: 1625, loss is 0.015157141\n", - "epoch: 3 step: 1750, loss is 0.0019374305\n", - "epoch: 3 step: 1875, loss is 0.058262732\n", - "epoch: 4 step: 125, loss is 0.29354185\n", - "epoch: 4 step: 250, loss is 0.019852865\n", - "epoch: 4 step: 375, loss is 0.044506036\n", - 
"epoch: 4 step: 500, loss is 0.038882047\n", - "epoch: 4 step: 625, loss is 0.010133128\n", - "epoch: 4 step: 750, loss is 0.0055175046\n", - "epoch: 4 step: 875, loss is 0.086619824\n", - "epoch: 4 step: 1000, loss is 0.010645878\n", - "epoch: 4 step: 1125, loss is 0.025731985\n", - "epoch: 4 step: 1250, loss is 0.10762554\n", - "epoch: 4 step: 1375, loss is 0.006392666\n", - "epoch: 4 step: 1500, loss is 0.0022511086\n", - "epoch: 4 step: 1625, loss is 0.020254254\n", - "epoch: 4 step: 1750, loss is 0.007738711\n", - "epoch: 4 step: 1875, loss is 0.021094736\n", - "epoch: 5 step: 125, loss is 0.15432167\n", - "epoch: 5 step: 250, loss is 0.009095187\n", - "epoch: 5 step: 375, loss is 0.09194406\n", - "epoch: 5 step: 500, loss is 0.02482254\n", - "epoch: 5 step: 625, loss is 0.072574414\n", - "epoch: 5 step: 750, loss is 0.0033603504\n", - "epoch: 5 step: 875, loss is 0.014673766\n", - "epoch: 5 step: 1000, loss is 0.10280271\n", - "epoch: 5 step: 1125, loss is 0.017723871\n", - "epoch: 5 step: 1250, loss is 0.0246438\n", - "epoch: 5 step: 1375, loss is 0.0056467657\n", - "epoch: 5 step: 1500, loss is 0.009505681\n", - "epoch: 5 step: 1625, loss is 0.030743863\n", - "epoch: 5 step: 1750, loss is 0.1039285\n", - "epoch: 5 step: 1875, loss is 0.0149848955\n", - "================ Starting Testing ================\n", - "============ Accuracy:{'Accuracy': 0.9836} ============\n", - "\n", - "\n", - "================= The Situation 5 =================\n", - "== learning_rate:0.05, epoch_size:2, batch_size:16 ==\n", - "================ Starting Training ================\n", - "epoch: 1 step: 125, loss is 2.3003526\n", - "epoch: 1 step: 250, loss is 2.267969\n", - "epoch: 1 step: 375, loss is 2.295865\n", - "epoch: 1 step: 500, loss is 1.685572\n", - "epoch: 1 step: 625, loss is 2.1919081\n", - "epoch: 1 step: 750, loss is 2.2844672\n", - "epoch: 1 step: 875, loss is 2.2945147\n", - "epoch: 1 step: 1000, loss is 2.3321033\n", - "epoch: 1 step: 1125, loss is 2.3237975\n", 
- "epoch: 1 step: 1250, loss is 2.337674\n", - "epoch: 1 step: 1375, loss is 2.3723369\n", - "epoch: 1 step: 1500, loss is 2.328748\n", - "epoch: 1 step: 1625, loss is 2.3221745\n", - "epoch: 1 step: 1750, loss is 2.3402386\n", - "epoch: 1 step: 1875, loss is 2.2624133\n", - "epoch: 1 step: 2000, loss is 2.2845757\n", - "epoch: 1 step: 2125, loss is 2.2816522\n", - "epoch: 1 step: 2250, loss is 2.2604764\n", - "epoch: 1 step: 2375, loss is 2.293416\n", - "epoch: 1 step: 2500, loss is 2.2869396\n", - "epoch: 1 step: 2625, loss is 2.2734303\n", - "epoch: 1 step: 2750, loss is 2.2904344\n", - "epoch: 1 step: 2875, loss is 2.3431993\n", - "epoch: 1 step: 3000, loss is 2.3309033\n", - "epoch: 1 step: 3125, loss is 2.3322077\n", - "epoch: 1 step: 3250, loss is 2.321935\n", - "epoch: 1 step: 3375, loss is 2.3091938\n", - "epoch: 1 step: 3500, loss is 2.3223789\n", - "epoch: 1 step: 3625, loss is 2.3160322\n", - "epoch: 1 step: 3750, loss is 2.30167\n", - "epoch: 2 step: 125, loss is 2.3138895\n", - "epoch: 2 step: 250, loss is 2.3254342\n", - "epoch: 2 step: 375, loss is 2.3004107\n", - "epoch: 2 step: 500, loss is 2.27686\n", - "epoch: 2 step: 625, loss is 2.2919784\n", - "epoch: 2 step: 750, loss is 2.3029525\n", - "epoch: 2 step: 875, loss is 2.2823474\n", - "epoch: 2 step: 1000, loss is 2.3258169\n", - "epoch: 2 step: 1125, loss is 2.2833183\n", - "epoch: 2 step: 1250, loss is 2.3104324\n", - "epoch: 2 step: 1375, loss is 2.271712\n", - "epoch: 2 step: 1500, loss is 2.2836237\n", - "epoch: 2 step: 1625, loss is 2.2735772\n", - "epoch: 2 step: 1750, loss is 2.3267956\n", - "epoch: 2 step: 1875, loss is 2.2562587\n", - "epoch: 2 step: 2000, loss is 2.3003142\n", - "epoch: 2 step: 2125, loss is 2.3798678\n", - "epoch: 2 step: 2250, loss is 2.2594686\n", - "epoch: 2 step: 2375, loss is 2.3176265\n", - "epoch: 2 step: 2500, loss is 2.318133\n", - "epoch: 2 step: 2625, loss is 2.2887654\n", - "epoch: 2 step: 2750, loss is 2.3572085\n", - "epoch: 2 step: 2875, loss is 
2.2714615\n", - "epoch: 2 step: 3000, loss is 2.3420625\n", - "epoch: 2 step: 3125, loss is 2.3499656\n", - "epoch: 2 step: 3250, loss is 2.2610397\n", - "epoch: 2 step: 3375, loss is 2.3557587\n", - "epoch: 2 step: 3500, loss is 2.361361\n", - "epoch: 2 step: 3625, loss is 2.3162065\n", - "epoch: 2 step: 3750, loss is 2.338607\n", - "================ Starting Testing ================\n", - "============ Accuracy:{'Accuracy': 0.1028} ============\n", - "\n", - "\n", - "================= The Situation 6 =================\n", - "== learning_rate:0.05, epoch_size:2, batch_size:32 ==\n", - "================ Starting Training ================\n", - "epoch: 1 step: 125, loss is 2.3143704\n", - "epoch: 1 step: 250, loss is 2.3132532\n", - "epoch: 1 step: 375, loss is 2.2908692\n", - "epoch: 1 step: 500, loss is 0.83405465\n", - "epoch: 1 step: 625, loss is 0.7648193\n", - "epoch: 1 step: 750, loss is 0.77581483\n", - "epoch: 1 step: 875, loss is 0.63934445\n", - "epoch: 1 step: 1000, loss is 1.0165555\n", - "epoch: 1 step: 1125, loss is 0.20264903\n", - "epoch: 1 step: 1250, loss is 0.4031322\n", - "epoch: 1 step: 1375, loss is 0.22567266\n", - "epoch: 1 step: 1500, loss is 0.5009518\n", - "epoch: 1 step: 1625, loss is 0.30227607\n", - "epoch: 1 step: 1750, loss is 0.4046876\n", - "epoch: 1 step: 1875, loss is 0.13460635\n", - "epoch: 2 step: 125, loss is 0.47336528\n", - "epoch: 2 step: 250, loss is 2.1019025\n", - "epoch: 2 step: 375, loss is 2.3308382\n", - "epoch: 2 step: 500, loss is 2.3199062\n", - "epoch: 2 step: 625, loss is 2.281591\n", - "epoch: 2 step: 750, loss is 2.3075724\n", - "epoch: 2 step: 875, loss is 2.3032534\n", - "epoch: 2 step: 1000, loss is 2.2849927\n", - "epoch: 2 step: 1125, loss is 2.3171089\n", - "epoch: 2 step: 1250, loss is 2.2753448\n", - "epoch: 2 step: 1375, loss is 2.3221805\n", - "epoch: 2 step: 1500, loss is 2.3242655\n", - "epoch: 2 step: 1625, loss is 2.3066783\n", - "epoch: 2 step: 1750, loss is 2.3138652\n", - "epoch: 2 step: 
1875, loss is 2.3345938\n", - "================ Starting Testing ================\n", - "============ Accuracy:{'Accuracy': 0.0974} ============\n", - "\n", - "\n", - "================= The Situation 7 =================\n", - "== learning_rate:0.05, epoch_size:5, batch_size:16 ==\n", - "================ Starting Training ================\n", - "epoch: 1 step: 125, loss is 2.295558\n", - "epoch: 1 step: 250, loss is 2.38386\n", - "epoch: 1 step: 375, loss is 2.33319\n", - "epoch: 1 step: 500, loss is 1.438849\n", - "epoch: 1 step: 625, loss is 1.4208732\n", - "epoch: 1 step: 750, loss is 1.1754154\n", - "epoch: 1 step: 875, loss is 0.7132174\n", - "epoch: 1 step: 1000, loss is 1.0798488\n", - "epoch: 1 step: 1125, loss is 2.4280946\n", - "epoch: 1 step: 1250, loss is 2.3117175\n", - "epoch: 1 step: 1375, loss is 2.3256335\n", - "epoch: 1 step: 1500, loss is 2.2663872\n", - "epoch: 1 step: 1625, loss is 2.3064473\n", - "epoch: 1 step: 1750, loss is 2.2814608\n", - "epoch: 1 step: 1875, loss is 2.312989\n", - "epoch: 1 step: 2000, loss is 2.3795862\n", - "epoch: 1 step: 2125, loss is 2.3190327\n", - "epoch: 1 step: 2250, loss is 2.3067005\n", - "epoch: 1 step: 2375, loss is 2.3292706\n", - "epoch: 1 step: 2500, loss is 2.3708742\n", - "epoch: 1 step: 2625, loss is 2.3234503\n", - "epoch: 1 step: 2750, loss is 2.286217\n", - "epoch: 1 step: 2875, loss is 2.3187988\n", - "epoch: 1 step: 3000, loss is 2.2813363\n", - "epoch: 1 step: 3125, loss is 2.3160567\n", - "epoch: 1 step: 3250, loss is 2.3587837\n", - "epoch: 1 step: 3375, loss is 2.3024836\n", - "epoch: 1 step: 3500, loss is 2.3151147\n", - "epoch: 1 step: 3625, loss is 2.3327696\n", - "epoch: 1 step: 3750, loss is 2.3304598\n", - "epoch: 2 step: 125, loss is 2.3098416\n", - "epoch: 2 step: 250, loss is 2.2828104\n", - "epoch: 2 step: 375, loss is 2.312215\n", - "epoch: 2 step: 500, loss is 2.2553732\n", - "epoch: 2 step: 625, loss is 2.3105173\n", - "epoch: 2 step: 750, loss is 2.339398\n", - "epoch: 2 step: 875, 
loss is 2.2900229\n", - "epoch: 2 step: 1000, loss is 2.292558\n", - "epoch: 2 step: 1125, loss is 2.3165226\n", - "epoch: 2 step: 1250, loss is 2.2258747\n", - "epoch: 2 step: 1375, loss is 2.367465\n", - "epoch: 2 step: 1500, loss is 2.3556745\n", - "epoch: 2 step: 1625, loss is 2.3215854\n", - "epoch: 2 step: 1750, loss is 2.2786517\n", - "epoch: 2 step: 1875, loss is 2.2869582\n", - "epoch: 2 step: 2000, loss is 2.2685075\n", - "epoch: 2 step: 2125, loss is 2.334608\n", - "epoch: 2 step: 2250, loss is 2.294904\n", - "epoch: 2 step: 2375, loss is 2.3460655\n", - "epoch: 2 step: 2500, loss is 2.2993896\n", - "epoch: 2 step: 2625, loss is 2.3113718\n", - "epoch: 2 step: 2750, loss is 2.2953403\n", - "epoch: 2 step: 2875, loss is 2.3484921\n", - "epoch: 2 step: 3000, loss is 2.3252711\n", - "epoch: 2 step: 3125, loss is 2.3128834\n", - "epoch: 2 step: 3250, loss is 2.3085055\n", - "epoch: 2 step: 3375, loss is 2.2696073\n", - "epoch: 2 step: 3500, loss is 2.2517495\n", - "epoch: 2 step: 3625, loss is 2.332074\n", - "epoch: 2 step: 3750, loss is 2.288159\n", - "epoch: 3 step: 125, loss is 2.278061\n", - "epoch: 3 step: 250, loss is 2.2659266\n", - "epoch: 3 step: 375, loss is 2.3351808\n", - "epoch: 3 step: 500, loss is 2.3183289\n", - "epoch: 3 step: 625, loss is 2.3381956\n", - "epoch: 3 step: 750, loss is 2.3140006\n", - "epoch: 3 step: 875, loss is 2.4133265\n", - "epoch: 3 step: 1000, loss is 2.2901528\n", - "epoch: 3 step: 1125, loss is 2.2979116\n", - "epoch: 3 step: 1250, loss is 2.310516\n", - "epoch: 3 step: 1375, loss is 2.3049035\n", - "epoch: 3 step: 1500, loss is 2.2720628\n", - "epoch: 3 step: 1625, loss is 2.3208938\n", - "epoch: 3 step: 1750, loss is 2.2830434\n", - "epoch: 3 step: 1875, loss is 2.30417\n", - "epoch: 3 step: 2000, loss is 2.2737663\n", - "epoch: 3 step: 2125, loss is 2.2822623\n", - "epoch: 3 step: 2250, loss is 2.3083425\n", - "epoch: 3 step: 2375, loss is 2.31658\n", - "epoch: 3 step: 2500, loss is 2.2714338\n", - "epoch: 3 step: 
2625, loss is 2.3353026\n", - "epoch: 3 step: 2750, loss is 2.2701824\n", - "epoch: 3 step: 2875, loss is 2.3068202\n", - "epoch: 3 step: 3000, loss is 2.3071563\n", - "epoch: 3 step: 3125, loss is 2.3619137\n", - "epoch: 3 step: 3250, loss is 2.2972512\n", - "epoch: 3 step: 3375, loss is 2.307385\n", - "epoch: 3 step: 3500, loss is 2.25137\n", - "epoch: 3 step: 3625, loss is 2.3223963\n", - "epoch: 3 step: 3750, loss is 2.332354\n", - "epoch: 4 step: 125, loss is 2.3525374\n", - "epoch: 4 step: 250, loss is 2.2607126\n", - "epoch: 4 step: 375, loss is 2.3337207\n", - "epoch: 4 step: 500, loss is 2.2943015\n", - "epoch: 4 step: 625, loss is 2.322392\n", - "epoch: 4 step: 750, loss is 2.3488765\n", - "epoch: 4 step: 875, loss is 2.3072693\n", - "epoch: 4 step: 1000, loss is 2.2509954\n", - "epoch: 4 step: 1125, loss is 2.267654\n", - "epoch: 4 step: 1250, loss is 2.3125684\n", - "epoch: 4 step: 1375, loss is 2.2700844\n", - "epoch: 4 step: 1500, loss is 2.3357136\n", - "epoch: 4 step: 1625, loss is 2.3254232\n", - "epoch: 4 step: 1750, loss is 2.3321593\n", - "epoch: 4 step: 1875, loss is 2.3218544\n", - "epoch: 4 step: 2000, loss is 2.2537644\n", - "epoch: 4 step: 2125, loss is 2.350479\n", - "epoch: 4 step: 2250, loss is 2.2925644\n", - "epoch: 4 step: 2375, loss is 2.2582018\n", - "epoch: 4 step: 2500, loss is 2.3031194\n", - "epoch: 4 step: 2625, loss is 2.2963529\n", - "epoch: 4 step: 2750, loss is 2.3857465\n", - "epoch: 4 step: 2875, loss is 2.3052728\n", - "epoch: 4 step: 3000, loss is 2.3019109\n", - "epoch: 4 step: 3125, loss is 2.345898\n", - "epoch: 4 step: 3250, loss is 2.3057108\n", - "epoch: 4 step: 3375, loss is 2.3092058\n", - "epoch: 4 step: 3500, loss is 2.263299\n", - "epoch: 4 step: 3625, loss is 2.2924554\n", - "epoch: 4 step: 3750, loss is 2.3009706\n", - "epoch: 5 step: 125, loss is 2.363699\n", - "epoch: 5 step: 250, loss is 2.340525\n", - "epoch: 5 step: 375, loss is 2.3687658\n", - "epoch: 5 step: 500, loss is 2.3060727\n", - "epoch: 5 
step: 625, loss is 2.3061423\n", - "epoch: 5 step: 750, loss is 2.3200512\n", - "epoch: 5 step: 875, loss is 2.296088\n", - "epoch: 5 step: 1000, loss is 2.3382936\n", - "epoch: 5 step: 1125, loss is 2.3020995\n", - "epoch: 5 step: 1250, loss is 2.3069475\n", - "epoch: 5 step: 1375, loss is 2.2993364\n", - "epoch: 5 step: 1500, loss is 2.2792392\n", - "epoch: 5 step: 1625, loss is 2.3670845\n", - "epoch: 5 step: 1750, loss is 2.237617\n", - "epoch: 5 step: 1875, loss is 2.3088713\n", - "epoch: 5 step: 2000, loss is 2.305958\n", - "epoch: 5 step: 2125, loss is 2.2708802\n", - "epoch: 5 step: 2250, loss is 2.3196752\n", - "epoch: 5 step: 2375, loss is 2.2655036\n", - "epoch: 5 step: 2500, loss is 2.2821996\n", - "epoch: 5 step: 2625, loss is 2.3173587\n", - "epoch: 5 step: 2750, loss is 2.31017\n", - "epoch: 5 step: 2875, loss is 2.2813506\n", - "epoch: 5 step: 3000, loss is 2.3327284\n", - "epoch: 5 step: 3125, loss is 2.3425179\n", - "epoch: 5 step: 3250, loss is 2.3141623\n", - "epoch: 5 step: 3375, loss is 2.345585\n", - "epoch: 5 step: 3500, loss is 2.2416115\n", - "epoch: 5 step: 3625, loss is 2.2807086\n", - "epoch: 5 step: 3750, loss is 2.2743173\n", - "================ Starting Testing ================\n", - "============ Accuracy:{'Accuracy': 0.1028} ============\n", - "\n", - "\n", - "================= The Situation 8 =================\n", - "== learning_rate:0.05, epoch_size:5, batch_size:32 ==\n", - "================ Starting Training ================\n", - "epoch: 1 step: 125, loss is 2.3025277\n", - "epoch: 1 step: 250, loss is 2.3404844\n", - "epoch: 1 step: 375, loss is 2.0081742\n", - "epoch: 1 step: 500, loss is 0.7698262\n", - "epoch: 1 step: 625, loss is 0.84001845\n", - "epoch: 1 step: 750, loss is 0.858749\n", - "epoch: 1 step: 875, loss is 1.1702987\n", - "epoch: 1 step: 1000, loss is 0.32887033\n", - "epoch: 1 step: 1125, loss is 1.203075\n", - "epoch: 1 step: 1250, loss is 0.3252069\n", - "epoch: 1 step: 1375, loss is 0.6137168\n", - "epoch: 
1 step: 1500, loss is 0.20187378\n", - "epoch: 1 step: 1625, loss is 0.31883952\n", - "epoch: 1 step: 1750, loss is 0.51499724\n", - "epoch: 1 step: 1875, loss is 0.35917458\n", - "epoch: 2 step: 125, loss is 1.1622694\n", - "epoch: 2 step: 250, loss is 0.5028538\n", - "epoch: 2 step: 375, loss is 0.6323484\n", - "epoch: 2 step: 500, loss is 0.31263918\n", - "epoch: 2 step: 625, loss is 0.81616145\n", - "epoch: 2 step: 750, loss is 0.24894318\n", - "epoch: 2 step: 875, loss is 0.87633514\n", - "epoch: 2 step: 1000, loss is 0.51267153\n", - "epoch: 2 step: 1125, loss is 2.2888105\n", - "epoch: 2 step: 1250, loss is 2.3071456\n", - "epoch: 2 step: 1375, loss is 2.2765212\n", - "epoch: 2 step: 1500, loss is 2.3278954\n", - "epoch: 2 step: 1625, loss is 2.3195877\n", - "epoch: 2 step: 1750, loss is 2.3329341\n", - "epoch: 2 step: 1875, loss is 2.3095658\n", - "epoch: 3 step: 125, loss is 2.304574\n", - "epoch: 3 step: 250, loss is 2.3350236\n", - "epoch: 3 step: 375, loss is 2.3366516\n", - "epoch: 3 step: 500, loss is 2.3036337\n", - "epoch: 3 step: 625, loss is 2.3146763\n", - "epoch: 3 step: 750, loss is 2.3325539\n", - "epoch: 3 step: 875, loss is 2.3182425\n", - "epoch: 3 step: 1000, loss is 2.2901216\n", - "epoch: 3 step: 1125, loss is 2.271974\n", - "epoch: 3 step: 1250, loss is 2.3013616\n", - "epoch: 3 step: 1375, loss is 2.3093197\n", - "epoch: 3 step: 1500, loss is 2.288068\n", - "epoch: 3 step: 1625, loss is 2.3186734\n", - "epoch: 3 step: 1750, loss is 2.3295755\n", - "epoch: 3 step: 1875, loss is 2.2763002\n", - "epoch: 4 step: 125, loss is 2.3041005\n", - "epoch: 4 step: 250, loss is 2.329235\n", - "epoch: 4 step: 375, loss is 2.2897174\n", - "epoch: 4 step: 500, loss is 2.2791119\n", - "epoch: 4 step: 625, loss is 2.3264925\n", - "epoch: 4 step: 750, loss is 2.3077648\n", - "epoch: 4 step: 875, loss is 2.301721\n", - "epoch: 4 step: 1000, loss is 2.2765012\n", - "epoch: 4 step: 1125, loss is 2.2757645\n", - "epoch: 4 step: 1250, loss is 2.2934148\n", - 
"epoch: 4 step: 1375, loss is 2.3058321\n", - "epoch: 4 step: 1500, loss is 2.3203738\n", - "epoch: 4 step: 1625, loss is 2.319434\n", - "epoch: 4 step: 1750, loss is 2.2785978\n", - "epoch: 4 step: 1875, loss is 2.3218942\n", - "epoch: 5 step: 125, loss is 2.300228\n", - "epoch: 5 step: 250, loss is 2.3302739\n", - "epoch: 5 step: 375, loss is 2.302813\n", - "epoch: 5 step: 500, loss is 2.3095956\n", - "epoch: 5 step: 625, loss is 2.306519\n", - "epoch: 5 step: 750, loss is 2.2943096\n", - "epoch: 5 step: 875, loss is 2.316216\n", - "epoch: 5 step: 1000, loss is 2.3018808\n", - "epoch: 5 step: 1125, loss is 2.2752795\n", - "epoch: 5 step: 1250, loss is 2.300592\n", - "epoch: 5 step: 1375, loss is 2.313322\n", - "epoch: 5 step: 1500, loss is 2.2971704\n", - "epoch: 5 step: 1625, loss is 2.328839\n", - "epoch: 5 step: 1750, loss is 2.2877312\n", - "epoch: 5 step: 1875, loss is 2.3039935\n", - "================ Starting Testing ================\n", - "============ Accuracy:{'Accuracy': 0.1135} ============\n", - "\n", - "\n" - ] - } - ], - "source": [ - "from mindspore.train.callback import SummaryCollector\n", - "from mindspore.nn.metrics import Accuracy\n", - "from mindspore import context, Model\n", - "from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits\n", - "from mindspore import load_checkpoint, load_param_into_net\n", - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor\n", - "import os\n", - "\n", - "if __name__==\"__main__\":\n", - " context.set_context(mode=context.GRAPH_MODE, device_target = \"GPU\")\n", - " if os.name == \"nt\":\n", - " os.system(\"del/f/s/q *.ckpt *.meta\")\n", - " else:\n", - " os.system(\"rm -f *.ckpt *.meta *.pb\")\n", - "\n", - " mnist_path = \"./datasets/MNIST_Data/\"\n", - " model_path = \"./models/ckpt/lineage_and_scalars_comparison/\"\n", - " repeat_size = 1\n", - " config_ck = CheckpointConfig(save_checkpoint_steps=1875, keep_checkpoint_max=10)\n", - " ckpoint_cb = 
ModelCheckpoint(prefix=\"checkpoint_lenet\", directory=model_path, config=config_ck)\n", - " # define the optimizer\n", - " \n", - " lrs = [0.01,0.05]\n", - " epoch_sizes = [2, 5]\n", - " batch_sizes = [16, 32]\n", - " situations = [(i, j, k) for i in lrs for j in epoch_sizes for k in batch_sizes]\n", - " count = 1\n", - " \n", - " for lr,epoch_size,batch_size in situations:\n", - " momentum = 0.9 \n", - " network = LeNet5()\n", - " net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - " net_opt = nn.Momentum(network.trainable_params(), lr, momentum)\n", - " model = Model(network, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()})\n", - " summary_collector = SummaryCollector(summary_dir=\"./summary_base/LeNet-MNIST_Data,lr:{},epoch:{},batch_size:{}\"\n", - " .format(lr, epoch_size, batch_size), collect_freq=1)\n", - " # Start to train\n", - " print(\"================= The Situation {} =================\".format(count))\n", - " print(\"== learning_rate:{}, epoch_size:{}, batch_size:{} ==\".format(lr, epoch_size, batch_size))\n", - " print(\"================ Starting Training ================\")\n", - " ds_train = create_dataset(os.path.join(mnist_path, \"train\"), batch_size, repeat_size)\n", - " model.train(epoch_size, ds_train, callbacks=[ckpoint_cb, summary_collector, LossMonitor(125)], dataset_sink_mode=True)\n", - "\n", - " print(\"================ Starting Testing ================\")\n", - " # load testing dataset\n", - " ds_eval = create_dataset(os.path.join(mnist_path, \"test\"))\n", - " acc = model.eval(ds_eval, callbacks=[summary_collector], dataset_sink_mode=True)\n", - " print(\"============ Accuracy:{} ============\\n\\n\".format(acc))\n", - " count += 1" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 启动及关闭MindInsight服务" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "这里主要展示如何启用及关闭MindInsight,更多的命令集信息,请参考MindSpore官方网站:。\n", - "\n", - "启动MindInsight服务命令:\n", - "```\n", 
- "mindinsight start --summary-base-dir=./summary_base --port=8080\n", - "```\n", - "\n", - "- `--summary-base-dir`:MindInsight指定启动工作路径的命令;`./summary_base` 为 `SummaryCollector` 的 `summary_dir` 参数所指定的目录。\n", - "- `--port`:MindInsight指定启动的端口,数值可以任意为1~65535的范围内。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "停止MindInsight服务命令:\n", - "```\n", - "mindinsight stop --port=8080\n", - "```\n", - "- `mindinsight stop`:MindInsight关闭服务命令。\n", - "- `--port=8080`:即MindInsight服务开启在`8080`端口,所以这里写成`--port=8080`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 溯源分析\n", - "\n", - "### 连接到溯源分析地址\n", - "\n", - "浏览器中输入:`http://127.0.0.1:8080`进入MindInsight界面如下:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![image](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/mindinsight_homepage_for_lineage.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 模型溯源界面介绍" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "上图训练列表中序号1-8分别是按照8组训练参数,保存的训练数据。点击右上角的溯源分析便可以进入,溯源分析包含模型溯源和数据溯源,首先是模型溯源界面,如下所示:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![image](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/model_lineage_page.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### 优化目标区域\n", - "\n", - "可以选择模型精度值(Accuracy)或模型损失值(loss)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![image](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/optimization_target_page_of_model_lineage.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "能直观的看出`learning_rate`、`epoch`、`batch_size`三个参数对本次训练模型的精度值和损失值的参数重要性(参数重要性的数值越接近1表示对此优化目标的影响越大,越接近0则表示对优化目标的影响越小),方便用户决策在训练时需要调整的参数。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### 
模型训练的详细参数展示界面" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "展示界面中提供了模型训练过程中的各类重要参数信息,包括:网络、优化器、训练样本数量、测试样本数量、学习率、迭代次数、`batch_size`、`device`数目、模型大小、损失函数等等,用户可以自行选择单次训练数据进行可视化分析或者多次训练数据进行可视化比对分析,提高分析效率。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![image](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/detailed_information_page_of_model_lineage.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据溯源界面介绍" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "数据溯源展示了用户进行模型训练前的数据增强的过程,且此过程按照增强顺序进行排列。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![image](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/data_lineage_page.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本例中数据增强的过程包括`MnistDataset`,`Map_TypeCast`,`Map_Resize`,`Map_Rescale`,`Map_HWC2CHW`,`Shuffle`,`Batch`等操作。\n", - "\n", - "- 数据集转换(`MnistDataset`)\n", - "- label的数据类型转换(`Map_TypeCast`)\n", - "- 图像的高宽缩放(`Map_Resize`)\n", - "- 图像的比例缩放(`Map_Rescale`)\n", - "- 图像数据的张量变换(`Map_HWC2CHW`)\n", - "- 图像混洗(`Shuffle`)\n", - "- 图像成组(`Batch`)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 对比分析" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 进入对比分析界面\n", - "\n", - "从MindInsight主页进入对比分析界面。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![image](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/mindinsight_homepage_for_scalars_comparison.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "从对比分析界面中可以对比不同的训练中的标量信息,本例使用`SummaryCollector`自动保存了loss值,其他的标量信息保存,请参考[官方文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ScalarSummary.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": 
[ - "![image](https://gitee.com/mindspore/docs/raw/master/tutorials/notebook/mindinsight/images/scalars_comparison_page.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "对比看板中可以选择对比的信息有:\n", - "\n", - "- 训练选择:本例有8组不同的训练参数对应的训练信息可供选择,此次选择了其中学习率(lr)分别为0.01和0.05的两组训练过程的数据进行对比。\n", - "- 标签选择:本例保存了loss值一种标量标签。\n", - "\n", - "> 对比曲线可通过调整平滑度来优化显示效果。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本次体验使用了MindSpore的数据收集接口`SummaryCollector`对不同训练参数下的模型训练信息进行收集,并且通过开启MindInsight服务将溯源信息和标量信息进行可视化展示,以上就是本次体验的全部内容。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/mindspore_apply_gradient_accumulation.ipynb b/tutorials/notebook/mindspore_apply_gradient_accumulation.ipynb deleted file mode 100644 index 494b237417b8748a27acd26b007f752f43998802..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_apply_gradient_accumulation.ipynb +++ /dev/null @@ -1,588 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 应用梯度累积算法" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "本教程介绍梯度累积的训练方式,目的是为了解决由于内存不足导致某些大型网络无法训练大`batch_size`的问题。 \n", - "传统的训练方式是每次计算得到loss和梯度后,直接用所得梯度对参数进行更新。与传统的训练方式不同,梯度累积引入`mini_batch`的概念,首先对每个`mini_batch`的数据计算loss和梯度,但不立即更新模型参数,而是先对所得梯度进行累加,然后在指定数量(N)个`mini_batch`之后,用累积后的梯度更新网络参数。下次训练前清空过往累积梯度后重新累加,如此往复。 \n", - "最终目的是为了达到跟直接用N个mini_batch数据训练几乎同样的效果。 \n", - "本例将在MindSpore中应用梯度累积算法,实现对模型的训练。 \n", - "体验过程如下:\n", 
- "\n", - "1. 数据准备。\n", - "2. 定义深度神经网络。\n", - "3. 训练函数并实现定义梯度累积算法。\n", - "4. 调用自定义训练函数进行训练。\n", - "5. 使用训练保存的模型参数进行验证。\n", - "\n", - "> 本文档适用于GPU环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据准备\n", - "\n", - "下载MNIST_Data数据集并解压到指定位置,执行如下命令:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "定义数据集增强函数create_dataset,调用该函数对MNIST原始训练数据集60000张$28\\times28$的图片增强为1875个batch,每个batch张量为`(32,1,32,32)`的训练数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype\n", - "import 
mindspore.dataset as ds\n", - "\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define some parameters needed for data enhancement and rough justification\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # according to the parameters, generate the corresponding data enhancement method\n", - " c_trans = [\n", - " CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR),\n", - " CV.Rescale(rescale_nml, shift_nml),\n", - " CV.Rescale(rescale, shift),\n", - " CV.HWC2CHW()\n", - " ]\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # using map to apply operations to a dataset\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=c_trans, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - "\n", - " # process the generated dataset\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size)\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义深度神经网络\n", - "\n", - "本例采用LeNet5训练网络对数据集进行训练,其构造方式如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = 
nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x) \n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义Model函数并在其中进行梯度累积定义\n", - "\n", - "梯度累积计算在Model函数中,这里对Model函数的原始代码进行重构。\n", - "\n", - "重构中需涉及重构的方法主要有五点:\n", - "\n", - "1. 定义梯度累积方法。\n", - "2. 定义前向反向传播方法。\n", - "3. 定义权重更新方法。\n", - "4. 定义梯度累积清除方法。\n", - "5. 
定义模型训练执行器。\n", - "\n", - "具体实现如下:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义梯度累积方法\n", - "\n", - "需要定义梯度累积的计算方式,并将计算方式注册到计算图中,若不进行注册,计算方法将不能在`nn.Cell`中构建计算图。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.ops as ops\n", - "\n", - "_sum_op = ops.MultitypeFuncGraph(\"grad_sum_op\")\n", - "_clear_op = ops.MultitypeFuncGraph(\"clear_op\")\n", - "\n", - "\n", - "@_sum_op.register(\"Tensor\", \"Tensor\")\n", - "def _cumulative_grad(grad_sum, grad):\n", - " \"\"\"Apply grad sum to cumulative gradient.\"\"\"\n", - " add = ops.AssignAdd()\n", - " return add(grad_sum, grad)\n", - "\n", - "\n", - "@_clear_op.register(\"Tensor\", \"Tensor\")\n", - "def _clear_grad_sum(grad_sum, zero):\n", - " \"\"\"Apply zero to clear grad_sum.\"\"\"\n", - " success = True\n", - " success = ops.depend(success, ops.assign(grad_sum, zero))\n", - " return success" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`_cumulativa_gard`:梯度累积方法,将grad值加到`grad_sum`中,后续计算过程中作用是将`mini_batch`计算出的grad值添加到`grad_sum`中。 \n", - "`_clear_grad_sum`:梯度清除方法,后续计算过程中的作用是当累积的梯度值`grad_sum`更新到权重中后,将`grad_sum`值清零。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义前向反向传播方法\n", - "\n", - "前向传播:利用训练前的模型函数,载入数据集中的数据,计算出loss值的过程。 \n", - "反向传播:利用loss值和载入的数据,通过优化器函数计算出梯度值,并将梯度值更新到模型函数的权重中的过程。 \n", - "这两个过程将在`TrainForwardBackward`中定义。 \n", - "MindSpore采用继承`nn.Cell`的方法,并将整体的计算过程在`construct`中实现。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.nn import Cell\n", - "\n", - "class TrainForwardBackward(Cell):\n", - " def __init__(self, network, optimizer, grad_sum, sens=1.0):\n", - " super(TrainForwardBackward, self).__init__(auto_prefix=False)\n", - " self.network = network\n", - " self.network.set_grad()\n", - " self.network.add_flags(defer_inline=True)\n", - " self.weights = 
ParameterTuple(network.trainable_params())\n", - " self.optimizer = optimizer\n", - " self.grad_sum = grad_sum\n", - " self.grad = ops.GradOperation(get_by_list=True, sens_param=True)\n", - " self.sens = sens\n", - " self.hyper_map = ops.HyperMap()\n", - "\n", - " def construct(self, *inputs):\n", - " weights = self.weights\n", - " loss = self.network(*inputs)\n", - " sens = ops.Fill()(ops.DType()(loss), ops.Shape()(loss), self.sens)\n", - " grads = self.grad(self.network, weights)(*inputs, sens)\n", - " return ops.depend(loss, self.hyper_map(ops.partial(_sum_op), self.grad_sum, grads))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`weights`:即网络中的权重参数。 \n", - "`loss`:当前网络参数载入训练数据后的损失值。 \n", - "`sens`:创建一个与loss相同类型和张量,将数值1填充其中。 \n", - "`grads`:计算出本次`mini_batch`的梯度值。 \n", - "`ops.depend`:使用前面的`loss`方法将loss值计算出来。\n", - "\n", - "此方法定义了模型训练过程中前向传播和方向传播的具体过程,并且可以保存出所有权重的参数,计算出当前模型的权重参数下的loss值。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义权重更新方法\n", - "\n", - "执行优化权重的方法,即将`grad_sum`更新到权重参数中。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "class TrainOptim(Cell):\n", - " def __init__(self, optimizer, grad_sum):\n", - " super(TrainOptim, self).__init__(auto_prefix=False)\n", - " self.optimizer = optimizer\n", - " self.grad_sum = grad_sum\n", - "\n", - " def construct(self):\n", - " return self.optimizer(self.grad_sum)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义清除累积梯度的方法\n", - "\n", - "当累积的梯度`grad_sum`更新到权重中后,调用本函数将`grad_sum`值清零,再开始下一次梯度累积。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "class TrainClear(Cell):\n", - " def __init__(self, grad_sum, zeros):\n", - " super(TrainClear, self).__init__(auto_prefix=False)\n", - " self.grad_sum = grad_sum\n", - " self.zeros = zeros\n", - " self.hyper_map = ops.HyperMap()\n", - "\n", - " def 
construct(self):\n", - " seccess = self.hyper_map(ops.partial(_clear_op), self.grad_sum, self.zeros)\n", - " return seccess" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义模型训练执行器\n", - "\n", - "在`GradientAccumulation`定义前向和反向以及梯度累积的执行过程。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import mindspore.nn as nn\n", - "from mindspore import ParameterTuple, context, DatasetHelper\n", - "from mindspore import save_checkpoint\n", - "\n", - "\n", - "class GradientAccumulation:\n", - " def __init__(self, network, loss_fn, optimizer):\n", - " self._network = network\n", - " self._loss_fn = loss_fn\n", - " self._optimizer = optimizer\n", - "\n", - " params = self._optimizer.parameters\n", - " self._grad_sum = params.clone(prefix=\"grad_sum\", init='zeros')\n", - " self._zeros = params.clone(prefix=\"zeros\", init='zeros')\n", - " self._train_forward_backward = self._build_train_forward_backward_network()\n", - " self._train_optim = self._build_train_optim()\n", - " self._train_clear = self._build_train_clear()\n", - "\n", - " def _build_train_forward_backward_network(self):\n", - " \"\"\"Build forward and backward network\"\"\"\n", - " network = self._network\n", - " network = nn.WithLossCell(network, self._loss_fn)\n", - " loss_scale = 1.0\n", - " network = TrainForwardBackward(network, self._optimizer, self._grad_sum, loss_scale).set_train()\n", - " return network\n", - "\n", - " def _build_train_optim(self):\n", - " \"\"\"Build optimizer network\"\"\"\n", - " network = TrainOptim(self._optimizer, self._grad_sum).set_train()\n", - " return network\n", - "\n", - " def _build_train_clear(self):\n", - " \"\"\"Build clear network\"\"\"\n", - " network = TrainClear(self._grad_sum, self._zeros).set_train()\n", - " return network\n", - "\n", - " def train_process(self, epoch, train_dataset, mini_steps=None):\n", - " \"\"\"\n", - " Training process. 
The data would be passed to network directly.\n", - " \"\"\"\n", - " dataset_helper = DatasetHelper(train_dataset, dataset_sink_mode=False, epoch_num=epoch)\n", - "\n", - " for i in range(epoch):\n", - " step = 0\n", - " for k, next_element in enumerate(dataset_helper):\n", - " loss = self._train_forward_backward(*next_element)\n", - " if (k + 1) % mini_steps == 0:\n", - " step += 1\n", - " print(\"epoch:\", i + 1, \"step:\", step, \"loss is \", loss)\n", - " self._train_optim()\n", - " self._train_clear()\n", - "\n", - " train_dataset.reset()\n", - "\n", - " save_checkpoint(self._train_forward_backward, \"gradient_accumulation.ckpt\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`train_process`:构建训练执行过程,并将梯度累积的方法在其中实现,即每`mini_steps`个`batch`数据训练完成后更新一次权重参数。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 执行训练\n", - "\n", - "执行训练过程,类似快速入门案例,将损失函数`SoftmaxCrossEntropyWithLogits`,优化器函数`Momentum`和深度网络`LeNet5`传入,自定义模型训练函数`GradientAccumolation`,并调用`train_process`方法,使用数据进行训练。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Training ==============\n", - "epoch: 1 step: 1 loss is 2.302572\n", - "epoch: 1 step: 2 loss is 2.3027077\n", - "epoch: 1 step: 3 loss is 2.3026032\n", - "epoch: 1 step: 4 loss is 2.3029802\n", - "epoch: 1 step: 5 loss is 2.3009882\n", - "epoch: 1 step: 6 loss is 2.3028584\n", - "epoch: 1 step: 7 loss is 2.2963173\n", - "epoch: 1 step: 8 loss is 2.301377\n", - "epoch: 1 step: 9 loss is 2.3019261\n", - "... 
...\n", - "epoch: 1 step: 461 loss is 2.2829156\n", - "epoch: 1 step: 462 loss is 2.2586172\n", - "epoch: 1 step: 463 loss is 2.2446578\n", - "epoch: 1 step: 464 loss is 2.1804438\n", - "epoch: 1 step: 465 loss is 2.1868634\n", - "epoch: 1 step: 466 loss is 2.118839\n", - "epoch: 1 step: 467 loss is 2.1144428\n", - "epoch: 1 step: 468 loss is 1.94902\n", - "epoch: 2 step: 1 loss is 1.9981135\n", - "epoch: 2 step: 2 loss is 2.0984964\n", - "epoch: 2 step: 3 loss is 2.0167308\n", - "epoch: 2 step: 4 loss is 2.0224195\n", - "epoch: 2 step: 5 loss is 2.0156221\n", - "epoch: 2 step: 6 loss is 1.9364308\n", - "epoch: 2 step: 7 loss is 1.8101931\n", - "... ...\n", - "epoch: 2 step: 459 loss is 0.12907082\n", - "epoch: 2 step: 460 loss is 0.15356739\n", - "epoch: 2 step: 461 loss is 0.36636132\n", - "epoch: 2 step: 462 loss is 0.2972299\n", - "epoch: 2 step: 463 loss is 0.035830393\n", - "epoch: 2 step: 464 loss is 0.3594339\n", - "epoch: 2 step: 465 loss is 0.0087479465\n", - "epoch: 2 step: 466 loss is 0.16021682\n", - "epoch: 2 step: 467 loss is 0.11816633\n", - "epoch: 2 step: 468 loss is 0.019440759\n", - "epoch: 3 step: 1 loss is 0.0047739483\n", - "epoch: 3 step: 2 loss is 0.03690074\n", - "epoch: 3 step: 3 loss is 0.38832387\n", - "epoch: 3 step: 4 loss is 0.121167235\n", - "epoch: 3 step: 5 loss is 0.097194746\n", - "epoch: 3 step: 6 loss is 0.047661886\n", - "epoch: 3 step: 7 loss is 0.13189279\n", - "... 
...\n", - "epoch: 3 step: 455 loss is 0.26175526\n", - "epoch: 3 step: 456 loss is 0.028598795\n", - "epoch: 3 step: 457 loss is 0.060193256\n", - "epoch: 3 step: 458 loss is 0.04647294\n", - "epoch: 3 step: 459 loss is 0.31234825\n", - "epoch: 3 step: 460 loss is 0.07622443\n", - "epoch: 3 step: 461 loss is 0.04356075\n", - "epoch: 3 step: 462 loss is 0.02148334\n", - "epoch: 3 step: 463 loss is 0.16675451\n", - "epoch: 3 step: 464 loss is 0.017797818\n", - "epoch: 3 step: 465 loss is 0.037047308\n", - "epoch: 3 step: 466 loss is 0.009920539\n", - "epoch: 3 step: 467 loss is 0.16409619\n", - "epoch: 3 step: 468 loss is 0.058633693\n" - ] - } - ], - "source": [ - "if __name__ == \"__main__\":\n", - " context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - " ds_train_path = \"./datasets/MNIST_Data/train/\"\n", - " ds_train = create_dataset(ds_train_path, 32)\n", - "\n", - " net = LeNet5(10)\n", - " net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - " net_opt = nn.Momentum(net.trainable_params(), 0.01, 0.9)\n", - " model = GradientAccumulation(net, net_loss, net_opt)\n", - "\n", - " print(\"============== Starting Training ==============\")\n", - " model.train_process(3, ds_train, mini_steps=4)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本例中采用了累积梯度为`mini_steps=4`,即每训练4个batch的数据,进行一次权重参数的更新。最后在目录中保存了模型的权重参数文件`gradient_accumulate.ckpt`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 验证累积梯度训练出的模型精度\n", - "\n", - "载入累积梯度训练结束后保存的模型参数`gradient_accumulation.ckpt`文件到神经网络LeNet5中,同时将其与损失函数(net_loss),优化器(net_opt)放入MindSpore的模型函数Model中,重新结合成完整计算图,输入验证数据集进行验证。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'Accuracy': 0.96875}\n" - ] - } - ], - "source": [ - "from mindspore.train.serialization import load_checkpoint, load_param_into_net\n", - "from 
mindspore import Model\n", - "from mindspore.nn import Accuracy\n", - "\n", - "\n", - "ds_eval_path = \"./datasets/MNIST_Data/test/\"\n", - "ds_eval_data = create_dataset(ds_eval_path,32)\n", - "\n", - "param_dict = load_checkpoint(\"gradient_accumulation.ckpt\")\n", - "load_param_into_net(net, param_dict)\n", - "model = Model(net, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()})\n", - "\n", - "acc = model.eval(ds_eval_data, dataset_sink_mode=False)\n", - "print(acc)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "经过验证,使用累积梯度训练方法生成的模型精度大于0.95,此方法训练效果可行。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/mindspore_apply_quantization_aware_training.ipynb b/tutorials/notebook/mindspore_apply_quantization_aware_training.ipynb deleted file mode 100644 index e226e4d85c6cde72e2c6c3c9efc6d87de14d19bd..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_apply_quantization_aware_training.ipynb +++ /dev/null @@ -1,633 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "322849d6", - "metadata": {}, - "source": [ - "# 应用感知量化训练\n", - "\n", - "## 背景\n", - "\n", - "越来越多的应用选择在移动设备或者边缘设备上使用深度学习技术。以手机为例,为了提供人性化和智能的服务,现在操作系统和应用都开始集成深度学习功能。而使用该功能,涉及训练或者推理,自然包含大量的模型及权重文件。经典的AlexNet,原始权重文件已经超过了200MB,而最近出现的新模型正往结构更复杂、参数更多的方向发展。由于移动设备、边缘设备的硬件资源有限,需要对模型进行精简,而量化(Quantization)技术就是应对该类问题衍生出的技术之一。\n", - "\n", - "## 概念\n", - "\n", - "\n", - "\n", - 
"量化即以较低的推理精度损失将连续取值(或者大量可能的离散取值)的浮点型模型权重或流经模型的张量数据定点近似(通常为INT8)为有限多个(或较少的)离散值的过程,它是以更少位数的数据类型用于近似表示32位有限范围浮点型数据的过程,而模型的输入输出依然是浮点型。这样的好处是可以减小模型尺寸大小,减少模型内存占用,加快模型推理速度,降低功耗等。\n", - "\n", - "如上所述,与FP32类型相比,FP16、INT8、INT4等低精度数据表达类型所占用空间更小。使用低精度数据表达类型替换高精度数据表达类型,可以大幅降低存储空间和传输时间。而低比特的计算性能也更高,INT8相对比FP32的加速比可达到3倍甚至更高,对于相同的计算,功耗上也有明显优势。\n", - "\n", - "当前业界量化方案主要分为两种:感知量化训练(Quantization Aware Training)和训练后量化(Post-training Quantization)。感知量化训练需要训练数据,在模型准确率上通常表现更好,适用于对模型压缩率和模型准确率要求较高的场景;训练后量化简单易用,只需少量校准数据,适用于追求高易用性和缺乏训练资源的场景。\n", - "\n", - "伪量化节点是指感知量化训练中插入的节点,用以寻找网络数据分布,并反馈损失精度,具体作用如下:\n", - "\n", - "- 找到网络数据的分布,即找到待量化参数的最大值和最小值;\n", - "\n", - "- 模拟量化为低比特时的精度损失,把该损失作用到网络模型中,传递给损失函数,让优化器在训练过程中对该损失值进行优化。" - ] - }, - { - "cell_type": "markdown", - "id": "c9fbf550", - "metadata": {}, - "source": [ - "本文将介绍在MindSpore中如何应用感知量化训练来对模型进行量化,主要流程如下:\n", - "\n", - "1. 数据集和预训练模型的准备。\n", - "\n", - "2. 构建数据预处理函数。\n", - "\n", - "2. 量化网络模型的构建。\n", - "\n", - "3. 量化网络模型的微调训练。\n", - "\n", - "4. 量化网络模型的保存及导出。" - ] - }, - { - "cell_type": "markdown", - "id": "92c98009", - "metadata": {}, - "source": [ - "## 准备工作" - ] - }, - { - "cell_type": "markdown", - "id": "fc1db470", - "metadata": {}, - "source": [ - "### 数据集准备\n", - "\n", - "下载MNIST数据集并将其放置在指定位置。为后续微调所需要用到的数据集做准备。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "c240466b", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte\n", - "!wget -NP ./datasets/MNIST_Data/train 
https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "id": "33171c49", - "metadata": {}, - "source": [ - "### 预训练模型准备\n", - "\n", - "下载预训练好的模型LeNet5网络的模型文件,为后续预训练模型转化为量化模型做准备" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "8187bd94", - "metadata": {}, - "outputs": [], - "source": [ - "!wget https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/models/checkpoint_lenet.ckpt" - ] - }, - { - "cell_type": "markdown", - "id": "4e23e2c2", - "metadata": {}, - "source": [ - "## 构建数据预处理函数\n", - "\n", - "数据预处理函数可以参考[快速入门篇章](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)将微调模型所用的数据集从单张`28*28`大小的图片,处理成`32*32`大小的图片。\n", - "\n", - "将数据集增强为符合网络模型LeNet5训练要求的数据数据--即将6万张大小为`28*28`的数据集,增强为1875个batch,每个batch为32张图片,每张图片大小为`32*32`的数据集。增强后batch数据的张量为`32*1*32*32`。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "f9dc928d", - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype\n", - "\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\"\n", - " create dataset for train or test\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", 
- " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # define map operations\n", - " C_trans = [\n", - " CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR), # Bilinear mode\n", - " CV.Rescale(rescale_nml, shift_nml),\n", - " CV.Rescale(rescale, shift),\n", - " CV.HWC2CHW()\n", - " ]\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # apply map operations on images\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=C_trans, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds" - ] - }, - { - "cell_type": "markdown", - "id": "dc4820af", - "metadata": {}, - "source": [ - "## 构建量化前的融合网络模型\n", - "\n", - "在MindSpore中的量化网络构建主要分为自动量化网络构建和手动量化网络构建,本文将以自动量化网络构建为例,完成感知量化训练。" - ] - }, - { - "cell_type": "markdown", - "id": "bba49089", - "metadata": {}, - "source": [ - "### 自动量化网络\n", - "\n", - "自动量化网络需要分两步执行完成量化。\n", - "\n", - "1. 
构造含有融合算子的网络\n", - "\n", - " 与一般的LeNet5网络构建相比,自动量化网络需要使用到融合算子来构建。主要使用了`nn.Conv2dBnAct`和`nn.DenseBnAct`替换了原来的卷积层和全连接层,这里融合算子将多种操作融合在了一起,会提升运算性。\n", - "\n", - " - `nn.Conv2dBnAct`:融合了2维卷积、Batch Normolization和激活操作,其参数`activatation`中设置`relu`,即在卷积后,自动采用`relu`函数进行激活。\n", - "\n", - " - `nn.DenseBnAct`:融合了全连接、Batch Normolization和激活操作,其参数`activation`中设置`relu`,即在全连接后,自动采用`relu`函数进行激活。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "034a0553", - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "\n", - "class LeNet5(nn.Cell):\n", - " def __init__(self, num_class=10):\n", - " super(LeNet5, self).__init__()\n", - " self.num_class = num_class\n", - "\n", - " self.conv1 = nn.Conv2dBnAct(1, 6, kernel_size=5, pad_mode=\"valid\", activation='relu')\n", - " self.conv2 = nn.Conv2dBnAct(6, 16, kernel_size=5, pad_mode=\"valid\", activation='relu')\n", - "\n", - " self.fc1 = nn.DenseBnAct(16 * 5 * 5, 120, activation='relu')\n", - " self.fc2 = nn.DenseBnAct(120, 84, activation='relu')\n", - " self.fc3 = nn.DenseBnAct(84, self.num_class)\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.conv1(x))\n", - " x = self.max_pool2d(self.conv2(x))\n", - " x = self.flatten(x)\n", - " x = self.fc1(x)\n", - " x = self.fc2(x)\n", - " x = self.fc3(x)\n", - " return x" - ] - }, - { - "cell_type": "markdown", - "id": "2f8bc06e", - "metadata": {}, - "source": [ - "2. 
量化算子融合网络\n", - "\n", - " 在`QuantizationAwareTraining`接口中设置网络量化的参数,然后使用`QuantizationAwareTraining.quantize`接口,将算子融合网络自动插入伪量化节点,完成对模型的量化。\n", - "\n", - " 其中接口`QuantizationAwareTraining`中参数:\n", - "\n", - " - `quant_delay`:推理评估期间量化权重和量化激活数的步骤数。\n", - " - `bn_fold`:使用bn fold算子进行模拟推理的标志位。默认True。\n", - " - `per_channel`:基于层或通道的量化粒度,第一个元素值如果为True,则基于每个通道量化,否则基于层量化。第二个元素值代表数据流必须为False。\n", - " - `symmetric`:量化算法是否对称。第一个元素值如果为True,则基于对称算法,否则基于不对称算法。第二个权重代表数据流设置为False。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "43c7913e", - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore import context\n", - "from mindspore.compression.quant import QuantizationAwareTraining\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "\n", - "network = LeNet5(10)\n", - "quantizer = QuantizationAwareTraining(quant_delay=900,\n", - " bn_fold=False,\n", - " per_channel=[True, False],\n", - " symmetric=[True, False])\n", - "\n", - "quant_network = quantizer.quantize(network)" - ] - }, - { - "cell_type": "markdown", - "id": "9be4fe58", - "metadata": {}, - "source": [ - "> 除了自动量化网络外,还能[手动模式构建量化网络](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_quantization_aware_training.html#id10),而且手动模式构建量化网络的方法由于引入了专门的量化参数[quant_config](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.compression.html?#mindspore.compression.quant.create_quant_config),可以更细粒度的调节模型量化程度,比如量化的类型,指定量化的通道等。并且使用了专门的量化计算节点,在构建网络时就已经插入了伪量化节点,可以不必使用`QuantizationAwareTraining.quantize`接口来进行量化。" - ] - }, - { - "cell_type": "markdown", - "id": "73563f94", - "metadata": {}, - "source": [ - "## 载入预训练模型权重文件\n", - "\n", - "由于预训练文件是未量化的模型文件,而待载入的网络为量化网络,这里需使用专用接口`load_nonquant_param_into_quant_net`来完成预训练模型的载入。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "12bdbcf2", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "init model param conv1.weight with 
checkpoint param conv1.weight\n", - "init model param conv2.weight with checkpoint param conv2.weight\n", - "init model param fc1.weight with checkpoint param fc1.weight\n", - "init model param fc1.bias with checkpoint param fc1.bias\n", - "init model param fc2.weight with checkpoint param fc2.weight\n", - "init model param fc2.bias with checkpoint param fc2.bias\n", - "init model param fc3.weight with checkpoint param fc3.weight\n", - "init model param fc3.bias with checkpoint param fc3.bias\n" - ] - } - ], - "source": [ - "from mindspore import load_checkpoint\n", - "from mindspore.compression.quant import load_nonquant_param_into_quant_net\n", - "\n", - "# load quantization aware network checkpoint\n", - "param_dict = load_checkpoint(\"./checkpoint_lenet.ckpt\")\n", - "load_nonquant_param_into_quant_net(quant_network, param_dict)" - ] - }, - { - "cell_type": "markdown", - "id": "a3cc20cb", - "metadata": {}, - "source": [ - "完成模型的载入和初始化后,其余微调训练,模型保存等操作方式,跟快速入门中的样例一致。" - ] - }, - { - "cell_type": "markdown", - "id": "c909d572", - "metadata": {}, - "source": [ - "## 模型微调\n", - "\n", - "微调过程跟训练过程相差不大,需要先定义损失函数,优化器等超参,然后调用`Model`接口,将量化网络,损失函数,优化器结合成完整的计算网络,然后送入微调用的数据集,完成对模型微调,并将微调后的模型保存出来。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "5d4ebcdb", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 375, loss is 0.03274906\n", - "epoch: 1 step: 750, loss is 0.34685582\n", - "epoch: 1 step: 1125, loss is 0.0022193685\n", - "epoch: 1 step: 1500, loss is 0.15521993\n", - "epoch: 1 step: 1875, loss is 0.05880319\n" - ] - } - ], - "source": [ - "import mindspore\n", - "from mindspore import export, Model\n", - "from mindspore.train.callback import LossMonitor, ModelCheckpoint, CheckpointConfig\n", - "\n", - "lr = 0.01\n", - "momentum = 0.9\n", - "epoch_size = 1\n", - "\n", - "# define fusion network\n", - "net_opt = nn.Momentum(quant_network.trainable_params(), lr, momentum)\n", - "net_loss 
= nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - "model = Model(quant_network, net_loss, net_opt)\n", - "\n", - "config_ckpt = CheckpointConfig(save_checkpoint_steps=epoch_size * 1875,\n", - " keep_checkpoint_max=10)\n", - "ckpoint = ModelCheckpoint(prefix=\"quant_checkpoint_lenet\", config=config_ckpt)\n", - "\n", - "ds_train = create_dataset(\"./datasets/MNIST_Data/train\")\n", - "model.train(epoch_size, ds_train, callbacks=[ckpoint, LossMonitor(375)], dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "id": "19725123", - "metadata": {}, - "source": [ - "## 查看模型大小\n", - "\n", - "对比微调后的量化网络模型权重文件和原本的网络模型权重文件,在大小上的区别。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "8e973c23", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The original model is 482 KB\n", - "After quant the model size is 482 KB\n" - ] - } - ], - "source": [ - "import os \n", - "\n", - "original_model_size = os.path.getsize(\"./checkpoint_lenet.ckpt\")\n", - "quant_model_size = os.path.getsize(\"./quant_checkpoint_lenet-1_1875.ckpt\")\n", - "print(\"The original model is\", original_model_size//1024, \"KB\")\n", - "print(\"After quant the model size is\", quant_model_size//1024, \"KB\")" - ] - }, - { - "cell_type": "markdown", - "id": "cbdc2017", - "metadata": {}, - "source": [ - "先查看量化后的模型中的计算节点。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "0e024ad5", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Original model calculation node number: 24\n", - "{'conv1.weight': Parameter (name=conv1.weight),\n", - " 'conv2.weight': Parameter (name=conv2.weight),\n", - " 'fc1.add.fake_quant_act.maxq': Parameter (name=fc1.add.fake_quant_act.maxq),\n", - " 'fc1.add.fake_quant_act.minq': Parameter (name=fc1.add.fake_quant_act.minq),\n", - " 'fc1.bias': Parameter (name=fc1.bias),\n", - " 'fc1.weight': Parameter 
(name=fc1.weight),\n", - " 'fc2.add.fake_quant_act.maxq': Parameter (name=fc2.add.fake_quant_act.maxq),\n", - " 'fc2.add.fake_quant_act.minq': Parameter (name=fc2.add.fake_quant_act.minq),\n", - " 'fc2.bias': Parameter (name=fc2.bias),\n", - " 'fc2.weight': Parameter (name=fc2.weight),\n", - " 'fc3.add.fake_quant_act.maxq': Parameter (name=fc3.add.fake_quant_act.maxq),\n", - " 'fc3.add.fake_quant_act.minq': Parameter (name=fc3.add.fake_quant_act.minq),\n", - " 'fc3.bias': Parameter (name=fc3.bias),\n", - " 'fc3.weight': Parameter (name=fc3.weight),\n", - " 'learning_rate': Parameter (name=learning_rate),\n", - " 'moments.conv1.weight': Parameter (name=moments.conv1.weight),\n", - " 'moments.conv2.weight': Parameter (name=moments.conv2.weight),\n", - " 'moments.fc1.bias': Parameter (name=moments.fc1.bias),\n", - " 'moments.fc1.weight': Parameter (name=moments.fc1.weight),\n", - " 'moments.fc2.bias': Parameter (name=moments.fc2.bias),\n", - " 'moments.fc2.weight': Parameter (name=moments.fc2.weight),\n", - " 'moments.fc3.bias': Parameter (name=moments.fc3.bias),\n", - " 'moments.fc3.weight': Parameter (name=moments.fc3.weight),\n", - " 'momentum': Parameter (name=momentum)}\n" - ] - } - ], - "source": [ - "import pprint\n", - "\n", - "quant_params = load_checkpoint(\"./quant_checkpoint_lenet-1_1875.ckpt\")\n", - "print(\"Original model calculation node number:\",len(quant_params))\n", - "pprint.pprint(quant_params)" - ] - }, - { - "cell_type": "markdown", - "id": "3fc90d6a", - "metadata": {}, - "source": [ - "再查看未量化的模型网络计算节点。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "cd67743e", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "quant model calculation node number: 18\n", - "{'conv1.weight': Parameter (name=conv1.weight),\n", - " 'conv2.weight': Parameter (name=conv2.weight),\n", - " 'fc1.bias': Parameter (name=fc1.bias),\n", - " 'fc1.weight': Parameter (name=fc1.weight),\n", - " 'fc2.bias': 
Parameter (name=fc2.bias),\n", - " 'fc2.weight': Parameter (name=fc2.weight),\n", - " 'fc3.bias': Parameter (name=fc3.bias),\n", - " 'fc3.weight': Parameter (name=fc3.weight),\n", - " 'learning_rate': Parameter (name=learning_rate),\n", - " 'moments.conv1.weight': Parameter (name=moments.conv1.weight),\n", - " 'moments.conv2.weight': Parameter (name=moments.conv2.weight),\n", - " 'moments.fc1.bias': Parameter (name=moments.fc1.bias),\n", - " 'moments.fc1.weight': Parameter (name=moments.fc1.weight),\n", - " 'moments.fc2.bias': Parameter (name=moments.fc2.bias),\n", - " 'moments.fc2.weight': Parameter (name=moments.fc2.weight),\n", - " 'moments.fc3.bias': Parameter (name=moments.fc3.bias),\n", - " 'moments.fc3.weight': Parameter (name=moments.fc3.weight),\n", - " 'momentum': Parameter (name=momentum)}\n" - ] - } - ], - "source": [ - "no_quant_params = load_checkpoint(\"./checkpoint_lenet.ckpt\")\n", - "print(\"quant model calculation node number:\", len(no_quant_params))\n", - "pprint.pprint(no_quant_params)" - ] - }, - { - "cell_type": "markdown", - "id": "380227a6", - "metadata": {}, - "source": [ - "从上面在量化后和量化前的对比可以看出,模型量化前和量化后的变化:\n", - "\n", - "|模型量化前|模型大小|模型计算节点\n", - "|:---|:---|:---\n", - "|量化前|482 KB| 18\n", - "|量化后|482 KB| 24\n", - "\n", - "量化后的模型大小并未变化,另外模型的计算节点比量化前的计算节点增加了6个,这些增加的计算节点均为全连接层中插入的伪量化节点。\n", - "\n", - "为什么量化后模型并未缩小?\n", - "\n", - "原因是MindSpore中采用了伪量化节点并不是压缩训练网络用的,而是在后续的模型部署部分,在将有伪量化节点的模型文件,转化为用于推理的模型`.ms`文件时才会将插入伪量化节点的float32的存储数据和计算数据转换为int8或者int4类型的数据,从而将部署的网络模型小型化。" - ] - }, - { - "cell_type": "markdown", - "id": "5af8ada3", - "metadata": {}, - "source": [ - "## 导出模型\n", - "\n", - "使用export接口将`.ckpt`的模型文件导出为`.mindir`文件,除了导出`mindir`外,还能将模型导出为`.onnx`和`.air`等推理用的模型文件,详细导出方式可以参考官网的《[保存模型](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html)》。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "64c13a9a", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "from mindspore 
import Tensor, export\n", - "\n", - "# export network\n", - "inputs = Tensor(np.ones([1, 1, 32, 32]), mindspore.float32)\n", - "export(quant_network, inputs, file_name=\"lenet_quant\", file_format='MINDIR', quant_mode='AUTO')" - ] - }, - { - "cell_type": "markdown", - "id": "db99a33a", - "metadata": {}, - "source": [ - "`.mindir`模型文件导出后,查看其大小。" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "751cbbfc", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "mindir file size is 248 KB\n" - ] - } - ], - "source": [ - "import os\n", - "\n", - "mindir_size = os.path.getsize(\"./lenet_quant.mindir\")\n", - "\n", - "print(\"mindir file size is\", mindir_size//1024, \"KB\")" - ] - }, - { - "cell_type": "markdown", - "id": "b7264981", - "metadata": {}, - "source": [ - "> `.mindir`模型文件大小为248KB,比`.ckpt`模型文件小了一半,主要是由于转化为`.mindir`模型文件时,只保留了模型前向传播中用于推理网络,反向传播部分的网络被省略掉导致的。并非量化的原因。" - ] - }, - { - "cell_type": "markdown", - "id": "b9a3efa8", - "metadata": {}, - "source": [ - "## 转化模型\n", - "\n", - "将`.mindir`文件转化为部署推理用的`.ms`文件需要使用到转换工具`MindConvert_Lite`,详情可参考官网《[推理模型转换](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html#)》,模型转换工具会自动识别模型文件中的伪量化节点,完成推理模型的量化,得到最终的`.ms`推理模型文件。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/tutorials/notebook/mindspore_computer_vision_application.ipynb b/tutorials/notebook/mindspore_computer_vision_application.ipynb deleted file mode 100644 index ea97cc149208279ccee2c76df8baef4d609bff9f..0000000000000000000000000000000000000000 --- 
a/tutorials/notebook/mindspore_computer_vision_application.ipynb +++ /dev/null @@ -1,500 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 计算机视觉的应用" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "计算机视觉是当前深度学习研究最广泛、落地最成熟的技术领域,在手机拍照、智能安防、自动驾驶等场景有广泛应用。从2012年AlexNet在ImageNet比赛夺冠以来,深度学习深刻推动了计算机视觉领域的发展,当前最先进的计算机视觉算法几乎都是深度学习相关的。深度神经网络可以逐层提取图像特征,并保持局部不变性,被广泛应用于分类、检测、分割、跟踪、检索、识别、提升、重建等视觉任务中。\n", - "本次体验结合图像分类任务,介绍MindSpore如何应用于计算机视觉场景,如何训练模型,得出一个性能较优的模型。\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 图像分类\n", - "\n", - "图像分类是最基础的计算机视觉应用,属于有监督学习类别。给定一张数字图像,判断图像所属的类别,如猫、狗、飞机、汽车等等。用函数来表示这个过程如下:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```python\n", - "def classify(image):\n", - " label = model(image)\n", - " return label\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "定义的分类函数,以图片数据`image`为输入,通过`model`方法对`image`进行分类,最后返回分类结果。选择合适的`model`是关键。这里的`model`一般指的是深度卷积神经网络,如AlexNet、VGG、GoogLeNet、ResNet等等。 \n", - "下面按照MindSpore的训练数据模型的正常步骤进行,当使用到MindSpore或者图像分类操作时,会增加相应的说明,本次体验的整体流程如下:\n", - "\n", - "1. 数据集的准备,这里使用的是CIFAR-10数据集。\n", - "\n", - "2. 构建一个卷积神经网络,这里使用ResNet-50网络。\n", - "\n", - "3. 定义损失函数和优化器。\n", - "\n", - "4. 调用Model高阶API进行训练和保存模型文件。\n", - "\n", - "5. 
进行模型精度验证。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 本文档适用于GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 训练数据集下载\n", - "\n", - "### 数据集准备" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/cifar-10-batches-bin\n", - "├── readme.html\n", - "├── test\n", - "│   └── test_batch.bin\n", - "└── train\n", - " ├── batches.meta.txt\n", - " ├── data_batch_1.bin\n", - " ├── data_batch_2.bin\n", - " ├── data_batch_3.bin\n", - " ├── data_batch_4.bin\n", - " └── data_batch_5.bin\n", - "\n", - "2 directories, 8 files\n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz\n", - "!mkdir -p datasets\n", - "!tar -xzf cifar-10-binary.tar.gz -C datasets\n", - "!mkdir -p datasets/cifar-10-batches-bin/train datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/test_batch.bin datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/data_batch*.bin datasets/cifar-10-batches-bin/batches.meta.txt datasets/cifar-10-batches-bin/train\n", - "!tree ./datasets/cifar-10-batches-bin" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据处理\n", - "\n", - "数据集处理对于训练非常重要,好的数据集可以有效提高训练精度和效率。在加载数据集前,我们通常会对数据集进行一些处理。这里我们用到了数据增强,数据混洗和批处理。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "数据增强主要是对数据进行归一化和丰富数据样本数量。常见的数据增强方式包括裁剪、翻转、色彩变化等等。MindSpore通过调用`map`方法在图片上执行增强操作。数据混洗和批处理主要是通过数据混洗`shuffle`随机打乱数据的顺序,并按`batch`读取数据,进行模型训练。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "构建`create_dataset`函数,来创建数据集。通过设置` resize_height`、`resize_width`、`rescale`、`shift`参数,定义`map`以及在图片上运用`map`实现数据增强。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", 
- "output_type": "stream", - "text": [ - "The dataset size is: 1562\n", - "The batch tensor is: (32, 3, 224, 224)\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAW0AAADsCAYAAAC/mvfrAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9eZwc2XXfiX5PbLlnZdZehUIV9q0bvTe7m01SFEVRsixaFC1ZtmXJsi2P5438keSnzZY9fpyxRl7mY8947LHn87zpyZIsyRIlSuJOkd3NZu/dQGPfgSrUnlWVe0bGet8fEQkkqguFwtYAqPrhk6jMWG6cuHHjd88995xzRSnFJjaxiU1s4sGAdq8F2MQmNrGJTWwcm6S9iU1sYhMPEDZJexOb2MQmHiBskvYmNrGJTTxA2CTtTWxiE5t4gLBJ2pvYxCY28QDhjpK2iHxGRH5jnf3HReSjd/KaDypE5EdF5Cu3cf5PiMjLd1KmOwER+TUR+ZV7Lcf7CRF5QUR+8jr7xkWkISL6jY7dxFXcz3UqIkpEdr1f11uN91XTVko9pJR64f285p2AiFwSkY/fyTKVUr+plPrEnSzzzzLuVzJUSk0ppbJKqeD9uN6dbKsPcp3ejXf2fsGmeeQ+hIgY91qGTWzi2xUP+vt1y6QtIr8kIjMiUheR0yLyXfEuS0R+Pd5+XESe6jrnSu8Xm1J+T0R+Jz72HRF59DbvZyNybxWRz4pISUSWReTfishOEfl6/HtJRH5TRArx8f8VGAf+OB6S/eJNXu/vi8j5+B5PiMgPxtuvMW/EQ66fEpGzwNmubT8tIhdiuf53EVnzmYnIvxaRyyJSE5G3ReTDXfs+IyK/u85zGRWR34/r5KKI/PRN3N/j8bOri8jvAMmufX9bRM6JyIqI/JGIjHbt+0Tcbqoi8u9E5EUR+cl16usa05uIbIvrxxCR/w34MPBv42f0b+NjPigib8bXeFNEPth1/gsi8isi8kp8zh+LSF/87Gvx8du6jr9uWTF2isgb8bmfE5He1XJep/7+poicFJGyiHxZRCa69t2RtvpnsE5fASaAr4qILyK/Gh//t0RkCvi6iHxURKZXndfNT7qI/HJXvb0tIlvXuNaHJHrvPrqWLHcFSqmb/gB7gcvAaPx7G7AT+AzQBr4P0IF/CrzWdd4l4OPx988AHvBDgAn8PHARMG9Fpg3KrQPvAv8HkCEimA8Bu4DvBhLAAPAS8H+uJfctXPOHgVGiDvJHgCYwAvwE8HLXcQr4KtALpLq2fSPeNg6cAX4y3rf6/L8G9AEG8HPAPJDsqus1n0ss19vAPwYsYAdwAfieDdybBUwCfy9+hj8UP9NfAT4GLAFPxPX6b4CX4vP6gRrw6Vjen4nP+8l16uszwG90XXtbXD9G/PuFTt3Ev3uBMvBj8TX+Svy7r+v4c0Tttgc4Edfvx+Pjfx34LzdR1gzwMFG7+v2OrOvJCfxALMP+uNx/BLxyp9vqn9E6nQE+Edfp2/Hxvx6XlQI+CkyvqqcrdQf8AnCUiOsEeLRLNhU/h+8l4sEP3C3OWvO9u0Ui2gUsxg/D7Nr+GeBrXb8PAPZ1KuUzXEvoGjAHfPiu3Sw8B5Q6D3ud4z4FHFrvRbgNGQ7HDesneC9pf2zVsQr43q7f/xPwp/H3a85f4zpl4NEbPRfgGWBq1bn/oPNy3eBePgLMAtK17RUi0v5PwL/o2p4lIuZtwI8Dr3btk7jx/+Qa1+jU12e4OYL5MeCNVWW9CvxE1/H/sGvfvwS+2PX7k8Dhmyjrn62qX5eIeK8rJ/BF4G+tegdaRFriXWurfxbqtFMPcZ3a8fE7usr7KOuT9m
ngB65Tf4roHZkEHr5ZDrjdzy2ZR5RS54CfjR/6ooj8tlwd+s53HdoCktcbxhC9qJ0yQ2CaSCO4W9gKTCql/O6NIjIU38OMiNSA3yDSBm8bIvLjInJYRCoiUiHSHK5X9uUbbJvkOvUjIj8fD7Or8XV6Vl3nes9lAhjtyBef+8vA0I3vjlFgRsUtuUvGzr7Od5RSDWAZ2BLv6372iujZ32x93Ui2yVXbJuPrd7DQ9d1e43f2Jspa/ZxMbiz3BPCvu+51hagD28IdbKt/FuuUaGT6h1yt09Xl3QhbgfPr7P9Z4HeVUsduosw7glu2aSulfksp9SGiSlLAP7+FYq7YiCSy1Y4RaW53C5eB8TU6kV8luoeDSqk8kalBuvYrbgGxffI/AH+XaGhVAI6tKrsba12n2442zhr1I5H9+heBvwQU4+tU17lONy4DF5VSha5PTin1fRs4dw7YIiLd1xmP/84StY2OjBki881MfN5Y1z6Jf/dy/fpqAumu6wyvkmV13V1z/S7ZZjZwX6uxkbJWPyePyDy0Hi4Df2dV3aeUUq9wh9rqDdrgt22dEpH8p+L73Rfv676fa+5dIvfBgVXl7FznOj8MfEpEfuYG8txx3BJpi8heEfmYiCSIbKU2EN5CUU+KyKfjhvmzgAO8disybRBvEBHGPxORjIgkReR5IAc0gKqIbCGyZ3VjgcjWe7PIEDWUEoCI/A0iLedm8AsiUownQX4G+J01jskBfnwdQ0T+MZDfYPlvAHWJJpZT8QTMwyLy9AbOfTW+7k+LiCkinwY+EO/7b8DfEJHH4nbyq8DrSqlLwOeBgyLyqfjZ/xQRYSS4fn0dBj4ikY9uD9HwtBurn9EXgD0i8lclmlj7EaIh9p9ssF66sZGy/pqIHBCRNPC/Ar+nbuzm9/8A/0BEHgIQkR4R+eF4351qq+u1wcN8m9Yp0ZzJjvi+1lJAzhCNNv+8iJhEtu9E1/7/CPwTEdktER4Rkb6u/bPAdwE/IyL/r43f9u3jVjXtBPDPiHq9eWCQ9z7wjeBzRBMjnQmJTyulvFuU6YaIH/gniWzyU0RD8h8B/heiCbMqEaF8dtWp/xT4R/Hw8udv4noniOx6rxK9AAeBb92k2J8jmkg5HMv2n9Y45svAl4ga4iRRR7qhoWBcJ98PPEY0EbxE1GB7NnCuSzSZ+BNEw9AfIa47pdTXgP+ZaAJpjkhr+cvxviUiTeVfEJlMDgBvEdkU16wvpdRXiTqsI0T1sZoo/jXwQxJ5YfxfSqnl+L5+Lr7GLwLfH1/7prDBsv4r8GvEE8DADT1wlFJ/QDRC/e3Y1HEM+HPxvjvSVtdrg9/mddpDROBlova5+rgq0RzRfyTS7pvEJroY/wr4XeArRB3AfyKawOwuY4qIuP++vJ/BPdeaI98/iMhngF1Kqb92TwR4ACAiCtgdzyF82yI2jU0DP6qU+sa9lmcTm7ifsRlcs4l7AhH5HhEpxKaTXyaysd5N09gmNvFtgU3S3sS9wnNEs/NLRGaATyml7Hsr0iY2cf/jnplHNrGJTWxiEzePTU17E5vYxCYeIGyS9iY2sYlNPEBYN9tV7L1w30Apdd1gkU1Zbx3Xk/VBkROuyvpXnvoMf+d7folAAZqGpoWIXA0hUAghwnpxR6FAqEH31cJQI/R1QiVo0inLI8RHxSWiNEIFhB6f+tWe617gf/yp/7caGh5ibGwrY1vHGBkZJpvtQcQg9BUtu8n5cyf5+te/ymuvvcyF82epV+v4vn+9Im8L69XrB57/S6rddjh17CU8t3LNvl17n+Wv/uj/QG9xEF03qVZrzM/N03ba9PTkGRoeYGi4jy1bhhkeHSSTTRH4Abbt4LZ9lpbKTE7OUK7UCIKA/v5+kqkkwyNDJNMpjp84z6vfeouvfP7XmJ9+Z11Zf+w/vqkEH10FpA0YziZIOSsc+fof8sJnfx2z3WI4ZzA4th1V2EU1PYLdN4Q1uoWe/i2kUwWUZhJoJj6CUhqoAC1o0S
ovUiotEOophrdsJ5tIo6FQgApDQhWiQh+lQk58/Q8594X/e906vV080CkKN7GJ90Ajyk4RKkJNA9GvDReUiLivh2g/KFHRR3W2C0pXoNQVMldKRymN7vdThQoJ1x/ASuDSrlcpzYJn16kszpNIJPGCEKft0mg2WV4uIaHw6MEnefTgEyzOz/Duu28xOzdPELx/femP/fhfx3VdfuPXPQ6/9WU6MXSmmeKZ557n4YMHMI0EYRAyNNTL4GCBer2Obhhkc1lS6SSBCnBdh6RvEIQK13Fpt11838c0TXL5HG3bRqmQZqOJ4zokM1GwooigafoN5TQkxCAgbUIhZZGzhHPHzvD1P/0as9UWAlRdn5X2Jcg0aCZnKWMSJJLktm5nfPdDjG7dSaYwQEJP4osgAvbyIude/BOm3n4RtAStD/9FDn74u9FTGcIwJBQtMldogiiFJrcSY3hzWJe0/94v/i9XviulIi1CQBPZUHz0rcLzPH7/d/8rczOrUxPcO1gJg8JQlvJiDa+9/oO5m3XzfmPvBwvseKRIGAQEQYBogqFrhEohaBB22kaAZhhYqQyabuH7IaIpRAPPcQg8D89t4bZboBSmlULpCXwHQjfE0EHTQ0zTIpMtkE7lSFgJLFKIkyb0EjcWloizNSKSvfIgYiIWYhKIt0FE4IpIu+bK4QpNUygJ4yQ98blapH6rzj+lISKIRGUQdi6yvowSuHitBjXfxa5XWZmfRwTankPLtmm1bNq2QxgK2VwPExPbeO6Z5/jgB5/jpW9+jZdffoVGo72xB3ib2H9gH2Go2LPvIY4e+gZBEF03mytw8JFHsBIGntsmkUjQk8lT6MvQtm08P0BEw7JMUikLXVf4vovj+ChA03Vc18O2WyA6pmWCQKPRoFatkkynsUyLTCaDYdxYt9R1SAkU0jq9GYvFqUm+9sXPM3P5AhA9/2UFKw0PGnMoFgENxMCePUf13BEub9nJ1t0HGd9xAEn0oJsm3vw0zTNvEVSmAJj+1h+wa/9B0hO7CUUICAiVIAhayJVR2N3EurXxS//gf77yXSl1JU5dI+qF7hbaTptDb79+10i7owetR70Jy2LPrh30FLKEKmBoqMjIlgGmpyc5/M4pFuaqOP7aGs+N9aBM/Hd1Ja515kaOWeu4tbZ1GKUBbGAhFYEPfM8WPvoD23HaLYIwwLIskskUIkIykUYjge/7BMrDtJJks/1YyRyeH+A4bXzfod1u4HpV6o157HaNwA0IlYWeyGMZGQxAlzb15gqhEnr7tjA4sJViboC8WcDyC4hfuLG8RC+NrtFFoAoRdYW0ISLuDjqELdJds5FJReho1QriNOZKXf2I1jG3KAijY0UDuUFGBxUEBL6Hq0J8z8NptQhUQNtt02w1aTZtXNdHExMVQq1aZWhohKc+8GH27NtPX1+R3/+9z9F27o65pBu6rqFUENVdV3MyzQTZbA4rYaBUgGkJuhFg6TqZTA9KQbvtIiLk8xlSKYtQhbRaLZTSMPQEYRBQLlcQTce0LMIgpNGoU62k6CkW0TRB0zTCcAPaq25hIJiajhcoTp6b5OLZM6CuPffqMw6ij/KgYeM0FlmYOUfl3Akqe54m37eNTP8wul8nnzJZjs/w67P4S5fJbp1AaQa2pnCIFBhdM9E1gxv22reJ9buwrsYtCHrnZ/cbcBcgG1FXbqFMFQ+MR3sLpLNJphaWrtvwM8kEY4P9DPT3YhrQ39fD1q0jTPT14pTrlBZq3GIeKWAP0f0ZRGN5uUFZ3RW/+tOBxlVS7nw6emenDD3+vAFUNiRpq9liaWmeWn2JRCJFb+8whu8TBCFKCYmEiZgGehhiahoJTUjo0VU9O8SzPXzXIwgdzISBmL20Wgqv7aOLDhjUqzbtVpOllWV8abPSqNJwVhgZ3IJfGCStFzGMwobk7ZC2EoXSVKRMXVNPEYFfW3UqGhV02ru6ahaJyCqqz45mHWnfIahOabFWHhIT9/rtIuiMWuLrBUrhBz
6e6+K6Lo7j4Lo+IgFQ4/LlaUDHSibZuXMHn/qBv8TM1CSvvPYOrnd3h+NBAI7rkSv0YJgJAj/StPM9/aRTSSzLJJ1Kkkon0DRFqEKSiSSmmcC2XYIgIJWySCRNgiBE04RqtYng4PkBum4QKnAcl1arRaPRpMdxCJUQBCG2bRNswJavmSlMzwDfpx4EhKkCfXufY/mciVuegvBGI5MQ/CbO4hnmPINw2EMchd4TYhSHyFtpym6LZMJA98vYixexUj2YmQyhZRKiYSgT3djYiPB2sHGbtlzn+30GyzApZgv4vk+91cANPXR0UrpFM4hiN2p2mzZhZO65Dqr1Bi+88joJQydh6PT3ZHh41wS5bIbL5+dou7fzsnSI2oo/HZLtCHS9vx2ERP3+eqTdMe52thurPhuAgmqlztKyR7mySCaTI5VKoWsgaLTbirYXoBsaBA5aMktgmXiaEw2BlSJ0PbxWgI+BbhXRzDRaNolphKwsVnjrnSnOvDmN7wVI0qdnUNiyo4ly59GViwpaZFMNklZ9A9lQIn7V9BAJFZquEO1aU56COCdyRNKiCUoXfEWs0Sk0XUcQwhDCK40kKkWpjqauxROcAdKxkmuCUsINzZoiiGhXSFspRRiGBPEnVAoVKgLl01YOSlXQdZ1cPkWxmGH7jt389b/+tzBE8dqb79Kw757GHaIRisbYxDayuSKOXSWX6+d7v/eTjG0dIWGZJBMp8rk0okG9WcNx2uiGQS6fxvf9ePQhaKJhmgna7Qr1WhW75ZHO5BBdo1wu02y1CAU8P4zMboGCAHTdvKGctZUqrflF+nIaejHL6MQOvuev/gRzUx/lxBvfYvn8EXynhtdYQbmNSMO+zh17rTrSdsiHilrTp+6nMfJb6XVW2PHIQxjpgLnZExSLI+QTEySSBQIx0JWOrt19h7wbvL3xOPCaLZGt8NoB5lVdRq7Rwt9/dhfR0HRBwuiFJISQACd0r8hYs9tgr9/zBkphux62Gz3chVqTM9OLZAydhhfcso4dIUNUNwmiFMEdEoeIkDtvfTdpd5PHeqTd/btTrh5fp3OtG0/sdGC3HdpuSNP2aLZXCHDpK/TS2zNE0srRqNfwfQ9DEyTrI2FI2kvi+zqT52qcO13Gcw2sTArHF3wc9IRJq+5x4o0pjn39GM6Se6VfSfRblB+3SJs6xZ4WGvM4mYB8foPr4gogsceIpiOado2mrYkQhoogCPA8H8/3aNptytUm1VqVMAzpH+ijt7dIIpFA1w103QA0wvBqB6BiO7YiROsaekaj8fVZ29A1onc7jDuGMJ4z8FFh1AkgkWdClGZeoWkhXmhTqS3T05PjmWe/I1JQCr/FW+8cYna+cl1z3e0gGjloJBIWuhEN/R86+AE++p3fgWWBSEC73UaUwjANSqUypeUS+Z4842NjWJZFq9kmDEI8L2BlpUytWmNluUaj3sZMJMj2ZAiUjx96mJaB47RxWjaiFJl0klQyeUM5v/XFL9E89jrFfIrenTspbtvJ4MgoYxMTFHv7aLU+hu85zFy+xOyZU1QvHcerzqH8Bqufl+s2adk2QctG2SF204DMKGOjY2yb2M5SfZEgVEjBRPwCVpgFMwmhQn8fHK7WJW3baZMw9FgjkCuTNkoponkeQVQIEusaqota7vJk5fXgeA5zy4vXbFOAdwcWw/YUVLw7sah2nqtk2rGBdffQq00fa/0Our53a9cdaKu2dzTsmzM9qTBE13VMS1iuhJTrdZayDSZGWgz2bsVuRS9kYCaoqRaBZ7CyEHDy0AJf+uxJJo/X0fQE6UIfjgOeB0oTAruFX6+iOkNfBXjgzLmca9TIZtIMj/SQy1l4nk/bcTcmcGwWUYQgYazVRspHEAR4rofdbLCyvMzC4iJzi0tMLSwyuTzPgr2Ar3x2FHexZ3wP41vGGJ/YyvDwIMlEAkPiLlUpNCT2GonMbpGhO76VG7y4Snn4fgACQRgQeFEH4vouQeBBGCDKRyOMNVQjIm
3fZ26+hN3y2L//AE998GMMbtnKI698g699+fO8ffgUjdadTZJpio/t1KmuLOK7Dppu4QbwJ1/6ErrhMTExTl/vIMXcAKlUlvmlJRZKixSKDUwjRT6XwW62MC2LcnmF48dO0Kg3sVsutWqDQrGXCWs7KnCo18r4noedzTI02IcKNZJJwTRvrL1Of+k3wL9AGcWFYwm01ADpoR30jm1jaHwHoxPb6B0YYGRsC+1HHqW2tMLcpUnOH3qV6uwxVNiGMASxUJ5O1W4yNTeH0hKowEL0JLrdoHXsBEv+Mlv3bsOykzjlBOlkkWSyh1C5GHIn+GF9rEvaLx06xNahYXoyWVIJC8uy0AzjylBHI9ICouFPgKiIvAXtPRMXm+hGZ5yvdf1dyw7dQVTP19q2O9s6BNExhaiuY7WubVd84dYo//pQfoCFIp+yaDbbzC1Ds6FImBVSyQyGnkJEww0CglZAabbNa185x2ufv0Sr3Bm22zSXurNerm/DD+ptjn5tht6+FCNjY5h5Ey+48RAZwNcCXCNEBSFK8yMdKvBpNxoslRaZn5lj8uI5zk2f4FJjivmgTlO18WgT4gKKmdJJXi/1knt3jINbnuJjzz3Lwb0T5NNJQjOiUj004vauoxRooUIPI5NKoNYfyXi+HU+wBbiuh+M4+H5AqFTUAQYBqEjj9n2PZtNBLQbUWw6aMU0+n8f1Qh597FHGdz7M8NYd7DpwkM9/7r/zja+9zGJpBS+4M7Zup1Vh8sIZzpw6QbvdJgwcDr3xFQ6/9TVEoKevjw8892Geeeoj7Ni5H123SGd7SaRy+IFOyw6o1GwajRLvvPUmh954FRVGNv1ms8XY+DZMS7DtNoffep1qtcLY2BiZjEU224MXNGEjROgvckVjDtuEzcs0LkzTuPgtLhsZrOI46YFhxvcfZPuOnWwdGWLv+Bj7dm1ncuoiuhEQuk0WLy9w9q23aTlNSq0mlvJxPIe+tM92I6RnYQ6bOsaczuTcZcyxBjuKu7CKQhAo9Nscg28E65L2L/yj/42hHTvYMTHKgW3j7Nu+naH+AXoyGYq5LJZlAQpT1xBNi2bRJTaWKFltK9nEFWRW/e62RXdPTnbILeg6rpuY1RplwNXhXrfJpHNuyM1MZWSzJmNjI9QbVZYbMzh+gGPD7KJHMrVAf28/yUSe0DVpVpJ884uTvPW5SfzWjUgjTbR+wtoatFv2OPatGQ482894qojrOBuSd76ywNnLU2QTSRKWSavZZmFmlovnTnHq4hEuNSZZ9mZpsoSDT4hFZDbqQENRxaGC41/mpclznC+9y4d3PsHDe/cwsnMrPb19mF4CtxVgez6gMDWdhG5iGhpKX5+0HacNIpFWads4jkMYKkTTQQlhGEaKexjgeh5BoGi22qilMrqRpG+gnxOmTqNRZdvEBMOjI+zd9xhbtuzgO77j+3j5G1/jtddfY2Z+AbvtokJF2jDJ6Al8L6AetHE3SC4vvfg1Xn/jTd5++w1ajWUAlPLpDFxXFuZ499Bhtm7dxZ79D9HT349ebREqDaUl0MwkYrjMzk/x1lvv8u7bb0ZarQJE0WpV8T2HlfIKx468QRCEhKHD8OgQ6UyeRqONbTc3IGmOaBW9bvu+AhWgvCrO4lGcxWNUzrzC2b4JCkNjbN22k61bd7Jj2w76BvL0aQEXk8cpHTtDRU9ipJLoniIXeBwQg+/qnyC5dxxr4RJvXDjL6UqV3dk97LESiCgMy0C/wbO/E1j37V18+3UWj73LiUyGrw8N0j8xTqGvly0DAzz90MMMDg5CADtHRxgb6ieTTqJLZENWOmh02xM3yfsq0qxtpuiYMToPvkPaHa16tTa9FrqP79bkOwTuczM2bcf3qNo1bNcBDUwLanW4NA1t12Z86xwjQxpOs8ipt8sceWFhA4Td6YiGgTqRC+J7h/WV+QZTk5OYRRs/2JjMb5x6gdmpgMFUH/3JHmy7xnztMgvtSaqqRAubAC++vhCtF9HkaofZMV
sJ0CKgzFTrEp89+hYvndnBrsGH2bfjYXJmH04Nmi0PXyk0SyeVSdFbzNLTk+L72H1dGdvtNmEYeUZ0vmuahqYZKCWx+VHwPI9mq0ngB4gYiG6RyYDy2pRLc7QaZeamJxkcHGJ4dCtDW7byyNMf4uBTT/P9k2c5/NYrHDn0DqXZRSzXpFV2WF4q4wceHv6GaPu/f/a3mb50Cceucr02tzAzyZtvvUa+mOPxp5+nf3CIet2hXGnQtj1AJ5fvY2RkgpNmimp1/kpZFy/Umb58jiDwCIKoAy8vzXPh3EVCpbG4sMDs9HpLNXaQJlqjoL5q+yrPIa9Ja/4ErfmTzB17kUOpPjLFCbZu387TW/rJlxo8MbKNqVyOFjqFhsPjI1t4zNTILi6QHhhBS/YyWw9okyXRu5VEOouha9F8mrGxEeHtYH2VywpBagStCvWZGvVKCYwER40k33rxDSwrheg6W3bu4ImDu/ngI/s5MD5Bf0+BpKZHs/SKq3bF9+D+JXIBTC2aDfbDEEM3GRzoJ5tL0Wg1CcMQQ9cJg4DS0gqtjdpcgUgruJ6JojNZ2JGiW9PuNp2sR4wd0u5Gh7RvTtOu1dqcPT1NqBReEJIwoqIbbVhpQq7poy3UmT5lcOjLZVrzG82uGhIRdZZoUZIGq184rxVSXajj1IskUun3FrGWvN5FTlRLnKrmSFBAQyeghUMN8FBX6tCM/zpE2n6SaGK4I5dJVE8O0MDG5bKzxOzld3lrZpCUjJNR28iqfpTotMTD0wJSlknaMvi5dUjb8zz8eCIUwDCMOOovMq+o2P5u2zZtu42IkLAMkpZJ2jLRggC32cBtNWlXq1RKi8xOX6Z3cJAtE1sZ2jJMKptn1+6DJCXDKTnK+WNnaVQbJA2DfknjODU2Ep4zPXMSx15/lBP6DiePvIFhKYxEigMHn8FpC/MzCxiaTk8hD4Q88cQjaG6Dw++8xvziJI7bJAx9PM+OJ1wjrCxN89ZrJcLQIwxcNraSYYKbC/BWqMDBbcziNuaozx1haN9TPJPp4yP7d1EdHGQRl6GGwxMtRdGu0/JN6mcu4kiDZDLH8PAYg8OjWKaBqQsohX43A1hirH+XhhfVRQCELjRroOn4yqA8vwS2D5bJ/OkznDh0mFcf2s2Hn3qEDz60nx0jo/QWiuSyGXRNj3xfISLwTvndRN6xpKjuDbcGIepzA6JXrmMYWM8yltCEvmSSkUIvW7ZsIZvPo5smrq9YrjRoux4j40OMbxvEShr4no/jupi6wfzkFJfOXmBqboHL1Q0M5fR0JFH3A+6EQotE31UstfK7jtXjfWuF3q02p3SbULrs24YPgbbh6tXRMUXH9T1wwWtAvQy1NmgWDNkwvwxHX1qicrb2Hm+j90ByiN6LCgJQK1x9Qu89L3CgPOeg+waJ95iUrocAsAlR2LjxfdvxxyBqGR1N2+RKkAV+fIzq2p+KP52lUEsEONTDSepcBmYwGUZXRQK0yP7qtaDpsN7KWEHoRRO8GuiagaZpKAVBEHe2cdBN4LnoQuQLnU6SSiZIWjqa8gi8EE3A8wXfbdGqL1FemaRUOk/mTAHPUzSX6yxcnOX4saPMLy2QDIX+dA5DCzasLqXTDm6D1TEq74HTbDA3eYbzJ94hoVnk88NYQD6TYbA3j2g+ifFe9u4a5oMfforLk9MszJWo1KosLs1z5uwRXMfFbbsoVcf3bkYJgtvhC1Dgt+n1fPoSQiEBe8b6cQ1Ff9ujeGmBlfl5At3Gq69Q6EvxHc8/g7lrH/ltE6RMHUOi52Hca+8RvCiAAB8IvejmNC0iWz+MfB0dHZwK9kqJYzPTTF86xzvvbmP7lgn27NzN0w/vY3x4iHQygaWb6Lp21bOkm6gjx97IpqdCbjfPt99VfMd6uhZpC7B1oJcPP36QQjpNLpGht78fXwmzC4ssrFQg9Gk0qpw+tUxpaRLdMHAchyAM2Do6Qt5KMlQsUCot3zBMBoBE7Jvd8ensELQm8chcIF
BRPYsOWvwyh0AgEekGcUYjXaJZbyWgtJi7Y9IOr1RstA8gIdDWNhQQCdDTk2fr1kHsdoPF0gqBZ1NdAnsF/GVINgVvOWTldJ11Hd8B0NCsrViFLTi1CspeYT1BVAAriy4ECn3DWpRONGfQOd4hIuMg3hYSEXDH3OTGxxB/d+N9BaJRQCE+bjk+rtOqmsBFPKbxyHB1nsJlLVPPNfflu6BCJMoBQOiD7wcRaUvstx146JqPkRCSSYNkQjD1EA0v8uNSQfzuCIEXEIQOjhfitMssTJu0Gh7LC1VOnTlHpR0pEj5gtSu0VLghLRvArvPeBi1X9Y3Y5R1Ng4xlE7SmqC2kyegt+vrGGR7OMjzSSyqVwkjoaLqw76EDNOo29VqLWrXOYmmR02dPMD01zdtvHGHq0tso1dqghB00uDqavHnuSIrJkKswApucOBQ1h3QqQ0YpgoKFNT5A264hoc/E2ABPfOg5/C3jVCSDgx5H0Mq9d/nDFfDcLs2uMzTvaCfE26wo4qjcpHK0yusXL/JWLk/vjl28eeYgj+7aSX9PnuH+XnaPjTJU7MUwjIi8FYREIcBarI0H8UTMrUJx7fTW9ZaKTwjsHOjh2WcO0jc0yOnT51lZKNNue7TabVYaLZquhxeGeGFIiELOX8tNh46cJWHouJ6P40f+2zfMP9AJmhKuatCaBuJB0AKnBl4JjBRs3Y0+MozoGkHLQdVcqAcRf/gqtngEsS+aHpcZ29WisfbVSkHAFHA27j2SyefI9w6SVQWUqZGdm0b3FcyCa8PFsxrKbqPsjQxhFaFn49l21BlxA/ufAZgKxEc3b8aVrUPSHZdKHSHytVZ4CCEWCXwMAppctf9nidYnDuLfeWCIyHSSI3pwzfjjEK2t6xBVvhkfZ3Ej0gi86Bw/8K94i0QRpgpN1yJXWgIMPcS0TJIJwdCjVh0pMyEiOqBQIQSBTxC6qMDHa9uo0MB3hfJyiVr76sgvAFphwFo8fD20Vw8cBXQDLAMMPWpivg+ZNPRkWhjBFKoVEjTbBDmPwNMIXINEbpxUMhPZ/tNZcgXFYKjwXQ+nZfP44w8xOTXJxPhW3nh1iAvnz7KyMofr1lCq01muh85Iar0xdRLRBRW814SXVQbptou4DYq9SbK+Q1KyKFOjnTZIbh1B2r00horke1KMjQ3hFnNIS6j4gi+Cprqixu8ibqC+tK6+9JoeaX7BWi9PHOyhPGgG0GoT1GxKLZ8vz87ycrGXXCbH6NgQ3/X04zy5YyeDxSJ9xR5M00A3Yg08DKOQXs+/5dFOQhf8UNGdCG0tOukxDR4pWuyd6EGaS3zp8+9wcr5OoNa/9OrOpO35tL2rM9amrrF315b1hbTiTlCPVRbRou92BarvgH8MWAB3EBI/yMCeMUaGhvADk8VSm+WpKv5CExpO1IMEYWzCijtWic0fYRj3MOHV8a0hUN8gaQuILriBi24qrJRF0jKwfC9an9rRCBsm79UsO2TZMTtcqT0IqwTNntiysz7RZ0Zh50MpEJ96Y35jMl9zzWicJegYCH4UTUARizwhK9SpXeneO+fo8fc6UOKq1p6L/9aJtO4VItLumKM8Iu+FzkTx9RGGASIQBiG+78feIp3Yh+ivJqAZOpZpYBh61KejQAIUGqEKo/PCiPRD5ROGAWEQOd16rqJaq11Tw90+SreMmAIcD5QBlgmpJKQscBptlhcm0bwynlOmYa/QbExTr26n2r+PgcEdZIs96IkkSjdQmoaRgKRpkc/2Usgn2TJc4MkndnPmzBnefudd3j16nLmps1fC568Pj7XncrorQKElcwQt5z32Hg+PRuCSsfowAkhhEfrgpnRcK8SZX0Il0gQDvTTEo+KUCaeXaS162NkhZHALCSt1TV6bu4UbR0ReOdKCbB4aVXC7K7AzcdNprPXIDtu2oFIntMvUSzPUjTRzR4Xzb7/JyEAfu7dt46EDe8nn8/Rks/RmMojm02y7GEaGSn31LPAGb0jXUSogWE
dVT+k6Hz94gCEazJ6fROux0D0hZejYfoi/ATU/mqjUSOgGThBdL2VZjI8Nc+DhA+ufnDRiotaJ464jTbvtgn8CiBdfVy2Y+gb1IcWOwefZu+cRwj0ZzkxUOH5qltp0BewA3DCK/AlVRNxKj/vR2PWSMCbwEDRn42+tAtfzqNcbNBoNZqcbXDqeoDGZBMclIjcf3kMNRSKj1NwahfqIYUBQRbGO/V9gcFxn154CjWaFynx140J3fY8iBkx8DBQhBsIgJm1KtK7pbFygOyjLAaa6yuxEsVrAGJEWfgEor7r+jUccuhFNcHcyBHaIW9MkiuAUiXy/tSgtqYo9gkTTQEIUfuwaGFzJY9KBiOB7PrVqE7t9LdGFSOwUd/tD+BBo+xD4gBdNlVQdKJcCzhllMvkGg8MzjI7m6evrp6+4g6GhffQPjZLu6cNIFzBTPSSTCSwClK/hu5H5Z9tEgeHhPfQNhjj+Agszp6LrrIsmV8fX1zGRKIegWWItJ4AyLqebdT6UepzqhTLJ1BZEd/H1AC0JjrJZbrZZSfZhZ5I056cov/M2lTOLyN4n6P/oJ0iM7KSTWOxuYuPTra6NeFmwcih3da/X7eEA4IKqQ6sBrXZkqzY0lOtSngkpi8YpK8mX8/3omQKJbIqenhxmUqjXm2iSY/Hc6Vu6oaZ7/acrQELX+N7nnuKJfTs4/NILHFsJ2JtL8MM/+HFKTbg0s8LkzCwXZ6ep2U3CNQhcgPGePMPFXkzdoGk7YFjkenK4ymF28vL6QqaTUVYjLZ5gFAHNAH0LNA9C41JUhwRQfpfmq2VOa20yhsbE2E4e21EgnRvjnUKSykILVffBCcDvfIhMJ0Fn8jK+ru8Ts8GG69Npakyecjl1tMSFIzaVC0LY6IlrYS2vAgU0ENFQa+Z3aCFhHVQdtQ7BiQnpnoBGa5FQLFzn5iP9NIQCWVLaMGL2UfFqiKpQSKapu230oNw1DugEH3Vcx3QgizAC0g+SREig6wlSRhoVLNNymwRU2Zh3QxdUlFhJ0wTTNNE07UoCKcPQEdEIjIAwiMLYfU+hdA3TjIKjVJwG19Q72TbVlRSygQpxnICW3XpPdjwPdQNr+83DAzQfEiFUbKh1RqpLHuenVtg6usLExBS57AmyqVdIp3tBz+OoPFpqgHxPkbQlWFoWuxWSSOgMj6TJ5XWSlsbAQHGD2ms8YX/dDqkTowDo/RA2IqUoRohixq7SrIRIXqN87CzFh8ZxNZ2VyjL1hMair1j2FEZqiMlLJ1g5eYHU7CxJWiS39jLUP8z74KZ9c4sgqHqJjalpPtHQMUbItUZmFRA6TdqlJpRmaWqwohGNCYN4Ui28veaV06Df0vADQZk6Td9DE2FssJcnP/AoyrNZaNlUAFtPsOOhR/nE4x+iTYJaucHxd97m9Ze/wfnzZ5hfWaHUaBCoaIWSoUwC5bU5cXmKUMWmV93AWjYRDdo3ykqW0iPS7mjaItH3niIkvxPOOlB/CVQtqsv6RZZf/F2+cfIoPRN72PX0c/Rtf5Rt4z3MJNOsLLv4LR8cB1w3mkAOwthcEmvdYQiBHmnf+sYf+/HXKhx+eYnKVJvItNjx9V7v+bRR6nrDWZ/Qm2btptfx+2mTLIQMjgmGFRCiyCcKG5K3W8fSUGw1sjx38CNs2fUIl+dXaK7MUzRaLC0OklyZYS40aUo/gfRj6EWs1ACGmSNNhqSZRLdMMEyUYaJZOpmUSdYCt1ll9lIfs0tJmupdoE0qvqsbtdxaoxZFFOsaiUSCXE+UiCvwfJrNZmzuEHzfw3Uj90BBYVg6CcvAskyy2QxWIomuRe60nufj+z6+F+B5HoHv3ca03M3BA9ohV2YHOnB9mJyGhBni97doWy0sfQE7yDC7YlKq6AS+YOqKXLaIXbfRNZfdu4oMjmTpGxqmZdc3qLyGRKOgTp6STqCNRjRKiud7VA0CFyO1ncBbQPnLV6S+EJb52tE/5fsefxpbU6hpDyPox3c8Lk3P8f
rJU9RSfTz5Q9sZ2/4YOywTOf0GK6WLqOmTBAeew7j7ivatrFxzp5uAdzVH0m0b3K6iHULLCREFLT+goUCh0MsNmo6HoJHIJhnv0RndOkq2b5hs3wDFTB/jO5Ps3beXD3/4Gc6fOsGhI8d4+c1DnD5/Gtdu0JPLUS5XqXeHCoduNGm7EeRiTVuP7c+6BqYeE/cYFH4YZnfDxS9D+2xUR3YJf3KF5ZnDlE+/QWrnU6R2PIbVu5dczyCtfAbP9QhtJ3pbAnVVAQyJSNxX4OnRDNIGsXi6GZldOhOZCNFM/e20g8jNT6QH3bDwvTLRC5YmnT+IHy7Qt7XE6HiC0XETkRzpzNCGSr6WtGEorfGBR0bYtmeA6lKC0B0kZShqKztYXKlRN3qpmSMsVIRa2abq+NiBhuYJGj6+2LQ9hdPWaCmfOgrDMEmmU6T6dzGqhMpygKMOkSDAYu3xRzfaroOVSJDQDZLpJDv37KJQKHLq6DEuX7pM23Fpez71pkfLDQnCSIvWBAxDIg20N83Y6CC9PT1YmkkoPu3AxfVcPN8nDH0sQyMZhGzUc/5WERIR9lrjDT+EhQVIJSBIgqF76EmTfGGIRrvF7OXLBJ7HkrEIQUgmCZcvz7NS0eldKlCxtSgfy4bQ7bnT8fLJgVaEhIGe0AkqDaCGVnwIEnvxp45DMAMo2nh83Vlh69QMBx/dQbPWIOXo0JOmWqlxaeYCbWuBp2uzjO14Clftpl6ZgZVJnOUS5bkZ7EblNmvzxri/lhvreKjdAXjAQuxc0W3haLg+NdvFshIYuV4GdIPi8Bh12+PixUkyvQ6pTI6EDj2D/Tza8zTbH36IZ777ezhy5Ahf+eM/YPLCeVZuYbjegdGfQdMlmnzVdHRdRzeijHQAYW+GcKSAMzqCd+EV1MIpaC9A2AS/SbhwkmZpkubRbyJ9j8GO59An9pPq6cXMplAYeK5H4AdRcn4EFQqhr1AtRXgz6sA1S1utNoNdhcUIWbZQ5TTBe6LSVsMHmigFvpdE9EFU0AStDyvdD0qotcrML7SZ2Jsh35Ol2DuwIXE7Bg6XyNdDo47mztKYbuAsLaBrgp9MkhHFtqEkbjqkKg5jWUXLXOTYyZOcLNVZ8izQDay0Qcv1KTVLeGEdRQLEwTC2k8vuQXd1lOojRRaL6jWhUdeDYVoMDg2Tz+dIppJsmZjAbjR55/BJTl9ciZx+WPtVcHxFsx1QrteZnW8yMZRjbHQAI2ERBoLnhvh+gBiKZEpHEeI44c0acG4a65VfaYJtg25Gnqop3aKQzWOIT0JCmm1QfoimwNTAd0IqdsjCfIlyA/wNZTDoIXryy1wd1kemOsIm2BC0O6NEwa03IzdY4yHQHgc9A6pCKVzia80kWrPAlnyapi8krDxbHjvIjpUzXJgs4cyewh8fp9L0mG1rzC67OCtzeNsv0ap3JqfvHu4v0r4LWF19fhCyUq2TK5i4WhIXh5oTMj23wNFzUwRiksxk2bd7OwnxadUrDI2O8chjD9NTLHD+1EkOHTvGDedF1sGeiRymLui6jqbrGLoef49JO1QoyePvHcB++iDNpXnqs1M0Fi7hVhcInSZRKwzBraFmT+C7ZcK+fsyhEUaGx+gZKgI6vqfwQoUXani+wm+EzCbM62T8uHXkKPLxLZ/iUPVlzjS+zI0bbmd/GzEzJPLjBIFOvVYjxEPVQ0690WZkzGFo6xLVlguP31iOyFFPaBN5W5Say7zy1jfY2ZtGGtOo0EU0i4SuoacznG4kudDO8oG9B3l4MKB66gVOtyqsqCJtMmSdFLaq4qhJrky4qxDPO0W9UsdUWUwcMhRI00bDI3mDey8U+3j44UfI9+Tj5FAaJ46f5uzkMv4G2TVUUGuHnJyqslxuMTxYIJFK4XsagRfl4zSMeHGGjRV51xAosN3I8BVq4AQ+2GV0v0Q+EWAAbSfyb7Db0Yii1oSWdzP01yJ6PquVqS4lQ3V9qZ+A+jnAjOYsjEdg5GHCcp13a2
eZeWeRkVyWlAroLbn0788iAzvob4GHRqPewAtByw4iI7sJ2h61pk1lZek2ampj+LYn7dXQRPBcl3qjyeJKDa/VYtB2uTx1mSNHj1Nv2hR6iyxP7aS+PM/C3Aw7d+/h6Q99J0qsaL3D8PbSLz6yu4ilGVhGTNiGjm5oEXFLPMDXQDQd0ScgPEjbtqlUVqhUlmm3W/i4+J6L3wrxnIC259D0XDx3lkQjZOtoht6+IZQv2F4YOZn44NSElVTyjpN2lUnaZpmx3D7ONl5G0djgmYrQmSPQUqCnCdr1aJIIn4UTIYdHKuxR4OszGyotl9rKtp6HcD0P5ddoNs7z1fMn8CsW40knWpghBF+D2hz86QqcDXR0qfP0+DZGC8v0GXWKXpsmFmZoENCijRO/8x1WrRGq84TkAQdLH6S/ZzupZBpNrf9aZTMFtoxtI5vNsby8xPmz53nt9SO4G2XsLvgKZmsey80l8imTvkKaZMLExCQIgigG7qZLvfNwg2gVHAnBcWv4gUN1scZ8KfZCiZ2bBCjmo3n1m5N7LcLuRicuoDPX0gmkAlQZ7HkotSD/LEFjgIXGaRYa5wEPvZTEOp/GVMsklbBw6DjmqYu4no8fOFimwfDWHQTxfNjdxn1F2kaqiJFMEvoubqN849jZm4AA/fk0B3Zto5jP0vICFIpQNNrtNm+/8Rqnz15AC3yahRxLF04wv1Kh1vZ458gpjh4+wsMPP8b85UtrepTcDHZtKZDUTRK6YBo6hilYpoFlGLGblwJNEYaxDzcamhRRsgXX9Wk7fuSNHPg4TpQjutVusFwps7hYxm6B6SnSomFYGkagEbh+ZGAMbiYx68bh0+SV2c8zpD+BSS/uhkkbUA281imQAqgskX+0jV9VXD7p07/LQM9urC309T/MQ0/8OOgZlL3CqSOf5fDM55mttRkSSHYyBACNJlQC8Ag4NX+e+UUdKxFQTMGw16ZJm4DI09vApEEGgx5cdDw0ApYImcRFMPXtbNv2XYztfJZQetevK1/huSGpVBaNCgtzJVZWNurSuDacQFFquDiez8hAhmQiga5bhOHdtmhvDLYTuQcaLlE4uGczvwzVVeIZloGRMaFpX+kfb28yVQd9EPRt4E4D09cprQ6NVyA7AmO74PJSbOuuEgQ+diXAJqRGkhWnje+sXClHNIP5UpV87zKVpY3GE9w67h/SFpM9j3+Qhx7aQyJp8oXf+Q1WFmfvWPFjxTQfeGQvuWIvF86fI5FM0VfIYycs5ufnOHL2PF6oSAOUyjS4GuzcbjnYJ86i2zaLlff6Ft9so+rNJOlLJ0haGpoRRVFZuoZpmCjRCAnwfA/HA9NKXFnySiwdw4rC3zXRozX0JMTTQhKJFMlUgXxmjOUVh3LTY3K6jAqg1lQs110cJ0CcBu3mzYYIbwwl9zRVqRKKewtvmQtqkajWW4ACSaKHg4TNkFxh8QbnRzACRY4kiXQ/kshTH32U2cphqq3LLFcUBTNyqvH0KEVIJyZzqdnk+IkzDEibRS96ta8m+lToWFgUgRwaOpF31DIKDwWU/TkaZpHU8MMobX3SbjbqzM/NMDw8RE+hyPYdO3lipcypU6cp11o027e+MlLNCfHm6+TTbVQo2Hc/J/+G0HYiTduM473a7SgsYTUyhX60hEmousj1pllbQDJIdozU9sfIbHuU2uwiztv/4b3RcdegDtVXYPcuqOyEyuVo2zVRlmmS2SGaXgMVRsZ2FfpUSheolC5xd1Sia3ELpL3axWN17ua1NKL1fCfjMGNNsLSA4Z48j3/gOQ5/6+VbJm09Xnw1vPIb+op53jxyhoWajRuEZBImW/qKFHsKLCyV8GJijNINXc0y0YEdhJy+cJn6Wj77Nymf5wpWwSSTtQgFAj/E9UMcH9pBQLXpUqk2cfyQdDZA14RQBWiGFiXs8oUw8PDcALcteF5IqCtsV1GrBlTqPotVm4rtUq+71OshdtMlDEBUi9C+W9qXwlVrBdTcDGrxX5
NkZi99PRP4tctoAxvraPzqLM6ZN5FML2LqZPEZHdpN+eIsNd8nb0Qej5oWBYcOIJQQWmHI2akmdhLetSNd/1pEuUauh1DZuK4NTp0wsIkCjNZGpbLIqRNHUWFIsTjAQP8Qjz/+JLlckoWFGY6cmKR+w/S214ftg1270x7Zt4cwjEwknRVRfX/tVDUaPrWVFmHXBPjGB7YG6D1gjcPQ0+Sfeo6nnzpAwXT55u/+exZumM8kBG8GVk5DagQqGa6yQed5LNJY8a5jBQhZf0r2zuAWSLtT7cR/OwQQZ64jTix1jZdoJ+tc54Y6vVZnrt1DBR4n3noV13YZ3nsQsaybF60joW4QhCFubHsOFBy9OH+NI0TD8bg4V0LsFl6cVrWTuWD1zH2nK5q7YUKkjeHw+RUmF+qYpo5C8IOoQfuhwg0DqnWHhcUmthugGTqmqWMaGikrQTaVJJUyCHxFo9bCceM8LRp4QRAFVjghVduj2nBpV21CJ4TAQzd0EomQti73hZ1zbUTh75rRx9DwXjRPY/LUCmbPxqZ+xW/A8iT+8hSBFuAnQ0I3ZCpOE6x7MKDH+WE0KBgmRUky59U424TQ4WYMO1eglMvi9LscDaFSqwL/6rrHLswvsbRU5ciRU3ge9PQUKRbyuF6D5UoF7y6s9Xiv4bqwWIJmBiwL3Ot4hJQXb3EiTxvBKOwit/sJvMIWGq5JrWrz2psnUTMnab7zVTZEqH4bpk6A6YHUQLVXneeDem+X/n7iFkj7ev43t9LUry3LbVe5cP4ks9XqbXln2P57tYxgjfcgUIqlcoNO/6uIBuer46o6Hp93Cl97cwrVDnGbNoHvo0JQoqOlLBK9aRIpg2rFwZ5vRGyuFJLUMdJJDMvCSpqYlolda+BW24QBUV4YnSiJvguhp+LoRyBrkR3MMTaUY7QHDp3KUl65gzd0x2BiWGME3jKhX2Fh+m2gjW/OsuWhng2VoKkQw3XQgsjHutVsctmZok7ACaDkwYQXpYaqAJ4WoOvRqOw0sOjf2M96bThMl77I3NILBMphPdLeMrGHVDpNvVbn5KlzzC1XyWdThKFHpe7cVrK0+xVhGC2eUbu17BQ3hDHxSQYH82R6cjRdF3ulQdDwaLTrsPQqeO/tDMSIk4j5HUUTwIs8SzhLxGl3X3O+Wdw/Nm1AS6Tp33+QdLEH/8bJBm4bAZFXp+r63f23G3fyPSq9NhVl5gv8q5n4NA1Mk2bChJwVZeKp+VCPkkKppI5rNHADaGkClh7l/6gLeLH/qfhXBzZiQNqCnIUkNAbzCQ5syzNchDPZxHsyZtwPED3D0JZtlJcsWvWztNvRiiWGBfn+jTVVLXTRnToEPkHQpBHM0YxzoCii7CLLXLVSamGAFkZMEsb7bhWKNv51I0Gvondgguc/9DzNVgOlfZFMJs2Bh/byzpuv8uZbx29DgrsAA25Lg3qf4DeKzHsK5qPFolVoRhmtKhVwZlk9dh7etpPdBx9icWGJs8dOErbrXVHYG1ne7N5h3Tehd3iERr2O77ZRYSdQI7ZqG4KmR+khDVPH1HVEM9B1E8tMoGtCELQjLVHAc10CFURpV4kivETTyeQKZAtFegdG2fX0d7H3yaeZ2DLKH78P2bLgxmS81hzIbYcGLzaiyI++VOTO0HajxE8tD5b9KOGTZUZk3vF9MmNPkiCuFy16Hvh6lJJVBVxZAFUEDDOSUhNUMkG16TG9WKHZCGk2bk2XvNtQysVMpNn7yHPMzfRSmj9F4FdJDIf4iY11MzoBJjYq9EmGDsPoHCTDeRr4ccRitEDC1RRDd4KTNCCJkEYneYNx2aXJEs9+KIEf2Ixv380zzz7N+PgwZ88ef3/izm8GDwBhA5AdJPS0yNHbXgbPjpQib57VSb36R7fx5z71F8nmk3z1i19CeW0kkUXZ96Mq816sS9o/9Df/JksrVdqVBdzGMu1GlbbdIFQ+iUyOVDqL50dZ3iQIQQxEDFJmgnTSAl1IZ7IUi30Efs
hypUypUqbVbuD5Hol0kZEdB+kd28bgwDBjuw/QWyyi+zflVX9DpA2Tnkya+Wr1portRNe1uFb7vm3RvGY0G+bFuUdsF5peRNxOAH68zJXEWriSq7m3VWy8kdjyrjqpWLtm2jUB3wMv/vg+y16L6mUdwwxxS7Xrinb30VktZo1hZ2izMH8Gs0cjP56hne6hulyjHcDJcxtzg9C1BIZZIFQeSdIkwzw5fYid0kKUi6FCPNWiFTZoKJ8yiss4lPDiVSK1a+b/o2SfnUltdWUa3kQjiUGaBHktSZ+kKWoZkpLAvEFMpOsKiwsrlJZmGR4d5eCjjxD4rdtKSfxnHgNJKAm04tWKAi82iVyC7iUfRGffwcfJ5XO88drLnD9+FOW1Nhp2eV9gXdIefegDFFst/EYZv1XFs+u47SZh4GMYJolElD/WCwN8x8P3AzzXxdJ1crk8WiJFMpWlf2AYXTdZKJVIryzTtJt4vkcy28vwjr3kh0bI53JYmWxcnlo3terNwpLoVbvZEjtJZ+/4e9Qqg2NGWRBRV/OChBIN6a6gM4ELa0+PXu9vnIQKB9o2NOpQUviGhm8K3CWXv41hvVwFCrtynvOHL6P3CfS5qGxAYMPC5MZKdyRFPVEE3UdChRaGoPnkjBANHzMMQNm4YTNagMDX2RHYlFUNHSEhBoZIvCS1EKgQX4UoAjQJrhC6JRYJLUPSyJC0UliGiS46oKPfIMORiMbFi5cIghaPPrafwaF+pqfOI/ef+fTBwdxxWLLAaUHWhJ4krPgQXmvwspJZhkZHuXx5mtMnT0YLcgBXlpd/ALAuaTeNLG1DcI0Qz9DwEwnELGLoOuhCaCRIWBbJhIUmOiqMFiQ1TZNkMk0YE4ibSoEIWtGgJ91LNvQJFVipLP1DQ+R6C5hJC90wEN1AmSHqJtKH3ggVz6VSvfkYwM7E5B1HWIPQiG3R3S6UBpHdpENsa5H26lWE6Dq/47HTFUHih+B3jteilWDCOx0PeTO40Xg7JHTahA5YGTDT4Nsb7zgvuJcpx1FpUdZShUIhEunMmgoRAsJ47KSjRzm3VRSOJ51BjEg0kIErCxRosSKh4h0qFFSgRYOi+BlEdC/8f9aR8dKlw8zOHsc0deZmL/LyN19kZXmJ08dPbyrat4rLbxMFymskkrsxevM0a+o9bipGKsPc3DylxRlcP8BIpfBbsRIjcjP+hfcM65K2nkyQMrRolQ0UoaZBGKB0g9Aw8UyLUDfQTR3TtDANK1qJRtfxlBYFhQgEmqB8nzCVwbKShGGApusk0hkSmQzJdBoxJDLRqjBeueP9sWnfG7SJXCcN3uv3Hs0TXLvKZefTvSqLvmpfxzFxteYNV3NFq8hB9j6cEe+GFGDgg8LYdqHdCqlWopVSNgJbtbD9ezmSuDE8t3klIWSlvMy7h965twJ9W8AHloAEPXmTVNagGbZZ7VagROPQ22/RrixhJI1rncVFeyA07nVJO5lIEugGoR8SBiG6YcYTixroBnoigZhmFMiiGyjDQIlOiCCGjmhRDtsQBboPvo/yPEJf0HQTw0qiJ0zE1BFdiy2BISoMUHcwhP3+Q/dCsx3C1eNPty+7AZKK06OFoFwiwg+5Stoma0dhdbTrDnnHuYXVdezJ9xHMLbDrMZPxkTR2vU61GtB+cEyOf6Zh6PECSu97E1shcuLUaZZN2s0kBBdZnY/Et6OV3pXn4HlwzbtwmzmF3i+sr2nrADqGaWEl04imE/geKIVh6BimgWaZUSiNCLom8Vp2IJpCN0HT9CjBkmFFBSoQETTdQIkguhYtaEp8Xhjiex7qDgWy3J+ocjWUJ+SqJmxw7SNJRF4gGQvaQbzKapWr2oMQaewdAl+NDpmr+JjOsfe3S4BbhpkZl0LGI6EL6TRYiRuft4l7D/+e8d7VaNXm8vUDdLzW7eV4uR8g6gGw4WxiE5vYxCYivA+L42xiE5vYxCbuFDZJexOb2MQmHiBskvYmNrGJTTxA2CTtTWxiE5
t4gHDLpC0ie0XksIjUReSn76RQdxoPkqxrQUQ+IyK/sc7+4yLy0fdPoivXfWDrVUR+TUR+5V7LsRoicklEPr7G9g+LyOk7UdYtyPSjIvKV2zj/J0Tk5duV4zau/8C207VwO1n+fhH4hlLqsTsky93EgyTrTUMp9dA9uvS3db3eT1BKfRPYe4+u/ZvAb96La98hfFu109sxj0wAa+aRFJH1M+a8/3iQZH2QsFmv9wFE5J6lWL6X174JfFu101sibRH5OvCdwL8VkYaI/JaI/HsR+YKINIHvFJH9IvKCiFTi4ftf6Dq/T0T+WERqIvKmiPzK3Ro+PUiyxtf7JRGZiYdyp0Xku+Jdloj8erz9uIg81XXOlWFwbEr5PRH5nfjYd0Tk0bsg54NWr4/HdVEXkd8Bkl37/raInBORFRH5IxEZ7dr3ifg5VEXk34nIiyLyk3dLzhhPi8gJESmLyH8RkaSIfFREprvkuhS3lSNAU0QMEfkxEZkUkWUR+Yc3e1ER+fsicj6uoxMi8oPx9mvMGyKiROSnROQs0WoBnW0/LSIXRGRJRP53kbUzZ4nIvxaRy/Gzf1tEPty17zMi8rvrtPVREfl9ESmJyEW5gbnjQWunG4JS6pY+wAvAT8bff40oVO95oo4gB5wDfpkoDO9jRCtk7o2P/+34kwYOAJeBl29Vlm8XWYmGv5eB0fj3NmAn8Bmi+PXvIwpp/KfAa13nXQI+Hn//DFHs7g8Rxbj/PFG4mPlnuF4tYBL4e3Gd/FBcR78Sy7UEPEGUrevfAC/F5/UTLVr5aSJT4s/E5/3kXWyrl4BjwFagF/hWLOdHgelVxx2Oj0vFddgAPhLfx78iCn39+E1c+4eB0fj5/QhRyvER4Ce6nw1RiO1XY/lSXdu+EW8bB850tY3V5/81oC+u058D5oFkV/tds63Hcr0N/OP4me4ALgDf8+3QTjf8nO7gC/vrXfs+HD8IrWvbf4sfiB43/L1d+37lblbEgyIrsItocZWP00WysSxf6/p9ALC7fl/iWtLuJnQNmAM+/Ge4Xj8CzBJHAMfbXomv+Z+Af9G1PRvLtg34ceDVrn0Sv7R3m7T/x67f3wecZ23S/ptdv/8x8NtdvzNEazxsmLTXkOUw8AOsTdofW3WsAr636/f/BPxp/P2a89e4Thl49EZtHXgGmFp17j8A/su3Qzvd6OdOuvxd7vo+ClxW12Z9mgS2AANEPezl65z7fuC+lFUpdQ74WaIGsygiv901VJ/vOrQFJOX69sQrMsb3NU10n3cb92W9xrLMqPit65Kls+9Ktm6lVINo1bEt8b7uulREdXm30V0Xk1z/2b2nvjs/lFJNbnL1NBH5cYm8LCoiUgEeJhpt3Ojaa227rtwi8vMicjI2OVWAnlXXuV5bnwBGO/LF5/4yMHTju7uunPdTO90Q7iRpd78Qs8DWVTatcWAGKBEN28a69m29g3JsBPetrEqp31JKfYiogSrgn99CMVdkjO9rjOg+7zbu13qdA7aIXJPvd7xLzonORhHJEA3dZ+Lzxrr2CdfKfLfQXRfjXP/Zddf3HNc+9zTRfWwIIjIB/Afg7wJ9SqkCkZnmejmS10padEO5Y/v1LwJ/CSjG16muc51uXAYuKqUKXZ+cUur7NnDu9WS/n9rphnC3gmteJ+ohf1FETIl8iD9JNHwLgM8CnxGRtIjsIxqG3ivcN7JK5E/6MRFJENn1bG4tj+qTIvLpWDv5WaIFxl+7c5JuCPdNvQKvEr18Px3L8mngA/G+/wb8DRF5LK73XwVeV0pdAj4PHBSRT8V1+VPA8F2Us4OfEpExEekF/iHwOxs45/eA7xeRD4mIBfyv3Nz7nSEisxKAiPwNIk37ZvALIlIUka1E9v+15M4RPYsSYIjIPwbyGyz/DaAeT8CmREQXkYdF5OmblLMb91M73RDuCmkrpVyiG/9zRJM8/w74caXUqfiQv0s0JJoH/ivRi3NPMibfZ7ImgH8WyzEPDBLZ7G4Wny
OaSCoDPwZ8Wim1wWUE7gzup3qNZfk0kW11hahuPhvv+xrwPwO/T6St7gT+crxviWhy7l8QmRoOAG/dLTm78FvAV4gm2c4T2VHXhVLqOFGn8ltE91HmJkw5SqkTwL8k6uAWgINEk6A3g88RTRQeJurw/tMax3wZ+BLRROUkkXKyIZNDTKLfDzxGNLm+BPxHonZ0S7if2ulGcV+kZhWRfw4MK6X++r2W5Ua432UVkc8Au5RSf+1ey3IzuN/rFa6YmqaBH1VKfeNey3M/QUQUsDuel/m2xf3QTu9J7hER2Scij0iEDwB/C/iDeyHLjfAgyfog4UGpVxH5HhEpxKaTXyayvb7fpqZN3CPcj+30XkUz5YiGGaNEQ7F/STS0uh/xIMn6IOFBqdfniEwOFnAC+JRSyr63Im3ifcR9107vC/PIJjaxiU1sYmPYTM26iU1sYhMPEDZJexOb2MQmHiCsa9OOZ4TvGyilruuAvxFZTXLsTG1nun2KhnLvrHCrcLuyvhc5oghrnygNQj+Rt1SJKDah+36uFj+AzjZCplAs3KSsC9Vu29mNYx8UihCFAjQFyvdoNcqcOf0Of/Df/2/+4Pe+wnL12pXgi1mdZz+wj49+9/fznR//NCNj+zGsTKROCAhy5dpDPdcXYv/T48r3Qjw7QClBi9URTRd0SycMQ0QXrIRO4IV47YAwUGi6YCYNDFNAFJ7j06y66LqOmTBpNzz8IMTImGg6JCwN0xQIodDTzyf//F/lEx//NL/6T/4Jn/3dX7thncKD+149+/x3k0ymefVbf8rzz3+cn/m5X+Dhxx4lkUigiYYmArKRGJlbx/XawINUp7eLOz4RqRM5GwfcY2fGNRDQJGdV2OULJ7xIRo0oucC9hImOJoKnAsI1A80gklIjchkdJQrM0oly23TcustE+Y2uEniJgCXWDl/bCJRSyJov4tolKiKK9ZwWs5On+OYLf8SXvvhHHD12kpWa/57jy42AV145zvzsHJWVGb7vB/4Htu16Aj2RAjYWJgdw6tDl6OJrhSIJoIFkIJEEpwaqfe0+tFj44Lq3FlV35wYVWOlZ9u97hI98+HvWOenbB6l0GsuwcNoNXnrxCwSBz8/8wi/x6OOPkUwkUaLFz+vuEvefddw2aScEhlIJgjDEDwJ6LY2hjIHyQ6ZrLlP+vSfFDkJC2vYs6SAgD+zJaOwfTPLmQptjrfCWQg/vlFy60tBZL/yxTRRxqxOR9ApRTqMMUZqEofiYKaK4hfaVM2+LTjonr/keRjuVkogvVYhSEAYOl84f5vd/+9/zhT/5IhcnywSrbsyIi/SBWhvOnV/hc3/whwQYfPIHTbbvegzdSGzcgBfc4B4U5HIGhZ4M83Ydtx1eJezgBudf5xpeK2CptIzj3C8t/O4inUqRzxUAwfddvvXyVxAdfu6XfpmDjzxKwkpu8vX7gNsi7aym8dz2UZ59/CE8z2extIhnl8npHklCJko1CnMOp9xbj8e+07js+lfe1Q/uyPEXP/EYHzhyiX/z4iTH7q7F5LoIUAQbYg1FRHMVIpPIRSJTSQ4oEkUDF7maOuN9guoooCGEHjOTJ/j8H/5nfve3/4D50nu94zSirqejd6ct0E24NNXgjz73hwwMTtDXv4Vi/xZQkdnitthAIN+b5eG9T5DP95MwjjE3f5FmzUPd5nBQ0243h75GlFm1c3+r71O6tq3V/cp1jr3z7GlaJv0Dg+i6QRB4+L7Lqy//KV955DHGx7cxMJgEddctJH/mcVukPZSyeOLALp557ACaCBcnLzJ96TSqsUSagGQOVAO85cjy2sm+ci9Rif9aQEsJKqExPGRxICucWlH3XL6NQxGlO24S1axGdFdprrVvvw9iiEJUiN0qMzd5iq9/6Xf4g9/97JqEDVHn7XLV0pBIgWlB6MDkdJXTZ4/xfHWJQv9oTAK3zgKiQ2GwwMGHP8qB/R8ilckwMLSXuYVjHD3xJgvn5q/PhWqN713QdCGdSaHrt0
PcReBDnRKJujNt1e+OXaZj/wm79kvXxyBKF97ZvtaNsOr3xn0RNE1nYHCYZDJDs1kBwHFsvvnin/Ln/8IP0Nvfj6E/CAvZ3Bga94eSuRZuq4bbQYgKA5TvoOlCNqGTz2ZotSsEzQbKc0ko2GZCv4J8ALMqytR+1yyA13nBVsMFvnqhRuMP36BZb3O4pjak696/CIlMIu0bHXjnoARUiOc2mJ+9xMmjr/Hai3/CSy+8xIWp2vqndv1tu5DKQz4FQaiRzKSR2yLCqzATJhPjjzIx8Sy9Azsp9BboH97O8OgBrOQ472pfo7w4hdNsEzixVBoUh/P4Tkiz2SQM1Jo2PhHBNMzb1LYNIhMXXEuqHULtkDZcJeyw6/hugja5Stqd/d3ad8dw313+xilARBgYHiJf7L9C2gCXLp3l4sVz7NqzFyOd6T5jw2XfbQjRLNBG8tUawHai2aFF7r/Zitsj7VAhuokfBEgQkMnm2TKxgxVDWL7UxA1sdBOG+yKNZ8yFpTpcal8l7hJ3mGZuoobPtkIunGhed/5qE1z7zndDgQoDHKfO2VOv8cXP/Sbf/NM/5fS5RZrt9Wuzw3EqjMpOpCGbh1DB0PAI+w48TrF3ILrsLbz3hgVhCKEPgWcShv1oZpZMvoeeYhHHtUkkk/T3DfDkI48zOfk2b775dU4dPoMKFZpmsH38STQxmVs4x8rKLHa1q5V2KaqmZaIb+q0JGtUG0QipA4dIpeiQsUY0tS9cS9odYu7Wmn3e25ITXCXyzjmd8zua+cYgIoyNjTE6Osbc9NUUI816jZMnTvDcBz9CJpVB0Zm87n4Z7y2BF4APAm8Cy0muDve6oQFZ0BswHkY19y0iQ+T9hJsm7e5B10A6ycBgP7qZwNAUmaRBvthL1tAIKsu0yw0sI0CP20US6CvCtnZUgJ6CQyvwin33SVOykfdAWCV6L+L29GBr13cfVy0TijCMRlaIoImG77W5dP5dfv0//x986fMv0qi6+GH0LA2N90w+AlgpSOciUg0CUAqSGTATYCVzPPPcx3nk0Q+TzfWjRCI3sg2gY1QwTcjloN6Clg+BH6LCFPt27WH71nGspEaIhWXpFPJ5LP0JTp/cwcXT86DOAQGhD7OTDfI9RfqKuyj2FpieO0XLbuE5oFrEnRb4fsDtRRV3yLNDcj7XasCdgXoY7+sQs0VEuJ1etUPKa9nEu80u3Vq2yc1q2lu3bmXvvgO88+aLV+7b9z2OHzvCytISg4NDiJLI8VNxhcCvPsZ7Q97jRNrzG4AxBMYecGcgnCQi717I7ANtGJwvQ6ISJVmfAY5wf2nbG35iGlDQhEFdkDAkUHCwz6I3ZSKigaYhvkcCH9PUCft7KQZNFmZLzJYCzCSkk4KEipQJ+aRGNqXRaPm8bUcTlXcEqUjT8mtco4QUt4MMQKMCzhyRp5zD/ePacp9ChQEowXUdavUlWs0qhm6RyxRp1Jf4wud+nS9+4SWWylfVFgH867Ry343t2Emo1sCzIWkKvYVhPvDsd/Px7/7LbN26G13rbpo3ftGzgK5DMg1JCxqNaHsqM8ijjz/FjvEJkoZFo1rGTISk0j0kTZOV8gqvvvYGh95+nWjxkgRIivm5E8zPeRR6R3jkicfJ5zOcOX+YpXr9SrtSoaLdbhMG6/kJ3ggd8uzcY8dptkPAPtGgvk3UWDv1nOk6t2NC6RB5Z1u3KaWDDnl32783BhEoFAsceOgREskcbTsygSmlOHPyBHOzs+zavRdN11BhSBCEBEGAbuhYpoWmabc/qXwL6DjK1oguvX0vDPw5mJmFqbdB1aFnLzz1XLT/1GlYPhyZU4aJlnG/n+a6NkTaAuwvZvnYvnF2FBP4lSWWKxX6xgYoJgTNdyAAcVs4Th3NbTLQk2IwOUIi9KhVlwkAXRPCUJFKGfTkUyinTdu5g9puCtJbYGhQZ/lsSK0UvUiJBFghJHTIbYfZAFydqP1XuDoa3bSRrEKICn1a9SbHj7/Ou0e/zs
LcRRJGjomxfUhg89JXvkClctUFw7QAAe86XhlhAM1qNPkYuoAPA729fPKTf5vnP/IpRkZ3kkxmYs1s4y+3CxhBpAX7AbgeaFqSfQ9/iCeffgpfhRw9dpRSaY7tO8ZIJCwEeOmFF/jvv/MfaTWX2bn7eYJAY/Li66g4yqBeK+E7Fo8+/l1k0j18c/krtL2IOJVSeJ5PGN6OHtaZQISrniQdH0QV31kSSQ5iZQyQFqGv8Foh+BooPfpgEJF9gmhMq0WVe6Wcbo1c5yqx35zWYhgme/buY+vELs6eeufK9tLiLGfPnebJpz9ANpclCDwq5RWWl5fp6x+ir68/1rjff01bEZljzwG9BSg+CT07QJJR595oQO8OKG5JYopD8BGFcxrqduRce7+NxjdE2oPZFH/5U5/g40/uI9OapzV9llJpCZXqJ60FKLeFoSlCtwJ+lWzaIJ/qwW0YtJo2uwKF7Xo4XkijbhP4Ac2mjWsHlNw7o+yaGciPQt+oxdZtW9i7NUnpbJkgsEnlPA4fauHPwK4PQbEApRZIGrQUeFWiJ9PkgSFuER0RLVroU9PRdBOlQoLAi7TjO3AjKoRGvc4rL/8xn/+T/8K5M0dp1Fq0GhrpRBrLECanK4QhaAYYJqTSYDevX2Y6VhBtJzKf5HsMvuOj38nzH/4UW7fuxzCsa6IgNwqP2LfCg7YfafrF/jEefvhJRDSWy2Ucx6Wnpxff01iYW8LzW7z4wldZLl1gaGQvP/JXfoJXXvkmkxdfuVJu4Lc4/M63QCUYm9hO//Aw0+emop1XSOhOmEe6v+tEw8A2mL0Ux/sZH9HJW3UM3STQk5RbQttP0PAzlCZ9/MUVUAFXibsTKZQg0sBXuw6qrmM2DhFhfGKchx5+lLOnDl25d6dt88br3+LZ555n//6HaNSqTF48Sb3RoK9/MFLTr3mkq+vs7pG5IopwSJkw9Ik8al8bWxSJVJLhAYda0iOdSWB7vZBoUHxYSD4dkHi1yaKn7ivTCGyQtHfu3sGT3/md9PWnaV+okcqn2WIOEBoFlCEoLcTApx3UEb1FIV/EMk0aVTCSWXbtzuIEAYvlOsyXWJmvstz0EQ38O/SsBsPIDpVKmRS29bP/yYcofDRHQI2lhUssz7/J+Qstlk5AfgwYBNuDwAMS4DWI3vz30fliLWiiUegp4roeTbtBZ71RQUPQMawUo2M7efTA4+SLPVSbVaxEkkKhgOt5zM3Nc/HCFJMXTuM5i6Qz/QwMDGDbdZZKs4Thxgd6nudy8sRhfvt3/r+89upbNGshgR9NGK6exRGBTB9k05Gt2o3nLYArip5hQU8veAG4TbBM2Drey7Mf+l6GhrejG+b1Jz5vgM5dGYCjQCH0FIbJ54pUqzXS6TQDI0MoL+DShQuUMwbl6gJnTr0LKMa27mFoaIilxTmiBVKuotWc4a03vszS8gEsK4loUYcmgK6tZUe+GXQ07dWeHgFoGoMHiqS181w++QaBs4xlmFhmDtNIMja2nUcfep7pvTs59KKJM1XiWpfBRNc1IOrIO9foeMrfbOeuGBgY4JFHHuPzn0vgedELo1TIN7/xp2RSeT75F36AVFJYXJxi67bdZHNZdE3rqqWr9u4rtXCrs84bRLMoGM8UMJ7fjko5pPw0fcYAhVSDmlcnl97FWGqcTK6FbynCH3VpbzlC+KUjUL5HARzXwQ1JWwTG9+yiOLGLdtimZmQxE1mSGJjKJBAFYUDg1FF2lUQyAN+lYddptX3ESpHJpfDbNron6LkmUq5h6UI6ZdHbcNC92/ePztuwYMNipUWjdhpLE/Z/YA+potCb7OUT3/84Z94uUVqZpbrSQE9BwoxGmITg+6CyrD2rfBega0n27jlIMd9D6Ea5Lnw/QER49JGDZHJ5XvnWaxw69hZtv85Qcgt9Vg9WMsFHP/jdPPWRD9K/dYhAQkQTlFKUlpY4d34SQ8swOzWN52gMDO/iz3/fd+O5Nr//+7/JytLGg25su8Krr3
2RN988SmV5/Zdb+dBsg3QmIAWsdPQaeu14ctKMCF9MyPVBLg879o4ysX0/lpVCE41bdhmJ0SamJj1NYWACM52OjQ0aAcLc3CyHDh9iZGSAVqtBox7ZZcsr87zzzptcunictTRn113k3Jk6md4012RkET2y1d4yOtp1t2dHRKxGPoW4h5ibfAGntXL1mlhkyVJou2RGR3n24ATaR7Zw9O0EjcsOqtGpBZNIk/a41re7Y0JJcDM2bYhMQslUkj379tHXP8L83MUr++q1FT732d/iyKE32Lt3Jx/88AcZn9hFMpmKRyTxc1W8d/L2blpNDOj5gd0MfeIp+pO95I0UCT9LUDdolJdwHJuid5BhZwc9OdALBsuFJst7Hqdo/zozn3vzvpqJ3JCmbZkQhCHtUGhLEqWlCD2bwG6hxENTIV5zGVyblG7h1B0abZdQkkjCou6GnLo4z8xKjUazRj4N4/15LDL0LM6jE9w2ac8QzS0qT9G4VKP8x28xuXiK4T0peoopRM+wffcATyZ38u6Zd3n9xCwtAT0DrhORjp6OSEWtcJcfkkYhs4Uf+5GfYO+u7bRqDexWm+WlFVzPY3zHGEMjwwwWRnBbIYvLCzy171FGXI36mUkGSjZOvUYys5XBgQE8z+XixYtMX7rI3OVpAr/N4NAwrptly5YREpZBs+7iuzcX/ldaOs9b77xIeXkD08QK3BqsdJYjVuA7kdmkk+Al8MB1IVWAXA8MDFrs27+f/r7hVROPtwYBtISJUkmKw/vZuvsAyWwGEZ1mvcFspcI7b73G0cPfpNE6yODAKMnUELa9wOSlI1SrSzTq89ctPwxsWg11zQS3rscT8bfMOh1Nu9uFL7pAMl1l6cKLBM7KNWf06dt5YvRRtg7k6fc1hqrTDI0O8rBV5FhfnQtnA6rVAC/QCXFBb6OUGwWXGglUoENbgUoTkfdNShy7/u0/8Ng1pA3gujanTx/l8uULbNuxl2y2iIh+xaoe3eHVl+v9mJYUU2f/rifZ3/dRemo6UtGoLDapnV+mNd1A11uosEy9UsIvaAxuGaIv30u6mGF8+2McNw6jvPvHY+GGb4pScPnsKS4ce4ukoWPPXKLfqZJoO/iuIKKQwEW5LmnNxJAEYUyCRsLEURrVSpvJmQpnZ5cRLeSZh3vYvmML9YUGEqo78tBWh3KsLIW89fUa+ZM10iPQKEO2ZfGXPv4xhjLbMOolvKaHb4HngViQKsa21jsgz/oI0fExBXpyWXSl4bRDNC1JLpcnXyhSb9TI9+Z47vkP4gY++7dvJzdfY95W4LlUaivUmjUsU8NutpiemmJ+dgbXqbNtYoiDDx0gmU6hGyGO3eTdw29Tr6/cWLQuHD3xMmfPXmTDFhWfa8ykoc815/puZHbV9WgEl89n2bV7P7l8ATTttuOfM9kk43ufwOzZxeDYLsZ3bEepgFatQrNWZ3J6iqNHXqNaPsr0rMXQlnF273mWc2c0avWLLJfO3+AKQiaTo+45KF8hCJqm3ebkWrfLX8dtzwc87OopArey6miDncUJvvPJZ3l4fAhVniaYnMUyajysJXguFzCzCy43NKbqy8w1Z2hpVRphC9/SSfb20ArzLF0sQGMCGLgpaYXIjt8/MMCjjz3OSy/8MUHw3gbSajU5dOgwlXKFZDIbmZRErhK3UigVRnMyuh7PY9wdCtcEtAshLTEImh71SoXFhQWWp2aw5y9i6BUKKxVyfStk+nI4tYAtfcNoGR9ZziCSRN1HbmYbUm+OnbjAS9/4AqMDeYz6EiQNBnMF0v296EFI0KyCmKQTCQzTJPB9kiYERkijZtOq2oyNbCE9UMAyPR7ZOcRoT4qLS3XuZtSra8PSBaK1ng2ouC7/v9/8KnpgYChhd7+Jb4Ys1UMMQ6eQMTk/b6NJrO/cJW3b0tKM9Q/TbjVYqZSx2z6BptE/MgoCrcCltDyP4/psP7CTZDJN2tTRcmkG+56jrSn8fJJGrU5gtwn8kGQyxZ49u9
FMIZMvkM8NEiA021WOHH6Hy9Pnr9jHN4qvv/QFFufXj2wErroYdzzMBIwEBG7XgCWM6rPVAm8F0gosyyCXz6MZBmi398rqGmzbtofHn/5z5AZ3YibTiAqolpexqzVq5TIzUxdpVCLH3OXSOS5P72LX9v2MbRnh5Ok3OHvqZRAIfBulOj3QtfeZSiZpahoBkX+253oEQfjeYzeMjjcHXPXoiEwaQbsURyBdRVH62ZUt0OfWKZRCuHgR+/J0NJJRPkXVImVqZDSfZmOB2cYizdDGUSFK1wjqKXQrjeaPEfJJooyRN498Ps/uvXuxrBS2XV/zmHNnTnL03UP09g1gJSwkFLT45fI9l2ajTkhIT6EXTYhsa3cYkgbroOLlN09w4pUBHtvVz9jWDJl8gbK1QM2tYddLlNtpdmZ2Mjqwld6BCXLpAsemppi8nEGZ28A/HgUX3AfYEGUu1T1eevMI3/nMKDsKKUIUnqGh59IknCbiOZi6i2kaUTADit5sFqVblC6XUPUWj+zfR9/OLYCHZTdwSvMsVh0ut98HE7IXfTQLqnZATwJ2bR9k/+NFJBlw+VKbhOol11fg8uSL+EFIIgGOF9m67zQeGnuMT//gJ9l3YB9WJoORFHIFg0QijRf4lOpztIKARCZNrqcAgaLRsiPvrv48gYBumRiJNOlcDsu0yBb78QMXMQ30ZBpPksys1Dk1N80ff/VrLC9ff9h/Pbz+6hu0GxtoqF3pSom/Zk1QFjTdSMPuwG2DJ2BlQdcVuh6ZA5QCQV2HtW9M5SnTYGJiH0N94xhWGsdxqNVqrCwtUS0vsbK0yPz0JKEXubZ47RInj3yNpJFk3+69TIzvwdDBces0W1Vs26Hdsmk1Z6NhY3yfTssm9KM6UUrRbttrapobR/cEpEVE4itIVqevdwR7aQXbrqFUQF5yPJnbw95UgdzcEuXjJylNH6IUTNIioIWiRsgMcDz+e41+GAAVlyjGrw58hFtdB8W0LEZGR8nlitcl7VJpjj/+wz9g+849bNu+HdMUlIpIu1Je4fy5k2iGzuNPPIPStDufaEqDwe+A8ed03vkPK8yXlsl94DGefnYfrVodPQhoVWfxHQdLS9DTl2FgywCFwR76+0dILPhIYwcDOz+EZy9QPrt4hwW8NWyItBVwbtqmkLnMtucnsFIWdruO06qhORWsdglDq9NwA5abgqVnSKWzJMIESd+iLw0jxRwQUlqpUpqc5eTpS3zpZIk36u9fSlTfhT4L9u9Ls+ORNJmtsLjUpFRvkGqn8R2XMIj8fP27uHRrqq/IzseeZPfD+/FaLRqVGp7no2kKA6EnkUcr6qRTKbK5PLbrsRg2qQUOICSSSTB15hohDdOnL92DlsziuA4N22ZhZpnTk3McPnaas2+/Su3ka+/R2DaC2Uln46d1KZpKgdOEdBKUy7UOCgqUDa0yOM2AwPNQYQhBiOoMcToQNmx6EAHHcVhemEO0FVptm1q9QblcYaW8yNz8FJWlSZS66o/otec4dewlcqk02azF6PAY1VoJy7JIJly8TMic18RzomwVupEiDLxrhmCGCPptsY1O5JutEdmXXZA2+S0mH33mSYLqNmamS9jzK2x3U3ywfycTySzJwMG3DMpagrkAFvA4S+SLXGcjPiG9wCC3RNpxkrBCocDY2HYWF6fWPMz3Pb7ypT8iny/woz/+4+zau49EMoVCMT8/w/Hjh+kfHObRUEV52++wdURMGNgFhYEAq9jAm5vEbDTJoRFY0DOQZmLbCGnfQSRHbzHP8NAAuZ4cYejSXl5CyiWGRsEbyT5YpA3RBN3CkofjhiT6Uvj1NoFdAXzKLZdS6NKyNGqmhnhtShcv0VyGqUsVlCjeXlhistFmcqXFTM1hth1QU7c+qLwZ6AJBfKGyC++crdMebbF/d4Gy4XF+oUZ9poouOsH7MAS6uLzIqXIT09ZxWhrVmofrumiajh9C00kTksfyElgNExehahaoZyNjsWVZiCEsty
o0p9sEk5O06nWqi3MsXzrL8pkj1KbP4tdX4t7n1u7Jb9z6PXohlFvXubKK7Ixu22Z+7gy12gy9vQkMEigkInGII+jiE4D13uim43P27GFsW8jlRmnYTZZWlmnabSqVZSorF1FhldUtrlY7z6FDL7Bv30HyuST5bB5Ex3XLtOxaNNmBBmKQyvXSaixc4WwBdHW7czImUWrdToi5jSQN9hzs4dnnRigmelicWWH29SOot87hTU9SHx5Bf2gb9bbJSq3ImQWTN2izsfnzNHAA+DiwY0NnrAkF+XwPe/bu59Chb17X9NZoVvnt//afmZ2Z5O/+7M9z8LEnEE1jfu4ypdIMw1tGEJFOZvb4rDvD3MqDxSkYfUSnZ7TBwvGjHH+lh9GUg+rVqdQXwHdIaIJoGl6jSavawDTT1JpzXDj8TaYufZmU1iA1UbtmNHkvcVMW5VTSJCRN3VEkRQhcm7ZuMVkJaet5xvbsY6g/yeLMNO++cYavvbbCuUY0VxzICrZ6/8NBRYOxreC3YG4p6nxaIbSSAU7KxSokKQ7qVGYC/NBnIlekZlSpqRCViG3bFe7oMjxzlw/xf/2f/4zcjg/gSwrXcSLlRdMIg9h3PATNMNGsBGHgEbg1wtAFzUDTdVABbrOE31wmbFVQdhXl1MGtx6GG97Z1rfucJcpBUq3avPytP8HQcjz+2PczNLQPy8yzvLRMu92mWOyj0NvbdSvXf5lDBZPTUzSbbbZNPM5ytcrU9GmCICRUggrrrF0nPisrxzh2vM7+/Y9T6ClgmAl03Yjd2/KIGIQieIGH3+VFoBS0HQ/fv52pa52ISEOgBZYw/Ng4H3m+jy2jCewVl9KlKS6++w7+4iRtawD6tzCbsHnpyBscLR2ijL2B98oC9hGZRB4nCtI2uZ3AhGw2y+69+zAM64q/9lpot5u88MJX6Okt8rczaUZGRpmbnaTVLFMo5JHY112p25iLNnLgrzLThFB+F+rP6/RvFUrJOicWXmbrsUF2PrwTs23TqjWR0MNK+ODUcetV6OvDssDyl/H8k7QX2ngp7V6/UlewYdKO3PGFmfk2Xgtymkc28Mkn8yyueHgaDLhpTKvAkjPNK5caHGp0+V/foxs2UrD1aSHhCfZrIZkBGNmvs/uZPMXhQRaqbWiXr8hX85ooU5FIg94Tx9uERPkc75QSrnzKF1+nfPFdrsmyJsRD787ElkS9jgq46prRHbFyOzkv7iEU1JfglA2XL8yzOPefOXH0LXZse4q+3m3MTS+RsHI8/dzHyPf0INcEZlwffqBYWF6k7b6N63k47coGBQqoVSeZmulDzCSECl03yaRz0cR6kKflNqktX3rvNZVwW1HsV8LPbdAVmd1Znns2zUR/Gg2N6elLnD5+nHK5xnD/Vgp79+PsG+PY5SleuXyEdngjO54QZdD4EPBRIuLu4WoI+60JLwKpVJLtO6O0A+uRNoDrtvnin/wBug6PPPoYh9/+FroekM/m0ES7om3L1f9uTqBgbe8Obw6mj/mMjKfo3++xcrTB2bmjjPQKo4WQbB5m/ZCWUwa3TFr36e3Nkk3n6c0ZaLi0WwH2DeIU3k/clKY9teDg21MM96ZwbJuEF/ChnQOMWBZOdYWjX3iRWVPn3FKZo5ec+yLJSraoURjrRwsCtrRW2PNYiuFtA5DMUCspGgtCXutlS0+blWaDcttFXEiqSBtMpUBLQ6uz9OId5chV+a/XKvsB5OSNIGxDqw2tFagtVTn97jcZGHiLYrGArjT27nmMgcE8vtcimcqRyuQYLu68YblKKSq10i1IFLA4dw7dypNJpgl8H00zMHUTz23SbiygvNZ7T9MN1G3btCM/ba03xYEn0+wdcwkbTSYXmxw5cpyq22bimSeZ2LIVlUtxsV3nrTPHaXvr5Au4UvZjwPcDjxAtSZfkinuhJEC7hVFCfLuGaTC6ZZSenl7q9RtlqYZGo8bnPvu7fPOFr1Fv1Ni+fYLlpQphGIJoCIpQEzQ6ia
XWufhqqOt0Gg4svBuQ22rQ/5SGlYLZEyc5cvgy2bE0I3t7SfWnuXS5hdtaxKTJQF+KVDaB7VZx8SEQaBncL9nlNkzaCqh7cG7ZYabi0AwiHbFXX2bfQ8MoCTh1bJ6zLpAVdvboTDUClttX7cl3E91enp2caaYIYz29JJ0JbL9BKtPC1IvUFtKUVtpUlj3UUoJ8apjsRBOZbmOvuFgiuFWFeFHUntWOJid1Q8P21uhxRY8n+r69GDad1vB9RagUgX+XXCAVOC1YbEFp0UbTbDIZaNgO9WaZwf4J+gfGSWX7ePzhX7gLAkDHzS7wqywtLeLlBzE1nYSl4bsu1ZVZHPu9pKSAwA2uzy8bQuzyJxaJAYNtIzqmZnPp4hIXT17i9Mnj9A/2UdizDTedZXphjmNnTjM7dZr125sOPAP8dWA/SGdxAj92B487C+3mg2voXFm0K5OR09NnN3SebdvMzERRuWdOX+ClF1/koYNPUuwdis0k8Vqj8eQksV+4diV3yc13kO45xeShJsPjCUYf72dB1Vk4tMT5s4piYYGxJ7dhpbaysiS0azMYrSWkLZQWzuOi0DEwjSwY5fsi3d9Ne0kHQCPunF3g8KLLEwMrFAjIGfBsDob2pFmxUnzrQpk3Lwe03oc0WX1EATY60bROyrAoDIwwPrSTVCtPtbpAYz7LRRucdhUhj2UM0l52WVlaBkq0fTdKdikabhBgN8BvQkbT2JLrJZnKMFsuoRk61WYDRTSMLgzsoFaew21vwKf5AcJP/p2/SbNVZ6U8zeuvHWJ2ag1N8yahaVAoWIQhVKruNbyjVJS3pFaDdw4tcfrUMgf3n+XJpz9Mvn0X3XmuhI6bBMogxMBKpMjnUlTKdVznOs9VgW87VyZObw2xmqEFaCkbCRwWFpY5/MablC4tEThtkukktu9SLi1SKpWor5RBref1IcB+4MfAeBQkFVU8iqiDkJi0tXii9RYlF0in0+z//7P332GWZWl5J/r7tjs+zgnvMtJnZZavrKqu9k07uhvfIKwECA3ozlyJAY2EkIQ0Us8VI3u5GiQGaR4JwYAQIGGmoWma9raqq6rLm6z0JiLDm+PPtmvdP9Y+GSezIiK9a+J9npN54mz37bXXfte3Prfuu59nn/0iSXJ1wbudTpvPffqTPPLII7ztne+nWCrjuq4p7Uo34MmQtr4QSXQN0S4taH0+ZGYC4gMxo7umKGvIHV/Dn/MJZ86QGyrj2H1Mn3iJPhtYq3H29BtowJEM5aEJmrpKvHT7FbPrTm05F8MnTzbZ60LBgooLM7M+rzc7HK8q/FtU1zCDIewQQ9795SF2P3CY8tgwQdgmaGRRjX5WqzHNesDk6AiFwiDV1XOsLFVxMzGFXBnPybJcXTGvkmUxmM+xc3CY/v5BxPGYnBijGfmsrC7Q15dF3BKxM8TRa5qS39n4Wz/3SwSBzzPPfJJTp/4Js+c2Du26GvRVHH7wR76DoeExPvmJz/DGkbN02tGbtHitoB1oQq3Ye2Avb3/nd1/3tTeHBlxwR7HcLGILTiZDoVIBt8PcQhG/tXE26Q3LiNQaFcTUGxHV6lmOHHmJvF1haKBCqdJPrlAACcgXCuyc2kUSwuz0S5uYBSaBj4JzH3il9aSVrphikWazXHOgRje3MZfLcej++8gXyjTqEaaGwZu99o5lsqejS/jg1MmT/Md//+84d26ayaldDA4OUqkMUCj2kcnmyWQyeF4G1/Pw3KtbtOEi+BCcCjl7dpbOPs0T+4aYOtikvByx/IrP6/E3ON7KkuDyuc99GeptXm+1EaBsu0yNjBPIKeaXOlh2CZVsHJt+K3DdpJ0AzzVh2YIDRTi5Bq93Ehb1ra1DW8VYh7vuudXIpw0krZDZmXlQmqGBXeQKHstzS6zMLzLdOIuKI/qKFbxshcHBEQ7tu5+Tr3yd6soME2Oj3HPgALbjcHbmPKu1Zbx8H3k3Q/+e3YxNDtDsaKoNl1O2feMWcrhDkM+XUIni9S
OvcPbsxjGqlytDbnlpMEsKbdvs2H2Yj37vj/HEO76LL3/xU3z9a1/m9ImzdDo+tmuRKbrkKhkyrk02m2VuuU22NHpD7+3NiCCaJ1iLqIeTWMrBdTP0D44xufshTr/xFCq5mIzEEsoDFVzv2rXVC7WtlRCsBJw428Btr5BIRCuoktcedi5DJpslDGNy2SyqrHHc7mK/l2IM+F6w3gnZQbCdVMtO2bmbeXihyNV1DDgCXibD3n176SsP0KgvspkmHG/irY2ThOeef57XXn+dTDZHsVhiaHCEkZExRkbHGRkbZ3LHTg4cvJd9+/fD8NWl3b8JCSwem+PJeQdvICGuQa0BryYJr+nWhRZ1MLP3A8BILmCgv0bfqs0CFqXSDmrVo9yuOs43JIk8BE4pmK2b77fDXN/BdP9ubtlAPsNAX55cpULiRyyvrGFnXcb3jIEVc/LkERrNBuPlMe655x4sy6LZaCISU847JFVFVF+hOp8hV8yj/CpRu0UUJXiFfnTGRiubwf4+8zJfRcnTuwUajR+2OHH6CPXqxRqdI6ZmdW+3LRWM87bZgiAdwS4N39UCndAnTjQ7pg7yAz+0h+/6nh9hduYkayuLWI5Nrlig2FfGtiwa1Ra2U8Lz+m7uzQIQgprDb6yx1Fmk0xqj44/Q3z/O3nveypkTzxJHFw/NyhLUdfkyul6YHGop4eSTpxBO4eUtkIhas8ri8jK25dKsN1laWuL89FnmZo6BvlSjrQDfBvYHIb8bPM849CynR9sW6LUPX+ssQYy27bouExOTTO6YYu78yasq/duF1ppOp0On06G6tsrM9Fljx7ZsbNshlyty3wOP8v0/+MPc9zN/7drkvQSr9ZjP12EO0/qXxhgoTGve70BhMmDNOY1qd9Bo4vDao25uBG5Y5Y/uGgK3C0NAQUwijQLyYYu1U8do9q2i3RyTu3YwtmsHg5MDaDvD8PFzFJZWObTvEJOTUywuzuOHDerN80RhlaDdYW21jYoCpnaOMtifp69SphXYNAOL2DeLxsZ+wtrCeYLgdt79zYSmt6pmFxstJ9bqmHU/y8NQXTE1s/Ul73BzLeAP/ug3eem1J+krDXH4oW/hgx/4LqZ2HEKpBBAs28K2zRAcRRFKQS5XePMFbxp8kvgctZVZGrUs4Y57uOeeexGapvB/asvRWuMHPklyPXNKjbEtlyAuoetttFTpGxshLwGrq2ucO3MGvxmwtrLK9Jk38Durb25YbOBRkHdBYadZbcIyJQIQ15D2BcLuat3XQdrmBIhlMTg8wg/+yI8yNDLO/Pnz1Gtr1GurtFt1wsgnSWKU6q6leWVkp7UmSWKSJCYMfZ5+6nOcOP4af/sGkTYYM+oxjEbdLVbbVU0sYKQIkyOQDMH8SpulWfOcW+2ZK76Pm4GbWK7p1uLQSJlO4BPFMa7n4HgOYb2G5WXIlAoMjg8ztnsfSjTF0gQf/sj3I/UacaOKTcT4YB8jo/cyNFJkPulQXVjG8X2STofa0hyZsEi2VMFWHk7iUMpVKIrN6sIiKzPGzPLNBhEhm8mzf98hyv1fZG1hawOQUlBbA6uxadgsKoI3np/h2MszlIY8grDNrt27eeiBJ+gr9Zsyp3qdSFzXvBzXV7P6WhGj4iaz516m2ZjBct5MOoK+4sWHN4YFjg12BqIBUCNY2f1M7R3D7cwzMFghlyvRaQfMz57Bb2+WSj0FvAuyB6FQhkw3m9QG7aybQ3o17TetJnP1EKBQKPChD3+Yx9/yBO1Wm3a7Rb1aY211jVq1Sr1WY2VliaWlRdZWV1hdW2JpaZ5GfZVks45yCZRKWFq88lrwl0MOQ34tTNpRHuMJmMXM2vuAoVxab7GhKVgB7oWJze1dKeWbhrQT26MThUTKodI/ge1kiDWUKkVGD+xk+MABKsMjVFdWcDNF7r3nfrLtNV5/6gs0a0sMj4yROBlsK0Opf4LiwAwO8xScBBUEtOsxURIThEK9llBfzBCszbJWa6JabRzLIr
oujevORDaT513v+gjHT7zMn//Jl2iubT391YmJANkSyti5a/MhX/jMF1laWuQ7vu37+fAHfoTh4QlTCS4N4pTrZZUbAKViqisbk6XSEUpfx3MXxxSstwRU3pB2fpR8JkcuU2F4eAfZbJGV5Tp+p8WZkwHtxhIXG6bKwPsh+wT0j5nkArubmNWNEulpR5F03eAboC0K2LZN/8AAlf5+FALaLOqr4hilFHEU0el0aLfatFotGo06C4sLnDh+jNdfe4Xjb7zO7PnThOGt8woVgH5M6awIaGNm6/0Y4s4Cp1fg1CqM9Gt2TSbcX4Gvrdz+qL9vGtKudWIicfHjAD9UuCgiNHgWlaF+BgYHKZbLKDReoY8de/dQihuE/gLTJ49Sr7c5f/wMKlK4dkRfXx/9OfCSJpHfpp1ookCBUnRaLRqtBu3qCkprxHb5ZovR7sK2Xe49+Dg/8eM/y+jwGJ/7zBc4dXSOKLi++7WzkK+4aCvh7NnjvH7kBZ547EMMDIymERlwQ6sH3QQY6XRq1rlGWA54jiFQnQEpYve5eKIYHBhmcmoIz83Q3z/MxMQkp6Z287WvfI5Gtat1esD7IfcdMHgQ8iVwrHVzyIVV3XsdkT0/3wBImhCjtDZFZsUx/s+MWe9TA5XeuiICSik67TaLiwucPH6Sl55/ntdffZlTp44xN3uOZrP2pmXfbiR8jEnXwphJWhgyvB/zJs8As8oQ+j01eGAHHNhp8fSq2tA0eCvxTUPa2s2Ty1pE0TLLi/PkiyXyQ/2EKqa6toY9P09i5Qgti9F9+yhPjBEuniU3McmgA7NPPsuZo0ewA5/KYJ6B/hylQh5H2QTtDKqd4MeKMGihNRRzUCnaaGyq7RB1KzKIbguEfK7M4Yc/wPjoPh568F189Wuf4Y2jr7G8uGQW+m1ExJFKa0cInmuRy7t4no1tW4ijieKY1VUfv6HBhon9Rd7xjneye9dB8rky46P76Cv1c2HhljuYtC3Hpq+/wMDgEPccfJB8vsS1x87ZkPEABbYHuSzF4TxjIzmGB/pwbBdBKPcVGBoawRKX1197JSXtNIGm8L0w/LBZxcPVKSH32qt7qidK6vi5YG26/jYWEWzbvlAVHFKzv9ZmlZqeeE5J/3Vsm1KxRLFQZPeuPbzr3e9idWWF06fP8Pqrr/LKKy9x9swpGvUaaLBtJ12P88agjVntqlsgAuAUxim5iFm9vesiJoJ2FZYLQnQHvObfNKTt5Pop5iyIQzqNNSDCyXn4UcTszHkSqx/J9kO5D225LK5UOfPaURrVFYYGRxjaMUlfOYfbjijnwZGAJAHHscgUS+SshMbyKqurAWLByKDFYLmAbefI132OT9cIbvcQfDOQkqdj55icOMjQ8E6eeOIDnJs5xtzsaVaW51lcWqLdaqOVwnU8yqU++vv7yeWzOK6D0gn1xhqvvvEiL730MvOLi/RVMuzfey8f/a6fYmhoCls8U4PiQmU/ePPsZXOCyfX1oRJtFty9oGV2y0jKBQK52PxsRgh9YZ/0NyFNXjHn0KkpwrYdiqUiO3ZN8eBDD3LffQ/y8MNPMDg0cu3+PLGNeUS0CbXx8gwOlxgdKuJ5Hqurqzi2xeCAg20nRFGM7WRBiqB3Qu4vwcRhY8cWWV/M3UoF0rKeiKPF/G7r9HrWZZJ0LofN+7ukNvNNzVuaC1E3lm2RzxfI5fKMjo/z2OOPU6vXWV5aol6vozW4joPt2Buf6xqgeHM0uQ+8lH7PYpqyiLFvv34e3nCSWxrGvBm+aUj7fG2JfMdGRQHKsqgGAXNnp5G5BbLFEqPTawxPz+KUSjwnX6C1ukJzaY5s1mFgsIK/usCi30EHAQvLAaBR3ZXQxaRz11sd2iFkXajX4FyzTTaboLCI75S6jTcQ//ev/2feXC5To5UiUQkq0Sg1TH9pkFJeobXCsiwc2yYILcKIC+SZJDb9pQOMDtaoLjeYPlrlT4LP0FotMjqyw1QuvAhvftl//m/95Kayek
7BmG8vOm5d9i79bv6MZH1P0aDTgv09x9iOTcYrQOJQXWkzfWaeduNJnn/2OY4dPbKpbFtCbHA9EAWSIIU8fcU8rgUqjgjCAO06xqpgGY12YHCU8/MQRk/AjsdgsM9EioiYWEyn62y0TN0MZa9r3ZYYUtexqS9xFaR99Mjr/OZv/GdTwOuGzIT0hf90OsCqdAky8zEbzZhqrvWd3/rEdV7zytB1NboYE8nxCFbvkFgDedOqyL0b5UZ4Km4ctNab9pJtWa8dm8lqWfYNlrNbT2IdslE84SZQKrmz2rRHk7y0nvQVP//MD8Oef2tIW3ewc8d5+70v8d59GuVCtVPDxmKwMkQ2m2dlqcqrR07zlZdrNMP3wPgHjBahHFPZzCUlcIy9XNtmW1rHA1vAVkBilhDSGfSfW1fcrnITlgS7GmzWB27W889i4rUXuDqVbKvnf73YkrS3sY1tbGMbdxZu77C5jW1sYxvbuCpsk/Y2trGNbdxF2CbtbWxjG9u4i7BN2tvYxja2cRdhm7S3sY1tbOMuwg0hbRH5DRH5xRtxrpsBETkoIi+KSENEfuZ2y3M3QUT+ioh8+jqO/wkR+eqNlOky1zsjIh+8Vde70djuq3cfROSLIvJTm2zbKSJNEbEvt++V4i+Kpv3zwBe01iWt9b+93cLcTNxo0tJa/7bW+kM36nzbuCy2++q1neu6yfBmQGt9Tmtd1DewkModS9oiciOzNXcBr21ynRuXG/sXDDf4Gd0xuM33td1Xt7Elrom0ReSwiDyfTuF+D5M41N32nen0rioiT4rIQz3bJkTkD0RkSURO907/RORjIvL7IvJfRKQO/MR13FevrJ8H3gf8SjpN+a8i8u9F5JMi0gLeJyL3piN1VUReE5Hv7jl+UET+RETqIvKsiPzirZrui8iUiPxh2l4rIvIrIrJPRD6f/r0sIr8tIpV0/98CdgJ/kt7rz1/Ftf6+iJxMn+nrIvK96e8XmTdERIvI3xSR48Dxnt9+RkROpTL9a9kkdU5EfllEptP2fE5E3t2z7WMi8t9E5DdTOV4Tkcd7tm/afy7BIyLysojUROT3RCSbHv/XReSEiKyKyB+LyMRm9yUG/0ZEFlNZXxGRB9J9MyLy/xWRcyKyICL/QURyV9rWm2G7r27ZDz8mIv+l53q702fmiMj/Dry7p91+Jd3nHWk71NL/39Fz/BfT9nkyPeZP0vb77Z72292z/6bnSrFPRJ5Jj/24iAxcKucm7fY/iMgREVkTkT8XkV2Xbez1PP8r+2BqQZ4F/hdM0uz3Y9LzfxE4jCmS9VZMhYO/CpzBLAxhAc8B/zg9x15MYa0Pp+f9WHqej6b75q5Wti1k/iLwU+n338CU0X1nep0ScAL4BS7UuaQBHEz3/930kwfuA6aBr94o2baQ2cbUr/k3mPK/WeBdwH7gW9M2HQa+DPwfPcedAT54Ddf7AWAibZMfwlSrHMcMnl/t2U8DnwEGus8o/e0L6W87MQuCdNv70uN/FLNYiAP8HWAeyPb0AR/49vT+/znw9XTblv3nkvt/Jr2XAeAI8D+lz3UZeDRtu38HfHmz+wI+nF6vAheWNx9P9/03wB+n+5aAPwH++XZfvf6+ukU//BjwX3r2250+M+fSdkv/HgDWgB9L+9qPpH8P9ux/AtiHKUj+OqbffjDd/zeBX7+Kc50HHkjv/w+6sm4lJ/A9qQz3puf9R8CTl23va3hA78HUCZee357EkPa/B/7pJfsfBb4FQ+TnLtn2D3oa5mP0vEQ3uFP1NtRvAL/Zs+3dGOKwen77nVQeGzOQHOzZ9ovcmhfh7cBS92Fvsd9HgRe2ehGu8fovpp3qJ3gzab//kn018JGev/8G8Ln0+09s1V5p53+4pw98tmfbfUAn/b5l/7nk/n+05+9/BfwH4NeAf9XzezF9trs3ui8MIR4D3nZJ3xAMkey75Fmd3u6rN76v9vTDj3F1pP1jwDOXnOsp4Cd69v+HPdt+Cfiznr+/C3jxKs
71Ly7pt2H6TDaVE/gz4Cd7jrMwVWN3bdUm12K7mwDO6/QqKc6m/+8C/qqI/M8927z0mASYEJFqzzYb+ErP39PXIM+1oPc6E8C0vrjiz1nM6kPDmBFwepNjbyamgLNaX7wYoIiMAr+MeYFLmAe9dr0XE5EfB/42ppOBIbUh2LAa5UZt0PvbWUy7bnSdnwN+Mt2uSVd26tllvud7G8imU8tdXL7/bHaOCYx2/3z3R611U0RWMM/5zKX3oLX+fDrN/j+BXSLyh8DPYbTIPPCcrNdjlVSWm4G/UH11i354tZhgnZe66LZVFws93zsb/F28inNd2v9dLi/3LuCXReSXen6T9LyXXu8CrsWmPQdMilxUQXhn+v808L9rrSs9n7zW+nfSbacv2VbSWn97z3luVfWq3uvMAlNysQ12J2a6s4Spkb6jZ9vUzRcPMO21cwNb2D/DyP+g1roPY27YqB7pFSO1o/1H4KcxU74K8Ool5+3FRtfobZedmHa99DrvxkRH/CDQn16ntsV1enEl/WcrzGJekq4sBQyR9y48eNF9aa3/rdb6MYzmdA/wdzEmlg5wf48cZa11kZuDvzB99TL9sIUZLLsYu+Ral/bJi553im5bXS2u5FyX9v8I01e2wjTwP17Sp3Na6ye3OuhaSPspTOf4GRFxReT7gG6R2/8I/E8i8tbUkVMQke8QkRLGztgQkb8nIjkRsUXkARF5yzXIcCPxNEYb+/n0ft6LmRr9rjZhOn8IfExE8iJyCPjxWyTXM5gB8l+k7ZgVkXdiNJYmUBORSQyR9GIBY++9GhQwnX4JQET+GsY+dzX4uyLSLyJTwM8Cv7fBPiVM31kCHBH5xxhN+0pwvf3nd4C/JiKPiEgGQyhPa63PbLSziLwl7ccuhjB8QKVa7n8E/o2IjKT7TorIh69QjuvBN3tf3aofvgi8R0zccxljGtvqXJ8E7hGRv5w6K38IM/h+4hru70rO9aMicp+I5IH/D/D7+vJhfv8B+Acicj+AiJRF5AcuJ8xVk7bWOgS+D2OrXMU4C/4w3fYN4K8Dv4KZBp1I9yO9ge8EHgFOY0ah/4RxAtw2pPfzXcC3pTL9KvDjWus30l1+GiPjPPBbmJf/0kUvboZcSSrXfuAcZtm6HwL+N4wzrQb8KWnb9+CfA/9ITHTBz13htV7H2PSewnT+B4GvXaXIH8c47l5M5fq1Dfb5c+BTGFvxWQwRXtEU/nr7j9b6s8D/inESzWEcUD+8xSF9GHJeS2VdAf51uu3vYfr218VEOn0WOHglclwPvtn76lb9UGv9GYwi8DKmn11Kvr8MfH8ahfFvtdYrmP7ydzDP7ueB79RaX0773ej+ruRcv4XxQcxjTGiXTYzSWv8R8C+B30370auYZ7sltutpXyVE5F8CY1rrv3q7ZblTIKYA/QGt9YnbLcs21rHdV785cccm19wpEJFDIvJQau55AuNE+6PbLdc2tnEptvvqXwx8U2a03WCUMNPMCcyU7ZcwpoBtbONOw3Zf/QuAbfPINraxjW3cRdg2j2xjG9vYxl2ELc0jcpesGg53l6w//w9/VydKobFQCAlZwsBidfYcp1/5FHNHP0uc+HiYCH0HM7pamAwOCxM3F2FCA9pAHZOCdTm85eF38D//nb/H4be9g3//f/47fvWX/ylaq1u6wvW1Yqs2feJtH9bfeOazXLoqOgDi8NgHfoK//Q/+LuNjo6C6K8BrLDROqrp0j1TaLF6uERItaMAWC1s0oNGodHVziyAI+Dv/4w/x2nNfuGJZb0u75jCxOhtc+Y6TdQtsJuvdIueNwLZN+zZALMGzPZSGWFto7YCAFkF6cg+6aXZu+j3BEHQbU3AiSH+70t4qgC3mf0sEy7pp/eqWY3SojOs4hNGbh65soZ8PfvB9jA8N4loWyjAyhoIh1oDIejtqjWkloZtCplGoNItdIWjMs7qa9r+t6NxuAbZxo3AHk7ZgdMobVob2llzhSvKZz59+Ax3HWLaHeA
XsbBlL8qAibMcj5+VRvgmvjbHoaIsGCR0UCesa4TVBqwvaqFxRImIJUzPJHLHxh57/L4XRaNc/l9tXNth+eTnryzOgNm6ZOGgze+40KoqwPQ/LUqmODaSatBkwlSFz6ZGml5HF6NnddpNtf9Adj6GRXbznfR9geHiAwI954bnnOXHidcKohtiRUZy62s9dgjuYtG8+bEwDXMvzcoCSmMIqjgdWzkIUlDUMOFvTdpYWYf0M1aUF5tdaNJIM2ukjihXt2hx+GBBqTQwoEhKSG6LNCUI2m8GxnZSZroS0d2ByHGDdSGP1HNv9e+Mrmm2a9TnBVkTfNQBdKtvl73761KtESbzhtjhq8Zk//e8ceuhhHnrwAQb6y1hiiu9Ylm1IWysulIPqjjMXrm3+0FaPXKmmruQOcQy5GJvZ9jhyEcRyOXjfo7zz7W9lbGyCp59+iv/7N/8Dbxx/koiIxMeYje4i/IUm7ZhrI2wXOJQT9pYcVCcmymiyYzk8N4PbiRgqbZ2ZPTqxE1WIyatVwrUlOiurrLRj6ommQ4JiY/K5XmggiEKSpPt2Xwlpd0lZc7FVHdYZrpe2LlFNLyJ3fcnvcsm+vYPCpZJvjfPLjS33Wpg5wif/7I8JkoCDe/bQVypRLBYp5AoggtJy4TLGWtIl53W1WwQQWZdcC1ZiXbCJ3wpIaivTZkQHB5xRCwY08XF91xHQzYbfabIwv8xLLx5h+INj3P/gPTx0eA9rrVeYW+yQtPR1Tl1vPf5CkzZcm2JSBMoKcoHGi6ARgDgBVkHRrIZkw8v0guwwbibLWKHM0PAke86f5OzZs5xbXGU5UDQSC1+pG96XNJp6s0nH7xhn2hX7brrapuJi8u0lfrnk941+Uz2/X4pe0r6cyeXNCC9zKzoJOfrKC+w5cBAdJwwP9rNr504K+Sy27SBq3aatNVipEeTCnUhqThLBkq5kgootLLlyOa8HXlG4/10j5DN52qsBnp2BgoXd5zA9O8sMWw9cfxHhZVzK5RLLy0s0WzWyBU2lP0OpnGG5Kgj6rmuz20PaNub93bK1ulPqmw8LUyygw+VJ3MJYeM8HmtUg5h4MiQf1mHojZr4F9dXWlufIFLLEiYudL1EemmBwfCd9g0foO3uWlUabaqvF/PIyS36b4AbbTeM4MWaEzczHb0KvPVph5icbachbEW33HrZ66F1huuaRS8+/NQq5LO2O/yaree/VGovn8OsNglgxPb+E2EKxkKO/0o9l976+gijzQq8PbIJofcEFuX7uW/PK2zbcf6Cfjxx+C6I9PCvPoYP3o22b6YV5Xjr2Os91XufMsTn8anRLZLob0GpWmZs/x/333k8Ud2gsrbK4MM/iwir1ZYW+OZPam4pbT9qCWccihJtkBbgsLNtGJesDQpeOruT1mwAeFzitTVWcZWDUhtESLIaw2CKNMtgcDz4wRRAmBGGIqBgv2UFlbJLKxEmWV1aYm58jkz1LqbrM2ZVl2pc74WVgY5F0tVytDT2KYG28ItgG6HUi9ur/vQbgzUhbX/LZ6Ny9x6pLztFr594ch3YNc+rEDGuxuYZl2YgIcY+dO2gu8cKTn6Y80I+byXHqdEAxXySXy5PJZC7IZ6Jr0itK9y5TAtdgmk1Sm7jmZkabiUC5T3jHIxO8761voegMsrzcpn9knL0T9zM0Mso7HnX4ng/5HDnzGr/98f/Kx//blwnrd9mc/ybBbzc5evwlPvrR72Z4eJiZ6TX6cgPkrRKq1bwrfQC3nrQtE3Zmu2ClbOlv2nAXeYRuCPaP3sv+xx/ha09/msbyCqRXuBJToAs8DLy7CPvb8LXE5AofSeDeJgyUYNzqWTBzE+zbN45Zo1WRqASdaFR4gAP1R1mem+HIy88RJwG2RFSbNYJOeIEqrya8T2PobjiTZTUKCFVCPpvFtZ0LU/3L49IhbbPojq3OdamduztYXEosl86uuue8/PoCVhIR98xKLMtGLBsuck5qTr76dVrNFt/xfT/Kro
mDNFsdFheXqFT6iKIIpRWu41LIZMl4HpZlIaJRKiIIQxDBc7MIglIKU/f/5rz5XgYO7YX3PrKb973129g79QitBqwNhZTHRxkaHKNSruC6Hp2gSTa0yKwGuJG+opj9vxjQ+HEVL+tSLJboK/ZzaN9hFqbPM3v2szQbN70I4g3HzSNtm4vMmZZlOmGpAHkHrAjsrNBogx9s1ulv7MuQsx0+9LZ3cfjD38/cwgovLX/6qo7vrue02oSChhFMFfTTQNmH+/phxyDULjMCiChEwHVsbNsxjUOBymCZwdEBikUX1VrlmOMTRnX07DLtUCGW0FH6svbbrqxdQ4ZrxWQsTazAtiK0jky0xEaJKG+CYmMifdNdbbC9V7vujQrpdU6ywf6XRpl0t22OQHfPr7GBSsbDyvaxtBah1MXEPX/mFb7ymT/h3nv2Mdrfx8L8ArMLc9SaNdqdNlkvy47RUUYH+8llM8RJxOzyHCfmzuB5GXaP7wEskijCsx3afntL2a4FngeH98F9O2wOTO1g54H7mdp7mDDKUltr04l96o06UeyjooSTJ4/xiT/+A7722efxO3eh+niz4EKQ1Gl05vCDSWzHYu/+Q9Sqyzz7jRdoNt60VscdjxtH2hbrilMW3FIaDqdgoF8YHswx0d+Hbof4tYCwnbCyFrAY3boO5icxn/nan3O2rpif3rqs7kY6vsYUf/6UNtXawSS5rAK7BfIuNDqXt/oY84wmjgWlQOuQKGyj4gCHiHzeoTJYIog61Jstsp5NPuvhuB5hrJipNYm1sTJFbOz8VhiN3wXafkg7dZLHSRvRAXEcopLoCsbFXk17K1vzpU7I7vfucZeaPTb7fmlkinAlvo2+oRLlWZuwHVMEdpVLjN//GK+ePMXpU6+96Z5OvP40H//93+E7/tJHCXXC4uoycytz1Bs1HMdloK/AQLlI1nVp+C1OzJ/l+Pw5khgKxUG0Uri2QyXXx7nzpy8r39VirN/M3HJ9ecoTOykMj+NVKmSkhC9rnDs2y9rKNHYc0Fit8sxXv8rnnnqW+XpwFe7bmwvHLdBXGkSFLWrN1Vvv8ksbYuHcLJ/54h8g+EyM7CGTyxDECX547fMR280ytede6qvLVNcW6L4jSqvL20evE9dP2gK5QZfx8RytpTbtRkxfBYYK0G/BSNlh195RxibHsbXHwtklzncWWWqFaKUJbqHpTQPHl89x6ku/vVkeBmCe9ZAYs03jkt+r6aeAcUg2MCQ9n8DKIlQ7UL/MM7NtF8sSLHGxxEarmIgA328RtWucP32Co8ePcezUNFGsKJfKxjYrFkP9fWQ8j2OLqySpHC3eTNwOMOkJ9VBf9BK3woBEBwR+m8C/kjS5jSz+XYfhRvtuZArpdQv2DgCXaue9+/dKfSnhvxkry3PUo4QIKLoWQ8MVKgN95BfyG+6vkpCnvvQJ1lprPP4tb2W+usSp2eP4YYtMxsNZMtdTStPqtFltVml32hDEECoIIxALsTLo6uqWsm2EoUoRV0fM14KLWsyxYdeYzVseLjM8lGWwMkKxOICtQ6LWKq6rKWVthgeLBC2b5elVnn/qaT711Wc43ezcESZa282za9+DPPDAw4yXswTLs7x85BivnHyDKLmFhhsNxNBa9vnEH/4p586c4fFH3sGBXfeDnVDqzzN/kaJt+p3j5ChWBvByDlrFqCRGa8FzC2RzfeSKfew/+DDveNu3UF1eYWb6BGHcoN5cYX5+hsWFhY3luUG4LtIWR9h93wQ/8NEPsG8gx5GvfYkjLx4lbGnydRjPwu4h4WClyEB/mXoA5DI0bGE5ikk0uC4EXWf3jTdhb4hEXWy/MMYJkxauSWt96IsD1GzMAnXdgLccZomK7pkWNZxsGQKfu8z1FcYRaDt2StwuSkeEYYPa0jxHXnmdl194icWlNXJZj0zGpegW0EmCa0eMDThEvoXEmuFyiemqz3wnvEgfzQj05R3mwohEQ8YFPwatFJ12h9D3icJrtedtZc
boRa823nvMpck5XU2816Z96bm3dpqeOlsjjMzMwkdTjWO8TouOv3kkTxz5nD9xkve850McmhomShLOLB6n2l6m0a4StQNUO0bHGvGyuIUy+eEySZzQXl5CzS6gO/WrdqgffsejvOXAbl546inma+u9xXHgLfcVeMcje5jauY9iqUwc+jRWVjn6/JOcc49g2/2U+sdQns3iwjyf/vRn+fwzL7PUCe4Iws4Vhnnkiffyjrc/zmjFgeYyUXGUcrmMle3jxde/QRzfwmDytMtVp1t8deFZXv76Gzz2lsf4lve8l0ff9lbOz68QNEKGR3exc/wgls5Q7BthdMcEXtGi1Z5nbe08SZwwNLiLiYmDjE5MMT62h8HyCCqIaN7/GAvLxzl+8gVU0qHTvvHmsl5cM2lL1uLgo/fw//qrP8K3v/0wS688RcuNadqalob+DEz22UwWCwy6OZxIYSdgWxZ2JoNyHEId4eQgXxDaDY14oCNuWVRJIb1UjNGa+zGUsYBZYyrGEHUGQ9oZDCnYmLWoTrE+xjQwayQJF2vnG0ElypixtfEFWpaF42XwsgUs26NRb7G0uEoUKCwdIn0xxUKROAloqyqNyMfKw76xApOjuxivJrx28jynV2p0rU1aQ+AnF+qT5BLzsO0oodMMSOKE6yvL29WaLyXTjWIJN3IwCqlE6fduCazeRP3ufpePcgni9b1WI81ecbj/3vvxY+H08Tc2td87dp7h0iR79u5kfGgnJ+eP8dKJ53jj1Msk2iLfV2B0cJI9O/YzNrSDwcFBIqU4NTvD888+zfKpV0gWVq9K2fi27/hOdg+UWVuc58VT80SxRgR27/Z46+E9jI9M0V/eQX9lhMXFs5w78QrHq2s4Kke5sofd9z5KbniIV197iU8/8yKrnTshxM+ir38Hh9/6Xt76tsNMDufQrSUa7Sqx0lSGBnjbO95JK7E4euQplNpc5nxxHLBoN+e4kZkvKtRUF+o88+TXmdgxyY6d9/KBj7iEHcV9B97KaGUns9PzLK/WETyCTkSjqej4Gtt2CWNNECTo2EW0R6sZouKYRstneXmFlZUlGs06jVbthsm8Ea6etG2gAEN7R/iBH/g+vvtD3044f4bnv/Rlpl8/TSaCcj+MDTgM9vXRNzCIj0fYDGnEmo4WwkyOpuVSjSKsrMXgQBYlbRIFyoJEpYEN3WJqNwGCWYpbYZyJLWAPhjJqGPoomVu9QCNuemwJQ+C9NKQwtu0rgVbaZLV1CVbAsi1cxyOTK1KsDGLZDqEGP1CU1+q4YYxd8sgMlTi/HLNYC9l70KOdtCkV+9g9OkC13mQxNFL5wKyvLsgYqJTUgpioE6DUlcbAX+qIhHXNGNY1595tvfbv3vjuXqdk74Pt/p0OoU7eeK2TxJgh9EYyXIzyKPirEITGl7KznOex+x+gE8LnPv0Jknjjl9/1iohYONpi9/BuRiujjJcn2DO0lzAJGB4YYc/EXvZM7Kacr+C5LmEUs1Kv8vCOe/nGi1/iK//P7xO0mlvK14uJoXEqlRKPPHGYV44eYWZ2jbFRh7c9Mkn/wADNTkBfGKB0TKvVZG56htbCApXSCJWBvVheluXqKi++9irV20bYgoiN4+bJeHkqQzu57+EnOHBoH6VSwcwco4RqK6TV9Omr5CiUB3n8ibeytHCO5eUzG541XxzlW771hwkCxVc+++tEYf2GS95Y6/Clz36F979vkHc/9iFKfQOEscfJEzO8fvIUs/OLRFqhbUUQN8CKKBTy1NpV1hqnqdUiGnWfQrGPRCtarSoray0ClcfJj9A/cnNTZK+etB2gBIMTw9yzZx9Bo8WXPvlpnn/yFbx6wkABCgXIZVxsy8EPIhqrNTqOTVs7NEOFr4V6oGgGUO5zyWQy5PI+jbqJqhAn1bhv4nyvD9iFqZi3nF6qkd7eDowTL4cxmXS7TZl1U8oc1z4hMEXmTIlPfcE9I1iWi5ctMDw2SSabp8UqGmg1AzIqJpfrJ+sW6R
8o0Kz6NJoBZ86dwY5dcpaNq/QFyowxtvcuLpQdjWOSKERplV75co0cY4LqZYNP98ybdVLFutmj+5302BAzDLqsG51icPtwxocQNyKqV6Eh4DtsboIxmBzPcKpqzD1ZgThssby8SBL46C0cGBrBD0Nq9TrZ0BTsemDX/dy36wC2LeRzBXK5PK7jopVGxQpXLMZKJQYefoTJcpnXvvRV5lpXvjxmLufQaNeJrJAD+wtMVBpMTQ6zc2yUQjZLPYxo+w1qDYcoDnDcIsWKw8iO/QzvPUiUy/LiS6/w/BsztyUD28NiorKT4T33M75rP32VCnYuT6lcpn+gDzdjEdMmsoqEdpHVdpOOrpHXOfL5LOPju1lZmebNi5ULhx54K4cffZzZ2XlcL3sDSbvbx0ys/flTs5zsO8H+nYcJB/N85YXn+MInPk1jrY0U+nDyWbAF2zbx/rVqhGM3KDRjwlDTikKGR0coVPoQz8LuG0b8GNURhndP3CCZN8bVk3YE+ODX2pw+eozm8ZP86e9/EjVX554RQEOnAZEbEiQdWk0f33Vp2A5JscKan7C4WqfeihAB23ERsbFEUBFYNlgOJNehQFRs4YAtzIaKBd5Mrh7wEGb57JMYAi9g6GMMGE73q6e/ZdNzDGDC/JrAkxuc90qhtWCczAqdJmygLSzLJZPNMTA4jFj2BUdjIQO5kkfkd6gdWyNTsdk9aNOeazO3ommq5ELN7a3cAgpoRzFJbIIB5YqKRnW1ZDAacdcB2dWmu4QMF2vX3WN7nZh6g+89haTsMv0HDpEvKxZPPQcr50CXME9o664qShBPsEJNS8FTx8+Te/Y5giDcMrRRoemEEc2WTxwrbNvHtYoUi3lcx0ZHio7qEDkxllioJElj60N0ElHO9ZH1NnZ2boZOsMzK0jzPvfBVzp6dZaRgUcrnKGSz5DI5AldYWlhgYXaOcrnCgfuewEqyeMV+rP4RltoNnn75ZarNWx+NbQP7bZu37xjgng9/gOLOe4gQ1todHMemXCmRyTgkkY/tFBnQOWIp0mk36YQJSmnK/WO4Xh9hsHbRuS3bZf89DzAwOMTy4hJXVoXySuCSLxzAyxbx/VWC9jyiPJbnAl48Nk3UaPLlrz7J2rlnQDtY7sPkRkZIUDhZB8uGeq1K1GpTb7TI5POEa4IqufQXbJS2qDVbnFtcY+bsHJnS1fWHq8XVk7YCOlCdXeXFZ58lnJ7hxMk5drng+9COIJeBOBEaTZ9aK6aBEPflcL0y82tNjp5eZb6q8XI2uVwR17FQyhCZm7WJ/Ouzizw4OsQ/eWwnX/vyS/x6LeZM+vsQhoRHgEMYKqpj0tAHgJ3AOEYHXMN00NH07zlgEWMWOc6Vm0I2glamOpxWJjxIo0Gb6aYxkeSJEoUAw0UXsRWLax0sS9BKU19OiDQ42gQyaMxYuhm6xgcFhIkiihOTGXlF70RvpEi3WFRvWN6Gd9izfbNYa8X6Mg4arAKlXfs5/Ni9LC8eZ665AmqJ9arhBbbC5Ng+vFJIZzXg1PHzzFYbfOLPP4klbGm71wjtwKfaqFOhQDaTIfA7eLaF9jxAUDpEpGMcx5aF0oJKEnQcEwQBibq64dsrZlDLEc3qGp7WDPXlGCqVqWRKSGxTX1jh9OkzVKttHn7kEfa//WFyxXEaQQIZh8hv0my0bnk2nyXC7r5B7i/lGS26jFWylIb7aEYKXMHL5hgc7CeXz6KUIgpDhkYmGJ3YTb22xtrKGqurNQZGxhiZuI/zZ76B1uvO8HxhgKldu8nlsvitFkl8vYOSgzj95Avj7L/vHYxP7kALzJybwV9t4vT18eL5FeZeeJHWsRdBNwGNWnuFjhtCpoDuK5OrVMj3DxGXFTbC0NQuqqsrfPmrTxEszZIELVQsqDBAhx3IDF2n3Je7q2tBG+rnqzybPEO22iEJFaGGM/MQJDDcB24mJvZhYQ1WE012NCTvtDi/3GKhYRJEcq6NH4QEiaKxEqNDCCRBXeez6v
g+frXKOIr9wDmMqePDrE/mbdZNHBMY+3bXEdnBmEIG0++LGJJuYlLXT3F9vlKlBLGMpq2UStOgBbEsbMclXypR6etjyLEY7i8Rtds0owTRGq3NtTsYOtusqbo6cBaYcqAZwxKQsQXHNmnaV8javFk77obg9ZLypaF+XU18I0elsYNLtkJ2ZACVxGSKRR46fIDhssX8TAutuu7eEmZu5G0p4Xve8z2cW5iltdpkbfFzLCyvUV3dOhbfmKQsOp02NUvjOmYtoZaKEa1wvSwilhlUVWJmhp6L7XqgFFolxElsfBRXgXsPvw2tEu7bswPdlzA5MMBIeYAcOYJ2jN1WxKst4moAzQCJNdoSIp2gOgl2CP254q0KtgLMU9w/0M+j9z/IZKVMpVLGdSKySRXxirQDsGyN41jkclm8NJs0ikL8TptOu83q0jLnz89hux5hcpg4DpifeSm1hcLg0DADlSJKKZbm5oii61u5wS7sZfcjH2bP3kPsP3iASn8FpYSdq01mZxc4MzfD9GvPEZz+Bqg5LrSmWiGY/zqQJbA8mpYN3igycZBMocT8XI1Wo0P93CJ67XWMYbUnbLV95f6Na8E1R48oW7NSa5JrwpQLbgbOtmBZQ6UK02nw8GJKFs7ZiPLqHDVf00xzOmqNkHZUo9Dnrk+ob4At+9XVBv/4qw326XXaCDAa8m6Mtj2HcTj2s07aGmPjDjHa+JgNKz1Kv4Mxp2xF2DbrSwZsBqU0kmi0LSitsbparxawbQqlCvc/+BC0VxgdHkCrkKXZWerVNo0ONJWRwWOdGitA1obQsmgm0FSKgmXx8OQ4RTdhba1Kvu4z0lekr1Ayle2uaOWaOmbY65bV8lg3xHRtzV3bZG/W42Z27nTZCcthYM8o73z/27AsQamErOXRri5SXV1GRwlmqM1yJWnshUIFL7uMVAYoVfpYWF677DFg4WVyhH6blo7JiMJBE0ceWiW4bojYDpYFljYzn6QDrushtlklp1qrE8dXFzoZ4xMlTSzxaTfrLAURdugxPJQn4xXoyxWwEouorWnVGjRrVZJMiU4nIo4Vke+Tsx0sS0huciJHFwOOMFbKUix4lIfLWLZFa+4YOcvHGTuArSxiXxP7bay+Ap7n4HkeYuUI81nCUoFsLoftejgZFy0av9Om1ViiUT0LwOTIIBknodmoM3/+DMn1xHRLBrv/HoJsP2drIee/cQSwSELw2zHNeo3GyizR/Cyo7tuUsN6XU5OC8s20OI7QpxS+XeCsm0UnCYTdeXoG06+d9DzFa5f7CnBlpG2nckQYVgMIoR1CxoKxCQ8njFltKZaBFQ2nw/XXWWO+1KoXlxtVCYR+guVq3KxFEqkbEuHjAy+kiTG7Mc3YxhDuZCrPYvr/CMY0IhgtuoZxUA5g0u2jCIa10Wxn2bpGiaTXe9DdYidMyJ8IKGU0bSUaEUFrMwfI5ovc+9BDNGdexdIhlpNDDQ8iWqESHx3BoGuyKddCk7Y94cK+qTID45MsNzWvn5olUdDfV8bWIQODDrbboG9gAC+TBRHkigpG1TDEbYGMYrl7cIsjuPkSmXwBsSDs+IhYeLk8lmVMKI7rdisrXSiUZ0pTCzpJyOSEQ/fu4OFDU7iOQxTFrK02OFWv0W62Ie6wbg+PuJztfWHtBJ14GRwLp5RcUcy/7WYZHBxExT6dpEPLishmHHTWZFY5cYJtO+kQZTpmkEQkcUIrjHnj9Dk8AqLo6kj76LFvsLB4nJWVOc4cDWjWAyb6Wzx8X0Clf5gTJ89wbq7NakcTv3qGhdafkh8axyv1MzoyxtDwIIcOHWSmUePkwjLNpQZsEh1zI+AB7VgzX63xoGczUCpQm59hafEUGd3C0ULHGsDODxD5HYJ2C9cx7ZYv5vDcHGFgIwhhGOCHPo1GnZGxYUZGd9Oonifj2eyfGsWOO8ws1ZmbP8n1kYEQrq5x/pkvoXUESWgyFZUFyk5tlG3MG92d1RVYj5YKMfRYxCgODigHlI
+O2qz3xwoXO9m7jvWbh8uTtguUQcqg11gn7QQQyFcglITzVUWt+3KycYDWRo9AK/BbCuvSKLDrhMZEhRzAaNInWJ/YNzEhfg7m0TQxBP8Ghh72kcZbh9DUZhwtc3Fc9kYYwjg3K5d5ZkprRAlKaZRKUKJMZqQ22qubzbFjzz7ODQ6wdO4oa02fVicClVDMw6DrMjDYj2Xb1KtN2k2fvGfRNzBIX6mfUAf05Uo0mh3W1lpksy5k+rAreTKVUZxc0RRUuqL2NiYRpzDFPY9/Bw8+/ChjoyOU+vrIp7bLdstHEDIZDxELwThVwdiTuzZlSVd+USrBtSGfsYhCHzQMjk2yXGxR6yRYtmBSl9qYl8nicuaRQJ+iUBAEh6k9OeZnPGrL4ZaukXxhgJGRERIVkyQhQWgRxSFWnEFpjYdgaTPoiE6IVEQQRYRhwMmZBb7y5DPsHiuj4qszli2cfJ3G4jTnzjZ4Yx7CBDwxESNOJ0tiawbG8jTnWry2FvJC9TSWdYZiLsO+3ZM88da38cCDj3HwsXdyLqjz+a98hRe/9BTtehOi5IanUU+US3iuw94dk9xz4CCDlQqNxVmiOCFo1tDNFaRQxLEg9ts0ViFJFAWlcD2bfC6DncmA1rRzWQqFPKVSH5VymZ279xD7DQZzIeOVPM21ZY6+fo7V6vnrE1qH0H4FTYeLV1K1MTO4rnbcXYW1m43RW8ysu28O7CzkiqATCPw0YiIlLRWZyAmVgLYxK1XcPFyetD2gkN6ym95DGk4rQMaG+dWEcx1jguiNrAGwXUEykCQavZGJShuT1vVEi2yGGvAShoht4F6Mo3Epva2uK6yDoYfueJuzjM03TNZXOZf0fJvBxoQKDgDJZd4ZLV17tqSknZiiUekK37bjUe4fwbYz1BfXCPyAbNalkHeIXEXW8RgfGWNwbASJfVaXV6k1QpRbpNqGlg+lQpFyNofjOcSui1WqYNku+cFhMoUyluVchW8+R2n0Qd7y1nfwyEOHsCQhSRJEFEHHx9NtVKKIY/OyCoJtexfCpeI4ItYJrmsTxzG+75PJ5FhuB7z8wktoDY889jjiZgj9RvpUWmmL19g669Jg11gfUWQRhmAf3MdoaScvfu0Ir52Y3WSgtRgZ3Ul/uYQfdtDaLNwbJZpOvcm5M2cYGhpg585deG4GlSR0woAgDInjiPnFBerLs4QV96o5Mq6vkdSqJO2EUJm+Vwuh0+lQGYbBySFU3mOu2aHdUWbh4UTTbvqsvH6Ks4t16i2b7/2ev8SDk4NMeAUGleLE4iznz8xSn1m5OoEug+GRHdy7b4qDe3ayZ99+/HaLThiD5REjZFRIPuOQyRuzQau+QpQkiOuSy2XIZzyynouQIeu5ZDMupVKRcn+FifERBtyDlK06jo6YOz/Day9+nTC8XIra5aAxfehS9THu+T3D+mzOYT3noHcxDgUEZv3RxAPLNfupwBC4TnqSfXs17puHy5N295789P88ppfFJnpBAlhsm2YgA7lRi6QJ4arCyQnje/pZWanTqd764tndxBkwE5x+IyKDrD+6rpXWA6aAh8cdDk54ZOo+C+cUjWDdzr1ZQrSHcZeFGP0we5lb1WgSpbESjWXFWOIg2CZmWwNYuLkSuf4x/MiGBMpFFzuTo9G2CWKLtU5MycoyND5CcXgH9bbCVx4xLtkwYSyKyJAQRCFrYUzoZOgEEWJZJEql/ewKaNvKkRl4GK8wxsnj0zTqa7Raa7RabeI4Imh1iFoddKxQWpEkCVppHMcll89TLBTQotGicDIuQRBQrzfQWmhWW8wcexlEOHL0BOXBftqdFs2FM+nFuzaOy7PiZP9DrNWarHbqDPePs3O0H1loc/zkLBsVkcwXBth/4F6yWZcgaCAoE3aqNM1Wk6ee/CylYh8f+sh3s2v3HqJQESeKKIrw/Tbt2hL5HBTy3uWjJi/B0twsy+fOUykk9Geg6cNZH77+Rp1H1AxSyLK41qTafvPqRYnSzC4u8T
t/9N85dvIEjz90iLi1xEBU5z0HdzC3Y5Anv/oya4t1tAYVXd97J4DjuowODzE0PIS24PT0ab7+8kv0eQ6lwl6ylSZu3CYjCsm6qEabsFUjbPcRl4qgFK5tgWvjeRaea5HPZymVK5QrZUrxMkUVEMWKc9PTLMwf5/qn3V2/ymZz/g7r6lhX4+76ZHr/T00jWkNnLd13OT2+S9LdfdMMX31zK15f/uxhj2whhrxTw27BM03STkONKUBm0CK2NWEN3JxQGcxRrda6DuKLcQvd3xqTZn4OeARjt26y/tgAHAsmJj3GdvcxfzTiqFY8iyHlChtHahSB+y0TencGYxd3LzPY6sTYsy1LsFSCpSIkHd2VNv3D8TLsfuhxjr/0dRZPHaPeDrHJ4Ft5qoHP+RNnWGn7TO3dx8TEDspTYwzky8TaIgkjdBQTBj5JrY69tkawtsT56XOI5bD7nv0cCg+jrqDxxcmRr/QTh01efvl5tPZJVIDSmiSO0WGMhMm64z1JUFGE6Ag3m6UyOILruohj43geURjRqFVpN+vE7TV0PA9YzBxtcN7KoVWQhl4JxsbYfTm2hlL7UXqZTjBDrjyEZTmoTmdTE9Dk1H6mduxES4IlMVonaRSPJptxUYnP2VPHeeXlSSYmJ9FptE+7WWNx9hxxa5GJsQH6B8rr60leIU6fPM3y2TWG8mCnb2AIHKkpOi+uUioZ86loKDrgK4jUxa+KH3T42rNP8vXnv44rmuE+l0cf3cvg1Dj3P7CLxUaIlS1w7MVXiFauLwoj8lv4gU/L91mcPccrL7/Al09MM5F1ODRRpDLQj9taQlXzeJVRCpkMQayIO22CTockKYKAiMZ1hFzGJZv1yBeKFMv9dJo5nLCFH1ucPTdNGN6I2h2X03o1V1CPk3Uy7s1JuL01uK9oSLAy4JWEuKmJZwHL2LJHBlyqqxG5fvCbkOkXShWHUMVEg+DkNAkthic9/FaHqMe+YHmQLRn/QGDCI28a8pjBZQUTf10GHsUQcZorhAssK3jypTbfONrmSBveSIytu4wh+Y26QMYC5RmTRJsre5yx0qg4wbJtbBuSJEawsaT7OEz439DugwzsPcTK6gKtKEJUBm1niZM29YZPeHaaleVl5ifn2X3wfiqDQ6g4xm91aAURnTBicXGJuZlpVlYXmW800SLsO3aMd3Y6G2SkvRk6XGPt1NPgjpqOAMaU011xPk4QZaaVOlEQh8ZZQ50g8llonUvPZCHimEUDVLegbJB+LNAROvEwL0nXBulgiPvypOjlBihoi0xjjVyhgPgRjWZnw9cyn6+wZ/d+8vkMnaCNiCZRIYkKUSo2ZQH2PcSLKzOcPP4Ki4efoDLYz9ryHKdef5GlhWnylTLDE3sYGhrBtq/O8VR0POKCQ6wS7KzCbRs/YsYBq+jg9WUp9hUY1DDYCmkqh5mVNq3Gm+d6SWJKFUyvhix88Q0KhRMosbCyGUojg8h11oHIW4KbdEiCDn59DX+pSX1xniyQsW1DaUlIRkJoLZIAXmUUp1jCV9BudwjDboKTwrEtstkMWS9DJpPFy5doOgUCv0q9HTG7MMuds2pjl/jvhNou67gsaVslGJxymZgcIGiFzGZqiA/776ngKaizxtCoSy5OGN7dx9SOCaJawOzZBdbWmkRJg/7BAisVn1q68rGThfIA5AsOga9YCRS90T0i6SubBduD4DozWRMudi+sYOpi9wPjjlltphhBoOFkBGcjY/fudp12esxGXWlFrS96cPkCoqk8UUKSFo0yoX7amEcsy6S4a+MQ8Qp9eJVx4sIYcRzhZXN4IowXcgyMBER+G79eZWXuLI1GA8/zsJOQoNVhpdFhxQ9ZDULayfo029KasNUgCUMTtnTZF0SDqpviJThABuyi+d0Ii5b0RpLuEBinrdYC1eg9U4oMZshMHSYXwqW6n673vhv/efmQv8FKDks62BKRhHUsu0TbKb5pcpzx8rzl0XcxNTGC364Sa5VqykKSxA
ShTzZb4P4HD3P29KsEfpPZ6eN0miVe/MZXmZ85zZ79h5jYe5BccZBSsYhrX910uFDKM5AdIkwSBqdiBqfrrCwl7Joc4N57H8C1KgS+ZmVlDVmrUnRL1ON5Wo1TW543jCGsdYepkLX567ULw0TBxovrhPUVWlmLuL7MgBXwnl0VSvkCLgoVxzg2OFkXZWtElMl0Thz8ICSKIxKdkKjYRLU6DrbrYlkWGpvIyuHHDvMrqzTaN7fY0jcDLtvbymMuh+4bZnJ8DAeLtV3L2NpicmyCRrXOwK48/SNlgiRgdHyYHeNTxK2YmalpZmbOmym0hnqtTS6j8Gwby1GIJOTzkMnkiKIWa7NmCbLysDA0lEPbGjsb4Xia+dnkuuLVU12OPOtJKd3CUNkEdg1kGbUdWGzSSowzsVuPBNYjTjZD16UxhBkI9GUUrySJUUoRRompyQygY+O4w0JjkYiF43ns3nuA00de4Y0jr9H2IxzXo9BXJpP1UImHsgtgOywuVfFbLUQnBLGiptbdEL0wEz5NHCfEyRU4Taw82EMQtTApRllwi3SHQREzK9Baoztt0D5m6MqmLd0lEY91A1MexxtAE6NUhI7DtDa1cf6I2GBlEHHRWptkpMuYIGyxcCwhipqsLi4zMfEQA7vuh2deSB1EIAj3HjjMrt17iYKAen0VbduI5eI6eWNS0QlxkjA4MMg73vl+/NVzSOMsZ6YXCVtVHjz8OLsOPEKsTfhUEidXHSv9x587wZ4RmBxx6BuocGD/OPv35hka3MX4yB7iwMa2PMLwFOdnl0jaNZL29RPwtcBNYqJ6i/ryIllLI36NrERM9XtkbUVQX6FZr5BdXSaHi+fm0FGHIHEIJYslQpKY0NYkSVAKYiVEscL3fTqdNn5s0YkznF+sEsV3llZ7J+KypF0qO0xMFBkZylLM5dm7s5+M65LL5OkE/Wh7N5YLcdxhsK/EYKVMHGhKJdixqwJAvd5gZLRMbdEn42VptWo0GmuUKyWyuRzKDkmI2HeowMhonr5yH/lCBoVP229SGa5z9vT11eDtOhs7GGfhAYzZo6mhEdjs3z3B7nCGsytts6QY68Rtc7HZoxskpDADQS79f1ygkoHVy4gax5FZXzBWIDaCMmGjSrDEMaWklEWiE0Ymd7BjcoIzb7zMQqNNSBu31kDZFnGS4AH5TIYgjOjE8ZZZkqQyR3FMkspwOVhWDsstGIJXPoiDLUlPNqU2U1+t09btsJ5442BIuzf1HaBFHM6zbiOMQQs6MRqvTltZrBy2k8FyQGRrWXWksLUQhW3OL5ylWNlJ/8Qe3GyZsLOWSmpz5MTrHD31OoND4zx6+DDlcpZWx0cj5LJZrEQRd+rYopnq9witPFHQIj+1n32PTFEZnKDZ8ZmdOcvZM+dYmptheXn+su3Yi+oKNPshjmPajQYjk2PsmHqA/r6dRC2LxelpOq02Lz7/Em/MzJAoTXgFz+pmQEfgx5pmvUEu45IhwnUzFLIeng22ViRRRLu2ZsJWk4Sk2aRjlei4/Xj9owRBSBwpogT8UNFqhfiBQiUWUZjQaoes1kJOzcygb0sJrLsLlyVtsRJs20esNo5j0ZcrkM96iCQUKjm8Uo6O38ZVFn2ORdFNSDIutltiYDiHAFHUz759E6zMr5HEilY7T7tdIJ/PooFcKWZkqsbOnSPkC3nQGse1ScjgNiMSK0ukr934LxjC7hYHbWFCAd+OiSQ522hjzSyQ8xzGBFra1BzJs14s6mzP+bqOyQ7rhJ0BXG22DW1dJoMoMpp2ohOwdEraGpVYJr3cgiQR4jDByRUoVvqplAo0602CBMIkoREmNMyu+O0O3TLkV9TlVWLqaV+Jgy+uoeI0XIgEtJB0ujp8l3S7muZmkR6Ki9OSQrYeWtKzqTpxmEOcTLqO5uaYXzpBGLZoNBZpNtawLM399x3iwcPv5OXnvkwU1IGEIFgDsVBqiMHBASYnplhZq1GrVVFRRMdvE6BwLE3GcfDK4x
RyRQqVUSw3z8zsPM89/zQnjjyN36pekV/gUuQG4C1v38GgDceOz/HG6dNEuT7s3AithTbPfvlJTi8usOh3SLSmguAiNG6DrbcZQSZnEStFbXkJTyL6K0WcfD9YFkkckyAkUUjiN4lQRG4bXxq0nIDIydJstGm2OsSxotkIabUiOu2AVqtNu9miXqtz8uQJGnfheo23A5cl7TAIabfX6GQhIxF5V3eXXjE1LGwL1w3JKYWXxFixA5k8mSx4ykLEwrYz2CIU89BpN4niMkmSRZPgBwGZYpnRnXny+QIiQhiGJEl3xRCNk1X0j1zerrkZukTdS1HzwKvA+4CW1jy9UuNwyWJ3Ds63jcPSZ90NdpHnnvUUeJs0TR8TSTJswdTk1vI0Wi0TfaEUfiiEGYuM42BbMSIO2BYqCQnaMX6ssbwsuXyOvoJFlECzrfBDo+13rcdX8zoHcUi70yYOr2Qq2l0mohdXT1TXBgW00HGLy3kLXjr2p5Bozs+9Rhg4uGKxf/cOfuD7vpt8LstTX/kcuWyRXTunGB0ZZWxslKkduyiVBgkTj1i7hGFIHLaN7i8groebzRMqYebEGU6cOMrpE6/QqC+hr6NAzoNvH+HRd72PnNIcn/tznvv6Ei8f/wYDldMkqzFzi8uEWmMBw47LjsoAoShOra3Sim9V2xvMAlYM/VFMu92h2YnRUUKu0IebKxLFmqgZgNvEtsFVIYkd0FYdalZMPj9Au92m3fIJooRGo0O1WmNxYZHFhXlWlpeYOXeW2elXuXX96u7GZUlbBRDWfUK3Q2y5JBmfxDH1GGI/RtHBtkzmmI41kW6jUYjtgJh0UVEaxxGKBRvXcUkSTRxrOp0mURSQy0M2n0WrhDCMsR2NVprYD4naAXEcEV3H9LC3OkYXGrOK+j5XeLzscWw54GzTOAfPYEg9Zp24k0uObWO06kz6PYtZ4qtSgmJua3n+yS/8jTRT0FgZHNus6CNiX0j9Bk0ShyRJQKu6RLO6ShSbIL1Ereu3V6t7KeDLLzzPG//0H7NavVxBpTsJW9/pr/7qn4IWfD8CsXn1+f+Lwf4/xvdDlhYXSKIGvvZZXFTUa4scPfYiX/nK50AswigiTmKU0qnmbDLEjH/SaJN+p0kQtC7Yx68Ha2sxz7xxnrAT8OLxFq0aUAupzl28UJ0CVpOERm0NLZrwSnwQNxgJMB8pys2AIVdDAp1mQL3eppgbJsnl6QQdwmpAsxOSzzq4uQJtq0DbtbD8NmEY4vshzbbP4uIC52emWVyYY3FhkTOnz3Hu5IskUfWW39vdCtmqbKWIaDcHxQHIZG0ynk0m4+C5tom7tExqsm1b2CJYKjV12pb5WDZaaURrbFvQWqHTehtJnBDFEVGUkGgQsVBKk8QaSUuQhr4iDLVJaog19Tm9qbolsnlSdp+ApJXuEm204y7pTVnwP/Q7qLWYs8ok43yd9USaAsYxOcObaaNrdgHjhHzYhcl+syDwby5em6y3A1pvLOvdIifcZbI6op28jYoUyr/9Yl9Ju+50hN1Fi4xK8ARKIyOUdj6Iyg7SbrVQrVXy0QqlTEip0ofKD9LIjdO39zEO3P8wlf5BZs6f5/WXXmB25jSNRp2lpWVOnzxBszHLlWYSfjP01evFZTXtqANr52G9AtatL7x+PcgAIw6MjsKahsVlGLKhGcBsArMKvrIS825ZD6PvrRwwKLDbgjgxmncGo4WnQW8Ixq5dwgwKJ5Zg9Y7qPtu445BA3Li7TAFJrOm0ErRtEn/cIMZqRXQ6Pn4nRrdjgsYqdr5DLmMTSIFq1ELXatRrNTqdgNdfeoFvPPV5GtUltIaVlTVajXluRer3NxNubr7lbYRYMFiEvg702bBvMkd2KM/RI6s4LY2VgagGCwqextTYKWKci0OYmiNZYK+YCnpOYkjdt2E1gf0FKDsQBdAOoKZhWq+bS7axjW8WCCZivhpBLoKcA9KJsIOASLcJmm2ixhKdapU+sbDdLJGdY63RwZ
+ZJpfNoW2b6bPHqdfXCMIAEQfLtnHsDFGy/cZcDb5pSduxYLCQOhJDyOVK7Nt7iPbqC6yebhBpGMtBtQMtZUwiD2BKts5hGuYB4N4cxDY4vum8rVTjFgumRsAJYW7BhPnVSAtG3aZ73sY2bga6PqFuDFE7Br8VkQ06iOsStJZZWjqPpWKKHYeir2nZmk6ng794HtcG7bgkcURlYIR2q02sNOLkiSJNrRaj7rIZ/O3EljbtbWxjG9vYxp2Fm1utexvb2MY2tnFDsU3a29jGNrZxF2GbtLexjW1s4y7CNmlvYxvb2MZdhGsmbRE5KCIvikhDRH7mRgq1jYshIh8Tkf+yxfbXROS9t06iGw8R0SKy/xZe7zdE5Bdv1fWuB7dbVhH5KyLy6es4/idE5Ks3UqYbgdvdrteK69G0fx74gta6pLX+tzdKoDsRInJGRD54u+XYDFrr+7XWX7zZ17nT22EbNwda69/WWn/odsuxDYPrIe1dwGsbbRCRa6/utI27EiLyTRvzv43Nsf3cbz2uibRF5POYAnm/IiJNEfmvIvLvReSTItIC3ici94rIF0Wkmk7fv7vn+EER+RMRqYvIsyLyi7dq+iQiUyLyhyKyJCIrIvIrIrJPRD6f/r0sIr8tIpV0/98CdgJ/kt7rz99k+f6eiJxPzU5HReQD6SZPRH4z/f01EXm855gLGnBqSvl9Efm9dN/nReThGyDXm9ohNWn8pIicAz4vIu8VkZlLjuuVzRaRXxCRk6lsz4nI1AbXepeITN9Ik4+IHE7boiEiv4dJeO1u++sickJEVkXkj0Vkomfbh9LnUBORXxWRL4nIT90oue4kWUXk7/c8m9dF5HvT3y8yb6TP/W+KyHHgeM9vPyMip9J36F+LyIb8IiK/nD7fetoH3t2z7WMi8t+26OsTIvIH6ft7Wq7CNHs39YEtYarNXf0H+CLwU+n338AkBL4TMxCUgBPAL2BqKr0fs9ziwXT/300/eeA+zOpfX71WWa5CZhtTSvvfYJIls8C7gP3At2Ky0IeBLwP/R89xZ4AP3gL5DqZtMZH+vRvYB3wMU3Dw29N7+OfA1zeSL903Ar4fU0bl5zAFDd0bIF/vdXZjyq/8ZtqWOeC9wMwWx/xd4JX0PgV4GBhMt+n0OXwkbYMnbmC7epiS6P9L2ibfn7bRL6Z9cxmzbGgG+HfAl9PjuhUNvg+TJPuz6XE/dRP7wG2TFfgBYALzDv8Qpm7aOPAT9Lyf6bP6DCYBONfz2xfS33YCx1jnh0uP/1FMKXsH+DuYcj7Znv67YV9P5XoO+MdpO+0FTgEfvpPb9Yb3kevoXF/kYtL+zZ5t704fhNXz2++kD8ROb/pgz7Zf5NaQ9ttJy19fZr+PAi/0/H2GW0Pa+zGZ9B+kh2TTdvtsz9/3AZ2N5Ev37SV0C5OZ/+4bIF/vdXanL+renu3vZWvSPgp8zybn1sA/SF+sB25wu74HUxpaen57Mu13vwb8q57fi2n/3A38OPBUzzbBDCg3k7TvGFmBF4HvYWPSfv8Gz+8jPX//DeBz6feLjt/gOmvAw5fr68BbgXOXHPsPgF+/m9r1ej83MuRvuuf7BDCt9UXFh88CkxhN1rlk/97vNxNTwFmt9UVV/UVkVER+NzVL1IH/ghlhbym01ieAv4XpuIupTN1pWu+aVm0gK5vbEy+0Z/oMZjDP5Gbgap7dFHByi+1/C/hvWutXr0uiN2MCOK/Tty7F2Z5tFxYm0lo3Mes4T6bbettSY9ryZuK2ySoiPy4mIqwqIlVM+Z3N3oONnnvvb2fZpM+JyM+JyJHU3FDFrPzXe53N+vouYKIrX3rsL2AWmroc7qY+sCVuJGn3NsYsMHWJTWsnplz1EqbuzI6ebW+ya94kTAM7NyC7f4aR/0GtdR9m+tZbD/eWFWjRWv9XrfW7MB1UA//yGk5zoT3TZ7AD80yuW7zL/NbCmLy617Yxg3QX0xhzz2b4AeCjIvKz1yPkBpgDJkUuWh14Z/
r/LKatARCRAmbqfj49bkfPNuHifnszcFtkFZFdwH8EfhpjsqpgFnfarC70Rn2h9z3eyQZ9LrVf/zzwg0B/ep3aFtfpxTRwWmtd6fmUtNbffgXH3k19YEvcrOSapzEj5M+LiCvGofRdwO9qszTIHwIfE5G8iBzCTEFuBZ7BPIR/ISIFEcmKyDtJy2EDNRGZxNhee7GAsZ/dVIiJfX+/iGQwdr0O11Zs+DER+b50cPpbmHWJv34DRLxcOxzDaEXfISIu8I8wNsIu/hPwT0XkgBg8JCKDPdtngQ8APysi/+8bIG8XT2EUhZ9J++P3AU+k234H+Gsi8kja7v8MeFprfQb4U+BBEflo2pZ/Exi7gXLdSbIWMES8BCAifw2jaV8N/q6I9ItxLv8s8Hsb7FNK728JcETkHwN9V3j+Z4CGGGd9Toxj+wERecsVHHs39YEtcVNIW2sdYkj62zAG/l8Fflxr/Ua6y09jpkTzwG9hGu3aV+69crmSVK79wDnMNOeHgP8N44SoYR7SH15y6D8H/lE6Jfu5myhiBvgXmDabB0YwNrurxccx97UG/BjwfVrrK1kQ8nK40A4YR85F0FrXMLbM/4TRUlpcPJX8/wH/Dfg0xrnzaxgHZu85zmGI++/fKA992h+/D2NbXcW0zR+m2z4L/K/AH2AG9H3AD6fbljHa/7/CTJfvA77BTeyrt0tWrfXrwC9hyG0BeBD42lWK/3GMo/BFzHv0axvs8+fApzAD/FmMcnJFJrb0/f1O4BGMc30Z09fKV3DsXdMHLoc7ojSriPxLYExr/Vdvtyx3O0TkY8B+rfWP3m5ZvtmQmppmgL+itf7C7ZZnK9xqWcUs93Ug9ct80+JO6AO3pfaIiBxKp8YiIk8APwn80e2QZRvb2Aoi8mERqaTT5l/A2F5vhKnphuNukvVuwp3Wrrcrm6mEMYlMYKZiv4SZWm1jG3ca3g78V0yc7+vAR7XWndsr0qa4m2S9m3BHtesdYR7Zxja2sY1tXBm2S7NuYxvb2MZdhG3S3sY2trGNuwhb2rR/+ugf6E986lfRZ87z4akP8LbH3o3KaubPzrPWaPHazEnOnDvN3MlTfPA7vxW7z+NLT36VxZfegNOxCaDrx0RA9wacjXgwOoYsBOi1OsQRXJykuCG01psG4FsixtBjl3F3vh2n0EenVoNaDTrpUupRG3QbE0Je4+J10x2wcth9OSp9Bfbv2cmHPvx+Hn/LY+QLOZRSOI5Du93mzJkz/Pr/9Z95/unnrklW+dsnNZ4FWkMUgyjI2GDZECmwLXAFUBBaoBxTLcFOQMWQaPDTBo19SCIoeFDIQKShkUBig+NBEkMcgCXgCDLgsWNHhrGyxbwPNR9qP7VrQ1l/79MvapTGti1s28Z1XVzPI+O4OGJh2+C6guvZWLaNiCAIYgkiYFkak3wqiNjYto2T7qc1KJWQKAVaowGlFEpBkiiSRKV/a6JEo7TFOx8Y27xNTfTCHYMtn/9dJ6sNmXHsoR3gWiStZaieh7hFcTzPvbsynD9TZXlREyZAVhjcv5dDb/kQu+57N25pkjDWrC6cZ23mDOdPvMLsM59EB3VzEcvl4Y/8JA+/6zvQOiH02zQaNb7wG79Ip3r+imS9m9r0erElabetOpK1cQtlOpHwZ5/5HM8++VUa59fIDBap+TVUTnPosfvZeXA3J89N01hYg8XYpIVYmHD9S/nYLTBw79vZ+bYpTn3h89RPvHjdNzKch8U2kLQhaGGV+sGywPPMDjoBXEOCaN6c0JWAKATDpe22T6vpo5TG8zJorbBtU3G2WCxRqVRS8rmGvpKzIUmMCI6A50DOA9cxv8fKjCexZUg96YAW87S0gLbBFpAYXBcCF8Qx53ESzJtjgZcBlQVfAxG4QqEkvKs/4S39UMWjaWU2FVO0RmlFkph7tGwbRxky1baVprBplNaIUmBZhrhFsKx00OlJdNNaG5JO2y1Rar2WQtqOIuYD2pB5+lHqWnKMtnFjkE
AwQ3K+S6Drfb452+a5hTaqV//xNSuvneLrJ3+LF0a+SGZ0D1aU0Dh3jKixjI46FylpIhbFch+uBUESE5OQaLl1ach3GbYk7VMz3yAI16gurvLFJ7/A2rF5WnOr5l30FmDSpjwxQpgJ+fMvfJq5E7N0lmsgltEYNUbL7m19C1htUv3iV2jbOYKlWa4t6e9iTAzB2jREOiJuzJIUB43m6lrmorFKjUEbEXYKW6MtRSIxrajNSn2VVugz5Dq0GnVWl5bRAm2/RbaUQyxBJ9fQtYazpn2UBaIN+dpAxgLHNoNcO4F2ZDRw1wJJzDFBSug24LmQzZn7VAGolrm1QsY8g6wDoYLqsmnishnIshKz0wvYb8WE3ualz8MwQmuNnZKxTjXfVC9GKVBaEKUQAVsLWF3iFdPu6cjdJV6lFFqMOOstJxdIuxfrx2i2OfvakcsVGR6ewHYcFpdmaTWq13imjfv6RYR9YVdN0mnSPnuE9tluTt3Gx4tjU6rkQPvoJECrCJXE16YQXSV2jWQoZB1eP9e66de6UdiStF965hPEnSat12tUn0kuNnHEUMh50Gpy/HMvEE2HkGiy909gTeZpzy1uomVbEMaoxVn8G3gjY4PCwqpmrgm6NYdeG4diP+gIksAQ3gWqsHhzqQNtzAjkEMciJqbWbrC0vMLczHm+9OnPMn3mLJZjYWccsISLyxhcBUo2JA7YLl7GwU5ikjhCu4JCk4QKHBeKtjGLeLHRTILE5BgGQJyAJ8YskvEQP8A5O0vSilDDu6HQZ8wpjbNYJ76IGtkDo/2EtstrgZCrWfSLws2E/KVNxAzDEMu2kHSwE9FYljZ/iwbRRrO2jNy2aCxHY9kaSTVlNCCCApQ2A7lG0JaYZyCQIGZWpEGjUem/CZoYIQGSK9S7HEzxE+n5WD0fMHFb5VKJys49lHbsJFMqI5aFoMzYoTVaK1SSoFWMSmKSJCLymzQXFqmem6bR6hBielT3k2C6++UNfbcGeddm7+QEjzz8CPv37SdWCcdPn+SVl1/m3MwszWgjtr0Z2PrZiS1gxyjVSWeaEcQRcpNJ2xO4t6BwSDgBhDf1ajcOW5K2qk1TyuXoBAnJpT1RgbMY0jrfIW5wYSacTDegbG9c/mU4D3kPZmo3Sv4LGCh5jA8ELDRBqTY05sHNQtCBwDcDjpUYsrlQYfHSe0pI6jXaRpHkzMkZ6guf4Nzx45w6cfzCFN1xbR55/H4cxyaJr6XjKxAL24X+gkOf5SKJR4zGTyLadkBkg3IcLAdiHRMrBTkLCkLSUekMJjEatygsK8bJuahYAG1urz5L7qVPkXvjKZpD/ehyBi+f4VRosboGu7IWU5nc5lLqBIuuyYOUpDWCQizBtsXYpQNot9ssLS0Tq5CBwTJDg/2U8gUsEXSqgRs+TNs/Mhq7ZVuIKEPV2phaEh2nHyFRkChNklxZO/djCmY46cdN/89iyNoDpvbt4r4f/BF2vv8j9O3eR7ZUQgS0SoDUdBOHJJFPHPskcUAShwTtBrWz55h96ilOf+qzTJ84w1qi8DEvfNdTsnoNPeJGwwPeMt7HffuGGC/FDESLaNuhODVAJd7LF1aWOR517ggTRBIEvPz0UyzubjE4MEQ2m0dUzM2u05ZoeP5MRE5HVCxYvEtmc1uS9v27h+nUFKtBh2SDUa+5nFywWnYr6kbnGkQloIKpLtHVzjM25bc/RHtulehcdUuhunR6NY8s7wqljDERhxqI1sAfgjCAIIBEjOapu3rRJlpyHBOurRLV67w2t0gSm6laL0YGyxy65wAnj54l8K9hfNYaS0EmFnSQELuCi2ApY9/Nu4KyFdpNsBwhiBVBorEssHIQuwkdNKqZgCSIjrH8BnGpSFIpGrOLv4D92ucpPvtxLD/GdhysjIvnOnS0Jo4VU57HUGlz0o7CEKU0loCyBaVskiRBWxYi0OgEnJ6eptmMWe2EvHDkCItLi+ydGuN9TzzMWx5+gEIuj207OJZFoCLWmj
WSICbrZPBcFy/jYrmCJYJKEqIkQScapW1UkqDCBLTGvkKty8YQVi9hO+lvOWB8fIiH/vKPcc9f+lH6du4yM4L0Geu0X2gUiQrQcQsddxAVY4uQyecY3L+PwtAgpdEx8h//E868+CpLQYROr5ehp8zhbYQFrC7XebX+Gue8NxguZhgcLDG85wAFLXQidfsIWxyc4iS2kyFsL4MlzL78BvNHZ+gbGWZkai8jo5PIDTCbboUEWNSQF9iZh6XmLSzneR3YkrTH+vt49vPnCU9tfCsJMIRFJZOnoUKWohClMSVghgSy+kIh1v77d3Po4Uc4Eb/KknsKwouJ8ALxc20N5zo2ORccK/XDqQ74TWNw62ppScL6JLbbITZyTGp0EhElb66xZFvCW956mMOPPcrXvvosa6tXP2uwLQtX23jY+J2EZpAgaCw02DEZW2FZmlglqMRIWnQcMlqwbE3Oc1hNYhbbMVpCMk5INlghCCAuOUCIu/Aq1tMfJ1o9it1/D1bWQRERJgnagoyrKGRiypnNJ/N+uwVoAtehUCgiUjTkmygW16o89+qrPPnMc9Q7mmocszA/R1Rt8trwECqOKGYziNJ4tkchnyfQMadmp+l0Aob6hyj3lclkPLK5DLmMZwJ8ophE29i2i05iIt/HEguv61C+QlxqGrGBcjHHPR/9KHu//XspT+7EEps47BCHLbQyUS4aUHFI6DeIgjpJ5KNVgkajxTiB3UKe4ScexyqX8AY+hf3Vp5lpNOmmyN3ORRMtzMDhA6+2E7LthAFgZLHD8Nkq97UTMhP78LwcdG5tzSPbyuL2TVDe8yD7Dj6C62RYqS1hOTGL58+zdvYEtfOnaTXauHZ2IzfHNSHjwsSQi+NY9JUzFPIOq6ttjp/xCeLUnGVdzEF3MrbsX51qk/kvddCb2OgzwHhuiEfueYi1xjKn5s+yFHZoqIiglaAHzMtSsQo88fbDYGnGxodRDxyg/sZporaxaruWzYM77mV2ZY351vlLrmJxJY5Kt1AkWwjIOzHtBNA+dNbALmIeRZesI8xktjvdvrrR3HZtdt+ziz17dzM4OMDpk+eu6ngwGpmIRkuEshKi1D7sWgk5V5F1FNiQKEUQCbYFE0WPqayHjSZDwqIbEPmaNj55CchmIxwSrGAZa2mW7CtfpTP/OiGKvnKJTH+GOJeQOD5ahIqn6M/Y9GU376a2aMIwoNNuEkcxCARBwOryGl9/5nk+9ck/pba8DNkyujEHKoTMDtpWjudffgMvbHPkpZfw2z59pRJuLksjCgi1oljoo1juI5PJkcnkKBcr5AsFstkc+YxHPp/DcWyiJMGyLPK5HO9/61aluA0E0+esnv8doGBb7H7icfZ91w/Rv+cAtueh4pAk7qDiDmjVHTZJkhBTUM6EKioVk0Q+KomMvdt2Edum/579uD/w/eQGh3H+/LNES8s0uXmkLUDWgqwtaKUJlYnw7BoSuoWed2DW4FKY8nd9mD5XTeDkyXl2OFnKhQLUqtcsi21DyQPfT4OTtoBb6Gdy7AHG9j9M5dCDDO/cQ3+xQhLFtP0GlhvR7DRYmJ1j/tQp2o0mmWIFNl5i8qrQX4T3vLWPd7/nEP1Dg0zs2kk2X+TUyVP81q99kWeeX8PWb9Ih72hs2b+kE6G3GIxdbAbKQ+zYuQd3PkfBqyCezWK0zCl9hkaxTl/JoU/lkfoyCwvTrCxW2TExTHF8jGMvvsLy3DI7hiu89z3v4LnXTrD08jJJ0nvRKyNVK99HvtSmP99gJcBMdXUVVD49R8Q6cV+7A0ajiVRAoVBgaGDw8gdsgKIT4UcJKg1nc20H2xIcKyabUfTlBRVrrAh0pInjGE8sRgo2JRRZYibyHqI1K4FFWGuhEp+4nKMUR8izzxE+/0X8uI0WwamUyQ+UyJZtsBSiYaro8sBQgf197qZyuhkHx7XxYhOCtbi0zMzMDM899wqvvPAyraUZ6Bs00S/xGhCCn4Fll3nH56gdceyN12mtza
F1glguYudQKgQdIraHZReBBNvJkykNMTq1k4FKCctzCLAIlKAEHNvmZ//yB6+ofXvNa4JRLkYmx9j30R9m8OCDONkcSRITBW2ioI1WkRlEtU4dkQm242LZfYawgzZBEhO16oRBA4WNtl0s8ciNVNj1HR8iM9CP/v2P056dvY7etTnyFhwesnjbgQHGB7M01pZYXQpoBTDXgdnIQseKwSa4CRzCDFgRF79BS75CTc8S2tmNL3QFEBsefNDmI/sS/BPw2WNwosOGgQXiZNj92Pt4x9s/SmH3XoJ8HsQjVhaOo8h7GcKkScYSxnd6DA+OEwUhYdvniH19w19/Dh6/T9g9BmeOTnPmdBWlHB574jAPPnQfP/xjMQ89cI7GapPZ49PMvhpedgC6E7Blq5x4aRW1xQjUImG+XWdmfpGwHbNvz/3s2jFFPVrltbUXeGH2KZLQR8IOJ59+nmonZHk5hKkmh7/t23jg3nv48p//KcFajbNzx2gmddz+Ikk1Nj2tNGhMHEH7sjdiOS7ZjEOlCPZa14Pvg2piTtbVrq/PweG6NkPDQxSKJQaGRrAs66pjiEsFCzfWxDohSBRhkqT+UUUUKdqhhaOEvG2TJCG10Ge+HWITUdQ2OzI2u0tZ7isl+DmHekeoJyFhkBDMzlB94au0GudNNIR4BOVBsrkilYzHQMbBsS0qWRuNouVvLvuJ+RqOY5NxbRzb4+TpWb72la9w9uUXiVs1kCJoF1pV1p0XCSRtwlaAm9vP4+96D0uz8zSbDcSyUEpTW1mm01jBsjM4XoFOfZ7QrxL7Tdo5Gzfpg0yGpjis+Yo4AXUVoZVdY5dgOng5n2Pft30n429/D16pRJLExHFAFAaoJDEx5SpBJRFaKbQyjlaxLCzLRrwckg/RcZMoTIg7TcJQoZTgZApk8n2Mv/1xtGOT/Pc/4rXpS2eL14exnM379nocnoD9o4qi1+bUcsBIAcb3Cm5lEH9glJmlNV7+whwzi5o8JlVilfWo2zzGrt9o+vj2VZpGxEv9QZrMxDijhw9jFxbYY3+DfzihODIPnzsNr9agqdfVIh0HnH3ua+Qij4nmYxR27SXXN4xlFRBlIUrhxhZ24hFrTeQKkVhIEiNXtJjN5mj5cO6UpjpX59W5OqGaY+Djp/jpv77MD/zlb+dtb3uCgVI/54+fJhusceTEKjOdGxGAfHOxJWmffk0jJdCNjbdr4Fh9hrnn13hg5CDveNd7uefggyzMTzO7Ok0lGuTEuXOs1ptECfTbgp1o1qaXWTo1wxNvfYz3vOutHH35RZrRGk6fw9ToPUyfO4+/sAYF5+Iww62gNZ5rUcwZu3aswHSdGmai3B19eu3ZV4+de8e57/77yeby5IsVLMu+atIe9kKirIWyNJ0IOpEmjBVhFNMJTIJB0YKCY6FCTRRarGmFFflMZDIU8x6jlkPF8/AdRc0V5sIGMy+/SPulF+icP05CYpyaTh41tJPAKdLveTw6nGUo5+CKJqdicltEZXz6+eO4noObFdysx/LyKnNKSAZHwUsdnlGQJgmVAQ1OBZKApBMQhBG79+1mx/gYrueRKIXf8el0OgRBAFqjlGZteY3mWtWElmdd6vU1wqaQFMooZZEohyS+8jbupfcMMH7vPez8tu8iPzaGFkUU+MRRCCrBth2EhETHoJSJIFEmikUnFiI2guC6GciXCP06nfoafq1J0OqAbVEaGqM4OMH4E4/wiJWQ/dPPXFV/2ApDGeHDB0rsKYT4S22qcYSVdZk9akweu/pt8rjkM3lKBypY9ZjW5xeZDqCKqeifYMwk/cCBLAxPOkxrh/psyOIWg/Y6LLJDB4kDkEyOQ2//EOP7H+f03BGO1BrsiU+wc1/ED43Cdy/DmRp8fRlerRrzTdha4OWv/z6vvfgpnIEJ+vc9yt57H2dqag+lYglXLGwsBBvLtrC0Sei1rjOnMNLQikBb0Elf/6W1mD/7k5d56N4pdh/YR59noYYKDL3nXkS9zO
9/rsHiHR77tyVpa2Dy4SxzdZ+4ufk+9aTFK0tH2XvkGcrlCn1eH1OD93J65jyt+nk6cUwfwjsm9jPfavH8yhzPf/kZprw8Ow8MMpN3eeHlo1QDsHcNEwUtaLeAruPw8rAt8FybfE7IOpr1oI42JnagG5997ZPXfCnP+7/1vYxPTBjnYLmE7TjE8dUtCjOcKJo2KFvjIThoOjrBTmLE0tjYWMomihLCdIKQ82ymsi735GwmnISCSihaNpFYaBVy/PVvcOLTf8zK2gqeNvcYAX2ZPioTU4wOVrin5HIo5zDkanTUQkcBjnLYbOGP88uzSLYAuTxOXmHZDpWd4wxOZglUnU6zQby0Qi47iFfajbZsglBonJwmWaixsrjISn8ZD8jnPESEOI6wRJPxHJRSJHFCuVwg41hEYYc4CYhjn0gLysuSKIs4aKHbV6YZXupSLuSzjL71HZR2HwDLJgo6BH4TlcS4joftuYi2UFqhlUarGLFJY9ItrHQ5UW0JtkrIFlr4zRrx0hK11TmSMCAOWliuS7Y8yuAD93Jf6dpND70QYG/RItNocupMzGwblBUz7sYstmDCAu9YjDO/SKmuGDu4i4n9k0ydrPPKMZ9zmN5fxiyA+EAGHrivTN+uEUZ8yE1afO3sZRzpVj+oNfyVE+BOse/h9zPWdw/PPfUy00c/j4pz7Jl8H4PT03ROH2VcKQ5PwA89CM5JePZCEmVM0qmSnK8yP3uMxWf/jFeG97L/0Xdz/0OHKZcHcFwLRxmyTpxuZuy1Y3hQeM97B7ETxepnV1lumjZVvs+ZN44St+osz8+yOj+Pl3Eplizyxo9/R2PrOO2CQ+RYjOyxWTySEG9hKmnELf6fr3+Sk2fP8pHHv4uDB+9l9+IK2VdfAmp4WBwcuYcP3XMv8oVP8MzcGzz95a8RVHfxjedOs1gN0BZw9rwZInMZKo8+SLywSvO145e9EVtFuE6evpLLUCGkFva+vDfmKYxNjPP4W96O42SJkoD9h/bw2NseZ2Vlhbnp89SrVxZJEvoJrUgg4+LamkwS4ocxjlYUHIecZOhEmpYOyViaIc9hT8nlYMFh3BIqGrJ2RMG1UGjWwmVWjr/G/OoiHaAIZNPJZd/AEIf2TrBzuMAuzyYfJziBjxW00HGA3sJumLdX8FUbhzL9lkvWi0m8DlESozvQasbEdoxTcBke81DisrQWoy0NSUTUauF3OvhRRBzauI5DkiTEcYxKEmKliKOYKAwJfZ8waBNGPu1Oi0hbYHskykI3m1CrX1Hb9hK2C/QPDzL4wCO4+RICJFFA5NeJww6SLeF5/YjtQRSRxAlxnIBlYdmCJAodt4mCDpHfRMcdLEuTK5Vws1kazRb1pXk67QZuPo/l5bCzDn1TI1ck6+UgQFJPOFo1uVRzmDXy3NCoHn0KvnQOClbC/7+9N4uRK0vv/H7n3C3WjMyM3LnvRbKKtXRXq7o0vZRao5E8kGZGs8CQZwDDM5inMQwDfjD84hfbj4ZhCAIMzBiGnzywrdGMBHerp6vV3aqurp1FVnFfk7lHRmTscbez+OFGFslqMplJZnVVC/kHAiSDETdu3Dj3O9/y//7f19ZrvKwVMwdmGZ8qEV2P6JO5KUeAb467nHpunPHjh+hZiSdCvn7kMJPHts4bV77x92i//29Bp4wde45z3/odKvk5OlHI5OGjeMEIpYk51pfucjPtcVM1abYshVZErWPIkjKKh6ZzWYUJG3TvNbi8fofk+mXmjj5H9cABypNj+IGPz/Ym/W4FR0peOnOY00eqzJY/5s0f10hTy5kjHh4KHQ0gTem12mirMU4e+WVSf7aJLU9RyiL1hQ5nxjyiimajsfXBeknIu/MfEoWG73b6rKzVEdpFAp70mNl/ku/99h9Q6yV8+he3uVyv0Xhrg9aQ54oBWgYEVOamOHPkORbiKzzGyX8IYX+AI/IUckWq5YS7LVC7WFSQjsPp589w7MQpcvkAa+GFc8/xX/03/yVJkv
LH//Mf8+5bv9jWsYTvYlMHlEvFE4w4FmM1vdQipADPIqXCR1N2BCeKAc+Vc0y5hhFrGXUFFRGRdzRSCHLdVQatBhHDvhXAx5KTPhNHj3JwdpIZV1LUGh1ZwiQilyQ4xmC9x1+k1q2rJG6J8twUrpEMkoRaxxJGkPYtpgWkOeqxRyotylp6dYttmCz1YE3GdLASV4IjDFZoJDorFGuF0TE6jVEqJFUhSTyg32uhhUsQFAj8AkFlBONvn0mwycR3gfxElfz0NI4jkSJr3FFxj7BbR0c9HMfBz5eJBgMatRVSleIFAY7jIKUEo4l7bQatNUzcxfcdgpwkVyhghc/ycoP1lRWE5+GXKuSK49h0d5wEAyykmeGdHT6awwdkAxIbwJiBxXVL8UIjY7haixny13LA8xWXV759lsmTx2gkiqXFRYxwmcqX8cKto8TXf/8f8bHU6Fhx9o3fZf+Lp3FMiVdmXqfdGuf8e5e4t7jAgSOnOFMcp+wKXCI+eO/7rHUvkW0vEuR4dkApQPfYZDjEYZ0rl99k/tZFRqeOMHrwCNMHDjA2Po59Cu2CB7lm6xuaD9+6y7fOTPNP/vA1Tu7/lNvXFyiXi4wVPEqBJJibJV8osLHRYGWtxyB9TC74K4St0yPzIQwsogRBWUDjyVbQYLlYu8C9Hy+ijaZv21gg71eYPXmW6qHDPP/CK5z5+RE+aFxjIU6pACUhaGwSMy2k9SbzP/+QZnNtW1+k3QspF/IkKqDVF5hdboHNFwu89PJLTM/MIKRCpYqcX2RsdJw4SSmVy9s+VrXoIkODsRFTSPKBYMQGtGKJkaA9i3XBcXwwgvGcT8XxyIuEgtSMOoqcDpEmQViBbjWIBv3PFmvCkGqZy7P/9HNMjZTJJxFKxfQdgdURnlH40iK2yBX3b94GOUmzIwhHXdJIoOIyuKWsaNBzQAcY16Up84CEZgyRBGvQSYxrFdKVCGvQWqGHXrZRGpXGJOGAKI5Ikpg4Dul228SDLrg5Kr7H3OFDzM3Ngt6eIdwkd7qAKwW5ySm8YhGJzcJtKTA6IQk7JP02whryI5N0NurcvX6ZjY0mxVKJIJfH9/Pk83k8qUnDmEGrhU365AoOXuBRKpdIE7h9e51YX2akOsP0QR+d7MYM5QzrZL/lQQHHc1mv2IfmfnvYCQfOFkBGsLoKfqGFU3Bxhk2xMwJOnphl7pVvYEfHWL56hXvtPr5f4O6VBX5+6R7/0xaff/jE80z+5/vAQGmshFf0cayhXBmlGfpcf/uHpK0NTr74XU4dfpWcickVU9q6T23pBlZvckok/uyLzL3wOp3aAs2rf40drDAsv9OLl+ktrLK0fIEbn05QnTlENNh5D8QBss2sTxadnP+4wcZyndfe+A0qrssoCYOeYSyfo1TwcPMlqlMTxJcUt9+p0Yw3z/arW5DcOj1yM0H6UMtpOoPtG0GNpqFrDz3XUCEf3LjC+IEDtOOQQ9OHudVaoKEH9IBZ6dDU6rMLNYi7DK5+uu3PbPYihE24fq/Pnabd9QvueR6lkSzExoJWCq1ExvYII5Id3KijJiEnUrRJGNeCUS/P3GiAJofC0reW1HHwci7tSBMYizEJ0sRASKQ0RmqEpxGppttsEcX3w09DFowWy6McP3qQOd+i4jZKWfpSIoQmJw2utJitFGBsAbTELieE9TyMjEK5kskBDNYgaWXhTLcEJmtVzwivGRe+sXaXK58YHOmibdaqbrTGaIsxGpWEJEmMUilaa7RJSZMIqzXoiKTboeDCwekqgfN4YatfOm0yg5UPfMr7D+IXR4aCVBbHcfH8PH5QIOq36TRWiLpN2hsNbn3yEe99+ClGuoyPTzI2PsHE5CRzM+OM5ARxPyTpbRA2EwqVAqWcw/jEKFeurHD16jpzB65RHp1EyJ01Aj3puxSBI+MOM5Mel+cjzLCLxwXOTQW8dqrIvWstrqwYltY1o+OagoCyhbMll9kzx9ETc8y3unw8v8KNu2uEiWS+Ht
Lsbz01SzgBleocSIkjwTEWF4M2UFvrkNQuI2SBQHpo2+Hu4nmOH57j1NGzXMhXiXvL2YFMn6nxEt967VvM37vFe407RPM1HlZqMVjdI273WG7P8zQsrymytNjmdOHWwHL1whInDy+QizscGS9RNyEjOZdKpQxBjkYnZGm+xtufDIizQJ/ZvKCQl9R7muZXLMe9dQbHZuJxK7eenXnaVh3+5D/8CW99/AtOzB3DCIeDhRn63XkiNMsPGOynwfxKxLyqcXUxJvkCtkitDY31DVrNDp4nMi0Q65KkgijUqB2I7+SSkMBqhKMpCkkJRdkNyPmSRCcM0syUCmsZ9yQ21QQ6AtMnViF1JRCOS5Cz5KKIfqOFTh5eWUJIZub2c6BaZizpEiWKFJnJulpFLDK+utyygaEIRJlYlcxDPg9xA+pXIF4ku+E8SEYhrEK+kHWd6hZgiMMmd240tzj+FrCK9sotPurUWb17i0qpAv/jf/fktw0fDpAvlSjOHcDNFbLnDTiOR744hkoSUgVxr03cXaG1Ms/8tU/56w8vEiqD73mMFEeYrE5w/NAspw9NUi2CoyNckWJMH+m4VMcLTIwFLK2E3L12j/1HliiP7U5OG7J98OxYwBu/+wqlkTEu/OA8uXs1UmNwBXijFaYOHyJsXePiaoe1EPwYSgL2Czh9ZhY7vY+PV9r89MNPePf96zQaHbTZnkl0lMqE0awEk9VQlDBIx2QMQMBqxdKVd1mWNbrNy0yOvIEjZpHi4R6AsNPg9uV3WbzxCcnqDdxA4Pp5TKrQyVBb/TM8XaS8CMyQpYUiYD2G//fPFpmJW5yclZSr48hKDt81FIRFuw46SaitDag/sH+VZnP8nd86QXdlnR/9ZIWl/lfH8/6Vpt1DHfL+nXdY26hxdHw/lVKV457l0407z9yQcOmuRen4C2tDldLDGp9BT+MHHkpnvXe9XsT6Wo9eZ/u8V2k0SIHrukirScMQZcCkLjoeIJXAdyQ6ChkJHFxrcdMYT/eRJiFKJMpm8xDodWmtrhLGD3vMOT/gxKljjHoC2WkSDJ/DSqROCUjxrcYVWy2BJTLzNwG6C6uLkN4D+2BRMAZqIF1kKYfp98kGTewGLHG/xZ0r5xHbHLK02RHpAn6phD86jpAuWqcolfUqt9ohC7dX6LVqFAsO5UKAXygggzypzW7OKE2JWg1qrQb3Fu8gBid49ewhSAY4IgHh4foermuZnMzTa4e0mh3WFu+wm3r8Yw785jef5+T3/lMK+Sn+aPZ1Tl++Sn1jHWkGfOc3znL2SBU1GJC7fIm1FBrtTMHu1KTP2PMneX+tw08+/Ig7N26TJjvT1jRpiBxqzRgkRkgEFte3lMfGcEemUI3rXPzB/4JTHOHs179Gdewg83c2SOKHi8eNhct80IsgaTJa9Dl+6jeoTs/Qb7ZprtZobayT6BaJTkgTQz/cuT7KKhkvXZKVQHMOjEiBrYf0jSFwHMqHpylWx3ADh0gITGpZ21Cf1cByHsg0otPZYOpQhW+81OLd90OWkqfdSnYXv/JaqcEw375Nt9fm+PhBqqVJcs1FIvtsecAvWmUyClPm76xz9eoS49VRjDYobYmilH4vRO2g6mm7HRLHJfECCASuYxioiNQ6GG3wPB8fS5qEOAnkTErOhPgy62+ThkwGV4NqN2g3VgjVw5725EiJsyePUMKFJEEIg2M1jnSQwuIZjRemCL3VZrOZIW5DUiMrgT1SPJmclzBWUHRVRE/oXV3dblAkyBV39B4BSM9FegHaWAa9LrrTZHV1jV/89Ce8+9ZPiQYdzr10mm9/51Wm9+3nW7/zPS4tNPng02sP1UQKnsPs1DjVmWm69VXUICIZxEPp1pTKWI6puRwqVayvLuN7j+8y3SkOjeU59eKLlKdPUC7t5zuHX+SN37NEcQdtIsoln7S1SHnmParlKyw2DAt9qBYFB07vY027/OXPP2D+9sojNcufBB2HGJkJeiEcrHAwwqKsoVypMP
3Cayz99DZW9SmPn+Loie9i7DR3b76HTj5X1LMp+47s4/iR32SiVGJ6eh+54igmSohaHbrtNTq9W3S66zSbLT7+5DppurP+cgt0gecC+LvnXF46PcnRo1VmJws0b16nvt5hdjbCyQeIfADWZX29xeU7g8+WrAQcZfn4Z4s4rkBgCZyMOPyrVWt5NL4UgovF0NDr6I2U09MnOTZxhEvr17+MU9k2tHZZXG2ysNzAy5WQElSaolI1VHrdPkGpnPZo9ULwi3hjo7iBiyIlVRHWWBSKnElwwxCpDdZqjEkwpDgYAq0RGqzSRBtrhO1GJtQ1hAMcnZphbryKk6YgJVYAKgEEnpXIOCXtRThP3GwsGdns4eDQ8XI4jiSJMq867m/QuNdDqSSTjN0JHqfUIz28XJFSpYLvbS9PvHmYnIDCaAWNoFWv0+tvcPXKJX70g+9z4dNLNDotjLXcXFzE2JTf+91vc/LMSf7Ff/Gfwb/5v/j4ylWMtRyanuK7X3uBl15+jqnpMRwpqN8bkKo+ViqMSvBcGKuWiMKUqN+nu1Hb8hx3gvHRUQqVKl5QIFccIwh8gkBQZJRUDdA2IRZ1vPI4hbJLr5HQBiquw0ro8vHPzrNwd/2pDDaASkKkkFiZedkGFxyJ1RpPOpx+5Vu0bt6gv3aTWLm89dOfkLT/lN76JT6vLC4ch+NnTvO1V1/HNR6uFFg0QmmK5TKVqSoqOUgcDWi3Wly98a9J06cTul1JoNbViHyKImGlkbK0FLF6R5OM1fD27yPwC/SjhNt3Oyy2718f38nUQltNqEUWRWYoR92M3BZ/yXmSL5WV2NItPlm9jOc8KyPzi4bAOjkarQ1Wa2tMzk5SKARkSiTivi7tNnHz0kecf/ev8fOjnHr1NeaOHqUYBDgmpdfrUGvU0NGAgxMTjI2M0Gxs0FxdYLTgc2jfIcq+h68tWI1JIxKTPvTplaDASy++SH6kiHUSpHVxRYC1irDV5Mb1WyzdvctYZYSzx89s44wfXqWuW+TQkRM4vuX2jauoOMYaTRJvXdR6FBwXSmWPdvNzkZaQOEERLyiglSE22/NxJDAqYN/BKcZOH6XfXmP9owZL63Xe+tnPeOfixwweaDhY22jzzgeXeeWVc0zPzXLulTP8c/1P+I9//n3iKOTcyy9y9sUXmJgcx3U0Rkg6zSaDVg+hFcYojIrwXIk3UkaHXeJwOyTV7SHCJ1KQKoUWhtRajFK4nsVIyWAQ0ut2SZQitYIumae5ONDc+nSB9X70TGp5ymRDLqQBK0DphO56k/X5m6zevELt5gUGa1chbTNYaJBt4Y+2aq4XMD21j+r4PlQC1iQYE2LSNEu7OB6uV8LxNVpWcdzHywY/CW0Lf3Hdcm2tzqFKg6IDa3XLYh/GF1b5+o13OHhggltLA/79L9boP7C/HJiVVEuGlTpsJvqsgOPPeRw6PMG1a3Vuzaef6wX51eFLp5L3TO+rk+F/JATV6ROcfOFVHFcQ9kPaG22EGQEDcZSQRGnWlLFN/On/8b8StuoIIbl28ecUJucIcmU8aQg7a/RadbTWlEanKJRLhM0GYatBMZ/n+HOnmT0ySxD4OBSZdAzL/fghLruWkvWkzycX36acD2iELvXQI2c8Wjcv8v6Fd1jodBjN+7x4YJb/5L//r3d0RVzPpzxSBkfjBwVU/PRBo9EQho9YANagk4jIgpAurre9peoA05MVDr/xG4wcmmXQXaWfWNZrXc5fvfKQwd5EvdWlnxiUFYyOFHj5Gy+RtymttWWmDx9i9vhhcoUSJg3RWlOdO0AabxD3a6RJTBJFGByCYhEVCqLe7o2uCvEJlaU3aOMOGgT5PJ6AwHfQIqLfq9FZn6e1ukytm9Ih828XU4tNt8pfb0oSb+0wmWEx0dgsYvn07Z+w8vEPiTfuouMO2ITtmq5CZZLx0Ul8J4dwDFZIrBCkJsU6mawA1mSzWn2VKVM9A5oG3m/CxaYlIDMzXYBVyzs/2CBwN4gUDx
EXpIBTxyqosM/gAZqpstBNYP+JKgePFph8e563P1Z0vgR1wN012g7P0iX+lUS+MM73/vbf52tf/zobGw3iOCbsxNi4M6SuZVPaJdtfYOGQe26tJlxfIFxfeOTr4kadB/uZ+u0W67VVnPM+MpCI1KdSyNGp1R+SaOlEff7sh3/JiGsp5l26VtKKBE4qSQddwqFOeKuruHf51k4vCRaNn/Nw3BzmqSb3PHAsm3WIPhJaYaRGShexzRvYFzBxZJrxE/sRnoMe9MgXx8iVJeFjWnqVUnTaffr9mMoolEZHOHbuNN21UfIjI4yMVvCCAmkkSYtFRibG6dZH6TZXCMOIMFRYLLmSRVtL2Np5xPE4eLk81hoGvQ3w8gS6QMG4aOGSxj0G7SX6jbu0GssoZXDJCJfbM6OCrH92q5dkBeAojjj/4++z8Nb/iU3qT/FNJAeOv0hldBKjLVIIhHQw1stGz7kSaVOsMBhH4/ne04/zewCWjEUS8bCxiw3Ej6DyjRcFL589wO2bC0hx/wUWuDmf8vOf3eTYoRxeYCj7/A0w2puM9K9CiXWXUBmZ4LmTpzh+9Djh7D76vR5hFGXTZ4TE9zIR/3JpZNvH3FpsfVMFWvOoEMQai2pverYhteYvNyBYa2l3BmynNeFpgpw0HrBwbx7P9YijZ/UqxbAe8HkOmgQ3QLjZzav19s50tJxj8tR+gpJHlKTgOpQmZhglzgYgPwIqSVldWmZ9dYZ8IKhURilXRymVS7heQFAaQTou0ipC38PxXfxCAaUhDGN6HYW2hkIli5d7nd0LHQPPYgZtwsYKQvhIp4q2Liqx6KiD7ddJw3WcQHH8aJH1BcPFRrSNmZqGbJ1trZMiZca46ff6rH36k6c02FCsHuX5r30Xzy8SpQrXcXBENgBaSokjHYRrsSKr17iOi9gFPe0H8ST3QgDferHKN77xPJ6jOfphi2urlpBsaQ5iePd8xI0rEYGE1m4Oud0BtjTa+187wOJ7izxU5doKu9cI9pVBHA9YXVnh4KFDTE5OMj09jZQCR0p8x8N1XIzSjJQfLbr0KLiUSRlwX8RqOAxLFMGbg6AIg3bW7osi63EMyTJsm+/78mCMYuXeTj30bHivENn8UItASBfHdfEDF2MS0lih0+GoHulmw42NRicJ21VmrR6YYvzEYYTnYmKF4+fIj07ih93Hem5RHLG2usba4iJSd9H7ZhmfnCVfGsPzS0jPx9pMMMvxPFzPRToeINFJSrtrGESG8mg3m+yzS4xHIQTSMUT9LlG3jV/qQ1pCqz7KRng2JEg6eAyYmh6nWD2InZMs/Pwa9fZ2jOvmbJ/Hw/U8pBBorTHp030x6Y/x6t/+Zxw+/gKpddFK4cts1TuAFBIjTaYaKcz9U9vlUtfm3fa4LbXkwCuHShSEZXaqwhvfLHF0PubaouJazZCSNQLXdovR+pTY0mj/wT/9B/zrT/83kt7Qs/PZifDerxk8HuXdtlor/OD7/x7pSH7re9+jWq1mg26tRVqBThKSOEGn29+xUvkCmDY4Pnh58IogK2DLIEey4ce5BKJ0eEoxJG2gjhTXCPRKdhzu/yQPZ5U3f9bN2O1X3ZQrs7BaSoTrIqVACInn+zjDzkatNEobHCcbJSaEi+NokkSjjc30s5XCqs/L+G8Nv1RCBjmUzuaXOn6AkB5hGGMe43zESUpzo0272aaYF4yNj5FEA3y/gBcUsUZjTIrF4HgOfi7A8Vz8oUELY2j0odoMqY75mF26P4R08StT5Mbn8CtV8B3iuMWgW6dg+0zkJV5YJ59G6HwJ4U5SVi750TXYltHeVKp5PLI0hqRYLpObOELavM6O1pIMOPrqH/Ly679NUCpjTIQWCcpqnE32vcjWhxAaa7No0hjz1IyXrZAnu1c+n9UQwOkRGNy6xzt/3kGM5Dnz3AG++foYd+6u8v/8+V0+WtC7qmf0tNjSaF/65MJnsqOiLGFCYhe2kcQRwwTAV+ALbgdCeEzPvYS1Kb
WVS9gHOOPWKpYWL3Hzxhmef/754eYvcES24Iwx6DQlDHeQJhibA7UPSvnMQAsBxgdRgCCAggM2B6nK2sL7MaytkHOaHNs/wREBFd3HKEvJz3G9C2+vbJBaDaIMk18DN4D6LXAd/Mk5dL+Jbs+D6g4peV/Ej5OlNFw/QEiZTV4XEPg+nufiui6Ok0myJmmCSBOsMSitEUiUAq1tZuylyU7TcRhqpW7zFATWcbJB1CIL75MwZtBrP/Y7G2vphwMQDsVKFT9fJFUxSdzF9fNIxydNQ7RSYE32XTwPP5fD832s7DMw0O8axkayGZy7AeF4jM4cZPb4OUZLBSLVpz9oknZWsGkLV0bEjUUaKys0dY5VJ8/H9wbU6o/T63HJWk7i4WNT8ODxkMNLNjY6xtnv/WM+2rhH0rjM9taPYOzAq7zxB39EeXQqGzZhZdakgxp2VN5PFgphEJjMlBuxq0vU4f4olBkHavph7c+KC18/LkAb1heaVPanTI9XmCzkqL54FCkNzX97hxu7x+Z8amxptC9fuYjNWfDBzlio61/eoh6EAPdwhWOvvEx3vs7yB9vXDvkykc9P8Oqr38ELPN5522Vl8SLW3v9JPc9jZrpKsZDDGoPn+pkan7BYo4fDaHfgXhUFpC64LkgvuzN8AXkJI0OSaKxAD0tKfgqxZrrs88q5AxyZO0c+sPiBRAQVwistnD/7EWl/DRA4lREmj+/H9GewUjJWLNONDbX6aXSnBYMWdFtZR1KwfaGrx2MYZjs+TpDDzwVI6aBVisXiuA5B4ON5HkJItLFYkREmVZqilMFYg1LZHEohwVqTXRfhZHyrbea0HT/AK5QzNUEd4uCgkoR+t5dtIo+AtZYkDBn0eqgkJU1iXJmSuA5+PkIaQzLokaYROk2w2iKlh3R9pJ/DdbNIJk5AK7NrMU25UuXgoUPkK6PESpFoQbk0Qs6ZoH93mRtXP2R1cZmVdsoqRS63a9xY6RBHjzPEm0bb5/4gsq3PNhcEw7y24Ouv/SaBkLz/7/6YwepHT3xvUDnM3/r9f8ns7CGk62GERcg8Dh7CGJSySGMQRmFNDNogbbY2pPB2ZUakIIuhHxy9ZjQczcGygr6CEQ9+77Tk5MkCucAlV8xTnZmknC+QtOqE1uAJRX53U+xPjS2N9rd//w1+evRd6teXYN5mozA2EQzf/aCDmQc9brAzRcbLoyx/cv3RJdqvFATViQPM7p9jdLyCn5e89TPJ0t2PsSY79yQOWVxcQGuF57lZ0UQKpCfwhJPluN0d1HT7STYRVSbg5jJv0nehaGAQZuPk0ziL5UoBFEfg5AuYQkLfNXQHmrgXkotb2HSRjasb6Kg1PHimZR1HCbEW2XDgMCEcpOi+hbQMwRi4FmLBswkIPzDvXDjZBiQcQCAcgSuzIqIEpBRIR2ahrzVoa0mHKRKlMq9LyuxaGmuwehgeS4mQzrblTp0gj1sYQasQJ1UI4RLrlE67idliSo9VmjiKScIYHacYx0XFMTqJsJ7IhicMOqgkJo1CrNZYBMZKPCnwIYsUjNmVXKznl/nt3/47nD62HzXoYo1LqTzJ5KiH3FDcubfK+V/Ms9pWXBnA7aRN/4mNqJsj9x60Plu/I/Bzn9WKfS/g5d/8DvligXf/9E9o3v5rHpcrlcEIZ3/rjzj3ymsEfh4jDEYMN2E8rDVooUmEwhUWIZ3MSA9TJdJxd4U9sjngAoMDhgAACjRJREFUeTO23DTek0U4MwNB3uHIXImXXp5jrDpCv9GktlAj7nTZNzNB6EmW5xe48FGNW18BLxueYLTP1+q023EWQXV5eGMtg5gT2Ms2875zwBTYbpebf/Emoit/DQw2gGVt7TbvvvMz/tZ3vs25F89RKOb48Q9d7t2+gNF9jEm4fesa6+vrTE7OIB2ZheGYbHCs7+Nts2MPgI2NTIQJCQTDxUrGbpAiCw2FgMCBXBFyARQltULC9W6HXklhjKGiNcWow52786jNdnQ7QN+7Rn
O9z1CWjTjwQYtMzMlxIJfL8ukSHjuS6IlwyTy2oQFwnCHTQw49Z41wJK50sAiUtpBokiQhThPiJEGlaZa2VBqMwPoOxtqMRpimWRpHDXkQ25U5kC7CzSGtzjYkK9FxStTtZd77I2CBQRTRbXVp1BqUR3I4ooREkOQG+MLLsn1piooi4sGAOBoQhzHd1gAd6c9E+wUCz3/2uH5m5jDPnzoOSUyn32ekMkUhP4LWlsXlDuevLXK7rmhauBtDb1vuvSVT5niwdrM1PVE6MqvhIMAR5P0RXnj9dcYmR3jr/x5n4cM//+XfRrgcfOV3+O7v/SGFciVLewiLRAIim8MpyIZlCIkV2cg3TIoxAmPBSpF18T4jHmRqOQ/8u9+HoA7HDhoOTgmiToflfp87N+q8/d6AUqHDP/pDnwP7xthYa3P5RszgK9JPsqXRvv3m+7AaQSXHzGunaH14h6hxXwTGGiebxA3ZJr4MGDBqd7gwm/7AF32ttLFonVIZKXP08GH2zc6CtvxloliavwDENDdqLC4scfr080ghcT0PL3DI5Tw8x6VU2oE2hu2Q7YQJIMH6YPNgcoALwsuaFtJeNiVVSJAesZtyLehzt+ShhSRIu3jhBs1e5wF/SYOah26d+6yUEZB+FifmXDAepBIizZaTmx8Ll2yXdvmsMm0EWIG1GT1PW41jHQwaow1JnOK5LmmaECVx5jkbk21OGkBgU5kJ35vNUpEdbm7bh7FgkSADcHJYJbFJjE22diAGcUKqFEkSEScxqSkgwgHYOiUjka6DkC5plBB2OoSdDr1mm41aj35/eEVkFlHkCk9xST+HxsYqP/iPP2aqUuTssaO88PwEQT9hvlHj3fcucX5lQC/wqIWK3nbZXVieZKQ/D5Uk2dg1mUVV1pV4+YDjz58jCP4VPzSW5fN/8ZDhLs2c5NXf+YeMVKdIlAUU1qpMvwQw2mRsFK1IUNmYPRVhkxCrEozW9JNwx7NXH4VND9vjfl57ANyIQEVwu2MpFboc2Jfyi/cH/GTeUk/AFYb9F+oURUJnuY8d8BkH/svG1rHxxRA0uL7km9/+BpdTy7W/uphtVylwXd3/FptZ/l2Cg8skPl0i+rtmtl0ezuMJoEguGEMow91rNyBOmJyscmhuhnNnzhJ2Wmw0bzMYtLl44QIvv/w1nnvuNMVyEc+TOK5EpekOB9qlZEapS2a8A7Kxqw6ZoRqQDSQe/r8VoMugC6SxIO0kQETICo++6BrokOVX/GwD0CZ7JMOt0Gxm+J4mUTfkVX/msRkwFpsqjPBB2GF3mwUh0KlGWU1CglEpVm/qk2yS+ofX7jOmyNN7qkYbdJppdseJJe6HpFGKKzNqmX5MOB84HoVCgfJIiXwuj+f4oEKidgOUIVeuYLUljRK6Gw1a9TobtTrtjZSuzn65wAG0+Yy19iwY9NZ59+03cRyPSwdO8sntdfwgz+LSPDdvfES/u5GND1D2C633/5v/9u9n6ZHNHInIdNMFgNEMWvVf0poJNxb40f/+P/BXbj6TecAg7HCDtoC1WGuHkY9F2EySwRrD5sR3ozWd5uIzn//m6to0dJtmavOa3Q3hgxuGIwccun1LbdiabizU6n2aiyGmo5krwO1uNpTiy8bWRluRjf6aLJIvJij3AQ864QsbgClEwIQ8SlWntLmzi8eVjyhGRYSDRa5eWeXm9V8QBHkqlTFGR6vEscoEkACtQz4+/xbHjp3g2LGj+P5o1tE4CImikOQJntzDCMkubp/7/OvNOXqS+8bc5b6f4JMVkTZD3JAn7/tFMsOt+Yz4ahh+Vsh2miseDcX93OhwoyFLv1hthwzDTB9Zum5m0LVB62TokX3eqm3+Js/OlbNak4QxSRrR6XSI+jFC5ygUffJ+QBqmwyA9gwByUjBaLOBIidYGFcWYOMFzwGhN1GmhU4UVkiRJ6DQ3WF9aYn25Qz/MbgOfjGKsIku8S13s1mYzK2/d+YTb89ey58x2KJCbtYZnv54bdz
/c8Xt03KW5cOmZP/tZ4QwfPve95M/zpgxwo26pLw94bk7w5rIlHr4m7MVEfSgU4NABWIoy2dsvO0vy5CqUD5XpEuc/eJd7F4YGVIKYquDZUZKFBXhMrvBxEEEB0gT7mNDcccsEuUk63QXSbV4iPyiTxD228tIeZIQMnwGGoj8mY9iFcYtWZ4X5R3SWJ0mf5ZVF2p0mpZFCRldTCiElnrsTOc4lHubIKjLZ0zb3OdWbN96mQnQErJEtu83pME/ysTpkG8NmCWbzYfhsR95xU+xm18NmxKIfeH5owD/77yw1gjbDBq3t61Q8LZROCQc9ur0urVaLOE5xiSjnHPaPj3FzqY/AMgaUN7+NKyn7DmjNoNulm3PISSgWc9mkdqXQuoNFEvX7hP0e7UaTVksRm/vGQZssWNh2tmLbsBm7Ykf4G9lMsW343E+JeGxtaGsJvPWh4o2zglP5LMEAsN61GF9QqsKYDwcn4WKbrWY9/Urw5DtWwd237mJiez8d5oBfrjBaOsja0sq25/cBIEXG4kofv7KV6rLcv4RgsI123AzVyf2sLd/CmKd1/zd9r8d/Xi5fIE07vPmj/49CsYgUEiElUkoWFnYSETzuHD9PwXrWG2+3QqEH/dLNdIp94LH57yhrGdvkVZkHUx1bN+/vFoxRhNGAdqtJfb1Otxfh4RDIHHMTY9xdXWagNREwJyAvQBlNUccUXclIMU8+CMAYkjDCaotKFUZprLUMOm1UmiJdh2GHN3kgyCjI4EBp7Mvmhv2aNEh8Qdh0TSzZHbTJHnkcwVEBl7vwrdTyd0/C7YtZKak2gNC4lFxD2NT0u1sznn9VeLLR1mBan1sECcSf3mONezv/RGMxgyf0gdoYZXfmWawsXtn5uTz8oU98Rbu1zF+9+af81Zv/7oEUdvaXxzET/mbgQcO81fc0YKIvNX7U2mYMFASDKGJ9vYbUlpnxGabGy5QDj8FAZ4IAFooWAguy0ybaqKFnR7G6lBVP04Q0TokHUUbzQ2BQBPkcleo4lbE2UZRmKZYgk5l1PUm+sguVyD08NTaN9YOG2j7w56MgHBiZFIx1LGNiaLS7MH835fSsQPeg1f1qxC/icQ0He9jDHvawh68evuw4bg972MMe9rAD7BntPexhD3v4NcKe0d7DHvawh18j7BntPexhD3v4NcKe0d7DHvawh18j7BntPexhD3v4NcL/D3Uz735aacuBAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore import dtype as mstype\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as C\n", - "import mindspore.dataset.transforms.c_transforms as C2\n", - "from mindspore import context\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "\n", - "def create_dataset(data_home, repeat_num=1, batch_size=32, do_train=True, device_target=\"GPU\"):\n", - " \"\"\"\n", - " create data for next use such as training or inferring\n", - " \"\"\"\n", - "\n", - " cifar_ds = ds.Cifar10Dataset(data_home,num_parallel_workers=8, shuffle=True)\n", - "\n", - " c_trans = []\n", - " if do_train:\n", - " c_trans += [\n", - " C.RandomCrop((32, 32), (4, 4, 4, 4)),\n", - " C.RandomHorizontalFlip(prob=0.5)\n", - " ]\n", - "\n", - " c_trans += [\n", - " C.Resize((224, 224)),\n", - " C.Rescale(1.0 / 255.0, 0.0),\n", - " C.Normalize([0.4914, 0.4822, 0.4465], [0.2023, 0.1994, 0.2010]),\n", - " C.HWC2CHW()\n", - " ]\n", - "\n", - " type_cast_op = C2.TypeCast(mstype.int32)\n", - "\n", - " cifar_ds = cifar_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=8)\n", - " cifar_ds = cifar_ds.map(operations=c_trans, input_columns=\"image\", num_parallel_workers=8)\n", - "\n", - " cifar_ds = cifar_ds.batch(batch_size, drop_remainder=True)\n", - " cifar_ds = cifar_ds.repeat(repeat_num)\n", - "\n", - " return cifar_ds\n", - "\n", - "\n", - "ds_train_path = \"./datasets/cifar-10-batches-bin/train/\"\n", - "dataset_show = create_dataset(ds_train_path)\n", - "with open(ds_train_path+\"batches.meta.txt\",\"r\",encoding=\"utf-8\") as f:\n", - " all_name = [name.replace(\"\\n\",\"\") for name in f.readlines()]\n", - "\n", - "iterator_show= dataset_show.create_dict_iterator()\n", - 
"dict_data = next(iterator_show)\n", - "images = dict_data[\"image\"].asnumpy()\n", - "labels = dict_data[\"label\"].asnumpy()\n", - "count = 1\n", - "%matplotlib inline\n", - "for i in images:\n", - " plt.subplot(4, 8, count)\n", - " # Images[0].shape is (3,224,224).We need transpose as (224,224,3) for using in plt.show().\n", - " picture_show = np.transpose(i,(1,2,0))\n", - " picture_show = picture_show/np.amax(picture_show)\n", - " picture_show = np.clip(picture_show, 0, 1)\n", - " plt.title(all_name[labels[count-1]])\n", - " picture_show = np.array(picture_show,np.float32)\n", - " plt.imshow(picture_show)\n", - " count += 1\n", - " plt.axis(\"off\")\n", - "\n", - "print(\"The dataset size is:\", dataset_show.get_dataset_size())\n", - "print(\"The batch tensor is:\",images.shape)\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "数据集生成后,选取一个`batch`的图像进行可视化查看,经过数据增强后,原数据集变成了每个batch张量为,共计1572个batch的新数据集。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义卷积神经网络\n", - "\n", - "卷积神经网络已经是图像分类任务的标准算法了。卷积神经网络采用分层的结构对图片进行特征提取,由一系列的网络层堆叠而成,比如卷积层、池化层、激活层等等。\n", - "ResNet-50通常是较好的选择。首先,它足够深,常见的有34层,50层,101层。通常层次越深,表征能力越强,分类准确率越高。其次,可学习,采用了残差结构,通过shortcut连接把低层直接跟高层相连,解决了反向传播过程中因为网络太深造成的梯度消失问题。此外,ResNet-50网络的性能很好,既表现为识别的准确率,也包括它本身模型的大小和参数量。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载构建好的resnet50网络源码文件。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "--2021-03-15 19:23:03-- https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/source-codes/resnet.py\n", - "Resolving proxy-notebook.modelarts-dev-proxy.com (proxy-notebook.modelarts-dev-proxy.com)... 192.168.0.172\n", - "Connecting to proxy-notebook.modelarts-dev-proxy.com (proxy-notebook.modelarts-dev-proxy.com)|192.168.0.172|:8083... 
connected.\n", - "Proxy request sent, awaiting response... 200 OK\n", - "Length: 9521 (9.3K) [binary/octet-stream]\n", - "Saving to: ‘resnet.py’\n", - "\n", - "resnet.py 100%[===================>] 9.30K --.-KB/s in 0s \n", - "\n", - "2021-03-15 19:23:03 (194 MB/s) - ‘resnet.py’ saved [9521/9521]\n", - "\n" - ] - } - ], - "source": [ - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/source-codes/resnet.py" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载下来的`resnet.py`在当前目录,可以使用`import`方法将resnet50网络导出。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from resnet import resnet50\n", - "\n", - "net = resnet50(batch_size=32, num_classes=10)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义损失函数和优化器\n", - "\n", - "接下来需要定义损失函数(Loss)和优化器(Optimizer)。损失函数是深度学习的训练目标,也叫目标函数,可以理解为神经网络的输出(Logits)和标签(Labels)之间的距离,是一个标量数据。\n", - "常见的损失函数包括均方误差、L2损失、Hinge损失、交叉熵等等。图像分类应用通常采用交叉熵损失(CrossEntropy)。\n", - "优化器用于神经网络求解(训练)。由于神经网络参数规模庞大,无法直接求解,因而深度学习中采用随机梯度下降算法(SGD)及其改进算法进行求解。MindSpore封装了常见的优化器,如SGD、ADAM、Momemtum等等。本例采用Momentum优化器,通常需要设定两个参数,动量(moment)和权重衰减项(weight decay)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "通过调用MindSpore中的API:`Momentum`和`SoftmaxCrossEntropyWithLogits`,设置损失函数和优化器的参数。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.nn import SoftmaxCrossEntropyWithLogits\n", - "\n", - "ls = SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - "opt = nn.Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 调用Model高阶API进行训练和保存模型文件\n", - "\n", - 
"完成数据预处理、网络定义、损失函数和优化器定义之后,就可以进行模型训练了。模型训练包含两层迭代,数据集的多轮迭代(epoch)和一轮数据集内按分组(batch)大小进行的单步迭代。其中,单步迭代指的是按分组从数据集中抽取数据,输入到网络中计算得到损失函数,然后通过优化器计算和更新训练参数的梯度。\n", - "\n", - "为了简化训练过程,MindSpore封装了Model高阶接口。用户输入网络、损失函数和优化器完成Model的初始化,然后调用`train`接口进行训练,`train`接口参数包括迭代次数`epoch`和数据集`dataset`。\n", - "\n", - "模型保存是对训练参数进行持久化的过程。`Model`类中通过回调函数的方式进行模型保存,如下面代码所示。用户通过`CheckpointConfig`设置回调函数的参数,其中,`save_checkpoint_steps`指每经过固定的单步迭代次数保存一次模型,`keep_checkpoint_max`指最多保存的模型个数。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本次体验选择`epoch_size`为10,一共迭代了10次,得到如下的运行结果。体验者可以自行设置不同的`epoch_size`,生成不同的模型,在下面的验证部分查看模型精确度。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 1562, loss is 0.91001683\n", - "epoch: 2 step: 1562, loss is 1.2816406\n", - "epoch: 3 step: 1562, loss is 0.9639759\n", - "epoch: 4 step: 1562, loss is 0.52665555\n", - "epoch: 5 step: 1562, loss is 0.47037172\n", - "epoch: 6 step: 1562, loss is 0.78620523\n", - "epoch: 7 step: 1562, loss is 0.63779867\n", - "epoch: 8 step: 1562, loss is 0.18493408\n", - "epoch: 9 step: 1562, loss is 0.33825904\n", - "epoch: 10 step: 1562, loss is 0.21595426\n" - ] - } - ], - "source": [ - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor\n", - "from mindspore import load_checkpoint, load_param_into_net\n", - "import os\n", - "from mindspore import Model\n", - "\n", - "\n", - "model = Model(net, loss_fn=ls, optimizer=opt, metrics={'acc'})\n", - "# As for train, users could use model.train\n", - "\n", - "epoch_size = 10\n", - "ds_train_path = \"./datasets/cifar-10-batches-bin/train/\"\n", - "model_path = \"./models/ckpt/mindspore_vision_application/\"\n", - "os.system('rm -f {0}*.ckpt {0}*.meta {0}*.pb'.format(model_path))\n", - "\n", - "dataset = create_dataset(ds_train_path )\n", - "batch_num = dataset.get_dataset_size()\n", - "config_ck = 
CheckpointConfig(save_checkpoint_steps=batch_num, keep_checkpoint_max=35)\n", - "ckpoint_cb = ModelCheckpoint(prefix=\"train_resnet_cifar10\", directory=model_path, config=config_ck)\n", - "loss_cb = LossMonitor(142)\n", - "model.train(epoch_size, dataset, callbacks=[ckpoint_cb, loss_cb])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "查询训练过程中,保存好的模型。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./models/ckpt/mindspore_vision_application/\n", - "├── train_resnet_cifar10-10_1562.ckpt\n", - "├── train_resnet_cifar10-1_1562.ckpt\n", - "├── train_resnet_cifar10-2_1562.ckpt\n", - "├── train_resnet_cifar10-3_1562.ckpt\n", - "├── train_resnet_cifar10-4_1562.ckpt\n", - "├── train_resnet_cifar10-5_1562.ckpt\n", - "├── train_resnet_cifar10-6_1562.ckpt\n", - "├── train_resnet_cifar10-7_1562.ckpt\n", - "├── train_resnet_cifar10-8_1562.ckpt\n", - "├── train_resnet_cifar10-9_1562.ckpt\n", - "└── train_resnet_cifar10-graph.meta\n", - "\n", - "0 directories, 11 files\n" - ] - } - ], - "source": [ - "!tree ./models/ckpt/mindspore_vision_application/" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "每1562个step保存一次模型权重参数`.ckpt`文件,一共保存了10个,另外`.meta`文件保存模型的计算图信息。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 进行模型精度验证\n", - "\n", - "调用`model.eval`得到最终精度数据约为0.76远高于0.5,准确度较高,验证得出模型是性能较优的。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "result: {'acc': 0.7557091346153846}\n" - ] - } - ], - "source": [ - "# As for evaluation, users could use model.eval\n", - "ds_eval_path = \"./datasets/cifar-10-batches-bin/test/\"\n", - "eval_dataset = create_dataset(ds_eval_path, do_train=False)\n", - "res = model.eval(eval_dataset)\n", - "print(\"result: \", res)" - ] - }, - { - "cell_type": 
"markdown", - "metadata": {}, - "source": [ - "## 总结\n", - "\n", - "本次体验,带领体验者了解了MindSpore的卷积神经网络ResNet-50,通过构建ResNet-50对CIFAR-10进行分类。可以看出MindSpore的ResNet-50的构建非常容易,损失函数和优化器都有封装好的API,对于初学者和研发人员都非常的友善。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/mindspore_custom_debugging_info.ipynb b/tutorials/notebook/mindspore_custom_debugging_info.ipynb deleted file mode 100644 index 123172c52f6901b3d529d2eccaa056ab78c22c62..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_custom_debugging_info.ipynb +++ /dev/null @@ -1,725 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#
    自定义调试体验文档" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本文将使用[快速入门](https://gitee.com/mindspore/docs/blob/master/tutorials/tutorial_code/lenet/lenet.py)作为样例,并通过构建自定义调试函数:`Callback`、`metrics`、Print算子、日志打印、数据Dump功能等,同时将构建的自定义调试函数添加进代码中,通过运行效果来展示具体如何使用MindSpore提供给我们的自定义调试能力,帮助快速调试训练网络。\n", - "体验过程如下:\n", - "1. 数据准备。\n", - "2. 定义深度神经网络LeNet5。\n", - "3. 使用Callback回调函数构建StopAtTime类来控制训练停止时间。\n", - "4. 设置日志环境变量。\n", - "5. 启动同步Dump功能。\n", - "5. 定义训练网络并执行训练。\n", - "6. 执行测试。\n", - "7. 算子输出数据的读取与展示。\n", - "\n", - "> 本次体验适用于GPU环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据准备" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据集的下载" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "这里我们需要将MNIST数据集中随机取出一张图片,并增强成适合LeNet网络的数据格式(如何处理请参考[mindspore_quick_start.ipynb](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/quick_start/quick_start.ipynb))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "运行以下命令来获取数据集:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", 
- "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`custom_debugging_info.ipynb`为本文文档。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据集的增强操作" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载的数据集,需要通过`mindspore.dataset`处理成适用于MindSpore框架的数据,再使用一系列框架中提供的工具进行数据增强操作来适应LeNet网络的数据处理需求。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\" create dataset for train or test\n", - " Args:\n", - " data_path (str): Data path\n", - " batch_size (int): The number of data records in each group\n", - " repeat_size (int): The number of replicated data records\n", - " num_parallel_workers (int): The number of parallel workers\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define operation parameters\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # define map operations\n", - " trans_image_op=[\n", - " CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR),\n", - " CV.Rescale(rescale_nml, shift_nml),\n", - " 
CV.Rescale(rescale, shift),\n", - " CV.HWC2CHW() \n", - " ]\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # apply map operations on images\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=trans_image_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size)\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义深度神经网络LeNet5" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "针对MNIST数据集我们采用的是LeNet5网络,先对卷积函数和全连接函数初始化,然后`construct`构建神经网络。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.common.initializer import Normal\n", - "import mindspore.nn as nn\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " def __init__(self):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(1, 6, 5, pad_mode=\"valid\")\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode=\"valid\")\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, 10)\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - " \n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x)\n", - " return x" - ] - }, - { - "cell_type": 
"markdown", - "metadata": {}, - "source": [ - "## 构建自定义回调函数StopAtTime" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用回调函数的基类Callback,构建训练定时器`StopAtTime`,其基类(可在源码中找到位置在`/mindspore/nn/callback`)为:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```python\n", - "class Callback():\n", - " def begin(self, run_context):\n", - " pass\n", - " def epoch_begin(self, run_context):\n", - " pass\n", - " def epoch_end(self, run_context):\n", - " pass\n", - " def step_begin(self, run_context): \n", - " pass\n", - " def step_end(self, run_context):\n", - " pass\n", - " def end(self, run_context):\n", - " pass\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- `begin`:表示训练开始时执行。\n", - "- `epoch_begin`:表示每个epoch开始时执行。\n", - "- `epoch_end`:表示每个epoch结束时执行。\n", - "- `step_begin`:表示每个step刚开始时执行。\n", - "- `step_end`:表示每个step结束时执行。\n", - "- `end`:表示训练结束时执行。\n", - "\n", - "了解上述基类的用法后,还有一个参数`run_context`,这是一个类,存储了模型训练中的各种参数,我们在这里使用`print(cb_params.list_callback)`将其放在`end`中打印(当然也可以使用`print(cb_param)`打印所有参数信息,由于参数信息太多,我们这里只选了一个参数举例),后续在执行完训练后,根据打印信息,会简单介绍`run_context`类中各参数的意义,我们开始构建训练定时器,如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.train.callback import Callback\n", - "import time\n", - "\n", - "class StopAtTime(Callback):\n", - " def __init__(self, run_time):\n", - " super(StopAtTime, self).__init__()\n", - " self.run_time = run_time*60\n", - "\n", - " def begin(self, run_context):\n", - " cb_params = run_context.original_args()\n", - " cb_params.init_time = time.time()\n", - " \n", - " def step_end(self, run_context):\n", - " cb_params = run_context.original_args()\n", - " epoch_num = cb_params.cur_epoch_num\n", - " step_num = cb_params.cur_step_num\n", - " loss = cb_params.net_outputs\n", - " cur_time = time.time()\n", - " if (cur_time - cb_params.init_time) > self.run_time:\n", - " print(\"epoch: \", 
epoch_num, \" step: \", step_num, \" loss: \", loss)\n", - " run_context.request_stop()\n", - " def end(self, run_context):\n", - " cb_params = run_context.original_args()\n", - " print(cb_params.list_callback)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 启动同步Dump功能" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本例中使用同步Dump功能,导出每次迭代中前向传播和反向传播算子的输出数据,导出的数据方便用户在进行优化训练策略时进行分析使用,如需导出更多数据可参考[官方教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#dump)。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import json\n", - "\n", - "abspath = os.getcwd()\n", - "\n", - "data_dump = {\n", - " \"common_dump_settings\": {\n", - " \"dump_mode\": 0,\n", - " \"path\": abspath + \"/data_dump\",\n", - " \"net_name\": \"LeNet5\",\n", - " \"iteration\": 0,\n", - " \"input_output\": 2,\n", - " \"kernels\": [\"Default/network-WithLossCell/_backbone-LeNet5/flatten-Flatten/Reshape-op118\"],\n", - " \"support_device\": [0,1,2,3,4,5,6,7]\n", - " },\n", - " \"e2e_dump_settings\": {\n", - " \"enable\": True,\n", - " \"trans_flag\": False\n", - " }\n", - "}\n", - "\n", - "with open(\"./data_dump.json\", \"w\", encoding=\"GBK\") as f:\n", - " json.dump(data_dump, f)\n", - " \n", - "os.environ['MINDSPORE_DUMP_CONFIG'] = abspath + \"/data_dump.json\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "执行完上述命令后会在工作目录上生成`data_dump.json`文件,目录结构如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - ".\n", - "├── data_dump.json\n", - "├── datasets\n", - "│   └── MNIST_Data\n", - "│   ├── test\n", - "│   │   ├── t10k-images-idx3-ubyte\n", - "│   │   └── t10k-labels-idx1-ubyte\n", - "│   └── train\n", - "│   ├── train-images-idx3-ubyte\n", - "│   └── train-labels-idx1-ubyte\n", - "├── 
mindspore_custom_debugging_info.ipynb\n", - "└── MNIST_Data.zip\n", - "\n", - "4 directories, 7 files\n" - ] - } - ], - "source": [ - "! tree ." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "启动同步Dump功能需要注意:\n", - "\n", - "- `path`需要设置成绝对路径。例如`/usr/data_dump`可以,`./data_dump`则不行。\n", - "- `e2e_dump_settings`中的`enable`需要设置成`True`。\n", - "\n", - "- 需要将生成的`data_dump.json`文件添加至系统环境变量中。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 设置日志环境变量" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore采用`glog`来输出日志,我们这里将日志输出到屏幕:\n", - "\n", - "`GlOG_v`:控制日志的级别,默认值为2,即WARNING级别,对应关系如下:0-DEBUG、1-INFO、2-WARNING、3-ERROR。本次设置为1。\n", - "\n", - "`GLOG_logtostderr`:控制日志输出方式,设置为`1`时,日志输出到屏幕;值设置为`0`时,日志输出到文件。设置输出屏幕时,日志部分的信息会显示成红色,设置成输出到文件时,会在`GLOG_log_dir`路径下生成`mindspore.log`文件。\n", - "\n", - "> 更多设置请参考官网:" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'GLOG_v': '1', 'GLOG_logtostderr': '1'}\n" - ] - } - ], - "source": [ - "import os\n", - "from mindspore import log as logger\n", - "\n", - "os.environ['GLOG_v'] = '1'\n", - "os.environ['GLOG_logtostderr'] = '1'\n", - "os.environ['GLOG_log_dir'] = 'D:/' if os.name==\"nt\" else '/var/log/mindspore'\n", - "os.environ['logger_maxBytes'] = '5242880'\n", - "os.environ['logger_backupCount'] = '10'\n", - "print(logger.get_log_config())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "打印信息为`GLOG_v`的等级:`INFO`级别。\n", - "\n", - "输出方式`GLOG_logtostderr`:`1`表示屏幕输出。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义训练网络并执行训练" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义训练网络" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "此过程中先将之前生成的模型文件`.ckpt`和`.meta`的数据删除,并将模型需要用到的参数配置到`Model`。" - ] - }, - { - "cell_type": "code", - 
"execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore import context, Model\n", - "from mindspore.nn import Accuracy\n", - "from mindspore.nn import SoftmaxCrossEntropyWithLogits\n", - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor\n", - "\n", - "# clean files\n", - "if os.name == \"nt\":\n", - " os.system('del/f/s/q *.ckpt *.meta')\n", - "else:\n", - " os.system('rm -f *.ckpt *.meta *.pb')\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "lr = 0.01\n", - "momentum = 0.9 \n", - "epoch_size = 3\n", - "train_data_path = \"./datasets/MNIST_Data/train\"\n", - "eval_data_path = \"./datasets/MNIST_Data/test\"\n", - "model_path = \"./models/ckpt/custom_debugging_info/\"\n", - "\n", - "net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - "repeat_size = 1\n", - "network = LeNet5()\n", - "\n", - "metrics = {\n", - " 'accuracy': nn.Accuracy(),\n", - " 'loss': nn.Loss(),\n", - " 'precision': nn.Precision(),\n", - " 'recall': nn.Recall(),\n", - " 'f1_score': nn.F1()\n", - " }\n", - "net_opt = nn.Momentum(network.trainable_params(), lr, momentum)\n", - "\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=1875, keep_checkpoint_max=10)\n", - "\n", - "ckpoint_cb = ModelCheckpoint(prefix=\"checkpoint_lenet\", directory=model_path, config=config_ck)\n", - "\n", - "model = Model(network, net_loss, net_opt, metrics=metrics)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 执行训练" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在构建训练网络中,给`model.train`传入了三个回调函数,分别是`ckpoint_cb`,`LossMonitor`,`stop_cb`;其分别代表如下:\n", - "\n", - "`ckpoint_cb`:即是`ModelCheckpoint`,设置模型保存的回调函数。\n", - "\n", - "`LossMonitor`:loss值监视器,打印训练过程每步的loss值。\n", - "\n", - "`stop_cb`:即是`StopAtTime`,上面刚构建的训练定时器。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"我们将训练定时器`StopAtTime`设置成36秒,即`run_time=0.6`。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Training ==============\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[INFO] ME(14134:140231287715648,MainProcess):2020-12-01-17:12:19.263.012 [mindspore/train/serialization.py:379] Execute save the graph process.\n", - "[INFO] ME(14134:140231287715648,MainProcess):2020-12-01-17:12:29.689.876 [mindspore/train/serialization.py:168] Execute save checkpoint process.\n", - "[INFO] ME(14134:140231287715648,MainProcess):2020-12-01-17:12:29.704.062 [mindspore/train/serialization.py:214] Save checkpoint process finish.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 30 loss: 2.3048654\n", - "[, , <__main__.StopAtTime object at 0x7f8a1f042950>]\n" - ] - } - ], - "source": [ - "print(\"============== Starting Training ==============\")\n", - "ds_train = create_dataset(train_data_path, repeat_size = repeat_size)\n", - "stop_cb = StopAtTime(run_time=0.6)\n", - "model.train(epoch_size, ds_train, callbacks=[ckpoint_cb, LossMonitor(375), stop_cb], dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "以上打印信息中,主要分为两部分:\n", - "- 日志信息部分:\n", - " - `[INFO]`部分信息即为日志输出的信息,由于没有Warning信息,目前主要记录的是训练的几个重要步骤。\n", - " \n", - "- 回调函数信息部分:\n", - " - `LossMonitor`:每步的loss值。\n", - " - `StopAtTime`:在每个epoch结束及训练时间结束时,打印当前epoch的训练总时间(单位为毫秒),每步训练花费的时间以及平均loss值,另外在训练结束时还打印了`run_context.list_callback`的信息,这条信息表示本次训练过程中使用的回调函数;另外`run_conext.original_args`中还包含以下参数:\n", - " - `train_network`:网络的各类参数。\n", - " - `epoch_num`:训练的epoch数。\n", - " - `batch_num`:一个epoch的step数。\n", - " - `mode`:MODEL的模式。\n", - " - `loss_fn`:使用的损失函数。\n", - " - `optimizer`:使用的优化器。\n", - " - `parallel_mode`:并行模式。\n", - " - `device_number`:训练卡的数量。\n", - " - `train_dataset`:训练的数据集。\n", - " 
- `list_callback`:使用的回调函数。\n", - " - `train_dataset_element`:打印当前batch的数据集。\n", - " - `cur_step_num`:当前训练的step数。\n", - " - `cur_epoch_num`:当前的epoch。\n", - " - `net_outputs`:网络返回值。\n", - "\n", - " 几乎在训练中的所有重要数据,都可以从Callback中取得,所以Callback也是在自定义调试中比较常用的功能。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 执行测试" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "测试网络中我们的自定义函数`metrics`将在`model.eval`中被调用,除了模型的预测正确率外`recall`,`F1`等不同的检验标准下的预测正确率也会打印出来:" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Testing ==============\n", - "============== Accuracy:{'accuracy': 0.10106169871794872, 'loss': 2.302597688558774, 'precision': array([0. , 0. , 0. , 0.1010617, 0. , 0. ,\n", - " 0. , 0. , 0. , 0. ]), 'recall': array([0., 0., 0., 1., 0., 0., 0., 0., 0., 0.]), 'f1_score': array([0. , 0. , 0. , 0.18357136, 0. ,\n", - " 0. , 0. , 0. , 0. , 0. 
])} ==============\n" - ] - } - ], - "source": [ - "print(\"============== Starting Testing ==============\")\n", - "ds_eval = create_dataset(eval_data_path, repeat_size=repeat_size)\n", - "acc = model.eval(ds_eval,dataset_sink_mode = False)\n", - "print(\"============== Accuracy:{} ==============\".format(acc))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`Accuracy`部分的信息即为`metric`控制输出的信息,模型的预测值正确率和其他标准下验证(0-9)的正确率值,至于不同的验证标准计算方法,大家可以去官网搜索`mindspore.nn`查找,这里就不多介绍了。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 算子输出数据的读取展示" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "执行完成上述训练后,可以在`data_dump`文件夹中找到导出的训练数据,按照本例`data_dump.json`文件的设置,在目录`data_dump/LeNet5/device_0/`中找到每次迭代的数据,保存每次迭代的数据文件夹名称为`iteration_{迭代次数}`,每个算子输出数据的文件后缀为`.bin`,可以使用`numpy.fromfile`读取其中的数据。\n", - "\n", - "本例子,在第400次迭代数据中,随机读取其中一个算子的输出文件并进行展示:" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ops name: fc2.weight_output_0_shape_84_120_Float32_DefaultFormat.bin \n", - "\n", - "ops output value: [-1.86227040e-17 7.49122057e-21 -5.01539318e-16 ... 
-6.28152809e-20\n", - " 7.43756225e-16 3.97661325e-20] \n", - "\n", - "the shape of ops output: (5040,)\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import random\n", - "\n", - "dump_data_path = \"./data_dump/LeNet5/device_0/iteration_400/\"\n", - "ops_output_file = random.choice(os.listdir(dump_data_path))\n", - "print(\"ops name:\", ops_output_file, \"\\n\")\n", - "ops_dir = dump_data_path + ops_output_file\n", - "ops_output = np.fromfile(ops_dir)\n", - "print(\"ops output value:\", ops_output, \"\\n\")\n", - "print(\"the shape of ops output:\", ops_output.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结\n", - "\n", - "本例使用了MNIST数据集,通过LeNet5神经网络进行训练,将自定义调试函数结合到代码中进行调试,同时展示了使用方法和部分功能,并使用调试函数导出需要的输出数据,来更好的认识自定义调试函数的方便性,以上就是本次的体验内容。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/mindspore_debugging_in_pynative_mode.ipynb b/tutorials/notebook/mindspore_debugging_in_pynative_mode.ipynb deleted file mode 100644 index 4ba505b3f0ce98f22f8a13811fe0adb56af314cc..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_debugging_in_pynative_mode.ipynb +++ /dev/null @@ -1,620 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#
    使用PyNative进行神经网络的训练调试体验" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在神经网络训练过程中,数据是否按照自己设计的神经网络运行,是使用者非常关心的事情,如何去查看数据是怎样经过神经网络,并产生变化的呢?这时候需要AI框架提供一个功能,方便使用者将计算图中的每一步变化拆开成单个算子或者深层网络拆分成多个单层来调试观察,了解分析数据在经过算子或者计算层后的变化情况,MindSpore在设计之初就提供了这样的功能模式--`PyNative_MODE`,与此对应的是`GRAPH_MODE`,他们的特点分别如下:\n", - "- PyNative模式:也称动态图模式,将神经网络中的各个算子逐一下发执行,方便用户编写和调试神经网络模型。\n", - "- Graph模式:也称静态图模式或者图模式,将神经网络模型编译成一整张图,然后下发执行。该模式利用图优化等技术提高运行性能,同时有助于规模部署和跨平台运行。\n", - "\n", - "默认情况下,MindSpore处于PyNative模式,可以通过`context.set_context(mode=context.GRAPH_MODE)`切换为Graph模式;同样地,MindSpore处于Graph模式时,可以通过`context.set_context(mode=context.PYNATIVE_MODE)`切换为PyNative模式。\n", - "\n", - "
    本次体验我们将使用一张手写数字图片跑完单次训练,在PyNative模式下,将数据在训练中经过每层神经网络的变化情况打印出来,并计算对应的loss值以及梯度值`grads`,整体流程如下:\n", - "\n", - "1. 环境准备,设置PyNative模式。\n", - "\n", - "2. 数据集准备,并取用单张图片数据。\n", - "\n", - "3. 构建神经网络并设置每层断点打印数据。\n", - "\n", - "4. 构建梯度计算函数。\n", - "\n", - "5. 执行神经网络训练,查看网络各参数梯度。\n", - "\n", - "> 本文档适用于GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 环境准备\n", - "\n", - "使用`context.set_context`将模式设置成`PYNATIVE_MODE`。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore import context\n", - "\n", - "context.set_context(mode=context.PYNATIVE_MODE, device_target = \"GPU\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据准备" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据集的下载" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载并解压数据集数据,将解压后的数据集移动到指定位置。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test 
https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据集的增强操作" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载下来后的数据集,需要通过`mindspore.dataset`处理成适用于MindSpore框架的数据,再使用一系列框架中提供的工具进行数据增强操作来适应LeNet网络的数据处理需求。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype\n", - "import mindspore.dataset as ds\n", - "import numpy as np\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\" create dataset for train or test\n", - " Args:\n", - " data_path (str): Data path\n", - " batch_size (int): The number of data records in each group\n", - " repeat_size (int): The number of replicated data records\n", - " num_parallel_workers (int): The number of parallel workers\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define some parameters needed for data enhancement and rough justification\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # according to the parameters, generate the corresponding data enhancement method\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR)\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml) \n", - " rescale_op = CV.Rescale(rescale, shift) \n", - " hwc2chw_op = CV.HWC2CHW() \n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # using map method to apply operations to a 
dataset\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=resize_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_nml_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " \n", - " # process the generated dataset\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size)\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据图片的提取" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本次体验我们只需要一张图片进行训练体验,所以随机选取`batch`中的第一张图片`image`和下标`label`。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(32, 1, 32, 32)\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAWAAAADsCAYAAABKZHxbAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAB3/0lEQVR4nO2deXhU5b34P+85s2cy2UMCWVlCABVkE1DAai2WqnW3lt6qxdsq6q232tra/qy32mtrbW9vW5drq2Krpe47KnUFlEUWQdmXhCQkAbIzmcx2zvv7Yyb7SjIb9nyeh4fMOe858533vO/3vMt3EVJKDAwMDAxijxJvAQwMDAz+VTEUsIGBgUGcMBSwgYGBQZwwFLCBgYFBnDAUsIGBgUGcMBSwgYGBQZwwFLCBgYFBnEhYBSyEuFkIsUkI4RNCLI+3PP0hhLAKIR4TQhwSQhwXQnwqhPhqvOXqDyHEJCHEe0KIZiHEfiHEJfGWqT9OBllPwuefLoR4SQjRGpb5m/GWqT+EEB8IIbxCCHf43554y9QfQoinhBA1QogWIcReIcT1Q7kuYRUwUA3cCzweb0EGwQRUAguBFOBnwLNCiKJ4CtUXQggT8ArwOpAOfBd4SghRElfB+uAkkvWkef5hHgT8wChgCfCwEGJKfEUakJullM7wv4nxFmYA7gOKpJQu4CLgXiHEjEGvklIO+A8oB24HtgPNwDOADbgWWNujrATGh/9eDjwEvAm4gY+AHOD3QCOwGzh9CN9/L7B8sHKJIGuXe28HLks0WYFTwteILsdWAfd8EWQ1nv+gdZpESPmWdDn2N+BXiSZr+PoPgOuHWu+J0gaAiUANcOVgZYc6Ar4SOB8oBk4L/5ihXvczIBPwAeuALeHPzwO/ay8ohHhICPHQEO+bsLIKIUYBJcCORJe1vQghZfdFkdV4/v3LWgIEpZR7u9xrGzCUEXC86vU+IUSdEOIjIcTZJ/CdMZc1fMxDSFnXACsH+8KhKuA/SCmrpZQNwGvAtCFe95KUcrOU0gu8BHillH+VUmqE3kyntxeUUi6TUi4b4n0TUlYhhBl4GnhSSrk7AWXdAxwFfiiEMAshvkJo6uz4AslqPP/+ZXUCLT3u1QwkJ6CsAHcAY4ExwKPAa0KIcQkqK+HPycB84EVCSnxAhqqAa7v87SH0IIfCkS5/t/Xxeaj3ORHiIqsQQiE0nfMDNw/xO2Mqq5QyAFwMfC383bcBzwJVXyBZjeff/33cgKvHMRdwfAjfGfN6lVJukFIel1L6pJRPEloWWJyIsrYjpdSklGuBPODGwcqbhihYX7TSZTQihMgZwb2iTVRlFUII4DFCGxuLw8pjuERVVinldkIjyfb7fww8OczbnSyyGs8/xF7AJISYIKXcFz42laEtl/RFrHWAJLQMNRxiLasJGHS0PhIriG3AFCHENCGEDbh7BPfqhRDCFL6vCqhCCFt4Z3w4RFVW4GFgEnChlLJthPeKdr2eFq5LhxDidiCX0AbEcDhZZDWePyClbCU0Nf6FECJJCHEm8HVCI/fhEDVZhRCpQohF7f1eCLEEWAC8lYCyZgshviGEcAohVCHEIuBq4N3Brh22Ag4v5P8CeAfYB6wd7r0AhBCPCCEe6XLoZ4SG/T8GvhX++2eJJqsQohD4HqF1ptouNotLEk3WMP9GaIPgKHAucJ6UctC1qpNZVuP5d6vTZYCdUJ2uAG6UUg5rBBxlWc2ELKCOAXXALcDFPTYQE0VWSWi5oYqQxcQDwK1SylcHvU/YbMLAwMDAIMYksiOGgYGBwRcaQwEbGBgYxAlDARsYGBjECUMBGxgYGMQJQwEbGBgYxIkh2dWep1yRUKYS/9Sf69MY+2SREwxZR8IXQdaTRU4wZB0JA8kKxgjYwMDAIG4YCtjAwMAgTowkFoSBwRcCU34e7qmjaZrQd3dI2xsgaXsNwcqhxCwyMBg6cVfAis2GkjsKz8TsjmP2Tw6gNTRClL30TPl5BPIzCLgsnfI
EdMwfbkMGg1H9boP4IudOJeAygxAcKzbRdIafS07d1GfZF7efTlp+Pul7srEccaPtHJY3bOQQAjU9jbZZnbFeHHuOolXXIn3D8io3iBNxVcCKzYYYW0DtWRl4F3WGKc0/lotwt0a1MalZWdQvzOPobIl9jLvjuM9rYULdBBRP+Ltb3OhNzUbD/qIgBKbiQnZfacc02oPJpFGY3shtozewJLm+z0umJlXwj6LZ7D6US9LOTApaWglWHY6x4J0IiwW9eDRH/93Tccz+1mgyP3WhNrWGDkiJ8PrjJqdisyFSXOByhmTRJcGyQ1EfVA0qU/7oYV0rvH5kcwtaS89wyiMjbgpYmEyIwjxqF2bgvLiWzae+1HFu4ejv4thtiprSU2w2WhaOpfFCD7+b/iIXJ3Uq4JqgmwWVP8TkDW1epu7RSdtSh36g3BgVDwFhtSJUFcTAUQNlMIj0+2PeIRW7nfKrRvPLxSu4KOkIDsUy6DXfdtXxbddK9o5t5e4JF7C3sZSMP8dBsQmBsFhQ01JpGutkx9zOGDtnJV3K/tJszMdDsdVFEOzHJNlPNaG3tsZWzHDfbpyRSVOJAhIUPxT871F0j2fwG3S7Weg3C5MJNA3d6x2WTIrDAeMLKLs0fVjX245C5mdtmLbsHZEcPYmbAlbzx3D4K9kUXH6QVyd0RpjTpB6KLRRFgrMmEfxOPc9MepppVmu3c7kmJ/u+/XDH58V7FlPzfBGj/YHQGzzeDKLY4jbCCMslJo3Dm5OEZh1YTsdhD2p5DVpd36POqCAEiiuZZ7/3W6ZY7EDfyleTesffqujcpy4xJ/GHgte5Y2mAqj9HW9jeqOlp6MWjaRrrpKG0+/752tNeDCXfCdOst/FgwzRWVS7A9s9tyIA/dnJ26dvrJryFJnV2B3z86ImLB1fAPdq3mpGOXpSLJ8eBpTmAsmbrCcvTrnzLL05n13eHl/Xsj42F/M8H55OfMmXYcvRFXBSw6nJRfvUYLrlyDfdmf9ZxXJM6ewNelKCMqiIpemAv941eRaaaNGjZlRNXcuf1p/GK6ywKH2yM+BRkqCg2G8KZFBoJDIAMBJCtnoi9oYeEEKjZWQgh2PMjC0/O+T/OtA1sYDN149W4/joe58rW2Mo6CB7dT1lQ6/g8WpW4FFs3RRwv2maN4+i/e7qNfPsjRbFzR8Yuvv7Qp/xozsUEa48Mek0k6K9vD4ku7aidxoXFeJc0sur0/+GOw4uomnOCtzSZoKSI/d9IYd+3h59y8pa0Q9xyyf9Rd1HrsOToj7go4F2/KuW+c1dwifMoobCfncr3Bxd8B+u+7egJtOb68+zNlF5Tzd1jLmPCzRviIkPt0umM+8Ze/iv/5QHL/b+KiyhfcTpZD6+LjWDhTnPtmg2UWo6Qr+q4FNugl62e+Tg3Zi1m96gYyjoIdVord9eeQ9mFqR3H9vwulyfnPM6Zg/8kA/ru20OiRztSw9Ngh9BIV1UCwxyP+c+eSsV1Oh8veIBQUujEIj5LEDadLFMLVhF6QM16G39sOJ137piPbd/2qK39qhnp8LyNH+Y8R8oQlEQ7VmHmwqQKLOc9y53LL2XCtZujIl9f7HtyOjPHHeLKjBc527GfItPAOSl/U/gS7/5HCe8smcSm/UVRlVWx2QieMYmJv/2cxY4j2IVlyCPFFMXOPXmv8e5/7ImJrIOx2efnhh3XkforB8qRbR3HNV8eAWkC9P4vjgHNS+bgv6qRR09ZQX/m+4v3LGb3oVxOKT7cbVkvpgzUt+s767V5yRzqvublS+P2dRyzq40d7agdVSjhZ3NN6Nnw6QmJI00KFquP7CHMdodCimLj9pxV/Od7V6Bc0YZW3zCi+8VcAVfcNY+rT1/LRHMz7TnuAlKn0puG/cMdUR35yjYv1a+U8vENxYx2VmAWasc5t+7lldYx3Pv0VQD8fEn3TZo01cE8eyXTxx0aUgbDkaK6XOz6VSm/nvsM82yHSVVM2IVjUAVXZHLwzeSDLE7ay9q
sfH7y0BWU3rEL/XgUpFZV/Mlm/mvUhziVoSRW7lvW+fb9/D95UUzqtSe31UznxS0zcO4xk3pQw7RpG/oAy1/JioWrMjfwvYeui1699kFbpsKcnApmWjX6UsBnbr8U74ujKDrop2p8MWddfClrT3uR0apkz+9ymfDrdOTOA1FdC+6rb9uEibOS9rL83+agXXlqR9mpY8u4c/Qa5tmOdRxTEd3aUZ/PZhhyCRG55UyzUBlrNvPjwje53/TlEd8vZgpYsdmoXTqdcy/czLXp6xil2tnoC/DYsQW8t38iosJOsSe6U1G9rY28Vw5zT8GlHDnvXa5L/bTjzRiQOp+4iyn+ew0Amy8uYpHjMI4uGzUOIUgxe6OqKITJhJo/hvKrx3Dvuc+wyFFLihJqzKH6OpM1FX3n+ju7cD/XZq5httWGU4FFjsMcW/g2b9pKIEqKQqqhl1N/+GSATT6Vf9/ybQD+PP2vzLRqWIUZVSg4hY0stTXq9RoSViK9Xi5cfRNWeyDUMT91UbzRj31vFdLdijbIerRVmJllbeYHUa7XXqKr4FR9HSPLnhz5PJvxW46j7K8ku7mA/ROy4TRwKTYen7Ocu0ddj3WfKaoKOOvMGr6RtpFRqr3jmFWYOd3ayv+d0T3t3Gj1OPkmBaeS1KuNdHCCz2Y4LG/JZsXh2VQ2pnbKbA7ywYwnSFHsvcr7ZIANPjPf+ehaStv2j/j7o6+AhUDNSKdl4XimLNnJbdnvUmAKjeTWtk7gnY+nUvh6AHOLe/B7jRQZskUc/2wKj9i+xLopYylyhnbhmwN2Vu8fz/j9od3Nl3ZPY37yHs6xN+AML1eYhUKBvYHyc+dGxVlDsdkQhXkc/ko2l1y5hsudtViFnRfcLrZ6CnmzchJtGzMZtbHvTvTW16ZScm4ts63lQKjznZO0m7fU0ojK2SnrGOon925CG30B1rZOpMKXTmvQysaaAkb9OVSHH/5mEqdYPu1XkUQb3d1K8XKBbg5Zv9hqGhAVNQSbmod8j2jWa1+oUybSmqeTbwtNdzWpUxH08L91Z3eUSdknUGsbCTY1oza1dpijqUJhgQ00qzK4Bc0wESYT/rOncn7uWgpNErNQqQi6+cyfSUCauDgJzrWHNjZ/31hEpTed6c5DHNPrqQxk8EFTabc20s5wns1g+GSAu47OwqeH2u0r26aRvsHMqPJwImsh8LtUGqZppPQx2fTKIO8fnxpqQydqUtcHUVfAwmIJmZFc08SDBW92jOZqgm5W108gdafA/E6M1/7Wb6fYNZPKT8dS5giNJpUgZNd3TnCyXrfxlzELmFD4IpPCg2AzKqc7yvnb+QuZsNGOjPDoR6S4aJyRScHlB8M7yCEl9YudX8O/NY2U/TrZnzeib9vV7TpT3hikzYKlKYfm4IkvBQxb1mnpjDv/YK9zfz56Nu+tOxVnuYISBEe9jmXVBkzFhTQGHQNO8aONDAYxvdfZ3gab0ooWM3v9Ocy0Hux4Eceami9lMGHaIeY79gIW2qSff3pKeP9vszvK5Gw9jt4cJwsdZxKHrtFZ6NyFLZy4fE1bIf+7/xwam5NwzH66o+wf3z4fR7XCS6ecTnZ2M0eOpJC62RpqI2+v73bfSKy6qz6NtuM2dvk95KjwWmsBrz87j2CSRDdBehmk7/Rirg0reSGwpCcRoO+XVUDqVLSlY3pvc0SsZaOvgE0mWsc42DLzUULJWEM8c/wUtu0spPhg7OwTAZSk0JKD9aNdZH/U46SmdTz01Bc/5fPzJ3Ms38Gk8DFVCCxCQ7dHaUPG5aSpRGFdjw0U/5Y0xj5VTfBgea9GqSQlUXNBAd4sQdLpdUx1VERHtp4kJ9E8VuGtcS/R9bkCrDtcRP4/NWxvbwrNEhQVNTuL8qtG829JH2IVcfeAHzLp2wVPTpxDaUk1C+JkCdE0w8f/FL7JDGtoJNCgB1lVN5mc33/crZwOoVGu2YRujtFLTgiEzcar8x8K21Yr+GSAD5pKcX+cRe6OIN9
ru6ajeOnyJuSug/jOOQ33mCyKK/yY3/m4//uPEMsRN0k7M/np2K9z5ahN3P3OZUz802cc/eYptGUJPLngybXT3oalAM0qSY2R1WH0e4Ki0NfL5JEd8yl+SY/p6FdYrbQtnIxu7i2Q0AgZWK/9FIDgGZPIzmzBJXxAaLp6XPfzXN0sJty8ITp74lL264QiFdFrCiksFtoWTuaqm97hhrTtfa5ZRQvh9WM/JnmwYRp3Zu7pdq4grZGaohSS8segVR5Gzcyg7ryxvH7D/RSbnfTnABFX+pqeS0nGY+uoSZ3HK1nTWZC7JfZy9aBZb+Pjtnw27y+khLpe59X0NDzFKQTyOjezY+Hc1JVNPpWNNQWkHNSxv7yRkpc7z7X3G8vbmxieT9qJoe3cS0FLK1VHxvOTswsoCffd6d/ZPogvQN/HdUDvZ3Q8HKKrgIVAOOz4ktVuh2uCbgI+EyIYu1YhrFbExGIefuh/mWTpPU3vtAENBQWa8LvP+Hn26gE3mGKF5pBo6U5M7s6ARagqeoary++JnfIFCFYdJvupJt6qXcidD3dXwB3OKylnUfisQt3cHDb8+mHad8YTiYEcXLT6RhS7Dc0KdjUQB+lCppOqVcMsgoDCBq+Ln6y9jJKlfQcOaps1jmPf8XBwbmjaHyvnpq4s274E119dOFd+GmfjvRDBqsOkPXmYtCdHdh9N6tRrgurWFIjQlnFUFbCpuJDyq0bz+g3307XzLXz6h5SsaETu3B+TF7OSlETbwsk8/ND/UmLuex6ZqSZxf+4aytaHNguKTSqOYZhWRYO1Sx6g6WoIyO7zIgXZ7++JNz/P3swtN3xM03cVbEInEZUvDOzgcvMN/0H5FZL7zhqGY0GkeN7GyrEPMs504i/YeDk3JaKTTSRY4zWxdN1NTPz+IbTBiw+JqCpgqQh0C+FpZ4jJDy1j3MsNsD82wW1MeWOouaCAq256p8+Rb1ccioVSc+idHQ/XU1lVQ9ELVibry9h5Y6fbZLaaRLbaPUYB9C/jZp+fm3Z9E+vD6Yig7GYAHyuswkyuyUy21BPCjbcnQ3FwOe/+1cxL2sfp1lasIrYzjHbGOJpJV4bfHnUESn0TQX+U9lqkRKtv5KYb/4Ov3v9Bx1JYu5PNJ9cWo0vBYU9KRBwX4snp1lYenftX/rFyDpWtYxLbEUOdMpGKCzKY99Xt3Y6n79YQVTVRsenrC2mz4M0S3JC2naFM0wdq6B1G+E+E7BVL/6cVuetgxGwrda8X5WAFhS/BJLEM6O4QMtRO2KTbOXrMxcT3doCU6FGy/ZQ+H86ddZT89UbWLnmgT2+jRFO+J+LgclP6p9iEKW7KtycvuF3cselS8l/tu079i2ZSfrHg9kmru5+Q0V1+kAE/9g93cMibQSA8SGh3srnUGfJ0a9Bh8R9vQvMVk/+yivOjA7ENxBQBUhQ7Z9m8nDp6FTv9yYntiOEb5cQz2cuduW8BTpr1Ns7efB2jytzobbENFCNVIrJBZRVmzrC18JcFywH4btO/M/aFyZi2HYiYR5Tu8aDsL6f476Hp4k9Tr+Ku3DZM5v4nPV2dGwA0qSCDStTDEMpgEFlZzbgVVubm3YzFGuzmdeS0+5idXcHVGev6dMSIFarLhTa5iJp5TjQbvRxc+mMgQ/xr195EqWfkhvj90e64dKXrRWxhj82tnkIsOx04P9rTawrsXzST8ksUrp27liuSd+OTlpjI2Y7u8fDPd+fyTuFETGaNgvRGvjl6A992hTYK0xSdx+csJyBVbrR9C3XqRCzHweSRpJQFMK/qe007Wnzw/mk8uriO69O2nJCbslWYyVbN+M0tlF8/nuKnbWhV1cOezUdFAZvy8zg20UJpwSHGhZcf6jQNx1MpKOX70KI1HYoBKYq9w6j81q+8yR99i8nMm4Kzog1zZX1E0tboXi/sLwNg/LPJBJxmpNq/woqnc4Pu9SJ27KPo8akdzg3t+FKdvDM5izVTxw7oiBFtBxeR4uLY6Umct2Q
9djXQ4eDSlWa9jV1+C5+0jeWWtP7DjkbaEL9fmZ1JjPvGXs5x7McuQssjbs2K2U2fI8eaM81cPns9S9M2kq2GBjyxkLMrY19u7WirR4sL+PkZOWw5LbT85VD8/Dx7M1YB9898gW2TC2gJ2ihrzWDr58WUtJwG67cP8g2RlfXProVsnFLU4YzVk+9nftDhNNaTLNXKpVesYcPamZhqjyaOAm7PNNEyv43bRocih2lSxytVUteUE4zxGpAIapg8oXXRdjvK/mjW29jgdfU6nqp6yFd95Jq6j5huSTtExVfWs+a0cRzcmUne+7lYI503bP32Qbd+yn6R2TH1iwc9nRvasbtcuPbn07w7pcPIvi9ZbcLEl5J38tdrz2DiJ46Ih/yUNgttowS/7TAj616jzXob77dl8fuyL1O5I4dTL3qU+bZgnx2vpyF+uxMMQkQua4IQCJOJ/8p/udv+SX+Y8sYgJ7ZyWeom8kyd8VUi6TAwJLq01dz8PFyVuby/LeQsErRD6TXV5JiamWmt5WuOZsxCpSbo5onUGTzRcg5j1/d/62jIWuyaSdm+cezKGIveRydzfMXPtenrKDbZusWNgdBI+N7sz1jomotJVXtfPEQiroDdZxbTcIGHB2Z0ZpoIolGrOZFx8ICSbg8pBzRu3LmElact77ecR0o+bsvnJ2sv63UuLes4lxd9ynfTtmAWSrep6W9ytkLOVm4dNZP3q2aTE40fcQJoUscjrRCI/9qr1tICm3fgGsTU2yrMnGnVeXX+Q/zIcTHEKOayW/cSkDoferO5Z/dieD2DkvVNXJ99DZ8seLBfE0STonU49LQ7wUgV7LWSrC1u2HiCcXCHg6Ki2G0dMszK30WW2ka7tYlHSpoDNiAGLv59EKyswlpZ1dEfVJeLu8dchrRrXDl9E0vTP2KcyU6uycl307Zw4OwsYp3y1PLhZ2S3TabuNDverN7nV2yeTcoZbXw7ZWuvwVekiLgCrvy6xhOznuLssLdYQGocDAS4v/wS1GBspkJd0Y4dI/mN4wSPTuKO3yzqt1xzwBYybu/LvnLOaTxx8TkcOiedQltDL8eDuCIEiugcUVYEPaxqmoe96uTxNos6/Ti4vNI6hk/cxby261Sy37DiWrEOXQgm/iiX6o8EaX0MbMxCodDWwK4vLwTo5gRza81M3v/bbHI2RvfnCJMJJSMd95yiHjKElESj5hnQWSMeaC0tHbG0X71rHm0XmfnPrPeHNMKPFqJ0LAcut3Lbea9xU2plr/OTH17GO8WlLHJ+Tm6UulNEb9vTaBygWffyQO0iOLcqYrZzJ4ru9aKs2TpIFHt3/411/XbGroeKpCR2fbm340HcCAextqsNqGHvnP+tO5uPnp5O/u+j5975ReGXT11F8dPVjD/YmV5GWCzoGS6UfibuKYqdOzP39GgDsbGScCh+gg5QivKp73Bu6S6DTwZ4rbWAu9+5jJI4JQ8YjOLff867wVm4rvSeeNaMCLL7VgdPLHi0Y7DYk05TUGuf5yNBZPX6CIzGDU6QLhkEFjuODCser0F3huKwE0+G4txy19FZvP7sPCb+6bOE8ELri2FnzfgCEtGFwpEajRucGEIISi3dMwgkOuW3l7B4+7V8mkApp7qimwWTLIMHvo8qXZwbfl0/gWa9DWh3bnFSYrb1OXVfvGcxq/48j6IVVTELFA/hme/7o8lb76T+3+diGtM79XvXMv/3lSf4alJ13EKSRoKygJs5P7yBpPVlIzKrjegIeOtfTuM/rrVwT95rjDM72Rto5a6qC9n/xEQyOPldEtsdDyY90ttJIl6oyJPqhWf6ZBd19ZNpkdGb1vWiroG8D1KZJJd1O5z3QRuyobHjc8/ne6I4jkhytkZG8bU7Nzz7py+z7dt5HX0K+h7gtGfEyP3gCFpVdURkGBJCIMxmfljwFpMtx7n1WjOfpU4ibV8ex/NMNM0IvWhVq8bKsQ+SroQcmtrNAE82HeHWvbzozuP+5csoWLUv5Am
nD39xNaIKeNTKQ3yWOokvTx6LPdlH23Erzh1WCt4sJ/pOx9Gn3fGg+O+h9cGuThKBg8mM2RefgC0nE7rXi9QEulSIVZ41rcWNedtBig/3iL9V14DWJRFAz+d7ogivH725JWK/Svd4yHn9EJ+5OvtUfzjeSmbUmmPoh6pi4uLfkyy1lWw1iXvyXuOWxXb2H8lkbHY9DxSGQquahcY4k73by2OjL8Dd5VdQ+UZRXHRE6iYr1+nfYWJxTTenkb5oz97z/oES9Go7E585TLCubsQehhFVwMHD1eS/4cT/iRPNakP16ViO1hE8HMM3cpTpz0nC3OLBXFn3hXjRfOHQNbSmZhhCZoWuzzcR6Nmn+sOx5zBazZGoJbTtFymRgQD/r+Ii8hxNKELi00wkJ3nRpMJrTad3FH2xx6Vrqsfi+yiTgpXx0RG579eRscPJ0dLuTiN9saZ6LG0bM8lb58Pc0kqwrH9nnRMh4sYV2s69qDuh3YInXpYPMaGHk4ShfIeGaDHzobsUv+zMiKujsNs3Fqkl6tZR/OjZp/oinm1PtnooX3E6ByyiW+zvo8BR8vu9zn4slOFF27k3+kL2QXu95h7o7jTSF+2y9sxGM1IMY9GTnGNaEke14x27qc0BO0qCvwnStwv+KhewPHle50EJwqsy0bszfoIZDAvd6x122MlEeN32dBrpj2jIaijgk5Xw1O++8sWMTmrusFtdvX98t9x2iUjGY+vI6OdcYktuYBBZDAV8EqPVN8C5DXRdPRvP1n7LGxgYJBYiHvEZDAwMDAwi7IhhYGBgYDB0DAVsYGBgECcMBWxgYGAQJwwFbGBgYBAnDAVsYGBgECcSVgELIdw9/mlCiD/GW66+EELcLITYJITwCSGWx1uegRBCPCWEqBFCtAgh9gohro+3TP0hhEgXQrwkhGgVQhwSQnwz3jL1RAhhFUI8FpbvuBDiUyHEV+Mt10AIIb4hhNgVrtcDQoj58ZZpIIQQE4QQXiHEU/GWpS9G0gYS1g5YStkRb08I4QRqgefiJ9GAVAP3AouIVXTu4XMfsFRK6RNClAIfCCG2SikHSRwUFx4E/MAoYBrwhhBim5RyR1yl6o4JqAQWAhXAYuBZIcSpUsryeArWF0KI84BfA1cBG4Hc+Eo0JB4EPom3EAMw/DYgpRzwH1AO3A5sB5qBZwAbcC2wtkdZCYwP/70ceAh4k1Biqo+AHOD3QCOwGzh9sO8P3+sa4CBhu+VElZWQEl4+xN+UCPU6EagBrkw0WYEkQsq3pMuxvwG/SiQ5+5FjO3BZotVp+PqPCb2Ah/Rb4l2vwDeAZ4G7gacSWdYTbQNSyiEvQVwJnA8UA6eFf8xQr/sZkAn4gHXAlvDn54HftRcUQjwkhHior5sQUsB/leFfluCynghxkTV8zEOoUdUAKxNQ1hIgKKXsGqllGzAlweTshhBiVFj2oYzSYyqrEEIFZgJZQoj9QogqIcSfhBBDmbXFvF6FEC7gF8APhvhdcZO1KyfSBoaqgP8gpayWUjYArxGaDg6Fl6SUm6WUXuAlwCul/KuUUiP0ZuqIVSelXCal7BUJWwhRSGho/2SiyzoM4iJr+HMyMJ9QlMChxDCMtaxOoGd65Oaw3IkkZwdCCDPwNPCklHL3EL4z1rKOIpQD6HJCz35auOzPElBWgHuAx6SUJ5ow+aRpA0NVwLVd/vbQVzKqvjnS5e+2Pj4P5T7/RmjqMNQgrfGU9USJm6xSSk1KuRbIA24cwnfGWlY34OpxzAUMlnIiLnUqhFAILZH4gZuH+J2xlrUt/P8fpZQ1Uso6QqO6xYkmqxBiGvBl4H+G+D1dOWnawEg24VqBjkyQQojBorkNl28DvxrhPWIlaySItawmYNwwr42mrHsBkxBigpQdgYOnMrSpfU+iWqdCCAE8RmiEuVhKOZLUKFGTVUrZKISogm4pn0cSDCaa9Xo2UARUhKoXJ6AKISZLKacP434J2QZGYoa2DZgihJgmhLARWiSPKEK
IecAYRm79EFVZhRCm8H1VQo3EJoQY7sstarIKIbLDJkhOIYQqhFgEXA28m2iySilbCS2P/EIIkSSEOBP4OqERRsLIGeZhYBJwoZSybbDCgxBtWZ8Abgm3hTTgP4HXh3mvaMr6KKGBwbTwv0eANwhZGg2HhGwDw1bA4c2RXwDvAPuAtcO9F4AQ4hEhxCM9Dl8DvCilHFGmwxjI+jNCU5QfA98K/z2UdbVYyyoJLTdUEdrZfQC4VUr5agLKCrCMkFnfUWAFcKMchglaNOUM71F8j5CSqBWddutLhnPvGNTpPYRMuvYCu4CtwC8TTVYppUdKWdv+j9CSlFdKeSzRZB1JGzDCURoYGBjEiYT1hDMwMDD4omMoYAMDA4M4YShgAwMDgzhhKGADAwODOGEoYAMDA4M4MSRb1fOUKxLKVOKf+nOir+Mni5xgyDoSvgiynixygiHrSBhIVkjgcJQGBgZDp/76uTScKpGuAKLFTPp2QcZj6+ItlsEgGArY4F8CYTKh5o2m6pK8E742bW+ApO01BCtPNCZMdFFsNmqXTke3gHNRLT8uXM84yxFWu0t5Ss4nI94CJjj+RTNpLjaj2fsfpOZ8dBxlVzn68RH5gvWLoYDbEQI1PY22WZ1hERx7jqJV1yJ9QwkWZtAVNS0NWZCDN7fv+CW2GjeiohatsTHqsig2G6Iwj+rzsvnyt9af8PUvbj+dUWl5ZHxIYihhIVAz0mlZOJ4pS3aSaXVzRdpGplr8OBUbmtzLX5PnxlvKhKfmTDNFZ1UwJaWm3zKvZZ1BgWMClm1lUWmrhgIOIywW9OLRHP13T8ex1GdySfmwDe3I0ThK1olisyFSXODqJyhTixu9qTmuLww1KwvhdOCZmE3tbDPK1OY+y8lP0xi1MQnH3mNItwft2LA8TIeESHHROCOT/CsO8tvcLSd8/dSkCn5huRBkHumrfFGVdSgIiwW9KBfPNU08WPAmKYqd0H66jWa9jX3+fJSWxO7aqsuFSHEhbRaQEuH1E6w6HFMZfPl+flz4Jmfb9X7LKOdLXmEOBRRj2aKhtfSMkDoyEvspxRBhMtE6xsGOuY92HJu0bRmpmxwDXBU7hMmEKMyjcUYmTSV9G6+k79RJ/aSGYNmhGEvXifvMYhpLTLgn+rly+sf8etSnfZa7rWg6LxZNx7lnDCkHNJLfCr349NbWyAvlctJUorBuwlsdh5r1NgKy/44HkKLYMAuVb7vqcM1+jh+Ky7G0FmN/Ob4KWHE4aB7rZMvMR+iaAcsnA6z1pvH4gbmkfzbg3k/c0SYXcez0JNpGCUQQHEckGX+OrQIeCr/J2Yq+SPAaZ1DsKYSNn0X0/tFRwGKQh3+SxJ8QCSSmmj+Gw1/JpuDyg90USVdK1/4bluYsLHFUwJVf13hiwV8GHFUA/DZ3C7/N3cLmc/zcvPtqWoNTUAIS2z+3IQP+yAolZa+giw82TOOQN52grvZ72R25b1NsCinhi5PcJJ/xN673XkfJy5EV70RQHA5k/igaSnu/hDf4zPzk80tIeiYF14rE3oCrmefkvCXr+W3uFuq0Vu44vIiqP8dYCCnQEGj9vIhVEarj3+ZuQVuk8P7R2eRsjKwIEVfAqssFdhtiACWstxxHb2s7aRRxvFFdLsqvHsMlV67h3uzIvoHjzQyrhXVTX0B7SGd3wMeP5lxMsPbI4BeeAEKXKH4oC7gpNoeWb97+6UIc7+1Ab3X3e92t71/B78c+xziTHVUoqEgUi4aakY5W3xBRGYeCMJmgpIj930hh37e7Z8OpCbq57qNbKHpCwfTeia9z/0viVdjtG02OurfP08UmFYdiAcCh+AnaQ30xkssQEVfAu35Vyu1nv8nZjr5/FMBVj9xG4T+qCJZXRPrrv5Ds+lUp9527gkucRwlllDE4EYJlhyj8SyvfqPohG3798NAvvNzL4j/exJNzHudMG8y3BXlz/p+45dmr4NzYK2D/2VOpuE7n4wUPEMpZ2snCp39IyYpG5M7
9I4qw/q9E6R27eMNWykp1cp/nJ7xRz8+zV5OmOvh59mZKr6nm7jGXMeHmDRGTIfJLEDadUms1pWZrv0V+ct0zbLmikJZgZyPSpeCwJwXlira4jC4SlX1PTufXc59hkaMW65ByJyYmZQE3V995O5f95J98N/Xz8MZRJ6pQKDapjHu9gV23TsO0aTe61xuZL5cSrb6BjFUHmCNvYMUvHxhSHgitvgHNV0xAmgAdVSikKjA6qZnqyEg2ZJqXzMF/VSNPnrKCbLW78p380DLGvdwA+8uRwWCMJTt50Y8fh+PH+10y3XHrNBb9uJhHJz/FNKuVHFMz0qZFVIbIK2AhUZEd6yd9cUlSDYsdlWhdeoEO7PQnc7/pyxEXabjIBNjHmD2+nHm2w6Qooanz3kArfzh6Dm98fgplix6Ls3SD49a9vOjO4/7lyyhYtY9/OL/CE5mLkCp4C/xcPn0zv8nZCoBDsfBfoz5k0U+KsDw+FdeH+9Hq6iMjiK6hHasn/W3JhXk/onBnNdowrEUUQInDGLMtU2FOTgUzrRrtEQRqgm4WPv3DDuUbsRfWCOjqEAJAQMFeaSb/no/jK9hA9LMUavpkF3X1k2mRXQaTEdYJcbGCcCgWHFi6HdOkTpbaOuDa8b8im9dM5JzCmzCZQ2/etuNW7AesZNTIXslZ4l11zUvmMHVsGaNNx2mfIgekzpbWIgqfOUywro6cNyxgMSMVQf28HNaMHgdhBQyQpjp4dPJT/NvY/yRlcxJESgFDSAnX1VH4jAO95shJM1r0L5rJ8RlevpSyG6sILUFpUqdJVyh+yQ0HK9C9XsTpU2g8xYU3s++GEG2nAoCGUyXXnL2as5x7ADgWdPGnsi/1+j3uaV5mOcuoCbp5rGkmaz44lWISa+NQ93qRmkCXCqEhYuSJuAK2Vli5r3wxL6fW9nn++5kfUGByDDhCNuhk7MutBJxmpNq59qvZgtRPTjwLwvqveblz9BpGq52WBRqS4wFbh2lcV1vPlGwX+/dn8PIEJxcndW6GTbNaCdoBJQptRMoTMtOzVlh5o3kqY81rKDBFIzH24NScaeayU9Zzlr0ScKJJnfKgh3sOX0hrvgNTxhSEhNozzCTPPsaXcw70eZ8PPGeQU+2KqgKWrgALnbs5267TrLfxvm6npi6F8f38nkrNygtl0yh+sf/N0C8yEe/Foz/yU3c4n/cdBX2eP/V7VVyVXI5T2CL91REn4NLR0p2Iw9b4OTes396x7abYbIixBdQuzGDc+Qe7FdsbaMV73Irqi86beiicPW4f82zHcCrto1+NqqCJ3U3ZOOndwcyV9WRvyONHWZeRP+fPzLBaepWJGYqK6nJCVncH3uRDkrcrSjnXtZMCU+zbgClvDHJiK5elbiLP5CQgNcqCXh5rOIuNH5diusqNqoae+bkF+7kuc02/9Th+wmyyctIQx+qi0p5NeWOwOALYRABQqQ5K/nJ4PtlvWLuV6fp7dvrNNNU5yd64KeLynAxEXAGbV20ic4Dz668ex9edB+g5lgiiUas5SaQcdTmnHKVu2ihG1efE1bmhHSV3FLVnZeC8uJZXe9gC/+HoOdgPWjC1HkckhTdpNC2u64JHtDaebpxP49ocnBzsdT5YWUX6Kh8m71huTr2adVNfiKl8wmpFqCoIgZKaQtuU0dTM6628HKqGV5qB2CpgJSmJmgsKmJW/iyy1DXBSp7XxUsvpPLtlJiYNVs99pMemXP8vsWi359rFnbJq0sGeQDY7dudTsmJ9n2XopQVijzCZQuZ97bO2Ln1GsdkQqkQR0RvUxHYeK0SfPyYgNfYHgtxf/lXUoKePC+PD2tNepLTl30iujK9zQzutpdl4F7Ww+dSXep174/NTyKiReEbbYfQUhAaW5gDK2k9jY2/dx7Pd6U/j+a0zKLm3/w0Y7dgxkt/y0BqcEkrs3X67WIg8aRzenCQ0q8CTpdIw38fB8x4a/MIYIMwW2hZO5qqb3uGGtO0dm7CVmpVVRyZ
hO2Rh1w0P0dMcDULrw30t8UW1PQvBzOs/5Ze575CpOikLuFnVNA97VXcVM33pdu4bvYpMNf7KF0IOTr7CDPwppl59JjhrEtmZLbiED+jfqmskxE4BC4GanYVdbUDtspWoSZ39AR8/OHAlnFtFZI08vjgoNhtBh4LV3PfGUdmix7ptytVprdxdew4HL8hCO3osukq4n2ebyKgZ6ez9kYUn5/wfZ9oSbD9CCNSMNB586A9Msdhpdzf2yQBFJj9Plfyd3Mm9FViz3kadpuGXCqmKTm6s1qz7eP7/W3c2Hz09nfzfJ671Q08Hp559Zuxvd4dfKL1fcpEiNgo4/ICuXbOBxY4jOJXO+AprvCaWrruJid8/ZCjfAahdOp0pS3byYMGbdPX/749MNYlf5X7I66tz+euCWdELKDTAs01onrexcuyDjDOdPLbVdx2dxXNrzkD1KOz7dm+HkgWbvoPzaRfOyjYOXpzUZ5mIc7I+f3o7OPXsMxC9zcp2YjYCFkJQajmCXXSuUf2+sYg/vn0+JU82G84Xg6BZIdPq7uXAMBB2YWGytSbqpn19PduhIOdOpexiB+eds5VC2/tRkq47akY6PG/jt8XPU2yydZuqN+ttPNgwjbd/uhCAxf/9fnj631nnKYqN23NW8Z/vXRFTp6HFexaz+/N8lAw/9yx6udu5yQ8tI22Phn+KStu/1TMvby/XJZX3ukfx20spfFbBvulA5AY7UqI3NPHwf1xJ2W8+6Kiv72d+wKnfq2L91eO6Ff9xztvd6vMMWwu/nv8cP15+OROu3RwpqYaGTSfL1NJh2gf995l2XVW6vCmiBmlRV8DCakVMGseeH1nIVzvXpm6rmc5rq85g3OttyJ19m80YhKi/fi6ur9RyRdpGTjSNnxojp4HBnG/6IphsRhS28oucdzELha4j+0g7wbS3w70/srBy7IMdQXYAXnC7+MXOrxH4JA37MUn2e9sBOOTN6BUxzSxUxprN/Ljwzag4DXXtL6PVzmd3qCENS6OCyAlyUdIRum625Z5dRdWpqczIq+Tboz7mDGsjVmGi54acpcqC4+AxtIbIxrWVAT+21Tu61VeBycFVyeV83dm9b6cp3U1QUxQ7c2yHmT7uUAzGmz0IO411RRUK+arOnt/l8sOMf5IcjgVR6U3HUa0gd/XeTB4J0VfAqoo3J4nH5/wZl9Jpera6ZjyZn0rU7QfQIx39KkLcVjMdZVsytpqGKJlhD42GUyU/LlzPVIsf6KzDZr2Nszdfhy8QeoxnF+7n2sw1zLbGJl6EMJlQ88dQ9s0xpCvdx1QbfQF+c+hCUjf3HhW3ZyJomqxz/ri9vdbYFu9ZTNpeHXk8cqEpu7bD9uA6AMtbsrln09fIestK3sfV4A+gaxq1S6dzufNFbKJ3tDSrMDPe3EL59eMpftqGVlUdMaeO/vqLrisIXaAoekeAmHZ+O+45mortjFLd5JuUPpcBFu9ZTMYOCQ3NUdkP0D0e/vnuXN4pnIjJrFGQ3sg3R2/g2666Qa91CEGK2Rt7BdwPLsXG43OWM9XS1uH+r6GgBIl4pL6oKmDFZkMU5XH0dDMLbNB19OZus5LTFIyqUfhIeXHLDIo/8SMq+o+YHwukK0CJpRanEgq4vc1v5/mGWRz1JeNcnkJqmw5S8tbXplJybi2zreUxkUuYTPgKM7j0ijWkq913ide2TqRsSx4lH9R1m+7KuVMpv1hhxin7OCt9P/Mde2kfqflkgLuOzqLmuSJytxxBj2TwayHQrKJXO1xxeDapa2xkfFBB8HA1akY6x782lSlLdnKOYz920feaZpZq5dIr1rBh7UxMtUcj51XXj5wF6Y3szbcyKb336HWa1UrIU6u3rFGt0x50dRo6WlzAz8/I4bOpWyiwNnBLWneri42+AGtbJ1LhS6c5YGf1/vGMZ2s/d44+zXobu/wWPmkbyy1ph8L1b++QdU31WOzHIj8Mi5oCTnSngaFgqzRj319NsKnvrA6xQrSY+dB
dil/uY58/n7+Vn0Hb29mofsmoN7eie72oWVlY50ygORjDTRBVxZ9iCofI7Bx11wTdrK6fQMo+0HbuDbWF/NEA7LnSzrL5/+SalO1h+9XO0dwxzcfrz82jaGVVREeVAEiJ6pN80KYw3xbsGAE3tDkweUDaraiTJtA6NrVLpglnx+85LgXJQnZYFliFmXuzP2PWuLPI3p+OXnME9OhtIy8Zs54d6Xmc5qgctGxAahzR2tjpT6M2mMnrz0apTnvSxWkoNz8PZ81oXi+fi1bayi0Ln+wotjfQyl1lV7J/cwFJhwVKELLr46sL6jSNF5pm8sqe07rJCvBq83QaD6ZT0KShZmVFNCNKVBRwe/aG2oW9nQaa9TbuqroQ504LlqN1huXDEEjfLvirXMDy5HkoLSbSPxPkPB4y79EJvexaFo5Fn+xmqqMzxGe8nFteay1h2/588o9oodxw+aMouzQdgF8uXsFFSUdwKJ3LDj4ZoFn3syeQQuFj+wlGwWJDBoLYatxcv/4aPlnwIGlq6EW1IHc/L57tomVcDrpZEsjzcXDmM7SPfty6lyeaZrDTncvc1APclNpdAdbN9+M6lIu1tRUtii/qb7vqYJDpvE8G8MogZQGFpxvn8/yWGQivysQ/fUYwxjPNYGUVzuPHMbWOp3xs5zJOe/8/vLKQkpV1aDv7D1sbdQIKLboNTbrRELRpFoL+3ktObZoZ6QxSP8VC0DaW5DeOR8zBKSoKWM0fQ/V52eRfcbCb8tWkzh8bTmffkxMpeKOc4OFYB/U7MRIlI0bGY+v6z3ArBMEzJhH8Tj3PTHo6PB0NjYIOBgLcX35JzJ1bXq6dhmO/BQjSfN5EGkoVdn23q4ND56i3UfOwwZfGM3WzqW5NiZqsMuBHbt/NxNuyqV4vSAv3s/bMHP3xSusYHl/1JRw1ClvnjeGmuU93O1+26DEmH1xGcVkaRFAB9+dM0Ve5djb4zLx/fCqrakppXJvT4QATt7FlVgY1Z1m7Obc82DAtYfq/vdLMqqZTOdXyLiVmJ38aswHG9I712zV7y407lxA8OinkrNHOCAY4UVHAlZeM6Ug30o4mdfYGvLz7o/lkr95G0JM4Hm8nLWEbzEm/+6wjcHQ7zbqXB2oXxcW5ZeXElTBx8HI+GeC11gLufqc9yPXxhJsR/fKpq5j4dDWekiyOzut9vizgRgkAeuTUnBKQ7A14KTHbBlTC7X1KDzs/XLvmJoqfFDjf29yn63csESYTMslG0NZdOb3904Vkv7edYDRy/50g+fd8zOrWeViXBIaUrHWG1cLK05Zz9+/OoezC7I7jI8nwExM74PaG8oMLvoN133Z0I837yDmJDeDbuevoLF5/dh4T//RZXK1MRsLVd95Owap9BOsG3+0fCnprK7Z3t/ODi77D7159nEmWvp9r1z6l1DeBlJR69qN7PAmREWOg7B0nM5lqEvfnrqFsfedQYSQZfiKugCvumsd5F33CzZmr6RpsQ0eg1DcR9CemydnJgilvDLWLC5i+dDt2tTGsfLtHltvs83PDjutI/ZUDhU/jI+ggLN6zmJrniihaWRW79Ukp0eobuenG/+Cr93/Qy8miJ5MfWkbRS/Xo1bXYm1vIUsYx9vhSDn6lMxC+tUVHejwRNe2SPh/sKePGG7+PNPVvEC2CEtu+7Z19KoECWUmTgsXq6wgU1J4RJX39AbS2+AeObyf/xSq2b57KrHFnUHdGkH+fu7rDEaediq/CDxa+3WHJ4VAslJo7hwy6mWGHTo24AvbmBfhyyg4KTKE3d2dGhCvJb/40oRrJYCRCRoyuqFMmcuiCDKZ9fSf3jV6Fiug18n3B7eLHmy4l53krpk3bojqylD4fzp11THpkGQA/X9K+wda/R1zxW9djOWwmY4ckd8sRtKrYrgPKgB/7hzt49k9f5snM85D9J0Wm+KV65IFDoQ0Xnw/HJ+UU+QuYdHBZZ5ldNVGZ0UmfD/uHOwaOsi9lQs4m/YtmUn6x4PbJHwK
h0bpHqmSsqyVY3xBVa5ETRauqxlR7lOz96aQczOGFzed0OOK0M76phCc+W8xDWX0/i7wP2pDDdG6J/BKEWceleDvWrrplRGhri/jXRQoZDOI47GHyx9/qOJa+S4uoM8CICWqofvBqpj4DhLQ7FWS/YSV59T60KIeilMEgsrKa4r+HXqo/Tb2Ku3LbOrJ39EXhcwJHeR00NKM3NcclK4Xu8ZDz+iGwWpBK/wpOVtV07naH88rZNvkpLk/vViZav0E/SfdJmovNnD55P1ck76bb8oOnLaGUL4TbcDCIXnMEy3E3OfucvdanzdsOkFuWjLT1M7Coa0BrGV5A+aivAffMiJCoSL8ftayaUX8e23HMsacmoimoR8zRerK2utieNYFbHU29Tr+ybRqj3jWRvqaSYCRT+QyA7vXC/jIAxj+b3Ct7R0/smw6EXGHjPBMa1g68roVMzeJsF57omFthR00u/50UmsrrUlDlSUUGAnGWbAB0LdTX++jv/R2PBFFVwINlREgowiMcy9udwVUSLWOY1tiIZYtGsaeQ94/O7nW+cG+ApO2V3dL+xJQuhvj9kVjjH4NokP5ZM0J38X5WuI1KQk5DrfHzdEtUIq+AAwpHtWTqtGqqgiYerVvYb0YEgxNHa2mBjZ+Rs7Hv84n20jD410P/dCcpn0JKz+PxECbBibgCtlea+cPBc3k79Si7m7JpXJtD/gAZEQwMDAz+VYm4As6/J6RsqwAnbmPka2BgYNAPIpGSYBoYGBj8K5FgybAMDAwM/nUwFLCBgYFBnDAUsIGBgUGcMBSwgYGBQZwwFLCBgYFBnDAUsIGBgUGcSEgFLISwCiEeE0IcEkIcF0J8KoT4arzl6g8hxM1CiE1CCJ8QYnm85emPk61eAYQQ3xBC7BJCtAohDggh5sdbpv4QQkwQQniFEE/FW5b+EEI8JYSoEUK0CCH2CiGuj7dM/SGEKBJCrBRCNAohaoUQfxJCxCSG+YkihEgXQrwUbqeHhBDfHMp1CamACTmIVAILCXk0/gx4VghRFE+hBqAauBd4PN6CDMJJVa9CiPOAXwPXAcnAAkhoz54HgU/iLcQg3AcUSSldwEXAvUKIGXGWqT8eAo4CucA0Qu122UAXxJEHAT8wClgCPCyEmDLoVVLKAf8B5cDtwHagGXgGsAHXAmt7lJXA+PDfywlV4JuAG/gIyAF+DzQCu4HTB/v+LvfeDlyWyLISUsLLh/h7jHodRFbgY2DpCfyWuNUp8A3gWeBu4KlElrXLfScCNcCViSgrsAtY3OXzb4D/SzRZCcXc9AMlXY79DfjVYM9gqCPgK4HzgWLgtPCPGep1PwMyAR+wDtgS/vw88Lv2gkKIh4QQD/V1EyHEKKAE2JHosp4gRr32I6sQQgVmAllCiP1CiKrwFLT/FBZxkDP82QX8AvjBEL8rbrJ2OeYhpFRqgJUJKuvvgW8IIRxCiDHAV4G3GJxYy1oCBKWUXVM8bwMGHQEPVQH/QUpZLaVsAF4jNB0YCi9JKTdLKb3AS4BXSvlXKaVG6M10entBKeUyKWWv6YUQwgw8DTwppdydyLIOA6Ne+5d1FGAGLgfmh7/vdEIdJJHkBLgHeExKWTXE74qnrIQ/JxOq1xcJKZtElHU1ISXWQii8zCbg5QSU1RmWsSvNhOp4QIaqgGu7/O2ha7K3gTnS5e+2Pj4PeB8hhEJoKO8Hbh7id8ZF1mFi1Gv/92lPn/JHKWWNlLKO0AhkcSLJKYSYBnwZ+J8hfk9X4tZWpZSalHItkAfcOITvjHW9KoRGuy8SmuJnAmmE9gQSSlZCSxauHsdcwKDJDkeyCdcKdCQkE0LkjOBevRBCCOAxQiOhy6SUIwmnH1VZI4xRr4CUspHQqKdrtKjhRo6KZp2eDRQBFUKIWkLrj5cJIQbPc943sW6rJmDcMK+NpqzpQAHwJymlT0pZDzzB4C/g/oimrHsBkxBiQpdjUxnC0t5IFPA2YIoQYpoQwkZo8yGSPAxMAi6
UUo40mVxUZRVCmML3VQFVCGEbgbmMUa+dPAHcIoTIFkKkAf8JvD6M+0RTzkcJKbBp4X+PAG8Ai4Z5v6jJGq7HbwghnEIIVQixCLgaeDfRZA3PeMqAG8P9KxW4htDm2nCIpqythEbqvxBCJAkhzgS+TmiWOSDDVsDhBedfAO8A+4C1w70XgBDiESHEI+G/C4HvEWrQtUIId/jfkkSTNczPCE1Rfgx8K/z3YGuVMZf1JKzXewiZde0ltCO+FfhlIskppfRIKWvb/xGajnqllMeGc+8o16kktNxQRWhn/wHgVinlqwkoK8ClhDbTjgH7gQChl3AiyroMsBMym1sB3CilHHQEbMQDNjAwMIgTieqIYWBgYPCFx1DABgYGBnHCUMAGBgYGccJQwAYGBgZxYkimUucpVyTUTt0/9edEX8dPFjnBkHUkfBFkPVnkBEPWkTCQrGCMgA0MDAziRkLG1jT44iCsVkTpWHbfGnJCKv2fVuSug8iAP86SGRjEH0MBn0SY8vNwTx1N04T+H5v9mE7a5y3on+6MoWT9I1QVb66TvywIhUr+btO/M/aFyZi2HUA/PqirvIHBF5qoKGBTfh6B/AwCLkuvc/ZPDqA1NEKiOYAIgZqeRtus7m7xtho3oqIWrbExToKFMOXnUXd2PsfO9XPJqZv6Lbe6ZjwVH2VS6C9B27m333IxQwg0q+BcuwbArV95kyf2Lia33GUo4AiiTpmIb5QT3dJ9VVEJ6Jg/3IYMBuMk2cmHMJnwnz0Vaeq9Qit0ibnFD+uH6xHdnYgqYFPeGKTNwrEzc6iboWMf4+5VJksZh6O8Geqb0Juakb6hRMKLPsJiQS8ezdF/93Q7rqxLI2+VhDgrYPe00Rw71889c19mSXJ9v+U2pm/g7qSvU6kVUdDSSrDqcAyl7IGioqSm4MlSOw7dknaIh7IEWMzxkyuMmpWFcDqQJhUR1JBuD9qxYXkQxwchMBUXIhVBxYUZeCZ5sTu7t1+f18KEugkoHh8cq0drcYOuxUngBEZRUV1OyMpAJtmouE7HavP2KhYMqgQPOyg9WkSw7NCIB5IjV8CKimK3AVBzQQHeLIF9dh2/K13JxUm9FXBxy1IshzNI35FO2pY69APlCfF2FiYTrWMc7Jj7aLfjpdq/4f3MiSUyL7xh0zjBxCWnbhpQ+QLMtpr5/djnuPvrF7C3sZSMP8dPAasuJ22TcmmYnxgv2Z64zyymscRE0AGmVkjbF8T+cncFLKxWhKqCriODwYRoq0BIYWRlUH7VaHQrzDt/O3fmvsU4c/eIiTVBNwsqf4jJK8h7PwXztoNoTc3REclmA1UdtJz0+RKnHiE0+83KwDu1gOozLWhWyccLHiBbTepV1KP7eak1l/8+dhWFf2lFq28Y0QttZApYUVEz0nHPKwbgqpve4Ya07aQovZMWaFJHFQplix4DYPGexdQ8X8Ron59gecWIxIgIigIDGoycPJSYk/hDwevcsTRA1Z/jKEhmOjVnWjl4XveEHCJBVp8qv67xxIK/cLZdZ5XHzPc+vIaSl7uXEaVj8eY6Ub0a1kP1oVFPvAkr3/qvjOP1G+6nuEPp9g5Xm2tysu/bDwMwWVtGcWUaRFoBi1DHCZ4xCX+yGTmIDnburENWVqN7e48w44GakU7rrCIqrwh2aau9lS+AQ7GwJLmeeTfczzeqfkjGqgNox+qHrYRHpIAVuw33vGJWP9x11Nhb+Xp0P4eCQUrMNlQRWldZOXElt35nJu+bZ5Pz+zgrYCEQDju+5MHf3gYjoyzgRgkAuh5XOdSMdFSrhlkEGcgac/etDp5Y8BirWk7hlWfPovDBRrSWnskPYotit+GeU8SGXz9MdPIEDB1htqBmpAEw4Xef8fPs1aSpjgGvKfnrjYz7uwWxc3/cR8KKzUbLwvF4rmni4Mxnep2vCbpRhSBFsWAVnctmxWYnG379MHP0G0hfJdHq6oa1HBF1O+A6rZUf157JDy76DnsDifHG64mpuJB
D3xnPP+79TbxF+cJz9Z23U/DYPoKHKuMryPM2Vp75IHOsQyv+8+zN3HHNs+z+70nRleskQklKwnveVO5f/zL3r3+Z/x718aDKF2DtkgfY+0M7gQVTYyDlwNQunU7RbXt4f/ryPs8vfPqHzHn5B9xzbHqf51f89wNULJ2AqTB/WN8fcSuI4reXkrnagqMuNCQP7RoGUffs5MYbv89X7/+g32WKeCEVgW6hy1Qu8ch/sYrtm6ey0DWzz/P+ZfU8XPp3pllDGiVFsXF7zir+870rUK5oC61VxRA5dyp7rrTzy8Uruh23tuhIjyfuVjBjHM2kK3TMyPpi35PTeWDuc5xubcUq7OSYmpG2xN3AWrxnMRVvF2E/Kqlf4OfgVx6L+nfqZkGpOdTmetblZp+fm3Z9E8v/peNPVljxywcoNjvJVpMwW4NIkxVT3hhqFxcwfWnnJkv5bRMwbdod9SWKirvmce6Fm7kt+11SlM6+XxZwc/Wdt2Nx6wS+qvHtuR/RHLRT8uE1BH2mbvVabHaimwktYQ6DESlg6fPh3FnHpEc68+iN/ciHbWcZeksXEyNNQ1itHLpcMjdpH7ZhJ4uIEkIk/PqvVlWNqfYopn42OSqvmEjLxM7hnFmojDWb+XHhm9xv+nKsxOwgmGxGzfVwUdIRwEJN0M3Cp3/I+F216Ali+dLOC24Xd2y6lPxXu3ei2ePLOcNW3a1zJkI76dnvfr5kBRclHeFQQxpmD7jzBBec0nvXWEZQdjUri5aFY5HXH+uleG+rmc6LW2bg3GMm9aBG0juf43Q4uDDvR/zkume4JKmGm6as5m/fn81hdwaz8ndx3+hVHdcv/sloLI9PxfXhfrS6gTedh0vFXfM476JPuDnrAwpMoVG7W/fyojuP+5cvo2DVPqTHQ4E+hZf3LMTkkYyuCC2XFMulHXtZMLJ6HZkCDgaRldUU/73LaKaugWBPUxchUFNc3D73bU41e7CKwacpiYJIgA4HDLoDLzWBLhWgc23VhEqW2oqIw4+QisBs1nAoIVtwVQiCSXpol3yAUWc8sCl+LJYgmtXW7XiyyYctURpAF3r2u5+mXsWh895lYtZR9iyECZn1fD/rfbquDy/es5i0vTryeGtEZBBOB81jVf42cQXQfR3HrgYQHpWMXQEcGw6itbaiAEGHxCZCKQgvT97BvMn78EoTOaqHTLVTVo/Pgt0vozJLUmw2apdO59wLN3Nz1gcUm0L7UjVBN080zeDxf36Jic8cJhhe001adwDnZ0kQ1JCtHoTDTsbowuEnnOrBiIeiutcL+8v6LyAEakY6TWePZXHSi6SpiTvNN4gMpvw8jhWbKEzvtJ02I1Ay/Ehrgs1+gFMtdVw1YQvLzzmLopZZHceL7asxd3lZpKoe0rKOw5zTImaIP1y69rvU3Tl8NmcMl43aQlKuj3xTQy9ztIq3iijYdgw9UhuIQQ2TG9Z4Sphm7W4ZclHKFjadWsCh+kJytLFIMZagQ2Hml3cx3VqNVTjINVnI7WgKIVl9MsBdR2dhfieFpD1H0N2ReVkAHXqoZeF4pizZyW3Z71JgcqAKhYqgm8caz+Bva89iwjOt3SxdtLp66DIKV/z+juXVSBD13iAsFvSiXLxLGknvMn2uCbopd2dg8iSITZJBxHBPG03TGX5uG72h41ir1LHusKM5dUxjCzqsIBLBAaLA5OS7aRvJXHCch7IWdBw/x7mz23JZvurjquItPHrluREzxB8uis2GyB8NQNMkyVRXJec5KsK2q50eqJrUWeM1kb3Vjzx0OGKOT7K5hcztHn6/6qucetGjzLcFO5YiZlvN/KL4ZZYnz+fDqeMQAiymIA8WvNl9OQcISI0jWhs7/WnUBjN5/dl5FK2sQquqjqiFRLse8lzT1CHH3kArtVoSLzaezSsbplP8UhA2fhax7xwKUVfAisNB81gnW2Y+QlcTtWeOn8K2nYUUH0zsoCwJ5TEtBMJiQZh6PzbFpKOI/k27FIcD6ff
HxOynp9OITwbYH3BR9I9qahbl4s1M6rAVNXkg5YBG8hvHY2oXejxoxdvl4eaanNyUWslNc5/uUkoN/+ssc0vaDvK+1hAxQ/wTIvz8FYcDmT+KskvTAfjl4tAasEPptF31yQDNup96TbB03U2U1LqRgcg9e62lBXXDTkqOj2Np1jW8Of9PpCohsyqbUJlhsTB7zHoYs77LVZ393ycDeGWQsoDC043zeX7LDIRXZeKfPiMYBRf1dkerLTMf7ZDjvprz+ahsLOqeJIrX+jC/s3nw+1gsaNbILaFFVQErNhsyfxQNpb0FfmTHfIpf0of0o6OOlKF8sYlGj/VHNSMdvSiX1jG919BzsupwCR891+Pa7xOYXYq1ogG9ujb6iq6L2AGpcTAQ4HdVF9I4O4erb1rFd1M/77CC2ezzc+POJQSPTkJZszW6cnVh475i1mbm89Wk6hOyyOlliP/W/pgp4fbn3zzWSUOpwq7vdnVw6Rz1NuttbPC6WFF3DtWtKUz8/qGoWMHIgB+5bRcTv5/OLc9exeikZhQkZ6Qc5FzH3l7LIF3Z5FN5330qb1VPpmFtDiX3fgx03cGIMH04Wm1441QmPF1N8OC2od2iH302EseiqCpgkZdL2SXp7Pped0+omqCbgM+ECCai1ksQhEDNzuq2gda4sBjvksbwW7wveitfBYmelcrU337KytfmUPSiLaoG8KrLhWYNbcQANOtefnfkK2hXSf68/n+YYrHTdSQ0w2ph5WnLueM3i6iaExWR+mTCtZv5yUNXwLnPcYnzaDcj+6HQboi/sOm7ON7zoLdGcL2yD7o6DIRmk/2zwesKefUt3QQcJ9qvBq2+Ac5toDr8ectNF7N6yc7wVL/vl9uy7Utw/j2FlA8P4mr7PLoyRsjRqj99NhLisiOy8OkfUrKiEblzf0IOPONOWPleu2YDpZYjqOFacggtvI4+tBGbKhRKzDZ+9+rjFJpM3P6d1Zw17haKnpiK6b3ozDx2/aqU+85dwSXOo0D8A+4MROkdu7jnlqv5/Mo13Jsd27W/E6V26XSmhJXaUJ9/vMh5bAvltVP50jWj2NKHdxnA6pmP03C6xuvuKfzP++cz4eYNfZaLBKbiQsqvGs3rN9xPvD0HexI1BdyfIf7kh5Yx7uUG2J8YQXgSDcVmI3jGJCb+9nMWO45gF51Ty4GcBvpDFQqTLKElC4di6TCAjxo2nSxTC1ZhZrPPz237ltD4xmgWvv4Jxaa+RyDxchrRjx+naMVhNnw0k4WuuQRtgsZSlZ039h7h/L6xiD+8v4jcD+m3TDTRrJBpdSeUA1N/6F4v5lYdX6B/9ZKi2ElR4FuuXWSd18Kdyy9lwrXRGRT05Wg1+aFlFL1Uj15dG5XvHCpRUcD+RTMpv1hh2fx/dhjit5O+W0NU1aAlSCAOILEcMVQVf7KZ/xr1IU4lOvbSkTTI74WQHSP20aqfK/I28/wFkv8a9SGO8O85c/ulHPk8Gy01yOXTN/ObnK1xcxrRKg9jqjmCSVURziQQY3uVua1mOq+tOoNxb7Rh/rwM165cJomQE0TxrpqYOJbkfuxmZeYszOdr/CYndmvlwyHU/wW3T1rdcazdEUf1CuZ9tTNyW5rqYJ69kunjDhG16NB99O/03RocHvp+iDplIhUXZDDvq93ND4vfup6xH/uQDcMLVxtxBexfNJPySxSunbuWa1K2d9uZBaj+EjSPnYISSLzsDX0RDzt8qTIkn/rhEMvfk6naudS5izPH7+/2e458nk3KPkHjFBWnGlJe8XIa6ergogCqr/c20Oqa8WR+KjFtO4B2/DiK10fx30Nyy6qamMzk1J3lZBZNZs20cZDgCri52Mzpk/dzRfJuIAlN6jTpCsUvuVEbW9nYdhrfX5zMXYWvMttqJl0x8ZXMnTy87GJyHt8SE2sY1SeHbBUiTp/CofNTGHf+Qe7MfYuuyxiZa8zYdpaFnM+GQUQVsJw7lfKLFZbOW821qZvI7sPp4j/Pfos
qfxpBXWF1zXjKN2aSkxMyfu8Zvb89s4ZuVbEccSdGhoc4UhF085k/k4A09RlreTBecLvwVyVhbmmLgnS9MQuVXJOzi8F9CC01SOMUldJTK7nA9SmaNFEe9PD/Ki5BBgIxka0nis2GKBxD/eTeXcLdZiWnKdiRwWNQ56MooLW0YG3SONoWxeWjCBF0CIqT6rvF09URmCqOEqw9Qv7rJsrUYu4672J+UfwyMywWznXsZdXVe/E844BEmh0Djae4SDrrGD8veLXDsqPdaSR1vxe9sSk+4Sg7CEfm332lnWXz/8k1Kdv7VL4QyogAIU+TjekbWD4qZKwNPaL3A8fmjaJuho5M0kjdkkn2v7gC/syfyRM1Z+EOWLm49I1e52uCbio1K01a36PnOzZdSvZGMFfWEY0xmylvDBZHIOxu2v+O8+XTN+NUfVzg+pQZVgtu3cu7nhLKV4xnVGt8RncixUXjtHTGnX8w5t+tulyIFBfS1juFV1c82SaS7QMvdzTrbezz56O0xM/j0OSRlLVmUBN0k2vqrQe0nXspsJgoNxfymHMBs/PWUWRycE/+q9w64QaUNi+6x9PHnSOHJ0vFmZqC3uYdUHmqWVm0jBV8I28HM6yh59OoeXittSDkNFJZhTaCGVBEnpJit1N+1eg+DcIHYrbV3M1Yu2v0fgDHjFBmDYfw8T2uITsSwvaFlIhgqPH23ORQVR1pEgiTKe6bhgFpwh2w4gn03VGfOX4Kfzs4m4ajrj7P57+q4PxoH8EoBTg5dm4Bp47eR7rqpb+A1kCXNczQ7/BKjQ3NY8l6eF307EAHw+WkqURh3YS3ep2KtjOONrmIY6cn0TZq4OUXb76fK3P393nOrXsJSJ0Pvdk8fmAu6Z/Fb1MjpSzA1p3FvJg5iZtS+w47qn+6k9zsmbw/eQKe0R/iUCykKjoHL05iQn0Ooqwiqv2tbr4f16FcrK2tfWYIUZJC7bdl4Vj0yW6mOkIxyxs1D297xnD3O5dFxGlk5ApYCBRXMs9+77dhG8/eykGTA3er9t39rtH7u7LKY46qo4Tw+rEfkzzYMI07M/d0O1eQ1khNUQpJeaPjnrnj4iR3nyNfCNXxIzvmM/ovVjJX9Z+0M5r2lnVf8vOD7M2MUoduraFJHZ3QFDWuDOKME80sHjXznJy3ZD2/zd0y7Hu86M5jS2sRr+06lew3rLhWrIughCeG5e1NFIlZPJS1oLtnoRChf+E3mrnFj1bt4KXWXJYk13f0/4UffRdH9ZHIKeA+nm3ZoseYVLaM4kPp0Nw9PoawWGhbOBndLJDXH+OZiSuYZrXSrLfxtmcMP/n4Ukpu3hCRwUJU5yma1GnRvVRr/XcuBUmqovc5VYHQWkttMBPhjV62imDVYbKfauKt2oXc+XB3BZxQmTv6oSzgxiNV/G3muDq3nIhzQ7PeRp2m4ZUqtZqL6tYUiN4++LA5GZyGygJu7l++jMJ/VDG+PDE36NodgkRDY2c8ivXbmVhXzL3NV7Ekgs4NPRG6RPGH6qmrKZrmkGjpTkzuLnNrVUXPcPHwQ//bYb7Z7uC0weviJ2svCzu4RIaoKuA1XhPXr7+JibdV91smWDSKgxcn9TnyBbjr6Cxef3YeE//0WfympwnO1XfeTsa6Wkrr9qN7PHF1bhmqc8OCTd/B+bSL1DXlSClRg56oe2wNh5PBaejqO2+nYNW+UAjFBKSrQ9CyG76PbfWOqK/xdiVYdojCv7TyjaofhtM4hVi75AGaroaA7D5jU5CUmG09bxMVRq6ApUSrb+SmG/8Daeo+0lX8OhNqWwkeOdrv5UrLccZ7CpjcuqyXcfviPYupea6IopVVUQnQkYjobV6c68uZ86MbOjII9Ed75P70dw6i1TfEfY0aejs39EdudSvKof0E250uEirqUSe/uPQf/MxyFeOemxz3EJTtNOttPNgwjbd/uhCA9PUHQs4rCVqH0OkQpJtEhy1kf85aEUdKpNeL9Xj3V3y2mkS22nuJtKf
DU3umkaytfkpr3BEdCEZkBCwDfuwf7uhtZCrDtnYDNAzd40HZX0HhS3QYt7eTtlcnd8sRtKr+R9CRoj3LQMlfb2Ttks6U1LfVTOfNt2ZR9HGMXgC6hnasnvS3JRfm/QhtgI1x1Q8Fq/bFNiLXEOjq3NAfMhhE8/sTR2n044xzUdIR/l+ul4DTHDXH6tyP3byWeQbK+bJfJ4v2LBO2SjNCA/sxSfZ7oReClgApnnri2HMU+1ujOSvpUtae9mLHcf9N9VReORGpCdIzj3ND8dqYZE2RbW04d/Tu3zC4h+mhhjQydwSxrdkZ0YhyEMEliJFMKUJKuLzDuL2D463oTc0xGdm1ZxkYt8LK3LybsViDCCHhUxf5a70ou8pjtwSia2h1dRQ+40AOsKElNL0jcn8iMVj2joSkxU3qXp0L9n6V10ve7DjsUCyYzBpSjd5qnbqznALbeN5om8sbU6f0XehTF8Ub/dgO1iB0CT4/wSgHABoJWnUt2Wst1KqjuMDWWacPl/6dphI7GoJUpY18UwCH0t1ZQ0ZyAy5Mv/17CJjWuXAcaojKsknCpCeIh3F7XzKIHfsoenwqejjRoK2mAVFRgxbrJRApu0XmN4guelMzaZvrOPxcMbd+p3vi02CZE3NL9NYstZYWLNvKKGwYhXdj30tOHe2wD5OpRET6fOgHK8gRgrK0YigJHQ8ljW0fylgAC5rUKQ96+GnFJagHDodG9FGgr/49FNrrPhokjAJOFGQw2C1SmLHx96+B9PnQD5Qz2ufnffPsbufG7A1EzXmlHa2xERobsfSzzHwytkPp8yHLq8jemhIyJe0Hj7TyZuOZMXHE6dm/h0I0695QwAYGYWQwSLC8ok9zw5NsQSVhkIEgtho33/vwmv4LBRTsVSbyH/74pHzRjARDARsYGESN9qwZJUvjLUliklj5wQ0MDAz+hRAywXbQDQwMDP5VMEbABgYGBnHCUMAGBgYGccJQwAYGBgZxwlDABgYGBnHCUMAGBgYGcSJhFbAQ4gMhhFcI4Q7/2zP4VfFFCDEhLPNT8ZalP4QQNwshNgkhfEKI5fGWZygker2eTG1VCPGUEKJGCNEihNgrhLg+3jL1x0lWr+lCiJeEEK1CiENCiG8O5bqEVcBhbpZSOsP/JsZbmCHwIPBJvIUYhGrgXuDxeAtyApwM9XqytNX7gCIppQu4CLhXCDEjzjINxMlSrw8CfmAUsAR4WAjRT2SlTgZVwEKIciHE7UKI7UKIZiHEM0IImxDiWiHE2h5lpRBifPjv5UKIh4QQb4bfXh8JIXKEEL8XQjQKIXYLIU4f3m9NPFmFEN8AmoB3E1lWKeWLUsqXgSEnhjtZ6tVoq0N6/juklO1hB2X437hElHU4xENWIUQScBnw/6SUbinlWuBV4N8Gk3eoI+ArgfOBYuA04NoTuO5nQCbgA9YBW8Kfnwd+1+VHPCSE6JmX5D4hRF24Ms5OVFmFEC7gF8APhvhdcZN1BJws9Wq01UFkDR/zALuBGmBlosrKyVGvJUBQStk1bfs2YOQj4DB/kFJWSykbgNeAaUO87iUp5WYppRd4CfBKKf8qpdSAZ4CON4qUcpmUsmtE9juAscAY4FHgNSHEgG/qOMp6D/CYlLJqiN8VT1mHy8lSr0ZbHeT5hz8nA/OBFwkpm0SU9WSpVyfQ0uNezYTqeECGqoBru/ztCX/hUDjS5e+2Pj73ex8p5QYp5XEppU9K+STwEbA40WQVQkwDvgz8zxC/pysxr9cRcLLUq9FWh3AfKaUWnirnATcO4TuNeu3/Pm7A1eOYiyFkmR1JNLRWoD1tKEKInBHcayhI+kwaMySiKevZQBFQIUIpmZyAKoSYLKWcPoz7xbpeR8LJUq9GW+0fE4OsAQ+AUa8h9gImIcQEKeW+8LGpwI7BLhyJFcQ2YIoQYpoQwgbcPYJ7dUMIkSqEWBRePDcJIZYAC4C3Ek1WQlOjcYSmOdOAR4A3gEXDvF80ZSVcnzZAJaTQbEK
I4b6IT5Z6NdpqSNZsIcQ3hBBOIYQqhFgEXM0QN45jLOtJU69SylZCSzm/EEIkCSHOBL4O/G2wa4etgMMLzr8A3gH2AWsHvmJghBCPCCEeCX80EzKVOgbUAbcAF/dY5E4IWaWUHillbfs/QtMRr5TyWKLJGuZnhKZTPwa+Ff77Z4kmayTr1WirHbJKQssNVUAj8ABwq5Ty1QSU9WSqV4BlgB04CqwAbpRSDjoCNsJRGhgYGMSJRHfEMDAwMPjCYihgAwMDgzhhKGADAwODOGEoYAMDA4M4YShgAwMDgzgxJPvP85QrEspU4p/6c30aY58scoIh60j4Ish6ssgJhqwjYSBZwRgBGxgYGMQNQwEbGBgYxAlDARsYGBjEiZEE4zEwMIgRpvw8AvkZBFyWfsvYatyIilq0xsYYSvbFRc6dSsBlBtH/Mq79kwNoDY0wTI9iQwEPgJqVhXA6kCY1dEBKhNdPsOpwfAXrg66yiqCGdHvQjg0rHMXIZXG5ECkuMKn9ytFeRtp6KxRZWY3u9cZC1EHp1QaGQgTbiSlvDNJm4diZOdTN0LGPcff/tZ+mMXqNHcsWDa2lZ3ja2KLYbKE24BpCJMgE6leKzYbIHw3AnivtmEZ7MJm0fstnKeNwfFKOVt8Aev/l+iMuClhYrQhVHfDN0g0pkYEgMuCPrmA9cJ9ZTGOJiWA4iJ0Igv2YJPupJvTW1pjKMhhdZTW1Qtq+IPaXY6+AhdVKYOo46k6zE0yClAMayW8c76VQtclFHDs9ibZRvdtA8VMSUV6JDAZjJXafKDYbLQvH0jRORbMP/ToRBMcRScafR65QahcX0JYtsJ9Rx+9KV3JxkhufDHBc7+wLKYoNs1C5rWg6r9nPoNhTCBs/G/F3jwQldxSNs3NpnDT4Kmck62skKA4HjC+g7NJ0AH65eAUXJR3BofQ/6yhuWcpYXwHWzX60puYT/s7YKuCwwhWTxuHNSUKzDk0Bqz6JrcaN3LZrRN8LnNBUofLrGk8s+Atn23UAmvU2HmyYxqrKBdj+uS3mL4SB6CrrKo+Z7314DSUvx14OUTqWA5dbue2815hjP8CNO5cQPDoJZc3WbuVq5jk5b8l6fpu7pdc9Fm7+Lo6ao/FVwEIQPGMSwe/U89Skp5lmtQ750jqtlTsOL6LqzyMXY/rS7fxy9CoylNAboFHzssGXxnN1szrK3Jn7FgUmO7/N3YK2SOH9o7PJ2Tjy7x4JnonZNF95nF1znx607FGtlTsOn091BOpruLQr3/KL09n13a5ZkTqVryZ1VNH9hVK26DEmH1xGcVkaJLQCFgI1OwshBHt+ZOHJOf/Hmbah7QF2KJSlJ/iVJhOKMwlhswEgpUQ7emzY6zUpip07Mnbx9Yc+5UdzLiZYe2Twi2KAmpGOatUwiyDx3lfdfauDJxY8Gn5pWVh52nLu+M0iqubEVaw+UV0upJTItrbuyj7cVif97jN+nr2aNNXR/02izGFPCjv9yWSpoRnXP1sn84f3FzHh5g0dZX61YRH3jV5FppqEQ/ETtId+W7yXIQbDJwMc03zsCaRQ3ZrCEBJIRBzFZkM4k9DGj+HgxUns+3bv9InNeht1moZfKqQqOrmmyCWciY0CDjfoa9dsoNRyhHxVx6XYov61/rOncuganVfnP4SGYKcvl78umIV25GjUvzumPG9j5dgHGWc6gXmyAbt+VYrJrVLwlh/Te5tDB7u01cWOIziV+ClfAC73cr/py4SzgiA1nYnenej9FP959mZKr6nm7jGXdVPSichdR2fx+nPzKHxsP2rQw4mvoI6c2qXTGfeNvfx3wcOkKgBJvcos2PQdnE+7cFa2hZX0wxH7/qgqYFPeGGoXFzB96XbsaiOLHUewC0uvYXxfFL+9lMzVFhx1Gopfp7TG3W+j64vmJXPwX9XIX09ZQak5NH0ca6rh4zca2XXrNEybdifMRs9wUTPS4Xkbvy1+nmKTDVUo/L6xiD++fT6ly5u61de+J6eT9a6
VzHcPETxcHRV59j05nQfmPsfp1lbAzmafnxt2XEfqrxwofDro9WUBN1ffeTvp6w+gtUXn2bTXWa69hdsyn8ArzWw/v4C3qifTsDaH/F+uQwhBqSXUVgdj8kPLSNujYfJ2zqqELjE3B4b0mwdDq284oSU0qzCTY2pG2uKhzkK0971HT1nBQDOyG9LX0nqJlffUGeTf83HsBAxTcdc8zr1wM/+Z/S7jzH2PaovfXkrhswqOT/YjPW2M9xQwuXUZO2/sHCn/9FvP8PPUKxj3nAvWbz8hGaKmgNUpEzl0QQbTvr6T+0avQkX0O5pw615eaR3DvU9f1XFs7Ec+bDvL0FuOd2zCnQhtmQpzciqYadVQhRkAp7DxX6M+5JKUaZjUE9jVjgHqlInUfCmDptMC2CvNgzdIIRBmMz8seIvxZitmEfo9ld50HNUKctfBbsVnjy9nx2elYDFH6ycwe3w5Z9iqSVFCjblJt1Nfl0zmph2Dvjw1qeORKhnragkOc0d5ULrU2WTLcVIUC5qULLTXM956hJ+UX9FRVEUOaaCQvlvDtaYM6e6xKatpJzRgGJATWDJ7we3ijk2Xkv9a/Jaiuva9gRRwgcnBl1N38MaYqbETLkzFXfM476JPuDnrA4pMfeul4reup/A5gWNT2MpBStTKI6TtSe1W7pKkGu7O9RJwmjnR3hU1Bewb5cQz2ct/jXmdTLX72+W2mumsrhlPqzc0wggGVPRqOyV/r+ksVNdAsMU97I4oVXCqPqyie5WkqQ6kMty8ftHDN8pJ03Q/9535An8q+9KQr8tSWzGLzqUHDQUlSK8NwmSTD6kydMuTE0Cx2ahdOp0r018kVelsUppUkJroNdPwL5qJe5qXWc6y3jfztEVH+XYhS20lWw1PNcPVka66wdy3ylzeks2Kw7OpbEztda7ggBu9qRnpG0pm98jSUe+uF7GFX8BbPYVYdjpwfrQnLlN66L/vtXNbzXScJh9fc33KRPNRppRWUv/NObj+vj5mMmadWcP1mWs6Zo5dadbbOHvzdb2UL4D0+1F93dvJ/zsyB/MOB9ajDSf80o24AlanTMQ3ysnRGVZKC8p7De1/31jEa6vOIGurxNkcGtUKTWJuaUXb30eH/BdB9QZRG+y83XgKNXUpjB+osBCoGek0LizGITq72UZfgDXVY7Efi9jYa2DCcrQsHM+UJTs5x7EfuwiNJiqCblY2n42tsvc0vuZMM5edsp6z7JUMPWN4jJASGQjw/youIs/RhCIkr2ybRvoGM6PKA72KKxUH0fyxs4ZRbDaU3FG0lmYTdCjd6n2jL8CblZNwlelodfUxk6kr6pSJtObp5Nsa+jzf3v91E2yYUcQPClZxY977/Oyqi3EcmYH5w21RtX4RJhP+s6dyfu5aCk2yY+bYrLexzW/n+YZZHPUl41yegmPjviE5Wby4ZQbFn/gRFTUDluuLiCvgmi9l0DTdz6TicpaMDm0CNOttbPC6APjj2+cz7vU21O0H0I/HdtdTkzprvKbQGyzBcuGZK+vJez+XT8tPJbt+YAUqLBb0oly8SxpJ77KU8uejZ9O2IZPszxt7vYn3NGVjbgW0yI2L2uXwXNPEgwVvdiw9AKxpK+SV7VMpXtt9ZGjKG4Oc2MplqZvIi+Bu8nCpCbqp1Kysdp+K0hLqDrLVQ/mK0zlgESCgcG+ApO0VfToKxGqU2e64EsxN4+g0J77zW7CYgh31vjfQyl1lV+L7qO/nHytqvpTBhGmHmO/YS7sJV1/9Xwo4dLyQFy6YyR/GrCb51H9w3bXXMfETR9SsNxSHA0qKqLhOZ6FzF7ZwMvBmvY3327L41b7zaXs7G9UvGfXmVrQh7hHZKs3Y91cTTAQztKYZPp5Y8ESH7axPBtjqS2LZhm+h+VRKlzchdx1Ej4MNbYvuZdnW75J3PICw21Dap+NxcvToSrCyCmtlFdlDKCtMJlrHONgy81FCiVhDrDk0ltxP/Oh92EvXrc0l+0AA2eqJmMz
9yQHwibsY1zYr5nc+BkVFsYesXmouKGBW/i6y1Da6jn6DaNRqTmKdJPa11hKeqjiDyoNZpO8OtQfd6yXr4XXdysXTJaSrc0vLOJ0xU2rZfOpL4bN2mvU27qq6kMMrCylYWYe2c1iJgyNC02kBfpK3lkJTgDotgEdKPm7L5ydrLwPo6P8y4CfHOYu1U8dizVvHmdaQtdKPHBdDFBSwMJkQebns+0YKHy94ILwEFVp62Oa386t956M9n0XO46G9l/5eYMJiQbNGbn096mZoXhnk49ZTyHvahO3d7eh+f9xGn026TvIryXhHaZDdOcnvcPTYvjsucp0witKxdtkT0U/Vtm/qxXpdUJhMKBnpuOcUAXDVTe9wQ9r2bqPlgNQ4GAhwf/klqMHIvSCGwsu106j/IJeirQFsNU3oXV7KiUJX55abUit7nX+wYRr7/jqRgjcOxd2d115p5g8Hz+Xt1JCpZ3PAxub9hZQs3QT0r9iijbDbcU/OCJuQdZqaaVLn+YYzaHs7u0P59odisyHzR9FQ2l0B99fnhkLUFXC788IlD23lBxd9B/VYE3pzC3pbW8wbebHZyYZf97bh+6BN4fr11zDxtu7jz0RxbuiGEAiHHV9ydyuOmqCbgM+ECCaG4nAofoIOUIryqZ+b06Peu4+Wm3UvD9QugnOrYv6CWDlxJUzs3gZG6rATabo7t/Tm1V9/iZx1NUi3G2EyxdWDsP1FX9VxxE0JdfESJ8QAfaZJV6jypKL6B3/WIi+XskvS2fW93s4awyUmjhiqUCgx2/jdq4+jI7jqkdso/EcVwfKKWHz9oMy3BflkwYNUr+8+rBytypg4jJwIpuJCyq8azes33E/XKfzCp39IyYpG5M79JILa+Hn2Zm654WOavqtgEzoJt9nWg/Y2ULleOekcdlb89wN4pcLXPryZ4idFp1OJATBwnyl+yY164DCjWrfGZXQecQVc+nsP//WP73DTdAvjzj/IqxPeAkJKeJLFgSZ1fnLdM/w86wrGPZd6wobL0UAVCmmqA5ei9zqeaEhFoFtCo/muqG0Cxe0lGOcANu1YhZlck5nsPvznu3KizhrRomsbaHfYadNCU9UP3z+N4pdbE6Kt9kWx2YkmdZbPf5zVp5d2OpXcG3vnhkRkoD6jNrhDZmaD3WPuVPZcaeeXi1dEVLaIK2C58wC2fSYKK8ZQ1VjMWRdfytrTXuw4rwqFS5JqqPrKuzxi/xLFrpmYV22KtBjDIhEVblfUKROpuCCDeV/trgiK37qesR/5kA2JFwd2sDo9EWeNESMlestxrnrkNjQLzFu8nTtz3+pmKqkKpcNhRwvPJR5dXMcLU6bSUDcDAsrQHGUiSP7LKrfsuIGgAwIunZxTjnbrU+1yn2ENMNWylQuSt/GXjAW8J+bFxcMskeivz7Qj9MHni/5FMym/WGHZ/H9yUdIRugboKX7resZ+PPy+FxEF3G4Qrlsg56PjiF3l6AcryAGOaqOY7P4WVnOQD2Y8QYpix6FY+HbKVtZNGUvlp2PJjIQQPUjbG+TF7aczNamCb7vivAYVIdqdW+7MfYuuU6nMNWZsO8tCjisnERt9AX5VfiEpG60xcwvX29oofOYwUlXY6D2N7y9O5q7CV5lt7e2w0851qZtZlPwZTbqdY0HXCTnKRALnRwdI3paENKnoaU7q9oT6VFdumvwhVyTvJltNYopF46as99EvEqyrn0fO41uiUr/CZELNG03VJXkdx3I+Oo6yqzzmJqb90V+fWbxnMWl7deTxgcPK+hfNpPwShWvnruWalO04lO6xIkba90amgHsY4mda3azxzCKn2kWw6jD6wQqyA0GaWkZx7HSBb3rnGKdSs1LZkoK5NTorls5t1YxKzePnfJ0tp20bsGyKqY3THYe4OGnwSlSFgkNoNC4sxtJSQNLuo2jVtTHxhNItCnanp2PE5pMB7jo6i9T9XvTGpqh7kPVC07A0B7nzyGn8PHtzn55PmtSpCHr4c8O8XmX
Wtk6kbEseJR/UxW7zTUqCZYcAyH/dTJlazE1nLuHM3IOYhUaBtYFb0g51uyTX5CTXBKBRp1Xxz9SjXTaZoo9WVw9hxwphtTKqLgdPRVa3Mg9c8lXq5iZzXdpGCkxOSsw2bst+hzuuduJ5xgFRUsC+oky+/K2QB9sbB6bgKXeQfNASj8BmfdKzz7RT8VYRBduOoQ9i8lZzppnLZ69nadpGsrt49Eaq741IAfc2xLczadTsjngDMhCE462YW3WSJzdj7TIdHchpIBIEK6vI+BBsjbm8v232gGUDyfD3SR4cs7vHLp1saWSUau/wlmknXVXxLmmkJWgi8I9cUj5si/qGjepy4UtVSbZ3Kvpjmo/Xn5tHUWUVWhzWfmUwiPVQPa88exal11STY+ptiO6RVlY1zePDl6Zzyw0fk2vqVMAVvnSSDou42K2a8sYg/QFGfeLjeG0G76dkopvAM0Zn4oWPAXCGrYUUJf4R5vrKdmJ5u/uyXbGcyXLOgrnw3bSN5JqcFJkc/Ff+a/zIfHHkhVJUlIx0msZbO2I6v7RuFrZjPqQvceJk90fGziCyorrvgZMQmIoLkYro12koUn1vZAp4AEN8hEDNysAzo5DKK4IcnPlMRxm37uWDAxPIX+/r02kgUrQ7N+QMUk7NysJ9ZjHfa7um45hi1bhh2mq+5drWK/5nimJny8xnAJj06TJSN0U/ZKE2uYi6qYJLcveHPkudZl2l6PEDcYtLLINBgmWHKHywkbvHXIa09zEKCCjYD5sofvwADd9VyY29mL1QkpKouaAAb5YgZb9O1oZ65KHDCJuV4MR8lo1aAsDjc5ZzhjXQb0yDWDGUbCfmVZsoNM1iuWkeeWc2sDSlNqoyqS4nbZNyaTgrpGzrtFYK3tQxbd03pGwxwmRCNwtUNTT8irUjjmYVCLMJYbYgVAXaPUoVBeGwU37VaHQrnFW4vZfTkE8G2BNIofCx/QRHOPCKihmaVEJLE62zikLK97zHu51/0Z2HrLZhbkmMtD7asWPYXz7WLYOEPv90XvjJ6Zw3aWd4+tk3IzHCPhFqznRy4VfW8+tRn3YcC8jE2DTUWloGjj0rBIwaio9f9BFmC20LJ3c4hFy97zIOrSoiY2dqR5nRoXcr606dwFTL9m4KWAf0/rxgokTXbCfvtqlc/+F1lLzSw2FECJJ2HSVl2xhemTCNpSlvRVeozHRqzrRy8CsP4dH93F17DubmwJBd3dX8MTQXmShKa4y6I46UvZ9XwySV5L25KAEN/6hk/CnhTi7Al6zy+g33d7Ga6K589weC3F/+1YjIGnEFrDkkWrqT4zNG4V3SGB75dlIWcHP/8mVMTCA74L4oemBvR5aBeKO6XGhWsKudwWDapJ/d/tyYu+6e1AiBmpHGgw/9gSkWO2DvcMTony6R5qROvSbilr0BwmEyrRqmUZ0OI2p6GsJspmzJGC69fA33ZscuH5wmdcqCGmUXpqLUfjqk5UTV5aL86jFccmVI1joteo44SkDH7zNTE3R3m8nuvPEhSpJuJJik8835H/dRZ91nvV2zd9xf/tWIyRpxBbx2yQM0XQ1mZDhQTPeliavvvJ2CVfsI1n0xLBNiwa5flXLfuSu4xHkUMFOntXJ37TkcvCAl5LFlEBPWeE0sXXcTE79/KG6hHvtyGFGeUbk9/20mml8gXbXCCUeljS0923M0MX+4jQl1E1hQ+cNemSzWLnkAgBTFMqgc0creEXEFnK0mkd1lz6o9keXbP10IEMp20CW+ZqJSfnsJl7smc+hrgh8sfLvXznhMselkmVq6TYXbNPNJ46n1RaA900jJk81DMtyPJKW/97C0+bvcet6b3JJ2qJfDyM1ZH1BosmAVnaO2Zr2NPzaczjt3zMdWP7AVUDRRbDaCsyZR9EDnRuttGU9whq0FqzjxrCknigwGUTw+TN7eyxDZQ5zdLt6zmJrniihaWUUwwi7qUXNFfsHt4hc7v0bgk7RQKvf3QobQmseT8MoXwPT
JLkyqyviWEv7YspiKr6znNzlbe5XrY3kp8giJmhAOxv96nLn9Uo58no3rgGDc9jbkzgMxl0HuPMDY5yfzR29nO+zqMOJS7B0OL7363YfbohN5UIRCdapCYbQq2fO7XDRfXu9iqiQ7s4X7Rq/qOJasWLAKOy+4Xfx406XkPG/FtGlb9BxxjtWTuzaN4sKllC16bMiXFb91PZbDZjJ2SHK3HEGrqo647hqRApbBII7DHiZ//K1e53xVTrI2Qd7H1eDzExzCzmgi0W64bt52gAJ1PK/IObCIPpVwLKkJunmsaSZrPjiVYtYNfoFBJ1IivV4uXH0TfzvrL8y0an1aOLRnRPAFTDjeSmb8p8dRaxtDQaTiELJUBvyYtx+kwDSeN1rn8sbUKf2W7dXvPNGPLudSbDw+ZzkB2TvNl4okVWnr2EtZvGcxlU2paJqCr8pJ9gZIXr1vyLF3h4PW4sa2rYJCSxGTk3vrqv4ofE7gKK+DhuZQ1pMomHqOTAH7/ahl1Yz689he58wtbZgr6+IeHm+kaC0tWLaVUUAxr8g5BBZ1b2SOIxL8vTMlRBJrhZX7yhfzcmot5e4Mtn9exIQXTyKvtz6yTACs3DeZ9Fhl7wiju1spXi64OeWbzMypJMnU3Q5Ul6IjI0Jqm45j92G0miME45ByqCvt7bCwYRTejf0HNopVvxNeP/Zaya01M0/ouprnikivCKIE9E5Zo529Q9fQ6upwbNAYpfXWVf1h33RgSBkxRsLIliCkRKtvwPJ232tiiREWZuRojY1YtmgUewp5/2h3p46crcfRm6MTwb+d0R/5qTucz/uOAkweSdFBP2yM3U53JOiZZQIg/ZhO2ufNMY1CJYNBTO9txpYzh08y0uk1aJN0ZETQvd6EasNaYyM0NmIZJCZQLGSWzS1kbXHz/t8GdnLqSd4blWiHazpGkzGr30F0VV/EYqM1JuEovwhoLS2w8TNyNvY+F20FYl61KSrxMmJJX1kmIH4Bul1/X49rgPPxkutkYaD+MBCJ9EJLBBLDkt/AwMDgXxBDARsYGBjECWF4UhkYGBjEB2MEbGBgYBAnDAVsYGBgECcMBWxgYGAQJwwFbGBgYBAnDAVsYGBgECcMBWxgYGAQJ/4/Cueh1fjXoawAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "from mindspore import Tensor\n", - "import matplotlib.pyplot as plt\n", - "\n", - "train_data_path = \"./datasets/MNIST_Data/train/\" \n", - "ms_dataset = create_dataset(train_data_path)\n", - "dict_data = ms_dataset.create_dict_iterator()\n", - "data= next(dict_data)\n", - "images = data[\"image\"].asnumpy()\n", - "labels = data[\"label\"].asnumpy()\n", - "print(images.shape)\n", - "count = 1\n", - "for i in images:\n", - " plt.subplot(4, 8, count) \n", - " plt.imshow(np.squeeze(i))\n", - " plt.title('num:%s'%labels[count-1])\n", - " plt.xticks([])\n", - " count += 1\n", - " plt.axis(\"off\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "当前batch的image数据如上图,后面的体验将提取第一张图片进行训练操作。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义图像显示函数\n", - "\n", - "定义一个图像显示函数`image_show`,插入LeNet5的前面4层神经网络中抽取图像数据并显示。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "def image_show(x):\n", - " count = 1\n", - " x = x.asnumpy()\n", - " number = x.shape[1]\n", - " sqrt_number = int(np.sqrt(number))\n", - " for i in x[0]:\n", - " plt.subplot(sqrt_number,int(number/sqrt_number),count)\n", - " plt.imshow(i)\n", - " count += 1\n", - " plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 构建神经网络LeNet5\n", - "在`construct`中使用`image_show`,查看每层网络后的图片变化。\n", - "> 这里只抽取了图片显示,想要查看具体的数值,可以按照自己的需要进行`print(x)`。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "layer conv1: Conv2d\n", - "****************************************\n", - "layer fc1: Dense\n" - ] - } - ], - "source": [ - "import mindspore.nn as nn\n", - "import mindspore.ops as ops\n", - "from mindspore import dtype as mstype\n", - 
"from mindspore.common.initializer import Normal\n", - "\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - " self.switch = 1\n", - " \n", - " def construct(self, x):\n", - " \n", - " x = self.conv1(x)\n", - " if self.switch > 0:\n", - " print(\"The first layer: convolution layer\")\n", - " image_show(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " if self.switch > 0:\n", - " print(\"The second layer: pool layer\")\n", - " image_show(x)\n", - " x = self.conv2(x)\n", - " if self.switch > 0:\n", - " print(\"The third layer: convolution layer\")\n", - " image_show(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " if self.switch > 0:\n", - " print(\"The fourth layer: pool layer\")\n", - " image_show(x)\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x)\n", - " self.switch -= 1\n", - " return x\n", - "\n", - "network = LeNet5()\n", - "print(\"layer conv1:\", network.conv1)\n", - "print(\"*\"*40)\n", - "print(\"layer fc1:\", network.fc1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 构建计算梯度函数GradWrap\n", - "构建梯度下降求值函数,该函数可计算网络中所有权重的梯度。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore import Tensor, ParameterTuple\n", - "\n", - "\n", - 
"class GradWrap(nn.Cell):\n", - " \"\"\" GradWrap definition \"\"\"\n", - " def __init__(self, network):\n", - " super(GradWrap, self).__init__(auto_prefix=False)\n", - " self.network = network\n", - " self.weights = ParameterTuple(filter(lambda x: x.requires_grad, network.get_parameters()))\n", - "\n", - " def construct(self, x, label):\n", - " weights = self.weights\n", - " return ops.GradOperation(get_by_list=True)(self.network, weights)(x, label)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 执行训练函数\n", - "\n", - "可以从网络中查看当前`batch`中第一张图片`image`的数据在神经网络中的变化,经过神经网络后,计算出其loss值,再根据loss值求参数的偏导即神经网络的梯度值,最后将梯度和loss进行优化。\n", - "- image:为当前batch的第一张图片。\n", - "- output:表示图片数据经过当前网络训练后生成的值,其张量为(1,10)。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD5CAYAAADhukOtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAOtUlEQVR4nO3de4xc5XnH8e+DWeyATbkj1zgBE9qC0nLJ1gGFpBQEISgNILUI/khQROMIQVQkogoRqdCqkaDlIqS2tJvgABHlkgCCVigJRVSItjEs1JiLy80xYMvYEKCQG/aap3/McbVGc3Znd86Zsf1+P5K1Z953zryPjvzbM3Pe2fdEZiJp17fbsAuQNBiGXSqEYZcKYdilQhh2qRCGXSrE7v3sHBGnAzcAc4DvZOZVUz1/j5ib89irnyElTeHX/ILN+X5064vZzrNHxBzgBeBUYB3wOHBeZj5Xt8/esV9+Kk6Z1XiSprciH+LdfKtr2Pt5G78UeCkz12TmZuAO4Mw+Xk9Si/oJ+yLgtUmP11VtknZAfX1m70VELAOWAcxjz7aHk1SjnzP7emDxpMeHVG3bycyxzBzNzNER5vYxnKR+9BP2x4EjIuKwiNgDOBe4v5myJDVt1m/jM3MiIi4GfkRn6m15Zj7bWGWSGtXXZ/bMfAB4oKFaJLXIb9BJhTDsUiEMu1QIwy4VwrBLhTDsUiEMu1QIwy4VwrBLhTDsUiEMu1QIwy4VwrBLhTDsUiEMu1QIwy4VwrBLhTDsUiFaX0paasJuRx9Z27f27H0bHWvJTa/W9k28tq7RsQbJM7tUCMMuFcKwS4Uw7FIhDLtUCMMuFaKvqbeIWAu8B2wFJjJztImiVKappteeX7Z3bd+as/+h0TpOffgrtX277cRTb03Ms/9hZr7ZwOtIapFv46VC9Bv2BH4cEU9ExLImCpLUjn7fxp+Ymesj4iDgwYj4n8x8ZPITql8CywDmsWefw0marb7O7Jm5vvq5CbgXWNrlOWOZOZqZoyPM7Wc4SX2YddgjYq+IWLBtGzgNeKapwiQ1q5+38QcD90bEttf558z8YSNVaZc154gltX2rL1xQ2/fTL/5TG+UUZdZhz8w1wNEN1iKpRU69SYUw7FIhDLtUCMMuFcKwS4
VwwUm1Ys4+v9G1/fkrurcD/PTkscbr2JofdG1/eeJXtfvERDZex47AM7tUCMMuFcKwS4Uw7FIhDLtUCK/GqxXrb/7Nru3jn/z7KfZqfr2Duqvul3x+inXmXqj/482d+Tq9Z3apEIZdKoRhlwph2KVCGHapEIZdKoRTb2rFvnt2n/Lad07z02tj/9t9mg/gB396Wtf2KafXJib6rmlH5JldKoRhlwph2KVCGHapEIZdKoRhlwox7dRbRCwHvgBsysxPVG37AXcChwJrgXMy8+32ytSO6IXvjNb2fffw5QOrY9OWvWv74j9Wdm3fmf96bbZ6ObPfDJz+obbLgIcy8wjgoeqxpB3YtGGv7rf+1oeazwRuqbZvAc5qtixJTZvtZ/aDM3NDtf06nTu6StqB9X2BLjOTKT4CRcSyiBiPiPEtvN/vcJJmabZh3xgRCwGqn5vqnpiZY5k5mpmjI8yd5XCS+jXbsN8PnF9tnw/c10w5ktrSy9Tb7cBJwAERsQ64ArgKuCsiLgBeAc5ps0i1a7cFC2r7nr/qqNq+O0/5u9q+pXNHZlzHV179TG3fT374u7V9896sf82D+M8Z17GrmjbsmXleTdcpDdciqUV+g04qhGGXCmHYpUIYdqkQhl0qhAtOFmL3xYfU9r140eLavp988ZravoPm7NVXTR/278/+dm3fb13pFFq/PLNLhTDsUiEMu1QIwy4VwrBLhTDsUiGceivE5iUH1va98OUbp9iz2em1v33r8Nq++av3aHQsbc8zu1QIwy4VwrBLhTDsUiEMu1QIr8ZroJZ//3O1fR+9xj92aZNndqkQhl0qhGGXCmHYpUIYdqkQhl0qRC+3f1oOfAHYlJmfqNquBL4KvFE97fLMfKCtItW7mNv95pmb95757Zj6sW7i513bd9s80DI0SS9n9puB07u0X5+Zx1T/DLq0g5s27Jn5CPDWAGqR1KJ+PrNfHBGrImJ5ROzbWEWSWjHbsN8IHA4cA2wArq17YkQsi4jxiBjfwvuzHE5Sv2YV9szcmJlbM/MD4NvA0imeO5aZo5k5OkL3i0eS2jersEfEwkkPzwaeaaYcSW3pZertduAk4ICIWAdcAZwUEccACawFvtZeiZqJd/7k2K7tt3+r/jZOML/xOv7o6j/v2v7R766s3eeDxqvQZNOGPTPP69J8Uwu1SGqR36CTCmHYpUIYdqkQhl0qhGGXCuGCk7uYD3aPru2HjTQ/vXbcX11Y27fw7he7tm/95S8br0O98cwuFcKwS4Uw7FIhDLtUCMMuFcKwS4Vw6m0n9M6XT6jt+8zXVwysjv1X/7q2b+sbb9T2aTg8s0uFMOxSIQy7VAjDLhXCsEuF8Gr8TujdJd3/2AXg2oVPzvj1Nm39RW3fibd9o7bviDWv1fZNzLgKtc0zu1QIwy4VwrBLhTDsUiEMu1QIwy4VopfbPy0GbgUOpnO7p7HMvCEi9gPuBA6lcwuoczLz7fZKLcuvzqq9VyYHnrCh0bF+trV+Ku/j179c2zexcVOjdahdvZzZJ4BLM/Mo4Hjgoog4CrgMeCgzjwAeqh5L2kFNG/bM3JCZT1bb7wGrgUXAmcAt1dNuAc5qqUZJDZjRZ/aIOBQ4FlgBHJyZ295Pvk7nbb6kHVTPYY+I+cDdwCWZ+e7kvsxMOp/nu+23LCLGI2J8C+/3Vayk2esp7BExQifot2XmPVXzxohYWPUvBLpercnMscwczczREeY2UbOkWZg27BERdO7Hvjozr5vUdT9wfrV9PnBf8+VJakovf/X2aeBLwNMRsbJquxy4CrgrIi4AXgHOaaXCXdjmz43W9s39ev302oNH/ksb5WgXN23YM/NRoG4i9pRmy5HUFr9BJxXCsEuFMOxSIQy7VAjDLhXCBSeHKC99s7av6em1qRaVvG7jafU7Trh05K7CM7tUCMMuFcKwS4Uw7FIhDLtUCMMuFcKpt5bN2X+/2r55u28ZWB1jb3+ytu/VT9VPy8FUfdqZeGaXCm
HYpUIYdqkQhl0qhGGXCuHV+JYteqB++ezrFz0wxZ7zmi9GRfPMLhXCsEuFMOxSIQy7VAjDLhXCsEuFmHbqLSIWA7fSuSVzAmOZeUNEXAl8FXijeurlmTnVXFKRPvaRn9X2zd/N6TUNTi/z7BPApZn5ZEQsAJ6IiAervusz85r2ypPUlF7u9bYB2FBtvxcRq4FFbRcmqVkz+sweEYcCxwIrqqaLI2JVRCyPiH2bLk5Sc3oOe0TMB+4GLsnMd4EbgcOBY+ic+a+t2W9ZRIxHxPgW6r86KqldPYU9IkboBP22zLwHIDM3ZubWzPwA+DawtNu+mTmWmaOZOTrC3KbqljRD04Y9IgK4CVidmddNal846WlnA880X56kpvRyNf7TwJeApyNiZdV2OXBeRBxDZzpuLfC1Furb6f3oyj+o7Xv7m3vW9l278MlZjXfphuO6tj/2rd+v3WfP/78Eo11ZL1fjHwWiS5dz6tJOxG/QSYUw7FIhDLtUCMMuFcKwS4VwwcmW7XlP/bTWo/NPqO07csnxsxpv7zXZtX2fe/5rVq+nXYdndqkQhl0qhGGXCmHYpUIYdqkQhl0qhFNvQ7TPrfXTYfsMrgwVwjO7VAjDLhXCsEuFMOxSIQy7VAjDLhXCsEuFMOxSIQy7VAjDLhXCsEuFMOxSIXq519u8iHgsIp6KiGcj4i+r9sMiYkVEvBQRd0bEHu2XK2m2ejmzvw+cnJlH07k98+kRcTxwNXB9Zn4ceBu4oLUqJfVt2rBnx8+rhyPVvwROBn5Qtd8CnNVGgZKa0ev92edUd3DdBDwIvAy8k5kT1VPWAYtaqVBSI3oKe2ZuzcxjgEOApcDv9DpARCyLiPGIGN/C+7OrUlLfZnQ1PjPfAR4GTgD2iYhtK90cAqyv2WcsM0czc3SEuf3UKqkPvVyNPzAi9qm2PwKcCqymE/o/rp52PnBfSzVKakAva9AtBG6JiDl0fjnclZn/GhHPAXdExF8D/w3c1GKdkvo0bdgzcxVwbJf2NXQ+v0vaCfgNOqkQhl0qhGGXCmHYpUIYdqkQkZmDGyziDeCV6uEBwJsDG7yedWzPOra3s9Xxscw8sFvHQMO+3cAR45k5OpTBrcM6CqzDt/FSIQy7VIhhhn1siGNPZh3bs47t7TJ1DO0zu6TB8m28VIihhD0iTo+I56vFKi8bRg1VHWsj4umIWBkR4wMcd3lEbIqIZya17RcRD0bEi9XPfYdUx5URsb46Jisj4owB1LE4Ih6OiOeqRU3/rGof6DGZoo6BHpPWFnnNzIH+A+bQWdZqCbAH8BRw1KDrqGpZCxwwhHE/CxwHPDOp7W+Ay6rty4Crh1THlcA3Bnw8FgLHVdsLgBeAowZ9TKaoY6DHBAhgfrU9AqwAjgfuAs6t2v8RuHAmrzuMM/tS4KXMXJOZm4E7gDOHUMfQZOYjwFsfaj6TzsKdMKAFPGvqGLjM3JCZT1bb79FZHGURAz4mU9QxUNnR+CKvwwj7IuC1SY+HuVhlAj+OiCciYtmQatjm4MzcUG2/Dhw8xFoujohV1dv81j9OTBYRh9JZP2EFQzwmH6oDBnxM2ljktfQLdCdm5nHA54GLIuKzwy4IOr/Z6fwiGoYbgcPp3CNgA3DtoAaOiPnA3cAlmfnu5L5BHpMudQz8mGQfi7zWGUbY1wOLJz2uXayybZm5vvq5CbiX4a68szEiFgJUPzcNo4jM3Fj9R/sA+DYDOiYRMUInYLdl5j1V88CPSbc6hnVMqrHfYYaLvNYZRtgfB46orizuAZwL3D/oIiJir4hYsG0bOA14Zuq9WnU/nYU7YYgLeG4LV+VsBnBMIiLorGG4OjOvm9Q10GNSV8egj0lri7wO6grjh642nkHnSufLwDeHVMMSOjMBTwHPDrIO4HY6bwe30PnsdQGwP/AQ8CLwb8B+Q6rje8DTwCo6YVs4gDpOpPMWfRWwsvp3xqCPyRR1DPSYAL9HZxHXVXR+sfzFpP+zjwEvAd8H5s7kdf
0GnVSI0i/QScUw7FIhDLtUCMMuFcKwS4Uw7FIhDLtUCMMuFeL/ACOIrgtej4weAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The first layer: convolution layer\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD6CAYAAAC4RRw1AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAztklEQVR4nO2daXBc13Xnz+kNjcbWaOwAQYIUd4qiZFOrZUuOpFiynJJT8dhSFjMezagyE1fZE1fKcjI1XzI1pZmpUc1UJVOJKtZIiW1lktiJ6IwSRWZkS7YlWbQ27gBXEDuxNJYGGr3d+cD2O/c8sQEQaAD9Gv9fFQvn9Xnd77LPe7ff/b9zz2VjDAEAAPAevvVuAAAAgOWBDhwAADwKOnAAAPAo6MABAMCjoAMHAACPgg4cAAA8yoo6cGZ+kJnPMPNZZn6yWI0C6wviWr4gtuUFLzcPnJn9RNRNRA8QUR8RvU1EjxljThaveWCtQVzLF8S2/Ais4L23EdFZY8x5IiJm/isieoSICp4MwYoqUxGJreCQHoP1prHGO5xbYN+c/lE1Pr72fkRE1q7s/i0u8Ns8PztO6fmE+5N+wXXHtbK+wtS1VxVylx3s+mLt7YzxK1/WCnoqq312LP2uEyLoy4rPHcgCkZscSNDcxHyhuBJdZ2zrYn7Tuim4wMeVF37SMQgt8E3aN75zRnejKWvbfa7YsQtSRrsWOF73sflRY0yT+/WVdOAdRHTZ2u4jotvdOzHzE0T0BBFRqDJKB+77ygoO6S2yQR2RbIVsB5LuTposnz6RUtXiNK4o+zLyOf6Uq2Mp0IG/f+R/FW70MuJa0xah3/rOfQt9ZlkR5GzB7bG0/iEbT8l230xU+fw+iXNNcF75WiqnHLs2kFQ+X4HA/uWvH1mg1US0hNjacW1pD9CfHe5c7DPLhlrWMdgUyBTYkyhtdeDHUvXKdznd4NhB1p/ht2LXGogrn/u8svnk1rOXrvX6qj/ENMY8Y4w5aIw5GKyoXu3DgTXCjmukvmK9mwOKhB3Xugb/4m8A68pK7sD7icj+ed6Uf21DY991p2r03XKq1roDn9Xvy1SJr6ZX32ElY/I76/pBJ39abF/cdQde+Ad9IRDXa7DQ3dHAfJ1j989GlW90Vu7AR+P6BqayMuXY0yH9I1gbmhPbdQe+AhBbF/Zdd50vrXxXsnLdXclFlG86F3bsiymtbLw6tsux2ysnla/POj8+33JU+dqDE0tstbCSO/C3iWgHM29l5hARPUpEh1fweaA0QFzLF8S2zFj2HbgxJsPMXyail4nIT0TPGmNOFK1lYF1AXMsXxLb8WImEQsaYl4jopSK1xZPkAlomma+T7fmYW0KxshVq9YNKUylD9FwgpN9XJ+9zP78KzMgxgjOuB5zZ5aWIIq4fzkiw6Z3TD63eGRRVYi6hY2cmZNsEdTxS/sLHiAZFQin00HI5bPTYVnFKbduyyflMnfL9lwsPO3bfWFT5shl5PrCzfVj55jKSubMlMq58xwfaHPu+Ri2prbWEAgAAYB1BBw4AAB4FHTgAAHiUFWngGxHjmi0116BfSFupg7PtOvVs936ZQ1Eb0qlhNQFJZ3q974Dy2ZP0sq6Uaz8iuCpE/ForPT3T6thH+/TkluxF0TLrzuvPqe+Rz0m06VmN4/skxXBusz4fOsOinc66gj6b0zo7KIz7WUZnQKcKHkuJ7v2feh5RvsEeSQ/0uSa4+tKyfWq2Q/naO8cc+0pK69y2dl4McAcO
AAAeBR04AAB4FAzAr5NEqx4CZSpd/g4Zst1zm64R9KXm1x17yJWylMzJ8PonaS2h1HZLGllwVqeU2bVQ3CmNhYoegQ/TGJxW2+9Oblbb7/RvcuzMJT0srj8tdtOPR5Qv231ONn7jDuVLN8q02od2nlK+L9S+79h/O3WT8kFCWToHK7SM+eqcLqb3R2clVXDwTLPy+S3ZxKcVNWp9Sz53cquWxgYyjWJzo/IFoyKVhVnLOcsBd+AAAOBR0IEDAIBHQQcOAAAeBRr4dTIf1dvJVq2x3XOr6N4fqelVPrtO8M0Vfcr3w9mdjl3Vr3Xuml5JMfTPaDEuFxH9Ldnomsbthwi+VM7Oav3z0rSeLp9OyaVSNaS/18afW1Ogx+PKN/0F0b3H9uv3RZtFd99eqbXztHUKZPEwY9n8+eQ2tX14UD9fGjolcTfVroqTSYn5plf1deebl32DTfq5WGhMtsNjOnadj1xx7IbAzEJNXxK4AwcAAI+CDhwAADwKJJQlMNsov3PpGi1vfPJWXY1za2TUsbdV6CplD0ckhejfXr5f+Y6ckSLwO07oodVMl8zYq/2HM8oXCEgI/XftUr5MBEPvhbBTB9+60qV8A5cb1HbtCZGqOl6+onw8LatzzH50q/JNbrMXQtXnzg0xOVc+X/uB8k3nrHMuh8v0ejgYErnj+3Gdxtd9tk1th6wKnuGtLknjpMho/jm9ksp0l+QPz7bo6ywkq+FRSmcLU1NYjoE0QgAA2MCgAwcAAI+CDhwAADwKxLU8dpXBZL3+XbMXHG48oHXt/TU6HXAiI3p1IqeryD0zKdN4jxy9Ufm2/Y3oYYnNegHVuuNSmS47q1dDNndJWlS2Apr3QsQCCbV9cqbdsS8P67TByot6enT9GYkPz8wp3+yN8jkDH3ddUttF8/zUNv384o873nLsPtdi1T+a3SGfj6nzH8KuMnggpFP8jqZkweGfjuhnEpzW13a6Tj6n6Vu1yld7Up5RTO7T50d8R+GFxhvfkfb4fl+nh7ZVyCLHxUgPxR04AAB4FHTgAADgUTashJJz1VVP1chv2XzUtRixtajwv9vypvLV+HUh/v1hWbThSkYPyf5v/0HHbn1NHyNbIcevGHeNyVIyfDd36plkySaRaTDz8sMF/Ct88l26Z1u+PSAVB3Nz+lIIujLKwqMS51y9rkaYaJX3pjfNK9/Bjn7H/tXYUeWbyclnvpTYqXy985LGmMV91odS7roCIlMctyQTIqKnL3/Kse1FGYiIgnH9XRor7BXjrpKDaTl3MpX6fXaq4PQNegbnxUelv/hi0znlu7PqrGMjjRAAADYw6MABAMCjoAMHAACPsmE18GxI68WJdtnOhfSU57sfOObY0zmtt+2uGFTbEZ9ooP+9/0Hlu3BapvHuPKdFVt+c6GHZk926sXtFH00269RE6N4at148Yy0I/M6IXnw2MSrpmrUnddpg22uTaptzoq2PH9ApZdObrenYVVpHvTV60bGrWPvOWgvjXkzqKd/QvTVpox9aDWSk63rq8qeV74NjXY7NWdezpkp9bdd1i39il6ua525ZyHqhjD8T0Rr4Fz8iz8luq9IauK17+1g/r1kOOEsAAMCjLNqBM/OzzDzCzMet12LM/Aoz9+T/1i/0GaD0QFzLF8R247AUCeU5IvpjIvoL67UnieiIMeYpZn4yv/314jdv9cgF9ZhorlOGNtu3Dynf7mqRSer8ehZee0Avhvt7Fz7n2Jd+uEX56mRCJWWr9JDdlxDphQPal62SoV0RJZPnqAzjOpfV3133lKQOxuNVyhcaltO/5W09w9U/NKa2p+6UWF45qIfhW/dJquC/3/xD5bs9PODYx1O6wuFIpsaxiyyZPEdlFtuhTFRtf3vodsd+/4xegNpejLhme1z5Kl/Qn1MxIdf9xE7XjFfrUnMpODSzReSP+kbdB9RbM35rfDrNuNgsetYYY14jonHXy48Q0fN5+3ki+mxxmwVWG8S1fEFsNw7L/dlv
Mcb84rZ0iIhaitQesL4gruULYluGrHjcZowxRGQK+Zn5CWY+ysxH0/MrXwMOrA3XE9fZiflCu4ESZKHY2nGdHMteaxdQQiw3jXCYmduMMYPM3EZEI4V2NMY8Q0TPEBFV13cW7BDWArviYLpKa8m33yjpPrfUXla+C3MyHfd3mn6ofK8kdqvtnte7HLvlAz0l3rAck9M6hSgTE302d7euVJiuWrNsz2XFtXVfbF3jajPvWr3m4qhUgOQhnQIaOyXNDpwdUL7kvk61PdklIqip0HHdUycVKve70koH7DTG2S7dVrOmWbxLiq0d1103hUsmru40wu5RebbBSX0fap8CMwkd84Zhnco51yS6d0YXAaXIkPz3p7fq/uL+u9537D1VOubtwQlaK5Z7B36YiA7l7UNE9GJxmgPWGcS1fEFsy5ClpBG+QERvENEuZu5j5seJ6CkieoCZe4jo/vw28BCIa/mC2G4cFh3DGWMeK+C6r8htWXXSEfm9SnTo0aEtm0xmKpXv3rpTjv3dyY8q3wsnD6rtGkt98c3rY4RHJQXRd75f+TI7Zcg+X69T4VaDcopr2CepYJV+fUqnrlizLXv1MLjmgpU6mNGyyPhuPeN1ZpvowV036EU9vtT4umNHXAsX/3ReZn9OZvV5tVqUS2xb/ZKO1xXQz8/SaUtScWXWBqflhdYf6NTA0IBOzolvE3k057rsko3yOclNWnq5s1Yk122hgkrjqoOZmAAA4FHQgQMAgEdBBw4AAB5lQ1UjzFoZRekGrXk2WlPiG13T499LyDTqvmRU+QIn9fTs2l6r2pgrVdBOY7Q1byKi+QatuYKlYz+z6E3oEh++qGiX8/UuDdpOK92jp2MnXBmvVR1yTtzb3KN8NZYG754uf2FerwgDls6xeaneeWRyr/KlZkTbDiZcq+XEJbBVZ3VK3+w2fX6k6qxKkqM65hMHJa4PHzimfMVYTacY4A4cAAA8CjpwAADwKGUtoaSq9O9TskGGS5FGXX3uBisV6LQ1dCMiurnqkmNfnNVD5PozWibJBeQYlefjymd6JXUw+5Fd+n2holUZLHsiPp3SNZ4SGet4f7veeVCkqcZjemp4cDDu2BO365j7XCPkUEAkt3uqT+vPsWal96f1ED3pzk0DBen0Fy618drlG9R2cLjw95q4USoAznTreMS36y6PLSV1rlVfg/XNIpvdXasXWVnL2ZYLgTtwAADwKOjAAQDAo6ADBwAAj1LWGni6Wm8ntomw+dg2nRb06swex3ZPpb84I7p37wvblK/Cr1OPQnHRZ3OX+pTPt01S1VL1rtU/wJKxFyomIuqfrXPszLiuPlfbK/coNd16lZ3EHqlod+VmrX+23aJXZXp+9186dsyvK+P9YFZ0975UjMDy6HE9Pzgyvc+xE3FXCmiLXMtV3fpaqhyRc2B0v36bcd2ytv9YSiFf+KL23dd2cZEWrz+4AwcAAI+CDhwAADwKOnAAAPAoZaeBpytFy7SnyRIRRVskr/OjVReVL56VsqOTfr00x/dfl5KxrWM675tYHyMQt1ah3qNzV2fbJF85G8Jv5/Vgl4y94MrFvxyPOjandTwiw1a8MjoPfHKr5BLnNs0p34NtJ9V2i1901h/MRZXvVFI08Nkcnm1cD3bJ2DdTW5Tvny7Jc6ngiM77zlTKs6d0rX4OVW2ttMQ5fZ0Fp/W+F35VusDbdugSCZ+ok3z/Zr8ur1EqoBcBAACPgg4cAAA8iucllExYD5nno9aqO1t0xcGH2i869lhW5xjW+mQI/Z3+25Sv/ph9DD0Eqzmvp//6pmVIOLu7RfmyYev3EjPnFyTsmstuT0k/N9mofHPnah072q2/2NruKcdOtdUqn51m6vdraexA5JLaHsxKeujZ+Vblm87q1EVQmCafLmHht66n7qQuZzA1LpIjV+n4BCeta8m19HKiVXypOu2sP6PPq5ZDUt7is43vKl+DNbXfxy7ptETAHTgAAHgUdOAAAOBR0IEDAIBH
8bwGPtusf4PsWfCxTXHls1eXb3CVroxaGvjA65uUrz4h+pddLpaIiNM6Nc1ERA81rn2hey8dn0vYPDMlzxNGJ/Xzi4C1Ikv1gH7u4ZsXzXNil9bAE9vFd+fmXuW7q0KvXv7TedHdoXkvn7BLS345IWWVv9tzs/L5JqV74oy+eGouWhsuDTxr9QHZCv2+RKtOR9wWlvTAGr9OJQ2yvrZLEdyBAwCAR0EHDgAAHsWTEopdUSwV1b70DhkGfbJVp4LN5qSK3e7QsPK9lxTZJKyL1pE96qsc02lI2WpdGS9VK8N7zLZcPj2JZrV9KS6V6lJT+juPDcgYOnLZNWPOiG+2TQ+nG9smHfvhxg+Ur9qnj3F8ThahxmzL68NPcgF9e/Kg8n3v4gHHTvfqBcKpRSoFho/raoRNb8uKOPF9dcpnz8D2uVSQK/fo6/cjtdJHlMpCxdcDehgAAPAo6MABAMCjLNqBM3MnM7/KzCeZ+QQzfyX/eoyZX2Hmnvzf+sU+C5QOiGt5grhuLJaigWeI6GvGmHeYuYaIfs7MrxDRbxPREWPMU8z8JBE9SURfX72mCtMdsiLKfJMWuR7cKamC7RVx5dtmrTzf7tfve+L8vY5dPaB9/qRoeIGE1smMqxphzk5bKu20wZKLa0tQpr2/OrBD+SYvRh278X39xTb/i0yHNkF9Sk/e3FTwePEpqTrpTg3szeiUMg+lDpZcXD9mlZD4mwmdAjoxIPp1wFVJMpuW91WM61zBZJt8TqpGvy9dI3b1Zf2+X/rc+2p7d8WAY3shbdDNonfgxphBY8w7eXuaiE4RUQcRPUJEz+d3e56IPrtKbQSrAOJaniCuG4vr0sCZuYuIbiGit4ioxRgzmHcNEVFLgfc8wcxHmfloen7mWruAdWalcZ2dmL/WLmCdWWlcJ8e8d0e60VhyGiEzVxPRd4noq8aYKbakA2OMYWZzrfcZY54homeIiKrrO6+5z/Uy3yAfs3OPXjh4a+UVx865VjANkZyQfzrxUeWbfl3S1londYdkz96qendE+ci1wG2yuWOhppccxYhr675YUeJ6KiHV6BJJnapnp3KGZvThTIXsO9cVVb7JGyQ+qaieBbi/Y9CxH6s9q3zHU3pRD69RjLjuuilclLg+PS4Lgb8+oBcF983KNRraMaV8DX8nWkj98Unlm7Rm1aarXVql1erwhI55QzBB5cSS7sCZOUhXT4ZvG2O+l395mJnb8v42Ihop9H5QmiCu5QniunFYShYKE9E3ieiUMeZpy3WYiA7l7UNE9GLxmwdWC8S1PEFcNxZLkVA+RkS/RUTHmPm9/Gt/QERPEdFfM/PjRHSJiD6/Ki0EqwXiWp4grhuIRTtwY8yPqXBC3H3Fbc7SyFqznH+9/S3l601J1bhPVJ9WvvaApIb98+Ae5Ws8LlXsZjq0/lo5Jr5Mp05LS0f1lGvjK+3cwV9QinE9NiYauHuFHLulwRn9cC1XIyl+mYh+JmGvyrRtx5Dy/ev21+V4rq/ix1aVPCKitNGfW6qUYlyfP3u7Y0/36oqQ4QkRARr26dV6cnOSKph7Ty8yzTvvcOxAQkv12ZD89/vvdVUx9CepnMBMTAAA8CjowAEAwKN4shqhf6vkk/8ovlv5ElmRP9yLAvx5QlJfL1/QUsjOKzK0qhjXv2v2og1uySRd7Y2htRdIZeS7nLmkK8xFz8hQ2L2+7Ogtkm42s0kPmX3VIpsFXaXp7OqUw1m9EMR4xlUZDyyb5hq5Xqci+nu1KwdO/pNe1Dgckes3/cu6iuFMm1yjs236OuecbN9x8IzyHYycX2qzPQHuwAEAwKOgAwcAAI+CDhwAADyKJzXw+WnRLn/Su1X5Pr5FNK7vnNW6WeqMpDBF+7RWGrg86tjpLVof95266Njhze3Kl94VXVqjwaIcaJbKcD+a0FrpbKukCmbDemHa+ahlt2gtu6JCtg/U9yvfreHLjj2W0882QPF4tP1tx342fZfyDYXlWUf4fV3xMWNV
M+i/R8c81ZIqeLzAmOx7sO5Swf3KAdyBAwCAR0EHDgAAHsWTEkr0XUkVjN+kpZCjFdbiswO6eHxDt9jNP+hVvtyULIYbOKNna+VSMlzLRvXiqqB47IhIfaX0Dp2eebJeUkAnhvRsPjtbtLNrVLk+0SJVBj9Xd1T5Ula1yveSW667vWBp7KsQ6eqeFl31cTAqEsqJ+lblG+8tvGiQb0a6rlxEp4c23iTnkc+dc1pm4A4cAAA8CjpwAADwKOjAAQDAo3hSA68aEs0rG3KlF12IOXaFdhEbEUtNpU4b85FocWbGtWrH3u2OmWzUlQpB8Zg3cjrur9Epf12RMceeatfpZhU+SRXcF3G9LyiaeDynn1/0puVc6Z1vWEaLwVLIWsURP1bTrXz9YYnB/modu78PHHDsgdGo8lmXMsXq9PX6cMcJx+4K6Wci5QbuwAEAwKOgAwcAAI/iSQnFzgyq7XUV9w/IcC0T1imG/pSMu2b26iGzf04+1JfR1c3sRQKM3xsLNniRZE40L3clybqAFPtvC8WVz08Su4RrRuV7yc2OPZPV0ou9SINXFmzwIlnrPrHBp+UOf1DiPMI1yvcrHR849lCjrk5ZG5BU31hAf2Z7cEJ8/hkqZ3AHDgAAHgUdOAAAeBR04AAA4FHYGLP4XsU6GPMVuroidiMRlUp+z0ZsyxZjTNPiuy0NxHVRENfisVHbcs3YrmkH7hyU+agx5uDie64+aEvxKKX2oy3Fo5Taj7ZoIKEAAIBHQQcOAAAeZb068GfW6bjXAm0pHqXUfrSleJRS+9EWi3XRwAEAAKwcSCgAAOBR0IEDAIBHWdMOnJkfZOYzzHyWmZ9cy2Pnj/8sM48w83HrtRgzv8LMPfm/hddxKl47Opn5VWY+ycwnmPkr69WWYoC4qraUTWwRV9WWkozrmnXgzOwnoj8hooeIaC8RPcbMe9fq+HmeI6IHXa89SURHjDE7iOhIfnu1yRDR14wxe4noDiL63fx3sR5tWRGI64coi9girh+iNONqjFmTf0R0JxG9bG1/g4i+sVbHt47bRUTHre0zRNSWt9uI6Mw6tOlFInqgFNqCuCK2iKt34rqWEkoHEV22tvvyr603LcaYwbw9REQtC+1cbJi5i4huIaK31rstywRxLYDHY4u4FqCU4oqHmBbm6s/omuVVMnM1EX2XiL5qjJlaz7aUM+vxXSK2qw/iurYdeD8RdVrbm/KvrTfDzNxGRJT/O7IWB2XmIF09Eb5tjPneerZlhSCuLsoktoiri1KM61p24G8T0Q5m3srMISJ6lIgOr+HxC3GYiA7l7UN0VdtaVZiZieibRHTKGPP0eralCCCuFmUUW8TVomTjusbC/6eJqJuIzhHRH67Dg4cXiGiQiNJ0VdN7nIga6OrT4x4i+gERxdagHXfT1aHWB0T0Xv7fp9ejLYgrYou4ejeumEoPAAAeBQ8xAQDAo6ADBwAAj7KiDny9p9qC1QFxLV8Q2/Ji2Rp4fqptN12djdRHV59aP2aMOVnoPf5IlQlGY8s6nhdh91drb7N25fzWhl/7yJ8TO1P4N5czS2tXOj5O2dkEX8u3nLiG/JWm0l+7tIOXA+z66nzWdjanfbmsY5qcPiHY/pyAK+jLuCznslOUys5dM675411XbGMxn+nY5D4ZNw72F5k0+nuYzEYcO2v0NZk18s5oYE75Knxpxw6S61xZgOPHMqPmGmtiBpb8CR/mNiI6a4w5T0TEzH9FRI8QUcELPRiNUdfjv7eCQ3oLdsXHnxI7W6F9qTq5YrNV+o1cZ71x1PVGi1Bcn0iFOvSL33z62o6rXHdcK/21dFfrYwt9ZnkR1JeNCQUdm2dmtW96xrFz8/PKx37pFHxNDfoYmay1Y8E+WfHToRcW2+W6YtuxyU9///8al3TscsRvfe096Trleyl+wLHH01XKl8iEHPszje8r387QsGM3+XXnvpAcsn3z0KVrvb4SCWVJU22Z+QlmPsrMR7OJxAoOB9aI645r
KjfndoPSZNHY2nEdH1/6HSJYH1b9IaYx5hljzEFjzEF/VdXibwCewI5ryFe53s0BRcKOayyGHIdSZyUSSqlOtV1XbNnELaHMNYlMwlv0UHtb85hjt0RUiQWqDoiE8vLpPfp41jWWS4SVz59Z2tDbBeJ6LSzZxLj0ak5a0kg2q3xkyysZrWnlZuUc8KX0MwSTkphzWMd1qZLKNUBsF8A93jiZEvno8PgtynekZ7e8L61/6Iy1vee2IeXrCE44doySyudbxoOPlfzElupUW7AyENfyBbEtM5Z9B26MyTDzl4noZbqaN/GsMeZE0VoG1gXEtXxBbMuPlUgoZIx5iYheKlJbPIlbJrG3U/V6SLTpowOOfajzDeWL+mU4HbdSlIiInj59n2MHevVwOlMpx/Cllz20ViCu9KG0Pls24YyWSYwloeTadaZXslViaQI6PpVD8vA3HdCD4WCfSGpUxHIXiK3GvnwvZnSmyT9OSKbJ+WmdjbO7Q6SRgKsTuBSXVdUqXKlg/iJXm8VTCgAA8CjowAEAwKOgAwcAAI+yIg18I+KeHu9zzXa0Z1Ruv11Pnrq74ZxjJ3KFZ1T+73P3qO3kqahjx07qBiTa5Tc4GyKwXFypeSainzXwnJUqmEor3+iDNzh2fDdprI81rkcUdT3V19yPiKjemgYY6hvXTpcGD5bPmHUdfuvKXcp3blJ079YqndrbWCGTEkfn9fyW+KCkhP4N6fTDk01tjv3l1iPK1+K//glxuAMHAACPgg4cAAA8CiSU68RVXIzmXamCt3zyjGPvqh5Wvl+qlppBYzk97LJnek2c0ClLXf8kM7YylTpkoRlJYZrYqX2QVJZOLlajtn1jeshs0iKbDH/2BuUbPyg6WleXXtO2LiSxC/m13vZOYIfsd0ZrKOkamcEZdM/8hISyZPwuaWo8G1Tb/63vIcceTOjZsPGElIjo62nWn5uQe192hSO2X1JAJ6d1SvDReZkIO9as+wBIKAAAsIFABw4AAB4FHTgAAHgUaODXyWyL1rxzm3RFsVhIpsQHXeJYf0am2F5KaZ37yI9uduxtf6crFfoTksIWOjetfJN3bJKN4syk35D4hnWqXmp7m9oe2y966MR+HdddN0iJhP3RAeX7j80/dexvTe1Uvu6Luxw7Xa1clK627q18uM9aLm7NO+bXKaB31Z937O8n9ytf6pxo4rEzykWBpPQDM506PlOW7p2d0g+iTFL2nT7gKsOsm7okcGYAAIBHQQcOAAAeBRLKEgjaK8H5tE5xY1ev2m6viIuvsk/5vjN8u2MffW+78lVdWUD/sIbQ2aaocsVvkBSznHsIVtzCZ2WHnTrIWf1lpaJ66BvfK+maD936gfLdViPD8M7gmPL9n0lZgOM7F29Vvqw1gq47p2WZ6kty0rFr5idYGDt1MOiqFPi3Uzep7b/u/YhjT76pUwVrh+WcqOnTMah8T677qhs7le9ynRXY1pTyUbV8TsSn10hdDrgDBwAAj4IOHAAAPAo6cAAA8CjQwPPYVQbd0+WTjeLccfdF5dsS0elnj0ePOnZPRueGnRxpdezwoJ4e3XxU9DBfSk+5NlalvMF79KohSveG5v1hrO8uV6fjwYOiV+e2tChf3/363qayXdI3T8X1vr8We9ux9wYnle+Pzv2KY8+8oVfraflA9FDO6eD54taDl6x7uV1g454ufzkjaXz/o+9TyvfumS61HRqSLjDokqRD0xKTXMi1cHFrg2OP7dOVRdMNEldfUMfuSzfJSlybAxO0UnAHDgAAHgUdOAAAeJQNK6G4FyP2WVlCc016OFt3kwy1H2v7mfJ9vFIv2jCeE2nkK8ce1Qf5mcgfsR6dNuZPWrLJ8bPKN/OZmx3bPWMPsokLv2uoG5bhrW/UNWStkFTB+G79xfpbtI62p1kqS+6rHVS+rJFj/tHw/crX/67M6IzM6MNnKuV9te/rypWUts6HIi5qXI70pLWs+OKEpAZemGhQvnC/zrUNxcXOuXpD
e7bldIeWPMd3yazqRKfuTPxVErtH9x1Vvl+tfdexI+4yhssAd+AAAOBR0IEDAIBHQQcOAAAeZcNq4Mb105WxFsfIdWn983dueM2xzyZ1Ctlv1Oip01/q/bhjzx6rV74mS/eu7tUVB3Mh0diS9+npvlNdlv4GOfT6sEofmGq9OsrMPknrG75L65jbm3V6qJ0uujM8pHxvJGRlnX/84Ebl63xDYp4L6Hy3yhErby3tWh0burfCnUiZsi5g9wLhZ6ebqBALyc6xMzoGVeclJXS+NqZ88TskdialO5NAUD7n89G3la8YurcN7sABAMCjLNqBM/OzzDzCzMet12LM/Aoz9+T/1i/0GaD0QFzLF8R247AUCeU5IvpjIvoL67UnieiIMeYpZn4yv/314jdv9TA6K4jmG2Vos7kxrnyD6ahj+135h18fvlltv9YjVQbr+vUxfBlrZldA/3baM73Gd+tUp7Ql7xRxBPYclWFcyTWj0a7kOHJvq3KN75d9D+y7oHybq7SE0hySmZj/MrFH+V47LzEPX9JVDCtGRSrzJ3RFO9+0JaMVVzJ5jsoxthZ+a+r0x8Ojyvenlj11Iap81a5UzvoeKyauEMxukQUd3JU+21slJXVzjU5PrQnKIi817JLGisyid+DGmNeIaNz18iNE9Hzefp6IPlvcZoHVBnEtXxDbjcNyNfAWY8wvZjMMEVFLoR2Z+QlmPsrMR7OJRKHdQGmwrLimcnOFdgOlw5Jia8d1fBw1WEqdFT/ENMYYWiA3whjzjDHmoDHmoL+qqtBuoMS4nriGfJWFdgMlyEKxteMaiyHHodRZbhrhMDO3GWMGmbmNiEaK2ai1gF2nL2clxSsc0FplY0D0z8PDB5TvdK/WVSvPhB27pk/rX3bFuZktutObr5XjZ8PKVUzdezG8GVer4qCp1Cll07uijj1xow56/TZRGR5sOq58Oyp0quBbCdG5/+V9rYFHj4lAGpzRx7Bj7pvRIxVOulZrWV28Gds8WdeK3bPWvPeTroWLT52Rhb5rL+ofoeg5V6rgO5cde+bgZuUbvVGOkdOPNqg+KH3EE60/Ur7gKuveNsv9iT1MRIfy9iEierE4zQHrDOJaviC2ZchS0ghfIKI3iGgXM/cx8+NE9BQRPcDMPUR0f34beAjEtXxBbDcOi0ooxpjHCrjuK3JbVh2fNbJxVyMMd4pM8uvtbymfnbK0r05Xojsb0rO+qvtk38oh15B5ToZdiVt1Gu5sqwwR10IyKae4mrCMbzMNuqpgNmjJKzEtWXTUTjn28cQm5esM6Rm2M1mrqmFC56BGhiVgVf1J5QuMyDHWSjIpp9j+grRr6vR/7nvYsU+8slP5IpYCWjGhJa2qN3W6aHK/yCZj+3R3mNwn12+kSq/2cFNUcoS3BKZovcBTCgAA8CjowAEAwKOgAwcAAI+yoaoRZqpED5vv0KmCv9x53rEbAnq+bX9aKpH9eHib8jX/rc75CyZEaPdf0QvcZlqjjp2u0WlRa5gqWHbwrOjOgSmdUhbfIXMPdm7Sq970jMjzi9888Ibydfh17P7qJ3c6ds0lfd8zXyvnVZWrfMIapwqWFUlL9x7L6bTbnJHrJ+SSoO1FyFN1+jqbvltfv+N7rCqgjfrBmJmQZytbO/Wzr0fq31mo6WsG7sABAMCjoAMHAACPUtYSij/t2k7JcCoT1prFJ+tOO3aNT6eC/fPoXsee+JkuIdHmqjAXiluF3qv0sG/0JhnOp/Q6rB+uWA8KYiJatjJhkU1y4aB7d4fmymm1/a9u/LljdyfblO9bg3eq7cC03OtEruhgVV+WmAeu6GOApePXageFrIvifKpZ+YZnaxzb51KpKkfkgyLDOlaRfr2QyuQ2+ZxcrWvmdEjeu6tGy2+dfldZw3UCd+AAAOBR0IEDAIBHQQcOAAAepaw18Ixew5aSzaJx7evQ1eYuW6mCu0PaN5YU7Tp20pVq5FrZh+dFWx/7qF4INdloiXxYs3bZ8Jye1pyr
k0AP3l2jfNn9olWeHtfPL5pC4vO5ylOeeLdLbbe+K/6Kcf3cIzAlz0yQNrh8Ejl9Pzlt5HnGTyZ3KN/AxUbHrnQtDlc5KrGq7tPPsxKbdaeQ6JDrmYP62r57x1nHfrDug4Wavm7gDhwAADwKOnAAAPAo6MABAMCjlJ0GbpeMzbnySiubJQf0M81a02oPysrS/23gQeUbeKvdsdumda5oaEJrntlamX4716QboCpiQgO/LuySsfZK80RE2QprOnST/mIPbpIVVzoq48rXPxd17De69RTr6j7XMayc4IorOpfYF7dygou7unzZo3K/XV/dxbTo3MfGdJ5+7enCq+X4rUck43v1XIxEh74mcxG5nj+247zy/WaTlFfoCujSCqUC7sABAMCjoAMHAACP4nkJxedaP9SWKeY6dbrXra1SUcy94kqDL+HYZ+ONyldzSezwiF5lJ7G5Sm+3SAOyrqEdZJOloyQTty+kp8tP7JZhcqZdpxhmrBMi4tdy14UpSfOM9OjFkGsu65Sy8IScaJzQqWmURR2E5ZK0qgq+Prdd+Z6/dIdjj5xtUL5q6zqPDOoLq+EtWa/54hd06uh8feFY3R87qba7gnHHLtU73VJtFwAAgEVABw4AAB4FHTgAAHgUz2vg5EoVnI+JHvbwR3Sq4J21MjW2MxBXvjfnJI1sdFRPx45a0+U5pUX3uXr9G5iMWQ1ytQ0a+HUQdJ2aafneTaVr1Z098sXu3aJXTgmwaJ7nEk3KN9QrGni1awZ8aFqXGw73WavLp10PXsCScSvQ9mrzL43sV77BcxKvql5ds8J+1uVPuS6sXO6a+xERmUod1307+xx7W2hE+YIeuGBxBw4AAB4FHTgAAHgUT0ooduG4VFQPcwJbZFZcwKeHS4mcpIrZkgkR0T+M3OTY4W694kvzURk+Z2q1LzKmjzHXLF+pe/gGFsEvXxhP69mOs/tkJt6VW3SKYWiLxKetUq9wGwtJeuh8Tp/u/inZDujDkfG5Fp22KyAibXDZTOe0/NVoLZt1Y92A8r0f7nRszmkJxa44mA3pWJ36mkgvnNbXJ7tW4vo3Ha87dqs/QV4DXQwAAHiURTtwZu5k5leZ+SQzn2Dmr+RfjzHzK8zck/9bv9hngdIBcS1PENeNxVLuwDNE9DVjzF4iuoOIfpeZ9xLRk0R0xBizg4iO5LeBd0BcyxPEdQOxqAZujBkkosG8Pc3Mp4iog4geIaJ787s9T0Q/JKKvr0orXQSt4m/ZCq1/3b+127G7wqPK1xW84tj/8/IDytf9882O3XRBa5y2jjbfoVf0mKt3LcnjEUoxrtlYrWy4bi1StfI9J7brnL+Htkh66H11ejp0g7V6+O+f+pzy1VrF54wr5ZOzrhQyj1QZLMW4ulebt/nyhV9z7GO97cpX1S3POtyry9dekJIWvjmd1jlyt1yj/jl9fWZCusvrDIw7thfSBt1clwbOzF1EdAsRvUVELfmThYhoiIhaCrznCWY+ysxHswnvPSTYCKw0rqnc3LV2AevMSuM6Po6HtaXOkjtwZq4mou8S0VeNMepRvzHGUIFpKsaYZ4wxB40xB/1VVdfaBawjxYhryFd5rV3AOlKMuMZiyHEodZaURsjMQbp6MnzbGPO9/MvDzNxmjBlk5jYiGin8CcVlplPOvZqdE8rXFJp27Jwrj++NhCyMOjhVq3zZarnbyAYLyyIz7do31+xKN/PQJL1Si6tvQCSuyU9sVb6BX5b4tG8aV77xlGv1aoufWjEfc82wrbPG9o3H9CgiOOrKK/RQ6mCpxXUoK+m7adcq4Cf6JT2Uh3VFSDvj0JZMiIh8Pzvh2CajLzr/1J2OnWnUFUmjjTNqO8jeieu1WEoWChPRN4nolDHmact1mIgO5e1DRPRi8ZsHVgvEtTxBXDcWS7kD/xgR/RYRHWPm9/Kv/QERPUVEf83MjxPRJSL6/Kq0EKwWiGt5grhuIJaShfJj+nBZpl9wX3GbA9YKxLU8QVw3Fp6c
Sk8+0cCDAT01tndOKszNZfW03TdO3eDYfldVsoph+Soqx7WmZixN3KclNWL9MWAFcEQehk5v1lrpli3Djn1PS4/y3VAhvqyr77owJ6srBfv1FHz/vJxH6Yi+FEIz7rn13ksxKxXC1kVyW4W+Jj+145Rj/+PwLcpnX2uBuNbAcwd2OfbQx+u0z1qAOlilU06/vvtltV3lpYdW1wCPmQEAwKOgAwcAAI/iSQnFn5Rh8ujFmPJdqZY0wu5hXcC/+owMoav7dfpQ9ANJTeP+YeXLbd/k2G4JxYOTt0qWxO5msTfp+Pjnwu7dHb5YO1rQ92e9shhupF/LKxkrfT2QdGlhHkobLHVmrXzAP413KF9tQKSRjr36uhsdkxTD4bv1osaJDoll5c06rZQuRsV2TbG9PXx5SW32CrgDBwAAj4IOHAAAPAo6cAAA8Cie1MBzC7T62EmpKlh1Se9YOSKCdd2paeXLHT/t2Fyhp/RmrRSz2bYFSquBFeFLW4vR+vW9xWxSnl+8Pb5F+Q5HLjh2yJXXeWVG6u8EM/qBRfiKHC8w7VrVGBSN0ynRsv/rmw8pX6xJyrTc235W+b5f3erYZkx/ZkVc7OkZXYvHROQcCIa8nSa4GLgDBwAAj4IOHAAAPIonJRSfNSrKhHS6V8Wg/Jd8rlGxvRjyXIcubRuIfdSx/bN62DV6owzRsnoyH2ZiFpFwvwyn6080Kl88IxUHT8+1Kd9/6PuCY5txLX/Vdcs9SvWgjmsgIcHzj2lJDRSPGl/SscM188o3PiSzKL8/s7/gZ7ivM6vAIeWyWtasrJfUxM9sO0HlDO7AAQDAo6ADBwAAj4IOHAAAPIonNXB7Kr1/UletS0WtlXVC+vcpF5T3pSP6fb60bGciWkdNRcWG5r168Ixol00/U6uAUeVYtWOnql0PIqxnG+G4DlCkV7RtzrkWq05Zmni6vNPN1pMdIVn857d3v6l8f9lzm2PPJVzVIrdIRcipzVrn3tMx5Ni1waTytVTIufP56M+W0WLvgDtwAADwKOjAAQDAo3hSQiFrJBya1L9BmYiMpxdKPUpF9ZDMrjKY1QqKOh5YRaxFE/zjWkKpTVjDZHbNhg1Ycph74YWUu3yk9TFJzL5cC2qsRRMeqjmmfI17ROKazOrFqSusi7LGpxd06ApJBUr34uURn6Qqxty5xGUG7sABAMCjoAMHAACPgg4cAAA8Cps1XKyVma8Q0SUiaiSiwsuorC0bsS1bjDFNi++2NBDXRUFci8dGbcs1Y7umHbhzUOajxpiDa37ga4C2FI9Saj/aUjxKqf1oiwYSCgAAeBR04AAA4FHWqwN/Zp2Oey3QluJRSu1HW4pHKbUfbbFYFw0cAADAyoGEAgAAHgUdOAAAeJQ17cCZ+UFmPsPMZ5n5ybU8dv74zzLzCDMft16LMfMrzNyT/1u/Bu3oZOZXmfkkM59g5q+sV1uKAeKq2lI2sUVcVVtKMq5r1oEzs5+I/oSIHiKivUT0GDPvXavj53mOiB50vfYkER0xxuwgoiP57dUmQ0RfM8bsJaI7iOh389/FerRlRSCuH6IsYou4fojSjKsxZk3+EdGdRPSytf0NIvrGWh3fOm4XER23ts8QUVvebiOiM+vQpheJ6IFSaAviitgirt6J61pKKB1EdNna7su/tt60GGMG8/YQEbWs5cGZuYuIbiGit9a7LcsEcS2Ax2OLuBaglOKKh5gW5urP6JrlVTJzNRF9l4i+aoxRBbDXui3lzHp8l4jt6oO4rm0H3k9Endb2pvxr680wM7cREeX/jiyyf1Fg5iBdPRG+bYz53nq2ZYUgri7KJLaIq4tSjOtaduBvE9EOZt7KzCEiepSIDq/h8QtxmIgO5e1DdFXbWlWYmYnom0R0yhjz9Hq2pQggrhZlFFvE1aJk47rGwv+niaibiM4R0R+uw4OHF4hokIjSdFXTe5yIGujq0+MeIvoBEcXWoB1309Wh1gdE9F7+36fXoy2IK2KLuHo3rphK
DwAAHgUPMQEAwKOgAwcAAI+CDhwAADwKOnAAAPAo6MABAMCjoAMHAACPgg4cAAA8yv8HQ75ver1X9YoAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The second layer: pool layer\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAYBklEQVR4nO3deXBd5XnH8d8jybZsWcaW8crisNiYLTGJAiE1LTQJOJk2Jm1hgMyUpjRuO9Ms07Sp072dNkMznaaZlmTiEmKaSUi6MTiELMa0wCSEWCSQsNrg2GDFxhteMFiypKd/+NLKRs+RdLdz3qPvZ8aje++jc85jPdePj+597nvM3QUASE9L3gkAAKpDAweARNHAASBRNHAASBQNHAASRQMHgES11bKxma2Q9BlJrZJudfebs75/sk3xdnXUcsik2aRJGcGMDYeGwpAPDI47jyM6rH7vC4843rpOmtLhU6bNGnceSbG4QJ5xGtR2sD/e7ujROHbStDgWpNL3yks62nc465k0rtq2dnZ42+xy17Vz6pEwNr21L4z1e2sY27+7M4wNtmck0xKPdPdv693j7nNOfLzqBm5mrZJukfQuSdslbTSzde7+ZLRNuzp0ib2j2kMmr23ewoxg/ITwlw+HscG9+8adx8O+IYxVU9cp02Zp2eUfGXceKRmcEnfpgSlxzzz5vm3xdr0/C2N9l701jA21jXy8R//nM+E20vhr2zZ7lub/2Ycz95m6yy98OowtP2lzGHu+f3YYW/f5XwhjB86NT7i8PT5Re/63/mjEJ1ItL6FcLOlZd9/i7v2SvippZQ37QzFQ1/KitiVTSwM/RdILw+5vrzx2HDNbZWY9ZtZzVPGvJCiMcdd1oC/+DQGFMmpth9d18BB1LbqGv4np7mvcvdvduydpSqMPhyYZXte2KRP3fY2yGV7X1k7qWnS1NPBeSacNu39q5TGkjbqWF7UtmVqmUDZKWmxmZ+jYk+A6STfUJasCa2nPehtZOrByWRjb875XwtjU9ngiYeatC+LteuNfc/1HT4SxDBOyrq92xW8iS1LbkXhCYOaXHgpjA1Xms/+seGJpxrZq9zrxavuhS+M37CXpmVfmhbF/O3d+Vcec2/JwGOt4X3cY610x/mNV3cDdfcDMfk/St3VsJOk2d6+qY6A4qGt5UdvyqWkO3N3vkXRPnXJBQVDX8qK25cInMQEgUTRwAEgUDRwAEkUDB4BE1fQm5kR04OplmfFzPhq/qf/JOd8NY988+MYw9ujd8fG4oml9DHRkrgGlri/Go4LV2vnRt4exG25aH8a+8adX1D2XsrpqevaQzbcvmFH/gw7F653sPT9rXDVeCyXCGTgAJIoGDgCJooEDQKJo4ACQKBo4ACSKBg4AiWKMcARtC+JVyM78cHwJJkm6cHq8OuflU+MxoY/8czxSNl/fyzwm/p+3xOOALy+MR7jm/nNjfsb+9jeFscc+/tkw9tvbL21EOsmyyfFo3n/9/OfC2B8sHW3kMr4mZiMMdNR38JczcABIFA0cABJFAweARNHAASBRNHAASBQNHAASNWHHCFuWnRfGnv+LeLtruu7N3O/M1vjCxWd8/YNhbMmnGRWsh74Z8TnJ/Af3hbHxrwM3Nv1/eSCM/bg/HmF74lPx6pQtE3ANyg933xfGrv3+qjB2xpHHGpFOpp/eHI+ADnRVfUHqEXEGDgCJooEDQKJo4ACQKBo4ACSKBg4AiaKBA0CiJuwY4XOr47/6m+duD2NLJ+/I3O
8ND8ajguf+0aYwFq+1htfJuP7wjOf7wtjQj7NXkqzWuY/Ez6WlUzeGsV/5j4+GsYVHGzXYWGCt8Xjkv3z5PWHsjE82fwS3/6ruMHZ0dn1HBbPU1MDNbKukQzrWfwbcPf5bIRnUtbyobbnU4wz8CnffU4f9oFioa3lR25LgNXAASFStDdwlfcfMHjGzET/PamarzKzHzHqOKn59EoUyrroO9B1ucnqoQWZth9d18BB1LbpaX0JZ7u69ZjZX0noze9rdHxj+De6+RtIaSZphXRNvEYc0jauu02edSl3TkVnb4XWd8gbqWnQ1nYG7e2/l6y5Jd0q6uB5JIV/UtbyobblUfQZuZh2SWtz9UOX2lZL+um6ZNdiKs58KY7MmxSsK/uXW92bud/EH4tXPBoeKPyyYQl37p8fnHZMOZswY1uCl34hXmFs6dV0Y+4d18fNl4UPNfT4UvbYXnb0tjJ3+ppfC2FOfbEQ22bb+WmOeZ+NVy0so8yTdaWav7ecr7v6tumSFPFHX8qK2JVN1A3f3LZLeVMdcUADUtbyobfkwRggAiaKBA0CiaOAAkCgaOAAkqtSrEe79YDz69ebp/xnGtvWdHMa27unKPOaiod7RE0NNvCUe4Wq775GGHPPTf35LGPvvl+MLZM9v8qhgyk6aHF/k+am3NG+Fv9fs+NjbM6JHm5ZHFs7AASBRNHAASBQNHAASRQMHgETRwAEgUTRwAEgUDRwAEpX8HHhLZ2cY8/fuDWM/658Vxp55eV4YW3TtT8aWGGrS3xmfW5z8QDxrX+208HNfvigzPrPloTB216evCGNTxRz4cJdd8EwYe/RfLwxjc9WYK8/v/Gg8631oaTFmvbNwBg4AiaKBA0CiaOAAkCgaOAAkigYOAImigQNAopIfI9z+O/Ho0a+cfn8Y6562JYzd/1sXZxwxvjo2xsdb42Vhh9ri2MDW5+uey4OX/VNmfPn9Hwpjp+xjVHC4lqnxMOfU1ng0b+5nGzMqmOXQWWnXjjNwAEgUDRwAEkUDB4BE0cABIFE0cABIFA0cABKVxBih/9yyMHb+1U/Hsanbw9jSyRnjgD9gxcFmOPCG1jC24B+aO1K2eWB6ZnzuPVMyokP1TSZxz/3iF8PYVQuXNS8RSa2Lz8yMe3vatRv1DNzMbjOzXWb2+LDHusxsvZltrnyN12ZFIVHX8qK2E8dYXkJZK2nFCY+tlrTB3RdL2lC5j7SsFXUtq7WithPCqA3c3R+QtO+Eh1dKur1y+3ZJV9c3LTQadS0vajtxVPsm5jx331G5vVNSeAkbM1tlZj1m1nNUfVUeDk1SVV0H+g43JzvUYky1HV7XwUPUtehqnkJxd5fkGfE17t7t7t2TlPVGEIpkPHVtm9LRxMxQq6zaDq9rayd1LbpqG/iLZrZAkipfd9UvJeSIupYXtS2hascI10m6UdLNla931S2jEWz/xWlh7ML2/WFs6eQXw9jlX/9YGFush8eUVwk1ta4zn6v2EsTV2fTFt4SxDvtB5rZtR8JfRlLRtNqe8Y0PhrEl2tiow47oqY/Pburxmm0sY4R3SHpI0jlmtt3MbtKxJ8G7zGyzpHdW7iMh1LW8qO3EMeoZuLtfH4TeUedc0ETUtbyo7cTBR+kBIFE0cABIFA0cABJFAweARCWxGmH7nniE694Xzgljd99zSRg751OPh7G01ydLx+F58WqEnaedGsYGXohXmTx03dvC2Psv+m4YW73lV8OYJNlQ8mOETdPaEV+4uHXOnDA2uHt3I9IpNc7AASBRNHAASBQNHAASRQMHgETRwAEgUTRwAEhUEmOEcz73UBhrfSAeIxyaeiiOHYpjaI5Jr8SjeT9beXoYOzI7jk2+KL5Y9Te2nR/Gpt4xM4xJUhvDpWPmQxbGNv1jPB7a+d3FYWzBvfHKoluvCa87IikeaSwDzsABIFE0cABIFA0cABJFAweARNHAASBRNHAASJS5N2+VNTPbLWlb5e7JkvY07eCjK1I+jc5lkb
vHy8KN0wl1lSbWz3I8qGv9TLRcRqxtUxv4cQc263H37lwOPoIi5VOkXKpRpPzJpX6KlD+5HMNLKACQKBo4ACQqzwa+Jsdjj6RI+RQpl2oUKX9yqZ8i5U8uyvE1cABAbXgJBQASRQMHgETl0sDNbIWZPWNmz5rZ6jxyGJbLVjP7iZk9amY9TT72bWa2y8weH/ZYl5mtN7PNla+zmplTLajrcccvTW2p63HHL1Rdm97AzaxV0i2S3i3pPEnXm9l5zc7jBFe4+7IcZjnXSlpxwmOrJW1w98WSNlTuFx51fZ21KkFtqevrrFWB6prHGfjFkp519y3u3i/pq5JW5pBH7tz9AUn7Tnh4paTbK7dvl3R1M3OqAXUdpkS1pa7DFK2ueTTwUyS9MOz+9spjeXFJ3zGzR8xsVY55vGaeu++o3N4pKetyI0VCXUeXYm2p6+hyq2sSl1RrsOXu3mtmcyWtN7OnK//L5s7d3cyY86xOYesqUdsaUNdh8jgD75V02rD7p1Yey4W791a+7pJ0p479ypinF81sgSRVvu7KOZ+xoq6jS7G21HV0udU1jwa+UdJiMzvDzCZLuk7SuhzykJl1mFnna7clXSnp8eytGm6dpBsrt2+UdFeOuYwHdR1dirWlrqPLr67u3vQ/kt4jaZOk5yT9SR45VPI4U9JjlT9PNDsXSXdI2qFjl87eLukmSbN17J3szZLuldSV18+HulJb6lrsuvJRegBIFJ/EBIBE0cABIFE0cABIVFPnwCfbFG9XRzMPWSiDXfHffaAz472IjP9mJ+23eLOXDo/4+BEdVr/3xRuO00Sva1HUva4nTfX2+TPqtbtCsoyflin+Nzn0zEAY885pcWx+vF1Lxvj4oU279vgI18SsqYGb2QpJn5HUKulWd7856/vb1aFL7B21HDJpB1e8LYztvHwwjNnUOLbwrklhrOM/Hx7x8Yd9Q7iNRF1TNVpdpfHVtn3+DL3t89fXL8ECamsZCmNZDbXvF3aGsYFL3hLGjvzhS2Gsc3JfGFt/xWe2jfR41S+hFHSRG9SIupYXtS2fWl4DZ5GbcqKu5UVtS6aWBj6mRW7MbJWZ9ZhZz1HFvyKgMKhreY1a2+PqeuDVpiaH8Wv4FIq7r3H3bnfvnqQpjT4cmoS6ltNxdT1pat7pYBS1NPBCLXKDuqGu5UVtS6aWKZT/W+RGx54E10m6oS5ZJWzL310axt6y/Jkwdkn7wTB2/xfiBddO+mH87y8eWMpEXcdpz6q45kNt8Zza3B++HO/0+z+uJaXIhKtt1pSJJL3cH//2OOXKrdUdc8MjYWzwY2dXtc/wWNVu6O4DZvZ7kr6tYyNJt7n7E3XLDLmgruVFbcunpjlwd79H0j11ygUFQV3Li9qWCx+lB4BE0cABIFE0cABIFA0cABLFVenH6fCvXpIZn3n+3jB2+ax4jPBTG68KY4s/+70wVuWoIMZp+x+/PYyd/564rp9f9PUwdvnf/0EYO3XHaWFsYNsLYQzHa7PsMcKp79sdxrK3rM6Rb86Ng+8bcb2qTJyBA0CiaOAAkCgaOAAkigYOAImigQNAomjgAJAoxghHYJMmh7G9172Sue3XLvjXMHb11z8SxhZ/aOTrV6I5dnwsHhOUpEXv2hrG/u60u8LYJ3dfFsZO2hYPgTIqeLys61NOa+sPY4/vWJC530WHX6w6p2ocfOuRMDa/iv1xBg4AiaKBA0CiaOAAkCgaOAAkigYOAImigQNAohgjHMH0+2aEsafO+lLmtusOd4UxRgXz1TpnThgbuDS+qLQkvXPO02FsY98pYezeW+MLHi/8/nNxPpnZTDxZY4RPfmtJGFv0t/FKno2y5SvLwtiS+fUdW+QMHAASRQMHgETRwAEgUTRwAEgUDRwAEkUDB4BETdgxwpYLloax/qH9YeytP7w2c7/tX5gVxqaJMcJGaztlYRh78m/ilen+/o3/nrnf+w+eE8ZuWX9lGDubC1KPWdao4P4jU8PY797wjTB299
/G/x5rse2v4tUrz5o//osTV6umBm5mWyUdkjQoacDdu+uRFPJFXcuL2pZLPc7Ar3D3PXXYD4qFupYXtS0JXgMHgETV2sBd0nfM7BEzWzXSN5jZKjPrMbOeo+qr8XBoEupaXpm1Pa6uB17NIT2MR60voSx3914zmytpvZk97e4PDP8Gd18jaY0kzbCu+F0KFAl1La/M2h5X13PmUdeCq+kM3N17K193SbpT0sX1SAr5oq7lRW3LpeozcDPrkNTi7ocqt6+U9Nd1y6zBnvnDaWHsmhnxynObd8Ur2klS151pjwqmXtd9P396GJvZtT+MzWnLXo1w/9F4jG3xl18OY0U6hS16bbPGCHftnx7G7l7RmFHBLG+4rHmjgllqeQllnqQ7zey1/XzF3b9Vl6yQJ+paXtS2ZKpu4O6+RdKb6pgLCoC6lhe1LR/GCAEgUTRwAEgUDRwAEkUDB4BElXo1Quu+IIxdcc6mMHbRtHhE6NFrakoJDfbipfEo2m+e8WgY++ret2Xu94m154exk3seGjUvjK7NhsLYoltam5jJMds/Ea84eJqKMUbIGTgAJIoGDgCJooEDQKJo4ACQKBo4ACSKBg4AiaKBA0CiSj0HvunGjjD2yzPiOc5PbIiHvZfoBzXlhDq4+MIwtHDJ7jD23CvxUsDf2xB/ZkCSznpwbxgbzNwSw01ujX9azx+YGcbmPPijBmQjDV12URg77V3FmPXOwhk4ACSKBg4AiaKBA0CiaOAAkCgaOAAkigYOAIlKfoxw/69fGsaWdz8Rxua3HQhjp99TU0qog5b29jD29AfiK8Rf2rk9jB3oj/c5//vZw4CDT8bLD+N4WVeXnzPl5TDW996djUgn076PHw5j8dBpcXAGDgCJooEDQKJo4ACQKBo4ACSKBg4AiaKBA0CiRh0jNLPbJP2SpF3ufkHlsS5JX5P0BklbJV3r7i81LMkF88PYwDXxKnFnTdsTxlb/1/vD2Jl3l/8q40Woa5affuLNYez8c38axq6duzGM/f73rg1jZx08OrbEEpB3bdtb45/lfU8uDWNL1NOIdDLN6YjHCFMwljPwtZJWnPDYakkb3H2xpA2V+0jLWlHXslorajshjNrA3f0BSftOeHilpNsrt2+XdHV900KjUdfyorYTR7Wvgc9z9x2V2zslzYu+0cxWmVmPmfUcVV+Vh0OTUNfyGlNtj6vrgVeblx2qUvObmO7uksLPzrr7GnfvdvfuSZpS6+HQJNS1vLJqe1xdT4qXLEAxVNvAXzSzBZJU+bqrfikhR9S1vKhtCVXbwNdJurFy+0ZJd9UnHeSMupYXtS2hsYwR3iHpckknm9l2SX8h6WZJ/2ZmN0naJimez6qHyZPC0L4XZ4SxtS8sD2MLHxmqKaXUFaGuLZ2dYWzonHjVuruXfLOq4/3Ng/FLPW09P87cNqVnS961fWVgchj76Ypbw9hVWlb3XOy+U+q+zyIZtYG7+/VB6B11zgVNRF3Li9pOHHwSEwASRQMHgETRwAEgUTRwAEgUDRwAEpXERY398Cth7OwvxRejbbm/+aubYez81fij2gO7408Brj04N4y1W7wSXtcT8fNo6HDaq9Kl4qqFy/JOoVQ4AweARNHAASBRNHAASBQNHAASRQMHgETRwAEgUUmMEQ7uiS9cPGnPyfGGs7vife498YpTaDYfGAhj536qN4x97QvvDGPWuzuO7X5sbImhYZ790kVhbOmfx/8mN/32wjA2OD1eK3KJto8tsURxBg4AiaKBA0CiaOAAkCgaOAAkigYOAImigQNAoszdm3cws906dkFVSTpZ0p6mHXx0Rcqn0bkscvc59drZCXWVJtbPcjyoa/1MtFxGrG1TG/hxBzbrcffuXA4+giLlU6RcqlGk/MmlfoqUP7kcw0soAJAoGjgAJCrPBr4mx2OPpEj5FCmXahQpf3KpnyLlTy7K8TVwAEBteAkFABJFAweAROXSwM1shZk9Y2bPmtnqPHIYlstWM/uJmT
1qZk29jL2Z3WZmu8zs8WGPdZnZejPbXPk6q5k51YK6Hnf80tSWuh53/ELVtekN3MxaJd0i6d2SzpN0vZmd1+w8TnCFuy/LYZZzraQVJzy2WtIGd18saUPlfuFR19dZqxLUlrq+zloVqK55nIFfLOlZd9/i7v2SvippZQ555M7dH5B04ir2KyXdXrl9u6Srm5lTDajrMCWqLXUdpmh1zaOBnyLphWH3t1cey4tL+o6ZPWJmq3LM4zXz3H1H5fZOSfPyTGYcqOvoUqwtdR1dbnVN4pJqDbbc3XvNbK6k9Wb2dOV/2dy5u5sZc57VKWxdJWpbA+o6TB5n4L2STht2/9TKY7lw997K112S7tSxXxnz9KKZLZCkytddOeczVtR1dCnWlrqOLre65tHAN0pabGZnmNlkSddJWpdDHjKzDjPrfO22pCslPZ69VcOtk3Rj5faNku7KMZfxoK6jS7G21HV0+dXV3Zv+R9J7JG2S9JykP8kjh0oeZ0p6rPLniWbnIukOSTskHdWx1xZvkjRbx97J3izpXkldef18qCu1pa7FrisfpQeARPFJTABIFA0cABJFAweARNHAASBRNHAASBQNHAASRQMHgET9L4xn1/daUrY3AAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The third layer: convolution layer\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAVEAAAD6CAYAAAAY2nTbAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAzOklEQVR4nO2deXQc93Hnq+bCHDgGB3EQJAGSICmeuihK1C3LlGVdtC2vY1m213EcRk6UdSwnL8o6u8r6vdirPMdxNrEd07FyOFIcRRZjRaZ1WldESiIpi5R4kyBIXCRuYnAM5vrtHwQlVH8bwAwbDQ7A+ryHR/4KNT09Nb8udFf9flVsjCFFURTl3PCc7xNQFEWZyagTVRRFcYA6UUVRFAeoE1UURXGAOlFFURQHqBNVFEVxQFZOlJlvZeaDzHyEmR90+6QuJNS27qL2dQ+17Rl4snWizOwlokNEtIGIWohoBxHdY4zZN95r/CUhE6wqmfTNvZ6MGJf7B0Gn1JMGWVsqCLLTiRDITL/PIsBzMMXy+MmOPkr1D7Hd+U4152JbX3HY+CujQmYMnq5JWf4+evDDB7rx+IkKmzfN2Mnkewa78Hsyw3ExjtMgJczItNiWKHf7BqNBU1hTOOlx0xZ7R3wJ0PHaGG2ONwmyU+kCkHX2FotxoB+PlSzE+5/4yZYuY8wcPOOp51zmbiAaMuHqIiHL2MzdkYRfvlciuylTVDIEsv44+opgu7RnfI4PdNgvdZKdfZTuH7Q9EXw1so6IjhhjGomImPmnRLSRiMY1VrCqhC79/ucmPXBZUDrNe6vfBJ27C/tB9lDnSpBtbUZZ8lnpERivc0rcfFqMm/5os+25ukTOtvVXRqnhO18SsviIH/SSXZY/KoV4Adf/BOdE0+fR2ZoEXrCemJw6S/+hD3Qyew6I8ZvmRdBxmZzsW1hTSHf+051CliG00WAqIMZXRJtAp8gTB9l90VaQfadnEch+uOUjYlz3zDDotF8TBtm+hx84DkL3yHnuhquL6PoffUrIrLYkIjpyolKMC5pRx9g8Q994yzsge+HQRSBb9ucDYnzgvnLQCdRI33Tij3+IbzhKNo/ztUTUPGbcMipTnKO2dRe1r3uobUeZssQSM29i5p3MvDN5Gm+rlXNnrG3T/WrbqWSsbeN9ePeoOGOsfRN9eEc9G8jGibYS0fwx43mjMoExZrMxZq0xZq2/BB81FFtytq23WG2bA5Pad6xtg1GMnynjkvPcDUQxbzEbyCYmuoOIljDzQjpjpE8T0WdyfSO/FwOS1aGYGF9W0AY6jw/gE0LLcCnI0k9jXGPuK51ifGIjxtzjQzLekslMW96DaIpsmzyFk7NwgYwlh/8dE33H//sIHuw0xp8Ka2Mgi3fJ4524vQx05pZeKgU7t+H7uYtj+xb58O60skDao8p3GnSuCmF48pNHN4Ks+UdLQNbwX+1ifHhTDejUXdEMsn0Pg8hNpmTutp8uRuGId9LX3fFRzJ/s6KoDWeUvMHFHrUfFMFCDfzwDgZQYM4+fgJ/UiRpjUsx8PxE9S0ReInrEGLN3stcpk6O2dRe1r3uobT8gmztRMsZsJaKtLp/LBYna1l3Uvu6htj2D7lhSFEVxgDpRRVEUB2T1OD8VLCvuANmqSIsYv5uoBJ2/brwZZKfeQ71l/4mB/Na768U4vhqXWGRiloXq6WlNLJ0T1hB3oBqXPcV6ZRa/OI6B8cwgLtJff+khkLUNYlJqaLVcvD80ggmpU1da
dn08kN9dFAwRJS2ruIfT+LlqC/rE+LYIzr0HWj4KsgNPL8Vj/WQ7yE784Xoxvuejr4DOwoJOkP0KJPmHdfPCYA8mRb2D8jtILsLk3rLwSZC9/OSVICttx+Rp98fkxpxUEo9/Z8N7YvxoYPzlb3onqiiK4gB1ooqiKA5QJ6ooiuIAV2KizAYW15cHBkBvSYGMa/ys5wrQ6ejBxbgN/4bVnoZW46L8gfkyBpe2KaRBAUuFnDz/s5JJeSjWHRGy4nK0B++T1XJOrcN4ZKQSi7u88WuM2zX8K8aV2m6XsSzvEvx+v7hUxvv+OojnmV8wZSwx0blBLHW1KiQXun+9/cOg8/r2FSBreBg3G4zchnPerJeL978QxYXlh5O44STfSaS91Novr2dfD8blUyVyoft1i4+Czrff2QCyhn+zqX1Sg/kTWiAX4FeU4maS64oOivHPvRoTVRRFcQV1ooqiKA5QJ6ooiuIAdaKKoigOcCWx5PNkqNySRPDblJU/mYqKcddIBHTm/BwrrHi7caHtqVuKQBZcLAP0iRH8uB6vTCyxTRuNvMNSaWokgZ8rfEp+Dt8dXaCzKIpJk0PPLQPZiVuw/F70YrnY265K11x/r9ThFOjkE0yGQpYWHnaL2rtTsoXI8QGsYFX2Lm7a8M2fB7JDn8D5ducCueFhWxyrE73Qi4krov02svwhnfZQX5+8xr02rWdC5XJTTDyNyae5P8FNEFRbDaLDn8PvJhWVc/XbDc+Bzq/6pX1jaayadRa9E1UURXGAOlFFURQHqBNVFEVxQFYxUWZuIqIYEaWJKGWMWevmSV1IqG3dRe3rHmrbM+SSWLrJGIPZCRsKPClqiMiA/F3F74De33ddJ8Y7DteDzsJeTEZ03IjtEuKVGKH2xGVA2mNT4p/zo2hT1rZlr6FgVO6e8L2FSbW+i6Q91tkkkXyMNutbjbIFW1F2vErumHn79u+CzkOnbhDjgTTuPJkmsrKvz5OhsoBMiB6JV4Fee1xWtTr5H5j4qdnRA7LGLy4A2e9e9SzIrgvLxNKvBjCJ1DuSN722sp67lGasHFaLO4HmR+VOurffwBYqy/bYJHpsLuZ0JAqyj16+R4xXF7SDzkunl4uxsWmdfRZ9nFcURXFAtk7UENFzzLyLmTfZKYxtjTrUa9MATRmPnGyb7s/3/ed5x4T21XnriNzm7sDsnLvZPs5fa4xpZeZKInqemQ8YY14dq2CM2UxEm4mIalaWzoDFlnlDTrYNLq5V2+bGhPYda9vqlWVq29zIae4W1M2blfbN6k7UGNM6+m8HEW0honVuntSFhNrWXdS+7qG2PcOkd6LMHCEijzEmNvr/W4joGxO9ptATp2sLZXC8LYUtJt7umi/GxbuxR3TGsoOEiGhgHgZ5a5adAtmpbst72iSWCkPyEc7rsdlC4RLnYlsz4qHUUbljxmcX8y6Rdtvbgbs5hgbQ3tH38O9qx6XYB/y3r35RjL/ZeQ3ovNUhEy6DKZtdJi6Sq339nKYqv0xqzAtgQu7nx24X44JhnFepKO60i16FczRos4trz4i8Lt7qrQcdnwd3iE0n5zJ3KcPkGZLzq2EJ7ghbU9oqxrH3sMylCeJcOvRlTAIWVGMI4c+q5dx9uOM60GkcKBfjkfT4rjKbx/kqItrCZzJfPiJ6zBjzTBavUyZHbesual/3UNuOMqkTNcY0EtHF03AuFxxqW3dR+7qH2vYDdImToiiKA9SJKoqiOMCVUngjxk9HLT3kX+y6CPS6Y7IsVlkbJnW61mAZrMJ1uEEi6LMps2ZJJHl9ePwCy+vYJvmUTxSczlD9L+QujyP3oI02rt4txk+9i09evjYMzvuxVRL1rcQkxueju8T4wZY78PiWJB1Tftu20BOnayOyt863W28FvcE2uUNswS9PgM7Br84H2VUlTSDz2Owaa03I3WDWXu1ERAsjmPDKe3j0ZwxXlB8HtUe3rRfjFS/i7qTuG7CsoHfYxk4VaKfutNTrSWIJzqqQ
7LvknyCRp3eiiqIoDlAnqiiK4gB1ooqiKA5wJSbalwzRU+0yBneiB/tkVzwmK9F4Exh3GCnFOMeNVRhHeaVlMcg8lphcURgrxmDcLr8ZKWM6cq+MgS5qwHYpi4NyEfNNFx0EnTeLsfqQf1chyDas2wOynSNy8f6+LlzobIy0prWne75hCM/xQBf2LV/0hNzIEF+KGxnCltY0RERR/zDI3u7H76A3ERLjeeE+0OlL5k0Vp6wJhhK07BIZP358/2WgV/uinDfGZs/98BycS4lyzIt8s34LyA4nK+SxbNqPpDLy+OkJ5m5+z2pFUZQ8R52ooiiKA9SJKoqiOECdqKIoigNcSSwl0l5q7o0Kmf8NbGER3rJNjDM3XAo6nMFTfL1tIcgGejDQHiyWFZoCNr3RZxoef4aKquVC4DtrMPHTFJdVaH684L9AZ9GhpSAb/ngCZPeXHADZy/1y80TAh7YdTsiAvcnvtfbUNhKlh45tFLLYSZy3JZZk56kr8F5kbhgXebcMRUG2IIJtREYsc97HNglX48ql6yrxET/tPyQrMpW9jZ8j8rPtYtz5m+tBx4PTlOYvwopQlxRgpbK/OSWTWR6bTSCpjKxcNtHU1TtRRVEUB6gTVRRFcYA6UUVRFAeoE1UURXEAGxei/czcSUTHiaiCiLLrSX1uuHH8OmPMnCk+5pShtnWPMbYlcte+bh17pth3Vs1dV5zo+wdn3mmMWTtTj5/PqG3dxc3Pr7adXXNXH+cVRVEcoE5UURTFAW470c0z/Pj5jNrWXdz8/GrbmX18gasxUUVRlNmOPs4riqI4wBUnysy3MvNBZj7CzA+6cPwmZn6Xmd9h5p1Tffx8R+3rHmpb95i1tjXGTPpDRLcS0UEiOkJED06i6yWio0S0iIgCRLSbiFZk8z7Z/hBRExFVTOUxz9dPLrZV+7prX7Wt2vZcfiaNiTKzl4gOEdEGImohoh1EdI8xZt84+uuD0cC2krmyDaldu9yBlKXCykms6MIxbKlgikIgS2D3EVpYLKu6xNL4ugrfkBg3N6eopyczLV1CcrUtEZG3KGJ8FTYf1nrspOUjYGdeMkGb7z6FH92miBCZAnnAuRFsh1Huke0ampqT1NWTnrYOLLnaN+CPmGAgapGijThlMYjH5oEujQZPR7BFtfGCiKydKNIhm+/Ji7LEsbYuM02L7c/FL3jC4W3+aJn8hc1HA5v4UKk8jL29qyzXMhFRzKYKXPtIiRinRmwqYlneMtXTQ+mBQdu5m009rXVEdMQY00hExMw/JaKNRDTehV5bMjdCn3vsZiH02lzFb3bXi3H6L7FPT/BXWOYtsX41yI59Ck/kRx+SSbqXBlaAzqZS2T/9I7e5uZECyNW25KsopZpv/J4UGvxufe3ygvXZfP/xpdhzintsetGfRieRXCz/uH3jiqdA594iWQ5u3Uewf7jL5GTfYCBKV636HSnM4Lz1dsg/GCYcBB0ewAu6fx32oo9H0bapsPyu+lZi7yBvSRJkxz7zdWw+5h45+wV/tIzqfucBKbVxoslCKUyX42f9/OXbQfZAOT7BvzJcDrJvHr5NjDuOoQ4n5HfQ9pffxRMdJZuYaC0RjZ39LaMy+abMm0bjEA8P9Y5Yf63Yk5NtmXlnOoZNu5RxmdS+Y22bTKltcyBnv5AenJ32nbLEkjFmszmz1eqz4VIshKqcO2dta4xZ6y2KTP4CJWvG2tbvU9tONWP9gjcyO+2bzeN8KxGNfQ6ZNyobjx12wgMxfFQ/9cI8Ma472AY66VUNIOtahY+cSxeeANm3Ttwuxhmbx95eS+vZk8nnQcdFcrWtLZ5ubPla0Cs/q/9arLRuRtCO8WL8u1q/ph1klSFZXf8Hx24AnT/rKRbjltj3QcdlHNuXkzYB4ZTl8dqLNjOF2GmBM/jsGhhEWfeN8knuiev/DnTu3fFbeF7Ty5T4BcZIBYU65NwdsEkB3F2yC2RPxLDjxXf23Qyy5EE5Lwu7bEJdlfJ7YZuc
wlmyuRPdQURLmHkhMweI6NNEhAGwUYwxNmZRxiEn2yo5o/Z1D/ULo0x6J2qMSTHz/UT0LJ1ZpvCIMWav62d2AaC2dRe1r3uobT8gq25XxpitRLTV5XO5IFHbuova1z3UtmfQbZ+KoigOcKXvKpOBdaG/ProA9OY2Sp1UYxPoDH3iSpAV3XISZB+u2g+yjoQMIO89XQM6z52QrX/7E6+BTl7Boz9jsF0Deolcp4ipJ6JUYyHI6i/D5F7PMCZJDh6vlufQaZPs+16LGHe15XtYzEBfZ88ArqVNz5XrCr3dMdAZWFUNssLDfSA7cB9mTb5/zaNi3JwqAx3PbmzlPBOA3K6NBxqYL+fJt69/HHTWBHBt7t3bbgVZqg/n5bIn5ffVfl0x6IRb5Yl6cKnqB78b/1eKoijKZKgTVRRFcYA6UUVRFAe4EhNNGw8NpOWupeI9uIup8PFtYsyXrgSdlg24GHnjHFxYH7RZtbunV+5C641jAZLr5jWKcUcgz7esponMkPza7GKileVyf3fL4UrQCffi65pOYP2K4Amb/fQXyS18Df+IccHUcblX3pgJAkt5ABsiTljmUQLP2dMv6waYAEacgx1YOOf4xgqQfflDz4Ksztcrxrc9+xXQqT1oswlgBsDWy9mmpEeoQsbz4wbt+9QgxumDb6Os9hWbeVko57N3GH1M30qZr0lPsAlT70QVRVEcoE5UURTFAepEFUVRHKBOVFEUxQGuJJZiiQJ6qXmJkNW+ipXPreHcVAlGbzmMCaMNJe+BbF8cShnS5WUyAdWfwsTS/tOyulQ8bbcsPX/gFFNBhyz9PbwakxiF1gRZsU2CJIll1XkYZenlWEW8+AVZ1ix9AL+Tlj+5WoyTj7wBOnlFJgOL600I52T6sExGelcuA52heZjkuPluLGT0R2VHQbbstfvEeM52vEyL92FVrrzHEFz0I2VYHumry+WGlw1hTCRf+Rwm25Zsx0LYZse7IOv4mpyXXptcMpcmpMA3fhknvRNVFEVxgDpRRVEUB6gTVRRFcYA6UUVRFAdklVhi5iYiihFRmohSoz1TxmfAS/xfUSHyHD8IamaVrKB06HPo09+5+W9AZtfBb66/D2S/aJNdQecVok6RX0aVvRP1AXCBXG3LGSLvsNzm8clV2OXw0W3rxbiwCb9qm26yVLoHv4NECVZ7qtgtd4J0fmkd6Ky8XX7nJ3+GFZHcJif7MpPxW4zS1QNq3qWLxXikCu1z6lOY7PtkGSaW/rQDO9eGX5PHix7KzwZvOfsFJjKWVs++KkwG3ReVibt7j90FOuVvYALYd6ARZMceuhpkiRLLNT4HM0sFQZmIZc/4reVzyc7fZIyZ1n7CFxBqW3dR+7rHBW9bfZxXFEVxQLZO1BDRc8y8i5k32SmI3uhD+fn4kafkZNuU2jZXJrTvWNsmUvhoqUxIbn5hlvadz/Zx/lpjTCszVxLR88x8wBjz6lgFY8xmItpMRBSqnj9+AEGxkptta9S2OTKhfcfatiRUo7bNjZzmbrB2ds7dbBvVtY7+28HMW4hoHRG9Op6+fyBDNa/JxAMHsZx/+42y7cGiuhbQ2Z3AXUZ2pbHs+p5XhORfvtbBEtApD0odm8pcrpKrbTMhQ8PLZYLmhTbcMVPQKb9aL+Y5aKAek2h8VT/ICh/H9gmeIbmjI/YhvMsIeGS5Ns90G5dytK8haA+S7sbEEi+Su+NOL8JSgb+96mWQBQjL1z32GiY+5rZLPe9gAnSM5/xH4nKdu8ZLlIjKOfelFW+C3i5LnufX25aCTm0r7mTkEpynI4swmelvlrvQjB+/l+E+6a9Manx7T/pNMHOEmYvO/p+IbiEi3OOn5Iza1l3Uvu6htv2AbO5Eq4hoCzOf1X/MGPOMq2d14aC2dRe1r3uobUeZ1IkaYxqJ6OJpOJcLDrWtu6h93UNt+wHnP7CiKIoyg3Gn7/xIgrxHW4UsM68K9OKWjUd/vmgL6KwJYNA3EzgFsicKsdSetaxd
gReD0XVhmTgIePK7N7rPl6aqOfKzdnTbJH5CMkGSKMGsTmYOJiwGOiMgm3sUk0ZHPyOTgh9fth10XmiRCYHhlCvTzVV89QtAlgzIcoHd16AdNxbtAdmmg/eCrPQ9vI8pbLIk99KY1DZ+LFmY93gNmSJ5fVX58br9ZvPtYpyuxh1FoZcPgKzpDy4BmRnG69kslkvZAnY+pteSvE6PnxXVO1FFURQHqBNVFEVxgDpRRVEUB7gSpDLBACVX1QlZMoJvVXVNmxgfSNSAzlXBDpDtT+D2vI6hIpBZW2RUBDG25/PIxb98HhaE5wKTIb/lnKnLpim2RSXegHElhibgRDyAsba2G9G2VWvbxfjtnvmg09srqxGl03kexzMZ4mFpp0w5xptjC+RC7GV1zaDz89gakLW+jfO77qBNb4psCon5Zt79j9+fovlzZQ7ip21XgF7jbrmZoWw/XpT9t2P1q8CVuDEisb8UZEmfjHcme/H6KRiS7zlRcbeZ900oiqLkEepEFUVRHKBOVFEUxQHqRBVFURzgSmIpFfJQz0Uy+B7sxSRGhyXQ/p2hm0Hn0SLsed4zhJWdysJYpmhBpFeMI3YNpmcY6YyHegZlT/NMIS4ornxLfrX9/Rg8jx7BaHngNB6r6WP4t3Z5RC4I39M2F0/W+pXPwEJoPIDzquMKmWz60wUvgs5PTmF1ptL9eHzvUBJknJSLv01BnifkciBtZMKm6RS2+ilqkvOt8kmbhfX3XQSyeF8YZBTBOe4LyznOHehPbHKu46J3ooqiKA5QJ6ooiuIAdaKKoigOUCeqKIriADZm6qP9zNxJRMeJqIKI3Gyn6sbx64wxc6b4mFOG2tY9xtiWyF37unXsmWLfWTV3XXGi7x+ceacxZu1MPX4+o7Z1Fzc/v9p2ds1dfZxXFEVxgDpRRVEUB7jtRDfP8OPnM2pbd3Hz86ttZ/bxBa7GRBVFUWY7+jivKIriAHWiiqIoDnDFiTLzrcx8kJmPMPODLhy/iZnfZeZ3mHnnVB8/31H7uofa1j1mrW2NMZP+ENGtRHSQiI4Q0YOT6HqJ6CgRLSKiABHtJqIV2bxPtj9E1EREFVN5zPP1k4tt1b7u2ldtq7Y9l59JE0vM7CWiQ0S0gYhaiGgHEd1jjNk3jv764lLPtqpa2cckY3PTy5baaHETAJ0QY/m6tpEoyFJ2/Xvi8j2NXeE/ryyVlerso3RscFo6LeVqWyIifyBigiHsG2PFeOVH8MbiqJTB7z5dgmXBkoUgoppiWWYwlsbXDYzI73M6bUuUu30LokETqZH9pOwuj8UFMTE+GI+CTqrfD7JAH5YZtCNZKCeqjWlte/6MtLZ0mWnasXQufsEbCm/zl5TJX9jY15uwjAfQB5gU9opPl2EpvHQQRGQsroKxGiGx5fDJ/h5KDdvP3Wzqia4joiPGmEYiImb+KRFtJKLxLvTaqlo/fe+peiGMZXAm+FlOqoNxrEl5ceg4yB46vBFk3bEIyMw+eUEkKtDwnqj8xlq//j3QcZFcbUvBUCldds3/EDK2cYaJIjlTil8+DDomgbMndvMKkLXdgOfxv2/ZIsYv9ODr3myqF+OW//l9PJC75GTfSE0RffiRTwhZxuB18/giWT/0pr04H7uerwXZ/KdtdiLadEbsuFo6mp41NjUxB/Gm5Ogffw0vFvfI2S/4S8qo/osPCKHVYRIRFTfJ67Tk9SbQSXf3gqznrstBdnoJHj9ZIu1Z0IE3YIHTcnzkse/ggUbJJiZaS0Rj2xm2jMoEzLxpNA7x8OkedFaKLTnZlpl3JhPYsVQZl0ntO9a2I702d+vKeOTsF1JDs3PuTlliyRiz2ZzZr/rZkrLZU4k7HzhrW2PMWn8A77iVc2esbQtKbZ79FEeM9Qu+8Oycu9k8zrcS0dim4vNGZeOxg4gpbaR/7k5hYO2VvqViXBPsB50HjnwKZHaPWNZHdyKiwhPyMTf6Ej6+Nm+QFw4npnXVV662tYVT+Dhf2GRpq2Lz
yJ+6rAFkw+X4+X9yB4Y4jiYqxbg/iQ4o1WeJcaenLRx6lpzsawzOrTXFqP6xwx8R446X8NF97nZsK0I24RMTtul5flp+V4uexNd1fsXm+NPLOfgFJHwSQxUlb8nDJBpqQCd+dT3IzCe7QZbsRr9j0nKOJ4txXl51214xbn9mfHtn4zF2ENESZl7IzAEi+jQRPTWesjEmu+i5QpSjbZWcUfu6h/qFUSa9EzXGpJj5fiJ6ls4sU3jEGLN3kpcpWaC2dRe1r3uobT8gq26fxpitRLTV5XO5IFHbuova1z3UtmfQbZ+KoigOcKXvvB2n07gQ9oHq58V4a2wN6FQXxUDW9MZ8kM17FRfkJkrkx+trwCB+ZqEMGJuAzSrmfMOyAtyTxnM2b1uW61VUgE5vAyaD1n7pHZD5rSuPiehEQh6vZxi/33CztL8nMe2JpZwwxJTKyPuK6wux5/k/vXeVGKfqMPHj++5BkJ34/UtAVnYAQ4XFB+UixaP3REFn7ZxjIHsPJPmHtZ97ydEh0MmUymSQ/yDmq078Piab7qg5ArId/jqQNbfIXvcly3pA5ytVL1iOg0nvs+idqKIoigPUiSqKojhAnaiiKIoDXImJZogpbmQBhmXBNtDbFZfxih/vuRp0Akdwz331TowjBfdh3GT4+nox7l6PsasrFjSLcU/AZjNvnuMdQntwgYz/Dly9EHQSd/aB7IfztoPsn/sx/vRvRy8T4/jhEtApbbPEbm0KPeQTYW+CVpfIefqL05eAnvewjP8uemgb6JhLV4Ks5nWM//Uuw/l9ul4WmFl4xQnQ2X5gMcjyHU4TBfrknPAebAa9dK/cF9/+NfQLdy3BefpyG26U7+rETTiRUpkHeWLNI6Dze41yk0/ryE9B5yx6J6ooiuIAdaKKoigOUCeqKIriAHWiiqIoDnAlsZQyHupJywWzN4QaQe+XvReLcXXFadAZfh0XhAeffgtk8ZuxIGv3Grm4+w/XPws614TkAt191mqseYjxyM/lOYZJOyopFsOOy/Cr3tTwJsgOJbHm41/suxdkg90yuVKxH08hekgmUrzx/N7IUOSN081Fcvv3/z1+G+gFLcWC4nesA524TTnI8jc6QNZ7N85vTsvkS30RVic67K0CWb7jHTEUbbRkFz24AcO7ZJEY/8WXfww6/3jqGpDVFuG16/XgnLtjntyWsHVwOeg0/apejEdi2HXjLHonqiiK4gB1ooqiKA5QJ6ooiuKArGKizNxERDEiShNRarTcvzIFqG3dRe3rHmrbM+SSWLrJGGPTrhBhIvKQDOgeTJaD3vaTcsfS6QOoU92BgeHMDZeCrH8BBn5LL+0U48uDTaDz+rBskTGQ6QOdaSBr23qSGQq1ydYf6S5MPAx94koxTjVge4MbIlihaOsA7rRJJjFJEm6UO9LK92BQ3zMkd3+xTbWpaSIr+/alwvTzXrkT68ibWAWo9pBMjgxXoH0qXmkBWfd12EYk2IkPg8M1smrWez24Y+yyxdjYczpbfY4h+7k7kqLgMUvFJJu2NQe+Irs+bx/ENjZvn8BKbskhbFMdjuK8Xxc+Ksb3PfNF0ClvteysmmC3nT7OK4qiOCBbJ2qI6Dlm3sXMm9w8oQsQta27qH3dQ21L2T/OX2uMaWXmSiJ6npkPGGNeHaswasRNRETlc8dfU6UAOdk26MdCH8qETGjfsbYtqsbC0sqE5DZ3fcV2x5jxZHUnaoxpHf23g4i2EBGsLh7bv7uoFGMTij252jbg0ws9Fyaz71jbhkqx84EyPjnPXS9WrJoNTHonyswRIvIYY2Kj/7+FiL4x0WuGMwF6d0gGfvuSePHH9spEUlEL7l4ofs8mabK4FGR9y/A8fq9upxh7GBMbSSOTAhjmdo9zsS2NJIiOyNJovjoMsp9cJ/8+3rIUtxR1prFM2D8cXg+yRD86l8qDMvnhicXxXH2YcJlOcrVvfyJIzzfLiVTYhHoF3fKz9ixHO6bn4BPDx//4RZD982Mb
QHbX1bvEOJHBy7QvcX4d0jnN3UyGOCZ3xJlqbFtz01q5a+xf99kk/a19Rogo2IxPwJevOASyf++Wvr7sHbyXHKqSvsjmK3ifbB7nq4hoCzOf1X/MGPNMFq9TJkdt6y5qX/dQ246STd/5RiK6eDI9JXfUtu6i9nUPte0H6BInRVEUB6gTVRRFcYArpfACnhTVB+Umhv/3yztAr2KPDA4Hu7G/UWwF7mLquQgTFsvXY6m9PyhtEuOHu1eATqFXJgnyuzM6kTGGTEJunxhYjTtavIvkrqa/nvs66Pxuy/Ugi78bBVmpTaW90EnsF2QlVSqTicaX53+zB72UfksmLcubcKtK/+KIGNd8fxfodPzmZSB74jjutCtsxQRJpT8mxoeHKkEn4pt5vcDsOHovJpY2z/0HMX4gtRF09nVUg2xoISY37yjfDbIHf3mPGJfa5D8t+WYyEziGPJ/ViqIo+Y06UUVRFAeoE1UURXGAKzHRjuEi+tu9NwpZKoyxn9LXZc/pgUuwyk0qiMGIoQaMBz1c9yTqWVbIFtg0Pl8ckC0bCji/m6Oz10OeErm4u/0aDOr87WWyT/aeRBp0Xju+CGTWWBARUVELvtbf3ifG6Wgh6CQLpf2tbU3yDU+KKHxSzlNPEjdopP3yc3mKcTvjUA1+1vDTGN8f2tgPsobgSTG2i4nGUjNvd5Up8FOiQcbvL73pIOjN88m5tLzoJOi0DeBmhivmngDZtw/fArKSw/LeMV6G58qWKW+ztv999E5UURTFAepEFUVRHKBOVFEUxQHqRBVFURzgSmJpeWEn/Wr9ZiG75/DdoHf6Sll9aKgSfXq6AAP0VTV9IIsZLL/31KAMyFf7sIWFn1NizNNaxyl3UtEgdd61VMiCy/tAL+IZEeP/c+Iu0Bmxqc4UGkZ7G5s/tcYrhakSm0QH53ciyUraTzRYK8+5ax1m2iq3WV7X3QM6IzU2Scx/6gTZ5776CsgeP3WFGIdnycL6RImHTnxUVp+6K3oU9P6xX163y4O42yNehdf7vaVvgOy/vfBVkIWnuEuN3okqiqI4QJ2ooiiKA9SJKoqiOECdqKIoigPYmKlPpDBzJ51pg11BRFn1pD5H3Dh+nTFmzuRq5we1rXuMsS2Ru/Z169gzxb6zau664kTfPzjzTmOMTYOUmXH8fEZt6y5ufn617eyau/o4ryiK4gB1ooqiKA5w24lunlwlr4+fz6ht3cXNz6+2ndnHF7gaE1UURZnt6OO8oiiKA9SJKoqiOCArJ8rMtzLzQWY+wswPTrV+rjBzEzO/y8zvMPPOqT7+dHIutlL7Zo/OXfdQ245ijJnwh4i8RHSUiBYRUYCIdhPRiqnSP5cfImoiooqpPOb5+DkXW6l93bOv2lZtey4/kyaWmHk9Ef2ZMeYjo+M/GXW+3xpP31cQ3haIyMYldj1KOGPpZzOE5cNMEmWZaARkSWxzQ6ujsvRYRzoAOiWeYTFubUlTT09mWmq45WpbIqKAL2xCgaiQ2fbEtpSh42QKdVLYO4kCWGIsXokVE0MhWWpveAhL4QW75PGHE32USA1NW328XO3rL4iYgnCpRYqnm7FMo3QIJzcnbUoKFqBeKIBl7iJeKesaxP5VXhszDne2dJlp2rF0Ln7B7wtvCxZE5S9s/I/xywfkZLmN84jjQ7QnjPM56EP/Ue2XJTEHMkHQ6eiVPZySfT2UHhy0nbvZ1BOtJaKxHeVaiOjKifQDkTJa9dE/EEJr4yciosCAFEZ2Y93AVCvKhm5eB7LWm/H4b338h2L8N711oHNH4V4x/tjtbu5GA3K1LYUCUbpq2ZeEzPix5qW1KZyvvRd0Mj0o4wVzQXbgfuzktWZVkxjv2V0POsv+Xk7WNw79GHRcJif7FoRL6ZIbvyJkxovXTaxW2vv0KrxQAx14aSXrRkC2qg7n91Wlx8T4R29eDzqlu/D4u3/wteMgdI+c/UKwIErrLvmyEHqG8Y/7cK28SWq/F+3mORwGWehinM/L
K06B7MHaX4rxK4PLQOdvn7xNjJu//1egc5YpK8rMzJuIaBMRlabig1N1WIWEbSnoxy6Hyrkz1rYFoej5PZlZyFi/kEzNTr+QTWKplYjGlqCfNyoTGGM2mzP7VT/rC+LjtmJLTrY1xqwN+PAvsDIuk9p3rG19BTpvcyBnv+D3zU77ZnMnuoOIljDzQjpjpE8T0Wcm0Qes8U8iolBzTIzt4p98+Up83Sm8va+ox79ynziyQYyrQzHQebHrIjFuTjwOOi6Sq21tyQTwcd7XJ2O9ZgDt4ymzxv+IGn8DQ2prVmELh+Z+eUc8F7tcQJjBNnbrLo7tG4/ifcZwtZzL4SabOPLKYZB9bvVbIPut0jdBdiAhv5ef7fwQ6HjxEphupsQvxGvwpuD0QumWktjVh/yY3qC+dkyMHAugT9lRVi/GP9h/HegYq2ecYO5OeidqjEkR0f1E9CwR7Seix40xeyfRV7IgV9squaH2dQ/1Cx+QVUzUGLOViLa6fC4XJGpbd1H7uofa9gy6Y0lRFMUB6kQVRVEc4ErfeSIia/t237DNguQRS5gkjYtJva24bvPwX9SALJLEj3KoS/avLpkbB52mny0W40SvTf/0fMOykN5urV163yEx9s2rBZ3uG+aB7PJb9oHs85Wvg+z+9nvEOLEIk1vEcpF45qiNTp7TuwLnbbpYztNEDepsXP4uyIZsNnvsS5SD7Gs//i0xrn0Xk4LHb5+BqzSMIU9C2s6TQtslLPkh7wDe6yWjNgvPfdhQ/q+WYaL4N3d9QYzj/XjNR05bNqvYvN1Z9E5UURTFAepEFUVRHKBOVFEUxQGuxETZEHkTMtaR8dsUcmhqluO1y0EnPgfjSIVFuGi+vwd3Q3zi4rfF+D+fw629Dc/LIiXH+/N7OZvxMCyu953qQ71iGVgaWoV74jP3dINs84LnQPaH7bh3u/AVae/BWoxtBbvkQme7+FdewUzpgsnvKzwD0v71a3D/+8eib4PswAjG8r/80udBtvRb28Q4cesVoGMTXs1/mMn4pH17l+IHSUXkPCnoxu8k04+yxR86BrLvtm8AWTgodyqY/VjgJXzSUhwJ1+x/8Lvxf6UoiqJMhjpRRVEUB6gTVRRFcYA6UUVRFAe4k1jKGEgsRY5gwVSzZKEYe399CHRiX7gEZX240Pi1D38XZB9+QxaArX4TV8z2XSwXO6dPurf/YCrgDC5YznRigigzNCTGnRdjpaF/WfEvIBsxaKMXn7kUZOkG+f1Wb8ekkfU8yaaSVz6RDhD1L5D3FcaLC7jnrzwpxr1DIdBpSlaA7D9OXgKyFf/rBMgyl6wQ49brcU76sEhU3pPxe2ioRtpqBAuJUaRNJqEH5uG8KV2Nm3A21WIpse+dwApYPa1RMa4+gMcfqpTzIDPBPhG9E1UURXGAOlFFURQHqBNVFEVxQFYBQGZuIqIYEaWJKDVa7l+ZAtS27qL2dQ+17RlyyaLcZIzJqhUmpw0V9Mi2r+n9h/HNF9VLgU27ipKP4W6Q5RHsF/ClI78BstCrRWJc0IvR+KBlF40ngYmEaSBr25IxxAnLriqbNseeJXL319Lb0f4Nfgyo3/TOF0AWiOFus9KdMmkUOoUVsjhlseUk7bldJCv7ZgoMDTRI2/r6MKNQ4JU6/TGctztjC0GWfqgSZAPX4o6dnmXyPZNluIuusDFvEqBZz910kKjnIvnZrDuDiLByVvlunH8vf/ZRkL0wHAVZwiYjFGyTtjMevOY9FpNP1NlGH+cVRVEckK0TNUT0HDPvGm2Bqkwdalt3Ufu6h9qWsn+cv9YY08rMlUT0PDMfMMa8OlZB9EYv0N7oOZCbbf3Y0VCZkAntO9a23rLoeTrFGUtOc9dXbLModBaQ1Z2oMaZ19N8OItpCROtsdN7v3+33z87+0m6Qq20D3hlY0fw8Mpl9x9rWW6jzNhdynbu+8Oy076R3oswcISKPMSY2+v9biOgbE75mJEn+o+1CZqowqJ45JcvQNT64BnQS
jbiD5vZrsfXC3z2PJa9qm2V0OF2AQeZkuZRlfNMXJj4X21I6TdwjE2umogzUTl4r/+p/a+6ToNNoU/VvaBvutCnox+B/5PiAGI9UonMPtvSLMU/zjqVc7ctpJn+PnA8N64+DXutp+aRVFh0AnZdONICsosSmP71NX/t0WNrJrj1GpO28JEDf51zmbiZgaKhOTrpgD16TmZD8bPc/iHM37MGE3P/auxFk/e1FIJv3nvQpsVo8h8hJSxuTCUrhZfM4X0VEW/hMXx8fET1mjHkmi9cpk6O2dRe1r3uobUeZ1IkaYxqJ6OJpOJcLDrWtu6h93UNt+wG6xElRFMUB6kQVRVEc4M62B5+PMhUyseGJYe9s9kgf7k3gvoBv3fTvIHui43KQ+W16riQtuY6ATf8k74ilv/T521WTHR4PUViWE+u6thrU/LfJpN2t4RHQWfYaLu3z2/TXDnXbJDEsu5ECvXh8HrTsEMuc32TIZBi/oWSN3Gn39bqnQe9wQtr76U5MiO7bvhRk4WYsBzlUGUW9VjknU2G8LrwjEzRCz1M8CaZIk3Q5sXq83jg50f6gM9xkk0SyI9SKLi5WK8f+QTyHoiMyWeiZwN56J6ooiuIAdaKKoigOUCeqKIriAFdioslCL3VcI2Oic3bhWw3XyKClD8OmtLVnNch2HaoHWchm4XioWwpTIVxUmyiSMuOdPB5zXvF4KBMOClHPSlT79tJfivHehE0Fqzds+m2fwrhldEc7yBLz5AL/QCvG+zJFlh0q3gl6LOQBPl+aKubEhGxVAGO91wQ7xPi5boyXGZvbk541UZDZxTuTlq+lsBm/E28yz2P3NnjjhsoOyGsyGUZD9dfJefJ0F66kOvFuDciCdTGUddm0rbF8XdGjWIEsE7T4Kx7fL+idqKIoigPUiSqKojhAnaiiKIoD1IkqiqI4YNp6DDRvwDqYxcdlwHy4EoPAb7yCWZPiUxjkLduPZVasrT6SEayiY00A5Hu43jCRsVSjysxJgF61V1Z6eqj5TtBJYbt0Cp9CO6YqsT6s/6SlkpTfZirle5LOQqF/hK6paRSyr7ffCHrFlqbvjT9YBjpzYpjpHC7DxFrfpfjdBY/LCkW+uM2szPeJaoOnb4hC//GWkEXWXAR6/XUyaXnkUdy4cOVnD4DsjV/bbHBA81K4U2aWvIM45zOB7JOgeieqKIriAHWiiqIoDlAnqiiK4gB1ooqiKA5g40LVImbuJKLjRFRBRNn1Uz833Dh+nTFmzhQfc8pQ27rHGNsSuWtft449U+w7q+auK070/YMz7zTGrJ2px89n1Lbu4ubnV9vOrrmrj/OKoigOUCeqKIriALed6OYZfvx8Rm3rLm5+frXtzD6+wNWYqKIoymxHH+cVRVEc4IoTZeZbmfkgMx9h5gddOH4TM7/LzO8w886pPn6+o/Z1D7Wte8xW20754zwze4noEBFtIKIWItpBRPcYY/ZN4Xs0EdFaY4yba83yErWve6ht3WM229aNO9F1RHTEGNNojEkQ0U+JKLv+pko2qH3dQ23rHrPWtm440Voiah4zbhmVTSWGiJ5j5l3MjM3TZzdqX/dQ27rHrLXttNUTnWKuNca0MnMlET3PzAeMMa+e75OaRah93UNt6x7nxbZu3Im2EtH8MeN5o7IpwxjTOvpvBxFtoTOPChcKal/3UNu6x6y1rRtOdAcRLWHmhcwcIKJPE9FTU3VwZo4wc9HZ/xPRLUT03lQdfwag9nUPta17zFrbTvnjvDEmxcz3E9GzROQlokeMMXun8C2qiGgLn+kD7SOix4wxz0zh8fMata97qG3dYzbbVncsKYqiOEB3LCmKojhAnaiiKIoD1IkqiqI4QJ2ooiiKA9SJKoqiOECdqKIoigPUiSqKojhAnaiiKIoD/j/d5+ODeoc5jwAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The fourth layer: pool layer\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAVoAAAD4CAYAAACt8i4nAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAeqklEQVR4nO3de5RV5Znn8e9TRV2gKEBuiliCRkyCSUeNQdHuWZW4Wo3tDN2JdrAzjunoYtSY4EQzy2QyTJa9Zk2cmdZ21G6aqN3G2NGJGptR0o6JpkeTgBBEHUAJXrl4QcACiirq9swftWXgeIr3PbjfOpf6fdaqtTZnP/W++/zY9dSpffY+29wdERFJp67cGyAiUuvUaEVEElOjFRFJTI1WRCQxNVoRkcRGlWviRmvyZlqCdc0ft2DNsQ2dUXNueH5MVF1IN530+L7whpVRbL6VqNLzHTW6xRvGTQzWDbQMBGs+2bo9as4Xdk4J1jRt7w/WdO97j56+vRWbbUNTizeNOSJYV/fe3mBN71Fx+3/DWxH9w8KRdXsnPd5dtLBsjbaZFk63s4N1s+5pCtbcNn1F1JznHn1yVF3ICv9FLuOkFJtvJar0fBvGTeSEL38zWNc5J9wMnmn/+6g5j3/g3wZrTvy7PcGa5S8uiZqvXJrGHMHJn10YrBv98DPBms1fPTNqzmP+y6+DNdYU7kPL9/1syHVRhw7M7Dwze8nMNprZ9UXWN5nZ/dn6FWY2M2ZcUbapKd90lG28YKM1s3rgduDzwGzgYjObXVB2GbDT3U8AbgZuzHtDa5GyTUv5pqNsSxPzinYOsNHdX3H3HuA+YF5BzTzg7mz5AeBss4iDGqJs01K+6SjbEsQ02unApgP+vTl7rGiNu/cBHcCkwoHMbIGZrTKzVb3sO7wtri25ZQvKt4gk+25/V9ybrzUuSbZ9+2oz22E9vcvdl7j7ae5+WgPhg8tSGuWbzoHZ1o+uzrM5KtWB2Y5qqs1sYxrtFqDtgH8fkz1WtMbMRgHjgbjzVkY2ZZuW8k1H2ZYgptGuBGaZ2XFm1gjMB5YW1CwFLs2WLwSecH0sWAxlm5byTUfZliB4Hq2795nZ1cBjQD1wl7uvNbMbgFXuvhS4E7jHzDYCOxgMXQKUbVrKNx1lW5qoCxbcfRmwrOCxRQcsdwMXlTTzmGbsYycFyx5dHb6a63efqd43fpJkK/ulyNdHwb6J4Rdmd53x98GaM751RdScs+5dHqz5wvp3gjUvfXF31HwxUmRrDnW94Wx3/PncYE337K5Spj6kPf9Y+D7fBw1c3TDkOn3WgYhIYmq0IiKJqdGKiCSmRisikpgarYhIYmq0IiKJqdGKiCSmRisikljZ7rDg0wfovjH8ST0tT4wfhq0RiedNA/R9pDtYd9nyrwRrjo+4EAHg6OWtwZr//tzvBWve7toYNV+5WMdemh5dGazrmX9GsOaES56NmvOta8J3Yri87dFgzebGoe+ooVe0IiKJqdGKiCSmRisikpgarYhIYmq0IiKJqdGKiCQWc7vxNjN70szWmdlaM1tYpKbdzDrMbE32tajYWHIwZZuW8k1H2ZYm5jzaPuBad19tZq3Ab83scXdfV1D3lLtfkP8m1jRlm5byTUfZliD4itbd33T31dnybmA9H7ytsBwGZZuW8k1H2ZampCvDzGwmcAqwosjquWb2HLAVuM7d1xb5/gXAAoD6iRN443dHBuec9f1fl7KJVevDZpuNsT/fZsK3ABpJ8tx3xx7Vwt
mzXgzO+dqc8K1Udn8pfIUTwC/X9wZrxq4P32Le9ub/tkye2Y6acASv/fvwbWrquy1YE76WbtAtX18crPnaHeFbDm3b/tKQ66JTN7OxwIPANe6+q2D1amCGu38KuBV4uNgYB96/vX7s2Nipa14e2cLB+TYQ/qEbKfLed5uPULbvy70vtLQk3d5yiWq0ZtbAYJj3uvtDhevdfZe778mWlwENZjY51y2tUco2LeWbjrKNF3PWgTF42+D17n7TEDVHZXWY2Zxs3O15bmgtUrZpKd90lG1pYo7RngVcArxgZmuyx74DHAvg7ouBC4ErzawP6ALmu3v4nsGibNNSvuko2xIEG627Pw0c8sizu98G3JbXRo0UyjYt5ZuOsi2NrgwTEUlMjVZEJDE1WhGRxMp2K5umNzqZ9bVi5zeXzhoao+q8tyeX+WrJY1vXBGvOPfrk5NtRTdyNvoH6XMbaOzXutc74Z/M5d9cGchkmmYbdzvR/Dl+c0fC/VwVr6j718ag520evCdaM3RJ+D6/uEJutV7QiIomp0YqIJKZGKyKSmBqtiEhiarQiIomp0YqIJKZGKyKSmBqtiEhiarQiIolZuT61zMy2Aa8XPDwZeDfhtHmNP8Pdp+QwTjJF8q2WbKHC8y3Dvqtsq2PfHTLbsjXaYsxslbufVq3jVzJlm1bK569sq3/f1aEDEZHE1GhFRBKrtEa7pMrHr2TKNq2Uz1/ZVvf4lXWMVkSkFlXaK1oRkZpTlkZrZueZ2UtmttHMri+yvsnM7s/WrzCzmZHjtpnZk2a2zszWmtnCIjXtZtZhZmuyr0U5PKWKkSrb7HtHdL7KNq2a7gvuPqxfQD3wMnA80Ag8B8wuqLkKWJwtzwfujxx7GnBqttwKbCgydjvwyHA/72rPdqTnq2yrN99KyHbYj9Ga2Vzgew00ntNMy7DOzZjmYEnvtPAwve+8R9+uvYe81XI5vJ+tu5/baE0ele/Y0eFx++Luf9IzIXxnpJOmbgvWvLapl3d39FdUvoeVbY56poXnO3ZS+Jz7t7f00rGjr6KyhQP6gjWdM7quNVjf3xr+WT6+7a2oubf1jw3WdG4K59/dvZOens6i2ZbjnmHTgU3NtHC6nT2sE9tJnwjWbP1uf7Dm5W/ekcfmpDAd2ATQTAun158T/IaBUz4ZrGnYtidq8te/ODVY88zVfx2smXPupqj5htnB2Q7zvrvp8jODNX/1lR8Ea74x79U8NieF6cCm0XWtzB03L1i8+3MfC9b8+K/+MmrixTvmBmuWX/OZYM2qVbcPuS7qGG3KY1MjnbJNS/mmo2zjBRutmdUDtwOfB2YDF5vZ7IKyy4Cd7n4CcDNw4yGG3AK0Hd7m1hZlm1bO+SrbA2jfLU3MK9o5wEZ3f8Xde4D7gMLX9vOAu7PlB4CzzWyo40ArgVmHs7E1KEm2ZnZckq2tPnnmq2wPpr5QgphGu//YVGZz9ljRGnfvAzqASYUDmdkCYDnQ38u+w9neWpNbtpmvAv3AeuUL5Juvsj1Ykr7Q411JNrbchvU8Wndf4u6nufusBpqGc+oRIct3lrs3K998Kdt0DuwLjRY+C6YaxTTawmMnx2SPFa0xs1HAeGB7HhtY45RtWso3HWVbgphGu//YlJk1Mnii8NKCmqXApdnyhcATPtwn6FYnZZuW8k1H2ZYgeB6tu/eZ2dXAYwxevXGXu681sxuAVe6+FLgTuMfMNgI7GAy94viq/xusmfbH4XHe8O4PvzEkztYMq68PlnUdGf4zuPOY8MnhAEc+Ez52ee7RJwdrNng+L3qqYd/t/+ypUXV3ffXWYM3KruODNd2+OWq+kLT7bvj13xnffSZYc8yo8IUIAMv/Xfgc2breiIt2DvE7JOqCBXdfBiwreGzRAcvdwEUxY8nBlG1ayjcdZRtPn94lIpKYGq2ISGJqtCIiianRiogkpkYrIpKYGq2ISGJqtCIiiZXjg79z95GVcSfUv/yZfC40qAb72kbz0rdPDtZNWh
3+XTvpjt9EzTlqRvhT7jZfE/4A6957l0fNVwsW/O2DUXVnNIcvPrlh7keDNR3vPh81X7n0HtHMO18If6j3T4/8H8GaP/ja1VFztrwX/mD7t88aH6zp2zD0z5Je0YqIJKZGKyKSmBqtiEhiarQiIomp0YqIJKZGKyKSWMxdcNvM7EkzW2dma81sYZGadjPrMLM12deiYmPJwZRtWso3HWVbmpjzaPuAa919tZm1Ar81s8fdfV1B3VPufkH+m1jTlG1ayjcdZVuC4Ctad3/T3Vdny7uB9XzwbpdyGJRtWso3HWVbmpKuDDOzmcApwIoiq+ea2XPAVuA6d19b5PsXAAsAmhkTNefAH5wSrPn02J9FjfUyR0XVlcOHzTYbY3++TaMnMOOR8O2ZmpaFr/qyU04K1gD0PVt0sw6y+7jwz+JAgpvMlmPf3fzt8FVwMxtWRo116l98I1gz5e3w/+XgXb/zlWe2Y44cy4lfeTE45492ha9CHPPTYpvzQe9eNjdYMxC+MI9D/bRFN1ozGws8CFzj7rsKVq8GZrj7HjM7H3gYmPWBDXFfAiwBGGcTR+RN2orJI1s4ON/WCcco34z23XTyznbix6fUZLZRZx2YWQODYd7r7g8Vrnf3Xe6+J1teBjSY2eRct7RGKdu0lG86yjZezFkHxuDdLNe7+01D1ByV1WFmc7JxR+T920uhbNNSvuko29LEHDo4C7gEeMHM1mSPfQc4FsDdFzN4z/YrzawP6ALmj9T7t5dI2aalfNNRtiUINlp3fxqwQM1twG15bdRIoWzTUr7pKNvS6MowEZHE1GhFRBJToxURSazib2VT99SzwZrLxr8VNdb/rOALFvLmdUbf6PDv0ZhrA2z9y1Fzvhlxm5qpzwwEa7Z1Rk1X8Rb+m4eDNX/79mejxpryN3G3E6p2nbubWfl0+FY2O388LViz9VsTouZs3h5+f27K8+HbYL3eNfQ4ekUrIpKYGq2ISGJqtCIiianRiogkpkYrIpKYGq2ISGJqtCIiianRiogkpkYrIpKYletTy8xsG/B6wcOTgXcTTpvX+DPcfUoO4yRTJN9qyRYqPN8y7LvKtjr23SGzLVujLcbMVrn7adU6fiVTtmmlfP7Ktvr3XR06EBFJTI1WRCSxSmu0S6p8/EqmbNNK+fyVbXWPX1nHaEVEalGlvaIVEak5arQiIomVpdGa2Xlm9pKZbTSz64usbzKz+7P1K8xsZuS4bWb2pJmtM7O1ZrawSE27mXWY2Zrsa1EOT6lipMo2+94Rna+yTaum+4K7D+sXUA+8DBwPNALPAbMLaq4CFmfL84H7I8eeBpyaLbcCG4qM3Q48MtzPu9qzHen5KtvqzbcSsh32N8PMbC7wvfrW0ec0TJ0QrB8YOOSt4wGoq4t7Dg0vh+/7E6ObTnp8X3jDhtn72br7uY11zT66vjX8TfX1wRJvCNcAHDlzR7DmrZ7xwZrutzro7eiqqHwPytaavJmWcm/SYan0fbeBxnPyynZgQtw49Z09wZp9UxqDNb3v7aC/s7NotlE3ZzSz84BbGPytc4e7f79gfRPwQ+DTwHbgS+7+2hDDTQc2NUydwMz/tiA49759DcGa5ubeYA3A9C+sjaoLWeG/yGUcSJMtwOj6VuZO+EJ4A44IN77e6RPC4wDX3vkPwZobXz0vWLP6qh9FzRcjx3z3Z9tMC6fb2blt43Cq9H03z2z3fu70qLpxz2wK1my8ckawZtPtNw+5LniM1szqgduBzwOzgYvNbHZB2WXATnc/AbgZuDG4VaJsE1O+6Sjb0sS8GTYH2Ojur7h7D3AfMK+gZh5wd7b8AHC2mQ3158kWoO1wNrYGKdu08sxX2R5M+24JYhrt/j+ZMpuzx4rWuHsf0AFMGmK8lcCs0jazZiXJ1syOy3k7q1We+Srbg6kvlGBYT+8yswXAcqC/b9fe4Zx6pPgq0A+s7xnI540/2W9/tr3sK/e21JQD+0KtZhvTaAtf0h+TPVa0xsxGAe
MZPPh9EHdf4u6nufusUePGHN4W15bcsoX9+c5y9+bGuuYEm1t18t53Z7l7cwNNiTa3qiTpC7WabUyj3f8nk5k1Mnj+2tKCmqXApdnyhcATPtznjVUnZZuW8k1H2ZYgeHqXu/eZ2dXAYwyexnGXu681sxuAVe6+FLgTuMfMNgI7GAxdApRtWso3HWVbmqjzaN19GbCs4LFFByx3AxeVMrG70d8ffkF96rHhc9w6fr/oX9KHpX7SxGCNvRd3An+MFNkOfiMwEH7xYN3hk7Uf//HfRU35i65wLlu2R5y321cF+UrFZ7v5D+NePE8ZFz5H9qb54Z+Bb/546Lvh6ENlREQSU6MVEUlMjVZEJDE1WhGRxNRoRUQSU6MVEUlMjVZEJDE1WhGRxKIuWEhhUnMn//pjK4N1v/zGmcGa+uKX/h+WMQ+Hf/fUXVZxH1D/QWYwKvzf++K14U+me74n7gNqLn/i68GaE+8Mf2jIO29HTScj2LErwndP2LQ0rr2d8rVngzXfXfvHwZot3XcOuU6vaEVEElOjFRFJTI1WRCQxNVoRkcTUaEVEElOjFRFJLOZ2421m9qSZrTOztWa2sEhNu5l1mNma7GtRsbHkYMo2LeWbjrItTcyJZn3Ate6+2sxagd+a2ePuvq6g7il3vyD/TaxpyjYt5ZuOsi1B8BWtu7/p7quz5d3Aej54W2E5DMo2LeWbjrItTUlXhpnZTOAUYEWR1XPN7DlgK3Cdu68t8v0LgAUAo8YfwT/85HPBOdt++etSNvGQHtu6Jlhz7vrwL99+z//KsA+bbTbG/nyb68dio8K3hFn3p7cGa366Z1qwBqDx7YjdaXn4akC8K2q+UuS57zajOzgfqBzZfnnyb4I1r/7qo1FjbfnDCcGaqfNeDM/nQ19BGd1ozWws8CBwjbvvKli9Gpjh7nvM7HzgYWBW4RjuvgRYAtB8dNuIvBtmMXlkCwfnO75xqvLN5L3vjrOJyjajbONEnXVgZg0Mhnmvuz9UuN7dd7n7nmx5GdBgZpNz3dIapWzTUr7pKNt4MWcdGIO3DV7v7jcNUXNUVoeZzcnGze+TXmqUsk1L+aajbEsTc+jgLOAS4AUzW5M99h3gWAB3XwxcCFxpZn1AFzDf3WvyT4CcKdu0lG86yrYEwUbr7k8Dh3z3x91vA27La6NGCmWblvJNR9mWRleGiYgkpkYrIpKYGq2ISGJlu5XNzMnv8IM/Dx++ueEvTs1tzvU9e4M1m98bH6zp7Q9fCFBu3Uc3su4/HROs+013U7Dm209cFDXn7MVvBGt80sRgjb1X+flKGr0faWbrX84O1rWPXhOs2fA3/xQ15w//478M1rTwZtRYQ9ErWhGRxNRoRUQSU6MVEUlMjVZEJDE1WhGRxNRoRUQSU6MVEUlMjVZEJDE1WhGRxKxcn1pmZtuA1wsengy8m3DavMaf4e5TchgnmSL5Vku2UOH5lmHfVbbVse8OmW3ZGm0xZrbK3U+r1vErmbJNK+XzV7bVv+/q0IGISGJqtCIiiVVao11S5eNXMmWbVsrnr2yre/zKOkYrIlKLKu0VrYhIzVGjFRFJrCyN1szOM7OXzGyjmV1fZH2Tmd2frV9hZjMjx20zsyfNbJ2ZrTWzhUVq2s2sw8zWZF+LcnhKFSNVttn3juh8lW1aNd0X3H1Yv4B64GXgeKAReA6YXVBzFbA4W54P3B859jTg1Gy5FdhQZOx24JHhft7Vnu1Iz1fZVm++lZDtsL8ZZmZzge81WNM5o+tag/Xe35/b3Cf+XvieYXt9IFjz5uY+du4YOOQ97cvh/Wzd/dxGa/JmWnIZd2DCmKi6vknh7Op2he8H1rN7B31dnRWV7+FkG5XblL6o+duadwZrNm2eGqzZ17mD3n2VlS2U3he6j24OD1of19uOa90WrOnoD/9fdmzdy96d+4pmG3VzRjM7D7iFwd86d7j79wvWNwE/BD4NbAe+5O6vDTHcdGDT6LpW5o6bF5y7/72OmE2M8t
hja4I1z/d0B2v+7IK3c9iaQSmyBWimhdPt7Fy2sat9TlTdu5eEf5E1/zz8Q7ThgZuj5ouRY74lZ9v12XBuA1fEXfl5y0fvC9Ys/NbXgzXP//yWqPlilLMvvPitjwdrvDXul9gP2sNndz3acXKw5u4/e2LIdcFjtGZWD9wOfB6YDVxsZoW3qbwM2OnuJwA3AzcGt0qUbWLKNx1lW5qYN8PmABvd/RV37wHuAwp/5cwD7s6WHwDONrOh/jzZArQdzsbWIGWbVp75KtuDad8tQUyj3f8nU2Zz9ljRGnfvAzqASYUDmdkC4Fagvce7Dmd7a01u2WY+BbSb2fO97Mt5U6tSnvkq24OpL5RgWE/vcvclPvgpOX/SaKOHc+oRwd0XA38CNDfQVO7NqSnKNp2R0BdiGm3hS/pjsseK1pjZKGA8gwe/i3L3ZaVtZs1Kkq27n5jzdlarXPNVtgdRXyhBTKNdCcwys+PMrJHB89eWFtQsBS7Nli8EnvDhPm+sOinbtJRvOsq2BMHTu9y9z8yuBh5j8DSOu9x9rZndAKxy96XAncA9ZrYR2MFg6BKgbNNSvuko29JEnUebvaRfVvDYogOWu4GLSpvaYSCfX26/u+30qLoLNkwL1mzvCp+Y/Hr33cGaWGmyzdf/uj3u3Mt/3BN+03jJfV8M1tTFnf4YJUm+LaPhE58Mlm35F+E/GL957MqoKX++56RgzdjXOoM1dfvyuwAoRbYDY5roPu2EYF3rqxF/jLeH8wDoHAgfc3/w0bOCNTvfe2bIdfpQGRGRxNRoRUQSU6MVEUlMjVZEJDE1WhGRxNRoRUQSU6MVEUlMjVZEJLGoCxZS8P4B+nftymWsI2aEP30e4KbjHgjWLJx9TrBm1N7eqPmqweYHwyfC9/qvosa69cbwuekTHxr6pO731fXHnWheLj7K6D4y/OEnHr6ZBGt2Hxs155bzGoM1XWeFL7bxDZX92mqgwdh7VEOwLibbp067K2rOs565PFgz7dfhq2je6hz6AqzKTl1EpAao0YqIJKZGKyKSmBqtiEhiarQiIomp0YqIJBZzu/E2M3vSzNaZ2VozW1ikpt3MOsxsTfa1qNhYcjBlm5byTUfZlibmPNo+4Fp3X21mrcBvzexxd19XUPeUu1+Q/ybWNGWblvJNR9mWIPiK1t3fdPfV2fJuYD0fvK2wHAZlm5byTUfZlqakK8PMbCZwCrCiyOq5ZvYcsBW4zt3XFvn+BcACgGbCV7EAdF4Yvk3Nn858Imqsi//zdcGayZ2/Cda4D0TNV4oPm202xv/Pt34so6YeGZx37dx7gzX/p7slWAMw5VfvBGv6B/K7lUop8tx3Jx7dxJf/6yPBOVfuOi5Ys/wnnwrWALRNfitY0/xOV7DG+vK/N2Ke2Y4afwS7jgu/dXT5l/4pWDO+Lu7W5QOrxwdr6vp6wgMdItroRmtmY4EHgWvcvfDa2dXADHffY2bnAw8Dsz6wHe5LgCUA42ziiLwbZjF5ZAsH5zu+caryzeS97874RKuyzeSdbfPRbTWZbdRZB2bWwGCY97r7Q4Xr3X2Xu+/JlpcBDWY2OdctrVHKNi3lm46yjRdz1oExeNvg9e5+0xA1R2V1mNmcbNzteW5oLVK2aSnfdJRtaWIOHZwFXAK8YGZrsse+AxwL4O6LgQuBK82sD+gC5rt7Tf4JkDNlm5byTUfZliDYaN39acACNbcBt+W1USOFsk1L+aajbEujK8NERBJToxURSUyNVkQksfLdymbcGHrnnhas6x0d/l3w4BsnR805dmv4ZPn6CeGTl21XxH00ymzf5CZevfwjwbpfdYcvvvjeFQui5mzYsCqqrtqNr+vm/JYNwboF47cGa/7oR2Oj5tzZHr74ofW+5eGBPHxRQzk17exj5gPbgnU/OfOUYM1Dm0+OmnP6P4czqXvq2WCN+d6hvz9qS0RE5LCp0YqIJKZGKyKSmBqtiEhiarQiIomp0YqIJKZGKyKSmBqtiEhiarQiIo
lZuT61zMy2Aa8XPDwZeDfhtHmNP8Pdp+QwTjJF8q2WbKHC8y3Dvqtsq2PfHTLbsjXaYsxslbuHr8ut0PErmbJNK+XzV7bVv+/q0IGISGJqtCIiiVVao11S5eNXMmWbVsrnr2yre/zKOkYrIlKLKu0VrYhIzVGjFRFJrCyN1szOM7OXzGyjmV1fZH2Tmd2frV9hZjMjx20zsyfNbJ2ZrTWzhUVq2s2sw8zWZF+LcnhKFSNVttn3juh8lW1aNd0X3H1Yv4B64GXgeKAReA6YXVBzFbA4W54P3B859jTg1Gy5FdhQZOx24JHhft7Vnu1Iz1fZVm++lZBtOV7RzgE2uvsr7t4D3AfMK6iZB9ydLT8AnG1mh7yHPIC7v+nuq7Pl3cB6YHpuW175kmULIz5fZZtWTfeFcjTa6cCmA/69mQ8+6f017t4HdACTSpkk+7PiFGBFkdVzzew5M/uZmZ1UyrgVbliyhRGZr7JNq6b7QtnugpuSmY0FHgSucfddBatXM3hN8h4zOx94GJg1zJtY1ZRvOso2nXJmW45XtFuAtgP+fUz2WNEaMxsFjAe2xwxuZg0Mhnmvuz9UuN7dd7n7nmx5GdBgZpNLfRIVKmm22feM1HyVbVo13RfK0WhXArPM7Dgza2TwoPbSgpqlwKXZ8oXAE54dsT6U7HjNncB6d79piJqj3j+uY2ZzGMwg+oehwiXLFkZ8vso2rdruC8PxjmKRdwHPZ/Cdv5eB/5A9dgPwr7LlZuAnwEbgGeD4yHF/H3DgeWBN9nU+cAVwRVZzNbCWwXc1lwNnliODastW+Srbas23ErLVJbgiIonpyjARkcTUaEVEElOjFRFJTI1WRCQxNVoRkcTUaEVEElOjFRFJ7P8B2AYcP8T+LukAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "from mindspore.nn import WithLossCell, SoftmaxCrossEntropyWithLogits, Momentum\n", - "\n", - "net = LeNet5()\n", - "optimizer = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.1, 0.9)\n", - "criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - "net_with_criterion = WithLossCell(net, criterion)\n", - "train_network = GradWrap(net_with_criterion)\n", - "train_network.set_train()\n", - "\n", - "image = images[0][0]\n", - "image = image.reshape((1,1,32,32))\n", - "plt.imshow(np.squeeze(image))\n", - "plt.show()\n", - "input_data = Tensor(np.array(image).astype(np.float32))\n", - "label = Tensor(np.array([labels[0]]).astype(np.int32))\n", - "output = net(Tensor(input_data))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "将第一层卷积层、第二层池化层、第三层卷积层和第四层池化层的图像特征打印出来后,直观地看到随着深度的增加,图像特征几乎无法用肉眼识别,但是机器可以用这些特征进行学习和识别,后续的全连接层为二维数组,无法图像显示,但可以打印出数据查看,由于数据量过大此处就不打印了,用户可以根据需求选择打印。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 求loss值和梯度值,并进行优化\n", - "\n", - "先求得loss值,后再根据loss值求梯度(偏导函数值),使用优化器`optimizer`进行优化。\n", - "- `loss_output`:即为loss值。\n", - "- `grads`:即网络中每层权重的梯度。\n", - "- `net_params`:即网络中每层权重的名称,用户可执行`print(net_params)`自行打印。\n", - "- `success`:优化参数。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "conv1.weight: (6, 1, 5, 5)\n", - "conv2.weight: (16, 6, 5, 5)\n", - "fc1.weight: (120, 400)\n", - "fc1.bias: (120,)\n", - "fc2.weight: (84, 120)\n", - "fc2.bias: (84,)\n", - "fc3.weight: (10, 84)\n", - "fc3.bias: (10,)\n", - "Loss_value: 2.3025453\n" - ] - } - ], - "source": [ - "loss_output = criterion(output, label)\n", - "grads = train_network(input_data, label)\n", - "net_params = net.trainable_params()\n", - "for i in range(len(grads)):\n", - " 
print(\"{}:\".format(net_params[i].name),grads[i].shape)\n", - "success = optimizer(grads)\n", - "loss = loss_output.asnumpy()\n", - "print(\"Loss_value:\",loss)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "具体每层权重的参数有多少,从打印出来的梯度张量能够看到,对应的梯度值用户可以自行选择打印。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本次体验我们将MindSpore的数据增强后,使用了`create_dict_iterator`转化成字典,再单独取出来;使用PyNative模式将神经网络分层单独调试,提取并观察数据;用`WithLossCell`在PyNative模式下计算loss值;构造梯度函数`GradWrap`将神经网络中各个权重的梯度计算出来,以上就是本次的全部体验内容。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} \ No newline at end of file diff --git a/tutorials/notebook/mindspore_enable_auto_augmentation.ipynb b/tutorials/notebook/mindspore_enable_auto_augmentation.ipynb deleted file mode 100644 index 0be698debb4e59d344da6e147cef153262c79991..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_enable_auto_augmentation.ipynb +++ /dev/null @@ -1,497 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#
    应用自动数据增强" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "自动数据增强(AutoAugment)是在一系列图像增强子策略的搜索空间中,通过搜索算法找到适合特定数据集的图像增强方案。MindSpore的`c_transforms`模块提供了丰富的C++算子来实现AutoAugment,用户也可以自定义函数或者算子来实现。更多MindSpore算子的详细说明参见[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.vision.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore算子和AutoAugment中的算子的对应关系如下:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "| AutoAugment算子 | MindSpore算子 | 描述 |\n", - "| :------: | :------ | ------ |\n", - "| shearX | RandomAffine | 横向剪切 |\n", - "| shearY | RandomAffine | 纵向剪切 |\n", - "| translateX | RandomAffine | 水平平移 |\n", - "| translateY | RandomAffine | 垂直平移 |\n", - "| rotate | RandomRotation | 旋转变换 |\n", - "| color | RandomColor | 颜色变换 |\n", - "| posterize | RandomPosterize | 减少颜色通道位数 |\n", - "| solarize | RandomSolarize | 指定的阈值范围内,反转所有的像素点 |\n", - "| contrast | RandomColorAdjust | 调整对比度 |\n", - "| sharpness | RandomSharpness | 调整锐度 |\n", - "| brightness | RandomColorAdjust | 调整亮度 |\n", - "| autocontrast | AutoContrast | 最大化图像对比度 |\n", - "| equalize | Equalize | 均衡图像直方图 |\n", - "| invert | Invert | 反转图像 |\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 本文档适用于CPU、GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 整体流程\n", - "- 准备环节。\n", - "- CIFAR-10自动数据增强。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 准备环节" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 下载所需数据集" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "运行以下命令来获取数据集:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - 
"./datasets/cifar-10-batches-bin\n", - "├── readme.html\n", - "├── test\n", - "│   └── test_batch.bin\n", - "└── train\n", - " ├── batches.meta.txt\n", - " ├── data_batch_1.bin\n", - " ├── data_batch_2.bin\n", - " ├── data_batch_3.bin\n", - " ├── data_batch_4.bin\n", - " └── data_batch_5.bin\n", - "\n", - "2 directories, 8 files\n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz\n", - "!mkdir -p datasets\n", - "!tar -xzf cifar-10-binary.tar.gz -C datasets\n", - "!mkdir -p datasets/cifar-10-batches-bin/train datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/test_batch.bin datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/data_batch*.bin datasets/cifar-10-batches-bin/batches.meta.txt datasets/cifar-10-batches-bin/train\n", - "!tree ./datasets/cifar-10-batches-bin" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## CIFAR-10自动数据增强\n", - "\n", - "本教程以在CIFAR-10数据集上实现AutoAugment作为示例。\n", - "\n", - "针对CIFAR-10数据集的数据增强策略包含25条子策略,每条子策略中包含两种变换,针对一个batch中的每张图像随机挑选一个子策略的组合,以预定的概率来决定是否执行子策略中的每种变换。\n", - "\n", - "用户可以使用MindSpore中`c_transforms`模块的`RandomSelectSubpolicy`接口来实现AutoAugment,在CIFAR-10分类训练中标准的数据增强方式分以下几个步骤:\n", - "\n", - "- `RandomCrop`:随机裁剪。\n", - "\n", - "- `RandomHorizontalFlip`:水平方向上随机翻转。\n", - "\n", - "- `Normalize`:归一化。\n", - "\n", - "- `HWC2CHW`:图片通道变化。\n", - "\n", - "在`RandomCrop`后插入AutoAugment变换,如下所示:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 
引入MindSpore数据增强模块。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - ] - } - ], - "source": [ - "from mindspore import dtype as mstype\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as c_vision\n", - "import mindspore.dataset.transforms.c_transforms as c_transforms\n", - "import matplotlib.pyplot as plt" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 定义MindSpore算子到AutoAugment算子的映射:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "# define Auto Augmentation operators\n", - "PARAMETER_MAX = 10\n", - "\n", - "def float_parameter(level, maxval):\n", - " return float(level) * maxval / PARAMETER_MAX\n", - "\n", - "def int_parameter(level, maxval):\n", - " return int(level * maxval / PARAMETER_MAX)\n", - "\n", - "def shear_x(level):\n", - " v = float_parameter(level, 0.3)\n", - " return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, shear=(-v,-v)), c_vision.RandomAffine(degrees=0, shear=(v, v))])\n", - "\n", - "def shear_y(level):\n", - " v = float_parameter(level, 0.3)\n", - " return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, shear=(0, 0, -v,-v)), c_vision.RandomAffine(degrees=0, shear=(0, 0, v, v))])\n", - "\n", - "def translate_x(level):\n", - " v = float_parameter(level, 150 / 331)\n", - " return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, translate=(-v,-v)), c_vision.RandomAffine(degrees=0, translate=(v, v))])\n", - "\n", - "def translate_y(level):\n", - " v = float_parameter(level, 150 / 331)\n", - " return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, translate=(0, 0, -v,-v)), c_vision.RandomAffine(degrees=0, translate=(0, 0, v, v))])\n", - "\n", - "def color_impl(level):\n", - " v = float_parameter(level, 1.8) + 0.1\n", - " return c_vision.RandomColor(degrees=(v, 
v))\n", - "\n", - "def rotate_impl(level):\n", - " v = int_parameter(level, 30)\n", - " return c_transforms.RandomChoice([c_vision.RandomRotation(degrees=(-v, -v)), c_vision.RandomRotation(degrees=(v, v))])\n", - "\n", - "def solarize_impl(level):\n", - " level = int_parameter(level, 256)\n", - " v = 256 - level\n", - " return c_vision.RandomSolarize(threshold=(0, v))\n", - "\n", - "def posterize_impl(level):\n", - " level = int_parameter(level, 4)\n", - " v = 4 - level\n", - " return c_vision.RandomPosterize(bits=(v, v))\n", - "\n", - "def contrast_impl(level):\n", - " v = float_parameter(level, 1.8) + 0.1\n", - " return c_vision.RandomColorAdjust(contrast=(v, v))\n", - "\n", - "def autocontrast_impl(level):\n", - " return c_vision.AutoContrast()\n", - "\n", - "def sharpness_impl(level):\n", - " v = float_parameter(level, 1.8) + 0.1\n", - " return c_vision.RandomSharpness(degrees=(v, v))\n", - "\n", - "def brightness_impl(level):\n", - " v = float_parameter(level, 1.8) + 0.1\n", - " return c_vision.RandomColorAdjust(brightness=(v, v))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 
定义CIFAR-10数据集的AutoAugment策略:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 预置一条简单的子策略,其中只包含`RandomRotation`和`RandomColor`两个操作,概率分别为1.0和0.0。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "policy_list = [\n", - " [(c_vision.RandomRotation((90, 90)), 1.0), (c_vision.RandomColorAdjust(), 0.0)]\n", - "]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 预置多个子策略。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - " # define the Auto Augmentation policy\n", - "cifar10_policy = [\n", - " [(posterize_impl(8), 0.4), (rotate_impl(9), 0.6)],\n", - " [(solarize_impl(5), 0.6), (autocontrast_impl(5), 0.6)],\n", - " [(c_vision.Equalize(), 0.8), (c_vision.Equalize(), 0.6)],\n", - " [(posterize_impl(7), 0.6), (posterize_impl(6), 0.6)],\n", - " [(c_vision.Equalize(), 0.4), (solarize_impl(4), 0.2)],\n", - "\n", - " [(c_vision.Equalize(), 0.4), (rotate_impl(8), 0.8)],\n", - " [(solarize_impl(3), 0.6), (c_vision.Equalize(), 0.6)],\n", - " [(posterize_impl(5), 0.8), (c_vision.Equalize(), 1.0)],\n", - " [(rotate_impl(3), 0.2), (solarize_impl(8), 0.6)],\n", - " [(c_vision.Equalize(), 0.6), (posterize_impl(6), 0.4)],\n", - "\n", - " [(rotate_impl(8), 0.8), (color_impl(0), 0.4)],\n", - " [(rotate_impl(9), 0.4), (c_vision.Equalize(), 0.6)],\n", - " [(c_vision.Equalize(), 0.0), (c_vision.Equalize(), 0.8)],\n", - " [(c_vision.Invert(), 0.6), (c_vision.Equalize(), 1.0)],\n", - " [(color_impl(4), 0.6), (contrast_impl(8), 1.0)],\n", - "\n", - " [(rotate_impl(8), 0.8), (color_impl(2), 1.0)],\n", - " [(color_impl(8), 0.8), (solarize_impl(7), 0.8)],\n", - " [(sharpness_impl(7), 0.4), (c_vision.Invert(), 0.6)],\n", - " [(shear_x(5), 0.6), (c_vision.Equalize(), 1.0)],\n", - " [(color_impl(0), 0.4), (c_vision.Equalize(), 0.6)],\n", - "\n", - " [(c_vision.Equalize(), 0.4), (solarize_impl(4), 0.2)],\n", - " 
[(solarize_impl(5), 0.6), (autocontrast_impl(5), 0.6)],\n", - " [(c_vision.Invert(), 0.6), (c_vision.Equalize(), 1.0)],\n", - " [(color_impl(4), 0.6), (contrast_impl(8), 1.0)],\n", - " [(c_vision.Equalize(), 0.8), (c_vision.Equalize(), 0.6)],\n", - "]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "4. 在`RandomCrop`操作后插入AutoAugment变换。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "def create_dataset(dataset_path, do_train, policy,repeat_num=1, batch_size=32, shuffle=True, num_samples=5):\n", - " # create a train dataset for ResNet-50\n", - " data = ds.Cifar10Dataset(dataset_path, num_parallel_workers=8,\n", - " shuffle=shuffle, num_samples=num_samples)\n", - "\n", - " image_size = 224\n", - " mean = [0.485 * 255, 0.456 * 255, 0.406 * 255]\n", - " std = [0.229 * 255, 0.224 * 255, 0.225 * 255]\n", - "\n", - " # define map operations\n", - " if do_train:\n", - " trans = [\n", - " c_vision.RandomCrop((32,32),(4,4,4,4)),\n", - " ]\n", - "\n", - " post_trans = [\n", - " c_vision.RandomHorizontalFlip(prob=0.5),\n", - " ]\n", - " else:\n", - " trans = [\n", - " c_vision.Decode(),\n", - " c_vision.Resize(256),\n", - " c_vision.CenterCrop(image_size),\n", - " c_vision.Normalize(mean=mean, std=std),\n", - " c_vision.HWC2CHW()\n", - " ]\n", - " data = data.map(operations=trans, input_columns=\"image\")\n", - " if do_train:\n", - " data = data.map(operations=c_vision.RandomSelectSubpolicy(policy), input_columns=[\"image\"])\n", - " data = data.map(operations=post_trans, input_columns=\"image\")\n", - " type_cast_op = c_transforms.TypeCast(mstype.int32)\n", - " data = data.map(operations=type_cast_op, input_columns=\"label\")\n", - " # apply the batch operation\n", - " data = data.batch(batch_size, drop_remainder=True)\n", - " # apply the repeat operation\n", - " data = data.repeat(repeat_num)\n", - " return data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"5. 验证自动数据增强效果。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 在一条子策略的情况下,因为`RandomRotation`操作的概率设置为1,也就是该操作肯定会发生,而`RandomColor`操作的概率设置为0,也就是该操作不会发生。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAd4AAAHVCAYAAABfWZoAAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9Scxs2ZbnCf3W7s451nzfbbx57u+9aCoyI5tCQBUoC0Y0EoKSkGqGgBGjEkoxQ4iECQMQYsKAaQ2QihEwZFASKlUJhIRAVVkF2ZKZkZERr/Pn7rf5GjM7ze4YrH3M7Lq/F/Ge+703ouBul7ndz77PzM7ZZ5+91vqv//ovqbXyYXwYH8aH8WF8GB/G+xnmL/oAPowP48P4MD6MD+P/n8YHw/thfBgfxofxYXwY73F8MLwfxofxYXwYH8aH8R7HB8P7YXwYH8aH8WF8GO9xfDC8H8aH8WF8GB/Gh/EexwfD+2F8GB/Gh/FhfBjvcXwvwysi/w0R+Sci8kci8nfe1kF9GL9+fJjz9zs+zPf7HR/m+/2PD3P+/od81zpeEbHAPwX+a8DPgP8A+O/UWv/R2zu8D+N6fJjz9zs+zPf7HR/m+/2PD3P+FzO+T8T7t4A/qrX+ca11Af73wL/xdg7rw/g148Ocv9/xYb7f7/gw3+9/fJjzv4Dhvsd7fwj89OrnnwH/2p/1hm3v65NtB2iUXUoh50KtUGql1oqIIAAiGNFna6W9LrRfQfv3edQ3nnS0P9GgvoIB49rLNVOpkAs16THkom8SaxExYCtYqFKoRv++VvQ4i8EUgyBYLCKCtxZjDNZarLPtHCulQlwiKWeoejxGDD4EKvDi7sDhNH9zul7UWj/+PnMuIhVgGHpCCCAgIm/MkjHC5aX1dWnzVqhVz7mU0l6rUNvniFx9l6ELHdZaQvCE4KAWas1Q69WfVipQS6WW9lkVBD0OEcEYwToDIlS5OibR+VyWRM6Fx8cj07ToHJfS3mv18Ouba8EYnW9BWOJCSgkjBmP1Op3G0/ee7+s5/zB+o/HNOf8w3+92fO81vr95Wj/65PM37i3dOuT8Wml7SUqJZZkpNZNSpJZMzUn33lIoOene0D5F95aq/9YPOe8b9Y3v0X222+5xLoA1YCy1FHJOutVL2wZqaftQ0e8vRY8p5/b50va4i00C3S+MNRhjCSFg1Bi9YXVSToyn0/k9v+F8A9/P8P5GQ0T+TeDfBLjdBv77/83/DOQINXMaZ+4fT6ScOU2JkosaLWNwzuC9wTvLfjfgvb5uRRBjsEY3zPVa1XbhV0NRBWh/k2si14wbhM1zgzGZGh+pZSbdnUh3IzHC8QQVh90/xYQeuU3IbSK7mWlzR5HEPBdyqnSngf64IRDYy45gHB89uWW36dnf3vD02RNShdNUmJfET3/6Fa9fPZJTJc+FfrPhsx//mIzh3/4//d/5v/1H/+ybU/en33e+Aay1/NW/+gf8+Hd+hHWCc6bZ1QRU+t7jvWvzlhFRp0AqzPNMipEYE8sSKbmwxEQtFec8zvl2DYSu6/m93/sDbm5v+d0ffcIPP/uImifydIeQ8a5iDORSyCWTlsJ8XKil0kWDLULnLaGzhN6zf77DOkOWQpGKOIs4y+E48ZOfvuD+/si//+/9h/yzf/YTxilyOs1Y59hudxhryLlSatU7VwzDMPDZ5z/CesdPf
/pzXr58Rd8PbHc7RIT/4P/1H36n+f5Vc/5h/MbjrazxD+M3Ht97vp9//Bn/s//1/1EDJdTI5irUKuRqKVVYUiKmxMvXX/OTn/0x03TgxYufMc9HyuEF5XRHnI5M96+gZCwZoZLjRI0LJSfyPAO6Z4hAKkKqhkqg1B3d5oY//M//l3j66Y+omx1sdsRp5PD6NbUkBis4AzmeyPFIiRPL8TXLPPLzP/lj7u9eYY3FGksphWWJ5GaUc8n02w27/Z7tdsvv/M7v0Pc91toWuKidubt7zd/7j/8uh8fH33q+v4/h/Tnw46uff9Ree2PUWv8t4N8C+NHH+9oFT46ZmjLeGfrOkbNBqpBLxYggYrBWjYRz5hyVnX2qWmgOCxXD5TdCQXTDRZAqzfWxqB0WjDWIEUgCVf/GaDCMc0IB9dByRFJBUqEaMFjEwNArOm+lwxWHZMMyFnLNvH594vQYeXiMvL6fiLlwf4pMc+Jf/PHXfP3iQJwTy5TY7bb8wWQw3vHweHprc34932s0MM8Lx+MJaw3Om+ZFquFdlhlrDd5rpCoIuajXWDKAbb/rqaUyzzM5ZzWgOeK9ZxgGNpsNn/3glmdPn7LfOEgT5BlKRqTincM5Q6FSBepQ2W4GKBCSwRTBSMWaivGGIplaM7lWChWhIrUSl8g8TkzjRIqJnCvOOrZbj7WOYdhgjL2gFmLAGHzosL7DGEu/vWFfDMYYitjvNd+/bs4/jO80Psz3+x+/1Z7y+3/lX64a+3Hec0ERSbMikg3WqyWTUiKlTBWNSglbpIJzA4PtoRaCFIRCmU/kZSLNE6PcU0vGSkYomAq+Vozr8P1HdLsn3Dz/jO2zz3gVM68PC5Iq1nYY63FBCBYO6cRhGilppiwLMUZSLVQg1UIulVIqcY2KL+dMLeX8vCJq14a3lvINiPU3H9/H8P4HwF8Vkd9HL9R/G/jv/llvEBGcc5ANxQjWCMFZshFqgZLVYAJYq8bXGnOGIBtG3NCITFXslCrrUlADXJCGLMv6xQgWMRoti4EV4JCKGl4Ba/W1VLNCFrkiuSKlIhj9G+8wYiB5WCwsQiyFlIBlZiJxHCOPp5klFV4dZsYp8Sc/ecmXXz2yzInxuHBzu6Pb3+D7wGn8Fsz81ua81kqMkXGccM7ii0X3qgxUYgQxQt8HnHMIUFI5Qz2g0HnXBWpbfCklpnkipYwYTz94ttuOp0+2fPRsh5NKzQuUhJSCGBqK4XSiTYOWB00f+CSY3K5pzSCVIgVqJdNuhqJXOKfMskSWedEbulSM8TgfsNYRghpXFzqMc2fja53HWI+IIfQb+iKa6kh5zUW8lfn+ML7X+DDf73/89nPebpfzrit6C4ne2khVR1mh39yg2BYAuQ4QjO0wNmCodKZgqOTpQFlOzPbIPE/UnBAihoxUhXN959js93S7Wzb7p/T7Z6S7e+7GiUBhZzzWgA2Cs2oLxrhQ0wIpklKi1EKBM6xdaiHXoqnEq5zl2eg2KHo1vutYo/7vMr6z4a21JhH5HwD/Z8AC/9ta6z/8c95DigulZMqa3wMMBmfRyNIIRjQy1WhXN2xjDKblA87ehggiDkE0D9kweGsuXgm5gtEcLVWjY6n6neCoYijS8pyinlrOhVQyNhkkGcRaXPaIVFzL42Id0lnAUH1zCnLLXaJeVK3gnCMEYXczMKfKeJyQWug6Q2ke4W/KLP8ucw6afzXGqmHyATHgrJ5vacZOxJBTy+Omy0KrpRIC0CkSoXlUw9D3dF3g5mbP55/9gO12YL/tCV6QnNpNU/DeKuhQoeZMSzSvB8aKUoiRcz5GfaLLjVDRG9oYAxVSysSYSQVi0dxyN+wREYpYKoLgqNXifUfXD3jv2e5vEWsprqfbzIzTyPFw/LXz/13n+8P4buPDfL//8d3n/CqoqXK+l0ENUspJkbGUSDlTaflUY8E4xKJRcS0seUFKYp4jy3EizjOnOUPNeFN0pzYGawTXbdg+eU63fQZhQzKBWIQ5J
t1kXNH9vQjGACVTUqbGTFkSKUZFXNsxrcdLi27Pucv13L4Z5a48pMZF+gbT6Dce3yvHW2v9d4B/5zf++1KY5wmpGVlPAp2gEDwgeGc0l2s1x8t6sRCkZCgaCZWSNZKxekFzlUb60Qit1kpOkVILlQSSoFqkKGRscGp4jFP4mUJtm33KiViB6DGLw4jgo2BrxXmDxYA3iFiKgTxBjcDcDFYz3pVG6LGVp8/32C7weH9AyHSdI5dEWfRv39WcS4PujTi8C/T9gLWGvvcYI0zziXmZECBGJX+VVJqBi+SU1Z3Z6GLTPAd0/UDoPB9//BF/46/9VfoucDM4vDUsp8SynPDO0vcBI0KlnA37il6si9cYA0VAinrKUqloLqGczwOsKMw/z5FxjswRlmIY/Jbt7XNyqYzT0jxUj8HRd3t2T54Suo7bp8+wzjHcTiwx8vr1a1L55Z9Fjvit5/vD+H7jw3y///Fb7+NX/xZWw6T7jNqtQkqJGKMSGWNSWyYGMY7qpLFcHbUkppgosfJ4WDjeH8hxIo4LphZ6X3AG+uCwzhE2e55+8kPC7hlmuCGanjEbDlNUMqxocGZKQ9FipCyRvCzkaSYtM3mJlJjeMLwaKF1IVmci6pXxLc04rwTa72523wO56ptjZcUCl5MC9YKk5XVt826sQUHR5lvU9mAFlVvub03zVNrm3XLCzuhfOKhW8MG0iLWCVWIQvkO6CFkhj1SFoWxwOKy3WLEYCpIUxC6zUHPVC1tNY9AJtcGpYjUiFDFYBCcKePfBkYZCnj1z7/DB4q3mH813v36/0RBRZ8a0h23RrzGCtQ5r3RkyUkKDUEtFsG1SDYJphlfRh77vGDY9283AMHR0wWFMgZpACsYoMeKSn1895Mu/Lx5jvaQRKO35mqYujf2uJK95WlgmzTVTUUQi9JhaKThljVuLMRZjHGCoVYipkGtinhfmeWYeR6Zx/DMN74fxn8xhjLDpPda2ismWxTqvw7bB6nZUzw4qQKnqeOZSKLUoAtc2YfONzfhM5mS9fxR3rSutvurattYof2U9QLls2+eqCzhv/PrZeiz16lgvx0z7gBZtipyPmXNUxjmwAbh/GN/K3MrVSVzZKdazKLWceSClzWErXXjjvNtUUXNu0XEkpagRaikUNAcrtZKrULBgA27Y4fotVWxzsgumJKyBzhh6A8FAEKFzlk0IZApx8cSScSLIyp5eD6te9pv1tfU6/KoJWOf3u473angrStdeDY2IGlcRi+sGjHF03hCcaXmCRKkw5so55YgaXWP0YhpJjXgDZDURtkWx/WbAegehIkGdLD8YjapcgFwQv4PdU3KGOULBkOyOIoFUErFGSp1ZDq/JJE4sJBJ9sAydw2SDMwZxgsNgsn6/tY6KIRhLqWCfCE92PY9e2JqEtZ797UCqhuD/TILP9x7eWbrg6YIn+KAlP77DGHVMjA26gdDIAmktI1ooJSLSIdJhjdBvO6wVPvr4lqfPbnj6ZMfz5zuswHx4zRIXjBS63uKswXnaxtVy9abByly+T6Qq4lAzsOZUcvOStf5rGmem8ciLF3d8+cuvuLs7ME8jUPChY3PzDGs91mgOKceFmjNiDTlBzpFxfkUuhVevvuLw+Mjh8Y5XL79WiPvD+P+psek9f/MPP2W/65V6WytWLN66RhxUfsBqHIx1+K6jVphjJJfMaRwZ5wnvHEMfsMYQgj7bxinIpSrTH5TUJ6KptJxb+UzEiHCz39IHrzyRZieNXEpY1tROzrndJxo9jtNETJFSV0MMS24lfsZSxSDWYVzQ6pDTRCmFzhmcFbyzBK+O9b/7f/0H339iW6ppLRlajVZzDRAgxcg8TyzLrKhZTuqIiBo7tW4FKQVKIk1H4jyynB5Zjo8N2dRSoyVnooC4DabbUrtnbD79PdzmlkcTWGKCONGlE3vn+MHQs3HCYA1ehG6/59ZU0jIyDpZpPDK9/CWRSs75AoNjG+Gqtp+lOUoXJ0tEzpC5aFTxn
YPe9xzxNi9j/XE9oRZ5GetwVjdsan4Ddy/N89Errtu2NG/n6qOhmsaOhdAFfBeQDkwniAUb9POqWMhFE/w+kQvYKNRqyE4N77yMyDKRciEmCzWTcmUpFVsq4QyxWI0ejTL7jBglYIkysitCHyzOCLl3zJ1r+VaLKXLOSb+roZHtm1GviEGMwRiHtQoPrZ4+tjZIpahRFIdgtfbYW7w3bIYN++2WzTDoeVCZ0ah0jQzWUoAVtjFneLmR4Eq7cLWw1kerwa3nKFSM3gYpZqZxZhwnptPINE4NBq+NBxBwLhDCFhFDmsaWatC1UyjEqPmm0+HI4fGB4+GR8XR4JxGvGI1wrscZ6PnGz2+871e89ucxAORb/7h+Uz1HVL8qlX2B0/gVXvxVLsuYC1LxfVz9q3E6/cZs/t96WGvY73qe3g5IAWrFGYu3Xg1v1HUWk+YirfeEvqdWmFrNvbMVawvBO3YbLSfpgjquzndY58mlMC0JAGMVtSs5nQ1viQvWCE+f7Bi6gDNqeI1c9u1Ssq7TrJyPdWOvtXI4CXN0lLKW7sCSihpi66hiEecR35FSxloh58zgLd4ZgrP0nX9r1+wSCepPK9goXJZcKQo155yVyFQLb67ien6WVl9bcqQ2bgi1YK7qeSuVUg0FD67D9ltst6EmPVdKxtdMQNhYGKzQG8EJlOCQMhAtmKXDlESwpmF5tZG2Lkd/nbddlSMu99dFY2K1Xd/V8v4FQM00Y1kxCM5ZxDgsHVI9eTHMM1CglEgqmYd5JOZMaQXQIhkjsU3KjGAaBGpxtmezHeiHgR/8/r/E5uaW/mZHt99SayKVSWGMZYZScFg8hpSq1ujmymnJxFx4+eILlq+/wBSw2VFSZX6cOc6F2CXmbqb3nrDr9buNwZkLHF5roeZGDioFRyE0CMwYT+ctkuVbG/TbHCIQgmWz9TjncA6QyhJnJArKi2jkq4ZJrTe5k0D1Cd91uH5D3wU++/Qpm6Hn6ZOBm31PFwxlVqagqRYr/iyHljNMRRMF5227rgIahZyiLoiSG6ktalE9WnJkxND3O6wLPHz5wM9/9hWvXj/w+ouXHI4TdZ4JFCQr0xkcwxAwxlJNpEpqpQBtY2v/zilRUqSkhZIWJeq9xWGM4fMf/4jnz5/r7bzWPLZNRFpkVEsltQhHnaEV+rfUuorLKJ9BSR2GVQClrqmWdu97Z3FuRU70fWmJ5JyZx5lpmskpM0+LQvHOI8YwbAeG3cBm2/HJD57hg1XehIEu9HRhw6bf8Mnzzwi+Y9NtCS58Y0P61j9+o/G3//bf/p4z/euHEhwLtRSsdTixOGsJNkCtJFKr31yYlwWTM0tKeuKiHmPoOsSqlsDQd4rmNqOyxLwmulpVhcFVdVBWqJdGEhVB3yNQrQEnGGvpvNfrjs7cEiNLbPWkMWupSzVUHLEkpiWqGE+pVITgPM5b3W5qRGqCuiiHRoJ+tgHewf6yfuIaMyivRYVtTqcDDw93nMYDuWQKWotfG38DqYhkhISRRLAZ4yrRV5JvOVpMI+Oq8e37Pbv9xwy759jNDabbkO8eifPCtk6ErvAsVG5tpTfQOcGKYeMG8jYwT47X9UCwmd2257TtmCYoLRovzTE1XCfX2s/fIFytWc9yjvZ++/H+DS8tSr1KUotYRAJSPSUqylBKoWSIuXKaZua0tJxLxUjGmaQXv9Xz9l1HFzwydPR9YLvb8tFnn3Pz/GP2zz9i9/QZKS9M06N6pMtMLZlgOoLtSKkwnbTm7P7wwDzPTKdHXjUY22SLpMJygOkEyWfmsFAGw7Ne63GdsTgR9fJarkXPt6oHVwveqlCEWN0oS4v4390QnLd0ndO8p9W81Mqm1jKb9junBLfUiGpGPM4lbAhY3+O7nmfPPuF2v2W3sWx6g5DJcYaakWqw4i75s1KIuVylcBsbvTTvPi5q4Us6G+KUFoDz2nB1wITK8fWJr372NfcPBw6vHhmnmbq0BVwKOSaKLerEWUc2Bj06z
SGtpLeSCzWvhjdRc/yNWeW/6TDG8NHHH/O7v/97Og9vEO7AGIcYQy6FGCPAmbkfgpZ1rVHD+rySOi6GV53X1U50naYSWkKHWgrTaSTFxOP9gcPDgWWOHB5P6lSFHmMdt89vuP3olqfP9vzh3/wx/RBwAawTtsOe/fYJT/bP+Ku/+zfZDjue7p4xdJtWPf+NPV3+bAXalRG6jndpeKmXeTeilRHOOrzzrSqiIllr/lNK6iXGiBiL73tEbKs91/s0dGqw53kip0RMmZgKYiwuhIYgyWVeWCFJRRFyycSEBgnGgtXPt1YddmlIGQgxJcYlkc6Gt5IyTFEdyFT1s4OAaTyYSoaakBqRmjGiZTXv1Klv/zNwvrdzzszzxOmkJUGl5tWsnY3uyufQu7PgbMW4QrCVzq2frYa3Zo3uvR8YNk/ohhtst0F8R8mvyPORnpkbX7h1hZ2pBFMJDeUzzmK9MLpKnDooC0PvldyaIvOaZz4DzGtqjKYpIee8Ly1Vhkg7o+9qdv8CDK9CV0bzs8ZhjAcccWmbcjKQhJIrORWWnDmeIvMKG9aKdxXpwIrmMIxofbC1Hms9vhtw3YAN+nB+i/M7xETAUEpi4ZGSIsZ0YHuMQNiAxASnkVxmRbVLxVTBGUc1FScOUxM1tfpdW5DSKPXrBn7FkFsfKUYtU4oLOSclK5VMKvLWN/43h5LPjFXo2BhdMA7TCBrr91eFgo3gfUBEiMtCijAEx74Z2k4Svsws9zPx9YKwog8VFeUo1IZOXK9MKyuupo5XbTW09VsRb1QD4xyIQYo6LZ2z7IaesiT2fY9FmFIk5YKs+bGmioUIqRZ9FM3jlKo8aSV6mOZsOMTYy3V7y/MOTTK05AshhnaTXzMo4Y0cnzFGySY5UYrmotZ1ZBoEuebM1o2eupKD2mc34osaO1p5ntZjlwrWOcS6s0GyTp0vu5IPuTjGq+iMrDp834Cj2zLizX9czcQVYej8rndoEK6/d0UZQJ35mOIb61JTLxq5rqDjWjK3ehYrMaqiBEqsJWV1cPR70A+8CoqMNGNqNSrsQsBZ5a94t7JiOUOpmitd4U3DSiRVnr+KT1gXNN1WKxjBOod1th0vOCuU3JFz1nTd1Ym+zRX+huxs5RwFxriwxKRByzQRr5jDb4AiUkE0LVVropRIKYlKS23RfLgKzjuKE4b9jptnz9js9pqaqxWTEzZHemfZbzfcbnqePHtGcKpIpahDBsmksrTgYjXGDrF6cVoc2ORpv70uV8i5Xv1qJdzJr/j732S8Z8PbFE6MQwSc8Tjbk5LhdFLP3iSHKZaUIM6ZOWVeP85MaW4hfmXoL4Sm0AdCM7rGeHwY6DZ7uu0NYbjBD7f4/ik+fIQn0Q17So4cMcRlxEgPpscGi9/0uLjA3T2pHFW5KatqSmc9thg8HY5KXJQu3+ORZDCNEIa0WtTGRixt01+mSen1i8JJUhTmWLIhv1PDC8ZUrK3KNLYFaGIWGGLKbWM3zYGx7G4GnHPE+URaZrbB82wb6L1laxa6XHj95S95ePE1VsAZNdq+01z3EhdiXM4MammfuxqGCmdDwtnbpOV3M95a7KCAky0FVwob73m+3+ErPN7sOTrPaT6yLAum0AxVJuZEFphzUnJcSiwxUoEsolyBtokZ5zEuvBtW8+qDFc0t0/J2CjEX3VfLhbG6FufnJsmWs+rbasSbz0bXtHRApeVjLUiRNw1vA0FrQ17E6PyvJLcKYDyIoesCPni89xqBOYcxSUVPbIsUncNa5QSshvVXb1F/eUZtCENpTgto1JlianWh7sLyt0qALI3BaVo5oyJE9pyPrVVV0owYUq5Iymf4GWMuRkb0/VbAW98Y1kHTAVbwViNjCqoD3xwlRZlMc2JNwy6Mgno24LrG2EfXil+vnVOCZs4Zb3XtxKhps3Uu3pWjI0YwYilFDe44zRwPRw6HI0tZmlNy5XCZpsVQCqVGSlnIeSHnGUhI20vsKnJktezo9qPnfPrjH
zPcfoRrqRiXFvwyczt4Pu6f8vRmx2c//pzg/dk5TctIXE5kEiY4zOxwIeC7DjMtVKOIQs3XpYtnzIIzz6HldysXx+zsiH6H8d4jXrMqUUEL2TUKiTkSoxBE87VSOCtd603RiDaiZKEQHME7tpuh1QAbwOKDx3otBVLkpp7p7epdzeS8ME+nVmsKzhmsCVjfSCTrQy7pdTk7DUqGSnDOIa3PWD2KVRWl1gtrMpdCyaXJWUJdywXeqi/6q4es3plcv6axkrNrGZfDOa9a2SJY0WMUA8EbhWecpeZILJH5dGQ8HHBGKfsqIq7iIqkpxFhjEddYkLo3oXjN+SjO63sFegwG45xC4C3qrc1orRuhqppZrKh2N6iDk5uTs+ZUz58tlXoldKc5bc1rizHfryn1rxjnyLZehVbrdT//0bcjXlgNcItqL3/8JoLC9UdqRJtzpuTG9L8migg4Z+n7TqM/r5+bWjRlnV6zc5kMnElWptXEq/P0jXKYv+SjAikVYiqkVEhmRWB0pdWW/1AlQ2n3ZqY2J47rzdeIasTDOSfrnTsbzMJaulPUAaKBlsLKw7naxq9Bg8plh1kjZfmWkax1/b1R2LasKJoK0tRqzsZMr5NunVUujU3eKqomV+hec5xLrcSkQUVqalW1lm+vl3PYuO6pzZk0tvFMVmU9XYfOejAeH3p8P2B9aLA22FqxgLeWPjTGudd9Y1kWcs6M08h4euDx8ZHHx0cOhwPTMiuprqwiPZxzvFV0bazGdt03zymeK/Tm+9wN79XwGiN0XadQY2PQxhKZc+buOLIshY+ffEy/60nRIqOjzhk5GWoRnFMZsP2u49NPbtgMHZ9/9jHD0HMcF8YxsrnZs30y0G0D2EguR+bxDqqwLEeOx6+JceTu7qfMy5H97jn73Ud03Z6bW1VUCcHRdT3Bd3gbKERqTRgDfS/EYil5ZkwzJSqDNklGNgHnLDVDymoMplHrTdOyKLSKYL2nGkcSaRvguxsicpZrrFXIud3YomVcm82W0PJ9tklGupqQlPEUvKncDp4ffvoUKZm7X37B3enEi1/8gle//JohdNzu9zhrWWaFJcuagXQNtjZGI0xruZQTXc67GAsiOK+saWeEIWikgPUkMWTrqN6D84hzGJvonKVv+dDxdCJjGErCVUO1AtWqVnRsimWo8bDe40KPCxMu9G+dXAXXsLI6lvINo7Wa5De6PsHZ4Kph0C15JYat0PG6o9dameelkRXV+XPOEII9GxgtZdnTBS0f88ZTKtw/HpmWiBuCQsnWnK2EMVZldUPH0A/0XY93/uyk/SdhaPeqmZorcYFhyDgrhNZsxdhWY95VLIY0J5Zp0nukFozYxpRvSMDgtZzO6/Nmo2s4pshpHJtDnaAknT8spgqUxuLPUKqB6jCoKI+YC2PcNONeEGxZu3mtcHY+O0W1VJZZ+S6ncQLAW0fw6jQPfYcxDmcL1jQuxbK8/QluxleDi8y8zDw8PHA4npjmkVJVktZYoTTOiPqJQi0WqQ5jewwG2+0VgVkSJq4EyIiIo9s9xYWB/bOP2T77BOM2KoCRCwHVHHgydHz8ZMOw2WB9IAMv7u85Hg98+cVP+fKLP+VwuOfnP/9TxunE69cvGacTyxKJLRgqZ0+93TeNb+G9xzl19tf02WqIvw83571DzeccVTWNPVvJtbDkmSUXiiSM01IhcaLkKbNuCGCtMjiHoWOz6dnf7NhsB7Anqgih97hgsd5oHoFEzgtxmVimE6fDPUs8cTzeMS8HVXPqNljnKDVR6+rZNolELh6oCK15g2ANrbypnOv2pF61v2v5pXMk1rxUzFUN2HuKH1avslRBihpcza9butDT99sm5GGBglTNLIkIYg2d92yGnpojpWSWZWZZFuKy4I1VZF2gFqGIaG2hEYzxCudai1kNrzWtfnhFLQWxWo/ogsUHhzNKCDOiEGqtaBG2dWp0jT1Hv84YIq0ko5HvSotmWMlIzYBJXevw7JXAhm2s1
Lc/rskX67/XW/s6CnmzLVlFpFwtDVnTtkClKaW29zUt3JyJMRFdBKx2oLq8Ceccfd/hrKPzGvnOMVFQoZE3y4hWTx+ssZr/bR3DLuSuq2O7Or83X/mLHbVCTIUlFpaUcUmV7jxGERCprYMZiAMiTT5VWsvIivYEbfnd7FgdGWuaEI+1GANxmc9iG2fHqK4QZVOMb4GG6hOsEexFiOGCMsgbwXZ7kz611ZOzXvdUinIAbKHkineeLvRndMKI8mbeumN5wVwbgU0Z+CtDXNMlb6IuK/iznr0K8lhE9L6meJwNJOsRjHYkMw7ne0K/wYUBG3pEvDqhuWBRqDtYR+cVyq9wrsF+ODzy+v41X7/4muPxkdf3dyzL1I5RW68YxbRBVodXV/B6ne1aenm+KBfHd70i32W8X8NbW/lEOz1jNNGdK/jek8ncnR44zKNupEn7pqZ6yTl5a9n0e57e/pDttme3e8KwCTqJvhB6S/CFYDO9TfSSyPHAuEQeHu752S9+RkwzoN5izRlTFtL0yKuvfkLOldcvv+Z0OHI8vmaOs7IEUQbpbugIzhIomjMODspCioVUHLlaYsrMSxPkTlqPHLzFdK7B0hlMxtjCXCG84wYrPgS6Yau6xd0W7zr2N8/wPtCFLT70nE4T9w+PiFR2uyfaltFVnKnc7rdsnz+HWvmh27FMMx//8IHT/YHgO3abnW7Max63KQQYp9C/mJYDN+bsRMElp1Ib2Gut9l42aJciqaoQRoWdP5DCp9jX97x8dNTDET/+EpMe6TY7uv0e1w9aCnMF4IvR1EWtlZgr1IJ1QqiO0AWFYN+BgEaulbgKrItt0e21+W2RLC0ju0LIta4NTTVNUqp2aGo7l6xRbyOpLdOkJRs5MY8jzlq6PiBU0jIp0W1O5HEmeAfDRqHm+UBOC9V3GNeTzdoDymCqw1VLb3fc9Lfsuj2bMBB8hzP2DI9ecpp/WcztZYgI3gVC6PG+w3mP80YdOiu44DAGlhSpUclnXd+rVHhV/sMq9J9iZKwJY4QUrUa+mw39MBC858ntDbkUxvFETFEV2yhY4+i7gDOGTRfw1uKNxRnbGsSska+uiBJVtak0johcVgc0KJSqAkTVCMH4c244RVWNm+eIs5Wh13p7zRmbCzT8Vib3ku7IqMjFkhNzmlnSRCEhtgG4JavTceZ26BqrYkCUyOd9r4HBkBCEUsBlsL5j/8nvEDY3yPYHHMsGqQazaPOVzgecETbDQNd1zOPIL7/8GafTkf/PP/3HvHz5gnk+ME1Hgnf8lX/p91uvdHUiY4za7WxZuLt7YFkWXr56xfF0oussXTCEYPBO8FZzwVJUN9qJaRK2323tv3flqlyy5kvWBLWzuAIueGyBh9OReV50CxDdZFLNYGi1spY+bLjZfcxm27PZbOgHS5EF8TPWW7yreFvoJNNJ5hRPLMvE4fElX375FSkv7HaWEAw1F0yN5DlxmE+kmHi4e8V4GpnGB2KaMVJR2Witwe2DxdaIKaHJKypjWVm1tZEbtIaypEyl4rugNXcpUmNETCGYQjDg3+msC9Z5QujZbPbc3j6n7wY+/vjzptvcY4zn1es7TiclRO2f3NL3gb6zWgM89PS3O4zAZv+RlgmNkTRHnPV0vtcbvEXxcs2CtW1XaY4lRs5pnrOxaca1NXVEi7hbja9WHLCxE8WdKO41w9cTyT5iX42YKeOGDX63wfgO2/L6582hcQZyKZC0dMgqBVR5At27IVflUkmtUUZtKM9K3mmX5exhr8VB0ATbr0RhSltT17WEyv7XUqO4XFIZU2PgdlPQ96dFPysm6jyTQ6BrIXNaRhUtQKjGUU1RtAKDwWPxBDuw8Ts2fkvve63fbS0217GiBX/ZTK+IaT2jg3auam0prW+wbuMlrE7DCi3WCiUJtaxIQyalSklq8HJqcrbew9A3ZaiBXDM5R2rVwIIm4tN5JacNfU9wVpX1EHWQ/Cpsoam3JacWNa8R48XonvPTLW9qz
iRVQ4qZmBJUIS6Z6mAYlMW+MqTfpt2tXFK1mUpspMaYF5a8UCQjtolT1HPi6Xwa7Qy0fKoRHa0RQp8xYsjVYPFYP7B99jt0uyfI8BFj6TGlYJaIo7J3gT44TYX4wOPjHT/7k3/O3d1r/uHf+7t8+eUX9EPHsOn46PlzfvTDH7Lbbbm5vaHrOpYmHXs8Hvn5L77gdDo1fspCFyyhiZA4qypguarSn20CSd/kzfw2470rV62bba1VPSMqJeXWOKEo0zfGtrjOV2rN0GGMJefK8TBCrSxPPFYRUoXGjMEJWColJfKyIFVhiL7rubl5QsqRLlScrZQEx8cTMWYej0ure3xgnmaWWSUJ11IQg5YTWBH6roNdg5ZKY6Imhfy0r7BGd32nVyYELXYvArkkrHF46yhVGjHp3QwRYRg23N4+4fbJMz795HO6buDZs48JvmNZCilWdtuBj54/w3nLJ58+ox+C6kk7Q2hQswA1680UQiYnzYV5678ByV/YJLUR5FZuU0v3nAkZsqaxKqzSkSUnlpQouTCdtO9uWioRA93AzSefEfZPSMZw+8kD2Qai60i18jgdSaWwTLPWv+ai3ZbO9bAFJLG2RbTWvP35b/BbWptGc4Hk1rW0mqoz1UTOW2wrgVEouZZV+KNNVG3wYU4X0l4pl00tCTW3ra20BuNVI/CUK3ElDaaILRmbK64KHiFIi8haWZ40GL6InPNgVg/2sr7WVPRfOtNLS3dwri/NBkpR6De3TmKg6m3iGpO2QLb6u7VZlrWGzlst3ZJyhoJTVMUqQtufzuL5q4Z8QzIa0ahW21jAF/hyLVMSLhB2qZW+C9icldGcBMmFmoqmwuQy7yuApD1wCylFqJlaHCKql27dqrn+Nib1Ah0XKkuMHI9HTuOJqclElpLO8Ph5tGidq/tdK3mFiCVJJfst2fSI6+n7G4zvqbtnpG5LLUIaRzyVvup5hWAZnJ7nssycTkdevXrJ/f1rclpwVnj+/Ck/+OxTPvn4Y/7G3/jrbLdbQhdw1jHP2td7nCa6vud4PHI8HSi1NCdM93dd3pcUjD5/M+3y2433zmrmTBTRfKhE7YZjSsZWhXWmSWFgEQV8OuMxYjFiscYTl8zLF3dMU8/NbWi97oXOaTP0INpJKE8TSwHTO/oQuNnv+ewHP2oQ8EjNqhP66vEV4zTx8tUrYkzMoxbJp3mm0rrzVCWfOKuGNzjLzXZgWSIPjw+UrItQdYalERwE3zfWcFCIKxtDLBlrPEPosQaceXdazSLC7e0TfvDZ53z22Q/5q3/lr9GFju32FmMsX3/1iru7B7rOcXOzowuez370KcOmx1jdvNTzb4ahtXNcf15ZnOrkN8PQjAyszf2uPOQmIaewTYvkmqSfylNW8rIwTTPzkvnq1T3Hw5EubOm6HbJ7wmd/+JRaKz/4gz8gxonX9w+8eP2a+4cHvvjn/4LjaeR4HFmWqO5aVbKRtGikG1YYruL9Kjrx9kYFUi4sMbXIRHPaMa8a2E0l56rc6kyeaWUQa03v+lwbn0A38kSOc6uFbgY4K2teClC0lVposnmLt7jJIs4yTYtGelZRnJAGQhU2GAbj6U2g8x3OBYz1ZDFkURa/bUkBs54kb6ai/zKNluLHGCGXzDJnhCYyUgVZmgGtFmc7rFeHgwppWYVLMjkX+i7wZL9DgCXOWqIETNNEVzuGzaAiHdZSnGM1vMrmV+NZcqGYohGedVhz6TXepKfw1tAHjcwxhlQKWINdYsvJR2hpGKhYaTXasva/zcRppFihZI/gsK71QH+L10cdCk2jjNPIi1cvGVtOdZpGcnNqacen6ZF17a77R6W0FNNJAuBg6MB29Pun3Hz6u4j1LMUzVaEuER7uGayh7xxWLLvNlpveY8rIaTzw+u4lP/nJv+Dx8Z64THTB8nu/+2P+1f/cv8rnn3/G3/pb/xrb7ZZ5nskpMU0T4+nEEiOPh0cOxyMpRbz3jOPI8XRSqLw5OEaEIoq8rmz//
2RAzVWhNCm1EXjQPFauF2JMuSaQVFaWpZN2stY2jzWSolWizyy4kDS1WC+khXWIrP19PX0/EFNiPkaFh1IhzQvzNDNPEylG4qI3l8oXriy2iyNwIU0I1io8UmXVGK5nGN1cPZ/p5yIgFkS3sQvV4t0MEZqWsSd4T9d1hNDhvNYjKtymkV8nntAFQvCam22HVrJGb7Vdm1qrsqBNWz5nGOwaEntTGESkyVNWQWrLObVOUpeMkW5ApaKF+Evk8fHAw/0j+73F+a1uOsZAqSw5Ms0Tp/HI4+GBw/GR0+mges7zTFwSVhzWeN0I69qgQ1nU0iKU+pYNL3A2sJfSsnWiLk/XX3u9Xq9JV9/8vDNp78x+vvqsq/zvqo5bhVaPp4abrEw42yItmysuV2yqSCoqWMIqnKEKa6si1RqRr+08r/UGfo32wF/YUERFS4RkZRavwjZF8Q7h0je71ss1UAUqo2IztSmGrdKO7V6+lHa9cREv1/Ea2726xd+A6RtMsYpinCHk5uCzEnysQfKFyGPavbTuL6VFymcspYWkWsNd374yXkNActOWXpaFJS7nmukV0fr2qN84/zXfa6lisWHAhAHpNtTQU0WFlVKpmiaaJ234ENT5W5tA2GKwuSFXZ6LiWR3gQuK8SivQ+qqvhMGVhFbO7+dSq8ultO/M1fye4z0b3srSEuNSynlillQZ58QUC7GowpBFsGJxxrHrt3Q2sB08m84RgiXnE8u8cPeiMB09+5vAZud10xBwpumkNilE5wOD7fmou2WZF355PDJOkfHhyOnhNdM0crh7peLeSSUM1yS6pipX4f9VW7mxEBs7uFbNC2Uq3hm81z60VtYau7Wm11JMD8YxE5hrJde3XUn65rANPnRO6fHWOtVVrRDTQsoR5zxD35/JUNR6zi2O48T9/UNjLkZqgdvbJ+z3N9dXl8qCdhda8zpoKQFgxJ7haCPa21JW49yMiDomhrgUXr164PHhkX/0j/4JX/7yl/z+7/8V/vAPB910SmaeZ/7+//s/4uc//ykvX7/myxdfs6TE42lqx631jX3o2fRbdTK0eTPWeZxXKFWMfQdm9yIUf93ZZGVB1hWrk8tGvgpjrGgQ8MYGv/5O9cpVx1f90nYOJit+erWZr+RoI+CN4ERwSdeyrbqphLESHiOdXeD+BEnotrcMXc8w9HSbAR+C9p2m4tYdqDlXa/5eD/gdTOR3HLVq729nipaoNWZ2yYrCpBQb21eNp3eVtfWltNyvl4pVdUdKVmnP0trdqa62Ng5Z0Z2zISxaxWAMZ02ANUpajaBKVTa9+aKNAaBRIowBa7TzUVIGSIzaQEBKwVuDM+CcRzu6eWqv+2tOGiyUUpnGGess3r/NiFfv37gsjPPM4fGR+/s7Nbwt6FgdwF9VO7xGwKUhKVWE7AaqsYQnHzHcPKW6jkfbkUvl8XTPMi/44wN2POA2A6Z/ig89u95ws/ME2RKk4/B4y/52R64Lp/GelBJ3d3f89Kc/JS6Rjz76iO12S991WGuZponT6cTj4yM/+elPeHh44Bc//zkvvv5aCaldh/faCENyanvZarwvohrfZbx3qDnngimldRbSRZ+zbvK5rBJvoBdY6dzBejrvCd7hvdOop2oOcJ5HhEjfN8p/ywevHT5UeEE3XWcd4rszPl9yIS6JpfV3TYvKOeazjrHT/rrIJWpdPdez29Oi36uewGeBBhGt5eMCIVYAsRQsuapqVX3HO9Z6PGszhLXfrt4cF9h/1aWVNQ9b9QaOMTGOk3q3c6SUyjBsyLmc50OLeDIqBaeGtzZIul0Rrnfo6yV7fXsqq7EyTwun08jd6ztevHjJJ5/8QI2OQI4L83zi6xdf8fOf/4yXd6/56uULJTQ1A2eMEoG8881BUubqyr42zlFFsO/C6nLZeFbVIGnIDS3G/+YcvJEf/3Wfxyr8vzJcL4pEqxjBudVivcoft1BVij6Uq4BKoWawsWDnRJ0i1UVMrSqicjYWlpUEp
oH7Gt4241vPaOlbiQbexlDqhcpuWtPK3OolcskpXzpgCWRpus40fWPT1lFrZ7ky0leBlstGcPWjXK5fXeEBWsBrLp+5RrrrNZWqJXzIpWGKbRbKtt7kqnR1MfD1KhpuGWJ19uDszOZcmhP99i7Ketqlsb1VjW8hxlXz/Oq7vhX0X+ZMWo63iqFY7dkq3Qa72ZPEEsWQamKKkWWZKPOIn45kD1L3GDLOaV12Z1sf3qGj7zu6rtMyIaksy8Lh8cDD8MDr16+JcWG73RG8Pxvew+HA69eveXh44Hg4ME0Txlr6VnK47oOc77XLGX3Xrfu9R7wll7M+almNbS0t96eXxlLZd46PbzYMLvDxZkdnw7n+zkjGlIjFqohCCGyHDdvtHte1khUr+KHDDxvcMGD6gVIMNXskFa0xtZ61QboxhtvdllIT86Q5XmklBUqUqC3aus7BNSH7rBGWDwHvfJPdcw3KagSblu+oIuRzBxQlFfwKfZe3ODSK1NpdfQiGmNVxsdbQD4G+69juNtrlSbTGeplViebh4cCXX35NKQXvtPvP4+ORWlaNpPZ/W5reaV0TO2cvuOuCtnxsEoR1ZS83iE1vWT3OnCqPj0ceHg4cTxPjtGCd5cnTPcuy8OKrOx4P93z19Vf8/Iufaxu3ouUwawTYD1uC7/js08/5vd/5fY0oy3q0qg09zzPjOP5Kz/x7z/o30h3Xr/06EvU34eayqqKtRrw5S7rp6bqyLeWhOfd1Y1dueGpdmcY5U2NkcVa7ZFnDhoC3lTJHqghLhdc/+4IwDIBhejjS4SnbJ8gg+AG8vah81dV5kDeN0F+WIaJQZHCG/bZn6Ae6vmO3HRSRKZGUrlIhTRKytKj1THYrlVQgzxrxmrZGawUKxKQoHkCM+Zxr1+tm3oCmSy3EWsiFc0mQwsrqHGueXxnwOWpkfc7dI3jrVa1JdMMPPmCdP0PlKnGry1zLloTOBTbd8HYdIoF5WTieTozjxDJHUk5n1Equds61HNCs6aV1WIO1HqzDb24R3yFhw5xVVGlcRlKKLMfXpGnEzQeIRzyB223gdtux7SyDF242G/Ybj+FH/Bf/C3+Lu/vX/L2/H/jqq18CmZ/97E94+fIrXr78mhACIWjEuyzxvAd89fXXzMvC/f0dMWnHohC8EtPODjQNvTBnG/Zdx3uu420CAQ16qehmUtBcTKlq5KxUtp3lB7cbNj7wab+ht57DEhlT0nZvNWEqhEbL77uBzbBDfAGTwQi+7+g2Pabrka4jZ0NeDMZmxDrEeCqGnNpmtB2oNXOisCxN4D6rEVkrts5ND1JSBnMTxwC0gL3r2k2kFMnVQ5Y1Dy9Q9ApSsSTqOxNwWMcaba7GF5GztrGxQtc5+qFjux1a1GRUIScmpmnh8fHIy5evqbVys78lhMDpOJLTNyQMnTo97fSanGY6O1neO3poXno9w2vnY2wbTymV03Hk8HhiGmfmWQXO9zc7jsdH5jhyPD3y8vULvvzqK8Q57UnapOfEOPp+YOg3fPKDz/jDv/43MCKkqIo3x9OBeVFvt4p5J+VE14b3DZm5hpqskfCvM/qrgX1DKrIldWsrWaOC2JWUpbN4blUHpFIxpTDlQqqZ5CzeVLy1BGtw4rV2tFaWlEm54LqA9YF4mnmyfUL9dAbjsVVwmDdzdFeJ3b8ske46BE03BWvY9D37/ZYQAv2gTQSW2VJrvuSsRZ0VAwqPyOrElMYD0bRB5zusGM7qe6myLBmkktKqDd1QiXM9dgO0W+MOrcUV1QxHS4ukCfaAqLEtF3GU1fA6q1rNtkmvdr51smoObgayEaRUvGiarLeeTeh+LZLyXUeM8cKliEmbk7STNc0JNFVaP4SG/K3oH1X3fxsQ1+F3TzChJxnLUmBZEuPpkRwX0umBPJ+oywhpwrNjP3huNoFNsPReHatnt9smG1m5v3/N4+NLjGRevHzBF1/8EoA//ud/pMfXJFBTq0JJWQU3aq30g6bbQIVnrFt5L
Opgrn3Mr9NH32W8X8MrF+WqnFu/yUa1tybjTWHwgq+GZ4Pnh0+2DNbx1IInk9ckviSSTZQM8zIj1hBTolA1N7x2/xBdAOI8LnRKyy+CcQUXtM2dcV4333P5R2m5E0OKiVhTo/yrodCbQuHXGJO2F7u6IIhRHkvWZH1NGl2ttcv1PBXtOK/gwncz5UI/bLi5fcIwbDiXMbTr4FeZyPYMbdOvlXmaeHw8MJ5OxKZI8yrmVq+7uiLSiuFFOwqt3qBdCQsJZw0ff6x9fOV2z9D3KqO40jCu4PhStanA6TQxjpNudqGjC4G+9+TUMfQ9w6CO1mazB2NV/cYYaBG5sR7EEpfE4+MBZzQfB3rtliUyT1qC8EZ97Vsa5izTuea6rnO99Xy+yvt4M9L9pqLVdQ0v+kntXC4Rhm7sK4O8gXrGKDSvjDiWWhhTItaCW4RUMkPxLQ8s+JipklkeTthqePzyBS/3v2C+uWFwHV0/0A2bswyotLy0NMNycQD+4ocRo2umtQsN3mGtQOuAY62qo6XUjEZRA1Gpuk8YdQBrWWuptXeREJuu9YrsaD4Z2t2wKlU1hbpaCiVLI4+aq2up3JZVOtKtnWBFyEW5EaUaBJVOtUZwDbSRVpWzzJF5ao00cm6BTGnXobS0TSLF+a15RuvePc8zp3FUTeRSuCCxK6isyoFy/q851219GGN1T7ah5bszc04sRUui0jRR0gI5YkomWGEjnj447fDU8txWwErFCgRn2Q49lB0//uEP8Law2/b0QbWbHx4PpJRZFu0bHFNuKEU+Nye5EGPfLBeqdSXWXk/Gd5/H92p4VfTaUWIi5aTC2lUNWTAJ5ys7BOMdv/98w3/2x8/pjdCdRogL+XDiOJ6ItlJ8JdbMw+HAGBf2z294UhQyDt5hg27ERQQTOsJ2jyyQaiEXw7C7JRdwr76iGEctmWmOWKlsei2Mn8aRsY5njL/Uwtyai8clsSy5bbChCX07hY9TYpmTUtGTQlRBVNRfUzntZgsOb6rmI97hnD958ozPP/8x+/3+zAp3RusSXd+jzE3bIEv1nlMu3N8/8vXXX/P4+Nj6a6rCy7xE7h4eeTyclKBkg4qc+02TeVM4mZIhL/Rd4K//td/j2dNbnHE8e3KL1peWBv+aBpdlcoFxmnj16o7Xr+8xxrLb7djvt+z3W6wRnjy9pdbC8+cf89Hz17SW2hpme+26Y60HLMfjzBe/+IqulZNZI4ynidN44v7+gZcvX771iHdtOtF13VXUCmd2ZMvP1lqpxpzLhABW3PDMsixrHejF+Ipom0qqMpBBp7EFxCpSglDEKJElCakq9JzmhBVhSQudMez7XnVvU8EaT0lw+uUL5pcPmFMkvnrk5tkzmDOb/Y7nn3/G5vYWG5SwqC5F1baY/OVBm6017Ldbntxs2G83DEPXctTaujJ0FuvheFxIy4ypTo1pMcScYa04qK2BQtbWjsuyqCPdcq+1KiRpTFMaEOUSrPW1KSvXIaWCyFqwjqZGFo2iS9WmC9JyxLkIMVtKaZ29jKdYT3aJUjk3XHl9f8dpPCn6lqJ2FtttVC++QbwpzpxqfGvXpdbKMs88Pj7y+vVrTuOsPbdZ+0UDDRkx7T+o50qGNUb0LtDv9hSx3KfCssw8nkYO04w2ZJ+RkrDziMmRbWd51g082fTsgmMTDJ0VggVvwJuK7Rzu6S23ux73r/ynOR5/j5/+7Cf89Cc/4dXrO/7pP/tjjqeRr06vtdwwaslfbQ1DVh6Ma32S1yClnm9azk7Etxjtv+X4C+jHe8kSacP4tW1Xu3jNe1FWsz6MVDBV22HV3DwPc/msM8Hn8h2XInWLiAVxSq2XVoflPN6rCLp1jpoiKSopaC0xoOUsNNr9Bk39DfKWPUeQqeVlcl7FDuqZnCGNbdvuvdZu7Lsz437TYa1T2TznmkdeG4v2iqTT4JfVo00pE5eFaZpY5rkRKRZO44lx1Bvv4fGo0K7rQCwmFMSqmL5zFqkFUxM5Z
aYpaivFJWl+suSLtWgTUprnnIu2xMs5Ya1VsXLnsUbneSUSuSYJmOoKVRuqdSCiuepWH51ixADzNGGMsCwqlFJyfpMF8xbHNVv5XFqktUU0Zs23opDzT2cjzOW5WdWrQItzFL1+3/ohLfe09oStNauONlVTG7WyFGVAh1yYc6GaTMjKvrJRXZk4zsyPRyYXON3dUbMaX+MsofQKc0tr7N6O5zyT5yj/cn7X1Jt3HRifp6xy7uNda6FwaRO4OsBaNnJ5p26qpUVtV4TKBhefUyRiLxekjdV4viFssX5XI2qJUeJXIjdhMU3JiKydpioxNgc4XT3y+lprgRmTKlWhzuv1fldpSIdUzFtEdGqF1MqIYornDj/rnMOFy3VZvvKN663myxmjEkopklMlLxNlnhAKhoipWcWQrLRS0VarvMwkb1nlM1c7oYiqlmL1XaDmnm3fsxl6prGj857FLU0DXlGBtV/zKltLu358C0qub/zu+473G/Giyk+lHXcpiTnO7cYo52hLcmWeE3f3M4MTtjViSqKYjHUFvNbjGu/otxt8HzChlVSIaPcPAtZusW6PMVuQXiEmk/DOst3d4qzj4clzdk+eMx5ec/fqS1UEiqqtXHNW5aNGCquo0fZica6jG9YkuweEeV5I46J1yamc73wBqhRsWW9Kg7OVzunFfYcBrxqhEHCbLS502nIRMEGgMR+VAKW50ZISh8ORaZ559eo1L1+8YDwdebx/zThNfPXlFxwOJ+4ORx6OExhHdb3mq7oJMe6sDxusYeM9cVM4HRbGPvLw+pGXncc5Q99p68ba0gIxZ5Ylc5oOjNOBJY7c7Ld4f8vN/kZ7m2KJcybOmaHb8eTmOQlIVV23pSokqB11HH3w5HniNI7cvXihUaSsbPrEdtN/L8/110/7pUQIVpJOajl/lHAirdxMVEaQlhNbDWyj2lwi5nM9rjKSFblY21dW0rmdmqYvlAgnLJNhGXXTWrJGbLFon+nTErnPhiFUPjKRzlWeScDVSro/cUwvSPcH8ulEt+l5ePEl++dPuf30Y55//hkuBIbdLdY6VpEZ6spja8b3PClvGt93OUouHE4j1mjN7pIyK/NeBHywLY2hpD9FeVYSm+bPfdBykpISCU0rxaplSM4KIVhtK2q0vKgaveedUblGPWWVKO1838pTtDIjLpGHh0dSTrx+NbLEBE1QslQ5+6UpadXHEhPTrNr1x+OpOacq9DFsB57e7rDO0vceYyClI3Eecc5SxL01qLmUzOFwaMSqkaVUilHDWq5MLVSKQLHa8CAnvT/P0LMIXiEBltdfcxon4jxD1BKooVMBou3G443HzSem04F7Sfz0T+HJbsePn+0JN1uCE/pgiUkde3LC5oIrlV4ce9+Tug3Pdnu8WB7ujixTJBeYG26vKWiteTZNzc6ssrfq2baAzbWGId8mT/42473neNfwfQ3XS8mt0F0vmuYwKjEVxjlBFpzJOJT5jLTG3q1TjXUO6yxilQih36OMYTEeYwLafqR13hHbFIsCtSR81+H7nmX06nnGxGJNU/+p5+L2NX+2EqesdUjTSjXGKRQ9zkqrX+Xo2joUpDWlLk2isDYnRMtZ3rnIvLEY65DWiUY9vsbWK+u8GYVcCixLZJpmpmliGkfmaSIuM8s8MZ6OnE5HTqeRcZpV59eBGIurDrFeYccqJOdwCCGqUElOhWWOjCdVlQmuUyjKVKpod5iUFn20TkgheIZhoAvhHMGVrKQ3ax1dN2BrxVSFrktKVBG8d8r6bHm2GCOHxwdyytjgtKSo1Xi+y3G+tr9GT12u/gMu0exVVKuvX0fBb66tVXTh3H+ief4++NbOLJFzJKemn0HbBGulNuJVkcwuKcSakyJMeYkkGalJJQB9H+h2PaVGXOfYP72BWqjDTiFzkUtt9jpqPUdAysVai6nebcxbaiVGbVbil4SxCURpv8YIzhnESoMUpWmtNwnIJm1oRAla2QrV6hFno0GCNarFbM2qu
NauUUOxLr2LW1lkK2cMLtCFgBSDlZFUK/OcGaeFWo06L2grvVohJkXLliUyzVq2cziO5JzP9cYiltANWNv0maUSU2tIYyBXc+3+fK9RayUuCynFxquRc29xXZrnXErjG3AlE0ubkQvz2dRKnkfS6UhNC6SIwSkHB0vvPMEaylIpKRLnmePhkSCVktNZY8EaIYvQkvJax964Nd7YcwejxcdzTfdaurXecLo8r7kYV/fgimRcdej6PjP63qFm1acXpBp8UW3Y3IoAS6nMpZBi4Wd3J+b4S7yBrUs4KYwxs6RCILHpMzZDWkYgsowT0xQRGxhkA2ag0FFxlJhJp5OW/SwqNGClea3e03c9MahMXi5aSF9Lg2pKviyaFlVVqfSbgd3NE2rVmsCcs5KBZq5Kaji7+GdfsAmHXHJ4v57Z+lZGrRwPj7x6+YJ6s2Pfe6oxrQEFvLy/4/7+wDjNPBxUPu3+/oF5Xri/e8XhQclV968fmaaJ+TQTpwVbKr0xqmAVPMY6wtBjXDiTUoYu8PRmx2478OzplqdPtlhTOB7vyTnQ9WDFqUiJMZScmeeJuCxnGHi72XBzc8Nm6LUbSCks88Q8T0DBOpjGmePpREyJx2kEwD7/CL/b4fqezcZzOiUOh3umaVZRCB/Ybju2++3lBnyb095KgVahhWs7swJYhiYbeG4/Jo3Femk/V2pRIpXAmlWqpZJii1yXqS2zBpiJYJ22ZHxys2OzGQj+U7y3PD4+8POf/YR5WZjHSEpZYfoKYyq8Po0Eq597WDz7zUA0FS+ZPAuuLtgvv+Bwumcaj8ynA/1ux0ef/w6hHxj2t4Ru0M3JXpFTrsLcM0T+1mf8G/NfYUmJOVr6ds9Z41q6pWU52hqrDQx1LXKzDXZ0RjO9xojms31l6DqotV2nTKqJmUm5Jc5ptIQWdFljzv1cdxutMQ3ea5mKhZg2+DlyOM7MSyWvwmJVo9xSKvOSSKkwzwvjOJ0bsahzqeUuMSde3b3SCoWgdfpDX9gMHd5ro5O3FfGmlHl9d8c0zRpscGmmdTFgl/+07WtlxRv8WfrX4Bpp1aQFEycGqWyCEIJh22sP3M5UrGTC0OGD4WYY+OTZU253W3abgb7zql5lpKUooUjFi6iiV61IykjOqphYuOgytFW4pg7XwNCsKa2VNHp+/aKHcH7fdxzvHWoWUeKDVEO2TRkKUfhPKnMujKnyOJ342YsTFuh9wRkY+kAXPDuTcaXgCthlgmpY5kl1aLstUXpENlTxVDw5ZqijwsWxsailgBWCd4TQZBRtQOwlN1sbqQIuVP9Vii90A7dPP6KUzDyOxBjPsm1rd5L6DaMLK3lJ22Xp87vdgmqtnI5HXr96iTeQnt3i0FrXQuX1/QM//+JLXt898LMvviTGyHQaySk3kkPhdDxwf//IPM/M40xaEqZU+qZP23mPdZ5u6LEunOsPt5uO57dbdruBp7dbntxuSPHE8XSg0rPLAUzFoSpfJSeWeX7D8G6GDU9ubhm6VbVKyR3LPFFrUf3rvHA8PDAvM3cP91SE/aZnOwSs6RgGT1xGDscHjocj23JD1w9sNoHtZvtOmiSs1/kMN199xTkWWhmUTbry2qM+Q9XlTS981clNcdGceEpQ1TgYuyqrOcRZntzsePLklqfPnvD02RO++upLHo93HA9HltgMB6hDmTN1HLGN4T94TzQVCYYgGWZw2ZC/XnB3lunwyPh4z/bmFqph2O05R3fe4W3g+ozPxrdeJPze5ahVS6TmqNUTFRBrWqlIJaWZ0ppIrDZJb9+2AaMpIEO59JYWddSNGI6HA6fTUXW0c8G2mmFlzyuaYI0wdA7vHJshMPThDDU7K+Q04JzD+xPGpHNP31JU1zu3lNsqYHM8javHpkxn7/HBscSZw8Mjxqjhdc7Q9zuGocc5Q/BvL5eVc+L+/p5pns/VD9eyt1WujW6hSemcW2ByxRp2ovD0anj7zhG8PRteI6LzL7DtA1s3cLvZ8PzZU
262Gzabni6o4V31SbRIs6pRF6MCOTlDXqPg1fGVb+3Nglzdi+YNgytnoyvndMr3Ge+5LWBlbXllrLYwc9aSa8WJEgHQvYZUK1H1qYkGXJNkrFXoMFRjFN50trXmUomvLnSErsf5QI6ZhYmcJ9V/Rs49FAsXspSxDkQl2lKumJIpTTDD2FWzWN9nWy9J5ztc15NjpNSxScRJYxQq67C2XNfqcFzhh6yCCNfaoO9kzmvldDzw+tVLNsExT88pwRNCRwWO88Trx0e+evWKn/7iF9rOcNEuPp1zeGvJGPr9La7PFBOISzwfu7Ue7zvEOvywU1GSNreboePmdstuO7Dfb9jvN9TioQ6EoHl6VctyKudYtONLnBem08g8L2cVsTO8WmuT9cxYq63cNsPAzf6miZ9rPXjnfIuQa2vRWHSTC6EV0QfVaT4zjt/N3K/Pa+nQ2aiKecPIfgvGunq+rv89L6ZzBHO1flre2Bmj0VtJlBTpguPJ7Z6cZz7//FMOhyNiLMfDibIIObZ5BSqFMSeyVMxiMZOhyw4c+GLB6YY1TxPj4yO1wuuvvuJ0UKJdXCL9ZsN2v8NYzYnJ2nD24nK8c6gZVtTxkh+teIK352MBheG1ld9KVFPFLtNyeqs87Hq4Jek+VUttZ7OuzZWwU/HB03eBrgvc3u7PPXv7rjurw81LZImKpvmgxrjSnIQzIUzb7S0xsuREzKldZDTdll0rLSqqymZFP8tpRCnwhg702xgqyKKN5EEJfK5dzdLqukut5JbxfeNRrwm1pRk6Sz9smqZAS3FIZYwz1qDODIJ1gX4tI9xsGIYNPgTtAa7dMJpBvKog4PJ9peSmuZAbeXb9uwv+xFVU+8a9iXC59a6i5O+xhN+zgEYl5Yg3ykSr1dLnQK4VclYpSRtJAmOFh9ggoqoT8dQbbr3Bi6U4D97S9x19cOy2e/a7G4b9DbubJ4hVSbB8HDnc3XN8eCCEwH67xTqH3+wQ5zG14HxAjGNJhWVJpJowtbAZerpNvx5889a0PjRsb+h2T5inE+n+nriqQAXf6v7yWZlrVT25rP4LuSfndws1l1J48fVX/Mkf/TNMifzgk+f0w4A4R0V4cX/Pn/7yC/7kT3/K3/2P/z4pJTprcMbw7MlzntzcsNnuefqDzzDG8lFzKMhFWwS2qUGklVMZrFOnaugdT24Httuezz7/mOfPbum8owu23YSNVdxYsbUU5uPI6fHA3cvXKr4+zQoJlkZSK6VFvDPee3a7HV3ouNndME0TvffklLnpN3TGQ66MR6037PsBYx27/Z6uHwhd11SM3l30da06Be0Wb8zs60j3OuJdo+S1lOGbDOmV9LHmU2stUJoKkhiGYAneUONMnI7sNx2/8+Mf8Ozplm4wHI4H/sk//iNevnjFw+sT969OCs22qHRJ2uXooS68jiObzjNJoveeaisQ4P6BMp6w3WseHw64vufh/o6bZ894+tHHfPr55/gQ2Oz2iLPARZSCc/fhdzd0iWpZ3Omocqfbba9lRGZtXGJYokofGiOt44wh+L6lB+ScEF9Vw3JjRJDLRU7kzHRWE7PZDjx7dstut+XzH3xKCJ7tZsB7hUSNEU6niQL408Rm2zPNCUxsOfhEKZmUE9MyM84z0zxzWkZNNrSgQDwUyRirxtt5y3anpZC+KS5RQPLbm++UMq/vH1WOEkUsvThtEdjMbaxVhUJY1fpgFRwpKSqnoOtBVD/g9ulz+s2OaTkwx5GlLhxO91gLT7YD4j39EHiyv+XJfsez588VZt5ucH2HCb6JXLe+w+c2jJlcEjEtqkmfFmJUZz7noqWdKyFQLEYc1rqmrqdQtxr1CzfpzTX23fft998WkLZHG4MxpekGNz4UqzOvi+sMY7TIMVdV40lZmy+7oipLYlYvVjehnCKUSppX1ZmRNJ8wJZIc1OyxXdfgOUPoNEru+gGpFZNnpGZci4wqNCF0wfkeYz2+6zDOqY5t1YtgGptRRYV0saunJ0rA4
BsAm8i7RtwAyEn1TpdZyV/OubP2aD0/FEU4/4y28FtyIVSoTuXdXGj1vqmcGd80ucbaIjHnDd4bfOcJnXY88sGr8Hjw9J2ntpKhS4ZNcy8X0QjNgau3LmcS0bXn6ZwnBJWxrH0l+E6h8JgYuoHggxoomqBC12GdY7PZ0A8bus7hnX831+ANaHj1+a+ySlcEjstb3jSwb5A8zjmo9jDKk1hbDlrDVfld0/atmtKAjKHgnGG3GxBT2d9sWZaZuBTGo5aFLCk341JUDrFk5pQwVpiaLOK8RLyRRlxRwRrGEV8K4+ER6x1d33E63BD6Due9RmbOtvtUy/DeebyrgZ6eTylnHsZa5if2ctkbkHLF/llTS1xFUeUMWEmD1J2xDVrlnK6oLSdsncN7T9er+EvXdThvOcvPGnMpZ2nEzTPhvXKGZ2NOxKTqSnkFdS+4agvUDNbp3mPb4yLgX5t2wNub2tJ4L9dIzCqO8a3repXblxZhroCNiJ6HaZ2wWDXBxTRSVjN4Kw/C2iY7688omblqq7naALhyeM9pn7UD3qUD0UVb+hpVuuRxr3XQv7lHfN9g6T2zmsE6IQRHaIsjxkwRZRCbWvG20PlCTpXOal2n5lwKNSXiXDiYypdfV7ZD4GY3EHrHtEQOhwdOceJxekTEYmunueRlIswzEoVxucP6oAt12LPb7ehvnrHf7/GSWKYT8fhAjnOLzhwpZU6nE2IMTz76AcN2jwkDpttglnTOcQxDh3SWaZoZJ40M1yYJltrq0NSgKCztKM0Dfpej5ESJC9N44v7uNUuMhO0O6wO7m1t+8MMfQxhYXKctEaeRkhPzHDnESEkJGwseYbO7xblAXDI55garp5a7a3WDvcP3Vp/3A37T02029MPA0HVsWpPpvJJbmpPVhQ4rgjOWLnTaizkEgvN6LZyn80HlQftIedKz3VV2mx23uxumceLrX36pOeKiUcicImOcGUJlu70FEZ599JztbocWmsS3P+EtgvXen3O966ZdRTfKtX3lql2+SqeudcsppbOajna8KU2832CbOEctBfEWKRUnFSdNn5iKrwWTFkiG8eGOF1/9AuMtT55u2d/0GPsHHE+f8fOffcXPfvIlp9PIV1+90vsxaZ4x5cLcak1f5Ee8s6Q5sekCN9ueJ7sNrsJ2OhLTzNe/+Cn3r77m4cVX3H/9Jf2w4ePPPqfre/rtrjmrHhfC27UEv2JU2kbb9NSFwjQKh4cDzlmGIeCs1r163+k7KlfM4tYa0VgKiRoTIhCswYqh7zqqqCiHmsSGYtVCNQbrA67T8+67jmHQ8rZlnpinidMYefHijsNh5OHxwHEcmaM6OkvOTFnh5bvxyMPjQddMM66b7aAlN73Cyl3wDL3HWcPQaa9fJ6rCpyTR/OfM1m83s0htPX6Nlj4VLiSqWs54hjRHUKRiMYgUeq8Nb/qhxwVPytqRbkqZJUOsFuMMw0avTzcEOm9xvmtOqd5XPoQWMHU4f1ENBI16Sy7ntoUp62OJSdNOLVW1CtOsiJNz7uIkNXnOFaS5RizrlXH/rnjzX4CARiMGWNM6SFx7QNpKyzWavpXaskHqmdSihIO4CKdxxgikAgVpEztDWZjSiBFLZ7ZYHJITpiRqrqovnAM5ztiuJ/Rb+s0NUjPzxx+zTCfGzpLmEW8t3hlijKSSEbHsb27Y3jwh4YjVNcUnvSbOWWzVCHM2DfYQo2mZql1IKloqcIYV+X71YH/+aLnknMkxMk0T1nlSytoMPXRs93tuUuaj00ScF+bDPTkuvLp74DgfcFX1fouDznZY32vpT9UWdYUItSCSEKo2A/cO8Q7jPfbqoTCOh6psxWsG5AVuXXPlTjvkGKNMyBZlOKuRat8bXBGe3D7l42cfM08TkjiTr1KK2HnUVIYIvXcY63j27Dn7mz05zaT07pokWGvPxnO9WYVL5ApXnnkLF9b3rcSsdWNoH3peN9ZaNcIt+nSo8TVSUWC3db0pibhMjKcD3aZnf
3sL4nlWb9nMg7LZHw+IA39vKW2zpuUac1Z1uVON+Jjp2rGp8cqUJIQYqVUj3mUaqVlV2zbbLf3QE4cNqzCLawjSO7a7tMlVY1rUkcg5tS46ha5zlxIo7ZDQroNcrlRjuGLKOV9qRbD20nK0UkgYhVrjosiYgFjtguW8al+7pqu8LFHJU6kwjjPjOLPEqKIUOasAT1FN51Qzc4pMccEFj7fN+PYB7ywuWKzTvK42IRG8M63zVGlqYqVp4L+9IaJRt0pico7Yz0IlrJIeLbWyIjCsDmkT2LEGae/JjTNSqrYQdd7hreC8x7h2HdZrYi95WIWD5Xxc6ygribWWc7OR0gita/R7fdtfkxyddW/U4F84AeeF9Y3n3368V8NbSuV4iqSY8dYQY2JaknYnajd65z2ysRi0BV3OlSU3r6rBzCklUoQ4F453X2FThzcbghm0VrUL1Gq4PzlSEnwVPNo0eTOobnM+3VFENxDfB3pZ2AVYilBNZiFha8EWbfl3s9moSIQINSV80MjLlsjhZscyCvNhZlkmUo7QCBuWhmmVJo945SQpDPotAPqtjlpVuWmeZk6HE4+vH6gZPvm0YLz2ZTXVYqqF6jBSGYYbSsjcPZyIaWEcD8j9VwybHc+ff8J2CHQOUoca3qybGaLkj74P+N7jOosJHeI7wDSWeNGNGc436Up0E2PxPtAPG54+eU5cFoIPmmrIhbhokwNtitFhOkfGMux3dLs9WMewGbBW6E2mpsJN3/PxzpIRZoRqDE/3A5vdBiM9RnbvZN7X3qxvQl56s79Zuasw1jURax3fZDmvkJq1FtfKWiQnpBZsKVgKphaoChmviGROC9N4BFsYUq8bl9cONrfPd3yeP2Y83bLf71imxMPLI9MpMp8W5uOMFE3zYCqPU2TOhSgwU1SghELwjm2tdLWwjCdOxqgUo7UqRNEP2NAR+p5+u3/Hzuabw3tP13n6PrDdar2r9x5rhNQio1q1isEaFc+XNXIqVSVukxrfZBW6qKY1A1gh6VrIGVKuKkW4ZHIEqRaplmVMJMlMx5nxOFOWok1dxDNlSxgij8cj9fEIZJzJVJO5GRyu9peUjbdsB4OzgrMFY2DXCU92AWcMwWsxVJ5HSmzGZU0/v4UhInhvGhSeSQWWrJFuzKrJkLXtzVmHWio4MVRTCaYSTMGxQDxQYyKeXrMcTixxJqYI2RFcR3UWHEgr8PJO5Wi1RMurZn5K6pCLkroUtlZVvlXDOqXUxEbWtrPtXlsTXcLZ4V/Zy+d7slaueoGcJ7L96juP9254D2NknjUntcIBdbVGoh03OidIjUwnFRLPWXNONVeyqGRZjoVFIsf7iCyGIQwMXpse2LwhJfjqq8w4VnoT6I1nMwTckz30C/l4T5aCDB2+bKgs7LywFCGaQpWEqYIpGmUNWzW8RoSSEt1g9CauieN+y2Qry/ElS2wtBauKUpjmLZ21dK/k2zSv/e7zvCkmlmlmPJ54eH2PYKhRnQo1ugapFmpT8xl6qBVjf0lMC2VqcFpZCLay7QM5qJZsrZmcIwqUJiqF0Pe4EHDeYjqPeE+tpnVs0TpV3Q8uijelatcq5wNDv+Hp02fEeSH4DqpQkvZOLrkQnGPoAt72FOPY7HaE3R6spd9scEYwZsHEhHcO7wdSqdzHQsaw3w30u4HgLX1w7yT6+pZkZFG25pojXJ/1b2GFOr/5/m+VEomc60PVq1KBEFsSJqcmilGhavrGiiIw43hEPOS8gFGdYrzh5tkW2xnyUvnBx58Qp8QXf/qCw92JV1/f8eIYlR1Mk94mIjExlswxJTadx1kYgsdbwZuqKlk5MY+euCwY66jWUcXQb3dsb5+8N8Mr0NCRns3Qsd1q+ZiqxVVS0uYf9UoTW1uCNqNrK6XB/oKQlOqBLXLur6udxjIpaSQbF33kVBEsUg3LHKk5M55mpuNMjpVNv8HZjiSBfpvAwDiNlKJQcZXMfrD0tqPvtLOSs0LftZIltETmSSd8t
PWapmjNTo55YYqRUqTdb2/H8oqA94Z5la+s5Wx4VZlNjS6ocMqadnUaJhMManjrAulIWRbS6Y7leCTGSMwJKYEaKrU4CKZB1eCsxVs1vN47StGGKk4MydhWZWEodu2hW8hN2jLlxHUv62/OxpulQ3ImctJy19fjDaP7Haf1L4BcJVfRzgUnXxt4G1GPJfiWh4mFlCNRKsasTDlBGoPcmIo1leAMm85hQ8BvBmIWDjsQC3WpzBFkrjweZ3wsmPsHlpRI1RBjYZlHDncvScvM4fGBeTw1mFPzCxs/tBKQtgHmzDKNCk2fRqbTyDQuTFM8F9cjBVuvJYtW0ka52njeOc2ENa5MOTJNJ7qpJ8aZlBaoReFKKjlFzX2kxjCeFtISGymh4p0nLjM5LpS69gFVuLm21o6VFplWwdbCYsFRmaeZyYEj0JmqF8a4MyfCwBnqsc5hvSPlzGE8kWph92rL9qstDw8PvD48ME0z2USqOFzYsrtpZQIqZUPoAy5A3zmGwbPkwnQYSYVzUwqFwXg3uOdqeEWj7DXnqNBxvRjfS55Fb/Z6zZZc4WjOzythqLT7ZSURCaK62RWkZqoYUlOAK2fHVTciBLJp22Mu1KzXdv/sKTVDJzvGw8Km/xKy9i09Ho9NNrU5EbkSU2YxhtMUKVnFJayx9MbRW9dSAno/x1bOQS0tt/oudVIbZabptZdayDmRsmvQv2myf5cWb6rlL83gNv4BgpDPe9U1xLoSnFSfPZ2JO+t11rah2u+VWsnzQkmZZdI69ZwSFvBG2HiDxVJ3AzbfkFJi3AZtX7gs5JS1vtX7tnZ0HyxJS6E2IRAAUyomNeJTVUIhFnircy0IphH36rl7U6mtD3HV7k9QL4HFun6pxLggpVCzxwvUmAgCG2eVrJeh6zybPhCCYzf0DMEzdIHgVWp2NYopRZal4owjuJZOK/qIaTlH0Cmn1g2vtFrpazJbPW9AshKc13uSy89XvC09l3Nq4ruN92x4BRFPJbcyjhYB1rW+F3znlHaPwVnPEjPIiXlJxJJJOWKMJsK9g+ArwcPtLvDxky2+3zDcPicVh3Q9h9Hy4pd3vPzqgcOceHi8xzvhMM0NEv2C0A3EHDlNJ1KKnI4H4rLgfSD4jmG7Y7N9oso3TaoyLRPzPPL48MCLL79q0eQD0+mkkpBmVS3KiBiC7bBGz73WtvmfqX7v0vhWqqkUUxiXE69ef02VzPH4gAsOiIQgCIlpOmhT6zlRUuLx/p7xeDwvzBoT48M9U7dBpEek0xwKCjWnrDmuZY5UM5OCxcWOHCx3AzB7uN3g2WK9J/S+EY10EXvrtca26+g2G2Kt/Pzl1yzLwovTPT+/f8E8T7z4+ktSjFC0A1HBsXv6CTknqhWkc2z6PVun7N1nz244TQvLF18zzhHrVUQBWTeHtz/W3BaNsXkWTWlUPKrqwmpvYjnb/lVqdO29e64PbYhPSass6vp3bbNvalGmag1vFpiWQikLccnQ3rssM6YkkmjdZZwiaczsbp/w1/7gb9B3G0wJUAz/+B/8E/7ezT/g7vUd//yf/zHTOKmRyQVZdINPsSCptvpYR8zCM9fztOsJwbPb9FhjuD8cmecJU7f0fXgzh/YuhjTNXWvUkI0ZI7D0Gi11oW86Ao7qWv/bmjSqbRGusxrxUstZx1e8xXinZB5ryElLfs4lhLUQ48I0jpyORx7v75m8J00zJWXiNJHmGYBOIFjoB08ZLJ/tn8IPnihfpPVZts0nUHa/1qQ/Pjy0c5rONeopaY2qOr3aGGHlR1S7NgN/C9MKSLHQYNuSFc5dxVxKLcqsN82ZFjRV1N5/Oj7wuBzYdh1+v4cKt1bYDh1T9Zq+2HY8eXZDFzwf39wwhMDTzZ6bfsOmC0hVQufpdCDOgtQeb3tymlnizBInTuOBx+MDp+nEvMyq1hYTSyqk3FTCqqa5DO3a2m+gj6tD3BppVNYudfXsAH/X8d61mjl7EKrRa8zqPekVWqMfa4XOq
zfad06dtiWj5UMtEoBL7jcVUiqYpDq+tYqKOwRP6AbCkDElIkW/NxUh5kJt0VnK2qM1Z20InguYQkv6r9r0qulqRIkQMSXirDeURrnagaauxBYjiKjyyjcCXz1XqVc/v7NJP4d2pRZiiiwxtrq2iDIUraoeieIRtUUnazmP3lSFuGgdXFwW1mbSrBBzvdLdLkI1kKSSkiWZqqpXthAXTykZU2w7tMvJn3OaZ3FyIZdCzIk5LozLzBIXUsnEUsjzRMnCaRwZx4laNHfcGHxgq+b8nQOXwVrEFtWFbnKdJZe3HvCukazIqkt88aKlladcOdXf+v7rG7peP9Z/rPMm6x3DWQ+ZVq4j3yKEACjBBC371ZYBKbduUdCFjmEY6N0OawLPP3rO84+eI2LYffU1xlimadQ8nIhuQqKlfSKiEXBalaIu56vozhV5zGhHr3c9VjBjLSdZ+66u8L9Ol1z9bXNy2rFe2KuXfOCZktFELN7I/TXN5toMQ0qRcRrJKVFmVWMrrbGB2m2r16kxclUmVFESb5tsaDO8q+O2zEKcHVEqKRpKNmeIXElKudUvZ9LaK/xtbzBVU2dnI3Sum12v8VrpdPnmNrWtvEudhJozUpV4ZWmCSiLnOloVR3KEJuSjtbW2qbM1Tey21lcSVW57V8qpOVAXNOLSpWq9ppeFIr/isf7yTIh8i1P43iUjnRWCU+q71IJULYvIaaau3hKZ3ls2vaNU2O4COVdeP554OIxIzozLQs7w8qEwzkKRe17fjdr0/cmC9QPutid0Az/6/d/nd//wRoup40Qpiel0T4ozsVQ1wLZg+g5qoXMbXM6tmNpD1zFlQ5wzeXzQ+rplZp5H0pLwWMT3zG4g20oqE0uaESMUiibss+Y5kYxIoopFTGqL893CzcY5bN+RKzyOJ0wI3D/eNe+95+Z2y+1xy+3NhtFbjqWygDYSDx3jPHI8HKgFXr94hakBYwbEdOo8NOdi7TdajaMYi/GWqSaqN7zkyBwELx+z2wQ6MfQ049Q2DtOIQ2KMsjuphO2AGQL97Z5wu8XEwE4yyzTx0z/6GXcv7pgWOC0w9D0fP70hOM8YR+KyMIswSWVOmVOuxCqYlJFayAnidHp3Ey/KOK6XH5si4QVmvi4lu3agz3J8b6js6B9Ya+m7DgFijhd1nppb/tfijNAHlTEMweJsRcjkNJKrsJRKqnA4zDy8nun8jpgqYHn67Bm77Q3ed3z8yQ94+eIlH3/6CQ8PD/yLf/HHvHr5kmWemaexwd6qNDfGiJlmtsvCFBMYw5KySjEaq2mg0BH64b0Y3nWUrJvyYpvecU4MvVdYuXWNuFYXW7V6VydQjYV2D6qSwRiMMzgvree3I2eh1EjKkEtkmo/c3Sf+5E9mvLX0VrvaDD4wdB3OGTaDxzpD2Disb4SpNRDJqh5XW9nLPCemaWZm5hCPTMvC3fHIOM3karSyo1SmmEi58HgameaF0HX0w+at5dRr1Tz2sqylT1pnrCSkJuJitVH9Su6Dqr2hTcU6j9BjnadUgQJTVOevDAHTeyQEqrWqTihaMx28Z9P3DH1P32nO+6NnezZ9gKLVLDlFjqcD03jicDpyHI9My9yQUg2o1CFR9ng535vSlL/eVKwyrWrAWotYrY821mpHt1y+15y+/yYJKyvTGYwKMGqEJa3YvxZUYBxCUE+wM45ShSUXhQrmhTgtLLUyLnrT2PuJOEeGoRBjR+grTzaFrjfsb5+wf/Y5qSSmqFHby68t+XSgLJFcI0LBiMPUihWHlKI3oHVgPak0QYll1rKEeWQej1CaDKVx2PZIRSM1QRl9pup79TO1w5LmedvFk3doeAXEWoxzmmOKkTktTPPMPM+ErqfvA12nzM9SCrO1ZJPPtW0yixoxMzONJ07HE9Y2OL3l2UUqrkUB1VSKKLsxRcEU4WQyLJVpumnSbeV8fABr0dy5+TS6ERrv8OJwfcCGAEYIQ0+tlXmZeXh4wHWvcMPX3Oz3PH+yR
4zm7VNO1HmhjkZFV4rKIppSMFUbEmTymxbvrc37JeK9fL5cjO71uf8KX3qNDioXWGt9FjEq/EFrxdaQhtoSkdZa7WHqDK6piGnlTKHWRC3Kvk2lsswL07S0MhcNp/thYLffI2Lp+y37mxseHrTx+d3da6ZxhKq8iDXqrqLiNjFn1RkuRZ2n2jp8tSjXtHKy9xLxtmktTVlqres0BnJWydGzkMIKKVxBi2v3H7hA+pXSUhSaplilb0FlcE1FiVY5Ms2F+xJxxrDvB4JzBANGQmsraHDeMmw9vne41ku2lkJaKrUIaVEHUVIm10gqev/OaeEUZ47zpB3CjCeXyqmVJR2XhXGa6UUQH96e4UVRwNwYw7k5NbCWD+l+aE0TC2kQjbSfjLUI/lweVKqSZ2MzZOIsWHsW41njdWuMEiWda2lGx2YzsN30xKkSp5bmipFlWZpaVWzRta7DldX8LcnINwiMq2N8IUCKWbt/NaNsBPmeand/ruEVkR8D/zvg0zbv/1at9X8jIs+A/wPwe8CfAP+tWuvrP/cbqxraWlbR7PYyqlbinRaAr7APNJEWEfbbDdb1pCWzbCOGQu8zzhSmMrOcImEpHOcjzice88/pNveMUyGmjOs7+ps9YRgYbv9A77Us1CzkFEnTgZwSjw/3LPPUSl9USH2JukHHeabkiBHYbrYakUS9kbvOkqIB48H06jV5ZQrXCqlErGSsRKqxWFdwVdsE/orxL4vIP/y+8y0Iw2bP7dNPKDGS54l+2LKkwmmasVvoxOJdYOgHahZOdiKbrMIVITBNthFzMuNpog9HQmfwwSEZqmniDk6QVlBfBWLNjCWRHSxGCKixOxsRLmnMdfGvnXzmaWYaR2LV/rld7xk3PTUn0jyTllX6LTHNM4fDAe8cBos1jnFciKcDL1+8ZFxG7Z60vcU6z267I3SBmlQc5NWrVwB/KCL/6K2sca79iStS1Td+9yZ4teZ56/lvznnfZhxSSmTtE6d14SLkutZCqwa6iFH1IiOUEomxchqPPD4aeunYuA6s4bhMypvIuYnUGLzv8KFTNTa0I892r3DqH/71P+RwOOC84fMf/oCXL1/w9Ze/VOeokQV3Q08fHMPNHjsMuK7Db1WreOMDxjlunj7nuET+R//T/8lbW+O/cv5FMM5ivWttOjOmMYylGKS0zTNXfVTOEpAlK3EodBbbxBlqq1AoJZKiplVi1FzwEtVp0deFuFRmEqa3DPstwTv224GuPe+3gzYv6Jwaa6u411ktyQjWWWqRc9rs7mHkT/7kF5xOM7/8+o5lScypkgqIKxivzs60LK1EqgKGkoS7u5H/x9//6VuZ7wqtxljlM6soAgOcYfIVxXkzXdqU53w4l7IZ67QtpU8kBFajS0XmhZgyOzlRlshtvz0beO8dLnisdWcHbhWpcU3y8ZIKW4+lnp1ddUyVzGVavfx156EVidJa7UtjhFILUuTsbJyDh+8wfpOINwH/w1rrfyQie+Dvisi/C/z3gH+v1vq/EpG/A/wd4H/8Z3+UbrVaUtKiPaMCE1UEUDmw4F0rU1GZOttOfL/dst0GUoJlaV5omaAkTo93TMcHTM24esS6iftTpus7YtGwc/f0KftPbug2G24//SFhswN6oCeOI8dXX7PMI1/98gtOx0fiNLKMIyUuLId7So7kaaKkhc1mYLfZNO9U8wld54iLQbLHmEE1Pq3mMuO8aARmEiJJI2TfiFa/mmfyD4H/yvebb13wm+0Nt88+Jc8zaTwSQmCJhdO4sMkVY5wa3m6gJHV+kkkE5+lDx8k6LeWKiel04uiOUAPGdM1AlCbx5tC+vtqVJMXCaUkkKyzBk4xrXYfWaO7azNQzvFNrZR4nxtOJ2KK5vvNMm17TEstCXrQpeC7aSvDx8VFre8VgxTKfZo53j3z98mt+8cXP6YYNP/r9v8Kw3bEddoTQkahaa6kX4Ge11r/5/df4r1j1tUkONnDjOu/1m77/zJRdFrIYSkyNW
Utr3t2eW69qZ/R6lZoYR+HxUJBuj/PPEWfJD4l5nomxao2uGFy4GF4AHwLea953t9sxzzObbc/LFy/4+c9/xp/+yYYUI8s8QSnnTjGb/Q43bHB9T9jt6bqOmydP2Wy2DLsbTkvkf/m/+J/zX/2v/+tvZY3/yiE0w6sbuSqkWiVbVAPFQBZl8Kc1P6uR66r0FEyHDRbJhVoNQqWUREqZmiIVhR1jzK1UEMCQlsJcEp3vGYYtfd+x3w/0nWO/7bjZripMa8770g1n5cBYZ6EYSoZaDXf3J/7ZH/2c43HmqxcPxFwJwxbrO2wn+N6QS2ZalgaFgmAprcTpP/V7P+Dff/3Pv/d8VyqJQqI0w9skH5HGVlfxljMZAThze4zgnVfZx9bHPOdMDpEoQnWOag2pVtI8M4thWyA7z7S/NGZw3reWiO4SOTcj6Fp6UA3pm86sLgudb2cdWVYi2HUHohb5no2xnK+Lpn3KOXXxTpWraq1fAF+0fz+KyD8Gfgj8G8B/uf3Zvw38X/hNbpKq5KdUajs5hcg0soRc7Pmk1vNKSUtzsB4RbUbgO9cmwVGLYILHRE9t+QeTYJwiucLD3QOhf8GSEv1+oN9ucaGnxIgNt7igUEwYNoh1hOGhlWFUlYTMGk2c766mI5xa/mWVJNMm2xVTadR0wRinJCGTNAdnBe9pD/1M+dUR79uZbxFCv2G7e0IOM8n7VojeYaxqTWNMy2E4rGvGs3ngCp/bc96m85Y+GPrg2PhV+lFz1c5opFUb+YKqyke5bWrGWMRotxojl5Ke9TjPRJyq7M1lXpiXmVQUFlSGZKXmqND3OGtuKGctMq0t19yIWQWVJD2dRi2vScrIBUGs8gfmmBiGLcDprc35t6/jeS3XX0HSuCZCnYlIcgakL3AonAk+Z/jL2SaOVoHSDEGiSMUmFdRIUaOxuFSWuSAZYtRm6aW1wcsFpmniNE6M04Rz4XztK7UJYXhubm9ACqVEak3EuDAeDpSiIhrOWva7HU+f3NJ3HU+fPSGEwG5/SzcMdMOG57sbfud3f289t7c+3zqb0lrueV3jvhC8p+sC3ro3odcG0Z9Ly2Sd/5XUVM4pgksHqZWNrgTD61EqLSq6XPfroYi2RrJIkxRNkJHWDak05bArLedSzx3BcmMTC0ajvhbZiTE457G2kFppDkXojaq2vZX5rtqnfE2D6B5hLqSkdoK1loZk8a1UjnC519de0whXLQW5yrPaMzvbWv2u9bGqALQuFqyNETSw08CttLTW2qjGGFFWexVyVKLppQWggFwi3DVOVrJs1eNrKYfvanDX8VvleEXk94B/Bfh/Ap82owzwSxSK/vNH21RLyS2fcnn9vNU0Np4RzQHEuGi5SW9wQXC+U63UWlmSkHMmEsm2Mp9mHsdHiJm5jDgzcxp/yldfvOD26Y7j61+y2W+ZHu7ZP3nG7vnvsn3ucd7z/2Xv32Imy7L8Puy39uVcIuK75a2qurqqe66kSIqkjLEEWYQf7AcLerENGIJhWPADDT4JsB4p+8mAH2wZMPxqAjYg0wIMG5JhPtCQZUOWTQGSSUocUiQ1Mz3Dmenuumfmd4nLOWff/LD2ORFfVnVPV1ZmdpWVuzr6+zIivohz9tlnr7X+67/+a/PgCTFGtkOg2A7Eal/akitJQGtVpURimDiIVchpVMJGLgHrtWTEJqllT70agRwpJtF0mdUaunVhtSlMJeN+xlV4FfMtYji/fMI73/s1ShzJ0x6hLLJ3xjdVUMFrCU8qFLGkIhjT4H1P61p672kbz9W64WrTcrZZsVpvCFNktxsQgc56rJOl1CAl7fZENohp8E2Hdy3etljjVNKuMl2ROV8mlJI47LZsb2+4udGa3aefqnLPTN7IOXP9xQ3DYaRtJ0gByWnJuRVjyWI5jBPPnj5nPUXGw0DTdBQxiGuZ8o6b3aEWyr/CNc7xtrynXDWzZPlyVtdUMYZSm5zP+ablM/LR+IpQ4WRD0zcYa5imkSko/
LnLCUPBxxFTciXJFWyTuXk+Ic6w20YOYyYGh4hjCpnPnz5nnBLe90wh0Xc9q36tOTav0eMHH36fnN/lBz/8gO32TzEOA8+fPSPFsBiBzWbNxeUF3jdsNhuc8/hGHb3Z+ToGQ69mvl8cxhi6rme97vFGu+g0ztI1filzybOSHGoMNOrRfuEIC49AhUg0d+msU5GGbMjFVKMMc3kYRftKjyEyTemYV6zO1/w7WbkRJUEoyhL32aAc30LJ2uglVdh4EfQvkILWUbs5PWRQFT5jadqVBjCakWAcJva7w8na+WbznUthTEEbaQhKDnONHnOM1VlQ1rIzymDXm1w78842y1CbTJRadieaP89kbHXSVV6yWVq+aqtFT9M4vDcYyUhOaC4hQQmKSqaJMKkm9nSSlpqlaNebFU0CdnuGlGrZmcNYfy84KJWjk2spVy5paSCjxurlje8vbHhFZAP828C/Vkq5PfUYSylF5KuPQkT+CvBXAM56vzhAOWmkN2e+pJQlnF8E4avQQVkiYL1qUusvqfdNrpBHEUMWYZYET1k9lXHU/rHOCdvbW3KO7G6uFZZsH+JXujCb1Vo9e99or8eqgKIa0VGbDeTqUdUi+1wLthXqyMfju+fR1ZxBEawF6wrK2Sq49DMjXvNN51v/jdYf+7aWhM4tzBKLm0qNzm3VRJVZZ1rLvkz1MJ01+NpYu/Ea/ZIUrpm/S4wsJAtJslD4xcyL2y65FD2Zk2PnhMySszZqD5EYav/fpOvCOVtznpUrUKMDJeZx4kHr98/9e+8hKSK1DC3VKPjVrPHT8VJ1frPRffGzTuGykxyUsxbjLDEFJM4Rsj5S1dFVKUOIUeF/KXVjTtSoS6+FyuulRWwgz8c/E0oA2zaVHCM4ZxjHTlGPGJn7167WazZn5zjn6VcrRVKsrwZXL06d21eyxn/OVC4GdRbMMMv8LoaoIjBH5EFq5DuLhchJJDZzsOZzmO/xUiPY+fVjHvB+3ecCmpk58v1qR0yQIwNe1JFw1dmqC2KJCLM5icgXJ5YFGTn58G883+364tSIM2cLKZAqKlhqFDqHUuXkv2XTnhnkgDOCr0hCkiOnx4os0a2paZSZaWzMfI+cRrvVONb9uJxcg8V2oEStXOHkea5PG98viIbUjmvzdYfjWcg3i3l/IcMrIh69YP9WKeXfqU9/KiLvlVI+FpH3gM++6m9LKX8N+GsA716tirOWNAVKBOPmPqRgpDLgciBOQUlWrsEIFGt0kZZcW8kJmAMpZ+7utoyTiv8Pw0QMkSQG40TJHdYyRTgcMoGB8uPP6bqGwwirzZrLdw9cvrPn7OqK93/1hzjvWa8dXXvO4eYz7m6eM+5uuPnsY1IY8UxYEs4HfFvzlbksUsy5Eg7EVri55gi81xKEri+sNol+FVmtAlM2OPeVSfpfA/6n32S+RaTMm0CYaoecYuomoGSxWCEZATrfMPmgG0fduHNRuLxvW/qu5XzdcHnWcH7WsNk07Gxm2Km6j268YJ2jEctIIQ9CFsHVMpK2W9F1K4xzes9Q2aFS6/hqH0xToVQlbChL0orXmyKbk91LtMVgmEgpVPWsoqIB3lOsIYu2uss5kZMa4ThFhmFie7efDa+8ijUuInqLz3BbOfZppcLoSyTL6Z54dDr4iueOm5xG/F3T4L1jvVnhvNN2pKVoGZFzSMmEbSLFwhgKu0OEJrHfFcTBOECIMyrjq3G0ta1fx2q1qq3sXL0/ZT4ihEJnOnzjWKfEZrOuUJ4SCZ3zFeJVKFSqLFCBRfc2hPDK1vhXvyczDiMHK2TvyNaSjJCDtpZbr3qcszTeouIvgSkMukGnmd1ajUHKhGlEBFLUyNL5TtvTWYtpG3IuVQdApU3HYaBrGg6HEUGYOo8TIbjI5AzOGdq2wVjBN8ry1t4iM/9Xt+bkNEi5vLzkyZPHOHfHx5/cVXZ2bRIwY7yi/c51iqvD74Smd3N+9BvP9/mj94tFUxzMaZGk6nYxqJDIXFYIo
pExKk6y5Eej1te7epbvrHpi6Ri9MLnq4BVoxLAylt46Omu1NM6pqmHjDEJVzcuBkgM5jloiGsZaw6vpwCnM2gWBkDJZXIWUjwGSrt0a9bpmScMtXY8WHJ3KGchUetlXTdmfOH4RVrMA/1vgH5dS/lcnL/0N4H8A/M/rz//rn/hZULupVHeh1n2JaEciXTt1Moss52pFyFJPOCckR3IOpJgZh4FhnBjGyBSSwjLVDROn5QspJoZYYAi4m8ThMGKdZ7/dkjgjs6aUxOPvPUakpfEO4y1GCtN44HDYsd1uyWGgdxlvNYkrpm4q2BNvqLAoIs15D9FIV0DVtpqMbzK+SUvk/xVj+KbzPY8Z9hVK7VgiigbILCQxExM0lztHvKUcoxPvbFX8sbSNoWsNfWsIk2DqjSY1/6heqUoFFuYyAlf7aOpDbO3JWo7GZzYs5l7OaO4vq6SpGW47UYyoUGwtyp+vgZFFmCDP16Yawpy0T3MK2ogjxgjwA+D/86rmHI7Q8DxkPskjvjlntE6h13ojvPBc/WlqBOecGpDGq0jMNI1MzuKMwTcN5EwQSy5RhftjpgmZKWj7zRSFHAXxM5HkWLs650a156lZoql6JdEuYipyAIV+1dfjm5EMWR56jU+uFUAp/OW//JfhFa7xL81+gZhUp9dK7QxmDFJFYeaUxUyoIqisbJnhArQGW8SQqhi/xkEq9m9MURa/zO0DiyqElWOePYRIDInotewmLdBxWkg7RqzqBXiHrZ9rhLrWq3iGga7rWK/X7A+xGgNOjIbKQ2ZqpMeczzSqzGqFv/0PP3ll8z2XDBmr60ulQBM5H8VBjDlGh/pb0ntvFtoQwaSkfY29BxEOrjCaeg2StiH1xmiqoEa6rqajrBEgL0FEKVrHnmdksubYdb7zMu85Z4qxLCzH+V7TOwsRizHuxOB+2ejej+BfbvwiEe+/APwrwD8Qkb9Xn/sfoxfr/yQifxn4I+Bf/pM+SERw3uK8JaeIdVJLh44Sadpc/TgnR6iiXpAqMWnqxffOkzNMU6kTrjWKOc+NvYUhZvYxk4xBgqEB3HZgCIld+GM+e3rLwy8e45rEar3i4vKStm3Z3z7jsLthPGyJcSLnSCqCZTZYQY+vNgBQmO6Y38ySwOzVcDQT3kaaNtG20LbgvUaDP6Mf79k3nW8dReUtd3eAeoc5J8ZJazF94xW+SoXL9RqbhfPNWklL4UCKA8Y5imht8vXtDSLQdmdcuELTGs4uO1UrstpqLdUtWozFNR7ndfEeKyE1BaCNJDQKR2Yylxr4vl+xXg/kDF0XsLbBu7YKo6uxtJUkc2SDsnxO41UYv2m0HZu1Vcc4FyQlJEZKCKRx5KOP/hjgIfBfezVzzmLklYxxv2b5S7drZS0LwtKurK7xXLFIKUDK5JiIZWJIdwRnYRrVADph0zp841it2tr32TKNhv6i5fzijPVmxXrdgcDt/o5pPNDVmt+msaw3PZtNT983tI3DugrBoputOmFzusfMmOuy0Z8a3OPdq5Dp4muI8Lf+1t/ir//1vw6vbI1/1fyXqpdsll7YUkUZCspcjkkW/+zUSZodUeu1NKVxlk2ncxpGZdcOw8A0aWv6ko3mPkfVVQ5VX3nZ8OffYyIlZUJbOxMOlWhYd78Kl9YLXiAEzfGO08Q4jsQQFuc0hMDhcECcwXjVaHRVDnWGvI2Bp7cDf/jR9Subb4v2JDbUFqcVGcN4imQNJGRetHq/pwUB0udijByCJgVDlVK15yvsusM7T79qaYzjol+xcg2dV3SBoip4JUcEh3NCzKIiJjiGJBwi3Ownnm4PPN8OPN8OHIaJu8NEiJlkEhnDOE6LOt8i9lGK3jvznrykOE8g+xoQLNDVS4xfhNX8tzhFxO6P//rX+TIRalsnlTpTlhqL92kEYomkU+S13sOlQrpIRnLRBvOiKlgUw14iOYvms6Ka69EmooV9KOwShAg5CD4L3A00h5Hx0+dMU+TJe09o/MTZxTnf/
/BDzs7P2d18xmH7nGnYEeJEyVFhKwypZEzSJuopadQVFsObF8iliDKdexvU6HaJrlPD2zTV8H717P6jUspvfZP5Bp23aThw2F5Xj1Oh5bvtlpyTsjyNoe16rjZneOO4ONsQYyaMO8Zxr0xnY4g58fz6OWEauXr4GHEZ3xvOrtTwjgGV4swGMog1+LbBu6oAVG/CMkd6lWCn+e8ZGbA0vmG9XhGmgLWeGBLWepxrlDy03TGFCWdnCbk5LyM1pyc0TUPfd7WVmnZu0VRG1gRnjJRpIo0DTy4fAvzdr5jvl5pznfdcvfAZZj5GrnNQNTMcBM1pC1LFHvQz5ghX0I8oqVCCwuT73QEjQrg1WCtcPrng/OE5TedZX/bkkpiyg8Gwfthx+eiMvuvZnHXKSv18YjrsatNvaBrL2VnP2fma1aql7bxGAPX4zOmuM/+/HJ85mq1TAP3+e2fj9pf+0l+q0LS8kjX+VaPMzHjL0fAW7RWtog0adRajB1dKriB6WZpBiBG81/LGTduRUuTp50+VuDMOpDTUfK4uqxA04psZ9ilqzjxWyHMxvtFSXKlNV8zJ+p2N/twEQ/UDUsiM48AwDIQwLQTCaZpIBXznaWyjZDvvESOEFIhJz+m9d8/47/+3/wL/h//Lb3/j+RbAi8HW1ZGLYLLiXd46NfgmMxOlYpWTTXVPrPUhhBjZjdpQZn/YE3Pm0j7hbN2z8g2Pzi9prOPKNXTW0TcN3qqzNwwHYrQY6cjFqOE1DbEa3n0oPN9NfHG754vbA09vFRW93Q1a44xWPEzpyB1adKVnpGp+yFFXsCxzUN1QOTqYX3f8EroTcYxOTm7XGiNpLqkuxIUJWIkJswE+PVlrDM5A23j6LjNNRut/64SqlF6pZJNCiAJFiMlpDZeAdwZy5LC9wZTI865h3G/Z390gJSpD1FllIp4UZZfaNHwuHcgnSi45x6MAv4C1EefU07UVLjnqo7zGUVTecjjs9CavhLHZTMUpMhwGJWCJStZ5r40qTJVvNNYqOQYlqgkj+/3A/jBqSsBov0xjZ89RITyTa/cXY44KNTEyhUk/U+6XdcyGV2FPq5KbtX5VZG6ErZuOzZpPnK/zMWIp9X6Re5vaHFXIcnPle1KLr2vcN0rLJVHDs6yjSg6jGteTFnXzJlCqJ55zxhTUsTEnyI9xtE5LxbzV1Meq67AGzs/Oubq8wlmrnYViWh75ZPM55qVP7ks5rtCft1bnM3nR6N57zzeIEL7u0DzcsSerERaFqYJqTGdyZa0e9xeKOkG5FHJIpClSxGBahdJXTYNDm0PEVNf0lMhSKDGTJSu8K4bOW7pGy+8aZ/CWCpVy0tZPkJJqI4SaGz253mEKTKOmREIIhDh32kmUEFRRDxWZMdYQs5aXpbqulwj6lQ1ZHDHDsQSHE4c6i2qBK0IJc06pFH1PEkA0Q5pKZoh6Xu6wJ28bcsr0viN7T177BTafc9rWukoqVLJmKgfGENkdRp5d37LbbkkIruk5u3xAxjFOE91mR4iJwzgxpcT+MJD2Qy2Hur8Xz4gB1WHTU6/pk1PI/HXleF/HEApG9CFlZhlqXZazGl0VhClEVGF2hi715p03LtBem9YUjPWsVpnDYajNvyMxjNXj1OsfIqQ44qyhtz22OLrW0W0srUk8++iPuHWWpx//UWVhKgzU2IJdryglY/IsMahGPaXEMEzqzcXaozJNxDQpe7lJYAquEdoe2k7oWoP3tXTgXveEVz9KKezubnj22cd0qzWbi8sKQTkgs98eyGPAPhaaR4/onWXVNxymlqb1GG9xTUvbn5FC4PpmQBi4vLpmtXpO0/Vszi+UGGW0aXSIgk0K6bmk6jwhJ4YwsTscuLm7o2ka1psNdmaPFlWUadqGptUOMtYaxnFku92zXoFzKjzQ1LrrpcNNRUMUXi01mjJHhZraHHtuvjB74rEEQlav+1WPBf7myKBdanHnjRXdgEuBHFP9ObdbKzVPVY1jOpE8RB0Jh
6FzTpWRup7L9QbXWNrGgxSahw9JOfHhhx/w4QcfsNvv+fSTTxkPB8b9qHXQ65oPS3lpGD6L2FuzAMy/6Fm/8nl82VGK1v9PVtGHGHX/yKalWEOqu0ouBZNUVnJmBOeqSDUWoUwJu87YfoUzjv7qEimFKUTtcz0FdtsdMSZ2WVn4TeNoG8+Di4ZHl0pKvFhb2tbQt0LXgPcZi6rvuVJwxS6OTy5FOQgpcXu9ZbcfuL6+5W63Y7cfGKaJcQrEMZCLKpW5xkNluiOCsRnM7OR/tULPywxBJXBtERwGxGnrwdnwSiGSiBWZjFp1pI4MhVih/WQKyWZCSjw/7DiMI88JuO01F+szpsPIWd9z5VvWTYO1hsY72qZh1SuhsOs6rDNc395xfXfg48+f85/97h8wHA4461k/eIdH7/2AvusJIXK32zGOEx99+il3d1s+/vRT4k8/rrXPtaPSiehGLuoYmWLvocqlIiazGNDLjDdqeAtK9DnSyjVnNG9Oc4mJ9tCsnmj9uzKHjjJT9kvN59XcsVOCSE6Rxjv1qnIgU/AU/XdNO1iham/WpuJOJzZOIzlKzaNYfNvSNC2U2ppr8e5qdFLqBjmXF9VKnfkGgoyYKqoxC24bjt5VRT6/QY7+F5r1FCem8YD1Tvuxzl6rQE6RqaRasqMnYO08nxUqFMEaRzGZKY7klBkG1YLFOHKWCiWzGJTZI5ydpJQSIYrCf9NUZfiOQ2DRyD2FjlNKi6cfkzo9eTFcx59wXCvLtVki4ROP9d76UeJJeU0GQ6pTIcg9o3tylJQiFdWpa6kcnYcXa4C/KnCZS7DmMhkzM4lNIUuDI9N1Pf1qpRKHWeVTZ1nBWQwhV0GYOQrOOasCka1x7AKNy1fa18U4f8VrL1VW9Y1HOd6fZGXppxnlyMuaqe9ccpJzpHPq/MytS6HgqnCDoIiZUAjeactBayALvpbdOavdhWwlG80lSzOKcKxrNUcNjgrx6XfrfRPrdQMUPTPa7lDSEfFJSVtOZZXowpSCsTOK+GrntR7GEdsQhZpn4ZEXAdj58s/R95zvpUa/EW2yMTt/bQgM00Rj3dLAHjiib84uLHxjDClnDsPIoQrATOPE1eUlfdex2ZxxttkQQ6RdrRmnkcMUMNZxc7dVUlvteLSkq+bI9t4KkSOgM5uvb7Cs36jhzamw3w2UmNSqGr1hDSy1WvOZCYIUNcCx1sKZeeM0QpYZklE4pWsavPN0TaGRMxXoKGdAIZZErGowxIxB6JoOZyziBJyWMy11X3ECYBoGtmg9WWMdxhj6psVZr7nSHLWMKOelZCemgjEJawvOF7o+45tC1xnaVg1vSVpPedhFhsGQ5h6rr2GUUjjsbrl99ilh3FOy5k3brscaQxgGxhC4bSzPv9gQC1gpdI1XFaqUMRi6pmcqhm24Yxonrp/f8kn3GZuzS8SsEeuYSiKVzHZK7IMqfqU4YQ18wZ5Da+mc0Drh4vyM87MzbduH3ry2MmqttQpJlczdbsez62t2h4m73aDrKGskuD0MTDnRzNBgORLb9oeB/W7Lbn/QG81HLScxhigQKDSrlsfvPNC86iseiwAMRydRiVanG9GcrlCyybLRV+3gYxqjMs8FRYNElChoDJPRKGOXAs0wsmksl5szjBWYDqScsN0aadcEc+DZbmS7HbibIvtY6KbE4RDY7g48f35DTnB5cYu1jq7p6FoWGc7Z7T9JDrzyeXtlo0AJkUwhkAlokxJnCl3bsll3lTymkU4IgRAmdUSsgFhM43FtC9YyhokYYR9GKJnN+ozz83NiCDS+IYaIM4ZxGquDreILh/2enCNdo4538lbbi+bCFAIpCRQVm5hZ4blomjjXtqQxF3zTcH55TtP34FqmkNgPgSmq1nbKM9hfr5GdKyvKz5Kkfdlp1QYoMmdrExBZ3N+CRrxl7k+szmRK6tTt0wHSRGMcvWuJzpE355RmjWx6zKojWcc2FwgTz7cqA/zwnQts43FdS7te473HV
x386+sbfv9HP+Lm+pocE33b8ef/6X+a773/nkpUescUgpaejhMPnzzi5naL7xpu93fkXDRdZo325K20bJkDgFqJoGkfIBeMAgovPd5sxFsK0xgxRUXeT72jhZ06DzFK3c9AruxBZImU5hhnrpN0VuhaizMek1TVylbafSFSJFFShikBgpEWxBAlo11cZ7mxOb+mouMpa6s18S3OOsS3WDE1FyZVQbIsxjfFjPjatceC82qAnYPKUVq82ThlwqS/v84Rp4HxcItIwTXabL5rPVYcIU6Mw4Hx0HPYbcliak7bHBtwi6izYbT8KYbIfn/g9nYL0nB2CBirwvCpZA7jxGEKem1yxArsDKRJuNuu2G63NI1fIqGj56yw2Fz0X1A25/5wIKRCSMeoUVnrQbuOlJNIpd7oIQRtoxYCIWkbMA0uZMlBWe9Yn6++mev6FWMhXszru+aiOUmRLKMcz2eOrubc2Hw+p9D0IodphGQgVlhsypor6wq4psU6w1Tby4lrwHoSlv0U2U2RMWZC1jKjEBPTqNfUOc/hMDD0I0YsjWvBUIX86yGX+SxP8tQyX8tviTEuRfPlMufwM8EIw0EdvVLKUYvXAEmOa2nmmdRyRIwiL5lMGEdKimzWZ3RdS3QWYUZyRg0eSl7W1DRNiBRi7EnJUUm0GgPERDaCpVDM3I5QG1TkUsVnsqIhxlm6vsM4T8Jp05f9yDAG7bwVj3yYGaJTAPHVQ2raza8gs1CGHOtZC+ow55IWZ7iUuStQJoWJNB2gaWl8S7aG0nRqxfoV0vcUVNFvSIn9ONAZIea0dFnzTVsjVUUfDocDX3z+BYf9npIyrrN87713+fVf+7VlH5mmwO3dHeM4gTVszrd88vmnrDdrRdOi9pSWGZo4whMsJ1bXDaWSIr8rhjflzPYw0NqCl1LD+1pqUo3rEVGu+QoDtmju0DmLdR7rbCWQFMhRPRYSJUcg45yy3xb4oGieB1sQX029MSC2epaV3CB6IesWx5x5Nsw3k+aNDfl4k+YKmaP1rEYS3he6FTSd5nR9C77NWDfXzoKUqF7voEzs1zcUQlNIPyF5IoXE7rbWKMZIjond7o7Pv/iCYiy32TKmQkyCSAdoj+FY1HuQxlOcJVohiDAUMLkwhEDMkWmaiFUWNAWto9wmCE642x7Ybge6bmQaJ6wIzjQYqZgmKtjhm462W3P58Am4nssHj3n46B2V9NxumaaRj37yR0zXzyli60ZVI4SoJLDDYSSEBFJl4KxHrMdYjarbzmNFezC/4hm/B2UubOblCf0/OYGPZzWq2plP82U10p3h57nG1tZ7w4iouIxoE/rDMGLvdnzx+VPECLfbO6YY2O4GfvrxZ9xtd3z29FpbQoZCLoZpSmy3e4xYfvzjn7BerZkOE+dn5zx88IDHjx7TdT2PHjzEe7+oP81g4wI/f1sMbh0igq+iCyIWRLWkvauRZ4qEELBOm5PGmBjDpI4Put+mkok5YRKMUa9Xrvf79jBQnl8rpyNonjwVA6YhhJEwBUJWXWXnHYdhousa1uteJQu9ZbPSvrzrztO4I6kwl8IUlBT27PqOu92eZ9e3PL2+JsTMMGmz+zFGYslkQWuKEdStULEeMdoNLFay6asZhZy0u1rBUoryAYBFzSnlWNtCpoUzMBurkqWihHqcGgQl7RS0pByP94PKOc7KVaektESKkZIT42HPfnvH7e0Nn3z0Exrv+fu//fd49uwpvlWp2RAi292eaZr46cefcHO35aOPPuJwOIAI3je125FZEOV5Sc9Q96J8x5xSe/k5feOG93Z34Ky1iDdLAbvU12ZVmxl29E4bPPus3UUa7/FetWm9twoRJsgyG96AoCxlOXFayECa5cKoEYOhiCVMhTJVeTMx6sZkZWOp4TWYIlqAnzIpHJjyRBGri282vEUwZKxJNA2sNtC0Qr8WnC80bcY1YMoscehIOTPuDTm+3nnXvr8ZkQh5JGe43d0pKmA91lju7goxJ4rxT
P6MJI4QDWJWFAox76rh9UhJZK+GdzRwKCC5sB9HYgzEOBFjIMXINAy6PY/grXB1tuHmbE/X9oyHASdC62qjBqi1jQ7fruhWE4/efZ/+YuLDX/l1fvU3/jSHw4GPP/6I7d0d13d3XN/ekavhTVlUFjFmDoeR7fbAOKVqdLWvslhtJ2atxVuP6dt72ZxXNuaPnJGcUtBs1sxlWChXzGz+uaa7FM1LztHvbHyNUW1tK5UVXxP12QhjiMh+UGga3SCe3TxnGEey+TFZVP4u185DJRfAchgjpWyZRhWkabznk59+TNu2fPDBB/zKD3/I1eUVm/VKywFdFRdAThyKORr+9hhfQWispXVOWcRWdYV9bZmYYmKaJnxxgCXGwDipJryt3W1i0ZrvQlHiIBqYCcLtbs92P+qXFYUknVjEOcIQ2B4iHALXNzuMEZ4+u8F7y2azYnO2pu8aHj04p2k8l2c9beMXRn9KihpNMfHZ02vutjue3dzw2dPnlaSkxiHW7JmenF1y/NoaUtWwQgiEIb8yw6v55AlrPcagKbeSjogT2pM55URIKveaFgawGt2UhJwNFIsUwYrFmoyFKlpS0d5ZT97NpaeyGF6KEKNqMM+67s+++Jwf/+E/AQrWCT/60e+yOT9jc35W53RknAI//slH3Nze8uz5Nbu9dmtbrTfVtth5Aen5Upb7MNWWpjOSNef9X2a8Wag5F4Yp0lpRAe0KH4pUYpywSAUKLBDifKLW2NprsRJwiiy9IBe2M8LCJlCslMWal2OSvNTPnyE+kUKxZXlPqZGyqpnUExCqwHlS767USL0kIOGsGl7vqKUDBWcyale01EalRSuUlB0pO7SK7HUNwboG3/X4tsO3XZ2nSC4Fa1TY3ljH0nFFTxVvVSpSciKu1jhnGKYeM6lovnpKtXRKBCMFW42BOKvbQ+OhFJwrWKMbd+ObqtRz7Jkpda5VOnJWuWpUws0WnGvxTU9KaLOFZqLxSn5rm5au7Wh9s9wQjW9Y9T0X5+c8fvyY1WpF32pxvhV1pNRzfvVQ3JI6qf+ePWTqz9mjp25U84RLzSUtN33tgjVDzUs5lJx8T6mPWuObCIx3e2WQHiZyCBRrEWuw3uO71dLyTFMIqgrknFXyGrDba2Rwe3vLzc0N1lj2uz1SoOt7xM1lYMf6U+qG9G0aGTVMSkYSnHe0nUptzqSaQqk63sohKTVqnKPHpW3Bvby8fnimdrqq6yeja2qKyvWoUBsmq1a2GGEKhWGMlAzXdod3jhRibanocL4hpcRhVELh7jBxGCNT7SSVixArabGu4LouqsJSVVyaYVmTs8Llr8jw1ttbz/lnGp6y/Jih5nmRi3EY4zHGYcQBqqRmS214kAIU3X9LrgFUZRrrJ9R7gkIKgRgD1girvmPVtXSN1/aIhz0315ZQHapc1CEIIVbxk+lIWDu5r8r833LcX5UeKov9eNmt440a3pALX9xWgowIoWQiqQquz8IHKhGWi+hq5cjWbJqGVbdeepBq66mZVazNEQRTG3kXmAkAWRtZa22f5vpySmTRQu5pmrQHsxw9d8GAmTfJsmTSQ4q1ZZeloFJpJQ8Ima4fcTbRrTN9m/Ct0DcG66GxKncWkxAmQy6eWNaE6Mj59V0GEaE7u+Ts0Xt0bctKW+ARQ1pYnxTNL1nnmEWmnQgX6zWdWxHChovNhmkacH1mGLa4tkVcpshEjFtFKEzGe7TnoXglOsUWSsGSsAJn5+dcXl1xdnZO03Y0vtUbsByF7NumY9WfcVhl3K32nbV+RdNdkEtD32/JSbg4u2S4uOXy7JyHV1dcnF8qOpEKDy4uOV+tefLwil/98H393K7DOktjQOIEjOQycqSUvrqhDoRV4YB4f64FOcqgGqmdaur6rJvK3G4xpbRwDsqS/NDWk6ao1q1WjkQIiVAGxi80vx6Lrv+mX+FXLav2jEfvvY9vWppVh/WOEANTnAjTxPb2lnFSEgq1CUgMgcePHnN5ds55dWLW6
3Ut05Jqbxdw7lsz5jyhISvBpvOs1j0PHpzjnKVrPcYKw+HAMA7klGid13uhajQ3xtJY1aqesxE1KbU47FRnseRCmCZyyirfOVUmLrVyIjukePZDYYojpRz46ONniMCqbWicZbVasV6viTmzHxW9GKaJkBJTLGB7ckpMYVLIu9KljdGae8QokUQMrm1xvgHryLUL0KsYIuCF2jg0aYT64nvma7CgNXPwY7CuR2i0EY3pyDnT5D3ESJ6VwKwhJkOixRgVXVKGuBrnPA0UgXG/JUwj69bx4fvv0Hvh9vnnHA57rp895fPPP1W9/rZVidO2oRTY7g+MU6iNPaq4T2Vaz81v5mN/oZHE8tDy0fjScPMbJ1cNITHGTBu1Q47i90aL1NF8rgVtiSWZWc8UWJRejiklgxFb+z5W8kr1/jRXkLWPIjDno4BqgI8TXXJWD6vkBRYszMnzo/dWKpNaX8/qHNSWVCIZZzNNU/Au4+3cFksWiES5DkLOQi6GXCw5O0p5jRGvCM43+G6Faxqsb+vzVbe0ksnMrEvKMVLz1kBlY4qA84bVrkMkVhnAREmBOO0pxtZIgQXuUodFIztTA7mK9NfNrDpNyZCKEkLm99vahxO4Z7CM0dZs3nn6rmO9Wi2Pvmv180uhcQ5vDI0X+tbUS6hlT9aIRuPlWPb1Oub9GJzO0UF96WQOFk7D6Z/OWeKTrir6kVJTKLXsijkSqFB2yuQUidOkxrvq0aqssBqRVdvRdB3dZo1rG4ZpwIxHeC2XXPsWJ8ZxYjiM2l5tmpYmALDcBjPh89tmdxeUQclS6uCIrTlyZ1UAv07+HJGpwVU9ZoWOqzN4cq3KInx62i5OvytW5m6MmZjzHONpdFaj3lz0tRQj0zhAKcQ24p0lZQPGKyu/RmQhKmyr065CFTOfYV5hs6a6rpxZ1GJ+1Aj4FV4f+RkR77K/lpMc6LJfzn87N5ef8YSCRcuuqGk4pFR+Wl6Crrn8UyiUrA5lChMxaNVE37X0NeotObE9HJimkVgjXWMdbV2oMc2KcizIz+kEzREvp2v7xL4u6+rlkeY3a3hjKnyxDcRcuN1PtN6yapTwsFmptGDOjTIOpeZuK6PWGqNJ/VL7+DLXgepiFNE8hrWGplFopczRqeKPdaOtHktSubUSAyZNuohDNeK1uHaua80VX5k76IhoH86SIiIRZw84m7nciAo9NAXfFKyHzmg5jkkOKZY8GsatI9MSxTONjpxe364lIvTnD7h690Mtl0oqZ4ckJBdKjCBK1sghgojW/4mldR2ddxjTY/05IQx0fsfh0PH0i0+5uX7KPsLtj/8JguC9epb9ak27WkFR3iOlIFkFC643lqedkPa3bGyu/TUbrLU4r1763c1zSjgg8cC0fcbh+pbx9hHx7jMshQdr4aLpaX7jB+zfveBsc8bF+bnqPFtBiDivnXeEFmHuQKP5urZV2dIUhRS+GUniZ44ZhspapkbRnq7IXMutBlTVuVCmaM1fKas56waTVXXJGcF7FdRXLetOPyPWupNpIk4qYTq3CJ+tRd+1bC7WrM57mpWj6R3nj89pNyv2hz3usMduLdthC5PBOkOJmW7V0/cdbdvRNC3eNyo2YFRcRuYUySn+/S0ZBS0jNAXGGGDUveF2u8c5y6prtQuWOFbdRlXngnJErGh7w3W/ou86jBGsU0MwjHvlMWSIRh1wSqzKbBMxRsYwMQwjzEGACCFlJe7UfGcMkfGwR4CrszV921BMQ7fSzmAxKWchJO0SlmpppcaXGSRrCVFRuLmIRm62ptzGMVR1rVg7Qb0iqFm00Ys2rU/HJ0uF7EvW3G5tSHDfAa1vrSm7IUwYYLXqWdEypoEpT7gINiesKbTG07mW1je0tZFEGLeUnNhtnzINBywTV2cdhA3ff/cJu/0eMcLdfq/dyWru3DVqem3UKgfjHb6gaS3vVbxJqnMlVFLe0avMJwZ3vr1fdlbfbB1vgd2oG/AYE70zhFZFyMUIrbf4x
uKiaEcRNO9ii5CtZRadl3IqvnD09uZ8wEwzT1kq7f3EK1sMr7KhSQEpCbLCRWqj8xINlZyUiVyUPWqkqRoa9TUTsUw4k+lbz9naVq5DwbgKy0jRzy+WEixxVEHv5CwpVlj9tQ2h6Vb0Z1cqHbnfV4fCHFGCgmo51Ryfl4KYhDMNzglN4+nXG2L0pLBh3yb2zzPXwx3jfuTu+RaK0LRrrPPkiytIFyDCkr9OEaGwv1uzvV1hS+Rm09I0nqZtMdbSdj1N2zMcdpSkze3TsGPa3xIPd6ThThtZe73723ceEC4VnjvfnBNjZHe7I+eEsw5n544mRsuPgpJnGq9t2WIRiMdcziuddQ25dB3VXJKROWpVJ65ILV2RY+fSOUKe+wuXkvXvjFTnxOK7hmbTqyM+TZSkXVkq31KBg7rqRaiGuqXtGlxjcK2hP+voz9cUD9GqkXKdI5P1itmCb5TMqFrXDlcb2Zu52SsnMnvfsvwuUMX51fiYCFMIDOOIT06lUZ2j9SqxmSUp3wTw1lbJx5a+6bBOcA0VnRmUp2FK7albqr62agqkHIlRW9AplGEXwwtKIp0h/GF/0H60YsipsN5E1R0uoi05czXCZeavaEtPxGoedEEdaroCQTKAEEMiSe0X/gpZzYIi3Klo7K+GtYrA1PLLXI1uLnlZFjKnQ+t2nStCYI2waholvQ4RUlDxjwwmFaxUhMvaSprNpDiRU2AadozDHiGy6jxh1XJ1fkbjLLe7nUpSCkQRxFist4oUWK3D0/QaKshR1e1O87zM92cdM8qhd9k3o2S+WclIEWzbghOygSSGhCViCcUg2RCSiuwXI1indVWm7fSm9x04z1IkW0otdl8ko8A5jG8BjXiXdsvGI6VgZva0z5obazNNbZk3A5IL/Fhza0v7LUTJPmIWQoCzmVW31hKiM0+zNjivbf+MNbhWhVlT9uTisMFgGoPQIW6NrVrHr2ukFPmdf/Dbyi6MkRCmamlzhXZqDjFnsnaXOCHetBirNPumaUk5st0+ZZoOPH/6BbfXz4ghMuxHhYddizGWZ9dPabv+fiRUUYTnX3zEH//hOX3XcnV1ro0OKmFO+8J6trsdP/nJR+wPA598+gXb3Z5x2vH55x+pY6VtRAjjQE6h1iV35JSXjiPO2UrUq+IopZCqcVJVLkNOB2Lc8qqigdNxhN2Oht3Zub/nTOZQ4ztDyDLf9MtGPrvVM1JT0yIpVaQCSkqLKH+hchKqRGAR9dyxBnGOLHAYB6IUbu/uCGT2w1DJJnoNnbVsLjY0zvPBe+/zKx/8gIuLCx48eMCqX9H4Ro2tHFnN35ra3ZMhIrWNpVukRWPU5hLee1rnNYXlDdb6msOdeSAAhaGynL239OLrdVEo2hmd55mFH1NiGHRutc9rXiDgUmRJqbUVNcgpMg09BrjY9PSNo+m8IlECTes1ip6gRC3PyWkm/hzh3Fzmrjpq/MjVIMzZq8UBfHVzW03u6WwfjVX993wNFtFRyce0C4VMYswJWwQTLc6oIezbHidCZy2b1Yrz8wvOzs9p+haxQo6JYdBuccOw0z0gBkWKciSMA9M4VG5EUba3UfGXuSZ6Np5iBOucRrtedZ9P2nHdA3EqeLVEueWr3vQ1xhs1vGIsvt9ofpFCNIZQ9YqnpC3lhmSxydBIFcNwDtepUolpVuDamrewFZJz1TvRjd14h2m1NrPERCYgrj1OUV2EtrZf60VzO1rcHepi1nrgeYpnWT/NA2n5gTXaJ7hp4GwjNF5YP7B0K0vTCm1fN/7aEm8cLTEabAQ3CAWHsWd4Esb61zbnMQT+7n/4/+Y//Y/+lj6x3DHl/j9P/yGnP07+UVjyNl/F+jvKCcpXrke1NScb9sz8fTFaKqeSk7rR/N7v/f3aoOKrD1hOj+/LR1/fXX7mc698zBvj3NjAzH1bZwGYov5gMSqsL8f1edocgZPfFwfpnuENkHJNwehn4tSRyzXPVqzBeE8CdsMBmwJcN+ynQcmFM
RDGCUGbgTx+9IiLzRm/8Su/xj/1m3+avu95ePUI73yF748bzptsfPB1hhFD2zR0na/9vVVEIY7qqK27HmuU1Kcs2wJiFmMGcBgGDoeBvmtwbqVnnGuJoVXja52jbTutL7/bMknBSGZun5hLbf9nPc5Zzs43nJ+dK78k6pz3jcEZoWsbiiTEWNpWYdFEJZCGQokncqnVd86laMqolj+W5T6gInP19Vc2sxVeB1iivvuIIhXZmVGXLFQSlkaRiOa5pxhrTbgiEJumYeV7GufYNC2b9YoHDx5y9eCKfrXCOMMUArfba+I0MuzuiGGEpP2GSgoMhx3j/qBKcJWEILVrUppJUVUJUYzRSNp7fNPUhgu1ZKgqws2ndwSwqi2QGVt6ufHGyVUhRJUcY5bHU03TLAU3GWX0TQ7vLJ2POGu5PRScc7RdoGkPzKSBUiBUUfksuUYzlq5roBTG4UBO8bipwUKYmokrRo6sxBhrV6PatLkedT32eSN3mgcVjbycg82dsrLPt5auFXxj8K3mXIzTixOCJUbhsIftHRQs2ewZpszd7vBa513Zsa/1K4DZKL+mD9eS7e/MWAgaJ2MmiizdYmYCiv7B/eh22dLKURktqhCBoRCdrkFFcPJxfSs7CKDmYtU4yCwYPAPRs9pBKQsz1RqLE8N6teL87IzNZsOq72nb7l77xcXBWnDEb2fUe8+hK0cTMef7pxiZYsRXHfDZqKntKkuYE1JimBIiKvWaawkkIpRYQCr7HBBj6LoO55ujmyeCq+IjfeuU+V+EYmx1dnQPnPsC3ytxm9G2BRWR2k3nWOt9+vrJCd+7F1/Z1ak8l1nW9GiUXljvNe+7/FwOppz8p38XK5N/jAGDQvfOGmyYuN7eYb3gm0gue8bDlptnn5NiIA57cgxYlJR7tx/ZD4H9FBhDYgoJYxsMteR0viLLGj5Zx6cT9OU44Hjs1ep+U+T+zQpopMT17d0COcwC3sKxVmup451ZbDP0iSwt5k7HcQIqfFD/Tl97ocXZvb97IUornLz3xZ9w8u7lk+brZWzNfdSbR6XH6nfKLMoPlLmAvH6mqADHs9vd15zJt+PbPk6FLxSRYemUJGXWlp6j+ireX5+fWdyz8Q4xkOKxdCNYoewtzhhWbaMt0orCarP4hhhDv1njGk+72WB8o9BoFRDxxtNU+DSlgEdlUZum4f133+N777zH9997n0cPHtXj9tzrP3pidL+d44gYnPo2QlXQ2+8Yw0TOiVC12eeNOaER2twoIQyFsfb4TkGdcmu19rmUAIzaZCKD8w0PH11wcX6OMbonQCHEqbZg1F61GgAos9oZzSmXoqpP5aRrmZHaYSjJosRnrKpbMQWI1FZ5tf3lSXedozGsG9wrGCJC27SUKZJqRy3tZnUMZhYk7P7V4H6ThNo+sBSN/EthGNWpbJxjPba0B8/0eyPrrmXVFFa+EKeJw90dlExrLNYYVq1n3TTc3lzz4y9uGMaRm93IGBJr6Vm3nX6/ZIzJWBOxVSgvCxWdqDXZctz754qCUsyylICj8f0Gc/rG2wLG+F2KW96Ot+PrjwqqHHO0dZxyBX4Re7W8c/6suZNQETXa1mAaj0XdWNXQZbEw1lmc90jtz6sHdzQoZW4BllWS1Rqjoim1TKvrukUmco7c5iM7/vj2GuDZTZg3/SVKZW5sMpfq1NKfWqk4IxLHjVfTVhTU2OSCL0JBDWnOcdmMjbG0TcN6vVoQsUJhGkVblaZIjPkk0JrLZWq6a8njqiGYYVvt3HVMeUmpkoqzuITM57fgoHNMWS/bK7pApRDDRKxtEVOGkMqxDC3n2mdXz2NuBzjXo+faT7iUk6qRpOVBsXIVknMIiRAtz64L+8bRSqQRJaVN+z0U6JsGbyzrrmXoW7Z3e+4OI+M4chi1gYSbIk1I9d7IC3KUkjrEuc5bDBFj81IsVgsjUQMrqFJWPurqF72eL0vM/KX043073o7/fx/L7XhibHNRlZ75Xj1K/Anee
bLJmqur2t8pJLRMS6NNk7XHaSuwsULjHA9Xa7q2YQiRMUamFBlCQDK196kwxEDaH8glM8WkfY6nQLfqKSVRSHhrWbU9667nbL3hfHNG2zR6LuUUtf32Gtr7Q0t5jBhybX4iVfvXOUvbdZWtv2a1WpFSZAo1co2hkqqUyV0KTLWBxVhrmW1gqSsVMtYIm3WnetDes9S5ZjQVkFT8wZWMnY3pApmxwMhqMwsxThRm8o/DOM0rp5yZgpb/GVsrPlDRjjkanlXkZgG/V5mG393d8P/9f/3ftKVkyUeeJqVqGpeFhHQ0/VS1qVO941LTKixQtNSfWquvJV0fN5r/NZKx9TtyVI1dV/UCfL2mcZrY7XakrLXUORdc8wW+aZej0blVB2EuMxUxOO+Y23jO6+d0LQFLy1fQ1GMIo9Ziv8R4a3jfjrfjNY3jLSzLE3neZGBJo5RcFIIWwdkMjuU51dNWBr+mXgqNQGuEzjrO2o6+axEZlbBYWzNSFEbLRgg5E8eRGBP7w0EjLOcIIVY4FEzb0qy00XjftvRdh3duOZMZsNQTOtmUvsUGeC7fgiM8qM6O1VKppqHtWpquJQRhShO5QKj9tY13OGcoCWLMpNoxJ8W4wP5GCk7AOe2D3LQt1un1EqicT83bmqWnrsw0FY3Aay9rU5U60qyKJELjtSnMjC7EpL2UC7UJjBFK1py1iGDJiCzdBJae2q9qjMOB3/9Hv/1KP/O/iOOt4X073o5XPBajWmuESoUE57zjrJRjjEZMWVR84Dj0PZq3c5S+pSSPV2ofHXAhqizmjZa2SaltLdEc4pyxLMB6vWJzcVlLX7Qu8urBJV3fMo57xmFP4z1n6w3rfoU1rubrarTLi8Sd78YoFaIXTglKeiJzzakaslQFHyq5qkK7UHOQsODWqlinOUEpGescXd/SOEe36um6hqZptJkERUVQKr/jpLoaSm0HKlTWs8KaVgRJmVhTATGGemwsedR8Im2YasnkQnyrgh1H9rNob+a341s13hret+PteA1D4T50Ay/2RA+2LBAolYGcc1bVqZN0kTHKhKUUvFfItzeWzlo6ChcVsnRGMEXbpM3ypYvhrR0ULi4ueP+DD+j7FQ8uH9A0novLc7q24fPPP+Gzzz7BWcuq6+naDm/dwlilqM24rz/03RjH+ebYWKVGgjElJBrt3hMDMcWjrrDRjmSZQqplh6XmV5MULfHJOteN9WzON7RNw+ZsQ9c2tFa7IJWsOWRFJw3M7FopzFrcoN3UjKjesnUOW/vDRrIqUKVUG7Wr9ndJeYHEQ4gYq7WoVMML1MYPWdfRjGW/Hd+a8dbwvh1vx2seL1YrnD5/8q8vv07l/s+5upnVymm3FvhqxuoJm19kYVQ7p52fvHOqW2ztCUlnjppe+Ig5J/2lT/6ujGOkezperEO/x1KV0/fc/9tj5rK+VeRLj5M3n7zv5MNP6kFPP2N545cuwmkFx5c/++SQv3yeP+e1t+OXM+R1yOX9zC8T+RzYAV+8sS99teMRb+7Yf1BKefxNPuDtfH+t8Y3nG97O+dccb9f42/l+0+NbMd9v1PACiMjfKaX81hv90lc0vovH/l085nl8V4/9u3rc8N089u/iMc/ju3js38Vjnse35dhfZwf2t+PteDvejrfj7Xg7XhhvDe/b8Xa8HW/H2/F2vMHxyzC8f+2X8J2vanwXj/27eMzz+K4e+3f1uOG7eezfxWOex3fx2L+LxzyPb8Wxv/Ec79vxdrwdb8fb8Xb8F3m8hZrfjrfj7Xg73o634w2ON2Z4ReRfFJHfEZEfichffVPf+zJDRD4QkX9fRP6RiPxDEfkf1ecfiMi/JyK/V39e/bKP9eeNt3P+Zsfb+X7z47sy52/n+82Pb/WcH5uNv74HKtny+8CvAg3w28CfeRPf/ZLH+x7wX6q/nwG/C/wZ4N8A/mp9/q8C/4tf9rG+nfNvx+PtfL+d87fz/e16fJvn/E1FvP8s8KNSyh+UUibg/wj8N
9/Qd3/tUUr5uJTyn9Tf74B/DLyPHvO/Wd/2bwL/rV/KAf5i4+2cv9nxdr7f/PjOzPnb+X7z49s852/K8L4P/Pjk3z+pz33rh4j8EPhngP8YeKeU8nF96RPgnV/Wcf0C4+2cv9nxdr7f/PhOzvnb+X7z49s252/JVT9niMgG+LeBf62Ucnv6WlGc4i0l/BWPt3P+Zsfb+X6z4+18v/nxbZzzb2R4v0ai/afAByf//n597ls7RMSjF+vfKqX8O/XpT0Xkvfr6e8Bnv4Tjejvnb/aY3s73mz2mr0Pe+U7N+bdxvuv3vl3jb3p8g8T1L5xoR7sg/QHwKyfv/bO/jIT7L3huAvzvgf/1C8//L7mflP833vBxvZ3zNzjnb+f72zvf37U5/zbO99ed8+/SfH+b57yU8o0M7z8P/Lsn//7XgX/957z/X0JZZb8P/E9+2RflTzi3v4TCD38f+Hv18S8BD4H/J/B7wP8DePCGj+vtnL/BOX8739/u+f4uzfm3cb5fZs6/K/P9bZ7zUsrLK1eJyH8H+BdLKf/D+u9/BfjnSin/6s/5m7f5i198fFFeaCn1def8Z823MYbVqsdaq20/RTu/Qu3xauTkqfvdPHPOQCHnurjLsZ+pmPuZC2sdTdNijaHrPNbapYE493rUFkqBcvKZUrT5ujG1qbw1GKufX+bTqj1LY8yEEAkhcnO7I8VESlkXeG2ADl/uZ2qdw3uPAM9vbr7xfAM8evSo/PCHP/xZL98bBT3fXAohhqW5eYyxPq9Nz8dxIKVEifoAnZ9CIWWdu1wKufBCp1jqJMsy3zL3ehWpze2P7zm2g/3ZTe9f3C/0o+Tea2GamIbhF5mCe3P+MvPdeFtWrV/OWUSW663neHIqy1rVeTtd23LSRP70nObfrZzOYf34418v51/QeyTl+Xc9sjwfwPKtgrF6DcTocZaS6/sLOR/P5/RBgZR1baeU6prJTCF97fl+mTlvmqZ0/YoQJmLU78+prkkxy3yJyHFfEKFpO4w1iLGIWL02YnW91nVrrMVYCzlDTuSUGHZ3pBgQMpDv9baetyeR44WZr2GZr4y1GOPAGMR6EMG5BmMs3jV43yACRk6upBz3wuPPF++H4x6Uc+Lp5x+zu7v5E+d7Hu6rnnyVQ0T+CvBX5n+3jaeUDHWRzsdf5kU5/7uU5fd5Yer8Hm+Ar3kkzN/04jjetPd/vvieeeM63pRycg7VeJT5PJYTq4vr+PryffX/j+vm+NlTTH/0tU+RL8/3V431esVf+It/jsvLC5wzWGcoZCBijNB1DdYaSsmUkjAiGDHknBmHkRQj0xQIIZJSJkwREcF7XdCl6DlcXj7gBz/8VTbrFb/+q+9zeb4mT3fk6Q5jCt4VEIh1A5mGyHQI2CS00WAQutbinGF13rO5XFGkkEyhCIiziDV8/vktH33ynJ/+9HP+vf/7f8yzZzdsdyPTFGnbjn69ohSqkSogBkS4vHrAO+++izGG//Pf+BsvNd8vzvmHH37I3/k7f+f4Yt3fy7IQoEg1mmRCCYzTyCdffMp+2PPppx/z9NkXxDBx2O8Y9nv+ye/9LnfXN4xPr5me32FywSRIKXM7BqaY2IfMPiRigbFu+lgDRp0W6wxiDM47RAzWt7oJqmeDMaY6YoJzDmPMlzbQZbMvahWMqNFwVt+bcybnzCd//Mf80e/8TnXSfu74xmu8bx3/1b/4ITGpsfNtS9f3iLFY5xAj8zRQcibFSEqJ/TASUwZjQfTcfdNgxOCdxRjBG8EZ8N6x7hqMERorGAEnunkaEZxoU/spZmIpbA8j22EkpMxuiuRSmHIhU0jVoXLOslp3WGtpG4szhmmamKaBlArTmKCA8x5jLE3T0LYdMSZ2uz0hBG6ubzkcDnz6xZY//ujmS07R65jvruv55/4rf4mf/vSnPH36lOFw4O72DhDapsUai/cO5yy5ZFJKtF3PB7/ya6zOzvH9Ba7bYGyLa8/IRThESMDq4pJ+cwbTgGxvO
dxd87t/5z/g+rOPMLLDcMBKwZms91GGUgTTeKz3iPWYpgOxZCwZod1c0JxdYJo19vwJtul5+OD7rFbnvPvk+3zvvQ/x1tJ6jzGClYSRgqFgpCAC9nTPZ76Nda6NGA77LX/9f/M/4z/89//GLzzf38Tw/kKJ9lLKX6MKUz+8PCv/jX/hn+Fwd800aGQSQiCXTIh64xzGyBgy4xS4248YYzg76/Hesek8q87TOMOmtdUDvG/Gjl98/FWMbiy5lMVbzNUIphxJJWIsNC0YK3Qrj3OGlCHl2XBmjDVcXmzougbfeNq2IRdIsZBT4e7mwDAEwhgJw0RKmXFIxJi5vhvYD4H9ELk7TBgRusZjjbBqPa2zrPoVZ2cbxAh/8z/6hy8156fz/bMi3pwzh8OAcx7vLdYZqIZXBKZJ571pHN47cimQNQorWRBxtK2n6yDFxDiO5FxIOZFyoOs62rbjwYMN7793xbrv6FyGeIA0QU76+XWxZ9SQ9l0hbzI2Cz4KpoAxYKRgW0MkqAFNmSL6PFk3rOFwYBzG6oVD23R4L3XD6hEze9tSN1thtd4gpvlqT+sXnO8X5/y3fuu3frZnl+8/oca3kI3g2oZGMl3XsWo7DlPgcHeA3Z7mLtDvInYEHw0lF3LKSBZcEnI2mJQhVgeuzJ6pATTi13PX9SxSICVkfoe1y6HKyVzM0WMphZzzl15TVKQQU1JHuj5yzi9LE/3a83113hXbCuM+MMZAKomUM0YszjcYY2gbR+MtVnQ9AHRdRymFMQSmEBApSA7q5BeHKaLzWARyJsdSER1X19LsMM9zBNbqfBgj9eoWkIq81EuSQmQKgVGEw2FEjNB6j7MWaw3OKQLjXVJHPWVinHDGgs8Yga5pcMYytgM5JZy1i0F4HXN+Ot/nF5cl50yp++jR2BdSybodF9HIvZTFwRxjxEwTwQwIlr73dG1HEcs4FlKGbYDbu4m1czy+fAfnex48fAeXA3GANB1qVOsAIRdLwdCuN7SrDcY12G4DYgjFkhBsu8J0K8S1YDyIJeVMjJGcI1IygsGIOmcyQ31zMFQflHLP+FJ/+9nY0M8f38Tw/m3gN0TkV9AL9d8F/ns/7w/Wfcc//xf/FLeff8Tu5hkhBMbhQMqZwxgJKXO7C+yGyN1uQELAOsvjixV91/Dwoudi3bDuHA83DUY42czun/4Cc1Bhymp4Q0qUUohZ4dKQRmKesB76DThvOL/saFpLSBDSDOOpl/reew85P1/TrzpW6xU5Q5gyMWQ++/iG7e3AsBs53B0IIbG9m5imxEef3fH8duBmOyIlY41h07d4Z7had2zahsuLC548eYzIzzS8X3vOv2rkUhjHgPcjKanhFcmo3wkhqLMCqwoPF0qcbzJdak3jcc4Ro0a7MUUOh0jOCecMfd+w2XQ8utrQdw3eFEqaIEekqHKLs1bhY1s9ylavly2CC6Jwao56XBa9seuNTIGSBKQQQ2QaA2EK5JQpuSicZD3ee5qmxViLa1qFIUUjPd90iPm5t8A3n+8ZtSmzj3jcHotoFISA9Q5HQ+M9rfdEBA4T7Cf8EGmGBFPBZKMbRxLIBZsFm0HqA06hMVPhP6sQnMywnoYLJlfIuZhqHE5h1/uP0032BKTW9VTvqRfyay8zvv58i+6nWRIxBXUAExol1p/eGIoxGGdpvTpf4hyIsN3toGgUX0pEyryZGv3wAmRLSUIpihoYkYpGygm0CgZ1ihakXkpFklDnrqjTG0LUnykjQNs0OOvo+571qtUoyxigMCU1rrk6NwbwzmHE0DhHsBZr7TEUewNzvlzjk2tdQB1oClJSnQddJZlCTJEQAyEEkBHfZVzTUMQiuUCEQxQOMWBXK5r1Jd44zs4vYLhlkFvGLBSEIoY6ExSx+G5Dt7nE+BbXn1PEYrLRe8g1+rAOjAMUuZth+gpDVrj5mEo4rvCa+nrBuspLmdvjeGnDW0qJIvKvAv8uyoz735VSvtJazEMXJ/WCZUQKzllMU
bjLp8wwRgapYX7JWAytM/SNpW8cq9bjrZBTpMzOSeGYzzn9vvogGyhSvbG8/BQKxiaMjbgGfGuwtpDjQCjCfijshozeYGqgbvsdOUYOw57DuAUMpVhKBt/A2VnLpm8oFxtCSNzd7BmnSMwF59RT2O4OCAVLwBSLk4K30DaGVe+OOapXMOdfeR3QXIgRq1Fv4zCmeuwUUg7MCy5FNWQ55rpxpOrdO7yfN3bBWct6vQLgyZNHPHr0kKuLc/rO4h1IDIoaSME69e5LVjOKCMXIyY0631wFTD65yGX5DyrUKUYdqBAJMRGyEIulb1c0TQdiSBgKFimWgqVtV/imoe16Vpuze5Hcq57v8hX/mv9fEBxCycAYKePIcH3H7adfsHt2ze1nXzDtD8TdnjJMlJpTSykxhEhMmf04MaWsMGcuOnfG1E+XYw6tIjy5VG/e1Hk0y7mSc14i3NP83Kkhnf+t0bBo2ujktZ81l7/QXH2D+bbW4ZuMYDGYo7OQM2FShy85Q8lqqFpjMNbStZpWiVEhfwCdvYyznsZZvLP0FYZunMOKwVu7QOzW6PyGrGu8VDBSaqZRpDAn4IW6yRuDFzXuJQupZGJIhJCwRmgbNeYlGxIZa9W0iQjW6mu+sbTR4dzLV4W+3D4uiyWS2ZsDStZjzKk6muj+m2MihkicAmICxjSUFJGs975k3VtiygzJMLaFJB5xHd3ZFYQDOV0zHhR9C1kRyJATmUI+TAQ74Fqh9wUxkK3TyNg6sP6Idgk1Lx6VC5ITtsxpFVPvGDUspwa4AFLUAp/eGy87vlGOt5TyN4G/+XX+xlAoJVFyxAg0jR5C11pyKeyHkf1QcJIxJWMprBvLWec56xvOVh2UQIwjVL9HDezR8H4pZ5pYbgU1t/obUrA2gJ3wrdD2FiMQxwMhZq5vI89uI0Y83vfVSUjs1g7fg19XAlG7xlrHRXvBxaanb3o27brmYe44HEasgU1vMCVx9/yOVDJSEiYbGtvReVj3hsuL9mca3ped86+8DqKkg8a3dF2L85audeSSORy2xBTJBaYp3ot4YwiUUmiatkI0Bms1qupWLd47fuWHH/KDH3yf1ut1k5IZppEUAr7xdF0DFEpOpBOnScy8mIVkTI0q6hWTTI0PWeIuEaxRw3sYAsOYmJIhFMd5f85qc8EUAsM4aVRHg8VxtrpkvVmzWm84u7j8uTfQN57v+tFl/v0k6lWf3WpUvx/JuwPbj7/gi3/yE3bPnvP0j39MGifSdk8OUR8pM4XIbhiYUub2MDDFzIQhiKUYQz4ByQSpxDVFOlKKNaerUfBsnDBmycl+VcT6ojE+GlnDjJScRswvO15mvouAbzQXWrKQYz3/khXdGQJBCtYKkzN47/BNg3OWdd9jneUwHLi9U8emxlU0ztL3Ha1zrLsWIwYngkFonMd7V42OwtslTKScKCXVnUYf6sTOnJWiRB4RvPEAmppKCWMi1gSaxrJeNUosLIZkBGdBiPV+c5Ri6DoHNDTe/sy5eR1zvlz7e0ynuo6KGqZc4RcBsljCYcRgsdJipaGEiEmRYsCUjGSYgrCbYN0XJtPiG8PZw3dZNY5pesrdXUsOUVMKGQ4hkoqwLwdsNHQr4aLN2EbwTYtxrUYT1taclVqLGBOUiRCUyGgrimGMqYnjI9FwzuYeb90TxOoEJfq647WTq14cYl70ikXzKyKQwVYyhLWGxlu8M1ijhAZKJqWKyxe9OSrWs2wz+olHSBTqQuH0PaIQUNHj0QgMjNEYoSQlYeSYyCEjNiuboihcHbNugDEkTLJMgDOORhqkCI11QMYYaFuHSOH8vCfGyGGI3N6NpJzIaM7m6nLF+XrFxcWas7PVzzW8r+Qa1GjFVMavMQZrLNY6pGSMdVhO7ispYIqyDVEIrIL4GKNRs7GGVd/TtJ7VuqPvW5wBIaGeesGYUtf/4iLp/+oXnTpOc55FF/4MYtVFXxRiTjGRBaZxYhxGpmlajIe1Ht+0Ci+JZybQG
GsxxqKQE4QKlb/KsRiumi9asmDz+RQ9pxQjIUyMhwP761v22zuG2zumuz1hP5CmQArqmeeUCFE3nSlGxpQIKRFzIZZMFlNJWxzxMnSfqGBBvZbHO2E+1nlD+RJc/CdAxqfRL7CsW2PMN0A+v+5QNMpZg5FCTuqqKZqux+WM1T2k7r1izEL8mYlkxqgTiSn31qFwvJYKR86btFQDihJ8RJbnnTU4q4xdZxPkmv/M82dXxEDg+G1quFKK5HTyHiMUjjll5mCz/qy25BsCn19ntuefX0Y3dE9Go3s5yYEDrXN03uOaBtc0dM7R1GNvDWDBkxV2zplUCpaao40bxHVkLAnlAqVUKMVQMqSUIERiDGofklX4+GecgJJGMzknrRYoRydh/uV4C8lxDbz4UfcQnq93Bd6o4Z0Zk2ro7JI7hcocM+CdoW0sm7Wn5A5nHZ0XLIXxsCeHg+YQW2UeOmerx2Kq8a5EEJh3moWmLkVTwqUUSlK423cNdmVxtmBdVDLDGAj7QDoUJGQMDZ3vsN6SJTKRCHFHOGzJBWJS2Paxv2Xj1jw6v0IeJLyznJ21CA196xnHJ/zqrx74zT+9JcXIGA5A4XJ1zqrtubh8yKMn3/tSWc7ruA6Nd3SNp2kaGt/gvUa/CkdKhZTzcQNOSujJaVRIUvS8Goca3Mbx7nsPWK973nv3EQ+u1qQwMG7vICecL4izNN7gPEu0LAJSc7yzgaUUjMzRVyV1laxrRQTEURLsDnumKfH5Z0/59ONPeX69J8eAkUK7WrO+eIgRj7EN5EwMk362tcQAd/HA7e4XKnt5uVFm0BGQQrUDalBjZH9zy/Vnn7O/u+MP//F/zt31NZ/+9Kdcf/opYRwp+4kcI8MwEkLgdjiwmwbGlLkL6vUPRePNYoVirUL0lamsx6A/FEQzODMbkOPrJeu8FhFFRF1Zjl1Po9wn0yy5fqrRak5ynYJ1b25bERG8a7GNwxpLipkUEjkXYtQ1dLbq6buWnCIxTgDE6tA4pw6zrYzvnGe4Uc8+xoQVQ04Kw4s3ut8YDQhmTycbofVOnxPBOs8YIxwMISekclj0806MZlGSYKaQwsSYIoYGildUqrE4tKpAL2vBqF2p90iqUfWbG/N1NuhjduxsLnWVKWpgjTogK2t49/KCs4tLuvUV7eqcvus595pnXxnDlAuHceI2TpiwYpgixRnOrt7Fnl9iPv1DxrJmSnuGYacM+koezNNAippQmtZ3lBRp2hXiPFphUSda1A1WVCIyhYlxHLBLymQOKsri2KgBNl9pVpd5MOZLOeA/abz5iHeGqYxZsP0lMkWjIWsFb4W2cVhbPUwKKeXqyVhyo+Umy2eJLs5CVjbi8QtBNBc5ezKlFM37okxF5wRj0zFKSIUck+5o1UvVha+fkcnEEhnzSMqFcSpIsaxih7FwaDrGcQAaNqbFWsNm09H3WjtqnCPGwGFwlJI56zZ0vuPsfM168/ojXlDyhrXHTUTn0Cj8brTkJFfPUL2VWkdnElISgkNEyVFtY2k7x3q1ZrPu6LuGprGELIxFoTdbvfdaybNc6yMgKuQix3xRqTndXCHmrMZXr6XCdtMYGA4Th/3AsFdWc8mak7fWYV2Ddx3e95rrk73esEWJdalkYk6vPDKb7dLRgy4LqamgUU2sta77m1t2N7dsnz5n+/ya4eaOsBuIMZBjJsesueuYmGLkEAJTzkxJYfpUDHlmhlQ0qZhjRMs8v3NUWk6eq9BnKWWBpGVmgdXk8D3Y+ZgsPn72AjuzlCCZr7sLfaMxR7wOax1G9P7XOvMIgG8cbduQorL3S61uyHIfnZgjTAXSTiLRk3Oe+RHL6zW0X/YuBFcsXr8J5ywlgTFJCXByfBhEwRuZU+1FHd6cWJJjRp3UBRWSk9rkGRn6JQw1SCeoIiykSZmTekWwAs4IfdOwbltWfUfb97S+obMzqF9wBlrJ+BIxRXOvqQjSdDgc4nuKeC0TqsGVXrPZKQyUFOsjMHN4l
usHVHGAmofPpJyU3VyJa3C8a5azktNz/Yp5eElewxs3vJSMM4J3hlKOm60hUTJKpPIGi4Oc1eDV16RCztYK3il8dKR419+NUShtRiURYqklKKhXv1AfRBDrFI0EchwpKen3loIDWsCWrOU0uRBJGMlEm0mNQh6pZEiwmwZyEkqwxC10bcNwNdB4T7daKbtWjNYy58Jhl5imwJ4bDFsu7jJj9MxF4K9rGCO0nWO18kooc0BJjONBoa9KNHDWgZGaHwRywUlDyRm3WuPanrNNz7tPruhaz6OHK7rO0xhL2E/qhYqnzB5jgRggx8g9wLPmv3JWeB+0gJ5SiHGizMZRwFlP12/IqfDFj7/g2bNbPv3JJ9x8+pz9EDAp4oslh0gYA9Z0ONdSciLLiJxsbrmSjV694a2GbD7poiID42FLDIHrTz9j++w5d18847M//Anjbs/NTz9h3B+IdwOEQgiZ2yEyxcjTw8QQA/sQOVQYbpJCEQGrggRN1+HaTlMhqZzaRoU+53K6mVyYFwu8QJcGlNVbja6UahyKOp5lgenKAuUJijq9WGr0pkYphWmMlZld9xKrDt5Ril5z2ykpm/hIDjPLZ4A6DjlDrqxcHMv7jJ1rnmvyIydiqTncWmpnauqrcQaxBjEwxoAI7EuhpIituWMRg3NaFtMYJQ2VnMkp451lmoKWCnm7VGUY0RRJmVNeEWKosOsbm3GOwQiLv6eReyUkmQqgN96wWrWs1y0Xm5bLs56Lyw2bs3PN92LJBZqUCbnw2BemNmPMSBpuiblF+g3W97TrKzZnT7DiGG6fa0TtVFRHKV3Q+ULnMs4VrESEqEdiLEhW4R3JNaDQ/e729hpKIoRLjAFnynJOM8v5Z40Xmf9fZ7x5w0te8iB6f6tQAylTTNFI12tdWk6uXuCaj63lEUak5mzMfQ+Q+UaRe875zOgE6iqZ+Yb1761UpRTN75I112ALeKl5npKVuEEmSyaZQnaFbGokTmHYB/JoyIMQbwt92yBBDfCDxw5nPYLgnWeSyHhIHA6BNE3kWJiiRfz6Xl3l6xhGhKZxtJ3DOIWwtMxBo35XhTCMdYi1NWemML7DkXPBNj3Gt/TrMx4/fpe+dZxvHI0ThIk4BUoqGLRmdo6eclb27TGFq3NecibHSIqxKtdEZS5OIzmnZXE3TcazJqfMzee3fPbRZ1x/+pzd8zvGmDEJxBhKSsQ4M7A9mbrRapizbMD5T8hjft1xBClPA5JCyZHpsCMMB64/+ZhnH3/CzWdP+ez3/5gwTAy3W9IUSeOEJEihsJsSQ4w8nyL7EBhzIuSTbLcB59Qo+M7T9i0pZWQKtY5Sj8JVZEPyMbWzHOhsRMvMg+b+4xSqnlGQ+fkCYo6Rrl7OcoS538AoBULIWJMxJld0qm6Gbo5iSjW6aeEAnB6zzknRHK9UZ2yBLWTRATgiUXXdFJCSIGtkPe89DqOgZSl4Z9XQl0JJCRFqPtoqWRMDTuPElBIpRsSgcHnKC5qnnoTVsynVGU6FGI8qV29qlFKWeTbLqpl31CPh1VlD2zr6zrPqGzarhstNz8XZmpwKadL70BWIwIUrDE0hSGAIO5LJ4C6RtqPpz+hXF6QwVhKUBm+z4YVC4/RhbT7C8GL1bqkOExTl1pCZwsR+v8M7S4wBXx1Ia06Cgj9hfEci3nL0pmGBiUsRSonkrBfLOwMFstcptWYuWTG6aBdVnWoUYckDHkn8LL9nagQgc4lFQU5JC0ZzBUcf/ujNWauhQFLWBpIzUiMKEYM1WgZENrhRS6NKEkLMmClxdzcwDoli7tgfAocpsx0i27uBP/rJMw67kWlIpJB5tC8k21fyz2scC9Hp+NCoR29snc9c3ydYY+m81+jCQUmZs5Vl0znWjdCUgAmR3RcH9iViiBipkStRDV1SQ7qkUGZIcs4xlkJOSnZQY6C1oalGvNZajRBywdT103vPWd+x6zvO+g43JfZTUI7t/Hk5EytkHrNCWDElYoqkOef/q
ue3aERELoQwMh52hPHA808+YjjsePrxT3n+2efsn98xjAMhBA4xEFPkdhrZDyPbaeJ2GJhSYkpZj7UofGmsoWm0RvnR43fp+17XehEOw0AYdc5yqs5OVDZsLtRyIsFazwzyz2VZS75qBtpEDfZCuJpFNF6YsBltyqd54Dc4TA1Ncim6Kb8QyYakeT3NW9v6XkGylrDYqLKLKR3TELOjeFqLKqWWbBXlpLi6/1h7IlEpQsqZlKpzlwolK5PZWYfkapyso3FeUwOl7jtlFoZRQpgxQltr0WfyY/XjyJJprCe6FveaHfUXx1camoWjQYXPBbFmSa2ZqjLnvKfpWigGejU/KryT2eIYinAwDTElbIqYCl93fc/5g4cURvznDTFmjBWsrZVaBSCrKEaOlRsSUcETFpi5HiwAMUaGcaQblZjprKGxzUlOtywEq686/19aOdHLDWUkU3TBOu+hqHSekGm8pRSPtxkrMz9ZJ8J6g7VHiTo9cfU8S9GcgEKlti4EXdC5CHnx6Ocki9VaYWswzlCieq0UMJUeao3gvJBMYUqRIhkTM8RS8xcWsULXOsiCpAYxjrIXhpAJUyDut1gxXN8OuMaxGyLX+4nb24Hf+Z3P2e5GDruJMEbe/2DPdlKW9esexmaMzVibl/yux5FrBJFzxuExoszszfkaKIQD5BS5XHnOe8+mg56BMgae/vQnjLst1hScqIHwrTJBxkn1hq2p0JkYrLOLIwTVecqz/zrneCNQVGRABEkZW138874jn58RDxPj7Z67Q+B2f8eUDSSVN4wx6rVLkTGrWs2YtJC/IORXDOvPrl4MEylEdrc3XH/+CcP2jo9+/3fZ393y/LPPuXt+Q5giw141b7fTwBQST/d33Gz37EPg+X4glsxYMgmIFBJav7m52LDZbPjzf/7P8uDBQ66fX3Nzfcvz5zfcPHtOiROhRk0lq2awGINUyLR1Vue/Gl1FkeZ8/7FUyzl3RAfQKKdWttfzPeJxs2RkeYMhmIiqzRWhHhdaPlKoqayi8xCTVh54X49V0xdmShSUDRtiUuRlFlbgCMPPSEHMGSkZby2lVlwsUHvdc8qY1bmLlTWbCs5YGlslBYpoLXHbVkNdjXxJ5KT7TmMszlo2bU/bNgufomQIMZFKpnMd0ooaixp1vpk5vx8PLjXT1GtQk9biLK5tVIrTO4x3NH1Hv1njXEvTbiqvRP8++U/BOm4mOOwnJAg2J5zA2cUlfPAB1ke++GTFNBacRDSXrIpsSCblCZLoz+wQ8Rg7R7xwunLDFNjlLd5Y9vs9QmHVVk35clwDx3O+//uxrApeNM5/0nizrGaObMr7ZQuzgsjxPUZ0cerfyRLxGidKuKqbw7wEFjhZjhsB5Rj9LlW+s8uoONFyXBzj3CPMNue/qPkX5ReppkMWTLZqQKoSEtYgTpWYZiwuz8XhMVFEa8hyUq8ZIzXyrkSflJhCWAzL6xoiR1jsOFkKZxlBCSGAc14f1mlURCEbsAhdY1W+0wppGkhhYtjtOGy3NM5QbK3vLZYiEKeoUYfVa1FMUb7WvGjleHCz1wxUne6C8R7rPMa6Cl0rpGyq+pWxFmuVgWpFWYqzOk0uczkSS47/dQ2N0gPT4cA0juzvbtleXzPs7tjf3TFst4zDQAgTU4gMMRBDYlf/vY+BfYoMKRJKXtS6gKXsrV/1XFyccXZ2xsXFGRfnG4b9nltRhux8/8wObqlRnHAs9ZmnfF4Lp5DZ7KDeZ22e5LPqH9+7p+R4/m9yKIBWy0OqzOMsWZnquac8Nx/ImKoWNStK5Xv7kH7mkr4q9/epDJpyqrlySQUw5GKWPUTT5xUORhBUqtOI0z2r1Ko8jNaTQo20tbxMN7J5ZitxKOcqZVlFZUquqa+8oEVvyujO86P7r7lnkI4uszpnKRctgQuB7W6HdZ7N+pamXdN2CWxXhXgajKjWe9c0DDnjKlImFWmxztF0Pb7tcU1LTgHJiqCKUFOPdnkoT2ZGb2Q5v
sVG1ss9S92GMBGCU8bzlyGde+euP++//HVj3zdeTqRyZwVyIoWkBf0UWCDGOT+gAg+LwTWigv7WLJ47gKkLr1TCjFiHFYXR4hK5akRXah1woYALWldXClJ8vSBePWdjKVWOpqRELlK7x1ia0YOxWA9NMJiiHq8RgdaBVeg8BZAEZaqlAlndMjGWtndssDx595LDELj+4obd9kDTCtM0vQFWs8JezjpyMeRUGcYVzlqvN1jrFSaqTFGbIpRES0FM4eH5iveePGS4u+P5px8x7vd8+tMfs7/bcr5ec9avMc5gW0UfZvNhWouYBrFWhc2lElFklsqoxrGWxDSNqly1Ve7PGks2TkubrKN4jziPqSVqnXM1RxoI+x2m7WlrRyVclU+0prLa5yrDl4eMXhw5JXbXz/j840+4ff6Mm6dP+fwnf0wYBvZPPyeOCiUPU2A3Tjzb7RmmyGc3txymwHaYOEwTKRciucJ2akzPLtesz1sePnrAr//Gr7DZrPnhBz9gs1qzvXlOOGwpcVRyYrGkSZWUlk5SSE2tzIQhs+QQT734e/Xd1pJzXhoopKSwv5nrStFoULK8caMLKFqWRopoGUvMial2y8lVaznFRIkZKZFxHNUBsbUW15oa4VBzvIIRi4iazRgSBiG6tGgwA8SgzFlnHamc5oyFKQkxa7MQLTnKCiujOuJZClOYePbsZpFSTDnTVv13cYWSDUWyatqHQZXW2k4NRY2kx3HPfn9gmsY3ZnalBkTeaSniNI4zNqVRYk3rSYb9OJCuE7e7PWPQZgnvvvMpDx+8w+WDx3z/w1+n71e88+Rduq5js16RQ8DYiee7LUmgTBPBOqzvWF89Zhi2nF8+YfAd4/YZKYx465Tn0K5YdWcY3yKuB9cpEzobEKNkxOpo2urQ55KYponrmxvCNHG27uja5kgaO4nu5/TmcS5efud441DzrJ0wFy2XVIuXX6DsI5pbVE9IoTDvbO2ko0SP+kknHt8srKGfsgS2NY+VOfEk0Q3tGGzpJlzELl1bRGZPU44hWDGQLSYVfRhRyTFqrtgZsoVSmdLU/gNlXpCi8orOF/pVgxjD0HtimLBW6kb5Oq9AXTB18y25Hruo4bLW07Z9bZdVI/lK8Udm4QBD13rWfUce9sQ41c4qE2EKpDZXljJIPmGbi2Cs1tVaazGu0eOom7pQja4I2VjECL7VhhX6sJW0Uuvu7CwJp9fMWK3ptgViOXbSWappRe6V3egaebURcMmZ6XBgv73l7uY5d9fPuH3+nDgOhN2OFAJTiNrKrZYHHUJgO40cpsA+BoakaY95ybmatmy7hs1mxcXFhoePrliv15xtevq2xVUZVXKuIjTmnvYszN6+3lunke5xXdz//cUodzbKRwN7jHjnKOFNMprrV1eSnJavzIe28G7naHVekJWo57DMKku8cOxHp2KOdlke1PTWUtJCIqaC5Iwx+lkpQUpCzjOKJlB3nxoTkxOMo6onhaiNYowIbdtUUE4VlFKKlJKqxOqcQ9cIOc9M7fILtQR8ZUPFd+Y1pnM153XhWDZIzXcTI3fbLcMw4X1PzoYslvOrO1IuPIiRpmjdb9c0NF4rXwQWprcYi206XKtRb46BeGiUv2BcDSQ8znqM9RTjKKJNFMpcs0WNemW5jBR07YQwMVq7dEoTc/wbmP/o/lPLSy8xh2/W8BZlF5YcFSqhqsygCfR5Y7fGaKTlW6xonagV0Ro9KaQiTKqEcYSHUNEFIwlDpGCqyD6IyRh04/arld4LbQZbaHpD01lIEYynpERXGsomMEWYYiHhGc1aWZ1W2dc57ynbHdFkhrsIRmidwxuHJMFbNWi2GKSILgjjcKIard4YuFwzhUiTJna+0K97ztYtr7ucSMTQND1dv6m6xR1du2ZzdoWznrbbYIzl9nbLdrdXCbvNI6yBxunGfvXoiv7qAttf4fsHhGni0ff+FGGYWPcr+nalRnXOf1WD57zX1nRGtK3f/NoMMc9LBY0+X
FUUs1JqjSaYLOSUOecKe35g5GNuDh15u8NuP8aGxGqzgf6MtmsRyXUh6OcbY7FWUYhv0EnnK8c4HPiDf/yf8eyzT7m7vuaw2zEctqQpsJ8CMQRudwd2h4ntOPH0bs+YIocxMsWs0oNo/ab1Cvs/uDqn61q+98Fj3nn3IefnZzx6fIG3jsP+jmF7x7C7ReKEpIArWjrRNx5vLTFrSZ3WS7iKbNjFqJ5GuMYYJfNYuxjZmY2+tANULcPFaS0Vt1vqeN9AHfo8rDGsVh2a0REV0ClqXL3XZgJJCkl0A89hbmGp6Jmzs1hLqSDpzDqWGvXWEramq5wU/czD4cAwHAhjZne3rQZfy4NSrYZLuRCCQt3DoLn8cZoYxomUIsMQQAr96gznfe345YHEGA9IKjSNU/XpkvBSERCnQYnvGxoyrvGvELP5+UMEvBUab2kbw9Sq1KxzHY8fPaRrW87Pz1itepzTLmalaBeznAvb7Z5nzz7j7u6WTz/+mNV6w+2f+jNcXj7gfHPJZn1O3wpnq8iUhTEWpsOk+uOyIjUPOXvnN2kPd4jtmfa32EoMd21L067AeoLVBgqqdqVVMrpkZyU8zRmKFIpJTGnERmGKgSklvBjEuOq41rVxGv3W+ZgpuV8X6n/jEe+siKSF7LNnKsecnhiMKTjn8G2HNdogwYpAiZAjIVPLKuooNXqlaJRaN1qpaVxTCqYYGmtZrVuMM0ivZ2+9qgqSPYiHUjCuQ1IiJiFEyNKQ/Tm5CMN0IEQ1ysNhJJXIPiVlI65FlWaSXYQ/HIIpgjVO6/CMqgs1tmBFSDFjpgOdibRdS983vEro86uGiME3LW3Xc35+xWp1xtnZJY8evou1Huc6tF7wY4Yh0fUdlw8u8M7Q9xqBnp+tadc97SazuXqXkjJP3p/IMdG4Du8a3dirOLlUuR6Zm6OKggdzSDbLHS5lHHm+UWpivep7SymqvZ0Kay6x65G7wdM9j0zmBtvdYcxE06+w6xW28cuNU09ec8PGar3mK4ZHwzjy0z/4EbfPn3PYad1umHTTPQTtYXy9H7jdHtgNE9fbAyFnxpSItUZXb3IwXht3XD4+4+xszfd/+B4ffPCubm5nG0iZu6e3jPuB6bCDFDA5YqtT23lHshARUhGyMeTaEtFUw/oiUWSGl2cpRSVLzazmE23aOWSo1+xFA/6VocFrGCJC17YLiqA14AUxFl9h5FgipkBCc47GqMF1DuyLhndObc3HX5QY5V2Dc46+0x66OcI0JIYwcH19qCWFVYq05nhzLrp/VMMbYmS/37Pb7/XYjSr5XTQrVusV3lu8N6Q0cjhsKUSVUjSWSCJJjZpt3SMbjy/5jZAxT+dbK0+0nEcNsGpav/+99zg/O+Pdd97h6uqy6rJ3pJS4ubllHAZ+93d/j08++ZhxDOy3B9arNb4UHj96wq/+2p/m6vIRTSusuohJsE+ZIUaycxTnSf6C1YMP8cOWME4Y22IlYknq1FeZ2GgcWSxZzIJ06izN6Fc1vCZTJBHShE2GKUVVGivKWj+e91fMBfdL8L7OeKOGtxRtDxWjPhBDMZa5dm7u3GHn/C4eiiFNFZ/PKqQxpsR+mnQzSEmjGQIiAdUPrqydurO3jaPrWvqzMx68+y6uaWjOO2zjKBIpErWOOExaq5pV4SeEwhSoEbbTZtvPPyPGCcmCSRZiZjpouYGNieAjK2dovPb0dFUW0chc4lAjrFoWUyh4a2i909641rz2iNday2az4eLikidP3uHy8iHr1RmXl48AwzQlUixcnJ9BKWzOVzx+5wrnLE2rwiV919K1Tc2D6+bsGxUZccbjqirJwiGsOYZiZNE1KPXnLKWY63xUKKQ6VJr5jWEi5kCMiWE3kFImhEwSi9+cc/W992kvLklWxQeC70jWM8TEbnejObFBa4JLOIoVKJT36uY258y43xGnkVTboB3GkSlErvcHxilyexjZDhPDFImpLJC8xajD0BjazrO+aGk6z+N3rlitW
9abFtcYjIOSa2u5aWSaRnKKx4J/9WB0zdlaWVmZy2Ump1Xj+CVI+cQQz2OugzwlliyujMyaxUfD+8bh5qKiB9ZQt8IZJdGIxBmD9Y5iDY11dZ+RRZM9cjQoiihqOKDKbpa20UjUWUfbOoVEW08Mrfb+ta5qPyvUvSijpaL3Us4M40gIqkCm4EONnq1hmEayZLwzeG8RSThvEPE0XrsPOWOW4zaiht17Sy7ay/fUEXpTc15Kpmk8V1eXnG3O+OEPPuTq6oqHDx5wcX6ObxotdSuFcRxrJYGq9z1/ds1Pxp8Ambvba6wIT955X/PVuWiknAp2KEiKxJAYo5BjJjZrsjiah9/Hbi5J05Y8bonGgGjrvzRfxXovUIlyX2J/i+7JU5gQYH/Y0W4b1muVwVWiYn3rScpmJnUted6vueTfqOHNpTCNcz5wxDhfi6E1EoZCYx3OGIzx2NJDFkKoEFIKlAz7GLgZtlqjmTVqcUZrR6Vu2sYYurbFO4f3a87O11w8fswP/vSfoVtvuHjyhKbvmcKeaTpQUiRNSrzobIcVxzQmxkGZxrfbLcOwZzjcMe7vtA9q9JShsH8KIRbGbsK6DOeOsyuPNRpli7BIFVIlECkFVzKGQucNdB7XOoVhX/PG5Zzj4aPHvPe99/n1X/tNvve979O2Pav+jBAin37yOYfDQNM85vGjK84vz3j3e4+1bKK2JNPU2Nz5pmZRK2yxSHYWzX0BFdJdioQqiUp/T5UpKjVXR6E2vNH2kFCIObM7wOEw8elnT4kx0XfneNfRPXqXDx++R4yB7//mbxBC4LOnT7nd7fjok0/5ySc/ZZoC292BlBKmWASDFI2kX+VIMbK9fsY0jsr0Hkau7/YcpsBHz+84jIHdfmIYoooI1A6MIuqkPby45OLBGedXa5588ADfOrpzh20MZ2crmpXFUkhZy5UOhy2H3Z4UJ2YwYa4SMFZZ92JUTlGh5toRp+Y9Tw3uKXtZL98R+JearpyzlPVpqGzSU5GJN2t4C5SkmQxb0xZFUR0nBZGi9xR6P3rryCmyvbsjxkjIE8kEGu9oV53mL8kYtEF92zSs+pbzjerGN62vvXKz1u+K4dnzgZwjMSVSUVGLmIqiHIeRFBPb3Z5pCgvCI9bSdC1iYLvfErcB71XLvO887zxe09R/W2tUpanmgZ3VgCI3Dd6o0/4mh1RjlXNitep58vgRDx8+5J/9L/8WT548YbNe03c9TduwWq2x1tK0LQBPnjzmvXff4fd/9Ac8++Jz4hT59JOPuH7+nEdPvse7731ANg1nmzVNKjwft5gcGKbEbUjK5+gfYlawunqMkcz26UfcPf0YUmCMBz3GCvsv+fKaiijm9DzUWKQc2R92TNPIs+fPtQKFK9arXjkuRgVR5vreitGe3Dvf8oiXMgtUf5k9dB/yshrpJnR3jkAWlDuicnrjGLVQvW7axWW8nWXuzEKq0mS/xTqPbxqarqfpVrTdGW2/0uJu68gpEI02UvC2xxq/wBGMA3YYVcmpWh1B63hNMZAMJRXiVFnQtc5XEzL1OKrnNde7lZpfnKE8mGX+Xr/8mzITHa4yE7uuw/um1myWZTG52jd3Fmtwzi7RaoyJmGp9ZypH4pSYk0TtqTxFnYNjUh5YKqtB1EhLUQ3tudfy/L6cClOIHIaRm9s7ZZuaTolaom2/CjBU4fPtfsvd9pbt7o79YUsIWiyfk0bkVqzyAr5iLX7jUSDVRudTSAxTYJgiY4hMMRFSzeXWqTDW4hud36urKx4/ecD6sufBg0uME4oPNR+lGSqNqtKixKRr6ajGNbM3FcI3S/58iYZ/TnB0Sj88jQ4WY1yjwXKytue63xej5jcyCsemKJweg1k2xHmvnUX9y1JmwlJqdWx5WNe9sYqS9R2rrqPvWuWIeI8xwlR75zrnatergsQjVDPnxVOsgi0xElOsJYQgJVdRDtV+v6cHLdybyxkkWnKJZa6k/hkdeF77uO+w3eMBVK10U9M59
iSlsTDr5YS8dpJz1bk3ZKPliHnpy57IcSKME3gPXtMg1jVK4mpWmG5NiZr6KxXtmZ3EU+xmzmmeLtFSkbaSi9b9TxMhaPmjvu+YOln+7Etr/OtdiTcc8WbGcSTnstRa3btRAGdbnPFMk+Owj5RsMMFBVl3SGBLbKfB0N+iFronczUqQTmico28bJYm4Vvvldhu6zTnt+gK/usSvzvDdQ3y3wTUj3WogTgN7eVZr5lYg2m/T4+DulnR9S0xqAEgZh8HalmjB06gi0iEx5UjsMibXJg4ztz7X2rv5hkyJ6TDUsgCFIq3xykx93RuXKDnDWY9zDd6rgk6MEzEGYpqIKdI0Dd7VEgepUHDUSOnu9o7tdk9KiTBFrHU8ePCQtu1OvkjFCXRZp2OkW6rBNZX0IJXvWfRvmFnLBUqtkdzvJ7744povPv+C3/57/4AQAn/2z/153nvXaS1jzjx79gX/6X/yt7m9ueGjzz7l5k5LdHaHsRorhfHX3YbWt5XE92phfWME33Y8v9tzczdysz3w6dM7ppjYHiZCzEyTOi1GDMY7+r7jww8/4Oz8jD/3F/4pfv03fxVxBdNkxunAP/nxj9jubwk2EExttFiENEVKVIGGMEb2+8AYMsk2ZAPROIoYVb2qzFup8oZp5iW6FxCMe+uEaiTqBgvVwCu0W4Ra5ufuybe+ScObS2Ecp0X8wztP17QnmTdhlv3KKRFjZclmsJijc1iNhzGGy6sLuq7l0cMHPLi8oGk8m3WPqcp5IvD02S3WW1Ip9H2LGKt1xDHp/ZMjUwzsxgMxRnaTKpTNpDSfHOIVzhYL3jm6rmG1ami9UXHIkpEKyKiKmO49MY6atpuiMvdjeGPzXYpG8tZZuq4jxsizp0+JIfCjH/2I58+f8/777/P44SOatl3g5XEcCdPEH/7hH/LjP/4xn3/+OVN9res7NusNl1dnPHx4QSyWsTTsp0BrJmzeE/Y33F3fkZpOdZW958yd07gWOX/Iqu+Z9lvGp5+SY8DmqAgBsjheLyZjZ4dhqUgB9vsdKUa61nNxttbubZuNru8TMu83HW84xzs3Kzjx6Op/ZvHyVPS9FGEK2nzAJr35QyjEUJimzDgkUkmLM5Ja/RuRyso0BmO0vss4j/Mt1jUKb9sGY1uMaRUqK6LEEVMlE40FsRhT3zeMymgGjaQLi+GwYrV9F4US46JUQy3mX5KVp4X41TOM9f2z0ANVFeceHvIahm6Op0xWLa3Q46pRFPleHmpesXNkNY6B/V6h22kMOOc5O4t4f+QmF5S4oFGaphJKhacFgz2hJ8ye6RJjldOj1Qh7OIxstzuePntGDIFxGPR4UyTFwG6/5ZNPP+bZs6d8/Nln3NzdkkVIVRjF2gYjltwmNSi1jvPVOjrq7acMY0gMY+QwKFMyhExcSkLQzlje0bQNlw8uubq64nvvv8+HP/whqUyEfGC3u8OIJYU5J605Qsnah7TkUv06bYOXMhr9F5RMVR2aOdVl5rQAFXqbA62TDWUmucmcvJrzu0d+Wr2HuUeq0j9+c3nG+ftyyjWSnXOz9T6ut56meOY1Se2LW7EWOSqnzZFc03j6vmNztubi8hzvHau+1XxfPc22HZSTURXAbMyLqA9okKHypJFQo92QEwsJ1IoKypiCN1WnwFu8d7jqDC0XbSEaVmNRpS1Vu+BNC2jMKQpVNYshMIwjbr/n+voaEeH87Jz1ak1Myi7PObPd7RjHkdubG+7u7hiGoa4xQ9N6ur5V3kjXEouhREPMBitZm+TEkTDuVMmt6RAyTdZdo/Et1llMhuJvdF3Ho0DPaQ52sb11w1HRpfncRLlHBUKNeo8iTi/OwsnnvMR486zmmqew1mKdw7ZNfUXPIJMJJXIIiZv9hBXP1fqBFqB7i5ksTizsBYqyE60Vzs86Hl5pEfaTR1eICMOYSBk2lyvasxbfWwojKe8ZDjeqMDTdMo23DOMdtzcfUUphs3lE41f0/SWdu8LYQuM9oWlx1
mONqzOfsBbaVg1TmBIlBlKYiOOoDQg6hxVZuvukEBmnSeUMQ6ztrVSRC2tJ5SUSBl97HB2cWeklpVwbSSeaxiGmsF519H2P9VoSkNNcFhF5/vyazz//AmO0mD6lws3NHcNhWjayIoVi8nGFSlna+1lraZtWdYe95vo1951PjlLTBiLCNEVub7bc3e04HJQk1fUtl1fn3N485/bujpvb53z8ycc8e/aM3TiohquYWj7j6fsNTdPyqz/8dZ48elIPSY/1t//uf/BKZraUwhiT1uSOE0OM/P/Y+7dY27I0vxP6feMyL2utvfe5RGTkraqyXHa32zZl3LiNENcGIbWah+YBWUKoBRLIT0i0QDJuEBKIl6aREDyYB4tGckuWoKVuRAuBUGNxa6R2Cxu7Wq5y2V1VrqrIjIgT55x9WZd5GTcevjHnWicysyoiI86ODPuMzB377H3W2XuuMccc3/j+3//7/xfVampg2Gw2WOd58uQJ3/3ud7m+ueFP/ak/ydNnT/mlX/4+3/rgA46nB17ejoSYOR4G9g9HxEaMa5SdaZQcZCRjbcF7S9c1SCrMte0qRI03yuJVmU5nVXN7DIGYFlSBVcf5sp9+vQ8XG5MYVjZc+UwGIfXFjwp/iijkuFRzat+mEaOtbKh8YE4RK+Ctmg00vqsgZ6ZUv75cvxZrMd7j2452s1Xv6r6rG/JIjJHDYeDVqzvuHwZOg5LmxhCIMTPmwJwTpzhzdzoQYlIBGSc0bUPbeJw3tL1ahvatlhm61tK1Hme0Nq1OWno/rK2tXCIY77W8biw5ZZxzb5QF3uYo9TDgrKU0nmkciSGw3+/5u7/+67RNy+/89u9wfaUHlq7rKCUzzRMpRV6+fMX9/R0pJb79wXtst1v+9K/+07z33rf4Y3/0j/Kt508Z5oQcJyiJ621Dzh0fv84wHyklEq0goWFsHSlOJOcozpJtQ3f9nBJnyvGeEidMSZhylgMWZHXgMkWwstg8aIksxkxJWtI6nU5AoVxfnx+CpRyw/FfWYtkXGo9uklCKGhRYq321btVOVXap+qQmxjiyH/Z41/D82Q1N11KMpViHjUbZsQWcE5wTdtuWZ0933Nxc8b3vfwAi3N4fmefI5rqn3Ta4zlCYSdkwjQfiHDkeP+V4+JRhfOD2/vfWa9n011jn6DbXiC04X+uc1mPFUYzW6IyFtql1o32ipEAOM2meyOhrjVW4q5RSoeV5rf/kopm2MSoakR5p21KY39YPJYvEqHUNtSKDftOy2fQsJIWcCtMUmOeZ+/s9L1++pm1brq9uyAkO+wOjm851bBFUSb7WtaTWIXPCO0fZanaiymKcT+7lvLCleimHObE/HDkeB8ZxQgTatuH6ZsvxeMcwHNgf7vn05ae8evUK4xuk2q4hWuPvN1s2/ZZf+MVf4ge/9MvkmIgxfKX7VQHmmBirMMacEkmPFCzs4K5r6TdbvvPd7/An/tSf5NmzZ/zqn/lVnj17xvXNju2uR24LL28/JaXCcJo4HE74LuuHqUSiDCIJa8B7zRxyLHhyhVcVfRFrsUaN4hvfUIpeI5xNDT778RNWjAYDEYpc1BYv0YKV9vl4oVdEsM4pT6JkVuNzMVhbTQjmoF0HIjRWan+5BxESmYSaZ8xRXyfWYpzDt61yQryn7ToVR5lmYiycThN3d3v2x6my1hNTjMSUmVNkLokxzjwMJ2JK+K7RRKPzdLuNtuRUVba+b2m8o3GGxgmWghDqoUhrwWVx9DKLlrseLrLNyr14xFGWw5zTfvyYEsM48vLTV+Sc6duWplGRnKZxQKn7e32+Ba6vr/nOB9/m+bPn/Op/5E/x3e9+TxWtbq45nCamOZCT4apvyDnS2gxxoJRIEiA2jMdW6+bdBpEN1ni63RNKioQUySOYPCMpnc/+CCZXB6uyhNwFnVSVs4ze59MwqKrZsiF9hh/xs5CqlvHoGW+hqq5UHVrJaT0xCAq/GQTrDNark8jDcGAME3GeiHFiiHPlL
dW+Q2No2y3bzVM2/Za22wDQ9zPWQdMIzhScyXhJWBI5nMhEDvv7ymQ7VXsxUXWbEgnTkcPDS4bjwHC8YzwdCGEk5agd8ugC7FuPNcK88VgSjRf1ls2QS4MSxfIKK5esClvLA7PyfRcy11sOvtZattsdV1c3NE271tqtUaWfxntKUYLJApbnXPQBO50YxpGx6g2nmJjGmZX2WsGcon5xSN3gbN3wSo6UnNj0He+/95S29bTe6+9cM95zKrWwnOd55nQcmKcZ57zKSLYNbdtUEkxP32/YbHYMwwzO6anIOKia02IcBcM4TOwfDvqev+ogIYJpWtpNTz/N4LweqEphpwLdPH//fa5vnvCd73yXX/jFX+D6+pqrqyu6rsPWw4KIwTqvbGSRWqbRg4Kt4vOFrBrPo3YKxNoaZYzFloJWg6tTUi2nlOqsFYLCn1bcSmg7v4fLt/NmJrXUcMvlS5da2WMTq+pYjBlKLmRb4fhStMtBVD0pp0xCVN4xo6Ug0XUm2Hoo1o+YMimqDagRq8YEk6JT8xQJU0QwdG1PzIbdVWaaI+lwghCwQRtavIVt50jZ0HYN1lk2nWPTiB6WbMYZ2DUqQuGdoXFW95a5SulWAKgkhdT162rYkBIpL7akjzgqPKvEr4XJbmoLlXaB5KwHgpId1qoNqbWG3XZDv+l4+uQp3//+93ly85RnT67ZbXpa77DC+uGMinV4a+i8Y9N6BMNQtLyU54mIYcrKvfEiYFVsxDmHtB1lzpQ4XaAxn/mfSmydiw01o59DZBhGvHPEEPHWrir+bwRcfSC+8BQ+bo2XQiYRSyTmgEsC0VXigFSqvEWsxbeZpm8IIfGj15+QknooiijBqojWVLx1tM5xvX2P589+id1OqejKngjMcaTdCo1PtDbRm4gwcxpumaPw4pMf8ns//BDnCpsNNN5Bipg8cXr4lPtXrxmHgdcvXzGeBo6nB6Z5xJqi0JUzPL3qSSnhS2DohNYLMQxYaYi5xyLMITBNMzEoIcYYQ9erOHgOk5pk24yzZ2HLtzWcUzu5b3/n++x2V/Xwom4o1pjVZszV1hOyqjyFOfD69Wv2+wP3d3c1CE/c3+2ZQuD27oFxmhHrEeP1s1ebQ++91lNTgBx4/uwJ/+Q/8QOur7Zcb3fsNpZCrIG2urwUWWUfD8cTr1695nQ60bUtbduwu9pydbVlOF1xeHrD/uEpz59/ADREXS3qVGM9YjQDysVye7dHyids+o7dbvuVBgsxBr/dsnv6lOwadjFyNQcQg29bnPf80g/+CN/+9nd4/1vf4o/8yh+lbVu2261uFrYerK1T6c62U4JUhilE3JiwfUvTbkkSOQ0HDvdHjqcT8zySxeNso0nfFClRN2itB+pGrhlvJJWMR9uO1pruT5yKy+D7RkNRXR6VuPU1BF4l+8Q18FpjCGER/9DgkEIixwwYbNUld0V11stSk82ZEFSAZ54yU5PJSTB4cswcTyM5JU77QS3kxPPk+glNF8FvtXzECzjqz5klsPGF966VbLjpW7x3NI0qPilGlnAW3t9attsOb60qjYXAMc6kWMhZhTiQQjaJLBmVIYM5RWJOhBAfrcILnJ3eRM1JnHMrTyUELaEJ6kXcNpa29dxcPWXTd/yRH/wC3/vut3nv/ff5lT/yR9lstnzwre/R9zu8a/EGGlNoLEQrbLwhNpbrvuHZbsM+Jk5DIOREOqqIxmQGMHqAf7Lb4o1w3XS0bcN8SIzTUBcLcM5xK/N5YT/LiriWUjiNA7d3ijhM06iWgbX9dTlsLj/tZ1nxj57xXl7oJQ2n/r/WmFQ0YaGch6jCCaZCXIvt2HL6VuKHoJabC9SktnQODeq2iqjr05jWK1mIRmcKv1MIctZAOU6ZaZyYhhPzNJJTUCPlCjtoa47++7ZxkBtsdRxSgoVmcOd2BanemqYaQQgpa13KieAfYfNSYoS2Vy0tUpc6vAuRRurCXJx+Qog6J+OoGVYMzNPE8XRkmmYeHh4Yp
hmxjQZd6zFebQd9reOakpCc6LuReQqENmr2FaJa2JWlrUo3+1y1q1OKhHqS9l6Z1s66qhBWHyRjaXxL03SYUvM9Yym1n9Vabf0oKRNDYDaGaRUf+GqGMZbt9TVTyhSj2q9tiBhj6TYbvG94/t4znj5/yvXNNf2mr4cSC6YS3CrsH1JUMmJR2F7bHTQry/Ujxaz2c1GztGyU6Z/Xh0sXqtSTeVna70rtKweWFiWd9XP0PQfS+ro19n62orhSTb72seg2KwxfW7CWdhGo+GCtUSOraEuufc25ZpCLN3RKiRITMQRy1GcgBSXgqNVfoWschsKuazBFETUvel82tce27xrVmrcqg6rZbFQxDmdoalXG6t8oWUz0w1S1sWX+l755JTpe7KGPPC5Z7AsKUipp4GyaoIHKW0vrPF3bsOk7tl3Hpu9UZ9zZqi+u5DVTkzChEGMgVIEYUN0Db3W/DyVBiqhVeiKXRJqU7Gaq6Ei4ECN6c5Ve+LZfLN3lsVnIrzGG1ZSiXLSTatb/s6/5R3cnst5ivSMnX9m0tfaJBt2cVEs1h7hu0vM0M86BRTvVYnC1Pqm1K8/xOPLpi9eEsGG3cziPWsS5hsZYvAgmJ8JwwtgG39zguoZnz56TpaGUiOQZIXN6GBkejuz3Rx72R2IInI4nUoxM0wlVd9ANyqCOSwDNkxtyVoWkhbU3jhPGKFxljce3DZtOg5xvVVwjCGQRvG/pmv6t18jECK7r8f0W5xtlfzuDE3Nmnheq3KNlmkceHo4cjydev3rFw/09D/d3HPYP3D/s+fijjxmnmVf3B8Y5gGsptsFYj21GPRnX4Nh5R+csXdsxHAJeJu5e3sMcaRpD06gKRDFqJzjNqvhzGg8M4xEReP7sKdvdlu1mgzeekoVpjOQk7LZPSNES0TJnpBBK0b7MttOsXiBMA+Nhz6cfhx8jE32Z0W+3/Ol/5s9xd3fH8XhUkZek0NeTp09p2pabm6dsd1d431YtaaO2kDEzjCemeeLu/pZPXrzk/v6OEAsiTlWPThGTE/dmJs2R0z4zHgvDMXE6BrJkglUN9BDVmch7j3cNOSXCqE44VMU3PdjkGpD1PSy2gubiMFY+IzRyCc2VXCoC9XgazcsQ0ecvpUwmKYw+B+1ucHp4LEUlXbGCaWrrk9MyVpjVHSimuDKR52lkcJbjfs/93R0lRuI4k1NiOp1W+77OgG8MnW3IxfHt3Xe0VYnanQDr50VVbBpHxnEgxbTuEdQ5zCmQywxJCahYfS6Xw4KmI6V2hpTaEraoBT8OuUrfVA22l6IpxqyyrwhKXjIabPum5cn2iuvdhif9livf0VuPywWbVZmqpIBrOjpvCc7QGDilyIuPfsjHL1/z+vVrpuMD4hqebXckhEPQw+k060eyjuHUk7xHPniPZrthEqlzpN3CAmQjVcKWizmrJa6qfjanhIwDTeM5HA9QMu7aVY7EcgLiZz5vPrIfr6z1gKWuuNar86JeVBH3lNbsNyWl5a9wuqBmBCzsXEMMieE00veOENRJx/pagxCpfXGFHCNg1DLXap1mtxNynInTiZIjYTiR48jxcGBfGXjTOFFSIi1SQ8s7WtopBIX4ysL+Hc9BLANiqgGEwkmmkiSEQrGWaO0K955b/t/Wjag9tNXVRx8iAyhJpSylJFRuMMbMNKoK0ziMDMPANI2EeWIaB07HA8M0cxpOTCFSbKa4jLEZVxyIQXK9cW2DbVuFpGKuP3ticAZBAwQYsAsZTU+dyrgOOKcM003f4d1ZsCNFvejGt7RtwBTdmCQncko4a2maxVsYSkpM01gF7r+6Dct5z3sffIBvWzanEyXrZu6959l779G2LX2/pW07hbeoh50KcS1CH8M4cRoGhmki6SmInBMxQgiZeUykkGtv+9LjnkkixJxIIqvLlRofOJIIyaSLViEqgadcnD3O2e7youWQv/71G9NV3th/vg5rQN1LFh1pFekxUsjZobrtC4eC1RZx0QEs9fWr+
Ahap0xVSGEaR0XA6vMfZg3A1ohKUUo1RBGD7ZXM5a3B27onLC47te58OBzY7x0hBE7WkFJinFTLO1TCJWUJpaL2p8YoQ78s4im5kvXerrf0T53vy/8sc7oGoDPzyIise1rrPZ1vaKzDG4sTgylnhJOSMVLqvFaHIzLj6cRx/8A0DOQYtATjLFnU0ABUZ5l5UoQpJ2zTIOkpi/HnMk/LGtADgurD5wVq/czIRfuVQ1Ay6ew9i/DT5SPws87/o0PNS1A5Z1ZVVTMtKkf6VqY5MYXEHDOxFHKR6lAkeNuwbbd4a9l2DY23WJNJ6cQ4FO5eqY/r9srgG8H7slqliVUvX+s84hu2O49pdgzHA7enI2GaGR6OSqzaP3DaP2jjfVD/XlMKFlbD9RUWqdAIApdmzEt9xlUvWStKBkMqYYJCLo4ihSQNU/FvvcabYuLh/pbbV69oTaFvFiELrRt98ukrhmFkfxw4DiPjMHL/sGeeJ25fvWIaRx7u9hz2DxwOR+ZxJodIg27yxjus91jX4HuVXctJ7+v1dsP1tue9pzuePd2y7VtimjgcZ4zd4FswOEU0EGKYmaaJGOqG1zRc7664vr6i9V5r5DExjQNzmBBTMAam44lxmhnnieM00rYtjX0f27Q02x1d0zCc9tzdvapqO1/NsNawu7rGNQ3XVU88l4QxVluzXDX+tnYNvBQQq5tq0zZkcmWFumrVqJKPJSdiFgLCYAtpzpzGxDAlddEqmiW1XU8WgSphuOlaNn1PCBFJmRANU/WA/YmKU0vApRKpPru7vFHyPUfwP5gV/RZHJcRUmEZJgcasiloq1xrJaDcChtU1yxVLEUeI9aAmmZgmphlevXpBCgOWanYiQmstzqpjWtNUpKirfbiLTV5JtR+/rGb106SQ5ZQHjuHINAXuT0diysRKQp+jMqNDTByGiVyg6TucU3RwWTPZnN/yz0ru+TJDURBFQhYdAFMP8G+e0ioBC1kNayRlJCZszjRiaGpSpAZ+amiv8+vpu5ZnT2+YUyR7R/GWZCyTUaLcrhGis/hsaJKWFHIctcWuZJyoDKu1llSRHUrRgyzKgC+rDGSNPRV3zrX8EELg/uGBGALbfktbxVnMStT62cbXEHiVNEMtZKes9RJJi8BCDbxB5fbmKgqQOfdceefZtD3eOfrOqZ6pKaQ0Mo2R+9tI21icvUKKh/4Mn4kxVVTDYZxn4zztpsEUeBWrAtDhxHjaczzsGfYPmrUkZV+3i2WaLLUI1sC7nPtXUQqERQ/DVvp9VQkGqPKZhYyliJBQNvHbrvGmFHl4eOD29jVPdj35arsyFEOMfPLyJXf3ez765AWfvrrVJvnTWBWilMjy8HBk/3DgdDoRRu1LdkVq3ctp61XT0PYdYowSfAo82fU8vdnx7MmOpzdb2tYR5gPHeaZpoU8eZ0BoEVAJt2mqB5+MFcP1bsfN1RWtbzCoD+00jsR5RoySmedp4Hg4cDydeDju2Ww2PLve0Vqh8cJm43lJ5P7hlpS+Or1mYyzbq2u2V7uLQLT8/CWwLexvs5I6lljmG0+haOB11WPYqqxpDJEUI6HAZFSidJxyDbyQitH57zqFJoMG175r2W565imQ54gRzbZMkh/bs98Ivuufax2Y84W++e9W3IrFDP0xx2WwFwFTM1AxiziLHn4KUfkdRsnuxggeA2JBMjEKRgoxzkySeX0bOO5vaZ1j1/U0ztFcX+NsS9sa+t7hO0d/3Wg/utPAm8JMCkEP7HMipcJUUZspTRzDwDDP3A4HNckwHsQypsgYI/McuDseKLmwE2hb8F5onK/3oiIL6wHo8QLvwolhQSsvhHjeZOfp/Vj7HDRVh5QhRkxWFrIXU4OuZ4K79wABAABJREFUklUVOTS0jaPvGp4+uSaWQjRCEmHMSddwKVivMrEuGnw0hKD980ZUB99BrRtb7TCpV7YUCpc6tIjRJ7GcDw2lFLIoAvXwsCeGwLOnz9ntdur5Lfb8Xn+G5f7o7
kQh6MKap7luQJptmXrySBcn51zOm5KhsG0c28azbVqeb1rtS3QV0iBhSsZi8cbgnaNpGtqmVfWRCi27ptFsrPEY7yE7crmEXU31y1ZXk03XkrJCTJQFodIG96Vu89keyKVGauQsp6enQrPWc9TNoR4QRciyLNyzStTbGikl9vd33L1+xfHZDdO0w1qHb1piyexPJ273D3z06Ut+9PEn5JhIIeiJ32nrlLiGzfVTbLsB21XVK83anG9wrsG6Btfvap1Nb+TN9Zbr6w3XV1uurja0nYfkgMRm29O2nepIW1fbQJToNk8Tw+lE4/16cl3iwEKEyDmv93233SqhyihE27U93jqsGK11Bq3t+qbBfIWBV4fex0Wl6Mfu57pBVdzrAqZz1lC8o2lUQnCa1IbRGCg5EueJxqjXrBWh27VkEqEkpjzjO0u/0RauMipcVkJguH9QctxxIEbdvErK4M4VwpUsw2fWNJdqV6yb/2c32YVM86gZb0FtAC+wv5zzStIRYzC5HvZFe/ShEGb9uxACMUSdk1rmizEjJPpK1GurgEbjHF3f0jWepnVYbzB26TVf2lOqH7HVFi1jLTEWbu+OPDwceX275+XtAyEkjkNSEpzPsOo/67VQrQVTLERJWFNUgx7WWnqpMKke8h9pLEnGUmJb67xVD0CqDGddKAthLadFFS+tvALWlcU52VxMCazFek/X9Ww2gX4c6caJFAIlBnLKTCmQciGWRDEG21h62+Kd9kwvxLqc0hsWoBe/lYyimMs58yynkeveopKkAKdh4Hg80TUtvquk1EX+9gvu2Y+r1ZwLwzBwOp4Yj4eqE9xhqsyjwoYKBWRE4QEEKxkxhfevWr59s2PnO77VXQNwCoGQ1Raw5Bknjr5p6NpWTZW3PaZV83rxlm63wTY9ZrNFXA/BkKOtMpLaBpOy1tHapuFqc13JVUetBaV6SsKsdlFLfSiEWLWL1UVJnKHvejUdrwuzpKQPv+hBmxp4kwhZLCJV9OEtjnme+NGHv4+3wtOrDTdXW9quxzQNY4z86JUG3L/zm3+Pv/8PfhtnhNZYGu95//n79H3PkyfPeX7zlJILH1RxkJLyKs2n8UZTC/Ud1U3qatdyfdXy3vOnfPs779H3LX3rNOBUxxN9CozO5xwZTwOH+z23L18rW3kO+rsqJ0ChZlUU6voOYx1t01ZI/YFN0+KcZ+NbvHHEKXJKJ3IubLfbrxRqXoLu5TFYaj3s8q5+FqlVmQTwjcN5w3bXcfNkRyHQthZnYYgz0+nApjHstur2EtI1/U1DbjOzG+l7z9PnPYKhvRfmMbB/feDV7Z4Uq01dKQQRskCxHryeA1fkpsK2OWdKPTwoA7rUq3wzIV6D8wrhPWJfaYESSs1uNeiHqFwA41WkRxXa1C40hglEmOeZgqxcA32/yiIeiUSbeXLVcHX1hL71PLnqaLzj6XVP27gacJSnYUS1B3RSNOM2xiHRkJOQSuIf/u4nfPjDF9zeHXl9d0SMw3UbxFhcbzAebTmcR91jslGZ3LGQ54gzYPpqDlC7KGqBnsa5R8x5OXNCpKzezdYmjDhEu6UB5bYs+gUhBkKY9XOcSTnUYJhXfg+I9uF6h+taGoSbp08pzjPmwpQKDEdeTHtCCjyMJ6aQcNLhmhZvW27aK5z3tG2ngTcnYpg0oZPaYQIX9d2sbaEiGAyNGCyVkFcSKWXu9wfc4Lja3ZKT8Ozmhm2z0XIGokY5X/Cs+fhQ80oSKOfJ5lJP9nzKFinVT1P/fts4bvqGnfPsGv1eCFmdWkpaxfJzbZVYrc4MiEk6UVY/jLUYq83tkpXEYKzH2As7Kc7MTucsJgmxpMpcrG+ntiGU2jyu/Wz6vpYFqsSlSxePotfExf1aj1xfpnLwOW9BLoQwM09TtWicsd6f/XCl9jdWEQxta1ClwFhdd5IYsB7jKuxSAyB5UdnJlcCgkJ9rdMNo2gbfNvjW4xtP03jatsU7S0qxkteUEJEXfd18rpWtk
GKF7JcsDRYSUYNgaZwqNJEhTAFrDV2jphmmPjDOOrquX9tqvrpxvn9L0P3pr2A9fyskp38wC3/AnF+pJQ9tu9L3K/R9h3GG/nikaTUTa7y6e3lnwWU9BkR1zcrVD7aYeo/XLIB1MS564iKCZHNxhefr/TqEMn76WA4DZ2h8+f6SoYtZTgql/r+8ud/UFy7rLtcbYaqRhtbaTQ2qlfxzkfCfs/zPzovuU3NIjOPMOAbGMWKdYJrFW/dcFpV6360519bzolq1ZGP1gKTe3o9cT2dB6YCqjX9pB7mW3C7WUqm17jN7vu6Zl2+8ImLnbLqWWZzDea+qX9ZqGcHUzoe6L5UFKRTl7xjjdN+95C8sAWZZBm9svm/uw2fcR9d+yir0NM/qfBbCRqWPVxGWz5Ze/vDx6O1EjW+ZrMeItgE5q6e1WKG/mJVMJSXRORXK2GJwxfPHP7jij3/vfXzK9HNgmhPh4UAaA8Flss2Mo+Hu4Z4uzjwNz2hy1rpe4/He1ptlcd0G116TR/XN8d1Mf/UExGCajmwcIQVKmrFG2G13AAynE3GOgFSXoUyYFfKYp0BKWjOwzmGs1m4KVrO3WGukMSJG6xvK7K61YWvwreet12xKIcdAChOn4567u1tizjSbHRnD0/e/RXYtpyR0T5+r/vR4Urb2FDikgI2JHDJN29JvnyAYwlT7TlOiJIX0pAqduF4zObtraXYtzXZLu9nQ9h2brqWxrhoe1MAg6t3cOIVUvfN0XUfXNLS+oXFqEt5WN5q+7THSgNlQMDy5uqFvNzzc3fHqxadvPPSneSTESNv03Dx9DgV+7W/9P7/iSf5i9/Cyuz1XJrT2EWqgzXWdzcPM1E6Mp4G+6/j+d75H0zRs2w2NVfGMvu2hgOuFaBKxnZhdw1wSqSpo5JSJFGyqc54sOSckQaAgSVsuTM2ujNUdazkU58xqGLBuOjXje9T0C6DIelDBCFjdEENSCVRTSUCanenzlUtlk4tolnaxG8egfdEx5MVUbA2OpfbOLuWmQiFPpR6QVDOAkii5+i0n3SOUqaycjxQy1gpt02G9o1SkuGla2s5TMsSgAXccJu1xL6keINRIHoFpnAkhEGJ43PB7UctV1cBWlaO8J4S49t7nXNS4Psaa8brVMEK7FWJVWzsnKhiDdeBboUE5D96rgpsKdniafkvxLRtabEykWQgzkIUpqjEL1mAbR9Op7G1MkTJPFKQSY9X0pp5p1F4WcGLJBjW/qUp+JUdKyTzs74nzjDfC85trsnP4tldVvi8YeR898BqrkKtZPmovr0osJoWX60pfIGbvVFji6abhW1c9dp7xzJxywucZE+eapakR+TTPGGfV87T+XoV7a8+bqFar8Q0matuLtR7ftISmRYy2wOSsqj/GK8wKEKwjGyUKLVJ1saq2xKQPrHPmnOnWwJqqMTY5q7h9EVyWs3E3RTePSgx5m6NQT6AVFp/GkbbbrCSjru/ZxsyT588ZixCnkfnwwDzPDK/vmGNgyhlfJRA731GoGatkMgHlfmeMRNXV9h68QbzOu/Ue5x3OqS+ws46SLdmcaz85LfaRsipqOedWCzhrVGnLGqvaz8XQS4uI49nT97jeXdP5FptNJWkNpBTrwXeisbYKHHy1keKNh3BF3n/C1niRJC0H8EXcYskQlC+w+B6rKEOsgiOlKey2O7bbHfd399xv7jFi8cbp+nQFm5Xv4IwlGXWiTWvdtqwowllYo9QauhpUFEqtldr1vqzvqHBx4j9ngF9HLrzUV5dsSOu5WUvoou/biFnhwZJNvW4NoHWj0GCcy1rf+3Eda9bPIhWNQTkdOSlXZXEMWrO65QrlUh1JVrZ6FoX0jVXoulQuSc6FeZwvCGtVlrOiILlk7T/+Skslf9g8n8fiLuQq+37JfNcnuKiYyeI9nuqaXn3I16x3+cF1LRmDtdVIx2rmuyAWi+GJxeC86H4Tq3hGUR/slPXmiDVYp45PSMHMb7qgXQAginpJUWMXU
Nc6K1AySR9MQpgZSmGaJ5VuNVLXzxefx0eHmg16o3zdbJdTRaoXn3JkToFUa6mlZgAZYRwTD4eZJgW6pIQSTMbYgvcOaTyua2j6Dt+3YNHTj1gMFiMN1m0wdoNIB9Io9dwkvO/Y7K4RETbX10zzifF+4nicmJ1RGWURzXYrnJqzbkS66CxiPKUIxugDBcIwzpQCcZGtK1oHNQZkLhiTVZ0GwQo07u0rV5VSVGptnDg+HHh4fY93HSaL+ghniykWKRaKxdkOtzVYN1Je3TLHidPxTokJkumb72BtQ3BNJaaFVWkGiRgjNBuFk23fYJoGcZ1mEkuGTPX7rTBgrh/WOrxv2e52PH/2HtdX1/WwBimoF7AgFUYuWFSTud/taHdXdHOg3xzIcaY3gZKF625Dyi0RYfpawgScQ/E52GXUxWUKE/vDkbu7Bx4e9pyGmWmKjCEzxsIxJO5OM8k2ROORpsM0G2y7wRlH5ztSyuwfbhmmiTFFJikEgeiEnM3ZJk/O5RKFsGWFuzNZg2/R7HfNys9IIhoMliB2Jt481jDGsNlsiDmocbnqRNbKjbJew6ICZhMlZt28XYuI1RKSlOrOpdKa86QM+nGamaaAM0KImhWHENFzZE0gjMV67ctvGiXB6dOcyQmiKeRsePLkhtNp5nhKCIe1DGaNUdEG0QNWjMq5LVJrqI3BYxEnqmiWg+oP1/eVcn4LpZLPMdZDRbkQ0jjD4G+8lHrYqKUSVSLURKuUskL5hapHnopaaMakRgve13hhcSJIihBrBhsTaZwJp5liLHYekaYhTx20GZtHGpM0gav90La6E+lzB0Imor3fM4YspSqG6Yp3Tvk81oG1iuIpTlp9x+SLM/m/lsBrjVOXH6uOKSXnpdJbb8ysMAWVPFP73IYxsj8EWgmIScxr4M3qG9t058DbNap7K7lWRpwGXrvB2g0iLdCg7i5GN/ftFUaEze6aaTox7G85nCYaazBZe4FNXWxL3RFkFbL3xitUbZQkFUPktD+q48XiPF5q9aAUJCgMa2vdyIG6p7xtpDkXwjwzDSog8dDds91eIwlsNtiyBF4HRU+YTbtBzLH2tk2cTvfMcWTTt/SNpWkaYqNZZ86BlGMNnRGM0FYDANd5pHGI7Sp0l1XwXDQLyFRTBlT20FiHbxq22yuePXuPTd9rTTnrYSZMASnQeo/zgrcd4jzd1RXN9op2mug2PUSDlQlThMZ7nIVTLOxj4nH3rc9WSysTvoo4jOPEMA0cDkfu7x94eDgwDFO1nUuMqWDnxN0wkX1LEo80Pabpce0Gbx1dt9Xugfya4zwzpMQshWggWSEJWnuvb3zJQJLuQjpEs8hSCsUaTK7KVDU7X+qkZ9nVn9AP/AjDGEPfbziNR4UTF36F0Z5qQNuwwqyBN+lhrjOd6gxbZSBrq9ZIyTDPQZGzcWYaFVqMQX1hQ0y6CVt197JO/ZSNEfXlkKUtplCykCxQLDc3NwxD4uWrIwsz2FQ/aCX6oFlb1jKWMU51ZLyWxoytsogZYnX6CSkQszqcPSrWvNS06/pZ6uAiC8q3vGxhw5+5LTHWEkpKFc0pNcNVvF2NQAqhyjQuOu/eOXwtO0lK6nkZZsocSMNAOA5kYzB+QEJDnraUGWyZaG1GcsEbIZd6fCwFavKkoVf/K6hLnGTBFa21e6v3ylk1tzBGAy9lCb6JM63+843HDbyFN1RiKGdhdq2N1FpNfbprnGLO2mT+6X7AmXtaSWyt9o6+GiNTyLgmY5fsOM7kILVGlojFkKUhi6fUrrGcEoRAiYmSMlLqA2XkDfjTSO2rLUuyegEjVdZ8qTBJ23Y439aFqd9b2IciZd3g6zmRFedQ/EkhpZVw9nZvRAVzmcPEMByZpkHdhrLCb9ZAKVWjFsgBhmEkzDNxDuvpdB4HYpgx4ioMZyl5WZQqXEAWQqh1cKMf06QuQSYnOqMN9IubkKnzovrK5uzd7D0ZuDvsmVLgo5efkkzmxe0r7o8HUhGyD
Rjr2ewm2r5mA0YwztJuWiyOzaahbSwyBqbjdAEHvp2Zrn9Yt6H1byoUF2PkYb9nDjOvb19zv7/n9u6Wl5++5HA8cKpWiDFlchFCKhxPE4Ujv/fhD7l/2PPykxfcvr6j8Q1xowpI4zQxB82KFt1fZdcr9Lrocn8WStVHcsleeWM5yo/9gTWQfB1j2djPJLGlGGvWdiLbdeDVQ7vUuus8zxiTKMVA0f71aZy1XBIvIdHaDpMzOasQTLb6jBp5k8DDgtAtV5IXxyOtZ6YUWEQngNXcoDgqN0iVnpbTTSn6PWM1K87l0oiiIgtfw7y/ca8v/2jOl1PpsWttPOVSYWBVIYyV3TyHOudZrRlNTsSUmOdAiGohCkWJmd7RtQ27vsc5S0xZEYM5UqaINUJvLY01EANpGgjTyDCOxCRoMf1CzKOALMhPybVcqeiqcgAWaEfLE1aqCJLR0o2pIi3lZ7gHj97HG2etT4UQLogbgNNaXYiWnJyqi+RCJHMIiRgS9z98xa///q0Kkzu18Gu8WsQ9aRLXOVFCwQwRKSr6YKzHby2NvULMlkJLLo44jMiUyTEr/JQC3hayF7q2pe82NE2H9y2WUmUUtSes5KRwkmjNIJWCccLV9VN210+UNj/PDKcT+/t7klx0Qgo1UhdKqafdmmksD/tbz3iBbDJJEsfTA0Jkd3PFcNqTjcHaTNMIOU0Mw54cM2mKjOOJw8Oe4XiAk4CBXX/FuH+gdAkjPYgjl0ipTPOQ1MJxmiLFGGTyMHt8mrntCnPr8OUK+gbX9nivsptSqo2j89qP3XW0255pnvntH36ICNyHEzc/vObh/pbXr19p2lAanG9pt89oNteEkileRdivOk/rhOfPb9jtel7cPjB/8voRamRvdsIuEF2qwiyn04l/+Lu/w8PDA7/zu/+QH330I4Zh5H7/wDRPvHr9umbCkYTjNCameI+8fuCjH73QtraoYv591/Lk+gkU1DVqnJlCVCUrZ7UXtBR81cktizFDqmvPCK4SS7SeyKrSAyjrvrKhizoRrK/Vt/rY2VdZCXn6pXYX2OXw7B19s6N1juF44u71HSkFhjHqM52lmnBkDYQlK/lRluxMs1w1aVG/1lS7xWyVeV00CNZauR45SakwT2qqPowDwzCsfteFwul0xFhHs22wXmFv13pyyYSqpW2KUecEU4glYMTgbKN3Iy2sXh6vsF7vd5Zz58cS/6VqIBdZFPmK6mUkmGNmiokpBKZ5YhhP7I97ssDNPGLbBsJIcU4JnIeTGoJELR903nK13WC9pdjMGAJd4znNM3tj6FB1sY3os27GI3Ma2Z+O3B6PGNfiumeanaNB1+aCyedsvCDEIhRTcOLIVjlBplhsMTSmUTKna2mbVjX6jaGYJfn6/ONr8OPljRP28pQqYeBc9yi5IEYf7ATEUpjmSI7gDbS+4KywNZZGDHFJHuVyDS4FeYdUhnHOBWIi5BEIapOWF0KJ+uQu6larV2O9MbJkqOtpc9GgqrCTb/BtqzqfIa6ydUZEC/fmYl9aH5ayzgufmZO3NhZmgVFd3zksPXazCrMb9UM2lTwAeUUqljeQclTG4jwTwow1XuExKWv9Y2mY10CvdavohBRMJXVNOBIx9qTsqofs5T288PtUqTCtbdVT8Bgm/Dyq/GFRIlKcIm6OnIYTwzAQYjzfK1O0Vuks4px+WLPq9r61Uc5/uFz3i5DHNM8cjwce9g/c399zd3vHFGZOp0Et7wq1pc3TVm3qBb4bTgMlJZ27rB+jVRu0OKujzgK/KvIi6+0Xc/E8rplifR7XhOrNrHfJhEs9jF6iBbL+w7c7nZ8dy6FGRNbnebEJpKhVoHee4JzajtZ0Ul2c8sXrY0UBClZUoladzagKdWcizVIaW3oSVvjy4nC1kuEqO30ReVG0TwO9yQUTTBWPKIitWswVQj6T3y72Ss6o2GPG3GWc18VF4+OSfNeP80y8ud9fljViWjTY42r/uRDJlr7rS+e0xTbRW
0cu6mfuUj2wOIsrYEoVJc65ehjrHrQKl6/LeRHKqIpWF8/UuQVK0YvlsdDXK6S+fP5Zx6Ozmr13SIVNFHaYVeHJAEZovLYXhdqTC4liAlHgmAvjXNl/RcXInzWW3hiuxFGcx3pL3zV0fcvV7ort7prd9Q276ycULKeHPTkXjlUGrG1auqZFnMU2nW5iRnC+pWCY5oQU9RC2IvR9Q+PO01bEaGuUb2i317S7J4R0y3w4kHKh8QqdpqQboJ4CVZf0zMo/19pSTLVY/3bvg208pm2YUiSejtwf99zt7/BtR9v3uL7h5mbDzc2WeZg5xkJJjrZpiKFhf9hzPB25397z+tNXtO2AsVtEnAZrs1g71uqJVab4lD1DCtggvMh7tp1n1zm8E6xv18wp101zOQQV0UNCsUJ3vcUYQ3tzRXu9IzsoXjjeH/jR7/+etoGYnk9fPXCz2/Ds5ooimf104jRnorPczzP708S4bp1va/x4ppvrIeZ4PPL69WvNdH/nH/L69Wt+78MP+fjFJ3V30JrZ02fPax+jwxpHjpEwToRp5vbjjxlPJ+IwkqaRIBP3d6PWZ+vB0cSCT8qTWHqj177Gmp8tsqgIuNrfssiIXsKpC3kmLyWSIsp1kNrjWl/zWKNQiCmwsK9TLCqwEhOjnIjOsTUO51uu2o7t+8/JOTPP2vo2DhPTpL3sQ46A2nt6Z3l+0/DeTUvXeXa9wTtD1yhB39mEEDAl4xcgq2a7qaIH0zhze7vndBzZPxw4HE8M48AcZvIs5FEPhW5Y5EEFcVUcw+i9WIU6Nhtc06mxQO0+cEUJQfYRIYbF1jTX/v6VWCVLv/M5IJVSiCkRrUrGhpSJKRNyYopqqoIxmvkagzQtTb+la1u6zQ0hZj59vWcYJjZdp4kXBTvoOsMYijXQeEzfUebEdJpU87mxVa9BW37ENpwNeqTqQqtgRsqFklVFLCc9iM0o0JBtIYgFW01HyptqVT/rSn/cjFfOcme6EdVWCUTlUlGiUcl1oxUwqaziDXOBIS8mzEIjwiZrTTiKUIwBa/HO431D0ywfLU3bEkJhGmbCHDjcvWYaBnabDbLdYJtW242ysjSNtSBGqeSptt8YoRc1WFg5qWIQq/KItmmxTQvWrW4ztsrHCYVsCqS8nlc/W6Ip6AlcHsOdyKn+bwp64pzCzDiNFGNoN1ucdbRtQ9d6iJnRmnNLj3WUnJWgNY4Mp5Oahi+KPaaKnxj0QCXqlYwxJAvRwFwKJ5Mgea0tV/cWKoy5ZA/L5g8L5V9wTaPCHa3HtB6fW9oUGI4Dx+ORcZjpX70iZIuV93j+9IZCYQ6qqmPGiSCF0xwVWnoMdtUS5C5O1CEEjscj+8Oeu7s7bm9veXi453A4YKzFty3ee7q+p+06+qanbVriNDMdTkx+ZLCvSWWk1HJAKjCWGRGDbzqt6RbBaOWGi6SktlOca1w5Z5VX5My9uCwlLh9rlgBrZiCmvEGyeqxRSiEVRUCkTnSuh9w4BX12o/pnW2vxvqeUwuSCkndyQnJEshBFyzy9N3hv2TSWvlXj+tYrucY5cBaMVF4IYIoiX3lZtzXbiiExDlMlxwVFlmKq9UwISWvvLufVj1mFHwrG6z3wXoNZbvU9GEGZ26iM7vl9P954Q5xiybzlQg9foY812730Ol7kIlc1q6poNVcTFIoayrSbDSEW7vcjIaitpq8iGovAxcJbwBjEOUosxKSCMdqfW9uPnEMDTGXf1/+a5SEomiDI5fVmPSQIQsoFK0vHxTngLvP+s6AOf2jgFZFfAP514AN0+/grpZT/lYg8A/73wA+Afwj8+VLK7R/4s1h6s2pt16jxsTWCb9R4IFXygyIv2m7jbSHagjcZJ+csarH5C5I5HAZeSSFdb3hytSEWYZxGZDCkVx9zmk+UbFQoIGVkeMCHmTxMTPGA67c478E6rm9u6HbXpOmEKYEcZtJ4QChse8X2QwjMYcY1HdubZzjfsrl+gvWd9gGXhXHZUpJln
CZCjIixmLxMfs2G6uZljOHhNPOv/Z//fYA/KSJ/98vM908b1jp2V0+5fvY+eZzIIeB8xzAGigm0aABtvIoxlADOjGqt5xti02DEqF3aHBiOGnjb1mOdIKIWX9UXTaGnHClimEhInDEtBCtEK1WGs36cF95aTzJGyXDjMBBzIpaMcYbxsMUZQ5xG4qSkr1Sb84dhwPg94ckN1jhImdP+RAwTH3/0MSHOuG6D31xxd3cH8E+IyK9/2TX+k1b9+qlu0MsDb63WIb1TTfG2bWm6jqZrMc5iG49rG/qbLd2mZ9tv2LQ94+lEThMZy2bbInnDMczEQSFmqbWMReYRbzHeqyxebalgmrRHPSmZxQio8AOwll0WExCUTSr6tdSvV5W1ColSDKs89ecbX3qNixGsd8zzrOUGLb4iYrCNKh5lilrI5cSYgj6X3QZrO816moZxHGspqeC9sp1TigynE5SG2CkSl7MnF9HMLcbK98i1WqFrXZHNJdBo6aTbdFxd74jFIq4hpsIwxTVAFDkHE5GCOA28tipklQLTNLNYPAoVUr+Atx9jvuEMNYs5q6utblDmrGRlRZ2GFnhffsLPUAKlw1eTeYXi0X3SVi/pJiFiah0+cDieOM0T+9PIMQbGnJms0ROR91gA32C8tiC1zlGw5LNlw3IVgPIYnHMrCWw54CdUOTHWZyFm7ZuOFRo3RjDFKRn4C57dP0/GG4H/Xinlb4nIFfA3ReTfAf4bwF8vpfwrIvKXgL8E/Pf/wJ8kKtNnV3Ftaj1R1GJLDPOckBCrlVTGmIK3meQyjc14U0BqY3/J1a1F2B9OEPWhCsXgiqkZXOY0D3D3Amc8rdmqLm2ccSVTQmGUQhOf0G6vsW3PzZNn4DucJDatJUwDw/1ryImmPpSn04nj8US/2fHB938R33TMeFIxiHHKXhSh6zuoRKNcEqbKbwtgF4Z00nqOPlSe/+p/8T/G//h/+3/5u8A/+6Xm+6cMax2762c8efoBYTiRphHX9AzjDMZzXbTlq21a+q4nzRlnHc4o1JxjW80GVE3pdDySEljpESyCCgKIFdVqRlbCyRwTWRI2GkLbEJ1ZBRt+fPuoMJu16lt6GogpEnLCWMOwP+KMusHkMJPmsHo3n4YTWTzzPOOMI8bAcT8wHvd8+KPf5/Xta569/wHf/sUfMBwHgA9LKX/iS6/xn7LuKSBK06s1q4w1iswoOtPStp2SyPpOIcfG4buW7smG7W7H1XbHbrPB3RnGwz2lWDa7DksmHo+M8qZBQdUAwzQOu2kRq21fuRTKXjDTrIo/pRL6quDEItW01jDlrAy11HwXpaY1w9OFxRJ5P2cG8OXXuIgSboLW/jXwGs10Go91XgNvmElhJk4DrW+4uXlC3/WqiNbODE2jcrO5GtmjYjyn0wmRTIzdCrEX0GwoBJIRpESFYKu6Usr6upyrzzK6D1wVME1Ht9sxh8j+qEz1OeSKkNX2agGxSz1Xa545wzjNtfXQVKnJBTX83Lv+V7On1ANcqSJBl3XYS0MYaxapzc86rumbNPXfL10k1MALmrRY1N/ax4RUEuA8R/aHI8d5Yj+NHFMkFEM0GnilcTigNA3SNPhSFfGyYQ5nASWdW+pB2OBdrZynRYBDSbNSMrHu2CEnPfinRIoJZ/SgaWr994uMPzTwllI+Aj6qf96LyG8A3wP+BeA/V1/2V4H/B5/jphWRStcWUlYPSk3lI0aEOUZiyirhlfUNNc5BMQwu0rik/xZ9xhUWqPXRFIlhYhr2WCbCmLGmQZKDbEnZMcYRKYLLCVO0Odo6Q/YDeR70MJBnDI7GFDpvMMkQTYWOUcjHW0PfNjTerxCXtxZnPF3b0LYtJUKeR0pStuTi+WiWHsqLw6rC5/D0qufZzXaZ+y893z9pGGPpN1dsdk9IriG1I91mox7F1lX1H62POO+1xlqpk0ZUC9UaizOGxhpab+m9VdcW71dSlcXgtIZAKmfoKdbWDKk/y6zM0OXkKBcQp
86LBvmZOQbGeUJEuHvlmacJUqSkyOHhoPKdIVFSJVSUsgaMUrexaQ6cTgPbaSbFxGazBTi9zTlfDh+6AenG45yj73tSSrz33nOcdwzzyBQmYslMJdYAkAhBdWKtKJIzh0BIQTfBClNmzplIqXNYaklH/WFlnZP1c4W/pchKUrok86zcqiVxr18ssHJBSFlrjmsAWMrHn2N86fmu0GBMSmLKFTUpy8aZc9UXVy33XFRvfBgnhXtndScKKatXYFEGcymF42mmcNTe3ZpphhirvrijbSzeGkrrquocNTMrxAzHYeR4GhmmieMwcDiNjHNijvr7Ukkq1G8W0pao/q9BD61AyWokcNaXt1Ah1lQWdSw+94R/JXv4gkyttX9z/mzOfABrLW3T0LgzyrmgIzknUozkFNf3yKLQdWEtuegcWLO0Fmr3yxv+v/WaVp51vRZjjPr/rqhThR90w11hZV3k1SXPmko6VAXFQiEVLcFoa5etJbXFkUp/rHzuFa/jC9V4ReQHwJ8B/gbwQQ3KAB+jUPQf9gMoxpKKIWaIc1boR2AYZkS02T2ni364YthtNhpc4whpZo4wTPpGU8xgIcxZDcIPkfsXJ+be0fstTB7TtpimZRgKr14VSoLeNDix3Ox6rnc9JifSdovEDf2mxfvC1gRoDWMuJBNJOWCzwRRh6x277hrjGiQnSoxsNo3qiI5XhOM103Dg7sUrwjyR0wwlrX3KhbMk3eU9U7hmqZt8yfn+KcP5hmfvf59vf+9XyPNACWNt5gfXtMr2NRbXtLSbDe40kzFkDM62NC7R+YZN49l1nue7hr7vuL7e0vgNwzAxjjPOGDrvdb9PCuMtG922sVjf4Zse71qcbbDiVtWkhVi0QFkpzpwOe07DwP39AyllXr94iXVVNtIK0zhxeDiSUmEbAqSgV13F7YsxJAz7w4lXr17Tba4I01RVxpYl+nbmvP7sukHoCth0Pd47bm5u8I3nNJzY7DZ0m477456PXr2AUhiHkyq6jRNHt2c8nng47MlB5Til8RRnSUY7Di11U6p9E6UkSgyUkjVTyhVlqcFoZdPXkseSSVGdo9d63lqDrCd8a9W2MSop6Vwu+Pyb0Jed74J6Nk/zzGkaVUmqijGElMjAMFfuRE5IEVLMvHh1qySgsnCEBWNbSokcp4kQZg6nGRElSG63bb1XV7StV3vL3Ya2ddzs+roGFS6dQmIOkcPpxMcvXjGMEx9/+pLD8VRd1zSzDanWDSvxUNd6hcmdStRO00iKEfENvqtZt1Hsec66/4Uv0A73Vazvc0lI6nNaD9DWVaMZzXibtuH6+gpvDd4VrBVtMQyBME1M04j3nhhncpopOULRDymKenonJO9oG0/bNPWjJRSwMWDX+6xIhZMFoldv6piXQ2c9hEpmaeLNJRMrp8c6bUJuauIRYtS6M4U5RT00O0vTtTSNci+crftVLp/74LOMzx14RWQH/JvAv1RKebiEDkopReQnS3eIyF8A/gLA06vNudCu6Q9SyVOxwlppae2hKFyJVONh8M7SOEsphVmW3r2l6E3dOLTlSJ1yvBomY7HikdpykpJgYiCRmLxl9hbTTIRpAIE4nhBjSfOoYhyxekDGoJq1IjijxX6sPTdjA59tL0gVlljYcmIuWjfq68p5ri4JV+bLzvdPv5kG5xqcaxWqElj0opAFYKz1s8oE1NtVayJiVijJW0PjLY3XzLfxljCbqo0r5/dkWMnampUszi+2QlMXcNRyCJXzpq+tCBosYgjVs7RUo3itI4U5kKLWiMpiirAShaSWKKS2edQWhpwrZ+CrWeO/+Iu/+ObfrXjI+TROPWUbY/DGIwK77RbnLFe7HbvtjjkFbauDqu6VyRKJaAZ87vle6mtVPWht6jxf+2pCjv6MvJQ3LjRz5Y0D4FlMY70h9b2sgbh+vYgHvNEi+Pk3oS+9xjd9sxpg/KTU77KNRUr1mymFOcb6DirzexHOL8oziUlrexQV0RAj+JhxX
uFhaxWlidHjqtCLc6ruNc6BaY6chplxCpVYFbXMsoDHRdb6rlTUXmpBXargC4AEqx0CRp13LldTLmqh+gWg5i8/35vNxQzXdbb48C6oFWcGvPY6CyJKgFtaty57x8vSdviGG0XNeEWqmf3y+c0ashFZW7+0O8bgRFvAdA8Gfmx+LqDGRe6kklovUTapkpHLGlrg9KWOvfz9j//8P3x8rsArIh69YX+tlPJv1W9/IiLfKaV8JCLfAV78pH9bSvkrwF8B+N77T8tpUJaaSqoapGYbuUKxJS99h/owiZzrv0+uhLZpOI0RMYMyz6qxtbUW541q8ftC1wrPrjpunnQ02xuazY7DYJHWM02F2xd7jqeZKc7c70e6uxNPhxnfeLpPXuKalmE4Mo5H5mni8HBHzpnGN1jruLp5RtvtFHZ16jk7nfZMw5G7ly/49JNPmYYTd7dHUpgBXXjGRP0Qo4cBEYytcFLNUKK2C/wK8D/5MvP90x4kSiGGTAhZY231Mc25QKoCBFlrGF3T4J3TDakUcjXo9taz6Vq2fcv1zrPpPU+vPW3jKXFkPkV1+BBtMvdWa70hzaQJirE0XU9bP7puo60yWQOlqQ+AbmgLVFVrjWKwBqz4miULZKFky1J3zLXskFNQr1splEq+yEbIKBs2p3RZ8vnSa/zP/tk/+5Pn/M0TFsvpwqA9pk9ubghxy/e++z0Q6D5ueXX7mpgiDoPNBYelEU82EWcbcjYUZygk2t2O3bNICoF5GCm54JY+w1yQMBNTZgyRmDOnOWrGhR6CnXO4xi3vp7KVz8GddfOpm049EC/v7TLAfYFA8KXX+LObTZnGCYPQ+qauDw2KjakCF0hVmROly5QzArN0SORUZSVzZhy13CVlOVxYfPGUZNkfI6cxc38/IvKKxlm2XYtzlu12i/eesfZmzzFyHAZiTMzJgu1IUbPhIoDVYGWN+nXronaItbjqJ5sQJHqc94j3+uzGQC6FKUW19bwQEHnb8/38+XNtVzAWqoa3zYKxWf14F6W/ephA9GC/yHmGGJhnTwgqGbmUUWKYSHEi55mcG3IONUEQCrYe6g2tM/TWkq1lWw/SG+fIDpyHznXYAi2CmwNTnMlVfS+JObfYsVgqKroTQ6qQeVaylwjON7pGsiq9LbayxlnEu1VClYsDzOcdn4fVLMC/BvxGKeV/cfFX/zbwXwf+lfr5//iH/axCqdKB+WKhmIta1PmUXZY6VM12rRW6xlNRJE6jIaZCDrWGJaaStZTj4Sx0rWPTNfSbjna3RZznKvbYIfH65cyU1OllzCNTSGpH5SzjacBYxxQmxjARQmA4HbU+2XQ45+k3sZ6QtIcNEeI8af3odOJ0ODKNI+Mwk2LAVHFta4tm4capAo1UGRw5y/f9b/7tfw9g/LLz/VPvQ1GYO6WCKXr61hP4mUq/bLje6QJbD6I1pzdGXW8ab2kbS9da+k7/7J1ou4Voa0QRNDOVc/astUmHdR7nlASztnOZcyS81LI262m0Nr+LwUhdEIuEz6IKtp6k9QErwLJAFiW4lZyiJcpfAv7fX/mcL8/kogpSKjSy5sKa4XRtR+MzN1dXDMNT9of96oilREMwRQO1HmgsmKJuThZc09BueubRaN27Ii9mWV+pQAiE06A1z1hUdEYEzJngptfKWqddoOOy4CD1HpzRjIusaXmGP3/K+6XXeKEoFAv4lT2r1m9OFJVZMx+KBl5UNazU2pyhEGNmmoISeKKqTi24gclCKlZt52ZFSMI8EeYJby2bxuOc4+ZJVoZ0mJnqPjfXunMqBlV10/ovgnqDU/utjQYFRTAsxilsrTKpspZ/Ss4aRIq2zoTV//tx5nsZS31VKtQsxtYSilk5APrCmp3npIhmqi5ub8hxJvWZzqnWe1VyFvQskmspyRrBGcEbgxdDI0IrZ5TAFWEjol0jMUFKSNJ9oCBks6wAy1JFlqXWmwEyLmvb0YLEUQopRz2EVXQJIyq88zME3GV8noz3P
wn8i8B/ICJ/u37vf4DerH9DRP6bwO8Cf/7z/EK7sJllgRH0s+oiA94qSzIncgqI1I4UybSNpcXifYtvLSEm7g8n5hARMtNcONnM/UE1tH/vh/fc3o1srzObXSSaLbgNbdvx/R/8MjEZSpooaQISwqyHgwo/JaeOQ9ZGvHhK0fqosY5sW8YEkgMl7BW+mie1nzudaK0Hl3Gm1f7VPFZmc8E6rV9bqW0ckkASRRL/4MNP+P/82u8AXH0V8/2TRsmJaTgyHR8oJaKEkpl5HmkaT993pL6jMYanux3DbuRqt8UaIYwHSo4Yp73WUwi8vn3FPM9c37xH53q6reUqdrXB3ZLFEOo2ZpzDNQ3Wu3rgWj4Wkk+mFEHK4nijGVbTtGw2W4xx6mmaisp5Gr+qApWSsdao3rSRGiRYs7O2aYhdi28aFaQwSnL70UcfAjwH/vNva87XcZn5Xt6T+r3GN9o21G/YNC1SlAwWciaMESsDw2ng7vUtOWV1awHmnEhGSKLkNXJWV5eq+NNYxzhNpCkSSCSr8HKp7FTnnNbLq/Xi5cel5dsKnRdWC7Uf72z83ONLr3ERQ9f2pKiEugUyttay7TbqPmT1AJdSIIRp0eWpAWuBGhMpKyt+ChMppfV9paJabJr565Y9jxPzPNF5x812Q9N4uu0Vxgkpi+77GeLKZVPtX5XZrChfqgvUnA9hiFSpyRkRUU5EUkekZIweFBcktga+LxAAvpI95RK9Xch7lHKGaYsGsxgix+OgXYUlYgDfTHrA8CNX+4Egnm/NmSZBLJZsPAnDHDU5G8dIiGph6qyhdZ5dq0YpI4UualY7xkCeZu7vjpAy3lgl6y6eybJ47gq2lOoGp8GctWQiii7VQ66thzTBrB6+i51tXXwqjbmc97/A+Dys5n+Xn/5E/Re+yC9b0JUVmzfLh6FrXe2L0hNoTpEYDVRGppDxjcN5y04anohlDgnz0jAMepIfRjV5u7WZ46gGCV1juLoKXF1NdNfPufnuB3R9z3d+8ZdpNtfMcVbxiOHI65cfEcOsHpgxIU5tA8Vnig9QCtZqZpZtxxjURziEgz6wpz0hzBCF1jUQwduOkoUQRkJSI3JnVLnK1cBrTNIaiCT++C+/x1/7n/55/mv/o3/j10spf/bLzPdPGzlnptOB4XBPLgpzT9PE8XRQU/Xthhwim+0VNzdXjMeZ66sd1gjDsSXluLIrx3ni01cvmaaJ737/lzAu0289OD10ThFiESSpYLpxFt9q4K0FBQV9KupBZRAWq3VmY9RGsmtbdrst3jc468kZnGux1jEMGoxKyep2xUVmXD+MtbRdS06RpmlwNZOXUvjet78H8Dd/wnx/ZXP+xliDb83OoX4utE3Dbrtl12/YdD0lZ06nI9M8k/QQz+k08PrlayiFvu9wTtWssqjzUKjSp84aWuvZtS3btuVkLPNxZEII2hJAtoZsjHojWw2+S8D1zuG9Z/E+FiMXe3zlVtTa15L5fsFq15de40YMfbshyEwiKu/A6Hu46rfqRdwI1sE0DRyOs/aN27ruSjXSKJGUZmJMTPOoHAJRopPEyHGaNZjU2vg8jMzTyKZrSbHQdy3XTxKuqclWllWnWJ2Hav80Ki6TiwZmgGIWup024yn5bQZUlSvnvCotwULKrOvniwXeLz3fCwqy3ulKyFPYflFf1UP0HAL7o4qM2NrrLNYSMhTXsnk4MYvnMGXaKIRiyaItmXPUuu8wTswh1cBrab3nuutprCd7x5gSd4cHUowM08zrly/JMbHttVUsNo7S1B7dUlR0oyI3VowSEVGUgVLphLnGKSrXpQbdFUGpxgjLsS3LF173X4NkZNVLXWp2OpZT08UCqpkKYle4aFG90gAuOGfZdB1GPLNYom1ofcE0BUxhjLUny0ZSmejyidLf0Y6RdvtcIR5naDqP2A1PzAdKOplVQq+kQE4KFQ+nU+3xqxuMUXeMheyT62nbO7UILALZG5xV/1OXLQV3llXD1gc+g
UkYNPiq3+MXL9Z/kVFKZp4GxuG0Mn4r8AJFT/NjFRlwok3wvrG42Va6fYWJrfoPD8OMNarQMwwzMaMsR6oISoVITV4IVXrvF5MArfvMGKduUEVkPVEvr18zLlFXpJQL1jbra5asbIFzl6CwfNb1V4kRC0mi1qJ+dsDoC4xLyHn5+rMEu4v9zBpD17SklNh0fQ2KLc56TqeBxiqpyFpdkcPxwBCD/oplSyj6WeFUcLUFzjrd/EyBJBBFqmThRZuEXFy0cCaTyFkKYblHS6bzk7xY3/bQ321XiBPObWshqBuQ1L7SUjfWUqFoYwvVgUBFdQTO1MKFZ4JCjwJilEAo1GfCW/rW03YNTesrylJqb6pC06b6b+eyeBZRSYo1YInu2qsTTlnWwppSrvX1vEbqZQ9Ulb611v6Ioyxp78VWtRAiF+Jl2zZcbzc4Z9i0Dc5arncbdpuOzXaD7zZgPQ/HAew9/dUTdiEhttAZV0veCVeobUQXveQUpmnmFGb2pyP3+z3T6cRpmhTtaVtECknU6z2vB8Xzs7Zcq9TsFlGFN60Bm7X7ZCHP6t9cxKwafL9otguPHHgX0+qpawijfwNizKkoQcao9GMp1YdShLbx9cGpJCSpqldiee/ZM1I2hLkQQkGImDJTcuD+cEcKE+4w4STSdhPXr0e67YYpZq6fPuPptz/gydMP8P0Nv/T0n0JsA6WF4hgP94z7e4bhyKsXnxDmmTAOpBDI88AwnMgxEKcTUjKbTU/TbNTNJySsha6rjF3b4bOt9RurfcchEkvB2wCSEBtUpcW83cAbY+Dh9iW32w3b6yf02x2gcD9F2N/tmQ4D1/2W7qmlbxzbTUsuEdc4zORwbUfbX5Nj4tNP9xyPiY8/uSVEx2Z3xWZzpfURo57KNhpyBpsdrngwhjlFxnlmfzzSNJa+7+nspvrE6hw472hzu2apMKl5QEoYs3g660OeUjg/GOWyNUafuMuG/qUn0Mi5fevrG7JuvDVW0viGpzdP2HQdm64j5sT7732bp8/eYxxG7u7URvD+4Z5pGvnhh7+vPAStYKKH2Vh/bsKWTOcc7z25IRZoRRgRppQYqzvWsqkZc95cdJNXKHppJYLFSSut82qkrHX4x9RqBoO1DVHUF3VhC4eYeDio4fz1Vc+GhhzV7UdEMF7LSd43yjEwwvGwJ5tcJU+rOYE20WF9rePe3NA0DSkGcgp4a9g2FucMvhUKEd9avOnVqH5Q6cE4n20GF5L5UputjRJryD+fBJcXVoW+Wme09XDUNi2Nb2h8swajxxhL/2up/InFCGZhHjfekbuO58+e8Uu/+H02mw3f+8632fQd282GvmuZw8xx0GTmd374CfzoBdn1dLun3JiGp21fVdYs3ifCrOIZzinyknLm5e1rXh32fHL7ik/uXpOnmbA/KYS87SjGk43uQbHU1vWLaTKiZRiDwRdXuWB18k09kNVGpc8WUjTgKsycf97diUSoqlVKllE6vT7cmvAWipiVGal1JGAh5dT6BqAuQpSaPbsKJwqUQE5CToaEJWQVwQ45ksqMbU6kXDge9jjv2IxXpDjhS4dvG5zvEHOFSFM3EgHr8PsjmFHbgoqQw8Tq05kSSxuREVXvyevpT/tjbTYUKgnBOCCRiLXmU7VubcHYgnnLgbfkQpgn5mmkjeqHWS6y+RhUXi8tknilYJ1ZUQotS+m8h6guK9MUGMeZcZxo2k3dL6qLDcvDyhoIc9aWnhANc9CM1zfNj13rpTuR1BNmWHyWYyRadXxZiGGXBJ8zLHaGyN74u2VRvtVA8dPv5VLXO5ORalWxch76tsMY/ftUMldXO57e3DB2Pc565rn2wI8Nt31P03hyUJhYcl7FAKTCf8YIvvHKAF6ct6rEZlnTgHrVbzCTywWJsF6xLK1FFdZnYaJ/xdP3OcaSxSz7Ra6wZ8oZg1Rbv1zNC5aMZ7nYz5QkxOCdrURH/TDO4hurZMK2Mve97jHOCN5XhrecRVJWhMZIb
fHSsSy3Hwe1PjtxZyh3AUcWwqNZ5/4sYPH44/xcLeDIshYWnkDXd1zf3LDbbXn+3ntstxu2fU/fdQzjgDyo1Oft3R0hRk7jxDBOSlw1Zu0YKeVMsBSR1WBnCoFxmhhDYAyREpN2XqACS1lUvS1lbeNa2iGXE+7C/1jXbpH1e2V9OvVdruI+y34ihdVT/WcYj2uSUICS11OR6p2qSswUtYaqvWz1tVmNji0GZ9fEXyHKeVSRh9YhFtqupbMNKVliNMTk8GkHjWM8jkzHCV9m5iz4w0zMv03Xd3zw8lPuP/0huydPmfb3NP2O7bMf0PRPsK7l6vm3Me2ewxCYhoGchcKRPA9UDS5MUe/MEILWbmKodnkTYgLWZvU+RSFa53q11csRUwpdD00Lm21hs6t2iG9x5BQ4PrziYeMpeSZMJ5z3tG0PpTAeR6TA/euXbFrHeBppvWVutOpRkkpI9s2GEgoPox52Xn76uiIPllw6EoUp68PwMAXmmMkpaGYaDC/KkUPXcLNpSPPEt8RwdXVVF389qFmz1mOLaNC9e7hnmgLDFPC+XQPqPE+MYa62Y2U9mad6ODocT5xOB07DxFgJS8mqicXjjHOAvfykm7/6evq2pc9bnqGHsTnM3O/viTHw/nvf4tnTZ5oIfVc9ZA/HPdM88eSq43rbcNzvebHrySFgQ0RywWIIknHOax+mWDCWGYMPM27WNpp5DiwShOqUtWxBDVIJKa4KlqQUiUaDgUUP0MvbWXsoH2GklLg/7Ff1Iy1vLFuNPkf5aBgmrRmmoGIfztVD5DgCIyHMiHF0vee9b32r1rb14FzqRr+Uu85a8cpL8cbWoFNLW1n3sEI696FahTG9V/WplDMlhHNQuQgsC9P2clMvWa30QA/GIoL3HmvtxcHtEYeYpSFFY1Fts7GNZed2bK83/MIPfoE//Wd+laurK77/nW+z2fTstptVrS3MgYeHB/7G3/j3+fTlK15/+pLfEkv4QeRXfuWP0bSNEv9S5jTNmEEV3W4f7rk/HnmYRvYpEZoe+9SRpgD2SAFi0xCsMIaJaZgpxoHrMUYNbKzRfSFWxURhaS0DitR7XH2FK9acUJe6RXFMKjGv6o99ofGogXclz4jSwksRArrIUlA/Rqn2gMsxtphCdHXRizKicyqEFBGTsV51e50TbONIWSghQwTbNorxDzNTzqhO+oybIjm9wjuLkYBlYD4d2Wy3dNsbfPc+xm5otzvazZZYoOl3lGIY/R4zq/vLWg+qjfYqWSfEGIhBa8OYjLEZiQuL22JNbRMx2ofmvNC0haYF32qb3Fu9Dzkzj0fG0x7vPQj0/Ya+VUGNOE9qSnA6cjrsCTHhjKjYec1arVi8azAyqpVhCRwPJ0Q8XT/SbYIGXiIpZ3VoicqgLjlhs7AXSGFmfzjSOMOTp0/WE76O6s9c68oFSJVwMY4jqQjOhTU7izGogThcdOnpUkpJA8s0quxkrHXipZ3mbY9VUq5mnxdf1D+qmIO1Dt80bGSLsUW9jp0Q5pmbqx3X242+xjeUUhjGHSEGjvs7Tod79l1DGk+EaSYcT2rqnhTixAu+V83maBwWQ54NyYoKimTtr1eG+NLmATalNRl2VRlK6iznhem71DDLIov5OCOXzDhPtWRQM0O9klX3t8yBOaiATY7at+1z3UtyXMUbxKhW+rOnT9hsNlirrYkpRaZxJGdl/yvXo3IG0GcadI2tLWqV/SxGs1uzQPloEJYkKmNZFoifNegK5ccD74LWLKUTOPMavo64W5+bpR69KJsZa3Ctln+ePnvCd7/3Xa6vr/jOt7/Fpu/Z7bZs+l5/RhFev37Nr/2dX0NK4Xg88umnn/Ls2XOttzqHF4PJBesdxlkShWEaOI0DY4xMuZBcg7gWsQECQCY7RxIhpMg4DYhtcNKidftFgCSTSwIMWS5boBZoefF/qgH44n+FhRDK+povMh494y1p6dFS0oczQipS7fcqSQPt/xRTNAanQCqxkm+06
O5KhXZKIiVBUoBotXZwOhFj4jRoD25IpfaCWW0HEmFKMKfC7f2AuFuOQwbX0W6uOIUtm+sjT95/jyffep+SArtti7eZuxeB4bjntL/ndPsKcsSkAaHgAWsXRSDO0CplzeKlnqIRhagKQttm2i7Tdom2jQqZv80hIFIwkoGIlJkwFfZ3sR6CIhR4eLhHrDBlwyFbhnEmF4+YjlxmFcPAamO/tSRniBZmgbGoJu4UZmJOzPNUVbyC6rPO4CKk2bM/DLTeMw4TYZ4hO/wbuKXBWk/T9vTba9774LuElPjWB99jd3XDMAycTkeOxwMf/t7vEEOgYMhFPTRTKoSYKvlrIiW0hmMdYn2t53z9Y8lixIDz2g89h5lxPCHA/f0dd69fs+zJy0Ek5cQnn3zMw/6e03AkSaF4obnqtUwA2iJhK8PTVA9pMTSVfBKCWQ3JRTRoLfKLC7Fl6UBw1ioBUFwtUSzBRoPCuUb89ocxhrbv1j7QWGvPAmsJKhVltKakcoUAbnVkUsU274y6Q1U+CUW5CZJR2L6iWo4Kry4Mb9G69mopUTTTjVE3Zu/duX0wJVXESmXdB0oppBQhpwqvKqpkpKzL/6zcdkbCLuHxR4MXAJbruIgzy3w0jQcpxDAT5plPX77kN3/z77Hb7djf37LZ9Gz7nq5riUElUO/v7/md3/otXrx4QQrayDkMR8I84r3TPLQe+kx1sjMXCIExtexYqPOmRCmppLmShZQWtrWOlBJRopb2qJoDltVlSOr7MZKrjaFKjcYwMc8qcVlyoIjWmw3lC9+Cxw28FHIMkCNUGMbZemLLGo+NW/qlwBpN50uaSanQ2A4rttoqVvm/XINHnEFgGkf2Dw91o1W1npi0X1Gcw3UtAMMpEUOmvD4yjgP97ZFhGGn7LfuTZ3vzmhgnuq0+iDfXPVMj/F6cOOzvebh9xf3LF1gyvUtYow+4cy3URvKl9pSp9QTDWqs0YvC+iif0hX6T6DeRbhNW9vbbvA8iKgQgRQPdPA8cHxIgOOcxYnl994rjOJBsy+x3THMmF48xGzITIY8kPNJ4xGngDVYYRRiyElyGcawb3kTOiTBpcA0Gyghz67l/ONFYz+k4EMYJaTLFK4yndReDdQ1Nt2F7bfng+w6M5Z/8p36V97/9XV6/fsWnLz7h1csXfPziBSEdVFu6GnHEmAlz4nRS0fqYi7omWY/YBrFff+BdNlffNHg8pSTypmWeJ06nA1D48Pd/n08++lhr6uNIQclnYoRXt6+4vbslhECsEoPdVWVDG5XQUzlEtG4pHoehqYoz1lpFauKZOb66zKx9vLKS0sRo8FWy47lur3XIx6s5Gmvot1uG4USaM7Fk5jDr91vVDY6zwoohR4ZZS1qmOhBZ0cP9dtNx0/c03tfMtBKbQIX2c8ZWWH0RbFgCbzELKUoV4NTwPWCso+ma5QaTUmaaAxnVIbZW9wxFG84HHRFwK9FteafnTHdBFC7FZB5raAUwn1GNWjYVY9TO0hoeppHTcOLjjz+ipMBm0/Pq5ffZbXratqHxjnEYeLi953g48pu/8Rvs7x8woMqEhz3zONRDi6/8gnKG7K3BuIvgm6mlEHDGYIr23lIsORvS0i9dU9RUERzyEnjPEpdrkM/6xlRoqPZUh5F5slURL+raMU4PST/PGe+Ssi/1oDP1vJ7+zHkCjKmuMsWgcos6foy8t3LEl1PPomhUamBmzT5TLrVOIsy5EHNhSgUXCxIyp2EmZoN9+SmnIdBdteyebmi8Z9P3WredBsI8kuJcswMl9khhdd1Yivdp0STNVZ+4wnc5z9pCZDPGZKwrVNln3Nmv+a0NEYNrOnzX49se33QYm0FUv9Yat56+lydLivZfN74hNxD7npISYjLj3NcHYqlPJXIJqCmEZvvFao0W7xDUWNo6zUq887VepbZq541bzg+ddVVfOmOMA+NwvqNp9Pp90+F9R9u0pBDo2o6u7fDW6UYrqg6VU+LZk6dISTy5vqGt7
ktvd5TPfJY3v+TNby9/1kPHuYUi57y2Xh2rkppUwtvxdGScxlWpCxHEWYxfDBMEUql920t7RTk/g1aDqq7Raru5QJ8XxJl1oy3V2zdTP2sb3LkV6XGGMoTP6lorYY6lrWghwiyH4GoKUQOvcxbvLL7RNei8q+43NZyVWr9bdMov7t16+2rws1YPNanEalavqlogKxpwJvlV95t8DqaX86Z7inJapKI/51avJeA+brK7jEvi3UqE/QmBJ2ft5bWT5Xg8kFPEn/TgNg0jh4cHxmHECLRtQ9+1bPqOxllSDKQwK6HPlKrlUAmsC9Gylq1KdZ2QlDC159lgVic1Y7wSWpd1scL19bP8pOfxbItpFvi/CjuVFNVfPAPFrGWWLzIeHWpOOa3QqxWtdxqb8LPFGLRWa6u7hJUawMIbN3shmOU6GWjtG4sSHbqmw0jkWEJ1T9FWo1IyJ6stFoeQmaNCbLO1nEomsseYA7/3o1eUYnj9+vcZ9h+zu7riO9/7HiVnDnefcHz4lOG0J8SZYqi9fZByhFSU9ZxFHWXCEnxV2i2XkYwSrrpuwvlE32c2G+h7aJrLdo63M4zzbJ6+x9V732Hbq5KXGgekembQXUXVnRxiqnydNTy/viJs4HqzYZpGDsc7XKv1ed85xCZKGQlhjwCtQzetylhOqdFsqmQsib5tuL654cnTp2x3O5qm09o7FimCt1bbstotm801MXuMU5jHNTua7oauD3T9yHY78eT6Kb3zvPfsOVfbLVf9FmLGi+WD994npad869kN8zzgG0/Tdm91rs/jDzgRX27iS4g47+oqt+kcuWZzh9OBF69eKKs7RXIujGFkniess/hODeBNXxGeesrNITGjrydquQdjcN6CKGPXOi3zxHTOoxYpViXu1g3JKLM9Zz0JK4LCyuh9rKFCDbOak1cp2uUwkuvXGSU7ZSmVCJMV2gWu+g03Vzs2XcuuOum0jcMZQ46hmpuwQpdS/YZTlSM1VnDeIkbr84jApB0PKWdOp0GvSbRnaHGFWmDvXIoSLq2rWXTNmqvjk6vGF86qpveS4QqsteXHRZovaucXHQo5nQOwGGU2l5K1Nh4jH32U8NZW7kvVFB8HjAjbvuXp9Y7vf+db/NL3vs2z6x3z6YAtCbZJvXqzZpiulmGcM+QUCPNAjhlSxoSCiVlLKHga09K4QtMuvsHVUjFXnfaLEpNcfOjXirYaSnU8EiRHcphIYSJNIzYrsVdK/MLB9/Ez3kud5vVBlZrx8gaj1dT0NsubZ7sFwj3/oOV0uBCYzp6NqgpVWycKVaNViLkozJMLc9KNYw5gpDCOszJG72/Z372CHDhdb6FAmAZ1K6peqcshQLej2mq0ZNnVdq1UTVJdqMsDmzE2KePZlXrYkHML01scIqIZYrfBNh3Wt4jNYNLqHqIMc7ueBpf6SeMcVsDQ4pyhMNOftMZWDKgC0ESch6pYUzeKRTBFajYmUg2kz/KhlNofmoQU06qLysVGsxiALyQsqYQ11Y1u2PQ9jsxus2G7USm/pXWgbRpKcTSukJMGpPKW5/oPuAuf+1WXUKKaOyytWIE5qMxhSEGZtwVEfG1jM1VTVt9nyVkP6AUWb1JNKM4ktlI7C0wRlmLyZx4/ll6YN9pwqpyh/NiL3/4498cuT+LFNSw8iyLn7Liun4XwZJ09C4i8ISKyOryuGfPSBrRwB5S7Ue+SyIrYLPOQqyY0tTy2Zoj5fB3n3uk3rxn0es1C9akHGvOZzPsLopxfeiwtoEvGeIE1AOd1AVK7CuohIyXmaSTOMzlG0jzhrKXZbtn0mu32fYt3hhxnYrDkFFcCn/JuFdFc5Tuz+m5LNbWwKDKnv1+DrRhXDWjOSIisk7cs87LKqV9qAawZbwWNDFQnr3wm0ZUvfgMeNfDmXDgME2WO5FiwViFHzfYroYCFqn/2AlXhBL21U4gKG9czidTot0A4IobGe6xxPLuxhJQ47A8cjicomRSC9npV+HmaIzFO5
MaxsS3OGrZ9gzVCmQ58+vu/w33bcP/ih4gYTscTlkjnLW63QZYTEQvslQhRqfI5F0JQxmRMMzlHfFNovPbrNm2haaBtC21rtMWhKFDyNof1DVfPP+DJt76vp++sSl2I1rJK1T1OKVFyQExWIoIY+maDiMO6a4y1HI+evjkyjidefPIjhtORu1ef8DqAMaqrba1js7vGNc3FoUn9UWPjuf2kx8YTTZnwecRaS9M0GGsqDO4Yjg9IGinzien+JbkI88ML4r6nLTPvX3mu3TX9n/xj5BhUfKJXBrCrLQOurZsiPUKrxKTP7+zyJcZlMCr8WGCS5TvL60qF7JXNrQSPjHGGpmtoY0t3tcGFgBlHUor4ZIjR4BtH1/dYZ2m8U6P2WsYpUnDRqViNWMiLPo8ajKTU4Fwi5wjkKpJQKJIpJO3dlXMgXmA/V606RWp70aO1Z2l5J44zadYuAlk3ScE5DVAhROaQGMaZ02lEKDTOYGyFHXMmxqC+x07FL4yziFVxjjTNjKOulXkO6/OSc6FtMlemwVhWuL1kizEtJiesKMnweBhUUCOr9SkiONvU+qhKU6pd49knFkq1AlSuAiFV+H8xvqjJjBbuH2fC19SwPshG2wuLFKyzIGrY4VOq/AM1HHCNV7EKU0jeKGEttbTO871vvc9us+Fbz655etXR+cJwfE0MLdYWctMipdA21ZfXN7S2wUUwU6Kphw9nHd1Vz2LEMoZZdctrNrccVxZYP2dtAbW1LmxMdVEzSz0z6fupSdFu23G127HddlirpLpcs/elde3zjscNvKUwh6jK4UkfVE32K3xVsfS1tlExfWtVR1PJIYvlMesJRmo2uzz8zjqMKWyN14Ueo7oixVA1QMvqXRxTIkVwZFLytcfY0zaWEicOt59ijGV/+xJjLG2/wTmPt4JrW53wFFiOnlqMj4Qw65+jXpdCggHjaoCrsLrzgncG70xlU/4sHLkvNoyxtNtr+uunjKeBeRo5+/HmmvUKqaiXsLX1cGEcjdfe2q5v1dLPR1K44nQs3H40cRofGO5PnPYDxnqadquWZmmm6XoWT1w9pUSYPcf9HZ2Dh9aybVRuz7ctxlq6fotznnk8ITlS4kQY9opWjHvSuMdZoesMG9fRywdQMjdXN3RdxzRMnI6naqrtqsOJxxi9JyG8WbP6KsYf/PM+z729yKDgfLI2gvUO2zh8q+znnCMmVihYCt6rwIO1VokmC/nHCrZcyINmRYGMaPCFgq9ELWuNQs15yfgu62H6sWQF52z3M+SfR0p6S1FVpxwjOcQVPTPWYkrtTIyZMMcagBcnoyX7qfXWpPZ0SshpUbEbJZoVI4SspKlhDgoDL5k0hr4t2PW9l0peczVIKvlT+86nOpuiB8JWdd/PNcYFuj0jg7ZovXLxL4dCqaWosmReX3DT/9JzvuKxhZLOcjRijGac1uG8X+VlqdCzdRYRr0ErF3wpdN7z5PqK6+2W613PpvM4C/N0pORAmDc1LjitxVuLtx5nHCaBRGXtN6Ltjn2n5NbTnPWgUz3G9XqpaBssZMZUDy2pQvyuktvQma4a2ege0zZsNp3a1Jpz8P6CVpjAY0PNuXAaI3mcKPNA0yS6BJSswuW5gD0riSx9b+e6l0Cp4tR1rRnFqz9j9abOIm2j/pAltlgiJbekqIILU8mkskTzQuMc132PrcbuYhUWValVhWBFIimq2bXS1hUWburNygv8sCha5UKMKuIA2p/rfKFpM74VvDdUkx5yKqQ5M42Rt80KPewf+Jv/7v+L3/7N3yDOeiCp6bpuRNU7U83S9ZSt+soGZzukZrLOe+b5xOHwinkaefniY4bhyDzMzGNQlqZTN6fb+9daq1nSpbppWGu5e/2RskqvdlxfbRX2rKIZzjcYY/n05Ss+/vgFx9PAx598Wg9hE//g7z87Cx2kRJgGoCixyjfEEFanF+eqTrNojTKnrHX5rzDuvnjxgr/8l//yV/CT9MSdUuTu/pZpnri9v
VWJyHnieDyQciLOYSX1LdmmbxqtPVbUoCg7pIpk6L0lqc6dwpamKokpSjNNE7GWSXK9R22jjFXv6ob6RumnwqJ1A/rRhx+tpKHHGJ/d+C431UUhLcZIDKGKT6Ccj2yIIWrNETWMSKItXKWkVWUvhKBs8ZT0oF6t+ErKOKOoWkGUBGSkdlKkWuetiF4tfS1/lxAlYJWz8ULKWkYAlDn9GSLVsiusBxzO1oVf21guoEL2pajhRkzqNre0VJ6V5UQz/JIpWVXnwjQxW62pC5mSA/M0kHLEn/aEGPDtDt9s8a1nd3VFyHBzdcMwJ+acmHMGMcwpkUtiSkkFctZrrCUvuSiRcC4psJCsLsqXVGGMAhSpaFLXacufLLoCej9/rgNvyoXjaWI+nEjjga7rSEk3Qlv9GlXiTjeEs/h3hd9qxXCpQ+pNFDCmZsGFTKKUiDGmGlQ7GtOzbfT0qnWSQiYoySJmSsyIWKw0FBGiZJIUKGo1ppBb0IlOB3IpNM7TVTZuv73CiCWkWaUSk24GKRXmWW+KazLWgm8KbZ9pG6FtrbYUASUVwpzIRN421Hx/+5q//n/6P9QFt3y3XPz3YizfkMtPF1+UpebzZnP/Z1sfqIv+sz//DP2cYaD6D9+8jAX2rKgCwN/7jf/fxc//8QtenqfPvomLkPGVw8wffvghf/Ev/sWv6KeVCs2Xi1plWb//xvVfvJE3puSz8/MTNwg5/6yLH1jeeMX5nstP+neXvyK/2XP6NkcprIHwsyPWzHSeZ+Y51M9zRdpUtWyeZ0IIGIragFKYxpFozWrmMs+qBxCjGnqkSkRMUVuuQlwCr2bZc0jMIeha1XM9IhZjPSlOjCHiChivc3Qahvp6RQWttbR9u9Yql/1wLURk7fL4ugKusuYrLldPBIJgnYMCvvIOtA6qynUpZ0w2tT5roERyUOOHaRzwZGUxk8ipMI4ZE7Tn3PmO6yeOZrOj6Vpunj4B1/Ds6XNiEk5hZpjVX30/qSf6KUZizhhbZXpF1s9LjdjIZ1qxymXBfOm+WZjymaZt2O42tG2rcaeoJ7IKJ/0cQ82lFKaQmeZEnBKYiG8iVqARLZ6XIutmumiuXlhD6KSR1wWpdmXaDmGMZsSKbuiJRciVLV0DbzVnVx1hOS/nWjtRm4WKQ382UpSyFuLJFf6rcoRr4Mn17ys5ZaFlOAfOo58dqG7DolKl/66kQglLxfqt3gjVY36E8VXDuJcj5/SHv+iRRymFYRi+7sv4x2tIPYCXC1EJarFKLl9W1dekuqQ5u8LkeUF41sMab8C+6oiVyOnM4AXWYJmz+iAbOJu8FzVISFn7d6cwrwG8ILiYEJH1NW8Uz9/42ct+V5lxy2HsgsT0mKOUegCQM6lqvYr13LwWgxEW3ophOcKVrPrZZBjmgBHDYZh4OE5qbwnKuD9FrPM8jIXtKfJwmHh5v2d/HDjNI2MMqtkcZuacCSkRq9VjPaKy0KkuVdU+e2BfM9t6L03t70YW84ezQ5oCduUzWcQXuwePGnhjLrw6TAz7ifk0sp0zMUNjhV1r8EbrnUa0jzDGeF6LomIB1ltIarAgYmiapvZ4WqwzJFOYloBXIjllbXRu6mnHUGEOoGhQFidaezSOVIR5LoSUsPUUuwTZUust1KUkOap85XRQj16xepsz2oOJmt1jMv2m0G6g3wmbneA9NG3COrTGkNSDOE+6SN6Nd+Pd+BxDzgEVuxAy9a+WfWRxn/Les9mqCtfVpsN7S9M4Zd6myBQKOVs2XaP/tmbTISSmeVYm+RxrkDQrXyEm7QsOJQAqlBFzIoTIaRwJMXJ7f884TauWgPOeUjOwaY6ElNQH2Wsj/+KyFGMkp6R60FWqUxZGv7mwt3ykcRmgVs73elbI9etS74vBlOoIZlqteyvAyxgTh0NARPe+tplI/Wv2qWWKicOomeucEolCt9nSbDbMUdjPwhwSH7284ziMDJNmvAWIFYAvxtQK7RIkL3qoL94LF++nFKrOQ
6qlRg24TeNom0YFPerBbUFYRPQ+fNE78MgZr8o0Llmvs0nJVsWQHOtiXyCvXJQivuj0LgF4hQkuJOxU1s5UYsO5LrwQrjCmZsLoSbH+IqWIq7FxNrKS9ZakFTifRLkglawrcJEUy5U5ai5qMGCMEpacBe/knPG6mvHKAtNCyWqg/S7wvhvvxucbpcJ9lz6658dWn8OUtH6qm6VwtmwSdS4LSevgJZNtYZojKwGQotnqrJnqHLQPWq0DDSEmppAwiiezqOmlnJlDZJi0PjxW965MdfpDcHNERJhjIqWFdFnqe0pKbsu5EuS01GLqv1+2uGwWUaBHmu+sPsdKb11yyqpdjWbkMarjlVRUkSJVI+DMKQhzYpwDi2xwSJn7/YDrDkxz5OE0KGQcJlLO+O5A07VELGN2hFTYH0+Mc2CcZsZ51pa52qtLFcwo2dS2RIOlKDM/Z0qNIYUlXizKVdrfWwxghWINXgpRVBVxOJ1I3pGrUYUUGE4nYopfaB4fl9UMHCPcj4nTKTAl/V7nDUY62gxFEnOp6b3VDNX76ptaRc5LDcbWWiXRuAaxUk+A6tZRaqTOeiRBFTULSF2kydYazNlOLudEylQyRUJclfGthXfBrNYnYoo25QNz0pOuWvxZUpopecZIYrsJGJvZdJm2yXSN0HkNvM6qlF8MhhiFXBpi3vK2a7zvxrvxj8oYx8Cv//0f1mD7ZlljCU5zTMSkiFVG+9HVIGXpm9fDvK2lqKZZzOU1O0oxMs3zWrtej9U1I/Lu5RtM7qXDQpXGVFxmnmdiNfAo6O+xfkRbhRQaNRcZ7NLXu2jpmMoe14yXc8YrcLcfK4Hz7Y9xOPHrf/vX6ldv5o6ClubiovkNq/qac66WCTV1iTEwT5MGXqdJU//hJ7R9T0rLvC2WftUQwlq1/KtowBRiRRe0FLDcE936FwvLy15zeePrc0FiydrP8pGa3LHCzNYY/oOrHV3bVbVFU/+1ohK/9fd/4wvN4yO3E8GU4BQy+zFTiHgLMTl2varoSFR1GWuNmg4UwXgNnjFDQQUxpPq0eufVx/U8e7jSaO9sXmKmpVyozlAz3IvS+VpLSVnhpZQS6SILPt8gqVBKJZZU9uQZgslQIpSAMYm2iTiX6ZpE4zON1dYha5cHXV2aUjRkHKm0lPL1awe/G+/GN2GEmPjhx7df92X8YzPCHPjh7/7+130Z3/jxyCYJKnwxZ3UHsiGznxJzKng30flA30ZaZ2m8pesczmqjuyuFIgoJGVFlqiJyVg5Z4eUFqjUsNQdT5dyk1BT7gr12xvczuQRtU0oRyRHJBpL2CEs96a6/r9p9SQ2eAtVeDIzMGJlobOFqU/Ae2q7gfcY7oZFKNci1OX6yhNGQxRGrNN278W68G+/Gu/GP5nhkyUiINeieolp1xRTxVphjpnHCtnF03tK3jutdS+NVq7fJliwZS1GNUuuQogICpSQoi7j+EnRrzUeU0dw0jpIKaT6LldceDa2x5qz9fLlAmhH1jtNMuQZmZS0rTb4qFij847zmwymqTZqd8G6g98Kza0fTCs6pWpXzGS8CWIiWUixxcExHS7YNyTreBd534914N96Nf3THI2e8Z8cQWGPeWmdN+Qz5LiyzczvK0tOof17HT/7j+vve/O4fQFoqSyJcahvQ5Tc/80K4uL4f+5vzhywZ8cXnz1yJEsnOXp5rG9O78W68G+/Gu/GP5JC32Wf5Y79M5FPgCLx8tF/61Y73eLxr/6VSyvtf5ge8m+8vNL70fMO7Of+C490afzffjz1+Lub7UQMvgIj8f0spf/ZRf+lXNL6J1/5NvOZlfFOv/Zt63fDNvPZv4jUv45t47d/Ea17Gz8u1vysmvhvvxrvxbrwb78YjjneB9914N96Nd+PdeDcecXwdgfevfA2/86sa38Rr/yZe8zK+qdf+Tb1u+GZe+zfxmpfxTbz2b+I1L+Pn4tofvcb7brwb78a78W68G/84j3dQ87vxbrwb78a78W484
ni0wCsi/5yI/KaI/Ici8pce6/f+LENEfkFE/u8i8usi8ndF5L9Tv/9MRP4dEfkH9fPTr/ta/6Dxbs4fd7yb78cf35Q5fzffjz9+rud8Mdd+mx+ABX4L+CNAA/wd4E88xu/+Ga/3O8A/Xf98Bfx94E8A/yrwl+r3/xLwP/u6r/XdnP98fLyb73dz/m6+f74+fp7n/LEy3j8H/IellN8upczA/w74Fx7pd3/hUUr5qJTyt+qf98BvAN9Dr/mv1pf9VeC//LVc4Ocb7+b8cce7+X788Y2Z83fz/fjj53nOHyvwfg+4tLT4sH7v536IyA+APwP8DeCDUspH9a8+Bj74uq7rc4x3c/644918P/74Rs75u/l+/PHzNufvyFV/wBCRHfBvAv9SKeXh8u+K4hTvKOFf8Xg354873s3344538/344+dxzr9U4P0ChfYfAr9w8fX36/d+boeIePRm/bVSyr9Vv/2JiHyn/v13gBdfw3W9m/PHvaZ38/241/RFyDvfqDn/eZzv+nvfrfHHHl+icP25C+2oC9JvA7988do/+XUU3D/nexPgXwf+l5/5/v+cN4vy/+ojX9e7OX/EOX833z+/8/1Nm/Ofx/n+onP+TZrvn+c5L6V8qcD7nwD+rxdf/8vAv/wHvP6fR1llvwX8D7/um/KHvLf/FAo//Brwt+vHPw88B/468A+A/xvw7JGv692cP+Kcv5vvn+/5/ibN+c/jfP8sc/5Nme+f5zkvpfzsylUi8l8B/rlSyn+rfv0vAv/xUsp/+w/4N3/gLxOBZ9c7dpuOkhMpBqCowb1AyVlN64v69+qbWDx7Zf0ZCAiCMfIZa9vP+ty++XW5/L6cvysGjKveviVByZSUIWZKgVxfnDF6ARb9kAI2gxRkMefNQDJIAZJU2976PxHE6M/68NPjy/IZS6kvOud/2Hy/G2+MLz3fAE3TlG6zQWRZj6Lrt/pJ6+ef5BFd3vhSBIwxqy90Qdd/KYWcMimd/1xKAdH1Y4zBeoexhq5rcM6svtAiBmsdRgxd02Gtw1qLNe6NK/my48WLF3z44Yd8jr3ljTl/G3vKu/HG+Ir2FEGMQRBdP9YCkMkA67oyxuJco8+CmLPPuAiUQk6JnBPzOJJSJKdITgndZ/ObTup1XwfOj4p8Zn8XgxgDYrC+xViHcw3Otxhj8b5d91j934Xvunze9S/s719x++qTL7y+L4f7XL/rSwwR+QvAX/g8r/XO8V/6T/9H+Wf/mT/BaX/H/csfQSn4tsGIEOeBFCZCTAzjREqFaY6kVNbgZaxgvcEaQ9d7nLMYAYtU03l0usWCGEoxFAyFQtJ1g7E10EsmS8b3wua5xdhMGW8pYSTdn0ivj8QMQzRkDJPdEE2DXBXkSUGaiLk5gc+6EK1Bjh72DTIb5N5BNLTFYYvBN56mb0Dgv/u//vd+923P97vxxviZ5hvenPO27/lz/5n/bN14DM452rallEIIgVIKMUZSShhjMGYJyplSD3liCo13dH1bSRiZUjLTODGPE+Mwcv/6gRAih/2JGBLWNxjXsNn1PPv2Mza7/v/P3r/EarJleZ7Qb+2XmX2P83D3e/0+IiJflSXIapC6RAOiB4B6DjOEegKjHjFAYlA9ZdYSEohJDUoCBBICWgIJJEpCLVowaIlWCaQuqM6uyqp8R9yXX/fz+B5mtl8M1jb7jt+IqMgIP5F5s5Q7dOL4Pe7nnO/bZrbXWv/1//8Xf/v3fsjtiz3Wg3PQdT3X+5cM3Ybf/Y3f48X1K64211xtrjXQo0mgvqf13fH+6cbPPXCWbP7v//2/z9/7e3+P8/n8i7bub+7xv9z1LPttXE8XepwLbHdbbm5vqFTmNFGp3Lx4xX5/w3Z/w8uXn2Gdx4QtGIdYB8aQ5on5fGQ8HfjTP/h9Hu/e8fj2a47331JLouYRagFT2+8UPZfXalbwwbfnB6hgfMBvr7Gh5+qzv0W3e8GLj37Eq49/k+3uhk8+/S1C6OmDx1mDJeMkY
6gYKS055b2k+em9rgk0/If/8H/Hv/+/+p8Q5+lX3u8PCbx/oUZ7rfUf0Iypf2HFC1hj8M5ijUAplJKJs35bzUk3vlSkCkYgeE91+n3GCNYanDOIEaxziDGYWpFa1iwLEYx1IEKpghYMBmdb/lOLVhcmg8lQjf6+Wqk4EE8VR7FGw7lo1ZFyImaDjQY3W0QEH3sMFWcMBgPeIhtHdUKdBCIQKzVXCoWUy3rhf5U9/2X2+2/WL1y/9D1+dXNTa62UollczvlJwI2UUkgpU0pZK1QWMAUwVs+aYgXBaMyrVSuGqgEawDqtMjabnpwKYj1iHV0XCCEQvMc7h3MOYwpiypoI+FYRL7+7UteA+1e8nv1M+Zv1C9cveaaYWmuhlEItmXkaOTzcAxDzDAj73Q0AKWUO5zPGJkyyiDgiQq6CdRbfX2PshpefR3Y3j0iF8XDQitRFrailope4BUNrEatFkwsdYi0VoVaD7Xq6/S0m9NhuAy5QxJByJhcNsFa0zNKP9lOXgAuYVsAtqClySTSFVmU/w6Z/SOD9R8DvishvoRfqvwv82x/6gowRrDFYEYUbaiHFArTg+WSJCM5aEMFZDZzWGrzXr1VpW1uKnmo168FVRQtezAo2KDSth1lKWWFtCkiGWpHqUJDFABYjpkEotMALpRRyzUgSSBr8XQ4YU/HVYDFgDXSGCmTfqnB9exSglPrdAuPXvud/s37u+pX3e6n+SinknNdKV/+7rIF3gYl1tc9N4CBLCv7k3l8OAWsbDN0Fiq8NwbF4r8HWugYjW4MYPbhE9PkwRqtxMfIvS/L+Ktbf3N9/+euX3vPaEsBSMilGxnNVxDBnRIScMlTIuTBOM2Irts4ghSnDXKDbbNjvthhf2b8sbLc33H/9JdY4hIwVgxFwpmDQ4rdWEGswwWtBFTrEOEoLo67f0G33GN9jfAfWUxFy0UJKeBp064KB6hGOrNWueR+/fq/98xTx/pD1KwfeWmsSkf8B8H9DO5r/y1rrP/nQF6SHVabUTKWsWH+FJxmKrBCDcR4RS3AG7/RiLehDrA3Ff6LUkpZBiWSMiHYlioApWhkbwXUeMQFcpvqCC0LfecS0i2U9fmsI1ZFKxc2VXIWODVEC4kG8gEnIdKTGwhQL1SaceJxYJGkPGitYZzAIxlqcdT/3yv669vxv1s9ev+p+f4fgQc4ts2rBr6IB2RjTEq3aEsClUlbYywdFfqwRzfq1FUUInuB8u6/1/p5SZk6F0IdWzS5BW3vMxoCzDu8CwQdcg8K/T4H3b+7vv/z1K9/jVAqVTCGVDEBpd3EumZgTOUVynLFV2A4O6zpiqpAgVsv9mLEiXPU7gu/Z7q+52l9T8oGazlArxlhtCRehVIP1gW67RazDdlvEejKGXFsg7naIC2B8ayUq6lQaP6g1Glv1+lNdFD16F87Ee+934eL8FQdegFrrPwT+4TO8jnWVkiklUUqCkqk1t9xEND4CtMxdxBD6AWM9vTMEZ4CClESplZwquTylTF2wfCNZN7FWpIDBYGzBimXYDfjgkQ4kaDHhOhoWHqAUxO8w20jOMEehIGS3p5qOOc1McSKXifGUSXXmXCbmGuk7pzdhNXixGCc4BGsFYxzO+L/0Pf+b9fPXL7vfC9nvu8H3kjYbhZVrJeeyJonLCRDjTEqRnDPWgLWWYfAaSNGMvO86drsdzliC7zBiuHs88nA4Is5i3ZOgKiDGtBaMp+96+q7Hu4Cz7tIj+56sv7m///LXr7LnubbQ24DBpTAUhDknxnlGqkEkEDphHzyh2zBPBQTGajgeI51z3L58yc5Zbl9+Qnz1BXG0nB/ftXaf1qYFDzh8v2Fz8xLrA264QlwgFiFWA9ZB6BHjENeBceSKPk8pQS1IrRipmLWXK08q2bpEWOQJEvWcAXdZv3Zy1S+zKtpfzTlTGhxXSqGKpUpFKhTRrMVIY9aJxWChGmoxCyGOXCDFTGq9CIWbE0Ju6HNETCFnKEWrT2ctzjs22
y3dZsAODttbrXSdVi11jtRcsKVgcyEXmOdKqUKUgSIOOR1JKWrnrBikGPIMMVdsKcwUnAheWqUrWqGY9UL/zfrrvC7BF0qtlAKsrcgnTHyBUvXUkhagS87krBDeNFmcNRhTscYQ56REKpMoKVGsoi9iQGpB2glYpVJNY0IvpEMEKw5vPd56nHU486RlArSjZ2k5s/bV/tJ27vmWMcKm91h7Id+sfTto1U+7VtQn7PN2TSrkUii1YBb48clnaR+rsoLlnBbtxS+HeLvO1ho9s5YX+IRFu7Dd9c9t50Xa/VPWry053PL73m9Qyvqaqcv7WVpo+n33D7+Q7PZLrEtiWWq53N5GW24lZzAFKY3PUNvdJfq51ELMYMRSxIL1uK6n32yBM+PRUinKwUEDfaZiSiXmSjGN54OhGodgwVowTokS7VouCW5pcPPCkdBe7ZO2nqz/99Pv9Mk1ea71vQq8oH2weZ6Z5ol5nvTG9x2CIVelmlsRnLcYcVh6DIE8icbWEil5JpXM/XRkaoQW7anpoSXoRaNKI6F4vB3Y7Qb67ZYf/O7f5urlKzbX12xurik1McejVuPzTM0JL55gPDlVxjGTU+FwHpli5Jsv/4zz8R5TwCZHSZXp3cjjqTKGxKkb6YOnvx0wzuNFYfL3K6S/WX9dVwOLqVStCGThJui1TaWQVR/UvlypWbkE0zQR40wchfF4whhD13mMNdQUKSnSBUc5jXjnqNsN3jnieCbNZ8R4qoVsDUVU4iZ4HJ7e7Ljqb9kOW3bdlm23wYrjIlpq65kIJH+Va9N7fu9vv2a/66FUqBUrFm+d8ilyppaqwbUUjHX4rqNWmGIkl8zpfOY86T4PfcAaQwj62fqAtY5cKnNM2gozSqgsJbefn6k5YkS42m/pg6d1lxrq1pKtRSbW+ADSUIpa4TyOxBRXEmipMOeW1BlLFYNYh3GBlDOn00gphc4p58U7S/AOAf6D/+f/73k2t9bGgdHXLkWDnKASuHSemLsRCQZreorMSI7YkjC5IqWSExxmIRXDRCC6gf7mNa8+/y0e3jqOhz8jzzDNmZQLc87EXLDR8Fgfcb5jb3YEOqz3mDBoj7ERapfgW0phnmf9SBGXFFG01q6JkaxJ0OXsre19/rrW9yzwKrlIK17NlIqKjdeKYSGjaKZuNNMplpKBJApTZ0ilMk2JKUVyKeRasFJwNi18KKWgi2kELXDeEUJgs9+zu7llf/uS3YuX5BIZx0dKSeRppOZEsB3B9uRUCeekTFX3gBnP+NA1GBtMNZhiKFFII9Ssh6wtVvW8zmDEtn5zedbgKyI41y5xy+y/+7M185N2D77/d+YJ+eanM/Wf/xqfJPCa+X/nL5evlKI12fJr30865fINvF8VvJeZynfz1O92Zn7+8/M0gc2LluwZVn3vz6o5f/o3pVXC+qXyXuAtOZFTojSSimlVlTWirP6SMKUwi4BLJOcwpVDSTC1R+QrSnpcFSsNicBhxeOPxxuOMw4pWvG033vv017POvSxrDftdz+310J71ijMWb7U3XqIS3mKTdlnvCX1PrTDOkZQzzlasLQTv2G16rLV0IWCtxfkO6zy5FMY5AWCs08Db9Ki1ZEqcsUa4vdkxdAFnNPAuXBShtdea4iKl1LSmmogfTsIUHaXofZMrzKlVgtZRxSLOI74jpYy1Qs6ZwVu8MwRnlZ/y3L38JQsQqKuTwSVg1VKbmqS2c1D/W4WXNPJV1SQUoYjFhp5us8efNmD0veWSyaWSciEmIUsmTwlXLX0qGsi9EmMvAfd9RGIpvJaqFy5coQsG8i8ltT77+n4F3gopJaZpIuWspBC5wEDLZyvaqK/Fcj7rYSXJQhYNunMm5szdcWZM40oECA62AzgrdN4rtOw81nq8H+iGPd1mTxiu8P0Vvr/Bdy9xNeH8hlISo7kjxRFjejA91gmDt6SUeTjPpDJScoVcMBWC0WrFS8CRKXNlnBOu6Os1ZZEkVXgChTzHurm64d/6r
/9bq270fD5zODxqgG1BdbPb0/UD4xQ5HM8XWMUIH7285fb2Cmeh9xVrK5uu4GzBiH7UKq16WwsL4pyIMZFLIcZEUT6FEh1aG+B0PPP1N98S58g0zqScVQbTBWW2OwtU5lZ9zPPMNE0KnbUDbulRLixdY4xKyNCWRa0q3VnkPPo1moym/fvGZP/H/8nvP8uet61oVaM+yUWxZWVX1kpOaYUEF+Z+iTO1FGJjPteihxdAHGd9v1QsBXGWmDJYy5QK1VlqTfiasdbiC/RV6MUxmEDnAt73WBcoxpLFkIHcEgHLQirRj/fMCv6aroXAVkvBWocTi7OWYAPUSiKt1dA0z5icmVOiCalBhNB1iFV549B3jeRTFJWLF+5JFQEMrmqlukC9KltUSVhKiVmgWgNOiZSd96tJigBzjO1+L8xRA06phoojlsQ4R0qFWJSMF5zHeUsVoEakJqgzUjMioTHX+WkG0YdvLqZor1QomKbEcMZiMdxuNnz04hbfb+l2Lwih52UfCE7oxLB3FUvi3TFiE8QYOadMv7mmdz/iGM8k2TKXzBQnUkzkaqAaTTDHIzVH4vmAEcFah3TDe9D+UvmUWlTmmWbmecJbt0r99BsW6HkpQn7+rS9PAvqHru9V4K0oA22OUSuQZSPamzWtb2GNwxpProZ5rsS5YLJBCuRUmadKTJnjIXKK83ohSif0QatL5zyddxjjMcZhXcD3A77f4Dr9sGGH81dAxtNRS2xuWoKRDqRHxBH6HpMSmK9JLTOlaKbnjWomHR5TI7kFgmQKkkUh79ICbzugn2ttt1v+a//lf5NpGokx8vBwz9dff63B3RqMNdy8/Ijd1Q2PxzNvvr1fb0prDb/zWz/kB5+/pg+wGyreVV7sI50rWJsxkihVyFkae1Dfyvk8MY4zMSbO06xwXMvWY4KUK2/f3vPP/+CPOZ8nDo9H5jmx2Q5sthuVhHUeqJzGEzFFTqcTh8NR9dZOD5UudFp9OIe1HmstIYQG9yV1xZlnzuex9XqUfamQoGpanVPo6bkCr673q8enMOJPZd8NtstJX+/6b3IhJ/37mLXnGIzgBYzLpJzBGGLO4CzVotVUKHQVOgydWILRvq5zHus8VYTS2PwFrUR+xiv/uf/912a1PnqpVVEt5xqr22sSViqShdJkXuQMMSLG4vseEdt00BbnLKELrRUwklMipkxMBTEWF4IWBkZW5cV6bjX3vFwyMYHQkj2rP99ai2suUKbJH2NKnOdEWgNvJWUYo1bGqerPDqJmP5ovZeWw1IjUjBGPNXxHGvM8S1CtuaE2boEiM04MTuCq73i539MNO4b9Nd4Fdt7hrBAMTBbGuRLKjGRDzok5F4Z+S7fpcHffkKQn1pGYKinmhnguCewIpZDmEeccNW+a3E4uvfXWTy61kHNa9fTRx1bcrClyu1a6pz+Pavg06P4rF3jhfTjUNixmMcJQeED/LtdCLIXzPDHNsPU7+uAwzUWqmIbpJFl77V3n2O97+uB4+eKavgvkrM/cZtcThoDrHZhMqTMpnpnOB0qNxPmenCceH75kGg90YU/fXWFdR28tIlVF4d6rZZqxWulIRqTiHIRgKDVRp0RtdPsSDeKV1awMwZ8CZz94R0vT3M3zxPF0oNaKeIexjjBOGD9xOp05HE8tMZiAipPE4eFbdhvHyxvPprf0P9xit07lWHah5jdpV2PleFOpTnDG4kwg18qU9cDIxZCrkonOn73mfBr5CV9zOJyw1lBKbl4Ryn50rrUBnG9mEBpc1Y5O91nEvBfcgGZW0SrvVc6jyxgl07Xz7/krAi4P6urNukD2T76mPeBW8dYFcl/QnSeQfkvIpLQDooApopBlEWxWZzYQXIIwFbxNyDlS+wnrOoJTLoMLAesdGKE8bQTU9000/iVck782S++Jur7LUgoxxadnbkNKmmqCS8BcjU0W7bNoO8YaA9aS8qVaWtsgS8tEtIWl6Iw+F10IOGtW2eNis1jb61taUwsVj
sZBWdSnGIt1QdtuDbGyzmGdba9XkbySO3LOOGt4+kaf80zRO61iKiy2i9YIIaiOvO892yHQbzqudhusdXSuEflKxlLZ2cqNL1SbkTSS50DdOEzXY8OGbriipMT58I7M8rw2hrOAtZVgK942pQqZRZKHKMGwRRMqWvWO4wkrQoozpXTKbm7XS57c/T8vsD5X0IXvY+BdDyrWm8qHgLGWUnKDC4W5JOacuT8fGMdE99LSX29J0YJz1DkjR0ONgnXgHWy3gY9fXbHZdPzgB5+w2215PIwcjiP9bstw3dNtAmIjuZ6Ypgeolnk+8fjwBTGeeHf/Z5zHB672r7i5fk3f7wndoBmst3ShI7gOZzy5KtmrCoTOMCRDThXSrD3faSTaysZ3OOsUpn32hn7VAJQmTucj7+6+pdSK6zYY58D3xGp5eDzy9t0d8zxzuL8jxZkv/uxfEFzl9nrgR59fc3uz4Xb/O/T+CiuAZWUWQsuEUWJhEDUaYdNRKpxiIpUCJoAJ3O633Oz2HE+jVhy16QJLgqyZvhGD9x7nvSIh84y1jr7ftIArrbqtq3fxkrRN0/jEsEIJK0vPegm4zgne/1ST+IPXcnDra7sYZXzXgq4WdTtbA0SlVU4GUwtVFsOXVlkslqboa3ct0Loq2CrYanAzhEMi5Ih5OFONxw17Nl3H0Hd0fY/vejCGTMUuiW5dmNXy/rDQv6bBd2WzNvMS0KozxaT7Z9ylPWGt9t0btG+a/MpYqx+tH1tr1faGGFKuSMor/IwxFwaC6PdbAW99Y1gHvLNKeLItVS1Qm+vd4jegAbui9aSS4zKADbhOzwiD3lO+C/jgtZfrLTlnvF3c0go513UvnrPHa9qHyMUByhlhGAJd13G167nZ9+x2W169vMaIIUe937tUSEB2lUNXiJKY4yNxLNTtK8ywJ+xu2O0/wlTD8e5rkjnhncEHQ61a1RpX6X2lcwVnMoL22auoD0JtPIdaC4XCnCYeHx/IMTK9uCUPvdoL2/a8/oLt+W7F+6G7+b0LvHAJvnXN1i4HqrTMc80+TKVKJebIOI/kNDOXSCyJ0rIfzUC1J9h1G/q+p+8Hur5nToU5J3ywOCcNnlESQM2RFEfm6cTx+Mgcz4zTRIyRnCO1JHKemeczYIjzSEoTOcdWwbRKS8BZQxcs0RuCNzgnUDP1ycFQan3Sf3imvaSScyKlREyRGGcKgu2+08xbCqtS9YDKiZpnZiLBZU5Hx9CJQqK1UJuNZkUuKHlWCC/HQo5anhmjlm4GJQhpMl9xzcS/1sp+t2UcJ6YYGef5okNtVcdixq6Q8mLAbi7EsFrWonVhiD4NwqqBvQzNUE3r4uoEv67o8t0M+b2gu9zfTyDRWmqzrHtaobS6tx04S5BOKSPWkHMho7IMMUALCMREPI3M3hNPZ+J5IncRckFaG+Rn97M0+D5N//46xt4KpFSIqZBSIZl86QEukKQoEmasqHyxZpUiNlvZdYeMYNthu/RkvXNrwCws0p3mikej7MgCYT6Bn3m674vY6wJ3mp9RVa1tSzHKVil6H6SY9b1UfU21lMaL0Sq7ysXt7FnVErLcoUuJj8LsxmCdXX3pnbOE4FUyZPQetg32HSpswsSEJdVCzVmJWCJY6+mGDTmecd4rh6P1yqV15jRQlicfGTDr5q47KHVtLcQYidatnBfb5KjLdv+83OTXYTLzvQu86licySRiUb9OksfYut781list1RjCH0gVuHd6Z5vD/eNJap9srlExCp1PDjLtr/i5e2P2O0Grq72bLYB4wyuy7jO0flCcJnBJjYmMsdHTucz9w8P/NGf/DFznAghYS2UlLFlIp0zb04TOVXefvMtp8ORx8c3jPMJasGJumHtN4GhcwwWOpMUCiozMWZSdqTiiCkzTfPzPiQ1M41nDo/3+nF4xFjH5uqGEDp81+FDjwsR5x25JIV8rTAdT8znB0wZuOoTFiVC5aiEHuGJe1KupPNMTpnD/YHD4wlrLM4Hj
LP01xtC50g1k0vBiWW/6ei7wO/+7m/x2eef8uU3b/jq6zd61jUv4mUKVd/3KAyrRiO0YGzENAmayjjGcVoJaktwXgKucqqEYejwXnvIz0lmW7f8O9fvqX3k2t9th0Fpk65y1Ht2YRqrlPRialdRGVIpmZoTNVa8ERg6OmfZhLAOOSjHkThF7v70Jzz2HfNxZD5O5I/PfHz1irAD118ROtsganQvnpxYfx2D7dOVc+HxcaLmSpxhGLL2GL1VX3fbEJCuYjGkKTGPYyMLFozoZB3n1PPaDh5rhN7r581Gk86YIqfzubHUG+vcqLeAqQKlJYcZSjVQXWOYL1NyUISjBfeCYFfb2AXOzivfpZbKPM3kUjidRwC8dQSvHIeh79SIxxasaUzpeX72/V2B9vY+sBbXBXwXsMFjvMP3HcNui3MB7zeIuYSb/v6BaL7iFAtlqozziMsJS2WzGXj9gx9yfBh4ePdHpHzEmoyVBZ0BMYVSZlJ2SJmQMiNWMLasdPEnHVxSzhyPR3LMHI8H+hAw2wHbB3036zlwaQkt6/1JY8+DkH3PAm+rAqQh81WPntoqrKXSRZ726ZQkNE4j53HSikYuwm5aFmmNxVpP8APeb/C+04fKe0KxWG9wtqpZAQVLgbKQoc6cTmfmOLX+m9FebMnrYR9TYTw/Mo4n5vlMKW18oLYoG8xkKJ0j9a7luUVlAk02VWqDxp5zP6tWvDHOmvHFiG1V1pNilyWDVVF5o2ss3zcb1ZfOs0K6hdaDan33JxVvSYU4JabjpFmvLyrT2nZYb9s/1MPGOrUyvNrvCF3P8Xzm3f2Dvh5ZfKxVb6k93ebqVTX1NY2ZLM2AZN3LUvRQbfeItZrVimle4Fb9jEtL0P6y1ncdrZ62BhcWc23TWJSqeqmL9Fpqtp9qZa6JYoQ56dytzllKdRqYY6KUynw8ITEyPhw4bx6Zhi15nCg+aIXR5FyrVK81dqVBdWtIfu7ux1/CqhViKsyxMKeMSxnE4DFqMCJV368BcUBEK94qpJSaxlaNeYwINWuyp2eJShCNtRgDcZ5Ws41lcg51gSWXZ6S0rX5qWbj8G9bPC4H0Uha369P6v/pcavtEE7FKtoWSK955utC3qlcw0jgEz3yPr4hMrd8p5/UhW87uAtDOaNd1WOtXeP8cM5u+p0rCzQWTcyNqKaen327J+ayB3DksFUOmiChi0/ZmUS8sCoFl76p85/U2El00yvtY2lBLUH16jz8NtN/92uUnftj6ngVeWXsu1dp1AgtygeMAld3kRElqD2lrIafIOE3t+iuEY1u2acRijCPFwru3D8xTZLsziOhhFhoj1otKNso8EU8nxAz0oWO33fHxx58S44w1CUMhz3D37R3znLh/OBJj4ng46ti24yOl6jSM5QF0VmsLs9kQnKXkTJy1uo0xtTGHBWPcU8rLB69SCuM4cjg+cjweOB0P2o/6yuJDx9u3b/HdwHgeeXx8JKfIPJ50PmYa9eGtlTkWpjkzzZlxzvQ9+nPyJZCoqtogGUiVeZx5GI9ghHenE7Z37K52bK93mtREPaS2g2cYPON4yxTVkOBwmltPbqaQFPafVeN6HkeolWHT471vTHcBq8zrXMqSwmGsJwS/sk6Xw7AUQSlJ9tn2Wlddre7gO8F2Ca4L9Le0T9DEcHExEy6wcqVSzeVAQyq5VuYMqRYe4ozPQqyJMc10znHVDVhn6a3DVmF8+4BJIKfIn5qOzX5POUxc3d6yudqz2V9pEHG+HVwteDxPcv9XskREfalDr0m29zhv1HjHCi44jIE5RWpUdnDX99QCUh06p0avZYqRc00YI6So/tn9ZkM/DATvubm+IpfC+awMfGO0D2uNo+8Czhg2XcBbizdWZTdGCG6pfHWfS4yUlJtz1uJEtuAejXxXi8LIRgjGr73hFNWZb5oizlaG3uK9afeTedbsSV9Ns4hppMCaEveHA+dp4g//+E94e//I7fVL3nz7wDBsef3ZbzIMO66vr
9ltd4Su42a/w/vI2/FMzRmTM2maAGG4uqVKZXv1kjidSdORPB2x0q6hc/TdFt8NiN9grHJHalFxXBVZg69pyIKiTonj6ajXwhmudsP7Pd71s/wU9Pw0x/jQ9T0LvI1l+ITY0MofWOC6urDVoGS1JBNUjjHPkaV+MyL0Rm96EYsRq/DTw5GcMuNpSxcsxtXVPs+JHsMlRpJMmL4neMemH3hx+0oZsvFESZEyH3g8PnIeR958+y0xRuZxUo3mPKM9B6Ad7M5oFhqsZdMHYozc3+fWe80gKlNaM+RnWrUW5nnifD5zPp8YR4XFYi4Ya9X1pvU94jQtpatmkSXpQ14rMRbmpMMe5qjOS+sY6YUAJE3yVQVJEE+J+3eP5FphPCGd5RMRNlcboFBLQsQwbDqs6ziPe8YonM6zEi6i9ulrrdQMOSrB6vDw0KwWM7XvcM4RggMKzmrFnor2zsU4fLANqVgIWbadQ5afLyD4kD2/BNWFkbzev02ju/z3QtMwTTsqTwLe+mOWrNzqxC0pWuEIlZpmrEAuiRgjG+8JYvHF4X3CYogPRw7nSD3NfFktm92OIfTkcaaWShgGDBUf3Pq71qqGv57BV+Vivg1C16rJOYP1ZtWJPzWIMc2VqlYoSahFGpKTSUmRHCOQk8EawXkPQ9+coQZyzcr7qJlFl28NdG1a1ND3BGdbcqqJeOcXYwut2ua88CfaMPgnQXftT7eq0LSWi4ghxUxMCaoQ50x1MAzaY10Y0s/avWoQ18LGrgUKicPphDHCnAtv3r7j+vodj8eR3f6G6rdcXSe6zYZdk/3tNxuMzAx2IkrBlEyaI4IhbPeUmhm210ynR8aSKdOEkaYccZ7OD7iwUSN9EyjSJhVVtQ5mRRIWrKCo4cn5zMEY9ruBlIv65C8NZP2mn7rnn7vN+70LvMBlAxokqnIQQUppzEu94DEXYlbyRG4wnEiTWYgl+EAwjj6Elc5fqg42OB8fsSYS+kLXF4ooN3c1z27kHmMtPhi2W9HB43cTMU3EMTKdzkzjmel0Um3uPCsxo7RBVS2TWn7mqqlbXLcW2BplVGq1757tItcKuVSmGDmdR1LOGOtwxnB1fYXznnma9aGViljdd/1/Q4o65tC0zL80R7FlzJ2IUFvfFEwT8wt9H5g3PWItqajHatla8AbnnQbqJzBbTpFSVA6x320QMdw9nBqTeWIaT2of2oKpMabJ+mojSQi1WAQhdErYSs0NKPjQpBvKftZpQCrZqBT18H7mZVoAXeQhy6G57O5SKUgjs9H+vHyvESEbKGYhx+ixYZuhTElCRk3fcyPxTQ12K7lgYsIXwGcSmUEqzkCZM/PjEZMLdz/5knQ6k+JEKZEwDOxe3mK9x/tOtabtOVxhReBnlcGXLvHl4/uwtNWA6qRz1j0tmtjm3PYeowHM6cTOWiDbBfrX/7bW0HnbmLwXODNFZdIS9LouaN2lZdOkY00yVqvV17Q++7LKlBT1UAi71ErfBWzOmr4nQXKhJlV0iLAaky1DqKT1KVOKmjgXp6xjw2pG8yx7KtAFx3azp+87QvAMQ4+1RolURp34FO62nE8HUkr88R/+M4ZhSzwfmE8HrFicaKuv6zr6ovzoGHNDqwJRBvzuIzaxAI5atDXlndVzrLULs3EUMVQxzUOrJY+1wfSLBlgqmEKuiZgjMSedrCRmZa8vRZ006Pxyqz8lwn34Xn4PA+9KVtdRUI1lWysaeEsbVkAlpso4Z8aoATi3bFKaDeS229L7wLZ3bHqHd0LJZ+Zp4u5tZjx5rq57zPWgZIidNOKFRazBetU9Gm9xfWCOifPDPXGMnB5PHO7umKYzh7t3OgIr6xxfby3ButYPar3INdAsGXZpjkuVWoWSlfXr/XPau6kl2/E8cv9wUMes0NN3HZ99+glD3/PN119z9+4d3kANCnMuownHU2bKEVMrNakxf5wT86w9RNVWF4rVXqoXi1TIc0aqYZMyw25HpjKHSjbQD2HtwZp2S
MVppNaJzu94/eqazh95++5AjpHz8ZH7+ztovedailp8otrdnBLZqE2edYbNZoMxZoWbRax66uaijO5yqTJLLqT0vKFiMUJYDPFl6estzz0thlUW78g18EoV3CJlEaA2LWY7XL13eG+J88RZMiXr0IRSYK4Fk8HVzOE8420h28gmC4JnMJVymhi/eUd0D+TjGT8EXn71KQ/ffsbu9ppP62/TbTaYq1tsP+jvF00cLkO+Wimx/L9cdm8h3Hwfgq+gHBtjRJ3Ppozg6YLX9s/cAmi1OKv8A9fsJNOs/b+UMjkX+i5ws98hwBynlYcxjiNd7Rg2w2o9W5xjCbw69lGDZ8lFEyljm85fGquW1pCseGvog1bmGKPyO2uwc2SKiYIietboDuuQeJqDlHJO4nimWKFkj+CwrtnGPtORYkTYbjp+40ef89Grj7i+vub1xx/hvAZgMYbj4cD5PPL1V1/zB//8D5nnmX/xB/8MEL75z/1r/PZv/Q6vXn3Cb//2f57eBvabDCbxUA2n00Q1huwHijEMH/0t/PYjbPeniOsxFLwkNdAZdlgfmGwgiaPgUJWwYOty6+rwQgyIKYgtxDIzRWFKE1NOalepA9rXwFqfBN0llLeo9K8m1KxmD2UlECjxqPE1G+z89Mx6cnYh6BSX3mvg2wTH4DydU3cYKxWpeYVstBkvl8HgC8HBqvXgCnfjsNVhCzQPtjZ2sFCzsq0xaoBRnhQEC9/g6ZX6rtRFD2pZSQfPOaatotNKctG+IGJwXqE33xibzjll+NbSDvrWL4Um3VG948rCXSrfcmEY0vo8S6lurMEFLSGKCrMQrxNFvFcnr4WkUpfvL00TKKpR7TtHir6Nf2ypVpNnLRNn1kHvi9/wE0LSJXlR6c1lAH0FFA7MTWry3OQhw2KDoGL/ZV1gr+9IhhqyU0tt/77JyqomD0sF7Z2l7zq8s3ir0q7jsZJibBO59EGIjYU1pYyRzBQTk9PqbPYRR2GaRgqZ8/HA8eEOMXB6uKekiGszMNUN7MmYyjXS1vfu6eWI+j5B00rwUYmQLMzipb9etBEkXPTTtV7uGXWgMi0prqu0TZ/n941R3uNjPEms32frsG7K071Z5GSLKcYKIYuo6qE9I9Yadblr32da/3I5N0qrlJcrIKLXqJSik9V+kUj1l1jGGIah5+b6hlevXnFzfc2rV6/w3jEMG4w1nDZbxnEkp8yXX33F6WQ4HU6kmDkdH3m4v2e72SuCJQqJew82QU0qj4spU3Klug66LXa4IuxeQImQz1SBsszclctY+6fbfpGg1nXzK+oVkLIlpsQcZ8DTVb+e/+slewrltPVcNdH3LPBW4hw5n0bm84nj4QAVnOublss2d6NKzio8KlUDiwCOwlUX+OGLKwbv+Xi4YnCBuWRiKSDqZeqqpXeOTejYDTuu9jcYj5IYreA3Pd12i+23mH5LKYaaHKYIxgaMDdRqiFF1c7fXO2rNjKczMc7tIF16MU1TV3WEYM5l1ZHlrNVuCN3qxqR9m+fZzVIqx/PMnCHjsJ1j320IwWN9j1jPdneNNa4FUYVyS5votMD12o/NpJgZzyPnk6IGKSrcWXJqgUQhYNtbBtNTEbbVUEWIrpANyDKxq4IUmu5Xfx9pBjnRu8IPPrnhNA48Pn5NKSPn85njacIYw2YzaMbb+rsaUDOlCHHSsY/SRsDM88w4Tqp9jUuQ1Yo5p6LQ1jMGXtUhWmVitoTFNPaMbYd3bV9HNLOuFOI8U3JiKi0pXCBEI6vm+Hp/y0cfvaLrA1dXe+Z54p/+09/n7t07TqeJ82miAnNRZ7dvT2e8TIzzzGGe2HSBV7bii2U7VkJ2xK8Sh9M7ht2O48M7+s2Gj3/jt9hdv2Bzdc3u5gXGWtx3kZilh1aXw0ghdMMzZzG/4qpVkwtnCt6rT7M1hpJ1YHtKsbF9NXh6p2rzi82j4KVim9lKyRFoYxsbG3aRs5VFvrYEwpJb6wgN2M1P3Fq7BkG1qox6w
Bd9jkDhbjEGrFGr1TZNJ0Z1u5NS8NbgjLq5GePovKf2rAgQ6LM/nqfmpvd8FW8Igd/+rd/k7/7df52//bu/y2YzcLW/wjvHZrdVg5Gms/+TP/kTXr16yZs3b/hH//E/4u2373j39g05JmqFH/zwt3HdhqHvcF3P+DjyOE9MsXIfs6In/grCHr+5ofv8d5hPDxy+/bGSLUtsyamnLmK6xTpysSEzT4KvVCqFcToT55l39x3BBzabga7z6ua3zATgCTq58oZ4Lzh/yPqeBd7LdKKU0tpDsaZQW6UpOny0QQiLk4k+9Fagc4brTWDjPTe9Z7CewwynuaxZ8BIQ1UjB4X1AXEVEJ4NY57DBY73DeA9ZdK6usetHxVCq/s7gPYKlxBmqpeSlL73c77UFY51EslDZLwYPKpWR5bR9plVrJWaFImvrYzjnCN5h2kPrfUftK7W2MWZFnWVKzhTfJrmUxrYslZzymjisVVktKwApaNCzwTYyk6MKWF/JTSa2ZPq05El7Vi3wl4Q1wnYTsBY2Q0fXB2KaUbiZZqRh8T7gnWcmrvu9eHw3IjA5L9poNINedQa1DVDIP2vrPmgtvbt1GD1Pqt2nLYcn13rxaa6tj65QvKgWtBUz3jk2Q89uv+X1648Zp5Ef/+RPOY8nppgWVwGVcVSYUiIhuCarqga2OZJNxaWoUfNUVQ8ZI6HzTJsNm6sbheidp9/ucbWCsysishS+l2aY/sL1S88Ex33I0uudyVnvp7JWqfr3OV2kZCKQpfk60/yNF9OVZt+5TNIql/JYf5A8+U+5VM1rO4H2TJjLz3xqnFJprmR1uW8b2tQaudYqmUudri4Bvj6phjXpsQqPw0ruy7m0oQ3PdzWsNex2O16+uOX164/pO1V9OOfY7vRz16vaYJ5nXr/+GGqlC0FZ5LM6SJ1OR1KOmJK1ZysKv0szFZrnSBbBuA3iHK7vCbZSXIDTI2WeqPORWmLDlr6DNKz35tNEUPcl5axuVtPEeTxjnWn8Edvuh8t+6b0ul3ue9/7wK6/vWeDVLNK7QLZaAaprkTJvaynEnMm1rho2bxLVFUIvFGP5/Lrn9z69YXCOfQFfIl/OZ+bxTDaF5BKpWo6nI6lmhqttGxkoeO9w3jX7N0F8hx+2SIJUwRZLt9kxTDOH+7c6torCNEcMhRA6utAR55lpVNH6AknFmClZp8/EObWHVTNgaWbSKV8y1udYuRT1XxbB9xs2mw3XNzd457jabXHGcjo8MJ5OTNPI6Xgg10yiUMVi+4DvYRpPHB4TcxaVE02x9aQVfSjt5tYu+3LAGCAvlrNKsOIiP2iCVGoFby22055WimfEWvrQ46znN370KTe313z19df85Ce2OTwtEL3CzCWXdb9Vs7skSqrtdU5N7L0LCIaUFDGZJZLT/D4+9Qzradtg+bgc8pfAa0Ra9VvafaDTtRYI31TlXYdmR2pqIk4nws2WT16/IJfMOP0tPvn0FV/85Cu++vIb4lQ4PyZ9VmhGemVmmgsHItNDIXjLy7pj6Dx7OnamA048vHnDOXSUaui//pqbj17z4vGRfjPw4qOPNNHpujZqUp4cbEt75GL58Vfd5RVRsl5whv22Z+gHur5jtx2UOVxi6++3f98sIcuThKmiiEwqkCeteA3mwoovEFNlbmMBY8zNpnIxSTE8haZLLcSGfC2SIIWVaQVFI/3VSo5aWS8/TxC89djaWluCEgedX6HyoiOnKJUmWxI6F9h0w7Pl8yll3r19y5//+Mf0vULOr1+/JnivVrTOcXd3R6mVP//zP+eLL77g2zffcp5Gcsn44NjsBvb7LdfXe7phy0xHrIbusWDrSJknTvcPRBFyuqL6wGY7MLiO0m8Jrz6jxInz3RvSdNYW1VLErInuT7/2WuuqTxcR5hh5fHwEKufTWSdZDcMq/dRvevLpQm/44PU9C7zqSqWj+pyy49BpRMZYSow6aaLqfN1aqvp02oILgjWOT/cdv/PRFRtn8ccDE
iPHh5Fv55PalqEw3Ol8JtbMOM/kdjM7b3Feg2EVwTiP6zcQKyYVbDGEfku3idjQU42lZpUDGSnsNj2d94xiKCk3Rnazd0txrbDmSWU0Ibg2KUehz1LS+hA/xyqlcDydqRhc6Nnub3j18ad479n0vfYOrUdsRzEH8mkmkdTCjcIQAiF4Mob08EgshinmlVylEGpZXFJZ5oOIGDBlrXuqoG0BWPvBtdRVceWtDrSoc2ROE1YCnR/oguWzz17zKhWsM3rNWiuilHZYIeRcmZbReb45VlVlade6VMie7bDDGMs4xvYeBCP5/T7dB66F3fr042mwfepqtcKaLUm4BLKFcKW5fLA6V1VKJk1nnEE9cK2hMPF4fImzhpwix8eR+fRAqsomr6jj1SklfDWMNRGcBSfsaocxihJRMiYnjLWMc8R2Paej6lJ3V1f0XaAfBpwVHYN0wTdYhgXQru9fddAFfVXO6N5t+p79fksIgX7QIQLzZBXlgZWhX2gphDFrCVvRKVFpUjSs8x1WDKtxQ6rMs07PSWnxhn5/rOPSW6y1kBq3RPW7dpUWKby93M8Lz6XNJW+Bd/Fzt83mtPOhGcForzoD2ajBhBeDE0NvPZvQPRths+TM/d0dX335JV0ITNPEZruhC532pK3leDwyThNffvkl33z9DXd3d0yTmoxYZxmGns1u4OpqS+g3jNkSM3S2YOtMjUfGw1smhAnIoSd3niIbXNjQ3TpKnDlNkVQMNkdqiY1kxqWf/rQzsjxWVGh7nWLkeDxgrTCOY/PU7lm9mNuz+l68fabb+3sXeFmIKWKwRmerKkXdNMSk3cA5NeJVWW/qWnVg8jhlzDoTtyi5x5QVWhZnccFjvcc4s8LWsqjsTMCYDhEPNFq+FIyphNCpz/MwEIYNearMY0ZKwhur1okxqbPVQsAoDX40zaMZHdrsXUAaA3ecIzlmUno+56pSCtP5yOHhnofHR6RWgneE0JH2VzjrmKdEKWDE0Xcbii/UfgNoxWCtME0jIpVSMufTicOD43g4cTqNa+9EQ8blAJbFN7XBw2r3WVuDF/Vstu3GbqSoZXCBGIU/EUvwDusD+/2G25trpmmGYki5YIxC/IJC2irn0Hm+4gwYMLVNlbEafPW+Klhb2zxm+14wfI71HnlOZK3Cn8qwpMGFi6WltQ4qOrSg2Qd609ohol/XQQmZHCfOxwMuqEZ5KwMvXt0wzSOP9ydSqsxz5Hiamr6zVUQNKZIsnMYZSsUjOBENxqAzZWOkGsP5+MjDt29I00jfBbphIE4v6DcbrPdYH5SIZF0jFrWAXOpfeew1YuhCoO86uuD1PrKaJIhUrNVEO6VE1sYvUhoMnARpaE5tbmi5NEkjsWk+2wnc+BuwEHKaDt/oz1AZnqwk0YvGuzKnulpHOhbig5ALlCqUahB0yII1opKwhRsBzFNkGpUfkXKT4RRtpSnHRMdjpjg9WwtL1SSJb968IefC/d09h8cDwXuGjY70HMeReZ558+0b3rz5mvP5zGajmucf/vBzPv/sh/zoB59zfbXTuepjBgp9sGw6R+cFKXp/lvlEoRDHwBw8xTTZVzXYbiBUkPkM8zI8olxQl5a4yiLBaldQGvS+tjVjYp5nnLMXN6unCfKTP/0rO51IwRaDMwuxh2aKbylp2bDMHMeVWKXGBCAZpilz/zARvSVLxJMpkrG2IN4h/YB4R7/Z4IaADZZqMohtHqoBazdYt8PYDTpzt2JNAmfY7K6w1rF/eMHu5gWnB3j31UyOEyVlOu9WvbEGXSXvGGMR43DB0i86N+Op6FD4aRo1S87Pd2LlHHm4+4ZvvvyCb759y/3umsf7e/p+4KPXn9F1fbOQEKzpuLlSSNZ1PcYYYm5DJ+ZJc/80c/ftOySNvHqx47b5L+92mu3SKuVVp0iTSzQIbTmcRXE9qtHqN7deV8FQjZJVUjwi1rK5eonvBlJ6CRUOhzPCt8xTZJ4qKYFIhzE9xhpC0M9NzN1eg8EaQ
/AdIgadHKiuWymkZw28lUrJhWrq+hCHEBqZTkv81dZSRKsrUP2xs0hxmKYD96Is72AEVwsmR0gT8/nAuzdf0m16tjdbrsJA11s+/vQF3357hxscx8OZn/z4Kw6HRI6VnCq5oOYnCd7lEwdjmM4z53Fi6DuKEUKplPFEKJH8JnJ6vKPrBx7fvaHrez7+7Afsr6/pN1uG5njl+6Hp3QOmDRr/q655rTXst1turjbstxuGoWvsggQU9an2cDzOpHnCVKfBtLQZxzptFmoboJD1OZ5nbU0svddaa9OvtpNLaKx9fSRSVkJfShoUlmCdSqHOWkWX1r+X1iPORYjZUoombM54ivVklyhVW0glF97d33E6n9YBKM5ZdruNJpPNACTFiVONz9Zzr7VyHkf+6T/9Z5Rc6LqOXZPwLfuwzMLWhCDineejV6/YbLb8G//G3+Xv/N5/kevrWz59/ZpSBXn7wGmcud4GTtcDp/MjEo/UlMlSiHNgNFpkeR9g2Crxc/eCfltIh3ekg2BKxOYJKGtwNUW13PrZaKughWCF8SPeTRyOR0qt3FxfXUgM7wXfJ6SqfxXJVSvp4D1ygqy4/IrItX+7bEJp8OUYM3fHickbkplxkjnGhS26iKAbpNfgolLadBgsFYvOu7NK3HriNESDpK01q9/vYti/kHeKKU23eal218snmkQ437dg1KqtljXX9t6fby8rKc7EOOvneWKeJ4yxpJiwNrOOoUNhfmOUtCTGkmsiS1x3jFKJ08x0NozHkdPhDKWjDh1iaqvoiqIDLcCaFnhro79eRqyxQnFP7UDXG7z1OReijneOoR8oWdgMZ4xESo7kkpUJbr1OMLEOsWYlsyzsRJ3Z226aJ5Wms+bZK179Le9Xve+RqtrX6pM/LzIy5fW0aU7UVaOpe/NErxlnbDTAoBrqYOk3gWHs2O61p7fbb0GEOGbmsSgDuR3IOet/TylznhNiDOeYKIBPboW+l9c8nY7UnDkfHrHWNG9isN4TctGZ2SJ4eV57wl91KYR8qfSXaWGF/P51aVN1FiCGhYdAeZIs6t8svvGrob7YS+Xb1hI83zO2WH7XMmfZKGSb0GlIMRVKSUjT5eZciXGZQvXkI18mU+nov6ROVa3PfnFns9pSKwWkYsrzX48YI/OkpLwcYwu8TkmBAst88uAdfd9xc3PN1X7P7c0111d7dpsN3ltypnkdKAnQW6OsbSu4jDoT5kRNkRxnDBBdUL8FxUNZZnOTK5Jn7W0JWumuFe8aRfSjXoxhVOOf2tQ5RU5X85v3gqw8+f8PW9+7wKs3lJr5x3SBdYzR0XqmWmp1BGMpGGJR3deYE+NcOL498M39n2EFNj7jTIvOIvTFcD1kXAEznijFMJ4GTqcd1XgG2VLNQJGeSiDPGQ4HrUTnAjljJROc0AWdC5v6M973SG5wa2adILNc5MbDoACb3ZbbVx9TKzp0ICXO55HpPNLqpWfby5Izp+M9cTpR4kRJMyXOFOd1vF/KFGla5uaNrXB4QKxjHI/MU6TERGgV1/Hbe+rhyBc+YMbIy1e37PyAtYbT8URK84UcZNvwbp29BmIwTg0EEIXigMtAdhEQ1/r6elDlqUCe6Ezg1c0r9kMmuFvGKfLnX3zD3f0RVy0Bfe228+vD/9QkvtbCNI8N6gXnQcRijP+XbeGvtBbJyHfHsa2kqka2WshVBgjWKrM7Z8gJUwu2JA22JULTOTsDNSdOp0cKkX4OGK/zScPGcsWGH/32J8xz4uPXHzGPiftvHrl7c2A8Tdy9eSCnQi6qBX0kMeZCmCLHoojNxymy6wPboceIWocepGKdJ6dI+GZAnMf4DhcC2+sbQtfx+W/8JtcvXpLy8/EUftVVcuFwOmONUgnmtND7dIau2ohKc7SzGtDyBaWigg86xaqkRCLpaLkam4lLG/zuwJk2c9modaozqk0FffqtMXS+p+u6ZoLiiHPk4eGRlBPv3p6ZY4LW6ipVSXalciECxsQ4zaSUO
B5P6mPe1BHDduD2eqf+3L3HGEjpSJzOCp/KxQr0Q5e2Ttrs8Jh1iMw8Y4zQB4d1hpurHbvtwIsXN/zoh5+x2+35nd/5Ha6urvn8s9/ko1e3eNfTWSHVQjCQLAxe2AbLfvC82A2EaWaOhZomyunAlGE2jvPjGWstV4PacA7G0e+uyPOZOY7tuWtBss0LN2sr0Vy4J63gGufI3f090zTy0YtbdpsBayzeuvYzFkIWT4L3h63vXeB9atRQS6UYWOAZY0CswWTBNmKPtMHRqcJUKtMcuZ8jBuh9xVkYOk8fPMVXdrVgqlByomQlQaWUta8inmICFUetypZlbr2GVFr/pL4nRVIXGks22qNZqtxlqPYiG2mxCGs9/bB9vwe4ONhIfa7nY93LGCfVINayyoVqm/ij03BalW0WuZZcsuaqsovaoE8plTRG5pQ4PZ54fHdg0w3UuVJtJY7aK1k8ia11LcAZFUOKGpNY34JQMwmotCS1jUfjSXZaUyYXwRDofcBI5WofCCER3h4wbsb6jA1Zg5r3as4h6q1bcmrVDuQ2dN41n942qZznbEguD2i7Apev/1TV+6Si5KLx1e+qmCoYLtXVQgDTtnkhxZkUzQrrIRXjhNA7dlcbUioMfkueK756iIaDnDi8PYEkvUerVluJQqoVGYUpJ3aTxxkI3lKypwBxmigpcbJWr7FYirH4riOVQjdseDU2V6dfQ4X1y67SlATTHPFzwtikvShUquWcUdnb2n8v5NIShgZTGVGCVraiPtlANspOtka9mK2pa6VKXc4pWZ3LWPgq1uKsIzi1r5VisHIm1co0Zc7jjHoMLwY9embEpGfJPEfGSSeMHY5ncs5Phn5YQjesc6ZF1NUv5aySuvpEbvMMy5iG8DUSWW6ue1bUFtMatZW82m14/eolV9dXfP7pJ1xfXXNzfcXQdXpuihrMLEorK9rH9kbonCVmi4sRU9SLv8hEFR2JaY0lWosFxAvBBVJJRFnkpVC/EySfSviW81jPhcw0TxiB2KSSRp5I534N63sXeLXicsTW6zMFSkmNgt9IEc4ofb1VM6ZUxGaKQATOSY8pj2ALXDnDrloMhmIdOEfoHF2wDMOG3XZHv92x2e2xoSfNMyk9Eqe3zNOMNVa1umIQ16q1WrBNCxtzZY6Z2gzLnNNe4xJAKlpdVbH4YUvY7skxkU+nRhISgtfe2OLP+xxL5URnUq5tGIJfX3OpdSWVLJbsWIvNghTtPaXGqqQUHIIzluuhY9s7bndbbndb9n2HAyX+5AK5kGIipoSY2ODshq+xjHFs1UCrePMyn3YJLu0eQAylqA4xV0OpDnEBv7mir3DdQ71SAwHrFC4NnbIS46QDKwpKCoM2BIBmsdcgwmefxytNf/ukJbLMFG5l/QqB1pbQ6bephnRpbRigWItUdVwztRIznMdENyedCpXVtSrOliSqk45TIk0ZI47PP/2EIWyYPkuMh8Sbr9/y+9t/yvFw5JtvvlUJhShxSFJhmtQt6O5hZB4TOTfSXd8xbNVXd+jV7GWKiXMcwRqCd3TB47zK/pbr+le5aoU5JaZo6VvP2RptD8nC/8rqYlcbhO9MI+I1eZTy86oGah+ovjJ0HdS6+panmpgYFWp1zZWNilCwxuC9197rplNvY6/TsqyFmDb4KXI4Tkyz9uBzYVVDlFKZ5kRKhWmaOZ/H1YmuirRhAZaYE2/v3mKs6OAXIwx9YTN0eG8JwT7NBj9wtbaIrLdzM1ERfPMI2PYD15sd+37DxncM1uNB5wyXvM4sDk73Klhhlsrh/o6f/PmPefPwyHg6kHNmGzpCFxgrzGUiFZjjgSqGOR3Be3ZXW/zVtl1FJcvqFVcnuGqEYoTGYV/h/9r+LlOZ5hmhcj6fOJ1ObIYNXeMYsTy/zxiFv3+B15pGprrMdcwlI2pbqpBztYSFRp8zsekgs8BU4SHVJg0QTDGUYKAIAUsxDqwye/vOsRkGNpst/WbHsN1hXOB0isQ4cbi74
+HuHcF7rvc7nPf47ZWyOVvgFWtb4FXzbVMLG9PjQr++pyookUosYdgQhh2zmciVFniNVhelZdfP1CMrReG2lHVKj7HLwdgCby6LDYlWnEXNxk3JatTfCBKUikMIRuGdq03H7XbLzXbDrgtqsFBQH+1cyK3/oxV0Wh/6pX8rT/5bmej5YmRfMtbaNldUnahyKqQKqQj9bsfHP/wNnPdc92BwOJehva8wDCDC431mXBy1mnzDez10a43oGMMC9mKq8CyroSHvPaVPf/5CakX9EErWw0B5nRdoWvWkizG/fmMsME3a16OZ/KeYkDiTRe1K85zIU8EGw6evP+PVi49w0mGl48d/9hOmMfHu7TtO48g4q6NXyZWEjn1MuXBfz5ytMsVD6MF4jPP40KmsqAuUnDjOIwSPd5au89rn/d4E3sqcMlPMGqhQtMwHD1RS0qEbKn/T71HXxQviYk1jyhrBGEVSgldm/PFw4HQ6NtMTnXATnJJCjSg8aY0wdK4ZnwSGPqxQs7NCTkMz8DlhTFpn+paiA2Byrnq9o7ajjqczra2pTGfv8cExx4nDwyPGaOB1ztD3O4ahb0XKM14Paa2Up0GoITda0Tu2Xc/VZsO+H9j6wGA9rgqmVqSNdDUuEJzyXJwRnMDh8Z4vf/JjHs4j0/lAFsNm2CC+4zAmTnlmjpH5OFKBOPUU6yhB8Nc7ckOSdKCiBsoiUFrrabEJfY8cJKyBl1o4n9Ulz7vm1PakXVTr8+Uv37/AK82KrW0WC0lBmgewFWqbjVlLBaMQqDG1wT/6IeXSYV0m68Q5cjyP1OKJu45OLCnr2DzOBx7v3iDWk2ZttZXpgE0jQiSdKzV5bAiYlsnur2+oOfHRx68Zd1vqfIYcGfqOzabToBITVSAMe1zo2eyudO5pSiy3gXMW6QIpJWqMz7aXqh8u6nLjLoHXWGVVIzrkYMXVjGh2yKI9Tm2wQNY5mGbxrW24v7XKQhYlp5jQ4Y0hG0e2bcRaG9El1NVe8D0kttZVHlGrHja1FExtpgRTpqTClBJj1MPpeLjHdR3BGa72A6kWjuezQvXNc9o7T+0MzkSdtFMrtPCmqS4sMOBzZrI/60ctQ+9X8tx3SHSq0bQYEUpt81i5uHwZa7CiUivn2hAF03rYNVE0DpOAeU6M40Qt2iqpCKHr2fZ7po8iv/Xbv8XtixfMc2S33/Pw+MDjwz2LZeiSGBSUeTunxJwTKZf3fL9r4wUsbmjWOkqpjaDy/G5gv8paWj5Lf7TiCd62i6QRrJSEjvK7EHCssc15zDZDhuV+QREK0TbNStKkrvdUrRUfvMqvusD19X6d2dt3Hc6phG2aY0PJwAcNxpWWJKyEsEzMSYNNTsSc9HcZDX4uu/bslFawiP4sZ9fWxVMf6OdY2kppjmzL23/6d0+7oLVCU3hYBIc0s9YFdRKMqbgGkW/6jv1uS7WGhziTquqSq1SCheIEUw3Z6dlvakKyjhNcHMeeqiZKc9S7YFqXKWAXoxdZ0bZSCtM0cTyd6ILqvS+krOeFnb9XgVdY7BNd8/zUwGCbnjQER+e1JzNHPXDUtaTgbaFzhZIqwWTNfkQfrpoS81w41sLXUtgOgZubPQOOMUYOxwdkOnJ/vEPEEMwGg4MUCWmGGcbxHcY1G0kjbLdbNi9ec3NzQzCZ6XxkerwjTec2o1M1gofHAxXho08+Z3d9SzGeLB0xXQwm+r7DdpZxGqmnzLNVvLVynmZ8GHAh4EKHCx3WBw2+T/qKYi3irE5FoULJTHHmfDphY6Rzls7qiDFRwS3FeYrzJGNVyrO/wVEwMeOSagunSUkppqgtnqCOTLU22U0tmDJTq1pSplnHmsV6VvbtlCi58HA8cH94ZNhvkc4wbDe8+uEP2V3fIBQeH+/1QUtqvTeELX1w2gPNiZQix+M9OcX2u7SPY93zd3GeZsWXbLmu7Nql57yS0Iyh73uctcQUSUW1pSlHrSQ6r32vYOi90HUe53TCVckjKRnmU
plL5XxM3N+d6bvMNKtd6Ha359PXn3Nz+5LbF684Ho589Ppjvv7qa/7oD/+QP/wXf8g8jxweHhrzVwP5lBOHccJ4xzkmxDr6lDGpKOfXB6zvCP2ADz0xKvFnmuZn39NfdimrV3X9p+NISonttlcZkTHrIT1HHXSvWmrbZGe9jvdbErTmKV5rG3EJ6hGwPD0r01mD8GY78OLFNbvdls8+eU0Inu1mwHurDF4jnE6jMshPI5ttzzglMFGhZrRvn3JinCfO08Q4TZzms7ZjpOl/PRTJGKvB23nLdqd6We/smkFJfsZ2SiuMpHFC6tLbRpEs1c2ihU8uSEyYlAkidOaiSbc0I5Bq6DtPKZUXt9d89snHbI4nkjVMKfOQC7EkTBAG75inSoimWcFO2l/OCS9Cbq+tUpvDIaRam5cD2MWWkyZvXLJhKkVUmnj/8EDJCW8dN9c32i6wtl1reS/R+JD1vQq88KQBvjTBaZOAalUopjSbwicHFyj04q0h2qKM2NJGlLXN1fGCpVVxpjlJzaQ4kaNFba0mRCyFjKlOM6mSVwkANVPTTE0R21Wst2TvdB4lGeaAqRFnLd4p6ct7DyL4EPDek8UjsiQXltwuqpT6fjX4DGs53JcJTAuEb8wTssXC1Fv3ff3ulggtTkwWYwXrOqzvsGHAdRtMGHRcl2u9SKA4hZxNKlSjhKangVfbq6pFrKVibNRespnAhHZgaKaebaamQlehy4XQb9YEInQ9oR/o+oGuG5QNKkGtPKunVN1nA8q2LpGUIjEKOUsDpX4d1Zk8+fyzntSLdWapTXpW6nrOfzfvEjEYe5EiVYp6EBdBDV6U6JKzJju5wZQK9Tdij3N0Xcd+rzr0ly9fUkrh/v6Ot2+/ZTx7cpopOSsUakyDju1qaapltnqWWy90xhH6viV1vvXun89k4INWvdz/uRRyk+BkbaIi9nKVnpy/umQxA9E/UxcZ0aWqM6KwcpUm5lmY7K0nbNvUr67v6EKg6zp1xUP5BaZZhS5TvlZEhCVeaoIWs/IlUs7khS60sJEMrfo1WKdVo20fq/sSi4HP82zrWvE2qdWCZi1BSV9/hcW9q+3dygxed3xpPdU291zbAF3f06dMFwLVJMw0redBrYDo+U41ytcUhfSfomjfRZUWq+qCusE9PWSX60m7BiklpnlmmlVZU61TY6Qn7bHnWN+7wFvFUI0lY4hZ1EzhFBGTGG3U3kjOxDm1sV+agfa+w+0DwSbSPLVZvUpWKLmSpBCpxKkw18jjtz9GRo/JGzoGxDlM31GK4f4epgk6sQQcXXBc7wZ8F0ibN9gacbYQBg+MXAeYi0Fs5kzE1oxtzOuw34KxeCOUGAlDR7fd0VthvLliDIbDuxPjdCLGBDU9W1YFGr90VqZ61Xad2nHSiEXGLNClzrHU2Kts8WHT4es15aRzjJ23DK9+yHa/5+YHv8GrH37OsO3ZvrxS+8LFuEhxCGUMloVc9tTdh1aS6OlYNc1XMlRsFX8zHig6sZxxHjlPZ1zwbG53OO/otjtcF3jlb2HzMSlXpqiykMf7kfN55vrqilcvb6k1M88nUo7c37/hdDowTSfOp4dnQxjeXz8dfJfcZmF9p5SYp0mpPCnrNCOpTfOsH2KUtOKdodaZaZ45nSr394Y+BTYfvabrPafHyPl8ZporqagK1fuOrt9gXaCgPcH91Z5hs+H3/gt/h/PpxCefveYHP/qch/s7/uSP/4h5mihtck/f1ACboSfs9wrvX93QbQau+oF+s6UbBl5+/Bm+6wj9Tivg0P0a9vOXWwt0WNo0MKEwnoXDwwHnLMMQcFZ1r953+h2VJ8zihWtiKSRqTIhAsAYrhr7rqKKmHJlleHtVK1tjsD7gup5+u6PvOoahxzvHPI1M48jpHHnz5o7D4czD44Hj+cwUM1NKzDkzZoWX785HHh4Pik614LrZDmq92Cus3AXP0HucNQydDhtw0lgDBSVRPtOSxpVQH33HpXUj7w24iXEmp
ZmYZlKKlNrkXAudHqDp7V0X8GLYXV3z8lXEdh2HmDjOE3d5JpWZY7OIFCzGBYwPXO82WOu1fdfMAfLy+zR10baZtPuhqirFim3KB4Ovpr0H/XQ4nhmnGe97hn7D0A+EFy+x7on1xpPxo7/q+v4FXrTW0kpXA2dOqpMruWCFlYjT8kTtk1qrfrVRCC4htTCjIn9qJZdKKfozchLm8cgkhjhG8jRjikPshpyF4yFzPBZ6E+jFk/vA1qm8I09nSuepccTkGVsTwVSwFSsFS8bUghTtw7nQIUbNBWpRiUvwnho8XRco2aM+r63qWyivz7WftcH33rXxcrZZ52n/hdbvlRYN9VdrkPTOYruOnDtig6j9cE3YXtPtX9FffUQ3BNxmi7GGatFqwSjRaWEZgii+LEvgbZnyYi3Y4kxVrzzNklswLjo1gDnNzGnCOEPYhHYQKTQ+FM8VgZQK5zGpjOQM81QZhg03Ny8QqaQ0kktEpOCc5XgUYpyendl8qS6eBN2fui613cfqRjRXlRRhaQMTVPq8VJDGGJ1VmhMpGaZpxHitNhReo1W7da2cpA1dv8yZNfjO4nzl5csXpOsrco6Ukrh7t+V8PnA+t4EiKam9qHfam+w6bNBg4vuBYbdnf31D6AeG3a7Nee4w1rfZsN+DtUD6RQlLOatJQq2FrnNUc3FTW0g0T+1K1F3OglmQmsVAR5pZi6VSSG3m9Bxn7c8LKnt0FueDBuHmqzzPUclTqXA+T5zPE3OMpJz1o9lTplpINTOlyBhnXPB424JvH/DO4oLFOu3rdl3AWcE7o6Swurg3aXHyfBjEpY+q+wZLj3uRKJYnRMlS8nu2vkslqhQLzUTFWoxV97ZhGBhjous6Jc1aRXhiSYyLj7vtEWMJ/UDwHT6E9dWV0qasSV0azU/qGK3Ml/+2otas1MtoxwVdOI8Tx9OoSpjVAvW5VLzfs8Bb0Tc+jnozLkQDQUv9heW8SDEqFapmc7aRTmrvub1SazxkagSGShXVvVlncQ68A++qSmOuemzo6XbXpGopYhhOhvEwc3ycmUsl5wPeW844hocHuneP9N98S0ozh8d3pHnm8f4d4/mEa65WIfRc3W5WRyhjddDD6eEd59OBu7fv1A/37sjpMCo8U593Piwoy9a33nQtkVR1UANVWq/XKmTVDpR+6HHOcrvbcnX9kiAvGP7WbxKC59VHnzAMG7rba0ZxTHPm4c0d1hl2V1tC8ATn8KFD2thGEB1tJ60P1ALvJYNkDb5rIOZS+FegK5lck15Db1YdsIjQbSNXN3q/xLmQUuZq85bHhyO77YbNoDDfbvcSEXj54orT+cC3337DT37im9zoudfFHauU9+HX1cN7udBCsyA02KBQYana48ulMo4zOQo2R0zOWJOZxoqxleMhUc3M+ZgYx0KKQhVLrYaHw4E3b99hbMD5Hmcdne/1vLMGJ44XL1/gvOF4fMX1zY5pGjk8PjJPk7ZMrKXrO16+uCWEwPXVFf2g0H6/2WCdpxu27f52DRb/ngTetrz3dJ2n7wPb7dCeB481oqSx5qFds0qAUlKylXKDKiWqpEeAZPXmrEZHNtYFkq6FnCE1hcM8Z3IEqRaplvmcSJIZjxPn40SZC5thh4hnzJYwRB6PR+rjEcg4k6kmczU4XO0JnSd0Ae8t26E5O9mCMbDrhJtdwDV1hAEtEJb500v7+RnWYoHqvVv7qaUlexpwFblK7SOXBeJvk5sWX2thtXR13lEwOO/XkZ/qqKcuf6FWuuqI0kExlGRJVc2TSDq313qHCyp9s9Go01UtWFGEoqL+17odi5VKxYmifNY6VmcyKtN05u7uLSXNfHR7gxEYnG/3+IeH3u9V4AVIMTFOk2aGLchaWRxH2nbVy+FFG2ZtnSNYg+mMamuj9rvOkzbaY84KqzqLSnkr3gnbwXGz04EHw/U1mUAyA93Z8lW+4+27t8icOB6OOFuZU2IYPD68wXc9qWRO04mcM6fTkTjPeB/oQ
sewyexvPsKI9nON0d7yNI0cDwfu377jdDjwcH9kPE2I0UEMz9oiE1mF9Wounii5Mp6SVkdiFd6nkElYa9nvBkLwvL7d8dHrF1zt93zSxsJ1wx7rPKlkxpKY55HT6Q7rLJ+GwMZ6rHF0oUcZgy3DNA3GbsQWhXrsEwbkEogvFR4Axqq14pN/uOSsZimmzUXUX7PODN50Gx7uHhsT3jAMPZ99rkSX0+kl03zmxz/ZEdP0a2HhXswyjLKPF3bswqJ88qHOYQpthhBwwSnJKidyLozjTJSKyxFXEs5Y5rFiTOH4mChETicNxiUbwFIQHh9PdN0d3veEMNCFDtm1Xr/Vvtrty1tevLwmxpnPf/ApMc68+/ZbTqdTSxgNXddx++IF3nu22y0hdBjrsC7oe1qDjz6XiynF92EJ4J2n73s2Q8d2u21DBwAqKc0aeEtzOas6mlNoQddWSjNVEISksxK0v77KLgyVTEpaycZZP3KqCKrFnqeolpunifE4kWNl029wtiNJoN8mMHAez5SiUHGVzH6w9LbJuIYOZ4W+a5IltHd60wmvtl7vH6fOb8c8M8bYUD55r+77oP1skioNkIZSTePSLNA+a/W+Btxy+Xxp68iqrHDeU6Wo5tmrLG0ZMuFChxehI5BMJsfClDKlCjGr018VwXiLy9pTN9aQzjo5ytDGb3JhN5dGkrOiciP1jtB7paSZWirTPHJ3/w5KZpzOOGvonVUTp2dQZ33vAu/7PB89rJcJHlbUmBxjqEYaQQdomjlD1bFlotT6q31P1ztOo+E8qytKzpUoME4Fb+D+YeLNt0dCX9mmHkwipQ5jAvurK2CAkiCPGAq+Ayxk6wBHQcB2iGSsLxRxqu/1HdjQDM8raZqRqP2POUbG8xmp4IzFisOIo9ZELs8nJwJpTlEKdal5hmapiLodldZ3SSUR89yCfsR5y5dff0HJE9dXe+bpzNAPfPzJ5/TDVvvrVE7nE199/U0zaN8ixtJ1Gy5BZr2s8CSw6gVuzYKiUFhZoZyl3pXGjDDrf+aSmaexHfB6X4TgCMFf7h8jOGfx/tLLViJL82E1ogdHQyKeG2Goy7tfm9ntpb0HQXNBAExLjqy+5hACIpUY1UbSWZVhSC2UrGYyMVVMVE9fFyEnlcBRBTFLxaB7vZCr3GLwYLQN8lT6oQPMVe6Srq4IXbdOBfO+jQR0GmzFNJRk6R18D9dKAhLTiGeJlF1LshRdUGlca7MYdO9ghUp1h3LzcW+3bAsIC8Gp1koq709KKyuJMzI1clCeZkrKzOO0WsVa1KVp4w0WS90N2HxFSonzVgdrzPNMTpngFxMfNecATQhqzWxCIACmVEybbiZVJzTpjfN8Ol59dC+jLp+up+SwFXIuZfUDWCRYpkH8pfnT51ybr3RjltuFea7jPVufAHJSJ7tZ/ZhTFcRGShwgb5Ay46RQRYNqVgqYtrFgdVQrouSvTCFLoVbBVVk5FUaahnsx2nnKWv+ON/evur53gXcZ9+mMrE4yrunWuqAkE6nq5FNrIaeJWjILQ9VZy9aru9T19UCp8ObuyLuHEyUmxtOZGAvfPhbOkzDFB759e6LvB25uZnw3sH29I2x3fPajz9hcvSZXtRTLOXL/7hvG81ED6JzAFmX11kpwW2zJWKO6RrxnzEKcM/GkNm9xGpnGEyUXPAbje0Y3kGxhzmfmeH62Hq8YnR/c91v6Yas3tPNIzojTKS01J1IpjPOZw+lRoaC7BFL58os/JljD9f6KTz9+ze3tLf+V/+q/yUcfv0YadP7mm6/5x//JP27QesfHc6YLG66vbOvrtI7ZYlzeqtnWUYZWa9dauBS2gtTmrVo8enoA6NzMb775ipQSofM4Z9jv91zfXGGkWU4ihODZbHvmeWaaRlJWFrvqX9X1x3uPs749ns+5LqiFLGwy0dcv8sR0X3TcmzWGLng679lsBoZNz3m0lKxEuyF4rAjzoRLnyJzheFaT/dOxUE1hnCDNrUrwbq0cr
HOErmOz2eh7Dn7d/3WvZelHaqKyv9rrISlLEBcdRLFU7e2ztvcu72e53s9VXX3QElmr+pQS53PGCMy9x3tFZDTZcVTX5t/WpFVtq3Cd1YqXqoe5GEG8xfhmEmINOaVmRNICTC3EODOez5yORx7v7xm9J406vSyOI2maAOgEgoV+8JTB8un+Fj650UTINivRlhOoFld10o8PD+09jauxf0raT40NKTRFbRWtMdTGNn+mjV3JmMYYihG+e8WXJHdJHBZtdylqRuK8mlOk5p8dYyGmyySv4D3BebxLmFyQGGGeqONEHiPz41FbkD5QrCXthDo5TEp0Roe/xFagqbypNr5IufAfkDYdrSW2RnvjwSkS563Buoq1OvaukNDJVpbnwO6/d4F3OZiN0WpmSTXXQ7n92TQGYhVDMbVxd+qTqlgaI1F1j13XkcUwzzq0fiHVTrFyOCVSjlg74aPBXk3gZoaqNoMWhwRHzokpTeAdZU6UWYMXNeqDMU9ISi0AKPmiNNJQiomUte+hNoxtRu0TiY8pzWLymZ6RxdBcD2DfKipPsY6uL7ikkHEumUKGszTvVc2kS6xMLdPrg85eHaczMc340Kn1ZCmcTiecc9qbn/UgWOrWp51aefLGtD/foL31oXyCdDRwSCeMLDUkxJiY57gyJa1VKDSnosQk0SC9eGnnnNbAoDK0sgabn5e5f+haKH/Cz8uhZK02l542DeqkFGourdLVAKn6T0N2hmT1QBGrH4q3NwZ5Ljq3d4Hy7eIRfKkiVgicJfAadIYpi3WTalhZKnZzCbhciEcL2eT7vJacpy5ywgZ9qpSnvZNVJsJK+tN780LLqe3PdSEfNBDmfQcnrYb095VVO35uRLUyqVSrtMEGGrdt63UqfmlaImZE8HZpx2jgLc1jfZ6EODmiVPXqzmaFyBeXP9UvZ9KSPD33dVpRye9UvEsqXZsBSFkCcKEs50y9GMOoZlwaGqHvwbTz31qDM6ZNK7JrIZZlMY6p2Jr13M9RZZ45UbLOJ4a6+mUvIoHlWbu8zqarL8vX9N8vrbDLeNM112R14PjA9b0KvAJ0XWC72UCeGL1rA6hRWLmoc4xCQ61masw6FwLeLob3yrIzDaa+2e3p+yty0qH11II3ESOZ83Ti8HjCHTPfPjzi3JmrR0O3+YqPPz/yep7othuuX3+M67a8/MFrjA9QHbV4cpwZH+9IceLu7becT0dyjKR51v7APEFJpOlMTjPeWa52e4WfouoLTydHTgbrAsZtno0IYYyh3+4Ydnu2uyu6vmO72+rkDRcQsesIvS++/An/2T/7fcbpzPHw0HSviVzhcDwyn0eO5yNv371hd7Xl4+1rXry64Zs3XzFNkWlO3N8d6LoHXn8cMWLbrbzMZr1cF9AgMU0zuWTGcVJCS4OXlkpKEXFZyVaIEOe5/fvEeH9uJI5M1/V45xmGXr1qh54QPOez0TZAC8Ix0og1TRIhz0OWWNbyQOdS1Ce2sZepei9WuQTcmpW4Q4XTnJhFmO8fCMHhe8/+qsd5x3avZLd7f6Z6w3YXuP3omr7vuL7Z0vUdp+nAdDrQdQN2s8F7w36/4eZ2x24/sNkEjCijvYVTWBMc01Djy5X66XVpAug/bsTH7+7d9zAWl6wBYLbN7zgnht4rrJyfMG3rol1/ondvcLIOUDBUyVpJOYPzmgQVceQslBpJGXKJjNORu/vEH//xhLeW3jqsMQw+MHQdzhk2g8c6Q9g4rG+EKWM0Ic1JWyRtSMk0Kel0YuIQj4zzzN3xyHmc1MccIZXKGDWIPZ5UFhO6jn7YPOs9rpA7SLOhWqyAclUVSsyl2XUmpnlqaNoB6y2b8ZYhzhRrkKi2ucfjyDQn4pSgZJwRdoOS4D6pt1zNG+6OR+5PJ8bTRFeUCDeIU2OZHJnv3zLGmXeHA7mCuD1iugvUXCs2LwY2Ci/nNtVRrCYwpgLGYqrBSaC3gd51dKGn8x3GKt+kPsNefq8CL0IzFld/0yXzX
qkvLfhqxmpa9qJ3gWkG6Go7GJcfxzL6y4egBtthmbE7UktimhPn+YzUyrlOWJNI3NEdT3SbLdvrPZiCtR8TOsf1R6/od1dADwykaeTw9hvidCYWQ7UdcTyDnChxZh7PlJTJMVLSTHCDisNLJZEwAt4ZnFM2auEZx9SJNJlHwIVA1w9stju88+y2e9zixGUtU5rp/2xQtyt7RIrRYdqgPen5hA+ecTozzyPGCsOgY86WbHWaJsbz1IZ+G5bJOsKK70CDI0spSt1POpklxnTpHdalpVIhVXhiLqLBU5GD0/HMNE/sd3umKQJCV3Wyj/Y0DTl75uAUFqsKf3nf4Na16n1eaFQTh8aQLO99VSuQS1mgwbcUYo4KYEUhO4NhQ3e7IQTLMHhcsJxni4sGv3FsrnTMXDd4gldGZm4jH3V6lva++z4QgkrJtLp9AjGv5dt3bpv338nP/BuFzZ9pw35Na0U6Wo9u0ZgaoxNpbJvFvBrxaCbxZEbvE9OSRRJDab3A2iRf0ry5qw7HqFBR17FxKtyXiDOGfT8QnCMYMBLaWEGj2vitx/dOqzqrFWyaK7UIaS7kBJIyuUZSiUxpZkozpzhxnEaqcVTjyaVyasSm4zxzHid6EcSH5wu8y49p+4SsOfJyS6+OYbmUFeWb48wcJ1KO5JKQnFsVXIlRn//cfMtVcumoVHbNbzqVTKoZUypz56m5sKkGL4KtmTydNSk/H8lVCNsNbTLjWvGuc9LbIIoipX0shEd9M1JNM0hx7cM2dKKhkc+wlb8w8IrID4H/DfAa3d9/UGv9n4vIC+D/APwm8MfAf6fW+u7DX5KeugI6uksqNakWLUVtiNMIKUCrJmozchGWqqoCqSQgId401qij3ywm6ZVSLLEMuBLJMXM6TkhOlMNImDLy46+Zpsz+Zk9KZ4bdlvl8Ynv9grB5Rbf7WHtw+2vCsOVwmkjVIOIU+qyLW8viIVwoKTFNqiWMkzJqS40YW7C18nCK/O//oz8E+Dsi8k8+dL9zScpQtWYNOskXguspGebjmVwyjw8narUY6+mHPT701JQgpzZ5KHF9fcvV1RVXV3t2uy3b7cBmox8xZk6nkbu7B87jSMrNF3nFUvVwM1isWM7zxJs3d5zPEz/54hseDycNvM3zN6cmtk9zmyJToGasEbyz7XDUw/Xx8Mi7t+8YNgN9FxDvFaIT1SIPfY+IwTt3qWREGIYtr1695osvvwD42yLynz7HPV6WoNpIJrU5dJWSV7h7ST5y1sNEqkKKXizBWgbfsesHfOfZ+KBzTnd7gjPc3tzwo89/iLOOeZ6ZR/2I04x1XYP4anOwyivkZwws48P/Yqs9T7++MvZZ7vGftUREZz97pxbUJWMaw1iKQYogRVqZttyb+j4XR7XQWZWuGTWEMQKlRFLUa6nBIjPHqHanUb0C4lyZSJjeMuy3BO/Ybwe69nm/HXR4Qec0WFvzpP2hN651llqkSXWEu4czf/zHP+F0mvjymzvmOTGlSiogrmC8BrpxnptESpGMkoS7uzP/r//vnz3bfi+EwGX8IVzg5bwSqqpqkmshlcw0j5ynwGk8MZxP9GLoasEay/X1FbkI94czh+NEFwJ912nhMDa/bKOzq3EWCQFSJo7K+u+t4HwbO+pDI3DZ1qJcXqM0n//GDWrPaEYHZcxRW5ahoUKlERWVcLfYTD4faP8XqXgT8D+qtf5/RGQP/L9F5D8A/vvA/73W+u+JyL8L/LvA3/vgV9Q2xEjFW7VYS81YIaVESan1WBro1Xo1Vgy2ZfRKH1fpT6mFIIuZu8X1HhCmCLk4fI3EmkjnifPjmZILsUw4iYxj4t23d1xdb8jzPdv9lhojVy9fsX+VMXaPCx3D/pZaK8OhVb1VmMdRSV9Cs0osSM3aJ55mhb7mUefFooHX1Iq3wn/rv/RD/mf/19//J8B/84P2u+r0ptPp0Pq20PcbulDpuy2lCA+PjxxPZx4fjpRqMCbQD6YFvQi5jTqUys31L
ddX1+z3e3a7jQbe7YbNsGGUidPpTK3C6XTWWaDSrFDa8IK1PSaGOGfefHPH/cOB//Q/+0PefHsH1oNRXe08R2rJlHii5kgtkZoifR949eKGLgRe3F6x2QwcHh/5NgT2854XtzdtRrL+MmcdQz/ovbI+iFrpDpstrz56TVH4+89rrb/3HPf44kNdWtAty4D1ddB6aaYOeTVNEfSA8cbSW88mBPb9gO905J5xBm/37DY9r1694jd/+BvUWvmzP/0zzuOpBd+I86kFjkuFpzrV0ohQv+zx8Wstaz/8Hv95S2iBt2lNa+v/L85UxUAWNYpLS39WK9fF6SmYDhsskgu1qgVsKYrS1BSpWHIuxNgkjgXAkObCVBKd7xmGLX3fsd8P9J1jv+242nYteC0EuycmNq13ap3a2Kp/jOHu/sQf/PMfczxOfP3mgZgrYdiqfWsn+N5o22aetaeaQbCUJnH6137zE/7Dd//ig/d7QRGXwCtmKS006Ao8Cb5ZK9XmOe3Gs47dOx8R59jVgnOe6/01xnqK3DPHSpeLOoOhCKhpSUk1BqzDdIEsiXiKEBMu2PX6Oh8omNZGMmuw1L650Yo8J9RFq67PX5REsZZoCnY1zRDe/99lDz50/cLAW2v9Avii/flRRH4f+Bz4bwP/jfbP/tfA/4MPDbxrryU3fe4yeF5x9dJubmk37NL0VgSzUHLUaqZp2TQ2ywr7ra4mQJxnnfwxR+ZUSLkq5NCIK8ZYNYqfC6cx8vhwIuXK9tu3pFxJ7Cj2hm6zxXqvrkDOMAwd0wHiPBHHc+v5jjCfIc+Nk2LbAbzY1LU3L5WrbeBqG5a9/6D9VnH7jJGNmqZTdYh5gcPDA9Y6xjbgXIAuBFI2pKRTcowYxBWVPUglhI5a1f0LNIB559hsBpbqKKVIypGYZp2dbNsraW80zloxTOOkbO+YW/8qojzdSdGNZltY4omSo+5dnkkp0AdLCoHN4PHOMI3LKC/HNM3qSx08zqoecCnazBJ4FzKbsVjv+eTTzwBOz7HnK5T+hFxy8axldfdZ4c12j2pyrYPBs6i5+5wzUiyD0/FvNUPJOpzAuEAuhSkXTnNiSpnYIL7UgsE0TdoXbLp4Z6uychev3eXXIz91mrwXnL/zd89psfnrOlMEaSP3vLYvfCF4dYvz3zVBqCiKth4odcWpl5nfy3mzoCU0RzYN1u87n5VGdCuLv/B3tksR7QZtit4npCZ/EdGfV5Yzq17OirIQqDSxExTFM23Wtc6x9lhbSFKxGShCb4Teu2fb75Wgt7ZrLnuz+sCv8P3T70MZ9G0PLzp2RRWsUUKkaX31WgoxRsZpZpwjU0zMJZNEKMZQrTLLq7VtHGXBUdFi36yv9Qk+rvHEGCyyDiwRUYGW1OYUUZeP0uYDlAtP4xdtzl9w/VI9XhH5TeBfB/5j4HULygBfolD0B6+aEyVGSk5IVZmQM4LBkqtmqOIEh2na3iZDqYkUZ7zzON8GNVujsF8LCAaQeCaXyuPDg17QaWacopqoi0EsuL7DO68TjcbCWCZy/YaucxyOM9vdlpvXB24/Hbl6cYv1lW7o2W4c2+GK07uveHy4Yzzc8+7rL0jzmc5kvCnqxNI1FX7V6rjSZrFKpbU4n2W/a8mk6Yh3N+y2AyLC8fEI9ci7b75FKvSDelBbKtf7vRqBjCp9CsbgREfSeWC/3VGzMI8JsuCtZzts+OjVKw6HI4/HI+fxsH547zCD9hxJevOO48g4Zu4fT5wOE+dD5PFx5P7hxDlGTnNsz6tCzXk+aOBNE5Ineu+J04GhC3hblNFYcuuLRV68/IgYC+H2Ftt1GMnUxZt1GTRg1WrS+Q4/bNs98kx7XpsOtPWv3wu0pbHGl4cZ1gMit8NoVsEuriQep4nkhNthw2a7QeYzxAnX75B+S5kj92Pim8cz92PklCoSC+cxYt3Eu7sHrA1s+i377RUhdMjGNJmJbSYlT0PsX37T9td1pujEp4HtdsC3+zg4Sx98G
yHHEytAWPSlYto8ZdEEY5onDM0qUlR+ZK3RYS3VtKCMohZGocmSkwaJubUWysVxb/kzBYyp1AyxRhDBlyUhqtQSFQptsLEmO5pE5ljJueJcYOgHLeCl4IwldButOpPWLtM4czqen7K4P3i/L4NTzGr8IrCyj1fzm5/xPc45vAtYY9ekQlnMDhcCoQvYaVo5IA+PR94dHrmbRt7FiVyEyRiqAwkqBSxdh+kHXK30RS1YY3Lk8t2gi+6zd9jWi85N25tb/z7WjEGIbSRjzIkcE9knpDhstUrC+sBn5S8ceEVkB/wfgf9hrfXhOxZ4VX4OQ0VE/h3g3/mL/h4tjLTaVerUMl3nQnRY3Y1YqpgGOT8ROi/mG4v121Jx1bL029RAPWe9iUvRYQuGxloTIVeIuSKpME6JUqvexIDp7zH9t4iB8/EByHRe3WNqSaQ4EeOkAvh5xnkdOG+af+l64VpvROn18EQRZz50v40x1JJXwlrOtEqykiYdBu+8I3ShSRkUYl5g0JVo8kSaovKf1ldHh1/3Xcc8R+R4UMJETjo9xwp6iwmLiYXqn2dSjCtstV6noq5Ty5CA2ibulKxkDFIiCcRpwlIVtYgz8zQzTbOOTjueAGHT9SrrEm1b6MQYRSaQZRz25VB8rnu8H4blsj793idozuXP7Xvb1Vb7wbJUvVUd13xq92jR+zSmwjRHTqeROUbOU2SMkTnXxiqtpKT9x/E8cjqdOByPPB4O9F3CG3Ud6kJYoc2nL3h5wuqTt/rscpTL+uB7/F+2lvvKmIthxoJ6rMIoWdzFliK3kcZau0r1zKuYaCURwaLJVuSntgp2+fvcmNRPg+4F9WiIdyusf1YXfZEVrvJJs7jPXVzC9Fy0FPOkIm/XdPU3eX9o/Qfvd993S+l6qXyXs3gZuLJ6v+sL/a5F6uWvnnytvfbVOIMFcdDqfgmSuUJpv7/1FdeJWVILRmnWFyj0aey9VDQrELZ8qcpCnVuu1+Ukfj5857L+QoFXRDx6wf63tdb/U/vyVyLyaa31CxH5FPj6Z31vrfUfAP+g/Zxf+B5Ka8aXqnW9iE5nqVXI0VFzajaAunneal+3JB12v/RI1geHFsizgBRse2A6FyAIc6yUHMkZYtRAP86ZXOA0V05JjS7KbPBVKA9nhjHyzbs/gD/8M1589JIY79hf73n10Ufsdjse337B8eEN0+nAHEdKjgSn8EauhZwnzYyrsipjjKTUHtKisDfwO8D/+EP223lfRWAYeq6vrnh8PHF//0CKkfPhQK2V/dXAft9xGs+cH0biOPL2my8Yx5HdMDB0PZt+YLi+QgzM06TwedMra6/1FmstDw/3jOPY2IVn7e2KegPHov3503jg/vFALsLtzYa+d3zy6hpTM/ePR0JjIirjtDCbRI6QpJJq1Ex1iky5cn/3SE6V/VTIxXA8jTweJ7z37HYqs7nabbm9uWK3Hfjh55/S9522HIDj8cA3X3/TdH/Ic9zjV7e3eixLs7EERTYKOqGl1tXJBwEflAxibTNstwqjxVw4ns7ElHA/+YrQBx6PjxxOJ3z4Mb//z/6IUivvHh91qPo8k6tjioX7+wPjeeZfuD/iyy+/5osff8kfXP8BN1fX/MaPfoPNdssPPvucq/1e4fYVljMtCHz3xPq1rQ++x3/emVJrYRonzlYo3lGsJRuhRNU1bzcDrs3NrrWQcmSO6ohGlhUWFRHt484TIjqwxTUPYde08aYLlFJXDXucE9M40ofA+TwhCHPvcSJEl5idwTlD1wWMFTU1sYuHvCYF0o7m7HS62s3NDR9//BHOPfLFl4+Nna2Bbk2gpJJybNdQE0zjhDC4pq3/8P2+ub6qxjiMcRjbPhvV3O73Ozrn6IMm6kIlNWnlPE3M3pPiRMkztQS0wW6RmqA6rEVnrgdH13nmpOMUw9ThasE2cmLN6rZsZRmxqH3yWlCvgPfw/dKSeCV7gSgsLZrIuKZ2iCm2f6M93zoMhL4jdB3eB5zzS
kzMtfGKPiwc/0VYzQL8L4Dfr7X+T5/81f8F+O8B/177/H/+oFfCkoHU1Y6QlrGKVZMA40THM5mLRlezWBYU5qeOijVDXedpanZmjcXZimlG/s27AEDJXMBcYCqVXMBmIYngx0ROiXl+ZBwj0/jI60+umU7X9F49VqfTA3E6Eedxnc6hBt3LDE6teBdz9qdwUs6Ff/8/+hOA8Tn2WxMXNRA5HjUozvPE6Xxs8EImBMscBWomp5nz8ZHT8YQtuTlyWZqtKjlFUoytxwS+kZfmada+TVkm6ERKdmsfvtZMLomYZqbpjLFh1dzutj2nY0dKkXmaEFFZBbUwRa9DHbJllpZkZXUZmsaZ/z97fxJry5ataULfmJXZKnZxzrm1+6siH5GQCUQGCpFC0KKQUtmhEEIIkR1AmUJKCZqRICFEj6RDOxshJQIJIZF0IKUQIBrQQQmRERnFi+q98OK53+oUe+9VmdmsaIxptta597o/dz/7Hr/XOfNonb332muvZTZt2hxj/OMf/7BmwLqA7waGMbI7DogYulWPD573nj0ht4YDH3/84UWkW4jTxOGwI6UM8AfA//Ox1vgcCcy55SKv14vOQh6m9WOe++3O6n65VqZJexk/7Pb4wXG3e2C/32vTbppz6ZzC1bUChpRhGJVg8/LlK46HI4fdnlcvXvLs2TNWqxXX4zXvP31GXissOUdWqh4mzEIC33xHPep4lDX+TaNWSDkRU9QNurXrk6UKQlpbQIV2iZUxnpExUMUjEaMlL40HYTBUUzGmgtNoWdsHVuKk7HslXLWyt5hJvvVIvmCazwie1lY7nHfY9r5GwDYhGIPywPq+Z7PZcDylhtpc5FpFCYuFJs1IXaJksVry9B/8/c8eab4FWk7WGLuUXBljCMHTBY8zlbkNaJn1mpMKCJWcFq5Nq+uBRj7VMjizyKc6Z1vXOZV3FWP0Ol7iD20eZsGLs/pQXZyRxbJUTe9YlO+ijHLbao/1VbmRQKuw9Dae20PKvLAeIQT+VSLe/zzwrwF/V0T+dnvuf4JerP+DiPz3gR8D/603P5zKFDOnMVJiaiLjoCmXi9KcZWs4882ssYjTGyVlbRmda2v9OO/+zBCftNZlwma9QoyWZezkTKZINZPzTJTIDKN2fllZj0MXxNXGEGxl9/IL0rCjTkeer9c83D2obqiprLpALRa3NN5uGqVFz7UWVbVSODHxp5/d87f+7BXA1ZvOt4hq8CIKZxtrWTVPPzfyUq1KdMox4cTSu8Dt9pqVC9ze3HJ1tVWB+fWaLiihZ5pUhWdZiK3R+HF/YHf/wGl/ZDqN9CEs7RrNak0JGWcDV9trKoaCYxgm3nvvlkoh9No/9rKuMm021BzJOZJTbDCawoartTZzqFR2Ry1h2p9OWrfZVJ1+7wcfI86QKUw506NRe4yRu1cv+exnP+NP/uRPAJ4B/8XHWOOXEJw6hE1bt+gGPJfAwRxd1kWhy1TBFjCpIEOkTInTlBgMqlE9TRhn6buAsZ7tzRNc36k6mZ0XWbtHKoyTNgE4NcnSq+2W/X7P05snAGzW69Y4wLT6eJBZ3PaSgPTtjDde479o1FqbXrJRNa9atY2fMSgDN5OyzEjla4SxhoBoxxvnCM6y7Tsl+zSnZhgGpkn3mVqUSzKOqqscm76yGtly/j5lclYmtLV1IRqdodUzMW8WGI5RnfJxmhjHkRTjki+NUXswizMYb8Bo7+05YVWrntuLh4Ef/fzuceZbVD7UNPUMaeIrl0auVDW26vhkUjt/Te0lcpqWdpS1zo+MNappMOsaOCsEY+mMpRfDCiFYQzAeKZWuWmyubKzFNclMWuRaqmn6DxpqqeqYzmnJsR1wK0Ot0DkHFUzV+0Zo5LHWitG4ps/9SPfEr8Jq/n/xi93e/9KjHMXyYTClxHGYIEckNc/N1tZCqpWm6Jzp9636zliDNZ6K0ucrrSH7ZQ5Rz2cxSMZUjPGsVqiiTWtlNY0q85abF5Qy5DRhjWHrw
Yuw6h2btcfZwsPzTzkYw6vPf4axFu8DoevxpmJXfUv6JPXyoOWXVexfF+OsA5745MbyP/tv/HP8L/69P/0Htda/9ibzLXLWRS2lYK1lvd4QfdA+sFnLIOZNOYhFfODp1Q15teHps6dc39xoK0XvCMFRsjZvLzkvzmQtWp98eNhxf3fHcXdgPA3k9QpvVKayc7oh3N6ojnVMmeMQOQ0jH758hbHCerNle6UbW4pNVL22chuRRiQV7cOrjjcC3N294OXzLzidjnz25eeMcWJIiVQq+/HE6moDVphyIlMZxpHhdOTli+f87Kc/Zt0HgP/vN8z3rz3nOu+vR7zSjOFcPjTX8V5eJ2k5c1uaPm/UNVNq4dCQE8WwK7br6PyKYDo+fvI+6+trwronrHpiipymkzoWL18yDgOHeCTHyDAMBB+4vrri/afPdOt59oy+79t9IQtR6BcmHx93vPEa/0VjTuFMlrPhrZXinDKDW9RZDdA4CLpHaE6xomQp71WgZNv15Jx48eUL4jQwjgM5Dy2fq/dvjCoHSYvmcjM6qWkpL8Y3Waqri7TsmSk9G31tXkJV5zzHwjgOmsaJ0yLaoekF8L0n2IA1RhspGCHmSMp6Th9/dMV/97/+V/jf/p/+ziPMt2Cs157bnMugZDa8UlTlq2ZiSu0xOyG1Kc1N5DRRSqQW5cTUchYT8k6lIoM1dNbQWcvKGNZzbtmqytTaCa4IkovyP3KiJtWqLsZSRJam9Zdq0rnteyKValSpzDmHIJSoKJM25lHZX+Os7jm6sB7F+H6nlKsqSkqYYoaUqWNsMLKyMHNblFqPOBOozkXc82YxR5aXRAhkjpXn0bxaa7DOUItlswqkZOicQiRdrYylQU9NW3nTB/rgte/lLClX1MsDmg5zXTruSPOgFD5qsNYF2aLUs+EttS6b9GMMI4ZV37NarVivVjjX4f2KOEWsQIqRvusXhRaxmv8MTYP56XvvcXN706DygnWtXKHlBa3RXpfzBjKzZS+JHs55gvfLdbF2jswyLoPLGes9NnhsqIiviGk1rgJSVbhfO8I0NaFmeFOetOa3QCyFYoR+s8bnjg4hA9c3t6w2G0LfqzNWClPU6GHuElNyepT5hpnvoQQpXY5t/czkvlqaAzmTSVr9rnf6M7PXrYZ6XiuzERQ4w3qdKlo5b+hWgX67wiVHGQoyGax3SDKYouvbNl3wc93o64+vsVG+9YD32xuVc0lXynmBMWkwZWmduaShYrXWOQhSR7JWSszkKVHFYDrN+a5DwAEpKhdDc7qZMgv9iDZvN2LovaUPlj4YgjN4q80PrOWirZ8gNbdGCPP+dq73jlNkGhPTGIkxEltddi6ZGiOpQKZQpGpDiKJpIdVFzucI+lsYs7Mwf0aMCakVWxO2ZsKUOI0JYyPHMeJCZIyFmCG1XsYFhfKllOag6DWbiVbBOXrnWYWOzdyEojnj06AqeaZq1JqaQ1uhpSs11gV1Kq2gLQSBWgWLYFsgbJkrdZuIqsz1/jJ70vre8ji783fK8AIMY2J3GMnjkbh/wBrD1bZqhFoSUuoiUG5QQsp8QwENhDaNUl7InHPB1czKwZVS1bPpu46+88QOVlaZiLPhyCRtHlAqNWakCs526gQYKKYoiJGz1n/FCagM9Uip2rChawSMzWpN8IFctN6y5JarzK07R6xYl3C+PtqG55zj/ffe46MPP+Tjjz9GjMeYjjhFvvziC6ZxxFTt9NT5wLZbqffX2NlP33+fm6e3HI4Hnt89h1pYB+iCpfcdnfN03ml9pPP0nerQBqdGJDjPdr2h67oGpdKaQlhcTmQzUIwhbDeEMWIZkWIpsTDW1i8Yq46WdZjW1i50Dijsdy8Z44F9SuxyxAXHx3/weyoXud5iQ8cnH7zHxz/8hCc3V1QxjFNkv9+zu3vF4bAjx8ftx6upkctGF3WRLFTPXjdZ39SJfNAuQuvNGucspAQpUadIPiaVy6RB0i1l0gXPzfWWsO7pN56wt
lw923L9/jMtf9l7xmFkP+xJJuO8o8ZEv16zWq3o+xVd6LWhuQtYo1FSa6J24dF/fy1vrZBSZrI0Nr3mz4vpqNYsikWlVkzWjXpmBJemSDVWoU4ZuynY1RpnHKsnt0itTDGRWsOOw/5ASplDyaSYGkHI8/Qm8N5tx6rvuNlYus6w6oQ+gPcFS9SES6242mr7qxr9nDQX/HC359AU4XaHA4fjwDBNjFMkjZFStce4C14bw9hGqrIFjLbO1P3x0WZWS6g4d07SErnEbn/QbltVE0lTKhQxrKdK/+rAIRuuj5GbCK4YsgQMjjEVUo0ch8gwKlHQWUPnHTf9GpsNNgRW6w2naeDl/oE4jdw9f8F4ONGHQOcD2RpycFQRctVeQmKdOrOiLVKlaiSrWv6CFD0HW9XIJrFUKTg5NxbRoE1798JFGvkNxnfO8CqRoZBiZpqStkyLSkayVbuvNOqD/sFXJ0HOKiOirJ4lMl4gEWbwoWqXEAu1mMZwPNPhtUG8lt80d0lzG+IQChOg3YnOpJlZGjCVTBXVEi20CHh2lV6jq9flhtOE/+PNpRih7zpWfc96tcLYgHM90xQ57deMxiKtkWsfPKuuw1pH6FZY77i6uuJqe0MVoTvtqSXjXcY1eGyeScNcvK+CBXNkJRdCFUaTnY2QoVHY8jvrVPTEZcR7ldds9axUB6hiDdaBU4GUWnMTmUjEkkm1YMXSrVZqUNYbXOjpV2tlJjrVwK6ltg1zaiSxph71eLN+jhsv1+YcVV0wIueo1zQjbBrDtlaj5UXtD+cVPYM3xpiF+KF8kuaMWlm6FumjyRF60T7JfceqX9E34plzyqYVOXv75/vq+2t0dVzcjxQttcqK3MwEpItXMgv6zijZIljR5CPn6+aMYWYdW9GrEr3TloPWQFGFJO+08sLKQjdYSpZmx762vUMLceeDqQtiVrJC4immc7MYaWpO1rQGImdSJkW0NllEHWrbwpBv4VIu+yvzUpHlfrdGcCK4ELTxi/VkDKnAmDKnKdFF1Wm2dWEknEuOGtRrG3HNzeVgDdHMDcIep4lhGDQAs5ZiGp4hC0h5waW4ENCppiEN51U/4z0qhyLLNfoq+vNI3KrvluGttXIcEy8PE/E0MTyMWCNkDJ23rJzQOQhYxKtxLLMcHoBUjNVepKWq6IMpFRc83oeWf1XocqpK/RcSJevN5H1rKTfDhIpZ6PtK206NpYrllKCm1pdjNtRN5FsQrCrDKouSSoonxhpp4pF6MxdRQW4SziS6rrK+erw9z1vHh0+f8MMPP+D3f/AJuUDKQpxG7LAhjZbeQGfQmx8LxkG/wrjA6uqa/uaG7ISbeKSUSKgTTnSfmoZImjRfYo3j9vYp1gaurm8Iocc632pLC3VWws0JKU0PNxctAXArTFdwJRKISEpUP1KKbnCCaeULHmPAuUJKE5+fXvHqxaeMpz3TNOC9Y725pe9XFFEnSqrDFoetDptVIOH4cODVFy95ePXA0PLbjzkWJw9QA9B0py8gZxqUWEpBUqYkjX5qitqZpqisHQZtSF2lZf4qxQoSPFjLcRwZKXDniaYSU+Y0DqSo5Dkrws3tDbebK95/+h7/4j//n2C72fKDT37A1dUVXehfh9S4cGq/z6NCjYlCJVKIFKQUnFGUa7vptUJCVLoxxkiMk65VKyAWEzyu68BaxjiREhzjCLWw3VxxfX1NipHgAykmnDGM09h6LAtS4XQ8UkqiD2qQstdWpblUphjJWaBqR54Z8i9V08SltLrsUvEhcH17TVitwHVMUTkSU8qtMcG82to1tLUp/FXMYwa8XDqMqoXedR1d8Pzgk4/ZrFY8e3LL9dWWvgtcbTaKNFQtn/vy/sDdP/qnfPLJD1hdv8dmI6yvA6FfUXCImRAMw2la8vAIPOz3/PzuJQ/HAz9/8SXTNDHc3ZOHSZuKmI5qKslUZgnu2UjOxtUZVW1z1WIxmv6q53mvLdfPQrCasw/1XBosv4MRbwWmVDiOi
WlMHIeEN9CHiVIcFlWgqbOoOLMgfWsVWJWIZUxjOjdD6I3FN8UZ1zqSZDPngxUCBGXRQdNQZXGZNIfQapaK0R7AppTmqV0m24XakvmzGL2gGrklRVLNzXC75kWz5KuNZJwVuo6lpORNhzWGq82a2+srnt5eM8XCaUwkA3ntybZw5Q1rZ0ipMsZKNZbceXAB3/e4foXPE92qpyaDyxVLbZ5nXnoLG4T1akOtQt+vsK0hgZLc2jxyjvpmveJaK9iAuIwJDlu8Qq1VnRODkjiW2kGpWBuZSKTpxPFwR4pTq1+shLCi6zaMrTuKVGVeLh5uEaZh5LhXAlic4qLN+1jj0ui+hoZ806MUqhHIWT31kvV7LUrUNdLKhRQlqQvBpBpR8goFezrBXttoxhRVNauVB61XK54+ecL777/HD37wCdv1lidPnrDqV0s0/dVI93tvfGtVbWzJjX9RiEYYTloWMismqd4wkBWeLLWqgI4xiLMYp7KjOWcK2tik5sR2c0XfdySn2cEYI9M06j4xO1nANE2IVFJakbNjJrSXBoUXI4rimbkdoaUilCpLr9hSqzLZVz3GeTJKTnTHkWGMpFyJqS7vqxvhvI/MjUoeZ8yR7tn4qmxs1/U8ffqU6+trfvDxR7z37BldCGw2a3JOvLx7xTiO7I9HhrsdYbXlMIw43yNGNZZ9rkuK4LJ/NMAwjtzvdtwf9jy/e0WcIuV0osZMXxIbUeW/LGp4Zxml2fpKyxkbDF4sZtYtr6Dere5VibPBnadNt/S6GNzfuYgXIFZhKMIQKw9DwhnwfmJMGSE04kpFbGsmbfXGcVZrIalzlCkNlhOc14VhRDBOa2lzuez/2oAG06KyuY9UVfChcFZYKVWLt1ODgIxUtGKnQeDSAIuZfd10X3PNS5ea2vRZa4lQM52PSEj0PXS+PB7KJ1o2ZcWcDQHK1gyrnhoct9drbtc9wxjZ7wdSEQ54shhyVdm2w/HI8+dfUtKE1FGN389WHHLk+fNX/PiznzOOE4mK7zt8CJpnte5MvkJv1pwiNWv/3v3+xHFMHPdHTseR46B5npozeZqg1nNNYGmF8GSKjEzTSeuNd/daKxgTgzvx8LBTskvLs5YScU4wppLiCBmOhx27/R2n4UCR8ppK0+OMS+NazhBaG3NXotLY4NYYXNGIyRlRRCEX7TVtZphT8L7DOEvYrDHOq1GwTjcu43DiEBKpgq0QrMd54cn1DR9/+BHvP3uPm6sbVqsV3vnmIMx5Xc4O5Pfc5oLeh95agjOIWJBC8A7vWuSZEzFGrNPmKillxjjp/YlOxdxZx2QYk8LKs3j+/jRQX921rkSp1ZYbMIEYR+IUiU1tyXnHaZjo+8Bms2KzXRO8ZbvWvryb3hOcXQxvqZUp6h7z8m7H7nDk5d0DL+7uiKk0gZ/KmBKpat9nXAs2GmA6l7+UVtZDfaw1LovjJ1boVgHrrri+vuIP/+gPefrsKZ98+AFPnz5pyMKGWgvHk7bt+6d/+mf89M//nOF44rOff8pwO/CDH/7eglJiLDEXjHfU0XAcTjwcduzHgWPOmh7b3GC6RCFATJTQkY0h1cx4imp4JVDEUty5B28uGURzv/aiS9dMLNQ87pl4l2vWfZu6dBU/yzK92fjOGd6pwCnDYarcnTK2FVb3Xm8Y6yyRQhGFIbxXr7UTrfGqVSDlZVMxxhBcYNWtFhB/jhxy0eJpZf1pOz8AJKGKJ2qUa6P2zxKDs+GNMWKN6uzODRn0i2i/ztlToimiUKk1U3FQM7WMQKbrJ4LPdKvMKuRHM7xGIDjXNIpZSkXECP12jUjhvY/e58NnN+x3R+zzVwyxMhwVkk6lMMbIbr/j888+JcaRaiIilYc48dOXz3m43/Hzn34GwJObW1arHt8FzbP6MwN6KZeoqgJ0Oo3c3++0AcXuwOEQOZwih2GCUpRIh0Jm1lgKunnWmijlyDgeOT7csbt72dRqwIrj7uUdQz/ieof1llImvAdrKlM8k
Wpmt7/j7u4lx+FIIT+64Z3Z6pd5/0vDm9vaoSqb0wqYQY3upu8xnTbJmC9aRkuJwnpFt17TbVaYEDBO2ziKtTjxeHGIVGxT8OmsA7E8e/KU3//BD3ly+4SnT57Qhe4rLGZ+p4wu6J0YrKVzqohkLFhn8M6q4U2ZaZrw1QGWlCLjNFFrxTrt15xqRbJuvM4oiGuqvvfD4cj+OOqHNWKOE4s4Rxwi+1OCU+Tu/oAxwouX93hv2W7XbK82rPrAe0+vCcFze7WiC34Rasi5cBonppT54sUdu/2Bl/f3fPHilRqROTorLMaKlquf+0zblr+PMRKH8niGVwBrEKdBjO96NnbNe++9x1/+j/9lPvroQz58/z2e3F6z6nuurrYLGpmitij9+Z//jOPhwE9+/BMOhyP/qb/yL2n9sbGYUImlYoMHa9gPR17t7nkYThxSZjQOuX6iJUR01ClSeke0QkyZ/Wmvqa/uCuO6ltdvzSVqplK0jOi1JiFzHbVoMkcqmablUPPy3OW/Nx3fOcOraVUhVdVJzsAQ1d84jBnnIsFpbtBYoa9W9UGtyu5V5nINJTbozM5wy8Xm0nK2aowaFGHb9NcZ2+f8/ww71NzIUrnVmAqt4xrSNlhpRrrWRnyZDV77WmtGe5EkjGQ6V+k7CAE6Vx9x89MbsWTt8pFi67bRFFwEw1QKp5gYUiLmQiqVKtpE4DQMnFLh/v6e3W5HqYnVVcAGpyU9jcRjvF30UVV88UJAgnl+9fm5XV2MidPxxGmMxGkixUxOkRyTQnUlIbXVUEtr4k5qhlfrKKdRIwsV6fDYWdHMiKreeEto9cfWGdWpzokxTkxJ83lizSNGA23WFxb9DGeaVhtoKFKW5zRFok6bbdls7cXrz11oamkbrEJoysxHayMbVI0RLcfyjlwSaVIRASuq9tZ5TxcC3vmlk80cuczr5Pzld8cAF9QwKRmpIV99wPumFGWVqpmTShGaVnI4l5bMq05TJPqeC7mmoLW2LLQ3LU9qbN7U0lzUiim6jsWoRO0wJmqBO3vAO0eOiS54vHM4H1TwZNTSocNp4jQmplQb/CykVmI280WUPGpa8lUxZq0/dZhSFBl5pDU+v4ttta+gBLBpGrm7v8N7h7eatz52gXEcEDQtFWPk5fMXPNzfUXJhvVozjUMr6Wu1tcz+5sX+Aa/5iEt7y1bys0DFLT1VUG6mXq+my480mVahzPrV9fJt57TQrFWvZYa5qLhHrU27novb5g3Gd8rwao5CSFWYinBIKMOPhDOiOrTHkc4Ztp3HWcP1tiN4y1UNgMc2qEBQjWcrlpJSM3bndmjNdWM2ytZqbaSIlhPMRKkKakhroVbIeSIXqDEiecIUJWIgLIpEqkyltYPk5g07pw21c2tfKAnvjlibud1arq+NknZ9ebQ9b/7cYRi4u7sjV0OuLe/aqI4vDifGnBlPI4fDoL83AbGWzz7/nBd3D9y9/Jyf//hHhM7xx+//MddPb1htt4T1hk5gfdA+xlUMqepmtLQrm8lEs+5tKYzTxH6/54vPP9eI9yCcJhiHyDREpMxF+FU7UlWhlvlropQD43jk/tUd+4cHttsrtqstXegIwangwaanXwdubjbc3GwxtbA/7ojDwP1+x8PxQDWmRZePZ2VEdFOaBRIEZbrmlMlekZjcuhZJqZhS8MDWCsEIz1aakx9SZD+OxHkTLhlPJVthKplyPFLQ3FcphZvjievDqZFpKtYYbrZX9F3H1WbLzdW1qrTJTBc5C0PqgT/aFHwnRqUy1YKhaMey3rPerHj69BrnLH3nMVYYTieGcdC6faf15jPZLBhLsE4JNY2/2SpulyYroAStWrRpR8mFaRqZJv0DQXOLvjikeo5DZUojtZ74+acvEYF1FwjOsm4qYqkUjuOgClnTRMyZKVWwKxXeiZNC3o0urZUB2ghe814G13U4H8A6irGP61sK+C6wrmuG04n9Tvt9/4d/+2+x3Wz45OOPeO/pE4J3qo2eM
rv7B8Zh5B//w3/Ej//Zj3j23vt4a+i8ZzjuGccjxjh1GCjYVm1incF61VowVjAFTCMrOqM9eq1YqJZaLSlrUxzXErKlqMEvRpX2VA+9nvXJKxixmJbdzSVqYDANDOOBfnTkNFGzRea8M28eHH2nDO/icTeIt7QWWakt+jFXTGs67UzGVy09EiOkpoVK60Zj55D1IiGuiMfsn5qz19PcGGMV96/S6jCbV7Xk2krzYpk3WI1WzHLo5zh5+dwlsJjZcyCipBdjDc5VfHB0QaMT5x6RCCGq6FWqasdWseeer02k/zSM1BRV2H2KII7aKUw/ThO73Y7Dfs8wDFQ8SCt9aa31xDSFl6WUqpxz3d/gGs4eaM5p6SoUJ0tOQk6J2khBrQP4IttZi1CKUEsk55FpHElRlcao9TVBD2sE75TB7pvsHzkxpaRM0lJa42wVmXhsq6MdnXR9mXat5+eqzG3QVN/XZs3pBqOGNzhHsJbc6kpVO7nlqZj5eHL2vpf/LyhSVWFRZy3e+UYsVBjzG4729a+/Iwa4MkP+rbzEaF7SOdfKsOxyX84ysqbdn4YZOjbLPS5LkDRrBTfBG+oiwJFybmpVhdRqXYW5SbxGvYpgaNvIaRygVlKX8M6SiwHjyblwnKZGlFOxDJWP1/1QI9/ztZ8JnZqBlPO9J7qX0s7rMed2JlYhKtYRU2S325FjZN132krUWbrgSSnx8OqOcRh5uL9nHAZSnBa2f87aQEVcq7GlnqNec1khoGc4O/NSzzXuchn5LvlYNcKlNFWyUpQc29BImMHI874+E+NqzUup4dx2lAu9iDcd3ynDKwg+BFarNeM4sd5sqKUQbNNrtpXccqqxCSuMxVKzZcwWl1RqzAatFbWNRGL6DfhGF27i47U4kAK20ce9xXQ9gnaxKFgwAfGrRW6OipJeSqWrsNW7uwl+K/Gn1SC1EqdzfaQ1DhGLpRLQkpjtOuMdbJ85+q3Bewj9PBtfvvl8GlWumtv5GWtxoSemxO5uzziN/Pndc067B9arNTdXt/huxdV7t3jvmcaJ+/s7jocDKUVcFtI0kYaRsRrSlDk87Dk87Cgp42eFsZSWWt55A5iX69IkIJelpd9xn9mPlSlrH1FDXW6AkuYGEgqz5TRyOqmHPI0jJWVsU+jquw5nXWtV2LNZrelCj7W6mZ2GyHAacWHF9ZP3MD5g+9WjbkqKMniK1lHNZ40IOG9VZ9euKX3Bi9CJwQO3outi1QVVrioFaoaa2mZfUN4BrNdrPvrBD/Be7xXnHDfXV2yvNwynI3d3z6m1crXe0vlA33VzDucMQDzmSX8HR6WSasZUGFOEUVGth/0R56waB2sw4lj320bQiy0qUjWxzWrNqu9bnbXCkMN4JCVVjEqmFZrU1GQoJ1LSVMYwjJydbVHCUOu4VasSssbTEQGeXG1U090E+rWWwqQs5CLErMsoVy0zUlNTUGlGgdrKE2XuT67GZhxjU9dSEtnjMZsbdFvPtdDGaI40TpET8OLFC4bTURn7TZp2OGhHM2rmow+e8uGH7/HDjz/g6bMnkCZO+3tCtyJ0PbWV1lmpOGtUAIZMmrSXOikjGVxKSK742uOlp1iD95oKFKNI1mx4zTes90tjLg3ScA2tN7VQ00SJI3kayM6ABIxzLcX4ZrP4nTK8oJKC3gdC6PCdCpO7JrGmWZSijFuUGh6rRYohFiEW9WqLKLHA+BXWe8R1YJxGrtJgFyPnkg1KU0ZqAuPWt5yPHtOc9wWQ1s937htZS4smS6GSWGqKW/5HNzrBiApBzDV+3gv9lRC8EDYWvzKEztCv7HlFvOGYtZqNtBxjY3jnUpiGidPhyOc//4IXn3/KsyfPcD/wrDaGq6p9PlNMnE4nxmlsrewyJSZKTEQmcq5Mp5HppFBdMQ5vVW5yLtW4PJnFr6xodJyUjTyNE8OpkDHUahtQ0fLkOVNSZS6yzykxDqNq5cakJUdiWmTXyFyikV4XupbXtGSEaUqMU8K6w
Gp9hQ0dbr15VCOkXAHduNMFvqeQo2YNbQs8gzGsrSMAW+Voag15y97RDG5tTHrlkFVC53n65Amr1ZpnT5/R9yuurjZst2t2u3ukaj/kdbfWeWkQ6uLYKzD02nn/YsLI99dAF6oyk3PCJJhiZBhHfHa4uQzGO7y1FMnaextt6K6Sjx2r0GOd4AJNa3tQzRxTW0/dWZlMo6RctDNXTE2IX+xieEGRJhXFiAzHk5Zpi6HkymabSKU5mI3In2uTV6yg3c3Q4GF+br6OraRPmwKJciYkL/2xHy3HW2fnuT0hLJFvygmJsD/sidNImibG4aSNYMYBSuH2+prrqy0311ue3FxzvV0jJRGHk5IorYWcmGtNTBOBoWqKruRCjQqDmQymKD/CilN42nVLVylEFoStttJGlsO+OIGLn80cbVd1GmpO1JSoOUJpKnqPMJffKcNba+Xh/g5q5TScGE4nKIVkpHmiyuezIhytSn7dnyzWCv2Do/P6fWgNo/vuHmstPgS8b7m81nN0ZrvN+S5jVWAcausZ2XpbzMa3no9xJmPNuZ25vR/MXuD8Yi4ur3ZDUg1QLYPqe/26Xhm6IO0GN4+21cWY+OzzL3lWHdfi6W1HMB7M+eY/HU/sdnvWq+3iOWv3E9HNv/XRjdozjRdfvGQ8jYuQyHAaeHh1DxXW/Yocgjau/wpUBFozZzBY47DWq6qNK+QyEVNu+SqDFcE3Rktp0Lx1ChlHB5I8J/GsgsKy3jqcDVjrqM1sWefxIWC9by6sxfiA6zJXN8/w/Qbje2y//kZI/E2GVBUImVuilbkVXSt5m9GAzjq2Tft3VTOuFnLUTkx4Q5CAxSPbniLC7fvvcf3kKR98+CF/+Y//iNVqzc3VDaHltr23eAvj6SmlFK42V4QQuL291aYdIbSWcuZiYf9FZ1N/lRd954aI6l93wS0qaykljofT0rTDiAVvsNa3HK5psD5AZWgsZ+8tK/HqVLc17AxaUiMq7pJyZhgGksyNXMoCAdcqS0etrvV2LTkxDSsMcLNd6VruPYhWNYSuEewmqEmVq0q+IG3W2ei3oKQZYkozITOVpaFyjxbwonC7GGk9hB3W+wV6FqPr23ll2IOSlTqn6ainN9c8ub7i6c0VV+uOVWdJ05Hh2FIvVh0cZ1VJMFhNlbhqMLFgSyW0Ewydx6LSjjEnUlaFQy6yXDMJS1G2lr5CVE5zVsOSdk6wiI30nWOzWbFed1jXBJKq3tNz96o3Gd85w/vi+XNevnixQJKXQ77hh9lf+SoB7uK/i/zrr3U0v/xX8he/7C8al6RSef2bRxnTNPGjn/yMYldUt4aw4coFiLm1L4vsd3vuXt2z2dyQMRRjEWsaw7tqh5wYGYeJOEY+/elnhC6QUll6i6YYsdaQb29Zr1cXrcu+zm7W/qMe5wLO91hXSfnAOCV8cEu5R7Atb2wEK4VVCKzXHVMEVzp6G9l0npV3dC7gXYc13cINsD4QVitcCFpqYR226/EYbrstIIjrEb9+dLsiFWqbFzW8SqqyrhncriMET+89V12Pk8qmJGwtHHYPTMcRcSpEItZws1Hk5v2PP+bZ++/zycc/4K/8p/+TrPrW/MI6hT9TZLMKCAmo3N48pe9X9N2avl/jfZOInPULXxtfXcjfP2N7OYwYuhDoe685uqqN6tMYCSGw6VdY46AajPHaX7chQ6VNxWkYOJ0GVn3AOe1dTFFxHGPV+NomHpFSYr/bM0lteX11wEuLvoz1OGe5ut5yfXWtm3iaECqroKVkfReoklXes+s0QGilLCVWamo565nVPDdWKVUV4GjclGZnxaBa3+UxCmDmoTWuxhocjuzVwRUBabwP651qkLdKE6kVXzqsCB88e8qHT5/w5OkTnlz3hM6Sxh0HpsXw1moJDY3ofKBzHa4IZsw4oBflZ6zXa5wNjKkwxKhNEpZ9/8xAnreflJI6CBXtzdzy/TrmpjmKHKz6wM31ls1mvexJpRRSbc1L3nB8pwwv8
I0Gd/ndN/xQ5/+/8U8e0c37Ho6cM7uHHafTqE25U17UohbVpJmFjS7GhUWC5rRKyYunX2lSdqkuWrZzjWqdMWQumnCLzFVbZ4hTTOtQ5FTdytoGCQFiNEUw80KqLHQJZ4UuWIxYBm/ISWsy3ZxLbmDRLKtrjDbTnvW652OcP0cWaZ/X4fDHGJf9eLW0SBoErRGScxopOGex3mpOuymZ2d7j6DTSbwZSnDpDpZZWbzqw3z0olHc6aclYUU88pmmZk77v6fueEDqNTOxXiGTf/O3vzrh0tuuyU1Cr1uFPSQl3Pmm0NBs1Xa7zgq/EnBmmjEglRY08l/K4VEG0qYIuLZ1358NC8WFO+RjDqnMosCZUYxHqoutsGnQtc5eidg6XOvOzApM0kYda6mu/vzjh17a/R72+QmtSoyRLY/U8Lj+kzsfeiJcz4Y8WNVJn8lkl54gkYYojbhowJmCsIhU+BJWk7Hr60GttdZ01FlTkZBY6yfMnv2Zx5fV9aE5i1QtnRJYjXn42VvBB71Fp5UcLWe93DWp+Nx53jOPEj370E66ffMj26ildtyWOiTQpfFxKUnUWKsWgpQjONcm1os3n40RJCWbmZDFQnLb/koqYRJXcIHR9WKvGZdbCFVDhkQrOOqRb0/UTXb8mxApGm1aLtYSuU9GUoiImRhJOMpsent14xhigBoILbFYd677HO0epRtmeTVTAWUcfAs4YcoOhYtbifCMGMRblwr05bHQ5BGmt32bm8tnw+nnzXalB9M7iggrslwaf+c01Qa5awb9uLEOM5FLZn/bkF5kpjuQUsdZRkkZzwXuc92yvtnz0ycf0/Yrb2yds1psW0XmWusffSSv71bHgsa856YLmWffHA2PUtpIxabuTOc+XYWHp11KJQ2VMzUhEdUTnNV5rBLSkKxVwPvDsvRturq8XZT2oxKSlRnO+XlNXyqyeNYRrRetGMah2ujSJVMHm8zoyVtWtmCKk1trUmgsnT8/2HMTMnu8jjGZ0rVEN9Yr2PV4YylXJqXMvYGMdlEJtdefTOHI67Ji2PVJ1XxmHA1McyFTGONGvtlzdrnCd4+bJLUU879/tefVwYkqJQ9PUPsRETANjyoxZ62xxrenHzFBv6SuhlYm95h2c04JzinB+xgfHdrul77rW8an18W0d5d50vDO8v8OjlMLpdNJOPCkv9aOLktKl5yZNBq4lORaNlgv2ot7A84YgrSKrkps61UyoWh6vkXdafrxtHJoPaiUuYqizd2zMrICyxBMAAQAASURBVLIJ0hj8rY+p96bltDSPZs05ilwi1+b9O2saSUYholmmMadCtQXTZD1VSeyxI16WTbCUyw1To5XFMZm7CFGX3LRzTnPBtUXBuah6UlXpv3EaOR6P3N3fYcWQGsGs73u6vsN5u8DaWk4VELGIWC4SM+eLshz0V8/i+5nbvRyvgS31IkrlLNuppTot+mo+ydy/di4zqVUNh2Ze9B7yrWmFokLpXJJk7KJRbI3gnN5L06jEwJQTKZVzUIbmRo2Ydkzn++6cr2zlaKYhQ6ju+Nmpo+WV0TyHJi0XMzKXTT3mxM4drWaxGO0F0wzS0kXgDPkqNF5JSVnWqfUVlqTwfhVDFkcsQsbi1yMx6bWobW9xzpErmJRUk3k28lUfItqcph3Ea+e8pLsWKP5i8dfKRes44CzCY+2Zc7NEvI/gxLwzvL/ToyqdfxzJpyNlHCBOSIpIK4GyxijZpOtYr1b0qw47t4qT840/t/ALXaDvWi/XoCSRmFQcfnu1pmu/V2avbWhd1a4vi7Gfa1LVqLvgNdJ1rsFTjYCE0IceQ2W99RhfiKeRL198wW6/4zQO2tjaOXwX6Fc919db1us1z25ueP/2FqmFw90Dp9PA8y9fMI4Tq36NdwG+YpAeZSzzNYsICbZqXjU0WUAfXDOQkMlkqYjRvKB0DhNUachWWn/egVoLp+nE8XTgeDpyahCzaQION7c3XNtr1Ra2tjGoL8/r7DxVz
vvzV9fLchLf+6FRjxGjDeFLbYQgdXq6vicEz2qzYb1WIf8ptsg1xUaq0tK0WmFqZYTjpF3NbGxt5prLZI2w3fSqB+09S71pAakFyQUpSqCzs+Ga0zpL1V2DNKmkNFHRHrLOO4zTvHIuham11DNWsFVlDqdpXKJhRTYuqjIem8MwQ7Ozpv2cGjKuKcd5REKTKzKUGjme9uQYoQinKTOaDtm+wFjLcYqkUikiZBFC17O5viVXw8t94jQVPv3iJZ8/vGJMicMwaPRcKrmRpWZYvyypgq+Ps2mdc+QavZoGLYvQrrnRPtUtQChzeR8sDXjedLwzvL/LoyVlS0rkaaLG2DQYFY+V2sQcGmM4NPb3XJc3e7TSotHZ6/Tea0P1Va+9h1NADKxXHaHT97Cm5UbmQ2m0/jkHU+UM62h9sdeOO4oJqccplq4xdUNntY67Rh52DzzsHphSXCL1mdCxWvWs1yuu1muu12uG05H9bs/xcGT3sGMcJ0qu9CEztwN7zDFHMrPhXbx+M0e5Fudsa3fWSlGkqkCLqXgv1KBS91Ib4miEKpUpjsQxMo4j4zA2HXI15t2qY53XS83iUkN9Mf+/euRzCU9+f42wmY0ZLBGpOkUWH5QU1PUdoe+IUZjyRKkQW77ceIdzhppRMmEtDDmTU1rEG4xUnIBzpvWy7pSw034vBf2+FszSU7dFqUa/r633smnkhtwENqoIwXt9v3btUs6krA6rdlZSYYiUG4EPlSGl5SXNt9CMd3nHOqvsVeYqD90zHHM7TxWuqAxjIU4JEVVjq/5E92IPItwdTkwxMaSJMSWs93SbNYjlVBypGu72Jx6GgSmq4dVI2C4lokuDm9pUmOvZzH5tzKBA25PEzJG5lvx55xajK41/UsvScPFRbol3hvd3eJgmoGGsLJ02Cir6vcBFDRKdyx3mZtOvC/yjm4c0NSTvlhs658w4DhhjlHpvz+Qh2wpWa52ha2UWpknrKaeUtHmE6OfPilgzu3GWjTNGa3CnceTu7sD9/ZH94UStFudXOBf0+J0ldJ6u024vwRiOMXF42HMaxoVVrdBXJcaBNE2PVeLYRuW1srLmtCiy4BZSlfOW2tR+kQrOIEbbv1nX4PZSEWvogldRjTy3qGybgzGETsuoXOdxwWOcdtMqM3llYZV/tSnCV4/7m3aTX2+HOTdG/24Y67nk5jUHsh1bKdqtRg2ZPsqstd5QF2BRDJunrjKXIRakaplbv+oIztGvV/S9NghxzmkKoTSxhdbpTM67voqsCI31rNfHiiCzZnqpylbPWbkL7blSL1InOVERzmTG1qd8IRS2rm2PNKZp4if/7CcLiBxjYmqdxGZnfS6Zms8458xxvyPnqI60s6zvD3xxvweE49RkGnMmFm0J6LpAbToNBeE4TAxTJOXCFKNeW1mgAuYSufkaO2sXtTg7q8i19WyYNd2NBghC64fcSlGt4bMf3/CTf/xUy0znTl5tn/iH/+A/euMe3u8M7+/wsNaw3a4xVkglkqqSqUrNesNfGFwVkrdtAzgTMy5zwQJ47wkhYJoaVkqJw/GEc4ZbrpaIOHShiahfQDu1Mowjp+ORw/HYmhUkhWOdspytd9oK0lmsKDnFiHDYj+wf7nnx4p4vnz9wPB7xq46u73GhxzRjtloF1utA7y2dMeRx4u7FHbERxKx1yiitlfF0Ynf38lEJVrrHqrIQUhtsrvmirvM45wmd1txWhFxFVQC8RayqWzlv9c+ziqauu45kjDZSaBHb3H2mW2nv427d4/uAdW7pdTw7VbOU3l883nyD/q4YXTg7j6+RbFokmFp+MUYVu0g5NUUmJfuZKirAUVSjvbb8am6da9SgZoL1bK+3dCGwbc3fO6uM+1o0h1wbcW6W7hfRHG7OCQDvjOoFW6v3QNIKhERRBaqsJUbGWj2eXBZIPMaEsU67+8gsndh6ZZeiyBGvox9vMobjwJ/8nT/52jxfjm9aA/ViD5lfs0Dry5/rPvG6sMvle
5xf8/oHfu2bb/jp609+E2Q8g0KzA/NNIFFOebl2v+l4Z3h/x8dMpV8a0C+/OH9ZBC7+4nc756JoG9vMBnyNqHXOV33TeM2oXxznVw6cy1U/kzOWxwV0eHE3nx3g+f2+4bNeP4byKOUB3zi+ev7z8Z0P+Az/ikK7r0+DXGwEF3N/gVYsj18Qyb4+r4+z+f4y4/qtzeUbDfnGY56Pdfn6DfU3Z1bwxd8tSZL20q9ei8vPep2/eH7zWfBCXn+P5YXLi89v8LWp/crPv+iqPHbC4E2jvbc1vosrcR7yNm8UEfkSOADP39qHPu54j7d37H9Qa33/Td7g3Xz/WuON5xvezfmvOd6t8Xfz/bbHd2K+36rhBRCR/0+t9a+91Q99pPF9PPbv4zHP4/t67N/X44bv57F/H495Ht/HY/8+HvM8vivH/riUznfj3Xg33o134914N37peGd434134914N96Nd+Mtjt+G4f13fguf+Vjj+3js38djnsf39di/r8cN389j/z4e8zy+j8f+fTzmeXwnjv2t53jfjXfj3Xg33o134/+fxzuo+d14N96Nd+PdeDfe4nhrhldE/hUR+Uci8k9F5K+/rc/9TYaI/J6I/D9E5B+IyN8Xkf9Re/6piPxfReSftK9PftvH+svGuzl/u+PdfL/98X2Z83fz/fbHd3rOL8UMvq0HKtnyp8BfAgLwd4B/4W189m94vB8D/5n2/RXwj4F/Afi3gb/env/rwP/yt32s7+b8u/F4N9/v5vzdfH+3Ht/lOX9bEe9/FvintdY/q7VOwP8e+K++pc/+tUet9dNa699q3++APwF+gB7zv9te9u8C/7XfygH+auPdnL/d8W6+3/743sz5u/l+++O7POdvy/D+APjpxc9/3p77zg8R+UPgrwL/b+DDWuun7VefAR/+to7rVxjv5vztjnfz/fbH93LO38332x/ftTl/R676JUNEtsD/Efgf11ofLn9XFad4Rwl/5PFuzt/ueDffb3e8m++3P76Lc/5GhvfXSLT/DPi9i59/2J77zg4R8ejF+t/VWv+99vTnIvJx+/3HwBe/heN6N+dv95jezffbPaZfh7zzvZrz7+J8t899t8bf9niDxPWvnGhHuyD9GfBHF6/9F38bCfdf8dwE+N8A/+uvPP+/4vWk/L/9lo/r3Zy/xTl/N9/f3fn+vs35d3G+f905/z7N93d5zmutb2R4/3PA37z4+d8C/q1f8vp/FWWV/SnwP/1tX5S/4Nz+Cyj88B8Bf7s9/lXgGfB/B/4J8H8Dnr7l43o3529xzt/N93d7vr9Pc/5dnO/fZM6/L/P9XZ7zWutvrlwlIv9N4F+ptf4P2s//GvAv11r/zV/yN+/yF7/6eF6/0lLq153zzXpTb29uOJ1OjNNELYXcmmOHEDDGkGIkpdebOs+tQMtFQ3VrDdZarm82BO/w3uFcy1S0NSRV/7DkQsna57YU7V9ateWv9h01F31oaQmWCohFjCXnzDBGci5M0/i1ptO1lHk+lvfUhu+CMZfZk9aJVFvdUkq97KLaevXqT/f3D2883+0179b4rz5em/N38/2tjzde4+/m+9caX5vvebhv+5NF5F8H/vX5Z2sE3SMvmnzPmyOvN4QWmTdl3TBrqUuz6q81dq7nDXxeGd/c/PnrDaZ/lVHb+86OSm2btlwc//l8zp8zGxhpza1zLqRfrZH0j3+tA2zjcr5vbm74N/57/wZ/9+/9XX78kx8zDAOH/Y6+7/nDP/g9VqueLz77jFevXrYTLIiANQYROB2PjMNACI7tpuPp02v+y/+Vf5kf/vADPv7oCR+8fw2lUGJGasUWg1ThcHfk8HAipcIwZkqtjL6QbSWsPKF3GGNw1kAVYhJKEUy4woQr7h8O/NN/9nN2+wM//tGfcXf3sp1bpZRMjBO1VnxwWGvw3hGCxzpH13cYYyi1UGpFxCBiybkwDBOlVMAiWHIqpFSoFf4v/+e/+RvN91fn/N34tcYbr/F349ca7
+b77Y5fON9vYnh/pUR7rfXfoQlTr4Krf/zxFbdXPZuVx3lD1zmMNXTrgHWW7XXPahMwRrBWqKUwDCM5JR4ejuwPJwTBiEZNzqqJG4fENGZyqsQRqIIzHhGDaRGRiGCbUS8aiun4Jis+DxFiLuyHRMqV3XFijJnjaeBwGPDOcrXp8daw6hzBqUHxzmCMwfuAsZbV1S2hX/OPf/Rz/sM/+TNyi9oee84v5/vjjz6px2EiZshYXFixvQ10IWB9j5jAanNFbdEfJatDUTK1FqQKznqMVEqppFgYh5HhdGIaN6SYoBZKTkiFikEA0xlWVz2lwqoIRSC6SjYV4wzGCVLBIJRSySlRSoY8QTrRucLHH1xzcxXYPWzJZWAcR4bhhIjQ971G7V2Hc7ZFrZVahDhlRAoY9YBinJimRCmFOJXmODnAkFMhxl/qBP3aa/xdRPBG4918v/3xa+0p7+b7ccabGN7/APiPicgfoRfqvw38d37ZHwRn+L1nGz75YMuT655+5VhvAj44NjdrfOd578Mrrp+scM7gg5BzZv9wYJomPv/8FS9fPjTDazAGOqd77P5h5HiIxLEy7IEqBLfCGrdE2dYYvLUA5JQp9Wz85ihbR1tbDR4dYuHFbmKImS/vjuxPkfv7ipkm+s7xwfWKLlhut6EZX0sfLNY5Vv0K6wM3H3zCantLTJm/84/+Gfk3sru/3pzXWhljIpVKFYvxHu8swXuMDYhxdN0aI0bh25oppZCmkZIzFDDGUnKk5JGcC3GKTGMkpUjOGWqhltl4Vb023hCMASxinBpAX8lyRixqrZArJRcMCakVSqLmCW+Fp7drVr3jatuzPwRyniglY60ldB5nHV3f45wjpcQ0RWqFFBUaF6eQdoyJw+FILZUz0JCp9VcyvL/2Gn833mi8m++3P97N+W9h/MaGt9aaROTfBP4myoz7G7XWv/9LP8wJ7z9d8dGHV7z3dEu/CmyveowVxAtihcLE4ZSAgkgm58zxYSROmd3dkcP9pJt3zVgDV2uDtwJkfChQK9FXajUIbVOtBpAGVTeLVwtmhrBZ0pTzuenXZnip0AWHGMMqOHIuHI1gasXUgjEVa6EPlnXvcdbgWsRbKdSqUKyRc/70bcx5qYXTMFLEYH1H3/dstluCd1xfbXHWYqxnHE7EaWIcjhQKmUxBMMHQ+Y5pGpgOiVSEmApTTNSCGuxlRufv2kkaAQpVMohQKMxmt9YKRQ1vreCsRsqZSskjIhbvAvSWDz94hu88z5+/XM7LiFlSEiKGnCvTGEHAGoVADAYxQq1grQMrdJ0HhJyhFIgkcoL6C9IOv8kafzd+8/Fuvt/+eDfnv53xRjneWuu/D/z7v+rrg7f8/idX/KU/eo+PPrpltV5xfb2l1MLDsCOmyMP4wO7hQE4T03gkp8q0gxwrr15Edg+RnCMxnfC2Yp55+s5ggqNfW8QUppip2WCS00iqGiqmwah1DmTnczjnkNv+W9pGXGixr1g2fU9fYDhNSKkcnEFqwVDwphAsbNaem20Hl5/QIkLRI/gFeedvZ85zLjzsDxQMYbXm+vYJH374Ed571qseKwbf3XE87DkcDhzGTK6JVAtFLH0fCN5T9/c87I+MWRjGxDBM5FywxpBLmc0puZ2fGIOIOZtkgTKb3lqpGlxD1sS5dxbvHMMUieOI9Z5V6Oi7wB/90Q/5JGZ+/JM/p9RMjInhOFJrxYhFGmQ8nEZEBOe9GuRSECOUCs4FrHWsV1uMGMYxEWNGGInTwoB84/l+N958vJvvtz/ezfnbH986uepyWGt4crthu+1Z9Z7gDSKaU4xxZIwTp/HIfjpQ8kSKJ0qCnB0lS6NhGyhQU9agKWaKrRivJBpjBOssRQQpApnGgBL0nxoJzUlebriykLlakNtYr/ooZHLWvzUGnBGC11yutYIxYIzmkmuV9vdnmpUIF2zetzNqrcRpZBxODKcTQ9czDAM5Z7x1FGvQVLPBiMW5gBGHMQ5qaexli3V+e
b9piozDyDRF4pSoZBZGmyxAMkhB9VkaGU5md0QWJKHOcyEzIxmMqWgaKSFYQnAY51ive7abNeMYKamSc8EYNbwsD42A1fBrRDyjDNY6rHUtRVHbw2KN+aWG99cdq9WKP/7jP/6VXz8jALkknd84ElNsDHSd2+YbMk1J57wUUsrL+qRWjNEIX7kRFi6Y3m2SgdrIZa8jPHpXXPC9F3LjebFelGjofTiTIC/uo9cuJ5Bz5nQ8KZ/i3fidGD50vP/R73FZkVDnaobaHPCUyCVRciLGkVoyOU7Ukik5L1UJvzQKmbN9F9UP581Tb2prHWLkfG+37y/fNudEzknxuMbrmYmyJWtqTdd2uQgawFqLtVZthrm8h1B0cBx+80nkLRvezbrjr/5Lf4nN1tP3lmlK7Pf3nMaBn3/5KYfxyIv4kvu0w0jF2Yqplq7cYMRjnKPvLGMtxFghF9KxMCXBeIu3BhcM/dZSkpCKpVSDWKe5XsAKSK2knBTupCo0KTrpiC6gWis5ZVJOpBw5DSdygZoqXmCzcrz3ZIX3VvO6weKtkrcy+tYimiM1TvO9zrmvlLt8uyOnyP2rL/ji08958fIV9y9v2N3f03U9773/ESF0SFEnxNmeJ9fvgQjOBzCGUiZKicQYMaLv9+rFSyQPPLtdc3PVE7xltQ6IQXO9tTYo2CBSENElpszvZhAxVFP10VDnSiVgqGKpUkjxAMay2d5iQ0+M70GFw+HEZ5++ZJoSKULOYCRhTI8xhhA6jDXtQp/haGMswfcYEUpux5ArOaRHNbx/+Id/yN/4G3/jm3/ZNqZL9n0FUkk8HO4Z48jPv/wJX778nHE6sT8+KPksQs6Vn//0OV989orhOHD/4p6Szhtav+ro+kDXB7bXW6yzdH3XjLA6JSkXxnHS9V2kLX29Jlr6pfNgrcUYg7U6b6UUYoyUUiglU0rFWouztv2dlnvZC2MvAruHB/7h3/8TjofDo83vu/HbHc/e/4R/7X/4P28IopAxZCylVMakqcGHhy85Hu7ZPzzn+ec/Ig4Hdl/8lHQ6cNo/MJ0OnCs9viH9Vhv5Fd1PtQTRgWlr2ViMdWyvb/Fdz/XNLVc3t3Rdz83VNdZYjFSEysPujoeHV6SUOI0nSlbbkXPldDhxOBwoOZHygFDp+4Bzju12y/XNDdZY/IyiNULvpz//GT/+Z3/2Rg7lW454Lbe3a5wDa2GiMo0j4zhwPB05Dgf2+cg+H3BW6ACLx5EB1yIYg0EaGadSUqUYWTbPOeIVhGTMa5vvHHXpdmeoUs7ekYgaXkBmsLlFwKUUUkrkXKnFIAjOCV1ncc5irWCNnC9OvXjPb3i8raER78BwOnA87DFiCKEnxcRme6IWcMbqhonBey3F8f0aMYYYT8QkGGs1JiqFaRgZjsJwGhiOI6wC/cojZYbtC1WgGoX4MXoDSW0bvSgjvaIRbxUtEwO9p6wVSq2kmqHqz8FbVn3HdruBauj7E0JsRqAgYvVhDMY258YYMDRWe3sYvXbGmOVh7eNGvOv1mr/21/7aN10M5uh1HjNIH1Pk5e4Fw3Ri+7Oe7Rc9p/HA/e6VbhQT5FSYYmUYC855pqPWX2cr1JJZrXtW655+1XFze43zjn69wnllcCOGGBOnVlJV1UfS+6KlYeaNZHYQrbU458g5M03Tch+UUnDO4Z3Tv8sJqI3EeLHOq97z3/YwRl4rSfza+Bq6xbmcsS7//cLxWtT1a47z2pLXvvwFf8RrR9VguAU/k9dfd3Gk3/y7BYF483Ueup4/+Of+BUpL3+VqiDhyqQxTJOVM//Ln7B6e4/qO07RnPASmh+dIicTBki7q72EBJM+nP0ctzNe2oVjG6n1tA9Z5+vWWrl+zvXnKzbP3WHUrnt4+Ue4KBaFigwcDU5wwx7aWRzW8OcMwZkQipSaEgvOBEDyr9Zqrq2slczbNg/l47169euN5fKuGFyo5Fvb3B6ZpZH848fJ+x5gmXh52DHlk8BPZFcQJOWiZS
ypKUMq1kXpqIbScqa0oSSoXcsrK3nXzZjPnbpVko0jxTAXSCGxeq2ejWKEq+GbFqFdfaoOQqzJ5CwQDm0akWgeL90ryMgLiDFZ3fawxGKsemCY33x4bv5TM6bgjTSdqmihposRIcZGSMsUVSqkYETC2QbEKOYu1jOOJOOprHQWbM8dXD8jpxOfrNSELT55cs/6jT7BWGE5HUo4ziIAYfS/a+0ObC+sautwgKho1q4LW2DYEQgxlysQyEYzn6fVT1iFiZMswRD778hW73QlXDL6o42S6gJmvgwFKXaCtcRzVgagV6yB0BsH/QnLV2xjLtmz0+GdjZ5Nb0hZKPjN45wkhkEKi6wM+WYoTTQv0Huc0Uq21UNs6l2WdKxxfcqXkquhNAZGsjua8tlFjMb8PqOOZc/4a3KyGusXtVTezgkL4ztm3Mq995/lLf/DBcu40Cp86WBqhxJRJuZBLJWZFZLo+aJohJ2pzHEzNCJpGmo2BAM5auhA0FdVwSmmOm7R1qnOi+0ppKE5MicNx0Hpy49o1nq/ReV16pymykjMlt9K35uCMkwrJhBDoQoc1gncKp6YYKTm3FFdDkxpql0ur2jhHG/ytv/cYsspNpKYaSm1oUlUEyxiDqWVZG6XUxeHUaKTinJaQtrdSxKvBverfzMfb1lmpQGnPCc519NsbQrfiw0/+iM31DesrfThrqM6TgZRHqIUpFaaUGaeJw35PzhnBItVQc6SkgZwTKU5ApeSOUrT+f4Gb2z51ufbfdLxdw1tV1eiwO7F/2POwP/Ll3T2xRHblRCQyuUSxheKE4g1SKjlWalYcnpqRWlolZsW0dGIthZwLYkEa9i9SWtQqS61natGV4QwdwKXX1XK9Vb0th6Fkg2nRmaBqTtbAKrgWkRmFuY3mKWdDM0d3xurfq9F/e5t8rYVxPJLiSG05Fy0NSqouVQpVhNLqm7Xe2WCMw1in5TkpU1PG1YqUzLg7wsnwav2KDoNJlfzRR4gzDLuJaRqWDdoYiw9JNybjwBiscxiX1YO16pzM6MJsfFjqrQ0lFmpJePFcr9f0oWLMFcOYeDhkDkPBZrC55dhDy/WIOma1ZEqrE85JhTdc2+icNxhxjxrx/kbXqXn/mqMyaniXTR2FzmFJV8yCIcUYcKiT6AzGNiZ3cy7ORnfO9Qq1qIKXZgXO511LWSJeXf9zSkSWjfSbDO8SYLX30BtN9O/fwrQG7/j9HzxrAip2PhIE2qYJwxgZYyLmyhAL1hq22406B3GkxEmJkjVhgM4J1syl4JUuBLbrNSJqHGnvrQ61aQb/bHCzZsE4jRMv73bkUjEuQNvI9d5SpECALqgDn9NEihMpJYZxJOfM4TQyxcS6X3G10UqEvgsYYBoGUpwW2F+M0TkQmHIklaQn0XKUj2J4ZUZJLv6JIOUcvJwNb1ns7vy31gr4s1NWa9W0YeMNIPUcuNeLNdqCKDGOrr9itdly+95H3Dx5Rlht8KuN7q8pkWuhpImaK6kJFinac6KkTHAd1lhqju2RyCmB1It7QJaUy+V4rL3irRremDJffnHHw92e4+HEcRyJqZCoiFMDFYJDet8WvsIMuUUttRRlw9aCsS2nJI1JWxvchDRYYr5z6rLxlBnfhHZxFbIWmRNureBl9r4WYpBGrlRwVjc0W4TcopHgdOHP9cJiFfKEi83oK+/5NkaZxUcKzdO2GOMQscvNkZgdCjA2Y4vB1rLcOCUrJmkRrBjWwbPuLFernqtVz8p7LGiuuBRoghg55yb/2CAlUcLPvGEt0P5FTr1dPvWirWvXTV2sUi0FRxWHsSs8lU2Ace0wJlHRjSf0HSJCjlFJHYUlcps3ZrX3bUcw59ri39YQaA6PxVmHd77BvZZahDyDjKJED+cdq1UPtdJ7XXdNkWRxYNRkaMRQaiaXzDRGzfEWTbW0eBh4Td92OarLyOWbDK/+5cVJLM9/5Xff4qhopJ6LQcoFuYwZyqyNayGYkhESVIPUNAdRSsZMl
ZhKmzXBSmPbW0uposazEXqMSJNM9cvnlFq1mqJF/rFUcmPWy3Isr0d4M8LAmIkijSOhz3nvsdaR2r3rvH5WKYVpHIFKilOLeM05L++0br5IhaLpjHyhV/CmYwaqFleu6koz1CWvSm0ICYB1iPOavqIQa2mpON13aykY5MJpq1SpVJG2bWouWfO7jq7fcPPsI/rNFW59SwkbhmIYjiNSC7YqlFNipOapqQSKogjDSEqJ6gpGbFPAK8tD4LU1vhA0L4zvrED4puOtGt5xnPizP/uU8TgSx0gkM6ITIRuL84LdFPq1UcGDkqAYUq6QCjUlahIcGe80KrIt6jUzoiHSjJ7B2IxYzTXOUW+pZn4hMzAlDYKWtujnJO28yIwRVaISwVShtM26trxW3zXpQmuwot6wC55SKnGaFsPLsujezsi58LA7EnNBrMe4gHUBYx2lQsqFUtXxKQDFYoogzROPOTVoveAQgrE82ay5XgXeu7ri2fUV16se3/LtkgvkTBoj4zShTlD7OhsPcyEJqkud1MRMSlYmr7WWLvQgQhwzJRdSFXIRus2WZz/4Ic55nqzBGU9/yjifEOsI/QZE2D/sGYdMQWUmjQg+OIyBWpPeaFKW1MNvdTSHxFVHFzpW/YqYRpx1qKgJOr8NUuy6gLu9xhnD05srui4Q00TKkWma2B32QItKa2UaE9OUGKfI8XiiVvBeS6zKXE43MztFWhpGFsRjzu1espnn9xZ5zXyz1Gq/panTiCkuynTGKCqgv9OIS4xGsFkKUkekWqhBa+utINYylcSYlDEemwO9EXWESjXEpGTPPnR4Z+mDKsAV1MDmUpjiiVwyMWXGqPC27keNGTvPW2PT5vZ5Q1ID6p1R2Nlauk7Xv1hHSBnTCHK5FMZhUEPVghE3pyesxXWNCJQEWyxTisSYfvkk/ppDFrOrDqxtBtNS2/2mUWauAjYgYcXq6gml6xjFMIlF0FJMaiY7S82JEhM1RTCC8boOU9FX5uoptWO9fcLHf/DPEzZXuJunlK7ntNtx3D3gBHqrqGSejtQ8MU0RqVBS5nQ4EseRyWl1Q4wzCqhKfbU5m2e+0JkLcgk3m++b4a0t75FyJpeiXlmrBDFWwAnVWjBV8xO1UoslF6EWFcAopf1OLiKktlGcIRBYck/tsSAWcl40tZ69Yv159nRmoysYFD52LU8rVWUO543K2Asyyez1U1sU13IX0rzui9e8jVEri8dnjFmQgDnSfO21aOSrGZWKtKg3N/q/bVH/krO2+nWGhhGF+K0LGF8xRTNtFbt8wPkj9YMayKDCJkUjgJQSNVdMjZrHGrMqZuXCmJT0FocTNmS8Fda9Z8oJPwo0MjMiOOvIrpUPNEjUmAvMq301Rt6KlXgtMqyvf6QgWLG4OeK1Hmc8Bnexlluey2i+TGjn1KINaehEzlm5DhXMFClZDe84JWJUSK1WsMYxC8xoADLfI618iddzWvUrx7wY13r+6bU7rp4N8bc65giy/XgZoeQ857n1HjRGmmiMCtxYa5bGHnMJGst6nlGD+QG1foUoeWGChNmpNIugTqkN7q4VjFykvPTAl1LFZT+ZXVRpe4U6ZGV+fr4uDYGQ2fG52H++CQr9NlIp8/4oFyWT83lxmZoQzWuLC0jJmLDCpqT8HDTCN0DNiVRHctOLN63iwYqetxFHlY6uWxFWW3y/IYkjZ4gFSlVp2jl1ldveNTtFpaGmpaqjVKUuGgTLGp0j7q+hP48/j2/Z8FZSUq+omoq4ivUgQbAbB6GCN1ScagNPmRwNdfTkyZCmpPrAJILkBVYTY7DicHg9pTpHsaolXLNtOQiz1GaVpfRFmdK1nusmDapSZcQgVrC2ElrpRIy5kVOKerTQDDJnxyBGjdJpkSAWZ1TM373FcqJSC6dxwrkeFzw+9Liux/lOIeflWCpiDeIsWEOiQBXGaeR0PGBipLOWzjbGq7PgHdV7ivMk0c0sXN9iS8bEjEu5dRfSeZZ5o0CNRa1Qs
0I8pUaETJkqcUokMnE4Qa3kMVNSYXc6sjvsWV1tkSB06zU3H37Ee1e32C9ecDjsNG7Pui5W3YYuXFFa/ibnyHDaa01fSZSmvGXt27wFmhldNkhFXcQYVr4jWMdVd01cRfJQ8byiEpnqqJGVKWQbKSlS4gFEmPYVBs/hdOIwDEwxsTsOFMD4ATF2qbkupZ4bdJTa6iANWHNxfCo+oimds5efv+KUztdPx2yadJPNVag5E3PhW9jzXxu1aqlT30qnnHOEEHRdNdb7PDrvWK0c1lhW6zXWOdLcKINKSqvzZo+A1VIZg2k5b4PBYNH9hJaGqaiDF3zAuorLFZ8rMWXEKrSvxkHnPbfcpxqnshANrVVIeXZuQfDeYG1dcvBCJbZrZVpkPkdktVbGcTwr7rU0jjogj3chFKKXGThU11pAakFqIedEjFH16MVp1NvfYPya4Fb49ROsVIIpUDLptKOkidPunvGw02hYItSKawsorK4Iqw+4ef9jrt/7IYSen+6O7A8ToRh8WCvfphNqiTzsE6fTkTqeqNPImKIq41EXidtUM6nOz87ruqU16+v53vN6q6+tqd90vGVWM5xDTxayq9gGx9jmNzeX2bRIV4pFylySMtd1SfOMTCMvWYxonVedPVFp0aZ+cDOSbXNpsALqjCrU+hUmwFyGhOhrVMBDyKYguUFyVKTmBfavoB17Wg5JnzZLxKue8tsZtarXb30rCbKa55U5x8rF8Vx4+3MENEe8UmuLdjXfJMYi1kF7VGOp1rYyLiguU9uGliUqK7zo+0it6uHWSjGZWio2C5AxVmVCFeloNb4t9ZNTZpoidpqYphEbVNxjs+rpu4C3hlJNiwO1uQPiqNZTvXY0ipPmxmq1aI2xiqG8nXEJx55/1u8Ea3TuvPV4G1pzCodQljUNaJkWWZ0HhBy1zCeOI9MwMMXMNE4aJaUKYpniGW6c4eKc5vVpzzkVzpvPXNM7/029OFrd2Rfx1QYd1QaHNpZ6nVnq3+6o8NoGOZccfnVrFJSkFrxtBDb9mlvpigqv2EYJMUvEuRBzL1CSZSOeIfr2/cxCVyZBi9isigRl0PJHqa/vMw11m1nSdkal2kdqxYHmkOcjaDhEi65lSd/MIhGFoo70QnR6bPD/Im/dLNaymuvZSVDehqB1gl7XeQCMw0rFSUEVkiLFGIw7tuqH5pKKSvIKSkDrVmv61Rrfr8muYypHjlH3Xi9WIWorVDSyjTlBztScKSWfUyHtms3cknLpHS7Bb33tcT69x5nHt2p4RUQF+g2ULBRXyU7D/jJpSdCYIjFHbLHY4jDVsbFXSO9VnerW4OpIqAeMVHovOCvIqkf6jlYnQingbjtyrBANRNFc4RQbtJlUPrIhS0YyxuguL3CBpalXGZoiSnF6c01ZpSlLyZQ46HYqsvzZcoFqpUqmFFVQKTW/DWRzGSqZqO3yui4QgsO5VpdbS7vZ1TBX5ohGYc2+D8j1FZwqpAPWW/onH7O+2nD90e/z5Icfs9msWL1/i7FGc98CqyrkalqERZvTGfZrN2nLC9daqUkNcIqJFCMzPbTWSmm/O41HDqcjvg9cv3eL7wKb21vCes1T2TC5G3KpTMmQCxwPkXHMbK7X3FxvKSUzDntSjux3rxjGg6p6jYdvHw69GK+trbZbFZQJW4CMsswxDu87KtLkOYWVWNZVVJt6SkgupFFzU9MwEYeRKRfiEMkVEpGCpg5KnSNSjZRiipRakOKUHyFzukav0JyamGHbuWRGD/8Mj8Jis7FWqwVmg5RaCdK3OYwIXQj0XWDVdUqgq5r7zFEbeRgRjAuIMdRiiKUyDCdKVd1xLd1pDTtE6ENo0PMcJLT8gND6Us8GUhT5b/WxUpRvQtZKgJozUlND4BpoXTJSs3JTnCBi6fyqpbK0lKlWZUWXqtc5pULO2piklMQ0nYCKW3dY7xBfKTY13oa24UxjIpfKaZw4HIZHRR50OubaaRXLKY1QFVMiJhXdSbmcnRrrq
a2kDXHkEjnFgZwKh91AGk+Mh5FpSFgpBKva9l1j+K+un3L7/g/pn3xAdmuiOE6xchwjwSl6agqYIpSStGxyipRxogwTcZjIU6LEc3plYfLP3tU5x/haSmEmwV3met90vF3Di+Ctw0glGygWxKoXmGMiR8NwrAxjJohhbRzWBFZhi7M9XegJLmDKiEk7jBSC10CtOEdpUR3BKfRj0LKJEeoEcZg4xpGC3hDSIGldQBkj5WJnbKPqza01ugLGAwabCmILOUViiRfElLPhrSjhgCoN9kpnubS3NNTrVvgtBI/3SgSbcxmaE7Mtsp9LexQp6DqPK2syiem0woRAd/0B/c0Nm2e/z/UHv0e/CnS3W83RW1pOx4Fo3W7FaIBkdDLnCFvOiUCF7RoMV3PL7edmlFurwpgnpjhgnMWvg3r0Vgvqr82GEm5IqXA6RWIqlHxPTgNX21s++fhjoJDSQM6R5y8+Y7e743jc8fBQHgU6+tUvCEuu7nKZKfu0UgRKU+dxLlALZJuQUukw9FWIGWpM1JTJKWp96pQ0FZMLacqkUhlL1dr1VlIi1mCdskSlGV5TW0OJhnjoxpJbA5DZsApfh9tqS83MNntGotRgzASbb9ulERGCcwQfmlMvzFBJyZmcEt53LaWgyEFOmd3+SIxR0YCcMKIVCtZaOu+bMzHnrcsS1c2s6blG+OxIovtJqdBqg2vWiE5RnrnESw3vHIFbY7jarAghnI85F/KoXIc0JqaoRjemkVIzKY96fCZgg0E8WoJZKhklTA5TIqbC8TSyP0yPDPmfSy6lRbSCQvszTyOl1EqvGpJmHSyG15IjxDyQpsLDcWQ8ncjDSB4TzlSqrzgjTbLW0W2vuX72Ieb6KcX1xCKMuXCaElvqUqpuimh705TJMVGmSB4n8tS0C/LZ8M51xnXWV5D53OS1Nf9VROUxIMu3bHh1GLGIFbIo1b2WwumUmUrmdCqMY8UEML3mVGYe1Xq1YXt1i5NCkIQ10HVKfsoGkgGxFvHah1eKhyrEw0Q8jpz2e2pKxGkiTlE3rVyYxjnXmGaUG0hai2tUitLmoLkfozWpM7nBSNVosczwkkYWc4/b3AxHbnJqvw3dWmNEa1dFqFU1p3PSKCZOrtV/GmRQIzxLDa69ZfvkGnu9wr93Qwie9z/+IevNBr+9ZhJLSYW422Otod/0WNekO31oc2DaJkHbvGRhBc5cJ5mnpGjUACxqVnPqwZWEL1HLabpZjEBLxozrcf2qdSnKpJRx4tn3R66vty3KN4SwASl0veFwuOLu7iUib8fwzpvzbHCltlzvLPKC/izWYr3Hea/KUDnDlGCYsEPCjxmJhZqFmk1jtkrz9ptQWNb3X/zIOTJhlvLU9TkzmqkF1S77yjF/xfOfocuvn9ucCy7kIq+XHn3LEa+IqFyrUzU5HRURrbXHCtYJzhlS1raRKWdyKpSscK1rgi7nPbVQW43d+Tm9SgpNqlqac0p8MsYqFJyylu7MTk7OFFGiT5miiosI2Ga8Q+iwxjQESomncRwXboSqK6UF9jdGkGoQ4xuCK1RpzrO1iKnYDGTBOsi1YG3B2vyIl6E5HJXFOdH/Z3Kkogw5zwIedUkdXs4jJZPjqI9JHyU3Tk6pmnc3hlQdhg7TXRGun1FXV6QW2UtO+DyxNpabYOissLHKgn6yXhFqItlKNIV9Seyat1Dmxi7tUGrVVMAFX/d12zqnYZY0wJtb3ree4xVYxOqnWkglU6bI/fPIaZyYxkSKFbcVzDOPrQ7rwVK5ffKMDz/5Q/rQcbXRAvh+5bBOmPLIVCbEOkzQkpk+XGGtY/fyJYdXr7j/8nPycOS033PY70jTwDhNjFNkroOsQCqRUhuD1xh6GzC9w1nTSEkBHBhXNRfhTpQCU0kq4tHgl1oKKU6IGMZpxI3jmdzy1iZcJTRD8FgLtUyUAsPp0PJblto6CRXRUp7tZkUInie//0M+/vgDrjYbPnz2FO8D/eYG5zxFKicqcRw43
T/gnOWDD99nvV6x6TyhWwFCmZkeyhVp8o26iduZhb4s+KUSe4E468wyRY2UntLcBUkaMaUuHnjNWp705PpLHu52OGfx3tGvej766H1CcByOHzOOJ37+6Z9j3RlK+tZHvfg6e3imQbdodyfrHd2qp5961l2PTYXjcaLuDriHgdUukoeCjYaSVQawlIrLgitCyqK53Ybk6NRpb2TTNKvn3KVy6IpCoqZekO1eH7OYAFyoDM0EJM5Q86W61Vyn+m1HvMYIq3Wg6yzBS+MlJIwUvG+CJEGNWx4y46hSm3HS6goVIwnUqnnzReimoAZUBGWQq/RpqZlStGwpBG0zSZPcHGMklYKvhY5KTAnsSGqa7zkXnNHWlSF03FzdYKwlRY0O4ziye3hQwxvbHNY5v1+bIIhFnAqDGNfSQ9bgglN0pBokV1zNytzNhq77ZofpNxkXdICLXLXmdadpZBxHpqilbaW2BipzzXwjX1ELNU2k04FpODEdd8ThiJSEVDXYY8pY6/B9ANbYqw/YfPQHDAQOGcaYsGmgzyduTc9Hq44gwsoaqnV0t7eMmxXDoWPYe15J5c4I1KJlkiU3pEedpgIXEPNl29GL8kc5O/tvOt4uqxkWeJHZa6pKoEqxEqdCSUAWKMoenAkbmkLV/rEu9HTrK5yzhN5iLNRsIVuM89hu3QzvFmM9Uz8S+wHf9coabHVcl76NolNKiIgpt4L5ijUF4yzRVshgW4P1dkAAGi0YLeBfypUu2VZC6zbzlmFNWGQI1SNWNS0tG1LPsrTvc82kEjFWsLaQsmN/3LHbrzBSOW7XdEBHBWtUnKKqpN39gxq465trnPf0pTZYiRbGLVv0snjVKLTXLNirvkZ3lfZ6Y5b3EqHlkiK11qWUS0tDWtmSUePehUDXBWZ1MpHWcaSxXkUqIXQ451/LX35b4+vbXnum1mWpLAFE0bx3GifSMBKPJ6bDQBkjkgrkshDVZoZ9zufHXJrCvBZf8+Hn9a5zro0q9L1mg/rrDFnguXZrf8sR7jd9/rxE5OK2o5UPIbN+NMzCDjnr2p+jMEWnW3HVOc3XYGVpeV3bmkZ8RY+aGQloBKlaqS3nuaRVGhRvWknRGbLWfSNdRGFzOZRzLM0sStH8Ig3KN6510JlJWMiyrV4sq4sc/NdiuDeY8GaA6vkd59LK1PK7paF8teXFFz5DW+CzIzPX8mpCqpEdmfOv+mFiPcZ2GNdhfI8U05ToIp5MJ4XeVDoDXsArYYdV8DgDJgYIjs5bTSUYUfPCcvCv3SG/dKYW+ON7Znhnz4gGw0oVTDFIEqZ9YjgmbSJvHEE6eqvSXkrdL6oIkwG3Yn37UVMimoCCtR5qwIcVq+1TxDisWQOWsMqEMRPWJ7rNDbkILtzjXAJUj3lKmeM4knJld0hMU23RmLDyYLcJbw2+azV6tlBdxrnKetNhjMeJQUqaeUMK77W8WUwJGQZSSt86/DYPMYau39CtNvQrlZtz3jdpzYSKMyQtyo8jh2FPrYW7ey1xeP7lz1h1npuraz7+4ENubm74l/7qX+PZs/c1H2gMX375BX/v7/8d1Y8Vw3vvv493K6625oJk1to5isLbZyWY0nItmaXemRap1daSK3uYa4FRveWXL5+Tc8IHFQ3YbtdcXV8tGyIihM6zXvfEpKISOUVSmtC2g4L3XhWirFfU4q1cEBrc3OC5C6EWj8HWihki9eHI8MUrnv/pTzntdjz/0U8Ydjumhz1lf9SuRK1c6zCOTDmzGyYOYyQijFW3tWydtsdkZvHLAlvmouIiDkHkzPYsF87hHOm+voFfnI6cKwu+6los2sHf3my2oaG7ESXEVsBY3VukcxfORKHkyHQ6kHJptd3NuRYoRg2xEXBNyKIPjs5Zeu+46ldNl93jrSU4s9SMK7moYo3COrnpLasjpKph1gjVOeYVrvC2rrtpmpjGCUFYr9cYo604a4Xd4cQ4qUPsnN5zLigTvZTU9A5s6ysNOQu5QIqJadR72
8xKmo80mk+MwFJWOY0ju909u/2eYTyRW/GOajXUOcpC6YMRK4neFawvrIJC5LMOQ8lCnCrOejabZ6yvntJdPcNsruE0EO9fUaeB9+yE9IUPfOGJUQg/OEUbr1dXIJX7kLmzAyWu2G47RBJ1n5U70pAm6pIJw6CsdJkdYMPSFrDKWVf+TcfbLyeC5mVIQyH0bHIs5FhwoiUvVizWWOxcSlHnXILWCrpupeUAGWpNgIViEeNxrm95kE5LR2zAOFVucl4f1vqmctPIWQiMsXlumSnWNssqNTeFCk6T+LVJyJ1F4mddZkM1yuadHaPZMy5NIOKy2P/bHiLS9H09zvmzTixFRTBmpiVQycQ0NYKEOjOn4z2mVvZX15Q4cTodORz3XF1fYZ0KPIzjwMsXL3HOsd8f2W4VWrvwh5eb/qtx10JduZBsk5b81OMyiwGe/yrGxPF4IqVISHpO3js2uagWtNXXWmtw3jUCkBJOlOA299i0ixPw2FHaL3u/2k7+jDq3yKsR/YhZWZiHE8P9jtNux+l+x7DfU0+jkqpybspuhZgzU0pMSZWXMkIWt4ihsFzh+dj061xKUWdm+fxLOUtFzkb3Em77qqSefm0n1cZXIbpve8gCZ57RDVBRntomYo7KZkGYy0h0FoJowVKrjRUl9ziLd47gXVOI0h7O5itOxdfOuaUSZhGdSxhTkZzXyTsz+1oVqByh66kVhhhJJS8lUKataxFhSrQGD0LJF+V3BUVCcj4Lsz1ewLt8NQK5BVM5J4WaJ206oCu7vvZ1jnappaGJqudgTW1SvDqPuQq57ac+9IRujfM94gKYiZr1sTYF52Btha51iPMNldASSmEaPCdvW2/x1l98TlHVdndcQsnzmgDm8tGv4OuPMo9vvUmCiuerWHaKWRmY00hNOpnOOvre4IPqMM+dc4zxeBcIIeC8BZMpNXE8vSDFE4fTK07DPX13xfVhr10s+qdY2zGNe3I9ITbRbT1Veq6fbvABgjeEYDkeB754/pJhiIzxQC6x5dAE8Qa/1ovXrQzWCbth5OF4R1cM3Tbg2x1rMMrwRajFkuYuMfL6pvU2hojRaHe9ZbXZErqO9XrF3ApOEGqDT754/gX/7Ed/qqIZp53CuQVKzRxORz79/DPGOHH/8Iqr6y1Pnz3j+uaaL5932qA9ZnYPB9arPdMHCdP0oOfidJrIQG0531zKUmQ/NkF4raUUFgWAClJOF+o40vJIEylF9ofDIpLQ9yucc/QNXu76TtETLyAJ39rbxRRbE4JW0yyWt2QfFgJVK4PV6LIxVvev7piGgU9//GOef/YZL7/8kvuffqZsz8MAY2IaJ+I4MqbEbhiIJbMbJ6ZcOOXCSFXHb24+YfX66iZCozE0x0QstdXezk5wndnAsxN5oTP+WnncYrTLogstzWhcGuq5j+m3OqcVYpqbPghUg25rpRkhdaRLg5dXXdDUup21w3WDLgVStkuOt+aEFU/nHJ139J3XbmVGdZypWtGgGHe9QHdmWFmbgIgxqlzV5ndGCC6JaovaVnN4RHIzXhoUeH9uYYloWWSlEiclEqpeQbt/rKVWlfqMUcmjMT0yh2ExTkKJkdMwcDqdOB6OHA/Hr0lUirRskbTSwRrJZSLniVwiIgUx9dzPQQxeOrrVmifvv8/104/ZbLdYKrYUXJxwOXK7XrEWzwfPbvnw2ZMz4gWUOlJrwp86jHcY73DBYyffqiE0xTn3Av8a6nXhSNXlqXMXqDcdvxUBjUUsp1ZynFQEIEXICWcrXSd4Lxc1trZFpx7vg7IARQ3vMN4xDg+8uvuM+4cvWfW3xKkQ/JrrG8H7NTEeKHUEm/BrT5WO7c0G74Wrbc/1dsVudyAXbeP16n5iGBMRSBVwBrey+GDwK93M8ziyO92TxHFbrpFqWmMHs5Tn1JwhnTtxLEXlb2lo04AVoV/Rrdas12uub67xzrPZXOFaNGycpfvJii9ffok5Hpji0OTU1GieTgOHhwdyyez29
5yGJ1j3Htc3G7o+MEXV8T0cTux2B2JMiFjmTIrMUW/L8cyRxzBO2oHlOOjfLC3TpDVyqpC1RGP2hlOKTGMk5cTDbsc4DoTQcXv7hBAKwXusFbrgtaxMCimNGGM1+k1a1yzLdXpkHO4XjMXozqtBtFNXSpFpGLj/4jmn3Y7P/+zHfPbjn7C7v2f36ZekGEnDSM2ZNOstTyP3pwOxFPYxMdVKKrL0OcWKEuZa8rPOCVjOhCgrWuZ1qTu7SESWuuR+X2Mnz8bikq1c27uKMuLhbFguW6p9a/NaIadKKc15QNAmINLqNEWZzEnr9rW9n+AWp0DXuaJUjUtQCzVXrGjEG7xGvFYERyuzKrnBjkYJcnKWlpxZxqbk8+Y9TyFcbOhnwzsb4hltyDm1OWWBmOe5zG0fiTEr+7c598YYQqcRdboo65keXatZnXpE53gcR4ZmfE+ngVRmxOsCqzLN+SBTq+Zoc5koNYIUZs6StvM0iHOEVc/1s2c8ef9DVpu1UgRrxqaIKZmbbc9VEJ49e8Z7H33Q1oNqs0/DjpRGfOe1aY3TagHrvXJUpHFd6sXdL5f0ztnwXpz3Jarxhuv6LZOrlHGancVZjaZyjlATq16N62bl2fSBzrsW9GhJTsmJcTxyOt5jXWH/EEAKJQ7KhMuFnGAYJl69uiP4Ee+3sCpQRjwZbwrOVIqtdL3BVEffq/5njLBZ31LrgPdHnJlILTeRS+YwnphKwpeCsZ4hDhivbbiMm6UoyzmybfBmrrlFG0oieBub/OUoJTNNI8fjETGGfqUQVp8ygmGK2sZvOE3UahDj8KFHjIUUVVnGF/CR9XrLarViterbvCmBqQuBlLXn5fGoOamsyv6vXX1NMShhLqfIbndkHCe+fP6K43HQUjDjmm5zi7RShNLYkMv8aSQ2C5IMpxP7/Z6+71n1fYP9G5d36aV6ZqnPkF8IHVdXN0s96mON+tp/coG66/HnNJFzYjwcOby6YzwcefHnf85pt2f//CXT7kA+DpC0nnlKGrXsY+IwJYZUOBZtcTnpVtaiXCXR2S6AGJVuhOUYrGntFkUUep93nQaLfi0lUC++f31LWiByXtuYFObT798O1CwiBOs1RYXVaHSGMFHDUFPRmlqpFNF71F70sEW0i1bTyVw6owGLLnIpKjbDrBRVz/dybs0YUp3bAjatdj3AC/heeQxFdH2nFonmMqt8aVpNe/k2/kNt4aIYaGmRWRVs3v/nabbWELxHjLDuO4Sq3ai+BSGTWe0ppsQwnBS1annTBcbnksQkLYWkXYas9XjXYSp4G7SZDY00aD3WrehWG0J7GOMoKUEu+Hb9ViGw6S3BezCWlCLH45GYJu5ffs4w7Pn8i5/z+ec/5/7hnv3hwGkYiDk1d6tpZ8g5Lrjs9GRmTft6vjG+l3W8taiW6MxCTTmR4glq5umVJ1fH9c2azWattaUiFCopjZATu4fnuOeWcdhQyk6bKvuMES2YniIcDgc+Ox2a2IaH6xtWIdP7TDWJzmVMKHDtyavKut+yWT3BmMh4XNP5E+tux86NpJqYcmIqkS/3r9rx6M3RBUfoPX41C1MYStb8mxpc7bcZS+vqotm3R0nM/+oTrh2edvsdsckGehfoukxwK5yr7PcHjqeBu/sHarVY27He3CrMmCPkTFPP5tnTJzx98pQnT265vrni6mrD1XbDdrtlGEYO+yMl6zWIKWGk3UxyVu7R0krLNEY+/fQ5u92Bf/iPf8SLl3cqTmI9JSsMXUqmxpP2zCzaO7MPgSdPrgnec3tzxWrVcX9/j/ee7XbL1XbbbhrAQHAes94AZ2aobZH1ZnPFRx//cIkYHmXKacS6GUZk5rECLb81HXeMxz33Xz7nsz/7MaeHPZ/9kx9xethzetgz7o+MMcGotaa7IXGKiRfHyN0wEUvklJS8kpt1NF4Z/z4EVtsNgjBOicueqMYYfGN/x1zPxnZGFMzXGZ6UBo+2yNZKq8uuTZyj+UNSWdi/uvTq1
3qZfhvDimHTreltR8AtDRCgqnRpKUiumFSJJlONso07bxchDCNCSpYRWv6xIW1Ic+gVrsZVQgi6NzVHMDfFtVJhatUQi5FsjvicLyxZ64OlZNTXS1AhxqplYCIYsaSi8qiV2q6r9pgWq3XbKgtZF4nEuWzMe8tm06vmsxE2q479/kBp7e8ec5TWcOB0OnF3d8dxODU1NNVKV82A1hRiTgEUwUjA2Ip1Bd9PFBOo3UDEKQEyJ7xb018/Y331hM2T91k/eQ9xgTiOECMrqQRjeLJd82TTsV6vEa98k89evOB42PGjP/sT7l4+58vnn/LFl58yTSMPuztlXkdtnjBXjWnlhd4k1qohn4WGKqggSm3O5PcWaoYW0Zz1PEUgBAcIwdsGJb8OgVUyKY6MwxFj4XTw+rqVafWpYERVsTTnYdqmp62nNAnUqOvSGjJXc4Y2q1AadV9oJQMtUqi0mjyZcwJarG8ktC31dbEADXYuvp/P5Rw+vJVRqa3HZ+PiFYWfjBjGYSRb1fVNMVKKLrqF8NQ2KDEZCzipeN81OFFagwGziNLr9dQC+tQaz2tpT5uHtvuXXEhop5w4JcYxcjwO7PcD1USKWNUejlGhwni4aFg9MXWB4A2pC6xXAe8N0zQxDAPBe1JK5OxwYjWPLY1tOuelLh7GOnwIjw//L+93Ji2piIrmsofDntPugePDA4f7e067PafDgeF0ZBpHnZsYlTSVlTQ1pMRUClOppMriwmkeUehXPd1qjbOO0HcqH5hyU+dpNrZorTZzJFjqYmzPQ772D74+d9/EV57hucuuRt/2qLAQxODs3F3O+9ycQ+bnqG2Nzhrk2vnMtMjStLpw5bcqdKu39xkyngGNudtNqSoGtEhzXsL0LZ56bU9rxw0sEbVgqFLIcyMFqhqsC1Tia/A/cwnS+aGkSosvKg/rnW3tUB9ptD1Rz11rlFWlbI4M5zTE+fov8hkNbVI5Ugc2K4M7ZyrapcuGuQPRFnEd1XjN3ye1F85YvKl467QLWUocDwd2uwdevXzO4bDn4eGe/WGnnA5rIQS22ytKya3RjbKxU2wSm6m09pGyOGNGWvX2TL7iF6/9X3e8ZcOrhecxZnIeiFH7bHjnuLnWbiF2bgDeIBtduOopHvaviHnEh8D93RcE73lye00IgSKebf8MVpbbW5WPu766YtV5iCPTeKAMI54GIzlHRqgZhuPE/uHE8y9fcTicSLFgjW9dh/QijWlSQ9YSdMEZbA2YAnk8YbLmJWutFDmTlmwjcTjvcN69lShgHrUU4rDH31yx3awwRtjv9gjC/Yv7hQBjvcNSubm6IuXMMA7kkvEiOJRM4oCr7RUlC9OQoAjeeDarNR+8/x77/YFhHDkNe07DnuOwxztL1zvNnBVl4g6HiWkq3D0cOO4mTofIbjdw/3BkSJkhpnZTa+4qTwdKnpA8QRrpgyOOe/ou4CRT05VSOUshTpEnT58RY+bpzQ2d7zBSqJIUXrQarYlVKrvzAb/avC6S/oZDN/gzDFmS6temaeThxZdMw4kv//wn3D//nP3dPc9//ilxjOzv98QpchxHxjhxOI28eNgzpsSL44kxJU45MRU1uhk1upttT+g8f/gHf8QPf+/3mcbIw8OecRz56U9/zjTEpvWbmwGenUCN/r00zkRzpnTzaa0f5x63oiV35w2IxgwWvsmXnMUz3kbNei2Z02lPFwzBo840TrW5x5OqKcW5dK7JMebE/f2Okgu+sf37rmPb0BLlMAneOZxxiFhycZANYxJSU6mj6rU9jZPuU6L3emMlqnBGi66ssYg/9wUHWdCIKUamSZ3fUhoI0SbWijTpzrneuzRnuqpIRUl0oaNf9Q1tUEZx8IbgOj2W/HjcEj02IZZETJkhTQzxxJRHMAnjC1JaoCOlaa5DqYZKxTWo2dmA75QQWVMm95FUND3SbZ9w/eEfEVZXRP8+u7xBMpgy4kvlar2hd7Barek6x5dffMqXzz/jyy8/5+/9g
7/LMJyoRITKe+895Z/7o9/Deceq70FgHCZSStzfP/Dq7o794cDPfv5zcs70vaPvLF0weDeXRKpQiWtVNt+7frxKlGqC2kkl0UAhsL7v8N7PGYImCnCOHKASJ+3XGKMjppHgPZ13lFSwvceHTkuGwloVabzHWUuOlZIStWSMVOrM2pQmQJAS0xg5nQaGYVxk4Oa6slrP4uPtTM6lREUZkIVzv81qzJJbm/vVzuUDj5Uj+NVGpeao9W3eUQrE1iQiTxEqbLZbVmaljSBCwOZESs0hshZvNG/mRfAuaOlXUozGiME7z2q1IuXMadRSgpQjMU2I8XTMJIy6GMfTcdKIO6l0X4qFGJVstR9bByFToRbSOKjhTSOSR0p29E4oKTIMJ9Z9YBoGTuGED4FhGLHWkzYFbeXWuriIKgaJ1oqc+4Rau0hXPs6UNzGKNv+lZHKMxHHkuNsxHPc8vHjO3RdfcHjYsbu7I8XEMCoDdUyRU04cU2Q3jUwpcZwmxpyItEbjVYkhxgi+C/SrjidPn/DJJx9zOBy145Oo964SfLk1X9f1DmCctgRcuhDNiACzsX092p3zoUbVHFjEIV5L8l5Ow0WZ0rc4aq1L84BaFT6sRes0U4xLTe0cIapTUBiHkRgz3iWcddrZrGoVp2lwusEr+a5q5ysKpDwzmNUxjFlLumqdHTtZcuW19YDVaFqh4lLm6vU5em4CKGWWiMxto5yNeG2RsXIm5ILjUKs2cBdTsW7Or+vz2u3KtFIo/6joQxUNilI77lQSuSStS798fAX5qQilqfopH0HNjws9YhyCw+AI6yd022e4fks2PVN1mJywKeMMeO8JrvVWNpbT4cDzzz/li88/5Wc//RHTNLC92hA6T98FPvzgA7qu4/rmGmMMp9OJFBPPX7xQxM57Xrx4zjRNWgnRhDas6Iyb1rhivie+dxHvXHKg6ia15Z3s2cgtCkJ641sjSNXOLdIY0ClNpMmSppHkPDsXiN1Ef+Xp6Fn5Fbe3T/He0fWiMHR01KYSM9cFl1hIY+J0jAzHHfv9QJoO1JII3lL7Hm3i7hhzJI7aOzI3eCnlwhjVkI+jkN3Z958ZpXN9od6MM0niLWLN6Mf1XcfVdsvhOHJ/fySnxHA4Qq1sth3r9Q3DNDIcjsRp5O7ll4zjyKbr6UNH3wW6zQZjIcaJcThRmrB88J7b62sMwu7h/8fen8XYsqX5fdhvTRGxh8w8wx1qruouNbvZTdqk0LItUg+W/SLoRX4wBMOAYAM2+CTAAvxgyn6yn2QZEOxXAjZAAYInSDAFWAYhCbJhw4YsUiYpsymSXd1VXbfucIYc9hDDGv3wrYi98/Stqlt98mbVdZ9Vte/JzD1FrFixvun//f87fJINz08jRhWUalEKYonkFBmnnsOxJ6bC5UWHtYrnT7ekONEeB2y1gtKDmQkEUiwklcglYIDsI77AYXdEF0WMULDCb1t+X+j4Li/oVh2bVcvFZs1q1fLhB+/RGFc3TRiGIzevrx804gWgJOI0EWPguD9wd/2ase958eMfMR4O7F5+Rn9zzThO+MnjY+RuHJhC4vbQsx9G+ilw53tCyoxEosoLcKXrLN32CV3X8J3vfpOLyy3f+863+cbX3uPVy9d8+tFHpDDiNHTOQBKWsTn9uQC+qkOoZ+3pc9QmJ4N7/pij2Pn3e+8pJ4P7WENpTbNuMa1FWwE/JZVJZEKJxBwr+jdRcLTtBusK4IgxMY2+ousHvH+NMYZV12GNoW2doJmN6OIKucZ8C0vewZiqs20M69Uaa13lgk5QNEYHZoIPUKJ1XQ2pqnJ/zgJFERBCG6VEzEJphXXSInQOxprxC7ZpQBUBFyEp634YZV4QwzsOI/t+4KFKvAVBVR/6I4dDz91uR98fCTNFpCr3XryUAWqqnCLGLChDoaEYR1oLvWyzeYpbX6GaNXH9jKQdYfLosGOtFFZBYzXbdUtrFSkFhmHi9fUrfvLRH3G3v0WrzHrV8hu/8X3ee
/85v/mbv8mf/a3fWgBTlCIArBDY7Xbc3t3x4uVLJj9yOB4lMKyEPlTnUs8dAFqLQ6Pe3vQ+eqrZWleh/eIBzYb3FGHCSTVHQa7MMqWIpmoSiH4whmAbGtMS2gh6g7aRzdZydfVUiuOMKBLaGlEumus5imp4E/1+YHfbM04C9Mqp0FiNVu1ieHWYOIapIhczGQi54EPCqMIUIOdTtFCUEHxLepPKeHgPsvJ4M64Ubduy2WyYpsg4DHg/cdztKCXzjfIB67UjFw85EKeB3c0r+r4nbrak9QbNFn25QenZ8I6kmNAoGtdweXEpXr3WS6Thpwln9by/k7NEwuPUc+x3aNNwcbGlaS3Pn2xIYRJEaprTwlK4n9JIVJmQI75SEyUfIGWO+75m9TTKNBz6gde3e0Fvr4Ue9L3nT/nGh+/z5OqSp8+e4BpX09iFcRi4ubl+4JRoQeUoMmfjyO76JZ/++McMhz2f/OEfMh4O+P2ONPRVWlJquHfjSB8CL497bo49PiaO3leiCwSwIv/Qtpbnzy/ZXmz4/m98j+fPn/Lhex/w/rPnhGmgRE8KI1ZDaw1RS7pyMYpLtlndf7xhdO/Vw8+M7wycmoFU5wCixze8iqZrMI1FzaAqCkllYkmEHEVqNAgvc1c5xLXuqhjBDdM0Mo4ju92AMYbLiwuccxWxLzKm1iiUKkv9V5QoMquu5dnTDmMd625L27ZM3uO9KALNbFancU5AIhGsqx1tOWd0jGhtcFaUlqyTtL8vUdp0FMxcwk0FAM3lq5wz4+hrFsOgMAz9yLF/WFnAlBN933N7d8tuv6Mf+8qENiPtzs+1Zhsy1fEDcUAMQTnZHNs1yjRsPvg22+dfJ2bFFDU5ZfIwQBpxrWXtLFY3bNYtjYEU9gx+4ubmNR9//BE+TGhVWK86vv/97/Frv/Zr/M7v/Dn+/J//83VuRlKMAv70nmEcOPY9P/nJT/jk00+4ubmpLYqTOLnVWZpjJa1mZ/Pty4WPnmqe1UAoEJPUCXIWsIO8Zr75z2HpApqwWprVjbE455ZURU4SC5xE2mfRd4dGU5oGUiOMJ/VDnWsgaRoXccYQlaCONQVXJ9g0lk4ZBgeZlika7oaRKQrzTa7UfKUSmc8N3AUW5ZeUkjCphAg2LrW2R5lvqE33LJtl0zQA+JkYHgRBnDIGjTOWTbtCF7jcXnCx2bJai2yZs6LKJI5TZf+p55lzxo8TQz8wjRPRB3In6ivGaEoj9J8Xl0XoPTFgWpopcHG5YZwmucpKwCQC0sl0Rkv7TZwIwUsWrkZp24stXdeirGWYPCkXphiEvs9Jf3LMkdW6wzhLzNVpCqKNfDweuL15TUoPZ3hzSuxvb9jd3NAfD9xdX7O7fsXU9/hxIASJcGOtZx/GiTEkDsPI4CNTEGm/BVwDKCOO6KpraFvH5dWGr33jPTabNVdXG9brDkpiOO7xY09JAXJCl4yh4LSmcZaU5bMKCu2sKCHNHvycOn7D2ApH8QmpPEcvs5M5rzMZJ6OyRMQPNrOfP2YD5IwwSgkHuYAordFQDNkJsrxQGMZeBE1m9Z+SMPO6zlJOkna4EyBLZDRrKlfPyQKp9Rljq861o2ksTWMQo2cpRfp/UxVIyDOyvwYSadEBTpAFLGRWlQXPOKjtXxJ1ifEHTtzTNYFbcpYsWxEBGqWkr1lEE4q0Jj1UjbcI8HHu3Z3LUqdVIPvBeWKv2q96f9f9UgnRizIWt7nCuJbcrpiUJuRM7ydKjKjxgIqRrDu067Da0jpNYxWNbSBBV+c/l7jgC/pjz2634+7ujpub2pFScQfTNOG9Z78/cHN7w83tLcfjkXEcoRSstQKsOkuVz1miN3t7/6Tj0SNeYyy6VTRNZpzGZdMfR48xEecErYw6iUAIrZcABjptcE3Der0BpRmDJsREKQZtGnlYaZR2jcHojDET2QZQEXYKjGa12tI5CGPBt6LVaIogaa3qatTU0FnLkFouh8IQPH/4InKTAqqI4U1Fk
6WtXqTAtCKnWG+qRAgDAMWuaYpZAGWPMtta4SphQMoZYyzbiy3BByipUsoV+kNPiolGG5Rref/JM0IMPHv2jKurK1E3ahvZRGKsEW88iY4kqWPubu+4eX3N4W7HeBxYdy3OCFVbay2UwtMnz1BKE1NmmCLDMHF9e0fTWi73E1eHiZyqLFgu0kdc8um+rm0VsjlJpLbf3XJ985phHHjx6gU+BKYsmrS/8U/8Gqox+JL47RRYUxjGgWkcePniM370g99fUngPMfw08fEf/oCXn3zM3fU1x8Oeu+vXpBAZB5nnfhqZfODQj7y6PTCFwOt9zxQSQ4z4lCWzUs/bNILCf/rhlufPn/De+0/5jT/zXVarjqdXV7Rtw7gfeP1yx/72FcWPqOixOeEo4AxWG2HJrb292ViKUpgK+Fskz86i2xmxPke5b3I5L0BBVRYyAs6iYSEq+XJNr9GKzaqjcxZntRjTqp/btY7spM4ZY2Z/8Ly6eUWMVXYvQ2Nb2nVX1YpGUhYiGFDEdUeIGecMHS1GC1+y1uCsiCa07YrLyy1d67jYdjSNITSKEAzOQvADIWiGcSLEVPuMFSFmpkGIanLVnu5WK9ZrAfuFFJmFBgqZRoNyNcOwBFzSqSDOv6iiuaYFFNMUCTnJfeal/v0QI+dM3/fs9zvu7m7xwcOc6cuSbVhMvKplak65vlRlEbIxJOMwTcvm/W/SrDYE0zIpx5gGdvs7Spho9zfYMPFMP8V0T2i05WJj6Zyl0wZN5OrJBdvLDeWQybvMFDwvX75EKcVmvWG1WtE4x3q9BqTd0XvPJ598zI8/+oiXL1/y8sUL+r5nvdnQti3aGFKMVbltRjN8ZVHNnHLnS01XTmIGKxmjJW2rQJnZC5dXWaOlT68iLucJWZRBjDup8GiDNgqtM8UYSm27mB9aa3KluxMARCU5LRldH60uXDgRyJ5ocAY6p3GVeUtI/csS4S6qMJxAJyWfb0o8WrQ7H4PwNFvZRFtYJwg2EIMnRQGWKATJqmyDUYa03pBS4vLiksvLK/Hwqij1rBa1REUsS3KJfM/rOkZXvdN68sY4jLGiAFU8KWWatsG1DS4UXECiA1XbXUp970wnqRW6OmYzuCSj8CkRsuifSq++rs2nM0uNtIJJfT7ivSd4j/fTkm15iJFzot/v6Q8HhuOBqe9FYzVGYaFKSRDKUXpzxxCYQlxIMmaFLK20ZHW0olkbrNNstis2Fys22xWrdUfXCX2q1oqSEyFMNQLJSwsN52UbibekRcIYARm+IWbwx9qGfobhnDNTM53tm889BoEGVNJ+ytlNJscjFZ5Ta4hSc9Q+K4UJel7eXSqqeG7uhJAStmascilL+fLknJiFK9w5K739ja0tSlJPXK06rA1Lu02MqZJl1DTsDGNG+oa11lDBWCdTWQRtXk6pdDivp5+wI/OU5wX8VB6UMa8UKiNWqi1Zp6OUDFhdC/VSvAm+U6pmDYxFuRZlW+FgNk5qv3N7UpwowWPjhI4eVRLOgDNiB4TDwWEQkZQnT65AFW5uG5RCymlHiWhfvHhB0zg26w1KqcXwvn59zfX1Nbvdjmk67QMz41qp14m6dtR8/F+1iDfnwjgGaa2xwlJkrK20YwGQNIF1BmsMrhGBamcMKCO9im1LzCJHl4smqga0xa02bC6f0m2vsM0K0zjcSmEtTOlIGBXKiC5tAsZRFuZxHLje3TENI3HylJjQMWKz4nm35ltbRTSabzdPOYaEn4647CkOsEmYsBLEolBZkyoi0VqLShC1TLFzzSJD91jDWsv77z3nax9+yNe+/nVE5ccRvOfVy1d4P9FojVUSFaybjpnVSWvNs/ff5+rpE4Zp4HZ/iyKz7QxtY+hcS2sdjasP6+jallXbSn1dKRrr2Ky3Is9XasqsIhF9imRtyVrTbDc0k8cUi8qWHDJTidUIVdNuamrUaFwrXLz98RYfBvYxcUgB3Ti+/p1vo43GdCuUbfjW1z/k69/6Ns+eXFKUY
vKB/f7AYXfL8bAneS8C3A80/Djx4z/4fY53t4y91JLiNBBi5O7QM4XAq92Ru+PIOAV2h5EUC34SbmGKYDtX646r5xc0q4b3vnFFt25ZX7Wstg2bzYruwmGNphAELDceOR52+LFHUyolq7RZaW2lTKCEmQwlKKGiNFmdehXfrOe+2UsPdXOdJRvLXBASUzCT3txn/vlyjW/JhTiN6Gyh6tQadUILS2tLqSlEw/ZiJalXNRJiJpfEEAa8DxynvoI+qyOpIZNJpcF1FmZZ0qpv3bWW9abj8nLNet3y/vtPWK/aik6W+v3z4zO8D3z24hXHvufubs/d3Z7aYyOOkSjDipb0KMQZi2iIQcQ/tKNYoT0dp6nKY0pq3FqHdXJdY5J1dDj09P3IOAZBYj+Q4c05cTgcGIaRqYrKUBSqaHR1gXRNc5sietylElSUAsXKfq3bNWb7FIwjqoYYC4M/MvlAGI6E/StUDJRxDymwds95frXiybZl02rWreX51ZquMeT/4p/n/fcv+fjjj/hP/pbieDxwd3fN9fULPvnkI/7u/+dvo42RrgyoXO+J/eHAbr8jpYQPvgZvhrZtpDxWwRWyrs3n3hd/0vG4zFW1UdnYis9UAq5KBXIOAi4wagH/2iLABFN1V511IlrtAyl6KeVbBVqjrat6u23VcLTC+mLFu6duKsYI4rio6oGmyOgnIZFISTaqmNEZOhouXaI4zXbTsQ6Jy9aydppkFVHnepMnSTtX1pq5AK+VEQYXVOWbNg9SmP+iQ2vNerVms96w3WwwtsXZVZXJS0zjCCFCSnRNy3a9EQKG2l/3/P33uXr2lN1hV1NfEWezwO1rTc1UKkaJbE0VIDilLJ11NLZhbj6Z05pZKYyV5nnjHNo16KYIPSWZbCCpQi4WkFqQtsKzapwYXs+OMRWmnPE501rL5vKCpmlothfYdsWTZ8/YXFzQrtbALM4gALFQjW5OD5dqzimyv7tl6o/EaazrSkoPU/CMPnAYJ+6GEe8Tg4/kJM7brF1sFLRNy9XlJatNy4cfvM/6osOuFbZTtG0joBstKcacIcVACEJFea5HJFtHzRQouW5FqaWXOdV2lfOaLfzxiLWcUjn1WpaaR6z4ivo950jnxxlF2OK0Ii/1/3p8kuJYon+jFa5xoBLGGjIQQ67SdlHW+MwhiCKkiIlC6CNR43ntW+hirbO4xi4ylJt1R+2cI4SIdU6Met+TS6bvh3tHP88bKMiFHNMpolJntXSlznLMpfJQS9Re9Kywo0hF2LR8CIyTP8uiPFDEmwveB2KIgtw+Ww/qzf8VeSzoQCVdLBiHcS2u3VCMIShNToUQIn4aSH4ghwEVA6QJlSXaXTWSYnZG4YxgHtZdwwcfvId1Ba0zf/CDJxhdOBx27A8H7u7uakZBLfwQgvHITN4zTpMg2dcrXONq3V4yezO505JiVmcX5i3HIxteBLhQMiYEiWbbFY6MjbYCgEREQdJnuRblBdEwjZ6Uimxew4Qyjs3zFW51webiis32ktVqgzUNuja+Ayi9wpg1WQcUDq0yECkqoZ2hXXUCIx8jxITJHp0SWSWmnEhRMxw9fUiEUFNUOZGN3KQ+aumbtA1UCjfpowNrnGymWle/9vGGNYbnV5d87b1nfPPDD8i1HzF4S+lX+EbRUrAIu5PRFqUtupN+6Hazod1esFKF7XSQViuCCEwXCD6SovROGm24vLiiFMVmc4FzjYBWSiGVsiBycwX9lJQoSd6L6dDNFtM1uNRCTKz1VKMPMbzaWCklaHAOUgq8Gvfsbl8wHnf4OOEaQ7e+pGs7KYwWgyoGXQymGHTW6ATjYWB3fcdxf5B+4gc0vFprNl2DSoGpFKaQ2A+BcQpc3/UMU+Bw9ExTIkSpwVGkbq2V5umzJ1xeXvDhNz7gt37nN2hXDeurBtMobvav2B1vSCRCG2o6U0mrXUyQCilkpiky+UjCUKyTzIKSmq44h6CitH4kKu+vlnJNTVieMrfAnOoES
YcWZAPO5aShjBK2pDlNt6R3H2xmf8p8Ky2OSP3uOd1cyiy3l5fz0FrTNrKxpqIqWUzEhwRWk7SSDbn2VPfRM6ZAVIlV7igZphQoqqBCoajMsdfc3O4IIfD0yRVtC9Y2NK3DuEhSFsyEaVq0m6qzk0HDai31WF1ELUzmTNcar2QRjLFoayu3trA7lUpGoXUNUrQRPoKaCYwxCyNdnO/PzAPZXVJO7PZ7Ri9gxrkMNSf3F0N1ZnznVLpS4JoG026IphED6CNHH4g5E/xIihPFD+iKUdi2jpV28q8zdFbRaIXTLI9N18LVJSV+jX/yL/w59vsdP/rRFbd3d9ze3nFzc4cPkWM/1Pq+ABjlkWqkW/n2zyLahfXsjwGq3n4yHz3i9V48S6UVXdey2a7kguSm1jwiJYvXnnOu9G/iLfnRS0tMiBwGj20VV+2K9cUl6+0lm4tL2m6N1U01fLZ6WR3GbshmElYbYgWEZIwzNKtO0M9dpISIjgFNJqmML4kQFYcpSKrZp7rpJIl0M4SoSErTGFtr0mpBdDrroEa8D9V8/UWHMYbnV1d8+Pw53/jgfXxIjD4RJoPuO1ILF9awtoqYwIdCVobUdeBaus2GZntBpzKbaUOJAZtGDKJTGaZADILS1tpycXGFsQ3bzYWQmRhLpjKQIRTwukBWpWrKyoagbItqCia3OAI6RZRxNRKzsilpi9EOpQrWBLzPhGnH7u4zop8IYSKXFd3qgtVqg6/I4Nnw6qLRWaGSEsN7c8dxd2AaxgdtJ9Jase4aUoikVMiM7IdIP0xc3w3040QfElOU3tKUZPOyNYJ6/sF7fOub3+R73/8u/5W/9E/RdI5QBlL2/KMfRG6uXxNVFH7gWodSRQhiyJBjYZwi45RISpONI2lDUmaJxCgFlYVDW9SKJW2sNRSjlggBqAgGdQI6GvnOjELlmmKuL58R0Of14i97KKXo2ramAYXYJqcoeINUAYTV7TNG7lFTAGOJuYCdKJNHNZZSKUenuwPJZ0lJhkAxcJUjJSuaVIkiQqaQOPZwe7NfSFA2EZrW0a426BiJGNAG0zRoaylakUpCayknaKUxmJpqzsINnTIx53rMNdOTYgVhzWCfKj9aI91c5Q+HfhQ98ZrVEirXhwOWpJTZ7/dMkycViXJ1QTKKs/GtkqOqVMdwvlZF0bqWdr1hSIrBB3zK7PcHfIio7FEloOKEjR6nYNs4to1l0zpWTtNavRhcqwpWlSqqc8GqUaza36Hvjzy56Li+fs0Pf/RjwjRy7DM348gUAtMkDq/YFY3JNfCYe305Gd55/auzvz3E+CVwNc8YN0E/uqap1HQtpRSm4UioNbeqiU5OwhtcqvYlpabN9Enk3RpTVUWKEOrHTI4C8JF2I0PGklVDIhETQl0ZQ1WLCRWIIOQYOWTUYWIqEIrhkDxTzOzGwJjygqxVSiD/uiqzyKNm4SpgA05o0PKAN8HPG/OGrvUMDqgpIaVwrcNo2GxaLjvH5BN9H0hoRmMkdYc0y3vvOewP5OTRacKowmc3r7HrjpevXvPq9pZhGMgKoZ+s0YfRkrLRSxpTkbOktkOIjOPENAlrmPcB76NQ59XULKUy/iDXvBQNKhFzIIUJP45MQy/X0AfC5BmHEYWW8gVUFh9xynOKRIRKcBiO+DBVqMoD9vEWarpPgC0xZUKMhJREOIN68xbhxjbW4azjyZOndF3Hd7/3Xb797W/xjW9+nasnV2ir2feBmMTJmUYPyhCDFi1ZVFXKEUOqjWQ6rMuYfGLfKXVznLWijarSlVk2ezn0E9vUiWP489bVKQKQdT4bgJOxfcREMyFG4eMtuhpekfUjg8onUIzRmsaKM2h1bdkKihIKkYJxiqg0qbMEA14Vgi60ja7a4CJfZ4yk+9erltWqoW1bnG0oEcIUCTZibRTHD2lFmhW3FkOogPoctTiQywkAmLNch5QyxeTK5JcX/ek8U3aeqW0ZDY1r0DrhfCYlWf8hxAcDdZYi851KX
mSz59UzVyDmls052J13fHHgaotUisRhEmKTsYcY0CpjqjFtnBVEfk6kkBj6A7u7WwSCdSUSjVqAr+JMSk+0Rpq5GmNojaO1DaumJYaENZqYNEqlZTpO/NLqDXbBGURVMzcPXEJ5ZMNbkKZx8YhcY9lst1hrabsOULz67GPGfpDlqAQznIukApUR9RGhQmtxrqNtV3SrNU1jcVZhSOB7stbEbFAaUkwUGrLqCGaLV5rBXzMOE2PfE/o90WdCzoSceD1GhiEQ7kbh2c2KIQgaNOpEVpmV1Wy0RStwWlLNjbE0zkjaUAuYTKTrpFdWmfCgac2fN6TH0WG1roorsuFro1ldbFBkPvzwOR88veRwGLh+fccUC2FUpKwJJTN6z+3ujo9+8mNimEB50IUbP/AHLz5ld7vjox9/TMlwsd3SbtY0q076fp2rKWyzoAFDTMRp4Dh6bq/3HKfI3e2Bw9GzHwK7MUg0lqVly5kMykhNXiUokVwGpunI/vY1d9cvZwuLRnP96jVdN2BaI8Qp6QLXKLTJTP5IKJm7u9dc37zg2PcklYSe8oFGRmT8Zt7pvtZ0B+8JOdcIU7ajrm3ZXFyy3V7w27/z2zx99ozf+rO/xfd+7XtsL9Y8f/8pMQaGj47040Dfj9ze7FgFh+2icMnOwuxErAFnNd2qAaPxOi/p5xREBMBaV6k+G4zW9ONETNOSWpYUbSHrsiD1Vd2A5n9n521OP1P/fG9LUidj/GWOnAvDMNFYi61tT2ThVyeCShUjYqBxlvWFEGhMIZJyokkjNkQxfI0mZ8OlXRFTZhwd4+RpG0ejI04pGqdoW8uTp5c8e/qEVeN4uu1onCX6TL8bKEmToq46sEKDGANMY8JPmWkStqpSe35zjQpTKaSEBAVRskHZBjR1H8mV9zvEJWpGCYmNAFal3JWyBBrGOPp+ZBrjgzVT5JwZxoFQqF0hcDKv4gBmJc9lLTgNscm6xsKaRimO08Dw8jN8CGQ/Qk40XUPXOhqruVh36JLgcMc4el59ptDDkfKNr/Fb3/4QZ0RhqnOGHAs+RVSK2FRwpbA2DbHpeLLe8OziEl0Ur5tWygW63oVzckdTO2BOwdO8mFXtotB6TkM/TCbncQ1vvRf1LC9ZgUjaGAFFaVGMWZSFlEDtZ4FpyiwCPfs2FeigpT6iSoYcSWFCGqZlAksJUGkC0VYQskqAW+fJX1k24AuMGYaQ6adISjBGOQbdgLZngtfzQ8+R5Vlz+3mL0eMFustQ1VsrZxF3ntMnswdnDcpalDXSI3Um9BlCgGFg6Af645GYPLYV4vYpBno/MlRCCKierj61ip1IGVg25oWbNkamaWIaRYknBmlPSCGKdnFOdTOKAh6Z2yhKJGdpBYq1JUgrjVFGSgA1vWiqMpE+672cNy8fAz4EYQLS6oGvzWmBlyoPh6mtJ87hUGQljsBqs+by8oLLyyuev/ecZ8+f8/TZU66eXNGtWgESUkQbGVHOimnmORe6UvTMJ54XvuIZa6y1FoL9Ks94vj3O67KUUrEUelmvP31C1Ok5NTvG98fMOveYAKtcAUWKUgld5kM8kdoICzPYuhCLFvKbldWEighGG0pRdNaRM4yNY/IN1lq6VYe1lu2qoWkaVq1j1Tq6ShdptaZUtZvgI9YGlBUA4pw+nvmyRU+3COhcl4r8qLVzxZlA+9zeNONKhB9aBBrOIsm6/0inpkKrfLarzfXXBwJXMR/HGxC+eYNGBB7myzB/63KcimWvLkFk/nSS/dliRQVNgdO1PlxbCBXCQQ1loROWfZd7++0p2U2VIFXYSvWolzV55hQuS/T03Plr5tLhQ9BEno9HNbxaKZrWLoLk2ih8TGAb7HqDtY52c0vT96iS0PnU5lFKYZw80+jJaKIyKCei9Na1oiLkKw+xF8BMP4o6yWqzoVuvUCqx3lySmoawu8VS8Psj42rPqBND8OSs6IvhLmt2UXPndSV0EM9/6xSNAmWEDchqRauNICadxlpFI
hNTEBq1kihFVU5Xt7AAPcaYubGnceL29o5UIOSZZl0chOvjSCiFcZg4HEdiURS9QinDZy9ecHfouX39KR/90Y+wTvOd3/gO2ydbVtsLmvWWFlgfLuVmNFpqU+rUUjJHUnO6O6XENE0cDgdefPaCfgwc9oXBF6YxEAa5CXWWVPM417TyLN8Yyalnmo7c3d5x2O3ZrDestyu6Rkg+2say2XQ0q4arqzWXV1ssheNwIEwju+OeXX8kUdBt86Bzro2hvXqKyxqrDKtceBqEoa27mkgpo6oC19Pnz/n2t7/LxeUlv/lbf5arqyuePH3KxeVFZRyTR9O0tN0abaxcw5QYx5HcGC5WW1pnObw+cHt7y+EwMU4DMYHWLU5bTCxAqk6HbIW+eCjgo1AqGmtojGRufspqeuO3NzYiNYu95xOxxiONjCZWw6SVaPSiFa7r6gtydeYKDKPU4etGur28oFzUrFvbYqyhW60xxi5p3yVaqDVBlMY6Jw8lCFtNIQwDcRwXZidtDbZr8SHQV2akELy0AuXEGCaM1rRNV5G0hqIVOiDytbkSZARkP6mCD0vLlhHRD2usAMZyIY6Svh0rDWaK+aQr+0BDZBB1nRvq0ijM9Bm5yE+xVBUtqrKPNtK6CNic0H7ApkhnZX9cdZpubUWUpcqRbq4ucBQ+fPKED64ueP/5Uzbrjq51OFtBrbWQ6BS0Wvr2TcqoEFExozLoUpbq82nMhnZmaJvV4+bUcnWYtV46Y5BTfevx6BGvOWtFUUothAHGNtimxTQtxjWoklBpflu9qacg8HgFWZuKxpSIGQolBpKf8McdMQQO+wM+BEp6hq6GsV05jIKmaclNg3MNzjpCUqATWWc8iqko+qw4pMrIkgRt3aFwNbIzprbRVMNrqjMhXBwVhTj3P86Aq0eKAuQ7Zb5jiozjSFGGrETiKmugKHofQBX84BlCoGAoThbd8djz8uVLdrfX3N3d0a0atBUUuOtaTOuwbYPtGnIUdO5Ms3biNC1n8Fi5jimJ0Hd/PHIcA9NkiEGRfCCFKIpPOUApRHH4xehmqRGL4R2WlqDSrWTzsbbeQOIUda3w7Xatg5SYjp5pGoVLNwYwRoArD3hJlNbYboVtR4wPNF3HarPBpYR2jZDitx2uaXj/gw/4zve+w+XlJd/81je4uLikaUWlC4XUnpVIaZra/jOngmOMGCMOjbFadH6nAe89MUVSUiirlvsMxHnNlbI0VmYkoaeUtpBSW1MklXw+LScCAfmceX3dn7hy71qfLcIvfcw62ScgmFIVfASQIqQaDcdYHVJTjV4jSmaNY71e4Zzj6skVTduglBi2VBDDXqSMkLMg9WftXV0zBzGGJdLLpYjhLYUQg2RoQqhgLyHuiDlSMDSUmnConbBZnyLfXJ2mlJf3qhq1S7bt1F86139TSlXwPZwQ6w+1yAuVZ7/G2/VyF8WSTZtRE8s81ChyyQ4ihlCliM6RVjusUXRO01mJbrUqWAWda+iMZrtZs91uWa9WItdoDSfsyqlaLgzVFfewkCLN6e5TFubeKlVzlu6UqTlvIZrpQSVT9DC5g59reJVS3wb+TeDDerx/rZTyv1JKPQP+98D3gB8C/2Ip5eZnfhaS9msbIV1QztV0hBLWpKbj2XsfsFl1TMORfneDAtq2RWnNapwYp4B2Dabb0K23fO2b32b75Cmd1eQ4EccD0+6lUBpOHpcy6QBDHhl04WCrSs7umjAO+CiQ/ykEbu6O9KNnHCfR8MwJkYYvGC3AisZAYxRW11YKVZZWDNsINyspk0I66ylUC4XeL8Bf+ztKqb//VvNdo6U5AhEawJaYE/1BRKKv+z1x7HG2YdWusa5l9fQKYyw+eA6HPcPQV75ZLQpRk8erkZxgOBwZDkdyzFglTojwU1du27Mde1m4NTKKIRK8Z+wL/QQ+FnIoUjKoBcccZ4YhAVel6Jmmnmk8Ch90TNJS0jS0zmG10CO2TcuqXdXzt0JLOgWmKWCajs3FU
7Rz6LZld3cH8GeUUr/3tmu8aVu+8/3f4Mn7O4ZelJjeu7kl5YQPUmvbbLd0qzWXV1d8+OHXaNsV680a4ywFiJW9x/uJ0Y9c396yP+wYpwBKWkumSVDhx0MkTTAcE9NY5DEkYiokO5GVgGtyFXoX8hGIvm7mtY9Xrk1NIS5+0ilVOz/mVPb5WF4+78Xnxvdno0Dfeo3PxzmPrKUtiFLwlVkpTCPRe1pn2XQtVms6BaZkhilhAnS5IbtCqxrWdBigcQ7X2FoXMzUVbCioBTyXUyGOkkIeJ2nlCX6gH49kVYhIhuJwuMOHgVwi2ghAS1iuDKYRJ6FQnXWVyEUATEbNkZe0I4rzFJd6/Jz6nY3tNE2EIEZXKGEVIRY++uzuweZbKYVDY9Dk2pLGbHDnVDhybKVGvylVh7pIxN60LVdPn4kDWAUvxyQZTWc068ZitGW16tg2DZeXl1xdXbG9uMC2DcY5iUJNBaflGRgoZj/lWBmwooAvk9T089y+VwSsO/MtyP5sln1L1b3r85mq3t70fpGINwL/w1LKf6qUugD+tlLq3wf+u8B/WEr515RSfxX4q8D/6Gd+kpIFt+oauq4jYgn1CW0abLPi2fsfYt9/zu72mhdFlGouLi6w1hFiIqSM69Z0F09pVxs++Na3WW0uyf0tZdiTxgPT3UtyCugiqYV0mOiHHbEEpnyUmlaIlFTwIZKwjKFwfXegHzzD4AlB+kwNtUamM0YLZZmzGmsyWmcxrrUw75w00ucQCTmjs9QgZsO7pF+/WBTw94F/9m3mW2lRJjLaUEoRVGfTULxnOIz0/ZFPf/wjrl98yrOnz/jWN75Nt9myulLYxhF8kNaBvq9RlCb6QJo8U1bEkBjPDK/TBmus1FjNTBZy38Nc6lM5E0MQfd6jpx8zsRhyMeizamSJ0pYz1/WjDwzHI+PYE7zwfGul6JqW1rVYI+LlbdOy6la0rsVoaSEbp8A4eqzr2F4+xbQtbrVBmw7go1LKb7/tGm/aju/9E3+GY98zTSP98cjd3R0pJYksgafPnrG9uKBpWtbrrWg2S5NWpbQUdqL9Yc8wDlzfiArMOAaUtqQcmaZIjpmjDURbGI4JP8A0ZMY+ElMhmImsRUKx1Min7VpZG8UTiah8Fg2XglpQ9yevf+4vRf1sO3quefsF2y7eeo3PNfXl27Swx5WSGavxOR57hnFg1bV4o3DGsNJgi5J0ZIlsSovpIOlIKCucAt0ompURpHgjTrNphMc31D7QMCXGu4kYpcZOKfRjz+54wMfIYRqJOTPUXtVcAsaAtUq45I3QgRojWAwRx5ASVc4ZQ60xGkGip5wgwZnlFcR0bU/zk2cKQbinYwKkpedr713yBx+9eoD5RmBSFRUfZ0OH9IjPCkVqjtgpkAupBME9LIa34+rZe4ToOQ53hOgZUk8IA6vG0TRrGqNZrzouVhuuLi958vQpm8utZNsaJ7gUrWfwiDiFVSd51gWPKRBjEAOcxFmqYBQkSS3nYoytj8q4VvfpOfpd9rHHaicqpXwCfFJ/3iul/gHwTeBfAP6r9WV/Hfi/8vMMAQIhyCUJw461Eqm0orrSNg6VEiWJkejaFoWkHAyCJi4qV51RUahwKmOIlDiRfE/2A8mPpBCIlewiaumzVTqCnYBM8dLW4se+Ksf0pFk4O51af1RNURijpZ7rLJ3T1ZixAF5KgclLRDOFhA8i8p7KDG2Y60NfPOXztvOdUuJut0e5Fcp2YAqNFvrNnHNlr5ro+4HN2td0nXh7ui5mAXaIvmjwkcPdQW48Y8FYhmPPcXeglELXtBQrwutK1RTXsmZVXQMzo5epQDpBfceUqwcqC91WNGSsyauZatISic5A0jijsTXVr7XUTWWFCUuNdcLdPdfllBYSjm69JaMxTYfp1mwungL0DzHnKIWu4gIzSKNQyRyq4V1vNrRdh7MOYw21w1lu8GoAde3rNfbU1H8iudDkpEhADJK2C1FUaGIqy3UU0hGp/Zksl
ImLdmtIUAqmyjDeX5Zv1MHma/jzcDozyAX594tsUg8x31bb+yAZqlpuLNUgImWQXDVti7Rc5aIhZkiZkj2UA23jcMaxXk9oY1hv1uhavlD3AJQ1Bakz2mqJkFsrzk1ydKlFBcMkGwQ5izRhTkVKJgnhbc6gVKj3Y6zRbJYgm+rUW02O6V47oqpp3pJqYFAUqp7nTHk5txu1raV9oPlWSigUT7s5bzhaNf3LDHSqWJNa49XGyPuVOJlSRxegmzYWa1qscxjbYIw7W/u6XodqJO3cKXEyipS5FSvXz871vjvt5/cUtnQtVVbWvTeDhfullNlgP8z4hWq8SqnvAX8R+I+BD6tRBvgUSUX/nFGgkrmXklg3LVdPLug2Fzy9vKBdbRh2nmkMNFrx/PJCwAXJS52mypytTOKqVTQtrBhxSZHGW+L+JXF3R7y7xU8Tr++E+3PMkakEVmvF82caowtpGCgxcHg1cPNyYH9M+HEihnkTK5QEOokc17pxOKd5sulYr620CyVJsKQYUEoxVSBDzAXZ1+pnVMNftBGk6yPN9zR5fvCHP+Kb2fKeariwHWvbQhSjGybPfrfn9etrum5DRI5RNn0tdPpJemSnfiKMno9++DFt91p6Es+MtzGaq6tLVqsVMYQlza5mlDNABqMM1jRY22LdCusgpoFxilhncI3BKkVrhNSBIPyzXduwWrX4oGnoGGzkonP0jaNzLY3tMKYV5RMUtmlp12ts24IR5LZtV2Sled5ueZoLyrXoRqgkH2rOlVJo17KyjlUpbEvmWXpeI0qJBkR/eq4pVcTyjNKnBjKqsC6ipuKck/p17VnPCSF2iDCoQjSF4zFxGBLTlIlZAGlNtxLihhjRMdK2LZfbDQCmwKQ9MSW0D2+klOeTmaemglDunej9Hwv3N6lZjOALzNdbzbdWilVV5AExrrLRQhiEpSglUFhSgn6cMNqQsjjPcUwkHylRqAqdNbz47JbNuuMv/IWGDz54H2M11lnJTNQIS/rTpRe76YRv3raCrDedxa5axinCvmOcPDe7gaEPhJhJUQnTVJLuC++8gOlKQkBwBevEdNnGYKwl5UCIAQongFCaU6vi5JI1ZI3KGq0sRhdpaXKWM2PyVvONUtjWEoVdVwKPkgVWVZ0yTe2mUFpqrlqx6jqssbRdg2kcTBNTzISYCVmRisG2W9pWar1t52isOMpKaYwVKVjXNCKq0tTW0rmchey3KcZaFov3at0hzmnnM9yNPn1u0za0Z2U5ecFs10/OJOVhot4vbHiVUlvg3wb+lVLK7g0e16KU+tyjUUr9FeCvADzdNDUNIJ7/zBg039+yXUgKZQZA5FTIQdhochFS8BI9JQZyMKRpQJVMGHvCOBCmET95fK3njT7giUSVKElhVKl120AugZI8OXppuq+izeeE8DN03VlDYzXOGpwxsuCLsMvMvY6iuQmpQKplyjlTnecWgi9+0fTbznfXtuwPR+FsDelMd1Yt7UXz4kw5i4rPHOHIN9U5yUsbhJ8CVLq9VJl2QvAUY6RdQrr27wMVzqKkGWSm9YzKrJmDClSZIztdz3cGZFitaZ1AJxonfZLWzK1ktX0JVbNv4iGbBeBVr6aqaSUr9SlRRmmXG/ch1vh3vvOdBaghF7GANWeGqBZCz46plNmsqeXP53zAAkgUycn5cxdSmbK8oao3VbYQpURspLFSYytFEMuzGk/OZyT9p/V+dk5npnZO/c9lg1oTXq7r+St/oVTzW6/xy00nKUHmOc/kLNFgroQ4CrW0HM5nk+t/xHgoYixMQyDYSD+0KKUkOp4jqrN7Yr7n52lRslFgUBRdsFYkFU0qS6amZEhz9F3XaU6CEclGI/wG87UoJyEKJU7Y+UTM4KpSYEExzRWx2jojkWFetMkfar5XF0/rPAuASuq65XTdl2Oc9/TayqUV1sxOXd1TqoE8EYLIvTwDxpb7SM0ERfUafj7Sr16dk8qURL61hXIpoZS5iCXHp0+llLllq
JT7buT99z3M+EKGVynlkAv2b5VS/p3658+UUl8vpXyilPo68OLz3ltK+WvAXwP41vNVGcYJxggofHSk0rDejGw2V4TNBpWjsMuURFJOuEHvdkzjQIiSr2+7W3Z3O6x1bDY/wRjLdNzjhyPDceT29S0hFoaoiTgunz3h+dM1F6vM158FjAqMO02cRsZD4M71NK7gHMQC2oiiSdMYOmXoWsuzyxXOarYrg3NKOD+DcKeq2rMpxFqz8ZK+yFnucJo8ph8EZf3FNqXvA//Tt5nvrm3Lj374Y7ZX77PZPKVpt1L78bH2FEZZmBSKVmBtVa2pqixJGKJyjJxoagxkoWBUqpBUJOu03BS6or2tNVhTqeOYZROFe7br1rSdp+3WNKFAFcpQ1uDaFksRVHvOGJVAJTZdy9Mrh/cOVRqccWxXrWj+WltZf1QV3ABrLF3TYI0m5URMgg8IqXLcKi02KIjzVP/z1mv8d3/3d0uNW+dn58+ve0N1MJZX1M21/jxHMM5qlGpAZS6vNqATq3WDs4ocIn44Yo2me3bJZrUihguUSxyPA95MFBRXzzu6dsXQTwy9J06Bu48/JcXMOAbJVtTNSVsFTTmlOM82tlKkhQQlzvIMncm8sQcuW9ps9BLlZ29Vb73GP3x+WSY/0TadKH+lhEr1WEsk50jbzRESGFcvdZbap3YNTjeMKdPHsmhMz2s2JeG0lnZCXfkxMyFkQjhJ+wlgsBrUMrcC1fprLviQ8D6CkmhULrc4qKa2rGhVUHMZqH5OrJrZhYJrmkX4gXpNSl1SpYiRv7zYknKh9aEyTCFtPWJ43nq+n3zwrRJLxOeaGcyJkKsA/ewFVAfRlNqqoxWtzjiTMLkHX0jDHdP+pWhn+0mAZHRYGkq2YDpUA7ayjTWNE7yKMcQQpX1LaaE4VaLEFoxejG6IAR98TeefBVXM96As3jl1buYUOOKUqnNSnfkWLnNp/REiXiXuzf8a+AellH/j7Kl/F/jvAP9a/fdv/LzPKkWYiyS61KAnbNMDmnE4CnjJSjM6SpOV9IFOo2foB7yfCNELAXgIWGPx+wNGG0EuBk8/eG53oyw4vaJoi202bC+fcLGKXG4HNB4TeoLKNK5STc4PA4JWzpJGspquMaxXjsYaGieSaxkq8W2NMhAAkGyeuhrjXL1rqS35EM6izp87xred7xgTt7d39MeBafLSXhDzshDJ+Sx6UYIQNHrZPqVGku6nDIuCMhOcQKnN+gv8Hkl7mYVSc36fbEKqsidZa2stx4GeCUkk9aNLRqXEjBrXKuMsrDupEw2DJkVD4wzOntV25xsD0QF21krK64w8JFeCEIUSbuNTSvS7wP/9bef8NO7Fj2c1VPVTXjGbLPlJI1mWnA1d1+BDI3rIdeNPwaOLODdt61htGkJZkVTCHgwU6NaWVeekJhKSIPb3B2KIeJ9JGQEiaQX6pAt7ApWwRHa5zGQ296PZcmZ4l+ijVB7cn0/F+dZrXNp4Ik0j607lOTqdEbVZepTbRgyvlcNMXhwKXTMmXptFO3uO4qnrRdXNfI7WQMQU0pzWKmdkJGcbs8yV/J5TJsaMrprisxMmjotkgJTKC5+LUlo4uGfSm1r3l4uSmduKyvy/UnkSmgqeMwYfEzELi9o/+s9/+CDzLXMuIKWQM6mkSmokae/ZQEm0K5kTo8DqglUZnQMlKkoYSNNRWghjkPJAq6DVkEDlhMoiL2qW2q7gHcSRrqnuObNUSzdzx8TcfpVzdVzKfVe4XuFTVu4smzSvqzfj/8LDGF34YhHvXwb+JeA/U0r9nfq3/zFysf4PSqn/HvAj4F/8eR9UqgHSqsVohzMNrW0wRXG4vcGPR9qqWSsMNJmQNapZY4rGOo+KUdpyug6UJuiWqDSla6AtrC4M3/igAWXQtkNpS+MiViemYeTjww05juxvr/HjwKcvj9weIoexMEwZn6BpGnSj2K47tqsVjdVsV9LYrUhQRBqvaxu5aUxTGVak9SBl6T9Vd
VdSNV1tZtDSFxsXbzvfACVHcvBkP1G8lz7GedUWaTGZ6xxd29E2zan+WFNaM7jBVHH2phE2H2sFYRvCiDGK7WYjmsn1M3RFm5aCcLuWxcwvDkmhYKzFNk0VuJc5M1qjCzjTQLZ0G4uyhTh6bm5vOBz3TN6LmLsRIFXTtWw2a1brFU8utjzdbnFaMxyOjMPI7c0tfgo0TYe1skaUsvzD3/t7AM+B/9pDzPn9Me/kX/zCz0PSZLKBpFQd1lxIMRN8gJQJkyc0novNBU+unnB7d0f0ggTfdisa60SAolMonzlYh0+QlYB4BElboAIKdc6Lo5ISkr9UCr2sZWqEXq9ldXbuAVHUGfvPzx5vvcZnJHCIfonIU0kUVTBO4xBUsujBFmKqO/DMrY6W7ocKqtHGcHGx5cmTLdvtBV23wliFaJ0oSkWfg0RFOUldkUoPm3KpLGyJEBLTFGrZyzNNnqa2Dimlsc4s7EugBHeRhXhn8iJ7inGCCymFkmYWPGmJcU2zyI1qYyWynryoFMUk0WjKvLze8emLmwea77m2eypViazm3F45G7P6BkVte9uhVaas1uiuQ4eJS2eJWjEkTaKwvVixuVizbhuebbd0jeNivZKsltFQBE8zDD3JGpqqQBZjICSJcIexZxh7pjDhQ1g6YcJcBszVEaJQNLU8Uw/13hKehW7UUjKcwVkPkW/+Iqjm/wc//Q76r/8iX1ZKwcfEylms7mhMS2dbNIr9zWtBBK42oqtrHc61Uq9ptxjTQYqYnET/1Ynqj6+avbpp0I1lvbnk2Xtfw1oB3Rit2b/+CYebTxiPkc9+8pow9dzcvGaaBnZ9Zt8nei+9pBlFu2lZN473nmx47+lGYtgs4C4/CSy9sdI+JFSALUrVNgaUpKBDErChPje89y/uzxm/V0r53beZb7F4kew9aRzJ3kMIYnxzRpVS08KCwu1WHW3XVoUZdXYTneqyzgkp/Gq9YrVaCWw/TmgNq3V3SgnpmQVmrruI506th2e1bN1CpdhW9Zb6ncZoAVlZgzXQtRZtMzFPvL55Je01fpJN3hps29B0LRfbDZvNmqeXF7x3dcUw9PS7Pcdjz/WrGybv2aw3tG3H3MP3ne98F+Bvf858/+Jz/rnjj1/0U6r5818pUcwbdfh0+j1MgWIS0zDhXcPXvvaMDz78gNevXzMdR7wPNKaVHsVW0ySLmgo726AThClLajBLilBpfabxKsY31awHilpaELIOXUXCl8Td/TLbUhb9Ar7GA6xx6XuegifNnO4oisrYRoOxKKtASTdFjglBzbcn8oly6ts0xnJ5dcXz51dcXl6wXq+F9dPNzqKQdUgbITVdX6qDlAlB5B5DyASf8LWFbRw94zChlMU1HVoZGtfVPn/hDk4pi0xlPZ8MuE6jrV4iuVIKlVeFtm1pXMvSex0ik/eLKEes+23baH73L/w6f+vv/MHbzzdi91M6Gd0co0S4pmJilNA61kMl58i+v6UkjwlbmrDGhMzTxhGLpcMSFVxebthebdi2He9dXtJax+Vmzcq1OGOqYEJm6CPBarpmjcYSkidEzxRG+vFIPx4ZpxF/ZnyFKKZm6qmuvyoow6n7AhbHf973qFH0rKR2vu7fZjwqc5VSM72Zxhoh03DWiAdoKzikFFIQ9J5SVaPXNhjtKihKNB27bi3pGduitEE7WaCuaWhWDRpFDCMhZQ67O26vbxj3O+52A8FPHIaMD4pQHMq1WAWrIm0dzabFOHuK/kpNMxW5MRUCf09Lnm3mkj5FdHMaydR6orUG5+TcH3O+nXMoTQWmJe6p8agzz07PZAmn47uXWqkGc2YeW4xqmYUg1PK8NgJgOr3m9FnzjRpCJKZ02thr/5wcSwWlqFJ7pCGlwjB4+n6i7yeGwZOLxhi3UOdpI1GEcxZnBB1dUmIcBHBXKoBrPpYUAzlNPIgL+8a8/7ExT+P9KtPy3D3SAUTFJkRRbRrHiXGchNM6SsoyJHndGBKNjyQU2jbLw
xSFtWJc/JhrLU6MaQJEsU2dvled0splTrHWdKZSaqEq1EXdA9Esp/DHAHXq8+fhgcecFT439ELiIJq3qigBKs31VAwzO36pNdyUTmC1+VznLIOwRdVWHTVnluea/dznrE9p5TNe5bz8XJYWxUWyL9eWIaWgAqtiFKITpbSIxiiFsg60WSgxJY2rljmfU+qlKDnvmpZOeVY0Sg8rzFJYHMDZEZid9EWDecmWnR4iQn8qCS3ocxAuc6Putc3NKkbW2Jpdk1Szc4bVqsFatWTUUkr4IKU0QTDX1HUFtJ4/lnKAXMFTdD4HGfU01XwuZ50ZJZ9zYL/deFyuZq3ZrNY0tpEm9pVjsxF+VOMkzTgMI+PxiHEtIYkAulldYa3FtSts07C5uODZex/i2pb11XNpGUGUc8JwpL95hR8GXr74mGG35+Mf/ZAXH/0R4zCyv74VoE3JZDTN+pLm8kJ4dRE0Y9tqjAFnIk5HITOvd3jTdIAWxqsQhfO4ptyWWkc6RXbOSi/lquvYrNdn/Z1f/jBGc3mxwVhNSIGYA7EkiXQUi6FbaDwrjd4MCiu5LA85e2ltaVtRtpHSQeBwPGKM4fJyI9Gzk6jXunl5nVCP4zQyDAOH/sAYPFOMwvrlnEgKOouttX6jpJXLKMXQHzgedrx+fcOnn93S9z26cbSrFtes0NZiG8t607LZtKycpTOGm9Fz8/IaH+LJcSrCdDMOR453tw9Wt/nFx31jK2soVZYrz2E40vc919e33N7dstsdGUbPMEX6mNEZrvuRSRneyxrVrtHdiF1twUZW7RpjDIdjYDdO9MEzqEzQhWCUtI9V6TyUOvVAzpu1hHiyETJvitwzcmo22GdrenHm1ENtUz9jKHEgVHXecxLZyEwFjNVNHY1wBVcUfRxr5mD0hDGSgq/RvML7iWHoGcaRcRqxViOh0Yw7VoAVg6tFy7XUtHOMwlo3E1qImpD8HGMi94Ow72lN09ia3ZG9QuuC1oW2a3ny7AnaGHofCCmR46kbxFZw1uw4SOpXfp5BRDFG/Pzw4cFcy1wKo4/4EAlzpFvBeI0RGkerBM2fmXudDbZpIYuutoDGCoMXTWm6Dt1YVOMote/eKAGdrZqGTdex6jq6rmO77vjw/SuMViR/IEfP5EcOxz37457jcKAfenz0lQEuVbIT+TnmE7PWjEU5dUacjL7si4IIN5WnOYPkqh9gVT+yLOCsJqQWMNPsFM3k1GK4ElkLqhUtCFVtG2zb4boVzfqC9cUTMbyXT7BNS84HStGUMJJTIIZp2ayPhz2Hw5Fp9PSjtM5kA0UbnLJo16CURZkOpdQCoFLkxUM6oS702bHKWQlgZ04NnrcjqSpOM+v0nloaHmMYo9lsRDJRGVlUyyZ/dhil/udEu8YCrJkrssvP1XNMOVOi3Hwh1HphmTfd+Vz1vS8pCErTe4/3oUZwcfGcy+KVzjkDFsm0EFKN/DzTFAgh0jZNJTY3S1p8br1RFOkBjxE/TcQKatMIq1FKkRgCfhr5Iv2mbzuWGPGNyHcxvjV6iUn6Dsdp4Hg4cqzGt+97EWY/S5sVClNIGB8ZJi/gqUpbmCodpK4OYUiJmAV3mpWUQMiniESpUxQ4p9TUshBm43rvhJgv76mCfT/Kvdci9SUOdf6vWlYscwF67iyQUzFVG1fILEKolJpxFmSRrIwYSun9RBlslojsZMDKKdBWsy8yr9rT8UgUWCEgIPdJilA0Ob0RWSkBBUpkJ5kcEyV1TU13KhQYTsCqmmWbW3tO902pTpxEvg82ziLH+SzvlaTUGabhLH2rtZHIXlfCGKWErrTiRzC6ZhErD3aW/fRU5qq8+NYIBkdDDjXyP8swpIrSn/eTZY7ueR7Lar1Xk54j9fM08/zvL1Ij/CLjkVPNYoSMyRibyNnTjz3GWFzKKK3IMaBKRBWLoqqN2AbTrGk3T+i2l2yvnnDx3odoYygpMx2PDHc/Ydh9yv7mNZ/98Af0hyM//oOfcLg7cHd7x
353JMTI4CNoWG3XuMbhthua7QqtLc62cnPGgZIjMUwkP6Co4s2o6nUmIebw0raxIKYqek4a7W2NeKvqhWIBND3W2G63/KW//E9hV1tMs2JzuSFpYYPKWpONxufM6APHcWR/OJKKolltMdbVNk+5EUIWJO3ru1v2w7AszhA8fb+nbR0ffO09qdHWurExepmaeXPqjz0vXrzi9e2ez1684jh6dvtE76W+pU1PYzWxFeWXRhssipvrO16/fMnd3Q3DccSHwGbrWDVrrLaUJDXr1hnaxjD2e25eRe6uX0lUq8C2kr4bjwdiCAzHPfvb12c9fl/2OEW4s2txSg1KX/SnLz7l9evX3Nze8Ec//jHDOPDy1Sv6ceDly5fc3u4YJ0/Mgvh/fXfk7jjRD/9f/vE/+kOi9/T7A1opri4uaV3D7c2O46FnmkJNM+uqkCSMVk0ugiwv0gN7nkLUSp+l27gX6c5Gdt7cVHW4Tmd73xB9GUMDThVMTqgoLF4WLax0vaTnZyEW6ZOTnuocpI4dp0AMUfaaElEqcxwGjFXc3e25fn3HatVi1WYR4JC1L+9RJZFKqMj7UtOocmRWG3JqWLnCzUWDikJ760PAWsu6k2zfat1JecRKKlUZIzSnuRD6STo6JmkH09rgrBPDUxIh1zQzkMhknSk646NnGCdiFpKKh6hLglzNGRNgq6Gd+8vnSz+vitmIaWUw3QqtGpyxKG0pRRPaRFaKYuWcc4xMvQjfdwlC2zJdPGXViOJV27a46myrCpabeeGdbbDW3RMFmcfsAFlrKajKNFgqwrz2DFdWsgV1rudAhQoeU2eyjm/vyDxyxFu1GDUonckkQvACy1cCFZc6Rm0mr1GANhZtHbZZ0ay2Ike3uUQB435H8hPD7pb9y0+5e/2CVz/5I/rDkRc/+Yz9Xc8wBSYv+qtTTig0K+cwXYtphUnFGhE4UKUQasovJ/GGtRJBa2ABCcUoUm9LjQmWZnZtLaaKJ4ijUVNu5XENb9s2/NqvfYcxK0JWNF1Tb1DIWlWvU9IwIUTGacK4RtCts7dI9Z6LoFyPw8AUJDoQkgHPNPbk0i59hXPkOWcGZudXAd579ocD+8OB3f5APwXGEXwoQumpPMlaXMmiZYrFoDgeR3a7A8djj/eh6tHOgu6mGoqCrdSeYZroc2Tsj0xjjzYG1wgLTpwGhqFnPO6Z+v2jRLzzWHIIy3eeg6gi+/2eFy9f8OLFC37wgx8wTiP7g6hs7fZ7hmGSljw0JRf6wYMK7G730oJRwFKw2pJDpm1aaSfzVdkLVVOzBqUkszP3iYrROCeROa/XniKCevFPUcD5/D1GevmNYSiiO5wTtZwrJDYhEiZfMwCFnCDHmqmqm2+KsQptFJwuRKPwIeC9gKH644AG0nZVkd21dYVcjXVGkVBkjCnYDM4qWqegaDadRZfEujOMrWFCKCqdKXRO4ZxhuxLAonMW5ywZYbIMqZB8JIyBFEQQRNk5yq6O0gIWqj3gqlBUIVbd6ZQh5fMI9e1GQeaWWtvWqu5vClQ5kbGclsmM4HYCuKwobGwmGUNSUIyQvpScSUHmcEBjiqpCD7l2X5xaimrid1mn54p3J1T1Wfahpq6TBl2qTOA5Fet51P6G8Z7vj5Mm8lcMXAUsQIfC3HMlqYkQBKEafU8MEyUE8uixzYrUXtCkwpP33+fqck3bGsJ0IIXIy48/ZjgcuP30D9m9+CH7u1s++/SacZjYHQO9LxSlsW0jPKhB0huTTyQChYGcRYlEb4rcZCmQs0DkbU0Pm3ohUxZifkXGGan35KKWC1NyQRm1vA8qcXeOlBSFMegR57pbGwyGWDTGNbJ4k0FrB8ahtF3AG8PgMcYTQm2VyNRz09IP7Rzvf+PbbLYXrNcbVqsVh/0dLz77Cc5oVptLXLvCGEGcA7IpZ4he0spD1f09HAb2h55+DAwTTAmp51uHz4leiYZndsLF64FoFMlolBNO3KQsIRlMcSjVkYtjGAOKEYYjR
525290S/AgKYqXoC2FClQBIhPNlR2XLWLLLc3p5BtyIwR2niReffcYnH3/Cq9evePX6NSEEfBLQTdutaLpVLQ1UicUgqfpht2M87CkxkX0gE+njHcFaYswQMyplTCrVMNW1Xk5RaSFLCjQLl7rWFqXMstGft4nMYJpCOdndilpHnSTgvvxUc3VcikIXI6C0igKfe8aVqv27GUqc16Wc99y26Kxm1Uhv+OWTCzYrwSgsoJwsSHxp6crLZ8QkRDozkKqglnprCIFxGJhGL/gIK9SNxlhc03B5cYF1ls1mg2scIUwcjgdQcr+lUhb8QxLqK4pWxJIpWYy2NqYy40FBgITTFJiGkTD5e2CwBxu6YNSstHYKkHKZ24pqmaF2LshrkKBEn2VR6vK4nzGp82OdUERacUasMzV7qBanh5KgJEqJ5BxIKRCCP0kwRulE0Vqi3fV6hY2JMIyiXjf3T2uDUjOn+6lgIAY2s7gTqtzDN7zNeHQ9XmWoFGhzzVA8suQnSsn46UgMAyFrxqgx7ZrSXbJKGWcVT59eUErBD3vGvueTH/0Bd69vuP74H3H34occDz2vX17jfeLukPGh0K0sbadJQUvdrxSGKaJjIaVC8J5V19I6iVAF7RolgtKSLjUiuolPfrloQh2pSFlRMvgShZu5KGxNs2oyqojRTSlScnq0qFdrxXrjSMZRtCGXhowlpYKyDmWaxfimDMfjiNKOEBLazvB74REu2mK7DV//3vd57/0Pef+DD3n27DmvPvuEf/h7f5eSI5vLyyom3sqGXVQlHyjEWtftjyO73cDdruf2TsBCfYSYBK3u2pZcyQyMMSTlcBYmpQhGkayG1qGiJmEJ2dDQovWanBsOR08IkSEcMGnCjwPe92JM9oFCwTlBXasSHtfw1prjHPXO6FnvPdfX1xyPR37yk4/50Y9+xM3tLZ999hkpZ2zTYKzlosqiOetYtSvIhenQE33gVVK83o/EmJkOEzln+rsBUNXhalAFTJb9w5QZsXBez5eNRqg/NcVWBi51vlneR7CWZdOvQLya6ZlR6o8R/s4obCiEGBknL1kbbSp9o5DRUqAkOaCZ4tdZuVe7xrLdtDhruNw0tI30lsdclqhRqQJRyl+ifZuIueDDqUc0F1HpiTERfFhq87Phda6hIKphV0+upH9+1WGd5fp6YrffobTBduuF1Ma0zcKbXpQY3lzAmRbTWCmVJDn//tgzDKOUYyaPUgIMe7A8RDU8tpLXzDiKkov0T5dKEiI0fqBqmruAqv3SRgt4itr3W2bCldreZYzMU+MamqahaRyNszhnhHaySPvVYnhzJKdAip7gJ4KfiEFKCLKPaJzSbHWDT5khCcJfawHvCnd6Nb6c8Duztrj0ApwZ3wcYj2t4iwCRtD5DUFYRbmZ4QKlsT7lKOKVICBPWj8SpJ05HfAgMg4BPXn32Cbevrznc3nIcJoYpMMaCTzDlQsgFkzM2V9mqXIEIZZ7DExBIoapnViPXGVCiFozJciKSutCy6ShF1tK7hp5bYIRSkXte3cPVWr7wlJcERQuQps6xpuCMoW0anlxe4d//gCdPnrJZr1m1ndykueCMpWs7cs40zUjTNDjX4Rp5NM0K61q0cRQU1jisq4Cnha+4QmyU1FFsJeuwzlUktaUBtFHY2gYkPMugVRFvtihKESYrbTRt15JTkah7vWW1WtG1LW3jpBa/5P8r8Yd16GwqOKZIW5cVohMX4+OimpWCpR1F1oiauabnSLHWnJSpBAvWoKzGto5m1dK4htVqRUmFHDyUVMXcLSpF/JICrv+ZQXNKlG5QSuTmkHarlBKqrvlaE1nQsTKTcxRzinRnYMp8SkuLS84nzAOPEO9Wx93oerpKSwSDApVBCw2j1vrETEUFRtX7sygpe8UUgczoCylHhrGlG2WdDqPHGoOzcnppNrwxM3gBMQ2jIGj748DxODAME/t9jw+RcYqEVDBGVHgKWkgdiBQ9YWJkiqLUNReKZ93fp
Q2mXlRBL5/Snqe9NC/1TGnZnA3JSXHnbYeCJRihzGQrM5p6dmJrGvgM7lVq1kDlLGl9pdg4R1QQrCEbTasVTslDtL1PxEMnAqK6E5e8fGep1+Je+9Y5V3MWnenZ0C/nMu9Ps3OiZ3WiM9TvfNLMoM96Pm85j49qeMvZotFaU0oABtlcjJxQKlnkzop4rwnFcLghp4nd64+5fdGxu7vj048/YXd3x9/7W3+X61fXWF0wujCGzN2kCFFzDJEQM9kXisqEGPFBJk2ccYVRllXTiYi6YF7FKqeTioXc0Pd9RqMVxhpykVpMzoDTZKNwjejyQpXxUtJjF3PtW32s+S4CiCopU3QELEpN2AIXqxUr17D9zd/kN37t12maFd3qAmOsSAfGzMVqw9fee5/98YhC067XbDZXdKsruu6KprvCNjuU7lAkutUF6/WKrl3TuLbW2yT95GwDGNabLU+ePsUn2Kw3aOPpMGSlF/DKMukKShkJYSIzokyi6QzP3nuOUpoP3/sG281T2rajW61wBjpThTCMRQtrPNo24jxV62Gt8LK6aUSvLh7P8CqJulRVHJ6JVazJONfgmpa26+i6FavVxGqzJuWEbi3KGtZPN1y8d0XXdlxtL2qkMaJ0ZHPRkqcLBq3wd/uZmxeY3a0M1qLXK4kAnKNojR1GdD+K8fUz92+mSHOrIIWU3BdaUds81KJwWQBbCTXUzB2sFErPKcgvd5RSmGo2zGqDchpXNaF9EYRy2zY452qbVKxGIJ2B2hKpKEIaUQp2BxGU8MEzTJ6L7UguisZautaKw13bviYf2R0GfIhc3x4YR8/h0HM49HgfOR6niiyWKGq9cazbjgjc7ntZB1ocgBQjUWzK0gI0xoSPWdofa103+QmtFK5tJCuUMjFEYogYIynoddfSGLtIED7UpdBKs25amb8UyDEQvPTCS5eKJJel6h0FeFYkRa8RlL2zcKE168sLkoLeQao1YnJhozVrY1kZS2s1jdU0ztA0QqZTSqylu0DJnhQngh8FDBtFjtUHAbEJgYavWTsnjphwWTLzZisl1LXauCp3Kk76bHhnXeGZ1z7/fCrUnzt+KRHvjOgUzyRKSmVxMOqOy8n7ntMIfjwyHO447m7Z371mf7vjsLvluL+TdERj8aHgo1DDxSyIXCEJl4h39uEFGFCVPLSpEe/8lWfw8zJbgKV8NcdStcZ1ighmVJ+eqciKWjzsOR2WyyPGvIVa2BKKOZQs/qKEB1grRWs3sAZtHMZ2NfKSDa1xjs16TSmFYRxouhWNayo/8gmAUGptT1cAg1LSJ5upnnvlTi0LTaVaHEqZJlWdoCLHV1snhHhdMhQpTlAiWkPXSVS9Xq/ZbNbSW+xsZc5JZwAPhdIGY+efde3dq9KHgCs8nuGtceG8wsWzlsjX1rpW07SsViv6aaRpGyHJtwpltEi8WYO2QhYDuuqcl4U8xFhTP7zeUzWlnGtNV5n6WdaAMWgvBDalzC1F9f4sp2hljhDrlN5ru5C/qVO6+nwuH2FaC1KnnlPnRWm0qXOshe9YGaFUlEROxVwAc528zPlyZjJIcTLGKTCMHmMs/TARrERV1phFdm70gcNxwofI4TAwjJ7jYeRwHAmVH1sibYmmUqq4iSIAqLql139n7EtNn5fzEkB1dObnkOyC9NnnZZsyxmBNIRsgC5Cy5M9T2/mTDVWdr8Tpvj9fI7L0ynI2eYl46085QZL7UMpxikZB0kqulYJGzzrbkpLWlWq37janSLdGoXPEn7KUDmPlxV7a6ipRR+bU7jmTw8g9qaDy658CrJPRPZ38A00ijx3xlsw0jVjTSCN18uQKB29bV+nTFEoZGmcxTlJuTmdUmvjkR7/P9ac/ZppGjvs90+jpbOHJ5Up0HUNimiLHfhIS7wW5CAFAaVZV4eLq6pK2aelaI/UcczIi8xJa6lelQIliNLShcaqms6ib/EnSS899hPWGjlFumjgFVJqYfPzcufkyhkIQn1JrkTSXA
EYE+ZpTqWhAg5g8SynSa5spfPPr7/G1D57QDwM3dzdo63jvw0valcMxEPcvCLuXDLcvUBTK+ytUMgyHO65fvySnSPAjOWVh60mJF5++5NVnL7m5vuPw+jNGHyja1KhKbuzoPf1hR6plhpwTFQvGar3hgw+/Qdetef+9D9hun0okE4N4wmWi5ERMgZJTFdAWNLOyFmobVdEa16xpL80ppfRY10VVKcl6wzvrePr0KdvtlpwTz5495dMXn2Kcph8HXu9vmWIgeM/xcMCPI2ES1ai7/Y6pH4g5oZ0Da8hakfWpN152rboNpkgho6OCnCgpLqxKC2io1mplDc9O2xvMVHMLiZJWu1IglLD0VC59lF+y8c2lMIYA1klEX2uEUict6JxxbYtrnNT95vOJQO27l9JTJgZxClsnAuz9VCh3I8c+cTgmrNGsW6GhHYeBcRzxIXEYZb+pvDl4X4jZEbJiCkG6CJXsE1l7fOkFRGR1zeqIEZHboNYY66WwamaVy4CmaEVtsCCnxDiOQo+76oS7WVtCSNzdHeiHiTRFfPAPdh0U0KAX6VOjDNa2QBGhDaCoxAyYSjWzkKuRnMaJksdK8xnQztK9/wS3atl2G1Ztx8ZanrUrVm1D1zic0cQYOPZHnNWAg5KJxZC0wxdDHxW7MfFi17PfH3lxd+TuruduP3B7GCkooprIBXwItX5bhH//LG1OOdHc1h/ODO4J/fy245ENrzAdUUxlPsqUEmvDeGWHmaNRLVGA3Ohi+O5evyT4adloc5YWANU5Dn3A+7hQssW5XqUkWxaroH1TVXE2qzWrVYfVApwyUrRYFuh5wJtLQTiqwKg52pBXCWJv7ntkqQcv783iuRISJYeFyOGxxolrp2A1NE4JktxpshEmKmssOSvinBVIEjVuL0X0YBxHLi9XoDTddoWxmpg8aZyI447Q7yRyjR6VI2EaOB72xBiYhmPlFp6IKbG7veZwd8txd8d43DH5UMUlZgawjB9HdtevBKE4HUkpsL5YsblaYzvL04sV680Fz59u2WwumMaBcRDqP+8DJcdKlZfFeVOGSrgLWoxu0Zqmpqgfw/AuNdAlepH1phAnbl0zC7lk1ps12mheXr9idzywG4/SGpIS0zSSovQ85pgYqhGmFJF1NEbaVRUV4HICOAkASq5t0ZWUobZrlJnRp5xvOnNt8YwsgvP2olP0S4EYOTO4j5PZKQV8StiSMUWcA2pvpskZlQvGCQ+4KgWCUEYWXWoiSFNU7UhI4qM0TpD+PkJOgVEnhiFitDjuVhsO+wPH45EQE733oBRNu0IbI90AmCqWokiV3hMFxSdS8RhraJVDKbVEhUtP7JJhE41fpaQzI1dqylzldUXlLWKsw7kGawtKCYp9mAJjENRifDMT8RZDIcdEUTWbIl0H0sYkBjdycrxSXpiRKUCIgRxkj+7HAdM42mcXGBSrpuFqvWFlLGvb0DmHq21COWemaaIUS+ME9JfRFGWJReMTjCGzHzy73rOvj+PoGaZQ2yiFZzvFPAfpfzxbUzMnag645k2dU8bhIcajo5q1ZqGls0UthjXnOfUoIX/KmRDGRcS7gHACV6H1uSleaYNWsGo1TkvKsXEywVVOiEYLqm2WnTPGcrFxNI1ZCvMFQe8ybzNa14U9IziFHUlT0Y1zvFs5bFVNu5Qicnx5lI1yGKRdSa8c2hnp/X2kzOb+cOQ/+L/9bWGL0RptOqy9qMcoN4fRVkjvZwRzKctzTdtgnVuYlEDh2pU0u2e5Zne31/zRj36IUjD0r9ms1/zgB/8577//noBPgiBMU2Wo2u2P3O2P9P3Ai1fXxJRYCoY1nx9jYDweJZ0XPTknmoOjuW1YvXjN3e2Bpu3Ybn6Ppt2IOomfI11hospJQCmi3CJczqrybM+QVmMdtnGLUXyUMeeaz39RUvdVFLquQyl4+uQJH77/AV3X8fr2hhgDTunag6gwdT0abUnayVo3YJuGbrsl+kCuqE5xGOvmERMoU
TfKBSYfmKZQtWdlLZtSd/Y51TlTh6qTURVWNslSaKWZaSOXJOOy2X/5i/2cIUt4kdPSa59zXqQhRXRdKi85SRtgTlIDpZxE1wuaXDQpzwAyJUyBSAcDwDAFdoeBMUR2x4ECtKuAqS1DoIhRUtFn0pO1LICw8iFlGVOtgNT7q6Sla1Fo6T/OMKlJOgRKRQ8DxlrJ6CiD93KuwlyWGM/EEtIDgzptDY6KFvb8uQaalKTphRdeXitxfq3hUkBLW9ZYIvtxgOhJ19e4oV/q1E+6FU8unzL33lpjFhpa5yy26Sg5MYUdwzjx4vUtf/TRJxyPB0y7YXPl+O73VwQfOPQDh1762HeHIz5EXt/c0o+TYD2W7UBVzIKk7t/E9Mx7/cyK9fZz+IhDAdoUwpRIUTw6XZlgxPsUajFhfJmYjj0pJ4YgAIVZMHqu14EgPw2K1mhM0ZQS+SBXMI0zYBS6WBR2QQBqpVhtW6y1DGNiGCO56BrTFopWlCxEAzMttjGm1hlqD9kSwahZVaoCIwrBR1KYCDGw3x9JudCkBtMavJe6zmOM69s9/9v/0390Nv+npVSWv53+Wz7n2ZOXd/8dyyvLTMkH/+D3au3vDW3L+sLl9WX+93OiIsX971uQtef1RPWf1RtDvWHIyvmPn3e4Z7+qz33+UcZ8zHVTnyNJVGGzWrPuOnJOHI9Hbu5u+PizT/DTiFEGnWWDdliy0ljtyCZTjCJbhe061k8uiT7Q73bCdqSEaxmAaoz8MOJjZEyZMVajWe8r1zjgzOjqcqbHKylPxQmFvRCYnNX876Wuv+y5VLrWa8Wpj1U1J1YxgpILMcRaC5R6dkxyj6ekKEmhkJo/SlOKMCulIlnpDOhU0+m1RnsYAtd3Pcdx5NXtnlygW68wzmErSPB0bxSku0DSscYgPdLFVg5sEWFxpuAMQqqx3aC1JfpCStBrI0pSWQQvgKXNLKTENAZiShyHgRATwzgxeuFCjyU+2I6jQECoWhyMosSZyBRCNbwpaiCd1aMLsWIRss5oXehz4GY4kijcEDDOitLWMFGunvCNiyupA1dxmaZtWK3WWGtpu44QAoNP3B1GPvr0Fb/3+z/EGsN6fcnq0vKdX7+ia1v6YaQfhITmxz/5mOPxSCx/SLq+EZxL7R2eszezjVClkrKcpTBPwhdfMXCVUlKKSWFGhskNs3jWRVJhGQFdSY9WnhsFl4hybl84L+trrYQqDo3N0kNWKj+xmoXpS6n9eIIrVXNtVp0Qm0VJmxDGMIfhM1hKqfkYyv1ctJwdCyFCUQSESFxaDQrZJ6xOj5pqLgVCeLya8pe5z/7yhAweaCz1hzODfy+VVZbUF4gxkx7GVvo/rZA5pBBRWTGVScA9w8g0TqiqeZZyFtUhpRb+Xq1ES3lOZeZcmJTIzhmqRrRSQiWpT/26M0fu8lAnQN29kspPPdkvfyjUUpKa0+RC7ZdP4J8ardeECiXPr5WDLzXbdnYZ5LY+mwdjTU3d1ySn0bjG0paG1aojA82qk4h3AR1VV/BNEFLdb5yrBPwV+b+0NFIBVrkIsX8QkFA5O2uAmQCCWac5ZbwX/vS5e0QhGsMP6ezPsCl1zsdwBqiaz/lUdsiEksi1y8NqyNqQrZPt3zYUa4lK4VMSYYfg8dGKf1rr9sY6jJt7biP90LPb7RhHwZG4ruP999+nbRsutxc0TUs/DPTDQNt1DNNE27V8+uIFx77HOlflIFk6Hu63ENXzXRbK8stbj8dVJzKwvQCVMkPMGCVKMRQlPYklEUogImw+5CiexxITzLFlVVhAvNeCtPZ0zggyrqaDcqVPQ8vFy+XcOIhXrIk4UzAGWic1Zod4N15BIEmE0RgUheDlxl5qwbky+CiRrvMxMYRIH0TF4/VuIGVYqQmXHEefHinefTd+5ce9aF3G7GA4a7ncbskpcbHe0B+PHPoj/WGQNV92xJB4/eqaaRxpG0Gb5xiJSlCioYojOGPpGkdjLBdtK/2eIdJnAQMWJ
VmepA3KKJx1S7910zRoo2lcI4xsxlaChLPsSRXIkFM6IbcfY51rrVmv1ifDm1JldioLfze6zARy9x9lZpkqJ1F0JLpXBZwWxqTGOVbdSnAMKVFKYr1twF4RYmH7/InsQa5BacPxIPXflBJpkh5XpU7gS60KjTNcXWyx1hC8OFFSylKQFTFI4LHfj4xTkFaWGngUZofAYUwDIRNjYZoiu7sDPgjGpRTJ1K1nNBbHt55vqeFGUEGieDXbo0KsxjXlVIUOZs3ewOQPUBLrZkXnWmLbUS6fSQnscotuLF5rdjHSjiOv7m5IOZDUtzBtQ7NesbrYShreanzwfPyTj/mjP/oRN69fo4EPnj/nn/nLf4mLi+2SST32A8e+53A88sE3PuTubse+P5LIs0+GdgYhDjg9FgdzLiFCjYJlOb3t+KVEvMbkKgp/KgQs6jQ5oYrUXSlnmqDMLz2He1fPkLmdRwr++jylOnuyWr0xYRVIpc77E6sxr0LwRou4/awxCafUyZxGoQpjzyCslAXOPoXEFCJjSKQMOhSKkxand5b3T/E4DwbLnCq/vyBKjXxFi9TgjKWppCTMAJB8aoVZ1uJco6yOoPQc1ntDKawWcGHJmcZYgjbC66yKyLMtZPGzLvIZd+1M9XeObF7uCRZw4aLq8ua5flnTWY9vjm5LkZ7R86jzXqSy9IfMuAyW15y3SNUPvyedqYCUpbvBWE3TOLQtFCPk+9o6UJoQHJMXLuyStJTUquE1elYrOn22tPDlZSkI+b+ci6h/hTnNd5ZoU8tplZrpyGlWVDo590L2rx/0Upw3Cy3Ti2QxT5q3Z49KoVlyINlWSnpaizHQBuUasFaMdilV2CEQUqwRrwAQdZUdnFPCwzBw3B8Yx5kCUrFer1iv10skKz3UIgnYrTom72laJ90duSyyl2dowSXrtJzvvXX0MHP4uBGvyqybgbLWWFTtuVKUbAie2neXUNTifJ0AVQWt5zRAoUpKVSBQLhCMIlgl5Ne1BFvm98/gHaUpaoaLi2HXpqExttKUtXXDk8VCycQwgSqEmrqJKBKGmMXLFK9O2HN6n/GxsBsCN8eJ0Weuj4FUFGmdWbXgHxfU/G58pUa94c+MqVaai82W4ANN23Cx2dB1a548eU4pcHN9xzhNTNOI9xOH/Z7PPv4JMcybY6r1RWEw00XqiVebNeu2ZYViVUSxaqyevbFmkWGTcTJQxkgUCLUmNoNRao1XxDFq7+WjpJyrgQphSa/+tNeAlI0kY6AoSmOylLusMbROUtaV3KvWYiv4swgafKaKNEax6hpSybjmBJyCTGdXPL9sa9uVzGFOck3HYWAYBmKBm7sjutbH59eknFEqM9Odj5MnpHhSyylSY0Yl4jigpol+HDkcj3gfFoKecjr1B57uQi5R+BdKreOqGvGmSCqz0TxpOhcgJShJUYpGChxgdaZoxakVs3I5W8lgGquF6UvPjkumpEKIiWnsOe537O5u+Ozjj/n0k48ZDju61rLZblht1ljnGKeJYZw4HI78+ONPOByOfPrpp/R9T9O2dKsVTdPK3M5B7rxeZknqVB2ZM5zK26acH9nwFlrrSY1BZ8PkIUZpK4oYVNZIa/YMadKLByI3uYx5ElKhNkjPns3s489aojU9PXszKFDmLEIolZrQ1BqCoF5NzZUFLx7WDIsvpYgRRRGKwldinxCkeXuMIsE1+MRhCAwhsx9FSaZJBZ1EdeRdwPuncfz0qz5ncMqZgZu9a41i1XZs12us1XRty+XlE771re8CmttnO6bJc3t3y25/h1aKF599Uj80L5FJKQmQUoxRWgBczKlhRSgFFcPCySMBcOVqPksji0E2S0Awq3WJk6CWNN15uezLHhIhiiiBHPtZpF6jIxZQmERCuhqxIkTsQrHo7BmAcv4cKto1V6NT+YirBKJFY3Nlw5Kjwa6E4MVaR9OKxneMUop6/fqaqSLKj4Ovx2lRSlfSB0Gdm7n/P0vftcpz3k/ajwSZL
TJ1wzgxjKOAys6OY8kOztmQh5hrpLSQFxKLOdtXmfkquC3VOrsMVeUmoWQNZWZCE5Orq+Gd51prteiHqyVDUNs2cxZO5jAxDT1jf+Tu5poXn35CCBOrdctqs+bq6VParsWHyOQD+/2BP/roJ/TDwO3dLZOfcI1wQVtnf0pt95Qun9fVQ41HNrywbgpmJe10vcrEyZOyJmdDLmZJs8tJR7moFV+1qEfUujAFVO3VK0mTI6CEC1T8W5h3jRm1JkLMteUnz6+6n1qYPeSFALxIuiIjqeKcYfKRfgzEWBjGQEqF3gd8TOz6wG4UqrcxCtw+5gXK9XgT/m58NcZcFD1zELWxuKaly5n33n+P1WZF3x8ZxoGLy0ueXl6gtWG76ogx8+zpBcfjU55crPHDnv544LWB8djTomgRSsVsBNzjGqlHZmUoSuNTIgdT2X+E2lQQ7aLRm2obHzNgZ07x6kJUSFtdyWR1ZvjU48Cs3twQ/3jUW5i5p3Ntrclpbgsppwi9bvrWWGFUahyNk+gspbA4EyICceIN1nPJq2YqTCmSWcjyENS1uFbGGtquk3KUj7XclQC9lKm01jTV8BSjl5RpTqmmv21lfnJopcmAjxEVAtPkz1LVuRquN/e3txknXoCf5kqeeKRhzjga29aadIvVDRaPTZmsgeChRIpVIlea2yW9b6rRJSdKDKQU8dNA8hMXmxXPnz5hd3PJ3dUFjdVcX7/GHfYc+iOuacV+AP0wMk0TIcallLO0v1UAWM5vllBO/c/3fv6qoZq1hssuE63QwN6qRH+YUMXgg6EUKyQASgtIohLYxxptKm0XZpdqnVGxascGSEo64IutN39Nfy2pDA2msRVkNZPj18iaOQUNMwF3TpEcAiknQpJ+vFg0CUU/enZHX1WQRkLMHKbIFEXU/TBlQoY+yEf7rIhFegffjT/t480t68xCaQVZY5yjW62wzvIt+y1C8Oz2dxwOey42Wz547xnOWqxt0FozTiOjn/j0008gTez3Oz7pHMf9njRO5FH6QFMWdMhq2+HaTtr3lGGKCcaBmBLjOC09sAu3eqAi/UtNE1qaxtU+Wdm4tCpVThOpgz6K5b2/WcpXnzvb5/iRUkFHZeFsFqk5jTYKVVPLbdtgjV4UcVIUUhiAplKtLjSGC9Wm7Gn1CGp/v6TEavIBCqIstV4zjBN3h4EYhSc6lxP/nbGG7ASJbp04YWnyjMELc14rbZVN19K4lqI1MWf0ZBjHsZ5f5YnXlUjlwWabWg48/b78/Wz+50wIFRdg3RoyOLvCqZZEwmURl8heVN1yMkSrKbmrqlKmtqcXSNKiGb1nOu6JY8/Tyy3pax8wHu8YDrcM08Rnn3xMLgXbdmgrzqttGmLK9OMkGQXKvWzIqe2r5p7U6d9zJ26+F9JXzfDOHqNesshlQfvdG/cKFOf59Nq8L3fTbCXvPVeWb3pzlGV13H9W/ZSf3zyGs3+WGtzME1oWYFWqTFUz4Go27eXs93fjT++4t9bfXG5zSndJ1araMqIXoW9jZvCTWv6uZ9WnnGoPqZAOmAremduLZD9ZvuRM5u+Nn39KSm0xaGcHfjJuC/7n9Pmfc4pf5nhzo3xzfO4zp+k45b6WqP3+y9Ri0M9mQN3/jPk7ltfWDeD89cscn+8hzOQjnP07f8EJ17PEmWqO2s6ypOr0+s+bm4cd5fzU33zq846g/v/+pC6pcM6iz7O33HtxzVDMNdbFATorb+QsAC2VkvBCpISemdnK2ay+OR+fc8yft5YejAHsMfsjlVIvEUz7q0f70ocd7/F4x/7dUsr7b/MB7+b7FxpvPd/wbs5/wfFujb+b78cevxLz/aiGF0Ap9bdKKb/7qF/6QOOreOxfxWOex1f12L+qxw1fzWP/Kh7zPL6Kx/5VPOZ5/Koc+8Ml/9+Nd+PdeDfejXfj3fi5453hfTfejXfj3Xg33o1HHL8Mw/vXfgnf+VDjq3jsX8VjnsdX9di/qscNX81j/yoe8zy+i
sf+VTzmefxKHPuj13jfjXfj3Xg33o1340/zeJdqfjfejXfj3Xg33o1HHI9meJVS/5xS6h8qpX5fKfVXH+t7/yRDKfVtpdR/pJT6PaXU31dK/Q/q358ppf59pdQ/rv8+/WUf688a7+b8cce7+X788VWZ83fz/fjjV3rO/5iSxJfwAAzwA+DXgQb4u8BvP8Z3/wmP9+vAP1l/vgD+EfDbwL8O/NX6978K/M9/2cf6bs5/NR7v5vvdnL+b71+tx6/ynD9WxPtfAn6/lPIHpRQP/O+Af+GRvvsXHqWUT0op/2n9eQ/8A+CbyDH/9fqyvw78N34pB/jFxrs5f9zxbr4ff3xl5vzdfD/++FWe88cyvN8Efnz2+0f1b7/yQyn1PeAvAv8x8GEp5ZP61KfAh7+s4/oC492cP+54N9+PP76Sc/5uvh9//KrN+Ttw1c8YSqkt8G8D/0opZXf+XJE8xTtI+AOPd3P+uOPdfD/ueDffjz9+Fef8rQzvL1Bo/wnw7bPfv1X/9is7lFIOuVj/Vinl36l//kwp9fX6/NeBF7+E43o35497TO/m+3GP6RcB73yl5vxXcb7r975b44893qJw/YUL7YgK0h8Av3b22t/5ZRTcv+C5KeDfBP6Xb/z9f8H9ovy//sjH9W7OH3HO3833r+58f9Xm/Fdxvn/ROf8qzfev8pyXUt7K8P7TwN88+/1fBf7Vn/H6fx5Blf0A+J/8si/Kzzm3fwZJP/w94O/Uxz8PPAf+Q+AfA/8B8OyRj+vdnD/inL+b71/t+f4qzfmv4nz/Seb8qzLfv8pzXkr5kzNXKaX+m8A/V0r579ff/yXgv1xK+Zd/xnve1S+++HhV3pCU+kXnvGlcWa060fA81wVVs4blrI9ZtTBzqa8tZ7rD9fci4pnGGFR9v1aKcua85SSal7NerLWWtm2q1uuZ3qkcO1qLmKjRJw3Y+9qiUIr8kLIiZfm+lKHkwjh5QozL+Wol36kUpBhIaRZlB601zrlF41YpOB6P3O5u55v0ree7vubdGv/i496cv5vvL3289Rq/uHpa3vvgmz9TiLfc+8t9/d1c94oYI6MfKTmTcqx7SD4TLhexeT1/WsmUIrq6KUZmdWI1720UlLZo06C0pVlfYFyLTwkfE4qCqXuQrprwMQViCPXAEpRMCJ487xvzuZRCkQ1R5kwrjBYd7KZpUFrzeVLIu7u7Pzbf87Cf98eHHEqpvwL8lS/7e/7/cPzoT/Km8/nuupZ/+i//BVJKlJIXg2mMYbVaYa1Ba4vWhhgjIQRyzsQYKCXjg69/S8To0VpzdXVJ0zR0TUvjWlLKhCmSYuKwPxBCZHuxZrNZ8ezZFd///ndpW4dzYDQ4BU4VrDGsuhZjNJt1h2ssWslrlC5YI55CTJZcNHdHze1B4wPsjjBOgX/8gx/z8tXNfOZ0XcOH7z3BGM3Nq8847neQCyVlum7F177+dbq2o21brHP8v/7f/0/+xv/lb+C9/xPP95tz/m78QuOt1/i78QuNt57v5+9/g//Zv/F/PEVu54Ly1UBlxGDlAglFKYqCoRQYvWfygZfXL/jBD/8x09Rzt3+FDyMlTJQ4QYoQRlQpNDqhKaTxQBqPTMORw80rSo4YAoqEKgVFpmmfsn7yfdrNM771F/9Zth9+l49u7/jRzR2OzKXOOF24bDWthZvrT3n16iP53nFH8hMvP/mIw+5WTgdx8ENMEliEQEmJdrViu92y2Wz49ne+Q9d1NZC5b3n/5v/53/2p8/02hvcLFdpLKX+NSkz9zjt96/Fz5/x8vq+eXBRjapSYqJ5mIufqTWpD0zQYY8k5LxGitQYwS5SYUmSaJNpt25amadDaUEom50TKgVwKbedoW8d3vvk1vvGND9lsOj744AnWaDQBRaY1mtZItGy0QmlVb66MRtB+CsTDBWL0xKT47NNbfv8Pbjj0gU9eHhmnxPVtz7H3NG1H261YdS1NY7FGcbs/MByO5BhIwbNZb3j27CnWaFzj+
IJAxndr/HHHu/l+/PEL7Sm//ht/rrzx3E/9YKUUqohBKkqLUc4S7cYQCMETQpRknNYUY4ECygCakjNjGCAF+uPEuD8Qp5FxCJAjrckYXXBG46zFrS94+sE3abbPUKtLgukY8p7j5OlU4aKRCFlnjclADCQfyCGQh4kUpvq7ZNHmILfUTN58rnPWEKVqBk0te+oc3Py88TaG9z8BfkMp9WvIhfpvAf/tt/i8d+Pnj19ozhWgDagk6yTnUh+yoJSKgMI57i0sSSeDNhqHk/RLjtUQNzh5w5IeyjlRgMZZjNa89/wp3/32N2hby3bboFWBpKAkOmvojAHmdFJB63w/DS3+MpRCyhBj4fb6FX/0ox9xuxv54Y+vGaeET5ZUDKvNlu1FJqbEtl9jjaYfRvphIAdPnAYUEKIn5YZSknz/z9+z363xxx3v5vvxx4POeVHIrYUkgpWqVa5qoErJpJyIKRFjPJWDUCht6mdUw5sSwY+kBMcxcjiOpDARp4gqCVzBmrpPaYttV2yePKfZPEO3a5JuCFkx+YgxBWy1l6WgMpASJSRyiEQfSCGQQiSnvJxPLrUEN6fA52NV6l6EO++fX8TowlsY3lJKVEr9y8DfRJBx/5tSyt//k37eu/Hzxy8651or2rYBFCpEcinkus5VLUrMC18phTHinUoKWpNzJpdca7WSFrbWYozcIKog9VIURmueXF7StQ3Pnl2x6hqsAZUjqIKmSO11CvRxklpNiigF3abFOoO1Gu1MTR3VunNK5FTwk+d4ONAfBvrjkSlklN2gjUHrUwrdGIuxGmMt1jlGPzFNI845xnHAWkPXdbDUhh5uvt+Ntxvv5vvxx59kzue94fOG4pRqlszVqfRZKOScCCEsRjflJLiS+cUCPJpDTVLwJD8S/UicRkoKUo8lE2OmZDCmwdKS7ZrmUgxv0I6cEiUFbPZ0RnHlLCsDF1bRaAhdS768JIYGryJhsgzWEkohl0wuBX1mb9PZz+dYFVWj3xrNoL7kiJdSyr8H/Htv8xnvxi82fpE511qz2axQSuG1omRqtFtINX2SktR2m8bhXIcxpqaS9XJjWGtoGofWCuccxpgKoNLiDbYtXdPw/e99h6vLC662Ky42HZRASSOKjNVglKI/Tgx3R3JKeO/RRvHsvSd06w7dNVhjZEFruQFLSqSQGfqBu5tr7nYju7tbfCxsrhqaeszWGjG2TYO1Bte0xDAxHDN9f6xgqj1aFbabDbDmi6Sb363xxx3v5vvxxy8653M69acZX60kUhR7Wk6QqwIxRqZpxIeJmIIAq5bsU32UjCqJkhNp6vFjT+gPhOGAKlmc+VIIKREoaLfBlBWleUL33rdpNk8Yc8MYIiV6ujiwtZYPWsPGGTqtcErRbTdcGgh+oG9gGnr2n/2EkULMEpFL1G4ExlUKidno1qgXtcxJqUb3Sze8X8bYrlpWrSOnTIwSpaUa6uf6MFpha3TmrK4RjzyUUmgjqFXrjCBqqRNTikRxtc4gC6dOHCckrRglyYRWcF2N7Dghg2saBWYv6HwTPy+y39/cc4GU5Txi/Z657mq0xhi5aMcp8jBDnYxkdcxykTmQ1HJBKXkIyFi8UorMf4yxovgUFIVCo9BoZdBaok2joWtb1quO9arFWQWIVwoSNU9ThJQY+oHxOFWEYkIVTcmfl/WVOcxZ5ism8XBTSpSaK88pkpMnhpFp6tG6cDzssMYw9AfGccBPk3jYS01JAGQVpvhAc/xuvBt/WoZa9tI3wURwqvkqJQUjVffGUoFXKUZC8KQ4I5nLWcQrr0WVpdykKYL/ULlmzYp0QyyVKoVSFmM7jOvQrkXZhjJlcsyYEul0odOZVkOjodEKqxSds1BagsrkxkGyOGuwRlOyIp2d03x4f9zonvaQe/Pxx6fm3viVMrzWaP4Lv/4Nfus7X+NwPHJ9fYMPkbthwsfE6ANTSKw7y7OLjq6xfPB8w6qzdJ2l7SyusXRbQck+e39L2zmsUxijC
CEwHAd8iLx8ecPQT2ilUUoL4tZAyZnjwRN8wo8QxoJWBmtatNK19QUxklpLHTLNRdP5TP5/7P1JqG1plueJ/b52732ae+/rzMzN3SM8PCoiqyKlSiqjQCARqBsIjQoSlDMhgaByooFAAxWaaVYjgUCjBA0k0EApJJBACJEICUogKlOZKmUqI5RNNN6Ym73uduec3X2dBuvb+5xnbu5mGfbMwhxyG5f37L7bnPPtb39rrf/6//9Lnf++HPBVT9NPicdB3sftYWKeI/cPR/p+Ytt5bvYblIJ/+Odvv/Z6FiBnjTGOptFrTzalSEyRUhKmyoKcLVgrmcY8TuScOfUD4zDR+Ibtbou1DrxGG4fRLdZ6utZxc7Whax0vnl+z3zbkMJDmA0tQDyHw6pPX9MeefJxJx0DjLVf7DbbRmKIwaFSVJkmGoMmlMMXMMCXGKTFOkTlmefjIxPGRFAem4YHj/SuMtdy+2qC1JkwjOQXmcWA6HaFkDgdxi3s2y/sr5cvh5n9z/Zvr31wXl7rs137xw3OGYZdEvxBSIKbC6XTg/v6W4+lAzJFMoqgsbSVdIAtDWREwBLxNGJeJrpD9Qm6SczfMmpyh6664uv6IzdUL7PYa3W5Jp7fM/cCu9OybxLVXPDWFVhe8lbP7qunAtIyj4w1Hepe52rf0p4ZhKKQU1uKklLPNo1aCyWuQ8z2Xs05JQVYr0P4rr+9U4FUorncdHz+75sEpmHrGoEklMQWFqjqvrbfcbOXQ/+jpht3Gs9k62s7hW0d31eEby4uP9rRbj7MKYxXzHDgdTkzTTGsDx6NBq4VhC60T+vjjg2GeIlMPU1/QyuJti1YaW/Wv1misNgI/xLQG3oVIsFyllsyq3pTDGGlPgWFK5AyD1Uy9IWnYOMO+tZLRvadL2g4L026pctW5ytUFrRRaS7VLgZwjKWbiPDNPkpzklMmqXGSZGq0dzjZsOgm8jbd4qwmpEMq5Yi85M44T/WmkDAHGiKmLpYpaM8ilTyId3nPFG1MhpYp85HJWHudIiVL5RiVyp5xmORRygpKI00xMkRgDoVa9Kec1A/+ruLz3WPt+H721ciiCaCyfu8wsFlbpcpD80gIsuu4LVOeXzo+vmKt80Q7+/Pdd6iW/zmWMrq/r8+9Hra3DL3vVC+fhL3OdA5B6548v+SbeeVX1rP6ldf+lTfor/k1d3t9v7rp8a18UfD+nLqpkpgVlExRtnudVslguoegFclaVXEkRYqYuGF2oYKD4B2RISoMqGOvwzQbnWpT1oA2lJEqasSRaCxuraKuiwhkJvNoqjFOoIoVADEYq3spxuaxyl2VfTqb1jL94HtYz7PML9QXXdyrwoqBrLdfXDTlZHrxsalMCOgdak7EOnu4sP/hwx27X8OMfveBq37LbbdhsW7JKxJpFjeHI8JgpOZBzIM6Z8RCZ58TtqyNDH8g5knOk9YqbvcFohCnXURl4BV0KulgUmlJE8FJSotReg66H+LIJLwupdWNl+ZxRml3XYE2kH2cMhUcNqmQMGWcKX4Dg/KWuUiCGTEHE58YorJUe7U5JQJQqvvZqs0DxKQrrUKB8SQRSSlBmDo8PGGN58uwF2+2O7abl+dMbGqchRcZhhhwQMEAgdRTs9lucdZhdwUwF7y37ikxsdh3tpgGrwCiUqb2SItCykDECOUZKSlhVRIbUOKxz5CIBWmmFVhljDPubG9qu5fH+kbdv36K05diPFDTTHEjLa/uWL+89f+tv/S3+6I/+6Auhui+/yjt/LF22Ye4Z55HTcODV7WeEMDOMJ2IK5CR8lNNp5NWnd8xT4PR4IkxB4PqcMEbTth5jNN22k16+s5IgqDPXPIRYEZ7l3F+PIjlIVwMVXRM+YchLOyVVVr286n/+T//J11lKADad59/5vY8JMRGzPK8hyWtoGi97O4n+UlFQJaEAu5i6KKlchMfgBTysD7Cu/Tr5Ojn15Z+U7Lki63EaRiHiGAtKvkfrijDV9+qstGVyS
nXNMyEGci6yH1Ne9fFay3OngBgCOaXVlEYt5B0FKSdJsiqadhpmfvLzO8b31qb64uvzjN7lWpL6ZXNeMpnnaWacA6fTiePxxBim5avW76i1AeRMLoFSZlKayGkGkvA+lBhhFK1wylHQXD19yovv/4Dd8+c4Iz/ExoALE1fOcvPkhutNx0ffe0GzmuhAzoGcZ7LOaGdR1mK8w3qPmQOY5VyEfKaLvfPnmVy1FF1843Ki934pBY03bDeWsTd4q4ihiAa0BJxSOAtXneH5047rq47vf3zN9fWW/dWO7XbDFCYeh0fmGLgbDoxhJMw9YR6JE4QjhFA43geGPhHjSEwjm1bTaYdzCtM6jNWEmDFzRmVQIaIw9bSpjk1cYP/l3MeAJX8rSx63fmjt6BqHVoqNt5AyViHBHcns/nIH8hdcBWKSPivkSkISrZypG1TXXkUMkWkcyUmCXU71gLKmZpiZmAvzHFFKcX19gzOatnFcX+1wBtJ0R5hGrJE+fGY5eArdpsVbj+sULkmPvmsd1hnarsG3jqyKwDT6vJlTTQRyTJQkvWeNBNnWGZy35FyrYbkRGA03V3uurq+hwMPjAbRhnGZQmjnGter9ti9rLX/0R3/E3/k7f+cLJQm//t6v0W7dZwuD9OF0z2F45O39K/7lT/6EYep5ONwyzQMpiCfB7ZsD/+JPfkZ/HLj97Jb+OJDjTAozzll2VxJwr59e021aXONp2qYGXjFAGMaJECIUVfkPSp4LqPyJ8z7TWlcnMcU0TSuvQgxdynsJvI13/O7vfMgwBeYQCakwhowxmt1ui7WGEiZymNElo0pEA40VlEuAqELjPbvNRjSZtRI3xghrXul6mKqV2ZoKxAzDNHN7fyDngnYepQ3aWLQRNCwvzH3vMFqT4kwMszg3TRMpJY79SIiJbbdht9mKuUwjScA8DsQQsMaIzE9rjDWgYE6BmONqxXR7P/Dpy8dvPPDCudpd9uvy98tnaklYUpZ20zRNjNPEOI6EHN4BIVaOjaHqZqUgyiWScqCQ0FrO2zWuGUtRhs3Vnpvnz2mvr7FaWlQmRUwMdJ3hyWbL9X7H0+cv8N6tvWWJCyfmNKGtQVuDcRbjLMoYilIUdX7Glve9cnzqm1wkU8s7+Srn93cs8CqurnZ89NELmsZAmRjGie0TzzjPGG3RyvLs6Zbf/q2nbDYN11cdm41DkZimgdPYc/d4xxRn3o63jHEk55mSZkiWgicjmbizAk0SM0RIc0QXjWlc3eAK1xhIhpK0QKzaVHJRtTOrfdMl01NqgXKFz7dUVblm4zlF0jwwR+mnGg1dY9luHG1rRVLzngJvziLDKZXolJIm51QlQ/rMgKeQUwF0lSC1sjFNIAQJtLkklNJSZRrL1b7j2ZMd+41Hl1Bje4Jc0FbjrCIjh0TOBaMMORZcUrio0ZraAtCgK31fOj6orFBZkVImhsg8h1oxCQjtfQP1T+c9C/gMCrRGG4N1Yh1nXUO73cvDoC0ZSajmEIXc9h1q8l4+sL/08K46BrVCeOtrXyA9dSmrkoRpseYsWklArEHROot3lqwLWQsRUQKNFhi/SFUrx4s+Q2iV1FKqHlxeUqotinMyc3kwL3+//Hh/i7Yui1SEOaFKhKJRpBUmNNaQYybGVHt1CqOkEnXGUJAAoZXCGCu8B+fWloA8A4UUUlUDFGIWm1SpQnknCMlanE1pxpxqAistkAJVIWBpEmgTV3RByIgTUN6peH1VExgnrymrArUaS9VS8du4lgpV3m9Z21nlvDFZMNhcSZrDODD0IyHMK8Ss1Bq+zt2ALNFXKYfWBWcbTM4k25BtUzef/B7vNyjjaTd7/HaP9W21rc2YUvAKNs6x6xxd26KsoSjNaTgxzzOHx7c83r/meHzkZ5/8jP505PbunlPfM87ziorl5e3UM11dPmNaSyuOd/f+l8GW36nAq5Xigw+f8Xu//yOOh2s+fN4wh4n7/oGQAo1vcbZht2148XSLtZq2M2gNx
9PI4XDk4fjIp69+wRBHXoY39HnEWSFO2dzSlRtQFms9ymnSNBBCJqtMOCXwGrcTuMc3DmUMaVbMs6FkhTYOrQ0GQUZzzuQYzsy8CrUpI1CTqnqwkOXhmKaJYQzkGlychv3WoXRH1zhab94b1Jxz5nQaWBjGSlPhZsNut8EaIxs1rfU4WmvaViryaZpqPyYyTiNGGzabHW3b8MEH1/zW959ijUJn0eWSIipnrLY03gn8YmpnZAegsFFhk6oPUJSvsYWIwG6JDFmji8CT0zAx9gPzNJNiBjSbTYdSmqbtcM6irRPil9KweKg2G1AO3+7YP8nSo06ZhGGKiX6cmeM3Xxl8G1epkVfVpMM5S0gWbS5Y/kjAdd7jQ6ZtvJheZwPZoY3CejlMqPtaEkkt66rE9KRkJflVLkhMWRiorJWsMZpS8gq5KaXWavdXSVC+1lU5ClLBZigTqhgoM6rUdTCGOUemIO2hWSu0UuyUwRpLKpoQxaq09Q3OGtqmoXFOAm7JpJSZwkDKiRATU0jElCsn48xTkEo31bZNFI5DFA9gZyXhFxe4Vr7PWEJM6Apnp5SYpsr8F8s5nLWrht55J/c7Kkw2zFES5JK/vTTyDChf5mRrJl+DbiHlzBwCh8OBw/HEOI2Vg1BWLsviwV6Krhp+izENoLF+K3uvCag5CTE0BJQydJsbXLthe/Oc7ZPnKNPKeqeMLxmjFVdtw9OrHW3XYpwnA7ePjzweHvn0k7/gk5/8K46nRz75xU8Yx4Hj8YFpGokxE3JauVPLu1ZIIuudwzmLNaJESfUMVYrfPDmRwPvCEk4L5JlzJT+JlrRrGtrGy4GrIYVEIjMOQt7ph5FxnhnjzFwiscRKOFMolSvjrKzN8sWmUC9kk7JUscjmNqYSo5aq6t3XK/DmuRpQFTM5VyxLM/783eesX6oLYxTeankgtXpvgVde2kWPqpIWFhJGVuJktTizSH9uyeDPWl1jTM3+JWBvNhs2XYP3Bl0ypSRUyWhZaHn9NStdiQn179rIepciHq4LoaIUJGu8OMhBoOaYUpVgKVAGY71AgBVqQhnQFqU11sm/uabD+4YmFbooB+A4TiilSLkwx0hM6cs4N78x11LZyj0zGK3rva/7dql+69dZZ6XiUhajiuQsVkuf/HMHh/Qqk0jhUqrPJSwDLJaq51zR1mejVsGXUr73XvG+swYFo1QNbHo9FFV9nUprKvngHCRFpCL7EQkeyzrVcLoShFDLOaLRumBMIRfQxggjv8LR69srFzLIS/keZySCir5JvqPk53BerxWCZamkJKBdBr71Hn1Lm/lc8Z5/X1lK1vM/UmrRkXMWgmOKUpW/o9nlnXeyolc14dPaoozDWIexAuUX5Nl33R7fbdFuQ1IOVQwqZUoSaaY2piYs0iLsT0diity+fc3d/R33d285ng7VYMeiaNEGUtyKs1ZIkjjMkZwLof6/qf325U/ZLwu5uXIevuQM/24F3lK4uz/ws09ecri/49Wnr8g5gskoo+j8jvaqEWgoFcKceLy/Z5pmbh8OPBxPDHHkfj4yq5lpMxNdQntFdBoVMyFllErkHClZY0qmUQUPmCxWYjlKX7Foj7GOEpHgoFR9KCDWLBiQHlc9vBSs03iAVZ9qtEZZiLFmyKVAjpALrdNY5fHe0DXmy+7ZV7601my6Ld4LUy/EmWkeKtklk1Mhx0qqqgeqNYa26dCmHuDG4BvHld+x6Tp+7/d+zPXVFU/2HdtWk0NkHnoUhc7Z1W4yxVSNl+vBVbNbpaSxphbMkkJW9c96dKj62jWFECLDMDPOmSmJMcbm6jnGWuYQmXPG4sjF423D9voJznv21ze0Xcd2mtmPA+M48urlK1IMjHPk/uHAMEzfKaj561xaa6yxeOfo2g7IOOsJZiYVRVrIZ0ZjnWW/37FtW3ablt2mo5AJSTTOwzQQKxog/fPEPEl1dzr2jFPAGCsoA6wB5pJcpS6qN+Ade8D3xmFg+fVnrkXTWLrNT
gaBbLYYYwhBEvlMwccOipi5KBQYQ0Kj0VDkQ/4zq63guZ0k5DjtCjYWXDXQV2YSa9Ncq6PaVpK/CvS5JORGSxK7JEjSMlHY7CgLIlB9yqnVlVasrnKlFKmGL7orueS1WPllFvT7vyrN7J3eJ+qCn1zR5pgLU4qMcWYMI2McSASUzaiq44czsznXYQqi1xUk0/pWeto5Y7QjF0XIGm0brj/6Mc3uBr3/Ice8QxXQMaBLZucaGmvYbjrapuN0euRP//SPORwe+P/+8T/h5auXKJVRKrPbbfi3f//3aduGrmtxzjGHIISwceTV6zeM48gnv/gF9w8PdK2lawyNN2vBJPC43F+rV8f5X3l9pwIvBaZp5ngcOBwHHh97ck6i9bS6Qo3VQSRlUogM/cTQj5yOA8dDz1RmphJIJon8RReKNGTJWvoiSw+rFFCliCZL1VzrInMHVgahPIVLJl/7GZVQKPFF12D7xRtfSxNKsiNkcy5TNaxWKCs0d6vVezuYlFJYI5WN9xZUIaa5VjBUedDCZhZZlFoz9nNW7qyl61o2246b6yuePLmmcxprROpD9T7W2on4/LJHuCa1lXyhcq00WDPfcvHwycIs26GIrCkmYpKKtyiD8Q3WekKepF+mauWrLda1OO9xTYdrOrIyZKUpRaGNISVhbM9BZEa/KdeXHadrf1cLdGqNrRptLW2zFYCR+2qdAaXoupb9biuSvXkipiiuQlCdzgSiFzZzIiymKqhqag+Lj+3nq9my/ttCvLpg7L+v4Ps5go7RGucMxmhc7VmLFI1aqRooVP1+NbhfKvSlxlWLQQLn94U8r1orTFEUCyVJdS/wfN3FuWJc5SIs1R7ownZeWd/1RWul5Xy6SCDWV6Qu2My1x5iqN7qqrYTl+V1e5zd5XXRkzwjeWuWekY7l3se0sM0jKUUKGaUX/PaiSr8gDdZRBhSVBXmjoK3H+CxFDh7tWtz2CW57A27DnA06Z0xMWDLWGDHKMOfpa/e3b7i/v+PVZ7/g5ctP2WxaNrsOrbc8e/qE7XbL1dUVTdswTzPTNNL3A6VA3/fc3d9xOh2Fh6MXcp6q3Da1SlNXeeSvub5TgTeXwuu3D/yrP/+UN6/e8hd/+lNySbSdsF8fjhPfezzResNVZyk5c7g/EuaZ4zwxhUy0BdNalIXNpiF7y6JZVSipeKMipwhR4GfrlEiIinhzrmi00ihj0RaUTkgSr9cHtJQLZhvyYMI5oJb1cV6IBIjrljXoLIG7ZI0zAsl4Z2ga+94qXjlgnQw2sA6RQSzMw9pr01UbGyI5VTcq5dHK0TYW1XVsty3Pnl+z2XQ8e7pnv+8oYSCEEXLCefGzsk56yKurixZpEGtCUldOLdVuhR5JlZghmWIIkb4PDMPM2ze3vHn1lv54hJKkktle4ZuOzV5IMdQ5ncYYStGEUHh4OHI4DpxOBx4e7hmHnlevPiXFgE4fQNwzDadvDPb8Zq567KmyJkcgpFZvHJpC9Bv2zRUmG7zeMhKRZkwkK8gmkkwgp4ESI2EszCoTUuI49MQYOZwGphAwbsa4kZQy4xgq7BYEDfKLdl2g1/XwRSodlWqVV2qwkO3O4n32vq5SCikEOfiszEZd9sA49kJUmgXmzJUMpZWirbaoZ8s0eQOibReIWpsKvRdQqT7bWc4IKsu+pITKUZ75IsB1LglVEoaCtgqlDI3rhJlcrVMXCVwuohSIsfYvo0gc53kECnbTYLxFecgmSd80iRtcnITNP4wzx37keJwrSfKbvZRWqEu9cA0yy11dHAaHaeLu/p5+GDicjvTDiRBnLgO3HA15Td6ohU+WJhwjDqU0uW3IjcI2GzZXL1C2ge4Js20J2aAPj3gF++qyt2kdO2/QOjOMJx4f7/jkk5/x8HBHmEe80/zgB9/jd3/vd/nggw/4w7/5N9ntdishcZwmxmFgmiY+/vhjTn2P0orGe0kwSq5SriWpkjbBkvz+BkLNR37+izd8+slr/n//4lNyy
Wx2Mu4tlcQcZzaN4WbboEph6idSTASVCCpTDGivUd5iWk/xaSU6lCLZb0lZirQEjoyxamUpq7JsCDlQlDaiHzO16C0LEUCyMlXWfSf9ySIVHEW2zjnoyKWr3WVWBSyULL3eQsE5Q+PMe1xRVasfhzEOdxF4c6nBThdIhVIMKgjEo7AoHN4rnFNcX+344MVzYZFfb9lsGvrHE2EY0Ag7WVcWuDG1j0h9QCvEnCvrUi1lMIVlSlDJNUmp1W6KmeNhoO9HHu4fub+9Z+wHQAg7vtvRdju822CMI4VZDNSLHPAxZIa5J5fC/f1b3r7+jHEcuHv7kpwju9bQ2CKH229S3F2vmsysFZLCaYtRis61bPwOksKpFlNGKDOFXANvIutIzhMlBtIEoRTmEOmPR0JMnKpTHDpUGDUzTeEd1jJFGApKa6iSobWXXEk1l6zXfFGNnY1av/4lqEjE+QZjLCDoRoqJw7EXGUsIxBTRSp49awzFuxWhKssbqm9h4TXoRevL+TGWyTbSJiopVvJTDbz1+Vc18AqnQX7WftfhnZe9npIkM5MQgcIUJPimQIwTuSRimkQ2YzzGa7BF7l0uJBKpFMY5EmKm7yeOx5l+CN+4gQZ1LX7JNvICIVgq2TnMPD4+0g8D/XBimAYWEH09aBeEMeezdWwNvKCIygEGXIuyLW7/hM33foyynjFoQoYyTZTTiWw0+1bIr23Tse08OvWM08DxeODVq894fLgnxglnNR988Jy//gf/Dt/73vf4wz/8Q7bbLdM4EkJgHEeGXvbPhx99SN/3vH7zmnEcJJE4HOp5RuXQyF45S89+/fWdCrwgPskxykSbphXizH7f4VtLu2lWfeBKCFoeGFNkozoFjRHKsXYUpUlFYOqSLDFqiIocJfiaWnmtmYsSXaJG+i/Si5QgUWqhpoquvdwld6+WYgupKiMbqUIQVPmR0qJDxte5lOpMPiiI9vWrNOa/6lXRbZkIEs9EF5S8Q5D3U4zYYposkhPtGozz7HYd+13Lft+y2+xoGwdZZFdk0MqsB38pkKL0itfAuyYkkFOs8HyuTO9ETqHC0nL4WStTiuY+cP/6geOx53h7YHjsSdOMKQVdMjlKP1p5scMs1eyDAiWn9UHORZIsmVISq3FBJIaJeRpIcf4N6vFebooFv1/BPlljaqWg5EPXPqyro9i8MvhKaospk0Oi5JkQEAh5mITINlX5lsoUJX7pIaYFTZTkKCdiiuu8VSHvnRuP0j9j7fWuycLyxJT3s8kVYI3CGpGmxSSJV4yJVHkMWkn/u279NWkpa8Uia7jQq5ZqRinhRuRCraRBxSTSHQXKaHRKZKVEbz7LftYasUE1hsY3aG3W4R8xBsI0kVJhmmXyVq5scIVA2apolHaVX7RIWKRVIk5NkixrA7pkjM1YmzAmvz8I/yst/sXvUhViR/r6c4xVwTEwTWN1nKuJ2Lp/P/ejlkC8kN6UIhtBwexmj+v2qHbHpC2lKPowEUJGjT1q6HGNQ/kNBkfjFG1jcMVjc6bbtLRtwzg6xlnOwnEYub+/p21b3r6VPu5SeY/DwDAMjNPImzdvOB6PPD480J9OxJRWhvmZNHh+D79xOt5SICa5YUpl9lceYzXPPrii7Rqub7Y0bYNRklFTKg9WF7QFHOhOYTYWTBHnETIxZVLIxNmQZkueIYaZFBJGp7XvqKoMSJiekmmVXKpVZRV85wJZWHXGWBbpAJQV58/IAS99X3GzMVpTKgzROifONUGgoxiFjEEFWN5XGaYUGAsxzeRQUNpgrKuwnENpRcxSBVuXyDZitca1W5z3vHjxAR998IyuNVztLEaDijMhzZQIRvlzYpJhikLuWJPZGvhKKaR5FuJItVHKORHDVMk4smM3G9hsPP3twM//5Sc8Ph5587NXPNw9Ms8ZR8HkTJwD2kbYGKxrICWy0qICjmkd6ZVLIcdAjvP5IwWm4UTvFfM0vre1/jYv9bktIiJ/UWsnIClFVgZrPd63c
iikwqQsmyJDwMc5kcaZkmfG3BNiYh5nQsrMc2JOmZBhXk6UyuzXtb8l/by6p8qCDknwXdZe9OL2IqG97BC+n0sr8F7jvMFYQxoi0xSqNaH0pZ2zeO8rMztWj13RnKu1f1oT8KIoJZGzJC7eO2Q3izxwDAK5O+9ICDtemWkdKJJTncSlJeheXd2gla7/lpiniePjgZQyc5ADO5WlfbXMwtYoKyRFY4wkPFpjva3wtEaZgssJdCEmIWi58NUO/fdxlc/9KfdCkUE88YeB0/HI4+Mjc332ldIrrHyZ8KrP/SmVrkwEStZTtKW5+YDuyXOythxNQ0yZx75nGgf86RHXP+K2HXrzAkvLplFcbT1eGxrdcDru2e23zGHg8aQIKfLw+MAnn3xCmAMvXnzAbrdju9nUEaIjfd9zOBz4sz/7Mx4fH/nFL37B27dvadqWzXaLc45UMiolaXktZchvWuBVtUK0WiQBbSMPU9c62tbhnWSOpk6tKAj5JxdVZRBqrXwohRwSWUEJCqJBZ4NBxtvhRKjeKIVXNWt2YvhgrUdrR9EGjCEbhXOWTIVis6kHyXIClgr7LQYalx/nN7fKCEw1ySiqQnBZNJVFiQ3le+yBXWb5Z/LXcthorNUUpdFRQSnCuvQG7zTeCWvPlEwaJ7LKJGYUiZIDJYcadIVIImSTIhAklxCS9OFKFunROfCKpnKB9cg1aGfppalc7fyUQmxLFmLcIoFacqEq21iqlaXaXe/A0jyQv4sNZWXZ/obE3XVH1Pe8ktZWuO5MVVkSSG0MRht0UZAyOhZMkg+7AD1ZqidSlj8r2Y5KHCq58A58UV9NqRDhQj5aX8TyMi9hyOW7lFr/7f0tjKp6ZXmJhbJKnhbW1JI3FHU2H1k+tJb2iNECQdtlvrNSK4Kl6sNTiiTWRRWyEkvTlYS1QI1alA+i99QrwS3WykhejyT51uqKykjPV9fERWmkqq2w9/Lzl716GfQWm8aVgPX+VvbXLzu/4tFRVDvMuCYjou9evqt+Wd0yy5O5vvL6Nhf9uHINyliZOFQNcGJK9WdPpDCR4oSJM2SHNeAqAmKNorGW1hm22w1Pn9wAicPxnhjnOhDnyINvePnyM47HLZtug/deoOZh4Hg88ubNGw6HI8MgbH9fzraoC1S+vvz1z19/J75TgRdg2zie7Fs2NrHRO6wzPHuxp+laNq2l9VZIDKWOrtOVTm8LycqBm4ZMKol+Gokp4ovDY7G0dP4G3RjczqPR2Nxjc4/R0Dg5tOgasFY+nCM76KxA02VUkCCMs0zxKXmteNWiUVUJdCUvXT4lCqw2NPUgK43AWFPMhJirndzw/oKBAmOy6NmQSR4pSTTTCrRRYuXYNKQ4M48DTmue7jyts1y7RJd6pscjn92+ouSEVRmtivTFjRjdz2EGzkxRo899juVQznUE4nJAS1Uhdnpd06CtRaWEzZnWaJ7tdzRK019f4YtCPU4M04ApYumXYiSkgEmRkCNzZU2OdexfUmdd5qIB1FYE9P00k0qmH8fflLgr12U+JygpWZ9h5oSgPL5ryQo2bUsJgfR4ZLg/oR4GusOMGSJ2ghjNaixSksJlRUkKHYG41HmSIIKwoLUSprDAzUgilBJKZYyyaGXWoLscPgvjepnh+j4Dr9KKpnH1d4mZzTScZLRnXsbKgVGyVpJcKlEQWEPrLd4KCrVvW7EybTxOVzP92sRTdVqWMJgVqUqvxGJVzEEWi8yFfGmqPzPAPM/Mkwzw2Gw2ItWzjlLg8dQzz1FMTKyu1a2pa1y1r8Uwz7VfnhQpFcIsVX1OWWDnlcj4zV6SiJQ1cK7JbU0OxmnieDhy6gemcSbWwKtQ6LIYMhd0UWJKVBSsnwelDbgGjKPZP0W7FnzLmCDEWc71EJgOb4ljj516mHs8jqe7hutdx1Vn2XrN9b7lZtex7QzW/Jd5eLjnH/zD/yc//+SnTNPEn//5v+LnP/sL/vRP/znGWJyVHnGojnnzPHP38CAtg
nqfO4r4gFc5XSm1FaCXUan6NyvwLv2axllUtpTKZt52nmapeLVeGYSS/deMXhdQAi3nIMYLc1/Eb1kjjkbG4n2DMY5NuxWqefLo5DG64Kv5UbKOYgTuwVqKkapLTJUKJRRIiTDVEVYlsZKDgMUbuR5LrKVZ3aiSnWpQ8pCm2p2jJHK8ZFq+hzWtWbgMsqqQdu1DyDhAQ9tYos7obHBGsWk0rTN4nTE5kKee090dJUVcDdrOG6wXmv40i72dUPfPdmryLmrlmZf/Wz5XoFQbvbo+MphETBAa50je03nP6DzeyEQjXaj921w1pllchUomFdFWC5FLekRSDZhKkpMsOqYM1YaS91l9fYPXF7/Ki/J3+RolvUBjxETDKImQZQowBcycsXOmSigFaMkLysB5MEwua7VXykVdUpnnS7UlZKMMtQf6K0fFLdVj/XhfwVchBKal8ihFhhAsU6yWSlQptRIhFawImdG6ukmJQY/V1XhDL9KQy9+lLhIKdfE7yzvJxrJGWp3hx8UDXVf43RiLr9aswywTs0xNBhattVKKOUJK4o2dKwqxjJMWs6G0bmH1K3bJN3OpyyL8IgDLa5pDINaBGpduZUt1uwwhKgVUkeC7/iytZeCEcVjfon1HVIaYIMREmCdSmMnzSAkjpAmVA4ZE4w2tN5JY1Viy6RqU2vP973/Mfr/hz/78KcfjPW/evqHvD5RcuL29ld+tBOqPUVCxmBLDMJBLodt04hoG54qX9Y2/s7+/7PpOBV4QobizBpUtNBZjDU4VbEmoDEVdwLoonHZyCKg6dmqaue8nQkyMQyKlgt9qzMZilEGXgtOGp88+YLPd01hNW9m4bSuZfCCRVEFZJ1MrlMWoBjIM9wfCOHH38jPefPJz5mliGkexVRxjZegmZCKQFvYwchgqrdlYD76pkLOryYKMvzI6U6wEsfdyFaRXVWcOe9+w2TQobXBO5BTOKFTK2JLROtM6x/MnO7ZtQzyduH1zx+H2jjeffQalsO86nLVEbzDOCNqQs2TpxsrGNRZdfWfPbjs191iIP1phrfzZNg5nDMY1RGVI2lCspVgLxqCtHEiNtRSlmKeJqCxtCpgibF2sWBqqSlCT3SEPsHEemxLGNYgnQqhQVfrNqngvrnOPTP40KByamArlNJFOPY+/eM3D27fcf/oZ95++JJx60qGnhEAOkr1PQVCCKSYex7maHhRigag0uZoBLGtaamDOVAKSrmYHlHeC0HLYLgSUd177e0x2CkIbiDGRU4YsCEop1VGqQtHaqDpiUhzWVE6UVDDa0VhL4xytF+2zVeLjTMmkGCQbV+UC2n23ile1GFiTkQUiVkvivQR6cc2SfmequlaR4Dm3GNbUPnn1EQ9zIsaESCLFL10ZK3yYGAlBJhuFmJgrZ+KbviSJOe/AJZGSOd+J0+nEw8MDp34QEt/SdlBLAifnkV5cs8uFNBNwxmE3W4q2zEn85vs5MoZYLSN7SoqoMGLTTGc0N9uWq03Dxhk6p2mM2PEuHzjLk6sdndf8u3/9r/Hxhzd89tlnfPLJzzn1Ay9fvWGeZg6nkXkKdT1FDRNSYmnPmdq+Uao21GrytTwlsinL51ozv3x9twJv3bDWanQxIgkyGqsKhoLK6d1qUImhuaKyCksijoHDbZ1UMgmrca802gsMpoqI7K9vnnHz9AO2m61MBHGarhOTiTENxBzRzqOdx1pP2+yhFO5fvWQ8HlElM9y/hZIpKRLnmWEcCXHF6MgogVlQ2GVUmAcXG1Q1rVfKgAdlQasg01Teo9m5IL1S6TnXst1e1we8BsYyQgoiPleF1mqeXG3Zb1peHm55uHvN4+0dd29eo1DY66eUpiFGCbylQnHaQGkMSju0dtKX0brCkgufEYo2QjKzhraVg651ZmV0J4QYVIx8qJqwWGPwxhAVTGFGKUtICVeyYAtGo8piCcjaEdZGID3jYh0hKCSiGGOd3PQbdi1Vhno3+GrAojGxwDCTjgPHV7fcv
3zF/S9ecv/pZ5QQyaeh+otLoJpjpJ9nphA5jZNMoFKWpDRZf75Hfu7tLmiD1lDsu+YZl4H3yycufc2rJpcxSIAqpdB40X5ad4agC9XAY1ERF+lpm4r6eGfwTiRZFhn1SU7LVFjQnJ3rqIHXGHRl1Au3pKobUGugWdGci0p/WZ+cYk1CZQrYQvQCQewWm8IYzwMQxIdcJIcpSq8zxEgIsQ4g+HZTyeXOlvX1CLHqcDwyjDOx+h0bc0mhWtTcl/8t/1Zw2tK1HVlZxlCYQ6A/DRyHAUpE5RmVIzZM6BTpWsu+adi1ns4ZGqtx5hx0rSpoZ9C7LdvW8W//Wz+m//4Lfv7smqttw+3dHVPfc6Dw+HhgHHvmkJjmJTGvmu6KVqxQsjrzHHjnHXw54v/dCrxwNiTPuTq0ZGIQEo+47VjJk5aHeYFOC6iiKQnCJCxmksagscrgtJXKrxTI4rGaCmjb4Ls9xiqUlYpaq4zJBus7XLNBG4d1OzFpcJ0c4r6VDxfrGLC0TjJJJRNLJiVhj+Yic3g1Ch0jba0uratN+JIpUdi92ljUe4KajTFcXz+hbXd43+J9R9ftyaUwjsI09M5iLWiVsCrQNZ52/wTftuyfBdAN7fYZzl2jUNzsr/HOC7vVGhbmitKaphN7N1NHbCmlUFUOUCXQiExAiz+1E4as0wKH6wImK6Ia6W40+IHm7YwPDjs9oF3CugbVtKimkYe5UqhXmLUGXvEsl+rDWkNKMiQgZ8dc+3XfZfOML3pti0nFOfBSpSiZeRyYp5HD/QNvf/YLTocDp9e3jHePxH6kRJEPhUp26eeJkBJ9mOljYE6JUOocqyXI1KlGZ1IUtSVQyUMVSTkTv96temsJeP74Ne/tL3tppWisp8Q67EMJAqJUnbm7HpJGhjpU7fhiZCHJxOLelmsFuxgi1DcMdfoPRDk+1sk1q5yRM/yrVAalqzRSCHwplWq2LwlhLjBHUbWmsrC9dIXsz1LJSyIYCCLovRQbXdegFGIhmZK0Fb7p6/J3XDDVSs6imZ5n6WeHIOd3rfrXebbLWqkF/bpszC39YyXvpWTSMBLmSB5HGEe0KlgtvfvGGqwFQyaGiXHoebi/o4SJdLPDqE4Mi4wW+Vup/gpFEiurtJCvrMwAiCHi7eL4VhCNwHJusSJ4Zyb80rc4IyBnB8Nff323Am+BWBJTiuQYiPOMVjDWw7NpWxycD1hViR9K1T6BpgSYDgJ5eCc9G68bWtvUBq4wbVOCmBWm3bN58hHioiQWhMZoNJF2c0O3vUEph6IlpYRvR1LW+M01zfaKmMDalpwki8suMkyBaZyYQuFwmKmyZCiK2DnMnKoHctXZ2kzWia5TbHfdewu8znt+8IPf5unTD9jvbjDGYW3HOE38/OefMo4j1092Moe1ssi9t1w/uRLo7fojPoyBOEWm04hC0foNdumZLkG3ZoPK6vVzS4VQ0aR1tFbFmwFJcESOFKnTplEJ9BRIzYGhH7k9tQT7hkP6DH3MmKZjc32F9i3WGemxcz4ABAHJ5DoNRysldpl42q5FaxjHU5VlvJdl/lYvQQ3OffOYBDZ+eP2Gh9dvuHv1hr/4kz+hPx55/bOf0R8OhHGEKZDmIPN0Y+RuODGEmT4l+phIBWYURWmKqQFgCQS1moMFzJE+pl4OoaVmKYt+Wg7QZVTapZWhsNzfH9KgteGq3aBjQcVC1ImiI0or/DLqsLKUY9RMlXOxHB+qiKY/GbEShSJ+zEZYy9KzLCIHKjDV8ZRrkOTc212csUpRqCyDJBSRUiBEkf3IumliTsyL7tfVGb71WaJ6NmcAXVnbtafqnGW76TBGktfQzRwOR3KYmcxXO/Tf97VU8v3QM4xj/XMgZkgVETsb7lYFBIWs5enNSkLcZR3slLTAwv0bhuOJEGaIM8YZ8VM2in1n8cbDcGDujzzmwM9V4nq348cfPcU9vaaxMrc7xMiUI6SIyRmbC60y7F1DajY82e6xaO6bA4ObSWkGF
eurLhJmtFpbBquCRogEYpRi7VoZf9n13Qq81KyxnDMMWGAZEWbrlOuUEfn69b0X0ZKWVKqVW0E7LkgS59RxIYmo6jKiTe1VBhm8HOJEzgHjJnyMNZsRWUxRtYdTCRDGWax3wvh1CnBkpZhTImVRVhbq7NJycdjXQwsNMSdiDrj0fu0cpOK94eb6ht3uGiFYWUDJfEql2O227PdbIVm1piY48v/WNpATyWecC4CisWIIsEy+UUpBtYlctJ6lckpK/ZDbU99bffYEtKsHW5xEY1kPnFg02AblwG/3tGNk04/sTydwDXrTgvEkSpUUBGKYKamQgvTZVxlDXoz5We/1UuV8l65Lksr6uQXCUpdfs0imxJFt6nviPNM/PHK6u6d/eGA8HJmOJ9I4k+YoEGAqhJSZYmJOiSklppwJWdCZvATdilKoOqM2L325+qAtpLw1kbp8cUuFqzhXuJet6Muvf58rVx3RVr+gWsEuM5ytkfdVqukLSrzZZbtqapZYC3N1LtLrj08IrC76ZPmTpc95Sa6Cdw7dpQIWODyv04ZE3ZWJy6FQzDvrdzlwYvF8Pk+fUjVpkEoOa9eRgdbEc5/xm7q+4LFZoPMQQh0jWp/lSspb9u35e8sZAVPLW5c2wLLXJAAXSJESZ3QOmJLEU08XnBbZqdWAVpQ6jWrxTjZKJr0t/38uTsuaPGpAF1VZ70pQySWhfGcZzzD5u2z95XO1EXPxb1+2z79TgXfJhyLyQGjrUBV2FieomVjhUaNbrNZiVK0V0yCVWZxHcpyhCOW7aTTO1c2qxRHGGS8j45q2DmKPxPnE4fgpIYzcP3zGNJ/Y7z5gv/sA57Zsti9QShHmA6mcMD7TXrdgEk/7a1LYsNt4vLfc3j3w+vUth9NEPz2ixkRQwiDVjcbvZVRVu5G0+/H+kcf+QNKeZt9Wt6uvf203W/7wD/+Q3e6art1wPA7c3z0Kmcn9gJwzH3z0jJunV8LwtJVAUKvZZYCCmOWv6sSl27dij3JIlXOQXR6oUufrIgfNCj8ugVcVSk5MtU/1+HDk4f6AUgZnNuA7nv3o3+Lm+4kPTo/8zuGeMQTuDifGOfDTT19y9/bANM0Mw1hhULHxLDI0FuPAOQUl4pwkHgJ//9Vdyz6XA+D8ucuzaSXlUB1/FOshnOaR4fjIPI68+snPGB4PvPnpL7j75CXjqefw+i1hDqTTjAqFYcw8ToF+nnkzzIQUOYVEyGI9mBQVuRA3nnazwThPvIBKF98hV4d/L62ghQEtomu58bqU9WA7exgv71UGXXzhCf6XWcuUGPpD1YRLopWSjKF7eDiQU8ZZJ++racSPt54bSikh8mjhWqQslpNTVMRcw0MRTeow1f5pnShEknsRo+jRUy6CtlTjh1KTjGV4wRQCYRZLxyySfXkylPi3LzIbreT3liyM+5hmck60TUPbtkK+YqnMpTctD2iiFIPR9+9lXb/sWooXMQCKjNPE3d0dh+ORvu/XwCuDTdZvqqCzfKSaxMQaiI1WYl6kDa4WXTqM6Kln4xS2kf72thPmt9MZo6DdtnTbhn3X8fGzp1xtN1ztN3SNr7FCoGtTf7OvsL4tBWJAxVR9A5Z9WvFjLgpAVataY8+QMpVYp430+7+CjGi5vlOBF86J34KZUw9RMQeXZret7MAF5pEpe3XodIqQk/R4jHgNS2O/QmVLVqNNNUEvFBI5B0LomaYTp9Mdw3iAYlHF0zQRa7dobUh5ppSIMgXbGFy0tBtPjpqr6y1d64kp0p96gaGNxphMrMmt0grtNNoqtJN3HMvMFAZCglw8vKfAa63lgxcv6Drp8YLmdDxVdp5skidPrnny5LrCKPJ9aTVPYD1A1rkpa58vQ7W6XOw7lxlDl4F3HRWncv2ZBZZJUHXUYiowJziNM/fHE9Z6dtsNxljafYdWmmbXsb3ecep7Jj6jnHpSmhmGA+Mw0w8jFIVB+l/ilVtwSBUPtYe+9vz+CjA53llCzrty+fznqsSLr
ztXQ4kcA2HsmU49x9u3HO/ueXj1ivtXL4ljYD71MsggJkoSJu8YEkPMHGvAHXMmLomTqkYSdayj9Q7nHSrEi4puOWylr1qoyNTlm1jbnerd97HsmaWaUeeD7WuvZSnEML8z61fc5GRGd4wJZ2XcpVYWqhWLrr04rSRYgiZneQBiOjOUS5FRoqHKdswy47i+t3XWbhbnOmVEL7/0NBciWk6SqKzj+wQ2k4SnImmQSat7Xa73uxKrFJjFNaxC+EbLe3HW4p3H2fDNEtnWRZc/BDnK69SqcZwYqsJjIeEV1LqJS03SxejmbHCzogsX+2NJ1HROqBJxWLxVeKPwVQ2hEU8Bby0bZ9l2Ldvths2mE2mYNet880UEp+sOMKoOv1imVZR3d+Tnn02W9splcK0I0LqvqwKgrNDer76+U4FXARtVuNGZojNFC0yoq6Yt5aqHq+JmawzOGMBUiciI0YknVzKl5Mm+pWkbWm9BidFGjBNl1hwPb1C2oPVMyT2lTJQ4oHIgxUKY4e7uwMN9Yre9wuiGxluMGnE6MZmENxk87PYOsmG729H6jv1eM/aOwoHGnYhhFEp6KQxx4s3xXgJ/fwSlmNKMbRyusVhv31vFq5TCWLG1jDGQYlglDN67Ve8JnEcDpszxeCLERKp+t03Tst3tzxuuANQKg4U6UZZ5RyzdGioEc97GFXRaHlylyTlxPIhDzM9+9gl//ud/wdX+mh//2NM0LaGMKAqf/Pxn/Oynf8bxdOIXLz9jGEde395z7AdiKsQoB1HnNxhthDijQFuNcR6VszjOGIdvOlwzEWekl/MtX+pzH7ImlS/7uedVAosQqIbjgWk4cnp84PbTnzGeel7+RPq4h9tH+lGMGPowEUPkthdk4GEYuR9HphSZs0iFqowX3zh849jv9nz/h7+Fs55pFonK/d0Db4e7OjJSQklY8Y7K7jUaa7wEELUcQGcTleVzpuq7VX1P7+sqFOYcK/s3UXA0zRbrMgVHiolxnBinQEo98yzQc9u2WGNoGidV0SLpUeKbI1s9URDOh3MWawybdoO1TjytY4KimXQgITIl6eouBIKC0hJInZBTCBQxjtEy6lRpccxTxpzJWLVrgwbrPUoVnJOjOucsSSZUP3nN0I8cTwN9NfT5Ji8FwqWp/01T4P5w4NSfuH88cDydmIOMF7W1yClAZNk/Qp5dSGnr+ZEzqUR0LjI5rvI1dtc3aOvIREoJBJV5HI9ordg0Fq8NvunYb7Zcbbdc39yw33Q03QbTeLRz1Pl9UsSV5bySdk1MoQ6nkI+UlpaXoGcL6U0pkREtHgXG6JW495dxDPtuBV4FnS5c60zSmaAlM9TegdZMg7Avk0qEakGYGofSmpgiKU5YnbjZe+lv7lp841czh1yy+AwrxelwS1ERGMnphLPQNgVdYp0/WhiGA33/wJPrgadXe+gadm3Bm4zXCWcyyhfYWyiw6bY0bstu8sxXG2Js8e5TJhPRWuRQU5q5PT3W1yOV13bT4huH9RbrbH2A38d6yuYt5Kqxi6QcAY2rv8tYs2b3ORfmOVRK/USYRaaw31/RNJsKcy0PSwQlrjoLZ3GpjrVaJixJ9lqUuO2cma71n6tzzek0cn9/4JNPPuNf/PN/yQcffMj3vvcDsQONgZwTP/v5n/OP/vH/i8PpwCcvP5NeEkLOUMqitcVZj7MW6v8rLXIkY53IQ5RGm4hvWpxvBc6bfjm3/aavy17TRUeQd+PRUv1mUphJKXK8v+Nw+4bHt2/49M/+BWPf8/azl4z9wDRGwiRORscwMYXI2/7EaZw4TDOP4yTewlRGP4WswDaO7X7D8xfP+IM/+Gt43/L61RtOxxPzOPI2J3KFU6Vyq9WvlmBhnMU2/tzjQq0TWsTusH6u2icugfd9Bd8ChByZYiCEhHOOrt2IrEd3pJSZplumaWIcRx4fB4wxXO33OOdoGy9JqBaETCnR1C+BF5XpuoanNy3GejbtnqbxTNPEpGdKFl/md
+7k0p+lspJ1wZlzr1fHiFamuiRJ4NXGMJVIzBJ4F6KiJAVns4acclUkFMCi0PT9yLEf6dfPf7PXWf4D8xS4f3jk2J94OJzoh76+BumZWmXWRz7VSj1dJuHqvB9ylgGWOclkJ2MMu/01runoxwPjfCLkiWk6oTU4t8FYh/OW/XbHfrfj+vqabdfSdK0kLdasBMFlSMuCIqQigz5iiqu2P9XJUbI9pfevMOLYpheynln12wvT+V0EbcWpfuX1nQq8AEYZvHHEEinVns04B9pQUl0EU11hlExZKSWuukFrLNuNVHO+ZqnnRZGbnUtiHE9gENJUCXhnyFsn8HWxeLcR6NLAdrvDORnyTpoleKW02slFbQQZSoVIYppm+n5gGie5ceoMT+QsxuoLVKeUXq3tRFYUKO8r8iIQYk6FVCJQxBlHa5rGS+Bd5DepiBxgChyPPcMwUCrzYZ4Dfd+j9bk3V1SU4RIXte7y0NtK9lBKqhxW6OUi6ipZl1J//jCMTOPMPAdyKbStp209x8PINI+c+iP3D3f04yjkjYKwbZWwCZ3r2HYbPvroe+JOpqR6Ee9tQ8qJaZxQOmBdg/UNMU5fhCl9C9fnB55ffm45oMUWM8XI4eGBME3cvXrJw5vXnB7u6B+lxzvPs8B8YWaYg1Tyi95hAABpo0lEQVS448AcIo9hYkyBMUcieWWWagOdb9AWnj2/4dnzJzx7+ownT66xxnL75g0pziiy7PmchXdx0dOXra++8AN4589Ls4nP20l+3UspcNaQXamVeFkP/2lOVcaTMdYKcS/HOgM3kPJZYyxJZYVyda3s6nNrjaNpGprG0XiL95UrUDkK3olUKaalrSIFVi4ysnCBmskFZyymE3KiNhZq/1FRMEokUFS4VRjjdc1zFvIbrJLDnNXKv4gVwv42OINKsULn8zzR9yfGoSfnap2rl+lnS4pZB5acmylAHapQiVQoaSE6baT4sLrC0pCq/FMKFYv1LUYjz7G1dWrTwjiuFamtQXL5t4VdxVJkSJW9kN7EYSutPflz4iQ3Uy9e3lVudG6qKFC88zyX8uWHyncu8Hpr2fiWoAuUCaU1ru3Q1tHYOuqr5GpMIYf2QnIAaBrPfr+vGUn1Vc3n6T8FMdV4vH/D6XjP4aHBNQ1d2/Ls6Q3WeozfsW+vUDuPMg2b1rPddDgNeTySwgAh4BUkrSnWkVORcWTTzOP9idevbjmdBkrRGO3QYg1BDIuMAEoRWG7fOQwW6nzU/L7irlLVd3QmhACq0G08xlj2Vxuc8yhtyJk6fH7kdOr57LNXnE4nNpstXdtB6Sn5TKYAKEYJSRpqb/g8paltG9rGV6Z0K3lP1U+uLw0tlXHRnI4Dd3ePHA4n+mEi58L1zZ7dbsvDw2sOxztevXnJT372U1Iua0JGlTU17Ybd7prnz57zN/69P2S33cmBlzLzPDJOQ63kH5imme5wEBecOK2H9bd7FYEH1LnTuxrm1cZ6nieG44lpHPjFT39Kfzjw+pOfc/fyJXHsGe/vSFGkayElHoaB+37kNE68fHhkjonjODGnOnqu6p010it8+nzPZtvw2z/6IT/68W9xtdvzw4+/T06JT37yF4TxhM6RjTPMFMIohMVUR6CVavitqlZ2GfzxqzyaFytR4B0Lwa97aaXZdB5nrEysOcy8uX1NTFkMEDI429BsWsIc1klgx9OAQow3QtvgnKXDV0c1jdZyFhmjaJqO66sdTWPZ7xqcNwSnCI3BWYihkednGIlJCD9oRYiZaRiFEV0HiHSbDV23kVGLdVTm8p/ToLys0TpNEQnuufZTFwc6UIxTJOVEyJlhDky1J/9NXqryAdI8M4WZw/GRN29fMc0iwVG6VKKRJWWIlUwmdq7vul0ZJfcvKWlNNd7Tek/TNRjvxJErF8aYmDOEojG2odt0YgfZObyVCWWq8nasczjvcb4RtLPC+UvxVaD22c/VbkyREMV0SZLdtHrLC1dIAq73nsY3GHsRNpcf/
Q6t4cvvwXcs8NbGxjKZorJrF3surXV1O1fVq7lcDFE/P+hLVbeAefmirFmWP8V5zdByTmgK87ghO3Cm1J/j8c2GphHSiVa1tqvZkOZdfWoKUTR/YSaGQM4yfFv6XQajMknlerbWn5W5YA8vfq7vbzUvczEZBH7Rp9BaaP2VuDZPVfw+z0zTjNaWkhWzjUxz5Jx/A1Z0jJekrJJiTWxyJQR52vYMu8g9u4Tk5HeHEJmnWWbsVvag8xbnL3VxZ6ao1gIfKWOhunAZY9dRjTL6UFNMIZeEChOrzrIe+gss+ldyVQSg5DOjo9SkRaRwYogxno6MQ09/fOR0ODCcjozDiTRNEkRSFIe2lFbrR7F/jMxRDuQoYlKKLmdfYmfY7TZs9y37qy27/YaukyqipArHpSRs5Fp9GV2n8xRdYdylh3uhHf0VFfDn2Z7vO9nRVLJNlr6+kHiWaqZgL47DVEQWuBAS5OC1Yil5ZvmwGCUsFZRzFucdvrF4Vz3Jg6aULGtng+iAg9hXCum5nHXLy89dzrFKxjqnIEL+M5XdvAyMP1dfeg0eC7cr1/sr1dpi6PEtXKp2ZvMyV1umjS1B6GwscSZTLVKshVR1+eQtFizLPpNZD2KrUaqnQ65jFbXRKN6VM677bCE/ab2ukaxsfdkrSP6uDCxXfss75i+Xb3f5Hfpz5KpfWux3OSy/7vqOBV4IGMbiSCSyEk9j4SVlVJEgtsz+zCUzTSI3kgPZcjkhYlkArWWsnHiCygYIQ5DZkaPAEnHYYLPG+ZZt6fCtZXe14dkHH2ENeJtQJZGtJWdTR65p6ZeNiTCLHGYaEo8PAzGcoCQ2bYN3DpTD28Rp7pnHLF63WTbhFDPDHLEm040F9R7jwTKEm5zFDs+Kg5dZekb1ge1PPa9evaLvB46HA30/8PbtLdMkbjSn0yjQuLGgDMp6lPU1y6ykmTSjKPzw+x/y8fde8OTmiv12i7GGTBB25gI/lVwDh/SI3ry5JYTErvZq9vst+92Wm5trSk48e/qc58+/J5CaqhWvlRaEtR6UY54zr16+4fDYs2mF2Tj0E6ejCPpvb2+ZJhn3lXOsRJRvHWeWCqb2rlOdphSnqepyT8zjwPHxkTcvP2UaR25fvpTB3DUQh2mmn2dCiLx9ODLOgbvTyGM/MqXEOCU5kLPsM2MUxsqs4xcvnrLZtvz4d3/Ak6dXPLm55umTG0ouPN6/ZhxG5uGAihM6B1zJkli1DalAqJDfYum5EPQuiSaXFa73fj2s8gW09/5CRLWSzZKIO2fY7TtCyGQ1SoVbIjHUFtA41ABbD2GjqrO6xzUWp4w41VmDb8S/ebNt2V9v2HSeD148oes8KZU6Uzfw/PiMaZ55+eoNfT/w8Hjg4eGwZJZQRKOqUZSYiWNYkwMFCKlakLFS2yLjJESplETRYa20hpb2Wq5Ve9+PjGMgJjH6+FawZlHQUwgolTC24ACPliq3SGsr5MKYBCWZUxANdEUdpUukURmc0hRtaEyhNQnHBPMjJQTm42vGw4k5zMwxUFqHNx3FGjAtqlhMAWctzjmaxuOdk985Bwwao3RtNxhyMpTCysSeZ0EDRRd/huoXFEJe5wVyUwuBkvPakqjfcN6Rv4mBNxdFLIaCoSi7Zk2rnGEh7Gi1Zq3USmgxxVgOd1Ub9ws8Uln5KIQ5l3KmJE1OBo1mGidyVjQhYl3GGstms0WrhGGqg7Ml6C/CbEB6qDEzjyNDPxMmMeCgFBn4oDSNg1IScwoYraXqrTSDlIswc5NA5ur9sat+qeow1UBCLylhzfhCiJxO0tuVzThzOBw4HE6c+oE3t/cSeK1HKYN2Hcq2tRpwsgHTjCbTtS373Za2mtVL8lsqjFoJWjWzjCkzTRPDMJJzxjmHdw7vHb7xNI2vGsaOTbcVaccyOHwZoqBEh5kz9P1ITmCVJEZxGe81BcZxZJpGcXuqpLC/imuZU5xjIsyzjFYcBnIM9IdHxv7E4/0dd69fMY8jj3e3z
NNImAVJCTEypcQcE6dpph9nTsNEP85S5aZlHnFFeQwYp/CtZX+zYbfb8OyDJzx7dsN209FtGsI4cxgPjMNADrNI8opwDwziZ7wkr+I2pMmfC7i/CmZemfO/oqL4eovJGnQV8pw750AlzCTEnhhyNaxIK7xbTcYJMYgffNLVHEOvr92YOp/biaObbxybTctm06zOZyFErPOSnPY9hcKpH+pLq31Cai8QeeZKTO9oWxdi2sKgXb7v3IMsFCP9xoJafbKFkzGvRiEXBfs3eC0tkmrsQe1p6+rHXKd/yfpkgZor+pDWZ06mWam6DxYLYKMKRmU0EdJEiYE8D+R5EEVGjBhTIDpZt1ztHxWVNa9X8lOpk5uSqB7roIYlPiyVrvSpc06rJPL81JyvS3OMZT7yO0TRLwi6X7bHv3uBV2mi0hRlyKp6ZiqEyFAyJddmt5IeU+Mbcs5M80QOc2UxzisTUA6CRZguQVcDjRNnHu89znuU9lVOUwCDMq5+2Fo1KigWlTuZ+xt7Sg/aWrpuizOJ8RhJc2EyM7qI01OrJDjstgY2iocJtm1mCIFXhxOhGvXnoshoMnaFXr7upSos66zHKrNqDhUKpcXBahp6hmni8Hjk4eGBceg5HR8Zx5GHu1tu7+45DRO3DwfxP7atwLtuRrtJCFTaoBV1Iohm6GemIdIfRx5vH/DeYky9B6pQlCaVzBxmprlnmgamecB7y7OnT7i+usJbj1GGHAvzHDHasdveEFMmUEkXiN2ctQ5vPd55SoyEMnA7jSggpMAcZ2IMNN6ilaM/ReLUk+P8V1Dvir9yDEGq2lefEeaZ4fBICoEwDqR5YjgdOd3fEcJMnAZSCAzjyDBO9NPM20eRbdwfB8Y5Mk6RGGUf6SKuPVf7Ha6x7G46ttct3abhyYs9TevZXrW4VqNMIaWZECbGsWcaBkpOosAAxBVKYXSVudXnsmhNqYM2SkVPPl/xfp5AdZ7s8/7WsiCVZ40DGK3xjUJbw65UDfMUmUOiGE3WSs6IcSblQh9nxhRIOtOljqJhTkEc6mYhDp5Ohru7R8Lc8vTJNb5psM7jrUO7KAiMthjv0XYCrUgloeq8a5B7IrwGObukxysDVIxxaGsEtq3JaSlyUunaylFKS4KfE9MkAz7mSYZ9pFhRhG9YSiTrDShkrKEt8twZD0VVxnyq7bMKf6ckHJvqKa5UDdaUNflfSH/j0BOGmdZ7TJjJKbM1CtM4JqeZsqXpGq6ut3hnebbf0XnPk92ObdfSeiduYjkyTQPkGV08TjtSDIQ0E9LMMA2chqNwP2olPadESJnKgavuiTUMn7ufcgmz8B0IWmqYc5H4Gxd4E5qEBMWiNOKIgySLS2ZbhFRjoPaeisCWs0zImOcZrTXeuzoBSH72ctNRYrCttaLtWrpuQ0yafhbxPMqgtavDDyzagvUGRcTQkV0mjE4mixgj0JPNnJqBOEYGDaYEdNF47dBodt7TWMP9DF2XeRwmHvqFer8YAGnOHan3c8nAbSEinRMLBdpWRnHieBw4HI483t8zjgP94cA0jTzc3/LmzRv6ceb22EtyYDsJvH7GuE4gu9oCuGobGucY+sA0RAY38XD3SOMtu12D96b6AAuzfJ5nxrlnmgfmeRCIebfj6mqPtw6jjGiqp4TRnt32mpgzofbHpxSJOdM2DV3bYZSmVKLE6XhkGie0FckLKtd5zh6VI2nuV4ezb/tKSeRwh8cHPvnpT5mGgePdLWGeUDmiciLOE9PpKF87TaQUGceBQz/yOEx8dn9gDpHTaSbETA5F+GuARnq5z588YbvveP69Jzz/+AbjNc1eqrjtVYP1ujrDiQZ+HE6M4wgl1j7b0ocWw5WiNBhHrqPpsLJXQ652EZ9jMl9WwOcpRcJ6fW9Vb4UNl8FsxmgaY7G1LRJTATtRplmIS94RQmQqmTQHpkk0z1jFVY5QZF9lVSDIiM9jD3f3tjo0RTYRfOtpug06RjmvlJHA6xxFK1KRgQvdp
pXWDkag5pTrbF6pBkFkM9o6SEomFq3lAXW4feWqZCFnDsNICMLJSLHKX2qf8hu/FBSt6tB41sBbssKUWOXLtbeblqpT0J1SZDa60tXdrFaLi23kNPSE6YHYNGyycEp2WtE1npHMSKbdtNxcbWm844Orazrvue62bNtWplJV169pHEhBYXWi8cLpCVGS8HEa6IfTOfAGGQMYUpYWQmFlYS9Q89Ldku2s1rVQejGEKWvA/Y2EmrWWbDUnvR4k1Owj50KOpS5ChYRqI/8SZo4xoXUWeLkUkR9V+HYxt3bOSVWstIjhMxQsKDFccE0rxgtGiBfGSZ8mT1W7peUhz7lOdCkyvmqcJ+kZpITKGZ1mjDJsjWbnFdYabNPRWsXLO4Mm0xgwdSrSkjG+r6tceL0KC7En1SogpsTd/QOH04mH+7sq6RHYd5omebCr97FFrT6+0lcVHfDicWu0ZtPK4Ppt59l0Hms1cxhRWGLSmAzK1FZBTZZEbpHkdxhD13a0TVOHiEOOkRgCJSe5hzkL5JozQ52wo0uWbNdYnBNXsxBm+v4kLkxF2I1NIz00gT7/KmpdudRCHFzgMa0oWUgkXsseIRuCsQJnpsw8J/oxcOwn+ikIeSoJQzcvE7q0wnnPdruh7Vp+9KPf5vrJnv3TDVfPNsQSGNORkgo5ZrLJ6zSwnNJK8osxV/YvMsZxncurzw6RqyaSM9O9EmsuD55zr6wG3TUgv8f1zGdSj9FidpELWF2JRxFKKEQEco9KkxtL0DCrQtDQOF1hzoIx1Pncnq6VGdatb3HWU2IhTJHgItZIu6o2wMRPPtbDO5UKGSweTBUmLktFVVaOx2KJu1SGl71wsSVckhmNMYgaQSXmeYFSE2K0+21cZ8rmQnpUFULPRdpmYtlZOQbVUWy53VppcY1ayI1qsURd2PGiv1/20OJpnY2SUaPWrJ7wy/lvtZEer7FYI1LGrvMy6tEvra1MCMvkJAnCMaXVe3shXuZSia6X73hd/zO57VKzvrghrkYaXyHyfucCr7EG1zjSPBOCfG7J5MKciCGBzjK/Vmt84wTWqmzWnBPjOKMUpJzqGC2HcxalRCKgjWa72+KcY5hmEaQrR9Yd2jiazZ7N/gnNZo/xHdZp3EajCMTRkWepdJvGEcjMJGJOHPqeu4cHxn4gzTMmg5lnvNZ8eFP4YAuxcczNnleHE/cPd9ydMskVsonCek6Z9GvW51/nWoZTL8bfD8cjf/YXP6MfBj759LVAl/3APE0S4OaJMM/cvn1gGgdOjyfCMEMudDVYNN6jrcN3Ha7d1t+Rsdbw/MkVXdfw4YsrXjy/ghI4nR6YrMZ3oE2LswqtHZTCNA6Mw1AZ4JmuaXn+9CnX+yucsegC8zQx9CdSChinKDFxPNwxzTOPxwPjPJGfv2DbOnzbstnI3ORhPPD6zUvarqPb7thsWp4//wBrFN6797TCf7lLaYO2FufkYCdOlDiR5x6/3bLdtIxGS5UwzhzHxLGfeH3f8/b+wBQT/RSkzxdFq2q0xTjD1ZMbfvy7v8OTJ9f8F/6L/z4fffwhWUeKCtzev+Gf/+kfE6aJ4INwHQqYAmGWIRMlFqYxcupn5gTZeTKaqC1FKYTHVMSSs1orLn68SumzheTZ/V7e89LCRFUOw3uKvAVUFFtYZaBxlm6/oQBzkOTSpQEXpJLMXpOy4cp2xJQZBss0BxrvcDpitcI7mWh1c3PFkyc3bLzjZtfSOEucMv3jQEmGFCQR1c6iSMRQmKbEPCXmOeOsoXjxf8510HvKmZggJQjVDSzPAV2fVZG5BGIIpCJMZ5TCu6Ya3iiMFXJXwWDsDIxMU/ylfuM3dS3hVtee/2KEE2MhxCyzbGOtImNc++9K6RokTYWaRfMq+6ZgnEOrFmts7WXLWNUQEuwadNegGgfGUKrixSiNd45N17Hp2lXK+Pz5DdttS5p70twTY+DUiyrg1B85DScpkpJMpYqVGyEDa87EV5Ra+8eX4wD10lIxV
bFihWgo/Ir8GwY1S29eKoCFMVYx84WSnnKFiysDLeVcwXfpZ4oV2YVkhUtKfq14WcgOVXweExiHsjKeyzqP9V56L8qsNHJ5yg0sc0gvobSlYV+lRpL9FBSpzn7MOJ3l0DEGr7VM11BK+p6I5GCh5b+PS/pf0msqWhFi5DT0PB5OvLm95dT3zKNAbUrobOKzWqHopfK3ueCysBB902Kskz6XbyTwZgm8XefZdI1s/lbgpxiCsGr1eeMu0ExOZ5OIFMXgQ2ROZn0Pee0PlVod1o8lu6wEi8XEcJ2FUhaHmuUBuHBUqoiFWs7/b7n4FfRG/r5IzZZMec2mK6RVkD6lHGJS5S6sVtEZShXUNA2Nb7i63vPs2VOePL3h6bNnPH36jCn1zHHAWicj8EK1A7VUdi+1uro09liqEHFlWofAryjb5+RYF72tcv7UF17v009Ywaof1ogZhV3LK/Hl7awhVkYw2pALdNaRMozeMc8B5yxN22KdZds2eO/pWk/XOBpncXVf5pzXwQhxDihb1Q0XEPJKdFrkKpTVeL9UhK4isizmEuRzn3atuEphGU6BYlVqZCXT0t6pwFaC1ntb2l+53uuvWKDicj7/FgRkUUus33cZsNR550jrr94abSjGooypZWY5jwysFXFW4u2eFsJWyvW5URXxEETLOodzjhI1sb7OXMr6sfhwr5hMebfKlXPhc9K4C5LV8pq/CL35Km2U71bgBZxWtE4xR0Ws0FeIYlAe5kyMAJmixRB8TsvsTUu73VFKIiUx1VhmtSolhBYxd9fknDkdB5TWnEZhhTY7z82za7rdFfubp1xdP6HtdhjTiPRGGSBizJZiI0kf0TgUkaJmis64ztHuNpIJBlAxYvKAIhFIjDkyjjP9rHk8zoSwwDGRrBMxKwjvrxrIKXE8Hui2O7xpeBx6fvLZZ7y9veMf/5N/xv3DA5KPw6bdcHN1jXOO7bOPuDaOmw/Fho9SKJUEpmuzQ5kKw9d5odZqnt5s6VrHxx8/56PvPZMhFbaO6TJiSqAqlDQyMfcT46nn4e6euzdv+eDpc9Gx5lyJk4Uwz4zDgFKK7XaL90KiCiGwvb3ldDpxtbui1Q5TFOMwUkrBWsd2t6PbbNnuxOYPJYmarjKXOIvz8LdpoFFKqc5m4oftmhYfA9Y3mHkmoZhjZoqZMSbGEDlNM8dpYkpp9VgGqYaur6+wzvODH/6Qjz/+mA8+/ID/3H/+r7Pb7/jhb33Mbr/l9e1LhtuBeaqSt3lAu0jG4lS19MzSnrEWmsbRdp5sC9FmQoY51Oq4OpJZLbBeTJk0jZUVeg6+UhWfDzV4v/Dycimt8ZtNTcAyKmXUMKKUYlMP+P31nnK1w1pL04gBQtttMMassG89+ZETVfa4cw7jnFTBWqHIhH4gqpE4C0teW4NtG6ZZ7uk0DIQwV0ZvYpwnjDZivGCEfa9r0leC/O6SIqnkWvGKC9+iQFim3lgrXgIpZWKYCTExjSNjdXLTpkpd3v8S/9KliyBPMURiDIQUCCkyl8RcEqFk8WZWy3xsge61Eo0uLIWPrLuqFZf1XloFSqONGGhEn5lVAGfBGkKBNIx4remSIriJfdORdoITuqqGuHS0ylk85Z11JOvrfbiAjpe9BBhtcdbJaFGV6z1YTGCWsYwLe/o8cEW8ps9tgi+7vnOBV2vxtDYKdBHHk5wWA3/pIZSFCqcqe1ArnBfHklIyJkufL2fpDUrDvYiSJReKgjCLbaPoVCM2gW0afNvRNC1N2+Gcr1IVzbJUSgkDWimHCLkTdcq9aBqdxTpHsgJnKi1ZXSqZuWSmlBlyYpqqbiwvmZf0SGRY9nsKvKUwzTOuTVhgipHH04nbx0c+ffWK27t7GmPwRnO1i3jf0irDdbejbTd0pYohcqbEOqqoZuMyu1X6k85rrNVcXbW0rWO3F8mKd5ZN61EKYqwTZJZKCkgxEubANEwM/VB7ueUMTVEZwFGMNbwTspzVVmbMTjO6QOdbrBbyS
grStzHa4Jtm/XBeWNyUXHurZvW//bYvIVfF1crQWCHyqeWgqMjO0t8KSarduMJf8nO0FrP/pm158eI5v/Xbv8WHH33Ij378O2y3G26eXOEbx8PxvmoXy6rLDsFgQxb+gtEr41Rrcbay1mBLwRQt5J2QKoGqQmvGYq0D4vkQvah6LhmechUkxXvPQVgptLWQIqQa7IMY19hajTbe45w4D202Lc47bm5uRM2gBIlJpboslcIcpWpNK4Ilk2yWYSOiRlICBVuLRfgd4owVV51yLiJXKRS8amoRp6Xjm1T1KV5Io0vFLF8vwYgzpFnRmmWQSYqJGKN41pczd+WbLnnV0rouZ7nT4kp1HvhX7SHVWX4jyA6r7ehy7ypsU4OzlWluSjwZMplsjfTAq99ypqBCImvFqGdsyoTVsUut5kBKL5yfcwW6+oWryzO2IpQsuVdNeKidebXIVC+r36UCVysEIHt9QTu+HPP/0sCrlPoh8L8CPqw/7e+WUv5nSqmnwP8G+BHwF8DfLqXcfZWb9+uuZSNK5iDVz/JGzhqrUt/wos0C7xzdZiOZotWklDjcvyVUK0kN4qWcE7KjASW2hdaAdZ7Gt3jfyBB4A7pEShjIWYtkANGnFRxZNUS9IRQYp0emKRCmgTj1hBAIJZFSop8iJWXuXz7i356YkqFPliEE3vSBORes13UQs2KcM/+3P74D+OtKqX/2ddY7p8TxcMA2LbZpsN7z5MULivX8zu8fePp4QMUAWUzbRzI5RQ4xE1LB+Q3OtXUiVL0XFfaVZAOc07jWoJzBbFps63Bth29bvLW0TSP9duMERkPaf30zVshY452Vw7ESJJYPua+e1jekraaoBmssXbOBXHh6/YzT4bjq93IphCyElxfPNVfXT9ju99zc3KCNwlnpoz3cvybOAyUmHu7vl+jw+0qpP/6m97hC4Z2nazusAkuhaTvCODD2p6pXDDhtMXPCKsP2+hq8x3QbunFeYV/nPd/7+Afsdnt+/Lu/y2//6EdcXV9xfXMtxhV1OIg2Fu+lRbAQYOYQ0HPGdzJDepon+v7I1M9M00CIMwXxOs9FSYJZCXGq7odZLfK9iUzBaY9RatXO8w7yef7b0Pf8Z//gP30/e7wUTvNMmEbiPNN4x7bxMoFIgSmZYU6YAF32FFdodGRbOjSl8j/cCkPL/pSZuovVbE6ZOMqhKmYVmXnq6YcjWQlpK8TI8fTAHAZyCehK0HJ+gT4FGSoIgbKoTC6RVIqMGtS6BmUJrpS46nJLJfykJMF2nifmWYJuSomSNefm2a+9vvZ610WvyVepFo2OuWTiKTPX6l32ncjQKsheofPzmEPR7xZQF0MlCueWUFng3OXsFwMeVVtO1jqcrbp/Z/FOXNmcFRmqKgnxDkiQkyCLOQhiME/EEGT4R1oQBk3XtSjrYZqYh3G1Hl5IX2ddkTi5ldozKLXCl1Zn+dL21VepeCPwPyyl/GOl1B74R0qpvw/8d4H/aynlP1ZK/UfAfwT8j77Cz/v1VzkbWMtHOuuiamW5HPrLDVVKDqFus8E6R9N2xDCLLCaNC1x/gevLTFypRsX82tkG76VP5qz0X1UOlDBSkiIuVm5JAm9SDUFvmElMcxKD/3Egjr14NufCmBKvR7HzG25PzDEyR8UUaw/bCap1VcQxx1RI649+74a/9w9f/TPgv/p11julxPHwSLvb02y2WN/w9MUHuG7HjxMcTz3z6UCcBk6ngdu7B0JO+JSZY2G3aTGbG2LMzEoy+RxngfVUQpFQzkDnUE5juw7XOnzX4tuWxlrathNrvBSluq8P0cn3NQtWOOtovDjOOGtlvqiVwNs4uSfKtNhGs2k3fPTiI7TSPL9+zvHxQAyzHEYx8NgfiTmz2V2jjOHq+ppnL54BmTD3hDBxf/uUMA1Mw3DZ4v15KeUPvvE9roSV2nUbGudoGkecJ7RWzOPA48M9p+OBoCx2Cjhj2N1cY7uWdhY2s7EW37S03Ybf+/2/xs2TJ
/zWb/023//BD7HW0nadZPRa1toY+XrrfA0oMIeAmiL7Tl5D6CdOpyPDaWQcB6mkjMbaRlAlzmS9Um1OSWKSMC2j7CrBZFnRLw4EUjX8wd/4G/wnf//vf+09vgTe40mkUJuuJWiFzZqNBlMUKgRUSexo0C0k0xDKiFUF7TS+s1LlOytoiG/QxhBiRRzGxPA4iRFHEi7CaTxxOMkzfZhGUhJtfkjSOjIGrFX4RnwAjNMYIwdzypmiErnIAAWDk/6nUYgB+tlycom+OdcqN0RRHNTAu3jUK8yvWaX1+trrDTIw3ihdlQ2CNJqUSCUTUh3HoUBXYqsqhRRDLaiqveTiulEuzvSlEEZ6taXU8YgKOTeKVL6LQ6GzDud89WaWBMrVoSgydCbV4JsoJVJSkClEYWaehEiaFitKLbKurrPYRhGB0zQLwmNMVSFIq0DVxAzOffwqPpI20FfIgL408JZSPgU+rX8/KKX+BPg+8B8A/5X6Zf9L4P/+5Tfty69Ld5vVeYdSWZNcULupm7USn9TCfhY5UEEJhGedDExequaajaVUKCkRiwTVUqj9FCNZUg6kOVLmCSgkVXucWiA5CjjfkMKMc57sAs466XvVWcJJaSYUU1GcEoxRdMJzlIDrHNiFFFAhlm5jud6sa/G11rvkwjiM5JhEDl3EuE6rxe/UYl2Lphqul0yOgWk6UYDd/gbvjHx90ZJ1W4GeUQlIWC/6ReM0yjUoK9lEWQ+NvGaGK6GkskW0NljrxJVqs8U7j9xIMTKXua4KZ63IWozGtw2uaSujvcE3E0YVLBFvCkY1An1pTVaabefYtF7WwytidOw3Hce2xTt3Cev372PNv8q1wFlKFVANWms2+yt806CMkaRlM6CdF89s5xmnefVlds7Tbbe0bcsHH75gf3XNdretzMo6yKIedikn+nGk7wfGaV6h/lRnGM9zZhoT85SJoRBDIYRMCNWD10ZizCtRRleSXK5Tf8gK8e69IFddZvsLGrfCe4Vu09FtOt7Hesu8YvE/p4irk7hTGWZjMEojbByZQlbykbaZxZWuazHWstl266AHVZPBBUpURaN1xliBLZtW5IVt8swpQjD4nAkqkScJhCkVOUKSOFsZLcWBBE/p45baItOAtnKO5domWY1G6rMgRQcrg3hljFPHLuoKh2r9pYf+e9nfy9pQf+eyXvU1G6XBFCFALv7MOV2YfEjxs6CXWRw35H2mAiR0SthS2FiLBoI1RGuwSuNROCXEVKs1VlWPZ60qkVPVQJ4pJVXCWp0+lBZryLKSsxbZk/SCF8EulUC1kKp07c+fq16lzv7ZckuWd1QuLem/8PrX6vEqpX4E/HvAfwp8WIMywGcIFP31rkIdBi1TNxZ4eXl4tRHGs66BSimZHLHY1s0xYTuD311jYsBv94SU0SWi8lnnlnPhcOwJMZKUJSlDzAVjvZgtpECeZB5pmnrmOXA4nUAprp48oek6nC3srm5wxhD3d4xGc9rsSNMsI/jCTDSZYzGcUuZt1JxmcZ/JMWMLXFezT22NVHta0SwM1/ew3jFG7l/f8uL5x5isMdlgskVniyoWhaNtHbaDcUqVFDJS7j7D+obnz56x3zSUoonJ1IonyGZehoQ7i29bnNWYzksFrL1oRZUEcqVUdYJRKzu9KIXzYgX57OlzwpTYbvdiSZgKYQoSdI1l07Y02pG0p2m3bG5uUEqzOZ5E3zsr1BSkbWAbCnCMmSkXNtc79td7jNG0XpNS5PD2M/LU83B390u9xm98jyMEJdd4wIPqoBS2V9eUkolhIoXANE8cj0fCPHN3d8c0TRX6THTdhpunT/DOs7+6kTGI1mGMo0Cdu5w59kfmeeL1m7e8fP2a+4dDNWlxdShF5kjEppn+ITCcMuOpMJwS/SmS9EQ0WeJWknvWNB5rHHEOBIJQThclSznrNpeTS1c26kJKeZdp/vXXu6TCdIqkpFBYYiz0gxCaUhZXtThGSaLjQJoHnDO8ennPZtPwN
53nxQfP36l4qQFM0lRh2vvWita8ETtC0xps1zBOAQ4t4zRz+9Az9EFIk7E6TSUZ0OGcuOmJOYFYPFonZE/byHCPmAIhBijnwQGkpZqS8k8lhcoaVQxGGYx2KGPQzmByvkwkv/B6X/tbGYM2oJ2gHKp6GGsUxsqZXEqmJJmbHsNIzukCOlZkUu2FRwoSEIXhbbHV5evDTUdWitEUJosUxznjlGJjDBtjaKzBW03jNI2TvyuSnFM5kNNMShMhjMQwVbcvUbPMIVQDDdH1lqVNrlacW6pcLNo4tPX1/dr139dADWTEGvPLrq8ceJVSO+B/B/wPSimPlze4lFKUUl+Iaiul/kPgP/yqv+eXf8A5iVNcVLp6qRQruwxV/WmVzLlE4ZqGpm0pcaYk+X6tpHdjrCMVUNqhjcP5RiAL66XCTokcJsLYE+aZMJxAQehaCf5ospFZumtmpKVnXIhVF1aIuRALxAwhn6sCXVT9PkmgJHE8kxGQFsjXWu9nT5/KgR3jSlpapBdL3whTZSKlrL6lYRZmcE5RtHYIOWKZkrKwVgt5nYtcqlax1KojxURWkLNU2OU852y9l1rLQ2udMBFBMQch//TDiDEigUo5kTHk+lrQkn0qo+v3G2yxGANtY6RoHiPE6lZlhCzkvSUnRVMhbZnBqi7X7lvZ45dEjfVmG4HdjNFk59BV/hJCqIbvs2TmpQbemxsZ+t5tsNZRTfhqv0leZq6mDLES1ER6p2oipCAh2suQq2mG6EtTqoRGhJAocfQs13DWUFImaZHKKaVW39133+dyp+ufSvYHrNXB197jXeNWG1RTpWLy84WsSJa5tTkrYshMQyCGRN9PlfRXOR/vNqSXV/EOD4g6bKXoIh7ORqxQTR3KkjN1bKm831LEvUlpSfaVWiRbtZ+5VlWsBcb6HuvrKYXVDpP6c7UyGF2126agrJFA+OUV79de7w8+/Hit8i4Dz1oZIhIuVT2nUylrf5ciQVevwVfWOJMvkM68TscqSqpqowUZzAvSicbpOrtXG+wy0Ugtd63KmqpH87J0i8e1GHwkQszSTqj/n1Im6UxWn3dWW37vxQ2jbpuLBf/XUUd8pcCrlHLIDftfl1L+9/XTL5VS3yulfKqU+h7w6ou+t5Tyd4G/W3/Or39lShiVzjtyNNVarI7RWrIppar4WiQqm67FWkuxthLkNMa1uEbz/d/6HXKcONy94fBwi7OOrhPv1OtpJqaMbbfYbsf+yTO+/9s/FrlKnsnzwHx8y3D/ipwytj6g810gHjxZBbKaySEQjkfiPDOnTFSO43Di5ZsH+ikwjpOYfuQkZpAqU3TGG0VjoLEKq3N12tIynkwSpt8F/idfZ71/+IPvl7u71xwO9wzDgZQmgbdNIcwDw3Ckj4kSC/d39wxHsShUQ4+1lv7xnvF4QG5/C0t2Wm0GU4kELcPXrdG42BCd4aGBOx3YtB7HVtxkmlYyxQKagrV1zJpvaDYdftNx3x95e3jg6u41hzSgjeLTTz/hcHiE4ig4rq6f0u2f431DUoCr83ido/GOp0/24uX95h517LG+ZXn0pF8kbNfGOax+py+mvpU9zruQ1BKWloCElulRvurJc85sd3thxxYR58vUH5lBaq2rh1x1wK3rq0ym7VqMFXa3sD0rgzpGcpTWwgT0pTCcCqchCuwcIRYtfsSbrcyyjhGUYr8V1v9oR4YCcwyMs659/xpiayJ6xpnPZ5UEk3U849fe4ze7tuQcaTphLmsDxtVflDUUhXYepzxjzPSxiP9WkYN0YQmjBHpXajGpVhVyLytUKa+7BtR81oTmqrwIITHPESFt1qNVLUmBqYlBQSlh6ppSlQwxUirByHl/aRYs/WDZLIiW3XC135FKoelExpNKnQhUk9xfc33t9f79v/bvlqSoo1blPtsaAJ0SKZBRlSRWMjrL3jbWSzDTstYSbmVWUaowb6pPRoiB4yhckjHMpJJpn17TPrmicZ6rzRZvLM99S2cd26bFW
Qul0A89IRrIDms1ISuy9kQmhqg4zoU3x4nX9yde3p94dd8zjDN3h5GQEolZKuw51nGOdQiEkiyo5Fz3Vb0nCLqzPPTnAP3rr6/CalbA/wL4k1LK//Tin/6PwH8H+I/rn/+HL/1tX+FaGW3nyk/cW5bgq8RfVlGEjOSF2RaUFtM0pVBGmv6b5gZNIseZaezx3rPd70T7FxIpF5rdNc32iu31U65vngg79viWPAXSPBD6R4F+apM59omiDSGPzHmQG1H7YSkXMpopJo6nkTFEYoiryFsvma4uWF2wtR+hde3p1Df8f/p/fwowft31zjkzDCfmeSCEqZI+5PelFAhhEhguJHGQmmdiijJT1Bghi82TZKk1SBVihRSjwM4qk2MhGc1kFDobxsEw9GDIxI1HUTfa2geq1nHV8cVYi3GO0zjw8PjIECfMxmGM4u7+jnHoIRvIFmUcc5iFsasAozGNou0sXddw9ewGpTW3Y+CU8sXrPq+xUUKo+5yc6LeB/+Tb2ONAZf7+8gOqKvxhKFWuU2i79oIY+LkGKue504tlS6lv1NaB3Us7RlezDq00aakCI4S5MIdCCOI8lGpbQGlx19I5r8+WrwlOjpFgTfUkrjaSXATcd17ju58rpfD/+Qf/EN7DHge5t8YafOurHEoKrDRL5aO1RjvNXOU4Ob+77jlnVNZVPwtLDzKls0EDn5NJrbehFnN5bZNVi1qzzCleTFEW3Wc+D2xRMqFnmdwDFflQSH+yJmZrolYkAPhG2ikYg61zl1XKWBu+DGp+L+ud1bkAV+psiqHRMoYPQQZS7UUXqv5fF7JSoKofsjSwK4J2VjykJAhPTolT3xNTwu46NAjZsO1ojGXjGlpt8MZitKxUCAFKJrjK+ylQlCEVw5xhTtBPkcMwcxoDpzEwTkHO6phkHC2KUPvAlLJW8pf7jSXgFgSWvtjjqiaXv+76KhXvfwn4bwP/VCn1n9XP/Y/rzfp7Sqn/HvAT4G9/lZv2pZdidTharlJA5cU+bflsrsSKCUrCbq5p2w273Zb9ZiNkhVE8kxtnud5tMVphK7XcloQGOpPZeEVnMq5M6KyYxgN5OJL6R+LpSIyJYQykXAhKvJldk/CNZEVlmkgxc7o/cjpMjMej9BJC9SytIy9UAau19HK9Ztc5vBMzgpIhlszP7478058+AOy/7nrnkjlNI8fhxLE/koqi2zTMKbLdtkxxZiwTcy51nqWXrHHspU/6eODx/gGlPVoHOQ9UBlXJEKTq42spRjPrgo6Gw2PkNvfksOVm14ICXwPNJflm0SdmROesnRiQNLsNbtsJvT9OaG94eHvg/vUt4xRpdz+h67Z0TuOMJRZ5gGIBczihtKaPkYBC54IqkZzglKN4Ipe8ajwvHqhnwH/tW9nj8CsgwfP+PmtjL6FwgfkX6LZ+cv36UuTwyUie6p3A792mYb/fcOobrK2JV5wI88SucWw2DYrM/vkWPzpmFSlDpttbdlctKSZMDzll5uOR+eHANAXGYRK/6KqJx1U4kQVOByjnClcpcsm8ff2GT37yE3gPe1wphfW6MoLld8WU5QSvZhUajS7SClJIe2q/33Nzs2O329F1nZjAOFnPoowQAhEi5cIoJi+2jmLqE0Oqwwpk7OQ0zUzThPfS7lJKY52pA97lLqUYCVlkQFOYhQVrHKjaJkhLYJf34Lx4CegqaUmprG2HOSZCFjezKUZCmL/swP/a671yu8rnKr3lvhdhkhukSLLaCvRboeWkEllJwE1C41mh4aQhKshavj6ROcyT+Cvf33Egses2EDOdb9jdPKVrhVBoranzeJuq2RaLzWEcOfQTdw8H/uLnn3I6npgxdPsnfM9vefriY6YQeDicCCHycDwxjhMPhyO39w9i06mX1uZ5HcQgJqPRda/V9aktuy+7vgqr+f/Br+4c/Ne/9Df8a12VLXf5Jpdmjare2EuHpCTISiq5EuncU66u9+z3O/a7DQo4jgdSCLTW0ux3MoM3j
FKlUkAVNrZw1Wi8LTRlhFSYxnti/0g83hOPB4Zx5s3dgTkm+jwRSuTJjeHZU03JiTT0xBA53fY8Psz0j4k4y5i2GAtRZtihs8IbRdco2saw32zwTiYw5Tq/8vne8d//b/wO//P/y5//cSnl3/86651z5jj0HE5HHo8P2G7Ddn9DIrPfd4Q0o4KYI3hraZynJHHECTFyeHjk/vYOrT3azLWPXVCqiMmJRgKvsWStmVVGBcNjzughQQ588PxaSDXIIVlYskhWZ56CGKVob2mbHe1ug99tMFazIeJax5tXd3z26ac8PJ5IesN2u+e3f/gxT2+uCSlwmCeGlAhOWLfHORGK9B51DjK2e6qBN+fqbPNOxfuPvmC9/7XX/C97fXGlsgTfRaKz9Jp+1c9YvkekDd4bctFsty37qy2HUyXBaUjzxDycMDd7ttsG6yDkK8ZppM890QW2146bm5Y4JwyZMEVu395yOgzEmGXYPIqkZTRgacp6AC9cgqVazHWkZymZJy+e89/82/8t/s9/73/7tfe40uBagZilkkqUmFBorDIXmtA6ShRxU7q+vubp02v2+z1dtxGI2tbgUFn4MghFJp5NeVr7gyFEQkjVlziKKck4Mw4z4zChtcN54TA0vq33VjzHUsyVKJcYwwyAbQ3a6jVBKaVAHfzQNA3eNazKgCITlUJKzCmRSmGOkTHIUPf864k9X3u94aLivaj6QQyPNNLjtbXPixFG1ML6DUoRRYe27uNShE+QkMCrdCHrQiTzOA30w4Ahoocj15stKhX2Xcf3dntQnbhdWYt3jrZtJfC20oKM5Z5DP/H67sC/+ouf0w89225Dd/2MD7Y7rvZ7Qkwcjyemeebnv/iU+4cHfvGLTzkNY7WwVSvcvCST6wAL6W+uObJwY34DnauAi2RerY17EPKUyknmVua0Qms6FaZxYhxGfDMQplGq41IETtS6zoAtIvbPYm6RKRh3QGuLm2bEUbVwur9j7o8cDycOx5EpRMYAsSisb7EGNhvFtoOSAiHPBIqYbqiM1ktfQHq6wr4WZmfjNZvW0niDM6IXToWaDCwH7XtaRqVxvqWgmENE+UJTnYesFWu0hZxmtMEaU5mXUFImhsA8zRircKSVRKPUpfds7deUQpiDjLQzhWTFi3mBktZ3tfb6zoSMZUxeqjxHGfg+U4pZ6f+L/i9G0TEa64Rdqg1pzoRRqo1TJcFF5UhovHFY61FkSPJgrBZ82sAXBrzv0iV74hyYvwCevvj7WYK3HIxn0kopZ2gvperXXD+M0lztr2jbluOhp+RM6xsZVmEVjXXoJP08g5Cz1PrzqZr7CylgJe+dp9NokcMsWvz3tToVIRNDnDrjFcMiuVnfa5KhKQuatniAp9UtSi2+CPX71EXlXmfoqvP7Wt/bavZzHlovJhfzav0o3AJpj4mzlfSS27aV58Ba0IaYKjJQ3u0Tnte0IkYXKMKSDMh7Se9zaX/FdTYyWoLpwrr+ov8WZph8/flrV9JTWbydxWITlQWlUpC1ojhRSBTfUHxLso45Z6aUGOeZcZ6IOUkrxQpR0ziHMRatNfM0c39/z7HyV7TSPH36lOurKzZdx3a7JYRIt91IZZ0ivm3oh4GXb14jOl697rHLmPRusrxEXr6Kf8Z3LfCW9UNp6XcUJSw+UIQsA81LTpVsAiUHQBPLPeNUCHNiu9vjnMMiPYGkLUk7Yhi4v72v46EmUo483j3Qbl7irGfTblHA1B+JYebxoefxoScVzYRHW8+HHz1ld9XxfD/zwfVMDiPDA0zjxN2bnv4I3hWsA0NB6YSi0FrxId1vG55cdTgrla/WME2REKq4V7+/QGCsZX/znFwMh+MArmOn7fpe501mOgRmHWTCR9tRcoXUQmLoBw6PjzR+g9o2a49KAcYZsNVURLo19GFkUoVNtmy1IYZ44eH7LudvYaQrrZjGif5wJJZELJmcAqdth3WGOI3kGM7953ni8fGBGOXQc9YzTYmHNw/0w
4lPP/sFMSdefPxDdtdPuL6+Zv9s//9v711+JEuy9L6fve697h6RkZlV1TU1Pc3RjEQIGEASSBBc8Q8QuJGW2vFPIJcDaK2FtBCglQABWnAhQBsJELcSwTUhQiBHGA2ne6q7p7qe+YgIf92HPbU4dq9HVldNV3VHRkWSfgq3IsIzwt3c3K4dO+d85/ugJPzgKaWynK1WuB9Ypei72/dbE3NdUGrxaWE8SvEkSh6mgB8mxn5k7Ec2mw1/8Id/IACeYrjuXgupjG1JKuPWmmAiQ3sg2yDi8aWCY6JQ+Ll06pOcD13eV+dj9ZLNMnMd816mRmHmVLNGDpDaSOlmkvfqB0+cRH3LWPld7yeGoWccB8ZpxNrKx65OchtgKyJfDnhZC4hKEOCpElrUlPP8c4z0x4FhDBgjEoVKq9rLixzKdaHtWp6+9wytNb0PhJTIUT4r6YU1Ffw1HzqroEtKC8l/jLGS8kS8D5U+8u16XnGlJ+RvIhFLJJYo7YOqkFTNIZMoKix/V4pEsakkuWpte6bADOFIiQONbaDdkJwjXzyBNqMuVpj1iqwVh1zIPnC936JS4IPfe4ZuHLbtaDcXNM7irOiw325v+auf/kwcb4h0Tct/9p/+J/zxf/jHS4ubD4Hdfo+fPB989Hvsdnts63i9uyFGaf1SWrooMLUFxdwR6F3KZ2UpKf6m5f2oHO8pGJoLBqf0mbS8zItwVqwROSoopJBJVTEkjCPkhLK2tsGcUiOilZmX9gofAnqayFEQeABh8qQUiRmyshRtMWaFcRbXdMICZAuaKC009aYoeVa+gLnYpatCims0zhjaRi4BVkn0+EZN7B4jMKU0bbtCVUUWOcVLCsgYcwdgJI/N4JsFTCD9EJI5qNEp9b3d+YiWx1KsVHhZQzlpar5Rk1w+1FPTfa6OIaRIzBFvNeMw4IIlhUlI5ENlzpqdSJLmUV3rnjEKe9huuyOkyMXTD+jW8jfamFo7O0UJc7TyWO3XUs/l7rflG//t1JIiC37WGE25RrZVgKLkygV9514oQNt1KBRd24n6lK7I7zzrjp6IJZacdzm93hyJnQAzRVCtqqZOa92v8B1Cgu9jC0q61CFJr22qZBbCaSwMRfNMSYQYiVEupSQt/2bO6cThu6z15fGaWaglsHlKNEh2IUUoktZXWaGtqqLp0ponbHlOCIDCzK4k5SHRGDfLZzlHubk+9/xZz1zQd0kgvk9Ly29tav68Ye7dzvUQ9mv/VSTaDNqf+7jvXst6qbV0o2aglQJtwWSUcULOUw/6qdZSY4qSCa3ELtropacYJAvXH4+Mg7SFCtGGprFW+qK1wmTRAk/WCvbDmUWUQshLyhLpll+LdJnfWL0V6hq8B3DVg5rSGaUjSkUKsToyqej7JMCEmTsYNI3pMNrR2o6V6yAEXn/1OcZa2m6FrSkHjcJj0esrbIyUNpCzsABp5yjKMOpKpH9xiVKK5x9u+LDd1NRsJ+mPcEtJI9vrgZtffYGfRna3r5gmz5evBvbHwO2xcJwE4XyxWaON4fnVhk3X4moLEfXmLCXTNZbGWdEUNu7enG/TtHz0kz+iaRoBCTQtGIuySdiRQkQZJ+hDZXC2o7GRVeMgRa5WjucXLV3XcXm5ARTjICm0xhgaZ4iFhcUozBsIHbbpcK7DmhZrnKSmZ0e8qHzIYWMaB477Pf3Qc+wHnLPcvLwWRaN6MHn96oaxn2g6Q4kBUkTrgjFyuslK41PmdrvDe8/zD3ouLr2QbNmWlGE/eKah54vPX3B7/YqXr65riv9dtlL/X1N31dHmnDmOPSF6trc7Xl/fcHu7o+89wxgYfKKPhe0YcIcBujXFrtDWYLoNthtprQjA98eB4/WRcZpE4EMVgoFotYiWzECbObVb05+qiKIRCjSyMRoMf3PHy/eznDLDvmeW4hOpJdkwc5A2oDAFUogohExHq8xxGDAWtts916+3rFctRl1U3u/5kCF/o0oiETAkGiseJyUFtY6cU0vnC
teXDSY5AT2FIH3WnSD312uRHHRW+ISVkcg8pULoR4ZhFFKTKYAxqM6hFVKzzrIHZio4SUtkOcXAME6EPPejJn7Dfv87m7zrsqyxGCemqcdPAylM5DSRtUUpQympkmOcAJUhi3Z5TAkfw51ShKDsU4JkFKVIAGB0xuoZ35Nr1kBhrGRRtNWVrapyIdTPKwRPyZn+uGe3vWF7c8Mvf/kLtFJs1i0ff/xTulVHt14TU6LvB7wPfPrFl9xud3z2+RccDnuUNqzXG0lhW/vGRJR6gAO+syrRbI/P8aqMqsjZpR6QhXYwlXxKUSQ5GSpj0coJsb62kDL9focymhgT1jU412BtQyoG5VZok7FWnJ4xFm0tpShikfSBaVuUNayfvc/Vsw+EmN+1lBy5+TIyHDyH3rP7as80Ddzc7PEhcHtMDL4w+EIIoKww/ThnuXqy5uqiReW8oGz9JKpL1hpR+jEWY9t7m0ttLZdXwlMMeWFbUVoL05FrhGFGkscY7bDa4bQlGU3XGNatYdU6LrpGbn4vtZi5eb0U0OUURZUk6qPGCDGJtK+YJVqWBMaJCAWoteSR4dhz2O1FrnF/RCklotlacTgcCT5iTKpRuPAHS/qy1vBzYRhGAZ/UdgQByRiy0kwh0U+e292eV6+vORyPDxEf3Iu9EcmUrz92ii5OVHgJ772IHwwDx2PPMEz4EPEh4VPB58IQEscpcBEy2QiDkrJSVrGN0HMaL0CeKUZ8yQRKRZ9Wfd60pKZ+rcY7472KKuJ8s6IU/c1Rw28zL6UQJk+MuRJ/QI61zaO2hKQa7RolbXzR6krUYhjGif44YJQix7Qo0ggXRa7OWjZzRRZhgAzOiaMgazatRWXLpjP41qBJqCT62yunsM5wsWqXvcA5S0YIdXIRVHgYPDEEoXflVF+W3u25Uqqq6k9F/WZxXimL8IVQ5r7tFT0TYgiPfk4iDRhjqCIEiZxFtWzuOz8VEMsSmacZZ5Pz4pQl4p+zmJKJE+4GvSgi3aULniNcrU/ZhHlsuWbFwjQxDQPHw4GbVy9JOfPzjz9mu73l4skll1dPhGd78vgQ+PSzz7m53XK73TH5CecaYZpzroJE38wjz2t91gyHbwNKvmmPzvFqXbCuoI3UB6TBGnJFJTrbYVUmkdDKLMxEbWPpukbKpFZyQzklfBopGVKWdLXtLmXjr5qM641otmrrsN2mFumlxjwrBqUQ2d1uCdPIV5/9iv3ta443r9i/PhBC4DhCzoZs1ti1Zd1o1Fpg7u26qXSFtqaQqIhFjXMiJB9TWXon54bs+zIhVZDFne6AXZw1VYhAHNbcsw8K5ywliwj4Zm1Zd5bLjSUlGPssm0NVhdJKYbWTXgAlp3JtbGUCE+EJVzmYZwAgsNT6TL1xFjFzVdPenDiiBcUxK4IIo1ZOUVRe5jSWsVAR0nMabuZmTTETQ2QcJobjyPXrG7788gXb3f5Rp5u/3cobThdmgE9hGHpeX79mnEa++PILDscjL1695OXrl2y3O3a7A+M4EmIhFU0/RtgeCBGU+jOs1ly/fMnQ96y7NRerDcMgXM/T5OVwJfUTtJXPw1bUOup08i+5nNoCFTVVff9YNoWqJCK1fzdBsUteEymLyGbcWDlMto3lydUl65XotuZS5UezZG+SSizBS21P8jGdRO6ZiTcksh3GgWmaUCBEDiisETa2yyeXOOtYX6xxzuHDxOF4AKUpxpJyQRnRok0UyJGiFbFynDtjF+IZWaqJoR/x3jMOI2HyNdOgKA8CrgJVEiUFcozk6E8ZKIrIuaqTq5XPaIEenXa2wgLGk19SGNNQLFjbYlVDKWDLJCWC6CFkilakrMkqL9TBWivB01SueZUVYRyIMdA6zQfvPUOXyKtnV0zTRH/YE6Nnu9vS3ayRI5b0Dx+PR0IIFRh3SlkvvcblFOXK8ii/9v27p8erBNLvmoy21fEqUfrJWVdKR0sxmWIiGk1jxfF2nWO9btBGY5wl58xuf6zSWQqTN
LppcZsnaOto1xuMczx/7wOevvc+zWrD5tn7tYG9hxI43rzk8Pol/Xbg9vVn9Ps9n/z8Y25fvaI/DBz3R0HzKlDasr66ounWONVwoVuMhraVhWjUhCZQqN1RStM0Et1mL6mwgmh83t/mpEhFqOzm/saURSrLGWFvUkqfiHKKAqVF95bEZu14snGsV46rS0cMhcO2kHysOsVCrGGNON7SaxKgrD3pGrerRfdUNmMWh22M6PjOKWdd0aNaaYyqAgZFCb1tZSEqhSqd52tdSW5GnKUYIzSVVBTvDCwKkTBFhn7kcOh58eIVn376GYMff1P7xSOzU1p57mOnpvGEwCWxP+z4609+yW6/5y9/+lOur6859j3HoWcYR25utxWUk8kYDpVI4NXrLX/9y0+EWSyLs3pyccGTiyfEENnve6mVZpE/w5jawwi2Ri0FTkjfktFFV+zAXBf+btHA9zGlFNY2WFszN5kaMZ3qrs4q2SNay+VaZD+fXDQ0zmIbV51u1fxWBaKQXCxI+lzwIVdJRQkCBGQlXL/9sWea/OJ4bT1otm3L06dPq2KUsIhdX4/s9jtJNbdrGbMxmLZBlwTJUJQ43pyhMQrT2KoGJeu6P0qv6Xgc8FOQth1lKKHw1nPNFFSOlOjJPpC9F+ebAkZJWtjU2FzN9U7uwAHmZynz4bhqdKPRRqRLnXU43VFywuV60A6RrDxFa5LTJC36xtKPrxYZwBImklL44YifJjpn+OhH7+N04dWLZxL57m4ZXk4Y14jAS92vlFL0oxea2pSqIMhMe1QW/eFvWsOz0/26Y/42e1yOFxFCsK4sVykQ9QmYYbSlkGofVSW00oWCFNoNgj4TWP7MOpNr0Vsa27Vx0hbUtLjVhnZ9gXXt0ncYx4GcBvrtLfvrVxxut9y8es1wOHI8CMXY6ANTzJIRsRZlDbpC2ZW28jqqoLWkRCXSTMJDWmZ8Qu0bTCI/9iaA43e3kgsheGKcSCngGifAsVxorCW3DV0jepY52KrOdEoDhhDoh77WtwvGgmsNTbZopxfU+RzKKKPR2YiThTcUOuaWlhOj68nJzuQdbRMIXVclv4QScY6g5nHNqcvZJC2oKwPWPH49v2glbBXu11ypEiViTu9mfXdxtqcbvZSCr1Jnx8NRItv9jt1ux/5wYPLTgnhtuw6XM7bpBD+RpRUsh0Doj1IqqHMT9MRUekmjhUiJGVVRm7pIZyrIvcmsmVoqgCbn+vmfaDkXBZt7jnpn7m6t5XBXEpyAUQXhOJC0Z0iRQmaYIKVIP3a0g8MYzTB6jDW42lyQkrQuxiiavjFnhjESU+F46On7kWGY2B96vI9MPhGS4A60sRSlJVIuULTHRI2PsXJII+sSVTm4y50Y8ZS6TLlgl4NBrq1IMpczSIsq2671/bck/ppVABg1QDBGCQd6ThitySrJnH+NZU3SzqfWq1OkSyV+kj1BWmKlrEgpWGMpTjiUY8mYUlBJLqMqFafRlT9fnLSAqkaCH9Fkutay6hwX6xWqZPpJWpC0rXzv88EQtewtksY2JxbFmrkpyydU6V7vJFcEbFa+09nnUTleBTRtYX2RSbGw3kBwQrQfo6RmG7siJS+9W6pgbMYYCHHi2B8xxuJSB6WQU6i1GanPaK0wrsO2Hd3lc5rVhsvnH/Lkg98TCrxpJPmB7VcfMx1f8/LTT3j56Sfstgc++fgLxmFivz8IQ02KTDHiWsfFxVp4hy/XNO2qCjQ3gsb1npIifhpJwUuztzZkREUm51Il34Kg5u6xnSilxO72ln7ombyANxon9Yqn6w1lfcHLZ7fs9gMHU5imA7kElDFkpdjutnz2hSZl+PCj38c6y5PnHavgCFkRar2OLHVW17RSc7EiSp8pZCWXQL8BpaugtBBoOOfYbDZcXT2hbVpWqxOYDaDve4IPTH48CWLc7aVDyXOs1/SrFU3bkGKUtH7OqJRQPoD3pGEgDj3Ze0hRasXvki3Z5ZPznetk2+2W25sbvvzqS
z7++GN2+z2/+tWnbPc72UCMwbUtP/rwmQAPXYvRhjB6wjgx7Pe8/OwzQhjxx5HkPeNuIqidIEaNE7BLlbyjVAW3Ugsk5YRklRpzbY/Dsoia1AjiXqNepSja4NoG55xE/inWGvPsvIS2NRVPSFIL3B2EcnYGKD05juQsEpRdK90Qs2jINAW2x5HgI9e3B8bJs9/3HA89PkSOx1k5SjIy603Duu2IBW73wgKnKi1sSsKwVnLB+0AGppjwMRPK3KaTSX5a6CGN0aQoxB0xBqzWNM5A19JYR8lKlnKJKDXd39x+g5VSSDGikANG1zU8e3pJPzj64QhF+vHnJPPMVhdj1euNkZjl+7mOXWHA8rsKUhGOZqMU6/UKrTrGMDCFAZMLpiKfG+XoXMfKSQBhdMFPe0pOHPevmMYeXSaeXXaYfMnxow85HnuUVmwPR5JWJK0l++AEwzLVrI62DtcK7aqbgVUaMtILfGr7VHP+aelo+S7H+UfleFGgTbkT8UrAIhwYtYCuDSXrE8RfSUtOLgIoQWn0TNxQo8ySUi385+WFrGtwbSvpBmspKRGmiTCNDPsdw+6Ww/aW/e0N++2Rw+7AOHrpuU251mWLFP21llB9dghVFzKjKvH33FOZBd0r7a/LCXaOOqAsabL7sIK0UsQQCL5ek0ejcBtxgI0TRLV1NeI1s9akwscoYCU/VfIBI7qkCnIU36VnDrk54sUsJ8MlnVRyzTfp2m5R+U9rH50xVkS1bcLaJIQmTnpsrbFkkxde5W/atOU5qjyZqv2izN0YMvdkST2XVBVi3kLa84ewBUlcezmnyTONo1zeE0LAIAQuSiuJeJ1j3a1xxjGZgQkF3uO0oShDqDnVXPVMlTbYRtb3nHlSqqBreULN2ZolarvTKrKcFk6sWzPA7t5sjk6MlbVZpNZZYClPCBtUrfki6VCtFNMUGCaPtY7jMOGsOGujdaWHTIxT4Hgc8T6yPw6Mo+dwHDj2IyEk+kGoGosyJ8wEipKLkEKwNAPV/2b2vRlbcadmXwNWYXOiMn6VpSSiEHEYmy3ZAiWREEc+H0TfrpUlU6Rqucg5hwthKRlJ99iCpFzezyw0MbcQzdGumt9Z/UFwcVVJyxqMAZs0SWkMkkMRvnW7tP0YrVBKOOgF8DUJnTAJZwWk2TpLrOA2W6ULmbWMlZ4dioym7k16lp3VbyqZwZ2M3gIOmz/FE5Xmt9njcrxA0yRWaxGhX28i1kCYFMFLnUbQbCy1wqIq9rlU5ZCYyHmglIQfj+QUiP1IZEt7MXBlOlaXT/jx2vH0+SXOZsbDDYftls9/8deMxz2vPvlz+t1Lrl++4vrFS4YhcnMMpAjGOlzjiMGTvUj+HYeA8YWU9zTNyGa1Qm/WUmOsaD8h+NZVwmpuvxHOVaMKrdWCofxNCsrfx+qCl75dR/CR21c3rNdrnl1e0dqGVdew3rR436CtQVtH062hwDBk/LDF2ktevLil7VY03UYQr6pQjNTTTBR1GhutRLjAFDzDNLI7HOhiy2q9xtRUPsjCbpqGpmmWGyFWUnRbtWWF69aiFDWldkqtLgxCtY6k55RTZd+aBdtLpRGMpXLklkDbOp5cXsBw5BimNwASj9qUWg4wVF5mUwXJrZW5bJuWtlvRhUi3XhFyRDcW7SztxYrN+5e0Tcvl5oLGNRyubyhlIiXHkycbJqfJQ0/xftlZFJDIciBr5aCqjaExRmphw1iBRrU2RpFWOU09OauKTz31sd7blGiNW62xbSullEqSv6CjihxuZycWgzjc1jlBz4+FXEaOfeZwiKJ41rYYrRmGgXEcJaodpUSTqs/wHmJ2hCRRc577lcnkPuBzX9teZEMvVfRAdNQrXqFWwpyaAYYZ0BStqBgtcoyMZcQ6R7da4VyD0o4YE9vtgX6YSFPEB08ob7+PtxQIMS7Ny65xbC7W9b1Jye8EqZq/vnmwkAOCppILkoVSroJfpHNliGXpZ7ZJ47Tlcn2J05qVlbTxe++9z
/P3n7O6WKOtJoaJ4/6GGD39QcCwKQQ0AgYb+yNjL/S+cwucrhrWuabzU+VikAyeZORcVfcqs/CYrhe11ZUTRkac72/ewx+d4zU245ooIgSNIPuskzdl5uqgVksv4HLCpp7+K2tOLgkfRlIMTMkzJUXE0D7psU1D01g2m05INKYj/X7L6y+/pN9vefn55xy3L9ne7Li92TP5Qj9JbXbdWqxTUsiPmoTC+4Q2slBSjFitWHdO4PZZ6mWqbjgzlWQhk/OMnhMyi1RJBu6v0ivHfqW0nOBjoj/0aBS6FKxWNNbQtk7I3Gd9W9uSbMIPI3Hy7PYDu/3AKimethdYbSuCUd6zLpIiEu5jI5F2SsIhO4nuadO2kllgPgwIc5WpJ1bRM01M01RLswVjZM6UlV7suQi2NN3Ptc6aytTzBlad7hzRzgJkuSQRSHCiZDREf0/z/IBWne/MTrX0NeqT7J9zcjh0jcO1DcoZcBrbOdrNirZrWV9uaJuGMPX0O4VrDN2qQZXM0RgUeglC5o0JQDcW0zaCa3COFIT4REVFzmk5EMxZhhpuophpVGfQ0z2tcaUka+UatLXoUiDI5iCHM4RPnLpmUhWQUELT6KOUpKYpM/QBozXrtsNozX6/pz/2+JgYvAelaNoV2hhyhlREnSlmRUp1F1KAj6Qi8pONalCqRoA1ypbDUj1ElXokUUXGWQlGci2NS39ylPJVpUJU2hJjoh8DOkRKSCILWN622wWQliAh2pFyUdu2eO/r0rxb15XfP/18hywIZI1R3ljLqLkOHFGA1Y5cNE3T0DlHay2btuNivWKzuWC9uaBpW5RRJJ8Zxp7oJ6apKqulmmnIkeg9wU/VP1DLVbJW0px5qHM4a08bY7BWsmmpdlGcar3zoWI+N9THTvHFt9qjcrwKYeRyTtE4aFqgFJpGWKpyHImxkHMkZUkZpkTtG4so5SXIV/lUzIcqBg4hTIzDHus0w+41/cpyOBw4HA68/uolH//lnzMcDwy718RpYDdGDlHhEwxVNcREEWgOcZYAk1rHSSTcVcckqWZJceY7/J0nns/Zxeql9gXxHsuOOSX6/a307WoNKTCGEaMKh92OHAMlhdqSpRb9SWccxbb4POKnwNCP3N5smaaIdRd0ncYXATtMqXDw0lsd/EjJicM+clMmjCrcPt0Q/IrVei03CHNGXi3R6bwHTT6wPx5xkycXYdHKlSLvOI7EkqRn+O5NUiAESYkPY+1TjZFEIWtF1sgZ3GgunmywFoz+Cb/34VN+/smveHF7+51QiI/JThuVWqLS1Wq1oET/+I/+iP1hT8iJ6+0NQ5jow0TJmWkaKSWxBybrOBwO9ONIDAGMRllD0YqsqgbpctUDbk6UFGWT1GoBYt3lap53oru8wsvqV9/C/PPbWinEGNBK1q/gJlSte6qKQK4a12WW6tOUIkxVKZ8EWXRNjcZ6XhjGyO4wMPrArh8oKNpVqEQKcvfGmBh9qOuxRtmqSFtjLiikbXFudLNVeMFowawotPQfZxjViKpp5VQEnCaOVqOVwXsplw1jIKYkpYTK+JZKulNbfcumWD7nwtyhYOrhwBLiore8AKpmh1YK3zDGClgqZUm7zyvGpyh7Yk6EMOGsxefImD2/+PJTbvstr25bfvV5ix979revySlQguj5NlrhtGZ7u+fVXgBxu94z+IBFgKWyJ+fapRHQqiL3T/UqqAeDu4cJmQO1BH/zeynfIWv5qByvAG4UjVPERrHqJDpsuwhkhlTw0UvqKM5vXiZNqwCMNQKoII9aWUklSS+eHxiON2gV2d98SWMiX335BV99+SUvvnjBn/2rf8M0jnSdwxrNYSocvCbEzDHKBqJDJhXFFBIhFkyZaTuld69zrehDzrWNXCqS+U4NZzGJBGyNNHWtQd1X6jOlwP72BReXIpkY4sR47CF6djevCNOaHANdY3FGLzyjjW3RDezzjmn07PdHXrx4zXoz4Zor/FrjSYQipAq70dc2kogqm
dukyKMiR8+zi5bpYsPz955XcoJ62FBqSQ1TiRiGaeR2u0Vrw3EIAuWvczGOPT4l1B0E6Iz0nLxnfzhy7HsG75lCFACL1iSliBSUMzx974p81fGTHz/Dqkwxiv/33/7snXO8wAJaUUqEPTZrkbfbbDas1isOxwMhRy5ebfjq+iXT9StKzgx9j/eGME0YZThutxz7IyUkaRFyjqw1uZ6QTKUuLDUaISdKUsuGKyjxVMlKTiIMJyrAvGxE8z52n463FBHVKLkQQ6wbvTjemORrSoqSJL1pMKA0BUMpWlLH9eCnk9SDQ5KDx2HwXG+PHIeJl7d7CtBt1hhrF0GRmbCEeigB0ds2BkEZFyMRrZZoz5mCM+Cc5eLiAq0N0UsA4ZSB2tYUKvDP1naXEBNTdbiHfiCkRD+OjN7jYySUtCCB37Ypat08ixfVRiLDxjU0TUsugViZxNKMFVicb923589vvqrTnQ/UwgRcGGOQrz6jcsY5yzG0NIPF/2Ji1To6nWhNIvnAeDxCKaxcg9OGi1XDpms57vd8fr1nHEf2vceHxMasadfS0qlrwKa1R+u8yBjOUTicsCGy7uZ1XVHMS513fld/8xp/XI4XmG9PiYjq6dBktAWlBCpeCjUtSwXpaCEXVwFt3jxRSx8hWFup4EomR8/+9hpVIrvr1/S7W6bhUJWFZEElREs3RJH/EsCG3NQzGNYaaWFpm0ao4Oa06dc2lrIIQtcmfWShCTru5Izqb98fPKJAybHC6iEGT0qeEDR935NLYQxFyBQSCGzBUtDkIhuUKDsJSCyWQihgi2wMIUfpeZup3yoFptcwopgmzzgFGhdPKjgaSWtQluhH1/arplux2lzgmo6rq/fQRov2aEpkYBhHipp5u1n6j0+Iz1pfktMX6BMwwhhN0xqKtbQ6Y1XBOvOb7o9HY2+o1ZxyJ/LTPI9FVJfWq45cEpcXl4zTyPa4x+qZkGQmlOAEfqGm3Oa5aiy2bWa6JICTeHspqJRlc53VfXyQlphU+xz1DF+6E97M4MG3MTmLk69R2N1LJoyS5030jT+p4JqZzW1GzZcabWrpViiF1bqjAG3Xoq298wT1XS27b72U6Ic7Z4XWtELBtZJat6qDKFkQvzHOPfaKuxu3qvzpkCtpRyaEIPX0mW5RSco3JeqbfHvOdxxH/uqnf7EAi2KKTCEwjgNfff4pwzAwhSiiEbkgMsa5ijucspCFk5ONFX0eU6q0l2XJvs1c8apIe2g0htI2WGNQ/kDrDE4lGpVE63gcoUBrhWd83TpWbcPQH3l1K3Sy/RgkY6kMsai6p8jYhnHAxyBRtyoYYwg+SOvkG3feLKQhN4awps3r4V2LeKEeP0WkvG0kYduuIqjCNCKQ7qjwk4GiEa5rQ04TMXpBq3VNnRqNVopVq+k6gY0bFUnTgZ//+b8GCn6a8NPENAUu145VI2w+ISTp0TsK+CYhCOYQQKUKslp1tG3Ls6dPcc7SOo0zcmA4oTrlw8nLRpDJqUa6M/fnjNitPKP3d9tkSuw5bnv2VYpMo0lx4vMvpTY26TXBNPRjQuk1aEPIAz5lsmnQXQutI1RO1F4CeMYpMPlBQDVepBbDJPWT4mASgCk3T4+UpOgPPZu2xdDitEZlAI3SlqZd0a0vee9Hv0+xFzx5+pz/6D/+E6xtePnyBYfjgV9+/DNuD0eKFm7pVKSuFkJmGGt7Rz9RMGAalGlQthUgmLVoV+iaFRSLyQFdEm1n3hW/+6a9sTfPW4Cc4Lqmwz01rFdr/vAnP+HJ5QU+Bm63NyijMUWhk0RcVlm8bnCmkXXpFKBZP7lCG4sfRqZ+kBLQrJkXE2pmbPKemAp9ENJ62cCgUQ0OQaWfZPPkq67yhPcZ9aoT7Rplbt8XJhUpUSURhEBXfEieI6yMMwLsa51j1a3kgJESpSTWly24K57EwuV7TymohWb1cDjSH4+CfJ4k2p8Zm7SqgElneHq5kc07TFWWrh6YiiIGeZ39YWCc4uKYZ
J+YD6VOGJ1IxFiYxsBue8CHU1RpjGVtHVpFtHq7uIWXL77gf/wf/pvl54WmtORFCnF5D/XUM0e08vun53oj+i1v/t6bkfvpAKfUSevZ6NqDy9zSxoK4nrVzZ9xHTgJmlQyFZEONltrtMgbg1GN8qvVq/W1qWncOw3Me/TvuKI/P8VIPh8ztJmB0Rs+pG10AXUGLlZ9UFVRNF2qtFsCNNDjLaZYZLl57yKZhIFUyBWkbAGsUCoNWiRPMSUJrW0tdsyjySXzZ0rVOol19alO5WwM5pVPgVME40eiVWpCfP7L7dAZKlaroUwERxkl6dvLoVIjOEYulZOQm1xmtBcChraBhldHUdl0SpSqD5EqhdyfFOPOwJghQ+XPluqtfOt9UqoIbtBZxCNd0dOvM5vKKZ+/9COsaxiCk8e1qVaH9Rt6HqqpTM4CmAtME9Sk0gnqO8pRQURpbkbVJiWLMXL98F+zuMEtdLKXciVrlppeGf0cumXW3wq89XdOKypBSS0tNyXKil89FHGQBkeG0BtM4lA9LZDO3qiylr5SFuSiLUEGuqf1vms+33+LC6Qab56e+8td2embSgxkcM2+sc1ZEASlHKAVjNU3j0LZQjJXjjXWgNN5PeC/sbSVpSqnsSaqIQo6aEy9VzzWKpCZ1mCXPaVhBCYfaxz/jHWS44qBnJzYra4mi0umAPmd1vt1B3J8F7/n0k1+83Rf598AeleMtBcKYGA5CoZhjqZqrsvjarhCi9AxOgxbatgoNd43CqapfmQTlapSAnCYfmIIXesdcO2tzPuWhkJOxthZl4Eo5Sipcrho+uGolz1Y3baeMtMXU3i/XOC4vG9F19CInmIshU4WzlaJoLXUnatuLnuuceU4oAUIQrjndeL+raWO5ePoBIYhGqrySODvbtEJkoWWzWTcN719dEdaRdeMIwdPdaNqdqYjkgtKRnHpiBEOkc0AxZNdRSiY2QtVplVxCjHHFk8sL2m6Fcy1GW1QRtShnLI2NdN2G9fqS4whKR7TpcO0lrmlouyeEABebK55ePqVtGj54/pz1ak1rHCUkNt2Kjz78kOdPr3j6ZE3OiadPn7Far1k5i04BlCeXCYUQmqgsvd0PURN7a/aNWcXaYqUUq9WKlBIX6zWbbo2Pgb4fJKW2H1Fodtsdu9sdWkmvI7ngSyZpTVRyv1ilpddba6mdGc1OaZSPeC00i6lArkxm0pNt75APSFuGdVKaMcZ8w5v5Xe2UYdK1Ji34D73QjVojvZyCLJ4d40klKxfRzp57/o1RrLqGVDKuqU6z1vu652vev2qZ5TQLwvNcCtKGNAykDDe7vh4S5YOa+Z6VzhgnJ/PRB0GGV64CKaMoUIk49qhJ0w8Dh77H+0Cao/s7752vPXK2x22PyvFCIcaCH1OlrpOjnjHgANcIs1WK0g9XCkQvnKbGaLJRlZACoBJoK02JI3GaCCkyeE+hCPn4TC84O9KqRdtpjS6Ki5WhFNFtxAk6UeOg6AUMYKxh1Qk7T4pSg6lMpVLLUOpUl6wxtK4IZv21+0TfQTrfh2ltaNeX2CrmPCNPZW5qDVeJ422dg3VDSpnGWkIMxNKT1VSZiwCVycWTk7RFOc1p/oBkRQdVVyWXrpX+3dV6VRWiJAqlbohWG6yxNE1L03QYO4FyaN1gbId1La5Z4ZpA167ZrNZ0Tcvl5pL1aiXqSCnTuobm6ik5R55frSUCryFHYw0qRxQRSqSUCDkIOvddY66abV4gdyPf+aHlVxSta0hdR9e0dE0jmY/JE5PIueVc2N7u2N5uscawXgtfba7glgTEnNFGY42mMZZN29JZS54io+lRueCKrOxsNEXPvdTSnjY7WmtF43Sh/rzPVLOqakQi0VGfW9XsF8IGV9+Dq33htefgBNquDvVEhp+X1jSLxua4IHOhYFdNJX5xNG2HUooQBPD36tVrcZBFceinBceglCZGqekqldFOni1mobFU+USnmpF9LCe5d4dpZBjHGulmTq5Xv
Rntn+2dsMfleAukUAgTFTkjYB1tFEaBawpNhJwU3QpiEM1NSmVCQjb1kgsZ6W3TQE4BckKVgr7jACUjrRZpvDmnb4zGKY1wKyvhetWS1pObVEudoAiAYhax1ipjKqLRVdpFZV0lWhe4vzVamupLLcbPrE7IhqeUureDa1GKXOtDpAIqyfcFYu1vwwRQBaMsq6ahYFmvLmp/8Z7GjEzTyOG4I45wezyilcFqK31u1tF2K1D10AOonFAl0TeK/c1rCCO3lx0lDDRWmGOUNhjX0A8jyY+oHMhTjz/e4jcdadhSWLG2Gbu2hA+eose/Rds0vP/8OU3TcrFuMSSUEYUXsBTXUpZaWZH+VCcI1hzvNPLzzuCqvqOd0s13wUUUaF3DxXqDNWYpdKxWF1jbsNvu2W331eEIQOX2+jVj5byt+PGK4kyoktEUWmd5stkw1dcJ1PJCxS7MvY/LHNexyP1l30KGX1Cmi+OkkNNcrxMnqoxaolxrJPJtGida2BRiCidnrBXa1HIEdd+YSxsFTCkiF5jlmg+wUKoWeEeMiSnEWvoTHmXhZZd0dGNqhGvk4C8AtQSzo9bSKaGViI80MaF0YPKeUpIAeur+qOdSwH1P69neij0uxwsED+NR6rlGRyBhrCypdlVrUEqRgiJMkMaEzwhVYamAiixRcCgTqmThFi1fc7xF3TnuChXJ3PbgnKG1GpU1OkvPZJq5zYz8TS61N1BBybJJaZWxuuCMonWGnDUWSX9PJRHJWKuxztS669wDKe9dArV7vHWUJtuVoMF0Fo5HRDMzehGKtiWjTcCZDtd1GGNZbS7QWrPuRq7WietXX3F8+Ro/TlzfHIk+4poV1omQ9JNn70kkW8URRBQ1scdz/aLDH1dcusy4F0Fp6xph4unWjN4Tpx6dPHHcMWxfMrWGsH9JUy64dKAbxxPzI378rKNxjqurp8IsdBxFcNyIuLiuRCRQxSFSwjlD0wjNqC81HVgJ/fW/g6k5dcfhzrDerm159uQKH9esVvIZ//6P/xaXF0/YH47sd0fGaeTm5pph6JmGnqE/1siq1hIrDaMqGV0K67ahcQ6PwilNAPoY8TmfDqgLiclpnnXFRtyn55X+SjlopVkXupSFs9nZmdVMoY3gNLpW5DqFu9yQYiBMwnPcWOlM0LPGa1HLgfiUJCmVGEda3ACR+yzgnGO1XjMME9vDTtC6tVZbm1Iw1pCdxhSNdRptFHHyTMEvrGDGaJpO1nxRSsBZRjMMY31/NYOlK4joLmz7bI/aHp3jhYpsm6+6luYlNa+tu7iYN07Vbz7RglCb7TudCdWbr/X1vypzQkh9/UXLG61B6s445ffvpNjUMsQ3vqr5D+/ZlJoj/TsvXsd8AjvBTGivK+mGqTSdM3iqzFq4KZJ1PBEo3I3U89zPWTVxK+jqdCVyNlUd546nqKiTGaxFOQlfa6OWdKVdxBJ44x3N45/nekY2KnUHbzO/53+HnK668903NeyoGUSkZvCVqNpYa0WCzTliigtX968d/u5gIeYlv6yT+fmpjvaN0XzLOr7nVPM322/4fOe1cmcYkgG7A/y787t3UbVLOn+5eXnjlprX3wzimiPlXD+fZfWVN2Z1eaq7ffzqzXz4Ag4727tt6iF5apVSL4Ej8OrBXvR+7X0ebux/WEr54Hd5gvN8fy/7necbznP+Pe28xs/z/dD2KOb7QR0vgFLqX5VS/t6Dvug92bs49ndxzLO9q2N/V8cN7+bY38Uxz/Yujv1dHPNsj2Xs9ycTcrazne1sZzvb2X6jnR3v2c52trOd7WwPaD+E4/2ffoDXvC97F8f+Lo55tnd17O/quOHdHPu7OObZ3sWxv4tjnu1RjP3Ba7xnO9vZzna2s/37bOdU89nOdrazne1sD2gP5niVUv+5UuovlVJ/pZT604d63d/GlFI/UUr9C6XU/6eU+nOl1D+ujz9XSv2fSqmf1a/Pfuix/k12nvOHtfN8P7y9K3N+nu+Ht0c95zPjy
9u8EKHXj4E/Bhrg3wB/8hCv/VuO9yPg79bvL4GfAn8C/HfAn9bH/xT4b3/osZ7n/HFc5/k+z/l5vh/X9Zjn/KEi3r8P/FUp5eelFA/8r8B/8UCv/b2tlPJFKeX/qd/vgb8AfoyM+Z/WX/unwH/5gwzwu9l5zh/WzvP98PbOzPl5vh/eHvOcP5Tj/THwqzs/f1ofe/SmlPoPgL8D/Evgw1LKF/WfvgQ+/KHG9R3sPOcPa+f5fnh7J+f8PN8Pb49tzs/gqr/BlFIXwP8G/JNSyu7uvxXJU5wh4fds5zl/WDvP98Paeb4f3h7jnD+U4/0M+Mmdn/+gPvZoTSnlkA/rfyml/O/14a+UUh/Vf/8IePFDje872HnOH9bO8/3w9k7N+Xm+H94e65w/lOP9v4G/rZT6I6VUA/xXwD97oNf+3qZEOuV/Bv6ilPLf3/mnfwb8o/r9PwL+j4ce2/ew85w/rJ3n++HtnZnz83w/vD3qOX9AhNk/RFBlHwP/9UOjyL7nWP8Bkn74M+Bf1+sfAu8B/xz4GfB/Ac9/6LGe5/zxXOf5Ps/5eb4fz/WY5/zMXHW2s53tbGc72wPaGVx1trOd7WxnO9sD2tnxnu1sZzvb2c72gHZ2vGc729nOdrazPaCdHe/Zzna2s53tbA9oZ8d7trOd7WxnO9sD2tnxnu1sZzvb2c72gHZ2vGc729nOdrazPaCdHe/Zzna2s53tbA9o/z8m948uKEeoIAAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train\"\n", - "data = create_dataset(dataset_path=DATA_DIR, do_train=True, batch_size=5, shuffle=False, num_samples=5, policy=policy_list)\n", - "\n", - "epochs = 5\n", - "itr = data.create_dict_iterator()\n", - "fig=plt.figure(figsize=(8, 8))\n", - "columns = 5\n", - "rows = 5\n", - "step_num = 0\n", - "for ep_num in range(epochs):\n", - " for data in itr:\n", - " step_num += 1\n", - " for index in range(rows):\n", - " fig.add_subplot(rows, columns, ep_num * rows + index + 1)\n", - " plt.imshow(data['image'].asnumpy()[index])\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 在多个子策略的情况下,每张图片首先等概率的随机选取一条子策略,然后根据该子策略内俩个操作的概率情况,进行随机的自动数据增强,增强数据的泛化性。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAd4AAAHVCAYAAABfWZoAAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9ya9lWXbmif12d5rbvNbMvG/CGR4RJINkZRtJJlKqBFEFQZOaCZJGGtVA0CwHyr9A0FSANCEEQdJIAiQNCspMVCkLVcmqlMhsSFYmI4LRe2tu3Wtvc5rdabD2ue+Zd+Hu9twsgmUr4vqzd99tzt33nL26b32fyjnz3J7bc3tuz+25PbenY/pZH8Bze27P7bk9t+f23yd77nif23N7bs/tuT23p2jPHe9ze27P7bk9t+f2FO25431uz+25Pbfn9tyeoj13vM/tuT235/bcnttTtOeO97k9t+f23J7bc3uK9kSOVyn1P1JK/Ugp9VOl1D++qYN6bp9tz9f86drz9X669ny9n749X/Onb+qrzvEqpQzwY+A/Aj4A/jXwP8s5/+DmDu+5Xbfna/507fl6P117vt5P356v+bOxJ8l4/y7w05zzz3POI/B/A/6Tmzms5/YZ9nzNn649X++na8/X++nb8zV/Bmaf4LmvAO9f+/0D4Huf9wSlVAao6wbrHDlnUoryt3LTqvzUYLVGKTDlp9YapZQ8VqnpRZF75N8pZ1JKkDNTLq+VRunymJLh5+s/cwYFSk1xSHluzuScQCl5791fy9/IsHv3q/8qpVDl8dNPlNo9Jl87Nq499/pxvffRg0c559sfW8IvteZV5XLTVMSYiCmhtcZoI59HKTlOdXVoSmWQ/6NQ1HVLVVXUdcNisbxa8y9tmRgjOSf86Ak+EGPE+4GUEj5GUk5Y67C2wjhH285RWgOKnCHGSAxB1jzLN+THnhQCkCAnlNbYqkYpTVYGlGZ1fsbZowd8gcrOE683wK1bt/Kbb775Jdfn
yj5+lGq6p3zmcvKRktw/fScpJVLOKKXKd1zOw6/8nX099vDhQz744AO5Rj+55l95T7FGofWnf9bH1mA6Dz5tXZSc91qVTShluVZzJibZI3TZi5TS5XHl8cjfoJyrMQF5d96p3fWmUFrO6WmfUlr2MK01xujrh4MxsnfllEk5k1PGx0jOeff+u+85X72f0Wp3PJOdnG+e+Byf1vtp23K5ZLFYfOXnjyOcn0OM1+9dA6snPLLPtU9bb+DJHO8XMqXUfwr8p9Pv2hheee0N7rzwMuPY03UbFBmrMkZlZgYaA/PacjivqKxhuWhwzjBvKpq6QiuFUUZOflvJBq3kNnrPdrslpkRMmZwVTVPT1DXkjM5y0obgSSkRQiCGiDYa5xwoSCQymRBGvB+x1tLO2nJhi6OWiyvIBYMFlDh4pXDOUVcV2hjqpikOz6G1IcSEjwFQ5F3wYAC9cy5k+F/+b/4P7z7pejdNxd/5e9/l/PySzWZL28xZLg/k89QNxhisA2NBm4x1EVAkr1DK8J1v/zZvvPkbfOPN3+Dvfu8f0NTNVzoHYgxsNueMY8/Duw94ePcBq9U5dz96l27oeHhxxnYYODh+kcNbL3F060W+/Tt/h6ZdELIhZcX52SmnJ49IKZFjJgbPgw9/xur8EYQN2a+pZwuOX3kL18wI1ZJoGv6r/+z/yf/j//i/x4/DLzvMr7Te8Piav/766/yrf/WvvupLIds1QJZyVM5ARuVMioEUIyEEhr4HQFuHUpqu7xn6gaqqmM+XGGOo6hpjzSeDjmfgiyfn90d/9Ef8o3/0j9hsNvAV1/zje4pScLBXsZxX8ruenJD83RiD0VqCk5SuOdCrhVBaHKjWmrauMcYQ+oE4ekbvWXcdKEUzn2GspakqalfJOlcVRmuapsFow/nFJRcXl6QUiTmgAOcc2mjquqaua7wPbLcbUsq4ymGMYTGfsVzOShKS0Uqzv5zR1BXD6OkHTz8MPDq9IISAqyqsMVhncZUjpyTnec4s5w1N7VBcBV//5//Xn9zIen/dJgGI2R231prf//3f5/d///c/EUhKUP5xh5qBEpAXe+cd+Kf/FC4urj/mT4F/8djjbtg+c72fxPF+CLx27fdXy32PWc75j4A/AomWckpcXJ6TgHHo6TZrNJnGgtUQa02qNLpxzHONcpbsFqhsCVjGqCFLhKhQoB0oLY42Z0KI9P0gzrVErJ21WGvQgFGytcUYJWsoCa/Wk+PN+ChOOaZATB5nHSrOy8UqV3MqTlIpjVMOUPicyDkzKMVWKbQxVLU4Xq0sSulrG+vkeBUhSQaTUiKFyMfz4S+z5tfXe29/njMeys06WMwrrHO09QxtDKPvGccRV2mqqkYphbYGYxzHxy/x+uvf5PjWCxj91bsSWmuaZoFzDeaVmv2DW3g/8o3tt/HBc7E+px97IpaYLe18D2sVEEgpEBNstyvOzk6BktHlTDtbUDvD6uw+56szsh7xowftySpADpAin8wjv7B96XP8b//tv52fJMtU136q8g+VISuIYySMA6vLS+5/+CEpJaq6KlmSVAdi21I7A64i1w5U2cDyx97gV9O+0p4CSDCu1a66pI2hqhxaaWJO5JTJWU+Z9ieCkcpa2lmDtZblrMUaw/pyxXadUAayziitWSwXuMphlcEoU4JqLUG2tSWorfBNTSaRiSitqOsaaw2uqqiqCj96lJIkYDZrcZWjrhx1LQ40+BFJASKJiLGGRhm0Mez5RIgRa7XsLUZhrSYlSUgkG4YcM8ZonHGfnuF/wTX/tPX+Ou3OnTv83u/9HnVdU96Tl1566VMfmzP89Kfw4x9fFTOgB/474P7ucasVdN3Hn32fJ9gbnsiexPH+a+BtpdQ3kC/qfwr8z3/ZkzKw3W6IKTH0Hd1mgyEzd+A0VHOLrQ11qgi2wSYLQYOpSKPCT/tohowCLBmFD5EQ5TaOoZSHEhkYSknVaHBanpySRMRKaRQSYSVvyWRGPxBjIJNIOZKc
w+koUZikIaQYSTGi0WgjkWUIoZRUpUSttaaqxPFSMvQpM1dKgdJkYPSRmJK8Zvrc6OtLrrlEfpkARKzJNI3DuYq2qVHa4P1ACFFKWsqilEEZizEVy+Uht2+9xN5y/1oZ/subUhrnapyraZo5B0dXf0s5sd5eMPqe88st55cbtKsxRpGR9YgxM4w9680ahaJyDUYr6qbFtjX9+hzvI3oMBB/RJqJsBBWvX41fxb7SOf4kdtWQUFc+UsmZnnMihkC3XvPg3kekEGjbaUOvcVWNJhF9aQvk9PgLP0NTSn2Rcv9XXm+llLSpSjBrjaYpmevoPSEGcspoLccRY3xsz7VGM2saXGVZLuc4awl+YBw6lIasJLudzxvqqkYlCWamRpdWCmcMtjjgpnJkEmiD0oq2bXDOSXbqHKNReO/IZHnNpsZojTWaGCF4aWXlUn1TWuOMRWnNbBaJMaK0tOSmnzmBzsXxoiBllFFY87nb/FM/x3+ZHRwc8Lu/+7vMZrPdfZ8XzH70Efz5n1+/1D3wM+AnX+dhPpF9Zcebcw5Kqf8V8J8DBvg/5Zy//0WeK6VicFoRNVgF80pTWcWicSwbx7ytaJuGqrJYV2GsRevSH0mp9FCQaBbEcWUAiXYzkGMiUfpkKqONwjnp0cSYS6Zr0CUr0NrIZwO0sVIqSgGtLTkrUlYYpl6SRimDVhpTHG9CkZUm50RK0rvJSu6fThutFcbYcsx556RTSp+X6X6lNddK0TQVzhm0gZgDXb/BR4/SDmMsKIWxFSiN9wljDfN2QVPPqJsF1rZo5UgJYkjEkkEaYz5RrivHSD+M9KOn63tOLi9AKfYODiX7z1Kc1xqMUSiVUcZRK83ewmJNjTYW27RyTFH6a/HWATmKYx22g1Qcho4QBqIPWFM+T+nxqxAge/LjNagvZU9yjj+JSY99+tcUZWZSCIRxxA8Dw3ZD8B7fDWilcZXBVoZx6KlnM+qmpZnPsVQ8c6/LJzPMz3jMV1pvhcJey3C1MRhjmM1ajLHooWf0EjgqJf1S70dyyjhrscZS1Y66rnDWYK3BGE1TV4R5u6tQKaVpWmnR6KzRWReHr+W6Lq0o5wzzWUNWGXTaZbzTNaO1OPq6dpBLH5cMWQLNnDPW6d05EGIgpUiKvmAjpCJXVaa8N1gLKWtImpwzJmtM1qgMMXz2NfCszvHJ6rrm9ddff6x/+9JLL2Gt/VRnewG8h7hWkKvjI6YY6gJpV1+Wf//q2hP1eHPO/xT4p1/mOQqK05XWprLgjGJ/Zmms5mhRs9fWzJqa5WKOc5a6arDOlGqJImVxiDnnAswpLqugsow2Ev3ojMpIyTFHtLU0swqlFKk4XqMtWkuPVpLNjDKWnBMhBunjakXKBpI4TaU01sobaqVx1gEQ0SQVUKUnp6A4XbWDjysj5aaUM3EYSDmVfvQVyOym1lwbzXzRsN5Ytp0iRs/l+hznarJyOFujtaGqW3KO9EOgxjFfHrJcHDCbHeLcAq1rUoCkEt6PpJxompqq+vQseLXpOL245N7JKf/uZz8hK81vfOe3ODg6xmSLSRbnFG2rcUZxu22ZNZpZC7cOHge/TKX5RVtzeHTEZrXhw3c+oN92nJ9v6daX+GHEuRprKnKCHBJRjaClF/wk1aSvco5/5ffi6lBzvnK6asIVeI/vO4bths3lBWM/kAYtmY4NGBvZP75FO5vRLpYsDw6hbQsQ8Fo/81cMcHXdvtJ6K3DO0jYTbkFKvnt7e9Lf3RrMMEg7yVpSTHTbLTFGFrM5bdtitATlxmhqZzFWM1u0GKtKH7dGIQE+GQwGncWRWisBnw+BlBJtXdHWDjTk4nitlWw1JQFeWauYtQ2QMVahlQTgISSUUlSVOJ5MxgePHyNDH0gp470kHVVVY4zGOo2rFCllNJacMiYYVNKQIYz+c5fvaZ7jH7fZbMb3vvc9Xnvtqtq9W9NPsQfAf4XAoia7CiseAf814ng//zM/a/vawVWfZhrpQRgFGKiM
onGG1mlqa6isFpSikmw150iK1xCcIUlJN2VSLmhOrUo5VJxhRlCAqiCW5X6JLrVSxJxIAlgu7yGOeIdynso1WsuDci6lpel2DU1YTKlPltQe3+My5ETOiVwiW7JsrBN69ab3RKXyLhuQXqBc4DknUk5oZdDakBBEuWQLTvq/Y2C12gCZvaWAy9QOEf3p75eBfhi4uFzz6PSU9z/4kKwNyzsvEW2FSRaTHVpnqk3CGUVcNMwri9VGkOxal/J8QYeXn7O6ghAEcGI1aVhgVUITCOMWayXjQamyvoH8+aX7XymTcvJVxrs7dZFfUowE74mhZPIpoRWyTtO5lSLjOGDHilQek1G7a+BX2ek+iRljsNaijd6BqcrSYIzBOYvR8phkEsG70gpy1M6hNeiCjE45QkwFVWww1mCsZI+5TDNM6GddQFlZUSpX8do+MB1dmY4oSOaUBOE/fRVThewq9Lr6jqbWVYhyveaCapfn5d1kSAhq9/45ZfFGU0XtV+gaWCwWj5WQDw4OmM1mu37udQt80oWeDtBfwjh9pMQ1L2yBPR6fkt2U26+WPQPHK+hlR6QyGVUbGqe5sz9jVlsWlThga8Bmj4oB3wWCUuQsLiqGxDiWOEdn0FBVFc6UTdvYMlY0QI6CENRQO828rtAKBqRsk6IAeFLK+BB3Gx8ZjLU4V5WT26MiqFRGm0rZCCBnKb8qMkZLGTulUEYNrJRTkew7h0woyGpicfAqY7VstOnJepIfs0ROgflcSvYhJMZBLviUIzFHnGkwVY0rMUZVNbiqQZuKu/cesFp5Xn3lRfaWc5pGynH6c0Y3cs7ce/CIf/fDn/Cjn/yE//d//l+QtOFkyLz8jW+gsoNUEcNA151jFLxwa4/5rGbRzlnM5lhX0S4WGGuZ1TWVtRw1jlvziqPW8uKyIYbA2eltNpuODz78gJ+/Iz10VzWgNX70jGEgjiPPCkDxJLZzupldsDZ2GzYXZ/SbFSqOGAKzeY3VhpghZo1KidX5GcF7hpe2tPMZylgo5+GvQtn5pk0pKeUuFovdx9NKSeCRM3Vd0cwEcVw5R4oRpzUxBJbzBfN2BjmRspRwh64j5YixlqqanLlcoDrKOW6UXOtagda5TEjIFMQE8EKDsjLyo8ucZCw4FKXUbnQojJ6cEtZKtq5I0tpK0PU93gcU0hK7vj1I9kzJtIt7KoDRNGSpUl1ryz1rM8bwO7/zO3z3u9/dBYDGGA4ODj718VvgXwJ3r9033IP+j8sfJ1tRLvEXgP+YK5RyBv4c+Lf8qu0BzyTjFTACuz5fZTW1MzTOUlkpPxpTuqZZysJKSY81Z00MAkSatpGpFyI94DJPl2QcI+a8c3zynlcZm2xsgnhMBdyUJ9CWKuUkpQXgkCnw0qv31Ery1F1Z8Nps7PRF7za74s1zKsNKmR3AS+qpU8Z9gxdJidCt1SjlgIgfx2sZfipZpdkhI20Ze1AKhmEgpxVdt1960FK+tvaTJeZrwT1dP3BxccnJ6Rl3794ja82j01OawyMUFeSacdhyeXGCVhmfPfN5w97csz8mXFUzQ7KTZYTKRmZGoVorvbd5Q4qZGBLW1pyen2OrBpDPQqk65JJZPO1L7stmlZ/ofz72e5nNTIngA+MwEL0npwIt1OAsEBWp9PfGccQ4AQimFKWvyCed7tPOfr8qS94XMWM0zgq2I6W0qzylnLDaYqyV0RtrSUpKvxpw1kr5OQVU1KAkawwxoE3ph8nRAwUvUpZy2nemLDinKJUWkL0CJeVexa7UPwX7qqChUWVPSJGcFdOLT9W3WMbHjBbsCupaVaw8JsWEDwFKJk4WPEby+VfG8VoroLLDw0NefvnlTx0LSunxsaAeKR5/dP2BawSMfL3WvLMKOOZawwaYUepHN/I5bsqeuuNVKCqtmFmNzgZnDU1laStHUzvmJeNV106wad5VKS2nltLk0v9ISk4qZXQB+2jMVHaLnhwCSSrQjKNns+nQKLyXfoz3AR+EwGGM
gQxYU6GVgZjIKpcINBWHW7JXrVHGQU4CXshglENphcGjp7JUlBJTnErL5TOllOm9gCmGIKNPMSZ8uEFHoRRGa8KYSOWWfSLpDH4aq1L4rKirmqPDOdZYnB7RKXNwcJvDW7e59eI+s7mjru1nZrogp3YiMw4jm9WGsZcZ6KwV42ZFd/aI2fyIdjYjDQq/HUkpcqE0/UXPtu65qFYoY9FOiDAE/Ka5c7DkpaM9KmeYz2VucirTn0RLXx/J0horQZEFp3IBrvw62fVNQzZO33UEP3J+8pAHdz9ksxnoQ01OCd1ljAqMSb5H7T1VuqAfA+v1mno2p25nNAWH8NfX5FzIJYBWQIgRtJSBY04ErQnBS1aItKdi8PTdtlQVgrSuojjQvuvp+h5jDLUTbIjOJf5OiUC4ym6z/MHYAq4sgXZKMrMvKE8lIKmU0eW7Vbtk4FoFLQmgSuIUg7FgtMMULIoyEuRXTkrg0oIQ5+1HCcpMdoIsydx8/+pL2mKx4Pd+7/c4Ojri1Vdf/dTHpAQ/+hH8/OdX9w3A6ccfeF7+8Kn2CPh3iMuGK+jVsw88Pm7PJOO1WpyvReMwNNZQO4Hh15WUnvMuK8uSECoBKeUS8lklZdldz0WJQ9ZKFWDTFIEGyIqoIHhF341Sao2S6Y7eMwZPzIkxeSSkNALrT1l6PSqXKclywSqkp2wMOanSMwWrHUZpEhllrhz7FHWmKTIGQkxsh5EYE9sx4qOAJgb/1VG4HzdpoWuIkEImhUQKGW2EhAKVJcpMCu0qFnsLrFbowaNzZLFw3Lqzz8HhgqZxn5rpXrepQ+W9p9t0+NFjjARJod8yrC+YVQtqoxmA0HtCCKwTDNbQ6R5nLAlFSJqUwUfZJ4+PDrh7+4imcRweCehub15TV5ZVMvhqicpyQmsyyg5YZSRr+TVxvXnX53j8vnEY8H3H6uKC85NHDF4zpgpSpus7dI6MGEYUOkSGuCHkTNd1DEOPrSoed+i/HuvxZewq+0zkUg1LSEk3azA7R1vAjlkqVjEGxpRQpKvnJ7kNIeBjwBpLbhJGaSptC0bkGiak2DRTm5LsQTsWPdg5vwnXkQtrFchxKD2xT02ZbhbnrRTGOIwWLIZSCoNUK6ydnLVUAUOIdFthg6udwpmCaFfP9htv25bf/M3f5JVXXgE+vdKSM7z/Pvybf/Mk73SBON7LJ3mRp2LPxPE6q2kqi1VQqUztCsGFUbu5tAkLnGGHWhanq8sYjlwoamKoKRdMAintxAgpSv9V6ZItJ0IK8vckJ3jIiTBdJCVKDTGRc8QTySqgSGgSWkvkbENAjRrlFCkmxn6ErJhVlso4tBa0sNYRY0dUjsQ8kgtpRyjIRu8jISbGMTCExDAENp1/vNL4BJZSZhgGYkylTy1MXTpmRj0QLFSVx1gpQfuhk3556UE5m2gbuxvB+rQLZvSebdcTQmS9WTMOIxdnZ5AitbPcOjwmkBkGz+nZBds+8+jsgs16zb0PPyLGwLxtcNZSu5qmqgrcXTaNECFlhVUBzYirHJuuwVppTTij2XSR1TpSWcPRbI4zmlq32ErGk35tTElJTDKqTExBCDMuzuk3G9abgc5rxmQIxkIO5O0aPXbkCDZBamv6431UNnTDyHa7paoFPXtlf32dr1Ifxx8UQGZOqAhJgS4ATNlhVAmqpUWUUWg0iVRGdfRuCgKV8NELdEfJqOB1gKU4T3lXaY+BUdOIYll/pbFGqjjWuXJd5av2DxSHXfY1pXZVPmOMVHPKBrEDl8ZETgpyIerRGWsrwaeQb2w/+SKmlOLll1/m+Ph4d99yuWQ2mz22f5yfw4cfQgjye0rw8OHHX21AGCyv15X3EI6PX+8KztMvNSuonWHROpw21Eb6unVlcFYX5KzwMxtjgXw171pyToHeZ1TKhIw43zhFulIWjilKmSfFHaIz5sgQYyn5CFjLp4RPEx9rmfr1EYVijCNjTOUCEgRpM4AzmqQiUY2kEPHdiEJzMJ8xqyxt
WzGfGzIRZbekHAlxTYwDIQnBhw+JfpAy96b3DD6wWo+cXfQ31gtLKbLdbMjZAEai+7GHbAjeoYynMTOcrumrTL9OZKNwOqGtoXGJ5aKmbexn7tNdP/DRgxO6bceH77/HarXi3r2HEAZmdc0br75BHwIXm4HT9X1G/y6DHxi6ntXpKTllFrM5lXPM2xmL2RxjHE21QClDTJqUFd264eK8RRuDrZ1kMgXZrvQc1JLlbI59ecascbTLfdpa4dzimZfavoztkPs5E4aRftvx8KN7rC7OODlbczEYkrYEV0vweXYCl6e0o6cdPf3xbTYHR/hkuVhvce6cpm0LilZQ7X8dne4UWGiFMOVQEkoFQoTDDsthlMK5qrQxZLpCKY1REuRppdFojIFU2LBSGVsMIaFyRlcW42x5XGEHkxeSShu5jB3KFpsKKtlZV9DXgrQW7nlJIrwXesqccnk80jpRWsYQnZMPVUBjU9Uu+kyKihw1WlWgoGpamrompWlfezpmreW3f/u3+Rt/42/s7tuxAl6zu3fhn//zx9mk/CcmgDqE1vG9a/d9E7jNc8f7FcwYTeU0TisqI47MGH2V8Rb00jS3O43a7EAM18ZwjFaoLL1MrRRkKf2qnHfcq1zbzFLpocaoyRlCKOCD0sOFKQJNpAg+XF3QUyAdtCJrI/3fDMZUwmCVDTnJEDtJo3Qu5BxZIudChn7dr6rShzXalJllV5DN4xOvs3y+sBvct9ZSVxU5m8Id7XDWUllLZQy1NTRWs98amsqxmDU0dYWzthz3FcXHOI54H1ivN6zXa7ptx3q1ZrNasd1s6fuenBLz+RIbI50fiDEyRF8Yxjzee3KKDNtMMgYdRpQfMMYRqhGtDAkRO4ixxvst2lpsLf1fNY14KUBrLIrReypr0boqHLju6buZa1/w1b/Utf9+/G+fhH6kGPDDwNh39F1Ht+mF4QwtpdB+TRrkb7Hry6YdCCEIJkJrmRVPcQf+mw7rOjjn6v1/ebB3JUbyqR/6lz7/67QpY4VrGaikwCRdNhPFThwEeAxzoUvpNqUkvM5ZKGbL0AQ6X/VxM4/zCV/dkPcwGpMLl/y1vq1G7+6brvmrsSMJjFTZs3SJkXLJeGVf+9j5UwIJ2Q+vhBYoP0WgJX2tX03TNMzn893nnOanm6b5XPBeStD3cvtsS8g+2ANzoC03hcxLrXh8n+yAA8StrbmJPfTrsmeQ8SpmjeVg2eB0ojEJYxSzWhCrE5fydWRwaXyQyqadszg8pTStE+rDqnI4Z4lhZBh6FJm6qnAZfEqElBlHz7rbElNmDJqYQCUp3VQaFnaa9fWgIrFPbHopX6d81VvWCpyrcFXFvJnzyvHLVKYidobsNcpDHsFYhZsJMjJaA8kQYyoFLuTCQ7Oc1cxRLBeZw0NxJt8/feeJ1zrFyHrdcXDQMp/PmLUz9vcOUBiMmqGUo1ocYmdLbu87Xjtq2JvVvP3GHfYWc45ef5O9W8cSWGgJf0LOhJT44O597t27T7ftODs/Y+h7Ht27S7dZ89FHj3jw4JRm74hX33ibZBz1ENmEyMmj98iP3pNv1pySQqB7eEo/DGz1FHgZtG1Q2uDaFm0tyhiUNVhX086PsMYxq+dUtsanS8bU0O8fcHt/H1LCvXzA/uEBzWz+DDLe6zvdp7x3vvaQAnjPO2co5//QbTi9f5fNas29Dx9weX7JRjt8XcPlCfqDn+K7jnfuP+Ry24tTyInDvYFvWEPtSrUoxgLoScU5mE8ez8ePO187uCdeu2uv9XXatVaIQnqmzgrYMahMLH83pY+qQunlokELunlvPpfgXIMJHp0iKsfS2xV+8DgIILCeN1SzZte+2R0DisrVQuKTMskL/kMXsQpjLcbYMncsjHcxiAPVSguw04CyorQ2etm7UqbsHRmdJqGXSIyZGDIkhcJgjROVLitsfykEcgGNfh32+uuv8/f+3t+jqopAhVLs7+/f8Lso4LeA30Ucb4M42f8vj9NK3wb+
B4hT/m95XHTpV8ueDbjKaCpnSsYrVGvGaoHM56sM98qmiF0AUzD1gAW1qgvxgtGarCfZwKJAgiLkaeYtM4yBEDNDcbwml5uVCNYqMCV61jmRYiBm8GWGOFAIyBFSB3KmqRpqW9P3UrYm5d2IkFHiqXWR6dIlLJ6IKNBCW6m0wWZFdYMb1JTxgszLKaPRlUFhMapBK0fdNrimYd5YlnXNsm042luwv7dgMZdylSrqSxkZz4opse16zi9WdF3H5eWKcehZr9f02zWb9Zr1aoVt92hnC6ga5lUmh8Rme07dzAijx1UVKgbpfRcWr5xjKa31KGNI2WOrikKNi3ENOWqsrTEBcJEheYbgqZxlGHpGL31dUwgVnrY91ilQE91CvvpbQcYiLd3dz1wCTIjE4Bm6Df12U9C1I77SxMqi/AjrM8K2Y73tOO9H4UsATCgyltccZt55+sed4GNZbubx+6b0Kl9lunnix8/XztGPna6fvHafrl2tfQEsFWETCXSlukQqHMhTBQBpbTnnSDlJhShFtMqYJPKkttCRqhhJUWGdw1VOFMWKIywJt1SXnDBk5QCyiEILOSmYTVSTjx93yVgzGKXLORIEp7IDck2YlqsqIHn6FCIgosp0xw6wdcN2nVVquVzyyiuvfCoBxvXPFj9Gm/75TK6p3CZifsVVb3f6PAFBMV93vA0wEcH/amM7nsE4EVTWMKsdhoDLQco8xXE6bYXzNGcBQBUAgcyjOpxzuy8wS10HudxTGSLPuKqSrLbP+JhZbTzrTviDL7vigCPErJhpxUzDvGn41ou3mFWO2czhnObd0w3vnKzZjIGHqx6fprNHZoOFmjUyDhsInnEIhDFhjSZFhdK5lL0TRiNzslYo3lQEh0YY6GQjqOuKqp7dWIaWcmboPd22wxorG4KxWOtoFy11ZXntjRe588Ir7LnI7SZgCDx45xc8UvCGPaQ9Kie7ku9j7EfGEKmqmuOjW9y/f58HD09Zry55752fs7q84OL0ktX5iuxmvBoSrnbsHRxQm4Zbx7fh29+l3645v/8RY7/l/IO79KsV3XZLt9lQO8PBoqVyhoODlqaxnJ6f8+jshGGInF8ODGGgO1mhM0QjghrDeMa7R47z9ZLDvS053uL87O7NzkZ/SStVQKA423KbHHCeNJC1/Nv3W8LQcfHoPg/ee4dtN9Jpg58viOcnpI/O2Fye8+Dskm4YOe1Htj7uWjBD4XKOITCfzzg4PKRppvKc+pywTs7B6aAzGZULll9dzYd+flz4aU73KWS8OdMNnotNX4Jzae1UMQuDlTNoa2Tzj7mg3w1aw6ypmTcVlXPYSgnCv4gOVAiDnjMC5NModFsUvCqDcpZ+6OlGCRor5wqHgCeMgRQTwafilA0KRfIeFSNovSPSSLt2gGSMKmdiGIklY04hE5Nwuwu1pUErS20lGdhstvhCiVk3rtBmaqnMWY3C3Vg4tLe3xx/8wR/sfr99+/Zn0jtOttnAv//3Aqia7OTk03q6kz0Avo+gk8++xNE9Av6ba//+1bWnn/EqcEZKYToldJQSi9FqR89mtSZFQf/ugBEoqjIELyaRX0iCfJ7ABijJdEgQs8eHzKaLXFwODCGy7iMhwZgUKQvncmsVM1Px+vEt9mcNhwcNdW2w1RljMpxtey47T/a5aPXKNqJVhhQJY4/SAT96ok/ESpOSkb5QmrJv6W1PvWyUwmKKgLmgLpvKsFy2Nxal5pzxo2foB5yVUn60BmhwDtqZ5ZWXjvmNt16nST3LcMmwveQX792l7zYcv/ldSKEENxRyhsAweipbsb+3z8nJGWenF5yenfLu+x9ydnbCsO4Ztz3t/m1SlD73fHFM2+5xtGg4XDaM/ZbVySOGvuODd97l8vyC89Mzzk/PWDSW145nzGrDC8cNi5nl3fff5WfvGM7PN5w+vEffDQyrLXHw0NQwa/Fxxb0HFZt+wUtHiUqds7p89LUSN3zBL6L06R53viXNFbpBxPX5sWfY
rFifn3J67y5dSPT1EaFtiR+tyPfeZb3Z8u7lhiFEtqMvGAX5jN4H4dOOgaZpWe7tfSwb+fRza8qKrziir+5TWTGN7H05bNbHu9hfj/PNwDAGNt24+661VoQse0qtNFa4KkhZRgOdMhilaOqK+Ux0i61TID5RWk6lN1xZTVtZ0dytpIwcNUQy0Y9EP0op3xoMipS89NeDcC+TZQ5XCcMAsVQlovDgSrm7VL/k2k+k4AUYJTTzUmrOGW0NtpZxR2PE0UfvWRcmrboSnmoBegnSWjt7Y9fAfD7ne9/73pd6TtfB978PH3zwRZ9xBvwZV1SPX7RqdYEwVf3q2zMh0LDWijB9yJBtAR1JdJ2S9BFTTPhUdDR3ZU5BMANlM0PQy0g2KapEUiJLMdENgW3n6YfA6Is+LwZUIcDICmPAGZELbFSgUR4bFFYZmqxYuJbgNPNqRKtCQ5iCRK8xMXrPxXqFUYbQBenpWItuHE4ZbHRCXydXAdponLMy3qD1Dk2dUsYYIPvHS3lPstZKYW1FTJG+72lnDXXTMp+13L5zwHK5x+FBy3JmiRcD5/fvsV2d8+j9u3T9ls3pJbH3KGvRtbxe7QqJRlZQStDnqzVnFyvOVxsuNx15DAX4IWjzFBJ+DEQd8A3EZMm5QusGrTNaz9DGU9eR+RzmjWO52GPWWPb3GhZzyytJ4+qW84sNxh2x3fScPTxlu96SKkdqa+pZIzOrCk4e3Uf1Z5yePHjqjvdxCpTHv8sdOdHk3FSpd0QhQNherFidPORytWWjagbjGc0pXkU2/pxu23PZjQxjYAyREOTccUVubjabsb9YsLeY0zRNIdK3V4jFT7VrZUuuADuTzF6mOIScuSKLuP6BPuvj7j7s57z3zVjOkhHGdKWTnbIqc+RQUXAhGYxSJANJ6yLvKeXclENRObvK/DVAjCTv0UajqwqjoB9GOj8yDgOmXNfWGJy1u766tgJiJCuMsrLHiQdFa6nyKQWhAKe00iiji/qaSP/lqCEJa5U1Ug3MKTHx9uUshB2SuCic1RgjLbGUZF44psclEJ/Uvjwz28faL1/sWZ9x/yVCInnJZ9BX/VrYM+nxNnXNcrYgjgqfBiZJPoUmxoxPotjji5yVLj3XkCTjnLaQnDNjLOAFV5cejVyAHs/Fuudy1bHpItsJJKUL7Rq6UMZlGpeZ2cDCDMxVohkCZtQsk+FOs4fNI2cz2HrPqlsx+kHYZcZIHHu6zSDZTEyQMoOuiXVDnStM2MPK+D4ojbGWtp3IzSWrCEH0eI0G4ufC/L6UaaVpmhkhDAxjT9VYFntzDg/2eeubL3N8eMhrLx1w+7DiwaMV7//oB6xOT/nZ9/+Svut5+2/cY7zoMK2hqqR8t5g15JypbKJyGTDcvfeIBw8fcPfBGavVJTNtaLUhhkQeI2kMDFvPGEcWDYRQkRJovcSYCmMvRa1l3uLsPnttw+3bR8wbx4svNOzNHS+/8g2wnouLDW984y6r1Zaf/fQdTh6eMlrNUGmUBttkMoF3fv4j3u1OuX/3ASndHCnJL7PM4zw5ZQpz+uWqvLx7tJzHcRwJwXN27z4P3vkFZ2PkxMzxZkvvfo7njEfDBSenl6wHz3ozEmIqM+cwm83Z21tyeHjIS7dvcXR8xP5yj7adSwWoZFWfnXdeZbxC5J/IU0+UvAuM5UT/rMz3uqO9ft/XbzFLYD0Onm3XoZSi8gFjNLMQqb0rM7saqzVVrdAmE/yIH5NU1lSZ549Tpp9RhVI2RI+yBt22WA3DdsvJ+QVKUdo4itoJwDMo4WvX2mJdLWDKbCErUohkH9FK6D4BCMIlYJTGWuGSjn7Eh0DODrJk58KeVcrQZJIpBCEp4JxQvtaVSCLGYSQWfXBhwXrGVZ8bswfAf8knEc2/XvZM5nhFJcShUgVOymC6oPFEjaMod6iPbWHq+mVdBtgL7F5mFI1QP4bA6CM+BkKZv5P3Vpgdi5pEws5krJb38yEwjmWOWGl8UORsdyLX
LiWcMaRkBJxB2vWUckGVkjMxC+AiFnq4Ky5nduhLQV6WbN7I37QCVfiTb2qxhYzdoo0wyBwe7HN4uM/B/oL9/TlNbTA6oYhkP5KCRycZn9Blq5ocxzQykct6KyXKN5JpSM88ZgXKoI0VfeMMJFFQySGW+cSJrIRrrysguWn0KSfpk/sRRpdwjcE5Q91klnv7aF2zv3+J94rBKKxRon1qRnIa6UbPeLlm6IcbjfZ/mYmSTNghXEV7Wk7S6TMLFqack0BOCT+OhHFgGAa6cWQYAqH3xNyjfYfNPbEb6b2c25Ih591nq+uaxXLJcrFksZgzn83kGttpJn9Kxlsy74lNTbiNZZ40xigEMDoVOT3HhB7aue9rmbt6/D/XXv/pmFYC0pTq2XRMuQToqTBIlXmCArZShbBE+LwlW56wkTIDPIGyNJW9IrDIZTY2Rlmbyk7a1IWKQ03KWqqMHBmIWqoDOpN1Lu2na9W7NPEqTwj0qd97LVjaLe103OVzFtlBa+3uOIT9Ku2O81nYMEh/9+Liiijjsy0jygc911QPrtkWOEE4Izdc0UL+etozyHgVVdMyW+5BaEhtIxeBlnLYMPR4PzLGQFRjGR2aZmzLJrFDKyqcLgPm2hGxbIctj85WDMPIpusYwkDGFPYlQU8rdSUGsNSZmU6krPjwZEVjNEQDWdPZOZ2ZM8ZE21pspVCqYfRaFI2ycDz74IXe0E94vERIEZcixCDDgPLRhXVGrvpd30+XkjMp7VDbN2IZclbs7e8zn1d85zvf5A/+4O+wv7/grTdfYD6rUR5UWGNVT20S88rw0q3bhJg52DumqpcoV/OJkmnZpFEG7WboaoaqlzBC1dQs6pqmXcoxxEQaR2LuiL7Hh05E7fNIYiwMX4kcEkklgvecna3ZaM32IlLZzMHtlsMXZmQ0x7dfZu8gkdUex3e2bFGsMqQ84sM5vl/xsx9/n0cfnrDtuqca7ccUuNieFYo/jTOOpgSXAnaXkC0jlRyHwvuB83v36TYb7p+eca/v2Z6d0713Fz127A8nmNDxwUcbTldbfMiEsbxGQW6//MqrfPs3v81rr73K229/m/liQTNbkDEIM7Bs/B+P6Sa3O236IQROTk7YbkVm0VpHXTccHR5eyS5+IrN9OlntZ5kCmtqxnDdYnUleqmhNbUsgBxRhAqGVldaUc1f8yDHmwtmeCVFaQ9rIPtM0DXeODlBKMfQDwzDix4j3mbo21M28ODyQopcmY1Da4eoarTTRS69WglhdZvulFOyj4CayB3o5RypXYV1m9IoYJcEQFj5QRoLhVPYMV9W0s9lOmS1npP/f94/TVj5le/99+JM/Eed79ksxUgkBVP0l4mSHj/3thwiCuePX3enCMwJXaWOwrkJJiodEiQLgmTiNDaCM6ImqrEpvd8ocJatCK3QBGGQsKRtChK73DONIiEJ6LtmUgCVsGe0RgnFDoxJOwku2g8erknUnTawrYh2JZMmolOgFq2xJWdSSYopkYtm4ZDAcxDFdZcFlY8rIBce1vpm6QpomMjd9jWhtaJuG5XLG0eEhL7/0AnvLGUeHS5raMlz2jH5AqYDRQmYyaxtSFOEEqUQIcvzTv0+FMhalHRgHpsK4Glc1GFvtWgKkWCgzQ0FxBlk3EpRNSOmJOk96wglFGgeMjtiZZjbWGKupmxmuUiz3ATXDCP8JMQ4MY0LnTIrQb4erGcunZDlnet+XfpxkSMnIZSYjP7kEZxmDwmRNjIF+u2W7WtH1PdvgRZru4gzVD9hNj/OevAoMPgg6NyGtCy1Scsvlklu3bnF0fMze/j6z2UwEKihymvmqkHJFXzj13wpzW3G8XdexWW+wThyv4BD2MNl8Cv6gZF9TSvYsKppKlRFFS/AivKIUOGtKqRyuMnPJYnXZB6ZqgGS8Beg4cU6I2DHWWJqmRQFDP5YRomkcSe1ATpmihpVlzWEaIdQklQUdLiU/OQ6Vio5vITqhlLmVQjupN8WYy3dXAG5c
I+xIZa80hqquQanC0CUZfihUsU8z8JxaLRlYbQRQ9fkkGdMzAiKJ8N5nPO683P562DPJeKXfZKSUbMxj/SJtZDOyyuLKSaOvkkS5zBU7dh5rG7TWjEPC+8w4RPy2IwXPwmrmxpGSIkVVgE0V1hr295Y0tcPlSJUjjdZQGSLgBwFZZBNAd4iikhFRB7tHyoqmrWlnNaPvuVzfx/uR88stXTdS12B1RCsPDJAtk5ZwqSHJxaXMbvObFuEmL5GmbfnN3/pd3nrzNi/e2eeVl25xNK+obGJ7/pBBZ/CQg0LhqVpLjhbXOmJU6MqBsQLz/HimZBTZaXAVul6imxHd7KODwS0WtPMZ1XyfbBxZa1DTengiIyl7fOgJYUTrhDVgnKbKQiU6cw5Txsy0ymTr2I5gEtSlQtGlQEdgMw6sugGIOJVwKGo3o2n3yf2afhx4Wh5h9APvf/QL6qqSsS3bsKiXoBSxtEu0E0IQHRK6D3TrDR9+8C7r83PO7t5neHhCXq9pNj1+HPn+asN2HHi/H+hjKk1kTd1UfOO3foP9Wwf89ne/y3e+8x0OjkRY3FUVWRUFnIn2UL65x4rOE73qdrvl3v2PhPrzvbtcnq+wlcFUhoP9fZyzzGailzyRJew29MkZ6ymQvDpZdtCqrzkpNoUTQLc1tZEZ1rqud8x1SimiD4y9xyjhA0hJobXFVRVpVBIMltEfyU3lfymK4hY5CyvbMDAOoQTaCrJUFLz3QlaSp3ZZxHuPUopxkPaAUQarDZCJJfBMqkxhl/1NG01VVSilMVYREyJpaCzaKOrKXK1nLkxZxhBCpO86fAiElKAEfk9TnysCP0aoKx7yuIj9p9sWyXQ/z+n+9bNnAq6SxmwZlsv6CqnMRLemSQpsSXAnQgtVBs4zAuVHa6xrUdqQhwEfRrxP+H4gR8+sEaRhDBBi0eysLK5yvHAwZ9bWqBhQIcjAujbS48ob4WQ1Aa16UBZX1Btsu0Cbmv3DfY6ODxiGFY9OR/p+iyJgVMAYMDoWylgvPaasyVleQxWFE10c8ERkPvV+vwh93xexum745tvf5ne+8wpvvHrMvNXszww5e7rVKSmOONPidAU5UNWGHJxwIUeFskZGiT6N7UgrslXieKsZqhpQ9RIVFG62R7Nc4tolGEtWQl0nWEtPwhPziI8DMXqUEserKg1a6Cvb2gmhinIolcnG0o3lnHBSrh1ypM+ezndsNiushqpRWBSVa6ibBT76T4X8fF3mw8i9hx8IothVtG7G0AwopYlFzrJqW1xdkbuRvOrYXl5y/95dLk9P6d5/iL93iomeZhwZ/chPN1vuDz1jEIIWlRVWZVzteP3tN3j5jVd4+1tv89Zbb9E0DU3borTGB18ynqvP//FzK5X+57bb8tG9j1hdrnn/Fx9ycXqJrhSmUdy+dYsXXrhDyom6clSV2znd62QOQpN67fx9ioxhViucUdS2QrXicKuqEq3p4nz7bcfl4NFTXzfHHY9wiFKZkk5QIaCY1ijJWF6KV1KB4xhJSRjtMjIhIfSzofSMP+Z4vSeGRGWrHWAtk4hTDWTq+StBJ1dOKk2mcJULjaSgtGVWV+/2C/keJGPvx55x9KKsdk3k4WlZAn4OfHGRoR4pL//3x+nCM3K806gJKckw+VRCJhOjL7cyl7vriagC+RdCDV8EvlEOY0T3MiePUZHWZbCKvZmmsooQpLlvq4p2sY+zjuW8pqmsiEXrJOxXthKiDjx4RVXXVHVDQuGzgCSa+QxXtSz3WhbLGld7fNyjri2rVcc4eLmQcoQ86WxayIaUy7xe0RWe8K4T9/RVBfBmLpSqsrz+yh32Fy2u9LnG7QAEdKEQ1OW9c4p4P+LDKD3IiUXAqE8Zo1NMybs2qvQCpdc+bV4TMbuxGuc0bWNwlaVpK1yZK83aknQiaSP/jgFyxmfFkKQkb50RukpliMlgooCuUobNGNgMQozig0cZBQigqJnNWe4f4mPPDhH2FCylxKbbEKLH
WctoR0JfBApK9aD2A1VV4Vcdw6NLIQ95dIo/OydttuADffCc9z1r79n4wBgTPiZCTtS2Yt7M2NtfcOvokDvHx+wvlzRtTVW54u8Ee+BDwGWHw0lp1Qq6cFqNED3j6Om7ntX5msvLS07PTzm7OMceGOzSoLPho0f32HRb0Y8eBpm5n2bqiySYcXYHkBMk9dPpAysK13Jhh7NG2JtcAT0Z6wobVSbOxt2oVEqJfujRRAYvfdaUpxZQxmmzo37s+54UI8MwMI4jIWRihBgiIUQ5R1MulS053YROUnK+EKJwE+BL9S6jtZSCjTG4amq5idwoTBWFKcCRDB2VpAedrrWoSh9XMu7CM6AVxtpdWfrrth5Rvl0nOLuPVIXvIinwr6EdHR1x+/Zt+r7no48+YhxvFkH99B1vzoQYGX1ApQDRc0URlhjHnuBHQoYYZU5Na1GiGL1sJCF6ei8X/762VFVFiD2kgUqPHM7FX9ze1zS1xnuRl6vnCw5uv4qxDqcChkQ/wDAEtLW4dg4oqGQofTFfsDdfElKiG0eUNhy/cIvZYkndVDSzGu8N7exFhr5ju+5ldGYc6YeIyhptKox1qGzRmF3gMHGuSxtYQBY7NPcN+YjFrOV7f/M76LRBp4E8blmvVhidaecaO9EpJgE0bbs1Y+9LNUGRrZYh50+5cFXpFlhraJqapm4wyqCSvgKqkKhqQ91amoMampp2v6VZLuiMZbXqiVoRTUW0spGlnPBREXzCGGiqBuscvVIQNRqDGw0pRR5tBi5WG8Z+yzBswVkyDmMs+8e3MHnEp4EHD957Wn6XEAMn56dYIwh6o2ocLVpZ6XsbQ1vV1K5i/eiCk3fuEVcbxp//grTekIYIPnHR9/xsdck2BE76gW30khvlxLxuuf3SEbdv3+LtN7/Bm994kxdefJGDw2X5YgQrse2kLFo3NXVdC5jIVCUOkU2/Gzq2246z83PuffCA8/Nz3vngHR6dPaKqaupXas7jJfxUsbRz3nztDY4OD5k1LfPZfIfyVUrRtFLiruua1l4R3Yh9vbrIVitqK0IglasE5VxQvnXTUtU1g3NUWhGC9NRj9FxcdJyX3m6MlH5xFmyENdgSEK4uOtGOXq3k+k4WHw3GSNBijC6ZvmIav4gx0nWCMRA+ZqFwHcZRGKgqqXpVtaWuHZPjzblQHJRRJlCl/5zRUUriwngle0aKiRADMQpvtDjzCv1LGKVu0lYIZ9S9AOO/B/4Cadv+8lrzr6S99dZb/IN/8A+4f/8+/+yf/bO/Bo632ESTeAWaSleDDQpUkefK5UTMWZSERu8JMYjAfM6E4NFayWxbGCEHnIHKaOYzx6wxDD4z+kzTWtpZjTEOp6z0DgtVn3aOqhX0rsqJGBztbEbTNjKSpKS0XVUaVymMzaACqLgDSUk5yJSfV+MFArQo5fWJyurxxdj9/PhExpOYUlBZwCdyDpCT4FuVbFRGF8rKMj8xOSdtjJTDp97uJ45nSpnkexMO2ozVGmeMqB2VkmRVGeraoGuDqi11bamdITkBwRANwRoZavSTQJqU7FLW6BCIMtMhZUMyBENMiVB4cmMMxBBIugCFlIDKjHVo8yll8q/RctlwQfp/CS9KN1p0oWM06JTI3jOs1/jLFXGzIfUDeRwJPhNCpg+BrY90Ufp1KQs4BwXaGZrZnGY2x1UOa4U4xvuxOF5FiFHoDIde+og5EaItilkUxrRE13dsuy3D0BcB+Lyb/dSpRuMIyRB8xAdPt+nY2IroQyGqKDJ6Su8Q1inZAgS66gOrj80u37QZIxmu9ELleCaheDex4VlD5SxaQSw/+97jx3EHlpQSs3x/E4mFygmV4k7ub/dF50JQEQI562np5fNCWZvyiylc9FqjlZFycqWL6pkAqdJEOpMzWigBSpAke+QOuJXKaGVxzCnHK9pJxWMArKfVYkkBui1sp2mg7ac9KvNJVPIF4qGfnVlrmc1mVxUchBpzPp+zWCw4ODiQdsx2i/9s
nssv95438ipf0sTlFihqFojmRBCvtUVbhVMGtCNG2HQRHxOXm57VRma8sopYo7H2ksoZ/HpD2G5pTOBgppi1jt/4xjEH+y0X657VZsS1S2bHexjbMGtnOGPZdBu22w3WWdpW6BpzwTo4Y6msw/uR1eacTKaeRbTd4lNi6CJjHzi/2DD0nhAUxtRUlVxw2lpiKHJ6ugDCpshfFXHuHdqylLi+MD3aF7DkCZu7EEZUDFRW0S5mGAN1Kz2jbRcYQpDPayt0bYTtyDUSiKhPAqsAUgrEEElhIIcOnQYOmpoqZV65fcgrtw+488Ixr7y4RzNrqY73ME2DXcyw85ptlamHGeNgWeMZe80az6oXYohuCOQMq80FWStcXVO1DdoYqqYm54z3HTl6/NCzXa/ITc24nOG0KmpSMkrzVC3LKR0TJJXxRKDHKCPsZCiGzqPHQHhwCu++j+5H9HpDGgMPhsDDMXHe95xuO8YU8YULS8slwXx/jxdef5vj40NMbfGp49HpPc4uTzDG4qqaGBP3Hz5i23VYazHOUdeO/f250CkG2awvL9dcXF4ybkbqqmI5n4twxrbGqBfR4xtYLIvUMIuasw/OOL97TtSBYALOWfbnS6q65rXXXuXw8AitM01bP1a9yXnqNd78aItSinnbcLi3t3N82hjapt2VxI02OCosMga3t5gTU+TDD+9yenZe/KhI6bW1whrDhkjyPc4oZrXgTJraYY0mdIneR4IfWa9XpfdaYY3BWGmRGKtpGglGnG0wWtjFXFWJNGotesA5BHJKbNZbVpdrEhplJOHwMcmYYczkFEhZEZiUpq6ddOQdkltETS1Zmd188Ndul8C/RCiSTz/rQRH498ho0GQemc99dnZ8fMzf//t/n+Vyubtvb28PYwxHR0f84R/+Iev1mj/90z/lF7/4xY2857NxvCVyv1JjuWrBaRSqqPVoYwVYQxTRAx/o+lG4VI1Ehn4cIGnC2BPHHuokDqY27O+1HB7MyVo2QdtU1G2FdTXzxRLnanAWrGRfbVtLHywbVNaYEs3b0RByR8oRYzPaBGKUvuIYopTAfQAU2lgsCXIZMciqjCeoK0FfpR4DlMldU2h8Ux3e0hfyG+mjpyRZvasxVlFZVXxqJEc5FqM0GKQ87kRWbAcn/+SrI+IDCU3EqkxbWVSqWc5n7O8t2F/OWMwr2nlFPa+wbY1uHabWmGgYG8eoMrmx2GzxVtPpJAL30Qt7Gaq0EDPaic6pBDNJwmwiOQVi8MRghFiihDP5KaI5r1va9etFvCOTiSphEQRs2m7JmwEuL+HyEjV68hggJvoQuPCBdfD0IRCyMCoJzan01Ku6YrY8oJ3vgYaQPGM/kpLwlNfNjBgj6/WKTddJf9MYmrpC6YjRmhACMUUuLldcXFySxrQjW7HGSpmWGSoeYpTG5YRNmW03MAbPoHp61VFVjuQDbdvQ98eEMBJiLTOn18aLrtgmvwbHi6B+68oxiUUYLdmtLVrSChnjyc7uQt2YhABDGKLknNLKEE1EoQjeM+SAqgzUrsz/ioauGQNKSfIQgxdkc7JgdCHzkOqPczJL3NYN1lZUdU3dNJLx1oJOTuNIihE/hh0dq0IIfmKStlTc7ZVTZeraWBFTpaxUCcvnzUqTiLuRsZuyXYvs2kumAenpfvTYI3k80IqIk33nS7zbx0tuV33vL/Z0dYXx2B3w46/ZNDNeffVVjo6OPvH0pml49dVX2W63/OVf/uWXOO7Pt2cwx6twztI0FTlkko7klAghln6GsILHkPDjwBgil5cbusGz3W4YR3G81kpmuk0Bo8CGgE2KytQslzXLRU27PKJZznBBoz2ougJr0JVjdnyHdraP3VxiNyusStQmSElsCgZSIsWINTDXrZy8tnyRyYKrUTazVHO8TyjbstgM+LHDDwJEkaF8yErLhZDTVaZbxgesLpFviuQbZJlRCqoqUxmHmyQJVZQsUAnwQriphL/WOovWGVNVGNc8Vnr5uNXG
oFG8cuuQv/Uf/Bar1ZbTky3jEHj95WNeefGQw4M5b77xAlVdYdsGZS3KNmgLY2V5yc0ZB8fdfMFaBw6S4patyVmTsKA0pq6FBctaIXtHNqEQAh/mFRdKcTYqUqWobMIRMBlU8uxYC56ijcrzvrmPSmUUJUmG1aJ4MxuqpPCnA+Ghh8s1atvhfeC0H+li4n7nORk9vY+MWSQ5pJSv2NubsTxsOTxeUi8gu8Dd+2ecnm9w+gKnL0tp1ZKzZb1e4r3gI3LObI1h/XCzQ9JTWLZygNVqzbvvv4cfR1pb8dadV4l7c6LrmCfwg4wx+SSz8c44nBXAlg8Bhp71Zk3dVIQwEuPIdSH4abNcrS+/xrlS6TXr0kaROV0kkJMBV8wU9BdmKZl9LX1ZZWTqoFyX02btrGNvbyFzwdaitGZvNXKx9QLmKiXtdlaLGIktHNE5EtOINZY7t28xny9p2xntfCbARFtm1ruOGDwqQ7ftiDEx+CvMh1ZQOUPlrqplcOU6rDHUzhX93mm/KfrZKRNSuFFwYYzwk5/Ah9cU+bZbWK0+/sh7yHDRVJ5NwBdWSkBk/n4LOLx230Pgr/jCdJFvvAFvvSUMHj/8IfQJ+A6i3TvZPqLz+/TsmWS8zlnqpipqPwIKEEWPq/JzCpFhjAxjYLVase1Htr2wWhldNjUgdgGVMwtrqIyhshXz+T6LZUO7OKBeznBDxPQeXdUoa1BVRXt4m+X+bcx6gW0v0XnEpUtUjiQ8mSijST5iMszqFnLhi84T4XrG1BpVG2LMODfH956x2zBuLxnHkdXlWnqRGBIakvTXJOYqzlcbjFYk8i/RqfxyJj3ezLx11JUTBObopf82Od5yHe80RDXYusZUFdp+juPVmlprXjw+5D/4nW/RdSOXZwNhjLzy8hEvvXDAcl7zwu29KxDXNUsY4t6MfrDYteI0BkajGOoaYx11u4exjtlyn6quiROaPUY2Q88wjLCxVEFBB30F1mYsAZ0yKhbg3lNm7RkJfKAforNGZ03KkRg9+8AbSVNFiGce7kbUMKI6kfB72PdchsiDfuR0CIQ8bS1SQjRKsVzMuXPnkMPjBdUMMJ57D89JQbGo3mfu7qJULsI0LTn9NuQjYpA+eEqZUJQtdRlHqWtLXVnWZxt+/IufEoLn9976TV48vE0/g871mJDxMe4IYjKKSjU0dkZSgT5siUQ22zV1bfFjzzj2BVFshdEpiiNZb1Y37nhLHghM7dSiAlZGaXJKpBDKOsL0j5hzSYakxTONEVGG+lTBMjhr2FvOqOuKxXKJdY7FamS5HneO1xiZ7RcnLA5/HAdWqzOMtdw+Pubw8JjFcsliuZRmW5a5326zFrrQbuDs9JxxDIxhKKO9UulwRbkN2GWweRKzKKV02Ud7mSgojjepXCpDN2eT4/2zP/tlj3wI/AnS1/0q1gDfBb5x7b4fIQNLX8DxKgWvvQZ/8Afw7rvw859DH4HfRJzvs7Nn4nhjDEQ/koMnR09OwhWs1FQgvCqjQBYx+ujRKssJaBS1M6hCsKFSwjlxLvVsTrt3Czev6VOL7irWW816k6gT1AuHUY4UIYSMNg31XKPyiMsOciDlgZQDKkRMKEo7Xn7u2GKUlJJi0uhoSBGc2yeOibFfM26X9Nsto0+ocSD5EnnnfAXvV6JVNhVkoqwCNwZAKWMGWSRZhAHMyqgBRZowZyTLnnh/lfz98+b/rt/fVJZbB0uGNjAzA95HZo0mhm0JIvKnvo4S4DTOaPb3FhgFfvQE7wupwQxtLHU7w5ZoPiNSkQvvZARmc0RbGWZVRW2ECOHW8TEaxQPVsZppNqv7T3eOcUz073RUpsJpS06BHEbGnDlJmTFBdZ5wfUL5gI4RHaVd0YXAGAIhBWIu4BqtaFqHc4aDwwW3bh2yOJjTNDKOojJkA+iGMR+h8ohOGxQRk89QBCChTWQMifN1KGxI8n1rKxv66eUZ3dgj41xR+ohW0LZaRbIJxJxkFjkL2F1rwTEs
ZktsZVgslsxnc7z3nJ+dI2hB9Vhlb7vdfC0Zrx9FHMFoQTYbIw7UaAMxolJCKXG04pQSMcsJb6wtVVyhiKycxVmLMxFnMlVtmc0a6rqSHq+1tE3GJ8mup4y3aSqMlcqSUqCJjAXgp3IW5rYgDG5Kq1KZ0ERnUTlS1xVt22BMICRFjBEfNTFFjNUYa3bczWR2kxBaizJbUBFnRPylfDnknAifEvg+qX32V5gQh3uO1J2/qNNXwC0ez25rpGF8HdB094u/ZgZOFfxEMVvPeeH1t3BDApZcLzXfuQPOff5LGWN46aWXGMeR8/NzHj58+ETn8bMZJxoH+u0alUZ0EPFqnUtvU0uGorOw0SgF3g/4YYuxjqqppB9bubJJdOQYmbcty2XD3vEdDl75Fq6quBgz52Pm4SPD6YPI/pHi6NYMp2b4QbHdBpr5kuXhHEVEM5BzIIY1KY2CZsyJ6D39ekOKSYAQOWPrBtu0BDRdsoLAZYbKDj+s8f0l67MTBj+yXa0YL9aEYQCtUcY81skNqtAJZriCW93AUpMZY5Co12pQhcVIKZSbENeJ5AMpivYxSmGswTiZh/1ltpy3fPuNF4kxs92I471cnbBan9C4SM6f7JtAyUwU6Mry+qsvXYlJlGbg1POeEOPXP1XKQgB/Z79htd5wcrrm/sMLmqbhxRdfQmvF+z9dcPbgHsP2hD/7t+pGKwmfZ3EVOP8X5+zvLzCzlhQGot+wTYkfDZ4qZr7ZK17xCp0yOkT6EFh1W06DZxUTfZQSc1QC8Ns/2mO+aHn9zZd461uv42pLs0zAiK+Ey9ePh2z9HQhnqP7nKDyNfherFMaBs4rL7ci79zf0fWQcRmKIjBGGJMo+vU84V9FFzxZP6xqWiz3C0LPdbogqEJUlYahNxlio2pr9F4+p25qXbh+xv1jwwQfv8e477xCCZwxjuV4qjDM8fPTwxvu8OWc22y2np2c4K+OFxmiqasRoTW0MlZaANivpPSdjdjO7VV2L9q1PGG2YzRrausIq4QXYW864dbxPXVc4J2h/bR11k3YZr9KKqrJlz5J9q+8UeewBjQoJ3w1EVxMHj60tbd1I7z6NeAPL5YKjw0PG0WNsL0xUY0eIvsgOCkezVjKLPalI6RIo+xBIPhB0RNsKZayU1mO4QmN/7RaBfwf8d3y5eSKNlJX/FldO8RL4F4iznSzwOI/zL7GfAO/CrVdu8Yf/wz9kfwmSSV+ZMdA0n/bkK6uqir/5N/8mv/u7v8uf//mf88d//MdPREf7bMBVhTVmJwpQIk/ByE9Mn1fjByklYpLyrp4ehyramho0VHVNW0YsqnYPYw3bbiPC4KNw9xILzCKXwXcfyGiMrUs0bIEIJqOTg2mMQHm0S6ACGYkilalQxqGxWFORMFizxKgaazXWZMahF1IBK8coJd7rwDL5fDsASoHi3GDCK8jjdDViwA44LWs+oR5zGbyXt1fXA8LPNaM1ptISlMSMs5FhtPSD6IJ+FlTsauxCoX9ZuPkpFmNkMWtQZGLMxJCp65rD/TlaKTYHe6hxy3zW3CBc7QtYgrQNmJmhzo2UOb0iBwidCEHkqNFR5jCHGOliZExpJ4eZSgCktaBj21nLYjFnNm8lI3JCFpGBpOV8CUoDFagKdCWZsIRSqKxKQKcm/hZ2J4LyaB3k8bkg8Ms5Kg31XCbgJs5h2fx9DGz8lmSkQ6aVCLMLtzOMwyDXXgxFgtajg7Rubt4FTDzTMtqnS2VJ60DSGqcoCHdpZWWlCuuUVBSscwQvWT5c8TgbbbBGSs0yGmcLuErjnKIuTF1Tedk585jjjUH0eaeh/RyjSP7FUOZx09XYkRa+aWcF/W6taF2bWJirSjB6pXZV5BbUVXXQKI0pgiu6EOAYLWNMN7XmMQom8GqqZkRKydM7BGSe6Jdp5TrEAU7BvS73XQ/KAjKX9GV1dxXSs62oVUOtFXvOsJwvuAZc/nKvqBRtKzif5pd56S9gz8Tx7vb+FEheylsT
AlVNjjdBUopAoEuZjfhDtIfaSOZpFMyso7KKOy+/wiuvv8r+4R2OXn6bMHo+/OAvOD95hE0DM91QK4uJnjz2rM9PYNth6zl7x3O0s9hWItCcOsieOHTEsQPvcWlOLjqZKUXJEPqMthW2vYU2Fe38CFfNCOMFYZiTdMbOLcpn8jqSrEcpcw1lV0rPpZ8kQcjNpWYpJNanPXvziF5qsi4zlyABTkrEccB3XaHEk+8hIhf0J/jwP+871Yq6sVTZ4KpDDg8bnK1Lye/mTWspUc/nLYeH+7z2ckDrQhafEwv1Kt3tPf7yL/+tlNafluWMIfHy7Rd57bU3MecPsPfeoRsG3t2OjDFjQsJFxb0Q+NEwsI6Bh96zDoFRcLm4uqJdtCwXC775rW9xfHzMiy/vs79clPDMFxVKCUizEqS/cS3V4lVyynRry+g1KihUVGA8r726JcbMMFhC1MzqB8yqezw89/zZTzakLBKAtatIG8+qO8egmWkZ1QlJFLk+enCf9z/6kMPDI/7WrObw8IAXb98qPM6KvuuJSaHNHkZZxrVk2MNa3Vhged1ilBn/SXJSa01IksHWOzaogsTPyPx3hqapOTpybNZb+u6UqCJKgTWKWVszawx7+0uWy3nJaEu1xkDtculjC5CrqiYqx9IicAblEzFKi2rsOzpTBBwGK0GPUdIWKA68qhygpZTtI2Po8KE0o+SDYXVRiSqym7kANlFaNIG1IRstI4xWkaqPs4h9dTs/h3/yT+BkNwH0HvCvucpsM19sPOgV4O8i5WTk8/Ee8J9de4xH5pO+rDXA3wNe4VvfOuR3f1cxn8Ns9hVe6muyZ+Z4FQXiXqLTK5PYfBIVmHoxwmQ1adaCJ6GMwtSi0zpfLNk/vsVi75hmfkivOvousL7csKwijbNYpVFJxlD80Mu8ZYxCFmFqdLWQTTpXkD0ZS0wKlT2myqQYSVhUDKRRymhWGSpVY01LVS2pmjnGJIwZcE0jJV2LsPtribZFx3bKR1JxvlOmf3NluJwyvg+kOA0YqGuIzVTKVZEUpNScUi6Z+dXRfFFTSjYOyDjXUD40NzqX/LH3q+tqd9let5wSdn/BwmkW89lT7fGWegHzds7xwTHGd1TWsQ6Bu1kxpiwy0ynTxcj9ENjEyDYmfM5ldEgIIZqmop23HB4ecXzrFotFTeUcKUWpZORJrF1WOquMNYaqWZASbLcVMRtyUqV/GVguGjIwjC0xWg5mgYP5CnSHMVtSYMcjHn0iDAGsw85aQQrnkZgC3bbn7sMH+JzYbjvms7mAtoozCCGQshWmLCzRR/yQiP7mHa/QMwr5RIwJpSO6YCmynsbeptBeWhqT7q61lrpWDP24o16cslABgcr8c1VZqsqiC9gtIfGz1qo4Xk1VROin+4mJpqoJIeFDJoVI9ELYkUnYQUBglVMyo60o/VqhkRSM1IRAR/AXmd24ElCQ80DMO1GRiCJrRZ4U2cy1JvsT2jjCO+9c36MugF/w5UXp58CbwOQNAwKa+vkXf4lPw44AUAEvofVvcHQkoOYvwqPzcSWnaVRLEsNSh/3YY76qPTPmKpAPkVJBMpdF1EooD3TK4LOoGxQvUDmDc47GwMJCXRnu3N5jPmu48+pL3Hr1daxtCSniw4BKPTb1zKqKg+Wcdlbvepxt06CqOc5YUkigkiDetCosMIk4QPCaFAwxV8TkWXVrxqHjcnXB+cUpbbvghQx1M6eeN1RYYuwYhg0hdiiTMJWiXliyqcgT12LO8r5ZCwAnJaIPhHG8sY1JZUXjNc5rtC9i88qUsnqeatFySxNnNiRtSEXc4KvZJCP4NB3e42arCp0z9iuUsZ/IlMI40Xt2tuIywcOup+t6zgbPOAbeiYqLCA/HwMUwMsTEmCBkRTYGYzT7B4d885tvsb+/x8sv3uHgYB+FZ3s+EIJmGBpSEmRuAggOgiXXGzDvQQi4hwZzqTgJmdMgNKG3X7S4yjLbfxFjLG1zi7ZpqDYnaC6I48BH
739Af3rJsTHcshY7X6KXC6gMKlh0Alc5mqL+s96sqSorDFjjQMxRWixpxNl3UECojoD51Wz4zS55KbfqXXsl64zVZjcLKwF23pWBVWldyQgfRD8y9gNJK8ZhIDhF5VoO9hYs5g3OaZzVuKJ85JwmJCM9WoDibIWDWd7DWkPbOkLI0EdCyEJ5u+3I28Sj8xOUVuwtW+racXGxYbMdGMfA6nKLD5FxHMgpoqwRYKlShT49Ywo7Xiqo96CUiDKQSGU/VdZib7Tis0L6rpN9xDMhZK5r+M534PAKjHUEfBuoqYEjlILXX/9U//yp5r3nxz/+MQ8fPuS1117jrbfeQq1W8MMfErqOHwMPgA8//PCJSUmeqeMVyrX4GKBmQvurlCFkckhQonpnDU3tmNnMvErMGsvtF/bZ21sWx/safkxsVyNjGFFxwKSeWd1wuDfHNM1OGKlpGlw7x1lHColMFHkupQgFLRg9pGCkKpsrQoZ159lstjx89Ij7999nb7lHO6uZpyX74RiYieMd1/jQoWzCOGgWFlU7ma8rpebktZSJguhw5j6QYn9jjldnaEaNGzU6SHPPaF3CZqHAI0UZNyiON6ssZSpjp2bgV3lnvq5M9wuZkr6dAMXsU3X/SoGxpqBiHesMP+k6xk5GoNIY6ZPiw6zYjp7L3hNyYgDpn2qLtoaDgyO++c1vsb+/4OUXbjOft5yfnnF5sWYcazabOTlrcAo0mKAwAWLsCfW76LGjeQj6JNMNiQ+HyOGtmlu3D7B6xuH+IbN2gaqOUdUrVOfvofIPiMPA/Q8+5Ix7VPOWN5Yz9PEt0utvkGsLOqEi2MrR1AJiWm/XGKvZdhtxvCnK6Ev24nhVIFbfBY6wVjiJb37hRfkrk8voklR8UCK2ElNEA9bBhA+R4DMV2keP73uS1vhxIIyaujIc7M9Zzhsqq3BWUVdOhBOUFXaoXLR0swi1TONfUq7WNG1FDIkQIMXIGCKDjwxjz8XlOajM7dtHzOctm3XPZjMwjCOXq43oiTOSiSjE6WulET42oWjVWpO1JhbRE1uUj+S/CmvlnLo5WwN/fO33pwXa+pjVNfzO70g6W+wI+B6wB0zn2Jcpdnnv+eEPf8gPf/hDvve97/Hmm2+iLy/h3/wbwskJf4XoKP36ZryloppTJpZsVhvZrPME91cZVMIoS+Mcoc40zlBbRVNp2rZiNmtYHhyxPNhntndIvdgnrjYM/TnjsMFaaNuKZjajXuyhmxY3P0BXDbaeYZyIR8cUBFpMR8owjltiHAljRxi3ot0ZRmIMjINo/WoSzkn5bhgvMX1iGFY045wYenIM5DhxB0p5W0Vf+qalXJGEP1mpiNEJazNVrW9s3EIpReU0WmdRgtJTVptKfzmSYiCEkZR8YRsyWKsLof501k7H89ln8VQ4l+5BIMZCjOLDbqoEwGhhURKWJMmMtZ4QzFfZyxN9bgCtyabMvDxFm0ru6+0lD04+4nx1Ru+DZC8hFSeg8KmgiVHEieRXIUo6laOqG9p2jjEVpyfnXJyfs764YLteo72jGtbErLhUmRHYz5o2K8buERdDT/Ij5jKjNplznxjGxGYDj063zIZE3Z5LGdPvQafpNwFKFhiDx0fPhcrcJaEwxPfehabZlaHXq8vCtjRweX5GCp6HDx8yqx2XlxtStAQMG5XI2uP0BUbXKC2Urze86lIyrupdOdloRVPXwiDlbOEfh2l8TxWgo1EJo6GymrauRNfXCt9zZa3IIDpBK9sCgDJGkxC1MrmOSwmykACpMkeMSrgkiGelPSjwMdKHTD8EVtsBcsJVW8Yxsd0ObDci69d1vRCVVJI5W2Ok1E9hvZs4D3YouEK0okXPvJDwXruebjLYeUbO9ppZpbijFO216/tlBJ71pa/4roMHD2C1Iq9WpJQ4OzvjZz/7Geb0FIaBIWdu8sx9ZrKAOQnowHspM1tTFaYbQQ2iE1pFnDUcLebMqgpXGaxVLGYNx4cLFsslL3/jbQ5u
3eLO69/k4MU3GMO7nJ/dZ1hf0Naa5nDBwZ3b7L/8MrqeY5YvoG2FqRcoW4GxeD8Q80BIG2KMrC7PGPuOfntJv70UjtQ4AgljC5LReBYLUSdZrT9kGGuWewdYrfDjhjj2pHEge08ePQwdjFsgkLIAEXIRv7bGoJ3C6lTYaW7GjFEs5jXOZIgjgtguzjd7cooE3zF0Gwko0ohB07aOZlHjnMTNYp9/4WYosXlmO3T02y2+H9hcrskxyaahFHXdUjcN1jqapilyjLb8VF+oF/OFzFpp7Fj75cLeJzStNVVbcffhezw8/4jNasPFZkscg2i4+oiKQBT91EEZCYWsNPmqtqWZNSyWhxwc3CGnwA9/8O+4PD8h+4HkR26jeFuJ8/5Z7zmNie9Ujlcrx4M08Iu4oguJ/sIThsSYFUNSdH5gnTrauSNpuHVrSRxfIvrMycMN+ITJibAdCEPgnbXmnjFk/YD4i3dR1jDf36NqW8YcCSkSx5F3tluqyuF05OThPcJoGPuGjet51wS86XjLvsPtfA+tP+Cm+ZqVUrR1y/5iX1jtynjN5DCrSrJVASFFCfQLGryykvGmtuLW/hKjFYumZlZVLOcNB3st87aicQbnNE0llJpBOaISJrVprCflSMxZKh7OYsnYOhF8RK8HUp/ovOd0G9huttx/uCLGyMUqUDnH0I/0/SDgzdCjNdy+vWDWNNTW0DhpUeUxFl9bJB6ncQTyTqd3Eryg6Iz/dbMW+H3g9Wv3Wb4i/9SjR/DP/7mwW/Ui4PDzn/+cDz/8UOh2+57MV6cB+TT7FejxCpgkJinR6ASF4GmX/TprSFnm1RQCPKibOU27oJnv08wPcM0CbWuUMgQ/EkNBDDqDq2tsO0dXc0w9A20Li1QiBU+gEy7okIkxsF1f7BzvsL2UPlDyKJWp2lo0YhXUlZ0qXBJJ50RKIvpQ6HUlixA9MLIvril7prIulBEOoyFnbrIdo5TC1lYyV1WINHYZbymz5YmaTrIEU+YFrdElEy2KKAh6cgL1pJRIUdikYhA57y5FQk502zXddsuw7VmdXgjYqYw2zGYLZvMF1jnamcdos5NvkwkI2UlE8uzKZ+qCyGYCPFxfJ1WEy5XaURSqQrgxjuPXSFH4SdNaMW8NUUcGImP2OyBPTFnYkkqVJ+aSp0ypvta7kmlMkW7biSTd5YrLyxUqeFTyou+iKcj6QB8TfaoYsmNIkd5n+pjZ+kyIeTdKlHfIOinEhAAxJOIoFQoZPRHpyhwTvoCVslKkcUQZjbJaHK5SxBIjep+JwdJ3K/qhIccGaCEXqsLiHASY8vWtuzA4CZho0uWdRoOUngYJplywTFeU9ouzlrapRabPCUmGKB2ZHf3klO0/XpnJQjiDKpnt1Q0URmXRzi1LH2NiGD3D6Bm9rHs/yETBUDAAKUdSFFlMxaR6VsYBM7vjl2tXS+WwMLvtbMJj5Zspjd6c1Qj4qUVWJCEubeTzAVrT88Q0c2ZY9m/giEKM9Os1280G1zQs21ZYBz/JgXlj9kwc74TKSymzHUaEolmBKpGiMeV6TSSTaVuLc4quH+m6kf2jFzh+9bfZOzjm6NXfY//oFvXiGJiTU0UcPGn01NZhzZzZwR2WL7xBNg3B7RNS4vTRQ/p+W9CQomc5DL1obp49Yuy7orwzSN9OC3vJ8nAP2pr5rOLW0YtFLN6hjaOpDzF6VrikE72uMCGgxpFwuWLcnEt0XUaG0gTwNLLZaqextbuxqpCuLPPXDjEuo2wiq0TWEhikHCRIQD5XU9fYpcLWDctZTd04nEnAQMwwhMw4Bh48OGW77Tk/X7G6WLPebHn44BHej6y7FT6O+GEU1ZbLNQ/u3iOlyLxpcdbywp2XuHPnRVxTM9vbQxldAEKZYdvRd1vGYWB9eUFKcbfRLRZS4Zg2SaUUqVTQXD2jbudYW7NY7Es5O/aoGPjpj995Ouosxeat5m9/d8H7Gh4q8O+BuhQE/ZjFWU7TzVlrYYJSilwy
lRAz4+h5/7332VxcAplxuyIFj8kRkyNdhlNEDOIcw6g0P1WJhyrhlWYwM5LKBNvhU8RVNVVVc3Rs+ea3G+q2xjW/gU+HQELbNVUd2F8uqbRmvfKMWbiDKQ7GWJkdHbc9fhTNZl82+kSirg0pzrDVgEpCEuOUpxogMcN0r9ONx/hxzdfR/5fZc3G+tmS6ZhIssApjIcdQujt5x95ljKU1ltrWzJslWsGsTEo0TYMxVm7aoHWpTuSJiOOql5sT1JUmGmnVGFuK3llQ3lPFZ7PZ8NG9E8bRs+nHwiE94rVo6XovFSkhyVAItast7TddAmjpZYfSyvIxMYYwZSoS1MUkpDQqkvyviuM1CAXktxH2qArpGf8JAlv6rNEhhdA8/iZXm6NDmK6e3E7KEfRVxRt/62/x26+/zl/91V/xF3/xF1/b3vFsxolUUc/I0vNICRJBgBBljlROIUkbrdUYDf0AIUS0qZnt3WF+cIt270Wa5TG2agFHTrr0VqNo4yqwzYxqvk9UNdHMyKOnGwbW6xXBe2IYCcHTdxtCGFmfneD7DpUjOscdLZy1hnZusDbhTM1yPicrQ9I12lQ426JVRVYetMNgCm9wJA8DcduVbE4uhChTHqQCuLKNu9LAvYl1Nhq3bIGhZNllfKnos6acdlmithZbg60ramtwRmN0Rkrj4FOi9yOnFxesLjc8uH/CyaMzzs8uee+9DxnGntX6FO8HUhDJwNXFJXfff58Uk8jNVRWvvnLB6mJL1Ta0B+J4xxgJKbG+uGR1cUnfbTl79JAYwo697ODwkOOjY4zVVJUInZcpGZrZnvBy1y0HB3dEXcdvUHHg7PTsqUb8ldW8fKfmTCVOVUafS8k7q0hEMk0B9pQqglLFaYgDFo3pyOXFJRenJ+IIKif9RTImZ3oyF6lkoq4GrTiLmVWQUqNzotKTlCdpwDlsXdPOHbdvzaiblq0/xKdjFJcodYkxkaauyD7Qac1O8yZP8nPieGOQa8vniE/TWFxEZUPOa4wRNSBtwWVoAuTk2Ix79P4WMS74UgPiX9RKEDthB3QhpZh+is9Su1GiKfvWJcvHaJrKFBpTsEZGjXYKQFMfPk/NlwzEMimQyToLYYzSPAYtyIqkpmxZqC3X6zU+iIAKGSEV0eIsY7rq1V6BFIs4AlOFpGApSmXIh8hY1NGMFtxEmKpGZOKOlOhZm0LECd7mapPziHDCe7/kecfAt7iOBXkSm/aEnDPbnHkHGIzht198kW9+85s8evSoqMula895sve8br/U8SqlXgP+r8ALyLf3Rznn/51S6gj4vyPDWO8A/5Oc89kXedMUpTxJTjijZXOwGrTCmIwpQIGpF2ScAgwvLF/gJTfj5Tfe5tXf+A3me4fMDw6pZnOiH+mGU3y3QuUIRMYwEHOg21ywPX+Ex7CJjq7v+fBnP+D87EROchIhesahI+eISh6dE9YIoCKmSNeJyLiqFIMfsHXDbA9cVTNfvIixNa5aolRFSj2+j4xjxHvwQTLGPpSrNqtd6THlK8pIHRKbi4F/8icXAL+tlPr+k613lp5unBxuGT1BgbIobWhne6iDCpUSxke0dRhTobOSsn+C7XrLhw/OOL+45E//5M948OAhp6eXXFyshBRh8GilmM9b7N6CRTtj1rYMXc/FN96CnJm3Mypr2d87YG95wGbouXt+SsgJN2/RzjISGZNnO/ScnJ7hx3GXKSgMdSXMMaMfpRe/WTOMA8cvvMoLr72FdT0nl7L5+osHxP6S9z74aBfo/BL7llLqB09+jtfk+Cbh9D7j+pzxXsAPQdS3oLD0l2xmcm7FCXMNBJNSJgZh7t7GhFGKxhoqW2gDtYx7JWXklmD0CR0y2kuZfoiJmEHFhAqByxW89+5A2xoWtxXVzLI66Vg9us9227FebRn7AWFzq0rXMEuJuuz9umx8tXY0VkqtOUcqragfRIweiLND+r078jmySO4NuiK6gWTC7izkRs5xsVzmeH2MaO/LqI2R
eVafUDERYsRHcXbKGDQTpkTkJGUUR9Y+JEXvPcMYsNYQs0IlRUgZnVMZU9eyfxQn4LNMCuSJ2CIJojoFUUWyRhOCZ7NZ46PsC6BIAUEjl06QVnJLWfjkQ8g7Td6CzQQUPhYin9Gz7YVGUSlxvMJGB5C43HT8s//fj250vZ+VHR3Bt74Fe3uPTRJ9Jfvggw/4xS9+wdnZGX0v1c4f/vCHnJyckHPmD/7gDxDayh/hfcdPfwoPH97Ep/hiGW8A/lHO+c+UUkvg3yql/j/A/wL4L3PO/1ul1D8G/jHwv/7lL5cLAYAgeu2EnHXTuAvoidqtUKppK8wzx6+8xP7tV3jh1W/y+ttvU7cLZssjjK1Z9/fpLk4Yt5flkBM+jPg40m0u2Fw8ZAiZiyGz2Wz44Gff5+TBfaGDs4aUA94LoGGaq3O2oq4c/TCy3ZxLpGwV3dhTz/fZG8BUDfPFi7iqheQga3zSDENkHBI+gA+qON7EpLWZEvhRLq4hF9rvMbGJib/59oz3H4zfB/7hE623Qkg7ktBzJiZayMnxwmy+R8MeNiVcjGQUo6mEezpJRLBZdXzw/l3u3b/Pv/xv/lveffc9Vustm21P27YcHR4xm7Xcvv0NDvb3eOmFF3nh9h1yTCTvUUoxqxqsEZJ3suL9+/f4/rs/p/cjB9Vt6mqGJzIkz3boeHRyytgNO8o7ayrm8z2GceT09JR+GLh3/yMuVyve+NZIqI8wpkYZT0qJy49+Tn/xkEcPP/qi5aIPcs6/9eTneAXxTcKDNcPdRwyXnnHwMlt6vY/LRPunpsvi6meWzd8HQcSHch3Q1GhViYCFsWSliNqSlMJHCWh3L1QqKxl2Dd2LVea9dxTtzPDNI5i3lq7v+OjuA8ZhZLXekkIENMZVRKRvOCHxURSNakVlLLUTRqScA5WC+n7CXPb4F2b01ZtkrQgMu5ZRNgPJ+Ou515Of4+UTpyxgNYKQikxYhQz46Mk5FOpZyVIFeCW63wpNjJ7Re8iZaCR7HUZPP3pcZaVSoRREmZEVTXCFJmFK6VcVZbUdu0YZU0oh7wRBYpCMN2RFyDJalXTGKr3rsWutMEmTk7QevM9EFUUnWIm4AgrGlPEh0o2eddeV9lyJkIpzhkzvPd/77mt8cP/iRtb7WdqtW/D7vw/L5ZNnve+99x5//Md/LBMYJZ39wQ9+sBsn+of/8B9izEfAe3Rdx+XlU3S8OeePKPLGOeeVUuqHCN/XfwL8h+Vh/xfgv+aLfmk5F2DABMmTuTrZcbJc6BNkXimUKnqX7Yy9/QNm87kodSjouy2oge16xXazYui7wsIUCcGLMkzf0W9XjFHhRwhDRw4eUhAxBil8i4i1RmYwrUWRCd4TgmzmCYghEENgHEb6vkObLd1mjR8jKSpyAt+tGMeB0QeZ9VMVAceYTCGwkOxeOdBZYVVxxiqzqGAxAzh/8vVWSFSeTAlipIydYmLsR4KPXD5Ysz3paLRmUWpko3FkUzE/3NAuB7JPONcym+3x6quvY10l2XpKtE3Lwf4+Td3w8ssvspjPOT464nD/QDb/IBR8jaukDFYy/g7P0fEBnR/YO9qnns9QJFSOBO+x1hKMx2iLUQZjnKgWWRGuVtpiXYM2Pcu9I15+9Q2sazBuQU6JdasYLg/IuePuRz/6IkycWzk1n2zNfQjcOz3hcr1hHDzBy0WdyUWMSkBiqgiW51zaKsUbaa3L3ycN2cIpXPLEOGXIU284l7qQksrAjpQGJdJ/CFioqio0mX4zkAKc3b1P6CPnD0/o1h0+BLwPpJiEvAZhP7pOtTjtcwpoteagXCMpK5yCCkWOWaYWsujc5pLF51KOnXrGk93InpKl3zl6jyujQDLWFstxFFxF6QErtPRpUxKgG5INx1QYrnJGJXG8267HaMW2a8ookZSeq+xEeCHLnjGxZ8U4UcAWpHNMOzYs0dxO+BAYQ6bzXtbG1jhjccZKidtokpaqWIoCcEsUjmct7QiAwfsyphbw
0+zybk1k3ZXK1JVIP97Yej8DOzyEgwN48UVREvryU4Id8FAQhY+ATWb/5IQ3UmKbMw+BpDW3bt1isVhwdHSEMQbvFQ8fKtZr2Gxu7vN8qR6vUupN4G8Afwq8UJwyiOLxC1/0dWTopIiU512IX0puxSGrUm4ukle2qjk6vs1rb3yD+d5tKufIKXFyeo9hGOkuH9Gvz9ieP2Ace4IfGLot0Xdcnj/COk3Imj47+n4g+w6TAk4palPQsqpCG81yLhnvttuyWW8ZQ5DIFUUYB0CxWV1y+uiEbuPJeY6xFeM4EkNAE9FENn2H1zOC9XTM2aQNxhicsQV1KZyrTltQBh89Yxh20dcTr7dSUFey2XktakPGEPuR8/NLtqstP/y3P+b9v3qfg8WcV44OMNYSqxrlal7Ur3GsXiYkxf7yDk1zyH/0H+8xjj23bh9ydLRP5RzzWugErbOFLN+UGV12jZGprz/Z/geH3O/P2Q49sxeOsPOG9ckhq9ND2rbmnR/+VEQXdCOz3PWSul5Q11DXC4Zx5HzV0wfNm2//Fv/gD//HNG1LM1+ggOHRPcLqgn/+Xzh+8P0/IX1BeaInXfN13/GnP/gLLlYrNtsOPxQADUUDV+ddeXParMsWiULhygxoDKHQaaddn6lKIK7OoIwr0wBy6Rgn6PAYI8GP8tXXNdZa5rOWedswbDac3T2FlNg+XFNZRxdGujBeOfX/P3t/HmtZlqX3Yb+19z7DHd4UL8aMHGuu6q5udXWxmkOTbVCy3CQgUxIMQaZBEIYJAgZkif5DNiXZhiUZNi3Kli0LEEBDtGhCFk2TFElb3eLQYE9s9lBzV9eQNeScEZER8cY7nHP2sPzH3ue+F5FZWTlERmZSsQI33nt3OMO+5+y111rf+j7ABcUkME2FmTZA0bPVLGVISuw7yyfbSU7NlhriVLKTCEXtKMvc5nsZq6gtzvcBjvdo/eBZrNZMmpq6cpASvfc58FRP0khdV7TtBFTp+0BKIesLa2a78iGQ6WohGeHo5BTFs5xPEEkZ5Vzqxls7c7aY5VY48jb8oASfSBasyeRAMXhSUozmNqcYAuvlmkXnuXPSkVS4MNtiWje0VUNbN0VsIfPGD0Oi72PR7u1y1qHo8nbF8faDZzX44njHcoUpV1TuVR5d8oMa74dpxsBnPgOf/3x2uu9Mo+A28A+hP4HfBr6vfKzveTwlXgb+ATA0DZ///Of5xCc+QVNY2Q4O4Jd+CQ4Ocrvvg7K37HhFZA78TeDPqerJfatWFRH9IZ/7s8Cfvf/5MQrYNIGXVf1mda2ghY5NTK7FjOCGGCO+74kJ+vWKvs9Rre+7HOWO3JqqBcGY9X+jOFQFCotNTmtTVqh5P9bIBsqgmlVPxlSloBsUtB8G+vUKwbJenmBtXVDRHmtyyXrouyJza3KEUsjLk8tOUIrSiRiHGIOGRMKO7QLm3Y73k4/v431Ah4D6UFizEt164PhkxfJ4wd07x7z22hGx88wkC5enpsHUDbOjU9rjUwbXYtst6tqyvb1NjBMuXNjjwv4OjauYtZMSbY2glTOJPz07rk0LhpABMFVlccniqoxmrxpH3VRUdZUBMoWpx5S61RgdWutwLiNSc/9vTdNOaNopk2lWJ2r9LqmyTKezt5ySehDXeDuZsFivM2F/Ku0zG4COjB/I3+65DPjIo41sWLU3TS9aliybWOqsn2WzkVEuLpHOAYDOtiUikCD4LB23CgsGY/ACQ0Z7kezYikW+HyVfo6qS06ippHWLALto5geuJAsDmCpf2+rOphVTIq+kkvWguefLeNfXOBSYU4z4EvHGAjzLI8CmjU51HM+z6HTkeB7PScm4C1QZvKfroHKGdTfk9iRy5OrqClc5nEsZ0KnKetXj/VDafwwiiiGW2MJszlzLHOeLcpj3AS8WKxZnXAYOpkRKJregxQyQijHmxVZZzPoQCTERUuaz30S85b8y8ugZpeMDGe+3b0IWLqihrjIg+Q3U/eq6pnoD
ildjDNvbNbu7bzW9nMrGzy+2T4EjUjqhX0I8zs+OjPLjcmVmLTvO4WNkuVyyWg143xLCFNWBt64v/Ob2lhyviFTkL+y/UNW/VZ6+JSLXVPWGiFwj48FfZ6r6l4C/VLYzFqDyQyOaAmAKqTklvZYK9jbTolV1S9203L51i8XK00y2me+9hLEV1rWIWHy3IAwr4tBjjMPZCq1bkkDlMlrYiQHjQJX5bIKknG5OYThDNybNLS2DoR8GYswTTu2qUi8LhE5ZpDsMyxXONZzcfA0xBh8GUgqbPr4QQ5YmDAMyMdQ0iFNMrYhJiPOIGHJXsaFWmKWKGBXgo8C/+27G+8c+9bg++82XWJ8uGbqc+l53uXn/+e89z+nxgpe+eYvXXjzk4rzh4OLtLHvWVBhX8+rhjJ0XV+w9/jTXf/LzGGNZhiww8eLzP+AH3++ZzaZc2t9HROjXgRhS7uHt1gTv6dZrQJi20ywg3jRM6oZX7tzi5Refo4+ePeNpuxn96RK/WqC+o6oMTV0hKYNOuq7j4O4BzlmapiHGuKnNHBwe8Oyz36adzJlv71O7ikuTxFZtcO4tF4LkQVzj27u7mpJiTEVdG6xkpqGUsvh5LlmMKeJU0ptFLpkM5NMykRprUWMw46zgKlJh/irch0iKiCrOOmpXQ4I+5ayJ7zzJZsEADZF+uS4kHgn1A0aEYIRgc+1ZKinkC1mUpLGGqq43JR+NkW6xIqzXvOQj3Xpgu3J8bD6nbRv6pz5KuHwJtZNM5IBQpQbRompEpBruofB819e4iOhGj/dYiuOTktaNCMp01tK2VcYv+EBKSjcMxJA2jjdvOP8YUh6zo9OexSqxXM+wpU98tVgRQ2A2mzKbTjJ3fDshpcTdu3dZr7sNRm5ne8bTT16irmok1JAy3/N00hIx1OuY28eCJ8VMtBF9LCpHQoiJri+/pwEf+uxGU+6bHxHxWYQhZVdrzmq8qorGQAx9qf8/mPF+o/e8uW0BPwPmIvz4pQxOfhH4IpvWXWMMn/nMZ/jUpz71uk+LCBcvvp3WoY6smHRex3cFrOjLK6/c98oSqIYBvvQlePZZfkBWFW5nM77whS8gInzxi1/kxRffDH391u2toJoF+M+Ab6nq//ncS38X+NPAXyg//87b3/352lGp944N9xmSkXvtnMPaitViwemio5mcslj0VHXL1s4+Vd0Q/Zroe1KIG8ksax0mZW7VHBFIAerkST3UNWGIhDCCjgyiieA9hNxYPaYJnTGoFh5nlGG9wq97rHH4xRIRQ0jZ8ao1RUZJSGKJJMSBm1hwEeqUCdtdrsGIKgaDw+LU8Nd/8WWA7t2Od/CB27eOOD0+pVutWXcDp4uOxWLJ9599kdPjU1578ZSjm2uG+YrKr6icwdYVxjk69zInfo42cx4vtUmNiRgGDo/ucnx6wNbWHAgIhtXpgB8iy9MTlovT3I97eoogbM93aOqGremU+WTCncO7nBwdMmikXcwRA0O3Jg65HWnsw8waseCDZ71eU1WuKLeM5B/KarXkzp3btJM1w2Bo6oa9yy22rd4OAOMp4NcexDWeEhsiD9ThXFa5MimD1+RcRiZpKtGtZt3cc69lYhDNeIDSTJ4kZ08YW34k5WhTCjCniKRnRG0u5Qx97sn1vSeOvODE7OhtjkaxYG1e/BYcVY6srUWSZJCeKj7l1pXjpAQf8U3DM9Npvv53LzBcfRwzJOw6YBNUapEEkoSogo12c0/xAK7xPMaK9+X6sJahoJNjHBCgmdRQauqx1FxDyPq9KZV0v4y1d4q0YCKEAVJeoCyWa1A4Ojxm6AemizWTtqWqKiaTKTFGbt16jdVqlQ8J5crlPa5c2gYVHBlYaI1QVw4XUs4uEYk+s2cZFWxWHMeHvFDzIZbINjLESEyRIfh8vVgHxpSIeByLcUhyBBxLdu4b37394Mb7bVsDPAX2cbhMbuMN5FATyLVow+XLl/nkJz95DwbgnVkAXkb1u2/4yivAd97gU1WM
8OqrKHBQ3nPt8cf5g3/wDzKdTnn22Wff5XGd2VuJeP8Q8KeA3xWRr5bn/m3yl/XXReR/ArwA/Ctvdadn/VCZSEMQnMncqVqyZ0kMTrLzlJRFpIfgGUIi9B2x76nqhoqANpNcr4kR0UDTNFA5bGUQDbSTBlvV+Kis12u89zltU45BihhA0oQmKelfwRRi8jwRyiaNM6asICOwNQ35b80tUhoCgcwUFY2ikpC6o24j0XlSPaCieCkSY8FgosHYlpdehS9/8xBg692O99HRkr/7d3+bddczDJ4+RJZDRtnevX1E3/V0y0APhEGJJx5nhNZGrLHc1Vdpj4HpnI8tDhli4Ld+89e5eesVDo8OOD49om0nbO/sYk1F7aYYcYQwEEK/WexUrkLsnCiGWitaNyHZFlKD71Z8//d+gE+R0PX4vuP48IQ7t+/QdwOaLKhh1dWcnp5kGbW6QlU5OTrAd0tuvvA9vvrrjmYyY/fCNabTKfOf+Ciz65fw3eqtXpb7wB99ENf4WKbY/FW4qY0tKliFzi+TLORrz5T7ILOdgXEuC7hTJOZM5iO2ziHWlnTjKCwJQwgkeoZhKOLzSvS5TzukiI+BOHgGIbfYlOJmXthkuczMM5xTzQKYwWNK77mGQAqB3qcsbZgTplhVXkSYIWyp0uqoKR3yHWJs8eKxlJXSubF599f4aCWHho+Rdd9nZHAetgK8CuU7yI5/LAHFlBfeefFSwFUEkIQlt3GFCKs+A8/uHixYr9a0TUdTNyWt7FBNnJ6e5pYkA2KEuq65e3jCdDIwMQErjhhCpp6MjtmsZfCJsOzQIYMQxWQw2LpbE6Mj+MmG+Sw/hKhlcZBCTrMnMiEIkPlISy+zySCyg5Oem7eXD3S835ZNyK27O2RC5XusJZNqPEnGez1Ye/llePHFM58zDLle+0bmgW8Dh+SAXIHT01O+/OUvU1UVtx8UpJm3hmr+dUYv83r7Z9/JTksJJdf6jJxzvGl8AcSixmGcy/XeGBnWS1brNdY4+tND6rplahMymeU6Khlq304mWCNMq22cyb2ESSND7Fmt1/hhyNFsLmSV5viRvlJL32hGU4+ONxWHK2NqaizXkdDUbzBighDjwBB7kiRiNaAmYbZ66jYS3MBQr0lEfAqoKkEy5Vxd7/DJT13k//YXfpL/2Z//2jdV9fPvZrwPDxb8tb/2a3hytcMD/fmICmg0XwSnQ+JgSFTAHKhEcMcvYpq71Ht7LE8POFku+NVf+Yd869vf4uj4ONMYGoexLVXVcvXyE0wnc1wtuEqYtBP29vaYTKa0s8vgDC0102pGMEvQBr9e8Z3f+y63X3uNED3eD3jvWZ0uc5q/OCbBYmS8XIvTUY8Sefl73+KVHzxH3c64cPkxtrd3eGo7cWVuGLrl+ZXem9mX3mC83/aYj8e3QewLyOh4XZVR+zFm5iNjEXNWowUKhFkxzlFTCBxKxJvrhjmTEso5jddl7wO9D4QQGEoKXksYZHwmkdCYMumIkYxKR0EFSWWUk25udAHwHpbrHJnHiMbI2gd8zPDIIDnS+gHCFOHTmtjTSNRAVF+i9LJBPwp0ZPH4Un5719f4aOMCxMfAuu+x1jCpK0QMPiTM4IvjLUjnVFxszC05ULK0mhjJZpoqtwz5pKzWGfV9++4ppycLaldTVS73/PpSxy36pcZm5iprLXcOjpm2HVt1oLY1IQw0lSHgmEVD5RPL3uN9JhDCQNRUAgTL4LdKvXdEihfq0aQZ9Jl045C1OGQRoWlrKlORVNnaavi5P/A0v/JPnn9g4/22bAb8NPA4b+BJpsDnyu/vNtK911Th+efhV381a7mP9sO6Cz25v+33OFvIHR8f85u/+Ztle+8gy/5D7P1hrhpRdxt92M0LxekVwJGt86RVQFgGcDbzn1pJGAIahizdV8SykiRSyik3sQ3GGTQZJEVsJUymEesG1sslDD6DVfTeFoeYFNGItXLPEYsUlL4ZwWGZacaHiCYIUdEEQ/IM
qUdtQkxATMJZxZQbWUwGmsRUJugSDGiZnM07luO71xKwjmkDcDLW0TYNYiyubnN6MuX2EatKpYpNiRizopBpt3CTOcE4jk9OOVmecLpYslwu6fs+Zw2SIGkkLIm4EOmDB/XEeWQ+26KplUk7Y3trh62tXWZbuwxBuXLtSZrpDq/cvEM3KF23Rro1xgXQqpAA5DESMiHCmHbIAJVQkAAVqjV13TCdTJhNJ0wnEybthMrVD/p+/pGmOoJ5RrQLlD6aTVRjyC0vRuQNFgYjt2/+zMjTK8XxngdqySjzWBxwnvTtGcgNzviFEcSV4zOSOcRl3Ic5I5Qqn0spE91kkGLc8Gcr+VpNJrPMDarYlAlN2sNDKuNobJWP/1zpSDeaOQ/YJDs55xzOVbk0VQCZxuaoz4eQo9MyvinpBjWeNC84xgy4jC1Q5HsoO7lI8LkmG1NJwxtbeprrs++bvHASURSh6zOOo5GAJNn0WgulbShBVVkIDlc7XJUxKBoz4Yr3nq7rs2iMtVgo1JUjo1gq3235XgpIzBiDcxbUoal6L0b9rZtwhmA6JOOcbgMJnBMuXrRMp5kU413ZagV370I8BXJpIB0UufG3OABv5JPfC9rI90kkwSJUGJNJKpRM0g8RayuccVjXUDdzVCCQSNFTO3DGle8xYo1Hh1OCDqg4FAuxwlpF6xpTbeGaFlJerbet0OxeZuh7los1XR9IEggMmHESRIk+N9vXtWwWBqYofIjkGyzEULQ1A4cn69zI3iWCTwQ8gQHXwtRBZYWt2lFNLWAJYtGgDEOOJKy3mChoW9E0882+3q1FMhPqhEJLPpmztX+Nqp2yc/EaVdXSdwNh8KQQiT63Ta0WJ3Qh0l7cZ7q7Q9/M+d4LL3B8csTNW69xcHhYolHKhG9IKgwJTFJOjw5YHt9hf/8iO/MdZu2cxy49xlNPfYT5/gXm+3tcvOa59PgnODk5xZtttl94gcPDAw4P7mZO3aHL6klDR4x+42xGpHqmwtNSJzVEtcync5554il2d3Z4/Op1rl6+yvbW1gOoGb11G/toz6Nks9PNKGE1OaNirN6LAh+R80U1Z0Qiv+GD4nBhU5PdUCDqiETO2yx7z05fgaa0y6S4WdSMx3nP7ygStKSHdfPeBFly0RiStXgjrGIkDD0vvPIyt48PeOzSFT5y/UlE5AzARGSETL4XVjcN0/kWdeWYTIrYQen1D9HTrwesdYVOM4tDqEIIiRizAIsp9JjW5q4DJTND4ROSOryP9F7xydDYOlPRVhXT6RTZIJEV73sGP5CM5fBkxXo9YOeGWDV4PyBapAirnA5m1hIrR+1yS1GKiWHdAcrp4pTBr9nanrKzt0VMiaB5sRtTQIkFhZ2dbizp56p2TGcTYqjw9euRwu+LJbKg7VfZaCLMd+BnfxYeewym03e5/ddey70/ixNKW37GWb2vq443tvct4hUxmxW9aAGXjM+Pq9WyCpeCTjSbaCHXbqzJQJWsNVtW75pKf15ZFZbamRFbWngcSQVb5bpvioEUs1oQRRh7bC8YbyRT9glnXSBakKqh9NENIdL1iRCUKIEoMZd8sfkcJNfqRCUjdZOBKGgwSDIYNRjJIDL7AGW8NtHuJiqoqKuG6XSbupni7ICvcs3bD5kspIuQQsBM5tjJjCCWo5NTjk9O6PueEM7D9MfoQEhAUMUHT9939H2pOXpfygkWZzKJSOWE6XROTMLOzh67e4uSAhRIAcIAKeKHFTH4km6lOI1ckhhrXTEJMRnm0xk729vsbG3RNE3h7H3whPxvy0o4snF+Zy8wfjsZyGTu+XsE+8AZUOt+B5xfKwFzIfDNJCllgWLOtJ1FMtJ1HI4UZeNox0XCZrFQ2vngLL22ccZj5G3OAQjJqc+u71GBbqvDB4+Y3NGeycrGQXgPhhg284YUVK+UElI57RzV6ogiP2t9G03PNpSBa5JTu7mkobllJ6az5i3J/ALGOVyds1QxFuUuEkYjYkxOZZv82Wgy
2M1Zk9tYbGa8ctaCHbmec0bMWFvmokxlOQrdj+dlRgWLN+C9FgoHtTWglmTvqau/91bXWY5z83cLg8nQ4WPgMCt21i3MZpkYY28vA+RWK5/V55rmzRfMKeWC7bkccjw9pT88RBcLoNT8+UD63ffD8ebUmTOSa48hAEruWDI5cpUqp7CSL9D8hBHFFPFpIxZrXG4bamY4V1FVNa5yhBgzcUbwDH4PWzfUkzmTdooPkVXfk6ywd+UJZjv7mWVqtWDo15ye3CX4gRA6vPeg5h5CiBxxZZG1oes4Pj1h3QUODpb4oKUH12AboWpqmtYymzbUreAkr5zxFdIr1jsmyyzpNa9bmqrmYnORvcmFBxbxChm6MG0aJlUFxnK6XBOpueqmTCe7zKYVIg4fA733xBSZDh0xRSato6odd7s1N770FVaLE45P7pPKMgbqGnU16xRzjTbm5ex6dcLLL73E6nTNC9/7PjYIW7fvMN/bzjSHJrdMPHXtEpf3tvPEVoAuzuRlfEx+04ozOo/RQYRSbw9RCV6xxjKr2ixg7hqO7hyxXq7fYon3wZiIYJ3FMnLm5kl77OlN5yf7c85OJG3gDWbczibLMjpZs3HCZ6jpUbJRz02u+rpz3myjeL9U6sBn9cOzY4kxYw+kgKFyRJ1/jmlsZzNaewTxQJ44FeXmndcYfI+ZzbGPP4NtJly2ni0fcNWDU9/ajCOZMawfhhLJh82xOWsx1uHMKMqSHZgt5DWYhBQ0txSqAGtTZvZMhtzyaPAhX2diLLZyVJOaZpYXd8lklHgsyG+phcrW2CoT1iQxDD5ikqdpai7t77HySlzmrJkOYRMwrIdAbSt293YwInTdCYPvCSEQUyr1SQE1eB/p+qHUI/OgOuuw1tBUjrZ2dJroGTMvD8GshR//cfjoR8+e6xv4vd0chN7KTz32GHzuczCfZ1aqlBLf+ta3+M53vsP169f56Z/+ado3Y8pYr+GLX4RbtzZPvbZY8MWuu0c39zaPHO/GxugPRnq70mgvpkSIBfqfMtJv5M4w42rPVFjblBVnjXEVVV1T1xX0HavVUNLBmQbOuIpmsoUOA6kPqCjTrV3a6Yxh1eCbhtXihOXyBELM4IWQiDYSwhs1TGtRM1qz7gLrVYcPirgGsQ6DxbosT9bUDVVtsHiIARksrB0SDFWXHcm0njG1DXM3Y1JPH1jEK+Re9do5mrpmQOgGj6sjxtRU1YS6meFci08RFwaiJmwMedUuESGyOljw8ouvFL3V+7rejQHrUGuLIkokFlSr9x3Hh0dIMhzcucvubJcw9IR+Dc6iTQXWcGFnjtnfo3YNtWswVqgqhwjEzBhcWj2K0xgdb1FzCT4x+JR5dLsAKeK0Y7VcMQwDD/vWs6Yo2SCIUTQKIokYx/CUTWoSigPWTXBchAjOganK+8+IGcaUc34t1/bunVrPgoWzz8NYTiljmM6yO/c43pBVWbIOdeGxLZGVLWipcTGahdfznmIBLJ4uFvRDh/We5olnqKqavaSbKO+9sCztGYGUAY/nnI2Y3KQz9n6LGJwbSXkMUgBtGWOimYO5kG5IqZ1nso2c4s/SgxZXu5KVK4BFUZKkUjO3iCukOSI5RUzEWct8NkWGSBt6EME5QwxCiDmDVlnHZNpijWUYFsQUi9M9IwEB2YjNjI5XjNlQZjpnsbaA6u5ZlL3HZgxcu5appsYV1m3gS9zTPLuzo3zykzApyvUxKjdv3uSb3/wmKSV+8id/8nXHfH69pt7DCy/AD36weW4BPEsuIX/Q7aE7XgGaWphOwMRIXK4LtB80WWIsKTgBiXkmGlMwztW4wqSTEU6ZdBxNBCs4Y3KPYhKijxzeucPpyQnb646tbsW67zk6PskXa/J5UgkDGvKKUopaibMV6koNqNR7KdWylCKqWTuzcpZpa7myX5NUMHWLsQ5bC7Y1uFqoMdgIrCH1kHpB1i6DmcggkL3ZHtvbM3a29pg380397t2aAkEEcQ5XNyg1URus
dcShx69X4CPJdQwx0IUhpw2zeA5JcnTfr9ecLlYM3Zow1nZtnUkcTAO2xtgqg5nqhqB7BAe1rZnW28xnWySN9N2K1ICrA9W0ZTrdQSys44owKKvjwFGXwSi4vNpSa1AjNHVN09SMaURVZbFaMwye9WpgueyoXc3+9g6Vc7jKUJuKyo2ojodjUlL6o/TceXCVKXnh+51pTgvnyXFkTZPx/ZxFt2MUN372vN2TOt1kdUcAjtzzHJoVb8YO4nxMecGQAVplW8miuV6yiXgz21yOfJ0bKULzYkDLQjo7OEFXS8wrL6CTlts7kX6aOKjuvifRl7Hj8Yyp5dzDalJiFBbOi3YtP80mUt/MN0bLQj+Dowp2CYSy/COXC4xgnKFqbFmw5MW5WMEimYkqZGKg3leoNQwYrAIu6407m0UTkjNMmgorMIjQq+IqU2gjDW1bAy3WmpxR0ExvuQFmQmF5q3KrXeOwLl9Tw9DnlsHuIWZ9EvAc9xI8LfJDBJ56auRbvstXv/ocIvmNKSVu3MjslQcHB3zpS1+irrPwvQBPkDuRxqt+WK/5/vExx+d2c5sNH8cH3h5+xCvQNDCfCxICg1sSEwwJoCIEycxNkoXbZVw9iqFtLcZUZbVvQYXoB1LIHLfR2KyuEnJ/3p3lDaImVstT1usjunXH4eExKaYRzImzmU0m89tajFRZEi3lG2oYPAgMQ470chouRwa1y+mcvZ1M2l+1LdZVJFGSSWAiKgMaIr4XYgT1Bukzof2ktlTOcWnrIvsX9zL4abKFPMC6ZICcEWhb0JqoE6yrCH1HL6cksyKIpU+BVexREaSpEJPrtiqwXq04Pl7ghxWM9d2qhWYO4kBqxNVMpzOmkwmxNqTZjMo4Zq5l2k7RFFmvFsRqwLkls2aLq7MtcHDraMmiG1i8dsDdm3eJCMFUqDH5YnGO+WzG9tZ8M8GqJm7fOWS5XHF8vOTw8ITtrW2mn/gUbjqlmlRMmtzy8TAtLxCrTM6gZwu2PKHbTMB/zmluaqhaHGDOG28i2vORb1VVOHfv+Zx3YucjhBGANdaWN+9JuYabNG2cKOZeR5+l5Uqop7menkakdpnwnXPn6P3yflOKm0WqCJh0Cs9/l1ALN3/M8NpF4Xb9Wo4KH7A5Z6nqCsZIXXXjeE1hkhNjsiDKOYT4mHkTc5ZqHh3vvWNWWoZKDdhWlqq1hBDxvQc9Y8jrfaQf1qTksBZisswwOAu4LMJSJ6hLH3ea1NSVZS2gKVJXlqoyOGuZTFucy5mCoe/LgqLQRhbyIeccTdtiraWdVPmckqfvPOuuY7lcPbyIN5KbYe9nqIhgXZb0+8IX4JvfvMXf//u/wvocAfKYAbp9+za/+qu/unneAj/HvS3AHfCVlPjBueeUN0YlfxDtfYl4Kyc0jSG1Bj81hKCoz5JdGRh1lr4RAZxizNlqGgLRDJvIRwSGIa/y/BA3rS59WBM10q0XNEuLHzwSfZbrGtNrRggmR7bD4EkpYMTgXEVKUmjZNKuboCVCkc1Nm7mCq03rgjHZWaWSgopFpUWiQDRIBJPAOUNT1TRNRdM0NE2NNQZfGv0f7KCXNJStsdWcumqR0lwfYkab+uQJ3qMGztiSMkOSxpJqVJMdrVF29y6ydfEqPijdOmKMpakanLHUbYtpTFZe0jwuQ/Csh56UDM5UVJVlsjUBZ2kGz4Ay+I67B3fwMbHyMVMrVjlimM1mzGezLPc2CmQcHrNcrVkuO05O1wTf4YeniG2FYAuT01lv6sOws3pr2rSq5IXa+FWcgaP0nBM+73jHiFdMXmAaM0ekwhi/iRDeyO69bCxGZhk7MQHakp5bLiHqpqUtyzSOGxiPL/+Ra7ymBIQl4i216DHNfHbe2TGn0dFJPgdShCikRUIPhbRKDzzzvwGjWZNL0qIZlVzS54jBjIHteI6cXRfGlIyAjDXeojhEWXtIHL+UHBWbfMabxUtJJ+hmrZLT0jGmLFCfEsE6ghiICfVZJtJowqBU
Nn8PqbZorKgrg2oGPBpRKjcCTjMyPZFFF+raYVxe6FW1LcpWeeyTFg+t4Kx97x1vVWWxXDeFo3kGUrEEDnFYLnCBKQ07uezNfD7h6tWrLBYLDg4OSkkoW8YZnIGmErkL6aVzu1uWx1uTPvng2fsS8c5mlv0Ljr52zKuKvo/cOYr0Q2S1VvwQSotIJhBoGou1MMhA0oi1hqSrXA8rtbT1coEmRwiJdRczYUZYoRogrgjdQUkjN5ikLJarkhLKddyk+UYxIkwnbS7sa4IUCTHQdSuSKmP5rq5rmrrGiGRUIhBTBl9hBC3pxiFkxxt7m0XKE4gKja24uLvNZNKwv7vN7s4WXRc4OT55wKk4yZqjtmK2vcvOpScRcWh0GYHsPSkGhuBZ9l2OeJ1FTEntW0fqIxZHkppk5hij/PhPfIHP/czv5+DgmOe+/xJ+GEh9BymytbXF1rxm6AeWJwsMhuP1gpASj+3sM53M2N6dcOmpa0hV0dcOc7rg+OtHfP0bX+VkseDGrdcYfMjMX0BdN7T1ZFP/VYWT5Ypu8CSyIPxTTzzFZz76JNMmt+w0lcXahwytKGMaQiiOdyS1l00J4TxI6v7PjuxVWb3KIjLF2k8gso3qC8Arm5qd3OMo4SwRJ4g0GPNRRPbgusKToAcHxO88S+o6jC3SgmS60oyillKfHKNts3FQ45ZToUx11t1DaK+qRO+JMWGtKWDE8rkA6cUEN5T0cnrwYYkI1pmMC0ARinKTJlJUnOZo3opkLnIhp9glFkdVPK7J456jR0PseyIDqBBMQKxinGJIJMlqRqoZcIWSS1wRgoc45O6GfhiorGEqDmkMad2RVkog98w3KFUF6gypbUnbDTEm+v4EFJqqwtVZvYyyWCYFFGG2PStpfgtiC9Aw976HmIMUwTBtJw94wN/Atrfh534O9i7DP57ltiFeAn6VLWb8Ef5ZrnKVWXn79evX+WN/7I9xeHjIL/3SL23SzG9kCnyTnMEeLZEz2B9We/gRr+SUQ1ULNEJq8m1dVSmLTZtCqzfWlRhXrmMUQQGqpFJXyhCTGBIpRnxIeF/0eNOAasT7Ht8Lzla4psixxdyzGnygH4ouppLTm0Lp35WSctWikJPOWpqMKe0qmX0rf76k5ETOT4FZg/WcOotIbq0Z65ZVVeVVafL0XX8P8vXdmqtcUfFx1HXNdDoFLOtVzIARzeMWYiTGLHqe05SCEBE1WbdUM7EJYhEDs9kW+/uXQB13t47p1mtW3mfNUEqnSYmAFMXHyBBDdkLC2A8GLvedmkKD2PuBru9ZrZb0g990f1auoq/WGJPrXwos1j29D4irEFfjQ09MmaVspCeUB7qI+dG26YXdPDavAGcR7/j7+c8h5RoZHWrR5jWmQaTJLXL3nc64iTcCU4lUpQxA7t+tHRtvOEbeRpB09vv5Y8y+SMa3bzavqpjKYNqRkD87HmsMGuPm3ihZ2fzZ3PuF9Q+uVe7ecTi/YCijsPkOSjha7st8PiXNvjkxvW/szFkIDDAqpZVz0hLxFlQVKBtVo5Kd39TEo+Se8wibrF0qx2CgjHvel4pkhHaf0+W5xdCU7JMlasKUq7qqssLXyFolQubiPnftCQb73gz5vWZM1uubzjYCHjkeXeEw7JI4L3OQs3wNwOsUiYwx1HV9TzZoCIHVGwJdP5z2vqCaq9oymVokwOAGXIq0M8HUglYJN4D3ynoVch1Xs8i8xAYxFWxuLiGUMlpKiZg8PgTWPtPcFV13NIXco2qKiofC4H1xNjGLd6OoZvCL7wPEnA6uKoO1uYaSWy8CSRO+0PNZk7mDRcwZ4rB0tKsqVnIaOjhQyY7aiDCZT9jdv8B02mJdjR+Ug4Ml3//+jTNyindpbdPw8aefZjLZoq6n7Oxf4tK1x/A+sXzlNn1pG4qa1WiilB7IQug++JTJAIYBSR6jEeMyeIOk+K7HiXBx/wKrxZLDmzc4OTzkzq0TUjwlqsEHR9NMmM32mG03rBBe6wKr
gwXx2ReQuuJwNbAeApPdS3zyJz9Pt+546ui4tFDkmrqBrPRTwEvIKPxV0OSu5cLuHtPZHBXJqe0u83I/bFTzRkZSskpOTtGeTSKp9GSOtkGdqmZHWxZxAiRZkdL3EanRdEJSX7ad/7PnUsNla+XnQErPIfIq6RawVLTrUN/n/dixHzeDgsZNjAtDkdL7vfEJ+T0jwYa9bLGPW4QpNj6O04brKbGrZ/kazSdXjjdfW/2x53flG8QHnCQ0ZSysNZs6+EhKYovTtCVdmw9rRJRnbuPMEZ8X1H2f0+i+6wl9l3tGNZP8jE5QIAMNkxJDrn2HPqFFatCaCusE15iMMq4bsK443rLAHcFcJi/yMVLUqCy+rdCkGaVsskpUVU8IKcKwzt0HVkAyy1XwGeE/+MyiJ0aoqrqMzkMotpycwD/+x9Bswc3PAh8hV2X/ebKb2X3Lm7p69Sqf+9znNo45pcQ3v/lNvvOdN5I2+HDa++J4rRVcbXAVWBuxLlA3Bmyut0oNsla8L7WSEDPpRXKklIFWo5RXDm60KIpAiD0+rst+sl6rasr9wpI2SkQh3NtKkeeHgogOCSGWm1QQyXVc1cQw5KXtWI82Zqy/jAUkU4juU4kq8o1qTZaDy6o7uc93Opsxm02yhGBQlsue27eP7yOoeOfmnOPSxYuIacFUTGdbbG3v0vUDmDuElEXLY0p5NT4S9hfea02RFGMeO40YEmIMzmZau+hDFgWfzZCYCINnuViwOL3BcnkzI57dDrPZFj4lxFUMalj4RFj1yGsHSOVYqsUjVNNtrjz+FMEHLq7WZd8DqimTqKSUsxEljIqVQ60pjnfCtG2py80aY8B7sszeAxnNt27nSSs2QVUhiRkzI/eQN+gZcxXnnGgkZ3cyXpMNsAnIDlPyRJ2d+L1nqRqAO/m6Ps6PscFGyTVeNKsajZHuaGPEbYRzUWQJI8f97wjymGCkwfkrNDrlCnCNwh286Q2m1Igzuvx7W997T0hNsmPNoitVcbwboBeZyd1sFim8/jsoCxlFS5nAEErJgJRrsZAX8zkaHluydMPONQyeFDTjPcQWIgiHswbj3EayT5OWOnTet5VMJatFDEPJbUFJFWuyRnhVOdq2wcdApz7fDybDiVRj1hyPumHkcrbG2rPp/T1nb+t7+P73yQ2M18mOdw/Yu3chxo9eBmxvb/PpT3+a2SwnpmOM3L59+5HjfTcmY6rWGowz2MpkQI4IEg1aKyYkjCvsrlEYhsxo5GxhkTIOZ3NzdSpzTogZGCXWYlxxINYWBiyHMVmPMyEbJz32xW0i1eJ8M8WkZqde2ptMuX2dMQiOYYj4IRQn0GeH5DJdZUZMjm0f+ZaNYgqrU0J9xNSBzifskIr8YVY+efnVgwfmeL33vHzjJnW7haumuNmCvfWa1XrN7dducHx8TPADKQY2KjVicHWdiQJMQX1KoJ04VA2urrBVhZJYLE/oe8/pyZLVckkSxTYV0k3AznHVhMlsl/n2DhcvX+bytStcuLjN9sUtqqbG1jMwgg15tb49n9NcrzMbVVlQ+WHIqlMhICH3YI/AO2+FaATralzVUlnLbNLQOEPbVjStxTnzMNb795i1Z1zJOQWpGXAXCyvUG0yCoxPISOiMFdBk7gVhnQdBZS9YtHvZMHvJG53t6Gw2jnfc3/g4t00543Y2O4LsjdHu2ElwAXSaX1PB6ATRClUhpkDQVMTbS0Q5Yqk01xtz3+mDXwpZk+9Na0whCTkbLCtSGL70ddmk8+hmY10m96lym5Spa6yQeZP9AMZQ1zZXSposnYlCVRYxrcuRr4w89Faw1QjeKulgY7CVy4sdJ0ipK4sxJMnO14nStBndVYnDFrCnGIPRDBrMi/oIknEnzuVzFAOqgnEOY88BzN5H68g12hvktqArPJQY/ANt70vEa2zug7O1wbUWEkwaQ6VgvVKFwNAqtlJCgG5Z+nsToJl1xlVzBDlbWcsaiZndyiZbVsAuI1tNlVN1ZERg
0sxpmgr6EDibfzSnpkFy2ijkiSizSQkYh5HEoB1DP4ARhhgQY5hMoZJMDm+l1JRMjh5DptDAh4y6xlWsuojYSD/kPuJXbx7xvR/cykolD8C6YeDZF55na+sSk+kOZrLD/nLB6emCV155gbt3bpNCR0w9VmqsbbHWUU9mWOdoJw1NU4EMzGYuI6PbKa6qSASOTg5Yr3sO7x7R9wNREq6tMd0cWQfqyZSdCxfZ3d3lsSeu88STT3J1f4f9CztlyLMjMn6NjYELO3tMrk4RWyHNFBD6tSf6iAwD0vdZ1Uk9EaUnEVBcVWWxcY1Yv8aiTGc1k8ZS1Q+jwHVmmXHKbJybaJmMU8rtNmPEeK6emAejZHA0bQjdRyKNMTW9IdDgzK1kKkFybfh8jfb1B3ZPxHuP85OzBfEIshIBcwnk4yX6TYLgkPQEotcYg2w558BDKHX8OJJZAJoXwZoKc5T3D2agz58aWS+7tu4eIYRxDJzNvcZZuSmgBbwmUuhnjcE6R1VXRVwgX+tqQJ0leo8vrTvTaYuzligZIGXE4MZ7Hpf3WhbzKkqiMPONYgwlbSzWYGt7lkkgCy9E8vxo65xtc1roZG3pnACscxuJHdWYpVPLOdc4SlooZ4ViLP2/Dzvvc2YL4LfIsfAfJTve/7bb+0IZmZLkliHNaLxcOzlLr6nkSDT6nJbVCDFIIcfIjEqQkX2xTGyZKq7Q2km+EI3JK9d8M+TJYQOuAEb+5xFEMQJb8up3nIBKenAEx5jMsjVyoSpsyOhjyKlX40o6tKSUkiq9j3Q+0K0HVssOwbJYrjMgogCCYumDfJArVEOmpQwhsO46jk9OWC0WDMOKGDtS6tHkiSXiUQLGS1Z4MgFNFWhAk8+TRZlA/JCFx1fLFaenJwzDQEweMcpkUmPYpmkbJpOatqkK+X/MQuD1mbxfjBErGYGKRjRmXtoYM9huvewJQ0D7Drouo9VT1jOWtoLK4mzMFJMaEdZAIqWBGCwp9W82PO+ZbTLGYxbl/tcYQTxvvPYfJ9L7PeiYrh6zvucDYTaRsW5qymx2r5tt3dPCdM8uSgZo/KsHORVwCpNyj5Adwdl2AyoLlFju5cxzLCXCy9ssTv89nPsFzpGP5B2ZzSiO/8Z3joucPIDZCW9GID9f5pIxGralVSeD3XLmzMgIPpPNgsuIPROUKIuocaGTVDd0oBsu7vPMUuVYNjOVUjJRWrimC5p9TP2TS1mapCimSQaSMaousWHrenimwBH30FRR2n4UDo/hlXMvnRT+93utI8fHc2APkYrt7W2uX7/Oer3m6OjofY/i3609dMerCiFkAEMIFUiLsZ7KBZwoNkJI0DaGSW2JEbqpEoPgh0gYBmJQ/JCj1cFrqW0EYsjUgo2ZISJ5BSym1Hh9WQgKoooTJYpgbQG+jOk0gabJTjUDUIASJUOJ1o2hTo6J1oQQWXcDKQQGVeIwUDUtZmJRUULK8n+Hx2tOVh3Hhwvu3j5id2fKznzKzvaUyZalaS0+dKiMTDvv3qyxbLWzrBzUD9y9cwev32Lo1ywXrxHDKRv1GR0IcQ1JCCHf2KtlhYjdkFZY51BxVClxeHhExHB8cswrL7+MpsTWrKFyjseu7TGfz/NBiJTWrI6uO8KabbZnzQYdGmPAdyfEYSB2nmV3SjcE7pys6IfA0cEJ61VPXC0Jy0yf14cO44TrT19l58I25sKE7ekMNJJ0ASkyrIFe8d3t9yS1+cMt51VgnPDHWoaeAxSfIYXNpr56LiWuZ84VSsucsLku8iQ+ou/Pu5L805z7fbTR6WwonseojLM09wg42pCT3RQ4EsyuQT4OpgWrCZNilgUUAZZE810sHuOeppJ9RAKqgbN+zDOn+159F0aL8pwmJEk519E5ydm4n190cCYessksKBmPIHl7ltwHW7VNjqxt/ladLZSTqptmUldVuf0uRlJMhKikcEZvKqo4WwiBSpSNCIP3pJgKJ0AutaQi5xljxmGYJKXFsmQ/inQq
KJoppzMexucSWj8M+Bg2YK6HZwH4Gpm88V6LEX73d+F73zv/XOT09H6Sx1eBXyQDsv4oIo/xmc98hqeffprnn3+eX/7lX2a1Wr1XJ/BQ7H1JNccyucdoSGrJsoBnvMxuQ9dWHKNCdJC74xNIIISBvLZNZfLIdVLGVankOmuOANIZknRMkZEnKC0pNjgDlWS05xmgJE9SukGSCrqZNKXUjnRD7pH783IbjRCJ+KT0g6frPKv1wGLRUznLatXT1JZ60mxWqtZkndMHYcYYppMpXh2RrHbSd2v8sAaNGBnZizZr8k2aLH9RAcgk80pTgCceYyx937FaLlkvl6yWCyAxqcEVlZW2qUiaU45Cwg8dXbdk6DuCH3ILls1tXSl4UhjyfpPSrXtOjo7o+oG7d45YLdeEjeMN9KHDVZYLl6bM5w6NYLBAIGlXIudca0zpIXM154uRTS72vI0O4A0+Nl57Y5T1Rp/Lv5/7KeTevHPXKmO09vo9vME+5exYJU/gKJkwJYEOoB60UhhArULM2Y8MRjKo9KhZkvAkBqIpqFqR+/arb3ziD8rkbEhykmqstVLqoedAYoz381lqPWeuCuipbGsE8QHngFnj9yqbqF7J7XbnD0bk3Fv1fG19fItstj1GtGNpYZOq1gx81JhQMvlGKpHzyDo2XmabhVThdA4x4n14n2q8hSPyjV5Z5MebWUoDfX8X5xJVNZRgqME5x2Qyee+BYg/B3peId72EowOIgyGsHGKUahoRl+X9xOSI1NU55dK2OYUyDKnIzAndeiAGYbUyhCAMnTL0I3rRkKs3mR4gabwHxajlDthMgqPDldymEsmkF0RBSo7EFISkDR4hEYeeNPS5TixCMpYhRgiBkCiC2cra597igzunHC06Fic962XPpK4IfSQOSmMnbE+mXJgvuby3l8FVz//whvK3ajvb2/zz/9x/j2Xv6Xyi84FV5+n7FU1a0HUz1stThn5FSEof73dQoxZrIgyBFAwpDhhrWZweUlVTBt/hl8eAcjQcY63h5O5NbtR1mQwSVVVxeniLra05t55/lheffQxnDW3RCY2FbKJuGpq65u7hMb/37e9xsljyys1bnJ4uinPOKi6aItNJw5NP7jCvdpngsT473BT7Ao5LJFE0PvzeP01ngu+KbCJWV3rSYxEoyD5yJO13ZeGZOXrv2d4mXBzZ3Ep6tGlwTz2JbG0BJe18coK+/AqMbVTZR4xbOoe4pvwsv8wNcr0sNF9RODmbzdMiMXxnABcZ0nMINzgrDHuMXaE28crVVzjePmRX97ikl0rgL5vINyV9zyZNZ4TK2nvOkQJUG6PdTe323HtG7usYI4MfStSfkdC1ddTWbnr3pXwHCBlA5mNJcIwp/hXW2PIote3xoTlMCKpozDpmNuZ9h5HEhyyjGFJk1RepzvVA9JGqamia3E60Dh1JU15zmTLGyRBjYr0OhJjohgE/lr4eSiPvg7Nbt7Ks7s4O/NRPwf6+8uyzz/Ld736X4+PjN0hNf/jsfYl4+x6WCyENQlg5jE2oNdikuEqy8zRgSr+/cTmtVlWJuokMQ46MQxBUHd6bTDKVAI053VSSbiN93+ZmjNy7WhyTcDIieEtKJ51NEKIJqylT6KUcBsTg0ehzXUUciexANOXm+NzalFh2A4OPLE8WLE47VqtA3wV8H4g+9/1VpqKtpswmU3ZmswdWk5lMJnz2sz/B8WLFct1zfLrgtTsHrNeOsNqhqywmDkjogMQQ708E5mlDNaExs/8F35XXlmS4RCJTkyurLk9a9yeOrLMM/YLJdEp3fIfTOzeonGPWZlm1tm3z37MZs9mMwzu3eem573N4fMwLr77M0enJ2XdBTv9tzab4fkFrlUoiNnpUIyZm8QvGCD7Fhxrwwr2Ol7KgM8hGH/eMSnJ0ggrk+qgpiP/zDup8y9smZWoEU1eYK1ewly5tQil97TXSrdfQGDdOgs0i4Ox/OOd0BaQ1mGsm1woPE7rIkbOqop0S
VwklAK+dfb6kc521aC0c7B5wsnuMsZbL6UpOtJfFV27/i+9Z0Gskt93ckz7fANnOjZk9A76dHwNNCe+HHB1qvv/ExSwwYrKYgRHZyPJFEqGk5sdgXpISjaGuaqypNmN9bt2Uv/8y+cSUAVsxxsw4ZSIpBHyMdEX3ul91xCHQNAnV7JSX/YqkiaoSspCZAQwhJJargRjOHO+9nNofDjs+zo+9Pfj4x+HCBeXGjRt89atffb8P7YHZ++J4VSrUtiSzTbKXwfpCzRYx2iMa8iTkLGKFusnwe9tE6pSoPbg2131dXeEHQ9UKfQeaLBqKio1WoJIBDuns6h/bCjIzTGmIl/GmFAJkjuWUKedEE4ZQajQGoxFkINET1aCS6SHFVEhKWOuwroKk1OoRG5nNDdG0TKfKzraytzfnwqVL7OzO2d69yNbODnt7nstXTko70ffeZATfmi2WS37jd36DdTcw+MC66zlZLPF+YHF8Bz/0dF1HH1Jeib+trY+Lg7Oa5g/zcJqU1XJF8IEbqqyXK5y11M4VJqqMPq+bmqZpODlZ8Oprt1it1/T+Xr2RcW99CHzj29/ndLmmnVgm0yycEP0AhQPXoPzud1566Kk259zG8SbN7kbRDejm/h7S8/zN51Oh97/3/HustZtWmPOoJWlbzLVrUOgMNX8wb7NboQd3gIDsC0zYsKkxE3Dk8sz+xaxSXpyXakfiTk4zHyq6PqtBSyvIRYNpydsoaP6xP3hkjjJFfen+nuEHYUmVm3dPMmL8fGYL8qKijNV5r3//+IcYGHxWOhtfq2xudTQiuWVIYBQwSQhjgmiEZNjyfWT1IUdMCR/92SJgA/BUjDXUVYWQyXxiTFlEw+XP9YPPUod9IMZI5Sqq6pSoiT5k2VNrxzKZ5HkuKX0fSqo56/eOLGLvbZ7/vbG+73n22We5ffv2m1JKfhjt4aeaEdQ0qJmTrCG6CjWRiCcTJSxA1rneUQnOCc28xjnZAI9CUIYhEaMwWdZ4b+mWQt8JMRj8YNEkpOTIPQH54tRUopFUOIpTymmh3BWfm9hV6EMWWQ9DIAwh3yjqMSiNNDiJiOlJ0mcHHbPjdShGFGcclXXYpKgNVDGxw4Rq6ssKtGZne87VJ55ke2vK/pUr7O1us+4tJ6e+RLy/8a7H+uj4iL/zC3/nHLp0nHDOTehv2+GOFrhX++uHW0qJ05McBx/cPdhowo4OYVMNPOd0YjqvinPf9oBVP/Brv/0VfuOLXytRZXnxvveHmM6kDB+CSSE7GI8iFJIWSQUHMNb/z9UYzysQjXYmCHJm54XnjTG5Z30cw9E5z2bwsY/l73UcTzFZ7OPOa+jpSa5JPgFcAqMmg5HGVAIWe/0JJF7e1EeRA5J0MKxI307orTPHJnOwHxNkehZc55R52S7ct3h48OQZKSnPvXqXF24cvKvt3J8RGBcXb9fuYSUb0/2ve899+y4lgQ0yYIwTNuWCs2N5o+O8d0MPPcnznthqteJ3fud3EJGHjMx+7+19iXgpieAMGbYloVmELyltRoz11xKRmnFyVYzJDEq5d7Foa46iz2bkNc2tD6QzUXLyxzPwQjLwQgqAQkY1nvPoF8kd6VJAW/l48hvGKFmKUx8/syFcP5fCHrlf84R5JiA+/j1G3fk8HtzElDU5PxgKlefl7x6UGwwhEj6I+iTnJkl5Gwj1+xG3b/lz9+17JOcVuAexj7EZTJgz24jNQB7MfVszBhF3tvECUlQ3/n1uwSTl81ZGSMDrzueHHusDtJRGmOWDtA+S+3qzY/kgHeeDtfBPET/zeZOH2VgtIrfJxcE7D22nD9Yu8vCO/SlVvfRuNvBovN+Wvevxhkdj/jbt0TX+aLwftn0gxvuhOl4AEfmiqn7+oe70AdmH8dg/jMc82of12D+sxw0fzmP/MB7zaB/GY/8wHvNoH5Rjf/AFl0f2yB7ZI3tkj+yR/VB75Hgf2SN7ZI/skT2yh2jvh+P9S+/DPh+UfRiP/cN4zKN9
WI/9w3rc8OE89g/jMY/2YTz2D+Mxj/aBOPaHXuN9ZI/skT2yR/bI/ttsj1LNj+yRPbJH9sge2UO0h+Z4ReTnReQ7IvI9EfnzD2u/78RE5AkR+Uci8k0R+T0R+TfK8xdE5B+IyHfLz733+1jfzB6N+cO1R+P98O3DMuaPxvvh2wd6zO+npnsvHmROnO8DHwFqsm7UZx7Gvt/h8V4DPld+3yJrXH0G+A+AP1+e//PA//H9PtZHY/7BeDwa70dj/mi8P1iPD/KYP6yI9wvA91T1B6o6AH8N+BMPad9v21T1hqp+ufx+CnwLuE4+5r9S3vZXgH/xfTnAt2aPxvzh2qPxfvj2oRnzR+P98O2DPOYPy/FeB1469/fL5bkPvInI08BPAb8FXFHVka37JnDl/Tqut2CPxvzh2qPxfvj2oRzzR+P98O2DNuaPwFVvYiIyB/4m8OdU9eT8a5rzFI8g4Q/YHo35w7VH4/1w7dF4P3z7II75u3K8b6PQ/grwxLm/Hy/PfWBNRCryl/VfqOrfKk/fEpFr5fVrnBcnfXjH9WjMH+4xPRrvh3tMbwe886Ea8w/ieJf9PrrGH7a9i8L1Wy60k1WQfgA8c+69P/Z+FNzf4rkJ8P8E/i/3Pf8Xubco/x885ON6NOYPccwfjfcHd7w/bGP+QRzvtzvmH6bx/iCPuaq+K8f7B4C/d+7vfwv4t97k/X+cjCr7PvDvvN9fyo84t58lpx++Dny1PP44sA/8EvBd4B8CFx7ycT0a84c45o/G+4M93h+mMf8gjvc7GfMPy3h/kMdcVd85c5WI/A+An1fVP1P+/lPAz6jqv/bDPjNvre7P3L1Kz+dF2RHgMWAfkSOQV4F49ppcK2N2DNxANWxExUfRakGKDmiD6nWUKbl+fhvQIjRdozwBzDl/+lk+1GPkVZDTIj0qwAzhMaA6k/XV/Ei6JIQXUR3wEWKCoBBSPsOoeUn5uDHsGcOdlHglvSXl0Dt6n6TU2x1zEVERYT6fMZm0b0kLNYTI8cnpPTqYTdOwvTXHmLeyBaGdTmkmkzd8ta5qmqra/K2qdENPiJGOFT1r8AoLeDPhXhHDZDanampwFdR1fn9PEQMfgEAPdPnEYLGAeE7fs2lhNgMRbn3r2+96vAEuXryoTz/99JuMz8O3zQ2flJQiClhjyrWtqOZrXzZa02f60jDeNdkepJ7ul770pXvG/J2Mt7wdweN7P1l+3vdxeYNzlXFOAZIWcXrdfHIzbibrHouxiFiMcVg3BZShPyGG/p59StFKNsaBCJoiKcXyWtYgr5s5rqrL3KZUlWNra46xhq4b8CHQrZesFrl0mbdlsFWDsY4UPSl0oNB363d9je/u7eq1a4+VebdoqJ+7buQNNJg3pudG+8w5nuk6n3+rpvFtoIIp+uViDNZmfWjh3uv03MZ/2O5/6HHd/6puds4918T46tlHlBgjN159leOjw/u3/LrxHs39sGN5UCYifxb4swAXpo4///OPQwp5YDWiGtGUGIaelBrE/OsY+ZMY+1/j3F9AOWZIA0lrjPufYuyfQvXvkdJfIIYDlssl3gesOAwWax21a4AnCOF/S0o/hep/RNK/hJiENYJynT7+n4j6R9DkSeoRUZwBI69RVf8e1v46lTXUzgC/D6P/Tnb8RhCBECEEpet+kzsH/wu64XlunMBJD4cd3FlDn+DEwxaG/+XsAn+infGfr0/59xeHdD/a9b7wbscbwDnL7//CT/Ljn/nEuetHyMuB15f4Dw4O+aVf/mVu3nqtvG756Eee4uf+8B9g0rZvum9jLdZaPvGTP8VHP/PZ112vIsKTV6/x5JVrmxu0Hwa++9IL3D054gd8g+/xdfRmhN9WdKmkVK7/lKAsWBJQNw0//jN/kMeefgYuXYXHHoeVgReBPgDPoxzwIvAdIN69Q/zN34DjI4gRVJGPfxLz+S9AVfEXP/8z72i8y3ltxvzJJ5/kd37nd3inC9p3bW+w2+A9fvB4P7BYnKIo
0+mUyjlSDGgMWGtp2haxFuscYmx2uCKcoU8Ew4NzvtbaB3KN/9D3mTNHIOWox+U9kifvPA+l7D9GJ7JxpAZjBOsqmnYKqvjVkhQ8PgZCDPmarxqMsdTVBGsdVbNF1WwxnV7h4uV/hpQiL3zvv+Ho8AebOc8Yg6srjK1oZxdx9ZRuechqcQdjLXU7oW6mPPGJL7B7+UlcClTJc+XqRX7uj/4BpvMp3/3By7x2+5Bnv/Elvvwbv0Re9O5hq4btK88w3b7E6vgVTm9/F02R73796+96vK9evcpf/qv/D1bLFX03IMaCrRFjqZsWYywGwWAoPhNQYkooSh5uRWMkeo8Azrq8qBdFRIkp4qNHkxI9aBImWzMmsylt27K7u4O1DucqROy561HL//fdBOXPewO8/Hz+WzeOfvO+lDaLIIy9b3NK0hwQaoicnpzwv/t3/1f83f/qb9w/dD90vN+N431LhXZV/UsUYuqnL07U1RXBJzQkFEEx+ctxNYJB5JeAFzHuJVwtKFNSrEgqiPwayC1UXyClHlWLtS1CwpoKa1y+oG2FsKaq/z8o/4iUvkxMNSKKdZC0wq+FGCOYX8PwKxhJWAMiK9T8gAT4mCMEI89hzX+KkRnWWjB5AnJGqbnJbNZTVTP65KmriLUJROlCdhqGlv/a/vf5lvw+vi6/gudvAv49GfPz452jgUTOCt09964Z8ElyT/m9Np3C5/4ZWK0NuZRznZ3tbSpXve69561uGh7/6MfZ2t3jYx/9GE8/8eTrZmhBmE+n9zznrOWxi5e5sLPDFbb4DJ9A91dw8Q6xD9y8C4sV8Oor8NILLFR5+f6dn5IbG0J5nLMLwKeB5WzGSz/+E/SrFTz/fbh9mz1yfuVNEIZv+xr//Oc/n4tLb7byfy8tB7H3/J5iIPiOYb1idXJACpHlQV7ReO/xPjCdzbh6/XHqpsFYiyn+R4vLGqNe4cFGvffZ2x7vN4t45VwWDJE8WddNzkSlQNKEqICasxRW3gEINJMp9XTG7sUrPPHxHyMOA89/7Tc5uXMTjEWsULdTZjsXMcbhtMGow7oGZxt253s89dhjpBQ5ubOP7++geFQ8TTNj58J16mbK3vUnmWztcuuFb/Pq9wem29s89qlPM5lv89j1j7C9s8/J7Vc4fPUGEcOgaxrTcPHqRea7e6QQWJ4mVJRmu8FWlvmFGe2sZXl3ycnsIilFvvv1dzbm58f7Y5/4mL52+xWWJ55uHfEqdMkg1tHOpljncNFgoyEZCAYiyjpGoipRlCRKZQyTyuBEmBmHE6E24IyQNBIIxJRYDpEQFRs9ZrFkPmkYfEflHG0zxVmHqx2uthixWFODmM13qWXZmP+NztWUFYGee7yJpXQWlkuOxs/SxsV5v8119rtxvL8DfFxEniF/Uf8q8Cff9BMC1jli9KhA0nxDqxjEmhIhfQ34OuIspmpQ6pwWSwn4PeD3SCUlo2qwpsYIWOs2jtcai0iPMf8YQYnJE1OFiGIcpOQQL6AJkd8D/ibGRIwBSkyVFBJKTGDlJvCLqBEMLqeRbGYfUY20TcDahnlIGEkkFXxUKgOLAbzWfFF+hi/Lv8yKYyJ/m3foeN/+mJPIqfab5567ADzFGznetoWPPFO+LK4AnwJ+tCNxVc3lx5/g0rXHuP7Yda5dfMMMy+u2Y61lb3sbgGuUz+yfwBPPMYSeZ1+A20fA0MPLL3Kgyk3u869dedy/r3KGc+ConXDzyafovYe7t+H2bebkWca+/qOjvYPxpkRS72NXiNz7u2okhYEwdAzLBd4PDN2aEDz94Ol7z87eHhcuXsRag2pzFvXdt8H3eDnxjsb7h9mYPh/PwjpH3bQoSu9z9LVxtilBjGfZRYWqbmhn2+xdvs7Tn/kcvltx+7lvsjy8hRoLaqjbltnOXp6HhhqTsgMwGObtjMsX9kkamU+3OG6mIAMqlsl8mwsXH6edbXP1o59gvr9P9Kcc3vge2xcu8PinPsXW7h5X9i/mxWq4
xcGNY5JOCAwkiezsbrNrapYLz53bETWRegdsDbPtSD2JLJs5TdgixfhAxjzGwNHxHZbHSreCVYCTAcRZpjse5yoqnx1vNIK3EEgch4DXRLRKNDBtHHvTltoYvInUYpgYoTWCSiJIIGjiNHj6kEi9R1XY6RsmJtFWFWkSqVxNozXYCmsrrNSbL183zrdEsWehL7LJFetbuFd1kyrRc5/jnPN9u573HTteVQ0i8q8Bf488d/1lVf29H/Ehku8gBkQTBiUnD3dQPoemLYz5BmKez6vsEv4bymRW6rkmPY5Jn0V1QMyXgEPQz6LxE6Rkcn1Elhj7RZA7GD6NyEcx9ibOfZWklsoIUQzwaZR/ESM3qdxXENZlP6AxkUIkIuANRoCUMNYgKIiSQsKkgNNEaxSpICYhqMFtKScfVwY3wLd+E24EDF9GiO+ocewdjfk7sNf72Hc23b7VqO/176uBC1gT2NsG54CPfQLqhm0dsJzirWFrf37f5zw5/B3Ixd6zo2+AayKsgENgdX7/P+S43ul4v69O9x7Lx+GHnvXylMXJMYd3buH7Ht93xBiJSQkJJm1DGAZiVaEpcTaRSIkW3nvn+yCvbxHBurwQN9ZhjGO6c5G9q88Qo+fw9osM/RJja8RUJN8R1ieIWCaTi7hqwtaFXWa7O1y8fJW9vS3C4Lh0/SqGnmY+p5nOMbbG1XOMOKZhC5dq6qalbhradhsnkZQCj195kq1JRaoHtB5opztcvPIMVTtlsjehahK7ezOuPn6d6d4Okwac7em6O2i02Dpw5YkrzLe36cMJx4vI8vBVumXk1ZfvcPDaK9STiq2LF2mnFdvbNdO50MQ1tt8u3+m7H3NFCQpUBjsRmmSYtYJYS9tWWGupnaVKFi+KMYpBcAmSJrrB0w2BGCGFnkoMnbHUIswnjllj86rHKEkBcTirDFEJXllJ5NbxispZpl2kco5Z3zDvG5ytaNuIEYORjGEw1iD23mtXNi5Yc5lhMxbnkUevL/1uPnUPUOrez71Ve1c1XlX9BeAX3vr7E2lYQhpXHGBESFyE9KdBn8HIf4yR5xFJqAYEwZYwP0Eu6qdPYtK/AZxg7P8BkTXe/1Gi/1dz6hpB5BVM9TLW3MDan8XYP42zv0HTPE9MjqU1JGNJ+oeJ/D6c+U2a6jlEelLIdYghBKL3kAQfSm3XCznbrBjJg29jwijMXaQ14KxQV4bpp5TFvx7o5mv8f/i3iDd+AceKdxjtvqMx/3BaC1zHGrh2Ea4quYb7078PZcFneZ4hDDx3w3L3nnb4HniVHP7eeytMgI+SR/5b3Ot438w+bON95vRHV6n06xUnRwcc373DzReeo+874jCgMWLqBlO1NHXFsM4Tmsb5Pam6zUg+hPT5gxpvEcHVdam5TnBVw/71j/HkZ/87+H5F/N1fZnl8m6rdwTVbDKtDVhpwruXStc8ynV5k5+KUrb0Jl65d4rFr+wTfcfTJj7FzYc61J5/h8vUn6buB48NTTDTsc5UJU2Y728x2tlgt1tx+9TYRz6c++lmM+XHSliduDTSTKbv7VxBjOF3cpe/XXLm2R2U+STWpmM8V41Ysl69xGjq2trZ5+sc+grGWlb/Dyeo1nvvaK9x5+YjTkzWHh6fsXtzlI5/eZWdnysWLM7Z2GpZtZKtZ/VDH+3bHXBV6VaR11K3DiaUmYwJqW2OMoVFHjaVHsUR6oMaSNDHcDZysA6KRwzRgEabGURnD7l7Ntq2xAs6AFWhsRW0NQz/g+8AwDBws1xgR2skplTNcmEy4MGlp6pr5fIW1jsbWWGOpJjXVpM5OuNSDjeYiZ0Fuve78dPNTzoGsxshYz1LLWurVDznV/PZNG0L8xOZAlWNUbiN4DLdRaTCsywAJqlJAczkNLTGWoXCgM5SAqi0DdUzSlzerFJGbmLhENWKlApmT0kVi+BhJFdU7oN8q4AoFvY0QMAJ6HtUpoDJFzGMIFWeJqzvALQTFSp6gnBWMgFehUqGKifq2
EJcKXYeYiJF37nQ/2DbGlRPgzevB5y1poh88MeYbdCAvu1vAIOTo10JloQErkYZtTBiYttB5zu0vv1/Rgmkmo5lHEAfnwDX/VNu4Ms/Xdgyeoe8J3qOacrZJIF/smtG0MRKDJ3qfQSUbEIpuAFbyIRo8MYaqnuCqGls1WFchxqExgRgmO/tIZWlne7STLVZHFak7pnITLuxfYmv7MrPtium8orKG5cldYhxwjWW2M2O6NWMynyFi6BZLxBhaU9GahnbS0LYNw3qF747xfqCuLcYKGmIZ7x4/LEFgvTyi65aEsMZWAhLpl6cYoyg9SgQiYiAEz+L4mKEPLBcL+r7D+54UAzF6QugJQ8fq1JOSoTs9pFst39Txvh3TfCSbyDFpQiWVQCqCghnDn5RIKZFQkslLOGMFV1s0QPRKUmWtES+RqrPYLmIN1JVgRTA2l2O1oMajCkMkX4xRCZJY9oFaPUMAjMXaQKwUZy3JgjowYqnsWPeX8bJmvKC1oNSzaxp/H18bz328r2TMNPNOUQ8P1fGmdJ3V+n+PlNW4kV/C2L+MkRvU9j9FTIs1xxg7IxLxKZSVq82jlCIS88mn4phDBAiE+P8l6K8QU2QIA0JP429gZaCpIrWzePNphuHfJOodhv7/TQj/SSk0KyIdVo8wmvJEYy1iA2pBzMdx7s9huIKzYIwi/A0MfwXR7KxBaKuM+TQe0iDozcj2fxKoFfobQrCwHmGhH5Rs5AMzRxYDeYa343hDiLz82k2OFqe8RAYl7wAfB1ocGf60nbvILsEWE57iCWqrXL8Mly+M+xayu36ciPIicABweBdeu3V29/xTbmN9WVE0BjQFVssFRwd3GVYLamtxTUNV2st6H1gPgTR0rE6OISXmu7swSWcI37G+xYfG72Jdw9b+0zSTLVQjaIQknNx4GTed8NhP/AxuOuHi3h5721vceu45vvNPZkyaCT/zB3+WK5evEcKSGNYcHb/MN/7JL4JJ7D/xGNuXr7O3t8dsq8WmgUEGxBimU2FSOdqpUDWJGA557ZWv0K0WiM0oa7dfY/cbrLPcnrQkTRzevcl6dUo7mzHZmtOv19x58SYQufjkRWZ7M1JcsVw3nB4e8+xXv8V62eHYxeiERMJYSGng+OgWQzjghR+8wtAd5mAl+Ac23yhCrwYNEVIkleeMMYhGnLGotojWxCGwWvYMovRTZbDCZOqoZw3rk8DJssP7yGK1JsXEYogcLQKuMTRbjsoZ9ieO1oFYS90YQggs1okEdMFgMCxXgTtDoK4N89mayhm2pzWts8xmLVuzCc45ps0EayzOOawxGRdkXc7EpoBqbgdVlTJdmOJgS/unnqsbFy+d3cfo0d66PVTHq9oQ4kdKzRYwX8eoAZYYea7k5qcYbYiQV94GjBrMCDykgEW0I2lfYN2JqDdI+jxRAz71GS+thiQ11gxYWYM6Ek+Q1JLSDVS/ljeaFFKN6AxRi0iNqkEkgQyITDE8jcjjGFOXi+y3EGaI+E3qwkgASbhU4UyF6wP1K57kEyp1XljICqF/yH7XlEcqDyXHg/4N3gM55nS8Kd53NBEwBmwF0pCd31u/CFWVVddxulxyK0SeC4F9Y9ivHBNTl2OMMAUGUAO9aTBA5aDeXMGlp5AJQYRK8sqb1TK3A6RIKjeNMbntzNj3hqr8YSOaX1dTzgCJMqEkQvAMfUcokb8RwVmLEcGHCJrQGHPtt64zEOc8GGVcKL7Bab1v6O0fYSIWV0+p2zkx9KQ4QFL8egHOUM93aPd22d7fY3dnm9XRKe10l0nTsre7x/7+Ht1K6NfK8WHg+O4NsHDhicu4xmHsiPVOWA2IGkQiYhIQUB2Icc3QH9GtT0pgpLh2QtVOEWcwfkFKkdOj26xXC8Qq7daEGAb6xXGOjP0WhgmaAt4PdOuOo4NDVqdrtrcmTJpJ7rKwuaYZo2cYIseHd1kc38Sp4B7gcknJPAWUvvAMRQVLzpxkB5XnGQ2ROASCKLHKnAam
ddja4V2+XzWBD5HgI7YLIBanltgYKhXmda4TV5ToNxoCQlQFFWwC9UroEkNKYJXKCs4osbIYJzhrqKtEbSrUKCKmRLxa/Ips0MlnTlfIM0jGN2RHa+51vOXs367ThYcd8eoNhuHfo3IOaw1iX8Q1FYZtrEwBS985vLesw4qTYQEGqjYh1mBiwkQYwrdYD/9Xkg5EfQkl4VCs5BVISgVUYiASIf1Dot5GkmKIKEsS3yu5s9JPnIQYetTskPgTKJ+A9EtI+vuI/ABr/2OMXKWy/xJGPo3hD2GYIZKwkoAlmv4Oqt/A2p/BVT9Poy+xPft/0YQl6/Qv4/VzHHa/Cvwd3k2d9+2ZJbPBXQFukRnfVsDvktPCo10mV0GnwGfJddLLP3rzu3vwzEdhugVbHVm8ZA/YfltHqaq8+Nu/za/++j/m6pNPYP/YH2Nrdw+4A3qaOWZ+C+pt+P5HoG1yLLxzz1Ya4CJiKmYXYGcKzOdw/XG6vufmwR1SjFx/5iPs7O+ztbuHsW+Caf5QWl4Eqqachhx6Tk5OODg8IvYd/nQBKdFUFmuEvh8Y+p6lwJ1bN5gsTpnvXsBWNa5ucW0mgDiDdMqHJOxNWAYMPcGfEvsVw7Ak9SfYfkp6Aaq7W6xe2+LWdEp395jK5e7TWz/4Ad1rt4lhQQpLjo5eJqw6QvI8++WvI3XFhb2L7OzuUSWhDXmRf+OwRzQv6Kw1rFcLmq0K227R9wtCCPhhQXewyPNZXaGq9KcrfO/xTUdolhAHtrYbRJSqsmiC5cmS7u6a9bKnnu5gqi22d64wmV7I81pUJtOWa1euYq1w98WXOL7VE4ZAWA3nK/XvyqJPHN1Z0UwcrjZndVADyeSFcdIMmo1WCVYZYmRxd02niXo7Uc8VQqJtcweK73LXShg8pzHgosO7hqqyWKM00dGI0IhhMInkxgVhAgy2hsoYnBXUQjTKSRgwSTgOPW5xyrSuubrVU1tHXdU4a6mqmqZuCvFHLm9iHWLGtGQpxYR0T9tQLu/m1xKWRIu+WW/EG9hDrvHexYe/irE1lgoxDbaaIbRI2kbU4QdYr2HR9xyuAyqKm5JrA5pwCEN4kbV/nqgQSo13YpXWkJHGZXdewZBAv0xKX8NIwpRGlDRGc5pX/KSAxoGkDuVnUX4O0i1E/wGGGxj5W1i5jDWfx8lnEH4Mw4+XVqYE3CWErxDT72LMp7H2f0Tlvsa0+QWci0j8wwzpX6JyHfD/4+E5XkNOAX+K7ISfJ4OQnrvvfR54mlxTffrc8z9ilt3ago99LLNAMZAZwmreruMFuPWtb/O7f+u/4uCnP8f1n/sj7OzukFnKyI0OXwUuAS3UW/DjZOd7ZnNgF2crPjGDyzOBdgLthNPVkjvHR4gx7F+9xv7Va/nsPqAR27s21RwhdR2r5YrT0wWp7/HLNaKJWDmsEYIf8ENGgB8fHDD0PavFgunWNhiL24S69//8YFtO/gUMA4QVaViQEEInmKEl3HSY6YzF4ZS6bXEDNFYwKAc3brC2txFWGDoW69vEfqD3HXdv3qL3nouX9tm7sMdWO+Pa9mUshvXiNn4IZFKgiHUV7WyeI7jTFanPZbD1kFPTpq5QYFj3RB8I3UBYd1gTmc5qrIXKWUhKt+o5OOqJAVw7pWod090LzKZ71NbSWkvbTtjfuwYk1DcsjzzrRcfiePHAUs0xJpbHPTjBtGaTP8NAkpJTEyWRSEaJRgk+sTrpWHuPGodYi0ahqgURQ+UMGoQ+BPyQcBpJjeCixdRCTWLiLIMzJFF09HGiIIqx2emakrSLovgYSTER+0SMke26pkVoXUVT1TjraGpPDBFjLFXV5j5gaxAZS1fZtLAnnmGoZLPgUCyJirerN/RwHS+7CD+PlW9j7csZ/Vto1tQLGg0SDCYIrZ2yN98j6IRF+hRDnEP4GqRnCemMnGJ0YJPKU7mAEseyLQN55dVZct+tUSqjGAFnwYrBWMGahDMGIWF0hXG/jcgS0e9CshgU
qxHRJRp/nSTHWDFlZXSJLPMom0WSMUrl8hpz0hqqGJH4ZVyy1O53EX5oT917bNvkqHZFjn77N3jPW5xUL5DDzckSXnoemilcupwZON6GxQh3b8Irr8LJ4QVUPwarC/Diq+B7uDhu8wg4gHULL14mTioOuJ9V0gNHWLfE1/CyZqqQHaDre2IMqAhHZQTG1/7p0cbUzSNpous61qsVy9WK5XLJ0HWsj08gJaZ1hbMGQ8IQQUwG4SicHB1i64ZthaqZIsYgbkxYfvCd7mhCyve0NVhXpjoRxECKHephiB1xZZlIw1Tm1MbQOmHiDM40OLHEsKASR8DhqppoDDEp/apHeuX2WrFiNteicw7bWkztMPMml8LW4IeIaxu2qnl+LgNUmO1vY6xje/cCO/v7WBOp3DKDrDQQQiR0iX4x5DKZTDG2pp3tsLVzMbNaRY9TWB+dEmNgWHqit2hyGFOXFOn6XY9pUmU9BEznUXfGauasoTaOZBWbC9r0Eog2klzEVDkm1Bjw6wHB5OBLYNJaKgNmAPEJYxWVRAQ8IZcCQySkAjQgExiVpC8qimJIqgwxLwWixLwA0Pz6WiN3ug5nPC54jJpCX9tT147dLY9zliYFXFWRyZwKiCqW/Ywp6VxYLM8lUvKcZ756K/aQHe91hP811v5H1O6VfIFXAlEYgiENBjtUWO+YTyou7ExZhWucvPY/ZtFdp+//IsPwlcwqpRlZV0nAoey0StvkVUmK4BMcB6VLEEwkSKSy0DqojLA7N9jKUFmhdnkCsgQMRzTur2PdjEpWVFJlmjM/oHqIpv+ciEOcK2jpLwD/PjA7q+M4pZWEqxRjLSF56vC3GdLfY3p8BBLeJ3DVFTJK6RD4dd7Y8b4FM8CTZPKrVw/gK1+Eagu+8PvftuP1A7zwLHzrm3Dr5cdR/UNwIvC1b8LNGXz+C2WbrwJfg9NL8PVtolS8BPexWPXAK0gtfEPyjfwk8AlyC0FM+WZ+uWztCTLBxj8djlfP/cx0d8vFgpPjYw4Pj7h7eMR6seLw9h00JeZtQ20t84lja1LlyT0p1brj1quv0HU9McFkvoOtKmrnXsfh/ME2RYgYIs5ZpG1QLShcC9Gforpi6Hp08NjpBer9p2itYasxzCrHtGponUFSxw1pSCZRt1OSBmJQFkcLliFy0L2KNYbZ/hb1pGa+s818ewfT1tgLU2KM+GOlX3m257vsXLiE73pWdw8xRrj25EfZ2rvA1t5ldvcfw5iAswtC6Hjhhe9wcHALfxpY3F2DazE7M+p6xtbeNS5duY6sTuH0kDB4jm/czlmOo57YWUgV1rbl6jj5EWP2oy0m5XjVERz08czZuMoilaEWR7KCF1jZgK88kYidKM5C9ANhSNRVzaTNPUPVrCJNHNUqYdcZkdwTSCr0CYIaukGRBM4Y2qrCilCV7ItiMtg2JWLK0elAyNVXZzDO4NPA4jQD7PTIoJ3BGYezjq2Z4+nHZ0xay9Ywo2lyK5I1DkFyTzBnACrd4LYV1UiMXQbwvQ17uI5XLGK2MdJgCudxPvyS6lVBk6Aj0jjj5YAZ6BZJq1w/KKseEWitoTaw2woXJ0JK4L3SR+XI53SD34VhT4idIgc5yhoGxSjYep/KXgRJiIS8updZbh2SuxhZg8xI5gqo3SQYRE5RjhG9g/IcMENkhbEW4RjD97HyMlYGkERl16jxWNOPVZH3wSxnzTo7cE/kPeXeWXVcIfdklPKUe1yUzy/bLtEOiQke+yOQw6qKDwF/ToBh1fUM/QnBH9PUnv0LNVtzxcZA6juWi1OGoyM4XMBBD2YNs2PEBmbkqu7rd0SeY+5mOo2jc0ceEHqmBGq6GhYTsA/cmbzV7/e+Hev5X3/0NuSez+c08Aj6UFVSymm2GCM+RIYQ6IYBLX3nwVmcURonWAWxAWM8/WrNql7Qr5YMfUelStU0mzbe88nmDdPTG5653vPaQzWRDZGCESFJrrsa60jOMNQVyZbmEuuwdZXBQSnm
mqlC27ZsbU1Z6pLpbBsdHE2wJPU0WBoMqeuJwynWGqbb27TbM5ppi53UmMZBZTAWpjuZX3g626VpZhh1hKbHiKWZbNFOtplMtphMMmATDYBSuZa6mtK2MJ1DlJpgapJWqJrMZpgyiU8KgX7d0a87os/MfkIBgj0wVLMSYqIfAmrYCCNElN5HVASjAbXCECOBSJKIWMVWG9wVVrTwfiticmdbDoKE4AypTFXGZM4ELdnLjOFJiEhObasQNRFVSJqv94QSJBDRzTWAChojRCEFiwaDlYgNAazjcAFr7xgiNPWAs47KVRgjVGXRacYomJwiz5aIyZM+yBGvSAa/VpVQVfmmF+2z001KioboLb43JCOwAp92ISWsKcxR5N+dGCbW8MxuzU5r+eyllk9eaOh65eQ0crQOHD1/yKnvWf1h4fhfENxXEs1/FqmOldBHGmfY3frnYP4/pKkM01axJuftY1Tgb2PN30DMJ7HuzyBcQgogK6a/S4j/JUmew/IfIuIw1cs4M0XSb0N6ERMWKDeJEpCqpTGOpvogxFdz4Ke5t87ccu/lkIDvkXmeHwd+kly7LS89D9yC3R4+6qGps2v+UXbn6JAbd+8wzgSrxZrF+ovAq3zkmSWXLyl1nUuz/eD5yre/ycvf/z58dQW/pbB/DL/vizRzy+8jR7Svs0AWLnshd1svOe8YHGt+HHiCu0/C6lMg79ldMM5278b1jIWl+4FNP3ybmUs2ElMs7XWBbvCs1j1HpyuCD5yIYI2w3p4S4oS6rtlRIcbE3Zuvsjg8wophMp0wmedITuqGMttmVOgbnebrDuv9WWQKgnM1lWvQqGgSZvMddi9eJTrDYe3xVpk2E9qmQU46/M1ThiGR7IBozaXHr/PUZz7K9q2rLNvI6XqBLO6yCB2Xti5wYb7DcHDAyfe+i6srnvnZP8T249c4OX2Nk8VtMAZxhso0fOLx309bzxmO1vRHK8LgmVYXMcayu/8k0+1dtnd32dm9QPArlicLNFi251eo3RaTXcfWE47FauCFl48IHhYLT12fUncn1Ktj+mXHwc1D1suObhmQVONMReXGToMX3vW4pqQs+4FF73OKuTLUTeZKHgSq2tFWPZXLMNZQnGY1VZwKVi2WGkkGE3PfLzYHWmZiaasmR9O1oFagFnDgRQkhC+qELpIQBmtIIiSvhCFnMwKBJIp3SrLn7hmFFAVUkFAhxpA8xEGxa7h1KlgrNK7FWkfb1rTTmrq2bG+1VM4ycZbaGioj1DYv6JwxrNYLQhze1jg+5FRzwpgeMakoh5BbGTTDylMSYgrEmCBktqiQeqBH6Evevc2aOQKVNWw1FXut4/Ks5dr2hHUHTRIMAzWnoD3xgjB81BBfBTQSfe6nTUGZNpcI8VNUzuZjKsWDvL66ArSI7GPNTyBcK6v+RNKvoUxQHRD9NkaKLJi1CHcxehuVhBEFbahMQTuacXZ6mBNSIke3GSKfv/YLjGnJs2MZfx8rKEuyuMIO91dTZQXSQYOwZwyNzcvT89y4o430akmzBODJcrGJyLrVEh/uIHKbrS1ha8duDqtX5fDklJt6Aq9R6KYH6G/TtsqS12kinNlh/hHEsCqM/xmTUZFYoQTirqEPrz/eB2Nj9eve7/oNWw/0vl/0/pcKaKFsaqO08zp9gHP7Opd9yGOthJQYfCAMnkD+lieNYz1kMEloIyKBfrUm+ch6uaBbLrHOkWJu2ztjrxqd7+YIzw74vp7f96OF+mwNMKJTwdiKZjInOoOrepJV6tkW7XRKGk4I6ST3xCJYY5jMZ2xfvMDCL9na2UOriql4kq/Z3r3A/u4lOgzMb1G1NftXrrJ7/QniLc/CHxW8B1hn2Lqwx9Zsn9N0QDqNGOegNYixODfB2Ux56KxFo82YzyhUbgKtJTQNITUks0ZY5VrpEBm6AfGeOoZMguJDjnYz3yJGaio3P/e9vXsLKeGDEmOiipYkilOl6gOxSE9WUVAj6FhaN2UxhOBE8o0b
z887ma1KXM4QaEFIxxLlSibWzopFpVIXtcgDDgn6ROSc42XMjLIhqMpiQ4JRECwhJULI210OKR+f9VhxtNOaaWhoakcwkbqy+MrRWENthImVHA1bS9+v34wL+w3toTpeY27Rtv8xxn0DFYPHoFEI4SIn/o/j/T5H3d9jsfqdLGLghaieLv7fiWkfmy4wkf8NlX2WafWPmFUDV+qW/cqwHSLN6SkSP0Hi5wlmxcz9VSbmWap/YuBQ4BWDrmxuvvaGEMAtfwXlmK15xfbWBLFn0mdp2MKn/zlWrgI7ecIr9D2BnyHIvwk8T0i/CHpE5cl9ZSoIBiNPUJmfR2VK3fwGWn2fphoeBvPeOYtkBPMRucb7NGdf+xi6nhdQmAIfI0OPniJHxzucv1RE4PLjsH8VrrDLR3mGerIDz3wUubDP9uxeDuUQI68d3GXZdZwsFwAsjo959fkf0K2WLI4LcvnaY/DYdTiVfMiDL8d+eLax4xP48vfwdce37jvyN7THrsFHnmLbWj4FNES+y3Pc5JDH7lzmY199Cmvey5ai+1HAPwIVfN7pbijtsqSaaF5A5C38sG0Ud2Ms8605IsJ0PqdqWoxdMfjIMITMjYFgVh1BlHkbqFxF4yImKbiK09u3uNU2bO/vM9/boZ3NqaZzbNVkh1uCcSkMP+d8HJun3qeqig+eO3duUy9OiSE7pdXgWQ0JaRri/j5aNyyXkUFWtL1hu7rKzmzCx37qk1y+tMflpy5RTYS6Fdoakhqeme4QZMbV/Stc3rsKj12Cj34E6xxbzzxJtT3j7ve+zuG3vsV0a8K1py5TG084+iYnJ46jl084eukE0QpntrBS0a09alvc/hx3aYdYWmsiCeNyHTIcHHJ85w4npwPdrQXewzKBO1wzu9By7cqTcEm4fk3ou57f/I1/xOL0Fvs7V7j22CcwxvF73/kH73pck0I/pCzzlxTtEzF6jMn7NVao2txqZIzBOsGIUFuXU/5GSZJTvhJyP21KWsgrct95JCOkI7AiEVAGjQwpO0erOfvZm6xuVAWoQ1Y9ijaQDPQuEc15JDKbWq0Yz3ihqsv32HhXppSBV3Ft6X1G/h8f5/1VKWE1YWPA+SEHgNYw9B23b915W+P4kFPNd6nr/xJjaxRLUCGo4OMup+G/S++f4qj/XU77Y9IAqROQI7AvgrRY/bdx/Bkm9u+zU3+NeX3KftWy7wzzcEy1WmHkEvAv0MsJrf1vaOQ72K+DfGUsUFmSZMlWr4oxXybplzCuIcg21liM5Nqyyp8lpD9JklzbFSnINiDpZ0nyWZL+NjH9GnCIiuCUwilhMHIVMf8KsI+tD5H6FeofIa/34C2Re3FeIaeWn7zvtVeBb557bo/cpDMntyFde90WReDCZXjmU3CJbZ7gk1TNTuZT3toq7zlzDDFFbh8dcnh6Bu5YLxe89L1n6VbnWJP3L8KnPgM3JSOghjU5WXzO8S6X8K1niZzwPHnZ8KamCs88yVXyMsKReJ5X+RavEo8izxw9gX2bPXhv336Esz3/tvHXc8oqY/ScM86ZACC39b9+m2PMa4xhMp1hjGUynVLVNWItPubUc07BCboeGFIkhMRuO4EqUaeEcQOrwwMOKktKntXpE7l1o26wVbOJIs5UXsYo/Ow8xjr1+1HjzSo6B9jlWFiEVTewWAfcZM6svoCbVqz7SD94nG2Z1nvszLd44lPPcP2pfaq2wdbgaqgrAMPO1gxTG65e3OfS3iWaZsLW/AJiLYNJ+BRxyzUnzz1HdWWX7ae2qE1gefoqa99zemvJ0Y0VlZ0zmz6BSk1/Z4kGQ3Uyo15uoabGV3OwFtlucM4SlksWr/yA1TIxHIAPhi4aquMBO73Oxb2rNG3LZHuHwQ88+50vY78b2J5Pefrxp7D2wcw7qsrg4+b6jDHih5i/60VGTbtphWsdzgi1tVhjmNY1ztgi8Z2QJEjMjjfEnJFJKRWUMKSQgVzrwdOnxEDCo5ksxBXyl+J4m5TB
tphEqhNJlLWJhCK0kDSTxlRVjpCxPm9HzqR2x1tPk8dEwXsgmYw+L4Qy9F2moF2vYbnIWg5GCGHg8ODwdWP1ZvaQU82FH0nzwOdVh0A6IumvkHSfJM+hkgvuzoLIFsb9FCIXMfQYfpG2+gZtHaid4DWxDspByswnSV4iyC9w7Ncgd2kqyyx9hm37KURvIvEriOYLRFIBC6RE5yMnS0/jE/NJQ20rsC9g639YEG3l4hrp+MbIQr6P0INkAXFrDM5mWjKSR90XgTnSHiBVe9bW8JBsFAWsyHHrZc5k8FKC127D0XFuNLoC2FrzL5PXT5fWOXYvXqKdtMx38nP9asqduy9Tt6dsX7lGvX3Wv9sPcLLKP4eQb9rTo0MWx0ccH9wlhogPgVu3XmO5WnPhsSe4qIpMDDwBdVf9/9n7s1jL0iy/D/t90x7OdOcYMnIeasiq6q7qsbqbLZEiJYi2JRqwIRi2BRkiJPvNBvxgwW+CH0zAsgEDJiAQIGQKkkXZkkzahjmJEtksNru7xqw5K+eMOe54xj18w/LDt8+NyKzKqszKyGQ/eCUObuS590zf3mevb631H/gSL/K0XGElDYtJA5tdOFXgVzyc4OYoqpLdwyO0NszPTmlWK86LQ47fUDSq5W0eUBFYcARMkZ0FHL39U0bXjz8eWcufWQE+si2Xh/+/PdNkaLNlIIvmvX+s3v9MbMEfzjqkEKbTHQ6PrhB8oq7HiCj6vh9AV3kj1WrPqmmJwVGoXKW0bYdZLLFFyfz4BN97TDHOxu9ozNZLe5ttt5ut93zcD4KJfbLpWOBynJGlM2PeNCtDSBF9cRfbztEedIDOTegrQ18r+vUZ/UoIHXQO2vUpQkCpiPYeK4Is7+OlxU4PMZMdoo/cvfU688U5zcUx+7MJe7MdppMDXGXRKuB7jbu+y3RUYdSIyl3BiMUtIqYTJrsV01lFFM2ms6SksUWNHpUgieX8jE0rJF+ipWAymnKwc4XSOJqLU2JZkJInhICNPWOnmU1K9g+nmMe14RdInVzqgKfYkeIGhsJEacWonlI7Q4GiVvl6U6aUfzqhKB62mFNMhIuW2Ad81xN6TzUu2T/YISmDbg1tgNWmY9P4zLm1LrtPJdCD4U4Wjcl7rO3pKPGhX27SGq8y1dNIzH7TOmU97LSd/0L0mpSGvCQaUiL2XT6HNgukb4ltg1/n645WEEOg73+GL+nPiU+34iXPZnVKjwAzFCreJsW/SkhCYoXoTMkpHGh9QGH+Fxj9ObT6v6LV/46qECYjodCKNkXowbeeB12H0t9B2+/TJAEaxpVjn38Vzb9DjP+I0L2OpAav8y5LVKKNgmo99842lKXjupmh6hG4b1LYb5MkkaLPu7wYISVEDSATFVG6RekS5woKayldRVVWIA0i/zdAkQoQM6EoL4Zq8NPpwUXgx2Thp8+TyUTbNBMjvPoT+MGPMuXmADC7wL/Ie0Wthiirmudf/iJ7R0dsBZ9W53d441vfohpPeOkzn6W4cvXy71cNvHk7d4yzBqrw4NYt3vzR90khZj/YruOV7/6Qd2/f4deefo6DPyuoHeBXYUTNf4/f50CmvP0b93k93CE9iPAnf2HYXX+drO6cY/fwkC/+1u/gypIff+sb3H3nbb7/quH0a5pFXPLHfAvYEPhtYAqfvw/7pwyovcccH+E5Hz0VLhXZBxynyECRSBhjMCZzCdUHOhYoGExFyrLG2oKr167xwrrF2ZK33ngXrQ1nZ+dD8hWCj0hInCpNVbjMrVeKtFzR9z2+D4ymbzOe7VBWU4qyxhqLdkXelG7fi/rpKvxnYT0/lQpYIEpCUiT6luh7VN+h+xbdWEJYYazDUKDFYqsD1hPNSHc0p++yGZ0R1YrAhuUmkVRA6YBrF1jpie19mtOIvfoS5upz9F3P97/2D3j3jR9TjQzPXLvC7rWrXL3yLLYwrAvBdxdUNz5DNfksKhXoMAIP4d6KtOqoR4p6ouiaAHfXxASjehe3O4Uo
PLj/Ln2vCX4f5yZc2T/imSc/i4sPmN9+G+MM9WKPKAkXGg5qx7XDCU+9cIRzxeNZ1iiEdRqug0IIa7ruAYqYtRGspt4zHFY1VYJZzFQ+GyMqJYpxwo1AbCI5oW896wenhHnDZr6iWa6pnz3i2Wf2UEXJZOXY9JqT20tOl9kGsK5GoMA3npQSNpOJMpYkDpvVAa6yxZaIGbakCuphE2V0xJpAjIrQZEZNChriMJzXGvGBtFoivicu7pCaOeu2ZbkZNvyiSCnSbJYfaR0/9Yo317wHIFNgRZay98AGRcxiFlYonKKuMtKsdgGjPUgLaY1zBmfc4AqUiMMMQJKglcfQEhRYpyi1ZaQ8gSUhNLRBSEqhJLtmqEHtxEhWWDEqDeeURmuPMh6dEokeBmeX7QVmS3jC5CSsTJ5r5JsBBJF1rkt0DbrIKINPMwT8BvwF9GWP1AuypnINKJyrqKodHD3QQJlAr8go5wooMNZRVjWT6ZRZVbNTlPiuI2w6/GrFqj3G25bQrqBts4HuoAVsTZ55OwokVdm9qevo+57NpmG1WrPebGiblrCOmf9T5LentWJExYwxu9QcUBEaoatBYpaHhDa3AkvYOThgZ3cXV5bsHRzQNxv2z2HvCoSug7YeEBb5GIxsQvHR0Igfds1zPJJi3l8MCj9dC8rDdnJMkRDDQJHIBP2iKDLdjQFYLEOiUz/1NPl8VBqlhbKsmE6n+TabEmNksVjlTo0IKUIMmQ6iUHQ+UrgIOqC1xnc97XKNVoZu0+CbFlVVWOcG55j8gfJ34iF8TMh669vq86fX5xMMpTDGoowl+e4hwmaQiE2hQxEffoslYgepR5FcJfvQ0MclXaPwncrnjhralraiqAzWVZASKQSadcNqsaIaHTCeHVCP9tC6HtzVCpQuKcoRo8kMlSx0FWIFPYq5uq3BlkLfJ3zfEEJCLed4G4i+pXAOEUVMemB5ZIDP5cciq5UlSYzGE/avXGO6u0tRFRj3eCreLUDKaIvWQi8WjAWlsEYwRlEIuJAoIpQhuwwVZVaFsk5jC00yQnQJFRTVQB0SC7rQjAtDXTl04ZhKhS00fhJIs4QtHdW4AgWdsYMISRZKCSniox8KpZx8MzV1cE8y2dBGic+Khsoj2ud5c2+GwbJBkgJlMto6BMT7nHj7nth3BN8SfDt0VMj84T/N4CoApCSlvwTpz6LUP0Sr/wjFBqdbkklMRxnIMB0XHOzXlEXP3uzv4+y3WM5vsV4dgfRoWhQBUkeUSFP0tDAkiQkJxU5K1AL75g/ozVtsmmNO9IYQNKHPHF9j8nmTJOHXPaEX+h1onaGqLa4eIymAb0AS2g2Y32E3JSpLlKHAWZvF561C9PbqMjgraQfafvqJN5JHuO8Az96DX/0auB3gKxgz4/Of+wxPP/UkI27h+B64Dex8m5x0XwaeY/fwiOdf/iKzqubl2YwdSdx5+00evP0W8+4t3vBfYxJ2ef7+Vzm0FRwewf4BkxG89BSkZIFrpHTAgzdfB+Dk9Iyvf/MVVqsV5xfD7PcW8M/IYmBf5D1V9xWy2MWcTHLqcGQZzGc5ug7PfAaKsqCsa5TWPPOZz3Lt6WeY/CqM/lWIF8dZnnp1wVbheTyGfy5SzfJT/3gkEolE0zacX5wTYxbHF0ns7e0x29nJmzqVLjsnj1aZOb1kMffBU4W9/SPKckRVjTk/m3N6ckqz6WibDomJFBJ9ilzQ4KzBFpZNDOyUNapWkFY8ePMW9XjMeLZPCsLs6ABbFYhSg1QBaFHDKCnPz2JKNL7NPMsYM9cx5Qrkk06+xhjGkz1sUbImIbHHWIMpshavSh7x8XKzXFcFR1eusL83wllHisLqbMX8/AGrVnMyL7BaMz2qKEYTDp75HNeffQkRRfCJZrliftFzdpG48ZmXeOaLv4MpDEkMsff4tI/HYop9ptM9UlR4o5AgmANgUmNdRLuEX6+5e+tVNosl/c1EsImNNLzwwvM0jXD3rkakIEnHpj1j
VCVGswNIka5rEIHP/Mbv8IXJhGo8o9jdHVT2HsO6KsWBq5iNS8alowkj1n2FEFHGo1RiN1jq+xvGQbPTa8qq4Oj5KeW0Iu5VyKwkpEiXAhHNbDYiKoPsViCRydV9jqa76LJib3dGoqA99HQvhS2AhgRsYsInYYOwFmGxari4e07f9fTnC2IbMSmixaOVUHRZ9qKMPUYiwTd0fpM1tHSFUoYoJUkMYizJmKzws1oivqPdrGibNV23oW9WxJTou56Y0qXs6oeNTznxKkQMyA3giyh+lNEZKqBUQquEsw6jLaO6YGdaUJXCwf5tCrtApzXEahi+t3knQwAJRJMQC+I0zuULQikRi1DaeyR7iqVjs0oEpQkpt8HssGHrI4Q+ISRS7iYjyqCMywc79iADKR8eaneq3BLM58NAkXrIZcj7f7VFo24p459iCKgF6CXo/TbzpgcxNq01e7s77O3ukEtNRe4+nAMWpVqUyuCcgytH7JQVuwKzGDldreDBfTz3WfCAlAKhXWQ3IL8LZPcgZ4dEwIgUI0WRJS/atufe/RNWq3WmXBiDblSmDVneq+3BtvbOd+dcqYH8OmUNOwePjGoFxrNdxjv5Uz0FxBODLJ+A+SNs420R9JjjPXlVvf/O7f/Lz/zFlnoVgmezWRNCoPcdkhL1aMR42FkbYy6f/L2V8yMn31DPlVWFNZb17oaDw0NEoK4rjDHEAVGaUqLziSSJpvcYm/1lg0toPM1yhYTEZr5is7Okmo6zCpQoslQBGNTgVJOrsDRUIDHlsULcgmfiJ/8NUErjigJXVvTW4U3GYLjCXQL/siqRZKMTo6mqkrIsh26VInSBdtnQdpZuDck6EIvRJdXkiMnhc/hmw+rkPr7t6LpE14EudhjvPYUQCHE5ULkcUQqULrDWkZQimIgImNKC1tlhyEaSTjTrOavFGevFhp6eYn/C7tEOzgkXF4EYLEoJMfVgFLaq8zisa0DBztFVdq/fIIoQ0gdP2j9qaBSVMUycY1KV2BARM0aIYDoUCddEVOcxQWN7TaEME2upK0dfWrwz6AjJR4zSGGMQZ7HWYKxQjCqcdmjtcGUFpmRUlKTJQIsL2RbQieAHaEFEsTGGdN4QkiJiiFGhRNApohEcAS2CiR06BegaYrsGbbEFl0YJCptb1lYQH7KtYuiJwROCz52o6Ekx4n2XN5Uf0e/40694dUKpP0BxD61+mNslqUCJRyVHZf87GP1b7M3e5tq1b1KWmt2dEucsRTlitmPoWsN6vkGJptQFRglNs6FtW4xzmHENCPQNKUVMoTBOM64qRnVJ8IHm4pjQRZzNybft4cxkge9CdZAaYizogxB8z3qVzaRL7QY5sdzuAUhElMongWjJVoWXMzgNShMlIjoR06eadjEGXngOrl2Dw4NDjHmWTBUaf8AjauAFlNrhypO/wf6VL3Dt2g7PP2mpFdTnvEfydco+L/IVRvaA0eELGdk8eR+diJxP12RSU44d4MsUheczL8LBvuLatSc/0LTglGy/sIGfag6fP4CfvPIQ22MsXHsKZgcZo/0ZYDMec/fzX6DrHgFBHJOr7MfjEf5TIfw0nUaQSw/onB+3s6fs9dm0G9qu5cGDY157/TX6vqftWkQSTyzmzJcLppMpV69ewxqLGcy95ZHnv2yqDvMtrQ2mNOzu7fLiSy9ycHDA3dt3MMZwdnzKyYOTrHObhBCFi82GJvSoCJWyRJMookF84vjmLZq2oQs9elSgnCVVFtGK1AckxgxcRfDRs+wW+Bjo+54QPTF+9LbcLxNGKyZ1ST2qmZZPkI6OqKczZleuYmyBcXnU8uCt1zm7fQsVG7rNMX60Qzl9gvHBlNMH5/j2PhIqKr2LMxajahQFEjSx62mO73P83T/m9PSUxdk56x58NEBB33acHR/jfUPbnxHSmrqaM9tf0Lc9Fw/OkSjMqgNKW4P2JFo8G4JeEc0GYxKFMuwdHHH4/Iv03rJz5IhiODy4wmQyYTq17O47
lAjJZ1Gc8d4BxXjE3ds3efMnrxLjBzLeP1JYC/s7mvV6zul5TyeRDZFEIopHiBTtEts17GNpVcWOG3NUC+XE8M7Jiltvdpky5RPKR4oHS0zrmdWWSWlYnZ5x/92GVJSMrjyNrScUTlMYxWqx4t7NO3if6E1N1A55+gh56pBYVJRPHWHanouQuepq7TFtj5XArjQYCUMbvyf4jk3foJXGmw0oTSuWIBptS7StcRKZSQ860hLpiEObOq/HewGPH/7a/ulXvErQ+k/QfB2tAhqFVi7Dy5OjLP4FyuLfZmfyjzg8vEVZ9UymBdYaqqqi33FsFhETM01gXBYYrVFLTTQKUzrsJKMA1aZFolCViqrQJGXZ2a2IwbPQF/TrlmKoytZdRsQF8pxYpY4YhT5C7zuWq2zWXBeCM1m7tnB5jrv1uQ0pJ1cZYO/5gFiygHcibRPvp5h7tYZnnoYvfgGyaMYX+flG9SXwIkpf5+Dal3jucy9zZR+eugYuAmuQRxLviBnP8DK1OaTeexoeAVdtI5Jz3Bm5VZw//hT4PIWDF1+A555VZDj1z068F2RDw5+1dPOzfNuGK2Gyk6vgGfl2UdWcvPDie9Wpf0JmU30CifcSI/W+O+QSeimXFfF2XisIbdeyXC05OT3mrXfezkYHzYaUEr339MFzdHjE/v5BlrEbPIUfrZ9leLk0dGWM0VhtmM5mPPPsM+zu7fLm668TQxbTOD0+GSqjPAubNy2211TKsWNrMEIdDeIjp3fvsVyvUJVjfGUPXRUoGYHWdE1D9B4tCSNCH3vmzRk+epq2wQefkdT+k3fm0loxqhyTcUXhdrDGMTu6ypXnX8QWFeVoDxHF95qOxZ2bEDv69pTgNcV4Rr13FWXewbcFwohim3ixaAwEReo97fkppz9+hdPTM1YXC1oPPmpy4k2c3T+j71Z04YJIw+7hkk2XhUlOj9+FBOXViqKsiASgJaiGqDckvUHbAmMtO3v7PPHUi0Qq9m9MSGIZjRzOGaazkt2DEUbn2atSipQSKQmb1YLXfvgdfP94sAzWZJ3745MFdx9c4LWlNS7r56eQPdTbBcpfsDQlrhgTRuArkLHhzltrvvv6Wf4KBIUJkdmioewjflrCuOB2s+YbF3eIRcnu85ZqZ4/Z2DEbO47v3OdH3/weXReIxR5ia8ZYxteOqMqCvZ0xru+5OJkTWo9ulqSux0jPLC2xKTBvlsTQE0JPE3o0iqByl2OVoBeFsyNKN0EbRV1kI50LFQdV6OGCMchl5ttHXMfHcjQ+ZGQghiZJbodltwdFEo01FpzCmJ+g9T8kyY9ouwZREVetyUwuQVvBFoF6nN1SxtUoq704jRuXaKcxlUMk4QtI0TOqRtRlTRAwUWXlk5kmOKhLGBWK+RpWndAFoQstvhGMrogm0fcz+vAFUiwhOTqtGVX3MPYWWkFhHUol7IBzUUpntZX8qdnWIaIeX8vnw4cCdQBMUOoQLuVB3h8T4BmMnbB78CxlfY2r0xlHQNkrzhegIxByntpcPvvDtmb+8cFn4Hs++UhlYi1kf4mf8bBI4pwVgrCkef8zPIwpueu8fQ7LpX7l5dNuXyCRdwBrckf9E6p2f6qFjBBCyECMkNtU26SbJP8upsTx6THnFxfcP3nA+fyCtm1pm4aUEpOzU2zhsNbSdu0AYFZD23n4eDK04pJcUoaqssQaM7RTC0IYce36NWIMrNdLjo/v0/meZbMmSkLFXCk3nWfdtCTrGA0Ggaw39CQuTk54cOcOdlRRpT2UNRkg530GIaVASJ5VPyckT9d3+TMO2tGf9LfAKM2kcuyOSqazPerRlMneAfu7O7iyYjzbR2vD5rnnMOs5ZVJIbNisTnjtB6/w4N6MW6+/xt37D0A1KGOp6wp1/RquGOFXCxY3f8LZrTe5e+8+ZxcL1s2GPiQ2izUX989pN2tS0EiypJh15JvVORcnb9GuWxbzYyQqClXTLJcUVaCsPaI79m5cZbw7A1egtGWy
M0Oiz1CwaBHRdI3gezC2pqj8sIHLsopd2+B9z9nZHXrfEsLj2ezEmFguV3k+60BKgx5VQEYvqxThYo6sDOO9MdduXGNnd4KMHI0RurHB7xc5GSSNCglVKlQf6ZRiIYmVRLrUEYPQtA3RlqiUTXSazQarNdEYfArE0NEuWuReQ5oVjCqHNpbJwQ6lNSTpSesVyQvt2mNCx7rvaHzHJvZ0vkOh8GR72U1UdElROHKr2mq8MhnEGzySshhLiuHS9/2XsRb95zLjjZLxhFncWyNRUxYFzias+bsY84eEoLhYaIpeg+soSoXTFltYqnHCqhKtDNPJHs6V7KRIkISohJjcc99sdgghMRlNGI3GeN+x2iyIXjG1mtTB/kSzPzUcnyQ2m8hqk7i3mdN4hQojVBzT+xdpmv8lMV5nlXK1sjv72zj3NymLSF1ZrBEkapBAUvkzIqBUxrVn7PVQDX8qfIptGOCl4bY1SfhZcQXYoxrt8sIXf4+9gyu8YC1PAacbeOvmIM84JKqP3Sw8AH53+PcHMB0CkXd4wE0U8RIq9DPiOvAl3vvRPqioD2T56bfJH+ITTbzbd6yQFGnbJlOomoa+79ha1MUYWayWdH3Pzdu3uXd8n9PTU96+fZO+72jWG1JMdH3HfLHAB8+zzz1HkIAyglMONYjBhxhpu1xZNpuGEDOSsyodtjBM7YRqVPClL7/M08/eABU4PX/AfLFgfmdF7z3BZ+nPC9ngAkyKklosRYgQe9SFokk9835NNZtw+MJTmKqkaVv67RzMdyQCvaxJRGLcznhlkDP8ZMNZzbXZiCuHM64//SwHV56gHE0Y7x5QVBWHV6/hXMm1QnH/6ec4fvcNXv/G1zi5f4f/+m9/l5gi6z7S9InK7rJTbtjd3ePzn3ue0egq7b3XuPvam9y6eZvv/uCHXKxbTvqSjThO7h5z8wfvgIpEZUBKYq/xUTh/8C4hvkHXJM4fJFJQzO/epzAl+1dLDm6UaFPw/Fe+QDY4KEBZfBT6sCHGjuR7YoLNekWIHV0/JaQ9Ugq0zRwfei5O7rFeLTh5cM66nZPi4znR+95z984DlkT8WMFOgb4yG8xlTNbcf2uJ9CsOnrnCr/z5L1HWBb40nCtheVSwsRO0ysWWCoJe9Ng+sjxZMz/fcCIdq7Ampp50cYFtEysbKUwkNR2VsVinWfuO1nd0dxfAnOkTY6qjMWVdcvTCDSpJnDnN6bolLITF6Qa6NQ+aFWvf04SOxrcD+j4XS2uv6KOiKBvKqsU7zVEqKBT4viH2HdF3+fwWUJgB2/PRLuqfbuKVh2AjJQ9RSEprnMutY+M8xqwwRYmyY5TRWbcz0xPRWlBOoUqDVgZbaIxTgBkI/YIMfplBLDoKxXhCMRojnUb7NYLGFBotiqKylLWlrEpKN6GzgpJTiA3JB6TvCaEjxhUxLYkxISnhQ4MPAaPTABjLn0Z4+LmykEF+41qr7IbyiXBGf14ochbaCqW/N4qyxD7C8avHM8p6TFGVKDoiG4I4fKwIj2XHUADTrB5Tt7ynMiyG2yNGSQaFwSAZvpGT63h4WEdOpGn4qfLHFJ1/9VBaA9qY2Wh05HL9l3RE/HAxdDlEHnH0kUGHPNB2uX28RR6HGFgsc+KdLxcsVyvWbYOPgRAjMSZSjPTe07QtTdOyaTYok9V4Ukponf2hvQ80bU8MMVcLMVAVDu+HYyyJEEP2Hq0cVV1Sj0saX6BMXsctYj/EROcDThm6GC4rbC2KvmlYL5YkJbTrDTYG+uDxKeKDz4AwIoHskR1jytgH4VNJvMYYprMpe3u77MxmTKcTinrMaFRTlhWjssAVjp3pFH+wT1ifs3t0wHppubhzymazohVNnwzRNjjWlG3BZrlgs6gw6wWmXZL6DUpyCzJXm5pms+Li/BhrFa5KoAJGKZR12AFTIoViNM66zLUZ4XSRr0FlhdUlpd5HK3vZFVR9IHYBQaHDYDQfOkK/xnca3xUkifjQEHyP7zf07YqU
PMZZlHlMay5CCgFdgLMK5TSq0IjOzs6SBFOXqMmI2XTMeDbFlhafemKKKKuwlbl0jbJWUSZH5S2h88QQcOIxRfb0VQ6UyyCyru8xkigri4mClYCOg0hK1xP6gj7mMYc4g1IWN6kZ7UwxcZ332SnhY1YYuwT7SZatjJKFNFLMymch9Hhl6ILKutExd6y24iFsqXy/xNJ+uq1mspCCueS7Jqxk38N6NMFag6ummHJEMXKM9kushaoOWJMolMKRkXV6nEt87QJJJ5wdUIEZd4dgcLtTIgX1dEY9nqLmJyz6BUILlUPpEjudUu5OGIWX2dn7H6Ft4nT9f6FtvsFm3bPeBCLfwfPvk6TCh3yQVstziCeMK0OtK0qncYbhAmhAWYzRFIXJ/1+WiM1uF39aQhvDjedf5OpTD2UkjbWMJlOExAPeZM7b9Fwn8gU+sDT90KHI/eU/Q4ZbfR94BOz0JPACecxcgcPwLFfZY8odTrnJMbID/BoZ4PUj4D55Trskt5u/CHGcZTXuP/LKYQXt94e/+/i2pD8nBAiDQ5BccmpD9HTtmrZruXPvNifHJ7Rdx3KzwofAcrOmD57FesVqs6H3HnEKrSw2FKTB1m+5WnJ8eszrb77OeDxmf3+XqiozKlwbuq5nMd8MUn49SYT17i6bZiejMPsOkUjoe6J46l3H9WcP0ffh5gNDEEi9IFHR+sB5aOlDorCWylomMTtsrc7OaPuGajZBO40bVUhdIIWlT54u9sijBhyi81hpAFd90ql3NJnwpd/6Ki+++CKuKLHWUdVjJjt7WGMzellprl3bZ2c64smnrvDZL32O+3dv85/8jf+QO/fvEUWTRFPaiA+KjT/llT+84Pak4tlrhqeuGmYu8tmrM87XHXfvtczXHXfu/JDN6j6z8ZQnrz9BVZcc3hhRz0bsP7HP/pP7GFVjVU6ulSswxlKWmrI2wzXMkmJicXqfZrPEFpZy4vJ8fLmi7zvW8wu6xRlaFmjmAypaDd7iLZaOyaQEN31s663JQylbGWYjjZ0aylEG9/VdBCVcf+Yq+88ccuOpJzjYe4qkE+vFXSR6ZpXhui1JIRH6QK00zxxM2TUWnhpD77l5b8n593WetV4fo0cl67sXbO6fsDMacf3GUW613/PodczqYn5B6IT5akojJXZW0xaW/aeu8+zujOVbE959+wc0m4Y2ZrMQkYTZ0uAHnIWWhE5C8i2dBHQwPMDhFLTdhug7JMZc4csj5ISPGJ86qnnoinPplykaa/WgJ2txoym2GuNGlmLiMEawrseqiEEwSCaw67xiSYPohHYK5wZzAnGIckQzJeiKYjLDjaeYvgFjEKOHnwZcgS5GmOIaZfmbdH3E6n0MCgkJHyNJnSD2AkGRoiEmTe8tbWuxyhK8xWKwKlcdmVmZNwFGG5TR2XrD5sr30655hy3dJa1pCwrQ2jDd2eHo6jXkp1olgYYlLQ/IJWj6ZTd3j4Qil6tXUPToQchCbTsEY4W6yqV5rkEzY8QBU+as0ChSIZnn24K8NTztZrhllhSSsjTL6tGX9WSFyfn70YeP0m8eT4jES1rQNlIKhOjxvme9XnMxv2DdbDibX+CDZ9XmDkrjOzqfVdKUyWIZ2uSOShLJVW/TcDGf0/seY6HrMzXIWkvbdFzMV8QQCYPGrLMa5zQxBro2V9oZDS1YpxlNa6p1iXE6K2MNmgwhJToJGKVpvCeJUFrBKkNqBS8BQWgW2YhAqRGoAi+BPuXSWT/ayxdFHMQ6PunEa53j4Mo1rt54Ms/kUqKqR0wm4yzevxVRGVUUziJMOXriGmY0gqJg3fuB/qcROqxfgbSc3lsTC8OV0RH66gGl0+xNatCG6jRgjKdrl5zFBtI+fm9GaYWqnDGdjJjt7LO7d4PCjpnU1zGmoCzyJl0PVo1ZbSnPFNv1Et+1aF1iVInSHa5Zk6KgxCOhJWW2C9oaCltlaqMSjBZcYRm5MT/TFeuXCK3IlniFIdaaotTULlflnc/H
+up0zPXasb+3S1VOCIR8PRShtJqJydoHXRBqA7NJwU5ZoKNFp8AyJkbTEUbA7jjUyNCcZMN5ZUvGOwWCpVpoyhgJBlTqkFDQdz3iNG2q8vZlPGJ/VMP8FHEFQRkiKgMPyTKsWrILUr4SDCJJKbvXBSKNF7zO0pCSwmWyVUr90hfETzfxKlBGIRJJMaKcpqxqqtGIw2efoRpPsKNdbDlGuYQqPVYnRqbFElHeo4LPFCSTk5ypC5TV2Y7PaLQq0GqCKEepZjgcSBZUit5QMEYkcLYsadcOZSYke0AXW9zhf8No1DNaXNCmGt8E2iZksfKyIDGh0f8yIb6A5k8I/T8h2oRmjNZqMG1OKJVQKmS+b8jKKBFFMoHg+08XXhUjvPk2LJdw5QiefYZyMuHaM88ynkx44eCQp8iF4CkPR54KzT5PskNFLiUtnoxO/miqpI+GAHeBb7PLgl/HA449ngF2OOAqh49cICwPSU/75Cn1du0iuaJ9j1DbhlwFl4/cdwTceM+CADeHT3tELrMfXxfCB8/xyX0uoXWSdW29DywWqSpgmgABAABJREFUc7q+Z9OsafuOdbNhvlrmlpdRmMJRWIWuipxIiwKJiaZYEzpP2zQslkuUUZRvv0VZFhwfj3HODSbvBu8jm02XXV4GMFdVFdRlMSgy+exYNCqxznCxOsfWitG05MqVfVZ1zfmDFe2qR8j2ak0MnLUbSqOxFBAMNjpccngU8zv30VVBmJfE2tJJoiF7rZqUK4OUYdaEGPA+8HG3cL8wBCRlwY7tpsRZl+07gRQ8kgSVEkYLaBCtMAPvPA1JN/fXVB55SEJJjxLN5MbTHP3Gb5KCcKWJNH2kPG44awJu4KSOqhGH+wfZ6/hgRDkqKEaOoigo3JjxaII1Ba7IwLcMzMznTIqJaByjaRa/6LpI00aUCNbmTfS1p5/i8MZ1jDVYZ4Z6Jm8sJ6N9UkgsV2vOzhfvVQ77GDGZTfj9f+WrhDIRnWCcwZQujyb6fMyPxlN2q5p6PGZkC4Jojup9pm5ElQJXUyQ6jzcbCmO4MdpjVJQD2j/RHBQ8//yGTgS1V0Hh2GnHLPQO47KmMHmNDnYdVe14sPT0i1OQhu4HiTCqOHvyOuvZhPHEMZs4wmSXp37lK7Tnp7z74+9wcXyPLrS0XoiSq++YJOeloRI2VlMYwQxFH4PpAkmG0UlWONyOZj5KfOrgKqUVEvIOVFUF5bhmtLPD/jPPM97dx9Z76HJCoiWxxBCYqgZLIGw2xKbNKizWoK2h3JmgnSUpTVQKrWuM3QMsWmoES7tY0C1XxN7gGBFTYLUsmF9YKCaEah+jWtzBHyCTjtGDOW1f0aWesiV7ZhYlqD2U+tfpw58jhr9K8P8NMciww8xqPbmgHKToBMTHrO4jiqAj8VOgUrwnUoJ3bubby5+Dp5+kqCqeeuFF9g8OeRa4oRT3eS/IV6HY4wme5InLp2rIXdqPl3jvA99jh8SXgVxNPws8xR650/yz9ua7bOUycvTkGe5PJd7X3vfAz/I+g6VETryvD798gseZeEPwnJw+QGuD1jp7pA639WaD955Nu6HzHZuuYblaIkpRz8YYZyi0xSooy5LxZJovnjj6TUfTNKzW60zgT3lOW9cFdhjdKJ2Rs1u3lxiyli2Sd+paZa10Yww7e1OqqsCUGltrRtOCw6M96rKimXd0q37bNCelSGobCq0YEbDRUqaAkdwRmt8XsJrN1OFLQ6uERgmIxgSLEp3byymjt/vHRG35RSEpA7mszYnQ2WxNh0D0HknZF9xofYk7zLokWexmaxCqhn9pJNvmiGJ0/SkOf/X3sEWNLWeIKJ5Z9XifsM5gbT4m1pnsRasUSgldt6TvlriiZlyPsbbAFhmgAw8FVJKPJBMZTXbQ1iKLNc1mgUIyCNUq9q7ewNUTUsxmA5Iisd+ACGU9wxYjTu7fRPyrpMfE4x1PR3z1z/16Bj6lMKB8c9s2hbyhGo/2KcvsUiYIKWmod4kx
sJ/8wOfuCEajjWVc72BdzaBOTtjXPPPMnF4iqa5I2jCNIxZuhk2Owuc12t8tGEdD01xwvpwTmxX9qoW6JoQas6+YXhuzWxfU4xlPfOFLxPk5q9O7+OWC1Apr74lJ8D4rGYpkxKVRispYnE4DIZQM3FXAwBy45Kv86U+8QlIJpSNCRIxBnKAKRTEeU05m2GoXXcyIaZOdU5LHx0hMgKqhsKAN2lqUNeAmUFiUcRjjULpC6R3AoLyGBNFH2vUaaVtUDBiJODSFsvgmsThvqUdjdg920UVgsn9IFOjSPqvNPkktiOENIj2d/x5dUsT4JlFyBzMRs2mCJJRI7n9fSkPGDBIwaVDJ+ZTpRErBwT5MJ1TPPM30yacZ7+/hihJR6tLqfgmD6lCe4RSoS8XGn9ukqskqFfu8t9J8JDSZS6uU4mJnh8WTT7G1assPGoFSXLlScrhbP0ItMjyEJ+fK2wfPqtmgdWLnYGtu/XNiL38AX8D8Wp7/ciEP+VCPOWKInJ2fvY9Ym0UsyrKkcAXTyYyuj7S9Z2szabTF2YKosq+os46qKEkm0tkCsXm9fIiovme9XmGMIXmbNYZtrnhRGrncSITswKLS0I1RWRLSaMrKZP3c0mBLSxxH9vd3KIuSi5MloQ90baDtQk7AAiopuqDYqARaDRVkRLoeCZqgI8ErolYkA2DQohBJ9F12zQk+J97HVYF9UPRdy723XmOMZ7IzZTQaZVTz7DAnRBl0vXTWXscYxDpi7HMnLZe+iMCoqLmxe5WRVVytFsyKxGRcY4saY0u0MYgoisKhdcJYPeBYsoVd3oxnmRNrHMmUWO2y7IkktujRrfqdJLJAP4K1DucKyioxmiZCcGjnSUSqaoota0hbXeJINA5IuDL7Jo+qMdPRNPNrH0MYY5juTIlxSLyS8nNLBgAiUJYT3KV1ZPbYVSYNCmaZUpZiSTAOpTWVG2FsQZDM/RgVNYeTPUJKRGdJWjGeJnbFgQfdCN4nmgcb2iYxiZ7rU0criovQkJqEOZtjokKNFPGgQqGoJxOUUTzz0kvsT0csmyWL9YLNcs39d+/RtR1N7PApUFhDaXQ29UGyOImoLJ8hculBrVR26FLvYTH84vjUW81JB0T1IJ5YKmQUUVPN6PCA6eF1bHkNbffx7ZJmNSaGDZs2kIKmcmOK0qCMyc4oxiKjCeIcqp5gqhFQgJpAFPR6Tep7uuYWi+NjrF9R+wYXPCMsiYr1hef09JzDJ0Y88dJzGGu4FoTp4RFJ/zk28z9PG77NRfNX6NMDFvE/pUn/BSIXJKAQCMpnc2cJ6BiHSbQZZqIpJ15bY7gcX356YQ187iX47Ivsv/gZPvNbX8WVJa4oLuvPE7YSIDmPPk2mxn6ok2OfDHbaIWfXnxGOQbZRKconn2L81d995MKrgAKl4JkndnnuyWvv4wZvk8gOMGGxXvH6rXeJrufpl+DJ53/R5wc0zCfww1/NQil8h6xd/QlE17e88dYbhK4nhIjVBmcLppMpn3/5C0wmE0QVVKMZPkKS7K5UFhVlWeElEInUZcVsOkVCItQ9Omb5x6bv6XxL2yzRCsZOYbXClQ5XOowtKOox2misy1rkZQlFqXKFXOZ5cD2qcNZlXnDhGFUV43JMs+5oVw1aC2dnC1Zt9h0dxFmZ43NyFcEZi47DOa+g2wi9EYIzxNKitCXZXD1u5ks2mwbvA23b//KolA8Z6/k53/5v/7/cfWWHJ555msOrVxnvHrH3xAsURcX+bDbMdhuEDmxBciN8t8QaQ1HVJB+IIXIw2eM3nvkS01KxV71DXWy4erhPWe+gdAa1iSjqOmtkXxpUKAaaDcN9gNTZUlE7EMl4gJg5pNqA05CUAjQqQVnUmb1ha6rxbADpzUgp4YoCbS16+E+SDIYagjY2YwS6HnW4+ciShh8UrnBcfeIaKXmShEsdmO0mBdkmodxxSCm7GKUwypV8L6Qw2AGGCGQeulKKLgW6FHEjx8iMSCkRJDtzpfoK
cjXgmxXN2X1Wi4bTH97h4u6Kq7s7PHd9h7NVz0/untMljQoGiguMi/grY1Rp2D26wkgLL944xMWOZnXOenHKvZv3+Gd//4+Zny84aResfUupPBU9ioiKeaMYRWWUuwhKhuOrLCgI6k+zZKQSlMtzFUFQluwAYRh6YNnhx1hD0GpwZkm0nSf4HqWqDIwwFq1L0AZRjqQcWhWIKsm1WkE22zXDbnIYmg87F4XgtKYwlk0QYu9JIYK2aOcox2OSeOpxSVkrUlei/dXhhFoAiyzokcdWw64VlKThb9KQPBzIPiiLwvLpi2fknbabFtRXRlR7I6pR/R76UGTLyfVAj0LhKCk/bPvVkLN1xQd2bLeEJguMqoqdnZ1HlJ1y+zGJUJaGwhW5HfgzX8hQ9AVlUSAC1UcAWYskRiYSDTCqskMCNbnifnxfg5SETbPBtx2hD0PlmqjKEqP1oEVuMudRGC4+5HbhJb0hYyAk5PZXPtEe0nwutyzDiMNohdUaqzVma/BNVhnSBpzTlEX2iXYut0CNVsPPfFNGUVUlJMVoUjOejlhvWozVmUIR0wC4UvQofMrgLS0aq/I4RdRAtdDZK1Wp3MYV8nOkEIk+V72fdOJNMbJaLpnrxHRnSlkWiHIUkzmx9sRRla85eKDNM1WlkBSoRxOm0302qyUxrDDaMKpqxrVhOtulrmrK8QitM2NBKQ0KtOiBQvYQwLilD+b7MutBKzOYFsiQfAcgnjyU/lTb/wa3M2MsdqCvBOvQKaFNpuUYZTDKInp4bZH8/Cp3JcqyfmyJVylwzgw2n+YhaEtABiPcS6/eFFEx/85oOyTpRNKZ1qNNPve3DMuY8u2SgjdQ30TSIJ6f6CWhqorURUbWMDYwGxXMdido13He+Ez/KRxiNVOjGCFUSuGMwVnFqJxQqRGFUxRW0W8SB1euYd0I1Y4Y+QYXWlxYk6Knb5aDzGneBAjZpCTTRfXAKvpTzONVViiveHJ3R1NVYKqIcp7eL+m6OdpOsK7E+1NWi3doNitu3nyHptlweHCV3Z0DRiPLrhuhtSX4CpUsWizKq2GOlTOi6hP4ROkcs+kEmkjqBKWF3UnJxCTk3NNului4ovUNUigm168y5QAfXiX1f8xydUS68z+n6QPB/yeY9B26PtJ0oCKoPpskmEKwBqIMghnyLMi/DWoH0t9F8YNPc7mBvIt+6gX41d+BavTzPN+PeYhM+iLvnag+vjja3WNSPzQq8CFw8/69h/6WvyBGVcXzN576yBeSRdsi9++yjhFe/AzceJKMunqexz3jPT45pls39G3PZDRhb2ePusrnq7UFXeeZzxecn885PTlDAWXpqOuKkDwhBfyygU2HJGgWHb4LGR+goKpK9nfHlNawPy6prKGsS1xZDBvVrIpVj8BaxXhqGU9s3rWHfMFw5D2vFXCisjLT2DIqap5//hkODg6pqndo2g7f9WyWG2JMNDHj9bTuMy7CGMZliTYKlKAFitJiqzHaFRSTGYJis1iwTh6JPbFvP/FWs4+J49WGPkW8vsPFYsVk55SDiwU7O3vsT36dUWkhrSHNiR6aBCoGvvQrf4ajK7/O9175A1790ddBC/XUMNufcf3LzzM7GrH73OfyBvaRTeIW5Kou+fu5FamGXypAGUE5gCyqEkXyxVuEIJkIdKkCsG1R64z/z0OXiFIJURmxTgpgSrR2gELrAhhESgSqcsTO7uFjXG9B4bGmRqmCLUPl0Ugpax2k1BPiBoXG6hGKbMoRowwt6m1S7RBJ2BQoU7ysoiUJsQ9ITBASEiJdMaK0BePxhl/7bGB9OGX67PPMnn2WLiS+2nSEKLStJkaF2Zlgx3kcU8eERaMLC1pRjA9xo33K2ZOMj17Ae0/fbzJ/9/4D+lu3WczPefXVH7Jcr2jimpXvEBXQNgwbqbwmaP+RVIU+3cSrwdaZJ6UFTAHaCuiYD1JokdQBPSk1+H5B0y65WF6wXm8oqhmuDGgrBDEYsaiYq9rss5h3
T0rnnaQOOQEbbSgLRwwGrxSiFWVhKcRRGo+VHp08MQYiidF4jC1gsvc2s71vI+or1OUXELGU/B1C0oSQYehKGBSQMphqgEgMZ84E5EsIh8DXh6r401zx/J2Y7MLh9Z/6fjysOkRArYG7oEb8tA3B5QOG2yP8zI/0XhR1WVGX1eV9Xd9z/+z0Q+udOmvZnUw/8mtbYzgyhpHWsLeXb1wht7AfX+JNKdE0G5pNQ990WG0J4zzX2lK4Yoy0bU/bdrRNi1LQtR1aqUHhyaOTsCEjgn2bLm0sUQpjDaNRTeUcO9OaqjCUVU68XedJMctrljZT7EaFY1QVxBjpUo/IIBwq+ZzNNn4qG8NrYWd3B+dKTo7PqOsCJUKjFRIhSE7cfUx0IbcaK5fHKVk7XjBKUxqLti53J4aqXCG5ek7xE0+8SYSmD1kmc7WGlAgxYa1DSyL6nvw9DSjJ3MxciRsOD59AW82br38vywIqwRYKVzsm128wvbFPubM/VLv59UTyEuSk+VDJaKvluw2tNaLNpZY2MiQpFGiVkdYK7PCQ7UO1UojKxylfaNKQ5Qdn8EdeJ3dG8nfUmmwu8/hQ5JLBo1uP4UcpimzfUsomMQo0HUoZrKlQyl6ec1lsJPtMp5g9klXMKPiczAe0sM6JV3zWwTda4yVilOPqwZReJ6bX95k+dY2EDHggYb3o8H0kape7pwqigEqCkqzLbFyBNhZXw2j/au48+A6ip5ndYk3BSTnh7s07xKCwm5Q9byQDGRlyjZAG+N2Hj0+31ZwiqlvmpCvD0NoUqD4gvkN8R9/OQRSb1T0W65s0bYNzHaORYHWP+A3NMnDcdfnEtyVKW2w9wZZjnCup6llutUm2fxJrMaPpwIuMiG+hqJBuzSSestMHbBFZLU7weCZXn2W0P2Pn4hy/muPGwsXy/4nbBLrVW5g+UqIYqYJZbbGmzPrMSoiarJqFIHKKxP8M1Agdf4JO25bSp99yfjQcmUjjUuLszm2WpyewdwY34i84IzqyNMWSLIDx8cMYw9X9A2bjMTuT6SfGcS5dwY2jK/T+UXTnmF8AHfvIISL0XYf3PT4EEgljDdpofPC0fctqvWK+uKDvO8oiVygqJsQHJASIkeBb1hufW9fLFt8HmvUSYmA2HvPZl16iLgtGRrAIi/WSxXJBs+64OFmBJNqRwTlFMzfUY4PRBmsLFBqvGqDH2BbjbBZwqMZoZZhUYybVlM1TPb6D8/MLXmvfoJUmCx8koQkJIz2FTRhrMUkTUj7/+7bHbxpsSqhRDcqAlkzPKC1JPgCF9xhDKYUxDqsL2j4hqUGbkt1NIFY9frMgbBy68OjCob1Gxzwu6hcnNGc9/eaCEDzN+oLjO6+j5SpK/yr15ABXjC6T7ENEsv7A9LZNilrpDCodAFEIA/JcMtpXsgpZ4fIXMYQw2M9F+s4TU0f0LSIRV9RY4/Ko4JKHmrO/mFxFJ2Oylv1juuZkalyLMTYD93SBUmUuKIbkq1VWegILUl9W7QqFUUOKkkzfEhJi9PC8G2JoQVm0qTL31+QiJoU8fjGxR08c0feMtSI2LW5/n2JaDmprGdFfK0f0QieKjoz272MWtVGbjiCJBs2GzIZpbXbYKrdjm5093AsvsHftKl882KFpNlw/PuF8ueLu3du8/dYbhEGhLcb4M6qanx+fbsUrCd0tMaIHsL5G2TH4gPge6Tu8WpBCYL2+z2J1C+89rsjwfKN6UmhomjWr9gRBoU0F2lJNdinGM8pqRNoNGGMoTcptZ2PQ9RRdlJiihNBBaaBbMu48O5slwSXWi1M8CV1/gfrgOrPVEro1pjph9/hv4Yo1bVqhSdRaEYxjVDiszio4SWuigqhTtgGUM2L8zwFwcR8VR//cky5k/anrwEgS/s4tlj95FZ4TuMaHSLyvkyFZj+dzGK25srf/WJ7r50XhHE8cXvnEX0eS4LsO3w++nSKDZF9OvF23Tbxzuq6lKNzQMhRSH2BIvDH0rL0n+MD8
YkXXe1rlUSoyHY/4zIsvMqpLpFsjwbN+d8N8uaBZtJzdv0BioqsNzmrKWlFUUFU1uzt7eUQTQ57JacAonCsYTwTnSvaP9hiNp6RosKbmzp373Hz3Ln0fkegJEmlDQoKnjNm0xCSdL3oabNvRF4ZCBOf7AW0N2moK5dBGf+IVr0LlpGQKOu9pu57C9sRNINY9YTPHN1lYRDuXr0YhV1798pj2bEW/mRODZ7Oac3LnDZzpckt+coBzefOwnWPnDVwWz3lUPEUGgt72sqyH2W5SQ0sWuRyb+NDThy770w4JK4ZAitlwfbPqEDoCDdm/fITVLs+MkcvEBuSZu2QOs9GPM/EKwTeIKIzN+vNG5Zb7Zc03vAejLXrgRlyCzS5Xwlyi74VyWLOApLzG1o5ypawy8CqmQEyRQjzV4EyXDvZzt2CLfZDMcpAE0QopwCZGNiFLlraNJ4ZI2zT4zrOIwoMIvdEsqgIxht3xiKpw7O7scrC3T0ni4OXPkULg2Tt3WM3nvPLKtzg7P6ZpW/wydx5+WoDo58enXPEOHc3hoq1E0EXEpITFYpVD41AUWFNTVzOc8ZRGkKSwScBvSH2g3+SWmagClCb4DtOsKOsRMeaTt3QZRKJij4o9RitKV6Osw8QdlDOY6hxjHUlriBGJEcSCqjDlLsX0OmVrqXYeEGjhYpIh/eoWZfEW1imSzhWuSokQFWFoa0mKSMwKx04ErfRHHsI/zhiIOxR41pzTSUMnKx5CEz9MfDTY/C+KX8bZ40/z68iW95oiMQW879k0G6yznJwcs95sODk55vzslNVqOXAfwbcdojW+bYl9DzEhIaNq+7Yl+pCpdwZi71nM5/RtQerWJN+zmC9Zrda0m56u7ZEoGIl4q4hJEZImBo1RDUobvE+kBNoqtNVY09G1EWsLfNAU1ZL5YsNm3eB7P5jIF/iQv4siQkxZIrsLCSMQBmGDNLQGMZk6pG1GfrrCkVKukD/pcK7k6tGzHOzsoHQWtdmb7XDt6CqznQnVbB9TT/K8VQM6orXBWcN07OimhmllGVvHtCzZG0/YHY0prBtAVeoR7n5u88rW+UsyiPNSMhQetmQl5SQUh7mUkC1DBZbLlvOzZdaQ3s3gt2be4tusYLVZXeBKxfSqw5ZFTmx6EN4QQQ0MissnZluRm8cH7ExC6jzJb/CqQ8IF0d9BaY0tx2hjsWWNsbmTg8gAE3u0BT/c1LYOye9Xq7xRUDqDxrbgNI1CBu9zkYSmyNU8uYJNMowwZFAyTCB9Dzqh41Y9TaitIimDrizeQO8jfYh0MRAXLV4U5+ctQVlOreO+KzIroAJNwjqHmYzYv3LACy88w2K54J23I5vNhq7RfBSFhk838XpQ9wCdLfLUTsCWnsInKlVSmzFJTxE1ZlQdcbD3LBJ7TGwhBZZnKzbzE/y6ZXW2JIXEFiCZTEEylqIeM949wFhHVdUYY/PsyhjG0x2OnniKwiiKcYmJDeX5ClfdR5SDLiBtgFiD7FJMLerJI2J1wu55gT4DufNnaJqnmI7+JtPxX8eYRLCJqIRNUBAVwWd1HpUCJvpMxhaw2xPqkS/Gpxk7ZKmKnjXv8B3WnBE+cJ77/49fJkQkt6BCT+89y82K49NjlqslIeQZ449efZ13b94Z1KUiSmC1aVEibOZz2tU6U0uH0USI2ZtJxiV25GhWK956402cNYR2SQqeeycPeHB6SmgS7TxAFDZGMEooa0tVG6ztmV/kRN82PTEkytJSVdnyL0nmn6viHZR1KONQtmS12lCUNfXgJNbTXwJkoihSH1EqIlaBAas9zmiKJOjVCmstyihG46HtqD7aPOyXifF4h1/78p/n2RvPUU0sZa2pRxWz3TGuMEz3S2ypEekQerQIroiM6oYnr56z6zpuvlFzZzTiqZ09PvPEDY6uP8F0NM6CHFpjNZcCHHnUpHPls62+UgYFAQOtaNsM1kTyTDmR8Cnho3Dr5gU//u5txpOap5+/hjGG49cvWB03
tMubNPM32H9in6/8xa8yqmcU1uZqNiUkhWHIPNyGjK90nmU+ruuNxES4WNOHDp88m/N7zO+9jrIF1ZWnMfWE3aOnGO9cwejBgQhQMcOblRpIleoyFQ8cZ4XRoIsyz471MIfdJm1lMrdZDEhWIxQ7bHQHrnA+h1XmE/cXxBQwyqNTT6E1VeVQKNpaE1Ji2rXsty3ri4bq5h0W654/PHW8tdaEeoIf7eDGhp2nSkaV5jf3Kp6b1bxQf5Znn5xw7859/uuu5/j4hG6zoG1+zsK9Lz59dyKfdyXoDMhjaA2opCBlcTZUpgsVxQSJHcqnSwBT8B3Bt4RuQ4zCMB4hxn6w4ApoqzHGkXyHNRZtC7R1aGsJIeQ5C1yCAi5ruAG2LoMSDNphygmu7qimu3gvlPWIsqpwhbuka5CdgvPzDKCJTLJ2aBkhyoBU/zxy7RAlMEZTUpLnKoGG/hEVCU0+GQo+KtfYDI+q+WVASjmxPPS2/MBXMYPu9adUuf6y8RCNyrCjT3ifZRpXqyXGWDbrFc2A4lYy9ECSoJIQek/oPTrjbmC4sAhDYabzpSiEAJLwvSf5PivpyLZ5kc/pGBMJwUQhxG3bc0i8XW67ZepHRpKGofqQLiDaYlyJKSJt22XwUFZ4QJQeADSZpBeGqgbJc0s1YDjStupDLt2TlMoa7Z90aG2o6wnjyS7jnYJqYqmqgvGsyprUpUJbNShqRbaGDkoZqqoijUfs7u5ydHSF/YNDpnv7jGY7GOcuK9xtwtgimC+bWZeAp/eO/tRwrVE87MCIQNt6Wh9YLFouzjeEAKtFjzOG5UXH8rzFr1v6RUu/6wGdzwP1UFlr29iWofKVNHiAv4cv//FDJBG7Dd16Rd82rM6OOb1/H1MU7FQTihToRtNB+KNEF6NhrUIG16lMpWPgKovK4D5BodT2M0kGWz2ygRhe/fJTiLrEjz88BgORWFKkXa3olktWfc+y6yiNxdXjLB/qLNpZRCySMnl6pBNeJQoJ2KTxIdD2geCEwluUFfpg6GPGi0x2dmjWLZPRiE1VYe1Hu/Z9qok3CawDD73Yg6IKGtdrmgbsJiu12HJMWWpMURD6DcvjN+naxMWy4ezsjG6zYbM4RyPMRiOcM3Qh4UMi9HB2dgfQFK5Ga0sURUiK/StXKKSjrgqwPYUKbJZnrPsNySSwY5T09MtzmtP7uNGMcroD1vAUL9EsN/T+mxxd+0f0y2/Rz8MAlupBVFavQhNNwimB9DTa/0/Raoakv4f3P8z8zE81AxvgReD3yGCin32CzMjkmoJMyf3wcQB8niwRtfOR312IgVsP7rNYfzCdSCnF0e4e1w4OP/Lzf9qhtKYe1SStUC5fSNq+xfue0HpIwum9+yxOTtiKESoUxUA7SX3AKnu5CYScnFEwqkbUOzN2pjMm1Ritoe17QlLMxjtoU9BuAvPUEkKkbxtSjDhTkGxJBDqfZ4pdL0SfaLuW1SKDQ2TYLQRtSGiC5IItitD6SIpZREC0JUlC2Xxx314N88U+Xw5L69CupKhGuMJdbkSGv/zEj4MoRXSOWBa42Zjxbo0rDK6yGKMwhUEbCKkjxkToPX6d6VuzK88yPYA/O3uRr3x1w2xUcGN/Sj0ZUe4d5GNymXx5mBzUdsuT71fD/BwBbdRAN8qLkIZWdfCBH716h9v3L7j95hnvvH7MbDSBdU2hHQ9unbOcb6iUMOIA4i6oArS9hFPJcOwEhiGn4PuOGAOQ7fEeV8Sw4fT+N7j4zh1Wb57yznLJKyfHTHZHfPX3hIPDGf3JbYwrme4+weETn8+6Cyar9qnCoazNmxOdzT8k5TmptjYrEmIxqkPIPtwfhItJbDkkanhvEd/0bOZLvv7/+Tvc/ckbPOg8x33P1b0dfudzLzDbmbH/8hcYXblCWTgmZUXjCnRoOex66kYz7zW3O81bDYgTTJXQOrG8t+QN3/PEnmPv6JD9A8XLzz7L1dGY5fyM07OTD72On7otoB928gKUURGj
zjMoL4QgA5zcYkydrc5wJHEEr+i6wKZp6duGrmuwCvTY4cygSysB3wXaZYckRbA1Shn6IPQxYVVkc3EVRhXjMi+m7xp89IiyeUZCIHYNfrPGVmOMKymVMDvcpxxZDq9/By1vs7h3j4u1IFGQsL2U5O2+2nZS2AH922gOQL516ZLy6YZGsYfiyffNl9V7NsGlyqnzZ/vHy/t+PnyKXOk+gWKX92tGfhCg49H7Q4wsVitOF/P33K/gPeXCqKxIaetv+3Ev3A+f93EX0Eqr3A2JmaNJykb3USKx6Ukh0axX9E3Dtu2oUGCyn7RKPLLrf/hJFQpnLFVZURblpe6w15akIkVRUSsN4llXIH0AH7L2hnaIzvPVPmakdB8znzLEgAo+r8gguuDJph5diLR+kDy1mSeahEz10LnN97BfJA87PihQw6zOWuy2StR5xvmpbDyVIhlNMhpdFNi6zABNp9FGoY3J/sMoUsodrhgCCkM52sPYkulOiRKHsVBUCVtYbFW9r+uiHvk0clntAgO9caCdPBxrbllAoLLgyvHJgndvnvDg/pyTsyW+gSPbUJjAxVnDatlACXVV584Zhi136bJbN4CpkCxiEkJP8D1aZzDb46p4U/I0yzss332LxQ/vc2/T8ebFmt3DCb/yhQt2qkS/Os9djqjY3XkaXJFhMxpyjyTl82HruhWyjrelACVZhnQApSV5+O/tej9c92Glhwtuivna3a0uuPPaa7zxre9yt/fc6QPLqwd8dmLR3SG7n3kJ6ywWDcqgSfQHIyrvqILGR0W1SHTniaAhujw68OuW82XDfjXFFTV1PeZodxfrA3Xx0SxTf2HiVUo9BfzHwFXyMf5rIvJ/VkrtA/85eWz4NvBviMj5zz1oQGMMMSWSCE4VeDujVyM2fQ/rFet4D7VYoo3COPD9hvnZnG6zoFk3+L4n9s8Sw19C6Zau/UOI97PYdlVgUMQuZuCIynsmIeuJKt/g13P62LBueryKbFZzurYBp9GFB9+zXJwTbU0bhKYXjE4UViisZW9vH5tapJmzvO+IXCWF30eSA74G6s3clgNId0D+Y2AE8iOylfuHTrxfUEr94OOsN+TmwgHZBXc6/H9BzXVeYl/dyGVuCZP9rbhGzUNPoG1cALfItgSr/OGuAxZGO3BgLi103xNN13E6vyAOOrEiwtn9+5w9eIiKDjFycnHBpm24fecet+/cZTad8txzT1OWA3IUOH5rxZtTjVJrssnBL+tkb8iORHvs7MPhE3Dv7h2AzyilfsjHPMeVUhR1ychobCgQL0gfST4SNi2p95SimLkyJ7Sh/ZPN4bNIvEBOgnYAnQyttBATm3WLVpr77jjPFqNHpUTX9TR9R9MGOu/xIdKnlNvHPhCaARAV4wBIUYg2IILReViSQSpCEwIhCj7lm9bw8Nqd31NWRtrWGsPlX+V2bRLBe4+2mr7PLa6idNmKTynWyyV/9I//CB7TOf6zIsVIt96wWa5YVhotiWrk0Hs1WhJ+vQQCXXuO7xZoCtx4hlIO68Yo5eguOvr5ClsI1ThSVAWjvTE2FQMXd1vqD6l320feojjUsB3ZtuK3fzvclFKE4Ln51vf4wSs/ZHGRmJ8LCzchbToKXeIbTfIKkxS1cbTeEjyEXkDHzNoYsrgg+JDN5M9P77BZnCKq5MHJir/yf/g/Ppb1Duue02/c5p3bCx40Htmr+K2X99k9OuKFr/w++4cHzM/fZb18QOiWnHz/D7GFobw6xlQWVViUs2jtMLYGhpmsCM6NsW6EVgVGT1DooaXMUCEP559SIIO1nwAmazMs7zzg1p98h7PjU3709tu8sZyzjsI6CbfOzvkHr/yYvYM9/vzzn+H5vX0KbanUtvp2aDPgKoC9SvHMTBOVJri84WxGij4pxg6IkcJZrj19ldGsYPS90Qct2c+MD1PxBuB/LSLfUkpNgW8qpf4B8D8D/qGI/BWl1L8H/HvA/+bnPVFSio01BC/EJJQUeLtDr8es
255klvjVhpA0zjmKoiSGlsXZBV27oF1vsgauf44U/zIxXdC1PyaFt5hNpoyrAositXlmKDGX1pEMRVe+Iawv6L1lTUMngc3igrZp0Mlga4/4nuX8gi45ii5RtpG6dhwdTHDWsbe/z6gQNmf30LYgpadB/idIGoPcAn4ynBwalW5D+o+GFtMeIqOB2P6h4gfAn/s4601+JxyQv1nbvaKj4gk+m7l2g4DTeyu/9++Oz4HvkZPdsKt4ArieUdJPqdyifn9s2oab9+/R+wzgSiK8/r1XeP17r7ynut3SL/7kG9/hj7/+LW48cZ1/+V/6fWazrVCGAgJKjcn84X/K+3yJPkIUwG8DL/D0S/D5X4f5xQnALRF5+eOe41prirpCFQVFSqQuENc9gR7fe2LbUaKYFUPiVZYkQtv7vCFVibS9mKpcD5sB4RlCIqwbgg9EH7DGMKsqnNa0bU/TtrQ+5tlUjHQi2cijjzThvVWD2c7UdN5wZZ3fbI3WtJ7eB5LK4xNtQG+TxyND7O1o85G6CyQNvsEBZQxdlxOvcQ5jcyWvjeUrv/1r/L2/9Xcfyzn+syKlRLvesJmvWBoNPhH3aqppgdYB318gsaXvLvD9krI+YLK7g9YFWtcgmr5ZsXhwjisTsQv4ccVOf4WUMrYkDd7aWSsib5Dy6j4S2+/7IDwiki5dk9QgmPLOm9/le9/8xwSZ4NMUpydcnKxwqmbqrlCaCU5VTMqSzlt8nzuE6IDoiDIWbRxJsi2l7zvOjm9xdv8dkio5nQf++//av8J3vvu9j73ecdVz8se3eHvR8U7j+cwLO3z1955m98rTvPjr/yKjneuoN/8p/o4n3L3HyavfxBSaSTrC7VS5lWw0xta4YjdX6oNaVen2KOwMrccYk7sPDLgANcgJQ8YZIAxuQoIYhVjF/OY7vPb3/j4P7p/wwzvHvL7cYJXBacO8bXntwQk7u7s8+zu/w8FTTzKtRpTVJA98tMUYsCkXR3uVxhohKYXXipDgYqzYCIycoGKkdIZrz1xldjhiPHt/sfLz4xcmXhG5SzZRRUSWSqkfkeukvwT82eHP/gbwj37RQUODjBL0oIJCXG519X3HanFK322IKdMfrHWUriKGjs38nL5b49sGCQGVTrD622iWiCwGu7GI935oFwlKCkReQNIUo9+lcLcprMXqgcQ95A+t9EAGtxnmjyKFQPQdodugNyvwmvO4zm28k2P69Tld02KsJcUNSV4hyRglhyC/A+oeiltk5NgAMpdBtPQjtNk+9noP8QiA/z33XCbbX9iFqoGraPpcNT/yuAl7Q7v08j3TdB1t33F2fsbxvTtsNj2nZ9C2wvJkPuie5tBaM93dw5Ulz720oonC0eEBV248yXiUOYCColkrNsu75E1Az7ZzICIsVysuLh56jhqt2dvbZTx+uAvtup7Ts3NiVOzt3WAyrtms4OTepYzmZni+j7/mWuW3934QjmQAVaFNbt2q3OqKKdGrkBXntM6gz2Hzli8wg9pPlEzTEdioNrsS9RGnDZuuoelaAnrw57XEJGz1ZdNwoc/gLNjq6QoKUYakEnFoMaeURWBEZRBLdjYa/K+36gfD8d/m4dyGHpx2tsChJBndrxTW2aFFK7iiwA2tucd1jr8/QvCcnB9TFBWu0ozGFT4EuqZDKY/vV6TUZApRtYMyFX3ncxZNCUmwXi3ZbJYUMWGKiDKR0G8IvsrS8nrQ6d0Cz1QeHuSibACbSV6vS15v2so5PhzfeN/TdQ0hCX0MRN1l5LSq0BGCadkUu7RdSddlZx7fRyINSId2BaYoicGzXs/puoaz5QUnizlRSqKMGE2nj2W9E+BVPjeMUlhXUIxnuNEkJ1VtMLbCFTPW4ZR7Jw1WC9dHBfW0xViDshrjNsS6y2OLAdyXipbgFmhbY8plviaXg9mDHjzXlUZpO5xvgzK+iiQVma+Pub1ecd51HExrzKignE4ppzMWyw1vv3sfJcLq7n1OX3sTfe060xsV
W31xEqTYE3vPxbzh9tmahCbZzFMObUcKARnp3OFUW8e5j97I/0gzXqXUs8BXgD8Grg5JGeAeuRX9c0OcEK5EVKcxvUaKROsbfOzpmnluqUXJMo/K4kxJSpGmWxKSJ8WOlHo038TZ11BEkBNiELqmzco/SdApoeSIFP5NJH2Byv1N6tHfYjIuqYosKmCjRieNswVlWYMrM4VCG2LXgFrk9mDTEnzHrdWcGFqa+T18u0T6NUU1ArnDWv4DYpxg5d9B85dB/1co9dcR6UgpDEdl22b+8In3467344tD4LcpEJ7mvc1yg8U+MhkW4PjijDvHx9y/fZPvff2POT1p+Cf/FO7ehS++3POFz201TsEVJc99/mUOrl3nc7/+W/zFrsdYQ11Vl4pACLzz2pu8+YM/RCTwfkfgd969xTe++cplQi/Lkt/96m/wwvPPXv7NfLHgn/3xN1ivN/zObwdeevE+p/dhefHeT/qxz3EFyajchpRB/m+QlzPDRbe0BbouEBRJa/oQafoen0BZm63ktEZph6REXHUkH4l9IIVIC2wWGwyKFRoLNMHThp5yMmHn+jXQGbzSeU/f9nRdjzE6z1uB4DMlSCmNMppEIhAJIvRJ8APPVKlcUVhjBz5oRKVMB5QtcneQg1QpU4wyGDXToNbrNiOoExRdvKywH9d6f1Bsmg0/ePXb3L53E13+LjsHu7AR1HFEpKXvbyHScPDEC8wOn6JtOubna2JI9K0n+sTy7jGrB2fUda5wva/ZrPcxdURk5+H8fdhcFZR5k2KyfkAeuW75vNk45bLi3Y6jBLp2w3o1p/fndH0WpGjsbYwu2RQ3KO0OOr1IrXeoF5H1MuCqnr47JYYLTFXhRiP6vuXs7DZNs+bVm29w984dQizwsWKbGj7ueicFa6fRzlB7qMczqqOnKPevZSESJRTlPqPJ07zVHPO1H5xT9h2/9dqSA2cxpcI4ha4NZuYe2cWDqSy6tOi6wO6NUYXF7I1QlUUph9I2V6Yui2uILRFtCGGND2veuneHrx0f4zee3//skzxzMGP8mReZfO4lXnvtJv+P/+zv0c433P2jb+B+9Bqf/f3f5fDogKSyYY8CfDOnXS157Ye3+YNvvY0oTVHXOGd58nDG3qTC2ymy5xGdK3GtPtIIEfgIiVcpNQH+S+B/JSKLRwEGIiLqA4xmlVL/LvDvAkxnIFUm3iilEJ2I4pEQEC9okSzbloSkDKI7kiSCb4gSgaGaZYNRa3ItlFs4KQkxW2Y8hDuIBSlRymJ09se8nK+Q+YTG2uwd6QpkqHpJkRSyawkCvmvZLM4JfUuzmhO6NRaPVQqlPIr7KDYoahRPAtdQHKKYIzRA+Cj5dhv64673ZTxqa/uLQni4PxhoX1EUMRokgfe5a2Z4ePJ4uvyYvid5n1WW+o5+6ECkOFifJUXhSqaz8rLaruoRs51ddnb3fu7bmkwqyjoOtKM8+x3YaPTJsljGy8Rb9RHv37s8McJqFVmtsoVdUanL+x9Zu499ju/sTTONI20pZZf9hayHrHTuuCiIZO/VS/PzNCS7fPXOftMxn+FJ8sU/MniDppS1Z0URBPoY8DFgQxpGfvqy0r48rvAIlUdt3/yAipVHtoX5vp8G420reHn0KS8r3i1PVV0qOeX3rYbvZowRLVukUX47H3e9PyhSCqzWC5Q2rNbL7BjlFcErlOpR2qN1ImFAF6TU07YtwQe6dU/wkaZt6H2PsYm+T5hOaFfn2ELQWKwdQ4pI6FBKo5lk4QixYCzRd/SbRUanb1vztkTb8j3CEWyPfwzE4BEV0WiS7un1AkTR+TVt39H2Pb6P+G5QYOpXKN9jfEfvWxarBU27ZrlpWDZtlq+Neov8/djrvV8XiMkymU4rjDZo6zJQiqxbnD+rxUe4WHuKtmcRNYU12FJhnUZVCu3Nw8SrFKbU6FJjakchXU68tKjKopUbBEMc1nV592YrRBtav6L1K5aLBUvvSSJMpxOuHh0wuXaF6Y3r
LBYte7tjljHQNhvOfGC9nBNCkzs7sUOCJ3YNYbOhWa6Yn88Rpai6nqKwdJUmGKHfaNqVw1pNsgbfdpl++hHiQyVepZQbDth/KiL/1XD3faXUdRG5q5S6zgeI94rIXwP+GsDVJ5WkfXBtwvUC/YqmSeioKUKBTllSzmFQKotog2B1xFyqK2VDBKu3reKHiTZt9U9jQuQU1N9AqV1Qt4GID4HVao1zFjcqcK5ksltTT44IGFoqEpq+b0ldVhISpeibNcvju/i+o+vXxNBTFxZbOkwSautIFOhkBp/G3wV9BeHHePMfkiUWcxUk77mY/dx4Afj3P856K6WymM4+GV31YaIfnrknw5x3Ybk64c7xq6w3nrd/DKt5Hg1f5ZEWy2iclb8Oj9gMTPLp3h6f//XfRJLmz/6Fqxg1Yn8P9nYfPlAbw87ePkVdf/B7EqFSmnI0ukSFB7Kl7hnA8Q58rXoki24/9KMxBX4dbQ1PvfCv8ytf/fX3/f4/UDyGc/zak1dks2kJPhJDwgTBhfye6vEUXKBftfiNp+k65qs1fYzM+5Y+JYoiG58rk9vREiJeZf/blLJAgCRBfMi835jnr0ESQRKq9awWa9CatmnxIRJ8RKKgjMKZjDBOPmTgj9Y5wStFMjYbWhYOm4EJOZQeLAC34K/MLY6SjR8Y/FS1NSg01lqcLTDGUBTZKB40g086Kma1Jh7XOf4zwgfP6fkd1ptzfvTDGevFkhQbYpgznY35yle/zP7RDfpUsFituTg54fabr9N3nvUi4H3ChPxefd8Rl3Ps2jM//i7ORJ77tb/A01/6F+gv7rB86+soYPrMlymnR5hyiimntA/e5uyVv09slwPtSrH3ud9l/+U/A8qA5LZ/VZZMxxlo1PUBIRFiQ0w9yD1aNccYk9HpZ9c4vvMEvqk4OX2b1fpdVt2Ki80FYjSpqogi3L57zvlZ9hnGFgw0+Y+93s/uT6SaWGYedNKMjBD9htBv8H6NtgV9P6frzrjYzLm52kDT4aKwYw1VayitIWohnD7c56OgsobCaCqrmRXr3KEpLdpoamMotcZqQzmg5MUaktLcbltudy13zhu6TaAajbnyxd/g6S+9zOj6E4yfeBJjZvylf+0tTh8c8+0/epuf3H5Acfomz5y8gkYIXUNsA8sfn7K+v2Fz5wF9e5bn8KyJnWEtG9xFSXNf8+B1i7OGSV3iQ2B+/NEwgB8G1ayAvw78SET+T4/86v8F/FvAXxl+/u1f+GoapM6bHGsFpMevIzoZtK8xyYLRZIJ1QlTKLTo1KF0NT6LU4EKkMk1OIQPgYQunF0Q2wLeHXX+BUGQ6RZ9dSYQabYqsqqMcPoJ4RUiCb3yepZFIRMJmRbc4wfuOPniSRApVgTMoAacNMggbKBFQTyLqBqLGJDVBOB7aTI9czH5xtB97vbcx4j0UW/kZ/3r4quRs1pNHuzPo4ppTdZML3/HqfZg/yJXmexjB0yk8/Uzm6AGgqOoR1Y0RpSv43LPPsTedfei3/P7YtC1L31/OynqVtzMIsLsk2xr+vF1nCdxA64qd/Ze5+tSvXP5mwBE/A/yTj7vmIuB9yG5CIUHKiFSNwhVFrgQ2gYSnD4H1pqFPkTZ6AmBFhjmvBmOyRB95o7EVZ5GYzcQRQXzKFTC5DRhCpOs8Sqv8PraevkN1bAZHHbWtiNXwWmIu+aDKmEHAY5hFqtxREtIw931EJEYNtPxhrnnpHWst2piBw5lViESy3GRSwve/+W14nOf4+yKlyKZd4kPLg+M7aLH4fs5mc5+Do0Ne/s2voIoZUQxt17NZb5ifn9E1PYt5JHihLmpKV2VcSbdGxTXL01fRfsXBk58l+IZudczy1vdBBDM5JKEwPmGiYX16j/PXv4lfnZHIXOni4Aa76atDez4fC2dzMmmMZ0BiEiWgJOJlRVQd6/4M2z1gpylYLxucVlycXnAxf8Dp4pi7Z7dQRUlxeA2s43zZsGwCuhA0mte//rXHst5Kgyk0
pdNEKzgFKfgsYBR7UuyIoSGEDa1vmXc9sfPctoZ5SoyjUBnwkmjTIPk4PHetDbXWjJSiVRqrclWtgYnRjLXCGk2wGRSYBtehB43nnbbn3EeiT5nhcvUGu8++RL13SL17hRQaPvfZ6zzYhT/69tu8u1lxvDljvbqLQZC2JTaB9nhOe7ehv1iQ/CY7RvUhj4Qk0jSOjcq2r9kprSamSLf+CLJVfLiK9/eAfxP4nlLqO8N9/9vhYP3flVJ/mVx8/Bu/8KCRryW6yJ2C2ENPQgIslx14KNWv49SXMQacFay5YFT9E6w5Hlp1uWWjB0RhBogYYvwyIbyM4m2U+hrQDgRrhVEOrQtMNWZ2eI3COWzhsv6nzibSwQdCm4UHUt+QfEATURLRfkOle6wJWJVNoAotkAIGoS4qsJrk/zGSbmXOJhGUx+j/LqJatH41g67Uh1Y4mX7c9QZyYlowZKkcHjgFeoSMmzt97y+32KWfAN+De82bvHnxCqvzjvY7IPfyIwSybe/T4PopV97eoT47hSsvwuFzZBT0nJgiD87Ofr7nrsD5yQPOHzygGo+58sST2MJd/u7tt9/kzdd+QhrV8MSTxNKw4jYw56OYNqQoPLh9wRs/uJsr+uvw2nf/BDL4+1/62GsuZNHiKBlbJ5qERhCiEkQrNkrToFiLYiXgk9BGISLoPiBtn2e8PidNH/PsVYSsFpUSYetoM5Di1XYnmhLrpgGt6H3mjactoCcEmq7LCOmUBvvQnFwjWcAArbNXrx02sGnQFrYDaGrY9KLV0N1RGJNt0rLJu0IZmyv2S31dyXxW8lPMz8658+5NeFzn+AcdCsmV+aqZc7K4ixKPIlsanhw/QBlFNRrjqorVfEGfSrooXMznNJuOwjkKZ9FqhdUPsHTs6UhlLJuuZzVfcOvmCf/sj96ib1v2Xk2U4z2irYmmpr+4y/KNt5F+M4CxFL/57CmH3RxjSrQek1JgUhkOphUxwbLXpBhIfTMAsPI1LIQF6/XbrNeaZtNTFTVtp+iCYdMrlq0Q2w5pH5BQLJcdbRNQxrNZ3eHknTcey3r3Qbh55mnXuf3O3Tn2T95m50rL7mSB2nXMX7/NyRuvsnj7HiEKUSl6bWgHYZZWFI2XnJQlXVLcSm1wWlMqxVgrjMo8aA1URg8Vr6awGQQbBxrc/c5zv/f0KCI6n4elQY8s2ERMDWjB1XvYusMLbLoNb7x1jz/4Jz9GSSJ0HbGPnL+zZnPec7NpOF2vUVqxLhxWa9ZmQ6UNxmqMM1TWEiSDF7v3uJ794vgwqOav8cGgrT//kV5N5YRrdJaU842wRjI/cRGJjcbJ72Ll36IooK4VpXsTa15Fq/s4Y7A6z5K2X+SYIiKGEP4Mff8/Ruv/Fuu+A8rnGRoKURarS2w1Y/foBoVz4HtICW0MWhuQhhAWBO9J3Sa7JUkgiUf7jkr3RBMpbKaAa5Vy4lWashqhROj138s+n8kTo0epr2DM/x7UIai/inDKR5BV/KGI/MbHWm/IieACuPPwrn743yWRbPP36nv/HnJJ+8P8qzvyOq/yDfqLFvnj/OAH5BqT58m0on7G5PUxtXkAv/IkHN4gJ8UVIXrunR4/Otv76bcpwhvf/y6vffcV9q9e5Ve++nvUk8n2l7zx+k949ZVvkQ6vwOERlAXCW2T64YdHi8eYuPvuGa+Ob8NLZEveL98A+ObPWG/4qGsugNdD8lUIhoRBlBCUkFRkrTRrFEsUy5StzLooWXCj8yTdZvNzY3KXJmQxjgEgSxz4tSKJbb1glMFoRZCUxTmUIg7C8cCA1gfVtlzO9obqaqs4pe3glzrYyEl66JyD5Ep360Oam8eDD+z2O2myFrm2Ju+wB2lWJdvjk+fG+4dX+Iv/g/8hf+e//C8ezzn+AQdCyNeH+eacLjVURcmsHtOHyIN7d+j6DfVkSjUa0zVCl0ramDi9WLKcz7FGYSxIWpHCA0obeeG6YncyJN7zC9586z7/7z/4CcuLObvu
NUpt2CRhs+2+pYgGaqMorGb25RN+tT3DujHWWCQGZrXlyk5FmywnfUXoW7zvEImXbATvLwj+nNXasV53lEWi6RSNN6w7xXwj9L5nvbogxHQpTCQDHuDo2Wc4fvudj73enRfePs7UN0nQ3rygO91weKPh6RfPsVJw/uN3uf+NH3Bx9wQfEglNpw1aW1rRkBRLn7i/6Qgx4vuOFOMlgE9rsIbLzw7ZT9tok5HUGVo/WK9CGwdgoSuYjqZobTC1xYwtuEiKG0Ql3OgQN4r0Cdbtmp+8dpv5/fWg5NYTY2K+6mi7QKcULRlv4YphlDKI3VRlwWhUM7KGFDxGhNZ/FIuEfw5azbq7FMhBK4WzCmzWPE4aUngXH7+eDRUAwh266hwtHbYsKYwZKAwP7bdSCsR4kxi/CbwJJJS2lHWF0gZbTzDVmGI0yQo+2mELkzWJUySmlG/BE4PHGJWxogmIgjIRY4o88CE9pA9s1VVIZPp6RFTMPyWCLCB9H9QumrNfCnb+OGIjcJEyenKzWLDpPffPL1j5hjmvseLWTz8oAq8B78App7yNJ6wSNLyno1tvYO8uhLJnzgMwitFzJ1SsWFzc59a738X7n27DVFXJ/u4OxgzWYCKsFnNCDHQxciqJWoQJ2dkpV26JZtFy/v1jQqGB2yC3uLj1CDuDjGU5PYObj3yss/MtMMxyevoj3n13ATICNcvziscWW/VYPXRjsuSiDJVrjDGL4ovkKlaRfUmHZJhBgvHS3SbJw7YuAx0HneepIoOWrWQta63NwHfMa6qVukTP5so0z3LVJaDlYSJVPAQ9aa2HFqDk6lby8RlAFQ9xWcNjtN4Crx4aAQxPerkGIo869sjD5P9JhdKYYc7syjGuGlOWFdV4ii1KFqs1QSLjkKi90Ld5899uWpq+owuBqLZjAkE5g1jwInQBVos18/vHLM/ndH2kD6Bsdljbm8w4GM2yZZ33SIqkfoESD35Du7hFUe2hRyMkeYxELAmjFcZaJFmitRAVxlQYVWa8S+r5/7H3p7GWZVl+H/Zbe+8z3OlN8SIyIuex5uru6qpqdpNsUvQHUiINigJlW7RM2rINwYAFyB9ogIIByR88wBRgGLZMAfwgm5ItCzQoUrQhq0WaJtnsbnZXVXeNXVWZlXPGPLzhTuecPSx/2Ofc9yJyroyMjKx6K3HzvbjvDufue85ea/3Xf/1X2624eu0nrJsDqGG6c4Gx76iPD8G0rFYZiUMyCpGiJ8UP5xTeyxKw7lnvKSnRB9Ja6Y4WvPKT19g+OOaVy9e5fDDn+rLBx0QClj7keRF9rWLpA13MU9x8ykzvSOYRmJT7ZmU4LwGbcsImAjYCkiWIE4qPAR8jRiIxZo2IFBLqc7ky2YCmgMYOjV0mscVE03nmqzWasp56jIm283QhZtlUYzDkazCf0SetQ8Ml1EVFNOWg+UPYg53HG6C4A3aU4eYKgx2VdAhdrZAUv/i7hO4f0DaJVQxUhafSO3SVx+1uMSodg4z/pp4WIl3792nb/5qySpSjgCvH7OxfpKonuOk2bjzDFjXejlFTMBqPqJylXRzSLA5pfUe7PiaFyHQ0oXQjus7TdR5rCooENgZiavtMOGU5M010MW9+SQxqS1LKWTH6Fhr/twgOS9VHbA9yxfOFcg34EXD1+nVe+t538Ddv4n/rd+lu3+H3aPjROw20UjJS7CHg6Qi5FfkeH/r0Dfij/xScWfMy38HZmmeffIEn/8gXefnF3+M/+g//Aw4P7rzt5R9/7FH+2K99ncn4pNfWdy2oMu+PtwI+S25mGuzg9SP+2d/9JsfLDvhnwCu0837gxvA6Ab73ffjRiyf3xQCrdXbK3/vBb/HjlwoonofqK3AftWwFsOJIGnLdNSo+BGKMrJpMupp3XW7/0UQ0JtdtjcmtQykRfOhJxAZVCCkPO9hoO1tD6ars9GRwfhlio3fkwKYePiBEIgZjh9puHyL0m5mqnnL+ObIyfdY7kKkgQ9309VyR
weGe0i0esOjeAWuf7aZ0cjxm6CX7GM0YSzXepixrZnuPMt46x7QecW62RUotr19+jRDW7J67xGxnn27tWRyu8J3n6PiYrvOU4nDGUVWG6XSKNZFFWBGWifrVt4h3LFdee4N1k4haMK132RvNePazX+fZz36V2HnWh8e0zYI33vwW8/k1XHOTO6/8Q8ZbT1E8tgVtxIWGKjVUrqKc1Fgn4EcQlao4hzNTfHeEb+9weHyDf/o7f5vZbItf/zP/Es9/7mvIZJ+5rzJ7u3kLbRsk5n7slI7xcXl3ZPoRLCrc9ol1jHQpZpmCtVKvOt78O3+fyjleu3Gb64dHLJuOeZcvzDatc4DWU+dDymTXlLLGvqaE9Ap3BumnuJ2Esfn8CiAJzKCJkC33Ric0CVUZaLtAWHri3CMTwOYZwqE5wq+P6NqGtgscxSXdusvBYa+e5UN23LEoUVOdMP37RNGKobCGkTFYMSz6AKR7nyEv99qDdbwpZ7wySI2mjNmrEQqXWfhqD8EeABGiz1FyhBRlk/UYU5LSVq8j35BSIKVjVG+iVCDbOdN1BUVZ5rGArkCMIYRMVdFePCDEHO1473vN0ISRIYMwpyL5XNAXzdNBGAQI1KFplhnLugBaNs0W2iB61G+Ql7Bme5BYfWCmZGh5DRw2DZdv3sRfvw6XrxBu3eYad6HQd1lROJwdTpFeh7ngrtakJsF6nvfZUCRc2eFjCzQ0zZxrV69w59Zt7jVrDbeOjljHAJ2HlMkKzuURdVneHYZh2dbWlNUUKx5/uKI7WvefqkOA0T16larQnVKVTID28lrrrmO9BmvPURTHyAfutfpgNsxhHaDa2Ge6Ych6Uzq5oE3OWqWfq4oMDiqnmsNFP7S/DU7ODsPl+/uG6T/05KfhOOCE9JTfzmyIUPlvJ8SojcM1pwbV9z5S0il4enC8p15zeJ/h5yAo0afb93V9P5j162Xs5mZciatGhABdyPBi1bSYVYNvA50PGabFoMaSjEOtAwe2KjEmoqEjaqBtOhbMWS/XfRuYyaMTRxNmWzvs7p4ntB2lWtqyYDKZEeIcayB0S0K3QmMemlFVJZPplFoKyljggWAdStqQ36yrEJ2BKPPFbZLmVsuyHlNWE8pigrMRISMhw9d3Mkjh/qzqcD4mhoFxuUSinef27QMcwp35ksM24GPqyw1KUPo+b2DobZY+kDNDPpnfIPU/T0PNeRiRgvRBYN+w0cMpmafQcx9iTLlPvWvyPInkSN7jV2v8ak0MuS0xkOg0z5WVnjMRU86ktUdqRPurUE+tYV85SUnpyD7pQ/rdB+x4o1DecZh1gkpxarGpoMJSb5foxMCWQDCk4EltixApbIOViG9bjjUi8jmM+e+BTjIZKnnQ/xdl8Y9wtsKYArD4ziOyJrWeeHQI9GQqa4nbM6qq4PjOAccHd5CQkFYwYlGfB+eFrsW3bd6QtGdmpqw05EPKdZj0BCn+60AN8W+D/gGaWkgNQqIoAtYYJuNAOQuMqvjJ4M3AlavX+Me/+Ts0h0cwn6P0ddp3MGMMn3n+OZ55+on3fM35NfjdH0Bp4Re/DOcvWHjy8ffdbG+T81XXtvC9HyK3bvH8c8/wmeefZUqmGI+wbHEemHD+0T3q8Rc4/8QVQvynLI4DeSrSex/fYIfAt4FFIItxXoHHHt3iC5+3OAv/7//PB3qZ97WkStu0tK3foDG+66GtpiWGk35j6yz1qCalhPN54o/pIcIN9CuC68e/GWM3GWsegN4Tm3onaEw/mpITxaTBTr6OHk4+7Rw5gYCHmuBGaam/L6ahkaiv1vZkqtNOfTBnLUXhEAG3mUOrPbkyB1cft6lGfLciqUcOlba5gzn/GPsXLmBKw3j/Es5v0/rI+s4NqnqP2cUXSEmQoyWh8xmZq5TRODDb77DS4Ra3Ed9yvDA0hwvuHHVgLVVVcen5p3nswiUmj0xZV0cEaWmmR4SqZeu5fapuxOTRbVK9Q7LnCMsVTh2/9Ed/nce/
+CW++cpt1i/eYnnnFtdXc7r1ipAOCfGY89tf5NFzX6Vpb3Ll5j+iCSvW64b1vCW0BssWElva+RHr5RGqmciEUao6dxPMufaR17U0wuOTmpUPtMOsYXI5ZR2VtSq2LNkyNjvA0SiXW+jPxg0CkggxJ0B5NKOelFSUk3KWnsw2jimX81J/FiZNmzKKSB5rue5aFs2cO3d+wo0blq3wBFv2SVZXjrj8mz/i2tVrrK4fozFzJTqTe+0tvUocQhIhqt0w+VMwGJMg9u10qcOHPli2ZZZ89fFta/Ve9sAzXrt0mBQRnzDWUFiLqGUyKpHksCOLTYboPbExpJSz0UFLtmkiInsY8+vAXt/y0GHsD7D2N3PrgmQnmWIk+A4fc61hCI6stZS6xlclx3cOObx9SCGWiavz5hWVZGKeJhR9H81kmE/VkJIQg+ZB1mmLlH4NdIakfwQaclasHYLiTMI6qMpIVUUKd38gn5/Gjo7nvPzyq6xWq/d9rBHh/PlzvPD8s5v73qku95LAN34ERQmfeRx4QtDdnc2G/W62Al4F8BGuXEPeeIPdnR30OaUELiiM1QAzkB2m23tMt5+mrKbcufq7rOaW3En8wcSNrgE/BBYtecbCFdjZhheeg6IA7pPjVVVCCHifyxS5DOIzpNbXkZKmnplvcIXLgd0m0z1lA4RrXQ8T274mfuL0BmRGeoGYTcvRXa81ONGTezaOt08tTjvc0/wJ6F8P7na8pyDmex3vAHsbkzNz2by5bv72cdd484buURJNkwhhwWx7B3U5Cy4mW0gomR/cZrVaYqo9yq19UEeIixy0jyKmjlSTQLXTYukwNNAammNhddyyWgcwgisds/N77D32COWkprNrYtES6jWpCNTllFJHlFtbaLGDypTkPdYIjz7zHPtGuGZfZ/foZQS4XRSEVggxj3Yc1TtcPP9V5stXuXLrnxBihsW71pM8iNZIcoS2wTdLNHPTMVWBq+6dqPTTmzPCbukogVYShiwd2SZlmTo6zb35I2P7bDh/F16H5DRjPSnFLOyhGapVJddnh/JG38qW+p8+9OpQCjoMce8z+RyWZqEZHyOtb1mubjKfTynrGZPpY3RHa45euc7hlet0x1kYKWkOGIz0ga5KloiUnpSbMipKynXefJbHrIkdNSMpts/oH2qoWc9Tpn8N8XkxnXuVovgDDIkC01PHFUPMtOdRr5ZUjvsvKsMIwhvA/wORMa4oMJIQ8zKYGhWhbTo6yZvcQN6h36TKwmEQ2nmLXxqWR8+znv9JOrlBst/GmhUxWFwhhNgSYkOPjuSpK8FnMfkYiVFISfAeUMXgMbQUNlAWSumU7REUhTLeUYppoq4/hITGfbCUIreuXObVH/6AW1cuv+9YwpqcbU410b51hd9Pyu7uNk8+8RjOjckSryNyC89tdnbgF76cSazTaQ5Uv3/jOt82ltfeeoPm3Wj2x+TMs/9dFa5eu84ffOf7PHvL8/ntX2G8G+HSAcxOJCLrScsTzz9L11z4UOuwT0bIl5Hc1/zLcOkSPPdsP3nnPpmmxLpp8F3OdkOIhBjyLNsYN6MNjTW4ntyUNGG82TjgjQLUUEM1ti9XmAw9bvA3Noxia03fL7sBqIcUo+dfnb7vFBx8ulbWO9vQ1942RKieVDI43SHjfSfHqz1svsG8dYCl82Ody8HDx+14RQymqPoSU/70ne9YLOaZwOQcphxR1DOqIFSjGWU9QnCEqeB8xE4TZpwYjQPVVoPRDoktYisa6/HRZxIQWQFv8siUrSd3aLsly+4Oxhiq3YroDTdev87i6IhidJ7RVsdW3bDFFIoRttyhcDVb05qL+zVVnLJ85BFWoxHzg1u06xXTrYL9SzXVfMr+zX18cCwP7nDl5R9xcLzm+GDBcn4HTSH3UtcXMMU2k90tth/ZQ4zh6o+//dHX1QjF2DAtS8ZJsCbXPIPCyGfiYApKihl+jWmYtpWh5Bg8qT+/csYLoc92Y8q3gcy34Teosg6J
JsbM2lffZ8dDuY8MFw/8AjV8/wdXmB+3PLYz5/Hdq1y9cpXfe+saNw+OOPARer1nFZOz8f4yOD2lS0VB8xhDFSGQM2KRmImMxtDjoJlM+yHsgTpew0Xq9NdInUGDUBR/l0J/iDVramv6KSz5gxoHzlnA4lORF7/paDuPppdJ8f+MMZZxPcY5109SGdN2nmVfd1ktlohAVVhKZ6iqgnI2QhTWhwtCSMyP/zSL+f8AK9+gNd/CmkOatVA4xViPcZ5ND6Yq3uc6QAhKCkKIQtfmep4zLVbWVC4xqhPjGi7sQVVCsZuwo8R4lB4o1Jxi4uobr/Oj3/8mV994nZTe+wQZA78InE/Kb7/6Or/3+pt85vlnuXTpEZw7TXf6feA25/Zg9+s908/k4OQbVy7z+1evEF95jRjehVF5AHxzOMj84823rnD5ylVWTyh/etrBhQSj21l0aji+qfLcFz53yl18cPsqvZvpf8lZ50/xQu9hKSVWqxUx5P7Z2CtHDWzllPqszxkMFkfeZDrbkVK6y6GJDNNYTpjC2m9Kqc8cxGSnZp3BObtx1sBmN9EBwjtlm3rsPSfjkPFuxgf2gZqaPt/tN9DT73OP9OA7kqestb24hn0gULMYgytH2LJEY0tST9u1HB0dUdQVo908a7scgWhJPd6hGo0RcahWxKAU24qbQTkK1NvZ8RIjuBWNO6QNnpB66dnCMXtsm90X9rhx+RbHl19iNN5m6/wzdK3h9ncvc/X1N3H1nNHM46crHnHbmJHHbO1RlmN2t8Y8fnHMxGyzevQxFpMZybdo6JjtlFx8aszkcMaNq4+wXjoWN27w2nzB0sNxqzTtnJQCYgzV5FHK6ZPsP/EIj33uKayzfOO/+Jv3YV2hnBgmlFiyelNZOUSlnwqp0MY8DUq11wMHW+Rzs1uvssRiLyOaVGn7aU+xP083TlDzHPWgcBwj85joNLFI+bWzU84QdBqIiTG30H3z91/ju999kxcmL/HCdMqV5Yp/cuU6R23HcZcd7yA4rGTSGJy+ThR6GUhNOdgNpxZBJJ/PpWT0St9nX73XHmw7kTRY98NM5EAwXMUImA1LWXsIoWdSJlBqQnyCpDWqryNcAwFrI8YoYkIPlWUIOMUsG5kjlgwvJ5vnKcaYmXQieUNMMaJ6DeF7qL5MSAuStrhkkCgYIlayWk+SdArGy5lNZjUfIOY7IDWuuIOzSlEpRQWuyJmgteAc2EIx9sGXeHVoUk8V6AVwIStZOYX5PNN9T5ntb+iUGMekNCPPEEzk3lxhGFTQD9M5/WbE4zndcpl7euK9J+QEmFJVu+xsW7BwyA4tLROyj51ub7PAcBhhrHrXyEERcrtMSrlO3Ta0jFkzZcASxMB4CtUpwlXwsJpDDIk8UrChGo0YT6e9g7s/pnBSIz1FbhoITAPEbE5NCkiqOQs85XTZZKT0BJU+5RzQ4UFn7xSBaagNy+CoT0HF5m3jKOVt/x+OdchIT8PPSdOGWPNujvc0vD3UmTUlkggpGUT0Lmf+sZoI4hy2KDM9Qw2Iwfs1KhHXCCk5hERZOgonGAmIyWUT6wTXC/pXlWU2rTHJE9YNCYsrV4hLiFXEKGISQVu6uCKRMK5CpCB6IbTQtYmmjRweLrl67Q66Y2h3WpwtSMd30K6B9QEjXeOLjvM7jomrcHGH1XbmiMyPX6FtDtieOcblhNFkRFmN0C7Q2g5bl1yaPEoiUc4u4EZ7TPfGFA7E3p8S1wa1UJNvpwI+iWQybH9u2oFrIIItej5AKrCkDfFQVXEhbbLd1F83Q6m30pyR2mSoUqJLyihEkmZ4V1N2wMOM9wxJgxGLUcH7yMG6YeUDhbWMioJQZa7BEMRC73ZOcx3uYnbJqUukD4gll0xcX0rxHxLBebBQs7lMOf53iSkHE84uKYwiFJkZRkJ6x0skF7PTHm33PyTFZxD5vwD/Bc5mxnKue+XxatFHOp/wPpJCX5BHs5ONEEwvebcMeQ1Tl1db/yuM+01i
WtH6G4gkCCOiZh1mGwGjYPuezJgzmZA6utgB38eV/0uMESazG5RVZFzCuILaQdk74GqkuAkUFZ8YuQouAtswJY+k3QrwrW/DSy+/w2MN8DxZZaIknypr4Ltkt9y8w3PIm/1PXoEf/Cizldvu1B8FeAb4PPvnSv7or1bIGH6bL/MWn+dZ4JeAcfEkr9U113l7O9HGvIcf/xCuXOYmL/AKX8yC9+SN84VfgEceP3n4agE//gNYziNZkusNHn36GZ7/0i/0c2Lvj+XaYmC4cjMUa3unk/+eWfZ3O14xnJCrZDPmYxOV58HfGVpDTxjOxvX1VDtMSjF5XN3goTW/bmbd51e8+3jzZSCnHH1d54hlYGQPk4ZUlSR64njv+dwnYhv9pgqQhjYjRdVuGN4ft4kxFKMxxXiy0XM3xrJcXMdaS/AZKdvZ3mG2PaMcgTXHGFNQzca5rj4yMDLs7c549vFLiEaO3YRucUx3ec1idBUbIrZK4DyL5jp35gU+Rer6AlYr1rctq2XH0UHi8DCyWF/nJ69d44Unn+C5vaeIXcDfuEwMHaSW87Fht45c/GxJTAUdu0Tg6huH/OgHf4tJVfP8k49QVzuY0R5STLi5OsQsblFPtnj8+V+mHo9JsovKhPmy5eBo9aFZt+9mxgiTyQgJBoJQlo5qnLWTh/RRXUS7iLWCKwfyXz5fYg2p64VayG1E7dqT+p7eAWoeoGNx+blJyCI0QVmvBzJWPn9DzE44ahZjigrzEGljolHlpVVLUuXSdEJUOAiJJuZsugvDQedgMTHokfdwNycZuDGnBoH0I2TzFKqEX9t3asp8V3uwGS8dal7NH8RAlk8soI+u0UGdp486eggihCkpbWNNhTV9P6Kxm14v6DPcmCOpt0n2DvBcoq8R50lI+c1WiDRk8crcmpK0DwKQPE1Fh+zhFAzRK+iK8Rh3jLWKq6Cos6N1ZS+Pafus0Oaywn1Mrj6EFUCFtQVVXWF67Wbd8YSq4q5tUIQcIhc4ptTsUBTD5pmA95B9hLw06wAHDRsuowjFaIR1BcRtCDtMJiYPS5jChCkjcra7BxjGNASUloADbK7/920usY+U43pFOjpiEY+4HY6Jfe5dlHD+AEanIOqjA7h9ExbHg2DmdbbO7b8nAeynsYFlOaSi2fEOEGw+v3M99tSJoJloSGJDPOr3np6YlEkkm5aeAWLm7gx5OIC7oF8GOHn4nPn39/rYp53wAFNL71RP+9vTjxmec+96nmZH51ZA82AyXgRjHcY6nOvZ4Cmhqe/Bj4EkYIziCoO1yjA7O08uAsSiaC/0U2f5WMk6AiIgJmFs6qW1lRBbWr8mxazLHrwSmpZm2RE6JUVD1wW6FGja0DuMhF/OCe0SbKR0kcJCPTGoGFJVk6zj6PYBnb/JqNhiMnmMybjGjKZIOWXtOkaUjLdq9i7sMZpO8KEmRocPLRqavC/el2XtW9kS/ejJzKiXnm+A0u/DfTZc5r8NG58k1wuzKIZhv86CFzbmzBXN+7nQZ8om95sjEE2iiNK3kvZ1YKt9nTjXlEPKPenO5NdbpYRBGInt68xQJKULMbORBy4EfY13QGZOBa5wcm0O/w1XVkpZ/OTD2AN1vCEpt5dpI6CuxlJYl3ulNABDI3QihkjXdaT4Jqn7G6Azivo1qiLLirkB8wohr1uIECISdeiQ6B16D9VHxSIY7Ulc0svbmT+Btb+Oyg+x8n8DOcYUFqzL+s626uHNDNkqC5CWwiq2SlinlCPFOpjMcoZbFzAqoJQsfZb1qQ2myFnJgzVL1nX8NS5dusaf/OM/IRURZtkt/pAsGrmx8Ri+/HnM9g6f4RznyfMPig/c6ipketY2uVnpR9RbI37hX/kL7D/3HLxxDG/OmYyh6luDf4E8NuUcAxhwRK4hT4DPAPvsbm1zYXePVdNw5dYNVmRy8iHK9976Jt948R/0g99zoPONP4DR9OSouhYOb4L3CbgFLPC24ktf/yO4D/7h3teMtUwm
E0QNYPpWiIG1mfrs1oCRPqMMGO37b9NpBzZE3L3ylYLoEKCeoM4DLCymj9hTJJ7KaoUBcb534x0gtp45yuDoT4hQQ9/8puVD+2aOwdHeU8vdOF3JhCORoYuUDYQ3vM7HbSKCKxxVXTM9d4HR1i4peELbYEhUNmCNMp6NqMcOCPj1ASA0OSsgjQq0KtDUUE+nqG+58aMfsL59HX/jOoY1hfHMaktRGNbrNYdHRzQ3W9pbDc0icnjD45uInwe2yz32Hpty7okpj56/SHXucXAOf+0a/s4ROomUs4gpSsrZNuJK0mhKKisuPbXLsrnApJiwe2HKeLTF1sVnqbcfZXR0Db09w1VCOY5g5hwfvcX8eM2dGytuvDEnxfu55r0z7adj5IAqZ7dGBHUFjLRHYcyGTAhgK0G96xnvOaEp2r6vtguZDxGV2I/1FNvDvH16K0ZxkjNeHwOaEjaB9NfOUJYZSx4aQmlyr6MCMV+Lx02iCUrb5gDodLscrlf8SifkrWHkn3V5UpLGRIq5ztyGRBcC3bFj8SFW8IE63qSw7BKVkQwXY0mSYTh62GCIjH2MNJ1H4wHi/3FeuHqKNePsMPtu5txjmyDGvgm636DglPPNhKi8OQ2zKQVwiHwBY/48RrYx8p8D8zwhyVrElBhb91mLoCSi5vmNxgAmYYu8wVsH1TQ73tLlW0E+N40hD1q2clfG8GDMAOeBZ9nZjuxsvwJ9jut9HpFwl+OtSnjycczFRz5Es85pE7ILPUc+vX5CUdc88bWv8tSvfB2++20Yf+ckAlfl8XteQVn1RzWwqPeZ1DUXdvc4Wi64fnCbROZnXQFeO3idH7/0ex8awnzhS1++79mXiFBVFZLDvI3T1Z5osoGx0Kwz3jsyi+0L6/kc2TjeAflR3Zzzp+0kCs+BYd4I44Y09V7b7QmUNrxP/5pDIKAnUp1313oziiH6dunH7JRP1bU5qawMDjfG+ABYzWyIXKPZLlv7lwhdR7teIeqpWGIJlHWBKwwpJEK76mH1kFmzsSKFElM47hwvSO2am9eusrrxFtVqSYnHmkjlDM4K3nes1muWR3NWNxfMj1quvHZICjArthm5Cft7+zz19AV2d/Zxk90ckHUJv2jABdwk4gRGlcWUBTqtSGXN9t6E/UszRnbEZFYzqmt2L5xnuv843cRw5BowHldm9G69usnx4R2O7zTMby43wdV9WFkGbsHw/Q6CL6af00sxCKqcoCfG9mx8K1BkXXFbZIdoq5z5mjZgujxOkzbv5fQBpUbN8r2SsNjckucVSZnUZXTgJ7j8s8ioUj0uqCcFmjLPIyZlsQy0XaJtI03Tq2fFnMCZ0iHmFL8h9apWgC1yiSiGQPRZIGTZRdbe86q177xc72IP1PH6LnH5SsNsWxhNINVKZQwWxfSQQ/CB6H1uaQgR0W1s8cewsocrXsQWr/dOevhiMkZvivydapQNPVwRED0ZIUgmmWTFk7y4Rr9PIX8Hw0uoKEoNUQnJ9xthxJiLOPOrIIKr/muc3ME4jy2ywx2Nc1ZbjTLEXJqM1joFiWQqenSoLwnxw31BH90S2b3+kDza4J0dze7ONo89domt2YzxeEy+Si6Qwd9jcifsB3FsyskIhTv0mpPwFrAtcOs86OeYzxe8+dbVXsc5j2wYbDqd8sQTj1KfGtE7X624fPMGTdf2DtaSpwLXnN+P/MKXl+/L2M5mGGrdTzz2ixhzfy8BEcnzdBHoFYQGJyomO6+Q4kbbdei9RezdmaCSI1WRkz5FPXHiQ8qrQ8QqA/zV9x4OafE9NrQV6RAQpH5jG/6mebAAnKrx9r/r5r9Tr3cPoQrNn2moh5k+KDjd9/ux6zQD1jimo22mkz1mxYgRltDTOI11zKY7OAexWXJ8+06/dravDY8whaPe2aOabTGebbO/PUNDRfHU87TbO4Qblwm3rjI7t8tjn/kcblSw/dge5XbNthXSjsE3kSefzlKrpYxw4ti6OGL7kRHTeoymXB8s
ds9hrCLbBeyVmLKknGwh1hHqEcmV7JyriexRmIqd8jxlMaIcZ63zshgxHu+g0uGckNQxmz2C6BZ1AdNx/q5/+7/66M3qmpSmjRQqWBUCfe3fGVxhoZAMQdu+PNJ/1Wr6DFbI9VFjMJK1xU2dnXJaeXSd9ZrdFHIWRY8GAT28nGJGRbs214ZjSCSfBZyHMo9q3nbaIKR1fl7wMTve1tP6XFMOklCjpD7aPQ11p5QZywN5rChd5gf0QWlhDCMxiD0Z3PBB7YE63qZNvPzKgvOXYHsP0lSZFJZClFJTP56pw7cNMSaij1i7S1H/GxTui7jqP8QUb+ZF8Vk2LDvefi6vE0zY1OY3dazhy7coViOSB4qiClZ+B2d+Pw/1BpJO8aElakdyHUFXWPtZKvffxhpHNfoOrvh2zmzrDCXXVa7hFhXZGfdBn0lAm4/HB0doK0K4v/KE72+RLFUx4qQ2/Xa7cGGfP/qrX6Ouqr732QBPktWhXiPDsx/U8b5BJmH1V0tH5jO1QHoMuMSdg8v87jdus1gckZWZ39y8whOPP8qF8+dg6+RVD+fHHC3mG9JDPnWfA57l8cfGPHrJoh/o+Aoys+xZnnk+z4u9n2aMoa6rflOFvGvkIM/2bMzUtZlr0GcCuVc224akpPQQjW6GHTDUWzfZMHCvIyRPDeo96V02QNcbcQwFHeibepL5ppg2xzE43s27ZC+aL66+5r6p4zJsrnbjdK3NPIxhmthGU/pjNmsdO9N9drYfYVxOqXF4PA5wzrG/v0dRO669/CJ3rl7FFRVlPaGoKkbn9qmmEy5cepJz5x+lqiu2tqeQIueLXySu19x58fc50BW7Fy7x3Je/hqtL5mZFJ4HJpXOM7TksBaUZZQQu5LpkkBZvGlz00K2ISakuXMLs7+F2dij29hgY7SrQGUsUw379FDuPOKwaqlRhxFJUU4x1VPWErel5oja5FJA69na32ZpY0sUxSWd9MPa/+sjrmpKyXgXoRVtSgNBGbGEp6iLfXwpSGk6fhEP5wxjBpFy7s6YAZ7DbBVIYtPRQBIwVbHVvgNYjOAk05pKkXwdiSPi1p2tD9gt9G1/XxKyc1QnrrkdRfSCmxHHb0obYq6+d5kMIyUc09CVPzd0GrsoOtyoLnLO0ndnIShYlFN5SuIc441WFrlXaJtKsPI1V2kZIBkQSRhOh17RNfYFc6RB9FdRAvIlG3xfW+xakHvZIfYarAkb07ZvF8HtPkjrhSnlEEiJTRJ7MAh7mJ6geYJ1iC3DukLL6MdY6qvoYV2RIuaqz+EKRx5jm+ZGa44GUyBrTHaC9CEJyhGD6ze7jr3OdWCaCrdYN8+PFplcthMBqnVuJ2rbj9p0DymIEbCFimU1XjMcHNM1tjuc3SKl9h9eugFleiG1QC0tGwB5VpWxtwWy6S+nWEG9tnqXlgrCbCAVwnKCLMBnDZILsn2O2v8/W3jmKvhB84nCzGSOMZ5bdfc3vz0U+WGBQ9I+vmW7dTRa6Xyb9eXi6vjrAtIOq1JBBAidQMQNUKxsnewI53+ti3+HY5eRUf7ez614nPVwL+ra/Du8xHNkJeet0e8Xgg999Le7ifbGpKX3MZsjfdKngUsJGn9/XFlnS0jgKsTkH3rROBZIWFEVBVVYYYzPZJkW8bzGqWDEY5xiNJ4SdXSazLaqqz1JNJlw5GWGloDAlo3KEYIi+F2UIAR/ZQNo2BYx1OfNzNdbVfWnMZ/TAOrAOVYtikZTHOypgouYNRwVrLOAQKtRYtHAYLCGW+Oi4X2LNSuYqBM1DNMwAI0MvEpMgSE+eze2TcAql0XyH9FoIhoSE3H2SBtJ+HxAqw7kjJ4iOpS9NSk5sbNY9dyY7gTx7GrCB6HtmvfYjMk0mCRaSSEHuuu5S0l4KWvoMO5FS7ElTd6/d8K+T9sC3P+b97MHWeCOs5sqhW+ObBpaWcXCUzjCpDNYIvmvxwfdyXA7kJur/90gsSemYELIcWpQRisGbLPEFIAx6sPnb
G+TGhstckqIhbr4MAGNC7veVpzH2f4KScPLXsbxFNVLqseKK7zCZ/ntYJ4zH1yhKKItcDkXzSbTJsENuXQ0Bogd/nJ2wjEooRqwWDzrjPbHLl6/xjW/9Ab5Xk1JVln0P75Wr1zn6x7+FkW3g61h7jl/+yit8/rOXuXb9ZX73G/+Mtn0nx/sY8FWY1vCroOeFNc8Cj3J+H37tj8B04phMLpOh597Oefj1FhZAP+OXZ56CL3+R6WOP8fk//utcnG1Rj0fv8J4Z4n/6s/D4s8Mx7H3AVRBy9p/Zz+ZjQf5PO9LUq/gkQvB9k3/o65z02c3g4ARRiBr7mmt21KGXO90c/lBjg01WsIFwN37tbje9OZ7+3wMcnTaP7JV6YMO8N8bkjejU8wZO4+b/Kgx9JHmzHMiN2h+jDtzP/PgE8V1Ql/tpRpVJDMyCp2iW2NTiXEk5mmFKR2VyX1/pSlxRA4rvWmxRMBtP2do+h1jDslnQdkLbGgqBLbGUVjh/8RKPbNUUVU1VFuAc0/Eu0RVIC6lRpDCMZmOssfjWE2Okmyt+1RB9S1ovsCjT6QRXlki5hS2mxNjimxwgl+UWRTlFYq59phhZLVtIUKUClywxaubMSI0pziMkOifEICxWyvLow4v4v5tpUnybtROCgaosGFcOYwXfRULQXAPtW4nKcb7AUsjzex2CU4tPCWkCxghlAuNynV1DQlJmSIiAcT0vxnLSltTfV9QOFJxPVD719cQcCYZV2GS/Q0DQrrOaoYueLuW20xQyvNx2EVUokmAVoo9E3xN+oe8T7tvyOGH4W2twaj60EM8DFtDI+L6mLGQQgxB8xvSjk76+BElzdGOMRQnAm9lrpxKiQ8WhJotwJDV5BF8fvVhRjB12n7wZSE9p2XR5wObx9AxTkRrDBVQSmApEKcpeDKNcUI7mOJfruEUBZVFSFhUkIfnsxGJoUA1oyid9DOB936jvHOCI0TzQZDefs46CkhSUxXJF193dcVaWJSlFDg4OyQe3xtqORRPpWLHsDrh9cEDbvJPj3QJa8DIMC4JogQnOGSYTx2QiONei2mz6OL0x6I6DEtzYYaoqa06e26Xa22W0s0M1nfFu57MxMJoMn7AG8rCB9C6sWZHsrLOfelB19nwO5kx3uHBTD2NlrVlBTuWTpyBgTkHCnMqEN5/n7t/fRnLauNpTNqSnd2WxJ+8HJ4mR9PyI4Y4NFH7qZbS/xjZZrwzPIz/3rnXgBFL/GFCGe00QChFKEawmTAo4HJWxuSuiv+7zBLMa1UiMoSdkFRQu9+O2zYpgIHVKMIZJWaHWUjpLOR5ji6LPwBQRi5EC1UDqGbebOMnmcXeIbrKpECMq/cxlsihDHgUpxBBIvTZxJunFDJV2gXa1zCM6TU1SRxdbutAgRimGCT69nG3oAs3av0257Kc1VWXdeoJRrOT2G1NaJBlMSmAkJx0WimgJ/cjNGDJRqcjyIzkO8xnqLTRinWwgZDEGU/RsaGfyRDdr+ral3Nt7ErBmdb40dAT0nInIcL0piUTUPPc3aiKQiJJHbUZNhKR5xGGCzMuQ/v7sjLOUaz6vY0p0IdJ6n5nzauj6aUcfxh6o463rgs985jxiAiKJ7bGhnjoKa3CVxRghFB5SIInFG4uKYqTBSaJyFZUrCRQ0MiZPG8rcYVOYHH2IUpncMK+bWlXshekFO6is9Ni+9JAIMgfzn4EkbLFA7KPUE089CRRFYDJrsC5RV4pzgpU/hZU/SwyWbiVoXBLD3yPG7+LjGJ/GxGRpTUUShzUXEbNNMrfQB7Hz9GawPMqzfJFfZXRxD/lj4NPbW72vXL3OD3/0EiGsge+RGPMTnuMOj3HEKbm0t9lt4BvQWPgO8BKbkUe3bp/jt3/ns0xnFV/6Auztwquvv8krr7zO/Nwu7Rc+S1WWfPFLn+f8M0/C/jkQ4Ry5SnxAnj20/QE/63qx4I2XfkzbrN/2t/EUnngBRuOhNvzh+dofyHqHlIVW0qZOmn/m
iDvESAz9Rb3ZEE+c0wk83QdO1mCRPkCUzXucdqwnwcZJEDp4uGEKkRmoqCdPylyKU/KQJ1n0wMCXu94j9kFDSokQ7p6LauzQojfMU83wbf9q+ViM+dA9jz+NOWfZ393i4v4uIXpiCggJ/BKxFVWxgxlVXHriWbbPP0KMAR9aXFEymW5jxHL78itcv/EmaERiR+UcT1/YZ2s0Ym9k2aktyRaon5CMY75o6KSANiBdYF2NSU1H4UpG0wmuKPqSQ162RNZSX6zv0HRKKiPFbIxvlizvXCemQJAxLhmu3b7BW9evsD485uYPXyO1iSee/kX29p7gyuEbvHLzx9gysbVvcYUhhBEpldy5Mef6G3fuWzvR8brhH/3gxRPCqj2lXNV7wiS5MmgM2J7hnHphDIPpiYcCvT8zzmxiwZNWNWFzuvZOdjh3h717iPFy14+e/E2zMx54PANjP/b+IGrqpwyxGcgQNseXr5McHN2NmNp+8McwKQkydyFq4vLh4YdaxwfqeIvCcumxHbquw3vPqHaUlcNawZQ2f5k2QIokDAGDlTyT0krCuZrClkCJoUbFghQgFuMsprBYm4cSnHa8sZ+Bao3BlQVD3+MmMhdFTIMxv4OYSFl1WLdDNemoJx1F2TGeJqyNVGXCWkH0i5D+O4gv6DpBOSDp9wjxJUKc4dMeUR3ejFF1qNnGmBFJygcS8Q8mGHa5wOPyPLqjrHfuEN5BY0VEePGllwmhA95AsVxjj2s89j7vsMg3zz19SbBYPMWLP3mO2bTkmadgb1e5efMWf/ijF9EnH4cXnqOuKp584jGeuedVbwELVfa5i2P1Dp/vZOtvmzXX3nydxdHR2x63uw+XngLGJZmt/QjcmxHeJ7uLmKQ5uxmy8dM/Yz87dGjrOflMdzOATc8ClWG3G2rEevJ+p979pB7WO87TcPTpyUCqChLJ/b/pxDm+CwNZVdGQleCGgHZzzPeQqTatJpt2raGmpqcmI318Zq1hOhmzszWlWS1p25gzn9Ai0eCcw1U1O6OaLQMxBlq/RkSoilyKmN+5xbVXf0QKHbFbU5cFE/8EYTaj3p0ykwlqK1QTEccyetbJYnxEQsQXDSZayrKiqByuyIIcJzX1XoK2XWCko+pmpBAIXUezOMxDBKZLnJtw5+A2b15+jeMbt3jjD75LaiLazvCXCl699hLfe+13cVXi/JMlRWWJcZuURty+fJsrL10hhvujFtb4wA8uf/Txgj/v9kAd77qLfPeNg1wDSInKCZMyZ7rO5ovRa261SEDE4FB2xFOgVLbFGUfE0lFk5yx57qR1uW/LGqWwubY09IoOmYeYnBWfRFD9De0JVjkTt0XEmIQrA0UVsC47Y2MSzuX2JPguov8xMVh8I6S4xLcvksI6q6fEQEqW0BUoFlPcQmzB91+9fp+b2d/HDNnHvABTtnmCF4jvQEIq2aW+dY51t+I212hNC+f6uun2Fnz+s3SLJW9evsJq9faM8p1tDryE9yWvvAqHh8r1G7fyhj9fwI9fwlcVr8HdzedvXoVbh9TTKeNHL9Juzd72yoZc1R3373II3Lh+ne9//4fM38HxXnpyxGe/+iizjfKzkoU63r3F6qexwYmd9L4Og7JPcQ0Gh9rPz81Qbx+x3+N0JbM3TjwZA9z7DkpRqhmuZHCu99R/uQei5u3OOL/M0Erxzs/b9A6fev/Tz7GnFH6clVOH/vb3+rhstV7x/Re/z1vX3sJ3bZ6KQy+4VJTU11/HVFUWpTP00K/vjznzMK5deYWbN66iKZJCR+EsuoxM6orXJjVboxIxDmMrEoalFng1SExITDhbMqqneXLRbEZRlqxWK5arFSQPfgUaMbQIgfHsZabb38d3KxZ3rpBSxG29hRnNuH1wi2u3rtPMF9y5fgP1ifTSd3jzxnVuHl3h5u0bmEJZrSy2MKR0iGrB8mDJ/OjwPvbxntn9sAfqeBeN55/+8Npmszm1l9yVBd4VwMvdIvzDBXz3aSTv9IP+
bdgUsYYa1DvaqVfcvI6e/C73nrj/GPitzVPzXzty8WWBcrv/2+mDkgw1PhDJvN4sWUjqa7DDebbehYT0ub3Er0ti0R3yPX6bQ26djO45vw97uxweHnE8X3wIx3sAfIumhe9+Pw/ZicNnPziE3/t9WuB73DNowQgYy3gypvwTf4zjZ5582yuXwBfIjvc2uVvpravX+a3f+ud9rfpu++wXz/On/twO5zklZ8UN8oikdwfSP7QpfR07vE18YmDgbZwdsJHS659+2qltMtMeWhsc+Om/DQIgm6qtSD+AQTZsy3udav9LZsZmvHAD8akq3vu7hEWyMIHd/D78vPcYNrKQA9tTslDO0Mt7+vkft80Xc37zm7/Z10zfoeZvzN17zuZ/J2h86muxp+01ucyJOMgJqnDq6XdvJXLyHWSw4qSUcDfiogz62xlyHXgpGZrLDPceHemVlH5y9RpGTOYPpHgCzXLqmPQ06nBmD4s92HYi8gDh+/NKn7T5/vZu9jAcY2+50yDX396NrlTkW9CGkpKSU+zrPusqnLur7+39TRmcWoz3NPuoZuo3/TyMtz3X440QfEcMb3eMp6uLOryG93jv8e/w+BDCO2zAqT++++h438Xufe/Byd2bgb7N5G6l5feyuwlX+RXvdXRvy5KH1/+Q5Jt3cqAnr6GbY7gra36f599PU/Qdz4OPag+VCztzqJ9akwehm7p5M5GbZJX9W+/32IfU9nlwx/6Uqp7/KC9wtt4fyj7yesPZmn9IOzvHz9b7QdtDsd4P1PECiMg3VfVrD/RN75N9Go/903jMg31aj/3Tetzw6Tz2T+MxD/ZpPPZP4zEP9rAc+ycypO7MzuzMzuzMzuzn1c4c75md2Zmd2Zmd2QO0T8Lx/s1P4D3vl30aj/3TeMyDfVqP/dN63PDpPPZP4zEP9mk89k/jMQ/2UBz7A6/xntmZndmZndmZ/TzbGdR8Zmd2Zmd2Zmf2AO2BOV4R+RdF5Mci8hMR+WsP6n1/GhORJ0Tk/ycifygiPxCRf7u/f09E/oGIvNT/3P2kj/W97GzNH6ydrfeDt0/Lmp+t94O3h3rNhwkoH+eNLOHwMvAsWXToO8AXHsR7/5THewn45f73GVkY6QvAXwf+Wn//XwP+d5/0sZ6t+cNxO1vvszU/W++H6/Ywr/mDynh/BfiJqr6iqh3wnwH/8gN67w9tqnpVVX+//30O/JA89PVfBv5W/7C/BfyFT+QAP5idrfmDtbP1fvD2qVnzs/V+8PYwr/mDcryPAW+e+vdb/X0PvYnI08BXyOPaH1HVq/2frvGxzZa7L3a25g/Wztb7wduncs3P1vvB28O25mfkqvcwEZkCfwf4n6nq8em/acYpzijh99nO1vzB2tl6P1g7W+8Hbw/jmn8kx/shCu2XyTPNB3u8v++hNREpyF/W/11V//P+7usicqn/+yXyeJsHfVxna/5gj+lsvR/sMX0Y8s6nas0fxvXu3/fsHH/Q9hEK1x+40E6egvQK8Mypx37xkyi4f8DPJsB/DPwf7rn/3+fuovxff8DHdbbmD3DNz9b74V3vT9uaP4zr/WHX/NO03g/zmqvqR3K8vwb8xql//zvAv/Mej/+zZFbZy8D/4pP+Ut7ns/1xMvzwXeDb/e3PAueA/y/wEvAPgb0HfFxna/4A1/xsvR/u9f40rfnDuN4/zZp/Wtb7YV5zVf3platE5F8F/kVV/R/3//7LwB9R1X/rPZ5zVr/44HZL7xkp9WHX/Gy9P5R95PXuH/Oxr/lkVLO/t4vrh9MDGGsRY7HOUdWjPAfXmHefe3vvUb7Dw5ImosZ+MHveMJpuTQie48M5d24ffNTq2F1r/rCu98+QfeRzfH9/X59++umP/UB/Fuxb3/rW29Z7MPdxv7mI/JvAv/lBH18UBc65dx603f9M2g+kViAqooCc3juGR/bj0oc/qCF3UJ1+7Yi8bby1RZH8OB0ePrzmqefqyT/l1D/ebSfIf00g8dS/33ljbDr/
+ru8zHvah13vM9vYT7Xe8PGuuQDOOYzJ56MAX/nC5/g3/tU/z9Z0wjDsfry1w2i2zc658zz5wuco65pqPME6h0oeDC8qm1NZEqAKIuhwqt/jw1bdkkU7J6ZECJ7Od/zk9R9y8851fuPv/UP+n3/r7xH8Rxo2f3aOP1j7yOv95JNP8s1vfvO+HtTPqonIu673R3G8H6jQrqp/k16Y+v2i06Io+FP/wr/AV37pF7HOURYViBAVFKFLEBLcDonX2khaJ+pXAnaeqIpIUQQExUoCaTH2NZA5iOTXiTu07VOoFiTN+05RXKZwV4GEaAItiekZNO2QkiEmi0jC2ohIPg5FiMkSo8OYRFl6RE453mRIyZAfnbILl0yec/Y6zr2JMZoDDGMwYhERkioxJhT4v/79f/pTrfmHWe8ze1+77+f4h7XpZMyv/sIXubC3i7MGawx7W1OaowPiaoFxBcZYbFkzmm3ju4ajWzcoRzW77iLGjTfnLKIn4aPkYHH4t6oSiSi56AdwvDzg6p3LhBho25YQA8v14n5+vHvtE1/vn0P7UHvK1772tbP1vg/2URzvN4AXROQZ8hf1rwH/3Y9yMNZavvzlL/Hf/HN/jrKsGI3GIIagQlJhFaGN8HoXGS074lFiNm4pbkbGdceo8hhJOBMRWWCKbyDmJogBEbx/lOXqq6Q0IqS82dT1D6irP0SISIqgI0L4FWJ8jBgtIWTn6gqPCCQMihBCgQ8l1kZG4zXG9I5XDSkaYrSgiiEioogoxiSK4iWqssJapaoqjDFYWyBiiCnhQ0RV383x3vc1P7P3tE98veuq5EsvPMfzTz5O5SyFNXRtw+L4iBbFlTXGFcx8B0DwnuXxIcGPmO3tUTI6ca4beKZHgZThfySUqHGD2wiwbpbcObqJ9551syamiPf+4/y4n/h6/xza2Zp/AvZTO15VDSLybwG/QQ6S/yNV/cFHORgBxqMxu7u7vPHmm3znO/+Ykff8ihH2LOgXDfqcwbrHiOkXCdGhjxoYBdLSEVY5440oIiXSvYCYR0h7lrRr8GmXddgjaUlUIaEE9wydKzGqmJSAkpAeR3WHFHPmakgk40EgGkvCEFJBSAWuDbgDh2lTD48LMRpicAiKk4BYRfeBKUT7BL4A2yndoWCCYIzBiJA0ElN8oGt+Zu9uD3q9jTH80i/+Ap/9zAuslksObt+iKhxJ4ebtA+qioCospIhqIp9rEVWYHx4CQuEc86qmqCra9ZLRbAtblFhXUo5GTPf2sNZhJEPUiQxD++BZNEtUI5bsm4MPjIoxxBWH3R063xFCIKbI7oVtfulXf4Hjg2Ne/8mbtE37kT//2fn94O1szT8Z+0g1XlX9L4H/8j4dCyLC9tYWFx+5yO/8zj/n//gf/J94ZH7MFwp4vgb5tx38ssU1f5bAHyEUM/QzDll1xB8q3S3NAXwSlASyDybiH7P4pyzROjpXksQQTa5tGT2P0S9jFVzMVbRoHCoG0YT0DrmIARS6oszOV/LNXQ/Yf77ALBKmLx+n0MPQJLAeKZW474iPG1T2UfMFzEGieqNFjhWRCJIQSUPx7YGt+Zm9tz3I9S6Kgj/7L/0Z/vt/5V/n6ltv8cNv/wHz4zmvvfwqr1++xrgqGZUFdWHZnpQYgeQ9gcCd61c5uHENkxQbIq4ouP76q9TTCfV4Rj2esn3+Ak996RcoRyNcWWCsIZDwJNZ+za3DW4TgsaoYVZImZtU2ySeWqwXrdkWMnpQSl566wJ/e/2/w1quXuXX99n1xvHB2fn8SdrbmD94+dnLVh7K+FivGME2JJ9cr9sISd0nwO8Lx5BI34z6LuIcksDFhmoSsFOkRMHWglaBiUClRo8TakGy+qe1ZWAZUBNWcUZiO/DopBwBIVhcxvfMlZGcoVhGjSK0wyih2UoMkRaUH8wREEoJiRHP60CosATH52FaJGLJTFzHZ8ZqEkY9EVrnLnHPs7Z8j+Y4UAyCZT2MM1mWY27gCY13/e44cBl5b
8J4YAil6om8zszX1gUFP1BERxJpM+zE5iyrKnHEZEZx1CJkhCxBCIIQcxAwVxsysV4LvCL5DNZFShIF+T67Ha8o/Q0yoKkkV3cCliohgba6XO2swpg/ASPn5fTyThtcJEf/RyEEfyXZ2dnj00Uex1gJKWZZcvHiRqqqYTCbs7O5hjKGsK9brBh8Tad0Sg8VKQkwuwSBCYS3OGExSTIzEmFgvFyRNpKDELlFWI9rVCoAYPGIM0ShByOtOXsm2a0khoCiqiaZtiTGSUiKlTG6MUQkhEaPyUzZGnNmZ/dzaQ+V4VSEqdBG+gvLXHcijwux/arn5uZpvbf0rfO/4z6PNhGqu2PmC+ocNxa2INIoY0HOG8BmHFhCsJQnoCJIAmrA+9Y5XegdrAIs5SNTf95hGSSk7KGMC1vROIimqgtFAwhCfccTPOMQnUsy7kbERYxLGeLCSa7o2ZFLLGyX6lkMRDAJBs/OFDc/a2khRdu/Cc/7wtrO3y1/4i3+Rxa0rNPM7pCQktbiiZvv8k5T1hNm5Rxht7VKMKkbbE4wVnM0FwMPrNzm+c8B6fouDG6+QQkdcr9EYEbUYtdjCUU9qxFpcVWGd48KTz3L+sacYVzXntrYxYvBdQ4iRg4MDDg4OSSmhKaIkQvQkjRzcuMzB9TcJvmO9npNiwAdPjJEQDKETOh85PG7xMdG0nhAjqgFVT+EKZrMZZeHY2hoxGpUkWpKuSArBZ6fbtkrwcOdgzrWrt0mfkOf4tV/7Nf7qX/2rzGazjHYAzsDhwQGI8Ngzz7B1fMytO4cYV3Dj+k1u3zrACpTGZHTFACKc35qwP51gAUc+9/T2Ae5oTmGPKExFu2iY7p6nGo9ygCtAVSCVIzmDqwrUCjduXmN+fEQMHh86utCyXntCBFUDKhzcOuSNV69x69otuu5jrfue2Zn9zNlD5XihbxVS2LFwfgx+R7j+wojFFybcWTzJ9dWXqUPHTljhfMQsE7LonalTdCTorpIqQ7RCEsnePObN1WhCBcQJxgqqme1pvOKOImapxJgdrzURY3LNNQ2M5gig6BK0AwkKRtG+MCbkDFdMJlSJjZlTukpIiKCCqsmM0lPM0r7R4yRDvg9WFiWPPf44R86zqnTD0i6qCecefZx6vM3WhceY7O5TTUdMzm1hraEoBDRxc+syB9dvsDisKeWY6FviaoXGiNUCo46iLhjNJlhnKUYjbFHw+PPPc/HZzzCrxzyyu4cVoW3XhBi4efMWN2/eRlMkpYBqwseWmAI3aqikwbdrFnOIIdD5NpN6OoNvDU0b8MHShQhi8CHkgCIpZVkwGY8oy4Lt7QmTcUnCEYEUla5TYoTCKd7Dcrk+RTL6eE1ENmS6wS5dusRXvvIVdna2yVl54spbb3LrxnXEGOrJmBAjVV3jypKosGraHKTBXY53Uha0ZYUVSGJyANg0OOuIkogE1pMFq+NjovcbdMmMCkwokarAVGMA2q5jtV7jfYv3LSEFOp8z3mGtmrVnfrxitWxye5LIIFpwZj/DllJivV7fdd/bv/af9jz4AM3l79Os+dPaB3r2B36L93/gw+d4EQKG8GVD+z83HJcX+L3t/xY3j57hZvNLTLtI6cGKw9QG/xkhPp6dlYoSpwa/ZVHbw3AK7q2Aeyv364qJaAX6bEHcshgMolCSGJUeExTfOWI0qFo0GNKWEJ41aMXmNU2juO/4DF0/l7MAuWz6mm2GmEUUTWbDFDWSn580b75WYg9pC6KCxoLQ3a98F4xxjCa7xK0DnDYkKUlSU1RTZvuXKEdbuHpE0oRqRDTk+ngA1UhsFoTlAXE1R7sOkyLjyQRnDePxeep6l3pas31hm6IsmI0mVEXBzoVLbO+fp3Qlk17Moa4cmiJ14Ti3s41qQlMgaSKEjhgjFZGuWbM4PuR4PsenhNgSZxNgEDGILdhJBT4kqqqhC4HWNzQtFGVBUVmK0iImkQiIQGEKfIq0TYMPMWe8AXx4cDDzpUuX+Ct/5a/w
5JNPbu77zGc+w2g04oRHbNje2aWqSpaLBbdv3yKmSBsCrfc0Xb7JUA7QzEZW4Eo85vB4neF9wFnDztaEuiyYFjWzoiZcV9I//yauLCm3ptiqJJWGVFnK6ZjZYxdQA8v5MW2zooueNnas1iuu3bhO5z0mWkSFm9cOWB0tiT6xvbdPPZ6yODqibdbv+PnP7GfDrl29zv/mf/3v940iQoxKDLmUk0LMWRMeNOTEoy9g6HCm9tIIg2/SjQCDbGpJmXR/9z6ofaBIXzpCQGwfP6og6R59hc17DNcWJ9oQp/9uTh5x+h1zGS3/Jj15x7yrII1ksiLDk95/BMLD6XhV0GcM8XnDod/lRzf/DG8tv8LYK6OouAgWi5SG+Jglkp2uokRn6Mr8wSX0tzsJ+2LEiGJcRCeCvyjolsH0tUonSuUiplA0WkhCSkKMQqoN8WlLmgxHqbg/9LhXInrOEL9uoRTkKN8Mfeaaj6LPqqVPMnLLUT675G45j+SI3nK/WhPFGKrRjDiZYuIEtSPUTnDVjNH2Ocp6hhjJMHpKkCKIoDHDwKlbEZsFqV1Dl9upxrOKqqzY3rvIbPsxxjtjzj2+R1UVnBvNGBUl9WRCPR7nHmWTu0I1OTQlJqOatKtAdryqivfZ8S6OD7lx8yYhKkktMUkvpgKWof4Pk1QSU8I4QxdyeSCkDlc4XGFxhckoBLn32pqCAHQ+0HWBzishKDHFB1afPHfuHH/xL/5FvvrVr77bt4UITKYzJtO8dtnxplyLDhEfAj7EzeMVNllo2wbu9PdayG1HKTIeVaRRwil0Tcv6+gHGOSYXz+OmY4ID72C0MyOUYApHs17SdS2dBtrkWTRrrt+6Q9u02Fgg0TC/M6dZNiSfmM62qKoR7Xp95nh/xu32nTv8p//p30YsGCMEn+jahCYltqHfR9YIHULCSBYxSnmXzmIup53vCSkGYuydrzD8N5hK5pBkp5s7TIwDsYJEg0nmXZJh2TjLoetk8zjTP/8eWaWNI95wjmzPH3Gb/ezudwAjuTMFzMZRv5c9dI43JKVLmmtyavBBMCFRhkjlE3XQTCBJfTSlOnB98hcbEzYACJrx4bywpic79ZIWaizJ2py9KgS1tF2RM95zjlgbgloChrhj6GoLDkyKme28bTBPCToVtBhOknwbIGYUYhr6eQPGQKSAAW5MkqHCcya3GjFEWffPG6SYCD7hu4QbFVSjXWw5yuQi7/soVHGV5JqrCGrzfdVozGxnj/GkZmt7hDXC3mxGXZRMty8xnp6nHJeMt8YUhaOuRpTO4Yoik64A6QlSKeaLMvW3TaYPOFdiTGI03mK6fZ62jThXE3zMJDVRUlK8T6CGqioy6UoSLoCPQr96kJvJcgSelBgjbds73C7Q+UCI+W+ZKHb/zTnH1772NZ577rnNfU8++STnzp17Z0U2HUhmA+6tWGsZjcfsJOW5z7zA/oULbO/ssX/+BovFguvXbuKDp2ty0JI5CHkDwUBSZdl2BE1U4piYEpuEFAWTFI7m2KbF24Q3iXaxIKWAOMtyvaDzntYkGptYLlfE5ZrUeYwK4E7Wrv9uUwxnUPPPgW2u6ZgJdpoU0+cSpjI9YlagUTOC1qsH9mFifpGcc/S7nCDWghFM6TLRNCka0vBXQLDO9uRPRQlAIqUuI3XD4+TEyQ7n4uC8ByfKkGHrqYy2L/mdznpl81pDpmuyA+73blU9FRrIiZMWQcy9+fPb7aFyvAp0SVmFTDryONbe4tpI3XnGTWTiU/4aJS/A0IdIylCG9EViRQhiUJW7Ha/2UIB1qCsyMzYpXmGxHGGSEp826CWhcwVd4YjG0JUOUCrfYGNALxp0L0MP4gTTDV8giFGM86RkSCFnvKUErIlE0dxDrIbgbW5tesKSnu5PmPtGrerViHykawLrdWQ2qplsP4rYghiFGLu+PSRgHcQwRjA98UyZbe8wKmvKyjKZOkrr2K9n1K6kGk0pqzHJKsn1NUxnscZgOJXJ9xTi
5D0xxUyUihFrLUVRYMRSFvmC29q+wP4jnhgNZTUh+EiW10zE2NE0gaIomM1yrbTowAdDFxbIse/PhC4DW5qz5qZpWa7W+BBZrltCHLSH6Xum77+zqOuav/SX/hJ/+S//5c191lrG4/F7PCvvBnnDSBSlY2d3l63tbc4/coEYI1feus716zd5/dXX+a3f/G2WiyVHYU6MXQ6agqIGMBCTcrBcIWvBJcOYAqdCmQwCLNctIuDVE7RDnOXGK6+CkU1m0o4s67Gj6zr8wTEpJlIhGNNvjAqqieg7os/HcGY/+yYKKSgpt5JjBcQarMtoXVxBai2aGlLq6Hs883m16WXIgaYISGkzObMeYcsKDYG4btGkCFk+uK5L6qroQetEioFmeYfQ5TLUUIoy1pKTrtQ7xyGplo3TpD8i6Qm22SP0jr5/iEjvbCU7XkSwziHG5ms0JQTB9LCyMXnzFzM43ve2h8rxQs5sQkysmTLXZzkOO6RUYZNiFEwCDQptriugnPoiFUrBTCAZPYmqamALhrhGxwZ1kuGLocXEGeLUZHJXLWgJsRBiYYgiJJMxktj/1FJIziCaWcqSBDsx6I4SC4MpLEkNPrjcwqMBULQJyCpDI2KyaEJ0ghYWlZ4Mdv+WM0M3nMrIU+4Vlh7qLopcKKkqR1k4rDM4JxgBWysFhrKyVKOCwlooKpJx+J5Mk6ISNYc/63WDqFI6R2EdzhgqlwOWQWQ/pURMfXsPgojB9S1OCXCuwLmSoqhwRUkITc5O+8cMog9o6p14rhNndTBwzuBs/rS5LUmJMR9n6i/Gk1ao+7fUg9V1zec+9zkeffRRdnZ23p7hvoefHzYIVcEaS1mWpKQUZYkmZXunwfvE0eExW1vbCMJysaZru7tefEg8Q0yQYNV55m2DVUMZ8/nqjEGMEFNH1A4JBkvMjrc/VbxaApboA+JjRppswhqldJbxqKYzQru2aLJUVUmKgRDjR9VwPrOH1QSsNRsUKjum7GyKymbEWA3JGDT1SBeZbzGga5tLQASMQcoKcbkrwpU1yYbcvZlAsAiGuioz0jW43hRQX2OA5DOfZ+A9nOTSd9duh58D56bfAk9lrSeHNWS8G5j6nv+fXMb51ZST18+v8SnKeAGaLjJfed7UX+C78d/DBwvteZxP2Jj7bO1Nxb4YUA8x5Y2q7/YkXjTolwxSCbFfOH3ckLZyP6mKJTmD7hS5SKAJiKQdS/v17IjTlqJW8cbgjeSsVAQMdGIRpwQD3mSHZtVgEtRfUtxzfZ+uOJIxdGWJANuNZ9QFyrc8o5dWaDRga9Q5oh0RgWAcvnT3MQdTUooYYynKGmKkOb6DLWrqrX1s6dg6N2G8VTEej9jayqxm1/fCEiMasrgHJjuxWz4QkycumqzsJUo0keA7Di5fpV2uuLC/z7ndXbanE5585DzOmo02tg+RpmsJSWljrw1sHQbDsumwtqSuJ2zt7mOc49at66yaBsRQjWpAWHUtSSPH8yOaZsW6XWGsUFWW7VlFVRZZZSkGYozEADFkBxyjUjiLtaZnGN99GX1Ue/rpp/kbf+Nv8NRTT73vxfd2kz7CNxSlZWqLDM/HSErKxYslOzv7VGXN0cExt2/dYbn4FqvlavMKScGHnPs3eAKRLkRuLBZYFcpocGLYG48ZFQVGO2zqsM5SxyrX7VKmwrRLpZnnDdbGiBPDqIwUVtnanSGP7LNarXnLKU3TMJnWhBA4uHPIzeu3zqDnn0EzRhhPC3KCJxhjcdbl82c8whiD+g4NgZQ80TeZe3NKjnSYamPImSQuB4G2KLBFkYPzHpkaisKFKzZ8D2PzvrasxnRdy3q5YrVc5/bDLusNGBnKfifOc0C6B8KXGNMjoGy85mlo+nQHwmnLvn0IAfKzTDK9HsMHW8eHyvGq5hpvExLztMONeI4UEtu+ofIxi2REQdYJcydBC8ScKalkmE2m2U+I9iilgFTADtkhmhyNUZzEQgpomdBzfZRubMYczIDbs4Hg
kskAQ7RKcBbpa8lGIewYmPUCAyokK3SlgCijleJ8wh1ERCJiLIgjWUW0hOhQS86o72cmpjmrNsb1pMAAGrFGKJxlNKqYbo2pyoqqytDvIEKBiWATSWPutSWyDIEuRELX98VKIppI1zRcu36T1dExisHaEmcsKWlGgpSN6EVKiZASjff5IjEZtsnEoUxmKMqKoqxADDEp1hisc/m5vUPtfLdpNzJWsFYoC0vhDN6zqSdr6hHvXjhjuKjMEO7eR/8wmUz4+te/frL87+Z8Tt99V0gugMUYKAyZtCKBlJTRyFIUNdvbO5zb30cTjEajnBmbkLMMzXVtRQkpEUgsu44Vvs94DYXJ8L4KuORx6ik0UbqMusSU+oEdeWgHA/nEgFXFoRSFo5yOEYGqLlES1pos3LFusM5maPpjqqOf2SdjWSCnLylJRmacLbHOMhrVWGchWDQGUiwIwfbXfdqc8gNxyfR1WXqGtHHZgSfVjIrB0MeJMwXWunwOuryHJB9wxhF9oms8UXP1d8hC78p277redPND+nbSk78N2eqp2vC9duqaPsl3T93xAeyhcrwJeKsNfHfRMVfDOBhkpYx+HHDHCZd62vgCCCe7lSDExx3pUm79kVIwnVK/FmCeKPEUEkhbjvB4JjeVPmBjJAJRYFy8ysXyNyhEcO7PIfIC0RqCEeYKb0bwKpiUoY+LZcH5SrDkIYIGkJhJXz4EuuDp5gcc/PA7hG6NffQ8ZmeWiQkejCplncAI8a2E3k6kS4nu2b4n+L6YYGxJOdrCGMtsZ5f9i49SVjW7O+cpyxozLjCVxaPcWiwzqa1vD4iNJ3YeHzzrdoX3nttHR7TeU9gJzoxQk0gmsFoc873f+V0Oblznl778i/B5xcREeuJxEJvZgAJ1lclHXQioNnkwRMoX2tHBMTeu3GG9Ouqj3JK6HhGjZ0N/iJEUPLGHl42B8ahkNispi1xjCtEzXy1YrBpiEEKwxJQjXGcM49GIUV2yXoX77Xff2zYXrJ5E/+/aepD7vXPuaTJ/wQpWDOcu7POVr/4yq+WSi5cucnR0hG8bfNdy48YN/vAHP2C5WpLmt1n5mNvDYiZceZSkkcNmzTq0pOTR1FEaw3a7zutT1RTW5nM0t54TLaCJddPSJKU2FuqKpJGyLk4FV1BPavYv7rOcL3n95Tdo1s3Hv7Zn9kDM2YLd3YsUrsa6ks0AGGcYzQpsYSiIWBLRe0KzykGgTaj05UIFK4ZCbB9mZifpQyL0pas2JRKnCVwOg8U6Q1laBKUuLCkG6rqiLkf44GmaJTElYhNIMRGDJ8a+ziw9IarPTI2xOUNHUXM3mes9wapTmbAMdeSBt2XuybLfbR0/0rdwn00VbvjES2tPkQyjaDBHSvVyxN5SjJ6SNNx89hw16SOW+AV3gt+vI+61gLkaKMpAUUTCRUO8ZJBSKELAonTOEK2htpe5NP57jKxQuV/Bmi8RJTvlm0m50WWo1USLUXiksHyxtjgB14/+i+Rst+s8TWtY3VlT/vgPaFZHNNNfIeyN82Yb6PtL82CF7kYitJonHD2tma1wX0wwpsBVU4wtmO2d55HHL1HXI85v7VEVJSuNNCnRdS2H6yU+RFaNJ4SEX7SEtadpG+arY7qu4+atWzRty3Syz2S008MLHcdHd/jud7/PtbdeZ1ZPubj3CNujcS4pywDrKqUtKQHXebyPhJAIsSMlZTFfcvPqLWJcgQrOFVRlSQx1jyJkhzVkzQDW5B7h8aTsCQ65hrxarzleLBEKDHWvRJahrbqumIxqqnLVXyAPwPX2hIShJAIDkU438NZdD2fggpr+OVki0hrY2d3h3LlzxBh5+tmn6dqOZr2gaVa8+OMXuXXnOncODEfhmFZbYs+LSEpPPIOFb1l5pUueLnXUInTWUFmLFYOtakShYJN0oElpO09ICaqSMmTFsaLXfR7IKLvVDtWo5vbNO1x969qZ4/0ZMmMds9k+VTXDFSOUgNJhCqi3DbaAkYXS
QOpa/KogkfBFdrxOwSYoxFAah0VwKTvj5bKjaTydJqwmoio+xLyHqEXUUFjLqCgxBmRcgiSsLbCmwoeOZVMSQ6SRhtB6uqREn3XEc5AgmIGhLKaHu5UkmVA4wNPvCjuKZAnh3vlKShtuxpBif5AS00PleAFczK1DVsGq3fTEGsmkDtcPkVcSWEH3Fa0FEyPF68OGprBWpEl9wV3QZGAF9kpARoLfhzSCSWU5VxVsl47pSKnMmtL9Ls4safUFGv0M9Txw6c0FXRepbMAZZbRVcSuMqJywUzusOekGU/I+m8oR4dLThHZJnO0Rihq3DekJxXiB1kIyfa9pwpLr2GrukyMQIZmCZEckHB0Fi05p1RPiAmssqy7QhMjxesWN40NCTASfa4qp9aQuq0ctmwU+eNqmzaPhRglrDDF2dN0xfj2HlLBiSDHQNSvWzYrFeoUacDZnp4P4g1hLVVUURcIUjpASs9mEyfaYrg3Eo0DSjqouwI5YN4G28cTEiRJZ1MyuDEryCbFZb1rEYCXXMjXlOceCUBQF1hgK5/rh8u/f6P5T2T1f3wZuvsv5npC77oWj+5h7gzzLcG71DHzt1c3ECFVVYq3gw4rUBGwJ050xyQV0HGl8w/K4ZXXc4tvIet6iKeE1s/yj9spsqnR9MNOEgBGfW8sQkukbOATWqcUbsxkSokPmYG3e0Hr258nnOLOfJTMilEXZz4Y+CRAFRbtIiuBNPj+TD/gulz18jLkcqL1evEAhuX2xLAucEaQuKK3QaaLQmMuOayEEBTVoMrl1KYZMxCyy+uBoVAAQfEFZWmKIrKTAd562sbSN3ZCyyITknAADbK7GrCaInBbJGK7E/Gt/NaCDWEZfthJOZ7qfSserlD4ybjzG5JqdAQqb2ZSlizibSFEI3qC1IX7eortK8VKH++bQ4iCgSuiyPCTaz8c9gGLRESdC/Lqj2zI8NnU8s1VTu4LtOmHNIZX9TzBScxz+R3TheWbLls9/8yqyaNnZaqmqyGuPbPPSpR1m45Jif4tR0UdQkt8/qhAn27Rf+BXa6Gm3a0JV4B5NpFEkHQE/BlmANeSMXCw2JO5XS5FiSGZMLGqiU1ZacWsVUAK+WRJDYrHoaNae48Wc6zdvoSlRmp75KgkrCR89bVwRUmLRZmLUzixRlpZm1bI+uk5zfIhJkdIWxK5jNT9kPp9x8/iAdfJsT0ZURYExGVpyzjGbFSC941FYLtYcrVasjiLNcUNizWSrZGQq4p0Vh+uUyXRBwSdSl29REsEkpABbOzBKaSyVsXRJ8CFhnWVcjyico65rirLE2gdw+m+Y9z1N8zQfUnMUnh82MDFl8/2b3vGpBUxuDUuaem6KYi1MZ2NUE6v2EH+8xtVw/vFdtroRT48fAQfX37zFtcu3mR8seeuVm3RdoksdSSOm34CiwiokOlHKpsVH7XkOWe882Nzit/SJNiaKRUM5X1CNRuxfPE9RFpvN5+SDn9nPmpm+VDNwRgZUBhJp5UkkVAO+Zx6HkPXYPZndXOYJ5YBQkLCFZVxW1KUjVZYkJUETa434kDjWjq5NxJiri4ncviYJ7KTEVYayrNneHhNDolvnASHz2Sqrva3XNOsmy87GjpQS3bwldoGkgdQHnvTws9zlePvPvMlkYeA7DHcMj72rnegDbN8PleMVBdMlimWEMmJG2mPovfwifUuKE1Il6AS0VrTMURSNbiIZFaAW1GSYTUL+G14Rb6iM4ArLyBlqZyjdBGuexMqo59tZOkYcq2CTMu4U1yZGjVJpolgE5KgjRmG57QlGGVmhIEdXIoKxjnI8RVPEVJbgDLZMhDpAo0TSSbYjinjFzhPq7l+eEJOhDREfI8m2pONe6ajJJ+hqkeGd+XzBcr4ATQSTAwgrEUPCp442rAkxsepaYlIWi2PqekS7nrNar2m6Dtc7NWsNmiK+8ywXKwShNIaUEqVz4HolGDfUWnoCRZHlHq0ThAgas5NRs2kLClEJIfZTk2LfInQy
rSjGofZrcG6YnWywxm5m0Go61Vr0MdimYeKkpHvqb/fccfp3uecnwClCWky5fSpEpfOZkOZc7stdrZYcHR+ybjLD2xWWsjKYQhhPR8y2JiSfKCuX68bBEtLpvnf6LoDcS29CzPifSpZi1dyJ6WMixITEhImJItcScqvX4Hh7xqexltFkjPeRtm2JD1Ci8+O2UX/zZMrJz0uYMbTsAZmtKENkuSny5VuvST/kiUNmnDUCchum9BJWuRabMAX9gBZDBVhj6MqISUIwio2aVQ1j/159i6I1BidCxIDL53FVO8TBMHE6pogJQowJbXPwGhNI6ueXmYFMpac+6yCMMSDJw6fo2diny0Q9jD0Qrd7P+T5UjheF+nLL7HsL0v6I+EwBEYxGhJiJMljiBYN/wUAtmcWsiZDILTrkc0HH4D9nSTtQveExlz2qQlJwzvLMrGa0W7EzdkxHBsvnEf13QbtMLALe8I/xrWC5EC2/YipmIlStowiJ2QIuvHbM+kLJDyYKW47nqwn7riQZcKVlUtY8NTpPQkmFQY1wE8/lrsWuAqQlLsQs4iERezNQL+4fQJeSsF7B9YM5B4s5SANm3r98Xquu9QQfadcty+MVqGL7AQ/eN4TY0nUd6/WSGAPtekVMkauXX6eebfeteIqkxGx3n53tPbZnMwgdy8MjXn7xFerRiP0L+4zGI3ZmE3ZnE5xzjMZZCENs3wdYGsaTgrACG9dIt8S3Bq/CetEyX7WEpqE9WpBCh/cNMbVYB5UWhBCYz7sMY1nHzmxGFw1rbzZjBb0PrCXLR7Zt9/FJRm6c7pDJ9jrf0EtVDmzfXPDPw+l7EhVDopydbtu2hBho25au7ViuFty8dQMjsLe7Q1kUfP8Pv8cP/vDbDAM6jGVzGu3v77K3vcOt6wesFmvWqzWrdYsPgWbV0CzXJGOJriAh3AkB0wVELCKBYcyiCqxV8aKIGCpbgi1O7TImZw5khz2eTnnhy5+nWTW8+uLL3Lp242Na7AdrAnwB+DLwFvBbwM+LUKYxwmRm8U0g+kEpvNdOdi4H0hQYlBAC6i1s+sMjriqoq4IiQNFlFbXjRYulYbpTMq4KamuZlXVm8ovB+0jX5hGerYf5SrIeVogEElUB41JIVimLRHJKNamIpqRtK9pmTPCR9coTQmRdrOhaTxdauq4hasLEkDsmfEdKsZeHLHIZ0PZOV1J25JIVrDa8aTkpoSHmA+3eD5njVdwqUB60xJEla/vlqEVE84CBJKQC4q5AmdsbSHfj9jnjFdKWkM4JegvEpOx4EaxTpmViq1LGheIsiO4g6Zc2bS+KskiGG1EoNRfjbT9blpRw60C98jQVHHQtyScuFRVbmlnPYgQjlnHZK3n3rUlHztA5xRohqEHSEDUK0iTc8j62XyiEAMtVx9HxipTm+HQng0N9K1Tw/di9JtCsWti09yhNt6LzDW3Xslouc5/cekmKkSYGimaFK0rq0ZjCWM7VY0YuK1JpjHRty/HhMW3TUdQ1XcotK+OyRIEyFf2FmhnP1gquyAIeVjSrycQ8Azb2M2ZjyNrF0YdTWWvvplTxvh+M0LcmZKjUZvZ0F9CU58jmDPpu6Pd+rvtpE+jnAQ8iIrkPcXjooLijmqPmDQVL8zzcGPMMY+89bdeyXC65fec2Ql6zqiq5fec2165fo65Ltne3skBGH7FXo5JyUtI1HdNZnX2jEbwPaIj4pssO01oU6HxEY+rZmoKVHLAiEM1Gpj4L0CAbtMFADzXl47euYHt3h3rUUVVv3f91/oRMgG3gMWBFPynq58TE5Npq8mRexVBLGVAP6bXqgWRBYuzj/ExiksJiS4c1mWQlMRG6rGZXp9xiaYxQuSw8FOtA0e8Htg/qGpsJTj7lsqO43OKWyPwYBaSyJJehcWttrjVHQ7CR5FPmd3RZQ1pSJJEFeGLQjLT1LX25ZjsUfwbHKyeOlhMIevPvD+B6HyrHKySK8nVG42+R/AXSm89BsBjrYap4X+KD
wy8t8Yd5dqikHrK41e93Qpa3SuDeTHBbMXc8CU+zU3L09IxytuLJ3b9Daa8Af5IQ/kQu+vfUde2jHx8NdRQolBvPjVm0EWOzfvC8bViuoZta6hSJjXBdOo5irgKoWNTkMXxW4AkHezaDE7HX/iSZfHb2x23UoB8kXPqAljSxXq84uH2bG9evYaTBygLViA8dUWOu9fqGFCKxy321MXV5apD3hOBJKeL7lh5MdmamKnCzMZPxNvvnHqdyBRfKgpEx6OI216/dZDpbYa1SjUYYq9TLCd1yxfHRkvGk5vwjO5RlwdYkTzWqXMHOZES1twfPPk+7WnDzzgGL9TrLW3pHqBytdKTQkXSM0mW1qtLifcfxPGfkRVFgbZ49PBlVmemM9s57YFM/OOgzxMDx/AjvPSF6Ygw54+xayqLk4sWL1NWILnT44LHOUpUlIUaOlwc0bctysWK1WnPr1k1+8vKLxBh4481MdLl+7TIA1hnG0wJX5pGN1hkKLE4cFy7s8pVf/mKuuwWLJuH2nUNu3zpiuVxx5dotuq6jSQ2RkM/jQfyzPy+HbkzfBZbznDkoinMOV5YY53DWUhSu7ynOSMPPEhSrwE/IWe4BWU7g58WMGEZ1SWEMoS42cK8qhC6g6WS+eM+xygFc7POPJJgklM4x26ogJVqrpOBRY2i6hJIoehGMqqwpneK03bSFatAswEPmHZRJIAsDbgK/5BMpAl4xMZcAJ6WgDsZFQUrKcr1ivswDV7o+023bihD6F5MMk2ls80ncw9YiFjFuE5jmy+PU3+T9Q7GHyvEiiaK4QjX6PoSn4eouQoGYCGNl3YxAalg7/CtZuEI01wuGXFcloSbXoNyVkJ9LIBFptwoOPjeins3R7d/A2d9DmBHjrxNT7iPTlOg6T4yJkAy1Cloot5+sOVYl2fz6oYXYBIJApYnQBG6Zjk7JGZZRkrG0aihF2Daw15MRohiMGFDbj9FiA1fYDdj40U1TVhQ6PjjgzvUblDYwKjpiCixWR3nI+fqA0M5BE4YseNB0mYyQQsr1lGGKh7UUk53MYC0L3LhmtL3LuUeeZlxWXCwdI1Gu/eSY2zdv49s1kxH4ZoRxBc26YbVsOTxu2NqeYMeO8ahiVFWUzlE6y9aopmab8oknaddLooA9hOgdsXWE0tLaQIoepUXxqEaSRlYk2s7TeU8RsjOYuppxXRJiwoc81cR7TwhZhONBWYyR+XzOuln305gC67bheDFnNBqzvbNDUZSsmzXrdk1VlThnCNGzXB2zXC05PlqyWKy5duMar7/5Gl3boOrzdye9Yq0z1OOCoiooqxJrDVYNToVxvcWF8+cxYqnsBEPJtRu3uHrtNjdu3GK+6lgsVvg2kYcg5XGXWdE6b0SDAw0+EGLCtB0+BIy1lHWNKwqqqmQ8Hm0ybt2Mo/nZMAXe6G8/byYCdVUQi4wiDQhh9Ikm5P1jQ1U47XhhM75PEhSFYzIZ98o2XS/GA11IiE3UfRZdFmUmeqaso+AwJJ/7/q2PRFVcMnAqhlZAe71/Yu/sMRSFQxBMMUKMUM4LwGbiVeiIKeKKEu99Vt2KbR4yE/KYw81J3HcEbFjMmSHBUP/Ngznfew9/uBwvDLl936ubThXwcy3R2kQkYm1ATY6e4ARjV1EwQ1N3izEpj4ALUHKTi+WLuGLBUXyO6HdR83zeX6LmeZI+4a60mEVAk2GSLKOYoI2oUcJ5Q5pAUotSQKO4I5CojOuWsuiwZUNRrwlas4yPIKZkdb7k6swx9zmzNWpBXc94pae49gHbfdqlfOy4cXCV+eIm7foOUTzBZDF7H3KEV5cFtt5iVJXMxuM81WbZDxVoAk0XcUVBPcraydPdfYp6xOj8PvXuLnW9zWy2Q2ksVgOkQNO2HM2P8anDVEI9GpHclElIFOWIqq5wxaBCFU+RnHKEGbo181vX6JolhcJsPMswrIVmveQgrQhdoGkDIbZ5dF4KNK3H99lsTB7TxZzxxpjVmFJGNGIM
dP0owo9X1bAHjYc6r8m6tovlkvn8mOPFnJu3b1FVFQCz6WzQe2dU14To6bqWK1evcHx8xHLVsl61HB0dZqi6Fy8QcrnEWcG5BMlDVDTkmdK+83RdxNqCsswtHYerJb5TjuZLlosV63WeVRxjQozBFo7Ul3AETuq4PZlGMvuud6z5MxoxveqZIaUeXZRcctne3SOGxHq1ZH509InKSZbAo0AF3CBnrffT9vvbErjKXT7hU28iguu7N5w5YeBHSaQyECVuHC0kYrBEBZdsLvMlgyQL0RBj3reNdThV1MSMwiVl3UasUbA2twVqT3kVS2HyHPWRM0Q9TWQajqYv3fV9S8b0mvqS67JCDgRL55iOahKRaByqiVHnCDHQNR3N2pGCpyOXhlT7oSomJyE54x04HL3jFdNnvJ8qxyu5iDBMODY9pt6bcYojO8BhrozdKIcMEfmg0ZxwxRoxAb8s6XzBTF/kM+P/hDgu+XH3Vzlof5noxkSb+4Bd5ykWgfPfmjN+s0Wj43y0WBQkEmuh+9qI8JhDgsOkGo4S5R90sIxMiiViOrZ23mR372W6bo9bh79KZ7a5/dUt3nxylCOw6LAJ0BJV2w+iV0RSztDvE9zctGt+/Pp3uH3jDeaHtyB2qF8jYiiLGmsd5y7tsLs345H9fV54+mlUhdu3VzRt4PJRw415x3QyY//ceUajEZcef5TxdEI1m1BMR3Rema9yMdkd30ZD4Gh+zOXrV7GF4/LBHcajEZ+PwrndPWbbO2xvzygqS4iRzoc8xCDRj7aLtIsDrr34HXy7Zu/SC+zuXmB3d8wFmXB8cIPY3WS5aFmsGpbLOT4m2hBpW896HfA+ZDg3KWJLplu+r6umDCd1Hat2TeeHySkfhw3RcZaIV0k4l/tdb9y8wWuvv86Nmzd4+dVXsM7x4osvMplMuHDxAnvn9hhPRuzt7bJer/j2d36f27dv4UM/Rzj4TbbuikymqiqhqqCqEhLXaOdyW5s41kcr1sdrnCsZT2aEkHj55cscHswxrkKKiqOjBc26pfN5NGBpLT4kUtePitxMculrvxbE6qadQhWsc5Rl1TvevPnZnl3++NPP8OgTT3L59dd5af6DB4o23Gsz4I8BF4B/wv11vAJ8FvhV4DXgN8is558VE6AWk3sgIU/8sZYYIzZm/sVgtjEQsoQpMUvPulhgvEPFEHxumSuKGoqSNjZ0sSUGpVu2OGNIlaG0jiIaHAVgGGUghnGRz702BtoYN8zqgUcton1vue3zULdhUWtUpuWIcVUiTjGTrEcfUiCmyPyg5fBmg/eB1XKSOSbJ95rTg0rzEFTnoowMte53VaM7sYfM8Q4klNP/Ppl5mJuve+gK+sBmgL9y6pgcxDpvErZPlrUX0FANJD1E4wid12h7jjTOQhriI7JskUVAGo90ARvABMFKwtiQI/x1hIXBpohLkdQmjE8QEqVZY2XNRFdMtKXQNet4iFPlKFTEUGW45TQbrocqNlCe3D9HoCg+dagBWziQhKYCZx2T8ZSiLNnZ2WNvb4udnXPMZnuoCm1X44rA2K+pY0s9nlKPtqlHI8aTbcaTCeVoRFFVQMRIxyAXTtIenglElLhuUIXlqmFUr4kp5WH1zuFsvp2eoZlSIrSe9dExvlkRdlakaoatxozqim5cUY8rYsxQrDFCCto723zRDxCn6UlrG5JSiptbih9fO9HJF3Dy+poSTduwWq9YrpYslguWqxXrpsVaz3yxIMZIPa6xhcWHzM5umjWr5YqmaTZKX6q5JJL3P4N1UJaGqsqTmTQmVCNRM9GsazqaZYMrMmkk+MhqvmQxX2LLiCsTbdv2UX0/622DF8oJOZBebq8nkohwMgLt9CXZf/RhRjBkmVCc24hrfBJWAmNgB5gCEzJJahfoyESpD3tGFP1rDlutkAehTchD0T4miZZPzASwp3plBlU6o+TpZgM5UEFdIliDQYnJElWwkrNVwWzGCg7Xf+rJr3m/zq/vU05ISHlA
TtqUFdloPech9Kd13hTT+4WsrZDf02rOUvP76CZoEJeQIiJGcWryXlELfiwEHzDkUpGPGY4eWvuGfQVO2l5PgPX3tofL8fZ9mN7nyRQSC/LlkkOcrqsIviSkTHhWlCR5KHIIBSFawiOW9osVRiNbryjFUUcMFZoqFr7k5aUga5h8MzG7EfHPKuFZKOcrtt68gms8dQC3BV0HXWexNlDUy3wVvZ5wbznG1ZppvSS2QlNbpArs7L3BaHKbeuz5/7P3Z7HWpll+J/Rbz/AOezjTN8f0RQ6RU5WLcrlc7WrcxnZbSEi0uPMdAoFk7lBLgGghuOCurxAguLHgopEQuCUQ5oZu7FYb23JX2a7BNWVlZgwZ4zefaQ/v8ExcrHfvc77IyIzIjC8yo8q1Mnec70z77P0Oz3rWWv9hNm+JcaCZ/TY9DaP9a5TtVwnOM/oKsRlrR6yLSImYSUQ8f4aT9llDDFSzmsO791gc38LGTDUk2rbl/v37LBZz7rx0wsmNA7ytaNycFApFRvo+0dk1qeqofIX3M5yrsGaOlYZSKlL0hLEwdIkSEmaMmIlfG0shj4EYRrqh572HT7nYJl755ki7aJg1DTeWS7xz1L6mYIgxM3aB9bMLHv3RWwyrc8o2s7j5iMP73+To9i2sn9F/5Q6b7YwkPdZlxmcXXG7WlAyV99S+wliHiGU2bxDR1vNmu2IYg9JzYpxQzV9QlCvEtRHoh57vvfl9Hj97ytvvvMNHDx4wDAPWq4LWtu8IKdK/3/Pg8QP1K3ba/hrHjpwy/XZFv1krgKlq8MZy0NY0tWGx9MwWjjQkwkUgph2/WTh7cs7Z0/OJW25JKfPk4RnrdU82jmwtwxgJQ69oZuP04pkWNjEySUKKNqSmxpTZ/4gm6pTVDnCHJM1Azkk3wXbSxf0FtphfAf4dNCkeo7fzLwOvAe8Av81PD5S6A/y7KKcXrhDPLxAj+aUKQajkKm1Yo25mWMvCK0JFgYuZ0Y1UQEqZOtbkUrAyGf0Vw9ADkokEColsClnstHPTMWIaI1YyVSz4BKSCKFRDN94iiPV455Bp456lIE6tXK1YHBZTBJesFmLVFddWpIDJFEalUjq9xo9OCncXqms+RN2UdoMCH4cu0m8Gci6MKe4NRXK+Yi18Wny5Ei/snSmkoIjfYlDAvuiuKdtp9qSLpo54hZANITuCd/Q3KmxKBOcwJLI4ijhCcWyDxxbH7ccw+yATjgrjXWhWI0fP1rghYPGY2ijUqTiMi7g6gBT8xiJjppn3zBdbYjbgK0Qi8+WKxfKMqnbUbUUMIzGeYYunYUM1RopYxoqpq54RUbciSlIEaZGfetf9Y0ME6x3NvKK0gg/QDIXFfM7de69yeHjA7XvHHJ0sKFFIgyHYQl3rjdA0jnZUVxBrKox4RBwiDhW1EEoSheDHokCsieJTgFQyY4ykAuvtgNieMSWcd1Te09Yt3qrTCWiFlGIm9oHu9JL+8pzt6SnWGRYvbakqIRfH/HCO8YXZvKXb1BgrhBh1UXA1RgzeqzKV3/sBZ2JSlHZKmbLzKfwiYo/0vErsMUbOLs55+uwp55cXrLcbcs4KVDMybQICYxhgc/W6jBHaxmOtIceRGHosFc5XCAZv1fO4qRxt7RkD9MNAjoWYVOe223Rs19t90ksp0623DH0gMhIRYlZz8R07aycGcN2XVE2+pwLY8JxKz27WmycIs8lTMk7TnPe6sLxRGt3POwnP0eTbTJ8XNAGfoPNYhwpi/DTbsRnwKlpB74htcI1y9blf9ZcrBJloPRoqzTp1GJ2ieUejnRlJmVg5YipgLKkUzBVkR9dxUOMOJhTW3kReufdjUrngPd4gF2zRateW6xtDHTvapNek84ZkBYfFYzFZZ9NyjTaya+RMUkYIGSfTelQL1KI606Ui58ymHxhDoHcjvjhSyvQxkrIK+6Q0id18hjHKly7xFrTdquhtAcwEPgIjGecilownkjP0o1rwbe9XrO+2lANLmjts
smxfnROPK6KZEaXBLf4S8/T3aJLh1VvHHLhnbIxn85HDx1MWix/g5j1NNcNaT9dltl3GukIz0517GitKstQ+0NZKyK62gUKkbk4xricWiD2EseFy/VWGOCOezjE54O4KzX3BDYkcCjGq8fhublCut/k+Z3hbcef4VYxpMVJTZ2gztE3Fya3btLMGxNFtI8Mmsj0diRG6reg8cRyxktQCzBXEFYpkUkkKEMui6L8YIQZKSpScsbamrg8hBfowUKxHXIO4GhG3T8z7pWr64Iyl8Z5Z23J0ckLvoG08zhQsEWGgqg23794lhAHw3LhziZu9TdcHxjGy2URKERaLJfPZDOvAVFBVnlwyIUY2m05pMKXn8jK8mIO9i6JAjK7bMIwDMQaGceTZ+SnPzk45X10QcsRWjrapaRcLHZH0gZwSm/WK7XajG4UY1H2psnhrWMwbTl66p4YR2UDJbJ6t6Upi9USVqsYusD7fkhMY4wDD6nJD14WpmxTIqUxdJRWh34akPr4TUqWqzSToUXBWxePTNM/KSa9U6y1uIjqq6PxEkZuOwS6pXknqaRV8cvMmb3znO5Rc+NM//IMXe+w/JR4C/4yrRc8CX0MT5x3grwOXwJ8AFz/lcw/T751e+9oZf/6oRsZAU7s9mtkawe66IdO6VTuHswVvLXXlyakwTiYzOy57ylnV9NAiKxOVgzsZx5jp+czEDXaKmcUjtGilWxmLxZAlq5RkKdRZ0cyDU4tV9UpXZH8d3Q5axW4+uwNMlT0bWyDvPYr09YmjmIKpG1KViFUhLhIZVcvKCNk4ijGqirfbUP7n/+THHscvXeJFFIwCQDGT7BiAaOK1ETERY1WusR89EcvmFc/FX54pei2DTQX/8owYMnExI81mzMcj5utfZhEDL914xI3FGWerlvMHDbY+pzl6G+u3LBYHVL5ms93QbDbKjdypLBWLFBUVsGIIYcBUijL1tUWsUVRcCIzDTdab+wzDEenxHLOK+CiYO4KMmTSh1Isp0zBI57yfhYD9WcJZz62jl6mqI5xf0AjMjc4Dj260+MqQ40DfBdbnPacfrVAcREXGkErEkLEmYVxGXKbo5Haaw4jKNsZAiZGSEqSCNTVVvSSFUbsW1oFtMK4GY6eq6vogH9TzwlL7ilnTcnR8RG+zqtxIxkpCGKkrz3JxiwL45pDtuiOkzNnjR6zWPV13TkqwmC04OT6iSCShXGTnDSklKu/oup6+y1eo3BcYOaVplrum6zsu12vOLs45uzjjcnVJzBFTWWaHC269dJeSCqsnF4zdwPnFBev1lhhHQugQCqMTKme4cfg6L929wzAEVqueOI6sn54RuivdpDEENtstpUBdKYBu6EeGIU5AkU6r/akCDSGy3Y56CibXlrpRn9UJP6PqWSlMCT9rVWsVYSrGTG19FVDYn9JpVvzxxHt0csLRyQnAzz3xPpkeu9jNZ1/lCon8DPiQny3x/jHw1ud/mV/qEBGayun8PheMkavOh6JcsVbwqD0rtCpa0ydyKkQysWjS7cYJrFQKqTiKFLJkjBE1TRBB3CTKMdHlvLXMvMOKocKptUGOqr1QVO40U+hMIUpBskGSwRVDGz2Gqw1Czomc4gRIqKAUYlEpyR0XuYgKChUpVA5dnxcGsVYRhtOaRqMAsR2n+dPiy5V4RZ1OvHc61i2JUiIpmQlQJUChZCFmR7CG7m7DUDkWJxVL75AMJMhS6JuaocpINRkoh0T9LNDEnso8oGrOccNtxN1CXASrPMh6vqBp5xTnKFYXoKbWk8+Ok1hAJv5k3TpyMViv8ofFCNYIDqGeX2CqyGJYY60nypJ4dkJZTYuTmdRWJLOHB7ygPCDoLjHGQMwdxlka70hFHXu0rQ85QRHB1H6io+lMxGdDnS3GGazR959LJuaELSqMXriuXmMoYrC+pmrmROMxCcRajK8QX2kSFploU1NLpkz6rUy0MGMQb5HKYrzFVlaVx9JAjoHQ9xQBWzKVtxwdHvPy
q6+zWm/JtISQmM8XOF8rGKNECrLnFdbOIzVU/sXb1ZVSGENgtVpxdv6MbddxsV5xuV4pjxgFulVWJsDb1BqzjmK1TatWaKrUZo3B1hXeO9rZjPlsDnRcrjbEFNh2I/162CP7d7QpEMZUkJIZU2FMeq4LlrLb3IkgxuGceqBqCz4TQgS0ijVitA1o1L3FSNH523Vnp2nBzQUFwdhrKkbG7P8NP9p2/kVGAh4B37/2tRU/nfzjBk22efr3n/cQEdVAn6rX3bx/BxRV+o7DGLVoLUVIMTMUHX2MAiNCQhiMAqyGZJQTDJRicN5NJiYGXznECP2mZ7MdGG2BOuGtYV6rNGzKiZSCNotlkrKYRpC7zZ8gk93qjnvL/qNKCWtnNacJvDt5fRdTYPIS3vXIxVmVx7QO8S1iLa6ZYX21B5Z9Wny5Ei9CVTlms5oSDckMU5vCk5Lo0cyGmCwhVAxLy/mvHDDc8Xxn0fC1uiZniKGwBn6/NZwizHOmTYVmPXL4/RXLcsH8jd+hPfyQKv0qbpwjfkS8QeqKxa07LI9uUq9X1JsVpgSqvEFIlMllIwQFkrgCy3quwDC0OjDTw2UwzbukJDTtyNglQnqN4Z1vEYeKdW6J3pJFBT5StrxIloUALhfWw5o+bUhtQ7VcUIpXC7gkxFBU/s04/EGrKkOTdRdjwQW9YItTHnXIkZKEKuvFXCRiJFJM1laL9fjZAfOj25S+xyWPWIudHWDnC0xdU6yZ8HKRIhO0Me9GPJPFXOuwyeMXnmruMT6S4iUpJfpelzjnbuHrBfdfu8/xyR1WF2tu3HibbtuTou5cY9pAHCkx7xPvvGlYNC0XZwMv2o835cym2/DRgw/54MP3WW83nF1cMIaRvu+1lds0VN5QNyoygRHqqsZlS0HoRqU5FRzGWprlEYtZzfHJTW7fvMXT02c8fPKQYex4erpmfbZV20wpOG+oWwtG6KNursKQGUcdqIlRC7XdO3ZeqMWpDu64JZdIR8cwjnhf4X2lvMVqaicWCztE6P5CM/vEm1OebAIVNOb2bcjnK9/PYp32RUdGq9Tvf+xrP8127CHwj6Z//9vgOmxEVECj5ElGNE8P9rK4VdXgXaNrcRKGMbI6j2zHxNZCb5SAk60nk4jRUIIgyUCCdtEyPz7Ce8dyXuOs8O7pYz78cEvtIqvZSFUZ7tw+ZOZgSAND36lSXa3a4YmJNywqaSFG2Ola6AATVTdMOqoc8No2DpaUDcYXTKX9bWuvGBuUAlWFqWtF6c+WiHUs2zmNr68G2J8SX7LEC4pingO1Vj+7nTK70afuuLUCNvjWUuYeU+1mwXpwtDgVijFIyhP9Z6TKK3xZUcqWRE/MgZgK1gFiMEblwDAeWzX4lDAl4HJGSiRPSknFRIoxTA7t2q7MCitnesWmCMWKyuZF5VyGPmG3A6OF3lZTomO3RftCImcFO4RkCTlhkiFOIIVURK1tMTs18KmNxFSNaQW/448Uma4/5RVM9o16WvbVjdGW5UT2VB6dsbrgGrl2MqekW8rzF+wEmNgBLfYiFGVqdedpLmsCQsA7w6ydUVLh8PCQqmrotgNjSDD27BTgUtKqzpndwv9FHPOdQEdQ+cV+YNttCRP6UZHFBpn8gEtRh6DdDtwZS+V0YYspImJo25b5YoZ1jhh3us0jY1CMQUxZ28JG9q4rALHoYCDqJcpz+rK7l2uM5s2dMO10/JF9H2PfiSgw0YeuAa6uPa7O3+45uLqGYD/fY/r3lyEGPt8cNvFvR6W7D1HAXy7aAZkcavdgSopgSlaEsWIu9SFCMMJohNGiWulZOyw5Zt0YJ4NkIWYIRZVkxiLkbBhjYRwipEBnBlIybLuBQmHsB8Z+xDhDNQECE7qJNeJ0My8gpjyXUxTApbKoWayONYtDsgFXKG4yf7CTtkKaQL8TittYh3Ue4zzOVTjvn5t1/6T4kiVeQxhfod/+15A0R+KCUgymXDk/MHG3jWRmFr5V1bi64WnO/Ff9
iE0ZP0aSCKGpaaxl1vUsNwMH6SFHd/+AJq+5CD3r05YnT2uePnEsDioWhzO8JMIodF3EVQsOFicIEVt6KImUtuQyqpVazuQYGbc9OWVySpRcsJXHVDUJQ5ccqRhIHrIlDsLYbdiuBvL3C93GsVrXxN6DUd/hFxeCWGHMPZuxI0kii6dpMm6xpMaQoiFFp8AZa7Wajwr0CbkQimCwWOOVKG88WIepa/y81vV1rCjBIDJSRr1Ix4lzStK2Y+MrZk1D5d10LnWwq/uVKQFTMFZfc7GWYoRxHMlEmnGAnNUJCeXahc0ZOV1Q7JzKLrCLhq9945uEmHj69BHr9QXnz0a6bSGOkfWqI4SItx5nHEMfXzjsNOdM129ISTcHwzhwenmuQJRGb86qanC+AiAOw6QyNVJK4ubRIY2tuFxd8NGTBzTe8e1vfIO7d25y+ewx3/ve9zm/vODDjx4ydCPDMAJg6xbftuQcWY+DGttLJqGbqCyTwcck+G6nuVkqiWwTJYOtKp2jNQ3WTfzqyaeUPLUVnZ2s3fS8qv2ie25WbnY8o2tSkTklckqT2cUvTjzjL+Jzhug4zqHb5i4H1jERcuK874ipMK8irQuULJRsCQXWrWFoKzqr89dx07F5tqaMEXO+RYbJEcg6OudYbwZkCNj1CAVWp+dsNmd0ZWB9usaYwrOnT7HeTHzxjBWhsgYD1GLwItw4XHBwtMRWNb46QKwm2Ywh4pX2iGE0HsTgjceLU4lh4jReCQgFm3VDQVVBXWN8RdsozbJuZjhf73XKPy37frkSbxFyXhLDLUz2mFTp6HB39wpIEbLoPMsJnFhLbR2P8sjDlPEx0US1WsvZY41QhUjdD9RlTTN/jE9bhpRIwbPpLNutUNUWg8OiFWoIGd94qnaBSEZooCQkOnIe9oTpNAZyMqSYMCmpc1HT4NuWhKFkT8ZgZYaRijhsCf0lxmXqdiSEgkhNLhYpGV6gctWu4kg5EtKAiRVdiBTrGJIS0lMSUtZFUihkUdJ6zoVQdlNYncsVmWzfjEGcygpSWZrKUiSTvSElgwh7GUEmwRBnzUQduqZvOu2Qd5hu4DnqShHRRTrkyS2naLtYtBWew0AMGVM5rJvp7LQ9JGUYw4bCQLf2Wq1lFXEfxwDOkI3Onl50lFKIcdxTiXJR7e8i0EiNWPUG9sapPF6MKoaRI+RMU9W4hSeOg44rRDg6OuTmzZusnj3m7OyMy/WKzWZDGJTMjwjips1eMIz9SCpM7EiuNq1lV7+yb3EXMxmDmLKXwbPO4bz/0ap0Ova76tjsZrjTxz2S+WMX8BXg5Bdf5X781vppX9F+Pb3+i9PXviRF/BcaewwGU1exwEimz4lVCIwxkUtFSm4ygckkI8TKkI0h2UK0hbGHbQwwBtygNpTWGxxFOcAxTSh63RSGYSTFkZQGxrEDEt3Q7WVYxRgsUIlgUXpXEshesAuPtepwhFEQYRZLQgjTXDgZrzgTV+GsY8yRlBXXzIRjMXnqztlJeMNanPN45zHWY4zHOp1Pf1p8uRIvV6MCiqFEP/FaJ8rJrjWwa3WKpTLCzMJ9hLmFSyc8MJYE+JSwKXG8fZ+7q/c4siOHRzcpIXP6rmV9aYjdEY0J1DJiktJiutUlQ0i4umFuGwUXeYtIIeclpQRyGClhBBNwscIkNWcvKZHEEEYDxuOqAzCeujnAu5Y4rgiDJxewLcgAxRuKdRRJSuh+QYm3lEIMkXEY6LcdUjxNpWjwceiRokCaFCMpRMIwKg83RaSowbQ3QuUt89ZhK0c9d9jKs5g7ZjNDto5UKvIoRFOTauH2jQX96pDzdU8fQKxMFn8DJUVKyRO4S3aA5knhZ8cHdNTzGTl1GBP1mFDIKWDdJDE3Je7kFMWYxwuyOEgDiOHwoGI2v01TOSrnOT875/xsRRrTpEuc2Xb9C2957vjCTVNxfHyIqyvaxZKu7/no8SM23ZZUZUav7lAxj5SUYd1TQqLbDAxdYN1tkJwZu54/
+jd/xPvvvEtJI7aeMSuGuzdEvUXngRgyfYB+Uu8ai3Ied0QJ5QpbkIlny5RkjWoIWWMRmxGr95Z1OhIQdqCoqUX8MbDUruL9+MxWzARUKUUlBKXs0aTX280/77gDvMHVopeBt1FP3c8STQPf+hYcHaKKG+8BN4BvwlDgT/8Uzs5e9Kv+EoYIfcqMOXM5Rk77CAIniwOsMczE0zAZEmDIxjLWDdEYnpTIGYkL6VhtEjbDy7cPmTmLdRZjDampGA8aogjbUcU47tY3OXhpRho2DJeP1Bs8KXXo6PCQo8MjTMm4HJAQ4eFj2Gyothu2DyLVYk5dN5i6Idee4hwjhnNRYRfvq0netKI2DlsyLitZqBb9mFNQBTqxbIuhLoYZKgiyi51i26et4V/KxCswJd6KvTwkBZmENMg7iUVNvK2B1wy8UoQfZsP7VjmJbRdpYuZk8wH3Lv8Ni+URB3e+ShgaVpctjz9yLLxj7gMVAZsDJY706xWpDyyObmJtjfUVbrHQnX7pgUjst4RuQ7EBlyslTY8jedIf7seAdZ5Ze4DzNfPFDUX6hpY4WEIKmHaDbAs4SzHTfFV2KOEXEAViSIz9yND1OFuT80ieEm/JiRACKSaGvme7WkMpVNN4dd7WeFfR+sLBzOEqTz1Xy7nZzNHOLaVyZOspQRglEmvDrRsLcndEXW95tgpkCpasIKc8+ejmSVauTHPjiUC/I+LXs5aSZ+TYQ9HkW1IA6/G+UUF+CskVxqGj7y51pp83iHEcHtzCN0vm7YxZs+Txw0e88/136CZDgPW238+IXmSUUkhxpGlrxBxwcHTMvZde5fT8nAcPn7C97EhVxHtHyqopXWKkXGwpY2R1uWWz6YgkBE28f/wHf4Ixwv3XX+Gll+8ytxVz35JyYnM4ElLk0eM1F0/XxJgYpwqzTOhmZy3OekrOFIKWbdbCrlWcM8WDy56dbGcpZZ9kYTcOYM/X1e+pItX1xLv3FN6JqEy+w1YU6Q/ajv9FgKtuoypTOwGNiAKifprE+yu/Aq+/Ov3yLvH+NbhI8OjRvwWJd8LNDCGyiYnLMXHaJRpvuXdywKKuaFLCp4wRpQVhHDQzsvW40CNxJIlFtglvhZdvHXLjsJ2aaYXeWVZtxVjgaR8YY+aVuzf4SnOXcXPB6gmMYeRsMzKkzP1XXuO1V17F5IgZt+SuYz2sCeOa1XbL5emKcjxibxxiTSE1S4q1DOK5EId3lmVdY6zBu5raWFwpuJJxAnOjzm1DGAgp0CVYh0LMwjEGjzyXp0r5dEroly7xwhXEu2TRBLwzimdHdxVKQlsZ+ikXKXOREmep4CLYkFmc98yGwBLL8uiAumn3ylhkiykOb1fMmjV1dQESFQk8tdqMsUqIzpCjitHvduwxaps2Z0MujlxgCIUYApuuY71ZU1Utxs+oSqTNS6ChlMmPNUcd9ltwdca3aUIH7iAtLyIKpkScydQWWgeLSqlRB3Mlt5dslEIyWoaZhZIxU1u38hbvDE1rmFUFVxWWM4tvHIu5Zz6rKclSvFBiYjSOOARyfwNywM07Nrkhl8KtWy3zuedwuaSyKr6wa1PuW3W7dz5VvdY5BEvJWW0Up6vA7MBXom0gYw2+rshAkgKSKKknBcEQaZqa5cGCey/fo521ZPmIlDP9kHZX2ws63qrpenlxgfcV89mcfoistoFxCPRdT9/1WMBp+af6szHTbzritEFK4zhx2SMYwTc11ju8q/DOK1o0jqqJPUZiTAgTMtoljLXknBl7lcXMKZNE8Qe7jm/OV5It7F7LBKoysE+8xtppbKDvz0zSjzLNec3188gngKZEXWKucFs/XzqRoPzcnX7ye1wtepHPxtedA7eAI4GZKG2Tm8DXgXv6hFVWx6Pry+0WdT/68zbRLghdTFz0IyEX2qqicRafDTYCQ9bNJJBFwBXEqTJfd77l4nJDf7rFRvAF/JDx26hrn2SMc5Sk12I9yUw2xlBZC87TVA3WWEKuqFLBuYqY
C64I3jqs9zSLCjvUjOcDVRdhHFmfXmK6gNgFYjzWW2rvJm6wpzKW2nkq48gUHAUnhcYWlcmQgk2GLmRcKVjnJjCunTpI6sy1B/n+hPjSJd79vqEIOU2SkZJ3QEslUmeBYChBJRazwJsh8cf9iEtCOxrqPvDy26csLzte/nrNva99lRRVV3gII5IcLjcsmz/h5vHv4xqDmBaMeom6ZkntvCLuSGTRxJhyIpeszmvBULIjUxOL4WJzwWaz4fT0CY+fPGA+n/MVycznC2bLloaKmLb0wyVjUL6jqw3tQUSaETVJeHFzRykZR8/cJXIDJ3N46VCYzSx37zS0bUvtVBVJDzqUlBi2AzkmUhxIacR7R91kqqZw55Zntmg5Wi5YzmfTeRH1Md5uiSFy99YBT+6/zGo18NqDNbkU5oce31hefemEZVVRe4e1buoiyH58UAzq+9vU5NiQQqJkrYJ3OiPWqMxiZASJuNrhZw05R4ZxQ86BPDwjDWCNGkDM2pqm8mzWG377t/8V8uY7hAjybP1CZ3PDMPDuOz/kjTe+yZ1bd3j05JSPHp5xdnbBs2dnPHt2ijk+otrRawrEIXP6+Bn9aoNkRcerY1XG1RXLoxOaxYyD+ZJ5M2OIHd12xTgGtl3PEAJWKo4O2z2PMMXE06dn9J2ey36IezszBFJQGogYBbNdF8AoEwrZTnKeGQVHIeC8x07PwfQ7bmpb75KuukDpdWx2N+7umhStwH9eYYFfmR5vAv8YNUQA3Ut8FkTzPeDvoAm4BX3v30ZlrxzQQDvCb/K8BeAPUarRnyd3ooIQxfC0C3xwseFw1nLr8JDGCE20+FBgNZK2Axnlh0vtsa4hV5mnbz/mnTcfQSzUAVpvaJ+NtJtMKlG1vb0jtzV4w+Fhi6kth+KZWU9VtbjFESllZq0hF4OpajZDojEw8zW2Bv/SgnI0Im+fk8+29KsNH37/XahrbpqKmYH6wHHYNtTOc6NuqYxl5hoq49hZbjoDrVfzhXGoiCEQx8TFEKmsopnthGr2vlKkd/n0PtqXLvGyo9bsKEO7DfknvJVSCn1WTdAuFbqUaYPgtpm6j7RpYF56Wm+oZzPVpl1viPHqoHqfqJoBW1cqWO+rCVyixO2cM5ISykkRQtRqNceROI7avotqbB6i0jtCjIQ4EoJjDB0+WK1QciCnuAcMgC5gO5etIqIi4S+oAhMjzNqaPtYUEou2ZtFWzFrPwayibT1N5amcZSfTllOmdwoWi6MQoya9qnZUtWPeONraMvP6EDE6G8w6E4nW0LY1TT+Ss+H4WNuOs8MKX1vmbYOzRvVQJ0rP7t1eVbxc0ZKeW7h3ACyuKmUpk1ar3d8sOotX3WFjMtYUvDfMF3OMMSyXCxbLGfVl98J5vCVn+n5gGAbCGMgpK7/QWnVjcm5qRyd25WcKOmOPIermQrk3+kg72ww9N2GIjENg6AMhBMIQSTGRTSLvgE7WaKUB7ETbNREadKJxdcw/iRa0w5jv57ilKHUOFcU3E9K5yPOJ9UdVwMqejrQ7e7L/uy/skH+mEDQprvjs9KG6VgDrAfqYzdnJxmu/urn6WWNgPkM9B6fY/Z4UVJXjE0rfgra7/6x49paJnjbERDdGZk2eDDQMJULKhRILJUyAJNRujyESM3Tbnm61wYmlNpVarodMMlNBU9S6M5RA9kZ5uQVKlacuJ5ANJqskbsESi2EYE9aJagQgGO+gcjhv8cYQUqEMo6pbdR2221BmB3gj08Pgja5LOkpRAK/iqDTxOqvsD5cFH8Ebux+9yE5FhN245Scfxy9d4t1Z+KUkhKiv3pqISCIbLffLdPOPJfHdTUe6jKzGwGEozJ6N3HirY87AS7cfc3x3w61X73J49x5njx9z8da79Cto6yXuBI5uNxzeewlTt7jlDYyvcO0h1tdgHWMcKTEQ+0GFsteXhHFg6DYM3YqSEyWqzHcqChxyJjKfW7yPrDYPGGPD4mCJs0IY16SxJ4+WMlYwOhgb
GGfo6bUvLA3M5y2/9ld/ifPTS9brjoODlps3F8yampdu32TW1LRtSzMZsSv6NzN0HTkmYhqIKUwtRwXRLJZzfOVpmoa6clOCdOSSMSURrSAkQr+h8o5XX13inOP4eEFTe2pvqbxWrDtt3137fo9tntxwjDUkIzqrTIkSAkybBLGTJN1UseEMJUDKChYT0U0BZSDHMwTLbFHhG8fXvnWfxXFD9m/yp289fKEuRTlntpsNb/7gB3z4wQccHp/w+msvc3y04MnTxzx59ozzp085OztDjVEKcRg1EQN7tNmEzyipsN30jLkQhsCjh08J/cjmYq1I76iWaV1I9FEr5OZwQS6FzbanGwZduErBYqmKnVr5O071lSZuVSkP8fqM106t5l0Fu6t4VVMddjzd3Xt/nqt7NeP11mLFUF7wRufTIgF/iLaYL1EThM8SxsB3vgPf/rYiZCtQjcmbP+YXWtSE91euvnQbrZRjh1offfSjv9ZP3/rwM76uX3SkXDjdDJyebzg7XeGtpbupXN0cCiYWxqEQR01azllCyDz76JQuZ9579ID16gFNtaBe3CaJcLYNhCFhvcV6z/kQePRkrefqgbpxVa8c0t5bkvuecNFjEGYnC2zT8GSz4cl2zUFtaI4qaknUucLZA+o6cLDoqcZIjB3jOPD0h+8xPHzM0beEk1t3acRO4y9V2MhTBWBkwjOLVaU8D8Z6liZxWyJGHJVv9proCqr69KQLX8rEy149KU1UE5GMkaSyipK1RBRLLMLTMdD34GKmStBsE7MnPXPbs3x1w8HJlvmhoZ4vMe6cod8y9BlnB1wTaOaOenmgiXdxpLQM3yKTtGFKasYwJl0ct+sVQ9/Rb1b0m0v1ME2qNOQbj3UGI5m6MojJjOOGUkaGYU0Yt8RxQvYmUROIZCA5iGryrJOFF1MOeO946aVbzKqW9WXHYllxfNIwqytuHc1p65r5fEbdtnv9ipIyYeZJKZJSQ8q6F5cpUVZ1rZZ13qkqkbGT/rKQrIFiEDIpBWpvOTisqSvPzeMFbV0ponmiwMCuyr1KvCrcMCVUc3UcSsmqBV2mscOuRcpu/iAUMy3+OSvlSYASIXeIqfBVi/GOoxtHiMss33n4wkE+pRTGceT09BkiQlXXHB0scNZw88YJuRRWZ2f0/aBOKzmTxrhPbFdl/4RqKBCD8tKHXpukcVRbMsmFpgi2QNePbIYRP2uQ2lNEuzN7j2J01lqYOgbTZmoX19HJe2EPkX3i3R2nHZgqc3W+rr/33fvYb6amWXJ5rr382WT1XkQUdM76+Kf8PSNw6xa88cZPqs6vbSI8an10LebAV0Ez/h9d/7WrJ1wBf/KxO/7LTEvKpbAdI10f6LYDwxgVzEdREYxY6GNmjGqxVyEMKfNwvWUTIhebNcO41upx2mh3ISExa9XpDP1YlHOfIZuEWMt6WdMdNZQukNZqPTk/tjjjCaGwWg/YZBlngrEFj6NIhXMVTa0ayu2kLtOdX3AuK9pX1viScJSJ1z4VAhSufN9lolGCWKdrYDbMnIBYrHEquDTdq8BnYqV8auIVkVeB/wuKxi/A3y+l/O9E5AT4B8Dr6Djj75ZSPjemz0jBmsx4BMMrjmxQ+7xi8U8i9kxnfhFDHoX6ewnzkRBetgx3PMvFmoPbjznwiYM7N1necFSzW0AD2ZHGSAk91r6FrRuaRcPs5Db4hlIfkRAu1xtCGCmiqiYpZ4YxEGNkdX7K2HfE8YA0HAMbRB5jzMg8z5CmovEwnx9pcnAOaz1NtcRIjbMFYxNeLCY5JHryZkbcLMknf8xp9f/hH/zf/yHAL4nIH3+e4115x6t3bnBjNqPvRprWs1jWeGdZzFq80x3mdOCVPmIMXhpcyeSi7R9kZzqNzmVFESYZVUoykzF7zomcE03jOTle4L1nXjmcs0gK5DChbfXCYuc8tfOtLZN+9K7yVdqcLt47M4YUHTmr8IWKl5k99YUEYRsIw0C36Ql9YHl8xPGtWyCZ
mDdQwJmId4Kzz90h3xCRP+FzXuMxRZ6cPuHw4JBZO9sntxBGHj18yEcPHrBZ69QvxUjoevJU7YqbWrj56h4uxhCHhEQh6zh84iNHRfcnbU2H6TlSSmy6niwKoFK/UnOlHGb1HFur9A2ZzoVupiqMkX3SvO6hu5/Z7tr/ORPz5Pk7nS9FTV+nDE1IdX0n+t+S2a47/uj3fv+FXOMvOlrgG6hd4Cs/9qcEfYmvoH5EP+Bqcvwp8dprcP/+PvlWjHyHH3CbZ/sfOT+H738f+hevQfm5j/c4Zt57f8UHHzziwycP6NZnDJdnOLG4oLZ7aUykmJU761Ut79lmxRAD28tzxpwwseeyP6MzlmwclQgut7hQs+m3XHQXpJIp4hBjeXZmaRsHQ6BcBow1dM1A1Rku+0zC0cfC08uBxsHttsHWDXLD4ExNs91w2EA9Bs5WgThmxstTPnznB8yXR9iXPFXd0rgaZ5yOh/wEJBTtQsZcdNyTDRkLqGGOKShlcsJKmM8AkP0sFW8E/iellN8VkSXwOyLyj4D/PvBflFL+YxH5j4D/CPiff4bn+4khov6L5UTo3/AkK5RUKDHhe8E90ipHAAah/tOEl8JlbRle87AMHN15xGFlObz9yyxv3KBqWxRR7MhjJIct1j7Du0Sz/CVmN+6TTcVo56QQuXz8lNXlBSlO/q0x0HUdKQbW56cMfYfJ30byVzAm4f0Z1m3w9ggrLbN2yfHJkSYnUyPGU1dLjDQYU8BGvLHY5DHBkTczwsWScvMdyiv/Kf+t/9GG/+P/jD8G/tbnOd6Vd9y/c4NwrJQhVzmqppqqSg2z232rwSoiosLksBce2qPJYUqMTPZXk/7xtACrMlHUxHuyxImhNl4X/xTViWQnwjGtyNNyrAjb6bl3jpB7P4pSlB8dRnJUUXTKtKJbfXWCXifjNtBve04/esL6Yo28Drdu35qAcVskZ6wEvFMXlWvFxwellO983ms8hMDjZ09o2pblwYHOnoFxDDx88BHvv/f+1Ww3RsbNZqL9KGJYq3bYD1ER8qA0oGS1QRLDVeKNMWPyRMcCYkr0205lPlF5P+sU6bmb2YoximNwdrJf08S7azXvYlfdllJIKT03v82lUJLysVNJ01nUeI6rO83j98l3Ao298e1v8i//+b/43Nf4i44W+FXgNa6u+R8NAb6CkpN+ALzLZ0q8Ipp0/8bf2LtDVaz5ZU4p1xLvu+/C++9/IYn3cx/vYUy8+/6K995/yIdP3uP8cc3Zex+pm5WtdT5qFe2LUd31lAPb9SkxjdNGD0roKN0zjBjWpsKIxcaAHVvCsGbbn1Ny0ucTw9OzCkyDhIhsAtYaNn7AzwyjKSRj6ULk6aqnccJydkTT1JibDf7oALe+wFcdw7bjNK2IKbK9eMbp29/n4OQOy8O7zFGrwd1YzVVOLS8ny8CYEyEJYxYVpkHNZEre3Wc7o5cXkHhLKQ+AB9O/VyLyXeBl4L8D/M3px/4T4J982kn7LKEzXgtZMKVAhHJRKH3BdkrI1jc3LUxSsJJZDOfUlyM38pqjkyXLuqKezXBVrfKCQ0caR3ZyDTkfE7GEsSb0HUkCHZFhKJw/q7g8O6TkQMkqSjD0CqqKQ4OkHiOHWGMotIzjy5g40Pc1GEc9q0hJXTqqaoFxFdbpLCBHQ4pq+J5yIRGJB98ntAGOVsxu/gazOwH4zz738ZaJw4mffDO9xU7gg92loRil3U5NriqUj7VLflT0YAecUb3WHSYOtFXpnZuq5J3GjWo8T7+qN9+1xXn/9St01dRx2KXlws4NW1CBaymCxag3cDHYZDBJsFGU0rANpO3IuOkw3mGrCjEWZwQne6fnXWyn9/m5r/Fdu7nrO87OT/ngg/c4PTuHHKmdYegHTZwhYMWCmbxKURWdYqatpRgokEKZtG1341+ZFm61PmNq/ObdWd/xb6fjap3D73VkVenHOaudiF3i3YlifEJf9ZM0lnfI5Z2O9q6Lsfv1q8ulTP/f
/U+o25ambXfP94WvKT9VODC3wM6Bw90XZ+hwd9cuN2hNvNNIuo/2k5/yid5GDriLDpxPZNK81efSZuxdrk+fjelRA8MXD7n6vMe7FCFEy9hHhnWHGRK2V2MMa8Jep91MG3ljDTknhr6bKJQ6QrKuEJJS0byNez65iyMxdPRxgKLG9EYM22HDanOOpIKM+vNxbXGxI1eGXAmNFxazluKFVRHSmJmJYeZqSt2SF0uysdAMmD6q3O9qzUYqHr3/HnU753J2QF3VLI+WHJwcqke4VfW7kBIhFVKBwlWSlZ37iuza1J+OGfmpZrwi8jrwl1E8wJ0pKYOadNz5aZ7rx0WOlhAcJRiqmMk9yJ9kOC/YgPbhd7wTBPHaur1z+T2W777J7Vt3+Pq3fom2nbM8uoWrGrrLM7rVOf36HCkREccQvg35hM1qxfrZI4ZUuBxh23ne/uP7PHt8gLMJb7OKdAePSGE+21L5QOWFqhLG8Ranq5dVpUfW1H2PVBvmh5e0tmF5+LLOFqkRHCH19NtI31vGsTDmnu5r/4DtyW+D/ztQ/a9AGuA/+9zHWwQq5/DO6JI3SasVriVBubLJ2le1cLVY7hbVnRzgHom8S4nX5rSozKN3Xs2ty1WFnIq2oXZmEPo7eoHaCaTDrtITIVur2tG76jwnSBFyRHLCZKNuPsVgssUlSxks9WjJg2AuAzzrGeeXrI4eU8/nHN67h/Gerd0SjEqO/ugx+/zXeCmFy/UlIQVOz0556523iGOANHC8aPjo8oLLZ2c01nPoW0qBIURt6zunADJjEOPJORMuO+XjZtTmEMF4T86FPmVdCCbnFCdCW9WaeKcTWjcNTVPvrwkRoa6rqdWsJ9+IdgD2re7p+rgOmNq1nHPOajWZ4nNzXjstqtflIXPe1eITHNiwTzov6ni/0FgAfx1NS3vU8j3g32ciE3EFawbNqP9NlBH8j1EY18eiAf4dNI/WfGxTWwO/gR6CXXyIEpEuPtdb+aT4vMe7FEM3NqxPBy4fnLIRx5n4qdKtFOG722rL1XqB7iExlVWPXSeYaosxgq/VaKW2jsoa8uTXa4CZcXhjeHaZWHUXep2KXkumf4wYQ3V4TLU84fhkxq1bN8AZ3t1uCZcDLy9qXl7MEV9RfCF2W+Qy4EIkdh2r8xXnD5/x/tsPwHraw5v4ZsFrX3+FN37pKzS+4rheYI2lz56IoYijiJ02rMqa2Jm65Gnk9mnxmROviCyA/wfwH5ZSLq/vjEspReST/ZBE5O8Bf++z/p2Copop0/wwAx3IZmpZTWe0FDO1pZUy0jJykDsWJtI2NXVT6wKeMzEExkGNw0vJCgTKLaQlIWwY+46QFY03djXD1tBvGiqfwSdKtqRUqcNFMSqaLar0WUpFTgekLIRYMEGIYSDEjI8ZdbyY/CaLOsvEpDOQXDqyjKS6Iy46oEJ4iekGN5/3eL/22mt7Os5ulneFByn7j1f6CDK1fK8l3f3PaJm6O/554jPv2v655MktZxJtmJ5jt/czU4fix7kCybUUvq/aZMcv3r2WPM0Rd3WfDnpNAclMPN+pCsZgRSBl4hhwVdDraarsd/7CHzt2n/san81auq5XP1FnGcMI3VZlIXPCCsgEFDPG4Yy2coPskipXx3qaD4tVF6znW7hTkjSyT8a6spn9LHcnprOrbq+9XkWNT+3O/X31Y7ocJV+pWX08ET/3O9NCe9UdKc8913PkIj22n/saf+Fh0OR7BIpl9ihH6AitfH/k1aDLaIXet/Nr30vAoBfm4lP+YCkQAowjdHa/QX3B8bmP92x+AlbBlAq1yEQigmDMTpFs1wPTtUFEVNvYCKY4JFtIICUgRpHDxioVqEy88Tjdq96iIj9lICYV0rFu8rHO2hVq65ZSD4yhUu1lhFU/MvQ9CwNLZzA5InsLQCGKqHtXDISc2JZEFsdYLK4ZOTqfcXlxxOgrbBNwxjHaliiVdjC9IJO+vXZhJz/1kl8cj1dE/HTC/q+llP/n
9OVHInKvlPJARO7xY4CDpZS/D/z96Xk+5fVoQlVkr2XnjCJFIO9ZYeznX6Zg7IDzieOTQ1557TWWxzepqgpK4fTpY0LIbC+e0a/O2Jw9YxwHQvCMfU0e56zOB+r6IbHANln6YQYhY4vHE2mMruiNDxiTWc566npkDBWbzYIxOGJU5ao4VIhk1qvI2dMz+u2AmAd4XxNGTUjkEeLApuuJ8oTkIr3966zNfxub7+FSIIUBlJ7/v/48x/vX/8qvF0S0RTm1P3Yz0zzN7GKM5Jj2iS2XoiYJMOkpXz12bZRSCjEkYlTHmTjGSZdXqzZXq3m9mUTErRha43CiMxNt0e9aMopaBzVZLygNoao9JXqSyaQSSHFkHDpsLZTUUXKCol2EUgK5FJBA3VhMqTi6eYC3gl9UpGEkVp6cA7boLLPNc6qqvp435EVc41VdlX/5W7/Pt//S13nl/j3dbGRV9krbjjQG2mI4qVtaX3PYtKScGWMkpsjYdQwp4ZuW5tBjvKU9WpILbFYbxk3P9bTmqmoyQNPkWTU18+UcM+kuFwp1XVM31e7FIqJ6zGZfne6LVkCvlZ2kp3aSi1a312a9RcB5d3XkYBKrl2kikPTvl7ynKOms12An9PmLuMY/fU35WcMA3wG+hSbeHyd8/wj4V2iS/TrPV64PgH/JJ7afPyneegv+4A9gvYbNF2I2+LmP942XXi/1qy3V20dU9S1yisQY9B6M50C4Nme46pIpz16waYkJc0rpSaxACuME4uuMxxqH9vt1Xtw1ap9p6HXj7C22VY0F5w3GCIuUmHdbwnhIXnpE4OKdt+gvzni/ciy8x9pCXRWkJMLlmtQr8jo6Q04oVTEHhotHDCvDu90jTt/7U5x1kwKhx919GXN4xMnxCXfv3GHmPTPXYsRSkiNj1Qksw6dl3s+Cahbg/wx8t5Tyv7n2rf838N8D/uPp4z/8tOf6bHHltXs1MJp2Nwh56q1rh0xlHI1JtLOaw6NDmvlc1XZKodts6LpRTQ/WK7puO9GDLDFYUvAMfaTbrCanioo4GEhFZ4aokbKYgpiEtXlqM0dCqAljRYyWnLR9m5LFREsYMt22oxRoLldYNzIMPTFGnFHhjiGsSfIB2RSi+WsE+XVySZQU+Mf/h/8pQP8ijndGJpnMK6SpCjgohSUMIymoYoyqchWGVEhlSr7oQhx3FU/RhXccAiEkBfp0o7ZYphuuXlRU8xrnHHURnHF4pwuzQVGClI9fnfrahIIx4LwlOTtRgjIlx4niFKfZu0EvXwNqfodIwjkDlaOdNcr99bK3pFMEbp6ARJUqMF3FfeCffd5jHsbA++8/5KX7d7ibb6n5fBZKzOQQKCHiEVrnaZ2ncY6Qd1d1IYdIHEeMdVMBa3CVWpZ1ndr9lX3hqAvadfnGqqqp6goz3QMAfocsv/Y6zeSLfJVwryg+exSzdq/JOe8T7u7jjmf93Ex431p8vjq/3qY2U0X8J7/7e/CCrvEvJgQVivwGn9ShuYo18BZ6Hf4qmtt2YYHfY594n1uMP2HHc/oMvv+9a6IpLzw+9/EWK9hDj21arJtTyoBu7yIld/r57rzv+2hgyzQ+KRWkmpwHQroECtlMICapEPGIqfY64SHr3yRlSBlTGxwesaISlUaBlTYlshfkfEshc/HwCcOzx5wZVV/zXnn81oDNI1KydsqMtrYpKqmawkApmfPtGZePC4hBfIOpKpoY8cOWZDPLkzlGKnYevuofkDR3Zfn8iRf4rwP/XeAPReT3p6/9L6aT9Z+KyP8QhfX93c/wXJ8hphmiqBh3kSsqC7cFTmQyadKZoNuM+DRwcfaId978kGa2ZPl0BcYR1GmNcbti7DtSGPcVl/cJW0asTROoyODFULyhbQJx1mNFKEnVSowkKIYw1JTsGAdPTjq48DZN4B9DGiu61U1Ib+Ary7heYK1lDA0paVIxthDGOdvOEVKBj5b4s4ABHr79O3zv//f/Alh+3uO92fb8q9/9U4Zh
RQgdKQTi2OmCOCXSMAZiSPTDyGq9IWWteAtQNzVV7THO4qtq36mGwtgNhH5kvdny+MkzYowqNkJheXTA/HAx7RY1wR00C2pXsZgvmS9UVKOe1VhnmM09vjLYnLBFz663juI9dV1jQoOrqgmwASlnJCfEJK2ibEFwWDEsjg9VjEKEatmq7Z0t+LbG1QbrBU8LleDq5vrCdwP42y/iGheBum2YLxeacMeMOOUWExUJbkvBG722SsoQE4S0H76nlOj7EWMT4vT9hBDUbHxq8IsBX6mgxS75umrnd4xKOcoVkGo3t7+KH20J78vca4WkGBS9OX1SSsE4u58R755zv9BeA2ntEu7l6SnDdosxhrHvefjBB/ACrvEvLjLKsCnopfF1tO388TgB/ip6Zxx9ynMW+OB9eO9fwOExfP3r6rywi5eB34SDc/i1N+Gy15T+7Mc8288Qn/t451wY+0SpDHJQYQcom0JJhswRpQzkvKXkK30wxZQUJGdgoLAhl2FfCKSUSKLiSEJEZETMqKOY8RBjK0oaKHGAWAhBpU6DqzHGEt1I5wNuFTgftLPXPzsnbQPGRqwVvDPMSsAaaCRrT9WwN3EwixoDtKISRi5lXFT1u5ChlEg5fUzqt5xfnPHO40e0Tc3FzRu0Tc1Lt1/i6OAIa9zEUf7JW6fPgmr+5z/hWf79T/v9ny00ERZjJ4CI3sjlJYE3tOWcMEifcW8H/GrL2ekDnp19j6qZMT94jPMVy4NjfFWTQk8KAykEhblbi3eBwoCzeRLdn3QbEWbtSIlb4tgQxwZTsi7uuTD0DeNYCNGSki5G3ir6MCZLSobNeJvt5Q2sSayqHpFCTJ5cjM577ESfkXukIsjlIT4E8IaX/W/wP/4P3+F//7/9yp+UUn798xzv9abjn//WH7FdP6bvzgn9hmF9pgvntL6GkIgpc3G54eHjU1LKxMnW6uTkkIOjBW3TcnC4xFqjMmnAsN4Sth1PTk/53ts/ZBgHtsOWlCM3b93m5ObNiQvncdZyuDykqWpu3rrLjVt3aeYzjm/doG4qbt5dMltUzESYTbPXyjtMVZHbFpdVsUq5xlPiTQljgrandXirdKl2AUWwjafttipBR8R4i28s4oTatzhqqrq9XrH9zicc75/6mIOmxaZtWBwuyUMkbkaEgpu3uomLCRcTkg0lTpugkNjvFIGUMqnr95QMRBhD2NOGBLBi8LXHe3/li2vNvpL1Ew/ReTeh2a/mtjqrvf6qryfPa9JZ06Z3J+Zhp5m09W7fav440vnjpgklZ549esTpw4dXx0gr3899jX9xUVB157dQYebX+OTEexOtTeAnEZD2T/nDH8I/fRfufxVeeeX5xPsa8Coc/xB+80PY9Cqw8QIT7+c+3iXDuI1kbzHHDawtLgo5+mkTOeFoGODapmw3aigMZLImsh1WYI/eDvtEo7VQpfr5zpNCT46X5BJJDFrlmjkinrXpMbZH3Dn2oycwFTdoTkUceAezqDanC2uojcFXKoVrnaOZtzhrWHqhMUI1BppeRZM2XUdMgc2TR4zlIafG8sQ4/KzlvZfv0i4W/JVfAm8qmqrCt+3znYxPiC+dctU+dpvuUjDo+ygbKM+mFrNk7JiRABRDDEfE/DIl1xhZ4r2h8RGZdJVTCgpusRaLwc82kM6o6gFjHAkIYTevyFcvYrcgFZUSkzJ1SVFQF0UX/gLYwtTeEygGKYWcHCJlAghczQAKysksYhDxOC8U95TsP+JF+ZnEmHh6dkHfrRmHLd3lOZfPHkKByvlJqUVbNZvNlvV6rcCvoqjEdl5TB3XGiROYaowjlII3ltnikKNsuHcvMIaRbtiScuLw+ISDoyMomZyVJtH3PeMQyDxh0w80bcvl+oJm1mCrVyjlAFd7Zo1qsxpUSabyNbaZKfDCWZyvEHHsuMBT83wCchUFuxe0IqsqzGRKKM5M4hGaRPSa+mIaerkUnj054723P2TW1Bws5shOUi6rCElV1ZRYyFFb
vKEUxgwhF2KequAQEZOxpUwAuYK1O9qPOgdZa59r+Wq1O7UmRKZK+Pqr+zjw42r8sJsJ6xPxfLtsl3x5vqKFq8S7+14YBlbn59oWn9rUYRie+7kvbUTgoRb+ZxQuKcxmiRs31ElRowDnKOq4RSvin7CURuAZlC1cnBbOY6FJG26WD3DxXDU4tgUOzuAYBqvEpDUTxw3VfD5C2cJP+cVpO5ecid1IyapmJt4gjVP9Yj9HSoNxDWLvqOBN6NV4ZRjUaGM3HhIHxk9qatp9e/5uLIDTfWhKE5gzXQMvFSBObnWWXAQphizjpN0+4Q0SmAjiDdFXgJCtCiOFIqSYcaiWtLMGXxzFCsVU2EWDLYmTRYWUxDApwYUEIQnZFWI/EDCcPTun9k85Wi5x1n2qA9eXLvGWaa4kuWCTtuX8jtf4vsADKJLAJIxJSAXZeIbu22z772BtobvIVNUWn/6QPHtGmdxvrBVmTYs1hvroI5w8AukQaQjjyGazZgx5rwKkgOA8tTcn3tZkVm8kUbvJ+mp6zSbLNZoOCBZSoxfJhNKO2TFmTxZDcp5sDGZW09SO6H6P4P9PlM8KxviU6IaBP3n7HVLckNPA0wfv8sGbf4QgHC5vUfuaw+WSedtydnHJ42dPdAZetJVTLWuqwzmSI3NRRPHF6QUxRL7++jd5/eXXSTnzzb8UlKsXBlJO6l9cObrNitOnH9B3He9+8ITL9Za+f4d+6Kgqz3K5YLlc8u/1f4vXXr+PuXXI0e0jTBFsNgiOZnGEzOZk44jWYiqDdVoBlt31Ms2YDGBMRDBI6/HVxFOdLASLT/veTeY5VssLjRgif/g73+V7f/QW3/pLX+ff/du/oVZ9q5GSClXdUh96xu2gmxVgnQvrXOhTUUH3HCFtETFa0VqDtY620XmvTJWwr3THvluQsIJxk12fnXSsn2v7TmOR3dCg7NSnrhLo7hLWunf6zDyfdM2150yTLOXOIvDy7Ix3vve9aaGdwFnhs6ok/4JjDfxzVXH9Y3RC+9Wvwd/+2zDbg5oz8CfA76BCGn+bn7iU9sBvQfkhfL+H3y7wMk/4O/wjDgajxJ53gF/r4TczT1Fi0jmwg1h9DZXreIwSjc5f3Dv+qSKHTP94RewjWItpReeuYjgwFd56Dm6/wuzoFnF9wXj6kL7b8PDDD+i2G1KY8Ba6jUQ3fZE9oGC3CZxgtDEWJPXkPFCyjrJ22IVces0LuQfjkCjIuMM7KLXJiII2U9sgzTFeDHM82TiGGOlDxEqi2SacMWyblsp7FreOOXjpJidV4dsHgaVJuOESGzv6dWR9FrgY4Y/PVmzOtnyvf5M333nKV19/hV/55eo5FsEnxZcu8WoUBTgNBQYmbrIgo36OyZp4bZoWFkPJLSU35JJIRBKQgyWP6NCbjBWLiFPuWBXwNk7tMfVSRLT624Gmy1R27/diBR2cG2HngiPXlrEi5to70OdJTufUSq+ZEm/ylGLIRT8aazBeMDaC2yAvKPEiO0EGC0XnyevNFiOGpgqAI8ZMTirooVSgSC5JEa8yVY7eYbyjpDzxaoW6bjg4ONxj4HIpjFETr0w2cdvakcMlW++YzdeMMRPiSBnUXnEYBqqqIoyRGDIpFnJSWbYcEzkmPdVF9g47xYDJVivXsksq02M6bwgqk3hNMEKdnxJlOmcTkuDFHOdPiO2m2z+KlP1GAUCsxXjR2a3VEUcxhrx7nbsnKfqfXUtODJMl3y7xmslsAnZtveeqhh8p6HfI9Os/9zxP+4oidKU2VeBaRS17ihpoCzGMo44vpsQ7DgPDdnulP/1nKEqGfqWV5iVajN6+SJTN9loHIEN1Ce4U5Jir9Ph8pNQzDJmyAS6gnF095yJH1t0FNuj3OJ3+4BbWPZwXTa4VSmJaopIdI8pMuq6TFfnMgpWfPwQVwPAeVzV6wHLCG0vTzqhczcGNmyxv3GWoa7YxYGxNM1uTcYx9r3r1GLK4Cb0XuSKP60O4Js4y4Rq0lf98
G2b/2R6CP93Zk9Tt3mrUGHwzo6ormqahdZYURkoYSKUwxkxCNSESFpeFxjiiBfGCdYlGKnxKmCCUKjPkgpOE5EzfdaRkWW+2dOOIyz85tX7JEm9BSAgRexoovx8gCqw1uVqTsCZibcD5Dsy0L0qWyoLUESNgRXCmpoz3iZxQ5H2Qj5DsCLZQvMPMD3B1RUyJkhN1Nefm/JAw1gyjZxgDWSxpOqE7s74cLaU4nAvK8xWwU1UsUihFiNmQkiEsLJev18TGMFZCdEIsnpArzEqovy+4HmY+U7UZ5NeI8r8ESSjI8PPFfLHkN//G31RzhjDw3abho7d+gBXD7ZNbzNs5TV1ReU9dDXi3QCSQyoixws3bN/nq1+/TzBccnNyk5Mzh4SklBL7zna/xy1//GsYZbDXJIoZR+crTwhzjSNe9QUyBi9WGYRxZdR2XfUccE9t1wBrHy6++ynxxQM6W87OO4fyc0/c+IPaX2HFAcmKz6VhvtrTHC+788qvUixnL4xOaejZ51xaMsRinNASMohQVirejK0WKZL2+xGLNF58YYkp024HsMzYr7SdWFTghWYdYj+1Hmph0prvtKMMwqfu4fcVuxFA3Nb6upvdn9khmmRZDI3oeUkrknLGT9rP6+2rveDdGKdoqYNe0A63jVD7STRrN09fLFap510LbIZ23qxUfvPMOQ9ftE3K3Xv+ZTLqgKfS/QhvIe27Nw4fwj/6RipuA4jR+6bEyjXiE1qc/upQ+frzmX//rnu7y6smeoOvI06fwX/6X4GV6CtBx8go2W2UTVcBfQRWhb3Cll/Xf4PlE+ybwB7yoAdVPDj+vuPUb9ykPjpifbsjbkXzR41vH8TeOaZcVXz054d5yyaMHLd+3gh9GXrtxlxhGHj9+h7PTjyhZSFmdx8oQrwCGKYMVpNLRTO6CAhSLpZQZMDFN9q9otz2UfWreJWcB7NzhFp6br9zjl//mX2N5sOSlqmJpHe8+PeftR6dsL8549tb3iEPHKkaMsxxZQ8iWjYOxKbS2cHsWOKiE3BtysIzOcvhSQ1sMj88i3eUznp4t+P6jZ5g/exVvARKyzdhnSeHa0w2tBgoJ5yJ1PVCAMXtStlgB4/M0vxPlg6Vjcmwp8hhkIEkmJj852ziMqzBGNYa9MdRVxTh6XGXB6CKdpypix2dPyZCzwRjlKQqo/vK1Vy/oTDg6w/aWZ1xatq0lVIZUHLFUuKfAO4XcFWY2YHzC8CqmvPHCjmRd17z+ta8x9FvCOPD4g/fwfoYDlrMFy/liD4RxzmNtrW15MsYKi+WCGzePqeZLZsc3IWdaEYiRu3du8tK9m1hv8Y1eRmPQDsJeY3nfFsrEqYV9OY6shoHtZuTpwxU5FaqmxllLzoluM9Ktei7PzojbC2QYkRi5OL/g/PSc5Z1jlveVxL88sljbICURcwRjEaMOIiKAUYQ0ebc3TqhtlxKl5DOImX/eyCkTQtCZddFjvVPlyqi4iliLn7VUIowpYVPCWAVFPaed7B1+p6cs5lp9K/vkWyh7IEsuajSZpyR7vcr9+Nsu1z7u2tr793BNNOP660nT7O7Zo0d0Xwzv9OceI4plfi5WK8pqdXXIDFeWMayB7+9/9Po8fb2GH/wALi9/9O9sNvq95+IZzyGpZmjS/SWe/9r1FaKgm4U//Anv6UWGqSyL+ycMMsNUgXTeE+MGv3Qc3r9Je6Pm7rLlfluRS+GHDzskJJqjm5Sc2LJinc50dhtVXz2bACFTTIKYFI/RWiiFFAy5KKMkF5X9MpNq1P4YXOMN68y3wNS+tlWNmzfMb97ktW9+g+Mbx7zqPQfWkt5/wjNZUMQx5u8xDEGPaDKY9ZbKrBmM0FmoLAwnwslcMHEahXlHezijEsuzizPSsGWz3fB4tUHsn7HEq8m1kA4hvWwhCDzKSJ/3s91MIeTpjclOgehqMbWSsWbA+g+wboV1G6xtdRI49pATIQZcrjC+om0rbTekTBZYHF1i3Jaxg9AVYpwx
9LdJUpFuCrF2yDpjVwkjCWt17iuTcH84qtgczDAHz7h38k+gHQn2N4nma5wF4XGMuJxpfKSqFb4uqehFF8PEY/78EWPi9ImCq8LQsb7QSiSLEGNiHCMpJ1LOdN0w7RoMVizGCuRICh1xEMLWk2Nic/6UPAx89OBD5lULFIooD3gcVI/VuArjPGIctqqnjQ4gcLlZsdqsSNEQRodgsQjOWLZPnnD2+AH92Qc8e/M9UreCMUDMPFlveXSxZt5FhsWbzA7m3Hm2ZXF0jHGCOK3UmqbVzYTdaRAXvEMTrpk6FN5ibEUuO1fzLy5On57xR7/zXRaLOV959TVm7Uzb5kU50MM4Klp5qkSdc8rDtQbr7b7hizBVm9q2Nm6HUp5awPbKRtFMAD8RppabYiSuJrtXsRuJiOz0oX8UdKabS0OKkScPH7JZrfYI5n67/bMzv/0Z4xSd5u4lNAqqDHnt8qkq+NrX4OjoZ/87LwGvXj0lFVrpfpnCiXDiDeHQUQTSQkgHhrp13FnWNN5z+aTnu92G9955jx/+we9RiuXk1mtUdcv84BZ35w1xmxnOo1azTaLETAoDOY7q9FM5Ss70bqVGNbkjln4H79MXM9EeZQIG6XW6r5AAuPG1V7j59Vd45aXbfPXomMN2xoEx1CK8dnJIJZaHVWbz/QMuS2ZLIpBVXS5H7ZeJEBFWvdJHTcrYmDFVxjeFYorisa1l6AbOP3qso8ufdBy/oPPzM4eyegr5RMjfsJStYFYR+kSRSBFNvDF5kIKxmnjFZIxkjIlYO2LMBle9g7NPqWtHXc8YxoHL9YoUA2OIuJSZzVvmyyP6EOhWa7JJHNw4Y3EUGbdrxs2afnuLp49VYDu+Kow3LfK2w5wmnBG8D/s5GxjCScXqjSXL2Tu8evP/RuvOKfEOJX+Tt2PifAy4lJjVHVUqeHEQLTJEHS69qMQbIk8fPGO7uWActlycXZBTphhDCInRBjZ9zzCM9KN2EIwIGIu1QsmBNK4RAoNJpBA5f/aAsO15r5qTeiHEga67JKVAvz0npZGqWeKbBa5uaRbHGO9oZh7rhMuzp1yePcVXcxYH93C+UZS5WJ49fsyDP/g9hrOHXLz1JqnfUEKipML7XeSHXWD+5JLz9SWzecVLDy84vHlC1VbU8xrnHfPlHDvxjq1zVJWhbZ068nirxhU4nNTk8sVf/k8ePOPpo1Nu3Dzh+OgWvp2rpGaCYQxsh4EYVPlLZFKDkqnqdPa5TCmTZZ8AzmonKDMB+6x5zr9Yf+GTMczPz4G1myTPPZ5HZO7ci2IIPHz/fR6+//7+ubgmlPHnNZ4A//T6Fwra33376kuHh3B8/PkS733gb3JlxQCfSlD6uYcTuF0ZwrGnzC051+Q0o3WGlw8aGhF+8INLPvjhigdvfo/v/dZ/QeUbqr/0t1ge3+Xg1buc3PkW42lg825HHjM5KnBzDBti3EygKEcukcvFM0LuCPkCk1f7mW8pKOZkh2ydFNJ2tDmJ2ny+/a03+Na/9xu8spjx7ZMTFt4ScyaXzOHtmm/dvcnbdeH93zvB50zu1qQ4QsmUFMjFEEUlMi820PfKVrGpYOvCrC2ILYzFUKyl3/T07z7g0zb0X7rEa22h8lCSYM4dpdfWJgVytsrhLJOC1R4ABY6ifJ4yoP2ajpIrshwqZyxHpQFlVXPqtjW5zCbZyTXDOLJdr1XRKQ6UnIhBlVhKCcBkwr7JGJspW8jJkgqE4DGmkCfR/ry2mGcF1g3j+DrGXUKpoXSYITPbJuwm49NEFYmT2cBQkDG/sMQbxpGHH7zHdnvG0K85P31MTANkw6a7JKWRbhgYQ2QMIyH0FDJiIrnA5fkFjx48wfqKql2rDV03IllIRT2RYxbGoPZ06+3AOHbINiO2w7ga32416VUZMZnVxRnrizOqes7B4ZammeHvfw13eMR6veLp06cMl+esN4MAhM8AAQAASURBVCNpjKppnQqXIdGnQhkzT1cjTSzkR5ecD+Br
T916nLM08xZrjUopWquJt1EBibZt8b7i6PYh8wNDiF/8slZKoaSicqEFQKaZ6eTuM6GKQROvsQaH3RvV76vPXVU6LS7GTiiyCU+yc325Hs8VrlL2P//cN68n9o/RhPrtlm6z2beZx1H1b/+szm9/1piGFJ/4xQaV0DhIUE854PJSH0+e6NL1WWOnxfYFge1fSORS2I5B0caTProYNTopRSl9cRwJmy2hH8gxkk0khi0pblhWt1getmyLI3VCCpkcNIGqzr2dRkXaYp6FTEwzQqyIcaZWlGGiccbp/slZ12bKzm0R41Wx/Xh2yO1ZS+scp9uRFYVhvSGNI0eHc26cHGCqitmtmyyM4eJxplur9nTJBYxM7nKW5bJl2XhIEUmqLSDVjCyC2IAYpS7mncvMT4gvVeIVgaaGg2Wh2zji783JQQhbUXu+IKQ0AUYU1oq12l6u6w6IYJ/h+FdQhBi+QUpfw5r3sPIBOSRKNORc8eiD22S5zdHJRxyevMcwDJydX5JS2mPlrCjoNIw9IgGbIv7tETKU3jD0HhHPOGoTKmdHzoa0MdTvRYq7w5P6f4C1AVstsFZ3Qi9PLj0mOCiGODpigTJmZPjR+dvPGuvLc/7Zf/4PWW0f0/cXhGFDv7nACLw3rlQzuZTJRUjBMhOXCwS++8c97/7wXYxxWFsxa1re+MrrHB0eUExD9DNChCF6+j7w4PGazeaCy1XHatOTM8QopBzZdE8IccvQd/R9T1PXHB0ec3R8wn/wH/xd6q9/i/c/eJ9//W9+jzh0jJuNznxzJpTCUAp9hpITDx70iBlwH76rlBoD3kxoS3clJGGMUHlDUwmzpubl2zdYLJb88l875pU3XmKztfy8ijXZoQ+KUd3pMKrW9cSPxgBW8M4h4qe279T6VcryJKsn0/zXg6g5d9FWxY9WvDu+43MJmI99coVWdhOYa1fBPv7oI9767nfZib6XnBm+AJPYP8txF/hbKOp4gd4+f/qn8K//NQzDF+Kp+wuNMSTee7oiF4cUtRk1zuCkMKREotBdnNM/eki8uNC1MkXG7jHDNnL78BW+9tVDng2Jd14LjKmQRk28aYjk8WqnUkrhYAzklEnbQOoCpU/ki6iz4Uk/PKeRnAJSEqQBg+DdHGsrvnXzFX79+JDzfuS33n3CZtOx+t6bjE9P+dVf+ya/8dd+mTybce83fp3m8pL1v/gthm7AFUsJBZzDL49pZw3f/PpLvHb7kBQScYh0MfFgPdCFiLv0GLcmlWuU0p8QX6rEC7uKtxA78FtDSkIqliyFYi3FTdo9xkwtBsFkte5Tvm6hlBGw5Nwi5YAUW5KtFBiVIOaGvrREZtSDpd4mhiExdqpXPIlU4qylGKMmCFmBANJlbEzkJGrHViAVlanfVbwkJo9KR6zukk2i1D3ZjYgIjVECdzb6vohF+UZRWyQvKvHmFFlfnrFan7Ltz5CsVXsBxjhOomA7+H3ZzxllWnj7biQmVSvyNkEyiHH4qsX6Wue41iPGA5YxJLo+cLnecn5+SUoQQiGmwOXqIeO4ZhxHxnGkrivGviOGke1mRRgH+l7NI8IYCFGdRLpiiGjFka1O8mMWShbKGCklYMlYkqaQiV6zE+z3VqgrYd7UuGzpDjLrdWAc9dr6eUXOmXEI9P2gZhI7P9t9q3aqeM3ONehapWvheit431YWwaBOUJ+olCO7hP+jc9ur7+i/r3Ntd6+p225ZX17+uW8lf56o0Ip3qWqI5A62F4pa/qyHrUKr3E/SxfqyRQaGmCb/r+maVEF3ZYCUTIpJdclT2l19lJKgBGonHLaewVtaK9hUiOPkRz04yrjTcJ805ceakgrRR5JP5EpFefaJNxdSGslphJKQqHPgxi/xrmbZzjjwjnU3cLbacH6x5vLRM4bHTzg9vcd6GwgIs4MDorU0zRxna8y0rhnrMXWDbVpmiwUHBwfEkAhDRMaICwZLwNUNrk5IVtGYT4svVeIVgfksc3wcmVWRViJj
MJyvLUMyDC/DcNtSJKu2Z1+o3szYFerhS4O1L5PL39D6Qk4QGsb+a6y4TYyFri8k7xlev0c+mCOXQn58DGXElg2SC31fE6MjZUtOmsBDOAEslYtU1UjJFrIlZcs4KhpYQVbgXMT7iJGMs1FpRlOrT10CFRE9pkwuonOOlLAJ7GfZLn3GqKuKr75yjw8fw+mFn6g1ejPYSX5RJlTzddSqc2oYb1yFOM9i1nByfMTh4QHf+rW/ws1btzg4OmZxcMC46Zm5ltXlOW+/9QO2m5Hz81MePX2EtTW+OqLonoI4ARUQpdlcrjcY63ny6CNODg/oEzS3vkLpe9YX56ScqeuG1nptvRqLcYaq9kBhtTpn6DvCuGYcFPAzxqS37TRzNwZsDxddYRvPWCwCr54O3FoL/bAz5PjiY7PZ8Lv/6neZzVruvXKPk5s3SCkTgrpW7ZKbdRY/VbOgCdO458+RGhTo/FfylDSnRD79kibxHbp7iuuJwF63XQRWFxe8/9bbjMMwVcr6tb9Iup8xetSI6E9Q6tBnPGw18GsosOoWX76Z7sdDADtZbhqZDFTGQsnCdhBMKWSx+KrB+TnOHeC8w1eHVPURJ+2cV2Yts6A662Mu9HUi5YJb6LQQmND40MVMKoU+ZPqQdWwzaKIOWX8vpURKygWWlLAi3Gpa5s5x6/YcpNBdnvPBH/4OT58+4/KttxhOz2hmDcXf5fCk5avfOKYc1pwdf5XhuMFWHtdU+EVFe/+AdlZRHR7hqznFRKIJWJ+ZicOkTD2fE8fEGDPdtAb9pPhSJV6AqirMZhlPxobEOApdMJQIw01Hfk1IkkgmIquMe087oympsEApx4gcar9/2jnlVJHTDWIU+sGQW0NYtuRbDr+12MslzgaaeoMUSOOccagYg2UMblJHUonIutoomGqigsTop4rYYGzBmEJdBZpmwEjBTVSjmISUzdTGLWT0gpKsiGYT85728qKEHZyz3Do+4HKzYdvHaQaii7PZzQynCqvskIFiqOoZxjhwHqxnebDg9t2bHB0fcfe1r3Pz9m3q2lFVltpVuFEUtFUM4xDZbres1mdU1ZKFO9gRxEiwF2PIOdMPA9u+Y3V5xuX5M0IGv7zJaDtir3PQZnZAXTVYq+LjlXcsFjWQMTg2ckFHIowbRa1nSLvW6LXjaEKgTxvmI1xsIv2g9qc/rxiHkffffQ9rLXXbcHB0qC20lHS3v1twJlDVvkI17D/fVcha9V6nU+hl9bwPrpooPF/pXv2dj89zx77nwfvv/7mhBf28owTghz99s8qjoKpvXn+u6ePPrx/z04XKuYI1Ex0o6fU2Ru06FQzWVVhbYWyDtR7rWpxrWfiaI6+UuD4nxlLYOPXGrUWodgUBOk9eT6OmTSl0Ralxqej3+pxJuRBTnjAUOnf2ItxqKw68Y+FVWz70W84+eJdnjx9x+fBdhotzPvjgdZrjM+4Phb/67TvUleV4dpPFHGgc0la4pcOfNPiZw7UzrKt3tT7GZrxYyIV2Pscg9DGxHj+dUf3lSrySsf4xVfMmxMLovoLNjqoplAhxHeHDQkzCEC0kQz4RykFGLiJmHckZUpJpodJLWLm3hdAaxpcrsjNwbpGVoZxbUrST/2gDhX0i3T1oC+VOIVshrSxhVMqTmywJvR+11a0KwMRk6PsaYzLeKdVoB74j64y6FMFM6ARr1cLNWJU3e1FRVTWvvP4G7cldXttutfqzkx/rZAu3F7QvXKt4VYRCty6GpqlYLmY0bUsJme3FmmAEZyBsB7pnl1ycn3L67AmnZ0/ZbhWkFm0k5DRhesykWJ64vqSEkHj3g4eM0dJ1GbdY0NQ1x76ilMK8nuGdp7IO7yqsgHOFlNSppw8DqagdoAjEnDFF9Y5TKVq5mylxFUuI8PjhE95+822ePnn6CwUK7YFSplCK2gDWVUVV1wBTl4SprcxeIWxP/dkr+5SJOnVlpXklrnE9QRdyVkvIRw8f02+vFNLWl5d/7mlBX2SM
qOrjBfAhnz0BB+AHqGjVLpaoEGXzib/xiw1nhKPW443FiSEWde8RA9UkpBPuLrBAqM6Yn7b4umb5+m2Wt25SHc4VzkChLnqvGjJZCnPvaZ3DiGCMpQB9Vu34PhfGohvHnVf4OOnKpynxCoohdCLcrGtaa5lJ1iInZsImEjcZUo0xC0rKhO6UPDq89bRNzf37NzC1w1YOU3t8a1neralry722YekdTapoQySUTN1kYinkkMgps4kJa8KfrYpXyHj/IU37BxBqBveb5OJpZhkTC1wI7hSGXigrS55B+suWcgTmewP2UucDSuEqlKQJJWUhFyEsPP0vzShZqH8nYh9nirNE70Asw6hgmxgNOYv67CahzAW+ncFC+ENHuRSqKuLqqBVuDTkbQvCk5IjBMgw11ibqetQqc5KYBN2VARhRpa1kVbTCmoJ1Ly4R1E3LG9/5y7zhFCVmrYrvGCPYiecqRvZCbDsNVCsGKUKKKiWZciZMQAZCZv3sHIkJiYnQ9WzPzjk7P+XRww95+PhDNt2KGAJiAyEF7USIBVOBSSqSPAlrjCHw3R+8w7sfnnLr3svcvvcKHkN7Sw0cWmPxYqhtReMqBWoMK4axI5bEZtziSFTekawClVIuajyfCsY6nG90Fp8LQ4T33v2QOFo+ev/DX2ziNYKb2r3GKCm0qWvqptHzIZP5Q1ZxeIydBAJ2c1gFPJVSMGIw7up23nUzJmousNtYFcKQeP+tt3n84MotiFL+rUMrv8joUa7vO8BPcxQH4Pd5vrq9j+pzfDkTr+H2oqKe7stYrlS0jNFenXv9kPm9Oak949HDOW424/jbr3B09y7tzQMMGV8yTclUJTOTjAgcVZaDplJrPaebz5gjhUzKe+Mu7WaVMsnbFtJU+UrZVeNC4yucsXRjRzd2pJAZLyPjRYLYYo0jx8SwfkQcaiqvlqXf/NZdXn79EO8c3lm8syznFc4amlLwQI6JGFXSMjFV311PDJHzEHDe7F3Efuxx/ILOz88cIll9Vm3WCrAUXKWeqlEmAUAH3okWj30hbwomMFUFyuXdtdVUXcpqpRmBy6nbO2qbd2fIrg4ZuwSkibq0Qlmi1iCeqXITNWEvu8fE3p3EuIvJxMqSKkPJYEeLRKPzXzMZXBfRrdnkapRFRS1KEdILpbjorC+MA6lEnLdI7SjGYM2kgJSLaknnTIoTuGYqz0vM5KTbWTst4rtNRBpH8jiQx5449qQ4YsTibIX3DakUXNVgndeZY7TIpIkt1qkVXtZqLcRAP/aEEKa/px0BQWc5EcFlPT45xUnHWWlXIhbBYmWHUI57FK5WfR7n5wCkqO5UepzTLyTRlFLYbrecn55RNw3L5YLdLBoE69RxKE8guD0oWZ4XwBD25n0T7aHsQVSy41RMP9xvO/pOfZhLLgxDr7Sg9PMQGfy3I3aUo5/FNejjZ2ELPOTKmejT4pwXhsf8TGGZ3L1QXu/OMGPXRcM7KhH6oyV3X7mHbxruHi05XDQsKoc1gjeG1k2uQuja0lhDJQbjBNdMHbmioFl1utrBroBSSINVbfdsmJxMdS3G4F2DFU8wCSu96j14wXhVipNcEHH7jpgxBmsNbVVRpOCsUhCdN1QzjzWCHRMmql69kStnNNA/nNERXmWUsvqT4suXeCfFHlcJrkmQIrO6EEvBBqGKjnoQfC3EDMP7CmGXXhBXsCbg/VoXbTuBZ2JNzhXm0uB+L0ER7KCKU9YGjBl2ulfkSUM0JUN+FfgqUBVFQYxAdpAKOQh5MkswMi2BNmFNId7zjF+p4KIw/oHDbDN1PeB9nIQ+JoMF0SF8xDBOLe4wVnzcIOtnj0IaBx4/fJfz08e08xkHh4f4qmJxcIjzXiumXAhDR79dUVKEoUNSQnLElES1OGB26w7eOw7bBm+FVXfJZnvK0I0M3ZYwbpi1C44Pb1OngT6NiK1xzQG5ZEIclKvnAWvJKRL7LRRhGDpS1oTUdb3y+CZliDEkJBWi1ZmztpgC
IQYoFudmVOKoMIgEchn1Jp2Ooa+XzA9eASCFESsG61omh76fe+Scef+H7/Hoo4e89vpr/Oqv/SrOV/uxiPMVzjlCiqQwwqRUdZ0VJLs52HTuUkqqViWCWIubgFc5Z3IpPHj/A979/pvP0YL67s8Zz+XPUezciT4rn7fj56PTDFOynTRhM5nKCDOr+smVVRU6cQ5EePWNr/DqnRtU1nL/1g0WTcNh21A7izNC43ZjEX1ubx3OGszMYm8K2J2DEdNGVO+Rgo4ey5nVVkMxyibZv0iLkWOg0fs8bmhqS33kqceKkjIJh6vn+OYIXy+pnKfxnqP5nEVulEfqLKaG6qbiJcyTAVZ5n+BLmWiYOTPmTJdVknbpzKdC2r90iXenpLMzMzYUrNUKzANlEm+PQTCpkDeCRKbthrKMRFQYX42Xp+oSIBakm0BENuvPXdPr1WMl7GGxHpgVdeBJIFFRpBOfiVLM9Hd2L13nFeKhzAQGIVnIRvAYchFMkX0lAzrHSw6iUfccSvfi0m4pk/7xisvzZ8TQI2SqukaMckFzUgJ66Ld06wtNvN0GcsKVhEVFy+flBlYMtYfKGXqTkDxACsQ4klJU2pGvqZ3RDYr1iHOknHHOU1LCOItIRRx78tBPr3ECGaVEitN5meycyhghJoLLhKRVYJoEKIwYvPNYMqZYRHY2dFfXkrWepplpq1kcRlBdamMnP+Kffwz9sH9Ya/HeKcAKppmvYPJ+ivujMSXZT/5WIU6qDSlrVd9tNlxeXH52fstfxC80AipT+aWMsmv5aqes6FJ9Jf4hOmM1YsizllB5vDUczxrmzlFZHWxpZaj/lonSaEUmJzoQW8Cyp7wJsvc936/pdvrjGKTYa/eKA/EUPNY47KS85usKV1dYH8ljwXqPrWts7TFORWm8OEw2FCMUa3a2wYgp18CK8tzx2H3YHQtv5MfcuFfxJUu8QozCMFhiLBQZEGuprCEjGAcuQ+0LlUvkVOhrVYdJwRKjV5equFClnaCz2hidop6B2nQIBW+V7mNKIUfVHTWTu9BOFMg8AtuX/XxWUsF3BdfoLFblOFVFC3SjgBT8aaT5g0DyhuGbjlIs47uQnnl8NeIngYqcLcnB+SsNq5OKl9vf4xvLf4yVEf6TF3A4Syb0K957/02+9/0/wjlD5S3WOmbtHGsdKUc1tw6BMHSUnDEpIsBBO2fetLzy9a9z9zvfZLGYceN4QestedMwnDtW24Hz1ZpN14NRFanFbEbVVoxj4XKTyBnmt15BBG7dPObWjWMeP/6Qf/O7/4K+77GmwuApoRC3SmdJE0evRCXLF+fJXmlF4izFCDdunHB0sqBbnbG5iJp4dxKI0y17crTkja++DChyVwTu3r7J0cGC1Xr+CfzWn18YY/BVRVVVupsv+y3gJJ0qSCnkqNzAGNPe+3b3k8YYqqraA6kuzs55750fEsbAzgLw8uz8L5LuX8QLiZgKTy5GmAqOujKsGos1wsxbrJj9NexEaK3BURjGkZwiLo5KZWOnKc6kVKXJ2hiDBMH2k/Wn/kcLpN0+WUCKYINB8hVPfZeiIVK4AFnjzEBd1yzbY24ffwtJpzyO77Cpz5i/fsiNX32Jw1du4l5yyBLcJZhByKIjQIkFc6brvxknEKNVw5NMgVwwRVi2FW1lKTmpl/anxJcq8RYgJyEEISdAIiIR67zSXyzYIiSbsaILurFCSjAOQhgdMcCQCilDzIYYhRyFnAQjCe9GRDLOpCnRWuXkyrSrQQf0haJQw4vdK1OqkKszrpqq3OlCKNnsvy+m4NYZ/zTCDUt5oyJXQnwEZTT6t71QxBApRCNsb3hW92uqk4+4f/f/S2U+63TnU45nKcSx49mzR7z3wdt6QZSAEaF2te5KJ+WklNJ+xmtRDujNw1scLo5Y3LmNaxzNvGF52DD3jovW452Bktn2PdthoAg4b1ku5hweztlsA2O/JRuhWsxxVcX9+/f56uv3efvtOd/77u8TYtRZC1ap
CYOKS4Q4Kn1mUtMyOSOlqFKVVRGJ5XyBc3CWR7rL0+mE6K5JptnTYtZw7/YJItB3W6Bw68YBB8s5i3nz4wrHn0uIUeqQvWYhthPWkKkqmJAkezBJCLsZti4CdV3j7JWL0dD3fPjDd/+ilfwX8YVEyoXL7eTiQ6GKhlEyzhpGClZEkcalcOgdC2cU4ZwiKQsm7kXa1GNCBMHuE6+IYAbBbHZe2pOv9lRWy8RRFxGMd9OG0yB7z14UOyMdFHWzq5ynqeccLl5iHFouNo/puaS+NWN5/4TZvSXmyCAzTbqEsn8eyQXZJqQUJOkMt4i2waVAlkm72ViKN3tA7qfFlyrxUlRmbbOGPGZil0ASprGIVRs0ER3ui9dNvLVK1Rm9Jt4QDdaZCdGq1XAYhDDuzBR2ji/TaLxMlCFRUe1JROhaM2E3j52q1Ikrqm5EAJNyy9R6NiKkUFOip8TJU1IK4Z4hVoZsM7iGbAqDE4qL3Ln9b3j58ENe+eB3af5pj/sMO6bPGsY67t59iW9234YcKf9/9v4k1pJ02/OEfutrzGx3p/HjbfS3e/dlvlcJJClShYoBjEo1KQYIIQQzqFFJCbNMIaEUEgMKCRUDJjlAVCEkGBQSkxQpKpWqAaIqmyIrk3z53m3jRuvh4e6n242ZfR2D9dk+xz3iRufu50ZU+grtOMfP7mx/2+xba/3Xf/1XGhEU5hFEGbO1FpizfkYjCsMerI5YzlccntymHwuX25FHTza0Vji97NnGyFAKOEs2cLm54HK7Ztuf8+SpI0bDrndY5zm+fcxyuaTzlhhHckoIFiMOQ0GKZnNDUGJcKhrZaGVHCVahKNmu5IwzQjtbsFi0jMOW9fkS6z0YJWudbTdsh4HdbsfTJ4+hwK7fQim4ksi7JdvLyz+oQMTZ6Sn/8v+nk4vefOct5vPZ/r6JzZxy1uBI5an283ALCil/9vGnjP1Q2cvC2ekpMb4mTd2UnQP/BbqbfMn0v//SmVD33wkVROfYx5zZ1lHcE8s42cQ4RKwRVZITZT6bPbd06klXfWZT504bqbCzXMmksp8Oq+I/RmDhHM4YnIw40X1dRLPpKQYvWbHxrjX80c9ucfd+y/z+H3O6u8cf/fg9/vjogDvdDL9FJxqFGvRSK5SuYDoNAmZboRlVi6FU7YNUUaVdsYwlk8XsZ19/lX2vHG8Bdls4P4U0FuIuYkykXVmsB+fUqVrRzhSdTqRechwtYxBCgF3bElPBuUgIhb6O75IimDINRkcFtitrjiJ7VjPlqolbmdIFY6vwRW3gpjLspIAt+qWb3CLFk6Ijh4YSCzklJVT/xJN/aolJiNGQjLDpGozf8Ffv/gPeO/i7LP6TnsX/boPZvSRnIDrD9Ufv/YTbJ7coeaTEHskZUzPJiVyFCMXUE9cqdNnM5viuY354xLbPjLFnd7HDlsz2bE0/juxKgsaSB+HpxVMeP/2cFEZiHGmaBfPFPRaLFYfzlnsnhzStI4w9KUaseKx4yAUhEmNgN471IqykCtHvKRSFXU3OxJRprGG2OuDWySFxHNhenJNS4GA5I8TA8PBjNkPPZnPJpx9/Qi6Z7W6LUGC3ZlgsuHj69A/qeB89fMSTz59w6+QWB0cHzzrerBluSrlKTOp3ZK+1DI3DyG9/8SuefP746nmlMtFf243YI+A/qb+/vHD5+20Oaq9tJVkFdUBbdUPE2g1xpl5aPaabuiI0ey1Zh4cgonOnreDM5HwFp/gxpjKepSreWdFeYifCsXN0xtCJ0FWmsZ2y5ursnWjL3mJu+Wt/9Q36lLh7eZcn48DPjw75106Oab2ju8yUTayZ8yQaLFouPtJWzFWBRaIO47naNzLwNCfWVS4z1m6Xr1vD74/Vz5NVhKTeyjUOef05IQqwj7w0mpraKdCWoRo1MT3u6ln6UkWuMYifLYg/A0E+hxx86V69d96VcfDc44oFTH1YUSdXnN6MCXjZYlNEtgXZfcnrv4ApgcdrrZQIOWsLQM4U
MWCKwicKCWCs0uydc1irVPtpqH0uOglkIgPp+uhFUYrCpCmn6jTStT7TKtYBe23iL1naL+cNydV6lmf+XGEmc1Uj4jmt44lglutPSiEnlegsuXztBfIqbSJCxRi/UQBwdQrX3wq1RPCvypb//bOCkqH+VbFayLnaV0H3bWG/R+x7zEG7NUVroRhtC5JcHe/UA1S0TqolJd2Gqr8lK87JRIAohj0BK1clq0lQAzH7cqGUq/1CyzKauDkDrnE4ivbqGpW/ZDpO9jtH/V32jkYPqzqg63tU0defkFJ5/v4vW8ebjPhF5HNggzLmf4h2m5s79ndLKXde5AVer/e3shdeb3i95t/SXp/jr9f7pu17sd436ngBROQfl1L+2o2+6UuyH+Kx/xCPebIf6rH/UI8bfpjH/kM85sl+iMf+Qzzmyb4vx/59H4bx2l7ba3ttr+21/ZfKXjve1/baXttre22v7QbtD+F4/84f4D1flv0Qj/2HeMyT/VCP/Yd63PDDPPYf4jFP9kM89h/iMU/2vTj2G6/xvrbX9tpe22t7bf8q22uo+bW9ttf22l7ba7tBuzHHKyL/poj8hYj8SkT+5k2973cxEXlbRP6BiPyZiPwLEfkb9e+3ROT/JSK/rD+P/9DH+lX2es1v1l6v983bD2XNX6/3zdv3es3LvuH51d1Q/ZJfU4fsoSprf/km3vs7Hu8D4K/W31fAL4C/DPx7wN+sf/+bwP/mD32sr9f8+3F7vd6v1/z1en+/bt/nNb+pjPe/AfyqlPKbUsoI/F+Bf/uG3vtbWynl01LKf15/vwT+JfAmesz/QX3YfwD8d/8gB/jN7PWa36y9Xu+btx/Mmr9e75u37/Oa35TjfRP48Nq/P6p/+96biLwH/NeA/wy4V0r5tN71ELj3hzqub2Cv1/xm7fV637z9INf89XrfvH3f1vw1ueorTESWwH8E/M9KKc8MHymKU7ymhL9ke73mN2uv1/tm7fV637x9H9f8hRzvtyi0fwy8fe3fb9W/fW9NRDz6Zf1fSin/9/rnz0TkQb3/ATqc5KaP6/Wa3+wxvV7vmz2mb0Pe+UGt+fdxvev7vj7Hb9peoHD9jQvt6BSk3wA/uvbYP/lDFNy/4WcT4D8E/v3n/v6/5dmi/L93w8f1es1vcM1fr/f3d71/aGv+fVzvb7vmP6T1/j6veSnlhRzvvw78vWv//lvA3/qKx/9bKKvs18D/4g/9pXzNZ/s3UPjhnwH/tN7+LeAE+PvAL4H/GLh1w8f1es1vcM1fr/f3e71/SGv+fVzv77LmP5T1/j6veSnluytXich/D/g3Syn/k/rv/zHw10sp/+7ve87B0Um5/eBthqIzLKWUOgLx2lzUq1GjFFP/UKgzGwuSFZKXUigiZJFnBriaUHC7rPMaO6FYuRq1K+jjr38OXQR9z3qfyXUW41BgvLY+IpQW8FJfT/av8YX14dnCwfT+09+MCN4aSk48efQJm4uz51/icXlupNS3XXMRXd22bfHef9lDvmA5Z/q+J+erYerOOdq21WHUX2MiQtM0NE3zpfe3bfvMfaUU+r4nxshY/yupQI/OyPyK9+m6DuccNA20LSTq8wowAIFQfys5w26nw54nm54HrD/99IXXG+D27dvlvffe+8Lf+77ngw8/YLd78WHLVxOl2c+Tns4vnZVaz8urB+2fKPvH6waQSyaXXEeOmmcf/4othvjMmn+X9W4bW+YzX49dZ7rWy1c/qdTkglKHt0/VtXrN57pJy9W6Xe2JwjOn/JdUA0UEYw2CUKaZsXVtZdqb6szZZzZe6rGI1O9E9rOs9TUtInXO7JQg1b2vPLdXjmOkH77RVOCXtqe8tm9kX1jvydyrfmcR+XeAfwfg5P5b/O3/03/M+yHzKBVsyvioU+8lqwPMUihSSM4QfB07nEAydLuBZhiRnDEpkI1hO5sRncUUwRRh8dnAyZ+tEQP9TxrSgSU5IVkhWcPoXJ1Trye0DyM+jBQxJNdAgXYM
2Jgwv0vY96NebyLQCOnHhnLLkLwjeoeUgktx+qx1CHPB5EIWSEZ0SLSRveMvInTecu+gI/Ub/s///v+S//ff+4+eX7rfveh6A1hree+9d7h//961TaROmv6Sac3b7YZf/OLPuby83D/u9u3b/OQnP/la523qEPof/ehHvP32219w1CLC22+/zTvvvLO/bxgGfvnLX/L09Ckf8iEf8AHlLMMvM/TqJ0th/0tB/bH3np/97GfcvXsX3nwL3v0xrI3GsLsA/JLCIz4Ffguky0vyL34B2+3+teTBA8xPfwrW8vf/9t/+TutdP9d+zd955x3+8T/+x194zJ/9yz/jb/zP/wb/xT/7L77r2+zNojgfCAlDRsjGkI1graX1DhHBGYOI7Dd7YwRrLFCIKZFLZjus2QxrjFi8afR7SdyI8/38089f+ByfdY7/9n/zPbxrsdYTQmYc9Ho0Vh1nTCMpR5qmYTGfIaKBWCmZcQzEEDHW4nxLKYUYIyUXrLN1vdRBU6DErIGdbiI0TcNyuUTEkHPS5+dAzBFjDbbxIBBjJOfMOI6EMeCsZ9EtsMZhbIMxhvV2y+V2g/Oe+WqJMYYxBlJKpDEQh1FfP6X6fRoEeP/jJ/z5rz4h56/90l7KnvLavrH93vV+Ecf7jQrtpZS/QxWmfuOP/yvlN2MgDIlF1Gx3ip+mKFNqdCol1UgVsqa+6nBzRkrBotFksY7i3H6DHg8965/MECAeWnJrCM4RnCUZw2j1QnJxxJQM1lBKdUQiyjYrghRBTEZchE5Ity1YsBcB87SQbjeYu4ARsjUUEYox+6yZok538BYxwl1nWFnBWMFag7fCsrMMdqRx35jj9rVrfn29NTrNKHfg8tqjWrS3vPvCG3gPb70FIQhwBzii62bYum6/z7z33L9/n8ViwXvvvfeMc71uy+XymX8753jjjTe4dXKL+9znL/GXYNjAOw+JQ+Szz2B9WeDpU3j8mC3K/3/GzlBnEYDx2bsO0eLVrm15+M47jMMAjx7BxQUHwF00BPn7X/6xvvU5/tf+2l/70t0vp8x2veXy/PLL7v5W1jAhKoYtnoihSEMRT9dBd2xw1tC0Ddbaq6zu6ogVTEiQxkx/MSIFBhmvP+QPYd96vY8OupJCpnEW7xoMCYO5Sk5rIE82IIYQEiJgK2xgjQFrQUx1xuqUcy5IukLbpjRTFDbAGM2QC5ltv90HOFNaXCjEkhlTJJfMMI6klGooKxQSMQSyJEQSiJBixBihlEzf72rQkMilkMZAGsd6KJkCGCxi9NVe5Zp/cU95bS9qL+J4/xHwMxH5EfpF/Q+A/+FXPSGUwsMYOQiJWSgUNPsrImSoEEw9eQu4kvYZYkH2WbGpMI6IXjTFWL3ACsS5o3/QIAV1iEYYvWd0jiiG0WiWSk64VEgiYA1SpF6w0wUGIgUxidIZyl1DEXC/iNgnGfGCnFgyhuwdxQjRWnKF6woQrWHbeYwxtI3l2Bm8ExpvMAa8LdjksPbrIdzvuuZ6JOf1NtkCuM3vc7wnJ9MKHKAOmq+Fma213Lp1i1u3bvHgwQMePHjwpY97/nWstRwfP6/YdgZ4xnHHL34Bjx8DMcKTJ5yXwueon93bpt6ef6/6SefAZdPw+PZtxpTg8hIuLpgD9/lKav93WO8vt5wzQz/Qb/vv8vRnrKChU0bYkQk4NA92WNBzWQzee6yzV9laKeRSKDljJFGkkEMhbCaY8hvBla/SvvV6lwIp6X5gjUWcetRSiibupagbFgWDY8oYqdkwej7aGnxPcK6ul65TngKW+tNg1OkaLRcVCmMY9scjIhiRPUw85kjKmd3QE1OisY7GOTKZFGPdL9Th5wqHl5IJYaRQyPUYcozkGJ8NoIzhqthwc2v+2l7cvrPjLaVEEfl3gb+HXvX/x1LKv/iq50gBmwqSFTo2ZN0QRDeRIkDJ6vyMUGrdRmSCa9WR5rpVZmMpYtTxpoyUTEaIxkENcosIQYRoBC/CUa25bKwhGMPC
WY5wCAaKIRfYGksfBbmdEVNo5pbVsUcEhjcj/SphDkdMSZjeYh83CIZ0u4GFIxlDshZrDbe9pTGGVeNonaHr/oLF7D/HSMIJ2Nzj3W9f2Zp/F/uij/1ul/Y3qQl/+eMa4A7WBo6PNRigbeHoiMMy0HBGFJjNng8cRjTAGICrWqoAHrgjQg9cPHPv7/90L3O9RQTnHN6/eHXHocdsxdDaBiuWiCVioDOI1ZuxRh0vAkWdhMmZknVDL4C4Bpo5hoK3uonnCmW+agshPvPv77reOUGOhZxyDd5BzFUAnEqecnx1qrX0Y0Rw1iLW1TUy5FKwxpD3KEENxPf171IdJIg1NRveH7/WaWuGXCgkNNgR0WBb4WGDQbBiEJmyc9HH1fpwjPG5knLRcvG1a2UqH78IPPFt13y5OuKv/vX/zjUewbWrR5758czvX7zGnn3eF+4vV3dOARJTiXC6swIS159r9iShL/KGJnZHuQIl9f/CHjWQ+p0r+lH23+n+fUT46MP3+fUv/uwZHsy3tRfaBUopfxf4u9/08QK4ULCpOuFccDFQREgVpi1MUHOFbSukIwIYQ7GWgqGIJRlLMR7E7r+UJJahwqJZMkUKwRiCCEsR3jW6xL/0hg2WpYV3raEgpCKMGX7ZeJ6mTF4a8psNx97w5rzFkfl4ueVySMz6kcUuUtYO+dWMnB2xEdKi1pIby9Ia3mk9c2tYdZ6usawW/5jjo/8Vhh6TDZ2FrvnmhJtvu+Y/TJsD72Ft4cGDmmyUAjlTOOdP+AUh9PzmN4anT68/r0fLKjueZ2Z1wDtAROmY33TFX9Z6iwht6+m69kVfap/fihgWXUu2jg2WiCCNQZzFWIt1DusdYhwYV7O4WKHUDAimmUObsbaw7ApCJgwjOaavOYoXt+cdL3yH9S5QkpBiJo4RMYIxWoYy1lJEiDlDUhIZKU1RCxTB+4bG+1puUucXgifnRAyREANGagBTCjEG3ZBNxauLuoZSCikmSi6kEEkhqmd0uo+JMVhjMKJO14jB1X+niZCFnrUpZ0II5JKVZGU0WzdGj1F93fTzxQOkb7PmJ3ce8D/6n/4tDSAqeWzitjxPRhPKnkkyHfN1m2D76blfPDAwRV2pMaIJvhQgaSJ29cJ7wqEt9ZVyrbLV9y0CqT5numUyWXFXjOhPyREpWevqtS4fKtIwfd6////8f/D+b35BHscvHvM3tFdOrnrGSsHmXGu7clU3KVdwyQQrI1PNVfQ5AjZnZRwbNBsWwZSMyVevZY0wF1MdtS54dJbgDB2GABXW1tqIs9B4/SYyYAosgZgLKRtSiXQCJY2kksgmUnyGmMEURPSYJNX6c4XDr4KtShrLiRgLIcwYhgcYRhyWGDI5f8oXipM/ODPADAV3v5zR/GWWc2YYBlJK+xKtRaFUvZxbwGnK2oCVjpYDjGmZz2EYuPZ+BmgpFAYqHB0CjKNelHwxQr5JyzXTfFErRhAjSkuYfp82Nlv/tmfUTrH8s/mTiD7PWkPjHU4UMdJNi5dynDdlpcg+izcYMHL9Tv1RkTMxVrNVMdVh6GONCIhFSiFJBqlZas1KpToY4xxScmUkT05Ff5mcc8mZEmvSUJ2/oPuOqa9prcU5hxFDiUmzZNhn7Opkzf67FDFYO9WR1aZjukkTY+hmi73jvX4Mcs0JTj/NlEkaubru6kW4/4tc3ffM9VkJs+rQCyJTeBL1+ZUVPr2XAKaetqae7lKTt8JXO15RzBVSgJIoKZFrfd2luGeiA/imQV5wF7lRx2sKtEPEFc1IpUzLhf4UITmvmWzdN0zJuHFASqIZAz4kkmsY2haD0I4Bz0iwhmiElff8aD6nMYJ3GSOFZIRk4CzBB7FoO1Ny+GLpOsOiMxgp2Pq1LjOEAmMYGceRsd+wefyQmEfiLGF9RjyUBmxT8D6BUUiLmEjOMuXuKSVCyVxuRoTM5fq/ztPT/zXOwLJ19Jsd/fB/4PfSe34w5lGOxjt8G8cbY+Sjjz7i/Pyc
h8An6BiRd4EGr7/JkaqpvgErmfMuP6axhTffhHv3pvcW1PH/mEjmA+C0FPj8c/joo/0G/IeyUgrDMLLbDV//4K+z1sKqQSwaTRjqxaLZrnVOyXDGAEY3kFiZ93owGGtwOJazBlKixETZjeRUGPvAEF7Ccd6UFSGlQggJ6zJibA046paa1QVYa2lbdbxOqoMTA1lhY+t8hRYLKWndV9AgXow6wcZrwJJSIqWkJCejiFvnHCKGcdMz0Cvq0DYgwlhZ5F4sjbG03nMwXyEIl+sNKY0aPKSMGGHWdZqlVbjTWsEVDYhiiIoKPuf4bmatr4LH553u821sgmbyAkh+Dk8uXDnccu1v9YlKcy1kDFIghJGYRi0ZFEUTGt9gjbZyCaJITkVqnHUaHDhbM+s9e6ii0HpkWWpZICVKyeQwUFKk+mv9fq16homk+DJ2kpvPeFOpvbP6textD0UoO3giDSgknTE54VLG5mt1nIJmmRSyFYoRWiusGktrhM4LpkIMSWAgM4TEkAu2FJpSsKJRqRXwFW5wCLnUx+fMWuB87BnzSO5qRK17GhjBmgJZSV8mX+9NLqRSiBmoEF+IR4ThEGcFyZ6hX5PS8Ste+CnAKdduiWcpSteDoOnDfcML2hiFMmWGwsTffCMopbDb7bi8vOTznPkoJY6M4cBaOmnQHDgpzysARhjNDAv4Rltx1SZcaU5EaEyNttdrZa3u4dWrtqerns5Xb1Ot6GVkkhmzz3anr2nKfKXWLq9nvQW0jxn2tU8R7SN1Tmi9IZXCmIWSIKeXk5nfmBW5tqPKMzXX66RkwWCmjJdcE2NhKuVO/OCpF9qI6gRcd3DWWow1lJJJ9VrXJEH2AU8eI8k6bVFyfn+1kZUAZozBWod3vsZL8gWCn7GazU0MayNUAuj0mWss+dw2elP2+zgAsgcfBZ4hQF/lsvvfav/0s4+hLuiEAGRKEUIKjKM63lLiHq3IxjK53pyzQvxA8YKxZe9A98jPdKqgbPeMtpXlVEswIZJjwNbuE2TK5KdU6pkj/c52o45XABcS/lHEngFHUO5CcULGUqTgovbVQsXwS6EUbSwsor2KFPDjqO06nSFZ4c7ScTxrWDWeWzOLN9A6MFLY5MIuwcHTnj/+Z48pY2TRjXiX6U+W/PpkwaJ13F/N8Hv4QihFSAWSOMZuwZAbxs6SGoOLhWyzQlTFQhFsKriYcSHhQiZa+JSMMZU8hqEbBmb9DqwhjS273Y6n46uspwnaNHOA0oo+R53ZRzybma7q4xq0oyDU53yNLZdw9y60c5g9Rauot4FvH0yc/uY3/PoXv+Do5AT3V/4K3dwCD6GcaTPuX0Azg4/uQdvo0a6eeYUOuA+mYX4XfrICDg/hxz+m3+347LPPyDlz9+5dlssli8XixqG6FzHnFJ70jUVss89oQVgYw8I5Ou9oWo+zFusbjHUgCTHxinELZAmUnIjOEZ0lhcJmFFIwiPV0Tgk+8QZqvS9qVjLzrmG+mDNdaakU+pCJuZBjIqdMKplEohjBeaeZUK5BUYqEqOziFJSJnEuuZKpMSbluCxZj1bs4KaSciWPQzoXFnLZtmRtHmc0rs1pZyS4UYlIHYdCvznkNAheLlqaxbPqRtBMSRRFPUUpYQRhiIgf9HMMwPFNz3I3hD+J8r9t0PFe+9KrAATznYJ89XJnun87NUkg5EVNmfbFhDIHLywvWm0tECsZkRITOdThjAW3/zDkTqqZC07ZaH68kQ0Qwxl5LCfS7SSWRcyKMO0pOpFEz3q5rmM87rLV0s/aqpg31PV5swW/W8RawKeMeJ+wnmZyEeKe25NRIpIkRl64u9iIQTY00xJBsfZ0YwQrZeJITDmeWd1cNrbesWoM1aNYjmRChj4XFdmD+y6fY3cDxUU/TJv5lOuZ9mzladNyaNxohVW6cRspCEktoOsasohnJG5LLZJPJRtuYJINJ+vlsKphUiKXwhEw2op9PhPmYOLjcEazhIhe2w8jlK93cDNrN
+qD+/hh1jo+fe9w91GG6+nOyr3FMXQf371fq8Rrt62n5to63lMLlJ5/w6T/5J2zefZfVz39ON58DlT31EHW+B8Afg5/Bz1Dne2UHwG2ca/jZCu4eCMznMJ9zeXnJkydPEBGOjo44OjrST/cDcrzWWrx3qtZlfN3MNN5vjaG1Bu8s3lmMsxjrEOvU2Wbd1KyZamKZksA6o5E9hiEIMQpdpw5eyUTff8crFNrGs5h15JxIKSIpa52uOt2SsxKXJFWvpyIj2loIuSQKKpyRotb2rtiviZSD+gar3Q9aqxVKSaQwgrV4Y5j5Buu0ApBSoh8HUtKsytTstGTZg0TOCrNZQ+MtMcOmV0b5mJLSA43uiyEWxiGSUmIYwjMZ5xjiH9rvAlfO9wt/vwY9f+E+YE/Qmp6bMzFnQgycry/Z7nqenj7h/PwUkYJzWpNXx+v2RduUMyEFhaG7FutcRYEMYgzO2Gu12UIqSW8x0PdbFUAJAyUnlssZhwdLvPfkslQxFWsR0TLDi9rNQs0UkEI6qg7t2BBrr+2UxqcptZ9OeoFotETvThP2slA6YKUb0UnrMJ3nsHW03uCtbip6giurOSUICWwxNOJwZGVXk+jGwir0yFj4ePD47DhxLTPjKKK1sK7x3FouiWRCZ8neMIbIzkfcLOFOAmYo5L7AZxlzlPAuYo1o+4ZAMoYsgktQjMcCs5BhiLj0qi+b6WSboa5qQLPfL7JKvzFMvERR5WaAJ5+Db+HgYC/B+E0tJTg7Q4Uy1gtKuQvjgvL4VHt3V6v6mltgDcHD4wOyd5zzPHc5AI+xvqEcwMOih7iCvSxlEeESZTXP68f4wczGTBlCpEgmjHpuGr9ArMU7h/ce5z3We4ytjtdYjG8QazHO4+cLKJlw+jm53xBsqNCnwXkt8RjjuCpHfL9NoJKOMinF6ngTOWtbULFCTGNlxCtxSpzdtxIplJmV8CRlL1NbipLY1PFlcqXmiVGSj7FGVa0K2pIkhqHvKbmwaBoa36gcZwzklCAnTLmSs8w5MY4DyQg5Kpw8wZ2pMmkzqn4HhWEIDL2KcAzjQMkF5x3W2P1rfh+sTPj3BDlLURhfJoLb9MirPV4Vv6jfW2IcA5vthnEMPD49Zdf3+u8YMBr3KOktBiJZhcSy1sNDiopUp3jldCvSMzleqcznRCKVTM6RcdhRSkJS1BeTQikZ5wzbfqvXh3MYY1mv1y/cbnfDjhfEZOIDoTwwe1lIZS9npAixwrL7qqQIwVpKAft5wrwfyHcg/URoWuGtZcdi2bKaW+adYApITup40Sg3RKEPwixZFtLixdAODhsyq23iTn/BmWn4s23BeM+fzlbc8Q1iBNdYFs2M+bLVwMqqkMYHeeQ344BnxL59gd0leL/A04x9KzKba8051/qCKmcJNgO2w5bM4XZktgm08abqaQeoq9mg2orr7/YygibFbwKna/jNb8DN4Kc//daON0b45BP41a+EJ09uAT+FnYH3P4aD5tprngK/g90BvD8jieNT4LNnXm2nj3HCLwFzoXn+j0DZqjWL+AzV8rqPcrB/MBYjxEhKQio91nuWzRLvGtqmpe1aXNPQdB1iHMZ6MJZmvqJZLHGLFd29B5ASm1/9M8KTRBwigx/wvtC1hmgLSlYzfHlg9v0yEfBOKDkyDr3CuykBBmc91gghRELO5JIIOUDxOGNonCemQCLjRGidpQBTe67xFnFVnMckSknE0JNzwjmHtQ6DkLw6+vXFGsoad3zEwXGj0PawIyZNBCaguSCkFNluL5UuUhS1iCGSQiKmxDCMxJJJaMDQV2JeSuqwS4H5fE7TmCuRjz+wXcHN6nzLxCeRq3KuIJVBPjlndZY5Z7bbLX3fs9lsefL0CcM48vT0gn4YUTmUrCx80cApVK5QTIWUtLwwomhFSJFUtINGO6cFLwo1qzphhfKlKMQcBwoZX2vCu37L5WWFKETLDE3T4pzj8dMnPyzHK7ngdoEkllTZariJHaA/r/c9
Q6EkkCFBFOgTxIQpFuMtjbc01tJaixWzJxykPDWi6wm/i8I6aWa9XDqS1WjWmMLYjhRJ2vaVElkM25S4NAmPoZlYjXsAWpW0RAyp9uYVZ8AXLY+2II798IWpx8+mKkNZKgOwoApcBV5Q8u1b2FQT9Gj2e93hT0KEV2uvGWRAT5Pn7k/ACHYsNDHREbFfczKWUgghEMKVQtJuNzCOW2LcYG1iPnd0DZicKDEy7Hak7RY2PWwimBGaHWIyDb/nBC5obHG+/7HPahPCQEPEMTjYNs92n7w6EzBWb19yvFJSrb1yxXO7BtEJYIpgC3XTMogoUcdYUzNcW8X1bc10O8R42sWK+dEhbr5gtlxSYiA4FXqZ2jy0tUjItTVHBwfwBThAKrRNDSj3zCWlvew/z1ef0ldw34uaiOC932/4X7Yh7lm2oj2+RpQdPPVpllz2LVoTeF+KEtGMMdqOUZ1xikbrSvVbETFY4xAyKYZ9y1jJSuzxvsEYhbz3ilh1n0up7IlZk02qV6UUyKUmD1VbXK5/Fqr2tsLeN2lXNKkrkpUeV3W603khz37DpZ4rk6JYKXk/lCXFxHa3Zbfbsdvt2O52jCEyxkBMUbtLa1tQLrp+WoMXYsrqfEsm1Jp6SJrNqlCJstsN1ySBqQSrUoUyNFOr3LyiPiRRH1V7v0WFVdJLKL/cqOO1Y+Tw/cf0Y0eIDeMdR3ynASfXTp6JhVmQnJFdwb+fkHVBdoFEpGvnHJysaJcNi1lH4xqFz4LWcmLSBU1jIOXM+9nwu2w4mBU2f7rCpwxWJfN62bBDCMYw30WSLXxAz4c+c1csd406de88Rq5ygYwhGsX9sVb90ttUbs+EoUwAuvYvq/qWpVRxc/ZQ1k3XGVu0Yee64/VoZWqywlVueAv9cO7qrkfAGawivJ2gcerKv84eP37Mw4cP9xds34+s138BPOH27ZHVSpfTe201+vjjjzn77DP4cITfFlhs4d1f41rDO/yeSnICPgU+1zz5WbEMS8/bwAlnd6B/87mP/arMGGgX0B1+4S4piXZYY3Ji9BAd+nV0er23WYUB2uBoogPnoOswztF0M3zT0sw6mvkc6xzGeYxraQ/fwnUH3HvvDvfeuUPjLHPfEHZbfvl+Rzyz2o5Re0TbucHGzNhHYhjBxavmZz1SGuZYGvCit5xgHCEnBgKJpIny721Lt1y1f734pCbnHIe3jjVbTKp5rO0fstdeFqnEtNqzLAK77ZZ+gh0LiDSI9VoLzFVLwE4qYGC9OusxBEqu+vFZ166bNbX2ekmMgXHUjNV7x907d8k58+jJGZudQtExJbKAWJWIFG9wRlnmvoEcCrYybskJSsYZmLeOlARTdONfzhpms471eneNTfxqbYqprkKP6+H4tcBLrj2+sHdw4zAwhkAYA9t+RwyRs/NzxnFk1+8YhoGYEuM41rauTMkK56sKW2ZIdWBKVgccYmIMig3EqmOdK1LQOI93DovgavLkqhNWqp2SaK1VH2Kmfl5RR66fQSOIISTGmBljeuG1vuGMN+M3A7G35GAwC8FEbWHGFKZ+LBCdIJO1odZcRMxFpkgkm4Rx0HaepvVIlY3MNUKMGcakX0oIiZQym1K4zAUxsDm0uKIwd5FCGhxx0OV2SYsF65gIBJYmEyVRxGGyJRuDsUKZlOLkmtiHqQVFqLT/SrqqUV4x+k1Nk2Sulzpevd/NPHupWBRyLtduPPeYgtaCN+gHe/ZMkxEkaLxxKEKjenj7msp1uy691vc9FxcX+78Nw0CMF4hc0nZCOzP7QwqlsN3tuCxoSfoCIEK8xLsyNRp9uVUUPYnQT+QNACwFha7yQhjzlYjCKzURsE6prM/flVU+0JCUqDw53qZmNhlsFmwx2Gz1dWyLsRXutAZTVaqMqXVd6/DdAj8/YH5wxOHJMV6EBTCS8c7uWbHT4RlbhQekoCPBnvvOEQwWh1en25iq15goGQyxts185UJQdbdefE3RbLFpGiVFpVRbqcw1
H6DXvTHTgBI9t1OIWnuse04p+Uolz9QerSpMMmXKBfXiE+lSnbrBiKWU2jNdxUdSyngPbdORS65DRmSf7U5Z1f5Sq/XPafjC3qlVcMHKlN1Ccvr5JiKdvcG2OP0MNWO8RqZ6tjOoZo/Ivrsr1baoMQSGYWAYBjbrLWMYOb+4YBhG+qFnGMf9XkFd36kPdyITpqxc7zw53pQJqTreiS10HTGSybNcIRp79BLdxylCqSpZlPr61z71lAxO7/uiduM1XgwYW7Al49eR+BvN+ia473kES2LClQGZJYZjSzgQ8n2LX81I3vGLUBhj5LbNHFu4TJmHIVBiZHG2xo0BjOVELF3KEJKipK0nG0NJDnKHUGhywpHx4YIihcP+CbPdY1I8ZL37GdHO2P5kznir4WIUbHYKr0oDksmiX9j+4PuCeZggQbkDLCeAYzonrj32lVlGW4i2aI33DlebXkHZzefXHt+gDOeWq0EKM57HHG/dguNjuM2CH3OXplvAT36iesqHz2Z1MUYePXrEdrvl4uICgO12y6NHjxiGge12qw88OtIX3olm1DHWY782AWG3gw8ekVzgM6ov/io7PITbt+lEuAs4Mo95xAVrDi8Puf3b2zfCbBaBxn95Cdxmoc1eFdi6iLSZ7HSqFgjOOHw22J3BDBZjGkzXYrzDzzpc43HdDNPNsbbBt0vabs7b79xjdXTC/buH3JvNdAa0CLuccLCXNoyxZov1PAw8G9QIgsdhsDTOVX1jQSrfoXSeUiy5FEwx6rRJeuoFnonZEpb4hbLGi6yr4Btf2z6utLtzKZixkqycITYZa1SPupTMmAdyurpec81/tL9TN+QYI7GkfV0x50wIiRAyySSMKBGtqeMyc1WkHPrAxm3IKdE2rWZgaUosDHugsyQKgnWexnuGOJJzoBR9bW+FebfAeVehUHXow9BRSqFpGu03vZlaybW1vWobKuVq0tOkZ12yznlOJROLtvhcrteEEFiv1+x2W0KI7HYKMW/6HTEmYk7E+tq5wsFal60lkKQEqnFCNKs2dwiJGOOEvyNicN5eKytokJWKBg1OlAxXSJrMlUwqcV8GUKu7tEKT9fmaLKSXsF/fvOMVMDZjSybvwNUpaRUcQuSqNpAQRDK22SFNotxuiQ8a8pHFzhuicXzQJ85S4icOvIXHKfOrIUII3D/fMu8HxDoOnVUnmbWOG4wlWYNJFiktlIjPGUrCxQ0mjSzPfkt7+gv6/gG7Jyf0/pCHB571zGnrUDaYovXPIqneMpSM5IQZC+ZhhhHKXCjLWvutKV3Nl19xvlXQaT9n6FZ6wpUTne775Nrj5yiA26JtSF8CjYr6yDffhDvMeIsH+NmB/uFL2nRSSjx+/JjT09P93/q+5+HDhwzDNYWkg/oaZ6JdRHFExxlec7zjCJ89JNNPjUZfbycnrFDA3FJ4yimfcUreZE42J9/0VV7IjCh83nzJSGObhSY5TBZKW6DNRGOIxtccs8UZg81a4radxzUtpnX4tsE1Htu2mKbDug7XHdAuFty7f8Kdu3e4veg4adv9xuTCiC2QYyTVXt2Ucj0ryxeQYoPQ4DGibUbWVQESEbCQrNcKb67yrYDukijsfG2fChjifszDS7CpxttMoWzdHFOmEEhJs02fM8YUrGRlPo9BRUWmWqTUWuq++gcxBcYY9PNaT67tVSllckJbW6xVjgeQs5CztvfstkrsGedjzZJ1I1dyEVCFG4oI1jqc94gJ5JygOl4RYbmYaUBR1JnllAhjs59aVEq5ccdbGTR7dvDVHqbZekI5MzFlhhQZx5HTs/M92rXdbohB4fhSSiW+Ke9GM2TZE2w9iqLp1q0tmmPM++9C0QVlsosYXJX3dEbPU3JUx6ty3WqWCu1ESolaR89XRMIrNPIKgZ0QCq3hv/ga3qjjVTafJ9ZbaoV8rC0MCsJejfJLYgjGYWJidhkwWch4ivH0xfJ4hESh2xQOY8F42NoCu5Hbp2dIiKzOe9oxKbyL
wUrCmZEigheHSKKVnkYGLcjHDBKZ+6d4u2HltixXHu8hxDWtM1zIjDA6qIMaJmFtkxNUx1tq433xhXJLE4AyU1LWHtqr/3/VZZkJpbVo7nq9faYUFXba7dTNrqjcnwMUSnzOrLWsViva1jOvsPowtDy+fEozDxyEQHPN4Q6DTuAbBvWXpRQ2mw3b7Zb1er2/YC4vLxnHkfnJCQvQTfQE/Oh5l3e5z322dstmvqGMC7gQSAPqlK/G7HnvOTg4wBjD5eUlfT+ws0vWj4UggSdc4Mn0LIEZdDtYfa6lghswB0zl/+vw9vS7orwGVxxSHBSrbS6lTrRpDWZlMHOPbRRadk2jN99iXYtbLJnducN8ueDgYMnhvGXeehrv6GPg6fqSy8szLndb+n7QtpVrYhEUrSc7rgoUV/CwZgqq+lXH6QGlTIq8Wj8lF3CZLIXsE+VaPSDlWrd8SY63lMxut62ksiskR+uDKoRxlcRMAS/airOfLlSwViHMLNVpFyGkSEyRgmEMFSKu8CYVnlQHEEnpygHEkBnRmu1ut1PHkQvWWEJMxKAMaUpAjLBebxiHgb6fsl1h3rWIMSxmHW3XkVMkxkAWMMXtHU7OmW8+VfRlmIK1usq6nqm2ao1Be5D7oWccAzFH+hgIYeT84pxhGOiHHaGS0PQ1KpTP9bJR2b9VEj0DdeZBIdYe31wyKaf9FCGhUmZEA5tStDacU6KkUGu8dg9/799Jrt6uXHt//a6nGm8tE6Y6orG8uOe9WcdbDGPoGMeOEDzpGOKP9Uo3RZmuo/d1cL1j8B12E+GfR/zFSCotmJbL7Djf6HSj4/PMyZjJrnDmoHmy5ke/+QQ7Jny2mGLoB2EYHcZFmtlWL5xtIUXHarFmNV8Tk2E3Nhg7cnLvA+aLx7St0K3mjMEwWzxmx46NmVG2jqFp6FsLkrFpxMaISI2eRKXmaKG8V6/tRrQVyVwROmpp55kT4WXb1D7zOQogz7lyvDlr/+zDh9rduwDMHFWm+JLMzHvP22+/XZ2b/m29ecpvfvM+3XLJT//kT57RwtpstNNoHBWCA3j69CkffvjhfuOIMfLJJ59wenrKW3fvsihlGk5ES8vP+TnHHPNx/pjfpd9RzjP86k+hH9E5Q0/277darfjZz36G955f//rXPH78mE8/M2x/LQy553f8DmEk8WNgBvfPYbG+EXKVjm9Q0P6q2qRWkP3wDlMcJjsaLHPT7uFJMJiVxxx4XNvQzlus97TzBa5tse0C1yzojk84+Es/ZbWcc+/uCQ/mHa1ztNaxHnf85snHnD095fOzUy4uN2x3vbZzpKLqTLnsgeCRKfM1QINgVXKxOl3nLKWIjuMstf6oUxpUmD4lBjL5Wp96CRmGlzcQJKXE+eU51qojnaQwS9HpRzlPm7HR2qHRiTmzrtmHOxOrNaZALjCOWWu1pW7wCVLN5FO8dsGKkFOmTyqSMY6REBJ9jjAmYgxKmqo1eWccOUXGYdShKWmAUthtNhgDYixiHNY5lqslzjmWqxVN1zIOPUNfAwqjDn8cR2KKuOsDCG7CZOrz1h7cFDXg2O62xBg5v7hgu9kQUqCPO2KMXK4vCPF6VjnBuAq+79nEVw+gCAQysVQFsKxOfkxRoeaofc9TnV57s6sQR07kkohxIIYRbwzet3oeSLmuLHo1OOFaFhSzTicqUCHoK65KmkoUL2A3rNVc6wPUWnkp2JQpplAcFAO2NUijJBG8RVIhzxwxFlLrSc5TxKlOdi7MQ2AWEkMURhGaYaDLFzgi3upmUWKgmB5rA43VeqK4SC6Omd3Q2S2xOLIYhIhnwNFjmFGkA3yNk5VpbXLCZJ1GZEquX+Q00FDhqlJU53O/wteSqhu9SArkEfIOkkvgdyCVuQNY6/F+hiUCAVwG6bli+CiBx3vPfDZj1TQcOEcIgThEYj+wDZfEMZP6LWy3iqlWZSDnlIjifUspM4wxhBBqP+K4J1qEGMlDpkyH50GM0NIyY8aClgM8
aSyMHq3NswICzulbLpdL5vM53ntWqxXjOHK5g9mBwqqMylz09RtorMJNN2GCYMXqsPbJ8dZrNxsVYs8Isuf1WAwTIUfhLm0dqqpU3l27eWzX4bo57WLOajFjNZ/RNhbvdA5sQYerr8/OuDw7ZaiCIjlPm8j0LlclkImEYjB70pGZWnJqq40mjFYn9hSDKZDF1Bsqz1QZplooA/3fy8p461QiowQvjMK9+6H2paDTJKpuL5XEVNt2bBVZ0E0/10zsapPPJSNSEKOZc77OByg6a7fUILLktEcN9GGVJCSJqSUlJ61N5pyJUYX5k+hYuqa1NN7jnNv/dM7i6jjU7DTTlZp1FWfrZ7hBzXEg1oEClEJMkTAOpBjZbHeEENhud2z7npgDYxyIFTlIOTIRpVTI4iqAuf4Opeh+ChPiUvaZ5sRW3p9PsBfImL5TEa7qteXqcdPPQu3f3bd21e9xv8tX2cpr/JvJ8T5bB/7udsNQs0aOxiSMh2ab4HeBOIP1m5Y8s5wsOw4XC1IxJAyDdXz2swMux0Q8WRAPZsyi43BXWIbAT9ZPOBwGzrctl32LNw+ZHf9TnBmZz1Y469lsB3a7AeeF2UwhqRI9FIO3mcZlxrBgvb5HkUQ722D9lpCPGbZvMgwNZ2dLBvGkVcF2A82oQxFsjNoXLLX/kSmVTZRSRdb3wDL7vl09uXj1XjijcotPgVtn8NYAdga8izEz7t+/z/HxMQ2nWD4CO0L3O9TpvgHcYbVa8fbbb7NqGn4+m3FQCp9+/jmPHj3iMnzOh+kXLMKSn3z455z0Izx4AHfvslyq/kXOHniLUu5xenrKr3/9azabDR988AHDMLDb7XTNngB/gZaV30ZTxGq30Iz8EvgAGLH74zs+hjfeAO8dTdMgIrzxxhvcvn2b5Y/A/1chbS71ibstU+NTM8kd34CJMTTdnHa2fDa0LpUlaZwSguyEfKvDAy2pgOB8Q9O0+HlHe7jANh6/WmDbjvm9B3S33+TerUP+yt07LFvP8cxhnZBTZAiRs88/5df/n/+M06dPOf30ofZPplQLMQVsqtU1NYfF0GGNY9aoak/TaFvH5BygkIpO9SEMuBjZIWwRMIWmbZFSrlqNSlK04iUSCvWYtWSlxB5t98gTDGl0OIE1Bm9NdcDqiA8OliyWc2UyOxXqGYakU5qGkRBGUo6E2Gt2u1NYOcZCioEYE30/klPWFqyc8a1hNvN4a7W7ISe2uzUhFnZjYhwSMUX6Yaczd6tAw/3VAQ/u31Ulsdpj3bYe5yytbekaR0qJXb8j58ysVVhqPruoMPurLlyp0316cU4YAylF+n7L+vKSEAPryzUh6gzjFKOKjhiFn1OOTDzhkkvNUieimX4n152pDkMoe0LT1RyMQtZOYGwNUp0xeGOvOhdKJpdYM9SEtVqPL5LJBqLUmi2JWNJe4/l6D3iMiljAFV9lujf/0Bwv6MHrSZ+QlDDbCBikWLBC5x2rtlGoJwnbRvjssNGTfdES2xayoRkzXUgcxJ6juCP3kC4NbrajnT/F+YH5KuFdqwwut8E7y3zeKkW96M1O0dJYiPmIXDLOB8QkcrKENCNERwiWIAZSwuZAzWGQrP3AeepguNYiMDWVTx9cSsWYTY2spiLaK75epAd6kHmAEuubJkSE2WzGbDbTByBoNrJlUi4SMbRty+HhAQfec1RglTNPdjs4PydywSUX2j+9vYSLC2Umo1mo91AwFJbknGk7ZZ6GELm8XDMMw1UbxShKsLZ8oSWlRXP0xAQeGCbdqaZRZcnrTnQ2mzObz9kAR0C+MJTNMWyeG1n46vcqQC9ea51Oq6lRuP7Qc8L6guSikGO90A2TlKr+Z40OZLfeYrsqDdl6TOPxiwXd0RHLgxUns45F42gciCk60D0M7DZrzj/7jIunpww7hQDLNINXniU3SrkCua1YrNV2nCnTtcbgrGL0ptRBAFHlLAVDQqcAGavXu5lgRJNB8ktzvNfj1kl4Yi+kUSbF
qIqwVRha413VTm7bhsVirhPKvCcXaJtESoXB94yDI8aR3RBJJhODspNzSnshiJzUGetAdfYzjtV5om2OMTKOOpM719anGBUOnWbNGmOYVfb5xP5wpu5R1mKtIUYhBEUTjOgnsfYmGtHVSs70/cA4DjW73XBxeUEIgcvLtaIoJVPIKq/pcv2SpnN+EjmZJi1NgiETqe3qv4zqKWsyI5V0W/avpa1Wyuq2Zsqkr8PC+epxNeuZJEmAOsBCs+hUg7TJcj2n9QivfX72oMYL2c1LRk4ofrGkuSHc7mBuObo1x849q/mcplHHG2NhZuHNQ0df4LF1nEqhWQ8c/GZgxcD86JTuYIePCRPANAHxgmkbFid36eYr3PoSv13jJNPaqJvLBDMkHVLv7ZaF/VQX32YwDeQNkj5AvGFlGmZZsKNheGII8R4hvqG6zQtLsVOknfdQm9RChWRwnyfMGsqRo9wxlEEZz+YiI5evbvcXgdu3lTC8WKww5jbqxn6ftKMH7iIy59atH3F09BZ378758Y8NswLd1JlUbc6cd3mXuT9k9uBH8OBH2sJzza43BZ3u/zoD3sHaxJ07sFgIBwfHv7e15wxN2nu0Q+W6nZ/D++9f/dta/czLlfLEfgTs2pZHb731jGoW52iWXQD+we9Zj5djIkLrPV3TcjWuTmHKXMk9uUybiR7TtBHoDAOhnXc0ixnusMPdm2HbFn9wG9cuefvBPX507x5H85ZbvsEbg4hmG7/7zW/5zS/+nIefPOTs9Jzttld4sEJ/zxO9GhosVrUbYsEUi1s4jLe0bYP3jq5tdGZsKXXCi25yRgwBQ5u1ltrULMSKUZhwFinSAIX16XeULL1mxhqWE9Pv6kOwB56ueDoYuYImW6+Z5PHRIXfunmB9QzPTCnxJ2o+73Wzod1vW6ws+e6RZ7Wq2RMRWMlXRtpjtcAUd58LR0nO08HV2biDExHY8ZxjCNSaw7El92lYEiOBcZT1P40VzJBcd7CBWSW3OWh3qPnmBm4oegWEceP/D39T5uJEQRs3cc2aMKnpBnXVcqcj1C6jJRj2n87Uh90Y0GEqiEruZCqXr6qDnVdV5FnWyUkSZ30XbmfbfbX2GN1LDPX0dkVLZ1leTppSgleuxaxCaJzi5ljB0eIPZv64e0Q8u49VMd0oPU2cZ71n8zHN4VNmyXYf3HpU31Yv3rrUk0dFYFynTbAOLDy9Z2h3drXPaxQ63s9jeI00AZzCNZXZ8wvLgBDtb4NaXmDLS5EsokUykkFTHNWSs75m3vdYtciGXBilbpGyxRZDGklLBnfeEdWTc/oxx2zHOGy7eXJK86llltBE/m2s0+wzmScZ+VsjFkG6jAhSfZszTjGxe3YVjjPbbPngAmiF+nVTT5HiPOD5+mzfffJO7d+Gtt8BH4ALKNcc7m/5zx7R33tYHPmcJdbxP9en1tO2AB1gLd+7ArVvCl7UuTXaJDjL8spVar/U2mXM6lGi1Uhb3ErhsGk7v3XvWadt6UDewb4kI3nla72ufYs2WqqJPclbriXDtwq+biatD2GctzbzFHnS42y1mNsMdHtO0h7xx54Q/uX1C63Q8oBgYURboJx9+yD/5T/9TLi83XF5cqmxn1NYVg0pMXudZexwWR06QxlyHKCjE3DSetmmYdS3LxUzbQZK+VgiJnKEphSZGpTjUXkpTdXKLSeRGs/6X4njFMOu6vcD+JOAi00aNwoYp5n3tzxihaTxN4zg8WHH75Ba+7egWyzp1yCNFuLy8ZLNe8/Sp4/z0c7LNHBwcVr4C5KwtLf1uIKWyr9keLRuOlg1hHFmv1/T9wOOnlxXiquS6uqFrgDCJRWjQaPb1/7IfIGCc0+EOtb4OSvK5aZ3mEEY++uSDyhhPNWN8dq63qTOhC7Wx+UtKuZIncFDqLi+1nUj22a4GIxNaUeFpU+lYpWCqBO91hGD/Pnt4uAZ8UKU3IU3yrLW2m1La60Xna5nvlFGbIteEQl6G2/1DZLy1z4pS
asSiy64XtkI9ISZ2qXAeNOKYVUKHDYF2iHScM1t9iJeePj6B3cjldsF6bWgXlnblsWJIUQghI7alXVpMGXHZA4mcBwoRiQkbEyVlckgaadYsxBlDYyyxGGy0pAzeZ1IohH7OuB3YSWHcecbBkmdQvFEt5+mCqLhELhqdpVK0ZufAHAvZCKV7lYVeQWQBdIisuBYOPGcdcIK1LavVHRp/yO3ZjDtAOwinT8EkoI7+3F29+v43/fENP0sjWriFvULT85bJXHBBobBh8/sjzRnKhJ5ew7JP6PcvO71BRlPvHlW3uimoGa0zOuMqe5kaYcu+HaJMkmhoHSxJBgOucYi1NIeHNCeHmMMGu1rguhkHB0fMukNW8zkz72is4I0hpsRn52dc7LY8enrOer1TXeygTkgTpUqYslZbJ6wOCSgVdjYesAbbVsdrLW3j6dqG1eqIW8d3SCmw3Z4RQ2BsddPKor2ZcKVRVZFAcjak9PLOdxGh8Y5sDDlbZO+Y2EMHRoRkMtY4vPU4a5jPOprG0bYNjXfKD/AqJphjIac6xzcmZXrXMYldo4p5pRK4YrRaGclXfaWzztO2Dmsg505n9baOxtu9uhsxIuNY4W/HpBIWY1Q3NA16qd9TKlorTnUeb86JEBM56TSfm3K/yv4eFQIuqhaV9/Uy3Q2q6GJ1YF9eVjCF2vNdHTRCzqoKmIsqUMEVucqK7qsqmz05QfaPmWD3fWoqminrmIkqjDI5zX1QU/b90KnWeCcdaA3S9H1y0QE+++rzSwh2blYyEsFiqewGdbilqHqrb/FNR4qBcRh5HDO/HQNODO9KYWYNbrtjte45KB9x9OAf4XLP6ThSRuHzx0c8/fxtjm81nNxe4IAwwm6X6JYHzOdzhIRhpJRISpfkHOiKsgTTGOjXW+37qpGr6zpcNyMWwy47MhbDDCmOOAyE/pKLU8/w55lt8IxvLIizFnJGUr6KwjLEQp2voWQZmRXkR4awteR/9GodrzYS3eNqSMKX2QEwp2kWvP32H3G4OuDH1vIW8PQS3v9V5f/W4PaFZcIXwI/r778nAY9EPuZjVEzxK97xGHiHZz/a7zuzM3sd55uor08mInjraVzLnpNZ61Aa4cerzaFcwWBiDO2iwzaO9u038O++CTODHFratuPB3bc4mh9zb3bEcddoS4UYLofIn/36A95/9Bkf/PpDHn8+6eGOygKmspaNrXXnTBKjHQZVLlI6QWYqk+obZdgu5nOWixn37r/D2+/8ZcZ+w+NPf00/bCgFnIXGOxrrKqmmEpxq4KNtZC+PSW6MsOy6q8EENZBgD1cWYsrklOsUpw7vHIerjrbxrFYLZrOWpm2ZzWbkXNgOPWmMpGEg9gOSEoumxRg4Ws1ou3bveFMuVdCi7KHmeWeYd0JOnvnc0/cjn33+lL4f8cWRisP2I5t+oCBYL/th7ePQa028Ol6pocuYM33tFVYd48wwDsSYVPnthjLfnDNDWCvruNZci1x1e+t5Lc/C4DXI3Ds01PHaopmuVOZUlnor1wcp6GvYSsWyIjSiazXJnVDbN6/P9C0yyT4mUonqzCt6oMSt6VycSp/TJzSVLDA533LN0Zf9GrzovnHDGa9AmS75GqFUuMFU7H5ir4mUGiUXQslado07ZuMFXdnSdAWThU3whCSEbCgpQ63lS1E91hACbRGsayqBwVFKglgwRVmIUhKlOIxPIKoJXXJGbIMYr/q0tqVgcXaJMQ3RXWLtyLAzShbLGYrCJhIL9Og8zwb9BhsoMyh+0neuv3v5/b7wZa35vjXoiw7ee/8MOaPrZsqcbTxCJDESiyVET/zSTPnbmgNmYOqkoefvcmi2WtfEYLDYK1KEQZPzAvvmV41m9OMpWZ3AsxL8Q4YyciXg/3yh+AZsUi2alH80H5D95gFVohxqv6FCjG42x3YtZrHEzBdIJ5jW0rQdi3bGqmnpnFOIr15TOWV26w3rs3OGnTJycy57gpEpk+jA1BYkFFvLOw4N
HL2B6hSmCUjOWpx1WGsrLKs3izJLvfOULGSvmUGsGrq1VKcban5m8sJLWFOF4ouUq89Dqdq6NRuqmbCztUWnfgZbyWICKsaQixKnot5yUgfoncMYaJyjqeznyfFK3cSNKFLhneCtIRuFu3PKeO/w3kI2lGSq9rOaKlcpjKztR2U/3Wza9rWGPDGqFVZX5u1UX78pmwQkai20Ol+RUnNe9pniPru8TnibXuUZpIf9z+khsn+c/lGqMEalO+gVlNWhijFV+UeuENV6wk3raOqtMEk+KoKQpyCw4tpi0A3kGktcl7fsj+dFRwLCjffxCiV0yMRzTBPB3NFZYWFrxOPBJIN3lqHAWYw8TYl7F3/Bg9NfcjhruHXnLcZs+fCi4WKwOH/M3A60MmBjoIyJ9dlT2OxoZisO3BzjHW7WaUCTt5r5DjvS2IMN+LJQOCeM5JSIudD3BeNa3OwW1rbMFsf4ZkYczwnDKakk3CJoi5JYcnTIY1EVxrlQ3rXQCvKWIHeFMjNg68ZghWSrotUfwIwx3Lt3j5OTK9lEay2z2YxC4RGfcc5jRo5IvMXLOV1uA3+EVns/5BkPeIIm5nXen8PxFm9xwAGPeMRDHlJmRTPlAS36nnM1gmgJvA250+V/fO1d4w6GD+vjrtWob8oErbebCqVR8hXbEhQJopBM1o3BGqxrMLM5s5/8ZdzBMebeEeZ4he8si1XDqmn52eoW97slx0anZxEV6RnXkcv3f8fZb3/J8Pgxgs6ftUbfZ5JL9M7hvIM9zFZIVttpVI9Z4Wjv1VnNuo5Z25LHgbPHD0mhJ/VbJAQWvqU1ltAmhtmoIg8xqL5udRohaOTzshI0AZxo5i5i9jt2LpnENVlIlMDWdS3eOZqq5qVDBiClyObyUoeqXOyIY6Tfbgi7HaYUjpZLnDMcH8xpO78X6FfhjKBZWlCWbOuFtqFmVRrcHh4uGWNis0uUnTKZU0qkXFgsFswXM9rWEuOIlSsQSCQjWGKMbHsNoMYQyCXXlp5ETC+jweWbWaGQ03UB7qJsYamtcEwlE02s0lSvvhYbTOf8BN3u2c01q9WecVsDm5FSMm3lFlgpdFalMy/OLxnHHucbrKv9z82MSTOagpYQ04Qq6XtGqwHgLgzEYUPZy0kanFe986vicrmCrev1EfML431/gIw3q9zilVB4AQxOBCdQrBbdZwIHGLax8HnK7ELEhXMOxk9Zzm7TdvfIpWXYzVlHx8o6Okk4SUjRiSlhGNBfcx0M3mKbhYZMxUGJlGJJWZDsML5QUiIXi0gkjYEhjngRGmmxtsO3C5p2jrERsQO+GxEfVcwF0Yi2z8i5ULJQskZPZS7IYoqklIhVKkTyyi+a66EkQIVkjAiL+ZxbR0df4vwTO3p6zlFPeNVc/t1NmDSghXitPlzv60CO2Ge7FsOCBUccccmlPrqKazBCeVSfOtRbQQdSZPWt2+kt9eOor99e2zCer0+/SpOqoGSMquo8885XzOKpxoo14D2m7bCHt3DHd5DlHOk6fOvo2pZF03DUdNzyDV3R+da5QAyFMEaGi0uG01NSP+g7TGMsqVJ6pdTsr/6t8hGMmD2jU8kyqlLlnMNbh3ceSmbsN+QwqjhJTvg6QtOahLFCynn/EyBIqq9rvjEV4JuYoU7uMfaKB5CnjLFmN+iUIlvXwNZJY1Pwk5ISLVPMhHEkjokUIjkljCk0vsF5Q9s42sbuRTpSUmJPzllZudnQeKixDBmtzzZtQ9s2jGFUgilTn7HKV3Zti7Vlnznrvn91Vkys6b1UZIXxU3o5Gdg3tkJNActVqiq5Hq/+W1HeqTt88rKltlPK1aV3RcCo/6znmxhsdbwqzQvOWA0STaExkSQJykiOvU5+qyCKMRN3CIWgk7az7c8E2Se3Kn6U6gzlpPtyMflKX6Ee5x62noLTH9x0oqLRkJEpsgG8UJzZy3aNYyLEwHmCh0HIQ2T1ySVH/cDdZcedt9/G2o6QIyGpAqczLYvZyPFh
TzfvwWTEwGzWYZo5zjpyVAUZep3ZqSd9Jo6QRkNOlownFeFiu2Yces4vzji/OGM2X3K/QNst6BYdtJaUdozDmpAyYrPOM90mSh8gJ8pRBi/k8wwboWRHKUKaO+JBQzGZ5CzBjmR5hX14OcPjx9D3SvM9OcE3DXfu3GHedby3WvEGyjN6AtccgnCLWxzgqWKSBLQ0Onz5O30DK2hj0PvM2PIWCbDMuQ3MOeKQ42tO0LKftMgh8B5Xjj/V4702PkEJUx/xrNzlAVckrv0xPK6feIWm2a/e8ba+4703f4QZLZvduoonJFIOSDH4PEew7Ej0ZPK8IR4tkMUce/cucniAO1hgl3Nut56/vJqxcpZbtqHZk1rg8dmGX/zZY85OTzk93ZJjwVvPcrHAGId3sz25URm+CSMqSJDzVU10v6fWYMF7j3cNb9x7l8ODY9abC84vPieFwLC9pKSkkK0xpDqZBqBxzR6SzSVrL/JL7DsVERqnsLGIjvdLWR2Bs6qkNeRIzolxGLgsl3jnEOY0wXF6viaVQt8HLi9VTtaiwylICWsE7wyzmcU6s5ellEqGstbQViGLCWK1JuOMTkYKKZGzYbXoiDExDOeEcSQMPWPfkwukEBTyLOzZ2FK978SyzaphuZ9Rm2tpS8wUzN+QaZqviM1ULJnOn1C5AXWghtZL7f5pFHSEK0KxFoxXUZa2wxq79wtGBGemSUf6+ZvWa/mrRFzekqPlaL5grAGXWId1XqcZGVNblKTypYWUEv3Ya8Bi9PxoMCy6OaVowASGxi2wxqtMZU6kkgkpT/0qP1CoWcOHekHrSYYTcGYC7gkx0w+R8yg8HA1uE3n3o3NWmx13/rTl5M03CENiexkIKSLicLZl3vUcrbbYbtCGdCMKK3VzfHW8hUw2uiHE6nhTgBwNJVty8cQC611ks9nx+eMnPPr8Ew4ODpgvWpa5J6UTYEZKPcO4JUYQVweJ9xEJQaOrg6JR30WmZEOOyugsJ54878humkDTkF+lSH8p8PSp3u7fh+Njmqbh/v37HK1WvAO8IcJnKGJ73fEec8yb10bN79C2nu/ueEGx4Q/pyLwJaDZ9BzjhABWs+rJt5KDeJptquM843hElTl23N+HaR0A/4RNUwfoNnvPKr8zapuXt++/QpJYnZ4+5WJ+R8sgYd5js6NJtTG45l8BaEulozvDGLZi3yJ3byLLDLlb42YKTtuFPl3MWVujMFOwr2eT0bMc//ecfcnZ2xvnpjhLBW0fTeJzrmLVH2tojUz2uh9JTSOQcK5SmmfMkOG+MpXGqmnX/ztuc3HrABx/9OZ9++kvCGBj6gZIz3jca5KI+yBhL61SwZIyBELVebK19aVmaspr9vs8zZ037haJ11oIOOMhKSorjRgejW0MIHmMNISbW6y2ff/4UwXB0cEzTtLSifcjOWWYzHcHnXFW+2hPQ63QkUda6Cv5HpGi7DWOhZFNbr+Dp0zUxDIRxZOwHBWniSMkRskEq+VSq/rL2C19lxyUXrT3X7HE/r/YmLSVkEqcgIQQomZxHQLtIxDRVatRSRGmtBR06n6VK6toGcR4/P6DxHi+CEyVQeaYgpAYzjcM4C2mgDJliDWY2I5n6mgXEqrqaGIOr34VDcBhCGUlBRw9aUygGGmMx7VzB7qKlCi8dVhxjjAwFnYZUpUM1zPgBOt5a2q6p/AQ7GGKOPBoD3sAYEiEU+k2ifZppQ8/BbM1Bu2Nx2NItD4npkn53xjCO+MuBWXDMZEt3cIlpI351gPEG186xTQciehHEQCk7CoVh2JJSII5bwrCj5ESJKoMWhi0lBYxkvBeMiQzDBdZmhv6Stu2IoaekSEkCyUGySHRI9BXK0BOC5KCIbnZGcEFo1irWzhpcD6b/moV7UVssoOtoT05YnJwwW6oAexFhg7qhNeqSLFoq9QjTdNOvvKwbNCE+4ksHK4Aixwf1dS7nczYnt66dvKobLSLcPplx+/b8WgDvYD92YQacqN7wek0y
mdVqIj581WfXN44O1keQOtRbD/CyCD7fxJyzHN0+JOeImwnzTctmt+XpxSlSHM4cYk23V/ti2WCP5pTOI22DcZ4D33DYttxqHJ016hSo67odWG9HHp895eLyYzbrc8awJeY6HB6DkAhm1EEHYmtWYBFaNHOJyr72SzAtvrU0rcFby0HraZ3n+PCQ5byh8TrNJxWdFlNyweZClrpBTY4dJcaknInpSlD/ZVopuWZ9Uyav2Zitjsk5i8uZFKEfBoKJmv37QEyZ3RDZbndcrPuagQUSFuuFxikk7Zqa8Tqr8HnKlKR9w9bZZxyvLdo9KqkONLBC21hC42m9xVtL4yzzrlGlLGdxRnWjzUQOqheBWNGqQ/bMWiGmhCDVEWgi45y/OddbMikMlBQoKWElYY22FqXJ8bqM2ISIQ6zXoTE4DQ6NI4vDugbbLhBrGdFyXz8OSAzYkrElYcTQdjNFSGIAYyhxR9pdQAqYISKxQu5ZmXsFbdmyVVd8hvaSZyCkSIiRbQ6Ekknekxoto2n4UsgMWAJFBGcExOCzwWQl+U7n14vajQtoJBkxJiOSQQymWPqU+RfrLWHw2BAxMTP7rOfozzYsmy1v/PFDDo967rz9Ew7vv0U//I6z00eMG8vs9JjZKBy98YSDB59h2g63eguxHtstMLahWMsYBnIZCGlLSpHLi1PGoaffXDJsLyrDbQQK1itL0tnEcmmxduRy/QlD6Dg4PMIYiOOGNPbk0VPGOSV4GGd6K45ct8SJwWxtxtiC64X2oUrS7XYFdmDPXuGSG6OZ7t27HD54wHs//WkVX3cUNO97zBU5WPWk1Pl+o5NjiZKdDrnChZ8zVWpWeLg9OaH7oz+65niFaZzcO++c8N57b+2j3CtGNmhmesDFxQW/+tWvSGngwQO4d+9rjq82kq47+NV7sIvAb1HM/AatnTW890dvc/eNW1xeXLDbbvn40WO2v/4dGUd3+Ba2XTAcdMRli3GZ3EWKMUjbYq3jzcWCH6+WnBhhZQ2+knBKLnz+ZMOvP37KJ7/7LZ9+8o/YbjSzyjlhsopkJDOS0hojBmfnGONxpsHZBSIZYwNiPc3yj7DNCYtbLas7LcvG8O7c0VmY+YIzhU8eOkKODDEwRB1QYEzSrHNivjq3nysbY6QfRmLUYfIvry5ZmTuT4H6JSI4qRuENiKHQYIzl4nLg7PySkoWziwFjLF3X0rSeMQTW260OnXdz5gns3DNzDuOF7qBR6NpUIZCgggvGWpqu2bOpBbBFcAViEnUQGFbzDmsMy3nHrGkos8LJ4YpSYNl5Oi94a7HG1XxEP4+zOmxEbIP3CtmPobbHJIWeu3bDVVHy1VrJmXF7Sew3xLHH20LnFE1MqaeUgmmX4Duca7RFVCxRWjKWbDuiazDdEnd4GzBsxkAeEsPpBeP5EyQFzLjDOcfJnXu0szkhZULOpGHDcPEYWxK3Wk9nDUOI2p9uLaGNqqNQY5c78znz+ZwkhV3f0w8DT7drtmGE+YqyPNTM2GiLnU9bbC7MZzOWywUpF0pxxCzkoGpvTKzuF7CbzXgNpLnsg9Pkp1p9YUyZIImmCmjbkmnyjpaBrjN0C4drPcY2IIYYIilkTIyYHHVCzUyQxmmWaxy5KLYvYUSKaM9dyMSU2a4Hxj4ybCPDtg6lzkqLb0lYX7Cm0DaujuyaNEUrIzSz17OVIkgxFOMo3lNwFGrmWx1vsUlh7iDIqGIULmZszFPv/KsxEVzn6A4a2kVD0zQ4d/W1J6aeXFXHEASPp/2mPU4GTUq/vFtJD6He7USYty0HBwf7jVdFCHQTa1tL0zRXAgjPmM6DbZqGruu+5P6vsZyZ2Ug2QOuhbdG+pJabmAsoYmjappJ4lAXrvCdVrdg4jSprPDLrEJcQr5Cjdx5nLXNrWVrDzCgcZ6hCD6mw3Ww5O33K5eUZ47ghxp2KLlQSjJFKDsmKOIkkcqkZrxSM0e/DoLCmVHgtT3NNc6GYq9FrZgqO6g431SUn
QpCgGsJtpxOp7IXbt+BMPZ0vxYrO+J3aTaa+Tyo7dk8krE4xVWeVmYbNVzg6Bsags7uHMWCdJXUGxFVouWpV189tKhysTPVrk3F47iaVLGSVMOSsxTuL955Z21LQViVbZxzvP1YlB2E0azdWx6CKFJ1NW+vx1Baqm7JSMjH0hLEnDjvEqtIfZFIalD1sPLmWPnBOF8Lq5xHrsb7FOA9WeS8xK/IwhsAQRkwKuKjM6RADJo6MMRNSIY6BYRyxJROdKhrmQs2qtVtQv309wdKko29Elb+SJQt7DsIE1+cauKSSFTavAz2EojrQdWSJvKQT90Ydb5obzv5Kh08BmxM02kqTkzDfBorNHM8aVjNHd/yU1TsfsJhZbr31FgfHC7rlMbCkJE8eRnKA1u9wvmV+tGR1r9OIyh8QYuLp48/p6ySPnLNOBOkHUjRcnt5hHDpKPKDEJSIZaxLWBQ6OP6aZrVkuGu6c3NUeMeuxtqFrj7BmjrHgXGYnDhsbJDWkOyvG+YESCERVgK4IcHqh2M8T7pOISGCxHHFtT9O8utF0U8L785+rr/n9vJYL4GP0lHibaQDBy7bbt2+zWFy9dgiBjz76iMvLy2/0/Pl8zo9//ONvDVdebrfw4YdsUlL9zFu30BrvTwDL3/t73+rlvrWJMbhmhk+C+ESx0I+BJ59/SMiGdnDYdgWLt+D2guIstmlw1nJ3sWTuPW/OWu47YaaVQHKC89ORfjvyq3/5Z/zT/+8/ZLu+ZLvbkGKqaMuz0VAqQMmEtK2bvlXFJoOCgUlYDxsynubc0X7qaa3j89mSedPwpz95j/u3b6kz8R6PIHUbmflG+RQlk0viYHXIz3/+pzRNwzb+E84367027styvClnLjdbGu9wVsdKTE6xVDEE9b8WZQZofTmEiaka6qae2IWIiYlHj5/QNo4Dewe76lTBzl1ByqCBhTE6uMA7QEodC4jWy+sHNMbq9mEtzmVmsxkHh4fMFpnZ8pACuEaduqDEqesEK28s1jl1aLkgpuDFkHIhlagKUjdY4k0xcPboE8JuTRp6vIPOaUAgUkcfNj3BdzTtTLUVXEdZHWH8jOboNu7wNkUcyXpiSFxsd4z9QOp35HGgM4Wu1Tr8GLaEMhKLELOQxh1DGLEUdsVpSa9ziHMKRTutKU9qWkNr2bgCi4ajt+4xjoGLTwu7tWCWc+RgWevylQU/ZlLMjIxsdxlrLYtGA/0x9Iw5qWbDC9rNZrwOhjuWPGZcZB8ZUzT7kwyLGRw6Q9NF5gdr5rOWbnVAuzzC+rkecjHa2J7AmIg1Edd6/HxGkoZkOkoJ7MbAerMlhbGO8Ars+i0peNanR4R+jhSLKRZjCs4lnB+IC3BuxNuO5XxOEUc2HcY2ONdhRGcCI6ppK0knHeVZQzrsVG+0NsjncpXVl1IoFwGTs7Ilm0Q2EWNfXcoroiXe4+MvIT8+s/sNwBnINAPoy6w89/PbHsv1aUj1XYeBR48eXYOXv9q0J/LwW7+3c45bztEaowuyWKDw9S1uIuMFEGN1KLpxFHHElOl3a8YkxGaNyYY2RVxt5DTWqlpU07DyDStnWUgFF4qeU/0usF4PnD15zOefvE+IsU4dKlD1kSc241UgqIxNHZmXdL60TNdiYRjW2p86WPq1xduG0B6wmM3o37yv5CURrHP6elUIwXmPtQ6TE7kIbdtyfHRC23V0NfMV5KW2v5TaI6ynj1UB/fqhy1RnVlUE7e+sqko518ajmFWmNms/rGTY7XbEYIkx7ImgKvDDHtUpVok7pma9MM141Yxpkifci+zXm/aaNlgHxteSoUQQHZ03iTWUKyWhvSM2mtLrgAAKmERJ9Qu+Ieebc2LYrgm7DWnYkevnECkYq0M+YjFIyhgxpBBAvLKbXYPv5jTzFSEXQoJEYoyJIYxQ29KKEaxTQaWUAyUmEpZUjA6oryIjEb2JM5jWV7JulT2ljv6zQpSC9ZbWLzAh4E5bzLDDNB7TKofEoN8bWUlg
KWdCGhGmgSNX07FfBqR/s5KRBdpQcFlULkyASvi2oWAk0jz9Ne7sjFVruP/eW8xmM5ZHRzTzBSmM7PoN43atvboYxtiQSstus2N7dsZYLJvk2EXDR8MB5/kQOY2Yp5HYFcajQrEGkRUme5wtOFtIybDdtoiJiN/RhwNcV5itwLcty9VdrGtp/BKRhpR3xCExjkKIehINRuidYWooLwLR6lcVSlUkumswxtGMCdkIsf8GBKGXbB6VsWhK4enTp5ptLjdwK3+ND4poRXiHZsgvbtZa7t69y2q14vDw8Bs74G9rbdvyxhtvPDudiEm7+tVbSpn1Zc92s2O3GRiHQI4ZK5628RzePaE5uMXi1iGz+YxoM73PtM7yTus48JZjKzSAKYUUC+v1mn/2z/8hn376kN/+7pf0g7JkjVOBPWsdUoUlxExewwIO7x4gZoVfCH5hcK3QHVooifWHnzCeXxLjjl2/Y5CBoe/pxzl9ihTfsrz/Bm+av0q/6zn77JQ0BkwaIMdavyiEnHh0+gTvG0IYab0lxdqj+ZLqkSlnLnc9ubRktIWo8RZKoYyRghCSEJMQY9krJsU0KWvVHuOS6FNAKIRY8M6QYsQ7gxXIMZAwGFfJT1kH30/ZkuYP1/o9U6mzgYWUlTTaj5F+DPRhJGUI1Wm6Rglb1giSdaJOzLUhfRxVS74YSjG1XBZJVaIy1ZLAjfEEcyYNW4gjUlIlJcmEiiMiVVUrYFMmFYu1LfOju5jFMWl2wCiOkAN9vyOEkZwGpAScE9ysZeaEWWdBCrs0EnPG+I7GdzjrcXRYgdXRUmcSO4d4x5irUto+YDFYBHLBOcfBbEHJmYujY7yxRN8SKsiscyeEbFXcycWCiwUvwsxr+WvWtoQc2boXd5s373hjxuRJ6NpQRBv/TSqYHPH9b3HjX7B6+8e8+e5fp5ktmK+OsLZhvduwuzhl3K2ROpk1BE/ILbvNKdvzJ+xi4bzPrMuMj9Ifc5qPaU4T/oNMPrGEO21l2e5oc8T7QOsDu9Sw3R5oPOOgGbfMlmccjGe4pmO+vIv3MxUAKYacDEMfGYbnHa/UAFSHLodGGZ6D1PpDY+DIMbuMzH8p2mZ0c+RaQEuyD4B5KYxPn3L5ySeqGHXE1zjeADxEW4Jejk2O91Vb0zS88cYbr/x9fp/lVNisey7Pt/RjTwiBlApWHK7pOLp3m/mtOxzdOuRg1jHYwtolOmt4u/UcOsscbbMoqZBiZrPZ8M//xT/kl7/8czbbkX4csdbhm1bZuV5/Tq16+7YL6ejmP8a6B/hbgj8W/NKwvO8paSSvC7IVQhjZ9jvN4FJmCHOGnMiuUcf74Bbri0tG8z7DZgvrJ5R+re2bZEJOfH76FGMcIQw0zhCcxVizn7z0wutaMutdr4GFtWRRnrZyB7SXNmVLLpaYIBcV409JmbDThJ1QEkPWWdVujHgDOUW8NRjRofdgMOKqAI/WAEH2BLJJH6nkvJfozFlIWTSrGwN9rWOmVMlXIrjWYazTFhtjyFmlbrV/d1SkQDxGPKmOsEtJuSop5Zci6PBNrZTqeHNC0OEBImbfeiaCqv9lIaZMxGJsx+zoLv7wDmtxDGIZ8shu2JHGkZJGJAcaZ2hNw8wbZnNHLon1ZsOYBmbO4zsLzuPNDG+F5eGCeddQjKEYgw2RfhvJFEwlpTlR8qETw+F8iQDbw1t01rNNiU3Snl5b6+vRaeuTySr/oY7XY6xl1jSMOdC4qZfgu9uNTyeaQiOFwmrzdwK5TJhY6LqGg4MV8/lMm9Up7LZboGezvmS7vmToEykfkbMnxo6SPEPfsNs0DGkkhJ5UtWcRJY9YChIFzgtGMk2JeB+RIoTgieIph1YZmcaSomccCv22x5gt2/UG7yI5ic4K3l0yjiMhFjIjWQxRAqNJmFL73HJBgkqiuTqzo9QLUUqsgt03oFxVber88US2bBgJDLzqXqavtleV4f6h3uf3WUyJ07MNp6cXqrmbE2O0dPMTbNdx
1M1Ytg0H3rH0hmAKrYPWGroKL5uqLJVTZOgHxqHXzcV7ZgcL7MpjnMd1M21zqTrjtrXYxpBTIQwJpKVbHOKaOfMjy+LY4DvDcmUo0WNv3WM3OuRUGOJGW+1SwHtPMYYkYH3DsvUYcZw86An9gNmtkLGvM2wvlRQ211F7y6M74Brk7Izt8LkSFF+ClQIhJcYUsVVKsSLLFXKv5Z4ilVjFtWxRyTsxJcYc2ATVAJh5EGcoOWu9uKCKdlJIxmCKIcVCSqXqMyupOqfKOh4CoQ8qjlHfa7Pdsdn2DONY218gZiXdhRiropL2FpRCdcRT9pyqdrepQiQqVxljIqREnJSkbsAEsFKV04tqFEzqWVOQZ22LtQ1+tqRdHOPmhxTxGoDkyJAD425H7LfkEJA4YPKIk1LbrVSgxKCtViKe1jlaV1nq2eGNYdY2zNuWyn7FiVEZzVxwzmuWKoaZCAvf0FmHEeF4saA1hk0YaYdRHa/TkyY3quVvE7gIXdtx5/YJxhi2eWAsI97ZF+aQ36zjFdTpYvYShVIEQsZ8mHB94uRfO+btd2FxeJvGe1LKPH3yGeMQ2F48pl+fsT2DMP6YGFr63S1ynHFxusTagcgZfT6jtwOsCtY5GlNobYZtRt4fsTaxWvQ088B2O2e7XjCuLPEdJTGEhy1lXVhfjvjHT9htB0qZY63XCydGjGQsifWuIRpIfmRnt2zMHJsSLkZsSfgUkJLxucpYuobRtbjagpGL5abYEYeo+lNg4Hf8jg1rIq+O2PXarqwfAr98/yGffvoEV1msQ2g4vPuXmM8afnR8m8PDBYtly2xmtQOg1rnmYnAFbFUvCrsdF6dPWV+c4puG5eER9vAdzMGb0DSwWIIYUtB65uKoY37UEfqgM3CLoTs8wXUz7h603DtoaUxh4dQbPO5XbG8HPvzgnxNTT04jJa7puha8YxChXSy4f2tJyIXj+2+Rc6bLmSYXPnn8iN9+/BEmFWYRbIHZ4V1KKnz60a8Yhv6lTShKpbAZA8UYxpRU3z1oQ3mKOo3GSocxQhiTolNBlapijOyGgT6MbMeep5sLrBHuHHbkriHFiDNKeoohYpISQcUYcqrTo0omemVyj4MOLbi82HJ5sdGJRbkQYuTho6esNzvOL3vNWCvUXBBSX7DB7FnS1hiatgWB3XZHCAEjykyf6tOpZD32YWQYxxsL3kWg85kxZ+3dTZlx0CEaYrXG72cHdPMVs6M3OHrjp9AuiXZOiMJ6s+Z8tyUNG8L6CZIiNvTYnJjNWw5mHisFi6pgrWYtRTxN1+JbD8kgdQjF7dUBq1lX4X6hH0a6oEhD07RYa+lMvfmGE99ijeHW3fvkUrjYXHK2vqiOt/ZgO2WXe+tobUPbdty+c0eZ0DZSTOCia164QnXDYwGnzEPZzBo11PaDlDEhUaIlp4YYhKHvSQmG3ZZxCIx9TxgHUrCU4vbKMRSVVstJB9xTdMqQDRmb65Sjpg5PDgWTQCq/RyNhS851+JOo0kzOljg6hr5BcGzXRQU0BiVhOFG5uGFQLdZcGXGlT5SUKLFCQEkvTqlsuOIS2SdcSDibtG3EvOrLRkf+WBwtKmQSGBm5mg5kuJKr+Hbn1PRMz3cZs3S9neirbJoH+4fOXL+r5Zzp+x2bzQbvHc5aUpnqSwZvhNZAa4TW6FzSUttzHGXfyiBV6rFknWu0WCw5PDzGHB5jD48pviEvFpodqaAQi1XLYtUyupE8ajuK7yy2hbYV5q3BkXFF2+oMXGuhsUocrMcSUmQ7Dvjc4a0DJ7SVddsl8KXQ9TtmqwMkZrqxThZrBcnC+XylUHh8SS0wRZ1vzBmbpvF0VQwhKUJQTMZOspV1BmtK6iQnMpr+TBQ7kb/KviWIMsk0QhpVuGKSbQxNRsQjAn0/EGNks+lZb3Z7Gk6IiX4Y2PUDIUQ9hnJVYkpZRUdMzcwnItyUVZWaOSfSPoOfBEkUdn65giRfZYIKUiRryFlZ29Y4rLM03RzrPe3i
gG5+QLs4wHVL8HMConXvECnjAGFA4ojkiCsJKwVHxpSsjYNG0QBrPUgd5lGvfxForKXzKupijGBF5UHHtqVUx+uso7WW1hha5+kajzVm3xoqdayZiGjt3qhKmTUG7zyta2i7lsODBbko09pbdcwvugvdONQs1Obw2juVMThXaNqBJg48eli4WDe0s1OWR79T1Riv0FnoN8RhSwwDxqxxbk7xC4qAtw8x8hucJN3/k2H58YCMG5h58k88ZpORzxIkod/OGIeOfmxIUeASmt9q71jZWWLsuHzyNv3lEd4J5w9VZi8kQ86CmIKYTEzCrreE7DC/yjR21MjVmtoS4BEKLlsMmUZgKSOdG7l39JQ4P2U+fzERxq82gxZwf85X9aweoM01DfDtumSXqC7jVyhofIXFGL9RO9GdO3e4f//+t37974vlNLI5/5SLJx8hkhCpykNYGJbE3ZI8B1c6ZtXxptri4ApaR8x68xYW8xm+8fwb//p/iz5EdjTsiicaS+9a8tQsL0JLpCEymMxpsoSU2cRLwrow80sOZiuGMPD5+pR+N/DJrz7i8tE5l9sn5Lwhl0BMI3nM/ObjjzgdM/fLe7x174gsRoMEYEiBMWe6Wcd7997AAyvl/WNQBy5p4NNPPiI+Q3L77lYohKiqWzHnfXAiAk60ztdYi7OGfgj0/cAYIuvNlhAiu7GnDyORjGubqsvcMZ83OGcoOWmmnLWG/tv3P+PycrdnZs9mc05ObmGMYbfbEqO+9mazwzeO+XJByplPHz3hcrMhZkvMVklXpU6kCpr9TxmvS7bqXqsAj7GO3W5ku13rhzaOnAtnFxdstju2u91zHQqvzowR5l2DbRpidjTtgvn8iKbrOHnwgHY2Y3F0m25xQHIrYnubmIXdECjDDru5oF2fUkJPGTZYKRx2Hm/13MgXa2zXcHCwxDvHfDHDe62BG+OqjrPgrOXWckXXehrvaSs6eu/wFgXomgZrLY01eGdwxjBrGqwxOKsjGBNFrzFBx16KOnwj2lvdetUAb+cdwzjy57+1dBLw8oObTsS+4V5ZgHWChRGMixgX2KzhcmNpu57N9iHeCwdHt3C+IYUdKYyUtEXkKcYMWNcDI8asEZ5ixGFNi41CcxmJ20CcOeKRqex8FayIwUOEFK1GmSO4UdsAYtUVHdMBoV9iTSRsdogUYm7I2VJsVgwNlclLxaoEZEzgDXjVDBWvPW5KKDM4Ep7EbB5YnuyIzRbnXiXcK8AS4RbXc9nnY7YWlTT+cvG5L7mo9w9rgGOEA54/nX5f28j1v6eUuLi44OnTp8/c93xmO58v6jzZ33M838pqmeMGk+ecM2O/ZtieUxigBG0vMp7gEzn1kAMWRVMKVGGVsu8xlCpXZwTNmr1ndXgLjOW8D1wMgRHDBqeDwWvw54YdfszsilA6wxizqlqFhI+eNrWE0LPdXLLZbDk//YTzJ08IuaeUQCmx1vMS5+tLgn3KYnt3P9eUOos1lgw5qTzmcokXw6H1ONGpygbD6uCQtlth7UtyvEV7k0PO5FggZ0qK1eEqbCsoLBpivYVICJFxDIxBs91swHiVhfReJ+EYVcxRDYAxsNsOPHz4hMdPzvfXz2KxIITyjOPd7nq2ux1d16pGcSlstlvW2x2YBowOnJ6y2WkQgrYmacAUU8ZMY3JE5Tl3fQ9idIpRKfTDyK7vCSHeINQseO8odJjSMpsfsljdoZsvuHX3HWaLJbOjE9rlAUNuWOc5JSRkd0qJARMG3NhDGiAFvBEWtqH1hiEmwthjnKI+rbMcdnPaptFWMJGajWr2u2g7mjrZada2iiS0OgSnbVV0xlsdcmFEaJwOYtAZ5AbjHKZRtGJqQ7PU68tZWq8yv8YZdoNh5g1OElZuoI9XRN4G/kM0bSrA3yml/O9F5Bbwf0PLhu8D//1SyulXvhbgstadMMqQyyXjYkCSULJlPPCEhSEMnvikoXEDDU9o2jNy0gHQkiNd2yGNxTafIeWSdrbB
+pYxzdnubhPGGSm1esRnBRkz9JCC0cxBMhglWnk7jY0qV1M0ylX53IihZAVpydrNleeOdGQoo5BOleVs2o6msyTvyd5TRAi1RpRiwaaCcTu83dDMB2bzD4n5c5z70mzvT0TkX7zIeqPLzAnwLpqbqtBUw33uc0tuaRurh+WyUuqrdvKztgWeouN/ev0ij/XFZnM4seq4n8+Ud7sdT548eQYKOzs74/z8ihWdUuLx48dst1vOz885Pz+nbVtOTk6uKWzJ/5+9P421LMvy+7Dfns5w731jzJFzZdbU1SN7EOluiZIJwTQlk7JMU7QNSvIAwoAFWLABm7IBw/piyDJg2h/sDy3IMA0IpgSKIAmacLsp0oY4NNnsZk9V1V1VmZURmRlzvPEOZ9h7L39Y+973IiuyMqtiyMxirKxXEfHucM7d59y9pv//vzg6Et5/32LMgFKafthgxaIfesrWlvKbDw4eAnzJGPMNnvAe/2jLKCpcnS4kgq+ZTHfY2tpm2k6Z1A3elrmYCGVsPOup1evyJyVK3/SknPJ7XeXokiBjLuXXopCVOurc40wk1I4UDLvOMqbM/qRiajXiv1TXTEQ4unSFzk5g8ZA0v493DU19CR8qLr90na2dfdqtmtWwVFUqUyQ0kgYHUYReEqssPBhXChYyAYPjrnMsQss//Nt/E57CPS4C3RDxsahIQemHatVAUcxCNjCOkeVqSRwVyVzq94DoaL5pTRUcdeWpgiXGkfnpnCgjq6yO8+BozuHxUgFVWTg5HTg5HTc0miyZvu/phoHprMU3E7Dn+rkpk+JYmgei4MoClvJeaV862EG5yWunHJPqUWcRxmFQ0FZK/ICwqide71C1XHv1pzHTi9hmm9AogCpUNdu7OzobN9RgVFrFxoTPiS3vmODZ2p4Ra6GyQusv4a1hZ1ITnOHo4QNOjg7Ymk24srtDXdXs7+9T1zXWuSJY4qiCloyr4PDWUtcVdVVvFMqsMVS1LxmvOl4RIUUVj2jqWmVz6wpf63x2V+4DUlSqqiQddJEiq9WKxXLFanHK2HXE8ckTpU+S8Ubgfy4iv2mM2QJ+wxjzq8C/DfwXIvIfGGP+AvAXgP/l930nMfi1erqgXDhJuBjPHO/WhNXlBndni/7GBWp7xMS9D+2DEvtrRNo0qn06qe7iLeQcyVIjcZvV8jrj2JByow70UDD3M2BIdj2LVvHExmSCyxtxAREwWSkH66zOYJFcbZyyQciTiuFKRZ470kGFRIudBKraMYbAGFoylkEcZIMzevZVJfhmqY53epOYbuMe73i/DvxLT7Te6EdYO951ghcIXOe6zn29AOyfz/4elwYugJs8MrR+D9iFqVGdqw+7aoDlcsn777/PMGgvWUS4ceMGN27ceOR561FbN2/e5MaNG+zs7PDlL3/5Q9KQqWTBJ8AfwA+NxnbAW8BVrl2DN9+E5fIU4H0R+bEnvsc/0gRkAOkLDSUTgmNre4ft7W1m7YRp0xKM0xROEmsBvM0I76LIBOo0jNHN2jmHC46JgeUYGRe9KjGlgRgTIffUuac1sFurTKS02s33TgfBV95CXbMwltuXrjBv9pCHMMZjfNMyvXidqplw+eWr7O9uw6xhNSwIxjIzXofRZ4fBsMyZXiLLlLjVLemzlD6oZ+Uc9sJ1fuZP/pv8yv/133/iezyL0A+R0SiWw1stLTqrfdT1jOKchXEcWS6XZXB8WUsDGJ1LPJ22VMHR1FYdbxnI0aeBk2HOYtlxcLTg8GhJipkUtRNrzJH24r1mSGMcGcaRnT4x3dnFeasTnzCMKTPEAVjPJDabXq+xSoVKAv0YWc/5XVOj1sIO3TASs9K1ZC0L+snsidc7VBOuvf6HaC+/Sr17CVu12MnWhrpjBGI/qHBGjNg4EHKm8gV8Vc2wEpg2gQvbDcFZppXDGbjl4G4a2N6acWV3l6YE4E3bqDhLyXSrugjaSgIRqqqiXpeW6xrnDFXwRVVM/UUcR+aLBSC07YQQ
AlXb0kwmYMCW90qDDr8ZhyVj3zHGjvmJtgm6xSnDqtPP9oQ1ho91vCJymzJsTUROjTHfRJt6fwr4F8vT/hLw/+WTbEpisKJFZjMkzGrAdgkTQbLFLQ3hSLBLHefkCJB2yGPRssWQTVJ+mxEdBeUskpV87itLOx1xg2M1T6ru0iqn1kTB9AoiTlMDweGHVPxJAVMoVgtjlfyexZIqwzhzGAeNMQQLdj/gpxXDaDk1noQlOIMLkJz2f30+oU3fhhwZ0heJWYsGzo1Yty7fyUf2Z57KesMGJGIe+c25cu7HllwDsIMjquTE+vkGpkxxhZVdzpmu6+i6jsPDQw4ODui6gcUCxlE4Pl4+AqQyxjCZ6Beh77XXPZvN2N/fJ4SwOVDfw2p1iAYByrdcH6/ve1ar1fe8Z1WdhQMKelkgYmjbfeq6YrXSaYlFRnNZ3u+prPmHzRpLHRraaopxOiRka2vG3vaUra0JwfsNqEZSoWqIZrxrQRadN6tVgpSTlt+QzQg5azSsCCV6H3JEcsRKKgIBQpKklSeno/SyROIoLPsVBycHdDEznUy42k6ZVx1NPRCqmq3tS9RVze5swqyuyN5peVYyeeiI2eCMx27Gjpfy+tDTpYQxEYwOVmj2L9Ds7q+v3xOvt6pClSpAmc+q2ZHHGavgqaIFrM9X0BjGELInkwjBFT3l9axdpfGM40gfR1bdQNePYCy2AHp8WGtTr1tn+j2WqCIOMelAAydOxfYx576IyvuFR0FWKWkrTEFWQkpJecGokpnD4LyW1L0P5AzOffIM7InX23rs7AI5TBkISgXtOkUr5KTtkFEnF5kk1BSZS+9xxhGsIdhAW3l2pzXeWSZBRUrG/T0qMtNJy6V9HV+6uz2jamp8CHjvtcJSvrBpHJCcaOtAO2nU8VaVXpsCgvLO4B0ghiqoXKi2aTzeu6ILX4bbiwJ00zgyDj19t2IYOuLQk8Ze8RYlo35SWPMP1OM1xrwO/Azwj4ArxSmDqip83JwYCp8Il0Z8TvjDjnB7iUSIgyNnT3VX8PcjNulm5W2L9G8Sc6fkdJMx+SGD+y5SGWzYwtc1JEUTN62j2T2l73vmpxO60ZIveeK+YI/B3hSwhnTNk7ctcmfA3k3ahy2zenWDM4zJkZOlm3gOvzLBTCyvNpatYLBOBzacCDw0MGQhVBE/SWSTSCSa+C0uD/9bbDrivfF/wyL9K0g7UjULfFiSYmQY8kcqVz35ej8t2wZaAsIrnM3sBXDlv7WJCPfv3+fWrVs8ePCAb33rW5yeDrzzDhwfC1evRq5cOXPe3ntefvlldnd3eeutt4gxYq1K653v896+fY/33vt2cUaP9gcPDw+5efPmpj/svef111/n4sWLm+d0Xce7777LMAy8/nrm0qVjjo9hsXi01/us1txZz/b0IvvbA/XEEGrY293lpWtXaZqGtq21zFgGeZAjOeq0LBsUFyFGeZ5DSqzGHms9DVsYewaZCxZaMp5ISivGOFBZnSMbU6TvdZMMlYr1ny46ThdLHp4e8vvvfodsLV/96V/k0vXXOBiu8aD/IjWGi1RUKPLaGxi8ofeQhsjq6ASJCe9rnAvEEKBqSCkxPz1iMfSIC2A8jBbTuE3C8KTrreVFr3ODjaLfdaiEpaq0KnZ0cML8ZEXXq4NyzlK1tQJqKkOIDl8H6toTLDiTMJIYh4HFYsVi6DhYnNINCbEV1UTFSaqg5U3nLDlnFssl4zjSxcyYRvoozJcDzivVKWbIxmKCJgo5UUQyZCMtmwowTHmlFGWnhHMBX9U4EfAqDCLG4atItUwYFp8o833i9a5a7LWvscowRkhdR+wPMDkRhg4rmQaoDNShYqttCd6zvaXVhGnraGtH5Q3TWjPd2hmcgVcvbIN8gSp4pmtHOmlx3hOqgA9B++0xkVJkMc+MUdjbmbC/v6fBVmlP2dKmWQudeGsxVAiGuqnxoSrOXOWHhxTJKdKtFox9R7c8Zjk/II4Dq8Ux/arDi9CG
mso9R+UqY8wM+M+Bf1dETs5viiIixjx+xo4x5s8Dfx5g96IqB5komDFjesF2uWSrlJ6A0iYs2nNUQZFQKm2DRs7iyDkUh+XAFA6cUUATPuNywlcjLgzkypJrMMFijNchBpUhNxbxWuqxmA0ZZvPJnCU5h7QeM/PYqcM1Fu/Bisdlq9mSyUjpF1lbgDA5Y1LCphU2rrB5wIrOr/ROZ/2mWBFjTc6PpVbYJ13vjTkeXwt+nJ21FvV1FlKGlGwh7WuC7jjb7EdGHSQxDMg4ao+r6xiGgZRSyXA91hqqyjOdnjm7qqrY2tpie3v7MSdzZsfHx1RVLopHetuuJ5FEsXR93ogyeK9iCectZ6HvM8OQMUbLT/r7R9buie/xV1999bHnb4zBO08InqqyVLWlrgJVcARXom7RzyBlvu2mVrqWVz03bUhkfbHWP6WKIToL1omoBnOOGOPAOlSKUIFHGNmUWnMRQ4iidJWmDmxNGsZa6FtLLbCVLEHAp4jNKi0qBkbJyDiQx0RGVeiMV4EDX+aiOmuIkkmiWA4T7QY+8aTrXdeBtSjzml641kdea0OnlBmj9rut03LwmlJirSlUtXK+jiIjq8dKSemDY8rELJpRO9WpdkEBWM45cs4455Xba1WnQKsTqoy0EZcqGbJYo5J26Okja+qQXstc+vxrEQ1b7iEp70GpgBhrfxCU4BOv92TnMgOOPkXGlMjjQO5X2Jzwhb9mrcVbQzCZxhmCN9QOKm9og2dSeypvmdaKLq7XSGJX6eu8o6lVLcqX/u7a8aaUGIxOHRq8BbElqz3jQRv9QBjJilsW3SnWUkUiSifLkouCnDrcnCLDckkaVSJ1HHpSHMkxImWudXDuqUyD+kSO1xgT0Av2n4jIXy2/vmuMuSYit40x14B7j3utiPwy8MsAL3/hx0UkIvci8iCSojDEMhuwpPBKnI5Yk3A2Yl3E1yu8GwjVfbw/IqaGbvUmOQr9uMSGRNXOmDQThpRYdj3ihP2r95jtGQYGxtVAP+5zat8kuoaxaol1ADIMgncJ5zTzNaI3c3+l4eTqFs2W582rE6pgmJAIZGIvDMOI5MSk6QkIAVvG/o2Y3jCM17mz+F9g80Bo3uJKeMjF+iF77YEiRB9+mfniOt3q9x63dG8C//6TrLdZy7+s2USfxDp0SFEHXAYuwOnxKbdv3Wa5jHzwASwX+pYXz7+ubXUD2N1luVwCWjJ+8803McbxR//oS9T1jLaFyTnWkbWW2Wz2SFn4MZ+Jtm1VOalktRG4BRwLsJrA2+VaAhQk96PWAK9jrePq1X+BL3/5K488+qu/+v8yPIV7/Od+7ucev3lZcHXGt0LdGprGgomcnp7QVRW5SD1O6gl11XJ+kLuzAessMY8kiThraCotl1oxkLQPiIAZVcFJxhGGgTz25CqQHQx54Hg8wRjDzE5oK0PdVmyZKSZY3jSAdVzY3WZSWSw1E3H4DNMo2JyRUZAkOpzdGrqUOTk9JvU63zTXDZN2wvbWNvvTKdIGTseBWwcPOVqckuYPiDdukYcRnsI9vjVtZOhH/d4CVioINRRd44wwpsgwDoSqYba1TZLM6eKUvh9Vuz9Y6sozndTU3nJx5mm9pQqBrh8ZU0JMAJdxjSV4dcDZZhUC0tkL+NZjKkM/DviuB+sYk1JWxKuqkyYHDpPNxjkbB5LBubPseUwjkDFllKC1On0nx8iq64gpaW87KZ/3E3Ycn3i920uvyTdvfBcbO0wamZnInuupnePCbELtPbuTlmlTU4WKtmkBIeceYkfFhMa1zNrAhd0trUwUx1sXYQxbnK8xBhv0c1unAU/KlmAdKYC3DTF5KpcZVsebYFQEjOjc3CzabskYojiywMHREWPMHBwccP/+A4ahZ3FyhOTM3vaUSVNTeaHxJRhOEZsykyqwM21p68dzP34Q+ySoZgP8x8A3ReT/eO6hvwH8W8B/UP786x97tDK9QxYZOdAsMTnlx651Pm0ZL2VtxLke5xLO
DVg/EsIpVfUQ6S+TlvuKFk4DKUesD1TtVMcFdhGxiXa2pG5HhsUp42qOTYkFr4KpyE5J4Mk4UnbYdbB3rgcTZ4HuakMz8exPaxpnkDggWcdxrSOhEAYVAKGC5GDM0EVS3GbofwkjsFMvmbgFU7+kDSvG0XK0vMjidEYc28etVvfE673+PDMURFVMHvO3jS2Bu+V1E31dn3oemoecpJG3j+DkmHPSG8XaFi5cwMS4XkCqqtqgEr/85S+wt7f3iU75cbZcLjk5Odk43tHAw/VHmK6HNny/7ScAexhTM5u9zoULZ5FIKdG9BvyXT2XNP8KsF6wXnFdQFGgPNEumGlZkMsF6vPGqHeusTqIxGmVbk8hZ57sGq9mOgQ1OQOdEJ2xKmJwgRUgJstMoXyJd6rHWkE0Ga/DBU4vO2r0gBqyjrWsqB9Z4KhxWhMqoDGoU5d9r00jFYxg6pO/JoQbnqQzs1DUVcKFy1GngcDnneHmKDEvGg9v8zt/7h/AU7nFBs1JTMBo5rLP/sylIOWdSTtTWMpm2jDFysjgmphHvdJyg95Y6eJrgmLQNk2CxRdwliSBGvYPxOsnGbEr/Vnu1BlxwGGew3m0y67SuWmAwdl2d08DeiAJKDKhTXmfiQI4aTPmibb2ux4nIOdGPRMyi03o+met94vWOKXHv6JAmrgh5oAoZV2eqKrDlG9rasDcNbE+aMokpbMrwKSWsVHiTtdTc1gp+smjJ2dsN9cetVVxcWauSylrAeIPLFkMgJQNk4tCVsv06CFXHKzmS84gYR7YVKcPp6YpuGLl16wNuvHuDvltxfHiAQXjl2hV2d7bYmlSYWaMIeRJkITi9R7xbp9U/vH2SjPcXgT8H/K4x5rfK7/5X6MX6z4wx/0PgBvBnPv6tLOIq0gVBfAExFLyBNVqiDacDfpXKlyYXGHhQjWNnyDaSY4KUieI4uLeHqyI7q47Z6gNWXcXR8YSUQLIvvL4WidukuFece8KlEUkGdoTxDU+2luzLTW8cgiVteUJKyCpx9KAniGAe9piVAlLGQRhXguuzlmeXQu5B6oy5lHBdIgwRnzL7swO2t5fsbN1j2txlmRvG1Rb9oibFx4pabD35eqPfxwPgvbNfjeVXKjR3BMzPHhzKTwa+q0e6N7/Hew/eYzkf6d8DOdZXAdAC++Bjw/69Cc3pHHZfha2X0SlGB6SUuH//PvP5ueM8xo6Pjzk5OaGulUawphOJCO+99x7vvfceUlWwv08KliWHaKRw/ImWAnQTPjg44L333tOAZBfuvPcHoKHJf/WprPljTMikPJCkR8qAMWMsxml7RFJGYiSNHckIWIfNvqhXJayzOjZOdOYpTjf+URYk43TAfcqkGIl9R04Jj+IZgoGAMHGBS5MdjLVMQkvlKxye4CqqKuFcjVjL1HuCCL5wSHUYOJCVrpEK+jYbA1XF3t4+cRhx7RQTanbamqnTgQ7XsCzE8pDMXCKdRO7fu8cH796Ep3CPG6PjB1McyUn3jaRf/tLn057uZNLQNJW2GEyZPpQtde2p6kBdB6qy8Qdn8c5DjMScGHKiHxNjzgzjQEyZECoFxInOxTWA9RZnHC7YTSahhQijms8GjHOI1UqFKyMKxRgQNjxeQXWPdWxjCSKQovFcxoui/WxTZv2yoYp8X3vi9fYGLtWw03omxrDjhUtBaOuaqxf3mbQtk7alrmuW81Puf/Aufbfi3r079EPH9vaM6azl+rWrBPcV2qZme9IQvEOyMI6lV792bBsAqHnE2WURhrEn58QYB8ZxPcJxDZaKiCRyGklpJIlhEM8QEzdv3eP4dMHJ8TGnJ8cs5nPufPAeOSVSv2Rne8b+9pRubxskk4aOMUZu3zniwfGS+enJE+uVfBJU89/jo/37H/vBDmfA1cQrYC4pgAkpUo45Y1LGvBexC0rpKIMIcQwYo2RobyM5aiQfR8/9+T4Zw2r1HXaWN1mt9jk63CPnsHHswSWCT8Sx1j6sifg8QIS85xkv
eEaBIVdgDMlqH1iMoYoJGUYeHi+xfSK802EPItkYVe0xDmdUjH7shJRBrgvmasTNM81BopLIpe2H7F88ZtLeZdbcIvc7DKs3WM0DaXys4/2GiPzck6036kAfoE602ICWaU8RNG+89ehr1n3et/Whe3KP78p3GVcj8l3gWB33IWi9eQea2DK5U9OYY3j9ddh6HX3WCTEO3Llz5/ue5tq5rulEX/rSlx6hE928eZPvfve7yM4ObG9DcAj3gPv8IND+nDP379/XzeqanjuvVQC/8Zj1hh9mzR9rQpaBlDuyNCgfyGGc9l/J6nizXREZEeuxvggH5LH093T+TUERIQaGXqkNOer3QkfdqQMKKArTG4NDR6O1fg9rLdNqQnAVxunGnkXYapQr6nzAZNH+rFFHJVZLo9Y6YhkxKBh8VSP7F0kp4YJSOtpJw8yrIF+FZZUtt8kcS8TlyPWLu/zpf+NP8lf+07/xxPe4tZambuhFh9lL1uBDjFHEraFQhSZUVV0cbyYESxZH21a0ba29d2+L8/UE72CNhk6JboyMOdGPPTEmfLA4r9lcHkfdZ6oGZ53q/pasdj0RyhYcClavuch6ilLB/2R1vJoYnI1ONEVwSNZSlzmrPC2qL+yMonw/kdt9CnuKN3ClhittYK+qmBjYcUrReenKJdrpFOM9xnmOTw64eePbnJwc8913vs1iMWcym9C2NV/5ype5emWH7a0tKreLMTVjipAjFhWygLNqzvnrvQZQDbEn5cRytWC1Uk558BXGQEqKeI5xII4DMcMywrIb+YM/eJu7Dw6KKA2cHB1x88a7jMPAan6kPOIL+wyLi+ScWM1PGceRDw7mHC07To6PP1Ic6BOv4xO9+ge1aHBHjtQ6cq1ayxmjMO0oqMaMJSUHBJLVnquIwZDp+10kX2Ew+3TTliQVfW7JYunYoV5cZBy2IAfIWkIWMeSZMLZC9IHRVGRrsX3GH4+IzWTrEW+QRocoW9ENzvaCGcq4whQxQ8INWQU47JoQoDFJNoJpBVzGNAoM8zbRVAO1HWjqE5rqEGsGxnHCKBPGqWccDTk8acfgo00oOWGGYVDazTIl7i+XLNPIijsM6kIftYySyB7CIXMekki9aLp87p4LA7QnkHzklBMwhubqMTUnrJYPePjwJil9L+fWe89kMilwfnW8i8WCGCN9ShznTCfCBPCF5ysijMuB5fsnZc0OETlkdfDoOYnAcglHR2e/WywUSCViWSw+4PCwqAj5do3ge8ZmznRt3VoCT2flrhWSWH/OrKCQlKM+FnOZp7sGh6yzgPVAd1Gd8pw2WsRSKBLr8pvRCXZrjBZpHEpBVufLSi7PBy2NJs3MpFA3zLlzU/qMwaDgqRAqrMs478tndIUeqwG1F2FiLDvWIz6wDBX5KcjugTrWqvZYUxE82g8tkxCt1QKtcZZswFohpR6RRF07nAs0lSJsvdXN3oghjaOqcMWEycKYMv2oU4xSVBaC3kvFJ6yV+ESUBmaUQlMWoYC4CtjLeYz35TqdtbfWg9o0a5NNoqegK33fdc/alvLreirPho/8hFnYJzFrUKCUNQRT1s2qopQ1ZVjEMBBzz/HJMfcf3GM+P2G5WtAPHSwzMfUcHDzg1q0PWO5sM6kMhimUFok1snFMa8CklMVeU8FEhG5YEZOKoiy7teMNgJDGocwFHkhxZEjCSZfphkgcR5y1bM222N3e4mA64cG9O6yWCzLCquuYr5acLJZITqrBPeos5WHQkYxPutjP1fHaztB+s2bxRcMw8yq1WHpH9AmHZp15cCqLllul+ZQPuVy0iLxBvBjo3qhJwTPaVif83KqJt1/GWs1uczbMFxOG6Bn3HPGyxlA5W8yYmb7f08xXKDzTEncC3WstORhFgopQ3x+p7o1YMr5k37lTSgBGEKd8vSFm7RlfTtg9sB6MFZpq5OLuCW3u2N+9we7OB6xWFzg5foVjmTF/dcriYmT8te87BPeJLKM8gd8H7h8fc+PG
DeLpKcM77xAXC24ycvejVKBGVFaTxFgE2j/E5GH/FN74jiKb3+M9nLvDK1euc40L3LnzNn/n7/wKi8X3lph3d3d54403vodrCxoovI2qYb2OanWsbflwxTu//U6hhnwbuEfsNGNYW0pw6xbcvXtuHTKMozqt27e/w717DvxlCK/xwwx3+EHNGkNdNbTNhKae0NStUl+cL6LrOnlIkiEbCrJZ++XqoNeqPAUxW26ZWIBVWVLh/srZn0k2/06xbOxGUb/EVDIr5b0iWr7DQHa6kduC9lWxeo/BlNK4weAUKOQs7dQhQpkbq5J8pkzucSlSpchVVxGaGbfbbbrp7lMT9vfOsrtTg3hAqSbjoFlTZT3OGN28RRHdXX+INYb93Rpnm7MSMGCJ2JTo5pHRGJzRqTeLfuT4dK7j73JBSleQRjQAMQ4MZXi9KgCv+/nGKXjKOocPHlfVuKrWyUm5O+O0rwMbtOXmnCksAh12n0THEApCqKv1SxAUkPW8zBnYrQ0zb2mtoXGGylt8gYGnlDg4PORofsq3v/Mt/unv/AZDt8IUqsRipSjtGDty7rh4YQ9vfp6rVy6psp9kHIa64BeyqJNbsyM29KucmC9PGUad69t1Ky2olkrB0OkErJR0NvKyH7l7vCQKuGab2WzG137sK/z0T/80tz54n5wGHj58wJ3bt3h4fFyqG+PmM6UkHM47ln2kG+MTBznPdzpRBt9Z7GAhKvlQvAIUZF26tUL2GvXlgrMXVzYEaclSM1rLUDty5Yg2qOOkZRws3o14u9RMF0+2nug9Q/DqLEskbBBs0kzAJEFqixnyhla0ftzFhM2Ck6zRZ14T5te57rropqIZCngRTBR8jtRhRc2S4Fc415ERujRhcC2m9TgjGP9sM64R7baejCP3Tk+JJydweEheLDiCx+W7AI+Q1RWcxKM8InSm6FgS2uQEJ5ExD0DHOC44Ojp8bG9XjOHCckmVUgEAZZxTQvu60q0kAO1fWVsTwgRnInk5kFbrZrQKQmy0Ns7Z+b1dKHuzwBij9pLsFGtXunE+Tyu0F3V2WRGuWXumIhkRzW5TYsNlNiWbEGOQch+vs9lcnOuZ4y1TbNYbFTr5RhXb0PtXdNZsNu6RqoMAuaB0rbM4rw5WXFa6BgEjqnmbbelNGqOMnuLUNyCXnHVSV0oEoDGW2nmqqn56E3W06q6lXLOWZ1WH6Z2eJ6VqlgSt6xZ1K+/dZtoTaNnRyLlMtPRjM1ryzKU0KZTSb5KysGzWTy9vqYhtJCnXl11rDJvg59EbYv0mm2c+YgWxSwkIhLXj5UPv9axN+6eI1zYh2pe2m3s30/cdi8Wc5XLOarkkxoHpRMUyhkLPGYaeo6MDnBUWizmr5URHXwo4jALZoNzLUpzf2d9zTiwWpwxjTzd0dH0H5VxEHnW8KQrLfuTkZE7Gsl1vUXnPZDplb2+X5WLOdGvGcrXEWKv0sRjpiuPV75KQN8jbJ1/v55vxOsNk4pCHhjCHYcewuuyVK2otOQjm6oCZjnAimHvolbgKtJBK5X+cerqmQazBqTgVOXrGIROtZYgWCTBcC8TWkneVrysUJGJtGK56ZNfgDzLhIONWkebdlXJ+L3vyxDDsesbW4xaZ6vaIGc+ADSYX0IMYnFF1IH8HeJixVkcItvUhe/u/R1ud4twJwyA8lAlv+6vYesL1C7tcHFb8WvMYr/EM7Pj4mO985zuMqxX0PcIjsKpHzBjD5cuX2d/f/77v2Z/CjVsKPnzpOsy2LHzMa0Cz2ncAF6Omp/M5ly5d4tKlS0zQSUk1nhnXgC3291+mrn+Cw8N75PzrrFYJbdJ+/LFAA49bQL8uoR/Dzk7LlSsWa+Eb3/hEb/NDW8qJ+eKU45MjrFnPYM4YEs5YpnWFdw4mE0zpbUsprXmnij3ZZaxV6kgqogtjShvHK6Kj6mKKiBTuas5aurRZBWmcL1VJs97HMcWJS2mxjCV79r7Clxmmla9KIBawRlWhFPxmwfri
TFQNytqB3q5ISTfhmCL9oIPhJ5Mp165dP3NuT2iSFdHqg8N5FbSv6oAzjlnd4qxl7JeM46DIVhMKn1wnFpm8DqiFnPTvuejJWxfAeXxVM51tEVMmdpmcYBwSMS50elpdOKRyxg8O3uKcBgIiiXEcGFPEZyGgWdQYx4JEf7SPCVpFFjE4pyVlK2YTPK35vDGqY3kunZJiwzBw69YNzM4OZjrBTRpsmGCcEMclKffcvXODG++9x8HDO9RtYKee8uM//lV2trc5ODjg+PiYvl9x/+EDun7FO999h+XihNZ6aqvBkEsF35NV4W/teBVMpff1slsWVbKRsVTL1vfy2A+kFPWaxcSyG7l7tMD6ir1LL7O7s0fbNBs0+tb2Fv3QU08m+PkcfCCWKXoamBsmbkqdDYfLbn2gH3odn6/jNVBVlrg0mBMAw7hnyB6St2Qn+K2ENEk3kXtGOXLbIFso8thADBWj183JpahE6VR6w8lAVIH0uG3Ju4YcSnnO6DHFCWnbYSbgVgr3N0PCdwapLMOuhYkjNZY8cThTBhimTAmLNZDNtvRwylzV06jZsRWcF8L2ivbabSaTE4y1pGhYSsVDu03rW65PW6QSmvBsM651JN51HQ8ePNhoJ38/M8Ywm824dOnSI7/7sN03cPOOZh2XdoF9o9Qivn81ZkChV+QMx8dwcMBkMuHixYsE1J22YoEdMBeYTJT/G8KEw8PfZrUqj31CO0FJin1EU/xjPc1Ll/Tcn7XjFRGVT+xXdH1H8DUiCUmDjiqTRPaeugqkHDY9bcN6o9X7TorQoIpmCmPSUuTaceacGeNQ/uw1s7QZTMZZLW0bDOv9XpsRWs7TrFkY0kjKmRBqQmjwztGEWh2vDUpn8oHgA8Y6nFfVLWPyGQqV9QzijpgTMUZEIISa7e2dJ0aFnltZUoo4D8Z4xVZ4h7eeuq7w1iFpICcdTJCMOyclWHrVuchjr3voWUCK3ILVKTZVXeNiVn70WoBhTDhf+rbFQa7lI916bmuRn4wxqjd1EUadP51z2jje89+ssyqHVp0QKRSu0pVfVyhKVeHJp8N+ckspcnR8wF7lmQVHrD3WGYxdA5pgfnzIw/u3WS5OCMExnU159dVXuXjxApPbE5qm4cGDe9y7d4ucIg8fPsAbYRoqWl9pMDSeDx5VInUtlToMen/3Q09MUWcT53UFRYOYcVSU+xjTxvEeHi0JVYMxhknRBaBUduq2oZm0+CpgvVfxJGPKiFeHwRJ8Q4Xy7Z90xZ9vqdlBM8vIYHDRaWftoVb/h7J5+FFLv3YR8abXvtcByMIQxZPFY7YS9kopfaWETVn7M2ZU2L0We8gW8nqouAEN8aGstiI10eoTgqrMpox/mDBLGPY9434gzTL5OgrIOojYVSoZxBocoZthMpmIYZxZZN9ha0cH2AGS7CPMqGbXeO3SvgqA7zTELqlA/TMyEeHw8JD333+fw8PDj0XjedTpVSLEoyPeF2EymbC7u4tz6+GBFUrhmdO2cP267il1rce7eXzMrfff596DBwwfVVLsKArgbOYdnJyc8P7775OWibemb8E0wd4DaM90mOv6mKtXLzGO0x9oHTqUPTRklAJ1Cjs7cOECZSrTszURYRyHoiu9wBhTMkm7AeGATq4Zh/4MFIUhpag8Xqc/giFZ7cmOSZHMqWxO49izWJ4W3V/dvNZZrfIj1fGuy5TaG07nQF2bkQy0jWCNx4j2lAWIaUREJ/1YO2KMw/jxrA9s7KaXHFNksVyq2ENUOg4CdVU/tXXV4QShOKNC23EF4GWVMxtCBSI4ElZ0DJwPrshMKhDKWU92RTHMCmSU5iVamg8+4Bw4Kp1MJBkhYyxF0hOwpZXVGmoXdI2ztgdCCBjnsFWFC16vbQl0VA/gHLCulJtB+ave6tD5tRjb+nb1BazlnmfKmxO5mzOsGnoPSzNwFEa89yyHU7II9x/e4eHBPergefnll7h44QJf+tKXuHz5MluzLXZ2dvHecvPm
d4lj5O69B/Rdx1bdMq0ajGRMzMXxDkUmUqldOenfs2T6YSTlRCxobzUpvlcz5jXnedVHThcrqgirVceq61itOpbLFctVR19GRFrr8FWFC5VWPAoyXQzrQY6bAO1J7PlmvFZotxNmNITkCL3B3TXECN1SEYNlN8DZkeAXIJl8VzebHBtMMtirGbebMV6FuV1OWIkYO24ABxnlG2Zb+IawKc0bg1I4nH6xctRFtTZBFLiXsAh9aBiuVhAEO/GYLtN2K8JCBaE22a/VDSviGDEMO47h9QojnmUv0Bv64RJjvMZk/xW++PplquDYqQ2reaSunt1lyDnz4MEDQgjcv3//kQEFj7MKLfPORPjuw4fcePiQy5cvs729jXOhPLqFThGbM52eKVEZo7nTuwcHvH94SH7wgPxRjneJDjyCTWp8eHjI0dER5sgQQ4TtBNVddZTFmkZ45ZWrP/A6CLDRqvrC2fk+r/bYemBA36+YO8uYBtqmIfitcnY6PSvGkb4rpV9t8qpTMToQ3ZSAMTvNgmNW5zrEkTGNrLoFB4f3SfkMAGKxm8x53Qst+xMxK01Goc56rawPRTfaEHyD8RaCATFK4UgJEaXjYCx4RWd751XswxqcNYwxcrQ4LZN1LKAa3G3TPLUsTeU/K6wrWAvrAI8rgxKsdYSgA9CzyQSjYYUrzs05sOtgVHTdU6+ZqBirfV1jqSoPGGytgyAogbwG+lGvhYxaog8VfmLLGMIOAUJTK03LB4z3SM44Y7QyAaSYdOypnEOiG0PlHN57RdLGXK5nyYhtGZpgnx9GQXIiLk8Y5p6VidhU4WShQeGJSmbevfs+9+7d4uWXrvOFL3yZa1ev8pM/+ZNcvXqVvb09Lty+TYwD7jc8Q9/xwQd3eHDfszudsdVOdFvNWsEZx3X1JpaqibYERIR+DTwrcp7nuuhYp0FYTJGYRrohcjzvqEZhvlyxXK2YL5fM5wuWyyVdP+goRuep6oZQBZzX4CmuIeyF4vo0XO/zzXgNeF9gOtYgGVIQnBEkoFF8skiyCujAI1ZIlWqb5uzIYqFVEI9J4BYRO2ofiy0DI5iV2SQRBp2GtEYjGASTwS4EO+i0ojVYal3wUYSj4HLGpYwUpKcJljRz2FGQIWLGBGvpN2MIWw7rPM1+hew2tP2M/uQiMrQkmSFSIdmXzU+l4dJa7u8Z2pmiiwe2tSneopF91ync95xZ1vtKjVCRc1N+k1GPaVhrV33YeYkIue+JfX/G4XnEaqDG+wlNYzEWVkyIxPIIhLZlgfa0WoTzHXA9ntX37TpkHBmo6Fifoz6naeC8CmWMsFpReosrYKSqKpqmeS7gFEX7VlRVVcQXAsFXWs51pXdaUu81MCmluPnQxhhMSsXxapcjIwxxIEliGAeGcaQfuk2WcB7sRF5nonqXbxTAUlL+pCJIAPBilVo8ZnJMZBI5Zm37xDWqVMvaYgyS1CnF0ke2TtHSYxxZLbXHq5rqhirU2uN+iuVRRXsrk2Bz9xpzBooR2XBf17zY9TpQMkxTyoomi0pBllJzFp3lq7uDbuZ2/VxjtGKQ1yCzEiCVwprqQK8z8RLwoFnt2sFC2aOsPVNd+tB9s/7PljL2WnPYlHDG8Bgw1jMykUy/WnB66rBpYOgq8tjoNa88SYTFcsEwDogIdaX3/Po75r2nrmuC15aFCErXiSNODDnmdT5TMtax0IKiBnAiG6DbmJRilXImpnVlR69PVWkvPwmkoraWRbERq65nvliyWCxZlFm7i8WSxXLJMGqbxcSkPmrteDFYEzFGzmXXP7w9X8droW4El1V4P1aZtkqkBN3EkJMw9I44VKRoGAcFXPUvWfLEEJ1TDq7N1GaBXQnh3YidC3LFkt7ycAT2XcGIwYsKA0iZArJxvD1UNxLuWHBRMB4oIwABHbtlM1UeaZc9Y3CsmhppDcPrhnQ9E2512Jt6I8TsMMGw9UZLc6Vm+/KM3WvbrO62vPe7f5jheMVky1I3hjrWII6c
YBkH5t1AfFoIz4+1XWByxtNpErz3Htx7nESrQcWar6C3iUOd7U10cxsf8xp0A79/H27fVm8XP0xVughcZzp1vPGGhwq+y8sck7mAziqr7AVuhlDUlR+lE20sJfjgAzg45IArvM8rBXWowd3rr2sZeW1dB+++C8tlQmvcD7h8+TKvvvrqOeT2szPnPVvbO+ztXaCpW2pf0TQTtmbbOGtprE5oyWkgRlXb6ft+QweCNV3Hko0wmkySxGk3Z4gD3Wqg7waMFc3+DIVXa0kj5Pjo1pzLLNdYemCbnrIxtAGC80QzkPxACpBsRKwjjpmUhJhG7aOBauBiyhhN3VxD8AzjwMPDA8Y4bvxh287Y2tp9asGOlpq9lnwLIGlD+YmanSgoJ25Kz4p4VseZhp6YMr5klmKVkYAYLWuW4CKjM7ytt3irkpDWWWIcGVV3GucCzoDNCZMyzlnautaY39lyLrG0ANZB4Fo4w22Ut9af68zpotSmwuVeD1AwhXdtn23c/ojFceTBnQ9Y3L9NbQxNE5hOyyCDugIDdx88ZLE4JUtia3uL6WxKSom+63DOMZvNaNsWaz05w8OjE3JKHIYTKh82AQ5SxGDyGtWs1Yp1HpUypc2ipWZrtKLinGNry1FXHjEecZaUINFDytx78BCcp560bO/vcv/+fW6+/wEnJycsFguGYcC4iOltCb5Ur8HZAWsc/dA/8To+XwENNOK2Bs6Gh2q5RyiVLtHYDuOJKSuqOVikQkvDzkDO2DFjh4xdZcxSI93cWsxKNKV2Z/w8SukCKJODwHSCWWg/R8qmIAWRKF4r0cYabBa9sY1m3alxSGVwjSV7LUVFA7ay+Fmg2a2Ybmd2Z3PM8UDMM/pYUeceIZIy9Cnj0AsaY3pqCM+PNgsE7V94j6nQmQGTRPL+e8smzmGMw9HgmSiykrKQfIKbbkywHNjUOo3BlR4XqYU8oaqMYrBqqGgI+lcmgKGiJyGMpDIiaS1AsKYTiAhpGJDlklVecZJXjzjexWKD8QJUUOPkZO1458AJ29vbz7zasDZb+nxVpdG+9wHv/GZmrAqyaF9PN5czPu6691qWklT472ktlxcHhrFnGAcFiqw1gdfUh3MUGf2464xVlG6RSu8qq3CDOJ3/KymTx6THi0kz7aiZboqJNEYSMOZMFkMsSOvkkwbRpac9juOGdmOtp6o6rVo8JStVwNJTLYGK6DoJZrNpWw8uK/Zj3SkVWSO69b1MuQrrn3zuIGLOZnafTzFFzjLisxJ6yZBL1pTK7ySL6luvr0XJjtevOh+QmDXHeAO2KgMA1n0Cc/b652Uiwjj0GIQoQoyenHt1vIMq//V9V4IL2VAE11krIoU7bTZrMI6JOA5IEqJLm4ADdLrT2vGu22TlFiYJJYvVe9JaRxaDExhTxq51sk15bjngquuYLxacns45OTnl9HTOopSfhyKUYkQwTjZVDz2wgnvXgfCT2HN1vJKFYVW4rtbgrcFXAELT6Ifre8s4VoyDsOqCSiAfZdKx0I+GIaLQ+izagF/q3accYINsGfhCAVO15uymluJ0Rbm7KWm5goRKy80M9or2stYgkTiryXWlfWLJ5Gzog/aQ4hUYG0W+rSqLrxwvvTZjf69ia+dv007/Gqvdy+RX/2vknR0as2KbJQd5xu8fHtE6w2sukZY9fXzyC/nRZtGs9Ytsbx/x5pt3EZeh0RvxHh/i8VYVXL+OaSdcYsYULdt+8qTQoFltizq42/gmcP1nf5bp5SvwYAkPVtSVOkjQLPcicAaXWqI95BoKnWhvb49Lly6xWq24desWq2HgDnCC8N7Rd/nuva9vviDGaDJ8TnGScYTTU4hRgFOgI4TAW2+99YlX8knMGsuknjBrZxuHa50jpkQ2Kpea0ftTaT+Bukbv28Lh1RnQjkiiz72CTazykivf0dc9zjvqugJMAQFBn6P2D0uJGGDNFbbWE8pgAR3IYAjO460lRVgsVvgukkYpG6gGBTEl4qgi/csx
kUSISbnEql4ViqrQXLVy80DOI4uTQ44e3uFpuYuUEqfzuZaOjZCTXmNnHG1ocMbS9yvGscdXntBUOgGoqXFWVaryqBmoK4pIY0p6XfLaARfHV3Z8EdHh6GX4wjgOGhgVIX8nGcc6aNKO4FiGLQwpFcBh6UYaQx1qXMm4H+FtFwDephVWMl3Fl5SWC/qc52kCJRNVYF6Ko+7pXkstq6Enxsw4RLpVR7dasVwsqUPFYrFgsViwWnWMY2QcI30fGcfEMArWrru0xcHmEgjls9L8OghNpX2wCXyyVijsKMT5ArfqNkFSSolhTNiYuXXnLkfHp5wuF9x9cI/FaskHt27TD0MBZYENvlRSFNVuCitGzNOAVj1vxysQB8EHlYe0dq3yBLaIZDhvidEwDGCcJ45CnifimMlLyN3aga4hBrYshlKGaICmfFnM2ZdFo0TRKKj0CNK6ZgEYZ3H7BlNblRI0nuwD4gJilYuXEdVxthbZDsgEorMs20DwDn9hwmxW0U6+S1X/FXz7ZWT/F8l+l9CNNLFjmVe8s1wyc5bdCmw/PpWewUebQcFQl2nbTNuelZVTUhf0iOP1Hvb3MdvbbJVX/uDHm5YfB9zFVRW7b7zB/htfgBvvgr1xRicR+R5SkDCgAtM1irHeYjKZcPnyZU5OTrh79y4ZxVXfA+4tH3D33juPZK/nVas+yl555ZXnlvEaYwi+og6NloxLjzDlgogngdFRgM5YsBCMdredKcpVvsJ6j5OoOhA5kSTjnM7YdU4FT6q6xmAYB1XtiXadOeVHaDzn+/NaZ9JNfj1DV5IwdCPJZa1EncvGUtJseUyZVT8qsjRFUk6KdMaTJTHEjpwj47AkpmEDTHk625eWa7uuVyoTmZQgjoIzFlsr8Gi16uj7FSEGaqN0o+Ac4iw5ava+xlxoT09n8Mr57qlwxoxAiCkyDAqmirmM5Vt/j82js1+zCCmqUxhjZFhP8Cozg4MPONxG+eu8wMbmkFACsw+Voo0pzur5Wi6zo3PO5PVAMqel2ShaTYkpMY4jwzAyDIP+9D1936usZGlzrOcdk9brd7bmrGcusv7VmfPNm7/DulYhBdsQ+6H089ngHNaPHR+fsFgs6ePA6XJOjJH5Yl6yZr0m2n20uNJbN8Y+cv2f1J5zqXkkyW2s1BjxGJOxPmGdEJqEsYJvHCkZxmgIrUavvh6JY6ZqoV8JkhOSxhLqRDAZyQY5Lg43a68niyWLwZqMsSWbIMMojAZyrQPeczKY0WLvO4y3BJOwxsN2hWxXJCtkl8gGTB5Uai9FfBzBWSoC3lu608RxDDxYvMpw+K8znGxRz+9RdQu268j2NLMXj7l88z4+OE53HLFf0H0CXu0Pb4LOErqFuqrHO/m2bdnd3aVpGuW3AcqTnaJgpI9+7fce7wTNdudAUtLp2o+ebgMv0fcdR0eHpDSirv+shF3XNbu7u4+863w+10x3tSqqRw51yoHZTHjppf5jEdtqa/7vhN3dV59qyfP7mRL+V5wu5/jgN6IYzqoes3VGnW0BXmXWghZnm++6r+jEU7kGLxnrPCknqlDRD/0mwhc5o50EH5BKKSzOJtbD1R9xwuuqnKjjt+ifvuhJSxnarn1I3SjHpBmvyhqWPpyobCIMRfhg1M9RglwRUR3kp2T9EHn7xgN1dEaUdpMEi6X2c6wxjKP2za13+DrgrKEOCmaTFJGk058qHxCEofBs1xjZNVhLg6dQ0LJp06tNcq4EWjpoG+rJGsRWgv2YUgHrAEUnu6pOCzL5XMn7vOMFLfun9EidYO147zx88mk5n9TWpVdbWoKbQErYyLaW3Ia+Hzg8OlKq0XJJ09QcHB5ycHDAwcEBXd8zjGOpVJXS8+YDroFkRY96DWhj81R9rlkHkOocrbMlGHGF2mZYTyzS6hFUwZdhFgUtbgztZKoYoJJx++DxQasjoa6LApqugA9P7jafc8Y7EOUGJk/BNMqjC4ILmWYacSGDsQgl6x0VINEuBuKY
WS0z/UpIURiHAggpvQSSwENRB5yUezWOgZxsmemrAxeszSqhZw2xNZjBEweHHSz2fe23VSbgjYPrFbmpiVYQl8lG8FLk6HLEp6Q9nORxzrIyKx4uA7fHL3Fz/J+wtXrAl47+MbO04MLuRXa3ZyxvWU7fFrrGc/BSwyKvWHbfO0Tg6VlGJ/ioePhHRWtbW1u8/vrryjdUvhVrIJS+/pRP7ngP0DmE5XgRxTP1gOwBuyyXh9y4cUrfr1A16YPNO+zu7jKbPTrI/ujoiOMyFUQ3s3UJ/TI7OzXb2+YTnp9H54Ff4dIl+9wcb8qZ0+Uph/MjmrpRiotRjqazFl9X4By2cVRNWzY0eYQ+YZwtvE6HdxWC0JTn9EPHUAQF+r7TgQmFZ155VcXKORNH5UBqWfjcvZBFhzEIeDTS98bjbdkidCQp46BTkIac6FMkCcREcSplTnWZgQqynjGjJTw0Q43x6YEJV93I737zbLrW+bv7zHXJWQCzefD8v+Rcb3b9Ht/zinPv+aFjyWMePP+APPa3j77nx4HNZPN/3/uQ8Oi1fMaWsn7/1mecWF/bkomWQuJq2XHv3n0QODk9paor7t67ywcffMCdu3dYrpb0w6AT3TDntBbOeVScVmO80ylMG3qdYnAUVGeKprgpQi6FU25VAFgrMOZs5nuRuLRe5SGdd2zNpsrbLmvsnMOHgF07XmtZC82c15f/Ye25Ot5u6PjWjT/A+xZna3wQqlZRxM0klnKzlmBSQif3ZKFbjaSY6Tth6FUjNY5rwex0hlKQdeRlIJvS+Dc4l7FWVaes1Y1qSIaUDXF0pOiwYrXcg6Ey6oCHFOj7it4Ixy6TyoIpYjoRciIbwxi0TDS0FdPguJ8it8fItD/AH3zANC85WazYmk24d2/K++/fZ6g8J33FKvcsT4+f8cprMXMYzpCysOaWarYdY2S5XBaubgtYmqYnhAUxzum6U0Qeh2T2QBnH1IBYQ08FTPFe+6x1PcHbAfLp2Rm5FXkiZAesSpmpqqCusbMZ060tZrPZIzN5z5eFjTG0rWF7W/T47PLJHK8rny/Qth+/3z01M2Csw1oPZd7zmuJgZb3xlNLm+T20ZIk5lzGZOmNukxqsWYU6Wk7v+zX5ZMM3LRuhZZ0FF9To2akh52ZjSwFJZSM6dQvKzNi1kpbDWYM3Sr+JUlIcEwsQLLOWnzyvaG6t1cleT3nR0/cFJ37UYx/+/eOe9/yc2fM91hOYnA1zUJKNYm70ttVgZf1JxjhyenpKVVXcv3+fmCL3Hzzg4cEBi8UCjFFGgVHQ5DmEWakwKK3UFLSyLwM61mIpRaG0lIdL2b1MbnK2OF7ji+NlA7bdjF90FuOtItqDSqLmct9uBFgU9fjInazn+GTL+Fwd772De/yf//L/pZQPzNrHlsWTRz+MULh0BZEp61Fc5xwscK5ZeO615eKLObsVNr0stfV3Vcow6nXEeybMpuP6JOhc0bQ5xvqZ6z7OmRRlKJFURBhExwnWcYkj40vENo6WYdBZqilYMpn50Vm29yzt+PiYmzdvPiJQv3a8x8fHdF2HOqU3sHbGyy/f48qVA05O7nHjxreJ8XGOdw94HWoLbwAzGLkE7DKbwWuvQV1b6vohmjUXmyZ4M2oW/C5ayb5wAV56icn+Pm9++cvstu1HRpfeq2LWlSugJefZY5/3eKs37/G8cCnWOOpmi2ays+F7KhVNdOMxHoxHxOoUGlkjm4VxXE9mKc6sFCTOb9VjLoAgEcgeIxlvAJsZdSxXAeecvU7Hk2x8syq8FaRzTEK2QnJSFK+UB2xNhfOG4AytU/SodIoEHfqhiEDoSEOglAkppT9Ploj7PtnbC/vsm+S0QQmva2Nq5W+l0nI6X/DOu9/l7v17xDQym0558OABh4eHdF1HqMKG/rPGPOifZcZwcbgqYKISpdZqORnY9HDXKOm1E6aUmrWHHrCuBO9rcNq692utjlvaULUo8qv5LJAwZyjmteN+Gnfuc3W8
Qxy5eee953nIF4YqF3k8JBj6oQganNkazblcLtHbasTayBAzCcOQliyXi83YvketBSJksx4WpOoO1FhrqGtLVRmMiYiMGzGPZAzSWnBgK4dZp8fTKWEyoZ5MqNr2Iwf2GbNGLRu0jB42VKOPen75TsJHvuszNGMw1mGs17+XX8t59ydm42xZ92DXQaawGYcmhk0Zef0uqdBoZJ3eiqKUFYpZlJjyGTXJSD4D6nw4fN84/ay6ucYUJK05VwUsog9isFZL1LbwZA22zA+GMwnJ9ZQpwZii0frCPpf2PRWLc0hsWJfOtfytwDezcbZHx8csFgtSSptqli9883U2e+Z4lYJnrYrPhBA2Ai3nDvu9QDNjVKTF2MIECBtw1dnzoQxtBthM+9KAWBMi1Q199KOWzv0Tr+Fz5/G+sOdrFstlLvNFvshsewZvogCYD9nx8bGihfMAvEem5j6XWLBHxzrjf5zNgXdgtPAB2g4uI48Wiy2++92r1LXn2jWVlnz48CEPHz6kn06JV6/ivefq9evMLlyAqRKKZsD7aNf3Kp8cWb1arbh9+/Zjh0C0LVy7BnXt0N7wzid816djOQurYWQ5jLRNS10p8thhcQiSIzGPCjGLGR8CVa18qMbrxrFaLRkGlYbs+qUiZtdRvHNFzN1s+M7D0JFSYrla4whkk/XaMu90Pbh+XWHSMrSCuER03i/AUIY0piJRaZ1XST0VK8YHz1a1y5bZ2Uzb0X5fkfcbVHnIjD1Jls8NTf7Cnq5Z55hubZdQsWSZ6x5FCeHW3N0qWJpaM9q+V6EgYy2zra1zvdp1NUX7s+uS8RoYpe+lGazd4DHkzLtzLuPdOO6Sh2+CXeVCrjWwNxPmjNWxapt3PWvBuTL3+jy+bv36p9EqeeF4f8TNYNhhhyvmCjIR5pO5UlceY6rlHFGXZzllyunjdaPOWQ/cV898+KFH+sj9+5eoa8f+vjre+XzOnTt3dHTgpUt479nb3f2e4X4HwEKEPb5/Efl86WccRx48eFAy90dte1snEdV1kc1kh+eZdWWUw9nHRG0s1uvoOm88VhIyRLIkRk0VwKoQxtlGotJ6KQvjMLJaLhVAVWQG1xmBsw7jfOGaRmIc6VZLFsv5I6pIlfeFNuSV01c6wKbUR4yxJDlznCkrmnmIK7KMWBtwrsb6gG8VhFLVjW6U5T/Vki4SfIuOjkEz5Dg+17V/YU/PrDW0k5a1utS61HzmjDRLDd7jnaGp1emNMTLGkbquqZqmSEdWjzjZRwVDCo/ZPQqAPJ+56uHWCl+claHNOW9ZJM3MudfnQuE76yUb1gIoSikym7ryGvuwoTHpC594HV843h91M6iPuQYTJlzlaimjPGpTO2V7sU0fe444YrBxk4HSNHD1KrHvOT4+/kRjBdU64C4peR4+hNVKOD0tfd6ug3v3yN5zwIf0sI6PYbWiqmtmu7sM55Uwiq1JQQ2wQAlMx8fHG8rRh20YAuO4y9ktL6hQxwnPxQkUdHKKka5bIUXizmHxBqZEvBFsUNF/pRudaS3rhhE39BwVV3AEr2pBKsrhytSWjpQyXb8kpqgTjDanodiEIRYak1WK0TrTNSjK2qJZesq5cFXHc4ApS5JMij1GIqNRUZxhVIEPaxzO+ILKVvbBMA7ErDzfszTihX3ezHvPhQsXNtmuYdPS3TjMUPq23huq4DDnUO3WOb2/rC3AqnP4ms0bmpLdahZ8Prs1opiIs+cXhLJd04j0HEqXRjEG1m5A4WaTscoGwLVuz5xVYc4aQfoKOfe7p3PvvnC8P+pmUcnlt2CLLWYfkT/KlmY3y7jkW3yLU07PmihbWzCdslqt+Na3vvUDON4F8C4xqnQznLu5l0t4990N0+gRMwa+/nWqqsJ98Ysc7X84H9au7luo4z1C8VlHJye8/fbbj814V6stfuZnJuUVazsB3uH7FdKflonoLNMYRxaLzHK10rIcEKzFtw3Ga1msqmrNXovAfoxp47RTHMg5bvR9
m3pSemV6rfq+Y7lallFoS9USzrkQIcoGmFGHyFm/y57j7AbncBbljmbRIQypKy5Xs+KYImPukAgM87OynjFYW+FdfQa8EWHImShlfJuxLxLez6mFELh+/fpZhitsAK7n1bZ0UIYhuHWPX5+esvKRBc7AfmvGwvmerXUlcS3Z6oZHy1oFsqCOTZl97DbYA+BsCMU6q10fh6Lp/QgYl8cOqNgcsIB8RdZB8AvH+8I+iRUeiT1XsnnscxyMMuLKf4+Ye7Ts+cntjOP3vQ/lc8/4kCWlaqUYHyuMsZ4ZC2uyFEWTNz3++Y89gfUrn6Vy2GOOWspl1ihifk3PgHMlu5IJPAoA/l5vdSYQ8KH3P//z4eOf23bOy/CdzYH98Ovk3M+H3mdzgkYBdtaW3nPelADX/d5HMooXCe/n0gzm0aEij3G8ZpN9nu/ZlqeLINYW+tnZ/ffovmIe/fO80z1375wpfJmzx86/y7n33JSWi4N/hLf9YYf7yLnII388LTPPE+RgjLmPpkEPnttBn65d5Pmd+2siculJ3uDFev9A9sTrDS/W/Ae0F/f4i/V+3vaZWO/n6ngBjDH/RER+7rke9CnZ5/HcP4/nvLbP67l/Xs8bPp/n/nk857V9Hs/983jOa/usnPunQGh8YS/shb2wF/bC/tm1F473hb2wF/bCXtgLe472aTjeX/4Ujvm07PN47p/Hc17b5/XcP6/nDZ/Pc/88nvPaPo/n/nk857V9Js79ufd4X9gLe2Ev7IW9sH+W7UWp+YW9sBf2wl7YC3uO9twcrzHmjxtj/sAY8x1jzF94Xsf9YcwY84ox5u8aY75hjPm6MeZ/Wn6/b4z5VWPMt8ufH6en+KnaizV/vvZivZ+/fV7W/MV6P3/7TK/595Dtn8EPKtDzNvAFoAJ+G/ix53HsH/J8rwF/qPx9C/gW8GPAfwj8hfL7vwD87z/tc32x5p+Nnxfr/WLNX6z3Z+vns7zmzyvj/QXgOyLyjogMwF8G/tRzOvYPbCJyW0R+s/z9FPgm8BJ6zn+pPO0vAf/ap3KCn8xerPnztRfr/fztc7PmL9b7+dtnec2fl+N9CTg/iPf98rvPvBljXgd+BvhHwBURWUsL30Hny31W7cWaP197sd7P3z6Xa/5ivZ+/fdbW/AW46vuYMWYG/OfAvysiJ+cfE61TvICEP2V7sebP116s9/O1F+v9/O2zuOZP5Hh/gEb7B8Ar5/79cvndZ9aMMQG9WP+JiPzV8uu7xphr5fFrwL1P4bxerPnzPacX6/18z+kHAe98rtb8s7je5bgv7vHnbU/QuP7EjXZ0CtI7wBvnnvu1T6Ph/gk/mwH+H8D/6UO//z/waFP+P3zO5/VizZ/jmr9Y78/uen/e1vyzuN4/6Jp/ntb7s7zmIvJEjvePAL9y7t//HvDvfZ/n/wkUVfY28L/+tC/Kx3y2X0LLD78D/Fb5+RPABeC/AL4N/G1g/zmf14s1f45r/mK9P9vr/Xla88/iev8wa/55We/P8pqLyA+vXGWM+dPAHxeR/1H5958D/jkR+Xe+z2s+1f5FWwf2t6c6OLkMSE5xJMVxfYaPzAkV0YHK5RFEhJQyWYRuSCyHZzpA/YF8aKTUD7rmn/Z6f87side7POfFmn9ye2TNX6z3M7dntqeEquLClUtUTU3OiSwJnXprEYEUdT9NUkbxGoPxFoPRwfZiEKNeMiMkER31W/Zj7wzOWgzgyzRdu96ujc6GzgJJMiKwHlYfnKVyZT6wcWDAWgtGz818aL64fLjd+0nvLnn0lQb41u/91ves99r8J3zbH9qMMX8e+PPP+jiPs+Asxq6HJMNXX7/Cn/ljf4idrZa6qrHOcvLwHicH9/VCWKfPNDqAvB8j/TACggFyFo5PO7p+5BvvH/HbNw5JOZOfzVf/xg/zok9zvT/n9kOtN7xY8yewF/f487Vntt6Xr1/l3/qf/Y959a03OB1OWI1LLAFLTYqG4wNh6IWTARYRqqam3ZlhsdSj
w2XD4GB0sMqZ4xjJRsBmMMLupGG7raiMZWotwRhm3lFZQzKJbCJdyhz0AzFDipacLde2W65vTwg+MGmmOOepJxN8CDgX8K5CAwSh/A85958RQOyZA94kqpusuvxaE7LzYcm//NbeR673kzjeT9RoF5FfpghTP8/odNpU/KEvv8SlvZlGVQZ2Wk93co+8tFRVjXOOoe/Qc7P4EMAYYkxIzhgEX6IwEcEamLaeprJc35+y7CLzPnL7aMWY8vP4WB+75p/Wev+I2mf6Hv8RtBfr/fztqewpUTLHfcfD1ZI+JcZssYBDEAE/9ZjGsCWWWiw+VNR1wBlHmx1OLCuT6YxgRFiJpUmZnzhZcWFIVP2KcDxyxxh+yzqis+ztVTS1wxrBWUHE4H2DTcK8S4xj5u7RipPlgPeOtl0QnOPCdMKkqqiqiqaqscbirMcYi6sCxrmSU5ekbe1gzzldPvQ3KdhoefThj7Qncby/DnzRGPMGeqH+LPDffYL3e6pWV46vvHaZN1+6oGUNYxi7OYuDOwySN44XYwGLtQbndTliShq9AM5qaSSfe19w7G3VXN+dcLDouX/SMT7TqvPGPtNr/iNoL9b7+dqL9X7+9lTWPOfMYhw4HXqyZDJaRnaIFpxrizUOaz21cTgbqFzAYZngUVenyU5CCFgmY+JrR8LrfUJSRvJINpaH1rMIlnFimXgIFoJAMJap9VgESZlxFLpx5OGywznDpF9ROQtxZKwrmroitQ3OOipXY60jWHD20TK0ZsOP96brpGz9909qP7TjFZFojPl3gF9BkXH/NxH5+g/7fj+M1ZXnjZf22ZrU67PS+n7OTJvArBJyv9gsSBp7jHMgFjFGnamAMUJOiTT2gIEcsQhjHOn6gZyFsfR2U9ZewvHpipNFx7IbN33gZ22fhTX/Z8l+mPW+cOECf+pPfWbFfJ65iQjf/OY3+fVf/3VS+sGi0ad2f5fGn7FW/zRnD2w20/KnnN9UjQERcopIls1rTelVgpAlUzaNzY+16mSsDxhrcW9s41/fIj/sGL95iHQRSelDpUlBJOtubTh3TuWYpYHprcNay6zdYme2wzD2HJw8ICbdlzJg7St4/yUEh2QDRFL6fSTf4uXrr/DlN7+Mc46/+St/45mteRZYdsJ8JVhvsU6riBmrjdqk65ZMJhlHcBkv+hwKuDqOI12M9A5GLyxz5vcngYfBcf00cWWeuJSFX4o9g7dUtcV1iVuV5U5tqJxDgtNyr7ME7xlTZBUzzhgkWXoM909H5otM22QmfaJyjmkV8c7RSibEAe8C3tXlVir5r7HaTy6g5PWto1muXrvNYx9jT9TjFZG/BfytJ3mPJ7FZW/FLP/0FXru2x7o6n1MmxQiSccOCtDompkRKCWMs1vnSwzWbLHbzZesixmjT3mEYh575YkVMmeWQSFnoh0xKwoPjFQ+OlqzGTMrPpcxcTvXTXfN/1uwHXe9XXnmFv/gX/+IzPKPPvv3yL/8yv/Vbv/UDO154Cve3KcAda7E+gLHqeI3BGoM1xRk7W3AbWR2gAYxFJDOuVogkfR8Mzlh82SpHGRU4ZB1Yi7UOH4JmS80EV1U0/+IXaP61Nxh/5wHL/+jrpAcrUt/rHrP+nFlIMeomXRyvtVaduEWdlzW0VUPwnlevvsabL3+Jk8UhX3/7N1l0pwxR96Sq+Xnq9t8EqUpvc8nQ/8ekeMzP/vTP8W//2f8BTdM81vE+lTUHYoLjecafZNpZoG7XrkV7dXnUYEbBU5B9INQJYzQRMjkzLDrmi4FVJfQz6Jzl7++0eOv5Y+OKq4cdr/UjX553iIGTmFk1gf/Pluc3twJNBXmWCcYQvKOyjpOVcJIy1hi6ZLHJMF8MuFGYtD2TiaOtLBemNXWw7Maetg5UoaWpChDLe4wxOKf3j0gugRPo1m82P5uA6mPsmYOrnqZtTWomTcW6sb23PWHaVtSVR3JEckIQEnpjYzQQMSUqxdizSLVEJmtUHAJZsv5Vw1xS
ysSUGKMwxkRMQtdHxqjO13unzX6xjFnoR/0ifFbMGENdBXLOJVIvETUaURsoN5NZY8pKZKd/scZpBO8DVV1vNqI1+ttgsM4RfNhsdmBIKRFzJMXIsFqSJev1EEGykEugIqVdZKyWdZyvcKHS9wx6vHXPJOdETpmcE2kYEMnEcSCnWDIa0XMNNdZaXNVgnWd+csLx4cOPLBU9bXPOsb29/VyOtbFNg0k3hJwzFAS+iOBCwJVr9OxPRWia5rkc66Ps0WOXlPI86tSUtVp/8aU8R5E0ul84RcFa47AYrJhCA1kfQ79D1lqcc9iqwl+/gtveYm92kYuLKaerOe9nQypJweZ1xiCmxdjdsu88QGSp72UdLgTq2QznHG2oqLxjZ/8iF69eplrUXDi9SrWccnxyxKpbsbtTcfVqC9EzHg3kKKQLF8nuDS5dvogJIO7ZJgciwjhmhj7hgt1sJmbTJ9UlLvUCsjkPYdK9OouQyFRJuLxKRGc5djUrLzw08IE37I6GVwSsCJMh4YALleF6rVXMMSaStQiavqfiA8SctQtz2esSQpLMkKBLkYTF9T1DTkyyxZiAs+pojdGKh4grDnd9N51zuuWzitjN9f4o+9w4XmctP/XFl/jJL14n58Q4DoTg2d9uAYj9ktjPNw4V1KEYHwiVxRpLzplhjCVi0dKPs2ePjWPcfEEFWHUDq25kGDPzbmSMmcPjjq6L1JOWixd3EeByhjFlbtw75nDef4qr9KhVlefVly8xX63o+x6DwxqPtZY6eJyzzCaBtnZYJ/igm07l9TlNu0VVT9je2+fqq6/jvaIArXUY4zDWMZ1ucenSFbyvCHWDtY6j+TEn8xOOH97jvW99g7HvGEvEPywHhlVPNkK2EazBNQ3WB3YuvsTupVdoZ1tcuvYKPgRi0i9kN1/Qnc7plwuO7rzP0C05uv8+3eIYYzLOZkI7YfvKy1TtlK1rb9DuXOQ3/su/y9/5a3+FOI4fv2CfZ4sDpJ40jgzLBSlFFosVY0xsX7zC9sXPsgTw0zOz2eqBHGFd2RLR77ktQZ9kdclm/X9oqQvw3oPxhFATfIXEiPS9tpy0loi1GnR672mqFnthj/rP/KuEr77Jz33rmF/6m8f8we05/9nJSDf05NwjknDO44LHudeomj8LeFbL/ydx/F2cD3gfmG5f4PqbP03VTGmMEAx8+atv8dM//TXmiyWzN17l+PiY3/2Nf8Ctm2/zsz/l+W/9NzLmeM7JP/g2adEx+dmfp3r9j7JKI+8evv29NJmnbDkJ8+MON1mxnPf44nwVO+OY7NW4ym0cr3OQnC55lkwiE70wOuHlbuDn783pnOVXLhneayv+McLvzRw/ZjP/+tKyHTOTRcd0Af+8jHyVmm/Xnr9pMnNvmXhHMJbOZZX6sIDJGAyhtYRGixbZQU/ibh/1vlnOAeHKdMr1rRXeOmoXcNZRVzXeB6wNWFuBsdiSDa/LFtqiOAu0Pso+s47XGFOa3GreO/a2W166uE1MkXHsMdYQnEFyJqdIKpurWaexTktN1jmcdZASJqYNx+vDi5NzITiXXnHKmZyEnDV7SCkzjolhTDTW0rZVAaILY8wE557X8nwis9YymdTEHMk5lQi+wllLWwe8c2xNA9PWY706XmsNddAbbTLbpm232L98hde+8Bqhqgm+wRmHcR5jHNvbu1y99gohVNTNBOscD48PODw94uDODqY7pl8tGVYrUox0845uviKTSHbUyzSZYH1g//pVLlx/la3tPa6/8Rahahij9tVXJ6csjo5ZzU+YeEO/nFPRs6gM1iasTdTTLfavX6WabrP36heY7F/lnd//+qeafT1TOwfqkBSRoScNPeNqToyRbr5kjInJ9t6ne56fgmkRRDbVLLVCFpHSqxUpNMKycWbR8nFTY5yj8jWVr8hDzxiH8+1YQvBUjZaBmyZgtxrqVy8R3nqZCzccL9/teXhY42IA8RgGMIJ1CuIMYUYzeQNMIJp98tDinady
nmY6Y2vvEk27RZARL4ntvT12Ll0gzKZcXPSEZovp1g6hatjd9bz+asYcDBztnZJCx9b112i+8DLv3b/NnfcfkPIzRn+KYGKGITJmSxwNzhiCtZg6k3IA7MYlZeTsx2pwnY3+VDlxoRtZWYMbIsk5DsTwIBj2nWFhDZWFWcwEyVwYHVtj4tQZXNKqp1iDOPQI9qycjwGl85pS+NRziCkhZGJMZMlMvGNVV3jjyDaXzNcQsuAcBO+0gFruoXN33Sbx+372mXW81y9s8+NvXCV4lZM2xvDSdk13fEjMkWHsAaE71R6OK2VSaw3eO3XcxpVNV0qWm5V0bc2mtyAiSMyl/HkOQm6E4D2TpqJKGWstKQlt3ZASbO9M2dndImWhG0ZWfaQKny3Hawz4yuGDJ0RP8BVN1eCcpa1qvLM0rcNX9izjBVK2CJZ2us2FS1e49tJrfPmLP0ZdN4Swzng91jqqqma2tV16XRXGOIKH3VnLpWnLfl0x9D2nyzn9ODA/OGF+cMLQLzg5vUtMkdUQif1AWs4ZTw8YvSWPK7IFssUKSE6knLE+MLtwlWbWEdOK0HisDFjpqCZbzLb28e2UnGA1XzL2w6d9GZ6xJZDEg1s3OXj/XeIw0K8WSIZsPWIc7c4+j6B4foTNGN0DtMypLQzfNHhfkSWRcqlq5axZYD7jYxoBu7PD5L/+xwkvvcRLhysuH3cc3n6Pd3/n1xj6FdkFKgK/9Ef+ML/wsz+L9QfY+jswsdirvw/xAZPZPjdevsoDv0N9+2Vm/pjM30XMDba2LzHbuUQ7u87+SydIO+HGhX+Zo8kvcOnwiMsPHzKbbHH95Tdw1nFw6x2WJw+JcpkoPb62XH/5Gnv7+xw8+Hmq5gp5+hL/6J17uBjh5QpGw3j4NvGffIfYZfpl/ESAnyexbRH+BUlcz5l/GjPfyYYviPALWRgGx+80lqNe3Y0AY6NAtOAyuIFgHJ0bSSFys0r89VYYBD5YdXRDJLiKYAN3K8Nf3w3sJ8cvLjPXY8b4TCDyigh/KkNvLL4bMMBvG8NvUkrf5TuQsWQxjJIVNIswEhUfpSQXHgwd/bFgosUsPSZbqlDhnWdvu+bSfoP3nrbRKp/6Gqv9avn4EQifWcd7YXvCz3zxJZrq7BTH5THD8pSY1PGKCEkiIDTNhKpuMcZqGbSAKTboxYJGXPcxrbEYEY1wckYBhuc3JoN3jqbyxJS1LyOwZQPGOra2JuzuTBlj4njR4b3dBAmfFdO+qcN7h/OOqva0bY23lklV453DVwbrwTnBOf1ypqR9iqqdsrW7z8VLl3nlpVdpmlYdryulZmMLIMSVHq8KkDSVI88m7E+n7E+3GMeRg8Uxq7Hn+O4Bx7NDlvNDrO3o+xXjcEIcI7lbEVenpLYhxwHxHg1bDYheJ+M8k+19Uhzou4cYG7G5wyZPaGY0kxmumpKSMKw60hCfV3v3U7CSO0jk+OE93v/Ot7TU3K3AGKrpLr5u9N//bPhd4KyPqlmNcvarZkJKI/240r0gFyBMSkiSTVbsplPaX/pFmp/8Ca69c58vvH/I+5OWW9/+p6Q8UFnFJPzk177Cf/Nf+ePg3yFXp4hZkOQ9UrzF2+3P8vbFtzjuW8Lk52g4Quw7YB6ws3eJC5dfZ7q3z7UvL8nblpOv/iGGS7tcfPddvvD2d5iEisv7+5iUWB5FFidHZBYkRlxouHDpAuOQuf7qV8jsIw18884xocpML1ZYYzi48V0WDx8ws1vs2f0CKnt2NgF+QoQ3ydzI0GXYTpmfiYl5tHx94VnGsz6zCPhJIBjBuRFvMr2LJJ+4GzLfqCElIfcDSMK3Fl87Dj38g1lgPyXezIb9wVA7IRC5hPCLYhTwFLVq+dA7/on3rLNtSoUyQ6mUJhKZAQW+uUr3tJM4cDJEcmdIDxxEQ3AB5zzXL1WEuqUOHhjxTulRznpt5+E/tsr2mXC8bR147folpk21KZ9d32tJw4I+2U3XJhdpR2ssIVTKtc22
iFuoI6UASzbJ/znYvykAChGIo0LvrRG8B49QG82KxxjJOROjKcczOGc3GAwxGUOCPGIRJpVDcsa7z9rOJkACUumDQlM5gvM0dY2zlkwsiG8DrAMWi7GOdrLL9t4VprN9qrpVwJNVekAufXJrLM4p+MB53eg0cDRY76naFltVTC2EOBByxazaolvtsbVVMQw9e6cnrIae0G5RtVu02zuatVDk37JhHAb61VKZHFhyToRqwmS2x7g4ZFidgs3kmDEuIymDyfAJEIafV8s5MyxPSUMHcaSuAuIdbfB65a3RPmcaIfUgHqw/Vxr70TPrHKEKGLuNcz+OddvsXUnM9jPLieNo19PS89XxPWZpwbfennPj/SWXwoQvNvuY7cvM7y9J79xhr0vsbLUs9ne4/PJ1xr6jmW4T6ho72+Wduwc4lwn+NYwd8VUDxjMs9yD2TGrh9VcMw7gi19fB9+zsX2Nv/yXCpGE6sUQ7sn//NswfsHN8QOVGrImslgkkM92pse4K9TSw7I5I0TA/+oA0RrbCB3zp1SN6u0VndwmVZ2erwtoK2d+iMj2TtM0s7RQ61LMzA3jJ1DnzJRTI9MWcmKWEl8zPnKx4zVmit2RnOCZzu/F03pIrwVvLcoz0OZFtJNTgssFki8FROd2jQchWHee3K8fCgq0trrLEYBg8eANvirCbhWti+PkxMreW2zkzGoONkLMh50jMiWyEwSXEgJXSesgGSQZGS8bjjeH1GLk0GvaOPZfdis47bjZLBuuo6poQFGhaV9XHfsU+E453ezbhX/q5r/LS5T1MzhjJrOZHnDy8RS9ZHQHq/KxVSpBzNRSurYjytEzWUrLNuSBs1Yy1GOeVSxY1s+37hjQ6JhNHUzmcy1TVgEhivlgwjAPjaLVcZYVQ3i7ltcj1iMSSFU8qKg/VZyzj1Q5GRBiBkRBatqYVwQeaaoI1lsVqzqrrMQRMHdSBOo/zgZ3dq1y59hb7ly4zme4QnCfGQYOScSTGEWuVL2es6l9vVF+MJYQaWwKkMJ1pqfgC2Ahx7FnNv8wYRx52J6xiT7caWa1GbAg4b4GsJeYEQ7dgcXwExuK9Ip6bdodJO+NoTJws75IkMQ5JD2Aj4NZ4/x9JyymyOLhPPz8h9yumbYO3lsY7chYOTub044gZOxiW4CsI0x9px+ucp2knWP8Kof7v46svcP1L73Hp9Xs8fG2P+DPXuOaO+HPL/zevde/zf/8rt7h7cI+vbr3Mn7v6U+TphF97+5D791e89MZLXHn5CiZe4/THv4Yx8MoXvsx0e4dxNfCPvnOThoY9fprgK7b3tgl1xfLoFPoT9iYrfv4nF4jLpN2vIc0XmW3vsrW1x5gG5ssjhnHFK+/cYH+5YLpV0W5X5DxyfDzHGGHv8h7XX7uEtZ7DxS3mh0ve/q33kXHFP//PnfLFr/bcnr/OjeM9QlVz8cIE72FiFix3LdVyh/bkInyC8ueTmEGoJDOVxC8BfxioU2I2jJgsXDvtEIGu9Qy14zf6mv/UGFbeMq9XOGvIDrIFcUI90z5shccRYDTaQ0bAqfP9+9OAyZ6hgjFADoYUYMfAfwdhOwlfHSNvRPiOMfxVZ5kL5KXgozBKZGQkOxhrWVNxAZAEOYERh6FiagxfG4SfHQS7FOw94aax/C1fc9c6ptsNTRuYzip2dlo+rsDwXB2vd46Lu7Mzegmak+1vtdQefMnMDBlv1IkKbErGpvDc1oRmxU6cwbnXzICclRZkc1budjLkpHSUYUhIMuRkkKywNoPKm1ljEZMVsWhN6QXr8a3V/q3NmjFby4Z8b8g4AzuTiks7Das+Me8+AyjaNfdsoyaeSTlhs1XqVOEzG2uV1yz6Wb0PhFDjQ433NRjHOCZyFmIcyVm50jmtEaIq8yY6VYIhRsaUiDnRjYOiSpVRj8tGj48he5XorJgg2YMZwYwY73GhBmswRQ6uqiqatj7Hm6P053LhQpoNoEayYHKGlM6hDH/0TETouo7VckHf
9YxDBGdJxmyuURpHhtWS1fwEV0+ofPvMs59P02ZTzysvTfGhIdQDPqy4OonsZxAsB7am9RNccxFrEntNxyvNistbu7SXLpObhlld0RkPvWNxYkljoJ1NVf1oa8ZkNmM+HLOKS5KpybZGcoVIg0ggVgf0s2OsBR8cxgEhapEzj8S0Yhwy3dIzjBn6SBhXyKqmtzNgRZYjrEtgM8YZgvVUtqGXkXEYyGOHYaAKI84OpDTA6FjMlQKzWs4YukTFFvVs+uwzXgGfMiEmgv6GLmVupQxZcDFjBPoRBpM5cYZh0SHeshcdE792vIaVgUNjyMYV6FOCKDDKZs8VYIyZnIUBGDPUEXayYRvhJCbeT0JOkKJhNIZLzjIR8F3GREhERhMRp6Cu80LLkg05a9XNiqEVw65kJus9P2VaY9nPQrSOaSdUjEwY2TKxVOw+2p6r472wM+W/9yf+CH3XM4yq+JQzWDJpecjd5QOclQKUEuq2KujmgC1OAjGkFInjUNRDChvPWpJxikLuR3WcCGTLcTdw2kEaE2M3YMWz7WfUNmBGkB6kAmo2mXXAEUaLDxbrLHXtMdaUXvAa9SxabkVwYviFty7yE6/u8zvvPuQff/vep87pddawM6vohxXdCEOKHM1PCT6QxOK9RyxUTQOgIIbg2NnbZTKZMZlsE3zDMGRu33+AMYaUtcIQnCNYSxUC3nkMSimQnLl7/yF3Dg45OT3h/bu3EGO4+NIrtLMZTlSX1RjBuYy1UIeayrVUjWV7V8vcJgTEsOnBN1XF9vaEfjVweO+YcRjo5nNiNyf3HSHUOB82CHeGAZwg8TMQAD0ji2Pk3t37HN6/Q390RH9yQnCWaR0QERbzU8Yxcu/Gu6QxMrt4hWtf2Sa4z0Sh65nYj39llz/3b3yJdjLB+7+Hyf8Y+UaDvBt41/b47QlV43lv6w9zSuSrs9/kay/9Pv61t+h/4b9CrAJX0pztnLh1Z8Z3vxmYbM+4+NI1qga292bUdU2aQ6aj9VoyDt5R1eCqzOL6+9x+5TdRkX2DGQzhuw32yOPrgK8r+m6fg4dfJYthsvUeoe44ebDH8ts/RTO7x8XXbuN9R4xLut6wP73I69O3uLO4y9f5Dn2OpCykBKvlgqOHt+iHfb7zjVdJcQZxG/LIm29UvPGTDd4/2yqHzZnpome77nSmgDF8Mwp/Zcwsi6NChBxHshHmx0uOHhxz0Vn+1VnFW8EyOEt0lt9zjr9WBZbGkWzGG49JFpvsRj1QBUg0sE6iU49eyZk/nRJehL8tib8leSN08qox/Eln2RJDF4uQls/gE85m2pCwRlj732QtybmC1lMu966BqhJlJgEvZ8N/OzmGaHDHDnNs8UY2TvV/933W67l+A+vK88b1iywWS/q+1wXLQhx65ocndGOPL47XB0/dFAFrp0Ae7fdpfV5yPkPqFacsUsg9KWOzIXjNfIchs1xp/y92CYcp4g1G1VQSCrDIggLgTFG5sgr/d5YQ3AZgBarnHJNsBAusMVzYqjHGcfPB/Hku60eaMRC8LSV6g6D9azCMqXBoraKTc06krDdECBVV1eCcR8SQUtZytIGUo0ITqgrrvcplruOLUnFYdT1Hxyc8PDrgvQ8+QKyF6RYzLE4sThzWQajAO1UYssbhXcDZUKTDfBHx0BFjedKCZLxdsayWkBK9FCeL4JzHrulcIkhOgH3maM5PxcpnypJZrVbMT+f0iyXDYknwhTYnmaHrFTU+nzM/PMDVE3LSASCcAx7+KNnOduCLX9hiOvOIP0CiMN7YZxy3WKw69o9XMLR0YY9T53jZXeDKZJfV9h5HF/YZK0+9FMwwkIfA6UOLrRx+FqgasN4DrgTbCUfC2oyxWds6IsR6wao90HsvWYy1VOMMt6iwg8V2lr5vWJ1WCI56AoHE2Fcsj3cwLCCDMYmcR8Y4YLOhNRMq05T9TvfBlCwxZsaxp+8Gjo8gDhYTa0wODC956jYQwjO+1gJ5TKpQZQxiDUdJ
+FYU5oJWpxCIUeu4RTjDeMtlk3mlcozOMjrLbechQbaZ7APZgM2GnK1W3caC4xmjVjVFUcrTmHmtHyELc4ncEEGCI3vPtjG8ZA2XMCyyYRCwKWNzJtjMLI84I+thSAze03spilUZY2BwKshhAScwMfBatirTmRT7Ygpl9eMQnc899BURuuUppyfHyo8tm2TOCYzZEKy1vKslXWsShqS4qSzEOLAcVoDgncq1WVHE8piEVacScN2oTvN0GVkuI23VcHn/Mg6HWSXG4QTJMHQQatVgtk7KGiqAa2qVruRLmLPmAGsmLqpXknMh7ptz6k6fARMQSUybgDVTBRQkpUqJKIXKe4cL1Qa4UFU1IVQ45zk6OAa5ye7+Dtdfu0YIjsoHrDVM24ZJU6vSThHcMMYDhuPjUz64+T43b73PP/yNf4xYx49Hy8Wr1zHZY8ST88A4zrEW9nd3aJuapm5p6gnWO0LbFJ629o4rY2ibCY1v2PIT0jhytD9htTrl6MF97t91WBe0/2s9MUZyHpFx+BGsNAvkSBo7Hjx8yAd37nB09z7HDx5SB89O2xKcZVJZgjMcHx6oog+Wi/du08y2qLcV8fyjZlIwqgdmyq/VP8dJs8PP/lTNF68FXj05xX7wbcRY6vvbGOe5c3rEN5oGt1ri/+BbiDOkeIJI5Nrua7zyR7Z4sHPM77/y60j0XPzWHu3RHnVMVOmEKCsWD461XeVV7vG0fcB2MyPGbfrVq+SxprvXkhcBGwRbZbanW3zlJ6ZYF7l1MHAyXxDskt29JfW0xxkhj5nD5RExnXBqe26426yGiJ20TCZXuXsqyA2hS7tc2L2E3+v52su/Sho7fu0frvjOt0deOa5p35tQf0zp80ntQcr85UXP/891WKdtupvZsIxWs8sizRmHI1I8xVrBWuGgqfhb+1f4jclEBx0I3CEzS5EGoakzLgjiBEymO+3pD49p+sg/fzrn2jhy4/Ur3HztCjYafnU5pRoyP35nztcWA79T1/zTSQ1ZGIfEKGVmb8kSbMrYJBD1c5gkWBG+ETJ/v3JcMJk/ake2EZLPJJcxo8EOBpvBjlalsFBGhxkGbAGBfj/7VBxvv1qwPDncOF4MOKcOLq9JyKx7rIIhKWJZBDLkNNDHDsHQGIsrHC0HjFE4XeUiamFwNrPoRrp+pA0t+9t7OCzHq0OGsWddiaxGBUo5D7QGgsUFiwu+pNljQTWbTWCgymCy6Vef5wF/NkwdbFMFgvcl849nxHEpMotOnamzUFV1mVVpmZ/M6ZZ3EDKXX76M94qI9s4xaRqmk/YcL10je8mGxemCu3fucuPdm/z2b/0OYh2TSy+zSg6yCgr0/ZLT03sYI1y+dIHJdMJ0MmM22cJXgWZ7gvOeaROovGevmbIzafG1ZbK9Czkz26pYrBYYa5kvFmAczld6jYaONI7kpPOUf6RMMshIGnuOjo+4/+Ahd27d5t6tO7RVxf5sRh081y/MmLYV6cTQ9QOuqlke3Ic44tvJj6jjzWRGjm3F369+ktv+JV76kvBTXxTq3/0m+7/3m6Qh0k1n9M7zzRX8RhWYdT0Xb9zQrIcFzma+/FMX+cJXPf+0mfOrW1+nO6k5vv2LTG/UXJolLk4WxDGynPdFzlSrQXXTMmknjP0l0smPE9OUVaoZxWNDxISRSzPPj3+xJfglp78ZOXzY4e2KZrvDN8Omcnd6vGSxjNweHjL036GupuxfeJ02NBwsHPPOsrW9y/7+Rbba9/nCpV9D4m3+yd874fbNFfPbE+pvbNM84+rGURZ+ZTVSux7vNeEhWYihkCu00jKuThmH+1grhAAnueHvuwvUNWxFmCUVpZikiDVQV4JrhRyEFAQZV+TlAdWi4+ceHPAT3cA/eXMKr77EyRj4tXnFdJn5MwcDXzlJDC7wO00DYyYNA6NklLkrmLwuG4uSPwRscbzvZOFvGviCEX6GkRmZlEeSz7jOYBeKfLZjGQJhnJaklwvs/PSzlfHmlOgX
J+QUFSRVHJeBDQpsfX94Z6jKpIk0jFqSHhMxFbpPEamOSVVoxqSlxf8/d/8Vq2uW3ndivxXf8MUdT6pTp1JXZ4amSFEiJSpRMKzRSBpNxGAwMxBmMLZvDBjwyLoYwHe6MAwY8I1lGBqNJQsyJHlGEkdZYm42xSa7q3NVVz5xn52+9KaVfLHefU41pWY3p1mnKa7Crn3OPt/+wnrXu571PM8/dIOn6zPH13uPEILeeZwLtH3LxfYSLSRJBpSVdH1P1/eUSLQzaCQ2ZmNmAWOWmz7w3r61RJdiIsrRYSNlPvDhrODTzx+w6QbeP90yfIC/9qyHFCI7KoU09mBHda6QmxkxCkKSKK2o6hyglQiI5CgqRT2dMFtMWcwmFEXWjVVSorVhXLLjc0a6ocP7SNM29F2LHwbk+BjfNfS7FdbMMMbiAD9kM4tm05BcILaBsPNIrVGrHXJ0GFFSsprO2M4XGCWZFhYhEm3f4XygS4pgpoBAyOxUJVTIVRB51ZH5vTMSub0RYmBwnr537Nqe1bahUz2+95RWUxpwoaCOUEtJs91w+uAB9a7BLvYo6nq88cTvIaTzgsQrVOkan/aa23iOyF2k5GuUu0MrA1+7MeGiVjy+f44e1hiZKLVECzCqRCpYacUbBIa4x4+4Hycmw+LWEbawzCcts+oRl+dzVpsXcAG0vY9UW2RtkJMCpMVvNEEmyvIxhRqIsSPGluhLLjcHWBuZzva4wcsouUSrnsSAI5Gc4kZzA7Et8WzxcgVmgpwuELbCRo+KWS2quVhjvIDjH0TqF5DqS0j5PqtqyTeWN7FSwRtvfHjTPnKiY/DEJMEHrgnLK1LjheC9qmInEqrYoxsiUiaUDmhrMF3CrHoKp6i8xFaWyV6FLDRqWiAqgxMh93utIdWWWsCFmPOu9zTLGZOiRhoDusZW0LygeLg/oGrLC5OCRYg86DzrmNgRGEjcajtutx1b5/lS3xJj4OMhci0FbqbIH0iB4xQwaSArObdIHMJrnDOZ6hT0mHRopIzQD9B3iO9AY3ymgTcEx+b8IcG7rFc63vAppWxykBJSZMP5QkvqwhJDZN1kxGbvHG5EzIaQRc6H0Qh3CDCEyDD07HY7Qoxk2JMg6zAlYvSE6Ci04Xi6R1VYVn3H6e6cMmhiWWKDYs9WudcIowLOqHQqGEuqTxGCiQRyRPSmzO196XjC7cMZd892/P1/9Tbn3yf9ZoFAIxmCJ/SJ4CLRhcxTc5nj6i3EJCgLy/JwhpYC7R0iBWbLgoMbh1y/dsCNaweZH3ml7DUe6GLMYAcfApfbHV0/cL66YLe+ZGgb9IiYHrYXNGcava8o6iVDB0PT451H9p5GZ09MqwsikpBURi7GnJnv7e9xdHSAtYb5okJrRVFKtBFsomEoDjIwQuZQL1JES7Ib1e+xkSVMPc472m5g2/ScXW54cHqOQmCQlFaTcOzNaw6WIVdo0mPe+vKXmSyWLG7cYLa/AGlA/N6Zo8QNAn+YRZzxp4cSwkCFwAuBHPYx7e+jmQr+6Scr3rwmOPyV32B5ek6lLdNCjRZxFVIr3rGGewy87J/nP9r9l1RI4qfLvKdUK0T5Db759U/z1dd/P+2gWM5+Fl11yHmJms9Iqwn9mSWR2N//JlX9kHa9ol2tiP2Ce49eoawnHB4/z83bH0PKgFJbttuG996LpNby0ctPcnx5mzh5izj7Ok015+7hLfqyQmwvoW/pdz3nJ2s4nMErfxZdCLT9qyh9zv3lLf75iz+MUho++88/1JmP3hMHh89QGV6ymv+isjTW8Hf3Z7xvNZu+oHELEB5kj06Jeh2xFw2zQbIYJPMbipsvzJBTiz+cECtN5wYG7wmTwLW9KWLmeV9NeSTB3jrmYLokqZIbagEozm8GLnxEI/hRsiTl12LCA2cp0aTET92/5NX3z3l31/L/vuhpnOe/iZ6j5PikiNxqAiIGpO/pY6DqLrG+xcmSVk8QaHSqMj5FG6TU0DawXX9H/YBne8el
vGFkwfKnwhYk8CMwanzgE9DSlbNO/rrSWc1oPsiBWoisRCMSKBJ2pFR0IQcFIXI5O/eHPTLB4F22/os+B+kU8lcEHzzKX9l05TJ2EunfXLEcq8v5n/MDtBRorSmMyp/z+zSuMqMnWe5YFo8pjqIZmeeaRpQgV+X85BHIrElqNdpqtFZopZ5k/8FnHesQA/3g8N6z2+5ouo6h64ghIMiI5UBG4HZdh95tkXrFbrum3W0J3iO8wSiJ0wODGgBJQJMSDKP/sTEaaw220AQGtFaUvUJrSbMdaDuPEhI5zrkSGiFjVtP6vZLMjUNclYiEyt+lJMTE4PLazgfOxLbt0VpRlwOu9gg50Dc7lNZ0mzXtZo0uJ+hy+ntmilIfSJc9IhZUy4BA0g+enfMUnWeqBoKS+GRwITtppRggRmRKKCEyIrqwGCmga4lIHDpjOuyAUImhN0S3R/BzqlmBsAI9MShrkYUGI9Al1IsMuS2rhC0g2PylzdX+JzG2pKwmCFoEDqPB6IJoIqK0pNrk9g+akK5UmESuFsZAcI6u6XDrCk5KpNUsi4qbt0qmexVuMiXID3mrT+T3kyJLJDMhWSqJt5JBZyZKHLKhfabpiAxKioqhq8FrBrdjcA0xeLTNSoBl7yAEisHhnKfdOrouEH2kLbJghtkNmNMthYks6wlIuIiJDnC2x9se1wu25xIXJKuJpTOGpiwI04o+JtZa04SACwlCwOPpcMgYsENHxHMx6wi2p1BTSrWPI3EmWnzqsesC1WqkH1Ap/O7KeGNM9INHSDB6zB5VJvvHbgRPpUhIkb53bDZZ3q3zA4GQtXu1wrnIdhsRJGZ6wEpBKSUlAl1q7HwPFxP3LnZse48RWbBbAGEIdCLyaDhDInHOkWRG2IUYc494t6VpE3VlqSsLKSOm07ihXZ0XpPgAl1jAFawqCfFvjNHPesQQ2WxbYpSkJAkhMjgHBELskTJgZIkWhsIkug1YKTAyIJXC6kQ1MRSlRoxi2NnWL9F1PX0/0LQtZ5eX9H3P44cnNLuGs0ePwDkqY7lx/RZ9iKx3PavuMenBY+DLDF3H9vycFCNFWaC0orAVhS1R0mBM5h5mFoKg251zuXqIUoqitKOt4cjRjRUpVFRlxY2jI0prmFQzbAHaTH73gN1+h4aQGm1qTDGhnMyopjOikGx3Xf53JHqQpEdn1OsNg09ooalMj+wdodlx70tfZHfykKOXP8rRyx/7PVNqTncf4P7Jz+OuHSH/0I+Rlgu+/Ppd3n7/hNut4wf3e7xVHLw/o32sUA/u0e02uCBIeo6xFTdffZnZwRJ/eg/x9pv4oeUfNSuMkhwvj6lsyfb+C+we/ee5SKoJAACa1ElEQVSYvZqP//5E1AOr7ZTeHeYqi4bFPPL8Sz0ySfrLPXynqM0+Q9VSTqfMls9RlBWz+QFVXdO30O8apKjY27tNmCVOl5Izv+boPHDj0YQ4WDabjmbwFLs1pt+wvdjx+P4l+m7Ef6ljbkr++I8s+MwfvsnD8xu8/fAaIX7YgTeBdxjv+JNVzU8WBY+mBf/kwLJynrfvv8e6HWiloBcZ4BSSg7jgsv8pVDhmzb9gwWdxXnN7qZlIwa2vPmR60ZJ87sN+oxn4/OmGDjieWSqrOH3jbU7DW7y62OfP3vkoqij4xsTwlhVcvvoOl6+8TXjd4v7hAtoK8xMfRT1/yKPljLPFhNX5hr5pcZsdPF6jmobXU8s/izv2g+MPNRuKKvCP/0zHV37M81Pph/hz6d/jffGQvyb/Jve7RxR/c4b5bIEWkUIGkL+LeryJ7PijR7uojFqWCCJXzko5+8oZmRsyWCHEQEgxi1uLLPTgfW6Cy5F+JMnCF1YLJlXBEBKPVYcWIRtZj36KV+Id7Yh2FaTc5pKM2TQMzqFkorAiy+whnngsppRGabGnCfBVLzg32Z9+XkHm0iopsvPRM5xryPeCc+FpT/pK5CIlQgjEJJEhIkIk+pEKoLOMphICrQTaKJQST/bl
OFYinPdj4O3YbLe0bcflxSXNdkvXNKQQUFJST6YoH7gYejrX44aWYcj932G9yWC7waK0prAdRVGhlaEaTbJjHLnbqWeILVJKtM3esvHKb1bMUDISHAyLiFYghEVpibxqGfweGkJIhJJIZTDWjl7JEh9GiU0SPiU2bc8QAptZR9sNyJBwSqKB7fkZUiRmxzczvSg/8b/1FKOwbRjuPmAIEbHbEeuSs/WG908vqBgYio6kFfXWMWs1rskOToSITFnkZ7JcMD/cY35+j9lmzUW/5cHmJLucKc20nLF6fMT6vQP2TeLWNY8oIt3DAr+rETIjcG0d2T9yyKi5HCydnyKtRkZDWUwwukLrEq0sWmsckjSqJRXFFG8S6xJ6OmZdQEhFSgI3eAYS2jt0zCIpbnC4xpEeBaRO3PjJgqOPzRDvTbi3rvDhw97qc6tNxsB1JfiE1QxW8bAUXBLZtju6dcNgCpw2hBRxMUCU9N0hMt6i0BVGDXQyEAuJiDBbtSxPNsggkEFwt3NsVwM7YIICC+fbHfe2O472Ira4ga5q+oVhU0ku3DmP7V2CL+jv9ojtlNmne0oHXWXoa8XgPbEqSIPLrBQf2AjHe3T0caAddkTteeea50uvBj4SS2J8jlY4Xlc9b+1WlCWYzlMUiklpfpdJRqaEd44YBTKA1oZCZk5cabLObNd6XPB4kehEDtSbtstKSEESgsC5XHYuteaFwwP26oKq1JSFYgiSple0LnJeJSRDpiZliQcQ/kkZm5SYTTXTqSGEwDAMkOKT2CmEQilDQmYiNnJUU7kKwjlRUEqTg7rLKk5ESJ69WvEHPn6ddeP46vvnPLxonul0xxTphoFJZSiswRpNXZWARIoKITTFdIYup+zNDYfLgrowXDtcUlcV+9eusagnVLaEJPAhsNpuGQbH2ekFlxdr2rZldXlO33dcnNyn2205Pznj7PQcUUy4dv15glCYYaAJgc3qhPXqEUkIkBuSD/S7TeadSk0jMj1MW5s53DJTlaTWCJM3PlNMkFJhlEVJTUwDIbW42ZyjvWU+OOyX1LMKW9pnOufPchS24CMfeZX5bM7JwzPefecubddxuVrhgkNsE71WPLYrKiFZ1BWFEviUePzwEdvtlmK2z2y5j6kn1AdHCPNv90HlrXbH33l0H7tbo/5+T5rPePTSS5z/gY9z8Ogc+fpd5k3kM9uBdXK82024X5Yc3zjklU+9zHQ+YXpQo0qBKQSFhT1rMbN9tDE8d3jMpJyR9vaJn1xSLBPzw0jnPG/e15w/6Di4/j771+6iwynN+ZboCi7u3aa9uIYS99HihKFtaDY93hiKa2fEaYnzHu88SSaUKUgp0D5+wHqz5dq0xX6mQbUlzT3BLhQcXZtwcLjH3iJx+/nE/PEU+4sLhlZz/91bXPx6z+VwQF3kZOXDHQkRByI9b8wMP3s4RzjPTz14zKlz/EPlWU8FLCvUbHKFFIFBke7+C0RruPWJhlde+SEmh1OaSmJCQt8qmUwSCU1MEt151KoCF7hoHJvgufh9Hc2rW966KPjb3zhnvm14eQs/ZBLNakfzWslbDyU/c9GwConNO2taNyXcnjB/acpiWrP4+G26XYtOPQnJnZd7/sTHNzQPBn72H7RsN4HX/6ee3RcCv5L+FV10nKcV73KPtWvof12gu54f/Pgn+SN/4icxxvCLn/un33a2nm3Gm8YGfCTXaYWgSDl7KY0iJYnrYIgB7xOBhA+Rza5ncIEhCFzINnEqgVGKW/tLbi5mzGeW2cSw2gbuPh7YCs+iDKT0dCPJLGFHlk70pBQ53Ks5Pq7Y7XoenvR4/9S3UsjMDc2S/4aUni7gNP5fIJBKjvB/nw9+ZArUvFJ85uUjmiHweNV+HwJvtiycTqAs9OjHqxAoBCVCKGw1xdiavYlhb1YwnVQ89/xzTGczquUhRVVTWjsqdUW2u4Zd2/Lo8Rknj85wfUuzucQNHeuzE/pmy+XZOednl8wOC547ukkyBaEP
lCEQRKQZMrIdmXmXvt0R3ZB59RmcjtC5v17abMWVhCAJiVSWop6jlGVSzLGmpA89vd+B97RNgzUaZRTVpMJY83umjPqbh7WWOy+8yMH+AV/78jfY2z+Ay0seX5wTvCeFRC8k52qDTQm3mHEwn+Y1fHqKXa9Z7B9xeHhEtXdAudzPyNB/i8f7Xcv56Qni/BTefRvKEvvf/GeYz7zMK18qkF85ZbLr+WTnsgCOnbGxNQdHN7jz6ZeoJgW2LrP4jAVroLSGg+kCawtuHB4wqebUVfaqzjr6idVlYni0z+rNyEG5Y3HzbUJc015e4rqK1cPnac4PqMoT6nKDCNB3a4JUVN0lYlbiVUVQFRQKNS+IydGdnbA+uY/7FJhPgnhU0H5Z0e4qquv7HO4fYauaop6i31WYz5e4beTRvevcFSDmhvJQPgOcQ4LkiAy8M9H4wymffHzJHz895yR4fs4kQi1JhwXyaJrdzaQm7XaE08+ihhXXXvkxPvZTP8AgElsi2nn0tYJqJvHKEKRGdwG1KqF1rN+5IOwGVp/uaf9cw/tfLDj70opr5y1/UTt+Qkbi6y1pZ/klF/lnbYvTEXd3i9htiZVh+qJkVpfMXr6O6gbUw3Pi1nHzk4+Y/KkdX/+q4+/9s557DwPNP28ZoueL6TW+kL5MiNCGHBOGIqH1wOHtI/7Yv/snqOoK/jfffraeaeAVZK/MKBhBPDxR0Ynkn8kRmJSu1Kgi9EOiGyKDl7iYRhAVKAL3LzYE57nNhImpiL3Htz3RRSoFodDZnyelnEkpixRgrEIqwf6+YbEwSLnj8nKHE46UQlZayXK/5FAaMqBhFIlIPFXOCvFKtlKNGINRQ5h8UeK3wWV96CNB8AnnAv3gUTLmL5EoilxW29ubM50fMqsTiwlYI2kvzgi7NdLsU0yPP3Ctch88xIjSirIq8UPHbrula3acnJ3TbDdcrNZsdlv0tCPGhBL5sfkAc5vZtMTtduzmx/i+Z3txhmtbXO8Y+gGtBJNKo5VkWhdYq9l1HdumJSaB9wHvexq/oZctIRkclsYkzlcPcWnH0aWkLjr6dv0dOXXfv/Gb39dvb3cUUlLVNQA3bt7klY98hPv37/Po5IQ+pJH2ng9f66ZDa83FrqEPIWuPR8HmfMXp+w+YD5H5cy+glQY1Ktf8WzgS+V4XIRC8I8VA+trr+NmEe3cv+dzje5S9geF5IjX3XugYbrYMh1vccIkWJttiknBunbWSU0ANHhkdcfsQP6xA3UZNZrSbyP2Hjs3aYdIJe/Md8+mWab2HDxaZerwsEM8Jhv1EoWcU+nmUi5hNNneZzGvKiWHwmt7LzEWwFVYqrt/omEwv2Ds6RpibGGtZLGZZpjV4utUFYegzjUdYTj4uCc9L3HXLYjmhEwO79epD338mQvBDZcFBVbOYV0z2KtpdwxdItAJetJKFldwvBKdWEFPCx4DUkun1fYq9mr39A8pyQYoDuA0tia8UivdJuAQ+Bu6Vgrmu0K7IfNyu4vneo7/gkHcnmLmlEor3Nhu6ocdWCjOZ8EYX4cJhMXgiyXe85wZ+tnc0RvKS0qA022t7fIVEffCQmVJYRqe6mNs44gkGdax4juJJKQQCjgfv3+VXfvbnKWzxW87Xsy01C4FWGp9CVqhKiehDBjaPMmJaSWRZ4mOi93mT3+wS2yYyRPggJXarBr4YHzAvJDIccmz2cBtHt9rho2BZTJmVBU1MdDFijKGuaqw1HBwuqaqCsvaUlePifMX6YkfX9bRdh/d+DFpXS9aDEEhjEFITYsCPATdelaeVRihNDD5nHImn2qLfh80/Juj7wK7piUlkbWWlsNYyn86oKsPzd25x8/mXKeio0wbX7Th953Vc36LLQ2bHdzLqM+WDiHOZO2ptwXK5pNtuOHt4wnp1yRtvv81qs2Joe4ZugHJBGIE988UR07JmeucOk1Li+4HuYsXQ9Tx47302qzXr1SWr1QXTQnFrWVJazeF+SV0b7j64zzvv
v8+u6Xn06JJh8OzcBSEE0BahLYObIt53TKYTZuU5st9jffGA9LvaGvBbkAK/raGUYrm/z3yx4Ad++IfRquDLX/oyb3zjTaJL+OBwMXLZtHRdTzM4bFUwKQuEFyyqyIO37tGdt1x/6UVuvPIxCqmhKv6tDbx5Q4+kGBmabdYO+Af/GPkvfoEvaMO7pkJxE5X+MMrc4egnvsj+H32dxkO3K0hbgRNrAo628aADMvbYZoMm4Lq7CC2YFQp7cIvNSeDX/0lD26ypJz/PizfvceP4BQ4PXsC7Na3KLlnF8xJlPTLcQPnniK3DP95CjEwmAmsF28sezjsUJeVkD1V2HF9bIfS7lOYFpP0JiqniuVuRZuuhu8vF3QfYekIxm7Gup3zlT1sGU/Dpbc3tXnD/4X0evn2PED7ce+BAaf7j+ZyP7u3x/o09Hj+/5L224eclLIA/M9NcqxT/aKb45Rq8SwytoywkH/nUyyyt5s5LH2E+ew76Sy76NZdE/sepZTPRuHbAdQP7ZcUre/tYKVCvLJDB84nXCz7+NwyprPE3J5z7yH//1TWvrS7Ye+4GB8/dYLMJxLc6Jk6wEw43rPiVxvK1zZRPVIb/qrAsrOKzn3iRz330Nj96/Jg/Ig01juCvpHYTSo5UypiZLoKISInoEzEoXvvcr/Lg61/9jmyWZ57xipF6cyWckWlFPKGyZGs5Qc4XE0+9mkZFqw9kjyElXMhI5H7wdL1jcG4MFBkkhExoMljIGkVdF1hrqOuCsipG5GvM2tCFJcaEc54Us/6vGvV/Y0xkkFX6AO0pf5gnITWlnKmP5eac8Wa+bFUoFrWh95FueFrO/vBHFtDw3qNM1mVWSlBUhmpiqWpLXZfIfiA2A6Fradcbhq7BdR1pBDCl8TooldWrgk7EsSrZdj1N29F0HU3bZ8ET7/PNnkZdVZF9NbUuKEtLkANyACV7imrN0EMxRMohUhaKyWRCZRWzWQ68i6Zjb9dibMuuSfTdQNPscl9eaYTRSCkI0eN8z263ZrVKdO3uGc71dx4pJbq+ww3ZbCKmgJSSoqhQSuV+tpDfFcgp68hmDfHZbMbR8REHBwcs5nNCCGy2a6IPmWqUIp1zbLuMfO4Hh1OGvuvoNjvazZbd5QqUotBLtPn2W0NKaVSIiiMgMT4B7qWUcMF93zSyRVWhrh1nqdD7DXgHbQvOMxQFqxqkHDAYlCzZ8wbtFTJkm8UQEy40+NDjOoEfQBGze5mSGFNijEJKRYoBR2SnEr1JzGtFXWpsYZGiyF/SIoSiqBS2UghnwFsiGlEHRAjoMkuTQ4frWoKUiC1IL7BThRKKViW65NilBFKipMh20yLrp3vXE6JFVgFVJnRUaAxlWTCpK2L4cPccoQRqatGLknJWU08nlIsZ6mCJToFqXzMpJfPZhOWkIgyJQURKJVkuapaloapqtDIYaTDSYhAgYq4uukTSEWUNdVFQKIkyChkDxlQQSpIsiHVOippKsq4EupDYQtINoCqNVgKJB9fRNS1h1XDuCtZFiTCKS6W5MIoLJlw0S4ZYcXStxjnP+cUpu2b7JNtNo3lA1jRIQGToW9YXw3e8f59xxgtK5KItCbSUVIWFFOn77IWb5Qo1Ygj0Q358bQ0iws6RxSoQBEApQVlqKivZdo53T9Y4P7oGCUiiJ0qPkppSKBaLgjsvHGGtwdgsk4gaSNJja83B9QP6zmHO1nRtz3wxY76cEbynaTpCiLgUCc6RZO7tZmh0ltiIyZP8KEI/8o5dyAvnU3dmvHC95M37W157+/KZOBdJkYUxvA9sgkfPa6azKbP5lNuvHDOfzzm+MWc+12zvbXj0ja/Rrlfc/eabDF3P0SvnXO96rPXEJFDKcDDfZ155VtsNm11DAO6enHF5cc7p5Zpds0Wn3EWOMZJcIA2eoclc31oXhFKTAkhRoaVCqwVGayZ1jZL7zEvL4dGcutQcHBbUtWJ6+Dx3Ptay3bbcffeEZtdy9+49Li8u8UbijUJq
ia0MQsI777zJgzdb7t97mIF0v0tGCIE3vvkG7773LoMbaNuGuq75+Cc+zmKxZD6ZM6kmv63nFEJw6/lb7B3sUdYF77z9Fo8ePuLXf+PzPHp8wpAifYq4dkd4mJgUBVMMaRqhT8gmkJB88Rd+kenBPh/5gz/O4Z3bv+Vr9qlhl1Y472iahhA8g3MEHzjZ3OfZY/jzsJ/8GIv/5D8gnJ+T/vr/B//++5jCoGyR1fJCD3FAKIkKmsWbC66pYxZ7BeJ5iY8D69NzunbL+dawagzzWrF3OKWqJ1x79VNMF/vEqOi2O3bTxPlPCoKf8Co/yrHoMKXGDRrvEy7tI6WirPaop0vCAL4XJKnQUUCMaBsRKrJ99z0evPUlvDigf2sOWlEcHqMmr7C62XN+5xcx7ZRldwc7WMy0oCgOCG6gaRqMUnymdyhlmWqDnGpu1be58/wRgsRf/r98ePPeTDRf+cFDdi/c4ODFl7h58yWmh7c5evFldAok5TkTidsapirbtAYf0FKxN9mjMAVFWWGkQpgJYnoTFwOHccClgNcNoWypiwn79XFOiFIgpci7dxJfNoFoLWFZ0fpA55YcXZfUusTgSZVkcnuC6RPD+yvCaiCGDd3jC95Yzvm/v/oC5bQiXSthZvj8w5c5fftPsZCWv/C/u4bfDfy1/+Gv88uf/dx40PFPkioSmNFbwIocVL/Tsfn7kPGOEpHk7FYr+QQdnMg9XqUl0qdR/UOgpUQriY4JlRiNFDL4RiuF0RIXEpvOkeJI/RGCJMKoNqXQQlAWOYMy1lzpduT+LwmlBdWkRClDuxtIQVCWBVVV4pwbDwaAv6LkyNEv9oqqw5jBRLLidM4KQvIgEvtzwz6G8/XwzFil2chBMQSHDx4EFKWhqiyz5YT5cko1sRgjITjay3N2lyt25yuG3jE0faYZxSvJTElpS6zKaOl2yF67u65j03Z0g2NwHqRCSTUeAmMmzbtEEHGUrZSkpBBohEhIUSCkx2gJhaUoLWW5R1lqitJiS0VlE7KI7DYtwRXsti27nSMEhdOCwWSOl1QBkmdzsWK1O2ezWX9fHYo++NopZcGRy8tL7t2/R9d3bDYb5vMZN567gSkMdVn9tl9DCMF0NmU6m3L6+BrPPXcrH7q+XiJFRjF7Iil41k1D8IFd29OZgU4YBnqa9YbT+/dp+47bzS5nSB+gFz0JpOM3lwa6uGMIA5thjfc+ewJ7Rztsv29zrveWlJ/4KOHRI9ppTdIKYzTaXrnEjp7TIit8FStDfb/CouG57FY2NAPdrqXfRvom4VU50tNq6sUtpoc3aC7OaNvLvPZuSIgaOxwz8YkYO0JoCUETUgEopLIYYyFGgo8ILaE0WS/YeIQMeD9kxbdo2F70JGUpfIWe7/F4GrjvHjHxHVU4RiWBNBpV6SyI07aYGDgKCRsTQUiiSkzqmoN6+qHjC72WnB+WTK7X7B3Mmc73MNWEapH916PvGVJgFh3TGEgpjNLBmqo6QOniKRRDGSZmQkyROvTEFAgpexHooqIyFULqXHNJiQezmnePaoLWhMoQgiTuVUy0p3AG7RJRQTlRSB3RaUANO/wlhC6ybhNfPuzQXnJtzzJL8LiZE87u8NLBlI9+8mWK0PMP/uE/RiiNSAkZ3Li/5TctSWjJqPUdnyj7fbvxjLXiEjIFjBj5tykSfO7tKjU63ChNEoreey42Lf0Q2LWewSWkhNpkMX6hFJVR7C9KFoVGa8GgZfa/JJffdKVJOpfxrC2ZTAyklhQHhMmBO8aMRNbWMtuf4H1EqIKhdewdH7J/7YhmsyG89x591+F3A955crj2o2hG1p3WlczYqwgpBGRI+DbrSiuZrQ215ZkpKeVyiGBWVxgref72TT7x6Y8xnU147s5N6klNqQoEHVIMGBkptWBvNsVXkWk9wZoSLbMu85U6zZUfcQaNKYQsEaoiqZKoAkpnYwOti9F2MRK8I4hcgo4htwMCnig8qIjU2bNT+kD0nu26xTUS
1+2wFqqlYbJv8UEwXe5jK8/NXjCZXacX0EqIyRPjjuAaHp/cZbfa5V7z9zPwksvJbddx9/49NpsNX3njq7z+1ht0bcd2vWYymVBUBYcHh/zAJ3+AxXT5v/j1jq4d81N/4o/y8MEDHpzcQ8jI6eUFZ5cXhJToQiANAyeXK0LnCbOAnUu6lOjeeY/y/Jzy1w94dP6I+dEBy2tHWdBm6LPAjOsJ0dOlLW1a4b2j6RpCCHiX2wvbdsv3CU7I7fsP+aM/+8sMXceX54dsXjbMj64xWewhlUGZgqo54M4395jsBJ3b0u8ekdIR1eJ5UuiI8R6+7ZBxRqGmGGmQlIhUEl0kdD27e+9w8fbXMYt9ft+dV8Bali5vp83mku36MT7s6N0lxmoWexuUrWnWW3arLUZaJnaJ0pIkBsDh5CFe/QhJOIz6GtJGDq9XTI4+xnyq2N8qbKg4vnGNIlqW+4aqVhlY5TzaWOq9JQjBO2++wenJQ27fCCxfdFw5Zn5YQ4nIVHUsdMvRVHNzbz5WQeoceMOQAaejnWuMGVyFkKAsSUh8zPzzEC3WmSymNFYQg58T/YDUFmsngMjVxxQ5nhyhRbYPDEoSY6S9WeP7ntR0pF2HueiZvHOfoY38hlXcv16xbTyb3TngkN+comYTmN0iVAprCg6OjzCV4d1Nj4kDNz/5Kf6wlOxN73Ft8RYnjxz/7J+2nJ9FSpUwYmxLfBeYkmfM4wWRAhpQQiCJhBCfcmEFJKlJQtL7xGrb0Q+Bpgv4AFUhsUagtMIWlsoo9mY18zJ/jAGQQmZBcClQViKUYDatmE0mmEJB7IhBoKzO9lV4CKCtQU8XpCQxusYPgYObNzm89Rzrs1NWlxcgBU0XwPkR5ewzp1cYpBLoiUCXMmd4HoRP6OSJIaK1REpy4H2GIyaYVCXzWcFzz93ko5/6BPWkZu9ggTUG13T4vkeKAS0jhREsZjUpSiZVjdVFRlCmbz0tjDEYUAhVIFRBUiVJBaSxGGOyU9AYeKP3BLJbUIw+W0HiiXiEiggdkT5m/90Au01Hh2C3GpAisHQTRDFDas10vswWh3HCfO5oEuwihNAz9KcM3ZrHXtJu2txLfbZT/i0jpoiPjl275Ztvv8Hjs1O+/ubrvPXu23RNy+ZyQ11VaKM43D/g5rWbfOSlV/8Xv97h0SGHf+QnOXn0iM//2q+w3VwyRM/pGHhDyD3fk9Wafttjo2KhS8QwcNlt0bUlLgoWj+9z82Mv8/wE3Mjf9sHRtJvs6kWLoyFGT++G0WYyA/Ca9vuX8d56eMIf/6VfpTUWPz3gdO86Ry+8xPL6TbQpsdWcxVnJjz7aY7kT/PKw5bX2MSktqOYHI62txvUbJDMKucTKjDQmFSQXiV3P7v57nH/18xTPv8gPvXgHYQvaVuMRNNuOs0enhNDg/ApTGg5ubTCTmvX6MZenj6nKOcXhBKEsMJAYCGKfIK+R5Pto808wZcf+0R9k//aL7PuSa02FUpLqmkFryWK/pqotUma++1VA6/uOxw/v8vpXX8OGjldv7ZDmw70eksRE9cxNz1GtubGYcWU9moiE4MfAeyWIQw6yKdEGh4+RLkb6GPLnCGVWyXOjzG1IT2PFqAzYxfx7h5MDFsU8B+orfEF1CES61Qnt5UMWu5YX7z3CNZ729g3EfMbpgzXh9II09NBXyMkEXljgjybYScHedI6VkbubHpU8Nz7+Mfbv3OTlW6/xgy81fP0rLV/+8ppm56noMfi84X4Xra1nW2oWAqNzySeR6RAhZsUpNapZxXFSU8qKVTEGhIwoQCsopELJhJFgpMjuM0qN5goJpUbBaqWoZhXaaiZlTV2WaCuxdTa3l1YipCBKQZQSZUuK6QxQGF3hXGS2f8xkeQ0hC67f2dBsNwT5HmK1ytlbVuUfv0BIDzEgiEgV0ClibTZwzoCMrHn8rIbWmmvHx9y4sWRvb8rR0SGFNWiZiMMWFyC4ODoVBbSRRKuQRpGCGA0hRiHMFMdM
PZCFSHIfHSUR2iJ0gVQWqRzaVJiiRJkKZKaG8aQzH58Ac3wMhBhAjGhBJZBGYVAYNWouS40U+Rr7kMXXUSNanFxCdd7TO0+KLpeBkkBKjVQm63g/w+G94+TxQ/q+Z3DZxLx1HZvdhnsnD7hYrWhdj9AKqbOXsZCCru/ZNg2rzYrz1RnGGKqqymBDJAJB7wac96PmdkQISV1mUFYiPEHXSwFKR+b7Ew6v7/Hg4gQhx4PSiLRvg0MmWA0tF22DNopSaqKMbM7OiUSqxYTZ0ZIoBF3y+BSzWL0biHgCMVc9giAmQQijvvqzxA7+ptFryVlpiGXF3mIfW5Ts7e8xm02xZU01XVJpzfDSwGo+0B5OcXu3Ob8h+IZ5DT106NX77J2uOLkpWB8njJ2giiOkMHTnJ6TdGecP7/Hw0WOWUbNffRFRzunsLVo5Ydg5CIoUFDGAd4Fmc4K2PZvViu1qhW97TCqyiFDt0DpgqoL956p8n+ljlHXY0pKCI0UJURITdF2TAZKtR4gSrkBuwdN3LX3Xsd1d4HzP6VrzjXsHaPXhltlSiAwXHf3jHd3tht414/0rnzA+SIDMNFGV8r0cUwKvCCmigsPEEU2fsuNbMFeBNx/sPqjbHyPIFFFRoYMeM2P9BPREilDOYNIjF4nm+gLfBfbuXOfO3h7LvR0HxwckCoLeh6LELguUFuxL0CmNyo8ShMYUc4Q0BPUi533LYHvufHJLsXSozSWia6hmKyb7jxEi8sbf2n7b+XqmgVdKSV3XT07DLiQ675Aj2lUqddWrJsTI4HtcjCidvXYrbZjo3AM2CgpNDhLaoHXA6ogyGlNOKIqCGzevMZnUGcAjJUInlA0kkYjki5436kRVzzm8dh2hLG1QuCRZ7j/Pcv8FvG85fO4W7W6F+NWf5eHdt3CxZ4hiBHFJIBKHDnw2gpZagsr0KGA0l7eUlX5meg5VVfGpT32Cl155jus3DnJvtyoQOIbNfVIacglNFEgxUEwtQkS6nSF4kFohUciUFzgxknAkEUgighKgDbKcoqqAKmboqCgmM+p6iq33SMqSpEaKiBKehCPg8dHTu57gPVJGrAFBrlQYpaisHWVFTTa5MJq+B6khqrzhd9HRpYFd37Be71AyMbEJjcDoEmOmDD4BLc+q9Nl1LV/7+ms8enzC5WrFrmu52G3p3cDZ+oLeOfphQNYWTaRwHiUV6+0W5wPvPbjL3nt7LJcLbt64jtEGg0UiWO/WXG7WhBDxzmGM4blrt5hUNSEO+NghRT6gKjPw3MtH9OIFHlyewBsCQoKQmyQXrmUtOuRWQRLUheY4VdhB8eCb30Tdtbi+IUWPnJTIwzlJS7auZQgOQRgV2hIpyVHKMOJDwrvvH5jtojB8fX/KdLHkhRdepZjOqWZ7FPWUejpjeXBE6Byn5j7tpuX84JhueYO3qy/yevU3WGx2/Il3A9e+CY9eveS9Hz9GtLcx5x9Fu8Tlm18idee88doX+fpXvsFL4h1e/cWvouollz/405wd3iGKFihIweF7QfA9pw++wmYd2FxEtucRLSwX5gHWaI6eq5gsNPXhjFd+fJF14PUPZOnPAGHYEeNA9AMhOvrdFiESkT3qrsa5lr7fMvQtl2cP6duWx48f0g5b3rh/gze3L2VDjQ9xxM6zffuStbOsbp1yefOU0lrmZZ0TDjUFoUdd+xE3kM/jeB8yDSx0hNAjpcHoChC48NQwJ8VAHzzboceniIoDPoZx/5XZlnDwuZwdAikmSm1p6xlBLXj4gz0pJF741A/w8vF1XAjZ8cgn2ibvKaGqSCYnZSIkTAKhs8hTNa8BSaee542LH8eVgT/877e41tF+4y36hyfc/MQbvPQTv4Yynr/5tx5/2/l65qhmxgwqkT7ALwK48gVlBApnaXtJ1lIWQlBoKDUoI7GFwVqN0dmhxliBNaCMxVYWW1jKOn9JBEpIUBGh07caGmRC8ZglG6Q2aG0RaLStUKZAyESMFSH2mEKjrSR6
kD5nfVGE/ETRk9x4Ok0ZNHbVfI8hAwq0FiznlrYPNO1T4NKHMZRSLJcLZrMpk0mN0bnUT/L5FJ0cSRoEYSydjwxxQV54V+AakYFBYsxc05jxIgRCCqTK9IrsBDS65kiVe/EyL2KlBKiMREeKUbVqDIeC8ffG1xAiH3ilII3mDFFIfBTIjF0jkS0DXQxZYjTzPgCFQKCURhuLHPTIE//QpvlbRozZj3i9WXOxuqQderZtgwtZCjWkgDKa0hhkEsQuIBI45xF0rDdrzi/OiSlQ1RajDFZYBJLL1SUXmzXBB5xzWG2oTEFX1/jQElKPkoLCSnbtFmUE1aTI90FV4AeP6zKmIZBdvrrg2boeISLOZVWz1ApU8LTrDduLS+RQIi0ko9j6jj56ZIpI4rjG46i5PYwgK/d9a6sLa5HzOXo+p57NqCZTykmFrUrqqqAuLU6AWkqElZTLktm8ZB0kF6fnhMdbLjeKqlWknaDeVWhf0RuLSB7lOmg3pEFCXIAPyGGN7CLq4hylp0QbCTZnXUoppNJIGUbzd5PtLIXBKosxGmMz8EoXBVZmEEgiO4F1XWZKyCSQIuMigmtJRNxQ4DQ41+Jcl3XQuy1935LI6yxg6V39oQdeAJHrx3l9BocJMlcsEQiZaZgib+ZZE4EcfJXMB3uRFDIppMoVLxAIMeJJYsygWSlwKSBjxAU/7hkKKfQoYCRzhjz2hgUBIRJuMuD2F6QI9WKKmdeEmAgp4l2ktNnz3UlNFDI7quVGIoTM2Q1JgZT0viCEXGlQswpVOsTBCuMSk/1LJnvXUNoDX/u2c/XMJSOHDxC5o5AoqxFSkqTOdmaj1ZmWLbVSRAGllWgpOJgqFpWirGdM92+gtaIyWcy/nk8oJxXGasqqzICG/ZKiUE+QmTEKfACEpKjmSGWfWA+iK9pBI6JGlwu0qdA6N/FD9PT9lr7fkqRHWZDeIYZmVKgae5YXG2Lb4cd6XxoVuSKCxncMUbE/Nfy5P/kCJ2cdP/e5B1yshw9tvuu64pOf/iRVGbAygGvp2i1SRLR1yCuHnxQJg6PZNvhuIJJISoG6skUUo7Z1lnhkFDsRArTKlLDWWhQSoiD6iHO5V2MLha005dySrKWclxTTCjpNaD0hSqKyRCWI3hGSQ0UIo2i9MRapFU5I2iCRSLRThBg53/Zs1g1tt2U3rCmNBjFDac10vk84ukkUcLk651llvCFGLjcrHp095uHjx6jCYOsSKQyLaUESMJlOKaua3cWWy/untE3Dg7v36PsO/cUvcPL4AXVdsb83RymZe+wImm6g691oIJLR8YVRmdM5ZqBVXXJwtAABO39JvW+59cIRnzp/icvzDe988wFu8ATyeed8aOjCwMIYbKyptGbqJhSF5fSd92mbhlgZ+sMJwUjWydOliIoSFXJwCD4QY6TrMj/53Tfuft8oXIfXbvDDP/FTTCZTbFkilWY6W1LWE7Q2WKOIyiJuXsN5z5HRtEbxhS9avvL/2/DgYs3Ze4pJY/nRn3uFf+cb/yvWrxZ846enaLHh5cc989BwPPsx5M0/z/H2DfTDn0G3a26//sscvv8a7x0e8t7BEcXMcHxrhq0NB88fUM1rFDMUc5RSlMbmnm2l0Sbz3EFlLujZ42xhajQRie56RNMiWsd6e0YIA43cEYYyH36lQNIh6dDSsdxfUM0O8Po6gzkYs8IPb0glmS5r5gcTooJ10xK8x6aIVhpTZsMSqUqkKhDjQT6XoTMTRYhM8ZRCjnuTeCJEkbkwChs1pTX4GFBNoPeJypSUpspc6xGLEkJWnAqux7uOYb5kvb+fcQ71lGiLXLqOEEPC2VzK7kaAl/MR57ONYWgGXEycBsE2Qa8VjdUoKZlZjRGG+e3bVNeu003u8N6DzyBkBH72287Xs3cn+uD+N2ZLQmareiEkpLz4pMgUIKSg1DK7DhWSaSmop4blcoJSCpU8UiTquqKazzFGUdUWrSVFZTD26YLzQUAvQEiUKdC2yiWJ
EPHCknUtBFpYlMxaxoz9NB9ymUeIlLWZRYLgc/aYXC5z9D2xzWCeMGp+RCkJSbAbAq0X7B0Ynr85oywU1n64p1ClNfuH++DXEBpidIRhR5Kgn3gFpycgCO8y1zaN1+Yq+/xgxpie5KlppAJlSpdWo+hDugJe5cxZaYk2ElUoRKEwhc5CHj6CVNmSUSiQkSgCYZQnETFmOW8ESihCEqSYxSV8tk+lc4G2d/TO4eOAj1fYAYEtssepMcUzzXhTytaLTdex2W2p5ZRST5AqYwuQgvlywXQ2xwiD3/SkBL0b2O62PH78mBAGysJwflZlNyatEFIwuCwWE0dDj1xS63NmJXPlfzqv2XZH2MJQTg22UswWFYdHe8SQUErirt4r0EWPix6ZPK0SCK0prUYD7XqDSxFfKnauwmvBmkgrEipotDekUXAmxkjbNDjnWF1svm8ZbzmZcnTzOeq6yp7TQD2pqScTrrrlSSgm05oYE7Nxjb+7krRfcKxXPWdJYYDP3F3w0slLvFMHvmR7pJbcUTnTnxQ3SPPPMIsxBxS/Y7Y5oWrOeGgTflJRTiZM6j3KacVifky9XFAW+1TFAUoqikKPfdAcYGLKlYNut6FZrzLNUpZEofEholWLkpHke6Lv8IMYMRWZpyxEyC0dGSnLEl1WOFWjdUlu1n14Q0qJLQ22MiQBvfdYKfAuG8/IMJBLmRpE/twZmJp/P1fXZNaSHwPyFeU0P0CSyNmxQeKDpJDZra5UKvOWhUTL/Dn9qMUfB0nSkr4oUZMJnkQbI27EQI2SC2iRgWnCe1wIyJQQIWWP9sERfaTxiVWAxio2UWO0JkpFobILm5hJhrRkvbvJdzroP2M6kbhK3p/8HRhpNiVCaoY+4V1+1LzKC3JvmgNvYfLjpRgo1GUuK5cLtCnZu3mL5fH1nIEGh9KScn+BLSwpDsTkMDFR+Axhl2aBUEU2bfCevu259+ghSUiOr0Umkzn9sGa3fUwGFCW0Kjg6vk1dTbh48B6nbYcbOvx2S/QOXMg9iwi9z3xjJ/L3KA1aa6TUGRiQa9wf7nQnR+wfIXwLcUASKMsaqcAWGfHtXVbsEWT9ZREzwlyoAl0YxGhmccXjzEFVPEFQphgg9IjoKY0llBP25zOOljMOD+Ys9yrKukIvC2RZZLPwiaaXEdEZ/CDovCVoQZMczdijGdqWhKDrGoSUqCKX4qSS6JCpBt73pDQQXM/Qtsix98loHZmEGoFdz24URclLL73CvUePce4+QmjqaopQEocHKVhMZ+zt7aGdoKt3uC6bfO/alsdnpzTNmtJq5nXOiIrSorRC6gKpC4SIaB0QEgqT1asKayispa4r9g4mGGsop9nn+MUXb1OZmrvvPuDi8SWb9Y71qqHv/RMuex8jZ31PEzyqsyTy5qB9wFnBttvgtaAzEqckSZYIWTEMntXlOh82dg1DP9Csvo/c6fGgTAKjdd5blMre2SHbjaYEUkRk7giNCYAYTVDGVgm5bBmAKO6B/Cyq8sxfvs7hzRdZdC/j+zlm9yrq5D9GxI7SapJSvDhfslgssZVlfjhBW4WdFiijqYuaup6ilMSMamtXHbcMTIukMjKZ7+GGnrYLDC4iRUKbSD0tufnSy6QU0VajtBwrt7kPOp0cEHzgcr2jaTuCMgSt8uH2QxxCS8z+jOJ4iSoNIkYG57lMAiEdqe2BhNtd4JoNqpygp5m/W072ULqgloJK5qQmhtH3/EkLckzO8qshUmJiNFZBoc2Tg7+8CtoiWygma0lKolJCxkhIiUnMjbWrwDt4zyq0uBQReETyFEpQFppgBL3MOtGh98y9Z73Zcv5oS+ckd5sCFxVvVzXGFtiJpFoIvlOB4RkHXsaT15Xs4tPAq0yJVJa+zyR8KWBegVGCo7mktALnc6lYiQGrLjGmopwfY8o5+zducXz7JYa+Y7u+zKjmvWNsVRF8QwhNDvspAZKQamIyBO8QztFvT7n78CEhRkyhgR5GtUFbFMznS4wu
ODy+zd7eMWpwNI8e0A2B3XYguty4B/Ae2g5CEvQkohCY2qB1kR05xpPth741RUfsTxDBI2JA24KyrBFKoou8OKLv8DHrkGotiQikqpGmxFj7tMf79AKOYiGjEUT04AdkdBS6IJWK/cWcG4dz9g9m7C0rbF1R7JWowiJLiywVvdTI3uB7SesdXguEa/A7zxA8fdsRYsSPfOFiUlNMc5XDBAspEXxHio7ge/quQ8WEdx4hZT7JjjfrswwBRVHy4guv8OuvfRXv+UDgFfShBwnz6YyD5R6iS2zrNe22YfCeXdvRdTsuCFRGsikURivqWXZZqmdLqukcZcgmF1owm2sKq5hMKyZ1TVFUzOa5rFrWFdpoSl1y/eiY2aTi7Tfe4bzQdJ17Enhz5hs5G3oqL5nYAo1A+YDuewaZ2K4DXoGrSoLVYAOiUPRtx9mjM/quZ7vZ0Xf9WAn6/gVeYtbPNSO7Qanc908pEIbc2lFXJQKV8QRSCiIy89LJWVkae32R+yB/BlkWLF7+33KgPom2M7SdMAyvst2+AAmszSXSF43iZa2uWpmkFGjbS0LoqcuaaT1BKoU26gnQMqOSI/EqS58tca5n8GsYutweMgFbFezPbyCVHgFHiegHou+RUmPLOYmEef8NLs8fkrQmmWxp+mEOoRVmf4Y9WqLKrEY4+MTgc5LRux0+9GwffIXm8Vuo2RHF8Svocsbe8csU1YyjwuY9J4UcAAFE1j8Alf+MQJL7xLXJByQhde4Zj31jyLTSlNWTYGTSFONiDymMyY8gRtgNPU3bEkREJo/EY5SmGMv8Q5HZN0Xb4lzg/GyHeu8epxu4e1dzOSja+QGumlJfMyxeKBDfAUX+zE0SlB5RquPfr+wBu74H4QgpI1cZb4SYroQgYDIxWGsp67FcqTVidFKJiWzzJTS2mo1G4fmCKFsisbk36X3WHY4lRDVyLyTKVkwmM7zPQJ3dbs0VDgAxBfYRQqOlQaqIIJu0h5BBPj4krpLYKDXSZuEwnSAJgSkVqhDjGkhMKs0nPnLAjeOB9+9vOL/sPoT5Bilj1v9NCqUViDhO+wheGKOqEGPZn/QtNJcnuhnj8zF6ESupMEoymdRcu3FMPZ1RVC1973nu2oKbR3OW+0uOj/YoyhK7mKAKizAlwpT0SqJ7h9MO3cEQI7EUiKnGB8m0VBmXJhVJCkxRYKpyFE/JN0LsJMpD7KGTYGQWaBEp5mt9RSl4hiPGSNN0bDdb1qsNVV3RbHYIJehCByKxubigkIrt5Zqh7fCDe1Kar8qSqlBMCs3BxGYpzXmdy4mmzBxpK6grgTGCvXlJUaqMflYZC6HGPOGJAaSxqNqwt1xy+/Yt6nrC5WVD1/WZAhQyyr8HSInGOQzZsjNXDxKEnG8USiNsgakqinryBACZYsjVjxie+Zx/cDRrePCWYDbzzBYrjI342R5FNUVcofPzhRppO5qkNCkFlDLZfzvlLLKt1lxOH+CkZ3H/Zaquori1RJcVShukkigNtjC5QjBaWUolc2B/EnjFCNhM+b5LH9jUyK0cKUXmykeZlfa0gQRlWQMK7UG6hNSaoqjz3jZujtFbgrfZo7qoSClRlxNcNQMzgaLiww68oTfs3j1mzQ3szR364A0wFlFUhMGzu3+Kb1r0o/dZnp8QjmEolyTXspMKV0yo5ofYyR6COJbNQUuNEnIEUCkiOaEBgVUyg2YjGQDL2GpKH6wl5t0rpVFhP2VCYwIGH+l85PL8nPe+/FX6Zsf+MDD3Hjef0y4XUBSoxR7CGIoYUFJQT0vmBzWxghfQrJ3ksVZsVKKoEkYlhPxdVGqWQlJU9VjPH0uwJJwPXFyc40NkOp1R1hV+UAzJkqJn8BGj4Ma1Kc/dXNBFwdZDUgUYQzIKFxJNEzBFwfTgYLwBcsdQ1/uYekpwDtdsM/Bj0JnMLTxIRzURXL/eMwwtTXvCZnuC1qA1hHjM4eELaFmhVIAkEVgGJ+gdNEPEDTFvd0mSTIWuZiTA
AEiwC1B1VjeRKXD9sOQ//fc+weDhb/ydr/LLv3bvd3y+xViesqbC6GLcHB0gkWOfRZIXqpISay0pJqQtUaZAGj3SpfKXECJ7FCdBYQzTELl57Ygf+bEfpusdu8sBP0SOj2YcHkyZzyqeu36AsRY7maCMyQEmQtt3nNlI3w08Cit2yrPUkr4uEEqjTd5cirpEm9w3AjLR3mdpyvvRsFICE8DvwOqESQMygogDhGEMwL/jU/tth3OeBw9OuPf+A+6+e5fgHHVZIiX0PqNR46ZhMzuhaxy7iyzXGEJEKMXB0T7XDvfYm5Q8dzCnsIbpfIY2mvWmYbvtKEvJfKEpSsW1m1Oq2tB1PV03ZGnVBDoKbBKYJKnrEjUrMdISveT09JzL1Zq239I1gbbxOGAFmJSompZBdMzqmiUyn08BaSSzYkI5m1EullR7h5ydXfDuW2+DH55+fR/HyXuCX/0ZwWLR8txLr1HP1syP7zDZu0ZVFMwmE6RIeFoQgWQqkrLEMGDLGtt7gvOIBOcH7/Pmc7+KkRNe/JU/y2R/wuJ//RL2aJ5R/EKNCni5jPsEEzGCnSDjDVKKECdEWWCUIaUwngkzSlfL3LsMY1k1YajKCcEElKnwITC4jr5f5p6/LXKVUEgkIouiBD8ennNFLSyPKdHIaoGaHPAda5/f43CrCQ/+6Q+ilq/if+Af4F/+LHI5R904xl30nP3Pr+Pvr3l1vePOruXsY9d5q4j42tDc+1puMz73GZrrn0BKUDKhpGBuDVYpjJAYkRXWLkO24tu3hlKqzPVXmb+bwtP7/YqCe9VHztUvkVsJQrBpex63HXdf/wb/8q/+NfyjE/6099yJkbuvvsD9T7+KPb7O4Y/9JKauKYyFELACqknkuodbg6TzgtcuEu81iVRGUhGevOa3G8884xUjWu0K8ZJGWoPzHufCE4P6TILO52k59mBsYaknJXGIBOdJUWBFBtykEAlDj5RytMAaEbkyo+WQRc7gpCHFjFjzPpGCy6pKIUP/r/pB+VhwRXUS+SYTaqQJCZQ02HLC4BzKlIQIwWcumBASnvReBIiENAmlx3UgE8ZIDvYqfMxGDx/elI9gMJWz3ZxZpafvLTdtR3bQCPO/ogGJpz5M6UlrID9Gydw7K0vLcjlnGDyF6PFDYD6rqeqCuiqpqhJrDUVZII3JN0aMiKjpCoMmMi0NwhucTBQKlMpzK5WmnFRoa0YXnCwzt+slg5NsaksYCvq2ZFeVGG2Y1AVSQFMWxL7A6mdPJ2rbLhsGhJArKMOAkOBdNgLptg27KOhbT7sb6Npm7LMnJvWEw8MDlnXB3t4UoxS6sHntJgjOZ1k8lws2yQWSFyQfM+0hReLg8RK8djkbMAYtElYblntLYoTlcslisYC4y/Z35BXvgT5FugRFCKPJB3jJ+B7SkwqJGjO7sWg18q7FU+eW78voSeKSECLN1hFjRFctQq+RccKsKkkikRgATxxBTVLAZLogeMF2vSIFTzAOX7VYO2HCgpoJWhRIqZ9Q7YTMLLYMhMrB7SkVj7znkUFDGUt0NYejn+uY9V7N1tXzSilJKaG0Hit6gRBy60fJDETVIovLXH1BRhfHKDC6oLC5QqKM/tADbxSS1tRsdU3bDXTnF6gYUKVh2PRs0iVObRhEToaUlPn9ac3gO0IYGJoV7focpQWqyPr8PYak5KhoqBhSovcBIQSDKFFKE6RBjZS23N9/CsoSORQQERmgKUT+oZC0uw2b8wvWj084f3yCO3nMRQhcpsjF3pTzx3tUtmTpPfoK6CUl2ihsaZExgRXYIJgPgXmKRANeJb6TWMMz12q+UhxJo3zd4D2D87Stw3mP2u6IweHbBhnBGM1sWjOfGmaLA+rlPuvThtOzc4SK3JoVWFkTNit2TQum5KKaY8qK49svUM9mxFAxtDqryKQJg+94dO8uu/U6B96YLdrCSMk4mM9Qqs5qK1JT1/sYXSOEYRgSrvfM927x0R+YcHl5RtQzdtsN
J48fsN1s0Ab0vEdIgVFjw1/noCUN2BLKiWQyq/BR/ZYWbN/TbKeUYfUygc3KN1kiSiK0HDP0RHI+84yJJJFtGaVWuf81UqLyYehqPUmsMePNY1lMpqNt1pq+Gwihww1rYpQUhaYsLGVhUdpADKQQsBIMU7wvmRvJMPS5x+UjUiqsqZBKY6sqZ8pjycg5x3a3ox8c+5Vmvdlyebnj9OZm9FneR0p49O6SzfkZSjrefufrz4zeMriBew/uEWNgsZhRGkPoegQQ+54UA6v1CVtgu2k4P13TDD3ddoUi8gOf+Bh/7I/+IXQKFCm7F339m9/g4vKSh++dcnr/nKpULOcWayWP3y0oSsV0MmNST0kMxNhmSl6hkUoymS6YTBbYoubl2y9x69ixXTk+8srH+bV/9UV+9XO//iRQRmBFogEG5wltS5IwqIQwkv5iRRE8Cymhruldh7aZQZBEiSk0Qz/QN9+fzPfwxW/ww3/mr4I74u1f/wO09z7C/updZssvcP3GcxwsZigNMW5JacC5NUOCxXzCj//Ev8vZ6Rm/9tl/xOXpQ7SFyUIxv6a4/glNNdfUByaXga/AtulpTLtKKJ4EZXiSYKAjYbR7jMGPvHhJkln8QYwKT5mJmBG+Qib0+PcgAZGNK9JViVqONpJSjG49+Z6XQmQAlxCgC5L9QNr3IY3+SPDOf6lZv2BIvzCQPr9C2QY9uWS3F/jSn9qyPQiINw9Jd5fMX/o0P/AH/x2ciLx57zfYbE9pHr1O9/XfwOxVlC/uoYxmp7OGv9EztJ5k3fAQEELhzB5aVSg5Q8spAoEWObmppUAz0qyEoE9wmRIeSCofdN79V7/Km//yF7h7csYbD++z27WsESyFYPP2e6xOz7j5kVP+1Kc+w6GUVKbAKJ2TMGVRIqFT1jR4fiZZFIKgJF7nzPpnfov5euZ0okjKcpYh92SdCzjncT7kLzfQywjO5RKokBS2oCgLTFmhi5okPU0LUkGKCoUm9T0uNgRV4LqArR3xpgBREpMhOUlMihg1IUh2my2r8zNSzPrBSktMmTVQy8JQFgVRWqIwFHaClAZQhADeRWw5Y7+cIXTF5OEjgijg4hJHQ1IgCo+SAkzupZIyyEeohLQJZcBYjQgapeSHlpXFUepRSPkU4Zvv7vF9XZVn4pOsVkjxATRzvnJpVPlKSYy9Y4kUgsJIZrXE+/wcbdex3QzstgMkn0ufOkPvlVL5JcUojFJaQlBYmQijNisp95+NLhBSYcv6WwLv4AYqLemHgRgGqtJS2YrC1NjScnCcA6/2LetC89bbM8SHvOl8cIQQWG9WxBSw1qKkIrp8yk+DJwVP1/VE51ivtxmYFDyeAVFojg4OefWVV0iuwzcrVqtLnBtYb9Zcnl9w9uiMqlCkJrtKDa3GFop4CGa/JEbP4PrcGjD5OsYB8BK1tCxmC6YTuPP8C5TllLffvJsrG+kpar0j657bGCmdz4E3RkSUiK7HaYXte5wfCNFnnXIjMTHjAmKI9Hx/Am+1OOPaq68xbO7wtc/9MS4vjlHqTaJ/yGI2zRkRIJLLuu0+4EOktCW3nvsI1i4xpsxStgp0qSgXiultSTWV6FqNdqB5XMlwwoiYuBKdgSffr6pIcNWWTaPoSIQIIWY+vRIfSJSuMt8R3Jiz5yfIRiCjf/PT5qCd4ijUI3JPmTIRpSLKDz7xhzNCDetPS8THBetf82wf9GjZY9SOzQvw6JXA5UfhZDrhbHrA5PlbHN/+BC447m3eZxdb/Lvv4N/7JmaYIY5uIgtDr0ZdB7tE6XnuaSeHQJN8RKrJWKpXKCEwMjtP6auKnciIaJcSuxRxjJUi4Pz+PR594Qs83jac77ZsfGQlFUpIhtWG/vySvp6x3W6Zdh1WaozM1YNcAc1ocyVhbkXex2Q+JH0n6Ox3DLxCiNvA/wBcI7/fv5JS+r8JIfaBvwW8ALwD/IcppYvf8skSI9k+A5Pa3rPZNjjvGYaBGALtNuBbAT7kTEzC
ZRMJMnKxNdS7mm3rcb5CeMnq5By3aZhOa+qqzDaBVYUpSwSjmENscaHF+4G23eH6FjeiG4duR79bZXWlERyxuwSlBZP5HpPFPik0EE8QKNrtCtdnaT4pEk3X0A8dg+8RhcTMDGrmEZOOJMGJhEAQgkAFgQuC3U7QRsf08hwlCj724hL5U6/w7v0VX33j8ZXY1SeFEF/5XuY7hciwHSirBNXVKXpckGM5P3pHcrncnmK+sWNeq0/ACjFlhRcREyKN3Lsr8/PsrkyMkUIJpNXIaY01MJnUufQZssAIQo4lY5GtA22JTAll7BPXkhz65RM3J6nMmFLkMqrUmqKu0UXBsdYshoF22XO836KNZrqYAQkzNKwqzWJvng8R33m8KoT46ve6xtu25Stf/hJnJ48IQ08bIxfDQHSe7fk5fhjIptGBbkQCBwlMFDJKHt2/z5df+xJh6HDNJZvNmjffepvHp2ecPVpxebml0YJ+p9BaMG8KbKFod5LL8wHvI02b13ZZW7RRGHuKsRVFNeX11++ShOLsYkvT9rRdTzWZ4p1j6Lt8fcZJaFLIpbqYPa5FlPi2pxACtdliVyv6vqesCqSW1DFTzC7limbTfjfl5u95jf/mcZJe5HPhj1NVlms/8iWuv/waw+NL/CoQ2h7frbI6khEIYxHJIaLPtnWXDf3lY4Zuh/MD68Mlpx99keJ4SbE4oCyzbSiIsbyedYevRBtgnLgP/FWMJ+qrwEsc9YnJhzQIRD+QSJkPrxUxhicCEH3n8D7gQkf03Qi8qpBSPS0vk4N+vjUlKQq00qTgkUJy7+ED/tL/6S99KPN9Nex6zfM/93McvfVNFg/fJ841Q8zA014u0OHHqfwhtuywewNqsp8Rx0lh7AGFddTn30C+vsE9uElz8odIZoIyIFRE1d9AVl8h61JnnfK+fA+pLaqeoeoZ0mr0JPsu76RGyyzaoaTBCcVW6KzvnjpiHHiwfpevb3YkKfjhl68hraW6fRuzXPLO6+/y+muvI3Y77v7iZ+mOjnjxMz9M8fzzQBhL+h7frui7nrffeMTDR2te0oYfLIrvyJr+bjJeD/wfUkq/LoSYAZ8XQvxT4L8A/nlK6S8LIf4i8BeB//a3eqIrlZuQBDEput5zucmWYjE6iIHYR/oPlAW9EFzsAi5FlltDta1Ztx7va1LwXJyc0mhQN2/kwKtVplGUFRJB9IGhz96xfdeyXl3ghwE39AAM7Y7t5cmTdDMJCCKSROL6rYQtZnjR0O1OSEnQ9x3ee7TM/ou7Zkc3dLgwIEqBUQYxc4hpRxKRIQWICTVk8r4L4LrELjhml2dUpubjr+zx6nOH/MKvvc833joj+gjwFeCPfi/zHUOi3wzEZeSKki7GTDeL6pN5zIMjulG+Uoy55ZjtxpSpXzHF0aEoa8g8QQxfeRCnRKnACo0x2TWltAVXjiExBJLU+TUBpEIbM3LuRvlOnnTWn/hZZq7d056YFIJyokjAZD6DBL4fGLrc3ze2IKaITj3TmWG5N/9uD/t3U0qf+F7XeNs0fPm1Lz7pm7dthxvRy4/fv8fQtqgIMmWVKxc9wmp0MUelyIN793nti6/h+4Z+d8mu2fHGN9/Kus+PHe25R4nEWmbHq11TUhSa1YWjLDf0vWOz3iEELBYVtjC5ShMBZRC2QihDNVuiTUHb9lTTKUPf4dyQBWW4CryR/kppLoAIAtd22JQQ5RZ9WQBQ1gVFKjL/W0r8EDgVZ99NBed7XuO/eTxKL/Er4c9zfXLGn/zR/weH7h3e+bmbPHpwSGg6fLtCYjOP3RhkSMiQSG5Ld3lKd3nK0O4YnGN9sOT04y+znNWUiz0qbfJnhCfqSrl2kwNgpthl3AojSv2q3CylGrPO8THpSkY2Zc3y6DMI0donbbg4Hs6G3hPpiHRIDFpOUcp8QGiCpwpPSpFExqvkvigURvMX/9v/I3/+3/8Pfsfn+2oU6zUv/Muf5dZyjo+XxIUhDBHfBnq1hwl/lsq9
SlG8htl7AzU9GDE/GmMPKQrB5NxQfWPDpZpz+tU/QlBLdOmR2iEXbyPnr/HkZCNBThTCStR+hdqvEHWBOpqC1khpQSiUrpC6ztgeVZMA587xoeHe5Tt8bbPjxnLCT798jb2DBfMf/3GKO3f4pZ/5eR6+8R5iu+X9n/8l1vMFB9eO2H/uBomYW4cu4HaXdJstb33hq3zty+9zVBR8dFJRfq893pTSA+DB+OeNEOJrwC3gzwB/ZHzYXyPrY/2WF00IgTEa4QU+ZDehJ7JgYsyChESp7EiRYhzpFHNmswnVbElRLzB2QI6Y6EIrSiuewPeREmUsUhti8ATX0TU7tk3D0Hc02zXeDTTbNa7v8EOHJLvkZOcXiFqAFAz9QNd2KJUwpiImaDYr+nY3Iu9gu7tkcA0+dNnyUOSysoiaJHOZN6lIFIlARKAppaGSBYUqsMriI1lbNMSn5anfifkGVBDIIFBBEkcnpfEWHYFV8QkF5IlIhpBZYk5claPJ9n7i6a9dBZYn/34FXJCgkgSpn1yTpyIWT3WvxNVbEBlgkn/+b16sUuYSnYijitb4uKtNB6MgmqwCpHI1JWcPesw0vqvI2/xOzHkIgd1qlcuOKbuwmCTw/TDyS0We6/EwcbWJV2VJUdeURYnVBrzBodBCU9dTQoTYdIR2gBgIcQAhiMIQpaEPgjAEhiHQDlnwRW46TDtk8fckSFIRtUdIxaZ1CGVYrTd4HzIg8TcFyquD0JPrALn6IFXWNDcWIUBj8tWTWXVI6bG98l3Ayb/X+f7Noxoi19aOw6iw5jYyGWaza4RbS+aHh+hqiiw1qHHtSYlSGmscs4kkTiSfKAy3jOFOs2P+6AGTeIC6to8Y2ysjZioDoJ6uwgw6S6MC1VXb5qrsLAUyyRFrkAFrIWaru8uLhq7tqScVkxkEF+jWHcF5dptL+q6lnEvqfYtS5qlrWEqZOneVTecJzddqpAcmEtePj7l+fPyhzPcHXzfF7LUdyfz7K2tWFQbKN15H7DqUeUTSjhQcIThCiITY40PPPe/YDR6lTqnErxPkjFMinfYg75GC42pNCSnQXci6/UcCeyMhS4daBGQoMe/eRK33UPocrR+DUCRdEkzi4uaaZt5xWTREErKomNy8w/z4gP2jW9TLY27evMYLH71GbAY2lwO+3bJtz+m7R3jv8K7HbRuGkzOGVYvb7uP9lCRXyO4UJX7rtf/b6vEKIV4Afhj4HHBtDMoAD8ml6N9yKCmZ1jV92zMkhxERLQUxk34AKI3CakVwDt/2TMqaW3de4OBon+ObL7B3dJOuTZj0NaTw7M3mTOsCW+Y+oNQaM5mhjMW5Budazk9OODk9xbkMVgluYHt5ztB3FKLDigEfBjbrDSElTFEhjWa33nBhzyjKGYv9OTEETu+9ycXj+1lSUQqG0LFtT/DRIYkUUkAyCFeTdMCXA5HITnQMeBay5lDuM7VT9qt9jLbshhW7VUM/qrv8Ts23iALbS2ynML0iykjQWcBEyhFdHBwElw0e8osSpc5CHx90MhjFfeMVyIoPhrNcCo4yGx1oobAStNJ5wSOI4qp0fZUR5AxBAGFEyl4hyJO4OsELpLx6rSt1IT6wueTf0dKQtB4fJwkhYI3FmhKtzHeapm+ds+9xzv3Q8+jdd5Ejm9ZIRaE1Iqbs+yz1ON951QsEVhuO94+Y7i043j9kb76klYrUtIhScfP683TDwCO1RsstXd+yWV/mfpKdEa1l5z1u53G9o2k8KQTWFxtkDKMphSQgGYQiJNgNDhcjMeWfpxByCyCbtcEHLvvVSAhQBmkLbDVhOl9kKcwrvvfYvijK8rvG8nyv8/2bx7Vtzx9474L5tGQy+ZMIHbh1e8Lzd0qKWlPtWbKzz0Aii+0bAou54M6tC5Ip+ZH9GbJtCA/uEj77cxx/7GPYV15A63yYVGPPVo24iTiqQqWxR5QCTw7RuR/8lC8fo3tiUDHESNs53nz9MY8frji8tuT4xj791nH+
1oqh6WguvsnQnnHnh15m//onMUajVT5kphCzFSb66c0w3rBSqawhkLKrz5M+9O/wfF+NRMLhGfC4EPERjBaUlUYOl+z97b+OE5bip17Cf+Y5nNswdBt8DLj+gm445+faDZ/dtfyQ+gL/abhPkJp/NUjek4J+tWEQDZBXp0QwUVnRcP6ZnvkfkigpMCj02T7zv/sfUXz+hyn0/0hhfo4kIkkJuiV8+S8MPHwhcLbfEQG7POToD/40N27d4vat2ywXC9Qf3FBPH3H3ncf8k7/7RfrVlpcvvs7RWSAOA6nr6c861p8/ZXcuaR/9NN5/mhh/Fun+FjKz4r/t+K4DrxBiCvwd4H+fUlr/pswsCfFvDvFCiP8a+K8BFpOSpnekKx1ekbjyAXwKSMhQ+VzKkRitqSY19WxGUdVoW2CKAlsWiCiz+8dIYxCjP69QGqQi+Nwn6dpd7s16R9e1BO8Y+gY/9BjlQAdS9ITgCTEhTYAgM9Cr7QDN0LVZI7drGNrdE+GNIXQMw0BI/ongxpW6SgJkymVR4th7kZpKlZS6xGqLURYx9r6VzPZsWkaa3svvdb6PlxPkCLwQcaQQ5RRzBErFJzdmGsvFOXMRWZjkCpmZxv99AEiSR96kr1CWSVzFz1xKCzF75YaY0IMb/TTHxwiR+7jwgV7gUwrGFajkKl9N4/MzvpWrEt6T73L8/ZR72WrcGD84b99p/E6scSkgOT++10SU+TPnbIgngI+rKoBEPKWEyOys4l3I7YY4Zs1Co2VCKo3QCpzKwRKBTwIXBUPIggA+JFxkNBGPiHAVeAMBSZ8CPiWarmcIIXsoa/sE2Pb0SPWvf9Sry//0cuV7NQuziGwUQnzCYf0uxve8xn/zMDExHSL1IFDlEqkktiwpKou2AmkEuWn9rfxupTRVVSAnNYvlHrb17IqCNgSKESl8NTNXBbo8W0/bJFfuXU/rMeMjxgMlH/i9GCNtN9C0A+t1y+VFQ1FWTGeOfu3YXPQMTUe/6vBdS+hD7utK9YE1PQb6fHfk/+JVuXt8o+NNmT6k+X7y+4ALgT4EXChxoSQKgYoC7xPx1EPocect/aahLzoGP/avmx1uu2XV9zwKkY1osekhKUpMyNiHPgZWMY52rnlJD1JhlSC2kdhmT3cTQK870rmjPA1Y3VOaHUkEooTOJy4uPRebSNPlY6XUBjNbYBdLympCZSum0ynLwwXnFzuGFGhcT9ysURdnuGFg1/b0FwPr8xXNpcq9+JBxEWexp+S3FkT6rgKvEMKMF+xvpJT+7vjjR0KIGymlB0KIG8DJv/GCpPRXgL8CUBcm/e2f+yqfeemYl68vsibv0JFi5qtJIRmGiO8TRgqqwjKdTTi+/RzHz93EFjXSRubXD3jp9/04oWvwZ++x7Xbs7WmqyQRTTxBFTUBwdv6Irmm4/+7bPHz/bXxwuKEDEtYIlMzG5bsuG6n70WQ8DAPJBzbnp3SbDUoZHt99DxJ0m3Nk8hAEKQhcP3B61uKip1yAqQSlEtRV5vuGrUWESNFIVG/Z3z/g9tEtqrJib7pEoDD6EoDnDqf88R9+Ducjf++z77wM/J+/l/n+6PNHqaw1SkUSPeAReCCQ0kBKHu9ahr7Du54UHVJllS1TG7QRSGLWtR1tAOUVOnuUcosx4kM2Ye+8wyc/9nSzsLjrsvSb0blEprTO11pJjDEjcjOXJuUYcK/KeBKyEtNIr8jUCYm2+blytiXHQJzLelqaLFiiLbUZy7bf3RC/I2vcmHRzuhhl7BQhxCfm9YPKHqEYAyoLmFgygGy12tH6gJJvc3F2iRGCSgm891yu17R9z+pyx7pr6UfQFhHCeodWWV4z98IjUUiEMkidEFITYsgbXArsvCOkhB9xFCPAdtyZ5fiHq9Jl+sD3ACnR944oOra7js2mwRaWqZ7mHqbMZfTfhgXd97zGf3OwSFISrCaVBXYxpSg0tszuP9pKlJWk6PG+JfiA7x2+cyhVsbzxKmLfU8w/gmod
870J83nN5PgQOcqnfhC1nGcmF+S/5SB4xdcVuUoDfEtwFsB60/KFr91ltWp48+snnJ/s2K3AXxj6neP07go/DFSiwIp9iFOENOPciievnUZufgyeGANu6LMdnjCAJIZA8B7n3Icy31ej94H3Lrase0UffhoXfx9GZs/0FBN9m0gp4D//OS7u/jrrT0Zms08T+477v/gFzu6+y+bt+4TxIDqViomQ/KkkWSfBPxgC/2wYsoxsyHvKhVKZ0vj3wX4pz6tKIIae8pv/PXrz99HyLkatSUSiAL9OPP5/OnZ/O6DuG3Q0WTe71OhSk0ReG0JV2OltVOHovcdt1lz/pa/zidcf8svO848GR9N5Lh82DD08Hs7Z+L/Hv5QnvKceo76lSfOvj+8G1SyA/xfwtZTS//UD//T3gP8c+Mvj9//pOz1XO3i+8s5jXjyaIW4sICXCuCkppTKwySd8BGUEplAU1jJZzJnuLZ8s+GI6Yf/mbYbdhrPL+ww+89uMLdDGgjaZQN92bLcb1qsL1mcnhOBxvs90k3mFtpoQ/HgjZv/GmCD5QFKJoQ0MbUNe6OcIwIqMZk7jSTIOgWbr6aND1AJKQSE1ygiiF4gBhJPoDqRTVKJiMZlRFCWVLUcxjnyKXUwsL16f8T//6nsA3fc630KKrAcrI2QgPWJEBSY8KfmxL5N5vMSsTJsdhbLd3NUJXlxRGbgKchlSnxCkGAkpZ1wuZkqY8z2ud3TrNh+sxg3LFBZTFCilsIUds9MRwSxGcQB48ngrZZbUVSpTBrSmID0J4lLlTV5eZV+jVJuSEqNGubnvNFF53AF+4XudcykEs6IEaRBS0Q+OGCIhxezdPArIZN5m9okWUtH3A0OKnJ1eMHQD07LgcDolxEC77Wi6PgNtvGMIATeW6kPnEMI/yaquesZSjNaOCGLMLisuJHrv//UtIZHX83h1vzXrfdrpTYxIXOcZBkc/uCzuMG6WKV0F7u864/2e1/i/9lGEyIILWmVjjcqgTHaGygc1NfLSR4P1EIjeI3VJWS8QM408eBmJQpuENolyWmWvafgAR/cpBesqDRZjapkPjjnwPo3R4700/n7fOx4+uuT8fMfJ4w2rs5aaKQs6hnZgddkRnUdXGmtroOCqpJY+8JQp1/hH9kDAuZ4UYv680oyBKvCX/rv/7kOZ76sRYmTVDoTk6OLzuPj70YKx9ZYIAUQaMPe+RDh9wGR6m/WuI7YN63cfsHnrPfqLNXGsIRghmQjJR5LAJ/glH2lcwF95bwNqFDJRXwf5jQ/UGESPlr+BQKBHetFV2ySmSPd5h0+Bicn7kZQCqSXSyBygo0dIjbILhJ7gY8APPdN3H3P0YIMPgTecZxsC627IVSZO8ELwtpS8q67wMd9+fDcZ708A/xnwJSHEF8af/SXyxfr/CiH+AvAu8B9+d5cogYgkAj4F+pgIEYbBI2XESIvWipRFoohEXNfi2oZqsUc5W+BPL9isT3DNFoTI4v/TJfX+EU5Y1mcPadue++98k9XFObvzs/FCCbTJSkYyAWFUbEKghcSqDNl62le8Kh+NJ8yU2HUD3gd8yC5KrRvwIesfW1NQlTpTd4NA+ITyAuGAIBE+kTpBt+0RQcDUo5RiMS/xbsH9B2u++u4DvvruBcDse51vqRXTgyWy0qBjLrcIP4J7Yu4R5WM4WmvKukLagsIajNUZszFu2F0fiCHR7AacC7lP3zvatmV1mcv4236D8w7ne7x39G3H+mKdQXJKI6ViuViwnC/RVlPUVeYXjzuJcx7vcsl/6DpSipgR0FKVJXVVZSee8WZBjcHWWIwtUNpSVYtMNdhc4LqGhw9PntA3vsM4AP7Y9zrnWikOJjOSzPzDtWhp+m6kq+WsKSsfKZLzuNaDj8RmgMGTfKJZN6y1ZmcLQoycbf//7b3Zk2VXdt7328OZ7pBTZU2oAlBoNNATm+ymSLbYoqaQZVO0NVh2hKwHhRxySI7wi/2oCP8F9oMfHCFZtqwH
hSRbskKWREk2NbAlShybbPbAxtCNuVBVWVU53+FMe/LD3udmAuoGGmwgqxC+HyJRWZlZmSf3PWevvdb61vfN6KxBjifsbG/TtD0h9bJNHwkqmY7uV8F7nB2sKaOnsnWRPe2CSMeos/WQUqGz6EttV8HkXGFSQJqBgfQZ7z1d13N6uqDoDFJmUQRGeIQI9L3hB8QPfY+/G85Y6kWDlhnLSuO6jDAtEaMMbzqM6/G+p6tPca5HypJsvIFQBTKrCBbak1jazUtPXniEdHi/gQ9RE16EVaxlOGgM/frUyo0jL+cOIAOTeQi+4yrnmZtbbIwKXn9J8OCo4ebNjuuffpUTK3jrKUHXCcp7An2aYYzEGiIhTybzzLQ3Oe+x1mK6muP9t7GmAzkCkROc55vf+B3+yT/7px/Jeq9+P4ijOhjgtxCi5uYzt/jxn/oJmrrhq7/8qxzt3+fIvMjSd+x40KMxrQt896Tm9t4Je4sW6z2vW8vfaTsqYqnch8DXTTxwOu+xqS3igksHTcHQ3RjWPMY+gRQhFmJSbSKEgPGxQtcLR699bNH0DttalqrDB09tapxZ4GyNdY6ldfyrtmXPOH7He06tpU9tIRcCXg58lh8MPwir+Zf5/kfYP/ID/px3fk9ixmXxtI64kElrdFxlSK0IOoD0BHzsqy7nbFy+zvTSVRbzhvnpEbZZkAlBXlSUG9uML13ndL7g5N495rMZb3/3JY4OD8iVQKs4VF3IFHg9BOsjSVPIVJ5U6eGIJ6RBQWb47Z0PzJuWZdPTdZ629VHKDY/MBEVRMK5KlHIIa8EEVA/SACaRh1tBM2uRHggGrWB7a0SeaQ6OGlSAn/3CE/zCN+69GEL4iR9mvZXWTC/vYEKTHoj4aATi6TiOjsTfUWcZeixQWUFZ5tFyTAZCsBgbWKbD0f7DI5q643j/iNnRKbPTU+7duUPftcyWpxjTY10MnsvlkoOHBwTnKYqCTGfcvHqdm1evkxUF5cYEIQXGRbeQxWLJYrGkbVtOjg9xzq3Kz1sbG+xsbaGVJM/jSdenTa4YTSgnm+T5iOnWdZTSyH4JtuXe3fuxBPv++Nr3WO8PvOaZVFyZbOCTV6j3nkOI7NmyQCqFzDKEzrCLlr6e423A1B1eQD2rkSGQB8EhsZR/2tdYPDeee5ZLO7ss6wbrYoA77WY44ygyRZ6VWGswnYEAVus4S46jDxL3PcwLlFTkOo+l6rTJrSZ5dVQxw3uCjQF52LyapuP4ZE5R9IAiy2KVRypB1/ffq0X8vfBD3+PvhjGWelajgqLUElNmSRxH412Ltcd419O1xwRvGU1HZKMthMgQssS2lnp2TD9vKEaOonJILXDWRpaw54ylP5D8hqC7wrBWYTUWN/ReY+AUTMYFz93aYXvS88+dZu+gIYze5MaPvgqiYNlfYV7nTL4yIj/I6HqF6RMnRvYxoOg8SuQGT2d6mnrOwcM36JoFnjGeApzk8tYW/+df/xv82b/4X33o630esYllCeJXEPwat57/o/ypP/8Fjg5OefE7/5i7D15g3jt8cNzwoMcTXO946XDJt+8e0ZloOP+d4HjNJ3KSjwcWG2LVyLvIY4CAGMRQECvDwJVxoPCpGe8IYvi6hPTveuVorac3HttaTGuZ6442OJbdEtfPcGaBtYaFdfwT1/ALIgZm530aCUuvdhbZ/sOr/341n4u3BQywf9rw+oMZXWvYGOVY51m2Fh8CWggyLcgzSVFm5EV2zgxZgcghyDgqZB3SWfCW5fyUo4d7NG2LaxdgW6oyYzIexQF1Z4jk2XjiXLWzVsSDWOaMX5M0V4d+TkiOFsEjgkeEGLSMcavegZAC6RXaa6QFYQKYaKaMS72HlF0PM6neB5wLNHXHbNYwX7QsW4Mx7nsv3u9mveVAuPCrU1/85SSImO3k5Qi8QxiHzDKUUJGklE7y1jjqpqNuWu6+fYf5bM7J4Snz0xlt3TCbzfDeRveaLKOqSqTSjKoxucwJwVPkJVpr
Lm3vMp1u4gjUTRNJSKk0Y5yls4au71jWNdaaVDYVZDqjKsr4etTxheudxXnHeMMwDZos83ThFCUVrj4hmJqT2cV7w4YATdvRWceibmiNwSuJlCVCK4SOzlpImWwPA9ZFHozzDkJkGIv0ud5bHIHeGLq2p+/6eEI3sQoRfEAgo2iCPxtJE1ImffJ0CPge5a8Qwqo/vMppQyovh5TyChAqbmk69ei1ztIsqcJ7cC4gVBr3utjlfgeyXDLZ0oxGCmT83YwxdE2XAu8C8EhVIjT4oGKGnmbSbWto6iV93SCkQ0qP6WusbZEq4FJrQ0jiM5J0mVdM/FXGG95Zjh5s1uK/ZrGY8/JLL/Dg4YL9hzPqZcve3Yd8+5v3OZIVJ17S2ZJGK6obmn7qsTZeo6NB4FG5QyhN1zU09YJlfcrxfEZbL7DB4kOBCBpczgco//+uIKVgPFJMxzq2n4LA96e8/coLnByf4LoFWjlkgBAkhVQUUlKoqPkeD4A6kbMTCy2kWzAEZNp7vYz3dywepAAchmPP2fHn7BY8rzfPqhfP8Nx5h7EtXXtI1xbofBspRrQnC+ZvPmB57xjXR1MLIyMbPRIlkya5AJLz0aAnHzkn7/0QXHjgNc7z9dcOePH2MZ9+YoMvP38Fax2v7J1Sd5Yqg81KMB0V7G6N2djcOKdlnAMjgs9wrcG2Ha5rwXW8/drL3Lv9evQfrSoUcP3KFpd2Nnl47x6HDx+ChCyLQTc6Wsg4xjr0E3UOgAuxdyITYcd7H3vAzqO8JQuGZd/RLuMsJTpDBEXWF5SmIjhL6E2UNGsswQWUi2UTnTazEMCa2Bu5e++Eu3cPefP2MfcPl/EB+xAQRMDIWNK3wZ4LvAJEhpAZ5SQjkx7pPcpZEBKvcoKXK9Widtmwt/eQw8MjfukrX+H+3j2Wy5am6SLrPK8oipyb168wGY/ZvnSFzZ1LBO+xXY8AiryMJhQyWivuH+3z7e++gLE9k+1N8qqgw9L6nkXfcHx6jOn6FcudIMhUjrGW2XJBbw2zxSlt33L5+tNcf1KhdIHSDSHA4vAO7fKY/Yf3LkynGeLD11nH/f1Djk5nzJ3h0DaovGBrezP2t4syzpm3liad4p2PvFTnDM73KAJ16q32IcoAns4XiAeHdH3PYr7EOoft4v2lhabMS0QQtKIhiIDM8lg9sg5nTAzSccR3BescvutjCVkMvpZx3AmpCGSrMrkUgmo0Is8y8nJEORojk/exMT4GoRBJh48q9k62NU9+ZkyRVcwOoO8dy3mN6TqcO8bau+i8YOf6c+TVhHresDie463Hdhbb9pw+eIBZNhgTvbVV6WnrLZyvAKKvdjqQKKXIsiK1PuKeEfxg5B7VvoCVMhvJjfD2W2/yv/61v8Lbd+5yb69hvrD80r/IeeGrOUJPoLhJNplQ/fEfJfxkyayKLlIitJjuAWDQ1QiZ59TLE2azfU5PT3j59pssFwuMq3A+QzBCMeGjDrx5LrnxRMXl7THRk01RH7zFP/xrf4W2NywP9plkWRqyE1zKc3ayaO+6VRZsVRUuFLjEyLYpSRoY2t47vHfpoJj68ykbHvr1nPs33iXSGZwdflKglon3YbwnmI5Fc8jx4bcoygcgPk/w1zl66S5v/tPfZG//iP60wTtwPu6pUV1bprxNENJIrA9xWmQIvu+FCw+8AMvWsmwtbe+pMo2RgkzF8Y+VVV06+QQh0ok6EkCEUKu+lNMaJ+Iv7NqG0NSUVUmRSYTU5FmO1IJM69UJJK596nOF1CcJIZVc4/XF4fQzrdSwGluJJCNJ1FVV6UJFCigKhQoK7z0eGdV+EglrkHcbyC5DtisIWOsxNpIgnIvasR8WgiTZow4P3pn8YvCRzOZsXAuVKgDOOvAWbz3eBZz1WOMwfZT27LouktKIzkdlVVCVJdPpBhvTCds722xf2o295OQkUmaRUIUXBA91V59lCpmK/sqZRmqFkCKtg0siK1FowCfBgd46emOp246mrdk0
DlQWlbGEIoQ4wG9diDPCF4i4aQRaY1m2HU2w9MGhvcOF+MANle8oxRn/HIjFLpXUzkhNIVVboLeWtuvoe4NNjlo+af6G8w/7kC0MY0uR7QMhHmLOZEzSNYe4oZyxU4a3lO2KSAaTMo7vDW/DaMtqicMZH+JRwXlDZ5YEEWiRWCHQlGgvcD4+X9J7kBohc6xd0rUtzjhMY7BtT991WNPT94G893TdknZxjHcdmR4jhSL4qLvtszRTO5QZhcR1cZ46kuhSLqbjyNtQlHZdR/3wAcv79+iXLdZYZidj+uWETHkm1Snlpkd0DZnsIRiscQRn6eoaHwzSaUQmo/9zvaCua2wdcI2Io5EhxHnykL+rFP7hQwjIMklRDEpeknrWcrp/FA9l3pFriRYShUTjsfUS19TkQjDKNS49D8MzNPAJAiEeTH3a6/0wrnhufGqoIhIzYp3Htg7tEpoFJgTq9BwJOBvtCwFrDIvZMbORJs9OwIxZHJ1yun/K4niJd+eeFVKwTcnTO95PrYQQBv29749HEngHCCRC5GQqsD0eUWYGJaBpOqyxNHXHdOm48lRP0IGtPrISp9uXufW5L9LMjrn3yjdYnh5i2zm2rZP/ZZxsyFR8kUejgo3pJBbknQGRTksk1Rch0mhTJIVonceNJZWPhJSgFVpAm2mC1VzaLtjcUEilUEXM5rYmJaVUuJBGSLwElYMMK1MBB5zMOzonqDYNWQYbmxOeVIrZ0rBR3sfYDylYCIlUBTIEQsocBRJrDPXimL5pefDyHse3DxkVGZuTCqEUPitA52yGJ5jK6/TGMxpvc1lVfPln/jDLekE1KsnLnCrP2RyNyLRmYzIlz3KqqqIoqlSSiXSeIouGAc7EEumlvQ2OuyOavqXavYQelWyenLA4PmL//kMe3tvDuYASGRJFno0o8jFae5QqMdbQ9w7nBTeefpbf8zN/iCwvKfIxBGiPH2DqGV//2q/w8P5bH1oV4f1gERwgOAieA28xeHrAOc98tqRpLCprkVrTLhpaa2M2xFCucgQcySsKiA83CBbLms7EzcgYk0728WuW9RIvBhnKZIzhA9J5glJkZYn3WbRmTKpwMROIG2UMrsT9wmsgRPKajiMbWVIBG3yakTKVtaOLjlyN2qh0n/2gbd4PFy+/8DL/+1/9G4yuTrn0B28xurzBj1Q/ylb+DKoLhNqBUrSdp3dLDh484Oj+fUznqRexeiDjFoFvFnRhwWxxh6O3fpVqssknv/TH2bzyFPXeSzQPv0s+ucz0ic+hshG62kTIgtnrM+ZvzNPhR6JLze7nt6mulEQTQclTSvIXRiPujcf83c7xLeux3tD2c6zsY/vAjvj0L27wxReXZM/D7POX6cwphwev0Zue4/pTNGZCyJaE4k2qOuP5gx8l7zQuu4NXR0gn0bZDIPhfPsJ1D4DB0+FwLuC9wEpPNpFor6hc1M7PdUamFXa2zy//g7+H6Xp2bM2PXN3G+hhUhUyqWyHQtTXu3CHThYDxMauMydjAO4gHWOM81caUL//pP8XNTz1H8YtfofgX/5LXneMfes9xykhT8QEfYHHc8G//6TeZjCue2HiVjXLK62+8xUt3D5i3HY0HpF7J6IZwJizjh6CfepbBi+gC9z54pIE3UvAVUgZGRYYUsfRlbXxr2h4vCxa1p2rBukgEKkZjdq7dYFlVPHj7VcJijvUBY3qcLVYFVSXjmEOeRWs6Zy02RHlEPwSFEFI5OWBM9CWNeqyKM7cR0LGIhJaSTCtKXaB1GftdZYWUkkITHUa8wHuBD2Kl5qZUVLvxIdC0BpSm6xwITVkWKK2YTArKjPcV2P5gSxytDcEjhEKJDG8FfWtply2H9w7Ye+UOG5MR4dImUit8WUKeo3eX6O0OJxR5XqGygluf+CTeO7Z3N9nYmlBlGZtVmfx5szMxiHTGHk6WRbIRNJ3FtAaL4fKVSyz7jnx3B1VVZJkk14GubVejQgqNFBqlMpTOUEGgVY6xhjwv
0W3L5vYuNz/xLGUxoiwmURN7dglbL7h35/V3zF1+1HDAEsGSwCIV9x2RXNO2PcoGlLFIqejbHrsKgAPOJAXP96lA0PWGztaxhLEqn6fPmR7fJMGQlHi6EGLGICUqyxBexfJxCHjrz0p0pDRgoPMz9DFFMnVXKD3MTuvk7RxLqkGeddDEuf8eFR7sPWR2fMT46U0+9dM929UVnh09hy4KgnJIG92qeucRrmc5nzE7OqRrPYu5QwRJVYzQSoHpcGIB9QEnR68w3tjhxqd/mvHWZeqj28ze/jrl9lPo0S6q3CDzEqEs871Tjl4+IgRBQJJNcqbPVBSXMwYp0S0p+VJecL8o+FdKI4TBBxcZt94hvMDalsvf2ee5O5oTeZ3Dpzvqvub44JC6abl3eIuTpURvdOQ7R+x2W3y+foKtfoxjTmCGspAZy/eevv3wEIga95aADQ6Xkj6Vx52gIEcRJxLyPMN1S17/5tcR3jP2htG0ihUqH6toWR5902vlsEZGr3MfsD7QeX/OXnZF1cF5T2c9G5sbfOnLP8Vnv/x7GT+4z+SXf5XfsoZfdh4TouGLDyS2v6erLa+/eI9cSpbj+2xnGa/Pl7wxW2Kcw4bIl4g9YhGfzqGsvVqAc+XsIa1+DzzSwPvgpObXXtljXGqubpVMJiVxUN+TlxXVdJONrUvcfOYpti9fAwlH+/t09ZzF0SHtYob30b9Vqwx0HvV5hYzzkUKAFOR5TjEaYU00vg7e41wf592I2bFzfkVq0sqdcUyGnkF6IKKjSGRAj6ocoSRKxxI0KQMJISBFVGUSyYLKAcZH4wLnHCZ0FAcz8kIjFQgVvXM/zDTBGsPDvT26LqprBRdwfQxs92/foZ7XvP36Q/YfLNie9fR1H8vBRWTdtrzOfFGQbV1idPNWpMuncZXjg4fMTu4zKiuaza14qLApJpgeZ6Pgvut7BIKyrCLbGIVAsbe/x+H+QzrbM8kgcxWmnuP6hmD7pDylohsS0BvDYrmIr63UOOdWWVu9WLJ/7yFFOWI06lFSUWHI9JmAwUXBh8DSGnoPHrl6NEOIBzvnAtJ6pJRYYwlhODunE5qMbkwrYp9IAiNCRNakSONXiaEs02ynzvMYXFPQjX3ZWLYPPhKrzpypwAu3Oq37oTQ2iD0M2asUZ7rAUq2sIgcG0eDR7L0jijV4vEjl7wtc8/MIRKUwszAcf/UQ/7an+7EF8llHVy84eBAVEvPxGCkVy8ZiKak3lhxeOyHIQCHmKKGQ+S5K36I8WrI1f45MlNTNiMXpnBe/e4+XfuNVro2O+OLv1FS6pMpGKKEJd5YU92o8gl5K2Jpgf+IPYW7cQooSKUqON7b4+k//Yfb2jpl/taO4Y3H9C/ju66vyfk/g17t9DkPHzvImV2pPZwNdHy1On7wpuZVJ7k5qXtu8z6kL2KdzynaE/84pfu8tPpFV/FgxjRryHyHirRrbD1kuyIZWmxYIL9BOI5DkhSYvI3kzjnSmkUbr0WkESKh47wbvkbLCG52edY9znraPTmrWnZWYY3VB4YCqcOx/49f5zul91JvfRY0y3naSS96jA6vM2vooJOPDZVz4CQqR8yPuWzzLHp/+7Gf4wqc/xf7+IV/9t7/C/GSGS5l2DAupFB6GqtRw96XFADr7/dWrHmngvXu0YO9kyZWtEf/xTz3D5c0RmXQo4ZnuXGb35i2mW7vc+vRzbOzsMp+1PNy7R98sqE/3MW2Ndy4GiyxH5gWZzmOvWCZPRikpi4JyMqHvDQYZrQkbj/cm9cAiK9P00SRByVjGs0nxJQZeey5DgDxTTMfFqi8cCHTGRUUgEWX+kqo8AZib6Lpk+jgDqTuH80dkmWK6mVOOFNb1589QPzRMb7j39tssZw1t09E3JooxLBvefu0Oy3nNg4ctxyc9u4WiO5ihVRwkF0pzshgzvt+w++zz3HriFlJHTV/b95wcPGS5PGQ82WB57ToCRbPsMb2jW5zS1jNs39EuZggE1WiDLCuo
qjFVOeHg5ICH9+7GCkQRqNyIvm5xbY03HWow9/aRlNb1HafzU7TSjPLRikBBCCxmC/Zu36Uox4ymPXmW8cSOZjSWqQ//oS3p+8KHwMIY+kDS8E2n4xAwvQHhEDKOzkVv2BR4BTHgqhxSP1UOc8o6lm+DHOgJyR9ZxDWKs8FqFSSlGr5d8nmWEpEOKXLIeFMpz3sfmdSJ6T98r/hnZEUPKkzxLZEDCYm0dlbys2kk8CLJbO9G7IwH+tOew39zn3ayoJmeIp9xtMs59+/exodAtbmJ1BltHTCMWG4tefCpQ5y2aAMyaLx+nqB+mp07gfyORwvLcjFDHZ3w29+6zf/7z1/gc1JzrfgW20SVsSyeRKhCwEiwWsKVXezPXqfvNlFyilKS/Z1dfuWP/An2Dhyz/YxyJujnf4uu+2Z67QQdnq/Ue/xSs8eX58/zHy7TTH0vCULx6VuSJ24I/nW55JfGb9NLybdlQWjG+DeP8Ldf5T/ausQXr9wgl+9f/vzhkO5BpaP4jlYIKxC5ACegk1E7vtKU4+yMwuCSTaj15KUiy2WSOI0VlTKDYB0yOAQeZyxd3cUKpY2Z71AzlkogM4VUnr1f+woPvirploZukuFDxlUfuBygd1E7wqbg27pnOer+HJWv+En3P/Mlf5vux79A89/8JV5+6Tvsv/k2d/1tOpvaOOfCrDsXgFOetooR3fLk+67WIw288cATKf+xKJP0hIUgyzMm0w0m0wl5nsqUfcPy9BRnOpyN8ntCZaisRBQGhU+qSFkUKUiazVlRMQoCIVvqukPIQFFW4Au8t9GSMCkIibSpxcxWoHWW2HRxwc8CLRhr0wY1WOj5ZKPHmfBGyjCMja4xbdNTL1uKXDPKcySCvnMIEfugN69fwjnPr79174de364zvPb6Ps2ypW8NfWdp64626Xh40tHUhnnnaF1gYTxHnUfLgLIBqcDO5jTqkPzyDGsM3hoe7N1lNj9hfnrAYnHMaDzmdLZECIWNxjn0zQLbLaMiVt/FjVyNCGToHDQiijl4gbOek4MTZvMFtuswTcvxUfS4tKYn+GgjI0VACJ98RqO7T29anLMs5sc83LtNXowYT5cUecFWtsM4G0dFrkeQfimlyPKc4FNJNqahZyztYXNZZZQ6ZrdKIZQ6F3AFSusVES2I8I6Hewi8Is2cCxlHOwSpzZECoUxeyC4F+oFYsipNE4l4cQOVq8B7ZvqeyIYqGsErKdFplGb4epUODEJ81Jv894eUO+jsKUaZ4Elt2FQZYtHz4P4hTd0znWxhnaVue1zoKKRnOvUwDtj8Mk4aZDgG5zhZeuatxx1LeqPpvGd5OkdySrOo6XrPcnPEwTNP46RGzA1VH0cNvXV0peJkK0de2mSiOvRij7zwyGJECAtC9jahtIinb6D6Ceq2QjUZigqVPYMkw7n7uDDndHnMvb1XUZkjK0eorKI2hoPjI7ItxSfHTzHnGneloBUGm5yC5kXG/u4OpfowG1jfGyEIQogKZsEPUq5yRZQdWj5DJVaqaKoiCkXQAlkoRBY1FcTAISg1wcaqjxQBYTR++FyfqisuEFzqDedy4BbGDFUIQpZBiHwHEQJagEjWnHHM07Ch7lFS0ivLiRrRu5Zu7w7m6IBpptgZVzTGYWwayxyU6NLvPgTeEMLKLOPkPdbq0fZ4E0QIqGDRwcbyg4DxeMzNp55kNN1iVI1QUjI/fMid116LClGjMQRBNtpE5SPUaIJ0HbmW5GUkgoSsAKmZVptMhOD0+JjZ6RyAzZ1LFEXO7OSI+ekxyguyzKVSauwxV0VJWVVYZ2mahhB8MmOIGfLJfBHnz4oMIWQqWwSEUsgwyMMpXAicLHqOZg2nJ0sOH87YGJdM8hI1kixMz1IKrmxu89x/8CRSCP7+r/2jH3pdD4+X/B9/7zdif8QFbDrt+dQPD96v5oxr7zk2BiUEpZAoacnru+i9I5qy4tbihLpt+KVf/AXeeusNZvM580VNlmdUVYXWGaNq
i0wVSBlQMpDnGdPxhLIoQW8zFRo1LtHZGKdqRMgxTcsrr73M6XwWmZjO0nc989NlymhjxlsrmYJM7LMEoDU91jneeuMF9h7cJtcl0+llxqMJo9/3JcbiGfq65qIjrxCCalQilUw6uUMlI2WPg3mDlPGAKKOASdSiVrF9IaOtoRAClalIzAthlU0Ogff8ZhYPhmeym0P1Oma18bBirQXORh5sap+sCCKIyCxP2bRSMWsfBgCKIidL/fpMq9TTikzSPM/JstjueVTkKp1/gWr6F7lRdvxnl3+FJ6pjXr89498sv8aTN67whc9+nmVT8yu//ZscnRzy+U81fPJWh6qeJCt/hkBL738Z6w/42msNL3x3TjA5s7rCqI47L7/CKL/L4b2HdFby8JlP8LX/+r9kq9rgc680XDq29Isas6g5vTLirS/uoiuHzN7AvvEVNi/9OPryFGFfIxd/m2LaU/4nf5rSfhb5/0jk4RQdblFmfwZBRV//A6z5Fm++/SLL+gHXbtzkD/7sH2O8uclbd+/yjVff4NYnpvylyZ/hbV3wt73gnjmmNzXGGu7uXuLXv/R70HkOf/2vfmTrHoIg+AzvsjTRIVErlrtYFUviJEX0ks6K9ByUSfZVidXsuVRx2oRSRT9zLVFKEFyg6GOw7ZseZ3z0/e18DNiptRfSfLsSnjyL97g3PSEEtAed+rZRY/yEK+rvo5Rib7pkWd7Eze5i/6+/xXLZ8dQo4/KNq9RNT2dSy3EgMCaDEJ8mC1aKgATeuvse9+lH9kp8APgAXe9oe0OeK7JBxzYJ5VvT4xG09YJ2MSOUI4o8T9lnzFSlzlEyvnhKxY8FlYNSiEQI0XmNynKEdGR5js5ylM5ihuEiQzO2r5JjUhqfCASUVCuylBBxNME7T5ABqeNpbFWEOFd2GP5wNvaQu87SNIZMStrWoOUwDwZ5UZAXefKQ/eFhjOP+wxnDdIpDYFEpEY8MZ00MkhZoQjwFeqKriOktKrTUXUvfd7RtzdHhAQ8f3Ge2qFnWbZJwLNA6Y2PqohWfFigFVVmhVQ5CE4JCyCyJ9+eorCArKnTX07eOxbzBOYtN5greD+SftIL+HJUhBYlh1KDvWjpjyLMS6RXSGWzfQhgqFReIob+q1MoEwnPO6zat/TCGJlWGkMkwYgi8Mup3R2OJGHiFEKgQVipc7xYFGdogQoiVzvWKZOU8QsT+6zB6JNK/UYnMcL5fNZhRDG/x+/uzLFhKlDr7XMxszsrdH8Cd6MOHKJBqG6VaxqpkKjW2tZycLNjd2ULIDKmydKCL8otKW3IEVT+BoNB9lGjMG4eax/vIeTCipunnhDDDuJ6AwBYF9e4u+WSb7qSlV5YuX9KrBc3OiPrKZXTR081exXYLnGnBWqTvKMWMkerJNxsy2eGnHqc10udINUEwQmdTpJhgTM/h8R6jzTEyy8mKEa2xnMyXuPmE6XzCWEe+CF0AlRPGY+zGhOX2Zgy8HzG8J5JKSamgSEpSA9coVWq8CwSVyHuCJKBDanecvQkE+Bh4RaYQOmoLCB0zXB8EQrmocJi8z1FEro0Ncd+TLjqteYmUUe0qJH2CQbtbSofK9pFK0lUFJ1WO7zvsg/v0xpNJIM+iHoMQOB/lWpEiCuIIsZoxDt5H1cXw3sfOxyLwzhvDv3vpAdMq48dubfHstSknhwe8+u1vovISXX4HpObw/j1OD/YZT6bkro1yaX6YdXQo4VBlgSrGKJ2hx5tIndP2htYYstEW15/5FNb0NLNjFsslCMV0ukXTNNR1NA0vqzFaK3TaWKSMwcWnXq/3MYPsux4hJcb71eaptFr1H0WUHsI7j3Y9VeiphUOrQNN1fOuFuwghmHtoA5Rlzmh01jf+YRECtCagVSwLlllJVo7ROmcy2UbpDNv3sWwffDRM8B7btdjgycdTytEYK0v27r7FYrHg6PiY2XxJnxjgQUS/I4hD7yIE6uUSYxqm4ylVXpHrku3ty1y/fpNqe5Nqa4Pp
1g6jzR0W8zm9mjDZ22O2WDBfLKJwvemizZwzkeWZgslgfQZx5jW2eOJITFVUPHH5GhvTDa7s7rK9s814POIim7xCCLIyQ2VqNfbTdfExG2Y6pYynZCEjy/hsDEekAwcIlYTblaQo8nQAhKG/OvSOffIcHJ7zuH/JocuxKilHzoKj69p4H6cs16t4ndEjQaSDg1wRsYYgOlg/DmYBg0ejFAKdFZHVn7JhrT/6sub3g7Mv0rV/jQcu8I/kQzYKhxrnyPISd0+O+Bdf/TVGueb5p29SffIW+/URv/7iDG8rbPc2AUvQGwSRUfdLpk/+Dsr0qOUSn/ccfmIfOa6p3w7olwomdc/NF95gY3qE6gvaXNFXc7qwRLg5O985ROeBbGeMr27hfIVdzNgMU35v8Sc46nvs65AfvczR3QMOlCTwkK7/+2g55ub1m2yOP8vx6bfZP/5N2r6maTryoifYAhU2+PZLb/Otr/86c65yX/wxOn0ZeeuPUv7Fz7O1u8vNy9sfeY/Xu0A7t9TSrLgBSgi0AoWgTLTKKKpj8VVGVuhIvspllCYduh5iIKvGQ2DseUetd1EodKWjSMmsQ3aOTJ4dMlePuk+ztS4K1Hjr6dse7wKmMXjjVwTkWAlKbl+1QHXxWTEmagac1h026Qh4EQgqENLYKs5yJuYRW2F5/v6OaI9F4G2N49X7czIluLFd8szuiGax4OG9OwilceSRIbicY5oa5Q2mKpCDWbsAZIimCiH6i6o8J6/GqKygCzXOeGResXGpwvYd9eyUruvJlKQoK6z1if4vyLKCPM8QRIk2CegsI4RA38eNzvv44pAo9IO4gAwxWz8r3UVDgqGUrnEoBV1nOdhf0pvAoYPFR1CXC0DvQzxQqljOHFVjiqJiZ+cqWVZSty19b1bZpnOWnnlkqlab6PEUJzWnx0fMFzPqZZ1E+s/KnD71bF3KmJu+o6nnKKkwfVRMGo+mbG1doticUmxOKScblNNtlosld+4eYKwCfULvj8FbsB0hOGzf4L1dMT0HQhCwyv6ixrBkVI3Ynm6yuRGFPMbjEfmqMnIxEAJ0pohOGQLZy9iXhUSWir3YIfiSHJ4gHZIdZ2Ws5KijM43W+h19shXbfmCFvvMqVmNw50epjDGxUuN9lJAMAXl+nGlgLZ/zMV5xGlZfEne3oRAhpIyTBOnPKKzx/u4sHxVC2MOaOadB8o1lQWZyPuk/wQ2tOKmX3Ds6Yndzwo8/+zTXd3Z4+NKIN/aOaeuOxckxAY8cV4isYHunY2P7HqKbI3hAKBzLqxq/Cf1mQGaaords3ztkOu0Rm1uYPMfkLdY34C2jBz1ZLlHTHcJ0k+BzfNdSqpJPZD/GJWd4+eC7HN9+QHey4FhKnJ9j3NcQYsLW5o/wxO5PYv0xe4e/inE9po+yssFrpCi5s3fEd1/+Gs4/i+dnYTQh+8Jn0L//WUYBdoIn+4jr/sGDaT197mLQFBKlApkmygAnLkLk8cVDpfMBGWJZOejzzYnhiBlVoSSCICKhTGYaOS0iCdYJhHaoXCLz8/eb4KzPEg/qzjjyOsoMd/NIAvUpKFvjoyBQCETjo0jcMsbRO8uyN5GHJMW5ak48RHgTuUY+CXkoFTXy3w+PReA9j5V27GpcxNKZPjbKo6ckwTtc3yRJu7hZZHlGpjWu7zg63EdnORtBkpUV3XJJ3zRYa+m6Htv3NPMTuuWCLm17bdvjbCwJWuuSRV18Q8SAEoBMKZQQST84zuXG2WAwfR9JSOlrB4aodZ43Hy7ZO1yybA2nC4O1gTYa0/ADe7l8QAggV7EXp3UkMvTWxIOJyinyCqUrvAfrLb2NG3M22sB5x6iqyIqCZW/57htvUddLlk33zp8gkji4lLgQEKln6K2l61pOZicIITl+uM+0mFA2DWXT4AKYEOj7np3tHZSUXLlyjaZtiTlstCi0rj/LeFPncBhXGUa3nBc4B3mWszPZpCpKtCpplm1kEl9gs1FISV4UZx+Qg0U5
sQUykO5Iwi1CnJ28Q0gUw/i10couBt9htOes9BsPgE6IVQUg0SNW/19dU2Ik6yxjRFLMsjaJEJyNyIXzJJh3BM6oGAakDcaB91Ee0nukjCSvXgici04571dq+6gg5FOo4qeRsgPxbfBLrrUNn1nMOZCO17NoHGBev0O3d4iYK/KiIASJM5IgPXrqkXmg2ugoJga0QpiOUFiarMNoi1fxAJVVOePrU0aTEZ05prMGVWjKKmMhJuyLDaQMTN0h4XQfPZKMKQlZgS4qikyyvVFy9VKJ292kuXKVvqmZHR+QScXGTs6l6wWLy89y6XN/lCqrODo5wCznbE0X7F5q6eoFr74CG9OGz3/mDUZbLQ+KkpM7Gblc8lCeoi/gIQgh6hcM5L7g4l4eBHTK46QgyzSZio1Y0zmE89FTWAtUJlE6tgC8ixrM2ilkEAQTs1IVHJk2gMD1PpaUxSDgwkoERgypsxQrspaSEulCtIw0cSLCA9548jpJqqZiUm8sfWfJvMP3Kv4eib3v0yiT8AGZxaqmszGQK6XwPrzvufOxC7wuxPEG56M2sg+OZrHEWBtlGaUiOItrl5x5QQmknpLLjK5rOT7aR+kMLyTVaExTL+naJW3TcXIyw5iednaM7Vp6E+UHvQdnYo/MJP3botBRTCOEmP0CSiWFizIHOcI6Q13XOGep646+tyujcSkleZbTWc/Ld2e8eGe+6qed35c+qkdCCEGpFXmmyHQMZU3fIXVBlpWU5YQsH6F1gfGWxna4EOhcFOWXwiGF53R2xHdffJWubZgv6+TOktZeyqjqIjU2+DhuZC3OWtqm5uDoANMbHty5S+E0o80Nqs0pQUtCHmdWr125yrVr18m0ivaBSqDzWMnwyT9YIJOECSsig3ND5QGMiQ8AJjIXM2lZnNR0TX+hJB8hBGVVpb+A6vWqxDwE3kEvP/4u8f1IcAISC1RrhU4M4oG3oHUUkx/YkwBSpDbBuY8NGCYfhrKxVooyHQqMtatSeG9MKl7He3Ml9j8cBkIghDRO56KlWhAQHDgZD3RSxjleKc0HsQX80CH1p8nKv4AQ+wS3hwhznm6WfOn0mJergjtbY4TzdC+8QuMDXPkExc5NpC4QogIVyLZBlTCe9JSjHvISvMdmHbY4pNMWp2LZPZuUbD69QzXOmN9+g3p+wNb2E4y3nmCuL3Gn+AzeW6YHv4if3aHckuzICsEGWl+mVHDl0oieEWK5iz19isXsBNMsUDJj52rJ9WdGdNd/lPlTz6H2DnnwC7/FvJ7zk38Inn4+cHJ8glKBK7tL/vOfe5GrV+/zb/ee4juv7JDnD7hTvrHavz46CIKPbGZP7PM7F5MkIWIpTEmYaEWhY8JkGkNQgt7GVm4xzihGOmqXm3iwy5GoEDX+sQHVewqXRI9SouRDiGRWCTJLAVglAwspo4a2ADWKY0xqUkQWdPoabz22tmdlaR/oW0vXGqz3aBftOU3r8SbQG0fbRcOHwkczE9uZlbf8oCH9XnjsAq8U0RxcIiPDOcRSRZACJYc+fKyn+yBXM4fASgovyug5muUS5zxdu6Tv6pj9OIPwLvYP01D2u0sc3nucE1jrUDLmWUoEEGHV2xpE0iNpIPnbBggIFq3lpO7jZqfj7N2itbgfzJ7uQ8WwkUqpCERCFYhUZnF4a2Kp2LkozUaS0hxKmWLoDfb0fb8i4Ig0NiKSib0QUdVIKh3HaGxJprJV+dE7i+lbXK8JvUSSoUqFUFHMIQgPxuC6gJeC0CuInAiCiGQj1LnbNQScMVjr6HtH31kkklwVDBrbg4zhRULIWH0ZDgdSS7QbWJuJlX1OcYdEIBvE3aOwvlgFXKkGwt85TVrOE6zOOr+rsaAVBy2xkZNjUTi3Ft5HAtt5ybvhtfVDxjJc2/ld5Fw7bcBwXc67s+v88Jf2B4KYCuQzEqFKcDdRQlBcfoJy8xKbwXHNtGTesyR6aLfCR8qhhKyI4g06B5VF0ZfJqMILhWsbJBp5fIyY
BXaXgbEMXBeOENrV/L5UGcErnBFo2XApfwvomYweUopjpNrBWIPoGsL8CBMC2i6oRMtm6bm8kzPOR0i7g5AFYqdiPlUI5dhuG5TtGZUZRSjojOFk7pD5hOtP3WD36gaUU3pVIQDtLVXbsdHVH3ngdc4xWy7jPTyMyAWHCJao+Bd9cWtjKZsuCbzE2XSTxec8r6K4RiwNRz/jjAyJBCvAC6QW5GXcB0IqLQo9VIVAapH4E6m6NARgMWjxJ+KVC6vPBRdwbUy2BnMF0ztMZ7HB07pYHbJdjC3GeDoTA28+BF5jU5X2rBX2XnjMAq8gV5oqLyi1JgtxEUOR40NkFxM8SkYzZCUGXWUNISoBOePBSqxx3L9zL8p7+RZ8G6XvVI4YMrNUYtNa4l1IJygfXUmMw1uB7+MNUpTRGD4kC6gYwkT0KjUdwdroUKEUbx4t+K3XjonPYiwlLlr73r/6R4BAMpmRGXmWY9GEkCOFxrQtbVA42aKkpPOOhe2jwlGmUok0njMi8azB9G28qUSUIJRZDkLFWWqVUZQjsiyn0BI/KtFSM9IFo6LCmY5meUKlG7SaUW5M2ZqUoOB0WdP1luXBEfXBIQGJU0kPuChBa8pqRDmaROavUvjgmZ+c0jYNi8WS+cmc0WjKk089S1VWZJWkzHLyCyb6KKWYbEzo+x7rDDKV0Ia+a8wMYpDzQ1slgHJqFeBC4gxopVPZN+CxaVzNcS7Gcr5esuqSpYDpnIt9cD+MA0VFIDjLeAczCh/i+4GzXjSc2fwN7PBYRlTEuft0f/uAC/EgK4TAuou/1wfI5w35n1sgqxLp/zglhklesalzPvnqa+z86m8w855v7+xwWhSQGySnqCKjGpWRATtRyFxy9dIlru5u0y9PmasR7eKU2W8/xB5avviW49O5pxNLTut7tKJC6hFFVeDNhOUhTLbf5A/c+Hl0tmA8PSZzLfo0Z3n6BH4+wzx4Desdlau54ixblwxPT8Y4xvThGr0quX39Oi9uVFw9uMOP/c63kQ7kE7uEsMP9431eP55TTT7Jz/3Z6+R5yfF4mxOjqX1LHpZcOz3lMw8PUP7Dsxr9Xmi6ju/cvo0aSFzpZhSrg2H8n0wtEzjHHxDxcD1MjAz3cExvxIoRPXxjIc/9gOFj506DqzLz+R987o/VqVecfZ8hiK8+var0DJWgsJoOiWpv7zqADlMX4fwT+f3xmAXeqCbS20DmVu1xtBR4JD64tIEMGQCxqC/kWd8qiSs45+lMh/UWQYcIbfQQLWRanLQ8cV+L74qwYrnFoBXwbnAViSIIw88dkg3no4SZMZbWK3ovWLSOo8WjyXDfjQDxJC6jJ4gWWQxcPhIKhPcEITDeYqxJvqJZDLwpS3LOp008GUoLEX18q1HsjQWJUrEsr7RGkYMKaCHJZMx6vXcY28dheC/QWMosnkiX0tNjsd2S+vQIFwRGKIKQUFWg89g/d5E9rrTCh8B8dkJT1yzmC2bHM7xzONMT8jxKS6p00r3A9R4EL6SzyCR3GR/Is8DrRSyH42NtQYZ4lDufWQ6SjcMpPaQM1Af/3r/P+U1jMENI5A8p5Uooyzt3Fvi9j+IvA2ktkbLioTFtTEOlY/gcaeMRaQNMlaDwg+48HxGykWV8vUaORni7QxYEOmmH50XO1EWz9UYKTrWiFJ4imNh1VxlChVWPUCtNnpV43SFFhvAZ4jRDHuRsOcXNSeC4cBy5BmMl3ucQCkIoCEGjcIzVIZlekIkapRxIi/UBbw1mcYLzBqUcufRkuWBUxLaNz0taVXFnImmzHhlaNuolUmaI8RZOSA5OZ8ybhvHOhMvXL4PI6F2JNYCtyV1L2RvK1qM/YjUxHwJ1+/0lEtd4Jx6rwOu851tvnbB33PCpG5f4Az8yIdfR/USEgJYKIRRKRhMDkHHG1xrq3kTP0cQ2dj7Q9gbrPIEWaNFKY5MecwgeqRXOulWvMKTNQ+tEnsokeRaDT+8AF1bGCtI6vLA8OG74dy8e
MWsMNkRjhMN5t2LcPg6IpWDFuJow2thFigwpM2wIGBflpjpnaWzsh0qjUmapEVITjEMGlcwKcrQOPPfc57j1iU8xXyzZu7+Psw6VjiajaUlVSKy19E2HlJJ518Bcsj2VlHnFZFywu7sFmcZkCtG0vPXaghdff5W275nVdQwKWhOEJMtz8qJgUHQKIVA3DabvMancfPXyNW7euEGZg2CM1gVKPYrXIQq9KxFZ7srHao1PDV3hk/KOFKuxB7eaW04s49TfFulwKYijkdEX/N8Pbqvybip1DafyAMlWMEo6BhvfN9augq8PqzM9MCidngXcof8rgCLNJiupzpSrUk9YSbViOT8qPO/3+E/NL9HNN/jte9eo6xKM4cRZ7h3v88LGmEYImklOkSsILV1t0u8rCUrgmwIyTZErgtQsD+7x8JVvYheefvkppPxR+t/zb5g//YBZFpiFOd1M0uzdxNVb3PjEmCvPjJkVV/kNdvDtkmt7bzFZzNnOn2N7fBU/P8Icv4mzDXLTkhUeVY5R5RR0TqgmVEhuzr5F3n6Da8Yz3SnJy00mV58jqAJ3OKZaHjHdLJFZi+nnHB2e4Jqey7cXPHPQUpcZX3/imXe0GdZ49HisAq8PcPtwye3DJXle8PuSgakgcXjSKIaS0U6ZkE7uBNqmpm3as3Q/QN95rPV4OoLo0Cqq+AspCSJLjhOD00UqxxHLIVrJKASRSXwQ9Kkvp0KIpepg8KHndN7wwp0FB4tHRyh5T4hBSlBRFiXbG9sEJF0byUnWWbyLjObexBOrcGkDVTlKBYIdyE0qGh0ouHbtBp/9zI+xf3BI3yv6tsU0C4I1TKqKzWlJ23WcWBfN2U2PQOL8mCyTlIVmujGCLOfEGnoZaEzL3Yf3WdY1+8eHGGdXoiSrca30J0BnTBq9UbHkLTx9t8TZEUIUMTv4qG1Z3oXV2I2MFMsVeyAxiKN8I3ghkB7UwGoWnOnOrkQ/VhO7q78PfdhUj1u9vwqQw0RAgIA8x5aOAdkmAogxZlVaXn3/VOrzISqGrQ4CDJpWEJLClkoiGjHwpkxf6dXrI/j3zgYXguv+mN9vX2bhN9m7Dw9nE2hq6r7nAS0vVAVOSbYLTZ5JTG9wpl0ZoAQpsK6EPGM230SXG8yPj3hw9y1CU1B1XyATT2CfeYnm9yuak0DzRktbV8yONrDzqzzxyQkb1yechB1e7Xfo+xp7ULFztE925RrbGxuERY2te1xfIyqLzj2ZKsnLDLIMxiWZ92w/eAOO9tmqrlBNblBNJly6fh2yilnWExYZWW6QqseHJYv5Xdyi5tZBzY39jlevXufNGzexFyAZucYPjscq8J5HCAEbYlnGOIPHxa6qEFFpRPn0sMfDqg89Uka/RueiRF5v4+YilV/NTgqpo8BAKtgltnmsWKuzj4lUPvP+zLjcOM8r9084mbeRNBAsD2c9rXl0ovDvBa0UO5sbaXa3ZDSeMN3cjMLg/QJjXTKfjtZ1Xgy9l2iJaH3ABYsdbIfwyfYv9vWs6VECtiYjWgX78yOausaaBacnMai3XY9WmuloA6EVfYClcYim4/7BCSLLOV02LDtLPppy/cmnaduW6dZOJHyFKPQgk5jDkG1FXfWAIyBkBjJj99JlsrwAEcliNgWXi0XAO7sKVUP2GClrkTASBVjigc+6M9WoocMbCUyJxHeeJHYu6L6bvjRkpbEvfyY0cvbxpDUuRWR26jiidNayYcW+HhpmPqn8RE5/kkAlpHshsUfPB3+fGM8fcT/xvXAwK/jG69tYOcV1OQWSfSQvAUd5wXRcYIOjq5e0zhJL/JGzkI/HyCyj3N5BFxU725fYnIypLu2S3foUrva4vYfQnKLlGN98GeUrpuNLlHrC5OkKOsXmpkB0gUmQPG9KrBVcmlxnJMYU5QbOOtAZ+e5Vgt9C7GYwUuhyjConoDJcPkIS2NkVFKMbTLJNJtkOebWB0lEKtyjGjKxB6QYlBXkGWxtP4HKL857ZJWBr
i93LV/AfuUnCGh8Ej23g9cFjnEVZT9d3yXosIkrrNUgZiVEyMY+ljqL7vbM452hNjXWWUuYUyThBihwhBC6lxYLIlCaRpobsWhD3OW9T+0pCZxxffWWfl94+SlcSkq/j41NWPo9Ma65f2UWoKDYy3dxi5/IubWc5OKnpvU3elFEy0op4KlZE2y7rHT4YemsIOAQBIeNIi3cO07doEbiytcEyk+y93TGbn9A0p7TNDKkUOsupyhFXLz+ByDM64KS3NLOa9u4+QmfUQWACjKY7PPvpz2F7Q7eMzlPemThOFDzCh9Svjk04rxReKoTKEbqgqkZxlEdEp5i+Jw7GX2DuFRWiTDI6iNc6GBR4ovGGHUhN3mNszDrPmJeJUCIieW+QaQRWc4Tnmc2rnzuEWBFWht0r39AhaxUhuQ0RWwjvmnkIgwJkypKj3KU869uuWC6xpbDSEkiEBz/M+qZM+lHg7uGIr3zjGnk1ppyUjJTmDoI3gdFoxKVrW/Rdzdsv7VHPTsnLETovqTY2KDc2KKox1248w2g8ZTIeUY1KXJVxbTzCLGYciN+kPT4gU8/jFn8SjWN7swEUo+sTtNAUmYQmsOMVP2kmhFBhNjP8Rk/hOpzpEFlOdfMZhIJsZwdVjVJvWeCExEiFF4JrT97CB4EOGXmIVRydjwhCMK6mOKEILKJ4TT5id/cS3knstQkHIRITbxaad/LQ13jUeGwDb8Q7C21nHx56Ye9ktsWG2DvO/u/8Hu+egzj7l3FfOceAW3FGzngjBGLW29vHM8N9N0RiEQ6kHjnQ7AcZwHP/fzfeXeL8XoNpZ7Ok551H4gZ8xpD1ye/13PcdGL3OI6RflUSj2o0CHXBa44XEixAtj4IEmZyfUuAVWq/GjEQaW1qxIofre7+Buo8c//4NF8698/2u7oNsk+8uSL/jc+/6/c8nzudHrVal5HPfR3D2HKxG5951ge++znf/vIuG94LeSqSLHopDb9wSFdaG8beVrm44I46RDjmDF7SUsbqDlHiV4bVGyDirHFtdBQiDkD0ChdICLZMrVAARBFmIcjBeSggq9vdJfF2lkjBK0o9mdRmJlyGQIvJZVNAIH1tAZ+oMyV954P6KJEcafdoI5CvDjfdVdFjjQiEu8kERQuwDS+Dgwn7oh4tdLu7anw4hXP5hvsF6vT8Qfuj1hvWaf0Cs7/H1el80Hov1vtDACyCE+K0Qwk9c6A/9kPBxvPaP4zUP+Lhe+8f1uuHjee0fx2se8HG89o/jNQ94XK593XFfY4011lhjjQvEOvCuscYaa6yxxgXiUQTe/+0R/MwPCx/Ha/84XvOAj+u1f1yvGz6e1/5xvOYBH8dr/zhe84DH4tovvMe7xhprrLHGGv9/xrrUvMYaa6yxxhoXiAsLvEKInxVCfEcI8aoQ4i9f1M/93UAI8aQQ4l8LIV4UQrwghPhv08d3hBD/UgjxSvpz+1Ff63thveYXi/V6Xzw+Lmu+Xu+Lx2O95mf2Rx/dG9E6/jXgE0AOfBP47EX87N/l9V4Hfjy9PwW+C3wW+B+Bv5w+/peB/+FRX+t6zR+Pt/V6r9d8vd6P19vjvOYXlfH+FPBqCOH1EEIP/F3gT17Qz/7ACCHshRB+O70/B14CbhCv+W+mL/ubwJ96JBf4g2G95heL9XpfPD42a75e74vH47zmFxV4bwBvn/v7nfSxxx5CiFvAF4HfAK6GEPbSp+4DVx/Vdf0AWK/5xWK93hePj+War9f74vG4rfmaXPUeEEJMgH8A/HchhNn5z4VYp1hTwj9krNf8YrFe74vFer0vHo/jml9U4L0LPHnu7zfTxx5bCCEy4ov1d0II/3f68AMhxPX0+evAw0d1fT8A1mt+sViv98XjY7Xm6/W+eDyua35Rgfc3geeEEM8IIXLgvwB+/oJ+9geGiLYtfwN4KYTwP5371M8Dfz69/+eBf3zR1/YBsF7zi8V6vS8eH5s1X6/3xeOxXvMLZJj9HJFV9hrw3180i+wD
XuvPEMsP3wK+kd5+DrgE/CLwCvCvgJ1Hfa3rNX983tbrvV7z9Xo/Pm+P85qvlavWWGONNdZY4wKxJletscYaa6yxxgViHXjXWGONNdZY4wKxDrxrrLHGGmuscYFYB9411lhjjTXWuECsA+8aa6yxxhprXCDWgXeNNdZYY401LhDrwLvGGmusscYaF4h14F1jjTXWWGONC8T/B/A35bJjrjJDAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train\"\n", - "data = create_dataset(dataset_path=DATA_DIR, do_train=True, batch_size=5, shuffle=False, num_samples=5, policy=cifar10_policy)\n", - "\n", - "epochs = 5\n", - "itr = data.create_dict_iterator()\n", - "fig=plt.figure(figsize=(8, 8))\n", - "columns = 5\n", - "rows = 5\n", - "step_num = 0\n", - "for ep_num in range(epochs):\n", - " for data in itr:\n", - " step_num += 1\n", - " for index in range(rows):\n", - " fig.add_subplot(rows, columns, ep_num * rows + index + 1)\n", - " plt.imshow(data['image'].asnumpy()[index])\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - " > 为了更好地演示效果,此处只加载5张图片,且读取时不进行`shuffle`操作,自动数据增强时也不进行`Normalize`和`HWC2CHW`操作。\n", - " 运行结果可以看到,batch中每张图像的增强效果,水平方向表示1个batch的5张图像,垂直方向表示5个batch。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/mindspore_enable_cache.ipynb b/tutorials/notebook/mindspore_enable_cache.ipynb deleted file mode 100644 index effa52a23b9cbc1089e39036287421890de370c2..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_enable_cache.ipynb +++ /dev/null @@ -1,360 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "biological-register", - "metadata": {}, - "source": [ - "# 应用单节点数据缓存\n", - "作者:MindSpore团队、[陈超然](https://gitee.com/sunny_ccr) \n", - "`Linux` `Ascend` `GPU` `CPU` `数据准备` `中级` `高级`" - ] - }, - { - "cell_type": "markdown", - "id": 
"considerable-stack", - "metadata": {}, - "source": [ - "## 概述\n", - "对于需要重复访问远程的数据集或需要重复从磁盘中读取数据集的情况,可以使用单节点缓存算子将数据集缓存于本地内存中,以加速数据集的读取。\n", - "\n", - "下面,本教程将演示如何使用单节点缓存服务来缓存经过数据增强处理的数据。" - ] - }, - { - "cell_type": "markdown", - "id": "closing-birthday", - "metadata": {}, - "source": [ - "## 配置环境\n", - "使用缓存服务前,需要安装MindSpore,并设置相关环境变量。以Conda环境为例,需要完成`LD_LIBRARY_PATH`与`PATH`环境变量的配置" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "pretty-johnson", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[WARNING] ME(4968:139868289333056,MainProcess):2021-02-25-22:00:42.129.964 [mindspore/ops/operations/array_ops.py:2302] WARN_DEPRECATED: The usage of Pack is deprecated. Please use Stack.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/usr/local/cuda/bin:/home/sunny/miniconda3/envs/seb/bin:/home/sunny/miniconda3/condabin:/usr/local/cuda-10.1/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin\n", - "/home/sunny/miniconda3/envs/seb/lib/python3.7/site-packages/mindspore:/home/sunny/miniconda3/envs/seb/lib/python3.7/site-packages/mindspore/lib\n" - ] - } - ], - "source": [ - "import os\n", - "import sys\n", - "import mindspore\n", - "\n", - "python_path = \"/\".join(sys.executable.split(\"/\")[:-1])\n", - "mindspore_path = \"/\".join(mindspore.__file__.split(\"/\")[:-1])\n", - "mindspore_lib_path = os.path.join(mindspore_path, \"lib\")\n", - "\n", - "if 'PATH' not in os.environ:\n", - " os.environ['PATH'] = python_path\n", - "elif python_path not in os.environ['PATH']:\n", - " os.environ['PATH'] += \":\" + python_path\n", - "print(os.environ['PATH'])\n", - "\n", - "os.environ['LD_LIBRARY_PATH'] = \"{}:{}\".format(mindspore_path, mindspore_lib_path)\n", - "print(os.environ['LD_LIBRARY_PATH'])" - ] - }, - { - "cell_type": "markdown", - "id": "sufficient-christopher", - "metadata": {}, - "source": [ - "## 启动缓存服务器\n", - 
"在使用单节点缓存服务之前,首先需要启动缓存服务器:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "known-webster", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Cache server startup completed successfully!\n", - "The cache server daemon has been created as process id 5005 and listening on port 50052\n", - "\n", - "Recommendation:\n", - "Since the server is detached into its own daemon process, monitor the server logs (under /tmp/mindspore/cache/log) for any issues that may happen after startup\n", - "\n" - ] - } - ], - "source": [ - "!cache_admin --start" - ] - }, - { - "cell_type": "markdown", - "id": "sized-invitation", - "metadata": {}, - "source": [ - "若提示找不到`libpython3.7m.so.1.0`文件,尝试在虚拟环境下查找其路径并设置环境变量:" - ] - }, - { - "cell_type": "raw", - "id": "convenient-diameter", - "metadata": {}, - "source": [ - "export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{path_to_conda}/envs/{your_env_name}/lib" - ] - }, - { - "cell_type": "markdown", - "id": "stone-ceiling", - "metadata": {}, - "source": [ - "## 创建缓存会话\n", - "若缓存服务器中不存在缓存会话,则需要创建一个缓存会话,得到缓存会话id:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "convinced-dinner", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Session created for server on port 50052: 4173327275\n" - ] - } - ], - "source": [ - "!cache_admin -g" - ] - }, - { - "cell_type": "markdown", - "id": "unexpected-addiction", - "metadata": {}, - "source": [ - "缓存会话id由服务器随机分配。" - ] - }, - { - "cell_type": "markdown", - "id": "linear-slovenia", - "metadata": {}, - "source": [ - "## 创建缓存实例\n", - "在脚本中使用`DatasetCache` API来定义一个名为`some_cache`的缓存实例,并把上一步中创建的缓存会话id传入`session_id`参数:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "appreciated-tonight", - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "some_cache = ds.DatasetCache(session_id=4173327275, size=0, spilling=False)" - ] - }, 
- { - "cell_type": "markdown", - "id": "chronic-graphic", - "metadata": {}, - "source": [ - "## 插入缓存实例\n", - "下面样例中使用到CIFAR-10数据集。运行样例前,需要参照数据集加载中的方法下载并存放CIFAR-10数据集。目录结构如下:" - ] - }, - { - "cell_type": "raw", - "id": "incorporate-wrestling", - "metadata": {}, - "source": [ - "├─my_training_script.py\n", - "└─cifar-10-batches-bin\n", - " ├── batches.meta.txt\n", - " ├── data_batch_1.bin\n", - " ├── data_batch_2.bin\n", - " ├── data_batch_3.bin\n", - " ├── data_batch_4.bin\n", - " ├── data_batch_5.bin\n", - " ├── readme.html\n", - " └── test_batch.bin" - ] - }, - { - "cell_type": "markdown", - "id": "bearing-humidity", - "metadata": {}, - "source": [ - "在应用数据增强算子时将所创建的`some_cache`作为其`cache`参数传入:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "separated-closure", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0 image shape: (32, 32, 3)\n", - "1 image shape: (32, 32, 3)\n", - "2 image shape: (32, 32, 3)\n", - "3 image shape: (32, 32, 3)\n", - "4 image shape: (32, 32, 3)\n" - ] - } - ], - "source": [ - "import mindspore.dataset.vision.c_transforms as c_vision\n", - "\n", - "dataset_dir = \"cifar-10-batches-bin/\"\n", - "data = ds.Cifar10Dataset(dataset_dir=dataset_dir, num_samples=5, shuffle=False, num_parallel_workers=1)\n", - "\n", - "# apply cache to map\n", - "rescale_op = c_vision.Rescale(1.0 / 255.0, -1.0)\n", - "data = data.map(input_columns=[\"image\"], operations=rescale_op, cache=some_cache)\n", - "\n", - "num_iter = 0\n", - "for item in data.create_dict_iterator(num_epochs=1): # each data is a dictionary\n", - " # in this example, each dictionary has a key \"image\"\n", - " print(\"{} image shape: {}\".format(num_iter, item[\"image\"].shape))\n", - " num_iter += 1" - ] - }, - { - "cell_type": "markdown", - "id": "multiple-bubble", - "metadata": {}, - "source": [ - "通过cache_admin --list_sessions命令可以查看当前会话有五条数据,说明数据缓存成功。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - 
"id": "shared-capture", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Listing sessions for server on port 50052\n", - "\n", - " Session Cache Id Mem cached Disk cached Avg cache size Numa hit\n", - " 4173327275 575278224 5 n/a 12442 5\n" - ] - } - ], - "source": [ - "!cache_admin --list_sessions" - ] - }, - { - "cell_type": "markdown", - "id": "grand-active", - "metadata": {}, - "source": [ - "## 销毁缓存会话\n", - "在训练结束后,可以选择将当前的缓存销毁并释放内存:" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "happy-three", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Drop session successfully for server on port 50052\n" - ] - } - ], - "source": [ - "!cache_admin --destroy_session 4173327275" - ] - }, - { - "cell_type": "markdown", - "id": "contemporary-climb", - "metadata": {}, - "source": [ - "以上命令将销毁缓存会话id为4173327275的缓存。" - ] - }, - { - "cell_type": "markdown", - "id": "metric-antibody", - "metadata": {}, - "source": [ - "## 关闭缓存服务器\n", - "使用完毕后,可以选择关闭缓存服务器,该操作将销毁当前服务器中存在的所有缓存会话并释放内存。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "hazardous-lawrence", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Cache server on port 50052 has been stopped successfully.\n" - ] - } - ], - "source": [ - "!cache_admin --stop" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/tutorials/notebook/mindspore_evaluate_the_model_during_training.ipynb b/tutorials/notebook/mindspore_evaluate_the_model_during_training.ipynb deleted 
file mode 100644 index d556d441adda9d3b7e9d8d6840027dde89810390..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_evaluate_the_model_during_training.ipynb +++ /dev/null @@ -1,514 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#
    同步训练和验证模型体验" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在面对复杂网络时,往往需要进行几十甚至几百次的epoch训练。而在训练之前,往往很难掌握在训练到第几个epoch时,模型的精度能达到满足要求的程度。所以经常会采用一边训练的同时,在相隔固定epoch的位置对模型进行精度验证,并保存相应的模型,等训练完毕后,通过查看对应模型精度的变化就能迅速地挑选出相对最优的模型,本文将采用这种方法,以LeNet网络为样本,进行示例。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "整体流程如下:\n", - "1. 数据集准备。\n", - "2. 构建神经网络。\n", - "3. 定义回调函数EvalCallBack。\n", - "4. 定义训练网络并执行。\n", - "5. 定义绘图函数并对不同epoch下的模型精度绘制出折线图。\n", - "\n", - "> 本文档适用于CPU、GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据准备" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据集的下载" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载并解压数据集数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte 
--no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据集的增强操作" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载下来后的数据集,需要通过`mindspore.dataset`处理成适用于MindSpore框架的数据,再使用一系列框架中提供的工具进行数据增强操作来适应LeNet网络的数据处理需求。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define map operations\n", - " resize_op = CV.Resize((32, 32), interpolation=Inter.LINEAR) \n", - " rescale_nml_op = CV.Rescale(1 / 0.3081, -1 * 0.1307 / 0.3081) \n", - " rescale_op = CV.Rescale(1/255.0, 0.0) \n", - " hwc2chw_op = CV.HWC2CHW() \n", - " type_cast_op = C.TypeCast(mstype.int32) \n", - "\n", - " # apply map operations on images\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=[resize_op,rescale_op,rescale_nml_op,hwc2chw_op],\n", - " input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size)\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - " \n", - " return mnist_ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 构建神经网络" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"LeNet网络属于7层神经网络,其中涉及卷积层,全连接层,函数激活等算法,在MindSpore中都已经建成相关算子只需导入使用,如下先将卷积函数,全连接函数,权重等进行初始化,然后在LeNet5中定义神经网络并使用`construct`构建网络。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x)\n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义回调函数EvalCallBack" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "实现思想:每隔n个epoch验证一次模型精度,需要在自定义回调函数中实现,如需了解自定义回调函数的详细用法,请参考[API说明](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.train.html#mindspore.train.callback.Callback)。\n", - "\n", - "核心实现:回调函数的`epoch_end`内设置验证点,如下:\n", - "\n", - "`cur_epoch % eval_per_epoch == 0`:即每`eval_per_epoch`个epoch结束时,验证一次模型精度。\n", - "\n", - "- `cur_epoch`:当前训练过程的`epoch`数值。\n", - "- `eval_per_epoch`:用户自定义数值,即验证频次。\n", - "\n", - "其他参数解释:\n", - "\n", - "- 
`model`:MindSpore中的`Model`类。\n", - "- `eval_dataset`:验证数据集。\n", - "- `epoch_per_eval`:记录验证模型的精度和相应的epoch数,其数据形式为`{\"epoch\":[],\"acc\":[]}`。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.train.callback import Callback\n", - "\n", - "class EvalCallBack(Callback):\n", - " def __init__(self, model, eval_dataset, eval_per_epoch, epoch_per_eval):\n", - " self.model = model\n", - " self.eval_dataset = eval_dataset\n", - " self.eval_per_epoch = eval_per_epoch\n", - " self.epoch_per_eval = epoch_per_eval\n", - " \n", - " def epoch_end(self, run_context):\n", - " cb_param = run_context.original_args()\n", - " cur_epoch = cb_param.cur_epoch_num\n", - " if cur_epoch % self.eval_per_epoch == 0:\n", - " acc = self.model.eval(self.eval_dataset, dataset_sink_mode=False)\n", - " self.epoch_per_eval[\"epoch\"].append(cur_epoch)\n", - " self.epoch_per_eval[\"acc\"].append(acc[\"Accuracy\"])\n", - " print(acc)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义训练网络并执行" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在保存模型的参数`CheckpointConfig`中,需计算好单个`epoch`中的`step`数,根据保存模型参数`ckpt`文件时,需要间隔的`step`数来设置,本次示例每个`epoch`有1875个`step`,按照每两个`epoch`验证一次的思想,这里设置`save_checkpoint_steps=eval_per_epoch*1875`,\n", - "其中变量`eval_per_epoch`等于2。\n", - "\n", - "参数解释:\n", - "\n", - "- `train_data_path`:训练数据集地址。\n", - "- `eval_data_path`:验证数据集地址。\n", - "- `train_data`:训练数据集。\n", - "- `eval_data`:验证数据集。\n", - "- `net_loss`:定义损失函数。\n", - "- `net-opt`:定义优化器函数。\n", - "- `config_ck`:配置保存模型信息。\n", - " - `save_checkpoint_steps`:每多少个step保存一次模型权重参数`ckpt`文件。\n", - " - `keep_checkpoint_max`:设置保存模型的权重参数`cpkt`文件的数量上限。\n", - "- `ckpoint_cb`:配置模型权重参数`ckpt`文件保存名称的前缀信息及保存路径信息。\n", - "- `model`:MindSpore中的`Model`类。\n", - "- `model.train`:`Model`类的执行训练函数。\n", - "- `epoch_per_eval`:定义收集`epoch`数和对应模型精度信息的字典。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - 
"scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 375, loss is 2.3327153\n", - "epoch: 1 step: 750, loss is 2.301087\n", - "epoch: 1 step: 1125, loss is 0.18899053\n", - "epoch: 1 step: 1500, loss is 0.31486228\n", - "epoch: 1 step: 1875, loss is 0.14021991\n", - "epoch: 2 step: 375, loss is 0.049191322\n", - "epoch: 2 step: 750, loss is 0.08493232\n", - "epoch: 2 step: 1125, loss is 0.2740858\n", - "epoch: 2 step: 1500, loss is 0.0712947\n", - "epoch: 2 step: 1875, loss is 0.084480055\n", - "{'Accuracy': 0.9782652243589743}\n", - "epoch: 3 step: 375, loss is 0.056499712\n", - "epoch: 3 step: 750, loss is 0.10981669\n", - "epoch: 3 step: 1125, loss is 0.013717058\n", - "epoch: 3 step: 1500, loss is 0.16365167\n", - "epoch: 3 step: 1875, loss is 0.052067317\n", - "epoch: 4 step: 375, loss is 0.05080418\n", - "epoch: 4 step: 750, loss is 0.013522813\n", - "epoch: 4 step: 1125, loss is 0.08582015\n", - "epoch: 4 step: 1500, loss is 0.04939629\n", - "epoch: 4 step: 1875, loss is 0.09115914\n", - "{'Accuracy': 0.9836738782051282}\n", - "epoch: 5 step: 375, loss is 0.0035727315\n", - "epoch: 5 step: 750, loss is 0.03130674\n", - "epoch: 5 step: 1125, loss is 0.0011531024\n", - "epoch: 5 step: 1500, loss is 0.009147665\n", - "epoch: 5 step: 1875, loss is 0.0024722838\n", - "epoch: 6 step: 375, loss is 0.03595736\n", - "epoch: 6 step: 750, loss is 0.004377359\n", - "epoch: 6 step: 1125, loss is 0.044095017\n", - "epoch: 6 step: 1500, loss is 0.016356776\n", - "epoch: 6 step: 1875, loss is 0.01198354\n", - "{'Accuracy': 0.9818709935897436}\n", - "epoch: 7 step: 375, loss is 0.011158295\n", - "epoch: 7 step: 750, loss is 0.021831619\n", - "epoch: 7 step: 1125, loss is 0.0027707873\n", - "epoch: 7 step: 1500, loss is 0.0001371978\n", - "epoch: 7 step: 1875, loss is 0.00040429938\n", - "epoch: 8 step: 375, loss is 0.005541572\n", - "epoch: 8 step: 750, loss is 0.0038450873\n", - "epoch: 8 step: 1125, loss is 
0.1304332\n", - "epoch: 8 step: 1500, loss is 0.021286076\n", - "epoch: 8 step: 1875, loss is 0.025266083\n", - "{'Accuracy': 0.9817708333333334}\n", - "epoch: 9 step: 375, loss is 0.0045793867\n", - "epoch: 9 step: 750, loss is 0.009571521\n", - "epoch: 9 step: 1125, loss is 0.06868767\n", - "epoch: 9 step: 1500, loss is 0.00035104403\n", - "epoch: 9 step: 1875, loss is 0.0010347537\n", - "epoch: 10 step: 375, loss is 0.058423545\n", - "epoch: 10 step: 750, loss is 0.0044561117\n", - "epoch: 10 step: 1125, loss is 2.982349e-05\n", - "epoch: 10 step: 1500, loss is 0.040188752\n", - "epoch: 10 step: 1875, loss is 0.047129657\n", - "{'Accuracy': 0.9833733974358975}\n" - ] - } - ], - "source": [ - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor\n", - "from mindspore import context, Model\n", - "from mindspore.nn.metrics import Accuracy\n", - "from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits\n", - "import os\n", - "\n", - "if __name__ == \"__main__\":\n", - " context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")\n", - " train_data_path = \"./datasets/MNIST_Data/train\"\n", - " eval_data_path = \"./datasets/MNIST_Data/test\"\n", - " model_path = \"./models/ckpt/mindspore_evaluate_the_model_during_training/\"\n", - " \n", - " # clean up old run files before in Linux\n", - " os.system('rm -f {}*.ckpt {}*.meta {}*.pb'.format(model_path, model_path, model_path))\n", - " \n", - " epoch_size = 10\n", - " eval_per_epoch = 2\n", - " repeat_size = 1\n", - " network = LeNet5()\n", - " train_data = create_dataset(train_data_path, repeat_size=repeat_size)\n", - " eval_data = create_dataset(eval_data_path, repeat_size=repeat_size)\n", - " \n", - " # define the loss function\n", - " net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - " # define the optimizer\n", - " net_opt = nn.Momentum(network.trainable_params(), learning_rate=0.01, momentum=0.9)\n", - " config_ck = 
CheckpointConfig(save_checkpoint_steps=eval_per_epoch*1875, keep_checkpoint_max=15)\n", - " ckpoint_cb = ModelCheckpoint(prefix=\"checkpoint_lenet\", directory=model_path, config=config_ck)\n", - " model = Model(network, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()})\n", - " \n", - " epoch_per_eval = {\"epoch\": [], \"acc\": []}\n", - " eval_cb = EvalCallBack(model, eval_data, eval_per_epoch, epoch_per_eval)\n", - " \n", - " model.train(epoch_size, train_data, callbacks=[ckpoint_cb, LossMonitor(375), eval_cb],\n", - " dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在同一目录的文件夹中可以看到`lenet_ckpt`文件夹中,保存了5个模型,和一个计算图相关数据,其结构如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./models/ckpt/mindspore_evaluate_the_model_during_training\n", - "├── checkpoint_lenet-10_1875.ckpt\n", - "├── checkpoint_lenet-2_1875.ckpt\n", - "├── checkpoint_lenet-4_1875.ckpt\n", - "├── checkpoint_lenet-6_1875.ckpt\n", - "├── checkpoint_lenet-8_1875.ckpt\n", - "└── checkpoint_lenet-graph.meta\n", - "\n", - "0 directories, 6 files\n" - ] - } - ], - "source": [ - "!tree ./models/ckpt/mindspore_evaluate_the_model_during_training" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 绘制不同epoch下模型的精度" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "定义绘图函数`eval_show`,将`epoch_per_eval`载入到`eval_show`中,绘制出不同`epoch`下模型的验证精度折线图。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAY4AAAEWCAYAAABxMXBSAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAx4klEQVR4nO3debyc8/n/8dc7EgSxJeGHaGKrip2IfWkUobXFFntr60LLt9WW0m9bLW1Vq/WlizqHIPY1FEEklNpOIkHEEkHWEktEBJHk+v3xuYfJcXLOTDJz7nPOvJ+Px3lk5t7mmpxkrvks9/VRRGBmZlaqTnkHYGZm7YsTh5mZlcWJw8zMyuLEYWZmZXHiMDOzsjhxmJlZWZw4rOok9ZEUkjqXcOw3JT3aGnHVCklHS7p/Kc6/V9LxlYyphdcr+d+L5cOJwxYh6XVJ8yT1aLT9mew/c5+cQrMlFBFDI2LvUo6V9EtJ1zY6f9+IGFKd6FpH9m93w7zj6CicOKwprwFHFp5I2hxYIb9w2ob2+A24PcZcSbX+/qvFicOacg1wXNHz44Griw+QtIqkqyXNlPSGpHMldcr2LSPpIklvS5oEfL2Jc+skzZA0TdJvJC1TSmCSbpb0X0nvS3pE0qZF+7pK+mMWz/uSHpXUNdu3i6T/SJolaYqkb2bbR0k6qegai3SVZd9UT5X0CvBKtu0v2TVmSxotadei45eR9DNJr0r6INu/rqTLJP2x0XsZJul/mniPf5N0UaNtd0r6Yfb4rKLrvyDp4EbxPybpYknvAL9s4j01Gb+kgcDPgCMkzZE0rvHfkaRO2e/6DUlvZf8GVsn2FbqYjpc0Ofv9n9PM73Kxv6/M0U1dR1J/SY9nv8sZki6VtOzifmeSHsl2jcve1xGLi8lKFBH+8c9nP8DrwNeAl4BNgGWAqUBvIIA+2XFXA3cC3YA+wMvAidm+7wAvAusCqwMjs3M7Z/tvB/4BrAisATwFfDvb903g0WbiOyF7zeWAPwNji/ZdBowC1sni3ik7rjfwAakV1QXoDmyVnTMKOKnoGou8fhb3A9n76JptOya7RmfgR8B/geWzfT8GngM2BgRsmR3bH5gOdMqO6wHMBdZs4j3uBkwBlD1fDfgIWDt7fhiwNumL3xHAh8BaRfHPB76fxde1iffUXPy/BK5tFM9nf0fZ3/9EYH1gJeA24JpsX5/s7+uf2etuCXwCbLKY3+Xifl/NXgfYFtghi78PMAE4o4XfWQAb5v3/q6P85B6Af9rWD58njnOB3wIDs/+EnbP/fH2y/+TzgL5F530bGJU9fgj4TtG+vbNzOwNrZh8CXYv2HwmMzB4v8iHXQqyrZtddJfsQ/QjYsonjzgZuX8w1PvtQbOr1s+sPaCGO9wqvS0q4By7muAnAXtnj04B7FnOcgMnAbtnzk4GHmnn9sYXXzOKf3Gh/s3+njeL/Jc0njhHA94r2bQx8WvQhHkCvov1PAYObeM3mfl8lXyfbd0bx77ep3xlOHBX9cVeVLc41wFGkD52rG+3rQfrm/kbRtjdI3xwhfRue0mhfQe/s3BlZV8MsUutjjZYCyrqBfpd108wmJblCPD2A5YFXmzh13cVsL1Xxe0HSmZImZN0rs0iJqzCZoLnXGkL6tk/25zVNHRTpk+4GPh9nOgoYWvT6x0kaW/T3t1nR638h3sZaiL8la/PF33vhC0HBf4sezyW1TBpr7vfV7HUkfVnS3VmX5Wzggibib/bvwJaOE4c1KSLeIA2S70fqjij2NulbZu+ibV8CpmWPZ5A+QIv3FUwhtTh6RMSq2c/KEbEpLTsKOJDUIlqF9M0U0jf0t4GPgQ2aOG/KYrZD6uYpHvj/f00c81kJ6Ww84CfA4cBqEbEq8H4WQ0uvdS1woKQtSd2AdyzmOIDrgUMl9Qa2B27NXr83qQvnNKB79vrPF73+IvE2VkL8LZXLns4Xf+/zgTdbOK+x5n5fLfkbqSt0o4hYmTQuo0bHuOx3FTlxWHN
OJDX5PyzeGBELgJuA8yV1yz7Mfkj6YCTb9wNJvSStBpxVdO4M4H7gj5JWzgZbN5C0ewnxdCMlnXdIH/YXFF13IVAP/EnS2lnrZEdJy5G+rX9N0uGSOkvqLmmr7NSxwCBJKyhN1zyxhBjmAzOBzpL+F1i5aP8VwK8lbaRkC0ndsxinAk+TWhq3RsRHi3uRiHiG9OF6BTA8ImZlu1YkfSjOBJD0LVKLo1Qtxf8m0EfZRIcmXA/8j6T1JK1E+h3cGBHzy4ihpd9XKe9hNjBH0leA75ZwzpukcRmrACcOW6yIeDUiGhaz+/ukb+uTgEeB60gfBJC+EQ8HxgFj+GKL5ThgWeAFUv/6LcBaJYR0NalrZFp27hON9p9JGph+GngX+D1pMHoyqeX0o2z7WNKAK8DFpPGaN0ldSUNp3nDgPtJkgDdI35qLu0X+REqc95M+3OpIA7wFQ4DNWUw3VSPXkVpX1xU2RMQLwB+Bx7OYNwceK+FapcZ/c/bnO5LGNHF+fRb7I6QW6cekfwtLosnfV4nnHUWa8PBP4MYSzvklMCTr3jt8iaK1zxRmbZhZK5C0G6ll1jv8n8/aKbc4zFqJpC7A6cAVThrWnjlxmLUCSZsAs0hdcn/ONRizpeSuKjMzK4tbHGZmVpaaKADWo0eP6NOnT95hmJm1K6NHj347Ino23l4TiaNPnz40NCxuVqmZmTVF0htNbXdXlZmZlcWJw8zMyuLEYWZmZXHiMDOzsjhxmJlZWZw4zMysLE4cZmZWFicOq5wXX4Q77sg7CjOrMicOq5wTToCDD4bTT4cFC/KOxsyqxInDKmPCBHj8cdhiC7jkEhg0CD78sOXzzKzdceKwyqivh86d4YEH4NJL4e67Yffd4b//zTsyM6swJw5bep9+CldfDfvvD2usAaeeCnfemVohO+wA48fnHaGZVZAThy29f/0L3norjXEUfOMb8Mgj8MknsPPO8NBD+cVnZhXlxGFLr74e1loLBg5cdPu228KTT0KvXrDPPjBkSD7xmVlFOXHY0pkxA+65B44/Po1xNPalL8Fjj8Eee8A3vwn/+7/gVSfN2jUnDls6Q4akqbfF3VSNrbJKSi4nnAC//jUcd1zqwjKzdqkmFnKyKolI3VS77gobbdT8sV26wBVXwPrrw7nnwpQpcPvtsNpqrROrmVWMWxy25B59FF55BU48sbTjJTjnHBg6NN3zseOOMGlSdWM0s4pz4rAlV18P3brBoYeWd95RR6X7Pd56K03XffLJ6sRnVsumTElT4+fNq/ilnThsycyeDTfdBIMHw4orln/+brulVke3bmng/LbbKh6iWc164AHYZpt0f9Xzz1f88k4ctmRuugnmzm1+ULwlG28MTzwBW22VWi0XX+wZV2ZLY+HCNAFln31gzTWhoSElkApz4rAlU1cHffvC9tsv3XV69kw3Bw4aBD/8IXz/+zB/fmViNKsl77wDX/96mvJ+9NGpC3jjjavyUk4cVr4XXkgthRNOSAPeS6tr19SCOfNMuOyyVGF3zpylv65ZrXj66dSyeOgh+NvfUhfVknQhl8iJw8pXKGh47LGVu2anTvCHP8Bf/5ru+dhtN5g+vXLXN+uIIlKi2GWX9CXu0UfhO9+pzBe6ZjhxWHkKBQ0POCAVNKy0734X7roLXn45zbh67rnKv4ZZR/Dhh+nL2/e+B3vuCaNHw3bbtcpLO3FYee6+G2bOXLpB8Zbstx/8+99prGOXXdIMETP73EsvpfHF665Lg+F33w3du7fayztxWHnq62HttdOsjWraeus0uNe7d0ok9fXVfT2z9uLmm6FfP3jzTRg+PFVi6NS6H+VOHFa66dObL2hYaeuum/psBwxId6efe66n61rtmjcPzjgDDj8cNtsMxoyBvfbKJRQnDivdkCFpnng1u6kaW3nl1Aw/6SQ4/3w45hgXSLTaM3VqulH2L3+BH/wAHn44fbHKiYscWmkKBQ132w023LB1X7tLF7j8cthgAzj77M8
LJLZin65Zbh58EI48Ej7+GG64AY44Iu+IqtvikDRQ0kuSJko6q4n9vSWNkPSspFGSehXtu1DSeEkTJF0ipfllku6TNC7b93dJy1TzPVjm3/+GiRNLL2hYaRKcdRZcf30a+9hpJ3j11XxiMWsNCxfCb34De++dZjA+/XSbSBpQxcSRfaBfBuwL9AWOlNS30WEXAVdHxBbAecBvs3N3AnYGtgA2A7YDds/OOTwitsy29wQOq9Z7sCKFgoaHHJJvHIMHw4gR8Pbbabru44/nG49ZNbz7Luy/P/z856m18eST8JWv5B3VZ6rZ4ugPTIyISRExD7gBOLDRMX2BwmLUI4v2B7A8sCywHNAFeBMgImZnx3TO9nu0tNpmz04zOY48sqp3o5Zsl13SneurrJIGzm+5Je+IzCqnUF/qgQdSJYVrr4WVVso7qkVUM3GsA0wpej4121ZsHDAoe3ww0E1S94h4nJRIZmQ/wyNiQuEkScOBt4APgCY/NSSdIqlBUsPMmTMr8X5q1403Ln1Bw0rbaKPU2thmGzjsMLjoIs+4svYtAv7xD9h55/T40UfTzX1Vvgt8SeQ9q+pMYHdJz5C6oqYBCyRtCGwC9CIlmwGSdi2cFBH7AGuRWiMDmrpwRFweEf0iol/Pnj2r/DY6uLo62HRT6N8/70gW1bNn6rY67DD48Y/T2gMukGjt0Ycfpmnu3/lOakWPGdP2/r8VqWbimAYUzxfrlW37TERMj4hBEbE1cE62bRap9fFERMyJiDnAvcCOjc79GLiTL3Z/WSWNH5/6V088sU1+82H55dNMk5/+NNXsOfBA+OCDvKMyK91LL6XxumuvhV/9Cv71rzY/Y7CaieNpYCNJ60laFhgMDCs+QFIPSYUYzgYKtwdPJrVEOkvqQmqNTJC0kqS1snM7A18HXqzie7D6+jQd9phj8o5k8Tp1gt/9LjXzhw9PU4anTWv5PLO83XJLqi81Ywbce28qid7Kd4EviapFGBHzgdOA4cAE4KaIGC/pPEkHZIftAbwk6WVgTeD8bPstwKvAc6RxkHERcRewIjBM0rPAWNI4x9+r9R5q3rx5cM01qaBhe+juO+WUdLPgxInpG9yzz+YdkVnTPv00rT9z2GFpXZtnnql+GZ8KUtTAgGK/fv2ioaEh7zDan9tuS9Nv//WvVC+qvRg3Li1oU5gN1o7+Q1oNmDYt3Y/x2GNp4bKLLoJll807qiZJGh0R/Rpvb/ttIstPfT2ss077++Ddcss0XXf99VMC+ec/847ILHnooTQTcOzYdDPrJZe02aTRHCcOa9q0aanP9fjjYZl2eHN+r17pbve99kpdWGefne7ENcvDwoVwwQXp32P37uku8MGD845qiTlxWNMKBQ2/9a28I1ly3bqlRaG+/e00eH7UUanej1lreu+9NE54zjmpsu1TT8Emm+Qd1VJxkUP7okJBw913b/2ChpXWuXOaprv++mnK7tSpcMcd0KNH3pFZLRg9Gg49NLXg/+//0r1GbXFae5nc4rAveuSRVEAwr4KGlSbBT36S7oBvaIAdd4RXXsk7KuvIIlJF5512ggUL0v+p007rEEkDnDisKfX1aR2MvAsaVtrhh6fByffeS8njscfyjsg6orlz4ZvfTF2ke+yR7gLfYYe8o6ooJw5b1Pvvf17QcIUV8o6m8nbaKc24Wn112HNPuOmmvCOyjuSVV1KSuOYa+MUv0oqZHbBb1InDFnXjjfDRR22roGGlbbhhKpC43XZpPv3vf+8Cibb0brsNtt02jWfccw/88pftc0ZiCZw4bFF1dWk94+22yzuS6urePZWtHjw4LRD1ne+4QKItmU8/hR/9KHXtbrJJugt84MC8o6oqz6qyzz3/fJoqePHFHWYQr1nLLw9Dh6YZVxdcAG+8kbquVl4578isvZg+PbVaH300zZj64x9hueXyjqrq3OKwz7WHgoaV1qkTnH9+urv8wQdh113TlF2zlowcCVtvnQa/hw6FSy+tiaQBThx
WUChoeOCBHXIwr0UnnZT6pV97DbbfPpWEMGvKwoXphtKvfQ1WWy210o86Ku+oWpUThyV33ZXW8e7Ig+It2Xvv1OXQqVNqedx7b94RWVvz3ntw0EGphM1hh6XSIZtumndUrc6Jw5K6ulTQcO+9844kX1tskRau2mgj2H//tMaHGaRB7223TV8oLrkkFSns1i3vqHLhxGGpT3/48HTTUgedPliWtddOd/rus0+abfXTn7pAYi2LgCuuSDeNfvpp+rfx/e/XxgSSxXDisI5R0LDSVloJ7rwTvvtduPDCNG33o4/yjspa29y5qfv25JNT9+WYMSmB1Dgnjlq3cGGaTbXHHrDBBnlH07Z07gyXXZYW2rn55nSn+cyZeUdlrWXixJQkrroKfv5zuO++9rESZitw4qh1jzwCkyZ1nIKGlSalm7tuvjn1ce+4I7z8ct5RWbXdfnsaz5g6Nc22O+88d+MWceKodYWChoMG5R1J23booWne/uzZKXn8+995R2TVMH8+/PjH6f/Dl7+cuqb23TfvqNocJ45a9v77cMstaQ56RyxoWGk77JBqXPXokebwX3993hFZJc2YAQMGpK7J7343Tc3u3TvvqNokJ45adsMNacDX3VSl22CDlDy23z4l3AsucIHEjuDhh9Nd4KNHw7XXwl//WjN3gS8JJ45aVlcHm2+e+nKtdKuvngokHnVUWg705JPTNE1rfyJSdeQBA2DVVdM9PEcfnXdUbZ6LHNaq555Ld73++c81PR99iS23XPpmuv768JvfwOTJaQB9lVXyjsxKNWsWHH88DBuW7gKvq6vZG/rK5RZHrSoUNPS3qyUnwa9/nT5wRo6EXXZJCcTavrFjU0v7nnvSl6cbb3TSKIMTRy0qFDQ86KDaLGhYaSeckMpQTJ6cBtDHjMk7ImtOXV36PX3ySRrbOP10t7rL5MRRi4YNg3feqe2ChpX2ta+lNcw7d4bddoN//SvviKyxwkSQk05KrcMxY9JSwlY2J45aVFcHvXrBXnvlHUnHstlmaXB1443hgAPSzBxrG159NSWJ+no499xUm22NNfKOqt2qauKQNFDSS5ImSjqrif29JY2Q9KykUZJ6Fe27UNJ4SRMkXaJkBUn/kvRitu931Yy/Q5oyxQUNq2mttVL3x377pRXhzjzTBRLzduedaTzjjTfg7rvTuJT/7S+VqiUOScsAlwH7An2BIyX1bXTYRcDVEbEFcB7w2+zcnYCdgS2AzYDtgN0L50TEV4CtgZ0l+bbOcgwZkqYguqBh9ay0EtxxB5x2WlpK9LDDUrE8a13z56fKxgcdBBtumLqmvv71vKPqEKrZ4ugPTIyISRExD7gBOLDRMX2Bh7LHI4v2B7A8sCywHNAFeDMi5kbESIDsmmOAXlhpFi6EK6+Er341TSO16llmmbRmw8UXp7pHAwbAW2/lHVXt+O9/07jThRfCt7+d7gLv0yfvqDqMaiaOdYApRc+nZtuKjQMKRZIOBrpJ6h4Rj5MSyYzsZ3hETCg+UdKqwP7AiKZeXNIpkhokNcx0RdPk4Ydd0LA1SXDGGXDrrfDss2kmz4sv5h1Vx/fII+ku8Keegquvhr//HZZfPu+oOpQWE4ek0ZJOlbRaFV7/TGB3Sc+QuqKmAQskbQhsQmpNrAMMkLRrUUydgeuBSyJiUlMXjojLI6JfRPTr6VLISX19ukHNBQ1b18EHw6hR8OGHaYD2kUfyjqhjioA//CG17rp1SxMVjj0276g6pFJaHEcAawNPS7pB0j5SSZOepwHrFj3vlW37TERMj4hBEbE1cE62bRap9fFERMyJiDnAvUDx6imXA69ExJ9LiMMg3SVbKGjYtWve0dSe/v3hiSdgzTXTbLahQ/OOqGN5//30hegnP0ljGg0NqZyOVUWLiSMiJkbEOcCXgeuAeuANSb+StHozpz4NbCRpPUnLAoOBYcUHSOohqRDD2dm1ASaTWiKdJXUhtUYmZOf8BlgFOKPE92iQChp+/LG7qfK
03nrwn/+kVscxx6RSJS6QuPTGjUuzpu6+G/70p1T6ZeWV846qQyupVpWkLYBvAfsBtwJDgV1IA9tbNXVORMyXdBowHFgGqI+I8ZLOAxoiYhiwB/BbSQE8ApyanX4LMAB4jjRQfl9E3JVN1z0HeBEYkzV8Lo2IK8p837Wnrg622AK22SbvSGrbaqul6dAnnZRWlXvsMfjKV1LXykorpZ/FPS48X3ZZ3+lccNVVqQT66qun7sCdd847opqgaOEbj6TRwCygDrg1Ij4p2ndbRLT5DvN+/fpFQ0ND3mHk59lnYcst4S9/gR/8IO9oDFJL4/zz4W9/gw8+gDlzSm99dO7cfIJpKfE09bhLl+q+30r76KP0b/mKK9KYxnXXpW5AqyhJoyOi3xe2l5A41l/cAHR7UfOJ44wz0gfU9OnQvXve0VhTItKHYSGJzJmzdI/nzEmD8aVabrnyk01zCWqllap3k92kSWlFxmeegZ/9zMu6VtHiEkcpXVUnSbowG7Qmm131o4g4t8IxWjV88kkq/33QQU4abZmUVmFcYYXKfXNesCDdeLikieeDD+DNNxfd9/HHpb9+166VaQ0VHq+wQhrHOO649Pd1113wjW9U5u/KylJK4tg3In5WeBIR70naD3DiaA9c0LB2LbNM+tDt1i2VQqmE+fO/mFyaSzyNH8+alcreFG8vdREsKbXMttkmzRBcb73KvCcrWymJYxlJyxXGNiR1Jd3Nbe1BXR2su266i9ZsaXXunFbKW3XVyl1z3rzSE8/KK6caYL6hL1elJI6hwAhJV2bPvwUMqV5IVjFTpsD996dqoO4DtrZq2WXTrKjVm5vdb21Ji4kjIn4v6Vlgz2zTryNieHXDsoq46ioXNDSziivpPo6IuJd097a1F4WChgMGuC/YzCqqlFpVO0h6WtIcSfMkLZA0uzWCs6UwahS89prvFDeziiulVtWlwJHAK0BX4CTSOhvWltXXpwHMgw/OOxIz62BKKqseEROBZSJiQURcCQysbli2VGbNSqW8XdDQzKqglDGOuVmRwrGSLiStj+G1ytuy6693QUMzq5pSEsCx2XGnAR+SSqUfUs2gbCnV1aXaVFtvnXckZtYBNdviyNYNvyAijgY+Bn7VKlHZkhs3DkaPTsuWuoKqmVVBsy2OiFgA9M66qqw9qK9PN1QddVTekZhZB1XKGMck4DFJw0hdVQBExJ+qFpUtmUJBw4MPdkFDM6uaUhLHq9lPJ6BbdcOxpXLnnfDuuy5oaGZVVUrJEY9rtBd1dfClL8Gee7Z8rJnZEmoxcUgaSVq+dRERMaAqEdmSmTwZHnggLUfqgoZmVkWldFWdWfR4edJU3PnVCceWmAsamlkrKaWranSjTY9JeqpK8diSKBQ03HNP6NMn72jMrIMrpauquEh+J2BbYJWqRWTlGzkSXn8dLrgg70jMrAaU0lU1mjTGIVIX1WuAa1m0JS5oaGatqJSuKi/m0Ja9914qaHjSSV5O08xaRSnrcZwqadWi56tJ+l5Vo7LSXX99uvHPBQ3NrJWUUuTw5IiYVXgSEe8BJ1ctIitPXR1stZULGppZqyklcSwjfV4tLyt86NpVbcHYsTBmjFsbZtaqShkcvw+4UdI/suffzrZZ3urrYbnlXNDQzFpVKYnjp8ApwHez5w8AV1QtIivNxx9/XtBw9dVbPt7MrEJK6arqCvwzIg6NiENJSWO5Ui4uaaCklyRNlHRWE/t7Sxoh6VlJoyT1Ktp3oaTxkiZIuqTQXSbpfElTJM0p7S12UHfemWZUuaChmbWyUhLHCFLyKOgKPNjSSdlYyGXAvkBf4EhJfRsddhFwdURsAZwH/DY7dydgZ2ALYDNgO2D37Jy7gP4lxN2xuaChmeWklMSxfER89u0+e7xCCef1ByZGxKSImAfcABzY6Ji+wEPZ45FF+4NUF2tZUuumC/Bm9vpPRMSMEl6/43rjDXjwwVSXqpOXfzez1lXKp86HkrYpPJG0LfBRCeetA0w
pej4121ZsHDAoe3ww0E1S94h4nJRIZmQ/wyNiQgmv+RlJp0hqkNQwc+bMck5t+666Kv3pgoZmloNSEscZwM2S/i3pUeBG4LQKvf6ZwO6SniF1RU0DFkjaENgE6EVKNgMk7VrOhSPi8ojoFxH9evbsWaFw24BCQcOvfQ169847GjOrQaWUHHla0leAjbNNL0XEpyVcexqwbtHzXtm24mtPJ2txSFoJOCQiZkk6GXii0EUm6V5gR+DfJbxux/bQQ6mr6ne/yzsSM6tRpXaQb0waj9iGNMh9XAnnPA1sJGk9ScsCg4FhxQdI6iGpEMPZQH32eDKpJdJZUhdSa6SsrqoOq74eVlsNDjoo70jMrEaVUqvqF8D/ZT9fBS4EDmjpvIiYT+rSGk760L8pIsZLOk9S4fw9gJckvQysCZyfbb+FtM75c6RxkHERcVcWz4WSpgIrSJoq6Zclvtf277334Lbb4OijXdDQzHKjiC+sCrvoAdJzwJbAMxGxpaQ1gWsjYq/WCLAS+vXrFw0NDXmHsfQuuwxOOw2eeSbVpzIzqyJJoyOiX+PtpXRVfRQRC4H5klYG3mLRsQtrLXV1qZihk4aZ5aiUkiMNWVn1f5IWdZoDPF7NoKwJzzyTfi69NO9IzKzGlTKrqrD2xt8l3QesHBHPVjcs+wIXNDSzNqKUFsdnIuL1KsVhzfn4Yxg6FAYNSjOqzMxy5HoV7cEdd7igoZm1GU4c7UFdXbpLfMCAvCMxM1t8V5WkZhd5iIh3Kx+OfcHrr8OIEfCLX7igoZm1Cc2NcYwmValVE/sCWL8qEdmiXNDQzNqYxSaOiFivNQOxJhQKGu61V1p7w8ysDSil5IgkHSPp59nzL0nyQkqtYcQImDzZg+Jm1qaU0mn+V1Jl2sINBB+QVvazaquvT+uJu6ChmbUhpSSO7SPiVOBjgIh4j7Qyn1XTu+/C7bengobLlbTEu5lZqyglcXyarR8eAJJ6AgurGpXBddfBJ5/AiSfmHYmZ2SJKSRyXALcDa0g6H3gUuKCqUVm6d2ObbWDLLfOOxMxsEaXUqhoqaTSwJ2lq7kHlrv9tZRozBsaOTWXUzczamFJvAHwLuL54n28ArKJCQcMjj8w7EjOzLyj1BsAvAe9lj1clLe3q+zyq4aOPUkHDQw5xQUMza5MWO8YREetFxPrAg8D+EdEjIroD3wDub60Aa84dd8CsWb53w8zarFIGx3eIiHsKTyLiXmCn6oVU4+rqYL314KtfzTsSM7MmlZI4pks6V1Kf7OccYHq1A6tJhYKG3/qWCxqaWZtVyqfTkUBP0pTc24E1sm1WaVdeCRIcf3zekZiZLVYp03HfBU6X1C09jTnVD6sGLViQEsfee7ugoZm1aaUUOdxc0jPA88B4SaMlbVb90GrMiBEwZYoHxc2szSulq+ofwA8jondE9AZ+BFxe3bBqUKGg4YEH5h2JmVmzSkkcK0bEyMKTiBgFrFi1iGrRO++kgobHHOOChmbW5rU4xgFMytbiuCZ7fgwwqXoh1aDrroN589xNZWbtQiktjhNIs6puy356ZtusEiLSvRvbbuuChmbWLrSYOCLivYj4QURsk/2cnq3J0SJJAyW9JGmipLOa2N9b0ghJz0oaJalX0b4LJY2XNEHSJZKUbd9W0nPZNT/b3m6NGQPjxrl8upm1G80VORzW3IkRcUBz+7M1PC4D9gKmAk9LGhYRLxQddhFwdUQMkTQA+C1wrKSdgJ2BLbLjHgV2B0YBfwNOBp4E7gEGAvc2F0ubVl8Pyy/vgoZm1m40N8axIzCFVBX3SVKBw3L0ByZGxCQASTcABwLFiaMv8MPs8UjgjuxxAMuTVhoU0AV4U9JawMoR8UR2zauBg2iviaO4oOGqq+YdjZlZSZrrqvp/wM+AzYC/kFoOb0fEwxHxcAnXXoeUeAqmZtuKjQMGZY8PBrpJ6h4Rj5MSyYzsZ3i2Bsg62XWau2b7cfvt8P77HhQ3s3alueq4CyLivog4Htg
BmAiMknRaBV//TGD37AbD3YFpwAJJGwKbAL1IiWGApF3LubCkUyQ1SGqYOXNmBUOuoEJBwz32yDsSM7OSNTs4Lmk5SYOAa4FT+XwZ2VJMA9Ytet4r2/aZiJgeEYMiYmvgnGzbLFLr44mImJOVOLmX1HU2LbvOYq9ZdO3LI6JfRPTr2bNniSG3otdeg4ceSq0NFzQ0s3ZksZ9Y2fjB48A2wK8iYruI+HVENPlB3YSngY0krSdpWWAwsMiAu6QekgoxnA3UZ48nk1oinSV1IbVGJkTEDGC2pB2y2VTHAXeWGE/b4oKGZtZONfdV9xhgI+B04D+SZmc/H0ia3dKFI2I+cBowHJgA3BQR4yWdJ6kwI2sP4CVJLwNrAudn228BXgWeI42DjIuIu7J93wOuIHWdvUp7HBhfsACuugr22QfWXbfFw83M2hJFRN4xVF2/fv2ioaEh7zA+N3w4DBwIN90Ehx2WdzRmZk2SNDoi+jXe7s71PNTXQ/fucECzt8KYmbVJThyt7Z130rriLmhoZu2UE0dru/baVNDQJUbMrJ1y4mhNhYKG/frB5pvnHY2Z2RJx4mhNo0fDc8+5tWFm7ZoTR2sqFDQcPDjvSMzMlpgTR2v56KO0YNOhh7qgoZm1a04creW221JBQ3dTmVk758TRWurqYP31Ybfd8o7EzGypOHG0hkmTYORIFzQ0sw7Bn2Kt4corU8JwQUMz6wCcOKqtuKBhr14tHm5m1tY5cVTbAw/A1Kle5c/MOgwnjmqrr4cePVzQ0Mw6DCeOanr77c8LGi67bN7RmJlVhBNHNV17LXz6qe/dMLMOxYmjWiJSN9V228Fmm+UdjZlZxThxVEtDgwsamlmH5MRRLfX10LWrCxqaWYfjxFENc+d+XtBwlVXyjsbMrKKcOKrhtttg9mx3U5lZh+TEUQ11dbDBBi5oaGYdkhNHpb36Kowale4Ul/KOxsys4pw4Ks0FDc2sg3PiqKRCQcOBA2GddfKOxsysKpw4Kun++2HaNBc0NLMOzYmjkgoFDfffP+9IzMyqxomjUmbOhDvvhGOPdUFDM+vQqpo4JA2U9JKkiZLOamJ/b0kjJD0raZSkXtn2r0oaW/TzsaSDsn0DJI2R9LykIZI6V/M9lKxQ0NDdVGbWwVUtcUhaBrgM2BfoCxwpqW+jwy4Cro6ILYDzgN8CRMTIiNgqIrYCBgBzgfsldQKGAIMjYjPgDSD/6UsR6d6N/v1d0NDMOrxqtjj6AxMjYlJEzANuAA5sdExf4KHs8cgm9gMcCtwbEXOB7sC8iHg52/cAcEjFIy/X00/D+PG+U9zMakI1E8c6wJSi51OzbcXGAYOyxwcD3SR1b3TMYOD67PHbQGdJ/bLnhwLrNvXikk6R1CCpYebMmUv4FkrkgoZmVkPyHhw/E9hd0jPA7sA0YEFhp6S1gM2B4QAREaREcrGkp4APio8vFhGXR0S/iOjXs2fP6r2DuXPh+uvhsMNg5ZWr9zpmZm1ENQeWp7Foa6BXtu0zETGdrMUhaSXgkIiYVXTI4cDtEfFp0TmPA7tm5+wNfLkawZfs1ltd0NDMako1WxxPAxtJWk/SsqSWwrDiAyT1yAa8Ac4G6htd40g+76YqnLNG9udywE+Bv1ch9tLV1cGGG8Kuu+YahplZa6la4oiI+cBppG6mCcBNETFe0nmSDsgO2wN4SdLLwJrA+YXzJfUhtVgebnTpH0uaADwL3BURD5GXiRPh4Ydd0NDMaorSsEHH1q9fv2hoaKj8hc85B373O5g82bWpzKzDkTQ6Ivo13p734Hj7VShouO++ThpmVlOcOJbU8OEwfbrvFDezmuPEsaTq6qBnT/jGN/KOxMysVTlxLImZM2HYMBc0NLOa5MSxJK65BubPdzeVmdUkJ45yFQoabr89bLpp3tGYmbU6J45yPfUUvPCC7xQ3s5rlxFGu+npYYQU44oi8IzEzy4UTRzk+/NAFDc2s5jlxlOPWW+GDD9xNZWY1zYmjHHV
1sNFGsMsueUdiZpYbJ45SvfIKPPKICxqaWc1z4ijVlVdCp05w3HF5R2JmlisnjlLMnw9DhsB++8Haa+cdjZlZrpw4SuGChmZmn3HiKEVdHayxhgsampnhxNGyt96Cu+5KBQ27dMk7GjOz3DlxtMQFDc3MFuHE0ZyIVGJkhx2gb9+8ozEzaxOcOJrz5JMuaGhm1ogTR3Nc0NDM7AucOJqzwQbwgx9At255R2Jm1mZ0zjuANu2nP807AjOzNsctDjMzK4sTh5mZlcWJw8zMyuLEYWZmZalq4pA0UNJLkiZKOquJ/b0ljZD0rKRRknpl278qaWzRz8eSDsr27SlpTLb9UUkbVvM9mJnZoqqWOCQtA1wG7Av0BY6U1Pj264uAqyNiC+A84LcAETEyIraKiK2AAcBc4P7snL8BR2f7rgPOrdZ7MDOzL6pmi6M/MDEiJkXEPOAG4MBGx/QFHsoej2xiP8ChwL0RMTd7HsDK2eNVgOkVjdrMzJpVzcSxDjCl6PnUbFuxccCg7PHBQDdJ3RsdMxi4vuj5ScA9kqYCxwK/q1jEZmbWorxvADwTuFTSN4FHgGnAgsJOSWsBmwPDi875H2C/iHhS0o+BP5GSySIknQKckj2dI+mlJYyxB/D2Ep5bTY6rPI6rPI6rPB01rt5Nbaxm4pgGrFv0vFe27TMRMZ2sxSFpJeCQiJhVdMjhwO0R8Wl2TE9gy4h4Mtt/I3BfUy8eEZcDly/tm5DUEBH9lvY6lea4yuO4yuO4ylNrcVWzq+ppYCNJ60laltTlNKz4AEk9JBViOBuob3SNI1m0m+o9YBVJX86e7wVMqHjkZma2WFVrcUTEfEmnkbqZlgHqI2K8pPOAhogYBuwB/FZSkLqqTi2cL6kPqcXycKNrngzcKmkhKZF4hSUzs1ZU1TGOiLgHuKfRtv8tenwLcMtizn2dLw6mExG3A7dXNNDmLXV3V5U4rvI4rvI4rvLUVFyKiGpc18zMOiiXHDEzs7I4cZiZWVmcOBZD0rqSRkp6QdJ4SafnHROApOUlPSVpXBbXr/KOqUDSMpKekXR33rEUk/S6pOey+mYNecdTIGlVSbdIelHSBEk7toGYNm5UJ262pDPyjgtA0v9k/+afl3S9pOXzjglA0ulZTOPz/LuSVC/pLUnPF21bXdIDkl7J/lytEq/lxLF484EfRURfYAfg1CZqbeXhE2BARGwJbAUMlLRDviF95nTa7vTor2b1z9rSXPu/APdFxFeALWkDf3cR8VJRnbhtSXXiWnMySpMkrQP8AOgXEZuRZmoOzjcqkLQZcDKpxNKWwDdyLLx6FTCw0bazgBERsREwInu+1Jw4FiMiZkTEmOzxB6T/1F+Y5dXaIpmTPe2S/eQ+wyGrbPx14Iq8Y2kPJK0C7AbUAUTEvEY3v7YFewKvRsQbeQeS6Qx0ldQZWIG2UaduE+DJiJgbEfNJtw8MauGcqoiIR4B3G20+EBiSPR4CHFSJ13LiKEF2T8nWwJMtHNoqsi6hscBbwANFd9Ln6c/AT4CFOcfRlADulzQ6K0XTFqwHzASuzLr3rpC0Yt5BNdK4TlxuImIaqZr2ZGAG8H5E3N/8Wa3ieWBXSd0lrQDsx6IVM/K2ZkTMyB7/F1izEhd14mhBVgrlVuCMiJiddzwAEbEg60roBfTPmsu5kfQN4K2IGJ1nHM3YJSK2IZX4P1XSbnkHRPr2vA3wt4jYGviQCnUjVEJW7eEA4Oa8YwHI+uYPJCXctYEVJR2Tb1QQEROA35OWfbgPGEtRvb22JNK9FxXpnXDiaIakLqSkMTQibss7nsayro2RfLFfs7XtDBwg6XVS+fwBkq7NN6TPZd9WiYi3SP31/fONCEjVoqcWtRZvISWStmJfYExEvJl3IJmvAa9FxMysdt1twE45xwRARNRFxLYRsRupmsXLecdU5M2sWGyhaOxblbioE8diSBKp/3lCRPwp73gKJPWUtGr2uCupXteLecYUEWdHRK+I6EPq3ng
oInL/NgggaUVJ3QqPgb1J3Qu5ioj/AlMkbZxt2hN4IceQGmtcJy5vk4EdJK2Q/d/ckzYwmQBA0hrZn18ijW9cl29EixgGHJ89Ph64sxIXzbuselu2M2m9j+ey8QSAn2VlVPK0FjAkW2GxE3BTRLSp6a9tzJrA7emzhs7AdRHRZEXlHHwfGJp1C00CvpVzPMBnCXYv4Nt5x1KQLaNwCzCGNOPxGdpOmY9bs3WEPgVOzWuSg6TrSfX/eiitV/QL0npFN0k6EXiDVHF86V/LJUfMzKwc7qoyM7OyOHGYmVlZnDjMzKwsThxmZlYWJw4zMyuLE4dZBUnaI8/qwJK+KenSvF7faoMTh5l9Jrs/yKxZThxWcyQdk61pMlbSPwoflpLmSLo4W1dhhKSe2fatJD0h6VlJtxfWNJC0oaQHs7VRxkjaIHuJlYrW2Ria3encOIZRkn6fxfGypF2z7Yu0GCTdLWmPovj+kMX3oKT+2XUmSTqg6PLrZttfkfSLEt/3HyWNA3JfE8TaPicOqymSNgGOAHbOCkUuAI7Odq8INETEpqTy2IUP3auBn0bEFsBzRduHApdla6PsRKraCqmS8hlAX2B9UhWCpnSOiP7Zsb9YzDHFViSVc9kU+AD4DekO74OB84qO6w8cAmwBHCapXwnv+8mI2DIiHi0hDqtxLjlitWZP0gJFT2cNga58XvhtIXBj9vha4LZs3YxVI+LhbPsQ4Oas/tU6EXE7QER8DJBd86mImJo9Hwv0AZr6QC4UzhydHdOSeaQKrJAS2CcR8amk5xqd/0BEvJO9/m3ALqQyHYt73wtIxTzNSuLEYbVGwJCIOLuEY5e0Hs8nRY8XsPj/Z580ccx8Fu0JKF4e9dP4vEbQwsL5EbEwW9yooHHcQfPv++OIaJOlwK1tcleV1ZoRwKFFFU1Xl9Q729cJODR7fBTwaES8D7xXGIMgFb58OFsVcqqkg7LrLJct5LO0Xge2ktRJ0rosWQn4vbL31ZW04ttjNP++zcriFofVlIh4QdK5pBUBO5FVNCVVDv2QtDDWuaRunCOy044H/p4lhuIqtscC/5B0XnadwyoQ4mPAa6QS6xNI1WDL9RSp66kXcG1ENAA0877NyuLquGYZSXMiYqW84zBr69xVZWZmZXGLw8zMyuIWh5mZlcWJw8zMyuLEYWZmZXHiMDOzsjhxmJlZWf4/xDlINkcRrx8AAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "def eval_show(epoch_per_eval):\n", - " plt.xlabel(\"epoch number\")\n", - " plt.ylabel(\"Model accuracy\")\n", - " plt.title(\"Model accuracy variation chart\")\n", - " plt.plot(epoch_per_eval[\"epoch\"], epoch_per_eval[\"acc\"], \"red\")\n", - " plt.show()\n", - " \n", - "eval_show(epoch_per_eval)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "从上图可以一目了然地挑选出需要的最优模型权重参数`ckpt`文件。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本例使用MNIST数据集通过卷积神经网络LeNet5进行训练,着重介绍了利用回调函数在进行模型训练的同时进行模型的验证,保存对应`epoch`的模型权重参数`ckpt`文件,并从中挑选出最优模型的方法。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/mindspore_improve_model_security_nad.ipynb b/tutorials/notebook/mindspore_improve_model_security_nad.ipynb deleted file mode 100644 index aab0207947906dd5ebf92287c0b656c05d54801e..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_improve_model_security_nad.ipynb +++ /dev/null @@ -1,623 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": 
"herbal-ticket", - "metadata": {}, - "source": [ - "# 使用NAD算法提升模型安全性\n", - "\n", - "\n", - "\n", - "## 概述\n", - "\n", - "本教程介绍MindArmour提供的模型安全防护手段,引导您快速使用MindArmour,为您的AI模型提供一定的安全防护能力。\n", - "\n", - "AI算法设计之初普遍未考虑相关的安全威胁,使得AI算法的判断结果容易被恶意攻击者影响,导致AI系统判断失准。攻击者在原始样本处加入人类不易察觉的微小扰动,导致深度学习模型误判,称为对抗样本攻击。MindArmour模型安全提供对抗样本生成、对抗样本检测、模型防御、攻防效果评估等功能,为AI模型安全研究和AI应用安全提供重要支撑。\n", - "\n", - "- 对抗样本生成模块支持安全工程师快速高效地生成对抗样本,用于攻击AI模型。\n", - "\n", - "- 对抗样本检测、防御模块支持用户检测过滤对抗样本、增强AI模型对于对抗样本的鲁棒性。\n", - "\n", - "- 评估模块提供多种指标全面评估对抗样本攻防性能。\n", - "\n", - "这里通过图像分类任务上的对抗性攻防,以攻击算法FGSM和防御算法NAD为例,介绍MindArmour在对抗攻防上的使用方法。\n", - "\n", - "> 本例面向CPU、GPU、Ascend 910 AI处理器,你可以在这里下载完整的样例代码: https://gitee.com/mindspore/mindarmour/blob/master/examples/model_security/model_defenses/mnist_defense_nad.py" - ] - }, - { - "cell_type": "markdown", - "id": "continent-gender", - "metadata": {}, - "source": [ - "## 准备工作\n", - "\n", - "本例采用LeNet5网络进行示例,将展示训练后的模型,正常验证的结果如何,使用对抗样本后的验证效果如何,在完成上述情况前,需做如下准备。\n", - "\n", - "1. 下载安装跟MindSpore版本对应的MindArmour安装包。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "quick-peace", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "256" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import os\n", - "import mindspore\n", - "\n", - "version = mindspore.__version__\n", - "ma_link = \"https://ms-release.obs.cn-north-4.myhuaweicloud.com/{0}/MindArmour/x86_64/mindarmour-{0}-cp37-cp37m-linux_x86_64.whl\".format(version)\n", - "os.system(\"pip install {}\".format(ma_link))" - ] - }, - { - "cell_type": "markdown", - "id": "fitting-strip", - "metadata": {}, - "source": [ - "2. 
准备MNIST数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "naughty-daniel", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "id": "handmade-cookie", - "metadata": {}, - "source": [ - "## 建立被攻击模型\n", - "\n", - "以MNIST为示范数据集,自定义的简单模型作为被攻击模型。\n", - "\n", - "### 引入相关包" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "surprised-jonathan", - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import numpy as np\n", - "from scipy.special import softmax\n", - "\n", - "from mindspore import dataset as ds\n", - "from mindspore import dtype as mstype\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "import mindspore.nn as nn\n", - "from mindspore.nn import SoftmaxCrossEntropyWithLogits\n", - "from 
mindspore.common.initializer import TruncatedNormal\n", - "from mindspore import Model, Tensor, context\n", - "from mindspore.train.callback import LossMonitor\n", - "\n", - "from mindarmour.adv_robustness.attacks import FastGradientSignMethod\n", - "from mindarmour.utils.logger import LogUtil\n", - "from mindarmour.adv_robustness.evaluations import AttackEvaluate\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - "\n", - "LOGGER = LogUtil.get_instance()\n", - "LOGGER.set_level(\"INFO\")\n", - "TAG = 'demo'" - ] - }, - { - "cell_type": "markdown", - "id": "linear-northwest", - "metadata": {}, - "source": [ - "### 加载数据集\n", - "\n", - "利用MindSpore的dataset提供的MnistDataset接口加载MNIST数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "bacterial-absolute", - "metadata": {}, - "outputs": [], - "source": [ - "# generate dataset for train of test\n", - "def generate_mnist_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1, index=True):\n", - " \"\"\"\n", - " create dataset for training or testing\n", - " \"\"\"\n", - " # define dataset\n", - " ds1 = ds.MnistDataset(data_path)\n", - "\n", - " # define operation parameters\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - "\n", - " # define map operations\n", - " resize_op = CV.Resize((resize_height, resize_width),\n", - " interpolation=Inter.LINEAR)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # apply map operations on images\n", - " if not index:\n", - " one_hot_enco = C.OneHot(10)\n", - " ds1 = ds1.map(operations=one_hot_enco, input_columns=\"label\",\n", - " num_parallel_workers=num_parallel_workers)\n", - " type_cast_op = C.TypeCast(mstype.float32)\n", - " \n", - " ds1 = ds1.map(operations=type_cast_op, input_columns=\"label\",\n", - " num_parallel_workers=num_parallel_workers)\n", - " 
ds1 = ds1.map(operations=resize_op, input_columns=\"image\",\n", - " num_parallel_workers=num_parallel_workers)\n", - " ds1 = ds1.map(operations=rescale_op, input_columns=\"image\",\n", - " num_parallel_workers=num_parallel_workers)\n", - " ds1 = ds1.map(operations=hwc2chw_op, input_columns=\"image\",\n", - " num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " buffer_size = 10000\n", - " ds1 = ds1.shuffle(buffer_size=buffer_size)\n", - " ds1 = ds1.batch(batch_size, drop_remainder=True)\n", - " ds1 = ds1.repeat(repeat_size)\n", - "\n", - " return ds1" - ] - }, - { - "cell_type": "markdown", - "id": "becoming-rebel", - "metadata": {}, - "source": [ - "### 建立模型\n", - "\n", - "这里以LeNet模型为例,您也可以建立训练自己的模型。\n", - "\n", - "1. 定义LeNet模型网络。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "affected-assist", - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = 
self.fc3(x)\n", - " return x" - ] - }, - { - "cell_type": "markdown", - "id": "stupid-invalid", - "metadata": {}, - "source": [ - "2. 训练LeNet模型。利用上面定义的数据加载函数`generate_mnist_dataset`载入数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "vulnerable-pierce", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 1875, loss is 2.3040888\n", - "epoch: 2 step: 1875, loss is 0.29309553\n", - "epoch: 3 step: 1875, loss is 0.19545117\n" - ] - } - ], - "source": [ - "mnist_path = \"./datasets/MNIST_Data/\"\n", - "batch_size = 32\n", - "# train original model\n", - "ds_train = generate_mnist_dataset(os.path.join(mnist_path, \"train\"),\n", - " batch_size=batch_size, repeat_size=1,\n", - " index=False)\n", - "net = LeNet5()\n", - "loss = SoftmaxCrossEntropyWithLogits(sparse=False, reduction=\"mean\")\n", - "opt = nn.Momentum(net.trainable_params(), 0.01, 0.9)\n", - "model = Model(net, loss, opt, metrics=None)\n", - "model.train(3, ds_train, callbacks=[LossMonitor(1875)],\n", - " dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "id": "simplified-hunter", - "metadata": {}, - "source": [ - "3. 
测试模型" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "engaging-corner", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:15,249 [:24] [demo] prediction accuracy before attacking is : 0.9779647435897436\n" - ] - } - ], - "source": [ - "# prediction accuracy before attack\n", - "\n", - "# get test data\n", - "ds_test = generate_mnist_dataset(os.path.join(mnist_path, \"test\"),\n", - " batch_size=batch_size, repeat_size=1,\n", - " index=False)\n", - "inputs = []\n", - "labels = []\n", - "for data in ds_test.create_tuple_iterator():\n", - " inputs.append(data[0].asnumpy().astype(np.float32))\n", - " labels.append(data[1].asnumpy())\n", - "\n", - "test_inputs = np.concatenate(inputs)\n", - "test_labels = np.concatenate(labels)\n", - "\n", - "def get_net_acc(network, inputs_data, labels):\n", - " network.set_train(False)\n", - " test_logits = net(Tensor(inputs_data)).asnumpy()\n", - " tmp = np.argmax(test_logits, axis=1) == np.argmax(labels, axis=1)\n", - " accuracy = np.mean(tmp)\n", - " return accuracy\n", - "\n", - "accuracy = get_net_acc(net, test_inputs, test_labels)\n", - "LOGGER.info(TAG, 'prediction accuracy before attacking is : %s', accuracy)" - ] - }, - { - "cell_type": "markdown", - "id": "improved-composition", - "metadata": {}, - "source": [ - "测试结果中分类精度达到了97%以上。" - ] - }, - { - "cell_type": "markdown", - "id": "secondary-headset", - "metadata": {}, - "source": [ - "## 对抗性攻击\n", - "\n", - "在进行对抗性攻击前,选取32张图片查看,没有攻击前的图片展示的效果如何。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "genetic-reservoir", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAV0AAADcCAYAAADTE3J+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAA5XUlEQVR4nO2dfUwb15r/v1zjklle7NWWrbjwuwGRKyuVUYlahVYKEP+xfUmCGlaJylu1Eq6iRdu8EIoKRHTVRoUkJNDotiWNYqQrkpSqackW0jSrVXhJpC1RVySA2lgqhVUhdEtW2JjEEHDn9wc7U49n/D6eGcjzkUYJ4/HM12fOPPOcc57nnDiWZUEQBEEowx/UFkAQBPE4QUaXIAhCQcjoEgRBKAgZXYIgCAUho0sQBKEgZHQJgiAUJD7Qh3FxcZqKJ2NZNs7fZ6Q1cvxpXSs6AdIaDetB61rRCZCnSxAEoShkdAmCIBQkYPfC40ptbS3++Mc/ivbfu3cPLS0tKigiCGK9EBcoDXgt9ZPIoTUhIQGVlZVoaGhARkaG6PPbt29jy5YtIZ3rcesnKyoqQnZ2dkjHDg8PAwAGBgZCPT2A9VGmQGy0FhYWBqybPT09GB8fl/xsPZRrrHVmZ2ejqKiI//vChQuYnZ31ezz16RIEQWiEx7p7gfPMcnJyAAAMw+DEiRNISkpSU9aaw2Kx4MiRI8jLywvp+J6eHgDA4uIihoaGYilt3WOxWGAwGFBZWSnwxHyZnJz06+kS/uFshNVqRX19Pb+/v78/oKcbiMfa6O7atQsA8MEHH6grZI3T2tqK3NzckI/njIPH40FxcXGMVK0vUlNTAQDp6emC/e3t7TCZTEG/r4XZBE0mExiGAQDeYE1PT6spKSicjfA2uNHyWBtdf7hcLmzYsAF6vT7m12IYBgkJCbKf1+PxwOVyyX5eKVwuF5aXl6HX67G0tAQAcLvdAIC4uDikpKQgLs5vF1dM4MqUe8hjwYMHD7C8vByz83MwDAOr1QoAaG5uDnr88vIyHjx4INi3srISE22hwNWBzz//nG9Vtre3A1gdtPbVGmsSExNFz7a/5+XRo0cAgIWFBdlawGR0JdixYwfefvtt/i0XSxobG1FVVSX7eUdHR1FQUCD7eaXYuXMnLl68iF27dqGjowMA0NDQAAAwGAy4c+cODAaDIlo4KisrAQBNTU0xu0ZFRQWuXLkSs/NzNDY24tChQyEff+3aNbz++uuCfUobNm9SUlJw584dweA0d3+Sk5NFWmPNmTNnRM+2v+eFq88OhwMXL16U5fqyGt3a2lqUlZUFPY4Tr3b4FafDdxT93XffRX5+viIaGIaB0WiU/bzJycmyn9MfLpcL+/fvR2NjI99sdDgc/OdqNG2feOIJAIhJ2XLExyvjszAMI/LYXS4XduzYgYWFBdHxDodDUP5qYjKZ8PnnnyMjIwM6nY7fz7VEioqK0NXVhZKSEsU0JSUlieqFv+dlz549AICjR48C+L3c7XZ7xNeXpda0trYiKysLOTk5IYUN9ff3AwDy8vJQV1fH76+ursbk5KQckkKCMxC+HeJ/+tOfFPfM1jqTk5OwWq0iDyI+Ph6JiYkqqYotSnSZ1NfXiwbIJiYmsH//fgwNDSnSvREpeXl5aG5u5rsUpDAYDCH1SctFa2tryAO+APDkk08CALKysgCsdkOMjY3x3WeRELHRTU9Px1tvvQUAKC8v5zv6/dHS0oKXX34ZOTk5KCwsBCCOfXv33XcjlSMLDMOgsbFRNFgBrDY/Tp48Kfs1L1++jI0bNwJAwEElzhu/fPmy5Oe7d+/my1UNrFYr/uVf/iXkOOZYw5VXdXV10GPNZjPfZxoOlZWVmJubCzveOBy2bt0qcmScTqci3RqRwtXDmpoaWCwWldWswj3bFRUVQW0VR3FxcUwGeiM2uqmpqQH7mcbHx/nQIAA4ffo0/vznPyMnJ4d/MLXygHLGf8OGDTh06JCgKccF8n/00Ue4cOG
C7Nf2fmD/+7//2+9xg4ODAIDu7m7Jzzdu3Kiq0d21a1fY9zM7Oxvl5eUxKdfbt28L/g2E2Wz2O+jI/Sapsi0qKsL169djZnTLy8slW45PPvmk5LMXadKJnBQWFuLgwYMAIPLQz58/jxdeeCHkJBq5SE1NhdVq9ftsDwwM4N69e6LvFRQUxOSZouQIgiAIJWFZ1u8GgPW35ebmsr6MjIyw3d3dbHd3N1tfXy843mKxsN9++63oOyzLso8ePWJ7enrYzMxMv9dblRqZ1mDb7t27JXWNjIywVquVtVqtYZ8zVlr9bW1tbQLtP/30E7tz505Wr9dHrDWc63d3d0uWYTDk0BmrMgXAHjx4kD148KCk9m+//ZYtKiqKmdbh4eGwyvKrr75iv/rqKzYvL0+VupqXl8d+9dVXfvXl5uZK1pPh4eGotQb6jpStYlmWbWtrY9va2kJ+pliWZefm5lij0RhVmco6/Gqz2XD69GkAqx3k3gHz/oK43W43vvvuO5SVlSkWV8rB9e1kZmaKPpuYmMDx48dj0vRVgqysLJw/fx5ZWVmKjGRPTEyE1JRPT08X9KkprTNUMjMzYTQaJfv3AcBut6O+vh59fX0KK/MP15xPSkpCVVVVVCPs4WIymdDc3Czqw3W73bwOf4NPDMPwtsJut0c1SBUqU1NTkl0KHJmZmfwgmuxE+paTenvU1dWxRqORNRqNbGlpqd83ntxvuWBapTaGYdi6ujq2rq5OUterr74alYckp9ZQtli8lZXS6XA42I0bN7I6nU4zZRrMc8/NzY25VilP99GjR+zc3Jxge/Tokeg4pZ6ruLg41mAwsCMjI5LlNDIywtsEo9HI9vT0xKRcA30nmK2S2vzp1Jyn+8477+Dtt98GoFwMY6SEG3BOxA4ueH7btm0YGxtTW46mkUp86OzsVCSRRwqpxAdvnn76aUxMTPB/ayV80NtWSRFTnZG+5RiGYbdt28bOz88HfHMFQw1Pt7W1lf31118D6lrrnu7du3dZs9kc1HsMpDUWOtPT09kTJ06wJ06cEJW5Et5jKFtXVxfrcDhE+u7evcvm5uayubm5LMMwMdfq7el2dnaynZ2dkuMemZmZbGdnp0Drw4cP2cHBQTY5OTlmddVkMrEjIyPsyspKwGcpGHKUa6Dv+OvTjQRVPV23242hoSGUlpZCr9ejrq5OFHQ8MTGBw4cPA/g9gcKboaEhWSeSCIXW1laUlJQEjdWrr6/Hb7/9Jgh7W0u43W5Neo3T09OaneQkMTERZ86cwcsvvyyZHON2u0Pqt5aLw4cP8zp+/PFHAJBMHpqcnMT9+/cF+xiGQU5OjiALTG64a0RLrMt1YmKCj7eVskOhMDQ0hGPHjknOaxEuUfUBLC8v80HaHo9HFH83OzvLB/P/67/+K7+fiyM8deqUYgMR4QZH5+Xloaamhu8m8RcfS6ymf1+9ejVkI6/05DfB4JIjEhISsHfvXsEERN5JKZFO5RcuXF1lGAY2m02TL89I8E6Q4ohV0pE3TqeTt0OJiYlBn/+amhpRd8nMzIzfxKRwka3jNRyPkAtKVsqL9BccHYzCwkJea3d3N1JTU1FeXs5/Hmg2fiUpKipSNNHEN5PwwIEDMJlM+PDDD4N6LIWFhYpNxBMqmzZt8tu/z91/Jaf/TEhIQFVVFYxGIwYGBiIyurOzszh37hw/65tSDAwM8GXmi3eClFqEEo30T//0T7zR5X5Lb2+vbBooOYIgCEJBtB1iIANpaWl47bXXgs5D6nQ6+a4ObjZ+4PeZ43fv3o3MzEy0tbXx39HCbPxSqzbMzMzEpNvG3yz63L7R0dGAni7XZRNohQOlMJvN2LRpE4DV+Q18GRoawszMjOpN+61bt2J0dDRgPcvLyxN17U1PT/PTa8YKh8MhanJ3dHSE1YLNycnBW2+9pdl4eK57yWazyXbOmBtdnU6HzZs38836YEHJcrN161aBofTGe/Z6u93OTy83PDzMB2tzBkLKUPzfqKm
qSK3aMDQ0xA9gykmgWfQnJiZEgznA7/c/Pj5eMngeWB0P+P777xUJiufgupv80dzcjH/7t39TTI8/6uvrER8fzycd+Q5C+ktKUILJycl1v/IHlyDBPWMrKyv44Ycf4PF4Ij5nTI2uTqdDRkYGbt68yXuOJ0+e5CuQWiwsLGBlZQXnzp0DgIg8ApfLpeps/FrjyJEjuHr1qmieUoPBILj/vng8HkxNTSE/Px9Op1MBpauDKVKrdbAsi/n5ebAsq6l7W1tby69a4FtXvVdj4FheXlY8u3M9wK1w4R3xUVFRIfjX4XBEnT0ZU6O7efNm3Lx5EykpKbG8TNjs27cPV69ejWqQYceOHbSoohdnz56VNFRcRfbH999/j/z8fMzPz8dSnoAzZ85g7969ov3z8/N45pln4HQ6VV1pQQpupYXS0lLBfqnJt69duxbSYgKEkGCJHnIRU6MbHx+vucnAS0pK8M033wT0qkpKStDe3i5qsnl3Qdjtds1OIG2xWFSZjT9Uzp8/D2A1ZNDtdivm4XIkJSX59XSdTqfqc0C4XC7k5+fj0qVL/HwlnN5Q1tNbWVkhTzcC4uLiYDAY/MY22+127NmzJ+qyVXQgrbm5WdbQi1C4deuWoN+pr68v6EPOTWaSlpYm2O90OhUNjo+UWM3Gz927p556KuKkFpvNxodfqTFIFWjlgMTERHR2dmJlZQXHjh1TrSXDrU5QVVUV0vLqwGq5cvdnZmZGCZmPFVwilxx1NmZG12w2o6amRrDv1q1bio/2RxrUTF0HYrh7Z7PZ8Ouvv0Z0jv7+flUjAiwWi+hlyqHX6/nBwr/+9a9KypKEi0CZm5vD9evXAx7b39+/JhwCLeN2u9HQ0CDZmhgfH5ctIihmRnfTpk185zOxvhgfH1c0WUBOLl68iOTkZMnVC5aWltDR0YGlpSU+7VYLDAwMqLoaxOPC0tISvzR8LKHkCIIgCAVRrE+3r6+P+ppiQF9fH5566im/TWZCSEtLC+Lj42G1WkXeLte8VHsgbT1z69Yt0T7vqR8fB2JmdH0TBw4fPkx9TjGAS4KQirQgpGlubsbKyooorMrlckUV9E4EJ1hm6ONAXKCsqri4uIhTrl599VXBANaWLVuiNrosy/qdnioarbFgPWhdKzoB0hoN60HrWtEJBDG6BEEQhLzQQBpBEISCkNElCIJQEDK6BEEQCkJGlyAIQkHI6BIEQSgIGV2CIAgFIaNLEAShIGR0CYIgFISMLkEQhIKQ0SUIglAQMroEQRAKQkaXIAhCQcjoEgRBKAgZXYIgCAUho0sQBKEgZHQJgiAUhIwuQRCEgpDRJQiCUBAyugRBEApCRpcgCEJBAi7BvqZW2CStEbMuVlglrRGzHrSuFZ1AEKNLEOFSVFQEAKisrMTExAQOHz6ssiKC0BZkdNcIhYWFAIDdu3cHPXZsbAw2my3GisQUFxfj4MGDAFb1zs7OYnFxEUePHoXb7VZcD0FokTiW9e+Vx8JlLywsxJYtW/i/Z2dnceHChZC+q3QzyFvr+Pg4enp6Qv6unFoLCwt5Y1ZcXBz0+MuXL4d0HIdcTba2tjYcOnRIsM/hcCArKwsOhyOcU0myHprBwOOlNTU1FeXl5aL9w8PDAICBgQHBMRcuXMDs7GzYWsPRWVRUhOzs7KDH9ff34/bt26GeVkCgMqWBNIIgCAWRvXuBe4Pk5ORIfl5ZWcn3+wHA7du3Q/Z0lSQvLw81NTW81suXL4fl6cZKByEPZrMZmzZtCvn4mZkZDA0NxVBReOj1erz44ovQ6/Wiz7SiNS0tDa+99hra2tpEn3HP0t/+7d8iMzOTP6a/vz+gpxstFosFR44cQV5eXtBjbTYbPvjgAwCrXXaywbKs3w0AG86Wnp7Onjhxgj1x4gQbCg6Hg+3q6gr5/HJqDbZ1d3cLtHZ3d4f1fbm0+urgWFlZYUdGRtiHDx+KPrt+/TprMpmi1hpumbW1tQl0PHz4kB0cHGSTk5NluSdylWl
mZibb2dkZUh1V+/77bgaDgc3NzWW3bdvGzs/Pa1Jramoqm5qayh46dEhS388//8xvvuTm5sa0rg4PD4d139va2ti2tjZZ66psnm5iYiKOHDmCqqqqgMe53W4sLS0BAHp7e/H666/LJUE2kpOTER+vjTHGhYUFLC0tISEhgd/n8XgwNTWF/Px89PT0ICcnBwzD8MdYLBZ0dXUJ+s5jTWJiokAjANjtdhQUFCimIVTa2toEA5LLy8t48OCB4JjExERJL1ItuLLdsWMHLl68yO93uVzYsGGDZrQyDAOr1QoAaG5uFnzmcrng8XjQ2trK/57m5mZ4PB64XC4AwMrKSkx0xcXFISUlBTqdTvJzj8eDhYUFpKSkIC7Ob3esPMj1luvs7GQXFxeDvjnq6upYo9HIGo1GNjExURPeg+82ODjIPnr0SKBbLe8hMTGR/fjjjwVaRkZGWIPBwMbFxbHJycms0WgUHTM8PBy11mjvfzgalCxT39ZDT08PXye5raenRxP3n9uqqqrYqqoq1uVyCXRt27ZNU1qbmprYhw8fSrbAtm3bxhqNRvbUqVOCY0ZGRvhy1+l0MamrBoOBnZycZFdWVkS6OA0bN25kHQ6HYL9mPd2uri68/PLLkp5OSUmJYN/09LQsI9mxJDk5WTOew4MHD/D+++/j7Nmz/D632w2n0wlg1XtobW3Fnj171JIIAEhKShLd/7XCysqKqE7u378fDocDFRUV/D6TyYSuri7+78OHD6Ovr08RjU888QSA1XL2ZmFhIWbeYbi0traioqICDMPw+7xtgN1uh9vtxh/+8Af+mL6+PlRVVcXcJsTFxcFgMAT0dKemprBt2zbEx8ejtbUVFoslJlpkMbomkwkGg0Gwb2hoCPX19RGHXGgFrsP/2LFjqmmYnp7G9PS0YF9iYiLOnDmDpKQk5OXlITU1VSV165PJyUkcP34cX3zxBQDgT3/6E9rb25Gbm8sf41vnY4XVauWb7BwLCwvYt28fJiYmFNEQjNbWVpSUlAjqoT8bYLPZMDAwAGB10M9utyspVQSn0+Px8ANm9fX1SEtLw48//ij79aIyugzDoLGxEenp6fw+rjBPnTqlmBcgF1K/Z3x8HABUHQ0uLi4W9Y0mJCRg7969kt7l6OgoTp48qZS8sPFN9HC73ZpMoBgbG8Pf/d3fAViNuomV5xMMs9ksiAaamppCU1MTvvzySxw4cEAUKZSTk4Pa2lq0tLQoptFisSAtLU2wb2ZmRtIGjI2NyRsNECVSOmP5vEdldBMSElBVVQWj0Sj6LDs7WxQo70u4CQexxvf3DAwMYHBwUF1RAAoKCoKWJfB7wPlHH32kyTA8Liide4FwCRxutxvz8/Ow2WwxDRfyR3Z2NsrLywVl5qvVN2Tv/PnzMfGCfCkqKhINiN6/fx/t7e0AgLKyMlGgf3Z2NsrKyhQ1ulJoyQYsLS2hvb0db7zxhqhVKHX/Q6GoqAg///xz2K15So4gCIJQkkhHLg0GA1taWioaTQ2Hb7/9lrVYLKqNCAf7PQcPHoz4fHJq9Y1/9YfcI63hnMM3IuDevXtsa2srq9fr2Z07d7K7d+9mv/3224D6Dx06xKalpcW8TOvr69kff/xRcG3vSAuLxSKp1eFwsN3d3Wx3dzebmZmpyP0PVq4//fSTZFnKEb0SjtZw41855LIB0er86aef2J07d7J6vT7g933r87lz51iz2cyazebQyzTSAjeZTOzIyIggBOPnn39mh4eH/W6+4Rgsy7J3795lzWZz0FAROSuy72YwGNjXXntNdBPKy8sjPqdcWsMJ5O/s7GQ7OzvZ3NxcxZMjpJJJGIYJGMQvxauvvqrI/fd9kd29e5fNzc1lc3Nz2bt374p0qZHIYzKZ2OvXr4vKFQBrNBrZubk5yTJUQ2tXVxc7PDzM/vrrrwId3mV59+5d0THc/mhtQDg6pewQy7Ls/Pw8u23bNjY3N5c1GAyi7/qrz1LOTsAyjabAudi3ubk5dm5
ujq2qqgp4vL9Y3rm5OdZoNCpSOaS2V199VaQp0MOvVEUOVGbB8I7ljXVFBqRjX7dt2ybS9fDhQ76+zM3NCSrw/Pw8u3PnTkXuf6B4Um8WFxfZxcVFtrOzU/H77+2VeevQ6XSSMaUcasfpcve2s7OTNZvN/N+cNyhV9tHagHCfqbm5uYD3v7S0VBS77a8+NzU1sU1NTaGXaTQFHhcXxxoMBl5UQkJCwOOlAv3lKPBIKof3pmWj66/MgrGyssJOTk5KvrFjUZF9je6jR48kPVzv5Bjfirxt27aAzTs57z/DMGxdXR1bV1cXsBw//vhj9uOPP1Ylkcfb6HrrMJvNrMPhYH/77TdJzWoaXYZhBMlPOp1OlPggVfZKGt3ExETWaDQGvP8ul0vgHPg6CBx1dXUswzAswzAhl2lU0Qssy/JB+qHABfovLCwAAGpra6O5fMwoKSlBf3+/2jIASCdHAKtB8l9//TWSk5Mlv6fT6ZCRkYEbN25g7969isdC6vV6UYJJdXU1Lly4wAfCWywWfhQeWI09XV5eVkSf2+0OGqLW0tKC06dPA4AoTVhpuNT5rVu3or29XbEY4XCRKlffxIdQyj6WHD16FBaLBU8++aTfY3yTUPwRyW9RfIIBqUB/teDCgOrr6wX77XZ7WC+TWCNVZnq9HqWlpbxh8529DVg1vNy8DGpTXV2Nzz77DLOzs7zOI0eOwGQyqaapt7cXAPDUU0+J6gCgzbr6wgsvqFpmax0uicM3pjhcqqurMTk5idHR0bC/q8qsLjGfUCJEuPhGbpo3t9uN9957TzMPWm1tLa5evSoZSL68vIwrV67wf8/NzfHpoOFMYK4U/f39mJmZASAud7XIyMgAsJp8IIVW6inwe5lx/3J1dXFxEZWVlX6nUo0VZrMZVqs1osSW4uJi1eqoVBJHOHDlHmyy9UBoYyotFSgsLBRleS0tLeHMmTOqzg3hPYv+gQMHYDKZ8OGHHwoCsP3Nxu/L0tISOjo6FEk46O3txcaNG/3ObFZWVobt27cDgGS5K6XTG06rv7mKCwoKBCscaAnvulpYWKi40d20aRMOHToUUWJLQUEBn5UIrK4ec+7cOb4LRcvIYSMoOYIgCEJBovJ0fWevHx0d5ecqCMT/jTaqhtRqDE6nE729vYoN5PgjPT1dMNM+N9EJ1/8IQDDTfiDcbjcaGhoU8dxtNhvMZrNfTzfQoKmSOjnMZrOoW2F5eRnXrl1Dfn4+DAYDiouL+XmVFxcXFZ9/o6+vD0899ZSoOayVugqszlfS3NyMxcVFfPbZZ3wXkj/y8vJEacvT09NoaGiIpcygOJ1OwfwLeXl5Uff7+iMqo5uYmIjz58/zcxV4j/b66xdNT08XTCijBnV1daIm5cTEhCYnVAekZ5kKhtvtxujoKDweT4xUibl37x6mpqYA/N5fGgw1dALSZfrgwQO8/vrr6Ovr42cT4+qJx+NRvB+SW77ed6Idu92uel11Op2w2+38oJ7UcjucDdDpdNi8eTPi4+PR3Nws+D3cedTml19+wbvvvsv/3draqk2j60ttbS0fauHvzRXK6hJEdCwvL+O7775TfNWGlpYWPhzwxIkTfsNuuL47zuBqbXUJl8uF5eVlTcypzBlerdHX14c9e/bg5s2b/GoLnOHlwgA5G2AwGHDz5k3JMLe+vj7RnNtqYDKZ+P77WCP7QFplZSUAoLS0VPJzLYQvrXeuXbuGsrIyVa7d0dEBYDU203tZGaljGhoaFPdwQ2Hnzp24ePEidu3apbYUTfPDDz/gmWeewZ07dwQG1dcGcEvlEKtEZXRdLhfy8/Nx6dIlvpnBze8a6ioCdrsde/bs4ddIUgNu9nq18U0WCIeWlhbeyDkcDtXKk/Niv/76a7/9u1zzU80okZMnT/J9olx/c3JyMgYHB+HxeJCVlaWatrWC92oL4dgA37qqJCUlJWhvb49obmS5bFVURpebab2qqgoGg0EyQD8Q3Iztak9orJV+pbt376K
uri6i74Y6iKkUTqdT06uGSCU+cMkkROh424Dm5uagsdfNzc2w2Wyq1VW73c6vCsG1ZEIZL5HTVsnSvcCN+s3NzeH69eshf298fFyV1SU6OjoEcZdaMVYzMzO4fPmy2jIeG7g6YLPZ/D543DFclwghTV9fH95//31RZIIvPT09qj9vXBTK5OQkAIRkSOW0VXGBwrf+b4YqzcCyrN8UIdIaOf60rhWdQHRauewqKbiVQ7q7u8M6p5L332q1CsLfxsbGYLPZQv4+1VX5CVSmlBxBEAShIOTpxoj1oHWt6ARIazSsB61rRSdAni5BEISikNElCIJQkIDdCwRBEIS8kKdLEAShIGR0CYIgFISMLkEQhIKQ0SUIglAQMroEQRAKQkaXIAhCQcjoEgRBKAgZXYIgCAUho0sQBKEgZHQJgiAUhIwuQRCEgpDRJQiCUBAyugRBEApCRpcgCEJByOgSBEEoCBldgiAIBSGjSxAEoSBkdAmCIBSEjC5BEISCkNElCIJQkPhAH66lteRJa+T407pWdAKkNRrWg9a1ohMgT5cgCEJRyOgSBEEoCBldgiAIBQnYpxstqampKC8vD3rc0tISOjo6sLS0FEs5hEbIzc0FAGzfvh0A3f9wCfZcjY+Po6enR0FF64vy8nKkpqaK9s/OzuLChQtRn588XYIgCAWR3dPNzs4GAOTk5CAzMxNtbW1Bv+NwOPDpp5+Sp/MYYDab8eabbwIArFYrALr/4ZCWlobXXnst4HM1NDSEhYUF9PX1KagMyMvLQ1paWljfmZmZwdDQUIwUhUZaWhry8vL4v48ePYqsrCzRcRMTE3A4HPj3f/93LC8vR3y9OJb1H2kRSRjGwYMHAQAffPCBYL/dbofb7UZ6errIdXc4HMjKyoLD4Qh47lBDW3yv4Xa7YbfbQ/4NciB3GA73e9LT00WfTUxMwOl0hntKHqXCcDIzM3H06FFUVFTw+9xuN7777jvs3LkTLpcrIp1AdFoNBoPkQ+ZLOPUoVlpfffVVXL58Oehxdrsde/bswQ8//ACPxxPwWLm0dnd3Y/fu3aEeDgDo6+tDVVVV1OUazTNVXl4eknMIyGOrwLKs3w0AG+5WVVXFVlVVsXNzc4LNbDazANi2tjbWl7m5OdZoNAY9d6hafa8xMjLCGo1G0cYwDMswTNi/MZRNznJlGIatq6tj6+rqRGXHsixbWloa1e+R8/7725KTk9menh6R9uHhYdXKlKsHpaWlkuWqFa0A2ISEBDYhIUGkdX5+np2bm2MXFxdFeuV+roJt3d3dIZWjLyMjI6zBYGD/z3AqVlcPHjzIHjx4MCytDoeD3bhxI6vT6SIuU9m7Fzo6OgAAn376qWB/MC8mljz99NOYmJgQ7W9vbwcANDQ0KC0pLBobG3Ho0CG/n589exYrKyua/j1XrlzB888/r7YMnsbGRlRVVQEA4uNjOp4sC5WVlQCAEydOCPbv2LEDY2NjaGpq4n/PWuPpp5/GnTt38Mwzz0TVYlOClJQU3LlzB9u2bcPY2FhE55C9tnH9clL9c62traJRV64ZFEujrNPpYDQaRfvfeOMNAMArr7wS0XkPHz6sSL8ZwzBgGMbv50lJSQB+/z0bNmzA4cOHY64rFJKTk3HlyhU899xz0Ov1gs+4pqXStLa2oqKiQrJOaJHa2locOHAAwO/3mmNhYQEOhwPvv/8+FhYW+OOB1bIfHBzE3r17Fe9eCwedTgeDwYC4OP8tcqWprq5Gf38///f27dvR1taGuLg4GAyGqF7Uir7is7KyRP25brc74jeGP2w2GwYGBgAAW7duRX19veRxnBap8JBQMBgMkQmMEdzvCKV/Uil0Oh1ycnIkXxpOp1NxY9Da2oqSkpKI77nS1NfXw2q1IiMjQ7B/YWEB+/bt41tw09PTmJ6eFhzDlX17ezvq6+tjPmB17Ngx/PWvfw16XGVlJYqKigT7EhMT0dnZif3792NycjJGCv0zMTEhcFSGhoYwMzPD6yw
pKZHtWrIb3cLCQgDA7t27ce/ePbS0tABYffvm5OQIjh0dHcXJkyflloCxsTHekI+OjiI+Pp5/+681rFYrX6YcU1NTOHXqFACgpqZG9EASYhiGQWNjIyoqKvwaXLfbjffeew+Li4v8voKCAgBAcXEx0tPT0dTUhKNHj8Ltdiuie+vWrXxEEMfU1BSamprw5ZdfClqUnKNhs9n4yBAAsFgsYUcVREKoRt23PgOAXq/Hrl270NjYKLcsv3h71k6nUzBAabVaYTabsWXLFgAQRDdEi+xGlxN56NAh3L59mze6ZWVlosozPj4uS7BxIMbHx3H69GlR0zZcsrOzRW9nJdi1axdfphz3799He3s738+nRbjy2rBhAxISEgSfDQ8PAwB6e3sV05OQkICqqirJLoXx8XH853/+J3bt2oUzZ85g586dmvCEy8vLRc8M8Pv99+X27dsAVsvV2+hqiaKiIlF91gJPPvmkYNyksrJS5CQCvyfyzM7ORnwtSo4gCIJQENk9XdYn7lev1+PFF18U9H+Oj48DAG7duiX35SWZnp5GdXV1RN/lPA2r1aqKpyuFwWDAP/7jP+LEiROigRWtkJOTIxn7ODo6io8++gjAajNYC9y/fx//9V//hd27d+OVV16RDI6fmZlBV1eXopEhb731lqS3FYyZmRn09fXBYrHw+7Zu3YrR0VH+2VMSzgbo9XrU1dVJNtWXl5dx7dq1oPGvcuJtqzIyMoLG6jqdTvT09KC2thYPHjyI+LoxHUhjGAZ5eXn49NNPkZycDGC1P+qTTz4BAL7rQcvs2rULAESDcXa7XZXwFqfTiUePHuHixYuKXzscfF++wOpgxfHjx2PepRQueXl5vCHwV65DQ0OKRYTodDps3rw54sFHTivXjQOs1t//+Z//wenTp2XXGwiGYfDss88KbIAvXIJMWVmZYqGl6enpkolG/nA6nfjmm2/w+uuvR33tmBpdk8mEGzdu8H8vLCygqalJsj9KiwQK1SopKeH70JTkxo0bOH78uKBctQTXf/s3f/M3gv0ulwv79+/HlStX1JC1ZtDpdMjIyMDNmzcFrUNuwKynp0eWB18J9Ho9nn322YB1dXl5Gd999x0/YKkEiYmJOHLkSMBwxYWFBaysrPB/9/b2ylbuioaM7du3D19++aWSl4yKYEkJavDSSy9Jjv5qhUBB/Grn2K8FNm/ejJs3byIlJUWwn0s6WktROC+++KIoScqXa9euoaysTCFFq5w5cwZ79+4NeMy+fftw9epV/u9o5lrwRVGj+/DhwzUzqQkXQO/r6bpcLuzYsUO1YHO9Xh91JEYseeKJJwBIB/HLWXHDxeVyIT8/H5cuXYLJZFJNRzDi4+NF8d8tLS18t0A0fYlKEx8fL9ml0NLSwnfjOBwORbNVu7q68PLLL4sianx5+PBhzPqXZTW6Vqs1YKiKljJOgiGVyDExMYH9+/djaGhIMQNy7Ngx6HQ6zQziBSLY/VcTj8eDsbExVFVV+U1qYRgGZ8+e1dzgpFTiQ7g0NzcrGqIHSD/vzc3NsNlsqgzoAatdnqEkNdXX1+O3336LybzEshpds9kccLS1srISc3NzfBC3VpFK5ABWO9OV7pMcGhoKuYJy5co1RZXG9/57J3FEazTkIlDattFoFPTjAeqXKSCPs3Lr1i3VDJ03ZrMZGRkZmtDCMTo6io6ODmzYsAHvvPMOHwBQU1PDp/t2d3fLdj3ZjK5U0PPs7CwuXbqEyspKJCQkoKioSFCptWR8vWfjP3DggCjLa3x8XLWIgcHBQdG+hIQEvlw5uNFqNVYNkLr/9+/fF03xudZQs0wjJTs7W/F+Uil+/PFHnD9/XjCVZ1FREa5fv66JZ5+7tx999BFsNhsYhkFKSgreeOMNpKamorCwkD9GTqNLyREEQRAKIpunW1lZKRpVn56eRkNDA0pLS3mPrLi4mHfZFxcXNTOinZ6eLhkczTWDbDabanHF3d3dojet0WgUlKvaSN1/Qh6
kYp4DkZOTo4koh7GxMZw6dUrg6QLh/55YMTU1BQD8PC1utxsNDQ145ZVX+PEcLjkqLy9PNlsV8+gFj8eD0dFRPPfcc3wkADcolJSUFNas8XLjvRqD1Ij2WkvkICKHYRiYzWbodDq1pYjwDuT31zfuvVpKZmam4DOlE3mCPVdawdsOeSe+eEcsccd4PB4UFxfLct2YGl2dToc//OEPKCwsxMDAAJ5//nlBuJPFYsHnn3+O/Px8zM/PK/oGZBiGH2lvbm4Wfa7lRA4tzj+61vFN5AFWPR+lZhPjWFlZgdPpREpKCn9/a2tr+YgKf2nIUsH+LMtifn4ee/bskW36VK5lxRkm7hosyyIxMRF6vT7gc6U2LpcLy8vLIjvknb0Xc2K5VMfKygo7OTnJGgwGv8u1eB8T7BpyaQXANjU1sQ8fPmQfPnwouSxHaWkpm5CQEPYSILHQ6ruZzWbW4XCwv/32m0BzW1sb29bWJpvWaO9/OMvbqF2mubm5Iv11dXWyL4EUTKtOp2M3btzIOhwOgZbFxUV2cXFRtAwWt0kt1yPH0jK+x/oux+X97HZ2drJzc3MBnyuWZdmDBw/KXgdC/b4/OxSM7u5u2e5/zD3djIwM3LhxAx6PR3JybaW9tq6uLphMJqSnpwdcjUHLiRxSAfRaxGQyYXBwMKRFJ7WIGp6ux+PB1NQUtm3bJkjk4DzMUPvwuRVZpqamgi5MGQ5c8gs3RWZycrLg+Q5WL6urq1WdN8TlconCApVGNqPLzRrPTRDDNTG42ev9wSUcKJVpYzKZkJubK/kZNxu/2+1WbAa09YLU/f/ll19w/PhxwaTgWmViYgJlZWWC5Air1Yrt27cLjlFi0hvvRI7m5uaQJ9Dmwto6OjrgdDplX5FFimDPtzfV1dX47LPPopqLVg782SqlkM3ociN73FIbod7w2dlZzUyCsrKygqtXryo6vZxcdHd3h7Q0d6yQuv9aurfBcDqduHr1qsALysnJERiU2dlZLC4uKrZyRF9fH95//33Jicyl4PolYxkD6291Cn94J8hcuHBBdYMLRGar5EzmiGMDDF5FspZ8LGEDrCUfqtbh4WGRpzs+Po6enh643W7ZHig5tPojNzdX1PFfXV0dcSKCP63r8f4Hwmg0YmJiIuCClQ6HA1lZWUFfzLHWKieRaDWbzSEZXe8lu+RgPdRVSo4gCIJQEEVnGdMCfX19otVGb926pcnwFn84HA5RV4KWctnXKsvLy+jt7Q044Y3as6VphbGxsYhXY3nceey6F5RiPWhdKzoB0hoN60HrWtEJUPcCQRCEogT0dAmCIAh5IU+XIAhCQcjoEgRBKAgZXYIgCAUho0sQBKEgZHQJgiAUhIwuQRCEgpDRJQiCUBAyugRBEApCRpcgCEJByOgSBEEoCBldgiAIBSGjSxAEoSBkdAmCIBSEjC5BEISCkNElCIJQEDK6BEEQCkJGlyAIQkHI6BIEQSgIGV2CIAgFIaNLEAShIPGBPlxLyxqT1shZD8tak9bIWQ9a14pOgDxdgiAIRSGjSxAEoSBkdAmCIBQkYJ8uQYRLbm4uAGD79u0AgKWlJXR0dGBpaUk9UYTmSUhIQGVlJRISEgAA/f39uH37trqiYgR5ugRBEAqiKU/XYrHAYDAI9s3MzGBoaEglRUS4FBYWAgDa2toAAAsLC3A4HPj666/hdDrVlEZoFIPBgB07duDEiRNISkoCABw6dEhRT9dgMMBisYj2//jjjwCAsbEx2a6lqNHNzMyE0WjE7OwsAGB6elrweWtrK9885bh8+TKKi4ujui7DMDCZTFGdw+FwYHJyMqpzyIlOp8PmzZsRHy99C91uN+x2u8KqxCQlJeHixYvYsmWLppqL6enpSE1NDXjM9PQ0X1eJ2JGVlYWLFy+qrqG7u1u0/4MPPgAAVFdXy3YtxYxucnIy/vKXv2DXrl1ob28HADQ0NMDj8cDlcsX02iaTCcPDw1GdQw7jLxc6nQ4ZGRm4efOmqGX
AMTo6ivz8fMzPz4NlNRXCqAneeustHDp0KOAx9fX1OH36NIDVlxgRmMTEROj1er7/fq2UmV6v5z1sX7g+5sTERDx48ECW6ylmdK9cuYLnn38eAFBZWQkAKC0txejoKAoKCpSSsS7YvHkzbt68iZSUFL/HPP3007hz5w6eeeYZatZHyDvvvMOXcUNDg8pqtM+ZM2dETtVa4MUXX8Snn34q+Rlnq5KTk/H666/Lcr2YGl2TyYSuri7+/3q9HsDvb4+EhAQkJycjOTkZV65cEXQBnD9/HgDQ2NgYtQ673Y4tW7YI9tXU1KCioiLqc8ea1tZWUV8TwzACD7e6uhrPPvus4PfodDoYDAbExflNjCGCwDAMGIZR/Lrc85CcnBzVeVwuF3bu3BnzliQAdHV14eWXX4bBYFClzKIhPj7eb1lztsqfJxzR9WQ7kwQMw4j6aG02G/9/q9XK9+c8//zzvFEGgPv37wOALP2obrdb1J94/PhxfPHFFyK9Z8+elbWAoyUrK0tUht5UV1fjs88+w8aNG5UTtQ6w2WwYGBjg/87MzOQH/9RGp9MhJycHRqMxqvM4HA7odDp5REngXWZSg+ChkJeXh+bmZsG+5uZm9Pb2yqIxFJR2TGJmdM1mM2pqagT7bDYbpqen+RFuYHXUcNeuXYLjuru7cfny5VhJA7A6Guk9Ipmeno4jR44IDD8A/sHs6OiIqZ5I6e/vx8zMjNoyePxV4JqaGhw/flzWUeBo8L3/gV5shBiz2Yy3334bu3fvjuj7nA2oqakRteRu3bqF8fHxaCVqFtmNLld533zzTcnme2FhocDo+tLT04PTp08LvJBY4R3I/8c//hFVVVWiY7gBuJ6enpjrCQcu6eAf/uEfsH37dlH3idaoqKjAF198oQmjW1RUhOzsbP7v8fFx/Pzzz4Jjenp6MD4+jsHBQaXl8Zw/f55v8QWjsLBQ0TqwadMmyed7YGAgpDLjtBYVFQn2nz9/ng/TWq9QcgRBEISCyO7p/r//9/8ArDY/fLFaraJ9TqcTfX19/N/Hjh1TJBnCbDbjzTff9KtraGgIMzMzqntmt27dQk5OjsAzczqd6OnpQW1tLW7evCnZNHY6nejt7cXy8rKCaqH58DSLxYIjR44gLy+P33f58mVUV1cLurSUqodSLC8vo7e3F42NjSGPabS1tQk83Vjdf64ebt26VfTZ0NAQTp06FVKr0F89OXXqlOrPnDdcN8etW7dkO6fsRvfbb78FsDqa6V2xvfFOjrDb7SgpKZFbRlCsVqukseXo6upCf3+/6sHxzc3N+Pu//3tBTOkvv/yCU6dO4c9//rNgpNi3XOUKcZELLRhkqQQcg8GAhIQEzcRhP3jwIKx7l5mZiSeffFKwb2JiIib3nxt/qa+vF+y32+2or68XOFD+SE9PR3p6umCfx+PB999/r0psr796OTU1hU8++QQA0NLSItv1ZDW6DMPwhsx3RBJYTQldWVnBuXPnAGg7jo8blVU6kUMKt9vNV0Yuu8432cPtdq+JclUbl8sFh8MBhmH4cCCLxYLPP/+cjxd3uVzweDxqygwZ76QjjuXlZcXraUlJScgZh1KJKS6XCwUFBXA4HLJri5STJ0/yyTFyIqvRbWxsDJjls2/fPly9enVNzTilhUSOo0ePYn5+HoD0ywwA3nvvvZhUkPXGzp07odPp0NTUJBg4ffrppzExMQEAyM/P11QTNxDeSUcc165dQ1lZmUqKiGDIZnRbW1tRUVHhNzC6pKQE33zzzZrLjvJO5HjuuecwODioWMA5h7enK0V1dTUuXLig+bTLtrY2JCUl4cKFC6pp4O6b74tfp9PxcbGXLl1CVVVVSE1ltUlOTubDHL0TipT2dLu6ukKuf75dC2pRXl4OYNWpURLZjG5WVlbACUTsdrumDK5vcLwUlZWVgpAWhmGQk5MT04DzcOGSI9Tue+bggtqfeuopUb9fVlYW3n77bWzYsAGAMFFGaQIlR5hMJjQ3N2N
mZob/PWpqlSIxMRFnzpxBVlYWv0/OhKJwiXZCKTXg+sG9y1AJZDG6tbW1yMnJCXiM1oPjpZibm8P169cBrEY7BBp4izX+kg6effZZ/Md//IdmEiS40V6bzYb4+HjU1tYKPs/JyZGMbFEa3/ufmpqKDRs24J133gHDMPwgsJZmluPgEnn27t3Lt8SUSCgCfk8Wstlsqj4PchAoEy0UpyxSojK63GzvBw4cQEZGBoDfkwmmpqYEXqKWguNDZWBggPfOtWAopNBquY6Pj+PixYsio6tVZmdncfr0aaSkpOCNN94IOu2jmqSmpgr6o5VMKOIGyz744IOwuzDKy8sly3V2dhbnzp3T1FhPb29vzKYipeQIgiAIBYnK02UYBk1NTfwAxOjoKD766CMAwP/+7/+KUvzWIlzKstpNKe9YQqfTiRs3buCll16CXq/H1q1bMTo6umby1bkA+7y8PE2tCuJ2u9HQ0IANGzagpKQEaWlpaksKiY6ODkW8XG/GxsbCnth7+/btIk93ZmYGXV1dqoQ5qhU3HrGnyzAMzGazYFDJZrPxm9QP0kJwfDhwQdxaGW3l4ALfuUmV6+vrRZMGaQG3243R0VFRzGtRURGKiopQV1enkrLAfPLJJ7h7967aMiQxGAyCQSutDVBLodPpYDabJSObhoaGcPjwYRVUBSaWtipiT9dkMuHGjRtyalGduLg4pKSk8B3sR44cEfSdeTweOJ1OTbw8WJaF0+lEcnIydDqdYO5XrYSO2e12FBQUYGJiQnKawvj4eBiNRrAsq6kVLrq6ujQ769j27dv5OaqB8JIS1CI5ORk3btwQ1AGu/3ZhYUElVeqhqYUp1SYlJQV37tzh5wX1fTN///33/BI4ajM/P49nnnkGN27cQE5Ozppc5eCll17CxMQEnE4nrXDxmMFNlbpWBlrlRDajW11djYsXL/oNOC4pKUF/f79cl5MVi8WC1tZWfu0xqTjcvr4+VFVVacYwcJ4u13RXa5WDYLhcLuTn5+PSpUuiWE69Xs97P2qvcOG7yonWUCuQP1Zwnq5c646tJWQxutXV1cjKysLZs2f5QRLfgGOt9D21traKtKWlpUk2J202Gx8cPzMzo4nVdYHVsu3s7MQ///M/qy0lKB6PB2NjY6iqqkJzc7PfSZDUhFu9QGqVE60kRfgG8i8sLGDfvn186rKWefDgASoqKvCXv/xF8USESImlEyCL0X322WfxwgsvCKYf5HC73XjvvfdEy62rhcViCdpf19LSgnv37qG/v18z/WW+Qel79+6Fy+XS3CCfP/r6+vD+++9L1pGlpSXF+6GLi4v5eTSys7NFqxfYbDZ8+OGHmol/9jUCKysruHr1qiYcmWAsLy/jypUrMBqNfPRCtKtzy4FarauIje7s7Cy/Jjzgf2UFt9uN06dPa2ZwR4rx8XGB/tOnT2vmJcHhHZSekJCAiooKyZUutIyWVt8oKCgQTc7ErcaxtLQEm82mGYO7XlBzzg0pOMPvbcc4Yrl6BSVHEARBKEjEnu709HTYwdFaoK+vT5RPf+vWLb9TJmqNsbExNDY2Sq5YTJ5Z6IyNjYnmKlhYWEBtba0mB3e0Ek63nuC67JROLIkLdDPj4uI0dadZlvXbCUNaI8ef1rWiE1j/Wg8ePAjg96aww+FAVlaWLJN+r4dyXSs6AepeIAiCUJSAni5BEAQhL+TpEgRBKAgZXYIgCAUho0sQBKEgZHQJgiAUhIwuQRCEgpDRJQiCUJD/D4RTcQYs5MLYAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "count = 1\n", - "%matplotlib inline\n", - "for i in test_inputs[:32]:\n", - " plt.subplot(4, 8, count)\n", - " plt.imshow(np.squeeze(i), cmap='gray', interpolation='nearest')\n", - " plt.xticks([])\n", - " plt.axis(\"off\")\n", - " count += 1\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "three-printer", - "metadata": {}, - "source": [ - "调用MindArmour提供的FGSM接口(FastGradientSignMethod),对验证的图片数据进行对抗性攻击。\n", - "\n", - "查看之前选取的32张图片,对抗性攻击后,图片产生了什么变化。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "superb-bearing", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAADjCAYAAACcnE9mAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAABxTUlEQVR4nO29e3gWxd3//17DKYnkzoONPBiwCYRGAlG0YtCWU7Tl4EKAb/kFJfQp514eKghogdD6/RLFKpJYrZYAej0lULh85LhysG1AsYimajQaiJysJPBAFJJgEmII+/vjzoyzc8/szt6HQNp9XRcXuXfn8Nk5fHbmM5+Z1UzThIeHh4dH++OaKy2Ah4eHh0dweArcw8PDo53iKXAPDw+PdoqnwD08PDzaKZ4C9/Dw8GineArcw8PDo50SkgLXNG2UpmkVmqYd1TTt1+ESKhJ4skaG9iJre5ET8GSNFO1JVmVM0wzqH4AoAMcA9AbQCcDHANKCTS+S/zxZ/71lbS9yerJ6srr9F8oI/A4AR03TPG6a5rcANgLICiG9SOLJGhnai6ztRU7AkzVStCdZlQlFgScCOMn8rmy9djXiyRoZ2ous7UVOwJM1UrQnWZXRWqcX7iNq2s8AjDJNc2br76kAMkzTfIgLNxvA7NafPwxBVgCAz+eT3qutrRWGY69zfGWaZgL5EYysdvLwsvl8PioLicf+tpETpmlqIjmjoqJ+eO2111rSspPNLo8wcdE0zWjyg5e1paWFyiYqC6dyIDi1A/Z+c3MzmpubERMTQ++3Iq1/u3IVyRIOmUVER0ejsbHRVla0tlUih6ht2eXLxgu2ffh8Pnz77bdobGyUtlW46P+8zHyduoHXC8xvaVsF8EO+XarKISp3VflFfbj1mqX+qcwhKPA7ATxhmubI1t+LAMA0zeU2cUI+eEXXdek9wzCg67rwfwkfmKZ5eyiy2qXvkLclDHmu8+fP4/PPP0dGRgYMw8BNN92Ew4cPWzoFS3x8vDlkyBAVUQFAKA/Jm9yTlbHTs7RSZZpmT9ENlTLly0OGXd3alSmJ24q0/t2WK5euUCbVuAkJCVTWo0ePAgAOHz6s3Fb5+iTX+H5BwqiWOQ9frkePHrVtq276VDiwaxut2LZVUbmIypSXm+9Hiv3GCWH9h2JCKQHQV9O0ZE3TOgGYDGC7XQTZG0jXdfpPBbZADMOg/9h7bM
GPGTMGMTExyMzMxJgxY9C1a1elfIKFf46bbroJI0aMwNGjRzFixAj699tvvx0Q3ufzobq6mj7H4cOHHfMTNRCnMrGLLypf0XNJqFEJxMJ3EJV82DBs22H/J7L//e9/R319PRoaGnD58mW34lG5+HJhrzt1Uj48f42F1P/ly5dRVVWF7t27K8nIKgxRnfJlRP4ORnkD/rZKytUwDFRVVblOwwmVspXFAwLbbENDA/uzxikdPr6onZF/RFb+dyTpEGxE0zQvaZr2EIA98K/wvmKa5mehCuS289rBVmL//v3x3nvvob6+PiT5VOQi+Xbu3Bk7d+7El19+icTERPTp0wfFxcU0fJ8+ffDMM88EyDpo0CCUl5eHTVZRo7MLZ3eN/33mzBle1osqsgD2I5Nx48ahT58+Addnz/bPcAsLCzF79mz06dNHqFh5GhoaLGUfLtwqPl55kv9ZuUn979u3D7169QrbgMOuvHklPnz4cNx6663S8Nu3b8fzzz+PQYMG4b333gMA3HDDDaioqIiInGz/cKMU2ec6c+YMlbUV27aqIpudrCx8OzEMA3/729/o77vvvtutKABC9AM3TXOnaZo/ME2zj2maT4aSVqTp3r07RowYcaXFUMKT9d8bUqaZmZno27fvlRbHFrb+25Os/yoEPQIPlWCmbKr2OpV7kZjakDSjoqIAAG+++SYmTZqExMREet/JhMHL5TTiqK2tVRo5y+R1Uw/BTrXdYhgGbrnlFvTo0SPgXkFBAQD/iJr8vX2733K3evXqsMlAFpPYKTGLGxu9qpmCr2u3bTSUNk1ky8zMxMKFC9GjRw+kpqZKw7/44osBeYarTymuX9nG5XFpBpTGF6XnZu2DzCrLysowYMAAGjc3NxelpaVKebMEvYgZDPHx8aZolTvcixZOC50MIS9isvTu3RsA8Pzzzwvvi+QaO3as7SIo4JdZZWHIblEvXNgpota8XS8M8jIvW7YMAwcOdC3b5s2b8eqrrzqGY8rJtv7dds5gEb20Bfcdy1V1jcOOFStW2CpuQlZWVsB6AimvcCxihvNlICIc9R9M/nPnzgXgN5mw+mrp0qVOClwo6xUbgQPhVSxu0gtnA1HhwoUL6NKli/BeqAqXuBnpuo7o6Gjk5OQAAIqKimiY+Ph4FBUV0XtuaWlpwYULF6Qyh2tUQ6irq0NzczM6duyIpqYmACCudAD8z6xpWkDeo0ePRnV1tWNewYzqOnfuDAB4/fXXASDosuQh9VJUVIQJEyZgy5YtwvydZmEknAhR/P79+wMAnn76acf0m5ubA9Zjgl0MVsGpfqZMmYL+/ftj0aJFMAwDL730EgBg165dlnB23iNOsA4XoegLflD57bffAgA2bdqE2NjYANlk8WVcUQUuGjEHO3VikYxgpL9DQcUkc//992POnDno2LGjJV5eXl7I+bNT/cGDB+PBBx8EAIwcOdISjv8NgCoO9n8CeQGQa0RhsvITVMtT1ce4srISe/bswR//+EeMGTMGAHDddddRmdatW4f4+PiAvEePHi1Mz61Zio9nGAZ69uxJ82YhZSe6pqLkSb2MHDkSU6dOdQwvws6/WPScgwcPxrvvvmsbhmXPnj1YtWpVULIFi6iOdF2Hz+ej5U10BXnJx8XFYdOmTTQsH5egUve1tbVCpc9fMwwDtbW1iIuLE/YZXg7Snrt27YoNGzYI83ajn66oCcVpunfTTTdhypQpjumuX78egN/lTmUK6XYKLap80Uti2rRpAIBXX30Vubm5VEHPnDkTq1evDpCjtLQUS5cuDZBNJL+dHzgZgRcUFODQoUOiYCHBy+nwglSe6ovsiCTt5ORk6lIJwKJA1q1bJ1SOLS0t6NChg9DmLOkUjiY0kpaTeSwcZGVl0Q7OYxiGbbnK1kKY+PRZjh49KvTw0TQNubm5Addra2tx4sSJgPRk2JlQ3Nq3WcU5f/58rFixwjZ8Q0MDsrOzw1b/KrJt3bqVrnuxjB07NuAacR3Oy8
tDcnIyLly4gMrKSpSUlNBZphtZ23QErjoCGzFiBJKSknDLLbcIGxoPcQ87fPgwtm7dSq/PmzcvoOEB4VnsEV0nykbXdZSWltKwZHFNNCpYunSpG5t9AHzHjeQirR0qHZKMakQKlo9/4sQJzJw5k4b5wx/+AADCjsLLwf4PBFcWPp8PxF6v6zq1XbKmnHCjaZqq4rHA9iu72YZhGBgwYICwT61atQoTJkyQ2mF5nGa5IviZgigNWV9YtmyZ8OXCQ3bbOi06q7YJvq3yMubn5zu2SZbvfe97APwDFMA/6HjssceUF7p5rhoTimEYdJNLcXGx41v6d7/7HcaMGYP09HTqGpSSkoJdu3bRQnjhhReClo00NtUCFRV+dHQ0fvOb31jCEMrKyvD00087pumGrVu34pFHHgFgr1D37dsHwD+dI37VLOPHj8fw4cMt1+xmNm4bHatQZMycORNZWeE5aygcax5vvfUWANByEZUbYcCAAZg5c6ar9HVdx6VLl/Dcc8/R+iHX3YxUefi4gwYNUgong1dmvAyqstopRRZS3uHwLyd5BROej0f69v79+5GSkqK0LjRx4kThAnFOTg5qamqEcZzKtM0VuJ1Ajz76qCUc+z8AHDt2jP79zjvvoG/fvkhPT6fT+2DeYDLcngnB5tunTx+MGzcOXbp0wYABA4ThZsyYgaFDhyqlq9ro2I4/fvx4rFy5kv5mXezITCE/Px/9+vUL6EwffvhhgAKXyUZQHYE5lSuRJZh6jIqKEtqkeVlJPk6w5fLRRx9Z/u/Xr580XlpaGr0/a9Ysy73bbrsNAALKl8hz6623WuoxmJGtjLi4OPTt21do2503bx5mz55NN0gVFhbSZ2XlYQmmjkT1K+rrgL+MPvvMvzdw4sSJFrmLiopw55130gGLKm5mqLLnS0hIwMyZMzFgwACLKyApr71792LhwoVISUmxxBs6dCgSEgKOM1HOV4T3RR4PDw+Pdkqbj8BFbz67tyE5ve7999/HU089Ra+vXLkS8+bNs4Qlb64tW7Zgz5490jTdeiHI4ov+B4Cbb74ZmZmZAemUlZVhxowZAICzZ88Kp6OiBT03LFiwgP7PpkVsuLLnYf/mF0N9Ph8Mw8CECRPQ3NwsnOmEw0xhh+qIKTs7GzU1NdizZw+am5tDylOl/EV1tXz5cpSVlQEAYmNjLbL/85//BPDdCJzPI5jNSHazGjb9HTt2WGTm/y4oKKAbpBoaGjB//nwAfrs/2YIeygyX9Zhi0xK1p4yMDMyfP19a75s2bUJMTIxwNsE/VzAy281qEhMTcddddwXks3fvXsv/7DlGslkGAFtd5USbKnCVKTSrDP/2t7/R3XY+n49u7sjLy6ONUcS7775LvRZEU+dwTPf5CtF1nU6PkpKSAsK+8MILePrpp1FXVxcgj+x3MCYh2RTRbVpsOsnJyTBNE1OnTpXa6lRg/ZWdbIYnTpywTEFli2uJiYm03EmasbGx1J/aznbvBtlLVWa35H8TrxqROYLUTX5+ftjPa2GfNS8vT2khkI+bm5uLBQsWhGyHlvUpvrxSU1ORm5sbUE+NjY1Uhvnz5+PcuXPC+AMHDgwYEIXLtCpC13WsWrUKp0+ftlxj801OTlYyn/A4tdU2dSOUuebouo6BAwdi2bJllusHDhzAyy+/DAC49957MXnyZKV8RO47PK0F4+iaJZJVRHR0NB3lkrczm5fdzjUVVI+TFaXntvEWFBRQTwWSHhnV1tTUCOMw+Tq6ZorkFM08VMpmxIgRlrUTIivrU21j/w7rcbJs+vzsbNq0aZg4caK0Lux24rWmqSyrbPQpGoGLNunExsZa9iwAYldSWR06ubyKZAP8XjhxcXHo378/0tPTA+KXlZWhvLyc+tjHxsbij3/8Y0A4IpeCHnDd/wn8ALJbt25UV4lYt26dsD2TflVUVOS0EH3l3QhVIQ/a2NhINzrwyjvSb1Un2fjp2uDBg+lxmiIXucLCQouLYzDKKlSZSX
4qSt5JnmDkVVlsEy1uuc3P5/Nh3bp1VIkHI6vMNU+lzYlmZ6tXr0ZUVJSSeSzY0aPMPGkXX7RJZ86cOY55ujWFqhAXF4fBgwejV69ewvtpaWlYtGgR1QlkJ2M4ZSC4dWKoqqqicokW0rds2RLwUiQQ5R2MV88VHYGzjSQ6OhqDBg3CwoULbYVWaczhGIHbzRZ42/WIESMwdepU2ynSmTNnhK5ldjZw9rfT5gg2DRmqCoG3gVdUVGDBggXIysrCtm3bbONCYVQTrPeCDCIr+fgBIdQRmN3Mhrff2rnFEXstz3PPPUfXLERwo3ilTUd2srKjRmLKk20Zz87OtmyWamxsRElJCSorK3HhwgXb+lAdgbP5pqamYsWKFdi2bZsrv2oe0lYVCXojj50J1w38bJGHqZMrPwInmyNk9lliu37wwQfx61//GoMHD7aEO3HiBF555RUAwPTp04V55OfnR0Z4Bl5533fffbbKm4S/fPkyPTlPFsbpmpNcdh2L3LNbnAX8rm+sWYJsXiELysHCL2KJ8laBD0tmPiqzCzcjNJXFWtHiFPk7NjYWhYWFFuXNxnXaFOR2NOn0cs7Pz6cKm7zsZDb9J5980vISnzRpEiZNmkSVjcN03zXR0dEwDCMk5a3rfvdEVoEHK6dMVxHGjx8PwF+mZFOOGw4ePIinn37asU85ydmmCpzfiUcghUw8HFpaWvDkk08G7Bh78skn6dTk1KlTdKGDLAw999xzrjwH7HBaxCRmE7uRNy/LrFmz0KGDv8g3b94sTTcU05DbRTu2Lsj/sjOTwzE9lSlT9rkfe+wx7Nq1i3pyyOQg4ck5Lbx8oiM63Zis7MrSzqySnp6OGTNmoHPnzoiLixOmmZKSYjl4SzWvYCCbTqKjo7F27VphucpeVm4IVk7iWEBGtSrmG3YjH8umTZvCsnApW3Anv4nizc3NRUJCAgoLCwPSIJu9Fi5cSI+UJpw6dSrkARFwhUbgPCKbsQjWrsSuUn/44YeWtCIBK1NCQgL69++PRx99FNHR0TaxAiGbdzZv3oyEhATLNHX79u0BCs6pEZIRVSgNlldqZLOJLJwMlVGNLFyfPn2oW+iQIUOQmpqKF198MWBhm5dj+PDhtEw7dOhgUe7vvfeesFxU2kgoH39OSUmRHgFBNqP169fPdjMQEB7XTF33n6RINpSlpKRYFLidTZ5XXJ9++ik9OCpS8vKwm/cIK1eutGzkC8fs1Q67WRhpe6KZP6nf6upqqsDJZp+srCzhyZluy8/byOPh4eHRTmlzEwpBZcRtR15eXlCH/rtBJFePHj0wefJkaou3G8G0tLRg1qxZyMzMRE5ODgzDoDbwXbt24c0337R8ZPXFF18MmFY5vZGdzFJuRuZkcxSbzunTpzF58mT+W4JBwR/RSRg8eDCWLFmChoYGKuv111+P9evX246eyWYP0T2CbHSmOluQpcuTnp5O/dZXr16N66+/PkCGlpYWfPrpp5Z0VbxRVHCy9ZJjcO+44w588skndGQraicZGRn43e9+h3HjxtF08vLyLAevRcILjJgVN2/ejNWrV+OVV14RrhmJvE8I2dnZwqMU3OLkMeXGVJuRkUGvkU0+77//vrS9qniNERwVuKZpvQD8CUB3ACaAQtM0n9c07QkAswCQecBi0zR32qVFdvSFUvFRUVFIS0ujJpSqqiqLA70KgwYNQklJiW0Y2RQ6IyPDsstS9iwNDQ20ofXu3ZtWwrhx41BdXY2DBw/iF7/4BQBg1KhRGDdunNAmpjotFS0Ms3+rlDkxYbBhv/jiC9x+++106nzjjTeid+/eqKiowJdffkk/dKDyFRceckTrkiVLAuQ+ceIEvvrqq4AXZFZWFtLS0hAVFRWwKaW6uhr5+fk4f/48Hn/8cXTv3h2xsbEBsvKbqUSwL0a2Dvi/CTNmzKCLfuzzkGc6c+aMxY20sbER1113HV2/EZVrXFyckqxsPk71PGDAAHzwwQd45513LNdJPH4TDS
nT//3f/6V5sPm5wcksxX5NSfRlJadnO3nyJH7+85/jH//4BwB5W3VTpuGGvNjJmTSLFi3C8uXLpe3KCZUR+CUA803T/FDTtK4APtA07S+t9/JN01zh6glgP2plw/D3o6Ki0LNnT8uW+meeeQbHjx9XzjtcowbRaJeMrhYtWmQbNyoqCtOnT0dKSgoaGhowb9484Qdhwz3CCYYOHTogLS2Nvnz/+c9/ory8nN4nit3phchCnotNh2fJkiWWrwCRDzgUFRUJ/ekBf7n+4he/wKOPPors7Gy88cYbuP3223HkyBGLrKovRdGCpyhebGwsHn74YTzwwAOW66ZpUqV16dIly72//e1vGDJkCHw+Hy5duoT9+/fTxfDevXsHbKSygz05Uxa+qKiIluHjjz9Ov3RPvmJD7vHnbUdFReHnP/85zp49i2effRaxsbEBG39UUdmJzWLX/kWL4T/84Q/x3HPP4be//S0Mw0B5eXlAW1Wt+1DWQFimTJlCPWvIYIBA1ndOnjwZ9A5nRwVumuZpAKdb/76gadohAIn2scTwheJUkKL769atw44dOzB27Fjouv8zYmS7vSh+RkYGzp07hw4dOiidLU4gCotg54oWFxeHrKysgAUe0ljISJHE69atG7p16wbAf35xr1698NhjjynLFgxOnigyevTogf3799PfwXZeIHARm6zc33333QEyrl69WniWyciRI7Fjx46ALwQB/nKtqqqCpmkYO3Ysrr32WnoMLCGYTuLU6QsLCxEXFxcQpra21tbHd//+/dB1Hbt370b37t1x8eJF17KxuBkRkzN57r//ftt0unXrhgMHDmDVqlXQdR0lJSVBtwGnj7nYoTK76NGjB/0QNl9n/fr1c93/+fxF2Mk0ZcoUFBUVYdu2bUEPxsLqRqhpWhKAWwG8B+BHAB7SNO3nAP4B/yj9fFBSuoCMFJxoaGhAly5dEB8fj3PnzuGLL75AZWUl4uPjHVf/AfnLZufOnZbNLNnZ2WhoaJBuMQf8B0utWLFCWIkPP/wwDh06hPHjx+Ozzz5DZWUlOnTogPj4+LDaGe2UELnOriuQfKOiolBfX4/Y2FhkZmaiuLgYI0eOxPHjx6ms7Eg5VBmJPHZ2Th5i8ySf0wL8Gy26dOkSIOvHH3+sVP8y2QhsWRJZZS6KKmV/5swZnDlzBoDf7Y9smPnpT3+KN99801Y2XjHyM9yoqCiUl5fTmathGNScQP63e9bRo0ejqqoKDQ0NjucD2SkcmReaCqo2Z1kdHDp0iJq4VMvUTmmrvCzJIMPuXPvFixfT/hOMB4+yAtc07VoArwOYa5pmnaZpLwNYBr9dfBmA5wAE7K7RNG02gNmAv2GG64smhmEgKSlJuCPq0qVL+OCDD9C/f3907NgRSUlJdBp94cIFnDx5UvaMVFZAXFGXL1+mTvyAf2FK5HfK2k8rKiqQl5eHG264wZLW2bNnsW7dOvTt2xeXLl2ict5777300B5RpfJyRpqYmBiMGjUK7777Ln74wx/SMv3BD34AwL/7rampSViuvKxsOZG66969O3Vzc7twN378eKSlpVmuXbp0Ce+++y5qa2uxZ88e/OQnP6GyvvHGG0r1z7uH8jZw8gyiL7KQMDk5OcjJyUFLSwuWL18e8Pk8GaSPpKSkSBUNL6udz3hLSwvKysroRp7p06c7LviS0zIBv88y26d69OiBpqYm/OUvf6HhZdvf7crUDaL+RT5hyJKXl+fYhlTL1E5XhWNgJZpFuk5DZSu9pmkdARgA9pimuVJwPwmAYZrmALt02G8iAu5HLOz2Xl3X8corr2DLli0BYRMSEqRfJ+cI6Zt4rLw8TqPny5cvo6SkBAkJCXQxj6WhocFyMp3dVnpVOZ1YtmxZgGePrutobm5Gp06dkJaWJpW1pKSEjCRcb6UnH8BgEX3xRrRZYt++fdS31jAMjBkzhparnY29FaVvosrQdR0ZGRlKJ/zJ2qoLlLfSOzF8+HDceuutlmvkIw
4EtlxJW62urhbmQep/2LBhMAy1Yx/CwdatWwO+RP/BBx/giSeekMYhs0gSPJijNADxC4V/ts6dO2PGjBl4+OGHA9IR+bc7IJTVUYFr/tfEfwM4Z5rmXOZ6j1b7ODRNmwcgwzRN2+MCnQqFIGrkUVFRlsUrwL+dNZjdTDfccANOnToFhEmBq8A+o2maeOONN5CcnIz+/fvT6xcvXkSXLl0AAMePH0dNTQ1uu+02x07hJn+ZXR/wj4j5hmiaJv7rv/4L77zzjlRWwzCUylTlo9ahQMrViXDW/0033YRf/vKX0i/DVFZWoqmpie6AjKQCB+R9yW4NJ9yonpwZKuRjwuR5TNPE0qVL8fHHH0vjpKWlsS/2oOpfpLgjXaYI4SyUHwGYCqBM07TS1muLAdynadpA+E0oXwCYExYx24C0tDTSga8I58/7lwq++uor+pXq1NRUnDp1iro4xcTECI/UbGv+/ve/Y926dejatatUVuDKlynwXbk6cTXI6hF+ysvLbZU3AHz99ddtJE3b0KanEbJvYNUFOvJmY0fgJF4YbEhKo0W70YvK2zeUFWiSfjhNKDJ58vPz6WYUfuTGnuzn8LwROY2Q5BuM94INYZmBkY8Ys14ORE7yEQxVbOR1fXZ5G4wKhTi1VdEswckcIWLx4sW44447LNd+8pOfBHwFyQbXs8UrVaa4Gk4jZFd2VSqMLSz+RcMqbxV/ctFGDFVU3QlV4jsRKd9v2UIrm+e8efOQn5+PjRs3WuKxu0XDgZNd2W5aquq/bbfO4sYPXGWK/Omnn+K2226jLovEJv7YY49ZdgWK0uHrJZjpeTh8lt0qKF4uN/3KqV5DqT/WIyiUfmvn8ux2H4EbZH1UxlXxRR4e0QNcc8011J/SMAylM78VaDMbeKiEYwSusigHqL1QbdK+4mXqQhkpyxrKiyBMOI7ARfXm5DBgh4o9nb8HRL6tulGMbTEDC7Yd8P3Nrv5ksra1Aq8G8M82y9CZ75umKTwL9iqTtb3ICXiyRor2Imt7kRP4F5C1TRW4h4eHh0f48I6T9fDw8GineArcw8PDo53iKXAPDw+PdoqnwD08PDzaKZ4C9/Dw8GineArcw8PDo53iKXAPDw+PdoqnwD08PDzaKZ4C9/Dw8GineArcw8PDo53iKXAPDw+PdoqnwD08PDzaKZ4C9/Dw8GineArcw8PDo53iKXAPDw+PdoqnwD08PDzaKSEpcE3TRmmaVqFp2lFN034dLqEigSdrZGgvsrYXOQFP1kjRnmRVxjTNoP4BiAJwDEBvAJ0AfAwgLdj0IvnPk/XfW9b2Iqcnqyer23+hjMDvAHDUNM3jpml+C2AjgKwQ0osknqyRob3I2l7kBDxZI0V7klWZoL+JqWnazwCMMk1zZuvvqQAyTNN8yCaOCQA+n89yvba2lv7N33OCjRsEX5nyj5oqFQyRV/QMtbW1lucRycqHbW5uRnNzM2JiYizhTYUvfft8Pkc5QiwvmgefF8NF0zSjnWTl0xThJKtIBlHbiomJoWXKpetY/+zzqsgkkkGEKE1S/w0NDaIoym1VlL+d3OFqGwSntuqmj4dLJgmObdWpbGz6QbgR1n+HSOeqadpsALPZa0OGDBGGNQxDek8U1gld1wEAp06dQnV1NW655RY+ruWr0yJZnSDysvKwFcr+TeRhw8vC2jUKXk5d12nZkXTZvEh+JBy5JypDURg+LYKkXC/YycrmI5ONlUsmB/+8hNra2oC0brrpJouclZWVKC0tBRTqX1S/ThAZRM/mxKlTp/Dhhx+Kbim3VbcKRTW8qE316tULt9xyi7R8VMpUVudOyPIUpUfa6smTJ9mgjm3VqWzYZ5HVueg5VZ6Re75/isKEMgK/E8ATpmmObP29CABM01xuE8cExErCDaoVTDh//jw+//xzZGRk8Pl9YJrm7Xayqsri5oXCYhdP13UcPXoUhw8fVhqB23UEVQUik08UV1KuVaZp9lSR1S5Pcn/cuHEAgHfffR
dffPEFYmNjLXKpkpCQQOUkZQqb+o+PjzdDGVmpvAAJoheXANu2qqIcVJWkaluR9atg2qoImfLjw6jITWStrq5mL0vbanx8vKny8nYaDLGyye7Z4aSrQhmBlwDoq2laMoAqAJMB3O8USUVwu7eXWyVkGAbGjBmD6upq150+0jjJc/nyZVRVVYUlPbbchg8fDgAYP368JcysWbPo36tXrwYAvP/++zh79qyw8/t8PtTX16OhoQFdunQht2vsZFStfwCYOHEipk2bBgDo0aMHqqurUVhYiIMHDyqlxcpM5CwuLkbXrl0dZQjHtFh19E3u6bqOy5cvY9++fRg8eDC6dOmCnTt3Kucly5/Piw0rmtnwf/P9UVT/xcXFSnKSNGQDOJG8st+qSpHogPr6ev5WjZ2sREa7l42sbmXlHswgyo6gR+AAoGnaGAAF8K/wvmKa5pMO4U27wlCFV0a33nor/V1dXY2ioiIA4kYdExODXr16oaKiAgjjCJxFVlGsrMeOHcP27dstcUh6IrlVRjVOchEZHnnkEQBAhw7W97co382bN+PVV18VpiXBcaQogx9BFhQUoE+fPpZ7NTU12LNnD61jUYd2MStyXf+hdkC7aT/hzJkzKC8vR319PVJTU123VTtFFi4FQp5j0KBBKC8vh2ma1HYva6tkVKs44xBC2kNhYSG99tFHHwEA9u3bh4SEBOTk5AAAioqK6MBN13WcOXMGJSUlbHKOMzCZKZJl3Lhx6NOnD2bPDrRoFRYWYvbs2SgsLMTdd99tueeyDMI+AodpmjsBqA0RrhIyMzMBgHQKD4+rju7du6N79+4wDAN9+/a9qtsqkRVwb9Zqa4ic/0pEfBGThazoyhbK7BpAVFQU/XvXrl307x49eiA1NZX+PnLkiHBqRa5dqUaWkZGB+fPn09/Dhg2zjMBlo0Y3b2kV22ePHj2we/du5bQ2b96slLcKrPeB3RoIuTdr1iw8+uijtvI5XePvual/u+k6b1JIT09HSkoKANARF4EdmZF23NLSoiyzW2QmCbv0OnbsiC1btmDt2rVUXvIMp06dwnvvvSdMn89LtXyDHX0XFBQAgMVTh7zgdu3ahaSkJEycOBEALDPHIGZnSm0sMzOTmh6PHj0akF9mZiYKCgrQ0NCArKwsbNu2LSBMKIRkQnGdmWQKbWeH+/GPfwwAePzxxx3Tr6mpwe7du7F+/XpVkUI2obDYLVwlJCTQhgUAo0ePxvjx45Ur0c6EoqK4AWDatGkWGQgtLS0oLy9HSkoKoqOjLc8CAAsWLMBzzz3nuDDXmp/ttFTkOcJD0q6vr7co8MbGRpSUlKCyshIXLlyQRXezOBa0uYclOTkZeXl5iIuLU5aDNU0p2vJDMvfI7MU+nw/JycmIi4tDz5496foAG7elpYWul6i0VxUTiltY5c3CrhElJiZa7o0dO9YpWaUytXMO2LFjhzRx0XOuXLkSALB3714n2fiXTfhNKG6RjcB4dF1HbGwsVqxYQdy9pI2xsbERTU1N9PqmTZtsZQjW80UF2UISAMyZM8dyfdasWUKvAbdy8WVqR0NDA5qamtC5c2d6raWlBSdPnsTixYuxcOFCPPPMMwHxVqxYgeeee47+DnZk6GZhMDY2lo5SCRUVFXj22WdDsuOGYw2GJz8/3zJDBEBt9ARilyWMHj2a94gQEoqsdn2MtIHt27djw4YN9PqFCxfQpUsXS76jR48WpqHiJcJSW1vrauRrGAb69+8PAPj0008tz/PnP/8ZLS0tWLFiBX2Wp59+Gi0tLcKXu5vZvgqapgW8sGV5EtzmqRK+TRW4rAJFD5qdnY333nvPomxE4Q8cOICXX37ZlRzh7sB2K8y6rqNfv3648847pXFFaYUih4xNmzZhxYoV6NnzO8+p5cuX47PPPsPYsWPx7LPP4vTp01i+fDl9cQKgf4er3FQW2QoLCxEXFxdUnnYjpkjCmunq6+tRVFREFTevwPk4QPBy+3w+6R4AWbskJh
HibUS4//77MWfOHKW8gzH32czcAu4PHjyYzsD456isrMQnn3yCnTt3om/fvpb0NE04AZDK7YTIfDZlyhQMHjwY27ZtQ1RUlPC5HnroIbz44ov0N9EFAJCSkuLKi0pGm4/AySjMrpFNmTIFo0aNEirvpUuXIi8vD7m5uQD8U6iamhphfrIGFYySdDNy558nLi4OHTt2tIRZvXq1dAQW7JtbpfPk5eUhIeG7DV2NjY1Yv349zbOqqgp/+ctfLGHChdMOPPa5ib93sDjZlkN9GbF1PGvWLItp6ttvv6VtkozE/+M//iPAzGIYBpYtW0Z/L126NGhZ7WZ/ojTnzp0LANQzgg377bff0nCAf5YWDg8Wu/rnnzM/Px/79++nJj0CKaOKigq89tpreOKJJyz3FyxY4ChnOOp//fr1GDVqFJ15icqnsrISixcvRlRUlKWeiQwswc4M2tQGzm+OkL2Nly1bhoEDB1qu67qO/Px8ZX9TUboCQt4coUJGRgYGDhxoSSMvL8+yMCSD5O/kRhiKfLGxsSgsLERsbCyioqICyqq0tFSoXCQob46xk3nr1q0BspSWliqVGcFO6aja6/l0RKNOfn1DZt9mFzpvvPFG7Ny5k3pFAX6/fN5spGKv523LToONrVu3Iisr8BiQjRs3wjAM/Pd//7fFJKRqAyeyOtnAncjPz0dKSoolr4MHD+K6664L6P9smfKLrSI4+ZXqXxAPALBu3TrEx8cHXBfJmpGRgRtuuIEudJaVlSnJ6FT/bToC5xEJOnjwYMtixL59+wAAp0+fDlp5k/T5PN3Ix9v6nBb0ACA6Ohq/+c1vqO+pYRh0CiVa1ZeZYMJhByX/T5w4EUOHDrWE6dy5s2VkyMsxZcoUqXx8XnbIbOAyM4Lqc7Mbk2bNmoXf//73SE9PtzVtOUFkdZLFMAy62YhFlGdZWRmuu+46AP5OTnaZ2uF29iVqL/y1AQMGBMTftWsX/ud//gePPPJIgD0/KioKjz32mHB9xI2sqmsgRCGzcp86dQoff/xxQNiysjKhMuTbfagjbr6/ixQ34f/+3/8bsElOdeDhVs42t4Gz8I08JycH69atszzEihUrAADFxcWYN28egEA3LQK/OYbFztYYDKQi7ZRD586dUVpaSs0R+/btw4QJEwAAo0aNCpArGJsisX/yachMVEOHDqWbIVTYtm1bgMIPF3bTft4sIYJsoCBmitTUVDz66KM4dOgQ/v73v2PNmjVBz0x8Pp/SaFNE37598frrr1vKjchKrvGunEVFRQGjb1XYfqViShHV/0svvQTDMDBw4EDheURvvfVW2NZA3MxqSRhd1x0HcKT/8y/dcK55EXmamprwhz/8Ae+++25A+s888wzefvtt1NXVKacH+NvIrbfeSjcmqeB9kcfDw8OjnXJFTSgsPp8P9957LwCxVwexFRqGQR3jeWbNmoVvvvkm4E0d7Mq+aMonM6uI4m/bts1yb8uWLbh06ZJj3mycUBaxZLKJ4vHhjh07BgBYs2aNY96qhHL05unTpzF58mR8+OGHGDlyJB588EHs2LEDhmEEjNQnTZoEALh48SI2btyI06dPux6FiRbbeci9kpIS3HLLLXRkm56ejoULF2Lp0qXQdZ1u9jAMwzLyrqmpoW1106ZNUvOcyixMtVxF5p6jR4/SPMjMIxTXVlVU+qVK3iRuZmYmvvnmG9d5i+DLUxY+Ly8vYL0uPT0deXl5+NWvfoUJEyZgy5YtFjnZ9Dp27IiRI0eiY8eOmD59Os6ePetqx2ibK3BZRf3nf/4nJk+eDOC7h1u1apXFfzovL88SJykpyWKLMgwD8+bNQ3V1NcrLy+nqOUuoq+kqrl4+nw+jRo2izwMAJ06cwFdffeWYNi+rG7lEsM+bnJxs8S7hX0ZsGkuWLAEADBw4EI2NjaioqAiLJ4KTGxkbhg3b0tKC++67D3feeSfmzJmDOXPmSD2MCJmZmThx4gS2bt3qWk5VpUhk2LZtm2XTUX
R0NPWUIqY/tu3wm85Ez6xa1uwRuk7Ex8db6rGlpYW64bGLcmyYmpoacnpjyNitJQD+Z25oaMDKlSuRmJhI22tNTQ3OnDlDd11XVFSgsbERiYmJljQvX76M9PR0LF8uPhQ1mPYra/eGYeDw4cMBegjw97UNGzbg/vvvt4Tn2bJli2W38/vvv48RI0YAUNvs0+ZeKHbn5/p8PgwePJgWxqJFi1BZWSlNLzs7G5MmTQpwN6ypqcHUqVNVRArLcaJ85Y4fPx4zZsywVFhWVhYuX75sq6TtlLDdyr7qoTvr168Pyre6rKwMn332Gerq6iBqL8Gu7MvSIJ4dr7zyiiXMs88+i4ULF7qS3cEnWGknpmj9hC/b5cuX08VB2QuJZfLkyaivr7cd7XJK2fWuQT5f0a7Buro6/PznP0fPnj0tfY+NO336dOry6rSYb9dW3e5uHjx4MB588EGa/qZNm7Bu3ToAoP2b9RUnLoek/8sWdJnncCxTlRlsdnY2dF2neoh3fQT8/vWiDUZsfzIMA42NjXS3Jjl1sxWhrFdUgbuFL8zY2FjU1dXhgQcesFy/GhQ4Pw3KysrCmDFjlNLjcVLg7AlvdvC7W1VpaWlBVlYWcnJyVHbTRcQ1T/Zs7EYuspi5Y8cO6LoOTdMsU1g2LdUOLIOXJzo6mvpWnzt3ThqeDEgWLFggOt7UAmdCcb3tn28TrDsrkaNXr14A/KNvn89HX3gyBe5EOBU4DzlLJCcnBxcuXKAzbLJb8+mnnwYQ/v6v0q86duxIFx+JHCz19fVobm62XIuKisJ9991nuXbgwAH86Ec/Upb1inmhhMPGVl9fjxtvvJG6HbGHyYQbOzdE3la5c+dOy6E1NTU1uO+++yL26SWR8haNwurr62mH5dmwYYP0nOyoqChs27YN/fv3x9dffx10fdk9P+8+KLLV8hQXF6OoqMiykYt4LZG0eOXN3rPDzoQi6tCNjY1obGyk9/kXlGEYSElJoed5OClvNh0VWVXkzM3NpbKQUR7gLzPZTlHAv+mMd4sTEazHFCuvXRqkT/Eb955//nkAoOWvmp4dvGePnRLPy8ujOoiNw8pANqY5vQwaGxuVzIyEK7aIqTLNVKGqqsrVRw9IXuGwL8vuL1q0yHJv6dKlASPXcC0Ssf7KKvZT0fWOHTuirq6OdoDp06cHyBYVFYX09HR6QJCbRiYLI/tbpS0UFxcHnK+8ZMkSy8mULHYv4HBxzz33APBP65csWRKQ17333ovjx49b5HHCTVsVmWPY52YX3MhGnrvuugupqam2C/Kyc3tCRZSW2/RFbd7uewDBInt+sulIJBd5WYri8RQXF+OLL77AJ598Is1bRJsrcP7NRP5WjcfGJfGczj5QzUMlb7t0iRfC4MGDAfjfpgMGDMAdd9wRFjlE8CNFmSJ87LHHsGvXLuGmh+bmZku88+fP048+8Gnl5ua6NsHw6YSjY7ELPHy5i7CxLQsRfdfSDja99PT0gPu6riMmJgb5+fm26dilayer04Kurut0BA58V2Z9+vSh4Q4cOIDi4mLL7lA2vp1CCuYlQ+IsWrQIM2bMQGNjI/7f//t/ASNpPn22/+/evTvAtp+Tk6NqA1dC9lIEIFTeonh2HDhwgH58wi1tqsDtXPPcjnKd3t579uyhUy224cniOuXtVOGiaX9TUxM+++wzJCcnW8wDbtINReGxXycZMmQIUlNT8eKLL1o2CrBhnKisrAyqkRHcfI38jTfeQHJysuVrSyxTpkzBn//8ZxQWFgac/NfU1IS1a9dizpw5rmdnLDLTlMhEAnynFNkdlmz8oUOH0rJfsGCBJd1QX+yhjFzJs7z88ssoKirCpUuXhEcqkHh2Lwu7tipabAf8SpCU3dy5c7FmzZqAdibr/yLvmE8//RSTJk2Svgjczn5UZ5Zu6oCk6cJeL8TbyOPh4eHRTrlqbODs77Vr1wIAPvnkE7qhxA7ek6aoqAhFRUXSKZTqiNbOpsy+cTMyMuimEk
JNTQ3GjRuH7Oxs1NfX29qkZaNylTe63TbqV199lXpykHuffPIJ/aJRYWEhkpKShFNmURktWrTIsoAU7MxA9lz8LGXv3r10BC7Ki2zoIs/Iprt161bh6FtVZnbBTdQO+Gvp6ekoLy+3pMF7HXTo0EH6ebRI2uYJR48elc5c6+rq0NzcrFw3wSKKP3v2bFx//fXQdR133XUXfv7zn+PYsWOO7T8jI0N4LAC/BhXq7FskP/mfXVfg+y17NIJsRhMqjgpc07ReAP4EoDsAE0ChaZrPa5r2BIBZAMhcZ3HrNzJtcZpqkc0Euq5jyJAh1EWoqqpKOE1JTEykh1+dPHkS9957L77++mtpHqqF6OQxwsrBNhjCF198AZ/PF/C8uq6jsbERpaWlVE6ZfJGo8JkzZ2LmzJlUFhFsvtXV1Xjuuefwi1/8As3NzejZsyd69+4d1Hca3ZSpYRg4ffo0VcL811ZEnDx5EqNHj8bly5dRVVVlkfXLL7+kh3alpqbyH7cNmYMHDwZ8/q2+vh5Tp06lvsvAd+aVmpoavPbaa47pDho0yFFWYppymsbPmzePLrqRTXG5ubnIycmxfMWKPYemurra8gnDtLQ0lJeXo6KiAkeOHKGHoKmc+8G+FNk21tLSYmlPBQUFmDt3rlDxRkVFIS0tDVFRUXSjFOHkyZPIycnBhx9+CMB/4mN5eTn69u1L69/NC5zIyQ+qVNNgj7rlj5MNF45+4Jqm9QDQwzTNDzVN6wrgAwDjAfx/AL4xTXOFamb8phNRQfBHNL700ksA/KeliWy1X3/9NfUDP3fuHM6dO4fvfe97IfuBOm3kYBHZDHVdl37S6eLFi2hqasL+/ftVZARg/5kquzK1++QTiyz+mTNn8Pbbb+NPf/oTRo0ahf379zu5wLn+0rcIIgf5+MSKFSukZ4ST9E6fPo3Tp0/jt7/9LS5duoT9+/fj9ttvx6lTp9ChQwfLaK01/bB8Ug3wK51Dhw4FyMQrcPJcRUVF9OtRpFyJrEeOHLGk4SSrG59lGWy9k/YM+Mt0zpw5tB3FxsY6lanS5/9kfZ/1RQcC+39RURFM0xTGJ2Wq6zqt//r6evTt29dSpkzcoDadsaj2L0D8zKH6rDuOwE3TPA3gdOvfFzRNOwTAeTikgMpK9owZMwDAsiWVhd311K1bN3Tr1k36gQe3yBaxZLhRTOH2RiGysumSTRtu3ftYSkpK8Kc//QmA3wQgU96qnh1sXnZlQO6R3W1koCHKh2xKWbRoEZ22dujQAddeey3eeuutgC+2uMHJu8NOdsL999+PuLg4y/WYmBj6N5H14sWLAIB+/fq5OjHSyQtJ5okhusaH7dGjB7Zs2UL9wEtKSqicocDLEBUVRV927ACO7/8jR47Ejh07hJ5nbJmyZ84Q5e0GmXtuJCAvJrsFcxmudmJqmpYE4G0AAwA8CuAXAOoA/APAfNM0z9vFd3qrkenRU089RR/ALbquY/HixfjHP/6B/fv3Y9iwYTh+/DgqKyvRoUMHfjtrWHZiTps2LWDb94IFCxzNDG6ez+0h+ZmZmfQMDj4/JzPW7373O7zzzjv0N2lEmZmZOHDgAC1TScdQ2p6solRYyMd3RRCPBdbm3dDQgAMHDgQom169eqFfv3548803lWSVwZt7gMAPcJOPRbe0tAScl0E++qDrOpWVb6vx8fE4efIkiRKWtqoCOwInzzF+/HhbOZkydRyBy5Qi6f/XXXed1J+fh2+rKvz0pz91rH/VEXhqamrA5jE7+OdevHgxbSMy7GZgyouYmqZdC+B1AHNN06zTNO1lAMvgt4svA/AcgOmCeLMBzAbEZwSwtLS0oKysDPn5+fD5fMqH3hN03f/Vno8++gjvv/8++vfvj44dOyIpKQk/+MEPAPjd02yekcpK0nPj30oYPXq04+FPodi3Vcr00KFDAS8VAn999mz/I5Mz1n/4wx8GHM4zatQovPvuu8IyraioQFNTE6tspLKSzTdu3SNra2uV/M
+d0jx58qRQTl5WO2R1eurUKctvsvnJLp1Lly7hgw8+CKmtytqAXRmL1mbsys5OzoqKChw6dEiYhqhMZeVH+n9mZiYOHjwo9esn+SQlJQnbqt1zpKSkWExdMllJmTrNFisqKpCXl4cnnniChiFrTCLY9PLz822/zOP0LIDiCFzTtI4ADAB7TNNcKbifBMAwTXOAQzquPv81fPjwAD9g8jEH0Ucdjh07hq1bt6KkpAQJCQno3bt3QJoNDQ3scbNBjRZFsFNeFc8Zt0Tyk2qAeLGGXB8zZgx27tyJtLQ0YZkahoGuXbuS2Y3rUU04p6mXL1+m9c97hQhQ/kwZj2gETsjKyhJ2YjbO6tWrw95WZXK6HSxMmzYNEydOpPJevHgRN954o1BOknbXrl0xbNgwGIb9WSiq9Uw+gGHH9u3bLX1N5TkzMzOVylR2bo8MNm8V89fjjz9u+fiM7EXhZK9XWcTUAPw3gHOmac5lrvdotY9D07R5ADJM05wsTsWP7OClUNx82Pj33nsv3QBy4sQJYViyit6K604hkq8tXMBUpqWA2DzCTluD3XDggqAWBkVtws7UImrwpmmitLQUnTp1ot5LgF8B/fWvfwXgr/+amhoyWg6bCYX9nZ6eTm23PG+//TYA4PXXXxfKalPeQbVVFTsqz/XXX09lMk0TBw8exMWLF5XTCfabmMG+yEPof8rfxJXh1KdC0Q0qC64qJpQfAZgKoEzTtNLWa4sB3Kdp2kD4TShfAJgjityWnD/vN8Hbnbstct3z+Nfg/PnzqKqqQteuXamiTE1NtZg2vv76a6SnpweYO9oamaxXG7W1tXQdgcjpcfXQpsfJqh4nqfoWDnaRM5xuRMG+Yd2OikKZltrZ8dxODxVQHoEHW3/hwI1rnpMcoZqAFMtBebp/pQlmETOYWbjq7MLGjztoE5qTrBGohyt/nCzBqdBVXKHaErbB8Y1P1XWQb7jheh72fBGnNGX33chiZ/py467Il5uqYg9HuanUmZNftVt30LZov7J1DNV47G9VgmnPbvq/EzJTYSj5A98dEGZnknTKw0lW/prKPZ4rMgKP5FvLZYMKasHtShCpQ/IjQNB25StA2GR1s18gSEKW1U6xqSgXmTLj03A7W7yCg7Mr2lZV/PRDXsQMJ5qmVQP4Z5tl6Mz3TdNMEN24ymRtL3ICnqyRor3I2l7kBP4FZG1TBe7h4eHhET6842Q9PDw82imeAvfw8PBop3gK3MPDw6Od4ilwDw8Pj3aKp8A9PDw82imeAvfw8PBop3gK3MPDw6Od4ilwDw8Pj3aKp8A9PDw82imeAvfw8PBop3gK3MPDw6Od4ilwDw8Pj3aKp8A9PDw82imeAvfw8PBop3gK3MPDw6Od4ilwDw8Pj3ZKSApc07RRmqZVaJp2VNO0X4dLqEjgyRoZ2ous7UVOwJM1UrQnWZUxTTOofwCiABwD0BtAJwAfA0gLNr1I/vNk/feWtb3I6cnqyer2Xygj8DsAHDVN87hpmt8C2AggK4T0Iokna2RoL7K2FzkBT9ZI0Z5kVSbob2JqmvYzAKNM05zZ+nsqgAzTNB+yidOmH+D0+XwAgNraWlmQr0z5R01NhfhKMtTW1tL/g8V0+Cq9KP1g8iTPTGBlV3yWi6ZpRtvJqiIDyYeXg/zNymr3jCJZo6Oj0djYCDjUv4qsKtjVDXvP5/OhubkZDQ0NomSUZRWVG19m7P1wQcrVqa2KkNW1E6z8dnGam5vR3NyMmJgYNk7IbVUE/yzkWjBlzcQT1n/EFzE1TZutado/NE37R6hp6bruKnxtba1ToVm+Os3KGh0djSFDhmDIkCE0b5I/+Zv9ZycD+384EJWpKH03eZJnGDJkCC03Xvba2lrouo4hQ4ZA13Xcdttt6NWrF//8F5xkdYKUOS8HkY3kJ6pfvo5IWiytyhuwqX9RmnawYfg2Iasbtix1XUdtbS1uuukmWRZKsgKg7ZYtRzbf2tpaet+p/bqBKVclOXmZiV
wiOdm2wIdl0+DDkn8NDQ1obm7m6yLktiqClZu9ZlfOMl3CyPvPgEgIbQR+J4AnTNMc2fp7EQCYprncJo7rzEQPbRiG7X2e8+fP4/PPP0dGRoYlLoAPTNO8XRQnPj7eFHV+O7i0w4rTqEbXdaVy4cMYhkHDqsjPhv3Rj34kKtcq0zR72smqkgeRy21dE9kUkda/nay8fHZy2ckiise2VS5+UG2VL0e+rlkZwvFMgHNb5RHJ5wY3/Y5rUyG3Vbt8ePmcno0vdxVdFcoIvARAX03TkjVN6wRgMoDtIaRngX9YwzDoPxmyez6fD/X19WhoaMCYMWPQtWtXJRnY9Pi8Rb+dcDNyB+BKVh4iD/s/L6PdPad0dV2n5crFrQlKYJu8SH7sNf5/8rfKc6iUqcoUXlb/Km1VFs7n86G6uhqGYeDy5cuOMgDfjdLY9Jxk4+8Fo7zHjBljuafaVp3SD6Y9quYRqbYaDniZVXRE0CNwANA0bQyAAvhXeF8xTfNJh/COmdkJnJCQgJycHEe5mpqasHbtWjQ1NdFrZ86cQXl5OUzTRK9evVBRUQEojMCCHQW6QaWxBjOqCUUeVmHajdLYcm213QY1qpXJLyob/tluvfVWAMDw4cMB+Ot/zJgxlnBu6z8+Pt4UmT4iUf+A9TkHDRqE8vJy1NfXs0GUy9VmFBcQxjAMTJs2zbZfHTt2DNu3i8dmonJ1O1u0k8+JEGe9YW2rMt5++20kJCSgsLDQcr26uhpFRUWO6bc+o1DWDqpCijBNcyeAnaGk0VZ0794d3bt3p79bO7BHiLDlGkkTUqi0p/onsl7N5UloT+X6r0hICjwS8CO+Pn36AABuvvlmJCUlITMz0zF+TU0NXn/9dQDONnQZ7Kqx29GAbIQhMgs5pe1mtMJPjYMdLTottrB5tBWy6X16ejoeesjv+HT99dfT6zk5OaipqWkz+UJBNsNTqXvA31aHDBkSVH3MmjULkydPlnm+0DDffPMNiouLXafPo9omMzIycMMNNziGmzBhAmbPng0AaGlpUc4zkm23R48eGDx4MJUrPz8fycnJaGhoCJClpqYGe/bsQXNzc1B5XXUKnGfs2LEAgLvvvtsxrKhSwlFRiYmJSEjwe/Dk5eUBcG6IbjwXVBV+uCDPkpiYGHDvxIkTlpV6WeNv607B27x1XUdycjKOHz9uUdwEWWcOBrf14PP5kJyc7BiusbERFRUVlvrny1ZViYuQLVCz6WdkZNBBkSwfwzAwb948VFdXY/ny5ar25QBUXQMBYNGiRYiKihKmzZfJ1q1bAQDFxcXIz8+nYdoCNh/SryZPnozMzExMnz5duu5EniE2NhZbtmyRpulUplelAmcb3i9+8QsAwPLl3zm35OTkQNM06LqOgoICOkoPN0SGhQsX4tChQwCAHTt2oKysDIsXL6ayECZNmgRA7E6lmleoyGzHpEyjo6Mxc+ZMAMBdd90VEPb5559HVlYWXT+QPQuffjg7DK/A+PxI458zZw5ycnIC7pGXvgy3ClG2mMrKGR0djddeew2AfzFvw4YNwrT4cho7dqxtJ1fByV1UVHb/5//8HwBAbGysJcyFCxfQ0tKC6OhodO7c2ZJGWlpagHyheI8Ei92MZeXKlcjJyUFdXR1CWd8LhilTpgCAxUpg9wLWdT3ABi5ayLRrryEtYrolEht5jh49ShU4edCamhpMnToVgLgAmGtKi5j19fV49NFH6b2WlhZcuPCdC2lOTg6Kiorwhz/8AQDwve99z9UzqCw02S0MuRmllZWVYcCAAdJ86+vr0dzcjIEDBwLwj4RUZGWI2MIQqyz69euHO++8Ex07drSEKS0txXvvvacqb1CLmCKl9dVXX+HBBx8EAHTs2JEqRjtKS0uxdOlSx3BOsjotZItmTA888AAAYPTo0TQM8N2Md/To0TQMgUz3idKRjRRlbVVWpiKmTZuGiRMnOoaTvThycnIs3jl8eNX+7yb/uXPnAlCzFrAUFRVh6tSpTj
Ob8C9iRgpVhXTkyBHL6JsUwOLFi5GVlUWn0aGODlavXk1H4AAQFRWF+Ph4+tswDMTHx1ObV1VVlav0ic/v0qVLhXZyO8i0lLcRi7xHAOBXv/qV5WXEQxRPSUkJAL9C37t3r5vHscWukYpcBUUj8H79+mHQoEEBypufQodi0rFTNPwz5OfnY//+/ZY2ESzByE1s4ICaB9HRo0cDFDfPrl276CDl8ccfB+CfWaSlpWH+/PkRX6xcvXq1rQnFCU3TAuKJfrvFLm/S/59//vmAsMXFxZZ+9Oc//xlHjx6luiNY2lSBk4VBkbJhsStktlBkdsaysrKwyEo6xdq1a5GSkgLA37CWLFkijENsYOR/t5AGy47wnDp0bW2trU2aj7969eqAGYsI8gxJSUnSMG5fNjK5ZM8pMqXEx8fjmWeeEeZFFi2d1hbcIkvHMAwcOXIEe/fuRY8ePZTTInFFhGLvlqXBlsfixYtx5513Wu4ZhoH6+nrMmjXLcu3UqVOWdKKiopCeno7i4mJpHuFaB1m+fLlwEXP27NkWd7zp06fTv0nezc3NiI2NVXLRCwaZPb537970+okTJ9Chg1+9FhYW4uDBgzh9+jTGjRsHwG85IPFV8pHR5iNwtjHJ7okUz4gRI3D69Gl677HHHrO8oQmbNm0Ku8xlZWX0pZCRkYGUlBTcdNNNYV2003UdhYWF2LJlS0ieHnYvw+uvv144YykuLsbs2bOxcOFC4cKmU15uZjj8YmQ4Wb16NaqrqwPyChbSBmVlOnjwYOzfvx89evQIeCGRdjpgwAD8/ve/p4rn0qVLNJ3ExEQMHjwYBw8edJTBrdysnCx33HFHwPWqqirk5eVZzILAdzbdNWvWYOvWrTReZmYmBgwYgBkzZgTdVp3aDGsGYxk9erRl0a+oqChgRtmxY0fouo6EhASUlpYG3Q5EZW/3m4z6Af/AilgAZsyYga1bt2LAgAEB63V2L14VrgoTishwTyAP/Oijj1pshuzWYfLgmzdvxquvvuo6PzccO3YMBQUFWLhwIfr160evk9EL4FckPOx9AEhJSaFv41BR7eT33nuv8PpLL72EF154gU6rWUaPHu1YprIZgCoqsvfp0wfjxo1Dly5dAu6RvLdt2ya8HkyePp9PuFjHxnvwwQct0182v6FDhwLwbzKqrq4O2MSh67pSWw0G2bPl5OQIBz1z5syBYRiorKy0PMNHH30EwO+qx0PMBW29iMk/2/z5812nodJf3HjMiEhISLAo66ws68GHJH+y6TBYvC/yeHh4eLRTrooRuB28l8yECRMwcuRIvPDCC9QGfuzYMQDfLbw5EerUuqqqiq44E1ivA/4euW8YBh555BEA/iniuHHjQh658IuYdohmBgDws5/9TDj6BvwzB5W0QylTlfRvvvlmob9yWVkZnaquWbNGKb9QzVI8ojr8xz/+gczMTNx7773Izs4Wxlu3bh0OHjwYsCYUrJxOHDlyRLj+QJC1RWILZ+MNGjSIyq4qa6gncrJ5REVF4de//jUGDx4cEK65uRkTJkzAm2++6ZiOE6I6kbVXVlclJiaioKDANs+amhoYhoFdu3Ypy8NzxRS4yrRb13WUl5fT39HR0diyZQsmTZqE++67DwCwatUq/PGPfwQAHD58OKLyymRUhfVRvueeeyzp6roeYFN0I5tKvFmzZuGVV14JuD558mSLLKL0I4Gbzs92DtKBTpw4gaeffhp1dXXCNGVlovLCCFXZkJcNKVue6dOnW7wS7BZK3cI/d1RUFNLS0hAdHU3TZMPY7cIE/IONpUuXUm8pAFiyZAkdjLDy25ke2N3Nbr1K2PAbNmyQHpzV2NiISZMm4f7778eFCxeECle1/kVtSLaGl5iYiM8++8zxOQg1NTXYvXu3Zc3OyblDRJsqcFHlkd8yLwp2ZTc1NRU7duywhLnxxhstitttY4gEbKPh/z9x4gQA4Omnn7bEIS6E4fBCAOzd9di/V61aFVCmImT1w95XsS
uKjj21i0s2k8ybN89y5saFCxfwq1/9SiqTbFQbDnRdR3x8vO0BUHzediM3PrzbchWlReJGRUWhZ8+eSEtLE7qr1dXV0cVKFpmTgZ0suq5j//790vv8Dl+V5yJhJkyYgC1bttC2ale/7EYukcJ1U56yemSJjY1Fbm4uevbsaUnfTsa6ujps2rTJdv2PTUNGm4/ARauudiMldgTOs3HjRvzP//yP607qdgQWDm8GwHkTDRtWNV+nbe8sq1evxq5du2i45uZmOnuR5bd69WrpYpvbciejGj6+3XPOmDEDwHe7XAn333+/bV7heEkH+xJgFzx5BUL+Hz16ND02lr3O/x2s3IS0tDSkpaUFjIzJUbBxcXEWuSO9GG0307Drl1u2bLHsbpXJ6tQu7GRgkQ02RBQWFlrKkc/DMAx88803eOONN+h9lbNPVOriqrCB8yNVFrJ6L9rSXV9fbzkyNpyorkLbvYTYa/n5+dSXnOXChQuorKykGyPcKh7+U00ieUSKBAj0LhCZH1paWsI2qwlmBE4OqiKbI9wiKw9VZM9+4cIFLF68GE899ZQ0TVGeTiOuYLAz9/CbzgDgd7/7Hd55552Q8+VxUox8nxK9tGTrANdcc43QbHL48GGsX78egL8ceFfIYGGtBXYv102bNiEmJkZoVWDDr127NqjD1Zza6lWhwGWVN3PmTMu2VP5hZLutwjltVu3sTuFEm45OnDiBX/3qV5gwYQKam5uDHjXKFqX4+8uXL8eXX34Z4NIWDtzMapym4YSZM2dS9ys+Tm5uLkpLSy3X7EZxwbwY7WhpaUFZWRk0TRO65gF+H2ViAw/FPOaE2+8t8ht0WOxs0+y9J598EsePH6fno/D3RbhdV2CVp2ih784770RGRgZ1YmDj2A0Kw/ECJ4iUtyivEydOKJlj3NKmboQ+nw+6HvhVGnaqwULMDQQ2DuBfCBo+fLjlWrhtnk7pifLl45BNR4Zh/doIadBbtmyRjuDC+TxLly6l23edGvGCBQuwYMECupuMh30W1Q4hU4qyZ+Trn3j/xMTESI8riPT6Bo+u62hpaQn4N3r0aDplZmVyKtdg4BWjU52wG05UEKVVUlLium2yHlOhQPrFu+++i169egXIKfqfLROV/FVn4OR0UpaysjLMnTsX3bp1o3nl5eVh/vz52L17N3bv3q0sh1OYNl/EtJtqssKOGzdOeMrgmDFjsHPnd9+Q+Oyzz4R2dTbtSHZq0cuH/E3kLy8vD+hkx44do1M/p7TtYMtU5Y3OLjKtXLkSq1evRufOnQMWYD788ENXcriV1SmuqP6rq6tx7Ngxet6Jm7xZVGcLqh2eH+nZzQjZcmXDymZRqqYp0VqIYRj0UDKWwsJCYRnKRqyPPPII9u3bZ/FCCQbZDExUfvzg5bHHHsPQoUMDbPYvv/yyZQTuRKi6gJ/lsWVCNj69+OKLuOOOO1BQUIC4uDi61jF8+HBa/+HSSd5GHg8PD492iuMIXNO0XgD+BKA7ABNAoWmaz2ua9gSAWQDI4ROLWz+x5hrR2yghISHggCBd13HddddZrk2cOJFOR0UbVURpDxo0SHnTD8k3mDcm68hPOHbsGM6fP48VK1bQbx6mpaWhd+/eqKiowJdffkld51JTU13JCdgvlhmGgVGjRlmeZe7cuYiPj8e6dess4doS0ciVHFJkN2LnZ2582LS0tAAvpri4OJw5c8aVfCozG7v7svKUzRzdlr/IhGIn29/+9reANGQmyMbGRuzevRsNDQ144IEHMGrUKIwbNw6XL1/GX/7yF0tbVV3EdFrwF90vKyvDs88+i2XLllmeMVLHYcsWMe04efIkAODhhx/Ge++9h8bGRixatAg7duyg8hJHhnCd8KliQrkEYL5pmh9qmtYVwAeapv2l9V6+aZor3GQYqoIQfWmFPVNEttuQRUUpyhZcRJ4aoueJiYkRxt2/fz+++uor3HbbbfD5fLh06RL2799PT//r3bu3qw9UyD6nJWp4oo
ZI4o0cOTIsx6E64eRG5iYdnnvuuQd//etf6W+RC2pdXZ1S/cu8e9i8ZTLruv/DGTfffDP9HcmXop0JRURiYiI9tKyqqkr4HImJiVi1ahUAoGfPnnj55ZfR0NCAefPmwefz4Sc/+QnefPNN6gWm2qdEJiMeVhb261GpqamOeQwZMsTSr7gPQ7tCtjbHkpubS/9m5Zad80501aVLl6h7bijtw1GBm6Z5GsDp1r8vaJp2CID6kXUCRALzjY73WSZcc801GDduHP1KNv/mLiwspF/keP/995GUlCQ92cwOMlrg7XGylWTDMNC/f38A/k06ovC7du3CSy+9ZEm/Q4cOuPbaa3Hx4kXXMjoRTqURLiWksmIvG43pum77sYZwLvjaeUyIRst8+eTm5mLhwoWWeI2NjUF9rckJuxE4P+Ah7fHLL78EAFx33XXCDUlffvllwAav6Oho/Od//icqKyvphjS3sOVEvgpEvl87ZcoU1NXVYceOHcjOzsamTZvw97//HYD461GitAlkodBJhmDk5/MSQWYKfDzRQMAuPSc5XX2RR9O0JABvAxgA4FEAvwBQB+Af8I/SzzvEN2WjQP761q1bA9yztm3bhpMnT+Lhhx/GfffdJ/1sFQCMHz8ee/bswbBhw3D8+HEcOXJEFEzpixwqrj/Lly8P8JpgIZuOeL/1hoYGHDhwgMpZWVmJDh06ID4+Hv369UOnTp1gGPZf5CEyBtso161bB5/PR70TdF2nC0PsuS5OsraePWFbpqoN9dKlS0L3vDB/Li0sXw+SsWPHDktbOXDgADWrsYo81HKNj4832VkYqyyysrLQs2dPvPjii/QaAMdP5vGfVAP8Z6n85je/wdChQ3HkyJGg2yqRkbiBks8l1tTU4OGHHwYAZGdnQ9d1KgM5BkAkp8hUGY62SuS0mx3afT7PjpaWFowfP14pbGudhfZFHk3TrgXwOoC5pmnWaZr2MoBl8NvFlwF4DsB0QbzZAGYD8kpQGTkZhoGoqCj06tULTz31VMCnvli++eYb7N+/H/3790fHjh2RlJSEH/zgBwCAN954A7169aL2KpmsRC6nUR5x5HdCtOno0qVL+OCDDwLkfOONN+iGBBU5gdBG2/Hx8Y51IJMVACoqKixfLJLJyte/nZeH7NyWHTt20HMuwo2oXEOFfcapU6fi+PHj9Dd7HwD27NmDn/zkJ3SwYfeMfLmyM0F+BF5ZWQlN07BixQpqhiCKkVfSMj755BO88MIL6NevH6655pqAtipqp7ycBPLsnTp1AgBquuvatSueeuoptLS0ICkpydGkFxMTI/xoQ7jaKtmDYtc3Lly4gOnTpzt+/i3c7s0EpRG4pmkdARgA9pimuVJwPwmAYZqmfAiK70YKKmRkZOCJJ56gD04+xEuQFQj5niNpZDyconMcLTopcKdzRDZu3IiGhga6JVwG/6ZvaGhASUkJhg0b5jiqsbNt8+mL4EeKACwj8MuXL6OkpAQJCQmWs2kIDQ0NKC4uJj+VR4pOrF27lspFvjy/atUq7NmzR7oV+WoagXft2tVy0BZgNWl8/vnnGDZsmLRcDcNA165diSJXaquyNkuuLVmyRHiCnwhiprz99tuxcuVKREVFUTmDaats/zeM7845crPLluRZXFyMjRs34vTp05b74W6rKpDBpUxXsXITwjUCd1Tgmn9e/d8AzpmmOZe53qPVPg5N0+YByDBNU3z02ndxlE0ojOAAoLSwZ5omEhMTUVlZif79+9P47OJWWloaampqyG40x4/aOpkmli1bhtzcXOGiJvtxZVXY/G644Qa6ay6SJhT2gCDAP4IhnWrv3r144403kJycjBMnTqjko2RC4W2BTrL36dMH1dXVAQoxRNt82D5qK3uedevWCW3Mpmniv/7rv2CaJpqamqgpw+ZZgjJNsZC02b5ktyv3o48+gmmaKCgoQKdOnegaDwBcvHiRflzj+PHjqKmpwW233aaswAH/xy4A/+ieKD1ZfbKKG/
DvcmW/vgT4y7S0tBSdOnWibVUmq0r/Fw02VAZLIl3Fl/OxY8ewfft2N203aAX+YwD7AZQBuNx6eTGA+wAMhN+E8gWAOUSh26SlNKoF2mxHneuvkvMsW7YsYLPEsWPHsG3bNjQ2Ntp+LksEOe8hJiYG6enp9MWjosCB4MqNn0XMnTvX1eYIDteKpq3dFhkiOgIH5Ar8nXfewZAhQ5Ceno7Kykq0tLQ4uY2GZbTolnPnzuHAgQPo2rUrXSNJTU3FqVOn6MuUtNUuXboor9ewpKenC2eo/FeuTp06hWeeeYZe49vNXXfdpSxra79yPQMXyS8agLKzoTA5AARnAzdN8x0AogoJyufbw55hw4ZdaRE82oAf//jH1Id56tSpQR101Baw28FZ2KN9rxa6desGILAPXY2yhgtXXighZ+ZiVBNu31nerc/OrgSoyzpixIgAW3tJSQk+/fRTRznc4NYGTlDJZ9q0aZbfIX6rUWll34krPQMLxYTCkp2dbbvIXVBQYHvuhkpbjeQI3A3M5holL5Rw58t7i7HXJO6eQc3AnbzSIuS+G5wJJZywhSIqbIKbyhXZnlX8zFtx3YFVKstpo4cMu+lWODuFm80UIr93UViVTiGz14fSmUX174KwKHDZoqGbQYjd2pDKYMNp8ZLNI1wKhk2f/VulrfLpsKi0zzChPNiQlaNT21VpB4rPf+UVeLjsik64aKghL2K2FW47hQwnZewUR4YbBR4KYa6PsI3AgfAoRpv0wuJfHwG5AghmsHGF+ljE10DCyFWhwKsB/LPNMnTm+6ZpJohuXGWythc5AU/WSNFeZG0vcgL/ArK2qQL38PDw8Agf3nGyHh4eHu0UT4F7eHh4tFM8Be7h4eHRTvEUuIeHh0c7xVPgHh4eHu0UT4F7eHh4tFM8Be7h4eHRTvEUuIeHh0c7xVPgHh4eHu0UT4F7eHh4tFM8Be7h4eHRTvEUuIeHh0c7xVPgHh4eHu0UT4F7eHh4tFM8Be7h4eHRTglJgWuaNkrTtApN045qmvbrcAkVCTxZI0N7kbW9yAl4snq4wDTNoP4BiAJwDEBvAJ0AfAwgLdj0IvnPk/XfW9b2Iqcnq/fP7b9QRuB3ADhqmuZx0zS/BbARQFYI6UUST9bI0F5kbS9yAp6sHi4IRYEnAjjJ/K5svXY14skaGdqLrO1FTsCT1cMFQX8TU9O0nwEYZZrmzNbfUwFkmKb5EBduNoDZrT9/qJK2z+cLuFZbW+sqPBsvJiYGzc3NiImJ4dP5ymQ+FMrKGhUV9cOWlhZXebF5+nw+S17ktyw+udfc3ExlJdcB65e+VcuUzZP/n03b7tlEz+LARdM0o93KKpOD5CuSwa4sFVGqfzZvXja7ehVdt3seHi6MVFaEUK6RwKmtitqhar9i4/M46QE2TOtvS5myqH6VXtd1lWABGIahlDYJx5YpSygK/E4AT5imObL196LWjJbbxFEuFMMwLIVj98Cq4VhuuukmHD58+APTNG93I2uwFaYim67rOH/+PD7//HNkZGRY4sgqMD4+3hwyZAgNS+Rj8xJdk+XvBlZWJu0q0zR7isK7rf9gZVNtAwAiXv8uZHHCVlaRXLIydKs8ZG2Kr//WPiVXNkyZOpUj3//Za21VpioJyJ6DLbtwyCsr0w4hpFkCoK+mackAqgBMBnB/COlZCKbDuonTvXt3HD582LVcooYVTBhZPACIiYlBQ0MDunTp4hiHH4mIGotTRwxWbp/Ph/r6ej7PGuUEJIjqUyZbGDuzkFBe2Hx80aCEHawYhoExY8Zg586dAGD5WwVRGYnkV1Eq7H27NsX+rdKfVBWaSHm7gc/HzcsrHIjy5wdY4VDuQStw0zQvaZr2EIA98K9Gv2Ka5mchSWNDMB2JjXPmzBmUl5fDNE306tULb731lm1cn88HMrJlMQwjoOPJ8g22choaGlBcXBxUXDtkHdFuBmNX7tdccw369++P8vJy1NfXk8sXwyEr4dZbbwUAHDp0CP
PmzUNTUxPWrl2LpqamcGYTEdy8TMn/rMLeuXMnUlNTUVFREVT+dnUXzIu7rZSfXT6ieyJl/8ADD+DQoUMAgGPHjgWEDeezqMjL/8//LZt1OskZyggcpmnuBKA+RLiCdO/eHd27d6e/g+0UHlZIubZV5/53o2/fvl5b9ZDyL7UTkx9JZGZmYsKECZZ/xLasOuIQvRVJXJU0nMKEOkVXWfxRyY+UHf9PFI79P1KQvIcNG4Zhw4ahoKAADQ0NqKurw5tvvinNP9TyFKVF2kEw03i7thJOWd2kZzfSs3vOcLXlcD83i8/nw5///GfU1dWhoKAABQUF6N27d1D1p4IszSlTpiAqKirgn106fN9TmRkFvYgZDOFexEpOTobP50N1dTUAoKqqynJ/x44dAXFaWlowfvx4AIBhGEEtYkVHRyM1NRV5eXnIzc0FAOTl5Tk9lhAVezpgswqtWKZsXqE2ZAf7ZFgXhubOnQsAaGxstIQZO3ZsOGyIjguDdsqMNaXNmTMHq1atCkUWS9osrTJIZY2Pjzdra2sjqhhVMQzDtq06KaZgXpIsAwcOxLJlyyzpPPLII0hLS7Pk0SpHyG1VJhORg+RHWLlyJQBg7969lvBOZRKJRcyIw3cg9iG7du2K3//+9wD8iz0AsGvXLhpPxqxZs5QaicxVicTdsWOH5QUhelmIZCHxR48eTV8kdoSipHglw+bPs27dOsTHxwvvlZWVYfHixfS3aJExnN4BwS4ChxOnmQ1fnp9//rltGzAMA926dUNBQQEA4LXXXrPcI38HA2mnV0O52eHz+Wy9S4JBlp4dkSojkm7Hjh0RFxcnlI04JmRnZ2PTpk0AAr3H7NakeK5qBS4y+JMH3LBhA7Zs2YKOHTtixowZAID7778fn3zyCV28kBFmVyRXkMoR+ZhfKdatW2ersNLS0vDCCy/g4Ycfptd45X2liHRdhssbgqQTFxcHwD+jcFLcbvISvaRFL3C7/Jzg0wqmbFSVU3Z2NnRdxx/+8AcAwMGDB4V5qlBYWIh77rmHxg93e+FlGjlyJObMmWO5zy9S19XVsQv/wrAq8rapAieeHaqrraLry5Yto2aLCRMmAPiuIXTu3BlxcXHo2rUrNmzYgNdeew3R0f49JUVFRQBA33pOOG10GDt2rOV3dnY2cnJyLLK7JZhVaDYvu06q6zry8/Olo0RCcXExbr/9dqpoDMNAVlYWLT/V/CJNbm4uSktLgyozlTChjmplLzjSHvnr4UI26Fm4cCGt02Dp168fnn32WWl+qvI5maY2bdpEN7LdeeedAL4zpYnS4dcqRG08UrNFUR126NCByiMLGxsba7knk+eqG4E7LZDI3GwIAwcOtIwAzp49izVr1gAAZs6ciaSkJFRWVmLLli2WznL27NmA/NyOcIhMolFI7969LXm88soriImJweTJk5Xz4Al2tCeSG/CvGdhRXFyMjRs34vvf/z7t7Gz8nJwcqsj5jnOlUFkvCeeLRhY/PT3dUv/FxcXIzMx0nX4wZcmaJmTxb775ZqmJTJWamhr6N78465Q/Dz8ST05ORn5+PgDYLvbJ8gf8g7tz585Zwj355JPUcYHNL5JomtBcbYtstuREmypwle27dh0sOzvbEmb8+PGoqqrCsGHDaJj4+PiANDZv3mxZNFBBZgOXNdyysjKUlZXRcImJicjNzcXrr7+Ozp070+v79u0DADz33HOu5LGTUyRfMOzduxenT58OuE4aFKu82f8jacaQdYYjR44gPT0dZWVlrl4kbuR0M/rm63/gwIFUgbN59uvXz1EuNzZQPo5I3nDWTU5OjkWR8/kHw6JFi/DrX/9aSXGLGD58OABg/vz5AeV49uxZbNmyJSBOuNpsqPXGy8T+r5LGFbOBOxUgfz8rKws5OTmWQiosLMQzzzxDK1DE9u3b8eqrr7rKG7C+bOwaJ3uPbDoZPnw4brjhBpSWllqUNwB8+OGHtvmGG5VFEV3XUVlZiZ/+9KcYMWIE+v
TpYxtelEc4UVEGOTk5OHv2rEVp2qUXyZfNuHHjLGV29913wzAMy8hv+/btOHr0KPbv3w8AGDVqFAC1BTeVtmr3ssnJyUFOTg7q6uqkC+erV6/GrFmzsHr1agDAhg0baHsOF7IBXEpKisW8wz7L22+/7ZiuTM7Jkyejd+/elmttNVsMx7qDiiL/l/ID9/Dw8Ph34oouYrp5Kw0YMCAg/PXXXx8w+q6pqbFsQyejb9Gint2bTbaVXkZ6ejoeeughKhef9sGDB3Hq1Cl89pn6aQNuR4tOCyEDBw7EzJkzLaPFmpoa1NXVYcGCBRgwYAByc3MD0qmpqQnLSFEVdgSmsk8hGLOIUxxiQrNbXNd1HZmZmZg1a5bl/rRp0/Dqq69i8+bN9Bo/C3RaC3KDU1ttbm5GXV0dcnNzAxbPCHPnzkVsbCxdLKyvr7eMbGtqajBu3LiQ6pg3S5J2uHr1alx//fWWsLm5uXjyyScD0uAXLgFxG9F1He+99x5KS0uF8cO5iMmbFHft2mV5HlGdvv/++0rpX3WLmG7QdR0JCf7THu0WhNiNPIcPH8b69ettbYKAumlA1GBEzJgxg1aayA0oMzMTe/fupZ4z/KajYHGaPrN8+umn2LZtGx599FF67cyZM/D5fMjNzbUs+vJlumnTJum0MFLTUsMwArwPWEjHdSOHGwUpS5ftsPwCGQC6WMgr7WBkU3kmp7Wl+vp6TJkyRVmO5ORk5OXlWcwXOTk51PNK1t7clj/xFnn++ectYSoqKoTKm82DyJCYmIjExO+OIGfTZzd/RaKNytoS/0JhB6zEhPbMM88ENZDluaoVeHR0NGbOnAkAuOuuuyz3DMNAfX09srOzqavRwYMHpQ2L74xOozB2ZxtvQ+ZXjO0UCLnW0NCAZcuWobKyEoB/4aalpQUXLlxQKAl73Ix+GxsbacMmO0pZtyvDMNDY2IjCwkIA35WpzC0ukojytCtj2X0+rKr8KmVaV1cXcC0zMxMJCQmWzU/BoFqnbo9T4PNgy4NskGPzbm5uxmOPPUZ/i7yw3ORH4O3ThAULFiint3DhQsuMkjB16lSh1wwvQ6i4SatPnz50Ryg/E5QNFp369hVxI1St9N/85jcYMGCAMA3A/6abOnUqevXqBQD43ve+B6BtFAzfIMgOOyd69uwJwL95Bgj0Jw8Gp8VglujoaDpi4V2uSPgDBw4ElKksPdm1UHCbXrCubG7Slcn37LPP4vTp01i+fDmtW8C/+YnU8dSpU21dG/k0nfLnUTlS2CkvwoYNGwKu7dmzh76oZLMR8pJXGRTJzFIE4uPPpy/6PWvWLKWTO3k5Rc8STpxmb+zfsv9V+sFVOwLPz8/H/v378cUXXwjvZ2dnY8KECcquifzIO5zeFaruPyLzw44dO6irnKwTO9nq3XxdhR2Bi0hJScHQoUNtw1wtNDQ0WHzT+ZmRDLcd18kLqaamxnK8LcmfmFJWrFiBBQsW2KYTik3czpVU5UUhw2nzm8j+q4qu6ygvLxfKGR8fT01TvImK/52YmChcs+E3njm9QIOFTZds5CPuzuR+sHmpxGtTBc6+gZ0a0lNPPYWJEydK7zc0NGD9+vWWa6rufm5RSXft2rVISUmx3Js9e7bl9/Tp06lpQpVgK1/lefnGNXfuXBw7dkx4fnIkZFRJl0yPu3fvHjAbS05Ohq7r9HyJNWvWKL9Mww1b/7Nnz0ZSUhKdlaWmpmLJkiU4deoU3njjjbDn7XYEzkLKKTY2NqBt8pvfeEIpW8MwpCaU1NTUoNOV5RUJ+JEyP1ttC67YaYQyO5ph+D/N9Mtf/lJo2yJhJ0+eTEcGbpQzN0KzPeFNtu3fTV4sK1asoCv7AwYMwPvvv4+amhpMnTrVkr6owal+Uk1FTnJWDGv20XUddXV1ePrpp7F8+fIAs4QTTN5hOeGNz7NPnz745S9/iZtuuimg3ZAXjt2Cp2hh2U5WcsIfF14qnyiv/v3747e//a1lcVh0Gp0ijm2Vz5
9HJjPZdEZMQIZhYPPmzbaLsHYjS7cnZ2ZlZdG1rnDA9ykbwtpW582bB8A/uOTvnz17Fi+++CI++ugjAO5fKlf9aYTkgXr27Im5c+dSOy154KVLl1rCb9y4ET/96U/pQVaqqCpgmQuZk03OLq99+/bR0ZLIti/znHFj7lFRLPyLkcSJi4ujo0jZwqAs/XDantk8WSW9fv16ekSnWyK1LmL3gvvss8+wcuVKzJ49m3pT2RGKF4ob0xE/eEpISLDY70Wb35zkVZFTdhbStm3bLIvBZDORCOKyOXXqVGGZVldXu57ltgWGYeCjjz6S1gEJw+NUp95GHg8PD492yhUbgctGeMuXL6eLP2VlZbj55psB+A+Hmj59uiUNYmPm32rhGA3KRguszOyIWdXUQOz2d9xxR0jyyWRRmeqzZjP+XAsiV1u5CspQNdvouk5nFBkZGXjvvffCKoNswdvNzGvdunW477770KNHD8ewojxUkbV9u/5Ano9dHHzllVdc5euUhwrEpKTruq0pbO/evdB1HVVVVUhISAgov6NHj1o2ULUlKuZo3kVQdo/gVKaOClzTtF4A/gSgOwATQKFpms9rmvYEgFkAqluDLm79RqYUFY8J9kCbNWvWUL/JnTt3WhS4YRjCjRzXXXddWKbyrAmFb5yiSlBROImJidRjYfHixaipqcHly5fp/YqKCnz55Zf0/JTU1FTLdzxlcgLOni8y+b744gtMnTqVurwNGDAAf/3rX+n5yXfffTdKS0upl8WNN96I3r17B8gq8ocOFpn9vbGxEcuXL0daWprw4KOcnBwUFBTg66+/BuB35SsvL0ffvn1x5MgRAH4zkRtZw7EYetttt9EPiRDzQEpKSkBb5dtZXFwcUlNTUVJSopyX7GVjN8ggR7cC/jbY0tJCwzU2NirVf2pqqpISl617ieKK5M3KykJaWho9UpqlpaWF2qGd+mNbLW4Tgl1rdCpTlRH4JQDzTdP8UNO0rgA+0DTtL6338k3TXKEqDLsoxCP6FFJhYSFdaLOza7FxSOcNFfKyUXlLytA0DXFxcdRN8Msvv8Tjjz+Oc+fO4dy5c0hOTsaRI0ewcOFCGqd3796uD5NSwS4dvnFFR0dj0qRJAIDz588jLS2N7iArLy+3uH9F8uvwfFmTExzZrwexz7Vp0yZ8/fXXmDJlCtavX4/y8nIMGzYMb731Fg0zdOjQgHgi2IFGKC5+gH/xmigjsqP4nnvuQVNTE3w+Hy5duoTdu3cH5DN06NCIK5prrrnG4iZYU1NjeS5N02j9jxo1Cvv376e2Z7dt1a7/i66LwnXt2hVPPfWUML7oAwn/6jgqcNM0TwM43fr3BU3TDgFItI8lhj2zQfR25RvA7Nmzcfz4cQCwnKLWFm9PVnnLzD0sosYWFxeHwYMHU2VDvBG6deuGbt260U+Vde/eHUlJSdKNNZF+3rq6Oos/dVVVFU6ePAnguy+htDUiM4BTWUycOBEjR45ETU0NNVWxyluUrgzWt1qWr8rirmEYyMvLs5xjDwB//etfaZzdu3dL44ar/mWmNdKvZPl06dIFXbp0oeGvvfZaXLx4UZiHiqwqJicir8ooWtd1urvZzQ7OK4VKXbqZ9blaxNQ0LQnArQCIofEhTdM+0TTtFU3T/sMpvpP5hK2wuXPn4vjx44iLi0NcXBwaGhos9sjs7GyL+UGURlvCK/mMjAz0798fvXr1Qnx8POLj4y1HyxYXF2Px4sXIzMxEbW0tVfKHDh3CW2+9hY8//hjffvtt2OQjClBUPjt27EBtbS3y8vJgGAaio6PpvyuBTFmS61OnTkVFRUVAmI4dO9KybmuIfETG1NRUZGRkYNmyZUK/ZrYeMjMzqS87S69evcLWBvh6J0fMzpgxQ9kM2NDQYGmrX3zxRVjaqorpRAQJ09TUhKamJssIvK1NJCyh6CC3M37lRUxN064F8DqAuaZp1mma9jKAZfDbxZcBeA7AdEG82QDojhZSOXaCzZ07F8
nJyRg6dCj69u0LIPBrMqyvZbiUNisr/+krWYMYMWIEkpKSLNeWLFmCgQMHBoS94447aDrdunXDqFGj8O6776J///7o2LEjkpKS8IMf/ACA3xb55ptvOsrpFsOwbqBISkqiH1gVHcwU6ggwFFlFEHmuueYa6UhFNkJ0Qlb/qnIRMjIyhKc6rlmzBoMHDwYAeo75pUuX8MEHH6B///7o0aMHmpqa0KlTJwCw3fTjtlx5pSjadLJx40acOHFCGJ+VU9RWDx06RGdtbuV0o7Tq6+uxatUqzJkzh5avzLe+LZS4k+4h9+fMmYPRo0c7pudWZqWNPJqmdQRgANhjmuZKwf0kAIZpmoHOzQz8hgMWYgMH/Jt07rzzTot9jS2oAwcOYM2aNfTEPJbLly/TD4cqYOvIL1v4YQt52bJlQmXNkpKSglOnTmHfvn3Ur/3y5csoKSlBQkKCcEdaQ0MDSkpK6GFXbjdH8Ih8dskGiqamJqxduxaTJk2i9k3y5Zi5c+cKZeUbWteuXYmsYdkcYSc/gf+QAuBXNIsWLUJzczP9Wk8osqpOZzt06EDNg6KzOfiNHIBaG2DSisimE/aUT/YAKPZ5x4wZ46ZPKW06C5W3336btlV257BLIl6mhFdeeQVbtmxxpaDZthf0Rh7NvwK3FsAhVnlrmtaj1T4OABMAfKosmYDq6mpaEaKjJMm9xsZGFBQUCM/qME0TH3/8cShiBBDM6F7XdUujevbZZy3HxxI5r732WkvHvXjxIp1K/+///i+raMIKeaZt27bhT3/6E95++2088MADQvllst5zzz1U1uPHj1sWNsMlnxPbt2+3/DZNE6WlpejUqRP69++P73//+0JZa2pqHMvV7XnwQ4YMocf0si/9yspK+oJkvx6k2gbCDZGNuOASJwH+/BAWN8rbDjdn9vDw6w11dXVBeRPxaYUT8nIm7Y7l6NGjyum4kVXFhPIjAFMBlGmaVtp6bTGA+zRNGwi/CeULAHOUJYwQ58+fD9s525GEyNm1a1d65nJqaipOnTpFG2VMTAzS09Nx6tSpKymqsqxXA165evy70eZnoURikVFlS6oERxOKE/n5+Xjqqacs23/ff/99qauTG1gvhFBNKAT+mdivgfN5A367raqsrUTkLJRgkS2ItcqrZEJx8jYxDAPXX3897r33Xsv1goICzJ49O2j3Ns4LJaymKTLqJx9TcHF+iCMqbVXmBx7Kmots/wBPuNsqn78dMr9/Oy8mwKZMr8RhVrxtWUUBR8idTvmAIDtUXCJDJVwKnEWlkYvWAUS/GcLWKUQNPJhytakbZRs4H9dumuumrbpQXCGXK5tHsApcxbUvHG3VrjxCfblHSoGL4Nuwk8LmuSoVuIhQOmgIBLWISXBQZEJkowSnuG2lwGX+wrL7AsKiaJwIUztxrcAjLI8dYX0xkjULosB1XaeLmKE+SyTaqhtUXjKthK2tyvpNuNrF1aLAqwH8s80ydOb7pmkKj4m7ymRtL3ICnqyRor3I2l7kBGxkbS+0qQL38PDw8Agf3nGyHh4eHu0UT4F7eHh4tFM8Be7h4eHRTvEUuIeHh0c7xVPgHh4eHu0UT4F7eHh4tFM8Be7h4eHRTvEUuIeHh0c7xVPgHh4eHu2U/x/Ee26/nXOIMwAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "# attacking\n", - "# get adv data\n", - "attack = FastGradientSignMethod(net, eps=0.3, loss_fn=loss)\n", - "adv_data = attack.batch_generate(test_inputs, test_labels)\n", - "\n", - "count = 1\n", - "%matplotlib inline\n", - "for i in adv_data[:32]:\n", - " plt.subplot(4, 8, count)\n", - " plt.imshow(np.squeeze(i), cmap='gray', interpolation='nearest')\n", - " plt.xticks([])\n", - " count += 1\n", - "\n", - "plt.axis(\"off\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "earlier-amplifier", - "metadata": {}, - "source": [ - "受到攻击后,图片出现了很多的类似水印的背景,但是在视觉上还是能明显地分辨出来图片是什么,但是对于模型来说,可能不一定。\n", - "\n", - "接下来,验证模型在攻击后的图片分类能力。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "greater-prophet", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:17,892 [:6] [demo] prediction accuracy after attacking is : 0.045072115384615384\n", - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:17,926 [:12] [demo] mis-classification rate of adversaries is : 0.9549278846153846\n", - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:17,926 [:14] [demo] The average confidence of adversarial class is : 0.70117253\n", - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:17,927 [:16] [demo] The average confidence of true class is : 0.04269705\n", - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:18,667 [:19] [demo] The average distance (l0, l2, linf) between original samples and adversarial samples are: (2.2191764481915106, 0.5356972896720278, 0.3000000376345752)\n", - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:20,650 [:22] [demo] The average structural similarity between original samples and adversarial samples are: 0.5092086321477988\n" - ] - } - ], - 
"source": [ - "# get accuracy of adv data on original model\n", - "adv_logits = net(Tensor(adv_data)).asnumpy()\n", - "adv_proba = softmax(adv_logits, axis=1)\n", - "tmp = np.argmax(adv_proba, axis=1) == np.argmax(test_labels, axis=1)\n", - "accuracy_adv = np.mean(tmp)\n", - "LOGGER.info(TAG, 'prediction accuracy after attacking is : %s', accuracy_adv)\n", - "attack_evaluate = AttackEvaluate(test_inputs.transpose(0, 2, 3, 1),\n", - " test_labels,\n", - " adv_data.transpose(0, 2, 3, 1),\n", - " adv_proba)\n", - "LOGGER.info(TAG, 'mis-classification rate of adversaries is : %s',\n", - " attack_evaluate.mis_classification_rate())\n", - "LOGGER.info(TAG, 'The average confidence of adversarial class is : %s',\n", - " attack_evaluate.avg_conf_adv_class())\n", - "LOGGER.info(TAG, 'The average confidence of true class is : %s',\n", - " attack_evaluate.avg_conf_true_class())\n", - "LOGGER.info(TAG, 'The average distance (l0, l2, linf) between original '\n", - " 'samples and adversarial samples are: %s',\n", - " attack_evaluate.avg_lp_distance())\n", - "LOGGER.info(TAG, 'The average structural similarity between original '\n", - " 'samples and adversarial samples are: %s',\n", - " attack_evaluate.avg_ssim())" - ] - }, - { - "cell_type": "markdown", - "id": "stainless-blogger", - "metadata": {}, - "source": [ - "对模型进行FGSM无目标攻击后:\n", - "\n", - "- 模型精度由97%以上降到不足10%;\n", - "\n", - "- 误分类率超过90%,成功攻击的对抗样本的预测类别的平均置信度(ACAC)为 0.70117253;\n", - "\n", - "- 成功攻击的对抗样本的真实类别的平均置信度(ACTC)为 0.04269705;\n", - "\n", - "- 同时给出了生成的对抗样本与原始样本的零范数距离、二范数距离和无穷范数距离,平均每个对抗样本与原始样本间的结构相似性为0.5092086;\n", - "\n", - "- 平均每生成一张对抗样本所需时间为0.003125s。\n", - "\n", - "FGSM无目标攻击后生成的对抗样本。从视觉角度而言,几乎没有明显变化,但是均成功误导了模型,使模型将其误分类为其他非正确类别。" - ] - }, - { - "cell_type": "markdown", - "id": "private-shoulder", - "metadata": {}, - "source": [ - "## 对抗性防御\n", - "\n", - "NaturalAdversarialDefense(NAD)是一种简单有效的对抗样本防御方法,使用对抗训练的方式,在模型训练的过程中构建对抗样本,并将对抗样本与原始样本混合,一起训练模型。随着训练次数的增加,模型在训练的过程中提升对于对抗样本的鲁棒性。NAD算法使用FGSM作为攻击算法,构建对抗样本。\n", - 
"\n", - "### 防御实现\n", - "\n", - "调用MindArmour提供的NAD防御接口(NaturalAdversarialDefense)。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "animal-resident", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:31,896 [:15] [demo] accuracy of TEST data on defensed model is : 0.9793669871794872\n", - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:32,001 [:29] [demo] accuracy of adv data on defensed model is : 0.7190504807692307\n", - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:32,002 [:31] [demo] defense mis-classification rate of adversaries is : 0.2809495192307692\n", - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:32,003 [:33] [demo] The average confidence of adversarial class is : 0.5715536\n", - "[INFO] MA(22790:140157538080576,MainProcess):2021-04-07 15:35:32,004 [:35] [demo] The average confidence of true class is : 0.16227658\n" - ] - } - ], - "source": [ - "from mindarmour.adv_robustness.defenses import NaturalAdversarialDefense\n", - "\n", - "# defense\n", - "net.set_train()\n", - "nad = NaturalAdversarialDefense(net, loss_fn=loss, optimizer=opt,\n", - " bounds=(0.0, 1.0), eps=0.3)\n", - "nad.batch_defense(test_inputs, test_labels, batch_size=32, epochs=10)\n", - "\n", - "# get accuracy of test data on defensed model\n", - "net.set_train(False)\n", - "test_logits = net(Tensor(test_inputs)).asnumpy()\n", - "\n", - "tmp = np.argmax(test_logits, axis=1) == np.argmax(test_labels, axis=1)\n", - "accuracy = np.mean(tmp)\n", - "LOGGER.info(TAG, 'accuracy of TEST data on defensed model is : %s', accuracy)\n", - "\n", - "# get accuracy of adv data on defensed model\n", - "adv_logits = net(Tensor(adv_data)).asnumpy()\n", - "adv_proba = softmax(adv_logits, axis=1)\n", - "tmp = np.argmax(adv_proba, axis=1) == np.argmax(test_labels, axis=1)\n", - "accuracy_adv = np.mean(tmp)\n", - "\n", - 
"attack_evaluate = AttackEvaluate(test_inputs.transpose(0, 2, 3, 1),\n", - " test_labels,\n", - " adv_data.transpose(0, 2, 3, 1),\n", - " adv_proba)\n", - "\n", - "LOGGER.info(TAG, 'accuracy of adv data on defensed model is : %s',\n", - " np.mean(accuracy_adv))\n", - "LOGGER.info(TAG, 'defense mis-classification rate of adversaries is : %s',\n", - " attack_evaluate.mis_classification_rate())\n", - "LOGGER.info(TAG, 'The average confidence of adversarial class is : %s',\n", - " attack_evaluate.avg_conf_adv_class())\n", - "LOGGER.info(TAG, 'The average confidence of true class is : %s',\n", - " attack_evaluate.avg_conf_true_class())" - ] - }, - { - "cell_type": "markdown", - "id": "sonic-violence", - "metadata": {}, - "source": [ - "### 防御效果\n", - "\n", - "使用NAD进行对抗样本防御后,模型对于对抗样本的误分类率从90%以上降至不足30%,模型有效地防御了对抗样本。同时,模型对于原来测试数据集的分类精度达97%。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/tutorials/notebook/mindspore_linear_regression.ipynb b/tutorials/notebook/mindspore_linear_regression.ipynb deleted file mode 100644 index a6ed73f44464cf41a3d59ad8d0ebefff88efa4ef..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_linear_regression.ipynb +++ /dev/null @@ -1,640 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用MindSpore实现简单线性函数拟合" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - 
"回归问题算法通常是利用一系列属性来预测一个值,预测的值是连续的。例如给出一套房子的一些特征数据,如面积、卧室数等等来预测房价,利用最近一周的气温变化和卫星云图来预测未来的气温情况等。如果一套房子实际价格为500万元,通过回归分析的预测值为499万元,则认为这是一个比较好的回归分析。在机器学习问题中,常见的回归分析有线性回归、多项式回归、逻辑回归等。本例子介绍线性回归算法,并通过MindSpore进行线性回归AI训练体验。\n", - "\n", - "整体流程如下:\n", - "\n", - "1. 生成数据集\n", - "2. 定义训练网络\n", - "3. 定义前向传播网络与反向传播网络并关联\n", - "4. 拟合过程可视化准备\n", - "5. 执行训练\n", - "\n", - "> 本文档适用于CPU、GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 环境准备\n", - "\n", - "设置MindSpore运行配置" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:52.617310Z", - "start_time": "2021-01-04T07:04:51.919345Z" - } - }, - "outputs": [], - "source": [ - "from mindspore import context\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`GRAPH_MODE`:图模式。\n", - "\n", - "`device_target`:设置MindSpore的训练硬件为CPU。\n", - "\n", - "> 本教程代码依赖`matplotlib`第三方支持包,可使用命令`pip install matplotlib`安装。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 生成数据集\n", - "\n", - "### 定义数据集生成函数\n", - "\n", - "`get_data`用于生成训练数据集和测试数据集。由于拟合的是线性数据,假定要拟合的目标函数为:$f(x)=2x+3$,那么我们需要的训练数据集应随机分布于函数周边,这里采用了$f(x)=2x+3+noise$的方式生成,其中`noise`为遵循标准正态分布规律的随机数值。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:52.623357Z", - "start_time": "2021-01-04T07:04:52.618320Z" - } - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "\n", - "def get_data(num, w=2.0, b=3.0):\n", - " for _ in range(num):\n", - " x = np.random.uniform(-10.0, 10.0)\n", - " noise = np.random.normal(0, 1)\n", - " y = x * w + b + noise\n", - " yield np.array([x]).astype(np.float32), np.array([y]).astype(np.float32)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用`get_data`生成50组测试数据,并可视化。" - ] - }, - { - "cell_type": "code", - 
"execution_count": 3, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:52.988318Z", - "start_time": "2021-01-04T07:04:52.624363Z" - } - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAEICAYAAAC6fYRZAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3de3yU5Zn/8c/FwRNYxSVBFBE8tFu1Fdsg1EOrQpWqLZWtVi3Iri7xUFrRdn/i4VdPuxC79dDtzxXSFnRbqIcqeAIFFKwBFYO1cgj+RAUMYBJqBTyFhrn2j+eZMElmkgwzTyYz+b5fr7wyz2HmvnkyXLlzP9dct7k7IiJSmLrlugMiIhIdBXkRkQKmIC8iUsAU5EVECpiCvIhIAVOQFxEpYAryIs2Y2RIz+9d2nnuamVVH3SeRPaUgL3nLzNab2adm9lHC1//Ldb9SMbN/NrOKXPdDupYeue6ASIa+7e6Lct0Jkc5KI3kpOGa2t5l9aGbHJewrCkf9xWbWx8yeMrM6M/tb+HhAO197XzO7P3zeGmBos+OTzextM9thZmvM7Lxw/xeBacDXwr84Pgz3n2Nmfzaz7Wb2npndkq3rIAIK8lKA3L0eeAy4KGH3BcAL7l5L8L6fCRwODAQ+Bdo7zXMzcGT4dRYwvtnxt4FTgQOAW4Hfm1l/d68CrgBecvfe7n5geP7HwCXAgcA5wJVm9t00/rkirVKQl3w3Nxy1x78mhPtn0zTIXxzuw93/6u6Puvsn7r4D+A/gG+1s7wLgP9z9A3d/D/ivxIPu/oi7b3b3mLs/BLwFnJjqxdx9ibuvDM9/A/hDGn0RaZPm5CXffTfFnPzzwL5mNgx4HxgCzAEws/2Au4FRQJ/w/P3NrLu772qjvUOA9xK2NyQeNLNLgGuBQeGu3kDfVC8W9q8MOA7YC9gbeKSNPoi0m0byUpDcPQY8TDCavxh4Khy1A/wE+AIwzN0/B3w93G/teOktwGEJ2wPjD8zscODXwETgH8IpmVUJr5us5Ots4AngMHc/gGDevj39EGkXBXkpZLOB7wM/CB/H7U8wD/+hmR1EMM/eXg8D14c3bwcAP0o41osgkNcBmNm/EIzQ42qAAWa2V7O+fODun5nZiQS/kESyRkFe8t2TzfLk58QPuPsrBDc2DwHmJzznHmBfYCvwMvBMGu3dSjBF8y6wAPhdQntrgDuBlwgC+peApQnPfR5YDbxvZlvDfVcBt5nZDuBnBL9ERLLGtGiIiEjh0kheRKSAKciLiBQwBXkRkQKmIC8iUsA61Yeh+vbt64MGDcp1N0RE8sqKFSu2untRsmMZB3kzOwz4H+BgIAaUu/svw0JLEwhzhoEb3H1ea681aNAgKisrM+2SiEiXYmYbUh3Lxki+AfiJu79mZvsDK8xsYXjsbnf/RRbaEBGRPZBxkHf3LQQf9cbdd5hZFXBopq8rIiKZy+qNVzMbBJwAvBLummhmb5jZDDPrk/KJIiISiawFeTPrDTwKTHL37cB9BDW3hxCM9O9M8bxSM6s0s8q6urpkp4iIyB7KSpA3s54EAX6Wuz8G4O417r4rrAb4a1LU1Hb3cncvcfeSoqKkN4dFRGQPZRzkzcyA3wJV7n5Xwv7+CaedR1ByVUREOlA2smtOBsYBK83s9XDfDcBFZjaEoPTqeuDyLLQlIpLfYjGoq4PiYrDolw7IRnZNBckXOWg1J15EpMuIxaCmBtzhwgvhpZfgpJNg8WLoFm3hgU71iVcRkYITi8Hpp8OLLwZBPm7ZMqithYMP
jrR51a4REYlSXV0Q0Juv3dHQABdcEPwSiJCCvIhIlIqLg6mZ+Pz7sGG7p2heein4JRAhBXkRkSiZBXPvmzbBli3BqP6UU6BHDzjpJBr6HsQNz93AypqVkTSvOXkRkah16wb9E7LKFy+Gujoe2foCF/x7sK77xzs/5pff+mXWm1aQFxHpYNt27uDAabtvuI48YiT3jLonkrY0XSMi0oGmvDiFA+84sHF7zVVrWDhuIRZRzrxG8iIiHWDDhxsY9MtBjdvXDr+WO89KWtIrqzSSFxGJkLvzg8d+0CTA1744nDu/+Z8d0r5G8iIiEXml+hWG/3Z443b5092Y8GoMelQGqZP9+kXeB43kRUTSlVimIImGWAPH/vexjQG+uFcxn97wCRP23Z06SXFxh3RVQV5EJB3xMgUDBsBpp7X4xOojqx+h5+09WVO3BoCF4xZS89Ma9um5b5A6WV0NS5Z0SHEy0HSNiEh64mUKGhqC7+G0y/b67RxQdkDjaSOPGMmCsQuaZs1069YhUzSJNJIXka6tjamXFuJlChKmXaa8OKVJgE+ZFpluW1mgkbyIdF3xqZdly9pf+jdepqCujg17fcqg23af32pa5J60lQUK8iLSdaWYemmLmzF26bXMXjm7cV/tT2sp6tXKEqZ72FamsrH832FmttjMqsxstZldHe4/yMwWmtlb4fc+mXdXRCSLkky9tGX5puV0u61bY4AvP7ccv9lbD/B72FY2mGc4NxSu5drf3V8zs/2BFcB3gX8GPnD3MjObDPRx9+tae62SkhKvrKzMqD8iImlp53J8DbEGhkwbwuq61UCQFrlh0gb26bFP1ttKl5mtcPeSZMcyHsm7+xZ3fy18vAOoAg4FRgMPhKc9QBD4RUQ6l3jGSytBN54WGQ/wjWmR6QT4draVbVmdkzezQcAJwCtAP3ffAsEvAjNL+reJmZUCpQADBw7MZndERDLSrrTITi5rt3bNrDfwKDDJ3be393nuXu7uJe5eUlTUxpyWiEhHiMWYMv+GJgF+9VWrI60WGZWsjOTNrCdBgJ/l7o+Fu2vMrH84iu8P1GajLRGRKG344F0G/eqIxu1rh1/DnWfdlcMeZSYb2TUG/BaocvfEK/EEMD58PB54PNO2RESi0lgtMiHA1/4c7vzyv+WwV5nLxkj+ZGAcsNLMXg/33QCUAQ+b2WXARuD8LLQlIpJ1yzctZ9hvhjVulz8JE1YQ3CDNs+mZ5jIO8u5eAaS6CiMyfX0Rkag0xBo4YfoJrKpdBYRpkVevZ5/nR0GP8JOpHVxrJtv0iVcR6ZIeWf0IF/zxgsbtheMWMvKIkcFGWLYg2/nsuaAgLyKFK8mHjzprtcioqAqliBSmJHXfm1eLzNe0yHRoJC8ihSmhINiG1UsZdHv3xkPXDL+Gu/I4LTIdCvIiUpj69sWHljD20FeYfdyuxt01P62huFfHFAfrDBTkRaTwxGIsH13CsLNeb9xVfm45E746IYedyg0FeRHJX0lurDbEGjjh3i+xauhaAIo/gg2T1rPPoYfnsqc5oxuvIpKfktxY/eOaP9Lz9p6s+iAI8AtmdaNm+dfZ55CuW/xQI3kRyU8JN1a3Vy7lgIQbqyOPGMmCi5/BrthaELnumVCQF5H8FK60NMUquPH03TdWV1+1mmOKjgk2CiTXPRMK8iKSlzZs28igM/7UuN2V0iLToSAvInnF3Rk7Z2yTRbS7WlpkOhTkRSRvNK8WOf3c6ZR+tTSHPer8FORFpNNLWi0y3UW0uyilUIpIp9aYFhkG+AVjF+zZItpdVLaW/5sBnAvUuvtx4b5bgAlAXXjaDe4+LxvtiUjha7VaZJIPQUly2RrJ3w+MSrL/bncfEn4pwItIu0x9cWrqapFJPgQlqWVlJO/ufzKzQdl4LRHpujZ8uIFBvxzUuJ00LTLhQ1AsWxZsKx8+pajn5Cea2RtmNsPM+iQ7wcxKzazSzCrr6uqSnSIiXcDYx8Y2CfA1P61JnvcefgiKHj2C
78VKnWxNlEH+PuBIYAiwBbgz2UnuXu7uJe5eUlRUFGF3RKQzWr5pOXarMWvlLCBIi/SbvWneeywGNTXgHszBL14M1dWwZInm5NsQWQqlu9fEH5vZr4GnompLRPJPu9Mi43Pwy8KFtRcvLqjl+aIW2UjezPonbJ4HrIqqLRHp5BJH4qSZFplsDl7aLVsplH8ATgP6mlk1cDNwmpkNARxYD1yejbZEJM8kjMS3n3oiB3xjWeOhpItoNxefg4+P5DUHnxbz8DdrZ1BSUuKVlZW57oaIZFNNDQwYwNThDdwwcvfuJtUi28p7V158q8xshbuXJDumT7yKSKQ27PUpdtPuAH/NsEn4zd40wLeV9x6fg1eAT5tq14hIZMY+NrYxawag5ifvU9y72Q1T5b1HSiN5Ecm6FmmR50wL0iKbB3hQ3nvENJIXkaxpiDXwlelfYWXtSgCKdvZk450x9nl+NiyeEEy7NBfPe9eceyQ0kheRrIinRcYD/IJzHqL2584+9bvaTn3UnHtkNJIXkYykrBYJcNK9Sn3MMQV5EdljU1+cyg3P39C43SQtEjQN0wkoyItI2ppXi5w0bBJ3j7q75YkqP5BzCvIikpYWaZFaRLtTU5AXkXbRItr5SUFeRFrVIi1yvyI2XrNRa6zmCaVQikhKLdIixy6g9t9qFeDziEbyItJCq4toS17RSF5EmiirKEu9iLbkHY3kRQRiMTa+82cOn7W7Wm3KtEjJK9laNGQGcC5Q6+7HhfsOAh4CBhEsGnKBu/8tG+2JSBbFYoz9YX9mHVzbuEtpkYUjW9M19wOjmu2bDDzn7kcDz4XbItKJLN+0HLu9e2OAn/50N/yK9xXgC0hWgry7/wn4oNnu0cAD4eMHgO9moy0RaUOz9VSTaYg1cPy04xvz3ot29uTTqd0p3fcU1ZgpMFHeeO3n7lsAwu9J3zlmVmpmlWZWWacFekUy09AAp5zS6ipL8bTIN2reAGDBrG7Uvjicfd59D5YsUY2ZApPzG6/uXg6UQ7DGa467I5K/YjE49VR4+eVgu9kqS83TIkcceioLrlxGt4Zd8O5LQZ0ZBfiCE+VIvsbM+gOE32vbOF9EMlFXB6++unt76NDGqZdkaZGLLnuBbiedrBWZClyUI/kngPFAWfj98QjbEpHiYjj5ZFi6NAjwFRVs3P4eh99zeOMpLdIiVQq44GUrhfIPwGlAXzOrBm4mCO4Pm9llwEbg/Gy0JSIpNFtGb9zcS/j9G79vPJw0LVKlgAteVoK8u1+U4tCIbLy+iCQRi7UchXfrxvKGDQy77eDG01QtsmvL+Y1XEdkDsRicfvrupfUWL6aBGF8t/2pj1oyqRQqodo1IfqqrCwJ8QwMsW8ajr8xsmhbZVrXIduTSS2FQkBfJR8XFcNJJbN+vO3ZTA99b8K8AjBg8gl0/28U3j/xm6ufG/wpoJZdeCoeCvEg+MqPs30dxwP/Z1bhr9VWrWXTJIrpZG/+tm/0VgD6EWNA0Jy+SZzZu29h6WmRbwr8CGufzlR9f0BTkRfLIuMfG8vtMF9Fulmqp/PjCpiAvkgde3fQqJ/7mxMbtaWuP4vJZbwZ57slSKdui/PguQ3PyIp1YvFpkPMD3/Rg++Xe4/I/rg8Ce7k1UZdV0OQryIp3Uo2sebZoW+YNnqXvl6+xLQq2ZdG6iKqumS9J0jUgns6N+B58r+1zj9ojBI1gwbkGQNbN4ZNOpmXRuoib7haApm4KnkbxIJ1JWUdYkwK+6clXTtMj4XHp87j1+E7W6uu1a8PFfCKo62aVoJC/SCWSUFtnem6jKqumSFORFcmzcnHFtV4vMFmXVdDkK8iI50iIt8pxpXF5yeQ57JIVIQV4kKiny1xtiDU2qRfbdry8bJ21k35775qqnUsAiv/FqZuvNbKWZvW5mlVG3J9IppEhXbJkW+Qx1l6xiX5UDloh01Ej+dHff2kFtieRes3TFHZve5XMzjmo8PGLwCBb84Bm6nTGiSU14uinhTbJL
7yiRKCSkK5ZdNKBJgG9Mi9z6V1WDlMh1RJB3YIGZrTCzFmuQmVmpmVWaWWWd3uRSKMzYOPcB7KYGrj9yPQBXD7sav9k5tvjY4BzlrUsHMI+4hoWZHeLum82sGFgI/Mjd/5Ts3JKSEq+s1LS95L9L5lzC7974XeN2yrTIPSkuJtKMma1w95JkxyKfk3f3zeH3WjObA5wIJA3yIvku7bRI5a1LxCIN8mbWC+jm7jvCx2cCt0XZpkguKC1SOquoR/L9gDkW/BnaA5jt7s9E3KZIh3p0zaN875HvNW4/O/ZZzjzyzBz2SGS3SIO8u78DHB9lGyK50mq1SJFOQu9GkT1wR8UdrVeLFOkkVNZAJA3Nq0VePexq7hl1Tw57JNI6BXmRdrrs8cuY8fqMxu1Iq0WKZIn+thRpw6btmzjvofMaA/y0c6bhN7sCvOQFjeRFUoh5jOmV05n83GR27tpJ2Ygyfjzsx0qLlLyiIC+SxJq6NZQ+WcrS95YyYvAIpp87nSMPOjLX3RJJm4K8SIL6hnqmVkxlyotT2H/v/bn/OzO4pP+3sD76VKrkJ83Ji4QqNlZwwvQTuPWFWzn/2POpunI14yfdjx12WJOa8CL5REFeurxtn23jyqeu5NSZp/LJ3z9h3sXzmDVmFsWfmEoBS97TdI10aXOq5jBx/kTe/+h9rh1+Lbeefiu99+odHIyXAo4v6qFSwJKHFOSlS9q0fRMT509k7tq5DOk3hMe/OYOS485sWu7XLFitSaWAJY9puka6lJjHuO/V+zjmv4/hmXXPcMeIMpY/2JuSr5ybfN49XgpYAV7ylEbyUpiSLMaRNC3y771h6U1N591V310KiEbyUnhiMTj9dBgwAE47jfqdn3LLklsYMm0IVVuruH/0/SwctzDIe9cSfFLgNJKXwlNX15gVU/HeUkqnHU/V397i4i9dzN1n3d20HIHm3aXART6SN7NRZvamma0zs8lRtydCcTHbTj2RK79tnDp+F5/Edu5Oi0xWb0bz7lLAol7+rztwL/BNoBp41cyecPc1UbYrXductXOZeO563v/IuObEq7ntjNt3p0WKdDFRT9ecCKwLV4jCzB4ERgMK8pJ1m7Zv4kfzf8SctXM4vt/xzP3+XIYeOjTX3RLJqaiD/KHAewnb1cCwiNuULqZ5tcg7Rt7BNcOvoWf3nrnumkjORR3kk01yepMTzEqBUoCBAwdG3B0pNKoWKdK6qG+8VgOHJWwPADYnnuDu5e5e4u4lRUVFEXdHCkV9Q32TtMiZo2fuTosUkUZRj+RfBY42s8HAJuBC4OKI25QCV7GxgtInS6naWpU8LVJEGkUa5N29wcwmAs8C3YEZ7r46yjalcG37bBuTF01m2oppHH7A4cy7eB7fOvpbue6WSKcW+Yeh3H0eMC/qdqSwJVaLvGbYJKVFirSTPvEqnVqTtMgdvZj7sDH0udfgzP1y3TWRvKDaNdIpJVaLnL9uPmXDbuTVX33G0Pd2aQEPkTRoJC+dTtK0yD5HwPAXtYCHSJoU5KXTaL6I9szRMxl//HjMHWpr4fnnYetWFRITSYOCvHQKiWmRFx13EfeMuidIi4yXDY6P4BcvVoAXSYPm5CWnki2iPfufZu/Oe08oG6y5eJH0aSQvOdMiLfK4H9H70MFNT9Ji2iIZ0UhesiMWg5oacG/z1M07NjPmoTGMeXgMRfsV8fKly7hr6mv0HvyFluusxhf1qK6GJUs0VSOSJgV5yVyz5fZaLIYdP81jTKucxhfv/WKQFjmijFcnvMrQnoNan5LRoh4ie0zTNZK5ZPPmzRbDbrVapKZkRCKjIC+ZayVIp0yLTByVa51VkcgoyEvmUgTppRuXMuHJCS3TIpOJT8lAMN2jgC+SFZqTl+xImDePp0WeMvOU5GmRrWnn/L6ItI9G8pJVTdIih1/Dbaffll61yHbM74tI+2kkL1nRIi3yspe566y70i8HHJ/f79FDN2FFskAjeclIzGOUryjnukXXsXPX
TspGlHHt167d80W0dRNWJKsiC/JmdgswAYgnPd8QLiAiBaKqrorSp0qp2FjBiMEjmHbuNI466KjMXzjxJqyIZCTqkfzd7v6LiNuQDtautEgR6RQ0XSNpSSstUkRyLuobrxPN7A0zm2FmfZKdYGalZlZpZpV1qjDYaSWmRX5c/Q7z/tCN2f+1ieJ9++a6ayLSCvN2FJRK+WSzRcDBSQ7dCLwMbAUcuB3o7+6XtvZ6JSUlXllZucf9kWgkpkVe/aUJ3PaD39D7011BBkx1tebPRXLMzFa4e0myYxlN17j7yHZ24NfAU5m0JR1v847NTJw3MVhEu9/xzP3+XIYeUgJDq1RnRiRPRJld09/dt4Sb5wGrompLsqvNtEilOIrkjShvvP7czIYQTNesBy6PsC3Jkqqa1ZTOvYyK91/hjMFnMP3c6S3TIpXiKJI3Igvy7j4uqteW7KtvqGfqi1OYsuR29v/Mmbn+84y/aQHWvXuuuyYiGVAKpTRNi1xj3DMfiuvfga1bNWIXyXOqXdOFNUmL/PvHzLvoaWbXnkpxverGiBQKjeS7qMS0yEnDJnH7GbcHxcQWj9JNVZECoiDfxWzeVs3Exy9nzrvzdqdFHjp09wm6qSpSUBTku4iYxyivnMZ1T/yYnb6Lsg2DufaGV+jZc+9cd01EIqQg3wUkVos84z1j+pNw1Pb34BcfatQuUuB047WA1TfUc+uSWxkyfQira1cz8zszWPTuKRy1vY0bq7EY1NRABiUvRKRz0Eg+36VY9DpltcjF41u/sRpfYzVetmDx4mCeXkTykv735rMki163SItsvoh2woLbSSVbY1VE8pZG8vmsWUCes/wBJi67qWVaZDria6yqAJlIQVCQz2dhQN78xlImXnwAc569lC/3+3LLtMh0aI1VkYKiIJ/HYjjlv/g+1y36MztjH1P2jQwX0Y5TrrxIwVCQz1NN0iJTVYsUkS5PQT7P1DfUU1ZRxpSKKfTq2UuLaItIqxTk84gW0RaRdGWUQmlm55vZajOLmVlJs2PXm9k6M3vTzM7KrJtdW5tpkSIiKWQ6kl8FjAGmJ+40s2OAC4FjgUOARWb2eXfflWF7XU7KapEiIu2Q6ULeVUCy+eDRwIPuXg+8a2brgBOBlzJprytJXEQ747RIEemyopqTPxR4OWG7OtzXgpmVAqUAAwcOjKg7eSIWI1ZbQ/nGOVz33GR2xv7echFtEZE0tBnkzWwRcHCSQze6++OpnpZkX9JqV+5eDpQDlJSUdN2KWLEYVecMo/SQSioGwhnvwvT3h3LU9f+m2jEissfaDPLuPnIPXrcaOCxhewCweQ9ep0uob6in7NmbmFJSSa+/w8y5MP51sB5/Dj55qg8micgeimq65glgtpndRXDj9WhgeURt5bUmaZFbi7hn9gcUW2/o/pFqx4hIxjIK8mZ2HvAroAh42sxed/ez3H21mT0MrAEagB8qs6apbZ9t4/rnrue+yvsY+NnePD2nG2cX/yOsfSgI7Fu3qnaMiGTMvBMtDFFSUuKVlZW57kbkEtMif3zcv3L72N/S+9Nd0KMHVFdrekZE0mJmK9y9JNkxfeK1A7VIi7zgMYb2OBxKquCllzQ9IyJZpyDfAWIeo3xFOdctuo6du3YydcRUfjLsGnqOPHN33faNG+HggzU9IyJZpSAfsZTVImtqmq7A1K2bAryIZJ0SsCPSYhHt0TNZNG7R7nLA8RWYerSxqLaISAY0ko9Au6pFagUmEekACvJZ1CQt8oCBPH3x05x99Nmpn6AVmEQkYgry6YrFko6+VS1SRDojzcmnIxaD00+HAQPgtNMgFmPzjs2MeWgMYx4eQ9/9+vLyZS9z96i7FeBFpFPQSD4ddXWNGTGxZUspX/ILrlv+H7vTIr/2E1WLFJFORUE+HWFGTNWbSym9qBcVL16nRbRFpFNTkE9D/a6dlN1yOlMqXqbXXt2ZeZYW0RaRzk1Bvp20iLaI5CMF+TaknRYpItKJKMi3Yu7a
ufxw3g+VFikieUtBPi4h/33zR1u0iLaIFISM8uTN7HwzW21mMTMrSdg/yMw+NbPXw69pmXc1QmH+e2zAoUy7+PN88d4vMn/dfKaOmErlhEoFeBHJW5mO5FcBY4DpSY697e5DMnz9jlFXx9o3lzJh3C4qDl/HGX1PYfqYmUqLFJG8l1GQd/cqIH9SCJOUJKhvqKes6j6mXB6jVz3MXPN5xv/fF7Bu+jCwiOS/KCPZYDP7s5m9YGanpjrJzErNrNLMKuvq6qLrTZKSBEs3LuWE6Sdwywu38k9f+j5rr1jJPz+4VgFeRApGmyN5M1sEHJzk0I3u/niKp20BBrr7X83sq8BcMzvW3bc3P9Hdy4FyCNZ4bX/X05RQkmDbiqVc/+il3LfmAaVFikhBazPIu/vIdF/U3euB+vDxCjN7G/g8kLtVusOSBHO3VvDD0d15v+p3SosUkYIXSQqlmRUBH7j7LjM7AjgaeCeKttpr80db+NFV/8Bja2N8ud8/Mvfbv1HWjIgUvIyCvJmdB/wKKAKeNrPX3f0s4OvAbWbWAOwCrnD3DzLubbpiMWK1NZS/N5frnpusapEi0uVkml0zB5iTZP+jwKOZvHbGYjHWnjOMCYdUUjEQzhh0BtO/rWqRItK1FGQayc5dO7lt/mSOL6lkdRHMeLIbi86apQAvIl1OwZU1WLpxKaVPlbKmbg0XbS3i7j98QL/jT9ZaqiLSJRXMSH7bZ9u46umrOGXmKXy08yOevvhpZt/7Pv3e3ARLljRZj1VEpKsoiJF85eZKRj84Onm1SI3gRaQLK4ggf0SfIzi26FhVixQRaaYggvxB+x7EgnELct0NEZFOp2Dm5EVEpCUFeRGRAqYgLyJSwBTkRUQKmIK8iEgBU5AXESlgCvIiIgVMQV5EpICZe3Qr7qXLzOqADRm8RF9ga5a6k03qV3rUr/SoX+kpxH4d7u5FyQ50qiCfKTOrdPeSXPejOfUrPepXetSv9HS1fmm6RkSkgCnIi4gUsEIL8uW57kAK6ld61K/0qF/p6VL9Kqg5eRERaarQRvIiIpJAQV5EpIDlVZA3s/PNbLWZxcyspNmx681snZm9aWZnpXj+YDN7xczeMrOHzGyviPr5kJm9Hn6tN7PXU5y33sxWhudVRtGXZu3dYmabEvp2dorzRoXXcZ2ZTe6Afv2nma01szfMbI6ZHZjivMivV1v/djPbO/z5rgvfS4Oi6EeSdg8zs8VmVhX+H7g6yTmnmdm2hJ/vzzqob63+XCzwX+E1e8PMvtIBffpCwnV43cy2m9mkZud0yPUysxlmVmtmqxL2HWRmC8NYtNsmmtMAAATRSURBVNDM+qR47vjwnLfMbPwedcDd8+YL+CLwBWAJUJKw/xjgL8DewGDgbaB7kuc/DFwYPp4GXNkBfb4T+FmKY+uBvh14/W4BftrGOd3D63cEsFd4XY+JuF9nAj3Cx3cAd+TierXn3w5cBUwLH18IPNRBP7v+wFfCx/sD/z9J304Dnuqo91N7fy7A2cB8wIDhwCsd3L/uwPsEHxjq8OsFfB34CrAqYd/Pgcnh48nJ3vPAQcA74fc+4eM+6bafVyN5d69y9zeTHBoNPOju9e7+LrAOODHxBDMz4Azgj+GuB4DvRtnfsM0LgD9E2U6WnQisc/d33H0n8CDB9Y2Muy9w94Zw82VgQJTttaI9//bRBO8dCN5LI8Kfc6TcfYu7vxY+3gFUAYdG3W6WjAb+xwMvAweaWf8ObH8E8La7Z/Jp+j3m7n8CPmi2O/F9lCoWnQUsdPcP3P1vwEJgVLrt51WQb8WhwHsJ29W0/A/wD8CHCcEk2TnZdipQ4+5vpTjuwAIzW2FmpRH3JW5i+CfzjBR/IrbnWkbpUoJRXzJRX6/2/NsbzwnfS9sI3lsdJpwiOgF4Jcnhr5nZX8xsvpkd20Fdauvnkuv31IWkHmjl4noB9HP3LRD8AgeKk5yTlevW6RbyNrNFwMFJDt3o7o+nelqSfc1zQ9tz
Tru1s58X0foo/mR332xmxcBCM1sb/tbfY631C7gPuJ3g3307wVTSpc1fIslzM86zbc/1MrMbgQZgVoqXyfr1at7NJPsifR+ly8x6A48Ck9x9e7PDrxFMSXwU3m+ZCxzdAd1q6+eSs2sW3nf7DnB9ksO5ul7tlZXr1umCvLuP3IOnVQOHJWwPADY3O2crwZ+JPcIRWLJz2q2tfppZD2AM8NVWXmNz+L3WzOYQTBdkFLTae/3M7NfAU0kOtedaZr1f4U2lc4ERHk5IJnmNrF+vZtrzb4+fUx3+jA+g5Z/ikTCzngQBfpa7P9b8eGLQd/d5ZvbfZtbX3SMtxtWOn0sk76l2+hbwmrvXND+Qq+sVqjGz/u6+JZy6qk1yTjXBfYO4AQT3I9NSKNM1TwAXhpkPgwl+Gy9PPCEMHIuB74W7xgOp/jLIhpHAWnevTnbQzHqZ2f7xxwQ3H1clOzdbms2DnpeivVeBoy3IRNqL4E/dJyLu1yjgOuA77v5JinM64nq159/+BMF7B4L30vOpfillUzjv/1ugyt3vSnHOwfH7A2Z2IsH/779G3K/2/FyeAC4Js2yGA9viUxUdIOVf07m4XgkS30epYtGzwJlm1iecWj0z3JeeqO8sZ/OLIDBVA/VADfBswrEbCTIj3gS+lbB/HnBI+PgIguC/DngE2DvCvt4PXNFs3yHAvIS+/CX8Wk0wbRH19fsdsBJ4I3yT9W/er3D7bILsjbc7qF/rCOYeXw+/pjXvV0ddr2T/duA2gl9AAPuE75114XvpiKivT9juKQR/qr+RcJ3OBq6Iv8+AieG1+QvBDeyTOqBfSX8uzfplwL3hNV1JQmZcxH3bjyBoH5Cwr8OvF8EvmS3A38P4dRnBfZzngLfC7weF55YAv0l47qXhe20d8C970r7KGoiIFLBCma4REZEkFORFRAqYgryISAFTkBcRKWAK8iIiBUxBXkSkgCnIi4gUsP8FUy7XCPXim48AAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "eval_data = list(get_data(50))\n", - "x_target_label = np.array([-10, 10, 0.1])\n", - "y_target_label = x_target_label * 2 + 3\n", - "x_eval_label,y_eval_label = zip(*eval_data)\n", - "\n", - "plt.scatter(x_eval_label, y_eval_label, color=\"red\", s=5)\n", - "plt.plot(x_target_label, y_target_label, color=\"green\")\n", - "plt.title(\"Eval data\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "上图中绿色线条部分为目标函数,红点部分为验证数据`eval_data`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义数据增强函数\n", - "\n", - "使用MindSpore的数据增强函数,将数据进行增强操作,操作解释如下:\n", - "\n", - "- `ds.GeneratorDataset`:将生成的数据转换为MindSpore的数据集,并且将生成的数据的x,y值存入到`data`和`label`的数组中。\n", - "- `batch`:将`batch_size`个数据组合成一个batch。\n", - "- `repeat`:将数据集数量倍增。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:52.993381Z", - "start_time": "2021-01-04T07:04:52.990360Z" - } - }, - "outputs": [], - "source": [ - "from mindspore import dataset as ds\n", - "\n", - "def create_dataset(num_data, batch_size=16, repeat_size=1):\n", - " input_data = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data','label'])\n", - " input_data = input_data.batch(batch_size)\n", - " input_data = input_data.repeat(repeat_size)\n", - " return input_data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用数据集增强函数生成训练数据,并查看训练数据的格式。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.079377Z", - "start_time": "2021-01-04T07:04:52.994402Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The dataset size of ds_train: 100\n", - "dict_keys(['data', 'label'])\n", - "The x 
label value shape: (16, 1)\n", - "The y label value shape: (16, 1)\n" - ] - } - ], - "source": [ - "data_number = 1600\n", - "batch_number = 16\n", - "repeat_number = 1\n", - "\n", - "ds_train = create_dataset(data_number, batch_size=batch_number, repeat_size=repeat_number) \n", - "print(\"The dataset size of ds_train:\", ds_train.get_dataset_size())\n", - "dict_datasets = next(ds_train.create_dict_iterator())\n", - "\n", - "print(dict_datasets.keys())\n", - "print(\"The x label value shape:\", dict_datasets[\"data\"].shape)\n", - "print(\"The y label value shape:\", dict_datasets[\"label\"].shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "通过定义的`create_dataset`将生成的1600个数据增强为了100组shape为16x1的数据集。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义训练网络\n", - "\n", - "在MindSpore中使用`nn.Dense`生成单个数据输入,单个数据输出的线性函数模型:\n", - "\n", - "$$f(x)=wx+b\\tag{1}$$\n", - "\n", - "并使用Normal算子随机初始化权重$w$和$b$。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.085026Z", - "start_time": "2021-01-04T07:04:53.080390Z" - } - }, - "outputs": [], - "source": [ - "from mindspore.common.initializer import Normal\n", - "from mindspore import nn\n", - "\n", - "class LinearNet(nn.Cell):\n", - " def __init__(self):\n", - " super(LinearNet, self).__init__()\n", - " self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02))\n", - " \n", - " def construct(self, x):\n", - " x = self.fc(x)\n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "调用网络查看初始化的模型参数。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.100773Z", - "start_time": "2021-01-04T07:04:53.086027Z" - }, - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Parameter (name=fc.weight) [[-0.02289871]]\n", - "Parameter (name=fc.bias) 
[0.01492652]\n" - ] - } - ], - "source": [ - "net = LinearNet()\n", - "model_params = net.trainable_params()\n", - "for param in model_params:\n", - " print(param, param.asnumpy())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "初始化网络模型后,接下来将初始化的网络函数和训练数据集进行可视化,了解拟合前的模型函数情况。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.242097Z", - "start_time": "2021-01-04T07:04:53.102786Z" - }, - "scrolled": true - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYQAAAD8CAYAAAB3u9PLAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3dd3iUVdrH8e+dhIAUBSFApFqAVUQpERBBQFGUteEuoq7KKor4iksRK6wRwYaKa0cQxYoooriISKRYAJVQVBClL0QgIFWlJnPeP2YyCWFCEqYm+X2uK1fmqefmyTD3POec5xxzziEiIhIX7QBERCQ2KCGIiAighCAiIj5KCCIiAighiIiIjxKCiIgAIUgIZlbPzGab2XIzW2Zm/X3rHzSzX81sie+nW/DhiohIuFiwzyGYWTKQ7JxbZGZVgIXAFcBVwB/OuSeDD1NERMItIdgTOOc2AZt8r383s+VAnWDPKyIikRX0HcIhJzNrCHwJnA4MAv4J7AbSgTudczsCHNMH6ANQqVKlVn/5y19CFo+ISFmwcOHC35xzScGeJ2QJwcwqA18ADzvnJptZLeA3wAHD8VYr3XSkc6SkpLj09PSQxCMiUlaY2ULnXEqw5wlJLyMzKwd8ALztnJsM4JzLdM5lO+c8wFigdSjKEhGR8AhFLyMDxgHLnXOj8qxPzrNbd2BpsGWJiEj4BN2oDJwDXA/8aGZLfOvuB64xs+Z4q4zWAbeGoCwRkbLH44HMTDCDWrW8v8MgFL2MvgYCRTct2HOLiJRpOYmgZ0/4+mtwDjp0gDlzIC70zxWH4g5BRERCzeOBzp1h3jzIyspdP28ebNkCtWuHvEgNXSEiEou2bs1NBnmriLKz4aqrvAkjxJQQRERiUc2a0K4dJCR4q4mWLMmtJpo/35swQkxVRiIiscgMZs/2fvDXrOld1769966hXTuoWZMde3fQ4uUWIStSCUFEJFbFxXl7FeXwJQiXlMQ1H1zDxGUTQ1tcSM8mIiLhExfH+E2fEjc83p8MUjumhuz0ukMQESkBftr6E01fbOpfblG7Bd/c/A2J8YkMY1hIylBCEBGJYX8e+JNTXziVDbs3+Net7b+WhlUbhrwsVRmJiMSoftP6UfnRyv5k8OF7cbhZ59Lw2PphKU93CCIiMWbKz1O4YuIV/uV+p9/Ec1e/4X0mIWGet+dR3sbmEFFCEBGJBI8ntwtpAWMRrdu5jhOfOdG/XO/Yeiy/fTmVylWEdqsO6XIaDkoIIiLhlncYinbtvN1H84xFdCD7AG1facvizYv965betpSmNXMbkQ95JiFMg9upDUFEJNzyDkMxb94hTxkPmzOM8iPK+5PBa5e/hkt1hyYDyH0mIUzJAHSHICJSPEWo+jlMzjAUeap8Zq+dzXlvnOffpWfTnkz42
wQs0DmPpsyjoIQgIlJUhVT9FCjPMBSZFR21H8o9pnx8eTbduYlqx1QLbZlHIRQzptUzs9lmttzMlplZf9/6480szcxW+n4X8K8VESkhjlD1U5hsHBel9aL2qNzJJL/p/Q37hu4rOBkEWWZxhSLNZAF3OudOBdoCt5vZacC9wEznXCNgpm9ZRKTkyjsCaTF6+zz/3fMkDE/gs9WfATDqwlG4VEebum3CVubRCMWMaZuATb7Xv5vZcqAOcDnQybfb68Ac4J5gyxMRiZr8I5AWUp+/cONCUsam+Jc7N+zMjOtnkBBXjI/eYpYZjJC2IZhZQ6AF8C1Qy5cscM5tMrPwpTURkUjJPwJpALv27aLe0/X4/cDv/nUbB20kuUryEY4KrsxQCFnLhJlVBj4ABjjndhfjuD5mlm5m6VvDWDcmIhJuzjmum3wdVR+v6k8Gaden4VLd0SeDCApJQjCzcniTwdvOucm+1ZlmluzbngxsCXSsc26Mcy7FOZeSlJQUinBERCLL4+Htr14k7qE43v7xbQDub38/LtXR5aQuUQ6u6IKuMjJvp9lxwHLn3Kg8mz4GegGP+X5PCbYsEZFY8/OWnzj1pdyHyJrVbMaCWxZQPqF8FKM6OqFoQzgHuB740cyW+NbdjzcRvGdmvYH1QI8QlCUiEhP2HNzD6S+eztqda/3rVj0DJy/7DEpgMoDQ9DL6Giio2fv8YM8vIhJrBk4fyH++/Y9/edJ78Lef8PYACmMvoHDTWEYiIkU0dcVUbJj5k8GtrW7F8+9s/lbjXO9zAh06RKQ3ULho6AoRkUKs37WeBv9p4F+uVakWq/61isqJlb0rIvScQLgpIYiIQMAB5A5mH6T9a+357tfv/Lt93/d7zqh1xqHHRug5gXBTlZGISM4AcnXrQqdO4PHw8JcPkzgi0Z8Mxl46FpfqDk8GpYjuEEREMjP9A8h9tWEu5w6P92/q/pfuTLpqEnFW+r8/KyGISNnm8UDPnmxNzKLmUIBsAOIsjszBmdSoWCOq4UWSEoKIlGmeLZlcVv9rPsnTSX7uTXNpV69d9IKKktJ/DyQiAt47gcxMcM6/anT6aOJfPoFPGnnXPbbqRNwDnjKZDEB3CCJSFuSbdWzxO6No+UrusNQd6ndg1kUTSKh9QonuNhosJQQRKf18s47tjs+i4dlfsiNPMtgwcAN1j60bxeBihxKCiJR6LimJm3pXZ3xypn/dp//4lItOuSiKUcUetSGISKn27tJ3iRse708Gd509GJfqlAwC0B2CiJRKK7etpPHzjf3LTao3YUnfJVRIqBDFqGKbEoKIlCp7D+7lzNFnsnL7Sv+6Ff1W0Kh6oyhGVTKoykhESo270+6m4iMV/cng3b+9i0t1SgZFpDsEESnxpq+azsVvX+xfvrH5jYy7bBxWhruQHo2QJAQzexW4BNjinDvdt+5B4BZgq2+3+51z00JRnogIwK+7f6Xu07ldRo8/5njW9l/LseWP9a4IMIKpFCxUVUbjgUBN9k8755r7fpQMRCQksjxZtH+1/SHJYFGfRWy7e9uhySDfCKZyZCFJCM65L4HtoTiXiMiRjJw7knLDyzF3w1wAXvrrS7hUR4vkFofu6HsYjaws7++tWwOcTfIKdxtCPzO7AUgH7nTO7ci/g5n1AfoA1K9fP8zhiEhJNXf9XNq/1t6/fEnjS5hy9ZSCh6WuWRPatfMPV0HNmhGKtOQyl2egp6BOZNYQmJqnDaEW8BvggOFAsnPupiOdIyUlxaWnp4ckHhEpHbbt2UbSE0k4cj+rtgzeQlKlpMN3zt9mUEbaEMxsoXMupfA9jyxs3U6dc5nOuWznnAcYC7QOV1kiUvp4nIfuE7tT44ka/mTwxT+/wKW6gpNB/jaDnKktS3EyCKWwJQQzS86z2B1YGq6yRKSEyzc09diFY4l/KJ6Pfv4IgIfPexiX6ji3wbkFn0NtBkELVbfTCUAnoIaZZQCpQCcza463ymgdcGsoyhKRUibP0NQ/X
HAGZ7ZZ5N/Upk4bvrrxK8pZvDdhHKnqR20GQQtJQnDOXRNg9bhQnFtESrmtW/k9fS6nDMhmS+XcZPC/Af+j/nH1D5vLgNmzvVVB+Zl5t5WBNoNw0dAVIhI1zjn6fDeUY+/OZktl77r/Xv0xLtV5kwEUrypIbQZB0dAVIhIVk36aRI/3e/iXBzTrw9PdRx/+Ya6qoIhRQhCRiFq9fTWnPHeKf/nkYxvy4+3LOCaxYuADVBUUMaoyEpGI2J+1n9NfPP2QZLD8mxRW3Z3BMRdcfOShJVQVFBFKCCISdkNmDqHCwxVYtnUZAG92fxPXdzN/+XyJuonGEFUZiUjYpK1O48K3LvQvX3fGdbxxxRveYamdU9tAjFFCEJGQ2/j7RuqMquNfrpJYhfUD11O1QtXcndQ2EHOUEEQkZLI8WVzw5gXMWTfHv27BLQtIOaGAYXZy2gYkJqgNQURCYtT8UZQbXs6fDJ696Flcqis4GUjM0R2CiATl24xvaTuurX+568ld+eTaT4iPi49iVHI0lBBE5Khs37ud5KeSOZB9wL9u852bqVVZVUAllaqMRKRYnHNc9f5VVB9Z3Z8MZt0wC5fqlAxKON0hiEiRjV8ynhun3OhffrDjg6R2So1iRBJKSggiUjDfjGPL2Mrpo5v5V7dKbsW83vNIjE+MYnASakoIIhKYx8Of559Lk5Zz+fXY3NVr+6+lYdWGUQtLwickbQhm9qqZbTGzpXnWHW9maWa20ve7WijKEpHI6Df5Zip3yk0GH3Z9DZfqlAxKsVA1Ko8HLsq37l5gpnOuETDTtywi0ZBvisoj+ejnj7BhxgvLXgOg3wLDzTqXK9r0CneUEmWhmjHtSzNrmG/15Xin1QR4HZgD3BOK8kSkGIo449jaHWs56dmT/Mv1dsHyRe2oNGES1K6toSXKgHC2IdRyzm0CcM5tMrOAI1eZWR+gD0D9+vXDGI5IGeTxwE8/HT7jWJ7hIg5kH6DtK21ZvHmxf93S0fE03ZwNCd95k4eSQZkQ9ecQnHNjnHMpzrmUpKSkaIcjUnrk3Bk0bw6VKkFCwmGjiqbOTqX8iPL+ZDD+8vG4Bzw0bXxOwP2ldAvnHUKmmSX77g6SgS1hLEtE8suZizg7G/74A5YsgaZNwYzZa2dz3hvn+Xft2bQnE/42wTssNWgU0jIqnAnhY6AX8Jjv95QwliUi+eWfi7hpUzb/mUnyU8n+XSokVGDjoI1UOyZfJ0CNQlomhSQhmNkEvA3INcwsA0jFmwjeM7PewHqgR8FnEJGQyzPfQHaN6lz8VlfS1qT5N3/T+xva1G0TxQAl1oSql9E1BWw6PxTnF5FC+J4oPqyKJy6O59a9x79G/8u/atSFoxh49sAoBCmxTk8qi5R0BXQrTd+Yzlljz/Lvdt6J5zHjuhkalloKpIQgUtLlNB77upXuzFhF3bda8ufBP/27bBy0keQqyUc4CQXfZUiZEfVupyISJF/jsUuI59qbq1HttSb+ZJB2fRou1RUtGXTuDHXrQqdO3mUpc5QQREo6M9585ibihmYzofZWAIZ0GIJLdXQ5qUvRzpHvLoOtW8MYsMQqVRmJlFQeD8tXzOW0ief6VzWr2YwFtyygfEL54p0rfxdVPYxWJikhiJRAe/b/wempSaw9Zp9/3ao7VnHy8Scf3QnzdFFVG0LZpSojkRJmwPQBVHqsij8ZTJoUh+u7OTcZFGNk00PkPIymZFBmKSGIlBBTV0zFhhnPfPsMALf+moxnRDx/O759bhXP0TYOH20SkVJFVUYiMW79rvU0+E8D/3KtSrVY9a9VVE6oCMPzVfEEahwubAiKIg6PLaWf/uoiMepg9kFaj219SDL4oe8PbB68mcqJlQNX8eQ0DhdnpFL1MBIfJQSRGDTiyxEkjkhkwcYFAIy9dCwu1dGsVrMjH5jTOJyRAXPmFK094GiSiJRKqjISiSFf/u9LOo7v6F++8tQreb/H+8RZMb67FXekUvUwEh8lBJEYsOXPL
dR6MvdDPN7iyRycSfWK1SMTgIa7FpQQRKLK4zxc8s4lfLrqU/+6uTfNpV29dlGMSsoqtSGIhNMRunO+tOAl4h+K9yeDx7s8jkt1SgYSNWG/QzCzdcDvQDaQ5ZxLCXeZIjGhgO6cizctpuWYlv7dOtTvwKxes0ggzps8VI8vURKpKqPOzrnfIlSWSGzI151zV8ZqGrxzFrv27/LvkjEwgzrH1tGzABIT9I4TCZc8w1L/s3d1qr7W2J8Mpv9jOi7VeZMB6FkAiQmRSAgOmGFmC82sT/6NZtbHzNLNLH2r/hNIaWLGu8/dStzQbF5PzgTg7nZ341IdXU/peui+ehZAYoC5MI9dYmYnOOc2mllNIA24wzn3ZaB9U1JSXHp6eljjEYmEFdtW0OT5Jv7lJtWbsKTvEiokVCj4IM1YJkfJzBaGon027G0IzrmNvt9bzOxDoDUQMCGIlHR7D+7lzNFnsnL7Sv+6Ff1W0Kh6o8IP1rMAEmVhrTIys0pmViXnNXAhsDScZYpEy10z7qLiIxX9yeDdv72LS3VFSwYiMSDcdwi1gA/Ne/ubALzjnJse5jJFIurTlZ/S7Z1u/uWbmt/EK5e9gqnaR0qYsCYE59wa4MxwliESLRm7M6j3dD3/cvVjqrOm/xqOLX9sFKMSOXoaukKkmA5mH6Tj+I7Mz5jvX7f41sU0r908ilGJBE/PIYgUw+NfP07iiER/Mnjpry/hUp2SgZQKukMQKYL8s5Zd2vhSPrr6o+INSy0S45QQRI5gz8E9jJw7kpFzRwIQZ3FsvnMzSZWSohyZSOgpIYgE4Jxj4rKJ3J12Nxt2b6Bn05483uVxGlRtUPjBIiWUEoJIPgs3LqT/9P7M3TCXFjWa8Xavt+jQ8NxohyUSdqoAFfHZ/Mdmek/pzVljz2Ll9pW8srwRCwb+RIde//YOKyFSyukOQcq8/Vn7efbbZxn+5XD2Ze3jzrPvZOhf+nDcfadBVnbu6KMaVkJKOSUEKbOcc0xdMZVBMwaxavsqLml8CU9d+BSNqzf2znDWrl3u/AQafVTKACUEKZOWbVnGwM8GkrYmjVNrnMr0f0w/dEhqM+8kNRp9VMoQJQQpU7bv3c6Dcx7kxQUvUqV8FZ656Blua3kr5bbv9N4V5P3g1+ijUsaoUVlKlwImtc/yZPHCdy/Q6LlGvLDgBfq06sPKO1byr7P6Ua7LhVC3LnTqpMZjKdOUEKT0yJmXON+H+8w1M2k+ujn9Pu3HmbXOZMmtS3jxry9So2INTV0pkocSgpQe+T7cV69aQPeJ3enyZhf2HNzD5KsmM/OGmTSr1Sz3GE1dKeKnNgQpPXwf7r+nz+Xhnsk8/d65lIsrx6PnP8qAtgMCT1+pxmMRv7AnBDO7CHgGiAdecc49Fu4ypWzy4HjjP7247/Of2bxnA71O78Uj5z/CCVVOOPKBajwWAcKcEMwsHngBuADIABaY2cfOuZ8C7f/zz9Chg/f/p5n395FeF3U/HROdY8wi94V73oZ59J/en/SN6bSt25Yp1/6X1nVaR6ZwkVIi3HcIrYFVvpnTMLN3gcuBgAkhLg4SE71tgR6Ptyo457VzgV8faVs4jpHiC2fiya6UwdYz72FXg3dI2HsC9Ze9xb4p1/B/L8WFPMHNnw8ZGeG/VgkJEB+f+7uw10Xd72iOCee5g40nTi2gIRfuhFAH2JBnOQNok3cHM+sD9AGoX78+M2eGOaIg5SSJcCceHXPk1wfcXn4+/klWVX8MRzZNMofSKPMe4uIq46lT+PE5XzaKE1u4kwH4/m0Hwl+OSCDhTgiBKgwO+Z7tnBsDjAFISUmJ+e/gZt5vJxIdzjkm/TSJwWmDWb9rPX8/7e88ccETNKzaMNqhBc05yM72/mRlHfq7sNdF3a80HaM79tALd0LIAOrlWa4LbAxzmVJKLd60mP7T+/PV+q84s9aZvHHFG3Rs2DHaYYWMWW61SPny0Y5GSpJQtdWFOyEsABqZ2YnAr8DVwLVhLlNKm
S1/bmHorKG8sugVqleszsuXvEzvFr2Jj9OtmkgohTUhOOeyzKwf8BnebqevOueWhbNMKT0OZB/g+W+fY9gXw9iTtZcBbQfwQMcHqFqharRDEymVwv4cgnNuGjAt3OVI6eGcY9rKaQz6bBArtq+g20pj1LZWNBnypLqWiISRnlSWmLJ863IGzRjE9FXTaVL1FD6ZEEe3XzyQsEST1IiEmb5uSUzYsXcHA6YP4IzRZzB/w3xGXTiKH25fSrda7TXOkEiE6A5Boirbk83YRWMZOmso2/dup0+rPgzvOIykPUB8osYZEokg3SFI1MxeO5uWY1py2ye3cXrN01l06yJGd3uRpEuuyh3CGrzVREoGImGnOwSJuLU71jI4bTCTl0+mwXENmNRjEleeeiVm5p3cJv/8BGo3EIkIJQSJmD8O/MGjXz3KU/OfIj4unhGdRzDo7EEcU+6Y3J1y5ifQ5PYiEaeEIGHncR7e+uEt7v38Xjb9sYnrz7ieRzs/TJ39iZB/jgLNTyASNWpDkOIrYN7iQL7N+Jazx51Nr496Ue+4eszvPZ83Lh9PncuvK3ge45z5CZQMRCJKCUGKp4B5i/P7dfev3PDhDbQd15YNuzbw+hWvM7/3fNrWbat5jEVilKqMpHgCfZjnafTdl7WPUfNH8chXj3DQc5D72t/Hfe3vo0r5KrnnUDuBSExSQpDiKeDD3DnH5OWTGZw2mHU713HlqVfyxAVPcFK1kw4/h9oJRGKSEoIUT4AP8+83f8+AzwYwZ90cmtVsxswbZnLeiecd+Tz55zH2eJQgRKJMbQhSfL4P8617fqPv1L60HNOSHzN/5MVuL7Lo1kWFJ4P8itguISLhpTsEKbaD2Qd5YcELPDjnQf448Ad3tL6D1I6pVDum2tGdsJB2CRGJDCUEKZZPV37KoBmD+Pm3n+l6cldGdR3FaUmnBXdSNTKLxISwJQQzexC4BcjpU3i/b24EKYF++e0XBs0YxLSV02h0fCOmXjOVbo26eYebCJYamUViQrjvEJ52zj0Z5jIkjHbu28nwL4bz7HfPUrFcRZ684EnuaHMHifGJoS0ofyOziEScqowkoGxPNuMWjWXozKH8tm87N7e8mRHnjaBmJVXniJRW4e5l1M/MfjCzV80sYIujmfUxs3QzS9+qJ1ZjwhfrvqDVmFbc+slt/GXFNhZ+14Ixfx2tZCBSypkrwng0BR5s9jlQO8CmIcA3wG+AA4YDyc65m450vpSUFJeenn7U8Uhw1u1cx91pd/P+T+9Tv3Idnnh9Ez1+9GAJCZCRoSodkRhlZgudcynBnieoKiPnXJei7GdmY4GpwZQl4fPngT957OvHeHL+kxjGsE7DGHz2nVT8uBskqOePSFkRzl5Gyc65Tb7F7sDScJUlR8c5xzs/vsM9n9/Dr7//yrXNruWx8x+j3nH1vDuo549ImRLORuWRZtYcb5XROuDWMJYlxeHxsGDZDPp/9xDzM+bTKrkVE/8+kXPqn3Pofur5I1KmhC0hOOeuD9e55eht2vUr993TiteTM6m1vxyv/v0VerW4kTjTKCYiZZ0+BcqIfVn7eOzrx2j8QhMmJGVyz9ew4hkPN9a5RMlARAA9h1DqOeeY8ssU7pxxJ2t2rOHyJpfz5PiNnDJnsRqLReQQSgil2I+ZPzLgswHMWjuLpklNSbs+jS4ndYGrNNS0iBxOCaG08XjYtuEXHlj6HKMXvsxx5Y/j+Yuf59aUW0mI8/251VgsIgEoIZQiBw/u56V/NuXBeqvZXR7+r83tPNhpGNUrVo92aCJSAqg1sZSYsXoGZ77UjP6NV9NqI3w/Jp7nWv1byUBEikwJoYRbuW0ll024jK5vdeUAHqb80JQZE+Jp2vgcNRiLSLGoyqiE2rVvFyO+HMEz3z5DhYQKjOwykn+1+Rfl+5UrWoOx5jAWkXyUEGJZgA/tbE8245eM5/5Z97P1z63c2PxGHj7/YWpXzjPGYGENxjlzGOfMUDZ7trehWUTKN
H0KxKoAE89/vf5rWr/Smpv/ezOnHH8K393yHeMuH3doMiiKQHMYi0iZp4QQq/J8aK9fOper37mSDq91YMufW3jnynf4+savSTnhKEe7zZnDOCFBD6eJiJ+qjGJVzZrsad+GkXHzGHmOw/3vM1I7pnJXu7uolFgpuHNrDmMRCUAJIQY555i4bCJ3XfY/MnY7eja9ipEXjKT+cfVDV4geThORfJQQYszCjQvpP70/czfMpUXtFrxz5Tt0aNAh2mGJSBmghBAjNv+xmSEzh/DaktdIqpTEK5e+wj+b/5P4uPhohyYiZURQjcpm1sPMlpmZx8xS8m27z8xWmdkvZtY1uDBLr/1Z+xk5dySNn2vMmz+8yZ1n38mKfivo3bK3koGIRFSwdwhLgSuBl/OuNLPTgKuBpsAJwOdm1tg5lx1keaWGc47/rvgvgz4bxOodq7m08aU8deFTNKreKNqhiUgZFVRCcM4tB7DDe6lcDrzrnNsPrDWzVUBrYH4w5ZUWy7YsY+BnA0lbk8apNU5l+j+m0/UU3USJSHSFqw2hDvBNnuUM37rDmFkfoA9A/foh7EUTSzweyMxk+/6dpP70Ai+lj6ZK+So8e9Gz9E3pS7n4ctGOUESk8IRgZp8DgR6FHeKcm1LQYQHWuUA7OufGAGMAUlJSAu5Tonk8ZJ3XiZf3fsUDnWDnMdD3rNsY1vkhalSsEe3oRET8Ck0IzrkuR3HeDKBenuW6wMajOE+J9/niDxjQ9CuW1YTz1sB/0uJodlsqKBmISIwJ19AVHwNXm1l5MzsRaAR8F6ayYtLq7au54t0ruGDqVeypUoEP34XP34Bmp2hYahGJTUG1IZhZd+A5IAn4xMyWOOe6OueWmdl7wE9AFnB7Welh9Pv+33n4q4d5+punKRdXjkfbDGFA5/uo0H+3d4iIWrU0VISIxCRzLnaq7VNSUlx6enq0wzgqHufhje/f4L6Z97H5j830OuMGHnnxF06Ys1BDTItIWJnZQufcUY52mUtPKofAvA3z6D+9P+kb02lbty1Trp5C64QGcFXdQ4eY1thBIhLD9JU1CBm7M/jH5H9wzqvnsPH3jbzV/S3m/fNrbzJIStIQ0yJSougO4SjsObiHJ+c9yeNzH8fjPAztMJR72t9D5YSKh85ENnMmbNumIaZFpERQQigG5xzv//Q+d6Xdxfpd6+lxWg9GXjCShlUbenfIzDx0JrJt21RNJCIlhqqMimjxpsV0HN+RnpN6Uq1CNeb0msN7Pd7LTQagmchEpETTHUIhtvy5hSEzhzBu8TiqV6zOy5e8TO8WBYxEqpnIRKQEU0IowIHsAzz37XM89OVD7Dm4h4FtB/Lvjv+maoWqRz5QM5GJSAlVthOCx3PYt3nnHJ+s/IRBnw1i5faVdGvUjVEXjqJJjSZRDlZEJLzKbhuCx+PtEVS3LnTqBB4Py7cu5+K3L+bSCZcSZ3FMu3Yan1z7iZKBiJQJZfcOYetWf4+gHYvmMuyjvjy/9FUqJ1bm6a5Pc/tZt2tYahEpU8puQqhZk6xzzmbs3rn8u4uxY+k4bml5C8M7DyepUlK0oxMRibgymxBmrZvNgJ47+XGLh44NOvDMRc9wZu0zox2WiEjUlLmEsGbHGu5Ku4vJyyfTsGpDJvWYxJWnXhloGlARkTKlzCSE3/f/zqNfP8qo+aOIj4tnRAuPU48AAAnnSURBVOcRDDp7EMeUOybaoYmIxITSlxDydSX1OA9v/fAW935+L5v+2MT1Z1zPo+c/Sp1jA07xLCJSZgXV7dTMepjZMjPzmFlKnvUNzWyvmS3x/YwOPtQiyNeV9Jv18zh73Nn0+qgX9Y6rx/ze83mj+xtKBiIiAQR7h7AUuBJ4OcC21c655kGev3h8XUl/PSaLe5O+4q3XziG5cjKvX/E6151xHXFWdh+7EBEpTFAJwTm3HIiZBtm91aow6pq6PFJvHdnxcH/7+7ivw/1UTqwc7dBERGJeONsQTjSzxcBuYKhz7quQn
TlfO4Fzjg+Wf8BdaXex7uR1XHniX3nikmc46fiTQ1akiEhpV2hCMLPPgdoBNg1xzk0p4LBNQH3n3DYzawV8ZGZNnXO7A5y/D9AHoH79+oVHnNNO4JuE5vsJ/6H/jIF88b8vaFazGbNumEXnEzsXfh4RETlEoQnBOdeluCd1zu0H9vteLzSz1UBjID3AvmOAMQApKSmu0JP72gm2Jmbx7+O+YuwrKVSrUI2X/voSN7e8mYS40tdxSkQkEsLy6WlmScB251y2mZ0ENALWhOLcB6pX5YWe9RlWbw1/JsIdrfuR2vFBqh1TLRSnFxEps4JKCGbWHXgOSAI+MbMlzrmuwLnAQ2aWBWQDfZ1z24+6IF+bwae7FjJwxiB+abSGrvU68/Qlz3NqzdOC+SeIiIhPsL2MPgQ+DLD+A+CDYM7t5/Hwy1/bMKj6QqY1cjQ6vhFTr5lKt0bdYqZ3k4hIaRDTFe479+3koU/v5bmz0ql4EJ5Mi+OO92aSeEK9aIcmIlLqxGRCyPZkM27xOIbMGsK2Pdu4eXNtRkzcSs3m50By3WiHJyJSKsVcQvhi3Rf0n96f7zO/p0N977DULWqdCcM1cb2ISDjFVEJYs2MNnV7vRP3j6jPx7xPpcVqP3HYCTVwvIhJWMZUQdu7byUOdHmJwu8EallpEJMLMucKfBYuUM1qc4X5Y/EO0wxARKVHMbKFzLqXwPY8spob/TIxPjHYIIiJlVkwlBBERiR4lBBERAZQQRETERwlBREQAJQQREfFRQhAREUAJQUREfJQQREQEUEIQERGfoBKCmT1hZj+b2Q9m9qGZVc2z7T4zW2Vmv5hZ1+BDFRGRcAr2DiENON05dwawArgPwMxOA64GmgIXAS+aWXyQZYmISBgFlRCcczOcc1m+xW+AnNlrLgfedc7td86tBVYBrYMpS0REwiuUw1/fBEz0va6DN0HkyPCtO4yZ9QH6+Bb3m9nSEMYULjWA36IdRBEoztBSnKFTEmKEkhNnk1CcpNCEYGafA7UDbBrinJvi22cIkAW8nXNYgP0DjrPtnBsDjPGdJz0UQ7iGm+IMLcUZWiUhzpIQI5SsOENxnkITgnOuSyGB9AIuAc53uZMrZAD18uxWF9h4tEGKiEj4BdvL6CLgHuAy59yePJs+Bq42s/JmdiLQCPgumLJERCS8gm1DeB4oD6T55j7+xjnX1zm3zMzeA37CW5V0u3MuuwjnGxNkPJGiOENLcYZWSYizJMQIZSzOmJpCU0REokdPKouICKCEICIiPhFPCGbWw8yWmZnHzFLybSt0uAszO9HMvjWzlWY20cwSIxDzRDNb4vtZZ2ZLCthvnZn96NsvJN3Aihnng2b2a55YuxWw30W+a7zKzO6NQpwFDnmSb7+IX8/Cro2vo8RE3/ZvzaxhJOLKF0M9M5ttZst9/5f6B9ink5ntyvNeeCDScfriOOLf0Lye9V3PH8ysZRRibJLnOi0xs91mNiDfPlG5nmb2qpltyft8lpkdb2Zpvs/ANDOrVsCxvXz7rPT1Bi2ccy6iP8CpeB+imAOk5Fl/GvA93kbqE4HVQHyA498Drva9Hg3cFuH4nwIeKGDbOqBGpK9pnvIfBAYXsk+879qeBCT6rvlpEY7zQiDB9/px4PFYuJ5FuTbA/wGjfa+vBiZG4e+cDLT0va6Cd9iY/HF2AqZGOrbi/g2BbsCneJ9dagt8G+V444HNQINYuJ7AuUBLYGmedSOBe32v7w30/wc4Hljj+13N97paYeVF/A7BObfcOfdLgE2FDndh3q5M5wGTfKteB64IZ7wByr8KmBCpMsOgNbDKObfGOXcAeBfvtY8YV/CQJ9FWlGtzOd73HXjfh+f73hcR45zb5Jxb5Hv9O7CcAkYCKAEuB95wXt8AVc0sOYrxnA+sds79L4ox+DnnvgS251ud9z1Y0GdgVyDNObfdObcD77hzFxVWXiy1IdQBNuRZDjTcRXVgZ54PkwKHxAiTDkCmc25lAdsdMMPMF
vqG5IiGfr5b71cLuJUsynWOpJvwfkMMJNLXsyjXxr+P7324C+/7Mip8VVYtgG8DbD7bzL43s0/NrGlEA8tV2N8w1t6PV1PwF75YuJ4AtZxzm8D75QCoGWCfo7quoRzLyM+KMNxFoMMCrMvfJ7bIQ2IUVxFjvoYj3x2c45zbaGY18T6b8bMvw4fMkeIEXgKG470mw/FWb92U/xQBjg153+OiXE87fMiT/MJ+PfOJ6nuwuMysMvABMMA5tzvf5kV4qz3+8LUlfYT3AdFIK+xvGEvXMxG4DN+ozfnEyvUsqqO6rmFJCK6Q4S4KUJThLn7De0uZ4Pt2FrIhMQqL2cwSgCuBVkc4x0bf7y1m9iHeKoiQfoAV9dqa2VhgaoBNERlWpAjXM9CQJ/nPEfbrmU9Rrk3OPhm+98RxHH5LH3ZmVg5vMnjbOTc5//a8CcI5N83MXjSzGs65iA7UVoS/YSwNc3MxsMg5l5l/Q6xcT59MM0t2zm3yVa9tCbBPBt52jxx18bbbHlEsVRkVOtyF74NjNvB336peQEF3HKHWBfjZOZcRaKOZVTKzKjmv8TacRnTk1nx1r90LKH8B0Mi8vbUS8d4ifxyJ+HJYwUOe5N0nGtezKNfmY7zvO/C+D2cVlNDCxddmMQ5Y7pwbVcA+tXPaNsysNd7/69siF2WR/4YfAzf4ehu1BXblVIdEQYE1ALFwPfPI+x4s6DPwM+BCM6vmqzq+0LfuyKLQat4db/baD2QCn+XZNgRvL49fgIvzrJ8GnOB7fRLeRLEKeB8oH6G4xwN98607AZiWJ67vfT/L8FaNRPravgn8CPzge9Mk54/Tt9wNb8+U1VGKcxXe+s0lvp/R+eOM1vUMdG2Ah/AmL4AKvvfdKt/78KQoXL/2eG//f8hzDbsBfXPeo0A/33X7Hm/DfbsoxBnwb5gvTgNe8F3vH8nT8zDCsVbE+wF/XJ51Ub+eeBPUJuCg73OzN942q5nASt/v4337pgCv5Dn2Jt/7dBVwY1HK09AVIiICxFaVkYiIRJESgoiIAEoIIiLio4QgIiKAEoKIiPgoIYiICKCEICIiPv8PsVW4qWEViusAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "from mindspore import Tensor\n", - "\n", - "x_model_label = np.array([-10, 10, 0.1])\n", - "y_model_label = (x_model_label * Tensor(model_params[0]).asnumpy()[0][0] + \n", - " Tensor(model_params[1]).asnumpy()[0])\n", - "\n", - "plt.axis([-10, 10, -20, 25])\n", - "plt.scatter(x_eval_label, y_eval_label, color=\"red\", s=5)\n", - "plt.plot(x_model_label, y_model_label, color=\"blue\")\n", - "plt.plot(x_target_label, y_target_label, color=\"green\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "从上图中可以看出,蓝色线条的初始化模型函数与绿色线条的目标函数还是有较大的差别的。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义前向传播网络与反向传播网络并关联" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "接下来需要定义模型的损失函数,这里采用均方误差(MSE,Mean Squared Error)的方法用于判断拟合的效果如何,即均方误差值越小,拟合的效果越好,其损失函数公式为:\n", - "\n", - "$$J(w)=\\frac{1}{2m}\\sum_{i=1}^m(h(x_i)-y^{(i)})^2\\tag{2}$$\n", - "\n", - "假设训练数据第$i$个数据为$(x_i,y^{(i)})$,公式2中的参数解释如下:\n", - "\n", - "- $J(w)$为损失值。\n", - "\n", - "- $m$为样本数据的数量,本例中$m$的值为`batch_number`。\n", - "\n", - "- $h(x_i)$为第$i$个数据的$x_i$值代入模型网络(公式1)后的预测值。\n", - "\n", - "- $y^{(i)}$为第$i$个数据中的$y^{(i)}$值(label值)。\n", - "\n", - "### 定义前向传播网络\n", - "\n", - "前向传播网络包含两个部分,其中:\n", - "\n", - "1. 将参数带入到模型网络中得出预测值。\n", - "2. 
使用预测值和训练数据计算出loss值。\n", - "\n", - "在MindSpore中使用如下方式实现。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.249228Z", - "start_time": "2021-01-04T07:04:53.243109Z" - } - }, - "outputs": [], - "source": [ - "net = LinearNet()\n", - "net_loss = nn.loss.MSELoss()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义反向传播网络\n", - "\n", - "反向传播网络的目标是不断变换权重值,使得loss值取得最小值,一般的在线性网络中采用权重更新公式:\n", - "\n", - "$$w_{t}=w_{t-1}-\\alpha\\frac{\\partial{J(w_{t-1})}}{\\partial{w}}\\tag{3}$$\n", - "\n", - "公式3参数解释:\n", - "\n", - "- $w_{t}$为迭代后的权重值。\n", - "- $w_{t-1}$为迭代前的权重值。\n", - "- $\\alpha$为学习率。\n", - "- $\\frac{\\partial{J(w_{t-1}\\ )}}{\\partial{w}}$为损失函数对权重$w_{t-1}$的微分。\n", - "\n", - "函数中所有的权重值更新完成后,将值传入到模型函数中,这个过程就是反向传播过程,实现此过程需要使用MindSpore中的优化器函数,如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.273562Z", - "start_time": "2021-01-04T07:04:53.250245Z" - } - }, - "outputs": [], - "source": [ - "opt = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 关联前向和反向传播网络\n", - "\n", - "定义完成前向传播和反向传播后,在MindSpore中需要调用`Model`函数,将前面定义的网络,损失函数,优化器函数关联起来,使之变成完整的计算网络。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.287238Z", - "start_time": "2021-01-04T07:04:53.275579Z" - } - }, - "outputs": [], - "source": [ - "from mindspore import Model\n", - "\n", - "model = Model(net, net_loss, opt)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 拟合过程可视化准备\n", - "\n", - "### 定义绘图函数\n", - "\n", - "为了使得整个训练过程更容易理解,需要将训练过程的测试数据、目标函数和模型网络进行可视化,这里定义了可视化函数,将在每个step训练结束后调用,展示模型网络的拟合过程。" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": { - "ExecuteTime": { - "end_time": 
"2021-01-04T07:04:53.305631Z", - "start_time": "2021-01-04T07:04:53.288251Z" - } - }, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "import time\n", - "\n", - "def plot_model_and_datasets(net, eval_data):\n", - " weight = net.trainable_params()[0]\n", - " bias = net.trainable_params()[1]\n", - " x = np.arange(-10, 10, 0.1)\n", - " y = x * Tensor(weight).asnumpy()[0][0] + Tensor(bias).asnumpy()[0]\n", - " x1, y1 = zip(*eval_data)\n", - " x_target = x\n", - " y_target = x_target * 2 + 3\n", - " \n", - " plt.axis([-11, 11, -20, 25])\n", - " plt.scatter(x1, y1, color=\"red\", s=5)\n", - " plt.plot(x, y, color=\"blue\")\n", - " plt.plot(x_target, y_target, color=\"green\")\n", - " plt.show()\n", - " time.sleep(0.2)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义回调函数\n", - "\n", - "MindSpore提供的工具,可对模型训练过程进行自定义控制,这里在`step_end`中调用可视化函数,展示拟合过程。更多的使用可参考[官网说明](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#callback)\n", - "\n", - "- `display.clear_output`:清除打印内容,实现动态拟合效果。" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.318392Z", - "start_time": "2021-01-04T07:04:53.306647Z" - } - }, - "outputs": [], - "source": [ - "from IPython import display\n", - "from mindspore.train.callback import Callback\n", - "\n", - "class ImageShowCallback(Callback):\n", - " def __init__(self, net, eval_data):\n", - " self.net = net\n", - " self.eval_data = eval_data\n", - " \n", - " def step_end(self, run_context):\n", - " plot_model_and_datasets(self.net, self.eval_data)\n", - " display.clear_output(wait=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 执行训练\n", - "\n", - "完成以上过程后,可以使用训练数`ds_train`对模型训练,这里调用`model.train`进行,其中参数解释:\n", - "\n", - "- `epoch`:训练迭代的整个数据集的次数。\n", - "- `ds_train`:训练数据集。\n", - "- `callbacks`:训练过程中需要调用的回调函数。\n", - "- 
`dataset_sink_model`:数据集下沉模式,支持Ascend、GPU计算平台,本例为CPU计算平台设置为False。" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:05:27.693120Z", - "start_time": "2021-01-04T07:04:53.319412Z" - } - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAD8CAYAAACSCdTiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3dd3xUVfrH8c9DqFIEpAhSBLvgSomouO5aUBFdQXct6M+GEMDeQUBAsKJYkBpEAUWKikhViqKrCBKQHpGqhhISek1I5vz+mGGNYQIJMzeTTL7v1yuvzNx7554nN5Mnd8499znmnENERKJTsUgHICIi3lGSFxGJYkryIiJRTEleRCSKKcmLiEQxJXkRkSgWcpI3s9pm9o2ZJZrZSjN7LLC8t5ltMrMlga9WoYcrIiJ5YaGOkzezGkAN59xiMysPLALaALcB+5xzb4QepoiInIjioe7AObcF2BJ4vNfMEoHTQt2viIiELuQz+b/szOx04DugIfAkcB+wB0gAnnLO7QzymjggDqBs2bJNzz333LDFIyJSFCxatCjVOVc12LqwJXkzKwd8C7zknJtoZtWBVMABffF36bQ71j5iY2NdQkJCWOIRESkqzGyRcy422LqwjK4xsxLAZ8AY59xEAOdcsnMu0znnA4YDzcLRloiI5F44RtcYMAJIdM69mWV5jSyb3QysCLUtERHJm5AvvAKXAXcDy81sSWBZN6CtmTXC312zEegYhrZERAonnw9SUqBKFUhNhWrVwMzzZsMxuuZ7IFik00Pdt4hIVMjIgMsvh4ULoVw52LcPLrsMvvkGinl7T6rueBUR8ZLPB//4B8yfD5mZsHu3//u8ebBtm+fNK8mLiHgpJcV/Bn9E+fL+7xkZcNtt/n8CHlKSFxHxUrVq0Lw5FC8Ol14KiYkQE+Nf9+OP/n8CHgrHhVcREcmJmb/vPSXFn/DB3x8/bx40b056xaqU9LB5ncmLiHitWDGoXt2f8ANJP2PjH9z/r86Ue6Ypv/6+y7umPduziIgENXtRElVfbs/I/W0pXSqGbfu2e9aWumtERPLJ/gOZ3PLaQGZmdIdKjjsrv8nIHo9QIsa7VKwkLyKSD0bOWEbn6R04VOUnah5uyeSOQ2h6xumet6vuGhERD21JPUjjp5/j/h+bkl5mHV0+u5CkBftpWq9OvrSvM3kREQ84B71Hz+GlpR3JPHkdDQ7czYzBk6i9f6l/OGVKiv9irMd0Ji8iklc+HyQn+zN5ECvWbafuY/fRZ2MLYmKMwRfPYcWro6jdtLE/wTdv/udwSo/pTF5EJC98Prjyyv+Nc89afyYz09FhwFhGbn0cV3EnVxbvxqQXelDhpDL+12YdL58PxclASV5EJG9SUvwJPiPD/z3Q7TJr4UZuG92ZXVW+pEJGMz6+dTY3xP7tr689Ml4+H6m7RkSKtuN0vRwla5mC5s3ZX74y1/buz7WTGrCrwvfcVXkA2/vNOzrBn0hbYaAkLyJF15Gul1q14Iorclcs7EiZgqQk3u/yFlWeu5RZ9jQ1069i8QOr+OiRRyh+pDZNqG2FQThmhqptZt+YWaKZrTSzxwLLK5vZLDNbE/heKfRwRUTCKFjXSy5s2XGIRv3f4IEFzUgvncRzZ04gqd9kGtevHfa2QhWOM/kM4Cnn3HnAJcBDZnY+0B
WY45w7C5gTeC4iUnBk63rJzYiXXqNnUvuVhiwt+wYND7dj4zOJvHzXrdjxLqSeQFvhEI6ZobYAWwKP95pZInAa0Bq4IrDZKGAu0CXU9kREwiZ7hchjJOrl61K4YcCT/FH5I0ra2Qy6eC4dW/7Tk7bCKayja8zsdKAxsACoHvgHgHNui5nlz78tEZG8OM6Il8xMxwMDPmR08pO4k/dwVczzTOrbjfJlSoe9LS+ELcmbWTngM+Bx59ye4350+fN1cUAcQJ06+XObr4hIbsxMWMftozux65TZnJzenLH/ief62AaRDitPwjK6xsxK4E/wY5xzEwOLk82sRmB9DSDoZIbOuXjnXKxzLrZq1arhCEdEJCT796dxbfe+XDfpAnaVW8DdlQaz/Y3/FroED2E4kzf/KfsIINE592aWVZOBe4FXA9+/CLUtERGvjZj+Ew9NvY+06omctv4ypj4/lkZnHmPUTAEXjjP5y4C7gavMbEngqxX+5H6Nma0Brgk8FxEpkDan7qNRlydpv+BSDp+0nW7jmpA0+gcalS3chQHCMbrmeyCnDvirQ92/iIjXen44g5eXdiaz/G9csK890weOo1ba4j+n6yvEdMeriBRZy9YlU/uJtvRd34ri7iSGXfw9y/rFU+viJv7x7Jdfnu+jYcKtcH8OERE5AZmZjnbvfsCHyU/jyu3n6mIvMOnFLpQrU8q/QQTGs3tFSV5EopfPd1Sy/iphDbd/FMfuSnM5+dDljL0rnutjz/3r6yIwnt0r6q4RkeiUrSDYvv2HaNHnZVp+cQG7T/qZeyrGs73/3KMTfJTRmbyIRKfk5P8VBHtvw24e7hFLWsWV1DrwH6Y+OIALz6gR6QjzhZK8iEQfnw9uv53NxUpx/fWXsazZd8TsP43u9b/gxbtvinR0+UpJXkSiT0oKz6ek88pDFcis8B1/2xfH9K79OK1KhUhHlu+U5EWk8ApyYXXpui3cOOgxku5YQKnkMxm6pA3t5wwp9KNkTpQuvIpI4ZTtwmpmRgb3vBNPoxHnkVR2Mi3sRVKf/Zr2cz4rsgkedCYvIoVVlpmWZvySRNunrmR35e85+cAVjLtrGC0vOjvSERYISvIiUjhVq8a+S/9O6xgfX/99Ppaxk3sqjuD9HvcTE1N0z9yzU5IXkUIp/qt5PHrRNtIqrKL2rjuY+vDb/O2M6LiBKZyU5EWkUElK3U2r/s+xvPQQYqwOz9efRp+7W0U6rAJLSV5ECo3uH33Oa8seJrPMVi488DjTu/alZpVykQ6rQFOSF5ECb8m6Tdw4+BE2VficUukXMuzKSTxw/UWRDqtQUJIXkQIr0+fjvoHD+GhrVyiTTgteZdIrT1K2TIlIh1ZohGuO1/fNbJuZrciyrLeZbco2W5SISK5MX7iSyk9fzkc7H6TivmZ82XoFs3p18Sd4n89fm8a5SIdZ4IXrZqiRQMsgy99yzjUKfE0PU1siEsX2HjzE1S/25IYpjdlbcjX3nTyK1Ldmct1FZ/g3yHYTFD5fROMt6MLSXeOc+87MTg/HvkSk6Br21Xc8NiuOtPKrqb3n/5j2yJtccEbVv26U5SYo5s3zP4+S2u9e8LqswcNmtizQnVMp2AZmFmdmCWaWkJKS4nE4IlIQJaXu4oJucXSa/08yXBo9633J729/eHSCB3+dmubN/dPzNW/ufy45MhemPq3AmfxU51zDwPPqQCrggL5ADedcu2PtIzY21iUkJIQlHhEp+JxzdP/4U/ote5TM0tu48OCTTH+2NzWrlP3rhtkLkQUpTFaUmdki51xssHWenck755Kdc5nOOR8wHGjmVVsiUvj8vP4Paj/bmlfW3kbxQzV57+KFLOn3evAEn70P/sj0fErwx+XZEEozq+Gc2xJ4ejOw4ljbi0gUy3LmneHzcd+gwYxJ7gYlfVzj3uDzVx+jbJkc0pH64EMSliRvZmOBK4AqZpYE9AKuMLNG+LtrNgIdw9GWiBQyR87E581j+j9a0LbRTvZUWE
DFPdcx/p4hXHtRvT+HRAbrfjnSBz9vnvrgT0C4Rte0DbJ4RDj2LSKFXEoKexfMp/U/m/JN89lYWiXurzCG4T3a+qtFZvknQPPm8M03/u6YI8z8y9QHf0J0x6uIeGrYzyt5tFMN0istoM6q65n20mganlnlzw1y0x1zpA9e8kwzQ4mIJ/5I3UHD7u3otOBqMosVp1fVSfw2btpfEzxoSKTHdCYvImHlnKPbx+Ppt/wxfKW202jnU0zv3ocaVU8K/gJ1x3hKSV5Ewmbx+t/419DObC47g9IHLmLwvKu4/8d34OeFR/e1Z6XuGM8oyYtIyDIyM7ln8ADGJveAEsa1vreZ+MStlD27roY+RpiSvIiEZGrCEu4a14E95ROouKsVE+4ZzDXN6vorRGroY8QpyYvICdlz8ACt33qBuWn9sWJVaFd+PPE9bv1zEm31tRcISvIikmeDv5rFE3M6kV52PXV2tGfa4/1oeEaQGoTqa484JXkRybXfU1O5/u0nWVXiQ2IOn03v07+hZ88rdJJegCnJi8hxOefoOnYMbyx/Al+JXTTe04Np3bpTo2rpSIcmx6EkLyLHlLBuPTfFd2bLSTMpve8SBrcczv03NIx0WJJLSvIiEtThzAzuHfI2Y7f2hJjiXJc5kM9e60TZk2IiHZrkgZK8iBxlyqJF3DWuA3vL/UzFHTcx4d5BXHNxrUiHJSdASV5E/lfvfXe5srR+pxffpr2NUY125T5lWPdbKF5cV1YLKyV5kaIuUOp30NbNPHnDAdJP3kyd1I5Me+JVGp5ZMdLRSYjCUoUyMFH3NjNbkWVZZTObZWZrAt+DTuQtIpG18ZdVnF9lDw/fuZbM9LK8cMp0Ng4cqgQfJcJVangk0DLbsq7AHOfcWcCcwHMR8dqRWZacO+Zmzjme+eh9zvjwnyQ2WEnjua354+cz6PlQS417jyJhSfLOue+AHdkWtwZGBR6PAtqEoy0ROYZgk14HsXDdWmo+14I31j1AyW2n88Gc21n88RBqfD9d5QeijJeThlQ/MpF34HvQ6kRmFmdmCWaWkJKS4mE4IkVAcvLRsyxlkZ5xmDsGvkqzkRew1RJoObUNqR+s5L6fxvlLECjBR52IzwzlnIt3zsU652KrVq0a6XBECi+fD26/3Z/gzY6q/PhFwk9U6RbL+O3PUSmlFbNar2LGSTsoG5OpKpFRzMvRNclmVsM5t8XMagDbPGxLRFJS4Mcf/Y+LFYPx48GMXQf20vqd5/kubQDmq0m7kz4nflAbYmJQlcgiwMsz+cnAvYHH9wJfeNiWiGSdK/Wyy6B6dd79airV+zTgu7QB1N32IMs6rmLEM4EED39WiVSCj1phOZM3s7HAFUAVM0sCegGvAhPM7AHgd+DWcLQlIjnIUr99g/lo1esOfomZQMyBBrxwzg883+tS5fIiKCxJ3jnXNodVV4dj/yISROAu1axdLc6Mp+dM4e0Vz+CLOUDjnX2Z1uNZalQrGeFgJVIifuFVRE5AkKGSC9atpma3K3lzTQdK7ryQD5otY/HbPZTgiziVNRApjFJS/jdUMu3HH7j7rZ58susNcGVomT6cT/u3o+xJxziHC/IpQKKTzuRFCqPARdbP69SgSqc6fLLvJSpta82sNonMeKn98RN8Lm6YkuigM3mRQmjnwb3c1LIh36f9F9tbiwfKTGHY4Bv/HDVzLFk+BfzvhinNwxq1lORFChOfj3cmjuGZhOc4XHozdbc+wtSnXqThWeVzv48jQy3nzdNNUEWAkrxIIbF+WxKtnr2Z1fUSiNnTgD5nfkqPXpfkvUs9y1BL9clHPyV5kQLO53w8PTaed1Z2wVcrnSaz/8O0Bd9y6h/1wDixi6hHboKSqKcLryIF2Px1idTs/k/eWtOZkttj+WBOWxbNn8Spl57nT+p5vYiayzLEEj2U5EUKoEOH07h1UG8uHXUhyZmruD7tA1L7z+a+H96DpCSYO9d/1h7sImpONKqmSFKSFylgPlv4PVWfb8
SnqS9QacutzL45kekv30fZsnZ0rZms9WqOdxE1L/8QJGqoT16kgNi+fxet3+3KD2nDsPS6PFBpOkOHXE/xY/2V5uUiqkbVFElK8iIR5pzj7ZkT6TL3EQ6XTKbu1ieZ9lQfGpxdNnc7yO1FVI2qKZKU5EUiaO22JG4Y9BC/FptMzJ7G9GkyhR69m3qXfzWqpshRkheJgExfJk+NG8K7q7rhI4MmO15nWq/HObWa/iQlvPSOEvFKDuPX561dwc0fdGBbyfmUTrmGoTcM5d6b6kcwUIlmno+uMbONZrbczJaYWYLX7YkUCEGGKx48fIh/D+rBZaMbs+3wWlod+pCU12dw78VlNW5dPJNfZ/JXOudS86ktkcjLNlzxkzlTuP/rZ9lf+lcqb7qHCe37c/XFlf3/CI6MdvnmG3+fuUgY6R0l4oXAcMXtZYtz2R1NuW1eGw4czKB9qZkkDxvF1ZdW0bh1yRf5keQdMNPMFplZXPaVZhZnZglmlpCiN7lECQf079qJGo+cwrz6CdRNepZlnZczvOs1f457z8uNTCInyJzHfYFmVtM5t9nMqgGzgEecc98F2zY2NtYlJKjbXgq3Ndt+54bBD7LGphGT3JRejYfTo33j4MMiNUOThIGZLXLOxQZb53mfvHNuc+D7NjP7HGgGBE3yIoVZpi+TJ8cN5N1V3XEOmu5+iym9HqZG9WP8mWncunjM0yRvZmWBYs65vYHH1wJ9vGxTJBK+X7uUWz7oQErJhZROvp6hNw7h3tZ1Ix2WiOdn8tWBz83/MbQ48LFz7kuP2xTJNwcPH+Su4X34PPl1SD+FVhljGf/W7ZQrp64XKRg8TfLOufXAhV62IRIp4xfO4YHPO7K/1Doq/96OCR1e5+rmlSMdlshf6I5XkTxK2bedmwY9xfxDo7D9Z9Kh3NcMir+SEiUiHZnI0ZTkRXLJOcfrX31M9/8+TkbMLupu6sbUZ3rQ8NwykQ5NJEdK8iK58EvyBm4c2pl1fEXMjmb0aTycHi/8TaMepcBTkhc5hgxfBo+Pe4fBiT1xmcVoumsAU3o9SI1TYyIdmkiuKMmL5ODbNYv5z6gOpJZYTJnN/2LIjYO49+bakQ5LJE+U5EWy2Z++n/97rzeTkt+CQ1W5Ye+HjH3rTspXUKknKXyU5EWyGLtwJu0ndeJAyQ1U+q09E1YdpsXP98OS4aoSKYWSkrwIkLw3hdZDnmTBwY+wvefQofq3DHrxHErUq/XXKpEqQSCFjJK8FGnOOV778kOe//5JMmL2cPofPZnS5TkanlvaP5FH8+Z/1ntXlUgphJTkpchKTF7HjUM7sZ7ZxKQ2p0+TeHq80ODPYZFm/i4aVYmUQkxJXoqcw5mHeWz8mwz9pTcuowRNdwxiysP/pkbDapA9j6tKpBRyuook0cnng+Tko+ZO/ebXhdTofRFD1nSl1B/XM7LJShISx1OjyZ9zsYpEEyV5iT5BJtHel76PmwY/wVVjLmH7gRRa7Z3ItgETuffykpqCT6Kaumsk+mSbO3XMnPHEze3KgZK/U3lDZ8Z3eIUWl5/s37ZcNV1claimJC/RJzB36tYlP3DTLQ1YOO9ObPf5dDj1ewa+dxklS2bZVhdXJcp53l1jZi3NbLWZrTWzrl63J+KAl7vcTe1HKrCwViJ1N7zAsocWE98jW4I/4sjFVSV4iUJeT/8XAwwCrgGSgIVmNtk5t8rLdqXoWrHlV24a3pENbi4xyZfTp0k83V84VzeqSpHldXdNM2BtYIYozGwc0BpQkpewSs9M59HxrxP/S1/c4dI03RHP5D4PULOGsrsUbV4n+dOAP7I8TwIuzrqBmcUBcQB16tTxOByJRnNWz+f2jzqwvfgKyvx2K0Nav8O9t9SIdFgiBYLXST5YJ+dfBi475+KBeIDY2FgXZHuRoPam7eXOEd2Yum0Q7D+NVkxm7Lv/okKFSEcmUnB4neSTgKwFuGsBmz1uU4qA0Qsm02nKQxwsvonKax9mfMeXaHF5+U
iHJVLgeJ3kFwJnmVk9YBNwB3Cnx21KFNu8Zws3DX2URQc/xXY2pEP1Txj4/iXBR82IiLdJ3jmXYWYPA18BMcD7zrmVXrYp0cnnfLw0PZ4+P3Ylww5Rd+NLTHnuGS44v0SkQxMp0Dy/Gco5Nx2Y7nU7Er2Wb/mFm4bHsdH9l5iky+mz/FK6L+hKseIaOSNyPLrjVQqstIw0Hhn/Gu+tfgmXXpamMzsz+eeJ1Cz+I2x/UtUhRXJBp0JSIM1a/QM1+zRm+NpelN5wCx80WUVC+ZXULL5dNWZE8kBn8lKg7D60mzvf78r0lKGwtw6t3DQ+HtCSk9NT4N9fQ2qqasyI5IHO5KXAGDn/c2q8eD7Tk+OpvPoJZrZZybQ3W3Jym0DZ4KuugqpVleBF8kBn8hJxSbs3cdOwh/n54CRsx4V0qD6Jdz+4iFKl8E/8kb3eu/riRXJNSV4ixud89J0xlL4/diXTHabuhteY3O0J/tYgy7DIaqr3LhIKJXmJiCWbV9LmvTh+c/OI2dSCPo0H0/2hChQ7NdtbUvXeRUKiPnkJjxzmVM3uUMYh2o/pSZNhjflt/2qa/j6Kjb2/5PlP21OsTg7zrKreu8gJU5KX0AWZUzWYr375jpp9GjFibV9Kr72d95smkjDiHmqVStU8qyIeUZKX0GWbUzV7kt55cCfXD+5Ay/H/ZOeedFpt/4rNgz7k/tuq+jc40u9evLj63UXCTH3yErocLo4653h//qc8PP0RDhVLpfLqZxjbqRfXXlH2r69Xv7uIZ5TkJXRBkvTvu/7gpvgHWXpwKqQ0oUP1Gbw7qrF/WGQwR/rdwd/do4QvEhbqrpHwCCTpTOej1/R3qd//fJbu/pq6v/RnyUMLiH/hGAk+q1z274tI7uhMXsLm583LaTOiA7/7FhDzx3W80HQIPfrWy9sk2sH693Xzk8gJU5KXkB08fJCHJvTlg19fh4OVaJoyhs/7tqV27RPoatHNTyJh5VmSN7PeQAfgyFCLboHa8hJFpiV+zf+N68iuYmsp8+t9DG7zBvfedsqJd6XrIqxIWHl9Jv+Wc+4Nj9uQCNh+YDt3jnyGmSkfwK4zuME3m4+GXE3FimHYedaLsCISEnXXSJ445xg+72Me/fIJ0ortoFJiV8Z37sk1V5aJdGgiEoTXSf5hM7sHSACecs7tzL6BmcUBcQB16tTxOBwJxYadG2kzvDPLDn4JybF0+OlRBszrRumTNEhLpKAyd5xaI8d8sdls4NQgq7oD84FUwAF9gRrOuXbH2l9sbKxLSEg44XjEGxm+DHrPeJdXF/QgM9OoOyeOL376igtjfoWkJHWtiESYmS1yzsUGWxfSmbxzrkUuAxgOTA2lLYmMhKQl3PxBe5J8i4jZeAMvNBlE95L3ERPzq0a/iBQCXo6uqeGc2xJ4ejOwwqu2JPwOHD7AgxNeYNSv/eFAFZpuG8/EF2+lTh2DR+do9ItIIeFln3w/M2uEv7tmI9DRw7YkXHw+pvz4KXfPfI7dxdZT5pf2DLq5H/fdXunPfK7RLyKFhmdJ3jl3t1f7Fm+k7ttG20evYHbdRNhxNq18X/PR0CupVCnSkYnIidIQSsE5x9B5H/HEl0+QVms3lb/tzNjvV3HtH+eDErxIoaYkX8St27GeNiM6seLALNhyKe0X/ot3V75E6cua6qKqSBRQki+iMnwZPD/9Lfr91AtfRnHqrh3IpO6daXQBkNJOF1VFooSSfFHj8/HT8tncMrkLm3xLiFnfmhdiB9K9by1iYgLb6KKqSNRQki9C9h/aS+e4S/mwXiLsr06TrZ8y8aVbqFtXZ+wi0UpJvoj4YuWX3DO+I3vO+J3SCfcwaM5+7l//d+xUJXiRaKYkH+W27d/GnaOeYE7Kx7DjXFp9/xofrniDypedB9V1YVUk2inJRynnHAN/GMnTM58inX1UWtGbjx/sSssBJSDl3mNfWNUcqyJRQ+UDCzufD5KTIUuhuTXb13LBGy14dE470jc1oH3GUj
aN6UXLFqX+vFv1WAlec6yKRA0l+cIsW0I+fDiNLlNf4dwBF7ByRwJ1lw9l8SPfMvzl8yiT23LvweZYFZFCS0m+MMuSkOdtmEfdl5vSb1E3bM0N9K6ayLrxHWncKI+/4iNzrBYvriqTIlFAffKFWbVq7L28GZ1Kr+HjZqmwZxdNtk5i4iutqVv3BPepOVZFooqSfCH22YpptPvHH+whldLLOzPw5ldod1eF0POyqkyKRA0l+UJo676ttB39GHNTJkBKA1pl/sDo+Es55ZRIRyYiBY2SfCHicz4GfD+CLrOfJd13gErL+zLmwWe5/tqSkQ5NRAqokC68mtmtZrbSzHxmFptt3XNmttbMVpvZdaGFKb+krKbhG1fyxNdxpP9+IQ8cXkbSxz2U4EXkmEI9k18B3AIMy7rQzM4H7gAaADWB2WZ2tnMuM8T2ipz0zHS6TX+NtxJexJd2EnVXv8fE59vRpIkuiIrI8YU6kXcigB19pa81MM45lwZsMLO1QDPgx1DaK2q+/+1HbvuwA1syVxKz+nZ6xb5NjxdPpbg62UQkl7xKF6cB87M8TwosO4qZxQFxAHXq1PEonEIiUE5gT/mSxI15ivGbRsKeWjTZPJVPX7mBevUiHaCIFDbHTfJmNhs4Nciq7s65L3J6WZBlLsgynHPxQDxAbGxs0G2KhMDdqxNSF9C+ZWn2lt9D6YUdGXjba7TrH4ZhkSJSJB03yTvnWpzAfpOA2lme1wI2n8B+iozNG5fTtkYi312VBlvPodX4NozaOowq7/cGqxDp8ESkkPKqrMFk4A4zK2Vm9YCzgJ88aqtQ8zkfb343lNNH/YPvztxLpdnPMO2j05m29SWqXHaOygqISEhC6pM3s5uBd4GqwDQzW+Kcu845t9LMJgCrgAzgIY2sOdrKbav496g4Vh/4AX67igeqDOGdUSdTtm4VSE1VWQERCZk5V3C6wWNjY11CQkKkw/BcWkYaz01/hbcXvYw7VJ463z7LxIRPaXrZSf66McVUN05Ecs/MFjnnYoOt02C8fPbtxv9y+0dxJGf+QkziXfQ4rw89Es6neGYazCvuLwymujEiEiZK8vlk16FdxE3owicb4mHn6TTePINPX76W+uW2wTfN4McfVdpXRMJO/QIec84xbuln1H71fD5Z9x6lFz/F8MYrWDT2Wuo/cCXUru3vd//9d5g7V33wIhJWOpP3UNKeJO748CF+SJ0MWxvT8vAURr/XlKpV8U/Zl3UGpmLFlOBFJOx0Ju+BTF8m/b4dSP3+5/PDlllUWvg609r8xIwRgQQPmoFJRPKFzuTDbHnycv4zKo5fD86HDdfQrtpQBoyvT9my2TbUDEwikg+U5MPkUMYhukx7kXd/fg13oCJ1Ej/i0553ctFFx0jemoFJRDymJJ9XgSJiWc++v14/l7Yfx7Etc5yzTP8AAArKSURBVA3FVtzD8xf1p8fEKpQoEeFYRaTIU5LPi0ARMebNg+bN2TH9M+I+68pnG0bAjvo03jSLT15rwRlnRDpQERE/Jfm8SEmBefNwGRmM2TmfTv3OZz87KL24C2/f3JO4t09S17qIFChK8nlRrRq/X9mEtlXXMu/sHbCpLi0Pz2TUiEYaHCMiBZKSfC5l+jJ5/buBPH/pSjIyoOL8N/nwkUe5sVVMpEMTEcmRknwuLN26lP+M7sDagwth3fW0qzaEdz6pS7lykY5MROTYlOSP4eDhgzwz7QUGL3kDt/8U6qwayye9bqdZM3W8i0jhoCR/RLahkTPXzub/xnUiJXMdMcva0e2i13l+UmUNixSRQiXUSUNuBXoD5wHNnHMJgeWnA4nA6sCm851znUJpy1NZhkZu/8dFtL/7LCb9Nhq2n0WjpK/5pN+VnHlmpIMUEcm7UM/kVwC3AMOCrFvnnGsU4v7zR0oKbt4PjD4PHmy6lAPrF1J6UTfevqUHce+U0bBIESm0QkryzrlEACvkWXBDif207VCTBdX/gKQLaHk4npHv/00VB0
Sk0POyCmU9M/vZzL41s8s9bCf3fD5/id/AlIcZvgxe+qY/Zw+4gAUVd1Lxh3f44qbvmTFSCV5EosNxz+TNbDZwapBV3Z1zX+Twsi1AHefcdjNrCkwyswbOuT1B9h8HxAHUqVMn95HnVbaSBIs/7s9tYzqy7uBi+PVf3F99EO98Vpvy5b0LQUQkvx03yTvnWuR1p865NCAt8HiRma0DzgaOmqXbORcPxIN/Iu+8tpVrgZIE+y2Dp8osZtjwi2FfNeqs/ITxvf7NJZcU7i4nEZFgPBlCaWZVgR3OuUwzqw+cBaz3oq1cq1aNGTecx92nb2R7pb0U+7kD3S56jZ6TK2lYpIhErVCHUN4MvAtUBaaZ2RLn3HXAP4A+ZpYBZAKdnHM7Qo42rwJj31PKGg98+iRTGi+H1HNo9PMwxvf7J2efne8RiYjkq1BH13wOfB5k+WfAZ6HsO2Q+H+7KK3h/73weua4kB0ukU2phT97+dzc6DiilYZEiUiRE7R2v69Yu5I7Tl5FQ/zD8fhHX7RvIyA8ac2qwS8giIlEq6pL84czDvDL3Tfr8tzeZNUtScepLjDy4h9bLG4HO3kWkiImqJL9w00JuH9OBDQeXwi83c3/1AbwzriTl61fVRNkiUiRFRZLfl76PJ6c+z3vLBuD2nkrt5ROZ0OdmLrkk0pGJiESWl3e85puP5yxj+NIB2OKOdK+4irVTleBFRCBKzuTb/r05U8au5Y3X63HOOZGORkSk4IiKJF++PEwZXS/SYYiIFDhR0V0jIiLBKcmLiEQxJXkRkSimJC8iEsWU5EVEopiSvIhIFFOSFxGJYkryIiJRTEleRCSKhZTkzex1M/vFzJaZ2edmVjHLuufMbK2ZrTaz60IPVURE8irUM/lZQEPn3N+AX4HnAMzsfOAOoAHQEhhsZjEhtiUiInkUUpJ3zs10zmUEns4HagUetwbGOefSnHMbgLVAs1DaEhGRvAtngbJ2wPjA49PwJ/0jkgLLjmJmcUBc4Ok+M1sdQgxVgNQQXu8VxZU3iitvFFfeRGNcdXNacdwkb2azgWAzo3Z3zn0R2KY7kAGMOfKyINu7YPt3zsUD8ceLIzfMLME5FxuOfYWT4sobxZU3iitvilpcx03yzrkWx1pvZvcCNwJXO+eOJPIkoHaWzWoBm080SBEROTGhjq5pCXQBbnLOHciyajJwh5mVMrN6wFnAT6G0JSIieRdqn/xAoBQwy/wTZc93znVyzq00swnAKvzdOA855zJDbCs3wtLt4wHFlTeKK28UV94Uqbjszx4WERGJNrrjVUQkiinJi4hEsUKV5M3sVjNbaWY+M4vNtu64ZRTMrJ6ZLTCzNWY23sxKehTneDNbEvjaaGZLcthuo5ktD2yX4EUs2drrbWabssTWKoftWgaO41oz65oPceVYHiPbdp4fr+P97IHBBOMD6xeY2elexBGk3dpm9o2ZJQb+Bh4Lss0VZrY7y++3Zz7Fdszfi/kNCByzZWbWJB9iOifLcVhiZnvM7PFs2+TL8TKz981sm5mtyLKsspnNCuSiWWZWKYfX3hvYZk1gJGPeOecKzRdwHnAOMBeIzbL8fGAp/ovA9YB1QEyQ108A7gg8Hgp0zoeY+wM9c1i3EaiSj8evN/D0cbaJCRy/+kDJwHE93+O4rgWKBx6/BrwWieOVm58deBAYGnh8BzA+n353NYAmgcfl8ZcRyR7bFcDU/Ho/5fb3ArQCZuC/f+YSYEE+xxcDbAXqRuJ4Af8AmgArsizrB3QNPO4a7D0PVAbWB75XCjyulNf2C9WZvHMu0TkX7I7Y45ZRMP/wn6uATwOLRgFtvIw30OZtwFgv2wmzZsBa59x651w6MA7/8fWMy7k8Rn7Lzc/eGv97B/zvpasDv2dPOee2OOcWBx7vBRLJ4S7yAqg1MNr5zQcqmlmNfGz/amCdc+63fGzzf5xz3wE7si3O+j7KKRddB8xyzu1wzu3EXyusZV7bL1RJ/hhOA/7I8jxYGY
VTgF1ZkkmOpRbC6HIg2Tm3Jof1DphpZosC5R3yw8OBj8zv5/ARMTfH0kvt8J/1BeP18crNz/6/bQLvpd3431v5JtBF1BhYEGT1pWa21MxmmFmDfArpeL+XSL+n7iDnE61IHC+A6s65LeD/Bw5UC7JNWI5bOGvXhIXlooxCsJcFWZZ9bGiuSy3kRi7jbMuxz+Ivc85tNrNq+O81+CXwX/+EHSsuYAjQF//P3Rd/V1K77LsI8tqQx9nm5njZ0eUxsgv78coeZpBlnr6P8srMygGfAY875/ZkW70Yf5fEvsD1lkn4b0T02vF+LxE7ZoHrbjcRqJCbTaSOV26F5bgVuCTvjlNGIQe5KaOQiv9jYvHAGVhIpRaOF6eZFQduAZoeYx+bA9+3mdnn+LsLQkpauT1+ZjYcmBpklSclKXJxvIKVx8i+j7Afr2xy87Mf2SYp8Ds+maM/invCzErgT/BjnHMTs6/PmvSdc9PNbLCZVXHOeVqMKxe/l0iWObkeWOycS86+IlLHKyDZzGo457YEuq62BdkmCf91gyNq4b8emSfR0l1z3DIKgcTxDfCfwKJ7gZw+GYRDC+AX51xSsJVmVtbMyh95jP/i44pg24ZLtn7Qm3NobyFwlvlHIpXE/1F3ssdx5VQeI+s2+XG8cvOzT8b/3gH/e+nrnP4phVOg338EkOicezOHbU49cn3AzJrh//ve7nFcufm9TAbuCYyyuQTYfaSrIh/k+Gk6Escri6zvo5xy0VfAtWZWKdC1em1gWd54fWU5nF/4E1MSkAYkA19lWdcd/8iI1cD1WZZPB2oGHtfHn/zXAp8ApTyMdSTQKduymsD0LLEsDXytxN9t4fXx+xBYDiwLvMlqZI8r8LwV/tEb6/IprrX4+x6XBL6GZo8rv45XsJ8d6IP/HxBA6cB7Z23gvVTf6+MTaPfv+D+qL8tynFoBnY68z4CHA8dmKf4L2M3zIa6gv5dscRkwKHBMl5NlZJzHsZ2EP2mfnGVZvh8v/P9ktgCHA/nrAfzXceYAawLfKwe2jQXey/LadoH32lrg/hNpX2UNRESiWLR014iISBBK8iIiUUxJXkQkiinJi4hEMSV5EZEopiQvIhLFlORFRKLY/wNQ0IMDOgZ73wAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Parameter (name=fc.weight) [[2.0064354]]\n", - "Parameter (name=fc.bias) [2.9529438]\n" - ] - } - ], - "source": [ - "\n", - "from mindspore.train.callback import LossMonitor\n", - "\n", - "epoch = 1\n", - "imageshow_cb = ImageShowCallback(net, eval_data)\n", - "model.train(epoch, ds_train, callbacks=[imageshow_cb], dataset_sink_mode=False)\n", - "\n", - "plot_model_and_datasets(net, eval_data)\n", - "for net_param in net.trainable_params():\n", - " print(net_param, net_param.asnumpy())" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "ExecuteTime": { - "end_time": "2020-09-14T04:00:18.787349Z", - "start_time": "2020-09-14T04:00:18.784236Z" - } - }, - "source": [ - "训练完成后打印出最终模型的权重参数,其中weight接近于2.0,bias接近于3.0,模型训练完成,符合预期。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结\n", - "\n", - "本次体验我们了解了线性拟合的算法原理,并在MindSpore框架下实现了相应的算法定义,了解了线性拟合这类的线性回归模型在MindSpore中的训练过程,并最终拟合出了一条接近目标函数的模型函数。另外有兴趣的可以调整数据集的生成区间从(-10,10)扩展到(-100,100),看看权重值是否更接近目标函数;调整学习率大小,看看拟合的效率是否有变化;当然也可以探索如何使用MindSpore拟合$f(x)=ax^2+bx+c$这类的二次函数或者更高次的函数。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/mindspore_load_model_for_inference_and_transfer.ipynb b/tutorials/notebook/mindspore_load_model_for_inference_and_transfer.ipynb deleted file mode 100644 index f6ef0b3fc239c080f6b61b17acfdd8a69c6886a5..0000000000000000000000000000000000000000 --- 
a/tutorials/notebook/mindspore_load_model_for_inference_and_transfer.ipynb +++ /dev/null @@ -1,639 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 加载模型用于推理或迁移学习\n", - "\n", - "## 概述\n", - "\n", - "\n", - "在模型训练过程中保存在本地的CheckPoint文件,或从MindSpore Hub下载的CheckPoint文件,都可以帮助用户进行推理或迁移学习使用,提高效率。\n", - "\n", - "以下通过示例来介绍如何通过本地加载加载模型,用于推理验证和迁移学习。\n", - "\n", - "> 本文档适用于CPU、GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 整体流程\n", - "\n", - "1. 准备环节。下载数据集,配置运行信息。\n", - "2. 数据处理。创建可用于网络训练的数据集,可视化数据集图像。\n", - "3. 预训练模型。生成CheckPoint文件。\n", - "4. 本地加载模型用于推理验证。\n", - "5. 本地加载模型用于迁移学习。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 准备环节\n", - "\n", - "### 下载数据集\n", - "\n", - "运行以下一段代码,将数据集下载至当前工作目录中`./datasets/MNIST_Data`目录下。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - 
"!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 配置运行环境\n", - "\n", - "运行以下一段代码,配置训练网络参数,配置运行平台为CPU,并指定模型文件目录和数据集目录。\n", - "\n", - "相关参数含义为:\n", - "\n", - "- `device_target`:硬件平台。\n", - "- `data_pat`:数据集目录。\n", - "- `ckpt_path`:训练后的模型文件存放目录。\n", - "- `epoch_size`:迭代训练次数。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "\"\"\"\n", - "network config setting, will be used in train.py\n", - "\"\"\"\n", - "\n", - "from easydict import EasyDict as edict\n", - "from mindspore import context\n", - "\n", - "\n", - "args = edict({\n", - " 'device_target': 'CPU',\n", - " 'data_path': './datasets/MNIST_Data',\n", - " 'ckpt_path': './models/ckpt/mindspore_load_model_for_inference_and_transfer/',\n", - " 'num_classes': 10,\n", - " 'lr': 0.01,\n", - " 'momentum': 0.9,\n", - " 'epoch_size': 1,\n", - " 'batch_size': 32,\n", - " 'buffer_size': 1000,\n", - " 'image_height': 32, \n", - " 'image_width': 32,\n", - " 'save_checkpoint_steps': 1875,\n", - " 'keep_checkpoint_max': 10,\n", - " 'air_name': \"lenet.air\",\n", - "})\n", - "\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "以下一段代码中定义预训练模型使用的损失函数`CrossEntropyLoss`。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore import Tensor\n", - "import mindspore.ops as ops\n", - "from mindspore import dtype as mstype\n", - "\n", - "\n", - "class CrossEntropyLoss(nn.Cell):\n", - " def __init__(self):\n", - " super(CrossEntropyLoss, self).__init__()\n", - " self.cross_entropy = ops.SoftmaxCrossEntropyWithLogits()\n", - " self.mean = ops.ReduceMean()\n", - " self.one_hot = ops.OneHot()\n", - " self.one = Tensor(1.0, mstype.float32)\n", - " self.zero = Tensor(0.0, mstype.float32)\n", - "\n", - " 
def construct(self, logits, label):\n", - " label = self.one_hot(label, ops.shape(logits)[1], self.one, self.zero)\n", - " loss_func = self.cross_entropy(logits, label)[0]\n", - " loss_func = self.mean(loss_func, (-1,))\n", - " return loss_func" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据预处理\n", - "\n", - "使用`create_dataset`函数来创建数据集,对数据集进行预处理操作。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\"\n", - " create dataset for train or test\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # define map operations\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # apply map operations on images\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=resize_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_nml_op, 
input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds\n", - "\n", - "\n", - "ds_train = create_dataset(os.path.join(args.data_path, \"train\"), args.batch_size)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 可视化数据集\n", - "\n", - "使用`matplotlib`可视化工具查看第一个batch中的32张图像。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The 32 images with label of the first batch in ds_train are showed below:\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAr4AAAHdCAYAAAD7D3ocAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAABYlAAAWJQFJUiTwAAEAAElEQVR4nOy9eXyc13nf+zuz7yswC2awCcTCVVxESaTsWLZIWXElqlaSNs3SJLaT3vbe3OQmaZOmSeqmSZpuVn2btkkcp27apLdNnNSWo5VSJVsSSUsmwU0EQJBYOAPMgtn39dw/BudwAAyAATgAZjnfz2c+IN9512fOe85znvMshFIKgUAgEAgEAoGg3ZHs9Q0IBAKBQCAQCAS7gVB8BQKBQCAQCAQdgVB8BQKBQCAQCAQdgVB8BQKBQCAQCAQdgVB8BQKBQCAQCAQdgVB8BQKBQCAQCAQdgVB8BQKBQCAQCAQdgVB8BQKBQCAQCAQdgVB8BQKBQCAQCAQdgVB8BQKBQCAQCAQdgVB8BQKBQCAQCAQdgVB8BQKBQCAQCAQdgVB8BQKBQCAQCAQdgawRJyGE0Eacp12hlJKtHiNkujHbkSkg5LoZoq02HtFWdwbRVhuPkGnjEe//zrBduQLC4isQCAQCgUAg6BCE4isQCAQCgUAg6AiE4isQCAQCgUAg6AiE4isQCAQCgUAg6AiE4isQCAQCgUAg6AiE4isQCAQCgUAg6Agaks5MIBBsDUIIpFIpdDodpFIpCKk/M0u5XEa5XEahUEA6nQalIuuNQNCsyGQySCQSKJVKKJXKDffN5XLI5XIol8soFou7dIcCQWchFF+BYJchhEAmk8FoNOLkyZMwGAyQyWR1K7/ZbBaZTAaLi4u4du2aGCAFgiZFIpHAaDRCo9Ggv78fAwMDG+4/OzuLubk5pNNphMNhMakVCHaAtlB8CSHcgvYgUEpBKUWpVGrQnTUfhBBIJBIus52AybFcLnd0xy2RSPjfallLpVLIZDJotVo4HA5YLBbI5fK6f490Oo1UKoVcLgelUrniuFKpxOUvEAj2HqVSCY1Gg+7ubvT29m64byKRQCAQQLFYBCFEvMcPAFtJq9WvsrFJ9JU7A5O5RCLh4yAArl/ttcxbXvGVy+WQy+UwGAxwuVzbVn7z+TyKxSJisRi8Xi/K5XKD77Q5MJvNsFgsUCgU0Gg0KxplIyiXy0in08jn8wiHwwiHww09f6vALLoKhQJWqxUajYZ/J5FIoFAo0NPTg3PnzsHpdEKhUNTVdimliMfjiMVi+Oijj5DNZpHP5wFUlF6v14t4PI5CoYBCobBjzycQCDZHIpHAYrGgu7sbp06dwtNPP73uBLdcLuO1115DPB5HIBBAMBhs23Fop5FKpejp6YHRaIRMJoNCoeDf5fN5xONx5PN5hEIhZDKZPbzT9oMQAo1GA4VCAYvFArPZDKDSvrPZLObm5pDNZvfUMNbyiq9EIoFcLodOp0NPTw9ksu09UjabRTabBQAsLi62bYej0WjQ1dUFlUoFk8nUcKtvuVxGLBZDJpPp6A5FIpFAo9FAo9HAbrfDYDDw71hH7Ha7cfDgQbhcLiiVyronbdFoFNFoFMViEb29vbzdsolbJpNp61ULgaBVYEqAyWRCf38/jhw5sqHie+vWLZhMJiQSiV2+0/aCuZh0d3dDpVJBpVLx7/L5PKRSKTKZDOLxeEePUzsBIQQKhQJqtRpWqxUul4tbeuPxOBYXF5HP5/fU2t6yiq9cLodUKkV3dzdsNhvGxsbw9NNPr2jg9UIpRSwWQzQaxbVr1zA9Pd1SfpNyuRwSiQRqtXrD5yeE4PDhwzh+/DhMJtMDWcjXo1QqwePxIBKJ4PLlyzxYI5lM7vnyRiNg/rkGg2HDpTStVov9+/fDbDbj+PHj6OnpWXEOqVQKo9EIq9XKf796YZOWsbEx/OAP/iBvq+l0Gn/xF3+BQqGAaDSKXC734A/cpNTb5gEgk8lwC4Owggv2ivX
6P0opkskkstksEokEstlsS40/zUJ136zT6fDEE09gdHQUer0eBoMB+XweuVwO8Xgc4+PjCIVCiEajiMfje33rLQMhBDqdjq9Q1hq3pFIpXC4XTCYTjh07hmPHjqFcLiOfz8Pn8yGbzcLv96NQKKBUKu1J/9ySii8hhLs42Gw2DA0N4ZFHHsELL7wArVa7rXP6fD74/X4AwDe+8Y1G3u6OUi0Lo9EIk8m04b5HjhzBJz/5SdjtdoyOjm7bQr4exWIRExMTCAQCyGazmJ2dRTweRyqVannFlym5crkcZrMZKpVqjf8uw2w24/Dhw3A6nfj0pz+N4eHhmufbDsyCYTKZMDo6yrfH43Fcu3YNHo8H2WwWsVhsW+dvdrbS5oH7FvJ8Pi8UX8Gus9nqIaUUiUQC8XicK76inW6N1X1zV1cXTp8+jcceewxWqxVWqxWZTAbRaBSBQAD5fB4ejweTk5N7festBSEEer0eOp0Ocrm8pv4gl8sxPDwMm82GT37ykzh79ixKpRKy2Szu3r2LGzduQCaTIZfLcSPNbvfPLav4slnd2NgYHnnkEYyMjEChUNStTKwOwCoWiygWiy23RFwtiwMHDmB0dHRdGRBC8PDDD8Nut8NkMm05jVY9SCQS7kKxf/9+RKNRzM7OIhaLIZ/PN4Vj+3ZhFgWTyYTHH38cVqsVMpmsptVcq9ViZGQEJpMJOp0OwP00ZPVeq54gxOrvdipYsZlgv4HFYoFOp8Po6ChGRkbWfXZKKSYnJzE1NYVEIgG/39/SbVDQfhSLRUxOTmJ2dpYbDWKxWNu62+0Eq/tml8uFvr4+GAwGqFQqEEKQTCYxPT2NxcVFTE5OYnFxEclkcq9vvSVgq5RKpRL79+9HX18fNBpNTUMj8682GAxwOp3cKswmJY899hgeeughFAoFbii7desWkskkT+W307Sk4stM6U6nE08//TReeOEFyOXyFQ7sG8EUXWZaZ7ORVpxpV8vis5/9LF544YUNFSA2S1sdbbkT92MwGHDkyBG88847mJiYQDweRzabbbnJBUMqlUKlUqGnpwef+9znMDw8DI1GA7lcvmZf1hEzBbZUKvGAs3qULqlUypfzt5LqrJ1hMlUqlRgYGEBPTw+ee+45nDt3bkO/yZdeegkvvfQSPB4PtywUi0Wh/AqaglwuhzfeeAPvvPMOvF4vD64Wim/9rO6bR0dHYTKZuNILVFZ133rrLczPz+O1115DKBRqufF+r2B5qE0mE77/+78fH/vYx9Dd3Y3u7u6a+zOjGhsbJRIJVCoV3G43Pve5z3GDY7lcxte//nVQSuHxeLC0tCQU341gQlUqldBqtVtSDEqlEmKxGLLZLMLhMFKpFEKhEMLhMAKBQMt1OEwWarUaer1+z5UkplTrdDpYLBY4nU6MjIwgFAphdnYWmUymZVOdMeVLq9XCYDBArVbXVHwZlFKkUikeQRwKheq6DgtCVCgUMJlMDXdJaUWYX7RWq4Xb7UZ/fz+fYK0HpRR2ux2Dg4MghMDj8SCdTvPgQIFgr6GUIpvNIpVKCf/eB6C6b2ZL8dXGnXK5jFwux3192zkGolEww41arcbAwAC6u7vhdDphsVhgMpmg1+u3dD4W9M2glEKtVkMmk+3qGNeRo2kymcQHH3wAv9+P119/HXfu3FmRzoxFyQseDL1eD5VKhSeffBJ9fX2Ynp7Giy++iNnZWeRyuY7o4EulEubn5xEIBHD+/HmcP3++LoV/ZGQEZ86cgd1ux6lTp2A0GnfhbpsbnU6HkydPwuFw4Pnnn8eBAwdgtVo3PIYQguPHj2NgYAA3btyAWq2Gz+fDhQsX2tYHWiAQCBoBc28YGBjAL/zCL+Chhx7C0NAQzz3fqnSE4suWmNnyUTQahd/vh8fjwd27dzExMcF9fpkLRKew2u1jq8/O0snVWpJn/q9msxlDQ0MoFApQqVQNzySxmzB5JZNJni93ow6gUCggGAzC7/dzH756FF+FQoFQKAS1Wt2
yriGNRiqVwmAwwGKxwOFwwOVy1dX5Mst8JBKB0+lEuVyGVqvlrk2d9L43EyqVivcZ1f3Gg/ZJzUC1Ww5La9jKioKgs2DvJHNv6O7u5krvaheSVqQjFN+bN29ifHwcsVgMCwsLSKVSmJ2dRSKRwPz8PDKZDFdGWrGTfRBSqRSmpqYQiURw9epVLCwsbOl4l8uFhx9+mKfXql7GqI60VavVK6qMteJLw3zBvV4v/viP/xgWi4X7S69HuVxGMBhEMpnE1NQU0ul0XddiS57CF/U+bNmN5UFWKpV1+amz36i3txef/vSn4fP5IJVK4fP5MD4+jsXFxV24e0E1KpUKZ8+exb59+3hwLusTkskkJicnEY1GW/b30Wg0GB0dhdVqxdNPP43h4WGMjY21ZL8n6Dx0Oh30ej3279+PZ555Bg6HY4XSuxPxQbtJ2yu+lFL4fD5cu3YNPp8PU1NTPF9iPp9HNBptCwf36vKLG0W4ryafz2NhYYEv/96+fbuu67FrjI2NwWg08hejFkxZ2Wq+2maDWaJisRguXbq0YTqz6mPS6TSKxeKmba36PMziVU8qJEYnTNpY7kipVLruygGTSfUkSyqVwmQy8Uwbc3Nz0Ol0dbd3wfap9X7I5XKMjIzg0Ucf5emm2H6RSARyuRw+n69lfx9WmdHpdOLAgQM4cOBAzbR7LMBH8GAwI8t640v1+CjkvTkKhQI6nQ59fX34+Mc/DovFAovFsiWldyNdZK9pWcV3KwpUNptFNBpFOByG3+/nDu6lUqnl/UzL5TIikQgA4MqVKzAajXA6nTh06BB3Fi8UCrhx4wb8fv+KCnVAxboyMTGBcDiM6elp+Hy+Ta9JCEFfXx/6+vowODiI3t7elvf5qQemUBUKBYTD4bqzLbDl2vV8x+VyOQ4ePAiHw8Fz9A4MDGD//v2wWq1QKpUr9me/4eLiIm7evLmigMXExASi0WjH+amzSUk8Hsf3vvc9ZDIZ7N+/HzabjctULpdDr9cjm83yXJQiaLDxEEJgsVigVqt5P7EatVqNkydPYt++fbzAAIOVoE8mky33+7AKbd3d3Th9+jQcDgf6+vq4pQxY21b9fj9u377dke/tg8LebbfbzX35VxcFmpubw/z8PG7duoXJyUkEg0ER2LYJLKuQRqNBd3c3TCYT5HL5huNdLpfD4uIistksLwm93vu/17RWr1KDehQPpviGQiFeMaRdKJfLCIfDyGQyuHLlCvL5PI4dO4axsbEViu/Vq1dx7do1niyakc1mEQwGkU6n4fF46qpiQwjBvn37sG/fPgwODsLtdneMEkEp5Ypvo5DL5Th69CgOHz4Ms9kMk8nEK+up1eo1afpYe56cnMRf/uVf8sGyUCh07ADK/EGXlpbw5ptvIhwOQyqV8qwYzJ9Up9Mhm81Cp9N1TJvdbSQSCS8icOrUKZw+fXpNPy2XyzE2NsZLylZP7lj2jng83nK/j8lkwuHDh+FyufD444/DbrfD7XavUOxXt9WZmZmOfW8fFFbIZ3h4GM8++yxsNhs3wkgkElBKMT8/j/fffx937tzBxMQEz7IjWB+2SqvVatHd3V1X9oZcLoe5uTlEo1F4vV5ednszxXezXPU7QWv1Kg2gHf0l2UscDAb5Eu74+Di3MGQyGUxNTfEqatXR7NVL8JtNCCQSCRwOBwwGA4aHh/HQQw/B5XJBr9dDrVavscIXi0UUCgVEIhHMzs5ienoa6XS6ZVOZPSjM3UOv18Nms/FBXaVSYXh4GAMDAzAajTwYS6/XQyqV8mT20WgUmUwGsVgM8Xgck5OTuHfvHh8sy+Uyd+HptIA4toSZz+cRCAS4FQ0A+vv7ua+5RqOBTCaD2WxGLpdDf38/ksnkmgmhYHPYBE2hUECtVvPBSyqVcivn0NAQent71xwrlUqh1+t56dPVNOsS6WYoFAoYjUZeUdBgMPCVMNYfRqNRzM/Pw+v1YmZmhisJnfjePihSqRQymQx6vR5Op5Nbe6vbTya
TQSgUQiQSQSqV4mOQoD7Wexej0SgikQgKhQIymQwSiQRu3bqFRCLBC1ZlMpldvtv66DjFt91gPqTpdBqlUokP+plMhlsK8/k83nvvPczPz/MXv/r4en2f5HI5Hn74YQwPD+ORRx7BiRMnYDAY4HA4alaBY77UU1NTeP311+H1ehEOh1s2UvtBUalU0Ov1GB4exic/+Ulu5VIoFDh9+jT6+/t5NRzmr5ZOp3H79m3EYjHup85+w5mZGW7lB+4rf50g29VtjT17Op3mkzwA+Oijj3Dq1CmeEYJZF9kk4+TJkzCbzRgfHxeK7xbp7+/H4cOHodfr0dPTwxVYiUQCu93OK2tWl9VmsPbdyj7/tVCr1TzjiMvlQldXF39G1h9OTk7i9ddfx8LCAi5duoRQKNQx6R0bjUwm48vxBw8e5MYC1j9QShGJRDAzMwOPx8PLFXei4aXRzM3N4fr164jH47wK3szMDLLZLFQqFRQKBaLRaFP6+grFtw1gLzGbYUWjUXg8nhWuDsxayOpj14JFvyuVyjV+pUBFcXO5XHC73Xz5Q61Wr1v6mFUrSyQS8Hq93L+6Uy2+zG9Kr9fD7XZzizxbjler1dwXlWWQSCaTCAQCCIfD8Hg8WFhYQDabRTqdFgPmKqpTEubzeYTDYUgkEkQiEaTTaahUKlBKIZFIoNVqUSqVYLFY0NXVVbP0pmAthBBoNBooFAp0d3fD5XLBaDTC5XJxBY+lMNRoNNDr9dziSSlFqVRCMpncsGx0KBRCIpFAMplsuUkcC6RkAZjVin2t/pAtu69n6SWEQKFQ8H5ZJpPxyW2hUEA6ne7IvpTBlslZ31rLNYa1u2KxKMqV10m5XEapVEI6ncbS0tIaFxxKKfx+P7xeL2KxGLxeL/ftLZfL0Ol0vJ9oRoTi20YwpTaTycDn862Y9abT6Q0trRKJBEajERqNBv39/RgYGFizj1qtxqc//WkMDw+jq6uLWzPWm83lcjnE43HMzs7i7bffRiwWQyqV6tjOR6lUwmAw4KGHHsLZs2eh0+kA3FcmqgMymNwCgQDeeust+Hw+XL58GYuLiysGPqH01obVgGfuOaOjo5BKpaCU8oh7q9WKUCgEg8GA2dlZEEI6sl1uBZlMhrGxMTidTpw+fRqPP/4490evdlmonkQDKzOifPDBBzwHdi0SiQSuXbvGFeB2YTv9oUKhgNPphE6nw8DAAEwmE7LZLDKZDBYXF3Ht2jXRBwgaDmurMzMzeOONN6BWq1d8TynFlStXMD4+jkgkgoWFBUilUnR3d/NsEP39/XA4HE1n7QWE4ttWsJktsxZuRHXqF2adYKUeu7u7a/rlqdVq2O122Gw26HS6NbM5plQzxSyXy3GrZTgcRjKZbNCTtiZMzmq1GlarlSu+APjgxwaxVCqFcDiMYDAIn88Hn8+HpaWluksedzrlchmZTAbFYhGpVGrFJIElZpdIJNDr9TAajbxsJrN0dCpskFrPDUGpVMJqtcLpdMLhcMBut8NgMKxY0gfur0KxCRqzuKVSKfh8vg1dnpLJJEKhEGKxWMsEIrP+lKXZq+W3zNpWJpOpuz9kpd+ZS1l3dzfS6TRSqRRyuRyfWHSqMUGwM7DViVgsBo/Hs2YFuFwuY2FhAX6/H/F4HJFIBCqVCjabja9gWq3WFXn9a12D9Qu7vbIjFN8ORa1Ww2Kx8EaqUCjQ19cHs9mMkydP4pFHHlkzU2PWs1rR8Myak8/nEQqF+FJ8KBTCwsJCyy1Z7ialUgkLCwt8oC8UCrh37x4+/PBD+P1+fPe73xXBV6toxCBPCIFarYbJZILb7cbo6ChftuvE9lrtxmCxWGA2m9cov2q1GmfPnsXY2BgGBgbgcrn4UjyDUsonG+FwGOFwmLufLC4u4pvf/CYWFxe529NqSqXSijzrrYDZbIbFYkFfXx9sNhvMZnNDMlIolUpuOWOrbSxA+ebNm0in04hGo5ibm0M2m+1YNzJBY2EFlFghmVqTYDYmKRQ
KOBwOWCwWnD59Gna7HR//+McxODgIs9lc8/xszGMZIGKx2K667QjFt0ORy+Xc0mW326HRaDAwMACLxYLR0VE8/PDDWzofCyzKZDIIBAKIxWKIRCKIRqM8K4GgdoQspRTRaBTBYBDZbJanhZmcnEQoFFqRGkawOfWmx2GWX5YSyW63AwB3J+lEWIYGq9UKl8u15nuNRoORkREcOnQIJpMJRqNxzT4s5R+Lpvd6vSgWi8jlcvB4PLhx4wYWFhaQy+Xaxrqu0WhgtVphMpm4f2MjAvdYBhKbzYaRkREcOXKEKxz5fJ6X7WYTCYGgEbCqoSzd6UYYjUbY7XZYLBYMDg7C6XTioYceQn9//7rHsCxFTFfIZDK7mmKu5RXfemYILOhFcB+z2YwjR47AarXi6NGjPP2OSqXasMGuRz6fh9/vRyQSwZUrV+D1evmS3Pz8fNsMcNtBLpfzrAJWq3VNTsRCoYDx8XHcuHGDD2os5RELSBRsDFtqlsvlsFgs6O7uRjAYRCaTWbfCm0QigdVqhVqtxokTJ6BWq3Hr1i3E43Ekk0kkEomOKhktkUi4pffUqVM4derUuvl3WRWyaDTKlyuZnIrFIrfiXL58GePj43xZMx6Pt11mF1bQ59SpUxgaGsLY2Bj0en3NAOFGwCZqIyMjOHfuHGZnZ3lwEXN3E9xHjP+VNsrcE1cHXW5EdcBwIpFAqVSCVqtdcR6Xy4Vjx47B4XDgxIkTvOBFrXOxwi3JZBLvv/8+JicncfPmTXg8HmQymV3rE1pW8W2XTnOvYInWe3t78cwzz6woGbodisUifD4f/H4/vve972FqaornrWQvTCfC8seybA5M8a3ueFiBkTfffJPLsNM76u3AZG21WnnOzkKhsG4nzyqMsVKcLHiIBVZls9mO8p1khSfcbjdOnTqFH/qhH6rZJ7BtbILGrLvVlQ1v376NQCCAt99+G2+88Qb/rl1l2dfXh9OnT6O3txejo6M7WsWSVSvTarVwu924desWXnrpJQQCAZ65QCCohhDCq1Wul/2iFixNZHXFV4PBAK1Wy8+zb98+Xqzl+PHj6yq91cWflpaW8P777+PSpUt7ErvSsoqvoHE0onKKTCZDT08PNBoNHn/8cQwMDPB8s7Ozs3xprpMUCaAiW4PBwHOaPvLIIxgeHl7T8bBKOeyzEdWBQ500AWTWLBYkVCqVeFYRJj+TyYTHHnsMLpcL3/nOdzA/P8+Lq2zUxlnGjf7+fnziE5+A1+vFW2+9xZf8OknOTE7r9QtM/h6PB1NTUzzlHpNRtc96u7uNsHbHAtq24mbD0nAB4L657N2u/r763AymkOxVcJCgeVndtoCKC9P+/fvR19fHc8XXA2tf4XAYb7/9NlKpFA4cOAC3283P09PTg4ceeojHCq2G9dvZbBbhcBgXLlzA4uIi5ubmEI/H92RFUyi+goag1Wpx5MgRlMtlPProo6CUIhAIIBgM4tvf/jYmJiYQj8e5Fa1TkEqlcLlccDqdePrpp/HCCy9ALpev6CBYrk7mb7pRJGx1wRHmg9VJlMtlHiiVy+W4NV0ikUClUsHtduNzn/sc9ysnhMBsNm+6vKfT6aDVaqHX6zEyMoLJyUle3lRUelpJPp/n7jksUO3mzZsr0mqxCW6rZGXYLgqFgr/P9S4hV2fOYNlFqvNxs4T/rG2zT3V6ymo/6lwu1/YWdUF9MKVXJpNBpVLxNqPX6/H93//9+NjHPobu7m50d3fXdb5CoYBsNoupqSnMz8/D7/fj2WefxWOPPcbPw6rnsawm1bC2ms1mEY1GcefOHXz1q1/lRZmYe85uIxRfQUNgLxsArtQxPz6n04mRkRGEQiHMzs5yX55O6aTZIKZUKnlVttXf2+12DA0NwWQyoaura8PzscwPrFBJpyhlLHOIWq3G4uIizGYzrFYrrFYr30cikUCj0aBcLqO3txf79u2D0+nkxVbWs8axAUOpVEKv10Or1fJOvBnzUO4VpVIJfr+ft71
AIIClpSXE4/GOmtAClUmt3W7ngZGbtbHVx6rVagwMDEChUGBhYYHnNi4UClCpVLBarbDb7XA4HLDZbDyXKnMhC4fDuHPnDmZmZpDJZDpuNU2wFplMBrlczjPVsImYTqdDT08PLBYLTCbTmjiT9SgWi1AoFOjq6sK+fftgNpvhdDrrPs/qtjo9Pc37i/WyuuwGQvEV7Bh6vR4qlQpPPvkk+vr6MD09jRdffBGzs7Oi4lgVarUazz//PD75yU/ygW89KKUIhUIIh8O4dOkS/viP/3hFCep2JpVK4YMPPoBWq0Umk0F/fz/Onj2LM2fOrNm3WqYGg4FXEGu3Erm7TSaTwTe+8Q188MEHmJqawu3bt5HP5ztm8lWNSqXC888/j5MnT2JkZATDw8N1tTGpVAqVSoWuri78wi/8AqxWK/70T/8Uly9f5unf3G43nnrqKbjdbjzzzDOw2Wx8gscCBC9cuICvfOUrCIfDK9KZCToXg8EAs9mMxx9/HJ///Of56iFbeTQYDFvyP6+eoP38z/88isUiNyTUc55mbatC8e1Q2DJ5JpNBPB7fdjAGi6Rnvm7V1g7m+2Y2mzE0NIRyuYyuri4eFLPar61dYUn8mayZdZHlkZVIJLDZbLBYLJvKg1KKYDCIYDAIr9cLk8nELe2UUj6haEe5sqwAuVwOi4uLkMlkiMfjNWvBS6VSLlPmN81kztxE2O9S3QGz34mV1QXE8nE1lFL4fD7MzMwgEAjwssKdJCOmuBqNRvT29mJoaAgOhwN6vX5DH19WFIRlZeju7sZDDz0Em80Gp9OJrq4u7g9psVjgdrvhdrvhcDhgtVp5H838JSORCO7evcuXjDvJ4s7cSlQqFXQ6HS//Xk02m+VL9e1e5ZLJQ6fToaurCy6XCyMjIyuKJKlUqjVj9GYwX2Gm/FJKeensjc7DYjGSySSWlpawuLiIO3fuNI27o1B8OxS/34+33noLer0eV69erdlx1IPL5cLDDz8Mk8mEsbGxFf6pbBBgHf2+ffvwhS98AYuLi3jllVcwOTmJRCLR1jlqWaBPPB7H17/+ddy4cQNSqRQKhQJ2ux3nzp2DzWbjflLA5opWd3c3d5lQq9XcQpzJZPDGG29genq6LeXKFFapVMp9fDfqQBUKBfc9q+6kU6kUpqamEIlEcPXqVSwsLPDvmA/x0tISTxElrGj3qfYtz+fzba1MrEYikfD39vnnn4fb7cYTTzyBgYEB6HS6ddPmMTQaDRwOBx577DFeKXNoaAgajQbPPvssjh07hnQ6jXQ6je7ubgwPD0Ov13MrHbMkV/v4Mx/fTmqjUqkUPT09MBqNeOKJJ3D69GkMDAysSB+XzWbxxhtv4Pbt2/jggw/4WNOOclotj1OnTqG/vx8mk4lPllgA8HZXvNgkg/17o/NUF6d4//33ceHCBczNzfECV83wGwjFt0NJJpO4c+cO5HI57t27t2mnvR6jo6MwGAyw2+0YHBxcUdObKRtSqZQndH/ssccQCoVw8+ZNLC4uIp/Pt52CVg2lFPF4HJlMhldkY/6+Dz30ED7xiU/w4KvVkdvrodVqoVQq+eDJOpJEIsHTSLWrXJlFjKVt2qgTXa+DLhQKWFhYgM/nw4ULF3D79u0V52fpe+LxeEtGzK9uQ42yxjLZsEwXneZTyla1jEYjTp48iaGhIQwMDPAqbZu9u8zi1tfXB7VazXNOs9zIfX19PPMNqygol8v5qhCDKb6srGwnTT6ASvs2Go3o7u7G6OgoHnvsMRgMhhVjWLFYxO3bt3Hp0iVMT08jGAw2jdLVaCQSCS8iMTo6iscffxwGgwEqlWpN/1drdaweqmN4NqO6OMXk5CQuXrzILb3NIv+WVXyFr96DwfLrSqVSZDKZbctTKpVCr9fDZDLB7/fzf2s0GvT19aGvr4+/aCzHqlKpxOnTp9HV1YUbN27w0puRSKTtBlIWfV0qlRCNRpHNZrnFt1gs4uWXX4bNZuM5EZnMNBoNzGZzzU6
KDcBsGYvJjM34u7u7216u22mvrLRrMBjE+++/D5/Ph+npafh8vhX7MWtaoVBoCbmxfMRqtZq3n+pk8ZcvX+ZK/GbPU2ugZOf53ve+B7/fj9u3b/O23GlU54q22WzQaDR8VWEz2HvLUklJpVJe3IZV0WQpCmUyGV9SFmPdSqqzFrDc6NUZDID7BodgMNg0y+uNRqlUwul0QqfT4YknnkB/fz8OHjyIrq4uKJXKFfIoFAq4ceMG/H4/bDYbb7vrjTHbgbmgJRIJvP/++5ibm8PNmzextLS0IvNIM9Cyii9jKzkTBfcpFotIJpMPfB5WYletVmNiYgIajQaDg4OwWCwAKondgfsDhsVigcFg4FWOFAoFotEoQqEQYrFY23VOALgrQi6XQywW42lf4vE4Xn31Vb5MKpFI8PGPfxxSqRRWqxVGo7GmJZ5ZMmUy2QrXkmKx2FFyBbaWcSESieD69evwer24ePEi/H4/PB4P4vH4Dt7hzsMKT3R1deHUqVM4ffo0SqUSLxHMov5Z3td6YHJlftBLS0t48803MTMz07GK7+rqgF1dXVzxrYf13lsAdUfZCyooFAqoVCpeDbPWKgcLrEokEm1Z/VKpVKK/vx82mw2nTp3C2NgYXC7Xiiw3DFYk6dq1azh06BAOHjy44RizHXK5HObm5hAIBHDhwgVMTEzA4/HsenGKemh5xVewtxQKBV5ru1gs8opClFJkMpmax1BKkUqleHWtTsvwwJaMc7kcfD4fdwORSCS4ffs2TCYTzGYzYrFYzU6JpazR6/VwOBzcIkQIgclk4n/VanXNhOKdRjQa5YFAt2/fht/vRzAYRCwWa4s8sywNm1qthtVqhdvtRrlcRj6fh1wux+HDh2GxWDA/P8+fufp9Y+W02SpNb28vb0esrebzeQQCASwuLiKZTPIl+U5lPWNKJpPhxXpYyevtwFwdFArFmmV8QWfD2obFYsHw8DDsdjt6enpgtVrXzQEvkUig1WphNpuh0+n4akIjKZfLSKfTSCQSiEQiCIVC6+oAe41QfAUPRDqdRjab5QOBQqGARCJBKpVCNBqt6VNUKpUQDAb57DCZTO5ZIuu9gPnnpdNpTE9PA7hvYUun0wgGg9Dr9bDZbDWXOVnpyeHhYXR1dXHllqWscTgcGB8fh9FoRDKZ7PjVjrm5OVy/fh3T09N4++23EY1GMTs72zaFKViQo9FoRH9/P44ePcpdbNjgs7CwgNdeew23b99GIpFYsdrDLEdWq5VXFuzt7QVwf5KWTqcxNTWFyclJJJPJtrSgNYJoNIpr164hFothampq26kGe3p6cPjwYRiNRoyMjGxY1EbQWZhMJhw5cgROpxOf+tSn4HA4MDY2tu54AVQU3+7ubmQyGXR3d/NMGI0cG4rFIkKhEAKBAObn5zEzM9O0/atQfNsUloaELaOv18DZUiZbGt2OFae6cRNCePDLRoosW3btlJRmtWD+k9UkEokVkwGZTMYnE2yWzj61fFBZBgPhG1iBUop0Os3rwUejUcTj8bYLdKn+3VkkN7PysFUBh8OBWCwGQgh/fpYlg1Wtq17CZ1RnEVid/q2TqA7ui0QiWFpa4kFqDGYVj0QiuHfvHlKp1Lav19/fD6lU2rHyFtRGoVDAaDTCbDbDYrHAbDZDrVZvuCpACIHBYEBXVxcMBgOUSuWKaoCrqV7pyWQyvI+oJwCbvSPNvCIkFN82hA2ALPrYYDCsG3Wcy+WwsLCAVCqF2dlZRKPR3b9hAWdxcRHhcJj7AiqVSvT09ECr1fIIcsHWWFxcxJUrV7gVIpfLtYWLw0awPsBkMuHUqVNIpVIoFosYGBjA9evXcePGDaTTaYTDYcjlcvT09MDtduPQoUM4dOjQitRQggpM+Y9Go7h48SLm5uZ4yjyGx+PB+Pg4otEobt26tW3F9/Dhw7DZbHC73cjn8416BEEbwMqq9/b2Yv/+/TyYbSNkMhnGxsYwNDTEA6lXp3msJpfLIZPJwO/346O
PPoJGo9lUl2glhOLbhrAgDKVSCbvdzpOf12qsLLm3QqGA3+/nuQ530gLLAt1UKtWKl7CTYTNpVtecodFouEVeoVBArVZDrVZz2QH3XSfYb8Zm3MJSVIH5WyaTSe6L3m6wd5alemMBlGwCrFKp4HA4kEwmsbi4yIOpkskkL6Wt0+mg1+v5d8zqw6oJdnp7Wu2bXyqVeFYGxuLiInw+H2KxGJaWlrbt6hCNRpHJZDrKBUxQHyw4UqPRQKvV1uUGQwjZcD/2brN+JJvN8jgcr9cLvV6PVCrFs0WsZ/mtDuBkqxXN2H6F4tuGsMhjq9WKp556Cg899BDPIbmaeDyOmzdvIhQKQS6X4+7du7xs5k4hl8tx9OhRDA0N8ZyqMpkMwWCwIwfX6gTkzLWBodVqcejQIVgsFjz66KNwu93c1UGn03F/6vn5eR65Xy6XMTs7i1gshnQ63ZQdj6BxsEDSaDQKj8eDyclJGI1GuFwuPqGsfucMBgPUajVfjjcYDOjr64PD4eBJ6pmyGwqFcPfuXUxPTyOTyXRc7t5q2CQgFArh/PnzKxL6M9LpNHelEX7QglagWCzy4hKhUIi34VgshsnJSbz++us8XaLL5cLAwADPP13t5iOTyWA2m5HL5dDX14dkMrnjusR26TjFt9VN9PXAZmQajQYjIyMYGxtb44vGiMfjKJVKCAQCmJqa4hkadhKJRIKenh5QSuFyuWAymdqy2EK9sATk3d3dUKlUK6roGQwGDA0NwWq14uDBg+jv7+ffsaVXFnGfSCS433Q4HEYmk+n4ZdJO8B9npapZGVu/3w8AcDqdXCmrfueCwSD8fj9PM6TRaGCxWKDX63kfwZS8ZDIJn8+HYDDIc3G2uzw3ggX6saBUgaDVYW06nU7D7/cjFoshGo0iGo1ienoaN27cgNVqxezsLACgq6sLer1+jU+xRCKBRqOBTqeD2WyG1WoVWR12ino6YVYyN5PJwG63ryh3ms1m22bpky0zaLVadHV1rXBxWE/hV6lUfAbn9/vR1dWF9957D16v94Hvp57fptkGUpYaitUnb3TKl1qoVCp87GMfw8jICC9RylAqlXA4HNBoNDCZTADArbqsOMXi4iJefvllXrGtVCphfn4eHo+HW4HblfXaDqWUBwgmEom2es9XwyY62WwWly9fRjabxf79+2EwGLj7AvPLI4Tw3NrMSqxQKNa0sUwmwy0+3/jGNxAIBBAOh4XLg6CpaKaxo5lh/SEbH6rf4VQqhVu3biESieDy5ctYWFhANptFNptFIBDg7jrnz5+H3W6HRqPhKdWqfYt3UpdoNC2r+G6l82WKbzabhd1uRy6XQzwe5/ks22VArE6OvlrxXQ+FQoH+/n7k83mk02n09PTg3r17uHTp0rY7lVYeGCUSCbeOm83mFdbXnUKn0+H06dN49NFHYbVa1yQgX/37MR/eZDKJhYUFTE9P45VXXsHc3BxX8Np9QNisjVFKkUgkEI/HueLbrgFtlFK+pJjNZnmQ6pEjR2CxWKBSqVb45LHqbuxYYG0by2aziEajuH37Nr71rW919IqMoPlo5TFmL2D9Ya0c3JFIBNeuXYPP5+MpD9kxjEwmg/Pnz8NoNOL48ePo6+uDSqWC0Wjk++yELrFTtKziWy+EEDgcDhw9ehSxWAz9/f1IJpOYmZnhFo12qUDEglT6+vrw5JNPwuVycb/RjYLHWNBGdYqxToNZw5gfE8uf2d3dvePXZjNlVl+dBbmxz+rfI5VKIZ1O4+7du3j33Xfh8Xh4YYJ298FkbjwseGK9dl0sFjE5OYnZ2VlMTEwgEAggFou1fdtmk/rZ2Vm88847cDqdePLJJ2E2myGXy/mqUHXRk2pWpy7rhEmUQNBKsCDWUqnEffHZ+MU+bB9GLpfDrVu3MD8/z8cPRiqVwtTUFE/1uNH7Xk9fwILrWYCtXC7nx60OxN4r2l7xBYBDhw5hdHSUCz0UCuGNN96Ax+NBJBJBMBjc61tsCDqdDt3
d3Xj88cfxcz/3c1yR2kzpZVbvaheQToNlmjAajThx4gTcbjfOnTuH/fv378q1WVokpoiwAgS1MjQsLS0hGAzi3XffxZe//GVei77dlTqWoouleWMuKbXI5XJ444038M4778Dr9cLr9fLJXTuTTCaRSqUQj8cxOTmJkZER9PX1YWhoiK9ksA+bRFTDZFQsFtvaSi4QtCpM4WU5djOZzIpsCjKZjL+/TMFMJBJ45ZVX8O677yIYDK7QeZgOwMacRsCUXqVSCY1Gw/td5l661zpGRyi+rDEwcrkc1Go1T83RLrCBTKFQQKfT1ZXmpFQqIRaLIZVKYWFhAR6PZ9NZXzuiUqlgtVpht9vR29vLa56z1E67TTabxdLSEvL5PKLRKHfHoZQiFAohHA7z32q7KZNaDalUCqPRCK1WC6fTCbfbDaPRWPMdrk7J087+vathFttcLsdTak1PT6NQKPDMLsydRqFQQKvVrphssYBIn8+HmZkZBAKBtp8sCAStBPO9JYRgcnISZrMZWq2WG24MBgPP8MLeXeYWx4r47KTrEls5NRgMcLvdGBsb40ZH5orFYk/2Ss/oCMVXsD7JZBIffPABfD4fvvGNb+DmzZtNmX5kp3G73XjqqafgdrvxzDPPwGazrfG13U08Hg/Onz8Pv9+Pixcvrigsks/neQqadnHTqQedToeTJ0/C4XDg+eefx4EDB/b0N2pmqgeZF198kbvQyGQynDlzBmfOnIHNZsPIyAi3+rKgyEAggJdffhmvvPJKx7UxgaDZWVhYwLe+9S0olUq89NJLUKvV3C3v+PHjOHbsGL773e/iq1/9KjeKlEoleL1eHtu0k0ilUvT19cHpdMLlcuG5555DPp9HPp/HzMwMXnzxRczOziKXy+2ZQaJlFV+WbiebzSKZTEKhUHR8tSHmo8v8/ABsWsqwXC4jk8kglUohmUwimUwCqCgZhUJh01yULGdwdaUxvV4PjUZTM31as6JQKNDd3Q2bzQabzYbu7u4Hun+2fMTa6VatZqFQCB6PBwsLC5icnEQkEllx7urzdwpSqRQGgwEWiwUOhwMul6ul2thuwiwsmUwGs7OzvA+QSCQYHh6G3++HRCKBzWbjMiwUCjzVmcfjwdzcnMjiIBA0GdlsFsFgEBKJBIFAAAqFAlKpFKlUCg6HA729vfB6vZiamuLjOTvuQX322UpaIpGA0WhEsVhc4zLFLL6syBJLa8Z8iJm+sJe0pOJbKpWwsLCAeDyO1157DaFQCCMjIzh79mxHK7+pVAp+vx+XLl3Cl7/8ZfT29uLcuXOw2WxQKBQ1G5tGo8GBAwfQ29sLk8mEpaUlxGIxxGIxTE1N4fz58xsqvw6HAw8//DBMJhPGxsag1+vR3d0NvV6PsbGxlnElYU74CoUCCoXigV9OFjAQiURw9epVLCwsbOn4YDDIOy4Wrc+ortDWSbCgCVbkQ6lU7nkH2uywSmPA/UC2y5cvI5FI8JgAJsNyuYxgMIhkMompqam2T4UnELQizJhCCOG+vnfv3kUgEEAoFML4+Djm5+cRjUa5dbc6gP1ByOVyeP311zE1NYXTp0/j9OnTa4rlAPeLuhSLRcTjcdy6dQuvvvoqFhcXsbi4iFwut6d9S0sqvpRSxONxZDIZTE1NoVQqQSKR4Mknn4RCoahb2apVeq+VfVtZYNr8/Dy+/e1vY3h4GJ/4xCdgNpvXzewgl8tht9thsVhgMBiQyWQQCoUQCoVAKcXbb7+9YREEg8GAkZER2O12nD59GmazmQfRsJyg61GduWCvqY5EZTXMH+S+8vk8FhYW4PP5cOHCBZ4ipl7YDLlQKCCdTu95MECzwDI5SKXSDVcyBBXYykA1Xq8XqVSKpz6s9vFNp9MoFosr/MoFgmakevxYr3zuRjnsW5lqpbFUKiEcDiORSCCRSODevXs8hWOjlctisYjbt2/zPL1DQ0MAKgawWnIuFArIZDJYWFjAxYsXEQqFkEgk9ny1smUVX5a6KRA
IgFIKuVyOrq4u2O12HD9+HAaDYUWUfDVKpRIDAwPQ6/X49Kc/jf379/OEzYuLi7h582ZLdvrV+V0XFxdBKcXLL78Ml8uF48ePo6enZ01lsOq8tVKpFIVCAVqtFmazGaVSCdFodEMfP7fbjYcffhgWiwW9vb3cyV4ikaybA7dQKODGjRvw+Xy4cuUKvF4vwuHwnirALJfhwsICMpnMAwe1JZNJTExMIBwOY3p6Gj6fb0vHs6jcTk0vJ9g5WI5eiUSyJsiFuTYIv15Bs1Iul7nr15UrV2A0GuF0OnHo0CEexM5KdMtkMly7dg3Xr1/nBX+awdDSSKr1IfbusiqLO3GtRCKBfD6PGzduQKFQwGQyYXx8fE2xJ7ZvIpHAxMQEFhYWkEqlUCgU9vw3aEnFFwA34QcCAR7ZXigUMDg4iIGBAajVam4ZWo1SqUR/fz9sNhv0ej1isRgikQii0SjGx8cxOTnZsopvuVxGIpFAJpNBIpHAq6++yp+TVWaqVkgJIfz/Op0OAGAymZDL5aDX66HT6TaUhdFohNvthlqtht1uh0Kh2PQ+C4UCrl69iqtXr+LGjRtYWFhAOp3eUwUvGo3i+vXr0Gg0mJ2dres5NoL5YaXTaZ59QSBoBtgkXyBoRapLsl+5cgX5fB7Hjh3D2NjYGsWXle0Oh8MIhUKIxWJtuXrG9KHNYnIelGpl9ubNm4hGo1Cr1TAajTVXlKsrwC0uLjaNXtWyii+jXC4jn88jkUjA6/WCEIIbN24gEomgr68PRqMRcrl8xWyE1ZSWyWRcKVSr1Xz20g5+g8y3z+fzoVAoYHJyEhKJhKc7YchkMuj1eigUCl6pjKVE0+v1cLlcG3YUarWaH7+e3NhyVDabhcfjQTQaxdTUFGZnZ7G0tIRMJrPn+UILhQLPh1ssFh+4VDFbLmYJxgU7y+o2FgqFeNtqls5WIKiXTCbDV+2uX7++wm1saWkJoVAICwsLHbsaxNzvYrEY/H7/muI0rERvJBJBKpXa0wwC7Uo6nUYoFIJCoVgRRFcNK4TTbMWDWl7xZYqF3+/nuevUajWcTieeeeYZDA8Pc8slQyaTwWKxgFKK7u5uUErh8/ng9/tx7969lld8WUR3Op3G9PQ07t27B41Gg9u3b0Oj0UCr1fJ9NRoNRkZGYDKZcOTIETgcDp7gXqlUoqura8NlCeYbu5EvFVuKWVpawvnz5+HxePDtb38bMzMzSCaTfLl1L5c/0uk0stnsiuo3D8JGldcED06timOr29jt27c3ddURCJoRtgI1NzeHYDAItVrNv0ulUkilUrh7925HKnPMFz2TyfDCNAMDA2v8XoPBIObm5hAIBJBMJlcUdBA8OJFIBLFYbNOxH0BDAusaScsrvoxSqYR8Po9UKgWfz4dyuQyfzwej0VjX0h6rY91OLwcLbCGEIBQK8WCWalcHnU4HnU6HTCYDh8PxwJbOWrDfJhAIwOPxwOv1IhqN8mT5zSLvZnoxBVuDTTDy+Tz8fj8WFhaQTCa575tA0EqwwjWFQmFNqs5sNsv9VTu1z2JGBZbSNJFIYGlpiY/z2WwWfr+fG8RyuVxT+Ja2E6vLIrcSbaP4MiUvGo3iwoUL0Gq1kEqlmJ2d5RbfjWYlsVgM8XicZ4loJ4rFIiYmJnDnzh2etYCh0+kwOzsLs9kMv9+Pnp6ehl+fKb5+vx+vvfYafD4fT4zfqR23oLGwwM5oNIqLFy9icnIS4XB4z33HBYLtwMYiiUSCW7durRi7mPWsUCh0pMW3GpazfmZmBm+88Qa3jGcyGbz22mu4ffs2lpaWsLS0tKeVwgTNRdsovsB95ZcpVT6fj1s0q10dasGKN0Sj0bYbKNnSUC0ymQzMZjNyuRw8Hs+OdKSstnggEOC5BkVifMF2YJHLq32nc7kccrkcMpkMwuEwIpFIU9SEFwi2A3NVE2wMG1tisRg8Hg+3jLPxn7k5bJSSU9B5tJXiWw3LHHDnzh3IZLJNl/CLxSKKxSLPMdc
p5HI5zM3NYWFhAQsLC+umIHsQmIKbzWYRDoeF0ivYFsyiq1QqMTExseI9ZenfZmZmkMlkUCqVhHVHIGhzWDDy5OQkT9EHgLs6JpPJjhrPBfXRtopvuVzecrWsTqRUKvGciIFAYI/vRiBYn3K5jHQ6jUQiAb/fvyLgp1gsIpfLIRgM8hyWQvEVCNobZrBi6SMFgnpoW8VXIBC0F9lsFrOzs/D7/UgmkzAajfy7crmMUqmEeDwuVhUEAoFAsC6kEVYRQogwrWwApXTLubGETDdmOzIFhFw3o5Xaaq1g1Wa08oq2ujO0UlttFYRMG494/3eG7coVEBZfgUDQojSjkisQCASC5qa1KzUIBAKBQCAQCAR1IhRfgUAgEAgEAkFH0BAfX4FAIBAIBAKBoNkRFl+BQCAQCAQCQUcgFF+BQCAQCAQCQUcgFF+BQCAQCAQCQUcgFF+BQCAQCAQCQUcgFF+BQCAQCAQCQUfQcMWXEPLFdq44Qgj5HULI64SQECGEEkJ+cpeu27ZyJYQ8Qgj5Q0LIBCEkTQiZJ4T8KSFkcBeu3c5y7SeEfIMQMkcIyRBClggh7xBCPrPD121bma6GEPIry/3Auzt8nbaW6bIMa32O7vB121quAEAI2U8I+fPl9z9DCJkkhPzcDl6vbWXKnm2dT3Y3rr2T19hLCCF9hJD/sjz+ZwghU4SQ3yKEaBt+rUanMyOEuAG4KaUXG3riJoEQkgAwDuAugL8L4KcopV/bheu2rVwJIf8GwCkAfwrgJgAXgF8HYANwlFJ6bwev3c5yPQjgFwC8DcADwADgpwH8DQA/QCn9yx26btvKtBpCyEMArgFIAbhNKf3YDl6rrWW6PKB/DcAfrPrqGqU0vYPXbXe5PgLgLVT6gD8GEAMwDEBHKf3SDl2zbWXKnm3VZi2AVwH8FaX0b+30tdtUrloAVwDIAXwRwDyAkwD+GYBvUkr/dkOvJ/L4bg1CiIRSWiaE7ANwG7uk+LYzhJBuSmlw1bZ+ADMAfotS+ht7c2ftByFEhopcxymlz+31/bQyhJDXAMwCGAUg20nFt91ZVnx/m1L6a3t9L+0CIUQC4AaASUrpZ/f6ftoVQsiPA/gTAM9SSv96r++nFSGEPA3gNQCfppS+XrX9dwH8EgBDIyfAu+LqsLwM8FuEkF9cXnZNE0L+mhBiW/78T0JIjBByjxDyy6uO7SaE/MGy2Tu9vM+fEUJcNa79d5aXy7OEkOuEkHOEkLcJIW/XOOfvE0K8hJDc8jE/U8/zUUrL2xDLA9POcl2t9C5vmwMQRMX6u2O0s1xrQSktomL1KW7n+HroBJkSQn4EwHEA/3hLwtkmnSDTvaDN5fokgP0AdsSyux5tLtNa/AQAPyqK247R5nJVLP+Nr9oeRUVPJXWco34opQ39oGKmpqu2UQBzAP4alWXWzy0/4KsA3gPwawDOoLLMRQF8purYUQBfBvADAL4PwA8D+AAVS4uqar+zAMoA/heAz6DSGO8CWADwdtV+BgCTqJjSf3r5uv8aQAnAz27hOfct3+tPNlqGnSzXqvPtX77nXxJyfTC5otJxyAA4APwGgDyAp4RMtydTAGZUBrqfWv7/2wDeFe30gWRKAYQA5ACkUVme//hOyrTd5YrKu06Xr3URQAFAAMD/C0AtZPrgYxWA3uXj/q1oqw/UVlUApgC8A+AAAB2ATwFYBPAfGy7LXfxxplBZDmTbvrS8/deqtsmWX8z/vMH5pcuNjQL4bNX291FZ1iFV204s71f94/w6gCyA4VXn/QqApep73OQ5m0XxbSu5Vt3vO8v3bBZyfTC5Avg3y+elABIAXhAy3b5MAfwRgO+w62BvFd92kel/BfC3AXwcwI8BuIqKovakkOv25Arg95fPFwbwm6hYgH8JlYnFXwmZNmSs+sfL5z+yk+20E+SKSkzPd3B/rKLLx0oaLcvdTGf2Bq0sszImlv/y5YHl76dRET6HEPL3CSFXCSFJVJZo55e/Gl3+XgrgEQB
fp8sSXD7f91DxZ6zmGQCXAMwQQmTss3wfVlRmG61EO8r19wCcBvBjlNLIFo5rJO0k13+HSqDAcwBeAfBnhJBn6ziu0bS8TAkhH0clqPXvV19nD2l5mS6f88cppf+DUvodSul/A/AxVCxKv7WpBHaGdpArG9//G6X0Nyilb1NK/w0qAUN/kxCyf2MRNJx2kOlq/i6AK5TSa1s4ptG0vFwJISoA/wMV5ffHAXwCwD9EZTL8HzaVwBaRNfqEG7BagclvsF3F/kMI+VlUlma+hIogIqi80Ber9utCJRowUOO6/lX/t6FirS2sc5/WdZ+gOWkruZKKM/vPAPgJWuXkvge0jVwppR5UsjoAwLeW/bL+DYBvbXZsg2kHmf4BgK8C8BBCTMvbZACky//PUEpzGxzfaNpBpmuglCYIIX8N4PNbOa6BtINcQ8t/31i1/XUAvwvgGIBbGxzfaNpBphxCyKMAxgD8fD377yDtINfPo7IisY9Semd527cJITEAf0gI+X1K6dUNjt8Su6n4bpcfBvAmpfQX2QayNr/rEirCttU43o77sxig0hkEAPzcOteb3P6tthRNJ1dCyD8B8Muo+AP91832b1KaTq41+BB731lvhWaS6f7lz/9R47sIgP8HFQt7s9NMMt2IZrCqb4VmkuvNTe51TwK1t0EzybSan1i+5p/VuX+z0UxyPQwgUqX0Mr67/Hc/Ku5PDaEVFF8N1kb6/VT1fyilJULIhwB+gBDyRWaSJ4ScADCIlT/OqwB+FsA8pbTWLKZTaCq5EkL+b1SWNf8JpfT3tnp8E9FUcl0NqaQ4+hiA1R1MM9NMMv1kjW3/DhX/uJ9FZTmxFWgmma6BEGIA8CzuD3ytQjPJ9RVUggU/DeClqu3PLP/9cIvn2yuaSaZYPq8CFcXxFVojK1GL0Exy9QEwE0L2UUqr+9DHlv96t3i+DWkFxfdVAL9MCPlVVDrBTwH4wRr7/VNUlnD+ihDyh6iY6L+IikCrZ7YvouI38h1CyIuozEK0qCxZfJxS+vxGN0MI+QSAblQi5AHgkWX/GFBK/2I7D7hHNI1cCSE/jIry8CqAtwghj1d9HaeUfrSdB9wjmkmuXwRgQSW614dKm/08gEcB/Mi2n3D3aRqZUkrfXr2NEBJFJXBjzXdNTNPIlBDyS6j4FP5vVPx6+1EJwnIA+NHtP+Ke0DRypZSGCCH/AsCvE0LiqGTKeASVbA//ZZWC0cw0jUyreBaVvvW/bOeBmoRmkuvXUCm29DIh5LdRUagfQSVg7nuojGENoxUU398EYEJlCVGFSrT/p1FJp8GhlL5BCPlRVH6kv0LF8vKLqLzksar9YoSQ08vbfxmVPLFRVH6kr9dxP/8MFcdrxv+5/AEanWtuZ2kmuT6DiuyewX1rBOMdVHx/WoVmkutlVFwafhiAEZWO6ioqnVBDO5Idpplk2i40k0wnAXx2+WNExQr1HoDPU0pbzeLbTHJl95MA8A9QmUwsopJi6p9v8/n2gmaTKVBxcwhj9+MkGknTyJVSOrts8PoiKiu/XQDuAfhDVArbNNQtp60rt5FKib9pVATXSi96UyPkujMIuTYeIdPGI2S6Mwi5Nh4h052h1eXaNoovIUSNSnTieVQcsh8C8I9QccA+SCld3MPba1mEXHcGIdfGI2TaeIRMdwYh18YjZLoztKNcW8HVoV5KqPiE/R4qaTNSqCRD/qFW/GGaCCHXnUHItfEImTYeIdOdQci18QiZ7gxtJ9e2sfgKBAKBQCAQCAQbsZuV2wQCgUAgEAgEgj1jRxVfQsgXCSFbNikTQgYIIZQQ8oUG3gtdTu+0nWN/ghDydULI3PJ5vtao+9rm/bSLXP8zIeQWISROCEmSSunEnyWVMom7ipDpztBGctUQQv4ZIWSKEJIhhNwjhPwJIWSgUfe3hXtpF5m+vXz86s/PN+r+tng/bSHXVec5TQgpL59v110b20WmQgfY8JzbkishxEkI+ReEkA8JIVFCSJAQ8iY
h5PsadW/r0U4+vjvJj6GSu/cNAD+0x/fSTqgB/HtUiilQVFKpfBmVsofrVX8RbIyQ6c7wRwD+JiopfT4E0IdKasM3CSEPU0qTe3hvrcw1AH9v1bbZPbiPtoMQIkelxLYf9/POC7aH0AEazwlU8v7+Z1TKJCtQSbv3NiHkHKV0x1LFCcW3Pj7N8sgRQlbnmRVsE0rpD6/a9DohpAfA5yCUtG0hZNp4CCEaAH8LwL+ilP7rqu1+VKpjPQHgtT26vVYnQSm9uNc30ab8Q1Tyo/8xgF/d43tpdYQO0HjeBTBCKS2yDYSQ11Aptf2PsIM5knfdx5cQ8n8RQi4QQsLL5u2LhJC/sc7uCkLIlwghAUJImhDyrVpLi4SQn1le0s0SQpYIIV8lhFgadc+NTp68E7SiXNchBKC46V67gJDpztCCcpUuf1aX94wu/93zWIkWlGlL0KpyJYQMAfg1VCxohUae+0FpRZkKHaDxcqWURquV3uVtRQDjqBS/2DH2osMeQGXZ8IdQMXN/COBbpPYs6h8DGEalfvT/iYpp/HVSWcIBABBCfhfAf0Alx9w5VGa5zwB4hWzg10ju+7p8sQHP1AwMoAXlSirICCEmQsgPoFIR50v1HLsLDEDIdCcYQAvJlVKaAPBfAfzfhJBPEkJ0hJCDqFTAugrgzXoeeocZQAvJtIpjhJAYIaRACLlGCPl8ncftFgNoTbn+PoA/p5R+u879d5MBtKZMm50BtLhcCSEKAKcA3NrqsVuCUrpjH1TKz9ENvpeg4m7xOoBvVG0fQMU/8SMAkqrtTyxv/3zVfiUAv7HqvGy/v1m1jQL4YtX/+1Gxgv3GFp/JA+BrOym3TpIrKjXP6fKnDOB3hEzbQ6btJFdULL7/oUquFBW/tG4h023L9DcB/DQqJeCfR6WsKQXwa6KtPpBcfwyVcrq26ucCIBMyFTpAs8p1+djfQWXM+vhOym8vXB1OLJvV/cvCKQA4C2C0xu5/QauWGCil76HS6E4tbzqLyg/8p8sWLhmpRK5eQqU++brRgZTSOUqpjFL6mw15sD2mheX6HQAnAZwB8LsAfokQ8tt1HrujCJnuDC0q199CRaH4JVQUtR9HJZn7K4QQbR3H7yitKFNK6W9QSr9CKX2HUvoNSukPAPhfAP4JIURXz3PvNK0m1+Vl6C8B+FVKaaDuB91FWk2mrUKry5UQ8iMAfgXAP6eUfmcrx26VXQ1uI4T0orIs+BGAnwUwj8oP9M8B7K9xiH+dbcz/w7b8d3qdS1q3fbMtRCvLlVIaQ2VJBqhEyOcB/Doh5D9SSr2Nus5WETLdGVpRrstuDb8C4AuU0q9Wbb8EYArAF1DJnLEntKJMN+C/o5I94zCACzt4nU1pUbn+FoBFAP+TEGJa3qZa/mskhGQppakGXGdbtKhMm55Wlysh5DkAXwPwVUrpP23kuWux21kdngFgBPC3KKUetpFUoqZrYV9n2/jyv0PLf58GEKmxb6jGtnakneT6ISozzUEAe6mkCZnuDK0o18PLfz+o3kgpvU0IiaL2wLKbtKJMN4PuwjU2oxXlegDAkXXOtQTgG6hMLPaKVpRpK9CyciWEPAXgzwH8FdamNtwRdlvxZT8CjzIlhIyg4jfiqbH/DxJCvkjvpxF5AoAb9y0Bb6DiD9JHKX1jx+66+WknuX4ClUHv7i5fdzVCpjtDK8rVt/z3UVTyzlbftwl7P5loRZmux48CyAC4vsvXrUUryvXnUWmT1fwkKgGuZ1Db0rebtKJMW4GWlCsh5BQqk7E3AfwY3aXsGbut+J5Hxfz+J4SQfwvAiUoS+HnUzjChB/C/CCF/gEry6H8B4DaAPwEASukdQsi/BPB7hJBRAO8AyALoRcVH5Y8opf+71o0QQvpRSfL/m3X4TR1AZSYNVAoE9BNCfnD5/+9QSoP1PPwO0nJyJZU0Kz8F4KXl+9QD+H4APwPgDyilC1uSQOMRMt0ZWk6uqPhMXwXwbwkhZtwvYPFrAGIA/kv
9j78jtJxMCSEfR8V95C9RKVhhREU5OwfgV/ZyOb6KlpMrpXS8xrFPLv/zHboqfdQe0HIyXd5X6ACN7wPGAPw1KisR/xrACUII/57uZH7vB42O2+iDGpGHqCSCn0BFiDcB/DAqvh2zVfsMoGKh+geoOOoHAaSXhTRY4zo/jkqEdQpAEpVUGL8HwF21z+rIw4HV2zZ7jnU+T+6kDNtVrgDGUBn07gHIoWKJeBcVi49kK/IQMm1OmbaLXJf3tQL4t6gMDpllGf8PAKNCpttqq/tQKf7hXW6rSQDvA/g7e9FO20WuGz0XmiSrQyvKFEIH2Ik+4Cc3kCmtVxbb+ZDlGxAIBAKBQCAQCNqaPa84JBAIBAKBQCAQ7AZC8RUIBAKBQCAQdARC8RUIBAKBQCAQdARC8RUIBAKBQCAQdARC8RUIBAKBQCAQdARC8RUIBAKBQCAQdAQNKWBBCBE50TaAUko232slQqYbsx2ZAkKumyHaauMRbXVnEG218QiZNh7x/u8M25UrICy+AoFAIBAIBIIOQSi+AoFAIBAIBIKOQCi+AoFAIBAIBIKOQCi+AoFAIBAIBIKOoCHBbc0AIQSEEKhUKrjdbigUCkilUhBSn/9zoVBAsVhEIpGAz+dDuVze4TsWCAQCgUAgaB8IIVCr1ZDL5TCbzTCZTGv2KRaL8Pv9SCaTXPfaTdpK8ZXL5bBarXjqqafQ3d0NuVwOqVRa1/GJRALJZBK3b9/G0tIS8vn8Dt+xQCAQCAQCQfsgkUhgsVhgNBpx+PBhHD58eI0BMpvN4q233sKdO3e47rWbtLziSwiBRCKBWq2G0WiE3W6H2+2GzWaDQqGARFKfN0cikeA/QFdXF1KpFFKpFEqlEigVWUUEAoFAIGhWCCGQSqXQ6XQbrvaWy2UUCgWUSiXkcjmUSqVdvtP2hBAChUIBhUIBq9UKi8UCl8uFvr6+NftmMhno9fotGScbScsrvkqlEmq1Gi6XCydOnEBvby+eeeYZrvjWK9R4PI5EIoH+/n6USiUEAgF88MEHiMfjKBaLQvkVCAQCgaAJIYRAJpPBaDTi5MmTMBgMkMlkNZXfXC6HhYUFpFIpzM7OIhqN7v4NtyEKhQJOpxNGoxGnT5/G4OAgjh07hqNHj675HeLxOK5cuYJ79+4hk8ns+r22rOLLfHoVCgU0Gg0sFgvcbjdcLhdsNht3dajX4qtUKqFSqRAOh+FyuSCRSKDVapHL5UApFZbfBsBm5PX6XVNKQSlFuVxuadmz55VIJOu2R9a+Wvk59wrWF2zHctAubexBYe1SIpHUfD+r+0D2Dtf7Hm8H9nuId0KwGUzpVSqV0Gq1cDgcsFgskMvlNdtoJpNBLpeDXC6HQqHYgztuTyQSCXQ6HfR6Pex2O5xOJ+x2O7q6utbsK5fLoVKpIJVK69bRGklLKr6EEGg0GigUCgwODqK/vx+HDx/Gc889B5PJxBv9VgSqUqkgk8kwNjYGrVYLj8eDdDoNr9eL2dlZRCIRFAoFFAqFHXyy9qTaHaWvrw9qtZpvW49yuYx0Oo18Po9wOIxwOLyLd9w4qtuqxWKB2Wxe89ylUglerxfxeFy0sS0il8shl8thMBjgcrm2pPy2Sxt7UJiljC1RajQa/l25XEa5XEY2m8Xc3BwKhQIcDge3qO2E4pDP5xGPx5HP5xEKhfbEIiRoDdj7b7FY0N/fj97eXpw7dw5Op3PdFd9IJIL33nsPPp8PHo8HgUBgD+68/VAqlejv74fT6cTJkydx4MABmM3mvb6tmrSk4gtUzOpqtRpdXV0YGBjA0NAQDh06BJVKta3zyWQyyGQydHd3w2AwwGg0YmBgAACwtLSEZDIpfIG2CVNyFQoFbDYb9Hr9ppbfcrmMWCyGTCbT8gOfQqGASqWC2WxGb2/vmu+LxSJ/VtHGtoZEIoFcLodOp0NPTw9ksvq7tHZqYw+CRCKBRqOBRqOB3W6HwWDg3zFLbzwex+LiIsrlMkwmE6xWK1Qq1bb
7243I5/OQyWRIp9OIx+Md/dsINqb6/Xe73ejv78fBgwfhcrmgVCprKr5LS0u4d+8eAOxI++1UZDIZzGYzuru74Xa7uf5Ui71exWlJxZdFDZrNZhw7dgyf/OQn4XQ66x70KKXI5XIoFovcosE6cfYiWa1WnD17Fj6fD5lMBtlsFtFoFLlcboefrjVg8pJKpev6UjGkUinkcjkcDgc+85nP1OV/XSwWMTs7i3A4jPfeew9er3cnHmPHkUgk0Ov1MJlMOHz4MI4fPw61Wg2dTsdllslk8Nprr2FqagqBQACBQIAHYAhqw4Iiuru7YbPZMDY2hqeffnpLA1mpVILH40EkEsHly5eRy+WQy+WQTCb3vGPeSVa/uzqdDvv374fZbMbx48fR09PD9y2VSsjn8/D5fMhms4jFYvj4xz+OoaEh6PX6FUpyo4jH4xgfH0coFEIymUQ8Hm/4NXYaQghXvNRq9YZjU7lcRjKZRD6f5+ORoD5YUPvo6CjOnTsHu90Oq9W65RVfwfaRyWRQqVQwGAzQ6/U8aG01lFIUi0XE43FEIhFkMhmug+36Pe/6FRuARCKB2WyG2+3GsWPHcPbs2S018nK5zIWezWZRKpVgMpmgUqlWpEU7c+YMYrEYLl++jPn5ed7xCyqDp8lkgkwmg0aj2VDxZf48g4ODeOaZZzAwMMBdS9ajUCjgypUr8Hq9uHfvHi5dutSSygghBAaDAVarFYcOHcKZM2dgMBhgt9t5m00mk1haWuI+lGyZVyi+tWHvqFwuh81mw9DQEB555BG88MIL0Gq1dZ+nWCxiYmICgUAA2WwWs7OziMfjSKVSLdnW6mX1u2uxWHD48GE4nU58+tOfxvDwMN+X9ZF3797FjRs3EAwG8cQTT+D48eOwWq2wWq0Nv7+lpSUolUrMzc3hxo0bDT//bsBcu1gu040mZOy9L5fLe6YItCqsLY+MjOC5556DXq/f61vqOORyOVd4DQYDdDrdmrGd+eoXCgWEw2EsLS0hnU6jUCgIxXersGW4YrG4pUCLTCaDe/fuIZ1OIxKJIJfLYWxsrGai5Z0M4Gg26gnCYvu53W4MDw9Dr9fDZrNtuD+z+HZ3d8NkMnFr3UayZcEzrS5/SimfLE1OTsJisWBwcBDd3d3c4q1QKDA6OgqpVAqpVIp4PI5kMolcLicGwVWwQBaLxcItlSdOnMDIyAgUCsWW2otEIoHJZAIhBPv370c0GsXs7CxisRjy+XzbBrQ6HA6MjY3xd9dgMGBkZAQmkwkGg2GFDNm7azab8dhjj/HMNwaDgRsKGg1771vJYsful927Wq1Gb28v9Ho9RkZG0N3dve6xuVwOFy9exL1795BKpZBOp/l31QF+oi+4T3VAq1wu33TVUfBgbBSYbjabMTIywvsVh8OxYgLC9LRsNotwOIwLFy5gcXERc3NzSCQSe7KK3rKKL+sI8vk80un0lhp9OBzG9773PQSDQZ7O5Ad/8AcxOjra0S8P67iZNW09WRBCcPz4cTz77LNwOp04ePDgpm4mbGBg7iSdAgtcW1xcRDKZxI0bN/DUU0/hxIkTfDlIqVTi7NmzePLJJ2EymZBIJODxeLC0tCQGuyqqo7cHBgbQ09ODz3zmM3juuee2FaEtlUrhcrngdDphMBhw5MgRvPPOO5iYmEA8HuerQe0EIQSHDh3CZz/7WbhcLhw+fBgKhYIrDquXKNk763a78bnPfQ7lcpkv4XdyX7kamUzG+0ypVAqLxYLjx4/D5XLh3Llz2L9//7rHJhIJvPjii3j33XextLSEYDDIv6u2Au+VdawZYcYZtppYa2ld0DgkEskKn+nqd39gYABPP/00XC4Xzp49y5MLAPcnbsxV9M6dO/jqV7+K27dvIxaLIZvN7olxoSUVX0opMpkM4vE4PB4Pbt26taVOmFl2IpEIotEostlsxy8rMysFyz5QKwUJQyKRYGBgADabDVarFQaDYU+SULcK1ZHxqVSq5gxXqVRCoVBAqVT
yQEvBSqRSKYxGI7RaLQ9ksdls0Ol02z4nG0B1Oh0sFgucTidGRkYQCoUwOzuLTCbTdqnOZDIZ1Go1NBoNz86wGSwATlAb5s6kUCig1WphNpsxMDDAfU43WoInhKCvrw8jIyOwWq0r+t5ischjS0Kh0AprcKdSPVY5HA6+gtZJBpXdojojE3NRXD3p3bdvH1wuF+x2OzQaDZRKJf+uWCxy94Y7d+5genoaS0tLiMfje7qi2ZKja6lUwtzcHBYXF+H1evHSSy9t6XjWmQBAb28vLBbLDtxlayGVStHX1webzYannnoKZ8+e3XAywSqziCACwW6h0+lw8uRJOBwOPP/88zhw4EDDfEz1ej1UKhWefPJJ9PX1YXp6Gi+++CJmZ2d5IKxAUAu2AnbmzBnYbDaMjIxApVLBaDRCqVRu2kbVajWef/55fPKTn1yTzjASieDixYvw+Xw4f/48pqend/pxmp7qseozn/kMnnnmGR6jI2gsUqmUr7D9wi/8Avr7+9fE52g0mhWTvmri8Th3b/jKV76CcDiMubk5ZLPZPV29aEnFl5nOc7kcstnslvPwsehCmUwGm822Q3fZGlQvH1ssFtjtdgwMDGBsbGzTgDW2tNfIJU/22+RyOWQyGaRSqY63xnc6bElTq9XCZrOhp6cHLpcLLperYUucMpkMUqkUZrMZQ0NDKBQK3LrRLrAlYRZ0tdGzsSVK9i4yK1s7yaORGAwG3i5HR0e5+whruxshlUphs9lgsVjWFOwIhUJYWFiAVCqFyWSCRqPp+FzfrC0yH/WBgQFhgHkAmA7A2mq1HFl/0d3djYceeghDQ0O8/2BUH7e6f2C+vZFIBHfv3uXuDXvtQtaSii8APltgnfNWEAED99FoNBgdHeXp24aHhzEyMrLpsuZmAXDbJZVKYWpqCqFQCK+//jqmpqYwOTnZVkvNgq3hcDjw8MMPo6enB08//TQcDgfcbjeUSmXD2iCbwLGOXqlU8gldO/iyqlQqnD17Fvv27cOjjz6KsbEx6HS6deWXTCaRSCRw584dvPPOOzCZTDh37hxPRSiUjJUw44FKpeLL8FsJ0mOKMrAyx6nVasUjjzyCffv2gVKKgwcPYnx8HFevXt2xZ2kFmFyrK4AJtgfTAcxmM44cOQKXy8W/k0gkPD/v0NAQD06vbtOrAzurqZ5As4p5zaBztaziyzqHUqm07dmDRCJpih9ht6g1gCuVSvT09PAgtQMHDvDG3UjqVVzz+TwWFhawuLiImzdv4tatW6KWeoej1+sxPDwMt9uNsbEx2Gw2mEymNYNdPW1sMyWWdfTtZkGSyWTYt28fHn/8cezbtw/d3d0bKrD5fB7JZBJzc3P4zne+A7vdjk984hMwm83cOrQRDzJRbUWjBLOasWp2W/XRX8+QIJFI4Ha7YTabEQgEoNVq4fP5cP369ZaSTyNhihZ7V0VGh61TLS+mAzgcDpw+fXpFOkMma7YivNXg9HK5zDNvFQqFpnEZa1nFt1G00+C2HnK5HAcPHoTD4VhTbUmn02FsbAwWiwV9fX0N9ZXKZrPIZrNcia2n0SeTSUxMTCAcDuPevXs8+FDQuTA3h1p5IrfSxkwmE/r6+qDVanlJ006BEAKj0Yiuri7uz7yRlYwVrkgmk/B6vYjFYnj55Zfhcrl4kYvVfUkul8Pi4iLS6TTm5+e3PWFlBSzC4XBLFq9oJCywSCqVYnBwEAaDAclkEoSQLfWr7YBSqeQZWB599FEMDg6ir69PKL1bhJV21mg0MJvN0Ov1GBsb41Zdh8OxYn82qdso01M1lFKEw2FkMhlcu3YN169fx9WrV5uqnXa84sto55dHLpfj6NGjOHz4MMxm84p8xUqlEjabDRqNBm63u6GVmFgKk8nJSfzlX/5lXQpsNptFMBhEOp2Gx+PpuIGvHXIXNxqZTAa9Xg+dTsf/MrbSxgYGBvCxj30MXV1dsFgsHaf46vV6noWlOvK6FsxSk0wmsbCwAEIIXn31VV5
yXKPRrJkk53I5zM3NYWlpCe+++y7m5ua2da8sbiOTyXT8ag8hBCqVCkqlEoODg3A4HKCUQq/X48qVK5icnGwqhWInUSqVPJPLI488grGxsRXL8oLNIYSgt7cXTzzxBKxWKwYHB6HRaBqqA5TLZYTDYYRCIVy+fBlvv/02fD5fU7XTjlR8pVIpDAYDNBoN+vr60NvbyxPZtyPM2mOz2dDV1bUiylgul8NoNPJgtY0ol8vw+XxIJBJ1LVvEYjHE43FMTk7i3r17dSm+LONGOwZwFAoFZDIZBAIB3LhxA1arFS6Xq+NyG9eLyWSCyWRCb28vD/5ZvYTMljtZBadoNIqFhQXuS1Yul2GxWGAymbgPZrX/brvCJlAsB6/FYkF3d/em5XNXU11RzOfzoVAoYHJyEhKJBEajccUgmUwmcevWLYTDYczOzsLj8Wzr3tu5D3gQWP9sMplgt9thMpk6ot9Qq9UwmUywWCwYHh6G3W5HT08PrFbrllLssYlUIBCAx+PB4uJiR64msiwMdrsdLpeLl31ulA5QKpX4is/c3Bx8Ph9isVhTueZ0pOLLZo4scGB4eBi9vb17fVs7BkvUv3//ftjtdtjtdv5ddYW0zTrRQqGA8fFxTE9PI5lMIplMbrg/q0I0MzODK1euIJ/Pb3qv7Rx4yKyTU1NT+OY3v4ne3l4899xz6O7urnsZqZPo7+/H4cOHMTw8jNHRUZhMpjVWWuZ/ZjKZMDAwAI/Hg0uXLiEYDCKfz4NSygM2DAYDzGZz3blrW5nq0utPPfUU3G439u3bxycA9cLew3Q6jenpady7dw8ajQa3b9+GRqNZkb4ok8lgZmaGT3arCzFshXbuA7YLIQRarRaUUvT09EAikWBubq4jFF+TyYQjR47A6XTiU5/6FK8QtlnF0NVEo1Fcu3YNi4uLGB8f5wpZp2E2mzE4OAiXy4WHH354S4GY9egA5XIZfr+fuy1OTk5yI0Sz0N69/zpIpVLuM8iKNVTPHFmJvWQyyX1MW62GOqsgpNPpYDKZoNPpoFQqoVQqtxS4Vi2LZDKJxcVFeDweJBIJJJPJDYNYstks0uk0QqGQyIWKldYzlo6v3YojNAqWsoi5JTDf3tV+qSy4haU6Y1UcS6USrzRkMpm4xVir1UKtVq/p4Nlvk8lkEA6HEY1GUSwW16SXahWqg1KYhUyv1/PyrluhOjKbEIJQKASZTAaNRrPG1YG5KaXT6ba31hJCeMJ+1r9uJyiYUopUKsVTbLKgw9W/E1NOmL9lp2QyUCgUMBqNMJvNsFgsMJvN20qtVyqV+CpCPp9HoVBoyXf7QWG6Qa1ATKbn5HK5moWWstksFhYWNtQByuUyIpEI0uk0EolEU477Han4yuVy9PT0wO1249ChQzh06BC3grAOPhaL4YMPPoDf78fs7CwSiURdFstmgHWORqMRJ0+ehN1uR39/PwwGw5b8GlfLIhAI4LXXXsPk5CTi8TgSicSGx7NZXjNFcwpaB6fTiWPHjqGvrw+Dg4M1FQu2rb+/H3q9HhMTE3jppZcQjUYxPDyMrq4unD59Go8//ji6urowODgIhUKx5jwsb/S9e/fwve99j5cyZ8pvq8EUJ5PJhFOnTvGqYBqN5oGshMViERMTE7hz586aTATVZXVrDZrthkwmw9jYGJxOJ7dAGo3GLa/cFItFTE5OYnFxEU6nk/tbWiwWsQqESlaXkZER9Pb2Yv/+/ejq6trSqoWgPsrlMmKxGNLpNGZnZ2v66GcyGbz22mu4ffv2hjpAtQLdjHSU4lud+0+r1fJgmepykszyk81m4ff74fF4kEqlkM/n9zzpcr2stva4XC7odDooFIpNk9YD9xVWloYklUrB5/PB6/XC7/cjEAjU5eogWEl1+2PWMpb/ULAWpVIJvV7PrbS1LJXMeqFWq1dYg1jFLKfTCYfDAbvdzv36q5U1Zs3M5/NIpVKIRqO8nefz+Za2yFcrvywV0YNaCSmlomzuMhKJBCaTCQ6
HA2azGRqNZksuS2w1jZUj9vl8vM3LZDJQSju6b6gex9g4rdVqt106WyqVQqFQ8NLwIh/1WnK5HNLpNJaWljA/P7/m+2w2C5/P1/I6QEcpvmxwtNlscDqdcDqda1J3MatFOBzG+fPncefOHczNzXHrTyvAlCvm37dv3z709/fDZDKtu8zJltvYs4fDYRSLReTzeSwuLuKb3/wmPB4PPB5PS8mimVCpVDCbzRgZGcG5c+dgsVhWlH1uVQWrGWCDpEajwfDwMAwGA86cOYN9+/ZhYGAALpdrzUBX3eZnZmYwOzuLGzdu4Jvf/CZisRjC4TC3YAoEq2HZch577DHs27evZhurRbVbzfz8PB9rJiYm8Pjjj/NKbt3d3R2tmDHXBlaemOWQfpDzHTlyBA6HgxtyPB7Pliu/tissG0MgEMCFCxfw+uuv13Rj8Pl8SCaTLa0DdJTiyzIYMH8h5vNWDZuFp1Ip3L59GxMTE8hkMi3lr8asYBqNBiMjIxgbG1tTZrAWLOtAKBSC1+vl5Uo9Hg9u3LjBo+VbxfLdbLAqQ93d3Thw4MCKtFyCjdlsUlBtHbLZbJDL5RgZGeEFWYxGY81zsja/tLSE2dlZTE9P4+bNmx0Z7b0enWx13AiJRIKenh4MDw/D4XDUbGPrUS6Xkc/nEQgE4Pf7MTU1hZs3b8LtdiMWi61YhexUWPYBFqPyoG46KpUKDocDMpkMLpcLpVKpYTnr2wG2mhOLxTA3N4dr1661rTGmIxRflmi9r68Pjz76KBwOB06cOIHu7m6e05Yt76dSKSwtLSEUCvGo8Hb98aspl8sIhUIIh8O4cOECLly4wKvixeNxYf3aJTqlvdUDq/FuMBiQSqU2TEVWvcpx9uxZZDIZDA8Pr8g1y4IK2fJyPp/nxRnef/99XLx4EYuLiy1tyWgUEomET57tdjvP3lAul5FMJrkbSCf3B5RSxONxhEIhaLVaXk1wPQMDk1c6nUYkEsHi4iJefvlleDweTE5OIhKJiAnXMoQQ9PX14dSpUxgaGsLY2Bj0ev0a314mU+aqJJVKuauImLA1BhYzZDAYIJVKeZBlJpNBNpvlq+StRMcoviaTCf39/XjsscfgcDhw/PjxFYUcWLR9Op1GMBjkS/2dooSwZQ6v14sLFy7gz//8z/mzd4oMBM0FS/9mMpmQTqdBKV03mr06fdeZM2dqnq9QKPD8k/F4HNlsFrdv30YgEMDFixdrLu11KswfUqfTwWazccW3VCqtyE4iFN+K4ms2m5HL5WoGTjKYzFhRkOnpabzyyiuYm5tDJpNBqVQSim8VfX19OH36NHp7ezE6OlpTrkymqVQKwWCQr6oxBU3wYDAlVy6Xw2w2r4hLiUajiEajPENGK9ESiu9Wcs3WOtbtdvNcoIODgzWrNrHUW3Nzc3jnnXfg8XgQi8VatnOvDtopFApQKpWbBktUy5gFV7BzsQFPINgNKKU832YqleKrM6zS0HaWPBOJBI9GnpmZ4QpILBbD4uJi27Xv7fQBDIVCAb1ej8HBQTz11FM8OjuXy+HixYu4d+8ekskkj+puN9nVQ6FQwNTUFO8fWQCWUqlc40sOAOl0GslkEnfv3sW7777Lx5jqlbROlON6MLmWSiUeZMra7mqZ3rt3D5cvX4Zer8dTTz3F0xx2so/0ati4LpVK1w2qZjoAW7lgSq/FYsHp06dhNpv59snJSUxNTSGZTCIYDHIdoRXyb7eM4ssCgLbamAkhOH78OJ599lm4XC4cPXq0ZnaDVCqFQCCAS5cu4ctf/jK3CDX7D7gRpVIJ+XwemUxmRSqo9QY+trypVCqh0Wj4szNLhPDtFewmN2/exNTUFA4ePMhLbVqtVl5haKuDms/nw1tvvQWPx4Pz588jHA7zzrrVLBb1stU+gKHT6aDVamG1WnHixAmuaCQSCbz44ot49913sbCwgEwmwzPAdBq5XA7nz5/HO++8g2g0Cp1OB5fLha6urjVp3lhu04WFBbz77rs
1xxhhoVwJizFhEzc2/hNC1sj0/fffx1e+8hW43W7s37+fZ38Qiu992GoE039qyWa1DsAy5gwNDeHzn/889u3bxy3qL730El566SXeD+TzeW4obHa3yJZQfFUqFaxWK6/QtJXITolEgoGBAR4VypLar4bNUvL5PJLJZEun7GEz5Ww2i5mZGRBCsG/fPu4HWWvJiBUMMBgMcLvdGBsbW3Ge2dlZPsgJq4RgNygWi7x8rdfrBQAsLi6iVCptuVwpAJ5XMpvNrrBWtiPb6QOqYZYflv6pentfXx+Gh4dBKUUymeQpkDqxX2CKGStytHoCQCnlSsHi4iKmp6fh8XgQj8dbeozZDeLxODweD+RyOcLhMDQaDfejXk+mqVSK/wad2B7XQyqV8jLXdrsder0earV6xWSrlg7AysEPDg6iq6sLBoOBFwCy2+0YHBzkblDMNY2l52vm9t0Siq/b7cZTTz0Fu92OU6dOrfDNrQer1boibVS7U62svvjii+jq6sIXvvAFPPbYY1wWq5FKpejr64PT6YTL5cJzzz2HfD6PfD6PmZkZvPjii5idnRUV2AS7zsLCAr71rW/Bbrcjm83C5XLh7Nmz2Ldv317fWtOynT6gHtRqNZ5//nk8+eSTePnll/Hqq6/C5/Ph9u3bHWn13YxSqYT5+XkEAgG8/PLLeOWVVxCLxYQv7yZQSnH58mXMzMzg8OHDyGQycDgcOHXqFLRa7RqZptNpIdMNUKlUeP7553Hy5EmMjIxgeHh4jT5USwdgro9qtRp9fX1c6WUr6QMDA7z4TzgcxsWLF+Hz+XD+/HlMT0/v4RNvTFMqviqVii9pEEJgsVjgdrvR09ODkZGRLXfazMLBzlcN8x/KZrNIJBJt8fIwa08mk+EVqNhMbD2llc32WAYMm83GUz0RQtDV1cXLN7Oyj6zkoxjwHhwmV/ZXTC7uk81mEQwGQSmFx+NBuVxGNBpFOp2uy3rZidTqA3w+H0KhEJRKJQwGw5p3t9qlbD1Yjlmz2QyXy8X7iXv37vGlzk60tBUKBaRSKaRSKcRiMd4mC4UCgsEgL4Y0NzfX9MvAzUI8Hkcmk4HFYsHCwgIAIBKJoFgsrpEpgE2NWmwJPpvN8owEndJWmYWWrX7rdLo1fr61dIDq41eXiWbWX+Z2EgqFsLCwwMvEazQaric0G02n+KpUKm7NMRgMPKp4eHgYer0eVqt1y7n3VpfWZJRKJSwsLCAajeL999/H+++/z62a7UD10i7zvdnoRWcyYulLlEollEol9u3bhy984Qu80ymVShgfH8fVq1cRi8Xg9XpFR/4AZLNZvPHGG7h9+zY++OADTE5OIpFICJkuwwasWCyG733ve7hz5w5kMhlu3ryJo0eP4uGHH97rW2xaWB8QjUbx8ssv4+bNmzh9+jROnTrF4xpYO1MoFDh69CicTueG51QoFJDJZDh69Ch0Oh0mJyehVqsRDocxMTHR1EucOwGlFDdv3kSxWOTjFetLy+UygsEgkskkpqamuLuYYHPYmHXv3j28/vrr0Ov1+PDDD6FQKNbIlI1XG+Hz+TA+Po7FxUW8/vrr8Pl88Pv9u/Q0ewub1KrVav7+1mK1DsD+Xf0dg8VbsYA2q9WKRx55BPv27QOlFAcPHuR6QrPRVIov+3FGRkbw6KOPwmq1cl8+k8nES70+aNlNBqUUsVgMwWAQk5OT+O53v4t4PN42FkxKKYrFIgqFworI2PVgDVwqlUIqlfL0URKJBI899hifIbNIW5/PB6Didyk68+1TLBZx+/ZtXLp0CdPT0wgGgzxPqqACq3Tl9XoRjUbx0UcfIZlMbqqkdTqsDyiXy5icnMTi4iKsViseeughRKNRzM7O8v6OBbE4HI4NA62YIcHtdkOn00Emk2F6ehpKpRJ3797tOMUXqChV2WwWMpkMGo1mRfaBdDrNfdU3Wsmpzl4kAt3ur1pEo1FMTU1BLpdjdnYWEolkjUzrkVcikeD+wFNTU7zsbqcglUp
5Vof15LVaB9gIlv2BwfoEs9mMQCAArVYLn8+H69evN91YtueKb3Vy5OPHj8Nut+PEiRPYt28f9Ho9DAYDn801Oj0JIQQGgwHlchnDw8MIBoOYnZ1FMBhsuh/qQSgUChgfH0exWMSRI0dw+PBhaDQamM3mTdObAeD5UdkyJqUUR44cQblcxszMDIBKp7K4uIh8Pr8rz9ROsHygwWCQR3q3y+SrkSgUCh6gwfzUqpfjBOtDKUUikUA+n8eNGzegUCjWWHyVSiX0ej0CgQD6+vrQ19e34Tmr86OfOnWKK3/BYBDz8/OIRqO78GTNAQvskUgka4ImmWvDem50crkcBw8ehNPpxLFjx+ByuWCxWITyuwzLv82C2iQSyaYyXe888XgciUQC8Xi85cvuNhvV7hCDg4MwGAxIJpMghGBxcZGvijQDe674VldcOnPmDAYGBnDkyBE4nU5eqWknr82qvAwNDSGbzUIikeDDDz9sSr+U7cIUX2aZtVgssFqtMBqNdc3qWB6/ag4fPgyLxYKPPvoIkUgEgUAA4XBYKL7bgCm+4XAYiUSibVxtGo1cLofdbofNZsPo6CjGxsbQ1dW117fVEjDFN5FI4ObNm9znP5lM8lUglsKILf/Wo/iqVCpuoQwEAtz1KRaLdZziu934ELlcjqNHj+LIkSM4dOgQXC6XSMVVBSv60YjzpFIp/h50krV3NyCEcJ1tcHAQDocDlFLo9XpcuXIFk5OTQvGVyWSQy+UwmUzo6+tDT08PBgYG0NPTA4PBUDPX7k7AghBYmg+j0dh2HQ5z6QCAmZkZfPTRRzCbzYjFYtzKo1AoeGWWemB11FmtepPJhKWlJUQiEVF6U9BQ1Go1TCYTLBYLhoeHYbfb0dPTs62UZoJK0v9QKMRLODPFN5fLwev1glIKu93O0xc5HI4N+0TWj5fLZV5gZGFhYUV1J8H6EEJgNBr5aoZarV7jg8nSohUKBXi9XkxPT2NhYaGtViYFe0t1G4tEInW/t6x/VigUMBgM3JgJAN3d3RgYGEAymcShQ4cQjUaxsLCw5zUS9kzxValU0Ov1GB0dxdNPP42enh48/vjjsFgsPKvDTkMIgVarBaUUPT09kEgkmJubazvFt1Qqwev1cp/cSCQCvV7PoztHRkZgMplw5MgROByOus5pNpthNBqh0Wggl8vh8/mQz+fh8/lw7do1fi2B4EFhbdPpdOJTn/oUHA4HxsbGVgQR1YtYPq68/2wiXO3zL5VKcfXqVdy9exfZbBbxeBwjIyPo6upaU+myGo1Gw90e2PkppXC5XBgfHxeK7yZIpVK4XC6Mjo7C6XRyOVa3VVZSPhqN4sqVK7h48SLu3r3bNBa0VkG8/+tT3cZu3LiBa9eu1XVcT08PDh8+DKPRiJGREWg0Gp7bd2hoiE+iCSHwer341re+hWAwuKfZTXZd8WV+ukajEV1dXXA4HHC73bwe/HoFJrYKpRSpVIoHdpTLZe4rLJFIoFAoVgQSKBQKqFQq6HQ6WK1WSKVSngy7HVKeMBkkEgnu1J/NZnnmjEwmA7fbzX+DjQY64L5ju1qthsViQT6fh8ViQTabFemlVsHavFarhdls5p2CoD4UCgWMRiPMZjMsFgvMZvOa1DqC+tkoBSELTAsGg/B4PNDr9VhaWoJWq4VWq103MIZZefR6PY/wjsVioq2vA+s/dTodTCYTdDodr6y3nnLGMvREIhH4fD5EIhFh8RXUBdOHIpEI5HJ5zb6zVCrB7/dzd6V79+7Vrfv09/dDKpWuqUKoUCi4RdjtdqNYLHIdbC8nIbuq+DKFV6PR4NChQzh8+DD279+PM2fO8FnCRhGHW6FYLPIoZuZ/ZTKZMDAwAK1Wi56enhX+wyy35cDAAJ588kn4/X588MEHiMfjbZWbcnFxEeFwmJd/1Ol0mJ2dhcViQalUQjAYxMDAAPr7++s6n8FgwMjICMxmM+bm5qDVanHjxo0dforWobrN79+/H4cPH8bIyMiurGi
0C3q9HiMjI+jt7cX+/fvR1dW1o77/nUq5XEYsFkMikeCrRPPz8zx378mTJ3mwca0+WqFQcBeUUCgEg8GA2dlZEELapv9sBCyg22g04uTJk7Db7ejv7+cufrVgv43P58OtW7fw3nvv8bzJAsFmFItF3Lp1C1KpFAaDAUajcc07XCwWeQW8K1euYHx8vK739vDhw7DZbHC73WtifNhkrr+/H3q9HhMTE3jppZcQCAT2NI5q10dfFkDR1dUFt9sNh8MBq9W6qYVxq7Ak936/H8lkEqlUiiu/pVIJ3d3dK4paSKVSbrHo6enhbhC5XI5bSFqh865+nlpQSlf435ZKJUQiERBCEA6HYbVat+SfK5VK+aRF1EevDWvzZrOZL2UKGW0OqxrE/ND1ej1vYyy7CEvRt1GbF9QPU6Ti8TgopbBYLPB4PKCUIpfLrZD3aggh3Jqj0+k2VOQ6mep2bbfb4XK5oNPpNo1rYakpWZGMVhiPmgnWR7CUXtU5aNsdpg/5fD6k02m+ssPe43K5zOsaxONx+Hw+LC0t1XXuSCTCdSy2wl69ms5Whs1mM0wm04riZHvFrlt8LRYL7HY7jh07hqeffppbEBoNy2Rw6dIlLC0tIRwOo7e3F16vF3a7nVeEs1qtvFqJTCbD2NgYtFotPB4P0uk0vF4vZmdnEYlEmrYKCYNVsTIYDHC5XHUpAswiqVKpEI/H4fV64XK5duFuO4PdbPPtBnNt6Ovr49XCZDIZX7ZjaY7i8TiMRiNcLtemE4pOGOQaASt6MzU1hWw2i+HhYZw4cQIGg4ErDYLtUZ3J6KmnnsK+ffvQ39/PlQLBzqBWq+F0OlEqldDT0wOFQoFQKIRMJrPXt7bjFAoFXL16FXfu3IHNZoPdbodCoYBGo0GpVOJltL1eL+LxOCKRSN3nzmQyvGKb1+tFPp/nelWzsqtvGSEEGo2GK2YjIyM7pvWzpbqpqSleoSUWi8FkMiGdTmN0dBQymQx6vZ5H0cpkMnR3d/OlgIGBAQDA0tISkslk0+dWZR2qTqdDT0/PljpRmUyGXC7XNmWbm4XdbPPtBsscwnwgq1cTWOnRaDTKLRNOp1MoZA2iWCzy0rCpVApAxf93syI4gs1hVjCNRoORkRGMjY1BrVbXFRshZL99mFHIZDLBbDYjn88jkUh0hOJbLpd52ed0Oo1CocB9bwuFAvx+P9LpNHd12ApMjiyFIXMb7XjFly2BqVQqGI1GmEymLZcdXg8WtJXNZpFKpXjHkEgkeHUX5nAdjUZx7do1LC4uQq/X84ThLKMDc3dQqVSwWq04e/YsfD4fr+vNcl82K2q1GkajEQcOHMALL7ywpYYnkUj471Kvf69ga4hBq34IIejr68OpU6cwNDSEsbEx6PV6KJVKXhc+Eong+vXruHHjBvbv388DNVlubjHBuA8hZMVyOisCwIqlMJeuRiGXy/kAaLVa1+QMFtQHczHJZrNcsehUwwTTI9jSebVhh5XjtVqtK1wYqzGbzTw7DFvOv3LlCk8LVyqVkM/n27qdVverVqsV/f39yGazuHXrFiKRCC5dugSPx1NXXmqWx1sqlcLv94NSirt37yKVSsFkMsFgMOzSU22dXVF8WUUPZvliClYjBqZyuYxisYh0Or2iChFTfKtTZrCckiwq3OFw8KU7qVQKhUIBvV7PMzucOXMGsVgMly9fxvz8PO98mhWWUmh0dBQvvPACdDrdlo4XisLOICKvt0dfXx9Onz6N3t5ejI6OcotYoVBAOByG1+vF5cuX8eabbyIajeLIkSM8HWKjgmTbBUII79vkcjlkMhkymQyi0Sh34WrUihYresNSVlqtVsTj8RWGCUF9sFLdqVQK8XicK76dKEemR8jl8jU556sNVutlxjCZTDCZTIhEIshkMvD5fIjH4yiVStx/OpFItH07Zf2q2+3G0aNHkUqlYLPZsLi4iIWFBSSTybomWEzfkEqlCAQCKBaLmJmZQSaTwejo6C49zfbYFcV
XoVDA6XTCbDbjwIED2LdvX935YgFwB3QWzFJNKpVCOp3G3NwcLl26xKMKs9ksZmZmalbCyufzWFhYQCqVwoULFzAzM8MDtFj0IYsary7T2yqIeu+tAQuaLBQKLdfGdgvWhle3Zda+WRGGhYUFvP/+++jp6eEW30aXOG9lZDIZRkdH0d/fzwNRA4EApqameOaaWgMdixvYigV9dSn4kydPYnZ2FktLSx0/CWSBbax9bibTfD7PM/HcvHkT09PTHZcjnb3rarUaLpeL54vt7u7m+7DV2u7ubhiNxg3ffblcjp6eHmg0Gpw6dQqDg4Ncj5idnUUsFkM+n2+ZgPbtwGTK3CN7enqgUqlw+PBhqFQqTE1NcRen9Y53u90YHh7mNQFY9h2TyQS9Xs/3ZXobm1w0Qx+wK4qvRqPB4cOH4Xa78eyzz+Lw4cNbyvVKKeUCq3ZdACr+t8FgEO+88w6+/OUv8x+LUsrTvaxuvOl0GteuXYNEIsF3v/tdSCQS2Gw2dHd34/u+7/t4gBsAXslE+LYJGg1z0clkMrxdizZWH8w1KZ/PIx6P4/r165ibm8Po6ChGR0eh1WohkUiE4ruMUqnEmTNn8OSTT/Lglps3b+Kb3/wmt/LUCmhhS8gqlYrLcjNlTSqVwuFwoLu7GwqFAkNDQ3j77bdx5cqVpg4O3mmYsrEVmbKxyuPx4K//+q9x/fr1jpMhk5nRaMSJEyfgdrtx7tw57N+/f81+EolkhVxrodVqceTIEZTLZTz66KOglCIQCCAYDOLb3/42JiYmEI/HuRtQu8Pkkc1moVKpsLCwgJdeeolXcawFIQTHjx/Hs88+C6fTiYMHD66YIDP9jhl0CoUCMpnMCpeqvRzrds3HVy6X8yIRW3V6zmazWFpaQj6fRzQa5Sl3KKUIhUIIh8PcKZul6dgISik/B7MQs+URr9eLiYkJXgo1mUwiHA7zKGeB4EFhM+BMJoPZ2VkEg0GEQiHRxuqEWX+qAwZlMhlUKhVsNtuK4jSCCoQQqFQqaLVanhrOarWit7cXCoUCfr+/ZoU1JtfBwUGo1eq6Es8zowPrr/1+P0+P1snIZDLI5XJYLBYMDQ3VJVM2VuXzeaTT6Y4IxFoNc2Gw2+3o7e2Fy+WC1WpdYVXcCiyPMgCebo8Zt5xOJ0ZGRhAKhTA7O4tMJtP2BgkmD5VKxfP5DwwMYGxsbN1jJBIJBgYGYLPZYLVaucvoapiVNxwO486dO9wVYq+t6S2RO8Xj8eD8+fPw+/24ePHiig6aWXVZOo7twrIZvPXWW5icnOQzRpYdIh6Pd9xMW7AzlEolZLNZzM7O4ktf+hLu3r2LO3fuIBwOizZWB1KpFH19fXA6nXC5XHjuuee4tUetVqOvr48rFIL1cblcePbZZ5HP53Hu3Lmak67tyLVUKmF+fh6BQAAvv/wyXnnllQfun9sBg8EAs9mM06dP4wtf+AKsVqtoq3Xgdrvx1FNPwe1245lnnuHKViPR6/VQqVR48skn0dfXh+npabz44ouYnZ1FLpfrCINErX51I6xWKywWC+Ry+brtNx6PIxwO48KFC/jKV76CcDiMubk5ZLPZPXV5aErFl1m+mJk8FArB4/FgYWEBk5OTK5bk2D6rXSC2SrFY5E7uq31bqu+nmWGRqblcDvF4vKY8mM+eSPi/d6y2+N65cweRSKRjg1a2CrP4sqhim83Gv2NKWnXbZsFbbKktn88LOQNcduVyGTabbUOZ1JLrelBKkclkeHn0ubm5pvHt2wtYFg2dToeuri44nU7s27ePp9IU/fDGKBQKdHd3w2azcZfErbhK1gMramE2mzE0NIRCocCDZDuFjfrVWlTrEqtXLEqlEsrlMpLJJJaWlrC4uIg7d+40jQtJ0ym+2WwWb7zxBqanpxGPx5FMJnkQRrXbAYN11g/aqVYr0Ol0mv+QTElphU47lUrB7/fjvffew7/8l/9yTedACMHDDz+Mo0eP1p3wX7BzlMtl5HK5FT6+gvq
o9o1ky5bsnV3dpm/evInx8XFMTU3hwoULiEajdblEtTsssIVSyv/WYj25bkR1eqhmGOj2CqlUip6eHhiNRjzxxBM4deoU+vv7YTQaoVAoRP9bByxwTaFQQKFQbGhh3C6r/a+VSuW6gbXtTK1+daN9a/0OrAJcNBrF+++/jwsXLmBubo4HDTbDOLdrim+1Q/NGloVCoYCpqSl897vfRSgUQigUQjqd5ml30un0jnairaLk1iKXy/Flxmw2u2a2yqy8TqcTAOBwOBr2Uq+XdUNQm+qJVidbw7YLa7csFeF6UErh8/lw7do1zM7OYn5+ngesCramzG4F1r7ZKlqnWtgJITAajeju7sbIyAgee+wxGAyGTQOwGKJfvT9usYDWnZwssIwbO6FcNxOrS76vZquBwdXnKJVKiEajCAQCmJycxMWLF7mlt1na8a4ovrlcDnNzc0gmk3j11Vdx69atdffNZDL44IMPuMU3kUjwVDvMfC6oDct6kUwmAdROAcWyWZhMJoyPjzesRGY8Hsf4+DjC4fCWK7+0O+3cgbYCrPgMyyWby+VEPyLYFSiliMfjkEgkmJ6exocffoiBgQF0d3dv2C9Eo1HMzc3B7/fzIguiXxU0gvn5ebz77ruwWCyYmppqeJnsYrEIr9eLWCyGmzdvYmlpqeEFch6UXVV8A4EA4vE4TCbTuvsWCgVMTEwgGAwim802daW0ZoNZq5m7Ri2uX7+OcDjMq7w1SinLZrMIBAI8Kb5gLZ20ZNZMMMU3FovxojYCwW5QLpe58WZ6eppXHnzkkUc29FNlVQm9Xi/Gx8fXzbohEGwFSinm5+dRKpUargMwyuUyYrEYMpkMPB4PQqFQQ8/fCHZF8S2Xy7x8cCAQQCKR2HDfRCLBE0gLGks6nUYoFIJCoeCW4UZQLBZXVIESCHYTtnSXzWbh8Xi4VbdcLmN2dhbRaBSZTKaprA67SalUgsfjwcTEBA82UavVMJlMUCgU66YjEjw4LGVmLBaD3+9HLBbbdMWhUCjwyVokEkEsFuvYfjWTyWBxcRGUUly/fn1Dw9mDwlaX7969i3Q63ZapzHZKB6gmmUyiWCw2bfq9XVF8i8UiwuEwCCEIBoMbWr5aKZisFWGdaKPznG5UXU8g2GlYkvSlpSWcP38ewWAQxWIRxWIRt27dgtfrRSKR6Ni2WSwWce3aNcTjcV65raenB4cPH+aVsFjuckHjoJTy/LterxflchkDAwObtsNMJgO/3w+v14uFhYWOrnoXjUZ5gZpgMLjlOgBboVwuI5/P8/oA7Rh/sVM6QDXNrgvsanBbMwuiU2ABJ4LdgyWgj8fjWFpa4jNhVoxFlCuuDUvLF41GEQwGN1waZhkEAoEAPB4PAoEACoUCSqUSlpaWkMlkOjqNWblcRiQSgUKhgEajgUqlQrlcRldXFzKZDMxmM69W+aAwa2U8HheualhZvSqbzSKZTCIUCqFUKkGr1UIqla5RQFg/zT6dPG6yQiiFQgEKhQJKpXLHrsWykcRiMb5q1G59htABmjCdmUDQTpTLZUSjUUgkEoyPj0OpVHIlzev1rlB+BStZXFzElStXMDc3B6/Xu2EQBpOp3+/Ha6+9Br/fzxVflru3HQexeikWi5iYmMCdO3d4xLbL5YLP54PZbMbc3FzDFN9isYi5uTlEIhH4fL6Olflq2ERudnYWb7/9Nux2O06ePAmDwcBLvQrWEo/HMTU1BYlEglu3bu24nJhimEql9rzCmGBnEIqvQLCDsPKtzK9qbm6OW38CgUDHK2TrwZaIWanyzQqulEolLtNAIIClpaW2XKbcLkye1SgUCiwuLiKTyUCr1TbM1aFUKvEyxauLAXUyrI0mEgl4vV5QSpFKpfhkeHXRlU639DJKpZLIuy1oKELxFQh2EEopwuEwEokEkskkPvroIz6YZbPZtvUjawTz8/OIx+O8jvxG0cdCplsnEong2rVrUCgUuHnzZsOC28rlMq92WV1ls9NhMpmYmEA
ymURvby/UajUcDgcUCsUK+U9NTcHv9yMSiXREuVyBYDcRiq9AsINUW9pisRjm5+f3+I5ah2g0KlI47SDZbBY+n2+vb6NjYMGWwWAQ8Xicuz0UCgUolcoVrjws9SfLLCAQCBqHUHwFAoFAINglyuUyCoUCQqEQzp8/D71ev8bHNxqN8iqDIkBQIGgspBG+hYQQ4aC4AZTSLXvjC5luzHZkCgi5boZoq41HtNWdoV3aaq1grb3y+W8XmTYT4v3fGbYrV0BYfAUCgUAg2DNEYKtAsLs0tladQCAQCAQCgUDQpAjFVyAQCAQCgUDQETTEx1cgEAgEAoFAIGh2hMVXIBAIBAKBQNARCMVXIBAIBAKBQNARCMVXIBAIBAKBQNARCMVXIBAIBAKBQNARCMVXIBAIBAKBQNAR7IjiSwj5YjtXHSGEDBJC/oIQEiWEpAgh/5sQ8kgDz9/u8vsdQsjrhJAQIYQSQn5yg31/mhAyQQjJEUImCSH/xzavKWRa2e8nCCFfJ4TMLe/3tQe8bsfLlRDiJIT8C0LIh8t9QpAQ8iYh5Pu2ec2Ol+nyfv+ZEHKLEBInhCQJIVcJIT9LCJFu87pCrmuPOU0IKS/vv+WCVkKmfL+3l79f/fn5bV5XyPX+vmZCyL8jhMwv6wGeBx23dsri+0cATu3QufcUQogVwLsADgH4ewB+ePmr/00I2d+gy7St/Jb5WQBqAN/aaCdCyE8D+AMAXwfwDIA/B/AfCSF/fxvXFDKt8GMAhgC8ASDegOsKuQInAPxtAN8A8EMAfhJAFsDbhJBnt3FNIdMKagD/HhWZvgDgPIAvA/jSNq8r5FoFIUSOSv/qf4BrCpne5xoqsqj+/H/bvK6QKypKLyr61hkAvwbgLIBfApB4kIuLPL5bhBDyawC+CGCUUnpneZsWwF0A71BK/9Ye3l5LQAiRUErLhJB9AG4D+ClK6ddW7SMDsADgFUrpT1Rt/2MA5wA4KaWFXbztpqYemVbvt/xvD4DzlNKf3NWbbSHqbKsmAElKabFqmwzATQB+Sum2LL/tSr1tdZ1j/zuAZyml+p28x1Zkq3IlhPwqgB9BZcL2qwDk1W1YsKV+9W0AMkrpx3b5FluSLcj19wF8P4DDlNJGGGoA7KKrw7I5+7cIIb+4vMyaJoT8NSHEtvz5n4SQGCHkHiHkl1cd200I+QNCyNTycfcIIX9GCHHVuPbfIZWl8Swh5Doh5NzyMsTbNc75+4QQ77L5fIIQ8jN1PN7jAG4zpRcAKKUpAN8B8Ox2lotqPEM7yw9M8dqEUwC6Afy3Vdv/KwArgC11MEKmW9uvXoRcAUppdLXCsPz/cQBr7nszhEw3JARgW8qZkOuK6wyhYkH7BwC2bUAQMt0ZhFy5QfHvAvijRiq9APDAStoW+XEAN1B52ewA/h2APwGgB/AKgD9EZVnrdwkh1ymlLy8fZ0Fl6fAfAwgC6AHwiwDeI4SMUUqzAEAIOQvgTwF8E8AvoKI4/TsAKgBT7CYIIQZUzOdqVKy3MwA+DeA/EUKUlNJ/v8EzlADka2zPLZ9vCMBknfLYKu0gv3o5uPz3xqrtN5f/HgDwvxtwnU6S6W7S0XIlhChQmbxda+BpO06mhBACQApAB+ApAD8B4F816vzLdJxcAfw+gD+nlH6bEPKpBp6X0YkyPUYIiQHQALgF4MuU0q828PxAZ8n1xPL5/YSQvwDwN1DRv84D+H8opTPbPjOltOEfVARBV22jqAhOVrXtS8vbf61qmwxAAMB/3uD8UgC9y8d+tmr7+6g0ClK17cTyfm9Xbft1VBrB8KrzfgXAUvU91rj2vwKQBmCt2iZBxVxPAZwS8ltffqv237d87p+s8d2vLn+nWrVdtrz914VMtybTGvt6AHxNtNXGynV5/98BUAbwcSHT7csUwLPL+9Blef6OaKsPJldU/PzDAGzVcqn3/EKmNb//TQA/DeATAJ5HJS5lxfMKuW5NrqjET1FUYlH+Byr+vT8CYG75o9+
ObCmlu57O7A26cklwYvnva2zD8vfTqPwoHELI3yeVqN4kKktd88tfjS5/LwXwCICv02WpLZ/ve6jMRqp5BsAlADOEEBn7LN+HFRVr4nr8PiqK7p8QQoYIIU4A/y+AweXvd3JppB3k12wIme4MHStXQsiPAPgVAP+cUvqdBp66E2X6HQAnUQlu+V0Av0QI+e0Gnh/oILkSQiyoKEu/SikNPOj5NqBjZLp87d+glH6FUvoOpfQblNIfAPC/APwTQoiuEddYppPkyvTTuwB+mFL6BqX0zwD8LQB9qEzgtsVuuzpEVv0/v8F2FfsPIeRnUVEuvwTgHy7vLwFwsWq/LgByVGY6q1kdtWpDZaaxnm+Tdb0HoJTeJYT8KID/gErjAoDLAF5EJdpwcb1jG0DLy28LsGcyY6VMLct/ww24RvV1GO0s092kI+VKCHkOwNcAfJVS+k8beW50oEwppTEAHy7/901CSB7ArxNC/iOl1Nugy3SSXH8Llf70f5JKUCZw/16NhJAsrcSsPCidJNP1+O8A/iaAwwAuNOicnSTX0PLfN1cp4pcIIXEAx7Z74t1WfLfLD6Py8L/INhBCBlfts4TKj2Crcbwd92c3QEWgAQA/t871NvTRpZR+nRDyvwCMAMhTSu8QQv4TgHuU0vmNjt0jmkp+dcJ8eQ9ipeLLZpIfNeAaD0IryrQVaFm5EkKeQiXl3l+hkuqwWWhZmdbgQ1QG7EEAjVJ8t0sryvUAgCO4r1RUs4RKhoe/2YDrbJdWlOlm0M132XFaUa43N/l+26vrraL4arA25+hPVf+HUloihHwI4AcIIV9kMwRCyAlUOsnqH+1VVPLIzW93uYdSWkLFgR2EkB5U8nj+6+2caxdoOvnVwQVUXsQfRcWZncH8097boevWSyvKtBVoSbkSQk6hojS8CeDH6B5Fg69DS8p0HT6BiiJxd5evW4tWlOvPAzCt2vaTqAQNnsGD5fRtBK0o0/X4UQAZANd3+bq1aDm5Uko9y/dzlhBCqu7nFAADgA+2e+5WUXxfBfDLpJJ38LsAPgXgB2vs908BvA7grwghf4iK6f6LAHxYOTt4ERVF9TuEkBdRmZ1oAYyhEozy/Ho3QipJv/8VgHdQaUgHUYmUvAng327/EXeUppEfABBCPoFKtKhjedMjy35HoJT+xfLfAiHk11EpWOFFRfn9FIDPAfhZSmmtzBq7ScvJdHm/A7hvNVcD6CeEsPt+h1Ia3PzRd5SWkyshZAzAX6MyUfvXAE4QQvg5KKUX63z2naIVZfo3UBmYX0JlwNWjks/zZwD8AaV0YQvPv1O0nFwppeM1jnty+Z/v0L3P49tyMiWEfBwVn/6/BDALwIjKROIcgF9pkOvIg9Jycl3mV1DxG/4LQsgfLR/z26j4Nv9ZXU9eC7rNqLiNPlg/IvG3Vm37yeXt+1ZtfxvAu1X/VwP4T6ik4UigUu1jcPnYL6469kdQ+RFyqCijnwVwBcBfrdrPjMqPN4OKP0wAlUCKn9/k2WTL1/cvX+MOKn5TGiG/zeVXdX+01qfGvn8PlUjWHCqZM/6BkOn2ZYr7Edy1Pk8KuW5drlXPVlebFjKtS6ZjqCgS95bvxY9K+qQfBSDZqkyFXDeXCxqb1aFjZIqKr+srqLje5AAkUcmO8He2006FXNfs+/2oWHezqLhY/AkA+3ZlSylt/8pthBA3KkFov00p/ed7fT+thpBf4xEy3RmEXBuPkOnOIOTaeIRMd4Z2lGtbKb6EEDUqUYvnUVl2fAjAP0LFMfsgpXQnMy60PEJ+jUfIdGcQcm08QqY7g5Br4xEy3Rk6Ra6t4uNbLyVUfEZ+D5V0GqyU8A+1yw+2wwj5NR4h051ByLXxCJnuDEKujUfIdGfoCLm2lcVXIBAIBAKBQCBYj92u3CYQCAQCgUAgEOwJO6r4EkK+SAjZskmZEDJACKGEkC808F4oIeSL2zjOSQj5F4SQDwkhUUJIkBDyJiHk+xp1b9u4p5aX6/Kxby8
fv/rz8426vy3cS7vI9CcIIV8nhMwtn+drjbqvbd5Pu8hVQwj5Z4SQKUJIhhByjxDyJ4SQgUbd3xbupV1k2jTv//L9tLxcCSFPriNT9nm8Uff4/7P35+FxXfl9J/w9te87asEOYSUJUiIpSiSl7mZLpFrd0WIr7YyT2GPH6iTzOk5sdyaJneSdtD1OnExed489zps4aTuO40yeSV7L1tJaKKpbnZYosimKG0isJLYq1L7v633/KJyjAlgFFIACUFU4n+eph1Lh1q1bv3vuOb/zW+u8npaX6epn+bxa+5wtNwe0W4zvbnAS5Xp1/xHl9n4yAL8I4ENCyEuCILy1nxfXBtzGw12uFvbhOtqFn0G51uH7AH5qn6+lnfguyh2t/hnK3cN6AfwGym10HxUEIbGP19bK8Oe/sXwG4EyV9/8Q5Xbv2y76f8Dh8+rusedzAFd8N+cjACNCRWFvQsh7KNe3+4co18PjbJ+4sP8F/tuJrwirXcMIIc/v98W0A4QQFYC/AuD/EAThX1e870W5fudTKBdZ52wd/vw3EEEQYigbaBiEkD4AhwD8jlDuOMrZOnxe3T32fA7Y8xhfQsgvEUI+IYSEVkMHrpByl55qyAgh3yaE+AghKULIW9Vci4SQv0UIuUUIyRBCAoSQPySEmBpxvYIgRIR13WxW//8mgK5GfEcjaDW5tgKtKFOhuVrlVqUF5Spefa1v+RlZ/XffcyVaUKYtQZvI9WcBEAD/aRe/o25aUaZ8Xm2vOWA/Jux+lN2GP4VyCMGnAN4i1XdRvw5gGOXWlX8H5bCDi6TcNhgAQAj5lwD+Dcp1514C8A8APA/gHUKIuNZFkM9jXb611R9ACJGh7E6a3Opnd5F+tKZcjxNCooSQPCHkNiHk1To/txf0ozVl2uz0o4XkKghCHMB/BvD3CCFfJoRoCCFHUG5PfAvAB/X86F2mHy0k0wqa+fkHWleulfzPAD4TBGFiG5/dDfrR+jJtRvrRmnLd+zlgJ23fttN2b93fRSiHW1wE8HrF+/0ot6+7h4r2lCi7FAUAr1YcVwTwv607Lz3uJyreW9OaD0AfgML6z9b5u/4Fyn2rv7Cb8mt3uQL4TQB/E8CXALwM4M9Wz/dPuUx3PlYBOAH88X6M0XaTK8oW33+Dte01rwDo4DJt/ee/neS67txnVs/197hM+bzabHLdrzlgz28OyjuLt1DuvV7C54vIVJWb85tVzrkM4Lur//03V48bXL3Jla8YgG/Xujk7+E1/bfW6f2OvBvlBkGvF+f4cQBqAhst0x7+pKSfoVpQrgN8GEAXw9wF8EeVkl1mUrSpqLtOG/bZ9ef7bVa4A/h2AHADLXsuzjWXK59UWnwP2NNSBENKDslvQBODvAjgL4BSAdwEoqnzEW+M9GltrXf13DkB+3UuLcueRhkEIeRHAHwP4Q0EQ/lkjz70TWl2u6/ivKF/z0V38jk1pM5k2Da0o19Wwhl8D8E1BEH5HEIT/IQjCnwL4GsqLTcNKA23z+lpOphvQFM8/0PpyJYTIUU7K/J4gCIFGnnu7tLpMm5U2k+uuzwF7XdXheQB6AH9FEAQnfZOUs6arYavx3s3V/w6u/vscgHCVY4NV3tsWhJBnAfx3lHcj60tv7DctK9cNEPbgOzaiHWXaDLSiXOkEvKYUlCAIs4SQCMoZ8/tJK8p0M/b7+QdaX64vATCiSZLaVml1mTYr7SjXXZsD9lrxpTchT98ghIygHDfirHL81wkh3xI+LyPyFIBuAJ+s/v19lE36vYIgvL9bF00IOQPgdZR3VD8jNF+GZ0vKtQZ/HWU3x509/t71tJNMm4lWlKtn9d8nUK45WXndBgCuXfreemlFmdaiWZ5/oPXl+nMAAgC+twffVS+tLtNmpZ3kuutzwF4rvpdQDnr+E0LI7wBwoFwEfgnVK0xoAfwFIeQPUC4e/dsox9X9CQAIgnCfEPKvAPw+IWQUwA8BZAD0ALiAcrzKD6pdCCnXNryPcqzLb9a
6YELIGMoTRwDlLO6ThBD2d6E5alC2oly/gLL7+DWUi1XrUZ6oXwLwa4IgJLcigF2g5WS6euxhAIdX/1cJoI8Q8vXV//+hIAj+en78LtKKcv0RytUbfocQYsTnDSz+Kcpxv/ttUWs5mbbA8w+0oFwrjrcC+AqAfysIQn6z4/eQlpQpn1fbbA7YreBhoXYA9l8BMIWyEO8C+GmU42YXKo7pR9nM/YsAvg3ADyCFsgI6UOV7fhblDOskgATKZcZ+H0B3xTFrArArvmPDoGwAP4+1mdxrXrspvzaX6xDKxf9dALKr578M4K9ymW5PppW/o8brHJfrtuVqBvA7KC8OaZQTQf4fAKNcpq3//LeLXCuO/9XV40/ulzzbSabg82pbzQFk9QI4HA6Hw+FwOJy2Zt87DnE4HA6Hw+FwOHsBV3w5HA6Hw+FwOAcCrvhyOBwOh8PhcA4EXPHlcDgcDofD4RwIuOLL4XA4HA6HwzkQcMWXw+FwOBwOh3Mg4Iovh8PhcDgcDudA0JDObYQQXgx4AwRBIJsftRYu043ZjkwBLtfN4GO18fCxujvwsdp4uEwbD3/+d4ftyhXgFl8Oh8PhcDgczgGBK74cDofD4XA4nAMBV3w5HA6Hw+FwOAcCrvhyOBwOh8PhcA4EXPHlcDgcDofD4RwIuOLL4XA4HA6HwzkQNKSc2X5CCIFIJIJMJoNKpQIhW6twUSgUUCwWkc1mkcvldukqW4udypRSKpVQKpWQz+eRSqUgCLw6C4fD4XA4nP2j5RVfuVwOpVIJh8OBY8eOQSKp/ycVi0VEIhGkUiksLCxgcXFxF6+0ddiJTCvJZDJIp9Nwu924ffs2CoVCg6+Uw+FwOBwOp35aTvEViUTsX0IIFAoFVCoVjEYjuru7IZVK6z6XIAhQKBRIJBIIBoOQSqUolUooFou7dflNA7XqAp/LlLITmVaSSqWQTCaRzWYhl8sBlDcb3PL7MGKxGISQqtb1UqkEQRDYi8PhtCb0GReLxTs6D50LDsJaxdl91utVzQId53QNbBQtpfhKJBLo9XrIZDKYzWaoVCrodDpotVqMj4/jhRdegEKhqPt8hUIBy8vLCIfDMBgMAIBoNAqXy4VSqbRLv6I5UCqVMJlMkMlkMBgMa6y6O5EpRRAExGIxRKNR3L17F6lUCpFIBIuLi8hkMg0fyK2MWCxGZ2cn9Ho9JBIJZDIZ+1sul0MsFkMul0MwGEQ6nd7HK+VwONtFKpVCKpVCp9Ohq6tr28pvLpdDoVA4MGsVZ3epplc1A6VSCalUCrlcDqFQCKFQqGHnbinFVyQSQaVSQaVSwWazQafTwWAwwGAwYGhoCMeOHduSklYsFqHVahEMBrGysoIHDx4AANxud9tPJlKpFHq9HkqlEjabbY2ytROZVhKJRBCJRJDL5dDV1QWpVAq3241sNtuon9EWEEKg1+vR0dEBhUKxRt65XA4SiQSpVAqxWIwrvhxOiyIWiyGVSqHRaNDZ2bmjELJMJgPgYKxVnN2lml7VDJRKJUSjUaTT6Yavey2h+EokEigUCuj1eoyPj8NiseDEiRPo7OxkioLdbt/yRCISiWA2m6FUKnH8+HFIJBLcvn0bc3NzbRuPSuXV29uLJ554AkajEYcOHYJarX7omO3IdP13GQwGjIyM4KWXXsLCwgJcLhcymQx301UglUpx/PhxjI+Ps00HJRaL4ebNmwgGg0gkEojFYvt3oU2EVCqFSCSCUqnc9sZMEASW3FqpTHDKc6NIJIJcLodGo9lRgmuxWEQul0MikTgwXh5CCORyOcRiMZRKJSQSCfOkjY2N4bnnntu2Jy0ejyMej2NychKxWAyJRALxeByFQuHAyLcahBAm541Cx6pxEMfpRnpVM1AoFLCwsIBQKISPP/4YLperYeduCcVXKpVCq9XCYrHg6NGj6O7uxle+8hUMDw8DwLYnZUIITCYTTCYTxGIxHA4HAOD1119v2LU3G1QZ7evrw5NPPgm73Y6nn34
aer1+zXGNiPOhCrRarUZ3dzcmJyfx5ptvwufzMYWDUx7fjz76KJ599lnY7XbYbDb2t0AgALlcjsXFRUxMTOzjVTYPhBDmNtbr9Ws2CltBEASkUikUCgVEIhGu+FYgEokgkUigVqvR0dGxbbd8oVBAPp9HPB5HMpk8EAoFALYpk0qlMBqNUCqVMJvNMJvNePzxx/HKK6+sMTZshWAwiGAwCIPBgNu3byMYDCKTyRzo/Amq5FJ5KxSKLcWrHsRxupletd/k83ncuHEDLpcLy8vLuHr1asPuS0sovlqtFsPDw7Db7RgbG4PD4YBOp9t0UNNyWkB5kaMJXdV2gtTCQd1REomkLScSKsPR0VEMDAywON+tKrqVAeebySifz6NQKKBQKHC3XA3omFw/Nun/r09APKhQq47JZIJGo8Ho6ChGRka2tVErFovw+XyIx+OYmppCJBJp/AW3GFSOKpUKGo0Gvb29OH369JpQqK2QTCZZ1ZxoNIpcLteW8+r6tUWpVKKnpwdarRYjIyPo6OiAVqtla9l25lyKXC6HTqdDX18fvvSlL8HlcuH73//+gZpjqezoui0SiSCVSmEymXD27FkYjUbmFaqHgzJOK9muXrVXVFsPG0VLKL5WqxVf/vKX0dPTgwsXLqCjo6OuSgM0CYAqaVSppdaMagqGRCKBUqlELpdju+h2gRCC8fFx/ORP/iR6enpw/PhxKBSKbVlz6A6Zbi42miDy+TzS6TSy2Sw7rp0nFM7uQJ9PuVyO/v5+dHZ24sUXX8RLL720rckxn8/j1q1bcLlceO211zA9PX3gxyVVIoxGIxwOB86dO4df/uVf3rZ10ufzwe/343/8j/+BqakpxGKxtptXgbLbWCqVsooNJpMJJ06cQFdXF1566SUcOnSIKcdisXjbGwkA0Gg0UKvVTKmenp7G1NQUU94OguJLNxnU8yOVSqFUKjE4OIhXX30VQ0NDUCgUdVckOijjtJLt6lXtQEsovnSxo/VlaWmsjSgWi/B6vYhGo8jn88jn81CpVLBYLFUrGdB4F5PJhOHhYQSDQSwsLCCdTrdVBQKq2NMwhO3G8MZiMQQCAeRyuU2bU+TzeWSzWczPzyOdTrf9TpqzO4jFYuj1ehY609fXx6wU2yGfz0OtVkOtVh+YCX8jqKVSJpPB4XBgeHgYXV1d0Ol02870pptjh8OBkZGRtp1XdTodzGYzZDIZ1Go1jEYj+vv7YbPZYDabodVqG/Zd1Egjl8uh1WqhVquZ8aJZrHW7SeU4NZlMsFgsbP0eGBiAxWKBTqeDUqms26hzUMZpJdvRq9qFllB8t0M6ncbrr7+Oa9euIRgMIhQKYXh4GOfPn4fNZsOZM2fWxLVqtVooFAqcO3cOvb29mJubw3e+8x0sLCwgm822bbLbdhAEAdevX8cHH3wAn8+HmZmZDeVDJ450Or2mnBmHsxU0Gg1OnToFu92Ol19+GYcPH4bZbN7vy2obxGIxent7YbVa8bWvfQ3PP/88DAbDtpMHgYMxrxJCcOLECZw/fx5WqxUjIyMsaUgul/Mx2mAqx+mzzz6LCxcuME+FUqlEb28vlErllsLDDsI45XxOUyu+1IWhUCggl8vriosqFotIp9OIxWJYXl7G3Nwc/H4/AoEApFIpgsEglErlQy4MiUQCsVgMo9GIwcFB5PP5bYcBtDo0brdQKFRN+CmVSvB6vZifn4fb7cb09DTy+fym56XZ883sPqLudBoztn7ypGEzjfwdgiAgk8kgHo9Dr9ejUCgwlyl158nlclZyhnowDgr0XqjValitVnR2dqKrq4uVyKuEPv+Vsed0HqnWOEAsFjMviEajQS6XOzDl9irHOK0f3dHRAZvNhq6uLvT3928pTrIa7TCvVobBVdsEiEQi2O129Pf3w+FwYHR0FDKZbM08wtk5laFOJpMJNpsN/f39GBsbWxPzuxVLL2X9OC2VSrBYLKwkZ735LAeBTCbDQkh3Ux60wkYymWz4etfUiu+
RI0fw6KOPYmhoCGfOnIHBYKjpciuVSsjlcvB6vXj99dfhdDrx0UcfYXFxEalUCqlUit2wamVfKrNCqdmfPkwHwX1UCS2PMzMzg4sXLyKXyz10zMzMDGZnZxGPx5FIJDa14FJ5N7ulV6VSYXR0FEajEceOHUNXVxf7Gy134/V68cYbb8Dn8yGXy+34N2WzWVy8eBEzMzM4e/Yszp49C71ej66uLqhUKhw9ehQ9PT1Ip9M4dOgQbt68iVu3bu30p7YMdrsdjz76KDo7O/Hcc8/Bbreju7sbcrmcKWXrn3+/38/uzaOPPorHHnuMyZR+RiwWM1f+V77yFZhMJszMzODSpUsHQvmlcjUYDBgbG4NWq0VHRwc0Gg1GRka2bDWrRjvMqxqNhsXTXrhwoapLeGRkBMPDw9BoNNBoNGvKafHE1MZA52az2YwLFy5geHgYIyMjTCeoVH63Cr1XtOrR0NAQvvGNb8DtduOdd97B9PQ0KyN3kMlkMnj//fcxNzfHSuntlvJbKpVY8nGj8y+aVvElhMBut+PYsWPo7+9HX18fKw9TjWKxiHw+j0gkgmvXrmFubg6Li4sIh8NM2aUltDZSVKj1Y6eWjlaG7rQWFhbwwx/+EMlk8qFjKptTtJOSIJVK4XA4YLfbcebMGYyMjLC/5fN5ZDIZPHjwAB9++CFCoVBDsqgLhQJmZ2fh9XphsVgwNDQEAHA4HCzeUqfT4dChQ1CpVHC73Tv6vlaCEAKdToeRkRF0dXVhbGwMVqsVBoNhjVVn/fM/Pz/PrPKVpQodDgd7rum5lUolRkZGIBaLUSqV8IMf/GBffuteUilXm83GMuHpHLs+B2I9tRahWspsK8+rtIrCwMAAzp07VzXRj9bflslkG8ZKbmXxbpWNwV4hk8nQ2dkJh8OBI0eO4PDhwzAYDA21qIvFYqhUKohEIjz55JMIBAKYmJiAy+Vqq3VuOwiCgHw+j5mZGfz4xz9mZfV2S/FdX26ykTSt4guU68AajUaW0FJp4aHQwvNOpxM3b96E0+nEzMwMPB4PEonEgSnv0kgqC3l7vV4kEomHjqFyb+awhe0gEolY7WGLxQK73c7+RqtTRCKRh6qC7ARalD6XyyEej6+JKaOuO6qo0JhLQkjbu936+vrQ09OD4eFhPP7447BYLLBYLNBoNEwpq/X8V1rjb9++DZFIhIGBAQDleD66qaCKGE1O0mq1ba9wrJeryWRCT08PS/Kjz0A18vk8JiYm4PV6H2r6IZfLYbfboVar0dvbu+36ys0ArfGuVCoxPj6OI0eOYGxsDF1dXVVlQ5OF17vYBUFAKBRCOp3G0tISlpaW6vp+g8GA3t5eqNVqNlYPKrTufEdHB86ePQu73c7G107iz6tBn32pVAqz2cy6mrXihq0R0JDHWCyG69evw+fz4fr168ziu9sWcJp02Oga602v+BoMBpZVXG1nl8lkEIlEMDMzw9zP1AXPqwdsD9rFJpVKMVfDQYEqmmq1GmazeU0ziXw+j1QqhUAgwOLNG6EkUcWXhphkMhkW00Tdb7SiQTweb/hk34wQQtDT04OnnnoKg4ODOHnyJDQaDavKQqn1/NNNryAIuHPnDkKhEA4fPgyj0Qir1crqV9P4X61WeyAU31pyXd+2vBa0BNzt27eZ14ei0+lw/PhxmM3mHTUWaQZoSTeLxYKTJ0/iy1/+Mot93kolnFKphFAohGAwiE8++QQff/xxXZ/r7+/H008/DYvFwsbqQcVgMODo0aPo6urC6dOnYbPZ0N3dvWutdWloDm1sRTvvHUTFt1QqIZ/PIxAI4IMPPsDCwgJu3boFj8eDTCbTslbwplZ8K6m1GFEXZyKRgNvtZiW2uML7OTT5QqfTobu7G3q9Hkqlsq7Pcjk+zG4qRhu5kFUqFQwGA7q6ujA6OopYLAaPx9O2Hg2VSgWz2Qyj0QiNRsNckLRCSD6fx9LSEhYXFzEzM4Pl5WUWflMpk1QqhWAwiHA4jHg8Do1GU1V
m7azwVlJLrpVUdrLyer3Ms5NOpzEzM4OFhQXEYjFEo1E2LmmZKRrj2srQcmFKpRIGgwE2mw0Gg6Gm8hOJRBAOh5lXiD7HxWIRS0tLiEQiuH//PpaXlzf8XpPJBIPBALlczpK6149Lem/C4TAWFhYwNzfH6ve203xNw0cGBwcxPDwMm83GSpVtFt5QKpXg8XgQj8dZA6X10PFKu71V26iJxWJ0d3djdHQUYrGYGSVSqVSjfuaeQHUArVbLGnSNjo7CZrPBZDI9tJmjYywSiWBpaQkulwvz8/NwuVzMO9nK3t6WUXxrUSgUkE6n4ff7cffuXW7prQJtiTs8PIxHH30U3d3d0Gq1B3IHuxWaSRESiUQwm82QSqU4ceIEisUiZmZm2EavHTEajRgYGEB3dzesViuzsheLRYRCIRbPe+XKFczPz+P27dtVOy6Fw2FEo1Ho9Xr4/X7I5fIDXZ6ollwroVVGZmdn8YMf/IBZdnK5HD7++GMsLS2xhgkOhwPHjh1jVjKLxdLyXgmqFNGESKr41JozFxcXcefOHcRiMbjdbqYU0Ao4iUQCU1NTmJ6e3vA7aUKtTqeD0WiETqd7SCmh94YmH7tcLoRCIeYWbhf6+vpw9OhRDA0N4dy5czAYDBgYGKi6UVtPPp/HzZs3MTc3h0QiUTVcj8b+63Q6luS5Hrp26nQ6qNVqZDIZRKPRlivJWakD0OYnNpsNhw4dglarfcijQMfY9PQ0Ll68iJWVFVy9ehXBYLAtyrs1teKbzWYRi8UQiUTg9/ur7vJojd5YLMY6tXHWIhKJYDKZYLfbWdzaTlpmHlRohrZEIoHBYIDJZEIwGKyre10jkMlkzLpmt9vh8/nabvNCLdu0OL9KpWKhHpXjNZvNIpPJIBQKwe12IxgM1nz+BUFAsVhkr1ZasHYDWtaNvirlSnMiIpEIgsEgPB4PlpeXWYwdtQLRToz5fJ7dM1r9oDIGm5b/S6fTbLOyF6WQGgHtDkZL3q2fLwVBYKWW/H4/XC4XotEoXC7XGsU3HA4jlUox6+N6qPxoYyUaiqNWq6tW1qBezng8DpfLBa/Xi2w22xYW30pZdHR0oKurC3a7nYU8ymSymnMefc6pout2u+F0OlkYGZWNSCRiYU46nQ4KhaKm3kBDXtLpNEwmE1QqFTKZTMutnZU6gF6vh06ng8lkYs/reg9NtTGWTCZb3tJLaWrF1+1248aNG1hcXITL5Xpo5ysIAqLRKGKxGGZmZtrihuwG1K1x9uxZ9PT0wGazsUmdUz+0jqTBYMCZM2fQ2dmJTz/9FC6XC+l0uuEB+JWIRCLo9XpotVq2S89msy3vUl6PRCJhfeNpBQe9Xr9mrJZKJUSjUXg8HkxOTuLjjz/mm94GQOWaSqVw584dTExMYGpqCu+99x6z+NJM60rrIi311dPTg0OHDsFisbDKBtlsFul0GsvLy7h+/ToWFhbWKL+tTKFQwPT0NNxuNy5fvowrV64gHA5jZWVlzVpENxO14iErx/zZs2dx+vRpWCwWDAwMMAWtEmoQWlhYwIcffohoNIpkMtkWns6tyoJCk7Ci0SiuXbsGn8+H9957D9PT0w8lYcnlcnR2dkKv16Ojo2PDBiP0egYGBuD1ejE1NcXqrLcSlTqA1WqF1WpltbvFYvGBGmNAEyu+dIKlrtxqxecBsN0dLTJdjcoe6fRV6ztpMHc7uY0IIVCr1TAYDFCr1TUTJSobV+TzeTZ5VzZT2Al0R97sDw69Rlr+jm4S6DiSy+Ww2WzI5XLQaDSQSqUNCzcolUrse+l3UwsH3fjRe6lSqVpuAt4MkUjELNpGo5EltdLfSRMvM5kM0uk04vE4otHohmNqo+e/1pg/qGSzWRYT7XQ61+RNrIfOC9Q6T9vnVtZap3KNxWLMckRjsJt9HtgMahn3eDzweDzwer2IRqMIBAJbWjsqx7zNZoPNZmMJ3ZXWTTpWc7kcMpkMEokEQqFQVTd+q1KvLCqhz282m0UymYT
H42FjzefzsTr+wOcWZfqsU+WvVsIijfWmjTMkEsmWkhv3A7pWVc5z9Bk1Go0wmUybdhOk8yz11LTTGAOaWPEFgKWlJcRiMdYxp9rAp/V54/F4TYsPvdm0zaHRaHxo8FJlNxgM4sGDB5ibm0M6nW4JRa0RVLrtXC4XZmdn4Xa7mdJM20BupyA7DQXIZDJrWhY3o1xpzUCFQoHl5WVotVqYTCaYTCbWhclsNuP8+fMIBoMsvq5YLFatd7xVIpEIFhYWAIA1sNDr9U0/2TYKqVSKxx57DE8++SSGhobQ1dXF3JvUopNMJrG8vIzFxUVEIpFNx1Gt57/amF9ZWWmbDe9WoRUIvF4vbty4gYsXLyIWi+1oXs1mswiHw5idncWbb76JUCjUNvGoNI706tWrmJ2dhcvl2lZDm43GPOWgjNV6ZFEJXbdDoRAWFhbgdDrxxhtvwOl0wul0IhqNQqfTsVh2lUoFrVaL8fFxmM1mfOELX8DAwACMRmPV8xeLRaysrCASibBQllQq1ZRrF/B5l0qdToeuri6m/NKulxv1QjhINPVqur5cznZRqVSwWCwwGAw1s5ippTeRSMDj8cDv9yObzbZELFqjoBnJ0WgUfr8fsVgMQDm21Gq1QqvVbsvyS3fk8Xgcbre7qUuglEolpFIpJBIJhMNhBINBKBQKmEwmAGB1HWmWsdlsZuVuGgG1thkMBrbL1mq1DTl3KyASidDZ2YmRkREWj0apvDeRSITVR92MjZ7/yjHv8/kQjUbbTpmoF+plo3GqMzMzG8599cyrtOmL3+/HvXv32spyVCqVmJw8Hg+i0ei2ziMWi2uOeQptHtDuY7UeWVRC120ai7qwsICJiQmsrKwgm82iWCzCbDazhEuDwQC9Xo/BwUGYzWY88sgj6Ovrq3l+QRAQiUSYvNPpdFMnE1PjjEajQWdnJ1uXaAz+duoRt5tXEWhyxXcnEEKg0Wggl8sxPj6OkydPsr7eWq32oe46dEKZnp7G66+/Dp/P1zaWiXoolUqIx+OIRCJ48OABbt26BZ/Px0rIfO1rX2O75q3GlRYKBeRyOTidTrhcLmQyGRb20GxkMhksLCwgGAzCYrEgEAjg7Nmz6O7u3vXvFgQBS0tLrOi9xWJBR0cHjEbjga7jSclms1hYWIDf78fVq1cxMzNTsyFAPc9/qVRiybGfffYZPvzwQ7jd7gMXKywIAksWjEajiEQiNePVtzqvtjI0pCAWiyEYDEIul0Oj0TREEaD5ArR6g0qlqlo1gt6bXC6H+fl5BAIBFk/cLmO1XllUQkMV/H4/fD4f7t27h7fffht+v5911KSKHh2ntBSkSqWC3W5nicIbQa36d+7cwZ07d+B0OpFOp5tWJ6CVSA4fPoxXXnmFlS2VSqUYGxuru+kHDQ1TqVSw2WxVuxXSvBa6+Wgl2lrx1Wq10Ol0OHLkCCtAPjY2VtU6Rwvhz87O4q233jpQTRuA8gQbi8UQCARYkWqJRAK73Y6BgQE8//zz6O/vh0Kh2LJ1k9Y9nJycxJtvvgmfz8fiWJuNXC6HxcVF5hbzeDzo6enB6dOn92TnS7s7JZNJPPLII0gmkzh06BA0Gs2uf3ezk81msbi4iKWlJVy/fh23b9+ueWw9zz91kbpcLty8eRPvv/9+0y5ouwmtupBMJlkVHbo5Xc9W59VWhXqpaFmnYDDISlrtdB6gMZi0fqzFYmGVDKp5Ium9mZ+fx+LiIq5cuYKLFy+2hSdyK7KgUOt3Pp+Hz+fD/fv38dlnn+HNN99k4WYikQhqtfqhcTo6Orqlrpu0YcsHH3zAYrmbGWrVHh0dxSuvvLJm3djKuBWLxZDJZNBoNLBarVUVX+qRz+VyXPHdbyoTLg4dOoTe3l6MjY3VLEC+PrmlHbKNtwMhhLmVxsfHIQgCy/bs6OhgPdG3E+pA44KpG0YqlSKbzTZ1212a6Ej/uxYSiYTJhTZX2MlvqkzEov9yHqZSQa2WzCGTyep6/ikHKaRpPbl
cDl6vF+FwGNPT05ibm0MoFIJMJntIJvXKtdWTBmnFIACYnJxkbXMrrdr5fH7L46ZyfTKbzTh79iwcDsdD56bnTafTcDqdCIfDmJiYwP379+F2u9tmrFZWyjl9+jS6urrQ29u7ofeAGmkSiQTm5uZw/fp1zM/PMwUaqD1O612/qEeSjt1mzUmpBZ0Tt7tJk8lk0Gq1GBgYwLPPPvtQeKIgCJiensbMzMyaJjetIqO2U3xp1r3BYMBXv/pVPP300+js7ITD4ViTIU+hiVd0d99qO5dGIRaL0dXVBYfDgaGhIbz88ssAPlfEaiUXbgVaP1GpVK6Rdas8LOshhEAmk0Eul7MQEJoNu12oIk276zSimkY7Qyd3mgBLZaXVaut6/jlll+W9e/fgcrlw6dIlTE5OIp/PV+3uWK9cW31eLRaLcLlccLvdiMViuH37Nr70pS9hbGyMWb9o98B6lSI6Vun6NDg4iFdffRXDw8PQ6/Vrxi+1akYiEVy/fh1OpxNvvvkm7t2713Ky3AixWAyFQoHOzk78wi/8AkZHR5k7fqNurdT6+uGHH+KNN95AoVBgrnlg588/TcZOp9OsHF2rrlPbQaPRQK1Ww2w24+TJkw/99lKphDfffBNvvvkmnE4ns/y2iuGwbRRfqqAplUr09/ejo6MDDoeDFQKv5oajO+pcLgePx4P5+Xn4fL4D6e4EwCYGqkQ0isp788gjjwAA5ubmEAwGmcuqFSGEwGaz4ZFHHkGhUEAsFmPloLb78Ot0OpjNZjgcDpjNZuj1em713QC6UTAYDOju7mYLG03u2Oj555ShCgPNBN8oYbAeubbLvEqV90QiwSq4TE1NMeWKlhPLZDJ1xdrSsWo2mzE4OIihoSFYLBZm3axUyjKZDAKBAHw+H6tQQBuHtBt046pWq+tKwKIGB6VSCYvFgt7e3oeO2e7zX9lwheYTBIPBuu9xu0A3aTKZrGp+iSAIsNlsGBgYACEETqcTqVSK1ehudtpmNaDuo/7+fnzzm9/EI488gsHBQZhMpprlO2gfdZ/Ph7fffhvvvPMOa0fIaRxisZhtSH7lV34FwWAQ/+E//AdcvXqVlTdqRZRKJV5++WWcO3cOb7/9Nt599114PB7Mzs5uy+pLCMGJEydw/vx59Pf348yZM1CpVFXjqzhlaELM6dOn8eqrrzKlhHowdDodL9+zCWq1GqdOnUI2m8WJEyc2VK7qkWu7zavxeByZTAbf//73MT09zZQyahWOxWJ1bd7pWD179iy+8Y1vwGw2szKR6xU9p9OJS5cuwel04r333oPP50MwGNyV39dqiMVi9Pb2wuFwoKurCy+++GLVY7bz/NMa4QsLC/j2t7+NBw8e4P79+yzRnVOGrlX9/f2YmJiAUqmEx+PBJ598su3qJntJyyu+691HHR0dTOndzGVCLRPxeBw+nw+Li4sHpopDo6AZndTCSeOsKidyGtdGlV+DwQCj0bitRLm9hP42WiyehjVQxGIxq1/a1dUFq9WKQqEAv9+/rUlSJBLBbrejv78fnZ2dMBqNkMvlD1l8qWVeLpdDp9NBEARkMpmWi6Osl8r7QDOJ6XijrlKz2YyhoSHodDr2OTq+qrWapQXvaeLQQV7UxGIxdDodSqUS+3cjasmV0m7zKk3ELRQKD9XqppbAjTw8NF5fo9HAYrGwcDKtVgulUlnVo5PNZlnFAq/Xi0AgcKDHaCWEECiVSigUCigUClit1qrHbTZOq7He4nv//n2Ew+GayZ4HGZ1OB6VSiXA4DIfDAaBc27uyUVWzenSbV+uoE9of/tChQ3j++edht9vXKL2bxfTQmExauqZdlYfdwuPx4ObNm2xwy2QyPPbYY+xBqITGCtO6t83cNpkWLo/FYnjvvfcQDAYxMjKCCxcurFF+adefxx57DBqNBolEAn6/f9uL/MjICIaHh6HRaKBQKKrG+NJyPE8++SR++Zd/GcvLy3jjjTfg8/m2VUC/2aFjzO124+LFi6xLFvB
5GA11ldK41MqEyvUkk0nMzMwgGAzi4sWLmJmZwfT09IFd2KirmSa01tMF7yDNq5WJeqlUak0cLg2HqAWtS6vX6/HUU0/hzJkz6Ovrg16v37ByQWUH0XbrJNoIqNzo2F1PveO0FrTFdGWML2ctdA3v6enBV77yFcTjcZw8eRLJZBIrKyuIRqO4efMmbt26td+X+hAtq/jSyUehUECn06G3txdf+MIXWJetepReOnHR9rCtEpi9H9SSSywWw+zsLKvSoFAoMDQ0VPVYOknRFrTNqvQCn2cOp9NpzMzMoFgsQiwW45lnnllzHJ1cu7u7odFoWJH57Y4jg8EAg8GwYRtNasmgY352dhYffvghq1/ZbpN0PB7H/fv34XQ6MTs7C7/fzxSQyrh0mhS4GYVCASsrK/B4PLh79y4mJycb0iinlaDKXKWnZjtdGTc7fzvNq5spudWg1XI6OjowOjqK06dPQ6fTbbg+0XWpsiJGuz3TldCxsRX50rVjfQvyRlwL1Qmo/FtR9vR37PS522iNpp5cg8GAkZER5PN59Pf3I5PJYGpqCj6fD263e0ffv1u0lOIrl8vhcDigUqnQ29sLo9HIesSPjY2hs7MTarWaKVa1yGazLFv32rVrePDgAWscwPkcauWIxWK4fv06YrHYQyVLnE4nbt68CZFIBIfDAYPB0BLB7ZtB3TSlUgnRaBTBYBDxeLzmJEjrJ+60mDd13200mVdaODs6OhAMBrfs0msl1Go1BgYGoNfrWXdF6mLW6/UwGAw4duxY3WEzNGM7mUyypi2tGn+6HZaWlvDRRx9haGgISqUSOp0ODoeDN0nZBSorjmi1WpjN5g3D72id6snJSUxPT7MOou0KnS8DgQAuXbqE2dlZHD9+HN3d3Wwu3AsymQwymQycTic+++wz1sSo1RTfdDqNSCSCqakp/Nmf/VnVyiz1QGVvt9sxPj6+4dwqlUqh1WpRLBahVCpZmE4ul2Njvdl0q5ZTfPv6+mCxWPD000+jr68PCoUCcrkcVqsVDoejrsWPFsL3+Xz49NNPMTU1BafTuQe/oLWonJQ++OADLC8vP1STk/YwV6lUrPxWM8b0bAfqYqRZ3Rs1NdnLSZpaOVUqFTo6OuD3+yGTyZreir5dtFotHnnkEdjtdphMpjXjq/L5347iSxs2HBRod8BisYhIJAKr1Qqr1QqTycQV311CJpMxz6TZbN4wNnppaQmXL1/G/fv3MTU1hWQy2dQtcncKXWNCoRAuXboEm80GnU4HjUZTd5exRkAbWM3MzOCtt95q2c6t9HdMTk4CwLZzaKjn8bHHHtu0OY1EIlnTKCOXy8FsNiOdTu/Z/dsq+6740sVaoVCgu7ub1UOtNjloNBocOnQIRqMR/f396OrqYvVONypOvx6aGKRUKqHRaKDT6dqq1eZOoS6eSCSCpaUluFwuzM/PsxaZlRZdmsTSjLu6RkE7z3m9Xty+fXtN5QC73c5K8Gx3kimVSvB4PIjH46yJCkUikUCr1UImk7GEwPW0o7JbiVQqZe5hsVi8Rj5bef6pNYSWiHK73UilUrt9+U1HKpVCMBjEysoKi3WmyVeb0agx3+7QhEHqnezp6YHBYKj6rNLnn4aN0bCeRCLR1O1xGwVty+z1elEoFDA9PQ2xWIy+vj6WLE3n3EZQLBYRi8WQy+VYibhoNIpYLIaZmRksLy+zurSttqaVSiXkcjkkEgm4XK5thy5Rg4BarcbNmzfZulPP80+NMgaDAf39/Xj00UdZ2FMmk8HKygprdbxf7PusRQe22WzGs88+i46ODtYJaz1KpRIDAwMstKGjo4NNJFsJZK/MYrbb7UgkEvD5fA39Xa0MbdM5PT2NixcvYmVlBVevXkUwGHwohnR9nDTQfooY3UVPT0/jtddeY5YxhUKBZ555BoODg9BqtdtuK0z7wc/NzSGRSCCRSLC/qVQqjIyMMHe+3W5vyG9qJVQqFRQKxUNxqQC29PxHIhHcvn0bbrcbN2/
ehMfjaYnSO40mHA4jGo0ilUohl8tBr9djamqqLrdoo8Z8u0O9k2azGY8//jiGh4fR09NT9djK5//TTz9lYWUej6elumFtB6oQpVIpzM3NYXl5GSqVCrOzszhz5gxbqxvRQIlCPb50PvB4PEgmk0ilUpifn8ft27eRy+VaUvbUS5nNZhEIBLa9FqvVaqhUKni9XqTT6S2teSKRCGazGTKZDGfOnIHBYGBd8Hw+H9566y1W+Wi/lN99U3wrmxro9XrYbDZ0d3fDarXWzHZVKBTo6OiASqWCSqXadn1O+r35fB5msxnxeBxWqxUdHR1rMniTyWRLDv71CIKAZDKJcDhcc1NRSSwWQzweh8fjgdPphM/nYy639YkqtJwUrTer0WjaruECfWhjsRicTifb5dLahVqtlsWIbQe6C3Y6nYjH40gkEkzGNC4QQNuEkGxEtbEqkUhYgX8a0rEdcrkcotEowuEwIpEIotHogZDpeqiykU6nEQ6HkcvlmIzXU9kOWqPRsKSraq16BUFgVRwSiQRr6HIQEYvFUKvV0Gq1MJlMsFgsNa2WpVIJoVAIHo8HgUAA8Xgc6XS6LdaeeqFrLiEEwWAQMpmMdWfLZDI1163K5Fa1Wr3h3FAsFpHNZhGLxeD1ehEKheB0OpkFknpC6DrXymwnEbOSTCYDQggikUjNNU8QBKarrbf8VjYY6enpYRVeaOnZ/a7otG+KLw016OrqwsmTJ9HT04Pnn3+eKb61BjmtEbuT0ASZTIbOzk5mMaa9vO12O7NoRqNRXLt2DbFYrOWzkguFAiYnJ9nuWa/XbxhnFo/HEY/HMTU1hUuXLiGVStXcBOh0OvT19cFut7MEQ61Wuxc/a8/IZrOsWoPH41lTUUQQBDx48IDtfrfzMKfTabz33nuYnZ1lmw4KjQns7OzE+Ph4w35Ts1JtrNLQJrVajc7Ozm0/+/F4nLkyJycnEQgEDqxiBoC5dkUiESYnJ6uOXZFIxDK3H3/8ccjl8poLai6XYyUAJyYm8ODBA3g8npaeO7eLVCpFZ2cnuru7MT4+jvHx8ZrjtlAoYGZmBpcvX8by8jK8Xu+Ba5FLKRQKmJqawoMHD5DJZBCNRqHX69HV1VVVJ6C5FZ2dnZsmuMZiMSwsLMDn8+H73/8+PB4PPvvsM7jdbqYorg81O6jUs+YdPXoU4+PjUKlUMJlMa7xver0eWq0WRqMRhw4dYueanp7Gm2++CZ/Pt69Gh31TfMViMaRSKfR6Pbq7u1kDABrq0Ci3RjXorkMqlcJkMkEikaCzsxOhUIhZ92jSDN2ZtPIkVCqVEIlE4PF4mCVmI6i73ePxsB1wLWQyGWtIYTQaodfra1riqaWJNrxoFZnS66YxShS5XI6VlRWoVCpoNJptu30zmQw8Hg98Pt9DoQ6EEMTjcaRSqZauhVov1cZqNptl7jKz2fzQ3FBpldwIWoc1mUwy1+ZBhrqYN4IaG6jlnZbSq2axKZVKLGEwFAohEAg81PDhoECNMwqFgtWaXw+dV3K5HOLxOMLhcNsns22GIAhIpVJIpVLw+/1wuVyIx+OsnOR6NBoNc8tvZuGkHh9qXacWdt4R72E2W/OoESKVSrEa4JXzAd2AyGQyVuYzlUrBYDCwCkQH0uKrUChgNBoxMjKCl156idXf3W2ltxJCCEwmE7PWHT58mO1M5ufn15TwauUEg3w+j1u3buH+/fuQSCSbJqTQBLZ4PL7p7pfGnnZ1deH48ePo6OiAwWBYc0xlN5ylpSUEAgFWi7WVXc1blWstaHJLIpHYUN6tslHYCdVk2tvbC5fLBZPJhFAotKaFM63dS61C9dTu5tQPVXxpDsbQ0BD6+vrYAlZJNpvFwsIC3G43fvzjH2NychLhcHifrrx5qBYS0o7zYaNZWlpCLBZjG65qG14aRnL27FmcPn16w8okNKaXliwLBAIHqqJLI6Dz84MHD5hXI5/Po6OjY8/0tkawb4qvVCplMbuHDh3aF/c4IYTFXen
1evT29rKdiUQigUql2vdYlEZQKpWwsrKyK+dWKpWw2+3o7OxEZ2cni0ddD43/oy04qZLXyhuK3ZRrJfUoa/u9g24U1WQai8VgMBhgsViYC41CPTMA4HA4Npx8Kwvlc+qDWtJpkuXY2BiUSmVVr06hUEA4HIbf78fy8jIWFhb2/oJbBDof+v3+tpkPG00kEtlUMbXb7bDb7RgYGNhUdul0Gm63GysrK3C73dzSuw3o/EwIgcvlQjQabcnQxn1TfGmSRDKZZBl+Wq12y4X4aWwOrcu5kZIglUohl8tZIkc7KAp7CSEEGo2GxWCLRCJYLBZoNBoolcpN228Gg0FcunQJCwsLmJ2dPXCNA7YDHdupVIqVmKFxbTRBlIbs0Jq+yWRyx8kNzQS11CiVSiwsLLBnWCwWszq0Y2NjGB4ermp1p4mHsVgMyWRy30vptAI0aUitVsNiscBsNtfsuFgoFJh8aXImj5OsDV37YrEYPvvsMywvL8PlciGRSGw5zIE2yNDpdKwMaK2SadlslrmuW/3+0PAbWhu5FZUvzv6xb4ovjfWksTzFYpHV6dyq4ktj93w+34YLmlKphMFgYO7Rdqs+sNsQQlhYCK3hZzaboVarN1V8c7kcU3ynpqa4W69OKjd11AJSWdi9shwgjRHMZrNtZT2qtPzQhZ1aHYeGhjA4OAiZTFYzBpqWo4vFYkilUlzxrQOaqa1SqR5SfNeTz+dZQixXfDensiPmjRs3cP/+faysrCCZTG5pTqTPglQqZTW+a3koae1amrzVyveH/mbaMYwqvq3kaufsL/um+NIiy8vLy7h8+TKrBkDj+uqN1UulUkgkElhcXMTVq1c33DHbbDaMjo5Cp9NBqVRyxXcVOoFuFtYhk8lw6NAh9Pb2svJlnZ2dGBgYYBuKzWilpLZmIJ/Pw+12I5vN4t69eygUChgbG2MNG2jzhieffBJdXV348MMPUSwW21b5oEkUdAM2OjrK6qTWirF2u92YmprCzMzMmlaknNrI5XKo1Wr09vbi3Llz6Orqgl6vrzo3x+NxzM7Owu12Y3p6Gm63e02CJmctlc/tE088gc7OTva3RCLBqrrQebLW/Fzp7Tl79iyMRmPNHBlaozYajWJ6erqlPW2EENbdbWxsbNPnn1OmstZ55TiqrI9ej0GAjl/q9W1Fz/m+jRTqcrx+/TqcTidGR0cxOjoKtVpdVzF66soNh8NYWVnBD3/4Q/zu7/7uhlnEx44dwyuvvIKuri7Y7fambae3l1BXWWX2di20Wi2++tWv4umnn0ZHRwc6OjpYtjcte8RpLKlUCrdv34ZcLkcul8PNmzfx9a9/HaOjoxCJRKzj4S/8wi8gEomwigX0s+2IWCxGV1cXHA4HnnvuObzyyiuQSqVVE1sEQcDdu3fx2muvYXl5GTdu3GAuX05tNBoNOjo6cPr0afzyL//yhk0EPB4Pvv/972NpaQnvvfce31hswvrnNhqNsvVsZWWFdWsrFosbzs8SiQRKpRKDg4N49dVXMTQ0BIVCUdUAEQwG8f7778PpdLI47FZlK88/53NkMhkLJa18jqnXnIYkbqb8ymQyJm+q/LYa+17OTKvVwuFwsPq99STpCIKAdDqNXC4Ht9uNubk5OJ1O5sqsBXUD03JanPJ90Ov1kMvlMJvNG7aG1Gg06OzshMlkgsFg4HFVe0BlYXfqpl+vVNAWkYVCgS2OrbgL3wrU3Uktkxv9XlqpJZPJsDAQzsbQpDZajmijeYHGj+ZyOWSz2QNdG3kr0Oe2VCqhp6cHw8PDEAQBiUQC2WwWqVRqw/mZVjsYGBiAxWLZ0JOZzWahVCrbJrdlK88/p7zO22w2Vm60cnNUKBQQiUSQzWYRDAY31KHoeQwGA2w2G7RaLZRKZcvJf98UX51OB6PRiLNnz+Ib3/gGzGYzent7N4wVpRSLRSwtLcHn8+Htt9/GO++8g2g02tLum/1Co9Hg1KlTsNlsuHDhAoaGhmoeS3faOp1u213
zOBwOh/M5SqUSL7/8Ms6dO4e3334b7777LjweD2ZnZzecnyu7n9a7dnIOJgqFAi+//DJOnToFs9m8puFEOBzGlStX4PF4cOnSJczNzdV1npGREQwPD+9pCdpGsW+KL20/qlarYbVaYTAYWPu7jbqKFQoFZLNZhEIheL1eOJ1OLC4uNrTvM7V20JaTtJBzO8anVmbG9vf3Y2xsbMPjaQH7enZ4VF7ZbJYlvVAXc7vJkbP7ULevXC6vq205tY7Tf7mld3Oo65I2XtgoHCyfzzNrOrX48ud669DqJEajkTVyKhQK8Pv90Gq1sNls6OrqwuDgYNX5mSq/lZZeem9oCEVlYmer3qOtPv8UPk7LY8Rms6G/vx82mw1Wq5Wt4cFgECsrKxCLxTCbzfD5fDXPo9Vq0dPTg8HBQdjtdmi12g299M2qA+xrjK8gCLh+/Tr+4A/+AL29vfjJn/xJ2Gw2pgBX+8zMzAyLV5qdncXMzAyLiWoEYrF4TfyV2+3GO++8g+npaZa13E5QqwF1mymVyk2PrXd3RxM1Jicn8e6778LlcsHlcvGses62UKlUGB0dhdlsxnPPPYfh4WGMjY1VnXQzmQybI65du8aeXz7uaiMWi9HZ2Qm9Xo+nnnoKZ8+eRX9/f81Wu3fv3sXNmzcxMzODTz75BJFIpG3jyncbGn/52GOPQaPRIJFIwO/3QyqVor+/Hzqdjll1K6lsE1sJvTfRaBQrKyuIx+NYWFhAPB6H1+vds9/VSLby/FfCx+nnoSFKpRJKpXJNyIzZbMbjjz+O0dFRdHd3b1jfWCaT4cknn0RPTw80Gs2meT3NqgPsa1WHUqkEp9OJTz75BIFAAM8++yxMJlPNGEXaC97tdmNiYgKTk5OIRCINteTQXaVer8cTTzyBQCCAO3fuwOVytW3sGrXySCSShoQw0J0cre25uLiIH/3oRwgGg4jFYm1Vaouzd0ilUjgcDjgcDhw+fBiHDx9+qEsgpVAoYHZ2FlevXsXc3Bz8fj+bczjVIYRAr9ejo6MDo6OjePLJJ1l92PWUSiW43W7cunULi4uLWFpaQjqd5kltFdB5kFq9NlLQqEGhu7ubtXhNp9MQiUTQ6XSsNXw983PlvfF6vZiZmUEmk0E8HmfVlFoRmUyGzs7Oup5/Ch+nn0MT0dfH+NJxl8/nYTabNwwZFYlEzNJbj+c3m802pQ6w7w0sEokE3G43BEHA22+/DavVymrErieRSGBqagqhUAjLy8u70gCBhlNEo1F89tlnWFlZgdPp3FZx8YNENpuF2+1GKpXC0tISwuEws5BPTU2tqVN50NxMjaLV4qgaDXXBq9Vq6HS6NfWM1yMIAmKxGPx+P2KxGDKZDK/ksAl0069QKFh9VNoohZLP5zExMQGPx4NPPvkEExMTCAQCiMfjDQ03a3Wy2SwWFxeRSqVw7do1xONx9Pb2ore3d8PPKRQKGAwGlmFPQwKp0rIR1e5NJBKB2+1GPp9HLpdDsVhsuZAfg8GAvr4+dHR04OzZs7Db7ejt7d3w+efjtH5oqAxNjN5ofBBCoFarNyw5KwgCQqEQ0uk0JiYmcPfu3abTAfZV8S2VSojH40in04jH43j33XeZ+byaUDOZDOtpTqs4NJrK4uKfffYZFhYW2A3jim9t6EQfCATw0UcfYXFxkXXM8vl8cLvdLTfhNiutlkHbKOgErVKpmOJbC6r4hkIhxOPxtvXWNBqZTAaFQsHi/tePtXw+j1u3buHWrVuYmJjAvXv3kE6nkUgk9n0xaybofBgKhWC1Wpn7uB7Fd7tlNmvdm0gk0tL3xmAw4OjRo+jq6sLp06dhs9nQ3d0NnU5X8zN8nNYPIYSNOY1Gs+PzlUolhEIhBINBfPrpp/jwww/h8XiaSgdoiorPtByOx+OBSqWquZOgZTdosPpWob26BUHAnTt3qi6cxWKRlUlbWFiAy+ViLqJ2tBgVCgUEg0FIpVJMT09veyecSCQwOTmJUCiEhYUFOJ1OFAo
F5PN5VqeSw9ktqDs5k8nA6XQiGAwiEAggnU43zWTbrNAEV5VKhd7eXvT09MBgMNRsfRuJROD1ehGJRFhZSc5aSqUSUqkUBEGAy+WCIAiw2Wys7Jjdbt+2B4fOqzRel65L6XSaNWmhY7+V743BYIDBYMDg4CCGh4fXyG+zkA8+TtdSLBbhdDoxNTWFQCAAt9vNOtnKZLKaIU3bgSazpdNpRKNReL3eptMB9l3xFQQBxWIRqVSKldHYLEOw3g4j64lEIrhz5w4WFxfh9/urJnLR9rqhUAhXr15FMBhs69qf2WwWy8vLiEajEIlEuHHjxrbOk06nMT8/j1gshunpafj9frazptZ9DqdRrJ8jBEFAPp9HIBDApUuX4HQ6MTs7uyvhUO2GXC5HX18fS3IZHh5GT09P1WNpk4WZmRm43W7WSppb0dZSKBQQCoUgkUhw69YtPHjwgOU8jIyMwGKxbLvhAo3XnZ2dxQ9+8APmzcjlcvj444+xtLRUtQNcq9HX14ejR49iaGgI586dg8FgwMDAwIbGMQofp2spFAq4ffs2YrEY1Go167p69OhR6PV6jIyMbFireyvQPgvRaBQulwvT09MoFotNpQPsu+JLoSEGu0kul2MWY5lMVjVbmXbMiUajLLyhHS29lFKpxNw/Ho9n20pCNptlYSipVOpAJg/sNoVCAblcDrFYDIFA4CHlj8ay7nfiwH5AN8O5XA5erxcrKytIJBLI5/Nt/fw2gsrSjSaTCRaLZcNFsFAooFAosBKPnOpUGnUAwO/3w+l0QqPRwOfzbTukIRaLIR6Pw+PxYHl5mc3Z+Xx+jXWzle8NIQRKpRIWiwVmsxkGg4El+W0UW0rbtUciESQSCVbKsJVl0Qhol1uZTAaVSgWFQoFSqQSLxYJ0Og2j0Qi1Wt2Q78rn8wiHw4hGo0ilUigUCk0n/6ZRfPeCWCyGmZkZiEQiTE5ObmhZLhaLSCaTbT+507AOkUgEp9O57X7nle0OeTxl46GuO5FIhJs3b1btwJRKpVjJooPm1qPJspFIBFeuXMH09DRCoRBSqdSB2wRsFalUis7OTnR3d2N8fBzj4+M1S5hxtkapVEI0GkU8HkexWITL5cLCwgJKpdK2FF9BEFjS8PT0NN577z023wqCwIwO7TDmHQ4Hjh8/jt7eXgwMDEAul9cMcahMSr927Rq8Xu+BnQurUSgUMDU1hfv377MKIl1dXfB4PDAajVhcXGyY4lsoFLC4uIhwOAyPx9OU+tOBUnwrd9+cMjQeByiHK3CaE0EQkMvlkEqlEAwGsbi4+JDlg7pS29lLQV1m6+P8aavcdDqNUCiEcDjMKznUCS1lSL1gVCGr5rWhlsRmXMyaFerJjMViEAQBOp0Oy8vL295cJBIJVg0pEAi0rWJH687K5XJW4rSWV5hWq0gmk/B4PHC5XAfCY1svdFNUiUwmg9vtRjqdhlqtblioQ7FYhNfrRSwWQzKZbMg5G82BUnw5nFaFloih3W/u3bv30DGlUgkejweJRKItY9KpRVcul2NqamrNbywUCshkMpifn0c6nW57T00j2Uiu60kmk/B6vTycaRvQMKTp6WnmvdkONNQkHo+35XNOiUQiWFhYYEmCG3kjK5PS33jjDTidTjidzobX+W8nwuEwbt++DZlMhrt37zYsua1UKrGxHg6HG3LORsMVXw6nBajcsUejUSwtLe3zFe09NFOeZrNXJqfSVuZ+vx/ZbJZbJbfARnJdTzKZRDKZPJBx5DuFKqy0LCdnY6h3Cyh3bdtI8aXPv9PpxMTEBFZWVpDNZrm1dwMymQw8Hs9+X8a+wBVfDofTEmQyGSwsLMDr9SKRSECv17O/0aRUWru3XeIc94KN5LqefD6PqakpXi2Ds6sIgoClpSUIggCVSlWzvB6FP/+crUAaYRUhhHDTygYIgrDljgNcphuzHZkCXK6b0UpjtdpC2IxW3lYbq5s1SGkWGbfSWG0VmlWm9TbtaZaxWUmrPf+twnb
lCnCLL4fDaVGacZFrB7hcOc0GH5OcRrK96HoOh8PhcDgcDqfF4Iovh8PhcDgcDudA0JAYXw6Hw+FwOBwOp9nhFl8Oh8PhcDgczoGAK74cDofD4XA4nAMBV3w5HA6Hw+FwOAcCrvhyOBwOh8PhcA4EO1J8CSHfauciy4SQf0EIuUgICRJCBELIz1c55tzq32q9Tm/jew+8XFePUxFCfoMQMkMISRNClgkhf0II6d/Gd3KZgsn0O4QQFyEkSwi5Qwj56zv43raVKyHkcULIvyeETBFCUoSQJULIfyGEDFQ5VkQI+XVCyAIhJEMIuUUI+cvb/F4u0/Kx3ySEvEkIca+O6W/t8LsPvFwJISOEkN8lhNwmhCRWZfsGIeTRbX4vlykhWkLIfyOEzBFCkoSQCCHkx4SQn9nBdx94uVb53E+vzgPOnV7DTi2+3wVwZqcX0cT8XQBKAG9tcMxnKMtg/eseAA+Aa9v4Xi7XMt8F8A8A/AcAXwPwTwF8EcAHhBDNFr+Ty7TMawB+AcC/BPAigI8B/OkOJul2lutPAzgC4PcAfBXArwE4AeBTQkjPumP/dwDfAvD7q8deAfDfCSFf28b3cpmW+ZsArAD+okHfzeUKPAfgywD+E8rP/y8C6ABwhRBychvfy2UKyAAUAPw2gJcA/DUAkwD+MyHkV7f53VyuFRBCDAD+T5R1qp0jCAJ/1XgBEK3+OwRAAPDzdX6uD0AJwL/e79/QjK965ApAhfJk8i/Wvf/86me+st+/o5ledcr06Wp/Q1lZXgEg3u/f0UwvAB1V3qPP9m9WvGcFkAXwG+uO/QDA7f3+Hc30qlemq+/TMS1ZHbff2u/rb9bXFsaqBatlTCve0wMIA/iT/f4dzfTaylit8flPANzZ79/RbK/tyBXAvwfwHoA/BuDc6TU0PNRh1RT9W4SQv08IWVw1ZX+PEGJdff03Qkh01W39j9Z9toMQ8gerru3U6jH/NyGkq8p3/9VVU3lm1V37EiHkQ0LIh1XO+e8qXLtThJC/Vc/vEwShtA2xAMDPAiAo76q3DJcrAEC8+oqtez+y+u+Wxi6XKQCAht28s+79dwE4Kv5eN+0sV0EQ/FXeWwTgB1B5PV9B2erzp+sO/1MAR8kmLrwqv4vLFDuaf6vC5QoIghAQVjWJiveiAGawTv71wGW6IUGUjTdbhst1zfc8BeBnAPydzc5dL5JGnWgdPwtgAmU3ig1lE/WfANCivOj+ewA/BeBfEkLuCILw9urnTAAyAH4dZSF0Avj7AD4mhIwJgpABAELIBQD/BcAbAL6Jsqvm/wSgQPkBxupxOgAfoewC/haAeZQXqX9LCJELgvB/7cqvB/5nAJ8JgjDR4PMeGLkKghAnhPxnAH+PEHIV5ZCRPgD/GsAtlK1pjeDAyBRAcfXf3Lr3s6v/jqMc+tAI2lKuhJBDKFt4JyvePoKyDOfWHX539d/Dq9+7Uw6STPeSAy1XQogJ5Wf/P27l/Jtw4GRKCCEoG2v0AP7y6ve8upXz18GBkishRLr6m/61IAhzZRE3gB2arL9VPsWa9wSUBSSpeO/bq+//04r3JAB8AP7jBucXA+hZ/exPVrx/GeWbTyreO7l63IcV7/2/Ub7Zw+vO+x8ABCqvcZPfWXeoA8pxOQKAv8flujO5rl7nv1k9hr6uoIqrhMt0c5miHCctAPjquvf/aPX9X+dy3fC3SgD8cPWajRXv/3sAng3uxc9ymW5NplWO2XGoA5drzWP/C4AUgCEu0+3LFMAv4fN1KgfgF/lY3ZlcUc7rmQOgWP3/P8Z+hzpswPuCIFSa+KdW/32PvrH69zmUhc8ghPy/SDkjOoGym2Bp9U+jq38XA3gcwJ8Jq5JYPd91PGxVeR7AVQDzhBAJfa1ehxllS0yj+TkAeQD/9y6c+6DJ9bdQdnH8rwC+hPJu1wzgHUKIukHfcZBkehHlHfXvEULOEEKMhJBXAfzV1b830rXcjnL9fQBnAfyMIAjhLXyuUXCZ7g4
HVq6EkF9HORnrlwRBWO+x2AkHUab/D4BTKCdsfRfA/0UI+dtbOH89HBi5EkKGAPwTlMdmZgvn25TdCnVYPyhyG7yvoP9DCPm7KGf6fRvlbP4wyrGcVyqOswCQorw7WI933f9bUba65Gtcp7nmL9gGhBA5gL8C4HuCIAQaee5VDoxcCSFHUM72/IYgCH9Y8f5VlHe93wDwuzv9HhwgmQqCUCCEfB3lTdnliuv4dQDfAeDe6XdU0FZyJYT8SwB/C8DPCYJwcd2fwwAMhBBSuWCg7F4EgFA931EHB0mme8mBlCsh5H8B8C9Qthb+UT3n3gIHTqZCOXaVxq++SwhRAfj/EEL+SBCEWt+/VQ6SXH8PwPdRrjhiWH1PVv4YMQDICoKQrud71rNbiu92+WkAHwiC8PfpG+ThxJAAysK2Vvm8DZ/vYoBycLkPwC/X+L7p7V9qVV4CYMQ2k9p2kVaU69HVf9eUgxMEYZYQEgFwqAHfsRNaUaYQBOEegMdIuRayGuVNxCurf25UfO9OaDq5EkL+CYB/BODvCoLwn6sccheAHMAg1sb5UqvHvc2+Y5dpRZm2Ai0rV0LIzwL4/wL4HUEQ/vlm591DWlamVfgUZQ+wDcCOa8/ukFaU62GU83qqWdfDKBu+fmWz76lGsym+Kjycxf83Kv9HEIQiIeRTAH+ZEPItamEh5RqEA1h7c95Fub7pkiAI1XYxjebnUB4839uD79oKrShXWq/vCQC36ZuEkBEABgCuXfreemlFmVZe28LqtUhRjk27KAjC/d3+3jpoKrkSQv4eyiE3/0QQhN+vcdi7KC8Yfx3Ab1S8/zMAJgRBWO8m3GtaUaatQEvKlRDykygnsn1XEIT/davfs8u0pExr8CUACVS3oO41rSjXn0aF1XqVX0M55vinsIPNRLMpvu8C+EeEkH8M4McAngHw9SrH/TOU4xX/nBDy71E20X8LZWWpMk7xOwD+JwA/IoR8B+VdiBrAGIAvCILw8kYXQwj5EspZjfbVtx5fjY+BIAj/v3XHWrGa1dhAt0ajaEW5/gjl6g2/Qwgxorx77kU52D2K/beqt6JMaUzfIsp1e3tRLhHTC+Cpen/4LtM0ciWE/DTKGc3vAvg+WduFMbZqPYcgCD5CyLcB/DohJI5yU5v/afXaX9rSr98dWk6mq8c+DqAfn5cuPLwaqgMAbwuCkNr0l+8uLSdXQsgXAfxXlOfWP153XFYQhBv1/fRdoxVl+rdRLgV5CWVlzIxyyOPXAfyaIAg57D8tJ1dBEK5U+ezPozxOP9z0F2+EsIPMONTOPPytde/9/Or7Q+ve/xDARxX/rwTwb1GOk4mjXFh/AFUyelEOyJ9GuYzQXQA/CeAGgD9fd5wR5Zs0j3Lciw9lpepX6vh9H2JtRQH2qnLsr67+7eROZMrluuY4M4DfATALIA1gGeUEglEu023L9LdWz59FOW7rPwHo4WO16m/741oyRUWG8+qxYpQ3ZYur13MbwNe5THck042O7edy3bpcqQxqvBa4TLcl07MA3kY5RyKLsjfyEoC/tJ3nn8t108/uuKoDWT1Zy0MI6UY5vu6fC4Lwv+/39bQLXK6Nh8t0d+BybTxcprsDl2vj4TLdHdpRri2p+BJClChnJ15COab2EQD/EOUA7COCIDQyO/3AwOXaeLhMdwcu18bDZbo7cLk2Hi7T3eGgyLXZYnzrpYhyLOPvo+wOT6JsYv+pdrkx+wSXa+PhMt0duFwbD5fp7sDl2ni4THeHAyHXlrT4cjgcDofD4XA4W2W3OrdxOBwOh8PhcDhNBVd8ORwOh8PhcDgHgl1VfAkh3yKEbDmWghDSTwgRCCHfaOC1CISQb23jcw5CyG8TQj4lhEQIIX5CyAer9RD3hXaQ6+pnP1z9/PrXrzTq+rZwLW0h03XnOUsIKa2eb1/i+dtFroQQFSHkO4QQFyEkSwi5Qwj56426ti1eC5fpLtAOcm229aodZLr6WT5Wa5+z5XSAVk1u20t
Oolyo+T+i3NdaBuAXAXxICHlJEIS39vPi2oDbAP72uvcW9uE62gpS7sj2ByjX6rVvcjhnc14DcAblWr3TKLd5/lNCCBEE4U/39cpaFy7TxsPXq92Bj9XdY891AK74bs5HAEYEQSjQNwgh76Fc2PkfolwImrN94kKVDi2cHfMPABAAfwTgH+/ztbQ0hJCnUe7K+DcEQfjj1bcvrta3/D8IIf9VEITivl1gC8Jlumvw9arB8LG66+y5DrDnMb6EkF8ihHxCCAmtumKuEEL+Uo3DZYSQbxNCfISQFCHkLUJIf5Vz/i1CyC1CSIYQEiCE/CEhxNSI6xUEIVI5iay+VwBwE0BXI76jEbSaXFuBVpUpIWQQZcvELwJotvbZrShX2lLznXXvvwvAUfH3fYPLdHdoNbm2wnrVajIFH6ttpwPsR3JbP4DvAvgplF0ynwJ4ixDyfJVjfx3AMIC/AeDvoOzGuUjKblwAACHkXwL4NygXXH4JZUvX8wDeIYSIa10E+TzW5Vtb/QGEEBnKbo/JrX52F+lHa8r1OCEkSgjJE0JuE0JerfNze0E/WlOm/w7AfxcE4X/Uefxe04/Wkiu15uTWvZ9d/Xd8k8/vBf3gMt0N+tFacq322WZbr/rRWjLlY7XddICd9jzear/pdX8XoRxucRHA6xXv96Pct/keAFHF+0+tvv9qxXFFAP/buvPS436i4r01PakB9AEorP9snb/rXwAoAfjCbsqv3eUK4DcB/E0AXwLwMoA/Wz3fP+Uy3bZMfwZACIC18ncBkPCxuj25Avja6me/uu79P1p9/9e5TFtbpu0i1xrXvW/rVTvIlI/V3Rur2CcdYM9vDso7i7dQTroprf5IAcBUlZvzm1XOuQzgu6v//TdXjxtcvcmVrxiAb9e6OTv4TX9t9bp/Y68G+UGQa8X5/hxAGoCGy3TLv8EEwAfgf1n/u9BEim8LylWC8kIxi7LlzAjg1dVxKgD4R1ymrS3TdpFrle/f1/WqHWTKx2r76QB7GupACOkB8AHKC/TfBXAWwCmUY2UUVT7irfEejVWyrv47h3IsY+VLi3LLvYZBCHkRwB8D+ENBEP5ZI8+9E1pdruv4ryhf89Fd/I5NaVGZ/hYAN4D/RggxEEIMFdeqJ4SoG/AdO6IV5SqUYyS/jnL7zssoW9T/OcruQqAs832Dy3R3aEW5VtKM61UrypSP1fbTAfa6qsPzAPQA/oogCE76JiFEVeN4W433bq7+d3D13+cAhKscG6zy3rYghDwL4L+jvBtZX3pjv2lZuW6AsAffsRGtKNPDAI7VOFcAwOsAfqIB37MTWlGuEAThHoDHVhNA1ABmUC5pBAAfN+I7dgCX6e7QknIFmnq9akmZ8rH6EC2tA+y14ktvAss0J4SMoBw34qxy/NcJId8SBKG0euxTALoBfLL69/dRNun3CoLw/m5dNCHkDMpKwwcAfoZeTxPRknKtwV9H2c1xZ4+/dz2tKNNfAWBY997PA/g5AOdRfZe/17SiXBmCICysXocUwC8BuCgIwv3d/t5N4DLdHVpSrk2+XrWkTCl8rO4Ju64D7LXiewnloOc/IYT8DsqlQH4DwBKqV5jQAvgLQsgfAOgA8Nsox9n8CQAIgnCfEPKvAPw+IWQUwA8BZAD0ALiAcrzKD6pdCCGkD8B9lGNdfrPWBRNCxgB8D2WL2b8GcJIQwv4uNEcN2laU6xcA/BrKhcEXUN6t/hzK2aO/JghCcisC2AVaTqaCINys8tlzq//5Q2FdmaN9ouXkunrsrwNYBLACoBflTOhelBeW/YbLdHdoObm2wHrVcjJdPZaP1TbSAfZU8RUE4S4pt/n7TQBvoCycX0PZTH+uykd+G8AQynFKagA/APBLgiCwXY0gCP+YEDKJ8kD8Oyibx5dR3u3ObnA5BIAYm5d0O41yMLtx9furnWdfaVG5uleP+U0AFpR3qrcB/DVBEP7rJp/ddVpUpk1PC8tVjXJcXyeACMqxc18XBGG5js/
uKlymu0OLyrWp16sWlSnAx2pb6QBkNYuOw+FwOBwOh8Npa1regsThcDgcDofD4dQDV3w5HA6Hw+FwOAcCrvhyOBwOh8PhcA4EXPHlcDgcDofD4RwIuOLL4XA4HA6HwzkQcMWXw+FwOBwOh3MgaEgdX0IIr4m2AYIgbLl2IpfpxmxHpgCX62bwsdp4+FjdHfhYbTxcpo2HP/+7w3blCnCLL4fD4XA4HA7ngLDXLYs5HA6H06ZUtsethDdK4nA4zQJXfDkcDoezbQwGA/r6+qBUKuFwOCCXy9nfMpkMMpkM3G437t69i0KhsI9XyuFwOFzx5XA4HM4OMBgMOHr0KCwWC44fPw6tVsv+Fg6HEYlEcPPmTUxPT3PFl8Ph7Dtc8eVwOBzOljEYDDAYDBgcHMTw8DBMJhN6e3uhVqvZMUqlEjKZDAaDASLRwUkpEYvF0Ol07LcrlcodnU8QBKTTaeRyOUQiEUQikcZcKIdzAOGKL4fD4XC2TF9fH44ePYqhoSGcO3cOBoMB/f39a5Q8j8cDr9eL5eXlA6X4yuVy9PX1wWAw4NixY7Db7Ts6X7FYxMrKCuLxOG7evMkVXw5nB+yb4ksIgUgkgkwmg0qlqpkUsRUEQUChUEAymUSxWOQJFRwOh7MLEEKgVCphsVhgNpthMBig0+mgUCgglUrZcVKpFFKpFGKxeB+vdu8Qi8WQy+XQ6XSw2WwwmUzo7u6Gw+HY0XmLxSJKpRLkcvkaizqHw9k6+6b4yuVylgxx7NgxSCQ7uxQ6MUSjUVy7dg2xWAyFQoErvxwOh7MLOBwOHD9+HL29vRgYGIBcLl+j9B5EdDod+vr6YLPZ8Mwzz8But+P48ePo7Ozc0Xnz+Tzu3LmDlZUVLCwsgBDC1zYOZ5vsueJLCAEhhFl6jUYjuru7dzxhCoKAfD4PpVIJtVqNbDYLQRC45ZfTVNDxvxULWKlUgiAI7MXh7CdisRhisRhqtRp6vR4ajQZKpXKN8YKO1UKhgHw+j2KxuI9XvPtQD6ZSqYTZbIbFYoHdbofdbkdHRwfMZvOOzp/P52EwGJBMJpmsS6VS28u1EdB7Q+fenSIIAkqlEgCwf9uR7axV1aBzQTON1T1VfAkhUKlUkMlkGBgYQF9fH8bHx/HCCy9AoVDs6Nx0gl1ZWUEqlYLL5cLCwgLC4TDy+Tzy+XyDfgWHsz2o21en06Grq6uuCSWXyyEWiyGXyyEYDCKdTu/BlXI41RGLxejs7IRer0dXVxf0ev1DoWqCICCZTCKfz8PlcmF2dhYrKyttrSQYjUaYTCYMDQ3hi1/8Imw2G06dOgWj0QiDwbDj89PQEoPBgO7uboyOjiIajcLlcrW1XBsBvTfU2LbTWPNkMolQKIRcLodoNNqWlUq2s1ZVI5fLoVAoNN1Y3XPFVyaTsdiw/v5+DA0N4dixYztWfIvFIrLZLIxGI/r7+wEAwWAQiUSiqXYanIOLSCSCVCqFRqNBZ2dnXeE9uVwOEokEqVQKsViMK76cfUUkEkGv18Nms0Gv17OqDZVQ71s6nUY0GoXP50M0Gm2aRW83UKlUsFgscDgcGB4ehtVqRXd395rSbjuBEAK5XA6FQgGDwQCbzQYAcLvdbS3XRkDvDZXdTq2+kUiEje94PN6gq2wuxGLxlteqatA63kBzjdU9UXzpQ0stvRaLBWfOnMGZM2fgcDh2HN8LfK5UmM1mXLhwAT6fj7njfD4ffD4fSqUSt/xy9hya3NPR0QGr1YqxsTE899xzdW32YrEYbt68yTZxsVhsD66Yw/kcQggkEgl0Oh00Gg2eeuopjI6O4siRI+jq6mJWNEEQkM1mkcvlMD8/j0AggMuXL+PKlStwu91tZxkjhECj0UAul2N8fBwnT55Ef38/xsbGoNVq1zTyAMpu8VKphEwmg2QyycKWRCIRNBoNZDIZRCJRVYukSCSC2WyGUqnE8ePHIZFIcPv2bczNzbWdXLdL5TgVi8UsxOHo0aM4ceIEDAbDjqy
XlIWFBVy+fBnBYBDhcBi5XK5Bv2BvUCgUUCgUEIvFkEgkVTcCOp0OWq12S2vVegRBQDQaRSQSabqxuieKL419UqlU6OvrQ39/P86cOYPnnnuuITE3QHnQU8X3/PnziMfjcLvdLMaXuou54svZS+i4lEqlsFqtGBwcxOOPP45XXnmlruzsQCAAuVyOxcVFTExM7MEVczifQ+P8pFIpjEYjLBYLzp49iyeffJJVdKAUi0Wk02kkk0nMz89jcXERV65cwcWLF9syNp0QAq1WC51OhyNHjuDLX/4ybDYbxsbGqhpzSqUSCoUCUqkUM8QAYPGnIpEIEomkquJLCIHJZILJZIJYLGZVIl5//fXd/ZEtwvpxqlAo2Cbi2LFj7N6Mjo7u2NB28+ZNhEIhLC0tYXJyEolEokG/Ym+glm+JRFK1ohYhBGazGWazeUtrVTVoOUOgucbqnii+MpkMDocDRqMR4+PjGBoagsPhqFvprUzsoZMFHeh00qj2naOjoywRIxaLIZFIIJvNNo25vdmgcqxlddgNqBUEQNstjtQCYTKZoNFocOjQIZw8eRIjIyMsmXOz5MtCodBWCZr7McZ2CpV/u9yDeqGJLXK5HGazGWfPnoXD4UBfXx90Oh2zaFLZpNNpOJ1OhMNhTExM4P79+3C73W0rN4lEgtHRUWbltdlsaxp1VK5ZNO45lUphcXERV69eZZZCuVyO06dPo6enBxqNhoVH1FofG5Wk1S5UG6dGo5F52h599FF2b6gleDvQe0nvJ32vmaGyob+ZEILu7m4MDw9Dq9XCarVWnYe1Wi20Wi2Gh4chk8m2LbNmHat7oviqVCocPXoU3d3deOGFF3D06NEtVXGgMWN0xywIAls4qTVtPXK5HBcuXGCF1ePxOJxOJwKBAFd8a0A3EVSmuz1g6X3N5/NNl/W5U6jSK5fL0d/fj87OTnzta1/Diy++CKlUCplMxtyeGyn+tFsTlVGrs9djbKcIgoBMJsNcdO1wD+qBLlhyuZx1Z3v11VcxPDwMvV4PhULB7h19jiORCK5fvw6n04k333wT9+7da2sPm1wux/nz53Hu3Dl0dXWhq6uLrUu0ogUNrysWiwgEAvD7/fjhD3+I3/3d30UymQRQVjJ+9Vd/FU899RS6urqgVCohEokOTO3jnVBrnA4NDbGa0lKplFnSd7LZpgn0dD5uhfVKJBJBLpezsSQSiXDixAm88MILcDgcOHLkSFULODUqisXih2L424E9i/Gli71Codhy+8ZMJoNAIMDaNRaLRajVashkMmaSrwaNK5bL5ZBIJA2JJW5XaNawTCaDyWSCxWLZk+8NBAIIhULIZrNIpVJto1iIxWLo9Xqo1Wp0d3ejr68PVqsVGo2GWQ7S6TQWFhaQTqfXWBEqiUQicLlc8Pl8LEmgVdmvMbYTCoUCXC4XotHogaoOI5FIWOjY4OAghoaGYLFYWOxqpQJB52efzwen0wmn04lIJNL2iZiEECgUCqjVaigUijXrS7FYRDQaRSaTQSgUQjKZRDAYRCgUgtPpRCwWQyqVAlD2ei0tLcFms62J91Wr1U2/Mdxvao1TnU4HpVLZ0M1DLBZDIBCA2+1GKBRCNBptWuW3srxef38/i+kVi8Xo7++H1WqF2Wxm8dAHjZbQBJ1OJy5dugSv14srV64gmUxiZGQEHR0duHDhAs6fP7/fl9jyiMVi9Pb2wmq14tlnn8WFCxd2fdItlUq4dOkSu7ezs7NNO5FsFY1Gg1OnTsFut+Pll1/G4cOH2QatWCwik8lgYWEB3/72t7GwsIBsNls18L9QKCASibByZq3MfoyxnZJIJPDd734X165dQygUQigU2u9L2hN0Oh2MRiPOnj2Lb3zjGzCbzejt7WXWyEro/Ox0OvHee+/B5/O1/FjdKYlEAteuXYPX68XFixdx//79NaWdKjexmUwGr7/+On7wgx/gL/2lv4Tnn38eNpsNIyMjB1Ip2QpbGac7QRAEXL9+HR988AH
m5+dx5coVpFIpZrVvNmjoR39/P775zW+ir6+Pbc7MZjNMJhOkUmnLhJs1mqZUfKlrkcZIBYNBOJ1OrKysYHp6GslkEjKZjNU4FQSh6RfQvYC6fSQSyZazMKVSKTo6OmCz2VjM2l4ovvfu3YNWq0U8Hm+Le0jDb9RqNaxWKzo7O5kblIbkrLf4zs7OIp1OV7UmrneZtiKVYR8mk2lPx9hOicVi6Orqwvz8/Jr5pl02aOsRi8XM6kjLcw0NDUGr1da0oGWzWfj9fvh8Pni9XgQCgZYdq/UgFouhUCig1Wohk8mqxo0Wi0XEYjEEg0EsLi5iampqTVOPynC7UqmEWCyGfD6PWCyGTCaDXC73kAeIdifNZDKIx+Mt7wHaKnQeoXOsWCxmCZf1jNOdEovFsLy8zCy+uVyuaecBGturUCjQ19eHkZERKJVKFvpBw8waMf/StakyFBUor13xeLwpx2rTKb6ZTAbvv/8+5ubmWEKaz+fDzMwMEokEQqEQq9mbzWabduDtBzQxYmRkBBcuXHionM5GiEQidHR0QKPRYGRkBCqVahevtIwgCDUXjlbFbrfj0UcfRWdnJ5577jnY7XZ0d3c/5B4GyhNGNptFOp3eUPFdn9jZaqhUKoyOjrJSg8PDw3s2xnaKSCTCV7/6VRw5cgSXL1/GJ5980nTF2BtFZXOKp556CmfOnEFfXx/0ej0rtVUNuvBVvtpNNkB5LMhkMthsNrz88svo7u7GE088AZvN9lDWe2X+QiaTQTqdZgrBetnI5XKcO3cOIyMjePzxx1niUaW8i8UiVlZWEIlEcPnyZVy+fJl5ig4KdB4xGo04duwYK6VHq0VtNk53SrFYZPe0UCisUfKaFar8KpVKpvg2OrHY4/Hg5s2biEQizDBJoQmdzTZWm0LxrRw8+Xwes7Oz+PGPf4xgMIhAIIBMJsPcvZlMBiKRCMVisa2y3RuBXC6HTqfDwMAAzp07t6USJDT+UiqVwmAw7LiFdD0IgsCU3nZRfGkmbHd3N8bGxmC1Wlk2MYUqscVikSVMtHP8qFQqhcPhYMkUhw8f3rMxtlPEYjHGxsbQ2dmJYDCIBw8eAGiuYuyNghACvV6Pjo4OjI6O4vTp09DpdKw0VDXoWK5sTdxucqHQcmN6vR6nTp3C4OAgenp6oNVqH8ofqbTublZGUyKRYGBgACdPnsTg4CAsFstDChytier3+zE9PY2rV68iFosdKMOPTCZDZ2cn7HY7zp49i+HhYZZ8pdVqNx2nlWx1vaFeHjrGW0H3qPQAV1p6t8pmvzMWi2F2dhZerxeXL19GJBJhf6PzQjweb6qxum+KL+3o4Xa7cffuXRbfSHdSjzzyCHp7e9nuKpPJsNhIGqBtNBrR29vbNkrTdqD1HZVKJcbHx3HkyBGMjY2hq6trW+EOIpFox130DjI0zIHW99RoNGxRpGPe6XTis88+w8LCAoLBYNtayCh0TKnVauh0OhgMhpYZY7RWq0wmw/j4OGvOAIDVCm+1Ava1qAyT0mq1MJvNa6o3rGdxcZHVMp2enobf728qq85uUFkv3mq1QqVSram9G4lEsLi4CK/Xixs3bmBlZWXTpjN0w0GTB2kiUiXUgkzDHILBIDKZTNMrX43AYDCgr68PHR0dOHv2LGw2GwYHB2G325n1Ui6XbzpO0+k0wuEwZDIZ7HY71Go1ent7N2wpnc/nMTExAY/Hgxs3bsDlciEUCrW93GvpZ9VwOp2stvHy8vIaiy8t/5bNZptKZvuq+FLT+GuvvcZiQKRSKY4ePYqhoSEolcqqCyRtm6lUKtHV1bXpd7WTRXE9IpGIxTmdPHmSFeru6upqiSoW7XZvJBIJtFotCzvRaDTsb3TMz8zM4K233oLP50MoFDoQii9tYEMV31aBKr5arRZHjhyBwWDAvXv3EA6H2f1rF8UXAKu8o9PpYDabaz6bgiBgaWkJly9fxv379zE1NYVkMtlWslgPzZSXSqWsKglVfCnhcBh37ty
By+XCzZs34fV611jAap2XbjQq6yOvJ5/PI5vNstjhZlIkdhODwYCjR4+iq6sLp0+fhs1mQ3d3N3Q63aafrRynwWAQ8/PzUKvVOH78OMxmM/R6/aaK761bt3Dr1i1MTExgZWUFqVSqredroLZ+Vg1aeSidTsPr9bbEHLBvmhF1G8TjcaysrLBdgkKhwPHjx2E0GqHT6aoObkII1Go1pFJpXXGCBoMB/f397NjKmpy0TmokEtl0gmomxGIxK9lCG4L09/c/VES9Xqgs8vk8wuFw3bJQKpUwGAyQyWQHtjTKRlTKdWlpCYuLi5iZmcHy8jIL3zkoC1iro1KpYDabYTQaodVqkUgk2iIrms4lKpUKvb296OnpgcFgqKr0lkoleDwe5t68f/8+nE4nEokEK8t3EFi/YY9EIgiHw3jw4AFz+/r9flYGr9pnFQoFuru7YTKZ0NHRAaVS2RLGir1GJpNBr9czJVWn0z3ksqehJdSjRhtVlUolTE5O4v79+ywniIb1aTSaTderUqmESCTCNjC1cjGaDRqakUqlMDMzg1wu91DJvY3W7lr6WTXS6TTi8ThyuVzLPP97/pTRyaJQKCCdTsPn8+H27dus7Z9Op8NP/MRPoLe3FzabDTabreZ56A58s+/r7e3FU089hXg8jqNHj7KbQxMG4vE4C85uFeRyOfr6+mA2m/HFL34Rw8PDGBsbw9jY2LaC10ulEkKhECKRCCYmJnD79u26PtfZ2YmjR49Cr9e3TMLSXlIp12vXruHKlSuYn5/H7du3WVbwQVJ8W9m6bzQaodfrEY1G0dHRgUwm0xaKSuVcQpOrenp6qh6bz+dx8+ZNzM3N4dNPP8X169cRi8Xg8XgO3FiuZHFxEXfu3MHc3Bw+/PBDRCIRLCwsVLUOVoZLPPvss+ju7sbQ0BAMBsOWEpIPCkqlEna7nVXHsVgsD61vNBQkEAjg0qVL8Pv9LGxyenoa09PT0Gg0sNvtayz2m4VclUolrKysYGZmBm63m+kIzT7OacJpMBjEpUuXYDKZHoob32jtrqWfVaOyO2Gzy4WyJ7N2ZfZ6OBxGIBBgxbxjsdia4P98Pv9QQPZOUavV6OjoYE0vKhVfkUiEaDQKt9sNl8uFXC7X1I0UaH0+nU4Hm80Gi8UCm80Gq9VaNcliMwRBQC6XY3Vig8EgXC4XlpeXa8qAdsqhO8VW6WKzX2SzWVbI3u12IxgMspqeBwH6/NP4xHg8zsbPXkArwNAkrMpySJVdjTaDlgiir1a39labSzo6OpgLn0KtR4lEAolEAm63G06nE36/H/F4HOl0+kAovZWJfHQdo1Zar9cLl8sFj8eDSCTC1rVKpbeyqYBer2cu++7ubmi1WtZhbP130qTuRCKBWCzW9nHU66HPHe0kVu25o/cml8vB6/ViZWWFGRZ8Ph/i8TgUCgVUKtWaMLRq62XleI9EIkgkEshkMi3VPZPKI5vNwuv1Ip1Os3mrnrWbjlW5XA69Xl/XHFnZpbDZK27tieKbz+exsrKCbDaLjz76CIuLi4hGo4jFYpiZmdl1ATkcDphMCcGn4wAAQllJREFUJjZpUYrFIpxOJ6LRKKv/t7Kygtu3bzetUqLT6dDX1webzYZnnnkGdrsdp06dYiWztkoul2MJGB9//DEWFhZw48YN3Lx5c0PFt6+vD729vdDr9S3j3tgPSqUSotEoPB4PJicn8fHHHx8opRf4/PmniSL5fB79/f3o6+vbk+93u91YXFxEKpVCMBiEXC6Hw+GARqNBf39/S8UcN5J65hJamSAajeLatWvw+Xx47733MD09jUAgwFrAt4pCsBOoLCKRCK5cuYLFxUVWjpHOmX6/H/Pz81WrtMjlcpaXcvLkSfT09OD555+H3W5nYWvrlbrK+XliYgIPHjyAx+M5EPLeCnRtp/dmZmaGKavZbBa5XA5msxlDQ0Po6enBoUOHYLFYHloz1493r9eLhYUF5spvFSrH6ieffLKmelI9azfdEPf
39+PChQt1dWHMZrMsLGJhYaGpPeh7ovgWi0Ukk0lIpVJ4PB4AYNaDSCTykPAry4aUSqUdJ0DJZLKq1iW6Q5TJZKzcUiaTeShDlFozmmGykclkMBgMMJlMsNvtsNvtLOawHta7JTKZDGKxGMLhMNspezweBAKBDc9jtVohEomY226rsb20JMz6Yu6tSmVvc/qi0JIuqVSKZXhv5smorNvb6vKhz79MJkMwGITP54PJZEIul9v10AdBEBCLxZjVx+fzQaFQQC6Xs1JTWzkXLUPXymW7Ki2PZrMZFoul5lxCf3Mmk4HX64XT6YTX64XP50MikWD3kI73Wta4dmj6UWlFo6EdtBHAysoKvF4votHoQ7HOdP2SyWRQqVQwmUzo7u5GV1cXax27votWpQWTzs+0vGezdgvbLejYqXzuqukE9LhkMol4PL5mfNIxShNV1Wp11bC8auOdJm222vitVOIrsVgsTAega/h6xGIxpFIp9Ho9uru762pAkU6nkc1mIZVK98ybt132RPHNZrNYXFzEysoKVlZWoFAoWPxNPB5/yArrcrkwNTWFUqnEasvuRt9yWgpMq9VCEATYbDbcu3ePdc6pvB7aWWe/A9sNBgMr3n38+HF0dHRsyWIVDodZJjqdIO7evYtgMIgf/ehHmJ+fRzgc3vAclXHT/f39GB0dhUqlqtviXFmM3eVyIRqNNnV4ST0YjUaYTCbWktdoND7kRpNIJFAqldBqtejq6tpws5BMJtl9ikajLW0hps+/1+uFTCbDzMwM/H5/XVaEnSIIAq5du4br168jHA5jcXERer0eZ8+ehcPhYMpHPdBnZ2lpCT6fD+FwuCXvCx2rQ0ND+OIXvwibzYZTp07BaDQ+NJdQ92UoFMKlS5dw//59LC4uIhKJsN+uVCpZDKHBYFgz7ivb9LZ604/1cZM0PlQkErHk6PUVWgghUKlUkMlkGBgYQF9fH44ePYoXX3yRGTCqtY5Np9MIhUIIBoO4fPkyvF5v3fNzu5FOp7GysgKxWMzCEc1mM5RKJTuGbuZUKhWr7/vgwQOEQiGm7G40N1PqGe+tDCEEOp0OnZ2dsNlsMJlMVZPSaSLc2NgYjEZjXc9tOBzGxx9/DI/HA6fTCZ/Pt1s/Y8fsmcWXPqybCaOyULfNZtvVxZFOSgBYIfJCoYCenh62w6GTNo1j229ooH9nZyc6OzthNpu39PlUKrWmKUg0GsX9+/cRCATw4MEDLC4ubnoOQgiMRiP6+/vR3d0Nq9W6pdhiQRAQiUTg8/mYbFvJjVQNmvFvMBig0WigUqkeWszoLlqj0aCzs3NDmdFFlGbMtjL0+ReLxbh//z4ikQjMZjPsdvueWHwfPHiAyclJVs7IaDSy5K2tzC80VILG/bVqWSM6Vh0OB4aHh2G1Wlmc6XoqrWizs7OYmpp6KLOdWoaUSiVsNtsaaw+tBwq0R9OPUqmEVCqFubm5uj8jk8mgVCphsVjQ39+PwcFBjI+Pb5hYlc/nEYlE2JhdWVmpe35uN3K5HOLxONtcUDd8NcVXLpfDarUilUoxr4RWq4XFYtlwbqbUM95bHRrfq9FooFQqq9ZAlkgkrKZ3R0dHXecNBAJYXl4GgKZPcm+6lOR8Po8bN27A7XYjHo9DEAQYDIZNFYWNoPWAaVJLNRQKBQwGA8bGxvD1r3+d7e7S6TTee+89zMzMwOfzwefzsV1hqyAIAhKJBLLZLCYmJvDpp58yC0wqlYLH40Eymayr3qRGo2EF7hUKxbaSD2lm+J07d3Dnzh04nc6WLoVELeBnzpzB4OAgxsbGoNVqmQVcKpVCoVDAarViaGgIo6OjeO655zZc+BYWFljtyXA43PIbA+DzChfpdBoff/wxmyR3m6WlJTidTuTzedZEg1qB6h2/giBgYWEBV69eZXVr4/F4yyUabTZWKbQUVDKZZMnItPQe9cwoFAooFAr09vbiiSeegNFoxKFDh1jHSGrEiEQiuH37Nubm5trCarYVRCI
RTCYTjEYjjh8/ji9/+ctwOBw11zK6UVhaWsKPf/xjeDwefPrpp/D7/U0dM7mb0PHj9Xphs9nQ2dkJjUazptQpXdtpW3SfzwexWIzZ2VkMDQ1hcHAQ/f39D413aqFPp9MsJjidTlcd7+0ArWssCAJCoRBLaDUajTsOT6jWqKhy49tMNKXie+vWLdy5c4cV9rbZbBCLxdtWfA0GA6tXt5HiS5Xf0dFR9n4ikUAgEGBxvuurULQCgiAgHo8jFovh7t27+PDDD+H1ejE9Pb2lfuP0flDFQS6Xb+ue0Hv8wQcfwOPxsLjvVqa3txdnz55FT08PRkdH2TgrFAqQSqXMEjE4OIjHH38cr7zyyoYtpWknHNoZa6NyMq0CnWwBwOVy4erVq3v2vcDndSurddWrh+XlZaawT09Pt9QcUEmtsVoJTRZKpVLw+/0IhUIPzRV0vuzr68OTTz4Ju92Op59+Gnq9nh3j8Xjg9XoBAK+//vru/7gmgzYY6u7uxvHjx3HhwoUNq4FQT9zi4iKuXr0Kt9uNGzduHFilFwCz9HZ0dMBmsyEQCODw4cPo7e1lx1SWiDt//jzrqlgqlXDy5EmcOHECVqsVY2Nj7JmnJdDy+TzboNHueNXGe7uwtLSEpaUlJJNJPPLII0gmkzh06NCaZkvboVLxpTWXI5EIV3y3QqlUYg+9wWDA0tLStsoHEUIwMjKCkZERaLVadHR0rMlwrHbOSrO/TCbD6Ogoc1OnUinE43F4vd59KeFTGehfb8xxNpvF5OQkFhcXMTU1BY/Hg2g0uuXrl0gkGB0dZTtnq9W6pWYZ6/udt1M2eOV9ob+LjjGqCIyNjUEmk2F4eBgymWxDN79IJGIl/WgJn1a1iNdir++9VqvF8PAw7HY7RkdH4XA46koKpUpg5b1txXFLxxRNvqyVIASUwzoSiQQePHiAjz76CC6XC4lEgpWaFIlE6O7uxvDwMEZHRzEwMMDifCvPSWuK0/lTIpEciNJnwOdluKjbuFYiEQCWNOl0OjEzM4OZmRnMz88zyyPncxlVJpjWkildtyUSCR555JGqaxX1SCQSCdZ2myYi0zC8dkm+rkW9zyFd3yqPr+wXQOeFzs5OKBQKHDlyhOVzJBIJ5kFqFppW8QWAu3fvYmZmZk3W8FYhhOCll17CCy+8AIfDAaVSCZlMxiaiaokFlcjlcly4cAHnzp2DxWJBPp+H0+lkXbf2eldIFcdcLod0Oo1UKrXpZ+LxON555x189NFHWFlZYTvhrQ5EuVyO8+fP49y5c6yY+FaaZdBs2XQ6zSaUdlkAC4UCK5uTz+fXKBldXV0snrJYLEIsFm/qVqJjs/LV7q2Ndxur1Yovf/nL6OnpwYULF9DR0VFXqAO9p3TD1qpjlnq8aAmuas8tnRfC4TBWVlbw0Ucf4fd+7/eY0kurE4hEIpw4cQIvvPACurq68Nhjj7HzVkI/QxM7aU3aZsiX2E3ob5ZIJJDL5ZvWi6Zj7ObNm3jjjTdYy+NWrCawm9BNaC6XYxUEqj3DdN1+5pln1tTdrhzzNMl6ZWUFr732Gv78z/+cbWrpWsXn2zLV5EFlT/UztVqNY8eOIZPJQKFQYGVlBX/xF3+BpaUlZklvFppa8aWVH3YCIQRerxfz8/NIJpPMHE+75JjN5k0DsWnBZ5vNhoGBARBC4HQ6kUql9jzbM5PJwOfzgRCC6enpuio6JBIJrKysIBgMIplMbvt6aZtNtVr9UPvDjaATSTqdxsLCAvx+P4LBIDKZTNvE/MViMTidTkilUoRCIahUKpbhTifcrbjVaUxwPp+H3W6HSCRCMBisa6PDqU6lEkKTOjajWCyydqVerxfxeLwlK5CIxWLWztxms7G65ZXWssoW7m63G3Nzc8zSKwgC+vr6oFAomCLR39/PsuRrKXY0QcZkMmF4eBjBYBALCwsspr/V5FgvYrEYer0earUaDocD3d3d0Ov1Va2TlWOMZsOHQqGmbwKw19BEc6VSCbfbDaP
RCLPZXDPBu97nm3YpozlFnM9Zv3ZXPrdU9jKZjFXdqnzei8Uient78cgjjyAcDsPpdDbNRqKpFd9GIAgCPvvsM8zPz7MFz2Qy4cyZM7DZbLhw4QKGhoY2PQ8hBCdOnEB/fz8mJiagVCrh8XjwySefPFQnbzdZWVnBW2+9BblcjjfffLMuZWp9Sba9plgsIpPJYGFhAd/+9rfx4MED3L9/H6FQqGXjJCupHGNHjx5FOp2G3W7HmTNn1sQ7boWuri688MILrO6sy+XCpUuXtpRNztk56XQar7/+Oq5du4aZmRnMzs62VE96ikKhwMsvv4xTp05hZGQEw8PDD3m7isUiK9X29ttv45133mGW3r6+Pnzzm99kyq9EIoHZbK5ZjotCk2DPnTuH3t5ezM3N4Tvf+Q4WFhaQzWbbZuO7Ho1Gg1OnTsFut+Pll1/G4cOHaypo7TLGdptkMolr165BrVYjnU6jr68PFy5cwPnz5/f70tqW9Ws3fW5LpRLOnz+P8+fPw2q1YmRkhG18xWIxent74XA4oFAoMD4+jqtXr+KP/uiPmsZw0/aKL1C2xqXTaWaSN5lM6OzsRC6XQyQSQSqVqqs9Mi2hEggEYDQakUqlth2CsV0ymQz8fj9EIhGz/Nb7ue2GZYjFYlbJgboz6/neyiYZkUgEfr8fDx48wNzcHAt6b5cdNh1jFouFWXw3s9YUi0Wk0+k1MaOVoQ0dHR0ghMBmsyGXy22rM99Bhipo1N2u0WiY96be50YQBHg8HszPz7PySK1oqRSJRLDZbMxKq9FoHoqPpBbfeDwOv9+P5eVlSCQS6HQ6dHR04JFHHsHg4CCrrV7p6qwlTzo/qtVqWK1WhMPhTcPLWhkaokR/b2dnJwsLW7++0Oc/FotheXkZc3Nz8Hg8zPLYamNstykWi6xls9vthkQiQSwWY/kU9UIbO2SzWaTTaSSTybYwwOwG6y2+s7OzrLTr8PAwvF4vxGIx0uk0S3YnhDCPms1mQz6fx4MHD5rqmT8Qii+NJ6UTdCgUwqeffspc0Xfv3sVjjz2GRx99dMPzULc1jZXba6UX+LzANiGE/bsZdPBu1YJAf6vNZsPLL7+M7u5uPPHEE7DZbBtWJKAkEgnE43FMTk7i3XffhdvtZnVc6a6xXaBjLJ/PsxCdWgtXZU/5119/HX6/n1l4Hn30UTz22GPQ6XRwOBwsJpMqGJz6UCgUzJtDKzjYbDaMjo5Cq9XWXWeSLpK0qU2rWihp1ntljkM1aOKQXC6HwWDAyMgInnvuOdjtdgwODsJgMDDFtZ74/mQyiWg0isnJSXzve9+D2+2G2+1uu+efYrfb8eijj6Kzs5PJjbaAprJa//w7nU589NFHWFxcRCKR4OENNaDPolgsZjG+25FVMpnEzMwMgsEgLl68iJmZGUxPT/ONxgbQjoXpdJrVNf7ss88Qj8cxOjqKTCYDk8mEsbExVie5MvRhO2VPd5MDofiub5dZKpXgcrkQDofR0dGBRCIBu92Oo0ePbjiRU4txZaD8figje7Vg0LhUvV6PU6dOYXBwkDX6qCfEIpfLIZFIYGlpCT/60Y8QDAYRCoXaMmmgsqpDZeJetcmUJihGIhFcu3YN8/PzLOGHZsYCQGdnZ9UWyJzNkUgkGBoawunTp1ksGo27rse7UwndyLRyYhsAVmGglsemcoNMWxp3d3fjC1/4AkwmE0wmE6uHXi+5XA6pVApOpxNXrlxBMBhk3TrbbQ6gXbFGRkbQ1dW1pvJN5fNLE7Si0SiuXbuGubk5LC4usk6AeznGCCEtNaYrx+hWnsfK43K5HEvyvnv3LiYnJw90ubiNqGzTTite0JfL5UIymWTW31wuh8HBQQBrk1ppNReqLzXDeDsQiu96qOm+WCxifn6edXcplUpwOBwYHx/fds3gdqOyPqLVaoVKpWKDeDOKxSJrjUwLsOfz+aYY+LtFOBzG7du3sbKygnQ6XbVcFp1AvF4va4xCi6VTa3irWhabBVp
Gjo5bWsGBuuOaye2231DXcSwWw7Vr1/DgwQO4XC5IJBKoVCrWFn07XoeDMgf09fWhp6cHw8PDePzxx2GxWGCxWGrWiqY1ZIPBIPx+P1Kp1J5sBgghMJlMUCqV7LnweDy4e/du0885crkcDocDOp0OTzzxBAYGBtDb27vpmFxcXMTS0hLS6TTC4TDi8TimpqYQCoWwvLzctLVm9xPaeMLpdOKzzz7DwsICgsHgmspCld1fk8lk1SZUtHb6yMgIXnrpJXi9Xnz22WeIxWL7Wif5QGp3NO40m81icXGRxcrGYjEcP358TZHrgwytcyyVSmEymWCxWJjiWw90p0iL4Ld66916iEQiuHPnDlQqFRYWFqqWLaNyicfjmJ2dRSKRYItONBplHYQ424c2W6Gdiex2+35fUtNSOQ9++umnmJqaYt4HtVqNjo6OuuodV+MgzAGEEPT09OCpp57C4OAgTp48CY1GA4vFUvX5p1a0fD6PUCiEQCDAFN/dhjbUMJvNOHz4MMbHx3Hz5k1Ww7aZkcvl6Ovrg9VqxeOPP46xsTF0dXVt+BnaqYx2waTVnehmw+l0IhaL7dEvaB2oUjszM4O33nqLVRpZr/hmMhnEYrGaii+toDUyMoIXXngB8/PzmJ+fZ8fuV1jPgdfuaHFwn88HtVoNrVaLO3fuwGAwoKura8uuvXZlowSWemg3C08t8vk8YrEYSybcKJYylUo9lL19UOTUaOj4VCgU6O7uhslkQkdHB0vE2gxqha9sTgOU49T9fj+rPd0qbCSP9WOSjkXa1rxYLMJoNLIk4EaF2bTb2BaJRLDb7dDpdBgeHsbg4CC6urpYqbj16wYdY+FwGAsLC5ibm2Ou4kbIhl4PbcVdbe6hGfcGg6FmY4dmg1oNaUk82ra4nlKkAFgLYq/XC6fTyZS6ehtAHURoOF4ikYDb7UYgEGBeya1A84S0Wi0cDgeSySTkcvmO9YmdcqAVX0EQkEqlkEqlUCwW4fP54Pf7USgU0NPTgxdffJG5SNdPDDzRiFONVCqFTCbDHuxa44ROIJWLHh9T26cyJOfZZ59Fd3c3hoaGWL3uzchkMswC/4Mf/ADZbJa9PzMz03Lu0K3Io1AoIBgMIhAIIJlMIpfLYWBgAE888QQeeeQR7v2qgVQqxaOPPsrCG06ePAmdTge73V41jpqOsZmZGVy8eBEul+shK1qjrocacdZDq3uo1WoYjUYYDAYsLy83teJrMBhw7NgxOBwOPPPMM7Db7Sx+up7rDofDmJ+fh9PpxO3bt5kCRy3vnIehtY39fj/u3r2LeDy+rQ1aZYUiWjdcqVTWXRlqt9i3GY260WUyGZRKJYrFYkN3v/VCv4t2QotEInC73ZDJZCzzuN0sFZtB741SqWSTY2VZqM2gsWrJZBLhcBjJZPJAyZBPprsPjdOt7IollUphs9nQ3d2N7u7uDS1f66EWDtqIJJ1OA/jcgk+7trUK9BmmJYU6OzsfkkexWEQ2m0UikWAvoNzhTa/Xs4YXzawU7ScikQgmkwl2ux1ms5lZetcv6nQ+jEQiCAaD8Hg8rFFFrTVGLBavGdf13AOFQoGuri429jUazUPHiMViGI1GKJVKVu6vmRJnCSFQqVRrKodYrVY4HA7Y7XZYLBZ2/fVeN008ph3fWslzs5sUCgUkk0kkEgkEg8E1Y5AmosdisYbIjCbKN0spw31TfGkzCZvNhkOHDiGdTuPatWv7FvSczWZZKapEIoGxsTG89NJLsFqtTVeKY7eh96anpwcnT55EX18f9Ho9U343olQqIRqNIpVKYXJyErdv38bMzExLKQ2c5kYkEkGv10OlUqGvrw/9/f1MQbDb7Xj++eeZC7qay7ka2WwWsVgM8/PzeP/995kSSL1CrdYumi40BoMBZ86cwcjICHMNU3nEYjEsLCzA6/ViamoKPp+PWQQPHTqEp59+esPSZwcdiUSC0dFRnD17Fj09PbDZbA9V+qmcD+/cuYOJiQlMTk7i0qV
LSKVSNY09Op0OfX19UKvV6OzsrMtroVQq8ZWvfAXDw8PQ6XQ147LppjEUCjVdNQOJRIKxsTHY7XamnHd3d+Oxxx6DyWTCsWPHoNVqeU3zBkA9XKFQCIIgsPEiCAKi0ShisVjbrt37NqPRhUqn06G7uxvxeBxqtRrZbJbt0Pba8kvrK1LrB72Gg2KtpBZdmUzGSj91dnbCZrOxOpT1WHyz2SxSqRTC4TA8Hg8ikUhLKQ2c5kUkEkEsFkOlUkGr1aKjowM9PT2QSCSQSCSwWq2wWq0wm811WRfo853L5ZDJZJj1gyq+rQwtv0hlJZPJ1jzDNIYvHo8jmUwik8lAIpGwmLxaLXbrgcqVxrW20+JJ5UrlZDQaoVarqyayAWU5ZzIZRKNRliQUj8eRzWZZyaf1qFQqZkXu7u6GQqHY9LqoIYk2KNFoNGvc+etzCUQiUdPdG5FIBIPBALvdDo1GA5VKhc7OTtjtduj1+i3V4OZsDG3gBQAul2uNXKkO1Ii1uzKZs1kMCPum+CoUChiNRoyOjuInfuIn2K7Y5XJhYWEB4XB4T4PPaSyK2WzGwMAA61BUS9lrN2WYuphkMhkGBgbQ19eHo0eP4sUXX2SJBfUoEqVSCaFQCF6vFzdu3MDFixeZFZ/D2Qm0prRKpcJjjz2Gzs5OnDp1Co8//jh7Tmmf+HqVXtq1yeVyYXZ2FisrK00xMe8UajxIJpOYnZ1FqVTCI488wmQjlUqRzWbh9/tZnCl1K2u1WhgMhh0pve0o18oQsN7eXlaybKMEStoNL5FIIBwOswpC4+PjG8qjp6cHjz/+OEwmE8bHx6uGLayHJrdVllBLp9MIhUJMyamch4PBIILBYFPdG6lUisceewxPPvnkmvrber0eMpmMW3obCC29KZPJcPfu3TWhI7R2Oa25vV2oDhcMBlnXVlpKdj91qH1TfKVSKRQKBSwWC44cOYJoNIr+/n4AYBaXvdyJUgu0Wq2Gw+GA1WrdkpWzHaDx1haLBf39/RgcHMT4+Hhd1gYKdQ1Ho1G4XC7MzMy03SZhv2iG2Kj9RCQSQaVSQafTobOzEwMDAxgdHd2042ItaC3VdDrNrHHRaLRplICdUGnJ9vl8UKlUTKmlC1yhUEAqlWJts0UiEbRaLWtUsZPvble5Uo+Y1WqFzWZjcdMbPZs0tpQqwCKRCA6HY8Pv6e3txcjICCwWC44ePbrtcnK0UU4mk4HH42GGJFoznNZhbZZ7IxaL0dnZiZGREdjtdthsth2d7yB5bLcKHRO7CbX0JhIJeDwe+P1+5tU/kIovhbqMzGYzLly4AJ/PB7FYDLVajWg0WtXlSF1oxWKR1ZLbCoQQljhAy/vQmKhDhw7ha1/72pbcpe0ATdQwGo04fvw4vvzlL8PhcNQd3ycIArLZLHPptVoWfLMiEomgUCigVquh0+lgMBi2NeZbGdr2Uq/XY3x8HFarFWfPnsUjjzyCvr6+LZ+PjtVcLof5+XkEAgFcvnwZV65cgdvtbgvvBF1wgsEgLl26BJvNBpVKxUpDyeVyGI1GHDt2DOl0Gv39/SgUCjAYDFAoFFyuVaisaf7ss8+iv78fw8PDTGa1PmM2m6FUKlkscD0YDAb09PRApVLVbeUUBAGJRAK5XA7FYpF1KL1x4wbC4TAmJyeRTCbZ8XQe8Xg8+3pvaLiHTqeD0WiESqVi3VEroeOLJguWSiUoFIqqsqeyoJU0aHlJzt5CN7/T09N4/fXXq9YD3g/2XfGlk4lGo8H58+cRj8fhdrtRLBaZK2b9zoBaFQuFwrYULOqukkqlLEOUulVOnjyJF198sS7XUjtBC5t3d3fj+PHjuHDhwpYUftoNL5lMIhaLsfvCd9s7o1Lx1ev1MBgMB25TIZVKWTOKo0ePoru7G6dPn8bIyMi2zlc5Vufn57G4uIgrV67g4sWLbTNeKzuDXbp0CXq9HidOnEBvby/bRBgMBhgMBnY8sLOSeu0uV1qFyGKx4MKFCxgbG2MVCGp
BO6WZTCY2buthO/dBEATE4/E1yu/c3ByuXLkCj8eDjz/+GNFo9KHP7Cc0r4SuxbRJEo1Hr4SOr0KhwFq819p0UFnEYjGm+PKavXsPrZk8OzuLt956q2ka2Oy74rsemUyG0dFRiMVilnixnmKxyLoAzczMrNnF1gOtWKDVajEyMsI6E2m1WgwPD9dMUqjsW013nPs9cTQaOuFudeLN5XJwu90IhUK4e/cu5ubmdt2NchCQSCTo7OyEQqHA4cOHIZVKMTU11XTZ2LsBjak0GAwYHR2F3W7HoUOHWMWGrULda+l0Gk6nE+FwGBMTE7h//z7cbnfbPcuVbPbbtqto0TmQypW2gG13uW63AH8jmgDRtWc92WwWk5OTWFpaQjKZRCqVwsrKCh48eIBwOLytBgS7CU0SlMvlMJvNOHv2LBwOB/r6+qpWbsjlcvB6vaxMZjqdxujoKNu8VVIoFDA9PY2FhQVWsaSZQjr2EzqvVhvDlfWN65kzaLIxfdViv0Mb1tN0iq9cLseFCxfwzDPP1BR+Pp/HxMQEPB4P3nzzTbhcri0J1Wg04sSJE+jq6sJLL72EQ4cOrbmJtRRfmqGcy+WaLht2v0mlUrh9+zacTie+973v4c6dO3yH3QDUajWOHTuGTCYDmUyGpaUlvPbaa5ienm6qiWQ3oHUfe3p68Nxzz6GrqwsXLlxgCVpbhVpBI5EIrl+/DqfTiTfffBP37t3jY3Ub0PmQKmLBYBDXr1+Hy+Xict0F6HpIE4bWP//xeBzvvPMOPvroI/j9fvj9fla/dj/bw1aDKl1yuRwGgwGDg4N49dVXMTw8DL1eD4VC8ZBSlk6nce/ePfj9fty/fx/hcBgvvfRSVc9PNpvF+++/jx/+8IdwuVxwuVw1NwwHDTqv0o0HlXNlNa16CgvQ89Da4PvdlGIrNJ3iC2DTmKZCoQCLxQJBENDf34+xsbEtnd9gMKC/vx82m42VjKmHWCyGQCDALJvRaLSpJpP9hMZd53I5ljDD2Tk0/k0ul7OQh4NSV1qn08FsNqOrq4uV1dtKzON6MpkMAoEAfD4fnE4nnE4nIpEIH6vbhM6H9JmnrXi9Xi+Xa4OhFvVcLsdCANeTSCSwsrKCYDCISCTSNG7lalClyWw2Y3BwEENDQ7BYLMzSWy3MrrI0n9FoZMmu1RAEAZlMhpXp4/G9n0PnVZlMBrVavUbxpZ0ba42xaudxOBwwm83Q6/VN1QxlI5pS8d0M2m/c4XCgq6sLL7744pY+T8siURdLPQiCgOvXr+ODDz7A/Pw8rly5wgqQczicxkIIwYkTJ3D+/Hn09/fjzJkzUKlUUKvV2z6n0+nEpUuX4HQ68d5778Hn8206uXOqUzkf+nw+zMzMsHg+unByGkexWMTS0hJ8Ph8uXbqES5cuPWTxLRaLcLlcrNNgM0MT2c6ePYtvfOMbMJvN6O3t3bDhjFqtxqlTp1jN7UKhUPf6zSlTOa9arVaMjIywBPZcLofZ2dk1Y6ye8zRqft5L9k3xpa4X2jGJuiCo+2OjFrm03zPN6LRarVv6bmpFo4l1m0ED430+HxYWFliPdZpA0MpUWhRVKtWmyRrrodnjmUwG6XSaJ7TtkHw+j2QyyWRJnwWgvOGTSCRQKpXQaDTI5XLIZrP7fMWNR6FQsPbD/f396OzshNFoZJVYtgp120UiETidTrhcLng8HgSDwaZXEBoFtYDF43GoVKqqruStUCqV4PV6MT8/D7fbjenpadbalM4JnK1TWeS/0i2fz+fh9/vh9XpZ3Gq1eZYqhM0+B4vFYlbLf3BwkHVZ3Oj5FovF0Ol0a1zy69eqYrGIdDq9ps14s8tir6HlILu6ujA6OspkSOWlUqlgs9mg1+tryo7WjK41P9M8qMqqGs3Evim+yWQSXq8XH3/8Mf7Vv/pXTPgKhQLPPfcchoaGWMJZNeiusFbnm42gCjWN692ITCaD999/H7Ozs/j0009x+/Zt1nWnHR4qlUqF0dFRmM1mPPf
ccxgeHsbY2Fjdi6LH48HNmzfhdrtx8eJFeDweeL3eXb7q9kQQBNy9exeFQgGjo6PI5XIwmUwYGxuDQqFAV1cXdDodvvKVr8BkMmFmZgaXLl1qK+VXoVDgwoULGBoawuOPP45jx45Bo9FAoVBsO4bs7t27uHnzJubm5vCjH/2I1S7d75I6e0k2m8XFixcxPT0NtVrdkO5XMzMzmJ2dZZUEKjtdHhS5Nho6VqPRKFZWVphhpVQqwe/3I5FIYGZmBqlU6qHPUrm3guzp2ks38gqFYtO1mK71giAwBYt+hnZd9Xq9eP311+F0OvHjH/+YJcNxPqcydK6y+YpcLsfg4CBsNhuUSiWOHz++4XlGRkYwPDz80PxcLBaxsrKCSCSCy5cv4/Lly1hYWGiqdWrfFF+qOC4tLSGTybCBrNFoMDIyAqvVylpCVoMugJtlE26HSmU2n89jZmYGP/7xjzE3Nwe3280sG+2AVCqFw+GAw+HA4cOHcfjw4apZsrWIx+OYm5uD0+nEzMwMfD5fW7R73S88Hg8ymQxKpRJGRkaQy+VYF0FqFRkZGYFYLIYgCPjwww+bLlt7u9CyRiMjI3jiiScwNDQEh8MBmUy25c0tpVQqwe1249atW1hcXMSDBw+YNb0VFIRGUSgUMDs7C7fbzZJRdgptgNCunodqrK+s0MhsdUEQ2Fj1er2YmZlh68z6Ep6tblGnhifaOKre8VjLC1wsFplX59q1a5ibm8Py8vKOO4+1I3QDQduTU9kLggCTyQStVgulUrlpLW9aDnH9/CwIAqLRKPx+P6anp3H16lXEYrGm8o7va6hDoVBgShIdzEqlEh9//DH8fj/Gx8dx5MgRqFQqGI3GXc0YpMW83W43s7oB5UxS+iD5fD5WP7BdqNYgYStdm/L5PKuVGIvFkEgk+ESzA2icZDQaZSEPpVKJKYVisRhWqxWEEGbhCAQC+Oyzz1hr6FZSgiuL1584cQI2mw0nT57E0NAQrFYrsyRslcrKL5988gkmJiYQCAQQj8cPlKWXQuuaZrNZiESihjTloXNmO82HG0FDOAKBAC5duoR79+7taFO2HkEQcOPGDUxMTCASibB69hQ6bpvNbbyf0DHodDpx8+ZNZoDxeDxsLTpoz/p2qVxj6tEDaKjp+vmZVoWgYQ7BYLDpQiD3VfGlym+l20YikeDy5cu4f/8+crkcDAbDnmQMUoVjenoar732Gptc8vk8pqam4Pf7kclk2s6yQZt50FawW7H2AmVLUjKZRDweZy5PzvahE3ksFluj+AJgLimr1cqsv1KpFPPz85ifn2fHtpIiQuPszWYzS5Q4duwYHA4HFArFtis45PN53Lp1C7du3cLExATu3bvHWsY20wT8/2/v3OPkqKo8/j0hhAkhIRMkD8AwIAIK+AJZWERgeYMiArKAICDKqojrrq7AAhoB0RUVEVwfiAIKyoogCAIJ+Ik8E4gCCSHJBNOZyUzmPdM9k3k/zv5xbzWVpifT3dM93TV9vp9PfWrm1r11b/3qdtWp+zh3oggMXyN3AsO3vb2dJ598Mrn8c75W9lRV6urq2Lx5M729vcTj8bKsq9kQvLerq6t5+OGHaW5uTg6/mQxDESea8LCHnXfeOefzDA4OJudvpVuErNiUnFeHoJkcIBaL8frrr1NZWUkikSio4ZtIJOjs7GTdunVs2rQpafiOjIzQ1dU1KSay5ZN4PE5HRwe1tbU0NTXR3t5uLb15pLe3l4aGBqZMmUJ9fT0DAwNUVlZSUVGR/GDs7e2lvb2deDweuZaNwJ3R7NmzWbhwIbvttltyosSsWbOYNm1aVr/3oMs5aP0JXoYbN26ktbU16QrKMMZDsGxuU1MTXV1deV/Ovr29nZ6ensgPZSgkgWu3wcFBamtrqampobq6Orl4ymQZ+lUo4vE4sVgMVWXBggXJ3t5c7avgI21gYICuri56e3uTwx5L9eOt5AzfwCVLsOpXR0cHM2fOZO7cuXl/yIQJVrqJxWK
8/PLLyZdklCYMjJdshpLU1NSwatUq1q9fz/r165MrAxn5IR6Ps2rVKlpaWpg/fz4LFizgPe95D/Pnz2doaIj+/n7i8TgbNmygvr4+OWa1FB8y6aioqGDmzJnst99+nHDCCey2224cdthhzJkzh4qKiqy7j4PutaAbuq6ujqeffppYLLbVCpBR0ccoPQJvAj09PbzxxhvA+FZiS0f4XWN1NT0jIyPJD/6XXnqJZcuWEYvFWLlyZbKByrRLj6pSW1vL888/T2trK7Nnz2bOnDnsu+++OU94jcfjrFy5kkQiQXV1NVu2bKGxsZHOzk5qa2vzfAX5oeQMX3jzx9/V1ZWcLNXX11dQw7evr4+enh7a2tro7++31sttEEy0aG1tpb29Pdktby3i+WNgYIB4PM60adOSH4Hz589n6tSpyTHVzc3Nydb2YCWnqDzwg0ktM2fOZPfdd2fevHnMmDEjY5dlQQt3f39/cqLswMBAcnGK+vr65CIK1gJk5JNgsR4jd4IhWcF7JJtxy8PDwzQ1NZFIJGhoaKChoYG2trZJNem8kHR3d9PS0kJlZSWNjY3J3sRcffA2NzfT0NBAR0dHckJhR0dHSS9kVZKGb0CwQlrg9qSQBMb24OCg/XgyoKGhgZdffpna2lpisRj9/f3WPZdHOjs7k5M0AObMmUNTUxMLFixIjqdeu3YtTz75ZHIhlSi1dOywww7MmjWLqqoqjj76aHbeeWdmzJiRkcuykZEREokEPT09bNy4kZqamqTh29TUxBNPPEFjYyOJRKLsvDcYRhQIxn/GYjGWLFnC9OnTM047NDREXV0dnZ2drFixItlDa+/tzAjsqtbWVgYHB6msrKSmpiZnwzeYWBiPx1mzZg3d3d1bNUyUIiVt+A4MDFj3eYkxPDyc/FJPJBJs2bKF3t5ee+jkmUBjcF/UAwMD1NXVJT2hBN1JQUtH1Aj8eAYuC7MxegNturq6aGlpoba2NunOqLm5ObkiWzl6bzCMKBD8XhOJBHV1dVlNYh0eHqahoYHOzk5aWlqIx+OFK+gkJLCrOjo6aGhooLe3d1y+vRsaGpINDa2trWl9TJcaJW34GhPLWK2FYcfU9fX1yVa3qLQyRpH+/n5qamrYvHkzmzdvpqKigqGhIYaGhiLtozLw5tLS0sJrr732liVL0xnAQ0NDyTr3yiuvsHnzZl566SVWrFiRHGbT19eXHPphRq9hlCbBCnPr1q0jHo9nNYwxcOk2NDRER0dHAUs5ueno6GDlypVMmzaN1atX5zy5raenh87Ozkj58zbD18jYcB0ZGSEej9Pc3EwikbCZ8hPA8PBw8uHe3Nxc5NLkj+Hh4aTx3tjYyMjICAsWLGCHHXYY1Un9yMhIsqdh8+bNxGIx1q5dy6uvvlqEKzAMI1eCj/e+vj5aWlqKXZyypK+vLzmUrtwww7fMCb6eu7u7aW1t3eYPoa+vj+eee47q6mpWr15NXV3dVn5mDSNTwv43H3zwQRYuXEhFRQXz5s1LriqUSnd3N2vWrKGtrS3p67tUZw0bhmEYpYkZvmVO2PBta2ujqalp1LhdXV08//zzvPjii7S2ttLW1jaBJTUmE8FCHVu2bKG2tpa9996bAw88kMHBQSoqKpKO1MMEXXONjY0sW7aM6urqIpTcMAzDiDJm+JY5g4ODNDQ00NPTwwsvvEAsFhs1bl9fH7FYjM7OzsiM5TFKm8CTSkdHB8uXL2fDhg3JZTNT6e7uprq6mng8TmdnZxFKaxiGYUQdycfEJBGx2U3bQFWz9nI+UZqKSHLZze23336bkwxUNek2ptg+Y3PRFKyujkWx6uqUKVOoqKgYdWKbL1uy7kVp8prV1cJQys/VqGKa5h/7/ReGXHUFM3wnBHuY5B97mBQGq6v5x+pqYbC6mn9M0/xjv//CUHTD1zAMwzAMwzBKncKtAWwYhmEYhmEYJYQZvoZhGIZhGEZZYIavYRiGYRiGURaY4WsYhmEYhmGUBWb4GoZhGIZ
hGGXBuAxfEVk0WV1uiMghIvJzEVkrIj0iUisi94jIXmnibhQRTbOdnmPepuub8XcXkV+KSKOI9ItITES+nUO+Za+piFw0Sj0Ntvk55F32uvq4u4jILSKyQUR6fT29TUR2zSFf09TFfZv/7bd4TZeLyInjyHsy67qniDwkIjVeq1YR+auInJImboWI3CQiDT7uCyLy4RzzNU1d3BtFZLGItPln6UXjzLvsdc3WVsiqDONxZyYiewB7qOqy8Rak1BCR7wGHA/cAq4HdgWuBucD7VHVTKO5GYC2wKOU061S1I4e8TVcXtwp4DogBPwKagCpgH1W9Nst8y15Tb4S9IzU58Cdgg6oemkPepquIAM8C+wJfB9YA7wauA94ADtcsHrSmKYjIDsBLwNuAq4FG4BLgY8Dxqro0h7wns64HAP8JLAXqgFnAZ4FTgTNV9YFQ3Ht8+H8BG4DLgJNx9fSVLPM1TV3cLuAVnJ6fAi5W1TvHkXfZ65qNrZA1wQpctm29AbumCdsTGAGuSwnfCPym2GWOwpalro8DLwLbF7vcpbxlo2maeEcCClxW7OsotS1TXXEGrwKXpsT9nA/fr9jXUipbFpqe77U7OhQmwErgxWJfRxQ2YCqwCfhTKOy9XteLU+KtAx4udplLfUunqQ+f4vf7eH0vKnZZo7SNUldzfq+NteV9qINv5r9BRL7im7J7RORREZnrt/8TkYSIbBKRK1LS7ioiPxORap9uk4jcKyK7p8n7XN8E3iciq0TkNBFZKiJL05zzpyJSL66rfK2IXDrWtalqS5qwGqAF9+VRMExXEJF3ACcCt6rq4FjnHQvTdFQuBAaA346VTzpMVwCm+X1nSvS432f1nDVNATgM6NVQy666N99i4IPpyj4Wk1nXdKjqEJAAhkLBpwGDwH0p8X4HnCiupT1jTNNkeF7XTzddC2yDjdNKX4R/HoXCFKgBHsU1XX8a90J4HNdtfQ1wHPAzH/eUUNr9gFuAM4EPA+fgurs2AhWheMfjrP4/AqfgXt4bgM3A0lC8Wbgv2VpcU/pxwE3AMHB5Dtf7Ll/mr6aEb8TdtB6gH1gGnG665q4rrrtIgbOAJV7XDuBuYBfTNLe6mhJnuq+3f7C6Oq66KsBfcd1xhwA7AYcCrwN/Nk1z0vQWIJEm7rd83BNN17TXOAXXejYfN+xmADg2dPx3uCF4qenO9td3gGmanaYpcfPS4mu6Zv6syEnfAt2camBqKOwHPvyaUNhUoBn41TbOvx3wdp/246Hw54HX8GOUfdjBPl745lwL9AHvTDnv7UBruIwZXOtU3MutGahMOXYrzlA7EmeoLfVlOd90zU1X4EqfbyfwY+BfgEuBNmAFvmvJNM2urqbEO9eX47Rc6qnputWxGcADPv9gewSYbprm9Pv/gs/3XSnx/+LDzzVd05bhe6H61wWckXJ8MbAsTbrjfJojTdPsNE2JW2jDtyx1DV3XmO+1TLZCuTNboq7pOmCt3z8RBPjjb+DETyIinxeRV0VkC67Zu9Yf2s8f3w7XqvIH9Wr48/0NNwkqzEnAciAmIlODzZdjF9wElEy5DfhnnDHbET6gqper6t2q+oyq3g8cizPOsvY+MAblpGtQN5eq6mWq+hdV/TnuhXgwbhhEPignTVO5EPcQ+XMW586UctP1dlz3/OeAo/z+EOB+EcnXc7acNL0X9wK9S0QOEufh4b9xrVXgWqXyxWTS9YfAB4GPAo8B94rIRzJIl29M08JQzrpm+l4bk6njSbwNUgs1sI3wiuAfEbkcN3v/B7gZpx04A2hZKN7bgO1xL+xUmlL+n4v7AhttjOguo15BCBH5Dq618UJVXTxWfFUdFpHfA/8jIgtUtSGTfDKgnHRt8/slKeFBvPfjfizjpZw0DcdbgGvluTXlQZovykZXETkV13p+nKo+5YOfFpENuPr6UeChTPIZg7LRVFXjInIGcBduQhvAP3AtYdcD+XqmwiTSVVXrcDPlAR7x4zK/h+t9wJd
xzzRJ5/h9+1h5ZEg5aTqRlKWu2dpgY1EowzdXzgGeUtWvBAHyVp9trTix56ZJP483v2LAGU/NwL+Pkt+6sQokIlcDV+DGrfx6rPhp0LGjFJwo6rp6jFPkdTJBDkRR0zDn47q77hrrvBNMFHU9yO9fSgl/0e/fRX4M31yJoqao6jPiJrnug6ur1biXdi/wt7HymABKTtc0rAC+HPp/NfBxEdlRVXtC4e/GGUtv5JBHPomiplEgsrrmwQZ7C6W2ctuOvPUL4uLwP6o6jBPoTBGRIFxEDgZSb+TjwP5AraquSLN1baswIvIl4AbgalW9LdOL8E3+/+rzbcw0XQGJoq7LcL47U4c0nOT3qUbGRBNFTcN8ClipWfrtnACiqGvwG0/1g/xPfl+/rTwmgChqGpRLVXW9qq711/FZ4Neq2r2tdBNESemaih9i8yFcS3nAn3Ctep8IxQveV4tVtT+bPApAFDWNApHUNVcbbCxKrcX3ceAKP5brRdyEprPSxPsGrgvxQRH5Oa6JfhHuBRRuCbwZ94N+RkRuxn2FzMDdsCNV9WOjFUREzsGNQXkc+IuIHBY63Kmqr/t45+Kcqv8Z54duHs4h+Adw3Z+lQOR0VdUhEbkSuFNEfoqbOLQPblb3Utwkl2ISOU1D8T8AHAh8hdIjiro+gKuXd4vI9bhxd/v7Mm4CHsz04gtEFDVF3AqNf8O1RO2Da+0dBK7K9MILTCnpugg3XOE5f975uAU/DgXOC+Kp6ssich/wQxHZHjd28/M4w+aT2V1+QYicpj7uUcCuPg7AIX4sLerm/RSbyOma7XstK7QwMw9vSAm7yIfvkxK+FHg29P904Cc4P21duLEee/m0i1LSnocTux/ffQO8DDyYEq8Sd5NiuK6cZuAZ4MtjXNudbD1DO7yFZzcehjPCmnAP5TjwJDm42zFd39Q1FP8C3CzTfty4vluBnUzTcWl6i6+r83Kto6br1rriJpLc4fPo8/vbgd1N05w1/SVuDOCA398KzLG6mvbaTsO9h5p9HjXAw8ARaeJOx431bPR1dTmhhUJM05w0XTpavTZdc9OVLN9r2WzjWrK4lBC3xN8bwLdU9fpil2eyYLrmH9O0MJiu+cc0LQyma/4xTQvDZNQ1koaviARfrE/iusH2Br6GG2ZwgObPi0JZYbrmH9O0MJiu+cc0LQyma/4xTQtDuehaamN8M2UYNy7kNpzbjG5cE/snJsuNKRKma/4xTQuD6Zp/TNPCYLrmH9O0MJSFrpFs8TUMwzAMwzCMbCk1d2aGYRiGYRiGURAKaviKyCIRybpJWUSqRERF5DN5LIt6Nxq5pP2ViKwRkU4R2SJu2b/LxS3xN+FMBl1F5GifdrTtsLHPkj8mg6Y+7Y4icrOI1ItIv4isEpGiuSmaDLpaXd3mOXOuqz59pYj8UERqfX2tE5E781W+LMsSeV1FZIGIfFtEVohIXERaROQpEfnw2Knzz2TQ1KfdUUS+KSLVItIrIptE5G4RqcpX+bIsz2TRdekoz9Qv56t86YjqGN+JZjrOlc4/cK40TsS5hNqH0VcuMbbN34HD04TfgfPxV+wFKqLKAzhdr8G5pDkD+I2IiKr+pqgliy5WVwuAiFQCz+KeqdcAG4HdgCOKWKyoczDOv+qvcIsATQO+ACwVkdNUtRjL7E4GfgGcjvNzuwJYCHwTeEpE3quqW4pYtqizEvi3lLCNhczQDN8MUNVzUoIWi8huwKcxwzcnVLUT92BOIiJ74pZ3/b66VWSMLBCRD+E+yi5W1Tt98GJx7mi+KyK/NV2zx+pqwfg2sBNwkNc44HdFKs9k4FlgX1UdCgJE5AmcP9av4fy3GlkgIjsCZwPfVdWbQuFNwGO4D7UnilS8yUCXqi4bO1r+mPAxviLyRRF5QUTafVfMMhE5dZTo00TkByLSLCI9IvJIuq4FEblU3PCDPhFpFZE7RGROYa+ENmBozFgTxCTR9QJAgLsKmEfGRFDToMv9sZTwx4EFoeN
FJYK6psPq6vjKOwO3bPYvUozekiJquqpqPGz0+rAh4BVg93zkMV6ipimwnd9S62nc70tirlQEdS0axbhhVbhug0/gumRWAI+IyElp4l4FvBO3pvRluG6cxeKWWgRARL4D/Bjnd+403LKWJwGPyTbG4MqbY10WZVJocUwVkdkiciZwIc7fXalQRQR1TeFTwN9V9bUc0haCKqKladDyOJAS3u/3B46RfqKoIlq6psPqahqy0PRg3BCyJhG5X9y4yS0i8kcR2SuTC54gqoiWrunSTsMN1VmTbdoCUUWENFXVLuDXwJdE5BgR2UlEDgBuAl4FnsrkoieAKiKka4j3i0hCRAZFZKWIXJJhutwZz7JvuSy7l3J8Cm64xWLgoVB4FW7c1+vAlFD4ET78klC8YeDrKecN4p0eCttqaT5gT1yL7dczvJaP8OZyeSPAjYXUrlx0DaU73J/rS6ZpbpoCp/i0J6eE/9KHX2W6Wl0tBU2Bc3zaTuA+4HjcUqk1fptpuo6/rvq0N+LeWUeaprlpimvx/TFbL5u7DNh1ojWdZLpeB3wWOAr4GPAHf75rCqlfMYY6HOyb1Zu8OIO4h95+aaLfr6ojwT+q+hxuzfZgosnxuBt8j2+NnSoiU3Frj3cBo85kVdUaVZ2qqtdlWPRngA8CxwHfAb4qIt/KMG3BibCuARf6Mt+bZbqCEUFNF+NadX4kIoeLmzF/CXCuPz4yetKJI4K6pmJ1dRSy0DR492wAzlHVJap6L24s5ULg/DHSTwgR1DW1/OcBVwLXq+oz2aQtFBHV9AZcnfwqzki7ALfAw2Pihu0UnSjqqqpfV9XbVfWvqvqQqp4J/BG4WkR2yuS6c2FCJ7eJyNtx3QKvA5cDtbgbdD1uokgqTaOEBWOV5vr9G6NkuUvOhU1BVRO4rgNwMzkHgGtF5H9VtT5f+eRClHUFEJEdcC+8R1W1NZ/nzpUoaqqqQyJyFs4gez5UhquAm4Gir7wTRV3DWF0F8qNpm98/pb7pB0BVl4tIJ/D+POQxLiKqaxIR+ShwJ3CHqn4jn+fOlShq6oc1XAl8RlXvCIUvB6qBz+C8PBWNKOq6DX6L86BxEPBCITKYaK8OJwE7A2eral0QKG7WZDrmjRL2iv87eHieAHSkiduWJixfrMB9Ee0FFNXwJfq6ngZUUiIThTyR1FRVXwfeJ26iwgzcg/kMf/i5fOQxTiKpawirq/nRdPUYx0uhdyKKugIgIscCvwce5K2uoopJFDU9yO+3cluoqutFJE56w3KiiaKuY6FjR8mNiTZ8g5swGASIyL64cSN1aeKfJSKLgiZ5ETkC2IM3vwKW4B6QC1V1ScFKnZ6jcDdmwwTnm46o63oh0Ao8OgF5ZUqkNVXVjb4c2wNfBBar6j8KnW8GRFpXrK7mBVWtE5EVwPEiIkGrr4gcDsyiNHwjR05Xn+/hwEO4FsDzw13aJUAUNW30+0NxPmfD5Z5N8Ru+IJq6jsYngV5gVcFyKOQAYlIGYAMH4G7ME7gviQtxjoo3ABtD8apwRuUm4GHgVOAiXFdtNbB9KO6NXqTv+njH+rj3AMeE4uU6CeNU4H5f1mNwLT4/wQ38/kkh9ZvMuobiz/Vl/1ExtJxsmuKGNZwHHI3zPLDcl2Vv09Xqailp6s85hJvQcrKvr5tw49Snm645va/2B9p9OY/GuTBMbqZpTppuh2sJTeA8Gxzjy70e59Jsoemak65H4hoQLvHnPgP3wabAFQXVbyJvjg87G1gL9OG6u87BjUNKd3O+gHMZ1gL0eJH2SpPPBbgZlt3AFtyD8zZgj23cnKrUsFGuYX/calibcG6hmnBOwj9JaFZkMSt9FHUNxf8PH//gYmg52TTFTcKIherqXcDbTVerqyWq6cm41t0+XPfp3cA80zXn99VFbO15YKvNNM35uboL8H2csduLswfuA/azuppzXd0H53O+Hve+2oKbm3JuofUTXwDDMAzDMAzDmNSUxIojhmEYhmEYhlFozPA
1DMMwDMMwygIzfA3DMAzDMIyywAxfwzAMwzAMoywww9cwDMMwDMMoC8zwNQzDMAzDMMoCM3wNwzAMwzCMssAMX8MwDMMwDKMsMMPXMAzDMAzDKAvM8DUMwzAMwzDKAjN8DcMwDMMwjLLADF/DMAzDMAyjLDDD1zAMwzAMwygLzPA1DMMwDMMwygIzfA3DMAzDMIyywAxfwzAMwzAMoyz4f/WM/vaUkX+gAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "from matplotlib import pyplot as plt\n", - "import numpy as np\n", - "\n", - "\n", - "print(\"The 32 images with label of the first batch in ds_train are showed below:\")\n", - "ds_iterator = ds_train.create_dict_iterator()\n", - "next(ds_iterator)\n", - "batch_1 = next(ds_iterator)\n", - "batch_image = batch_1[\"image\"].asnumpy()\n", - "batch_label = batch_1[\"label\"].asnumpy()\n", - "%matplotlib inline\n", - "plt.figure(dpi=144)\n", - "for i,image in enumerate(batch_image):\n", - " plt.subplot(4, 8, i+1)\n", - " plt.subplots_adjust(wspace=0.2, hspace=0.2)\n", - " plt.imshow(np.squeeze(image), cmap='gray')\n", - " plt.title(f\"image {i+1}\\nlabel: {batch_label[i]}\", y=-0.65, fontdict={\"fontsize\":8})\n", - " plt.axis('off') \n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义网络\n", - "\n", - "使用下面一段代码定义LeNet网络" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def construct(self, x):\n", - " x = 
self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x) \n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 预训练模型\n", - "\n", - "运行以下一段代码进行预训练,生成后续加载模型进行推理或迁移学习所需的预训练模型CheckPoint文件。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Training ==============\n", - "epoch: 1 step: 375, loss is 2.2881835\n", - "epoch: 1 step: 750, loss is 2.311615\n", - "epoch: 1 step: 1125, loss is 0.03919042\n", - "epoch: 1 step: 1500, loss is 0.10693374\n", - "epoch: 1 step: 1875, loss is 0.19927995\n", - "Epoch time: 18645.494, per step time: 9.944\n" - ] - } - ], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor\n", - "from mindspore import Model\n", - "from mindspore.nn.metrics import Accuracy\n", - "import os \n", - "\n", - "# clean up old run files before in Linux\n", - "os.system('rm -f {}*.ckpt {}*.meta {}*.pb'.format(args.ckpt_path,args.ckpt_path,args.ckpt_path))\n", - "\n", - "network = LeNet5(args.num_classes)\n", - "net_loss = CrossEntropyLoss()\n", - "net_opt = nn.Momentum(network.trainable_params(), args.lr, args.momentum)\n", - "time_cb = TimeMonitor(data_size=ds_train.get_dataset_size())\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=args.save_checkpoint_steps,\n", - " keep_checkpoint_max=args.keep_checkpoint_max)\n", - "ckpoint_cb = ModelCheckpoint(prefix=\"checkpoint_lenet\", directory=args.ckpt_path, config=config_ck)\n", - "model = Model(network, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()})\n", - "\n", - "print(\"============== Starting Training ==============\")\n", - 
"model.train(args['epoch_size'], ds_train, callbacks=[time_cb, ckpoint_cb, LossMonitor(per_print_times=375)], dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "预训练完成后,在`ckpt`目录下生成CheckPoint模型文件(`checkpoint_lenet-1_1875.ckpt`文件)。此时`ckpt`目录结构如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./models/ckpt/mindspore_load_model_for_inference_and_transfer\n", - "├── checkpoint_lenet-1_1875.ckpt\n", - "└── checkpoint_lenet-graph.meta\n", - "\n", - "0 directories, 2 files\n" - ] - } - ], - "source": [ - "!tree ./models/ckpt/mindspore_load_model_for_inference_and_transfer" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 加载模型用于推理验证\n", - "\n", - "针对仅推理场景可以使用`load_checkpoint`方法把参数直接加载到网络中,以便进行后续的推理验证。\n", - "\n", - "使用`load_checkpoint`方法,导入预训练模型文件`checkpoint_lenet-1_1875.ckpt`中的参数到网络中。\n", - "\n", - "其中:\n", - "- `load_checkpoint`方法会把参数文件中的网络参数加载到模型中。加载后,网络中的参数就是CheckPoint保存的参数。\n", - "- `eval`方法会验证训练后模型的精度。\n", - "\n", - "运行以下一段代码,进行本地模型加载进行推理验证,得到推理精度值数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Testing ==============\n", - "============== {'Accuracy': 0.9668469551282052} ==============\n" - ] - } - ], - "source": [ - "import os\n", - "import mindspore.nn as nn\n", - "from mindspore import load_checkpoint, load_param_into_net\n", - "from mindspore import Model\n", - "from mindspore.nn.metrics import Accuracy\n", - "\n", - "\n", - "network = LeNet5(args.num_classes)\n", - "net_loss = CrossEntropyLoss()\n", - "load_checkpoint(ckpt_file_name=\"./models/ckpt/mindspore_load_model_for_inference_and_transfer/checkpoint_lenet-1_1875.ckpt\", net=network)\n", - "model = Model(network, net_loss, metrics={\"Accuracy\": Accuracy()})\n", - "\n", - 
"print(\"============== Starting Testing ==============\")\n", - "ds_eval = create_dataset(os.path.join(args.data_path, \"test\"),\n", - " args.batch_size,\n", - " 1)\n", - "\n", - "acc = model.eval(ds_eval, dataset_sink_mode=False)\n", - "print(\"============== {} ==============\".format(acc))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "通过运行的输出结果得到推理精度达到0.96以上。\n", - "\n", - "## 加载模型用于迁移学习\n", - "\n", - "针对任务中断再训练及微调(Fine Tune)场景,可以加载网络参数和优化器参数到模型中。\n", - "\n", - "同理,使用`load_checkpoint`接口导入预训练模型文件`checkpoint_lenet-3_1875.ckpt`,并使用`load_param_into_net`接口将预训练模型参数加载进网络中。\n", - "\n", - "其中:\n", - "- `load_checkpoint`方法会返回一个预训练模型中的参数字典。\n", - "- `load_param_into_net`会把参数字典中相应的参数加载到网络或优化器中。\n", - "\n", - "运行以下一段代码,进行本地模型加载并进行迁移学习(重训练)。在重训练过程中使用损失函数`SoftmaxCrossEntropyWithLogits`。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Training ==============\n", - "epoch: 1 step: 375, loss is 0.011971839\n", - "epoch: 1 step: 750, loss is 0.010163541\n", - "epoch: 1 step: 1125, loss is 0.12516475\n", - "epoch: 1 step: 1500, loss is 0.22787872\n", - "epoch: 1 step: 1875, loss is 0.15650317\n", - "Epoch time: 18165.255, per step time: 9.688\n", - "epoch: 2 step: 375, loss is 0.04129391\n", - "epoch: 2 step: 750, loss is 0.19876541\n", - "epoch: 2 step: 1125, loss is 0.04154645\n", - "epoch: 2 step: 1500, loss is 0.011596533\n", - "epoch: 2 step: 1875, loss is 0.21028273\n", - "Epoch time: 17832.701, per step time: 9.511\n", - "epoch: 3 step: 375, loss is 0.12846899\n", - "epoch: 3 step: 750, loss is 0.0042161625\n", - "epoch: 3 step: 1125, loss is 0.024917062\n", - "epoch: 3 step: 1500, loss is 0.024145193\n", - "epoch: 3 step: 1875, loss is 0.031243846\n", - "Epoch time: 17812.343, per step time: 9.500\n" - ] - } - ], - "source": [ - "network = LeNet5(args.num_classes)\n", - "net_opt = 
nn.Momentum(network.trainable_params(), args.lr, args.momentum)\n", - "\n", - "# return a parameter dict for model\n", - "param_dict = load_checkpoint(ckpt_file_name=\"./models/ckpt/mindspore_load_model_for_inference_and_transfer/checkpoint_lenet-1_1875.ckpt\")\n", - "# load the parameter into net\n", - "load_param_into_net(network, param_dict)\n", - "# load the parameter into operator\n", - "load_param_into_net(net_opt, param_dict)\n", - "net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - "time_cb = TimeMonitor(data_size=ds_train.get_dataset_size())\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=args.save_checkpoint_steps,\n", - " keep_checkpoint_max=args.keep_checkpoint_max)\n", - "ckpoint_cb = ModelCheckpoint(prefix=\"checkpoint_lenet\", directory=args.ckpt_path, config=config_ck)\n", - "model = Model(network, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()})\n", - "ds_train = create_dataset(os.path.join(args.data_path, \"train\"), args.batch_size)\n", - "\n", - "print(\"============== Starting Training ==============\")\n", - "model.train(3, ds_train, callbacks=[time_cb, ckpoint_cb, LossMonitor(per_print_times=375)], dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "完成重训练后,将在CheckPoint文件保存目录生成新的CheckPoint文件。此时`./ckpt`目录结构为:" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./models/ckpt/mindspore_load_model_for_inference_and_transfer\n", - "├── checkpoint_lenet_1-1_1875.ckpt\n", - "├── checkpoint_lenet-1_1875.ckpt\n", - "├── checkpoint_lenet_1-2_1875.ckpt\n", - "├── checkpoint_lenet_1-3_1875.ckpt\n", - "├── checkpoint_lenet_1-graph.meta\n", - "└── checkpoint_lenet-graph.meta\n", - "\n", - "0 directories, 6 files\n" - ] - } - ], - "source": [ - "!tree ./models/ckpt/mindspore_load_model_for_inference_and_transfer" - ] - }, - { - "cell_type": "markdown", - 
"metadata": {}, - "source": [ - "可以看出重训练保存的新的CheckPoint文件为`checkpoint_lenet_1-3_1875.ckpt`。\n", - "\n", - "运行以下一段代码,使用`eval`方法测试加载模型重训练后的得到的新的模型的精度。" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Testing ==============\n", - "============== {'Accuracy': 0.9827724358974359} ==============\n" - ] - } - ], - "source": [ - "network = LeNet5(args.num_classes)\n", - "net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction=\"mean\")\n", - "net_opt = nn.Momentum(network.trainable_params(), args.lr, args.momentum)\n", - "model = Model(network, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()})\n", - "\n", - "print(\"============== Starting Testing ==============\")\n", - "load_checkpoint(ckpt_file_name=\"./models/ckpt/mindspore_load_model_for_inference_and_transfer/checkpoint_lenet_1-3_1875.ckpt\", net=network)\n", - "ds_eval = create_dataset(os.path.join(args.data_path, \"test\"),\n", - " args.batch_size,\n", - " 1)\n", - "\n", - "acc = model.eval(ds_eval, dataset_sink_mode=False)\n", - "print(\"============== {} ==============\".format(acc))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "通过输出结果,得到重训练后模型的精度达到0.98以上,优于使用预训练得到的模型进行推理的精度。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结\n", - "\n", - "通过以上流程,完成了在本地加载预训练模型并用于推理验证和迁移学习的体验过程,了解了使用MindSpore的`load_checkpoint`方法和`load_param_into_net`方法加载模型的方式和过程。通过加载模型进行验证或迁移学习,可以在提高精度的同时有效减少训练时间,减小数据集规模,提高开发效率。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - 
} - }, - "nbformat": 4, - "nbformat_minor": 4 -} \ No newline at end of file diff --git a/tutorials/notebook/mindspore_loading_text_dataset.ipynb b/tutorials/notebook/mindspore_loading_text_dataset.ipynb deleted file mode 100644 index 3909bb7c8575f7cce087a7a1e8c81dcbf5958ca8..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_loading_text_dataset.ipynb +++ /dev/null @@ -1,445 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#
    加载文本数据集" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore提供的`mindspore.dataset`模块可以帮助用户构建数据集对象,分批次地读取文本数据。同时,在各个数据集类中还内置了数据处理和数据分词算子,使得数据在训练过程中能够像经过pipeline管道的水一样源源不断地流向训练系统,提升数据训练效果。\n", - "\n", - "此外,MindSpore还支持分布式场景数据加载,用户可以在加载数据集时指定分片数目,具体用法参见[数据并行模式加载数据集](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html#id6)。\n", - "\n", - "下面,本教程将简要演示如何使用MindSpore加载和处理文本数据。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 整体流程" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 准备环节。\n", - "- 加载数据集。\n", - "- 数据处理。\n", - "- 数据分词。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 准备环节" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 导入模块" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "导入`mindspore.dataset`和`mindspore.dataset.text`模块。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.text as text" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 准备所需数据集" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "创建文本数据,内容如下:\n", - "\n", - "```\n", - "Welcome to Beijing\n", - "北京欢迎您!\n", - "我喜欢English!\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets\n", - "└── tokenizer.txt\n", - "\n", - "0 directories, 1 file\n" - ] - } - ], - "source": [ - "import os\n", - "\n", - "if not os.path.exists('./datasets'):\n", - " os.mkdir('./datasets')\n", - "file_handle=open('./datasets/tokenizer.txt',mode='w')\n", - "file_handle.write('Welcome to Beijing \\n北京欢迎您! \\n我喜欢English! 
\\n')\n", - "file_handle.close()\n", - "! tree ./datasets" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 加载数据集" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore目前支持加载文本领域常用的经典数据集和多种数据存储格式下的数据集,用户也可以通过构建自定义数据集类实现自定义方式的数据集加载。各种数据集的详细加载方法,可参考编程指南中[数据集加载](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html)章节。\n", - "\n", - "下面演示使用`MindSpore.dataset`模块中的`TextFileDataset`类加载数据集。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 配置数据集目录,创建数据集对象。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "DATA_FILE = './datasets/tokenizer.txt'\n", - "dataset = ds.TextFileDataset(DATA_FILE, shuffle=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 创建字典迭代器,通过迭代器获取数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Welcome to Beijing \n", - "北京欢迎您! \n", - "我喜欢English! \n" - ] - } - ], - "source": [ - "for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据处理" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore目前支持的数据处理算子及其详细使用方法,可参考编程指南中[数据处理](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/pipeline.html)章节。\n", - "\n", - "在生成`dataset`对象后可对其进行数据处理操作,比如`SlidingWindow`、`shuffle`等。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- SlidingWindow\n", - "\n", - " 下面演示使用`SlidingWindow`对文本数据进行切片操作。\n", - " \n", - " \n", - " " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 
加载数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "inputs = [[\"大\",\"家\",\"早\",\"上\",\"好\"]]\n", - "dataset_slide = ds.NumpySlicesDataset(inputs, column_names=['text'], shuffle=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 原始数据输出效果。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['大', '家', '早', '上', '好']\n" - ] - } - ], - "source": [ - "for data in dataset_slide.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']).tolist())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 执行切片操作。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "dataset_slide = dataset_slide.map(operations=text.SlidingWindow(2,0),input_columns=['text'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "4. 执行之后输出效果。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[['大', '家'], ['家', '早'], ['早', '上'], ['上', '好']]\n" - ] - } - ], - "source": [ - "for data in dataset_slide.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']).tolist())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- shuffle\n", - "\n", - " 下面演示在加载数据集时使用`shuffle`对文本数据进行混洗操作。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 加载数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "inputs = [\"a\",\"b\",\"c\",\"d\"]\n", - "dataset_shuffle = ds.NumpySlicesDataset(inputs, column_names=['text'], shuffle=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
数据输出效果。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "a\n", - "b\n", - "c\n", - "d\n" - ] - } - ], - "source": [ - "for data in dataset_shuffle.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']).tolist())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据分词" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore目前支持的数据分词算子及其详细使用方法,可参考编程指南中[分词器](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/tokenizer.html)章节。\n", - "\n", - "下面演示使用`WhitespaceTokenizer`分词器来分词,该分词是按照空格来进行分词。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 创建`tokenizer`。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "tokenizer = text.WhitespaceTokenizer()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 执行操作`tokenizer`。" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "dataset = dataset.map(operations=tokenizer)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 
创建字典迭代器,通过迭代器获取数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['Welcome', 'to', 'Beijing']\n", - "['北京欢迎您!']\n", - "['我喜欢English!']\n" - ] - } - ], - "source": [ - "for data in dataset.create_dict_iterator(num_epochs=1,output_numpy=True):\n", - " print(text.to_str(data['text']).tolist())" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/mindspore_mixed_precision.ipynb b/tutorials/notebook/mindspore_mixed_precision.ipynb deleted file mode 100644 index fd19f5e6fd2eb90fb898ffe7bb5780bc6812aff9..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_mixed_precision.ipynb +++ /dev/null @@ -1,585 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#
    混合精度训练体验" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "神经网络训练的时候,数据和权重等各种参数一般使用单精度浮点数(float32)进行计算和存储。在采用复杂神经网络进行训练时,由于计算量的增加,机器的内存开销变得非常大。经常玩模型训练的人知道,内存资源的不足会导致训练的效率变低,简单说就是训练变慢,有没有什么比较好的方法,在不提升硬件资源的基础上加快训练呢?这次我们介绍其中一种方法--混合精度训练,说白了就是将参数取其一半长度进行计算,即使用半精度浮点数(float16)计算,这样就能节省一半内存开销。当然,为了保证模型的精度,不能把所有的计算参数都换成半精度。为了兼顾模型精度和训练效率,MindSpore在框架中设置了一个自动混合精度训练的功能,本次体验我们将使用ResNet-50网络进行训练,体验MindSpore混合精度训练和单精度训练的不同之处。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "整体过程如下:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. MindSpore混合精度训练的原理介绍。\n", - "2. 数据集准备。\n", - "3. 定义ResNet-50网络。\n", - "4. 定义`One_Step_Time`回调函数。\n", - "5. 定义训练网络(此处设置自动混合精度训练参数`amp_level`)。\n", - "6. 验证模型精度。\n", - "7. 混合精度训练和单精度训练的对比。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 本文档适用于GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## MindSpore混合精度训练原理介绍" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![image](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/mix_precision.PNG)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 参数以FP32存储;\n", - "2. 正向计算过程中,遇到FP16算子,需要把算子输入和参数从FP32 `cast`成FP16进行计算;\n", - "3. 将Loss层设置为FP32进行计算;\n", - "4. 反向计算过程中,首先乘以Loss Scale值,避免反向梯度过小而产生下溢;\n", - "5. FP16参数参与梯度计算,其结果将被cast回FP32;\n", - "6. 除以`Loss scale`值,还原被放大的梯度;\n", - "7. 
判断梯度是否存在溢出,如果溢出则跳过更新,否则优化器以FP32对原始参数进行更新。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "从上可以理解(float16为半精度浮点数,float32为单精度浮点数),MindSpore是将网络中的前向计算部分`cast`成半精度浮点数进行计算,以节省内存空间,提升性能,同时将`loss`值保持单精度浮点数进行计算和存储,`weight`使用半精度浮点数进行计算,单精度浮点数进行保存,通过这样操作即提升了训练效率,又保证了一定的模型精度,达到提升训练性能的目的。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据集准备" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载并解压数据集CIFAR-10到指定位置。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/cifar-10-batches-bin\n", - "├── readme.html\n", - "├── test\n", - "│   └── test_batch.bin\n", - "└── train\n", - " ├── batches.meta.txt\n", - " ├── data_batch_1.bin\n", - " ├── data_batch_2.bin\n", - " ├── data_batch_3.bin\n", - " ├── data_batch_4.bin\n", - " └── data_batch_5.bin\n", - "\n", - "2 directories, 8 files\n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz\n", - "!mkdir -p datasets\n", - "!tar -xzf cifar-10-binary.tar.gz -C datasets\n", - "!mkdir -p datasets/cifar-10-batches-bin/train datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/test_batch.bin datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/data_batch*.bin datasets/cifar-10-batches-bin/batches.meta.txt datasets/cifar-10-batches-bin/train\n", - "!tree ./datasets/cifar-10-batches-bin" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据增强" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "先将CIFAR-10的原始数据集可视化:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "the cifar dataset size is : 50000\n", - "the tensor of image is: (32, 32, 3)\n" - ] - 
}, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD5CAYAAADhukOtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAde0lEQVR4nO2da4yc53Xf/2fue+XyfpNoUhdLVX2RjY3iwkbgJEigGAFkA4Vrf3CFVgiDIgZqIP0gOEDsAv3gFLENfwhc0JUQpXB8SWzDSmC0cYUAhoFWMWXLtC61LNGiTGrJJblc7u7M7FxPP8ywpYTnf3bJ3Z2l/fx/AMHZ5+zzvmeeec+8M89/zznm7hBC/OpT2G4HhBCjQcEuRCYo2IXIBAW7EJmgYBciExTsQmRCaSOTzexBAF8AUATwX939M9Hvj4+P+cyO6aSt0+3SeQWz9HiRu29kDgBsttwYHa/vfWpjz2t41GBekdqskD5m4Ea4VoUivx90Op3Ajxu/jxSCOcXAFr2c1Wo1OV4q8Wun2WxSW/ScI/9LwToWyXXMfB+Qfs1en3sdi4uLSeNNB7uZFQH8BYDfAXAWwA/M7El3f4HNmdkxjUf+7b9K2i5evETPVSnXkuPT0zvpnFK5Qm3tLn/BorcB9mKurq7yc7Xb1FYt8wugAB6dY2MT1FapjKX9aPE301KJr9XY1Di1zV+cD/zgx2SMT/BzTU5MUVun3aO2u+66Ozm+d+9eOufUqR9T29zceWqbnEhfpwAwMzVJbTtm0tfxXXe+lc7xfvra+di/+dd0zkY+xj8A4GV3P+3ubQBfBfDQBo4nhNhCNhLshwH84rqfzw7HhBC3IFu+QWdmx83spJmdrDf4dyEhxNaykWA/B+D2636+bTj2Btz9hLvPuvvsxHj6+6QQYuvZSLD/AMDdZnbMzCoAPgLgyc1xSwix2dz0bry7d83s4wD+BwbS2+Pu/nw0p+99unMd7Y7WV9If/yOJpNzju9lW4tJVr8d3dhcXF6mNUavxHdrdu7masHR1gdqWV1aobWI8/f592+Hbk+MA4MF7/qUFrpJE0hDbjY/mTE7yHet7772X2lotrng06vXk+AsvnKJzXp87S22dDj9Xrcb9n5rmSoNZ+lp97nmuChw8cFtyvBsoTRvS2d39OwC+s5FjCCFGg/6CTohMULALkQkKdiEyQcEuRCYo2IXIhA3txt8o7h5kDQWZVyQBJcp26gZZdN7jtkqYaZRm165d1FYul6mt1WpR2/R0OjsQAPpcHUShkD7f7Oyv0TnNVe7H9//X96mtUuPJLsvLy8nxHTt20DmR7fTp09Rmxi+Ey5cvEwufc8exI9yPn/+c2tz5C9Ptccmu3U6v/wqRnAHgpZ+9mBxfbfGkLN3ZhcgEBbsQmaBgFyITFOxCZIKCXYhMGOlufMGKmBxL77j2+0GttmI6caXdDv7oP6hP1w52TSeD8kHdbnrXtNFconPGwdN6K0EdtHabKwZ7du+htpWVRnL87/7+W3ROtcqTdVDgu9bL5FwAUKulVY1+l+8wv3bmZWobG+Pr2AjKgnVIWbCdO3kS0i/OvUZtu3bP8HMF9ele+ulL1AZSA3CsxpNnut30NdyLajlyD4QQv0oo2IXIBAW7EJmgYBciExTsQmSCgl2ITBit9FYoYLyalhOqRKoBeFLL0lUuee3bv4/a+kErnlabyzi7ds2kDUEiRqORroG2lm28FnV94Qkovd5icjxK0miu8pp2URencoknLzUb6UQYOJeGom5Yi4tc5iuVg+QlcsxOUKutUuHJSyxpBQB6gRTc7/NrhD9vPufAgfT1XS7zkNadXYhMULALkQkKdiEyQcEuRCYo2IXIBAW7EJmwIenNzF4FsAygB6Dr7rPR77v30SFZT/WFq3TexERahiqWeKZcu8Ozq8bGZqgtqqtmlpavlpa476zdFQCsBu2rqoGcFLW9ajbT5
7NA1yoHUhPA19id20pBRh+jVuOZbb2gA3BUy69K2m9FNQqjuoEeFD60YBlvuy3drgngr2f0Op8/fz45HmXebYbO/pvuzhuCCSFuCfQxXohM2GiwO4B/MLNnzOz4ZjgkhNgaNvox/n3ufs7M9gH4rpn9H3f/3vW/MHwTOA4AO6anNng6IcTNsqE7u7ufG/4/D+BbAB5I/M4Jd59199nxcb4BI4TYWm462M1swsymrj0G8LsAntssx4QQm8tGPsbvB/CtoaRTAvDX7v7fowmFQgFj42lJqdHkmVdETQqzpBqN4HgdLhmNT/BPH04yti5d4mLESp1kfwHotXkmWmeMS0ORJNPrpY8ZFWyMpKZx8noBsQzVaKSz1IqkeOhax4uyuSwoLsqeWyQNRusRzivw57Za51l7O2fSxS/37eWZm6+cfoXaGDcd7O5+GsA7b3a+EGK0SHoTIhMU7EJkgoJdiExQsAuRCQp2ITJhpAUn+/0eVlbSktgkyWwDgHo9PWdliRecnNm1m9omJ/hf8tUDqWx5JZ3dFhVztCBrrN/jGUrlEn8fjmy9brq3WbHApbedO6aprbnKJaOpm/iLyEaDZwGWSly6KgWSXbvNpchiNb1Wq6vBnGB9K4Esd+XqFWorB/Lg65deT45PjPFeby2S1dkPMhF1ZxciExTsQmSCgl2ITFCwC5EJCnYhMmGku/HlcgWHDh9O2hYuXqbzjGwweofvgteCGm6VCn/anV56NxvgyRjdLvdj9550kgMAVAu83l3R+e5zfZmrEKVCOjuo1+F12ppB0lBtgu8IN4P6eldJa67pqRk6p9PliTCdNn9dqsEu/ipJsPJCkMSzypOQ6h1ebzDI40GjxXfJ2cTLi7w9GNvcj5LDdGcXIhMU7EJkgoJdiExQsAuRCQp2ITJBwS5EJoxUeisWC5iZSSdPNEiCDAB4Nz2nucyTGaand1Hbzr0Hqe3Kyy9RW7GYToJYXeWyFgIZpx/IJBMVnriysswlGdbmqVjkMl8zSE7pBG2SCkE9NlZrrl7nvo+P84ScbpEnDa12uf+s5mE9kBu9w1+YCrkGACBYKhTB5xnS0lslqLt35Wpaqu73uP6nO7sQmaBgFyITFOxCZIKCXYhMULALkQkKdiEyYU3pzcweB/D7AObd/W3DsV0AvgbgKIBXAXzY3XkBriHdbgfzF9P1tgokWwsAxkgtrnIpaGkU2GpVnslFuicBAAqF9HKNjfH6eZcXeGuoMni2VnWG+18wLqOVmFwTZNFF2Vq9PjdaMJG1oYpO1unwzLZSiT9nVPhza5AaesGpsNrgUmqrzifWavw66PW5LjdeqyXHq4HMt1JIZxUy6RVY3539LwE8+KaxRwE85e53A3hq+LMQ4hZmzWAf9ltfeNPwQwCeGD5+AsAHN9ctIcRmc7Pf2fe7+9zw8XkMOroKIW5hNrxB54M+u/SLmJkdN7OTZnZyJWhbK4TYWm422C+Y2UEAGP4/z37R3U+4+6y7z04GJY6EEFvLzQb7kwAeHj5+GMC3N8cdIcRWsR7p7SsA3g9gj5mdBfApAJ8B8HUzewTAGQAfXs/JisUSZnams9F6LS7JnHnlbHK8ELTUid7HOm2ur01NTFLbzl3prKxymZ+r3eIZWbVSWnIBYqmp2+Nfh3bsTPtYrfDjrQY+Vipc/hkbD+TBQnre9PQMP1eZr8fUDJ9Xb/OMuNOvvJYcL1iUscfXtwgubfX5MiLoKIXOSnrivgO8WOnMsTuT47UKL7S6ZrC7+0eJ6bfXmiuEuHXQX9AJkQkKdiEyQcEuRCYo2IXIBAW7EJkw0oKTViigVEkXj+y0eSHCqZ1pOczBJZfzF89QW7/C+24dOriD2tqddJHC8Vr6OQHAP/9n91DbxQu8b9jC5UVqC1qi4dzc+eR4JZDeSkHhyN5SIDX5IrXt2bMnOV4OanNeXV6mtvmFN6dn/H9WgoKZ/e7e5PjEJH/NWq2XqW08kHsL4NJh1/m1CqSvx5Uu7
+lXq6VfT2eNEaE7uxDZoGAXIhMU7EJkgoJdiExQsAuRCQp2ITJhxNKbozqWLti3cPkinTc9k5YZri7yXm+lMs/WKlW4pFGpcunt6tW0PHjm53PJcQBYWuRa04VAemvU+bxiIP+USunii5G8Vijwgo2FYB0RZIDNnUvXH+32eOHFVosXc1y8ku5tBgBw/twOveXtyfG33HmEzzl8F7VVnV87hTJfj25g48VWeXZmoUqec2ljBSeFEL8CKNiFyAQFuxCZoGAXIhMU7EJkwkh344tFw9RM+pR799xL5y3Mp5Mg5uf4Lvje/ekECACY2sHrzD39v5+httfOpHeEm3WemdLv8ffTftBqyoIaadXqjdfXM+M73Vbgx/PgdlCO6to100pJg4wDg/ZgjKUFvgvebfLkD6ul6xeO7TtE57TP8oSWI3sDG1GNAKDV4M+7SpJa2m0+p+Dp51zkS6E7uxC5oGAXIhMU7EJkgoJdiExQsAuRCQp2ITJhPe2fHgfw+wDm3f1tw7FPA/gDANeyVz7p7t9Z61i9Xg/LV9N1xnq8aw1KpE3Srt0H6JwL59OJGADwo1Onqe2FF35ObQWkJbuC8WSRdofLJ0HzW1SrfEGaraCemaePacYTJHp9rtd4IbA1uP9LS2mpzILkmW6fJ8mMj/P16AYJIwtX0/LsDpLUBADe58lQnSUu946DNy4dCxKRlkh9vWKRr9VqK92iqtPha7ieO/tfAngwMf55d79/+G/NQBdCbC9rBru7fw8AL+0phPilYCPf2T9uZqfM7HEz4+0mhRC3BDcb7F8EcCeA+wHMAfgs+0UzO25mJ83s5NIS/54khNhabirY3f2Cu/fcvQ/gSwAeCH73hLvPuvvs9PTEzfophNggNxXsZnbwuh8/BOC5zXFHCLFVrEd6+wqA9wPYY2ZnAXwKwPvN7H4MtKNXAfzhek7W6zgWL6Szr04vvkTnzcykWwm1+tz9F372OrWdeY3LJ3Auo5mlZY1Wi389abd5LTlWLw4AWl2epVar8cwrVmuu2+NyXcGCrLd+YCMyHwCMkZZYRuutxbJcqcT9aJbTMhQA9Fl2WDPdJgsA2j2eYff6Mp/39mP3U1uhz9dqenw6Ob6ykm43BgC1YvpTciG4f68Z7O7+0cTwY2vNE0LcWugv6ITIBAW7EJmgYBciExTsQmSCgl2ITBhpwclup4fL59MtjyyQVq6QYoMX5nn7pHlSpBIA2h2eyVUpB0ti6XkOfrxiIK9FLaoqQTHHaF6fZrBxWavV5jKfeySHcR+pG4EEVQ7Wvt3m0mG/x9e/u5TOOmyefZX7Mcmz1wrOpdSZMT5v/+60FAkAy0Ri2zPD/wqdzYlag+nOLkQmKNiFyAQFuxCZoGAXIhMU7EJkgoJdiEwYqfTmfUenmZZ52j0u/xQr6WKDjas828m7XOKZCCSSSIbqdNLyTyHoldYOjtft8uKAXEID6nWeZcfm9Xq8KGPkR7U6Rm1mXIZiGXGRpBitVaUayI2B9ObN1fScK0HvuFZ6DgAcPLKb2qaDopjNVX7Mycl0IdMo6216Op0pVyxyqVd3diEyQcEuRCYo2IXIBAW7EJmgYBciE0a6G1+pVHDk0JGkrdULarVV0jXX6kt8h7lNdv0BoN7lSRVRmyS2G99s8hZP0fGinenFxUVqi3b/2fk6wU63BceLEmEqRCUBeC285eV0+y8AKActkpodfn30gzZaY6Re32qd73Qf2r2f2u655yi1lat8J7zHBQ80GmlVaWKCV2NmO/WRiqM7uxCZoGAXIhMU7EJkgoJdiExQsAuRCQp2ITJhPe2fbgfwVwD2Y9Du6YS7f8HMdgH4GoCjGLSA+rC7X4mO5X1Hp5WWgGZ28XpbXdYyqMRlhg64VMMkNADoBy2Nmo20xBZLYUHbolUu2ZXLXJaL2i41mmkZp1TiiSTdYD0sqBlXiNpXNdLJOq0Wf871QJ8qlvm5qjW+VgUib+4/fIDOOXr3W6jtjmN3UFtjm
Sco9UgbKoDLlJcvX6ZzKlUuezLWc2fvAvhjd78PwHsA/JGZ3QfgUQBPufvdAJ4a/iyEuEVZM9jdfc7dfzh8vAzgRQCHATwE4Inhrz0B4INb5KMQYhO4oe/sZnYUwLsAPA1gv7tfa4d6HoOP+UKIW5R1B7uZTQL4BoBPuPsbMv998CUy+eXOzI6b2UkzO7kSFF0QQmwt6wp2MytjEOhfdvdvDocvmNnBof0ggPnUXHc/4e6z7j47GfytrxBia1kz2G2QWfEYgBfd/XPXmZ4E8PDw8cMAvr357gkhNov1ZL29F8DHAPzEzJ4djn0SwGcAfN3MHgFwBsCH1zpQsVjAjul0G5wmqRUGAHOXL6bHz5+lc2o1/j62f99t1Ba1Gbp4MfnhJZTJlpZ4rbNiINn1+lyG6na4bXIiXV8vqq1nxuW1Si1oydThNQBrlfS8d9z3djpnbyC/Hjiwj9rGp/gnxi55Pe8+eozOKfJkSniHG6s1Loe1+4HcSzLVaiRjDwBarbS0HMmyawa7u38fvFHYb681Xwhxa6C/oBMiExTsQmSCgl2ITFCwC5EJCnYhMmG07Z/c0SGthgpBYcb9u/ckxzvH7qZz7r3jrdR2JMhcKhX5kszPX0iOc7EDeOmnP6W2H/zoFLVdCTKoggQwNOvpgo7VIEsqyhrr9blkd/87uYz2rvvuS44fPXiIztlLWhoBQLfPJa+VLveRPe8Oka4AYCz4469VkvkIAAtBkdBSJShGSVpzRcUjo9eToTu7EJmgYBciExTsQmSCgl2ITFCwC5EJCnYhMmHE0huo9FYN+p5VCmk37zx8O51TrvICi802l0+8yN//7jySlo1Yry4AqNpd1HbhQlrKA4Arz3PJLsquOriPSVtc2mwHMtTb7uES5nt//depbdfMjuR4Jyg4uVK/Sm1RUc9Oi2eULV1ZTI5HtRUW2uk5ANAPCkcWSaYfALRaPKuTyc5R8VOWTclkPEB3diGyQcEuRCYo2IXIBAW7EJmgYBciE0a6G18oGKqkrhbfKwaWl9LJHZWoRVKQONHv893nfofvgM5dXUyfK9g1nRwfo7a33cMTeVaW088ZAA7s5yX63/GOdybHz58/T+dcupiu8QcA7/u1WWqrBC/aJZI0NBnVi/Og+FtApcLVCdYmaXWV745bMUpaiWoDBnX+gqSWVdJ+K0qwYtecB7N0ZxciExTsQmSCgl2ITFCwC5EJCnYhMkHBLkQmrCm9mdntAP4Kg5bMDuCEu3/BzD4N4A8AXNNtPunu34mO1e32sLCwQGxc0pgmtck6gby2eHWF2irl4D0u0DuskNaaouSDq4u8/dPM1CS1vfXoEWo7cPAgtU2PE6lpkrcS6jS5PLi8wv3fsSOd7AIAFZKss1LnSUOl0s0pwY0mlynZMctlnijVCmS5SGbtdvl1EF3fRhJhojmMwL116exdAH/s7j80sykAz5jZd4e2z7v7n9+wR0KIkbOeXm9zAOaGj5fN7EUAh7faMSHE5nJD39nN7CiAdwF4ejj0cTM7ZWaPmxlvwSmE2HbWHexmNgngGwA+4e5LAL4I4E4A92Nw5/8smXfczE6a2cmVOq+FLoTYWtYV7GZWxiDQv+zu3wQAd7/g7j137wP4EoAHUnPd/YS7z7r7bFQdRAixtawZ7DbYKnwMwIvu/rnrxq/fEv4QgOc23z0hxGaxnt349wL4GICfmNmzw7FPAviomd2PgVj1KoA/XOtA7o4OyfCZnOQyVLOZrlvGjrXW8RpN/nWiFdRjq5A6eVGbnsCEK4E82AfPvIpsFy6mpc16g2dkwfhlEGVetdtBlheRk9garnW8SIaK1r9OvjpGNe0mgk+gkR+RBBvJiuw6jnyMzkV9WOsX3P37SGeghpq6EOLWQn9BJ0QmKNiFyAQFuxCZoGAXIhMU7EJkwkgLTkZELZSYjFMMCgPOB0UUl4K/5Ov3udjEWlSVbjKDarnOWyEVyjxLbWGRZ3kVC+k1aQfFEDsdLl1dW
QxaMgUFJwvEj0aw9uVAlusFklcrkOyiLDUGk3oBfi0CsVQWSXZMlovmsOuqF2Te6c4uRCYo2IXIBAW7EJmgYBciExTsQmSCgl2ITBip9Nb3PhpE1jC78fedKEMtksNgXLLr9nkmHYisMX85nWm2FoHKBw/eh63IXzbWw6wTSG9RRla7w+WfCxcvUxuTocrBuXpBAVEmKQ5sm3vPKpX4tRMpeaurXLIrBs/bekz65DLf2Hg6My+S/3RnFyITFOxCZIKCXYhMULALkQkKdiEyQcEuRCaMWHpzrJIMq2KRywwsc6lQ5llSrUAyimScnTt4scFLly4lx+srvHDk5AQvfBlJRp2goOCVK1zyYoU2+4GstbLCJaN2O5A3S4EcRuTBKFOxRvrDAcBYNThXkLVXLKbXuBJcO+5BOl/US63Me+b1nK8/cz+S+bok068fTNKdXYhMULALkQkKdiEyQcEuRCYo2IXIhDV3482sBuB7AKrD3/9bd/+UmR0D8FUAuwE8A+Bj7h70GAIKVsDYWHrHkiXIAHzXukcTCOJd38i2vMzru7HWRYcOHqJzotZEq0HttF6wqzo+Pn7D54uec9QqK6oL1w8UA3a+KOkGQTJUu80TlMqBqtHvpdex0+fXWznYqS8ENej6wVZ9oRQ8N9L+qRsoSixHJqq5t547ewvAb7n7OzFoz/ygmb0HwJ8B+Ly73wXgCoBH1nEsIcQ2sWaw+4BrQnJ5+M8B/BaAvx2OPwHgg1vhoBBic1hvf/bisIPrPIDvAngFwKK7X/uccRbA4S3xUAixKawr2N295+73A7gNwAMA7l3vCczsuJmdNLOT9aA2vBBia7mh3Xh3XwTwjwD+BYAZs//X2Ps2AOfInBPuPuvusxPBxpIQYmtZM9jNbK+ZzQwfjwH4HQAvYhD0/3L4aw8D+PYW+SiE2ATWkwhzEMATZlbE4M3h6+7+92b2AoCvmtl/AvAjAI+tdaBer4eFhRuv11atphMkeoH0E9Xi6hCpAwDKQe06dr5I7oiktwp5XgBQJRIlENfeYz5GaxXJcpG8djOtkKKWRoVgrfrBubpBwgjzMfK9HsjAHshrYd3DoFdWp5u+HqNrp0Res74Hc6hliLufAvCuxPhpDL6/CyF+CdBf0AmRCQp2ITJBwS5EJijYhcgEBbsQmWCRbLTpJzO7CODM8Mc9ANJF3UaL/Hgj8uON/LL58RZ335syjDTY33Bis5PuPrstJ5cf8iNDP/QxXohMULALkQnbGewntvHc1yM/3oj8eCO/Mn5s23d2IcRo0cd4ITJhW4LdzB40s5+a2ctm9uh2+DD041Uz+4mZPWtmJ0d43sfNbN7MnrtubJeZfdfMfjb8f+c2+fFpMzs3XJNnzewDI/DjdjP7RzN7wcyeN7N/Pxwf6ZoEfox0TcysZmb/ZGY/HvrxH4fjx8zs6WHcfM3MeGXMFO4+0n8AihiUtboDQAXAjwHcN2o/hr68CmDPNpz3NwC8G8Bz1439ZwCPDh8/CuDPtsmPTwP4DyNej4MA3j18PAXgJQD3jXpNAj9GuiYY1I6dHD4uA3gawHsAfB3AR4bj/wXAv7uR427Hnf0BAC+7+2kflJ7+KoCHtsGPbcPdvwfgzYn9D2FQuBMYUQFP4sfIcfc5d//h8PEyBsVRDmPEaxL4MVJ8wKYXed2OYD8M4BfX/bydxSodwD+Y2TNmdnybfLjGfnefGz4+D2D/NvrycTM7NfyYv+VfJ67HzI5iUD/haWzjmrzJD2DEa7IVRV5z36B7n7u/G8DvAfgjM/uN7XYIGLyzI2wOvKV8EcCdGPQImAPw2VGd2MwmAXwDwCfcfel62yjXJOHHyNfEN1DklbEdwX4OwO3X/UyLVW417n5u+P88gG9heyvvXDCzgwAw/H9+O5xw9wvDC60P4EsY0ZqYWRmDAPuyu39zODzyNUn5sV1rMjz3Im6wyCtjO4L9BwDuHu4sVgB8BMCTo3bCzCbMbOraYwC/C+C5eNaW8
iQGhTuBbSzgeS24hnwII1gTGxSEewzAi+7+uetMI10T5seo12TLiryOaofxTbuNH8Bgp/MVAH+yTT7cgYES8GMAz4/SDwBfweDjYAeD716PYNAz7ykAPwPwPwHs2iY//huAnwA4hUGwHRyBH+/D4CP6KQDPDv99YNRrEvgx0jUB8A4MiriewuCN5U+vu2b/CcDLAP4GQPVGjqu/oBMiE3LfoBMiGxTsQmSCgl2ITFCwC5EJCnYhMkHBLkQmKNiFyAQFuxCZ8H8BnU92n9EmObcAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import mindspore.dataset as ds\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "\n", - "train_path = \"./datasets/cifar-10-batches-bin/train\"\n", - "ds_train = ds.Cifar10Dataset(train_path, num_parallel_workers=8, shuffle=True)\n", - "print(\"the cifar dataset size is :\", ds_train.get_dataset_size())\n", - "dict1 = ds_train.create_dict_iterator()\n", - "dict_data = next(dict1)\n", - "image = dict_data[\"image\"].asnumpy()\n", - "print(\"the tensor of image is:\", image.shape)\n", - "plt.imshow(np.array(image))\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "可以看到CIFAR-10总共包含了50000张32×32的彩色图片。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义数据增强函数" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "定义数据集增强函数并将原始数据集进行增强,查看数据集增强后张量数据:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "the cifar dataset size is: 1562\n", - "the tensor of image is: (32, 3, 224, 224)\n" - ] - } - ], - "source": [ - "import os\n", - "from mindspore import dtype as mstype\n", - "import mindspore.dataset as ds\n", - "import mindspore.dataset.vision.c_transforms as C\n", - "import mindspore.dataset.transforms.c_transforms as C2\n", - "\n", - "def create_dataset(dataset_path, do_train, repeat_num=1, batch_size=32):\n", - " \n", - " cifar_ds = ds.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True)\n", - " \n", - " # define map operations\n", - " trans = []\n", - " if do_train:\n", - " trans += [\n", - " C.RandomCrop((32, 32), (4, 4, 4, 4)),\n", - " C.RandomHorizontalFlip(prob=0.5)\n", - " ]\n", - "\n", - " trans += [\n", - " C.Resize((224, 224)),\n", - " C.Rescale(1.0 / 255.0, 0.0),\n", - " C.Normalize([0.4914, 0.4822, 
0.4465], [0.2023, 0.1994, 0.2010]),\n", - " C.HWC2CHW()\n", - " ]\n", - "\n", - " type_cast_op = C2.TypeCast(mstype.int32)\n", - "\n", - " cifar_ds = cifar_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=8)\n", - " cifar_ds = cifar_ds.map(operations=trans, input_columns=\"image\", num_parallel_workers=8)\n", - "\n", - " cifar_ds = cifar_ds.batch(batch_size, drop_remainder=True)\n", - " cifar_ds = cifar_ds.repeat(repeat_num)\n", - "\n", - " return cifar_ds\n", - "\n", - "\n", - "cifar_ds_train = create_dataset(train_path, do_train=True, repeat_num=1, batch_size=32)\n", - "print(\"the cifar dataset size is:\", cifar_ds_train.get_dataset_size())\n", - "dict1 = cifar_ds_train.create_dict_iterator()\n", - "dict_data = next(dict1)\n", - "image = dict_data[\"image\"].asnumpy()\n", - "print(\"the tensor of image is:\", image.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "cifar10通过数据增强后的,变成了一共有1562个batch,张量为(32,3,224,224)的数据集。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义深度神经网络" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本篇使用的MindSpore中的ResNet-50网络模型,下载相关的代码文件。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "--2021-03-16 18:05:54-- https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/source-codes/resnet.py\n", - "Resolving proxy-notebook.modelarts-dev-proxy.com (proxy-notebook.modelarts-dev-proxy.com)... 192.168.0.172\n", - "Connecting to proxy-notebook.modelarts-dev-proxy.com (proxy-notebook.modelarts-dev-proxy.com)|192.168.0.172|:8083... connected.\n", - "Proxy request sent, awaiting response... 
200 OK\n", - "Length: 9521 (9.3K) [binary/octet-stream]\n", - "Saving to: ‘resnet.py’\n", - "\n", - "resnet.py 100%[===================>] 9.30K --.-KB/s in 0s \n", - "\n", - "2021-03-16 18:05:54 (126 MB/s) - ‘resnet.py’ saved [9521/9521]\n", - "\n" - ] - } - ], - "source": [ - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/source-codes/resnet.py" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载后的文件在notebook的工作目录上,可以导出resnet50网络作为本案例的训练网络。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "from resnet import resnet50\n", - "\n", - "network = resnet50(batch_size=32, num_classes=10)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义回调函数Time_per_Step来计算单步训练耗时" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`Time_per_Step`用于计算每步训练的时间消耗情况,方便对比混合精度训练和单精度训练的性能区别。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.train.callback import Callback\n", - "import time\n", - "\n", - "class Time_per_Step(Callback):\n", - " def step_begin(self, run_context):\n", - " cb_params = run_context.original_args()\n", - " cb_params.init_time = time.time()\n", - " \n", - " def step_end(selfself, run_context):\n", - " cb_params = run_context.original_args()\n", - " one_step_time = (time.time() - cb_params.init_time) * 1000\n", - " print(one_step_time, \"ms\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义训练网络" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 设置混合精度训练并执行训练" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "由于MindSpore已经添加了自动混合精度训练功能,我们这里操作起来非常方便,只需要在Model中添加参数`amp_level=O2`就完成了设置GPU模式下的混合精度训练设置。运行时,将会自动混合精度训练模型。\n", - "\n", - "`amp_level`的参数详情:\n", - "\n", - "`O0`:表示不做任何变化,即单精度训练,系统默认`O0`。\n", - "\n", - 
"`O2`:表示将网络中的参数计算变为float16。适用于GPU环境。\n", - "\n", - "`O3`:表示将网络中的参数计算变为float16,同时需要在Model中添加参数`keep_batchnorm_fp32=False`。适用于Ascend环境。\n", - "\n", - "在`Model`中设置`amp_level=O2`后即可执行混合精度训练:" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 1562, loss is 0.907213\n", - "473624.29022789 ms\n", - "epoch time: 510142.338 ms, per step time: 326.596 ms\n", - "epoch: 2 step: 1562, loss is 1.1226461\n", - "456184.42153930664 ms\n", - "epoch time: 456187.154 ms, per step time: 292.053 ms\n", - "epoch: 3 step: 1562, loss is 0.76163876\n", - "456118.32642555237 ms\n", - "epoch time: 456120.763 ms, per step time: 292.011 ms\n", - "epoch: 4 step: 1562, loss is 0.68869615\n", - "456123.6324310303 ms\n", - "epoch time: 456126.046 ms, per step time: 292.014 ms\n", - "epoch: 5 step: 1562, loss is 0.8315078\n", - "456153.0604362488 ms\n", - "epoch time: 456155.529 ms, per step time: 292.033 ms\n" - ] - } - ], - "source": [ - "\"\"\"train ResNet-50\"\"\"\n", - "import os\n", - "import random\n", - "import argparse\n", - "from mindspore import context\n", - "import mindspore.nn as nn\n", - "from mindspore import Model\n", - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor\n", - "from mindspore.nn import SoftmaxCrossEntropyWithLogits\n", - "\n", - "\n", - "if __name__ == '__main__':\n", - "\n", - " context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")\n", - " \n", - " model_path= \"./models/ckpt/mindspore_mixed_precision\"\n", - " batch_size = 32\n", - " epoch_size = 5\n", - " ds_train_path = \"./datasets/cifar-10-batches-bin/train\"\n", - " \n", - " # clean up old run files before in Linux\n", - " os.system('rm -f {0}*.ckpt {0}*.meta {0}*.pb'.format(model_path))\n", - " # create dataset\n", - " train_dataset = create_dataset(dataset_path=ds_train_path, do_train=True, 
repeat_num=1,\n", - " batch_size=batch_size)\n", - " \n", - " # define net\n", - " net = network\n", - "\n", - " # define \n", - " step_size = train_dataset.get_dataset_size()\n", - " lr = 0.01\n", - " momentum = 0.9\n", - " \n", - " # define opt, loss, model\n", - " loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - " opt = nn.Momentum(network.trainable_params(), lr, momentum)\n", - " model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'},amp_level=\"O2\")\n", - " \n", - " # define callbacks function\n", - " steptime_cb = Time_per_Step()\n", - " time_cb = TimeMonitor(data_size=step_size)\n", - " loss_cb = LossMonitor()\n", - "\n", - " cb = [time_cb, loss_cb, steptime_cb]\n", - "\n", - " # train model\n", - " model.train(epoch_size, train_dataset, callbacks=cb, dataset_sink_mode=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 验证模型精度" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用模型进行精度验证可以得出以下代码。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Accuracy: {'acc': 0.7505008012820513}\n" - ] - } - ], - "source": [ - "# Eval model\n", - "eval_dataset_path = \"./datasets/cifar-10-batches-bin/test\"\n", - "eval_data = create_dataset(eval_dataset_path,do_train=False)\n", - "acc = model.eval(eval_data,dataset_sink_mode=True)\n", - "print(\"Accuracy:\",acc)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 对比不同网络下的混合精度训练和单精度训练的差别" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "由于篇幅原因,我们这里只展示了ResNet-50网络的混合精度训练情况。可以在主程序入口的Model中设置参数`amp_level = O0`进行单精度训练,训练完毕后,将结果进行对比,看看两者的情况,下面将我测试的情况做成表格如下。(训练时,笔者使用的GPU为Nvidia Tesla V100,不同的硬件对训练的效率影响较大,下述表格中的数据仅供参考)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "| 网络 | 是否混合训练 | 单步训练时间 | epoch | Accuracy\n", - "|:------ |:-----| :------- |:--- 
|:------ \n", - "|ResNet-50 | 否 | 100ms | 5 | 0.8128245 \n", - "|ResNet-50 | 是 | 58ms | 5 | 0.7717347" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "经过多次测试,使用ResNet-50网络,CIFAR-10数据集,进行混合精度训练对整体的训练效率提升了60%左右,而最终模型的精度有少量降低,对于使用者来说,混合精度训练在提升训练效率上,是一个很好的选择。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "当然,如果你想参考单步训练或者手动设置混合精度训练,可以参考官网教程。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本次体验我们尝试了在ResNet-50网络中使用混合精度来进行模型训练,并对比了单精度下的训练过程,了解到了混合精度训练的原理和对模型训练的提升效果。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/mindspore_nlp_application.ipynb b/tutorials/notebook/mindspore_nlp_application.ipynb deleted file mode 100644 index 8c048e0b0d681665a6bc9af5e7b357150c72cd6c..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_nlp_application.ipynb +++ /dev/null @@ -1,1128 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 自然语言处理应用" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "情感分类是自然语言处理中文本分类问题的子集,属于自然语言处理最基础的应用。它是对带有感情色彩的主观性文本进行分析和推理的过程,即分析说话人的态度,是倾向正面还是反面。\n", - "\n", - "> 通常情况下,我们会把情感类别分为正面、反面和中性三类。虽然“面无表情”的评论也有不少;不过,大部分时候会只采用正面和反面的案例进行训练,下面这个数据集就是很好的例子。\n", - "\n", - "传统的文本主题分类问题的典型参考数据集为[20 Newsgroups](http://qwone.com/~jason/20Newsgroups/),该数据集由20组新闻数据组成,包含约20000个新闻文档。\n", - 
"其主题列表中有些类别的数据比较相似,例如comp.sys.ibm.pc.hardware和comp.sys.mac.hardware都是和电脑系统硬件相关的题目,相似度比较高。而有些主题类别的数据相对来说就毫无关联,例如misc.forsale和soc.religion.christian。\n", - "\n", - "就网络本身而言,文本主题分类的网络结构和情感分类的网络结构大致相似。在掌握了情感分类网络如何构造之后,很容易可以构造一个类似的网络,稍作调参即可用于文本主题分类任务。\n", - "\n", - "但在业务上下文侧,文本主题分类是分析文本讨论的客观内容,而情感分类是要从文本中得到它是否支持某种观点的信息。比如,“《阿甘正传》真是好看极了,影片主题明确,节奏流畅。”这句话,在文本主题分类是要将其归为类别为“电影”主题,而情感分类则要挖掘出这一影评的态度是正面还是负面。\n", - "\n", - "相对于传统的文本主题分类,情感分类较为简单,实用性也较强。常见的购物网站、电影网站都可以采集到相对高质量的数据集,也很容易给业务领域带来收益。例如,可以结合领域上下文,自动分析特定类型客户对当前产品的意见,可以分主题分用户类型对情感进行分析,以作针对性的处理,甚至基于此进一步推荐产品,提高转化率,带来更高的商业收益。\n", - "\n", - "特殊领域中,某些非极性词也充分表达了用户的情感倾向,比如下载使用APP时,“卡死了”、“下载太慢了”就表达了用户的负面情感倾向;股票领域中,“看涨”、“牛市”表达的就是用户的正面情感倾向。所以,本质上,我们希望模型能够在垂直领域中,挖掘出一些特殊的表达,作为极性词给情感分类系统使用:\n", - "\n", - "$垂直极性词 = 通用极性词 + 领域特有极性词$\n", - "\n", - "按照处理文本的粒度不同,情感分析可分为词语级、短语级、句子级、段落级以及篇章级等几个研究层次。这里以“段落级”为例,输入为一个段落,输出为影评是正面还是负面的信息。\n", - "\n", - "接下来,以IMDB影评情感分类为例来体验MindSpore在自然语言处理上的应用。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 整体流程\n", - "\n", - "1. 准备环节。\n", - "2. 加载数据集,进行数据处理。\n", - "3. 定义网络。\n", - "4. 定义优化器和损失函数。\n", - "5. 使用网络训练数据,生成模型。\n", - "6. 得到模型之后,使用验证数据集,查看模型精度情况。\n", - "\n", - "> 本次体验流程支持CPU或GPU环境,Ascend环境暂不支持。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 准备环节\n", - "\n", - "### 下载数据集\n", - "\n", - "本次体验采用IMDB影评数据集作为实验数据。\n", - "\n", - "1. 下载IMDB影评数据集。\n", - "\n", - " 以下是负面影评(Negative)和正面影评(Positive)的案例。\n", - "\n", - "| Review | Label | \n", - "|:---|:---:|\n", - "| \"Quitting\" may be as much about exiting a pre-ordained identity as about drug withdrawal. As a rural guy coming to Beijing, class and success must have struck this young artist face on as an appeal to separate from his roots and far surpass his peasant parents' acting success. Troubles arise, however, when the new man is too new, when it demands too big a departure from family, history, nature, and personal identity. 
The ensuing splits, and confusion between the imaginary and the real and the dissonance between the ordinary and the heroic are the stuff of a gut check on the one hand or a complete escape from self on the other. | Negative | \n", - "| This movie is amazing because the fact that the real people portray themselves and their real life experience and do such a good job it's like they're almost living the past over again. Jia Hongsheng plays himself an actor who quit everything except music and drugs struggling with depression and searching for the meaning of life while being angry at everyone especially the people who care for him most. | Positive |\n", - " \n", - "  将下载好的数据集解压并放在当前工作目录下的`datasets`目录下,由于数据集文件较多,解压过程耗时大约15分钟。其中,参数`--checkpoint=1000 --checkpoint-action=dot`表示每解压1000个文件将在底部追加打印一个黑点,如下所示。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "--2020-12-02 16:15:42-- https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/aclImdb_v1.tar.gz\n", - "Resolving proxy-notebook.modelarts-dev-proxy.com (proxy-notebook.modelarts-dev-proxy.com)... 192.168.0.172\n", - "Connecting to proxy-notebook.modelarts-dev-proxy.com (proxy-notebook.modelarts-dev-proxy.com)|192.168.0.172|:8083... connected.\n", - "Proxy request sent, awaiting response... 200 OK\n", - "Length: 84125825 (80M) [application/gzip]\n", - "Saving to: ‘aclImdb_v1.tar.gz’\n", - "\n", - "aclImdb_v1.tar.gz 100%[===================>] 80.23M 157MB/s in 0.5s \n", - "\n", - "2020-12-02 16:15:43 (157 MB/s) - ‘aclImdb_v1.tar.gz’ saved [84125825/84125825]\n", - "\n", - "............................." - ] - } - ], - "source": [ - "!wget https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/aclImdb_v1.tar.gz -N\n", - "!mkdir -p datasets\n", - "!if [ ! 
-d \"datasets/aclImdb\" ];then tar -C datasets --checkpoint=1000 --checkpoint-action=dot -xzf aclImdb_v1.tar.gz;fi" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 下载GloVe文件\n", - " 下载并解压GloVe文件到当前工作目录下的`datasets`目录下,并在所有Glove文件开头处添加如下所示新的一行,意思是总共读取400000个单词,每个单词用300纬度的词向量表示。\n", - "\n", - " ```\n", - " 400000 300\n", - " ```" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "--2020-12-02 16:31:53-- https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/glove.6B.zip\n", - "Resolving proxy-notebook.modelarts-dev-proxy.com (proxy-notebook.modelarts-dev-proxy.com)... 192.168.0.172\n", - "Connecting to proxy-notebook.modelarts-dev-proxy.com (proxy-notebook.modelarts-dev-proxy.com)|192.168.0.172|:8083... connected.\n", - "Proxy request sent, awaiting response... 200 OK\n", - "Length: 862182613 (822M) [application/zip]\n", - "Saving to: ‘glove.6B.zip’\n", - "\n", - "glove.6B.zip 100%[===================>] 822.24M 192MB/s in 4.4s \n", - "\n", - "2020-12-02 16:31:58 (188 MB/s) - ‘glove.6B.zip’ saved [862182613/862182613]\n", - "\n", - "Archive: glove.6B.zip\n", - " inflating: datasets/glove/glove.6B.50d.txt \n", - " inflating: datasets/glove/glove.6B.100d.txt \n", - " inflating: datasets/glove/glove.6B.200d.txt \n", - " inflating: datasets/glove/glove.6B.300d.txt \n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/glove.6B.zip\n", - "!unzip -o glove.6B.zip -d datasets/glove\n", - "!sed -i '1i 400000 300' datasets/glove/*" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 
在当前工作目录创建名为`preprocess`的空目录,该目录将用于存储在数据集预处理操作中IMDB数据集转换为MindRecord格式后的文件。此时当前工作目录结构如下所示。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - ".\n", - "├── aclImdb_v1.tar.gz\n", - "├── ckpt\n", - "├── datasets\n", - "│   ├── aclImdb\n", - "│   │   ├── imdbEr.txt\n", - "│   │   ├── imdb.vocab\n", - "│   │   ├── README\n", - "│   │   ├── test\n", - "│   │   └── train\n", - "│   └── glove\n", - "│   ├── glove.6B.100d.txt\n", - "│   ├── glove.6B.200d.txt\n", - "│   ├── glove.6B.300d.txt\n", - "│   └── glove.6B.50d.txt\n", - "├── glove.6B.zip\n", - "├── nlp_application.ipynb\n", - "└── preprocess\n", - "\n", - "7 directories, 10 files\n" - ] - } - ], - "source": [ - "!mkdir -p preprocess ckpt\n", - "!tree -L 3 ." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 确定评价标准\n", - "\n", - "作为典型的分类问题,情感分类的评价标准可以比照普通的分类问题处理。常见的精度(Accuracy)、精准度(Precision)、召回率(Recall)和F_beta分数都可以作为参考。\n", - "\n", - "$精度(Accuracy)= 分类正确的样本数目 / 总样本数目$\n", - "\n", - "$精准度(Precision)= 真阳性样本数目 / 所有预测类别为阳性的样本数目$\n", - "\n", - "$召回率(Recall)= 真阳性样本数目 / 所有真实类别为阳性的样本数目$ \n", - "\n", - "$F1分数 = (2 * Precision * Recall) / (Precision + Recall)$\n", - "\n", - "在IMDB这个数据集中,正负样本数差别不大,可以简单地用精度(accuracy)作为分类器的衡量标准。\n", - "\n", - "### 确定网络\n", - "\n", - "我们使用基于LSTM构建的SentimentNet网络进行自然语言处理。\n", - "\n", - "> LSTM(Long short-term memory,长短期记忆)网络是一种时间循环神经网络,适合于处理和预测时间序列中间隔和延迟非常长的重要事件。\n", - "> 本次体验面向GPU或CPU硬件平台。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 配置运行信息和SentimentNet网络参数" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 
使用`parser`模块传入运行必要的信息。\n", - " \n", - " - `preprocess`:是否预处理数据集,默认为否。\n", - " - `aclimdb_path`:数据集存放路径。\n", - " - `glove_path`:GloVe文件存放路径。\n", - " - `preprocess_path`:预处理数据集的结果文件夹。\n", - " - `ckpt_path`:CheckPoint文件路径。\n", - " - `pre_trained`:预加载CheckPoint文件。\n", - " - `device_target`:指定GPU或CPU环境。\n", - "\n", - "2. 进行训练前,需要配置必要的信息,包括环境信息、执行的模式、后端信息及硬件信息。 \n", - "\n", - "运行以下一段代码中配置训练所需相关参数(详细的接口配置信息,请参见MindSpore官网`context.set_context`API接口说明)。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Current context loaded:\n", - " mode: 0\n", - " device_target: GPU\n" - ] - } - ], - "source": [ - "import argparse\n", - "from mindspore import context\n", - "from easydict import EasyDict as edict\n", - "\n", - "\n", - "# LSTM CONFIG\n", - "lstm_cfg = edict({\n", - " 'num_classes': 2,\n", - " 'learning_rate': 0.1,\n", - " 'momentum': 0.9,\n", - " 'num_epochs': 10,\n", - " 'batch_size': 64,\n", - " 'embed_size': 300,\n", - " 'num_hiddens': 100,\n", - " 'num_layers': 2,\n", - " 'bidirectional': True,\n", - " 'save_checkpoint_steps': 390,\n", - " 'keep_checkpoint_max': 10\n", - "})\n", - "\n", - "cfg = lstm_cfg\n", - "\n", - "parser = argparse.ArgumentParser(description='MindSpore LSTM Example')\n", - "parser.add_argument('--preprocess', type=str, default='false', choices=['true', 'false'],\n", - " help='whether to preprocess data.')\n", - "parser.add_argument('--aclimdb_path', type=str, default=\"./datasets/aclImdb\",\n", - " help='path where the dataset is stored.')\n", - "parser.add_argument('--glove_path', type=str, default=\"./datasets/glove\",\n", - " help='path where the GloVe is stored.')\n", - "parser.add_argument('--preprocess_path', type=str, default=\"./preprocess\",\n", - " help='path where the pre-process data is stored.')\n", - "parser.add_argument('--ckpt_path', type=str, default=\"./models/ckpt/nlp_application\",\n", - " help='the 
path to save the checkpoint file.')\n", - "parser.add_argument('--pre_trained', type=str, default=None,\n", - " help='the pretrained checkpoint file path.')\n", - "parser.add_argument('--device_target', type=str, default=\"GPU\", choices=['GPU', 'CPU'],\n", - " help='the target device to run, support \"GPU\", \"CPU\". Default: \"GPU\".')\n", - "args = parser.parse_args(['--device_target', 'GPU', '--preprocess', 'true'])\n", - "\n", - "context.set_context(\n", - " mode=context.GRAPH_MODE,\n", - " save_graphs=False,\n", - " device_target=args.device_target)\n", - "\n", - "print(\"Current context loaded:\\n mode: {}\\n device_target: {}\".format(context.get_context(\"mode\"), context.get_context(\"device_target\")))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "安装`gensim`依赖包。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Looking in indexes: http://repo.myhuaweicloud.com/repository/pypi/simple\n", - "Requirement already satisfied: gensim in /home/ma-user/anaconda3/envs/MindSpore-1.0.1/lib/python3.7/site-packages (3.8.3)\n", - "Requirement already satisfied: numpy>=1.11.3 in /home/ma-user/anaconda3/envs/MindSpore-1.0.1/lib/python3.7/site-packages (from gensim) (1.17.5)\n", - "Requirement already satisfied: six>=1.5.0 in /home/ma-user/anaconda3/envs/MindSpore-1.0.1/lib/python3.7/site-packages (from gensim) (1.15.0)\n", - "Requirement already satisfied: smart-open>=1.8.1 in /home/ma-user/anaconda3/envs/MindSpore-1.0.1/lib/python3.7/site-packages (from gensim) (4.0.1)\n", - "Requirement already satisfied: scipy>=0.18.1 in /home/ma-user/anaconda3/envs/MindSpore-1.0.1/lib/python3.7/site-packages (from gensim) (1.3.3)\n" - ] - } - ], - "source": [ - "!pip install gensim" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 数据处理\n", - "\n", - "## 预处理数据集\n", - "\n", - "执行数据集预处理:\n", - "- 
定义`ImdbParser`类解析文本数据集,包括编码、分词、对齐、处理GloVe原始数据,使之能够适应网络结构。\n", - "- 定义`convert_to_mindrecord`函数将数据集格式转换为MindRecord格式,便于MindSpore读取。函数`_convert_to_mindrecord`中`weight.txt`为数据预处理后自动生成的weight参数信息文件。\n", - "- 调用`convert_to_mindrecord`函数执行数据集预处理。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Data Pre-processing ==============\n", - "======================= Successful =======================\n" - ] - } - ], - "source": [ - "import os\n", - "from itertools import chain\n", - "import numpy as np\n", - "import gensim\n", - "from mindspore.mindrecord import FileWriter\n", - "\n", - "\n", - "class ImdbParser():\n", - " \"\"\"\n", - " parse aclImdb data to features and labels.\n", - " sentence->tokenized->encoded->padding->features\n", - " \"\"\"\n", - "\n", - " def __init__(self, imdb_path, glove_path, embed_size=300):\n", - " self.__segs = ['train', 'test']\n", - " self.__label_dic = {'pos': 1, 'neg': 0}\n", - " self.__imdb_path = imdb_path\n", - " self.__glove_dim = embed_size\n", - " self.__glove_file = os.path.join(glove_path, 'glove.6B.' 
+ str(self.__glove_dim) + 'd.txt')\n", - "\n", - " # properties\n", - " self.__imdb_datas = {}\n", - " self.__features = {}\n", - " self.__labels = {}\n", - " self.__vacab = {}\n", - " self.__word2idx = {}\n", - " self.__weight_np = {}\n", - " self.__wvmodel = None\n", - "\n", - " def parse(self):\n", - " \"\"\"\n", - " parse imdb data to memory\n", - " \"\"\"\n", - " self.__wvmodel = gensim.models.KeyedVectors.load_word2vec_format(self.__glove_file)\n", - "\n", - " for seg in self.__segs:\n", - " self.__parse_imdb_datas(seg)\n", - " self.__parse_features_and_labels(seg)\n", - " self.__gen_weight_np(seg)\n", - "\n", - " def __parse_imdb_datas(self, seg):\n", - " \"\"\"\n", - " load data from txt\n", - " \"\"\"\n", - " data_lists = []\n", - " for label_name, label_id in self.__label_dic.items():\n", - " sentence_dir = os.path.join(self.__imdb_path, seg, label_name)\n", - " for file in os.listdir(sentence_dir):\n", - " with open(os.path.join(sentence_dir, file), mode='r', encoding='utf8') as f:\n", - " sentence = f.read().replace('\\n', '')\n", - " data_lists.append([sentence, label_id])\n", - " self.__imdb_datas[seg] = data_lists\n", - "\n", - " def __parse_features_and_labels(self, seg):\n", - " \"\"\"\n", - " parse features and labels\n", - " \"\"\"\n", - " features = []\n", - " labels = []\n", - " for sentence, label in self.__imdb_datas[seg]:\n", - " features.append(sentence)\n", - " labels.append(label)\n", - "\n", - " self.__features[seg] = features\n", - " self.__labels[seg] = labels\n", - "\n", - " # update feature to tokenized\n", - " self.__updata_features_to_tokenized(seg)\n", - " # parse vacab\n", - " self.__parse_vacab(seg)\n", - " # encode feature\n", - " self.__encode_features(seg)\n", - " # padding feature\n", - " self.__padding_features(seg)\n", - "\n", - " def __updata_features_to_tokenized(self, seg):\n", - " tokenized_features = []\n", - " for sentence in self.__features[seg]:\n", - " tokenized_sentence = [word.lower() for word in 
sentence.split(\" \")]\n", - " tokenized_features.append(tokenized_sentence)\n", - " self.__features[seg] = tokenized_features\n", - "\n", - " def __parse_vacab(self, seg):\n", - " # vocab\n", - " tokenized_features = self.__features[seg]\n", - " vocab = set(chain(*tokenized_features))\n", - " self.__vacab[seg] = vocab\n", - "\n", - " # word_to_idx: {'hello': 1, 'world':111, ... '': 0}\n", - " word_to_idx = {word: i + 1 for i, word in enumerate(vocab)}\n", - " word_to_idx[''] = 0\n", - " self.__word2idx[seg] = word_to_idx\n", - "\n", - " def __encode_features(self, seg):\n", - " \"\"\" encode word to index \"\"\"\n", - " word_to_idx = self.__word2idx['train']\n", - " encoded_features = []\n", - " for tokenized_sentence in self.__features[seg]:\n", - " encoded_sentence = []\n", - " for word in tokenized_sentence:\n", - " encoded_sentence.append(word_to_idx.get(word, 0))\n", - " encoded_features.append(encoded_sentence)\n", - " self.__features[seg] = encoded_features\n", - "\n", - " def __padding_features(self, seg, maxlen=500, pad=0):\n", - " \"\"\" pad all features to the same length \"\"\"\n", - " padded_features = []\n", - " for feature in self.__features[seg]:\n", - " if len(feature) >= maxlen:\n", - " padded_feature = feature[:maxlen]\n", - " else:\n", - " padded_feature = feature\n", - " while len(padded_feature) < maxlen:\n", - " padded_feature.append(pad)\n", - " padded_features.append(padded_feature)\n", - " self.__features[seg] = padded_features\n", - "\n", - " def __gen_weight_np(self, seg):\n", - " \"\"\"\n", - " generate weight by gensim\n", - " \"\"\"\n", - " weight_np = np.zeros((len(self.__word2idx[seg]), self.__glove_dim), dtype=np.float32)\n", - " for word, idx in self.__word2idx[seg].items():\n", - " if word not in self.__wvmodel:\n", - " continue\n", - " word_vector = self.__wvmodel.get_vector(word)\n", - " weight_np[idx, :] = word_vector\n", - "\n", - " self.__weight_np[seg] = weight_np\n", - "\n", - " def get_datas(self, seg):\n", - " 
\"\"\"\n", - " return features, labels, and weight\n", - " \"\"\"\n", - " features = np.array(self.__features[seg]).astype(np.int32)\n", - " labels = np.array(self.__labels[seg]).astype(np.int32)\n", - " weight = np.array(self.__weight_np[seg])\n", - " return features, labels, weight\n", - "\n", - "\n", - "\n", - "def _convert_to_mindrecord(data_home, features, labels, weight_np=None, training=True):\n", - " \"\"\"\n", - " convert imdb dataset to mindrecoed dataset\n", - " \"\"\"\n", - " if weight_np is not None:\n", - " np.savetxt(os.path.join(data_home, 'weight.txt'), weight_np)\n", - "\n", - " # write mindrecord\n", - " schema_json = {\"id\": {\"type\": \"int32\"},\n", - " \"label\": {\"type\": \"int32\"},\n", - " \"feature\": {\"type\": \"int32\", \"shape\": [-1]}}\n", - "\n", - " data_dir = os.path.join(data_home, \"aclImdb_train.mindrecord\")\n", - " if not training:\n", - " data_dir = os.path.join(data_home, \"aclImdb_test.mindrecord\")\n", - "\n", - " def get_imdb_data(features, labels):\n", - " data_list = []\n", - " for i, (label, feature) in enumerate(zip(labels, features)):\n", - " data_json = {\"id\": i,\n", - " \"label\": int(label),\n", - " \"feature\": feature.reshape(-1)}\n", - " data_list.append(data_json)\n", - " return data_list\n", - "\n", - " writer = FileWriter(data_dir, shard_num=4)\n", - " data = get_imdb_data(features, labels)\n", - " writer.add_schema(schema_json, \"nlp_schema\")\n", - " writer.add_index([\"id\", \"label\"])\n", - " writer.write_raw_data(data)\n", - " writer.commit()\n", - "\n", - "\n", - "def convert_to_mindrecord(embed_size, aclimdb_path, preprocess_path, glove_path):\n", - " \"\"\"\n", - " convert imdb dataset to mindrecoed dataset\n", - " \"\"\"\n", - " parser = ImdbParser(aclimdb_path, glove_path, embed_size)\n", - " parser.parse()\n", - "\n", - " if not os.path.exists(preprocess_path):\n", - " print(f\"preprocess path {preprocess_path} is not exist\")\n", - " os.makedirs(preprocess_path)\n", - "\n", - " 
train_features, train_labels, train_weight_np = parser.get_datas('train')\n", - " _convert_to_mindrecord(preprocess_path, train_features, train_labels, train_weight_np)\n", - "\n", - " test_features, test_labels, _ = parser.get_datas('test')\n", - " _convert_to_mindrecord(preprocess_path, test_features, test_labels, training=False)\n", - "\n", - "if args.preprocess == \"true\":\n", - " os.system(\"rm -f ./preprocess/aclImdb* weight*\")\n", - " print(\"============== Starting Data Pre-processing ==============\")\n", - " convert_to_mindrecord(cfg.embed_size, args.aclimdb_path, args.preprocess_path, args.glove_path)\n", - " print(\"======================= Successful =======================\")\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "转换成功后会在`preprocess`目录下生成MindRecord文件,通常该操作在数据集不变的情况下,无需每次训练都执行,此时查看`preprocess`文件目录结构。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "preprocess\n", - "├── aclImdb_test.mindrecord0\n", - "├── aclImdb_test.mindrecord0.db\n", - "├── aclImdb_test.mindrecord1\n", - "├── aclImdb_test.mindrecord1.db\n", - "├── aclImdb_test.mindrecord2\n", - "├── aclImdb_test.mindrecord2.db\n", - "├── aclImdb_test.mindrecord3\n", - "├── aclImdb_test.mindrecord3.db\n", - "├── aclImdb_train.mindrecord0\n", - "├── aclImdb_train.mindrecord0.db\n", - "├── aclImdb_train.mindrecord1\n", - "├── aclImdb_train.mindrecord1.db\n", - "├── aclImdb_train.mindrecord2\n", - "├── aclImdb_train.mindrecord2.db\n", - "├── aclImdb_train.mindrecord3\n", - "├── aclImdb_train.mindrecord3.db\n", - "└── weight.txt\n", - "\n", - "0 directories, 17 files\n" - ] - } - ], - "source": [ - "!tree preprocess" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "此时`preprocess`目录下的文件为:\n", - "- 名称包含`aclImdb_train.mindrecord`的为转换后的MindRecord格式的训练数据集。\n", - "- 名称包含`aclImdb_test.mindrecord`的为转换后的MindRecord格式的测试数据集。\n", - "- 
`weight.txt`为预处理后自动生成的weight参数信息文件。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "创建训练集:\n", - "- 定义创建数据集函数`lstm_create_dataset`,创建训练集`ds_train`。\n", - "- 通过`create_dict_iterator`方法创建字典迭代器,读取已创建的数据集`ds_train`中的数据。\n", - "\n", - "运行以下一段代码,创建数据集并读取第1个`batch`中的`label`数据列表,和第1个`batch`中第1个元素的`feature`数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The first batch contains label below:\n", - "[0 0 1 1 1 0 0 0 0 0 0 1 0 0 1 0 1 1 0 0 0 0 1 1 1 0 1 1 1 0 0 1 0 0 1 0 1\n", - " 0 0 0 0 1 0 0 1 1 1 0 0 0 1 1 1 1 0 1 0 0 1 1 0 1 1 0]\n", - "\n", - "The feature of the first item in the first batch is below vector:\n", - "[249996 54143 184172 203651 229589 221693 185989 118515 64846 54704\n", - " 19712 140286 54143 10035 223633 182804 110279 20992 185989 118515\n", - " 54143 229589 124426 189682 129826 98619 251411 16315 100038 112995\n", - " 237022 116461 30735 229874 38533 25750 44090 30219 30735 229874\n", - " 171780 118515 65081 44090 74354 128277 82354 118515 215392 61497\n", - " 212639 923 210633 105168 249996 54143 185745 184172 187822 185213\n", - " 223619 100038 65443 73067 129442 44090 118515 156542 82301 111804\n", - " 66658 184172 42988 95885 185989 76874 13192 171920 229589 156542\n", - " 45558 5290 52959 80287 91542 91662 114496 112876 42988 192087\n", - " 185507 186212 66658 233582 230976 143758 128277 215027 229589 154143\n", - " 246234 167821 184159 40065 100038 112995 238258 180552 118515 95633\n", - " 128277 118515 99327 98619 184172 24185 98619 184172 88217 128277\n", - " 159969 128277 98619 96460 44090 118515 130663 710 128277 247284\n", - " 118515 90362 185989 118515 90745 100038 112995 187822 42867 249652\n", - " 118515 123509 239643 184172 118515 212864 185989 98619 161660 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 
0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0]\n" - ] - } - ], - "source": [ - "import os\n", - "import mindspore.dataset as ds\n", - "\n", - "\n", - "def lstm_create_dataset(data_home, batch_size, repeat_num=1, training=True):\n", - " \"\"\"Data operations.\"\"\"\n", - " ds.config.set_seed(1)\n", - " data_dir = os.path.join(data_home, \"aclImdb_train.mindrecord0\")\n", - " if not training:\n", - " data_dir = os.path.join(data_home, \"aclImdb_test.mindrecord0\")\n", - "\n", - " data_set = ds.MindDataset(data_dir, columns_list=[\"feature\", \"label\"], num_parallel_workers=4)\n", - "\n", - " # apply map operations on images\n", - " data_set = data_set.shuffle(buffer_size=data_set.get_dataset_size())\n", - " data_set = data_set.batch(batch_size=batch_size, drop_remainder=True)\n", - " data_set = data_set.repeat(count=repeat_num)\n", - "\n", - " return data_set\n", - "\n", - "ds_train = lstm_create_dataset(args.preprocess_path, cfg.batch_size)\n", - "\n", - "iterator = next(ds_train.create_dict_iterator())\n", - "first_batch_label = iterator[\"label\"].asnumpy()\n", - "first_batch_first_feature = iterator[\"feature\"].asnumpy()[0]\n", - "print(f\"The first batch contains label 
below:\\n{first_batch_label}\\n\")\n", - "print(f\"The feature of the first item in the first batch is below vector:\\n{first_batch_first_feature}\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义网络\n", - "\n", - "1. 导入初始化网络所需模块。\n", - "2. 定义需要单层LSTM小算子堆叠的设备类型。\n", - "3. 定义`lstm_default_state`函数来初始化网络参数及网络状态。\n", - "4. 定义`stack_lstm_default_state`函数来初始化小算子堆叠需要的初始化网络参数及网络状态。\n", - "5. 针对CPU场景,自定义单层LSTM小算子堆叠,来实现多层LSTM大算子功能。\n", - "6. 使用`Cell`方法,定义网络结构(`SentimentNet`网络)。\n", - "7. 实例化`SentimentNet`,创建网络,最后输出网络中加载的参数。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "OrderedDict([('embedding.embedding_table', Parameter (name=embedding.embedding_table, value=Tensor(shape=[252193, 300], dtype=Float32, value=\n", - "[[ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00 ... 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n", - " [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00 ... 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n", - " [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00 ... 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n", - " ...\n", - " [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00 ... 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n", - " [-2.64310002e-01, 2.03539997e-01, -1.07670002e-01 ... 3.17510009e-01, -6.45749986e-01, 4.42129999e-01],\n", - " [-2.82150000e-01, 2.53950000e-01, 3.94300014e-01 ... 1.75999999e-01, 7.86110014e-02, -7.89420009e-02]]))), ('encoder.weight', Parameter (name=encoder.weight, value=Tensor(shape=[563200, 1, 1], dtype=Float32, value=\n", - "[[[-1.65955983e-02]],\n", - " [[ 4.40648980e-02]],\n", - " [[-9.99771282e-02]],\n", - " ...\n", - " [[-6.54547513e-02]],\n", - " [[ 1.46641862e-02]],\n", - " [[-2.03442890e-02]]]))), ('decoder.weight', Parameter (name=decoder.weight, value=Tensor(shape=[2, 400], dtype=Float32, value=\n", - "[[ 8.68825766e-04, 1.55616635e-02, -3.46743106e-03 ... 
-1.70452073e-02, 6.96127317e-05, -1.37791187e-02],\n", - " [ 5.52378222e-03, -2.03212705e-02, 1.68735497e-02 ... 1.62047185e-02, 5.66494651e-03, -1.49743268e-02]]))), ('decoder.bias', Parameter (name=decoder.bias, value=Tensor(shape=[2], dtype=Float32, value= [ 0.00000000e+00, 0.00000000e+00])))])\n" - ] - } - ], - "source": [ - "import math\n", - "import numpy as np\n", - "from mindspore import Tensor, nn, context, Parameter, ParameterTuple\n", - "from mindspore.common.initializer import initializer\n", - "import mindspore.ops as ops\n", - "\n", - "STACK_LSTM_DEVICE = [\"CPU\"]\n", - "\n", - "# Initialize short-term memory (h) and long-term memory (c) to 0\n", - "def lstm_default_state(batch_size, hidden_size, num_layers, bidirectional):\n", - " \"\"\"init default input.\"\"\"\n", - " num_directions = 2 if bidirectional else 1\n", - " h = Tensor(np.zeros((num_layers * num_directions, batch_size, hidden_size)).astype(np.float32))\n", - " c = Tensor(np.zeros((num_layers * num_directions, batch_size, hidden_size)).astype(np.float32))\n", - " return h, c\n", - "\n", - "def stack_lstm_default_state(batch_size, hidden_size, num_layers, bidirectional):\n", - " \"\"\"init default input.\"\"\"\n", - " num_directions = 2 if bidirectional else 1\n", - "\n", - " h_list = c_list = []\n", - " for _ in range(num_layers):\n", - " h_list.append(Tensor(np.zeros((num_directions, batch_size, hidden_size)).astype(np.float32)))\n", - " c_list.append(Tensor(np.zeros((num_directions, batch_size, hidden_size)).astype(np.float32)))\n", - " h, c = tuple(h_list), tuple(c_list)\n", - " return h, c\n", - "\n", - "\n", - "class StackLSTM(nn.Cell):\n", - " \"\"\"\n", - " Stack multi-layers LSTM together.\n", - " \"\"\"\n", - "\n", - " def __init__(self,\n", - " input_size,\n", - " hidden_size,\n", - " num_layers=1,\n", - " has_bias=True,\n", - " batch_first=False,\n", - " dropout=0.0,\n", - " bidirectional=False):\n", - " super(StackLSTM, self).__init__()\n", - " self.num_layers = num_layers\n", 
- " self.batch_first = batch_first\n", - " self.transpose = ops.Transpose()\n", - "\n", - " # direction number\n", - " num_directions = 2 if bidirectional else 1\n", - "\n", - " # input_size list\n", - " input_size_list = [input_size]\n", - " for i in range(num_layers - 1):\n", - " input_size_list.append(hidden_size * num_directions)\n", - "\n", - " # layers\n", - " layers = []\n", - " for i in range(num_layers):\n", - " layers.append(nn.LSTMCell(input_size=input_size_list[i],\n", - " hidden_size=hidden_size,\n", - " has_bias=has_bias,\n", - " batch_first=batch_first,\n", - " bidirectional=bidirectional,\n", - " dropout=dropout))\n", - "\n", - " # weights\n", - " weights = []\n", - " for i in range(num_layers):\n", - " # weight size\n", - " weight_size = (input_size_list[i] + hidden_size) * num_directions * hidden_size * 4\n", - " if has_bias:\n", - " bias_size = num_directions * hidden_size * 4\n", - " weight_size = weight_size + bias_size\n", - "\n", - " # numpy weight\n", - " stdv = 1 / math.sqrt(hidden_size)\n", - " w_np = np.random.uniform(-stdv, stdv, (weight_size, 1, 1)).astype(np.float32)\n", - "\n", - " # lstm weight\n", - " weights.append(Parameter(initializer(Tensor(w_np), w_np.shape), name=\"weight\" + str(i)))\n", - "\n", - " #\n", - " self.lstms = layers\n", - " self.weight = ParameterTuple(tuple(weights))\n", - "\n", - " def construct(self, x, hx):\n", - " \"\"\"construct\"\"\"\n", - " if self.batch_first:\n", - " x = self.transpose(x, (1, 0, 2))\n", - " # stack lstm\n", - " h, c = hx\n", - " hn = cn = None\n", - " for i in range(self.num_layers):\n", - " x, hn, cn, _, _ = self.lstms[i](x, h[i], c[i], self.weight[i])\n", - " if self.batch_first:\n", - " x = self.transpose(x, (1, 0, 2))\n", - " return x, (hn, cn)\n", - "\n", - "\n", - "class SentimentNet(nn.Cell):\n", - " \"\"\"Sentiment network structure.\"\"\"\n", - "\n", - " def __init__(self,\n", - " vocab_size,\n", - " embed_size,\n", - " num_hiddens,\n", - " num_layers,\n", - " 
bidirectional,\n", - " num_classes,\n", - " weight,\n", - " batch_size):\n", - " super(SentimentNet, self).__init__()\n", - " # Mapp words to vectors\n", - " self.embedding = nn.Embedding(vocab_size,\n", - " embed_size,\n", - " embedding_table=weight)\n", - " self.embedding.embedding_table.requires_grad = False\n", - " self.trans = ops.Transpose()\n", - " self.perm = (1, 0, 2)\n", - "\n", - " if context.get_context(\"device_target\") in STACK_LSTM_DEVICE:\n", - " # stack lstm by user\n", - " self.encoder = StackLSTM(input_size=embed_size,\n", - " hidden_size=num_hiddens,\n", - " num_layers=num_layers,\n", - " has_bias=True,\n", - " bidirectional=bidirectional,\n", - " dropout=0.0)\n", - " self.h, self.c = stack_lstm_default_state(batch_size, num_hiddens, num_layers, bidirectional)\n", - " else:\n", - " # standard lstm\n", - " self.encoder = nn.LSTM(input_size=embed_size,\n", - " hidden_size=num_hiddens,\n", - " num_layers=num_layers,\n", - " has_bias=True,\n", - " bidirectional=bidirectional,\n", - " dropout=0.0)\n", - " self.h, self.c = lstm_default_state(batch_size, num_hiddens, num_layers, bidirectional)\n", - "\n", - " self.concat = ops.Concat(1)\n", - " if bidirectional:\n", - " self.decoder = nn.Dense(num_hiddens * 4, num_classes)\n", - " else:\n", - " self.decoder = nn.Dense(num_hiddens * 2, num_classes)\n", - "\n", - " def construct(self, inputs):\n", - " # input:(64,500,300)\n", - " embeddings = self.embedding(inputs)\n", - " embeddings = self.trans(embeddings, self.perm)\n", - " output, _ = self.encoder(embeddings, (self.h, self.c))\n", - " # states[i] size(64,200) -> encoding.size(64,400)\n", - " encoding = self.concat((output[0], output[499]))\n", - " outputs = self.decoder(encoding)\n", - " return outputs\n", - "\n", - "embedding_table = np.loadtxt(os.path.join(args.preprocess_path, \"weight.txt\")).astype(np.float32)\n", - "network = SentimentNet(vocab_size=embedding_table.shape[0],\n", - " embed_size=cfg.embed_size,\n", - " 
num_hiddens=cfg.num_hiddens,\n", - " num_layers=cfg.num_layers,\n", - " bidirectional=cfg.bidirectional,\n", - " num_classes=cfg.num_classes,\n", - " weight=Tensor(embedding_table),\n", - " batch_size=cfg.batch_size)\n", - "\n", - "print(network.parameters_dict(recurse=True))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 训练并保存模型\n", - "\n", - "运行以下一段代码,创建优化器和损失函数模型,加载训练数据集(`ds_train`)并配置好`CheckPoint`生成信息,然后使用`model.train`接口,进行模型训练。根据输出可以看到loss值随着训练逐步降低,最后达到0.262左右。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Training ==============\n", - "epoch: 1 step: 78, loss is 0.2971678\n", - "epoch: 1 step: 156, loss is 0.30519545\n", - "epoch: 1 step: 234, loss is 0.2370582\n", - "epoch: 1 step: 312, loss is 0.25823578\n", - "epoch: 1 step: 390, loss is 0.2899053\n", - "Epoch time: 27745.798, per step time: 71.143\n", - "epoch: 2 step: 78, loss is 0.20885809\n", - "epoch: 2 step: 156, loss is 0.2168142\n", - "epoch: 2 step: 234, loss is 0.14624771\n", - "epoch: 2 step: 312, loss is 0.2152691\n", - "epoch: 2 step: 390, loss is 0.3756763\n", - "Epoch time: 27407.312, per step time: 70.275\n", - "epoch: 3 step: 78, loss is 0.116764486\n", - "epoch: 3 step: 156, loss is 0.20790516\n", - "epoch: 3 step: 234, loss is 0.2118046\n", - "epoch: 3 step: 312, loss is 0.18587393\n", - "epoch: 3 step: 390, loss is 0.25241128\n", - "Epoch time: 27251.069, per step time: 69.875\n", - "epoch: 4 step: 78, loss is 0.11729147\n", - "epoch: 4 step: 156, loss is 0.16071466\n", - "epoch: 4 step: 234, loss is 0.43869072\n", - "epoch: 4 step: 312, loss is 0.37149796\n", - "epoch: 4 step: 390, loss is 0.18670222\n", - "Epoch time: 27441.597, per step time: 70.363\n", - "epoch: 5 step: 78, loss is 0.08070815\n", - "epoch: 5 step: 156, loss is 0.143559\n", - "epoch: 5 step: 234, loss is 0.292204\n", - "epoch: 
5 step: 312, loss is 0.07726648\n", - "epoch: 5 step: 390, loss is 0.15458854\n", - "Epoch time: 27602.059, per step time: 70.775\n", - "epoch: 6 step: 78, loss is 0.16412595\n", - "epoch: 6 step: 156, loss is 0.1664415\n", - "epoch: 6 step: 234, loss is 0.1091502\n", - "epoch: 6 step: 312, loss is 0.112443276\n", - "epoch: 6 step: 390, loss is 0.14458877\n", - "Epoch time: 27568.301, per step time: 70.688\n", - "epoch: 7 step: 78, loss is 0.110504806\n", - "epoch: 7 step: 156, loss is 0.079935536\n", - "epoch: 7 step: 234, loss is 0.29199448\n", - "epoch: 7 step: 312, loss is 0.1512347\n", - "epoch: 7 step: 390, loss is 0.3185295\n", - "Epoch time: 27512.058, per step time: 70.544\n", - "epoch: 8 step: 78, loss is 0.22663717\n", - "epoch: 8 step: 156, loss is 0.21799277\n", - "epoch: 8 step: 234, loss is 0.13152371\n", - "epoch: 8 step: 312, loss is 0.168206\n", - "epoch: 8 step: 390, loss is 0.1784227\n", - "Epoch time: 27545.180, per step time: 70.629\n", - "epoch: 9 step: 78, loss is 0.27715153\n", - "epoch: 9 step: 156, loss is 0.085485235\n", - "epoch: 9 step: 234, loss is 0.35549596\n", - "epoch: 9 step: 312, loss is 0.1265975\n", - "epoch: 9 step: 390, loss is 0.081303015\n", - "Epoch time: 27582.971, per step time: 70.726\n", - "epoch: 10 step: 78, loss is 0.19696395\n", - "epoch: 10 step: 156, loss is 0.03179455\n", - "epoch: 10 step: 234, loss is 0.11651886\n", - "epoch: 10 step: 312, loss is 0.050257515\n", - "epoch: 10 step: 390, loss is 0.025655827\n", - "Epoch time: 27546.935, per step time: 70.633\n", - "============== Training Success ==============\n" - ] - } - ], - "source": [ - "from mindspore import Model\n", - "from mindspore.train.callback import CheckpointConfig, ModelCheckpoint, TimeMonitor, LossMonitor\n", - "from mindspore.nn import Accuracy\n", - "from mindspore import nn\n", - "\n", - "os.system(\"rm -f {0}/*.ckpt {0}/*.meta\".format(args.ckpt_path))\n", - "loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - 
"opt = nn.Momentum(network.trainable_params(), cfg.learning_rate, cfg.momentum)\n", - "model = Model(network, loss, opt, {'acc': Accuracy()})\n", - "loss_cb = LossMonitor(per_print_times=78)\n", - "print(\"============== Starting Training ==============\")\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=cfg.save_checkpoint_steps,\n", - " keep_checkpoint_max=cfg.keep_checkpoint_max)\n", - "ckpoint_cb = ModelCheckpoint(prefix=\"lstm\", directory=args.ckpt_path, config=config_ck)\n", - "time_cb = TimeMonitor(data_size=ds_train.get_dataset_size())\n", - "if args.device_target == \"CPU\":\n", - " model.train(cfg.num_epochs, ds_train, callbacks=[time_cb, ckpoint_cb, loss_cb], dataset_sink_mode=False)\n", - "else:\n", - " model.train(cfg.num_epochs, ds_train, callbacks=[time_cb, ckpoint_cb, loss_cb])\n", - "print(\"============== Training Success ==============\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 模型验证\n", - "\n", - "创建并加载验证数据集(`ds_eval`),加载由**训练**保存的CheckPoint文件,进行验证,查看模型质量,此步骤用时约30秒。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Testing ==============\n", - "============== {'acc': 0.8476362179487179} ==============\n" - ] - } - ], - "source": [ - "from mindspore import load_checkpoint, load_param_into_net\n", - "args.ckpt_path_saved = f'{args.ckpt_path}/lstm-{cfg.num_epochs}_390.ckpt'\n", - "print(\"============== Starting Testing ==============\")\n", - "ds_eval = lstm_create_dataset(args.preprocess_path, cfg.batch_size, training=False)\n", - "param_dict = load_checkpoint(args.ckpt_path_saved)\n", - "load_param_into_net(network, param_dict)\n", - "if args.device_target == \"CPU\":\n", - " acc = model.eval(ds_eval, dataset_sink_mode=False)\n", - "else:\n", - " acc = model.eval(ds_eval)\n", - "print(\"============== {} ==============\".format(acc))\n" - ] - }, - { - "cell_type": 
"markdown", - "metadata": {}, - "source": [ - "### 训练结果评价\n", - "\n", - "根据以上一段代码的输出可以看到,在经历了10轮epoch之后,使用验证的数据集,对文本的情感分析正确率在85%左右,达到一个基本满意的结果。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结\n", - "\n", - "以上便完成了MindSpore自然语言处理应用的体验,我们通过本次体验全面了解了如何使用MindSpore进行自然语言中处理情感分类问题,理解了如何通过定义和初始化基于LSTM的`SentimentNet`网络进行训练模型及验证正确率。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} \ No newline at end of file diff --git a/tutorials/notebook/mindspore_save_model.ipynb b/tutorials/notebook/mindspore_save_model.ipynb deleted file mode 100644 index e78442650541432dfe5193303ae72eef65646a0e..0000000000000000000000000000000000000000 --- a/tutorials/notebook/mindspore_save_model.ipynb +++ /dev/null @@ -1,765 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#
    保存模型
    " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "在模型训练过程中,可以添加检查点(CheckPoint)用于保存模型的参数,以便进行推理及再训练使用。如果想继续在不同硬件平台上做推理,可通过网络和CheckPoint格式文件生成对应的MindIR、AIR和ONNX格式文件。\n", - "\n", - "- MindIR:MindSpore的一种基于图表示的函数式IR,其最核心的目的是服务于自动微分变换,目前可用于MindSpore Lite端侧推理。\n", - "\n", - "- CheckPoint:MindSpore的存储了所有训练参数值的二进制文件。采用了Google的Protocol Buffers机制,与开发语言、平台无关,具有良好的可扩展性。CheckPoint的protocol格式定义`在mindspore/ccsrc/utils/checkpoint.proto`中。\n", - "\n", - "- AIR:全称Ascend Intermediate Representation,类似ONNX,是华为定义的针对机器学习所设计的开放式的文件格式,能更好地适配Ascend AI处理器。\n", - "\n", - "- ONNX:全称Open Neural Network Exchange,是一种针对机器学习所设计的开放式的文件格式,用于存储训练好的模型。\n", - "\n", - "以下通过图片分类应用示例来介绍保存CheckPoint格式文件和导出MindIR、AIR和ONNX格式文件的方法。\n", - "\n", - "> 本文档适用于CPU、GPU和Ascend AI处理器环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "说明:
    在保存和转换模型前,我们需要完整进行图片分类训练,包含数据准备、定义网络、定义损失函数及优化器和训练网络,详细信息可以参考:https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/quick_start/quick_start.ipynb" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "整体流程如下:\n", - "\n", - "1. 数据准备\n", - "\n", - "2. 构造神经网络\n", - "\n", - "3. 搭建训练网络、定义损失函数及优化器\n", - "\n", - "4. 保存CheckPoint格式文件\n", - "\n", - "5. 导出不同格式文件\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据准备" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 下载MNIST数据集" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "运行以下命令来获取数据集:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据处理:" - ] - }, - { - "cell_type": "markdown", - "metadata": 
{}, - "source": [ - "数据集对于训练非常重要,好的数据集可以有效提高训练精度和效率,在加载数据集前,我们通常会对数据集进行一些处理。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### 定义数据集及数据操作\n", - "我们定义一个函数`create_dataset`来创建数据集。在这个函数中,我们定义好需要进行的数据增强和处理操作:\n", - "1. 定义数据集。\n", - "2. 定义进行数据增强和处理所需要的一些参数。\n", - "3. 根据参数,生成对应的数据增强操作。\n", - "4. 使用`map`映射函数,将数据操作应用到数据集。\n", - "5. 对生成的数据集进行处理。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "ExecuteTime": { - "end_time": "2020-09-04T06:46:31.263831Z", - "start_time": "2020-09-04T06:46:31.242077Z" - } - }, - "outputs": [], - "source": [ - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype\n", - "import matplotlib\n", - "import mindspore.dataset as ds\n", - "\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\" \n", - " create dataset for train or test\n", - " \n", - " Args:\n", - " data_path (str): Data path\n", - " batch_size (int): The number of data records in each group\n", - " repeat_size (int): The number of replicated data records\n", - " num_parallel_workers (int): The number of parallel workers\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define some parameters needed for data enhancement and rough justification\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # according to the parameters, generate the corresponding data enhancement method\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR)\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = 
C.TypeCast(mstype.int32)\n", - "\n", - " # using map to apply operations to a dataset\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=resize_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_nml_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " \n", - " # process the generated dataset\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size)\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 构造神经网络" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在对手写字体识别上,通常采用卷积神经网络架构(CNN)进行学习预测,最经典的属1998年由Yann LeCun创建的LeNet5架构,在构建LeNet5前,我们需要对全连接层以及卷积层进行初始化。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "ExecuteTime": { - "end_time": "2020-09-04T06:46:32.448830Z", - "start_time": "2020-09-04T06:46:31.265357Z" - } - }, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = 
nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x) \n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 搭建训练网络" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "构建完成神经网络后,就可以着手进行训练网络的构建,包括定义损失函数及优化器。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "ExecuteTime": { - "end_time": "2020-09-04T06:46:57.649137Z", - "start_time": "2020-09-04T06:46:33.811666Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "========== The Training Model is Defined. 
==========\n" - ] - } - ], - "source": [ - "import os\n", - "from mindspore.nn import SoftmaxCrossEntropyWithLogits\n", - "from mindspore.nn import Accuracy\n", - "from mindspore import context, Model\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")\n", - "\n", - "lr = 0.01\n", - "momentum = 0.9 \n", - "\n", - "# create the network\n", - "network = LeNet5()\n", - "\n", - "# define the optimizer\n", - "net_opt = nn.Momentum(network.trainable_params(), lr, momentum)\n", - "\n", - "# define the loss function\n", - "net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - "\n", - "# define the model\n", - "model = Model(network, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()} )\n", - "\n", - "epoch_size = 1\n", - "mnist_path = \"./datasets/MNIST_Data\"\n", - "\n", - "eval_dataset = create_dataset(\"./datasets/MNIST_Data/test\")\n", - "\n", - "repeat_size = 1\n", - "print(\"========== The Training Model is Defined. ==========\")\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 保存CheckPoint格式文件" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在模型训练的过程中,使用Callback机制传入回调函数`ModelCheckpoint`对象,可以保存模型参数,生成CheckPoint文件。\n", - "\n", - "通过`CheckpointConfig`对象可以设置CheckPoint的保存策略。保存的参数分为网络参数和优化器参数。\n", - "\n", - "`ModelCheckpoint`提供默认配置策略,方便用户快速上手,用户可以根据具体需求对`CheckPoint`策略进行配置。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 单次运行训练脚本保存模型" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在这里配置`CheckPoint`时,设置的是每隔375个steps就保存一次,最多保留10个CheckPoint文件,生成前缀名为“lenet”,具体用法如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "========== The Training is Starting. ==========\n", - "========== The Training is Completed and the Checkpoint Files are Saved. 
==========\n" - ] - } - ], - "source": [ - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig\n", - "\n", - "model_path = './models/ckpt/mindspore_save_model/'\n", - "# clean up old run files before in Linux\n", - "os.system('rm -f {}*.ckpt {}*.meta {}*.pb'.format(model_path, model_path, model_path))\n", - "\n", - "# define config_ck for specifying the steps to save the checkpoint and the maximum file numbers\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=375, keep_checkpoint_max=10)\n", - "# define ckpoint_cb for specifying the prefix of the file and the saving directory\n", - "ckpoint_cb = ModelCheckpoint(prefix='lenet', directory=model_path, config=config_ck)\n", - "#load the training dataset\n", - "ds_train = create_dataset(os.path.join(mnist_path, \"train\"), 32, repeat_size)\n", - "print(\"========== The Training is Starting. ==========\")\n", - "model.train(epoch_size, ds_train, callbacks=ckpoint_cb, dataset_sink_mode=False)\n", - "print(\"========== The Training is Completed and the Checkpoint Files are Saved. 
==========\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "上述代码中,首先需要初始化一个CheckpointConfig类对象,用来设置保存策略。\n", - "\n", - "- `save_checkpoint_steps`表示每隔多少个step保存一次。\n", - "- `keep_checkpoint_max`表示最多保留CheckPoint文件的数量。\n", - "- `prefix`表示生成CheckPoint文件的前缀名。\n", - "- `directory`表示存放文件的目录。\n", - "- `epoch_size`表示每个epoch需要遍历完成图片的batch数。\n", - "- `ds_train`表示数据集。\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "创建一个ModelCheckpoint对象把它传递给`model.train`方法,就可以在训练过程中使用CheckPoint功能了。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "生成的CheckPoint文件如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./models/ckpt/mindspore_save_model\n", - "├── lenet-1_1125.ckpt\n", - "├── lenet-1_1500.ckpt\n", - "├── lenet-1_1875.ckpt\n", - "├── lenet-1_375.ckpt\n", - "├── lenet-1_750.ckpt\n", - "└── lenet-graph.meta\n", - "\n", - "0 directories, 6 files\n" - ] - } - ], - "source": [ - "! tree ./models/ckpt/mindspore_save_model" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "其中:\n", - "- `lenet-graph.meta`为编译后的计算图。\n", - "- CheckPoint文件后缀名为`.ckpt`,文件的命名方式表示保存参数所在的epoch和step数。\n", - "- `lenet-1_750.ckpt`表示保存的是第1个epoch的第750个step的模型参数。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 多次运行训练脚本保存模型" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "如果用户使用相同的前缀名,运行多次训练脚本,可能会生成同名CheckPoint文件。MindSpore为方便用户区分每次生成的文件,会在用户定义的前缀后添加_和数字加以区分。如下所示:" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "========== The Training is Starting. ==========\n", - "========== The Training is Completed and the Checkpoint Files are Saved. 
==========\n" - ] - } - ], - "source": [ - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig\n", - "import os\n", - "\n", - "# clean up old run files before in Linux\n", - "os.system('rm -f {}lenet_2*.ckpt'.format(model_path))\n", - "\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=375, keep_checkpoint_max=10)\n", - "# Specify that here the script is executed for the second time\n", - "ckpoint_cb = ModelCheckpoint(prefix='lenet_2', directory='./models/ckpt/mindspore_save_model', config=config_ck)\n", - "ds_train = create_dataset(os.path.join(mnist_path, \"train\"), 32, repeat_size)\n", - "print(\"========== The Training is Starting. ==========\")\n", - "model.train(epoch_size, ds_train, callbacks=ckpoint_cb,dataset_sink_mode=False)\n", - "print(\"========== The Training is Completed and the Checkpoint Files are Saved. ==========\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "生成的CheckPoint文件(以`lenet-2`为前缀的`.ckpt`文件)如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./models/ckpt/mindspore_save_model\n", - "├── lenet-1_1125.ckpt\n", - "├── lenet-1_1500.ckpt\n", - "├── lenet-1_1875.ckpt\n", - "├── lenet-1_375.ckpt\n", - "├── lenet-1_750.ckpt\n", - "├── lenet_2-1_1125.ckpt\n", - "├── lenet_2-1_1500.ckpt\n", - "├── lenet_2-1_1875.ckpt\n", - "├── lenet_2-1_375.ckpt\n", - "├── lenet_2-1_750.ckpt\n", - "├── lenet_2-graph.meta\n", - "└── lenet-graph.meta\n", - "\n", - "0 directories, 12 files\n" - ] - } - ], - "source": [ - "! 
tree ./models/ckpt/mindspore_save_model" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "lenet_2-1_750.ckpt 表示本次运行脚本生成的第1个epoch的第750个step的CheckPoint文件。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 配置时间策略保存模型" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore提供了两种保存CheckPoint策略:迭代策略和时间策略,上述代码为迭代策略。我们可以通过创建`CheckpointConfig`对象设置相应策略,CheckpointConfig中共有四个参数可以设置:\n", - "\n", - "- `save_checkpoint_steps`:表示每隔多少个step保存一个CheckPoint文件,默认值为1。\n", - "\n", - "- `save_checkpoint_seconds`:表示每隔多少秒保存一个CheckPoint文件,默认值为0。\n", - "\n", - "- `keep_checkpoint_max`:表示最多保存多少个CheckPoint文件,默认值为5。\n", - "\n", - "- `keep_checkpoint_per_n_minutes`:表示每隔多少分钟保留一个CheckPoint文件,默认值为0。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "因为时间策略需要训练时间稍长一些,所以这里把`epoch_size`改为10。\n", - "\n", - "以下代码为时间策略:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "========== The Training is Starting. ==========\n", - "========== The Training is Completed and the Checkpoint Files are Saved. 
==========\n" - ] - } - ], - "source": [ - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig\n", - "import os\n", - "\n", - "os.system('rm -f {}lenet_3*.ckpt'.format(model_path))\n", - "# define config_ck for specifying the seconds to save the checkpoint and the maximum file numbers\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=None, save_checkpoint_seconds=10, keep_checkpoint_max=None, keep_checkpoint_per_n_minutes=1)\n", - "# define ckpoint_cb for specifying the prefix of the file and the saving directory\n", - "ckpoint_cb = ModelCheckpoint(prefix='lenet_3', directory='./models/ckpt/mindspore_save_model', config=config_ck)\n", - "#load the training dataset\n", - "epoch_size = 2\n", - "ds_train = create_dataset(os.path.join(mnist_path, \"train\"), 32, repeat_size)\n", - "print(\"========== The Training is Starting. ==========\")\n", - "model.train(epoch_size, ds_train, callbacks=ckpoint_cb,dataset_sink_mode=False)\n", - "print(\"========== The Training is Completed and the Checkpoint Files are Saved. ==========\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "这里`save_checkpoint_seconds`和`keep_checkpoint_per_n_minutes`这两个分别设置为10和1。
    \n", - "意思是每10秒会保存一个CheckPoint文件,每隔1分钟会保留一个CheckPoint文件。假设训练持续了1分钟,那总共会生成7个CheckPoint文件,但是当训练结束后,实际上会看到4个CheckPoint文件(以`lenet-3`为前缀的`.ckpt`文件),即保存下来的3个文件和默认保存最后一个step的CheckPoint文件。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "生成的CheckPoint文件如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./models/ckpt/mindspore_save_model\n", - "├── lenet-1_1125.ckpt\n", - "├── lenet-1_1500.ckpt\n", - "├── lenet-1_1875.ckpt\n", - "├── lenet-1_375.ckpt\n", - "├── lenet-1_750.ckpt\n", - "├── lenet_2-1_1125.ckpt\n", - "├── lenet_2-1_1500.ckpt\n", - "├── lenet_2-1_1875.ckpt\n", - "├── lenet_2-1_375.ckpt\n", - "├── lenet_2-1_750.ckpt\n", - "├── lenet_2-graph.meta\n", - "├── lenet_3-1_1023.ckpt\n", - "├── lenet_3-2_1254.ckpt\n", - "├── lenet_3-2_1875.ckpt\n", - "├── lenet_3-2_194.ckpt\n", - "├── lenet_3-graph.meta\n", - "└── lenet-graph.meta\n", - "\n", - "0 directories, 17 files\n" - ] - } - ], - "source": [ - "! 
tree ./models/ckpt/mindspore_save_model" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "另请注意,如果想要删除.ckpt文件时,请同步删除.meta 文件。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 导出MindIR格式文件" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "当有了CheckPoint文件后,如果想继续在MindSpore Lite端侧做推理,需要通过网络和CheckPoint生成对应的MindIR格式模型文件。当前支持基于静态图,且不包含控制流语义的推理网络导出。建议使用`.mindir`作为MINDIR格式文件的后缀名。导出该格式文件的代码如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore import export, load_checkpoint, load_param_into_net\n", - "from mindspore import Tensor\n", - "import numpy as np\n", - "\n", - "lenet = LeNet5()\n", - "# return a parameter dict for model\n", - "param_dict = load_checkpoint(\"./models/ckpt/mindspore_save_model/lenet-1_1875.ckpt\")\n", - "# load the parameter into net\n", - "load_param_into_net(lenet, param_dict)\n", - "input = np.random.uniform(0.0, 1.0, size=[32, 1, 32, 32]).astype(np.float32)\n", - "# export the file with the specified name and format\n", - "export(lenet, Tensor(input), file_name='lenet-1_1875', file_format='MINDIR',)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "完成以后,在当前目录下会生成一个MindIR格式文件,文件名为:`lenet-1_1875.mindir`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 导出ONNX格式文件" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "当有了CheckPoint文件后,如果想继续在Ascend AI处理器、GPU、CPU等多种硬件上做推理,需要通过网络和CheckPoint生成对应的ONNX格式模型文件,建议使用`.onnx`作为ONNX格式文件的后缀名。导出该格式文件的代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.train.serialization import export, load_checkpoint, load_param_into_net\n", - "from mindspore import Tensor\n", - "import numpy as np\n", - "lenet = LeNet5()\n", - "# return a parameter dict for model\n", - "param_dict = 
load_checkpoint(\"./models/ckpt/mindspore_save_model/lenet-1_1875.ckpt\")\n", - "# load the parameter into net\n", - "load_param_into_net(lenet, param_dict)\n", - "input = np.random.uniform(0.0, 1.0, size=[32, 1, 32, 32]).astype(np.float32)\n", - "# export the file with the specified name and format\n", - "export(lenet, Tensor(input), file_name='lenet-1_1875', file_format='ONNX')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "完成以后,在当前目录下会生成一个ONNX格式文件,文件名为:`lenet-1_1875.onnx`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "以上就是保存模型并导出文件的全部体验过程,我们通过本次体验全面了解了训练模型的保存以及如何导出成为不同格式的文件,以便用于不同平台上的推理。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/model_encrypt_protection.ipynb b/tutorials/notebook/model_encrypt_protection.ipynb deleted file mode 100644 index cc9708c4c4c419d52b0fa9c3506e63f2de27670c..0000000000000000000000000000000000000000 --- a/tutorials/notebook/model_encrypt_protection.ipynb +++ /dev/null @@ -1,394 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#
    模型加密保护
    " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "MindSpore框架提供通过加密对模型文件进行保护的功能,使用对称加密算法对参数文件或推理模型进行加密,使用时直接加载密文模型完成推理或增量训练。\n", - "目前加密方案支持在Linux平台下对CheckPoint参数文件的保护。\n", - "\n", - "以下通过图片分类应用示例来介绍保存CheckPoint格式文件和导出MindIR、AIR和ONNX格式文件的方法。\n", - "> 本文档适用于Linux平台下CPU、GPU和Ascend AI处理器环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "说明:
    在保存和转换模型前,我们需要完整进行图片分类训练,包含数据准备、定义网络、定义损失函数及优化器和训练网络,详细信息可以参考:https://gitee.com/mindspore/docs/blob/r1.2/tutorials/training/source_zh_cn/quick_start/quick_start.ipynb" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "整体流程如下:\n", - "\n", - "1. 数据准备\n", - "\n", - "2. 构造神经网络\n", - "\n", - "3. 搭建训练网络、定义损失函数及优化器\n", - "\n", - "4. 加密导出CheckPoint格式文件\n", - "\n", - "5. 加载密文CheckPoint格式文件" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据准备" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 下载MNIST数据集\n", - "运行以下命令来获取数据集:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[01;34m./datasets/MNIST_Data\u001b[00m\n", - "├── \u001b[01;34mtest\u001b[00m\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── \u001b[01;34mtrain\u001b[00m\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 数据处理:\n", - "数据集对于训练非常重要,好的数据集可以有效提高训练精度和效率,在加载数据集前,我们通常会对数据集进行一些处理。\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"#### 定义数据集及数据操作\n", - "我们定义一个函数`create_dataset`来创建数据集。在这个函数中,我们定义好需要进行的数据增强和处理操作:\n", - "1. 定义数据集。\n", - "2. 定义进行数据增强和处理所需要的一些参数。\n", - "3. 根据参数,生成对应的数据增强操作。\n", - "4. 使用`map`映射函数,将数据操作应用到数据集。\n", - "5. 对生成的数据集进行处理。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype\n", - "import mindspore.dataset as ds\n", - "\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\" \n", - " create dataset for train or test\n", - " \n", - " Args:\n", - " data_path (str): Data path\n", - " batch_size (int): The number of data records in each group\n", - " repeat_size (int): The number of replicated data records\n", - " num_parallel_workers (int): The number of parallel workers\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define some parameters needed for data enhancement and rough justification\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # according to the parameters, generate the corresponding data enhancement method\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR)\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # using map to apply operations to a dataset\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=resize_op, input_columns=\"image\", 
num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_nml_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " \n", - " # process the generated dataset\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size)\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 构造神经网络\n", - "在对手写字体识别上,通常采用卷积神经网络架构(CNN)进行学习预测,最经典的属1998年由Yann LeCun创建的LeNet5架构,在构建LeNet5前,我们需要对全连接层以及卷积层进行初始化。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = 
self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x) \n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 搭建训练网络\n", - "构建完成神经网络后,就可以着手进行训练网络的构建,包括定义损失函数及优化器。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "========== The Training Model is Defined. ==========\n" - ] - } - ], - "source": [ - "import os\n", - "from mindspore.nn import SoftmaxCrossEntropyWithLogits\n", - "from mindspore.nn import Accuracy\n", - "from mindspore import context, Model\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")\n", - "\n", - "lr = 0.01\n", - "momentum = 0.9 \n", - "\n", - "# create the network\n", - "network = LeNet5()\n", - "\n", - "# define the optimizer\n", - "net_opt = nn.Momentum(network.trainable_params(), lr, momentum)\n", - "\n", - "# define the loss function\n", - "net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - "\n", - "# define the model\n", - "model = Model(network, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()} )\n", - "\n", - "epoch_size = 1\n", - "mnist_path = \"./datasets/MNIST_Data\"\n", - "\n", - "train_dataset = create_dataset(\"./datasets/MNIST_Data/train\")\n", - "eval_dataset = create_dataset(\"./datasets/MNIST_Data/test\")\n", - "\n", - "repeat_size = 1\n", - "print(\"========== The Training Model is Defined. 
==========\")\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 安全导出CheckPoint文件\n", - "目前MindSpore支持使用Callback机制传入回调函数`ModelCheckpoint`对象以保存模型参数,用户可以通过配置`CheckpointConfig`对象来启用参数文件的加密保护。下面示例展示了训练过程中每32个steps导出一次加密CheckPoint文件的过程:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 1875, loss is 0.04268155\n", - "epoch: 2 step: 1875, loss is 0.33143944\n", - "epoch: 3 step: 1875, loss is 0.13743193\n", - "epoch: 4 step: 1875, loss is 0.062083688\n", - "epoch: 5 step: 1875, loss is 0.090901025\n", - "epoch: 6 step: 1875, loss is 0.02297108\n", - "epoch: 7 step: 1875, loss is 0.07450344\n", - "epoch: 8 step: 1875, loss is 0.00035639966\n", - "epoch: 9 step: 1875, loss is 0.015088511\n", - "epoch: 10 step: 1875, loss is 0.0009289649\n" - ] - }, - { - "data": { - "text/plain": [ - "{'Accuracy': 0.9861778846153846}" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from mindspore.train.callback import CheckpointConfig, ModelCheckpoint, LossMonitor\n", - "\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=1875, keep_checkpoint_max=10, enc_key=b'0123456789ABCDEF', enc_mode='AES-GCM')\n", - "ckpoint_cb = ModelCheckpoint(prefix='lenet_enc', directory=None, config=config_ck)\n", - "model.train(10, train_dataset, dataset_sink_mode=False, callbacks=[ckpoint_cb, LossMonitor(1875)])\n", - "model.eval(eval_dataset, dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "除了上面这种保存模型参数的方法,还可以调用`save_checkpoint`接口来随时导出模型参数,使用方法如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore import save_checkpoint\n", - "\n", - "save_checkpoint(network, 'lenet_enc.ckpt', enc_key=b'0123456789ABCDEF', enc_mode='AES-GCM')" - ] - }, - { - "cell_type": 
"markdown", - "metadata": {}, - "source": [ - "## 加载密文CheckPoint文件\n", - "MindSpore提供`load_checkpoint`和`load_distributed_checkpoint`分别用于单文件和分布式场景下加载CheckPoint参数文件。以单文件场景为例,可以用如下方式加载密文CheckPoint文件:" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'Accuracy': 0.9861778846153846}" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from mindspore import load_checkpoint, load_param_into_net\n", - "\n", - "param_dict = load_checkpoint('lenet_enc-10_1875.ckpt', dec_key=b'0123456789ABCDEF', dec_mode='AES-GCM')\n", - "load_param_into_net(network, param_dict)\n", - "model.eval(eval_dataset, dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "可以看到密文CheckPoint文件已被正确加载。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/notebook/programming_guide/mindspore_optim.ipynb b/tutorials/notebook/programming_guide/mindspore_optim.ipynb deleted file mode 100644 index 09b55a59deca5d5ce4b32699809f850d4a8b8140..0000000000000000000000000000000000000000 --- a/tutorials/notebook/programming_guide/mindspore_optim.ipynb +++ /dev/null @@ -1,264 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 优化算法" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "`mindspore.nn.optim`是MindSpore框架中实现各种优化算法的模块,包含常用的优化器、学习率等,并且接口具备足够的通用性,可以将以后更新、更复杂的方法集成到模块里。\n", - "\n", - 
"`mindspore.nn.optim`为模型提供常用的优化器,如`SGD`、`ADAM`、`Momentum`。优化器用于计算和更新梯度,模型优化算法的选择直接关系到最终模型的性能,如果有时候效果不好,未必是特征或者模型设计的问题,很有可能是优化算法的问题;同时还有`mindspore.nn`提供的学习率的模块,学习率分为`dynamic_lr`和`learning_rate_schedule`,都是动态学习率,但是实现方式不同,学习率是监督学习以及深度学习中最为重要的参数,其决定着目标函数是否能收敛到局部最小值以及何时能收敛到最小值。合适的学习率能够使目标函数在合适的的时间内收敛到局部最小值。\n", - "\n", - "> 本文档适用于CPU、GPU和Ascend环境。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 学习率\n", - "\n", - "### dynamic_lr\n", - "\n", - "`mindspore.nn.dynamic_lr`模块有以下几个类:\n", - "\n", - "- `piecewise_constant_lr`类:基于得到分段不变的学习速率。\n", - "- `exponential_decay_lr`类:基于指数衰减函数计算学习率。\n", - "- `natural_exp_decay_lr`类:基于自然指数衰减函数计算学习率。\n", - "- `inverse_decay_lr`类:基于反时间衰减函数计算学习速率。\n", - "- `cosine_decay_lr`类:基于余弦衰减函数计算学习率。\n", - "- `polynomial_decay_lr`类:基于多项式衰减函数计算学习率。\n", - "- `warmup_lr`类:提高学习率。\n", - "\n", - "它们是属于`dynamic_lr`的不同实现方式。\n", - "\n", - "例如`piecewise_constant_lr`类代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0.1, 0.1, 0.05, 0.05, 0.05, 0.01, 0.01, 0.01, 0.01, 0.01]\n" - ] - } - ], - "source": [ - "from mindspore.nn.dynamic_lr import piecewise_constant_lr\n", - "\n", - "def test_dynamic_lr():\n", - " milestone = [2, 5, 10]\n", - " learning_rates = [0.1, 0.05, 0.01]\n", - " lr = piecewise_constant_lr(milestone, learning_rates)\n", - " print(lr)\n", - "\n", - "\n", - "if __name__ == '__main__':\n", - " test_dynamic_lr()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### learning_rate_schedule\n", - "\n", - "`mindspore.nn.learning_rate_schedule`模块下有以下几个类:`ExponentialDecayLR`类、`NaturalExpDecayLR`类、`InverseDecayLR`类、`CosineDecayLR`类、`PolynomialDecayLR`类和`WarmUpLR`类。它们都属于`learning_rate_schedule`,只是实现方式不同,各自含义如下:\n", - "\n", - "- `ExponentialDecayLR`类:基于指数衰减函数计算学习率。\n", - "- `NaturalExpDecayLR`类:基于自然指数衰减函数计算学习率。\n", - "- `InverseDecayLR`类:基于反时间衰减函数计算学习速率。\n", - "- 
`CosineDecayLR`类:基于余弦衰减函数计算学习率。\n", - "- `PolynomialDecayLR`类:基于多项式衰减函数计算学习率。\n", - "- `WarmUpLR`类:提高学习率。\n", - "\n", - "它们是属于`learning_rate_schedule`的不同实现方式。\n", - "\n", - "例如ExponentialDecayLR类代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.094868325\n" - ] - } - ], - "source": [ - "from mindspore.common import dtype as mstype\n", - "from mindspore import Tensor\n", - "from mindspore.nn.learning_rate_schedule import ExponentialDecayLR\n", - "\n", - "def test_learning_rate_schedule():\n", - " learning_rate = 0.1 # learning_rate(float) - The initial value of learning rate.\n", - " decay_rate = 0.9 # decay_rate(float) - The decay rate.\n", - " decay_steps = 4 # decay_steps(int) - A value used to calculate decayed learning rate.\n", - " global_step = Tensor(2, mstype.int32)\n", - " exponential_decay_lr = ExponentialDecayLR(learning_rate, decay_rate, decay_steps)\n", - " res = exponential_decay_lr(global_step)\n", - " print(res)\n", - "\n", - "\n", - "if __name__ == '__main__':\n", - " test_learning_rate_schedule()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Optimzer\n", - "\n", - "### 如何使用\n", - "\n", - "为了使用`mindspore.nn.optim`,我们需要构建一个`Optimizer`对象。这个对象能够保持当前参数状态并基于计算得到的梯度进行参数更新。\n", - "\n", - "- 构建\n", - "\n", - "为了构建一个`Optimizer`,我们需要给它一个包含可需要优化的参数(必须是Variable对象)的iterable。然后,你可以设置Optimizer的参数选项,比如学习率,权重衰减等等。\n", - "\n", - "代码样例如下:\n", - "```python\n", - "from mindspore import nn\n", - "\n", - "optim = nn.SGD(group_params, learning_rate=0.1, weight_decay=0.0)\n", - "optim = nn.Adam(params=net.trainable_params())\n", - "\n", - "optim = nn.Adam(group_params, learning_rate=0.1, weight_decay=0.0)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 为每一个参数单独设置选项\n", - "\n", - 
"优化器也支持为每个参数单独设置选项。若想这么做,不要直接传入变量Variable,而是传入一个字典的iterable。每一个字典都分别定义了一组参数,并且包含一个key键,这个key键对应相应的参数value值。其他的key键应该是优化器所接受的其他参数,并且会被用于对这组参数的优化。\n", - "\n", - "我们仍然能够传递选项作为关键字参数,在未重写这些选项的组中,它们会被用作默认值。当你只想改动一个参数组的选项,但其他参数组的选项不变时,这是非常有用的。\n", - "例如,当我们想制定每一层的学习率时,以`SGD`为例:\n", - "```python\n", - "from mindspore import nn\n", - "\n", - "optim = nn.SGD([{'params': conv_params, 'weight_decay': 0.01},\n", - " {'params': no_conv_params, 'lr': 0.01},\n", - " {'order_params': net.trainable_params()}],\n", - " learning_rate=0.1, weight_decay=0.0)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "这段示例意味着当参数是conv_params时候,权重衰减使用的是0.01,学习率使用的是0.1;而参数是no_conv_params时候,权重衰减使用的是0.0,学习率使用的是0.01。这个学习率learning_rate=0.1会被用于所有分组里没有设置学习率的参数,权重衰减weight_deca也是如此。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 内置优化器\n", - "\n", - "深度学习优化算法大概常用的有`SGD`、`Adam`、`Ftrl`、`lazyadam`、`Momentum`、`RMSprop`、`Lars`、`Proximal_ada_grad`和`lamb`这几种。\n", - "在`mindspore.nn.optim`模块中,他们都有对应的类实现。例如:\n", - "\n", - "- `SGD`,默认参数为纯SGD,设置`momentum`参数不为0,考虑了一阶动量,设置`nesterov`为True后变成`NAG`,即`Nesterov Accelerated Gradient`,在计算梯度时计算的是向前走一步所在位置的梯度。\n", - "\n", - "- `RMSprop`,考虑了二阶动量,对于不同的参数有不同的学习率,即自适应学习率,对`Adagrad`进行了优化,通过指数平滑只考虑一定窗口内的二阶动量。\n", - "\n", - "- `Adam`,同时考虑了一阶动量和二阶动量,可以看成`RMSprop`上进一步考虑了一阶动量。\n", - "\n", - "例如`SGD`的代码样例如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Parameter (name=z), Parameter (name=conv.weight)]\n" - ] - } - ], - "source": [ - "from mindspore import nn, Model, Tensor\n", - "import mindspore.ops as ops\n", - "import numpy as np\n", - "import mindspore.common.dtype as mstype\n", - "from mindspore.common.parameter import Parameter\n", - "\n", - "class Net(nn.Cell):\n", - " def __init__(self):\n", - " super(Net, self).__init__()\n", - " self.matmul = ops.MatMul()\n", - " self.conv = nn.Conv2d(1, 6, 5, 
pad_mode=\"valid\")\n", - " self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')\n", - " \n", - " def construct(self, x, y):\n", - " x = x * self.z\n", - " out = self.matmul(x, y)\n", - " return out\n", - "\n", - "net = Net()\n", - "optim = nn.SGD(params=net.trainable_params())\n", - "\n", - "print(net.trainable_params())\n", - "conv_params = list(filter(lambda x: 'conv' in x.name, net.trainable_params()))\n", - "no_conv_params = list(filter(lambda x: 'conv' not in x.name, net.trainable_params()))\n", - "group_params = [{'params': conv_params, 'weight_decay': 0.01},\n", - " {'params': no_conv_params, 'lr': 0.01},\n", - " {'order_params': net.trainable_params()}]\n", - "optim = nn.SGD(group_params, learning_rate=0.1, weight_decay=0.0)\n", - "\n", - "loss = nn.SoftmaxCrossEntropyWithLogits()\n", - "model = Model(net, loss_fn=loss, optimizer=optim)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.0.1", - "language": "python", - "name": "mindspore-1.0.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/test_model_security_fuzzing.ipynb b/tutorials/notebook/test_model_security_fuzzing.ipynb deleted file mode 100644 index 8fca52e6c9f29ff782177f1cc0e75852244e867a..0000000000000000000000000000000000000000 --- a/tutorials/notebook/test_model_security_fuzzing.ipynb +++ /dev/null @@ -1,496 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用fuzz testing模块测试模型安全性\n", - "作者:MindSpore团队、[徐威](https://gitee.com/chow-chow) \n", - "`Linux` `Windows` `Ascend` `GPU` `CPU` `模型评测` `企业` `高级`" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述 \n", - 
"传统软件的决策逻辑由代码逻辑决定,传统软件通过代码行覆盖率来判断当前测试是否充分,理想情况下覆盖率越高,代码测试越充分。然而,对于深度神经网络而言,程序的决策逻辑由训练数据、网络模型结构和参数通过某种黑盒机制决定,代码行覆盖率已不足以评估测试的充分性。需要根据深度网络的特点选择更为适合的测试评价准则,指导神经网络进行更为充分的测试,发现更多的边缘错误用例,从而确保模型的通用性、鲁棒性。 \n", - "\n", - "MindArmour的fuzz_testing模块以神经元覆盖率作为测试评价准则。神经元覆盖率,是指通过一组输入观察到的、激活的神经元数量和神经元输出值的范围。我们通过神经元覆盖率来指导输入变异,让输入能够激活更多的神经元,神经元值的分布范围更广,从而探索不同类型的模型输出结果、错误行为。 \n", - "\n", - "这里以LeNet模型,MNIST数据集为例,说明如何使用Fuzzer。\n", - "\n", - ">本例面向CPU、GPU、Ascend 910 AI处理器,样例代码:\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 实现阶段\n", - "### 导入需要的库文件\n", - "下列是我们需要的公共模块、MindSpore相关模块和fuzz_testing特性模块,以及配置日志标签和日志等级。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import numpy as np\n", - "\n", - "from mindspore import dataset as ds\n", - "from mindspore import dtype as mstype\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "import mindspore.nn as nn\n", - "from mindspore.nn import SoftmaxCrossEntropyWithLogits\n", - "from mindspore.common.initializer import TruncatedNormal\n", - "from mindspore import Model, Tensor, context\n", - "from mindspore.train.callback import LossMonitor\n", - "\n", - "from mindarmour.fuzz_testing import Fuzzer\n", - "from mindarmour.fuzz_testing import ModelCoverageMetrics\n", - "from mindarmour.utils.logger import LogUtil\n", - "\n", - "\n", - "LOGGER = LogUtil.get_instance()\n", - "TAG = 'Fuzz_testing'\n", - "LOGGER.set_level('INFO')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 参数配置\n", - "配置必要的信息,包括环境信息、执行的模式。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "context.set_context(mode=context.GRAPH_MODE, device_target=\"GPU\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"详细的接口配置信息,请参见`context.set_context`接口说明。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "### 运用Fuzz Testing\n", - "1. 建立LeNet模型\n", - "- 加载MNIST数据集:利用MindSpore的dataset提供的`MnistDataset`接口加载MNIST数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "# generate dataset for train of test\n", - "def generate_mnist_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1, sparse=True):\n", - " \"\"\"\n", - " create dataset for training or testing\n", - " \"\"\"\n", - " # define dataset\n", - " ds1 = ds.MnistDataset(data_path)\n", - "\n", - " # define operation parameters\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - "\n", - " # define map operations\n", - " resize_op = CV.Resize((resize_height, resize_width),\n", - " interpolation=Inter.LINEAR)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # apply map operations on images\n", - " if not sparse:\n", - " one_hot_enco = C.OneHot(10)\n", - " ds1 = ds1.map(operations=one_hot_enco, input_columns=\"label\",\n", - " num_parallel_workers=num_parallel_workers)\n", - " type_cast_op = C.TypeCast(mstype.float32)\n", - " ds1 = ds1.map(operations=type_cast_op, input_columns=\"label\",\n", - " num_parallel_workers=num_parallel_workers)\n", - " ds1 = ds1.map(operations=resize_op, input_columns=\"image\",\n", - " num_parallel_workers=num_parallel_workers)\n", - " ds1 = ds1.map(operations=rescale_op, input_columns=\"image\",\n", - " num_parallel_workers=num_parallel_workers)\n", - " ds1 = ds1.map(operations=hwc2chw_op, input_columns=\"image\",\n", - " num_parallel_workers=num_parallel_workers)\n", - "\n", - " # apply DatasetOps\n", - " buffer_size = 10000\n", - " ds1 = ds1.shuffle(buffer_size=buffer_size)\n", - " ds1 = ds1.batch(batch_size, drop_remainder=True)\n", - 
" ds1 = ds1.repeat(repeat_size)\n", - "\n", - " return ds1" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 定义LeNet模型网络" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "def conv(in_channels, out_channels, kernel_size, stride=1, padding=0):\n", - " weight = weight_variable()\n", - " return nn.Conv2d(in_channels, out_channels,\n", - " kernel_size=kernel_size, stride=stride, padding=padding,\n", - " weight_init=weight, has_bias=False, pad_mode=\"valid\")\n", - "\n", - "\n", - "def fc_with_initialize(input_channels, out_channels):\n", - " weight = weight_variable()\n", - " bias = weight_variable()\n", - " return nn.Dense(input_channels, out_channels, weight, bias)\n", - "\n", - "\n", - "def weight_variable():\n", - " return TruncatedNormal(0.02)\n", - "\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"\n", - " Lenet network\n", - " \"\"\"\n", - " def __init__(self):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = conv(1, 6, 5)\n", - " self.conv2 = conv(6, 16, 5)\n", - " self.fc1 = fc_with_initialize(16*5*5, 120)\n", - " self.fc2 = fc_with_initialize(120, 84)\n", - " self.fc3 = fc_with_initialize(84, 10)\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " def construct(self, x):\n", - " x = self.conv1(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.conv2(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.flatten(x)\n", - " x = self.fc1(x)\n", - " x = self.relu(x)\n", - " x = self.fc2(x)\n", - " x = self.relu(x)\n", - " x = self.fc3(x)\n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 训练LeNet模型。利用上面定义的数据加载函数`generate_mnist_dataset`载入数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": 
"stream", - "text": [ - "epoch: 1 step: 1875, loss is 0.5441832\n", - "epoch: 2 step: 1875, loss is 0.18585104\n", - "epoch: 3 step: 1875, loss is 0.27877027\n", - "epoch: 4 step: 1875, loss is 0.01745773\n", - "epoch: 5 step: 1875, loss is 0.025925232\n", - "epoch: 6 step: 1875, loss is 0.033913765\n", - "epoch: 7 step: 1875, loss is 0.06670261\n", - "epoch: 8 step: 1875, loss is 0.0076941913\n", - "epoch: 9 step: 1875, loss is 0.5493405\n", - "epoch: 10 step: 1875, loss is 0.0064236256\n" - ] - } - ], - "source": [ - "# Downloading MNIST datasets from OBS.\n", - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "\n", - "mnist_path = \"./datasets/MNIST_Data/\"\n", - "batch_size = 32\n", - "# train original model\n", - "ds_train = generate_mnist_dataset(os.path.join(mnist_path, \"train\"),\n", - " batch_size=batch_size, repeat_size=1,\n", - " sparse=False)\n", - "\n", - "train_images = []\n", - "for data in ds_train.create_tuple_iterator():\n", - " images = data[0].asnumpy().astype(np.float32)\n", - " train_images.append(images)\n", - "train_images = np.concatenate(train_images, axis=0)\n", - "\n", - "net = LeNet5()\n", - "loss = SoftmaxCrossEntropyWithLogits(sparse=False)\n", - "opt = nn.Momentum(net.trainable_params(), 0.01, 0.09)\n", - "model = Model(net, loss, opt, metrics=None)\n", - 
"model.train(10, ds_train, callbacks=[LossMonitor(1875)],\n", - " dataset_sink_mode=False)\n", - "\n", - "# get test data\n", - "ds_test = generate_mnist_dataset(os.path.join(mnist_path, \"test\"),\n", - " batch_size=batch_size, repeat_size=1,\n", - " sparse=False)\n", - "inputs = []\n", - "labels = []\n", - "for data in ds_test.create_tuple_iterator():\n", - " inputs.append(data[0].asnumpy().astype(np.float32))\n", - " labels.append(data[1].asnumpy())\n", - "test_inputs = np.concatenate(inputs)\n", - "test_labels = np.concatenate(labels)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. Fuzzer参数配置。 \n", - " 设置数据变异方法及参数。支持同时配置多种方法,目前支持的数据变异方法包含三类:\n", - " - 图像仿射变换方法:Translate、Scale、Shear、Rotate。\n", - " - 基于图像像素值变化的方法: Contrast、Brightness、Blur、Noise。\n", - " - 基于对抗攻击的白盒、黑盒对抗样本生成方法:FGSM、PGD、MDIIM。 \n", - "数据变异方法中一定要包含基于图像像素值变化的方法。\n", - "\n", - "前两种类型的图像变化方法,支持用户自定义配置参数,也支持算法随机选择参数。用户自定义参数配置范围请参考:https://gitee.com/mindspore/mindarmour/blob/master/mindarmour/fuzz_testing/image_transform.py 中对应的类方法。算法随机选择参数,则params设置为'auto_param': [True],参数将在推荐范围内随机生成。\n", - "\n", - "基于对抗攻击方法的参数配置请参考对应的攻击方法类。\n", - "\n", - "下面是变异方法及其参数配置的一个例子:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "mutate_config = [{'method': 'Blur',\n", - " 'params': {'radius': [0.1, 0.2, 0.3],\n", - " 'auto_param': [True, False]}},\n", - " {'method': 'Contrast',\n", - " 'params': {'auto_param': [True]}},\n", - " {'method': 'Translate',\n", - " 'params': {'auto_param': [True]}},\n", - " {'method': 'Brightness',\n", - " 'params': {'auto_param': [True]}},\n", - " {'method': 'Noise',\n", - " 'params': {'auto_param': [True]}},\n", - " {'method': 'Scale',\n", - " 'params': {'auto_param': [True]}},\n", - " {'method': 'Shear',\n", - " 'params': {'auto_param': [True]}},\n", - " {'method': 'FGSM',\n", - " 'params': {'eps': [0.3, 0.2, 0.4], 'alpha': [0.1]}}\n", - " ]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, 
- "source": [ - "设置评价指标,目前支持5种评价指标,包括:\n", - "\n", - "- 通用评价指标:accuracy。\n", - "- 神经元覆盖率指标:kmnc, nbc,snac。\n", - "- 对抗攻击评价指标:attack_success_rate。 也可以设置为‘auto’,默认使用所有评价指标。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "eval_metrics =['accuracy', 'kmnc', 'attack_success_rate']" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 初始化种子队列,种子队列中的每个种子,包含2个值:原始图片、图片标签。这里取100个样本作为初始种子队列。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "# make initial seeds\n", - "initial_seeds = []\n", - "for img, label in zip(test_inputs, test_labels):\n", - " initial_seeds.append([img, label])\n", - "initial_seeds = initial_seeds[:100]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "4. 测试Fuzz测试前的神经元覆盖率。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[INFO] MA(5041:139824560817984,MainProcess):2021-02-09 17:46:41,372 [:5] [Fuzz_testing] KMNC of this test is : 0.0807\n" - ] - } - ], - "source": [ - "segmented_num=1000\n", - "neuron_num=10\n", - "model_coverage_test = ModelCoverageMetrics(model, neuron_num, segmented_num, train_images)\n", - "model_coverage_test.calculate_coverage(np.array(test_inputs[:100]).astype(np.float32))\n", - "LOGGER.info(TAG, 'KMNC of this test is : %s', model_coverage_test.get_kmnc())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "结果:\n", - ">KMNC of this test is : 0.0807" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "5. 
Fuzz测试" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "eval_metrics = 'auto'\n", - "model_fuzz_test = Fuzzer(model, train_images, neuron_num, segmented_num)\n", - "_, _, _, _, metrics = model_fuzz_test.fuzzing(mutate_config, initial_seeds, eval_metrics=eval_metrics)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "6. 实验结果 \n", - "fuzzing的返回结果中包含了5个数据:fuzz生成的样本fuzz_samples、生成样本的真实标签true_labels、被测模型对于生成样本的预测值fuzz_preds、 生成样本使用的变异方法fuzz_strategies、fuzz testing的评估报告metrics_report。用户可使用这些返回结果进一步的分析模型的鲁棒性。这里只展开metrics_report,查看fuzz testing后的各个评估指标。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[INFO] MA(5041:139824560817984,MainProcess):2021-02-09 17:46:53,608 [:3] [Fuzz_testing] Accuracy: 0.6404040404040404\n", - "[INFO] MA(5041:139824560817984,MainProcess):2021-02-09 17:46:53,610 [:3] [Fuzz_testing] Attack_success_rate: 0.3227091633466136\n", - "[INFO] MA(5041:139824560817984,MainProcess):2021-02-09 17:46:53,610 [:3] [Fuzz_testing] Neural_coverage_KMNC: 0.3714\n", - "[INFO] MA(5041:139824560817984,MainProcess):2021-02-09 17:46:53,611 [:3] [Fuzz_testing] Neural_coverage_NBC: 0.1\n", - "[INFO] MA(5041:139824560817984,MainProcess):2021-02-09 17:46:53,612 [:3] [Fuzz_testing] Neural_coverage_SNAC: 0.2\n" - ] - } - ], - "source": [ - "if metrics:\n", - " for key in metrics:\n", - " LOGGER.info(TAG, key + ': %s', metrics[key])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Fuzz测试后结果如下:\n", - ">Accuracy: 0.6404040404040404 \n", - "Attack_success_rate: 0.3227091633466136 \n", - "Neural_coverage_KMNC: 0.3714 " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Fuzz测试前种子的KMNC神经元覆盖率为8.1%,Fuzz后,KMNC神经元覆盖率为37.1%,神经元覆盖率提升,样本的多样性提升。Fuzz后,模型对于Fuzz生成样本的准确率为64%,使用了对抗攻击方法的样本,攻击成功率为32.27%。由于初始化种子、变异方法和相应的参数均为随机选择的,结果有一定的浮动是正常的。 \n", 
- "\n", - "原始图片: \n", - "![原始图片](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/fuzz_seed.png) \n", - "\n", - "\n", - "Fuzz生成的变异图片: \n", - "![变异图片](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/fuzz_res.png)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/notebook/test_model_security_membership_inference.ipynb b/tutorials/notebook/test_model_security_membership_inference.ipynb deleted file mode 100644 index e53d54cf4fb9a93187adf116bedbfdb66db014bf..0000000000000000000000000000000000000000 --- a/tutorials/notebook/test_model_security_membership_inference.ipynb +++ /dev/null @@ -1,526 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用成员推理测试模型安全性\n", - "`Linux` `Ascend` `GPU` `CPU` `模型评测` `企业` `高级`\n", - "\n", - "作者:MindSpore团队、[丁一超](https://gitee.com/JeffDing890430)\n", - "\n", - "## 概述\n", - "\n", - "成员推理是一种推测用户隐私数据的方法。隐私指的是单个用户的某些属性,一旦泄露可能会造成人身损害、名誉损害等后果。通常情况下,用户的隐私数据会作保密处理,但我们可以利用非敏感信息来进行推测。如果我们知道了某个私人俱乐部的成员都喜欢戴紫色墨镜、穿红色皮鞋,那么我们遇到一个戴紫色墨镜且穿红色皮鞋(非敏感信息)的人,就可以推断他/她很可能是这个私人俱乐部的成员(敏感信息)。这就是成员推理。\n", - "\n", - "机器学习/深度学习的成员推理(MembershipInference),指的是攻击者拥有模型的部分访问权限(黑盒、灰盒或白盒),能够获取到模型的输出、结构或参数等部分或全部信息,并基于这些信息推断某个样本是否属于模型的训练集。利用成员推理,我们可以评估机器学习/深度学习模型的隐私数据安全。如果在成员推理下能正确识别出60%+的样本,那么我们认为该模型存在隐私数据泄露风险。\n", - "\n", - "这里以VGG16模型,CIFAR-100数据集为例,说明如何使用MembershipInference进行模型隐私安全评估。本教程使用预训练的模型参数进行演示,这里仅给出模型结构、参数设置和数据集预处理方式。\n", - "\n", - ">本例面向Ascend 910处理器,您可以在这里下载完整的样例代码:\n", - ">\n", - ">" - ] - }, - { - "cell_type": "markdown", - "metadata": 
{}, - "source": [ - "## 实现阶段\n", - "### 安装MindArmour" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.2.0-rc1/MindArmour/x86_64/mindarmour-1.2.0rc1-cp37-cp37m-linux_x86_64.whl" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "注:本次实验使用的平台为MindSpore1.2.0-rc1,所以MindArmour也是用对应的1.2.0-rc1,如果使用的是1.2.1的,只需将命令中的1.2.0-rc1替换为1.2.1。 \n", - "\n", - "**MindArmour安装文档参考:**" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 导入需要的库文件\n", - "\n", - "#### 引入相关包\n", - "\n", - "下面是我们需要的公共模块、MindSpore相关模块和MembershipInference特性模块,以及配置日志标签和日志等级。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import sys\n", - "import math\n", - "import argparse\n", - "\n", - "import numpy as np\n", - "\n", - "import mindspore.nn as nn\n", - "from mindspore import Model, load_param_into_net, load_checkpoint\n", - "from mindspore import dtype as mstype\n", - "from mindspore.common import initializer as init\n", - "from mindspore.common.initializer import initializer\n", - "import mindspore.dataset as de\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "import mindspore.dataset.vision.c_transforms as vision\n", - "from mindarmour import MembershipInference\n", - "from mindarmour.utils import LogUtil\n", - "\n", - "LOGGER = LogUtil.get_instance()\n", - "TAG = \"MembershipInference_test\"\n", - "LOGGER.set_level(\"INFO\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 加载数据集\n", - "\n", - "这里采用的是CIFAR-100数据集,您也可以采用自己的数据集,但要保证传入的数据仅有两项属性\"image\"和\"label\"。\n", - "\n", - "数据集:CIFAR-100 下载地址:[链接](http://www.cs.toronto.edu/~kriz/cifar-100-binary.tar.gz)\n", - "\n", - "也可执行下面代码完成下载解压到指定文件夹。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": 
"stdout", - "output_type": "stream", - "text": [ - "./cifar100\n", - "├── test\n", - "│   └── test.bin\n", - "└── train\n", - " ├── fine_label_names.txt\n", - " └── train.bin\n", - "\n", - "2 directories, 3 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./cifar100/train ./cifar100/test\n", - "!wget https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/cifar-100-binary.tar.gz\n", - "!tar -zxvf cifar-100-binary.tar.gz\n", - "!mv -f ./cifar-100-binary/train.bin ./cifar-100-binary/fine_label_names.txt ./cifar100/train/\n", - "!mv ./cifar-100-binary/test.bin ./cifar100/test/\n", - "!tree ./cifar100" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "ckpt文件参考MindSpore代码仓库中ModelZoo中VGG16代码将cifar10的代码修改为cifar100的代码进行训练产生\n", - "\n", - "训练代码下载:[链接](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/vgg16)\n", - "\n", - "训练完成的ckpt文件下载地址(百度网盘提取码: jits): [链接](https://pan.baidu.com/s/10jeLzJ1Sl23gjoc-AZd-Ng) \n", - "\n", - "可执行下面代码下载训练完成的ckpt,并放置到指定位置。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "!wget https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/0-70_781.ckpt\n", - "!mkdir -p ./ckpt\n", - "!mv -f ./0-70_781.ckpt ./ckpt/" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 构建数据增强方法\n", - "\n", - "使用MindSpore提供的cifar100数据集处理接口,将`.bin`数据集读取出来,并通过归一化和标准化等数据增强操作将数据集处理成适合`vgg_net`训练的数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "# Generate CIFAR-100 data.\n", - "def vgg_create_dataset100(data_home, image_size, batch_size, rank_id=0, rank_size=1, repeat_num=1,\n", - " training=True, num_samples=None, shuffle=True):\n", - " \"\"\"Data operations.\"\"\"\n", - " de.config.set_seed(1)\n", - " data_dir = os.path.join(data_home, \"train\")\n", - " if not training:\n", - " data_dir = os.path.join(data_home, \"test\")\n", - "\n", - " if num_samples 
is not None:\n", - " data_set = de.Cifar100Dataset(data_dir, num_shards=rank_size, shard_id=rank_id,\n", - " num_samples=num_samples, shuffle=shuffle)\n", - " else:\n", - " data_set = de.Cifar100Dataset(data_dir, num_shards=rank_size, shard_id=rank_id)\n", - "\n", - " input_columns = [\"fine_label\"]\n", - " output_columns = [\"label\"]\n", - " data_set = data_set.rename(input_columns=input_columns, output_columns=output_columns)\n", - " data_set = data_set.project([\"image\", \"label\"])\n", - "\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - "\n", - " # Define map operations.\n", - " random_crop_op = vision.RandomCrop((32, 32), (4, 4, 4, 4)) # padding_mode default CONSTANT.\n", - " random_horizontal_op = vision.RandomHorizontalFlip()\n", - " resize_op = vision.Resize(image_size) # interpolation default BILINEAR.\n", - " rescale_op = vision.Rescale(rescale, shift)\n", - " normalize_op = vision.Normalize((0.4465, 0.4822, 0.4914), (0.2010, 0.1994, 0.2023))\n", - " changeswap_op = vision.HWC2CHW()\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " c_trans = []\n", - " if training:\n", - " c_trans = [random_crop_op, random_horizontal_op]\n", - " c_trans += [resize_op, rescale_op, normalize_op,\n", - " changeswap_op]\n", - "\n", - " # Apply map operations on images.\n", - " data_set = data_set.map(operations=type_cast_op, input_columns=\"label\")\n", - " data_set = data_set.map(operations=c_trans, input_columns=\"image\")\n", - "\n", - " # Apply batch operations.\n", - " data_set = data_set.batch(batch_size=batch_size, drop_remainder=True)\n", - " # Apply repeat operations.\n", - " data_set = data_set.repeat(repeat_num)\n", - "\n", - " return data_set" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 建立模型\n", - "\n", - "这里以VGG16模型为例,您也可以替换为自己的模型。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "def _make_layer(base, args, batch_norm):\n", - " \"\"\"Make 
stage network of VGG.\"\"\"\n", - " layers = []\n", - " in_channels = 3\n", - " for v in base:\n", - " if v == 'M':\n", - " layers += [nn.MaxPool2d(kernel_size=2, stride=2)]\n", - " else:\n", - " conv2d = nn.Conv2d(in_channels=in_channels,\n", - " out_channels=v,\n", - " kernel_size=3,\n", - " padding=args.padding,\n", - " pad_mode=args.pad_mode,\n", - " has_bias=args.has_bias,\n", - " weight_init='XavierUniform')\n", - " if batch_norm:\n", - " layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU()]\n", - " else:\n", - " layers += [conv2d, nn.ReLU()]\n", - " in_channels = v\n", - " return nn.SequentialCell(layers)\n", - "\n", - "\n", - "class Vgg(nn.Cell):\n", - " \"\"\"\n", - " VGG network definition.\n", - " \"\"\"\n", - "\n", - " def __init__(self, base, num_classes=1000, batch_norm=False, batch_size=1, args=None, phase=\"train\"):\n", - " super(Vgg, self).__init__()\n", - " _ = batch_size\n", - " self.layers = _make_layer(base, args, batch_norm=batch_norm)\n", - " self.flatten = nn.Flatten()\n", - " dropout_ratio = 0.5\n", - " if not args.has_dropout or phase == \"test\":\n", - " dropout_ratio = 1.0\n", - " self.classifier = nn.SequentialCell([\n", - " nn.Dense(512*7*7, 4096),\n", - " nn.ReLU(),\n", - " nn.Dropout(dropout_ratio),\n", - " nn.Dense(4096, 4096),\n", - " nn.ReLU(),\n", - " nn.Dropout(dropout_ratio),\n", - " nn.Dense(4096, num_classes)])\n", - "\n", - " def construct(self, x):\n", - " x = self.layers(x)\n", - " x = self.flatten(x)\n", - " x = self.classifier(x)\n", - " return x\n", - "\n", - "\n", - "base16 = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M']\n", - "\n", - "\n", - "def vgg16(num_classes=1000, args=None, phase=\"train\"):\n", - " net = Vgg(base16, num_classes=num_classes, args=args, batch_norm=args.batch_norm, phase=phase)\n", - " return net" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 运用MembershipInference进行隐私安全评估\n", - "\n", - "1. 
构建VGG16模型并加载参数文件。\n", - "\n", - " 这里直接加载预训练完成的VGG16参数配置,您也可以使用如上的网络自行训练。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "# load parameter\n", - "parser = argparse.ArgumentParser(\"main case arg parser.\")\n", - "args = parser.parse_args(args=[])\n", - "args.batch_norm = True\n", - "args.has_dropout = False\n", - "args.has_bias = False\n", - "args.padding = 0\n", - "args.pad_mode = \"same\"\n", - "args.weight_decay = 5e-4\n", - "args.loss_scale = 1.0\n", - "args.data_path = \"./cifar100\"\n", - "args.pre_trained = \"./ckpt/0-70_781.ckpt\"\n", - "args.device_target = \"Ascend\"\n", - "\n", - "# Load the pretrained model.\n", - "net = vgg16(num_classes=100, args=args)\n", - "loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)\n", - "opt = nn.Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9,\n", - " weight_decay=args.weight_decay, loss_scale=args.loss_scale)\n", - "load_param_into_net(net, load_checkpoint(args.pre_trained))\n", - "model = Model(network=net, loss_fn=loss, optimizer=opt)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
加载CIFAR-100数据集,按8:2分割为成员推理模型的训练集和测试集。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[INFO] MA(27840:140583037941568,MainProcess):2021-04-08 16:10:33,472 [:8] [MembershipInference_test] Data loading completed.\n" - ] - } - ], - "source": [ - "train_dataset = vgg_create_dataset100(data_home=args.data_path, image_size=(224, 224),\n", - " batch_size=64, num_samples=5000, shuffle=False)\n", - "test_dataset = vgg_create_dataset100(data_home=args.data_path, image_size=(224, 224),\n", - " batch_size=64, num_samples=5000, shuffle=False, training=False)\n", - "train_train, eval_train = train_dataset.split([0.8, 0.2])\n", - "train_test, eval_test = test_dataset.split([0.8, 0.2])\n", - "msg = \"Data loading completed.\"\n", - "LOGGER.info(TAG, msg)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 配置推理参数和评估参数\n", - "\n", - " 设置用于成员推理的方法和参数。目前支持的推理方法有:KNN、LR、MLPClassifier和RandomForestClassifier。推理参数数据类型使用list,各个方法使用key为\"method\"和\"params\"的字典表示。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "config = [\n", - " {\n", - " \"method\": \"lr\",\n", - " \"params\": {\n", - " \"C\": np.logspace(-4, 2, 10)\n", - " }\n", - " },\n", - " {\n", - " \"method\": \"knn\",\n", - " \"params\": {\n", - " \"n_neighbors\": [3, 5, 7]\n", - " }\n", - " },\n", - " {\n", - " \"method\": \"mlp\",\n", - " \"params\": {\n", - " \"hidden_layer_sizes\": [(64,), (32, 32)],\n", - " \"solver\": [\"adam\"],\n", - " \"alpha\": [0.0001, 0.001, 0.01]\n", - " }\n", - " },\n", - " {\n", - " \"method\": \"rf\",\n", - " \"params\": {\n", - " \"n_estimators\": [100],\n", - " \"max_features\": [\"auto\", \"sqrt\"],\n", - " \"max_depth\": [5, 10, 20, None],\n", - " \"min_samples_split\": [2, 5, 10],\n", - " \"min_samples_leaf\": [1, 2, 4]\n", - " }\n", - " }\n", - " ]" - ] - }, - { - "cell_type": "markdown", - 
"metadata": {}, - "source": [ - "我们约定标签为训练集的是正类,标签为测试集的是负类。设置评价指标,目前支持3种评价指标。包括:\n", - "* 准确率:accuracy,正确推理的数量占全体样本中的比例。\n", - "* 精确率:precision,正确推理的正类样本占所有推理为正类中的比例。\n", - "* 召回率:recall,正确推理的正类样本占全体正类样本的比例。在样本数量足够大时,如果上述指标均大于0.6,我们认为目标模型就存在隐私泄露的风险。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "metrics = [\"precision\", \"accuracy\", \"recall\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "4. 训练成员推理模型,并给出评估结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[INFO] MA(27840:140583037941568,MainProcess):2021-04-08 16:12:20,029 [:5] [MembershipInference_test] Membership inference model training completed.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Method: lr, {'precision': 0.6514925373134328, 'recall': 0.8525390625, 'accuracy': 0.6982421875}\n", - "Method: knn, {'precision': 0.5489396411092985, 'recall': 0.6572265625, 'accuracy': 0.55859375}\n", - "Method: mlp, {'precision': 0.6491739552964043, 'recall': 0.65234375, 'accuracy': 0.64990234375}\n", - "Method: rf, {'precision': 0.6684574059861857, 'recall': 0.8505859375, 'accuracy': 0.71435546875}\n" - ] - } - ], - "source": [ - "inference = MembershipInference(model) # Get inference model.\n", - "\n", - "inference.train(train_train, train_test, config) # Train inference model.\n", - "msg = \"Membership inference model training completed.\"\n", - "LOGGER.info(TAG, msg)\n", - "\n", - "result = inference.eval(eval_train, eval_test, metrics) # Eval metrics.\n", - "count = len(config)\n", - "for i in range(count):\n", - " print(\"Method: {}, {}\".format(config[i][\"method\"], result[i]))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "5. 
实验结果" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "成员推理的指标如下所示,各数值均保留至小数点后四位。\n", - "\n", - "以第一行结果为例:在使用lr(逻辑回归分类)进行成员推理时,推理的准确率(accuracy)为0.69824,推理精确率(precision)为0.65149,正类样本召回率为0.85254,说明lr有69.8%的概率能正确分辨一个数据样本是否属于目标模型的训练数据集。在二分类任务下,指标表明成员推理是有效的,即该模型存在隐私泄露的风险。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 参考文献\n", - "\n", - "[1] [Shokri R , Stronati M , Song C , et al. Membership Inference Attacks against Machine Learning Models[J].](https://arxiv.org/abs/1610.05820v2)\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/requirements.txt b/tutorials/requirements.txt deleted file mode 100644 index 1755dcd967228348c2f9cb29bac44580af862770..0000000000000000000000000000000000000000 --- a/tutorials/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -sphinx >= 2.2.1, <= 2.4.4 -recommonmark -sphinx-markdown-tables -sphinx_rtd_theme -numpy -nbsphinx -IPython -jieba diff --git a/tutorials/source_zh_cn/_static/logo_modelarts.png b/tutorials/source_zh_cn/_static/logo_modelarts.png deleted file mode 100644 index 9b499805e2f8ab52dcde3fd4a7708ef753da9b84..0000000000000000000000000000000000000000 Binary files a/tutorials/source_zh_cn/_static/logo_modelarts.png and /dev/null differ diff --git a/tutorials/source_zh_cn/_static/logo_notebook.png b/tutorials/source_zh_cn/_static/logo_notebook.png deleted file mode 100644 index f28598315f19f4be76a73ddf5dc6bbdbe4db35fd..0000000000000000000000000000000000000000 Binary files a/tutorials/source_zh_cn/_static/logo_notebook.png and /dev/null differ diff --git 
a/tutorials/source_zh_cn/_static/logo_source.png b/tutorials/source_zh_cn/_static/logo_source.png deleted file mode 100644 index 9932d67ab50871edb0c95979c4e948c812c7cdea..0000000000000000000000000000000000000000 Binary files a/tutorials/source_zh_cn/_static/logo_source.png and /dev/null differ diff --git a/tutorials/source_zh_cn/autograd.ipynb b/tutorials/source_zh_cn/autograd.ipynb deleted file mode 100644 index bc8594d6d894ce78ec35687441f62a288af82b35..0000000000000000000000000000000000000000 --- a/tutorials/source_zh_cn/autograd.ipynb +++ /dev/null @@ -1,264 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 自动微分\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/autograd.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/quick_start/mindspore_autograd.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3F1aWNrX3N0YXJ0L21pbmRzcG9yZV9hdXRvZ3JhZC5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在训练神经网络时,最常用的算法是反向传播,在该算法中,根据损失函数对于给定参数的梯度来调整参数(模型权重)。\n", - "\n", - "MindSpore计算一阶导数方法`mindspore.ops.GradOperation (get_all=False, get_by_list=False, sens_param=False)`,其中`get_all`为`False`时,只会对第一个输入求导,为`True`时,会对所有输入求导;`get_by_list`为`False`时,不会对权重求导,为`True`时,会对权重求导;`sens_param`对网络的输出值做缩放以改变最终梯度。下面用MatMul算子的求导做深入分析。\n", - "\n", - "首先导入本文档需要的模块和接口,如下所示:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy 
as np\n", - "import mindspore.nn as nn\n", - "import mindspore.ops as ops\n", - "from mindspore import Tensor\n", - "from mindspore import ParameterTuple, Parameter\n", - "from mindspore import dtype as mstype" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 对输入求一阶导\n", - "\n", - "如果需要对输入进行求导,首先需要定义一个需要求导的网络,以一个由MatMul算子构成的网络$f(x,y)=z * x * y$为例。\n", - "\n", - "定义网络结构如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "class Net(nn.Cell):\n", - " def __init__(self):\n", - " super(Net, self).__init__()\n", - " self.matmul = ops.MatMul()\n", - " self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')\n", - "\n", - " def construct(self, x, y):\n", - " x = x * self.z\n", - " out = self.matmul(x, y)\n", - " return out" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "接着定义求导网络,`__init__`函数中定义需要求导的网络`self.net`和`ops.GradOperation`操作,`construct`函数中对`self.net`进行求导。\n", - "\n", - "求导网络结构如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "class GradNetWrtX(nn.Cell):\n", - " def __init__(self, net):\n", - " super(GradNetWrtX, self).__init__()\n", - " self.net = net\n", - " self.grad_op = ops.GradOperation()\n", - "\n", - " def construct(self, x, y):\n", - " gradient_function = self.grad_op(self.net)\n", - " return gradient_function(x, y)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "定义输入并且打印输出:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[4.5099998 2.7 3.6000001]\n", - " [4.5099998 2.7 3.6000001]]\n" - ] - } - ], - "source": [ - "x = Tensor([[0.8, 0.6, 0.2], [1.8, 1.3, 1.1]], dtype=mstype.float32)\n", - "y = Tensor([[0.11, 3.3, 1.1], [1.1, 0.2, 1.4], [1.1, 2.2, 0.3]], dtype=mstype.float32)\n", - "output = GradNetWrtX(Net())(x, y)\n", 
- "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "若考虑对`x`、`y`输入求导,只需在`GradNetWrtX`中设置`self.grad_op = GradOperation(get_all=True)`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 对权重求一阶导\n", - "\n", - "若需要对权重的求导,将`ops.GradOperation`中的`get_by_list`设置为`True`:\n", - "\n", - "则`GradNetWrtX`结构为:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "class GradNetWrtX(nn.Cell):\n", - " def __init__(self, net):\n", - " super(GradNetWrtX, self).__init__()\n", - " self.net = net\n", - " self.params = ParameterTuple(net.trainable_params())\n", - " self.grad_op = ops.GradOperation(get_by_list=True)\n", - "\n", - " def construct(self, x, y):\n", - " gradient_function = self.grad_op(self.net, self.params)\n", - " return gradient_function(x, y)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "运行并打印输出:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(Tensor(shape=[1], dtype=Float32, value= [ 2.15359993e+01]),)\n" - ] - } - ], - "source": [ - "output = GradNetWrtX(Net())(x, y)\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "若需要对某些权重不进行求导,则在定义求导网络时,对相应的权重中`requires_grad`设置为`False`。\n", - "\n", - "```Python\n", - "self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z', requires_grad=False)\n", - "```\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 梯度值缩放\n", - "\n", - "可以通过`sens_param`参数对网络的输出值做缩放以改变最终梯度。首先将`ops.GradOperation`中的`sens_param`设置为`True`,并确定缩放指数,其维度与输出维度保持一致。\n", - "\n", - "缩放指数`self.grad_wrt_output`可以记作如下形式:\n", - "\n", - "```python\n", - "self.grad_wrt_output = Tensor([[s1, s2, s3], [s4, s5, s6]])\n", - "```\n", - "\n", - "则`GradNetWrtX`结构为:" - ] - }, - { - "cell_type": "code", - "execution_count": 
7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[2.211 0.51 1.49 ]\n", - " [5.588 2.68 4.07 ]]\n" - ] - } - ], - "source": [ - "class GradNetWrtX(nn.Cell):\n", - " def __init__(self, net):\n", - " super(GradNetWrtX, self).__init__()\n", - " self.net = net\n", - " self.grad_op = ops.GradOperation(sens_param=True)\n", - " self.grad_wrt_output = Tensor([[0.1, 0.6, 0.2], [0.8, 1.3, 1.1]], dtype=mstype.float32)\n", - "\n", - " def construct(self, x, y):\n", - " gradient_function = self.grad_op(self.net)\n", - " return gradient_function(x, y, self.grad_wrt_output)\n", - "\n", - "output = GradNetWrtX(Net())(x, y) \n", - "print(output)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/source_zh_cn/conf.py b/tutorials/source_zh_cn/conf.py deleted file mode 100644 index dd1e0ddb33393c43bb82fd1516bfbb10c82f5a40..0000000000000000000000000000000000000000 --- a/tutorials/source_zh_cn/conf.py +++ /dev/null @@ -1,90 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -import os -import sys -import IPython -import re -import nbsphinx as nbs - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2021, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx_markdown_tables', - 'recommonmark', - 'nbsphinx', - 'sphinx.ext.mathjax', - 'IPython.sphinxext.ipython_console_highlighting' -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -highlight_language = 'none' - -suppress_warnings = [ - 'nbsphinx', -] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_search_language = 'zh' - -html_search_options = {'dict': '../../resource/jieba.txt'} - -html_static_path = ['_static'] - -# Remove extra outputs for nbsphinx extension. 
-nbsphinx_source_re = re.compile(r"(app\.connect\('html-collect-pages', html_collect_pages\))") -nbsphinx_math_re = re.compile(r"(\S.*$)") -mod_path = os.path.abspath(nbs.__file__) -with open(mod_path, "r+", encoding="utf8") as f: - contents = f.readlines() - for num, line in enumerate(contents): - _content_re = nbsphinx_source_re.search(line) - if _content_re and "#" not in line: - contents[num] = nbsphinx_source_re.sub(r"# \g<1>", line) - if "mathjax_config = app.config" in line and "#" not in line: - contents[num:num+10] = [nbsphinx_math_re.sub(r"# \g<1>", i) for i in contents[num:num+10]] - break - f.seek(0) - f.writelines(contents) \ No newline at end of file diff --git a/tutorials/source_zh_cn/dataset.ipynb b/tutorials/source_zh_cn/dataset.ipynb deleted file mode 100644 index 5ed3705df58b7e3a764b3348989cd326c5c40eb0..0000000000000000000000000000000000000000 --- a/tutorials/source_zh_cn/dataset.ipynb +++ /dev/null @@ -1,354 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 数据加载及处理\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/dataset.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/quick_start/mindspore_dataset.ipynb) " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore提供了部分常用数据集和标准格式数据集的加载接口,用户可以直接使用`mindspore.dataset`中对应的数据集加载类进行数据加载。数据集类为用户提供了常用的数据处理接口,使得用户能够快速进行数据处理操作。\n", - "\n", - "## 加载数据集\n", - "\n", - "下面的样例通过`Cifar10Dataset`接口加载CIFAR-10数据集,使用顺序采样器获取前5个样本。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds\n", - "\n", - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train\"\n", - "sampler = 
ds.SequentialSampler(num_samples=5)\n", - "dataset = ds.Cifar10Dataset(DATA_DIR, sampler=sampler)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "## 迭代数据集\n", - "\n", - "用户可以用`create_dict_iterator`创建数据迭代器,迭代访问数据,下面展示了对应图片的形状和标签。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Image shape: (32, 32, 3) , Label: 6\n", - "Image shape: (32, 32, 3) , Label: 9\n", - "Image shape: (32, 32, 3) , Label: 9\n", - "Image shape: (32, 32, 3) , Label: 4\n", - "Image shape: (32, 32, 3) , Label: 1\n" - ] - } - ], - "source": [ - "for data in dataset.create_dict_iterator():\n", - " print(\"Image shape: {}\".format(data['image'].shape), \", Label: {}\".format(data['label']))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "## 自定义数据集\n", - "\n", - "对于目前MindSpore不支持直接加载的数据集,可以构造自定义数据集类,然后通过`GeneratorDataset`接口实现自定义方式的数据加载。\n" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "\n", - "np.random.seed(58)\n", - "\n", - "class DatasetGenerator:\n", - " def __init__(self):\n", - " self.data = np.random.sample((5, 2))\n", - " self.label = np.random.sample((5, 1))\n", - "\n", - " def __getitem__(self, index):\n", - " return self.data[index], self.label[index]\n", - "\n", - " def __len__(self):\n", - " return len(self.data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "其中用户需要自定义的类函数如下:\n", - "\n", - "- **\\_\\_init\\_\\_**\n", - "\n", - " 实例化数据集对象时,`__init__`函数被调用,用户可以在此进行数据初始化等操作。\n", - "\n", - " ```python\n", - " def __init__(self):\n", - " self.data = np.random.sample((5, 2))\n", - " self.label = np.random.sample((5, 1))\n", - " ```\n", - "\n", - "- **\\_\\_getitem\\_\\_**\n", - "\n", - " 定义数据集类的`__getitem__`函数,使其支持随机访问,能够根据给定的索引值`index`,获取数据集中的数据并返回。\n", - "\n", - " ```python\n", - " def 
__getitem__(self, index):\n", - " return self.data[index], self.label[index]\n", - " ```\n", - "\n", - "- **\\_\\_len\\_\\_**\n", - "\n", - " 定义数据集类的`__len__`函数,返回数据集的样本数量。\n", - "\n", - " ```python\n", - " def __len__(self):\n", - " return len(self.data)\n", - " ```\n", - " \n", - "定义数据集类之后,就可以通过`GeneratorDataset`接口按照用户定义的方式加载并访问数据集样本。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0.36510558 0.45120592] [0.78888122]\n", - "[0.49606035 0.07562207] [0.38068183]\n", - "[0.57176158 0.28963401] [0.16271622]\n", - "[0.30880446 0.37487617] [0.54738768]\n", - "[0.81585667 0.96883469] [0.77994068]\n" - ] - } - ], - "source": [ - "dataset_generator = DatasetGenerator()\n", - "dataset = ds.GeneratorDataset(dataset_generator, [\"data\", \"label\"], shuffle=False)\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print('{}'.format(data[\"data\"]), '{}'.format(data[\"label\"]))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "## 数据处理及增强\n", - "\n", - "### 数据处理\n", - "\n", - "MindSpore提供的数据集接口具备常用的数据处理方法,用户只需调用相应的函数接口即可快速进行数据处理。\n", - "\n", - "下面的样例先将数据集随机打乱顺序,然后将样本两两组成一个批次。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "data: [[0.36510558 0.45120592]\n", - " [0.57176158 0.28963401]]\n", - "label: [[0.78888122]\n", - " [0.16271622]]\n", - "data: [[0.30880446 0.37487617]\n", - " [0.49606035 0.07562207]]\n", - "label: [[0.54738768]\n", - " [0.38068183]]\n", - "data: [[0.81585667 0.96883469]]\n", - "label: [[0.77994068]]\n" - ] - } - ], - "source": [ - "ds.config.set_seed(58)\n", - "\n", - "# 随机打乱数据顺序\n", - "dataset = dataset.shuffle(buffer_size=10)\n", - "# 对数据集进行分批\n", - "dataset = dataset.batch(batch_size=2)\n", - "\n", - "for data in dataset.create_dict_iterator():\n", - " print(\"data: 
{}\".format(data[\"data\"]))\n", - " print(\"label: {}\".format(data[\"label\"]))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "其中,\n", - "\n", - "`buffer_size`:数据集中进行shuffle操作的缓存区的大小。\n", - "\n", - "`batch_size`:每组包含的数据个数,现设置每组包含2个数据。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "### 数据增强\n", - "\n", - "数据量过小或是样本场景单一等问题会影响模型的训练效果,用户可以通过数据增强操作扩充样本多样性,从而提升模型的泛化能力。\n", - "\n", - "下面的样例使用`mindspore.dataset.vision.c_transforms`模块中的算子对MNIST数据集进行数据增强。\n", - "\n", - "导入`c_transforms`模块,加载MNIST数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAENCAYAAADJzhMWAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAMaklEQVR4nO3dX6ik9X3H8fenJmnBeLFGul2MZtNUQiGlWkQKlWIpCdZeqDc2QsE0pZuLWhLIRcReRAiFUGzaQqF0Q2w2tjUEjFHE1lix2eQmuIrVVTFauxKX1Y0sbbQ3afTbi/OsPbuec+bs/Htmz/f9gmFmnjP7zHef3c/+/s3sL1WFpJ3vZ8YuQNJyGHapCcMuNWHYpSYMu9SEYZeaMOxSE4Zdp0hyJEltcntl7Po0vXeNXYBW0n8Df7XB8TeWXIfmKH6CTuslOQJQVXvHrUTzZjdeasJuvDbys0l+H7gY+B/gSeBgVb05blmahd14nWLoxn9ggx/9J/AHVfWd5VakebEbr9P9PfDbwC8A5wK/AvwdsBf45yS/Ol5pmoUtu7Ylye3AZ4FvVdX1Y9ejM2fYtS1Jfgl4HjhRVe8bux6dObvx2q4fDffnjlqFpmbYtV2/Pty/OGoVmpph19uS/HKSd7TcSfYCfzM8/YelFqW5cZ1d6/0e8NkkB4GXgNeBDwG/C/wc8ABw+3jlaRaGXes9AnwYuAz4DdbG5/8FfA+4E7iznNE9azkbLzXhmF1qwrBLTRh2qQnDLjWx1Nn4JM4GSgtWVdno+Ewte5KrkzyX5IUkt8xyLkmLNfXSW5JzgB8AHwVeBh4FbqyqZ7b4Nbbs0oItomW/Anihql6sqp8AXweuneF8khZolrBfCPxw3fOXh2OnSLIvyaEkh2Z4L0kzWvgEXVXtB/aD3XhpTLO07EeBi9Y9f/9wTNIKmiXsjwKXJPlgkvcAHwfum09ZkuZt6m58Vf00yc3Ag8A5wB1V9fTcKpM0V0v91ptjdmnxFvKhGklnD8MuNWHYpSYMu9SEYZeaMOxSE4ZdasKwS00YdqkJwy41YdilJgy71IRhl5ow7FIThl1qwrBLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhOGXWrCsEtNGHapCcMuNWHYpSYMu9SEYZeamHrLZp0dlrlLr/5fsuFGqqOaKexJjgCvA28CP62qy+dRlKT5m0fL/ltV9docziNpgRyzS03MGvYCvp3ksST7NnpBkn1JDiU5NON7SZpBZpnASXJhVR1N8vPAQ8CfVNXBLV7vbNGSOUE3jjEn6KpqwzefqWWvqqPD/XHgHu
CKWc4naXGmDnuSc5Ocd/Ix8DHg8LwKkzRfs8zG7wbuGbor7wL+qar+ZS5V7TB2pbUKZhqzn/GbNR2zG/Z+dtyYXdLZw7BLTRh2qQnDLjVh2KUm/IrrHDjb3s8qfoV1Elt2qQnDLjVh2KUmDLvUhGGXmjDsUhOGXWrCdXatrLNxLXuV2bJLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhOus8/BpPXgnfx9986/97ONLbvUhGGXmjDsUhOGXWrCsEtNGHapCcMuNeE6+xKczWvRs36n3O+kr46JLXuSO5IcT3J43bHzkzyU5Pnhftdiy5Q0q+10478KXH3asVuAh6vqEuDh4bmkFTYx7FV1EDhx2uFrgQPD4wPAdfMtS9K8TTtm311Vx4bHrwC7N3thkn3AvinfR9KczDxBV1WVZNMZpqraD+wH2Op1khZr2qW3V5PsARjuj8+vJEmLMG3Y7wNuGh7fBNw7n3IkLUomrfEmuQu4CrgAeBX4PPAt4BvAxcBLwA1Vdfok3kbnshu/AGOu07uOvnqqasM/lIlhnyfDvhiGXettFnY/Lis1YdilJgy71IRhl5ow7FIThl1qwrBLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhP+V9I7wFbfPFv0N+IWeX6/UTdftuxSE4ZdasKwS00YdqkJwy41YdilJgy71ITr7Dvc2bxd9Db+m/MlVbIz2LJLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhOuszfnOnwfE1v2JHckOZ7k8LpjtyU5muSJ4XbNYsuUNKvtdOO/Cly9wfG/rKpLh9sD8y1L0rxNDHtVHQROLKEWSQs0ywTdzUmeHLr5uzZ7UZJ9SQ4lOTTDe0maUbYzAZNkL3B/VX1keL4beA0o4AvAnqr65DbOs7qzPdrQKk/QTdJ1gq6qNvyNT9WyV9WrVfVmVb0FfBm4YpbiJC3eVGFPsmfd0+uBw5u9VtJqmLjOnuQu4CrggiQvA58HrkpyKWvd+CPApxZXosZ0Nq/D61TbGrPP7c0cs+84qxx2x+yn8uOyUhOGXWrCsEtNGHapCcMuNeFXXDWTWWa8x9xOuuNMvS271IRhl5ow7FIThl1qwrBLTRh2qQnDLjXhOru2tMrfatOZsWWXmjDsUhOGXWrCsEtNGHapCcMuNWHYpSZcZ9/hOq+Td/zO+lZs2aUmDLvUhGGXmjDsUhOGXWrCsEtNGHapiYlhT3JRkkeSPJPk6SSfHo6fn+ShJM8P97sWX25PVTX1bSdLsuVNp5q4ZXOSPcCeqno8yXnAY8B1wCeAE1X1xSS3ALuq6nMTzrWz//YtyE4P7bQM9Mam3rK5qo5V1ePD49eBZ4ELgWuBA8PLDrD2D4CkFXVGY/Yke4HLgO8Du6vq2PCjV4Dd8y1N0jxt+7PxSd4L3A18pqp+vL4LVVW1WRc9yT5g36yFSprNxDE7QJJ3A/cDD1bVl4ZjzwFXVdWxYVz/b1X14QnncfA5BcfsG3PMvrGpx+xZu6JfAZ49GfTBfcBNw+ObgHtnLVLS4mxnNv5K4LvAU8Bbw+FbWRu3fwO4GHgJuKGqTkw4V8smypZ5Orbc09msZd9WN35eDLvOhGGfztTdeEk7g2GXmjDsUhOGXWrCsEtNGHapCf8r6W1y+Ww6Lp+tDlt2qQnDLjVh2KUmDLvUhGGXmjDsUhOGXWqizTq76+TTcZ1857Bll5ow7FIThl1qwrBLTRh2qQnDLjVh2KUm2qyzd+U6uU6yZZeaMOxSE4ZdasKwS00YdqkJwy41YdilJiausye5CPgasBsoYH9V/XWS24A/An40vPTWqnpgUYXOyvVmdTdxf/Yke4A9VfV4kvOAx4DrgBuAN6rq9m2/WdP92aVl2mx/9okte1UdA44Nj19P8ixw4XzLk7RoZzRmT7IXuAz4/nDo5iRPJrkjya5Nfs2+JIeSHJqtVEmzmNiNf/uFyXuB7wB/VlXfTLIbeI21cfwXWOvqf3LCOezGSwu2WTd+W2
FP8m7gfuDBqvrSBj/fC9xfVR+ZcB7DLi3YZmGf2I3P2jT2V4Bn1wd9mLg76Xrg8KxFSlqc7czGXwl8F3gKeGs4fCtwI3Apa934I8Cnhsm8rc5lyy4t2Ezd+Hkx7NLiTd2Nl7QzGHapCcMuNWHYpSYMu9SEYZeaMOxSE4ZdasKwS00YdqkJwy41YdilJgy71IRhl5pY9pbNrwEvrXt+wXBsFa1qbataF1jbtOZZ2wc2+8FSv8/+jjdPDlXV5aMVsIVVrW1V6wJrm9ayarMbLzVh2KUmxg77/pHffyurWtuq1gXWNq2l1DbqmF3S8ozdsktaEsMuNTFK2JNcneS5JC8kuWWMGjaT5EiSp5I8Mfb+dMMeeseTHF537PwkDyV5frjfcI+9kWq7LcnR4do9keSakWq7KMkjSZ5J8nSSTw/HR712W9S1lOu29DF7knOAHwAfBV4GHgVurKpnllrIJpIcAS6vqtE/gJHkN4E3gK+d3ForyZ8DJ6rqi8M/lLuq6nMrUtttnOE23guqbbNtxj/BiNduntufT2OMlv0K4IWqerGqfgJ8Hbh2hDpWXlUdBE6cdvha4MDw+ABrf1mWbpPaVkJVHauqx4fHrwMntxkf9dptUddSjBH2C4Efrnv+Mqu133sB307yWJJ9Yxezgd3rttl6Bdg9ZjEbmLiN9zKdts34yly7abY/n5UTdO90ZVX9GvA7wB8P3dWVVGtjsFVaO/1b4EOs7QF4DPiLMYsZthm/G/hMVf14/c/GvHYb1LWU6zZG2I8CF617/v7h2EqoqqPD/XHgHtaGHavk1ZM76A73x0eu521V9WpVvVlVbwFfZsRrN2wzfjfwj1X1zeHw6Nduo7qWdd3GCPujwCVJPpjkPcDHgftGqOMdkpw7TJyQ5FzgY6zeVtT3ATcNj28C7h2xllOsyjbem20zzsjXbvTtz6tq6TfgGtZm5P8D+NMxatikrl8E/n24PT12bcBdrHXr/pe1uY0/BN4HPAw8D/wrcP4K1XYna1t7P8lasPaMVNuVrHXRnwSeGG7XjH3ttqhrKdfNj8tKTThBJzVh2KUmDLvUhGGXmjDsUhOGXWrCsEtN/B/M3kbdmYwBvQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "from mindspore.dataset.vision import Inter\n", - "import mindspore.dataset.vision.c_transforms as c_vision\n", - "\n", - "DATA_DIR = './datasets/MNIST_Data/train'\n", - "\n", - "mnist_dataset = ds.MnistDataset(DATA_DIR, num_samples=6, shuffle=False)\n", - "\n", - "# 查看数据原图\n", - "mnist_it = mnist_dataset.create_dict_iterator()\n", - "data = next(mnist_it)\n", - "plt.imshow(data['image'].asnumpy().squeeze(), cmap=plt.cm.gray)\n", - "plt.title(data['label'].asnumpy(), fontsize=20)\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "定义数据增强算子,对数据集进行`Resize`和`RandomCrop`操作,然后通过`map`映射将其插入数据处理管道。\n" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "resize_op = c_vision.Resize(size=(200,200), interpolation=Inter.LINEAR)\n", - "crop_op = c_vision.RandomCrop(150)\n", - "transforms_list = [resize_op, crop_op]\n", - "mnist_dataset = mnist_dataset.map(operations=transforms_list, input_columns=[\"image\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "查看数据增强效果。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAQEAAAENCAYAAAAPLtCGAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAA5FElEQVR4nO2debDsaVnfv2/v+3bOvXdmAB00SIWYMlDUQJXGWE5iIS4TKxRBjQGclJWKayQlg/4hf8QqiEQlZUoyCRgw6IBbpCoYQQJSVgkREGULOo6Ms5x7z9L7vr35o/v73qd/t889v95O9+/086nqOr/u06f7vX37fd5nf4y1Foqi7C+hbS9AUZTtokJAUfYcFQKKsueoEFCUPUeFgKLsOSoEFGXPUSGgKHuOCgHFF8aYrxhj7Dm3m9ten7I8kW0vQAkUNQC/NOfx5iWvQ1kjRjMGFT8YY74CANba+7e7EmXdqDmgKHuOmgPKIsSNMf8CwFcBaAH4CwAft9aOtrssZRXUHFB8MTUHvnrOr/4GwOuttX90uStS1oWaA4pffhXAgwDuAZAG8PcB/BcA9wP4fWPMN2xvacoqqCagrIQx5m0A3gDgf1prv2fb61EWR4WAshLGmL8D4K8AlK21B9tej7I4ag4oq3Iy/Zne6iqUpVEhoKzKy6c/n9jqKpSlUSGgXIgx5u8aY+446Y0x9wP45end/3Gpi1LWhuYJKH745wDeYIz5OIAnATQAfC2A7wCQAPBBAG/b3vKUVVAhoPjhowBeCODFAL4RE/u/CuCPAfwagF+z6mEOLBodUJQ9R30CirLnqBBQlD1nY0LAGPMKY8yXjTGPG2Me2dT7KIqyGhvxCRhjwgD+EsA/AfA0gD8F8L3W2i+u/c0URVmJTUUHHgDwuLX2CQAwxjwG4CEAc4WAMWZnvJOhUAjhcBjGGITDYYRCIXeT940xW1ujMQaRSMStiTd5n+tUgs94PMZoNEK/38dgMMBwOJx7PRqNMB6P7/ZSp9baa94HNyUEngPgKXH/aQAvk08wxvwQgB/a0PsvRSgUQj6fRzabRSqVQj6fRz6fRyqVQjabRaFQQCqVQiaTQTgc3soaw+EwIpGIWxfXmU6nkUwmkcvlkMlkkEgkkEqltrJGZb20223UajU8+eSTOD4+xunpKZ588kncvHkTJycneOqpp3B0dIR6vY5Go3G3l3py3oNbyxOw1j4K4FFgtzSBcDiMaDSKWCzmNlImk0Eul0OhUEAmk0E2m0Uksp2PLhQKIRKJIJvNIpFIuDUmk0nE43FEo1GnCShXk3Wb8Jv6Jj8D4Hni/nOnj+00NAGi0SiSyaQ7/bPZLA4ODnD9+nXk83kUCgVEIpGtmATGGIRCISSTScRiMSes5HU0GlUhoPhmU0LgTwG8wBjzfEw2/2sAfN+G3mutRCIRxOPxGS0gn8+jWCzi8PAQhUIBpVIJ0Wh0a0IAAOLxOCKRCCKRCKLRqLuORCKqCVxx1v2924gQsNYOjTE/AuAPAIQBvMta+4VNvNc64SnLjUVBkE6nkc1mnRZAIbCtjUaNRToD1Sl49dlUdu/GDFtr7QcxKSzZKbxSVN4PhUJu8yeTSaTTaacFlEolXL9+3ZkF8Xj8speuXGG8G1zeZ3TAWovxeIzxeAxr7dqEwl4UEM07OeeF2KLRKO655x5cu3YNxWLR+QPoeaeqrSjrgJt6OBy68N5oNMJwOHSbfTAYoNPpoF6v4+TkBOVyGdVqFfV6Hc1mE91ud+bvl2GvhIC0nWOx2B3X8XjchQWz2SwymQySySQSiYTzvG87R0C5OnDTDwYDF+cfDAbo9/szj7fbbdTrddRqNTQaDTSbTXQ6HfT7ffT7fSc0ltUM9kYIcJPHYjHn+IvH43dcX7t2DaVSCdls1sXcU6mU87ozkUhRVoVCoNfrodfrYTAYoNvtotPpYDAYoNfrodPpoN1uo9Fo4PT01GkBjUYDrVbLCQOaC8uwF0KAzj7G1ZlUE4/
HXQQgmUwimUyiVCq55JtisTiTiENNQlHWAU2Bfr+PdruNbreLVqvlTvput4tms4lms4lWq4Vyuew0gWq1imaziXa77YSAmgMXQHOAJz83diqVQqFQcPfz+TxyuZyLCtAcoMmg5oCyLqy1Tu3vdrvodrtot9tuc7daLdRqtZnrVquFVqvlns/UYTUHLoCaAAVAJpOZuR0cHLhUYToC5fNoLmgMXlknXnOAAoCbvV6vo1wuo9PpoNlsotFoOA2BpkC321Vz4CKMMc4nwLh/NptFqVRCLpdDPp93mYCZTAbpdNr5Dmg60I8Qi8U0OqCsDZoDvV7PbfpKpYLT01M0Gg2Uy2WcnJyg3W473wCdge12G71ez91Xc+ACZHRA5gAwAahYLKJYLCKXyzkHIKMGsVjM1RIwlKjmgLIOpDnQ6/XcCd9oNFCr1VCtVnF2duaEAJ2HFByMDNAcWJa9EgI80SkEqAmUSiUcHh4il8u5CABzCaQJoCFCZZ3IECGjArT9q9UqyuUyTk9PndNQlgvzWuYVqDlwF0KhEGKx2MzmPzg4wOHhIa5du4b77rsPN27cQD6fn1H3aUrI+4qyLmgOUAOo1Wool8s4OzvD2dkZbt68iaeeesoJATIvW3CV7MG9EAISbmzWCdBpKDMHld2EX35m0/H04+nI3wWlg3alUnGnfr1eR6vVcqp/t9udUf9Ho9HG1rFXQmDeqS6Fgp70uw03PFVoeU2VeNMbZp3UajXU63WXAiw9/r1eb2U13y97JQSAO1V8JTjI5BqGxvr9vsuwo209GAwCoQ0w8efk5MRFBGRNQL/fVyGwCdZZfaVcLtKbLk9MnqCMtXe73UD8H1P9ZzowswWl51+FgKIIKASkNsBEGgoCnqKrhMwuC9r/1AiYLkwhwAQgFQJrRs2B4CIFALPrWq0WKpWKK6ipVqvodDqB8Asw9bfRaKDRaLgoAdOCaQ5smr0TAkpwoSZAPwBTaSuVCiqVCprNJsrlMprNZiCEAL3/shrQmxik5oCiCGRyDTPsqBEwy46VdsPhcNvLvZDhcOicmdz0sqx4OByqOaAoEmutMwe8msDZ2Rmq1Spu3bqFWq2GwWCw7eVeCKMdMheA1+tIB/aLCgElMHjr71lXXy6XcXx8jLOzMxwdHaFSqQRCCPCUl0lO3vs7LQSMMc8D8B4ANwBYAI9aa99ujCkBeB+A+wF8BcCrrbWV1Ze6PPwwpWOJN5mzLTsHXXTbZ+SX05ulJ3/n53Nc9LP0/l/SRyD/P+lUCwrys5v3c9OsogkMAbzBWvsZY0wWwKeNMR8G8DoAH7HWvmU6jfgRAG9cfamrwaILflFarZbrE1CtVhGJRDAej2cKhuS1tznpviLDdEzdndcoE7jdx8HbDt07P9GvIJAnpRQGfE/eqFIr/lhaCFhrjwAcTa8bxpgvYTKD8CEA3zJ92rsBfAxbFgKya2uj0UAsFkOlUnE2ZigUQrvdRjabdW3EWD7MBqOyrHifKwllDbw3S0+eyNZahMPhmc+P1+zwFIvFVLPaAdbiEzDG3A/gxQA+CeDGVEAAwE1MzIV5f3NpA0llphmdSq1Wy6n+8XjcdXiRTUWSySRGoxFisZj7UofDYVhr9/aLK51zTGphlh4/206nA2utG+fGBi2j0QjxeBzj8dhpVutSeVWYLM/KQsAYkwHw2wB+wlpbl/8R1lp73rDRyxxI6v3i0qnE30UikZlU1FQq5QQDBQjLkffZFADgTACGsuilZ6cbJvBQCGSzWcTjceeoo/8gFothOBxq49YdYKX/AWNMFBMB8F5r7e9MH75ljLnXWntkjLkXwPGqi1wFOeCh2+26/oChUMjFmbvdLtLptBvzzX6DrORKpVIwxrhmo0HIS98U0hzg5pedb1kWOx6PkUgkZsa5j0YjpNNppxFEo9FApPdedVaJDhgA7wTwJWvtL4hffQDAawG8Zfrz91Za4ZqgF5n2fDgcdtrBaDRCu91GMpl0ziU+lyYDT64
g1atvApm1JzWBWq3mMvaq1SrG4zGSySTG47FLf6U/hZrXvn+Wu8IqmsA3AvgBAJ8zxnx2+thPY7L532+MeRjAkwBevdIK14D84so+AszQGg6Hzm6Vz6czKxKJOAERhHTUTeLti8d22Mx/r1arOD09hbUW6XQa4XAYo9EI4XAYqVTKOQlVoO4Oq0QH/hjAeZ6YB5d93U3A6IAxZsY3wBAg1fxEIjETb2ZfQWMMksnkyq2drwLelljNZhOVSgUnJyeo1+u4efMmTk5OMBqNkMlkMBwOUSgUnB+AnymbgizLPv8frJu98MrQMWitRSgUmtnkcrR3LBYDMNESqM6m02lEo9GZRo/7/AXkZ0lTgFoAe+Sfnp7i1q1bGI1GLgGL+Rd0EiaTSf0sd4i9EQJUPWkO0Dcgb9FoFJlMBs1mE9Fo1DV9SCQSM51saFbIsNR5P4OGt2DFm73Gfz8LeNgPnyWwFAg85flZptNpFz5kwQxfy2+EQCYEyQQlmTikQmVx9kIIAHd+ueepoizrbDabiMVi7gtNUyGTyTinYjKZdJqE90bNIkhJRQz9yY3Fa/k4B2RUq9WZHnn0CVBw0g/Az7LZbLrPMhKJIJfLuUiL7KR7N+r1uos+UPAwNCkdjSoIFmNvhIAfpANRzn+TcwsYH0+n0zOjzeWsw0gk4jLjgiIEmHrLLED6TrzX0vnHSECj0bhjM1JwzPssGW0ZDAZoNptIJpO+1sgoRLlcRrlcdqO65TSefXfcLoMKAQ8MacmBENzcyWTSnTK9Xs+lEnPG4Wg0mtn0QUqEkZu22+065x99IVT/uRHZEJPtvLwz8WR+Bl+r0+m41OtUKoVQKIThcIh2u32hsLTWulFdvMnegusYzLmvBOdbegnQ6SW93/F43PkSjDHuy8zR5jQTBoMB4vG4ey06F4NiEkivv2za2Wq1nACgut9oNHB2duZMADlKW25EClQmZdVqNfd4OBxGv993n/FFWGtnGojQLJCawGXV3181VAh4oOc7HA6j0Wi4aAJv9XodtVoN+XzejS/v9/tujLkcahIk5Dgsbmr6RNrt9sx1u91GpVJxNrmclittc/lZslbAa1qwYMsPdCzy/blO2ZNPnYOLo0JAIFXYXq/nBpNKFZOebZ5yo9FoRu1neTKLjoKA/HfLTECe+nTqlctlpynUajW38b0quTQHaEow2kJ/ATAxqRKJhAvNXrRG2f+BgkeO51ZNYDlUCHiQmYX0WkuHWTweR6vVmimnlUVFqVRqZnBkUJBCgKct1W7pDKT9LwdkyJNYmgPULliqDcx+lhQAfiY9UwhQqDQajZlQ5WX26b9qqBDwwM3NZhUslGHzEToD2+02CoUCisUiQqGQ+/KlUikkk0kkEgl3IgbBJyCHesimncfHx+762WefncmopP9EagE0B/ia/CwpYOTId2Zq+jWdZI4A31/2MVBNYDlUCAh4egG34+Y86bvdrkszpvrPDZ5Op92XmhuCQiQoeEd8sTCIsXlmA3KzMVlHtvnijScxPz/Zgcj7WTKnwg/c5N73lE05VRNYHBUCHmQPAXYdks4+XrMfYTgcRi6XQyKRQCKRmOm4Q7t4GU3gsrUHr0/A2867Wq2iUqnMON9kFGDeVOC7fZby56Ltxbzv5X1/ZTFUCHiQ2sB5GGOQSCRmhABt20wmg0QiAWMMUqmUK0m+CDkufZ7Q8aY57zpSECi7jQqBJTivXVksFkOtVnO5BdFo1AmLi+Amp4bBbERZg7/J/obUbpj7wOKpXC43k0MgfQHnjQm/7G65ymqoEFgS7zQcZsOxkSnbllEruAhZ1sysOtnPgD0ON6UFsOaBfRWSySQymYwTAqPRaGZSDnMCKBxkxh6frwQDFQJLIlNsm80mwuGwy4QbDocubOa3LyFP/lQqhVQq5Rqd0unIpJpNJSGx9oEZkMBt5yj9HSyp5r+ZdRT8tzJ/QhYhKbuPCoEl8XbXkUJgNBqh0+lgMBj43rgMneVyOXS7Xdf
pGIB73UQisbF6BFkAlUql3L9xPB7PaCccmMl8CW8FoOzepAQDFQJL4jUHuDmpusu+en7gRpO18bTTQ6GQa9W9idOVzkaaA3wv2v3UUugQZeYfHaMyOUpOcFKCgQqBJZETjVqtljMPhsOhOykbjYbbPBcRj8dda26G6QDMbL5NZiFKTUBOYopGo+j1eshkMshkMi6VmLUV0Wh0JolnkyaLshlUCCwJtQBucDrLut2ua6cdj8d9b4hEIoFUKjWTXceTmabAJotjGJFgsxRqJmwFJicB12o1AHCRDJkxyAxA1QSCwzqGj4QBfArAM9ba7zTGPB/AYwAOAHwawA9Ya4MzHdIn0hwwxrhrCgOq0H6HlSSTSXS73Zl8+nQ6PdMsY5PONmOMM2V4TSHEU57dgo0xaLfb7nGumZEMFQDBYh2awI8D+BKA3PT+WwH8orX2MWPMOwA8DOBX1vA+OwVtZv5ctb0Yy5LlZORUKuWac0pBsAn4nsxJkL4JGfZj5KLb7brHWWJM4aeaQLBYdQLRcwF8B4CfA/CT04Ek3wrg+6ZPeTeAN+OKCgE5gpucd30RrITLZDJOGOTzeff4prvzyh4I8xqN8prdg2u1mgsLUhPwWxGo7BaragK/BOCnAGSn9w8AVK21nAv9NCaTiu/gMgeSbprzNs0iyAIcefpuo3Hm3QSZd8S4/KlRgWCytBvXGPOdAI6ttZ9e5u+ttY9aa19qrX3psmvYB3RjKZtm1TFk322MeSWABCY+gbcDKBhjIlNt4LkAnll9mYqibIqlNQFr7Zustc+11t4P4DUA/o+19vsBfBTAq6ZP25mBpIqizGcTWR1vxMRJ+DgmPoJ3buA9FEVZE2tJFrLWfgzAx6bXTwB4YB2vq9w5OUlR1o3mdyrKnqNCQFH2HBUCirLnaAHRjiBtf17P66jLQh1g/hj0y8grmNcDUbZD420TeFuXnfdT8Y8KgR3CO8RzXvdfFvac15B0mS6+i8LeA3KGAHsTshoSmFRWrpN5nYbv1u1Y8YcKgR2BX2BueNbtVyqVmcq+VquFZrPpRqSx5l/ORGAOv98KxkXgqc8ORL1eD4VCwVVTcuOz7+A6kRqRbHbK6k35e8U/KgR2BFmazHHotVoNsVhspnowm80ik8kgmUy6ll/JZNL1MEgkEgDgu7fhokghwF4D+Xx+potSJBJxXYnXCc0hNnZlYZWchcg5D6oN+EeFwI4gTQH5RW80GjMDT/i7TCbjuhFJlZiawSb7DlDjoAnA+YsA3Ebk+LF1Ik0jChq+D80A7Wq0OCoEdghOOu52uzDGuFl9crxXq9VCOp12zUiTyaSbg8C2YPy7TUAhkEgknAqey01aSbC1GMexrVsISFMpFouh1Wqh0+nM+APoD1FNwD8qBHYEmgNs48UvMduXc8AJW5K3Wi3X9082OGFbs01552kOJBIJp3kAcI1GEokEstks+v3+WjeitXbGV0ItSI47H4/H6HQ6WnW5ICoEdgQZFmSTUdm/kGYCx4bLScjc9JFIZONj0akJUNgAE4cdW5FRQAyHw7ULgXa77XwdctBJIpFwbd/VHFgcFQI7AtXZwWDgNric/MNJwZwORDOBm4Ab8DI6EFEIAHAbjw5KDkzZxBo4Ij4cDruW7uPxGK1WC/F4HN1uV1ubLYEKgR2Bdj2/3NL+pSMuHA670Nx4PHZhOAqAUCiEXC7nTslNwA0PwEUuOChFziVcd+GTtdaNSU8mk87+B4BWq4V2u41er6eawBKoENgRZHSAg0c40VgmAMViMXQ6HaRSKScc8vk8UqkUEomEm30w7yRexwkpIxWyIam3Hdq6tQD2N6SWVK/X3Rg0jkjzJlLJv1XOR4XADuFnLDrDhPl83jnH2OlX3igMgNn0Ym+q8TKCQTYlvWxGo5ELkXIkGgVgPB53WtG8NGyi5dmzqBAIGNJ3QCdhrVZzuQFU1akiy0xCXtOm5/WmwonrRuYnpFIpZ/q0222Mx2PE43EYY5DJZFx
LdGpF541TV2GgQiCQWGtnBp3QYRYKhZBIJGamAlFVZn4/i3s4Oh1AYIQAU6c5KYlJSu12e8ahyjwFDm5hZiE/ExlBUSGgQiBwyKEg/GJLByKFADP3OCyEpyevaTdvIrV4kzAZidmKg8EAmUzGCQEOgmE2IR2GTMBiIhGnRysqBAIJNQGmz9brdQCzKbupVArNZhOZTMal9uZyORfGk5OIg4IskGKEJBQKucnJ6XTaJSvRVKrX605INptN9+/VEeq3USEQMKRPgCGxWCwGAE7N7fV6SCaTaLfbLnKQzU7mwzCJh6YB/zYIMFuRg1JZXclMyVQq5TSedrvtpkLTN0L1fzQauYQrZfUxZAUA/w3A1wOwAH4QwJcBvA/A/QC+AuDV1trKKu+jzMIv8mAwQDgcRqfTAXB7gzPBiPMLWdzDDR+JRNDr9RCPxwOlEssGJnLtTF9mZEBOhJa+AlYeMqdCmbCqJvB2AP/bWvsqY0wMQArATwP4iLX2LcaYRwA8gkkbcmUNMLzF+n06tnq9nsvfbzabTmWm2TAcDhEOh51DkHZ0kISAVxNg8lQkEnG9DXK5HJrNpssiBODMJX4Og8FABYFgaSFgjMkD+GYArwOA6fjxvjHmIQDfMn3auzFpRa5CYI2w2lB2IZKjxOlBZw5/p9PBaDRCLBZzHnb2JAiSEODaec2x6clk0oX/+v3+TOMVVlay9oBVmjo49TaraALPB3AC4FeNMd8A4NOYjCm/Ya09mj7nJoAb8/74Kg0kvUx48jPGzUiBHIvOGz3o3DA8HVOplNs0m0ov3gQ8uWkWUOh5x6fT3zEYDNBsNp0glMlEWmNwm1WEQATASwD8qLX2k8aYt2Oi+justdYYMzcQa619FMCjAHDec5T5sNpQ4v1Ch0Ih96Wn4yyXyzkzgTFz+hGCsCEYzZAFTPPi/K1Wy2k5rVbL+UhYgq0j1GdZRQg8DeBpa+0np/d/CxMhcMsYc6+19sgYcy+A41UXqVyMdzPIgiQ2Ka3X6y7HniE2hsrkmHGelN4x5LuYVDRvI9McosDLZrPodrsolUruc+r3+0gmk2i1WjNlybyeNyb+qrL0/6q19qYx5iljzAuttV8G8CCAL05vrwXwFuhA0q1BByJTZmVSUTweR71ed6chexXSs86TUl4DCMzpKXsexGIxlyeRTqed9tPpdFyGJess2N2ZDtPBYLD25ii7yKqi/UcBvHcaGXgCwOsxGWjyfmPMwwCeBPDqFd9DWRIpBNixiGnD1Wp1xp9AW1lmFbJ/YSwWQygUCpTZIEOJMllK5lnQT8LsQkZSWKJtjHGaQJAcqIuykhCw1n4WwEvn/OrBVV5XWQ/0HdAcqNVqM1/odruNZrOJdrvtKvEymYxLukmn004VNsa4TsZBgIItlUq5jTwcDl14MR6Po9FozHwGMrOQacY0D9bdPn2X2D0jT1kL0hygY4wqMADXhajX62E4HLrNT7U4lUq5vgH0wgdFLZ6XVMSNLH0bLMPmAJV2uz2Te8HciiBoP6ugQuAKIzMLqeoCcDn3dBrKSUeyBp9zDKLRaKDUYSkEGB2hJkDBRn8A04f5fFl+TKFx1ZOKVAhccWjb8jSjzcsMw3g8jmq1ikKhgGw2O+MUo9MwHA67XP0g+AXoGKQ5IGckZjIZ9Pt9FzHodDquqIozHhgJGAwGTmBcZVQIXGHoE+Cm5elODYAnJUty+/3+zPiyRCLh2netu3vwJpEt0FhXwDBnMplEv993uRKyvTsbmHY6HQyHQ1d1qEJACSyyXZmsOZCdhWQVorUWyWQSiUTCpRbLkFlQkBmFsoMSswtl+I8jzJiGzUQjakrqE1ACj7RvOZjDe+MJ2Ov1nOocCoWQTCZRKBQCrQmwi5K336DsySA1ARZgscpyHwqNVAjsAd7GmvJkYyycqjFDZtFoFOl0GtVqdSbcxpwB2dX3vOtt4h3NPk+A8d/ORCnZg1GaRdv+t2waFQJ7yLzOuzJjjg4zCgTZ3JO
mgrdpKe8Dd27AXWDeemTX5HkaEp9z1VEhoACYHYbKjc823rLakI/JpqWcgMQ0XWoCuyYIlPmoEFAAwKUXy1mHtIdlKi0dh+znx2tm4YXD4ZnpwMruo0JAAQCXJCMdhNQIqAkYY5BKpVyZrpyWxE3PeoMgsQ8q/91QIaDMLTSSG5n9CgeDgavGkxV3dAay5XfQhMC+o0JAAYCZqUYAXFUdQ2X1eh2VSgWZTAa5XM75D9jxmGE59ilQQRAcVAgoADCjCTB0xqgBu/dykAer8bj5pU8gaC3LAARuvetGhYACYNYn4B1zxjBgNBpFr9fDaDRCIpFwJkAul3M9/GgOqCYQHFQIKAAw0zyDtfTS4cf7uVwOrVbL2f/j8XgmUsApSEESAuoYVBTMdjE+b3y5MQbRaBSdTsfd2OOfDTybzSYajYZrbuoXr7CZl7yziZAjX1f2JWQ3Ig495XBTCjfvzft40FAhoDhkfv15MIQoy3BlqnGj0XDNSfw2JmU+wrzmpjLysKm8A1lsxKrKZDKJdDqNTCbjsillm3Zvm3PeVAgoVx76DphUFI1GUalUYIxxLbg4Et2PEKB2wWo/Zh/KU5m/XzfUOrjx0+k0crkcer2eqx601iKbzbqKSjYlZfIUm7jK66ChQkBZCOYUsOCIU5FlpqC11o1IvwgKgWQy6fr/MTWZCUpy3sC6kW3I2FeRHZj4b+V65AQjliDL53K9QdMGVh1I+m8B/CtMhpF+DpNuw/cCeAzAASZTiX5gOqJMuQLIxKJut4toNIp2u+3mHPIEZQrxRbCBqRyQStta9jjclCZAfwBrIGgGALdzJ+gviMfj6Ha7M+FR4HZ41TsQJiisMovwOQB+DMCLrLUdY8z7AbwGwCsB/KK19jFjzDsAPAzgV9ayWmXr0Bxg6BCY1OHzsX6/70aB+zEHmGDERqdUt9nvkEJgU6crNZFEIoHRaIRMJnPH6PZ2u41Wq4Vqteq6EcvqSQqLoHYkXtUciABIGmMGmEwkPgLwrQC+b/r7dwN4M1QIXBkoBJgpyBN7MBi4tt2sOfDjyAuHw8hkMsjn88hkMi5xh9EF9jCgB36dzkGpCXCCMQDXhJRRAqr/zJxk63VqA9QO+v1gKryrTCB6xhjzNgB/C6AD4EOYqP9Vay31oqcBPGfe3+tA0mAih3fIWnz6CZhgFI1GfW1YChC+Ltt/sx8gHW6byuqT04rG4zGSySQAzDQaoR9AFkuxS/NwOEQ8Hp+JYOyNT8AYUwTwECbTiasAfhPAK/z+vQ4kDSZMJWbcXPbqo0efNrOfJJxQKOQcbmx0SjOCyUebDL3RHKA/g/6B4XCIwWDgOjDT68/IBdOnKQTkuLagsYo58I8B/I219gQAjDG/A+AbARSMMZGpNvBcAM+svkxlV6A5wNTiea3G6Bz0qwmUSiUnBGRjEnrqOUVo3cj5BKFQCOPx2PkGvDkA/LdSCFATGI1GqNfr7vEgssqq/xbAy40xKUzMgQcBfArARwG8CpMIgQ4kDRAyM/Bu196bDI8Btwty/AoBliTz9JWbcNNZeBRcFAjWWkQikZkMQAqCdDqNdrvt/APUeihEgtpEZRWfwCeNMb8F4DMAhgD+DBP1/n8BeMwY8++nj71zHQtVNossB5b2rTzVN9F+W6YKz/vdZcB/23lQEHBICROapADYSyEAANbanwXws56HnwDwwCqvq1w+sje/d0y5nOCz7mKbUCiEQqGAw8ND5PN5XLt2DYVCAblczjU2DbK9HQSCacQoa0fO7mOMnJWBbDrK8uF1v28+n3cbP5vN3tGzcB8GgGwTFQIKgNn5fYyRp1IpN6acmXSbEAKZTMbNQszlci5xSLWAy0GFgAIATgtIJpPIZrNIJBLI5XJOLU+n08jn82vfkN6MwXw+j1wu54QOpyL7TT5SFkeFgDITKmOWXCaTQTabRaFQQDqdRjabRalUWrtqztoBbvhMJoNMJuPMD6YNqwDYHCoEFAC3a+qpDSSTSSc
Istks8vk8Dg4O1n4im+n4c1YRShOEswBVCGwWFQIKAMz4A1hXXywWce3aNeTzeZRKJdxzzz1rT4iRg0OpidAPQK1EjjjbJYLaSciLCgEFwGxdPW30bDaLYrGIQqGAa9euOSGwbk1AdhDyzjncdU1gV9e1CCoErgDeKcOLws3H1uLe9lp0EBYKBd+FQYu+v+wtKDP4Lnuuofdkl30E5U32Fgw6KgQCiJyY623OKSv7/IbzmKfP05+bnvH7QqGAYrGIUqm0ESGwbeSGntdMVLZU48AV9j2QfQeDKhBUCAQQ2R3XOxrc+9Pv6x0eHqJYLOLg4MAl7tBLz+Shq7b5gdsFUdzMrGGQhUOsabh16xbK5TLK5TJqtRoajQZarZZrprKJIqfLQIVAwPC2xKIKT0EgG3X6deKxki+fz7ummjJZh/nxVw1ZIMTOQOymzHJpFjf1+31UKpWZzU/NgFpBUCcZqRAIIKyBZxiNGzYajbqUW/7eD0zdzWQyODg4cNdeQXAVNQHZH4Gt0WQTUV53Oh2cnJygXq+jXq+jWq2iXq+j1Wo5IaCagHIp0NaXjTFlvj1Pcibb+CEUCrnXKBQKyOfzSCaTM0U8NAeukiCQXZK63S663S4ajQYajQY6nQ5arZa7brfbKJfL7rFqtYpGo4Fms+maogR1IrMKgQAizQGZ48/U3mw26+77JZ1OI5VKIZfLOYEitYBdDtOtAjUBdg/iVKVOp+M2OxuN1mo1tNttd5/mgOyBEERUCAQMmeLLUl8m92QyGWfbM9XXL9z0FAaJRMJl7l3VIh76BNjVqNvtumlKzWYTtVoNZ2dn7vRvNpszGgOv2VtRzQHlUpBCgLn2uVwOpVIJuVwO165dw+HhoavM8/ua7CMgowEyYy+orbMugpoA1f96vY5KpYJ6vY6zszMcHx87H0Cn03GTm+kQpCmhQkC5dOTUHKr+3PjFYtEJhkVeT0YcvI1ErrI5wOhAr9dzmkCj0UCtVkOlUkG1WkW1Wp3JDZDXjCzoLELlUvCaA1TbqREUCgWUSiUUCgUcHBz4ft15A0Hpe+DjVw2ZCMTQIH0C9XrdmQPVahXlcnmm9yFDgrIZqfoElHPx28DTD9z43gw/ZvQdHh7i+vXrKBaLODw8XHqtQUdOWD7vmhufpz83fqVSQblcxunpKU5OTlAul3F2djbzulcJFQIbRjbulE0pvdd+BQF9ATztWeqbSqVcOe6+1+B7uwTz1JZjxUejkZuXQJW/Xq+7kKBMDw6qmu+XC4WAMeZdAL4TwLG19uunj5UAvA/A/QC+AuDV1tqKmXzr3o7JPMI2gNdZaz+zmaUHAxnXl1NtZFbfIsM6KASY2suQnnToUZ3fZ6iyy7RfacfL5CA6AmUmID3+VPmvMn40gf8O4JcBvEc89giAj1hr32KMeWR6/40Avh3AC6a3l2Eyg/Bl61xw0JAluszkox3PttXpdNr3xg2Hw0gkEq7On34AKQyuapqvX3ja86SX9n6v13MmADMCqfIzL6Ber6PZbLr04b0XAtbajxtj7vc8/BCAb5levxvAxzARAg8BeI+d6E6fMMYUjDH3WmuP1rbigEGnmszwSyQSro9fKpVCNpv13c6bDsFSqeRMAZkxeJVz/f0iHX6M5cskoG63i3q9PpMJWKvVXIiQAoJThvZeCJzDDbGxbwK4Mb1+DoCnxPM4kPQOIbAvA0nlwEvG3ZnII7P0/I6xomnB3n+8UbioOTCBvgCv158Zf9z07XbbpQDzd95MwKvsDwDW4Bi01tplBoruy0BSlvXKjj2M4TPPv1QquX56F0HzIpvNzvQCZJYfX2dfnYIAnB1Pc0AmATEH4OzszAmFZrOJVquFXq+HVqs1kxkY5NCfX5YVAreo5htj7gVwPH38GQDPE8/b+4GkMs9fCgG27crlcrh+/bo7xf28Xjgcnknp5aAODg5hrH+fkeYA035p99dqNZcJ6D35aSJQi6Bz8CqzrBD4ACbDRt+C2aGjHwD
wI8aYxzBxCNb22R8AYCYMKFt3yS6+xWIRqVTKtxAIhUJu0/N1GX3QHv3nJwG12+2ZTEAWBMlOQbKfAMODey8EjDG/gYkT8NAY8zQmswffAuD9xpiHATwJ4NXTp38Qk/Dg45iECF+/gTUHCp7ccooP03tLpZLr4suTfZHX9DbkpL/gqmb4+UWWCFMASJOANQHlchmdTsed9vQjcOMznLj3QsBa+73n/OrBOc+1AH541UXtGqucqswDkA5BZvex2Oe+++5z4b1l1nXez33B67iTuQA0BWq1msv8Ozk5wbPPPotKpYJWqzXzGuf9vMpoxqAHb5db78huZvb53WhU+zOZjIsGyBHXVOX9Rgf2GW/zT9kcVF5T7WfyD6MAnU5nxv5nItG+o986D1LF9jbwZPhtkZRcJvTIHn489VmnH+TZ9peJTAWWDUG9jUKlI7BaraJWq7kcAZkJuA+nvB9UCAjoyffW0zOzj9fxeNx3HJ6bnsU9bOMtY/v7HtP3C+109v9jMxBmBbK2n01ByuXyTD8AmQgU1FZgm0CFgAd68qUdT6ceY/GJRMK3440mgGzjLcN7dOipJnAx1ASkIOj1eu6UZy0AuwBJLYCtwWUXIBUCE1QIeJDZfWzWEY/HnW1Pe96v/U7BIU0B+gVobqgQ8If04NPrz16A3OT0BbTbbVcaTMEgW4SrOXAbFQICaQ4wls/Tm3Y9Q3x+/QJ0/jE9WGb5MddfzQF/yM7AtPtbrRbOzs7cxqfazzRh9gJkZiA1hn0I/flFhYBAxt+lIMhmsy6un8lkkM/nfSfkSH8CtQqaFKz9V03AH7IVmNzYsgU4i4FYNMS5AHKGgGoCs6gQ8MAR3dy43tZdiw7mZETB62SUU4OuWj//TSFbe8ksQNkViBEBlgxz01MQyKiCCoEJKgQ8UAhQC+BATo7mZqJPLBbztXGZvSdnB3rDjWoO+MNaO9MQlBoAMwCr1SqOjo5cU9B54UOGGFUI3EaFgIA+AZ7UrPVnfv/h4aG7xePxhU5vb7KR91q5GDoGWegjpwExG/D4+BiVSgX9fv+O5KJ595UrIATmNexcpoEncLt1F513DAvyxvAebftlVfh9Uv3nNfr0Pub3dVjeS/ue5oC8MUNwMBjc8ffKfAItBKhOy5u3qGaRQppIJOLy+WkGZDKZGU++Nu3wh8zu8xbjyEKdRdRyFgDJ8eBMDKLXn++nm94/gRUCsoGnLKVljz3pgPN78kYiEVy/fh0HBwd39O5jXH+fq/MWhX3+2OSTmXpyag/Vdj9wNBjVfxkODPoUoG0SeCFARx4FAVX1WCzmEnX8ntrhcHgms2/eeG4N5/lDlvN6G30ys0+29Pbzeuz6Qw1AjhGnpiFNDcUfgRUCwGx2H/PwmZDDOX3s5Ov39TiaWw75pCC4ytN518288V4ya4/JPdzAfl5PJgnJrsBSE9AEoMUJrBCQ5gAz8NLpNIrFotu4HM7hN8U3FArNqP/eVOGrOp13E1ATkE0+2dyz2Wy6Wn7a8n6gQ5Apwfx79gSkQFFNYDECKwQAzG3bxcYdmUwGxWIR+XzeV9suvh5z+xki1GKf5ZHdfXiKezP8qA34gX4FahLUAGQSkGoCixNYISAHc8rWXUzwyWazODg4cIk9fl9TpvRSsNDkUHPAP9IcYLmvN7OPtn232/X1mrIHIIWHLCumEFBNYDECLQQYBqQ5kMvlcHBw4EJ8N27cwLVr13z37gMwMyqMUQb6HrSJp38oBOTAT474LpfLODo6Qrlcdt59Pye4tXZmHPi80eAqBBYnsEIAuNMcYEIPG3ewkcciDTwZcaCQYU6AHNutQsAfUhNgMg9NATb8ZKmvH1g7IIeLeq/VHFicZQeS/jyA7wLQB/DXAF5vra1Of/cmAA8DGAH4MWvtH/hZyKLJN9QCGB6UQoAlwIVCAcVicaEGnheND98XAeA9TRc9XeUEINrt9OxTK6BJ4FcIzMs0PG/0uOKfZQeSfhjAm6y1Q2PMWwG
8CcAbjTEvAvAaAH8PwH0A/tAY83XW2rsGgiORCEql0kKtsjmK6/Dw0Nn+MsYv23ctYg7sK/JU9Y7w9l77od/vo1wu4/T0FOVy2fX5pxCgTU+vvrI9lhpIaq39kLj7CQCvml4/BOAxa20PwN8YYx4H8ACAP7nbe4TDYRweHjo73A/RaNQ1+mBsP5vNznjz1ZPvH25yZvjNG+fNOnw/9Pt9l+I7L6bPtGFl+6zDJ/CDAN43vX4OJkKBcCDpHciBpLFYDMVi0Tnf/BCJRJDL5Vw3X5nUI517KgT8wfx+VujJPn5M8+31er6FQK/Xcw0+2QGITT7Vk79brCQEjDE/A2AI4L2L/q0cSFooFOy99967kOoeiURcy66Dg4M7+vszw0+FgD8Y06cDj4M72LuPIT6/QmAwGKDRaLghoLVazWkDrPLTmv7dYGkhYIx5HSYOwwft7f/JpQaSsnqPDTh9vDfC4bAr9WW1H8d0y57+ij9kA0/m5DcaDdemm9d+hcBwOESr1XKOP1b7sd2XDOkp22UpIWCMeQWAnwLwj6y10rX7AQC/boz5BUwcgy8A8H8vej3m7NO77+f0lvP9crnczGhuWfKrmsDFyAGe3vl9VOVZvef39B6NRk6QMDJArULNgd1i2YGkbwIQB/Dh6Sb7hLX2X1trv2CMeT+AL2JiJvzwRZEBYKIJ3HPPPS7hxw+hUMiFB1k5SKGgjsHFOa9jDzv5VqtVnJ6e+s7Np5NRRgHa7bbL+GP1oAqB7bPsQNJ33uX5Pwfg5xZZRDQaxT333IN8Po98Pu9r47IVmEzpZeIQ8/z9NgNV5rfz5sZnmu/R0ZFvIWCtvSPSwMEfjDRosc9usBMZg+FwGMVi0SX3+EFm98mmnbKZp/oE/OGd8SfbddMXwHCf34o/OS1Ivjav1RzYHXZGCBweHqJUKqFUKq2tgec+ZfitAjesbP7hNQlOT09x8+ZNDAYD3xuXTj9vo09v009lu+yEEAiFQs7Tn06ntYGnD7z58ky8kY/59byPx2OcnZ2hUqm4G+f41ev1O0J7y25c3fC7yU4IAQAzJ7lyd7zNOr1qNlVtPx17+Hpy089r3SXHeetmvlrsjBBQ/MFNKEtoZdiNnnf+3u/rsayXGX4UBHxtTfG9uqgQCBi0qVmdx8w+ntqyZZffZh3WWtfgg2aBHOZJr75qAFcTFQIBhJoANzqHbnjHdHc6Hd+vJ5OC2PaLmoCaAlcbFQIBRJoDTPNlgQ7tem5iv69HIVKv12f698kUXxUAVxMVAgFDmgMM5dVqNafOn56eolqtun5+fmHjD57+FC5yPoByNVEhEECkJiAz/Or1uhvRxdCeH5gtKEuGh8PhjD9AHYNXFxUCAYOagLeTrxQEbOJRqVR8vyZDi71ez4UaKRg0u+9qo0IgYNA5x1ObgzjYwuv4+BhHR0euh59fmFzE7EG+D69VCFxddkIIjMdjtNttVwm4T5l/i8KIAE/9RqPhPPsM67F2v9Fo+H7deQ085/1Urh47IwSazeZCPQb3Fdrq3PiMCshpPP1+33n1FeUidmLHDYdD3Lp1C51Ox3dse1+h3X58fOyaePL0l117tGOP4pedEAKDwQA3b950aqxyPkwSOj4+dhEA71w/ncmnLMJOCIHRaIRKpeLUWPUJnA+99rKHPzMEZThPhYDil50RAicnJ865pZwPE4Wq1apL5qEGxZoBFQLKIuyEEBgMBjg6OnKtwZTzYdiOxT3dbtcJAE7p1eQeZRF2QgiMRiOcnZ2pEPABE3t46rMTEFN7Nc9fWZSlBpKK370BwNsAXLPWnpqJMf92AK8E0AbwOmvtZy56j+FwiHK57CYHKedDTYDFQ3QUMrtPzQFlUZYdSApjzPMAfBuAvxUPfzsmswZeAOBlAH5l+vOuDAYDPPvss65foHI+POG9w0LnjelWFD8sNZB0yi9iMoDk98RjDwF4z3Qi0SeMMQVjzL3W2qO7vQeThYD96hO4Ct5mnd6bovhl2Ql
EDwF4xlr7555N+xwAT4n7HEh6hxCQA0kBaJKQomyJhYWAMSYF4KcxMQWWRg4kNcbo0aUoW2IZTeBrATwfALWA5wL4jDHmASw5kFRRlO2xsBfOWvs5a+11a+391tr7MVH5X2KtvYnJQNJ/aSa8HEDtIn+Aoijb5UIhMB1I+icAXmiMedoY8/Bdnv5BAE8AeBzAfwXwb9aySkVRNobZBU+y+gQU5VL4tLX2pd4HNSivKHuOCgFF2XNUCCjKnqNCQFH2HBUCirLnqBBQlD1HhYCi7DkqBBRlz9mVDh6nAFrTn7vCIXQ9F7Fra9L13J2vnvfgTmQMAoAx5lPzspm2ha7nYnZtTbqe5VBzQFH2HBUCirLn7JIQeHTbC/Cg67mYXVuTrmcJdsYnoCjKdtglTUBRlC2gQkBR9pytCwFjzCuMMV82xjxujHlkS2t4njHmo8aYLxpjvmCM+fHp4282xjxjjPns9PbKS1zTV4wxn5u+76emj5WMMR82xvzV9GfxktbyQvEZfNYYUzfG/MRlfz7GmHcZY46NMZ8Xj839TKYt7v7T9Hv1F8aYl1zSen7eGPP/pu/5u8aYwvTx+40xHfFZvWPd61mau/Wu3/QNQBjAXwP4GgAxAH8O4EVbWMe9mPRJBIAsgL8E8CIAbwbw77b02XwFwKHnsf8A4JHp9SMA3rql/7ObmCSeXOrnA+CbAbwEwOcv+kwwmYL1+wAMgJcD+OQlrefbAESm128V67lfPm+XbtvWBB4A8Li19glrbR/AY5gMMLlUrLVHdjouzVrbAPAlTOYl7BoPAXj39PrdAP7pFtbwIIC/ttY+edlvbK39OICy5+HzPhM3CMda+wkABWPMvZtej7X2Q9ba4fTuJzDpuL3TbFsInDesZGtMpy29GMAnpw/9yFS1e9dlqd9TLIAPGWM+PR3UAgA37O3uzTcB3LjE9ZDXAPgNcX9bnw857zPZhe/WD2KijZDnG2P+zBjzR8aYf3jJazmXbQuBncIYkwHw2wB+wlpbx2SW4tcC+AeYTFH6j5e4nG+y1r4Ek/mOP2yM+Wb5SzvRMS81vmuMiQH4bgC/OX1om5/PHWzjMzkPY8zPABgCeO/0oSMAX2WtfTGAnwTw68aY3LbWJ9m2ENiZYSXGmCgmAuC91trfAQBr7S1r7chaO8akhfoDl7Uea+0z05/HAH53+t63qNJOfx5f1nqmfDuAz1hrb03XtrXPR3DeZ7K175Yx5nWYTPL+/qlggrW2Z609m15/GhNf2NddxnouYttC4E8BvMAY8/zpKfMaTAaYXCrTkervBPAla+0viMelDfk9AD7v/dsNrSdtjMnyGhNn0+cx+WxeO33aazE7DPYy+F4IU2Bbn4+H8z6TrQzCMca8ApNBvd9trW2Lx68ZY8LT66/BZHL3E5tejy+27ZnExIv7l5hIxp/Z0hq+CRM18i8AfHZ6eyWAXwPwuenjHwBw7yWt52swiZT8OYAv8HMBcADgIwD+CsAfAihd4meUBnAGIC8eu9TPBxMBdARggImN//B5nwkmUYH/PP1efQ7ASy9pPY9j4ovg9+gd0+f+s+n/5WcBfAbAd1329/y8m6YNK8qes21zQFGULaNCQFH2HBUCirLnqBBQlD1HhYCi7DkqBBRlz1EhoCh7zv8HNxhUu4AHnrgAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "mnist_dataset = mnist_dataset.create_dict_iterator()\n", - "data = next(mnist_dataset)\n", - "plt.imshow(data['image'].asnumpy().squeeze(), cmap=plt.cm.gray)\n", - "plt.title(data['label'].asnumpy(), fontsize=20)\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "想要了解更多可以参考编程指南中[数据增强](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/augmentation.html)章节。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/source_zh_cn/images/introduction1.jpg b/tutorials/source_zh_cn/images/introduction1.jpg deleted file mode 100644 index 7df00bba3bfbcb54e273b25a7726342da94f6369..0000000000000000000000000000000000000000 Binary files a/tutorials/source_zh_cn/images/introduction1.jpg and /dev/null differ diff --git a/tutorials/source_zh_cn/images/introduction2.png b/tutorials/source_zh_cn/images/introduction2.png deleted file mode 100644 index 7b0a0228323b226cc956a3db369f083c91b5e23b..0000000000000000000000000000000000000000 Binary files a/tutorials/source_zh_cn/images/introduction2.png and /dev/null differ diff --git a/tutorials/source_zh_cn/images/introduction3.png b/tutorials/source_zh_cn/images/introduction3.png deleted file mode 100644 index a66da13137d7ed238c6ead4ffae8228dc72f2543..0000000000000000000000000000000000000000 Binary files a/tutorials/source_zh_cn/images/introduction3.png and /dev/null differ diff --git a/tutorials/source_zh_cn/index.rst b/tutorials/source_zh_cn/index.rst deleted file 
mode 100644 index 8e36304e5c732734315e23dc7925cee4c43a3892..0000000000000000000000000000000000000000 --- a/tutorials/source_zh_cn/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 11:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -快速入门MindSpore -=================== - -.. toctree:: - :maxdepth: 1 - - introduction - quick_start - tensor - dataset - model - autograd - optimization - save_load_model - inference diff --git a/tutorials/source_zh_cn/inference.md b/tutorials/source_zh_cn/inference.md deleted file mode 100644 index c49f58f8f5910a6dbc375b3f5923a93c9bfb3bd6..0000000000000000000000000000000000000000 --- a/tutorials/source_zh_cn/inference.md +++ /dev/null @@ -1,410 +0,0 @@ -# 推理 - - - -本节是初级教程的最后一节,为了更好地适配不同推理设备,因此推理分为 1)昇腾AI处理器推理和 2)移动设备推理。 - -## 昇腾AI处理器推理 - -昇腾(Ascend)AI处理器是面向边缘场景的高能效高集成度AI处理器。可以实现图像、视频等多种数据分析与推理计算,可广泛用于智能监控、机器人、无人机、视频服务器等场景。本节我们来学习如何在昇腾AI处理器上使用MindSpore执行推理。 - -### 推理代码介绍 - -首先创建目录放置推理代码工程,例如`/home/HwHiAiUser/mindspore_sample/ascend910_resnet50_preprocess_sample`,可以从官网示例下载[样例代码](https://gitee.com/mindspore/docs/tree/master/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample),`model`目录用于存放上述导出的`MindIR`模型文件,`test_data`目录用于存放待分类的图片,推理代码工程目录结构如下: - -```text -└─ascend910_resnet50_preprocess_sample - ├── CMakeLists.txt // 构建脚本 - ├── README.md // 使用说明 - ├── main.cc // 主函数 - ├── model - │ └── resnet50_imagenet.mindir // MindIR模型文件 - └── test_data - ├── ILSVRC2012_val_00002138.JPEG // 输入样本图片1 - ├── ILSVRC2012_val_00003014.JPEG // 输入样本图片2 - ├── ... 
// 输入样本图片n -``` - -引用`mindspore`和`mindspore::dataset`的名字空间。 - -```c++ -namespace ms = mindspore; -namespace ds = mindspore::dataset; -``` - -初始化环境,指定推理使用的硬件平台,设置DeviceID。 - -这里设置硬件为Ascend 910,DeviceID为0,示例代码如下: - -```c++ -auto context = std::make_shared(); -auto ascend910_info = std::make_shared(); -ascend910_info->SetDeviceID(0); -context->MutableDeviceInfo().push_back(ascend910_info); -``` - -加载模型文件: - -```c++ -// 加载MindIR模型 -ms::Graph graph; -ms::Status ret = ms::Serialization::Load(resnet_file, ms::ModelType::kMindIR, &graph); -// 用图构建模型 -ms::Model resnet50; -ret = resnet50.Build(ms::GraphCell(graph), context); -``` - -获取模型所需的输入信息: - -```c++ -std::vector model_inputs = resnet50.GetInputs(); -``` - -加载图片文件: - -```c++ -// ReadFile是读取图像的函数 -ms::MSTensor ReadFile(const std::string &file); -auto image = ReadFile(image_file); -``` - -图片预处理: - -```c++ -// 使用MindData提供的CPU算子进行图片预处理 - -// 创建算子,该算子将输入编码成RGB格式 -std::shared_ptr decode(new ds::vision::Decode()); -// 创建算子,该算子把图片缩放到指定大小 -std::shared_ptr resize(new ds::vision::Resize({256})); -// 创建算子,该算子归一化输入 -std::shared_ptr normalize(new ds::vision::Normalize( - {0.485 * 255, 0.456 * 255, 0.406 * 255}, {0.229 * 255, 0.224 * 255, 0.225 * 255})); -// 创建算子,该算子执行中心抠图 -std::shared_ptr center_crop(new ds::vision::CenterCrop({224, 224})); -// 创建算子,该算子将shape (H, W, C)变换成shape (C, H, W) -std::shared_ptr hwc2chw(new ds::vision::HWC2CHW()); - -// 定义一个MindData数据预处理函数,按顺序包含上述算子 -ds::Execute preprocessor({decode, resize, normalize, center_crop, hwc2chw}); - -// 调用数据预处理函数获取处理后的图像 -ret = preprocessor(image, &image); -``` - -执行推理: - -```c++ -// 创建输出vector -std::vector outputs; -// 创建输入vector -std::vector inputs; -inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(), - image.Data().get(), image.DataSize()); -// 调用Model的Predict函数进行推理 -ret = resnet50.Predict(inputs, &outputs); -``` - -获取推理结果: - -```c++ -// 输出概率最大值 -std::cout << "Image: " << image_file << " infer result: " << GetMax(outputs[0]) << 
std::endl; -``` - -### 构建脚本 - -为编译器添加头文件搜索路径: - -```cmake -option(MINDSPORE_PATH "mindspore install path" "") -include_directories(${MINDSPORE_PATH}) -include_directories(${MINDSPORE_PATH}/include) -``` - -在MindSpore中查找所需动态库: - -```cmake -find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib) -file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*) -``` - -使用指定的源文件生成目标可执行文件,并为目标文件链接MindSpore库: - -```cmake -add_executable(resnet50_sample main.cc) -target_link_libraries(resnet50_sample ${MS_LIB} ${MD_LIB}) -``` - ->详细样例请参考: -> - -### 编译推理代码 - -接下来编译推理的代码,首先要进入工程目录`ascend910_resnet50_preprocess_sample`,设置如下环境变量: - -> 如果是Ascend 310设备,则进入工程目录`ascend310_resnet50_preprocess_sample`,以下代码均用Ascend 910为例。 - -```bash -# 控制log的打印级别. 0-DEBUG, 1-INFO, 2-WARNING, 3-ERROR, 默认是WARNING级别. -export GLOG_v=2 - -# 选择Conda环境 -LOCAL_ASCEND=/usr/local/Ascend # 运行包的根目录 - -# 运行包依赖的lib库 -export LD_LIBRARY_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/lib64:${LOCAL_ASCEND}/driver/lib64/common:${LOCAL_ASCEND}/driver/lib64/driver:${LOCAL_ASCEND}/opp/op_impl/built-in/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH} - -# MindSpore依赖的lib库 -export LD_LIBRARY_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore/lib"}' | xargs realpath`:${LD_LIBRARY_PATH} - -# 配置必要的环境变量 -export TBE_IMPL_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe # TBE算子的路径 -export ASCEND_OPP_PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/opp # OPP路径 -export PATH=${LOCAL_ASCEND}/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:${PATH} # TBE算子编译工具的路径 -export PYTHONPATH=${TBE_IMPL_PATH}:${PYTHONPATH} # TBE依赖的Python库 -``` - -执行`cmake`命令,其中`pip3`需要按照实际情况修改: - -```bash -cmake . 
-DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -``` - -再执行`make`命令编译即可。 - -```bash -make -``` - -编译完成后,在`ascend910_resnet50_preprocess_sample`下会生成可执行`main`文件。 - -### 执行推理并查看结果 - -以上操作完成之后,我们可以开始学习如何执行推理。 - -首先,登录Ascend 910环境,创建`model`目录放置MindIR文件`resnet50_imagenet.mindir`,例如`/home/HwHiAiUser/mindspore_sample/ascend910_resnet50_preprocess_sample/model`。 -创建`test_data`目录放置图片,例如`/home/HwHiAiUser/mindspore_sample/ascend910_resnet50_preprocess_sample/test_data`。 -就可以开始执行推理了: - -```bash -./resnet50_sample -``` - -执行后,会对`test_data`目录下放置的所有图片进行推理,比如放置了2张[ImageNet2012](http://image-net.org/download-images)验证集中label为0的图片,可以看到推理结果如下。 - -```text -Image: ./test_data/ILSVRC2012_val_00002138.JPEG infer result: 0 -Image: ./test_data/ILSVRC2012_val_00003014.JPEG infer result: 0 -``` - -## 移动设备推理 - -MindSpore Lite是端边云全场景AI框架MindSpore的端侧部分,可以在手机等移动设备上实现智能应用。MindSpore Lite提供高性能推理引擎和超轻量的解决方案,支持iOS、Android等手机操作系统以及LiteOS嵌入式操作系统,支持手机、大屏、平板、IoT等各种智能设备,支持MindSpore/TensorFlow Lite/Caffe/Onnx模型的应用。 - -在本环节中,提供了运行在Windows和Linux操作系统下,基于C++ API编写的Demo,帮助用户熟悉端侧推理流程。Demo通过随机生成的数据作为输入数据,执行MobileNetV2模型的推理,直接在电脑中打印获得输出数据。 - -> 运行在手机中的完整实例可以参考官网示例:[基于JNI接口的Android应用开发]( https://www.mindspore.cn/tutorial/lite/zh-CN/master/quick_start/quick_start.html)。 - -### 模型转换 - -模型在用于端侧推理之前需要先进行格式的转换。当前,MindSpore Lite支持MindSpore、TensorFlow Lite、Caffe和ONNX 4类AI框架。 - -下面以MindSpore训练得到的[mobilenetv2.mindir](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.mindir)模型为例,说明Demo中所使用的`mobilenetv2.ms`模型是如何生成的。 - -> 本小节展开说明了转换的操作过程,仅实现Demo运行可跳过本小节。 -> -> 本小节仅针对Demo所用模型,详细的转换工具使用说明请参考官网[推理模型转换](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/converter_tool.html#)章节。 - -- 转换工具下载 - - 根据所使用的操作系统,下载转换工具的[压缩包](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html)并解压至本地目录,获得`converter`工具,并配置环境变量。 - -- 转换工具使用 - - - Linux使用说明 - - 
进入`converter_lite`可执行文件所在的目录,将下载的`mobilenetv2.mindir`模型放入同一路径下,在电脑终端中输入命令完成转换: - - ```cpp - ./converter_lite --fmk=MINDIR --modelFile=mobilenetv2.mindir --outputFile=mobilenetv2 - ``` - - - Windows使用说明 - - 进入`converter_lite`可执行文件所在的目录,将下载的`mobilenetv2.mindir`模型放入同一路径下,在电脑终端中输入命令完成转换: - - ```cpp - call converter_lite --fmk=MINDIR --modelFile=mobilenetv2.mindir --outputFile=mobilenetv2 - ``` - - - 参数说明 - - 在执行命令的过程中设置了三个参数,`--fmk`代表输入模型的原始格式,这里设置为`MINDIR`,即MindSpore框架训练模型的导出格式;`--modelFile`指输入模型的路径;`--outputFile`设定了模型的输出路径,这里自动将转换后的模型添加了`.ms`后缀。 - -### 构建环境与运行 - -#### Linux系统构建与运行 - -- 编译构建 - - 在`mindspore/lite/examples/quick_start_cpp`目录下执行build脚本,将能够自动下载相关文件并编译Demo。 - - ```bash - bash build.sh - ``` - -- 执行推理 - - 编译构建后,进入`mindspore/lite/examples/quick_start_cpp/build`目录,并执行以下命令,体验MindSpore Lite推理MobileNetV2模型。 - - ```bash - ./mindspore_quick_start_cpp ../model/mobilenetv2.ms - ``` - - 执行完成后将能得到如下结果,打印输出Tensor的名称、输出Tensor的大小,输出Tensor的数量以及前50个数据: - - ```shell - tensor name is:Default/head-MobileNetV2Head/Softmax-op204 tensor size is:4000 tensor elements num is:1000 - output data is:5.26823e-05 0.00049752 0.000296722 0.000377607 0.000177048 ....... 
- ``` - -#### Windows系统构建与运行 - -- 编译构建 - - - 库下载:请手动下载硬件平台为CPU、操作系统为Windows-x64的MindSpore Lite模型推理框架[mindspore-lite-{version}-win-x64.zip](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/downloads.html),将解压后`inference/lib`目录下的`libmindspore-lite.a`拷贝到`mindspore/lite/examples/quick_start_cpp/lib`目录、`inference/include`目录拷贝到`mindspore/lite/examples/quick_start_cpp/include`目录。 - - - 模型下载:请手动下载相关模型文件[mobilenetv2.ms](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_imagenet/mobilenetv2.ms),并将其拷贝到`mindspore/lite/examples/quick_start_cpp/model`目录。 - - > 可选择使用模型转换小节所获得的mobilenetv2.ms模型文件。 - - - 编译:在`mindspore/lite/examples/quick_start_cpp`目录下执行build脚本,将能够自动下载相关文件并编译Demo。 - - ```bash - call build.bat - ``` - -- 执行推理 - - 编译构建后,进入`mindspore/lite/examples/quick_start_cpp/build`目录,并执行以下命令,体验MindSpore Lite推理MobileNetV2模型。 - - ```bash - call ./mindspore_quick_start_cpp.exe ../model/mobilenetv2.ms - ``` - - 执行完成后将能得到如下结果,打印输出Tensor的名称、输出Tensor的大小,输出Tensor的数量以及前50个数据: - - ```shell - tensor name is:Default/head-MobileNetV2Head/Softmax-op204 tensor size is:4000 tensor elements num is:1000 - output data is:5.26823e-05 0.00049752 0.000296722 0.000377607 0.000177048 ....... - ``` - -### 推理代码解析 - -下面分析Demo源代码中的推理流程,显示C++ API的具体使用方法。 - -#### 模型加载 - -首先从文件系统中读取MindSpore Lite模型,并通过`mindspore::lite::Model::Import`函数导入模型进行解析。 - -```c++ -// 读模型文件 -size_t size = 0; -char *model_buf = ReadFile(model_path, &size); -if (model_buf == nullptr) { - std::cerr << "Read model file failed." << std::endl; - return RET_ERROR; -} -// 加载模型 -auto model = mindspore::lite::Model::Import(model_buf, size); -delete[](model_buf); -if (model == nullptr) { - std::cerr << "Import model file failed." << std::endl; - return RET_ERROR; -} -``` - -#### 模型编译 - -模型编译主要包括创建配置上下文、创建会话、图编译等步骤。 - -```c++ -mindspore::session::LiteSession *Compile(mindspore::lite::Model *model) { - // 初始化上下文 - auto context = std::make_shared(); - if (context == nullptr) { - std::cerr << "New context failed while." 
<< std::endl; - return nullptr; - } - - // 创建session - mindspore::session::LiteSession *session = mindspore::session::LiteSession::CreateSession(context.get()); - if (session == nullptr) { - std::cerr << "CreateSession failed while running." << std::endl; - return nullptr; - } - - // 图编译 - auto ret = session->CompileGraph(model); - if (ret != mindspore::lite::RET_OK) { - delete session; - std::cerr << "Compile failed while running." << std::endl; - return nullptr; - } - - // 注意:如果使用 model->Free(),模型将不能再次被编译 - if (model != nullptr) { - model->Free(); - } - return session; -} -``` - -#### 模型推理 - -模型推理主要包括输入数据、执行推理、获得输出等步骤,其中本示例中的输入数据是通过随机数据构造生成,最后将执行推理后的输出结果打印出来。 - -```c++ -int Run(mindspore::session::LiteSession *session) { - // 获取输入数据 - auto inputs = session->GetInputs(); - auto ret = GenerateInputDataWithRandom(inputs); - if (ret != mindspore::lite::RET_OK) { - std::cerr << "Generate Random Input Data failed." << std::endl; - return ret; - } - - // 运行 - ret = session->RunGraph(); - if (ret != mindspore::lite::RET_OK) { - std::cerr << "Inference error " << ret << std::endl; - return ret; - } - - // 获取输出数据 - auto out_tensors = session->GetOutputs(); - for (auto tensor : out_tensors) { - std::cout << "tensor name is:" << tensor.first << " tensor size is:" << tensor.second->Size() - << " tensor elements num is:" << tensor.second->ElementsNum() << std::endl; - auto out_data = reinterpret_cast(tensor.second->MutableData()); - std::cout << "output data is:"; - for (int i = 0; i < tensor.second->ElementsNum() && i <= 50; i++) { - std::cout << out_data[i] << " "; - } - std::cout << std::endl; - } - return mindspore::lite::RET_OK; -} -``` - -#### 内存释放 - -无需使用MindSpore Lite推理框架时,需要释放已经创建的`LiteSession`和`Model`。 - -```c++ -// 删除模型缓存 -delete model; -// 删除session缓存 -delete session; -``` diff --git a/tutorials/source_zh_cn/introduction.md b/tutorials/source_zh_cn/introduction.md deleted file mode 100644 index 
d9e3ba8d063660b0f584e3e32001e05040eb8588..0000000000000000000000000000000000000000 --- a/tutorials/source_zh_cn/introduction.md +++ /dev/null @@ -1,84 +0,0 @@ -# 基本介绍 - - - -本节将会对华为AI全栈进行整体介绍,并介绍MindSpore在其中的位置,对MindSpore感兴趣的开发者,最后可以参与MindSpore的[社区](https://gitee.com/mindspore/mindspore)并一键三连[(Watch/Star/Fork)](https://gitee.com/mindspore/mindspore)。 - -## 华为昇腾AI全栈介绍 - -昇腾计算,是基于昇腾系列处理器构建的全栈AI计算基础设施及应用,包括昇腾Ascend系列芯片、Atlas系列硬件、CANN芯片使能、MindSpore AI框架、ModelArts、MindX应用使能等。 - -华为Atlas人工智能计算解决方案,是基于昇腾系列AI处理器,通过模块、板卡、小站、服务器、集群等丰富的产品形态,打造面向“端、边、云”的全场景AI基础设施方案,涵盖数据中心解决方案、智能边缘解决方案,覆盖深度学习领域推理和训练全流程。 - -昇腾AI全栈如下图所示: - -![昇腾全栈](images/introduction1.jpg) - -下面简单介绍每个模块的作用: - -- **Atlas系列产品**:提供AI训练、推理卡及训练服务器([了解更多](https://e.huawei.com/cn/products/cloud-computing-dc/atlas/))。 -- **CANN(异构计算架构)**:芯片使能、驱动层([了解更多](https://ascend.huawei.com/zh/#/software/cann))。 -- **MindSpore(AI框架)**:全场景AI框架([了解更多](https://www.mindspore.cn/))。 -- **MindX SDK(昇腾SDK)**:行业SDK和应用解决方案([了解更多](https://ascend.huawei.com/zh/#/software/mindx-sdk))。 -- **ModelArts(AI开发平台)**:华为云AI开发平台([了解更多](https://www.huaweicloud.com/product/modelarts.html))。 -- **MindStudio(全流程开发工具链)**:AI全流程开发IDE([了解更多](https://www.hiascend.com/software/mindstudio))。 - -详细信息请点击[华为昇腾官网](https://e.huawei.com/cn/products/servers/ascend)。 - -## MindSpore介绍 - -MindSpore是一个全场景深度学习框架,旨在实现易开发、高效执行、全场景覆盖三大目标,其中易开发表现为API友好、调试难度低,高效执行包括计算效率、数据预处理效率和分布式训练效率,全场景则指框架同时支持云、边缘以及端侧场景。 - -MindSpore总体架构如下图所示,下面介绍主要的扩展层(MindSpore Extend)、前端表达层(MindExpress,ME)、编译优化层(MindCompiler)和全场景运行时(MindRE)四个部分。 - -- **MindSpore Extend(扩展层)**:MindSpore的扩展包,期待更多开发者来一起贡献和构建。 -- **MindExpress(表达层)**:基于Python的前端表达,未来计划陆续提供C/C++、Java等不同的前端;MindSpore也在考虑支持华为自研编程语言前端-仓颉,目前还处于预研阶段;同时也在做与Julia等第三方前端的对接工作,引入更多的第三方生态。 -- **MindCompiler(编译优化层)**:图层的核心编译器,主要基于端云统一的MindIR实现三大功能,包括硬件无关的优化(类型推导、自动微分、表达式化简等)、硬件相关优化(自动并行、内存优化、图算融合、流水线执行等)、部署推理相关的优化(量化、剪枝等);其中,MindAKG是MindSpore的自动算子生成编译器,目前还在持续完善中。 -- **MindRE(全场景运行时)**:这里含云侧、端侧以及更小的IoT。 - -![MindSpore](images/introduction2.png) - -### 设计理念 
- -MindSpore源于全产业的最佳实践,向数据科学家和算法工程师提供了统一的模型训练、推理和导出等接口,支持端、边、云等不同场景下的灵活部署,推动深度学习和科学计算等领域繁荣发展。 - -MindSpore提供了Python编程范式,用户使用Python原生控制逻辑即可构建复杂的神经网络模型,AI编程变得简单。 - -目前主流的深度学习框架的执行模式有两种,分别为静态图模式和动态图模式。静态图模式拥有较高的训练性能,但难以调试。动态图模式相较于静态图模式虽然易于调试,但难以高效执行。 - -MindSpore提供了动态图和静态图统一的编码方式,大大增加了静态图和动态图的可兼容性,用户无需开发多套代码,仅变更一行代码便可切换动态图/静态图模式,例如设置`context.set_context(mode=context.PYNATIVE_MODE)`切换成动态图模式,设置`context.set_context(mode=context.GRAPH_MODE)`即可切换成静态图模式,用户可拥有更轻松的开发调试及性能体验。 - -神经网络模型通常基于梯度下降算法进行训练,但手动求导过程复杂,结果容易出错。MindSpore的基于源码转换(Source Code Transformation,SCT)的自动微分(Automatic Differentiation)机制采用函数式可微分编程架构,在接口层提供Python编程接口,包括控制流的表达。用户可聚焦于模型算法的数学原生表达,无需手动进行求导。 - -随着神经网络模型和数据集的规模不断增加,分布式并行训练成为了神经网络训练的常见做法,但分布式并行训练的策略选择和编写十分复杂,这严重制约着深度学习模型的训练效率,阻碍深度学习的发展。MindSpore统一了单机和分布式训练的编码方式,开发者无需编写复杂的分布式策略,在单机代码中添加少量代码即可实现分布式训练,例如设置`context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL)`便可自动建立代价模型,为用户选择一种较优的并行模式,提高神经网络训练效率,大大降低了AI开发门槛,使用户能够快速实现模型思路。 - -### 层次结构 - -MindSpore向用户提供了3个不同层次的API,支撑用户进行网络构建、整图执行、子图执行以及单算子执行,从低到高分别为Low-Level Python API、Medium-Level Python API以及High-Level Python API。 - -![MindSpore API](images/introduction3.png) - -- High-Level Python API - - 第一层为高阶API,其在中阶API的基础上又提供了训练推理的管理、混合精度训练、调试调优等高级接口,方便用户控制整网的执行流程和实现神经网络的训练推理及调优。例如用户使用Model接口,指定要训练的神经网络模型和相关的训练设置,对神经网络模型进行训练,通过Profiler接口调试神经网络性能。 - -- Medium-Level Python API - - 第二层为中阶API,其封装了低阶API,提供网络层、优化器、损失函数等模块,用户可通过中阶API灵活构建神经网络和控制执行流程,快速实现模型算法逻辑。例如用户可调用Cell接口构建神经网络模型和计算逻辑,通过使用loss模块和Optimizer接口为神经网络模型添加损失函数和优化方式,利用dataset模块对数据进行处理以供模型的训练和推导使用。 - -- Low-Level Python API - - 第三层为低阶API,主要包括张量定义、基础算子、自动微分等模块,用户可使用低阶API轻松实现张量定义和求导计算。例如用户可通过Tensor接口自定义张量,使用ops.composite模块下的GradOperation算子计算函数在指定处的导数。 - -## 参与社区 - -欢迎每一个开发者都参与到MindSpore的社区里,为全场景AI框架MindSpore添砖加瓦! 
- -- **MindSpore 官网**:可以全方位了解MindSpore,包括安装、教程、文档、社区、资源下载和资讯栏目等([了解更多](https://www.mindspore.cn/))。 -- **MindSpore 代码**: - - - [MindSpore Gitee](https://gitee.com/mindspore/mindspore):2020年Gitee千万开源项目中Gitee指数TOP1项目,一键三连(Watch/Star/Fork)即可随时跟踪MindSpore最新进展,参与issues讨论、提交代码! - - - [MindSpore Github](https://github.com/mindspore-ai/mindspore):Gitee的MindSpore代码镜像,习惯用github的开发者可以在这里进行MindSpore的学习,查看最新代码实现! - -- **MindSpore 论坛**:我们努力地服务好每一个开发者,在MindSpore无论是入门开发者还是高手大咖都能找到知音,共同学习,共同成长!([了解更多](https://bbs.huaweicloud.com/forum/forum-1076-1.html)) diff --git a/tutorials/source_zh_cn/model.ipynb b/tutorials/source_zh_cn/model.ipynb deleted file mode 100644 index 2985570a168fb3bb3c1b62de9cf7c9d422cf5a0c..0000000000000000000000000000000000000000 --- a/tutorials/source_zh_cn/model.ipynb +++ /dev/null @@ -1,294 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 建立神经网络\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/model.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/quick_start/mindspore_model.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3F1aWNrX3N0YXJ0L21pbmRzcG9yZV9tb2RlbC5pcHluYg==&imagename=MindSpore1.1.1)\n", - "\n", - "神经网络模型由多个数据操作层组成,`mindspore.nn`提供了各种网络基础模块。\n", - "\n", - "在以下内容中,我们将以构建LeNet网络为例,展示MindSpore是如何建立神经网络模型的。\n", - "\n", - "首先导入本文档需要的模块和接口,如下所示:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - ], - "source": [ - "import numpy as np\n", - "import mindspore\n", - 
"import mindspore.nn as nn\n", - "from mindspore import Tensor" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义模型类\n", - "\n", - "MindSpore的`Cell`类是构建所有网络的基类,也是网络的基本单元。当用户需要神经网络时,需要继承`Cell`类,并重写`__init__`方法和`construct`方法。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "class LeNet5(nn.Cell):\n", - " \"\"\"\n", - " Lenet网络结构\n", - " \"\"\"\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " # 定义所需要的运算\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120)\n", - " self.fc2 = nn.Dense(120, 84)\n", - " self.fc3 = nn.Dense(84, num_class)\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " def construct(self, x):\n", - " # 使用定义好的运算构建前向网络\n", - " x = self.conv1(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.conv2(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.flatten(x)\n", - " x = self.fc1(x)\n", - " x = self.relu(x)\n", - " x = self.fc2(x)\n", - " x = self.relu(x)\n", - " x = self.fc3(x)\n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 模型层\n", - "\n", - "本小节内容首先将会介绍LeNet网络中使用到`Cell`类的关键成员函数,然后通过实例化网络介绍如何利用`Cell`类访问模型参数。\n", - "\n", - "### nn.Conv2d\n", - "\n", - "加入`nn.Conv2d`层,给网络中加入卷积函数,帮助神经网络提取特征。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(1, 6, 28, 28)\n" - ] - } - ], - "source": [ - "conv2d = nn.Conv2d(1, 6, 5, has_bias=False, weight_init='normal', pad_mode='valid')\n", - "input_x = Tensor(np.ones([1, 1, 32, 32]), mindspore.float32)\n", - "\n", - "print(conv2d(input_x).shape)" - ] - }, - { - 
"cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "### nn.ReLU\n", - "\n", - "加入`nn.ReLU`层,给网络中加入非线性的激活函数,帮助神经网络学习各种复杂的特征。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0. 2. 0. 2. 0.]\n" - ] - } - ], - "source": [ - "relu = nn.ReLU()\n", - "input_x = Tensor(np.array([-1, 2, -3, 2, -1]), mindspore.float16)\n", - "output = relu(input_x)\n", - "\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "### nn.MaxPool2d\n", - "\n", - "初始化`nn.MaxPool2d`层,将6×28×28的数组降采样为6×14×14的数组。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(1, 6, 14, 14)\n" - ] - } - ], - "source": [ - "max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - "input_x = Tensor(np.ones([1, 6, 28, 28]), mindspore.float32)\n", - "\n", - "print(max_pool2d(input_x).shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "### nn.Flatten\n", - "\n", - "初始化`nn.Flatten`层,将16×5×5的数组转换为400个连续数组。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(1, 400)\n" - ] - } - ], - "source": [ - "flatten = nn.Flatten()\n", - "input_x = Tensor(np.ones([1, 16, 5, 5]), mindspore.float32)\n", - "output = flatten(input_x)\n", - "\n", - "print(output.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "### nn.Dense\n", - "\n", - "初始化`nn.Dense`层,对输入矩阵进行线性变换。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(1, 120)\n" - ] - } - ], - "source": [ - "dense = nn.Dense(400, 120, weight_init='normal')\n", - "input_x = Tensor(np.ones([1, 400]), 
mindspore.float32)\n", - "output = dense(input_x)\n", - "\n", - "print(output.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 模型参数\n", - "\n", - "网络内部的卷积层和全连接层等实例化后,即具有权重和偏置,这些权重和偏置参数会在之后训练中进行优化。`nn.Cell`中使用`parameters_and_names()`方法访问所有参数。\n", - "\n", - "在示例中,我们遍历每个参数,并打印网络各层名字和属性。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "('conv1.weight', Parameter (name=conv1.weight))\n", - "('conv2.weight', Parameter (name=conv2.weight))\n", - "('fc1.weight', Parameter (name=fc1.weight))\n", - "('fc1.bias', Parameter (name=fc1.bias))\n", - "('fc2.weight', Parameter (name=fc2.weight))\n", - "('fc2.bias', Parameter (name=fc2.bias))\n", - "('fc3.weight', Parameter (name=fc3.weight))\n", - "('fc3.bias', Parameter (name=fc3.bias))\n" - ] - } - ], - "source": [ - "model = LeNet5()\n", - "for m in model.parameters_and_names():\n", - " print(m)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/source_zh_cn/optimization.ipynb b/tutorials/source_zh_cn/optimization.ipynb deleted file mode 100644 index a078dd8ffbe6038e4afac8464b8a8b4320ac8032..0000000000000000000000000000000000000000 --- a/tutorials/source_zh_cn/optimization.ipynb +++ /dev/null @@ -1,198 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 优化模型参数\n", - "\n", - 
"[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/optimization.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/quick_start/mindspore_optimization.ipynb) \n", - "\n", - "通过上面章节的学习,我们已经学会如何创建模型和构建数据集,现在开始学习如何设置超参和优化模型参数。\n", - "\n", - "## 超参\n", - "\n", - "超参是可以调整的参数,可以控制模型训练优化的过程,不同的超参数值可能会影响模型训练和收敛速度。\n", - "\n", - "一般会定义以下用于训练的超参:\n", - "\n", - "- 训练轮次(epoch):训练时遍历数据集的次数。\n", - "- 批次大小(batch size):数据集进行分批读取训练,设定每个批次数据的大小。\n", - "- 学习率(learning rate):如果学习率偏小,会导致收敛的速度变慢,如果学习率偏大则可能会导致训练不收敛等不可预测的结果。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "epochs = 5\n", - "batch_size = 64\n", - "learning_rate = 1e-3" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 损失函数\n", - "\n", - "**损失函数**用来评价模型的**预测值**和**真实值**不一样的程度,在这里,使用绝对误差损失函数`L1Loss`。`mindspore.nn.loss`也提供了许多其他常用的损失函数,如`SoftmaxCrossEntropyWithLogits`、`MSELoss`、`SmoothL1Loss`等。\n", - "\n", - "我们给定输出值和目标值,计算损失值,使用方法如下所示:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1.5\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import mindspore.nn as nn\n", - "from mindspore import Tensor\n", - "\n", - "loss = nn.L1Loss()\n", - "output_data = Tensor(np.array([[1, 2, 3], [2, 3, 4]]).astype(np.float32))\n", - "target_data = Tensor(np.array([[0, 2, 5], [3, 1, 1]]).astype(np.float32))\n", - "print(loss(output_data, target_data))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "## 优化器\n", - "\n", - 
"优化器用于计算和更新梯度,模型优化算法的选择直接关系到最终模型的性能,如果有时候效果不好,未必是特征或者模型设计的问题,很有可能是优化算法的问题。MindSpore所有优化逻辑都封装在`Optimizer`对象中,在这里,我们使用SGD优化器。`mindspore.nn.optim`也提供了许多其他常用的优化器,如`ADAM`、`Momentum`。\n", - "\n", - "使用`mindspore.nn.optim`,我们需要构建一个`Optimizer`对象,这个对象能够保持当前参数状态并基于计算得到的梯度进行参数更新。\n", - "\n", - "为了构建一个`Optimizer`,我们需要给它一个包含可需要优化的参数(必须是Variable对象)的迭代器,如网络中所有可以训练的`parameter`,将`params`设置为`net.trainable_params()`即可。然后,你可以设置Optimizer的参数选项,比如学习率、权重衰减等等。\n", - "\n", - "代码样例如下:\n", - "\n", - "```python\n", - "from mindspore import nn\n", - "\n", - "optim = nn.SGD(params=net.trainable_params(), learning_rate=0.1, weight_decay=0.0)\n", - "```\n", - "\n", - "## 训练\n", - "\n", - "在模型训练过程中,一般分为四个步骤。\n", - "\n", - "1. 定义神经网络。\n", - "2. 构建数据集。\n", - "3. 定义超参、损失函数及优化器。\n", - "4. 输入训练轮次和数据集进行训练。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "模型训练示例代码如下所示:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "from mindspore import nn, Tensor, Model\n", - "from mindspore import dtype as mstype\n", - "\n", - "DATA_DIR = \"./datasets/cifar-10-batches-bin/train\"\n", - "\n", - "# 定义神经网络\n", - "class Net(nn.Cell):\n", - " def __init__(self, num_class=10, num_channel=3):\n", - " super(Net, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120)\n", - " self.fc2 = nn.Dense(120, 84)\n", - " self.fc3 = nn.Dense(84, num_class)\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " def construct(self, x):\n", - " x = self.conv1(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.conv2(x)\n", - " x = self.relu(x)\n", - " x 
= self.max_pool2d(x)\n", - " x = self.flatten(x)\n", - " x = self.fc1(x)\n", - " x = self.relu(x)\n", - " x = self.fc2(x)\n", - " x = self.relu(x)\n", - " x = self.fc3(x)\n", - " return x\n", - "\n", - "net = Net()\n", - "epochs = 5\n", - "batch_size = 64\n", - "learning_rate = 1e-3\n", - "\n", - "# 构建数据集\n", - "sampler = ds.SequentialSampler(num_samples=128)\n", - "dataset = ds.Cifar10Dataset(DATA_DIR, sampler=sampler)\n", - "\n", - "# 数据类型转换\n", - "type_cast_op_image = C.TypeCast(mstype.float32)\n", - "type_cast_op_label = C.TypeCast(mstype.int32)\n", - "HWC2CHW = CV.HWC2CHW()\n", - "dataset = dataset.map(operations=[type_cast_op_image, HWC2CHW], input_columns=\"image\")\n", - "dataset = dataset.map(operations=type_cast_op_label, input_columns=\"label\")\n", - "dataset = dataset.batch(batch_size)\n", - "\n", - "# 定义超参、损失函数及优化器\n", - "optim = nn.SGD(params=net.trainable_params(), learning_rate=learning_rate)\n", - "loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - "\n", - "# 输入训练轮次和数据集进行训练\n", - "model = Model(net, loss_fn=loss, optimizer=optim)\n", - "model.train(epoch=epochs, train_dataset=dataset)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/source_zh_cn/quick_start.ipynb b/tutorials/source_zh_cn/quick_start.ipynb deleted file mode 100644 index ef89a04a326284cb1bd0b355058a1638db923dac..0000000000000000000000000000000000000000 --- a/tutorials/source_zh_cn/quick_start.ipynb +++ /dev/null @@ -1,488 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 初学入门\n", - "\n", - 
"[![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/quick_start.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/quick_start/mindspore_quick_start.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3F1aWNrX3N0YXJ0L21pbmRzcG9yZV9xdWlja19zdGFydC5pcHluYg==&imagename=MindSpore1.1.1)\n", - "\n", - "本节贯穿MindSpore的基础功能,实现深度学习中的常见任务,请参考各节链接进行更加深入的学习。\n", - "\n", - "## 配置运行信息\n", - "\n", - "MindSpore通过`context.set_context`来配置运行需要的信息,如运行模式、后端信息、硬件等信息。\n", - "\n", - "导入`context`模块,配置运行需要的信息。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import argparse\n", - "from mindspore import context\n", - "\n", - "parser = argparse.ArgumentParser(description='MindSpore LeNet Example')\n", - "parser.add_argument('--device_target', type=str, default=\"CPU\", choices=['Ascend', 'GPU', 'CPU'])\n", - "\n", - "args = parser.parse_known_args()[0]\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在样例中,我们配置样例运行使用图模式。根据实际情况配置硬件信息,譬如代码运行在Ascend AI处理器上,则`--device_target`选择`Ascend`,代码运行在CPU、GPU同理。详细参数说明,请参见[context.set_context](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.context.html)接口说明。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 下载数据集\n", - "\n", - "我们示例中用到的MNIST数据集是由10类28∗28的灰度图片组成,训练数据集包含60000张图片,测试数据集包含10000张图片。\n", - "\n", - 
"你可以从[MNIST数据集下载页面](http://yann.lecun.com/exdb/mnist/)下载,并按下方目录结构放置,如运行环境为Linux,还可以直接运行如下命令完成下载和放置:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据处理\n", - "\n", - "数据集对于模型训练非常重要,好的数据集可以有效提高训练精度和效率。\n", - "MindSpore提供了用于数据处理的API模块 `mindspore.dataset` ,用于存储样本和标签。在加载数据集前,我们通常会对数据集进行一些处理,`mindspore.dataset`也集成了常见的数据处理方法。\n", - "\n", - "首先导入MindSpore中`mindspore.dataset`和其他相应的模块。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "import mindspore.dataset.vision.c_transforms as CV\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "数据集处理主要分为四个步骤:\n", - "\n", - "1. 
定义函数`create_dataset`来创建数据集。\n", - "2. 定义需要进行的数据增强和处理操作,为之后进行map映射做准备。\n", - "3. 使用map映射函数,将数据操作应用到数据集。\n", - "4. 进行数据shuffle、batch操作。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " # 定义数据集\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # 定义所需要操作的map映射\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR)\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - " \n", - " # 使用map映射函数,将数据操作应用到数据集\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=resize_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_nml_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " \n", - " # 进行shuffle、batch操作\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size)\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - "\n", - " return mnist_ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "其中,`batch_size`为每组包含的数据个数,现设置每组包含32个数据。\n", - "\n", - "> 
MindSpore支持进行多种数据处理和增强的操作,具体可以参考[数据处理](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/pipeline.html)和[数据增强](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/augmentation.html)章节。\n", - "\n", - "## 创建模型\n", - "\n", - "使用MindSpore定义神经网络需要继承`mindspore.nn.Cell`。`Cell`是所有神经网络(如`Conv2d-relu-softmax`等)的基类。\n", - "\n", - "神经网络的各层需要预先在`__init__`方法中定义,然后通过定义`construct`方法来完成神经网络的前向构造。按照LeNet的网络结构,定义网络各层如下:\n" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"\n", - " Lenet网络结构\n", - " \"\"\"\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " # 定义所需要的运算\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " def construct(self, x):\n", - " # 使用定义好的运算构建前向网络\n", - " x = self.conv1(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.conv2(x)\n", - " x = self.relu(x)\n", - " x = self.max_pool2d(x)\n", - " x = self.flatten(x)\n", - " x = self.fc1(x)\n", - " x = self.relu(x)\n", - " x = self.fc2(x)\n", - " x = self.relu(x)\n", - " x = self.fc3(x)\n", - " return x\n", - "\n", - "# 实例化网络\n", - "net = LeNet5()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - ">阅读更多有关[在MindSpore中构建神经网络](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/defining_the_network.html)的信息。\n", - "\n", - "## 优化模型参数\n", - "\n", - "要训练神经网络模型,需要定义损失函数和优化器。\n", - "\n", 
- "MindSpore支持的损失函数有`SoftmaxCrossEntropyWithLogits`、`L1Loss`、`MSELoss`等。这里使用交叉熵损失函数`SoftmaxCrossEntropyWithLogits`。\n" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "# 定义损失函数\n", - "net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - ">阅读更多有关[在MindSpore中使用损失函数](https://www.mindspore.cn/tutorial/zh-CN/master/optimization.html#损失函数)的信息。\n", - "\n", - "MindSpore支持的优化器有`Adam`、`AdamWeightDecay`、`Momentum`等。这里使用`Momentum`优化器为例。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "# 定义优化器\n", - "net_opt = nn.Momentum(net.trainable_params(), learning_rate=0.01, momentum=0.9)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - ">阅读更多有关[在MindSpore中使用优化器](https://www.mindspore.cn/tutorial/zh-CN/master/optimization.html#优化器)的信息。\n", - "\n", - "## 训练及保存模型\n", - "\n", - "MindSpore提供了回调Callback机制,可以在训练过程中执行自定义逻辑,这里以使用框架提供的`ModelCheckpoint`为例。\n", - "`ModelCheckpoint`可以保存网络模型和参数,以便进行后续的Fine-tuning(微调)操作。\n" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig\n", - "# 设置模型保存参数\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=1875, keep_checkpoint_max=10)\n", - "# 应用模型保存参数\n", - "ckpoint = ModelCheckpoint(prefix=\"checkpoint_lenet\", config=config_ck)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "通过MindSpore提供的`model.train`接口可以方便地进行网络的训练,`LossMonitor`可以监控训练过程中`loss`值的变化。\n" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "# 导入模型训练需要的库\n", - "from mindspore.nn import Accuracy\n", - "from mindspore.train.callback import LossMonitor\n", - "from mindspore import Model" - ] - }, - { - 
"cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "def train_net(args, model, epoch_size, data_path, repeat_size, ckpoint_cb, sink_mode):\n", - " \"\"\"定义训练的方法\"\"\"\n", - " # 加载训练数据集\n", - " ds_train = create_dataset(os.path.join(data_path, \"train\"), 32, repeat_size)\n", - " model.train(epoch_size, ds_train, callbacks=[ckpoint_cb, LossMonitor(125)], dataset_sink_mode=sink_mode)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "其中,`dataset_sink_mode`用于控制数据是否下沉,数据下沉是指数据通过通道直接传送到Device上,可以加快训练速度,`dataset_sink_mode`为True表示数据下沉,否则为非下沉。\n", - "\n", - "通过模型运行测试数据集得到的结果,验证模型的泛化能力。\n", - "\n", - "1. 使用`model.eval`接口读入测试数据集。\n", - "2. 使用保存后的模型参数进行推理。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "def test_net(network, model, data_path):\n", - " \"\"\"定义验证的方法\"\"\"\n", - " ds_eval = create_dataset(os.path.join(data_path, \"test\"))\n", - " acc = model.eval(ds_eval, dataset_sink_mode=False)\n", - " print(\"{}\".format(acc))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "这里把`train_epoch`设置为1,对数据集进行1个迭代的训练。在`train_net`和 `test_net`方法中,我们加载了之前下载的训练数据集,`mnist_path`是MNIST数据集路径。" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "train_epoch = 1\n", - "mnist_path = \"./datasets/MNIST_Data\"\n", - "dataset_size = 1\n", - "model = Model(net, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()})\n", - "train_net(args, model, train_epoch, mnist_path, dataset_size, ckpoint, False)\n", - "test_net(net, model, mnist_path)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "使用以下命令运行脚本:\n", - "\n", - "```bash\n", - "python lenet.py --device_target=CPU\n", - "```\n", - "\n", - "其中,\n", - "\n", - "`lenet.py`:可以把前面的代码都粘贴到lenet.py中(不包含“下载数据集”的代码)。一般情况下,可将import部分移到代码头部,类、函数和方法的定义放在之后,最后在main方法中将前面的操作串起来即可。\n", - "\n", 
- "`--device_target=CPU`:指定运行硬件平台,参数为`CPU`、`GPU`或者`Ascend`,根据你的实际运行硬件平台来指定。\n", - "\n", - "训练过程中会打印loss值,类似下图。loss值会波动,但总体来说loss值会逐步减小,精度逐步提高。每个人运行的loss值有一定随机性,不一定完全相同。\n", - "训练过程中loss打印示例如下:\n", - "\n", - "```bash\n", - "epoch: 1 step: 125, loss is 2.3083377\n", - "epoch: 1 step: 250, loss is 2.3019726\n", - "...\n", - "epoch: 1 step: 1500, loss is 0.028385757\n", - "epoch: 1 step: 1625, loss is 0.0857362\n", - "epoch: 1 step: 1750, loss is 0.05639569\n", - "epoch: 1 step: 1875, loss is 0.12366105\n", - "{'Accuracy': 0.9663477564102564}\n", - "```\n", - "\n", - "可以在打印信息中看出模型精度数据,示例中精度数据达到96.6%,模型质量良好。随着网络迭代次数`train_epoch`增加,模型精度会进一步提高。\n", - "\n", - "## 加载模型\n" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.train.serialization import load_checkpoint, load_param_into_net\n", - "# 加载已经保存的用于测试的模型\n", - "param_dict = load_checkpoint(\"checkpoint_lenet-1_1875.ckpt\")\n", - "# 加载参数到网络中\n", - "load_param_into_net(net, param_dict)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - ">阅读更多有关[MindSpore加载模型](https://www.mindspore.cn/tutorial/zh-CN/master/save_load_model.html#id3)的信息。\n", - "\n", - "## 验证模型\n", - "\n", - "我们使用生成的模型进行单个图片数据的分类预测,具体步骤如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Predicted: \"6\", Actual: \"6\"\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from mindspore import Tensor\n", - "\n", - "# 定义测试数据集,batch_size设置为1,则取出一张图片\n", - "ds_test = create_dataset(os.path.join(mnist_path, \"test\"), batch_size=1).create_dict_iterator()\n", - "data = next(ds_test)\n", - "\n", - "# images为测试图片,labels为测试图片的实际分类\n", - "images = data[\"image\"].asnumpy()\n", - "labels = data[\"label\"].asnumpy()\n", - "\n", - "# 使用函数model.predict预测image对应分类\n", - "output = model.predict(Tensor(data['image']))\n", - "predicted = 
np.argmax(output.asnumpy(), axis=1)\n", - "\n", - "# 输出预测分类与实际分类\n", - "print(f'Predicted: \"{predicted[0]}\", Actual: \"{labels[0]}\"')" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} \ No newline at end of file diff --git a/tutorials/source_zh_cn/save_load_model.md b/tutorials/source_zh_cn/save_load_model.md deleted file mode 100644 index a48c1cc32d3312263805c103442da4d2aee41eb5..0000000000000000000000000000000000000000 --- a/tutorials/source_zh_cn/save_load_model.md +++ /dev/null @@ -1,150 +0,0 @@ -# 保存及加载模型 - - - -上一节我们训练完网络,本节将会学习如何保存模型和加载模型,以及如何将保存的模型导出成特定格式到不同平台进行推理。 - -## 保存模型 - -在模型训练的过程中,使用Callback回调机制传入回调函数`ModelCheckpoint`对象,可以保存模型参数,生成CheckPoint文件。 - -> 上面我们也曾提到过Callback机制,其设计的理念不是针对下沉式,而是针对流程进行设计的,其支持网络计算前后、epoch执行前后、step执行前后的回调处理机制;下沉的目的是为了提升训练执行效率,由于下沉在加速硬件上执行,所以Callback需要等下沉执行完毕后才能回调执行,在设计上两者解耦。 - -```python -from mindspore.train.callback import ModelCheckpoint - -ckpt_cb = ModelCheckpoint() -model.train(epoch_num, dataset, callbacks=ckpt_cb) -``` - -用户可以根据具体需求对CheckPoint策略进行配置。具体用法如下: - -```python -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig - -config_ck = CheckpointConfig(save_checkpoint_steps=32, keep_checkpoint_max=10) -ckpt_cb = ModelCheckpoint(prefix='resnet50', directory=None, config=config_ckpt) -model.train(epoch_num, dataset, callbacks= ckpt_cb) -``` - -上述代码中,首先需要初始化一个`CheckpointConfig`类对象,用来设置保存策略。 - -- `save_checkpoint_steps`表示每隔多少个step保存一次。 -- `keep_checkpoint_max`表示最多保留CheckPoint文件的数量。 -- `prefix`表示生成CheckPoint文件的前缀名。 -- `directory`表示存放文件的目录。 - -创建一个`ModelCheckpoint`对象把它传递给`model.train`方法,就可以在训练过程中使用CheckPoint功能了。 - 
-生成的CheckPoint文件如下: - -```text -resnet50-graph.meta # 编译后的计算图 -resnet50-1_32.ckpt # CheckPoint文件后缀名为'.ckpt' -resnet50-2_32.ckpt # 文件的命名方式表示保存参数所在的epoch和step数 -resnet50-3_32.ckpt # 表示保存的是第3个epoch的第32个step的模型参数 -... -``` - -如果用户使用相同的前缀名,运行多次训练脚本,可能会生成同名CheckPoint文件。MindSpore为方便用户区分每次生成的文件,会在用户定义的前缀后添加"_"和数字加以区分。如果想要删除`.ckpt`文件时,请同步删除`.meta` 文件。 - -例:`resnet50_3-2_32.ckpt` 表示运行第3次脚本生成的第2个epoch的第32个step的CheckPoint文件。 - -## 加载模型 - -要加载模型权重,需要先创建相同模型的实例,然后使用`load_checkpoint`和`load_param_into_net`方法加载参数。 - -示例代码如下: - -```python -from mindspore import load_checkpoint, load_param_into_net - -resnet = ResNet50() -# 将模型参数存入parameter的字典中 -param_dict = load_checkpoint("resnet50-2_32.ckpt") -# 将参数加载到网络中 -load_param_into_net(resnet, param_dict) -model = Model(resnet, loss, metrics={"accuracy"}) -``` - -- `load_checkpoint`方法会把参数文件中的网络参数加载到字典`param_dict`中。 -- `load_param_into_net`方法会把字典`param_dict`中的参数加载到网络或者优化器中,加载后,网络中的参数就是CheckPoint保存的。 - -### 模型验证 - -针对仅推理场景,把参数直接加载到网络中,以便后续的推理验证。示例代码如下: - -```python -# 定义验证数据集 -dateset_eval = create_dataset(os.path.join(mnist_path, "test"), 32, 1) - -# 调用eval()进行推理 -acc = model.eval(dateset_eval) -``` - -### 用于迁移学习 - -针对任务中断再训练及微调(Fine-tuning)场景,可以加载网络参数和优化器参数到模型中。示例代码如下: - -```python -# 设置训练轮次 -epoch = 1 -# 定义训练数据集 -dateset = create_dataset(os.path.join(mnist_path, "train"), 32, 1) -# 调用train()进行训练 -model.train(epoch, dataset) -``` - -## 导出模型 - -在模型训练过程中,可以添加检查点(CheckPoint)用于保存模型的参数,以便执行推理及再训练使用。如果想继续在不同硬件平台上做推理,可通过网络和CheckPoint格式文件生成对应的MindIR、AIR或ONNX格式文件。 - -以下通过示例来介绍保存CheckPoint格式文件和导出MindIR、AIR或ONNX格式文件的方法。 - -> MindSpore是一个全场景AI框架,使用MindSpore IR统一网络模型中间表达式,因此推荐使用MindIR作为导出格式文件。 - -### 导出MindIR格式 - -当有了CheckPoint文件后,如果想跨平台或者硬件执行推理(如昇腾AI处理器、MindSpore端侧、GPU等),可以通过定义网络和CheckPoint生成MINDIR格式模型文件。当前支持基于静态图,且不包含控制流语义的推理网络导出。导出该格式文件的代码样例如下: - -```python -from mindspore import export, load_checkpoint, load_param_into_net -from mindspore import Tensor -import numpy as np - -resnet = ResNet50() -# 将模型参数存入parameter的字典中 -param_dict = 
load_checkpoint("resnet50-2_32.ckpt") - -# 将参数加载到网络中 -load_param_into_net(resnet, param_dict) -input = np.random.uniform(0.0, 1.0, size=[32, 3, 224, 224]).astype(np.float32) -export(resnet, Tensor(input), file_name='resnet50-2_32', file_format='MINDIR') -``` - -> - `input`用来指定导出模型的输入shape以及数据类型,如果网络有多个输入,需要一同传进`export`方法。 例如:`export(network, Tensor(input1), Tensor(input2), file_name='network', file_format='MINDIR')` -> - 导出的文件名称会自动添加".mindir"后缀。 - -### 其他格式导出 - -#### 导出AIR格式文件 - -当有了CheckPoint文件后,如果想继续在昇腾AI处理器上做推理,需要通过网络和CheckPoint生成对应的AIR格式模型文件。导出该格式文件的代码样例如下: - -```python -export(resnet, Tensor(input), file_name='resnet50-2_32', file_format='AIR') -``` - -> - `input`用来指定导出模型的输入shape以及数据类型,如果网络有多个输入,需要一同传进`export`方法。 例如:`export(network, Tensor(input1), Tensor(input2), file_name='network', file_format='AIR')` -> - 导出的文件名称会自动添加".air"后缀。 - -#### 导出ONNX格式文件 - -当有了CheckPoint文件后,如果想继续在其他三方硬件上进行推理,需要通过网络和CheckPoint生成对应的ONNX格式模型文件。导出该格式文件的代码样例如下: - -```python -export(resnet, Tensor(input), file_name='resnet50-2_32', file_format='ONNX') -``` - -> - `input`用来指定导出模型的输入shape以及数据类型,如果网络有多个输入,需要一同传进`export`方法。 例如:`export(network, Tensor(input1), Tensor(input2), file_name='network', file_format='ONNX')` -> - 导出的文件名称会自动添加".onnx"后缀。 diff --git a/tutorials/source_zh_cn/tensor.ipynb b/tutorials/source_zh_cn/tensor.ipynb deleted file mode 100644 index 612f82b29360b6751873cbd5eab8681803baec02..0000000000000000000000000000000000000000 --- a/tutorials/source_zh_cn/tensor.ipynb +++ /dev/null @@ -1,388 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 张量\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/tensor.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/quick_start/mindspore_tensor.ipynb) 
[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL3F1aWNrX3N0YXJ0L21pbmRzcG9yZV90ZW5zb3IuaXB5bmI=&imagename=MindSpore1.1.1)\n", - "\n", - "张量(Tensor)是MindSpore网络运算中的基本数据结构。\n", - "\n", - "首先导入本文档需要的模块和接口,如下所示:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "from mindspore import Tensor, context\n", - "from mindspore import dtype as mstype\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 初始化张量\n", - "\n", - "张量的初始化方式有多种,构造张量时,支持传入`Tensor`、`float`、`int`、`bool`、`tuple`、`list`和`NumPy.array`类型。\n", - "\n", - "- **根据数据直接生成**\n", - "\n", - "可以根据数据创建张量,数据类型可以设置或者自动推断。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "x = Tensor(0.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "- **从NumPy数组生成**\n", - "\n", - "可以从NumPy数组创建张量。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "arr = np.array([1, 0, 1, 0])\n", - "x_np = Tensor(arr)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "初始值是`NumPy.array`,则生成的`Tensor`数据类型与之对应。\n", - "\n", - "- **继承另一个张量的属性,形成新的张量**\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[1 1]\n", - " [1 1]]\n" - ] - } - ], - "source": [ - "from mindspore import ops\n", - "oneslike = ops.OnesLike()\n", - "x = Tensor(np.array([[0, 1], [2, 1]]).astype(np.int32))\n", - "output = oneslike(x)\n", - "print(output)" 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "- **输出指定大小的恒定值张量**\n", - "\n", - "`shape`是张量的尺寸元组,确定输出的张量的维度。\n" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[1. 1.]\n", - " [1. 1.]]\n", - "[[0. 0.]\n", - " [0. 0.]]\n" - ] - } - ], - "source": [ - "from mindspore.ops import operations as ops\n", - "\n", - "shape = (2, 2)\n", - "ones = ops.Ones()\n", - "output = ones(shape, mstype.float32)\n", - "print(output)\n", - "\n", - "zeros = ops.Zeros()\n", - "output = zeros(shape, mstype.float32)\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "`Tensor`初始化时,可指定dtype,如`mstype.int32`、`mstype.float32`、`mstype.bool_`等。\n", - "\n", - "## 张量的属性\n", - "\n", - "张量的属性包括形状(shape)和数据类型(dtype)。\n", - "\n", - "- 形状:`Tensor`的shape,是一个tuple。\n", - "- 数据类型:`Tensor`的dtype,是MindSpore的一个数据类型。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Datatype of tensor: Float32\n", - "Shape of tensor: (1, 2, 3)\n" - ] - } - ], - "source": [ - "t1 = Tensor(np.zeros([1,2,3]), mstype.float32)\n", - "print(\"Datatype of tensor: {}\".format(t1.dtype))\n", - "print(\"Shape of tensor: {}\".format(t1.shape))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "## 张量运算\n", - "\n", - "张量之间有很多运算,包括算术、线性代数、矩阵处理(转置、标引、切片)、采样等,下面介绍其中几种操作,张量运算和NumPy的使用方式类似。\n", - "\n", - "类似NumPy的索引和切片操作:\n" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "First row: [0. 1.]\n", - "First column: [0. 2.]\n", - "Last column: [1. 
3.]\n" - ] - } - ], - "source": [ - "tensor = Tensor(np.array([[0, 1], [2, 3]]).astype(np.float32))\n", - "print(\"First row: {}\".format(tensor[0]))\n", - "print(\"First column: {}\".format(tensor[:, 0]))\n", - "print(\"Last column: {}\".format(tensor[..., -1]))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "`Concat`将给定维度上的一系列张量连接起来。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[0. 1.]\n", - " [2. 3.]\n", - " [4. 5.]\n", - " [6. 7.]]\n" - ] - } - ], - "source": [ - "data1 = Tensor(np.array([[0, 1], [2, 3]]).astype(np.float32))\n", - "data2 = Tensor(np.array([[4, 5], [6, 7]]).astype(np.float32))\n", - "op = ops.Concat()\n", - "output = op((data1, data2))\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "`Stack`则是从另一个维度上将两个张量合并起来。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[[0. 1.]\n", - " [2. 3.]]\n", - "\n", - " [[4. 5.]\n", - " [6. 7.]]]\n" - ] - } - ], - "source": [ - "data1 = Tensor(np.array([[0, 1], [2, 3]]).astype(np.float32))\n", - "data2 = Tensor(np.array([[4, 5], [6, 7]]).astype(np.float32))\n", - "op = ops.Stack()\n", - "output = op([data1, data2])\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "普通运算:" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[ 4. 10. 
18.]\n" - ] - } - ], - "source": [ - "input_x = Tensor(np.array([1.0, 2.0, 3.0]), mstype.float32)\n", - "input_y = Tensor(np.array([4.0, 5.0, 6.0]), mstype.float32)\n", - "mul = ops.Mul()\n", - "output = mul(input_x, input_y)\n", - "print(output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "## 与NumPy转换\n", - "\n", - "张量可以和NumPy进行互相转换。\n", - "\n", - "### 张量转换为NumPy" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "output: \n", - "n_output: \n" - ] - } - ], - "source": [ - "zeros = ops.Zeros()\n", - "output = zeros((2,2), mstype.float32)\n", - "print(\"output: {}\".format(type(output)))\n", - "n_output = output.asnumpy()\n", - "print(\"n_output: {}\".format(type(n_output)))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "### NumPy转换为张量" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "output: \n", - "t_output: \n" - ] - } - ], - "source": [ - "output = np.array([1, 0, 1, 0])\n", - "print(\"output: {}\".format(type(output)))\n", - "t_output = Tensor(output)\n", - "print(\"t_output: {}\".format(type(t_output)))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/training/Makefile b/tutorials/training/Makefile deleted file mode 100644 index 1eff8952707bdfa503c8d60c1e9a903053170ba2..0000000000000000000000000000000000000000 --- a/tutorials/training/Makefile +++ /dev/null @@ -1,20 +0,0 @@ 
-# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source_zh_cn -BUILDDIR = build_zh_cn - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/tutorials/training/requirements.txt b/tutorials/training/requirements.txt deleted file mode 100644 index 5400249aed67ae0fd8d2ab98ce33995841e52c45..0000000000000000000000000000000000000000 --- a/tutorials/training/requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -sphinx >= 2.2.1, <= 2.4.4 -recommonmark -sphinx-markdown-tables -sphinx_rtd_theme -nbsphinx -IPython -jieba diff --git a/tutorials/training/source_en/_static/css/bootstrap.min.css b/tutorials/training/source_en/_static/css/bootstrap.min.css deleted file mode 100644 index 35722284f98a1189566e9200862b02aac7cbec50..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/_static/css/bootstrap.min.css +++ /dev/null @@ -1,6 +0,0 @@ -/*! - * Bootstrap v3.3.7 (http://getbootstrap.com) - * Copyright 2011-2016 Twitter, Inc. - * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) - *//*! 
normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */html{overflow-y: hidden!important;font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}dfn{font-style:italic}h1{margin:.67em 0;font-size:2em}mark{color:#000;background:#ff0}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{height:0;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;font-size:1em}button,input,optgroup,select,textarea{margin:0;font:inherit;color:inherit}button{overflow:visible}button,select{text-transform:none}button,html input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}input{line-height:normal}input[type=checkbox],input[type=radio]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}fieldset{padding:.35em .625em .75em;margin:0 2px;border:1px solid 
silver}legend{padding:0;border:0}textarea{overflow:auto}optgroup{font-weight:700}table{border-spacing:0;border-collapse:collapse}td,th{padding:0}/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{*,:after,:before{color:#000!important;text-shadow:none!important;background:0 0!important;-webkit-box-shadow:none!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}blockquote,pre{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table td,.table th{background-color:#fff!important}.table-bordered td,.table-bordered th{border:1px solid #ddd!important}}@font-face{font-family:'Glyphicons Halflings';src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix) format('embedded-opentype'),url(../fonts/glyphicons-halflings-regular.woff2) format('woff2'),url(../fonts/glyphicons-halflings-regular.woff) format('woff'),url(../fonts/glyphicons-halflings-regular.ttf) format('truetype'),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular) format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons 
Halflings';font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.g
lyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot
:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"
}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{c
ontent:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before{content:"\e227"}.glyphicon-btc:before{content:"\e227"}.glyphicon-xbt:before{content:"\e227"}.glyphicon-yen:before{content:"\00a5"}.glyphicon-jpy:befo
re{content:"\00a5"}.glyphicon-ruble:before{content:"\20bd"}.glyphicon-rub:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}:after,:before{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:rgba(0,0,0,0)}body{font-family:"Helvetica 
Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#333;background-color:#fff}button,input,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#337ab7;text-decoration:none}a:focus,a:hover{color:#23527c;text-decoration:underline}a:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.carousel-inner>.item>a>img,.carousel-inner>.item>img,.img-responsive,.thumbnail a>img,.thumbnail>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{display:inline-block;max-width:100%;height:auto;padding:4px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role=button]{cursor:pointer}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-weight:400;line-height:1;color:#777}.h1,.h2,.h3,h1,h2,h3{margin-top:20px;margin-bottom:10px}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small{font-size:65%}.h4,.h5,.h6,h4,h5,h6{margin-top:10px;margin-bottom:10px}.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 
small{font-size:75%}.h1,h1{font-size:36px}.h2,h2{font-size:30px}.h3,h3{font-size:24px}.h4,h4{font-size:18px}.h5,h5{font-size:14px}.h6,h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}@media (min-width:768px){.lead{font-size:21px}}.small,small{font-size:85%}.mark,mark{padding:.2em;background-color:#fcf8e3}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#777}.text-primary{color:#337ab7}a.text-primary:focus,a.text-primary:hover{color:#286090}.text-success{color:#3c763d}a.text-success:focus,a.text-success:hover{color:#2b542c}.text-info{color:#31708f}a.text-info:focus,a.text-info:hover{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:focus,a.text-warning:hover{color:#66512c}.text-danger{color:#a94442}a.text-danger:focus,a.text-danger:hover{color:#843534}.bg-primary{color:#fff;background-color:#337ab7}a.bg-primary:focus,a.bg-primary:hover{background-color:#286090}.bg-success{background-color:#dff0d8}a.bg-success:focus,a.bg-success:hover{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:focus,a.bg-info:hover{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:focus,a.bg-warning:hover{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:focus,a.bg-danger:hover{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}ol,ul{margin-top:0;margin-bottom:10px}ol ol,ol ul,ul ol,ul 
ul{margin-bottom:0}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;margin-left:-5px;list-style:none}.list-inline>li{display:inline-block;padding-right:5px;padding-left:5px}dl{margin-top:0;margin-bottom:20px}dd,dt{line-height:1.42857143}dt{font-weight:700}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[data-original-title],abbr[title]{cursor:help;border-bottom:1px dotted #777}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #eee}blockquote ol:last-child,blockquote p:last-child,blockquote ul:last-child{margin-bottom:0}blockquote .small,blockquote footer,blockquote small{display:block;font-size:80%;line-height:1.42857143;color:#777}blockquote .small:before,blockquote footer:before,blockquote small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;text-align:right;border-right:5px solid #eee;border-left:0}.blockquote-reverse .small:before,.blockquote-reverse footer:before,.blockquote-reverse small:before,blockquote.pull-right .small:before,blockquote.pull-right footer:before,blockquote.pull-right small:before{content:''}.blockquote-reverse .small:after,.blockquote-reverse footer:after,.blockquote-reverse small:after,blockquote.pull-right .small:after,blockquote.pull-right footer:after,blockquote.pull-right small:after{content:'\00A0 \2014'}address{margin-bottom:20px;font-style:normal;line-height:1.42857143}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{padding:2px 4px;font-size:90%;color:#c7254e;background-color:#f9f2f4;border-radius:4px}kbd{padding:2px 4px;font-size:90%;color:#fff;background-color:#333;border-radius:3px;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.25);box-shadow:inset 0 -1px 0 
rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;font-weight:700;-webkit-box-shadow:none;box-shadow:none}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857143;color:#333;word-break:break-all;word-wrap:break-word;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.container-fluid{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.row{margin-right:-15px;margin-left:-15px}.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{position:relative;min-height:1px;padding-right:15px;padding-left:15px}.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{float:left}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.666
66667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:auto}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:auto}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media 
(min-width:768px){.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:auto}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:auto}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media 
(min-width:992px){.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:auto}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:auto}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media 
(min-width:1200px){.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:auto}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:auto}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}table{background-color:transparent}caption{padding-top:8px;padding-bottom:8px;color:#777;text-align:left}th{text-align:left}.table{width:100%;max-width:100%;margin-bottom:20px}.table>tbody>tr>td,.table>tbody>tr>th,.table>tfoot>tr>td,.table>
tfoot>tr>th,.table>thead>tr>td,.table>thead>tr>th{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>td,.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>td,.table>thead:first-child>tr:first-child>th{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>tbody>tr>td,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>td,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>thead>tr>th{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>tbody>tr>td,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>td,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border:1px solid #ddd}.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}table col[class*=col-]{position:static;display:table-column;float:none}table td[class*=col-],table 
th[class*=col-]{position:static;display:table-cell;float:none}.table>tbody>tr.active>td,.table>tbody>tr.active>th,.table>tbody>tr>td.active,.table>tbody>tr>th.active,.table>tfoot>tr.active>td,.table>tfoot>tr.active>th,.table>tfoot>tr>td.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>thead>tr.active>th,.table>thead>tr>td.active,.table>thead>tr>th.active{background-color:#f5f5f5}.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr.active:hover>th,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover{background-color:#e8e8e8}.table>tbody>tr.success>td,.table>tbody>tr.success>th,.table>tbody>tr>td.success,.table>tbody>tr>th.success,.table>tfoot>tr.success>td,.table>tfoot>tr.success>th,.table>tfoot>tr>td.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>thead>tr.success>th,.table>thead>tr>td.success,.table>thead>tr>th.success{background-color:#dff0d8}.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr.success:hover>th,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover{background-color:#d0e9c6}.table>tbody>tr.info>td,.table>tbody>tr.info>th,.table>tbody>tr>td.info,.table>tbody>tr>th.info,.table>tfoot>tr.info>td,.table>tfoot>tr.info>th,.table>tfoot>tr>td.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>thead>tr.info>th,.table>thead>tr>td.info,.table>thead>tr>th.info{background-color:#d9edf7}.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr.info:hover>th,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover{background-color:#c4e3f3}.table>tbody>tr.warning>td,.table>tbody>tr.warning>th,.table>tbody>tr>td.warning,.table>tbody>tr>th.warning,.table>tfoot>tr.warning>td,.table>tfoot>tr.warning>th,.table>tfoot>tr>td.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>thead>tr.warning>th,.table>thead>tr>td.warning,.table>t
head>tr>th.warning{background-color:#fcf8e3}.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr.warning:hover>th,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover{background-color:#faf2cc}.table>tbody>tr.danger>td,.table>tbody>tr.danger>th,.table>tbody>tr>td.danger,.table>tbody>tr>th.danger,.table>tfoot>tr.danger>td,.table>tfoot>tr.danger>th,.table>tfoot>tr>td.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>thead>tr.danger>th,.table>thead>tr>td.danger,.table>thead>tr>th.danger{background-color:#f2dede}.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr.danger:hover>th,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover{background-color:#ebcccc}.table-responsive{min-height:.01%;overflow-x:auto}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>td,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>thead>tr>th{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:inherit;color:#333;border:0;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px;font-weight:700}input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=checkbox],input[type=radio]{margin:4px 0 0;margin-top:1px\9;line-height:normal}input[type=file]{display:block}input[type=range]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type=file]:focus,input[type=checkbox]:focus,input[type=radio]:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}output{display:block;padding-top:7px;font-size:14px;line-height:1.42857143;color:#555}.form-control{display:block;width:100%;height:34px;padding:6px 12px;font-size:14px;line-height:1.42857143;color:#555;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color ease-in-out .15s,-webkit-box-shadow ease-in-out .15s;-o-transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control::-ms-expand{background-color:transparent;border:0}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#eee;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}input[type=search]{-webkit-appearance:none}@media screen and (-webkit-min-device-pixel-ratio:0){input[type=date].form-control,input[type=time].form-control,input[type=datetime-local].form-control,input[type=month].form-control{line-height:34px}.input-group-sm input[type=date],.input-group-sm input[type=time],.input-group-sm input[type=datetime-local],.input-group-sm input[type=month],input[type=date].input-sm,input[type=time].input-sm,input[type=datetime-local].input-sm,input[type=month].input-sm{line-height:30px}.input-group-lg input[type=date],.input-group-lg input[type=time],.input-group-lg input[type=datetime-local],.input-group-lg 
input[type=month],input[type=date].input-lg,input[type=time].input-lg,input[type=datetime-local].input-lg,input[type=month].input-lg{line-height:46px}}.form-group{margin-bottom:15px}.checkbox,.radio{position:relative;display:block;margin-top:10px;margin-bottom:10px}.checkbox label,.radio label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox input[type=checkbox],.checkbox-inline input[type=checkbox],.radio input[type=radio],.radio-inline input[type=radio]{position:absolute;margin-top:4px\9;margin-left:-20px}.checkbox+.checkbox,.radio+.radio{margin-top:-5px}.checkbox-inline,.radio-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;font-weight:400;vertical-align:middle;cursor:pointer}.checkbox-inline+.checkbox-inline,.radio-inline+.radio-inline{margin-top:0;margin-left:10px}fieldset[disabled] input[type=checkbox],fieldset[disabled] input[type=radio],input[type=checkbox].disabled,input[type=checkbox][disabled],input[type=radio].disabled,input[type=radio][disabled]{cursor:not-allowed}.checkbox-inline.disabled,.radio-inline.disabled,fieldset[disabled] .checkbox-inline,fieldset[disabled] .radio-inline{cursor:not-allowed}.checkbox.disabled label,.radio.disabled label,fieldset[disabled] .checkbox label,fieldset[disabled] .radio label{cursor:not-allowed}.form-control-static{min-height:34px;padding-top:7px;padding-bottom:7px;margin-bottom:0}.form-control-static.input-lg,.form-control-static.input-sm{padding-right:0;padding-left:0}.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-sm{height:30px;line-height:30px}select[multiple].input-sm,textarea.input-sm{height:auto}.form-group-sm .form-control{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm select[multiple].form-control,.form-group-sm textarea.form-control{height:auto}.form-group-sm 
.form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-lg{height:46px;line-height:46px}select[multiple].input-lg,textarea.input-lg{height:auto}.form-group-lg .form-control{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg select[multiple].form-control,.form-group-lg textarea.form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.3333333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:2;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.form-group-lg .form-control+.form-control-feedback,.input-group-lg+.form-control-feedback,.input-lg+.form-control-feedback{width:46px;height:46px;line-height:46px}.form-group-sm .form-control+.form-control-feedback,.input-group-sm+.form-control-feedback,.input-sm+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .checkbox,.has-success .checkbox-inline,.has-success .control-label,.has-success .help-block,.has-success .radio,.has-success .radio-inline,.has-success.checkbox label,.has-success.checkbox-inline label,.has-success.radio label,.has-success.radio-inline label{color:#3c763d}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;background-color:#dff0d8;border-color:#3c763d}.has-success 
.form-control-feedback{color:#3c763d}.has-warning .checkbox,.has-warning .checkbox-inline,.has-warning .control-label,.has-warning .help-block,.has-warning .radio,.has-warning .radio-inline,.has-warning.checkbox label,.has-warning.checkbox-inline label,.has-warning.radio label,.has-warning.radio-inline label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;background-color:#fcf8e3;border-color:#8a6d3b}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .checkbox,.has-error .checkbox-inline,.has-error .control-label,.has-error .help-block,.has-error .radio,.has-error .radio-inline,.has-error.checkbox label,.has-error.checkbox-inline label,.has-error.radio label,.has-error.radio-inline label{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;background-color:#f2dede;border-color:#a94442}.has-error .form-control-feedback{color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-static{display:inline-block}.form-inline 
.input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .form-control,.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .checkbox,.form-inline .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .checkbox label,.form-inline .radio label{padding-left:0}.form-inline .checkbox input[type=checkbox],.form-inline .radio input[type=radio]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .checkbox,.form-horizontal .checkbox-inline,.form-horizontal .radio,.form-horizontal .radio-inline{padding-top:7px;margin-top:0;margin-bottom:0}.form-horizontal .checkbox,.form-horizontal .radio{min-height:27px}.form-horizontal .form-group{margin-right:-15px;margin-left:-15px}@media (min-width:768px){.form-horizontal .control-label{padding-top:7px;margin-bottom:0;text-align:right}}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:11px;font-size:18px}}@media (min-width:768px){.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;padding:6px 12px;margin-bottom:0;font-size:14px;font-weight:400;line-height:1.42857143;text-align:center;white-space:nowrap;vertical-align:middle;-ms-touch-action:manipulation;touch-action:manipulation;cursor:pointer;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background-image:none;border:1px solid transparent;border-radius:4px}.btn.active.focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn:active:focus,.btn:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}.btn.focus,.btn:focus,.btn:hover{color:#333;text-decoration:none}.btn.active,.btn:active{background-image:none;outline:0;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none;opacity:.65}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default.focus,.btn-default:focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default:hover{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active.focus,.btn-default.active:focus,.btn-default.active:hover,.btn-default:active.focus,.btn-default:active:focus,.btn-default:active:hover,.open>.dropdown-toggle.btn-default.focus,.open>.dropdown-toggle.btn-default:focus,.open>.dropdown-toggle.btn-default:hover{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{background-image:none}.btn-default.disabled.focus,.btn-default.disabled:focus,.btn-default.disabled:hover,.btn-default[disabled].focus,.btn-default[disabled]:focus,.btn-default[disabled]:hover,fieldset[disabled] .btn-default.focus,fieldset[disabled] .btn-default:focus,fieldset[disabled] .btn-default:hover{background-color:#fff;border-color:#ccc}.btn-default 
.badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#337ab7;border-color:#2e6da4}.btn-primary.focus,.btn-primary:focus{color:#fff;background-color:#286090;border-color:#122b40}.btn-primary:hover{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active.focus,.btn-primary.active:focus,.btn-primary.active:hover,.btn-primary:active.focus,.btn-primary:active:focus,.btn-primary:active:hover,.open>.dropdown-toggle.btn-primary.focus,.open>.dropdown-toggle.btn-primary:focus,.open>.dropdown-toggle.btn-primary:hover{color:#fff;background-color:#204d74;border-color:#122b40}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{background-image:none}.btn-primary.disabled.focus,.btn-primary.disabled:focus,.btn-primary.disabled:hover,.btn-primary[disabled].focus,.btn-primary[disabled]:focus,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary.focus,fieldset[disabled] .btn-primary:focus,fieldset[disabled] .btn-primary:hover{background-color:#337ab7;border-color:#2e6da4}.btn-primary 
.badge{color:#337ab7;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success.focus,.btn-success:focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success:hover{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active.focus,.btn-success.active:focus,.btn-success.active:hover,.btn-success:active.focus,.btn-success:active:focus,.btn-success:active:hover,.open>.dropdown-toggle.btn-success.focus,.open>.dropdown-toggle.btn-success:focus,.open>.dropdown-toggle.btn-success:hover{color:#fff;background-color:#398439;border-color:#255625}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{background-image:none}.btn-success.disabled.focus,.btn-success.disabled:focus,.btn-success.disabled:hover,.btn-success[disabled].focus,.btn-success[disabled]:focus,.btn-success[disabled]:hover,fieldset[disabled] .btn-success.focus,fieldset[disabled] .btn-success:focus,fieldset[disabled] .btn-success:hover{background-color:#5cb85c;border-color:#4cae4c}.btn-success 
.badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info.focus,.btn-info:focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info:hover{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active.focus,.btn-info.active:focus,.btn-info.active:hover,.btn-info:active.focus,.btn-info:active:focus,.btn-info:active:hover,.open>.dropdown-toggle.btn-info.focus,.open>.dropdown-toggle.btn-info:focus,.open>.dropdown-toggle.btn-info:hover{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{background-image:none}.btn-info.disabled.focus,.btn-info.disabled:focus,.btn-info.disabled:hover,.btn-info[disabled].focus,.btn-info[disabled]:focus,.btn-info[disabled]:hover,fieldset[disabled] .btn-info.focus,fieldset[disabled] .btn-info:focus,fieldset[disabled] .btn-info:hover{background-color:#5bc0de;border-color:#46b8da}.btn-info 
.badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning.focus,.btn-warning:focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning:hover{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active.focus,.btn-warning.active:focus,.btn-warning.active:hover,.btn-warning:active.focus,.btn-warning:active:focus,.btn-warning:active:hover,.open>.dropdown-toggle.btn-warning.focus,.open>.dropdown-toggle.btn-warning:focus,.open>.dropdown-toggle.btn-warning:hover{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{background-image:none}.btn-warning.disabled.focus,.btn-warning.disabled:focus,.btn-warning.disabled:hover,.btn-warning[disabled].focus,.btn-warning[disabled]:focus,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning.focus,fieldset[disabled] .btn-warning:focus,fieldset[disabled] .btn-warning:hover{background-color:#f0ad4e;border-color:#eea236}.btn-warning 
.badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger.focus,.btn-danger:focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger:hover{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active.focus,.btn-danger.active:focus,.btn-danger.active:hover,.btn-danger:active.focus,.btn-danger:active:focus,.btn-danger:active:hover,.open>.dropdown-toggle.btn-danger.focus,.open>.dropdown-toggle.btn-danger:focus,.open>.dropdown-toggle.btn-danger:hover{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{background-image:none}.btn-danger.disabled.focus,.btn-danger.disabled:focus,.btn-danger.disabled:hover,.btn-danger[disabled].focus,.btn-danger[disabled]:focus,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger.focus,fieldset[disabled] .btn-danger:focus,fieldset[disabled] .btn-danger:hover{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{font-weight:400;color:#337ab7;border-radius:0}.btn-link,.btn-link.active,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:active,.btn-link:focus,.btn-link:hover{border-color:transparent}.btn-link:focus,.btn-link:hover{color:#23527c;text-decoration:underline;background-color:transparent}.btn-link[disabled]:focus,.btn-link[disabled]:hover,fieldset[disabled] .btn-link:focus,fieldset[disabled] .btn-link:hover{color:#777;text-decoration:none}.btn-group-lg>.btn,.btn-lg{padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.btn-group-sm>.btn,.btn-sm{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-xs>.btn,.btn-xs{padding:1px 
5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type=button].btn-block,input[type=reset].btn-block,input[type=submit].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition-timing-function:ease;-o-transition-timing-function:ease;transition-timing-function:ease;-webkit-transition-duration:.35s;-o-transition-duration:.35s;transition-duration:.35s;-webkit-transition-property:height,visibility;-o-transition-property:height,visibility;transition-property:height,visibility}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px dashed;border-top:4px solid\9;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown,.dropup{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;font-size:14px;text-align:left;list-style:none;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,.175);box-shadow:0 6px 12px rgba(0,0,0,.175)}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 
20px;clear:both;font-weight:400;line-height:1.42857143;color:#333;white-space:nowrap}.dropdown-menu>li>a:focus,.dropdown-menu>li>a:hover{color:#262626;text-decoration:none;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:focus,.dropdown-menu>.active>a:hover{color:#fff;text-decoration:none;background-color:#337ab7;outline:0}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{color:#777}.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{text-decoration:none;cursor:not-allowed;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{right:0;left:auto}.dropdown-menu-left{right:auto;left:0}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.42857143;color:#777;white-space:nowrap}.dropdown-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:990}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{content:"";border-top:0;border-bottom:4px dashed;border-bottom:4px solid\9}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{right:0;left:auto}.navbar-right .dropdown-menu-left{right:auto;left:0}}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;float:left}.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:2}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar .btn,.btn-toolbar 
.btn-group,.btn-toolbar .input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-bottom-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-right:8px;padding-left:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-right:12px;padding-left:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn .caret{margin-left:0}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 
5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-left-radius:0;border-top-right-radius:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-top-right-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{display:table-cell;float:none;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle=buttons]>.btn input[type=checkbox],[data-toggle=buttons]>.btn input[type=radio],[data-toggle=buttons]>.btn-group>.btn input[type=checkbox],[data-toggle=buttons]>.btn-group>.btn input[type=radio]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*=col-]{float:none;padding-right:0;padding-left:0}.input-group 
.form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group .form-control:focus{z-index:3}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn,textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn,textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn{height:auto}.input-group .form-control,.input-group-addon,.input-group-btn{display:table-cell}.input-group .form-control:not(:first-child):not(:last-child),.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm{padding:5px 
10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type=checkbox],.input-group-addon input[type=radio]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn-group:not(:last-child)>.btn,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:first-child>.btn-group:not(:first-child)>.btn,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle{border-top-left-radius:0;border-bottom-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:active,.input-group-btn>.btn:focus,.input-group-btn>.btn:hover{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:2;margin-left:-1px}.nav{padding-left:0;margin-bottom:0;list-style:none}.nav>li{position:relative;display:block}.nav>li>a{position:relative;display:block;padding:10px 15px}.nav>li>a:focus,.nav>li>a:hover{text-decoration:none;background-color:#eee}.nav>li.disabled>a{color:#777}.nav>li.disabled>a:focus,.nav>li.disabled>a:hover{color:#777;text-decoration:none;cursor:not-allowed;background-color:transparent}.nav .open>a,.nav .open>a:focus,.nav .open>a:hover{background-color:#eee;border-color:#337ab7}.nav .nav-divider{height:1px;margin:9px 
0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid transparent;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:focus,.nav-tabs>li.active>a:hover{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0}}.nav-tabs.nav-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border-bottom-color:#fff}}.nav-pills>li{float:left}.nav-pills>li>a{border-radius:4px}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:focus,.nav-pills>li.active>a:hover{color:#fff;background-color:#337ab7}.nav-stacked>li{float:none}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li{float:none}.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media 
(min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border-bottom-color:#fff}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}@media (min-width:768px){.navbar{border-radius:4px}}@media (min-width:768px){.navbar-header{float:left}}.navbar-collapse{padding-right:15px;padding-left:15px;overflow-x:visible;-webkit-overflow-scrolling:touch;border-top:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1)}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar-collapse{width:auto;border-top:0;-webkit-box-shadow:none;box-shadow:none}.navbar-collapse.collapse{display:block!important;height:auto!important;padding-bottom:0;overflow:visible!important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse{padding-right:0;padding-left:0}}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:340px}@media (max-device-width:480px) and (orientation:landscape){.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:200px}}.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:-15px;margin-left:-15px}@media 
(min-width:768px){.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:0;margin-left:0}}.navbar-static-top{z-index:1000;border-width:0 0 1px}@media (min-width:768px){.navbar-static-top{border-radius:0}}.navbar-fixed-bottom,.navbar-fixed-top{position:fixed;right:0;left:0;z-index:1030}@media (min-width:768px){.navbar-fixed-bottom,.navbar-fixed-top{border-radius:0}}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;height:50px;padding:15px 15px;font-size:18px;line-height:20px}.navbar-brand:focus,.navbar-brand:hover{text-decoration:none}.navbar-brand>img{display:block}@media (min-width:768px){.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;padding:9px 10px;margin-top:8px;margin-right:15px;margin-bottom:8px;background-color:transparent;background-image:none;border:1px solid transparent;border-radius:4px}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}@media (min-width:768px){.navbar-toggle{display:none}}.navbar-nav{margin:7.5px -15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;-webkit-box-shadow:none;box-shadow:none}.navbar-nav .open .dropdown-menu .dropdown-header,.navbar-nav .open .dropdown-menu>li>a{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:focus,.navbar-nav .open .dropdown-menu>li>a:hover{background-image:none}}@media 
(min-width:768px){.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}}.navbar-form{padding:10px 15px;margin-top:8px;margin-right:-15px;margin-bottom:8px;margin-left:-15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1)}@media (min-width:768px){.navbar-form .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .form-control-static{display:inline-block}.navbar-form .input-group{display:inline-table;vertical-align:middle}.navbar-form .input-group .form-control,.navbar-form .input-group .input-group-addon,.navbar-form .input-group .input-group-btn{width:auto}.navbar-form .input-group>.form-control{width:100%}.navbar-form .control-label{margin-bottom:0;vertical-align:middle}.navbar-form .checkbox,.navbar-form .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.navbar-form .checkbox label,.navbar-form .radio label{padding-left:0}.navbar-form .checkbox input[type=checkbox],.navbar-form .radio input[type=radio]{position:relative;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}.navbar-form .form-group:last-child{margin-bottom:0}}@media (min-width:768px){.navbar-form{width:auto;padding-top:0;padding-bottom:0;margin-right:0;margin-left:0;border:0;-webkit-box-shadow:none;box-shadow:none}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-left-radius:0;border-top-right-radius:0}.navbar-fixed-bottom 
.navbar-nav>li>.dropdown-menu{margin-bottom:0;border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-right:15px;margin-left:15px}}@media (min-width:768px){.navbar-left{float:left!important}.navbar-right{float:right!important;margin-right:-15px}.navbar-right~.navbar-right{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default .navbar-brand:focus,.navbar-default .navbar-brand:hover{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a{color:#777}.navbar-default .navbar-nav>li>a:focus,.navbar-default .navbar-nav>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:focus,.navbar-default .navbar-nav>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:focus,.navbar-default .navbar-nav>.disabled>a:hover{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:focus,.navbar-default .navbar-toggle:hover{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:focus,.navbar-default .navbar-nav>.open>a:hover{color:#555;background-color:#e7e7e7}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-default .navbar-nav .open 
.dropdown-menu>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-default .btn-link{color:#777}.navbar-default .btn-link:focus,.navbar-default .btn-link:hover{color:#333}.navbar-default .btn-link[disabled]:focus,.navbar-default .btn-link[disabled]:hover,fieldset[disabled] .navbar-default .btn-link:focus,fieldset[disabled] .navbar-default .btn-link:hover{color:#ccc}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#9d9d9d}.navbar-inverse .navbar-brand:focus,.navbar-inverse .navbar-brand:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-text{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a:focus,.navbar-inverse .navbar-nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:focus,.navbar-inverse .navbar-nav>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:focus,.navbar-inverse .navbar-nav>.disabled>a:hover{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:focus,.navbar-inverse .navbar-toggle:hover{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse 
.navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:focus,.navbar-inverse .navbar-nav>.open>a:hover{color:#fff;background-color:#080808}@media (max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#9d9d9d}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .btn-link{color:#9d9d9d}.navbar-inverse .btn-link:focus,.navbar-inverse .btn-link:hover{color:#fff}.navbar-inverse .btn-link[disabled]:focus,.navbar-inverse .btn-link[disabled]:hover,fieldset[disabled] .navbar-inverse .btn-link:focus,fieldset[disabled] .navbar-inverse .btn-link:hover{color:#444}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:4px}.breadcrumb>li{display:inline-block}.breadcrumb>li+li:before{padding:0 5px;color:#ccc;content:"/\00a0"}.breadcrumb>.active{color:#777}.pagination{display:inline-block;padding-left:0;margin:20px 0;border-radius:4px}.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;margin-left:-1px;line-height:1.42857143;color:#337ab7;text-decoration:none;background-color:#fff;border:1px solid 
#ddd}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-top-left-radius:4px;border-bottom-left-radius:4px}.pagination>li:last-child>a,.pagination>li:last-child>span{border-top-right-radius:4px;border-bottom-right-radius:4px}.pagination>li>a:focus,.pagination>li>a:hover,.pagination>li>span:focus,.pagination>li>span:hover{z-index:2;color:#23527c;background-color:#eee;border-color:#ddd}.pagination>.active>a,.pagination>.active>a:focus,.pagination>.active>a:hover,.pagination>.active>span,.pagination>.active>span:focus,.pagination>.active>span:hover{z-index:3;color:#fff;cursor:default;background-color:#337ab7;border-color:#337ab7}.pagination>.disabled>a,.pagination>.disabled>a:focus,.pagination>.disabled>a:hover,.pagination>.disabled>span,.pagination>.disabled>span:focus,.pagination>.disabled>span:hover{color:#777;cursor:not-allowed;background-color:#fff;border-color:#ddd}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px;line-height:1.3333333}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-top-left-radius:6px;border-bottom-left-radius:6px}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-top-right-radius:6px;border-bottom-right-radius:6px}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px;line-height:1.5}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-top-left-radius:3px;border-bottom-left-radius:3px}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-top-right-radius:3px;border-bottom-right-radius:3px}.pager{padding-left:0;margin:20px 0;text-align:center;list-style:none}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:focus,.pager li>a:hover{text-decoration:none;background-color:#eee}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager 
.previous>span{float:left}.pager .disabled>a,.pager .disabled>a:focus,.pager .disabled>a:hover,.pager .disabled>span{color:#777;cursor:not-allowed;background-color:#fff}.label{display:inline;padding:.2em .6em .3em;font-size:75%;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25em}a.label:focus,a.label:hover{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.btn .label{position:relative;top:-1px}.label-default{background-color:#777}.label-default[href]:focus,.label-default[href]:hover{background-color:#5e5e5e}.label-primary{background-color:#337ab7}.label-primary[href]:focus,.label-primary[href]:hover{background-color:#286090}.label-success{background-color:#5cb85c}.label-success[href]:focus,.label-success[href]:hover{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:focus,.label-info[href]:hover{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:focus,.label-warning[href]:hover{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:focus,.label-danger[href]:hover{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:middle;background-color:#777;border-radius:10px}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.btn-group-xs>.btn .badge,.btn-xs .badge{top:0;padding:1px 5px}a.badge:focus,a.badge:hover{color:#fff;text-decoration:none;cursor:pointer}.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#337ab7;background-color:#fff}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding-top:30px;padding-bottom:30px;margin-bottom:30px;color:inherit;background-color:#eee}.jumbotron .h1,.jumbotron h1{color:inherit}.jumbotron 
p{margin-bottom:15px;font-size:21px;font-weight:200}.jumbotron>hr{border-top-color:#d5d5d5}.container .jumbotron,.container-fluid .jumbotron{padding-right:15px;padding-left:15px;border-radius:6px}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron,.container-fluid .jumbotron{padding-right:60px;padding-left:60px}.jumbotron .h1,.jumbotron h1{font-size:63px}}.thumbnail{display:block;padding:4px;margin-bottom:20px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:border .2s ease-in-out;-o-transition:border .2s ease-in-out;transition:border .2s ease-in-out}.thumbnail a>img,.thumbnail>img{margin-right:auto;margin-left:auto}a.thumbnail.active,a.thumbnail:focus,a.thumbnail:hover{border-color:#337ab7}.thumbnail .caption{padding:9px;color:#333}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:700}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 
0}}@-o-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f5f5f5;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.1);box-shadow:inset 0 1px 2px rgba(0,0,0,.1)}.progress-bar{float:left;width:0;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#337ab7;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress-bar-striped,.progress-striped .progress-bar{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;background-size:40px 40px}.progress-bar.active,.progress.active .progress-bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 
75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 
25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.media{margin-top:15px}.media:first-child{margin-top:0}.media,.media-body{overflow:hidden;zoom:1}.media-body{width:10000px}.media-object{display:block}.media-object.img-thumbnail{max-width:none}.media-right,.media>.pull-right{padding-left:10px}.media-left,.media>.pull-left{padding-right:10px}.media-body,.media-left,.media-right{display:table-cell;vertical-align:top}.media-middle{vertical-align:middle}.media-bottom{vertical-align:bottom}.media-heading{margin-top:0;margin-bottom:5px}.media-list{padding-left:0;list-style:none}.list-group{padding-left:0;margin-bottom:20px}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-left-radius:4px;border-top-right-radius:4px}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}a.list-group-item,button.list-group-item{color:#555}a.list-group-item .list-group-item-heading,button.list-group-item .list-group-item-heading{color:#333}a.list-group-item:focus,a.list-group-item:hover,button.list-group-item:focus,button.list-group-item:hover{color:#555;text-decoration:none;background-color:#f5f5f5}button.list-group-item{width:100%;text-align:left}.list-group-item.disabled,.list-group-item.disabled:focus,.list-group-item.disabled:hover{color:#777;cursor:not-allowed;background-color:#eee}.list-group-item.disabled .list-group-item-heading,.list-group-item.disabled:focus .list-group-item-heading,.list-group-item.disabled:hover .list-group-item-heading{color:inherit}.list-group-item.disabled .list-group-item-text,.list-group-item.disabled:focus .list-group-item-text,.list-group-item.disabled:hover 
.list-group-item-text{color:#777}.list-group-item.active,.list-group-item.active:focus,.list-group-item.active:hover{z-index:2;color:#fff;background-color:#337ab7;border-color:#337ab7}.list-group-item.active .list-group-item-heading,.list-group-item.active .list-group-item-heading>.small,.list-group-item.active .list-group-item-heading>small,.list-group-item.active:focus .list-group-item-heading,.list-group-item.active:focus .list-group-item-heading>.small,.list-group-item.active:focus .list-group-item-heading>small,.list-group-item.active:hover .list-group-item-heading,.list-group-item.active:hover .list-group-item-heading>.small,.list-group-item.active:hover .list-group-item-heading>small{color:inherit}.list-group-item.active .list-group-item-text,.list-group-item.active:focus .list-group-item-text,.list-group-item.active:hover .list-group-item-text{color:#c7ddef}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success,button.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading,button.list-group-item-success .list-group-item-heading{color:inherit}a.list-group-item-success:focus,a.list-group-item-success:hover,button.list-group-item-success:focus,button.list-group-item-success:hover{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,a.list-group-item-success.active:focus,a.list-group-item-success.active:hover,button.list-group-item-success.active,button.list-group-item-success.active:focus,button.list-group-item-success.active:hover{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info,button.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading,button.list-group-item-info 
.list-group-item-heading{color:inherit}a.list-group-item-info:focus,a.list-group-item-info:hover,button.list-group-item-info:focus,button.list-group-item-info:hover{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,a.list-group-item-info.active:focus,a.list-group-item-info.active:hover,button.list-group-item-info.active,button.list-group-item-info.active:focus,button.list-group-item-info.active:hover{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning,button.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading,button.list-group-item-warning .list-group-item-heading{color:inherit}a.list-group-item-warning:focus,a.list-group-item-warning:hover,button.list-group-item-warning:focus,button.list-group-item-warning:hover{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,a.list-group-item-warning.active:focus,a.list-group-item-warning.active:hover,button.list-group-item-warning.active,button.list-group-item-warning.active:focus,button.list-group-item-warning.active:hover{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger,button.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading,button.list-group-item-danger 
.list-group-item-heading{color:inherit}a.list-group-item-danger:focus,a.list-group-item-danger:hover,button.list-group-item-danger:focus,button.list-group-item-danger:hover{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,a.list-group-item-danger.active:focus,a.list-group-item-danger.active:hover,button.list-group-item-danger.active,button.list-group-item-danger.active:focus,button.list-group-item-danger.active:hover{color:#fff;background-color:#a94442;border-color:#a94442}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0,0,0,.05);box-shadow:0 1px 1px rgba(0,0,0,.05)}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-left-radius:3px;border-top-right-radius:3px}.panel-heading>.dropdown .dropdown-toggle{color:inherit}.panel-title{margin-top:0;margin-bottom:0;font-size:16px;color:inherit}.panel-title>.small,.panel-title>.small>a,.panel-title>a,.panel-title>small,.panel-title>small>a{color:inherit}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.list-group,.panel>.panel-collapse>.list-group{margin-bottom:0}.panel>.list-group .list-group-item,.panel>.panel-collapse>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel>.list-group:first-child .list-group-item:first-child,.panel>.panel-collapse>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-left-radius:3px;border-top-right-radius:3px}.panel>.list-group:last-child .list-group-item:last-child,.panel>.panel-collapse>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.panel-heading+.panel-collapse>.list-group 
.list-group-item:first-child{border-top-left-radius:0;border-top-right-radius:0}.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.list-group+.panel-footer{border-top-width:0}.panel>.panel-collapse>.table,.panel>.table,.panel>.table-responsive>.table{margin-bottom:0}.panel>.panel-collapse>.table caption,.panel>.table caption,.panel>.table-responsive>.table caption{padding-right:15px;padding-left:15px}.panel>.table-responsive:first-child>.table:first-child,.panel>.table:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child,.panel>.table:first-child>thead:first-child>tr:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child{border-top-left-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child 
td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child{border-top-right-radius:3px}.panel>.table-responsive:last-child>.table:last-child,.panel>.table:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child 
td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:3px}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive,.panel>.table+.panel-body,.panel>.table-responsive+.panel-body{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child td,.panel>.table>tbody:first-child>tr:first-child th{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-respon
sive>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th{border-bottom:0}.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}.panel>.table-responsive{margin-bottom:0;border:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:4px}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading{border-bottom:0}.panel-group .panel-heading+.panel-collapse>.list-group,.panel-group .panel-heading+.panel-collapse>.panel-body{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#333;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ddd}.panel-default>.panel-heading 
.badge{color:#f5f5f5;background-color:#333}.panel-default>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#337ab7}.panel-primary>.panel-heading{color:#fff;background-color:#337ab7;border-color:#337ab7}.panel-primary>.panel-heading+.panel-collapse>.panel-body{border-top-color:#337ab7}.panel-primary>.panel-heading .badge{color:#337ab7;background-color:#fff}.panel-primary>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#337ab7}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse>.panel-body{border-top-color:#d6e9c6}.panel-success>.panel-heading .badge{color:#dff0d8;background-color:#3c763d}.panel-success>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse>.panel-body{border-top-color:#bce8f1}.panel-info>.panel-heading .badge{color:#d9edf7;background-color:#31708f}.panel-info>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse>.panel-body{border-top-color:#faebcc}.panel-warning>.panel-heading .badge{color:#fcf8e3;background-color:#8a6d3b}.panel-warning>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ebccd1}.panel-danger>.panel-heading 
.badge{color:#f2dede;background-color:#a94442}.panel-danger>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ebccd1}.embed-responsive{position:relative;display:block;height:0;padding:0;overflow:hidden}.embed-responsive .embed-responsive-item,.embed-responsive embed,.embed-responsive iframe,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-16by9{padding-bottom:56.25%}.embed-responsive-4by3{padding-bottom:75%}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px rgba(0,0,0,.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,.15)}.well-lg{padding:24px;border-radius:6px}.well-sm{padding:9px;border-radius:3px}.close{float:right;font-size:21px;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;filter:alpha(opacity=20);opacity:.2}.close:focus,.close:hover{color:#000;text-decoration:none;cursor:pointer;filter:alpha(opacity=50);opacity:.5}button.close{-webkit-appearance:none;padding:0;cursor:pointer;background:0 0;border:0}.modal-open{overflow:hidden}.modal{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;display:none;overflow:hidden;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transition:-webkit-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out;-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);-o-transform:translate(0,-25%);transform:translate(0,-25%)}.modal.in .modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);-o-transform:translate(0,0);transform:translate(0,0)}.modal-open 
.modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #999;border:1px solid rgba(0,0,0,.2);border-radius:6px;outline:0;-webkit-box-shadow:0 3px 9px rgba(0,0,0,.5);box-shadow:0 3px 9px rgba(0,0,0,.5)}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{filter:alpha(opacity=0);opacity:0}.modal-backdrop.in{filter:alpha(opacity=50);opacity:.5}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,.5);box-shadow:0 5px 15px rgba(0,0,0,.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1070;display:block;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:12px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;filter:alpha(opacity=0);opacity:0;line-break:auto}.tooltip.in{filter:alpha(opacity=90);opacity:.9}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 
5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{right:5px;bottom:0;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-right .tooltip-arrow{bottom:0;left:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-left .tooltip-arrow{top:0;right:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-right .tooltip-arrow{top:0;left:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px rgba(0,0,0,.2);line-break:auto}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 
14px;margin:0;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover>.arrow{border-width:11px}.popover>.arrow:after{content:"";border-width:10px}.popover.top>.arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,.25);border-bottom-width:0}.popover.top>.arrow:after{bottom:1px;margin-left:-10px;content:" ";border-top-color:#fff;border-bottom-width:0}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,.25);border-left-width:0}.popover.right>.arrow:after{bottom:-10px;left:1px;content:" ";border-right-color:#fff;border-left-width:0}.popover.bottom>.arrow{top:-11px;left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,.25)}.popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,.25)}.popover.left>.arrow:after{right:1px;bottom:-10px;content:" ";border-right-width:0;border-left-color:#fff}.carousel{position:relative}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>a>img,.carousel-inner>.item>img{line-height:1}@media all and (transform-3d),(-webkit-transform-3d){.carousel-inner>.item{-webkit-transition:-webkit-transform .6s ease-in-out;-o-transition:-o-transform .6s ease-in-out;transition:transform .6s 
ease-in-out;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-inner>.item.active.right,.carousel-inner>.item.next{left:0;-webkit-transform:translate3d(100%,0,0);transform:translate3d(100%,0,0)}.carousel-inner>.item.active.left,.carousel-inner>.item.prev{left:0;-webkit-transform:translate3d(-100%,0,0);transform:translate3d(-100%,0,0)}.carousel-inner>.item.active,.carousel-inner>.item.next.left,.carousel-inner>.item.prev.right{left:0;-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;bottom:0;left:0;width:15%;font-size:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6);background-color:rgba(0,0,0,0);filter:alpha(opacity=50);opacity:.5}.carousel-control.left{background-image:-webkit-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.5)),to(rgba(0,0,0,.0001)));background-image:linear-gradient(to right,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1);background-repeat:repeat-x}.carousel-control.right{right:0;left:auto;background-image:-webkit-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-webkit-gradient(linear,left top,right 
top,from(rgba(0,0,0,.0001)),to(rgba(0,0,0,.5)));background-image:linear-gradient(to right,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1);background-repeat:repeat-x}.carousel-control:focus,.carousel-control:hover{color:#fff;text-decoration:none;filter:alpha(opacity=90);outline:0;opacity:.9}.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{position:absolute;top:50%;z-index:5;display:inline-block;margin-top:-10px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{left:50%;margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{right:50%;margin-right:-10px}.carousel-control .icon-next,.carousel-control .icon-prev{width:20px;height:20px;font-family:serif;line-height:1}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;padding-left:0;margin-left:-30%;text-align:center;list-style:none}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;cursor:pointer;background-color:#000;background-color:rgba(0,0,0,0);border:1px solid #fff;border-radius:10px}.carousel-indicators .active{width:12px;height:12px;margin:0;background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6)}.carousel-caption .btn{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{width:30px;height:30px;margin-top:-10px;font-size:30px}.carousel-control .glyphicon-chevron-left,.carousel-control 
.icon-prev{margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{margin-right:-10px}.carousel-caption{right:20%;left:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.btn-group-vertical>.btn-group:after,.btn-group-vertical>.btn-group:before,.btn-toolbar:after,.btn-toolbar:before,.clearfix:after,.clearfix:before,.container-fluid:after,.container-fluid:before,.container:after,.container:before,.dl-horizontal dd:after,.dl-horizontal dd:before,.form-horizontal .form-group:after,.form-horizontal .form-group:before,.modal-footer:after,.modal-footer:before,.modal-header:after,.modal-header:before,.nav:after,.nav:before,.navbar-collapse:after,.navbar-collapse:before,.navbar-header:after,.navbar-header:before,.navbar:after,.navbar:before,.pager:after,.pager:before,.panel-body:after,.panel-body:before,.row:after,.row:before{display:table;content:" "}.btn-group-vertical>.btn-group:after,.btn-toolbar:after,.clearfix:after,.container-fluid:after,.container:after,.dl-horizontal dd:after,.form-horizontal .form-group:after,.modal-footer:after,.modal-header:after,.nav:after,.navbar-collapse:after,.navbar-header:after,.navbar:after,.pager:after,.panel-body:after,.row:after{clear:both}.center-block{display:block;margin-right:auto;margin-left:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none!important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-lg,.visible-md,.visible-sm,.visible-xs{display:none!important}.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block{display:none!important}@media 
(max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table!important}tr.visible-xs{display:table-row!important}td.visible-xs,th.visible-xs{display:table-cell!important}}@media (max-width:767px){.visible-xs-block{display:block!important}}@media (max-width:767px){.visible-xs-inline{display:inline!important}}@media (max-width:767px){.visible-xs-inline-block{display:inline-block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table!important}tr.visible-sm{display:table-row!important}td.visible-sm,th.visible-sm{display:table-cell!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-block{display:block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline{display:inline!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline-block{display:inline-block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table!important}tr.visible-md{display:table-row!important}td.visible-md,th.visible-md{display:table-cell!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-block{display:block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline{display:inline!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline-block{display:inline-block!important}}@media (min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table!important}tr.visible-lg{display:table-row!important}td.visible-lg,th.visible-lg{display:table-cell!important}}@media (min-width:1200px){.visible-lg-block{display:block!important}}@media (min-width:1200px){.visible-lg-inline{display:inline!important}}@media (min-width:1200px){.visible-lg-inline-block{display:inline-block!important}}@media (max-width:767px){.hidden-xs{display:none!important}}@media (min-width:768px) and 
(max-width:991px){.hidden-sm{display:none!important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none!important}}@media (min-width:1200px){.hidden-lg{display:none!important}}.visible-print{display:none!important}@media print{.visible-print{display:block!important}table.visible-print{display:table!important}tr.visible-print{display:table-row!important}td.visible-print,th.visible-print{display:table-cell!important}}.visible-print-block{display:none!important}@media print{.visible-print-block{display:block!important}}.visible-print-inline{display:none!important}@media print{.visible-print-inline{display:inline!important}}.visible-print-inline-block{display:none!important}@media print{.visible-print-inline-block{display:inline-block!important}}@media print{.hidden-print{display:none!important}} -/*# sourceMappingURL=bootstrap.min.css.map */ \ No newline at end of file diff --git a/tutorials/training/source_en/_static/css/training.css b/tutorials/training/source_en/_static/css/training.css deleted file mode 100644 index 76124cbaae24754e52be36594c242d05c0afd1b7..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/_static/css/training.css +++ /dev/null @@ -1,113 +0,0 @@ -.doc-filter-btn { - border: 1px solid #BFBFBF; - margin-right: 0.5rem; - font-size: 0.7rem; - color: #444444; - background-color: white; - width: 8.2rem; - height: 1.7rem; - text-align: left; - position: relative; - -} -.doc-stage-detail button{ - margin-bottom: 0.5rem; -} -button.doc-btn{ - background-color: transparent; - outline: none; -} -.doc-btn-color{ - border: 1px solid #379BE6; - color: #379BE6; -} -.doc-btn-hover{ - border: 1px solid #379BE6; - color: #379BE6; -} -.doc-article-list{ - margin-top: 1.1rem; -} -.doc-article-item{ - padding:2.5rem 2.5rem; - margin-bottom: 1.3rem; - border:1px solid #e5e5e5; - border-radius:0.5rem; - width: 1140px; - box-shadow: 0 0 30px 2px rgba(199,196,196,0.50) -} -.doc-article-item a{ - display:block; - 
text-decoration:none!important; -} -.doc-article-head{ - color: #444444; - font-size:0.9rem; - font-weight:bold; - margin-bottom:0.8rem; - text-align:left; -} -.doc-article-desc{ - font-size:0.7rem; - color:#444444; -} -.doc-footer nav ul li a{ - font-size: 0.7rem; -} -.doc-footer nav ul li span{ - font-size: 0.7rem; -} -.doc-title{ - font-size: 1.6rem; - color: #444444; - font-weight: bold; - margin-bottom: 2.2rem; -} -.doc-filter{ - font-size: 0.7rem; - color: #666666; -} -.doc-delete{ - font-size: 0.7rem; - color: #379BE6; - float: right; -} -.doc-condition{ - margin-bottom: 2rem; -} -.doc-label-choice{ - font-size: 0.7rem; - margin-bottom: 0.53rem; -} -.doc-os{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-hardware{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-user{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-stage{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-label-content{ - margin-bottom: 2.2rem; -} -div.col-sm-10{ - padding-left: 2.5rem; -} -.container{ - margin-top: 1rem; - margin-left: -15px; -} -#all{ - border: none; - background-color: transparent; - outline: none; -} - diff --git a/tutorials/training/source_en/_static/img/choice.png b/tutorials/training/source_en/_static/img/choice.png deleted file mode 100644 index 5fb06488a24489616b937778c06af9e8d409046b..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/_static/img/choice.png and /dev/null differ diff --git a/tutorials/training/source_en/_static/js/training.js b/tutorials/training/source_en/_static/js/training.js deleted file mode 100644 index a766d26c2f48ff0efc93e47e958a6c58c137bdbb..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/_static/js/training.js +++ /dev/null @@ -1,251 +0,0 @@ -$(function() { - $("button.doc-btn").hover(function(){ - - //移入事件 - $(this).addClass('doc-btn-hover') - },function(){ - //移出事件 - $(this).removeClass('doc-btn-hover'); - }) - // 每页显示数 - var curNum = 8 - // 计算总数 - var all = 
$('.doc-article-list').children('div.doc-article-item').length; - - var list = [] - - - - $('button.doc-btn').click(function() { - $('.doc-article-item').removeClass('OUO'); - var id_val = $(this).attr('id') - if (id_val !== 'all') { - if ($('#all').hasClass('doc-btn-color')) { - $('#all').removeClass('doc-btn-color').find('img').remove(); - list.splice(list.indexOf('all_exist'), 1); - $('.doc-article-item').removeClass('all_exist'); - } - } else { - $('button.doc-btn-color').each(function() { - var tag = $(this).attr('id'); - $('.' + tag).removeClass(tag + '_exist'); - list.splice(list.indexOf(tag + '_exist'), 1); - }); - - $('button.doc-btn-color').removeClass('doc-btn-color').find('img').remove(); - } - if ($(this).hasClass('doc-btn-color')) { - $(this).removeClass('doc-btn-color').find('img').remove(); - $('.' + id_val).removeClass(id_val + '_exist'); - list.splice(list.indexOf(id_val + '_exist'), 1); - - } else { - if(id_val == 'all'){ - $(this).addClass('doc-btn-color'); - $('.' + id_val).addClass(id_val + '_exist'); - list.push(id_val + '_exist'); - }else{ - $(this).addClass('doc-btn-color').append(''); - $('.' 
+ id_val).addClass(id_val + '_exist'); - list.push(id_val + '_exist'); - } - - } - - if(list.length > 0){ - var os_list = []; - var hardware_list = []; - var user_list = []; - var stage_list = []; - var all_list = []; - var hasWindows = false; - var hasCpu = false; - - $('.doc-article-item').addClass('hidden'); - var str = 'OUO'; - for(var i=0;i -1){ - hasWindows = true; - } - }else if (list[i].indexOf('hardware') == 0){ - hardware_list.push(list[i]); - if (list[i].indexOf('CPU') > -1) { - hasCpu = true; - } - }else if (list[i].indexOf('user') == 0){ - user_list.push(list[i]); - }else if (list[i].indexOf('stage') == 0){ - stage_list.push(list[i]); - }else{ - all_list.push(list[i]); - } - } - - if(!((os_list.length === 1 && hasWindows) && (hardware_list.length && !hasCpu))) { - $('.doc-article-item').each(function(){ - var os_count = 0; - var hardware_count = 0; - var user_count = 0; - var stage_count = 0; - var all_count = 0; - if(os_list.length > 0){ - for(var i=0;i -1){ - os_count += 1; - } - } - }else{ - os_count = 'empty'; - } - - if(hardware_list.length > 0){ - for(var i=0;i -1){ - hardware_count += 1; - } - } - }else{ - hardware_count = 'empty'; - } - - if(user_list.length > 0){ - for(var i=0;i -1){ - user_count += 1; - } - } - }else{ - user_count = 'empty'; - } - - if(stage_list.length > 0){ - for(var i=0;i -1){ - stage_count += 1; - } - } - }else{ - stage_count = 'empty'; - } - - if(all_list.length > 0){ - for(var i=0;i -1){ - all_count += 1; - } - } - }else{ - all_count = 'empty'; - } - - - if(((os_count >0 && os_count <= os_list.length) || os_count=='empty') && ((hardware_count >0 && hardware_count <= hardware_list.length) || hardware_count=='empty') && ((user_count >0 && user_count <= user_list.length) || user_count == 'empty') && ((stage_count >0 && stage_count <= stage_list.length) || stage_count == 'empty')){ - $(this).removeClass('hidden').addClass(str); - } - }); - } - - }else{ - $('.doc-article-item').addClass('hidden'); - } - - var hidden_num = 
$('.doc-article-list').children('.doc-article-item.hidden').length; - var all_article = all - hidden_num - // 计算总页数 - var len = Math.ceil((all - hidden_num) / curNum); - // 生成页码 - var pageList = '
  • ' + 'Total ' + all_article + ' Result(s)' + '
  • ' + '
  • '; - // 当前的索引值 - var iNum = 0; - - for (var i = 0; i < len; i++) { - pageList += '
  • ' + (i + 1) + '
  • ' - } - pageList += '
  • ' - // 首页加亮显示 - if (all_article > 0){ - $('#pageNav').html(pageList).find('li').eq(2).addClass('active'); - }else{ - $('#pageNav').html('
  • ' + 'Total ' + all_article + ' Result(s)' + '
  • '); - } - - // 标签页的点击事件 - $('#pageNav').find('li.doc-data').each(function() { - $(this).click(function() { - $(this).addClass('active').siblings('li').removeClass('active'); - iNum = $(this).index() - 2; - if(iNum > 0){ - $('li.pre').removeClass('disabled'); - }else{ - $('li.pre').addClass('disabled'); - } - if(iNum+1 == len){ - $('li.nex').addClass('disabled'); - } - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - - }); - }); - if(iNum == 0){ - $('li.pre').addClass('disabled'); - } - - if(iNum+1 == len){ - $('li.nex').addClass('disabled'); - } - // 向前页点击时间 - $('li.pre').click(function(){ - if(iNum > 0){ - iNum -= 1; - if(iNum == 0){ - $(this).addClass('disabled'); - } - $('li.nex').removeClass('disabled'); - $('#pageNav').find('li.doc-data').eq(iNum).addClass('active').siblings('li').removeClass('active'); - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - } - - }); - - // 向后页点击事件 - $('li.nex').click(function(){ - if(iNum+1 < len){ - iNum += 1; - if(iNum+1 == len){ - $(this).addClass('disabled'); - } - $('li.pre').removeClass('disabled'); - $('#pageNav').find('li.doc-data').eq(iNum).addClass('active').siblings('li').removeClass('active'); - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - } - }); - - // 首页的显示 - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = 0; i < curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show(); - } - - if ($('button.doc-btn-color').length == 0) { - $('#all').trigger('click'); - } - }); 
- - - $('#all').trigger('click'); - - }); - - - diff --git a/tutorials/training/source_en/_static/logo_notebook.png b/tutorials/training/source_en/_static/logo_notebook.png deleted file mode 100644 index f28598315f19f4be76a73ddf5dc6bbdbe4db35fd..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/_static/logo_notebook.png and /dev/null differ diff --git a/tutorials/training/source_en/_static/logo_source.png b/tutorials/training/source_en/_static/logo_source.png deleted file mode 100644 index 9932d67ab50871edb0c95979c4e948c812c7cdea..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/_static/logo_source.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/apply_deep_probability_programming.md b/tutorials/training/source_en/advanced_use/apply_deep_probability_programming.md deleted file mode 100644 index ce36e0cc0f4de63d2cc8824c589f792d5813957b..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/apply_deep_probability_programming.md +++ /dev/null @@ -1,658 +0,0 @@ -# Deep Probabilistic Programming - -`Ascend` `GPU` `Whole Process` `Beginner` `Intermediate` `Expert` - - - -- [Deep Probabilistic Programming](#deep-probabilistic-programming) - - [Overview](#overview) - - [Using BNN](#using-bnn) - - [Processing the Dataset](#processing-the-dataset) - - [Defining the BNN](#defining-the-bnn) - - [Defining the Loss Function and Optimizer](#defining-the-loss-function-and-optimizer) - - [Training the Network](#training-the-network) - - [Using the VAE](#using-the-vae) - - [Defining the VAE](#defining-the-vae) - - [Defining the Loss Function and Optimizer](#defining-the-loss-function-and-optimizer-1) - - [Processing Data](#processing-data) - - [Training the Network](#training-the-network-1) - - [Generating New Samples or Rebuilding Input Samples](#generating-new-samples-or-rebuilding-input-samples) - - [One-click Conversion from DNN to 
BNN](#one-click-conversion-from-dnn-to-bnn) - - [Defining the DNN Model](#defining-the-dnn-model) - - [Defining the Loss Function and Optimizer](#defining-the-loss-function-and-optimizer-2) - - [Instantiating TransformToBNN](#instantiating-transformtobnn) - - [Function 1: Converting the Entire Model](#function-1-converting-the-entire-model) - - [Function 2: Converting a Layer of a Specified Type](#function-2-converting-a-layer-of-a-specified-type) - - [Using the Uncertainty Evaluation Toolbox](#using-the-uncertainty-evaluation-toolbox) - - - - - -## Overview - -A deep learning model has a strong fitting capability, and the Bayesian theory has a good explainability. MindSpore Deep Probabilistic Programming (MDP) combines deep learning and Bayesian learning. By setting a network weight to distribution and introducing latent space distribution, MDP can sample the distribution and forward propagation, which introduces uncertainty and enhances the robustness and explainability of a model. MDP contains general-purpose and professional probabilistic learning programming languages. It is applicable to professional users as well as beginners because the probabilistic programming supports the logic for developing deep learning models. In addition, it provides a toolbox for deep probabilistic learning to expand Bayesian application functions. - -The following describes applications of deep probabilistic programming in MindSpore. Before performing the practice, ensure that MindSpore 0.7.0-beta or a later version has been installed. The contents are as follows: - -1. Describe how to use the [bnn_layers module](https://gitee.com/mindspore/mindspore/tree/master/mindspore/nn/probability/bnn_layers) to implement the Bayesian neural network (BNN). -2. 
Describe how to use the [variational module](https://gitee.com/mindspore/mindspore/tree/master/mindspore/nn/probability/infer/variational) and [dpn module](https://gitee.com/mindspore/mindspore/tree/master/mindspore/nn/probability/dpn) to implement the Variational Autoencoder (VAE). -3. Describe how to use the [transforms module](https://gitee.com/mindspore/mindspore/tree/master/mindspore/nn/probability/transforms) to implement one-click conversion from deep neural network (DNN) to BNN. -4. Describe how to use the [toolbox module](https://gitee.com/mindspore/mindspore/blob/master/mindspore/nn/probability/toolbox/uncertainty_evaluation.py) to implement uncertainty evaluation. - -## Using BNN - -BNN is a basic model composed of probabilistic model and neural network. Its weight is not a definite value, but a distribution. The following example describes how to use the bnn_layers module in MDP to implement a BNN, and then use the BNN to implement a simple image classification function. The overall process is as follows: - -1. Process the MNIST dataset. -2. Define the Bayes LeNet. -3. Define the loss function and optimizer. -4. Load and train the dataset. - -> This example is for the GPU or Ascend 910 AI processor platform. You can download the complete sample code from . -> BNN only supports GRAPH mode now, please set `context.set_context(mode=context.GRAPH_MODE)` in your code. - -### Processing the Dataset - -The MNIST dataset is used in this example. The data processing is the same as that of [Implementing an Image Classification Application](https://www.mindspore.cn/tutorial/training/en/master/quick_start/quick_start.html) in the tutorial. - -### Defining the BNN - -Bayesian LeNet is used in this example. The method of building a BNN by using the bnn_layers module is the same as that of building a common neural network. Note that `bnn_layers` and common neural network layers can be combined with each other. 
- -```python -import mindspore.nn as nn -from mindspore.nn.probability import bnn_layers -import mindspore.ops as ops - -class BNNLeNet5(nn.Cell): - """ - bayesian Lenet network - - Args: - num_class (int): Num classes. Default: 10. - - Returns: - Tensor, output tensor - Examples: - >>> BNNLeNet5(num_class=10) - - """ - def __init__(self, num_class=10): - super(BNNLeNet5, self).__init__() - self.num_class = num_class - self.conv1 = bnn_layers.ConvReparam(1, 6, 5, stride=1, padding=0, has_bias=False, pad_mode="valid") - self.conv2 = bnn_layers.ConvReparam(6, 16, 5, stride=1, padding=0, has_bias=False, pad_mode="valid") - self.fc1 = bnn_layers.DenseReparam(16 * 5 * 5, 120) - self.fc2 = bnn_layers.DenseReparam(120, 84) - self.fc3 = bnn_layers.DenseReparam(84, self.num_class) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - self.reshape = ops.Reshape() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x -``` - -### Defining the Loss Function and Optimizer - -A loss function and an optimizer need to be defined. The loss function is a training objective of the deep learning, and is also referred to as an objective function. The loss function indicates the distance between a logit of a neural network and a label, and is scalar data. - -Common loss functions include mean square error, L2 loss, Hinge loss, and cross entropy. Cross entropy is usually used for image classification. - -The optimizer is used for neural network solution (training). Because of the large scale of neural network parameters, the stochastic gradient descent (SGD) algorithm and its improved algorithm are used in deep learning to solve the problem. 
MindSpore encapsulates common optimizers, such as `SGD`, `Adam`, and `Momemtum`. In this example, the `Adam` optimizer is used. Generally, two parameters need to be set: learning rate (`learning_rate`) and weight attenuation (`weight_decay`). - -An example of the code for defining the loss function and optimizer in MindSpore is as follows: - -```python -# loss function definition -criterion = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - -# optimization definition -optimizer = AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001) -``` - -### Training the Network - -The training process of BNN is similar to that of DNN. The only difference is that `WithLossCell` is replaced with `WithBNNLossCell` applicable to BNN. In addition to the `backbone` and `loss_fn` parameters, the `dnn_factor` and `bnn_factor` parameters are added to `WithBNNLossCell`. `dnn_factor` is a coefficient of the overall network loss computed by a loss function, and `bnn_factor` is a coefficient of the KL divergence of each Bayesian layer. The two parameters are used to balance the overall network loss and the KL divergence of the Bayesian layer, preventing the overall network loss from being covered by a large KL divergence. - -```python -net_with_loss = bnn_layers.WithBNNLossCell(network, criterion, dnn_factor=60000, bnn_factor=0.000001) -train_bnn_network = TrainOneStepCell(net_with_loss, optimizer) -train_bnn_network.set_train() - -train_set = create_dataset('./mnist_data/train', 64, 1) -test_set = create_dataset('./mnist_data/test', 64, 1) - -epoch = 10 - -for i in range(epoch): - train_loss, train_acc = train_model(train_bnn_network, network, train_set) - - valid_acc = validate_model(network, test_set) - - print('Epoch: {} \tTraining Loss: {:.4f} \tTraining Accuracy: {:.4f} \tvalidation Accuracy: {:.4f}'. 
- format(i, train_loss, train_acc, valid_acc)) -``` - -The code examples of `train_model` and `validate_model` in MindSpore are as follows: - -```python -def train_model(train_net, net, dataset): - accs = [] - loss_sum = 0 - for _, data in enumerate(dataset.create_dict_iterator()): - train_x = data['image'] - label = data['label'] - loss = train_net(train_x, label) - output = net(train_x) - log_output = ops.LogSoftmax(axis=1)(output) - acc = np.mean(log_output.asnumpy().argmax(axis=1) == label.asnumpy()) - accs.append(acc) - loss_sum += loss.asnumpy() - - loss_sum = loss_sum / len(accs) - acc_mean = np.mean(accs) - return loss_sum, acc_mean - - -def validate_model(net, dataset): - accs = [] - for _, data in enumerate(dataset.create_dict_iterator()): - train_x = data['image'] - label = data['label'] - output = net(train_x) - log_output = ops.LogSoftmax(axis=1)(output) - acc = np.mean(log_output.asnumpy().argmax(axis=1) == label.asnumpy()) - accs.append(acc) - - acc_mean = np.mean(accs) - return acc_mean -``` - -## Using the VAE - -The following describes how to use the variational and dpn modules in MDP to implement VAE. VAE is a typical depth probabilistic model that applies variational inference to learn the representation of latent variables. The model can not only compress input data, but also generate new images of this type. The overall process is as follows: - -1. Define a VAE. -2. Define the loss function and optimizer. -3. Process data. -4. Train the network. -5. Generate new samples or rebuild input samples. - -> This example is for the GPU or Ascend 910 AI processor platform. You can download the complete sample code from . - -### Defining the VAE - -Using the dpn module to build a VAE is simple. You only need to customize the encoder and decoder (DNN model) and call the `VAE` API. 
- -```python -class Encoder(nn.Cell): - def __init__(self): - super(Encoder, self).__init__() - self.fc1 = nn.Dense(1024, 800) - self.fc2 = nn.Dense(800, 400) - self.relu = nn.ReLU() - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - return x - - -class Decoder(nn.Cell): - def __init__(self): - super(Decoder, self).__init__() - self.fc1 = nn.Dense(400, 1024) - self.sigmoid = nn.Sigmoid() - self.reshape = ops.Reshape() - - def construct(self, z): - z = self.fc1(z) - z = self.reshape(z, IMAGE_SHAPE) - z = self.sigmoid(z) - return z - - -encoder = Encoder() -decoder = Decoder() -vae = VAE(encoder, decoder, hidden_size=400, latent_size=20) -``` - -### Defining the Loss Function and Optimizer - -A loss function and an optimizer need to be defined. The loss function used in this example is `ELBO`, which is a loss function dedicated to variational inference. The optimizer used in this example is `Adam`. -An example of the code for defining the loss function and optimizer in MindSpore is as follows: - -```python -# loss function definition -net_loss = ELBO(latent_prior='Normal', output_prior='Normal') - -# optimization definition -optimizer = nn.Adam(params=vae.trainable_params(), learning_rate=0.001) - -net_with_loss = nn.WithLossCell(vae, net_loss) -``` - -### Processing Data - -The MNIST dataset is used in this example. The data processing is the same as that of [Implementing an Image Classification Application](https://www.mindspore.cn/tutorial/training/en/master/quick_start/quick_start.html) in the tutorial. - -### Training the Network - -Use the `SVI` API in the variational module to train a VAE network. 
- -```python -from mindspore.nn.probability.infer import SVI - -vi = SVI(net_with_loss=net_with_loss, optimizer=optimizer) -vae = vi.run(train_dataset=ds_train, epochs=10) -trained_loss = vi.get_train_loss() -``` - -You can use `vi.run` to obtain a trained network and use `vi.get_train_loss` to obtain the loss after training. - -### Generating New Samples or Rebuilding Input Samples - -With a trained VAE network, we can generate new samples or rebuild input samples. - -```python -IMAGE_SHAPE = (-1, 1, 32, 32) -generated_sample = vae.generate_sample(64, IMAGE_SHAPE) -for sample in ds_train.create_dict_iterator(): - sample_x = Tensor(sample['image'].asnumpy(), dtype=mstype.float32) - reconstructed_sample = vae.reconstruct_sample(sample_x) -``` - -## One-click Conversion from DNN to BNN - -For DNN researchers unfamiliar with the Bayesian model, MDP provides an advanced API `TransformToBNN` to convert the DNN model into the BNN model with one click. Currently, the API can be used in the LeNet, ResNet, MobileNet and VGG models. This example describes how to use the `TransformToBNN` API in the transforms module to convert DNNs into BNNs with one click. The overall process is as follows: - -1. Define a DNN model. -2. Define the loss function and optimizer. -3. Function 1: Convert the entire model. -4. Function 2: Convert a layer of a specified type. - -> This example is for the GPU or Ascend 910 AI processor platform. You can download the complete sample code from . - -### Defining the DNN Model - -LeNet is used as a DNN model in this example. 
- -```python -from mindspore.common.initializer import TruncatedNormal -import mindspore.nn as nn -import mindspore.ops as ops - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0): - """weight initial for conv layer""" - weight = weight_variable() - return nn.Conv2d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, padding=padding, - weight_init=weight, has_bias=False, pad_mode="valid") - - -def fc_with_initialize(input_channels, out_channels): - """weight initial for fc layer""" - weight = weight_variable() - bias = weight_variable() - return nn.Dense(input_channels, out_channels, weight, bias) - - -def weight_variable(): - """weight initial""" - return TruncatedNormal(0.02) - - -class LeNet5(nn.Cell): - """ - Lenet network - - Args: - num_class (int): Num classes. Default: 10. - - Returns: - Tensor, output tensor - Examples: - >>> LeNet5(num_class=10) - - """ - def __init__(self, num_class=10): - super(LeNet5, self).__init__() - self.num_class = num_class - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16 * 5 * 5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, self.num_class) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - self.reshape = ops.Reshape() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x -``` - -The following shows the LeNet architecture. 
- -```text -LeNet5 - (conv1) Conv2dinput_channels=1, output_channels=6, kernel_size=(5, 5),stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, has_bias=False - (conv2) Conv2dinput_channels=6, output_channels=16, kernel_size=(5, 5),stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, has_bias=False - (fc1) Densein_channels=400, out_channels=120, weight=Parameter (name=fc1.weight), has_bias=True, bias=Parameter (name=fc1.bias) - (fc2) Densein_channels=120, out_channels=84, weight=Parameter (name=fc2.weight), has_bias=True, bias=Parameter (name=fc2.bias) - (fc3) Densein_channels=84, out_channels=10, weight=Parameter (name=fc3.weight), has_bias=True, bias=Parameter (name=fc3.bias) - (relu) ReLU - (max_pool2d) MaxPool2dkernel_size=2, stride=2, pad_mode=VALID - (flatten) Flatten -``` - -### Defining the Loss Function and Optimizer - -A loss function and an optimizer need to be defined. In this example, the cross entropy loss is used as the loss function, and `Adam` is used as the optimizer. 
- -```python -network = LeNet5() - -# loss function definition -criterion = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - -# optimization definition -optimizer = AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001) - -net_with_loss = WithLossCell(network, criterion) -train_network = TrainOneStepCell(net_with_loss, optimizer) -``` - -### Instantiating TransformToBNN - -The `__init__` function of `TransformToBNN` is defined as follows: - -```python -class TransformToBNN: - def __init__(self, trainable_dnn, dnn_factor=1, bnn_factor=1): - net_with_loss = trainable_dnn.network - self.optimizer = trainable_dnn.optimizer - self.backbone = net_with_loss.backbone_network - self.loss_fn = getattr(net_with_loss, "_loss_fn") - self.dnn_factor = dnn_factor - self.bnn_factor = bnn_factor - self.bnn_loss_file = None -``` - -The `trainable_bnn` parameter is a trainable DNN model packaged by `TrainOneStepCell`, `dnn_factor` and `bnn_factor` are the coefficient of the overall network loss computed by the loss function and the coefficient of the KL divergence of each Bayesian layer, respectively. -The code for instantiating `TransformToBNN` in MindSpore is as follows: - -```python -from mindspore.nn.probability import transforms - -bnn_transformer = transforms.TransformToBNN(train_network, 60000, 0.000001) -``` - -### Function 1: Converting the Entire Model - -The `transform_to_bnn_model` method can convert the entire DNN model into a BNN model. 
The definition is as follows: - -```python - def transform_to_bnn_model(self, - get_dense_args=lambda dp: {"in_channels": dp.in_channels, "has_bias": dp.has_bias, - "out_channels": dp.out_channels, "activation": dp.activation}, - get_conv_args=lambda dp: {"in_channels": dp.in_channels, "out_channels": dp.out_channels, - "pad_mode": dp.pad_mode, "kernel_size": dp.kernel_size, - "stride": dp.stride, "has_bias": dp.has_bias, - "padding": dp.padding, "dilation": dp.dilation, - "group": dp.group}, - add_dense_args=None, - add_conv_args=None): - r""" - Transform the whole DNN model to BNN model, and wrap BNN model by TrainOneStepCell. - - Args: - get_dense_args (function): The arguments gotten from the DNN full connection layer. Default: lambda dp: - {"in_channels": dp.in_channels, "out_channels": dp.out_channels, "has_bias": dp.has_bias}. - get_conv_args (function): The arguments gotten from the DNN convolutional layer. Default: lambda dp: - {"in_channels": dp.in_channels, "out_channels": dp.out_channels, "pad_mode": dp.pad_mode, - "kernel_size": dp.kernel_size, "stride": dp.stride, "has_bias": dp.has_bias}. - add_dense_args (dict): The new arguments added to BNN full connection layer. Default: {}. - add_conv_args (dict): The new arguments added to BNN convolutional layer. Default: {}. - - Returns: - Cell, a trainable BNN model wrapped by TrainOneStepCell. - """ -``` - -The `get_dense_args` parameter specifies parameters to be obtained from the fully connected layer of a DNN model, and the `get_conv_args` parameter specifies parameters to be obtained from the convolutional layer of the DNN model, the `add_dense_args` and `add_conv_args` parameters specify new parameter values to be specified for the BNN layer. Note that parameters in `add_dense_args` cannot be the same as those in `get_dense_args`. The same rule applies to `add_conv_args` and `get_conv_args`. 
- -The code for converting the entire DNN model into a BNN model in MindSpore is as follows: - -```python -train_bnn_network = bnn_transformer.transform_to_bnn_model() -``` - -The structure of the converted model is as follows: - -```text -LeNet5 - (conv1) ConvReparam - in_channels=1, out_channels=6, kernel_size=(5, 5), stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, weight_mean=Parameter (name=conv1.weight_posterior.mean), weight_std=Parameter (name=conv1.weight_posterior.untransformed_std), has_bias=False - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (conv2) ConvReparam - in_channels=6, out_channels=16, kernel_size=(5, 5), stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, weight_mean=Parameter (name=conv2.weight_posterior.mean), weight_std=Parameter (name=conv2.weight_posterior.untransformed_std), has_bias=False - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (fc1) DenseReparam - in_channels=400, out_channels=120, weight_mean=Parameter (name=fc1.weight_posterior.mean), weight_std=Parameter (name=fc1.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter (name=fc1.bias_posterior.mean), bias_std=Parameter (name=fc1.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (fc2) DenseReparam - in_channels=120, out_channels=84, weight_mean=Parameter (name=fc2.weight_posterior.mean), weight_std=Parameter (name=fc2.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter 
(name=fc2.bias_posterior.mean), bias_std=Parameter (name=fc2.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (fc3) DenseReparam - in_channels=84, out_channels=10, weight_mean=Parameter (name=fc3.weight_posterior.mean), weight_std=Parameter (name=fc3.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter (name=fc3.bias_posterior.mean), bias_std=Parameter (name=fc3.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (relu) ReLU - (max_pool2d) MaxPool2dkernel_size=2, stride=2, pad_mode=VALID - (flatten) Flatten -``` - -The convolutional layer and fully connected layer on the entire LeNet are converted into the corresponding Bayesian layers. - -### Function 2: Converting a Layer of a Specified Type - -The `transform_to_bnn_layer` method can convert a layer of a specified type (nn.Dense or nn.Conv2d) in the DNN model into a corresponding Bayesian layer. The definition is as follows: - -```python - def transform_to_bnn_layer(self, dnn_layer, bnn_layer, get_args=None, add_args=None): - r""" - Transform a specific type of layers in DNN model to corresponding BNN layer. - - Args: - dnn_layer_type (Cell): The type of DNN layer to be transformed to BNN layer. The optional values are - nn.Dense, nn.Conv2d. - bnn_layer_type (Cell): The type of BNN layer to be transformed to. The optional values are - DenseReparameterization, ConvReparameterization. 
- get_args (dict): The arguments gotten from the DNN layer. Default: None. - add_args (dict): The new arguments added to BNN layer. Default: None. - - Returns: - Cell, a trainable model wrapped by TrainOneStepCell, whose sprcific type of layer is transformed to the corresponding bayesian layer. - """ -``` - -The `dnn_layer` parameter specifies a type of a DNN layer to be converted into a BNN layer, and the `bnn_layer` parameter specifies a type of a BNN layer to be converted into a DNN layer, `get_args` and `add_args` specify parameters obtained from the DNN layer and the parameters to be re-assigned to the BNN layer, respectively. - -The code for converting a Dense layer in a DNN model into a corresponding Bayesian layer `DenseReparam` in MindSpore is as follows: - -```python -train_bnn_network = bnn_transformer.transform_to_bnn_layer(nn.Dense, bnn_layers.DenseReparam) -``` - -The network structure after the conversion is as follows: - -```text -LeNet5 - (conv1) Conv2dinput_channels=1, output_channels=6, kernel_size=(5, 5),stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, has_bias=False - (conv2) Conv2dinput_channels=6, output_channels=16, kernel_size=(5, 5),stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, has_bias=False - (fc1) DenseReparam - in_channels=400, out_channels=120, weight_mean=Parameter (name=fc1.weight_posterior.mean), weight_std=Parameter (name=fc1.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter (name=fc1.bias_posterior.mean), bias_std=Parameter (name=fc1.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (fc2) DenseReparam - in_channels=120, out_channels=84, weight_mean=Parameter 
(name=fc2.weight_posterior.mean), weight_std=Parameter (name=fc2.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter (name=fc2.bias_posterior.mean), bias_std=Parameter (name=fc2.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (fc3) DenseReparam - in_channels=84, out_channels=10, weight_mean=Parameter (name=fc3.weight_posterior.mean), weight_std=Parameter (name=fc3.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter (name=fc3.bias_posterior.mean), bias_std=Parameter (name=fc3.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (relu) ReLU - (max_pool2d) MaxPool2dkernel_size=2, stride=2, pad_mode=VALID - (flatten) Flatten -``` - -As shown in the preceding information, the convolutional layer on the LeNet remains unchanged, and the fully connected layer becomes the corresponding Bayesian layer `DenseReparam`. - -## Using the Uncertainty Evaluation Toolbox - -One of advantages of BNN is that uncertainty can be obtained. MDP provides a toolbox for uncertainty evaluation at the upper layer. Users can easily use the toolbox to compute uncertainty. Uncertainty means an uncertain degree of a prediction result of a deep learning model. Currently, most deep learning algorithm can only provide prediction results but cannot determine the result reliability. There are two types of uncertainties: aleatoric uncertainty and epistemic uncertainty. 
- -- Aleatoric uncertainty: Internal noises in data, that is, unavoidable errors. This uncertainty cannot be reduced by adding sampling data. -- Epistemic uncertainty: An inaccurate evaluation of input data by a model due to reasons such as poor training or insufficient training data. This may be reduced by adding training data. - -The uncertainty evaluation toolbox is applicable to mainstream deep learning models, such as regression and classification. During inference, developers can use the toolbox to obtain any aleatoric uncertainty and epistemic uncertainty by training models and training datasets and specifying tasks and samples to be evaluated. Developers can understand models and datasets based on uncertainty information. -> This example is for the GPU or Ascend 910 AI processor platform. You can download the complete sample code from . - -The classification task is used as an example. The model is LeNet, the dataset is MNIST, and the data processing is the same as that of [Implementing an Image Classification Application](https://www.mindspore.cn/tutorial/training/en/master/quick_start/quick_start.html) in the tutorial. 
To evaluate the uncertainty of the test example, use the toolbox as follows: - -```python -from mindspore.nn.probability.toolbox.uncertainty_evaluation import UncertaintyEvaluation -from mindspore import load_checkpoint, load_param_into_net - -network = LeNet5() -param_dict = load_checkpoint('checkpoint_lenet.ckpt') -load_param_into_net(network, param_dict) -# get train and eval dataset -ds_train = create_dataset('workspace/mnist/train') -ds_eval = create_dataset('workspace/mnist/test') -evaluation = UncertaintyEvaluation(model=network, - train_dataset=ds_train, - task_type='classification', - num_classes=10, - epochs=1, - epi_uncer_model_path=None, - ale_uncer_model_path=None, - save_model=False) -for eval_data in ds_eval.create_dict_iterator(): - eval_data = Tensor(eval_data['image'].asnumpy(), mstype.float32) - epistemic_uncertainty = evaluation.eval_epistemic_uncertainty(eval_data) - aleatoric_uncertainty = evaluation.eval_aleatoric_uncertainty(eval_data) -``` diff --git a/tutorials/training/source_en/advanced_use/apply_gradient_accumulation.md b/tutorials/training/source_en/advanced_use/apply_gradient_accumulation.md deleted file mode 100644 index 1c678c26d63ffa441f550e434d17eed7d0731ccf..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/apply_gradient_accumulation.md +++ /dev/null @@ -1,621 +0,0 @@ -# Applying a Gradient Accumulation Algorithm - -`Linux` `GPU` `Model Optimization` `Intermediate` `Expert` - - - -- [Applying a Gradient Accumulation Algorithm](#applying-a-gradient-accumulation-algorithm) - - [Overview](#overview) - - [Standalone Mode](#standalone-mode) - - [Importing Library Files](#importing-library-files) - - [Loading the Dataset](#loading-the-dataset) - - [Defining the Network](#defining-the-network) - - [Defining the Training Process](#defining-the-training-process) - - [Defining the Training Model](#defining-the-training-model) - - [Training and Saving the Model](#training-and-saving-the-model) - - 
[Experiment Result](#experiment-result) - - [Parallel Mode](#parallel-mode) - - [Defining the Parallel Training Process](#defining-the-parallel-training-process) - - [Defining the Parallel Training Model](#defining-the-parallel-training-model) - - [Training the Model](#training-the-model) - - - - - -## Overview - -This tutorial describes the gradient accumulation training methods to solve the problem that some large-scale networks cannot train large batch_size due to insufficient memory. - -In a traditional training method, after a loss and a gradient are computed each time, a parameter is directly updated by using the obtained gradient. - -Compared to the traditional training method, mini-batch is introduced to the gradient accumulation. The loss and gradient are computed for each mini-batch data, but the model parameters are not updated immediately. Instead, the obtained gradients are accumulated first, and then after a specified number (N) of mini-batches, the accumulated gradient is used to update the network parameters. Before the next training, the accumulated gradients are cleared and re-accumulated. The ultimate objective is to achieve the same effect as training with N x Mini-batch data. - -This tutorial describes how to implement gradient accumulation training in standalone mode and parallel mode, respectively. - -## Standalone Mode - -In standalone mode, the training process consists of three parts: forward and backward training, parameter update, and accumulated gradient clearance. MNIST is used as an example dataset. To customize a simple model to implement gradient accumulation, perform the following steps: - -> Download the main training sample code: - -### Importing Library Files - -The following are the required public modules and MindSpore modules and library files. 
- -```python -import argparse -import os -from collections.abc import Iterable - -import mindspore.nn as nn -from mindspore import ParameterTuple -from mindspore import context, DatasetHelper, save_checkpoint -from mindspore.nn import Cell -import mindspore.ops as ops -from model_zoo.official.cv.lenet.src.dataset import create_dataset -from model_zoo.official.cv.lenet.src.lenet import LeNet5 -``` - -### Loading the Dataset - -Use the `MnistDataset` API provided by `dataset` of MindSpore to load the MNIST dataset. The code is imported from [dataset.py]() in the `lenet` directory of `model_zoo`. - -### Defining the Network - -LeNet is used as an example network. You can also use other networks, such as ResNet-50 and BERT. The code is imported from [lenet.py]() in the `lenet` directory of `model_zoo`. - -### Defining the Training Process - -The training process consists of three parts: forward and backward training, parameter update, and accumulated gradient clearance. - -- `TrainForwardBackward` calculates the loss and gradient, and uses grad_sum to implement gradient accumulation. -- `TrainOptim` updates parameters. -- `TrainClear` clears the gradient accumulation variable grad_sum. 
- -```python -_sum_op = ops.MultitypeFuncGraph("grad_sum_op") -_clear_op = ops.MultitypeFuncGraph("clear_op") - - -@_sum_op.register("Tensor", "Tensor") -def _cumulative_grad(grad_sum, grad): - """Apply grad sum to cumulative gradient.""" - add = ops.AssignAdd() - return add(grad_sum, grad) - - -@_clear_op.register("Tensor", "Tensor") -def _clear_grad_sum(grad_sum, zero): - """Apply zero to clear grad_sum.""" - success = True - success = ops.depend(success, ops.assign(grad_sum, zero)) - return success - - -class TrainForwardBackward(Cell): - def __init__(self, network, optimizer, grad_sum, sens=1.0): - super(TrainForwardBackward, self).__init__(auto_prefix=False) - self.network = network - self.network.set_grad() - self.network.add_flags(defer_inline=True) - self.weights = ParameterTuple(network.trainable_params()) - self.optimizer = optimizer - self.grad_sum = grad_sum - self.grad = ops.GradOperation(get_by_list=True, sens_param=True) - self.sens = sens - self.hyper_map = ops.HyperMap() - - def construct(self, *inputs): - weights = self.weights - loss = self.network(*inputs) - sens = ops.Fill()(ops.DType()(loss), ops.Shape()(loss), self.sens) - grads = self.grad(self.network, weights)(*inputs, sens) - return ops.depend(loss, self.hyper_map(ops.partial(_sum_op), self.grad_sum, grads)) - - -class TrainOptim(Cell): - def __init__(self, optimizer, grad_sum): - super(TrainOptim, self).__init__(auto_prefix=False) - self.optimizer = optimizer - self.grad_sum = grad_sum - - def construct(self): - return self.optimizer(self.grad_sum) - - -class TrainClear(Cell): - def __init__(self, grad_sum, zeros): - super(TrainClear, self).__init__(auto_prefix=False) - self.grad_sum = grad_sum - self.zeros = zeros - self.hyper_map = ops.HyperMap() - - def construct(self): - success = self.hyper_map(ops.partial(_clear_op), self.grad_sum, self.zeros) - return success -``` - -### Defining the Training Model - -Each mini-batch computes the loss and gradient through forward and backward 
training, and uses mini_steps to control the accumulated times before each parameter update. After the number of accumulation times is reached, the parameter is updated -and the accumulated gradient variable is cleared. - -```python -class GradientAccumulation: - def __init__(self, network, loss_fn, optimizer): - self._network = network - self._loss_fn = loss_fn - self._optimizer = optimizer - - params = self._optimizer.parameters - self._grad_sum = params.clone(prefix="grad_sum", init='zeros') - self._zeros = params.clone(prefix="zeros", init='zeros') - self._train_forward_backward = self._build_train_forward_backward_network() - self._train_optim = self._build_train_optim() - self._train_clear = self._build_train_clear() - - @staticmethod - def _transform_callbacks(callbacks): - """Transform callback to a list.""" - if callbacks is None: - return [] - - if isinstance(callbacks, Iterable): - return list(callbacks) - - return [callbacks] - - def _build_train_forward_backward_network(self): - """Build forward and backward network""" - network = self._network - network = nn.WithLossCell(network, self._loss_fn) - loss_scale = 1.0 - network = TrainForwardBackward(network, self._optimizer, self._grad_sum, loss_scale).set_train() - return network - - def _build_train_optim(self): - """Build optimizer network""" - network = TrainOptim(self._optimizer, self._grad_sum).set_train() - return network - - def _build_train_clear(self): - """Build clear network""" - network = TrainClear(self._grad_sum, self._zeros).set_train() - return network - - def train_process(self, epoch, train_dataset, mini_steps=None): - """ - Training process. The data would be passed to network directly. 
- """ - dataset_helper = DatasetHelper(train_dataset, dataset_sink_mode=False, epoch_num=epoch) - - for i in range(epoch): - step = 0 - for k, next_element in enumerate(dataset_helper): - loss = self._train_forward_backward(*next_element) - if (k + 1) % mini_steps == 0: - step += 1 - print("epoch:", i + 1, "step:", step, "loss is ", loss) - self._train_optim() - self._train_clear() - - train_dataset.reset() - - save_checkpoint(self._train_forward_backward, "gradient_accumulation.ckpt", ) -``` - -### Training and Saving the Model - -Call the network, optimizer, and loss function, and then customize the `train_process` API of `GradientAccumulation` to train the model. - -```python -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='MindSpore Grad Cumulative Example') - parser.add_argument('--device_target', type=str, default="GPU", choices=['GPU'], - help='device where the code will be implemented (default: GPU)') - parser.add_argument('--data_path', type=str, default="./Data", - help='path where the dataset is saved') - args = parser.parse_args() - - context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target) - ds_train = create_dataset(os.path.join(args.data_path, "train"), 32) - - net = LeNet5(10) - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.Momentum(net.trainable_params(), 0.01, 0.9) - model = GradientAccumulation(net, net_loss, net_opt) - - print("============== Starting Training ==============") - model.train_process(10, ds_train, mini_steps=4) -``` - -## Experiment Result - -After 10 epochs, the accuracy on the test set is about 96.31%. - -**Start training.** - -1. Run the training code and view the running result. - - ```shell - python train.py --data_path=./MNIST_Data - ``` - - The output is as follows. You can see that the loss value decreases with the training. - - ```shell - epoch: 1 step: 27 loss is 0.3660637 - epoch: 1 step: 28 loss is 0.25238192 - ... 
- epoch: 3 step: 2 loss is 0.12296932 - epoch: 3 step: 3 loss is 0.15799297 - ... - epoch: 10 step: 448 loss is 0.06443884 - epoch: 10 step: 449 loss is 0.0067842817 - ``` - -2. Check the saved checkpoint files. - - The checkpoint file `gradient_accumulation.ckpt`, that is, the model file, is saved during training. - -**Validate the model.** - -Use the saved checkpoint file to load the validation dataset through [eval.py]() in the lenet directory of model_zoo. - -```shell -python eval.py --data_path=./MNIST_Data --ckpt_path=./gradient_accumulation.ckpt --device_target=GPU -``` - -The output is as follows. The accuracy of the validation dataset is about 96.31%, which is the same as the result when the value of batch_size is 32. - -```shell -============== Starting Testing ============== -============== {'Accuracy': 0.9631730769230769} ============== -``` - -## Parallel Mode - -If gradient accumulation is used in `SEMI_AUTO_PARALLEL` and `AUTO_PARALLEL` modes, the accumulation steps and update steps are delivered as two graphs and executed alternately. In an accumulation step graph, only the forward and backward operations and gradient accumulation are performed. In an update step graph, the forward and backward operations and parameter updates are performed. The example in [Parallel Distributed Training](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html) is used to describe the procedure. - -> Download the main training sample code: - -### Defining the Parallel Training Process - -Generally, after the forward network is defined, [`TrainOneStepCell`](https://www.mindspore.cn/doc/api_python/en/master/mindspore/nn/mindspore.nn.TrainOneStepCell.html?highlight=trainonestepcell) is used to associate the forward and backward networks with the optimizer. However, two different situations, accumulation and update, exist during gradient accumulation. We need to make some modifications based on the original class definition. 
The sample code is as follows: - -```python -import numpy as np -import mindspore.common.dtype as mstype -from mindspore import ops, context, Tensor, Parameter -from mindspore.nn import TrainOneStepCell -from mindspore.common.initializer import initializer - -zeroslike = ops.ZerosLike() -reset_accu_grads = ops.MultitypeFuncGraph("reset_accu_grads") - -@reset_accu_grads.register("Tensor") -def _reset_accu_grads(accu_grad): - succ = True - return ops.depend(succ, ops.assign(accu_grad, zeroslike(accu_grad))) - -cast = ops.Cast() -update_accu_grads = ops.MultitypeFuncGraph("update_accu_grads") - - -@update_accu_grads.register("Tensor", "Tensor") -def _update_accu_grads(accu_grad, grad): - succ = True - return ops.depend(succ, ops.assign_add(accu_grad, cast(grad, mstype.float32))) - -class TrainAccuStepsCell(TrainOneStepCell): - def __init__(self, network, optimizer, sens=1.0): - super(TrainAccuStepsCell, self).__init__(network, optimizer, sens) - self.accumulation = False - self.accumulation_steps = context.get_auto_parallel_context("grad_accumulation_step") - self.accu_grads = self.weights.clone(prefix="accu_grads", init='zeros') - self.hyper_map = ops.HyperMap() - - def construct(self, *inputs): - """Defines the computation performed.""" - weights = self.weights - loss = self.network(*inputs) - sens = ops.Fill()(ops.DType()(loss), ops.Shape()(loss), self.sens) - grads = self.grad(self.network, weights)(*inputs, sens) - if self.accumulation and self.accumulation_steps > 1: - accu_succ = self.hyper_map(update_accu_grads, self.accu_grads, grads) - loss = ops.depend(loss, accu_succ) - if self.accumulation: - succ = False - else: - grads = self.grad_reducer(grads) - accu_grads = ops.depend(self.accu_grads, grads) - accu_succ = self.hyper_map(reset_accu_grads, accu_grads) - loss = ops.depend(loss, accu_succ) - succ = self.optimizer(grads) - return ops.depend(loss, succ) -``` - -On the basis of `TrainOneStepCell`, definitions of the accumulation flag `accumulation` and the 
accumulation gradient parameter `accu_grads` are added to distinguish the training process and save the accumulation gradient value, respectively. In an accumulation step graph, if `accumulation` is set to True, only the forward and backward operations are performed and gradients are accumulated to the parameter `accu_grads`. In an update step graph, if `accumulation` is set to False, the forward and backward operations and parameter updates are performed. - -> The gradient accumulation in parallel mode needs to be implemented based on the internal graph optimization of the framework. Therefore, `accumulation` and `accu_grads` defined on the network are specific characters and cannot be modified. - -In the dynamic loss scale scenario, in addition to the gradient, the overflow flag status also needs to be accumulated. The code can be modified based on [`TrainOneStepWithLossScaleCell`](https://www.mindspore.cn/doc/api_python/en/master/mindspore/nn/mindspore.nn.TrainOneStepWithLossScaleCell.html#mindspore.nn.TrainOneStepWithLossScaleCell). 
The implementation code is as follows: - -```python -import numpy as np -import mindspore.common.dtype as mstype -from mindspore import ops, context, Tensor, Parameter -from mindspore.nn import TrainOneStepWithLossScaleCell -from mindspore.nn.wrap.loss_scale import _grad_scale -from mindspore.common.initializer import initializer - -zeroslike = ops.ZerosLike() -reset_accu_grads = ops.MultitypeFuncGraph("reset_accu_grads") - -@reset_accu_grads.register("Tensor") -def _reset_accu_grads(accu_grad): - succ = True - return ops.depend(succ, ops.assign(accu_grad, zeroslike(accu_grad))) - -cast = ops.Cast() -update_accu_grads = ops.MultitypeFuncGraph("update_accu_grads") - - -@update_accu_grads.register("Tensor", "Tensor") -def _update_accu_grads(accu_grad, grad): - succ = True - return ops.depend(succ, ops.assign_add(accu_grad, cast(grad, mstype.float32))) - - -class TrainAccuStepsWithLossScaleCell(TrainOneStepWithLossScaleCell): - def __init__(self, network, optimizer, scale_sense): - super(TrainAccuStepsWithLossScaleCell, self).__init__(network, optimizer, scale_sense) - self.accumulation = False - self.accumulation_steps = context.get_auto_parallel_context("grad_accumulation_step") - self.one = Tensor(np.array([1]).astype(np.int32)) - self.zero = Tensor(np.array([0]).astype(np.int32)) - self.accu_grads = self.weights.clone(prefix="accu_grads", init='zeros') - self.accu_overflow = Parameter(initializer(0, [1], mstype.int32)) - self.accu_loss = Parameter(initializer(0, [1], mstype.float32)) - self.cast = ops.Cast() - self.logical_or = ops.LogicalOr() - self.not_equal = ops.NotEqual() - self.select = ops.Select() - self.reshape = ops.Reshape() - - def construct(self, *inputs): - """Defines the computation performed.""" - weights = self.weights - loss = self.network(*inputs) - scaling_sens = self.scale_sense - status, scaling_sens = self.start_overflow_check(loss, scaling_sens) - scaling_sens_filled = ops.ones_like(loss) * ops.cast(scaling_sens, ops.dtype(loss)) - grads = 
self.grad(self.network, weights)(*inputs, scaling_sens_filled) - # accumulate gradients - if self.accumulation and self.accumulation_steps > 1: - accu_succ = self.hyper_map(update_accu_grads, self.accu_grads, grads) - loss = ops.depend(loss, accu_succ) - overflow = self.get_overflow_status(status, grads) - overflow = self.logical_or(self.not_equal(self.accu_overflow, self.zero), overflow) - accu_overflow = self.select(overflow, self.one, self.zero) - - if self.accumulation: - succ = False - self.accu_overflow = accu_overflow - else: - self.accu_overflow = self.zero - # apply grad reducer on grads - grads = self.grad_reducer(grads) - grads = self.hyper_map(ops.partial(_grad_scale, scaling_sens), grads) - accu_overflow = self.allreduce(accu_overflow) - overflow = self.less_equal(self.base, accu_overflow) - accu_grads = ops.depend(self.accu_grads, grads) - accu_succ = self.hyper_map(reset_accu_grads, accu_grads) - overflow = ops.depend(overflow, accu_succ) - overflow = self.reshape(overflow, (())) - overflow = self.process_loss_scale(overflow) - if overflow: - succ = False - else: - succ = self.optimizer(grads) - - ret = (loss, overflow, scaling_sens) - return ops.depend(ret, succ) -``` - -`accu_overflow` is a parameter used to store the accumulation overflow flag status. - -### Defining the Parallel Training Model - -The network encapsulated by `cell_wrapper` contains the forward and backward operations and optimizer implementation. You need to connect the dataset to the network and execute the two graphs alternately. The preceding functions are implemented based on the [`Model`](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.html?highlight=model#mindspore.Model) API in the framework. 
- -```python -import math -from mindspore.train.callback import RunContext -from mindspore import context -from mindspore.context import ParallelMode -from mindspore import Model, connect_network_with_dataset -from mindspore.common.dtype import pytype_to_dtype -from mindspore._c_expression import init_exec_dataset -from mindspore.train.train_thor.dataset_helper import DatasetHelper - - -def _convert_type(types): - """ - Convert from numpy type to tensor type. - - Args: - types (list): Numpy type list of element in dataset. - - Returns: - list, list of element in dataset. - """ - ms_types = [] - for np_type in types: - ms_type = pytype_to_dtype(np_type) - ms_types.append(ms_type) - return ms_types - - -def _get_types_and_shapes(dataset): - """Get dataset types and shapes.""" - dataset_types = _convert_type(dataset.output_types()) - dataset_shapes = dataset.output_shapes() - return dataset_types, dataset_shapes - - -def _exec_datagraph(exec_dataset, dataset_size, phase='dataset'): - """Initialize and execute the dataset graph.""" - batch_size = exec_dataset.get_batch_size() - input_indexs = exec_dataset.input_indexs - - # transform data format - dataset_types, dataset_shapes = _get_types_and_shapes(exec_dataset) - init_exec_dataset(exec_dataset.__transfer_dataset__.queue_name, - dataset_size, - batch_size, - dataset_types, - dataset_shapes, - input_indexs, - phase=phase, - need_run=False) - - -class Model_ACCU(Model): - def __init__(self, network, loss_fn=None, optimizer=None, metrics=None, eval_network=None, - eval_indexes=None, amp_level="O0", **kwargs): - super(Model_ACCU, self).__init__(network, loss_fn, optimizer, metrics, eval_network, - eval_indexes, amp_level, **kwargs) - self._frequency = context.get_auto_parallel_context("grad_accumulation_step") - self._train_network = self._build_train_network() - - def _exec_preprocess(self, network, is_train, phase, dataset, dataset_sink_mode, sink_size=-1, - epoch_num=1, iter_first_order=1): - """Initializes 
dataset.""" - if dataset_sink_mode and not is_train: - dataset.__loop_size__ = 1 - dataset_helper = DatasetHelper(dataset, dataset_sink_mode, sink_size, epoch_num, iter_first_order) - - if dataset_sink_mode and context.get_context("device_target") != "GPU": - network = connect_network_with_dataset(network, dataset_helper) - network.set_train(is_train) - network.phase = phase - - if self._parallel_mode in (ParallelMode.SEMI_AUTO_PARALLEL, ParallelMode.AUTO_PARALLEL): - network.set_auto_parallel() - - return dataset_helper, network - - def _train_dataset_sink_process(self, epoch, train_dataset, list_callback=None, cb_params=None, sink_size=-1): - """ - Training process. The data would be passed to network through dataset channel. - - Args: - epoch (int): Total number of iterations on the data. - train_dataset (Dataset): A training dataset iterator. If there is no - loss_fn, a tuple with multiple data (data1, data2, data3, ...) should be - returned and passed to the network. Otherwise, a tuple (data, label) should - be returned. The data and label would be passed to the network and loss - function respectively. - list_callback (Callback): Executor of callback list. Default: None. - cb_params (_InternalCallbackParam): Callback parameters. Default: None. - sink_size (int): Control the amount of data in each sink. Default: -1. 
- """ - if sink_size == -1: - epoch_num = epoch - else: - epoch_num = math.ceil(epoch * sink_size / train_dataset.get_dataset_size()) - - iter_first_order = 1 - iter_second_order = self._frequency - 1 - train_dataset.__loop_size__ = iter_second_order - dataset_helper, train_network = self._exec_preprocess(self._train_network, - is_train=True, - phase='train', - dataset=train_dataset, - dataset_sink_mode=True, - sink_size=sink_size, - epoch_num=epoch_num, - iter_first_order=iter_first_order) - - self._train_network = train_network - cb_params.train_network = self._train_network - cb_params.cur_step_num = 0 - - run_context = RunContext(cb_params) - list_callback.begin(run_context) - - # used to stop training for early stop, such as stopAtTIme or stopATStep - should_stop = False - switch_branch_one = True - index_first_order = 0 - train_network_init_flag = True - has_do_dataset_init = False - - for i in range(epoch): - cb_params.cur_epoch_num = i + 1 - list_callback.epoch_begin(run_context) - # for data sink dataset_helper only iter once, other wise iter epoch_size times. 
- for inputs in dataset_helper: - list_callback.step_begin(run_context) - if switch_branch_one: - cb_params.cur_step_num += iter_second_order - if train_network_init_flag: - self._train_network.add_flags_recursive(accumulation=True) - self._train_network.phase = 'train0' - else: - cb_params.cur_step_num += iter_first_order - if train_network_init_flag: - self._train_network.add_flags_recursive(accumulation=False) - train_network_init_flag = False - self._train_network.phase = 'train1' - if not has_do_dataset_init: - _exec_datagraph(train_dataset, iter_first_order, phase='train1_dataset') - has_do_dataset_init = True - switch_branch_one = not switch_branch_one - outputs = self._train_network(*inputs) - cb_params.net_outputs = outputs - list_callback.step_end(run_context) - - list_callback.epoch_end(run_context) - should_stop = should_stop or run_context.get_stop_requested() - if should_stop: - break - dataset_helper.stop_send() - - list_callback.end(run_context) -``` - -In the sample code, the subclass `Model_ACCU` rewrites the `_exec_preprocess` dataset encapsulation and the `_train_dataset_sink_process` training offload method of the base class, and delivers the data subgraph and training graph of the accumulation step graph `(accumulation=True)` and the update step graph `(accumulation=False)`, respectively. The training process is alternately executed. The number of offloaded steps of the accumulation step graph is the value of `grad_accumulation_step` minus 1 and that of the update step graph is 1. - -### Training the Model - -After the preceding definition is complete, you can use the training API to complete model training. Configure the `grad_accumulation_step` parameter in `context.set_auto_parallel_context` to enable gradient accumulation. Then, use the modified `cell_wrapper` to encapsulate the network structure and transfer it to the `Model_ACCU` to initialize the model. 
- -```python -context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL, gradients_mean=True, grad_accumulation_step=6) -loss_cb = LossMonitor() -data_path = os.getenv('DATA_PATH') -batch_size = 32 -dataset = create_dataset(data_path, batch_size=batch_size) -num_classes = 10 -net = resnet50(batch_size, num_classes) -loss = SoftmaxCrossEntropyExpand(sparse=True) -opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) -net_with_loss = nn.WithLossCell(net, loss) -net_with_loss = VirtualDatasetCell(net_with_loss) -wrap_net = TrainAccuStepsCell(net_with_loss, opt) -model = Model_ACCU(wrap_net) -model.train(epoch_size, dataset, callbacks=[loss_cb], dataset_sink_mode=True) -``` - -The following information can be found in the logs: - -```text -epoch: 1 step: 234, loss is 1.7588712 -epoch: 2 step: 234, loss is 1.7275971 -epoch: 3 step: 234, loss is 1.5423206 -epoch: 4 step: 234, loss is 1.2762429 -epoch: 5 step: 234, loss is 1.0915408 -``` diff --git a/tutorials/training/source_en/advanced_use/apply_host_device_training.md b/tutorials/training/source_en/advanced_use/apply_host_device_training.md deleted file mode 100644 index 7f80cf8acbf12a3e7d11779dd1db66769068bfeb..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/apply_host_device_training.md +++ /dev/null @@ -1,116 +0,0 @@ -# Applying Host&Device Hybrid Training - -`Linux` `Ascend` `CPU` `Model Training` `Intermediate` `Expert` - - - -- [Applying Host&Device Hybrid Training](#applying-hostdevice-hybrid-training) - - [Overview](#overview) - - [Preliminaries](#preliminaries) - - [Configuring for Hybrid Training](#configuring-for-hybrid-training) - - [Training the Model](#training-the-model) - - [Reference](#reference) - - - - - -## Overview - -In deep learning, one usually has to deal with the huge model problem, in which the total size of parameters in the model is beyond the device memory capacity. 
To efficiently train a huge model, one solution is to employ homogeneous accelerators (*e.g.*, Ascend 910 AI Accelerator and GPU) for distributed training. When the size of a model is hundreds of GBs or several TBs, -the number of required accelerators is too overwhelming for people to access, resulting in this solution inapplicable. One alternative is Host+Device hybrid training. This solution simultaneously leveraging the huge memory in hosts and fast computation in accelerators, is a promisingly -efficient method for addressing huge model problem. - -In MindSpore, users can easily implement hybrid training by configuring trainable parameters and necessary operators to run on hosts, and other operators to run on accelerators. -This tutorial introduces how to train [Wide&Deep](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/recommend/wide_and_deep) in the Host+Ascend 910 AI Accelerator mode. - -## Preliminaries - -1. Prepare the model. The Wide&Deep code can be found at: , in which `train_and_eval_auto_parallel.py` is the main function for training, `src/` directory contains the model definition, data processing and configuration files, `script/` directory contains the launch scripts in different modes. - -2. Prepare the dataset. Please refer the link in [1] to download the dataset, and use the script `src/preprocess_data.py` to transform dataset into MindRecord format. - -3. Configure the device information. When performing training in the bare-metal environment, the network information file needs to be configured. 
This example only employs one accelerator, thus `rank_table_1p_0.json` containing #0 accelerator is configured as follows (you need to check the server's IP first): - - ```json - { - "version": "1.0", - "server_count": "1", - "server_list": [ - { - "server_id":"10.155.170.16", - "device": [ - {"device_id":"0","device_ip":"192.1.113.246","rank_id":"0"}], - "host_nic_ip":"reserve" - } - ], - "status": "completed" - } - ``` - -## Configuring for Hybrid Training - -1. Configure the flag of hybrid training. In the function `argparse_init` of file `src/config.py`, change the default value of `host_device_mix` to be `1`; change `self.host_device_mix` in function `__init__` of `class WideDeepConfig` to be `1`: - - ```python - self.host_device_mix = 1 - ``` - -2. Check the deployment of necessary operators and optimizers. In class `WideDeepModel` of file `src/wide_and_deep.py`, check the execution of `EmbeddingLookup` is at host: - - ```python - self.deep_embeddinglookup = nn.EmbeddingLookup() - self.wide_embeddinglookup = nn.EmbeddingLookup() - ``` - - In `class TrainStepWrap(nn.Cell)` of file `src/wide_and_deep.py`, check two optimizers are also executed at host: - - ```python - self.optimizer_w.target = "CPU" - self.optimizer_d.target = "CPU" - ``` - -## Training the Model - -Use the script `script/run_auto_parallel_train.sh`. Run the command `bash run_auto_parallel_train.sh 1 1 DATASET RANK_TABLE_FILE`, -where the first `1` is the number of accelerators, the second `1` is the number of epochs, `DATASET` is the path of dataset, -and `RANK_TABLE_FILE` is the path of the above `rank_table_1p_0.json` file. - -The running log is in the directory of `device_0`, where `loss.log` contains every loss value of every step in the epoch. 
Here is an example: - -```text -epoch: 1 step: 1, wide_loss is 0.6873926, deep_loss is 0.8878349 -epoch: 1 step: 2, wide_loss is 0.6442529, deep_loss is 0.8342661 -epoch: 1 step: 3, wide_loss is 0.6227323, deep_loss is 0.80273706 -epoch: 1 step: 4, wide_loss is 0.6107221, deep_loss is 0.7813441 -epoch: 1 step: 5, wide_loss is 0.5937832, deep_loss is 0.75526017 -epoch: 1 step: 6, wide_loss is 0.5875453, deep_loss is 0.74038756 -epoch: 1 step: 7, wide_loss is 0.5798845, deep_loss is 0.7245408 -epoch: 1 step: 8, wide_loss is 0.57553077, deep_loss is 0.7123517 -epoch: 1 step: 9, wide_loss is 0.5733629, deep_loss is 0.70278376 -epoch: 1 step: 10, wide_loss is 0.566089, deep_loss is 0.6884129 -... -``` - -`test_deep0.log` contains the runtime log (This needs to adjust the log level to INFO, and add the `-p on` option when compiling MindSpore). -Search `EmbeddingLookup` in `test_deep0.log`, the following can be found: - -```text -[INFO] DEVICE(109904,python3.7):2020-06-27-12:42:34.928.275 [mindspore/ccsrc/device/cpu/cpu_kernel_runtime.cc:324] Run] cpu kernel: Default/network-VirtualDatasetCellTriple/_backbone-NetWithLossClass/network-WideDeepModel/EmbeddingLookup-op297 costs 3066 us. -[INFO] DEVICE(109904,python3.7):2020-06-27-12:42:34.943.896 [mindspore/ccsrc/device/cpu/cpu_kernel_runtime.cc:324] Run] cpu kernel: Default/network-VirtualDatasetCellTriple/_backbone-NetWithLossClass/network-WideDeepModel/EmbeddingLookup-op298 costs 15521 us. -``` - -The above shows the running time of `EmbeddingLookup` on the host. - -Search `FusedSparseFtrl` and `FusedSparseLazyAdam` in `test_deep0.log`, the following can be found: - -```text -[INFO] DEVICE(109904,python3.7):2020-06-27-12:42:35.422.963 [mindspore/ccsrc/device/cpu/cpu_kernel_runtime.cc:324] Run] cpu kernel: Default/optimizer_w-FTRL/FusedSparseFtrl-op299 costs 54492 us. 
-[INFO] DEVICE(109904,python3.7):2020-06-27-12:42:35.565.953 [mindspore/ccsrc/device/cpu/cpu_kernel_runtime.cc:324] Run] cpu kernel: Default/optimizer_d-LazyAdam/FusedSparseLazyAdam-op300 costs 142865 us. -``` - -The above shows the running time of two optimizers on the host. - -## Reference - -[1] Huifeng Guo, Ruiming Tang, Yunming Ye, Zhenguo Li, Xiuqiang He. [DeepFM: A Factorization-Machine based Neural Network for CTR Prediction.](https://doi.org/10.24963/ijcai.2017/239) IJCAI 2017. diff --git a/tutorials/training/source_en/advanced_use/apply_parameter_server_training.md b/tutorials/training/source_en/advanced_use/apply_parameter_server_training.md deleted file mode 100644 index 6069b718abcfc70f5bd76b7c8795f3e3458f37ec..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/apply_parameter_server_training.md +++ /dev/null @@ -1,166 +0,0 @@ -# Training with Parameter Server - -`Linux` `Ascend` `GPU` `Model Training` `Intermediate` `Expert` - - - -- [Training with Parameter Server](#training-with-parameter-server) - - [Overview](#overview) - - [Preparations](#preparations) - - [Training Script Preparation](#training-script-preparation) - - [Parameter Setting](#parameter-setting) - - [Environment Variable Setting](#environment-variable-setting) - - [Training](#training) - - - - - -## Overview - -A parameter server is a widely used architecture in distributed training. Compared with the synchronous AllReduce training method, a parameter server has better flexibility, scalability, and node failover capabilities. Specifically, the parameter server supports both synchronous and asynchronous SGD training algorithms. In terms of scalability, model computing and update are separately deployed in the worker and server processes, so that resources of the worker and server can be independently scaled out and in horizontally. 
In addition, in an environment of a large-scale data center, various failures often occur in a computing device, a network, and a storage device, and consequently some nodes are abnormal. However, in an architecture of a parameter server, such a failure can be relatively easily handled without affecting a training job. - -In the parameter server implementation of MindSpore, the self-developed communication framework (core) is used as the basic architecture. Based on the remote communication capability provided by the core and abstract Send/Broadcast primitives, the distributed training algorithm of the synchronous SGD is implemented. In addition, with the high-performance collective communication library in Ascend and GPU(HCCL and NCCL), MindSpore also provides the hybrid training mode of parameter server and AllReduce. Some weights can be stored and updated through the parameter server, and other weights are still trained through the AllReduce algorithm. - -The ps-lite architecture consists of three independent components: server, worker, and scheduler. Their functions are as follows: - -- Server: saves model weights and backward computation gradients, and updates the model using gradients pushed by workers. - -- Worker: performs forward and backward computation on the network. The gradient value for backward computation is uploaded to a server through the `Push` API, and the model updated by the server is downloaded to the worker through the `Pull` API. - -- Scheduler: establishes the communication relationship between the server and worker. - -## Preparations - -The following describes how to use parameter server to train LeNet on Ascend 910: - -### Training Script Preparation - -Learn how to train a LeNet using the [MNIST dataset](http://yann.lecun.com/exdb/mnist/) by referring to . - -### Parameter Setting - -1. First of all, use `mindspore.context.set_ps_context(enable_ps=True)` to enable Parameter Server training mode. 
- - - This method should be called before `mindspore.communication.management.init()`. - - If you don't call this method, the [Environment Variable Setting](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/apply_parameter_server_training.html#environment-variable-setting) below will not take effect. - - Use `mindspore.context.reset_ps_context()` to disable Parameter Server training mode. - -2. In this training mode, you can use either of the following methods to control whether the training parameters are updated by the Parameter Server and whether the training parameters are initialized on Worker or Server: - - - Use `mindspore.nn.Cell.set_param_ps()` to set all weight recursions of `nn.Cell`. - - Use `mindspore.Parameter.set_param_ps()` to set the weight. - - The size of the weight which is updated by Parameter Server should not exceed INT_MAX(2^31 - 1) bytes. - - The interface `set_param_ps` can receive a `bool` parameter:`init_in_server`, indicating whether this training parameter is initialized on the Server side. `init_in_server` defaults to `False`, indicating that this training parameter is initialized on Worker. Currently, only the training parameter `embedding_table` of the `EmbeddingLookup` operator is supported to be initialized on Server side to solve the problem of insufficient memory caused by the initialization of a large shape `embedding_table` on Worker. The `EmbeddingLookup` operator's `target` attribute needs to be set to 'CPU'. The training parameter initialized on the Server side will no longer be synchronized to Worker. If it involves multi-Server training and saves CheckPoint, each Server will save a CheckPoint after the training. - -3. 
On the basis of the [original training script](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/lenet/train.py), set all LeNet model weights to be trained on the parameter server: - - ```python - context.set_ps_context(enable_ps=True) - network = LeNet5(cfg.num_classes) - network.set_param_ps() - ``` - -4. [optional configuration] For a large shape `embedding_table`, because the device can not store a full amount of `embedding_table`. You can configure the `vocab_cache_size` of [EmbeddingLookup operator](https://www.mindspore.cn/doc/api_python/en/master/mindspore/nn/mindspore.nn.EmbeddingLookup.html) to enable the cache function of `EmbeddingLookup` in the Parameter Server training mode. The `vocab_cache_size` of `embedding_table` is trained on device, and a full amount of `embedding_table` is stored in the Server. The `embedding_table` of the next batch is swapped to the cache in advance, and the expired `embedding_table` is put back to the Server when the cache cannot be placed, to achieve the purpose of improving the training performance. Each Server could save a checkpoint containing the trained `embedding_table` after the training. Detailed network training script can be referred to . - - ```python - context.set_auto_parallel_context(full_batch=True, - parallel_mode=ParallelMode.AUTO_PARALLEL) - context.set_context(enable_sparse=True) - network = Net() - model = Model(network) - model.train(epoch, train_dataset, dataset_sink_mode=True) - ``` - - In the information: - - - `dataset_sink_mode`: whether to enable the sink mode of dataset or not. When `True`, it indicates enabled, and pass the data through the dataset channel. It must be set to `True` in this scenario (The inference during training also needs to enable the sink mode of dataset). - - `full_batch`: whether to load the dataset in full or not. When `True`, it indicates fully load, and data of each device is the same. It must be set to `True` in the multi-workers scenario. 
- - `parallel_mode`: parallel mode. Auto parallel mode must be enabled in the multi-workers scenario; please set `parallel_mode`=`ParallelMode.AUTO_PARALLEL`.
Shell scripts - - Provide the shell scripts corresponding to the worker, server, and scheduler roles to start training: - - `Scheduler.sh`: - - ```bash - #!/bin/bash - export MS_SERVER_NUM=1 - export MS_WORKER_NUM=1 - export MS_SCHED_HOST=XXX.XXX.XXX.XXX - export MS_SCHED_PORT=XXXX - export MS_ROLE=MS_SCHED - python train.py --device_target=Ascend --data_path=path/to/dataset - ``` - - `Server.sh`: - - ```bash - #!/bin/bash - export MS_SERVER_NUM=1 - export MS_WORKER_NUM=1 - export MS_SCHED_HOST=XXX.XXX.XXX.XXX - export MS_SCHED_PORT=XXXX - export MS_ROLE=MS_PSERVER - python train.py --device_target=Ascend --data_path=path/to/dataset - ``` - - `Worker.sh`: - - ```bash - #!/bin/bash - export MS_SERVER_NUM=1 - export MS_WORKER_NUM=1 - export MS_SCHED_HOST=XXX.XXX.XXX.XXX - export MS_SCHED_PORT=XXXX - export MS_ROLE=MS_WORKER - python train.py --device_target=Ascend --data_path=path/to/dataset - ``` - - Run the following commands separately: - - ```bash - sh Scheduler.sh > scheduler.log 2>&1 & - sh Server.sh > server.log 2>&1 & - sh Worker.sh > worker.log 2>&1 & - ``` - - Start training. - -2. Viewing result - - Run the following command to view the communication logs between the server and worker in the `scheduler.log` file: - - ```text - The server node id:b5d8a47c-46d7-49a5-aecf-d29d7f8b6124,node ip: 10.90.53.118,node port:46737 assign rank id:0 - The worker node id:55e86d4b-d717-4930-b414-ebd80082f541 assign rank id:1 - Start the scheduler node is successful! - ``` - - The preceding information indicates that the communication between the server, worker, and scheduler is established successfully. - - Check the training result in the `worker.log` file: - - ```text - epoch: 1 step: 1, loss is 2.302287 - epoch: 1 step: 2, loss is 2.304071 - epoch: 1 step: 3, loss is 2.308778 - epoch: 1 step: 4, loss is 2.301943 - ... 
- ``` diff --git a/tutorials/training/source_en/advanced_use/apply_post_training_quantization.md b/tutorials/training/source_en/advanced_use/apply_post_training_quantization.md deleted file mode 100644 index 002b0f5f44a2c70aa3d75c76e935c0930d9158f0..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/apply_post_training_quantization.md +++ /dev/null @@ -1,40 +0,0 @@ -# Applying Post Training Quantization - -Translator: [unseeme](https://gitee.com/unseenme) - -`Linux` `Model Optimization` `Expert` - - - -- [Applying Post Training Quantization](#applying-post-training-quantization) - - [Concept](#concept) - - [Weight Quantization](#weight-quantization) - - [Full Quantization](#full-quantization) - - [Post Training Quantization Tools](#post-training-quantization-tools) - - - - - -## Concept - -Post training quantization refers to perform weights quantization or full quantization on a pre-trained model. It can reduce model size while also speed up the inference. -This process does not require training. Small amounts of calibration data is needed for activations quantization. - -### Weights Quantization - -Quantify the weights of the model, only reduce the model size. Float32 operations are still performed during inference. The lower the number of quantization bits, the greater the model compression rate, but accuracy loss is usually become relatively large. - -### Full Quantization - -Quantify the weights and activations of the model, int operations are performed during inference. It can reduce the size of the model, increase the speed of model inference, and reduce power consumption. -For scenarios that need to increase the running speed and reduce the power consumption of the model, you can use the post training full quantization. In order to calculate the quantitative parameters of the activations, the user needs to provide a calibration dataset. 
- -## Post Training Quantization Tools - -Choose to use the corresponding post training quantization tool according to the hardware platform deployed for model inference. - -| Post Training Quantization Tools | Quantization Method Supported | Inference Hardware Platform Supported | Quantization Model Deployment | -| --- | --- | --- | --- | -| [MindSpore Post Training Quantization Tools](https://www.mindspore.cn/tutorial/lite/en/master/use/post_training_quantization.html) | Weights Quantization
    Full Quantization | CPU | [Inference on edge device](https://www.mindspore.cn/tutorial/lite/en/master/use/runtime.html) | -| Ascend Model Compression Tool | Full Quantization | Ascend 310 AI Processor | [Inference on Ascend 310 AI Processor](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_ascend_310.html) | diff --git a/tutorials/training/source_en/advanced_use/apply_quantization_aware_training.md b/tutorials/training/source_en/advanced_use/apply_quantization_aware_training.md deleted file mode 100644 index 92215fd32cd2ec1d2c29d95465ea324d48c4d1b3..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/apply_quantization_aware_training.md +++ /dev/null @@ -1,280 +0,0 @@ -# Applying Quantization Aware Training - -`Linux` `Ascend` `GPU` `Model Optimization` `Expert` - - - -- [Applying Quantization Aware Training](#applying-quantization-aware-training) - - [Background](#background) - - [Concepts](#concepts) - - [Quantization](#quantization) - - [Fake Quantization Node](#fake-quantization-node) - - [Quantization Aware Training](#quantization-aware-training) - - [Quantization Aware Training Example](#quantization-aware-training-example) - - [Defining a Quantization Network](#defining-a-quantization-network) - - [Automatically Building a Quantization Network](#automatically-building-a-quantization-network) - - [Manually Building a Quantization Network](#manually-building-a-quantization-network) - - [Exporting a Quantization Model](#exporting-a-quantization-model) - - [References](#references) - - - - - -## Background - -Deep learning technologies are used on an increasing number of applications on mobile or edge devices. Take mobile phones as an example. To provide user-friendly and intelligent services, the deep learning function is integrated into operating systems and applications. However, this function involves training or inference, containing a large number of models and weight files. 
The original weight file of AlexNet has exceeded 200 MB, and the new model is developing towards a more complex structure with more parameters. Due to limited hardware resources of a mobile or edge device, a model needs to be simplified and the quantization technology is used to solve this problem. - -## Concepts - -### Quantization - -Quantization is a process in which weights of a floating-point model with continuous values or tensor data flowing through the model are approximated at fixed points (usually INT8) to a limited quantity (or a relatively small quantity) of discrete values at a low inference precision loss. It is a process of approximately representing 32-bit floating-point data with fewer bits, while the input and output of the model are still floating-point data. In this way, the model size and memory usage can be reduced, the model inference speed can be accelerated, and the power consumption can be reduced. - -As described above, compared with the FP32 type, low-accuracy data representation types such as FP16, INT8, and INT4 occupy less space. Replacing the high-accuracy data representation type with the low-accuracy data representation type can greatly reduce the storage space and transmission time. Low-bit computing has higher performance. Compared with FP32, INT8 has a three-fold or even higher acceleration ratio. For the same computing, INT8 has obvious advantages in power consumption. - -Currently, there are two types of quantization solutions in the industry: quantization aware training and post-training quantization. Quantization aware training requires training data and generally has better performance in model accuracy. It is applicable to scenarios that have high requirements on the model compression rate and model accuracy. Post-training quantization is easy to use. Only a small amount of calibration data is required. This mode applies to scenarios that require high usability and lack training resources. 
- -### Fake Quantization Node - -A fake quantization node is a node inserted during quantization aware training, and is used to search for network data distribution and feed back a lost accuracy. The specific functions are as follows: - -- Find the distribution of network data, that is, find the maximum and minimum values of the parameters to be quantized. -- Simulate the accuracy loss of low-bit quantization, apply the loss to the network model, and transfer the loss to the loss function, so that the optimizer optimizes the loss value during training. - -## Quantization Aware Training - -MindSpore's quantization aware training uses fake quantization nodes to simulate quantization operations. During the training, floating-point numbers are still used for computation, and network parameters are updated through backward propagation learning, so that the network parameters can better adapt to the loss caused by quantization. MindSpore adopts the solution in reference [1] for the quantization of weights and data. - -Aware quantization training specifications - -| Specification | Description | -| --- | --- | -| Hardware | Supports hardware platforms based on the GPU or Ascend AI 910 processor. | -| Network | Supports networks such as LeNet and ResNet50. For details, see . | -| Algorithm | Supports asymmetric and symmetric quantization algorithms, as well as layer-by-layer and channel-by-channel quantization algorithms. | -| Solution | Supports 4-, 7-, and 8-bit quantization solutions. | -| Data Type | Supports the FP32 and FP16 networks for quantization training on Ascend, and the FP32 network on GPU. | -| Running Mode | Supports graph mode. | - -## Quantization Aware Training Example - -The procedure of quantization aware training is the same as that of common training. Additional operations need to be performed in the phases of defining a quantization network and generating a quantization model. The complete process is as follows: - -1. 
Load the dataset and process data. -2. Define a quantization network. -3. Define an optimizer and a loss function. -4. Train the network and save the model file. -5. Load the saved model for inference. -6. Export a quantization model. - -Compared with common training, the quantization aware training requires additional steps which are steps 2 and 6 in the preceding process. Next, the LeNet network is used as an example to describe quantization-related steps. - -> You can obtain the complete executable sample code at . - -### Defining a Quantization Network - -A quantization network is a network with fake quantization nodes generated after the network layer to be quantized is modified based on the original network definition. There are two methods for defining a quantization network: - -- Automatically build a quantization network: After a fusion network is defined and the conversion API is called, the fusion network is automatically converted into a quantization network. You do not need to be aware of the process of inserting fake quantization nodes. -- Manually build a quantization network: You need to manually replace a network layer to be quantized with a corresponding quantization node, or directly insert a fake quantization node behind the network layer to be quantized. The modified network is a quantization network. You can customize the network layer to be quantized, which is more flexible and easy to scale. - -> - The automatically building method supports the quantization of the following network layers: `nn.Conv2dBnAct`, `nn.DenseBnAct`, `Add`, `Sub`, `Mul`, and `RealDiv`. If only some of these network layers need to be quantized or other network layers need to be quantized, use the manually building method. -> - The conversion API for automatically building is `QuantizationAwareTraining.quantize`. 
- -The original network model LeNet5 is defined as follows: - -```python -class LeNet5(nn.Cell): - """ - Lenet network - - Args: - num_class (int): Num classes. Default: 10. - num_channel (int): Num channel. Default: 1. - Returns: - Tensor, output tensor - Examples: - >>> LeNet(num_class=10, num_channel=1) - - """ - def __init__(self, num_class=10, num_channel=1): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x -``` - -#### Automatically Building a Quantization Network - -First, define a fusion network: - -1. Use the `nn.Conv2dBnAct` operator to replace the two operators `nn.Conv2d` and `nn.ReLU` in the original network model. -2. Use the `nn.DenseBnAct` operator to replace the two operators `nn.Dense` and `nn.ReLU` in the original network model. - -> Even if the `nn.Dense` and `nn.Conv2d` operators are not followed by `nn.BatchNorm` and `nn.ReLU`, the preceding two replacement operations must be performed as required. 
- -The following shows the fusion network after operators are replaced: - -```python -class LeNet5(nn.Cell): - def __init__(self, num_class=10): - super(LeNet5, self).__init__() - self.num_class = num_class - - self.conv1 = nn.Conv2dBnAct(1, 6, kernel_size=5, pad_mode='valid', activation='relu') - self.conv2 = nn.Conv2dBnAct(6, 16, kernel_size=5, pad_mode='valid', activation='relu') - - self.fc1 = nn.DenseBnAct(16 * 5 * 5, 120, activation='relu') - self.fc2 = nn.DenseBnAct(120, 84, activation='relu') - self.fc3 = nn.DenseBnAct(84, self.num_class) - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.conv1(x)) - x = self.max_pool2d(self.conv2(x)) - x = self.flatten(x) - x = self.fc1(x) - x = self.fc2(x) - x = self.fc3(x) - return x -``` - -When the quantization aware training is used for fine-tuning, the parameters of the pre-trained model need to be loaded. - -```python -from mindspore.compression.quant import load_nonquant_param_into_quant_net -... -# define fusion network -network = LeNet5(cfg.num_classes) - -param_dict = load_checkpoint(args.ckpt_path) -load_nonquant_param_into_quant_net(network, param_dict) -``` - -Use the `QuantizationAwareTraining.quantize` API to automatically insert a fake quantization node into the fusion network to convert the fusion network into a quantization network. - -```python -from mindspore.compression.quant import QuantizationAwareTraining - -quantizer = QuantizationAwareTraining(quant_delay=900, - bn_fold=False, - per_channel=[True, False], - symmetric=[True, False]) -net = quantizer.quantize(network) -``` - -> If the quantization precision does not meet the requirement, adjust the quantization policy parameters. For example, generally, a larger quantity of quantization bits results in a smaller precision loss, and channel-by-channel quantization provides greater precision than layer-by-layer quantization. 
In addition, you can manually build a quantization network. Select some of the network layers to be quantized to balance the relationship between accuracy and inference performance. - -#### Manually Building a Quantization Network - -Replace the layers that need to be quantized in the original network with the corresponding quantization operators: - -1. Use `nn.Conv2dQuant` to replace the `nn.Conv2d` operator in the original network model. -2. Use `nn.DenseQuant` to replace the `nn.Dense` operator in the original network model. -3. Use `nn.ActQuant` to replace the `nn.ReLU` operator in the original network model. - -```python -class LeNet5(nn.Cell): - def __init__(self, num_class=10, channel=1): - super(LeNet5, self).__init__() - self.num_class = num_class - - self.qconfig = create_quant_config(quant_dtype=(QuantDtype.INT8, QuantDtype.INT8), per_channel=(True, False), symmetric=[True, False]) - - self.conv1 = nn.Conv2dQuant(channel, 6, 5, pad_mode='valid', quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - self.conv2 = nn.Conv2dQuant(6, 16, 5, pad_mode='valid', quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - self.fc1 = nn.DenseQuant(16 * 5 * 5, 120, quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - self.fc2 = nn.DenseQuant(120, 84, quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - self.fc3 = nn.DenseQuant(84, self.num_class, quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - - self.relu = nn.ActQuant(nn.ReLU(), quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x -``` - -> - Quantization operators: `nn.Conv2dQuant`, `nn.DenseQuant` and `nn.ActQuant` are 
operators that contain fake quantization nodes. For details about quantization operators, see . -> - The fake quantization node `nn.FakeQuantWithMinMaxObserver` can be inserted after the network layer that needs to be quantized to implement quantization of more network layers. -> - You are advised to preferentially select the layer at the rear of the quantization network because the network layer at the front of the quantization network may cause more precision loss. - -When the quantization aware training is used for fine-tuning, the parameters of the pre-trained model need to be loaded. - -```python -from mindspore.compression.quant import load_nonquant_param_into_quant_net -... -# define quant network -network = LeNet5(cfg.num_classes) - -param_dict = load_checkpoint(args.ckpt_path) -load_nonquant_param_into_quant_net(network, param_dict) -``` - -### Exporting a Quantization Model - -The quantization model deployed on the device hardware platform is in a general model format (such as AIR and MindIR), and does not include a fake quantization node. The export procedure is as follows: - -1. Define a quantization network. A quantization network in this step is the same as a quantization network in quantization aware training. -2. Load the checkpoint file saved during quantization aware training. -3. Export a quantization model. Set the `quant_mode`, `mean` and `std_dev` parameter of the `export` API. - -```python -from mindspore import Tensor, context, load_checkpoint, load_param_into_net, export - -if __name__ == "__main__": - ... 
- # define fusion network - network = LeNet5(cfg.num_classes) - quantizer = QuantizationAwareTraining(bn_fold=False, - per_channel=[True, False], - symmetric=[True, False]) - network = quantizer.quantize(network) - - # load quantization aware network checkpoint - param_dict = load_checkpoint(args.ckpt_path) - load_param_into_net(network, param_dict) - - # export network - inputs = Tensor(np.ones([1, 1, cfg.image_height, cfg.image_width]), mindspore.float32) - export(network, inputs, file_name="lenet_quant", file_format='MINDIR', quant_mode='QUANT', mean=127.5, std_dev=127.5) -``` - -After the quantization model is exported, use MindSpore for inference. For details, see [Inference Using MindSpore](https://www.mindspore.cn/tutorial/inference/en/master/index.html). - -> - The exported model can be in MindIR or AIR format. -> - Models exported after quantization aware training support [Inference on Devices](https://www.mindspore.cn/lite/docs?master) and [Inference on Ascend 310](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_ascend_310.html). - -## References - -[1] Jacob B, Kligys S, Chen B, et al. Quantization and training of neural networks for efficient integer-arithmetic-only inference[C]//Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition. 2018: 2704-2713. - -[2] Krishnamoorthi R. Quantizing deep convolutional networks for efficient inference: A whitepaper[J]. arXiv preprint arXiv:1806.08342, 2018. 
diff --git a/tutorials/training/source_en/advanced_use/convert_dataset.ipynb b/tutorials/training/source_en/advanced_use/convert_dataset.ipynb deleted file mode 100644 index 437cf82a26173323c0010d96ab90e86e1009f2c5..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/convert_dataset.ipynb +++ /dev/null @@ -1,464 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "chief-microphone", - "metadata": {}, - "source": [ - "# Converting Dataset to MindRecord\n", - "\n", - "`Linux` `Ascend` `GPU` `CPU` `Data Preparation` `Intermediate` `Expert`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_en/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_en/advanced_use/convert_dataset.ipynb)" - ] - }, - { - "cell_type": "markdown", - "id": "hungarian-capture", - "metadata": {}, - "source": [ - "## Overview\n", - "\n", - "Users can convert non-standard datasets and common datasets into the MindSpore data format, MindRecord, so that they can be easily loaded to MindSpore for training. In addition, the performance of MindSpore in some scenarios is optimized, which delivers better user experience when you use datasets in the MindSpore data format.\n", - "\n", - "The MindSpore data format has the following features:\n", - "\n", - "1. Unified storage and access of user data are implemented, simplifying training data loading.\n", - "2. Data is aggregated for storage, which can be efficiently read, managed and moved.\n", - "3. Data encoding and decoding are efficient and transparent to users.\n", - "4. 
The partition size is flexibly controlled to implement distributed training.\n", - "\n", - "The MindSpore data format aims to normalize the datasets of users to MindRecord, which can be further loaded through the `MindDataset` and used in the training procedure (Please refer to the [API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/dataset/mindspore.dataset.MindDataset.html) for detailed use)." - ] - }, - { - "cell_type": "markdown", - "id": "brown-complexity", - "metadata": {}, - "source": [ - "![data-conversion-concept](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/advanced_use/images/data_conversion_concept.png)" - ] - }, - { - "cell_type": "markdown", - "id": "suited-tours", - "metadata": {}, - "source": [ - "A MindRecord file consists of data files and index files. Data files and index files do not support renaming for now.\n", - "\n", - "- Data file\n", - "\n", - " A data file contains a file header, scalar data pages and block data pages for storing normalized training data. It is recommended that the size of a single MindRecord file does not exceed 20 GB. Users can break up a large dataset and store the dataset into multiple MindRecord files.\n", - "\n", - "- Index file\n", - "\n", - " An index file contains the index information generated based on scalar data (such as image labels and image file names), used for convenient data fetching and storing statistical data about the dataset." 
- ] - }, - { - "cell_type": "markdown", - "id": "wireless-writer", - "metadata": {}, - "source": [ - "![mindrecord](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/advanced_use/images/mindrecord.png)" - ] - }, - { - "cell_type": "markdown", - "id": "extended-liverpool", - "metadata": {}, - "source": [ - "A data file consists of the following key parts:\n", - "\n", - "- File Header\n", - "\n", - " The file header stores the file header size, scalar data page size, block data page size, schema, index fields, statistics, file partition information, and mapping between scalar data and block data. It is the metadata of the MindRecord file.\n", - "\n", - "- Scalar data page\n", - "\n", - " The scalar data page is used to store integer, string and floating point data, such as the label of an image, file name of an image, and length, width of an image. The information suitable for storage with scalars is stored here.\n", - "\n", - "- Block data page\n", - "\n", - " The block data page is used to store data such as binary strings and NumPy arrays. Additional examples include converted python dictionaries generated from texts and binary image files.\n", - "\n", - "## Converting Dataset to MindRecord\n", - "\n", - "The following tutorial demonstrates how to convert image data and its annotations to MindRecord. For more instructions on MindSpore data format conversion, please refer to the [MindSpore Data Format Conversion](https://www.mindspore.cn/doc/programming_guide/en/master/dataset_conversion.html) chapter in the programming guide.\n", - "\n", - "Example 1: Show how to convert data into a MindRecord data file according to the defined dataset structure.\n", - "\n", - "1. Import the `FileWriter` class for file writing." 
- ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "accompanied-puppy", - "metadata": {}, - "outputs": [ - ], - "source": [ - " from mindspore.mindrecord import FileWriter" - ] - }, - { - "cell_type": "markdown", - "id": "recreational-thanksgiving", - "metadata": {}, - "source": [ - "2. Define a dataset schema which defines dataset fields and field types." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "different-empire", - "metadata": {}, - "outputs": [], - "source": [ - " cv_schema_json = {\"file_name\": {\"type\": \"string\"}, \"label\": {\"type\": \"int32\"}, \"data\": {\"type\": \"bytes\"}}" - ] - }, - { - "cell_type": "markdown", - "id": "lesser-school", - "metadata": {}, - "source": [ - " Schema mainly contains `name`, `type` and `shape`:\n", - " - `name`: field names, consist of letters, digits and underscores.\n", - " - `type`: field types, include int32, int64, float32, float64, string and bytes.\n", - " - `shape`: [-1] for one-dimensional array, [m, n, ...] for higher dimensional array in which m and n represent the dimensions. \n", - "\n", - " > - The type of a field with the `shape` attribute can only be int32, int64, float32, or float64.\n", - " > - If the field has the `shape` attribute, only data in `numpy.ndarray` type can be transferred to the `write_raw_data` API.\n", - "\n", - "3. Prepare the data sample list to be written based on the user-defined schema format. Binary data of the images is transferred below." 
- ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "higher-likelihood", - "metadata": {}, - "outputs": [], - "source": [ - " data = [{\"file_name\": \"1.jpg\", \"label\": 0, \"data\": b\"\\x10c\\xb3w\\xa8\\xee$o&\\xd4\\x00\\xf8\\x129\\x15\\xd9\\xf2q\\xc0\\xa2\\x91YFUO\\x1dsE1\\x1ep\"},\n", - " {\"file_name\": \"3.jpg\", \"label\": 99, \"data\": b\"\\xaf\\xafU<\\xb8|6\\xbd}\\xc1\\x99[\\xeaj+\\x8f\\x84\\xd3\\xcc\\xa0,i\\xbb\\xb9-\\xcdz\\xecp{T\\xb1\\xdb\"}]" - ] - }, - { - "cell_type": "markdown", - "id": "cubic-opinion", - "metadata": {}, - "source": [ - "4. Adding index fields can accelerate data loading. This step is optional." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "timely-neutral", - "metadata": {}, - "outputs": [], - "source": [ - " indexes = [\"file_name\", \"label\"]" - ] - }, - { - "cell_type": "markdown", - "id": "broad-fiber", - "metadata": {}, - "source": [ - "5. Create a `FileWriter` object, transfer the file name and number of slices, add the schema and index, call the `write_raw_data` API to write data, and call the `commit` API to generate a local data file." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "plain-psychiatry", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MSRStatus.SUCCESS" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - " writer = FileWriter(file_name=\"test.mindrecord\", shard_num=4)\n", - " writer.add_schema(cv_schema_json, \"test_schema\")\n", - " writer.add_index(indexes)\n", - " writer.write_raw_data(data)\n", - " writer.commit()" - ] - }, - { - "cell_type": "markdown", - "id": "fuzzy-injection", - "metadata": {}, - "source": [ - " This example will generate `test.mindrecord0`, `test.mindrecord0.db`, `test.mindrecord1`, `test.mindrecord1.db`, `test.mindrecord2`, `test.mindrecord2.db`, `test.mindrecord3`, `test.mindrecord3.db`, totally eight files, called MindRecord datasets. 
`test.mindrecord0` and `test.mindrecord0.db` are collectively referred to as a MindRecord file, where `test.mindrecord0` is the data file and `test.mindrecord0.db` is the index file.\n", - "\n", - " **Interface Description:**\n", - " - `write_raw_data`: write data to memory.\n", - " - `commit`: write data in memory to disk.\n", - "\n", - "6. For adding data to the existing data format file, call the `open_for_append` API to open the existing data file, call the `write_raw_data` API to write new data, and then call the `commit` API to generate a local data file." - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "indie-thickness", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MSRStatus.SUCCESS" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - " writer = FileWriter.open_for_append(\"test.mindrecord0\")\n", - " writer.write_raw_data(data)\n", - " writer.commit()" - ] - }, - { - "cell_type": "markdown", - "id": "palestinian-liberal", - "metadata": {}, - "source": [ - "Example 2: Convert a picture in `jpg` format into a MindRecord dataset according to the method in Example 1.\n", - "\n", - "Download the image data `transform.jpg` that needs to be processed as the raw data to be processed.\n", - "\n", - "Create a folder directory `./datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/` to store all the converted datasets in this experience.\n", - "\n", - "Create a folder directory `./datasets/convert_dataset_to_mindrecord/images/` to store the downloaded image data." 
- ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "boolean-criterion", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/convert_dataset_to_mindrecord/images/\n", - "└── transform.jpg\n", - "\n", - "0 directories, 1 file\n" - ] - } - ], - "source": [ - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/transform.jpg\n", - "!mkdir -p ./datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/\n", - "!mkdir -p ./datasets/convert_dataset_to_mindrecord/images/\n", - "!mv -f ./transform.jpg ./datasets/convert_dataset_to_mindrecord/images/\n", - "!tree ./datasets/convert_dataset_to_mindrecord/images/" - ] - }, - { - "cell_type": "markdown", - "id": "least-scholarship", - "metadata": {}, - "source": [ - "Execute the following code to convert the downloaded `transform.jpg` into a MindRecord dataset." - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "committed-chosen", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MSRStatus.SUCCESS" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# step 1 import class FileWriter\n", - "import os\n", - "from mindspore.mindrecord import FileWriter\n", - "\n", - "# clean up old run files before in Linux\n", - "data_path = './datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/'\n", - "os.system('rm -f {}test.*'.format(data_path))\n", - "\n", - "# import FileWriter class ready to write data\n", - "data_record_path = './datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/test.mindrecord'\n", - "writer = FileWriter(file_name=data_record_path,shard_num=4)\n", - "\n", - "# define the data type\n", - "data_schema = {\"file_name\":{\"type\":\"string\"},\"label\":{\"type\":\"int32\"},\"data\":{\"type\":\"bytes\"}}\n", - "writer.add_schema(data_schema,\"test_schema\")\n", - "\n", - "# prepeare the data 
contents\n", - "file_name = \"./datasets/convert_dataset_to_mindrecord/images/transform.jpg\"\n", - "with open(file_name, \"rb\") as f:\n", - " bytes_data = f.read()\n", - "data = [{\"file_name\":\"transform.jpg\", \"label\":1, \"data\":bytes_data}]\n", - "\n", - "# add index field\n", - "indexes = [\"file_name\",\"label\"]\n", - "writer.add_index(indexes)\n", - "\n", - "# save data to the files\n", - "writer.write_raw_data(data)\n", - "writer.commit()" - ] - }, - { - "cell_type": "markdown", - "id": "gorgeous-anchor", - "metadata": {}, - "source": [ - "This example will generate 8 files, which become the MindRecord dataset. `test.mindrecord0` and `test.mindrecord0.db` are called 1 MindRecord files, where `test.mindrecord0` is the data file, and `test.mindrecord0.db` is the index file. The generated files are as follows:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "touched-seventh", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/\n", - "├── test.mindrecord0\n", - "├── test.mindrecord0.db\n", - "├── test.mindrecord1\n", - "├── test.mindrecord1.db\n", - "├── test.mindrecord2\n", - "├── test.mindrecord2.db\n", - "├── test.mindrecord3\n", - "└── test.mindrecord3.db\n", - "\n", - "0 directories, 8 files\n" - ] - } - ], - "source": [ - "!tree ./datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/" - ] - }, - { - "cell_type": "markdown", - "id": "initial-population", - "metadata": {}, - "source": [ - "## Loading MindRecord Dataset\n", - "\n", - "The following tutorial briefly demonstrates how to load the MindRecord dataset using the `MindDataset`.\n", - "\n", - "1. Import the `dataset` for dataset loading." 
- ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "compressed-western", - "metadata": {}, - "outputs": [], - "source": [ - " import mindspore.dataset as ds" - ] - }, - { - "cell_type": "markdown", - "id": "smart-kuwait", - "metadata": {}, - "source": [ - "2. Use the `MindDataset` to load the MindRecord dataset." - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "powered-occupation", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "sample: {'data': array([175, 175, 85, 60, 184, 124, 54, 189, 125, 193, 153, 91, 234,\n", - " 106, 43, 143, 132, 211, 204, 160, 44, 105, 187, 185, 45, 205,\n", - " 122, 236, 112, 123, 84, 177, 219], dtype=uint8), 'file_name': array(b'3.jpg', dtype='|S5'), 'label': array(99, dtype=int32)}\n", - "sample: {'data': array([ 16, 99, 179, 119, 168, 238, 36, 111, 38, 60, 113, 140, 142,\n", - " 40, 162, 144, 144, 150, 188, 177, 30, 212, 81, 69, 82, 19,\n", - " 63, 255, 217], dtype=uint8), 'file_name': array(b'1.jpg', dtype='|S5'), 'label': array(0, dtype=int32)}\n", - "sample: {'data': array([175, 175, 85, 60, 184, 124, 54, 189, 125, 193, 153, 91, 234,\n", - " 106, 43, 143, 132, 211, 204, 160, 44, 105, 187, 185, 45, 205,\n", - " 122, 236, 112, 123, 84, 177, 219], dtype=uint8), 'file_name': array(b'3.jpg', dtype='|S5'), 'label': array(99, dtype=int32)}\n", - "sample: {'data': array([230, 218, 209, 174, 7, 184, 62, 212, 0, 248, 18, 57, 21,\n", - " 217, 242, 113, 192, 162, 145, 89, 70, 85, 79, 29, 115, 69,\n", - " 49, 30, 112], dtype=uint8), 'file_name': array(b'2.jpg', dtype='|S5'), 'label': array(56, dtype=int32)}\n", - "sample: {'data': array([ 16, 99, 179, 119, 168, 238, 36, 111, 38, 60, 113, 140, 142,\n", - " 40, 162, 144, 144, 150, 188, 177, 30, 212, 81, 69, 82, 19,\n", - " 63, 255, 217], dtype=uint8), 'file_name': array(b'1.jpg', dtype='|S5'), 'label': array(0, dtype=int32)}\n", - "sample: {'data': array([230, 218, 209, 174, 7, 184, 62, 212, 0, 248, 18, 57, 
21,\n", - " 217, 242, 113, 192, 162, 145, 89, 70, 85, 79, 29, 115, 69,\n", - " 49, 30, 112], dtype=uint8), 'file_name': array(b'2.jpg', dtype='|S5'), 'label': array(56, dtype=int32)}\n", - "Got 6 samples\n" - ] - } - ], - "source": [ - " data_set = ds.MindDataset(dataset_file=\"test.mindrecord0\") # read full dataset\n", - " count = 0\n", - " for item in data_set.create_dict_iterator(output_numpy=True):\n", - " print(\"sample: {}\".format(item))\n", - " count += 1\n", - " print(\"Got {} samples\".format(count))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/tutorials/training/source_en/advanced_use/custom_debugging_info.md b/tutorials/training/source_en/advanced_use/custom_debugging_info.md deleted file mode 100644 index c37c8dfac548a253f20ecb8a47197759f43a1ebe..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/custom_debugging_info.md +++ /dev/null @@ -1,383 +0,0 @@ -# Custom Debugging Information - -`Linux` `Ascend` `GPU` `CPU` `Model Optimization` `Intermediate` `Expert` - - - -- [Custom Debugging Information](#custom-debugging-information) - - [Overview](#overview) - - [Introduction to Callback](#introduction-to-callback) - - [Callback Capabilities of MindSpore](#callback-capabilities-of-mindspore) - - [Custom Callback](#custom-callback) - - [MindSpore Metrics](#mindspore-metrics) - - [MindSpore Print Operator](#mindspore-print-operator) - - [Data Dump Introduction](#data-dump-introduction) - - [Synchronous Dump](#synchronous-dump) - - [Asynchronous Dump](#asynchronous-dump) - - [Running Data 
Recorder](#running-data-recorder) - - [Usage](#usage) - - [Log-related Environment Variables and Configurations](#log-related-environment-variables-and-configurations) - - - - - -## Overview - -This section describes how to use the customized capabilities provided by MindSpore, such as `callback`, `metrics`, `Print` operators and log printing, to help you quickly debug the training network. - -## Introduction to Callback - -Here, callback is not a function but a class. You can use callback to observe the internal status and related information of the network during training or perform specific actions in a specific period. -For example, you can monitor the loss, save model parameters, dynamically adjust parameters, and terminate training tasks in advance. - -### Callback Capabilities of MindSpore - -MindSpore provides the callback capabilities to allow users to insert customized operations in a specific phase of training or inference, including: - -- Callback classes such as `ModelCheckpoint`, `LossMonitor`, and `SummaryCollector` provided by the MindSpore framework. -- Custom callback classes. - -Usage: Transfer the callback object in the `model.train` method. The callback object can be a list, for example: - -```python -ckpt_cb = ModelCheckpoint() -loss_cb = LossMonitor() -summary_cb = SummaryCollector(summary_dir='./summary_dir') -model.train(epoch, dataset, callbacks=[ckpt_cb, loss_cb, summary_cb]) -``` - -`ModelCheckpoint` can save model parameters for retraining or inference. -`LossMonitor` can output loss information in logs for users to view. In addition, `LossMonitor` monitors the loss value change during training. When the loss value is `Nan` or `Inf`, the training terminates. -`SummaryCollector` can save the training information to files for later use. -During the training process, the callback list will execute the callback function in the defined order. Therefore, in the definition process, the dependency between callbacks needs to be considered. 
- -### Custom Callback - -You can customize callback based on the `callback` base class as required. - -The callback base class is defined as follows: - -```python -class Callback(): - """Callback base class""" - def begin(self, run_context): - """Called once before the network executing.""" - pass - - def epoch_begin(self, run_context): - """Called before each epoch beginning.""" - pass - - def epoch_end(self, run_context): - """Called after each epoch finished.""" - pass - - def step_begin(self, run_context): - """Called before each step beginning.""" - pass - - def step_end(self, run_context): - """Called after each step finished.""" - pass - - def end(self, run_context): - """Called once after network training.""" - pass -``` - -The callback can record important information during training and transfer the information to the callback object through a dictionary variable `cb_params`, -You can obtain related attributes from each custom callback and perform customized operations. You can also customize other variables and transfer them to the `cb_params` object. - -The main attributes of `cb_params` are as follows: - -- loss_fn: Loss function -- optimizer: Optimizer -- train_dataset: Training dataset -- cur_epoch_num: Number of current epochs -- cur_step_num: Number of current steps -- batch_num: Number of batches in an epoch -- ... - -You can inherit the callback base class to customize a callback object. - -Here are two examples to further explain the usage of custom Callback. - -> custom `Callback` sample code: -> -> - -- Terminate training within the specified time. 
- - ```python - class StopAtTime(Callback): - def __init__(self, run_time): - super(StopAtTime, self).__init__() - self.run_time = run_time*60 - - def begin(self, run_context): - cb_params = run_context.original_args() - cb_params.init_time = time.time() - - def step_end(self, run_context): - cb_params = run_context.original_args() - epoch_num = cb_params.cur_epoch_num - step_num = cb_params.cur_step_num - loss = cb_params.net_outputs - cur_time = time.time() - if (cur_time - cb_params.init_time) > self.run_time: - print("epoch: ", epoch_num, " step: ", step_num, " loss: ", loss) - run_context.request_stop() - ``` - - The output is as follows: - - ```text - epoch: 20 step: 32 loss: 2.298344373703003 - ``` - - The implementation principle is: You can use the `run_context.original_args` method to obtain the `cb_params` dictionary, which contains the main attribute information described above. - In addition, you can modify and add values in the dictionary. In the preceding example, an `init_time` object is defined in `begin` and transferred to the `cb_params` dictionary. - A decision is made at each `step_end`. When the training time is longer than the configured time threshold, a training termination signal will be sent to the `run_context` to terminate the training in advance and the current values of epoch, step, and loss will be printed. - -- Save the checkpoint file with the highest accuracy during training. 
- - ```python - class SaveCallback(Callback): - def __init__(self, eval_model, ds_eval): - super(SaveCallback, self).__init__() - self.model = eval_model - self.ds_eval = ds_eval - self.acc = 0 - - def step_end(self, run_context): - cb_params = run_context.original_args() - result = self.model.eval(self.ds_eval) - if result['accuracy'] > self.acc: - self.acc = result['accuracy'] - file_name = str(self.acc) + ".ckpt" - save_checkpoint(save_obj=cb_params.train_network, ckpt_file_name=file_name) - print("Save the maximum accuracy checkpoint,the accuracy is", self.acc) - ``` - - The specific implementation principle is: define a callback object, and initialize the object to receive the model object and the ds_eval (verification dataset). Verify the accuracy of the model in the step_end phase. When the accuracy is the current highest, automatically trigger the save checkpoint method to save the current parameters. - -## MindSpore Metrics - -After the training is complete, you can use metrics to evaluate the training result. - -MindSpore provides multiple metrics, such as `accuracy`, `loss`, `tolerance`, `recall`, and `F1`. - -You can define a metrics dictionary object that contains multiple metrics and transfer them to the `model` object and use the `model.eval` function to verify the training result. - -> `metrics` sample code: -> -> - -```python -metrics = { - 'accuracy': nn.Accuracy(), - 'loss': nn.Loss(), - 'precision': nn.Precision(), - 'recall': nn.Recall(), - 'f1_score': nn.F1() -} -model = Model(network=net, loss_fn=net_loss, optimizer=net_opt, metrics=metrics) -result = model.eval(ds_eval) -``` - -The `model.eval` method returns a dictionary that contains the metrics and results transferred to the metrics. - -The callback function can also be used in the eval process, and the user can call the related API or customize the callback method to achieve the desired function. 
- -You can also define your own metrics class by inheriting the `Metric` base class and rewriting the `clear`, `update`, and `eval` methods. - -The `Accuracy` operator is used as an example to describe the internal implementation principle. - -The `Accuracy` inherits the `EvaluationBase` base class and rewrites the preceding three methods. - -- The `clear` method initializes related calculation parameters in the class. -- The `update` method accepts the predicted value and tag value and updates the internal variables of Accuracy. -- The `eval` method calculates related indicators and returns the calculation result. - -By invoking the `eval` method of `Accuracy`, you will obtain the calculation result. - -You can understand how `Accuracy` runs by using the following code: - -```python -x = Tensor(np.array([[0.2, 0.5], [0.3, 0.1], [0.9, 0.6]])) -y = Tensor(np.array([1, 0, 1])) -metric = Accuracy() -metric.clear() -metric.update(x, y) -accuracy = metric.eval() -print('Accuracy is ', accuracy) -``` - -The output is as follows: - -```text -Accuracy is 0.6667 -``` - -## MindSpore Print Operator - -MindSpore-developed `Print` operator is used to print the tensors or character strings input by users. Multiple strings, multiple tensors, and a combination of tensors and strings are supported, which are separated by comma (,). The `Print` operator is only supported in Ascend environment. -The method of using the MindSpore `Print` operator is the same as using other operators. You need to assert MindSpore `Print` operator in `__init__` and invoke it using `construct`. The following is an example. 
- -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.nn as nn -import mindspore.context as context - -context.set_context(mode=context.GRAPH_MODE) - -class PrintDemo(nn.Cell): - def __init__(self): - super(PrintDemo, self).__init__() - self.print = ops.Print() - - def construct(self, x, y): - self.print('print Tensor x and Tensor y:', x, y) - return x - -x = Tensor(np.ones([2, 1]).astype(np.int32)) -y = Tensor(np.ones([2, 2]).astype(np.int32)) -net = PrintDemo() -output = net(x, y) -``` - -The output is as follows: - -```text -print Tensor x and Tensor y: -Tensor(shape=[2, 1], dtype=Int32, value= -[[1] - [1]]) -Tensor(shape=[2, 2], dtype=Int32, value= -[[1 1] - [1 1]]) -``` - -## Data Dump Introduction - -When training the network, if the training result deviates from the expectation, the input and output of the operator can be saved for debugging through the data dump function. For detailed Dump function introduction, please refer to [Dump Mode](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/dump_in_graph_mode.html#dump-introduction). - -### Synchronous Dump - -Synchronous Dump function usage reference [Synchronous Dump Step](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/dump_in_graph_mode.html#synchronous-dump-step). - -### Asynchronous Dump - -Asynchronous Dump function usage reference [Asynchronous Dump Step](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/dump_in_graph_mode.html#asynchronous-dump-step)。 - -## Running Data Recorder - -Running Data Recorder(RDR) is the feature MindSpore provides to record data while training program is running. If a running exception occurs in MindSpore, the pre-recorded data in MindSpore is automatically exported to assist in locating the cause of the running exception. 
Different exceptions will export different data, for instance, the occurrence of `Run task error` exception, the computational graph, execution sequence of the graph, memory allocation and other information will be exported to assist in locating the cause of the exception. - -> Not all run exceptions export data, and only partial exception exports are currently supported. -> -> Only supports the data collection of CPU/Ascend/GPU in the training scenario with the graph mode. - -### Usage - -#### Set RDR By Configuration File - -1. Create the configuration file `mindspore_config.json`. - - ```json - { - "rdr": { - "enable": true, - "path": "/home/mindspore/rdr" - } - } - ``` - - > enable: Controls whether the RDR is enabled. - > - > path: Set the path to which RDR stores data. The current path must be absolute. - -2. Configure RDR via `context`. - - ```python3 - context.set_context(env_config_path="./mindspore_config.json") - ``` - -#### Set RDR By Environment Variables - -Set `export MS_RDR_ENABLE=1` to enable RDR, and set the absolute path for recording data: `export MS_RDR_PATH=/absolute/path`. - -> The configuration file set by the user takes precedence over the environment variables. - -#### Exception Handling - -If MindSpore is used for training on Ascend 910, there is an exception `Run task error` in training. - -When we go to the directory `/home/mindspore-rdr`, we can see several files appear in this directory, each file represents a kind of data. For example, `hwopt_d_before_graph_0.ir` is a computational graph file. You can use a text tool to open this file to view the calculational graph and analyze whether the calculational graph meets your expectations. - -## Log-related Environment Variables and Configurations - -MindSpore uses glog to output logs. The following environment variables are commonly used: - -- `GLOG_v` - - The environment variable specifies the log level. - The default value is 2, indicating the WARNING level. 
The values are as follows: 0: DEBUG; 1: INFO; 2: WARNING; 3: ERROR. - -- `GLOG_logtostderr` - - The environment variable specifies the log output mode. - When `GLOG_logtostderr` is set to 1, logs are output to the screen. If the value is set to 0, logs are output to a file. The default value is 1. - -- `GLOG_log_dir` - - The environment variable specifies the log output path. - If `GLOG_logtostderr` is set to 0, value of this variable must be specified. - If `GLOG_log_dir` is specified and the value of `GLOG_logtostderr` is 1, logs are output to the screen but not to a file. - Logs of C++ and Python will be output to different files. The file name of C++ log complies with the naming rule of `GLOG` log file. Here, the name is `mindspore.MachineName.UserName.log.LogLevel.Timestamp`. The file name of Python log is `mindspore.log`. - -- `MS_SUBMODULE_LOG_v` - - The environment variable specifies log levels of C++ sub modules of MindSpore. - The environment variable is assigned as: `MS_SUBMODULE_LOG_v="{SubModule1:LogLevel1,SubModule2:LogLevel2,...}"`. - The specified sub module log level will overwrite the global log level. The meaning of sub module log level is the same as `GLOG_v`, the sub modules of MindSpore are categorized by source directory is shown in the below table. - E.g. when set `GLOG_v=1 MS_SUBMODULE_LOG_v="{PARSER:2,ANALYZER:2}"` then log levels of `PARSER` and `ANALYZER` are WARNING, other modules' log levels are INFO. - -- `GLOG_stderrthreshold` - - The log module will print logs to the screen when these logs are output to a file. This environment variable is used to control the log level printed to the screen in this scenario. - The default value is 2, indicating the WARNING level. The values are as follows: 0: DEBUG; 1: INFO; 2: WARNING; 3: ERROR. 
- -Sub modules of MindSpore grouped by source directory: - -| Source Files | Sub Module Name | -| ------------ | --------------- | -| mindspore/ccsrc/backend/kernel_compiler | KERNEL | -| mindspore/ccsrc/backend/optimizer | PRE_ACT | -| mindspore/ccsrc/backend/session | SESSION | -| mindspore/ccsrc/common | COMMON | -| mindspore/ccsrc/debug | DEBUG | -| mindspore/ccsrc/frontend/operator | ANALYZER | -| mindspore/ccsrc/frontend/optimizer | OPTIMIZER | -| mindspore/ccsrc/frontend/parallel | PARALLEL | -| mindspore/ccsrc/minddata/dataset | MD | -| mindspore/ccsrc/minddata/mindrecord | MD | -| mindspore/ccsrc/pipeline/jit/*.cc | PIPELINE | -| mindspore/ccsrc/pipeline/jit/parse | PARSER | -| mindspore/ccsrc/pipeline/jit/static_analysis | ANALYZER | -| mindspore/ccsrc/pipeline/pynative | PYNATIVE | -| mindspore/ccsrc/profiler | PROFILER | -| mindspore/ccsrc/pybind_api | COMMON | -| mindspore/ccsrc/runtime/device | DEVICE | -| mindspore/ccsrc/transform/graph_ir | GE_ADPT | -| mindspore/ccsrc/transform/express_ir | EXPRESS | -| mindspore/ccsrc/utils | UTILS | -| mindspore/ccsrc/vm | VM | -| mindspore/ccsrc | ME | -| mindspore/core/gvar | COMMON | -| mindspore/core/ | CORE | - -> The glog does not support log rotate. To control the disk space occupied by log files, use the log file management tool provided by the operating system, such as: logrotate of Linux. 
diff --git a/tutorials/training/source_en/advanced_use/custom_loss_function.md b/tutorials/training/source_en/advanced_use/custom_loss_function.md deleted file mode 100644 index f93603bd3d960cbae9c718f087fb780b75cfe8cb..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/custom_loss_function.md +++ /dev/null @@ -1,447 +0,0 @@ -# Customizing and Using Loss Function - -Translator: [Misaka19998](https://gitee.com/Misaka19998) - -`Linux` `Ascend` `GPU` `CPU` `Model Development` `Expert` - - - - - -- [Customizing and Using Loss Function](#Customizing-and-Using-Loss-Function) - - [Overview](#Overview) - - [Defining Loss Function](#Defining-Loss-Function) - - [Loss Function and Model Training](#Loss-Function-and-Model-Training) - - [Defining Dataset and Network](#Defining-Dataset-and-Network) - - [Training Model](#Training-Model) - - [Multilabel Loss Function and Model Training](#multilabel-loss-function-and-model-training) - - [Defining Multilabel Dataset](#Defining-Multilabel-Dataset) - - [Defining Multilabel Loss Function](#Defining-Multilabel-Loss-Function) - - [Training Multilabel Model](#Training-Multilabel-Model) - - - -## Overview - -Loss function, also known as object function, is used for measuring the difference between predicted and true value. In deep learning, training a model is a process of decrease the loss value by iteration. So it is important to choose a loss function while training a model. A better loss function can efficiently increase model's performance. - -MindSpore provides many general loss functions for users. However, they are not suitable for all the situations. Users need to define their own loss functions in some cases. So this course will introduce how to define loss functions. - -## Defining Loss Function - -Cell is the basic network module of MindSpore, and can be used to construct the network and define loss functions. The way to define a loss function is the same as defining a network. 
The difference is that its execution logic is used to calculate the error between the output of the forward network and the true value. - -Taking a MindSpore loss function, L1 Loss, as an example. The way to define the loss function is as follows: - -```python -import mindspore.nn as nn -import mindspore.ops as ops - -class L1Loss(nn.Cell): - def __init__(self): - super(L1Loss, self).__init__() - self.abs = ops.Abs() - self.reduce_mean = ops.ReduceMean() - - def construct(self, base, target): - x = self.abs(base - target) - return self.reduce_mean(x) -``` - -The needed operator will be instantiated in the `__init__` method and used in `construct`. Then an L1Loss function is defined. - -With a series of given predicted and true values, users can call the loss function to get the difference between them, as follows: - -```python -import numpy as np -from mindspore import Tensor - -loss = L1Loss() -input_data = Tensor(np.array([0.1, 0.2, 0.3]).astype(np.float32)) -target_data = Tensor(np.array([0.1, 0.2, 0.2]).astype(np.float32)) - -output = loss(input_data, target_data) -print(output) -``` - -Taking the `Ascend` backend as an example, the output is as follows: - -```python -0.03333334 -``` - -When the loss function is defined, the base class `_Loss` of the loss function can also be inherited. `_Loss` provides the `get_loss` method, which is used to sum or average the loss values and output a scalar. The definition of L1Loss using `_Loss` as the base class is as follows: - -```python -import mindspore.ops as ops -from mindspore.nn.loss.loss import _Loss - -class L1Loss(_Loss): - def __init__(self, reduction="mean"): - super(L1Loss, self).__init__(reduction) - self.abs = ops.Abs() - - def construct(self, base, target): - x = self.abs(base - target) - return self.get_loss(x) -``` - -Firstly, we use `_Loss` as the base class of L1Loss, and then add a parameter `reduction` to `__init__`, and then pass it to the base class by `super`. Finally, we call the `get_loss` method in `construct`. 
`reduction` has three legal parameters, `mean`, `sum` and `none`, which represent average, sum and original value. - -## Loss Function and Model Training - -Now we train model by the defined L1Loss. - -### Defining Dataset and Network - -Taking the simple linear function fitting as an example. The dataset and network structure is defined as follows: - -> For a detailed introduction of linear fitting, please refer to the tutorial [Implementing Simple Linear Function Fitting](https://www.mindspore.cn/tutorial/training/en/master/quick_start/linear_regression.html) - -1. Defining the Dataset - - ```python - import numpy as np - from mindspore import dataset as ds - - def get_data(num, w=2.0, b=3.0): - for _ in range(num): - x = np.random.uniform(-10.0, 10.0) - noise = np.random.normal(0, 1) - y = x * w + b + noise - yield np.array([x]).astype(np.float32), np.array([y]).astype(np.float32) - - def create_dataset(num_data, batch_size=16): - dataset = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data', 'label']) - dataset = dataset.batch(batch_size) - return dataset - ``` - -2. Defining the Network - - ```python - from mindspore.common.initializer import Normal - import mindspore.nn as nn - - class LinearNet(nn.Cell): - def __init__(self): - super(LinearNet, self).__init__() - self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02)) - - def construct(self, x): - return self.fc(x) - ``` - -### Training Model - -`Model` is a MindSpore high level API which is for training, evaluating and inferring a model. After creating a dataset and defining `Model`, we can train the model by API `train`. Then we will train the model by `Model`, and use the defined `L1Loss` as loss function. - -1. Defining forward network, loss function and optimizer - - We will use the defined `LinearNet` and `L1Loss` as forward network and loss function, and choose MindSpore's `Momemtum` as optimizer. 
- - ```python - # define network - net = LinearNet() - # define loss function - loss = L1Loss() - # define optimizer - opt = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9) - ``` - -2. Defining `Model` - - When defining `Model`, it specifies the forward network, loss function and optimizer. The `Model` will associate them internally to form a training network. - - ```python - from mindspore import Model - - # define Model - model = Model(net, loss, opt) - ``` - -3. Creating dataset, and calling `train` to train the model - - When calling the train interface, you must specify the number of iterations `epoch` and the training dataset `train_dataset`. We set `epoch` to 1, and use the dataset created by `create_dataset` as the training set. `callbacks` is an optional parameter of the `train` interface. `LossMonitor` can be used in `callbacks` to monitor the change of the loss function value during the training process. `dataset_sink_mode` is also an optional parameter, here is set to False, which means to use non-sink mode for training. 
- - ```python - from mindspore.train.callback import LossMonitor - - # create dataset - ds_train = create_dataset(num_data=160) - # training - model.train(epoch=1, train_dataset=ds_train, callbacks=[LossMonitor()], dataset_sink_mode=False) - ``` - -The complete code is as follows: - -```python -import numpy as np - -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Model -from mindspore import dataset as ds -from mindspore.nn.loss.loss import _Loss -from mindspore.common.initializer import Normal -from mindspore.train.callback import LossMonitor - -class LinearNet(nn.Cell): - def __init__(self): - super(LinearNet, self).__init__() - self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02)) - - def construct(self, x): - return self.fc(x) - -class L1Loss(_Loss): - def __init__(self, reduction="mean"): - super(L1Loss, self).__init__(reduction) - self.abs = ops.Abs() - - def construct(self, base, target): - x = self.abs(base - target) - return self.get_loss(x) - -def get_data(num, w=2.0, b=3.0): - for _ in range(num): - x = np.random.uniform(-10.0, 10.0) - noise = np.random.normal(0, 1) - y = x * w + b + noise - yield np.array([x]).astype(np.float32), np.array([y]).astype(np.float32) - -def create_dataset(num_data, batch_size=16): - dataset = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data', 'label']) - dataset = dataset.batch(batch_size) - return dataset - -# define network -net = LinearNet() -# define loss functhon -loss = L1Loss() -# define optimizer -opt = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9) -# define Model -model = Model(net, loss, opt) -# create dataset -ds_train = create_dataset(num_data=160) -# training -model.train(epoch=1, train_dataset=ds_train, callbacks=[LossMonitor()], dataset_sink_mode=False) -``` - -The output is as follows: - -```text -epoch: 1 step: 1, loss is 8.328788 -epoch: 1 step: 2, loss is 8.594973 -epoch: 1 step: 3, loss is 13.299595 -epoch: 1 step: 4, loss is 9.04059 
-epoch: 1 step: 5, loss is 8.991402 -epoch: 1 step: 6, loss is 6.5928526 -epoch: 1 step: 7, loss is 8.239887 -epoch: 1 step: 8, loss is 7.3984795 -epoch: 1 step: 9, loss is 7.33724 -epoch: 1 step: 10, loss is 4.3588376 -``` - -## Multilabel Loss Function and Model Training - -In the last chapter, we defined a simple loss function `L1Loss`. Writing other loss functions is similar to `L1Loss`. However, some deep learning datasets are complex, such as the object detection network Faster R-CNN's dataset, which has several labels rather than a single data/label pair. The definition and usage of the loss function are different in this situation. - -Faster R-CNN's structure is too complex to describe in detail here. This chapter will expand the linear function fitting by creating a multilabel dataset. Then we will introduce how to define a loss function and train with `Model`. - -### Defining Multilabel Dataset - -Firstly, we define the dataset and make a slight modification to it: - -1. `get_multilabel_data` will output two labels, `y1` and `y2`. -2. The parameters of `column_names` of `GeneratorDataset` are ['data', 'label1', 'label2']. - -Then `create_multilabel_dataset` will create a dataset which has one `data`, and two labels `label1` and `label2`. 
- -```python -import numpy as np -from mindspore import dataset as ds - -def get_multilabel_data(num, w=2.0, b=3.0): - for _ in range(num): - x = np.random.uniform(-10.0, 10.0) - noise1 = np.random.normal(0, 1) - noise2 = np.random.normal(-1, 1) - y1 = x * w + b + noise1 - y2 = x * w + b + noise2 - yield np.array([x]).astype(np.float32), np.array([y1]).astype(np.float32), np.array([y2]).astype(np.float32) - -def create_multilabel_dataset(num_data, batch_size=16): - dataset = ds.GeneratorDataset(list(get_multilabel_data(num_data)), column_names=['data', 'label1', 'label2']) - dataset = dataset.batch(batch_size) - return dataset -``` - -### Defining Multilabel Loss Function - -We will define a loss function `L1LossForMultiLabel` according to defined multilabel dataset. The inputs of loss function's `construct` are predicted value `base`, and true value `target1` and `target2`. We will calculate the error between predict value and `target1`, `target2` respectively, and take the average of two values as final loss. The code is as follow: - -```python -import mindspore.ops as ops -from mindspore.nn.loss.loss import _Loss - -class L1LossForMultiLabel(_Loss): - def __init__(self, reduction="mean"): - super(L1LossForMultiLabel, self).__init__(reduction) - self.abs = ops.Abs() - - def construct(self, base, target1, target2): - x1 = self.abs(base - target1) - x2 = self.abs(base - target2) - return self.get_loss(x1)/2 + self.get_loss(x2)/2 -``` - -### Training Multilabel Model - -Model will internally link the forward network, loss function and optimizer. 
The forward network is connected to the loss function through `nn.WithLossCell`, which is implemented as follows: - -```python -import mindspore.nn as nn - -class WithLossCell(nn.Cell): - def __init__(self, backbone, loss_fn): - super(WithLossCell, self).__init__(auto_prefix=False) - self._backbone = backbone - self._loss_fn = loss_fn - - def construct(self, data, label): - output = self._backbone(data) - return self._loss_fn(output, label) -``` - -It should be noted that the default `nn.WithLossCell` of normal `Model` only has two inputs `data` and `label`, which is not suitable for the multilabel case. Users need to connect the forward network and loss function as follows if they want to train by `Model`. - -1. Defining the suitable `CustomWithLossCell` in this case - - We can copy the definition of `nn.WithLossCell` by changing the input of the `construct` to three parameters, that is, passing data to `backbone`, and predicted and true values to `loss_fn`. - - ```python - import mindspore.nn as nn - - class CustomWithLossCell(nn.Cell): - def __init__(self, backbone, loss_fn): - super(CustomWithLossCell, self).__init__(auto_prefix=False) - self._backbone = backbone - self._loss_fn = loss_fn - - def construct(self, data, label1, label2): - output = self._backbone(data) - return self._loss_fn(output, label1, label2) - ``` - -2. Connecting the forward network and loss function by `CustomWithLossCell` - - We use the forward network `LinearNet` defined in the last chapter, and loss function `L1LossForMultiLabel`. Then we connect them by `CustomWithLossCell` as follows: - - ```python - net = LinearNet() - loss = L1LossForMultiLabel() - loss_net = CustomWithLossCell(net, loss) - ``` - - `loss_net` contains the logic of forward network and loss function. - -3. Defining Model and Training - - The `network` of `Model` is set to `loss_net`. `loss_fn` is not specified, while the optimizer is still `Momentum`. 
As the user does not specify `loss_fn`, `Model` will know that `network` has its own loss function logic, and it will not encapsulate the forward network and loss function with `nn.WithLossCell`. - - Creating a multilabel dataset by `create_multilabel_dataset` and training: - - ```python - from mindspore.train.callback import LossMonitor - from mindspore import Model - - opt = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9) - model = Model(network=loss_net, optimizer=opt) - ds_train = create_multilabel_dataset(num_data=160) - model.train(epoch=1, train_dataset=ds_train, callbacks=[LossMonitor()], dataset_sink_mode=False) - ``` - -The complete code is as follows: - -```python -import numpy as np - -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Model -from mindspore import dataset as ds -from mindspore.nn.loss.loss import _Loss -from mindspore.common.initializer import Normal -from mindspore.train.callback import LossMonitor - -class LinearNet(nn.Cell): - def __init__(self): - super(LinearNet, self).__init__() - self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02)) - - def construct(self, x): - return self.fc(x) - -class L1LossForMultiLabel(_Loss): - def __init__(self, reduction="mean"): - super(L1LossForMultiLabel, self).__init__(reduction) - self.abs = ops.Abs() - - def construct(self, base, target1, target2): - x1 = self.abs(base - target1) - x2 = self.abs(base - target2) - return self.get_loss(x1)/2 + self.get_loss(x2)/2 - -class CustomWithLossCell(nn.Cell): - def __init__(self, backbone, loss_fn): - super(CustomWithLossCell, self).__init__(auto_prefix=False) - self._backbone = backbone - self._loss_fn = loss_fn - - def construct(self, data, label1, label2): - output = self._backbone(data) - return self._loss_fn(output, label1, label2) - -def get_multilabel_data(num, w=2.0, b=3.0): - for _ in range(num): - x = np.random.uniform(-10.0, 10.0) - noise1 = np.random.normal(0, 1) - noise2 = np.random.normal(-1, 1) - y1 = x * 
w + b + noise1 - y2 = x * w + b + noise2 - yield np.array([x]).astype(np.float32), np.array([y1]).astype(np.float32), np.array([y2]).astype(np.float32) - -def create_multilabel_dataset(num_data, batch_size=16): - dataset = ds.GeneratorDataset(list(get_multilabel_data(num_data)), column_names=['data', 'label1', 'label2']) - dataset = dataset.batch(batch_size) - return dataset - -net = LinearNet() -loss = L1LossForMultiLabel() -# build loss network -loss_net = CustomWithLossCell(net, loss) - -opt = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9) -model = Model(network=loss_net, optimizer=opt) -ds_train = create_multilabel_dataset(num_data=160) -model.train(epoch=1, train_dataset=ds_train, callbacks=[LossMonitor()], dataset_sink_mode=False) -``` - -The output is as follow: - -```text -epoch: 1 step: 1, loss is 11.039986 -epoch: 1 step: 2, loss is 7.7847576 -epoch: 1 step: 3, loss is 9.236277 -epoch: 1 step: 4, loss is 8.3316345 -epoch: 1 step: 5, loss is 6.957058 -epoch: 1 step: 6, loss is 9.231144 -epoch: 1 step: 7, loss is 9.1072 -epoch: 1 step: 8, loss is 6.7703295 -epoch: 1 step: 9, loss is 6.363703 -epoch: 1 step: 10, loss is 5.014839 -``` - -This chapter explains how to define loss function and train by `Model` in multilabel case. In some other cases, we can train the model by similar ways. diff --git a/tutorials/training/source_en/advanced_use/custom_operator.rst b/tutorials/training/source_en/advanced_use/custom_operator.rst deleted file mode 100644 index d3aa912a2ab6653f6b66d1de56b97441c5fcf5c7..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/custom_operator.rst +++ /dev/null @@ -1,9 +0,0 @@ -Custom Operator -=============== - -.. 
toctree:: - :maxdepth: 1 - - custom_operator_ascend - custom_operator_gpu - custom_operator_cpu \ No newline at end of file diff --git a/tutorials/training/source_en/advanced_use/custom_operator_ascend.md b/tutorials/training/source_en/advanced_use/custom_operator_ascend.md deleted file mode 100644 index c73061d7302ac928869f55df90c1e2bdf33d7e4b..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/custom_operator_ascend.md +++ /dev/null @@ -1,264 +0,0 @@ -# Custom Operators (Ascend) - -`Linux` `Ascend` `Model Development` `Expert` - - - -- [Custom Operators (Ascend)](#custom-operators-ascend) - - [Overview](#overview) - - [Registering the Operator Primitive](#registering-the-operator-primitive) - - [Implementing a TBE Operator and Registering the Operator Information](#implementing-a-tbe-operator-and-registering-the-operator-information) - - [Implementing a TBE Operator](#implementing-a-tbe-operator) - - [Registering the Operator Information](#registering-the-operator-information) - - [Example](#example) - - [Using Custom Operators](#using-custom-operators) - - [Defining the bprop Function for an Operator](#defining-the-bprop-function-for-an-operator) - - - - - -## Overview - -When built-in operators cannot meet requirements during network development, you can call the Python API of MindSpore to quickly extend custom operators of the Ascend AI processor. - -To add a custom operator, you need to register the operator primitive, implement the operator, and register the operator information. - -The related concepts are as follows: - -- Operator primitive: defines the frontend API prototype of an operator on the network. It is the basic unit for forming a network model and includes the operator name, attribute (optional), input and output names, output shape inference method, and output dtype inference method. 
-- Operator implementation: describes the implementation of the internal computation logic for an operator through the DSL API provided by the Tensor Boost Engine (TBE). The TBE supports the development of custom operators based on the Ascend AI chip. -- Operator information: describes basic information about a TBE operator, such as the operator name and supported input and output types. It is the basis for the backend to select and map operators. - -This section takes a Square operator as an example to describe how to customize an operator. - -> For details, see cases in [tests/st/ops/custom_ops_tbe](https://gitee.com/mindspore/mindspore/tree/master/tests/st/ops/custom_ops_tbe) in the MindSpore source code. - -## Registering the Operator Primitive - -The primitive of an operator is a subclass inherited from `PrimitiveWithInfer`. The type name of the subclass is the operator name. - -The definition of the custom operator primitive is the same as that of the built-in operator primitive. - -- The attribute is defined by the input parameter of the constructor function `__init__`. The operator in this test case has no attribute. Therefore, `__init__` has only one input parameter. For details about test cases in which operators have attributes, see [custom add3](https://gitee.com/mindspore/mindspore/blob/master/tests/st/ops/custom_ops_tbe/cus_add3.py) in the MindSpore source code. -- The input and output names are defined by the `init_prim_io_names` function. -- The shape inference method of the output tensor is defined in the `infer_shape` function, and the dtype inference method of the output tensor is defined in the `infer_dtype` function. - -The only difference between a custom operator and a built-in operator is that the operator implementation function (`from square_impl import CusSquareImpl`) needs to be imported to the `__init__` function to register the operator implementation with the backend for the custom operator. 
In this test case, the operator implementation and information are defined in `square_impl.py`, and the definition will be described in the following parts. - -The following code takes the Square operator primitive `cus_square.py` as an example: - -```python -from mindspore.ops import prim_attr_register, PrimitiveWithInfer -import mindspore.ops as ops -# y = x^2 -class CusSquare(PrimitiveWithInfer): - """ - The definition of the CusSquare primitive. - """ - @prim_attr_register - def __init__(self): - self.init_prim_io_names(inputs=['x'], outputs=['y']) - from square_impl import CusSquareImpl # Import the entry function of the kernel implementation from relative path or PYTHONPATH. - - def infer_shape(self, data_shape): - return data_shape - - def infer_dtype(self, data_dtype): - return data_dtype -``` - -## Implementing a TBE Operator and Registering the Operator Information - -### Implementing a TBE Operator - -To compile an operator implementation, you need to compile a computable function and an entry function first. - -The computable function of an operator is mainly used to encapsulate the computation logic of the operator for the main function to call. The computation logic is implemented by calling the combined API of the TBE. - -The entry function of an operator describes the internal process of compiling the operator. The process is as follows: - -1. Prepare placeholders to be input. A placeholder will return a tensor object that represents a group of input data. -2. Call the computable function. The computable function uses the API provided by the TBE to describe the computation logic of the operator. -3. Call the scheduling module. The model tiles the operator data based on the scheduling description and specifies the data transfer process to ensure optimal hardware execution. By default, the automatic scheduling module (`auto_schedule`) can be used. -4. Call `cce_build_code` to compile and generate an operator binary file. 
- -> The input parameters of the entry function require the input information of each operator, output information of each operator, operator attributes (optional), and `kernel_name` (name of the generated operator binary file). The input and output information is encapsulated in dictionaries, including the input and output shape and dtype when the operator is called on the network. - -For details about TBE operator development, visit the [TBE website](https://support.huaweicloud.com/odevg-A800_3000_3010/atlaste_10_0063.html). For details about how to debug and optimize the TBE operator, visit the [Mind Studio website](https://support.huaweicloud.com/usermanual-mindstudioc73/atlasmindstudio_02_0043.html). - -### Registering the Operator Information - -The operator information is key for the backend to select the operator implementation and guides the backend to insert appropriate type and format conversion operators. It uses the `TBERegOp` API for definition and uses the `op_info_register` decorator to bind the operator information to the entry function of the operator implementation. When the .py operator implementation file is imported, the `op_info_register` decorator registers the operator information to the operator information library at the backend. For details about how to use the operator information, see comments for the member method of `TBERegOp`. - -> The numbers and sequences of the input and output information defined in the operator information must be the same as those in the parameters of the entry function of the operator implementation and those listed in the operator primitive. -> -> If an operator has attributes, use `attr` to describe the attribute information in the operator information. The attribute names must be the same as those in the operator primitive definition. - -### Example - -The following takes the TBE implementation `square_impl.py` of the `Square` operator as an example. 
`square_compute` is a computable function of the operator implementation. It describes the computation logic of `x * x` by calling the API provided by `te.lang.cce`. `cus_square_op_info` is the operator information, which is defined by `TBERegOp`. For the specific field meaning of the operator information, visit the [TBE website](https://support.huaweicloud.com/odevg-A800_3000_3010/atlaste_10_0096.html). - -Note the following parameters when setting `TBERegOp`: - -- `OPAQUE` in `fusion_type("OPAQUE")` indicates that the custom operator uses the non-fusion strategy. -- `CusSquareImpl` in `kernel_name("CusSquareImpl")` must be the same as the name of the operator entry function. -- `dtype_format` is used to describe data types supported by the operator. In the following example, two types are registered, indicating that the operator supports two data types. Each type describes the supported format in order of input and output. The first `dtype_format` indicates that the data type input0 is in F32_Default format and the data type output0 is in F32_Default format. The second `dtype_format` indicates that the data type input0 is in F16_Default format and the data type output0 is in F16_Default format. -- About the interfaces `auto_schedule` and `cce_build_code`, please see the TBE documents [auto_schedule](https://support.huaweicloud.com/odevg-A800_3000_3010/atlaste_07_0071.html) and [cce_build_code](https://support.huaweicloud.com/odevg-A800_3000_3010/atlaste_07_0072.html) for details. - -```python -from __future__ import absolute_import -from te import tvm -from topi import generic -import te.lang.cce -from topi.cce import util -from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType - -def square_compute(input_x): - """ - The compute function of the CusSquare implementation. - """ - res = te.lang.cce.vmul(input_x, input_x) - return res - -# Define the kernel info of CusSquare. 
-cus_square_op_info = TBERegOp("CusSquare") \ - .fusion_type("OPAQUE") \ - .partial_flag(True) \ - .async_flag(False) \ - .binfile_name("square.so") \ - .compute_cost(10) \ - .kernel_name("CusSquareImpl") \ - .input(0, "x", False, "required", "all") \ - .output(0, "y", False, "required", "all") \ - .dtype_format(DataType.F32_Default, DataType.F32_Default) \ - .dtype_format(DataType.F16_Default, DataType.F16_Default) \ - .get_op_info() - -# Binding kernel info with the kernel implementation. -@op_info_register(cus_square_op_info) -def CusSquareImpl(input_x, output_y, kernel_name="CusSquareImpl"): - """ - The entry function of the CusSquare implementation. - """ - shape = input_x.get("shape") - dtype = input_x.get("dtype").lower() - - shape = util.shape_refine(shape) - data = tvm.placeholder(shape, name="data", dtype=dtype.lower()) - - with tvm.target.cce(): - res = square_compute(data) - sch = generic.auto_schedule(res) - - config = {"print_ir": False, - "name": kernel_name, - "tensor_list": [data, res]} - - te.lang.cce.cce_build_code(sch, config) -``` - -## Using Custom Operators - -The usage of custom operators is the same as that of built-in operators in the network. The operators can be directly used by importing primitives. The following takes the single-operator network test of `CusSquare` as an example. - -Define the network in the `test_square.py` file. - -```python -import numpy as np -import mindspore.nn as nn -import mindspore.context as context -from mindspore import Tensor -# Import the definition of the CusSquare primitive. 
-from cus_square import CusSquare -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.square = CusSquare() - - def construct(self, data): - return self.square(data) - -def test_net(): - x = np.array([1.0, 4.0, 9.0]).astype(np.float32) - square = Net() - output = square(Tensor(x)) - print("x: ", x) - print("output: ", output) -``` - -Execute the test case. - -```bash -pytest -s tests/st/ops/custom_ops_tbe/test_square.py::test_net -``` - -The execution result is as follows: - -```text -x: [1. 4. 9.] -output: [1. 16. 81.] -``` - -## Defining the bprop Function for an Operator - -If an operator needs to support automatic differentiation, the bprop function needs to be defined in the primitive of the operator. In the bprop function, you need to describe the backward computation logic that uses the forward input, forward output, and output gradients to obtain the input gradients. The backward computation logic can be composed of built-in operators or custom backward operators. - -Note the following points when defining the bprop function: - -- The input parameter sequence of the bprop function is the forward input, forward output, and output gradients. For a multi-output operator, the forward output and output gradients are provided in the form of tuples. -- The return value of the bprop function is tuples consisting of input gradients. The sequence of elements in a tuple is the same as that of the forward input parameters. Even if there is only one input gradient, the return value must be a tuple. 
- -For example, the `CusSquare` primitive after the bprop function is added is as follows: - -```python -class CusSquare(PrimitiveWithInfer): - @prim_attr_register - def __init__(self): - """init CusSquare""" - self.init_prim_io_names(inputs=['x'], outputs=['y']) - from square_impl import CusSquareImpl - - def infer_shape(self, data_shape): - return data_shape - - def infer_dtype(self, data_dtype): - return data_dtype - - def get_bprop(self): - def bprop(data, out, dout): - twos_like = ops.OnesLike()(data) * 2.0 - gradient = ops.Mul()(data, twos_like) - dx = ops.Mul()(gradient, dout) - return (dx,) - return bprop -``` - -Define backward cases in the `test_square.py` file. - -```python -import mindspore.ops as ops -def test_grad_net(): - x = np.array([1.0, 4.0, 9.0]).astype(np.float32) - sens = np.array([1.0, 1.0, 1.0]).astype(np.float32) - square = Net() - grad = ops.GradOperation(sens_param=True) - dx = grad(square)(Tensor(x), Tensor(sens)) - print("x: ", x) - print("dx: ", dx) -``` - -Execute the test case. - -```bash -pytest -s tests/st/ops/custom_ops_tbe/test_square.py::test_grad_net -``` - -The execution result is as follows: - -```text -x: [1. 4. 9.] -dx: [2. 8. 18.] 
-``` diff --git a/tutorials/training/source_en/advanced_use/custom_operator_cpu.md b/tutorials/training/source_en/advanced_use/custom_operator_cpu.md deleted file mode 100644 index 5db68e51446429b25f37ff7bebc960282d1fea69..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/custom_operator_cpu.md +++ /dev/null @@ -1,282 +0,0 @@ -# Custom Operators (CPU) - -Translator: [JuLyAi](https://gitee.com/julyai) - -`Linux` `CPU` `model developing` `advanced_use` - - - -- [Custom Operators (CPU)](#custom-operators-cpu) - - [Overview](#overview) - - [Registration Operator's Primitives](#registration-operators-primitives) - - [Implementing CPU Operators and Registration Operators Information](#implementing-cpu-operators-and-registration-operators-information) - - [Implementing CPU Operators](#implementing-cpu-operators) - - [Registration Operators Information](#registration-operators-information) - - [Editing MindSpore](#editing-mindspore) - - [Using Custom CPU Operators](#using-custom-cpu-operators) - - [Defining Operators' BProp Functions](#defining-operators-bprop-functions) - - - - - -## Overview - -When the built-in operators are not enough for developing the network, you can extend your custom CPU operators fast and conveniently using MindSpore's Python API and C++ API. - -To add a custom operator, you need to complete 3 parts of the work, including operator primitives registration, operators implementation and operators information registration. - -Among them: - -- Operator primitives: Defining the front-end interface prototype of operators in the network; The basic unit of a network model, mainly including operator's name, attributes (optional), input / output name, output shape reasoning method, output dtype reasoning method, etc. -- Operators implementation: Using the C++ API provided by the framework and combining with the specific characteristics of the operators, the internal calculation logic of the operator can be realized. 
- -This paper will take the custom `Transpose` operator as an example to introduce the steps of customizing operators. - -## Registration Operator's Primitives - -Each operator's primitive is a subclass inherited from the class `PrimitiveWithCheck`, whose type name is the operator's name. - -The CPU operator primitives are defined under the path `mindspore/ops/operations`, and the appropriate file is selected according to the operator type. Definition of CPU operators' primitives' interface is as follows: - -- Attributes are defined by the input parameters of construction function `__init__`. Operators in this use case have no init attributes, thus `__init__` has no additional input parameters. -- The input and output names are defined by the function `init_prim_io_names`. -- Checking shape of the output tensor is defined in `check_shape` function. Checking dtype of the output tensor is defined in `check_dtype` function. -- `_checkparam` file defines a series of operations for validity checking, such as value checking, type checking, etc. - -Taking `Transpose` operator's primitive as an example, the following example codes are given. - -```python -from mindspore.ops import PrimitiveWithInfer - -class Transpose(PrimitiveWithInfer): - """ - The definition of the Transpose primitive. - """ - @prim_attr_register - def __init__(self): - """Initialize Transpose""" - self.init_prim_io_names(inputs=['x', 'perm'], outputs=['output']) - - def infer_shape(self, x, perm): - x_shape = x['shape'] - p_value = perm['value'] - if len(x_shape) != len(p_value): - raise ValueError('The dimension of x and perm must be equal.') - out_shapes = [] - for i in p_value: - out_shapes.append(x_shape[i]) - return out_shapes - - def infer_dtype(self, x_dtype, perm_dtype): - return x_dtype -``` - -## Implementing CPU Operators and Registration Operators Information - -### Implementing CPU Operators - -Usually, to implement a CPU operator needs to write a head file and a source file. 
The file path is `mindspore/ccsrc/backend/kernel_compiler/cpu`. If the logical realization of the operator is by calling the third-party library `MKL-DNN`, it will be placed in the subdirectory `mkldnn`. Please refer to [oneMkl](https://github.com/oneapi-src/oneMKL) and [oneDNN](https://github.com/oneapi-src/oneDNN) for details. - -The head file of the operator contains the registration information of the operator and the declaration of the class. The operator class inherits from the parent class of `CPUKernel` and overloads `InitKernel` and `Launch`. - -The source file of the operator is the implementation of the class. It mainly overloads the InitKernel and Launch functions. The head file example codes of the `Transpose` operator are as follows: - -```cpp -class TransposeCPUFwdKernel : public CPUKernel { - public: - TransposeCPUFwdKernel() = default; - ~TransposeCPUFwdKernel() override = default; - - void InitKernel(const CNodePtr &kernel_node) override; - - bool Launch(const std::vector &inputs, const std::vector &workspace, - const std::vector &outputs) override; - - private: - std::vector shape_; - std::vector axis_; -}; -``` - -- The input parameters of the function `InitKernel` contain a constant reference to the node pointer. Through the member function of the class `AnfRuntimeAlgorithm`, the input and output shape of the operator node and the attribute information of the operator can be obtained. -- The input parameters of the function `Launch` are 3 vectors, including all the input addresses, workspace addresses and all the output addresses, respectively. The concrete implementation logic of the operator is described in the function body. -- `shape_` and `axis_` are 2 member variables defined. 
- -The definition of the function `InitKernel` in the source file is as follows: - -```cpp -void TransposeCPUFwdKernel::InitKernel(const CNodePtr &kernel_node) { - MS_EXCEPTION_IF_NULL(kernel_node); - shape_ = AnfAlgo::GetInputDeviceShape(kernel_node, 0); - axis_ = AnfAlgo::GetNodeAttr>(kernel_node, "perm"); - if (shape_.size() != axis_.size()) { - MS_LOG(EXCEPTION) << "The size of input shape and transpose axis shape must be equal."; - } -} -``` - -- The functions in the class `AnfRuntimeAlgorithm` implement various operations on operator nodes. `shape_` represents the shape of the first input of the operator. `axis_` represents the attribute "perm" of the operator. -- The parameter "perm" of the`Transpose` operator's primitive is as an input, but "perm" is actually considered as the attribute of the operation when parsing. - -> For details of the class `AnfRuntimeAlgorithm`, please refer to the declaration in MindSpore source codes under [mindspore/ccsrc/backend/session/anf_runtime_algorithm.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/backend/session/anf_runtime_algorithm.h). - -The definition of the function `Launch` in the source file is as follows: First, get the address of each input and output in turn, and then transform the dimension according to `axis_`, and assign the value to the space pointed to by the output address. 
- -```cpp -bool TransposeCPUFwdKernel::Launch(const std::vector &inputs, - const std::vector & /*workspace*/, - const std::vector &outputs) { - auto input = reinterpret_cast(inputs[0]->addr); - auto output = reinterpret_cast(outputs[0]->addr); - size_t size = IntToSize(inputs[0]->size / sizeof(float)); - size_t shape_size = IntToSize(shape_.size()); - if (shape_size > kMaxDim) { - MS_LOG(EXCEPTION) << "Input is " << shape_size << "-D, but transpose supports max " << kMaxDim << "-D inputs."; - } - size_t pos_array[kMaxDim]; - size_t size_offset[kMaxDim]; - size_offset[0] = size / shape_[0]; - for (size_t i = 1; i < shape_size; i++) { - size_offset[i] = size_offset[SizeToInt(i) - 1] / shape_[i]; - } - for (size_t position = 0; position < size; position += 1) { - size_t temp_position = position; - pos_array[0] = temp_position / size_offset[0]; - for (size_t i = 1; i < shape_size; i++) { - temp_position -= pos_array[SizeToInt(i) - 1] * size_offset[i - 1]; - pos_array[i] = temp_position / size_offset[i]; - } - size_t new_position = pos_array[axis_[SizeToInt(shape_size) - 1]]; - size_t new_position_size = 1; - for (int j = shape_size - 2; j >= 0; j--) { - new_position_size *= shape_[axis_[j + 1]]; - new_position += pos_array[axis_[j]] * new_position_size; - } - output[new_position] = input[position]; - } - return true; -} -``` - -### Registration Operators Information - -Operators information is the key information to guide the back-end selection of implementing operators. The first parameter of `MS_REG_CPU_KERNEL` is the name of the registration operator, which is consistent with the operator name in the primitives. The second parameter indicates the type of each input and output in turn. The last parameter is the name of the class which the operators implement. 
`Transpose` operator registration codes are as follows: - -```cpp -MS_REG_CPU_KERNEL(Transpose, KernelAttr().AddInputAttr(kNumberTypeFloat32).AddOutputAttr(kNumberTypeFloat32), - TransposeCPUFwdKernel); -``` - -> The number and order of the input and output information defined in operator information, the number and order of input and output information in operator implementation, and the number and order of input and output name list in operator primitives should be consistent. - -## Editing MindSpore - -After writing the custom CPU operators, you need to recompile and reinstall MindSpore. For details, please refer to [Installation Document](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_source.md#). - -## Using Custom CPU Operators - -After compiling and installing, the custom CPU operators can be used directly through the import primitives. Take the single operator network test of `Transpose` as an example. - -Define the network in document `test_transpose.py`. - -```python -import numpy as np -import mindspore.nn as nn -import mindspore.context as context -from mindspore import Tensor -import mindspore.ops as ops - -context.set_context(mode=context.GRAPH_MODE, device_target="CPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.transpose = ops.Transpose() - - def construct(self, data): - return self.transpose(data, (1, 0)) - -def test_net(): - x = np.arange(2 * 3).reshape(2, 3).astype(np.float32) - transpose = Net() - output = transpose(Tensor(x)) - print("output: ", output) -``` - -Running case: - -```bash -pytest -s test_transpose.py::test_net -``` - -Running results: - -```text -output: [[0, 3] - [1, 4] - [2, 5]] -``` - -## Defining Operators' BProp Functions - -If an operator needs to support automatic differentiation, its back-propagation function (bprop) needs to be defined in its primitives. 
You need to describe the reverse computing logic that uses forward input, forward output, and output gradient to get the input gradient in bprop. Reverse computation logic can be composed of built-in operators or custom reverse operators. - -The following points should be paid attention to when defining operators' bprop functions: - -- The order of input parameters of bprop function is defined as positive input, positive output and output gradient. If the operator is a multi-output operator, the forward output and output gradient will be provided in the form of tuples. -- The form of the return values of bprop function is arranged as a tuple composed of input gradient, and the order of elements in the tuple is consistent with that of forward input parameters. Even if there is only one input gradient, the return value must be in the form of tuples. - -For example, the bprop primitives of `Transpose` are: - -```python -import mindspore.ops as ops -invert_permutation = ops.InvertPermutation() -transpose = ops.Transpose() -zeros_like = ops.zeros_like() -@bprop_getters.register(ops.Transpose) -def get_bprop_transpose(self): - """Generate bprop for Transpose""" - - def bprop(x, perm, out, dout): - return transpose(dout, invert_permutation(perm)), zeros_like(perm) - - return bprop -``` - -- `Transpose` bprop operator uses `InvertPermutation` operator, which also needs a complete process of primitives, registration and implementation like `Transpose` operator. - -Define the bprop case in document `test_transpose.py`. 
- -```python -import mindspore.ops as ops -class Grad(nn.Cell): - def __init__(self, network): - super(Grad, self).__init__() - self.grad = ops.GradOperation(sens_param=True) - self.network = network - - def construct(self, input_data, sens): - gout = self.grad(self.network)(input_data, sens) - return gout - -def test_grad_net(): - x = np.arange(2 * 3).reshape(2, 3).astype(np.float32) - sens = np.arange(2 * 3).reshape(3, 2).astype(np.float32) - grad = Grad(Net()) - dx = grad(Tensor(x), Tensor(sens)) - print("dx: ", dx.asnumpy()) -``` - -Running case: - -```bash -pytest -s test_transpose.py::test_grad_net -``` - -Running results: - -```text -dx: [[0. 2. 4.] - [1. 3. 5.]] -``` diff --git a/tutorials/training/source_en/advanced_use/custom_operator_gpu.md b/tutorials/training/source_en/advanced_use/custom_operator_gpu.md deleted file mode 100644 index a76aadaae3d9273403a1d5cb350e13e702f09f66..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/custom_operator_gpu.md +++ /dev/null @@ -1,262 +0,0 @@ -# Custom Operators (GPU) - -Translator: [Leon_02](https://gitee.com/Leon_02) - -`Linux` `GPU` `Model Development` `Expert` - - - -- [Custom Operators (GPU)](#custom-operators-gpu) - - [Overview](#overview) - - [Registering the Operator Primitive](#registering-the-operator-primitive) - - [Defining the bprop Function for an Operator](#defining-the-bprop-function-for-an-operator) - - [Implementing a GPU operator](#implementing-a-GPU-operator) - - [Registering the Operator Information](#registering-the-operator-information) - - [Compiling Mindspore](#compiling-for-mindspore) - - [Operator verification](#operator-verification) - - - - - -## Overview - -Operator is the basic element of constructing neural network. When built-in operators cannot meet requirements during network development, you can utilize MindSpore to quickly extend custom operators of the Graphics Processing Unit. 
- -- Primitive registration: the register operator primitive is the basic unit of constructing network model. Users can directly or indirectly call the operator primitive to build a neural network model. -- GPU Kernel implementation: GPU kernel is used to call GPU to accelerate computing. -- GPU Kernel registration: operator registration is used to register the GPU kernel and necessary information to the framework, and the framework completes the call to the GPU kernel. - -In this tutorial, we will develop a TensorAddV2 operator using C++ and CUDA in the mindspore framework. TensorAddV2 is used to add two tensors of the same dimension element by element. - -## Registering the Operator Primitive - -Operator primitives usually include: - -- Aperator names: operator names are used to uniquely identify operators. -- Annotations: describe the algorithm and usage constraints of operators. The annotations will be exported as Mindspore API interface documentation for developers to refer to. -- Input: the tensor(s) for operator input. -- Attributes: for example, the `data_format` attribute in Conv2d describes that the input data is in `NCHW` or `NHWC` format. -- Validation of input data: verify the validity of input data and attributes, which is convenient for developers to find the problems of network model as soon as possible. -- Output data type and dimension derivation: used to derive the data type and dimension of output. - -The following code defines an operator called TensorAddV2: - -- `TensorAddV2` is a subclass inherited from `PrimitiveWithInfer`. -- The constructor `__init__` is used to initialize the operator, since TensorAddV2 doesn't have any attributes, there is none additional input for `__init__`. -- The function `infer_shape` constraints two input dimensions must be the same and the output dimension will be same as the dimension of x1. 
-- The function `infer_dtype` constrains that two input data must be of type float32 and the output data type is the same as the input data type. - -```python -# mindspore/ops/operations/math_ops.py -class TensorAddV2(PrimitiveWithInfer): - """ - Adds two input tensors element-wise. - """ - @prim_attr_register - def __init__(self): - self.init_prim_io_names(inputs=['x1', 'x2'], outputs=['y']) - - def infer_shape(self, x1_shape, x2_shape): - validator.check_integer('input dims', len(x1_shape), len(x2_shape), Rel.EQ, self.name) - for i in range(len(x1_shape)): - validator.check_integer('input_shape', x1_shape[i], x2_shape[i], Rel.EQ, self.name) - return x1_shape - - def infer_dtype(self, x1_dtype, x2_type): - validator.check_tensor_type_same({'x1_dtype': x1_dtype}, [mstype.float32], self.name) - validator.check_tensor_type_same({'x2_dtype': x2_dtype}, [mstype.float32], self.name) - return x1_dtype -``` - -Next we'll export TensorAddV2 type in '__init__.py', which convenient for users to import and use in the network. - -```python -# mindspore/ops/operations/__init__.py -from .math_ops import (Abs, ACos, ..., TensorAddV2) -... -... -__all__ = [ - 'ReverseSequence', - 'CropAndResize', - ..., - 'TensorAddV2' -] -``` - -## Defining the bprop Function for an Operator - -If an operator wants to support automatic differentiation, its back-propagation function (bprop) needs to be defined in its primitive. You need to describe the forward input, forward output and output gradient in bprop to get the reverse computing logic of input gradient. Reverse computation logic can be composed of built-in operators or custom reverse operators. - -The following points should be paid attention to when defining the bprop Function for an operator: - -- The input order of a bprop function is defined as forward input, forward output and output gradient. If the operator is a multi output operator, the forward output and output gradient will be provided in the form of tuples. 
-- The return value of a bprop function is conventionally a tuple of input gradients, the order of elements in tuples is consistent with the order of forward input parameters. Even if there is only one input gradient, the return value must be in the form of tuples. - -For example, the reverse primitive for `TensorAddV2` can be: - -```python -import mindspore.ops as ops -@bprop_getters.register(ops.TensorAddV2) -def get_bprop_tensoraddv2(self): - """Generate bprop for TensorAddV2""" - - def bprop(x1, x2, out, dout): - return dout, dout - - return bprop -``` - -## Implementing a GPU operator - -Custom GPU operators inherit from `GPUKernel`: - -- `Init()`: it is used to initialize the GPU kernel, usually includes recording the input / output dimension of the operator, and completing the preparation before launch. -- `GetInputSizeList()`: feedback to the frame the number of bytes of video memory to input tensor. -- `GetOutputSizeList()`: feedback to the frame the number of bytes of video memory to output tensor. -- `GetWorkspaceSizeList()`: feedback to the frame the number of bytes for `Workspace`, where `Workspace` is the space used to store temporary data during calculation. -- `Launch()`: generally, CUDA kernel (CUDA kernel is a kernel function developed by Nvidia GPU's parallel computing architecture) or cudnn interface are called to complete the operator acceleration on GPU. - -The following code shows the implementation of TensorAddV2: -In order to support generalization of data types, we use class template to define `TensorAddV2GpuKernel`: - -- `Init()` records the number of tensor elements. -- `GetInputSizeList()` returns the number of bytes the input tensor needs to occupy. TensorAddV2 has two Input and the number of bytes per input equals to element_num * sizeof(T). -- `GetOutputSizeList()` returns the number of bytes the output tensor needs to occupy. TensorAddV2 has one output and the output occupies element_num * sizeof(T) bytes. 
-- Since TensorAddV2 doesn't need `Workspace`, the `GetWorkspaceSizeList()` returns a null `std::vector`. -- `Launch()` receives the addresses of input and output in video memory, and then calls `TensorAddV2` to complete acceleration. - -```c++ -// mindspore/ccsrc/backend/kernel_compiler/gpu/math/tensor_add_v2_gpu_kernel.h - -template -class TensorAddV2GpuKernel : public GpuKernel { - public: - TensorAddV2GpuKernel() : element_num_(1) {} - ~TensorAddV2GpuKernel() override = default; - - bool Init(const CNodePtr &kernel_node) override { - auto shape = AnfAlgo::GetPrevNodeOutputInferShape(kernel_node, 0); - for (size_t i = 0; i < shape.size(); i++) { - element_num_ *= shape[i]; - } - InitSizeLists(); - return true; - } - - const std::vector &GetInputSizeList() const override { return input_size_list_; } - const std::vector &GetOutputSizeList() const override { return output_size_list_; } - const std::vector &GetWorkspaceSizeList() const override { return workspace_size_list_; } - - bool Launch(const std::vector &inputs, const std::vector &, - const std::vector &outputs, void *stream_ptr) override { - T *x1 = GetDeviceAddress(inputs, 0); - T *x2 = GetDeviceAddress(inputs, 1); - T *y = GetDeviceAddress(outputs, 0); - - TensorAddV2(element_num_, x1, x2, y, reinterpret_cast(stream_ptr)); - return true; - } - - protected: - void InitSizeLists() override { - input_size_list_.push_back(element_num_ * sizeof(T)); - input_size_list_.push_back(element_num_ * sizeof(T)); - output_size_list_.push_back(element_num_ * sizeof(T)); - } - - private: - size_t element_num_; - std::vector input_size_list_; - std::vector output_size_list_; - std::vector workspace_size_list_; -}; -``` - -`TensorAddV2` calls CUDA kernel`TensorAddV2Kernel` to implement the parallel addition of `element_num` elements: - -```c++ -// mindspore/ccsrc/backend/kernel_compiler/gpu/math/tensor_add_v2_gpu_kernel.h - - template - __global__ void TensorAddV2Kernel(const size_t element_num, const T* x1, const T* x2, T* 
y) { - for (size_t i = blockIdx.x * blockDim.x + threadIdx.x; i < element_num; i += blockDim.x * gridDim.x) { - y[i] = x1[i] + x2[i]; - } - } - - template - void TensorAddV2(const size_t &element_num, const T* x1, const T* x2, T* y, cudaStream_t stream){ - size_t thread_per_block = 256; - size_t block_per_grid = (element_num + thread_per_block - 1 ) / thread_per_block; - TensorAddV2Kernel<<>>(element_num, x1, x2, y); - return; - } - - template void TensorAddV2(const size_t &element_num, const float* x1, const float* x2, float* y, cudaStream_t stream); -``` - -## Registering the Operator Information - -Operator information includes: - -- `Primive` -- `Input dtype, output dtype` -- `GPU Kernel class` -- `CUDA built-in dtype` - -Framework calls `CUDA built-in dtype` to instantiate `GPU Kernel class` template class based on `Primive` and `Input dtype, output dtype`. - -The TensorAddV2 operators supporting float and int are registered in the code below: - -```c++ -// mindspore/ccsrc/backend/kernel_compiler/gpu/math/tensor_add_v2_gpu_kernel.cc - -MS_REG_GPU_KERNEL_ONE(TensorAddV2, KernelAttr() - .AddInputAttr(kNumberTypeFloat32) - .AddInputAttr(kNumberTypeFloat32) - .AddOutputAttr(kNumberTypeFloat32), - TensorAddV2GpuKernel, float) - -MS_REG_GPU_KERNEL_ONE(TensorAddV2, KernelAttr() - .AddInputAttr(kNumberTypeInt32) - .AddInputAttr(kNumberTypeInt32) - .AddOutputAttr(kNumberTypeInt32), - TensorAddV2GpuKernel, int) - -``` - -## Compiling Mindspore - -After writing the custom GPU operator, you need to recompile and install MindSpore, see [Installation Documentation](https://gitee.com/mindspore/docs/blob/master/install/mindspore_gpu_install_source_en.md#). 
- -## Operator verification - -At the end of the tutorial, we construct a single operator network to validate the TensorAddV2 operator we just developed: - -```python -# tests/st/ops/gpu/test_tensoraddv2_op.py - -import mindspore.context as context -from mindspore import Tensor -import mindspore.ops as ops - -context.set_context(device_target='GPU') - -@pytest.mark.level0 -@pytest.mark.platform_x86_gpu_training -@pytest.mark.env_onecard -def test_TensroAdd(): - x1 = Tensor(np.ones((3, 4), np.float32)) - x2 = Tensor(np.ones((3, 4), np.float32)) - y = ops.TensorAddV2()(x1, x2) - print('result: ', y) -``` - -When the command `pytest -s tests/st/ops/gpu/test_tensoraddv2_op.py` executes, you can see the results meeting expectations: - -```text -result: [[2. 2. 2. 2.] - [2. 2. 2. 2.] - [2. 2. 2. 2.]] -``` diff --git a/tutorials/training/source_en/advanced_use/cv.rst b/tutorials/training/source_en/advanced_use/cv.rst deleted file mode 100644 index 3f3db6d6cfc64e3eb129b5bc57907ed20aa29172..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/cv.rst +++ /dev/null @@ -1,9 +0,0 @@ -Computer Vision -=============== - -.. 
toctree:: - :maxdepth: 1 - - cv_resnet50 - cv_resnet50_second_order_optimizer - cv_mobilenetv2_fine_tune \ No newline at end of file diff --git a/tutorials/training/source_en/advanced_use/cv_mobilenetv2_fine_tune.md b/tutorials/training/source_en/advanced_use/cv_mobilenetv2_fine_tune.md deleted file mode 100644 index 72325d82f6fabd1171ea287f8be202bc9ae6454c..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/cv_mobilenetv2_fine_tune.md +++ /dev/null @@ -1,409 +0,0 @@ -# Using MobileNetV2 to Implement Fine-Tuning - -`Linux` `Windows` `Ascend` `GPU` `CPU` `Model Development` `Intermediate` `Expert` - - - -- [Using MobileNetV2 to Implement Fine-Tuning](#using-mobilenetv2-to-implement-fine-tuning) - - [Overview](#overview) - - [Task Description and Preparations](#task-description-and-preparations) - - [Environment Configuration](#environment-configuration) - - [Downloading Code](#downloading-code) - - [Preparing a Pre-Trained Model](#preparing-a-pre-trained-model) - - [Preparing Data](#preparing-data) - - [Code for Loading a Pre-Trained Model](#code-for-loading-a-pre-trained-model) - - [Parameter Description](#parameter-description) - - [Running Python Files](#running-python-files) - - [Running Shell Scripts](#running-shell-scripts) - - [Loading Fine-Tuning Training](#loading-fine-tuning-training) - - [Loading Training on CPU](#loading-training-on-cpu) - - [Loading Training on GPU](#loading-training-on-gpu) - - [Loading Training on Ascend AI Processor](#loading-training-on-ascend-ai-processor) - - [Fine-Tuning Training Result](#fine-tuning-training-result) - - [Validating the Fine-Tuning Training Model](#validating-the-fine-tuning-training-model) - - [Validating the Model](#validating-the-model) - - [Validation Result](#validation-result) - - - -   - -## Overview - -In a computer vision task, training a network from scratch is time-consuming and requires a large amount of computing power. 
Pre-trained models often select open large datasets such as OpenImage, ImageNet, VOC, and COCO. The number of images in these datasets reaches hundreds of thousands or even millions. Most tasks have a large amount of data. If a pre-trained model is not used during network model training, the training from scratch consumes a large amount of time and computing power. As a result, the model is prone to local minimum and overfitting. Therefore, most tasks perform fine-tuning on pre-trained models. - -MindSpore is a diversified machine learning framework. It can run on devices such as mobile phones and PCs, or on server clusters on the cloud. Currently, MobileNetV2 supports fine-tuning on a single CPU or on one or more Ascend AI Processors or GPUs on Windows, EulerOS, and Ubuntu systems. This tutorial describes how to perform fine-tuning training and validation in the MindSpore frameworks of different systems and processors. - -Currently, only the CPU is supported on Windows, and the CPU, GPU, and Ascend AI Processor are supported on Ubuntu and EulerOS. - -> You can obtain the complete executable sample code at . - -## Task Description and Preparations - -### Environment Configuration - -If running a task in a local environment, install the MindSpore framework and configure the CPU, GPU, or Ascend AI Processor. If running a task in the HUAWEI CLOUD environment, skip this section because the installation and configuration are not required. - -On the Windows operating system, backslashes `\` are used to separate directories of different levels in a path address. On the Linux operating system, slashes `/` are used. The following uses `/` by default. If you use Windows operating system, replace `/` in the path address with `\`. - -1. Install the MindSpore framework. - [Install](https://www.mindspore.cn/install/en) a MindSpore framework based on the processor architecture and the EulerOS, Ubuntu, or Windows system. - -2. Configure the CPU environment. 
- Set the following code before calling the CPU to start training or testing: - - ```python - if config.platform == "CPU": - context.set_context(mode=context.GRAPH_MODE, device_target=config.platform, \ - save_graphs=False) - ``` - -3. Configure the GPU environment. - Set the following code before calling the GPU to start training or testing: - - ```python - elif config.platform == "GPU": - context.set_context(mode=context.GRAPH_MODE, device_target=config.platform, save_graphs=False) - if config.run_distribute: - init("nccl") - context.set_auto_parallel_context(device_num=get_group_size(), - parallel_mode=ParallelMode.DATA_PARALLEL, - gradients_mean=True) - ``` - -4. Configure the Ascend environment. - The following uses the JSON configuration file `hccl_config.json` in an environment with eight Ascend 910 AI processors as an example. Adjust `"server_count"` and `device` based on the following example to switch between the single-device and multi-device environments: - - ```json - { - "version": "1.0", - "server_count": "1", - "server_list": [ - { - "server_id": "10.155.111.140", - "device": [ - {"device_id": "0","device_ip": "192.1.27.6","rank_id": "0"}, - {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"}, - {"device_id": "2","device_ip": "192.3.27.6","rank_id": "2"}, - {"device_id": "3","device_ip": "192.4.27.6","rank_id": "3"}, - {"device_id": "4","device_ip": "192.1.27.7","rank_id": "4"}, - {"device_id": "5","device_ip": "192.2.27.7","rank_id": "5"}, - {"device_id": "6","device_ip": "192.3.27.7","rank_id": "6"}, - {"device_id": "7","device_ip": "192.4.27.7","rank_id": "7"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" - } - ``` - - Set the following code before calling the Ascend AI Processor to start training or testing: - - ```python - elif config.platform == "Ascend": - context.set_context(mode=context.GRAPH_MODE, device_target=config.platform, device_id=config.device_id, - save_graphs=False) - if config.run_distribute: - 
context.set_auto_parallel_context(device_num=config.rank_size, - parallel_mode=ParallelMode.DATA_PARALLEL, - gradients_mean=True, - all_reduce_fusion_config=[140]) - init() - ... - ``` - -### Downloading Code - -Run the following command to clone [MindSpore open-source project repository](https://gitee.com/mindspore/mindspore.git) in Gitee and go to `./model_zoo/official/cv/mobilenetv2/`. - -```bash -git clone https://gitee.com/mindspore/mindspore.git -cd ./mindspore/model_zoo/official/cv/mobilenetv2 -``` - -The code structure is as follows: - -```bash -├─MobileNetV2 - ├─README.md # descriptions about MobileNetV2 - ├─scripts - │ run_train.sh # Shell script for train with Ascend or GPU - │ run_eval.sh # Shell script for evaluation with Ascend or GPU - ├─src - │ config.py # parameter configuration - │ dataset.py # creating dataset - │ launch.py # start Python script - │ lr_generator.py # learning rate config - │ mobilenetV2.py # MobileNetV2 architecture - │ mobilenetV2_fusion.py # MobileNetV2 fusion architecture - │ models.py # net utils to load ckpt_file, define_net... - │ utils.py # net utils to switch precision, set_context and so on - ├─train.py # training script - └─eval.py # evaluation script -``` - -During fine-tuning training and testing, python files `train.py` and `eval.py` can be used on Windows, Ubuntu, and EulerOS, and shell script files `run_train.sh` and `run_eval.sh` can be used on Ubuntu and EulerOS. - -If the script file `run_train.sh` is used, it runs `launch.py` and inputs parameters to `launch.py` which starts one or more processes to run `train.py` based on the number of allocated CPUs, GPUs, or Ascend AI Processors. Each process is allocated with a processor. 
- -### Preparing a Pre-Trained Model - -Download a [CPU/GPU pre-trained model](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2_cpu_gpu.ckpt) or [Ascend pre-trained model](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2_ascend.ckpt) to the following directories based on the processor type: -`./pretrain_checkpoint/` - -- CPU/GPU - - ```bash - mkdir pretrain_checkpoint - wget -P ./pretrain_checkpoint https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2_cpu_gpu.ckpt - ``` - -- Ascend AI Processor - - ```bash - mkdir pretrain_checkpoint - wget -P ./pretrain_checkpoint https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2_ascend.ckpt - ``` - -### Preparing Data - -Prepare the dataset managed in ImageFolder format. Add the `` parameter when running `run_train.sh`, and add the `--dataset_path ` parameter when running `train.py`. - -The dataset structure is as follows: - -```bash -└─ImageFolder - ├─train - │ class1Folder - │ class2Folder - │ ...... - └─eval - class1Folder - class2Folder - ...... -``` - -## Code for Loading a Pre-Trained Model - -During fine-tuning, you need to load a pre-trained model. The distribution of the feature extraction layer (convolutional layer) in different datasets and tasks tends to be consistent. However, the combination of feature vectors (fully connected layer) is different, and the number of classes (output_size of the fully connected layer) is usually different. During fine-tuning, parameters of the feature extraction layer are loaded and trained, while those of the fully connected layer are not. During fine-tuning and initial training, both feature extraction layer parameters and fully connected layer parameters are loaded and trained. 
- -Before training and testing, build a backbone network and a head network of MobileNetV2 on the first line of the code, and build a MobileNetV2 network containing the two subnets. Lines 3 to 10 of the code show how to define `backbone_net` and `head_net` and how to add the two subnets to `mobilenet_v2`. Lines 12 to 23 of the code show that in fine-tuning training mode, the pre-trained model needs to be loaded to the `backbone_net` subnet, and parameters in `backbone_net` are frozen and do not participate in training. Lines 21 to 23 of the code show how to freeze network parameters. - -```python - 1: backbone_net, head_net, net = define_net(args_opt, config) - 2: ... - 3: def define_net(config, is_training): - 4: backbone_net = MobileNetV2Backbone() - 5: activation = config.activation if not is_training else "None" - 6: head_net = MobileNetV2Head(input_channel=backbone_net.out_channels, - 7: num_classes=config.num_classes, - 8: activation=activation) - 9: net = mobilenet_v2(backbone_net, head_net) -10: return backbone_net, head_net, net -11: ... -12: if args_opt.pretrain_ckpt and args_opt.freeze_layer == "backbone": -13: load_ckpt(backbone_net, args_opt.pretrain_ckpt, trainable=False) -14: ... -15: def load_ckpt(network, pretrain_ckpt_path, trainable=True): -16: """ -17: train the param weight or not -18: """ -19: param_dict = load_checkpoint(pretrain_ckpt_path) -20: load_param_into_net(network, param_dict) -21: if not trainable: -22: for param in network.get_parameters(): -23: param.requires_grad = False -``` - -## Parameter Description - -Change the value of each parameter based on the local processor type, data path, and pre-trained model path. - -### Running Python Files - -When using `train.py` for training on Windows and Linux, input `dataset_path`, `platform`, `pretrain_ckpt`, and `freeze_layer`. When using `eval.py` for validation, input `dataset_path`, `platform`, and `pretrain_ckpt`. 
-
-```bash
-# Windows/Linux train with Python file
-python train.py --platform [PLATFORM] --dataset_path [DATASET_PATH] --pretrain_ckpt [PRETRAIN_CHECKPOINT_PATH] --freeze_layer [("none", "backbone")]
-
-# Windows/Linux eval with Python file
-python eval.py --platform [PLATFORM] --dataset_path [DATASET_PATH] --pretrain_ckpt [PRETRAIN_CHECKPOINT_PATH]
-```
-
-- `--dataset_path`: path of the training or validation dataset. There is no default value. This parameter is mandatory for training or validation.
-- `--platform`: processor type. The default value is `Ascend`. You can set it to `CPU` or `GPU`.
-- `--pretrain_ckpt`: path of the `pretrain_checkpoint` file required for loading a weight of a pre-trained model parameter during incremental training or optimization.
-- `--freeze_layer`: frozen network layer. Enter `none` or `backbone`.
-
-### Running Shell Scripts
-
-You can run the shell scripts `./scripts/run_train.sh` and `./scripts/run_eval.sh` on Linux. Input parameters on the interaction interface.
-
-```bash
-# Windows doesn't support Shell
-# Linux train with Shell script
-sh run_train.sh [PLATFORM] [DEVICE_NUM] [VISIABLE_DEVICES] [RANK_TABLE_FILE] [DATASET_PATH] [CKPT_PATH] [FREEZE_LAYER]
-
-# Linux eval with Shell script for fine tune
-sh run_eval.sh [PLATFORM] [DATASET_PATH] [PRETRAIN_CKPT_PATH]
-```
-
-- `[PLATFORM]`: processor type. The default value is `Ascend`. You can set it to `GPU`.
-- `[DEVICE_NUM]`: number of processes on each node (equivalent to a server or PC). You are advised to set this parameter to the number of Ascend AI Processors or GPUs on a server.
-- `[VISIABLE_DEVICES]`: device ID of character string type. During training, a process is bound to a device with the corresponding ID based on `[VISIABLE_DEVICES]`. Multiple device IDs are separated by commas (,). It is recommended that the number of IDs be the same as the number of processes.
-- `[RANK_TABLE_FILE]`: a JSON file configured when platform is set to `Ascend`
-- `[DATASET_PATH]`: path of the training or validation dataset. There is no default value. This parameter is mandatory for training or validation.
-- `[CKPT_PATH]`: path of the checkpoint file required for loading a weight of a pre-trained model parameter during incremental training or optimization. 
-- `[FREEZE_LAYER]`: frozen network layer during fine-tuned model validation. Enter `none` or `backbone`.
-
-## Loading Fine-Tuning Training
-
-Only `train.py` can be run on Windows when MobileNetV2 is used for fine-tuning training. You can run the shell script `run_train.sh` and input [parameters](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/cv_mobilenetv2_fine_tune.html#id8) on Linux when MobileNetV2 is used for fine-tuning training.
-
-The Windows system outputs information to an interactive command line. When running `run_train.sh` on the Linux system, use `&> [LOG_FILE_PATH]` at the end of the command line to write the standard output and error output to the log file. After the fine-tuning is successful, training starts. The training time and loss of each epoch are continuously written into the `./train/rank*/log*.log` file. If the fine-tuning fails, an error message is recorded in the preceding log file.
-
-### Loading Training on CPU
-
-- Set the number of nodes.
-
-  Currently, `train.py` supports only a single processor. You do not need to adjust the number of processors. When the `run_train.sh` file is run, a single `CPU` is used by default. The number of CPUs cannot be changed.
-
-- Start incremental training.
-
-  Example 1: Use the python file to call a CPU.
-
-  ```bash
-  # Windows or Linux with Python
-  python train.py --platform CPU --dataset_path [DATASET_PATH] --pretrain_ckpt ./pretrain_checkpoint/mobilenetv2_cpu_gpu.ckpt --freeze_layer backbone
-  ```
-
-  Example 2: Use the shell file to call a CPU.
-
-  ```bash
-  # Linux with Shell
-  sh run_train.sh CPU [DATASET_PATH] ../pretrain_checkpoint/mobilenetv2_cpu_gpu.ckpt backbone
-  ```
-
-### Loading Training on GPU
-
-- Set the number of nodes.
-
-  Currently, `train.py` supports only a single processor. You do not need to adjust the number of nodes. When running the `run_train.sh` file, set `[DEVICE_NUM]` to the number of GPUs and `[VISIABLE_DEVICES]` to IDs of available processors, that is, GPU IDs. 
You can select one or more device IDs and separate them with commas (,).
-
-- Start incremental training.
-
-    - Example 1: Use the python file to call a GPU.
-
-      ```bash
-      # Windows or Linux with Python
-      python train.py --platform GPU --dataset_path [DATASET_PATH] --pretrain_ckpt ./pretrain_checkpoint/mobilenetv2_cpu_gpu.ckpt --freeze_layer backbone
-      ```
-
-    - Example 2: Use the shell script to call a GPU whose device ID is `0`.
-
-      ```bash
-      # Linux with Shell
-      sh run_train.sh GPU 1 0 [DATASET_PATH] ../pretrain_checkpoint/mobilenetv2_cpu_gpu.ckpt backbone
-      ```
-
-    - Example 3: Use the shell script to call eight GPUs whose device IDs are `0,1,2,3,4,5,6,7`.
-
-      ```bash
-      # Linux with Shell
-      sh run_train.sh GPU 8 0,1,2,3,4,5,6,7 [DATASET_PATH] ../pretrain_checkpoint/mobilenetv2_cpu_gpu.ckpt backbone
-      ```
-
-### Loading Training on Ascend AI Processor
-
-- Set the number of nodes.
-
-  Currently, `train.py` supports only a single processor. You do not need to adjust the number of nodes. When running the `run_train.sh` file, set `[DEVICE_NUM]` to the number of Ascend AI Processors and `[VISIABLE_DEVICES]` to IDs of available processors, that is, Ascend AI Processor IDs. You can select one or more device IDs from 0 to 7 on an 8-device server and separate them with commas (,). Currently, the number of Ascend AI Processors can only be set to 1 or 8.
-
-- Start incremental training.
-
-    - Example 1: Use the python file to call an Ascend AI Processor.
-
-      ```bash
-      # Windows or Linux with Python
-      python train.py --platform Ascend --dataset_path [DATASET_PATH] --pretrain_ckpt ./pretrain_checkpoint/mobilenetv2_ascend.ckpt --freeze_layer backbone
-      ```
-
-    - Example 2: Use the shell script to call an Ascend AI Processor whose device ID is `0`.
-
-      ```bash
-      # Linux with Shell
-      sh run_train.sh Ascend 1 0 ~/rank_table.json [DATASET_PATH] ../pretrain_checkpoint/mobilenetv2_ascend.ckpt backbone
-      ```
-
-    - Example 3: Use the shell script to call eight Ascend AI Processors whose device IDs are `0,1,2,3,4,5,6,7`. 
- - ```bash - # Linux with Shell - sh run_train.sh Ascend 8 0,1,2,3,4,5,6,7 ~/rank_table.json ../pretrain_checkpoint/mobilenetv2_ascend.ckpt backbone - ``` - -### Fine-Tuning Training Result - -- View the running result. - - - When running the python file, view the output information in the interactive command line. After running the shell script on `Linux`, run the `cat ./train/rank0/log0.log` command to view the output information. The output is as follows: - - ```bash - train args: Namespace(dataset_path='./dataset/train', platform='CPU', \ - pretrain_ckpt='./pretrain_checkpoint/mobilenetv2_cpu_gpu.ckpt', freeze_layer='backbone') - cfg: {'num_classes': 26, 'image_height': 224, 'image_width': 224, 'batch_size': 150, \ - 'epoch_size': 200, 'warmup_epochs': 0, 'lr_max': 0.03, 'lr_end': 0.03, 'momentum': 0.9, \ - 'weight_decay': 4e-05, 'label_smooth': 0.1, 'loss_scale': 1024, 'save_checkpoint': True, \ - 'save_checkpoint_epochs': 1, 'keep_checkpoint_max': 20, 'save_checkpoint_path': './', \ - 'platform': 'CPU'} - Processing batch: 16: 100%|███████████████████████████████████████████ █████████████████████| 16/16 [00:00 --pretrain_ckpt ./ckpt_0/mobilenetv2_15.ckpt - -# Linux with Shell -sh run_eval.sh CPU ../ckpt_0/mobilenetv2_15.ckpt -``` - -### Validation Result - -When the python file is run, the validation result is output in the interactive command line. The shell script writes the information to `./eval.log`. You need to run the `cat ./eval.log` command to view the information. 
The result is as follows: - -```bash -result:{'acc': 0.9466666666666666666667} -pretrain_ckpt = ./ckpt_0/mobilenetv2_15.ckpt -``` diff --git a/tutorials/training/source_en/advanced_use/cv_resnet50.md b/tutorials/training/source_en/advanced_use/cv_resnet50.md deleted file mode 100644 index 1aa4eeb44671437b25f8e6db1d323170cdc6d86c..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/cv_resnet50.md +++ /dev/null @@ -1,209 +0,0 @@ -# Image Classification Using ResNet-50 Network - -`Linux` `Ascend` `GPU` `Whole Process` `Beginner` `Intermediate` `Expert` - - - -- [Image Classification Using ResNet-50 Network](#image-classification-using-resnet-50-network) - - [Overview](#overview) - - [Image Classification](#image-classification) - - [Task Description and Preparation](#task-description-and-preparation) - - [Downloading the CIFAR-10 Dataset](#downloading-the-cifar-10-dataset) - - [Data Preloading and Preprocessing](#data-preloading-and-preprocessing) - - [Defining the CNN](#defining-the-cnn) - - [Defining the Loss Function and Optimizer](#defining-the-loss-function-and-optimizer) - - [Calling the High-level `Model` API To Train and Save the Model File](#calling-the-high-level-model-api-to-train-and-save-the-model-file) - - [Loading and Validating the Saved Model](#loading-and-validating-the-saved-model) - - [References](#references) - - - - - -## Overview - -Computer vision is one of the most widely researched and mature technology fields of deep learning, and is widely applied to scenarios such as mobile phone photographing, intelligent security protection, and automated driving. Since AlexNet won the ImageNet competition in 2012, deep learning has greatly promoted the development of the computer vision field. Almost all the most advanced computer vision algorithms are related to deep learning. Deep neural network can extract image features layer by layer and retain local invariance. 
It is widely used in visual tasks such as classification, detection, segmentation, retrieval, recognition, promotion, and reconstruction. - -This chapter describes how to apply MindSpore to computer vision scenarios based on image classification tasks. - -## Image Classification - -Image classification is one of the most basic computer vision applications and belongs to the supervised learning category. For example, determine the class of a digital image, such as cat, dog, airplane, or car. The function is as follows: - -```python -def classify(image): - label = model(image) - return label -``` - -The key point is to select a proper model. The model generally refers to a deep convolutional neural network (CNN), such as AlexNet, VGG, GoogleNet, and ResNet. - -MindSpore presets a typical CNN. For more details, visit [model_zoo](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official) to get more details. - -MindSpore supports the following image classification networks: LeNet, AlexNet, and ResNet. - -## Task Description and Preparation - -![cifar10](images/cifar10.jpg) - -Figure 1: CIFAR-10 dataset [1] - -The CIFAR-10 dataset contains 10 classes of 60,000 images. Each class contains 6000 images. 50,000 images are for training and 10,000 images are for testing. The size of each image is 32 x 32 pixels. - -Generally, a training indicator of image classification is accuracy, that is, a ratio of the quantity of accurately predicted examples to the total quantity of predicted examples. - -To use MindSpore to classify images, do as follows: - -1. Download the CIFAR-10 dataset. -2. Load and preprocess data. -3. Define a convolutional neural network. In this example, the ResNet-50 network is used. -4. Define the loss function and optimizer. -5. Call the high-level `Model` API to train and save the model file. -6. Load the saved model for inference. - -> This example uses the hardware platform of the Ascend 910 AI processor. 
You can find the complete executable sample code at . - -The key parts of the task process code are explained below. - -### Downloading the CIFAR-10 Dataset - -CIFAR-10 dataset download address: [the website of Cifar-10 Dataset](https://www.cs.toronto.edu/~kriz/cifar.html). In this example, the data is in binary format. In the Linux environment, run the following command to download the dataset: - -```shell -wget https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz -``` - -Run the following command to decompress the dataset: - -```shell -tar -zvxf cifar-10-binary.tar.gz -``` - -### Data Preloading and Preprocessing - -1. Load the dataset. - - Data can be loaded through the built-in dataset format `Cifar10Dataset` API. - > `Cifar10Dataset`: The read type is random read. The built-in CIFAR-10 dataset contains images and labels. The default image format is uint8, and the default label data format is uint32. For details, see the description of the `Cifar10Dataset` API. - - The data loading code is as follows, where `data_home` indicates the data storage location: - - ```python - cifar_ds = ds.Cifar10Dataset(data_home) - ``` - -2. Enhance the data. - - Data augmentation is to normalize data and enrich the number of data samples. Common data augmentation modes include cropping, flipping, and color change. MindSpore calls the `map` method to perform augmentation operations on images. 
- - ```python - resize_height = 224 - resize_width = 224 - rescale = 1.0 / 255.0 - shift = 0.0 - - # define map operations - random_crop_op = C.RandomCrop((32, 32), (4, 4, 4, 4)) # padding_mode default CONSTANT - random_horizontal_op = C.RandomHorizontalFlip() - resize_op = C.Resize((resize_height, resize_width)) # interpolation default BILINEAR - rescale_op = C.Rescale(rescale, shift) - normalize_op = C.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)) - changeswap_op = C.HWC2CHW() - type_cast_op = C2.TypeCast(mstype.int32) - - c_trans = [] - if training: - c_trans = [random_crop_op, random_horizontal_op] - c_trans += [resize_op, rescale_op, normalize_op, changeswap_op] - - # apply map operations on images - cifar_ds = cifar_ds.map(operations=type_cast_op, input_columns="label") - cifar_ds = cifar_ds.map(operations=c_trans, input_columns="image") - ``` - -3. Shuffle and batch process the data. - - Shuffle data randomly to disorder the data sequence and read data in batches for model training: - - ```python - # apply shuffle operations - cifar_ds = cifar_ds.shuffle(buffer_size=10) - - # apply batch operations - cifar_ds = cifar_ds.batch(batch_size=args_opt.batch_size, drop_remainder=True) - - # apply repeat operations - cifar_ds = cifar_ds.repeat(repeat_num) - ``` - -### Defining the CNN - -CNN is a standard algorithm for image classification tasks. CNN uses a layered structure to perform feature extraction on an image, and is formed by stacking a series of network layers, such as a convolutional layer, a pooling layer, and an activation layer. - -ResNet is recommended. First, it is deep enough with 34 layers, 50 layers, or 101 layers. The deeper the hierarchy, the stronger the representation capability, and the higher the classification accuracy. Second, it is learnable. The residual structure is used. 
The lower layer is directly connected to the upper layer through the shortcut connection, which solves the problem of gradient disappearance caused by the network depth during the reverse propagation. In addition, the ResNet network has good performance, including the recognition accuracy, model size, and parameter quantity. - -MindSpore Model Zoo has a ResNet [model](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/resnet/src/resnet.py). The calling method is as follows: - -```python -network = resnet50(class_num=10) -``` - -For more information about ResNet, see [ResNet Paper](https://arxiv.org/abs/1512.03385). - -### Defining the Loss Function and Optimizer - -A loss function and an optimizer need to be defined. The loss function is a training objective of the deep learning, and is also referred to an objective function. The loss function indicates the distance between a logit of a neural network and a label, and is scalar data. - -Common loss functions include mean square error, L2 loss, Hinge loss, and cross entropy. Cross entropy is usually used for image classification. - -The optimizer is used for neural network solution (training). Because of the large scale of neural network parameters, the stochastic gradient descent (SGD) algorithm and its improved algorithm are used in deep learning to solve the problem. MindSpore encapsulates common optimizers, such as `SGD`, `ADAM`, and `Momemtum`. In this example, the `Momentum` optimizer is used. Generally, two parameters need to be set: `moment` and `weight decay`. 
- -An example of the code for defining the loss function and optimizer in MindSpore is as follows: - -```python -# loss function definition -ls = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - -# optimization definition -opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) -``` - -### Calling the High-level `Model` API to Train and Save the Model File - -After data preprocessing, network definition, and loss function and optimizer definition are complete, model training can be performed. Model training involves two iterations: multi-round iteration (`epoch`) of datasets and single-step iteration based on the batch size of datasets. The single-step iteration refers to extracting data from a dataset by `batch`, inputting the data to a network to calculate a loss function, and then calculating and updating a gradient of training parameters by using an optimizer. - -To simplify the training process, MindSpore encapsulates the high-level `Model` API. You can enter the network, loss function, and optimizer to complete the `Model` initialization, and then call the `train` API for training. The `train` API parameters include the number of iterations (`epoch`) and dataset (`dataset`). - -Model saving is a process of persisting training parameters. In the `Model` class, the model is saved using the `callback` function, as shown in the following code. You can set the parameters of the `callback` function by using `CheckpointConfig`. `save_checkpoint_steps` indicates that the model is saved once every fixed number of single-step iterations, and `keep_checkpoint_max` indicates the maximum number of saved models. - -```python -''' -network, loss, optimizer are defined before. -batch_num, epoch_size are training parameters. 
-''' -model = Model(net, loss_fn=ls, optimizer=opt, metrics={'acc'}) - -# CheckPoint CallBack definition -config_ck = CheckpointConfig(save_checkpoint_steps=batch_num, keep_checkpoint_max=35) -ckpoint_cb = ModelCheckpoint(prefix="train_resnet_cifar10", directory="./", config=config_ck) - -# LossMonitor is used to print loss value on screen -loss_cb = LossMonitor() -model.train(epoch_size, dataset, callbacks=[ckpoint_cb, loss_cb]) -``` - -### Loading and Validating the Saved Model - -The trained model file (such as `resnet.ckpt`) can be used to predict the class of a new image. Run the `load_checkpoint` command to load the model file. Then call the `eval` API of `Model` to predict the new image class. - -```python -param_dict = load_checkpoint(args_opt.checkpoint_path) -load_param_into_net(net, param_dict) -eval_dataset = create_dataset(training=False) -res = model.eval(eval_dataset) -print("result: ", res) -``` - -## References - -[1] diff --git a/tutorials/training/source_en/advanced_use/cv_resnet50_second_order_optimizer.md b/tutorials/training/source_en/advanced_use/cv_resnet50_second_order_optimizer.md deleted file mode 100644 index ccb2e81bf6368a1d629d96633fd77015760d47c6..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/cv_resnet50_second_order_optimizer.md +++ /dev/null @@ -1,509 +0,0 @@ -# ResNet-50 Second-Order Optimization Practice - -`Linux` `Ascend` `GPU` `Model Development` `Model Optimization` `Expert` - - - -- [ResNet-50 Second-Order Optimization Practice](#resnet-50-second-order-optimization-practice) - - [Overview](#overview) - - [Preparation](#preparation) - - [Preparing the Dataset](#preparing-the-dataset) - - [Configuring Distributed Environment Variables](#configuring-distributed-environment-variables) - - [Ascend 910](#ascend-910) - - [GPU](#gpu) - - [Loading the Dataset](#loading-the-dataset) - - [Defining the Network](#defining-the-network) - - [Defining the Loss Function and Optimizer 
THOR](#defining-the-loss-function-and-optimizer-thor) - - [Defining the Loss Function](#defining-the-loss-function) - - [Defining the Optimizer](#defining-the-optimizer) - - [Training the Network](#training-the-network) - - [Saving the Configured Model](#saving-the-configured-model) - - [Configuring the Network Training](#configuring-the-network-training) - - [Running the Script](#running-the-script) - - [Ascend 910](#ascend-910-1) - - [GPU](#gpu-1) - - [Model Inference](#model-inference) - - [Defining the Inference Network](#defining-the-inference-network) - - [Inference](#inference) - - [Ascend 910](#ascend-910-2) - - [GPU](#gpu-2) - - -   - -## Overview - -Common optimization algorithms are classified into the first-order and the second-order optimization algorithms. Typical first-order optimization algorithms, such as stochastic gradient descent (SGD), support a small amount of computation with high computation speed but a low convergence speed and require a large number of training steps. The second-order optimization algorithms use the second-order derivative of the objective function to accelerate convergence to the optimal value of a model, and require a small quantity of training steps. However, the second-order optimization algorithms have excessively high computation costs, an overall execution time of the second-order optimization algorithms is still slower than that of the first-order optimization algorithms. As a result, the second-order optimization algorithms are not widely used in deep neural network training. The main computation costs of the second-order optimization algorithms lie in the inverse operation of the second-order information matrices such as the Hessian matrix and the [Fisher information matrix (FIM)](https://arxiv.org/pdf/1808.07172.pdf). The time complexity is about $O(n^3)$. 
- -Based on the existing natural gradient algorithm, MindSpore development team uses optimized acceleration methods such as approximation and sharding for the FIM, greatly reducing the computation complexity of the inverse matrix and developing the available second-order optimizer THOR. With eight Ascend 910 AI processors, THOR can complete the training of ResNet-50 v1.5 network and ImageNet dataset within 72 minutes, which is nearly twice the speed of SGD+Momentum. - -This tutorial describes how to use the second-order optimizer THOR provided by MindSpore to train the ResNet-50 v1.5 network and ImageNet dataset on Ascend 910 and GPU. -> Download address of the complete code example: - - -Directory Structure of Code Examples - -```shell -├── resnet_thor - ├── README.md - ├── scripts - ├── run_distribute_train.sh # launch distributed training for Ascend 910 - └── run_eval.sh # launch inference for Ascend 910 - ├── run_distribute_train_gpu.sh # launch distributed training for GPU - └── run_eval_gpu.sh # launch inference for GPU - ├── src - ├── crossentropy.py # CrossEntropy loss function - ├── config.py # parameter configuration - ├── dataset_helper.py # dataset helper for minddata dataset - ├── grad_reducer_thor.py # grad reduce for thor - ├── model_thor.py # model for train - ├── resnet_thor.py # resnet50_thor backone - ├── thor.py # thor optimizer - ├── thor_layer.py # thor layer - └── dataset.py # data preprocessing - ├── eval.py # infer script - ├── train.py # train script - ├── export.py # export checkpoint file into air file - └── mindspore_hub_conf.py # config file for mindspore hub repository -``` - -The overall execution process is as follows: - -1. Prepare the ImageNet dataset and process the required dataset. -2. Define the ResNet-50 network. -3. Define the loss function and the optimizer THOR. -4. Load the dataset and perform training. After the training is complete, check the result and save the model file. -5. Load the saved model for inference. 
- -## Preparation - -Ensure that MindSpore has been correctly installed. If not, install it by referring to [Install](https://www.mindspore.cn/install/en). - -### Preparing the Dataset - -Download the complete ImageNet2012 dataset, decompress the dataset, and save it to the `ImageNet2012/ilsvrc` and `ImageNet2012/ilsvrc_eval` directories in the local workspace. - -The directory structure is as follows: - -```text -└─ImageNet2012 - ├─ilsvrc - │ n03676483 - │ n04067472 - │ n01622779 - │ ...... - └─ilsvrc_eval - │ n03018349 - │ n02504013 - │ n07871810 - │ ...... -``` - -### Configuring Distributed Environment Variables - -#### Ascend 910 - -For details about how to configure the distributed environment variables of Ascend 910 AI processors, see [Parallel Distributed Training (Ascend)](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html#configuring-distributed-environment-variables). - -#### GPU - -For details about how to configure the distributed environment of GPUs, see [Parallel Distributed Training (GPU)](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_gpu.html#configuring-distributed-environment-variables). - -## Loading the Dataset - -During distributed training, load the dataset in parallel mode and process it through the data argumentation API provided by MindSpore. The `src/dataset.py` script in the source code is for loading and processing the dataset. 
- -```python -import os -from mindspore import dtype as mstype -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as C -import mindspore.dataset.transforms.c_transforms as C2 -from mindspore.communication.management import init, get_rank, get_group_size - -def create_dataset(dataset_path, do_train, repeat_num=1, batch_size=32, target="Ascend"): - if target == "Ascend": - device_num, rank_id = _get_rank_info() - num_parallels = 8 - else: - init() - rank_id = get_rank() - device_num = get_group_size() - num_parallels = 4 - - if device_num == 1: - data_set = ds.ImageFolderDataset(dataset_path, num_parallel_workers=num_parallels, shuffle=True) - else: - data_set = ds.ImageFolderDataset(dataset_path, num_parallel_workers=num_parallels, shuffle=True, - num_shards=device_num, shard_id=rank_id) - - image_size = 224 - mean = [0.485 * 255, 0.456 * 255, 0.406 * 255] - std = [0.229 * 255, 0.224 * 255, 0.225 * 255] - - # define map operations - if do_train: - trans = [ - C.RandomCropDecodeResize(image_size, scale=(0.08, 1.0), ratio=(0.75, 1.333)), - C.RandomHorizontalFlip(prob=0.5), - C.Normalize(mean=mean, std=std), - C.HWC2CHW() - ] - else: - trans = [ - C.Decode(), - C.Resize(256), - C.CenterCrop(image_size), - C.Normalize(mean=mean, std=std), - C.HWC2CHW() - ] - - type_cast_op = C2.TypeCast(mstype.int32) - - data_set = data_set.map(operations=trans, input_columns="image", num_parallel_workers=num_parallels) - data_set = data_set.map(operations=type_cast_op, input_columns="label", num_parallel_workers=num_parallels) - - # apply batch operations - data_set = data_set.batch(batch_size, drop_remainder=True) - - # apply dataset repeat operation - data_set = data_set.repeat(repeat_num) - - return data_set -``` - -> MindSpore supports multiple data processing and augmentation operations. These operations are usually used in combination. For details, see [Data Processing](https://www.mindspore.cn/tutorial/training/en/master/use/data_preparation.html). 
- -## Defining the Network - -Use the ResNet-50 v1.5 network model as an example. Define the [ResNet-50 network](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/resnet/src/resnet.py), and replace the `Conv2d` and `Dense` operators with the operators customized by the second-order optimizer. - The defined network model is stored in the `src/resnet_thor.py` script in the source code, and the customized operators `Conv2d_thor` and `Dense_thor` are stored in the `src/thor_layer.py` script. - -- Use `Conv2d_thor` to replace `Conv2d` in the original network model. -- Use `Dense_thor` to replace `Dense` in the original network model. - -> The `Conv2d_thor` and `Dense_thor` operators customized by THOR are used to save the second-order matrix information in model training. The backbone of the newly defined network is the same as that of the original network model. - -After the network is built, call the defined ResNet-50 in the `__main__` function. - -```python -... -from src.resnet_thor import resnet50 -... -if __name__ == "__main__": - ... - # define the net - net = resnet50(class_num=config.class_num, damping=damping, loss_scale=config.loss_scale, - frequency=config.frequency, batch_size=config.batch_size) - ... -``` - -## Defining the Loss Function and Optimizer THOR - -### Defining the Loss Function - -Loss functions supported by MindSpore include `SoftmaxCrossEntropyWithLogits`, `L1Loss`, and `MSELoss`. The `SoftmaxCrossEntropyWithLogits` loss function is required by THOR. - -The implementation procedure of the loss function is in the `src/crossentropy.py` script. A common trick in deep network model training, label smoothing, is used to improve the model tolerance to error label classification by smoothing real labels, thereby improving the model generalization capability. 
- -```python -class CrossEntropy(_Loss): - """CrossEntropy""" - def __init__(self, smooth_factor=0., num_classes=1000): - super(CrossEntropy, self).__init__() - self.onehot = ops.OneHot() - self.on_value = Tensor(1.0 - smooth_factor, mstype.float32) - self.off_value = Tensor(1.0 * smooth_factor / (num_classes - 1), mstype.float32) - self.ce = nn.SoftmaxCrossEntropyWithLogits() - self.mean = ops.ReduceMean(False) - - def construct(self, logit, label): - one_hot_label = self.onehot(label, ops.shape(logit)[1], self.on_value, self.off_value) - loss = self.ce(logit, one_hot_label) - loss = self.mean(loss, 0) - return loss -``` - -Call the defined loss function in the `__main__` function. - -```python -... -from src.crossentropy import CrossEntropy -... -if __name__ == "__main__": - ... - # define the loss function - if not config.use_label_smooth: - config.label_smooth_factor = 0.0 - loss = CrossEntropy(smooth_factor=config.label_smooth_factor, num_classes=config.class_num) - ... -``` - -### Defining the Optimizer - -The parameter update formula of THOR is as follows: - -$$ \theta^{t+1} = \theta^t + \alpha F^{-1}\nabla E$$ - -The meanings of parameters in the formula are as follows: - -- $\theta$: trainable parameters on the network. -- $t$: number of training steps. -- $\alpha$: learning rate, which is the parameter update value per step. -- $F^{-1}$: FIM obtained from the network computation. -- $\nabla E$: the first-order gradient value. - -As shown in the parameter update formula, THOR needs to additionally compute an FIM of each layer, and the FIM of each layer is obtained through computation in the customized network model. The FIM can adaptively adjust the parameter update step and direction of each layer, accelerating convergence and reducing parameter optimization complexity. - -```python -... -if args_opt.device_target == "Ascend": - from src.thor import THOR -else: - from src.thor import THOR_GPU as THOR -... - -if __name__ == "__main__": - ... 
- # learning rate setting - lr = get_model_lr(0, config.lr_init, config.lr_decay, config.lr_end_epoch, step_size, decay_epochs=39) - # define the optimizer - opt = THOR(filter(lambda x: x.requires_grad, net.get_parameters()), Tensor(lr), config.momentum, - filter(lambda x: 'matrix_A' in x.name, net.get_parameters()), - filter(lambda x: 'matrix_G' in x.name, net.get_parameters()), - filter(lambda x: 'A_inv_max' in x.name, net.get_parameters()), - filter(lambda x: 'G_inv_max' in x.name, net.get_parameters()), - config.weight_decay, config.loss_scale) - ... -``` - -## Training the Network - -### Saving the Configured Model - -MindSpore provides the callback mechanism to execute customized logic during training. The `ModelCheckpoint` function provided by the framework is used in this example. -`ModelCheckpoint` can save the network model and parameters for subsequent fine-tuning. -`TimeMonitor` and `LossMonitor` are callback functions provided by MindSpore. They can be used to monitor the single training step time and `loss` value changes during training, respectively. - -```python -... -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, TimeMonitor, LossMonitor -... -if __name__ == "__main__": - ... - # define callbacks - time_cb = TimeMonitor(data_size=step_size) - loss_cb = LossMonitor() - cb = [time_cb, loss_cb] - if config.save_checkpoint: - config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, - keep_checkpoint_max=config.keep_checkpoint_max) - ckpt_cb = ModelCheckpoint(prefix="resnet", directory=ckpt_save_dir, config=config_ck) - cb += [ckpt_cb] - ... -``` - -### Configuring the Network Training - -Use the `model.train` API provided by MindSpore to easily train the network. THOR reduces the computation workload and improves the computation speed by reducing the frequency of updating the second-order matrix. Therefore, the Model_Thor class is redefined to inherit the Model class provided by MindSpore. 
The parameter for controlling the frequency of updating the second-order matrix is added to the Model_Thor class. You can adjust this parameter to optimize the overall performance. - -```python -... -from mindspore import FixedLossScaleManager -from src.model_thor import Model_Thor as Model -... - -if __name__ == "__main__": - ... - loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) - if target == "Ascend": - model = Model(net, loss_fn=loss, optimizer=opt, amp_level='O2', loss_scale_manager=loss_scale, - keep_batchnorm_fp32=False, metrics={'acc'}, frequency=config.frequency) - else: - model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics={'acc'}, - amp_level="O2", keep_batchnorm_fp32=True, frequency=config.frequency) - ... -``` - -### Running the Script - -After the training script is defined, call the shell script in the `scripts` directory to start the distributed training process. - -#### Ascend 910 - -Currently, MindSpore distributed execution on Ascend uses the single-device single-process running mode. That is, one process runs on one device, and the number of total processes is the same as the number of devices that are being used. All processes are executed in the background. Create a directory named `train_parallel`+`device_id` for each process to store log information, operator compilation information, and training checkpoint files. The following takes the distributed training script for eight devices as an example to describe how to run the script: - -Run the script. - -```bash -sh run_distribute_train.sh [RANK_TABLE_FILE] [DATASET_PATH] [DEVICE_NUM] -``` - -Variables `RANK_TABLE_FILE`, `DATASET_PATH`, and `DEVICE_NUM` need to be transferred to the script. 
The meanings of variables are as follows: - -- `RANK_TABLE_FILE`: path for storing the networking information file (about the rank table file, you can refer to [HCCL_TOOL](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/utils/hccl_tools)) -- `DATASET_PATH`: training dataset path -- `DEVICE_NUM`: the actual number of running devices. - -For details about other environment variables, see configuration items in the installation guide. - -The following is an example of loss values output during training: - -```bash -... -epoch: 1 step: 5004, loss is 4.4182425 -epoch: 2 step: 5004, loss is 3.740064 -epoch: 3 step: 5004, loss is 4.0546017 -epoch: 4 step: 5004, loss is 3.7598825 -epoch: 5 step: 5004, loss is 3.3744206 -... -epoch: 40 step: 5004, loss is 1.6907625 -epoch: 41 step: 5004, loss is 1.8217756 -epoch: 42 step: 5004, loss is 1.6453942 -... -``` - -After the training is complete, the checkpoint file generated by each device is stored in the training directory. The following is an example of the checkpoint file generated by `device_0`: - -```bash -└─train_parallel0 - ├─resnet-1_5004.ckpt - ├─resnet-2_5004.ckpt - │ ...... - ├─resnet-42_5004.ckpt - │ ...... -``` - -In the preceding information, -`*.ckpt` indicates the saved model parameter file. The name of a checkpoint file is in the following format: *Network name*-*Number of epochs*_*Number of steps*.ckpt. - -#### GPU - -On the GPU hardware platform, MindSpore uses `mpirun` of OpenMPI to perform distributed training. The process creates a directory named `train_parallel` to store log information and training checkpoint files. The following takes the distributed training script for eight devices as an example to describe how to run the script: - -```bash -sh run_distribute_train_gpu.sh [DATASET_PATH] [DEVICE_NUM] -``` - -Variables `DATASET_PATH` and `DEVICE_NUM` need to be transferred to the script. 
The meanings of variables are as follows: - -- `DATASET_PATH`: training dataset path -- `DEVICE_NUM`: the actual number of running devices - -During GPU-based training, the `DEVICE_ID` environment variable is not required. Therefore, you do not need to call `int(os.getenv('DEVICE_ID'))` in the main training script to obtain the device ID or transfer `device_id` to `context`. You need to set `device_target` to `GPU` and call `init()` to enable the NCCL. - -The following is an example of loss values output during training: - -```bash -... -epoch: 1 step: 5004, loss is 4.2546034 -epoch: 2 step: 5004, loss is 4.0819564 -epoch: 3 step: 5004, loss is 3.7005644 -epoch: 4 step: 5004, loss is 3.2668946 -epoch: 5 step: 5004, loss is 3.023509 -... -epoch: 36 step: 5004, loss is 1.645802 -... -``` - -The following is an example of model files saved after training: - -```bash -└─train_parallel - ├─ckpt_0 - ├─resnet-1_5004.ckpt - ├─resnet-2_5004.ckpt - │ ...... - ├─resnet-36_5004.ckpt - │ ...... - ...... - ├─ckpt_7 - ├─resnet-1_5004.ckpt - ├─resnet-2_5004.ckpt - │ ...... - ├─resnet-36_5004.ckpt - │ ...... -``` - -## Model Inference - -Use the checkpoint files saved during training to perform inference and validate the model generalization capability. Load the model file using the `load_checkpoint` API, call the `eval` API of the `Model` to predict the input image class, and compare the predicted class with the actual class of the input image to obtain the final prediction accuracy. - -### Defining the Inference Network - -1. Use the `load_checkpoint` API to load the model file. -2. Use the `model.eval` API to read the test dataset for inference. -3. Compute the prediction accuracy. - -```python -... -from mindspore import load_checkpoint, load_param_into_net -... - -if __name__ == "__main__": - ... 
- # define net - net = resnet(class_num=config.class_num) - net.add_flags_recursive(thor=False) - - # load checkpoint - param_dict = load_checkpoint(args_opt.checkpoint_path) - keys = list(param_dict.keys()) - for key in keys: - if "damping" in key: - param_dict.pop(key) - load_param_into_net(net, param_dict) - net.set_train(False) - - # define model - model = Model(net, loss_fn=loss, metrics={'top_1_accuracy', 'top_5_accuracy'}) - - # eval model - res = model.eval(dataset) - print("result:", res, "ckpt=", args_opt.checkpoint_path) -``` - -### Inference - -After the inference network is defined, the shell script in the `scripts` directory is called for inference. - -#### Ascend 910 - -On the Ascend 910 hardware platform, run the following inference command: - -```bash -sh run_eval.sh [DATASET_PATH] [CHECKPOINT_PATH] -``` - -Variables `DATASET_PATH` and `CHECKPOINT_PATH` need to be transferred to the script. The meanings of variables are as follows: - -- `DATASET_PATH`: inference dataset path -- `CHECKPOINT_PATH`: path for storing the checkpoint file - -Currently, a single device (device 0 by default) is used for inference. The inference result is as follows: - -```text -result: {'top_5_accuracy': 0.9295574583866837, 'top_1_accuracy': 0.761443661971831} ckpt=train_parallel0/resnet-42_5004.ckpt -``` - -- `top_5_accuracy`: For an input image, if the labels whose prediction probability ranks top 5 match actual labels, the classification is correct. -- `top_1_accuracy`: For an input image, if the label with the highest prediction probability is the same as the actual label, the classification is correct. - -#### GPU - -On the GPU hardware platform, run the following inference command: - -```bash -sh run_eval_gpu.sh [DATASET_PATH] [CHECKPOINT_PATH] -``` - -Variables `DATASET_PATH` and `CHECKPOINT_PATH` need to be transferred to the script. 
The meanings of variables are as follows: - -- `DATASET_PATH`: inference dataset path -- `CHECKPOINT_PATH`: path for storing the checkpoint file - -The inference result is as follows: - -```text -result: {'top_5_accuracy': 0.9287972151088348, 'top_1_accuracy': 0.7597031049935979} ckpt=train_parallel/resnet-36_5004.ckpt -``` diff --git a/tutorials/training/source_en/advanced_use/dashboard.md b/tutorials/training/source_en/advanced_use/dashboard.md deleted file mode 100644 index 3d74e3bedac36bdc6ca60503b9d5c0321ec387c1..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/dashboard.md +++ /dev/null @@ -1,201 +0,0 @@ -# Viewing Dashboard - -`Linux` `Ascend` `GPU` `CPU` `Model Optimization` `Intermediate` `Expert` - - - -- [Viewing Dashboard](#viewing-dashboard) - - [Overview](#overview) - - [Scalar Visualization](#scalar-visualization) - - [Parameter Distribution Visualization](#parameter-distribution-visualization) - - [Computational Graph Visualization](#computational-graph-visualization) - - [Dataset Graph Visualization](#dataset-graph-visualization) - - [Image Visualization](#image-visualization) - - [Tensor Visualization](#tensor-visualization) - - [Notices](#notices) - - - - - -## Overview - -Training dashboard is an important part of mindinsight's visualization component, and its tags include scalar visualization, parameter distribution visualization, computational graph visualization, data graph visualization, image visualization and tensor visualization. - -Access the Training Dashboard by selecting a specific training from the training list. - -## Scalar Visualization - -Scalar visualization is used to display the change trend of scalars during training. - -![scalar.png](./images/scalar.png) - -Figure 1: Scalar trend chart - -Figure 1 shows a change process of loss values during the neural network training. The horizontal coordinate indicates the training step, and the vertical coordinate indicates the loss value. 
- -Buttons from left to right in the upper right corner of the figure are used to display the chart in full screen, switch the Y-axis scale, enable or disable the rectangle selection, roll back the chart step by step, and restore the chart. - -- Full-screen display: Display the scalar curve in full screen. Click the button again to restore it. -- Switch Y-axis scale: Perform logarithmic conversion on the Y-axis coordinate. -- Enable/Disable rectangle selection: Draw a rectangle to select and zoom in a part of the chart. You can perform rectangle selection again on the zoomed-in chart. -- Step-by-step Rollback: Cancel operations step by step after continuously drawing rectangles to select and zooming in the same area. -- Restore chart: Restore a chart to the original state. - -The threshold value can be set to highlight the value. You can also delete the threshold value in the lower right corner of the figure. As shown in the figure, the threshold is set less than 1.5. The loss values that are below the threshold are highlighted in red, and it is clear to check the expected data values or some unusual values. - -![scalar_select.png](./images/scalar_select.png) - -Figure 2: Scalar visualization function area - -Figure 2 shows the scalar visualization function area, which allows you to view scalar information by selecting different tags, different dimensions of the horizontal axis, and smoothness. - -- Tag selection: Select the required tags to view the corresponding scalar information. -- Horizontal axis: Select any of Step, Relative Time, and Absolute Time as the horizontal axis of the scalar curve. -- Smoothness: Adjust the smoothness to smooth the scalar curve. -- Scalar synthesis: Synthesize two scalar curves and display them in a chart to facilitate comparison between the two curves or view the synthesized chart. 
- -![scalar_compound.png](./images/scalar_compound.png) - -Figure 3: Scalar synthesis of Accuracy and Loss curves - -Figure 3 shows the scalar synthesis of the Accuracy and Loss curves. The function area of scalar synthesis is similar to that of scalar visualization. Differing from the scalar visualization function area, the scalar synthesis function allows you to select the maximum of two tags at a time to synthesize and display their curves. - -## Parameter Distribution Visualization - -The parameter distribution in a form of a histogram displays tensors specified by a user. - -![histogram.png](./images/histogram.png) - -Figure 4: Histogram - -Figure 4 shows tensors recorded by a user in a form of a histogram. Click the upper right corner to zoom in the histogram. - -![histogram_func.png](./images/histogram_func.png) - -Figure 5: Function area of the parameter distribution histogram - -Figure 5 shows the function area of the parameter distribution histogram, including: - -- Tag selection: Select the required tags to view the corresponding histogram. -- Vertical axis: Select any of `Step`, `Relative time`, and `Absolute time` as the data displayed on the vertical axis of the histogram. -- Angle of view: Select either `Front` or `Top`. `Front` view refers to viewing the histogram from the front view. In this case, data between different steps is overlapped. `Top` view refers to viewing the histogram at an angle of 45 degrees. In this case, data between different steps can be presented. - -## Computational Graph Visualization - -Computational graph visualization is used to display the graph structure, data flow direction, and control flow direction of a computational graph. It supports visualization of summary log files and pb files generated by `save_graphs` configuration in `context`. - -![graph.png](./images/graph.png) - -Figure 6: Computational graph display area - -Figure 6 shows the network structure of a computational graph. 
As shown in the figure, select an operator in the area of the display area. The operator has two inputs and one outputs (the solid line indicates the data flow direction of the operator). - -![graph_sidebar.png](./images/graph_sidebar.png) - -Figure 7: Computational graph function area - -Figure 7 shows the function area of the computational graph, including: - -- File selection box: View the computational graphs of different files. -- Search box: Enter a node name and press Enter to view the node. -- Thumbnail: Display the thumbnail of the entire network structure. When viewing an extra large image structure, you can view the currently browsed area. -- Node information: Display the basic information of the selected node, including the node name, properties, input node, and output node. -- Legend: Display the meaning of each icon in the computational graph. - -## Dataset Graph Visualization - -Dataset graph visualization is used to display data processing and augmentation information of a single model training. - -![data_function.png](./images/data_function.png) - -Figure 8: Dataset graph function area - -Figure 8 shows the dataset graph function area which includes the following content: - -- Legend: Display the meaning of each icon in the data lineage graph. -- Data processing pipeline: Display the data processing pipeline used for training. Select a single node in the graph to view details. -- Node information: Display basic information about the selected node, including names and parameters of the data processing and augmentation operators. - -## Image Visualization - -Image visualization is used to display images specified by users. - -![image.png](./images/image_vi.png) - -Figure 9: Image visualization - -Figure 9 shows how to view images of different steps by sliding the Step slider. - -![image_function.png](./images/image_function.png) - -Figure 10: Image visualization function area - -Figure 10 shows the function area of image visualization. 
You can view image information by selecting different tags, brightness, and contrast. - -- Tag: Select the required tags to view the corresponding image information. -- Brightness adjustment: Adjust the brightness of all displayed images. -- Contrast adjustment: Adjust the contrast of all displayed images. - -## Tensor Visualization - -Tensor visualization is used to display tensors in the form of table and histogram. - -![tensor_function.png](./images/tensor_function.png) - -Figure 11: Tensor visualization function area - -Figure 11 shows the function area of tensor visualization. - -- Tag selection: Select the required tags to view the corresponding table data or histogram. -- View: Select `Table` or `Histogram` to display tensor data. In the `Histogram` view, there are the options of `Vertical axis` and `Angle of view`. -- Vertical axis: Select any of `Step`, `Relative time`, and `Absolute time` as the data displayed on the vertical axis of the histogram. -- Angle of view: Select either `Front` or `Top`. `Front` view refers to viewing the histogram from the front view. In this case, data between different steps is overlapped. `Top` view refers to viewing the histogram at an angle of 45 degrees. In this case, data between different steps can be presented. - -![tensor_table.png](./images/tensor_table.png) - -Figure 12: Table display - -Figure 12 shows tensors recorded by a user in a form of a table which includes the following function: - -- Click the small square button on the right side of the table to zoom in the table. -- The white box in the table shows the tensor data under which dimension is currently displayed. The colon `:` indicates index range of the current dimension which is basically the same as the meaning of Python index. If no specific index is specified, it indicates all the values of the current dimension and `2:5` indicates the value of index from 2 to 5 (not including 5). 
you can enter the corresponding index or index range containing `:` in the box and press `Enter` or click the button of tick on the back to query tensor data for specific dimensions. Assuming a certain dimension is 32, the index range is -32 to 31. Note: tensor data from 0 to 2 dimensions can be queried. Tensor data of more than two dimensions is not supported, in other word, the query conditions of more than two colons `:` cannot be set. -- Query the tensor data of a specific step by dragging the hollow circle below the table. - -![tensor_histogram.png](./images/tensor_histogram.png) - -Figure 13: Histogram display - -Figure 13 shows tensors recorded by a user in a form of a histogram. Click the upper right corner to zoom in the histogram. - -## Notices - -1. Currently MindSpore supports recording computational graph after operator fusion for Ascend 910 AI processor only. - -2. When using the Summary operator to collect data in training, 'HistogramSummary' operator will affect performance, so please use as few as possible. - -3. To limit memory usage, MindInsight limits the number of tags and steps: - - There are 300 tags at most in each training dashboard. The total number of scalar tags, image tags, computation graph tags, parameter distribution(histogram) tags, tensor tags cannot exceed 300. Specially, there are 10 computation graph tags and 6 tensor tags at most. When the number of tags exceeds the limit, MindInsight preserves the most recently processed tags. - - There are 1000 steps at most for each scalar tag in each training dashboard. When the number of steps exceeds the limit, MindInsight will sample steps randomly to meet this limit. - - There are 10 steps at most for each image tag in each training dashboard. When the number of steps exceeds the limit, MindInsight will sample steps randomly to meet this limit. - - There are 50 steps at most for each parameter distribution(histogram) tag in each training dashboard. 
When the number of steps exceeds the limit, MindInsight will sample steps randomly to meet this limit. - - There are 20 steps at most for each tensor tag in each training dashboard. When the number of steps exceeds the limit, MindInsight will sample steps randomly to meet this limit. - -4. Since `TensorSummary` will record complete tensor data, the amount of data is usually relatively large. In order to limit memory usage and ensure performance, MindInsight make the following restrictions with the size of tensor and the number of value responded and displayed on the front end: - - MindInsight supports loading tensors that contain up to 10 million values. - - MindInsight supports the column of tensor displayed on the front end up to 1000 columns for each query. - - After the tensor is loaded, in the tensor-visible table view, you can view the maximum of 100,000 values. If the value obtained by the selected dimension query exceeds this limit, it cannot be displayed. - -5. Since tensor visualizatioin (`TensorSummary`) records raw tensor data, it requires a large amount of storage space. Before using `TensorSummary` and during training, please check that the system storage space is sufficient. - The storage space occupied by the tensor visualizatioin function can be reduced by the following methods: - 1) Avoid using `TensorSummary` to record larger tensor. - - 2) Reduce the number of `TensorSummary` operators in the network. - - After using the function, please clean up the training logs that are no longer needed in time to free up disk space. - - Remarks: The method of estimating the space usage of `TensorSummary` is as follows: - - The size of a `TensorSummary data = the number of values in the tensor \* 4 bytes`. Assuming that the size of the tensor recorded by `TensorSummary` is `32 \* 1 \* 256 \* 256`, then a `TensorSummary` data needs about `32 \* 1 \* 256 \* 256 \* 4 bytes = 8,388,608 bytes = 8MiB`. `TensorSummary` will record data of 20 steps by default. 
Then the required space when recording these 20 sets of data is about `20 \* 8 MiB = 160MiB`. It should be noted that due to the overhead of data structure and other factors, the actual storage space used will be slightly larger than 160MiB. - -6. The training log file is large when using `TensorSummary` because the complete tensor data is recorded. MindInsight needs more time to parse the training log file, please be patient. diff --git a/tutorials/training/source_en/advanced_use/debug_in_pynative_mode.md b/tutorials/training/source_en/advanced_use/debug_in_pynative_mode.md deleted file mode 100644 index fdfa218797bb50c2a89ed0f0ce4df37ce1f17953..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/debug_in_pynative_mode.md +++ /dev/null @@ -1,386 +0,0 @@ -# Debugging in PyNative Mode - -`Linux` `Ascend` `GPU` `CPU` `Model Development` `Beginner` `Intermediate` `Expert` - - - -- [Debugging in PyNative Mode](#debugging-in-pynative-mode) - - [Overview](#overview) - - [Executing a Single Operator](#executing-a-single-operator) - - [Executing a Common Function](#executing-a-common-function) - - [Improving PyNative Performance](#improving-pynative-performance) - - [Debugging Network Train Model](#debugging-network-train-model) - - - - - -## Overview - -MindSpore supports the following running modes which are optimized for debugging or running: - -- PyNative mode: dynamic graph mode. In this mode, operators in the neural network are delivered and executed one by one, facilitating the compilation and debugging of the neural network model. -- Graph mode: static graph mode. In this mode, the neural network model is compiled into an entire graph and then delivered for execution. This mode uses technologies such as graph optimization to improve the running performance and facilitates large-scale deployment and cross-platform running. - -By default, MindSpore is in PyNative mode. 
You can switch it to the graph mode by calling `context.set_context(mode=context.GRAPH_MODE)`. Similarly, MindSpore in graph mode can be switched to the PyNative mode through `context.set_context(mode=context.PYNATIVE_MODE)`. - -In PyNative mode, single operators, common functions, network inference, and separated gradient calculation can be executed. The following describes the usage and precautions. - -> In PyNative mode, operators are executed asynchronously on the device to improve performance. Therefore, when an error occurs during operator execution, the error information may be displayed after the program is executed. - -## Executing a Single Operator - -Execute a single operator and output the result, as shown in the following example. - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -conv = nn.Conv2d(3, 4, 3, bias_init='zeros') -input_data = Tensor(np.ones([1, 3, 5, 5]).astype(np.float32)) -output = conv(input_data) -print(output.asnumpy()) -``` - -Output: - -```python -[[[[-0.02190447 -0.05208071 -0.05208071 -0.05208071 -0.06265172] -[-0.01529094 -0.05286242 -0.05286242 -0.05286242 -0.04228776] -[-0.01529094 -0.05286242 -0.05286242 -0.05286242 -0.04228776] -[-0.01529094 -0.05286242 -0.05286242 -0.05286242 -0.04228776] -[-0.01430791 -0.04892948 -0.04892948 -0.04892948 -0.01096004]] - -[[ 0.00802889 -0.00229866 -0.00229866 -0.00229866 -0.00471579] -[ 0.01172971 0.02172665 0.02172665 0.02172665 0.03261888] -[ 0.01172971 0.02172665 0.02172665 0.02172665 0.03261888] -[ 0.01172971 0.02172665 0.02172665 0.02172665 0.03261888] -[ 0.01784375 0.01185635 0.01185635 0.01185635 0.01839031]] - -[[ 0.04841832 0.03321705 0.03321705 0.03321705 0.0342317 ] -[ 0.0651359 0.04310361 0.04310361 0.04310361 0.03355784] -[ 0.0651359 0.04310361 0.04310361 0.04310361 0.03355784] -[ 0.0651359 0.04310361 0.04310361 0.04310361 0.03355784] -[ 0.04680437 0.03465693 
0.03465693 0.03465693 0.00171057]] - -[[-0.01783456 -0.00459451 -0.00459451 -0.00459451 0.02316688] -[ 0.01295831 0.00879035 0.00879035 0.00879035 0.01178642] -[ 0.01295831 0.00879035 0.00879035 0.00879035 0.01178642] -[ 0.01295831 0.00879035 0.00879035 0.00879035 0.01178642] -[ 0.05016355 0.03958241 0.03958241 0.03958241 0.03443141]]]] -``` - -## Executing a Common Function - -Combine multiple operators into a function, call the function to execute the operators, and output the result, as shown in the following example: - -Example Code: - -```python -import numpy as np -from mindspore import context, Tensor -import mindspore.ops as ops - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -def add_func(x, y): - z = ops.add(x, y) - z = ops.add(z, x) - return z - -x = Tensor(np.ones([3, 3], dtype=np.float32)) -y = Tensor(np.ones([3, 3], dtype=np.float32)) -output = add_func(x, y) -print(output.asnumpy()) -``` - -Output: - -```text -[[3. 3. 3.] - [3. 3. 3.] - [3. 3. 3.]] -``` - -> Summary is not supported in PyNative mode, so summary related operators cannot be used. - -### Improving PyNative Performance - -MindSpore provides the Staging function to improve the execution speed of inference tasks in PyNative mode. 
This function compiles Python functions or Python class methods into computational graphs in PyNative mode and improves the execution speed by using graph optimization technologies, as shown in the following example: - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor -import mindspore.ops as ops -from mindspore import ms_function - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -class TensorAddNet(nn.Cell): - def __init__(self): - super(TensorAddNet, self).__init__() - self.add = ops.Add() - - @ms_function - def construct(self, x, y): - res = self.add(x, y) - return res - -x = Tensor(np.ones([4, 4]).astype(np.float32)) -y = Tensor(np.ones([4, 4]).astype(np.float32)) -net = TensorAddNet() - -z = net(x, y) # Staging mode -add = ops.Add() -res = add(x, z) # PyNative mode -print(res.asnumpy()) -``` - -Output: - -```text -[[3. 3. 3. 3.] - [3. 3. 3. 3.] - [3. 3. 3. 3.] - [3. 3. 3. 3.]] -``` - -In the preceding code, the `ms_function` decorator is added before `construct` of the `TensorAddNet` class. The decorator compiles the `construct` method into a computational graph. After the input is given, the graph is delivered and executed, `add` in the preceding code is executed in the common PyNative mode. 
- -It should be noted that, in a function to which the `ms_function` decorator is added, if an operator (such as `pooling` or `add`) that does not need parameter training is included, the operator can be directly called in the decorated function, as shown in the following example: - -Example Code: - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor -import mindspore.ops as ops -from mindspore import ms_function - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -add = ops.Add() - -@ms_function -def add_fn(x, y): - res = add(x, y) - return res - -x = Tensor(np.ones([4, 4]).astype(np.float32)) -y = Tensor(np.ones([4, 4]).astype(np.float32)) -z = add_fn(x, y) -print(z.asnumpy()) -``` - -Output: - -```text -[[2. 2. 2. 2.] - [2. 2. 2. 2.] - [2. 2. 2. 2.] - [2. 2. 2. 2.]] -``` - -If the decorated function contains operators (such as `Convolution` and `BatchNorm`) that require parameter training, these operators must be instantiated before the decorated function is called, as shown in the following example: - -Example Code: - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor -from mindspore import ms_function - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -conv_obj = nn.Conv2d(in_channels=3, out_channels=4, kernel_size=3, stride=2, padding=0) -conv_obj.init_parameters_data() -@ms_function -def conv_fn(x): - res = conv_obj(x) - return res - -input_data = np.random.randn(2, 3, 6, 6).astype(np.float32) -z = conv_fn(Tensor(input_data)) -print(z.asnumpy()) -``` - -Output: - -```text -[[[[ 0.10377571 -0.0182163 -0.05221086] -[ 0.1428334 -0.01216263 0.03171652] -[-0.00673915 -0.01216291 0.02872104]] - -[[ 0.02906547 -0.02333629 -0.0358406 ] -[ 0.03805163 -0.00589525 0.04790922] -[-0.01307234 -0.00916951 0.02396654]] - -[[ 0.01477884 -0.06549098 -0.01571796] -[ 0.00526886 -0.09617482 0.04676902] -[-0.02132788 -0.04203424 0.04523344]] - 
-[[ 0.04590619 -0.00251453 -0.00782715] -[ 0.06099087 -0.03445276 0.00022781] -[ 0.0563223 -0.04832596 -0.00948266]]] - -[[[ 0.08444098 -0.05898955 -0.039262 ] -[ 0.08322686 -0.0074796 0.0411371 ] -[-0.02319113 0.02128408 -0.01493311]] - -[[ 0.02473745 -0.02558945 -0.0337843 ] -[-0.03617039 -0.05027632 -0.04603915] -[ 0.03672804 0.00507637 -0.08433761]] - -[[ 0.09628943 0.01895323 -0.02196114] -[ 0.04779419 -0.0871575 0.0055248 ] -[-0.04382382 -0.00511185 -0.01168541]] - -[[ 0.0534859 0.02526264 0.04755395] -[-0.03438103 -0.05877855 0.06530266] -[ 0.0377498 -0.06117418 0.00546303]]]] -``` - -## Debugging Network Train Model - -In PyNative mode, the gradient can be calculated separately. As shown in the following example, `GradOperation` is used to calculate all input gradients of the function or the network. Note that the inputs have to be Tensor. - -Example Code: - -```python -import mindspore.ops as ops -import mindspore.context as context -from mindspore import dtype as mstype -from mindspore import Tensor - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -def mul(x, y): - return x * y - -def mainf(x, y): - return ops.GradOperation(get_all=True)(mul)(x, y) - -print(mainf(Tensor(1, mstype.int32), Tensor(2, mstype.int32))) -``` - -Output: - -```text -(Tensor(shape=[], dtype=Int32, value=2), Tensor(shape=[], dtype=Int32, value=1)) -``` - -During network training, obtain the gradient, call the optimizer to optimize parameters (the breakpoint cannot be set during the reverse gradient calculation), and calculate the loss values. Then, network training is implemented in PyNative mode. 
- -Complete LeNet Sample Code: - -```python -import numpy as np -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import dtype as mstype -from mindspore import context, Tensor, ParameterTuple -from mindspore.common.initializer import TruncatedNormal -from mindspore.nn import Dense, WithLossCell, SoftmaxCrossEntropyWithLogits, Momentum - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0): - """weight initial for conv layer""" - weight = weight_variable() - return nn.Conv2d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, padding=padding, - weight_init=weight, has_bias=False, pad_mode="valid") - -def fc_with_initialize(input_channels, out_channels): - """weight initial for fc layer""" - weight = weight_variable() - bias = weight_variable() - return nn.Dense(input_channels, out_channels, weight, bias) - -def weight_variable(): - """weight initial""" - return TruncatedNormal(0.02) - - -class LeNet5(nn.Cell): - """ - Lenet network - Args: - num_class (int): Num classes. Default: 10. 
- - Returns: - Tensor, output tensor - - Examples: - >>> LeNet(num_class=10) - """ - def __init__(self, num_class=10): - super(LeNet5, self).__init__() - self.num_class = num_class - self.batch_size = 32 - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16 * 5 * 5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, self.num_class) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.reshape = ops.Reshape() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.reshape(x, (self.batch_size, -1)) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x - - -class GradWrap(nn.Cell): - """ GradWrap definition """ - def __init__(self, network): - super(GradWrap, self).__init__(auto_prefix=False) - self.network = network - self.weights = ParameterTuple(filter(lambda x: x.requires_grad, network.get_parameters())) - - def construct(self, x, label): - weights = self.weights - return ops.GradOperation(get_by_list=True)(self.network, weights)(x, label) - -net = LeNet5() -optimizer = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.1, 0.9) -criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') -net_with_criterion = WithLossCell(net, criterion) -train_network = GradWrap(net_with_criterion) -train_network.set_train() - -input_data = Tensor(np.ones([net.batch_size, 1, 32, 32]).astype(np.float32) * 0.01) -label = Tensor(np.ones([net.batch_size]).astype(np.int32)) -output = net(Tensor(input_data)) -loss_output = criterion(output, label) -grads = train_network(input_data, label) -success = optimizer(grads) -loss = loss_output.asnumpy() -print(loss) -``` - -Output: - -```text -2.3050091 -``` - -In the preceding execution, an intermediate result of network execution can be obtained at 
any required place in `construt` function, and the network can be debugged by using the Python Debugger (pdb). diff --git a/tutorials/training/source_en/advanced_use/debugger.md b/tutorials/training/source_en/advanced_use/debugger.md deleted file mode 100644 index 963e96833edb65c7d246ac1348aceedc77ddfd0b..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/debugger.md +++ /dev/null @@ -1,243 +0,0 @@ -# Using Debugger - -`Linux` `Ascend` `GPU` `Model Optimization` `Intermediate` `Expert` - - - -- [Using Debugger](#using-debugger) - - [Overview](#overview) - - [Operation Process](#operation-process) - - [Debugger Environment Preparation](#debugger-environment-preparation) - - [Debugger UI Introduction](#debugger-ui-introduction) - - [Computational Graph](#computational-graph) - - [Node List](#node-list) - - [Graph Node Details](#graph-node-details) - - [Watchpoint List](#watchpoint-list) - - [Setting Watchpoints](#setting-watchpoints) - - [Recheck](#recheck) - - [Training Control](#training-control) - - [Tensor Check View](#tensor-check-view) - - [Debugger Usage Example](#debugger-usage-example) - - [Notices](#notices) - - - - - -## Overview - -MindSpore Debugger is a debugging tool for training in `Graph Mode`. It can be applied to visualize and analyze the intermediate computation results of the computational graph. - -In `Graph Mode` training, the computation results of intermediate nodes in the computational graph can not be acquired from the python layer, which makes it difficult for users to do the debugging. By applying MindSpore Debugger, users can: - -- Visualize the computational graph on the UI and analyze the output of the graph node. -- Set watchpoints to monitor training exceptions (for example, tensor overflow) and trace error causes. -- Visualize and analyze the change of parameters, such as weights. - -## Operation Process - -- Launch MindInsight in debugger mode and wait for the training. 
-- Set debugger environment variables and run the training script. -- After the training is connected, set watchpoints on the MindInsight Debugger UI. -- Analyze the training progress on MindInsight Debugger UI. - -## Debugger Environment Preparation - -At first, install MindInsight and launch it in debugger mode. MindSpore will send training information to MindInsight Debugger Server in debugger mode, users can analyze the information on MindInsight UI. - -The command to launch MindInsight in debugger mode is as follows: - -```shell -mindinsight start --port {PORT} --enable-debugger True --debugger-port {DEBUGGER_PORT} -``` - -The Debugger related parameters: - -|Name|Argument|Description|Type|Default|Scope| -|---|---|---|---|---|---| -|`--port {PORT}`|Optional|Specifies the port number of the web visualization service.|Integer|8080|1~65535| -|`--enable-debugger {ENABLE_DEBUGGER}`|Optional|Should be set to `True` or `1`, this will launch the MindInsight debugger server; Default is `False`, not launch.|Boolean|False|True/False/1/0| -|`--debugger-port {DEBUGGER_PORT}`|Optional|Specifies the port number of the debugger server.|Integer|50051|1~65535| - -For more launch parameters, please refer to [MindInsight Commands](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/mindinsight_commands.html). - -Then, set `export ENABLE_MS_DEBUGGER=1` or `export ENABLE_MS_DEBUGGER=True` to specify the training is in the debugger mode, and set the debugger host and port to which the training is connected: -`export MS_DEBUGGER_HOST=127.0.0.1` (the service address must be consistent with MindInsight host address); -`export MS_DEBUGGER_PORT=50051` (the port must be consistent with MindInsight debugger-port). 
-
-If the memory space of your equipment is limited, you can use the memory reuse mode before starting the training to reduce the running space: `export MS_DEBUGGER_PARTIAL_MEM=1`.
-
-In addition, do not use data offload mode during training (you need to set `dataset_sink_mode` in `model.train` to `False`) to ensure that the debugger can obtain the training information of each step.
-
-After the debugger environment is prepared, run the training script.
-
-## Debugger UI Introduction
-
-After the training is connected, you can view the training meta information such as a computational graph on the MindInsight Debugger UI, which consists of the computational graph, node list, node information, watchpoint list, and watchpoint hit list.
-The Debugger UI components are shown as follows.
-
-![debugger_init_page](./images/debugger_init_page.png)
-
-Figure 1: The initial UI of debugger
-
-### Computational Graph
-
-Debugger will display the optimized computational graph in the upper middle area of the page.
-Users can click the box (stands for one `scope`) to expand the graph, and analyze the nodes contained in that `scope`.
-
-The area on the top shows the training metadata, such as the `Client IP` (address and port of the training script process),
-`Device ID` being used and the current training `Step`.
-
-In the GPU environment, the `Current Node` and `Next Node` buttons are used to return to the current execution node and execute the next node, respectively, and are displayed in the upper right corner of the computational graph area.
-
-### Node List
-
-![debugger_search_node_type](./images/debugger_search_node_type.png)
-
-Figure 2: The node list filtered by node type
-
-As shown in Figure 1, the computational graph `Node List` will be displayed on the left of the UI.
-The `Node List` can be expanded according to the `scope` of the nodes.
-When clicking one node in the list, the computational graph on the right will also be expanded and choose the corresponding node automatically. - -You can filter nodes by `Graph File` and `Node Type` under `Node List`, as shown in Figure 2, and search for nodes by entering their names in the search box under `Node Type`. - -### Graph Node Details - -After clicking a graph node, you can view its detailed information in the lower part of the UI, including the output and input, training steps (`Step`), as well as data types (`DType`), shapes (`Shape`), and values (`Value`) of a tensor, as shown in Figure 2. - -In the GPU environment, select and right-click an executable graph node, and choose `Run to This Node` from the shortcut menu to run the training script to the selected node (no more than one step). - -### Watchpoint List - -![debugger_watch_point_list](./images/debugger_watch_point_list.png) - -Figure 3: The watchpoint list - -As shown in Figure 3, the watchpoint list is in the lower left corner of the UI. The three icons from left to right in the upper right corner of the watchpoint list are used to `recheck`, `clear`, and `create` watchpoints. - -### Setting Watchpoints - -![debugger_set_watch_point](./images/debugger_set_watch_point.png) - -Figure 4: Creating watchpoint - -To monitor and analyze the computation result of a node, you can set a watchpoint for the node in a computational graph. Figure 4 shows how to set a watchpoint. You can click the `+` icon in the upper right corner of the watchpoint list to add a watchpoint and select a check condition. For example, if you want to check whether a tensor is above the threshold, select a check condition, enter a threshold, and click OK to create a watchpoint. -After a watchpoint is created, manually select the node to be checked and click `√` next to the watchpoint. 
If you select `Weight check`, `Gradient check`, or `Activation value check` when creating a watchpoint, the weight, gradient, or activation node is automatically selected. You can manually change the selected node after clicking OK. - -The following conditions are supported (abbreviations in parentheses): - -- Tensor check - - Operator overflow (OO): Check whether overflow occurs during operator computation. Only the Ascend AI Processor is supported. - - Whether tensor values are all 0 (TZ): Set the threshold to `Percentage of 0 values ≥` to check the percentage of 0 tensor values. - - Tensor overflow (TO): Check whether a tensor value overflow occurs. - - Tensor value range (TR): Set a threshold to check the tensor value range. The options are `Percentage of the value in the range >`, `Percentage of the value in the range <`, `MAX-MIN>` and `MAX-MIN<`. If setting the threshold to `Percentage of the value in the range >` or `Percentage of the value in the range <`, you need to set the `Upper limit of the range (inclusive)` or `Lower limit of the range (inclusive)` at the same time. - - Tensor above threshold (TL): Set a threshold to check whether the tensor value is too large. The options are `Average of the absolute value >`, `max >`, `min >`, and `mean >`. - - Tensor below threshold (TS): Set a threshold to check whether the tensor value is too small. The options are `Average of the absolute value <`, `max <`, `min <`, and `mean <`. - -- Weight check - - Weight change above threshold (WCL): Set a threshold to `Average change ratio >` to check whether the weight value change is too large. - - `Average change ratio` = `mean(abs(Current weight value - Weight value in previous step))/(mean(abs(Weight value in previous step)) + Offset)`. - - Weight change below threshold (WCS): Set a threshold to `Average change ratio <` to check whether the weight value change is too small. - - Initial weight value (WI): Set a threshold to check the initial weight value. 
The options are `Percentage of 0 values ≥`, `max >`, and `min <`. - - Unchanged weight (WNC): Set the threshold to `Relative tolerance` to check whether the weight is updated. - - Weight overflow (WO): Check whether a weight value overflow occurs. - - Weight above threshold (WL): Set a threshold to check whether the weight value is too large. The options are `Average of the absolute value >`, `max >`, `min >`, and `mean >`. - - Weight below threshold (WS): Set a threshold to check whether the weight value is too small. The options are `Average of the absolute value <`, `max <`, `min <`, and `mean <`. - -- Activation value check - - Activation value range (AR): Set a threshold to check the activation value range. The options are `Percentage of the value in the range >`, `Percentage of the value in the range <`, `MAX-MIN>` and `MAX-MIN<`. If setting the threshold to `Percentage of the value in the range >` or `Percentage of the value in the range <`, you need to set the `Upper limit of the range (inclusive)` or `Lower limit of the range (inclusive)` at the same time. - -- Gradient check - - Gradient explosion (GE): Check whether a gradient value overflow occurs. - - Gradient above threshold (GL): Set a threshold to check whether the gradient value is too large. The options are `Average of the absolute value >`, `max >`, `min >`, and `mean >`. - - Gradient disappearance (GV): Set a threshold to check whether the gradient value is too small. The options are `Average of the absolute value <`, `max <`, `min <`, and `mean <`. - -After a watchpoint is generated, you can select or deselect nodes to be monitored in the node list, as shown in Figure 3. In addition, you can click the `clear watchpoint` icon or `X` icon to delete watchpoints. - -During training, the debugger analyzes the outputs of these monitored nodes in real time. Once the watchpoint conditions are hit, the training is suspended. You can view the information about the hit watchpoints on the UI. 
- -![debugger_watch_point_hit](images/debugger_watch_point_hit.png) - -Figure 5: Viewing hit watchpoints - -The hit watchpoints are displayed on the left of the UI. The hit nodes and watchpoint conditions are sorted based on the node execution sequence. Each record displays the configured threshold and the actual value. -In addition, after you click a record, the corresponding node is displayed in the computational graph. You can view the node information to analyze the possible cause. Click `View` to enter the tensor check view. You can view the hit watchpoint information and optimization guide, as shown in Figure 6. - -### Recheck - -To perform more detailed monitoring and analysis on a node, you can modify the node to be monitored, add or delete watchpoints, and then check the current step again. The `recheck` icon is in the upper right corner of the watchpoint list as shown in figure 3. - -### Training Control - -At the bottom of the watchpoint setting panel is the training control panel, which shows the training control functions of the debugger, -with four buttons: `CONTINUE`, `PAUSE`, `TERMINATE` and `OK`: - -- `OK` stands for executing the training for several steps, the number of the `step` can be specified in the above bar. The training will be paused until the `Watch Point List` is triggered, or the number of `step` is reached. -- `CONTINUE` stands for executing the training until the `Watch Point List` is triggered, or the training is finished. -- `PAUSE` stands for pausing the training. -- `TERMINATE` stands for terminating the training. - -### Tensor Check View - -![debugger_tensor_view](images/debugger_tensor_view.png) - -Figure 6: Viewing tensors value - -Some `tensors` have too many dimensions and cannot be directly displayed on the home page. You can click the corresponding `View` button to view the detailed information about the `tensor` value on the displayed tensor check view. 
- -As shown in Figure 6, the tensor check view displays the `tensor` values in the upper part of the UI. You can set the `Dimension Selection` and click `Current Step`, `Previous step`, and `Comparison Result` to display and compare tensors. (Currently, the parameter node can be compared only with the previous one step.) In addition, you can set shards in `Dimension Selection` to display a `tensor` in the specified dimension. - -The `node information`, `current step`, and `statistics` are displayed on the top of the view. The optimization guide is displayed on the left of the view. When a watchpoint is hit, the hit information and optimization suggestions are displayed. The tensor relationship diagram and detailed `node information` are displayed on the lower part of the view. - -Based on the tensor relationship diagram, you can analyze which tensors are used to compute the current tensor and which constants are affected by the current tensor. Abbreviations of watchpoint conditions are displayed on the diagram, helping you quickly identify the propagation path of tensor issues. Each condition abbreviation can be found in "Setting Watchpoints". - -## Debugger Usage Example - -1. Prepare the debugger environment, and open the MindInsight Debugger UI. - - ![debugger_waiting](./images/debugger_waiting.png) - - Figure 7: Debugger Start and Waiting for the Training - - The Debugger server is launched and waiting for the training to connect. - -2. Run the training script on terminal. - -3. Wait for a moment. A dialog box is displayed on the MindInsight UI, asking you whether to use the recommended watchpoints, as shown in the following figure. - - ![debugger_ask_recommend](images/debugger_ask_recommend.png) - - Figure 8: Debugger ask whether to use the recommended watchpoints - -4. Later, you can see that the computational graph is displayed on the Debugger UI, as shown in Figure 1. - -5. Set watchpoints. - - Select the watchpoint conditions, as shown in Figure 4. 
Select or deselect certain nodes as shown in Figure 3. The debugger monitors whether outputs that meet the watchpoint conditions exist during the node computation process.
- After setting the watchpoints, you can set `step` and click `OK`, or just click `CONTINUE` to continue the training.
-
-6. Trigger watchpoints, as shown in Figure 5.
-
- After watchpoints are hit, you can view the corresponding node information, find the exception cause on the tensor check view, and modify the script to rectify the fault.
-
-## Notices
-
-- Scenarios:
- - The debugger does not support distributed training scenarios.
- - The debugger does not support inference scenarios.
- - The debugger does not support the single-node multi-device or cluster scenario.
- - The Debugger does not support connecting to multiple training processes.
- - The debugger does not support CPU scenarios.
-
-- Impact on Performance:
- - Debugger will slow down the training performance.
- - When too many `Watch Points` are set, the system may run out of memory.
-
-- GPU Scenario:
- - In the GPU scenario, only the parameter nodes that meet requirements can be compared with the previous step. For example, nodes executed on the `next node`, nodes selected when `Run to This Node` is chosen, and nodes input as `watchpoints` can be compared. In other cases, the `Compare with Previous Step` function cannot be used.
- - The previous step in the GPU scenario is a subgraph (not a complete graph). Therefore, when multiple graphs are rechecked on a GPU, only the current subgraph can be checked again.
-
-- Recheck only watchpoints that have tensor values.
-- To check overflow during computation, you need to enable the overflow detection function of the asynchronous dump. For details about how to enable the function, see [Asynchronous Dump](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/custom_debugging_info.html#asynchronous-dump).
-- The graph displayed by the debugger is the finally optimized execution graph. The called operator may have been integrated with other operators, or the name of the called operator is changed after optimization. diff --git a/tutorials/training/source_en/advanced_use/distributed_training_ascend.md b/tutorials/training/source_en/advanced_use/distributed_training_ascend.md deleted file mode 100644 index d948a9277761516eacdcd624c2f12086650a5939..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/distributed_training_ascend.md +++ /dev/null @@ -1,646 +0,0 @@ -# Parallel Distributed Training (Ascend) - -`Linux` `Ascend` `Model Training` `Intermediate` `Expert` - - - -- [Parallel Distributed Training (Ascend)](#parallel-distributed-training-ascend) - - [Overview](#overview) - - [Preparations](#preparations) - - [Downloading the Dataset](#downloading-the-dataset) - - [Configuring Distributed Environment Variables](#configuring-distributed-environment-variables) - - [Calling the Collective Communication Library](#calling-the-collective-communication-library) - - [Loading the Dataset in Data Parallel Mode](#loading-the-dataset-in-data-parallel-mode) - - [Defining the Network](#defining-the-network) - - [Hybrid Parallel Mode](#hybrid-parallel-mode) - - [Semi Auto Parallel Mode](#semi-auto-parallel-mode) - - [Defining the Loss Function and Optimizer](#defining-the-loss-function-and-optimizer) - - [Defining the Loss Function](#defining-the-loss-function) - - [Defining the Optimizer](#defining-the-optimizer) - - [Training the Network](#training-the-network) - - [Running the Script](#running-the-script) - - [Distributed Training Model Parameters Saving and Loading](#distributed-training-model-parameters-saving-and-loading) - - [Auto Parallel Mode](#auto-parallel-mode) - - [Data Parallel Mode](#data-parallel-mode) - - [Semi Auto Parallel Mode](#semi-auto-parallel-mode) - - [Hybrid Parallel Mode](#hybrid-parallel-mode) - - [Multi-machine 
Training](#multi-machine-training) - - - - - -## Overview - -This tutorial describes how to train the ResNet-50 network in data parallel and automatic parallel modes on MindSpore based on the Ascend 910 AI processor. -> Download address of the complete sample code: - -The directory structure is as follow: - -```text -└─tutorial_code - ├─distributed_training - │ rank_table_16pcs.json - │ rank_table_8pcs.json - │ rank_table_2pcs.json - │ cell_wrapper.py - │ model_accu.py - │ resnet.py - │ resnet50_distributed_training.py - │ resnet50_distributed_training_gpu.py - │ resnet50_distributed_training_grad_accu.py - │ run.sh - │ run_gpu.sh - │ run_grad_accu.sh - │ run_cluster.sh -``` - -`rank_table_16pcs.json`, `rank_table_8pcs.json` and `rank_table_2pcs.json` are the networking information files. `resnet.py`,`resnet50_distributed_training.py` , `resnet50_distributed_training_gpu.py` and `resnet50_distributed_training_grad_accu.py` are the network structure files. `run.sh` , `run_gpu.sh`, `run_grad_accu.sh` and `run_cluster.sh` are the execute scripts. - -Besides, we describe the usages of hybrid parallel and semi-auto parallel modes in the sections [Defining the Network](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html#defining-the-network) and [Distributed Training Model Parameters Saving and Loading](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html#distributed-training-model-parameters-saving-and-loading). - -## Preparations - -### Downloading the Dataset - -This sample uses the `CIFAR-10` dataset, which consists of color images of 32 x 32 pixels in 10 classes, with 6000 images per class. There are 50,000 images in the training set and 10,000 images in the test set. - -> `CIFAR-10` dataset download address: - -Download the dataset and decompress it to a local path. The folder generated after the decompression is `cifar-10-batches-bin`. 
- -### Configuring Distributed Environment Variables - -When distributed training is performed in the bare-metal environment (compared with the cloud environment where the Ascend 910 AI processor is deployed on the local host), you need to configure the networking information file for the current multi-device environment. If the HUAWEI CLOUD environment is used, skip this section because the cloud service has been configured. - -The following uses the Ascend 910 AI processor as an example. The JSON configuration file for an environment with eight devices is as follows. In this example, the configuration file is named as `rank_table_8pcs.json`. For details about how to configure the 2-device environment, see the `rank_table_2pcs.json` file in the sample code. - -```json -{ - "version": "1.0", - "server_count": "1", - "server_list": [ - { - "server_id": "10.155.111.140", - "device": [ - {"device_id": "0","device_ip": "192.1.27.6","rank_id": "0"}, - {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"}, - {"device_id": "2","device_ip": "192.3.27.6","rank_id": "2"}, - {"device_id": "3","device_ip": "192.4.27.6","rank_id": "3"}, - {"device_id": "4","device_ip": "192.1.27.7","rank_id": "4"}, - {"device_id": "5","device_ip": "192.2.27.7","rank_id": "5"}, - {"device_id": "6","device_ip": "192.3.27.7","rank_id": "6"}, - {"device_id": "7","device_ip": "192.4.27.7","rank_id": "7"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" -} -``` - -The following parameters need to be modified based on the actual training environment: - -- `server_count`: number of hosts. -- `server_id`: IP address of the local host. -- `device_id`: physical sequence number of a device, that is, the actual sequence number of the device on the corresponding host. -- `device_ip`: IP address of the integrated NIC. You can run the `cat /etc/hccn.conf` command on the current host. The key value of `address_x` is the IP address of the NIC. 
-- `rank_id`: logical sequence number of a device, which starts from 0.
-
-### Calling the Collective Communication Library
-
-The Huawei Collective Communication Library (HCCL) is used for the communication of MindSpore parallel distributed training and can be found in the Ascend 910 AI processor software package. In addition, `mindspore.communication.management` encapsulates the collective communication API provided by the HCCL to help users configure distributed information.
-> HCCL implements multi-device multi-node communication based on the Ascend AI processor. The common restrictions on using the distributed service are as follows. For details, see the HCCL documentation.
->
-> - In a single-node system, a cluster of 1, 2, 4, or 8 devices is supported. In a multi-node system, a cluster of 8 x N devices is supported.
-> - Each host has four devices numbered 0 to 3 and four devices numbered 4 to 7 deployed on two different networks. During training of 2 or 4 devices, the devices must be connected and clusters cannot be created across networks.
-> - When we create a multi-node system, all nodes should use the same switch.
-> - The server hardware architecture and operating system require the symmetrical multi-processing (SMP) mode.
-
-The sample code for calling the HCCL is as follows:
-
-```python
-import os
-from mindspore import context
-from mindspore.communication.management import init
-
-if __name__ == "__main__":
-    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", device_id=int(os.environ["DEVICE_ID"]))
-    init()
-    ...
-```
-
-In the preceding code:
-
-- `mode=context.GRAPH_MODE`: sets the running mode to graph mode for distributed training. (The PyNative mode only supports data parallel running.)
-- `device_id`: physical sequence number of a device, that is, the actual sequence number of the device on the corresponding host.
-- `init`: enables HCCL communication and completes the distributed training initialization.
- -## Loading the Dataset in Data Parallel Mode - -During distributed training, data is imported in data parallel mode. The following takes the CIFAR-10 dataset as an example to describe how to import the CIFAR-10 dataset in data parallel mode. `data_path` indicates the dataset path, which is also the path of the `cifar-10-batches-bin` folder. - -```python -from mindspore import dtype as mstype -import mindspore.dataset as ds -import mindspore.dataset.transforms.c_transforms as C -import mindspore.dataset.vision.c_transforms as vision -from mindspore.communication.management import get_rank, get_group_size - -def create_dataset(data_path, repeat_num=1, batch_size=32, rank_id=0, rank_size=1): - resize_height = 224 - resize_width = 224 - rescale = 1.0 / 255.0 - shift = 0.0 - - # get rank_id and rank_size - rank_id = get_rank() - rank_size = get_group_size() - data_set = ds.Cifar10Dataset(data_path, num_shards=rank_size, shard_id=rank_id) - - # define map operations - random_crop_op = vision.RandomCrop((32, 32), (4, 4, 4, 4)) - random_horizontal_op = vision.RandomHorizontalFlip() - resize_op = vision.Resize((resize_height, resize_width)) - rescale_op = vision.Rescale(rescale, shift) - normalize_op = vision.Normalize((0.4465, 0.4822, 0.4914), (0.2010, 0.1994, 0.2023)) - changeswap_op = vision.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - c_trans = [random_crop_op, random_horizontal_op] - c_trans += [resize_op, rescale_op, normalize_op, changeswap_op] - - # apply map operations on images - data_set = data_set.map(operations=type_cast_op, input_columns="label") - data_set = data_set.map(operations=c_trans, input_columns="image") - - # apply shuffle operations - data_set = data_set.shuffle(buffer_size=10) - - # apply batch operations - data_set = data_set.batch(batch_size=batch_size, drop_remainder=True) - - # apply repeat operations - data_set = data_set.repeat(repeat_num) - - return data_set -``` - -Different from the single-node system, the multi-node system 
needs to transfer the `num_shards` and `shard_id` parameters to the dataset API. The two parameters correspond to the number of devices and logical sequence numbers of devices, respectively. You are advised to obtain the parameters through the HCCL API.
-
-- `get_rank`: obtains the ID of the current device in the cluster.
-- `get_group_size`: obtains the number of devices.
-
-> Under data parallel mode, it is recommended to load the same dataset file for each device, or it may cause accuracy problems.
-
-## Defining the Network
-
-In data parallel and automatic parallel modes, the network definition method is the same as that in a single-node system. The reference code of ResNet is as follows:
-
-In this section we focus on how to define a network in hybrid parallel or semi-auto parallel mode.
-
-### Hybrid Parallel Mode
-
-Hybrid parallel mode adds the setting `layerwise_parallel` for `parameter` based on the data parallel mode. The `parameter` with this setting would be saved and computed as sliced tensors and would not apply gradient aggregation. In this mode, MindSpore would not infer computation and communication for parallel operators automatically. To ensure the consistency of calculation logic, users are required to manually infer extra operations and insert them into networks. Therefore, this parallel mode is suitable for users with a deep understanding of parallel theory.
-
-In the following example, specify the `self.weight` as the `layerwise_parallel`, that is, the `self.weight` and the output of `MatMul` are sliced on the second dimension. At this time, performing ReduceSum on the second dimension would only get one sliced result. `AllReduce.Sum` is required here to accumulate the results among all devices. For more information about the parallel theory, please refer to the [design document](https://www.mindspore.cn/doc/note/en/master/design/mindspore/distributed_training_design.html).
-```python
-from mindspore import Tensor
-import mindspore.ops as ops
-from mindspore import dtype as mstype
-import mindspore.nn as nn
-
-class HybridParallelNet(nn.Cell):
-    def __init__(self):
-        super(HybridParallelNet, self).__init__()
-        # initialize the weight which is sliced at the second dimension
-        weight_init = np.random.rand(512, 128/2).astype(np.float32)
-        self.weight = Parameter(Tensor(weight_init), layerwise_parallel=True)
-        self.fc = ops.MatMul()
-        self.reduce = ops.ReduceSum()
-        self.allreduce = ops.AllReduce(op='sum')
-
-    def construct(self, x):
-        x = self.fc(x, self.weight)
-        x = self.reduce(x, -1)
-        x = self.allreduce(x)
-        return x
-```
-
-### Semi Auto Parallel Mode
-
-Compared with the auto parallel mode, semi auto parallel mode supports manual configuration on shard strategies for network tuning. The definition of shard strategies can be found in this [design document](https://www.mindspore.cn/doc/note/en/master/design/mindspore/distributed_training_design.html).
-
-In the above example `HybridParallelNet`, the script in semi auto parallel mode is as follows. The shard strategy of `MatMul` is `{(1, 1), (1, 2)}`, which means `self.weight` is sliced at the second dimension.
-
-```python
-from mindspore import Tensor
-import mindspore.ops as ops
-from mindspore import dtype as mstype
-import mindspore.nn as nn
-
-class SemiAutoParallelNet(nn.Cell):
-    def __init__(self):
-        super(SemiAutoParallelNet, self).__init__()
-        # initialize full tensor weight
-        weight_init = np.random.rand(512, 128).astype(np.float32)
-        self.weight = Parameter(Tensor(weight_init))
-        # set shard strategy
-        self.fc = ops.MatMul().shard({(1, 1),(1, 2)})
-        self.reduce = ops.ReduceSum()
-
-    def construct(self, x):
-        x = self.fc(x, self.weight)
-        x = self.reduce(x, -1)
-        return x
-```
-
-> - In the semi auto parallel mode, the operators that are not assigned with any shard strategies would be executed in data parallel.
The auto parallel mode not only supports parallel strategies automatically acquired by strategy searching algorithms, but also enables users to manually assign specific parallel strategies.
self.log(softmax_result) - loss = self.sum_cross_entropy((self.mul(softmax_result_log, label)), -1) - loss = self.mul2(ops.scalar_to_array(-1.0), loss) - loss = self.mean(loss, -1) - - return loss -``` - -### Defining the Optimizer - -The `Momentum` optimizer is used as the parameter update tool. The definition is the same as that in the single-node system. For details, see the implementation in the sample code. - -## Training the Network - -`context.set_auto_parallel_context` is an API for users to set parallel training parameters and must be called before the initialization of networks. The related parameters are as follows: - -- `parallel_mode`: parallel distributed mode. The default value is `ParallelMode.STAND_ALONE`. The other options are `ParallelMode.DATA_PARALLEL` and `ParallelMode.AUTO_PARALLEL`. -- `parameter_broadcast`: the data parallel weights on the first device would be broadcast to other devices. The default value is `False`, -- `gradients_mean`: During backward computation, the framework collects gradients of parameters in data parallel mode across multiple hosts, obtains the global gradient value, and transfers the global gradient value to the optimizer for update. The default value is `False`, which indicates that the `allreduce_sum` operation is applied. The value `True` indicates that the `allreduce_mean` operation is applied. -- You are advised to set `device_num` and `global_rank` to their default values. The framework calls the HCCL API to obtain the values. - -> More about the distributed training configurations please refer to the [programming guide](https://www.mindspore.cn/doc/programming_guide/en/master/auto_parallel.html). - -If multiple network cases exist in the script, call `context.reset_auto_parallel_context` to restore all parameters to default values before executing the next case. - -In the following sample code, the automatic parallel mode is specified. 
To switch to the data parallel mode, you only need to change `parallel_mode` to `DATA_PARALLEL` and do not need to specify the strategy search algorithm `auto_parallel_search_mode`. In the sample code, the recursive programming strategy search algorithm is specified for automatic parallel. - -```python -from mindspore import context, Model -from mindspore.nn.optim.momentum import Momentum -from mindspore.train.callback import LossMonitor -from mindspore.context import ParallelMode -from resnet import resnet50 - -device_id = int(os.getenv('DEVICE_ID')) -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") -context.set_context(device_id=device_id) # set device_id - -def test_train_cifar(epoch_size=10): - context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL, gradients_mean=True) - loss_cb = LossMonitor() - dataset = create_dataset(data_path) - batch_size = 32 - num_classes = 10 - net = resnet50(batch_size, num_classes) - loss = SoftmaxCrossEntropyExpand(sparse=True) - opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) - model = Model(net, loss_fn=loss, optimizer=opt) - model.train(epoch_size, dataset, callbacks=[loss_cb], dataset_sink_mode=True) -``` - -In the preceding code: - -- `dataset_sink_mode=True`: uses the dataset sink mode. That is, the training computing is sunk to the hardware platform for execution. -- `LossMonitor`: returns the loss value through the callback function to monitor the loss function. - -## Running the Script - -After the script required for training is edited, run the corresponding command to call the script. - -Currently, MindSpore distributed execution uses the single-device single-process running mode. That is, one process runs on each device, and the number of total processes is the same as the number of devices that are being used. For device 0, the corresponding process is executed in the foreground. 
For other devices, the corresponding processes are executed in the background. You need to create a directory for each process to store log information and operator compilation information. The following takes the distributed training script for eight devices as an example to describe how to run the script: - -```bash -#!/bin/bash - -echo "==============================================================================================================" -echo "Please run the script as: " -echo "bash run.sh DATA_PATH RANK_SIZE" -echo "For example: bash run.sh /path/dataset 8" -echo "It is better to use the absolute path." -echo "==============================================================================================================" -DATA_PATH=$1 -export DATA_PATH=${DATA_PATH} -RANK_SIZE=$2 - -EXEC_PATH=$(pwd) - -test_dist_8pcs() -{ - export RANK_TABLE_FILE=${EXEC_PATH}/rank_table_8pcs.json - export RANK_SIZE=8 -} - -test_dist_2pcs() -{ - export RANK_TABLE_FILE=${EXEC_PATH}/rank_table_2pcs.json - export RANK_SIZE=2 -} - -test_dist_${RANK_SIZE}pcs - -for((i=1;i<${RANK_SIZE};i++)) -do - rm -rf device$i - mkdir device$i - cp ./resnet50_distributed_training.py ./resnet.py ./device$i - cd ./device$i - export DEVICE_ID=$i - export RANK_ID=$i - echo "start training for device $i" - env > env$i.log - pytest -s -v ./resnet50_distributed_training.py > train.log$i 2>&1 & - cd ../ -done -rm -rf device0 -mkdir device0 -cp ./resnet50_distributed_training.py ./resnet.py ./device0 -cd ./device0 -export DEVICE_ID=0 -export RANK_ID=0 -echo "start training for device 0" -env > env0.log -pytest -s -v ./resnet50_distributed_training.py > train.log0 2>&1 -if [ $? -eq 0 ];then - echo "training success" -else - echo "training failed" - exit 2 -fi -cd ../ -``` - -The variables `DATA_PATH` and `RANK_SIZE` need to be transferred to the script, which indicate the absolute path of the dataset and the number of devices, respectively. 
- -The distributed related environment variables are as follows: - -- `RANK_TABLE_FILE`: path for storing the network information file. -- `DEVICE_ID`: actual sequence number of the current device on the corresponding host. -- `RANK_ID`: logical sequence number of the current device. - -For details about other environment variables, see configuration items in the installation guide. - -The running time is about 5 minutes, which is mainly occupied by operator compilation. The actual training time is within 20 seconds. You can use `ps -ef | grep pytest` to monitor task processes. - -Log files are saved in the `device0`,`device1`... directory. The `env.log` file records environment variable information. The `train.log` file records the loss function information. The following is an example: - -```text -epoch: 1 step: 156, loss is 2.0084016 -epoch: 2 step: 156, loss is 1.6407638 -epoch: 3 step: 156, loss is 1.6164391 -epoch: 4 step: 156, loss is 1.6838071 -epoch: 5 step: 156, loss is 1.6320667 -epoch: 6 step: 156, loss is 1.3098773 -epoch: 7 step: 156, loss is 1.3515002 -epoch: 8 step: 156, loss is 1.2943741 -epoch: 9 step: 156, loss is 1.2316195 -epoch: 10 step: 156, loss is 1.1533381 -``` - -## Distributed Training Model Parameters Saving and Loading - -The below content introduced how to save and load models under the four distributed parallel training modes respectively. Before saving model parameters for distributed training, it is necessary to configure distributed environment variables and collective communication library in accordance with this tutorial. - -### Auto Parallel Mode - -It is convenient to save and load the model parameters in auto parallel mode. Just add configuration `CheckpointConfig` and `ModelCheckpoint` to `test_train_cifar` method in the training network steps of this tutorial, and the model parameters can be saved. 
The code is as follows: - -```python -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig - -def test_train_cifar(epoch_size=10): - context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL, gradients_mean=True) - loss_cb = LossMonitor() - dataset = create_dataset(data_path) - batch_size = 32 - num_classes = 10 - net = resnet50(batch_size, num_classes) - loss = SoftmaxCrossEntropyExpand(sparse=True) - opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) - ckpt_config = CheckpointConfig() - ckpt_callback = ModelCheckpoint(prefix='auto_parallel', config=ckpt_config) - model = Model(net, loss_fn=loss, optimizer=opt) - model.train(epoch_size, dataset, callbacks=[loss_cb, ckpt_callback], dataset_sink_mode=True) -``` - -After saving the checkpoint file, users can easily load model parameters for reasoning or retraining. For example, the following code can be used for retraining: - -```python -from mindspore import load_checkpoint, load_param_into_net - -net = resnet50(batch_size=32, num_classes=10) -# The parameter for load_checkpoint is a .ckpt file which has been successfully saved -param_dict = load_checkpoint('...') -load_param_into_net(net, param_dict) -``` - -For checkpoint configuration policy and saving method, please refer to [Saving and Loading Model Parameters](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#checkpoint-configuration-policies). - -> By default, sliced parameters would be merged before saving. If the size of parameters is large, we recommend to use sliced parameters to save and infer, which could be referred to [Distributed inference](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference_ascend_910.html#id1). - -### Data Parallel Mode - -In data parallel mode, checkpoint is used in the same way as in auto parallel mode. 
You just need to change: - -```python -context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL, gradients_mean=True) -``` - -to: - -```python -context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) -``` - -> Under data parallel mode, we recommend to load the same checkpoint for each device to avoid accuracy problems. `parameter_broadcast` could also be used for sharing the values of parameters among devices. - -### Semi Auto Parallel Mode - -In semi auto parallel mode, checkpoint is used in the same way as in auto parallel mode and data parallel mode. The difference is in the definition of a network and the definition of network model, you can refer to defining the network [Semi Auto Parallel Mode](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html#semi-auto-parallel-mode) in this tutorial. - -To save the model, you can use the following code: - -```python -... -net = SemiAutoParallelNet() -... -ckpt_config = CheckpointConfig() -ckpt_callback = ModelCheckpoint(prefix='semi_auto_parallel', config=ckpt_config) -``` - -To load the model, you can use the following code: - -```python -net = SemiAutoParallelNet() -# The parameter for load_checkpoint is a .ckpt file which has been successfully saved -param_dict = load_checkpoint('...') -load_param_into_net(net, param_dict) -``` - -For the three parallel training modes described above, the checkpoint file is saved in a complete way on each card. Users also can save only the checkpoint file of this card on each card, take Semi Auto parallel Mode as an example for explanation. - -Only by changing the code that sets the checkpoint saving policy, the checkpoint file of each card can be saved by itself. 
The specific changes are as follows: - -Change the checkpoint configuration policy from: - -```python -# config checkpoint -ckpt_config = CheckpointConfig(keep_checkpoint_max=1) -``` - -to: - -```python -# config checkpoint -ckpt_config = CheckpointConfig(keep_checkpoint_max=1, integrated_save=False) -``` - -It should be noted that if users choose this checkpoint saving policy, users need to save and load the segmented checkpoint for subsequent reasoning or retraining. Specific usage can refer to [Integrating the Saved Checkpoint Files](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/save_load_model_hybrid_parallel.html#integrating-the-saved-checkpoint-files). - -### Hybrid Parallel Mode - -For model parameter saving and loading in Hybrid Parallel Mode, please refer to [Saving and Loading Model Parameters in the Hybrid Parallel Scenario](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/save_load_model_hybrid_parallel.html). - -## Multi-machine Training - -The previous chapters introduced the distributed training of MindSpore, which is based on the Ascend environment of a single machine with 8 cards. Using multiple machines for distributed training can greatly improve the training speed. -In the Ascend environment, the communication between NPU units across machines is the same as the communication between each NPU unit in a single machine. It is still communicated through HCCL. The difference is that the NPU units in a single machine are naturally interoperable, while cross-machine communication needs to be guaranteed that the networks of the two machines are interoperable. -After confirming that the network of the NPU unit between the machines is smooth, configure the json configuration file of multiple machines. This tutorial takes the configuration file of 16 cards as an example. 
It should be noted that in the json file configuration of multiple machines, the order of rank_id is required to be consistent with the lexicographic order of server_id. - -```json -{ - "version": "1.0", - "server_count": "2", - "server_list": [ - { - "server_id": "10.155.111.140", - "device": [ - {"device_id": "0","device_ip": "192.1.27.6","rank_id": "0"}, - {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"}, - {"device_id": "2","device_ip": "192.3.27.6","rank_id": "2"}, - {"device_id": "3","device_ip": "192.4.27.6","rank_id": "3"}, - {"device_id": "4","device_ip": "192.1.27.7","rank_id": "4"}, - {"device_id": "5","device_ip": "192.2.27.7","rank_id": "5"}, - {"device_id": "6","device_ip": "192.3.27.7","rank_id": "6"}, - {"device_id": "7","device_ip": "192.4.27.7","rank_id": "7"}], - "host_nic_ip": "reserve" - }, - { - "server_id": "10.155.111.141", - "device": [ - {"device_id": "0","device_ip": "192.1.27.8","rank_id": "8"}, - {"device_id": "1","device_ip": "192.2.27.8","rank_id": "9"}, - {"device_id": "2","device_ip": "192.3.27.8","rank_id": "10"}, - {"device_id": "3","device_ip": "192.4.27.8","rank_id": "11"}, - {"device_id": "4","device_ip": "192.1.27.9","rank_id": "12"}, - {"device_id": "5","device_ip": "192.2.27.9","rank_id": "13"}, - {"device_id": "6","device_ip": "192.3.27.9","rank_id": "14"}, - {"device_id": "7","device_ip": "192.4.27.9","rank_id": "15"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" -} -``` - -After preparing the configuration file, you can organize distributed multi-machine training scripts. Taking 2 machines with 16 cards as an example, the scripts written on the two machines are similar to the running scripts of a single machine with 8 cards. The difference is that different rank_id variables are specified. 
- -```bash -#!/bin/bash - -echo "==============================================================================================================" -echo "Please run the script as: " -echo "bash run.sh DATA_PATH RANK_TABLE_FILE RANK_SIZE RANK_START" -echo "For example: bash run.sh /path/dataset /path/rank_table.json 16 0" -echo "It is better to use the absolute path." -echo "==============================================================================================================" - -execute_path=$(pwd) -echo ${execute_path} -script_self=$(readlink -f "$0") -self_path=$(dirname "${script_self}") -echo ${self_path} - -export DATA_PATH=$1 -export RANK_TABLE_FILE=$2 -export RANK_SIZE=$3 -RANK_START=$4 -DEVICE_START=0 -for((i=0;i<=7;i++)); -do - export RANK_ID=$[i+RANK_START] - export DEVICE_ID=$[i+DEVICE_START] - rm -rf ${execute_path}/device_$RANK_ID - mkdir ${execute_path}/device_$RANK_ID - cd ${execute_path}/device_$RANK_ID || exit - pytest -s ${self_path}/resnet50_distributed_training.py >train$RANK_ID.log 2>&1 & -done -``` - -For the reference scripts listed above, the required code organization structure is as follows. The script will get the path of the script and the path of the command execution, and put all tasks in the background for execution. - -```text -└─tutorial_code - ├─distributed_training - │ resnet50_distributed_training.py - │ run_cluster.sh -``` - -When executing, the two machines execute the following commands respectively, among which rank_table.json is configured according to the 16-card distributed json file reference configuration shown in this chapter. 
- -```bash -# server0 -bash run.sh /path/dataset /path/rank_table.json 16 0 -# server1 -bash run.sh /path/dataset /path/rank_table.json 16 8 -``` diff --git a/tutorials/training/source_en/advanced_use/distributed_training_gpu.md b/tutorials/training/source_en/advanced_use/distributed_training_gpu.md deleted file mode 100644 index 422fa3dbdba0ae0da90e8ba6fd2443494eedf75f..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/distributed_training_gpu.md +++ /dev/null @@ -1,153 +0,0 @@ -# Distributed Parallel Training (GPU) - -`Linux` `GPU` `Model Training` `Intermediate` `Expert` - - - -- [Distributed Parallel Training (GPU)](#distributed-parallel-training-gpu) - - [Overview](#overview) - - [Preparation](#preparation) - - [Downloading the Dataset](#downloading-the-dataset) - - [Configuring Distributed Environment](#configuring-distributed-environment) - - [Calling the Collective Communication Library](#calling-the-collective-communication-library) - - [Defining the Network](#defining-the-network) - - [Running the Script](#running-the-script) - - [Running the Multi-Host Script](#running-the-multi-host-script) - - - - - -## Overview - -This tutorial describes how to train the ResNet-50 network using MindSpore data parallelism and automatic parallelism on the GPU hardware platform. - -## Preparation - -### Downloading the Dataset - -The `CIFAR-10` dataset is used as an example. The method of downloading and loading the dataset is the same as that for the Ascend 910 AI processor. - -The method of downloading and loading the dataset: - -### Configuring Distributed Environment - -- `OpenMPI-4.0.3`: multi-process communication library used by MindSpore. - - Download the OpenMPI-4.0.3 source code package `openmpi-4.0.3.tar.gz` from . - - For details about how to install OpenMPI, see the official tutorial: . - -- `NCCL-2.7.6`: Nvidia collective communication library. - - Download NCCL-2.7.6 from . 
- - For details about how to install NCCL, see the official tutorial: . - -- Password-free login between hosts (required for multi-host training). If multiple hosts are involved in the training, you need to configure password-free login between them. The procedure is as follows: - 1. Ensure that the same user is used to log in to each host. (The root user is not recommended.) - 2. Run the `ssh-keygen -t rsa -P ""` command to generate a key. - 3. Run the `ssh-copy-id DEVICE-IP` command to set the IP address of the host that requires password-free login. - 4. Run the`ssh DEVICE-IP` command. If you can log in without entering the password, the configuration is successful. - 5. Run the preceding command on all hosts to ensure that every two hosts can communicate with each other. - -### Calling the Collective Communication Library - -On the GPU hardware platform, MindSpore parallel distributed training uses NCCL for communication. - -> On the GPU platform, MindSpore does not support the following operations: -> -> `get_local_rank`, `get_local_size`, `get_world_rank_from_group_rank`, `get_group_rank_from_world_rank` and `create_group` - -The sample code for calling the HCCL is as follows: - -```python -from mindspore import context -from mindspore.communication.management import init - -if __name__ == "__main__": - context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - init("nccl") - ... -``` - -In the preceding information, - -- `mode=context.GRAPH_MODE`: sets the running mode to graph mode for distributed training. (The PyNative mode does not support parallel running.) -- `init("nccl")`: enables NCCL communication and completes the distributed training initialization. - -## Defining the Network - -On the GPU hardware platform, the network definition is the same as that for the Ascend 910 AI processor. - -For details about the definitions of the network, optimizer, and loss function, see . 
- -## Running the Script - -On the GPU hardware platform, MindSpore uses OpenMPI `mpirun` for distributed training. The following takes the distributed training script for eight devices as an example to describe how to run the script: - -> Obtain the running script of the example from: -> -> -> -> If the script is executed by the root user, the `--allow-run-as-root` parameter must be added to `mpirun`. - -```bash -#!/bin/bash - -echo "==============================================================================================================" -echo "Please run the script as: " -echo "bash run_gpu.sh DATA_PATH" -echo "For example: bash run_gpu.sh /path/dataset" -echo "It is better to use the absolute path." -echo "==============================================================================================================" -DATA_PATH=$1 -export DATA_PATH=${DATA_PATH} - -rm -rf device -mkdir device -cp ./resnet50_distributed_training.py ./resnet.py ./device -cd ./device -echo "start training" -mpirun -n 8 pytest -s -v ./resnet50_distributed_training.py > train.log 2>&1 & -``` - -The script will run in the bachground. The log file is saved in the device directory, we will run 10 epochs and each epochs contain 234 steps, and the loss result is saved in train.log. The output loss values of the grep command are as follows: - -```text -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -``` - -## Running the Multi-Host Script - -If multiple hosts are involved in the training, you need to set the multi-host configuration in the `mpirun` command. You can use the `-H` option in the `mpirun` command. 
For example, `mpirun -n 16 -H DEVICE1_IP:8,DEVICE2_IP:8 python hello.py` indicates that eight processes are started on the hosts whose IP addresses are DEVICE1_IP and DEVICE2_IP, respectively. Alternatively, you can create a hostfile similar to the following and transfer its path to the `--hostfile` option of `mpirun`. Each line in the hostfile is in the format of `[hostname] slots=[slotnum]`, where hostname can be an IP address or a host name. - -```bash -DEVICE1 slots=8 -DEVICE2 slots=8 -``` - -The following is the execution script of the 16-device two-host cluster. The variables `DATA_PATH` and `HOSTFILE` need to be transferred, indicating the dataset path and hostfile path. For details about more mpirun options, see the OpenMPI official website. - -```bash -#!/bin/bash - -DATA_PATH=$1 -HOSTFILE=$2 - -rm -rf device -mkdir device -cp ./resnet50_distributed_training.py ./resnet.py ./device -cd ./device -echo "start training" -mpirun -n 16 --hostfile $HOSTFILE -x DATA_PATH=$DATA_PATH -x PATH -mca pml ob1 pytest -s -v ./resnet50_distributed_training.py > train.log 2>&1 & -``` - -Run running on GPU, the model parameters can be saved and loaded by referring to [Distributed Training Model Parameters Saving and Loading](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html#distributed-training-model-parameters-saving-and-loading). 
diff --git a/tutorials/training/source_en/advanced_use/distributed_training_tutorials.rst b/tutorials/training/source_en/advanced_use/distributed_training_tutorials.rst deleted file mode 100644 index ac9d237c4204d9ea74185bd848e1ef0625a0be95..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/distributed_training_tutorials.rst +++ /dev/null @@ -1,27 +0,0 @@ -Distributed Training -======================== - -In deep learning, the increasing number of datasets and parameters prolongs the training time and requires more hardware resources, becoming a training bottleneck. Parallel distributed training is an important optimization method for training, which can reduce requirements on hardware, such as memory and computing performance. Based on different parallel principles and modes, parallelism is generally classified into the following types: - -- Data parallelism: splits data into many batches and then allocates the batches to each worker for model computation. -- Model parallelism: splits a model. MindSpore supports the intra-layer model parallelism. Parameters are split and then allocated to each worker for training. -- Hybrid parallelism: contains data parallelism and model parallelism. - -MindSpore also provides the parallel distributed training function. It supports the following modes: - -- `DATA_PARALLEL`: data parallelism. -- `AUTO_PARALLEL`: automatic parallelism, which integrates data parallelism, model parallelism, and hybrid parallelism. A cost model can be automatically created to find the parallel strategy with a relatively short training time and to select one parallel mode for users. MindSpore offers two different strategy search algorithms as follows: - - - `dynamic_programming`: Dynamic programming search algorithm. The optimal strategy of cost model description can be found, but it takes a long time to search for parallel strategy of huge network model. 
Its cost model refers to modeling the training time based on the memory-based computation and communication overheads of the Ascend 910 chip. - - `recursive_programming`: Double recursive programming search algorithm. The optimal strategy can be generated instantly even for a large network or for a large-scale multi-device partitioning need. Its symbolic cost model can flexibly adapt to different accelerator clusters. - -- `HYBRID_PARALLEL`: On MindSpore, users manually split parameters to implement intra-layer model parallelism. - -.. toctree:: - :maxdepth: 1 - - distributed_training_ascend - distributed_training_gpu - apply_host_device_training - apply_parameter_server_training - save_load_model_hybrid_parallel diff --git a/tutorials/training/source_en/advanced_use/dump_in_graph_mode.md b/tutorials/training/source_en/advanced_use/dump_in_graph_mode.md deleted file mode 100644 index 58e1dea9f9de5fe2861384479407453debdfa7b5..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/dump_in_graph_mode.md +++ /dev/null @@ -1,525 +0,0 @@ -# Using Dump in the Graph Mode - -`Linux` `Ascend` `GPU` `CPU` `Model Optimization` `Intermediate` `Expert` - - - -- [Using Dump in the Graph Mode](#using-dump-in-the-graph-mode) - - [Overview](#overview) - - [Debugging Process](#debugging-process) - - [Applicable Scene](#applicable-scene) - - [Dump Introduction](#dump-introduction) - - [Synchronous Dump](#synchronous-dump) - - [Synchronous Dump Step](#synchronous-dump-step) - - [Synchronous Dump Data Object Directory](#synchronous-dump-data-object-directory) - - [Introduction to Synchronous Dump Data File](#introduction-to-synchronous-dump-data-file) - - [Synchronous Dump Data Analysis Sample](#synchronous-dump-data-analysis-sample) - - [Asynchronous Dump](#asynchronous-dump) - - [Asynchronous Dump Step](#asynchronous-dump-step) - - [Asynchronous Dump Data Object Directory](#asynchronous-dump-data-object-directory) - - [Introduction to Asynchronous 
Dump Data File](#introduction-to-asynchronous-dump-data-file) - - [Asynchronous Dump Data Analysis Sample](#asynchronous-dump-data-analysis-sample) - - - - - -## Overview - -The input and output of the operator can be saved for debugging through the data dump when the training result deviates from the expectation. - -- For the dynamic graph mode, MindSpore provides native Python execution capabilities. Users can view and record the corresponding input and output during the running of the network script. For details, see [Use PyNative Mode to Debug](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/debug_in_pynative_mode.html). - -- For the static graph mode, MindSpore provides the Dump function to save the graph and the input and output data of the operator during model training to a disk file. - -Aiming at the static graph mode, this tutorial introduces how to analyze and compare network data based on the Dump function. - -### Debugging Process - -1. Find the corresponding operator from the script. - - The Dump function needs to use the IR file of the final execution graph. The IR file can be viewed with the `vi` command. The IR file contains the full name of the operator, and the dependency of the operator on the input and output of the computational graph, and also contains the trace information from the operator to the corresponding script code. For the configuration of the Dump function, see [Synchronous Dump Step](#synchronous-dump-step) and [Asynchronous Dump Step](#asynchronous-dump-step). For the final implementation of the image IR file naming and directory structure, see [Synchronous Dump Data Object Directory](#synchronous-dump-data-object-directory) and [Asynchronous Dump Data Object Directory](#asynchronous-dump-data-object-directory). 
Then find the operator corresponding to the code in the script through the graph file, refer to [Synchronous Dump Data Analysis Sample](#synchronous-dump-data-analysis-sample) and [Asynchronous Dump Data Analysis Sample](#asynchronous-dump-data-analysis-sample). - -2. From operator to dump data. - - After understanding the mapping relationship between the script and the operator, you can determine the name of the operator you want to analyze and find the dump file corresponding to the operator. Please refer to [Synchronous Dump Data Object Directory](#synchronous-dump-data-object-directory) and [Asynchronous Dump Data Object Directory](#asynchronous-dump-data-object-directory). - -3. Analyze Dump data. - - By analyzing Dump data, it can be compared with other third-party frameworks. For the synchronous dump data format, please refer to [Introduction to Synchronous Dump Data File](#introduction-to-synchronous-dump-data-file). For the asynchronous Dump data format, please refer to [Introduction to Asynchronous Dump Data File](#introduction-to-asynchronous-dump-data-file). - -### Applicable Scene - -1. Analysis of static graph operator results. - - Through the IR diagram obtained by the Dump function, you can understand the mapping relationship between the script code and the execution operator (for details, see [MindSpore IR Introduction](https://www.mindspore.cn/doc/note/en/master/design/mindspore/mindir.html#overview)). Combining the input and output data of the execution operator, it is possible to analyze possible overflow, gradient explosion and disappearance during the training process, and backtrack to the code that may have problems in the script. - -2. Analysis of the feature map. - - Analyze the information of the feature map by obtaining the output data of the layer. - -3. Model migration. 
- - In the scenario of migrating a model from a third-party framework (TensorFlow, PyTorch) to MindSpore, by comparing the output data of the same position operator, analyzing whether the training results of the third-party framework and MindSpore for the same model are close enough to locate model precision issues. - -## Dump Introduction - -MindSpore provides two modes: synchronous dump and asynchronous dump: - -- The mechanism of synchronous dump is that after the execution of each step in the network training process, the host side initiates a dump action, copies the data in the operator address from the device to the host, and saves the file. Synchronous Dump will turn off memory reuse between operators by default to avoid reading dirty data. -- Asynchronous Dump is a function developed specifically for the sinking of the entire Ascend image. It can dump data while executing the operator. The data will be dumped immediately after the execution of an operator. Therefore, the correct data can be generated by turning on the memory reuse, but the corresponding network training speed will be slower. - -The configuration files required for different modes and the data format of dump are different: - -- Synchronous mode takes up more memory than asynchronous mode, but it is easier to use. -- Generally, for small and medium-sized networks (such as ResNet), it is recommended to use the synchronous dump mode first. When the network does not occupy much memory, please use synchronous dump first. If an error of insufficient device memory occurs after enabling synchronous dump, please use asynchronous dump in the next section. -- When Dump is enabled on Ascend, the operator to Dump will automatically close memory reuse. -- Synchronous Dump supports graph mode on Ascend, GPU and CPU, and currently does not support PyNative mode. -- Asynchronous Dump only supports graph mode on Ascend, not PyNative mode. 
Memory reuse will not be turned off when asynchronous dump is enabled. - -## Synchronous Dump - -### Synchronous Dump Step - -1. Create dump json file:`data_dump.json`, the name and location of the JSON file can be customized. - - ```json - { - "common_dump_settings": { - "dump_mode": 0, - "path": "/absolute_path", - "net_name": "ResNet50", - "iteration": 0, - "input_output": 0, - "kernels": ["Default/Conv-op12"], - "support_device": [0,1,2,3,4,5,6,7] - }, - "e2e_dump_settings": { - "enable": true, - "trans_flag": true - } - } - ``` - - - `dump_mode`: 0: dump all kernels in graph, 1: dump kernels in kernels list. - - `path`: The absolute path to save dump data. - - `net_name`: The net name eg:ResNet50. - - `iteration`: Specify the iterations to dump. Iteration should be set to 0 when dataset_sink_mode is False and data of every iteration will be dumped. - - `input_output`: 0: dump input and output of kernel, 1:dump input of kernel, 2:dump output of kernel. This configuration parameter only supports Ascend and CPU, and GPU can only dump the output of operator. - - `kernels`: List of operator names. Turn on the IR save switch `context.set_context(save_graphs=True)` and execute the network to obtain the operator name from the generated `trace_code_graph_{graph_id}`IR file. For details, please refer to [Saving IR](https://www.mindspore.cn/doc/note/en/master/design/mindspore/mindir.html#saving-ir). - - `support_device`: Supported devices, default setting is `[0,1,2,3,4,5,6,7]`. You can specify specific device ids to dump specific device data. This configuration parameter is invalid on the CPU, because there is no concept of device on the CPU. - - `enable`: Enable Asynchronous Dump. If synchronous dump and asynchronous dump are enabled at the same time, only synchronous dump will take effect. - - `trans_flag`: Enable trans flag. Transform the device data format into NCHW. 
If it is `True`, the data will be saved in the 4D format (NCHW) format on the Host side; if it is `False`, the data format on the Device side will be retained. This configuration parameter is invalid on the CPU, because there is no format conversion on the CPU. - -2. Specify the json configuration file of Dump. - - ```bash - export MINDSPORE_DUMP_CONFIG=${xxx} - ``` - - "xxx" represents the absolute path of data_dump.json - - ```bash - export MINDSPORE_DUMP_CONFIG=/path/to/data_dump.json - ``` - - - Set the environment variables before executing the training script. Setting environment variables during training will not take effect. - - Dump environment variables need to be configured before calling `mindspore.communication.management.init`. - -3. Execute the training script to dump data. - - After the training is started, if the `MINDSPORE_DUMP_CONFIG` environment variable is correctly configured, the content of the configuration file will be read and the operator data will be saved according to the data storage path specified in the Dump configuration. - In synchronous mode, if you want to dump data, you must use the non-data sink mode (set the `dataset_sink_mode` parameter in `model.train` or `DatasetHelper` to `False`) to ensure that you can get the dump data of each step. - If `model.train` or `DatasetHelper` is not called in the script, the default is non-data sinking mode. Using the Dump function will automatically generate the IR file of the final execution graph. - - You can set `context.set_context(reserve_class_name_in_scope=False)` in your training script to avoid dump failure because of file name is too long. - -4. Read and parse synchronous dump data through `numpy.fromfile`, refer to [Introduction to Synchronous Dump Data File](#introduction-to-synchronous-dump-data-file). 
- -### Synchronous Dump Data Object Directory - -After starting the training, the data objects saved by the synchronous Dump include the final execution graph (`ms_output_trace_code_graph_{graph_id}.ir` file) and the input and output data of the operators in the graph. The data directory structure is as follows: - -```text -{path}/ - |-- {net_name}/ - |-- {device_id}/ - |-- iteration_{iteration}/ - -- {op_name}_{input_output_index}_{shape}_{data_type}_{format}.bin - … - |-- graphs/ - ms_output_trace_code_graph_{graph_id}.pb - ms_output_trace_code_graph_{graph_id}.ir - |-- execution_order/ - ms_execution_order_graph_{graph_id}.csv - - |-- .metadata/ - data_dump.json -``` - -- `path`: the absolute path set in the `data_dump.json` configuration file. -- `net_name`: the network name set in the `data_dump.json` configuration file. -- `device_id`: the id of the training device. -- `graph_id`: the id of the training graph. -- `iteration`: the iteration of the training. -- `operator_name`: the name of the operator. -- `input_output_index` : the index of input or output. For example, `output_0` means that the file is the data of the first output Tensor of the operator. -- `shape`: Tensor dimension information. -- `data_type`: the type of the data. -- `format`: the format of the data. - -When data dump is performed on the CPU, there is no directory level of `device_id`, because there is no concept of device on the CPU, and there are no `graphs`, `execution_order` and `.metadata` directories. - -### Introduction to Synchronous Dump Data File - -The data file generated by the synchronous Dump is a binary file with the suffix `.bin`, and the file naming format is: - -```text -{operator_name}_{input_output_index}_{shape}_{data_type}_{format}.bin -``` - -According to the `Tensor` information provided by the file name, you can use `numpy.fromfile` to read the data and restore the `data_type` and `shape` of the original data. 
- -The suffixes of the final execution graph files generated by synchronous Dump are `.pb` and `.ir` respectively, and the file naming format is: - -```text -ms_output_trace_code_graph_{graph_id}.pb -ms_output_trace_code_graph_{graph_id}.ir -``` - -The files with the suffix `.ir` can be opened and viewed by the `vi` command. - -The suffix of the node execution sequence file generated by the synchronous Dump is `.csv`, and the file naming format is: - -```text -ms_execution_order_graph_{graph_id}.csv -``` - -`.metadata` records the original training information, and `data_dump.json` saves the dump configuration set by the user. - -### Synchronous Dump Data Analysis Sample - -For the Ascend scene, after the graph corresponding to the script is saved to the disk through the Dump function, the final execution graph file `ms_output_trace_code_graph_{graph_id}.ir` will be generated. This file saves the stack information of each operator in the corresponding graph, and records the generation script corresponding to the operator. 
- -Take [AlexNet script](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/alexnet/src/alexnet.py) as an example: - -```python -import mindspore.nn as nn -import mindspore.ops as ops - - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0, pad_mode="valid", has_bias=True): - return nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding, - has_bias=has_bias, pad_mode=pad_mode) - - -def fc_with_initialize(input_channels, out_channels, has_bias=True): - return nn.Dense(input_channels, out_channels, has_bias=has_bias) - - -class AlexNet(nn.Cell): - """ - Alexnet - """ - def __init__(self, num_classes=10, channel=3, phase='train', include_top=True): - super(AlexNet, self).__init__() - self.conv1 = conv(channel, 64, 11, stride=4, pad_mode="same", has_bias=True) - self.conv2 = conv(64, 128, 5, pad_mode="same", has_bias=True) - self.conv3 = conv(128, 192, 3, pad_mode="same", has_bias=True) - self.conv4 = conv(192, 256, 3, pad_mode="same", has_bias=True) - self.conv5 = conv(256, 256, 3, pad_mode="same", has_bias=True) - self.relu = ops.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode="valid") - self.include_top = include_top - if self.include_top: - dropout_ratio = 0.65 - if phase == 'test': - dropout_ratio = 1.0 - self.flatten = nn.Flatten() - self.fc1 = fc_with_initialize(6 * 6 * 256, 4096) - self.fc2 = fc_with_initialize(4096, 4096) - self.fc3 = fc_with_initialize(4096, num_classes) - self.dropout = nn.Dropout(dropout_ratio) - - def construct(self, x): - """define network""" - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv3(x) - x = self.relu(x) - x = self.conv4(x) - x = self.relu(x) - x = self.conv5(x) - x = self.relu(x) - x = self.max_pool2d(x) - if not self.include_top: - return x - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.dropout(x) - x = 
self.fc2(x) - x = self.relu(x) - x = self.dropout(x) - x = self.fc3(x) - return x -``` - -If the user wants to view the code at line 58 in the script: - -```python -x = self.conv3(x) -``` - -After executing the network training, you can find multiple operator information corresponding to the line of code from the final execution graph (`ms_output_trace_code_graph_{graph_id}.ir` file). The content of the file is as follows: - -```text - %24(equivoutput) = Conv2D(%23, %21) {instance name: conv2d} primitive_attrs: {compile_info: , pri_format: NC1HWC0, stride: (1, 1, 1, 1), pad: (0, 0, 0, 0), pad_mod: same, out_channel: -192, mode: 1, dilation: (1, 1, 1, 1), output_names: [output], group: 1, format: NCHW, offset_a: 0, kernel_size: (3, 3), groups: 1, input_names: [x, w], pad_list: (1, 1, 1, 1), -IsFeatureMapOutput: true, IsFeatureMapInputList: (0)} - : (, ) -> () - : (, ) -> () - : (Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op107) - ... - # In file {Absolute path of model_zoo}/official/cv/alexnet/src/alexnet.py(58)/ x = self.conv3(x)/ - ... - %25(equivoutput) = BiasAdd(%24, %22) {instance name: bias_add} primitive_attrs: {output_used_num: (1), input_names: [x, b], format: NCHW, compile_info: , output_names: [output], -IsFeatureMapOutput: true, IsFeatureMapInputList: (0), pri_format: NC1HWC0} - : () -> () -> () - : () -> () -> () - : (Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/BiasAdd-op105) - ... - # In file {Absolute path of model_zoo}/official/cv/alexnet/src/alexnet.py(58)/ x = self.conv3(x)/ - ... -``` - -The meanings of the lines in the file content shown above are as follows: - -- The input and output of the operator on the Host side (the first line) and the Device side (the second line, some operators may not exist). It can be seen from the execution graph that the operator has two inputs (left side of the arrow) and one output (right side of the arrow). - - ```text - : (, ) -> () - : (, ) -> () - ``` - -- Operator name. 
It can be seen from the execution graph that the full name of the operator in the final execution graph is `Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op107`. - - ```text - : (Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op107) - ``` - -- The training script code corresponding to the operator. By searching the training script code to be queried, multiple matching operators can be found. - - ```text - # In file {Absolute path of model_zoo}/official/cv/alexnet/src/alexnet.py(58)/ x = self.conv3(x)/ - ``` - -Through the operator name and input and output information, you can find the only corresponding Tensor data file. For example, if you want to view the dump file corresponding to the first output data of the Conv2D-op107 operator, you can obtain the following information: - -- `operator_name`: `Default--network-WithLossCell--_backbone-AlexNet--conv3-Conv2d--Conv2D-op107`. Based on the operator name declared in sequence number 2 in the graph, replace `/` with `--` to get it. - -- `input_output_index`: `output_0` indicates that the file is the data of the first output Tensor of the operator. - -Search for the corresponding file name in the data object file directory saved by Dump: -`Default--network-WithLossCell--_backbone-AlexNet--conv3-Conv2d--Conv2D-op107_output_0_shape_32_12_13_13_16_Float16_NC1HWC0.bin`. - -The following information can be obtained from the file name: - -- `shape`: The tensor dimension is `32_12_13_13_16`. - -- `data_type`: The data type is `Float16`. - -- `format`: The data format is `NC1HWC0` (the data format to be saved can be modified through the Dump configuration file). 
- -When restoring data, first execute: - -```python -import numpy -numpy.fromfile("Default--network-WithLossCell--_backbone-AlexNet--conv3-Conv2d--Conv2D-op107_output_0_shape_32_12_13_13_16_Float16_NC1HWC0.bin", numpy.float16) -``` - -One-dimensional array data is generated, and then execute: - -```python -import numpy -numpy.reshape(array, (32,12,13,13,16)) -``` - -Restore to the original shape data. - -## Asynchronous Dump - -Large networks (such as Bert Large) will cause memory overflow when using synchronous dumps. MindSpore provides debugging capabilities for large networks through asynchronous dumps. - -### Asynchronous Dump Step - -1. Create dump json file:`data_dump.json`. - - The name and location of the JSON file can be customized. - - ```json - { - "common_dump_settings": { - "dump_mode": 0, - "path": "/absolute_path", - "net_name": "ResNet50", - "iteration": 0, - "input_output": 0, - "kernels": ["Default/Conv-op12"], - "support_device": [0,1,2,3,4,5,6,7] - }, - "async_dump_settings": { - "enable": true, - "op_debug_mode": 0 - } - } - ``` - - - `dump_mode`: 0: dump all kernels in graph, 1: dump kernels in kernels list. - - `path`: The absolute path to save dump data. - - `net_name`: The net name eg:ResNet50. - - `iteration`: Specify the iterations to dump. Iteration should be set to 0 when dataset_sink_mode is False and data of every iteration will be dumped. - - `input_output`: When set to 0, it means to Dump the operator's input and output; setting it to 1 means to Dump the operator's input; setting it to 2 means to Dump the output of the operator. - - `kernels`: List of operator names. Turn on the IR save switch `context.set_context(save_graphs=True)` and execute the network to obtain the operator name from the generated `trace_code_graph_{graph_id}`IR file. `kernels` only supports TBE operator, AiCPU operator and communication operator. 
The data of communication operation input operator will be dumped if `kernels` is set to the name of communication operator. For details, please refer to [Saving IR](https://www.mindspore.cn/doc/note/en/master/design/mindspore/mindir.html#saving-ir). - - `support_device`: Supported devices, default setting is `[0,1,2,3,4,5,6,7]`. You can specify specific device ids to dump specific device data. - - `enable`: Enable Asynchronous Dump. If synchronous dump and asynchronous dump are enabled at the same time, only synchronous dump will take effect. - - `op_debug_mode`: 0: disable overflow check function; 1: enable AiCore overflow check; 2: enable Atomic overflow check; 3: enable all overflow check function. If it is not set to 0, only the data of the overflow operator will be dumped. - -2. Specify the json configuration file of Dump. - - ```bash - export MINDSPORE_DUMP_CONFIG={Absolute path of data_dump.json} - ``` - - - Set the environment variables before executing the training script. Setting environment variables during training will not take effect. - - Dump environment variables need to be configured before calling `mindspore.communication.management.init`. - -3. Execute the training script to dump data. - - You can set `context.set_context(reserve_class_name_in_scope=False)` in your training script to avoid dump failure because of file name is too long. - -4. Refer to [Asynchronous Dump Data Analysis Sample](#asynchronous-dump-data-analysis-sample) to analyze the Dump data file. - -- If you need to dump all or part of the operator, you can modify the `dump_mode` option in the json configuration file to 0 or 1. -- If the data sink function is enabled (set the `dataset_sink_mode` parameter in `model.train` or `DatasetHelper` to `True`), only the data of one step specified in the configuration file can be dumped (in this case, `iteration 0` means The 0th step), and save it to the specified directory. 
-- If the data sink function is not enabled (set the `dataset_sink_mode` parameter in `model.train` or `DatasetHelper` to `False`), `iteration` in the configuration file must be specified as 0, and all step data are stored in one directory; multi-step data management is not supported. At this time, it is recommended to execute the step data dump only once (you can train only one step by modifying the script). -- Using the Dump function will automatically generate the IR file of the final execution graph. - -### Asynchronous Dump Data Object Directory - -The data objects saved by asynchronous Dump include the final execution graph (`ms_output_trace_code_graph_{graph_id}.ir` file) and the input and output data of the operators in the graph. The directory structure is as follows: - -```text -{path}/ - |-- {device_id}/ - |-- {net_name}_graph_{graph_id}/ - |-- {graph_id}/ - |-- {iteration}/ - |-- {op_type}.{op_name}.{task_id}.{timestamp} - … - |-- graphs/ - ms_output_trace_code_graph_{graph_id}.pb - ms_output_trace_code_graph_{graph_id}.ir - |-- execution_order/ - ms_execution_order_graph_{graph_id}.csv - - |-- .metadata/ - data_dump.json -``` - -- `path`: the absolute path set in the `data_dump.json` configuration file. -- `net_name`: the network name set in the `data_dump.json` configuration file. -- `device_id`: the id of the training device. -- `graph_id`: the id of the training graph. -- `iteration`: the iteration of the training. -- `op_type`: the type of the operator. -- `op_name`: the name of the operator. -- `task_id`: the id of the task. -- `timestamp`: the time stamp. - -### Introduction to Asynchronous Dump Data File - -After the training is started, the original data file generated by asynchronous Dump is in protobuf format. It needs to be parsed using the data analysis tool that comes with the HiSilicon Run package. 
For details, please refer to [How to view dump data files](https://support.huaweicloud.com/intl/en-us/usermanual-mindstudioc73/atlasmindstudioaccuracy_16_0022.html). - -The data format on the Device side may be different from the definition in the calculation diagram on the Host side. The data format of the asynchronous dump is the Device side format. If you want to convert to the Host side format, you can refer to [How to convert dump data file format](https://support.huaweicloud.com/intl/en-us/usermanual-mindstudioc73/atlasmindstudioaccuracy_16_0021.html). - -The naming rules for data files generated by asynchronous Dump are as follows: - -- The naming rule of the dump path is: `{path}/{device_id}/{net_name}_graph_{graph_id}/{graph_id}/{iteration}`. -- The naming rule of Dump file is: `{op_type}.{op_name}.{task_id}.{timestamp}`. - -Take the Dump result of a simple network as an example: `Add.Default_Add-op1.2.161243956333802`, where `Add` is `{op_type}`, `Default_Add-op1` is `{op_name}`, and `2` is `{task_id}`, `161243956333802` is `{timestamp}`. - -If ".", "/", "\", and spaces appear in `op_type` and `op_name`, they will be converted to underscores. - -The final execution graph file and node execution sequence file naming rules generated by asynchronous Dump are the same as that of synchronous Dump. You can refer to [Introduction to Synchronous Dump Data File](#introduction-to-synchronous-dump-data-file). - -### Asynchronous Dump Data Analysis Sample - -Through the asynchronous Dump function, the data files generated by the operator asynchronous Dump can be obtained. - -1. Parse the dumped file using `msaccucmp.py` provided in the run package. The path where the `msaccucmp.py` file is located may be different on different environments. You can find it through the find command: - - ```bash - find ${run_path} -name "msaccucmp.py" - ``` - - - `run_path`: The installation path of the run package. - -2. 
Change directory to `/absolute_path` after training, execute the following commands to parse Dump data file: - - ```bash - python ${The absolute path of msaccucmp.py} convert -d {file path of dump} -out {file path of output} - ``` - - Or you can use `msaccucmp.py` to convert the format of dump file. Please see . - - For example, the data file generated by Dump is: - - ```text - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491 - ``` - - Then execute: - - ```bash - python3.7.5 msaccucmp.py convert -d BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491 -out ./output -f NCHW -t npy - ``` - - Then all input and output data of the operator can be generated under `./output`. Each data is saved as a file with the suffix of `.npy`, and the data format is `NCHW`. 
- - The generated results are as follows: - - ```text - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.0.30x1024x17x17.npy - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.1.1x1024x1x1.npy - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.2.1x1024x1x1.npy - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.3.1x1024x1x1.npy - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.4.1x1024x1x1.npy - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.5.1x1024x1x1.npy - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.6.1x1024x1x1.npy - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.output.0.30x1024x17x17.npy - 
BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.output.1.1x1024x1x1.npy - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.output.2.1x1024x1x1.npy - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.output.3.1x1024x1x1.npy - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.output.4.1x1024x1x1.npy - ``` - - At the end of the file name, you can see which input or output the file is the operator, and the dimensional information of the data. For example, by the first `.npy` file name - - ```text - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.0.30x1024x17x17.npy - ``` - - It can be seen that the file is the 0th input of the operator, and the dimension information of the data is `30x1024x17x17`. - -3. The corresponding data can be read through `numpy.load("file_name")`. 
For example: - - ```python - import numpy - numpy.load("BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.0.30x1024x17x17.npy") - ``` diff --git a/tutorials/training/source_en/advanced_use/enable_auto_augmentation.md b/tutorials/training/source_en/advanced_use/enable_auto_augmentation.md deleted file mode 100644 index 33ea05ae69bb57516e3f8e36cf79e1c504c8ab94..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/enable_auto_augmentation.md +++ /dev/null @@ -1,241 +0,0 @@ -# Auto Augmentation - -`Linux` `Ascend` `GPU` `CPU` `Data Preparation` `Intermediate` `Expert` - - - -- [Auto Augmentation](#auto-augmentation) - - [Overview](#overview) - - [Auto Augmentation on ImageNet](#auto-augmentation-on-imagenet) - - [References](#references) - - - - - -## Overview - -Auto Augmentation [1] finds a suitable image augmentation scheme for a specific dataset by searching through a series of image augmentation sub-policies. The `c_transforms` module of MindSpore provides various C++ operators that are used in Auto Augmentation. Users can also customize functions or operators to implement Auto Augmentation. For more details about the MindSpore operators, see the [API document](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.vision.html). 
 - -The mapping between MindSpore operators and Auto Augmentation operators is as follows: - -| Auto Augmentation Operators | MindSpore Operators | Introduction | -| :------: | :------ | ------ | -| shearX | RandomAffine | Horizontal shear | -| shearY | RandomAffine | Vertical shear | -| translateX | RandomAffine | Horizontal translation | -| translateY | RandomAffine | Vertical translation | -| rotate | RandomRotation | Rotational transformation | -| color | RandomColor | Color transformation | -| posterize | RandomPosterize | Decrease the number of color channels | -| solarize | RandomSolarize | Invert all pixels within the specified threshold range | -| contrast | RandomColorAdjust | Contrast adjustment | -| sharpness | RandomSharpness | Sharpness adjustment | -| brightness | RandomColorAdjust | Brightness adjustment | -| autocontrast | AutoContrast | Maximize image contrast | -| equalize | Equalize | Equalize image histogram | -| invert | Invert | Image inversion | - -## Auto Augmentation on ImageNet - -This tutorial uses the implementation of Auto Augmentation on the ImageNet dataset as an example. - -The data augmentation policy for the ImageNet dataset contains 25 sub-policies, and each sub-policy contains two transformations. A combination of sub-policies is randomly selected for each image in a batch, and each transformation in the sub-policy is executed based on a preset probability. - -Users can use the `RandomSelectSubpolicy` interface of the `c_transforms` module in MindSpore to implement Auto Augmentation. The standard data augmentation method in ImageNet classification training includes the following steps: - -- `RandomCropDecodeResize`: Randomly crop then decode. - -- `RandomHorizontalFlip`: Randomly flip horizontally. - -- `Normalize`: Normalize the data. - -- `HWC2CHW`: Change image channel. - -Add Auto Augmentation transformation after the `RandomCropDecodeResize` as follows: - -1. Import related modules. 
- - ```python - import matplotlib.pyplot as plt - - import mindspore.dataset as ds - import mindspore.dataset.transforms.c_transforms as c_transforms - import mindspore.dataset.vision.c_transforms as c_vision - from mindspore import dtype as mstype - ``` - -2. Define the mapping from the MindSpore operators to the Auto Augmentation operators. - - ```python - # define Auto Augmentation operators - PARAMETER_MAX = 10 - - def float_parameter(level, maxval): - return float(level) * maxval / PARAMETER_MAX - - def int_parameter(level, maxval): - return int(level * maxval / PARAMETER_MAX) - - def shear_x(level): - v = float_parameter(level, 0.3) - return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, shear=(-v,-v)), c_vision.RandomAffine(degrees=0, shear=(v, v))]) - - def shear_y(level): - v = float_parameter(level, 0.3) - return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, shear=(0, 0, -v,-v)), c_vision.RandomAffine(degrees=0, shear=(0, 0, v, v))]) - - def translate_x(level): - v = float_parameter(level, 150 / 331) - return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, translate=(-v,-v)), c_vision.RandomAffine(degrees=0, translate=(v, v))]) - - def translate_y(level): - v = float_parameter(level, 150 / 331) - return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, translate=(0, 0, -v,-v)), c_vision.RandomAffine(degrees=0, translate=(0, 0, v, v))]) - - def color_impl(level): - v = float_parameter(level, 1.8) + 0.1 - return c_vision.RandomColor(degrees=(v, v)) - - def rotate_impl(level): - v = int_parameter(level, 30) - return c_transforms.RandomChoice([c_vision.RandomRotation(degrees=(-v, -v)), c_vision.RandomRotation(degrees=(v, v))]) - - def solarize_impl(level): - level = int_parameter(level, 256) - v = 256 - level - return c_vision.RandomSolarize(threshold=(0, v)) - - def posterize_impl(level): - level = int_parameter(level, 4) - v = 4 - level - return c_vision.RandomPosterize(bits=(v, v)) - - def 
contrast_impl(level): - v = float_parameter(level, 1.8) + 0.1 - return c_vision.RandomColorAdjust(contrast=(v, v)) - - def autocontrast_impl(level): - return c_vision.AutoContrast() - - def sharpness_impl(level): - v = float_parameter(level, 1.8) + 0.1 - return c_vision.RandomSharpness(degrees=(v, v)) - - def brightness_impl(level): - v = float_parameter(level, 1.8) + 0.1 - return c_vision.RandomColorAdjust(brightness=(v, v)) - ``` - -3. Define the Auto Augmentation policy for the ImageNet dataset. - - ```python - # define the Auto Augmentation policy - imagenet_policy = [ - [(posterize_impl(8), 0.4), (rotate_impl(9), 0.6)], - [(solarize_impl(5), 0.6), (autocontrast_impl(5), 0.6)], - [(c_vision.Equalize(), 0.8), (c_vision.Equalize(), 0.6)], - [(posterize_impl(7), 0.6), (posterize_impl(6), 0.6)], - [(c_vision.Equalize(), 0.4), (solarize_impl(4), 0.2)], - - [(c_vision.Equalize(), 0.4), (rotate_impl(8), 0.8)], - [(solarize_impl(3), 0.6), (c_vision.Equalize(), 0.6)], - [(posterize_impl(5), 0.8), (c_vision.Equalize(), 1.0)], - [(rotate_impl(3), 0.2), (solarize_impl(8), 0.6)], - [(c_vision.Equalize(), 0.6), (posterize_impl(6), 0.4)], - - [(rotate_impl(8), 0.8), (color_impl(0), 0.4)], - [(rotate_impl(9), 0.4), (c_vision.Equalize(), 0.6)], - [(c_vision.Equalize(), 0.0), (c_vision.Equalize(), 0.8)], - [(c_vision.Invert(), 0.6), (c_vision.Equalize(), 1.0)], - [(color_impl(4), 0.6), (contrast_impl(8), 1.0)], - - [(rotate_impl(8), 0.8), (color_impl(2), 1.0)], - [(color_impl(8), 0.8), (solarize_impl(7), 0.8)], - [(sharpness_impl(7), 0.4), (c_vision.Invert(), 0.6)], - [(shear_x(5), 0.6), (c_vision.Equalize(), 1.0)], - [(color_impl(0), 0.4), (c_vision.Equalize(), 0.6)], - - [(c_vision.Equalize(), 0.4), (solarize_impl(4), 0.2)], - [(solarize_impl(5), 0.6), (autocontrast_impl(5), 0.6)], - [(c_vision.Invert(), 0.6), (c_vision.Equalize(), 1.0)], - [(color_impl(4), 0.6), (contrast_impl(8), 1.0)], - [(c_vision.Equalize(), 0.8), (c_vision.Equalize(), 0.6)], - ] - ``` - -4. 
Add Auto Augmentation transformations after the `RandomCropDecodeResize` operation. - - ```python - def create_dataset(dataset_path, do_train, repeat_num=1, batch_size=32, shuffle=True, num_samples=5, target="Ascend"): - # create a train or eval imagenet2012 dataset for ResNet-50 - dataset = ds.ImageFolderDataset(dataset_path, num_parallel_workers=8, - shuffle=shuffle, num_samples=num_samples) - - image_size = 224 - mean = [0.485 * 255, 0.456 * 255, 0.406 * 255] - std = [0.229 * 255, 0.224 * 255, 0.225 * 255] - - # define map operations - if do_train: - trans = [ - c_vision.RandomCropDecodeResize(image_size, scale=(0.08, 1.0), ratio=(0.75, 1.333)), - ] - - post_trans = [ - c_vision.RandomHorizontalFlip(prob=0.5), - ] - else: - trans = [ - c_vision.Decode(), - c_vision.Resize(256), - c_vision.CenterCrop(image_size), - c_vision.Normalize(mean=mean, std=std), - c_vision.HWC2CHW() - ] - dataset = dataset.map(operations=trans, input_columns="image") - if do_train: - dataset = dataset.map(operations=c_vision.RandomSelectSubpolicy(imagenet_policy), input_columns=["image"]) - dataset = dataset.map(operations=post_trans, input_columns="image") - type_cast_op = c_transforms.TypeCast(mstype.int32) - dataset = dataset.map(operations=type_cast_op, input_columns="label") - # apply the batch operation - dataset = dataset.batch(batch_size, drop_remainder=True) - # apply the repeat operation - dataset = dataset.repeat(repeat_num) - - return dataset - ``` - -5. Verify the effects of Auto Augmentation. - - ```python - # Define the path to image folder directory. This directory needs to contain sub-directories which contain the images. 
- DATA_DIR = "/path/to/image_folder_directory" - dataset = create_dataset(dataset_path=DATA_DIR, do_train=True, batch_size=5, shuffle=False, num_samples=5) - - epochs = 5 - itr = dataset.create_dict_iterator() - fig=plt.figure(figsize=(8, 8)) - columns = 5 - rows = 5 - - step_num = 0 - for ep_num in range(epochs): - for data in itr: - step_num += 1 - for index in range(rows): - fig.add_subplot(rows, columns, ep_num * rows + index + 1) - plt.imshow(data['image'].asnumpy()[index]) - plt.show() - ``` - - > For better visualization, only five images are read from the dataset without performing `shuffle`, `Normalize`, nor `HWC2CHW` operations. - - ![augment](./images/auto_augmentation.png) - - The images above visualize the effect of Auto Augmentation. The horizontal direction displays five images in one batch, and the vertical direction displays five batches. - -## References - -[1] [AutoAugment: Learning Augmentation Policies from Data](https://arxiv.org/abs/1805.09501). diff --git a/tutorials/training/source_en/advanced_use/enable_cache.md b/tutorials/training/source_en/advanced_use/enable_cache.md deleted file mode 100644 index 78ce6b073657d6ce42b9dbf3a1ea8741f8830a5d..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/enable_cache.md +++ /dev/null @@ -1,150 +0,0 @@ -# Application of Single-Node Tensor Cache - -`Linux` `Ascend` `GPU` `CPU` `Data Preparation` `Intermediate` `Expert` - - - -- [Application of Single-Node Tensor Cache](#application-of-single-node-tensor-cache) - - [Overview](#overview) - - [Configuring the Environment](#configuring-the-environment) - - [Starting the Cache Server](#starting-the-cache-server) - - [Creating a Cache Session](#creating-a-cache-session) - - [Creating a Cache Instance](#creating-a-cache-instance) - - [Inserting a Cache Instance](#inserting-a-cache-instance) - - [Destroying a Cache Session](#destroying-a-cache-session) - - [Stopping the Cache Server](#stopping-the-cache-server) - - - - - 
-## Overview - -If you need to repeatedly access remote datasets or read datasets from disks, you can use the single-node cache operator to cache datasets in the local memory to accelerate dataset reading. - -This tutorial demonstrates how to use the single-node cache service to cache data that has been processed with data augmentation. - -## Configuring the Environment - -Before using the cache service, you need to install MindSpore and set related environment variables. The Conda environment is used as an example. The setting method is as follows: - -```shell -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{path_to_conda}/envs/{your_env_name}/lib/python3.7/site-packages/mindspore:{path_to_conda}/envs/{your_env_name}/lib/python3.7/site-packages/mindspore/lib -export PATH=$PATH:{path_to_conda}/envs/{your_env_name}/bin -``` - -## Starting the Cache Server - -Before using the single-node cache service, you need to start the cache server. - -```shell -$ cache_admin --start -Cache server startup completed successfully! -The cache server daemon has been created as process id 10394 and is listening on port 50052 - -Recommendation: -Since the server is detached into its own daemon process, monitor the server logs (under /tmp/mindspore/cache/log) for any issues that may happen after startup -``` - -If the system displays a message indicating that the `libpython3.7m.so.1.0` file cannot be found, search for the file path in the virtual environment and set environment variables. - -```shell -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{path_to_conda}/envs/{your_env_name}/lib -``` - -## Creating a Cache Session - -If no cache session exists on the cache server, a cache session needs to be created to obtain the cache session ID. - -```shell -$ cache_admin -g -Session created for server on port 50052: 1493732251 -``` - -The cache session ID is randomly allocated by the server. 
- -## Creating a Cache Instance - -Create the Python script `my_training_script.py`, use the `DatasetCache` API to define a cache instance named `some_cache` in the script, and specify the `session_id` parameter to a cache session ID created in the previous step. - -```python -import mindspore.dataset as ds - -some_cache = ds.DatasetCache(session_id=1493732251, size=0, spilling=False) -``` - -## Inserting a Cache Instance - -The following uses the CIFAR-10 dataset as an example. Before running the sample, download and store the CIFAR-10 dataset by referring to [Loading Dataset](https://www.mindspore.cn/doc/programming_guide/en/master/dataset_loading.html#cifar-10-100-dataset). The directory structure is as follows: - -```text -├─my_training_script.py -└─cifar-10-batches-bin - ├── batches.meta.txt - ├── data_batch_1.bin - ├── data_batch_2.bin - ├── data_batch_3.bin - ├── data_batch_4.bin - ├── data_batch_5.bin - ├── readme.html - └── test_batch.bin -``` - -To cache the enhanced data processed by data augmentation of the map operator, the created `some_cache` instance is used as the input parameter of the `cache` API in the map operator. - -```python -import mindspore.dataset.vision.c_transforms as c_vision - -dataset_dir = "cifar-10-batches-bin/" -data = ds.Cifar10Dataset(dataset_dir=dataset_dir, num_samples=5, shuffle=False, num_parallel_workers=1) - -# apply cache to map -rescale_op = c_vision.Rescale(1.0 / 255.0, -1.0) -data = data.map(input_columns=["image"], operations=rescale_op, cache=some_cache) - -num_iter = 0 -for item in data.create_dict_iterator(num_epochs=1): # each data is a dictionary - # in this example, each dictionary has a key "image" - print("{} image shape: {}".format(num_iter, item["image"].shape)) - num_iter += 1 -``` - -Run the Python script `my_training_script.py`. 
The following information is displayed: - -```text -0 image shape: (32, 32, 3) -1 image shape: (32, 32, 3) -2 image shape: (32, 32, 3) -3 image shape: (32, 32, 3) -4 image shape: (32, 32, 3) -``` - -You can run the `cache_admin --list_sessions` command to check whether there are five data records in the current session. If yes, the data is successfully cached. - -```shell -$ cache_admin --list_sessions -Listing sessions for server on port 50052 - - Session Cache Id Mem cached Disk cached Avg cache size Numa hit - 1493732251 3618046178 5 n/a 12442 5 -``` - -## Destroying a Cache Session - -After the training is complete, you can destroy the current cache and release the memory. - -```shell -$ cache_admin --destroy_session 1493732251 -Drop session successfully for server on port 50052 -``` - -The preceding command is used to destroy the cache whose session ID is 1493732251. - -## Stopping the Cache Server - -After using the cache server, you can stop the cache server. This operation will destroy all cache sessions on the current server and release the memory. - -```shell -$ cache_admin --stop -Cache server on port 50052 has been stopped successfully. 
-``` diff --git a/tutorials/training/source_en/advanced_use/enable_graph_kernel_fusion.md b/tutorials/training/source_en/advanced_use/enable_graph_kernel_fusion.md deleted file mode 100644 index dde7e765f824311d692a1d91491dbe157631dd43..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/enable_graph_kernel_fusion.md +++ /dev/null @@ -1,154 +0,0 @@ -# Enabling Graph Kernel Fusion - -`Linux` `Ascend` `GPU` `Model Optimization` `Intermediate` `Expert` - - - -- [Enabling Graph Kernel Fusion](#enabling-graph-kernel-fusion) - - [Introduction](#introduction) - - [Enabling Method](#enabling-method) - - [Sample Scripts](#sample-scripts) - - [Custom Combination Operators](#custom-combination-operators) - - [Sample Scripts](#sample-scripts-1) - - - - - -## Introduction - -The graph kernel fusion is used to optimize network performance by cooperating with JIT operator compilation. With analyzing and evaluating the compute graph, it will apply optimization such as computing workload reduction, operator splitting, fusion and special operator compiling, to reduce network execution time. Also, the whole optimization process is completed automatically only if the graph kernel setting is enabled. This will help the user focus on the network development. - -The graph kernel fusion is available for: - -- Network with high performance requirement; -- Custom combination operators with high performance requirement. - -## Enabling Method - -The graph kernel is disabled by default. We can just specify the `enable_graph_kernel=True` parameter for `context` in the training script to enable it. - -```python -from mindspore import context -context.set_context(enable_graph_kernel=True) -``` - -> Only Graph Mode is supported by graph kernel. - -### Sample Scripts - -To illustrate the fusion scenario, we construct a simple network `MyNet`, including multiplication and addition operators. 
The two operators will be fused together with enabled graph kernel: - -```python -import numpy as np -import mindspore.context as context -from mindspore import Tensor -from mindspore.nn import Cell -import mindspore.ops as ops - -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") -# save graph ir to view fusion detail. -context.set_context(save_graphs=True) -# enable graph kernel optimization. -context.set_context(enable_graph_kernel=True) - -class MyNet(Cell): - def __init__(self): - super(MyNet, self).__init__() - self.add = ops.Add() - self.mul = ops.Mul() - - def construct(self, x): - a = self.mul(x, 2.0) - res = self.add(a, 1.0) - return res - -x = np.ones((4, 4)).astype(np.float32) * 0.5 -net = MyNet() -result = net(Tensor(x)) -print("result: {}".format(result)) -``` - -The output is: - -```text -result: [[2. 2. 2. 2.] - [2. 2. 2. 2.] - [2. 2. 2. 2.] - [2. 2. 2. 2.]] -``` - -The fusion of this graph is shown in Figure 1, the left graph is without graph kernel fusion being enabled and the right one is with graph kernel fusion being enabled, which can be checked by dumped graph IR or device profiling. - -![fuse basic example](images/graph_kernel_example_fuse_basic.png) - -Figure 1 Graph kernel fusion on computational graph - -## Custom Combination Operators - -We can easily implement high-performance custom combination operators based on graph kernel. The steps are as follows: - -1. Define custom operator by combining basic operators; -2. Enable Graph Kernel; -3. Graph kernel automatically fuses the basic operators and generates high-performance fusion operators. - -### Sample Scripts - -We construct a simple network `MyNet` and define the custom operator `MyOp`: - -```python -import numpy as np -import mindspore.context as context -from mindspore import Tensor -from mindspore.nn import Cell -import mindspore.ops as ops - -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") -# enable graph kernel optimization. 
-context.set_context(enable_graph_kernel=True) - -class MyOp(Cell): - """ my first custom OP composited by basic OPs """ - def __init__(self): - super(MyOp, self).__init__() - self.sub = ops.Sub() - self.mul = ops.Mul() - - def construct(self, x, y): - a = self.sub(x, y) - return self.mul(a, x) - -class MyNet(Cell): - def __init__(self): - super(MyNet, self).__init__() - self.mul = ops.Mul() - self.pow = ops.Pow() - self.my_op = MyOp() - - def construct(self, x, y): - a = self.mul(x, 2.0) - b = self.pow(a, 3.0) - res = self.my_op(b, y) - return res - -x = np.ones((4, 4)).astype(np.float32) * 0.2 -y = np.ones((4, 4)).astype(np.float32) * 0.3 -net = MyNet() -result = net(Tensor(x), Tensor(y)) -print("result: {}".format(result)) -``` - -The output is: - -```text -result: [[-0.015104 -0.015104 -0.015104 -0.015104] - [-0.015104 -0.015104 -0.015104 -0.015104] - [-0.015104 -0.015104 -0.015104 -0.015104] - [-0.015104 -0.015104 -0.015104 -0.015104]] -``` - -The fusion of this graph is shown in Figure 2, the left graph is without graph kernel fusion being enabled and the right one is with graph kernel fusion being enabled, which can be checked by dumped graph IR or device profiling. 
- -![custom op example](images/graph_kernel_example_custom_op.png) - -Figure 2 Custom combination operator on computational graph diff --git a/tutorials/training/source_en/advanced_use/enable_mixed_precision.md b/tutorials/training/source_en/advanced_use/enable_mixed_precision.md deleted file mode 100644 index a8a615c96f5f83b58ad7121392caeb54e9822080..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/enable_mixed_precision.md +++ /dev/null @@ -1,232 +0,0 @@ -# Enabling Mixed Precision - -`Linux` `Ascend` `GPU` `Model Training` `Intermediate` `Expert` - - - -- [Enabling Mixed Precision](#enabling-mixed-precision) - - [Overview](#overview) - - [Computation Process](#computation-process) - - [Automatic Mixed Precision](#automatic-mixed-precision) - - [Manual Mixed Precision](#manual-mixed-precision) - - [Constraints](#constraints) - - - - - -## Overview - -The mixed precision training method accelerates the deep learning neural network training process by using both the single-precision and half-precision data formats, and maintains the network precision achieved by the single-precision training at the same time. -Mixed precision training can accelerate the computation process, reduce memory usage, and enable a larger model or batch size to be trained on specific hardware. - -For FP16 operators, if the input data type is FP32, the backend of MindSpore will automatically handle it with reduced precision. Users could check the reduced-precision operators by enabling INFO log and then searching 'Reduce precision'. - -## Computation Process - -The following figure shows the typical computation process of mixed precision in MindSpore. - -![mix precision](./images/mix_precision.PNG) - -1. Parameters are stored in FP32 format. -2. During the forward computation, if an FP16 operator is involved, the operator input and parameters need to be cast from FP32 to FP16. -3. The loss layer is set to FP32. -4. 
During backward computation, the value is multiplied by Loss Scale to avoid underflow due to a small gradient. -5. The FP16 parameter is used for gradient computation, and the result is cast back to FP32. -6. Then, the value is divided by Loss scale to restore the multiplied gradient. -7. The optimizer checks whether the gradient overflows. If yes, the optimizer skips the update. If no, the optimizer uses FP32 to update the original parameters. - -This document describes the computation process by using examples of automatic and manual mixed precision. - -## Automatic Mixed Precision - -To use the automatic mixed precision, you need to invoke the corresponding API, which takes the network to be trained and the optimizer as the input. This API converts the operators of the entire network into FP16 operators (except the `BatchNorm` and Loss operators). You can use automatic mixed precision through API `amp` or API `Model`. - -The procedure of using automatic mixed precision by API `amp` is as follows: - -1. Introduce the MindSpore mixed precision API `amp`. - -2. Define the network. This step is the same as the common network definition. (You do not need to manually configure the precision of any specific operator.) - -3. Use the `amp.build_train_network` API to encapsulate the network model and optimizer. You can learn how to set parameter `level` through . In this step, MindSpore automatically converts the operators to the required format. 
- -A code example is as follows: - -```python -import numpy as np - -import mindspore.nn as nn -from mindspore import Tensor, context -import mindspore.ops as ops -from mindspore.nn import Momentum -# The interface of Auto_mixed precision -from mindspore import amp - -context.set_context(mode=context.GRAPH_MODE) -context.set_context(device_target="Ascend") - -# Define network -class Net(nn.Cell): - def __init__(self, input_channel, out_channel): - super(Net, self).__init__() - self.dense = nn.Dense(input_channel, out_channel) - self.relu = ops.ReLU() - - def construct(self, x): - x = self.dense(x) - x = self.relu(x) - return x - - -# Initialize network -net = Net(512, 128) - -# Define training data, label -predict = Tensor(np.ones([64, 512]).astype(np.float32) * 0.01) -label = Tensor(np.zeros([64, 128]).astype(np.float32)) - -# Define Loss and Optimizer -loss = nn.SoftmaxCrossEntropyWithLogits() -optimizer = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) -train_network = amp.build_train_network(net, optimizer, loss, level="O3", loss_scale_manager=None) - -# Run training -output = train_network(predict, label) -``` - -The procedure of using automatic mixed precision by API `Model` is as follows: - -1. Introduce the MindSpore model API `Model`. - -2. Define the network. This step is the same as the common network definition. (You do not need to manually configure the precision of any specific operator.) - -3. Create dataset.You can learn detail step at . - -4. Use the `Model` API to encapsulate the network model and optimizer. You can learn how to set parameter `amp_level` through . In this step, MindSpore automatically converts the operators to the required format. 
- -A code example is as follows: - -```python -import numpy as np -import mindspore.nn as nn -from mindspore.nn.metrics import Accuracy -from mindspore import context, Model -from mindspore.common.initializer import Normal -from src.dataset import create_dataset - -context.set_context(mode=context.GRAPH_MODE) -context.set_context(device_target="Ascend") - -# Define network -class LeNet5(nn.Cell): - """ - Lenet network - - Args: - num_class (int): Number of classes. Default: 10. - num_channel (int): Number of channels. Default: 1. - - Returns: - Tensor, output tensor - Examples: - >>> LeNet(num_class=10) - - """ - def __init__(self, num_class=10, num_channel=1): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x - -# create dataset -ds_train = create_dataset("/dataset/MNIST/train", 32) - -# Initialize network -network = LeNet5(10) - -# Define Loss and Optimizer -net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") -net_opt = nn.Momentum(network.trainable_params(),learning_rate=0.01, momentum=0.9) -model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}, amp_level="O3") - -# Run training -model.train(epoch=10, train_dataset=ds_train) -``` - -## Manual Mixed Precision - -MindSpore also supports manual mixed precision. 
It is assumed that only one dense layer in the network needs to be calculated by using FP32, and other layers are calculated by using FP16. The mixed precision is configured in the granularity of cell. The default format of a cell is FP32. - -The following is the procedure for implementing manual mixed precision: - -1. Define the network. This step is similar to step 2 in the automatic mixed precision. - -2. Configure the mixed precision. Use `net.to_float(mstype.float16)` to set all operators of the cell and its sub-cells to FP16. Then, configure the dense to FP32. - -3. Use TrainOneStepCell to encapsulate the network model and optimizer. - -A code example is as follows: - -```python -import numpy as np - -import mindspore.nn as nn -from mindspore import dtype as mstype -from mindspore import Tensor, context -import mindspore.ops as ops -from mindspore.nn import WithLossCell, TrainOneStepCell -from mindspore.nn import Momentum - -context.set_context(mode=context.GRAPH_MODE) -context.set_context(device_target="Ascend") - -# Define network -class Net(nn.Cell): - def __init__(self, input_channel, out_channel): - super(Net, self).__init__() - self.dense = nn.Dense(input_channel, out_channel) - self.relu = ops.ReLU() - - def construct(self, x): - x = self.dense(x) - x = self.relu(x) - return x - -# Initialize network -net = Net(512, 128) -# Set mixing precision -net.to_float(mstype.float16) -net.dense.to_float(mstype.float32) - -# Define training data, label -predict = Tensor(np.ones([64, 512]).astype(np.float32) * 0.01) -label = Tensor(np.zeros([64, 128]).astype(np.float32)) - -# Define Loss and Optimizer -loss = nn.SoftmaxCrossEntropyWithLogits() -optimizer = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) -net_with_loss = WithLossCell(net, loss) -train_network = TrainOneStepCell(net_with_loss, optimizer) -train_network.set_train() - -# Run training -output = train_network(predict, label) -``` - -## Constraints - -When mixed precision is 
enabled, the backprop network can only be generated by the 'AutoDiff' block and cannot be defined by users. If a user defines the backprop network, a data type mismatching exception may be thrown by MindSpore. diff --git a/tutorials/training/source_en/advanced_use/evaluate_the_model_during_training.md b/tutorials/training/source_en/advanced_use/evaluate_the_model_during_training.md deleted file mode 100644 index fc093bd1e57adcf25e049daffae7baf33e273736..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/evaluate_the_model_during_training.md +++ /dev/null @@ -1,176 +0,0 @@ -# Evaluating the Model during Training - -`Linux` `Ascend` `GPU` `CPU` `Model Export` `Model Training` `Beginner` `Intermediate` `Expert` - - - -- [Evaluating the Model during Training](#evaluating-the-model-during-training) - - [Overview](#overview) - - [Defining the Callback Function EvalCallBack](#defining-the-callback-function-evalcallback) - - [Defining and Executing the Training Network](#defining-and-executing-the-training-network) - - [Defining the Function to Obtain the Model Accuracy in Different Epochs](#defining-the-function-to-obtain-the-model-accuracy-in-different-epochs) - - [Summary](#summary) - - - - - -## Overview - -For a complex network, epoch training usually needs to be performed for dozens or even hundreds of times. Before training, it is difficult to know when a model can achieve required accuracy in epoch training. Therefore, the accuracy of the model is usually validated at a fixed epoch interval during training and the corresponding model is saved. After the training is completed, you can quickly select the optimal model by viewing the change of the corresponding model accuracy. This section uses this method and takes the LeNet network as an example. - -The procedure is as follows: - -1. Define the callback function EvalCallBack to implement synchronous training and validation. -2. Define a training network and execute it. -3. 
Draw a line chart based on the model accuracy under different epochs and select the optimal model. - -Source code address of this example: . - -## Defining the Callback Function EvalCallBack - -Implementation idea: The model accuracy is validated every n epochs. The model accuracy needs to be implemented in the custom callback function. For details about the usage, see [API Description](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.train.html#mindspore.train.callback.Callback). - -Core implementation: Validation points are set in `epoch_end` of the callback function as follows: - -`cur_epoch % eval_per_epoch == 0`: indicates that the model accuracy is validated every `eval_per_epoch` epoch. - -- `cur_epoch`: indicates `epoch` value in the current training process. -- `eval_per_epoch`: indicates user-defined value, that is, the validation frequency. - -Other parameters are described as follows: - -- `model`: indicates the `Model` class in MindSpore. -- `eval_dataset`: indicates the validation dataset. -- `epoch_per_eval`: records the accuracy of the validation model and the corresponding number of epochs. The data format is `{"epoch": [], "acc": []}`. 
- -```python -from mindspore.train.callback import Callback - -class EvalCallBack(Callback): - def __init__(self, model, eval_dataset, eval_per_epoch, epoch_per_eval): - self.model = model - self.eval_dataset = eval_dataset - self.eval_per_epoch = eval_per_epoch - self.epoch_per_eval = epoch_per_eval - - def epoch_end(self, run_context): - cb_param = run_context.original_args() - cur_epoch = cb_param.cur_epoch_num - if cur_epoch % self.eval_per_epoch == 0: - acc = self.model.eval(self.eval_dataset, dataset_sink_mode=False) - self.epoch_per_eval["epoch"].append(cur_epoch) - self.epoch_per_eval["acc"].append(acc["Accuracy"]) - print(acc) - -``` - -## Defining and Executing the Training Network - -In the `CheckpointConfig` parameter for saving the model, you need to calculate the number of steps in a single epoch, then set the `checkpointconfig` file according to the number of required steps to save the model weight parameter `ckpt` file. In this example, each epoch has 1875 steps. Based on the principle of validating once every two epochs, set `save_checkpoint_steps=eval_per_epoch*1875`. The variable `eval_per_epoch` is equal to 2. - -The parameters are described as follows: - -- `config_ck`: configures the information for saving the model. - - `save_checkpoint_steps`: indicates the number of steps for saving the weight parameter `ckpt` file of the model. - - `keep_checkpoint_max`: indicates the maximum number of model's weight parameter that can be saved. -- `ckpoint_cb`: configures the prefix information of the name and path for saving the model. -- `model`: indicates the `Model` class in MindSpore. -- `model.train`: indicates the `Model` class training function. -- `epoch_per_eval`: defines the number for collecting `epoch` and the dictionary of corresponding model accuracy information. -- `train_data`: indicates the training dataset. -- `eval_data`: indicates the validation dataset. 
- -```python -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor -from mindspore import context, Model -from mindspore.nn.metrics import Accuracy - -if __name__ == "__main__": - context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - ckpt_save_dir = "./lenet_ckpt" - eval_per_epoch = 2 - epoch_size =10 - - ... ... - - # need to calculate how many steps are in each epoch, in this example, 1875 steps per epoch. - config_ck = CheckpointConfig(save_checkpoint_steps=eval_per_epoch*1875, keep_checkpoint_max=15) - ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet",directory=ckpt_save_dir, config=config_ck) - model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}) - - epoch_per_eval = {"epoch": [], "acc": []} - eval_cb = EvalCallBack(model, eval_data, eval_per_epoch, epoch_per_eval) - - model.train(epoch_size, train_data, callbacks=[ckpoint_cb, LossMonitor(375), eval_cb], - dataset_sink_mode=False) -``` - -The output is as follows: - -```text -epoch: 1 step: 375, loss is 2.298612 -epoch: 1 step: 750, loss is 2.075152 -epoch: 1 step: 1125, loss is 0.39205977 -epoch: 1 step: 1500, loss is 0.12368304 -epoch: 1 step: 1875, loss is 0.20988345 -epoch: 2 step: 375, loss is 0.20582482 -epoch: 2 step: 750, loss is 0.029070046 -epoch: 2 step: 1125, loss is 0.041760832 -epoch: 2 step: 1500, loss is 0.067035824 -epoch: 2 step: 1875, loss is 0.0050643035 -{'Accuracy': 0.9763621794871795} - -... ... - -epoch: 9 step: 375, loss is 0.021227183 -epoch: 9 step: 750, loss is 0.005586236 -epoch: 9 step: 1125, loss is 0.029125651 -epoch: 9 step: 1500, loss is 0.00045874066 -epoch: 9 step: 1875, loss is 0.023556218 -epoch: 10 step: 375, loss is 0.0005807788 -epoch: 10 step: 750, loss is 0.02574059 -epoch: 10 step: 1125, loss is 0.108463734 -epoch: 10 step: 1500, loss is 0.01950589 -epoch: 10 step: 1875, loss is 0.10563098 -{'Accuracy': 0.979667467948718} -``` - -Find the `lenet_ckpt` folder in the same directory. 
The folder contains five models and data related to a calculation graph. The structure is as follows: - -```text -lenet_ckpt -├── checkpoint_lenet-10_1875.ckpt -├── checkpoint_lenet-2_1875.ckpt -├── checkpoint_lenet-4_1875.ckpt -├── checkpoint_lenet-6_1875.ckpt -├── checkpoint_lenet-8_1875.ckpt -└── checkpoint_lenet-graph.meta -``` - -## Defining the Function to Obtain the Model Accuracy in Different Epochs - -Define the drawing function `eval_show`, load `epoch_per_eval` to `eval_show`, and draw the model accuracy variation chart based on different `epoch`. - -```python -import matplotlib.pyplot as plt - -def eval_show(epoch_per_eval): - plt.xlabel("epoch number") - plt.ylabel("Model accuracy") - plt.title("Model accuracy variation chart") - plt.plot(epoch_per_eval["epoch"], epoch_per_eval["acc"], "red") - plt.show() - -eval_show(epoch_per_eval) -``` - -The output is as follows: - -![png](./images/evaluate_the_model_during_training.png) - -You can easily select the optimal model weight parameter `ckpt` file based on the preceding figure. - -## Summary - -The MNIST dataset is used for training through the convolutional neural network LeNet5. This section describes how to validate a model during training, save the model weight parameter `ckpt` file corresponding to the `epoch`, and select the optimal model. diff --git a/tutorials/training/source_en/advanced_use/hpc.rst b/tutorials/training/source_en/advanced_use/hpc.rst deleted file mode 100644 index 0f47dd0ffbbc73c7a303bbefb35bf9f05d4379e0..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/hpc.rst +++ /dev/null @@ -1,8 +0,0 @@ -High Performance Computing -============================= - -.. 
toctree:: - :maxdepth: 1 - - hpc_gomo - hpc_sponge diff --git a/tutorials/training/source_en/advanced_use/hpc_gomo.md b/tutorials/training/source_en/advanced_use/hpc_gomo.md deleted file mode 100644 index bfa1c32cbd98075ef53594914b573150a9a5c3bc..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/hpc_gomo.md +++ /dev/null @@ -1,236 +0,0 @@ -# Implementing Regional Ocean Model GOMO - -`Linux` `GPU` `Model Development` `Expert` - - - -- [Implementing Regional Ocean Model GOMO](#implementing-regional-ocean-model-gomo) - - [Overview](#overview) - - [Preparations](#preparations) - - [Installing Software Dependencies](#installing-software-dependencies) - - [Preparing Data](#preparing-data) - - [Loading Data](#loading-data) - - [Defining GOMO Grid](#defining-gomo-grid) - - [Initializing Variables](#initializing-variables) - - [Defining GOMO Model](#defining-gomo-model) - - [Training Grid](#training-grid) - - [Running Script](#running-script) - - [References](#references) - - -   - -## Overview - -Generalized Operator Modelling of the Ocean (GOMO) is a 3D regional ocean model based on OpenArray. It is a universal model developed by Huang Xiaomeng from Tsinghua University for ocean modeling and parallel computing ([Xiaomeng Huang et al, 2019](https://gmd.copernicus.org/articles/12/4729/2019/gmd-12-4729-2019.pdf)). The basic equations and algorithms in the GOMO model are derived from the POM2k model ([Blumberg and Mellor, 1987](http://www.sciepub.com/portal/downloads?doi=10.12691/ajmo-2-2-2&filename=ajmo-2-2-2.pdf)). The 3D ocean model plays an important role in the earth system modeling. Following the law of conservation of mass, the model simulates phenomena such as ocean currents and whirlpools to display the distribution of sea surface temperature and sea surface height and predict typhoons, tsunami and other phenomena in real time. Traditional ocean models have complex code implementation and run on CPUs. 
However, the GOMO model has its framework accelerated by MindSpore and runs on a GPU, which greatly improves the model performance. - -The following describes how to use MindSpore to build and run the 3D ocean model GOMO on GPU. -> Download address of the complete sample code: -. - -The directory structure of the sample code is as follows: - -```shell -└── ocean_model - ├── README.md # descriptions about ocean model GOMO - ├── scripts - │ ├── run_distribute_train.sh # launch distributed training for GPU - ├──src - │ ├── GOMO.py # GOMO model - │ ├── Grid.py # grid initial - │ ├── stencil.py # averaging and differential stencil oprator - │ ├── op_operator.py # averaging and differential kernel operator - │ ├── read_var.py # read variables from nc file - ├── train.py # train script -``` - -The overall execution process is as follows: - -1. Prepare a seamount file as the ocean simulation dataset, and load and process the data. -2. Define GOMO variable initialization. -3. Define the GOMO model. -4. Load the dataset and perform training. After the training is complete, view the result and save the file. - -## Preparations - -### Installing Software Dependencies - -1. Install MindSpore. - - Before the practice, ensure that MindSpore has been properly installed. If not, install MindSpore by following the [Installation Guide](https://www.mindspore.cn/install/en). - -2. Install netCDF4. - - ```shell - pip install netCDF4 - ``` - -### Preparing Data - -Prepare a seamount file in netCDF format. The seamount problem proposed by Beckmann and Haidvogel is a widely used ideal test case for regional ocean models ([Beckmann and Haidvogel, 1993](https://journals.ametsoc.org/view/journals/phoc/23/8/1520-0485_1993_023_1736_nsofaa_2_0_co_2.xml?tab_body=fulltext-display)). Download the [seamount file](https://github.com/hxmhuang/GOMO/tree/master/bin/data). - -## Loading Data - -Load the seamount data file and read the initial variable values from the file script. 
The data type in the seamount file is float64, which needs to be converted into float32 for MindSpore computation. The script for loading and processing data is in the `src/read_var.py` script of the source code. - -```python -import numpy as np -import netCDF4 as nc - -# variable name list -params_name = ['z', 'zz', 'dz', 'dzz', 'dx', 'dy', 'cor', 'h', 'fsm', 'dum', 'dvm', 'art', 'aru', 'arv', 'rfe', 'rfw', - 'rfn', 'rfs', 'east_e', 'north_e', 'east_c', 'north_c', 'east_u', 'north_u', 'east_v', 'north_v', 'tb', - 'sb', 'tclim', 'sclim', 'rot', 'vfluxf', 'wusurf', 'wvsurf', 'e_atmos', 'ub', 'vb', 'uab', 'vab', 'elb', - 'etb', 'dt', 'uabw', 'uabe', 'vabs', 'vabn', 'els', 'eln', 'ele', 'elw', 'ssurf', 'tsurf', 'tbe', 'sbe', - 'sbw', 'tbw', 'tbn', 'tbs', 'sbn', 'sbs', 'wtsurf', 'swrad'] - -def load_var(file_obj, name): - """load variable from nc data file""" - data = file_obj.variables[name] - data = data[:] - data = np.float32(np.transpose(data, (2, 1, 0))) - return data - -def read_nc(file_path): - """ put the load variable into the dict """ - variable = {} - file_obj = nc.Dataset(file_path) - for name in params_name: - variable[name] = load_var(file_obj, name) - return variable -``` - -## Defining GOMO Grid - -A GOMO model deduces differential equations and boundary conditions based on momentum, energy, and law of conservation of mass, and determines seven equations to be solved. For details about formula derivation, see [this paper](https://gmd.copernicus.org/articles/12/4729/2019/gmd-12-4729-2019.pdf). Figure 1 shows an overall GOMO execution flowchart. - -First, data is loaded from the seamount data for initializing model variables. After the initial value and model parameters are loaded, the computation is divided into two parts: internal mode and external mode. In external mode, the 2D sea surface elevation `el` and column-averaged velocity (ua, va) are mainly computed. 
In internal mode, the number of loops `iend` indicates the total number of time steps during training (set by users). The 3D array computations predominate in order to compute the turbulence kinetic energy `q2` and the turbulence length `q2l` that generates the turbulence kinetic energy, temperature `t` and salinity `s`, as well as the velocity fields `u` and `v` in the x and y directions. After the computation is complete, save the required variable result and end the training. - -![GOMO](images/gomo.png) - -Figure 1: GOMO model flowchart - -### Initializing Variables - -```python -... -from src.GOMO import GOMO_init -... -if __name__ == "__main__": - ... - # define grid and init variable update - net_init = GOMO_init(im, jm, kb, stencil_width) - ... -``` - -### Defining GOMO Model - -```python -def construct(self, etf, ua, uab, va, vab, el, elb, d, u, v, w, kq, km, kh, q2, q2l, tb, t, sb, s, - rho, wubot, wvbot, ub, vb, egb, etb, dt, dhb, utb, vtb, vfluxb, et): - """construct""" - x_d, y_d, z_d = self.x_d, self.y_d, self.z_d - q2b, q2lb = self.q2b, self.q2lb - dx, dy = self.dx, self.dy - # surface forcing - w = w * (1 - self.z_h) + self.z_h * self.vfluxf - # lateral_viscosity - advx, advy, drhox, drhoy, aam = self.lateral_viscosity(dx, dy, u, v, dt, self.aam, ub, vb, x_d, y_d, z_d, rho, self.rmean) - # mode_interaction - adx2d, ady2d, drx2d, dry2d, aam2d, advua, advva, egf, utf, vtf = self.mode_interaction(advx, advy, drhox, drhoy, aam, x_d, y_d, d, uab, vab, ua, va, el) - # ===========external model=========== - vamax = 0 - elf = 0 - for iext in range(1, 31): - # external_el - elf = self.external_el(x_d, y_d, d, ua, va, elb) - # external_ua - advua, uaf = self.external_ua(iext, x_d, y_d, elf, d, ua, va, uab, vab, el, elb, advua, aam2d, adx2d, drx2d, wubot) - # external_va - advva, vaf = self.external_va(iext, x_d, y_d, elf, d, ua, va, uab, vab, el, elb, advva, aam2d, ady2d, dry2d, wvbot) - # external_update - etf, uab, ua, vab, va, elb, el, d, egf, utf, vtf, 
vamax = self.external_update(iext, etf, ua, uab, va, vab, el, elb, elf, uaf, vaf, egf, utf, vtf, d) - # ===========internal model=========== - if self.global_step != 0: - # adjust_uv - u, v = self.adjust_uv(u, v, utb, vtb, utf, vtf, dt) - # internal_w - w = self.internal_w(x_d, y_d, dt, u, v, etf, etb, vfluxb) - # internal_q - dhf, a, c, gg, ee, kq, km, kh, q2b_, q2, q2lb_, q2l = self.internal_q(x_d, y_d, z_d, etf, aam, q2b, q2lb, q2, q2l, kq, km, kh, u, v, w, dt, dhb, rho, wubot, wvbot, t, s) - q2b = ops.Assign()(self.q2b, q2b_) - q2lb = ops.Assign()(self.q2lb, q2lb_) - # internal_t_t - a, c, ee, gg, tb, t = self.internal_t_(t, tb, self.wtsurf, self.tsurf, self.swrad, self.tclim, self.tbe, self.tbw, self.tbn, self.tbs, x_d, y_d, z_d, dt, u, aam, self.h, self.dum, v, self.dvm, w, dhf, etf, a, kh, self.dzz, c, self.dzz1, ee, gg, dx, self.dz, dy, self.fsm, dhb) - # internal_t_s - a, c, ee, gg, sb, s = self.internal_t_(s, sb, self.wssurf, self.ssurf, self.swrad0, self.sclim, self.sbe, self.sbw, self.sbn, self.sbs, x_d, y_d, z_d, dt, u, aam, self.h, self.dum, v, self.dvm, w, dhf, etf, a, kh, self.dzz, c, self.dzz1, ee, gg, dx, self.dz, dy, self.fsm, dhb) - # dense - rho = self.dens(s, t, self.zz, self.h, self.fsm) - # internal_u - uf, a, c, gg, ee, wubot = self.internal_u(x_d, z_d, dhf, u, v, w, ub, vb, egf, egb, ee, gg, self.cbc, km, advx, drhox, dt, dhb) - # internal_v - vf, a, c, gg, ee, wvbot = self.internal_v(y_d, z_d, dhf, u, v, w, ub, vb, egf, egb, ee, gg, self.cbc, km, advy, drhoy, dt, dhb) - # adjust_ufvf - u, v, ub, vb = self.adjust_ufvf(u, v, uf, vf, ub, vb) - # internal_update - egb, etb, dt, dhb, utb, vtb, vfluxb, et = self.internal_update(egf, etb, utf, vtf, etf, et) - steps = ops.AssignAdd()(self.global_step, 1) - - return elf, etf, ua, uab, va, vab, el, elb, d, u, v, w, kq, km, kh, q2, q2l, tb, t, sb, s, rho, wubot, wvbot, \ - ub, vb, egb, etb, dt, dhb, utb, vtb, vfluxb, et, steps, vamax, q2b, q2lb -``` - -Call the defined GOMO model in the `__main__` 
function. - -```python -... -from src.GOMO import GOMO -... -if __name__ == "__main__": - ... - # define GOMO model - Model = GOMO(im=im, jm=jm, kb=kb, stencil_width=stencil_width, variable=variable, x_d=x_d, y_d=y_d, z_d=z_d, - q2b=q2b, q2lb=q2lb, aam=aam, cbc=cbc, rmean=rmean) - ... -``` - -## Training Grid - -### Running Script - -After the training script is defined, call the shell script in the `scripts` directory to start training. -Run the following command to execute the script: - -```shell -sh run_distribute_train.sh -``` - -Pass the variables `im`, `jm`, `kb`, `step` and `DATASET_PATH` to the script, where: - -- `im`, `jm`, `kb`: resolution of the simulated ocean region, which is related to the used data. -- `step`: number of time steps during training (corresponding to `iend` in Figure 1). -- `DATASET_PATH`: training data path. - -After the training is complete, the variable change values during the training are saved in the `train/outputs` directory. The following four variable values are saved every five time steps: east wind velocity (unit: m/s), north wind velocity (unit: m/s), position temperature (unit: K), and sea surface elevation (unit: m). - -```bash -└─outputs - ├─u_5.npy - ├─v_5.npy - ├─t_5.npy - ├─et_5.npy - ├─u_10.npy - ├─v_10.npy - ├─t_10.npy - ├─et_10.npy - -``` - -In the preceding information: -`*.npy`: saved variables. File name format: *Variable name*_*Step count*.npy. - -## References - -1. Huang X, Huang X, Wang D, et al. OpenArray v1. 0: a simple operator library for the decoupling of ocean modeling and parallel computing[J]. Geoscientific Model Development, 2019, 12(11). -2. Blumberg A F, Mellor G L. A description of a three‐dimensional coastal ocean circulation model[J]. Three‐dimensional coastal ocean models, 1987, 4: 1-16. -3. Beckmann A, Haidvogel D B. Numerical simulation of flow around a tall isolated seamount. Part I: Problem formulation and model accuracy[J]. Journal of Physical Oceanography, 1993, 23(8): 1736-1753. 
diff --git a/tutorials/training/source_en/advanced_use/hpc_sponge.md b/tutorials/training/source_en/advanced_use/hpc_sponge.md deleted file mode 100644 index c2cf80b7076e1776e3f044e32d2ca3f353dea61d..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/hpc_sponge.md +++ /dev/null @@ -1,5 +0,0 @@ -# SPONGE Molecular Simulation Practice - -No English version right now, welcome to contribute. - -   diff --git a/tutorials/training/source_en/advanced_use/hyper_parameters_auto_tuning.md b/tutorials/training/source_en/advanced_use/hyper_parameters_auto_tuning.md deleted file mode 100644 index 509e26225b32d7b8d3c9ebb38af988577b674ad8..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/hyper_parameters_auto_tuning.md +++ /dev/null @@ -1,211 +0,0 @@ -# Use Mindoptimizer to Tune Hyperparameters - -`Linux` `Ascend` `GPU` `CPU` `Model Optimization` `Intermediate` `Expert` - - - -- [Use Mindoptimizer to Tune Hyperparameters](#use-mindoptimizer-to-tune-hyperparameters) - - [Overview](#overview) - - [Installation](#installation) - - [Usage](#usage) - - [Configuration File Rules](#configuration-file-rules) - - [Usage Examples](#usage-examples) - - [Notices](#notices) - - - - - -## Overview - -There are two kinds of parameters in machine learning. One is the model internal parameters, relying on training data and algorithms to tune the model parameters. And the other is the model external setting parameters, they need to be manually configured, such parameters are called hyperparameters. Because different hyperparameters impact the performance of model, hyperparameters are highly important in training tasks. Traditional methods require manual analysis of hyperparameters, manual debugging, and configuration, which consumes time and effort. MindInsight parameter tuning command can be used for automatic parameter tuning. 
Based on the parameter tuning configuration information provided by users, parameters can be automatically configured and model training can be performed. - -MindInsight provides `mindoptimizer`. This tuning command can extract past training summaries from the training log according to the user configuration, analyze past training records and recommend hyperpameters, and finally automate training scripts. When using it, users need to configure information such as the scope of hyperparameters in yaml format. And then users need to replace the hyperparameters in the training script according to the tutorial, with the aim of synchronizing the auto-recommended hyperparameters into the training script. Currently, only the Gauss process tuning method is supported, and other methods are gradually supported. - -## Installation - -This tool is a submodule of MindInsight. After MindInsight is installed, you can use the MindInsight parameter tuning command. For details about how to install MindInsight, see the [installation guide](https://gitee.com/mindspore/mindinsight/blob/master/README.md#). - -## Usage - -MindInsight provides parameters tuning command. The command-line interface (CLI) provides the following commands: - -```text -usage: mindoptimizer [-h] [--version] [--config CONFIG] - [--iter ITER] - -optional arguments: - -h, --help Shows the help message and exits. - --version Shows the program version and exits. - --config CONFIG Specifies the configuration file for parameter tuning. - The file format is yaml. - --iter ITER Specifies the times of automatic training. - Automatically recommended parameters are used every time - before the training is performed. - The default value of ITER is 1. -``` - -## Configuration File Rules - -The file format of the configuration file is yaml, which requires configurations of running command, the root directory of training summaries, tuning method, optimization objectives, and hyperparameters information. 
It is necessary to configure the information about hyperparameters including the range of values, types and sources, etc. MindInsight extracts training records from the training summary based on configured hyperparameters and optimization objectives, such as learning and accuracy. They can be used by recommended algorithms to analyze their relationships and better recommend hyperparameters. - -1. Configure the running command - - Use `command` to configure running command, such as `command: python train.py`. The running command is executed directly after the tuning program recommends hyperparameters. - -2. Configure the root directory of training summaries - - The `summary_base_dir` is the root directory of training summaries. It is also used for the extraction of training records, which makes hyperparameters better recommended. At the same time, it is recommended that users add `SummaryColletor` in their training scripts to collect training information, you can view the [summary collection tutorial](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/summary_record.html). The tuning command generates a subdirectory path based on the configured `summary_base_dir`, which can be configured to record the training record at `SummaryColletor`. Therefore, after training, the training information is recorded in the subdirecte of the root directory of training summaries, and the training information can be used as a training record to recommend the next required hyperparameters. Configure the `summary_base_dir`, such as `/home/summaries`. - -3. Configure the parameter tuning method - - Use `name` to specify the name of an acquisition function, and `args` tp specify parameters of the acquisition function. - - The current algorithm is Gaussian process regressor(GP). The acquisition functhon of GP is optional, and its range is in [`ucb`, `pi`,`ei`]. The default value is `ucb`. 
- - - Upper confidence bound (UCB) - - Probability of improvement (PI) - - Expected improvement (EI) - - For example: - - ```yaml - tuner: - name: gp - args: - method: ucb - ``` - -4. Configure the parameter tuning target - - You can select loss or self-defined metrics as the target. - - Configuration description: - - group: This parameter is optional. The value can be `system_defined` or `metric`. The default value is `system_defined`. Use `group` to configure the group in which the optimization target is located, such as the system custom collection field, which is the `system_defined` group. However, other evaluation metrics used in `Model()`, such as `model = Model(net, loss_fn=loss, optimizer=None, metrics={'Accuracy'})`. `Accuracy` belongs to the metrics, so the group is `metric`. - - goal: This parameter is optional. The value can be `minimize` or `maximize`. The default value is `minimize`. Use `goal` to indicate the optimization direction of the target. For example, if `Accuracy` is higher, the performance of model is better, so the `goal` needs to be configured as 'maximize'. - - Config `loss`: - - ```yaml - target: - name:loss - ``` - - Config `Accuracy` in metrics: - - ```yaml - target: - group: metric - name: Accuracy - goal: maximize - ``` - -5. Configure hyperparameters bounds, choice, type, and source - - Configuration fields for hyperparameters consist of `bounds`, `choices`, `type`, and `source`. The fields of hyperparameters configured here are used for extraction of training summaries and recommendation of hyperparameters. In addition, `bounds`, `choice`, and `type` affect the recommendation of hyperparameters. `bounds` are configured as the upper and lower boundaries of the hyperparameters, `choice` indicates that which recommended values are selected, and `type` is the type of parameter configured. - - The tunable fields currently collected by the system customization include `learning_rate`, `batch_size` and `epoch`. 
Other parameters are user-defined parameters will be automatically collected in the training summary during training if the source is configured as `user_defined`. - - - bounds: a list. The number of elements is 2. The first number is the lower bound min, and the second number is the upper bound max. The value range is [min, max). The method for generating a random number is `numpy.random.uniform()`. - - choice: a list. The number of values is not limited. Values are selected from the elements in the list. - - type: This parameter is mandatory and should be set to `int` or `float`. - - source: This parameter is optional. The value should be `system_defined` or `user_defined`. If the name of parameter exists in system-defined field, the default value is `system_defined`, otherwise, the default value is `user_defined`. - - > You need to choose either `bounds` or `choice`. If you have configured `choice`, values are selected from the configured list only, and if you have configured both `choice` and `type`, `type` does not take effect. - -## Usage Examples - -If you want to optimize the `learning_rate`, `batch_size`, and `momentum`, and the optimization objective is `Accuracy`, configure the YAML file as follows: - -1. Configure config.yaml - - ```yaml - command: sh /home/example/run_alexnet_ascend.sh - summary_base_dir: /home/summaries - tuner: - name: gp - target: - group: metric - name: Accuracy - goal: maximize - parameters: - learning_rate: - bounds: [0.00001, 0.001] - type: float - batch_size: - choice: [32, 64, 128, 256] - type: int - momentum: - source: user_defined - choice: [0.8, 0.9] - type: float - ``` - - > The name of `momentum` is not the same as that of the variable defined by the system. Therefore, you do not need to set the source field. - - **If the fields with the same name exist in the YAML file, the last one will be selected. 
Do not use the following method.** - - ```yaml - parameters: - learning_rate: - bounds: [0.0005, 0.001] - type: float - learning_rate: - source: user_defined - bounds: [0.00002, 0.0001] - type: float - ``` - -2. Instantiate the `HyperConfig` object in the training script - - (1) After instantiating `HyperConfig`, use the parameter variables of the `HyperConfig` instance as the values of the corresponding parameters in the training script. - (2) Please add `SummaryCollector` to collect lineage information, including hyperparameters and evaluation metrics. - - For example, the training script in [Model Zoo](https://www.mindspore.cn/doc/note/en/master/network_list_ms.html) is as follows: - - ```python - ds_train = create_dataset_cifar10(args.data_path, batch_size) - lr = Tensor(get_lr_cifar10(0, cfg.learning_rate, cfg.epoch_size, step_per_epoch)) - opt = nn.Momentum(network.trainable_params(), lr, cfg.momentum) - - model.train(cfg.epoch_size, ds_train, callbacks=[time_cb, ckpoint_cb, LossMonitor()] - ``` - - After the modification: - - ```python - from mindinsight.optimizer import HyperConfig - config = HyperConfig() - params = config.params - - # Replace batch_size with params.batch_size. - ds_train = create_dataset_cifar10(args.data_path, params.batch_size) - # Replace cfg.learning_rate with params.learning_rate. - lr = Tensor(get_lr_cifar10(0, params.learning_rate, cfg.epoch_size, step_per_epoch)) - # Replace cfg.momentum with params.momentum. - opt = nn.Momentum(network.trainable_params(), lr, params.momentum) - - # Instantiate SummaryCollector and add it to callback to automatically collect training information. - summary_cb = SummaryCollector(config.summary_dir) - model.train(cfg.epoch_size, ds_train, callbacks=[time_cb, ckpoint_cb, LossMonitor(), summary_cb] - ``` - -3. Execution - - Please make sure that the training script can be executed correctly before performing automatic tuning. 
- - ```shell - mindoptimizer --config ./config.yaml --iter 10 - ``` - - > Please fill in the training command to execute the training in the configuration file, and run the automatic tuning program in the directory where the training command can be successfully run. - -4. Visualization - - Enable MindInsight based on summary_base_dir configured in config.yaml. For details about the visualization method, see the [MindInsight start tutorial](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/mindinsight_commands.html#start-the-service). - -## Notices - -1. The training script is written and maintained by users. This tool does not automatically modify the training script. If the training script is incorrect, an error occurs when this tool is used to support the training script. -2. This tool does not process or modify the printed information during the running process. -3. Ensure that the parameter tuning process is trustworthy. If a parameter configuration error or script execution error occurs, the parameter tuning process will be terminated. You can locate the fault based on the displayed information. 
diff --git a/tutorials/training/source_en/advanced_use/images/adv_attack_result.png b/tutorials/training/source_en/advanced_use/images/adv_attack_result.png deleted file mode 100644 index 3169f95a7821e1a936a23d7c0cb54e28c8d0b858..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/adv_attack_result.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/auto_augmentation.png b/tutorials/training/source_en/advanced_use/images/auto_augmentation.png deleted file mode 100644 index 3daa904f181d2c7a6a2b6f7f2271c8e33f2ba933..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/auto_augmentation.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/bert_model.PNG b/tutorials/training/source_en/advanced_use/images/bert_model.PNG deleted file mode 100644 index 8dddbe6be41ae9ae4cd5fef0ea7e4ab1b98a46eb..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/bert_model.PNG and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/checkpoint_integrate_process.pptx b/tutorials/training/source_en/advanced_use/images/checkpoint_integrate_process.pptx deleted file mode 100644 index 29ecea853306ea5ea769915510ca755037797cf0..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/checkpoint_integrate_process.pptx and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/checkpoint_integration_process.jpg b/tutorials/training/source_en/advanced_use/images/checkpoint_integration_process.jpg deleted file mode 100644 index 39d89bc4a04f0076ac5fa435553e202eb1cc21b3..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/checkpoint_integration_process.jpg and /dev/null differ diff --git 
a/tutorials/training/source_en/advanced_use/images/cifar10.jpg b/tutorials/training/source_en/advanced_use/images/cifar10.jpg deleted file mode 100644 index 90057bb785d1be501b133985bd56bc7608d9051b..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/cifar10.jpg and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/cifar10_c_transforms.png b/tutorials/training/source_en/advanced_use/images/cifar10_c_transforms.png deleted file mode 100644 index 10dc267dc650764566f6d20b7f090e20c12f8e11..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/cifar10_c_transforms.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/cluster_iterative_trajectory.png b/tutorials/training/source_en/advanced_use/images/cluster_iterative_trajectory.png deleted file mode 100644 index c7708d80d361fbf11237ba52e05df56b6719792b..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/cluster_iterative_trajectory.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/compose.png b/tutorials/training/source_en/advanced_use/images/compose.png deleted file mode 100644 index 944e2057a85e73af07cb5f998dc8037c6204c3a5..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/compose.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/data_chart.png b/tutorials/training/source_en/advanced_use/images/data_chart.png deleted file mode 100644 index 9f1d5f4247472602823649909d934ad6f7160005..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/data_chart.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/data_conversion_concept.png 
b/tutorials/training/source_en/advanced_use/images/data_conversion_concept.png deleted file mode 100644 index 8646dc4777d73111d0178c41529a907d27c082ac..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/data_conversion_concept.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/data_conversion_concept.pptx b/tutorials/training/source_en/advanced_use/images/data_conversion_concept.pptx deleted file mode 100644 index 6c4c82e94b668f77312abedf916259898a635fa6..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/data_conversion_concept.pptx and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/data_enhancement_performance_scheme.png b/tutorials/training/source_en/advanced_use/images/data_enhancement_performance_scheme.png deleted file mode 100644 index 6e9a7457b3f082f43ee4d4c28c16f8d3d289c4c0..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/data_enhancement_performance_scheme.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/data_function.png b/tutorials/training/source_en/advanced_use/images/data_function.png deleted file mode 100644 index 921e61aaa7fb438274f394c535db99436234028c..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/data_function.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/data_label.png b/tutorials/training/source_en/advanced_use/images/data_label.png deleted file mode 100644 index ac79c2d53fe416e96b9ac841692b26f3eaf6ddd2..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/data_label.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/data_loading_performance_scheme.png 
b/tutorials/training/source_en/advanced_use/images/data_loading_performance_scheme.png deleted file mode 100644 index 2a9fe506e9528af6168bd40697661f3ab61db627..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/data_loading_performance_scheme.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/data_op_profile.png b/tutorials/training/source_en/advanced_use/images/data_op_profile.png deleted file mode 100644 index af3d87007363b0f71f61f9d35272636d8ec92501..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/data_op_profile.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/data_op_utilization.png b/tutorials/training/source_en/advanced_use/images/data_op_utilization.png deleted file mode 100644 index 94d83303fc156068002f182cfdef648aff360854..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/data_op_utilization.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/data_table.png b/tutorials/training/source_en/advanced_use/images/data_table.png deleted file mode 100644 index c9f73cd59b8202eff0121b4c57466f9b39d1d0b9..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/data_table.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/debugger_ask_recommend.png b/tutorials/training/source_en/advanced_use/images/debugger_ask_recommend.png deleted file mode 100755 index b25b57e86131544a8375eef42261833a480ffbab..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/debugger_ask_recommend.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/debugger_init_page.png b/tutorials/training/source_en/advanced_use/images/debugger_init_page.png deleted file 
mode 100755 index 665321b7fe5978d8412b85901c0a3af9573766ac..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/debugger_init_page.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/debugger_search_node_type.png b/tutorials/training/source_en/advanced_use/images/debugger_search_node_type.png deleted file mode 100755 index 21f5e8a25cb270caacd1fb152d6c1020b4d6f1e8..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/debugger_search_node_type.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/debugger_set_watch_point.png b/tutorials/training/source_en/advanced_use/images/debugger_set_watch_point.png deleted file mode 100755 index baf9f57495be5c3492cb33b6a88d5d077ec70f2c..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/debugger_set_watch_point.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/debugger_tensor_view.png b/tutorials/training/source_en/advanced_use/images/debugger_tensor_view.png deleted file mode 100755 index 62d7c97318799fc0ed8fa1f4a2a418116326bf74..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/debugger_tensor_view.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/debugger_waiting.png b/tutorials/training/source_en/advanced_use/images/debugger_waiting.png deleted file mode 100755 index 47dfb444fdca544d3817e892d887a65689cd04b3..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/debugger_waiting.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/debugger_watch_point_hit.png b/tutorials/training/source_en/advanced_use/images/debugger_watch_point_hit.png deleted file mode 100755 index 
e1da9aa506baeceda9bfe85a5b853bec415af7eb..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/debugger_watch_point_hit.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/debugger_watch_point_list.png b/tutorials/training/source_en/advanced_use/images/debugger_watch_point_list.png deleted file mode 100755 index 609186d4a80e6fe48551ae154e27b637158a9f60..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/debugger_watch_point_list.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/device_cpu_utilization.png b/tutorials/training/source_en/advanced_use/images/device_cpu_utilization.png deleted file mode 100644 index 5a054a03616c475772c3263f430cac8c809ecfaf..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/device_cpu_utilization.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/evaluate_the_model_during_training.png b/tutorials/training/source_en/advanced_use/images/evaluate_the_model_during_training.png deleted file mode 100644 index cbecb6c9739eaf047c89ea79f9d596a2793e6283..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/evaluate_the_model_during_training.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/finetune.PNG b/tutorials/training/source_en/advanced_use/images/finetune.PNG deleted file mode 100644 index cbdc6263669c5a40c910af9f5f2483d0d1137455..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/finetune.PNG and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/fuzz_res.png b/tutorials/training/source_en/advanced_use/images/fuzz_res.png deleted file mode 100644 index 
be6d022850438ff4b9c070f7225cbd950e1e3686..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/fuzz_res.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/fuzz_seed.png b/tutorials/training/source_en/advanced_use/images/fuzz_seed.png deleted file mode 100644 index cb138aebfabea1a1f778fbb65b6a0ee4533974e2..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/fuzz_seed.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/gomo.png b/tutorials/training/source_en/advanced_use/images/gomo.png deleted file mode 100644 index edbfdbcbf92c164d50363ff910a79315e92a10d9..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/gomo.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/gomo_diagram.pptx b/tutorials/training/source_en/advanced_use/images/gomo_diagram.pptx deleted file mode 100644 index 1e5195c25849f508bcba94a0844169963cafb3c2..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/gomo_diagram.pptx and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/gpu_activity_profiler.png b/tutorials/training/source_en/advanced_use/images/gpu_activity_profiler.png deleted file mode 100644 index 633599d845ffc1f308d704540dc501b5288038f4..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/gpu_activity_profiler.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/gpu_op_ui_profiler.png b/tutorials/training/source_en/advanced_use/images/gpu_op_ui_profiler.png deleted file mode 100644 index e8e1dcaacf5c1dbd80dafe9e634f60db1048efb9..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/gpu_op_ui_profiler.png and 
/dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/graph.png b/tutorials/training/source_en/advanced_use/images/graph.png deleted file mode 100644 index 0bc13636b5c84952978469c652c38500e6d34f43..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/graph.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/graph_kernel_example_custom_op.png b/tutorials/training/source_en/advanced_use/images/graph_kernel_example_custom_op.png deleted file mode 100644 index 22dfddef960b7792bfa181a1ad0356c6aed97c53..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/graph_kernel_example_custom_op.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/graph_kernel_example_fuse_basic.png b/tutorials/training/source_en/advanced_use/images/graph_kernel_example_fuse_basic.png deleted file mode 100644 index d446ff7b00111b8e5e61d9540397fb1e1cf79afc..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/graph_kernel_example_fuse_basic.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/graph_sidebar.png b/tutorials/training/source_en/advanced_use/images/graph_sidebar.png deleted file mode 100644 index e138bcfbbfda77ff3468442a3e5e169dcd7fed03..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/graph_sidebar.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/histogram.png b/tutorials/training/source_en/advanced_use/images/histogram.png deleted file mode 100644 index 5b240083bd7fe971b55537386e5e4ca6090a6bd2..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/histogram.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/histogram_func.png 
b/tutorials/training/source_en/advanced_use/images/histogram_func.png deleted file mode 100644 index 84dfd7f82e667b45d80fc7cf28761b4177d5df80..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/histogram_func.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/image_function.png b/tutorials/training/source_en/advanced_use/images/image_function.png deleted file mode 100644 index 4a43b649c106e81b70a0a5bb824bc6563cd2a66b..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/image_function.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/image_vi.png b/tutorials/training/source_en/advanced_use/images/image_vi.png deleted file mode 100644 index 1fe3ee2c28367d5fc5d7b322e49b3a731c91f620..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/image_vi.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/introduce.PNG b/tutorials/training/source_en/advanced_use/images/introduce.PNG deleted file mode 100644 index 137182a23932485aaa7fa8ee00aa768a7f947c12..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/introduce.PNG and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/introduce.svg b/tutorials/training/source_en/advanced_use/images/introduce.svg deleted file mode 100644 index d89e2d3955b160c17d28c71288ffe19ddd9c0111..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/images/introduce.svg +++ /dev/null @@ -1 +0,0 @@ -Poetry DatasetPre-trainingPre-trained modelsFine-TuningModelServiceMindSpore ServingBare-metal/Docker deploymentData pre-processingData post-precessingClientsHttpModelHub \ No newline at end of file diff --git a/tutorials/training/source_en/advanced_use/images/lineage_label.png 
b/tutorials/training/source_en/advanced_use/images/lineage_label.png deleted file mode 100644 index 15c88f91edb7e870246b85f9f4d96f00145d9199..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/lineage_label.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/lineage_model_chart.png b/tutorials/training/source_en/advanced_use/images/lineage_model_chart.png deleted file mode 100644 index 56d08cc34e51293a82aa63dd50fc1fa1c90e7ab3..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/lineage_model_chart.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/lineage_model_table.png b/tutorials/training/source_en/advanced_use/images/lineage_model_table.png deleted file mode 100644 index a288ac6aa099c69a8b7f5cf97183992adb94b71a..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/lineage_model_table.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/memory.png b/tutorials/training/source_en/advanced_use/images/memory.png deleted file mode 100644 index 40f41c3f78d0435ece50005983aeb8677c2939e1..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/memory.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/memory_graphics.png b/tutorials/training/source_en/advanced_use/images/memory_graphics.png deleted file mode 100644 index fcead6628c0eda7ed4696a4e5f8200c6a099771c..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/memory_graphics.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/minddata_profile.png b/tutorials/training/source_en/advanced_use/images/minddata_profile.png deleted file mode 100644 index 
984f72e2fdbccfe2b625b776356fdab57c90baad..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/minddata_profile.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/mindrecord.png b/tutorials/training/source_en/advanced_use/images/mindrecord.png deleted file mode 100644 index b136eedc031781eae22c698cce91491ed5d75712..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/mindrecord.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/mindrecord.pptx b/tutorials/training/source_en/advanced_use/images/mindrecord.pptx deleted file mode 100644 index fcb5dddc8237918ef53f70e986b47e0134413bbf..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/mindrecord.pptx and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/mix_precision.PNG b/tutorials/training/source_en/advanced_use/images/mix_precision.PNG deleted file mode 100644 index 93fa9ab6863f1dcd7076bc99c00bfacf3a3109fc..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/mix_precision.PNG and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/mix_precision.eddx b/tutorials/training/source_en/advanced_use/images/mix_precision.eddx deleted file mode 100644 index cfec9db4f1651e09cfb65e1c2b1bc1b1eef1fe2e..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/mix_precision.eddx and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/multi_scalars.png b/tutorials/training/source_en/advanced_use/images/multi_scalars.png deleted file mode 100644 index 0ea23d83bdc44a72d5118950575758ceb1178803..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/multi_scalars.png and /dev/null 
differ diff --git a/tutorials/training/source_en/advanced_use/images/multi_scalars_select.png b/tutorials/training/source_en/advanced_use/images/multi_scalars_select.png deleted file mode 100644 index 7153bd3002aad05fc68e4a879aa07f021af70e0a..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/multi_scalars_select.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/op_statistics.PNG b/tutorials/training/source_en/advanced_use/images/op_statistics.PNG deleted file mode 100644 index ac22f98dac493a5221481b9029e7539a95b29d85..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/op_statistics.PNG and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/op_type_statistics.PNG b/tutorials/training/source_en/advanced_use/images/op_type_statistics.PNG deleted file mode 100644 index 92cf3c96eca35ddf7ddc76430c24884526dbaafa..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/op_type_statistics.PNG and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/operator_fusion.png b/tutorials/training/source_en/advanced_use/images/operator_fusion.png deleted file mode 100644 index 249101458d0be59dd8f5ffff301bc4d70fb408f5..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/operator_fusion.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/performance_overall.png b/tutorials/training/source_en/advanced_use/images/performance_overall.png deleted file mode 100644 index 67d1dc36e9c2867071825663a295eab842ce8294..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/performance_overall.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/pipeline.png 
b/tutorials/training/source_en/advanced_use/images/pipeline.png deleted file mode 100644 index 8b93cf61e5e8074c140a644d8f60b604c9c4f717..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/pipeline.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/process_cpu_utilizaton.png b/tutorials/training/source_en/advanced_use/images/process_cpu_utilizaton.png deleted file mode 100644 index 6d4977e542b7c22ef08f12afefeba19cfc413cf7..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/process_cpu_utilizaton.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/resource_visibility.png b/tutorials/training/source_en/advanced_use/images/resource_visibility.png deleted file mode 100644 index 7156f76acd6de337fb57784528ffce783d65611b..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/resource_visibility.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/resource_visibility_gpu.png b/tutorials/training/source_en/advanced_use/images/resource_visibility_gpu.png deleted file mode 100644 index 84f2b74ff47d734b28c67334ce24dde1268a97b1..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/resource_visibility_gpu.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/scalar.png b/tutorials/training/source_en/advanced_use/images/scalar.png deleted file mode 100644 index 93eac7ab61ea9eb28d24a98e8beb9dd183206e9a..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/scalar.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/scalar_compound.png b/tutorials/training/source_en/advanced_use/images/scalar_compound.png deleted file mode 100644 index 
c248af843f3e850eda275d33eaadcfaba4304840..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/scalar_compound.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/scalar_select.png b/tutorials/training/source_en/advanced_use/images/scalar_select.png deleted file mode 100644 index 056797d9da760ad9878c86e09732eac6c6bac303..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/scalar_select.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/shuffle_performance_scheme.png b/tutorials/training/source_en/advanced_use/images/shuffle_performance_scheme.png deleted file mode 100644 index 5cce3e29578530761a36b0ab716bd7feb463b639..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/shuffle_performance_scheme.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/single_car_performance_overall.png b/tutorials/training/source_en/advanced_use/images/single_car_performance_overall.png deleted file mode 100644 index e166f05cd7588e58092f41366617773099bfdd10..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/single_car_performance_overall.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/step_trace.png b/tutorials/training/source_en/advanced_use/images/step_trace.png deleted file mode 100644 index 6eac06f3d1ffc34c176c6a52d979e5e135571507..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/step_trace.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/targets.png b/tutorials/training/source_en/advanced_use/images/targets.png deleted file mode 100644 index 7268ff92fe52003c5721a5d9d4f8eeed16e7ea45..0000000000000000000000000000000000000000 Binary files 
a/tutorials/training/source_en/advanced_use/images/targets.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/tensor_function.png b/tutorials/training/source_en/advanced_use/images/tensor_function.png deleted file mode 100644 index 43dbda65cbe55a6e7e3388808087469f11186dde..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/tensor_function.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/tensor_histogram.png b/tutorials/training/source_en/advanced_use/images/tensor_histogram.png deleted file mode 100644 index 967a452efde4efc9f464782244f4e790417b7122..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/tensor_histogram.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/tensor_table.png b/tutorials/training/source_en/advanced_use/images/tensor_table.png deleted file mode 100644 index f2d1ad90b3930f71fa4014d94ae52df909bea434..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/tensor_table.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/timeline.png b/tutorials/training/source_en/advanced_use/images/timeline.png deleted file mode 100644 index 19c60e104169d86f1022758eda15bbc9c8a0dcf6..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/timeline.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/xai_hoc.png b/tutorials/training/source_en/advanced_use/images/xai_hoc.png deleted file mode 100644 index 0627470dcf5313b1825dc255b2a39e3b492dbf4f..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/xai_hoc.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/xai_hoc_index.png 
b/tutorials/training/source_en/advanced_use/images/xai_hoc_index.png deleted file mode 100644 index c8bec0336bd41ff2282630259905cdee44beb414..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/xai_hoc_index.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/xai_index.png b/tutorials/training/source_en/advanced_use/images/xai_index.png deleted file mode 100644 index d0b10a87c216d64f1af0fcdf9192f6e666e940c7..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/xai_index.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/xai_metrix_class.png b/tutorials/training/source_en/advanced_use/images/xai_metrix_class.png deleted file mode 100644 index d0d346f3d6615c2336f2126e0022d135b2b7ef04..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/xai_metrix_class.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/xai_metrix_comprehensive.png b/tutorials/training/source_en/advanced_use/images/xai_metrix_comprehensive.png deleted file mode 100644 index c94c001eccdf005b36c814b0dbacb0f3dd831e7e..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/xai_metrix_comprehensive.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/xai_saliency_map.png b/tutorials/training/source_en/advanced_use/images/xai_saliency_map.png deleted file mode 100644 index 43bd72adb1ba329536ff07a950d87aa619bba27d..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/xai_saliency_map.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/xai_saliency_map_detail.png b/tutorials/training/source_en/advanced_use/images/xai_saliency_map_detail.png deleted file mode 100644 index 
a9df098a81a7b3aa93353e613901a7785cdc6cc2..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/xai_saliency_map_detail.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/images/xai_uncertainty.png b/tutorials/training/source_en/advanced_use/images/xai_uncertainty.png deleted file mode 100644 index 73740a3abda20d56065c76c5309af854eb083d84..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/advanced_use/images/xai_uncertainty.png and /dev/null differ diff --git a/tutorials/training/source_en/advanced_use/implement_high_order_differentiation.md b/tutorials/training/source_en/advanced_use/implement_high_order_differentiation.md deleted file mode 100644 index 9f850d41bd0d80d92a8af6aea3e4537604581d9d..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/implement_high_order_differentiation.md +++ /dev/null @@ -1,401 +0,0 @@ -# Implementing High-order Automatic Differentiation - -`CPU` `GPU` `Ascend` `Whole Process` `Beginner` `Intermediate` `Expert` - - - -- [Implementing High-order Automatic Differentiation](#implementing-high-order-automatic-differentiation) - - [Overview](#overview) - - [First-order Derivation](#first-order-derivation) - - [Input Derivation](#input-derivation) - - [Weight Derivation](#weight-derivation) - - [Gradient Value Scaling](#gradient-value-scaling) - - [High-order Derivation](#high-order-derivation) - - [Single-input Single-output High-order Derivative](#single-input-single-output-high-order-derivative) - - [Single-input Multi-output High-order Derivative](#single-input-multi-output-high-order-derivative) - - [Multiple-Input Multiple-Output High-Order Derivative](#multiple-input-multiple-output-high-order-derivative) - - [Support for Second-order Differential Operators](#support-for-second-order-differential-operators) - - [References](#references) - - - - - -## Overview - -High-order 
differentiation is used in domains such as AI-supported scientific computing and second-order optimization. For example, in the molecular dynamics simulation, when the potential energy is trained using the neural network[1], the derivative of the neural network output to the input needs to be computed in the loss function, and then the second-order cross derivative of the loss function to the input and the weight exists in backward propagation. In addition, the second-order derivatives of the output to the input exist in differential equations solved by AI (such as PINNs[2]). Another example is that in order to enable the neural network to converge quickly in the second-order optimization, the second-order derivative of the loss function to the weight needs to be computed using the Newton method. The following describes the high-order derivatives in MindSpore graph mode. - -> For details about the complete sample code, see [Derivation Sample Code](https://gitee.com/mindspore/docs/tree/master/tutorials/tutorial_code). - -## First-order Derivation - -The first-order derivative method of MindSpore is `mindspore.ops.GradOperation (get_all=False, get_by_list=False, sens_param=False)`. When `get_all` is set to `False`, the first input derivative is computed. When `get_all` is set to `True`, all input derivatives are computed. When `get_by_list` is set to `False`, weight derivation is not performed. When `get_by_list` is set to `True`, weight derivation is performed. `sens_param` scales the output value of the network to change the final gradient. Therefore, its dimension is consistent with the output dimension. The following uses the first-order derivation of the MatMul operator for in-depth analysis. 
- -### Input Derivation - -The input derivation code is as follows: - -```python -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import ParameterTuple, Parameter -from mindspore import dtype as mstype -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.matmul = ops.MatMul() - self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z') - def construct(self, x, y): - x = x * self.z - out = self.matmul(x, y) - return out - -class GradNetWrtX(nn.Cell): - def __init__(self, net): - super(GradNetWrtX, self).__init__() - self.net = net - self.grad_op = ops.GradOperation() - def construct(self, x, y): - gradient_function = self.grad_op(self.net) - return gradient_function(x, y) - -x = Tensor([[0.8, 0.6, 0.2], [1.8, 1.3, 1.1]], dtype=mstype.float32) -y = Tensor([[0.11, 3.3, 1.1], [1.1, 0.2, 1.4], [1.1, 2.2, 0.3]], dtype=mstype.float32) -output = GradNetWrtX(Net())(x, y) -print(output) -``` - -The output is as follows: - -```python -[[4.5099998 2.7 3.6000001] - [4.5099998 2.7 3.6000001]] -``` - -To facilitate analysis, inputs `x`, `y`, and `z` can be expressed as follows: - -```python -x = Tensor([[x1, x2, x3], [x4, x5, x6]]) -y = Tensor([[y1, y2, y3], [y4, y5, y6], [y7, y8, y9]]) -z = Tensor([z]) -``` - -The following forward result can be obtained based on the definition of the MatMul operator: - -$output = [[(x1 \cdot y1 + x2 \cdot y4 + x3 \cdot y7) \cdot z, (x1 \cdot y2 + x2 \cdot y5 + x3 \cdot y8) \cdot z, (x1 \cdot y3 + x2 \cdot y6 + x3 \cdot y9) \cdot z]$, - -$[(x4 \cdot y1 + x5 \cdot y4 + x6 \cdot y7) \cdot z, (x4 \cdot y2 + x5 \cdot y5 + x6 \cdot y8) \cdot z, (x4 \cdot y3 + x5 \cdot y6 + x6 \cdot y9) \cdot z]]$ - -MindSpore uses the Reverse[3] automatic differentiation mechanism during gradient computation. 
The output result is summed and then the derivative of the input `x` is computed. - -(1) Summation formula: - -$\sum{output} = [(x1 \cdot y1 + x2 \cdot y4 + x3 \cdot y7) + (x1 \cdot y2 + x2 \cdot y5 + x3 \cdot y8) + (x1 \cdot y3 + x2 \cdot y6 + x3 \cdot y9) +$ - -$(x4 \cdot y1 + x5 \cdot y4 + x6 \cdot y7) + (x4 \cdot y2 + x5 \cdot y5 + x6 \cdot y8) + (x4 \cdot y3 + x5 \cdot y6 + x6 \cdot y9)] \cdot z$ - -(2) Derivation formula: - -$\frac{\mathrm{d}(\sum{output})}{\mathrm{d}x} = [[(y1 + y2 + y3) \cdot z, (y4 + y5 + y6) \cdot z, (y7 + y8 + y9) \cdot z], [(y1 + y2 + y3) \cdot z, (y4 + y5 + y6) \cdot z, (y7 + y8 + y9) \cdot z]]$ - -(3) Computation result: - -$\frac{\mathrm{d}(\sum{output})}{\mathrm{d}x} = [[4.5099998 \quad 2.7 \quad 3.6000001] [4.5099998 \quad 2.7 \quad 3.6000001]]$ - -If the derivatives of the `x` and `y` inputs are considered, you only need to set `self.grad_op = GradOperation(get_all=True)` in `GradNetWrtX`. - -### Weight Derivation - -If the derivation of weights is considered, change `GradNetWrtX` to the following: - -```python -class GradNetWrtX(nn.Cell): - def __init__(self, net): - super(GradNetWrtX, self).__init__() - self.net = net - self.params = ParameterTuple(net.trainable_params()) - self.grad_op = ops.GradOperation(get_by_list=True) - def construct(self, x, y): - gradient_function = self.grad_op(self.net, self.params) - return gradient_function(x, y) -``` - -```python -output = GradNetWrtX(Net())(x, y) -print(output) -``` - -The output is as follows: - -```python -(Tensor(shape=[1], dtype=Float32, value= [ 2.15359993e+01]),) -``` - -The derivation formula is changed to: - -$\frac{\mathrm{d}(\sum{output})}{\mathrm{d}z} = (x1 \cdot y1 + x2 \cdot y4 + x3 \cdot y7) + (x1 \cdot y2 + x2 \cdot y5 + x3 \cdot y8) + (x1 \cdot y3 + x2 \cdot y6 + x3 \cdot y9) + $ - -$(x4 \cdot y1 + x5 \cdot y4 + x6 \cdot y7) + (x4 \cdot y2 + x5 \cdot y5 + x6 \cdot y8) + (x4 \cdot y3 + x5 \cdot y6 + x6 \cdot y9)$ - -Computation result - 
-$\frac{\mathrm{d}(\sum{output})}{\mathrm{d}z} = [2.15359993e+01]$ - -### Gradient Value Scaling - -You can use the `sens_param` parameter to control the scaling of the gradient value. - -```python -class GradNetWrtX(nn.Cell): - def __init__(self, net): - super(GradNetWrtX, self).__init__() - self.net = net - self.grad_op = ops.GradOperation(sens_param=True) - self.grad_wrt_output = Tensor([[0.1, 0.6, 0.2], [0.8, 1.3, 1.1]], dtype=mstype.float32) - def construct(self, x, y): - gradient_function = self.grad_op(self.net) - return gradient_function(x, y, self.grad_wrt_output) -``` - -```python -output = GradNetWrtX(Net())(x, y) -print(output) -``` - -The output is as follows: - -```python -[[2.211 0.51 1.49 ] - [5.588 2.68 4.07 ]] -``` - -`self.grad_wrt_output` may be denoted as the following form: - -```python -self.grad_wrt_output = Tensor([[s1, s2, s3], [s4, s5, s6]]) -``` - -The output value after scaling is the product of the original output value and the element corresponding to `self.grad_wrt_output`. - -$output = [[(x1 \cdot y1 + x2 \cdot y4 + x3 \cdot y7) \cdot z \cdot s1, (x1 \cdot y2 + x2 \cdot y5 + x3 \cdot y8) \cdot z \cdot s2, (x1 \cdot y3 + x2 \cdot y6 + x3 \cdot y9) \cdot z \cdot s3], $ - -$[(x4 \cdot y1 + x5 \cdot y4 + x6 \cdot y7) \cdot z \cdot s4, (x4 \cdot y2 + x5 \cdot y5 + x6 \cdot y8) \cdot z \cdot s5, (x4 \cdot y3 + x5 \cdot y6 + x6 \cdot y9) \cdot z \cdot s6]$ - -The derivation formula is changed to compute the derivative of the sum of the output values to each element of `x`. 
- -$\frac{\mathrm{d}(\sum{output})}{\mathrm{d}x} = [[(s1 \cdot y1 + s2 \cdot y2 + s3 \cdot y3) \cdot z, (s1 \cdot y4 + s2 \cdot y5 + s3 \cdot y6) \cdot z, (s1 \cdot y7 + s2 \cdot y8 + s3 \cdot y9) \cdot z], $ - -$[(s4 \cdot y1 + s5 \cdot y2 + s6 \cdot y3) \cdot z, (s4 \cdot y4 + s5 \cdot y5 + s6 \cdot y6) \cdot z, (s4 \cdot y7 + s5 \cdot y8 + s6 \cdot y9) \cdot z]$ - -To compute the derivative of a single output (for example, `output[0][0]`) to the input, set the scaling value of the corresponding position to 1, and set the scaling values of other positions to 0. You can also change the network structure as follows: - -```python -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.matmul = ops.MatMul() - self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z') - def construct(self, x, y): - x = x * self.z - out = self.matmul(x, y) - return out[0][0] -``` - -```python -output = GradNetWrtX(Net())(x, y) -print(output) -``` - -The output is as follows: - -```python -[[0.11 1.1 1.1] - [0. 0. 0. ]] -``` - -## High-order Derivation - -MindSpore can support high-order derivatives by computing derivatives for multiple times. The following uses several examples to describe how to compute derivatives. 
- -### Single-input Single-output High-order Derivative - -For example, the second-order derivative (-Sin) of the Sin operator is implemented as follows: - -```python -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.sin = ops.Sin() - def construct(self, x): - out = self.sin(x) - return out - -class Grad(nn.Cell): - def __init__(self, network): - super(Grad, self).__init__() - self.grad = ops.GradOperation() - self.network = network - def construct(self, x): - gout= self.grad(self.network)(x) - return gout -class GradSec(nn.Cell): - def __init__(self, network): - super(GradSec, self).__init__() - self.grad = ops.GradOperation() - self.network = network - def construct(self, x): - gout= self.grad(self.network)(x) - return gout - -net=Net() -firstgrad = Grad(net) # first order -secondgrad = GradSec(firstgrad) # second order -x_train = Tensor(np.array([1.0], dtype=np.float32)) -output = secondgrad(x_train) -print(output) -``` - -The output is as follows: - -```python -[-0.841471] -``` - -### Single-input Multi-output High-order Derivative - -For example, for a multiplication operation with multiple outputs, a high-order derivative of the multiplication operation is as follows: - -```python -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import dtype as mstype -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.mul = ops.Mul() - def construct(self, x): - out = self.mul(x, x) - return out - -class Grad(nn.Cell): - def __init__(self, network): - super(Grad, self).__init__() - self.grad = ops.GradOperation(sens_param=False) - self.network = network - 
def construct(self, x): - gout = self.grad(self.network)(x) - return gout -class GradSec(nn.Cell): - def __init__(self, network): - super(GradSec, self).__init__() - self.grad = ops.GradOperation(sens_param=False) - self.network = network - def construct(self, x): - gout = self.grad(self.network)(x) - return gout - -net=Net() -firstgrad = Grad(net) # first order -secondgrad = GradSec(firstgrad) # second order -x = Tensor([0.1, 0.2, 0.3], dtype=mstype.float32) -output = secondgrad(x) -print(output) -``` - -The output is as follows: - -```python -[2. 2. 2.] -``` - -### Multiple-Input Multiple-Output High-Order Derivative - -For example, if a neural network has multiple inputs `x` and `y`, second-order derivatives `dxdx`, `dydy`, `dxdy`, and `dydx` may be obtained by using a gradient scaling mechanism as follows: - -```python -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.mul = ops.Mul() - - def construct(self, x, y): - x_square = self.mul(x, x) - x_square_y = self.mul(x_square, y) - return x_square_y - -class Grad(nn.Cell): - def __init__(self, network): - super(Grad, self).__init__() - self.grad = ops.GradOperation(get_all=True, sens_param=False) - self.network = network - def construct(self, x, y): - gout = self.grad(self.network)(x, y) # return dx, dy - return gout - -class GradSec(nn.Cell): - def __init__(self, network): - super(GradSec, self).__init__() - self.grad = ops.GradOperation(get_all=True, sens_param=True) - self.network = network - self.sens1 = Tensor(np.array([1]).astype('float32')) - self.sens2 = Tensor(np.array([0]).astype('float32')) - def construct(self, x, y): - dxdx, dxdy = self.grad(self.network)(x, y, (self.sens1,self.sens2)) - dydx, dydy = self.grad(self.network)(x, y, (self.sens2,self.sens1)) - return 
dxdx, dxdy, dydx, dydy - -net = Net() -firstgrad = Grad(net) # first order -secondgrad = GradSec(firstgrad) # second order -x_train = Tensor(np.array([4],dtype=np.float32)) -y_train = Tensor(np.array([5],dtype=np.float32)) -dxdx, dxdy, dydx, dydy = secondgrad(x_train, y_train) -print(dxdx, dxdy, dydx, dydy) -``` - -The output is as follows: - -```python -[10] [8.] [8.] [0.] -``` - -Specifically, results of computing the first-order derivatives are `dx` and `dy`. If `dxdx` is computed, only the first-order derivative `dx` needs to be retained, and scaling values corresponding to `x` and `y` are set to 1 and 0 respectively, that is, `self.grad(self.network)(x, y, (self.sens1,self.sens2))`. Similarly, if `dydy` is computed, only the first-order derivative `dy` is retained, and `sens_param` corresponding to `x` and `y` is set to 0 and 1, respectively, that is, `self.grad(self.network)(x, y, (self.sens2,self.sens1))`. - -## Support for Second-order Differential Operators - -CPU supports the following operators: [Square](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Square.html#mindspore.ops.Square), -[Exp](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Exp.html#mindspore.ops.Exp), [Neg](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Neg.html#mindspore.ops.Neg), [Mul](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Mul.html#mindspore.ops.Mul), and [MatMul](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.MatMul.html#mindspore.ops.MatMul). 
- -GPU supports the following operators: [Pow](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Pow.html#mindspore.ops.Pow), [Log](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Log.html#mindspore.ops.Log), [Square](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Square.html#mindspore.ops.Square), [Exp](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Exp.html#mindspore.ops.Exp), [Neg](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Neg.html#mindspore.ops.Neg), [Mul](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Mul.html#mindspore.ops.Mul), [Div](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Div.html#mindspore.ops.Div), [MatMul](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.MatMul.html#mindspore.ops.MatMul), [Sin](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Sin.html#mindspore.ops.Sin), [Cos](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Cos.html#mindspore.ops.Cos), [Tan](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Tan.html#mindspore.ops.Tan) and [Atanh](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Atanh.html#mindspore.ops.Atanh). 
- -Ascend supports the following operators: [Pow](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Pow.html#mindspore.ops.Pow), [Log](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Log.html#mindspore.ops.Log), [Square](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Square.html#mindspore.ops.Square), [Exp](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Exp.html#mindspore.ops.Exp), [Neg](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Neg.html#mindspore.ops.Neg), [Mul](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Mul.html#mindspore.ops.Mul), [Div](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Div.html#mindspore.ops.Div), [MatMul](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.MatMul.html#mindspore.ops.MatMul), [Sin](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Sin.html#mindspore.ops.Sin), [Cos](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Cos.html#mindspore.ops.Cos), [Tan](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Tan.html#mindspore.ops.Tan), [Sinh](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Sinh.html#mindspore.ops.Sinh), [Cosh](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Cosh.html#mindspore.ops.Cosh) and [Atanh](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.Atanh.html#mindspore.ops.Atanh). - -## References - -[1] Zhang L, Han J, Wang H, et al. [Deep potential molecular dynamics: a scalable model with the accuracy of quantum mechanics[J]](https://arxiv.org/pdf/1707.09571v2.pdf). Physical review letters, 2018, 120(14): 143001. - -[2] Raissi M, Perdikaris P, Karniadakis G E. 
[Physics informed deep learning (part i): Data-driven solutions of nonlinear partial differential equations[J]](https://arxiv.org/pdf/1711.10561.pdf). arXiv preprint arXiv:1711.10561, 2017. - -[3] Baydin A G, Pearlmutter B A, Radul A A, et al. [Automatic differentiation in machine learning: a survey[J]](https://jmlr.org/papers/volume18/17-468/17-468.pdf). The Journal of Machine Learning Research, 2017, 18(1): 5595-5637. diff --git a/tutorials/training/source_en/advanced_use/improve_model_security_nad.md b/tutorials/training/source_en/advanced_use/improve_model_security_nad.md deleted file mode 100644 index 12cf1a064438d2e406b2b10fe17ad2faf267981a..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/improve_model_security_nad.md +++ /dev/null @@ -1,331 +0,0 @@ -# Improving Model Security with NAD Algorithm - -`Linux` `Ascend` `GPU` `CPU` `Model Training` `Model Optimization` `Enterprise` `Expert` - - - -- [Improving Model Security with NAD Algorithm](#improving-model-security-with-nad-algorithm) - - [Overview](#overview) - - [Creating an Target Model](#creating-an-target-model) - - [Importing Related Packages](#importing-related-packages) - - [Loading the Dataset](#loading-the-dataset) - - [Creating the Model](#creating-the-model) - - [Adversarial Attack](#adversarial-attack) - - [Adversarial Defense](#adversarial-defense) - - [Defense Implementation](#defense-implementation) - - [Defense Effect](#defense-effect) - - - - - -## Overview - -This tutorial describes the model security protection methods provided by MindArmour, helping you quickly use MindArmour and provide certain security protection capabilities for your AI model. - -At the beginning of AI algorithm design, related security threats are sometimes not considered. As a result, the developed AI model may easily be affected by malicious attackers, leading to inaccurate judgment of the AI system. 
An attacker adds small perturbations that are not easily perceived by human to the original sample, causing deep learning model misjudgment. This is called an adversarial example attack. MindArmour model security toolkit provides functions such as adversarial example generation, adversarial example detection, model defense, and attack/defense effect evaluation, providing important support for AI model security research and AI application security. - -- The adversarial example generation module enables security engineers to quickly and efficiently generate adversarial examples for attacking AI models. -- The adversarial example detection and defense modules allow users to detect and filter adversarial examples and enhance the robustness of AI models to adversarial examples. -- The evaluation module provides multiple metrics to comprehensively evaluate the attack and defense performance of adversarial examples. - -This section describes how to use MindArmour in adversarial attack and defense by taking the Fast Gradient Sign Method (FGSM) attack algorithm and Natural Adversarial Defense (NAD) algorithm as examples. - -> The current sample is for CPU, GPU and Ascend 910 AI processor. You can find the complete executable sample code at -> - -## Creating an Target Model - -The MNIST dataset is used as an example to describe how to customize a simple model as the target model. 
- -### Importing Related Packages - -```python -import os -import numpy as np -from scipy.special import softmax - -from mindspore import dataset as ds -from mindspore import dtype as mstype -import mindspore.dataset.vision.c_transforms as CV -import mindspore.dataset.transforms.c_transforms as C -from mindspore.dataset.vision import Inter -import mindspore.nn as nn -from mindspore.nn import SoftmaxCrossEntropyWithLogits -from mindspore.common.initializer import TruncatedNormal -from mindspore import Model, Tensor, context -from mindspore.train.callback import LossMonitor - -from mindarmour.adv_robustness.attacks import FastGradientSignMethod -from mindarmour.utils.logger import LogUtil -from mindarmour.adv_robustness.evaluations import AttackEvaluate - -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") - -LOGGER = LogUtil.get_instance() -LOGGER.set_level("INFO") -TAG = 'demo' -``` - -### Loading the Dataset - -Use the `MnistDataset` API provided by the MindSpore dataset to load the MNIST dataset. 
- -```python -# generate dataset for train or test -def generate_mnist_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1, sparse=True): - """ - create dataset for training or testing - """ - # define dataset - ds1 = ds.MnistDataset(data_path) - - # define operation parameters - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), - interpolation=Inter.LINEAR) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - # apply map operations on images - if not sparse: - one_hot_enco = C.OneHot(10) - ds1 = ds1.map(operations=one_hot_enco, input_columns="label", - num_parallel_workers=num_parallel_workers) - type_cast_op = C.TypeCast(mstype.float32) - ds1 = ds1.map(operations=type_cast_op, input_columns="label", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=resize_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=rescale_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=hwc2chw_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - ds1 = ds1.shuffle(buffer_size=buffer_size) - ds1 = ds1.batch(batch_size, drop_remainder=True) - ds1 = ds1.repeat(repeat_size) - - return ds1 -``` - -### Creating the Model - -The LeNet model is used as an example. You can also create and train your own model. - -1. Define the LeNet model network. 
- - ```python - def conv(in_channels, out_channels, kernel_size, stride=1, padding=0): - weight = weight_variable() - return nn.Conv2d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, padding=padding, - weight_init=weight, has_bias=False, pad_mode="valid") - - - def fc_with_initialize(input_channels, out_channels): - weight = weight_variable() - bias = weight_variable() - return nn.Dense(input_channels, out_channels, weight, bias) - - - def weight_variable(): - return TruncatedNormal(0.02) - - - class LeNet5(nn.Cell): - """ - Lenet network - """ - def __init__(self): - super(LeNet5, self).__init__() - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16*5*5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, 10) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x - ``` - -2. Train LeNet model. Use the defined data loading function `generate_mnist_dataset` to load data. 
- - ```python - mnist_path = "../common/dataset/MNIST/" - batch_size = 32 - # train original model - ds_train = generate_mnist_dataset(os.path.join(mnist_path, "train"), - batch_size=batch_size, repeat_size=1, - sparse=False) - net = LeNet5() - loss = SoftmaxCrossEntropyWithLogits(sparse=False) - opt = nn.Momentum(net.trainable_params(), 0.01, 0.09) - model = Model(net, loss, opt, metrics=None) - model.train(10, ds_train, callbacks=[LossMonitor()], - dataset_sink_mode=False) - - # get test data - ds_test = generate_mnist_dataset(os.path.join(mnist_path, "test"), - batch_size=batch_size, repeat_size=1, - sparse=False) - inputs = [] - labels = [] - for data in ds_test.create_tuple_iterator(): - inputs.append(data[0].asnumpy().astype(np.float32)) - labels.append(data[1].asnumpy()) - test_inputs = np.concatenate(inputs) - test_labels = np.concatenate(labels) - ``` - -3. Test the model. - - ```python - # prediction accuracy before attack - test_logits = net(Tensor(test_inputs)).asnumpy() - - tmp = np.argmax(test_logits, axis=1) == np.argmax(test_labels, axis=1) - accuracy = np.mean(tmp) - LOGGER.info(TAG, 'prediction accuracy before attacking is : %s', accuracy) - - ``` - - The classification accuracy reaches 98%. - - ```python - prediction accuracy before attacking is : 0.9895833333333334 - ``` - -## Adversarial Attack - -Call the FGSM API provided by MindArmour. 
- -```python -# attacking -# get adv data -attack = FastGradientSignMethod(net, eps=0.3, loss_fn=loss) -adv_data = attack.batch_generate(test_inputs, test_labels) - -# get accuracy of adv data on original model -adv_logits = net(Tensor(adv_data)).asnumpy() -adv_proba = softmax(adv_logits, axis=1) -tmp = np.argmax(adv_proba, axis=1) == np.argmax(test_labels, axis=1) -accuracy_adv = np.mean(tmp) -LOGGER.info(TAG, 'prediction accuracy after attacking is : %s', accuracy_adv) - -attack_evaluate = AttackEvaluate(test_inputs.transpose(0, 2, 3, 1), - test_labels, - adv_data.transpose(0, 2, 3, 1), - adv_proba) -LOGGER.info(TAG, 'mis-classification rate of adversaries is : %s', - attack_evaluate.mis_classification_rate()) -LOGGER.info(TAG, 'The average confidence of adversarial class is : %s', - attack_evaluate.avg_conf_adv_class()) -LOGGER.info(TAG, 'The average confidence of true class is : %s', - attack_evaluate.avg_conf_true_class()) -LOGGER.info(TAG, 'The average distance (l0, l2, linf) between original ' - 'samples and adversarial samples are: %s', - attack_evaluate.avg_lp_distance()) -LOGGER.info(TAG, 'The average structural similarity between original ' - 'samples and adversarial samples are: %s', - attack_evaluate.avg_ssim()) -``` - -The attack results are as follows: - -```text -prediction accuracy after attacking is : 0.052083 -mis-classification rate of adversaries is : 0.947917 -The average confidence of adversarial class is : 0.803375 -The average confidence of true class is : 0.042139 -The average distance (l0, l2, linf) between original samples and adversarial samples are: (1.698870, 0.465888, 0.300000) -The average structural similarity between original samples and adversarial samples are: 0.332538 -``` - -After the untargeted FGSM attack is performed on the model, the accuracy of model decreases from 98.9% to 5.2% on adversarial examples, while the misclassification ratio reaches 95%, and the Average Confidence of Adversarial Class (ACAC) is 0.803375, the 
Average Confidence of True Class (ACTC) is 0.042139. The zero-norm distance, two-norm distance, and infinity-norm distance between the generated adversarial examples and the original benign examples are provided. The average structural similarity between each adversarial example and the original example is 0.332538. It takes 0.003125s to generate an adversarial example on average. - -The following figure shows the effect before and after the attack. The left part is the original example, and the right part is the adversarial example generated after the untargeted FGSM attack. From a visual point of view, there is little difference between the right images and the left images, but all images on the right successfully mislead the model into misclassifying the sample as other incorrect categories. - -![adv_attack_result](./images/adv_attack_result.png) - -## Adversarial Defense - -Natural Adversarial Defense (NAD) is a simple and effective adversarial example defense method, via adversarial training. It constructs adversarial examples during model training and mixes the adversarial examples with original examples to train the model. As the number of training iterations increases, the robustness of the model against adversarial examples improves. The NAD algorithm uses FGSM as the attack algorithm to construct adversarial examples. - -### Defense Implementation - -Call the NAD API provided by MindArmour. 
- -```python -from mindarmour.adv_robustness.defenses import NaturalAdversarialDefense - - -# defense -net.set_train() -nad = NaturalAdversarialDefense(net, loss_fn=loss, optimizer=opt, - bounds=(0.0, 1.0), eps=0.3) -nad.batch_defense(test_inputs, test_labels, batch_size=32, epochs=10) - -# get accuracy of test data on defensed model -net.set_train(False) -test_logits = net(Tensor(test_inputs)).asnumpy() - -tmp = np.argmax(test_logits, axis=1) == np.argmax(test_labels, axis=1) -accuracy = np.mean(tmp) -LOGGER.info(TAG, 'accuracy of TEST data on defensed model is : %s', accuracy) - -# get accuracy of adv data on defensed model -adv_logits = net(Tensor(adv_data)).asnumpy() -adv_proba = softmax(adv_logits, axis=1) -tmp = np.argmax(adv_proba, axis=1) == np.argmax(test_labels, axis=1) -accuracy_adv = np.mean(tmp) - -attack_evaluate = AttackEvaluate(test_inputs.transpose(0, 2, 3, 1), - test_labels, - adv_data.transpose(0, 2, 3, 1), - adv_proba) - -LOGGER.info(TAG, 'accuracy of adv data on defensed model is : %s', - np.mean(accuracy_adv)) -LOGGER.info(TAG, 'defense mis-classification rate of adversaries is : %s', - attack_evaluate.mis_classification_rate()) -LOGGER.info(TAG, 'The average confidence of adversarial class is : %s', - attack_evaluate.avg_conf_adv_class()) -LOGGER.info(TAG, 'The average confidence of true class is : %s', - attack_evaluate.avg_conf_true_class()) -``` - -### Defense Effect - -```text -accuracy of TEST data on defensed model is : 0.974259 -accuracy of adv data on defensed model is : 0.856370 -defense mis-classification rate of adversaries is : 0.143629 -The average confidence of adversarial class is : 0.616670 -The average confidence of true class is : 0.177374 -``` - -After NAD is used to defend against adversarial examples, the model's misclassification ratio of adversarial examples decreases from 95% to 14%, effectively defending against adversarial examples. 
In addition, the classification accuracy of the model for the original test dataset reaches 97%. diff --git a/tutorials/training/source_en/advanced_use/incremental_operator_build.md b/tutorials/training/source_en/advanced_use/incremental_operator_build.md deleted file mode 100644 index deaaedc4f2bb09d6860ea776fc0b2832b5cfc4b9..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/incremental_operator_build.md +++ /dev/null @@ -1,117 +0,0 @@ -# Incremental Operator Build - -`Linux` `Ascend` `Model Training` `Beginner` `Intermediate` `Expert` - - - -- [Incremental Operator Build](#incremental-operator-build) - - [Overview](#overview) - - [Usage](#usage) - - [FAQs](#faqs) - - - - - -## Overview - -When a network model is executed, MindSpore builds the used operators. The time consumed in this stage increases with the scale of the network model. To improve the performance of secondary model execution, an incremental operator build mechanism is provided. When MindSpore executes a network model, the `kernel_meta` folder is generated in the directory where the execution is performed. During the execution, operator cache files (in the `.o`, `.info`, or `.json` format) generated during network build are saved to this directory. If you execute the same network model again or only part of the model changes, MindSpore automatically calls the reusable operator cache files in the `kernel_meta` folder, which significantly reduces the network build time and improves the execution performance. Currently, the incremental operator build function can be used only on the Ascend AI chips. - -The following demonstrates how to use the incremental operator build function. - -## Usage - -Incremental operator build is enabled by default on MindSpore and does not need to be controlled. The following describes how to build a simple network model case `test_square.py` in the `src` directory. 
The current directory structure is as follows: - -```text -└─src - └── test_square.py -``` - -Execute the following test case: - -```python -import numpy as np -import mindspore.nn as nn -import mindspore.context as context -import mindspore.ops as ops -from mindspore import Tensor - -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.square = ops.Square() - - def construct(self, data): - return self.square(data) - -def test_net(): - x = np.array([1.0, 4.0, 9.0]).astype(np.float32) - square = Net() - output = square(Tensor(x)) - print("x: ", x) - print("output: ", output) - - -``` - -The network model consists of a single operator `Square`, and the output is a square value of the input. The command output is as follows: - -```python -x: [1. 4. 9.] -output: [1. 16. 81.] -``` - -The `kernel_meta` folder is generated in the directory where the execution is performed, which contains the `.o`, `.json`, and `.info` files of the Square operator. The current directory structure is as follows: - -```text -└─src - ├── test_square.py - └── kernel_meta - ├── Square_3307185124911971026_7.info - ├── Square_3307185124911971026_7.json - └── Square_3307185124911971026_7.o -``` - -For an operator: - -The `.o` file is an executable file generated by MindSpore for the operator during network model execution. - -The `.info` file records all valid information about the operator, including the operator name, attributes, input and output formats, and input and output data types. The `.info` file is used to search for and determine whether the `.o` file of the operator can be reused. 
The details are as follows: - -```text -{"SocInfo":{"autoTilingMode":"NO_TUNE","coreNum":"","coreType":"","l1Fusion":"false","l2Fusion":"false","l2Mode":"2","op_debug_level":"","op_impl_mode":"","op_impl_mode_list":[],"socVersion":"Ascend910A"},"impl_path":"","op_info":{"Type":"Square","attrs":null,"full_name":"Default/Square-op1","gen_model":"single","graph_id":0,"inputs":[[{"dtype":"float32","format":"NCHW","name":"x_0","ori_format":"NCHW","ori_shape":[3],"param_type":"required","range":[[3,3]],"shape":[3],"valid":true}]],"is_dynamic_shape":false,"kernel_name":"Square_2989580383048251395_7","module_name":"impl.square","name":"square","outputs":[[{"dtype":"float32","format":"NCHW","name":"y","ori_format":"NCHW","ori_shape":[3],"param_type":"required","range":[[3,3]],"shape":[3],"valid":true}]],"py_module_path":"/usr/local/Ascend/opp/op_impl/built-in/ai_core/tbe","socVersion":"Ascend910A"},"platform":"TBE"} -``` - -The `.json` file stores the operator build result, which will be used during running. The details are as follows: - -```text -{ - "batchBindOnly":1, - "binFileName":"Square_3307185124911971026_7", - "binFileSuffix":".o", - "blockDim":1, - "kernelName":"Square_3307185124911971026_7__kernel0", - "magic":"RT_DEV_BINARY_MAGIC_ELF", - "opParaSize":0, - "parameters":[ - 0, - 0 - ], - "sha256":"64d4963bf6b619c2d85da67611f5677e0ea11bba0413ed3620b0926b1d072a1a" -} -``` - -After the preceding three types of operator cache files are generated, you can perform incremental operator build when executing the network model. That is, only new or modified operators are built, greatly improving the network build performance. - -## FAQs - -- Cache files cannot be shared in different scenarios, such as multi-device and single-device scenarios, or training and inference scenarios. - -- When multiple devices are running, the `kernel_meta` folder is generated in multiple `device` directories when the network model is executed. 
- - Note that when multiple devices are running, if the operator cache files in `kernel_meta` of some devices are deleted and the same network model is executed again, devices that do not need to be rebuilt may time out. As a result, the execution fails. In this case, you can set the environment variable `HCCL_CONNECT_TIMEOUT`, that is, the waiting time between multiple devices, to avoid failure. However, this method takes a long time, which is equivalent to deleting and rebuilding all devices. - -- If the process is interrupted during network build, there is a possibility that an error occurs when the cache files in `kernel_meta` are generated. As a result, the subsequent re-execution fails. In this case, you need to delete the `kernel_meta` folder and rebuild the network. diff --git a/tutorials/training/source_en/advanced_use/lineage_and_scalars_comparison.md b/tutorials/training/source_en/advanced_use/lineage_and_scalars_comparison.md deleted file mode 100644 index b610509e07e181690f10221555e80316fde82a12..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/lineage_and_scalars_comparison.md +++ /dev/null @@ -1,113 +0,0 @@ -# Viewing Lineage and Scalars Comparison - -`Linux` `Ascend` `GPU` `CPU` `Model Optimization` `Intermediate` `Expert` - - - -- [Viewing Lineage and Scalars Comparison](#viewing-lineage-and-scalars-comparison) - - [Overview](#overview) - - [Model Lineage](#model-lineage) - - [Dataset Lineage](#dataset-lineage) - - [Scalars Comparison](#scalars-comparison) - - [Notices](#notices) - - - - - -## Overview - -Model lineage, data lineage and comparison Kanban in mindinsight are the same as training dashboard. In the visualization of training data, different scalar trend charts are observed by comparison dashboard to find problems, and then the lineage function is used to locate the problem causes, so as to give users the ability of efficient tuning in data enhancement and deep neural network. 
- -Access the Training Dashboard by selecting Comparison Dashboard. - -## Model Lineage - -Model lineage visualization is used to display the parameter information of all training models. - -![image.png](./images/lineage_label.png) - -Figure 1: Model parameter selection area - -Figure 1 shows the model parameter selection area, which lists the model parameter tags that can be viewed. You can select required tags to view the corresponding model parameters. - -![image.png](./images/lineage_model_chart.png) - -Figure 2: Model lineage function area - -Figure 2 shows the model lineage function area, which visualizes the model parameter information. You can select a specific area in the column to display the model information within the area. - -![image.png](./images/lineage_model_table.png) - -Figure 3: Model list - -Figure 3 shows all model information in groups. You can sort the model information in ascending or descending order by the specified column. - -The overview page on the left shows information about optimization objective and related parameters. - -![targets.png](./images/targets.png) - -Figure 4: Overview page - -Figure 4 shows the optimization objective distribution, parameter importance, and scatter plots. You can select the optimization objective to view the importance of the parameters, and then click the histogram to view the scatter plot of the parameters and optimization objective. - -## Dataset Lineage - -Dataset lineage visualization is used to display data processing and augmentation information of all model trainings. - -![data_label.png](./images/data_label.png) - -Figure 5: Data processing and augmentation operator selection area - -Figure 5 shows the data processing and augmentation operator selection area, which lists names of data processing and augmentation operators that can be viewed. You can select required tags to view related parameters. 
- -![data_chart.png](./images/data_chart.png) - -Figure 6: Dataset lineage function area - -Figure 6 shows the dataset lineage function area, which visualizes the parameter information used for data processing and augmentation. You can select a specific area in the column to display the parameter information within the area. - -![data_table.png](./images/data_table.png) - -Figure 7: Dataset lineage list - -Figure 7 shows the data processing and augmentation information of all model trainings. - -> If user filters the model lineage and then switches to the data lineage page, the line chart will show the latest filtered column in model lineage. - -## Scalars Comparison - -Scalars Comparison can be used to compare scalar curves between multiple trainings - -![multi_scalars.png](./images/multi_scalars.png) - -Figure 8: Scalars comparison curve area - -Figure 8 shows the scalar curve comparison between multiple trainings. The horizontal coordinate indicates the training step, and the vertical coordinate indicates the scalar value. - -Buttons from left to right in the upper right corner of the figure are used to display the chart in full screen, switch the Y-axis scale, enable or disable the rectangle selection, roll back the chart step by step, and restore the chart. - -- Full-screen Display: Display the scalar curve in full screen. Click the button again to restore it. -- Switch Y-axis Scale: Perform logarithmic conversion on the Y-axis coordinate. -- Enable/Disable Rectangle Selection: Draw a rectangle to select and zoom in a part of the chart. You can perform rectangle selection again on the zoomed-in chart. -- Step-by-step Rollback: Cancel operations step by step after continuously drawing rectangles to select and zooming in the same area. -- Restore Chart: Restore a chart to the original state. 
- -![multi_scalars_select.png](./images/multi_scalars_select.png) - -Figure 9: Scalars comparison function area - -Figure 9 shows the scalars comparison function area, which allows you to view scalar information by selecting different trainings or tags, different dimensions of the horizontal axis, and smoothness. - -- Training Selection: Click the expand button and select or filter the required trainings to view the corresponding scalar information. -- Tag Selection: Select the required tags to view the corresponding scalar information. -- Horizontal Axis: Select any of Step, Relative Time, and Absolute Time as the horizontal axis of the scalar curve. -- Smoothness: Adjust the smoothness to smooth the scalar curve. - -## Notices - -To ensure performance, MindInsight implements scalars comparison with the cache mechanism and the following restrictions: - -- The scalars comparison supports only for trainings in cache. -- The maximum of 15 latest trainings (sorted by modification time) can be retained in the cache. -- The maximum of 5 trainings can be selected for scalars comparison at the same time. 
diff --git a/tutorials/training/source_en/advanced_use/migrate_3rd_scripts.md b/tutorials/training/source_en/advanced_use/migrate_3rd_scripts.md deleted file mode 100644 index 37765379b3af0e916adc331fa584c6b3e862d9b7..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/migrate_3rd_scripts.md +++ /dev/null @@ -1,277 +0,0 @@ -# Migrating Training Scripts from Third Party Frameworks - -`Linux` `Ascend` `GPU` `CPU` `Whole Process` `Beginner` `Intermediate` `Expert` - - - -- [Migrating Training Scripts from Third Party Frameworks](#migrating-training-scripts-from-third-party-frameworks) - - [Overview](#overview) - - [Preparations](#preparations) - - [Operator Assessment](#operator-assessment) - - [Software and Hardware Environments](#software-and-hardware-environments) - - [E2E Network Migration](#e2e-network-migration) - - [Training Phase](#training-phase) - - [Script Migration](#script-migration) - - [Accuracy Debugging](#accuracy-debugging) - - [On-Cloud Integration](#on-cloud-integration) - - [Inference Phase](#inference-phase) - - [Examples](#examples) - - - - - -## Overview - -You've probably written scripts for frameworks such as TensorFlow and PyTorch. This tutorial describes how to migrate existing TensorFlow and PyTorch networks to MindSpore, including key steps and operation recommendations which help you quickly migrate your network. - -## Preparations - -Before you start working on your scripts, prepare your operator assessment and hardware and software environments to make sure that MindSpore can support the network you want to migrate. - -### Operator Assessment - -Analyze the operators contained in the network to be migrated and figure out how does MindSpore support these operators based on the [Operator List](https://www.mindspore.cn/doc/note/en/master/operator_list.html). - -Take ResNet-50 as an example. 
The two major operators [Conv](https://www.mindspore.cn/doc/api_python/en/master/mindspore/nn/mindspore.nn.Conv2d.html) and [BatchNorm](https://www.mindspore.cn/doc/api_python/en/master/mindspore/nn/mindspore.nn.BatchNorm2d.html) exist in the MindSpore Operator List. - -If any operator does not exist, you are advised to perform the following operations: - -- Operator replacement: Analyze the operator implementation formula and check whether a combination of existing operators of MindSpore can be used to achieve the expected objective. -- Substitution solution: For example, if a loss operator is not supported, check whether it can be replaced with a loss operator of the same type supported by MindSpore; alternatively, check whether the current network structure can be replaced by another mainstream network of the same type. - -If the operators used for replacement cannot fully provide the required functions, you are advised to perform the following operations: - -- Delete unnecessary functions. -- Find a substitution solution for necessary functions. - -If the preceding requirements cannot be met, you can raise requirements in the [MindSpore code repository](https://gitee.com/mindspore/mindspore). - -### Software and Hardware Environments - -Prepare the hardware environment, find a platform corresponding to your environment by referring to the [installation guide](https://www.mindspore.cn/install/en), and install MindSpore. - -## E2E Network Migration - -### Training Phase - -#### Script Migration - -MindSpore differs from TensorFlow and PyTorch in the network structure. Before migration, you need to clearly understand the original script and information of each layer, such as shape. - -> You can also use [MindConverter Tool](https://gitee.com/mindspore/mindinsight/tree/master/mindinsight/mindconverter) to automatically convert the PyTorch network definition script to MindSpore network definition script. 
- -The ResNet-50 network migration and training on the Ascend 910 is used as an example. - -1. Import MindSpore modules. - - Import the corresponding MindSpore modules based on the required APIs. For details about the module list, see . - -2. Load and preprocess a dataset. - - Use MindSpore to build the required dataset. Currently, MindSpore supports common datasets. You can call APIs in the original format, `MindRecord`, and `TFRecord`. In addition, MindSpore supports data processing and data augmentation. For details, see the [Data Preparation](https://www.mindspore.cn/tutorial/training/en/master/use/data_preparation.html). - - In this example, the CIFAR-10 dataset is loaded, which supports both single-GPU and multi-GPU scenarios. - - ```python - if device_num == 1: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=4, shuffle=True) - else: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=4, shuffle=True, - num_shards=device_num, shard_id=rank_id) - ``` - - Then, perform data augmentation, data cleaning, and batch processing. For details about the code, see . - -3. Build a network. - - The biggest difference between MindSpore and TensorFlow in convolution is the data format. `NCHW` is used in MindSpore by default, while `NHWC` is used in TensorFlow. - - The following uses the first convolutional layer on the ResNet-50 network whose batch\_size is set to 32 as an example: - - - In TensorFlow, the format of the input feature is \[32, 224, 224, 3], and the size of the convolution kernel is \[7, 7, 3, 64]. - - - In MindSpore, the format of the input feature is \[32, 3, 224, 224], and the size of the convolution kernel is \[64, 3, 7, 7]. 
- - ```python - def _conv7x7(in_channel, out_channel, stride=1): - weight_shape = (out_channel, in_channel, 7, 7) - weight = _weight_variable(weight_shape) - return nn.Conv2d(in_channel, out_channel, - kernel_size=7, stride=stride, padding=0, pad_mode='same', weight_init=weight) - - - def _bn(channel): - return nn.BatchNorm2d(channel, eps=1e-4, momentum=0.9, - gamma_init=1, beta_init=0, moving_mean_init=0, moving_var_init=1) - ``` - -4. Build a subnet. - - In MindSpore, `nn.Cell` is used to build a subnet structure. The network structure must be defined before being used in a subnet. Define each operator to be used in the `__init__` function of the Cell, connect the defined operators in the `construct` function, and then return the output of the subnet through `return`. - - ```python - class ResidualBlock(nn.Cell): - """ - ResNet V1 residual block definition. - - Args: - in_channel (int): Input channel. - out_channel (int): Output channel. - stride (int): Stride size for the first convolutional layer. Default: 1. - - Returns: - Tensor, output tensor. 
- - Examples: - >>> ResidualBlock(3, 256, stride=2) - """ - expansion = 4 - - def __init__(self, - in_channel, - out_channel, - stride=1): - super(ResidualBlock, self).__init__() - - channel = out_channel - self.conv1 = _conv1x1(in_channel, channel, stride=1) - self.bn1 = _bn(channel) - - self.conv2 = _conv3x3(channel, channel, stride=stride) - self.bn2 = _bn(channel) - - self.conv3 = _conv1x1(channel, out_channel, stride=1) - self.bn3 = _bn_last(out_channel) - - self.relu = nn.ReLU() - - self.down_sample = False - - if stride != 1 or in_channel != out_channel: - self.down_sample = True - self.down_sample_layer = None - - if self.down_sample: - self.down_sample_layer = nn.SequentialCell([_conv1x1(in_channel, out_channel, stride), - _bn(out_channel)]) - self.add = ops.Add() - - def construct(self, x): - identity = x - - out = self.conv1(x) - out = self.bn1(out) - out = self.relu(out) - - out = self.conv2(out) - out = self.bn2(out) - out = self.relu(out) - - out = self.conv3(out) - out = self.bn3(out) - - if self.down_sample: - identity = self.down_sample_layer(identity) - - out = self.add(out, identity) - out = self.relu(out) - - return out - ``` - -5. Define a concatenated structure. - - The ResNet-50 network has a large number of repeated structures. In TensorFlow, you can use the for loop function to reduce repeated code. In MindSpore, each defined Cell object is independent. Especially for subnets with weight parameters, the defined Cell cannot be used repeatedly. If a large number of repeated concatenated structures exist, you can construct multiple Cell instances using the for loop function and concatenate them by using `SequentialCell`. - - ```python - def _make_layer(self, block, layer_num, in_channel, out_channel, stride): - """ - Make stage network of ResNet. - - Args: - block (Cell): Resnet block. - layer_num (int): Layer number. - in_channel (int): Input channel. - out_channel (int): Output channel. 
- stride (int): Stride size for the first convolutional layer. - - Returns: - SequentialCell, the output layer. - - Examples: - >>> _make_layer(ResidualBlock, 3, 128, 256, 2) - """ - layers = [] - - resnet_block = block(in_channel, out_channel, stride=stride) - layers.append(resnet_block) - - for _ in range(1, layer_num): - resnet_block = block(out_channel, out_channel, stride=1) - layers.append(resnet_block) - - return nn.SequentialCell(layers) - ``` - -6. Build the entire network. - - The [ResNet-50](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/resnet/src/resnet.py) network structure is formed by connecting multiple defined subnets. Follow the rule of defining subnets before using them and define all the subnets used in the `__init__` and connect subnets in the `construct`. - -7. Define a loss function and an optimizer. - - After the network is defined, the loss function and optimizer need to be defined accordingly. - - ```python - loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') - opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), lr, config.momentum, config.weight_decay, config.loss_scale) - ``` - -8. Build a model. - - Similar to the `Estimator` API of TensorFlow, the defined network prototype, loss function, and optimizer are transferred to the `Model` API of MindSpore and automatically combined into a network that can be used for training. - - To use loss scale in training, define a `loss_scale_manager` and transfer it to the `Model` API. - - ```python - loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) - ``` - - You can use a built-in assessment method of `Model` by setting the [metrics](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/custom_debugging_info.html#mindspore-metrics) attribute. 
- - ```python - model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics={'acc'}) - ``` - - Similar to `estimator.train` of TensorFlow, you can call the `model.train` API to perform training. Functions such as CheckPoint and intermediate result printing can be defined on the `model.train` API in Callback mode. - - ```python - time_cb = TimeMonitor(data_size=step_size) - loss_cb = LossMonitor() - cb = [time_cb, loss_cb] - if config.save_checkpoint: - config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_steps, - keep_checkpoint_max=config.keep_checkpoint_max) - ckpt_cb = ModelCheckpoint(prefix="resnet", directory=config.save_checkpoint_path, config=config_ck) - cb += [ckpt_cb] - model.train(epoch_size, dataset, callbacks=cb) - ``` - -#### Accuracy Debugging - -The accuracy optimization process is as follows: - -1. When validating the single-GPU accuracy, you are advised to use a small dataset for training. After the validation is successful, use the full dataset for multi-GPU accuracy validation. This helps improve the debugging efficiency. -2. Delete unnecessary skills (such as augmentation configuration and dynamic loss scale in an optimizer) from the script. After the validation is successful, add functions one by one. After a new function is confirmed to be normal, add the next function. In this way, you can quickly locate the fault. - -#### On-Cloud Integration - -Run your scripts on ModelArts. For details, see [Using MindSpore on Cloud](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/use_on_the_cloud.html). - -### Inference Phase - -Models trained on the Ascend 910 AI processor can be used for inference on different hardware platforms. Refer to the [Multi-platform Inference Tutorial](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference.html) for detailed steps. 
- -## Examples - -- [Model Zoo](https://gitee.com/mindspore/mindspore/tree/master/model_zoo) diff --git a/tutorials/training/source_en/advanced_use/migrate_3rd_scripts_mindconverter.md b/tutorials/training/source_en/advanced_use/migrate_3rd_scripts_mindconverter.md deleted file mode 100644 index 9b8c82c88fd7d65d82fdd47ae1ee43b6a8802a09..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/migrate_3rd_scripts_mindconverter.md +++ /dev/null @@ -1,289 +0,0 @@ -# Migrating From Third Party Frameworks With Tools - -`Linux` `Ascend` `Model Development` `Beginner` - - - -- [Migrating From Third Party Frameworks With Tools](#migrating-from-third-party-frameworks-with-tools) - - [Overview](#overview) - - [Installation](#installation) - - [Usage](#usage) - - [PyTorch Model Scripts Migration](#pytorch-model-scripts-migration) - - [TensorFlow Model Scripts Migration](#tensorflow-model-scripts-migration) - - [ONNX Model File Migration](#onnx-model-file-migration) - - [Scenario](#scenario) - - [Example](#example) - - [AST-Based Conversion](#ast-based-conversion) - - [Graph-Based Conversion](#graph-based-conversion) - - [TensorFlow Model Scripts Conversion](#tensorflow-model-scripts-conversion) - - [ONNX Model File Conversion](#onnx-model-file-conversion) - - [Caution](#caution) - - - - - -## Overview - -MindConverter is a migration tool to transform the model scripts and weights from PyTorch(ONNX) and TensorFlow(PB) to MindSpore. Users can migrate rapidly with minor changes according to the conversion report. - -## Installation - -Mindconverter is a submodule in MindInsight. Please follow the [Guide](https://gitee.com/mindspore/mindinsight/blob/master/README.md#) here to install MindInsight. - -Third party libraries below are required after installing MindInsight: - -1. TensorFlow is not a dependency library explicitly declared by MindInsight. 
If the user wants to use graph based MindConverter, please install TensorFlow(MindConverter recommends TensorFlow 1.15.x). -2. ONNX(>=1.8.0), ONNXRUNTIME(>=1.5.2), ONNXOPTIMIZER(>=0.1.2) are not explicitly stated dependency libraries in MindInsight, if the user wants to use graph based MindConverter, the above third-party libraries must be installed. If the user wants to migrate a TensorFlow model to MindSpore, TF2ONNX(>=1.7.1) must be installed additionally. - -## Usage - -MindConverter currently only provides a command-line interface. Here is the manual page. - -```bash -usage: mindconverter [-h] [--version] [--in_file IN_FILE] - [--model_file MODEL_FILE] [--shape SHAPE [SHAPE ...]] - [--input_nodes INPUT_NODES [INPUT_NODES ...]] - [--output_nodes OUTPUT_NODES [OUTPUT_NODES ...]] - [--output OUTPUT] [--report REPORT] - -optional arguments: - -h, --help show this help message and exit - --version show program version number and exit - --in_file IN_FILE Specify path for script file to use AST schema to do - script conversation. - --model_file MODEL_FILE - Tensorflow(.pb) or ONNX(.onnx) model file path is - expected to do script generation based on graph - schema. When `--in_file` and `--model_file` are both - provided, use AST schema as default. - --shape SHAPE [SHAPE ...] - Expected input tensor shape of `--model_file`. It is - required when use graph based schema. Both order and - number should be consistent with `--input_nodes`. - Given that (1,128) and (1,512) are shapes of input_1 - and input_2 separately. Usage: --shape 1,128 1,512 - --input_nodes INPUT_NODES [INPUT_NODES ...] - Input node(s) name of `--model_file`. It is required - when use graph based schema. Both order and number - should be consistent with `--shape`. Given that both - input_1 and input_2 are inputs of model. Usage: - --input_nodes input_1 input_2 - --output_nodes OUTPUT_NODES [OUTPUT_NODES ...] - Output node(s) name of `--model_file`. It is required - when use graph based schema. 
Given that both output_1 - and output_2 are outputs of model. Usage: - --output_nodes output_1 output_2 - --output OUTPUT Optional, specify path for converted script file - directory. Default output directory is `output` folder - in the current working directory. - --report REPORT Optional, specify report directory. Default is - converted script directory. -``` - -### PyTorch Model Scripts Migration - -**MindConverter only provides Abstract Syntax Tree (AST) based conversion for PyTorch**: Using the argument `--in_file` will enable the AST mode. - -> The AST mode will be enabled, if both `--in_file` and `--model_file` are specified. - -`--output` and `--report` are optional. MindConverter creates an `output` folder under the current working directory, and outputs generated scripts and conversion reports to it. - -> If the user wants to migrate a PyTorch model script using graph based MindConverter, it is recommended to export the PyTorch model to ONNX, and then use the ONNX file to migrate the model script. For details, see [PyTorch instructions](https://pytorch.org/docs/stable/onnx.html). - -### TensorFlow Model Scripts Migration - -**MindConverter provides computational graph based conversion for TensorFlow**: Transformation will be done given `--model_file`, `--shape`, `--input_nodes` and `--output_nodes`. - -> AST mode is not supported for TensorFlow, only computational graph based mode is available. - -`--output` and `--report` are optional. MindConverter creates an `output` folder under the current working directory, and outputs generated scripts to it. - -### ONNX Model File Migration - -**MindConverter provides computational graph based conversion for ONNX**: Transformation will be done given `--model_file`, `--shape`, `--input_nodes` and `--output_nodes`. - -> AST mode is not supported for ONNX, only computational graph based mode is available. - -`--output` and `--report` are optional. 
MindConverter creates an `output` folder under the current working directory, and outputs generated scripts to it. - -## Scenario - -MindConverter provides two modes for different migration demands. - -1. Keep original scripts' structures, including variables, functions, and libraries. -2. Keep extra modifications as few as possible, or no modifications are required after conversion. - -The AST mode is recommended for the first demand (AST mode is only supported for PyTorch). It parses and analyzes PyTorch scripts, then replaces them with the MindSpore AST to generate codes. Theoretically, the AST mode supports any model script. However, the conversion may differ due to the coding style of original scripts. - -For the second demand, the Graph mode is recommended. As the computational graph is a standard descriptive language, it is not affected by user's coding style. This mode may have more operators converted as long as these operators are supported by MindConverter. - -Some typical networks in computer vision field have been tested for the Graph mode. Note that: - -> 1. The Dropout operator will be lost after conversion because the inference mode is used to load the ONNX or TensorFlow model. Manual re-implementation is necessary. -> 2. The Graph-based mode will be continuously developed and optimized with further updates. - -## Example - -### AST-Based Conversion - -Assume the PyTorch script is located at `/home/user/model.py`, and outputs the transformed MindSpore script to `/home/user/output`, with the conversion report to `/home/user/output/report`. Use the following command: - -```bash -mindconverter --in_file /home/user/model.py \ - --output /home/user/output \ - --report /home/user/output/report -``` - -In the conversion report, non-transformed code is listed as follows: - -```text -line : [UnConvert] 'operator' didn't convert. ... -``` - -For non-transformed operators, the original code is kept. Please manually migrate them. 
[Click here](https://www.mindspore.cn/doc/note/en/master/index.html#operator_api) for more information about operator mapping. - -Here is an example of the conversion report: - -```text - [Start Convert] - [Insert] 'import mindspore.ops as ops' is inserted to the converted file. - line 1:0: [Convert] 'import torch' is converted to 'import mindspore'. - ... - line 157:23: [UnConvert] 'nn.AdaptiveAvgPool2d' didn't convert. Maybe could convert to mindspore.ops.operations.ReduceMean. - ... - [Convert Over] -``` - -For non-transformed operators, suggestions are provided in the report. For instance, MindConverter suggests that replace `torch.nn.AdaptiveAvgPool2d` with `mindspore.ops.operations.ReduceMean`. - -### Graph-Based Conversion - -#### TensorFlow Model Scripts Conversion - -To use TensorFlow model script migration, you need to export TensorFlow model to Pb format(frozen graph) first, and obtain the model input node and output node name. See [Tutorial of exporting TensorFlow Pb model](https://gitee.com/mindspore/mindinsight/blob/master/mindinsight/mindconverter/docs/tensorflow_model_exporting.md#) for details. - -Suppose the model is saved to `/home/user/xxx/frozen_model.pb`, corresponding input node name is `input_1:0`, output node name is `predictions/Softmax:0`, the input shape of model is `1,224,224,3`. Output the transformed MindSpore script and MindSpore checkpoint file to `/home/user/output`, with the conversion report and weight map file to `/home/user/output/report`. Use the following command: - -```bash -mindconverter --model_file /home/user/xxx/frozen_model.pb --shape 1,224,224,3 \ - --input_nodes input_1:0 \ - --output_nodes predictions/Softmax:0 \ - --output /home/user/output \ - --report /home/user/output/report -``` - -After executing the command, MindSpore script, MindSpore weight file, weight map file, and report file can be found in corresponding directory. 
- -The format of conversion report generated by script generation scheme based on graph structure is the same as that of AST scheme. However, since the graph based scheme is a generative method, the original tensorflow script is not referenced in the conversion process. Therefore, the code line and column numbers involved in the generated conversion report refer to the generated script. - -In addition, input and output Tensor shape of unconverted operators shows explicitly (`input_shape` and `output_shape`) as comments in converted scripts to help further manual modifications. Here is an example of the `Reshape` operator (already supported after R1.0 version): - -```python -class Classifier(nn.Cell): - - def __init__(self): - super(Classifier, self).__init__() - ... - self.reshape = onnx.Reshape(input_shape=(1, 1280, 1, 1), - output_shape=(1, 1280)) - ... - - def construct(self, x): - ... - # Suppose input of `reshape` is x. - reshape_output = self.reshape(x) - ... - -``` - -It is convenient to replace the operators according to the `input_shape` and `output_shape` parameters. The replacement is like this: - -```python -import mindspore.ops as ops -... - -class Classifier(nn.Cell): - - def __init__(self): - super(Classifier, self).__init__() - ... - self.reshape = ops.Reshape(input_shape=(1, 1280, 1, 1), - output_shape=(1, 1280)) - ... - - def construct(self, x): - ... - # Suppose input of `reshape` is x. - reshape_output = self.reshape(x, (1, 1280)) - ... - -``` - -Weight information in MindSpore (`converted_weight`) and that in source framework(`source_weight`) are saved in weight map separately. 
- -Here is an example of the weight map: - -```json -{ - "resnet50": [ - { - "converted_weight": { - "name": "conv2d_0.weight", - "shape": [ - 64, - 3, - 7, - 7 - ], - "data_type": "Float32" - }, - "source_weight": { - "name": "conv1.weight", - "shape": [ - 64, - 3, - 7, - 7 - ], - "data_type": "float32" - } - } - ] -} -``` - -#### ONNX Model File Conversion - -To use ONNX model File migration, you need to obtain the model input node and output node names. To get input node and output node names, [Netron](https://github.com/lutzroeder/netron) is recommended. - -Suppose the model is saved to `/home/user/xxx/model.onnx`, the corresponding input node name is `input_1:0`, the output node name is `predictions/Softmax:0`, the input shape of model is `1,3,224,224`, the following command can be used to generate the script: - -```bash -mindconverter --model_file /home/user/xxx/model.onnx --shape 1,3,224,224 \ - --input_nodes input_1:0 \ - --output_nodes predictions/Softmax:0 \ - --output /home/user/output \ - --report /home/user/output/report -``` - -After executed, MindSpore script, MindSpore weight file, weight map file, and report file can be found in corresponding directory. - -The format of conversion report generated by script generation scheme based on graph structure is the same as that of AST scheme. However, since the graph based scheme is a generative method, the original onnx file is not referenced in the conversion process. Therefore, the code line and column numbers involved in the generated conversion report refer to the generated script. - -The example of weight map refers to that in **TensorFlow Model Scripts Conversion** section. - -## MindConverter Error Code Definition - -Error code defined in MindConverter, please refer to [LINK](https://gitee.com/mindspore/mindinsight/blob/master/mindinsight/mindconverter/docs/error_code_definition.md# ). 
- -## Model List Supported by MindConverter - -[List of supported models (Models in below table have been tested based on PyTorch 1.5.0 and TensorFlow 1.15.0, X86 Ubuntu released version)](https://gitee.com/mindspore/mindinsight/blob/master/mindinsight/mindconverter/docs/supported_model_list.md# ). - -## Caution - -1. This script conversion tool relies on operators which are supported by MindConverter and MindSpore. Unsupported operators may not be successfully mapped to MindSpore operators. You can manually edit, or implement the mapping based on MindConverter, and contribute to our MindInsight repository. We appreciate your support for the MindSpore community. -2. MindConverter converts dynamic input shape to constant one based on `--shape` while using graph based scheme, as a result, it is required that inputs' shape used to retrain or inference in MindSpore are the same as that used to convert using MindConverter. If the input shape has changed, please re-run MindConverter with a new `--shape` or fix shape related parameters in the old script. -3. MindSpore script and MindSpore checkpoint file are saved in the one file folder path, while report file and weight map file are saved in the other one. -4. The security and consistency of the model file should be guaranteed by the user. diff --git a/tutorials/training/source_en/advanced_use/migrate_script.rst b/tutorials/training/source_en/advanced_use/migrate_script.rst deleted file mode 100644 index 614247864bc9b763aefb8d64167a2d37c871f93e..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/migrate_script.rst +++ /dev/null @@ -1,9 +0,0 @@ -Migrating Training Scripts from Third Party Frameworks -====================================================== - -.. 
toctree:: - :maxdepth: 1 - - migrate_3rd_scripts_mindconverter - migrate_3rd_scripts - \ No newline at end of file diff --git a/tutorials/training/source_en/advanced_use/mindinsight_commands.md b/tutorials/training/source_en/advanced_use/mindinsight_commands.md deleted file mode 100644 index 2bdd0522ef02afd343cc27d22dbf6b52576e7738..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/mindinsight_commands.md +++ /dev/null @@ -1,147 +0,0 @@ -# MindInsight Commands - -`Linux` `Ascend` `GPU` `CPU` `Model Optimization` `Intermediate` `Expert` - - - -- [MindInsight Commands](#mindinsight-commands) - - [View the Command Help Information](#view-the-command-help-information) - - [View the Version Information](#view-the-version-information) - - [Start the Service](#start-the-service) - - [View the Service Process Information](#view-the-service-process-information) - - [Stop the Service](#stop-the-service) - - [Parse Summary](#parse-summary) - - [Use Mindoptimizer to Tune Hyperparameters](#use-mindoptimizer-to-tune-hyperparameters) - - - - - -## View the Command Help Information - -```shell -mindinsight --help -``` - -## View the Version Information - -```shell -mindinsight --version -``` - -## Start the Service - -```shell -mindinsight start [-h] [--config ] [--workspace ] - [--port ] [--url-path-prefix ] - [--reload-interval ] - [--summary-base-dir ] - [--enable-debugger ] - [--debugger-port ] -``` - -Optional parameters are as follows: - -|Name|Argument|Description|Type|Default|Scope|Specifications| -|---|---|---|---|---|---|---| -|`-h, --help`|Optional|Displays the help information about the start command.|-|-|-|-| -|`--config `|Optional|Specifies the configuration file or module.|String|Empty string|-|Physical file path (file:/path/to/config.py) or a module path (python:path.to.config.module) that can be identified by Python.| -|`--workspace `|Optional|Specifies the working directory.|String|$HOME/mindinsight|-|-| -|`--port 
`|Optional|Specifies the port number of the web visualization service.|Integer|8080|1~65535|-| -|`--url-path-prefix `|Optional|Specifies the URL path prefix of the web visualization service.|String|Empty string|-|URL path prefix consists of segments separated by slashes. Each segment supports alphabets / digits / underscores / dashes / dots, but not single dot or double dots.| -|`--reload-interval `|Optional|Specifies the interval (unit: second) for loading data.|Integer|3|0~300|The value 0 indicates that data is loaded only once.| -|`--summary-base-dir `|Optional|Specifies the root directory for loading training log data.|String|./|-|MindInsight traverses the direct subdirectories in this directory and searches for log files. If a direct subdirectory contains log files, it is identified as the log file directory. If a root directory contains log files, it is identified as the log file directory.| -|`--enable-debugger `|Optional|Whether to launch the MindInsight Debugger.|Boolean|False|True/False/1/0|The debugger entry can be shown on MindInsight UI only when MindInsight Debugger is launched.| -|`--debugger-port `|Optional|Specifies the port number of the debugger server.|Integer|50051|1~65535|-| - -> When the service is started, the parameter values of the command line are saved as the environment variables of the process and start with `MINDINSIGHT_`, for example, `MINDINSIGHT_CONFIG`, `MINDINSIGHT_WORKSPACE`, and `MINDINSIGHT_PORT`. - -## View the Service Process Information - -MindInsight provides user with web services. 
Run the following command to view the running web service process: - -```shell -ps -ef | grep mindinsight -``` - -Run the following command to access the working directory `WORKSPACE` corresponding to the service process based on the service process ID: - -```shell -lsof -p | grep access -``` - -Output the working directory `WORKSPACE` as follows: - -```shell -gunicorn /log/gunicorn/access.log -``` - -## Stop the Service - -```shell -mindinsight stop [-h] [--port PORT] -``` - -Optional parameters are as follows: - -|Name|Argument|Description|Type|Default|Scope|Specifications| -|---|---|---|---|---|---|---| -|`-h, --help`|Optional|Displays the help information about the stop command.|-|-|-|-| -|`--port `|Optional|Specifies the port number of the web visualization service.|Integer|8080|1~65535|-| - -## Parse summary - -MindInsight provides tools for parsing summary log files. Users can save the scalars in the summary log file into a csv file and the images into a png file through the commands, which is convenient for viewing and further processing. - -```shell -mindinsight parse_summary [--summary-dir] [--output] -``` - -Optional parameters are as follows: - -|Name|Argument|Description|Type|Default|Scope|Specifications| -|---|---|---|---|---|---|---| -|`--summary-dir`|Optional|Specifies the root directory of summary files. If the directory contains multiple summary files, only the latest summary file is parsed.|String|./|-|The summary file directory needs to be readable and executable, and the summary file needs to be readable.| -|`--output`|Optional|Specifies the root directory for saving output files.|String|./|-|-| - -Execute command: - -```shell -mindinsight parse_summary --summary-dir ./ --output ./ -``` - -The output directory structure is as follows: - -```text -└─output_{datetime} - ├─image - │ └─{tag}_{step}.png - │ - └─scalar.csv -``` - -In which, - -- output_{datetime} is the output directory. 
The rule is 'output_yyyyMMdd_HHmmss_SSSSSS' including year, month, day, hour, minute, second and microseconds. - -- {tag}\_{step}.png is the image in training process. 'tag' and 'step' are the tag and step in the training (special characters in tag are deleted and '/' is replaced by '_'). - -- scalar.csv is the file which save scalars (encoding: 'utf-8'). - -## Use Mindoptimizer to Tune Hyperparameters - -MindInsight provides parameters tuning command. The command-line interface (CLI) provides the following commands: - -```shell -usage: mindoptimizer [-h] [--version] [--config ] - [--iter ] - -``` - -Optional parameters are as follows: - -|Name|Argument|Description|Type|Default|Scope|Specifications| -|---|---|---|---|---|---|---| -|`-h, --help`|Optional|Displays the help information about the start command.|-|-|-|-| -|`--config `|Required|Specifies the configuration file.|String|-|-|Physical file path (file:/path/to/config.yaml), and the file format is yaml.| -|`--iter `|Optional|Specifies the run times for tuning parameters|Integer|1|Positive integer|-| diff --git a/tutorials/training/source_en/advanced_use/model_explanation.md b/tutorials/training/source_en/advanced_use/model_explanation.md deleted file mode 100644 index 72ffd0b0da9137b0c6e50e0e6640e7b20e1018e0..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/model_explanation.md +++ /dev/null @@ -1,237 +0,0 @@ -# Explain Models - -`Linux` `Ascend` `GPU` `Model Optimization` `Beginner` `Intermediate` `Expert` - - - -- [Explain Models](#explain-models) - - [Overview](#overview) - - [Operation Process](#operation-process) - - [Preparing the Script](#preparing-the-script) - - [Restrictions](#restrictions) - - [Enabling MindInsight](#enabling-mindinsight) - - [Pages and Functions](#pages-and-functions) - - [Saliency Map Visualization](#saliency-map-visualization) - - [Explanation Method Assessment](#explanation-method-assessment) - - [Comprehensive 
Assessment](#comprehensive-assessment) - - [Classification Assessment](#classification-assessment) - - [Uncertainty](#uncertainty) - - [Counterfactual](#counterfactual) - - [Hierarchical Occlusion](#hierarchical-occlusion-counterfactual-hoc) - - [Restrictions](#hoc-restrictions) - - [Pages and Functions](#hoc-pages-and-functions) - - - - - -## Overview - -Currently, most deep learning models are black-box models with good performance but poor explainability. The model explanation module aims to provide users with explanation of the model decision basis, help users better understand the model, trust the model, and improve the model when an error occurs in the model. - -In some critical application scenarios, such as automatic driving, financial decision-making, etc., AI model cannot be truly applied if it is not interpretable for legal and policy supervision reasons. Therefore, the interpretability of the model is becoming more and more important. As a consequence, model explanation is an important part of improving MindSpore's applicability and user-friendliness. - -To be specific, in the task of image classification, a widely-used group of explanation methods will highlight the most critical area that affects the classification decision of the model. We call it "saliency map". If the highlighted parts are indeed the key features of the targeted label, then the features learned by the model are usually correct, thus the users can trust the effect and decision of the model. If the model focuses on irrelevant parts, even if the prediction label is correct, it does not mean that the model is reliable, the model developers still need to optimize and improve the model. This may be due to the correlation of some irrelevant features in the training data. Model developers can consider further data augmentation to correct the bias learned by the model correspondingly. 
- -Besides a variety of explanation methods, we also provide a set of evaluation methods to evaluate the explanation methods from various dimensions. It helps users compare and select the explanation methods that are most suitable for a particular scenario. - -## Operation Process - -### Preparing the Script - -Currently, MindSpore provides the explanation methods and explanation evaluation Python API. You can use the provided explanation methods by `mindspore.explainer.explanation` and the provided explanation evaluation by `mindspore.explainer.benchmark`. You need to prepare the black-box model and data to be explained, instantiate explanation methods or explanation evaluation according to your need and call the explanation API in your script to collect the explanation result and explanation evaluation result. - -MindSpore also provides `mindspore.explainer.ImageClassificationRunner` to run all explanation methods and explanation evaluation methods automatically. You just need to register the instantiated object and then all explanation methods and explanation evaluation methods will be executed. Explanation logs containing explanation results and explanation evaluation results will be automatically generated and stored. - -The following uses ResNet-50 and multi-label dataset with 20 classes as an example. Initializing the explanation methods in `explanation` and the evaluation methods in `benchmark`, the users can then use `ImageClassificationRunner` to execute and explanation and evaluation for the black-box model. 
The sample code is as follows: - -```python -import mindspore.nn as nn -from mindspore import load_checkpoint, load_param_into_net - -from mindspore.explainer.explanation import GradCAM, GuidedBackprop -from mindspore.explainer.benchmark import Faithfulness, Localization -from mindspore.explainer import ImageClassificationRunner - -if __name__ == "__main__": - num_classes = 20 - # please refer to model_zoo for the model architecture of resnet50 - net = resnet50(num_classes) - param_dict = load_checkpoint("resnet50.ckpt") - load_param_into_net(net, param_dict) - - - # initialize explainers with the loaded black-box model - gradcam = GradCAM(net, layer='layer4') - guidedbackprop = GuidedBackprop(net) - - # initialize benchmarkers to evaluate the chosen explainers - # for Faithfulness, the initialization needs an activation function that transforms the output of the network to a probability is also needed - activation_fn = nn.Sigmoid() # for multi-label classification - faithfulness = Faithfulness(num_labels=num_classes, metric='InsertionAUC', activation_fn=activation_fn) - localization = Localization(num_labels=num_classes, metric='PointingGame') - - # returns the dataset to be explained, when localization is chosen, the dataset is required to provide bounding box - # the columns of the dataset should be in [image], [image, labels], or [image, labels, bbox] (order matters) - # You may refer to 'mindspore.dataset.project' for columns managements - dataset_path = "dataset_dir" - dataset = get_dataset(dataset_path) - - # specify the class names of the dataset - classes = [ - 'aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', - 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', - 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor', - ] - - data = (dataset, classes) - explainers = [gradcam, guidedbackprop] - benchmarkers = [faithfulness, localization] - - # initialize runner with specified summary_dir - runner = 
ImageClassificationRunner(summary_dir='./summary_dir', network=net, activation_fn=activation_fn, data=data) - runner.register_saliency(explainers, benchmarkers) - - # execute runner.run to generate explanation and evaluation results to save it to summary_dir - runner.run() -``` - -### Restrictions - -- Only support image classification models, such as Lenet, Resnet, Alexnet. -- Input images must be in 1, 3, or 4 channels format. -- Only support GPU and Ascend devices with PyNative mode. -- All instances of explanation and evaluation methods cannot be reused across runners. Explanation and evaluation methods have to be instantiated exclusively for each runner. Otherwise, errors may occur. A correct example is shown below. - -```python -gradcam = GradCAM(net, layer='layer4') -guidedbackprop = GuidedBackprop(net) - -runner = ImageClassificationRunner(summary_dir='./summary_dir_1', network=net, activation_fn=activation_fn, data=data) -runner.register_saliency(explainers=[gradcam, guidedbackprop]) -runner.run() - -# generate another summary with GradCAM only -runner2 = ImageClassificationRunner(summary_dir='./summary_dir_2', network=net, activation_fn=activation_fn, data=data) - -# reusing explainer instance in other runner, errors may occur -# runner2.register_saliency(explainers=[gradcam]) - -# instantiating a new GradCAM is the correct way -gradcam2 = GradCAM(net, layer='layer4') -runner2.register_saliency(explainers=[gradcam2]) - -runner2.run() -``` - -### Enabling MindInsight - -Enable MindInsight and click **Model Explanation** on the top of the page. All explanation log paths are displayed. When a log path meets the conditions, the **Saliency Map Visualization** buttons are displayed in the **Operation** column. - -![xai_index](./images/xai_index.png) - -## Pages and Functions - -### Saliency Map Visualization - -Saliency map visualization is used to display the image area that has the most significant impact on the model decision-making result. 
Generally, the highlighted regions can be considered as key features of the objective classification. - -![xai_saliency_map](./images/xai_saliency_map.png) - -The following information is displayed on the **Saliency Map Visualization** page: - -- Objective dataset set by a user through the Python API of the dataset. -- Ground truth tags, prediction tags, and the prediction probabilities of the model for the corresponding tags. The system adds the TP, FN, and FP flags(meanings are provided in the page's information) in the upper left corner of the corresponding tag based on the actual requirements. -- A saliency map given by the selected explanation method. - -Operations: - -1. Select the required explanation methods. Currently, we support four explanation methods. More explanation methods will be provided in the future. -2. Click **Overlay on Original Image** in the upper right corner of the page to overlay the saliency map on the original image. -3. Click different tags to display the saliency map analysis results of the model for different tags. For different classification results, the focus of the model is usually different. -4. Check prediction type checkboxes to display images with the checked tag types: TP - true positive, FN - false negative, FP - false positive. -5. Use the tag filtering function on the upper part of the page to filter out images with specified tags. -6. Select an image display sequence from **Sort Images By** in the upper right corner of the page, options: "Probabilities in descending order" and "Uncertainties in descending order". -7. Click **View Score** on the right of an explanation method. The page for assessing all explanation methods is displayed. -8. Click image you will see the higher resolution image. - -![xai_saliency_map_detail](./images/xai_saliency_map_detail.png) - -### Explanation Method Assessment - -#### Comprehensive Assessment - -The provided explanation methods are scored from different dimensions. 
We provide various dimensions scores to help users compare the performance and select the most suitable one. You can configure weights for metrics in a specific scenario to obtain the comprehensive score. - -![xai_metrix_comprehensive](./images/xai_metrix_comprehensive.png) - -#### Classification Assessment - -The classification assessment page provides two types of comparison. One is to compare scores of different evaluation dimensions of the same explanation method in each tag. The other is to compare scores of different explanation methods of the same evaluation dimension in each tag. - -![xai_metrix_class](./images/xai_metrix_class.png) - -## Uncertainty - -The model predictions come with uncertainty, which is called [Epistemic Uncertainty](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn_probability/mindspore.nn.probability.toolbox.UncertaintyEvaluation.html#mindspore.nn.probability.toolbox.UncertaintyEvaluation). It inserts a dropout layer to the network and inferences multiple times. The results are standard deviation and 95% confidence interval of the model output predictions: - -![xai_saliency_map](./images/xai_uncertainty.png) - -The restrictions, preparation of network and data is the same as the saliency explanation methods, users enable uncertainty calculations by invoking `register_uncertainty()` of `ImageClassificiationRunner`. The sample code is shown below. - -```python -runner = ImageClassificationRunner(summary_dir='./summary_dir_1', network=net, activation_fn=activation_fn, data=data) -runner.register_saliency(explainers=[gradcam, guidedbackprop]) -runner.register_uncertainty() -runner.run() -``` - -Please note that `register_uncertainty()` must be used together with `register_saliency()`, their calling order doesn't matter. - -## Counterfactual - -Counterfactual is a relatively new way of explaining a model's decision, which inverts the decision by modifying the traits of the sample. 
For example, there is an animal image that is classified as a cat by the model. How can we edit that image in order to make the classification not happening? By answering that question, we can explain the model decision of classifying to "cat". Counterfactuals come in various forms, currently, `ImageClassificationRunner` provides an easy-to-use method called Hierarchical Occlusion Counterfactual (HOC), more counterfactual methods will be provided in the future. - -### Hierarchical Occlusion Counterfactual (HOC) - -HOC is an occlusion-based method, it searches for the smallest possible display region that is subjected to the constraint of the target label's prediction confidence greater than a threshold (currently fixed at 0.5). The search process is conducted in a hierarchical manner, at the beginning, the original image was covered by its blurred version, then HOC searches large occlusion areas and recursively deeps down into smaller areas for achieving a more accurate result. It ends up with an area tree, each node represents a square display area and the smaller child areas are fall inside the parent. The root node represents the entire area of the original image, its immediate children are the first layer display areas. - -At the moment, `ImageClassificationRunner` automatically generates the number of layers (1 to 3), the sizes of occluded areas, the strides, and the blur mask base on the image dimensions. The side length of the first layer occlusion square is defined as the round down of half of the short side of the image, we cut the side length in half in every next layer. Meanwhile, the side length has to be equals to or greater than 28, otherwise, stop adding layers. The stride is the round down of 1/5 of the occluded area's side length. - -The preparation of network and data is the same as the saliency explanation methods, users can employ HOC by invoking `register_hierarchical_occlusion()` of `ImageClassificiationRunner`. The sample code is shown below. 
- -```python -runner = ImageClassificationRunner(summary_dir='./summary_dir_1', network=net, activation_fn=activation_fn, data=data) -runner.register_hierarchical_occlusion() -runner.run() -``` - -Users may combine the use of `register_saliency()` with the same runner. - -#### HOC Restrictions - -- Apart from all the restrictions from saliency explanation methods, models must take 3 channels input images. -- Input images must be in RGB 3 channels format and the length of the short side must be equals to or greater than 56. -- If `register_hierarchical_occlusion()` is called but `register_saliency()` is not called, then both PyNative and Graph mode are supported. - -#### HOC Pages and Functions - -You can see that the 'Counterfactual Explanation' operations are enabled for those explanation jobs employed HOC. Clicking it will lead you to the HOC explanation page. - -![xai_hoc_index](./images/xai_hoc_index.png) - -The HOC explanation page displays all HOC results, includes: - -- Samples with prediction confidence of any tag that greater than 0.5 and their original images. -- Prediction confidence of the target tags. -- The outcome images and their prediction confidences of each layer. - -![xai_hoc](./images/xai_hoc.png) - -Operations: - -1. In the upper right corner of "Picture list" panel, there is a "Hide" switch. When the switch is turned on, the samples without HOC explanation result will not be displayed. By default, the switch is on and the users can turn it off to display all samples. -2. Change the tag filter and sampler sorting on the left "Picture list" panel. Samples can be sorted by prediction confidence. -3. Browse samples or switch to the next page in the sample list inside the left "Picture list" panel. Select a sample then its HOC results will be shown on the other panels. -4. Change the tag of HOC result showing on the center "Original Image" panel. Only tags with prediction confidence greater than 0.5 have HOC results. -5. 
Inspect the HOC search process on the bottom "Layer-by-layer Masking Process" panel, select a step image then it will be enlarged and shown on the right "View Explanation" panel. (Notes: The occluded regions were darkened and converted to greyscale for display, but it is not the case in the actual HOC search process, only Gaussian blur is employed while brightness and saturation are not altered.) diff --git a/tutorials/training/source_en/advanced_use/nlp.rst b/tutorials/training/source_en/advanced_use/nlp.rst deleted file mode 100644 index 9e81949150668904a30036c266a21929b3bb995d..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/nlp.rst +++ /dev/null @@ -1,8 +0,0 @@ -Natural Language Processing -=============================== - -.. toctree:: - :maxdepth: 1 - - nlp_sentimentnet - nlp_bert_poetry diff --git a/tutorials/training/source_en/advanced_use/nlp_bert_poetry.md b/tutorials/training/source_en/advanced_use/nlp_bert_poetry.md deleted file mode 100644 index 8382258aae3057acffd4c8db3d086674f4581354..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/nlp_bert_poetry.md +++ /dev/null @@ -1,304 +0,0 @@ -# Using the BERT Network to Implement Intelligent Poem Writing - -`Linux` `Ascend` `Model Training` `Inference Application` `Expert` - - - -- [Using the BERT Network to Implement Intelligent Poem Writing](#using-the-bert-network-to-implement-intelligent-poem-writing) - - [Case Overview](#case-overview) - - [Model Description](#model-description) - - [Model Training](#model-training) - - [Pre-training](#pre-training) - - [Fine-tuning](#fine-tuning) - - [Modifying a Model](#modifying-a-model) - - [Sample Code](#sample-code) - - [Implementation Procedure](#implementation-procedure) - - [Basic Information](#basic-information) - - [Data Preparation](#data-preparation) - - [Training](#training) - - [Inference Validation](#inference-validation) - - [Service Deployment](#service-deployment) - - 
[References](#references) - - - - -Poetry is an indispensable part of the five-millennium-old Chinese culture. When appreciating poetry, you can perceive the pure and vast world with ultimate sensibility and reduce stress and anxiety brought by the fast-paced world. As we know, one has to practice a skill a lot to become good at it. Today, let's see how the science-backed MindSpore trains a model to show its sense of arts! - -## Case Overview - -Use MindSpore to train an intelligent poem writing model and deploy the prediction service. The following flowchart shows the process: - -![introduce image](images/introduce.PNG) - -Figure 1: Case flowchart - -The following skips the process of pre-training BERT and directly describes the process of fine-tuning a pre-trained BERT-base model of MindSpore. - -In addition, the following shows how to deploy the model as a prediction service through MindSpore Serving. The client code can send a request to the prediction service and obtain the prediction result. - -## Model Description - -NLP-related networks are required to deal with poems. BERT, as a milestone model in the NLP domain, greatly promotes the development of the NLP community. The BERT model is proposed by Google and uses the Encoder structure in Transformer. It stacks multiple layers of Encoders and uses the attention mechanism to achieve the state of the art (SOTA) effect in multiple general language understanding evaluation (GLUE) tasks. - -This attention mechanism is different from the RNN structure and can be used for high-level parallel computing. In this way, the computing power of the Ascend 910 AI Processor can be fully utilized to achieve optimal performance. - -## Model Training - -There are two steps: pre-training and fine-tuning. Pre-training is first performed on a large amount of unlabeled data. It is expected that the model can master a common human language semantic mechanism through this process. 
Then, in the fine-tuning phase, training is performed on labeled data in a specific segmented domain to complete a specific task. - -### Pre-training - -Pre-training is self-coding training performed on unlabeled data. Therefore, the design of training tasks is especially important. Pre-training in BERT includes two tasks: masked language model (MLM) and next sentence prediction (NSP). - -- The **MLM task** randomly replaces some tokens with the [MASK] labels during input, and then predicts the original tokens based on the context through the attention mechanism. - -- The input of a BERT model is two sentences: A and B. When data is built, positions of A and B are randomly exchanged at a 50% probability. The **NSP task** is used to predict whether A and B are originally connected. - -Since the MLM task does not exist in the actual task, a pre-training NSP task that better matches the actual task type is added based on the MLM task. - -In the preceding description, the pre-training process does not require a task data label. Such an MLM training task is essentially a denoising self-coding model. Therefore, BERT may perform pre-training by using massive unlabeled data. Through tasks set in the pre-training stage, BERT can learn basic semantic logic from unlabeled data and then complete specific task training in cooperation with the fine-tuning process. - -The following figure shows the BERT model structure. If you enter two sentences in a Chinese model, each token corresponds to a Chinese character. [CLS] and [SEP] are inserted special tokens. - -![Teaser image](images/bert_model.PNG) - -Figure 2: BERT model structure [1] - -### Fine-tuning - -Fine-tuning is used to add a layer of adaptation task to the end of the pre-trained BERT model and then perform a small amount of training on labeled data. - -Fine-tuning modes are classified into two types: end-to-end fine-tuning and feature-based approach. 
The difference between the two modes lies in whether to modify parameters in the pre-trained BERT model at the fine-tuning stage. In most cases, end-to-end fine-tuning is used. - -### Modifying a Model - -BERT uses the Encoder structure. `attention_mask` is an all-ones vector. That is, each token can view tokens before and after it. This helps each token learn the entire sentence information and enhance the semantic understanding capability, therefore, BERT is not a generative model. - -In the statement generation task, when the next token is generated, only the information about the previous token can be viewed. You need to change `attention_mask` to the lower triangular matrix so that the current token can view only the information about itself and the previous token. - -The data used for fine-tuning is more than 40,000 poems without labels. The output of each token must be close to the output of the next labeled token, and the cross entropy is used as the loss function. - -![Teaser image](images/finetune.PNG) - -Figure 3 Training process - -## Sample Code - -Download the [sample code](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com:443/DemoCode/bert_poetry_c.rar) and run the sample code to view the poem writing effect. 
The code structure is as follows: - -```text -└─bert_poetry - ├── src - ├── bert_for_pre_training.py # Encapsulating BERT-base forward and backward network class - ├── bert_model.py # Defining the BERT forward network structure - ├── finetune_config.py # Fine-tuning configuration file - ├── fused_layer_norm.py # Defining fused_layer_norm - ├── __init__.py # __init__ - ├── utils.py # Defining the fine-tuning forward network structure - ├── poetry_utils.py # Tokenizer - └── poetry_dataset.py # Parsing poetry.txt and generating the required dataset - ├── vocab.txt # Vocabulary - ├── generator.py # Function used for generating poems during inference - ├── poetry.py # Training, inference, and export functions - ├── serving - ├── ms_serving # Enabling MindSpore Serving - ├── bert_flask.py # Receiving requests on a server. - ├── poetry_client.py # Client code - ├── ms_service_pb2_grpc.py # Defining grpc-related functions for bert_flask.py - └── ms_service_pb2.py # Defining protocol buffer-related functions for bert_flask.py - -``` - -## Implementation Procedure - -### Basic Information - -Perform training and inference on the Ascend 910 AI Processor using MindSpore 0.7.0-beta. - -### Data Preparation - -A dataset containing [43030 poems](https://github.com/AaronJny/DeepLearningExamples/tree/master/keras-bert-poetry-generator): `poetry.txt`. - -Pre-trained checkpoints of a BERT-base model: [Download from MindSpore](http://download.mindspore.cn/model_zoo/official/nlp/bert/bert_base_ascend_0.5.0_cn-wiki_official_nlp_20200720.tar.gz). - -### Training - -Modify the `pre_training_ckpt` path in `src/finetune_config.py`, load pre-trained checkpoints, set `batch_size` to bs, and set `dataset_path` to the path for storing poems. `BertConfig` is set to the base model by default. - -```python -'dataset_path': '/your/path/to/poetry.txt', -'batch_size': bs, -'pre_training_ckpt': '/your/path/to/pre_training_ckpt', -``` - -Run the training command. 
- -```bash -python poetry.py -``` - -### Inference Validation - -Modify the `test_eval` function in `poetry.py` to randomly generate a poem, continue to complete a poem, or generate an acrostic poem. - -The `generate_random_poetry` function is used to randomly generate and continue to complete a poem. If the input parameter `s` is empty, a poem is randomly generated. If the input parameter `s` is not empty, the poem writing continues based on the input value. - -```python - output = generate_random_poetry(poetrymodel, s='') #随机生成 - output = generate_random_poetry(poetrymodel, s='天下为公') #续写诗句 -``` - -The `generate_hidden` function is used to generate an acrostic poem. The value of the input parameter `head` is the first word in each line of a poem. - -```python - output = generate_hidden(poetrymodel, head="人工智能") #藏头诗 -``` - -Run the inference command. - -```bash -python poetry.py --train=False --ckpt_path=/your/ckpt/path -``` - -By default, a randomly generated poem, a poem completed based on the input value, and an acrostic poem are generated in the script. The output poems are as follows: - -A randomly generated poem: - -```text -大堤柳暗, -春深树根。 -东望一望, -断回还家。 -山色渐风雨, -东风多雨禾。 -无情与去, -万里所思。 -``` - -A poem completed based on the input value: - -```text -天下为公少, -唯君北向西。 -远山无路见, -长水见人偏。 -一路巴猿啸, -千峰楚客啼。 -幽深有诗策, -无以话年华。 -``` - -An acrostic poem: - -```text -人君离别难堪望, -工部张机自少年。 -智士不知身没处, -能令圣德属何年。 -``` - -### Service Deployment - -Use MindSpore Serving to deploy the trained model as an inference service. Server-side deployment includes the following steps: model export, Serving startup, and startup for preprocessing and post-processing services. A client sends an inference request to a server for model inference. The server returns the generated poem to the client for display. - -- Model export - - Before using Serving to deploy a service, export the MindIR model using the `export_net` function provided in `poetry.py`. 
- - ```bash - python poetry.py --export=True --ckpt_path=/your/ckpt/path - ``` - - The `poetry.pb` file is generated in the current path. - -- Serving startup - - Start Serving on the server and load the exported MindIR file `poetry.pb`. - - ```bash - cd serving - ./ms_serving --model_path=/path/to/your/MINDIR_file --model_name=your_mindir.pb - ``` - -- Startup for preprocessing and post-processing services - - Implement the preprocessing and post-processing services using the Flask framework. Run the `bert_flask.py` file on the server to start the Flask service. - - ```bash - python bert_flask.py - ``` - - After the preceding steps are performed, the server-side deployment is complete. - -- Client - - Use a computer as the client. Set the URL request address in `poetry_client.py` to the IP address of the server where the inference service is started, and ensure that the port number is the same as that in `bert_flask.py` on the server. For example: - - ```python - url = 'http://10.155.170.71:8080/' - ``` - - Run the `poetry_client.py` file. - - ```bash - python poetry_client.py - ``` - - Enter an instruction on the client to perform inference on the remote server to obtain a poem. - - ```text - 选择模式:0-随机生成,1:续写,2:藏头诗 - 0 - ``` - - ```text - 一朵黄花叶, - 千竿绿树枝。 - 含香待夏晚, - 澹浩长风时。 - ``` - - ```text - 选择模式:0-随机生成,1:续写,2:藏头诗 - 1 - 输入首句诗 - 明月 - ``` - - ```text - 明月照三峡, - 长空一片云。 - 秋风与雨过, - 唯有客舟分。 - 寒影出何处, - 远林含不闻。 - 不知前后事, - 何道逐风君。 - ``` - - ```text - 选择模式:0-随机生成,1:续写,2:藏头诗 - 2 - 输入藏头诗 - 人工智能 - ``` - - ```text - 人生事太远, - 工部与神期。 - 智者岂无识, - 能文争有疑。 - ``` - - Read the poem and appreciate its tonal patterns, rhymes, and meanings. An AI poet has established fame. - -> You can also modify other datasets to complete simple generation tasks, such as the Chinese New Year couplet writing and simple chat robot. 
- -## References - -[1] [BERT:Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805) - -[2] [https://github.com/AaronJny/DeepLearningExamples/](https://github.com/AaronJny/DeepLearningExamples/) - -[3] [https://github.com/bojone/bert4keras](https://github.com/bojone/bert4keras) diff --git a/tutorials/training/source_en/advanced_use/nlp_sentimentnet.md b/tutorials/training/source_en/advanced_use/nlp_sentimentnet.md deleted file mode 100644 index da025af64a3307cf06bd99c0854dedd5bf8b0423..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/nlp_sentimentnet.md +++ /dev/null @@ -1,305 +0,0 @@ -# Realizing Sentiment Classification With SentimentNet - -`Linux` `GPU` `CPU` `Whole Process` `Beginner` `Intermediate` `Expert` - - - -- [Realizing Sentiment Classification With SentimentNet](#realizing-sentiment-classification-with-sentimentnet) - - [Overview](#overview) - - [Preparation and Design](#preparation-and-design) - - [Downloading the Dataset](#downloading-the-dataset) - - [Determining Evaluation Criteria](#determining-evaluation-criteria) - - [Determining the Network and Process](#determining-the-network-and-process) - - [Implementation](#implementation) - - [Importing Library Files](#importing-library-files) - - [Configuring Environment Information](#configuring-environment-information) - - [Preprocessing the Dataset](#preprocessing-the-dataset) - - [Defining the Network](#defining-the-network) - - [Pre-Training](#pre-training) - - [Defining the Optimizer and Loss Function](#defining-the-optimizer-and-loss-function) - - [Training and Saving the Model](#training-and-saving-the-model) - - [Validating the Model](#validating-the-model) - - [Experimental Result](#experimental-result) - - - - - -## Overview - -Sentiment classification is a subset of text classification in NLP, and is one of the most basic applications of NLP. 
It is a process of analyzing and inferencing affective states and subjective information, that is, analyzing whether a person's sentiment is positive or negative. - -> Generally, sentiments are classified into three categories: positive, negative, and neutral. In most cases, only positive and negative sentiments are used for training regardless of the neutral sentiments. The following dataset is a good example. - -[20 Newsgroups](http://qwone.com/~jason/20Newsgroups/) is a typical reference dataset for traditional text classification. It is a collection of approximately 20,000 news documents partitioned across 20 different newsgroups. -Some of the newsgroups are very closely related to each other (such as comp.sys.ibm.pc.hardware and comp.sys.mac.hardware), while others are highly unrelated (such as misc.forsale and soc.religion.christian). - -In terms of the network itself, the network structure of text classification is roughly similar to that of sentiment classification. After mastering how to construct the sentiment classification network, it is easy to construct a similar network which can be used in a text classification task after fine-tuning some parameters. - -In the service context, text classification is to analyze the objective content discussed in the text, but sentiment classification is to find a viewpoint, which is supported by the content in the text. For example, "Forrest Gump has a clear theme and smooth pacing, which is excellent." In the text classification, this sentence is classified into a "movie" theme, but in the sentiment classification, this movie review is used to explore whether the sentiment is positive or negative. - -Compared with traditional text classification, sentiment classification is simpler and more practical. High-quality datasets can be collected from common shopping websites and movie websites to benefit the business domains. 
For example, based on the domain context, the system can automatically analyze opinions of specific types of customers on the current product, analyze sentiments by subject and user type, and even recommend products based on the analysis result, therefore to improve the conversion rate and bring more business benefits. - -In special fields, some non-polar words also fully express a sentimental tendency of a user. For example, when an app is downloaded and used, "the app is stuck" and "the download speed is so slow" express users' negative sentiments. In the stock market, "bullish" and "bull market" express users' positive sentiments. Therefore, in essence, we hope that the model can be used to mine special expressions in the vertical field as polarity words for the sentiment classification system. - -Vertical polarity word = General polarity word + Domain-specific polarity word - -According to the text processing granularity, sentiment analysis can be divided into word, phrase, sentence, paragraph, and chapter levels. A sentiment analysis at paragraph level is used as an example. The input is a paragraph, and the output is information about whether the movie review is positive or negative. - -## Preparation and Design - -### Downloading the Dataset - -The IMDb movie review dataset is used as experimental data. -> Dataset download address: - -The following are cases of negative and positive reviews. - -| Review | Label | -|---|---| -| "Quitting" may be as much about exiting a pre-ordained identity as about drug withdrawal. As a rural guy coming to Beijing, class and success must have struck this young artist face on as an appeal to separate from his roots and far surpass his peasant parents' acting success. Troubles arise, however, when the new man is too new, when it demands too big a departure from family, history, nature, and personal identity. 
The ensuing splits, and confusion between the imaginary and the real and the dissonance between the ordinary and the heroic are the stuff of a gut check on the one hand or a complete escape from self on the other. | Negative | -| This movie is amazing because the fact that the real people portray themselves and their real life experience and do such a good job it's like they're almost living the past over again. Jia Hongsheng plays himself an actor who quit everything except music and drugs struggling with depression and searching for the meaning of life while being angry at everyone especially the people who care for him most. | Positive | - -Download the GloVe file and add the following line at the beginning of the file, which means that a total of 400,000 words are read, and each word is represented by a word vector of 300 latitudes. - -```text -400000 300 -``` - -GloVe file download address: - -### Determining Evaluation Criteria - -As a typical classification, the evaluation criteria of sentiment classification can be determined by referring to that of the common classification. For example, accuracy, precision, recall, and F_beta scores can be used as references. - -Accuracy = Number of accurately classified samples/Total number of samples - -Precision = True positives/(True positives + False positives) - -Recall = True positives/(True positives + False negatives) - -F1 score = (2 x Precision x Recall)/(Precision + Recall) - -In the IMDb dataset, the number of positive and negative samples does not vary greatly. Accuracy can be used as the evaluation criterion of the classification system. - -### Determining the Network and Process - -Currently, MindSpore GPU and CPU supports SentimentNet network based on the long short-term memory (LSTM) network for NLP. - -1. Load the dataset in use and process data if necessary. -2. Use the SentimentNet network based on LSTM to train data and generate a model. 
- Long short-term memory (LSTM) is an artificial recurrent neural network (RNN) architecture used for processing and predicting an important event with a long interval and delay in a time sequence. For details, refer to the online documentation. -3. After the model is obtained, use the validation dataset to check the accuracy of model. - -> The current sample is for the Ascend 910 AI processor. You can find the complete executable sample code at . -> -> - `src/config.py`: some configurations of the network, including the batch size and number of training epochs. -> - `src/dataset.py`: dataset related definition, including converted MindRecord file and preprocessed data. -> - `src/imdb.py`: the utility class for parsing IMDb dataset. -> - `src/lstm.py`: the definition of semantic net. -> - `train.py`: the training script. -> - `eval.py`: the evaluation script. - -## Implementation - -### Importing Library Files - -The following are the required public modules and MindSpore modules and library files. - -```python -import argparse -import os - -import numpy as np - -from src.config import lstm_cfg as cfg -from src.dataset import convert_to_mindrecord -from src.dataset import lstm_create_dataset -from src.lstm import SentimentNet -from mindspore import Tensor, nn, Model, context, load_param_into_net, load_checkpoint -from mindspore.nn import Accuracy -from mindspore.train.callback import LossMonitor, CheckpointConfig, ModelCheckpoint, TimeMonitor -``` - -### Configuring Environment Information - -1. The `parser` module is used to transfer necessary information for running, such as storage paths of the dataset and the GloVe file. In this way, the frequently changed configurations can be entered during runtime, which is more flexible. 
- - ```python - parser = argparse.ArgumentParser(description='MindSpore LSTM Example') - parser.add_argument('--preprocess', type=str, default='false', choices=['true', 'false'], - help='whether to preprocess data.') - parser.add_argument('--aclimdb_path', type=str, default="./aclImdb", - help='path where the dataset is stored.') - parser.add_argument('--glove_path', type=str, default="./glove", - help='path where the GloVe is stored.') - parser.add_argument('--preprocess_path', type=str, default="./preprocess", - help='path where the pre-process data is stored.') - parser.add_argument('--ckpt_path', type=str, default="./", - help='the path to save the checkpoint file.') - parser.add_argument('--pre_trained', type=str, default=None, - help='the pretrained checkpoint file path.') - parser.add_argument('--device_target', type=str, default="GPU", choices=['GPU', 'CPU'], - help='the target device to run, support "GPU", "CPU". Default: "GPU".') - args = parser.parse_args() - ``` - -2. Before implementing code, configure the necessary information, including the environment information, execution mode, backend information, and hardware information. - - ```python - context.set_context( - mode=context.GRAPH_MODE, - save_graphs=False, - device_target=args.device_target) - ``` - - For details about the API configuration, see the `context.set_context`. - -### Preprocessing the Dataset - -Convert the dataset format to the MindRecord format for MindSpore to read. - -```python -if args.preprocess == "true": - print("============== Starting Data Pre-processing ==============") - convert_to_mindrecord(cfg.embed_size, args.aclimdb_path, args.preprocess_path, args.glove_path) -``` - -> After successful conversion, `mindrecord` files are generated under the directory `preprocess_path`. Usually, this operation does not need to be performed every time if the dataset is unchanged. -> For `convert_to_mindrecord`, you can find the complete definition at: . 
-> It consists of two steps: -> ->1. Process the text dataset, including encoding, word segmentation, alignment, and processing the original GloVe data to adapt to the network structure. ->2. Convert the dataset format to the MindRecord format. - -### Defining the Network - -```python -embedding_table = np.loadtxt(os.path.join(args.preprocess_path, "weight.txt")).astype(np.float32) -network = SentimentNet(vocab_size=embedding_table.shape[0], - embed_size=cfg.embed_size, - num_hiddens=cfg.num_hiddens, - num_layers=cfg.num_layers, - bidirectional=cfg.bidirectional, - num_classes=cfg.num_classes, - weight=Tensor(embedding_table), - batch_size=cfg.batch_size) -``` - -> For `SentimentNet`, you can find the complete definition at: . - -### Pre-Training - -The parameter `pre_trained` specifies the preloading CheckPoint file for pre-training, which is empty by default. - -```python -if args.pre_trained: - load_param_into_net(network, load_checkpoint(args.pre_trained)) -``` - -### Defining the Optimizer and Loss Function - -The sample code for defining the optimizer and loss function is as follows: - -```python -loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') -opt = nn.Momentum(network.trainable_params(), cfg.learning_rate, cfg.momentum) -loss_cb = LossMonitor() -``` - -### Training and Saving the Model - -Load the corresponding dataset, configure the CheckPoint generation information, and train the model using the `model.train` API. 
- -```python -model = Model(network, loss, opt, {'acc': Accuracy()}) - -print("============== Starting Training ==============") -ds_train = lstm_create_dataset(args.preprocess_path, cfg.batch_size) -config_ck = CheckpointConfig(save_checkpoint_steps=cfg.save_checkpoint_steps, - keep_checkpoint_max=cfg.keep_checkpoint_max) -ckpoint_cb = ModelCheckpoint(prefix="lstm", directory=args.ckpt_path, config=config_ck) -time_cb = TimeMonitor(data_size=ds_train.get_dataset_size()) -if args.device_target == "CPU": - model.train(cfg.num_epochs, ds_train, callbacks=[time_cb, ckpoint_cb, loss_cb], dataset_sink_mode=False) -else: - model.train(cfg.num_epochs, ds_train, callbacks=[time_cb, ckpoint_cb, loss_cb]) -print("============== Training Success ==============") -``` - -> For `lstm_create_dataset`, you can find the complete definition at: . - -### Validating the Model - -Load the validation dataset and saved CheckPoint file, perform validation, and view the model quality. - -```python -model = Model(network, loss, opt, {'acc': Accuracy()}) - -print("============== Starting Testing ==============") -ds_eval = lstm_create_dataset(args.preprocess_path, cfg.batch_size, training=False) -param_dict = load_checkpoint(args.ckpt_path) -load_param_into_net(network, param_dict) -if args.device_target == "CPU": - acc = model.eval(ds_eval, dataset_sink_mode=False) -else: - acc = model.eval(ds_eval) -print("============== {} ==============".format(acc)) -``` - -## Experimental Result - -After 20 epochs, the accuracy on the test set is about 84.19%. - -**Training Execution:** - -1. Run the training code and view the running result. - - ```shell - python train.py --preprocess=true --ckpt_path=./ --device_target=GPU - ``` - - As shown in the following output, the loss value decreases gradually with the training process and reaches about 0.2855. 
- - ```shell - ============== Starting Data Pre-processing ============== - vocab_size: 252192 - ============== Starting Training ============== - epoch: 1 step: 1, loss is 0.6935 - epoch: 1 step: 2, loss is 0.6924 - ... - epoch: 10 step: 389, loss is 0.2675 - epoch: 10 step: 390, loss is 0.3232 - ... - epoch: 20 step: 389, loss is 0.1354 - epoch: 20 step: 390, loss is 0.2855 - ``` - -2. Check the saved CheckPoint files. - - CheckPoint files (model files) are saved during the training. You can view all saved files in the file path. - - ```shell - ls ./*.ckpt - ``` - - The output is as follows: - - ```shell - lstm-11_390.ckpt lstm-12_390.ckpt lstm-13_390.ckpt lstm-14_390.ckpt lstm-15_390.ckpt lstm-16_390.ckpt lstm-17_390.ckpt lstm-18_390.ckpt lstm-19_390.ckpt lstm-20_390.ckpt - ``` - -**Model Validation:** - -Use the last saved CheckPoint file to load and validate the dataset. - -```shell -python eval.py --ckpt_path=./lstm-20_390.ckpt --device_target=GPU -``` - -As shown in the following output, the sentiment analysis accuracy of the text is about 84.19%, which is basically satisfactory. 
- -```shell -============== Starting Testing ============== -============== {'acc': 0.8419471153846154} ============== -``` diff --git a/tutorials/training/source_en/advanced_use/optimize_data_processing.ipynb b/tutorials/training/source_en/advanced_use/optimize_data_processing.ipynb deleted file mode 100644 index 2a0ee910553b61b27307c5aad335a8b8d4b58819..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/optimize_data_processing.ipynb +++ /dev/null @@ -1,783 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "literary-december", - "metadata": {}, - "source": [ - "# Optimizing the Data Processing\n", - "\n", - "`Linux` `Ascend` `GPU` `CPU` `Data Preparation` `Intermediate` `Expert`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_en/advanced_use/optimize_data_processing.ipynb)" - ] - }, - { - "cell_type": "markdown", - "id": "million-tumor", - "metadata": {}, - "source": [ - "## Overview\n", - "\n", - "Data is the most important factor of deep learning. Data quality determines the upper limit of deep learning result, whereas model quality enables the result to approach the upper limit. Therefore, high-quality data input is beneficial to the entire deep neural network. During the entire data processing and data augmentation process, data continuously flows through a pipeline to the training system." - ] - }, - { - "cell_type": "markdown", - "id": "right-skating", - "metadata": {}, - "source": [ - "![pipeline](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/advanced_use/images/pipeline.png)" - ] - }, - { - "cell_type": "markdown", - "id": "natural-extraction", - "metadata": {}, - "source": [ - "MindSpore provides data processing and data augmentation functions for users. 
In the pipeline process, if each step can be properly used, the data performance will be greatly improved. This section describes how to optimize performance during data loading, data processing, and data augmentation based on the CIFAR-10 dataset [1].\n", - "\n", - "In addition, the storage, architecture and computing resources of the operating system will influence the performance of data processing to a certain extent.\n", - "\n", - "## Preparations\n", - "\n", - "### Importing Modules\n", - "\n", - "The `dataset` module provides APIs for loading and processing datasets." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "composed-shape", - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds" - ] - }, - { - "cell_type": "markdown", - "id": "ignored-suspension", - "metadata": {}, - "source": [ - "The `numpy` module is used to generate ndarrays." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "silver-refund", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np" - ] - }, - { - "cell_type": "markdown", - "id": "burning-margin", - "metadata": {}, - "source": [ - "### Downloading the Required Dataset\n", - "\n", - "Run the following command to download the dataset:\n", - "Download the CIFAR-10 Binary format dataset, decompress them and store them in the `./datasets` path, use this dataset when loading data." 
- ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "returning-walnut", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/cifar-10-batches-bin\n", - "├── readme.html\n", - "├── test\n", - "│   └── test_batch.bin\n", - "└── train\n", - " ├── batches.meta.txt\n", - " ├── data_batch_1.bin\n", - " ├── data_batch_2.bin\n", - " ├── data_batch_3.bin\n", - " ├── data_batch_4.bin\n", - " └── data_batch_5.bin\n", - "\n", - "2 directories, 8 files\n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz\n", - "!mkdir -p datasets\n", - "!tar -xzf cifar-10-binary.tar.gz -C datasets\n", - "!mkdir -p datasets/cifar-10-batches-bin/train datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/test_batch.bin datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/data_batch*.bin datasets/cifar-10-batches-bin/batches.meta.txt datasets/cifar-10-batches-bin/train\n", - "!tree ./datasets/cifar-10-batches-bin" - ] - }, - { - "cell_type": "markdown", - "id": "described-russia", - "metadata": {}, - "source": [ - "Download cifar-10 Python file format dataset, decompress them in the `./datasets/cifar-10-batches-py` path, use this dataset when converting data." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "further-traveler", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/cifar-10-batches-py\n", - "├── batches.meta\n", - "├── data_batch_1\n", - "├── data_batch_2\n", - "├── data_batch_3\n", - "├── data_batch_4\n", - "├── data_batch_5\n", - "├── readme.html\n", - "└── test_batch\n", - "\n", - "0 directories, 8 files\n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-python.tar.gz\n", - "!mkdir -p datasets\n", - "!tar -xzf cifar-10-python.tar.gz -C datasets\n", - "!tree ./datasets/cifar-10-batches-py" - ] - }, - { - "cell_type": "markdown", - "id": "corporate-monday", - "metadata": {}, - "source": [ - "## Optimizing the Data Loading Performance\n", - "\n", - "MindSpore provides multiple data loading methods, including common dataset loading, user-defined dataset loading, and the MindSpore data format loading. The dataset loading performance varies depending on the underlying implementation method.\n", - "\n", - "| | Common Dataset | User-defined Dataset | MindRecord Dataset |\n", - "| :----: | :----: | :----: | :----: |\n", - "| Underlying implementation | C++ | Python | C++ |\n", - "| Performance | High | Medium | High |\n", - "\n", - "### Performance Optimization Solution" - ] - }, - { - "cell_type": "markdown", - "id": "correct-swing", - "metadata": {}, - "source": [ - "![data-loading-performance-scheme](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/advanced_use/images/data_loading_performance_scheme.png)" - ] - }, - { - "cell_type": "markdown", - "id": "removed-repair", - "metadata": {}, - "source": [ - "Suggestions on data loading performance optimization are as follows:\n", - "\n", - "- Built-in loading operators are preferred for supported dataset formats. 
For details, see [Built-in Loading Operators](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.html), if the performance cannot meet the requirements, use the multi-thread concurrency solution. For details, see [Multi-thread Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#multi-thread-optimization-solution).\n", - "- For a dataset format that is not supported, convert the format to the MindSpore data format and then use the `MindDataset` class to load the dataset (Please refer to the [API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/dataset/mindspore.dataset.MindDataset.html) for detailed use). Please refer to [Converting Dataset to MindRecord](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/convert_dataset.html), if the performance cannot meet the requirements, use the multi-thread concurrency solution, for details, see [Multi-thread Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#multi-thread-optimization-solution).\n", - "- For dataset formats that are not supported, the user-defined `GeneratorDataset` class is preferred for implementing fast algorithm verification (Please refer to the [API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/dataset/mindspore.dataset.GeneratorDataset.html) for detailed use), if the performance cannot meet the requirements, the multi-process concurrency solution can be used. 
For details, see [Multi-process Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#multi-process-optimization-solution).\n", - "\n", - "### Code Example\n", - "\n", - "Based on the preceding suggestions of data loading performance optimization, the `Cifar10Dataset` class of built-in loading operators (Please refer to the [API](https://www.mindspore.cn/doc/api_python/en/master/mindspore/dataset/mindspore.dataset.Cifar10Dataset.html) for detailed use), the `MindDataset` class after data conversion, and the `GeneratorDataset` class are used to load data. The sample code is displayed as follows:\n", - "\n", - "1. Use the `Cifar10Dataset` class of built-in operators to load the CIFAR-10 dataset in binary format. The multi-thread optimization solution is used for data loading. Four threads are enabled to concurrently complete the task. Finally, a dictionary iterator is created for the data and a data record is read through the iterator." 
- ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "nonprofit-liquid", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'image': Tensor(shape=[32, 32, 3], dtype=UInt8, value=\n", - "[[[209, 206, 192],\n", - " [211, 209, 201],\n", - " [221, 217, 213],\n", - " ...\n", - " [172, 175, 194],\n", - " [169, 173, 190],\n", - " [115, 121, 145]],\n", - " [[226, 230, 211],\n", - " [227, 229, 218],\n", - " [230, 232, 221],\n", - " ...\n", - " [153, 153, 171],\n", - " [156, 156, 173],\n", - " [106, 111, 129]],\n", - " [[214, 226, 203],\n", - " [214, 222, 204],\n", - " [217, 227, 206],\n", - " ...\n", - " [167, 166, 176],\n", - " [147, 147, 156],\n", - " [ 78, 84, 96]],\n", - " ...\n", - " [[ 40, 69, 61],\n", - " [ 37, 63, 57],\n", - " [ 43, 68, 66],\n", - " ...\n", - " [ 55, 70, 69],\n", - " [ 40, 54, 51],\n", - " [ 27, 44, 36]],\n", - " [[ 33, 61, 50],\n", - " [ 37, 65, 56],\n", - " [ 54, 72, 74],\n", - " ...\n", - " [ 47, 60, 56],\n", - " [ 58, 66, 64],\n", - " [ 36, 50, 46]],\n", - " [[ 29, 41, 37],\n", - " [ 38, 60, 59],\n", - " [ 51, 76, 81],\n", - " ...\n", - " [ 32, 51, 43],\n", - " [ 47, 61, 54],\n", - " [ 56, 67, 66]]]), 'label': Tensor(shape=[], dtype=UInt32, value= 5)}\n" - ] - } - ], - "source": [ - " cifar10_path = \"./datasets/cifar-10-batches-bin/train\"\n", - "\n", - " # create Cifar10Dataset for reading data\n", - " cifar10_dataset = ds.Cifar10Dataset(cifar10_path, num_parallel_workers=4)\n", - " # create a dictionary iterator and read a data record through the iterator\n", - " print(next(cifar10_dataset.create_dict_iterator()))" - ] - }, - { - "cell_type": "markdown", - "id": "higher-bangkok", - "metadata": {}, - "source": [ - "2. Use the `Cifar10ToMR` class to convert the CIFAR-10 dataset into the MindSpore data format. In this example, the CIFAR-10 dataset in Python file format is used. Then use the `MindDataset` class to load the dataset in the MindSpore data format. 
The multi-thread optimization solution is used for data loading. Four threads are enabled to concurrently complete the task. Finally, a dictionary iterator is created for data and a data record is read through the iterator." - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "hourly-boston", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'data': Tensor(shape=[1283], dtype=UInt8, value= [255, 216, 255, 224, 0, 16, 74, 70, 73, 70, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 255, 219, 0, 67, \n", - " 107, 249, 17, 58, 213, 185, 117, 181, 143, 255, 217]), 'id': Tensor(shape=[], dtype=Int64, value= 32476), 'label': Tensor(shape=[], dtype=Int64, value= 9)}\n" - ] - } - ], - "source": [ - "import os\n", - "from mindspore.mindrecord import Cifar10ToMR\n", - "\n", - "trans_path = \"./transform/\"\n", - "\n", - "if not os.path.exists(trans_path):\n", - " os.mkdir(trans_path)\n", - "\n", - "os.system(\"rm -f {}cifar10*\".format(trans_path))\n", - "\n", - "cifar10_path = './datasets/cifar-10-batches-py'\n", - "cifar10_mindrecord_path = './transform/cifar10.record'\n", - "\n", - "cifar10_transformer = Cifar10ToMR(cifar10_path,cifar10_mindrecord_path)\n", - "# execute transformation from CIFAR-10 to MindRecord\n", - "cifar10_transformer.transform(['label'])\n", - "\n", - "# create MindDataset for reading data\n", - "cifar10_mind_dataset = ds.MindDataset(dataset_file=cifar10_mindrecord_path,num_parallel_workers=4)\n", - "# create a dictionary iterator and read a data record through the iterator\n", - "print(next(cifar10_mind_dataset.create_dict_iterator()))" - ] - }, - { - "cell_type": "markdown", - "id": "liquid-collection", - "metadata": {}, - "source": [ - "3. The `GeneratorDataset` class is used to load the user-defined dataset, and the multi-process optimization solution is used. Four processes are enabled to concurrently complete the task. 
Finally, a dictionary iterator is created for the data, and a data record is read through the iterator." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "familiar-ghana", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'data': Tensor(shape=[1], dtype=Int64, value= [0])}\n" - ] - } - ], - "source": [ - " def generator_func(num):\n", - " for i in range(num):\n", - " yield (np.array([i]),)\n", - "\n", - " # create a GeneratorDataset object for reading data\n", - " dataset = ds.GeneratorDataset(source=generator_func(5), column_names=[\"data\"], num_parallel_workers=4)\n", - " # create a dictionary iterator and read a data record through the iterator\n", - " print(next(dataset.create_dict_iterator()))" - ] - }, - { - "cell_type": "markdown", - "id": "sufficient-hunger", - "metadata": {}, - "source": [ - "## Optimizing the Shuffle Performance\n", - "\n", - "The shuffle operation is used to shuffle ordered datasets or repeated datasets. MindSpore provides the `shuffle` function for users. A larger value of `buffer_size` indicates a higher shuffling degree, consuming more time and computing resources. This API allows users to shuffle the data at any time during the entire pipeline process.Please refer to [shuffle](https://www.mindspore.cn/doc/programming_guide/en/master/pipeline.html#shuffle). 
However, because the underlying implementation methods are different, the performance of this method is not as good as that of setting the `shuffle` parameter to directly shuffle data by referring to the [Built-in Loading Operators](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.html).\n", - "\n", - "### Performance Optimization Solution" - ] - }, - { - "cell_type": "markdown", - "id": "assured-rugby", - "metadata": {}, - "source": [ - "![shuffle-performance-scheme](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/advanced_use/images/shuffle_performance_scheme.png)" - ] - }, - { - "cell_type": "markdown", - "id": "detailed-large", - "metadata": {}, - "source": [ - "Suggestions on shuffle performance optimization are as follows:\n", - "\n", - "- Use the `shuffle` parameter of built-in loading operators to shuffle data.\n", - "- If the `shuffle` function is used and the performance still cannot meet the requirements, adjust the value of the `buffer_size` parameter to improve the performance.\n", - "\n", - "### Code Example\n", - "\n", - "Based on the preceding shuffle performance optimization suggestions, the `shuffle` parameter of the `Cifar10Dataset` class of built-in loading operators and the `Shuffle` function are used to shuffle data. The sample code is displayed as follows:\n", - "\n", - "1. Use the `Cifar10Dataset` class of built-in operators to load the CIFAR-10 dataset. In this example, the CIFAR-10 dataset in binary format is used, and the `shuffle` parameter is set to True to perform data shuffle. Finally, a dictionary iterator is created for the data and a data record is read through the iterator." 
- ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "extensive-pittsburgh", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'image': Tensor(shape=[32, 32, 3], dtype=UInt8, value=\n", - "[[[119, 193, 196],\n", - " [121, 192, 204],\n", - " [123, 193, 209],\n", - " ...\n", - " [110, 168, 177],\n", - " [109, 167, 176],\n", - " [110, 168, 178]],\n", - " [[110, 188, 199],\n", - " [109, 185, 202],\n", - " [111, 186, 204],\n", - " ...\n", - " [107, 173, 179],\n", - " [107, 173, 179],\n", - " [109, 175, 182]],\n", - " [[110, 186, 200],\n", - " [108, 183, 199],\n", - " [110, 184, 199],\n", - " ...\n", - " [115, 183, 189],\n", - " [117, 185, 190],\n", - " [117, 185, 191]],\n", - " ...\n", - " [[210, 253, 250],\n", - " [212, 251, 250],\n", - " [214, 250, 249],\n", - " ...\n", - " [194, 247, 247],\n", - " [190, 246, 245],\n", - " [184, 245, 244]],\n", - " [[215, 253, 251],\n", - " [218, 252, 250],\n", - " [220, 251, 249],\n", - " ...\n", - " [200, 248, 248],\n", - " [195, 247, 245],\n", - " [189, 245, 244]],\n", - " [[216, 253, 253],\n", - " [222, 251, 250],\n", - " [225, 250, 249],\n", - " ...\n", - " [204, 249, 248],\n", - " [200, 246, 244],\n", - " [196, 245, 244]]]), 'label': Tensor(shape=[], dtype=UInt32, value= 0)}\n" - ] - } - ], - "source": [ - " cifar10_path = \"./datasets/cifar-10-batches-bin/train\"\n", - "\n", - " # create Cifar10Dataset for reading data\n", - " cifar10_dataset = ds.Cifar10Dataset(cifar10_path, shuffle=True)\n", - " # create a dictionary iterator and read a data record through the iterator\n", - " print(next(cifar10_dataset.create_dict_iterator()))" - ] - }, - { - "cell_type": "markdown", - "id": "norman-anderson", - "metadata": {}, - "source": [ - "2. Use the `shuffle` function to shuffle data. Set `buffer_size` to 3 and use the `GeneratorDataset` class to generate data." 
- ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "opened-element", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "before shuffle:\n", - "[0 1 2 3 4]\n", - "[1 2 3 4 5]\n", - "[2 3 4 5 6]\n", - "[3 4 5 6 7]\n", - "[4 5 6 7 8]\n", - "after shuffle:\n", - "[2 3 4 5 6]\n", - "[0 1 2 3 4]\n", - "[1 2 3 4 5]\n", - "[4 5 6 7 8]\n", - "[3 4 5 6 7]\n" - ] - } - ], - "source": [ - " def generator_func():\n", - " for i in range(5):\n", - " yield (np.array([i, i+1, i+2, i+3, i+4]),)\n", - "\n", - " ds1 = ds.GeneratorDataset(source=generator_func, column_names=[\"data\"])\n", - " print(\"before shuffle:\")\n", - " for data in ds1.create_dict_iterator():\n", - " print(data[\"data\"])\n", - "\n", - " ds2 = ds1.shuffle(buffer_size=3)\n", - " print(\"after shuffle:\")\n", - " for data in ds2.create_dict_iterator():\n", - " print(data[\"data\"])" - ] - }, - { - "cell_type": "markdown", - "id": "hearing-taxation", - "metadata": {}, - "source": [ - "## Optimizing the Data Augmentation Performance\n", - "\n", - "During image classification training, especially when the dataset is small, users can use data augmentation to preprocess images to enrich the dataset. MindSpore provides multiple data augmentation methods, including:\n", - "\n", - "- Use the built-in C operator (`c_transforms` module) to perform data augmentation.\n", - "- Use the built-in Python operator (`py_transforms` module) to perform data augmentation.\n", - "- Users can define Python functions as needed to perform data augmentation.\n", - "\n", - "Please refer to [Data Augmentation](https://www.mindspore.cn/doc/programming_guide/en/master/augmentation.html). 
The performance varies according to the underlying implementation methods.\n", - "\n", - "| Module | Underlying API | Description |\n", - "| :----: | :----: | :----: |\n", - "| c_transforms | C++ (based on OpenCV) | High performance |\n", - "| py_transforms | Python (based on PIL) | This module provides multiple image augmentation functions and the method for converting PIL images into NumPy arrays |\n", - "\n", - "### Performance Optimization Solution" - ] - }, - { - "cell_type": "markdown", - "id": "first-oxide", - "metadata": {}, - "source": [ - "![data-enhancement-performance-scheme](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/advanced_use/images/data_enhancement_performance_scheme.png)" - ] - }, - { - "cell_type": "markdown", - "id": "unauthorized-boston", - "metadata": {}, - "source": [ - "Suggestions on data augmentation performance optimization are as follows:\n", - "\n", - "- The `c_transforms` module is preferentially used to perform data augmentation for its highest performance. 
If the performance cannot meet the requirements, refer to [Multi-thread Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#multi-thread-optimization-solution), [Compose Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#compose-optimization-solution), or [Operator Fusion Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#operator-fusion-optimization-solution).\n", - "- If the `py_transforms` module is used to perform data augmentation and the performance still cannot meet the requirements, refer to [Multi-thread Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#multi-thread-optimization-solution), [Multi-process Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#multi-process-optimization-solution), [Compose Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#compose-optimization-solution), or [Operator Fusion Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#operator-fusion-optimization-solution).\n", - "- The `c_transforms` module maintains buffer management in C++, and the `py_transforms` module maintains buffer management in Python. 
Because of the performance cost of switching between Python and C++, it is advised not to use different operator types together.\n", - "- If the user-defined Python functions are used to perform data augmentation and the performance still cannot meet the requirements, use the [Multi-thread Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#multi-thread-optimization-solution) or [Multi-process Optimization Solution](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/optimize_data_processing.html#multi-process-optimization-solution). If the performance still cannot be improved, in this case, optimize the user-defined Python code.\n", - "\n", - "### Code Example\n", - "\n", - "Based on the preceding suggestions of data augmentation performance optimization, the `c_transforms` module and user-defined Python function are used to perform data augmentation. The code is displayed as follows:\n", - "\n", - "1. The `c_transforms` module is used to perform data augmentation. During data augmentation, the multi-thread optimization solution is used. Four threads are enabled to concurrently complete the task. The operator fusion optimization solution is used and the `RandomResizedCrop` fusion class is used to replace the `RandomResize` and `RandomCrop` classes." 
- ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "historic-sensitivity", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQEAAAD8CAYAAAB3lxGOAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAACRKElEQVR4nO39bcxu3XYWhl1j3S+YiBBsH1PLsml8/CGQ/4AdK7ZFVLlYVOAinB+U2I2CiSy5iiBy1FSx3UpNFbWSqaoQo0ZurCapqWgMcUKDEIISxyiKVFwMQU6xcThQE9vyRyDGuKCGPPcc/THHxzXGnOu+72e/+z372ec8c+/7WWvNtdZc82tc4xpjzjWXqCpew2t4DZ++4XjXGXgNr+E1vNvwCgKv4TV8modXEHgNr+HTPLyCwGt4DZ/m4RUEXsNr+DQPryDwGl7Dp3n4SEBARH67iPyEiHxCRL7jo3jGa3gNr+HtBHnb8wRE5ALgvwTw2wD8NIC/COCbVPXH3uqDXsNreA1vJXwUTOCfBPAJVf2bqvoPAXw/gG/4CJ7zGl7Da3gL4YOPIM3PB/BTdPzTAL6qXyQi3wrgWwHgMz7jV/0Tn/8Fn3+e4oasyNmJs1v09vWAeKKb5zwQpB9snvWWJ2dqT/RtT/68md7JyTt50AczqdCTKtQeUeO1XlOZru+3Vo2ml30cdw2R2BcRunbXU57RPwEspHxfBXH3c0n8f/Vf/dTfVtVf1+M/ChB4KKjq9wL4XgD44i/5Ev2D//v/w+6q2HShX0DAa0S5grwWdR6f1ZrMPyI9bo0XOjjbPwsf1vTSKFtuXVh4W55zBn7bXpi70o/b9aIkavyM6LhKUW2ft5FXOhPlyf15ndo+5jmLV1UMHdBhWx0YQ+dWbTtGPEtEZnvJbFs/Xn7H3B7HgeMQ+819kaMcH8dBFVcrVNvxLLe1n1VN7Gueg+8j46MvY+nyd0HhX/gX/sDf2sV/FObAzwD49XT8BRb3SQ6v70S8i/DWa13K5q0+4GGWt9zx/Ds/ivCA3nkofBQg8BcBfKmIfFxEfiWAbwTwJz+C5zw/3Ky0T2XQeBmd9rGwz6s+VIQ75l678vEW3/D0txWe3TR5w9vy6b91c0BVn0TkDwD4swAuAP4dVf2rb/s5bxQ+SXJ+4hH4yMO5vf0OcvPsR76NWnvcFyN4E2jUN77zZpIU5PzUzdgPEz4Sn4Cq/mkAf/qjSPuTH7zhX354q+DzSccNbduT028SZPo0Vn2uJw69TQJl+9H1iXehPN6ZY7AEd4RsgjsAi19X23F3SlF/0hZZvM5ub+p0/0YeJOPm4zQcf2X/4U50Vr5T928GoYz2e+/GnMe+tbBNXreHWhtmvYSj72DCc8MuGbF/FFF/mHb3Wvv32vye+XD72Ny9i1vx/nPezER4GSCAW0Xd9QzaZwGPnqQZTR708KyqztaNjQLKowBVp4rIHghIIVRwaCXYjP3caqzVwfxYyy5M4JNmHXD991NNmqk5c1ivX5Pp+SgBYoQA0aZqLnPf7grmdVLqpgl7jAbg9iiBDSjMJGg/875yMR66XMWazqluG89P9TqOEYTY9/gbbXESXg4IbHI9q5Ti1bVvRmT9sPArsQAeUkLWmLWgxPAg6/WUbgaAmcfZGTju5vDgMwEginOaXGvkrkLvocCZ5r6rcKiMu7F3XffLSGVjA2dDg37sQ4IVxEHDokrZpjRDmFqBSKM7AzgdGhTBsQi/DSta++dcgbN63NQRlfb0OseyVoZV6HNIMeqlAcGj4cWAwBqagEc0mwK9050Jv8W75sAc507aP8VfVKE0GWQXJgA0dmCgspoHKwAsQpp4cy
ewltlK/PKo++E5nSVrfXdn79PKEcs+AQAJfra50jlE2yUrcFnPeQLc8auDlPd5XgeRARPy41i1/yGVBYhPDJJmSmye1/OxnFEqB1z2aZ+EneurzilYgeG54UWAwDLBpZ3dAYHfB9DElaCGFQiYNvJzZuNqAAEDAMtmZwNTI+QVsdcEuhZp1VynsrwFhQ4gm/O2XRjUzes3z3/oOWj53wleLXMR+ADzzXHcTrTf2xMox51G78rd2zTZH/+wsINDDsixmgvBKqzRTwY11/0OgHy6lGNtkCr8t7fPDS8CBICTTsuV1iqQj6NzFWFHFfxiR85UssG1uIhSllP7rn4BZwVn5kDrBAwABQPq05Zbd+nqbvtAOO1kbxp0c1g1+Nzu2+3W8V7z+04FB2YEoUY3+XPBFZoFurX9DweABIjOCLaTmCSfWcAeDlwUt80j5X8LCl4XO+F/cyB4OSCwZFzXk2edSblyOhjUDuL7UyM0TSBKU0r94SsQ8P46amC9YdO2a+l0e8SBCOwClG8myh8SALa3d6lHaZN5xoFwE7+5zpNTPxcmAoNDgn3NQ4JHCUHl0wRIUGj+gLKP7COmLAgHNhVT60Fb/M5ZyPcGDgQDqIIOdOGfx3OqNAPOY+EFgcBZxk8EfqMRCwCMHQjkvgAT6enf0YmdpI5m4ff87oYKnSGU/C9l6fs49Qv0DhSC1DUo1c2zVMEenc6PN/evzdM6e0/nAWAoHv/FLCBlqVkvN8sd/p9s0yLczQFY/APlPJpDAbvhnH37tDJzGVjYlwr2O0nw+/EYSum8ryBw1tuoY4e+1P1Wt8I/SrwDgQgg4zCqB0CAcdR51DzBhIW/HrN9mHle9UImtgox756gQRCSVk+PtveuY+iyc/78B9Jb8C2argt5u4iBYxH8mnh0/MIG7NoOrK1E6vvi5sA6SrBzDrLzMFhEqaneJmvrI8qyO9f22awhBuv1039jrGDwnPAiQCBAfz0TF6hfeAICaReOrKDRwICOJ9UbEEMAgeAYgnEcAQRbRknCD2h0ppOSbWR231GXMtenNqni67RsHk7y9CR3Rjk9tUZ2YCOp5S1pchYMpWuq4FMbkuDv2IH3g3wm53M6f20P6RjsZuF8IzD8Aibt6VD0/ebHofpoULevGxfyEyAIpsApMuhtwYBY7zPCiwABIAvaIhHV4KgIUEeg+7rQD43XSDM+9+NVUgggB44DGAAOHRiYQOCPc1uf6/a28LcibAWf0pN7KWnbq51c2zWes2djzRuEXT9e8SoFQ9eTGyAg7X5iFqymgWLJCKwerDKcBUCS2q+Ufx0a5NGAAgbdObgIe8Zl/97UQTMHsp9Ru98EgBEA4H6B54QXAgI30KtQyazktAXzfKX+uT8YCOy9cjE7QMYBHAMYUwOMARxH2muQOknoZiks3U0beikz/3xBx4hTLdPTfKS1z645A92ziFuF1/220+JF6Ft9BOOjZELIi06I5Ivm5zRaIJctkvu5ObCyAV8vACAfj+Rw4Nbvs2Se66D116Lp+T6qqzAFvN7YHzAW7R9rKIxNBdwILwQETrrqDgCiEuyuja00Ghh45fDxIQLFgcsxMIZgUoGB4zjmtTigirp4xp1Qhwqp8VvnSI13VhnZLVbGsdH7pf/kwV028JzAtGX73L6/CvbiC9nGNRbgbXyDIXgarG3XTCEHgRenXmr/Q+roQDapGnOs6SZAcB2dg2K0nvL1Nc1bpgADwOL30vfYJ7ALheYvAMBaRbPjmKZnoQ8WMGyVGWMCKoIDCuhhq8KMCQSqwADkcIqmwSnvMYG1DPv950hmgN0i0aepl/BWgaCnyYlv9ln76nLzIw84CST8C4icJRcUnhkA/XLcsJoDgPkS0r0o5F/YZflZdV7pwEkbEwAq9csT0yDl47HwYkHAqzIVvhZA4EpAsYtM+McwwadjzXgRweU4oBdHznAHAscx21gnKBw+o7BXbHiYHwytbbbJ3bj37E6RVCq8z+f7wzT7dM
2AUj64r7uJ0p7jTCVNIc23Mk1gfLsW6H5cgPBabIpjtla1tFphfZZF+gKIDHSPf/MDwEqREZpxUTc5hbyWOrfZOF6x2TAKr9CKL7s+kganQqAYriBDFs4qax9eMAhkKECpgOqYhe3DgWMU4R+jxRFLOMzWv6hCL8QwDAwE3rDzekzeADs5r/AOKLehYG2OPV1NTfKI/X2eegWCppbLM2Z8AEj88VPpsUww8PNzpiVUoDbjMudO7MSeO30rn6XD2S16lVBNt6yoJVfvNkAioeULGQywgkECovWHcn8+yetTRcyEXAVfkXNLOCt+NhXfCjRCZxGpcQrOjVriD4QXAgJR7SU46fODFHYsws9aftBvfzx9AhdjAZeeFQhExuybKuTIQqgXsQYHvA/fX1tgr6/2191jF10OUuvlcT5IUnhQ4yfQrZnizlrAAanEvLMKAQHr/+j4Lk2949t7G5SVVkKujRSGxASl+Oq7SZHN+6ZQ0WgAWq/r2OT3FAQ04ZQoBZco+oXAABK9Lfi+WlfeQ24RnzUkAPQ6eTS8CBBQYF1DjjuMGsopbNjPhd/pf4LAuLrAX/dAcJ0sYToGL7h0rWxaPTzGOiB6wDU+VE3gxPa5CdcGPGWwvLOR+BunMrTlckLrdWkAugkb8aopKKn5HOhqBoS1NWBvYjLd750ae61Nwm+W1rxMskijp6dB6BEdfanIbRG9OHM3hH/vF2BGkMJOLKDYVqtJoJqqYJansQBnCbJhAM6+Sj0+wiS1/p7pGLy70KiI/Dsi8gsi8v+muM8WkT8nIn/dtp9l8SIif1jm58d+VES+4vGsSPtlUDAQ5M8p/1DFdQxcrwPXcZ2/qx1fr3i6XvH0ZL/rFdcn/s1rrg0kxjBP65g2l4NQOGYWBL4VdlelB/hWEntcv4X2qeVq9E71gWbCsQggxsdBQhPHnr7LeYyn1+E2lP2TiTnL0Bzqm3uSz96Ny7MnfVMVcVMp+r6bRYUwICz3Aig1FXXX696uoTK48zFmIYKfU0cj+Jm1ZbLsdQsXkqiV/u8sPLLa8P8FwG9vcd8B4AdV9UsB/KAdA8DvAPCl9vtWAN/zQPrb4FSbqbj7PoYNhQzz/icAtP3rNYDAf09PT/Nn4DBBYwr/9ZrsIdavb0MwDkRe2S7FDr73IOGMGegtuS6nci8EUTZXSvTP9ZIdGFB87/CeiKeX6bvw25Xe2RsAhPAfJATL9Fy0+A4mnFEqaVReLVpx/BWtv6qb7izcVhMJb9SMsHCSSDN4eQ46GKBeU4SeMiLcpuhmonc61v4n7ODDgICq/qcA/psW/Q0Avs/2vw/AP03xf0Rn+AsAPlNEPu/eM04DAcEyRTLGRdnmv05hdgAYLvjOBp6SGVyfCjg4GwhGMJRMj/n84WzAkTX2W509ULQq1Ep7u8bzM+uzFu22rUeYcG8oryyXkcbvnXHuhXACKRxxveSzTrR/avyd8CcwOCAlJd8VkQThrLNTOZc8kCD3X5ajaWUGpQCYzGcATxF8qjvPB93H3AD0fJTY3hYAA2D0Rh8ifAwD3tgn8Lmq+rO2/3MAPtf2d58g+3wAP4tbIVs6g67iMBgMhqaw2v716n6ByQCGgcEwDT/3J1hcjgOla4tAhmCIYIwDxzGgQ6DHEUAAVaioOQs1nWFhAmuWo5gMpWC399VsyRvVRRZ3VB8VpKbLprNfSH4A7mx+PYtbP+/FnI+SfIC4b6A8CICUvz4S0MvgWc+RiikwKgIfiRARmrdxq2d7fAJJyRZ6xKa2pZa9piEoIw0MUlS+qblnmdNXYH2EmKMAsy8pHcfWjiIDVG7tcQ9I/CZ8aMegqqrI4na6G4S+Rfixz/mc3RXo9o5i2ufD/QHEBK7GBEL4x9XA4EpUP4/1oqbtALmmhjhELM35MtFQhaiNfTsAmXs8hmbFMbgi9lIE3mrpqq1SAe2db73kBlDQg70zaztN+Sh3nmhdIc8t47W6N8+vKZ11L+ylXhgv6JwM0pIEDg
kAaDdSYh20bMd1zf7H7KDDQ2ZSWspSyksPikZMR6DHTcz0OvOaSsDiqRHi81MUtT12TDHk/5MDAj8vIp+nqj9rdP8XLP7hT5ApfYvwC7/4i1sRgaTIkuyGWQD5Ba4bIEjhTzbgtH+MK1TnwGA0/hg4xhXXITjGwNCD/AGCoTInDdEkpdmQ/rulu5fSL0c7ICgk42bD7p7dEKhpeyeg68yUDQCQOi1ntb9FmWkVThDaMUcRmi5dc78RzF4kywJp+1qTZLHEzmISodcei3lPi8CgVbmbR+7D8jrT3rOpqtTLafd47nsxHbxK7nrlhfD7fJfHw5t+huxPAvhm2/9mAP8Rxf9eGyX4agC/RGbD8wJVujHxtM0DCNyTPwoQXMfGIeiOQBsdcJ/A09M1QcLT8A9Y+lRjdU2/t9h3YY2/3TDb6w3a749AuKrQ6EXd2bV0ROkHebGUKCmny109cXpo0aobmx/kD4iVfY/9aIE/eBVYpXqqNclZguU1ulShBEiWED938K0CGOUmOBGqmOoDaEAWqZe7ox5LNVI93w7ZT97EFAAeYAIi8u8B+FoAnyMiPw3gXwXwXQD+uIh8C4C/BeD32OV/GsDXA/gEgH8A4J9/OCcLEZDS/XeOwckC2CdAv7EygOv1CdenyQQOvUTnPK72ddnrwDg2jkHo8vyYn623q13vbPnKrXng5p/c6BBFOFkbeu/VhU766c4eFwZQ4gTSEqk+EUssjvniDd8Jlad5IAIZWgCgAE5Jrx9XlFqBoAt7CiyDJgPDfL5u063H/GzjPFS5GqBirMj2nSn4U2YtaDAEZwvcftUUaLVwb5hpE+6CgKp+08mpr9tcqwB+/7NyEOE+5gGEdwwIJLRj1BGDHCpkdjBwuYA+LT1wXK9zfwwchWXM9Nwpla9zJjtwJ+a7Dr0G2W+3yCPfs6l6WVOrANEmGWXasjzLie78toPfP290/0q4D5UAwJ9ZkeluWC+h9Fwz37ijkBv+2zFseVDC+Hycl8lTINuFJw3pFOzcZ8dixdTIz4lSoFZ5OLyIGYO12i0mVgCW02uW4LQ9ZhMqgcOI4cMxrhBRjOuBcVynA/BI4ND4uU/gmPvizAANDPAWgGA2305b02Z7ru62DiIpsNovPsnwQzK3+7if3xDxjX4onZYZcZbMG4dScFAPaiym0HS/8TFFVB6TySH0eKlovVfdNUWqEBGDRnWgzIad+znMyMzluTMGXwQIOB3jML3OBADcuHlX0mUQALjvgF4gCgeimQsAcBwDl/ZuwShDj/4SkmLImFNdOxvgCj+r+x08byG7S/x5Y8pSH/vDfFbVflokcn/74yLRwybfy2SW2/cTvN5PG0AylUbOrXMlEGxG3YtWfbDUDMBST1TIJV7fKnzfLSSmG4d5IJNFBai53yWy7wCRfpbnhBcBAjN0JkDIdgOlp1ZG8Y+UyUTNRHDzAAAuxxXX48Bx2YBBzBacpoGoVDbQgCANhA8jPGudJI2MqOd32Bsp799bfdN0z9S6nu6dC/ubhewzzAIQAhJCX2nBGxZ5NZqc7ki86+C56vvJD9IMQBV8pKkQzxKSBiF2UBjNe8gEgBW91DwilSUkC9i6P7T5CpYZhSMmDgmA6+WYwn8dGBcGgPpGYpoDA0NlojQ9663VARdxPXPaUXv1nCaxzapsr7knExuX1J07HrnmVqqP13PUo+3kkGBjCw0IdmXegXrhGrTRtncayumeek1FBOY7YJOAcmCAlkxAPpWYgFc/Nd+JlDgbUFThLG8XtncKAOByHbheBi7jiuv1wOXSzYL0K4iZA8IjBv5cnzvwjLKunxLZFm05d3rJI+3+iJJIBdXCThw+GsF//jM8MEci9eiaJIYvU+SLwDyEAisP61tn/udsYJNz8UcJul4JXkNDifWlL2LOdu45yunlgMDSv9L2yfPtIq0HCQQ8oWg6+NwhyD6B6+XA5XokI+jrDvh8AZswVIcHyf5oWdlXvwBy7gjbgXcv8nLJm7
BYvuGWY3DbZx/r0DcTp3Pa4mrsmzKsKqTJAqpKAQPBqTjffURGBAIEoaeLH3II5Vmp2OOMIPLKIxziQJbxubjLY+FNJwu99SD9H9l16RfIq/04/AB2xmUzJhRthgxjrQEfLbiO1WyI0YG6omv9PdsR+4z6wO2+eM80ePghN6JOAeiegO8deo/C5XPcgfsg3EVQIICdzXxtf9490yluXY0FNhek33DXEbtC1/mkIjJ0nOgA76s50IXcYzcVKet0U7U/p28a0hChv1wEyDQFuj+gzDlwJjCB4DhykpCzDuDD+wUeaTNZdtYae7Tpi566xwweUfo3E7gVn+c/LJYW/3EIJ80LICFJzzpzgJ3Qbp5TtslSY0zA6ksAe/kpmUGpSuXUOP110liYF57PsPuFcI38Bc9kAi8DBILSZJhDWMwCGlC0XlOZAJkE4RDUst4AAFyvB66XC40aDFwudA+9pHQc5hw80ifg7xDM5+7A4ESCInp//pQF3ACAW+FOUpt8PXDtEp7nH9iL/RuaBCmLpZcsWtgFCKBtv7L1sdPCC+0xlCgJv1+nN7ZrtXuW+P0L8SFzBq4GdgEUz2QCL8Yc6GGHzD1mrrUaB1bvPEloLOaAv0rMaxB0kyAmCo10LioNGRZzAG9uErDLoxV+PbwDAEsca75nZer2s/eBKkB6/D1yf155uuydeltuhNScxYtO50sZT/XMSU8sPqt9Qo+0QaRIiq+oPaFjYgLVbXbWoW6HF8EEduilivSAMv2xO7RdO4XR6T+bBhUIKhO4mImwMwnSNJjrGB7JApTZAPAc2/YE99uR1uOH2rV3xk3yN/JabjlhBPuwo2TrzUoe0efU1uOhM8Y+IzAvY4Hb1f5Z6mW/NcwUQH8XoC44fosFcKrx2nDL7GQCmrLgJYg8fDgm8CJAYIbWHFTAfQe3g94HCQBGdwxe/VXjKwQwFnCpAu/zBGhpsTEU41AcZXQAy1Dh26uGc7V/OorwQNxNRNEVgM6Dnly2iQzfiR8yEjSj4EGQ2nvb6WinEIsApcDEDeiAsD7xvJ5Z+z+jIxS/QJoG2d8JFBiwJHlCvmdBTOB9nDb8WPDqqROIOs2rzp4auI8tcwqI8sfIwNiYE+PAOCZIHHrkcCGll0j8tpDhpEA3wk1zdpekoHSeYpvfYu/bDrcDCdZyZPtrjZlJun8lza7Ikdbr/F4WYVOau1LWLDoQ2A3+mIKVaqnHNxbyGtfyYisH9Tyeb1tGSigPSK0fLVrBqvbpJfGHwosAAafy6wlthQrJJ6/vMX8HfUfuOOzz0gft8z1MFanzgE0KWrdwHPXFIn+pqI1CLB3VtSB34lvAcE9qH5XsDxlKlRdzfNeJ6aLeuRdLIeuCQYABpwPy8vo2ku35/hTaWjkVCEziAXQJUhL22C0KXTKfxB74afNZXDd1/yzOGSRhYwsVENbL1EAKoYx8nsxz9M+LAAGk/CzRHNze8THTQ+aagEd8S/6IeF4uzMGigwFzQp4AVCcb9dGCfKlo+QKS/cQo2UNK803q6iMBAl3+7rR0KcRJ79VdPAu/dhCgLRAmWK3fyhBqwlIEMQybBgTzWxGopkDPYgjcSWMpXSP5jHqLLnWwA4IEWB9svhWo0RuAlPoZeE9BACca8qRyQoPLATnGXI/ukFgfIBjACRhUICiPCyQdun69qPoNjBG0jlo6QO5U4fpQ9YT1k2BvKbiA5jGDAWntjmY7oWGBoGsKCLhzFazZ98DKk7MqINAj7VCIFSyMIItSw2LB9Qii+yg40O7zvFSg2oGCckY8a+X567PpAbmNftf74mPhxYBAz3UIP6MAD5+I4BBAZa4I3JmAmwVyHBsz4MRzzBUZDCDNgQIAZUnyCgBC9nUVJC7dg9UCtwd7kHbNm4Z9XtikyU68aq9200nyJ8K/sAAX9P7pbWYA93Vm1tkOCE7uuaX99w9Y2MC9tDowbMGohxuOvhymbmxp3DQ6l/BiQGBbTq2NHmMFMXR4QGSu+l
OYgByFAex8AjEJicbDWMsM1f1biPHVo2QBWDprFIA2H54FtMpB+Xbbh2UFlMfahSoAdB/HinGbnh337UGgOs4UucDrKACwrWNLI0eTEFVzCwi6oFYvf6+D9AvwoBSzgb2cboCA6idqW08vJye39ddtfde6Gc+0O18ICCgW7Gp9Sdo+OwYP0cIExJcN6w7EZSHLmoPoaOb8i6XHt/MG0oEVQ4Xcmqea/7lgsOtIb8cSkNqV1kdubfdK7/dZ3Gi5qCPer8LPNP/W+xp+j9e7FJ+AcBYW0yDKzvH9JKbQaRF1T3aeWVDgNNxob2JTe91dYKYmab+dibR1st8Ij3yL8NeLyA+JyI+JyF8VkW+z+Lf7PUKtv9AUpTypxQsQHMQExE0D2ZgCDgSHEQExp45QheZqxjFEeL0W/0A4DQ0AhmZGy0zC+PeA6G870r27dLv7WLhHGVNrx5FyHAmt/yNBjevv/cpLWgOdAcRn6IvJtc/5jobvqbnGOeW2A07zOFp+Oc+rE7PVxYmwltwQaOqi8na5J8XFCuzG7yw8Mm34CcC/rKpfBuCrAfx+EfkyvMXvEe4qH95AFJa5AeHwI5+AAUL4B47DwGGznPVMreQkKZVT//w8mQ8RDuqofRiLSrQp5wkgFO8S/zL2Tu2dPvO51wU0kPCEpp4HVFZHOwcIBoPspEXzK1/Dgq9NmDbff1zYwyb/O6EvcRpZ3oFB7Yt7UOA2jzUnNCenPSqMer85MgixFiqXZhW3en0cBB5ZbfhnYZ8RU9VfFpEfx/y02DcA+Fq77PsA/HkA3w76HiGAvyAinyn2oZKbz1kfTCUFwmrbsIBDBVqYAA8briMC+S72Zo6AV6TPNhRaZSgmE2kZIsRS2ZnupuneSqhPWY/fLMXcT+2UgoIon2LpUxvNVoBso80T8Bt4MCj0zowEAN7ySI/qju4jJvX4vAJngDnZB8iPqe3abPUNxAOav2E7s9P8Flx3Sn/7s+KNwo1z0Osi9htoPSc8yycgIl8I4MsB/DA+5PcIpXyG7NcthQQcnbcZaRRfIYOYAPkG1q/f2jWAzTfwxqmabujAMQRDQH6BCgh76ldnDHr2e59Q3PdY54UPXPfmN5ymo9bhK+1MLVqbrLZVB4Iq8Dvm1xhCmAe1bv2hHQC2QIAqjSyc8zzsnQaet39mhwt4PcaSlv2pPqbqbEw5pjTb9WtIYFpusJsKMD6g9XfhYRAQkX8UwH8A4F9S1b9XK/z53yNU/gzZF32JntK7mov4x0BwyAE9hlH+ozACYTawMIH2oE77xhx5KAuNKA8RsoMwc+tlaQ7rPRCclPRdBu1HIajJlro6KxpN2+3gzrmj2qMI+fI5+HLfeSdfgAB1hCDj8+UeMeFWAoKqeAnQu6Bb0kLHhP9l5CmD1Gs2V9ClNvLgk+Xt4rjPd1bT5TnhIRAQkV+BCQB/VFX/Q4v+0N8jrKHTHUTjRz5ggCxWMTTcNzX8UbX9QaaBmwdHgkB9MzFwIDTREIVsRgjWOQL1t2gILqEkvu+QYavHFXe/VHx+8zMCaVpOMvetPRwQFtVP1/FxpHnfXn6OTbvt6o9Qc1b0RUNXFkCEHKCz4BhFYQjYXO39de47mNz+FuOt4P00miKY2ZuBwCOjAwLg3wbw46r6r9Opt/o9QtX6yxPU+aIxc6LPbhJQflnItxccxwWXy4EPLhdcPrDf5cBxyenER4AKirMu7K/SKfPjJF1zrbYsdZ6uNR8Nm8tPhaCB5+NBNhJz49J2fCuk9j/7nQn+eTkCyMv8DzvHcS2TfaboWVyXpc7mk/llfCNINY76bzncBkHriCeBWJIfPQiiHh5hAr8FwD8H4L8Qkb9icf9LvNXvEZ41dtqhHtKOmwfiWtJ+/ALRcRy4XC64XC744IMLxvgAlzHwK9Q+PsIs4WgOxSRgMydhe6WmC5obw1wjVyHaaSB4nulbdO05N2vJcfDkhqIETzXUPjjLcqzlfssMZj7DVrM1RhUdjO/van
NRo9p+jwdplbBTBNjEzegOBDxfpO3fzkU5iuomMuGiHqNa5B/oQLCb96UtZkdoSu2RAn0OGXhkdOA/2zzfw1v7HmEX9ko3aUuCz5Udw4U+HHgILscF18sVl8uBMS744HLB+OAC6AeAatH+PoxY3itYap1Qt7GCMXjfzUHSTH6oVIAow60Wu0EB7mntRo17uAc+whaL0gczhfznDnbOcBYg0Hr+ROgfYQDsXeei70DABX4LEFb6lMUuZIIWEQdLc7HpEcVfASQBgIGggi0fb8u9azE2C4IVnKW0Dy9kxuCt4FpGS41N1PXeZY1+kGPwmFT/Mg4MW0dwfHDBBwYAqmoAgOg0fDzBZUX7tMVM+xcgGLYg6VzINPR8CIYUb/SZGN6S3VXb36m5M0ayBO++lqy4NsnuGaCgmF/IMWqiBmz9a1up/DuYZw4rVU3h32s0rrcs/FbYcWzZQNf2ZU/acdt7fqC5KNSfpD1nS5hcVwAI5UeAxfd0JvCRjQ589EHXIz3rPFg5qze0OwTdHDguc+kwHVD9ADyU4iaEMwlLJqmc/ynOCs1/W9trLjwigip8BAS1Xz2TBbTzTs/5jqXb3gELzl/dN9eY41gAGeZHWAwpAjBKx7UHF8qA0mOVey/YZ5Dt7mkzKHUAPWUC/q+BABeZmUCNf1D4N6xgQyAQQNDMkVmSCpUFCIBwKu6evTMFgA6et8OLAIGpYXe5brTQeyLWBnRBZjt/mgHGBvQS7wQwCLj2y+TaMeXC89l//SWj44gcUVmcvqzk86xOHuqGj6r7RxPcAYA6O/BhNeSweQjp3GFrZyP3CIEH7UcHvuUMdOGf+2wSnNF+YVSHDy17IbdPyDq4EW5WZZzMXpSW5RkTENrn+BuS3MhTmgIf0RDhJydsmMAuVtaOFZpckC8PkWPQVwxmN64OFm1DY+qku6cH9PZRguYbABSHYBl78U7GKP9offQzH4aknoUk2TY0xoAgCOBUwJybU9pjok0mUP02/QFAgLlSnScQtHs2B/d9AqtvAA4M+8RPasTzV8NjjIvUCbETr0++xUGR9Fzez8yH0s9eSnUZoPB4eEEg0EIRSBbWipbwTkmN7a8PT3PggF4upGEcBMasvDTyp3C7aiugABN8mBlAjCDmjtMbhgoMCGRMRgIcC8WLTD+/UtC73xTKVI29L+467PmTTTe5hofEaMyMsxEBOZloQ7k8z78ucTkhqJ03W2TNr2tVE6ybQLAbLjwr+7143eyVHLU7Oht0AJAESxf+IugMCJWbrrWXwEE9/KQsa3ghILAOBdaza3CNBFjHcwA4BOOQ6RTUA6oHLnoJuhRpBjugd9Z1hJBDfW4A5yGBoLIBzTkDw4Yfp00AHQAOTQCI4UFv/BsdU88OiAuXS07iT5J+6Mqi1U3Y6V621x0w2FxITVdHFOa9utk6nXVh8cQyQ9L20xl4BgaI37bUevNwrZC7ApZA4IZIPx9MQJx5TSDP+koWsAwela6gpT7fb3Ng1xBNe0egdnA7T2QO+U3te0yNbObAFOhLTZ8/NEoafK7KMsIZw+DEdiuzgUEvvMwvGNNXi+xGHx5MDYpzSXShiNObRj299yT+3jlgofEiPh+A6b/Hwy9KNcRlCoQuKIqsUa1HBAQzCWcYNYMuyLxd5gfgFhNIHq6ZbMkj0/N9PXoFtUtumBo7YFIwUE5Hq2v/XM3A/DBS0+1soAv+e+cYBKqwcWzZd8eaV5yYjlEyBQ6Zgq0H9OIMYwJAILMIdFwxxrx2yLAFGoGBAQyZnm/rKTx2HSyhsYB8vXjOEwggwgE2JcRafssBniHr/UwBjIfmGO9C6qD4ayZAaCTxl3XmNeIAh84KKgvYFzTrF/B6tnjJFNIBiDjm4b46NHhuGsDAQa0NhLLiTI2BIGuU8t+lrx020YebALu6nuTKaX9/ePa/dG6eV6Nv2Mn6aHgxIHCWaabx3KFCAVF/nx3igEj9eCgUwAeIDnMcgnH19wGuEJmrFuuY1w
7xN9NqxbOZsAwR0odOJshMANBDp71Owu/U7zHqflYvt9JYQeYcTJoa5GhtV1ili5kGcNNADRC6X6AwAhT1VOcGOAPwmoU9J3OcY+xoQv0YCDgAeBqABF4iyrCpB6Q2rqf2ZsFSz2y6FIbibWgDhEIrJDELaPVWn0XSQTvvrznQg2ZnsIg8F3QV1pBTI/k8gUMFwFE6mjhYiECu1nGu14gX2OfMD4XgsPT4qdlFdnMD8jcXJcVxJCzrNAFUHRCq8O6QO14v8b7HQpmMdqbndzAB2ACEC+wWDYrqlni+ho0Qp8AG/zwkreWJ1QkDW4qdfbWpMi4kJ7kR6kfMAYD385H8drAWptNrzYGg5n8Bgl63khsBn0ueEbMqifYXVnBPnl3w4YrpzvWb8IJAQO8eBS1rjqQK0rkXAu4dwgHCBfSYZoOYto4FShYtgqUDuXAPmxwU3zq07xxyEADmLjThmM9zkFpFdS0/O4dYTnKM2euBnE6aHfguLdClS4csA8QCLL6PBJR7GSC2tWGATW/UibMvs4tv2/RvBgDchlwF00yT5fsk7qCMnC8I6qNJCEbaWYnnq+YNCH9Lq7x0PKdQJ+t0xrRpKCCf98zwgkCgh50OnlulfY8hPlTvzzaoHeQQiKYPwY/lOHAEQAhEDngrB5vwp/mowDFNgesYELmid/3QAgrImIg/REOLrzK53s9XppDPuKCNMCpp1CEZA2k5f5ZrwiCebxiapHNez5SSm3IpDEjhOMRY3gYANoJUhP0OAEhWSGU+HCwua+WWajV+kDhQhb6BEgjEAjWoDwQjDGFP4ec3UxHnap2C8/CM8IJA4KyySbiVAMD3nQ4R9U4K17SVYNuBDlm1/4yfNr13Su8x0VY+OmB+ALkKrtGrSIPYwwWKIWNqnaHT234jLARUWMhTw4eQF5OgetcVCB/CDnR28lDzct5C90SlXGt0N5b6ktSwInNGhQ+bndn1WwA4vXYFgcoCHhF2LqnflXE+/HkGULuyiD0ye6qz2wSAumaFm5vUzVveAijfVxDYN0HV7EXoOwAUhoAKuFYxSrRzFfhZgQkG00w4/O05bzjKizfY0Ln4SL0mu8wU1hEoPeArFqlNJtrUwNKO9IKOCYzXgYTgdJMg6XYmo0lN7kl+Cy7AboTcF5tNmUILGxAg8zc3k3nttGgVJk/wOQDg25avYEWZ6xvvdy0ldBBZmQuqKYCedycDlfUuTmdXNrS/R4J8xsOojBcEAmsgUlkKy97PFQB4NAGgfidJ1bpJ0E0B9eOG5KVXEBPQMTBEIGPEU6OhQ6vBrlHTgtSgVF7PdPOrNUZTKX/YtPHIygJ8GGU7cnizs98RdRPmE3tgLVe/DZLvH4AprQsOFgFmAesa9gwUKgAkENSin5V1F7/eLyHUFaiKKSB0dZfT+IagkrbXWMWqLFTTenmk6o9xU+PB8EJAoApujbe9ru15SInspNSVdqVpf6egMOE/1DU/lg5TWcF05kUbBr1OygZjAjuXYHSEIRAZUBEMlWAOy3DOVA1rlO3M0645rVxAjDf757MDCIBV+G8J/h0NcpcBbE9S+7Lw23Fk5wCiEl2ImvDuAADt/C1mcFs69qXbQ4CgLxDGQr81CxgUIp2axuzmu9EnZgHY9BErm6V/7pFZwwsBgV1gDdlBws0BZgK4LTyhmRUH5tTivWNQIEqmwCGZSCJAMoFhYIBhbWMgtOsIYwKRyPQHPDzFk3kqAJ/ookypw7udrEBoUpL7DXJWmnnjgXAaOs0vLbCRi4wSLN3tBkq42PjbibV4FmF+2DQBsBHkxhBuaP+874h7QXXn5bSqW8wCz3cvFItxHC+CT2BFXCGYgI8+LP6tZJnJAtrq1rMQ2eeJBby3PoFbCBzbwgaYCcAhlH5wmUeo0Nm75inJznGIbh2DcS6ut7TgZop5+nXOMhQFcPRO2bSB2jCkzRuI2XLIzsddbnYWihdUIe8v9ACoHLv37KyDe/Wf4SYS3Ax7LYpSl3
mlxEXdJNiBwJn2r8zgKG2wzWCJfrBg5foJbAsIUD7tgPoRauMiFXz4BEj4+1eOoN3wzSSj/h4Md0FARH4VgP8UwGfY9T+gqv+qiHwcwPcD+BiAvwTgn1PVfyginwHgjwD4JwD8HQD/jKr+5MM5KqELdhhLUXcuQmEo7Hqd7/JyX4sJcMwXfRZWUDtdPEKRWnBiwKT7LohXRCdw7X8YAxh+fCQIbLseRVYAyEkloe3VTYL8Ll9lAeQTUEq795V7MkD06nmksz0rJiTJeg4MAgm+yQ5q3KL5b5oGXO707Jdof8fjPPPbwiUAeP5RBF8aCIjSM6LzNlOgsQBnDWs3l6wzkWfh2COfIftvAfxWVf1NAH4zgN8ucxXhPwjgD6nqlwD4RQDfYtd/C4BftPg/ZNe9QdDtUcHNAATab/go/Ota+Whb4e8VbH4tR/wCUS4qcs2JQ2WZ8vyqcQ4ttgbnDsBFRNMSvO/UMa43WCRqmfW1qcNSUzVq1SXPo5lnt4YQT2nZ1zW1DVobVQdfp/39RyygbWvbngPHs38hjP1bF8z1+JfN4YS2mASjjwwgOwZXriQYnALgJtwFAZ3h/2uHv8J+CuC3AvgBi/8+AP+07X+DHcPOf508ZKRUjc8afj2X224alPSonrPxTzpd63zxVeP4qCkKjc7hQfsOgQHA9TpnDY6rCf2VAMK9vYPsO/qWXc4KQwEHlC0S7EjonR56rXkevSp2imEPszWcNdwu/pFWzpuzLVgo+YMxsQisr/78RsLqvoDNvdgI6hlIbNKsTPL2PUBPF4QBxDC9L2vrF95/GPQbYt+rm7Pw6MdHLpiU/0sA/JsA/gaAv6uqT3aJf2oMoM+QqeqTiPwSpsnwt1ua8Rmyz/7YxzZPXTslA4JXVAUFuk9Ar2naARG/GAHow4Sbxh3idl/NW6C2A46mlruKQMZ8Q3FuDxxjzO0xG/ewRq6pt3K7HRBlMv9Fof86Oxifg72AQ3EwE1TYT3DLNKAs1Ha4ZzPUqwvfEHoLMYYx2SxgfZn03+8NYfIrbnb6w+6tguDyUHtEL+cjdlGW0fMnDeD2x1663qNWBeCT0TpbDNDXTM3r5SGdS+EhEFDVK4DfLCKfCeBPAPiNz3rKPs34DNk//vGP69ZLvtD9FDwXvqIp88bYCxkKb7C44TeddLKfLSjOAlTttWAt+VLajw053UTsy0UyIOPAkPmOwXEMDJ2AoD3dJYh1RV7MM3ZSdgUVCJxuh6zP69NLcCLzLSvbrOnmwuV6Zm5Y5GkCwQSjua3HLjQhKG5jbwCA66r/CDMKCISjVVyQJKvV65OKeV+mZgEni0ATRCn3d+Bxs877cB8VGBuTchklKHUt4Fmlj4RnjQ6o6t8VkR8C8DUAPlNEPjA2wJ8a88+Q/bSIfADg12I6CJ8dmOCnriczwMGAL7IQ/Y5JAGu/0A4IVgAHhMPfImwdTavRURHZK37ODJwvEwlExpxDcIj5CQ47trcNTUvzNFoW8ew0TWeV5XuoE3sRFfDXU0GX7rR/75irjOtywrUS2i87ZmtIQoIG0bQrmZFgBvOPtjvE619KzlHGHlsmsl69OE0T0wIo87GPUKPO5Gp+YQIZ+VZXSiTg1+k/ul6v8zeutD9iy2bl6MfKDkSqjwfCI58h+3XGACAi/wiA3wbgxwH8EIDfbZd9M+pnyL7Z9n83gP9kr+YzFIfYxgFyzgQojRMgCFXgEaQVdv6AxbY70v7jTlE9uPbFYk2HYGk0O3+l/dzWGWHsLEwbEGXf62HdT5vR68KrMWREN91Dy2Z7onhfqK5VN2lGnGtlP1+FKuMEXYh8y6Ab+U/8Jx9KalXuT0MRP6/H0dOg5+rmlx1nKQKd8/4hJV1vDxfca+kj1/xdr3ja/Z6e4ne9PuF6vWL4jxjCspjuis+n4REm8HkAvs/8AgeAP66qf0pEfgzA94vI/xbAf475vULY9v8qIp8A8N8A+MYHnrFksmjXbafXAIjqESei7NoPSA
F2NXLDljwmPYB/38x9AsE+KOw+TAqxBUtEosF9hmCCwoFxzJmGaozEJjTCV+ypimjV/skCJotg1sOswJkAuuK069x0WJqkaxSvZ1DdU4Pt+tp6LMsZFsIex1MeMgWfacnMrj3HZXbRP3yDlM0mG5GXVdvvrytoi00prU5VNZzI1+vA05W0//WKp+vTBIeneTz8m5e0sG0oisF+gm4a3w+PfIbsRwF8+Sb+bwL4Jzfx/z8A/5Nn5QLYZjy7XNFD2FhCRAW8m1ADq+sckoobIBAe4EPDWQggZ+W5WcA2HI3nTlPDXigyABhuAjQqJyaB4m/UgWg72fG7d/+yfCjmQAi7YxabAl5xDACc0K4tCsXXeqzZHgujuNEXb4OB58euU47JMvnzN0mhCDjHbyui3iabeqYm2Z9raNv7ZdaR9WPVYIyTDczt07iGxp/AYNp/JNU/Y87BnjfK6lZ4MTMGt3neUGCc7VtI8pmtxXZ9mgjO4FaToIwS2AxAHQYcLgBR8Qh7zCk+IDZfQJpJ0MBA8+3D2RcbGIBYTeu4of1ZTzUgYMzzOBagE4u3N0E+tygz8o94x+zXoO7v2lhF9ieW0GbqB0sRviQwoVwtgM+s3PeP9TlF8weJbBOI6MDVDANilJsEf24R/eU6rni6kj/giUyDJ2cDT6b5URWQPUS5T+J5TkHgpYBAUeke5QVE3XJlxo0N+WTd9UZNxbe+rroCwVyvMMeVE+HzyZoTgRwEBDjOAKD9JByDYVJSjrXkflX5fKmBA0m/akuKQaEly4uFRgtQnYZpEHVvkdEBs17W/aqqt2CwPX4EprBve62nxcaLy/hINxEhZH4ksC6J77LllUf1noKqse+0fagxgKvNLxmm+Yf7A57SLLg+xUdt9u2T+xu37N3wMkAAjpYtLsUstlnBiD7pHdSI9fIueA4zxdHc7jR/A4JDjhiPR2hsy104n3JlIQeB8AOMARmKIxYirT/xNxSRRoybBEexD4DSCdU7dr5CzBof8HkC9daFVPC9pT0QFbwhA40JuHbKVqNbKL0qPZon1vgT3p0wwsCY5zqGrqs6ExpaHbrJUUdm0BEg2v6MDXA9Rq/tlB1G69kUuF6Lb+CJ/ALuEByLiXZ68OzwckBgF9OYwMIKdLl8M6pDACBMCWXDBKrmP4752m8AARmFQc0CCNzbP9cWGkcdCbiOMScLtaGdXIxE5qfL6Dds9yg9bWUBQk4ybfIR/fkWGHSB077rYFCZQDhk+S+D9Jrcpp1XOr9mZTUZmHEs5fLdqBs+3n/7ITlUX3DN0SIfsl2XIUCH+alT/z6El+tRXsfAEw8POgDQiMDT01NOKqN88PEpMD0QXgwIbDWC003lzte2SQXsJhSFmXM2DACErWwGAKfkuZ7AaExhp8ncKcgTOwCBXAfkqILv7MBnCoY5gHyV3p/CY7ejHJNU88qYZ9rfUKEMn3d1Rlqwavvc9nM5IlOFv9221Neyz/R5d30fy9+BSweDzgqw0eB0/TqBMhc89aXOSvrt/pqNmYp3y9GBwN4fiWHC68C4Tofg0yAGEGAwj72OXBHxVpBLsnl5nhNeBAgklTw55/s7JhAYUNep5xA02zR/VFQfDRCf9dVYwSHzVWFucDIFkgnMhgUkJ4EUIFhngU3QOeZbiEcK+xT8JgDlqKl0pvRtX/g24DHp6IIfAKAbAGhAcMICdpFn/VV3ZWeAAtnEG3MmgoFMrS2vlJR+sWcSOYi/cdyp/25fmQmAHMf0IlBhhNfCBNwX4Pv/3dM89qnW2Yclj0Xg34WcTKb21XvhRYAAkB2pxTY2QOwgriiX7w22aEGrnBiUB1UkMNcPyNeC8130agpwnp3exUtE1wkCnQHEsfqxjQ6oYs4yPCBGBSYYCOz7RQDm0mTBBqLzExsoLECTArHg3AIE1sZF8L2+W71vAaC1i/oD2E/Q1+NpoZkERQG0/QIOpAHLfitihZbK63PRldlXgvbHRAruT1j37dnFMch+I+
0mIs0ItBGCp2IOXMMcUNV8ua35r2LGq0NXq8N74cWAwC4sGj90EQgYUjPNNrXGIvgOubedaNioSDUAGGkC2L8DvvR4mgyeh4n0tYGvNhU4qH8AgeLgTmA+BAkwmt8uHACOAYxDzUQw4dcqSLFHWlD51MIEZoV0Wnmz/kt9U717/A4ACHR2+eW+ebYfcdq2Pf1+U2FAhTZstPnC4zsmlAh1Bum3+NyRklSCsvePgWkS1JmiOau0TA2+9pmDT+EcVCiOIZDjKB/fncvfzT574Ig6eC+ZQA/7F4oYECKqXJDr71Fcter2QTR/N1DUBZ+dgvEJMgMD8eFBbXO8mR1cB47D1hmU+eoyfM7AMd8vgNgLRzgMGPo8b2MLAtQFP73DuuAHCXY7KMRbCmBS3TZ169d3Ady3Q41Yzt2i7wuK7Z6jy/MLszE07JMrPf8Sv9sSI9Sf3IRMcpMA6XfX6eA0dfzap5Hv3g24xpDhnByUIwbXcbU8WBuawE8zR8Ph6ebAcwAAeMEgUEflN8GhNo6Z3q0TJoZqUCcVAN5gmo4attV0kBOHfhwXHyAtnyHT3F8a3ljGOOZ6g9dZxkPs46VyQI4ptMcxvdiKucTZBAZiL8ZSHAhE/PNrXn9sBnBtSkCAL4+W7Cb4TanmXdXvWgzWIadkNE56S/DJmelC5s0Zox0bEzCYga+56JHLgdH7RRkMixvbYeKFGch5zcwuNZ18V2vvMa4GALZvJsAoowB1iJB9R6vTww0VclQvfi0/93h4sSBQkLdG17jCDPoN1JOMN494466uCFQEloT77JzqCgYx/KOSJoKmH0DGmKMGcg3zYgrOAdX5LsFc3FShOtcdgE4AcLBA6aRJCwGUEYDUeY0lI+WDTRs2GnibWk9JC3q1uib1G1rruK2yPNwfJ+2YMldasYmeO984s9K3kwJUNuCs0P5p1qP7AtIjcGz6Eu8VDWQjAK7FU9vH/uCXflzwfRjQr+UXyFDLiLBn47cTfrGVsZ4TXgwIrFM6skVvskekjVobX6lDmZZSwGfoKVH1rvGrBm+gsAi/TWKCrThMTGCERpgNNKcRC64D9lHUmbtD5gIj/s0DVZ1AgMPYgKa9d/SuOtM8DnH3ItxDDLDcNTYgzAy8njTrU9zXcoOPbdGlo7cQkLBRnadvjQyd//W2t5L5EEBhApySMwHbd81P/wIgFPDl6eft6R1x/8eM1syFJhMY5vBzoU8A4HgaCXgajQnwK8G1um+963K40zAmnDwWXgwI9MCdd4ZzGIjT0aEqcChyTr6qL9fNAr5fvCFNhdlo+crmGSg4AMwOcRgruI4BOa64mgkw6flEgQtgy4/PLxqrHgYALozkBTgUOqyxTegdCFSPqIaB+RLkVkZjP82COzVbElA+sP1IpQMBJ87zF/YZqvuNRLjA68k+ILayEkxojWmpxler2SSQuLeygvniqIGsvwykLuju/PTZf/6sCgLXbgbs9q/XZhqw4iF/k9VnQnoYBOVfLsv2HjOBBbh0ZQErI1gpmeSteQn5Cbwv8qSNrvlV06m38wtEQ8WbgwQA8KXDRjgJfZUhkSuuIoCzgCtmJzpM+GnEI34MCJhDlj6OOYHAQELHBANjDEoCxPJXZSz/BltqwNC3eV1rt61pgMoKNixge67YLKlxo51BWthBX/KWTItYIEFfgoGxI2IFM8tpV8eznPFpPyb6rtWpN0LY274DALECB44AgNGZQDUB4GDFv4NGDJ4RXg4IrCjQhP4EArRGT63f2aC2F2Q0tP/WF9DNhMYMqjPQOwEtCy2TAYiaw0mvmNMHxOwAANfJAqCKAxdcwMLO+zUch9owgQC2XJkaE1AAKgOixxyvBpfZaDM50Vz4XUcyc7rJD07tMwKCaCCpmej30/4KOBm8VviK4isowp+bWE+RSlno/7wo4qCw+RbJHF3w84OgWn8w9tc0/RjpI3BwKOfJZOwvoY0AG0TBXME7F0j/0FF/Rzeub4cXAQI7C2aWPQ
3G1cxrkh6IUDsH94gkjlo1vQPAGSCQr0AbEBRTAMPMgGMOG8poaH1t5VRATfz1wEUvRcvkVSaUgjmT6aCtCoADUJtjqBUI+8zi+nUilhsXYKUqa6LHFN3a5pQR9EYrXkitbG2zH32fvX+aXEXpeGM7IIZCC/bsgMB9CqRhbTGZ8B11oR9jEz9i6DecgrHPjsIEguhPI+9PFqC1SOwUpBEip/+8Ktbx/jKBGsKNZfbcDb0EprFZcwkIROzmnjfgbohwc6yO1HptIwTJABIIrEF12pnzu4NSUK548VUnD1Du1F0rWx8YAI4jjH5RQIdADxgTGBh6YH4E5QgMdROpmwJMlLMm0Z6PKqSlkRKk67UNCDLhev6WrSdui3NYmYCW+M2lfL3lN14iCp3qUNCYAHKtCF44Jm32un+1OQGLX4DNADcTxrV9WKRv09cAIPPqZssyKmAAcLzHPoGdS6D7lnaIr3wBaQ33bNtR1SjQhernMY0aNCaQjEHBpkEFgoGBA2JAEO8cDKdwmWeFTu1vW4tB7+xLBR1TWw0HBR02KGLzDdRHFTKV5SWZOO7uQdqSbV40fhHWCgSZV6HrdgJv0Nx9BqTY826DRqfnEde05RI08wLAbQZnAnM+vq90PNlAMoF5X6z337autX1OSfSPK2v+E0Cw88H4iFHAlEhhWWQOBACA2QCbBAI5PiImIHONwR8B8DOq+jvlrX+GbGcQnBqfdJmSUMEa2itT+USJOweBBgBhHlw3/oCRmgKDGtIBYEDmhkrlDp8p8M4EEhQ8r7Z1Ol/JxOwMhwmzHvYewpxwlA6rlLeoI2Ll3TfQecCOf+luX1jI6Jy0nTsmwLJ/88lrHs9uq+VwBpRbwEYP3ATgyUs0xTv7CY8maekrbgZ0ge+AMEHA87ZRUkth0mhOwScGIDxC8NH5BL4Nc5Xhf8yO/TNk3y8i/yfMz499D+gzZCLyjXbdP3M39ZZr/+ZennyA8gWSZsVq2/r5IuTdIdgZgNb4OrxIQGA/wD846gCweAKgmPMCggE4GMzCl3oRkEmoMEo77f+hDgQTDIJKCrn3mAVoldl15KDygth/AI/b7CMwiKVzomv+DhAbmIk0Qv8TG+B7fIr0kkKJKSMFzgYsj9Jsp84OeUbptTmWC+VnBsAjBnZexxxJkmgULybrfy+/RAxoklhlATxX4Fh16o3w6BeIvgDA/xjA/w7A/1zm+MlvBfA/tUu+D8D/BhMEvsH2gfkZsv+jiIjuXxO8/VxsOuBJKp66mpB04Z/CuTpyikOwOwOjga8VAMh+K0Dg3dI6T4D8yA6rwJwUVLzLIf61kNGQjgS+mS8aSdi4w/wP863EMmxFwr/u83p6zQ9g5YhYkl3IHWErgk9l8cbcmAB+ShWlAztzSt3vBUjt6cxqzXsNLF7JAir999ECvzj6wDL/fx8fTGARfGYI81q3VALkQYyP5V6xTGxaAMFHBY7DZpA+jgKPMoF/A8C/AuDX2PHH8BY/Q/ZZn/0xLATGvde8VrbSdulMZuMBOVU0EuudhX5K4qmkWcinkHnq+4zG3phpv/UQwAHg0DQTgtYh2rVSOkGyA2czmSigbgO2suGAdxaxIUXYS0hDAWDMacpYO80qWHSOKmN3RYKF/XFhbQLPbdf5r4KaYAFNBXvn45nRlkkq9qBgGj9sLVD/k3LML3zdehmsgsK1XdPy7/kutF2Wtufj4zhwXA4clwsux4HLkdvjOPInc/uc8MinyX8ngF9Q1b8kIl/7rNRvBKXPkP33v/ALdZUZoo/ekYQAwXXYIdNbDtNs4qZEppLTYcgXTi+MHMhVhI4BDDnmO/3+Xq8eGGKfFj80NPnlkh3yoheoq/MNRct3BbqwWacYY+ZhjIIgLOx6KPQy/QjHoXOSkXWOOdNwdgC140MPHGNqhsM84qrz+4uHzrkG8xXZo0xMZDpfRFO3u1GmAghN6LwMcUyOr84R2ZWT/g2vp1yqK9bc9/qx+vQ00nF4YmMHBU
81XI6R5oCSQI++H0PIJtyEgmmz5yugAvvIqgs8Cz8fw+8FjuPA5WK/44LLZba775f4j8Ax+FsA/C4R+XoAvwrTJ/DdeKufIZPQoB6URDcF344PQIZ/6cWGyAboY5x8J/NLCexwD+shc/rtBIP5zcAJBoJhW3UhMpub6fxFFbjMpySl744b+zIucT3XtGIMxtcbnC6E1BAxb+Byyee6sJvAHwYAExAu8HcPjmOuUXDogQuDQExNPuabh4evYvQ4haxtxTtS45DCCLdylcG48zVvNSu3ogFACn8IHvg61HsDRBjSqgZOVoeyr0CCTkziyQVkfNiQR4xiaM+Sij52HPEa+FRWSgI/BZ0/v34QKDgIBBDYdh5PRnC0+OeERz4+8p0AvhMAjAn8L1T1nxWRfx/zM2Pfj/1nyP6fePAzZFYz9TBsM6f+wBTvqc+VQQGAHNbBLc7NAh/vVbSOx+OthswDw8ZY7SvCyHf4UxsdU/DZ7p440IqT9lqCnD+z1G/8hifmw0f0O1RT8LvGD0AwoKBrjmPg4mBxKC6wF5S8S9oQI8ZcTu20QU7jqCyncQ4M6xXBBogdMJXnGXvxzT3SwMun3Vud1l/qAPrT2sTBwM0BYmqdjfCkIaL8XEwRzPf/B6D2STu1l7NyYRD37KNN+jFQODAF/TiKwCcwSB5fjk/qZKFvx1v6DNlWB7EPgEQ7NHuYc84IfB2eyQicUuY4eLG+bOkusWW7ZC4pZuaF+NeDh72zLzKH3o6qmbrgR3AA86dFh6tkM6nr7NR5/9TO3KHHGBUEiglwgV4MLBpDuBh7uahCLwroQS4E8h3Y3ANulKItrWDSwPcWuhdTxu8JFuAUXbBLhQFghDYfBQAGaWQG0kVotfoPOP+Vga4m2wIiG/veQQDUNyzxbPHDnbaAO6x4ubAAAwYFBoSDBV0KCJRzh3wk5gBXyJ8H8Odt/+19hkx6YwCJ2Uznc18Oa5BC/eVc+6PGzTX8EObAYVNxBwgAjgNjLvAzvwOgarPzkgHMzlrt37VLy8l+7TQ65uvIsCGr+VaczKnIMr9izPa+HpdJ+y+KrU/A9icQzPcTEM8z4Y88DWIr83SutKvRoUOOTkiBxp9d+RMAWPhT80upTD/LLCCHbNukrqahc3HPdYrvTtg7GHCXYzbimj6dlChxRG0iLbfvXfgdB1zI/aWfw14AYgA4jD24cPv24sdS43PJsX377MKLnTEIt5tCyLvAa06dVVdq6RPYaf8al7QvniGS6/rZp8MPm6arh2lRRTKC3tvd7lU6U9lhCJH6gcyhvfnc+dahv/o6V0GSuVyZjLnakAn/dAzqxidADMH9AxfzXeCSWpnpr9h7B+a2YMtZXfuT8M/x9WiIm2wgykplX+sh54Q4PDlOLZR+kDnA4/YODCz4m9l9NUiWJ6Kqc9D9GWpIFfmKeK3Ap5FyMTMmoT0SXwTmyXehz/3j2MQfktfbvr81mGCQoPGc8HJAYMcEitaRei51VGxX4e+MQNu5KYTJCswv4MeH2Hs5LnxzaM8dUZybSFNs+I2umxpUw7pR7jXWiQam5meHor8T78eTnag5/VaNnwyB/AOXAz4zMTQr0jgRkVzlGEfOnKMyZc+WWoVF+hvDaa1pPHg+39/TPzEFPJddA7umr690X5ELtzaPPR0zO/CyhLCf+AO4IMWSYAXg3dSQNbS+p0jP4RGiNAOqFk9BPzYCToyhsAU3GQwYqi13N7wgEFhicpfU6uyHKwD4PWEW0JYFPxNMXwDmuMB0ArpJYHCAQyHD/GeqNqyWS091xgEBZLCDCsUWjX3SJnZimgGadnd0IPFxYwmbfxkWbP6BoYrLZZoBUBTP5QcQXBMFZt0PwSEjRlhY2VfaHFkt8TfZgFN9EvwAgMYCYqtY6jA//KrLGhDXUU2E9ZwxBigJKWlqVtFRVtkUTEqBmfG5ORVtZyxzt58AQJT+MkHgUoDhIABI4b/lP3hvmcAu3ASGdt3sKOYQHF3oA4
uDFQwAUKFpBmYKHIctDT6BQIE5F8GG1YCN2Rf5mJ1ARasnma6JFDQ3rmbSXmVYy4oQILT+agJU/4ADwxR+MwV6rViHuU4UMKRDjGQWyJTs/LO+pSNwr421zAga1M4ZSBC19noJIPBRAJ7Lr322XrKDffw0C1jYq+Z3Aa75T91OLJLYA1/h9Rpsy5icz+uP7eLQc+9/Tgwqzj4S/jKaECMIPKx41ib78CJAwLVcDVNLNCVbtL8AbWaqMYBDzJObQp8mwUxw+gFpH3X/sNdzh6oN70x+gHiclJ/Qfoz5q85RCOu8ilFmMnoHzzS93FiEBVZHxxjYDwsqhjsBCRh2le0KS661U40xP7jCWckeZTUemhMbANg5RelclLWaBWchmUCaAQGsy6y9awi8z9i79vf5HQSC9sefqF8qnRVX6HoeVp4XOFtTp/i2KGykFUJqs/mEBZ6H+fZDgH5chX6my0LPE42QOX4ovAgQiI7Jgef+bstTsdnpf55zzrphBaZu5sjADO4HmPuzIc0gKBM9KIOZk0KtxSb9yJwXfPg3B2HvsaNquDAP2Jm4MR88b+afWIYFmR3YfrKAWm1OS4fYuodiMyblgBxzZaLoWIU+536OJt7qbMQGGgBwtqr2r8LfnYLLq90jP+DhL+dcYwEP+rqPzdmvzlwhXDszEZjGb+Lo+DAmxff7dS78Ps03hPxyWYSfJwL5OV8tyNvFs9lnHAI7Bn07vAwQwKYrGUorUD81HgpJiV1ajQzmCJIr8PATYqEC1vxzz+m/v/h32Bh7jhPaDfNhLbMCQa4kNL1tiHvVVxlSXcoaswLbftmaEE0GYFOWtfoHqrPwsrKJ0P7XZAHXOV35KpP9iE0aYs4DaGmPcuocpZeg2WDEBjzd1UzwUR92Dp5+0KV/zIN/FMeOQYA0pvARAUG8msusiRbzwATPWbQcQoRizr0IABDS+peY+89TfxME1nNFy8d2F/fegsDqzPDGCsbmpIDkXNVpGjMGv5jZANmjhR3MkEBgwCCYk+jssiNfJJjDhfadwmMBBLuRygVp5+M3opwh+ApbZMPHoM/p8uI1N/hymp4jENnZidjOuI6PQyHH/DTa/BCKf54NEPsOwqBvNN4Oa941MkYjFQEKGsf+qq42jR+jAdcRb+rdEvhrYwYTBLzcU4CcxhcsM9bG+MCUG7Qfmr4P4TVb//AXftjW9/2LoJoHe6cgtxmXYY1/HhC8EBBYaatEfJ5xeY5js/R9f0lA24HR24G1hg4A6lxAFbBvAahYx8dctsuFYr4GiAlA/KVQX+/bBD0p4bx/yJxH7h8jiRloSC3CFBhxbsZXB9GRNWUgMsYEA58QJCAau6l2H/fGUMhxwXHYxKTjwGGsQB0YlEChIIjmLsUR3ISQe17hbacEAH7cXtUd/F5+CPcTbccCAvyhD9+f7/AD9KXRolUr9edfOvRiKG5z7lKE/VKofxH+Y0717eP/ZaqwYNditZrbMdf3DVfLEl4ICNwKe4BQ2vdutJoU/cp5lcvvrpaTFeTj51xsNwnMi+4mgn8kUMTYgswVheIjIT72r+EgEnPADVHoUae9ujCjxxlIuFAfbo/CMAvOXgZUZ+eaRbzer1Z134Jah5yM4BB7W9EYgo2QBs3lFgjtGYkbLFdqUjR+HGuCeTKBBgCjavz8em+u4Rdf/OnHkdYwNin5MRcSoQMOsPQGKB0fvD3asbg2vxDlJ1AgB2Bx+h0JAGHbU/dNjb5+RE32vT7q/9HwYkBgFWDrN4L4mGT+zeu7iK/prZABwXT92QtHoz38QMqI+wgmE9BpAgyTOkeTw7TuwKTQmFN9p6ZR+/aAaVGdzGIcuXDldlbczjFm0j7pae0S7EPwOeo6mv+BTHxnHPDnDQMBoq9qAOCTpKCI2ZI8hyFrmtWX5F9vy2BujQF0gGje//IFH2YET0+4Xp9iYY+6vPdo+9ecMWjvgSDMSab7B83GO/KYGACDQNkvwn0hYb+0OLGhQP6BwE
C4WpFC3Xs/neNLHHgfDC8GBNagi6DPuMW/G9d0+plAnx1SS0UmM1BMljgoDZeWw6FAMOcTik8n1lihQw6bDzDUXhYx00ONAYypWYdObTtUMVzICAiGDfclGFSQ2NUPgDynWUbllVpZ819I+CjtS4DAxQBhbi9hohgYeGf1egpsJZodeXBqqhsg6L6Bea74Adz+9+G/YhJccX3ij3vkvIHCAugH2KxLVZsdCTLZaL5+TORJNtAFnr3+yQQ6ABA4yBoXcwCksQFiBatOp94+UX971aPhBYNAhlXLk33p5j5dE9oHziJaaluUhVH/hBpmAoev6mkPG5ia05nAGGoUfEDUhX+yg7n8d3Y8PRTHqALvr8XmWgVjmguxbiAtZBpClVoVmMtjF+p95h2yS4rnfSjGRY3OqnVk0/wXGBAgfAjpHDQoEFoi3mHIRkO8llPwHYSsHC1+XJsj0JkAA4CxgOv1KZf6brMDc3nvjBOIvWWa9ej9wpkA2/c5f78BwrFnATnjjwR+J/ySLCDAJ9gI+XBMwHNZdG9DRaULbw4ELwIE9mh3Ru9BE4S0dLIq/LK/mRPePlQIOeaD5or+OhkAYD4B67RGvcSAYL5noAUIpgb1z5OZA49WKIoFKQ7aD+DwCTJHTJaBgQJANLs7E9XfCkRlAoqi/acpcEAvE5j0QgBwsRWTFP7uEXyCwOEa3+oqvNewuRB2TqkeI5+UqZ2TsH7M1dnAOhLAn/U+mybcf5OVGRAQgLrg5Rt7lyLYLLyyxK9AUN/06yDBpoW0NQQYCLyLLlyfOvKmEz8TD14ECJyFsw9mrt8jqHMC/ZygyTRQzQNT5M5YXevnhXnjHA08bB0CtY+Dqq84YvtMr902N/3GAKBKLyIZi/B35Y+BY9hMxWNOX3ZWICo5dDZtDxKc1OguROEXpfcGZt4u4TOYE44uYQ64g9ABIIUfKKuhTsdE+AV83oaYgS3Q+ioyXNSTTiQDsLMWXxyDNNtvjA4ET9Mx6C8RbQU/VwVWW5JdbOTHMaCaAzmzb12/rw39kXCLnb/0a3mOgDBg8MiCFEDwUew9kXNY3RjCzA4WBnweXhAInOXaXgOid7G9Xy3OQXNAFd8Un7SLp4aaEGMyHOBwuEAFMEx4SfpPtNUEFlBzNClkqM1ZsnRYQO1ZB8eFb2A6Cw/e6oHDv2soCh2CgQkA/s6DyjWmJsTyW1df+tzLr8kALpd89nEhRjDZgA7A5xnNtROAy/x0Mlw3iS16MrU9NmCwHq+mQAJDj2dvfl+qex4/FSBY1vzzKdsU79+VEIEtEAMyCSyvLozkHD2b479lCUX472/XocgH/QD9uJgG6yjCvfBiQGDL2LszIER3byrMapndKQHBajXQgZ0FZ5UlVLHzST506MJ3WKwLv9r7w3poPG7KXwq/MwLEcTIBPawTi33SXI74mMgcUpz7kDnkN6DzM+dOGDXTcuFxFqAK6GVeMxcXMu1/mM9hzPcOxnD737S1a34V4HK1mhHqZO7FTjvWP+2tYQ74NipkK/jBE1S38wRyWnBzCj7lp+F4paECDLQGoAhwHCPaIVs8bX3f+vTd3Vx/Nhe69k8HI00gKqMKPqyIAAAg92NuRwCC8xXTTjcs3TcJLwYEemmEGmnlCPmysH+zzpNwAAgUsfM5K8yE/wRY03rln4QZMNypZ8fRkW2IgYfy/P5QxLHPQICkv0d2XpH5YdP4qKnt+41q/oYAQJNbF6DrdUTk/L6JU38E7dfjmL6Gi+IyDgMK/lkhLmnvX3EFkJ/uCo3PpoAtBqO2H03Jgs/CHyAwK2idElwBYMQkoadkAg6mxAICDNzvMgYOX5HK2wkObOwTcCG/rE6+48BxuWxAgYCg2fp9MlDQ/g2Ieh8MxZ6ewOjhW1PgQ4RHPz7ykwB+GcAVwJOqfqWIfDaAPwbgCwH8JIDfo6q/KBPWvhvA1wP4BwB+n6r+5Q+TyU771/PJDkr1MCq4Zl8uIsFXmEc9K9y38XYhUf
8+5Zf9AaBHlK3ysa00fJjgD5utJyPj3GNtnm24TW+sIcijwjShhi3tebkoYnUhtaXK/WWjcVxwGWrvHHhFeD5J4xsAzI47MjbMAZ8LYcIP3kcIf2cAYRQQGwg245OEyhd9iQXYpCGfZpwaX8vqv/whUdjIQDEFgDAH0h9Aw338hl9MBtoAg5CDb7NgaFkAJBypXn+ejQTc+d9ZAPfwnSI7UW4PhOcwgf+hqvIHRL4DwA+q6neJyHfY8bcD+B0AvtR+X4X5VaKvupf41hy4c029dnUKzpsYAOpWoFB/s2/3PGNfzAQUbg6Qr4C0mCJZgOeay+LCn8eKMUzw7ece7LQVHQxgADCnH7v+YgaiBgTxskyAlE5npr156IBwMVCYDshkAZFLZRPA8nKdDskKAE5h/bqkt6VuQuBRBL+Aw7jGj4cL6/RgA4ana1mGfFlvMEAgTY2oF6doAJWh+gRS2C/1Fd/L5m3A4uDDFgREMj57iJsDtae79crdk52F4g1E3OBNwocxB74BwNfa/vdhLkD67Rb/R3S28F8Qkc8Ukc9T1Z+9mdpio5H2ADUaNdy8jxU7iZi0zpyJVkDwZ4fzym3bac+6L4FfMpr7M/HpFnSQmPuM11j221sO6loT8XPZbhfa/QwjVCcnx/7Pc+xfR3MOMei+cZ0CPQS4XlMwrteAG1xNO6lemgnANi1Cs4avwAV8YQNY4mNuwLhC1QBB5296QYcVZMAmYswn+Cq+VkHe7seR5T5s8Y3U2BY3v8VSwKyUSfh8PT4OsuF9BnkIPuhakCmQWj/bswp/jbXe06S9v3hXGcNj4VEQUAD/D5mDwv+Wzq8HfS4J9s8B+Fzbj8+QWfBPlBUQEPoM2cc+9rEQ4PJE3vaTK3ddbo4KYUDobB+bfaDWYwGTFH5+/4B1e893teL6u4Emkv456mqUk2puZTa48ZETcSYkMFqeoBGwpPPafIPRGcRkAUOvOOyT5yJzjYEJBoor0g8zQcC/pVeFvQACrJNG4atPYEZleRwI5qIgTxjjaQGAOS/T8mE+ES/rAcSIhvqis/4zMyCo/IWm7hYb3oTZgRn+DGR9ki9GJOvEab33kwomDBIa5kDpO1FV7W9hA4UXWB56/PPCoyDwT6nqz4jIfw/AnxORv8YnVVWlfrniblD6DNnHv+jjCwbYVSidv9txTVMuypMihE8yEKDuTxNAl3hgfbko2Bh0bYIS4R2+KXilMoZ+7j8CCTQqC+6ACQZZRGI4ngcME3pjBib03mkHddphgHK1Tn8lNqKHrZwb5gASAJDAkPZumkwp8Mj9iIexgCf7HPxTZQD2m0BnAsWpyCzTBDdJByfmsc/Su5QtjL5Xre3CLwSyEQ8GWpigMqOTjA9gZpbg/caFPGClmAWdC8xDOlY6T9HPgYSHQEBVf8a2vyAifwLzewM/7zRfRD4PwC/Y5f4ZMg/8ibKzJ2Cr8rcCryc/mNI/RYlF6Nki2INCdlQPc4iwsQHAZhWuj2bNvyUfZ5o/3f2xP7VmBwiNB8aEHVNH3lkzJ0YE7E1DBTDU50Yccx6CXKOj2zhA7DNYHXqga3s+du2UfoLU/mW/jQwA7hR8gjoT6OaAVjbgFRqaH4h3FuLYLqtOPX6dlxgBa25UMHCwjPol8J3lN2BYgKSl69qdAYCYgFgiewbAPfJM3B/XyY98kPRXAzhU9Zdt/38E4F9Dfm7su7B+huwPiMj3YzoEf+m+P+Asz03rKV2Ptq8nJ0iwmXadSqW02yV1dE2HtICFAzlRqKczd0lLb0Ct+EC2v7GwAE8j9QgJB5eNMpKMZAD2VeIxSNNR571GASkPGNBBq9004Q/tJqzVOA9V6Dn9CUzXYAMu/KoV/ITYgJdfLQ/hZ1A7Q1sX/m4KHMd05slBYEaaHaVuuhmmCbg7Yb/xi+YhMAgAzQMEvFC33onMYhk/EB5hAp8L4E+YA+IDAP83Vf0zIvIXAfxxEfkWAH8LwO+x6/805vDgJzCHCP/5x7
JyI/c7FqDUgRYAmEE2ByEPG6EXoUpsrIA1fTIAXU2EBVxyyNDBo+Z0I+zKwm7TXSmOTYK0+YmehpbR3C/PE6PIMr+6JJOiqtgyCd7RSyd3h5x9wMRXu7HOmxqfNJ8dJBBUFE/Q84qbseXFoeEMIIFgjpZo/rxUtuPvLpSh2mACYl/xlbmAZ4AB4iWeMAcCYLIeA2yl7k9zjDS/NTgzgEj38HcvGAAQdRlpdiCIsiGUUVF7Cqrrx8MjHyT9mwB+0yb+7wD4uk28Avj9z8/KkhIJt2YrLsp+RyOqjV4pldaK6tq/gwI9c5tmEzKedlxhmYSfyxKUn36h9Xeaf7Rj+wUAZKcl3R/PjZqJSVPzeEDmegjAfE9hfro001dbZcHyOJd2qxo/jxMYgtJKrYusZF3yOcf0qxngQFBMAeh861PUkhd6XyGTVGoxNgMu5BPIt/jmlQcLtIGiA22aBR0opQJAtAdWIOD6AtUTHEzZpJIAt/W9GW9arUrsGeHFzBjs0zgtNhDPLsLCAvhaCzthLXECl9INI/AHWu+hzrtlA7qyAQilU7yBVRNOYNgINP3qaIGDwmj1kHTUOyBMMGShJjAwsDrwG9SmIsPuBXV8dRAYmB8zmebAKvCr8LNPIMremUEDiPp1odHMgVn+NAcqQgslG2iv2QuOQ2i8vy7qwU67vU+gHYOKEkBR62H3q0yDmYPPx2CQmHGx+PbSpzIDBPHPCi8GBNaQTEA1OzqfLgxhExiR49j3u/ZXuqizALqRHY8ZvcmbUBG4sy9gwNq2/rojUAkEtTAESyucVFn2nmdYV4lREE9SZtxcopBfr7rYMw4Ah5kllxxFAMgsWPcL62qCgu2+T/AZNgPwmm8AujmAMcsZmnh5SBEoFtfwCfQhwni3n1gAyNsP8gPEKExlAyh1QiaEAaHPIdj6BQiPWfiDFcC0vbUV96PooorS0o+GFwQCujnUcuzjyE4F8xwJJmuEDQDMClaqOVSNz6DA5y30iUI+V6CyAY38ShP8nD7r19EvhLqNAJDQq67mQHVOZQdU1aUephLRUkwv/qD0RgGkA/NjrPZVo1hDESdanzp+rfwVBBhgrQ0cBFzwdTGVqMyWjnAhIXRsQmTCtCzyWRyEja5HfpVMHhbsLEqABpe9pKUl7ji8zGdAUONhM1tnN3A/SGoafq/ouSbBywGBJeMulF3jIWRIilmwlrzqgdRQEVkEXbZAMOu/Dv9J3zeBLkDgE0oaAPAk2lPB75OGyvku+L0OZqa5c/W6Cf3Bt4mvVOxazd+VZACQaQYch01rnjXAVLlqQwRAsNCfH3vN5Nz/sg4jqjmU9nhKYwE9ZwOxzdeCL80nsAruZrQkylTbwBkYOI0AhJbmUeMASl8yr+kXmL/ZDaQAd+mo1JyFfT0QXg4I7IhM0N92vFxbAYIrwRsPRhujw7EqtPMeJ/4ooTRJa23ZgF3IcucCqq6tYsaca2MW8B0jUIBHBug8z5xDdMSuvbwgVEekJsqEHbURAmcBh8Tz57zbCQKQuc0JL1Xjpy3bgSDrL9rHBCMays7Fa9FKANDqRsQ/LprChvI8IaE6Ii6YAI0MXCT9Amc+ATYDuB+xqZAiyoLuJgWl280Cy/gKngwG3vym9kfvwBneXyawBD3Zb3F0ijWK5G7tkHydtouCYqxP80WGy7P8nPCkodyPvs7mimU6O/A6xh/auFN/P96MGhRWIClMez8EEM7BYCQzleHX+tecRCYQHQcOA4FD8+3Ac8Ff9zsI+Lm+P0Gp/iK+1xFSy7qwVC3Lw5dShwR9zj9PF2bBdLZBfgeh5y2mAlF+88o2kwDLdcUEiDrrjtac+xD9tNLOUC6EtQ+HFwwCGfaFEsSwGFAYQEXw1FRxvKECseeQC0H4W73edU49zZXE6Bip0w9PwuRLVEyreZyYECowpk/+UEAPMQHrAuTan1hDCO+672ZSMT1sL7ZC4JQoRKBkeRSrKR2R4hgpWO6x9vwqWjsUwZ9t1k
FAgFi4yLOiHQgAVBMJhV3MB9trUYrQ15PtHHMYUSS+LDX3r4CIjT6IZWLAh0M7KC+zAxfhlmV/02sDYBgBUxdlJbCfY9atINW8EMB7F3XgXp97K7wYEFjzncUCFy56mZrgRy+NjraAgN8CrmMGAn6qec9lCrhvXfyAFHI14T40j6E8T0BSk9l5d7qJH0cnts5+IIFAOHcbwb8BBLlvWjL6DjEoKnpoOKqH3eUa7x8mSAQQeIckweY2CIbiW6Vja0OICy4DAahMm/3ID/EqxVyVSS09o9dmGcSr2CLXCQg+lTreUsyvTxRTpvWp/mZhAQOvi9aWDJSlATbH/rzo/vknK1sT5kmvPRxeMAjAwI56VwOA2bCJjKUTy9pgdhXWd50qIDDWMPCmsFf6715bWmvU7GeNfdG59oDvq/p3CpJRxI86TWggJc30KBA0BlDqmOpgpbp+ca2nQkldwq0DMgvwYWzIZivrMQNBru3g034ZEPz5Wa7SlJEfz6OPnM/fpNUeI/ZmodAohCBHIXLViM4GqnDnsGJ594DrkiVaWn6phc6BgeHDy0k7Jicnb+HdDS8bBMBCjiqVDgAEDEXYCyCQ/WrHoYYKiGgIr/dGv9SUeZjKDAgTFAQHCb13tuFsACT81mASWdDohJNdSHrfo/9shP0eC3Da7Onw8abeo+Pa88JsCiKRwgWzU11LuuCrULqi1AaoIMCmBANB5CaFPoWfygA0X4EQgrT95DU4DpvjcMw6ntOlBTqnHi4OSDY9clq2FuWymgJ+7OCzEXAKqX42O1wMdYnQOObGm4Svn7gfXgQIhGByIHt1KqVEPZhAzfNeaBq3RgeBVSMlwFQgiLTEOxmBggu/XRVOWgYFJDNwwXehVxJ+t3NlAHoZ9hn0gTGmk2pQhyoafdH0tcOubCGKRGyobiE1rs4y9Dy74DMQIMqTTdS6obgAzYboAu+g4GDAwOx/EwPWKcZ5mKKkBAAaGTMBUZ0gcJ0AMGSOGGAM6GFTokFzE6KfJIB2Te8rCeWHYsmxR0C+VvxJkJODpWIdnaXWzb30W3gRIADczrd4gYEAgwQNB4Cq5baAwJom0MDUsXUy9sT6vte9GjC4MxBazQFmDEqCLw4Axgh8O78YNNf7uxwDOqbnOh1vrse8nEZ1H2IE/rZdrV/xGm0gGfXMzytMCcZucnBTWDAXAKggzkKRQJAO2137l3S0HfNVLuSmGDSOEccBJMcFw96AHGMCgI7rXHSVJyY1gJ157CMSFQiWdQhoy23gWRGgsR8K0va9km06uC7n7GEEho+GlwsCJOPCkTwi4HuaoNCF37e9w1ft78eWhpsFKPhQUDedg/aLYxd+BwMUwc+ta0hb5MOHrgZi5lp9h3/V+ufn6loDIfhRfvOdl/qpLRBA0BXuoot9UlTV3qBz4fkvPzJ3xJlAglN28i70qGl742huEO1QMdJTGsYCdFQAmBfSMGyvV66v7W+yAP6wqFdm9Duq5uhxVTOhPgi5z+yXnVXcSO3wkfByQYC0dPgFQrl4SefWX1DZCX+kzRqJ089UkA9JASnaT9zbL6vgm6c8jiWFPU0BE6IACPMbHCM+ctEpJZsEnQWU9weit+fEI//gpjOlCgYEBNIgQGt9c9TmYBH8vo36F8QXdna2dAJCtkVYJj1Q25Y+rxpVkT8lMJjmwBhH/PLdhCt81aVkAOwQbFsW/MUnQOXjzJ4FOT8fEhCIqo0B9Ep4XnjBINDPOQvotnwer8KvIfzLtjxVCVw5J2Qe7Cpd2745Fd1Bk4zA9KqzANqHAJdxYJS17qgTeT4YAFQpbrccWTy9VaSBAWlcfk4wHiprzioE6jLdEUnH54DgAuHm0xEPR2bkoNs6I6D2kn7C8lUEfiDmZnA8gBR+mw49dH5/QdVXJGVA5aFCqjPb5/cN8qtCBN580xuFVhGl7PXEm44PvAgQOAdB19Fd89crYn/R/LIVftesPRXW/j2+av4b+5Z0iCLR5LBNwSYBMC
6CYwh9rIJs9piZFqnYA2jRzRs+Ah+ByL7orKBpLlCX0hRn71g51yGFX0ueQHnJeL/GQQA62UA6Pg30gLmegZsJhQG4LiUN6I1KNWoZbWCgGLGd7Onwj64MF/5L+AL4zU3herS2iLc0u8Y/GLyzR0v73Q1nFzML8Igy/4NOPTO8CBAA7jCBdpJBQYgvprBvGAClI9GBPDR90x5YBglAQn8DFFKgeKTZOmx4q2e6x+APW2IRzswjaygtnXTxEXhmSJDm3zrb7aB6SirgT/RZjhrCBfUZfMwEGgsgIAgwsWcNSd9EjLH7BYL0tBagN64XzS62pXazfMxvCxgIDP/mgH2YxfI1VzI2INCLLV7CQLBfuCVMqjAFshxl0pDVZ/d7rAJekK4FadfWU1X4LWIBisfCywGBhS5lQVLEjRV0s6CPRzMDiLhkAAsG3MHomDXo9ZukJFlCAIGgkGLNW8A6ndppHIJxCK6dCUjmuQCAd/gdC1g6b5bQ63Hpk9ZhM98u4M4ESNA1f36RdiBY/BaAHEjBQAoOjpyGfPhLGEHJfF+z9WUKeAgbEM8v04yHxufGxkB8nQkKjOPAOK4YowJAWcZNfXSFpw8jmQCyfeL7A3YiGKhdlF27INtJ2JwQn1Phmt/ntPj1ulz/nHDcvwSQ+QGRHxCRvyYiPy4iXyMiny0if05E/rptP8uuFRH5wyLyCRH5URH5imflyJ8ZnXXVHEll+bjZY8hGSmEysGF4tuP9XHB+bj4zl6Pi5+Y19Us062q2ubClv9BSv1Vfy7ZhBl4W7LZ1zns/n8Doc+HX9Ocvgaa/zJO/Ud77928BenxZE6B8HHTEV4KVXxUeI9INau7fY0Dub0GveQPLF4eWpcp4rQIt+zsGwGDg7VEUNQt8HLMgrkLppuM5bNf7um7XzWXxnsszw0MggPltwT+jqr8Rc73BH0d+huxLAfygHQP1M2TfivkZsgcyIuVXBfuB40BcGp7xLSMzUDs+uN4SGDjdpBU8kt4l0s4syCPtgQQ8rJ8tM7NcvpKvf8X2yPwwYPSyuxby+itl8+BaM2VoK+DjPG60LXveYx9YZDMen722bjN7d3r9yb27sDbyzYSVHs5mT2SL642zQXF+3FnfvF8IACpcqz1zKOynsY169jxqy1Mr0W1wqeGRJcd/LYD/AYDfZxXzDwH8QxH5BgBfa5d9Hz7EZ8hc823OwNcBcEpYGNUD+2kquJABSe2Wp90IRrsmH719zY0oP4wtqZY9sCUQTIfW/ET5fCtu4DjmVOMJHAMJgP60hK0lhHNv5oYFfzTB5w99VnMAWcBpC2y0GZ236nPAVit5Vgbfh3JfAY9naTw2gTbmUCRHAs9AVtIKMawgl8WP7AtoZCnKSPv7rNpjSi+xKC3Py+s+XHjEJ/BxAP81gH9XRH4TgL8E4NvwIT9DVoOcgEBTttv9WwBhFRj3KJ+sz7gVzBbL7iygVR8bMKxpx+sI23S7ueOmxNGA4MitKg7NLwkVdkRcwIu6lE+tGzdAu8UChq5xkRhvt31SSAZckIUqp9cbAwAVoFVxgElTdTsBVz550uLRZUia1R5UNfE8U0ruDMH7Fud1uy8Zuauzgp0MTsYCWn6Kxt/oolvhEXPgAwBfAeB7VPXLAfx9JPWfmUtj6uEgIt8qIj8iIj/y9375lzca8IEfZBGeognjOm97icqvGuBubj3Ta9yy3R2dPUzaSYk8OhDMHy2GacfFZCDzqJgym9yoAwCruzv0v2/d4z6db6yhtHVIev3Le+eul2i7JuJP9imdTkhKFZPCmN2BAfdGPoqYM7NxAcS5APZzVNU8KqS7dChu6Fy9aujJtf25PeypzjY8AgI/DeCnVfWH7fgHMEHh52V+fgzyBp8hU9XvVdWvVNWv/Md+za8hLZb/orODO/uxFfotQJBQBeU20NjZ6Nufa9QTYS/hEUTJHplRC7DR75h+gOMwQDjSLyCyqwtKO/wOXum85fH0sx+SBXQ/QPncd6
WqS2/1gsfz64s9W0A9BQCc9PodDBP5l7Ud7zZXk7Bi59uf9KucCDSkxA/2Cfg+3Zs+gATsArIbFlCzmi7M/jsLd0FAVX8OwE+JyG+wqK8D8GPIz5AB62fIfq+NEnw1HvgM2RT4Y/mxQKyOsI0gIAWBtSHHP6r7ew7Llqmc9PMb7X4Cy6G9/Q7PdwOCaRrYPAJnBsdRRhu2QELp9qABBF3o+afhkO8/d1r1e7tAANnR53OdLt/YFu1eM68FHFAlIcAj2R43k2N6jZdIzPX/fA4lrBr5j/0GCGVf+76UOol1I+gafyktj5XqmoGB2JabBlw/eH54dJ7Avwjgj4rIrwTwNzE/LXbgLX6G7MwnwJSO5VdO9gHuN0o32f5uMvpScxRht2jYz0Lp7rYtoydF0hbjoxBds7tv4CBGoHpgDPIFsFPQ2U6phyY4Lm/KeSGKz5r/BgvoxSw1YAcKbtt72xb6K3bdP9DPUa1yq+S+iXwxlXbP5d0Eh52wV+G2Z4skaMTDDZiU2oWuUehcQIfuUWjmmdJnkMl82k1r57obHv0q8V8B8JWbU2/nM2RyBgKzREsfwjkIRN8oya3CyvXk92v86Xl5tFZvt0CerT05eIFJbx8OFGIAIxbDODCOAzIOCMY8h2QCEMlO1crC8ls6kp6MDLS4wT1/8wjHy2iHR7DTUctv0gbutwDgpJ5nkpmWKtW3cM2vdTTrIysozBxw76F+1IW76wTvV9v4NRe5oFa2kB9qZIj77GlSd8MLnjHYzznMnrOC2jdY4B0lWSeA69eS1v2sS2YB21EBj28ZuxsyHbfl0xRIcyfo/3HgGAdUjvnm4RD7ElA1BTwdbU9aO1SDpI3wnzIDaDyjatsm0+pbWbaL4JfcUDgDAG8HV4AL/poS8dsk+0ro193jom50T+8JA/u+F8nzHPniPEeHnfuqCUzBnnp+6CCgZ8sInh9eDghsWqPggtVSWR9QNtdRFWq9zCqvAkq5kLSEx2tJ5QGVtmm9La6AOks8u5sCaQbMmYQTCIaBwZBcHacCgQvm5sm67MCH+86GBXfMYHbs3mOl4KSbUQ166QbaupRGdTdk32nPjQDzswj+CQ6qs7Tp341gORisMX55gEMDKsGNN1CtrpR1E7zelJNJX9ZG+3Oe3jS8CBDoDrKMx9JfZlwVuK3jCwg9pdEtCAiKmtxAqj+MOZ5kQzxYsCUi2tv+eAMvDk1iA4fkZKExjlwWS46yXh6nUR0DvXzu6PJ461w7BrBhASO9fq2g2dOZCWRd3touiL+qRDrvGBSdpLRLBT+30fMFII/fpa1xb5gDTfuzY1CRnvqi9RnMNgCx0y2ex/hbmAHlqWc3C/pGaPAiQAAADtkNVBBMFhAAHayNWercKZrVjmuI2m8p8YeEnNRdAYeuebe9jJ5l+ok1aWMDYiMDzgKOQyYTOIwBHAfkKqTfcl8sn9psnAAAZjpm558OFzZQiLptJlZUp7a26vTfzQKuo0gP2ApOSYequzXZqv0ZbL2W13tqCH3PsFC0MY96xKmeN8D6hz2VQWFROlrLF0k5p2nZe0vhxYDAGbeTfkoAF7x6jm3UrDQRt14r+DL9CridN3hycajO2R4aHbgh+KflroKf3ZTjkd5/PgfW/OlcrPnI8iX1z3i1Hpxj0iz42P6yvqnsIunMsnpUqkev1lJDN7T9cv5GNWu5jxmJCy3dGM6BNMF8H63+MyskxG1fATN7WrysRUtVFKs6bHJbayG6X0mkrYX1IUDh5YDASUdQ53ySVCn6h8d5hztXHwDETFhOw9fHk82wTQMGdgCiHdPip3VO/e1CSuR5c9lSH/3dwK4EU6DVt1QevoeVf95XNVte7xoxgSJz3RZ8CQM365lXgfKPuSQLyGdxtU6Zqx6c6meozbOtcmIoQD6rzsM4YMsEo0zN9v3jgBwXiFwgh32IdXedmW25OkoDFNoG0ynn+Nhz6/U481yLqPQX0d7t9MPhxYDAdkaT1E6mHCcUd0
f4M736RF4kPzWYdyrupavA53FJEayqWCdtgze4bXVzfQq+llgt57wjrAmknGdeecTbgcAnocw4bfdy+TzeBZ6EvJW9XKPZZpnKev+WJewAIMlHZtLXeuQ24hCMyQVfwuFawcAF/7IIPY7LvHYHBPy98SL8SFAgoc+RHCCZQ17j7GIHALHPXZDQ8Dk48DJAgCunhZRH0jK8qIhQpwot1JKnuC7XcUOoIiT10rxLNgCgWp8tIUhrWSIP5C3n9M/uK3WhdcJKdIImqKGvQ3P7pQQEhhwMHGEGlH2tDIAGz4uudi2/1LaDdYp47KvOjl7wIMG9iPIGCByww2G31HjdnUut5dArZCfIl/WYf3RuftPcf5dF+MVYwV2Nv1EGxVHO/ZRqv/wNMKcLHwwvAwRwlu3sWG67C9WImpYRq0Bduw7tV80QNmoDAO9xOUsQyCXIq2bzZcwANwOCUkT+bwcSGOE4tzPP66dofxLKiM9+QZqiAQHfVeI6qNR0tGS5SaVXYqnue+2x0doVXxa8XuZzxL7USEFMmtIQtpxqDgIFMaEOM8CFvr/Fud06sHTB3gt/mUXpcXEd4h7NM7SyELeJRoOEKfqcESy8IBC4zQS0VkRQLNS4uIM7pSWP2mf80jLqJ0BQBMHU7AIU7snOQTonMP/FdpbeWZlRr3dgauXvAqlaLzqLj7SIEaQfoPsMUvO7ti8OQ1BceQgLNKln4RqntinqHHHPqv9rMjzcWE2BClQ1tBELJBMAmOLvbPzmG9gJvt9fflXofVRiAYXCBtD2+zUs9LMiwozzmvU2W5D/fngRIDCzvwrP4vQTKVf5UF809QmQRHpC91HHDIApblja787ABgABOjcXeTzJW3yQUNplt8ui9IsIsJgqMQWk1gCBniYL6MyhOAg3CibTZYHvACBUEhfzZhY0MChJcizha2mOUidndYxoMjc9OwuopsGlsYF1C7mYf6CaCatwn2xDkVXNz2XJOhAUgaftAtjKVz4WXgQIANgKsMZfrxANJGTqpMgPkBSN1Ckqp7XItaWhII3u3yMg4U76UZ+zn29cZLnrxpW17TuxCyVv8+Sqo8t1dD3bi5U5OgPwe9IvEOcsJ9XP0AV4s7WFV8VZFPtwNmCwuAd3wm/xLmvdJ1Kq026eTevDqukYxE7jywEfEVgcf0ImA/sH/FfanBlA28Yl3ObSugAdh0mjBaA7YLN58Gh4OSCwDdn60TV2PgKpjb8lBD2OBL8O/1uv6697VScCohP3JYPusJE1D8QCWiiaHvWgCL1Sh6DrqjOPsh+ag+I8PswELR0szAfOSzfDdr4Buiva0B2CmVBLY43eCn+rkxp81qIE4Jb5Fe4HKCyAhgIPP6Yhwt1oQAOF+eiu9Xm/CfrSX/J8qQLxtmZgJlMt5neMO/WyhhcDAnsqN4Uv9IVVmFD1KA8jitMm0+KFDVjYPKbMMm9j2TlTo+jweNas7H2nP31gOe0MgzrJJhWPIz28XmRoENcUFpAaIjWHFuHmLSe9O+byCmxa7uIcTPDM81nGrFugA0rBVc7cVvirInCHbqaXGpiFvpgAcXzJ35EMIWh/A4AOBisLwNK2t0GglJr0UFZAZX17QHhOeCEgIDcqw00AE37JuQHuYU1/AXWkxrK8YwYbc0XfFXqJ8yGseCJWg7SDwg2h76dcO1EfUX8O3bCYAKian6l8soOmyeHCQw5BpbjNkCAfg48LXMyREafbUc+beQQJcLn4q/Jfbd3AwYoxfEEHRB2VyCJ4ORaf/oC6ZYFfJgoVZ+DKClCABO35nv+uiGpnuEcgdftLs63/nhNeCAicsZd7JoB1OIsvYAGyRT01AoMQfKL/ZRpsZwAGMstElJ3TMLO/4QatMyil1TpG18ARFwJcNWZoaNb0BhAZhxByIIuX4FE17XYMPi5odUFxhY3xNUX7o92/O6aHyy4feS6jKgDkDyT4XXjTPFhHBwgInBXsAMAnC0UW/Jktu03iT+V/OaGlHQvY27
9Y6+EZ4QWBwFoV8/1+oStI85ejwPk4ZtZQ95HCx52RAcGvJwDY+gIAohP3y/S8pulp1fuVdtgjzJoi+8xqHiDuMc3BwNKGBf0hhXJGIgmgPS6AmKBwacUywd4BFXvJOGEB+Uy0C3I+f87tPzAdg1KEt5gEx0r9HQh4HkEHgPQJtOwWcioLWdR69WkobQtmf7PNhvb2eSy8IBBYM+5aC3A5VMTIgKrND4BRT+9Ulpr0fQ32EPJb7GRWfUrPtsrWto2Kb+dIbe1oWf9KTHYInxbtLIe0mWR8Oe77re5IVMG8IeJ1f54TqR3vVjiT3BqIl21YsfSLHgxN48u6X6btkilQwMDNAmYLYe8LYkShpaVU/8pantv5gXLdquOEZMUgoI61HmBfdzLn4HPCIx8f+Q0A/hhFfRGA/zWAP2LxXwjgJwH8HlX9RZm19N2Y6wz+AwC/T1X/8q1nKBTXsakCyW4cM6jV9oNK2+e0TOCTODh40LmWJvS28PP5sqAmAcOYnGx+ItTOR/q9l7Mz0c5t6babOK3T9tWF4zzoWgYZYjMVEPZAoF4N2OWpZC9OchbKeYlNFh0Ub3/EyxppreBwE4CawAcnDN8RZwJZb6A6LAXZC3eJi7QduNd6XdgAkP3rFAySqmmLD8+/DvoGhH3KrcTz59QeC3dBQFV/AsBvBgARuWAuH/4nkJ8h+y4R+Q47/nbUz5B9FeZnyL7q5jMAXHfoZapdonIqEwAzAQIGp5vLMRD31ymWbjevwu9OM6uLKfR2vS+uwaCQJr5E/qPna+6LmRFB5wQxDJSafyP8h4RmqmBgdcYdGZ6YVycDAumWQglOOo+0U9L6Mj0vRKdp+S7gAQbIskhJi0pwKjiZkWBSkaWZE2V2wECw/aHUPYNBFfx8hXhuUfMB31fa7zv1/L7usz/mSk8s+FPox0iQCPPuwfBcc+DrAPwNVf1b8hY/QwYFrltnhposp8BDTfOfnfP7OgPYgoCrPfeC+zNdo+d+Cjri2pUZ5LnQSmQfx5ClSgxtLiTcGQJ650xqOuepy9JZXQD2NufMU4AenVNkHQQo7dqDGUD8qedTBlODVpl27Z8XFxzz8twKS/mcBSQAsuAL7bOmn49bGUDX+gwO6ltkPSdPXd/4WxhCb++m+s9AIQQezAQ0tL8Dwxhj33Y3wnNB4BsB/Hu2/9Y+QzbNgRM7hplA7FuVExhEU0zJa1qNzIl2T2UALPwo4FC0vV/intkAg7kfwh70tCrSaRXMa6owmsZhZU4MwJcQ2wl/sXk5HSMfWawNALR9bpedr0b6gbR4Wa8txKQwgSq8frR/UOaVo9n55w8IFrAT/hMG0AEhhm8dVPw5oLYCtx0xuUWcmYUBYRosJcr+xa3D9r+DQRH+xgyeEx4GAfvmwO8C8J39nKqqlBVAH0rvWzG/WoyPfc7n3GACbudravotEzBg0BtdfFKAuLfQfweXAIcEG6dXIfCRHY1kHCT8eWmdOgsAggjAlHLpRHZnXEidmU2Do5oBi28gKtie3t88Mzhau10yhN4U5eoTJb08fpcllyna9zQrEz8X+LmpCbugs8bnOCXBvskAGlB0ANGWeW1tOPPC9Wbb4pfZC7+WfmvXERDsFn4NUyDYAPsEHg/PYQK/A8BfVtWft+Ofd5ovb/gZMgDfCwAf/+Iv1q1PACDNr4b6d5jATJ1+qPui+d7/lgF0UGDBqBqfgcGTM8WbQuudzn48+cjqYe1AXVKcBRw82y21XfbfHSBI63adCSioqGEmPdSNomySxyzcoHzlQdW2of2bj4OqYn1u7sbnOW4Iv5B5sPoDsInrgJDtET4A/wUYZJ3GVrHUfV7DfXB/HLGKKeSL9mdT4CMcHaDwTUhTAMjPkH0X1s+Q/QER+X5Mh+Ddz5CpYj86YFreiVYIjwk/a30pXv8dAGziGAyK1nfVWZ2C2bCr4IPOuxmwyLTHSX1plnNW7mIGIAdgXxniztzXJMwkmrbsNUvVpO38qrHOw5
6AZBlij+qE8c33mRZ0EDktCwEOLRKV+ySc4umQkJ+zAWABhZ1ZsclX1J1mnMXUreYd98CAZwKOjYOQ/QG+/5zwEAiIyK8G8NsA/M8o+rvw1j5DpvvRAQDTBFDESAAzgYj3sf8zBkC/QOeN4G+Ev/oFEPfT6UijgIBT9/h3LEKQOSRCLiCHFLLDBRuwEYJjr9UiYX+QIna6sGccuwR7y/CVyLS1yO0JA2iJdblhUyeYADMHyscZSAA5EUho8c7OAiKDXm+UyRs/De0vRetHWwFxrtYX1xrVrrZYbVfrfp99AJUJaIsb0DP/2kl49DNkfx/Ax1rc38Fb+gyZ4mSIEEiKr9W5F6RMgUbQKNUOALVJmO6zatw7C++AgB1MDW9CPxDvlAh0fv21qa6d/5iRoiw/HuaAMYODmQISCAqb0JL0rmY8UuNs/cv5EgBzwVB6DNpjeXcDBjwcWJhBmAaIm7Sn1x7m9R0AcAsItlT/RlyA6soEZg9k82Be33tflEGL3m+AsO6HarB99geMBRDWUYLnhBcxY/DUHBD2udL+FhDyHHdxdnrVcyCNb41Agl+Pq5B7nj1n0YCudQU4jjG74jjm6B4Ghhxz0ACCg/yorFm4s0nrmH2SUBV+7qSeDqXL1Uvax0GPfQNbCOhpcHw7DupNFyT9b0jB9DwOs0UZEEr2Ld5SDzaQi3M24YfXGT2T6L+zg2VGYOzPXdf+hcmF8Kf/JXsZ1a/HL9q++qh2AFG1PpsGDAgK/SQMEX5E4cQcUNbygPAoAFXQygS0VHD5tYruQJD0nxCZJP4MBHJj4/9jfj9QMCA655SLOhuYk44OFlQqFVpHLevh2Vp2iylQhL/UmpW3v4IaxS0dtpdzJ/nSjljo00nYr01hmpvU+jPbDfDaM7U8gx9fNf8ZEHTh9uE/WepwV6fWNs3s4E+skRoqbVqPNRRJ7pNCuhXngo6q9et+OgifE14ECCjOJwuxlndH4OIjaAJ+CgJ8nvl8E3weGUhETsH3PBfhJ8ERmQCgY8yvBumcVuyv1x62zTfAKB0P1PlgpsC8/6gddQGDeh+29bprga7/6b7OAmSzKzV+1oPHS7skhSvMAGIKJU3Qi18Uzy9niWtnINckMaBZ5gkwWG6EXyPjKOcTYJIJzLZLIAdId4BGkqLPdTDg/sXKaH/ONT8L+x4I3kcQUMXTuC7xXr0FCHxf1vgzwa+OwF7RG8G3PLH7PGSgaEnPf893AoDqgA5AjgPHmKu+hAY5ZgmvY1ADa/l57geXY83BWqldlYrnnRkBpXgDKxZw8iYgwXcBdgFjXiB87Q4QKO1gFR1oyETIOAID6ScRL5ghrmWIS8FSVIFyL/sYIxyDx5j3HvbMaEPBPGeoMBWCt5umwzj2tVyzCj9KXGw5T2Ng6JVGBWj/fQUBAHjaeDQX4ZfuF7D9WyDAlVzObwR9cQYmSCDuyrAIjs4cuz9gKObnww/gGIohBw7Q1syEMa6z4/VfQX8CB4BAIrOYJasSpLqZnswF6lRmc1gSjDMmxmQzs2Ovmgl5rgMCnwdF61oUr+aaG6FcyVIM27rIjSrwOnAdAxK/K2RcgesVOJ5mO0Fx6AUC4IBmHC4YAObKghrMK55Fpmf6BRIM0OIil9Ye5f7B04IJDMYVfXjwvQQBheK6YQJ+lidqooHBMkFo5xjcMQAS8LwGwQbYZ+Cp1x3elbwXgNrEHjFNIoB9RVgxcMTW5zZoEfxrdtJoaOs4Lvito3lJl3wZAOzEeYXKtXxZNp3bmO6YAgvahsxLir7T/eD7lC6z8oW5oIEBPYovLQCwlJDKGhdmXQaFHtf4XQ0EJhAcOHTedqgL/2UKvU7hZ5BItpFaXam/ORB025+V1BkriBeGOhjwy0TjfQYBBf67m+aAb1nXNWbAv5u+garxVxYw08s3sapKXLXp2gXnqAAghzmlxhSEcUgwAlHFYeaAAwCDQdC/7v0NIFgBQD07UVrLW8tiyW2A37Z0JyGl0bU9U/
t0uiEYQSp/GtUo6TWQWMyBljsChMoYvC6k1I0XNWn6rFOxer4aC8C4Qq5XyPE0zQBU4T8wWRizgUOB4XkMpUEaHwkG5wwA0R/3rCDnABSBHzxbcF7DAPNIeBkgAL1pDgQT6OsLKFAcg3aOBXzrD1iEnBqBGEE2QslsyeF6gdvBAzLEGADiBSBnBCIHhs8VGNnACQK7ueK2oAR8i9iWGmhUO7NepxBHfJR/q5Br2WjeQaHvZA6Uc/6XHZdcfS1uyfYGDCyry/mgKQBy3Yg+KpJ1KTogJlCiV1zHZYLAeAKuBy5yzDouQq8QBgTA4tQyfyb8iD7l5kFXUtE25K8qJkRoel00fwWDxwEAeCEgAMWpObBofwKDxUzoQr4AQCLwbRaAvCY05K53ajuG5af+G1AcI9cEGDKFKRZK9QY0BqCDx3w7AyCTIBhBlmFpfjOatedXK3BQ9AYMpikgoXKV6P1qDpQxd7IEmP67DyDiOKVW1TuB37KDOMnt5ftEz4lCq445PD0GYCYA5IAeT9CrFKGfZoGZAAYMzBL8aSzA9ThBKPpj0fobVsBpNRCIvrHEv4cgMJnALRCYVxWhp9GB3YzBPQgwM0CcSxawiePthq9GDI1TibaZfiIYJvT8O9yj7ADADToM1YdSA3efAP/bCIQI/OMfkdmN8Cttu+jsG0WqwDq95/jCCJgJ8PFSndvjhQH0cwD82xSL0PvPop1FCeaQs5jwyzgg4wD8dz3MHKhCryT07hR0ljBHMKrAr8K/AQIS9NL//E515loBgIX+UwAE9qMDXcsX4dcbzkGqvBKnnQkgKz8AAFjYAADW/EL7AQaRnml5/7cBBJ8eO5wJdGEPQFgbN0cH5q/DX/6y5npd747ZNeDiq5GKlXOZglyleLaPhKzvmECaDY1I9P2TxzBLOF/dvQGCzmMfDWI6LccA1H5jQMcVGAf0euAiYoJ+Wah/HyXwr8mdCTwdwX0TKfDZf1cnYcar2/veJ9AEn5TFc8LLAAG9xwSawG9GB5gNJLVCxNV4BgBUje8NtLCA+ZT4G72x2b4wAFAE5XcwQAAAAgwiH03jY+R+Nrp3or5PRaWsKu2bLIAv5arIOrnFCAj8JMtcz3tcZwJlE5d2c+CWTwC1ygsUr7md+zwhKwTRaDTMJxCmgBzQIcEC9Dr7lY8MiAl8ZQEJDjwBjAV+J/zZ72p/vQUOqeVr3whgQ+4/J7wMEIDi6boBAUlzYG8K9PVdPDUW9pV2LcygsYBtvPXcEPZgIqbtmCkoCTsLvyJBgSbNJN1bqR9oeHD1B2RnKvIcUiLFBMj6rvtsCS3C3yWNpTQY/qr9VybA5SZEaFkuj7in9aVmT0LIXEV4u6d54MLFw2o4DAjkCh0ClQPHVXxx8ibwxAKaXyAcg13gNWJnfpj6U55A53eKK7U8A0IzFQhQHg0vAgSgwNP23YGNxheN+AQCdWlCVBxVcKnQjdAH9Y/rvSHyeD7CBJd6aY5SeKeOWesGAGqg4NcQE/Be3kDgfGsmAXiikPbSlUUuAHqVFhzftGgWc6/5KWRqG3Og7NF+E3IB19mdkFVbM0H5reWyWKUYL58kmE7pvELHAZUBlSvkekDlCSrAkD45qE8QmjMIfbLQbHnS9dQ2QAp/qKRCxZrAe+kaW4gtAwBG9pP3FQQUj5kDO1OgM4WovBD4SqtAlbRjAdqOuXclfSf7154+wcCEHjKdTmYkTg1o10kKhrMFjQ5pzy8IX+PTMcgAsBPVXT2f74c2xQYIZK2LHQvojIBHCaIOFkqf8wi2zOCEAXDWAsMJDXpPiDLrdLgeMaZ+QPQKDJsKLFfoFRiYTGAosQDFwgIU6Rdwx2DxB9gzu/D3fpqKiIWeUmG2oC2uAMJ7CwK3pw3n0CBieXFmBv1V4kXoCZXzeO57I6FtU4l4HswEMMq3PW7bzL/YqJXEMJuzgXiGZuMp7fM57zwFCGr3yny707HVaJeqW91lDwbrmVX3nzABBgOPo7QZCB
bZT/zdhpD/AgS1LNz+zqzmhCGBysChV9ufbTOBQKGSTEAlmcCQZAID856dtieVkxqczjNMdbbKMM+Owg4Aa1qPh+P+Je8qbDo5VXBWossLVzjQUbYDAKM0ypaqUTk13W/1/JiSjA7QOwQLvNL9/fk9H379/uCZ4Rn3djNgI+4n99SYh67dXESk4yFLAsC2fEv72663W9Jt1HZkgGkUnCcJ3QMAZgyZH+6DradnpyRFtzv//PCCQeA8nGmJtxFYyN4onGmyk7h87vPPPP+q51/7pvc+v0lku/tRhtPHfKTPX70p7zq8lyCwdMTn9uoP0wL3nvUhMWQNj2X28SK99QzeD0vmdnnQ26ffVbhbsbcu2J3TZ5x/JHx4OHkvQeB9Dbea98M25eNd56PSQc/hMi9BD8pjdXb3ontelVtnduefWzcfHjE/NUDgk19vb/SIl9D172rhNw7npXsZ5e7hTcv8nJb9ZJT805QJfJRVezvtTQfoN7xh5k675DsyL95qOCnDW7XJ32a932Psj6VyI/5tegY+fAeR504x/CiCiPwygJ941/n4iMLnAPjb7zoTH0F4Ldf7F/5xVf11PfJFzBMA8BOq+pXvOhMfRRCRH/lULNtruT51wntpDryG1/Aa3l54BYHX8Bo+zcNLAYHvfdcZ+AjDp2rZXsv1KRJehGPwNbyG1/DuwkthAq/hNbyGdxReQeA1vIZP8/DOQUBEfruI/ISIfEJEvuNd5+c5QUR+vYj8kIj8mIj8VRH5Nov/bBH5cyLy1237WRYvIvKHraw/KiJf8W5LcDuIyEVE/nMR+VN2/HER+WHL/x8TkV9p8Z9hx5+w81/4TjN+J4jIZ4rID4jIXxORHxeRr/lUabM3Ce8UBETkAuDfBPA7AHwZgG8SkS97l3l6ZngC8C+r6pcB+GoAv9/y/x0AflBVvxTAD9oxMMv5pfb7VgDf88nP8rPCtwH4cTr+gwD+kKp+CYBfBPAtFv8tAH7R4v+QXfeSw3cD+DOq+hsB/CbMMn6qtNnzQ1mv7pP8A/A1AP4sHX8ngO98l3n6kOX5jwD8NszZj59ncZ+HORkKAP4tAN9E18d1L+0H4AswheG3AvhTmHNc/zaAD3rbAfizAL7G9j+w6+Rdl+GkXL8WwP+n5+9Toc3e9PeuzYHPB/BTdPzTFvfeBaPAXw7ghwF8rqr+rJ36OQCfa/vvU3n/DQD/CuwLWwA+BuDvquqTHXPeo1x2/pfs+pcYPg7gvwbw75qp838WkV+NT402e6PwrkHgUyKIyD8K4D8A8C+p6t/jczrVx3s1DisivxPAL6jqX3rXefkIwgcAvgLA96jqlwP4+0jqD+D9bLMPE941CPwMgF9Px19gce9NEJFfgQkAf1RV/0OL/nkR+Tw7/3kAfsHi35fy/hYAv0tEfhLA92OaBN8N4DNFxN834bxHuez8rwXwdz6ZGX5G+GkAP62qP2zHP4AJCu97m71xeNcg8BcBfKl5nX8lgG8E8CffcZ4eDjJXCv23Afy4qv7rdOpPAvhm2/9mTF+Bx/9e8zh/NYBfIgr6YoKqfqeqfoGqfiFmm/wnqvrPAvghAL/bLuvl8vL+brv+RWpSVf05AD8lIr/Bor4OwI/hPW+zDxXetVMCwNcD+C8B/A0A/6t3nZ9n5v2fwqSNPwrgr9jv6zHt4R8E8NcB/McAPtuuF8zRkL8B4L8A8JXvugwPlPFrAfwp2/8iAP8vAJ8A8O8D+AyL/1V2/Ak7/0XvOt93yvSbAfyItdv/HcBnfSq12XN/r9OGX8Nr+DQP79oceA2v4TW84/AKAq/hNXyah1cQeA2v4dM8vILAa3gNn+bhFQRew2v4NA+vIPAaXsOneXgFgdfwGj7Nw/8f22YBIobgbicAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - " import mindspore.dataset.transforms.c_transforms as c_transforms\n", - " import mindspore.dataset.vision.c_transforms as C\n", - " import matplotlib.pyplot as plt\n", - "\n", - " cifar10_path = \"./datasets/cifar-10-batches-bin/train\"\n", - "\n", - " # create Cifar10Dataset for reading data\n", - " cifar10_dataset = ds.Cifar10Dataset(cifar10_path, num_parallel_workers=4)\n", - " transforms = C.RandomResizedCrop((800, 800))\n", - " # apply the transform to the dataset through dataset.map()\n", - " cifar10_dataset = cifar10_dataset.map(operations=transforms, input_columns=\"image\", num_parallel_workers=4)\n", - "\n", - " data = next(cifar10_dataset.create_dict_iterator())\n", - " plt.imshow(data[\"image\"].asnumpy())\n", - " plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "tested-postcard", - "metadata": {}, - "source": [ - "2. A user-defined Python function is used to perform data augmentation. During data augmentation, the multi-process optimization solution is used, and four processes are enabled to concurrently complete the task." 
- ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "structural-glory", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "before map:\n", - "[0 1 2 3 4]\n", - "[1 2 3 4 5]\n", - "[2 3 4 5 6]\n", - "[3 4 5 6 7]\n", - "[4 5 6 7 8]\n", - "after map:\n", - "[ 0 1 4 9 16]\n", - "[ 1 4 9 16 25]\n", - "[ 4 9 16 25 36]\n", - "[ 9 16 25 36 49]\n", - "[16 25 36 49 64]\n" - ] - } - ], - "source": [ - " def generator_func():\n", - " for i in range(5):\n", - " yield (np.array([i, i+1, i+2, i+3, i+4]),)\n", - "\n", - " ds3 = ds.GeneratorDataset(source=generator_func, column_names=[\"data\"])\n", - " print(\"before map:\")\n", - " for data in ds3.create_dict_iterator():\n", - " print(data[\"data\"])\n", - "\n", - " func = lambda x:x**2\n", - " ds4 = ds3.map(operations=func, input_columns=\"data\", python_multiprocessing=True, num_parallel_workers=4)\n", - " print(\"after map:\")\n", - " for data in ds4.create_dict_iterator():\n", - " print(data[\"data\"])" - ] - }, - { - "cell_type": "markdown", - "id": "banned-florida", - "metadata": {}, - "source": [ - "## Optimizing the Operating System Performance\n", - "\n", - "Data processing is performed on the host. Therefore, configurations of the host or operating system may affect the performance of data processing. Major factors include storage, NUMA architecture, and CPU (computing resources).\n", - "\n", - "1. Storage\n", - "\n", - " Solid State Drive (SSD) is recommended for storing large datasets. SSD reduces the impact of I/O on data processing.\n", - "\n", - " > In most cases, after a dataset is loaded, it is stored in page cache of the operating system. To some extent, this reduces I/O overheads and accelerates reading subsequent epochs.\n", - "\n", - "2. NUMA architecture\n", - "\n", - " NUMA (Non-uniform Memory Architecture) is developed to solve the scalability problem of traditional Symmetric Multi-processor systems. The NUMA system has multiple memory buses. 
Several processors are connected to one memory via memory bus to form a group. This way, the entire large system is divided into several groups, the concept of this group is called a node in the NUMA system. Memory belonging to this node is called local memory, memory belonging to other nodes (with respect to this node) is called foreign memory. Therefore, the latency for each node to access its local memory is different from accessing foreign memory. This needs to be avoided during data processing. Generally, the following command can be used to bind a process to a node:\n", - "\n", - " ```bash\n", - " numactl --cpubind=0 --membind=0 python train.py\n", - " ```\n", - "\n", - " The example above binds the `train.py` process to `numa node` 0." - ] - }, - { - "cell_type": "markdown", - "id": "listed-palmer", - "metadata": {}, - "source": [ - "3. CPU (computing resource)\n", - "\n", - " CPU affects data processing in two aspects: resource allocation and CPU frequency.\n", - "\n", - " - Resource allocation\n", - "\n", - " In distributed training, multiple training processes are run on one device. These training processes allocate and compete for computing resources based on the policy of the operating system. When there is a large number of processes, data processing performance may deteriorate due to resource contention. In some cases, users need to manually allocate resources to avoid resource contention.\n", - " \n", - " ```bash\n", - " numactl --cpubind=0 python train.py\n", - " ```\n", - " \n", - " or\n", - " \n", - " ```bash\n", - " taskset -c 0-15 python train.py\n", - " ```\n", - " \n", - " > The `numactl` method directly specifies `numa node id`. The `taskset` method allows for finer control by specifying `cpu core` within a `numa node`. The `core id` range from 0 to 15.\n", - " \n", - " - CPU frequency\n", - "\n", - " The setting of CPU frequency is critical to maximizing the computing power of the host CPU. 
Generally, the Linux kernel supports the tuning of the CPU frequency to reduce power consumption. Power consumption can be reduced to varying degrees by selecting power management policies for different system idle states. However, lower power consumption means slower CPU wake-up which in turn impacts performance. Therefore, if the CPU's power setting is in the conservative or powersave mode, `cpupower` command can be used to switch performance modes, resulting in significant data processing performance improvement.\n", - " \n", - " ```bash\n", - " cpupower frequency-set -g performance\n", - " ```" - ] - }, - { - "cell_type": "markdown", - "id": "finnish-specific", - "metadata": {}, - "source": [ - "## Performance Optimization Solution Summary\n", - "\n", - "### Multi-thread Optimization Solution\n", - "\n", - "During the data pipeline process, the number of threads for related operators can be set to improve the concurrency and performance. For example:\n", - "\n", - "- During data loading, the `num_parallel_workers` parameter in the built-in data loading class is used to set the number of threads.\n", - "- During data augmentation, the `num_parallel_workers` parameter in the `map` function is used to set the number of threads.\n", - "- During batch processing, the `num_parallel_workers` parameter in the `batch` function is used to set the number of threads.\n", - "\n", - "For details, see [Built-in Loading Operators](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.html).\n", - "\n", - "### Multi-process Optimization Solution\n", - "\n", - "During data processing, operators implemented by Python support the multi-process mode. For example:\n", - "\n", - "- By default, the `GeneratorDataset` class is in multi-process mode. The `num_parallel_workers` parameter indicates the number of enabled processes. The default value is 1. 
For details, see [GeneratorDataset](https://www.mindspore.cn/doc/api_python/en/master/mindspore/dataset/mindspore.dataset.GeneratorDataset.html).\n", - "- If the user-defined Python function or the `py_transforms` module is used to perform data augmentation and the `python_multiprocessing` parameter of the `map` function is set to True, the `num_parallel_workers` parameter indicates the number of processes and the default value of the `python_multiprocessing` parameter is False. In this case, the `num_parallel_workers` parameter indicates the number of threads. For details, see [Built-in Loading Operators](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.html).\n", - "\n", - "### Compose Optimization Solution\n", - "\n", - "Map operators can receive the Tensor operator list and apply all these operators based on a specific sequence. Compared with the Map operator used by each Tensor operator, such Fat Map operators can achieve better performance, as shown in the following figure:" - ] - }, - { - "cell_type": "markdown", - "id": "northern-input", - "metadata": {}, - "source": [ - "![compose](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/advanced_use/images/compose.png)" - ] - }, - { - "cell_type": "markdown", - "id": "connected-platinum", - "metadata": {}, - "source": [ - "### Operator Fusion Optimization Solution\n", - "\n", - "Some fusion operators are provided to aggregate the functions of two or more operators into one operator. For details, see [Augmentation Operators](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.dataset.vision.html). Compared with the pipelines of their components, such fusion operators provide better performance. 
As shown in the figure:\n", - "\n", - "![operator-fusion](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/advanced_use/images/operator_fusion.png)\n", - "\n", - "### Operating System Optimization Solution\n", - "\n", - "- Use Solid State Drives to store the data.\n", - "- Bind the process to a NUMA node.\n", - "- Manually allocate more computing resources.\n", - "- Set a higher CPU frequency.\n", - "\n", - "## References\n", - "\n", - "[1] Alex Krizhevsky. [Learning Multiple Layers of Features from Tiny Images](http://www.cs.toronto.edu/~kriz/learning-features-2009-TR.pdf)." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/tutorials/training/source_en/advanced_use/parameterized_quantum_circuit.md b/tutorials/training/source_en/advanced_use/parameterized_quantum_circuit.md deleted file mode 100644 index efa3e922dcc37e3b82b60422781be3f15e46c1dd..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/parameterized_quantum_circuit.md +++ /dev/null @@ -1,5 +0,0 @@ -# Parameterized Quantum Circuit - -No English version right now, welcome to contribute. 
- - diff --git a/tutorials/training/source_en/advanced_use/performance_profiling.rst b/tutorials/training/source_en/advanced_use/performance_profiling.rst deleted file mode 100644 index 85ac06aebd48a2e0e3fe5f460e7ce4584d822325..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/performance_profiling.rst +++ /dev/null @@ -1,11 +0,0 @@ -Performance Profiling -================================== - -Performance data like operator's execution time is recorded in files and can be viewed on the web page, this can help users optimize the performance of neural networks. - -.. toctree:: - :maxdepth: 1 - - performance_profiling_ascend - performance_profiling_gpu - performance_profiling_ascend_of_cluster diff --git a/tutorials/training/source_en/advanced_use/performance_profiling_ascend.md b/tutorials/training/source_en/advanced_use/performance_profiling_ascend.md deleted file mode 100644 index 56b4f08fa29d4ac5690d9cb386e6160e75f11ce8..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/performance_profiling_ascend.md +++ /dev/null @@ -1,276 +0,0 @@ -# Performance Profiling (Ascend) - -`Linux` `Ascend` `Model Optimization` `Intermediate` `Expert` - - - -- [Performance Profiling (Ascend)](#performance-profiling-ascend) - - [Overview](#overview) - - [Operation Process](#operation-process) - - [Preparing the Training Script](#preparing-the-training-script) - - [Launch MindInsight](#launch-mindinsight) - - [Training Performance](#training-performanece) - - [Step Trace Analysis](#step-trace-analysis) - - [Operator Performance Analysis](#operator-performance-analysis) - - [Data Preparation Performance Analysis](#data-preparation-performance-analysis) - - [Timeline Analysis](#timeline-analysis) - - [Resource Utilization](#resource-utilization) - - [CPU Utilization Analysis](#cpu-utilization-analysis) - - [Memory Analysis](#memory-analysis) - - [Specifications](#specifications) - - [Notices](#notices) - - - - - 
-## Overview
-
-This article describes how to use MindSpore Profiler for performance debugging on Ascend AI processors.
-
-## Operation Process
-
-- Prepare a training script, add profiler APIs in the training script and run the training script.
-- Start MindInsight and specify the summary-base-dir using startup parameters, note that summary-base-dir is the parent directory of the directory created by Profiler. For example, the directory created by Profiler is `/home/user/code/data/`, the summary-base-dir should be `/home/user/code`. After MindInsight is started, access the visualization page based on the IP address and port number. The default access IP address is `http://127.0.0.1:8080`.
-- Find the training in the list, click the performance profiling link and view the data on the web page.
-
-## Preparing the Training Script
-
-To enable the performance profiling of neural networks, MindSpore Profiler APIs should be added into the script. At first, the MindSpore `Profiler` object needs to be set after `set_context` is set and before the network and HCCL initialization. Then, at the end of the training, `Profiler.analyse()` should be called to finish profiling and generate the performance analysis results.
-
-> The parameters of Profiler are as follows:
->
-> 
-
-The sample code is as follows:
-
-```python
-from mindspore.profiler import Profiler
-from mindspore import Model, nn, context
-
-# Init context env
-context.set_context(mode=context.GRAPH_MODE, device_target='Ascend', device_id=int(os.environ["DEVICE_ID"]))
-
-# Init Profiler
-# Note that 'data' directory is created in current path by default. To visualize the profiling data by MindInsight,
-# 'data' directory should be placed under summary-base-dir. 
-profiler = Profiler() - -# Train Model -Model.train() - -# Profiler end -profiler.analyse() -``` - -## Launch MindInsight - -The MindInsight launch command can refer to [MindInsight Commands](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/mindinsight_commands.html). - -## Training Performance - -Users can access the Training Performance by selecting a specific training from the training list, and click the performance profiling link. - -![performance_overall.png](./images/performance_overall.png) - -Figure 1: Overall Performance - -Figure 1 displays the overall performance of the training, including the overall data of Step Trace, Operator Performance, Data Preparation Performance and Timeline. The data shown in these components include: - -- Step Trace: It will divide the training steps into several stages and collect execution time for each stage. The overall performance page will show the step trace graph. -- Operator Performance: It will collect the execution time of operators and operator types. The overall performance page will show the pie graph for different operator types. -- Data Preparation Performance: It will analyse the performance of the data input stages. The overall performance page will show the number of steps that may be the bottleneck for these stages. -- Timeline: It will collect execution time for stream tasks on the devices. The tasks will be shown on the time axis. The overall performance page will show the statistics for streams and tasks. - -Users can click the detail link to see the details of each components. Besides, MindInsight Profiler will try to analyse the performance data, the assistant on the left will show performance tuning suggestions for this training. - -### Step Trace Analysis - -The Step Trace Component is used to show the general performance of the stages in the training. 
Step Trace will divide the training into several stages: -Step Gap (The time between the end of one step and the computation of next step), Forward/Backward Propagation, All Reduce and Parameter Update. It will show the execution time for each stage, and help to find the bottleneck stage quickly. - -> Step Trace does not support heterogeneous training currently. - -![step_trace.png](./images/step_trace.png) - -Figure 2: Step Trace Analysis - -Figure 2 displays the Step Trace page. The Step Trace detail will show the start/finish time for each stage. By default, it shows the average time for all the steps. Users can also choose a specific step to see its step trace statistics. - -The graphs at the bottom of the page show the execution time of Step Interval, Forward/Backward Propagation and Step Tail (The time between the end of Backward Propagation and the end of Parameter Update) changes according to different steps, it will help to decide whether we can optimize the performance of some stages. Here are more details: - -- **Step Interval** is the duration for reading data from data queues. If this part takes long time, it is advised to check the data processing for further analysis. -- **Forward and Backward Propagation** is the duration for executing the forward and backward operations on the network, which handle the main calculation work of a step. If this part takes long time, it is advised to check the statistics of operators or timeline for further analysis. -- **Step Tail** is the duration for performing parameter aggregation and update operations in parallel training. If the operation takes long time, it is advised to check the statistics of communication operators and the status of parallelism. - -In order to divide the stages, the Step Trace Component need to figure out the forward propagation start operator and the backward propagation end operator. MindSpore will automatically figure out the two operators to reduce the profiler configuration work. 
The first operator after `get_next` will be selected as the forward start operator and the operator before the last all reduce will be selected as the backward end operator. -**However, Profiler do not guarantee that the automatically selected operators will meet the user's expectation in all cases.** Users can set the two operators manually as follows: - -- Set environment variable `PROFILING_FP_START` to configure the forward start operator, for example, `export PROFILING_FP_START=fp32_vars/conv2d/BatchNorm`. -- Set environment variable `PROFILING_BP_END` to configure the backward end operator, for example, `export PROFILING_BP_END=loss_scale/gradients/AddN_70`. - -### Operator Performance Analysis - -The operator performance analysis component is used to display the execution time of the operators(AICORE/AICPU/HOSTCPU) during MindSpore run. - -![op_type_statistics.png](./images/op_type_statistics.PNG) - -Figure 3: Statistics for Operator Types - -Figure 3 displays the statistics for the operator types, including: - -- Choose pie or bar graph to show the proportion time occupied by each operator type. The time of one operator type is calculated by accumulating the execution time of operators belonging to this type. -- Display top 20 operator types with the longest execution time, show the proportion and execution time (ms) of each operator type. - -![op_statistics.png](./images/op_statistics.PNG) - -Figure 4: Statistics for Operators - -Figure 4 displays the statistics table for the operators, including: - -- Choose All: Display statistics for the operators, including operator name, type, execution time, full scope time, information, etc. The table will be sorted by execution time by default. -- Choose Type: Display statistics for the operator types, including operator type name, execution time, execution frequency and proportion of total time. Users can click on each line, querying for all the operators belonging to this type. 
-- Search: There is a search box on the right, which can support fuzzy search for operators/operator types. - -### Data Preparation Performance Analysis - -The Data preparation performance analysis component is used to analyse the execution of data input pipeline for the training. The data input pipeline can be divided into three stages: -the data process pipeline, data transfer from host to device and data fetch on device. The component will analyse the performance of each stage in detail and display the results. - -![minddata_profile.png](./images/minddata_profile.png) - -Figure 5: Data Preparation Performance Analysis - -Figure 5 displays the page of data preparation performance analysis component. It consists of two tabs: the step gap and the data process. - -The step gap page is used to analyse whether there is performance bottleneck in the three stages. We can get our conclusion from the data queue graphs: - -- The data queue size stands for the queue length when the training fetches data from the queue on the device. If the data queue size is 0, the training will wait until there is data in the queue; If the data queue size is greater than 0, the training can get data very quickly, and it means data preparation stage is not the bottleneck for this training step. -- The host queue size can be used to infer the speed of data process and data transfer. If the host queue size is 0, it means we need to speed up the data process stage. -- If the size of the host queue is always large and the size of the data queue is continuously small, there may be a performance bottleneck in data transfer. - -![data_op_profile.png](./images/data_op_profile.png) - -Figure 6: Data Process Pipeline Analysis - -Figure 6 displays the page of data process pipeline analysis. The data queues are used to exchange data between the data processing operators. The data size of the queues reflect the data consume speed of the operators, and can be used to infer the bottleneck operator. 
The queue usage percentage stands for the average value of data size in queue divide data queue maximum size, the higher the usage percentage, the more data that is accumulated in the queue. The graph at the bottom of the page shows the data processing pipeline operators with the data queues, the user can click one queue to see how the data size changes according to the time, and the operators connected to the queue. The data process pipeline can be analysed as follows: - -- When the input queue usage percentage of one operator is high, and the output queue usage percentage is low, the operator may be the bottleneck. -- For the leftmost operator, if the usage percentage of all the queues on the right are low, the operator may be the bottleneck. -- For the rightmost operator, if the usage percentage of all the queues on the left are high, the operator may be the bottleneck. - -To optimize the performance of data processing operators, there are some suggestions: - -- If the Dataset Operator is the bottleneck, try to increase the `num_parallel_workers`. -- If a GeneratorOp type operator is the bottleneck, try to increase the `num_parallel_workers` and replace the operator to `MindRecordDataset`. -- If a MapOp type operator is the bottleneck, try to increase the `num_parallel_workers`. If it is a python operator, try to optimize the training script. -- If a BatchOp type operator is the bottleneck, try to adjust the size of `prefetch_size`. - -### Timeline Analysis - -The Timeline component can display: - -- The operators (AICORE/AICPU/HOSTCPU operators) are executed on which device. -- The MindSpore stream split strategy for this neural network. -- The execution sequence and execution time of the operator on the device. -- The step number of training (Currently dynamic shape scene, multi-graph scene and heterogeneous training scene are not supported, steps data may be inaccurate in these scene.). 
-- `Scope Name` of the operator, the number of each operator's `Scope Name` could be selected and download corresponding timeline file. For example, the full name of one operator is `Default/network/lenet5/Conv2D-op11`, thus the first `Scope Name` of this operator is `Default`, the second `Scope Name` is `network`. If two `Scope Name` for each operator is selected, then the `Default` and `network` will be displayed. - -Users can get the most detailed information from the Timeline: - -- From the High level, users can analyse whether the stream split strategy can be optimized and whether the step tail is too long. -- From the Low level, users can analyse the execution time for all the operators, etc. - -Users can click the download button on the overall performance page to view Timeline details. The Timeline data file (json format) will be stored on local machine, and can be displayed by tools. We suggest to use `chrome://tracing` or [Perfetto](https://ui.perfetto.dev/#!viewer) to visualize the Timeline. - -- Chrome tracing: Click "load" on the upper left to load the file. -- Perfetto: Click "Open trace file" on the left to load the file. - -![timeline.png](./images/timeline.png) - -Figure 10: Timeline Analysis - -The Timeline consists of the following parts: - -- Device and Stream List: It will show the stream list on each device. Each stream consists of a series of tasks. One rectangle stands for one task, and the area stands for the execution time of the task. -- The Operator Information: When we click one task, the corresponding operator of this task will be shown at the bottom. - -W/A/S/D can be applied to zoom in and out of the Timeline graph. - -## Resource Utilization - -Resource utilization includes cpu usage analysis and memory usage analysis. - -![resource_visibility.png](./images/resource_visibility.png) - -Figure 11:Overview of resource utilization - -Overview of resource utilization:Including CPU utilization analysis and memory usage analysis. 
You can view the details by clicking the View Details button in the upper right corner. - -### CPU Utilization Analysis - -CPU utilization, which is mainly used to assist performance debugging. After the performance bottleneck is determined according to the queue size, the performance can be debugged according to the CPU utilization (if the user utilization is too low, increase the number of threads; if the system utilization is too high, decrease the number of threads). -CPU utilization includes CPU utilization of the whole machine, process and Data pipeline operator. - -![device_utilization.png](./images/device_cpu_utilization.png) - -Figure 7: CPU utilization of the whole machine - -CPU utilization of the whole machine: Show the overall CPU usage of the device in the training process, including user utilization, system utilization, idle utilization, IO utilization, current number of active processes, and context switching times. If the user utilization is low, you can try to increase the number of operator threads to increase the CPU utilization; if the system utilization is high, and the number of context switching and CPU waiting for processing is large, it indicates that the number of threads needs to be reduced accordingly. - -![process_cpu_utilization.png](./images/process_cpu_utilizaton.png) - -Figure 8: Process utilization - -Process utilization: Show the CPU usage of a single process. The combination of whole machine utilization and process utilization can determine whether other processes affect the training process. - -![data_op_utilization.png](./images/data_op_utilization.png) - -Figure 9: Operator utilization - -Operator utilization: Show the CPU utilization of Data pipeline single operator. We can adjust the number of threads of the corresponding operator according to the actual situation. If the number of threads is small and takes up a lot of CPU, you can consider whether you need to optimize the code. 
- -Common scenarios of CPU utilization: - -- According to the queue size, the network debugging personnel can judge that the performance of MindData has a bottleneck. They can adjust the number of threads by combining the utilization rate of the whole machine and the utilization rate of the operator. -- Developers can check the utilization of operators. If an operator consumes CPU utilization, they can confirm whether the code needs to be optimized. - -### Memory Analysis - -This page is used to show the memory usage of the neural network model on the **device**, which is an **ideal prediction** based on the theoretical calculation results. The content of the page includes: - -- An overview of the memory usage of the model, including the total available memory, peak memory and other information. -- The memory occupied varies in the execution order while the model is running. -- The memory usage of each operator is decomposed and displayed in the table of ```Operator Memory Allocation```. - -> Memory Analysis does not support heterogeneous training currently. - -![memory.png](./images/memory.png) - -Figure 8:Memory Analysis - -Users can obtain the summary of memory usage via the ```Memory Allocation Overview```. In addition, they can obtain more detailed information from ```Memory Usage```, including: - -- **Line Chart**: Changes in model memory usage, including static memory, total occupied memory and total available memory. -- **Zooming**: There is a zoom scroll bar under the line chart. Users can zoom in or out the line chart by adjusting its size to observe more details. -- **FP/BP**: The execution positions of the start of ```Forward Propagation``` and the end of ```Backward Propagation``` of the model on the line chart. 
-- **Details of Nodes**: Hovering over the line chart, the information of the corresponding execution operator is shown, including the execution order of the operator, the name of the operator, the memory occupied by the operator, the total memory occupied by the model in the current position, and the relative memory change compared with the previous execution position. -- **Memory Decomposition**: Left clicking a position on the line chart, the memory breakdowns of the execution position is shown in the table below the line chart, called ```Operator Memory Allocation```. The table shows the memory decomposition of the corresponding execution position, i.e., the output tensor of which operators are allocated the occupied memory of the current execution position. The module provides users with abundant information, including tensor name, tensor size, tensor type, data type, shape, format, and the active lifetime of tensor memory. - -![memory_graphics.png](./images/memory_graphics.png) - -Figure 9:Memory Statistics - -## Specifications - -- To limit the data size generated by the Profiler, MindInsight suggests that for large neural network, the profiled steps should be less than 10. - - > The number of steps can be controlled by controlling the size of training data set. For example, the `num_samples` parameter in `mindspore.dataset.MindDataset` can control the size of the data set. For details, please refer to: - > - -- The parse of Timeline data is time consuming, and usually the data of a few steps is enough to analyze the results. In order to speed up the data parse and UI display, Profiler will show at most 20M data (Contain 10+ step information for large networks). - -## Notices - -- Currently running in PyNative mode is not supported. -- Currently the training and inference process does not support performance debugging, only individual training or inference is supported. 
\ No newline at end of file diff --git a/tutorials/training/source_en/advanced_use/performance_profiling_ascend_of_cluster.md b/tutorials/training/source_en/advanced_use/performance_profiling_ascend_of_cluster.md deleted file mode 100644 index eba5949236a6fa847ba79f4ccacf7d4cbcc87220..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/performance_profiling_ascend_of_cluster.md +++ /dev/null @@ -1,239 +0,0 @@ -# Cluster Performance Profiling (Ascend) - -`Linux` `Ascend` `Model Optimization` `Intermediate` `Expert` - - - -- [Cluster Performance Profiling (Ascend)](#cluster-performance-profiling-ascend) - - [Overview](#overview) - - [Operation Process](#operation-process) - - [Distributed Training](#distributed-training) - - [Collect Cluster Performance Data](#distributed-training) - - [Launch MindInsight](#launch-mindinsight) - - [Performance Analysis](#performance-analysis) - - [Cluster Step Trace Analysis](#cluster-step-trace-analysis) - - [Specifications](#specifications) - - [Notices](#notices) - - - - - -## Overview - -This article describes how to use MindSpore Profiler for cluster performance debugging on Ascend AI processors. - -## Operation Process - -- Set up the distributed training environment, prepare a training script, add profiler APIs in the training script and run the training script. -- Collect Cluster Performance Data. -- Start MindInsight and specify the summary-base-dir using startup parameters, note that summary-base-dir is the parent directory of the directory created by Profiler. For example, the directory created by Profiler is `/home/user/code/data/`, the summary-base-dir should be `/home/user/code`. After MindInsight is started, access the visualization page based on the IP address and port number. The default access IP address is `http://127.0.0.1:8080`. -- Find the cluster training in the list, click the cluster performance profiling link and view the data on the web page. 
-
-## Distributed Training
-
-For distributed training, please refer to [Distributed Training](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html).
-
-## Collect Cluster Performance Data
-
-In multi-server and multi-device training, after the cluster training, the performance data is distributed in each host node. To analyze the cluster performance, we need to collect the performance data of all host nodes to one host for analysis. Considering the complexity of the cluster running environment and the related permissions and login problems, a more reasonable way is to let users collect cluster performance data. The following is the process of using a script to collect performance data after a distributed cluster training. Users can refer to this script to collect cluster performance data.
-
-Script program description: the script program first creates the cluster job folder, and then uses the SSHPass technology for non interactive remote copy (to avoid manual authentication, manually enter the password), copies the data of each host node in the cluster to the cluster job folder. At the same time, the script program generates the host IP address mapping table and copies the networking information file of the multi-device environment to the cluster job file.
-
-```bash
-#!/bin/bash
-
-echo "=============================================================================================================="
-echo "Please run the script as: "
-echo "bash collect_cluster_profiler_data.sh"
-echo "for example: bash collect_cluster_profiler_data.sh cluster_hccl_config_path cluster_account_config_path cluster_train_id host_train_id device_regex output"
-echo "=============================================================================================================="
-
-SSH="ssh -o StrictHostKeyChecking=no"
-SCP="scp -o StrictHostKeyChecking=no"
-
-# Get the node list in the cluster. 
-get_cluster_list()
-{
-    local cluster_config=$1
-    cat ${cluster_config} | python3 -c 'import sys,json;[print(node) for node in json.load(sys.stdin)["cluster"].keys()]'
-}
-
-# Get the account number of node.
-get_node_user()
-{
-    local cluster_config=$1
-    local node=$2
-    cat ${cluster_config} | python3 -c 'import sys,json;print(json.load(sys.stdin)["cluster"]['\"${node}\"']["user"])'
-}
-
-# Get the password of node.
-get_node_passwd()
-{
-    local cluster_config=$1
-    local node=$2
-    cat ${cluster_config} | python3 -c 'import sys,json;print(json.load(sys.stdin)["cluster"]['\"${node}\"']["passwd"])'
-}
-
-# Copy the data from remote node to the local node.
-rscp_pass()
-{
-    local node="$1"
-    local user="$2"
-    local passwd="$3"
-    local src="$4"
-    local target="$5"
-    sshpass -p "${passwd}" ${SCP} -r "${user}"@"${node}":"${src}" "${target}"
-}
-
-cluster_hccl_config_path=$1
-cluster_account_config_path=$2
-cluster_train_id=$3
-host_train_id=$4
-device_regex=$5
-output=$6
-host_ip_mapping_file='host_ips_mapping.txt'
-host_ip_mapping_id=1
-node_list=$(get_cluster_list ${cluster_account_config_path})
-echo "-----begin----"
-
-if [ ! -d "${cluster_train_id}" ]; then
-mkdir -p ${cluster_train_id}
-fi
-
-# Copy the networking information file of multi-device environment to the cluster directory.
-cp $cluster_hccl_config_path $cluster_train_id
-
-for node in ${node_list}
-do
-    user=$(get_node_user ${cluster_account_config_path} ${node})
-    passwd=$(get_node_passwd ${cluster_account_config_path} ${node})
-    echo "------------------${user}@${node}---------------------"
-    target_dir=${cluster_train_id}/cluster_profiler/${host_ip_mapping_id}/profiler/
-    if [ ! -d "${target_dir}" ]; then
-    mkdir -p ${target_dir}
-    fi
-
-    # Eight-device data
-    for((i=0;i<8;i++));
-    do
-    src_dir=${host_train_id}/${device_regex}${i}/${output}*/profiler*/*.*
-    $(rscp_pass ${node} ${user} ${passwd} "${src_dir}" ${target_dir})
-    done
-
-    # Save the mapping information to the host_ips_mapping.txt. 
- echo "$node $host_ip_mapping_id">>${cluster_train_id}/$host_ip_mapping_file - - # host_ip_mapping_id ++ - host_ip_mapping_id=$((${host_ip_mapping_id}+1)) -done -``` - -Script Parameter Description: - -- `cluster_hccl_config_path` Network information file path in the multi-device environment. The content format is as follows: - - ```json - { - "version": "1.0", - "server_count": "1", - "server_list": [ - { - "server_id": "10.xxx.xxx.1", - "device": [ - {"device_id": "0","device_ip": "192.1.27.6","rank_id": "0"}, - {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"}, - {"device_id": "2","device_ip": "192.3.27.6","rank_id": "2"}, - {"device_id": "3","device_ip": "192.4.27.6","rank_id": "3"}, - {"device_id": "4","device_ip": "192.1.27.7","rank_id": "4"}, - {"device_id": "5","device_ip": "192.2.27.7","rank_id": "5"}, - {"device_id": "6","device_ip": "192.3.27.7","rank_id": "6"}, - {"device_id": "7","device_ip": "192.4.27.7","rank_id": "7"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" - } - ``` - -- `cluster_account_config_path` Host node account password configuration file path, The content format is as follows: - - ```json - { - "rank_size": 16, - "cluster": { - "10.xxx.xxx.1": { - "user": "root", - "passwd": "xxx" - }, - "10.xxx.xxx.2": { - "user": "root", - "passwd": "xxx" - } - } - } - ``` - -- `cluster_train_id` The path to save the performance data of the cluster profiler. For example, `/home/summary/run1` and `/home/data/Run2`, where `run1` and `run2` respectively save the jobs of two cluster training. -- `host_train_id` During cluster training, each host node stores the path of profiler performance data. For example:`/home/summary/`. -- `device_regex` The name of the folder where the performance data of the profiler is stored on different devices in each host node. For example:`/home/summary/device0` and `/home/summary/device1`, which are the folders corresponding to device 0 and device 1. At this time, device_regex is device. 
-- `output` The path to save the profiler performance file set by the user in the training script, the default is `./data`. - -> The collected cluster performance jobs need to conform to the directory structure, otherwise, they cannot be visualized with MindInsight. It must contain the networking information file (the file name is optional) and host_ips_mapping.txt File (file name and suffix are unique). - -The directory structure of cluster performance folder collected by script is as follows: - -```text -|-- run - |-- hccl.json - |-- host_ips_mapping.txt - |-- cluster_profiler - |-- 1 - | |-- profiler - | |-- step_trace_raw_0_detail_time.csv -``` - -Cluster performance folder structure description: - -- `hccl.json` It is the networking information file of the current multi-device environment. It records the correspondence between host_ip and device_id and rank_id. -- `host_ips_mapping.txt` For host_ip mapping file. From the security point of view, the real host ip needs to be mapped to avoid exposing the real host value, leading to security risks. A host_ip mapping table is maintained here. One line of content in the file represents a set of mappings. For example, 10.xxx.xxx.1 1 means that the mapping value of 10.xxx.xxx.1 is 1. -- `cluster_profiler` It is the label of cluster training job, which is used to judge whether the training job belongs to cluster training job. -- `1` Save the performance data of the host node profiler, and it is the single-server multi device Profiler Data. The `cluster_profiler` folder contains performance data for all host nodes in the cluster. - -## Launch MindInsight - -The MindInsight launch command can refer to [MindInsight Commands](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/mindinsight_commands.html). - -### Cluster Performance Analysis - -Users can access the Performance Profiler by selecting a specific training from the training list, and click the performance profiling link. 
Cluster performance analysis includes cluster iteration trajectory analysis. - -#### Cluster iteration trajectory analysis - -Using the cluster iterative trajectory analysis component, we can find out the slow host and slow device in cluster training. Cluster iteration trajectory analysis component shows the iteration information of all devices, including iteration gap, forward and backward, iteration trailing, and supports sorting operation. The iteration gap reflects the speed of the data processing stage, and the iteration gap time of the device can reflect the speed of the corresponding host processing data. The forward and backward time of the device reflects the computing power of the device. Iterative tailing reflects all_reduce time and parallelism. - -![cluster_iterative_trajectory.png](./images/cluster_iterative_trajectory.png) - -Figure 1: Cluster iteration trajectory analysis - -Figure 1 shows the cluster iteration trajectory analysis page. By default, it shows the average performance of the device. It supports querying the iteration trajectory information of the device under a specific step. By clicking the details link in the single device, you can also jump to the detailed performance display page of the single device to query the detailed performance data of the single device. - -![single_car_performance_overall.png](./images/single_car_performance_overall.png) - -Figure 2: Single device details - -Figure 2 shows the performance information of a single device in the cluster. Please refer to [single device performance information](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/performance_profiling_ascend.html) for the performance information of a single device. - -## Specifications - -- To limit the data size generated by the Profiler, MindInsight suggests that for large neural networks, the profiled steps should be less than 10. - - > The number of steps can be controlled by controlling the size of training data set. 
For example, the `num_samples` parameter in `mindspore.dataset.MindDataset` can control the size of the data set. For details, please refer to: - > - -- The parse of Timeline data is time consuming, and usually the data of a few steps is enough to analyze the results. In order to speed up the data parse and UI display, Profiler will show at most 20M data (Contain 10+ step information for large networks). - -## Notices - -- Currently running in PyNative mode is not supported. -- Currently the training and inference process does not support performance debugging, only individual training or inference is supported. diff --git a/tutorials/training/source_en/advanced_use/performance_profiling_gpu.md b/tutorials/training/source_en/advanced_use/performance_profiling_gpu.md deleted file mode 100644 index 9fb1ba564e1a09f274314374f0520f20ed1d3bb3..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/performance_profiling_gpu.md +++ /dev/null @@ -1,182 +0,0 @@ -# Performance Profiling (GPU) - -`Linux` `GPU` `Model Optimization` `Intermediate` `Expert` - - - -- [Performance Profiling (GPU)](#performance-profiling-gpu) - - [Overview](#overview) - - [Operation Process](#operation-process) - - [Preparing the Training Script](#preparing-the-training-script) - - [Launching MindInsight](#launching-mindinsight) - - [Training Performance](#training-performance) - - [Operator Performance Analysis](#operator-performance-analysis) - - [Timeline Analysis](#timeline-analysis) - - [Step Trace Analysis](#step-trace-analysis) - - [Data Preparation Analysis](#data-preparation-analysis) - - [Resource Utilization](#resource-utilization) - - [CPU Utilization Analysis](#cpu-utilization-analysis) - - [Notices](#notices) - - - - - -## Overview - -This article describes how to use MindSpore Profiler for performance debugging on GPU. - -## Operation Process - -- Prepare a training script, add profiler APIs in the training script and run the training script. 
-- Start MindInsight and specify the summary-base-dir using startup parameters, note that summary-base-dir is the parent directory of the directory created by Profiler. For example, the directory created by Profiler is `/home/user/code/data/`, the summary-base-dir should be `/home/user/code`. After MindInsight is started, access the visualization page based on the IP address and port number. The default access IP address is `http://127.0.0.1:8080`. -- Find the training in the list, click the performance profiling link and view the data on the web page. - -> By default, common users do not have the permission to access the NVIDIA GPU performance counters on the target device. -> -> If common users need to use the profiler performance statistics capability in the training script, configure the permission by referring to the following description: -> -> - -## Preparing the Training Script - -To enable the performance profiling of neural networks, MindSpore Profiler APIs should be added into the script. - -- The MindSpore `Profiler` object needs to be initialized after `set_context` is set. - - > In multi-card training scene, `Profiler` object needs to be initialized after `set_auto_parallel_context`. - > - > Only the output_path in parameters is working in GPU now. - -- At the end of the training, `Profiler.analyse` should be called to finish profiling and generate the performance analysis results. - -The sample code is the same as that in the Ascend chip: . - -In GPU scenarios, users can customize the callback mode to collect performance data. Data preparation stage and data sinking mode do not support this mode. 
- -The following is the example: - -```python -class StopAtStep(Callback): - def __init__(self, start_step, stop_step): - super(StopAtStep, self).__init__() - self.start_step = start_step - self.stop_step = stop_step - self.already_analysed = False - - def step_begin(self, run_context): - cb_params = run_context.original_args() - step_num = cb_params.cur_step_num - if step_num == self.start_step: - self.profiler = Profiler() - - def step_end(self, run_context): - cb_params = run_context.original_args() - step_num = cb_params.cur_step_num - if step_num == self.stop_step and not self.already_analysed: - self.profiler.analyse() - self.already_analysed = True - - def end(self, run_context): - if not self.already_analysed: - self.profiler.analyse() -``` - -The code above is just an example. Users should implement callback by themselves. - -## Launching MindInsight - -The MindInsight launch command can refer to [MindInsight Commands](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/mindinsight_commands.html). - -## Training Performance - -Users can access the Training Performance by selecting a specific training from the training list, and click the performance profiling link. And the Training Performance only supports operation analysis, Timeline Analysis, Step Trace Analysis and Data Preparation Analysis now, other modules will be published soon. - -![performance_overall.png](./images/performance_overall.png) - -Figure 1: Overall Performance - -Figure 1 displays the overall performance of the training, including the overall data of Step Trace, Operator Performance, Data Preparation Performance and Timeline: - -- Operator Performance: It will collect the average execution time of operators and operator types. The overall performance page will show the pie graph for different operator types. -- Timeline: It will collect execution time for operations and CUDA activity. The tasks will be shown on the time axis. 
The overall performance page will show the statistics for tasks. -- Step Trace: It will divide the training steps into several stages and collect execution time for each stage. The overall performance page will show the step trace graph. -- Data Preparation Performance: It will analyse the performance of the data input stages. The overall performance page will show the number of steps that may be the bottleneck for these stages. - -Users can click the detail link to see the details of each components. - -### Operator Performance Analysis - -The operator performance analysis component is used to display the execution time of the operators when running MindSpore(include GPU operator,CUDA kernel,HOSTCPU operator). - -![gpu_op_ui_profiler.png](./images/gpu_op_ui_profiler.png) - -Figure 2: Statistics for Operator Types - -Figure 2 displays the statistics for the operator types, including: - -- Choose a pie or a bar graph to show the proportion time occupied by each operator type. The time of one operator type is calculated by accumulating the execution time of operators belong to this type. -- Display top 20 operator types with the longest average execution time, show the proportion of total time and average execution time (ms) of each operator type. - -The bottom half of Figure 2 displays the statistics table for the operators' details, including: - -- Choose All: Display statistics for the operators, including operator position information, type, execution time, full scope time, etc. The table will be sorted by average execution time by default. -- Choose Type: Display statistics for the operator types, including operator type name, execution time, execution frequency and proportion of total time, average execution time. Users can click on each line to query for all the operators belong to this type. -- Search: There is a search box on the right, which supports fuzzy search for operators/operator types. 
- -![gpu_activity_profiler.png](./images/gpu_activity_profiler.png) - -Figure 3: Statistics for Kernel Activities - -Figure 3 displays the statistics for the Kernel, including: - -- A pie graph to show the proportion time occupied by each kernel activity and the execution time of each kernel activity. -- The statistical table's column includes activity name, operation name, execution frequency, total time and average time. -- The search box on the right, which supports fuzzy search for the activity name/operator full name. - -### Timeline Analysis - -The usage is almost the same as that in Ascend. The difference is GPU Timeline displays the operation information and CUDA activity. - -The usage is described as follows: - - - -### Step Trace Analysis - -The usage is almost the same as that in Ascend. (**Note that step trace do not support heterogeneous training scene.**) - -The usage is described as follows: - - - -### Data Preparation Analysis - -The usage is almost the same as that in Ascend. - -The usage is described as follows: - - - -## Resource Utilization - -Resource utilization includes cpu usage analysis. - -![resource_visibility_gpu.png](./images/resource_visibility_gpu.png) - -Figure 4:Overview of resource utilization - -Overview of resource utilization:Including CPU utilization analysis. You can view the details by clicking the View Details button in the upper right corner. - -### CPU Utilization Analysis - -The usage is almost the same as that in Ascend. - -The usage is described as follows: - - - -## Notices - -- Currently running in PyNative mode is not supported. -- Currently the training and inference process does not support performance debugging, only individual training or inference is supported. 
diff --git a/tutorials/training/source_en/advanced_use/protect_user_privacy_with_differential_privacy.md b/tutorials/training/source_en/advanced_use/protect_user_privacy_with_differential_privacy.md deleted file mode 100644 index 992337d62472bc9fb22733064528ef5d8c35f7b6..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/protect_user_privacy_with_differential_privacy.md +++ /dev/null @@ -1,351 +0,0 @@ -# Protecting User Privacy with Differential Privacy Mechanism - -`Linux` `Ascend` `Model Training` `Model Optimization` `Enterprise` `Expert` - - - -- [Protecting User Privacy with Differential Privacy Mechanism](#protecting-user-privacy-with-differential-privacy-mechanism) - - [Overview](#overview) - - [Implementation](#implementation) - - [Importing Library Files](#importing-library-files) - - [Configuring Parameters](#configuring-parameters) - - [Preprocessing the Dataset](#preprocessing-the-dataset) - - [Creating the Model](#creating-the-model) - - [Introducing the Differential Privacy](#introducing-the-differential-privacy) - - [References](#references) - - - - - -## Overview - -Differential privacy is a mechanism for protecting user data privacy. What is privacy? Privacy refers to the attributes of individual users. Common attributes shared by a group of users may not be considered as privacy. For example, if we say "smoking people have a higher probability of getting lung cancer", it does not disclose privacy. However, if we say "Zhang San smokes and gets lung cancer", it discloses the privacy of Zhang San. Assume that there are 100 patients in a hospital and 10 of them have lung cancer. If the information of any 99 patients are known, we can infer whether the remaining one has lung cancer. This behavior of stealing privacy is called differential attack. Differential privacy is a method for preventing differential attacks. 
By adding noise, the query results of two datasets with only one different record are nearly indistinguishable. In the above example, after differential privacy is used, the statistic information of the 100 patients achieved by the attacker is almost the same as that of the 99 patients. Therefore, the attacker can hardly infer the information of the remaining one patient. - -**Differential privacy in machine learning:** - -Machine learning algorithms usually update model parameters and learn data features based on a large amount of data. Ideally, these models can learn the common features of a class of entities and achieve good generalization, such as "smoking patients are more likely to get lung cancer" rather than models with individual features, such as "Zhang San is a smoker who gets lung cancer." However, machine learning algorithms do not distinguish between general and individual features. The published machine learning models, especially the deep neural networks, may unintentionally memorize and expose the features of individual entities in training data. This can be exploited by malicious attackers to reveal Zhang San's privacy information from the published model. Therefore, it is necessary to use differential privacy to protect machine learning models from privacy leakage. - -**Differential privacy definition** [1] - -$Pr[\mathcal{K}(D)\in S] \le e^{\epsilon} Pr[\mathcal{K}(D') \in S]+\delta$ - -For datasets $D$ and $D'$ that differ on only one record, the probability of obtaining the same result from $\mathcal{K}(D)$ and $\mathcal{K}(D')$ by using a randomized algorithm $\mathcal{K}$ must meet the preceding formula. $\epsilon$ indicates the differential privacy budget and $\delta$ indicates the perturbation. The smaller the values of $\epsilon$ and $\delta$, the closer the data distribution output by $\mathcal{K}$ on $D$ and $D'$. - -**Differential privacy measurement:** - -Differential privacy can be measured using $\epsilon$ and $\delta$. 
- -- $\epsilon$: specifies the upper limit of the output probability that can be changed when a record is added to or deleted from the dataset. We usually hope that $\epsilon$ is a small constant. A smaller value indicates stricter differential privacy conditions. -- $\delta$: limits the probability of arbitrary model behavior change. Generally, this parameter is set to a small constant. You are advised to set this parameter to a value less than the reciprocal of the size of a training dataset. - -**Differential privacy implemented by MindArmour:** - -MindArmour differential privacy module Differential-Privacy implements the differential privacy optimizer. Currently, SGD, Momentum, and Adam are supported. They are differential privacy optimizers based on the Gaussian mechanism. Gaussian noise mechanism supports both non-adaptive policy and adaptive policy The non-adaptive policy use a fixed noise parameter for each step while the adaptive policy changes the noise parameter along time or iteration step. An advantage of using the non-adaptive Gaussian noise is that a differential privacy budget $\epsilon$ can be strictly controlled. However, a disadvantage is that in a model training process, the noise amount added in each step is fixed. In the later training stage, large noise makes the model convergence difficult, and even causes the performance to decrease greatly and the model usability to be poor. Adaptive noise can solve this problem. In the initial model training stage, the amount of added noise is large. As the model converges, the amount of noise decreases gradually, and the impact of noise on model availability decreases. The disadvantage is that the differential privacy budget cannot be strictly controlled. Under the same initial value, the $\epsilon$ of the adaptive differential privacy is greater than that of the non-adaptive differential privacy. Rényi differential privacy (RDP) [2] is also provided to monitor differential privacy budgets. 
- -The LeNet model and MNIST dataset are used as an example to describe how to use the differential privacy optimizer to train a neural network model on MindSpore. - -> This example is for the Ascend 910 AI processor. You can download the complete sample code from . - -## Implementation - -### Importing Library Files - -The followings are the required public modules, MindSpore modules, and differential privacy feature modules. - -```python -import os -from easydict import EasyDict as edict - -import mindspore.nn as nn -from mindspore import context, load_checkpoint, load_param_into_net -from mindspore.train.callback import ModelCheckpoint -from mindspore.train.callback import CheckpointConfig -from mindspore.train.callback import LossMonitor -from mindspore.nn.metrics import Accuracy -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as CV -import mindspore.dataset.transforms.c_transforms as C -from mindspore.dataset.vision import Inter -from mindspore import dtype as mstype - -from mindarmour.privacy.diff_privacy import DPModel -from mindarmour.privacy.diff_privacy import PrivacyMonitorFactory -from mindarmour.privacy.diff_privacy import NoiseMechanismsFactory -from mindarmour.privacy.diff_privacy import ClipMechanismsFactory -from mindarmour.utils.logger import LogUtil - -LOGGER = LogUtil.get_instance() -LOGGER.set_level('INFO') -TAG = 'Lenet5_train' -``` - -### Configuring Parameters - -1. Set the running environment, dataset path, model training parameters, checkpoint storage parameters, and differential privacy parameters. Replace 'data_path' with your data path. For more configurations, see . 
- - ```python - cfg = edict({ - 'num_classes': 10, # the number of classes of model's output - 'lr': 0.01, # the learning rate of model's optimizer - 'momentum': 0.9, # the momentum value of model's optimizer - 'epoch_size': 10, # training epochs - 'batch_size': 256, # batch size for training - 'image_height': 32, # the height of training samples - 'image_width': 32, # the width of training samples - 'save_checkpoint_steps': 234, # the interval steps for saving checkpoint file of the model - 'keep_checkpoint_max': 10, # the maximum number of checkpoint files would be saved - 'device_target': 'Ascend', # device used - 'data_path': '../../common/dataset/MNIST', # the path of training and testing data set - 'dataset_sink_mode': False, # whether deliver all training data to device one time - 'micro_batches': 32, # the number of small batches split from an original batch - 'norm_bound': 1.0, # the clip bound of the gradients of model's training parameters - 'initial_noise_multiplier': 0.05, # the initial multiplication coefficient of the noise added to training - # parameters' gradients - 'noise_mechanisms': 'Gaussian', # the method of adding noise in gradients while training - 'clip_mechanisms': 'Gaussian', # the method of adaptive clipping gradients while training - 'clip_decay_policy': 'Linear', # Decay policy of adaptive clipping, decay_policy must be in ['Linear', 'Geometric']. - 'clip_learning_rate': 0.001, # Learning rate of update norm clip. - 'target_unclipped_quantile': 0.9, # Target quantile of norm clip. - 'fraction_stddev': 0.01, # The stddev of Gaussian normal which used in empirical_fraction. - 'optimizer': 'Momentum' # the base optimizer used for Differential privacy training - }) - ``` - -2. Configure the necessary information, including the environment information and the execution mode. 
- - ```python - context.set_context(mode=context.GRAPH_MODE, device_target=cfg.device_target) - ``` - - For details about the API configuration, see the `context.set_context`. - -### Preprocessing the Dataset - -Load the dataset and convert the dataset format to a MindSpore data format. - -```python -def generate_mnist_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1, sparse=True): - """ - create dataset for training or testing - """ - # define dataset - ds1 = ds.MnistDataset(data_path) - - # define operation parameters - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), - interpolation=Inter.LINEAR) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - # apply map operations on images - if not sparse: - one_hot_enco = C.OneHot(10) - ds1 = ds1.map(operations=one_hot_enco, input_columns="label", - num_parallel_workers=num_parallel_workers) - type_cast_op = C.TypeCast(mstype.float32) - ds1 = ds1.map(operations=type_cast_op, input_columns="label", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=resize_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=rescale_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=hwc2chw_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - ds1 = ds1.shuffle(buffer_size=buffer_size) - ds1 = ds1.batch(batch_size, drop_remainder=True) - ds1 = ds1.repeat(repeat_size) - - return ds1 -``` - -### Creating the Model - -The LeNet model is used as an example. You can also create and train your own model. 
- -```python -from mindspore import nn -from mindspore.common.initializer import TruncatedNormal - - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0): - weight = weight_variable() - return nn.Conv2d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, padding=padding, - weight_init=weight, has_bias=False, pad_mode="valid") - - -def fc_with_initialize(input_channels, out_channels): - weight = weight_variable() - bias = weight_variable() - return nn.Dense(input_channels, out_channels, weight, bias) - - -def weight_variable(): - return TruncatedNormal(0.05) - - -class LeNet5(nn.Cell): - """ - LeNet network - """ - def __init__(self): - super(LeNet5, self).__init__() - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16*5*5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, 10) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x -``` - -Load the LeNet network, define the loss function, configure the checkpoint parameters, and load data by using the `generate_mnist_dataset` function defined in the preceding information. 
- -```python -network = LeNet5() -net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") -config_ck = CheckpointConfig(save_checkpoint_steps=cfg.save_checkpoint_steps, - keep_checkpoint_max=cfg.keep_checkpoint_max) -ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet", - directory='./trained_ckpt_file/', - config=config_ck) - -# get training dataset -ds_train = generate_mnist_dataset(os.path.join(cfg.data_path, "train"), - cfg.batch_size) -``` - -### Introducing the Differential Privacy - -1. Set parameters of a differential privacy optimizer. - - - Determine whether values of the `micro_batches` and `batch_size` parameters meet the requirements. The value of `batch_size` must be an integer multiple of `micro_batches`. - - Instantiate a differential privacy factory class. - - Set a noise mechanism for the differential privacy. Currently, the Gaussian noise mechanism with a fixed standard deviation (`Gaussian`) and the Gaussian noise mechanism with an adaptive standard deviation (`AdaGaussian`) are supported. - - Set an optimizer type. Currently, `SGD`, `Momentum`, and `Adam` are supported. - - Set up a differential privacy budget monitor RDP to observe changes in the differential privacy budget $\epsilon$ in each step. - - ```python - if cfg.micro_batches and cfg.batch_size % cfg.micro_batches != 0: - raise ValueError( - "Number of micro_batches should divide evenly batch_size") - # Create a factory class of DP noise mechanisms, this method is adding noise - # in gradients while training. Initial_noise_multiplier is suggested to be - # greater than 1.0, otherwise the privacy budget would be huge, which means - # that the privacy protection effect is weak. Mechanisms can be 'Gaussian' - # or 'AdaGaussian', in which noise would be decayed with 'AdaGaussian' - # mechanism while be constant with 'Gaussian' mechanism. 
- noise_mech = NoiseMechanismsFactory().create(cfg.noise_mechanisms, - norm_bound=cfg.norm_bound, - initial_noise_multiplier=cfg.initial_noise_multiplier, - decay_policy=None) - # Create a factory class of clip mechanisms, this method is to adaptive clip - # gradients while training, decay_policy support 'Linear' and 'Geometric', - # learning_rate is the learning rate to update clip_norm, - # target_unclipped_quantile is the target quantile of norm clip, - # fraction_stddev is the stddev of Gaussian normal which used in - # empirical_fraction, the formula is - # $empirical_fraction + N(0, fraction_stddev)$. - clip_mech = ClipMechanismsFactory().create(cfg.clip_mechanisms, - decay_policy=cfg.clip_decay_policy, - learning_rate=cfg.clip_learning_rate, - target_unclipped_quantile=cfg.target_unclipped_quantile, - fraction_stddev=cfg.fraction_stddev) - net_opt = nn.Momentum(params=network.trainable_params(), - learning_rate=cfg.lr, momentum=cfg.momentum) - # Create a monitor for DP training. The function of the monitor is to - # compute and print the privacy budget(eps and delta) while training. - rdp_monitor = PrivacyMonitorFactory.create('rdp', - num_samples=60000, - batch_size=cfg.batch_size, - initial_noise_multiplier=cfg.initial_noise_multiplier, - per_print_times=234, - noise_decay_mode=None) - ``` - -2. Pack the LeNet model as a differential privacy model by transferring the network to `DPModel`. - - ```python - # Create the DP model for training. - model = DPModel(micro_batches=cfg.micro_batches, - norm_bound=cfg.norm_bound, - noise_mech=noise_mech, - clip_mech=clip_mech, - network=network, - loss_fn=net_loss, - optimizer=net_opt, - metrics={"Accuracy": Accuracy()}) - ``` - -3. Train and test the model. 
- - ```python - LOGGER.info(TAG, "============== Starting Training ==============") - model.train(cfg['epoch_size'], ds_train, - callbacks=[ckpoint_cb, LossMonitor(), rdp_monitor], - dataset_sink_mode=cfg.dataset_sink_mode) - - LOGGER.info(TAG, "============== Starting Testing ==============") - ckpt_file_name = 'trained_ckpt_file/checkpoint_lenet-10_234.ckpt' - param_dict = load_checkpoint(ckpt_file_name) - load_param_into_net(network, param_dict) - ds_eval = generate_mnist_dataset(os.path.join(cfg.data_path, 'test'), - batch_size=cfg.batch_size) - acc = model.eval(ds_eval, dataset_sink_mode=False) - LOGGER.info(TAG, "============== Accuracy: %s ==============", acc) - ``` - -4. Run the following command to execute the script: - - ```bash - python lenet5_dp.py - ``` - - In the preceding command, replace `lenet5_dp.py` with the name of your script. - -5. Display the result. - - The accuracy of the LeNet model without differential privacy is 99%, and the accuracy of the LeNet model with Gaussian noise and adaptive clip differential privacy is mostly more than 95%. - - ```text - ============== Starting Training ============== - ... - ============== Starting Testing ============== - ... - ============== Accuracy: 0.9698 ============== - ``` - -### References - -[1] C. Dwork and J. Lei. Differential privacy and robust statistics. In STOC, pages 371–380. ACM, 2009. - -[2] Ilya Mironov. Rényi differential privacy. In IEEE Computer Security Foundations Symposium, 2017. - -[3] Abadi, M. e. a., 2016. *Deep learning with differential privacy.* s.l.:Proceedings of the 2016 ACM SIGSAC Conference on Computer and Communications Security. 
diff --git a/tutorials/training/source_en/advanced_use/protect_user_privacy_with_suppress_privacy.md b/tutorials/training/source_en/advanced_use/protect_user_privacy_with_suppress_privacy.md deleted file mode 100644 index 729ad802a4202f255e7574e4cc9d3ec4a64ebce1..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/protect_user_privacy_with_suppress_privacy.md +++ /dev/null @@ -1,5 +0,0 @@ -# Protecting User Privacy with Suppress Privacy - -No English version right now, welcome to contribute. - - diff --git a/tutorials/training/source_en/advanced_use/qnn_for_nlp.md b/tutorials/training/source_en/advanced_use/qnn_for_nlp.md deleted file mode 100644 index 2418f6f4855b2d92f02e8d7271048e509f4fa8c4..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/qnn_for_nlp.md +++ /dev/null @@ -1,5 +0,0 @@ -# Application of Quantum Neural Network in Natural Language Processing - -No English version right now, welcome to contribute. - - diff --git a/tutorials/training/source_en/advanced_use/quantum_neural_network.rst b/tutorials/training/source_en/advanced_use/quantum_neural_network.rst deleted file mode 100644 index 3b226d8087ab461ed72fb2356110c6bf4730e5f1..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/quantum_neural_network.rst +++ /dev/null @@ -1,8 +0,0 @@ -Quantum Neural Network -======================== - -.. 
toctree:: - :maxdepth: 1 - - parameterized_quantum_circuit - qnn_for_nlp \ No newline at end of file diff --git a/tutorials/training/source_en/advanced_use/save_load_model_hybrid_parallel.md b/tutorials/training/source_en/advanced_use/save_load_model_hybrid_parallel.md deleted file mode 100644 index 2fbe83fd03bfaaacc27eb574a2fca035dd923000..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/save_load_model_hybrid_parallel.md +++ /dev/null @@ -1,555 +0,0 @@ -# Saving and Loading Models in Hybrid Parallel Mode - -`Linux` `Ascend` `GPU` `Model Training` `Intermediate` `Expert` - - - -- [Saving and Loading Models in Hybrid Parallel Mode](#saving-and-loading-models-in-hybrid-parallel-mode) - - [Overview](#overview) - - [Background](#background) - - [Application Scenario](#application-scenario) - - [Integrating the Saved Checkpoint Files](#integrating-the-saved-checkpoint-files) - - [Overall Process](#overall-process) - - [Preparations](#preparations) - - [Importing the Checkpoint Files in rank id order](#importing-the-checkpoint-files-in-rank-id-order) - - [Obtaining a List of All Parameters on the Network](#obtaining-a-list-of-all-parameters-on-the-network) - - [Integrate the Model Parallel Parameters](#integrate-the-model-parallel-parameters) - - [Saving the Data and Generating a New Checkpoint File](#saving-the-data-and-generating-a-new-checkpoint-file) - - [Loading the Integrated and Saved Checkpoint File](#loading-the-integrated-and-saved-checkpoint-file) - - [Overall Process](#overall-process-1) - - [Step 1: Loading the Checkpoint File](#step-1-loading-the-checkpoint-file) - - [Step 2: Dividing a Model Parallel Parameter](#step-2-dividing-a-model-parallel-parameter) - - [Step 3: Loading the Modified Parameter Data to the Network](#step-3-loading-the-modified-parameter-data-to-the-network) - - [Example](#example) - - [Scenario Description](#scenario-description) - - [Example Code](#example-code) - - - - - -## Overview - -### 
Background - -In the MindSpore model parallel scenario, each instance process stores only the parameter data on the current node. The parameter data of a model parallel Cell on each node is a slice of the complete parameter data. For example, the complete parameter data shape is \[8, 8], and the parameter data on each node is a part of the data, for example, shape \[2, 8]. - -In the auto parallel scenario, MindSpore automatically generates the dividing strategy. The MindSpore checkpoint module supports automatic integrating, saving, and loading. - -In the hybrid parallel scenario, the dividing strategy is implemented by users. MindSpore saves the slice strategy of model, which is the same on each node, and the data corresponding to each node is stored respectively. Users need to integrate, save, and load the checkpoint files by themselves. This tutorial describes how to integrate, save, and load checkpoint files in the hybrid parallel scenario. - -### Application Scenario - -If you encounter the following scenarios, refer to this tutorial to integrate, save, and load checkpoint files: - -Scenario 1: multi-device training and single-device inference - -The following describes the overall process of training on 64 devices and inference on a single device: - -1. Execute the training to automatically generate the checkpoint files and the slice strategy files. - -2. Integrate the saved checkpoint files. - - Integrate the divided model parameters based on the specific dividing strategy to generate a new checkpoint file. - -3. Load the new checkpoint file in the single-GPU environment and call the export API to export the model for inference as required. 
- -If the number of GPUs in a cluster in the checkpoint saving environment is the same as that in the loading environment, for example, if the checkpoint files are saved and loaded in the same training environment or training and inference is performed on a single device, you do not need to perform integration, saving and loading. - -Scenario 2: The training is divided into multiple stages, and the cluster size in each stage is different. - -For example, in the training stage 1, the training environment with 64 devices is used, and in the training stage 2, the training environment with 56 devices is used. The overall operation process is as follows: - -1. Execute the training in stage 1 to automatically generate the checkpoint files and the slice strategy files. - -2. Integrate the saved checkpoint files. - - Integrate the divided model parameters based on the specific dividing strategy to generate a new checkpoint file. - -3. Load the checkpoint file that is integrated and saved in the stage 2 cluster. - - During the loading, you need to redivide the parameter data in the checkpoint file based on the new training environment configuration. - -4. Perform stage 2 training. - -## Integrating the Saved Checkpoint Files - -### Overall Process - -Import the checkpoint files to be integrated to the network in rank id order and obtain the list of all parameters through the API provided by MindSpore, and then obtain the slice strategy of model. See steps 1 and 2 in the following figure. - -Then, update the parameter list and integrate the model parallel parameters. See step 3 in the following figure. - -Finally, save the updated parameter list to a file through the API provided by MindSpore to generate a new checkpoint file. See step 4 in the following figure. 
- -![img](./images/checkpoint_integration_process.jpg) - -### Preparations - -#### Importing the Checkpoint Files in rank id order - -Define the network, call the `load_checkpoint` and `load_param_into_net` APIs to import the checkpoint files to the network in rank id order, and then call `parameters_and_names` API to obtain all parameters in this network. - -```python -net = Net() -opt = Momentum(learning_rate=0.01, momentum=0.9, params=net.get_parameters()) -net = TrainOneStepCell(net, opt) -param_dicts = [] -for i in range(rank_size): - file_name = os.path.join("./node"+str(i), "CKP_1-4_32.ckpt") # checkpoint file name of current node - param_dict = load_checkpoint(file_name) - load_param_into_net(net, param_dict) - param_dict = {} - for _, param in net.parameters_and_names(): - param_dict[param.name] = param - param_dicts.append(param_dict) -``` - -In the preceding information: - -- `rank_size`: number of nodes in previous distributed training. -- `load_checkpoint`: loads the checkpoint model parameter file and returns a parameter dictionary. -- `load_param_into_net`: loads model parameter data to the network. - -#### Obtaining a List of All Parameters on the Network - -Call the `build_searched_strategy` API to obtain the slice strategy of model. - -```python -strategy = build_searched_strategy("./strategy_train.ckpt") -``` - -In the preceding information: - -- `strategy_train.ckpt`: name of model slice strategy, set by users calling `set_auto_parallel_context` API and customizing `strategy_ckpt_save_file` parameter before training network. - -### Integrate the Model Parallel Parameters - -The following uses a model parameter as an example to describe a specific integration process. - -The parameter name is weight and the dividing strategy is to perform dividing in a 4-device scenario. - -1. Obtain the data value on all nodes for model parallel parameters. 
- - ```python - sliced_parameters = [] - for i in range(4): - parameter = param_dicts[i].get("weight") - sliced_parameters.append(parameter) - ``` - - > To ensure that the parameter update speed remains unchanged, you need to integrate the parameters saved in the optimizer, for example, moments.weight. - -2. Call the `merge_sliced_parameter` API to merge the sliced parameters. - - ```python - merged_parameter = merge_sliced_parameter(sliced_parameters, strategy) - ``` - -> If there are multiple model parallel parameters, repeat steps 1 to 2 to process them one by one. - -### Saving the Data and Generating a New Checkpoint File - -1. Convert `param_dict` to `param_list`. - - ```python - param_list = [] - for (key, value) in param_dict.items(): - each_param = {} - each_param["name"] = key - if isinstance(value.data, Tensor): - param_data = value.data - else: - param_data = Tensor(value.data) - each_param["data"] = param_data - param_list.append(each_param) - ``` - -2. Call the `save_checkpoint` API to write the parameter data to a file and generate a new checkpoint file. - - ```python - save_checkpoint(param_list, "./CKP-Integrated_1-4_32.ckpt") - ``` - - In the preceding information: - - - `save_checkpoint`: saves network model parameters to a file. - - `CKP-Integrated_1-4_32.ckpt`: name of the generated checkpoint model parameter file. - -## Loading the Integrated and Saved Checkpoint File - -### Overall Process - -If you need to load the integrated and saved checkpoint file to multi-device training or inference, divide the parallel parameter data based on the new strategy before loading the model parameters to the network. The following steps are implemented in the pre-training script. Steps 1 and 3 are the same as the strategy of checkpoint loading in a single-node system. Step 2 is added to divide model parallel parameters. In the single-device training/inference scenario, data dividing is not involved. In this case, step 2 can be skipped. 
- -### Step 1: Loading the Checkpoint File - -Call the `load_checkpoint` API to load model parameter data from the checkpoint file. - -```python -param_dict = load_checkpoint("./CKP-Integrated_1-4_32.ckpt") -``` - -- `load_checkpoint`: loads the checkpoint model parameter file and returns a parameter dictionary. -- `CKP-Integrated_1-4_32.ckpt`: name of the checkpoint model parameter file to be loaded. - -### Step 2: Dividing a Model Parallel Parameter - -The following uses a specific model parameter as an example. The parameter name is weight, the data value is Tensor \[\[1, 2, 3, 4], \[5, 6, 7, 8]], and the dividing strategy is to perform dividing in the two-device scenario based on \[2, 1]. Data distribution after dividing is as follows: - -| Device0 | Device1 | -|--------------------|---------------------| -| Value [1, 2, 3, 4] | Value \[5, 6, 7, 8] | - -1. Divide the model parameter data. - - In the following code example, data is divided into two slices in dimension 0. - - ```python - new_param = parameter_dict["weight"] - slice_list = np.split(new_param.data.asnumpy(), 2, axis=0) - new_param_moments = parameter_dict["moments.weight"] - slice_moments_list = np.split(new_param_moments.data.asnumpy(), 2, axis=0) - ``` - - Data after dividing: - - ```text - slice_list[0] --- [1, 2, 3, 4] Corresponding to device0 - slice_list[1] --- [5, 6, 7, 8] Corresponding to device1 - ``` - - Similar to slice\_list, slice\_moments\_list is divided into two tensors with the shape of \[1, 4]. - -2. Load the corresponding data slice on each node. - - Obtain rank\_id of the current node and load data based on rank\_id. - - ```python - rank = get_rank() - tensor_slice = Tensor(slice_list[rank]) - tensor_slice_moments = Tensor(slice_moments_list[rank]) - ``` - - - `get_rank`: obtains the ID of the current device in the cluster. - -3. Modify values of model parameters. 
- - ```python - new_param.set_data(tensor_slice, True) - new_param_moments.set_data(tensor_slice_moments, True) - ``` - - - `set_data`: sets the value of a model parameter. The API parameter type is Tensor or number. - -### Step 3: Loading the Modified Parameter Data to the Network - -Call the `load_param_into_net` API to load the model parameter data to the network. - -```python -net = Net() -opt = Momentum(learning_rate=0.01, momentum=0.9, params=net.get_parameters()) -load_param_into_net(net, param_dict) -load_param_into_net(opt, param_dict) -``` - -## Example - -### Scenario Description - -Overall scenario: The training is divided into two stages. The cluster scales in the two stages are different. The MatMul operator at the FC layer is simulated to run in parallel. - -User process: - -1. Execute stage 1 training. There are four devices in stage 1 training environment. The weight shape of the MatMul operator on each device is \[2, 8]. Checkpoint files are automatically exported during the training. - -2. Execute the script to integrate checkpoint files. Based on the specific dividing strategy, integrate the divided model parameters to generate the integrated checkpoint file. - -3. Execute stage 2 training: There are two devices in stage 2 training environment. The weight shape of the MatMul operator on each device is \[4, 8]. Load the initialized model parameter data from the integrated checkpoint file and then perform training. - -> For details about the distributed environment configuration and training code, see [Distributed Training](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html). -> -> This document provides the example code for integrating checkpoint files and loading checkpoint files before distributed training. The code is for reference only. - -### Example Code - -1. 
Run the following script to integrate the checkpoint files: - - ```python - python ./integrate_checkpoint.py "Name of the checkpoint file to be integrated" "Path and name of the checkpoint file generated after integration" "Path and name of the strategy file" "Number of nodes" - ``` - - integrate\_checkpoint.py: - - ```python - import numpy as np - import os - import mindspore.nn as nn - from mindspore import Tensor, Parameter - import mindspore.ops as ops - from mindspore import save_checkpoint, load_checkpoint, build_searched_strategy, merge_sliced_parameter - - class Net(nn.Cell): - def __init__(self,weight_init): - super(Net, self).__init__() - self.weight = Parameter(Tensor(weight_init), layerwise_parallel=True) - self.fc = ops.MatMul(transpose_b=True) - - def construct(self, x): - x = self.fc(x, self.weight) - return x - - def integrate_ckpt_file(old_ckpt_file, new_ckpt_file, strategy_file, rank_size): - weight = np.ones([2, 8]).astype(np.float32) - net = Net(weight) - opt = Momentum(learning_rate=0.01, momentum=0.9, params=net.get_parameters()) - net = TrainOneStepCell(net, opt) - - # load CheckPoint into net in rank id order - param_dicts = [] - for i in range(rank_size): - file_name = os.path.join("./node"+str(i), old_ckpt_file) - param_dict = load_checkpoint(file_name) - load_param_into_net(net, param_dict) - param_dict = {} - for _, param in net.parameters_and_names(): - param_dict[param.name] = param - param_dicts.append(param_dict) - - strategy = build_searched_strategy(strategy_file) - param_dict = {} - - for paramname in ["weight", "moments.weight"]: - # get layer wise model parallel parameter - sliced_parameters = [] - for i in range(rank_size): - parameter = param_dicts[i].get(paramname) - sliced_parameters.append(parameter) - - # merge the parallel parameters of the model - merged_parameter = merge_sliced_parameter(sliced_parameters, strategy) - param_dict[paramname] = merged_parameter - - # convert param_dict to list type data - param_list = [] - 
for (key, value) in param_dict.items(): - each_param = {} - each_param["name"] = key - if isinstance(value.data, Tensor): - param_data = value.data - else: - param_data = Tensor(value.data) - each_param["data"] = param_data - param_list.append(each_param) - - # call the API to generate a new CheckPoint file - save_checkpoint(param_list, new_ckpt_file) - - return - - if __name__ == "__main__": - try: - old_ckpt_file = sys.argv[1] - new_ckpt_file = sys.argv[2] - strategy_file = sys.argv[3] - rank_size = int(sys.argv[4]) - integrate_ckpt_file(old_ckpt_file, new_ckpt_file, strategy_file, rank_size) - except: - print("Fail to integrate checkpoint file") - sys.exit(-1) - ``` - - The command output is as follows. - - Before the script is executed, the parameter values in the checkpoint files are as follows: - - ```text - device0: - name is weight - value is - [[0.87537426 1.0448935 0.86736983 0.8836905 0.77354026 0.69588304 0.9183654 0.7792076] - [0.87224025 0.8726848 0.771446 0.81967723 0.88974726 0.7988162 0.72919345 0.7677011]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.2567724 -0.07485991 0.282002 0.2456022 0.454939 0.619168 0.18964815 0.45714882] - [0.25946522 0.24344791 0.45677605 0.3611395 0.23378398 0.41439137 0.5312468 0.4696194]] - - device1: - name is weight - value is - [[0.9210751 0.9050457 0.9827775 0.920396 0.9240526 0.9750359 1.0275179 1.0819869] - [0.73605865 0.84631145 0.9746683 0.9386582 0.82902765 0.83565056 0.9702136 1.0514659]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.2417504 0.28193963 0.06713893 0.21510397 0.23380603 0.11424308 0.0218009 -0.11969765] - [0.45955992 0.22664294 0.01990281 0.0731914 0.27125207 0.27298513 -0.01716102 -0.15327111]] - - device2: - name is weight - value is - [[1.0108461 0.8689414 0.91719437 0.8805056 0.7994629 0.8999671 0.7585804 1.0287056 ] - [0.90653455 0.60146594 
0.7206475 0.8306303 0.8364681 0.89625114 0.7354735 0.8447268]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.03440702 0.41419312 0.24817684 0.30765256 0.48516113 0.24904746 0.57791173 0.00955463] - [0.13458519 0.6690533 0.49259356 0.28319967 0.25951773 0.16777472 0.45696738 0.24933104]] - - device3: - name is weight - value is - [[0.7147005 0.9168278 0.80178416 0.6258351 0.8413766 0.5909515 0.696347 0.71359116] - [0.20506378 0.03691584 0.2454556 0.12978578 0.19065076 0.23904312 0.27509746 0.34614682]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.14152306 0.5040985 0.24455397 0.10907605 0.11319532 0.19538902 0.01208619 0.40430856] - [-0.7773164 -0.47611716 -0.6041424 -0.6144473 -0.2651842 -0.31909415 -0.4510405 -0.12860501]] - ``` - - After the script is executed, the parameter values in the checkpoint files are as follows: - - ```text - name is weight - value is - [[1.1138763 1.0962057 1.3516843 1.0812817 1.1579804 1.1078343 1.0906502 1.3207073] - [0.916671 1.0781671 1.0368758 0.9680898 1.1735439 1.0628364 0.9960786 1.0135143] - [0.8828271 0.7963984 0.90675324 0.9830291 0.89010954 0.897052 0.7890109 0.89784735] - [1.0011744 1.0840297 1.0201758 1.0882459 0.94232416 1.0775206 1.0195118 1.0528734] - [1.0053468 0.98402303 0.99762845 0.97587246 1.0259694 1.0055295 0.99420834 0.9496847] - [1.0851002 1.0295962 1.0999886 1.0958165 0.9765328 1.146529 1.0970603 1.1388365] - [0.7147005 0.9168278 0.80178416 0.6258351 0.8413766 0.5909515 0.696347 0.71359116] - [0.20506378 0.03691584 0.2454556 0.12978578 0.19065076 0.23904312 0.27509746 0.34614682]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.2567724 -0.07485991 0.282002 0.2456022 0.454939 0.619168 0.18964815 0.45714882] - [0.25946522 0.24344791 0.45677605 0.3611395 0.23378398 0.41439137 0.5312468 0.4696194 
] - [0.2417504 0.28193963 0.06713893 0.21510397 0.23380603 0.11424308 0.0218009 -0.11969765] - [0.45955992 0.22664294 0.01990281 0.0731914 0.27125207 0.27298513 -0.01716102 -0.15327111] - [0.03440702 0.41419312 0.24817684 0.30765256 0.48516113 0.24904746 0.57791173 0.00955463] - [0.13458519 0.6690533 0.49259356 0.28319967 0.25951773 0.16777472 0.45696738 0.24933104] - [0.14152306 0.5040985 0.24455397 0.10907605 0.11319532 0.19538902 0.01208619 0.40430856] - [-0.7773164 -0.47611716 -0.6041424 -0.6144473 -0.2651842 -0.31909415 -0.4510405 - -0.12860501]] - ``` - -2. Execute stage 2 training and load the checkpoint file before training. The training code needs to be supplemented based on the site requirements. - - ```python - import numpy as np - import os - import mindspore.nn as nn - from mindspore import context - from mindspore.communication.management import init - from mindspore import Tensor, Parameter - import mindspore.ops as ops - from mindspore import load_checkpoint, load_param_into_net - - from mindspore.communication.management import init - devid = int(os.getenv('DEVICE_ID')) - context.set_context(mode=context.GRAPH_MODE,device_target='Ascend',save_graphs=True, device_id=devid) - init() - - class Net(nn.Cell): - def __init__(self,weight_init): - super(Net, self).__init__() - self.weight = Parameter(Tensor(weight_init), layerwise_parallel=True) - self.fc = ops.MatMul(transpose_b=True) - - def construct(self, x): - x = self.fc(x, self.weight) - return x - def train_mindspore_impl_fc(input, label, ckpt_file): - param_dict = load_checkpoint(ckpt_file) - - for paramname in ["weight", "moments.weight"]: - # get layer wise model parallel parameter - new_param = parameter_dict[paramname] - # split the model parameter data - slice_list = np.split(new_param.data.asnumpy(), 2, axis=0) - # Load the corresponding data slice - rank = get_rank() - tensor_slice = Tensor(slice_list[rank]) - # modify model parameter data values - new_param.set_data(tensor_slice, True) - - 
# load the modified parameter data into the network - weight = np.ones([4, 8]).astype(np.float32) - net = Net(weight) - load_param_into_net(net, param_dict) - opt = Momentum(learning_rate=0.01, momentum=0.9, params=parallel_net.get_parameters()) - load_param_into_net(opt, param_dict) - # train code - ... - - if __name__ == "__main__": - input = np.random.random((4, 8)).astype(np.float32) - print("mean = ", np.mean(input,axis=1, keepdims=True)) - label = np.random.random((4, 4)).astype(np.float32) - ckpt_file = sys.argv[1] - train_mindspore_impl_fc(input, label, ckpt_file) - ``` - - In the preceding information: - - - `mode=context.GRAPH_MODE`: sets the running mode to graph mode for distributed training. (The PyNative mode does not support parallel running.) - - `device_id`: physical sequence number of a device, that is, the actual sequence number of the device on a computer where the device is located. - - `init`: completes the distributed training initialization. - - Parameter values after loading: - - ```text - device0: - name is weight - value is - [[0.87537426 1.0448935 0.86736983 0.8836905 0.77354026 0.69588304 0.9183654 0.7792076] - [0.87224025 0.8726848 0.771446 0.81967723 0.88974726 0.7988162 0.72919345 0.7677011] - [0.8828271 0.7963984 0.90675324 0.9830291 0.89010954 0.897052 0.7890109 0.89784735] - [1.0011744 1.0840297 1.0201758 1.0882459 0.94232416 1.0775206 1.0195118 1.0528734]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.2567724 -0.07485991 0.282002 0.2456022 0.454939 0.619168 0.18964815 0.45714882] - [0.25946522 0.24344791 0.45677605 0.3611395 0.23378398 0.41439137 0.5312468 0.4696194] - [0.2417504 0.28193963 0.06713893 0.21510397 0.23380603 0.11424308 0.0218009 -0.11969765] - [0.45955992 0.22664294 0.01990281 0.0731914 0.27125207 0.27298513 -0.01716102 -0.15327111]] - - device1: - name is weight - value is - [[1.0053468 0.98402303 0.99762845 0.97587246 1.0259694 1.0055295 
0.99420834 0.9496847] - [1.0851002 1.0295962 1.0999886 1.0958165 0.9765328 1.146529 1.0970603 1.1388365] - [0.7147005 0.9168278 0.80178416 0.6258351 0.8413766 0.5909515 0.696347 0.71359116] - [0.20506378 0.03691584 0.2454556 0.12978578 0.19065076 0.23904312 0.27509746 0.34614682]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.03440702 0.41419312 0.24817684 0.30765256 0.48516113 0.24904746 0.57791173 0.00955463] - [0.13458519 0.6690533 0.49259356 0.28319967 0.25951773 0.16777472 0.45696738 0.24933104] - [0.14152306 0.5040985 0.24455397 0.10907605 0.11319532 0.19538902 0.01208619 0.40430856] - [-0.7773164 -0.47611716 -0.6041424 -0.6144473 -0.2651842 -0.31909415 -0.4510405 -0.12860501]] - ``` diff --git a/tutorials/training/source_en/advanced_use/summary_record.md b/tutorials/training/source_en/advanced_use/summary_record.md deleted file mode 100644 index b36288254076ffc1d8113f83e740b1b948b7a967..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/summary_record.md +++ /dev/null @@ -1,523 +0,0 @@ -# Collecting Summary Record - -`Linux` `Ascend` `GPU` `CPU` `Model Optimization` `Intermediate` `Expert` - - - -- [Collecting Summary Record](#collecting-summary-record) - - [Overview](#overview) - - [Operation Process](#operation-process) - - [Preparing The Training Script](#preparing-the-training-script) - - [Method one: Automatically collected through SummaryCollector](#method-one-automatically-collected-through-summarycollector) - - [Method two: Custom collection of network data with summary operators and SummaryCollector](#method-two-custom-collection-of-network-data-with-summary-operators-and-summarycollector) - - [Method three: Custom callback recording data](#method-three-custom-callback-recording-data) - - [Method four: Advanced usage, custom training cycle](#method-four-advanced-usage-custom-training-cycle) - - [Distributed Training 
Scene](#distributed-training-scene) - - [Tip: Recording gradients](#tip-recording-gradients) - - [Run MindInsight](#run-mindinsight) - - [Notices](#notices) - - - - - -## Overview - -Scalars, images, computational graphs, and model hyperparameters during training are recorded in files and can be viewed on the web page. - -## Operation Process - -- Prepare a training script, specify scalars, images, computational graphs, and model hyperparameters in the training script, record them in the summary log file, and run the training script. -- Start MindInsight and specify the summary log file directory using startup parameters. After MindInsight is started, access the visualization page based on the IP address and port number. The default access IP address is `http://127.0.0.1:8080`. -- During the training, when data is written into the summary log file, you can view the data on the web page. - -## Preparing The Training Script - -Currently, MindSpore supports saving scalars, images, computational graphs, and model hyperparameters to the summary log file and displaying them on the web page. The computational graph can only be recorded in the graph mode. - -MindSpore currently supports multiple ways to record data into summary log files. - -### Method one: Automatically collected through SummaryCollector - -The `Callback` mechanism in MindSpore provides a quick and easy way to collect common information, including the computational graph, loss value, learning rate, parameter weights, etc. It is named 'SummaryCollector'. - -When you write a training script, you just instantiate the `SummaryCollector` and apply it to either `model.train` or `model.eval`. You can automatically collect some common summary data. For the detailed usage of `SummaryCollector`, refer to the `API` document `mindspore.train.callback.SummaryCollector`. 
- -The sample code is as follows: - -```python -import mindspore -import mindspore.nn as nn -from mindspore import ops -from mindspore import context, Tensor, Model -from mindspore.nn.metrics import Accuracy -from mindspore.train.callback import SummaryCollector - - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0, pad_mode="valid", has_bias=True): - return nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding, - has_bias=has_bias, pad_mode=pad_mode) - - -def fc_with_initialize(input_channels, out_channels, has_bias=True): - return nn.Dense(input_channels, out_channels, has_bias=has_bias) - - -class AlexNet(nn.Cell): - """AlexNet""" - def __init__(self, num_classes=10, channel=3, phase='train', include_top=True): - super(AlexNet, self).__init__() - self.conv1 = conv(channel, 64, 11, stride=4, pad_mode="same", has_bias=True) - self.conv2 = conv(64, 128, 5, pad_mode="same", has_bias=True) - self.conv3 = conv(128, 192, 3, pad_mode="same", has_bias=True) - self.conv4 = conv(192, 256, 3, pad_mode="same", has_bias=True) - self.conv5 = conv(256, 256, 3, pad_mode="same", has_bias=True) - self.relu = ops.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode='valid') - self.include_top = include_top - if self.include_top: - dropout_ratio = 0.65 - if phase == 'test': - dropout_ratio = 1.0 - self.flatten = nn.Flatten() - self.fc1 = fc_with_initialize(6 * 6 * 256, 4096) - self.fc2 = fc_with_initialize(4096, 4096) - self.fc3 = fc_with_initialize(4096, num_classes) - self.dropout = nn.Dropout(dropout_ratio) - - def construct(self, x): - """define network""" - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv3(x) - x = self.relu(x) - x = self.conv4(x) - x = self.relu(x) - x = self.conv5(x) - x = self.relu(x) - x = self.max_pool2d(x) - if not self.include_top: - return x - x = self.flatten(x) - x = self.fc1(x) - 
x = self.relu(x) - x = self.dropout(x) - x = self.fc2(x) - x = self.relu(x) - x = self.dropout(x) - x = self.fc3(x) - return x - -def train(): - context.set_context(mode=context.GRAPH_MODE) - - network = AlexNet(num_classes=10) - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - lr = Tensor(0.5, mindspore.float32) - opt = nn.Momentum(network.trainable_params(), lr, momentum=0.9) - model = Model(network, loss, opt, metrics={"Accuracy": Accuracy()}) - - # How to create a valid dataset instance, - # for details, see the https://www.mindspore.cn/tutorial/training/en/master/quick_start/quick_start.html document. - ds_train = create_dataset('./dataset_path') - - # Initialize a SummaryCollector callback instance, and use it in model.train or model.eval - summary_collector = SummaryCollector(summary_dir='./summary_dir', collect_freq=1) - - # Note: dataset_sink_mode should be set to False, else you should modify collect_freq in SummaryCollector - model.train(epoch=1, train_dataset=ds_train, callbacks=[summary_collector], dataset_sink_mode=False) - - ds_eval = create_dataset('./dataset_path') - model.eval(ds_eval, callbacks=[summary_collector]) - -if __name__ == '__main__': - train() -``` - -> 1. When using summary, it is recommended that you set `dataset_sink_mode` argument of `model.train` to `False`. Please see notices for more information. -> 2. When using summary, you need to run the code in `if __name__ == "__main__"`. For more detail, refer to [Python tutorial](https://docs.python.org/3.7/library/multiprocessing.html#multiprocessing-programming) - -### Method two: Custom collection of network data with summary operators and SummaryCollector - -In addition to providing the `SummaryCollector` that automatically collects some summary data, MindSpore provides summary operators that enable customized collection of other data on the network, such as the input of each convolutional layer, or the loss value in the loss function, etc. 
- -The following summary operators are currently supported: - -- [ScalarSummary](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ScalarSummary.html): Record scalar data. -- [TensorSummary](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.TensorSummary.html): Record tensor data. -- [ImageSummary](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.ImageSummary.html): Record image data. -- [HistogramSummary](https://www.mindspore.cn/doc/api_python/en/master/mindspore/ops/mindspore.ops.HistogramSummary.html): Convert tensor data into histogram data records. - -The recording method is shown in the following steps. - -Step 1: Call the summary operator in the `construct` function of the derived class that inherits `nn.Cell` to collect image or scalar data. - -For example, when a network is defined, image data is recorded in `construct` of the network. When the loss function is defined, the loss value is recorded in `construct` of the loss function. - -Record the dynamic learning rate in `construct` of the optimizer when defining the optimizer. 
- -The sample code is as follows: - -```python -import mindspore -import mindspore.ops as ops -from mindspore import Tensor, nn -from mindspore.nn import Optimizer - - -class CrossEntropyLoss(nn.Cell): - """Loss function definition.""" - def __init__(self): - super(CrossEntropyLoss, self).__init__() - self.cross_entropy = ops.SoftmaxCrossEntropyWithLogits() - self.mean = ops.ReduceMean() - self.one_hot = ops.OneHot() - self.on_value = Tensor(1.0, mindspore.float32) - self.off_value = Tensor(0.0, mindspore.float32) - - # Init ScalarSummary - self.scalar_summary = ops.ScalarSummary() - - def construct(self, logits, label): - label = self.one_hot(label, ops.shape(logits)[1], self.on_value, self.off_value) - loss = self.cross_entropy(logits, label)[0] - loss = self.mean(loss, (-1,)) - - # Record loss - self.scalar_summary("loss", loss) - return loss - - -class MyOptimizer(Optimizer): - """Optimizer definition.""" - def __init__(self, learning_rate, params, ...): - ... - # Initialize ScalarSummary - self.scalar_summary = ops.ScalarSummary() - self.histogram_summary = ops.HistogramSummary() - self.weight_names = [param.name for param in self.parameters] - - def construct(self, grads): - ... - # Record learning rate here - self.scalar_summary("learning_rate", learning_rate) - - # Record weight - self.histogram_summary(self.weight_names[0], self.parameters[0]) - # Record gradient - self.histogram_summary(self.weight_names[0] + ".gradient", grads[0]) - - ... - -class Net(nn.Cell): - """Net definition.""" - def __init__(self): - super(Net, self).__init__() - ... - - # Init ImageSummary - self.image_summary = ops.ImageSummary() - # Init TensorSummary - self.tensor_summary = ops.TensorSummary() - - def construct(self, data): - # Record image by Summary operator - self.image_summary("image", data) - # Record tensor by Summary operator - self.tensor_summary("tensor", data) - ... - return out -``` - -> 1. 
In the same Summary operator, the name given to the data must not be repeated, otherwise the data collection and presentation will have unexpected behavior. -> For example, if two `ScalarSummary` operators are used to collect scalar data, two scalars cannot be given the same name. -> 2. Summary operator only supports Graph mode and needs to be used in `construct` of `nn.Cell`. The PyNative mode is not supported yet. - -Step 2: In the training script, instantiate the `SummaryCollector` and apply it to `model.train`. - -The sample code is as follows: - -```python -from mindspore import Model, nn, context -from mindspore.train.callback import SummaryCollector -... - -def train(): - context.set_context(mode=context.GRAPH_MODE) - network = Net() - loss_fn = CrossEntropyLoss() - optim = MyOptimizer(learning_rate=0.01, params=network.trainable_params()) - model = Model(network, loss_fn=loss_fn, optimizer=optim, metrics={"Accuracy": Accuracy()}) - - ds_train = create_dataset('./dataset_path') - - summary_collector = SummaryCollector(summary_dir='./summary_dir', collect_freq=1) - model.train(epoch=2, train_dataset=ds_train, callbacks=[summary_collector]) - -if __name__ == '__main__': - train() -``` - -### Method three: Custom callback recording data - -MindSpore supports customized callback and supports to record data into summary log file -in custom callback, and display the data by the web page. - -The following pseudocode is shown in the CNN network, where developers can use the network output with the original tag and the prediction tag to generate the image of the confusion matrix. -It is then recorded into the summary log file through the `SummaryRecord` module. -The detailed usage of `SummaryRecord` can refer to the `API` document `mindspore.train.summary.SummaryRecord`. 
- -The sample code is as follows: - -```python -from mindspore.train.callback import Callback -from mindspore.train.summary import SummaryRecord - -class ConfusionMatrixCallback(Callback): - def __init__(self, summary_dir): - self._summary_dir = summary_dir - - def __enter__(self): - # init your summary record here; when the training script runs, it will be initialized before training - self.summary_record = SummaryRecord(self._summary_dir) - return self - - def __exit__(self, *exc_args): - # Note: you must close the summary record, it will release the process pool resource - # else your training script will not exit from training. - self.summary_record.close() - - def step_end(self, run_context): - cb_params = run_context.original_args() - - # create a confusion matrix image, and record it to the summary file - confusion_matrix = create_confusion_matrix(cb_params) - self.summary_record.add_value('image', 'confusion_matrix', confusion_matrix) - self.summary_record.record(cb_params.cur_step_num) - -# init your train script -... - -confusion_matrix = ConfusionMatrixCallback(summary_dir='./summary_dir') -model.train(network, train_dataset=ds_train, callbacks=[confusion_matrix]) -``` - -The above three ways support recording the computational graph, loss value and other data. In addition, MindSpore also supports the saving of computational graphs for other phases of training, by setting -the `save_graphs` option of `context.set_context` in the training script to `True` to record computational graphs of other phases, including the computational graph after operator fusion. - -In the saved files, `ms_output_after_hwopt.pb` is the computational graph after operator fusion, which can be viewed on the web page. - -### Method four: Advanced usage, custom training cycle - -If you are not using the `Model` interface provided by MindSpore, you can implement a method by imitating the `train` method of the `Model` interface to control the number of iterations. 
You can imitate the `SummaryCollector` and record the summary operator data in the following manner. For a detailed custom training cycle tutorial, please [refer to the tutorial on the official website](https://www.mindspore.cn/doc/programming_guide/en/master/train.html#customizing-a-training-cycle). - -The following example demonstrates how to record data in a custom training cycle using the summary operator and the `add_value` interface of `SummaryRecord`. For more tutorials about `SummaryRecord`, [refer to the Python API documentation](https://www.mindspore.cn/doc/api_python/en/master/mindspore/mindspore.train.html#mindspore.train.summary.SummaryRecord). Please note that `SummaryRecord` will not record computational graph automatically. If you need to record the computational graph, please manually pass the instance of network that inherits from Cell. The recorded computational graph only includes the code and functions used in the construct method. - -```python -from mindspore import nn -from mindspore.train.summary import SummaryRecord -import mindspore.ops as ops - -class LeNet5(nn.Cell): - def __init__(self, num_class=10): - super(LeNet5, self).__init__() - self.num_class = num_class - self.batch_size = 32 - self.conv1 = conv(1, 6, 5) - ... - - self.image_summary = ops.ImageSummary() - self.tensor_summary = ops.TensorSummary() - - def construct(self, x): - self.image_summary('x1', x) - x = self.conv1(x) - self.tensor_summary('after_conv1', x) - x = self.relu(x) - ... - return x - -... - -def train(): - epochs = 10 - net = LeNet5() - # Note1: An instance of the network should be passed to SummaryRecord if you want to record - # computational graph. 
- with SummaryRecord('./summary_dir', network=net) as summary_record: - for epoch in range(epochs): - step = 1 - for inputs in dataset_helper: - output = net(*inputs) - current_step = epoch * len(dataset_helper) + step - print("step: {0}, losses: {1}".format(current_step, output.asnumpy())) - - # Note2: The output should be a scalar, and use 'add_value' method to record loss. - # Note3: You must use the 'record(step)' method to record the data of this step. - summary_record.add_value('scalar', 'loss', output) - summary_record.record(current_step) - - step += 1 - -if __name__ == '__main__': - train() - -``` - -### Distributed Training Scene - -The `SummaryCollector` and the `SummaryRecord` are not multi-process safe when writing data, so in a single-machine multi-card scenario, you need to make sure that each card stores data in a different directory. In a distributed scenario, we set the summary directory with the 'get_rank' function. - -```python3 -from mindspore.communication.management import get_rank -summary_dir = "summary_dir" + str(get_rank()) -``` - -The sample code is as follows: - -```python3 -from mindspore.communication.management import get_rank - -... - -network = ResNet50(num_classes=10) - -# Init a SummaryCollector callback instance, and use it in model.train or model.eval -summary_dir = "summary_dir" + str(get_rank()) -summary_collector = SummaryCollector(summary_dir=summary_dir, collect_freq=1) - -# Note: dataset_sink_mode should be set to False, else you should modify collect freq in SummaryCollector -model.train(epoch=1, train_dataset=ds_train, callbacks=[summary_collector], dataset_sink_mode=False) - -model.eval(ds_eval, callbacks=[summary_collector]) -``` - -### Tip: Recording gradients - -There is a tip for recording gradients with summary in addition to the above methods. Please note that the tip should be used with one of the above methods. 
- -Recording gradients is possible by inheriting your original optimizer and inserting calls to summary operator. An example of code is as follows: - -```python -import mindspore.nn as nn -import mindspore.ops as ops -... - -# Define a new optimizer class by inheriting your original optimizer. -class MyOptimizer(nn.Momentum): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._original_construct = super().construct - self.histogram_summary = ops.HistogramSummary() - self.gradient_names = [param.name + ".gradient" for param in self.parameters] - - def construct(self, grads): - # Record gradient. - self.histogram_summary(self.gradient_names[0], grads[0]) - return self._original_construct(grads) - -... - -# Initialize your model with the newly defined optimizer. -model = Model(network, loss_fn=loss_fn, optimizer=MyOptimizer(arg1=arg1value)) -``` - -## Run MindInsight - -After completing the data collection in the tutorial above, you can start MindInsight to visualize the collected data. When start MindInsight, you need to specify the summary log file directory with the `--summary-base-dir` parameter. - -The specified summary log file directory can be the output directory of a training or the parent directory of the output directory of multiple training. 
- -The output directory structure for a training is as follows - -```text -└─summary_dir - events.out.events.summary.1596869898.hostname_MS - events.out.events.summary.1596869898.hostname_lineage -``` - -Execute command: - -```Bash -mindinsight start --summary-base-dir ./summary_dir -``` - -The output directory structure of multiple training is as follows: - -```text -└─summary - ├─summary_dir1 - │ events.out.events.summary.1596869898.hostname_MS - │ events.out.events.summary.1596869898.hostname_lineage - │ - └─summary_dir2 - events.out.events.summary.1596869998.hostname_MS - events.out.events.summary.1596869998.hostname_lineage -``` - -Execute command: - -```Bash -mindinsight start --summary-base-dir ./summary -``` - -After successful startup, the visual page can be viewed by visiting the `http://127.0.0.1:8080` address through the browser. - -Stop MindInsight command: - -```Bash -mindinsight stop -``` - -For more parameter Settings, see the [MindInsight related commands](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/mindinsight_commands.html) page. - -## Notices - -1. To limit time of listing summaries, MindInsight lists at most 999 summary items. - -2. Multiple `SummaryRecord` instances can not be used at the same time. (`SummaryRecord` is used in `SummaryCollector`) - - If you use two or more instances of `SummaryCollector` in the callback list of 'model.train' or 'model.eval', it is seen as using multiple `SummaryRecord` instances at the same time, and it may cause recoding data failure. - - If the customized callback uses `SummaryRecord`, it can not be used with `SummaryCollector` at the same time. - - Correct code: - - ```python - ... - summary_collector = SummaryCollector('./summary_dir') - model.train(2, train_dataset, callbacks=[summary_collector]) - ... - model.eval(dataset, callbacks=[summary_collector]) - ``` - - Wrong code: - - ```python - ... 
- summary_collector1 = SummaryCollector('./summary_dir1') - summary_collector2 = SummaryCollector('./summary_dir2') - model.train(2, train_dataset, callbacks=[summary_collector1, summary_collector2]) - ``` - - Wrong code: - - ```python - ... - # Note: the 'ConfusionMatrixCallback' is user-defined, and it uses SummaryRecord to record data. - confusion_callback = ConfusionMatrixCallback('./summary_dir1') - summary_collector = SummaryCollector('./summary_dir2') - model.train(2, train_dataset, callbacks=[confusion_callback, summary_collector]) - ``` - -3. In each Summary log file directory, only one training data should be placed. If a summary log directory contains summary data from multiple training, MindInsight will overlay the summary data from these training when visualizing the data, which may not be consistent with the expected visualizations. - -4. When using summary, it is recommended that you set `dataset_sink_mode` argument of `model.train` to `False`, so that the unit of `collect_freq` is `step`. When `dataset_sink_mode` was `True`, the unit of `collect_freq` would be `epoch` and it is recommended that you set `collect_freq` manually. - -5. The maximum amount of data saved per step is 2147483647 Bytes. If this limit is exceeded, data for the step cannot be recorded and an error occurs. - -6. In PyNative mode, the `SummaryCollector` can be used properly, but the computational graph can not be recorded and the summary operator can not be used. 
diff --git a/tutorials/training/source_en/advanced_use/test_model_security_fuzzing.md b/tutorials/training/source_en/advanced_use/test_model_security_fuzzing.md deleted file mode 100644 index 05f6529a5497231a583c2b2a0698bf03b6bcb88b..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/test_model_security_fuzzing.md +++ /dev/null @@ -1,210 +0,0 @@ -# Testing Model Security Using Fuzz Testing - -`Linux` `Ascend` `GPU` `CPU` `Model Evaluation` `Enterprise` `Expert` - - - -- [Testing Model Security Using Fuzz Testing](#testing-model-security-using-fuzz-testing) - - [Overview](#overview) - - [Implementation](#implementation) - - [Importing Library Files](#importing-library-files) - - [Parameter Configuration](#parameter-configuration) - - [Fuzz Testing Application](#fuzz-testing-application) - - -   - -## Overview - -The decision logic of traditional software is determined by the code logic. Traditional software determines whether the test is adequate based on the code line coverage rate. Ideally, the higher the coverage rate is, the more adequate the code test is. However, for deep neural network, the decision logic of the program is determined by the training data, network structure, and parameters through a black box mechanism. The code line coverage fails to evaluate the test adequacy. A more suitable test evaluation criterion needs to be selected according to the deep network features to guide the neural network to perform a more adequate test and find more corner error cases, thereby ensuring universality and robustness of a model. - -The fuzz testing module of MindArmour uses the neuron coverage rate as the test evaluation criterion. Neuron coverage is the range of the number of neurons observed and activated and the range of the neuron output value through a set of inputs. The neuron coverage is used to guide input mutation so that the input can activate more neurons and neuron values can be distributed in a wider range. 
In this way, we can explore different types of model output results and incorrect behaviors. - -The LeNet model and MNIST dataset are used as an example to describe how to use Fuzz testing. - -> This example is for CPUs, GPUs, and Ascend 910 AI processors. You can download the complete sample code at . - -## Implementation - -### Importing Library Files - -The following lists the required common modules, MindSpore-related modules, Fuzz testing feature modules, and configuration log labels and log levels. - -```python -import numpy as np -from mindspore import Model -from mindspore import context -from mindspore import load_checkpoint, load_param_into_net - -from mindarmour.fuzz_testing import Fuzzer -from mindarmour.fuzz_testing import ModelCoverageMetrics -from mindarmour.utils.logger import LogUtil - -from examples.common.dataset.data_processing import generate_mnist_dataset -from examples.common.networks.lenet5.lenet5_net import LeNet5 - -LOGGER = LogUtil.get_instance() -TAG = 'Fuzz_testing' -LOGGER.set_level('INFO') -``` - -### Parameter Configuration - -Configure necessary information, including the environment information and execution mode. - -```python -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") -``` - -For details about the API configuration, see the `context.set_context`. - -### Fuzz Testing Application - -1. Create a LeNet model and load the MNIST dataset. The operation is the same as that for [Model Security](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/improve_model_security_nad.html). - - ```python - ... 
- # Lenet model - model = Model(net) - # get training data - mnist_path = "../common/dataset/MNIST/" - batch_size = 32 - ds = generate_mnist_dataset(os.path.join(mnist_path, "train"), batch_size, sparse=False) - train_images = [] - for data in ds.create_tuple_iterator(): - images = data[0].asnumpy().astype(np.float32) - train_images.append(images) - train_images = np.concatenate(train_images, axis=0) - - # get test data - batch_size = 32 - ds = generate_mnist_dataset(os.path.join(mnist_path, "test"), batch_size, sparse=False) - test_images = [] - test_labels = [] - for data in ds.create_tuple_iterator(): - images = data[0].asnumpy().astype(np.float32) - labels = data[1].asnumpy() - test_images.append(images) - test_labels.append(labels) - test_images = np.concatenate(test_images, axis=0) - test_labels = np.concatenate(test_labels, axis=0) - ``` - -2. Configure Fuzzer parameters. - - Set the data mutation method and parameters. Multiple methods can be configured at the same time. Currently, the following data mutation methods are supported: - - - Image affine transformation methods: Translate, Scale, Shear, and Rotate. - - Methods based on image pixel value changes: Contrast, Brightness, Blur, and Noise. - - Methods for generating adversarial examples based on white-box and black-box attacks: FGSM, PGD, and MDIIM. - - The data mutation method must include the method based on the image pixel value changes. - - The first two image transform methods support user-defined configuration parameters and randomly generated parameters by algorithms. For user-defined configuration parameters see the class methods corresponding to . For randomly generated parameters by algorithms you can set method's params to `'auto_param': [True]`. The mutation parameters are randomly generated within the recommended range. - - For details about how to set parameters based on the attack defense method, see the corresponding attack method class. 
- - The following is an example for configure Fuzzer parameters. - - ```python - mutate_config = [{'method': 'Blur', - 'params': {'radius': [0.1, 0.2, 0.3], - 'auto_param': [True, False]}}, - {'method': 'Contrast', - 'params': {'auto_param': [True]}}, - {'method': 'Translate', - 'params': {'auto_param': [True]}}, - {'method': 'Brightness', - 'params': {'auto_param': [True]}}, - {'method': 'Noise', - 'params': {'auto_param': [True]}}, - {'method': 'Scale', - 'params': {'auto_param': [True]}}, - {'method': 'Shear', - 'params': {'auto_param': [True]}}, - {'method': 'FGSM', - 'params': {'eps': [0.3, 0.2, 0.4], 'alpha': [0.1]}} - ] - ``` - - Set evaluation metrics. Currently, the following evaluation metrics are supported: - - General evaluation metric: accuracy. - - Neuron coverage rate metrics: kmnc, nbc, and snac. - - Adversarial attack evaluation metric: attack_success_rate. - You can set this parameter to `auto`. By default, all evaluation metrics are used. - - ```python - eval_metrics =['accuracy', 'kmnc', 'attack_success_rate'] - ``` - -3. Initialize the seed queue. Each seed in the seed queue has two values: original image and image label. Here we select 100 samples as initial seed queue. - - ```python - # make initial seeds - initial_seeds = [] - for img, label in zip(test_images, test_labels): - initial_seeds.append([img, label]) - initial_seeds = initial_seeds[:100] - ``` - -4. Test the neuron coverage rate before the fuzz testing. - - ```python - segmented_num = 1000 - neuron_num = 10 - model_coverage_test = ModelCoverageMetrics(model, neuron_num, segmented_num, train_images) - model_coverage_test.calculate_coverage(np.array(test_images[:100]).astype(np.float32)) - LOGGER.info(TAG, 'KMNC of this test is : %s', model_coverage_test.get_kmnc()) - ``` - - Result: - - ```python - KMNC of this test is : 0.0851 - ``` - -5. Perform the fuzz testing. 
-
-    ```python
-    eval_metrics = 'auto'
-    model_fuzz_test = Fuzzer(model, train_images, neuron_num, segmented_num)
-    _, _, _, _, metrics = model_fuzz_test.fuzzing(mutate_config, initial_seeds, eval_metrics=eval_metrics)
-    ```
-
-6. Experiment results.
-
-    The results of fuzz testing contain five kinds of data:
-
-    - fuzz_samples: mutated samples in fuzz testing.
-    - true_labels: the ground truth labels of fuzz_samples.
-    - fuzz_pred: predictions of the tested model about fuzz_samples.
-    - fuzz_strategies: the methods used to mutate fuzz_samples.
-    - metrics_report: metrics report of fuzz testing.
-
-    The first 4 returns can be used to further calculate complex metrics and analyze the robustness of the model.
-
-    Run the following command to view the result:
-
-    ```python
-    if metrics:
-        for key in metrics:
-            LOGGER.info(TAG, key + ': %s', metrics[key])
-    ```
-
-    The fuzz testing result is as follows:
-
-    ```python
-    Accuracy: 0.7929
-    Attack_success_rate: 0.3939
-    Neural_coverage_KMNC: 0.4797
-    ```
-
-    Before the fuzz testing, the KMNC neuron coverage rate of the seeds is 8.5%. After the fuzz testing, the KMNC neuron coverage rate is 47.97%, and the neuron coverage rate and sample diversity increase. After the fuzz testing, the accuracy rate of the model on the generated samples is 79.29%, and the attack success rate is 39.39% for samples using the adversarial attack method. Since the initial seeds, the mutation methods, and the corresponding parameters are all randomly selected, it is normal that the result floats to some extent. 
- - Original image: - - ![fuzz_seed](./images/fuzz_seed.png) - - Mutation images generated by fuzzing: - - ![fuzz_res](./images/fuzz_res.png) diff --git a/tutorials/training/source_en/advanced_use/test_model_security_membership_inference.md b/tutorials/training/source_en/advanced_use/test_model_security_membership_inference.md deleted file mode 100644 index d88e3b897690f44444b87e0c8055febff46808f9..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/test_model_security_membership_inference.md +++ /dev/null @@ -1,312 +0,0 @@ -# Using Membership Inference to Test Model Security - -`Linux` `Ascend` `GPU` `CPU` `Model Evaluation` `Enterprise` `Expert` - - - -- [Using Membership Inference to Test Model Security](#using-membership-inference-to-test-model-security) - - [Overview](#overview) - - [Implementation](#implementation) - - [Importing Library Files](#importing-library-files) - - [Importing Related Packages](#importing-related-packages) - - [Loading the Dataset](#loading-the-dataset) - - [Creating the Model](#creating-the-model) - - [Using Membership Inference for Privacy Security Evaluation](#using-membership-inference-for-privacy-security-evaluation) - - [References](#references) - - -   - -## Overview - -Membership inference is a method of inferring user privacy data. Privacy refers to some attributes of a single user. Once the privacy is disclosed, personal injury and reputation damage may occur. Although user privacy data is confidential, it can be inferred by using non-sensitive information. If members of a private club like to wear purple sunglasses and red shoes, then a person who wears purple sunglasses and red shoes (non-sensitive information) may be inferred as a member of this private club (sensitive information). This is membership inference. 
-
-In machine learning and deep learning, if an attacker has some access permissions (black box, gray box, or white box) to a model to obtain some or all information about the model output, structure, or parameters, they can determine whether a sample belongs to the training set of the model. In this case, we can use membership inference to evaluate the privacy data security of machine learning and deep learning models. If more than 60% of samples can be correctly inferred using membership inference, the model has privacy data leakage risks.
-
-The following uses a VGG16 model and CIFAR-100 dataset as an example to describe how to use membership inference to perform model privacy security evaluation. This tutorial uses pre-trained model parameters for demonstration. The following describes only the model structure, parameter settings, and dataset preprocessing method.
-
-> This example is for the Ascend 910 AI Processor. You can download the complete sample code in the following link:
->
-> 
-
-## Implementation
-
-### Importing Library Files
-
-#### Importing Related Packages
-
-The following contains common modules, MindSpore-related modules, membership inference feature modules, and configuration log labels and log levels.
-
-```python
-import argparse
-import sys
-import math
-import os
-
-import numpy as np
-
-import mindspore.nn as nn
-from mindspore import Model, load_param_into_net, load_checkpoint
-import mindspore.common.dtype as mstype
-from mindspore.common import initializer as init
-from mindspore.common.initializer import initializer
-import mindspore.dataset as de
-import mindspore.dataset.transforms.c_transforms as C
-import mindspore.dataset.vision.c_transforms as vision
-from mindarmour import MembershipInference
-from mindarmour.utils import LogUtil
-
-LOGGER = LogUtil.get_instance()
-TAG = "MembershipInference_test"
-LOGGER.set_level("INFO")
-```
-
-### Loading the Dataset
-
-The CIFAR-100 dataset is used as an example. You can use your own dataset. 
Ensure that the input data has only two attributes: `image` and `label`. - -```python -# Generate CIFAR-100 data. -def vgg_create_dataset100(data_home, image_size, batch_size, rank_id=0, rank_size=1, repeat_num=1, - training=True, num_samples=None, shuffle=True): - """Data operations.""" - de.config.set_seed(1) - data_dir = os.path.join(data_home, "train") - if not training: - data_dir = os.path.join(data_home, "test") - - if num_samples is not None: - data_set = de.Cifar100Dataset(data_dir, num_shards=rank_size, shard_id=rank_id, - num_samples=num_samples, shuffle=shuffle) - else: - data_set = de.Cifar100Dataset(data_dir, num_shards=rank_size, shard_id=rank_id) - - input_columns = ["fine_label"] - output_columns = ["label"] - data_set = data_set.rename(input_columns=input_columns, output_columns=output_columns) - data_set = data_set.project(["image", "label"]) - - rescale = 1.0 / 255.0 - shift = 0.0 - - # Define map operations. - random_crop_op = vision.RandomCrop((32, 32), (4, 4, 4, 4)) # padding_mode default CONSTANT. - random_horizontal_op = vision.RandomHorizontalFlip() - resize_op = vision.Resize(image_size) # interpolation default BILINEAR. - rescale_op = vision.Rescale(rescale, shift) - normalize_op = vision.Normalize((0.4465, 0.4822, 0.4914), (0.2010, 0.1994, 0.2023)) - changeswap_op = vision.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - c_trans = [] - if training: - c_trans = [random_crop_op, random_horizontal_op] - c_trans += [resize_op, rescale_op, normalize_op, - changeswap_op] - - # Apply map operations on images. - data_set = data_set.map(operations=type_cast_op, input_columns="label") - data_set = data_set.map(operations=c_trans, input_columns="image") - - # Apply repeat operations. - data_set = data_set.repeat(repeat_num) - - # Apply batch operations. - data_set = data_set.batch(batch_size=batch_size, drop_remainder=True) - - return data_set -``` - -### Creating the Model - -The VGG16 model is used as an example. 
You can use your own model. - -```python -def _make_layer(base, args, batch_norm): - """Make stage network of VGG.""" - layers = [] - in_channels = 3 - for v in base: - if v == 'M': - layers += [nn.MaxPool2d(kernel_size=2, stride=2)] - else: - conv2d = nn.Conv2d(in_channels=in_channels, - out_channels=v, - kernel_size=3, - padding=args.padding, - pad_mode=args.pad_mode, - has_bias=args.has_bias, - weight_init='XavierUniform') - if batch_norm: - layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU()] - else: - layers += [conv2d, nn.ReLU()] - in_channels = v - return nn.SequentialCell(layers) - - -class Vgg(nn.Cell): - """ - VGG network definition. - """ - - def __init__(self, base, num_classes=1000, batch_norm=False, batch_size=1, args=None, phase="train"): - super(Vgg, self).__init__() - _ = batch_size - self.layers = _make_layer(base, args, batch_norm=batch_norm) - self.flatten = nn.Flatten() - dropout_ratio = 0.5 - if not args.has_dropout or phase == "test": - dropout_ratio = 1.0 - self.classifier = nn.SequentialCell([ - nn.Dense(512*7*7, 4096), - nn.ReLU(), - nn.Dropout(dropout_ratio), - nn.Dense(4096, 4096), - nn.ReLU(), - nn.Dropout(dropout_ratio), - nn.Dense(4096, num_classes)]) - - def construct(self, x): - x = self.layers(x) - x = self.flatten(x) - x = self.classifier(x) - return x - - -base16 = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'] - - -def vgg16(num_classes=1000, args=None, phase="train"): - net = Vgg(base16, num_classes=num_classes, args=args, batch_norm=args.batch_norm, phase=phase) - return net -``` - -### Using Membership Inference for Privacy Security Evaluation - -1. Build the VGG16 model and load the parameter file. - - You can directly load the pre-trained VGG16 parameter settings or use the preceding network for training. - - ```python - ... 
- # load parameter - parser = argparse.ArgumentParser("main case arg parser.") - parser.add_argument("--data_path", type=str, required=True, help="Data home path for dataset") - parser.add_argument("--pre_trained", type=str, required=True, help="Checkpoint path") - args = parser.parse_args() - args.batch_norm = True - args.has_dropout = False - args.has_bias = False - args.padding = 0 - args.pad_mode = "same" - args.weight_decay = 5e-4 - args.loss_scale = 1.0 - - # Load the pretrained model. - net = vgg16(num_classes=100, args=args) - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) - opt = nn.Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9, - weight_decay=args.weight_decay, loss_scale=args.loss_scale) - load_param_into_net(net, load_checkpoint(args.pre_trained)) - model = Model(network=net, loss_fn=loss, optimizer=opt) - ``` - -2. Load the CIFAR-100 dataset and split it into a training set and a test set of the membership inference model at the ratio of 8:2. - - ```python - # Load and split dataset. - train_dataset = vgg_create_dataset100(data_home=args.data_path, image_size=(224, 224), - batch_size=64, num_samples=5000, shuffle=False) - test_dataset = vgg_create_dataset100(data_home=args.data_path, image_size=(224, 224), - batch_size=64, num_samples=5000, shuffle=False, training=False) - train_train, eval_train = train_dataset.split([0.8, 0.2]) - train_test, eval_test = test_dataset.split([0.8, 0.2]) - msg = "Data loading completed." - LOGGER.info(TAG, msg) - ``` - -3. Set the inference and evaluation parameters. - - Set the method and parameters for membership inference. Currently, the following inference methods are supported: KNN, LR, MLPClassifier, and RandomForestClassifier. The data type of inference parameters is list. Each method is represented by a dictionary whose keys are `method` and `params`. 
- - ```python - config = [ - { - "method": "lr", - "params": { - "C": np.logspace(-4, 2, 10) - } - }, - { - "method": "knn", - "params": { - "n_neighbors": [3, 5, 7] - } - }, - { - "method": "mlp", - "params": { - "hidden_layer_sizes": [(64,), (32, 32)], - "solver": ["adam"], - "alpha": [0.0001, 0.001, 0.01] - } - }, - { - "method": "rf", - "params": { - "n_estimators": [100], - "max_features": ["auto", "sqrt"], - "max_depth": [5, 10, 20, None], - "min_samples_split": [2, 5, 10], - "min_samples_leaf": [1, 2, 4] - } - } - ] - ``` - - The training set is regarded as a positive class, and the test set is regarded as a negative class. You can set the following three evaluation metrics: - - Accuracy: Percentage of samples correctly inferred to all samples. - - Precision: Percentage of correctly inferred positive samples to all inferred positive samples. - - Recall: Percentage of correctly inferred positive samples to all actual positive samples. - If the number of samples is large enough and all the preceding metric values are greater than 0.6, the target model has privacy leakage risks. - - ```python - metrics = ["precision", "accuracy", "recall"] - ``` - -4. Train the membership inference model. - - ```python - inference = MembershipInference(model) # Get inference model. - - inference.train(train_train, train_test, config) # Train inference model. - msg = "Membership inference model training completed." - LOGGER.info(TAG, msg) - - result = inference.eval(eval_train, eval_test, metrics) # Eval metrics. - count = len(config) - for i in range(count): - print("Method: {}, {}".format(config[i]["method"], result[i])) - ``` - -5. Run the following command to start member inference training and evaluation to obtain the result: - - ```bash - python example_vgg_cifar.py --data_path ./cifar-100-binary/ --pre_trained ./VGG16-100_781.ckpt - ``` - - Metric values of membership inference are accurate to four decimal places. - - Take the first row as an example. 
When lr (logical regression classification) is used for membership inference, the accuracy is 0.7132, the precision is 0.6596, and the recall is 0.8810, indicating that lr has a probability of 71.32% that can correctly determine whether a data sample belongs to a training set of the target model. In a binary classification task, the metrics indicate that membership inference is valid, that is, the model has privacy leakage risks. - - ```text - Method: lr, {'recall': 0.8810,'precision': 0.6596,'accuracy': 0.7132} - Method: knn, {'recall': 0.7082,'precision': 0.5613,'accuracy': 0.5774} - Method: mlp, {'recall': 0.6729,'precision': 0.6462,'accuracy': 0.6522} - Method: rf, {'recall': 0.8513, 'precision': 0.6655, 'accuracy': 0.7117} - ``` - -## References - -[1] [Shokri R , Stronati M , Song C , et al. Membership Inference Attacks against Machine Learning Models[J].](https://arxiv.org/abs/1610.05820v2) diff --git a/tutorials/training/source_en/advanced_use/use_on_the_cloud.md b/tutorials/training/source_en/advanced_use/use_on_the_cloud.md deleted file mode 100644 index 0111a4dd63375e68cfe9d5ea4e4ff6d15d8ca99c..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/use_on_the_cloud.md +++ /dev/null @@ -1,309 +0,0 @@ -# Using MindSpore on the Cloud - -`Linux` `Ascend` `Whole Process` `Beginner` `Intermediate` `Expert` - - - -- [Using MindSpore on the Cloud](#using-mindspore-on-the-cloud) - - [Overview](#overview) - - [Preparations](#preparations) - - [Preparing ModelArts](#preparing-modelarts) - - [Accessing Ascend AI Processor Resources on HUAWEI CLOUD](#accessing-ascend-ai-processor-resources-on-huawei-cloud) - - [Preparing Data](#preparing-data) - - [Preparing for Script Execution](#preparing-for-script-execution) - - [Running the MindSpore Script on ModelArts After Simple Adaptation](#running-the-mindspore-script-on-modelarts-after-simple-adaptation) - - [Adapting to Script Arguments](#adapting-to-script-arguments) - - [Adapting to 
OBS Data](#adapting-to-obs-data) - - [Adapting to 8-Device Training Jobs](#adapting-to-8-device-training-jobs) - - [Sample Code](#sample-code) - - [Creating a Training Job](#creating-a-training-job) - - [Opening the ModelArts Console](#opening-the-modelarts-console) - - [Using a Common Framework to Create a Training Job](#using-a-common-framework-to-create-a-training-job) - - [Using MindSpore as a Common Framework to Create a Training Job](#using-mindspore-as-a-common-framework-to-create-a-training-job) - - [Viewing the Execution Result](#viewing-the-execution-result) - - - - - -## Overview - -ModelArts is a one-stop AI development platform provided by HUAWEI CLOUD. It integrates the Ascend AI Processor resource pool. Developers can experience MindSpore on this platform. - -ResNet-50 is used as an example to describe how to use MindSpore to complete a training task on ModelArts. - -## Preparations - -### Preparing ModelArts - -Create an account, configure ModelArts, and create an Object Storage Service (OBS) bucket by referring to the "Preparations" section in the ModelArts tutorial. -> For more information about ModelArts, visit . Prepare ModelArts by referring to the "Preparations" section. - -### Accessing Ascend AI Processor Resources on HUAWEI CLOUD - -You can click [here](https://console.huaweicloud.com/modelarts/?region=cn-north-4#/dashboard/applyModelArtsAscend910Beta) to join the beta testing program of the ModelArts Ascend Compute Service. - -### Preparing Data - -ModelArts uses OBS to store data. Therefore, before starting a training job, you need to upload the data to OBS. The CIFAR-10 dataset in binary format is used as an example. - -1. Download and decompress the CIFAR-10 dataset. - - > Download the CIFAR-10 dataset at . Among the three dataset versions provided on the page, select CIFAR-10 binary version. - -2. 
Create an OBS bucket (for example, ms-dataset), create a data directory (for example, cifar-10) in the bucket, and upload the CIFAR-10 data to the data directory according to the following structure. - - ```text - └─Object storage/ms-dataset/cifar-10 - ├─train - │ data_batch_1.bin - │ data_batch_2.bin - │ data_batch_3.bin - │ data_batch_4.bin - │ data_batch_5.bin - │ - └─eval - test_batch.bin - ``` - -### Preparing for Script Execution - -Create an OBS bucket (for example, `resnet50-train`), create a code directory (for example, `resnet50_cifar10_train`) in the bucket, and upload all scripts in the following directories to the code directory: -> ResNet-50 is used in scripts in to train the CIFAR-10 dataset and validate the accuracy after training is complete. `1*Ascend` or `8*Ascend` can be used in scripts on ModelArts for training. -> -> Note that the script version must be the same as the MindSpore version selected in "Creating a Training Task." For example, if you use scripts provided for MindSpore 1.1, you need to select MindSpore 1.1 when creating a training job. - -To facilitate subsequent training job creation, you need to create a training output directory and a log output directory. The directory structure created in this example is as follows: - -```text -└─Object storage/resnet50-train - ├─resnet50_cifar10_train - │ dataset.py - │ resnet.py - │ resnet50_train.py - │ - ├─output - └─log -``` - -## Running the MindSpore Script on ModelArts After Simple Adaptation - -Scripts provided in section "Preparing for Script Execution" can directly run on ModelArts. If you want to experience how to use ResNet-50 to train CIFAR-10, skip this section. If you need to run customized MindSpore scripts or more MindSpore sample code on ModelArts, perform simple adaptation on the MindSpore code as follows: - -### Adapting to Script Arguments - -1. Set `data_url` and `train_url`. 
They are necessary for running the script on ModelArts, corresponding to the data storage path (an OBS path) and training output path (an OBS path), respectively. - - ``` python - import argparse - - parser = argparse.ArgumentParser(description='ResNet-50 train.') - parser.add_argument('--data_url', required=True, default=None, help='Location of data.') - parser.add_argument('--train_url', required=True, default=None, help='Location of training outputs.') - ``` - -2. ModelArts allows you to pass arguments to the configuration options in the script. For details, see "Creating a Training Job." - - ``` python - parser.add_argument('--epoch_size', type=int, default=90, help='Train epoch size.') - ``` - -### Adapting to OBS Data - -MindSpore does not provide APIs for directly accessing OBS data. You need to use APIs provided by MoXing to interact with OBS. ModelArts training scripts are executed in containers. Generally, the `/cache` directory is used to store the container data. -> HUAWEI CLOUD MoXing provides various APIs for users: . In this example, only the `copy_parallel` API is used. - -1. Download the data stored in OBS to an execution container. - - ```python - import moxing as mox - mox.file.copy_parallel(src_url='s3://dataset_url/', dst_url='/cache/data_path') - ``` - -2. Upload the training output from the container to OBS. - - ```python - import moxing as mox - mox.file.copy_parallel(src_url='/cache/output_path', dst_url='s3://output_url/') - ``` - -### Adapting to 8-Device Training Jobs - -To run scripts in the `8*Ascend` environment, you need to adapt dataset creation code and a local data path, and configure a distributed policy. By obtaining the environment variables `DEVICE_ID` and `RANK_SIZE`, you can build training scripts applicable to `1*Ascend` and `8*Ascend`. - -1. Adapt a local path. 
- - ```python - import os - - device_num = int(os.getenv('RANK_SIZE')) - device_id = int(os.getenv('DEVICE_ID')) - # define local data path - local_data_path = '/cache/data' - - if device_num > 1: - # define distributed local data path - local_data_path = os.path.join(local_data_path, str(device_id)) - ``` - -2. Adapt datasets. - - ```python - import os - import mindspore.dataset.engine as de - - device_id = int(os.getenv('DEVICE_ID')) - device_num = int(os.getenv('RANK_SIZE')) - if device_num == 1: - # create train data for 1 Ascend situation - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True) - else: - # create train data for 1 Ascend situation, split train data for 8 Ascend situation - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True, - num_shards=device_num, shard_id=device_id) - ``` - -3. Configure a distributed policy. - - ```python - import os - from mindspore import context - from mindspore.context import ParallelMode - - device_num = int(os.getenv('RANK_SIZE')) - if device_num > 1: - context.set_auto_parallel_context(device_num=device_num, - parallel_mode=ParallelMode.DATA_PARALLEL, - gradients_mean=True) - ``` - -### Sample Code - -Perform simple adaptation on the MindSpore script based on the preceding three points. 
The following pseudocode is used as an example: - -Original MindSpore script: - -``` python -import os -import argparse -from mindspore import context -from mindspore.context import ParallelMode -import mindspore.dataset.engine as de - -device_id = int(os.getenv('DEVICE_ID')) -device_num = int(os.getenv('RANK_SIZE')) - -def create_dataset(dataset_path): - if device_num == 1: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True) - else: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True, - num_shards=device_num, shard_id=device_id) - return ds - -def resnet50_train(args): - if device_num > 1: - context.set_auto_parallel_context(device_num=device_num, - parallel_mode=ParallelMode.DATA_PARALLEL, - gradients_mean=True) - train_dataset = create_dataset(local_data_path) - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='ResNet-50 train.') - parser.add_argument('--local_data_path', required=True, default=None, help='Location of data.') - parser.add_argument('--epoch_size', type=int, default=90, help='Train epoch size.') - - args_opt, unknown = parser.parse_known_args() - - resnet50_train(args_opt) -``` - -Adapted MindSpore script: - -``` python -import os -import argparse -from mindspore import context -from mindspore.context import ParallelMode -import mindspore.dataset.engine as de - -# adapt to cloud: used for downloading data -import moxing as mox - -device_id = int(os.getenv('DEVICE_ID')) -device_num = int(os.getenv('RANK_SIZE')) - -def create_dataset(dataset_path): - if device_num == 1: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True) - else: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True, - num_shards=device_num, shard_id=device_id) - return ds - -def resnet50_train(args): - # adapt to cloud: define local data path - local_data_path = '/cache/data' - - if device_num > 1: - context.set_auto_parallel_context(device_num=device_num, - 
parallel_mode=ParallelMode.DATA_PARALLEL, - gradients_mean=True) - # adapt to cloud: define distributed local data path - local_data_path = os.path.join(local_data_path, str(device_id)) - - # adapt to cloud: download data from obs to local location - print('Download data.') - mox.file.copy_parallel(src_url=args.data_url, dst_url=local_data_path) - - train_dataset = create_dataset(local_data_path) - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='ResNet-50 train.') - # adapt to cloud: get obs data path - parser.add_argument('--data_url', required=True, default=None, help='Location of data.') - # adapt to cloud: get obs output path - parser.add_argument('--train_url', required=True, default=None, help='Location of training outputs.') - parser.add_argument('--epoch_size', type=int, default=90, help='Train epoch size.') - args_opt, unknown = parser.parse_known_args() - - resnet50_train(args_opt) -``` - -## Creating a Training Job - -Create a training job to run the MindSpore script. The following provides step-by-step instructions for creating a training job on ModelArts. - -### Opening the ModelArts Console - -Click Console on the HUAWEI CLOUD ModelArts home page at . - -### Using a Common Framework to Create a Training Job - -ModelArts Tutorial shows how to use a common framework to create a training job. - -### Using MindSpore as a Common Framework to Create a Training Job - -Training scripts and data in this tutorial are used as an example to describe how to configure arguments on the training job creation page. - -1. `Algorithm Source`: Click `Frameworks`, and then select `Ascend-Powered-Engine` and the required MindSpore version. (`Mindspore-0.5-python3.7-aarch64` is used as an example here. Use scripts corresponding to the selected version.) - -2. `Code Directory`: Select a code directory created in an OBS bucket. Set `Startup File` to a startup script in the code directory. - -3. 
`Data Source`: Click `Data Storage Path` and enter the CIFAR-10 dataset path in OBS. - -4. `Argument`: Set `data_url` and `train_url` to the values of `Data Storage Path` and `Training Output Path`, respectively. Click the add icon to pass values to other arguments in the script, for example, `epoch_size`. - -5. `Resource Pool`: Click `Public Resource Pool > Ascend`. - -6. `Specification`: Select `Ascend: 1 * Ascend 910 CPU: 24-core 96 GiB` or `Ascend: 8 * Ascend 910 CPU: 192-core 768 GiB`, which indicate single-node single-device and single-node 8-device specifications, respectively. - -## Viewing the Execution Result - -1. You can view run logs on the Training Jobs page. - - The `8*Ascend` specification is used to execute the ResNet-50 training job. The total number of epochs is 92, the accuracy is about 92%, and the number of images trained per second is about 12,000. - - The `1*Ascend` specification is used to execute the ResNet-50 training job. The total number of epochs is 92, the accuracy is about 95%, and the number of images trained per second is about 1800. - -2. If you specify a log path when creating a training job, you can download log files from OBS and view them. diff --git a/tutorials/training/source_en/advanced_use/visualization_tutorials.rst b/tutorials/training/source_en/advanced_use/visualization_tutorials.rst deleted file mode 100644 index e49cc5c09b7f8e953e5a83669d26c00c91280517..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/advanced_use/visualization_tutorials.rst +++ /dev/null @@ -1,14 +0,0 @@ -Training Process Visualization -================================== - -.. 
toctree:: - :maxdepth: 1 - - summary_record - dashboard - lineage_and_scalars_comparison - hyper_parameters_auto_tuning - performance_profiling - debugger - model_explanation - mindinsight_commands diff --git a/tutorials/training/source_en/conf.py b/tutorials/training/source_en/conf.py deleted file mode 100644 index 46072a4ffdbbe9a21ef7ac60e2fd3a34fe2dae08..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/conf.py +++ /dev/null @@ -1,85 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -# import sys -import IPython -import re -import nbsphinx as nbs - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'recommonmark', - 'sphinx_markdown_tables', - 'nbsphinx', - 'sphinx.ext.mathjax', - 'IPython.sphinxext.ipython_console_highlighting' -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. 
-templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_static_path = ['_static'] - -def setup(app): - app.add_stylesheet('css/bootstrap.min.css') - app.add_stylesheet('css/training.css') - app.add_javascript('js/training.js') - -# Remove extra outputs for nbsphinx extension. -nbsphinx_source_re = re.compile(r"(app\.connect\('html-collect-pages', html_collect_pages\))") -nbsphinx_math_re = re.compile(r"(\S.*$)") -mod_path = os.path.abspath(nbs.__file__) -with open(mod_path, "r+", encoding="utf8") as f: - contents = f.readlines() - for num, line in enumerate(contents): - _content_re = nbsphinx_source_re.search(line) - if _content_re and "#" not in line: - contents[num] = nbsphinx_source_re.sub(r"# \g<1>", line) - if "mathjax_config = app.config" in line and "#" not in line: - contents[num:num+10] = [nbsphinx_math_re.sub(r"# \g<1>", i) for i in contents[num:num+10]] - break - f.seek(0) - f.writelines(contents) \ No newline at end of file diff --git a/tutorials/training/source_en/index.rst b/tutorials/training/source_en/index.rst deleted file mode 100644 index ddff262e3377f802ec5b47a9a5974d1e083912d6..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/index.rst +++ /dev/null @@ -1,901 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 09:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Train with MindSpore -========================== - -.. 
toctree:: - :glob: - :maxdepth: 1 - :caption: Quick Start - :hidden: - - quick_start/quick_start - quick_start/linear_regression - quick_start/quick_video - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Basic Use - :hidden: - - use/data_preparation - use/defining_the_network - use/save_model - use/load_model_for_inference_and_transfer - use/publish_model - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Process Data - :hidden: - - advanced_use/convert_dataset - advanced_use/optimize_data_processing - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Build Networks - :hidden: - - advanced_use/custom_loss_function - advanced_use/custom_operator - advanced_use/migrate_script - advanced_use/apply_deep_probability_programming - advanced_use/implement_high_order_differentiation - advanced_use/quantum_neural_network - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Model Optimization - :hidden: - - advanced_use/debug_in_pynative_mode - advanced_use/dump_in_graph_mode - advanced_use/custom_debugging_info - advanced_use/visualization_tutorials - advanced_use/enable_auto_augmentation - advanced_use/evaluate_the_model_during_training - advanced_use/incremental_operator_build - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Performance Optimization - :hidden: - - advanced_use/distributed_training_tutorials - advanced_use/enable_mixed_precision - advanced_use/enable_graph_kernel_fusion - advanced_use/apply_gradient_accumulation - advanced_use/enable_cache - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Model Compression - :hidden: - - advanced_use/apply_quantization_aware_training - advanced_use/apply_post_training_quantization - -.. 
toctree:: - :glob: - :maxdepth: 1 - :caption: Model Security and Privacy - :hidden: - - advanced_use/improve_model_security_nad - advanced_use/protect_user_privacy_with_differential_privacy - advanced_use/protect_user_privacy_with_suppress_privacy - advanced_use/test_model_security_fuzzing - advanced_use/test_model_security_membership_inference - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: Application - :hidden: - - advanced_use/cv - advanced_use/nlp - advanced_use/hpc - advanced_use/use_on_the_cloud - -.. raw:: html - -
    -
    -
    -
    - - -
    - Filter condition - -
    - -
    -
    -
    -
    -
    Operating System
    -
    -
    - - -
    -
    - -
    -
    -
    -
    -
    Hardware
    -
    -
    - - - -
    -
    -
    - -
    -
    -
    -
    User
    -
    -
    - - - - -
    -
    -
    - -
    -
    -
    -
    Stage
    -
    -
    - - - - - - - - - - -
    - -
    - -
    - -
    -
    - - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - -
    - -
    -
    -
    - diff --git a/tutorials/training/source_en/quick_start/images/LeNet_5.jpg b/tutorials/training/source_en/quick_start/images/LeNet_5.jpg deleted file mode 100644 index 7894b0e181d965c5e9cbba91fe240c1890d37bda..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/quick_start/images/LeNet_5.jpg and /dev/null differ diff --git a/tutorials/training/source_en/quick_start/images/linear_regression.gif b/tutorials/training/source_en/quick_start/images/linear_regression.gif deleted file mode 100644 index ff616e2782ba2fecb54064ffb867d944d5b29f10..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/quick_start/images/linear_regression.gif and /dev/null differ diff --git a/tutorials/training/source_en/quick_start/images/linear_regression_eval_datasets.png b/tutorials/training/source_en/quick_start/images/linear_regression_eval_datasets.png deleted file mode 100644 index 7dc474508bf5241a038fee6d9b5c093199d93691..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/quick_start/images/linear_regression_eval_datasets.png and /dev/null differ diff --git a/tutorials/training/source_en/quick_start/images/model_net_and_eval_datasets.png b/tutorials/training/source_en/quick_start/images/model_net_and_eval_datasets.png deleted file mode 100644 index c99e0bd2155c4c42befeab1ead6820f9edf7c059..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/quick_start/images/model_net_and_eval_datasets.png and /dev/null differ diff --git a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_11_1.png b/tutorials/training/source_en/quick_start/images/quick_start_quick_start_11_1.png deleted file mode 100644 index 738773cb05a3d937e43bb344bfd3f873192a33f9..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_11_1.png and /dev/null differ diff --git 
a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_21_0.png b/tutorials/training/source_en/quick_start/images/quick_start_quick_start_21_0.png deleted file mode 100644 index 5f60b3e4d8161eafcdf9b8be9ebc033e68f022b5..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_21_0.png and /dev/null differ diff --git a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_45_0.png b/tutorials/training/source_en/quick_start/images/quick_start_quick_start_45_0.png deleted file mode 100644 index c6cdc0ac0216274c57b22fc9901599aa721628e5..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_45_0.png and /dev/null differ diff --git a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_52_0.png b/tutorials/training/source_en/quick_start/images/quick_start_quick_start_52_0.png deleted file mode 100644 index 74ab35c3a601b8a1111adb6ea6390f30de688cad..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_52_0.png and /dev/null differ diff --git a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_58_1.png b/tutorials/training/source_en/quick_start/images/quick_start_quick_start_58_1.png deleted file mode 100644 index 4dbc06454c1aa28a7aac5282239c0495281681ae..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_58_1.png and /dev/null differ diff --git a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_60_1.png b/tutorials/training/source_en/quick_start/images/quick_start_quick_start_60_1.png deleted file mode 100644 index eaf08b4cabe1844c8a1e4696a9f1b5b67202815c..0000000000000000000000000000000000000000 Binary files 
a/tutorials/training/source_en/quick_start/images/quick_start_quick_start_60_1.png and /dev/null differ diff --git a/tutorials/training/source_en/quick_start/linear_regression.ipynb b/tutorials/training/source_en/quick_start/linear_regression.ipynb deleted file mode 100644 index 69fd39eff4615a8d79a1d87939e44effa9eaa2ab..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/linear_regression.ipynb +++ /dev/null @@ -1,562 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "pregnant-wages", - "metadata": {}, - "source": [ - "# Implementing Simple Linear Function Fitting\n", - "\n", - "Author: [Yi Yang](https://github.com/helloyesterday)    Editor: [Mingfu Lv](https://gitee.com/lvmingfu)\n", - "\n", - "`Linux` `Windows` `Ascend` `GPU` `CPU` `Whole Process` `Beginner` `Intermediate` `Expert`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_en/quick_start/linear_regression.ipynb)" - ] - }, - { - "cell_type": "markdown", - "id": "sized-algorithm", - "metadata": {}, - "source": [ - "## Overview\n", - "\n", - "Regression algorithms usually use a series of properties to predict a value, and the predicted values are consecutive. For example, the price of a house is predicted based on some given feature data of the house, such as area and the number of bedrooms; or future temperature conditions are predicted by using the temperature change data and satellite cloud images in the last week. If the actual price of the house is CNY5 million, and the value predicted through regression analysis is CNY4.99 million, the regression analysis is considered accurate. For machine learning problems, common regression analysis includes linear regression, polynomial regression, and logistic regression. 
This example describes the linear regression algorithms and how to use MindSpore to perform linear regression AI training.\n", - "\n", - "The whole process is as follows:\n", - "\n", - "1. Generate datasets.\n", - "2. Define a training network.\n", - "3. Define and associate the forward and backward propagation networks.\n", - "4. Prepare for fitting process visualization.\n", - "5. Perform training.\n", - "\n", - "> This document is applicable to CPU, GPU and Ascend environments. The source code address of this example: .\n", - "\n", - "## Environment Preparation\n", - "\n", - "Complete MindSpore running configuration." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "statutory-enough", - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore import context\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")" - ] - }, - { - "cell_type": "markdown", - "id": "billion-minority", - "metadata": {}, - "source": [ - "`GRAPH_MODE`: graph mode.\n", - "\n", - "`device_target`: sets the MindSpore training hardware to CPU.\n", - "\n", - "> Third-party support package: `matplotlib`. If this package is not installed, run the `pip install matplotlib` command to install it first.\n", - "\n", - "## Generating Datasets\n", - "\n", - "### Defining the Dataset Generation Function\n", - "\n", - "`get_data` is used to generate training and test datasets. Since linear data is fitted, the required training datasets should be randomly distributed around the objective function. Assume that the objective function to be fitted is $f(x)=2x+3$. $f(x)=2x+3+noise$ is used to generate training datasets, and `noise` is a random value that complies with standard normal distribution rules." 
- ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "little-florida", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "\n", - "def get_data(num, w=2.0, b=3.0):\n", - " for _ in range(num):\n", - " x = np.random.uniform(-10.0, 10.0)\n", - " noise = np.random.normal(0, 1)\n", - " y = x * w + b + noise\n", - " yield np.array([x]).astype(np.float32), np.array([y]).astype(np.float32)" - ] - }, - { - "cell_type": "markdown", - "id": "surgical-strip", - "metadata": {}, - "source": [ - "Use `get_data` to generate 50 groups of test data and visualize them." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "technological-tsunami", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAEICAYAAAC6fYRZAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAq0UlEQVR4nO3deXxU1fnH8c8TQGQvS4AoQSnW/gzVoqYICCqoCCpF+4tbsVURI4tWqLZCbaFipfxUFC2IgmJdW0CtCLKILCqrZRMhuLAohLJEsICCSJjn98dMwhAnISEzmWTyfb9e88rce8/c++TO5JmTc889x9wdERFJTEnxDkBERGJHSV5EJIEpyYuIJDAleRGRBKYkLyKSwJTkRUQSmJK8SAFmNt/Mehez7EVmlh3rmESOl5K8VFhm9rmZHTCzr8Meo+MdV2HM7GYzWxDvOKRyqRrvAERKqbu7vxPvIETKK9XkJeGYWXUz+6+Z/SRsXXKo1t/YzOqb2TQzyzGzr0LPmxVz3zXM7O+h12UBPyuwfZCZbTCzfWaWZWZXh9afATwFtAv9x/Hf0PorzGylme01sy1m9uconQYRQEleEpC7HwReB24IW30t8K677yT4uX8OOAVoDhwAitvMMxRoGXpcBtxUYPsGoCNQD7gfeMnMUtx9HdAHWOzutd39B6Hy3wC/Bn4AXAH0NbOrivu7ihyLkrxUdG+Eau15j9tC618Brg8r98vQOtx9l7u/5u773X0f8CBwYTGPdy3woLvvdvctwBPhG919srv/x90D7j4R+AxoU9jO3H2+u38UKr8a+EcJYhE5JrXJS0V3VSFt8vOAmmZ2HrADaA38C8DMagKPAV2B+qHydcysirsfPsbxTgK2hC1/Eb7RzH4N/BY4NbSqNtCosJ2F4hsB/AQ4AagOTD5GDCLFppq8JKRQsp5EsMnmBmBaqNYOcDfwY+A8d68LXBBab8XY9TYgNWy5ed4TMzsFGA/cATQMNcmsCdtvpCFfXwHeBFLdvR7BdvvixCFSLEryksheAa4Deoae56lDsB3+v2bWgGA7e3FNAgaHLt42A+4M21aLYCLPATCzWwjW0PPsAJqZ2QkFYtnt7t+aWRuCzUoiUaMkLxXd1AL95P+Vt8HdlxK8sHkSMCPsNaOAGsCXwBJgZgmOdz/BJppNwNvAi2HHywJGAosJJvQzgYVhr50LrAW2m9mXoXX9gGFmtg8YQvBLRCRqTJOGiIgkLtXkRU
QSWKmTvJmlmtm80I0fa83srtD6P5vZVjNbFXpcXvpwRUSkJErdXGNmKUCKu68wszrAcuAqgv2Jv3b3R0odpYiIHJdS95N3920Eu5Xh7vvMbB1wcmn3KyIipRfVC69mdirwHsFuY78Fbgb2AsuAu939q6Je36hRIz/11FOjFo+ISGWwfPnyL909OdK2qCV5M6sNvEvwlu/XzawJwS5qDjxAsEmnV4TXZQKZAM2bNz/3iy++KFhERESKYGbL3T090rao9K4xs2rAa8DL7v46gLvvcPfD7h4geBdgxPE73H2cu6e7e3pycsQvIhEROU7R6F1jwLPAOnd/NGx9Slixqwne3i0iImUoGgOUnQ/8CvjIzFaF1v0BuMHMWhNsrvkcuD0KxxIRkRKIRu+aBUQeUGl6afctIiKlozteRUQSmJK8iEgCU5IXEUlgSvIiInEU8ADD3x9OVk5WTPav6f9ERGItEICcHGjcGOxIP5Xpn0zjin92B+A/+/7D6MuLO5988SnJi4jEUiAAnTrBokXQvj3Mm8f+w9+SMjKFvQf3AnDenjo83uWxmBxezTUiIrGUkxNM8Lm5sGgRY+c/TK3htfIT/PKnYcnfDlBl1+6YHF5JXkQklho3hvbt2Vm3CvbHXPq9PwiAW1rfjM+9gHNyqgZr+I0bx+Twaq4REYklM347+BweW/pe/qrNAzaTWi8Vukduq48mJXkRkRhZl7OOtCfT8pdHXDyCezvce6RAUhI0aRLTGJTkRUSizN3p9nI3Zm2Ylb9uz6A91K1et8xjUZIXEYmGUDfJed+spfOLF+evnpgxkWtbXRu3sJTkRURKKxDgYOcLOe3cBWSHKutnNDqD1X1XUzUpvmlWvWtERErphQVjOLHTkQS/8KqpZPXPinuCB9XkRUSO21cHvqLBQw3ylzOyjEnbO2BDrohjVEdTkhcROQ5D5w1l2HvD8pfX9/+UlofrxrQ75PEodZI3s1TgBaAJwVmgxrn742bWAJgInEpwZqhr3f2r0h5PRCSeNn61kZZPtMxfvq/jffyl81/iGFHRolGTzwXudvcVZlYHWG5ms4GbgTnuPsLMBgGDgHuL2I+ISPkT6jXjyclc99r1TM6anL9p1+930aBGgyJeHH/RmP5vG7At9Hyfma0DTgZ6ABeFij0PzEdJXkQqktDgYku+WEi7Ww7nr36ux3Pc3Prm+MVVAlFtkzezU4GzgaVAk9AXAMB2gs05kV6TCWQCNG/ePJrhiIiUSu6Obfz0zPfI6hxcPqlmUzYO/JzqVavHN7ASiFoXSjOrDbwGDHD3veHb3N0Jttd/j7uPc/d0d09PTk6OVjgiIqXyatarVBvXjKxQWnpn5Zlsvec/FSrBQ5Rq8mZWjWCCf9ndXw+t3mFmKe6+zcxSgJ3ROJaISKkVMokHwL6D+6g74sjwA11+eCkzL30Ba9KkXPWaKa5S1+TNzIBngXXu/mjYpjeBm0LPbwKmlPZYIiKlljeJR7NmcNFFweWQhxc+fFSCX9tvLbN+9TbWtGmFTPAQnZr8+cCvgI/MbFVo3R+AEcAkM7sV+AKI3+ANIiJ5CkziQU4O2TUOkfpYan6R37T5DY93ezyOQUZPNHrXLAAK+4q7uJD1IiLxEZrEI286vt5L/8CzKyfkb95+93aa1I7t8L9lSWPXiEjlYgbz5rFq1Uys83v5CX50t9H4UE+oBA8a1kBEKpmABzj/ufNZkr0EgNon1GbHPTuoWa1mnCOLDSV5Eak0pn82nSteOTJ42NTr3+TKH3ePY0SxpyQvIokr1FVyf/3aNB2Zwr7v9gFw3p46LBy9nypzH4F5VwSn4UtQifubiUjlFuoqOfbnJ1Hrr7XzE/zy/32bJX87QJVDh/N71yQy1eRFJCHt+CKLpp3fy1++5cfXM+H6f4D7Ub1raNw4jlHGnpK8iCScgTMHMmrpqPzlzQvbkDrkleBCqHdNYXe8JholeRFJGOty1pH2ZFr+8ojOf+Xe02+BIY2DNf
idO4OJPSkJmiRWV8nCKMmLSIXn7nR9uStvb3g7f92eQXuoWz00REHeUAZ5TTTz5iX0xdZwleO3FJGENW/TPJKGJeUn+IkZE/GhfiTBQ8ShDCoL1eRFpPwpYpTIPAdzD3La304je282AGc0OoPVfVdTNSlCWiswlEGiX2wNp5q8iJQvRYwSmeeFD1/gxAdPzE/wC3stJKt/VuQED0cutmZnw/z5CX+xNZxq8iJSvkRqWgldJN19YDcNH2qYXzQjLYNJGZOw4iTtSnSxNZxq8iJSvuQ1rVStelTTypB5Q45K8OvvXM/kayYXL8FXYqrJi0j5UqAf+8b/bqLlEy3zN/+x4x95oPMDcQywYonW9H8TgCuBne7+k9C6PwO3AXmXsf/g7tOjcTwRSXBJSXjjxlz76rW8mvVq/updv99FgxoN4hhYxROtmvzfgdHACwXWP+buj0TpGCJSSSzJXkK7Z9vlLz/382e5+aQr4MT6cYyqYopKm7y7vwfsjsa+RKTyyg3k0urJVvkJ/uQ6J/Pt4P3cPOD5InvbSOFifeH1DjNbbWYTzCziV7CZZZrZMjNbllOJblAQkaO9mvUq1R6oRlZOFgBzfj2H7N9mU/2rvZX2RqZoiGWSHwu0BFoD24CRkQq5+zh3T3f39OTk5BiGIyLl0b6D+7D7jWsmXwNAl5ZdCAwJ0LlF52CBQnrbSPHErHeNu+/Ie25m44FpsTqWiFRMDy18iHvfuTd/eW2/taQlpx1dqJKNGhltMUvyZpbi7ttCi1cDa2J1LBGpWLL3ZpP6WGr+8l3n3cWorqMKf0FSEiQnHxlFUom+2KLVhfIfwEVAIzPLBoYCF5lZa8CBz4Hbo3EsEanYbp1yKxNWTchf3n73dprUPsadqJV4FMnSikqSd/cbIqx+Nhr7FpHEsOo/Kzh7/Ln5y6O7jaZ/m/7Fe3ERQx1I0XTHq4jEVMADnP/s+SzZugSA2rlV2PHH/1Kzeu3i76QSjyJZWkryIhIz0z+bzhWvXJG/PPUVuHKjwR3fQJMSJHl3+Oc/g23xTZqoTb4ElORFJOr2H9pP00easu+7fQC0PbktC16sRpWNi0teE4/UHq8kX2xK8iISVU/++0n6Tz/S1r4icwVnp5wNvY49EUhEao8vFSV5EYmKHV/voOnIpvnLt7S+hQk9jvSiOe7x3NUeXypK8iJSagNnDmTU0lH5y5sHbCa1XmrhLygJ3QxVKkryInLc1uWsI+3JI3eojrh4BPd2uLeIVxynSjqrUzQoyYtIibk7XV/uytsb3s5ft2fQHupWrxvHqCQSJXkRKZF5m+bR+YXO+csTMyZybatr4xiRFEVJXkSK5WDuQVo+0ZKt+7YCkJacxod9PqRqktJIeabBH0TkmJ5f9TwnPnhifoJf1GsRa/utVYKvAPQOiUihdh/YTcOHGuYvZ6RlMCljEqYeLhWGkryIRPSnuX/iL+//JX95/Z3radmgZRwjkuOhJC8iR9n41UZaPnEkmf+x4x95oPMDcYxISkNJXkSAYLfIa1+9llezXs1ft+v3u2hQo0Eco5LSisqF19BE3TvNbE3YugZmNtvMPgv9jDiRt4jE3+Iti0kalpSf4J97MwmfewENqv8gvoFJqUWrd83fga4F1g0C5rj7j4A5oWURKUdyA7mkjUmj/YT2AJxcK4Vv/1qFm1cEjgwGJhVaVJK8u78H7C6wugfwfOj588BV0TiWiETHq1mvUu2Baqz7ch0Ac349h+y7t1L9vPOhalUNBpYgYtkm3yRsIu/tQMSBJ8wsE8gEaN68eQzDERGAfQf3UXfEkeEHurTswsyeM490i9RgYAmlTG6GcncnOKF3pG3j3D3d3dOTk5PLIhyRSuuhhQ8dleDX9lvLrBtnHd3vPW8wMCX4hBDLmvwOM0tx921mlgLsjOGxRKQI2f/dTOrjp+Qv33XeXYzqOip+AUmZiWWSfxO4CRgR+jklhscSkULcOqUXE1Y9l7+8feB/aFI3JY4RSV
mKSpI3s38AFwGNzCwbGEowuU8ys1uBLwANUydShlZuW8k5487JXx79FvRfWRX6JIFGBK40opLk3f2GQjZdHI39i0jxBTxA+2fbs3TrUgDqnFCH7W+fRc2VS4M9ZtyDD7W5VwoahVIkgUz/bDpVhlXJT/DTbpjG3sF7qTnnPdi8OZjcU1PhoosgEIhvsFImNKyBSALYf2g/TR9pyr7v9gHQtllbFtyygCpJVYIFkpKCj8WLITf3yI1OmlIv4akmL1IeBQKwY0ew5n0MT/77SWoNr5Wf4FdkrmDxrYuPJPg8jRsHm2t0o1Olopq8SHkTCECnTsHadvv2wZuTkr5fH9vx9Q6ajmyav3xL61uY0GNC4fs1041OlZCSvEh5k5MTTPBFNKsMnDmQUUtH5S9vHrCZ1Hqpx9533o1OUmkoyYuUN3nNKnk1+bBmlXU560h7Mi1/ecTFI7i3w73xiFIqCCV5kfImQrOKu3PZS5cxe+Ps/GJ7Bu2hbnV1eJei6cKrSHlQ8EJr2Pgx8zbNI2lYUn6Cn5QxCR/qSvBSLKrJi5SVQCDyRc9CLrQezD1IyydasnXfVgDSktP4sM+HVE3Sn60Un2ryImUhL5E3a/b9G5EiXGh9ftXznPjgifkJflGvRaztt1YJXkpMnxiRslBUj5mwC627L/gZDZ860i0yIy2DSRmTjh4KWKQEVJMXKQtF3YgUutD6p9fvpOEFi/NXr79zPZOvmawEL6WimrxIWSjiRqQNuzdw2t9Oy1/+Y8c/8kDnB+IRpSQgJXmRslLgRiR355rJ1/Dautfy1+36/S4a1GgQj+gkQSnJi8TB4i2LaT+hff7ycz2e4+bWN8cvIElYSvIiZSg3kMtZY89i3ZfrAGhWtxnr71xP9arV4xyZJKqYJ3kz+xzYBxwGct09PdbHFCmPJq+dzLWvHpkgbc6v59C5Rec4RiSVQVnV5Du5+5dldCyRslXYTU4hew/upd6IevnLXVp2YWbPmeo1I2VCXShFSqOom5yAhxY+dFSCz+qzhlmXvoDSu5SVskjyDrxtZsvNLLPgRjPLNLNlZrYsJyenDMIRiaJINzkBW/Zswe437n0nOELkXefdhf/pMGdc26/QLwSRWCiLJN/B3c8BugH9zeyC8I3uPs7d0909PTk5uQzCEYmiCDc53TrlVpqPap5fZPvd2xnVdVShXwgisRTzNnl33xr6udPM/gW0Ad6L9XFFykTYTU4rD2/lnGFH6k2ju42mf5v+R8oWMU68SKzENMmbWS0gyd33hZ53AYbF8pgiZS1g0H5qD5ZuXQpAnRPqsP2e7dSsVvPogpp+T+Ig1jX5JsC/Qr0IqgKvuPvMGB9TpMy89elbXPmPK/OXp90wjStOv6LwF2j6PSljMU3y7r4R+GksjyESD/sP7afJI034+ruvAWjbrC0Ley0kydRhTcoXfSJFSmjMB2OoNbxWfoJfkbmCxbcuJsk5enYnkXJAwxqIFNOOr3fQdOSRsd57te7Fsz2eDS4UMruTSLwpyYsUw8z1M+n2crf85c0DNpNaL/VIgaImBRGJI1U1RIqQ800ON75+I91e7kajmo0YcfEIfKgfneCh6ElBROJINXmRCNydl1a/xMBZA9l7cC9DLxzK4A6DCx8tUt0jpZxSkhfJExpobFO1b7j9rT7M3jibds3aMb77eFo1bnXs16t7pJRDSvIiAIEAuZ0v4onchfypMyTVqMnobqPp+7O+RXeLPMYIlCLxpjZ5EWDVunm0/fH73H1pgIs3OFnXvkv/Nv2PneCLGIFSpDxQkpdK7cChAwx6ZxDpr13GlkbVmPRaElOyO5D6w7OP/WINOCYVgJprpNKau2kumVMz2fDVBnq17sXDl/wfDX5zuPhNLxpwTCoAJXmpdHYf2M3v3v4dE1ZNoGX9lkdPw1erBDtSjxqpAJTkpdJwdyZnTebOGXeya/8uBp0/iCEXDqFGtRrHv1P1qJFyTkleKrZi9m7ZsmcL/af3Z+qnUz
k35Vxm3TiL1k1bl12cInGiC69ScRWjd0vAA4z5YAxpT6YxZ9McRnYZyZJei2htKRpITCoFJXmpuI7Ru2XtzrV0mNCBO2bcQfvU9qzpu4bfnjeAqhdfqm6PUmnEPMmbWVcz+8TM1pvZoFgfTyqRQsaLOZh7kKHzhnL202fz6a5PeeGqF5jZcyYt6rdQt0epdGI9/V8VYAxwKZAN/NvM3nT3rFgeVyqJCL1bFm5eSO+pvfn4y4/peWZPHrvsMZJrhU0Qr26PUsnE+sJrG2B9aIYozOyfQA9ASV6iI9S7Zc+3exg8ZzBjl43llHqnMKPnDLqe1vX75cO/GBo1gp071f1RElqsm2tOBraELWeH1uUzs0wzW2Zmy3L0r7MchykfTyHtyTSeXv40A9sOZE2/NZETfJ6kJEhOhs6d1TYvCS/uF17dfZy7p7t7enJy8rFfIBKybd82rpl8DVdNvIqGNRqy+NbFPHrZo9Q+ofaxX6y2eakkYp3ktwLhsys0C60TOW7uzjMrnuGMMWcw9ZOpDO88nOWZy2lzcpvi70STfEglEes2+X8DPzKzFgST+/XAL2N8TElgn+76lMypmbz7xbtceMqFjOs+jtMbnl7yHWlIAqkkYprk3T3XzO4AZgFVgAnuvjaWx5TEdOjwIR5e9DDD3h3GiVVPZHz38fQ6u1fRQwEfi4YkkEog5sMauPt0YHqsjyPlTKThBko6wUao/AeHPqf31Nv4aOdHZKRl8ETXJ0ipkxLb+EUSRNwvvEoCijTcQEkn2AgE+PqSCxh4SwrtnmnLrgO7eOO6N5j8vxNJ2Z+kIQlEiklJXqIvUs+VEvZmmbH8n/zkpwsZdZ5z+3IjK2M+PU7vrpmYREpISV6iL1LPlWL2Zsn5JocbX7+Ry6f3pEa1Grz/fBWe3NuReqmnqdujyHHQUMMSfYX1XCmiN4u789Lqlxg4ayB7D+5l6IVDGdz+XqrfvfdIeQ1JIFJiSvISG5F6rhTSm2XTV5u4fdrtzN44m3bN2jG++3haNW4V3NgkbEIPdXsUKTEleYmb3EAujy95nCHzh5BkSYzuNpq+P+t7pFtkpN446vYoUiJqk5foCARgx45gr5fw54VYtX0VbZ9pyz2z7+HiFheT1S+L/m36H53gdZFVpNSU5KX0whPyhRcWmZwPHDrAoHcGkT4unS17tzApYxJTrp9Car3Uo/epi6wiUaHmGim9ggnZ7OjkHGpembvhHTLfvI0Nez+nV+tePNzlYRrUaBB5n7rIKhIVSvJSeuEJuV27YJIPS867D+zmd2/fw4RVz9FyN8z54kw6/2l8sH29MLrIKhIVSvJSegUTsjvk5ODJyUzOmsydM+5k1/5dDFpoDJnn1GDdUTX8Qukiq0ipKclLdIQnZDO2nPgd/Sb2YNqn0zg35VxmzT2Z1rNXBr8QOqr5RaSsKMlLVAU8wNh/j2XQnEEEPMDILiP5zanXUbX/qcECSUkwcaKaX0TKiJK8RM3anWu5beptLM5eTJeWXXjqiqdoUb9FsPkm/CKqmmBEyoySvJTawdyDDH9/OH9d8FfqVq/Li1e/SM8ze2J5tXVdRBWJm5gleTP7M3AbkNfB+Q+hseUlgSzYvIDbpt7Gx19+TM8ze/LYZY+RXCvCXL26iCoSF7GuyT/m7o/E+BgSB3u+3cPgOYMZu2wsp9Q7hRk9Z9D1tK7xDktEClBzjZTYlI+n0G96P7Z/vZ2BbQcyrNMwap9QO95hiUgEsR7W4A4zW21mE8ysfqQCZpZpZsvMbFmObl0vO8UYX6agbfu2kTEpg6smXkXDGg1ZfOtiHr3sUSV4kXKsVEnezN4xszURHj2AsUBLoDWwDRgZaR/uPs7d0909PTk5QluuRF8JB/9yd55Z8QxnjDmDaZ9OY3jn4SzPXE6bk9uUTbwictxK1Vzj7pcUp5yZjQemleZYEkWRBv8q5KLop7s+JXNqJu9+8S4XnnIh47qP4/SGp5dxwCJyvGLWXGNmKWGLVw
NrYnUsKaa8Jprk5GNOxXfo8CGGvz+cs8aexartqxjffTxzb5qrBC9SwcTywutDZtYacOBz4PYYHkuKkpfcr7sOFi8OJvY5c2DXroj91j/Y+gG93+zNRzs/IiMtgye6PkFKnZRCdi4i5VnMkry7/ypW+5YSyGt/z2uegeDzXbu+10Tz9Xdf86e5f+LxpY9zUp2TeOO6N+jxPz3iELSIRIu6UCa68PZ3s+BNSRGaaGZ8NoO+b/Xliz1f0C+9H3+9JHj3qohUbJoZKtHljfVetSp07AjZ2TB/fn4TTc43OfR8vSeXv3I5NarV4P1b3mfMFWOU4EUShGryia6QcWPcnZdWv8TAWQPZe3AvQy8cyuAOg6letXqcAxaRaFKSrwwKjBuz6atN3D7tdmZvnE27Zu0Y3308rRq3imOAIhIrSvKVSG4gl8eXPM6Q+UNIsiRGdxtN35/1JcnUaieSqJTkK4mV21Zy29TbWL5tOd1P786Yy8eQWi813mGJSIwpySe4A4cOcP+79/PIokdoVLMRkzImkZGWcWSsdxFJaEryCWzuprlkTs1kw1cbuPXsW3n40oepXyPiOHEikqCU5BPQ7gO7uefte3hu1XO0rN+SOb+eQ+cWneMdlojEgZJ8AnF3JmdN5s4Zd7Jr/y4GnT+IIRcOoUa1GvEOTUTiREk+QWzZs4V+0/sx7dNpnJtyLrNunEXrpq3jHZaIxJmSfAV3OHCYscvGMnjOYAIeYGSXkfzmvN9QNUlvrYgoyZdfgUDwLtVGjeDLLyOOFrl251p6T+3NkuwldGnZhaeueIoW9VvEKWARKY90F0x5FD5zU8OGcPLJR83gdDD3IEPnDeXsp8/ms12f8WLn0cz85QwleBH5HiX58ih85Mg9e+Dw4fwZnBZsXkDrp1sz7L1hXJt2Deve/hE3dh6Adep0zGn8RKTyUZIvj8JHjqxXD6pUYU/HNvT991A6PteRA4cOMKPnDF7q8CjJ7y47eho/EZEwpZ3I+xozW2tmATNLL7BtsJmtN7NPzOyy0oVZyeSNHJmdDbt2MWXBs6Rd+TnjVoxnYNuBrOm3hq6ndT36y6CQafxEpHIr7YXXNcAvgKfDV5pZGnA90Ao4CXjHzE5398OlPF7lkZTEtpoB7nztOl5b9xpnNTmLN657g5+d/LMjZQoZRlhEJE+pkry7rwMijYPSA/inux8ENpnZeqANsLg0x6ssAh7g2RXP8rvZv+Pb3G8Z3nk497S/h2pVqn2/cIFhhEVEwsWqC+XJwJKw5ezQuu8xs0wgE6B58+YxCqfi+OTLT8iclsl7X7zHRadexNNXPs3pDU+Pd1giUkEdM8mb2TtA0wib7nP3KaUNwN3HAeMA0tPTvbT7q6gOHT7Ew4seZti7wzix6ok80/0Zep3dS6NFikipHDPJu/slx7HfrUD4YOXNQuskgg+2fkDvN3vz0c6PyEjL4ImuT5BSJyXeYYlIAohVF8o3gevNrLqZtQB+BHwQo2NVWF9/9zUDZtxF22fasvvAbt647g0mXzNZCV5EoqZUbfJmdjXwNyAZeMvMVrn7Ze6+1swmAVlALtBfPWuONuOzGfR5qw+b92ym3zLjr9+cQt0B3eMdlogkmNL2rvkX8K9Ctj0IPFia/SeinG9yGDBrAK989Ar/84Mf8f7zVeiw6TBU/SDYFVI9ZUQkinTHaxlxd1748AXOGHMGk9dOZuiFQ1nVbzUdUs/XzUwiEjMahbIMbPpqE7dPu53ZG2fTrlk7xncfT6vGrYIbdTOTiMSQknwM5QZyeXzJ4wyZP4QkS2LM5WPok96HJAv7B0o3M4lIDCnJx8jKbSu5beptLN+2nO6nd2fM5WNIrZd67BeKiESRknyU7T+0n/vn38/IxSNpVLMRkzImkZGWoZuaRCQulOSjaO6muWROzWTDVxu49X9+ycPd/0b9mg3iHZaIVGLqXRMFuw/spteUXlz8wsUAzFl5Js/cOIn63a7WRB4iEldK8qXg7kxcM5
EzxpzBCx++wKDzB/HR/75D57fWaSIPESkX1FxznLbs2UK/6f2Y9uk0zk05l1k3zqJ109bgHuzzvmiR+r6LSNwpyZfQ4cBhxi4by+A5gwl4gJFdRvKb835D1aTQqdREHiJSjijJl8DanWvpPbU3S7KX0KVlF5664ila1G/x/YLq+y4i5YSSfDEczD3Ig+8/yIgFI6hbvS4vXv0iPc/sqW6RIlLuKckfw4LNC7ht6m18/OXH3HjWjTza5VGSayXHOywRkWJR75pwgQDs2AHu7Pl2D32n9aXjcx05cOgAM3rO4MWrX1SCF5EKRTX5PIEAdOoEixbxxs9Pp//5/2X719sZ2HYgwzoNo/YJtQt/nS6yikg5VaqavJldY2ZrzSxgZulh6081swNmtir0eKr0ocZYTg7bVi8k4xe5XH1WFo1O+AFLbl3Co5c9WnSC79QJmjWDiy7SjU8iUu6Utia/BvgF8HSEbRvcvXUp918mAh7g2ewp/K4/fGswfMOp3HPfSqpVPaHoF+bkBPvDh9/4pF41IlKOlKom7+7r3P2TaAUTM2Ft7QV98uUndHq+E5nTbufs0zqy+saFDH5+Y9EJPm9/ycnBG5406YeIlFOxvPDawsxWmtm7ZtaxsEJmlmlmy8xsWU4shgAopEnlu8Pf8eB7D/LTp37Kh9s/5JnuzzD3prmc/uP2Rbeth++vUyeYMweys2H+fLXJi0i5Yx6hdntUAbN3gKYRNt3n7lNCZeYD97j7stBydaC2u+8ys3OBN4BW7r63qGOlp6f7smXLSvxLFGnHjmBCzs0N1rizs/kg9wt6v9mbj3Z+RMYPr+SJHk+TUvek496fmmhEJJ7MbLm7p0fadsyavLtf4u4/ifCYUsRrDrr7rtDz5cAG4PTj/QVKpXHj/CaVrzuex4CVw2n7TFt2H9jNG6vTmNxrJindbyj+RdOw/amJRkTKu5g015hZsplVCT3/IfAjYGMsjlWMYGDePGYsep5WP9/C40ufoG96X7KumU+PNz8t+WiReWPTqIlGRCqA0nahvNrMsoF2wFtmNiu06QJgtZmtAl4F+rj77lJFepxyvsmh5xu/4vLpPalVrRYLblnAmCvGULdZy+OvkeeNTaMELyLl3DHb5MtSNNvk3Z0XV7/Ib2f9lr0H9/KHjn9gcIfBVK9a/Ugh3cgkIgmgqDb5hLzjdeNXG+kzrQ+zN86mXbN2jO8+nlaNW32/oEaLFJEEl1BJPjeQy6gloxgybwhVkqow5vIx9EnvQ5JpiB4RqZwSJsmv3LaS3lN7s2LbCrqf3p0xl48htV5qvMMSEYmrhEjy8zbN49IXL6VRzUZMyphERlqGxnoXESFBknyH5h24r+N9DGg7gPo16sc7HBGRciMhkny1KtW4v9P9xy6o3jQiUslUniuSGhZYRCqhypPkIw0LLCKS4CpPkteYMyJSCSVEm3yx5I05ozZ5EalEKk+SB93hKiKVTuVprhERqYSU5EVEEpiSvIhIAlOSFxFJYEryIiIJTEleRCSBlauZocwsB/iiFLtoBHwZpXCiSXGVjOIqGcVVMokY1ynunhxpQ7lK8qVlZssKmwIrnhRXySiuklFcJVPZ4lJzjYhIAlOSFxFJYImW5MfFO4BCKK6SUVwlo7hKplLFlVBt8iIicrREq8mLiEgYJXkRkQRWoZK8mV1jZmvNLGBm6QW2DTaz9Wb2iZldVsjrW5jZ0lC5iWZ2QozinGhmq0KPz81sVSHlPjezj0LllsUilgLH+7OZbQ2L7fJCynUNncf1ZjaoDOJ62Mw+NrPVZvYvM/tBIeVifr6O9bubWfXQ+7s+9Fk6NRZxRDhuqpnNM7Os0N/AXRHKXGRme8Le3yFlFFuR74sFPRE6Z6vN7JwyiOnHYedhlZntNbMBBcqUyfkyswlmttPM1oSta2Bms83ss9DP+oW89qZQmc/M7KbjCsDdK8wDOAP4MTAfSA9bnwZ8CFQHWgAbgCoRXj8JuD70/CmgbxnEPB
IYUsi2z4FGZXj+/gzcc4wyVULn74fACaHzmhbjuLoAVUPP/w/4v3icr+L87kA/4KnQ8+uBiWX03qUA54Se1wE+jRDbRcC0svo8Ffd9AS4HZgAGtAWWlnF8VYDtBG8YKvPzBVwAnAOsCVv3EDAo9HxQpM880ADYGPpZP/S8fkmPX6Fq8u6+zt0/ibCpB/BPdz/o7puA9UCb8AJmZkBn4NXQqueBq2IYbt4xrwX+EcvjRFkbYL27b3T374B/Ejy/MePub7t7bmhxCdAslscrQnF+9x4EPzsQ/CxdHHqfY8rdt7n7itDzfcA64ORYHzdKegAveNAS4AdmllKGx78Y2ODupbmb/ri5+3vA7gKrwz9HheWiy4DZ7r7b3b8CZgNdS3r8CpXki3AysCVsOZvv/wE0BP4blkwilYm2jsAOd/+skO0OvG1my80sM8ax5Lkj9C/zhEL+RSzOuYylXgRrfZHE+nwV53fPLxP6LO0h+NkqM6EmorOBpRE2tzOzD81shpm1KqOQjvW+xPszdT2FV7Ticb4Amrj7ttDz7UCkKeuict7K3fR/ZvYO0DTCpvvcfUpZx1OYYsZ5A0XX4ju4+1YzawzMNrOPQ9/6MYkLGAs8QPCP8gGCTUm9SnO8aMSVd77M7D4gF3i5kN1E/XxVNGZWG3gNGODuewtsXkGwSeLr0PWWN4AflUFY5fZ9CV13+zkwOMLmeJ2vo7i7m1nM+rKXuyTv7pccx8u2Aqlhy81C68LtIvhvYtVQDSxSmWI7VpxmVhX4BXBuEfvYGvq508z+RbC5oFR/HMU9f2Y2HpgWYVNxzmXU4zKzm4ErgYs91CAZYR9RP18FFOd3zyuTHXqP6xH8bMWcmVUjmOBfdvfXC24PT/ruPt3MnjSzRu4e08G4ivG+xOQzVUzdgBXuvqPghnidr5AdZpbi7ttCTVc7I5TZSvC6QZ5mBK9HlkiiNNe8CVwf6vnQguC38QfhBUKJYx6QEVp1ExDL/wwuAT529+xIG82slpnVyXtO8OLjmkhlo6VAO+jVhRzv38CPLNgT6QSC/+q+GeO4ugK/B37u7vsLKVMW56s4v/ubBD87EPwszS3sSymaQu3+zwLr3P3RQso0zbs+YGZtCP59x/QLqJjvy5vAr0O9bNoCe8KaKmKt0P+m43G+woR/jgrLRbOALmZWP9S02iW0rmRifWU5mg+CiSkbOAjsAGaFbbuPYM+IT4BuYeunAyeFnv+QYPJfD0wGqscw1r8DfQqsOwmYHhbLh6HHWoLNFrE+fy8CHwGrQx+ylIJxhZYvJ9h7Y0MZxbWeYNvjqtDjqYJxldX5ivS7A8MIfgEBnBj67KwPfZZ+GOvzEzpuB4LNbKvDztPlQJ+8zxlwR+jcfEjwAnb7Mogr4vtSIC4DxoTO6UeE9YyLcWy1CCbtemHryvx8EfyS2QYcCuWvWwlex5kDfAa8AzQIlU0Hngl7ba/QZ209cMvxHF/DGoiIJLBEaa4REZEIlORFRBKYkryISAJTkhcRSWBK8iIiCUxJXkQkgSnJi4gksP8HtwliY/xdrkEAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "eval_data = list(get_data(50))\n", - "x_target_label = np.array([-10, 10, 0.1])\n", - "y_target_label = x_target_label * 2 + 3\n", - "x_eval_label,y_eval_label = zip(*eval_data)\n", - "\n", - "plt.scatter(x_eval_label, y_eval_label, color=\"red\", s=5)\n", - "plt.plot(x_target_label, y_target_label, color=\"green\")\n", - "plt.title(\"Eval data\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "mediterranean-pathology", - "metadata": {}, - "source": [ - "In the preceding figure, the green line indicates the objective function, and the red points indicate the verification data `eval_data`.\n", - "\n", - "### Defining the Data Argumentation Function\n", - "\n", - "Use the MindSpore data conversion function `GeneratorDataset` to convert the data type to that suitable for MindSpore training, and then use `batch` and `repeat` to perform data argumentation. The operation is described as follows:\n", - "\n", - "- `ds.GeneratorDataset`: converts the generated data into a MindSpore dataset and saves the x and y values of the generated data to arrays of `data` and `label`.\n", - "- `batch`: combines `batch_size` pieces of data into a batch.\n", - "- `repeat`: multiplies the number of datasets." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "distinct-grammar", - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore import dataset as ds\n", - "\n", - "def create_dataset(num_data, batch_size=16, repeat_size=1):\n", - " input_data = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data','label'])\n", - " input_data = input_data.batch(batch_size)\n", - " input_data = input_data.repeat(repeat_size)\n", - " return input_data" - ] - }, - { - "cell_type": "markdown", - "id": "approved-plasma", - "metadata": {}, - "source": [ - "Use the dataset argumentation function to generate training data and view the training data format." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "shared-spirituality", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The dataset size of ds_train: 100\n", - "dict_keys(['data', 'label'])\n", - "The x label value shape: (16, 1)\n", - "The y label value shape: (16, 1)\n" - ] - } - ], - "source": [ - "data_number = 1600\n", - "batch_number = 16\n", - "repeat_number = 1\n", - "\n", - "ds_train = create_dataset(data_number, batch_size=batch_number, repeat_size=repeat_number) \n", - "print(\"The dataset size of ds_train:\", ds_train.get_dataset_size())\n", - "dict_datasets = next(ds_train.create_dict_iterator())\n", - "\n", - "print(dict_datasets.keys())\n", - "print(\"The x label value shape:\", dict_datasets[\"data\"].shape)\n", - "print(\"The y label value shape:\", dict_datasets[\"label\"].shape)" - ] - }, - { - "cell_type": "markdown", - "id": "irish-strap", - "metadata": {}, - "source": [ - "Use the defined `create_dataset` to perform argumentation on the generated 1600 data records and set them into 100 datasets with the shape of 16 x 1.\n", - "\n", - "## Defining the Training Network\n", - "\n", - "In MindSpore, use `nn.Dense` to generate a linear function model of single data input and single data output.\n", - "\n", - 
"$$f(x)=wx+b\\tag{1}$$\n", - "\n", - "Use the Normal operator to randomly initialize the weights $w$ and $b$." - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "deadly-secretariat", - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.common.initializer import Normal\n", - "from mindspore import nn\n", - "\n", - "class LinearNet(nn.Cell):\n", - " def __init__(self):\n", - " super(LinearNet, self).__init__()\n", - " self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02))\n", - " \n", - " def construct(self, x):\n", - " x = self.fc(x)\n", - " return x" - ] - }, - { - "cell_type": "markdown", - "id": "better-planner", - "metadata": {}, - "source": [ - "Call the network to view the initialized model parameters." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "documented-january", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Parameter (name=fc.weight) [[-0.02573255]]\n", - "Parameter (name=fc.bias) [0.01332773]\n" - ] - } - ], - "source": [ - "net = LinearNet()\n", - "model_params = net.trainable_params()\n", - "for param in model_params:\n", - " print(param, param.asnumpy())" - ] - }, - { - "cell_type": "markdown", - "id": "pleasant-bracelet", - "metadata": {}, - "source": [ - "After initializing the network model, visualize the initialized network function and training dataset to understand the model function before fitting." 
- ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "twenty-smart", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYQAAAD8CAYAAAB3u9PLAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAqfElEQVR4nO3deZyNdf/H8dfnzFjKvgzJkhYUiZhkSShF7rq1U93dLe5bulMkrQppd5f2FOmnuitKSgtqQipKyBLJLpEtW0qWmfP9/XHOnDmmM5uzz7yfj8d5zLmu63uu6zPXnDmfc323y5xziIiIeOIdgIiIJAYlBBERAZQQRETETwlBREQAJQQREfFTQhARESACCcHM6prZDDP7wcyWmlk///qhZrbRzBb6H93CD1dERKLFwh2HYGa1gFrOue/MrAIwH7gQuBz43Tn3eNhRiohI1KWGuwPn3CZgk//5HjNbBtQOd78iIhJbYV8hHLIzs/rAF8DJwADgWuA3YB5wm3NuZ4jX9AZ6A5QrV67liSeeGLF4RERKgvnz5//qnEsLdz8RSwhmVh6YCTzknJtoZjWBXwEHPICvWun6/PaRnp7u5s2bF5F4RERKCjOb75xLD3c/EellZGalgHeBN5xzEwGcc1ucc1nOOS8wGmgViWOJiEh0RKKXkQFjgGXOuRFB62sFFbsIWBLusUREJHrCblQG2gFXA9+b2UL/unuAK8ysOb4qo3XADRE4loiIREkkehl9BViITZPD3beIiMSORiqLiAighCAiIn5KCCIiAighiIgkrZ1/7qT+U/Ujtj8lBBGRROb1wpYtEDSI2DlHz3d6UHV4VX7a/VPEDqWEICKSqLxe6NQJ6tSBjh3B62XswrF4hnkY/8PbAAxZWy9ih4vEOAQREYmGbdtg9mzIzOSHFbNo8kBKYNOpm+Cbl6G0/cLDEfos1xWCiEiiqlGDP844nXq3QpM+WYHVa29Zw3fLzqS0pULbthyEzEgcTlcIIiIJqu+Um3m+46zA8ns93uPCEy/0LcyY4buCqFEDPJH5bq+EICKSYCb9OIkLx18YWO57Wl+e7fbsoYU8HqhZM6LHVUIQEUkQ63at49injw0s161Yl2U3LaNc6XIxOb4SgohIvHi9sG0bB6pVpvWYNizYvCCwacmNS2hSo0lMw1FCEBGJB3+X0vtTvmJoB29g9f91/z+ubX5tXEJSQhARiYMZC9/jrLO+CCz3OL47b131Hr5bzMSHEoKISAxt+X0LRz1xVGC5TCZsmt2GKoPfgzgmA4jMHdPqmtkMM/vBzJaaWT//+qpmlmFmK/0/q4QfrohIcsryZtH1f10PSQbfXD+bfX03U+WzWXFPBhCZgWmZwG3OucZAa+AmM2sM3AVMc841AKb5l0VESpznvn2O1AdS+WT1JwCMOHcEbojj9LptfF1HEyAZQGTumLYJ2OR/vsfMlgG1ge5AR3+xV4HPgTvDPZ6ISMLz9x6an/kz6S+fFljdqX4nPr36U1I9iVlbH9GozKw+cCowB6jpTxYAm4HIjqAQEUlEXi+7O7enbuvZ7CmTs/qXAb9Qq0Kt+MVVCBGby8jMygPvAv2dc78Fb3POOcDl8breZjbPzOZt27YtUuGIiMScc45/jLucyh1ykkHG+W/jhriETwYQoYRgZqXwJYM3nHMT/au3mFkt//ZawNZQr3XOjXLOpTvn0tPS0iIRjohIzL2x+A08wzy8sfJdAO75ynDTz6Rzi0vjHFnhhV1lZL5Os2OAZc65EUGbPgCuAR71/5wU7rFERGLO3x5AjRohG39//PVHTnr+pMBy0xpNmdtrDmV2/pbnaxJVJNoQ2gFXA9+b2UL/unv
wJYK3zawX8BNweQSOJSISO9k3qJk9G9q29c0w6p9ZdO/BvZz8wsms3bU2UHzVzas4vurxvoWaR8Qj4rBEopfRV0BeKfDscPcvIhI3QTeoYfZs33LNmtw69VaemvNUoNiEyyZwSeNL4hdnhCRm3ycRkURQo4bvysB/hfDRrm+54MW/Bzbf0PIGRv5tZFynm4gkJQQRkbyYwYwZrF+zgGPeSIdxvrmHaparyapbVlG+dPk4BxhZSggiInk4mHWQM/7vDL7d+G1g3aI+izil5ilxjCp6dE9lERHwNSBv2QLON2TqoS8eovSDpQPJYPT5o3BDXLFNBqArBBGRQ3oTfXleE85suSiw6aKt1Zgwahee6f+DGb0idv/iRKSEICKybRvbFs6ixr1ZgC8ZeMzDln9+T/UGzSAz65BeRsVV8U11IiKF4HVezp/WixoDsgLrZl33FVmDs6h+zEm+Xkapqb6fNWrEMdLo0xWCiJRYL857kRs/vjGw/Ojpg7izywM5o4v9vYzyG6lcnCghiEiJs2DTAlqMahFYbl+vPdOvmZ4zLXXwdBUeT7GuJgqmhCAiJcZv+3+j/lP12blvZ2Ddz7f+TJ2KdXIK5TNdRXFXMn5LESnRnHNcN+k6Kj1aKZAMplw1BTfEHZoMIPR0FSWEEoKIJKdc4wbyMm7JODzDPIxdOBaA29vejhvi6HpC19AvyJ6uooQ0JAdTlZGIJJ9CVOus3L6Shs81DCw3qtaIhX0WUja1bP77LmENycGUEEQk+eQxCynAnwf/pNmLzVi5Y2Wg+Iq+K2hQrUHh91+CGpKDqcpIRJJPHtU6d2TcwZEPHxlIBuMuGYcb4oqWDEowXSGISPLJVa0zdfUnnPfGeYHN1zW/jjF/H1NspqWOlYgkBDN7BTgf2OqcO9m/bijwbyC7if4e59zkSBxPRASPh41HZFJnWE5FR9UjqrK231oqlqkYx8CSV6SqjMYCoZrsn3TONfc/lAxEJCIyvZmc8coZ1Hkyp8vod72/Y/vAbVTc9WeBPY8ktIgkBOfcF8COSOxLRCQ/w2cNp9QDpZj18ywARv5tJG6I49SazXw9j+rUgY4dfT2RpEii3YbQ18z+CcwDbnPO7cxdwMx6A70B6tWrF+VwRCRZzVo/izP+74zA8vkNz2dSz0l4zP+9Np+eR1I40exlNBI4HmgObAKeCFXIOTfKOZfunEtPS0uLYjgikoy2792O537PIclg68CtfHjFhznJAEr0gLJIidoVgnNuS/ZzMxsNfBStY4lI8eN1Xi55+xLe//H9wLqZ187kzGPODP2CEjygLFKidoVgZrWCFi8ClkTrWCJSvIyeP5qUYSmBZPDQWQ/hhri8k0E2jwfS0mDrVjUsH4ZIdTt9C+gIVDezDcAQoKOZNQccsA64IRLHEpHia/GWxTR7sVlg+fTap/PldV9SKqVU4XZQgmcqjYSIJATn3BUhVo+JxL5FpPjbs38PJzx7Alv/2BpY91P/n6hXqYgdTdSwHBalThGJG+ccvT/sTcVHKwaSwYeLmuDuyyp6MgA1LIdJU1eISFxM+GECl71zWWC5/xzjySkOUpcf/jd752DcOF+Dcs2aalguIiUEEYmp1TtWc8KzJwSWj69yPN/3WcwR55wHqbMP/5t9qPYDJYQiUUIQkZjYn7mflqNasnTb0sC6ZTct48TqJ/oWwu0yqvaDsKkNQUSibtC0QZR9qGwgGbx+0eu4IS4nGUDOPQgO91u92g/CpisEEYmajNUZnPu/cwPL/zjlH7x24WvRmZZaA9PCpoQgIhH3y55fqD2idmC5QukKrL91PZXLVo7ugUvonc4iRQlBRCIm05vJOa+fw+frPg+sm/vvuaQfnR6/oKTQ1IYgIhEx4usRlHqgVCAZPNP1GdwQp2SQRHSFICJhmbNhDq3HtA4sdzm+Cx9f+TEpnpQ4RiWHQwlBRA7Ljj93UOuJWhzIOhBYt/m2zdQsrzr8ZKUqIxEpEuccl79zOdWGVws
kg+n/nI4b4pQMkpyuEESk0MYuHMt1k64LLA/tMJQhHYfEMSKJJCUEESnQ0q1LOXnkyYHllrVaMrvXbEqnlI5jVBJpSggikqc/DvxBo+casXHPxsC6tbesoX6VY+MYlURLRNoQzOwVM9tqZkuC1lU1swwzW+n/WSUSxxKR2Og7uS/lHykfSAbvLW6MezCV+hde65tIToqdSDUqjwW65lp3FzDNOdcAmOZfFpEE9/6P72P3G8/PfR6Avqf1xfXZzIUfrDh04jgpdiJ1x7QvzKx+rtXd8d1WE+BV4HPgzkgcT0Qib+3OtRz3zHGB5boV67LspmWUK13Od5+Btm1zppbWxHHFUjTbEGo65zb5n28GQvZHM7PeQG+AevUO4w5JIhKWA1kHaP1yaxZsXhBYt+TGJTSp0SSnkCaOKxFiMg7BOecAl8e2Uc65dOdcelpaWizCERGvF7ZsYciMwZR5sEwgGYztPhY3xB2aDLKFOz21JLxoXiFsMbNazrlNZlYL2FrgK0Qk+rxeZlx0Kme1WBxY1aNJD9665K3oTEstSSOaCeED4BrgUf/PSVE8logUwubfN1PriVrQwrdc9iD80vtHqtRrFN/AJCFEqtvpW8DXQCMz22BmvfAlgnPMbCXQ2b8sInGQ5c3i3NfP9SUDv29eSeHPL8+kSu0TYMsWX8OxlGiR6mV0RR6bzo7E/kXk8D0751lumXpLYHnEuSO49fR+0GcbVK8OZ5116I3pPZrirKTSSGWRYmreL/M4bfRpgeWzjj2LT//xac601DVr+q4MdGN68VNCEClmdu3bRZ0Rdfjj4B+Bdb8M+IVaFWr9tXD2jek1vkBQQhBJfF5vofr/O+e4auJVvLXkrcC6jKsz6Hxc57z3rfEFEkSVhSKJzOuFTp2gTh3o2DHPOYReX/Q6nmGeQDIY1H4QbojLPxlk0/gC8dMVgkgi27Yt3zr+ZduW0fiFxoHlpjWaMvffcymTWiYe0UqSU0IQSWR51PHvPbiXk184mbW71gaKrrp5FcdXPT5ekUoxoIQgkmhytxnkquPvP7U/T895OlB8wmUTuKTxJXEMWIoLtSGIxJp/HqGQA8FCtRn46/g/Wvkxdr8FksENLW/AO9irZCARoysEkVjK/sDPayBYiDaD9WX3c8xTxwSK1CxXk1W3rKJ86fJx+AWkONMVgkgshWokDpbdZpCaysF2rWn1wQWHJIPFfRazeeBmJQOJCiUEkVgK+sAPORDM32bw4KTbKN3pK+b+MheA0ReMxg1xNK3ZNA5BS0mhKiORWCpgINgXP31Bh7EdAssXn3Qx71z2Dh7TdzeJPiUEkVjLHggWZOsfW6n5eM66FEthy8AtVDuyWqyjkxJMCUEkjrzOy/lvns+UVVMC62ZdP4u2ddvGMSopqZQQROJk5NyR/GfyfwLLj3V+jDva3RHHiKSki3pCMLN1wB4gC8h0zqVH+5giiWzBpgW0GNUisNy+XnumXzOdVI++n0l8xeod2Mk592uMjiUSe4WYkXT3vt0c89Qx7N6/O7Buw60bqF2xdqyiFMmXui6IhKuAGUmdc1z7/rVUfqxyIBlM/dtbuMFeJQNJKLFICA741Mzmm1nv3BvNrLeZzTOzedtyD9IRSQb5DDYbt2QcnmEeXl30KgB3tLkdN/1MurS5Ot/prEXiIRZVRmc45zaaWQ0gw8x+dM59kb3ROTcKGAWQnp6uu3xL8gkxI+mK7Sto9FyjQJFG1RqxsM9Cym7fDbPr6JaVkpCinhCccxv9P7ea2XtAK+CL/F8lkkSCBpv9WaUCzZ5rxModKwObV/RdQYNqDXwLNcrolpWSsKJaZWRm5cysQvZz4FxgSTSPKRIXHg+3L3qcIx8pF0gG4y4ZhxvicpIB5CSPDRvg8891lzJJKNG+QqgJvGe+N30q8KZzbmqUjykSU1NWTqHbm90Cy9c3v56X//4ylteHfYiRyiKJIKoJwTm3BmgWzWOIxMuG3zZQ98m6geVqR1RjTb8
1VCxTMY5RiRw+jYQRKaKDWQfpMLYDX2/4OrBuwQ0LaH5U85yb3+QzHkEkUWkcgkgRPPbVY5R+sHQgGYz820jcEJeTDPIZjyCS6HSFIFII63evP+RGNRc0vID3e75/6LTUocYjqK1AkogSgkg+9h7cy/BZwxk+azgAHvOw+bbNpJVL+2vhEOMRRJKJEoJICM45xi8dzx0Zd/Dzbz/To0kPHuv8GMdUPibvFxVw8xuRRKeEIAKHTE43f9N39Jvaj1k/z+LUo07ljYvfoP0x7Qu3H3UplSSmhCDibwzevGgWg3pU5/+O3kpauTRevuBlrm1+LSmelPxfqysCKSbUy0hKvP2bN/Bf71c0/E8Wr9fcwm3N+rCi7wp6tehVcDJQryIpRpQQpMRyzvHh8g85+d2zuaOzlw4/GUu+Tee/3Z+nUtlKBe8gn1lORZKRqoykRFq6dSm3fnIrGWsyOKn6SUy9cjJdKrYoWtWPehVJMaOEICXKjj93MPTzobww9wUqlKnA012f5sb0GymVUqroO1OvIilmlBCkRMj0ZvLSvJcY/Plgdu3bxQ0tb2BYp2FUP7J6eDtWryIpRpQQpNibtmYa/ab2Y+m2pXSq34mnuz5N05pN4x2WSMJRQpDkVIjunqt3rGZgxkDe//F9jq18LBMvm8CFVdthNfSNXiQU9TKS5FNAd889+/dw12d30fiFxmSszuCRsx/hhxuXcNGNz2B166qLqEgeop4QzKyrmS03s1Vmdle0jyclQB7dPb3Oy9iFY2n4XEMem/UYV5x8BStuXsFdZ9xF2Z171EVUpABRrTIysxTgeeAcYAMw18w+cM79EKr84sW+L30eT/4Ps4LLFKdyyRBjUcqFLUR3z9k/z6bf1H7M+2Uereu0ZlLPSbSq3Srf14jIocw5F72dm7UBhjrnuviX7wZwzj0SqnxaWrrr3n0eXi8FPpwruEyilJNDFSZ5FFzG4XFZZFXcxK+n3sWe+m+S+ufRHPX9cKpuvIIUjyfEPh2erIN4ypTyrcs8gKdsaTweK9Sxv/kGfvopsufC44HUVN8jJSXneX7rQq0vbLlorCtKPLm3BT/3eCJ7bksSM5vvnEsPdz/RblSuDfwctLwBOD24gJn1BnoD1KtXj5dfjnJEcZBfEilsgkmU5BbLcvmVPeD2sbTS4/xQ9VEcWZy07V4abr0TT5nyeOvntT/DudJ4vQ7vgkVk7v4db8VKeBufjNdrBR430skA/L/LAd9DJJuZL1nml1iDH5ES915GzrlRwCiA9PT06F2uxFHwH1fC45xjwg8TGJgxkPW713Np40v57zn/pX7l+oXfyZatUOc0IBP2psL7G+IyliA76WRm5jyysg5dzmtdqPWFLRer4+RVLve27OdRrKxIOs7lnJ/9+2N33GgnhI1A3aDlOv51IkW2YNMC+k3tx5frv6RZzWa8duFrdKjfoeg7SpD2hOAvCmXKxCUESVBery9R5pdUgx9NIzSsJtoJYS7QwMyOxZcIegJXRvmYUsxs/WMr906/l5e/e5lqR1bjpfNfotepBcxEmh9NOSEJLrvdqtRhzKgSjqgmBOdcppn1BT4BUoBXnHNLo3lMKT4OZB3guW+f4/6Z97P34F76t+7P4A6DqVy2cvg793g05YRILlFvQ3DOTQYmR/s4Eideb+hv2nmtL8S+XFoak1dNYcCnA1ixfQXdGnRjxLkjaFS9UXR+BxEB0EhlCYPXG3rEcF7rC7GvZc1q061/dc5/63wM4+MrP+bjnh/SKKuyWh1FokwJQQ5fXjeIOYwbx+zcsJL+5b7klN5ZfF1uJyPa3M/iGxfT7fiuuiuZSIwoIcjhy+6tk5p6aG+dvNaHkOXN4sV5L9JgXDueaeXotdBYObc1t55zH6VTSuuuZCIxFPdxCJLE8uqtU8hePDPWzqD/J/1ZvGUxHY7pwFPnjqB5Su1DX5MgXURFSgIlBAlPXr118unFs3bnWgZmDGTisokcU+kYJlw2gYtPuhg
LlTjURVQkZpQQJGZ+P/A7j3z5CE98/QQpnhQe7PQgA9oM4IhSR+QUCtU7SV1ERWJCbQhSdF4vbNmSM/dC9vO8ijsvry16jYbPNuThrx7m8iaXs6LvCgadOeivyUANyCJxoysEKZrsD+3Zs6FNG9+3+Oz6/Rkz/jJl5ZwNc7hl6i18u/FbWtVuxcQeE2ldp3XofYdqQNaVgUjMKCFI0eT+0DYL+QG+cdfP3D1lAK+vmECt8rV49cJX+ccp/8Bj+VyUqgFZJK6UEKRogj+0c18h1KjBvsx9jJj9BA9nDOYgXu7eUJe7X/ieCkdUKnjfakAWiSslBCma3B/azgWmm5i4bCIDMwaybtc6Ll5p/PdTOG7PJnh0HxQmIYAakEXiSI3KUnTZH9r+W4stcps56/WzufSdS6lQugLTvmvKu+Mdx+0yVf2IJBFdIchh2/bHNu6bcR+jvxtNlbJVeKHbC/y7zt9JvaW+r4DHA+PHq+pHJEkoIUiRHcw6yPNzn2fo50P5/cDv3NzqZoZ0GEKVI6r4qpCCG4ZV/SOSNJQQpEimrPRNS/3jrz/S5fgujOgygsZpjXMKqGFYJGlFrQ3BzIaa2UYzW+h/dIvWsST6lv+6nL+9+Te6vdmNLG8WH13xEVOumnJoMsgW3MYgIkkj2lcITzrnHo/yMSSKdu3bxQMzH+CZb5/hyFJH8vg5j3Pz6Tf7ZiIVkWJFVUYSUpY3izELxnDv9Hv5de+v/KvFv3jwrAepUU49hkSKq2h3O+1rZovN7BUzqxKqgJn1NrN5ZjZvm+a6j6xCzDMUysx1M2k5qiU3fHQDJ1Y/kfm95zPqglFKBiLFXFgJwcw+M7MlIR7dgZHA8UBzYBPwRKh9OOdGOefSnXPpaWlp4YQjwQ5jorh1u9Zx+TuX0/HVjuzct5Pxl45n5rUzObXWqVEPV0TiL6wqI+dc58KUM7PRwEfhHEuKqAgTxf1x4A8e/epRHv/6cQzj/o73M7DtQI4sdWSMgxaReIpaG4KZ1XLObfIvXgQsidaxxC/4XgKFmCjOOceb37/JnZ/dycY9G7my6ZU8evaj1K1UNw7Bi0i8RbNRebiZNQccsA64IYrHKtmy2wp69ICvv86Zijqf8QBzN86l39R+fL3ha1rWasn4S8fTrl67OP0CIpIIopYQnHNXR2vfEiT4/gSZmb51wVVEuaqJNu3ZxN3T7ubVRa9Ss1xNXvn7K1zT/Jr8p6UWkRJB3U6TXXBbgX+yuVBVRPsy9/HUN0/x0JcPcSDrAHe2u5N72t9DxTIV4xS4iCQaJYRkl7utYPz4Q0YJO+eYtHwSt316G2t2rqF7o+48fu7jnFD1hDgHLiKJRgkh2eUzd9D3W76n/yf9mb52Ok3SmpBxdQadjytUxzARKYGUEIqDXDeV2b53O4NnDObF+S9SqUwlnjvvOW5Iv4FUj/7cIpI3fUIUIwezDjJy3kiGfj6U3/b/xn/S/8PQjkOpdmS1eIcmIklACaGY+HT1p/Sf2p9lvy6j83GdearLUzSp0STeYYlIElFCSHIrt6/ktk9v48MVH3J8leOZ1HMSFzS8ANPU0yJSREoISWr3vt08+MWDPD3nacqmlmV45+HccvotlEktE+/QRCRJKSEkmSxvFmMXjuWe6few7Y9tXNf8Oh46+yGOKn9UvEMTkSSnhJBEvlr/Ff2m9uO7Td/Rtm5bPr7yY9KPTo93WCJSTCghJIH1u9dzR8YdjF86njoV6/DmxW/S8+SeaicQkYhSQoiX4JlJ8/hg33twL8NnDWf4rOE4HEM6DOH2trdTrnS5GAcrIiWBEkKs5TUzqSdncjnnHOOXjuf2jNvZ8NsGejTpwfBzhlOvUr04Bi4ixZ0SQiwVNDMpMP+X+fSb2o9ZP8/i1D3leXNiCu3rboKL68QxcBEpCTTncSzlnpk0JSUwM+nm3zfTa1IvTht9Git3rOTlDiOY+8yftF+blZM0RESiKNx7Kl9mZkvNzGtm6bm23W1mq8x
suZl1CS/MYiJ7ZtLUVGjfHjZsYP9nnzB89n9p+GxDXl/8Ore1uY0VfVfQq0N/Utq085XN445nIiKRFG6V0RLgYuCl4JVm1hjoCTQBjgY+M7OGzrmsMI+X3IJmJnVpaXy48iMGvD2A1TtXc0HDC3ji3CdoUK1BTvl87ngmIhJpYSUE59wyIFT3x+7AOOfcfmCtma0CWgFfh3O8YsHjYan9yq1vXE3GmgxOqn4SU6+aSpcTQlxE5ZrFVEQkmqLVqFwb+CZoeYN/3V+YWW+gN0C9esW7F82OP3cwZMYQRs4bSYUyFXim6zP0Se9DqZRS8Q5NRKTghGBmnwGh5kUY5JybFG4AzrlRwCiA9PR0F+7+ElGmN5OX5r3E4M8Hs2vfLvq07MP9ne6n+pHV4x2aiEhAgQnBOXc4t9jaCNQNWq7jX1fifLbmM/pP7c/SbUs569izeKrLUzSt2TTeYYmI/EW0up1+APQ0szJmdizQAPg2SsdKSKt3rObCcRdyzuvnsPfgXt7r8R6fXf2ZkoGIJKyw2hDM7CLgWSAN+NjMFjrnujjnlprZ28APQCZwU0npYbRn/x4e+vIhnvzmSUpZKo+c9TD929xK2dSy8Q5NRCRf5lziVNunp6e7efPmxTuMw+J1Xl5b9Bp3T7ubzb9v5ppNNXl4/K8cfUq7v0xNISISSWY23zkX9tTH+pSKgNk/z+b0l0/nuknXUb9yfeZcNJmxY7Zz9C6NMhaR5KGEEIYNv23gqolX0e6Vdvyy5xf+d9H/mH39bFo17ZozIlmjjEUkSWhyu8Ow9+BeHp/9OI/Negyv83Jv+3u584w7KV+6fE4hjTIWkSSjhFAEzjne+eEdbs+4nfW713NZ48sYfs5w6leu/9fCGmUsIklGCaGQFmxaQL+p/fhy/Zc0q9mM1y58jQ71O8Q7LBGRiFFCKMDWP7YyaNogxiwYQ7WyVXjpby/Sq8W/SPGkxDs0EZGIUkLIw4GsAzw751mGfTGMvQf3cuv6o7lv/GYqT34TZvw73uGJiEScEkIuzjk+XvkxAz4ZwModK+nWoBsjWt5Do5M7QmbWX+5wJiJSXKjbaZBl25Zx3hvnccFbF+AxD5OvnMzHV35Mo4Zt1Y1URIo9XSEAO//cyf0z7+e5b5+jfOnyPNnlSW467aacaamDbmyjbqQiUlyV6ISQ6c1k9PzR3DfjPnbu28m/W/ybBzo9QFq5tL8WVjdSESnmSmxCmL52Ov2n9uf7rd/T4ZgOPN31aZod1SzeYYmIxE2JSwhrdq7h9ozbmbhsIvUr12fCZRO4+KSLQ90GVESkRCkxCWHP/j088tUjjPh6BCmeFB7s9CAD2gzgiFJHxDs0EZGEUHwTgtcL27bhTavO/75/g7s+u4tNv2/i6lOu5pGzH6F2xZC3eBYRKbHCvUHOZcBQ4CSglXNunn99fWAZsNxf9BvnXJ9wjlUkXi906sQ3P82i38VH8m2lPbSq3YqJPSbSuk7rgl+r3kQiUgKFOw5hCXAx8EWIbaudc839j9glA2Dj2sVcnfYlba7L4mfPHl7t9Axf9/q6cMmgUyeoUwc6dvQti4iUEGElBOfcMufc8oJLxsafB//koS8eouH4drzTGO75yljxXTv+2b4vHivEr7ptm28kcmambmwjIiVONEcqH2tmC8xsppm1j8gevV7YsgVy3fbTOceEHybQ+IXG3DvjXrqe0JUf+q3gof9tovy0Lwuu+sneb1qaRiSLSIlVYBuCmX0GHBVi0yDn3KQ8XrYJqOec225mLYH3zayJc+63EPvvDfQGqFevXt6BZFfnzJ7t+7D236d40eZF9Jvaj5k/zaRpjaZM/+d0Oh3bqaBfK+/9TpsG27erDUFESpwCE4JzrnNRd+qc2w/s9z+fb2argYbAvBBlRwGjANLT013u7QG5qnO2rV/GfUueZfR3o6lStgojuz3Pv+p0J/Woo4sWbO5qou3bNSJZREqkqFQZmVmamaX4nx8HNADWhLXTGjW
gbVsOlE7hyR71aDCuHWMWjOHmVjez8qbl9Bk4ntR69YveGOzfr6qJRKSkC7fb6UXAs0Aa8LGZLXTOdQHOBIaZ2UHAC/Rxzu0IK1Izpoy+g1un9Gf5rlV0qdOFJ7s8yUlpJ/nq/3M3Bhf2W74mrhMRAcJMCM6594D3Qqx/F3g3nH0HW/7rcgZ8OoDJKyfToGoDPrriI7o16JYz3UT2t/zsdoCifsvXxHUiIok9UnnXvl0MmzmMZ799liNLHcnj5zzOzaffTOmU0ocW1Ld8EZGwJWRCyPJmMWbBGAZNH8T2vdv5V4t/8eBZD1KjXD7f/PUtX0QkLAmXEGaum0m/qf1YtGUR7eu15+muT3NqrVPjHZaISLGXUAlhzc41dHy1I/Uq1WP8peO5rPFlmpZaRCRGEioh7Nq3i2EdhzGw7UBNSy0iEmPmXN5jwWLtlFNPcYsXLI53GCIiScXM5jvn0sPdTzTnMiqyv/QeyksecxqJiMjhS6iEUCiaolpEJCqSLyFoimoRkahIvoSguYdERKIioXoZFYpGJYuIREXyJQTQqGQRkShIviojERGJCiUEEREBlBBERMRPCUFERIAwE4KZ/dfMfjSzxWb2nplVDtp2t5mtMrPlZtYl7EhFRCSqwr1CyABOds6dAqwA7gYws8ZAT6AJ0BV4IfseyyIikpjCSgjOuU+dc5n+xW+AOv7n3YFxzrn9zrm1wCqgVTjHEhGR6IrkOITrgfH+57XxJYhsG/zr/sLMegO9/Yv7zWxJBGOKlurAr/EOohAUZ2QpzshJhhgheeJsFImdFJgQzOwz4KgQmwY55yb5ywwCMoE3ihqAc24UMMq/n3mRmMI12hRnZCnOyEqGOJMhRkiuOCOxnwITgnOucwGBXAucD5ztcm6usBGoG1Ssjn+diIgkqHB7GXUF7gD+7pzbG7TpA6CnmZUxs2OBBsC34RxLRESiK9w2hOeAMkCG/97H3zjn+jjnlprZ28AP+KqSbnLOZRVif6PCjCdWFGdkKc7ISoY4kyFGKGFxJtQtNEVEJH40UllERAAlBBER8Yt5QjCzy8xsqZl5zSw917YCp7sws2PNbI6/3HgzKx2DmMeb2UL/Y52ZLcyj3Doz+95fLiLdwIrCzIaa2cagWLvlUa6r/xyvMrO74hBnnlOe5CoX8/NZ0Lnxd5QY798+x8zqxyKuXDHUNbMZZvaD/3+pX4gyHc1sd9B7YXCs4/THke/f0Hye8Z/PxWbWIg4xNgo6TwvN7Dcz65+rTFzOp5m9YmZbg8dnmVlVM8sws5X+n1XyeO01/jIrzeyaQh3QORfTB3ASvkEUnwPpQesbA4vwNVIfC6wGUkK8/m2gp//5i8CNMY7/CWBwHtvWAdVjfU6Djj8UGFhAmRT/uT0OKO0/541jHOe5QKr/+WPAY4lwPgtzboD/AC/6n/cExsfh71wLaOF/XgHftDG54+wIfBTr2Ir6NwS6AVMAA1oDc+IcbwqwGTgmEc4ncCbQAlgStG44cJf/+V2h/n+AqsAa/88q/udVCjpezK8QnHPLnHPLQ2wqcLoL83VlOguY4F/1KnBhFMM9hP/4lwNvxeqYUdAKWOWcW+OcOwCMw3fuY8blPeVJvBXm3HTH974D3/vwbP/7Imacc5ucc9/5n+8BlpHHTABJoDvwmvP5BqhsZrXiGM/ZwGrn3E9xjCHAOfcFsCPX6uD3YF6fgV2ADOfcDufcTnzzznUt6HiJ1IZQG/g5aDnUdBfVgF1BHyZ5TokRJe2BLc65lXlsd8CnZjbfPyVHPPT1X3q/kselZGHOcyxdj+8bYiixPp+FOTeBMv734W5878u48FdZnQrMCbG5jZktMrMpZtYktpEFFPQ3TLT3Y0/y/sKXCOcToKZzbpP/+WYg1P2ED+u8RuWeylaI6S4STSFjvoL8rw7OcM5tNLMa+MZm/OjP8DGJExgJPIDvn/ABfNVb10fy+IVVmPNpBU95EvXzmczMrDzwLtDfOfdbrs3f4av
2+N3flvQ+vgGisZY0f0N/e+Tf8c/anEuinM9DOOecmUVs7EBUEoIrYLqLPBRmuovt+C4pU/3fziI2JUZBMZtZKnAx0DKffWz0/9xqZu/hq4KI6Ju/sOfWzEYDH4XYFJNpRQpxPq/lr1Oe5N5H1M9nLoU5N9llNvjfE5XwvS9jysxK4UsGbzjnJubeHpwgnHOTzewFM6vunIvpRG2F+Bsm0jQ35wHfOee25N6QKOfTb4uZ1XLObfJXr20NUWYjvnaPbHXwtdvmK5GqjAqc7sL/wTEDuNS/6hogVlccnYEfnXMbQm00s3JmViH7Ob6G05jO3Jqr7vWiPI4/F2hgvt5apfFdIn8Qi/iyWd5TngSXicf5LMy5+QDf+w5878PpeSW0aPG3WYwBljnnRuRR5qjstg0za4Xvfz2miauQf8MPgH/6exu1BnYHVYfEWp41AIlwPoMEvwfz+gz8BDjXzKr4q47P9a/LXxxazS/CV5+1H9gCfBK0bRC+Xh7LgfOC1k8GjvY/Pw5folgFvAOUiVHcY4E+udYdDUwOimuR/7EUX9VIrM/t68D3wGL/m6ZW7jj9y93w9UxZHac4V+Gr31zof7yYO854nc9Q5wYYhi95AZT1v+9W+d+Hx8Xh/J2Br1pwcdA57Ab0yX6PAn39520Rvob7tnGIM+TfMFecBjzvP9/fE9TzMMaxlsP3AV8paF3czye+BLUJOOj/3OyFr81qGrAS+Ayo6i+bDrwc9Nrr/e/TVcB1hTmepq4QEREgsaqMREQkjpQQREQEUEIQERE/JQQREQGUEERExE8JQUREACUEERHx+3/E90TicLYoGQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "from mindspore import Tensor\n", - "\n", - "x_model_label = np.array([-10, 10, 0.1])\n", - "y_model_label = (x_model_label * Tensor(model_params[0]).asnumpy()[0][0] +\n", - " Tensor(model_params[1]).asnumpy()[0])\n", - "plt.axis([-10, 10, -20, 25])\n", - "plt.scatter(x_eval_label, y_eval_label, color=\"red\", s=5)\n", - "plt.plot(x_model_label, y_model_label, color=\"blue\")\n", - "plt.plot(x_target_label, y_target_label, color=\"green\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "supposed-truck", - "metadata": {}, - "source": [ - "As shown in the preceding figure, the initialized model function in blue differs greatly from the objective function in green.\n", - "\n", - "## Defining and Associating the Forward and Backward Propagation Networks\n", - "\n", - "Define the loss function of the model. The mean squared error (MSE) method is used to determine the fitting effect. The smaller the MSE value difference, the better the fitting effect. The loss function formula is as follows:\n", - "\n", - "$$J(w)=\\frac{1}{2m}\\sum_{i=1}^m(h(x_i)-y^{(i)})^2\\tag{2}$$\n", - "\n", - "Assuming that the $i$th data record in the training data is $(x_i,y^{(i)})$, parameters in formula 2 are described as follows:\n", - "\n", - "- $J(w)$ specifies the loss value.\n", - "\n", - "- $m$ specifies the amount of sample data. In this example, the value of $m$ is `batch_number`.\n", - "\n", - "- $h(x_i)$ is a predicted value obtained after the $x_i$ value of the $i$th data record is substituted into the model network (formula 1).\n", - "\n", - "- $y^{(i)}$ is the $y^{(i)}$ value (label value) of the $i$th data record.\n", - "\n", - "### Defining the Forward Propagation Network\n", - "\n", - "A forward propagation network consists of two parts:\n", - "\n", - "1. Bring parameters into the model network to obtain the predicted value.\n", - "2. 
Use the predicted value and training data to compute the loss value.\n", - "\n", - "The following method is used in MindSpore:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "irish-creativity", - "metadata": {}, - "outputs": [], - "source": [ - "net = LinearNet()\n", - "net_loss = nn.loss.MSELoss()" - ] - }, - { - "cell_type": "markdown", - "id": "german-performance", - "metadata": {}, - "source": [ - "### Defining the Backward Propagation Network\n", - "\n", - "The objective of the backward propagation network is to continuously change the weight value to obtain the minimum loss value. Generally, the weight update formula is used in the linear network:\n", - "\n", - "$$w_{t}=w_{t-1}-\\alpha\\frac{\\partial{J(w_{t-1})}}{\\partial{w}}\\tag{3}$$\n", - "\n", - "Parameters in formula 3 are described as follows:\n", - "\n", - "- $w_{t}$ indicates the weight after training steps.\n", - "- $w_{t-1}$ indicates the weight before training steps.\n", - "- $\\alpha$ indicates the learning rate.\n", - "- $\\frac{\\partial{J(w_{t-1}\\ )}}{\\partial{w}}$ is the differentiation of the loss function to the weight $w_{t-1}$.\n", - "\n", - "After all weight values in the function are updated, transfer the values to the model function. This process is the backward propagation. To implement this process, the optimizer function in MindSpore is required." 
- ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "understood-organic", - "metadata": {}, - "outputs": [], - "source": [ - "opt = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9)" - ] - }, - { - "cell_type": "markdown", - "id": "confident-matthew", - "metadata": {}, - "source": [ - "### Associating the Forward and Backward Propagation Networks\n", - "\n", - "After forward propagation and backward propagation are defined, call the `Model` function in MindSpore to associate the previously defined networks, loss functions, and optimizer function to form a complete computing network." - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "accepted-photograph", - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore import Model\n", - "\n", - "model = Model(net, net_loss, opt)" - ] - }, - { - "cell_type": "markdown", - "id": "civic-underwear", - "metadata": {}, - "source": [ - "## Preparation for Fitting Process Visualization\n", - "\n", - "### Defining the Visualization Function\n", - "\n", - "To make the entire training process easier to understand, the test data, objective function, and model network of the training process need to be visualized. The following defines a visualization function which is called after each training step to display a fitting process of the model network." 
- ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "appreciated-pension", - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "import time\n", - "\n", - "def plot_model_and_datasets(net, eval_data):\n", - " weight = net.trainable_params()[0]\n", - " bias = net.trainable_params()[1]\n", - " x = np.arange(-10, 10, 0.1)\n", - " y = x * Tensor(weight).asnumpy()[0][0] + Tensor(bias).asnumpy()[0]\n", - " x1, y1 = zip(*eval_data)\n", - " x_target = x\n", - " y_target = x_target * 2 + 3\n", - "\n", - " plt.axis([-11, 11, -20, 25])\n", - " plt.scatter(x1, y1, color=\"red\", s=5)\n", - " plt.plot(x, y, color=\"blue\")\n", - " plt.plot(x_target, y_target, color=\"green\")\n", - " plt.show()\n", - " time.sleep(0.2)" - ] - }, - { - "cell_type": "markdown", - "id": "nutritional-contrast", - "metadata": {}, - "source": [ - "### Defining the Callback Function\n", - "\n", - "MindSpore provides tools to customize the model training process. The following calls the visualization function in `step_end` to display the fitting process. For more information, see [Customized Debugging Information](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/custom_debugging_info.html#callback).\n", - "\n", - "- `display.clear_output`:Clear the printed content to achieve dynamic fitting effect." 
- ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "infinite-heritage", - "metadata": {}, - "outputs": [], - "source": [ - "from IPython import display\n", - "from mindspore.train.callback import Callback\n", - "\n", - "class ImageShowCallback(Callback):\n", - " def __init__(self, net, eval_data):\n", - " self.net = net\n", - " self.eval_data = eval_data\n", - "\n", - " def step_end(self, run_context):\n", - " plot_model_and_datasets(self.net, self.eval_data)\n", - " display.clear_output(wait=True)" - ] - }, - { - "cell_type": "markdown", - "id": "accompanied-static", - "metadata": {}, - "source": [ - "## Performing Training\n", - "\n", - "After the preceding process is complete, use the training parameter `ds_train` to train the model. In this example, `model.train` is called. The parameters are described as follows:\n", - "\n", - "- `epoch`: Number of times that the entire dataset is trained.\n", - "- `ds_train`: Training dataset.\n", - "- `callbacks`: Required callback function during training.\n", - "- `dataset_sink_mode`: Dataset offload mode, which supports the Ascend and GPU computing platforms. In this example, this parameter is set to False for the CPU computing platform." 
- ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "toxic-submission", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAD8CAYAAACSCdTiAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAuPUlEQVR4nO3dd3gUVRfA4d9JAgHpJUCkCChIk6IRlV6UoiKCIiAqCiShI+UDBEQEQRQQpQuCBVEp0nsv0qRDICBFSkIIoYPUZO/3x240xg1J2N1ssjnv8+TJ7szs3JPZzcnNmTt3xBiDUkopz+Tl7gCUUkq5jiZ5pZTyYJrklVLKg2mSV0opD6ZJXimlPJgmeaWU8mAOJ3kRKSwi60TkkIgcFJFutuWDRCRcRPbavl50PFyllFLJIY6OkxcRf8DfGLNbRLIBu4BXgTeAG8aYkQ5HqZRS6oH4OLoDY0wEEGF7fF1EQoGCju5XKaWU4xzuyf9rZyJFgY1AOaAH8C5wDdgJ9DTGXLbzmiAgCCBLlixPlSpVymnxKKVUerBr164Lxhg/e+ucluRFJCuwARhqjJkrIvmBC4ABhmAt6bS53z4CAgLMzp07nRKPUkqlFyKyyxgTYG+dU0bXiEgG4FdghjFmLoAxJtIYE2OMsQBTgMrOaEsppVTSOWN0jQBTgVBjzBdxlvvH2awJEOJoW0oppZLH4ROvQFXgbeCAiOy1LesHtBSRiljLNSeBYCe0pZRSKhmcMbrmN0DsrFrq6L6VUko5Rq94VUopD6ZJXiml3OT6dZg4EVx57yZN8kop5QYrVkC5ctCpE+zZ47p2NMkrpVQKunQJWreGBg0gSxb47TfDk0+6rj1N8kop5WoWC0RGMme2oXRp+Okn6D/A0O/Hn+m0uzxXbv1nMgCn0SSvlFKuZLEQUeU1XvPfTLM3hEKFDAs3nGZv6Zd5e9GbeB8I4WKT+tY/BC7gjHHySiml7DAGvh97ne7bp3GLzAzz7oPvsGw0Wz8cYyx8sdKLLtss+HjtgagoyJ/f6TFoT14ppVzg1Clo2BDeez8H5bKfYZZ/WeZ3nUjPbR9S/ZHqHOx4kO4+1fDx8oEqVSBfPpfEoT15pZRyIosFJkyAvn1BBL4Ye4vIx3/itW2nyZUpFz81+JoW5VogIrBunbUHny+fdWMX0CSvlFJOcuQItG0LmzdD/frwzkdrGLgjmONbjvNexfcYWW8kuTPn/ucFXl4uKdHEpeUapZRy0L17MHw4VKgAhw7BuKkXKdD+XVqtfB4RYc07a5jWeNq/E3wK0Z68Uko5YO9eaNPGekFT05duUbfPXAZt687l8Mv0q9aPATUGkDlDZrfFpz15pZR6ALdvw4AB8PTTcPasYcJTXbiZKwud1r5FsZzF2BW0i6F1h7o1wYP25JVSKtm2bLHW3g8fhrdbR1Pi1aH02jEOLwNjlnvRce48vPM/7O4wAe3JK6VUkt24Ad26QbVqcPMmjPt1D4eqPMvAfYOocy03h772pkvGangX8E98ZynE4Z68iBQGfgDyY71ByGRjzFcikhuYCRTFetOQN+zdyFsppdKCVasgKAhOnoTgzjfxrf8R3XaPJu9DeZn1+ixeL9UUGXDBpcMhH4QzevLRQE9jTBngWaCTiJQB+gJrjDElgDW250oplaZcuWItzdSrBxkzwqj5K1n5WDnG7BpJm0ptCO0USrOyzRBvb+twyFSU4ME5d4aKACJsj6+LSChQEGgM1LJt9j2wHujjaHtKKZVS5s+Hjh0N589D1z5RRD3Vk557f6RknpKsb72emkVrujvERDn1xKuIFAUqAduB/LY/AADnsJZzlFIq1YuMhK5dYd
YsKJ/lKMHlGzLW+zTXDgsf1viQftX7kcknk7vDTBKnJXkRyQr8CrxvjLkmcf5lMcYYEbF77xMRCQKCAIoUKeKscJRSKtmMgRkzrCdXb9yA7h/sZ394JQYVt1DlDEzuuZ6ypVN/7z0up4yuEZEMWBP8DGPMXNviSBHxt633B87be60xZrIxJsAYE+Dn5+eMcJRSKtnOnIGXX4a334YSpe7R5ZfPmZTlWX4vIkxY6sWmo9UpW6qGu8NMNoeTvFi77FOBUGPMF3FWLQRa2x63BhY42pZSSjmF7SYeGIPFApMmQdmysH499Bi5k9tvP82o/X2o/1h9Qnv+SYeFZ/FavyHVnVRNCmeUa6oCbwMHRGSvbVk/YDgwS0TaAqeAN5zQllJKOcZigdq1YcsWjlZqRuBDM9iwQaj5wg0ebTeQLw9/RX7yM/eNuTQp3cTd0TrMGaNrfgMS+vNW19H9K6WUU0VFEb15O1/GdOPDHUPwzW54f9xy5t3rwIbQU3QI6MCndT8lR6Yc7o7UKXRaA6VUunIgMh9tMu9l541S1C+0gEzDfuHLE79QOm9pfnvvN6oWqeruEJ1Kk7xSKl24exeGDoVhw4ScuUrSfsg4Zt4ZyF+n/uLjWh/Tp2offH183R2m02mSV0p5vN9/t04HfPAgvPLeUS5WCWJS+HqqF6nO5EaTKZW3lLtDdBmdoEwp5bFu3rDQs/1fPPec4cr1u7wzZRgrij9ByIU9TK4xkvWt13l0ggdN8kopD7VujYUn/CL44ussNH5iJDn+9xQ/hPenUcmXCV1TisB6ffGqXcc62saDablGKeVRrl6F3r1h8mQvimWMoknDZ5hfOZyCtx9mQYsFvJLzGWhVCKKjrRPDR0W5/D6r7qRJXinlMRYvhvbtISICmvReyO9ebzDf9w6dwh5m6LhDZM+Uwzp3QZUq1gRfpYp1amAPpuUapVSaFxUFrVpBo0aQ1T+C2hPeYN5DjclVuARbmi5h7JQwa4IH61Wr69ZBWJj1Etc0eBVrcmhPXimVZhkDM2dCly5w5aqFlwd9wybf3pyMus3QOkPpVaUXGb0zWje2WKx/DfLlAy8vjy7RxKU9eaVUmhQeDq++Ci1bgv8Thyk/uhaLCaaSfyX2d9hPv+r9/p3ga9eGQoWgVi2PP9kal/bklVJpijHwzTfQqxfcjblLvWHDWR8zlCw3sjD1lam8V/E9JH4JJirKWoNPJydb49KevFIq9YkzS2RcJ07A889b77X6aK3NFPqkEivvfkTT0k0J7RRKm0pt/pvgwVqiqVIFfHzSxcnWuDTJK6VSFzullZgY+PJLeOIJ+H3fVWp+3pE9T1bjLjdY8uYSfn7tZ/JnvU/PPJ2dbI1LyzVKqdQlXmnl0G+XaNsnL9u2wVOt5hFWoTObbp3j/WfeZ0idIWTNmDVp+01HJ1vj0p68Uip1sZVW7npnZkjBSVR6IQ+Hz4YTMKIpu0o0pUA2P7a13cboBqOTnuDTMe3JK6VSFxF2jlhH2/di2B/qzVPBk/ijSF9C7txleN3h9HiuBxm8M7g7yjTDWfd4nSYi50UkJM6yQSISLiJ7bV8vOqMtpZTnunUL+vSBZ57z4ix/UOrz6uwq0JFnClUmpEMIfar10QSfTM7qyX8HjAN+iLd8tDFmpJPaUEp5sI0boV07OPrnbSq9P4yQnMMxlux8/+r3vF2uFXLhgnW0TTo6aeoMTunJG2M2ApecsS+lVPpy/Tp06gQ1a8L1PBspPLQie7IPoXm55oR2CuWdJ95C6tRJlxcyOYOrT7x2FpH9tnJOLnsbiEiQiOwUkZ1RUVEuDkcplZosXw5ly8KEaVco90EQ5xrUxNv3DstbLWd6k+n4ZfGzfyGTSjJXJvmJwKNARSACGGVvI2PMZGNMgDEmwM/Pz4XhKKVSi4sXoXVraNjQYCk1mzwDS3Mo01R6PdeLkA4h1H+s/j8bp+MLmZzBZaNrjDGRsY9FZAqw2FVtKaXSjjlzrOWZi9FnKD
GwE0e9FvFknidZ2WgJT/o/+d8XxF7IFDu5mNbkk8VlPXkR8Y/ztAkQktC2SinPFxEBr70Gzd6Iwbf6WHx7lCHcdw0jXxjJ9nbb7Sf4WF5e4OcH58//Z6oDdX9O6cmLyM9ALSCviIQBHwG1RKQiYICTQLAz2lJKpS3GwHffQY8e8FfWAxQeFMgZs536j9Rn4ksTKZarWOI7iZ3qIPZGH+vWWRO/SpRTkrwxpqWdxVOdsW+lVNp18iQEB8PKtbco1GowN4qP5HbmXMxoMIOW5Vran0zMnnQ8i6Sj9E+hUsrpLBYYOxbKlYONZ9bhN7A8YcWG89beGEKXP8abZVskPcGDnnx1gE5roJRyqiNHoG1b2Lz7EgXf60V4vm8pmLUoP0/wou4xC/jsSH5P3Bj45RfrSdf8+fXkazJoT14p5RT37sGnn0L5Coa90b+Q/YPSnMv/A32r9uVAp4PUfbjag/XEY+vxRYpA8+Z64jWZtCevlHLYnj3W3vueE6co0LkD57It4+kCTzOl0UoqFKhg3ehBh0FqPd4h2pNXSj2w27ehf38IqBzD0byj8e1Rhuu5N/Jl/S/Z2nbrPwke/pnPPbmlFq3HO0R78kqpB7Jli7X3fvjKXvL8L5CLvjt58bEXmfDiBB7J+YjzGtKLoRyiPXmlVLLcuAHdukHVWjcJL9MHr/YB+OQ+w8zXZ7K45WLnJvhYD/pfgNKevFIq6Vatst5E+6TXKrL3bc817xO0q9SOz1/4nFyZ7c5BqNxMk7xSKlGXL0PPnvDtzAtkb9YDik2nQJ6SLHh5HbWK1nJ3eOo+NMkrpe5r/nxo38FwvsAMMv+vOze9rzCg6gD61+hPJp9M7g5PJUKTvFLKrshI6NIFZq8+QbYWHTD5V1Kh0LNMaTSFcvnKuTs8lUR64lUp9S/GwPTpULpsNHPPjSRDt3JQeCvjGo7jt/d+0wSfxmhPXin1t9OnoX17WLZ3F1laBxKTfQ8vPf4K418cT6HshdwdnnoA2pNXSmGxwMSJUKbiX6zy6okEVybbwxHMaTaH+c3na4JPw7Qnr1Q6d/QotGsHG88uJ1Nwe6IznSK4zDsMf/lLcuqwyDTPKT152426z4tISJxluUVklYgctX3XT4tSqUh0NIwYAU88e56t/q3grYYULZiZTbsqMOnNn8jZ4FVrF1+lac4q13wHNIi3rC+wxhhTAlhje66USgX274dnnzP0/uk7LB1LQ9nZDKo5iL1NV1Bt2cF/Twam0jSnJHljzEbgUrzFjYHvbY+/B151RltKqQd35w589BE8+fwx9ld8Hl59j8rFSrOv/T4+qvURvv6FdTIwD+PKmnx+Y0yE7fE5wO7coCISBAQBFClSxIXhKJW+bd8Obdrd41DOUXh3+JhMmTIy9oVJBD4ViJfY+ns6GZjHSZETr8YYIyJ2Z/o3xkwGJgMEBATo3QCUcrKbN2FAf8Po2b+ToWkQ5NlP49JNGdtwLA9ne/i/L4idDEx5BFcm+UgR8TfGRIiIP3DehW0ppexYtw7atL/GyeIDod0Y/O5kZHyzX3m1TFN3h6ZSiCvHyS8EWtsetwYWuLAtpVQcV69aZ4us034xYa+UQ575ik47DKFjonk1T1V3h6dSkLOGUP4MbAUeF5EwEWkLDAdeEJGjwPO250opF1u8GEoFnGPKlebwZiNKPJKdzbsqMm6lD9kDqlrnLdD7pKYbTinXGGNaJrCqrjP2r5RKXFQUdO1m+OXIVLze+B8ZMt1kYM0h9K7am4wdfawzjjVvDoVtI2jWrbPW35VH03dYqTTOGPj5ZyhZ5QgzM9WGVwKp9lgFDnTYz4AaA8jondGazL28YOtWHQOfzui0BkqlYeHhENzxLkuufI60/ISsmTLzRYMptKnU5p9hkbFib4i9ZYuOgU9HNMkrlRpZLPcdq24MfPMNvD9qK7eeD4QnD/J6mTcY0/ArCmQtYH+fOgY+XdJyjVKpjcUCtWtDoUJQq9Z/5o85fhxq1rtG0P
zO3GxRlQKPXGNRy0XMajYz4QQfS2+Ine5oklcqtYmKspZU4tXOY2Jg9Ggo03Q+v1Uog1SeQJdnunCk60FeLvmym4NWqZWWa5RKbezUzg8ehLc7nmVPgS7QdC6lc5fnu6ZzqVywsrujVamcJnmlUoP4NXhb7fxuznx8OsQweOnXmDp9yJDpLh/X/pReVXqSwTuDu6NWaYCWa5RKKRaLdax6/AuR7NXgvbzYeSY/5eocZtDJmlgadqBa8QAOdT7AB9X7aoJXSaZJXqmUcL+TqfFq8LdOR9Gj9x2e7j2Io3UrkK34Ib5t/C0b2q7msdyPue1HUGmTJnmlUkICJ1OBf2rwPj5sLNOekq3+YPTNilDzY14v04xj3UN5t+K7iI6IUQ9Ak7xSKSFOIv/PhUgiXFuwjrZvn6JmkXuE1atB/kK3WPrmUma3mEG+LHrRknpweuJVqZRwnwuRli41vDN8Hhcrd0GKRNL5qR58Wm8wWTJmcWPAylNoklcqpcS7GcfFixDUK4y5tztB3YWUzFaJn1os4qmHn3JjkMrTaJJXKoUZA7Nmx9Du64nceKYfPhmjGVJ7BL2qvY+Pl/5KKufST5RSKSgiAt7uGcKahwKhxjae9XuBGS0nUTxXcXeHpjyUy5O8iJwErgMxQLQxJsDVbSqV2hgDU769Tdc5n3An4DOyeOdk/CvTeadiKx01o1wqpXrytY0xF1KoLaVSViIzRp48Ca/32sAu/yB45g9eLfoOU5qNIu9DeVM+VpXu6BBKpRxxn4ucLBb47KvLlOjZjl1P1CJvvmiWt1zOvAafkzdzHvfFrNKVlEjyBlgpIrtEJCj+ShEJEpGdIrIzSu9Uo9KaBC5yCg01lH59Jn3DShPzxHe0L9ebU733UT9wWIJTCCvlCimR5KsZY54EGgKdRKRG3JXGmMnGmABjTICfn18KhKOUE8W7yOlernz0HnqaskMb8UeFFhTNXYidQTuY+NpnPHTlr4SvelXKRVye5I0x4bbv54F5gM6NqjxH7EVOYWHs/GINxVqOYcTNMngVX8/gKqM52mcbTz5cybrt/a56VcpFXHriVUSyAF7GmOu2x/WAwa5sU6mUdvuuF53GnGNaVCMov4NKWRsyr+1EHsn5yL831NvvKTdw9eia/MA82xAxH+AnY8xyF7epVIpZu/EWzcYP5lKpEWQqkIexDX6mbeXmCQ+LjHfVq1Ku5tIkb4w5AVRwZRtKucONG/D2wDXMjwmGMsep79eGn94bQe7Mud0dmlL/okMolUqmOUsuUqD9u8zP8Tw5cghLmq1lecep5PbNaf+mIEq5kSZ5pZLo0iVDjc4zaLahFH89OoO3i/Yj4sP9vFim9v1vCqKUG+ncNUolwaSZf9JtZQfuFlmBv6UyC9pO4eki5f/ZwN54ea29q1RAe/JK3Ud4RDQVOoyiw/5yRPtv5n/lxnDmoy3/TvCgwyNVqqU9eaXsMAY+mbqbj/cEElNgN6WkEUs6jad4nsL2X6DDI1UqpUleqVi2icYOX8/CiyM+5s8Co8mQw49RVWbT9fnXEp8tUodHqlRIk7xSABYLllp16BaRg/Ev7cc8fJIqvoEs+N9n5M2a676v0967Ss20Jq8UsH3TH/jnzci4txaSOSaGX55dwOa+kxNP8DqiRqVy2pNX6dq9e4a3Rkxn1tUeUO4ajTZUYqYlC5nHNEr8xTqiRqUB2pNX6daSLcfJ070es+61JrflcdY228XCn5eRed3GpJVedESNSgO0J6/SnRs379H4sy9YGzMIyZGBdgUmMCkwGG+vZPZ5dESNSgM0yat0ZdqKHXRcFsidXPsofLMJSzuPpdwjBR98hzqiRqVymuRVuhB5+QYNR3zIngxj8MpYgP6PzeWTVk3cHZZSLqdJXqVtSRjC+OmcpQzc3oHorKcpe7MDy3oOpTB3rVc8aYlFeTg98arSrkSGMB49G8mjfVrS7+BLeEVnZWyl3wj5dByF33hVhz2qdMPlSV5EGojIER
E5JiJ9Xd2eSkcSuIm2MYau307j8bGlOZFxLlXvfkzk4N10fqVqgq9RylO5NMmLiDcwHutNvMsALUWkjCvbVOmInSGM247+QYG+dRh7ui2Zr5djdt19/DZ0IDmz+Sb4GqU8matr8pWBY7Y7RCEivwCNgUMublelB3GGMN7JnZN3xg9jVuQQ8MpEIyYza1RbMvl6Jfga8uaF8+d1+KPyaK4u1xQEzsR5HmZb9jcRCRKRnSKyM0r/dVbJ5eXFwvA/8RsQwKyLA8gd9QrrXgtl4UeB/03wcV6Dnx/UqaO1eeXx3H7i1Rgz2RgTYIwJ8PPzc3c4Kg25dvs6tUd0ofHCKtyIvkLbzAs5P34WtQL8E3+x1uZVOuHqck04EHcC7kK2ZUo55Ov1C+m2ohN3fMMpGN6ZJT2HUqFUtqTvILY2v2WL1uaVR3N1kt8BlBCRYliTewvgTRe3qTxY2JUIXhrflf3Rc/C6UY6+hWYz7KNnk19S1ykJVDrh0iRvjIkWkc7ACsAbmGaMOejKNpVnshgLgxd/wyfbexMjt3n83FCWffg/ihXJ8OA71SkJVDrg8itejTFLgaWubkelMvauRE3uDTZs2++PucQrU4I5xSYyRNbm82pf031wCe18K5UEbj/xqjyQvStRk3uDDYuFO3VqEtwigIoTK3DqVgiVI6YSNmQVPRpkRzAp8ZMoleZpklfOZ2/kSjJHs6zduYSC5f5gcq0wMoW+yPePb2X7hHfJ11yHPSqVHJrklfPZu6o0iVeaXr19lcZTOlB32StczJCJ+jO6cjbiHu+8W1KHPSr1AHQWSuV8CY1cSWQ0y4zd8wia35mbco4ch7szo80gXup765/tddijUsmmSV65hr2RKwmMZgm/Fk6zbzuz9cp8iKpAy6zzmTLtabJkAcj+z4Y67FGpZNMkr9zGYiyM2jCJ/uv6ci/mHvkOfcavvbpTrYptWKS90Tg67FGpZNGavHIOiwUiI6034oj7OAEhkQcpPaI6vTd0IvrkM3TyCuH0z73/neCTMxpHKWWXJnnluLgJuWbN+ybn29G36b5oIOUnVOKPi0couud79vRYybjBj+LrG2dDPcmqlFNouUY5Ln5CFvl3craVVzacWE+LGUGcsxzF69BbfPj0F3w4xw8fe59CPcmqlFNokleOi5uQn3vOmuTjJOfLty7TYe7/mHlsKlwuRrm1E/h1VjAlS93nH0k9yaqUU2iSV46Ln5CNgagojJ8fs0LmEDivC9djLpBhazc+X3+DrpZueOVqCiRyAlVPsirlME3yyjniJmQRzmS6yzvfNGZ9xGI4+yRV1n/Gz398SBEJg+rVtfyiVArRJK+cKsYSw9htE+izqh9371rIvH0UExq3pvXUAgjR4OUNM2dq+UWpFKJJXjnNgcgDvDkzkJDL2+FYfV60TGTajGLkz2dgTpyTqFqCUSrFaJJXDrt17xYfrR3CqK0jsNzMRY6tM5jWvSVNm8b21vUkqlLu4rIkLyKDgEAgdoBzP9vc8sqDrP1zLa1nBxN26xjsfZeWeUcyfmEecuWKt6GeRFXKLVzdkx9tjBnp4jaUG1y8eZH3l/6PHw9+C5ceJd/vq5n+cV3q1XN3ZEqpuLRco5LFGMMvIb/QYWE3rt69BFv60qH0QD5flZmsWd0dnVIqPlcn+c4i8g6wE+hpjLkcfwMRCQKCAIoUKeLicNTfknsrPuDklZMEzu/I6lPLIPxpHtm/mhmjylO1qotjVUo9MIfmrhGR1SISYuerMTAReBSoCEQAo+ztwxgz2RgTYIwJ8PPzcyQclVTJnPwr2hLN6K2jKTWmLGuObUSWf0Vfv60cXq8JXqnUzqGevDHm+aRsJyJTgMWOtKWcyN7kXwmcFN17bi+tf23H/gu74I+XKPPnBH6cUIRKlVI4ZqXUA3HZLJQi4h/naRMgxFVtqSSKnQLYzy/RW/HdvHeT3qv68NTXARw4FYbPvJkMLbuIvRs0wSuVlriyJv+5iFQEDHASCH
ZhW+p+YpN78+awdas1sa9ZAxcv2q3Jrzq+inYL2nP6+gnY1Y7KVz/n+19yUaqUm+JXSj0wlyV5Y8zbrtq3SobY+ntseQasjy9e/E+J5sLNC3Rf3oMfD0xHLpUk08p1fN6xFh07gre3G2JXSjlMh1B6urj1dxHrRUnxSjTGGH7c/yNdl3bn6u2rsGkAtTP0Z+qyTBQt6r7QlVKO0ztDebrYud59fKyzP4aFwfr1f5doTlw+Qb3p9Xln/jtcPVGSrDP2MO3tIaxergleKU+gPXlPl8DNN2KHRQ5c9xF3b/vAynE0LtSBCZu88PdPZJ9KqTRDk3x6EG/emF1nd9F2QTv2nd+LHGlM7m3jmPhZIV5/XecOU8rTaJJPR/66+xcD1w3ky21f4nUrPyz8lVYBTfhyh5Anj7ujU0q5gib5dGL5seUEL2rP6WunYGcw/qHDmTI2Jw0bujsypZQraZL3cOf/Ok/3Fd356cBPZLhSCuZuosNL1Rg+A7Jnd3d0SilX0yTvoYwxfLf3O3qs6Mm12zdg/SCKnOvLtOm+1Kjh7uiUUilFk7wHOnbpGMGLg1n751oyRlTDzJtM73dLM2gQZM7s7uiUUilJk7wHuRdzj5FbRjJ4w2Bi7maEpZMoeTeQb5d4ERDg7uiUUu6gSd5DbA/bTuCiQA6cP0DGY69hloxhcPeH6dMHMmZ0d3RKKXfRJJ/aJXJzj+t3rjNg7QDG/j4W37sPw6/zqZSzMVM3QdmybohXKZWq6LQGqZXFAhER1pt6JHBzj8V/LKbshLKM/X0sPns7wPhDfBHcmM2bNcErpay0J58aJTRzpO3mHudunKPb8m7MOjiLhy48ipm/meo3MjJlZ1aKP+be0JVSqYsm+dQogZkjLX55mbprCr1X9+b67Zv4bByM94b2TI7pRzvv75BsYYD9OzwppdInR+/x2kxEDoqIRUQC4q37QESOicgREanvWJjpjJ2ZI4/M+ZraP9QhaHEQMeEViBm7n4ZZBhAa0J5An++Qqvbv8KSUSt8c7cmHAE2Br+MuFJEyQAugLPAwsFpEShpjYhxsL32IM3Pk3Tw5+Wzz53yy6RO8Yh7Ce/E3ZDzZhp/HCc2bg5jZ9z0xq5RK3xy9kXcogPw3uTQGfjHG3AH+FJFjQGVgqyPtpSteXmy9e4LAyYEcjDpIjjPNuTrzS1o2KsBXS623aQVAvBK8CbdSSrmqJl8Q2BbneZht2X+ISBAQBFCkSBEXhZO2XLtzjQ9Wf8DEnRPJaimEzFxM1hsvMf0naNTI3dEppdKSRJO8iKwGCthZ1d8Ys8DRAIwxk4HJAAEBAcbR/aV18w/Pp/PSzpy9fpbsoV25Ou8TAltnZcQIyJHD3dEppdKaRJO8Meb5B9hvOFA4zvNCtmUqAWevn6XLsi7MDZ1LnnvlMd/NJY9vZeYugzp13B2dUiqtctXFUAuBFiLiKyLFgBLA7y5qK02zGAuTdk6i9PjSLD68lOy/f8qlz3bSo3llDhzQBK+UcoxDNXkRaQKMBfyAJSKy1xhT3xhzUERmAYeAaKCTjqz5r0NRhwhaFMTmM5vJf7MO1775mscKPMa03+CZZ9wdnVLKE4gxqacMHhAQYHbu3OnuMFzuTvQdPv3tU4ZtGobvLR9k6Wfc3N+e/gN86Ndf8PV1d4RKqbRERHYZY+zONatXvKawTac2EbQ4iMMXDlPoQgvCvv2Kp/46zTTvZyjfaRn46nBIpZTz6ARlKeTK7SsELwqmxnc1uHD5Ng/NXcaFb37i83y/ss27GuWrZtMrVpVSTqc9eRczxjA3dC5dlnUh8q9IioT15PT3H1PjuSx8sx9KPBoMUU31ilWllEtoknehsGthdFraiYVHFlLIuxIZv1vEpainmPAVBAdb5x0DvWJVKeU6muRdIMYSw8SdE+m3ph/3YqJ55MgITs18n4b1fZi0HvTCXqVUStEk72QHIg8QtCiIbeHbeJQXOD1hEtejizP9e2jVSisySqmUpUneSW
5H3+aTjZ/w2ebPyHrLhyKLh3L8wAc0ex3GjtOKjFLKPTTJO8H6k+sJWhTE0UtHKXf7TQ6NGUWmm4a5Xs1oMm68ZnillNvoEEoHXLp1iXYL21H7+9r8dSuGgmtWETJ8Bq2z7eaQd3maVIvSYZFKKbfSnvwDMMYw6+Asui7vysWbF3nyZh92Dx1I0YIPsXIlvFC3AUSF6LBIpZTbaZJPptNXT9NxSUeWHF1CiSwBeM1bwZ59FenaBYYOhaxZQYdFKqVSC03ySRRjiWHc7+Pov7Y/AE9fHM2Oj7vweElv5myCqlXdHKBSStmhST4J9p3bR+CiQHac3UGlrA05M3Eiu08+Qr8P4MMPIVMmd0eolFL2aZK/j1v3bvHxho8ZuWUkuTLlofLpn/l9WnMqVhRW7oBKldwdoVJK3Z8m+bgsFoiyjohZ/eca2i9uz/HLx6metQ37R45g3+XcDBsGvXpBhgzuDlYppRLn0BBKEWkmIgdFxCIiAXGWFxWRWyKy1/Y1yfFQXcxigdq1uViiIO928OeF6S9gifEi4OBaNvWaSrlHc7N3L3zwQbwEb7FAZCSkonn5lVIqlqPj5EOApsBGO+uOG2Mq2r7aO9iOy5nz55lx7TdKtY9hRr5I6nn15PzgfYQurc2YMbBxI5QqFe9Ftj8MFCoEtWpZnyulVCriULnGGBMKIGl8LPifl/+kw+oOrHjVQoWwTBRe+yMrT7zGCy/A5MlQtGgCL4yKgi1bIDra+j0qSodOKqVSFVde8VpMRPaIyAYRqe7CdhKXQEkl2hLNqC2jKDexHJvPbOYVnzGE/nidPy815dtvYcWKBBJ87P78/KBKFfDxsX7Xq1uVUqlMoj15EVkNFLCzqr8xZkECL4sAihhjLorIU8B8ESlrjLlmZ/9BQBBAEVfMwRtbUtmyxZqI160DLy92R+wmcFEguyN2U7NAIy7+MJ6FWwrTpAmMHw/+/knc35o1cPGiXt2qlEqVEk3yxpjnk7tTY8wd4I7t8S4ROQ6UBP5zl25jzGRgMlhv5J3cthIVr6Ty19mTfHRoAqO3jSbfQ/l47a/pzO/8JnlyezF7Nrz2WiK5On6J5uJFLdEopVItl5RrRMRPRLxtj4sDJYATrmgrUfny/V1SWfFyKcr9WpdRW0fR6OG2ZB+2nF9HvEWr3Cs5FGLh9deT0BmPsz8t0SilUjtHh1A2EZEw4DlgiYissK2qAewXkb3AHKC9MeaSQ5E+eJBELZ7FW983pkHFEDJ4+fL69Q0sDPqaW5dzsZSGfH+pEXksUUneH+vWQVgYrF+vJRqlVKrm6OiaecA8O8t/BX51ZN/OYIzhh30/0GNlD67fuc7bhQeyaVg/5hz3pWMHw/B9QWT7fU3ye+ReOgGZUipt8NgrXo9fOk7w4mDW/LmGyv5VKLR7CtMHlqFECdiwAWrUELAs/fsKV+2RK6U8kccl+Xsx9/hi6xcM2jCIjN4ZCS40gQUfBrMz0ovevWHQIMic2bax9siVUh7Oo5L8jvAdBC4KZF/kPl4s1gTvlWP5ekZBypeHRQshICDxfSillCfxiNv/3bh7g+7Lu/Ps1GeJuhlFtwJz2d5jLitmF2TIENixQxO8Uip98oie/P7I/Yz5fQytHg8m8qdP+WphDp55BqZNgzJl3B2dUkq5j0ck+WcLVmFwnmN8FliM6Gj44gvo2hW8vd0dmVJKuZdHlGvWroUBnYvx9NMQEgLduyeQ4HVaYKVUOuMRSb5uXVi+HFavhuLFE9hIpwVWSqVDHpHkRaB+/WTOOROVxCtclVIqDfOIJJ8kOueMUiod8ogTr0kSO+eMXuGqlEpH0k+SB73CVSmV7qSfco1SSqVDmuSVUsqDaZJXSikPpkleKaU8mKN3hhohIodFZL+IzBORnHHWfSAix0TkiIjUdzhSpZRSyeZoT34VUM4YUx74A/gAQETKAC2AskADYELsPV+VUkqlHIeSvDFmpTEm2vZ0G1DI9rgx8Isx5o4x5k/gGF
DZkbaUUkolnzPHybcBZtoeF8Sa9GOF2Zb9h4gEAUG2pzdE5IgDMeQFLjjwelfRuJJH40oejSt5PDGuRxJakWiSF5HVQAE7q/obYxbYtukPRAMzkhuZMWYyMDm5r7NHRHYaY1Ld7UE0ruTRuJJH40qe9BZXokneGPP8/daLyLvAy0BdY/6ewzccKBxns0K2ZUoppVKQo6NrGgC9gVeMMTfjrFoItBARXxEpBpQAfnekLaWUUsnnaE1+HOALrBLrhF/bjDHtjTEHRWQWcAhrGaeTMSbGwbaSwillHxfQuJJH40oejSt50lVcYvQuSUop5bH0ilellPJgmuSVUsqDpakkLyLNROSgiFhEJCDeukSnURCRYiKy3bbdTBHJ6KI4Z4rIXtvXSRHZm8B2J0XkgG27na6IJV57g0QkPE5sLyawXQPbcTwmIn1TIK4Ep8eIt53Lj1diP7ttMMFM2/rtIlLUFXHYabewiKwTkUO234FudrapJSJX47y/A1Motvu+L2I1xnbM9ovIkykQ0+NxjsNeEbkmIu/H2yZFjpeITBOR8yISEmdZbhFZJSJHbd9zJfDa1rZtjopI6wcKwBiTZr6A0sDjwHogIM7yMsA+rCeBiwHHAW87r58FtLA9ngR0SIGYRwEDE1h3EsibgsdvENArkW28bcevOJDRdlzLuDiueoCP7fFnwGfuOF5J+dmBjsAk2+MWwMwUeu/8gSdtj7NhnUYkfmy1gMUp9XlK6vsCvAgsAwR4FtiewvF5A+eAR9xxvIAawJNASJxlnwN9bY/72vvMA7mBE7bvuWyPcyW3/TTVkzfGhBpj7F0Rm+g0CmId/lMHmGNb9D3wqgvDjW3zDeBnV7bjZJWBY8aYE8aYu8AvWI+vy5iEp8dIaUn52Rtj/eyA9bNU1/Y+u5QxJsIYs9v2+DoQSgJXkadCjYEfjNU2IKeI+Kdg+3WB48aYUynY5t+MMRuBS/EWx/0cJZSL6gOrjDGXjDGXsc4V1iC57aepJH8fBYEzcZ7bm0YhD3AlTjJJcKoFJ6oORBpjjiaw3gArRWSXbXqHlNDZ9i/ztAT+RUzKsXSlNlh7ffa4+ngl5Wf/exvbZ+kq1s9WirGViCoB2+2sfk5E9onIMhEpm0IhJfa+uPsz1YKEO1ruOF4A+Y0xEbbH5wB79yV1ynFLdfd4lSRMo5AaJDHOlty/F1/NGBMuIvmwXmtw2PZX3yVxAROBIVh/KYdgLSW1caQ9Z8Rlkj49htOPV1ojIlmBX4H3jTHX4q3ejbUkccN2vmU+1gsRXS3Vvi+2826vYJshNx53Ha9/McYYEXHZWPZUl+RNItMoJCAp0yhcxPpvoo+tB+bQVAuJxSkiPkBT4Kn77CPc9v28iMzDWi5w6JcjqcdPRKYAi+2scsmUFEk4Xu/y3+kx4u/D6ccrnqT87LHbhNne4xxYP1suJyIZsCb4GcaYufHXx036xpilIjJBRPIaY1w6GVcS3hd3TnPSENhtjImMv8Jdx8smUkT8jTERttLVeTvbhGM9bxCrENbzkcniKeWaRKdRsCWOdcDrtkWtAVf+Z/A8cNgYE2ZvpYhkEZFssY+xnnwMsbets8SrgzZJoL0dQAmxjkTKiPVf3YUujiuh6THibpMSxyspP/tCrJ8dsH6W1ib0R8mZbHX/qUCoMeaLBLYpEHt+QEQqY/39dukfoCS+LwuBd2yjbJ4FrsYpVbhagv9Nu+N4xRH3c5RQLloB1BORXLbSaj3bsuRx9ZllZ35hTUxhwB0gElgRZ11/rCMjjgAN4yxfCjxse1wca/I/BswGfF0Y63dA+3jLHgaWxolln+3rINayhauP33TgALDf9iHzjx+X7fmLWEdvHE+huI5hrT3utX1Nih9XSh0vez87MBjrHyCATLbPzjHbZ6m4q4+Prd1qWMts++McpxeB9rGfM6Cz7djsw3oCu0oKxGX3fYkXlwDjbcf0AHFGxrk4tixYk3aOOMtS/Hhh/SMTAdyz5a+2WM
/jrAGOAquB3LZtA4Bv4ry2je2zdgx470Ha12kNlFLKg3lKuUYppZQdmuSVUsqDaZJXSikPpkleKaU8mCZ5pZTyYJrklVLKg2mSV0opD/Z/oWiRAZ5b+O8AAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Parameter (name=fc.weight) [[2.0495164]]\n", - "Parameter (name=fc.bias) [3.0472562]\n" - ] - } - ], - "source": [ - "from mindspore.train.callback import LossMonitor\n", - "\n", - "epoch = 1\n", - "imageshow_cb = ImageShowCallback(net, eval_data)\n", - "model.train(epoch, ds_train, callbacks=[imageshow_cb], dataset_sink_mode=False)\n", - "\n", - "plot_model_and_datasets(net, eval_data)\n", - "for param in net.trainable_params():\n", - " print(param, param.asnumpy())" - ] - }, - { - "cell_type": "markdown", - "id": "changed-dayton", - "metadata": {}, - "source": [ - "After the training is complete, the weight parameters of the final model are printed. The value of weight is close to 2.0 and the value of bias is close to 3.0. As a result, the model training meets the expectation.\n", - "\n", - "## Summary\n", - "\n", - "We have learned the principles of the linear fitting algorithm, defined the corresponding algorithms in the MindSpore framework, understood the training process of such linear fitting models in MindSpore, and finally fitted a model function close to the objective function. In addition, you can adjust the dataset generation interval from (-10,10) to (-100,100) to check whether the weight values are closer to those of the objective function; adjust the learning rate to check whether the fitting efficiency changes; or explore how to use MindSpore to fit quadratic functions, such as $f(x)=ax^2+bx+c$, or higher-order functions." 
- ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/tutorials/training/source_en/quick_start/quick_start.ipynb b/tutorials/training/source_en/quick_start/quick_start.ipynb deleted file mode 100644 index 48464f47320c7d990419c25914eaf07b65beb26e..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_start.ipynb +++ /dev/null @@ -1,1026 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "alone-tourism", - "metadata": {}, - "source": [ - "# Implementing an Image Classification Application\n", - "\n", - "`Linux` `Windows` `Ascend` `GPU` `CPU` `Whole Process` `Beginner` `Intermediate` `Expert`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_en/quick_start/quick_start.ipynb)" - ] - }, - { - "cell_type": "markdown", - "id": "differential-arbitration", - "metadata": {}, - "source": [ - "## Overview\n", - "\n", - "This document uses a practice example to demonstrate the basic functions of MindSpore. For common users, it takes 20 to 30 minutes to complete the practice.\n", - "\n", - "During the practice, a simple image classification function is implemented. The overall process is as follows:\n", - "\n", - "1. Process the required dataset. The MNIST dataset is used in this example.\n", - "2. Define a network. The LeNet network is used in this example.\n", - "3. The loss value and precision value of the model collected by the custom callback function.\n", - "4. 
Define the loss function and optimizer.\n", - "5. Load the dataset and perform training. After the training is complete, check the result and save the model file.\n", - "6. Load the saved model for inference.\n", - "7. Validate the model, load the test dataset and trained model, and validate the result accuracy.\n", - "\n", - "This is a simple and basic application process. Other advanced and complex applications can be extended based on this basic process.\n", - "\n", - "> This document is applicable to CPU, GPU and Ascend environments.\n", - ">\n", - "> You can find the complete executable sample code at ." - ] - }, - { - "cell_type": "markdown", - "id": "entitled-worth", - "metadata": {}, - "source": [ - "## Preparations\n", - "\n", - "Before you start, check whether MindSpore has been correctly installed. If not, install MindSpore on your computer by visiting [MindSpore installation page](https://www.mindspore.cn/install/en). \n", - "\n", - "In addition, you shall have basic mathematical knowledge such as Python coding basics, probability, and matrix.\n", - "\n", - "Start your MindSpore experience now." - ] - }, - { - "cell_type": "markdown", - "id": "congressional-natural", - "metadata": {}, - "source": [ - "### Downloading the Dataset\n", - "\n", - "The `MNIST` dataset used in this example consists of 10 classes of 28 x 28 pixels grayscale images. It has a training set of 60,000 examples, and a test set of 10,000 examples.\n", - "\n", - "> Download the MNIST dataset at . This page provides four download links of dataset files. The first two links are required for data training, and the last two links are required for data test." 
- ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "viral-indiana", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "id": "vulnerable-eclipse", - "metadata": {}, - "source": [ - "### Importing Python Libraries and Modules\n", - "\n", - "Before start, you need to import Python libraries.\n", - "\n", - "Currently, only the `os` library is required. Other libraries are not described here." 
- ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "geological-peeing", - "metadata": {}, - "outputs": [], - "source": [ - "import os" - ] - }, - { - "cell_type": "markdown", - "id": "otherwise-determination", - "metadata": {}, - "source": [ - "For details about MindSpore modules, search on the [MindSpore API Page](https://www.mindspore.cn/doc/api_python/en/master/index.html).\n", - "\n", - "### Configuring the Running Information\n", - "\n", - "Before compiling code, you need to learn basic information about the hardware and backend required for MindSpore running.\n", - "\n", - "You can use `context.set_context` to configure the information required for running, such as the running mode, backend information, and hardware information.\n", - "\n", - "Import the `context` module and configure the required information." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "swiss-recall", - "metadata": {}, - "outputs": [ - ], - "source": [ - "from mindspore import context\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")" - ] - }, - { - "cell_type": "markdown", - "id": "metric-delight", - "metadata": {}, - "source": [ - "This example runs in graph mode. You can configure hardware information based on actual requirements. For example, if the code runs on the Ascend AI processor, set `--device_target` to `Ascend`. This rule also applies to the code running on the CPU and GPU. For details about parameters, see the API description for `context.set_context`.\n", - "\n", - "## Processing Data\n", - "\n", - "Datasets are important for training. A good dataset can effectively improve training accuracy and efficiency. Generally, before loading a dataset, you need to perform some operations on the dataset.\n", - "\n", - "A convolutional neural network such as LeNet is used to train the dataset. During data training, the data format is required. Therefore, you need to check the data in the dataset first. 
In this way, a targeted data conversion function can be constructed to convert the data in the dataset into a data format that meets the training requirements.\n", - "\n", - "Execute the following code to view the original dataset data:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "incorporated-fishing", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The type of mnist_ds: \n", - "Number of pictures contained in the mnist_ds: 60000\n", - "The item of mnist_ds: dict_keys(['image', 'label'])\n", - "Tensor of image in item: (28, 28, 1)\n", - "The label of item: 9\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAEICAYAAACZA4KlAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAANqklEQVR4nO3da6hl5X3H8e+vdhyJWtSYTiZqo7X2hQ12bE9NINJYbBNjWjRvJNMQRhDHgEIDFiKmECkVbGlMLWlDxmodizGGJOIUbKORgrUt4lEmOl4SjSg6jk6C2qiN4yX/vtjLcDye2+z7Oc/3A5u99rrs9Z/F/M6z9nr22k+qCklr3y9NugBJ42HYpUYYdqkRhl1qhGGXGmHYpUYYdi0oyRNJ/nDSdWh4DLsmIsmfJNmV5OUk/53kxEnXtNYZdo1Ukl9eYN4JwA3AZ4HDgH8Fdiy0robHsK8y3en1nye5P8n/JrkpyUFJzk1y17x1K8lvdNPXJfnHJP/Wtab/leS9Sf4uyQtJHkly8rzd/V6Sh7rl/5zkoDnv/cdJdiZ5sWuZT5pX4+eT3A+8skCIPwb8Z1XdVVVvAH8NHAV8ZIiHSvMY9tXpHOAM4DjgJODc/djuL4AjgX3A/wD3da+/BVw5b/1P0wvm8cBvdtvS/VG4FrgAeDfwNXot8/o5224GPgEcVlVvdH+c/nTO8sybDvCBFf471AfDvjr9fVU9U1XP0zsF3rTC7W6uqnur6lXgZuDVqrq+qt4EbgLmt+xfqaqnuv1cTi/AAFuBr1XV3VX1ZlVtp/fH40Pzanyqqn4GUFUnVdXXu2XfAz6S5LQkBwKXAgcC79qPY6D9ZNhXp2fnTP8fcMgKt3tuzvTPFng9/32emjP9JPC+bvr9wMXdKfyLSV4EjpmzfP62b1NVjwBbgK8Ae+idWTwEPL3Cf4f64AWRteMV5rSMSd47hPc8Zs70rwHPdNNPAZdX1eVLbLvk7ZRV9S16Hx1IchhwHnBP35VqWbbsa8f3gd9Ksqm7kHbZEN7zwiRHJzkC+AK9U32Aq4HPJvlgeg5O8okkh670jZP8bpIDkrwH2Abs6Fp8jYhhXyOq6ofAX9L7PPwocNfSW6zI14HbgMeBHwF/1e1rFjif3mn4C8BjLHORMMmDST49Z9ZVwIvAD7r3OH8I9WoJ8ccrpDbYskuNMOxSIwy71AjDLjVirP3sB2Z9HcTB49yl1JRXeYXXal8WWjZQ2JOcQa8L5QDgn6rqiqXWP4iD+WBOH2SXkpZwd92x6LK+T+OTHAD8A/Bx4ERgs/ckS9NrkM/spwCPVdXjVf
Ua8A3grOGUJWnYBgn7Ubz9Zoenu3lvk2Rrktkks6+zb4DdSRrEyK/GV9W2qpqpqpl1rF9+A0kjMUjYd/P2u6KO7uZJmkKDhP0e4IQkx3U/QPApYMdwypI0bH13vXU/NXQR8F16XW/XVtWDQ6tM0lAN1M9eVbcCtw6pFkkj5NdlpUYYdqkRhl1qhGGXGmHYpUYYdqkRhl1qhGGXGmHYpUYYdqkRhl1qhGGXGmHYpUYYdqkRhl1qhGGXGmHYpUYYdqkRhl1qhGGXGmHYpUYYdqkRhl1qhGGXGmHYpUYYdqkRhl1qhGGXGmHYpUYYdqkRAw3ZnOQJ4CXgTeCNqpoZRlGShm+gsHf+oKp+MoT3kTRCnsZLjRg07AXcluTeJFsXWiHJ1iSzSWZfZ9+Au5PUr0FP40+tqt1JfhW4PckjVXXn3BWqahuwDeBXckQNuD9JfRqoZa+q3d3zXuBm4JRhFCVp+PoOe5KDkxz61jTwUWDXsAqTNFyDnMZvAG5O8tb7fL2q/n0oVWnN+O4zOyddwkh87H2bJl3Cfus77FX1OPDbQ6xF0gjZ9SY1wrBLjTDsUiMMu9QIwy41Yhg3wmgVW6tdY3onW3apEYZdaoRhlxph2KVGGHapEYZdaoRhlxphP/sYDNqXPcjtlPaj92c13sK6HFt2qRGGXWqEYZcaYdilRhh2qRGGXWqEYZcaYT/7KmBf+fgtd8xXYz+8LbvUCMMuNcKwS40w7FIjDLvUCMMuNcKwS42wn30MluuTbbUf3eMyXsu27EmuTbI3ya45845IcnuSR7vnw0dbpqRBreQ0/jrgjHnzLgHuqKoTgDu615Km2LJhr6o7gefnzT4L2N5NbwfOHm5Zkoat38/sG6pqTzf9LLBhsRWTbAW2AhzEu/rcnaRBDXw1vqoKqCWWb6uqmaqaWcf6QXcnqU/9hv25JBsBuue9wytJ0ij0G/YdwJZuegtwy3DKkTQqy35mT3IjcBpwZJKngS8CVwDfTHIe8CRwziiLXO1a7i9eqi+95eMyCcuGvao2L7Lo9CHXImmE/Lqs1AjDLjXCsEuNMOxSIwy71AhvcR2CtdyFNOhtqKv12KzGn4peji271AjDLjXCsEuNMOxSIwy71AjDLjXCsEuNsJ99hdZqf/Fa7SdfzlrsR1+OLbvUCMMuNcKwS40w7FIjDLvUCMMuNcKwS42wn32NW6v95Np/tuxSIwy71AjDLjXCsEuNMOxSIwy71AjDLjXCfvaO/dFrT4v3rC9l2ZY9ybVJ9ibZNWfeZUl2J9nZPc4cbZmSBrWS0/jrgDMWmP/lqtrUPW4dblmShm3ZsFfVncDzY6hF0ggNcoHuoiT3d6f5hy+2UpKtSWaTzL7OvgF2J2kQ/Yb9q8DxwCZgD/ClxVasqm1VNVNVM+tY3+fuJA2qr7BX1XNV9WZV/Ry4GjhluGVJGra+wp5k45yXnwR2LbaupOmwbD97khuB04AjkzwNfBE4LckmoIAngAtGV6KkYVg27FW1eYHZ14ygFkkj5NdlpUYYdqkRhl1qhGGXGmHYpUZ4i2tn0KGNpWlnyy41wrBLjTDsUiMMu9QIwy41wrBLjTDsUiPsZ2/cNH+/wJ+CHi5bdqkRhl1qhGGXGmHYpUYYdqkRhl1qhGGXGmE/+wqt1T5f79Nvhy271AjDLjXCsEuNMOxSIwy71AjDLjXCsEuNWMmQzccA1wMb6A3RvK2qrkpyBHATcCy9YZvPqaoXRleq+mE/ut6ykpb9DeDiqjoR+BBwYZITgUuAO6rqBOCO7rWkKbVs2KtqT1Xd102/BDwMHAWcBWzvVtsOnD2iGiUNwX59Zk9yLHAycDewoar2dIuepXeaL2lKrTjsSQ4Bvg18rqp+OndZVRW9z/MLbbc1yWyS2dfZN1Cxkvq3orAnWUcv6DdU1Xe62c8l2dgt3wjsXWjbqtpWVTNVNbOO9cOoWVIflg17kgDXAA9X1ZVzFu0AtnTTW4Bbhl+epGFZyS2uHwY+Az
yQZGc371LgCuCbSc4DngTOGUmFWrPW6m3D02rZsFfVXUAWWXz6cMuRNCp+g05qhGGXGmHYpUYYdqkRhl1qhGGXGuFPSa8C3qaqYbBllxph2KVGGHapEYZdaoRhlxph2KVGGHapEfaza6S8Z3162LJLjTDsUiMMu9QIwy41wrBLjTDsUiMMu9QIwy41wrBLjTDsUiMMu9QIwy41wrBLjTDsUiMMu9SIZe9nT3IMcD2wAShgW1VdleQy4Hzgx92ql1bVraMqVNPJ+9VXj5X8eMUbwMVVdV+SQ4F7k9zeLftyVf3t6MqTNCzLhr2q9gB7uumXkjwMHDXqwiQN1359Zk9yLHAycHc366Ik9ye5Nsnhi2yzNclsktnX2TdYtZL6tuKwJzkE+Dbwuar6KfBV4HhgE72W/0sLbVdV26pqpqpm1rF+8Iol9WVFYU+yjl7Qb6iq7wBU1XNV9WZV/Ry4GjhldGVKGtSyYU8S4Brg4aq6cs78jXNW+ySwa/jlSRqWlVyN/zDwGeCBJDu7eZcCm5Nsotcd9wRwwQjqE3ZvaThWcjX+LiALLLJPXVpF/Aad1AjDLjXCsEuNMOxSIwy71AjDLjXCsEuNMOxSIwy71AjDLjXCsEuNMOxSIwy71AjDLjUiVTW+nSU/Bp6cM+tI4CdjK2D/TGtt01oXWFu/hlnb+6vqPQstGGvY37HzZLaqZiZWwBKmtbZprQusrV/jqs3TeKkRhl1qxKTDvm3C+1/KtNY2rXWBtfVrLLVN9DO7pPGZdMsuaUwMu9SIiYQ9yRlJfpDksSSXTKKGxSR5IskDSXYmmZ1wLdcm2Ztk15x5RyS5Pcmj3fOCY+xNqLbLkuzujt3OJGdOqLZjkvxHkoeSPJjkz7r5Ez12S9Q1luM29s/sSQ4Afgj8EfA0cA+wuaoeGmshi0jyBDBTVRP/AkaS3wdeBq6vqg908/4GeL6qruj+UB5eVZ+fktouA16e9DDe3WhFG+cOMw6cDZzLBI/dEnWdwxiO2yRa9lOAx6rq8ap6DfgGcNYE6ph6VXUn8Py82WcB27vp7fT+s4zdIrVNharaU1X3ddMvAW8NMz7RY7dEXWMxibAfBTw15/XTTNd47wXcluTeJFsnXcwCNlTVnm76WWDDJItZwLLDeI/TvGHGp+bY9TP8+aC8QPdOp1bV7wAfBy7sTlenUvU+g01T3+mKhvEelwWGGf+FSR67foc/H9Qkwr4bOGbO66O7eVOhqnZ3z3uBm5m+oaife2sE3e5574Tr+YVpGsZ7oWHGmYJjN8nhzycR9nuAE5Icl+RA4FPAjgnU8Q5JDu4unJDkYOCjTN9Q1DuALd30FuCWCdbyNtMyjPdiw4wz4WM38eHPq2rsD+BMelfkfwR8YRI1LFLXrwPf7x4PTro24EZ6p3Wv07u2cR7wbuAO4FHge8ARU1TbvwAPAPfTC9bGCdV2Kr1T9PuBnd3jzEkfuyXqGstx8+uyUiO8QCc1wrBLjTDsUiMMu9QIwy41wrBLjTDsUiP+HwkwTbYrVWKZAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "import matplotlib\n", - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "train_data_path = \"./datasets/MNIST_Data/train\"\n", - "test_data_path = \"./datasets/MNIST_Data/test\"\n", - "mnist_ds = ds.MnistDataset(train_data_path)\n", - "print('The type of mnist_ds:', type(mnist_ds))\n", - "print(\"Number of pictures contained in the mnist_ds:\", mnist_ds.get_dataset_size())\n", - "\n", - "dic_ds = mnist_ds.create_dict_iterator()\n", - "item = next(dic_ds)\n", - "img = item[\"image\"].asnumpy()\n", - "label = item[\"label\"].asnumpy()\n", - "\n", - "print(\"The item of mnist_ds:\", item.keys())\n", - "print(\"Tensor of image in item:\", img.shape)\n", - "print(\"The label of item:\", label)\n", - "\n", - "plt.imshow(np.squeeze(img))\n", - "plt.title(\"number:%s\"% item[\"label\"].asnumpy())\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "advance-relief", - "metadata": {}, - "source": [ - "From the above operation, we can see that the training datasets `train-images-idx3-ubyte` and `train-labels-idx1-ubyte` correspond to 60,000 images and 60,000 digital labels. After loading the data, the dictionary data set is converted by `create_dict_iterator`. View one of the data, which is a dictionary with keys `image` and `label`. The tensor of `image` (height: 28; width: 28; channel: 1) and `label` are numbers corresponding to the image." - ] - }, - { - "cell_type": "markdown", - "id": "right-principal", - "metadata": {}, - "source": [ - "### Defining the Dataset and Data Operations\n", - "\n", - "Define the `create_dataset` function to create a dataset. In this function, define the data augmentation and processing operations to be performed.\n", - "\n", - "1. Define the dataset.\n", - "2. Define parameters required for data augmentation and processing.\n", - "3. 
Generate corresponding data augmentation operations according to the parameters.\n", - "4. Use the `map` mapping function to apply data operations to the dataset.\n", - "5. Process the generated dataset.\n", - "\n", - "After the definition is completed, use `create_datasets` to perform data augmentation on the original data, and extract a `batch` of data to view the changes after data augmentation." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "fewer-corps", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Number of groups in the dataset: 1875\n" - ] - } - ], - "source": [ - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype\n", - "\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\"\n", - " create dataset for train or test\n", - "\n", - " Args:\n", - " data_path (str): Data path\n", - " batch_size (int): The number of data records in each group\n", - " repeat_size (int): The number of replicated data records\n", - " num_parallel_workers (int): The number of parallel workers\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define some parameters needed for data enhancement and rough justification\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # according to the parameters, generate the corresponding data enhancement method\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR)\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = 
C.TypeCast(mstype.int32)\n", - "\n", - " # using map to apply operations to a dataset\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=resize_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_nml_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - "\n", - " # process the generated dataset\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size)\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds\n", - "\n", - "ms_dataset = create_dataset(train_data_path)\n", - "print('Number of groups in the dataset:', ms_dataset.get_dataset_size())" - ] - }, - { - "cell_type": "markdown", - "id": "interracial-quality", - "metadata": {}, - "source": [ - "After the data augmentation function is called, the dataset `size` changes from 60000 to 1875, which meets the expectations of the `mnist_ds.batch` operation in data augmentation ($60000/32=1875$).\n", - "\n", - "In the preceding augmentation process:\n", - "\n", - "- The `label` data enhancement operation in the dataset:\n", - "\n", - " - `C.TypeCast`: Convert the data type to `int32`.\n", - "\n", - "- The `image` data enhancement operation in the dataset: \n", - "\n", - " - `datasets.MnistDataset`: Convert the dataset into MindSpore trainable data. 
\n", - " - `CV.Resize`: Resize image data pixels to meet the data size requirements of the LeNet network.\n", - " - `CV.Rescale`: Standardize and normalize image data so that the value of each pixel is in the range (0,1), which can improve training efficiency. \n", - " - `CV.HWC2CHW`: Transform the image data tensor, the tensor form is changed from `height x width x channel` (HWC) to `channel x height x width` (CHW), which is convenient for data training.\n", - "\n", - "- Other enhancement operations:\n", - "\n", - " - `mnist_ds.shuffle`: Randomly store data in a memory that can hold 10,000 images for shuffle. \n", - " - `mnist_ds.batch`: Extract 32 images from the shuffled 10,000 image addresses to form a `batch`, the parameter `batch_size` indicates the number of data contained in each group, and each group is now set to contain 32 data. \n", - " - `mnist_ds.repeat`: The `batch` data is replicated and enhanced. The parameter `repeat_size` indicates the number of replicated datasets.\n", - "\n", - "Perform the `shuffle` and `batch` operations, and then perform the `repeat` operation to ensure that data is unique during one `epoch`.\n", - "\n", - "> MindSpore supports multiple data processing and augmentation operations, which are usually used in combined. For details, see section [Data Processing](https://www.mindspore.cn/tutorial/training/en/master/use/data_preparation.html) and [Data Augmentation](https://www.mindspore.cn/doc/programming_guide/en/master/augmentation.html) in the MindSpore tutorials." - ] - }, - { - "cell_type": "markdown", - "id": "acceptable-implement", - "metadata": {}, - "source": [ - "### Viewing Enhanced Data\n", - "\n", - "Obtain a group of data from the 1875 groups of data and view the data tensor and `label`." 
- ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "dress-midnight", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tensor of image: (32, 1, 32, 32)\n", - "Labels: [5 5 6 4 7 0 1 2 2 8 7 4 3 3 6 5 8 9 6 6 9 7 9 8 2 9 0 2 4 3 9 3]\n" - ] - } - ], - "source": [ - "data = next(ms_dataset.create_dict_iterator(output_numpy=True))\n", - "images = data[\"image\"]\n", - "labels = data[\"label\"]\n", - "print('Tensor of image:', images.shape)\n", - "print('Labels:', labels)" - ] - }, - { - "cell_type": "markdown", - "id": "conscious-africa", - "metadata": {}, - "source": [ - "Visualize the tensor data and the value corresponding to `label`." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "sound-monday", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWAAAADsCAYAAABKZHxbAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAB7eUlEQVR4nO29d3gc1dm/f5+Z7VqrS5as7iK5AO7GNrgkYEwcIDahxkko5kcnjQSSvN+XQEgPyUtCMKSYFohDb6Y5gG1s3DtY7pYsyZZsq1tl28z5/bGrXi1tM5n7unRJmp3y2TNnnjnlec4jpJQYGBgYGIQfJdICDAwMDP5bMQywgYGBQYQwDLCBgYFBhDAMsIGBgUGEMAywgYGBQYQwDLCBgYFBhDAMsIGBgUGEiFoDLIRYLYRwCSEaAj/7I62pJ84mrQBCiOuEEHuFEI1CiMNCiFmR1tQbQohRgfJ9PtJauqPdfW/50YQQj0VaV3cIIRKFEK8H7v1RIcQ3Iq2pJ4QQdwshtgoh3EKIZyKtpyeEEFYhxLJAeZ4WQuwUQnylP8eaQi1ukNwtpfxHpEX0k7NCqxBiHvBb4FpgM5AeWUX94nFgS6RF9ISU0tnytxDCCVQAL0dOUa88DniAocAE4B0hxC4p5Z6Iquqe48AvgPmAPcJaesMElAJzgBJgAfCSEOJcKWVxr0dKKXv9AYqBHwK7gTrgRcAG3Ais67SvBEYG/n4GWAq8BzQAnwJpwKNADbAPmNjLdVcDt/Slz9B6xlrXA0vOBq2Bc1wHvAQ8CDwfrTrbnfcG4Aggok0rEIPf+Oa32/ZP4DfRprXTOX8BPBPtdbXTuXcDX+9rv/4OQVwDXArkAecFvkx/j/t/QDLgBjYA2wP/vwL8sWVHIcRSIcTSTsf/WghRKYT4VAgx19A6OK1CCBWYAqQIIQ4JIcqEEH8RQvSndRH2chVCxAI/B37Qz2tFRGcnbgCek4GnMMq05gM+KeWBdufaBYyLQq2DIaJahRBD8Zd1n72K/hrgP0spj0spq4G38Xdd+sPrUsptUkoX8DrgklI+J6XU8L+ZJrbsKKW8U0p5Z7tj7weGAxnA34C3hRAjDK2D0joUMANXAbMC15uIv9JFm1aAh4FlUsqyfl4rUjoBE
ELk4O+GPhulWp1Afadz1QFDolDrYIhkHTADLwDPSin39XXB/hrginZ/N+G/kf3hRLu/m7v5v8fzSCk3SSlPSyndUspn8XcJFhhaB6W1OfD7MSlluZSyEv9bPeq0CiEmABcD/9fP67QQ9vvfjm/h7+YW9fOa4dbaAMR22hYLnO7HNSNZrmdKRLQKIRT8Qzoe4O7+XHAwk3CNgKPdxdMGca7+IAExwGMNrYCUskYIUYZfX+vmQZwylOU6F8gFSoQQ4K/8qhBirJRy0hmeK1z3/9vAbwZ5jlBqPQCYhBCjpJQHA9vG04+ucg+E+7kaDCHVKvyVdBn+XuYCKaW3P8cNxg1tFzBOCDFBCGHDP0kSFIQQ8UKI+UIImxDCJIRYDMwG3je0DpqngXuEEKlCiATg+8CKAZ4rlFr/BozA332cADwJvIN/RvxMCXWZIoSYiX8IarDeDyHTKqVsBF4Dfi6EiBFCXAB8DX+rbSCEtFwDz5MNUPG/fG1CiIE2GkNdB54AxgCXSymb+9q5hQEb4MBA/s+BD4GDwLqBngtACPGkEOLJwL9m/DOfp4BK4B5gYafJA0PrmWsF/7jqFvytob3ADuCX0aZVStkkpaxo+cHffXZJKU9Fk8523AC8JqXsT3e+R8Kg9U78Ll0ngeXAHXKALmhh0Pr/8Hf9fwx8M/B3f+Yrwqo1MPZ/G/6GQkU7f/DFfZ6nf5O1BgYGBgbBJmoj4QwMDAy+6BgG2MDAwCBCGAbYwMDAIEIYBtjAwMAgQhgG2MDAwCBC9Munbp5ydVS5SvxHf7nHIIezRevZohMMrYPBuP/B54uiFYwWsIGBgUHEMAywgYGBQYSI9gXZ/+upWzwdV5KCVMF+Sifh83r0nYWRlmVg8IUnHM9e6A2wEKiJCTRP7Xt1RsWrY16zC+nzhVzW2ULVV13MHXGQWFMzn5SPpHhzMumpUzDXe2Dj7kjLMzAYFIrNhpI+lKaC1NZt9i2H0aprIEJRusJkwjN3PJ5razg/rQSn6uaT8pGUfJpMjicfrXBAqwx0S8gMsGKzIeJiISmeprw4Tt3c1OcxriYLBVscaPWdlyz972XuiIP8ethKktUYNidu4pmhs/hwbD6yOIbhGyOtziCaUWw2RNawjhvrG9Br65Bud2REtUOx2RDDs6m4MAnX/Hp0XcF1ys6YkmREfQPS64mMLmcMR2/Qee6c5UyxaliFmc2Jm3gw5muUarlk1zfiKzsWlGuFxAALkwmRk0nN5GSqxwm8mW6OzHih12M0qbPP6+Y+x0KIAgOsxMT0az+92QW6FmI1fqZZzUzL2Ehl2kfcnzufM1mlPJwIkwlhMoGiIL2+iD1IfSGsVoSqghAgZVRrPVOEyYTIGkbR4o6rLiYW6sRvKcdXdDRCyvy02IiKOUk4F1aw7dzXqdGamPrJXUhTBKemhEDYbCyb+Wyr8QX/s/fo8Jd58GuXsb9uNKnP16I3Ng76csE3wEKgZmVw7JJUsq86woZRbasyalLv8TAdiVdGtuBb/7RYaJ4zFt3c95K+zk8Po1XXhsYIC4Eiei6zaEXNysCdk4RmU7GVNyB37e39gHZlH85upxg9HFe6E80qUN2ye60B4xy1iO7rqLDbaRibzN5bO2bNGfPpt7DUpWCJsAFWszI4Pi+VrKuP8Nao99GkznFNUHDvcXwVJ/o+QaiQEun18uviBTw64iVGmhXMQgUg3xzDEznv8vhdx1lZOhvbf3YN+oUdVAOsJiUizGaKFmdw5VVr+UXqZ62faVLngNeF3sM65V6psM+TRrhXZ1NsNoQzxt9iA1BV9KRYnlj6J8ZYHL0fDMy57VYcH+8JytuwA0KgpqZgV6tRB7y2e/hRY2Mpvj6DRdes5eIhe1iy4QYKvpuIVlXdZV9hMqE4YxA2G+BPEKudPBU2g7fvew6enr2MuXadlU1mbltzA/lL2n2XQH2WLhd6Q2P0zU0E6ojoZISllAil+8aMxezDF+PAZrOhu1zhUNktpYsym
Ld4I39I3x4xDT2hVVXDRdXcu+oqHh3+MiNMdlThL884xc79SXv52tKd3Dd94aBfFsFtAb9i40fZ71NgfpVE1Yp/qdw24/uDy25Gqart8QFrfQDDSMWSSYy47gAPZb3Ruk1Bkm+2hVVHBwIP1o1rN7HAcQKn0veLIFrY+5vR/Pqi5SxynsSEynuz/sI9L10LF3U1wJ654zl6g85bs5aiISh0p/Pc7KloJ05GQHk3BOrzzetuJO9ZgenjbZFW1Ea7OjLacgK1XWKTjxtH88cN88h6u+thn0x5ijtSFrBv6ERSntgQRsFnIVe5WPDYXTw7/SkuCJE5CIoBVpMS4RUbf8h7heFmM1bhT520ze3hrr3fwPpEIsInsR3cjc/TS5M9DC2fFq3p9noUIbkm9jXmOg6Ra2ozci1vu+6o05t5vHoCH/zPHABiNhxGaw5OS8KUmUHFgmwmLdmNXa0JGN+2O/9oTS5/XjWf7Pd0zHVeFHYG5bpBxaaTYqpvHTuLV2BYTB3HO+1Wt3g6nmtreO6c5Yw2W9GRQHmX1ly4ON9Wz1/nPMvLm6a1bvtR2svkmCw8M+spPpk4mvePj6V6XRpZv1gfEY3QfR2xC0uHfUqtFah2je7c/OMUO8nWBjTL2dOrihRaVTWaOw+vNAGhGQocvAEWAmE286Ps9xlptraOl7zaEMuPt15J2itWHB9/DlKiR3DmVVitiDEjOHCfhXeHP05ioG7ahIpdOFCFQoPu4s3GDH7xwrU9n0cD+ylJ6sd+FzCtqSloLw5ps9CcKvj1sJWoiC4t31JXIjFHVeyrPwdNC1GVGDglD8zk+onrKDDX0ZK/UMHfo+hMc7LC9LQSplg1VGEGqXdoxYWbOMXOHHsTk4atbN2WoPi7nudbvYy37GCk9QQ/Kb46YhrVcQUcvSyJCV8r7LGOACjo6D6Bs7CS/OfuYN3iR0hV/ZPK95ZP4oMPppC7YVCJOwZF1S0ziL2kgisTtgL+5+61hkx+98w1ZNXvjJiu7sh6Q+VW27f43ZRXWRjTEPTzD8oAC5MJNSuDom9kUGB+FbNoSxpqUzwkxDVSea6D+pzxHY5L+/Q0yt5i9NPhqwRCVXGlxfDU9L93GNMB2Oz2suzUBaw6nI9+3E7+v8p7Po8uwe3BF+wxXwAhkCZIVrv3wNBQULz0a7xZmEyomcMoW5TZui1U5a7YbFQsmcRFl2/jxsQNDFXtAJT7GlhWO4W1q88lj47dXamCU3W3tpSjAaswY1W76rEKM1ZhJlFtAHP4X3ue+VOoyzNTP0oydsoRHspYQbLafYLezW4vvz96OXGbbchjFeS9FkPt9ZDqbxdhV71oFtAtakTCYOsWT0dZVMlPRr7LeIsHsOGVOtsbc8l58Ri+5n6nUwsLPpuCxerDJvqVY/OMGbQBduckceXVawNjvm2ca6nkByM/ZEtaXpfj3k45n+Th43CWNGMurcJXGgaHKiHQrILZNmjpmr3aEMuOphzeKx1D8+ZkMje4Mdc3oh3qb1bx6ENNSEBmp9GUPYT6HBMXf7PNWfjtlPPJdozCsqsIraYmaNcUzhhGXHeAe1M/ItvkaH25lWpWXi2aQN5rwW85BJs6vZldHjuv1UzpsP3KhK1Msrg6DAWFEzljPMULFSafc5ALEw8xy3GAEeauxrd9XXZ/mkzWmmqEw87pvBjM7XoWV8Rt54MxYzi9M5G4QWVFGxhVX3Xxh/x3mGOrxanYqNObWeNK5e295zKyaEf4BfWCnDGe8rk6S0Zu4xxLFeBEkzrFvib+t2QR0jt4ozy4IQhVxRNnCng7dGw5ZJucZA+p4bohXR905VLJ2gkjOHQoiZQtWSRtMCNcnqA5N/eXv5XO5sD+YTiLTSTv92E7GtA6anjHHaPIeb0n1JQUhNNBU0EqFdPMWCbVkJNQ02GWWblU8k7TDHKqh0KQDLDicKCNzOBX2U+Q184w1OnNbGnOp/5IPKmb214CpswMpM2CJ14Sa4rcLHxnjvskj
5bN4/BrozpsL/t6PA9nvcUYSw8HhpiihQ5uv/BDborfGRhGaBOiSZ21LhMagvu3Xoml0EFskU7q5zXIg0XIc0fhWlxDoqq2HpOluskYUkdpTFJ4v4gQmPJyWDh6F7Nsla1DJ8d9kn8cm0XqO9Y+ThBGAlr3XWPnzln/4Ya43aSqTrxS45DXzaMn51G8fCRDGwf/whj0GLDQoFJr7LHb3B2/T9sBaTt4Y5STh3Iuozgn3T+uGiTn5v7S6LWgNiv4HHBqoolTE9O63S9+v07CrmpkcVnUOeu3BIzUzxlO7QiVxnwP10xaz2+H7uyy7+/TdvDepLE07xiCrdA0aLcqxeGAkdkcWRhDfKf+7F6PhVeOTSL2oNIhqKX8smxcKYKYiZWMd5QM6vrBpNQXx46DOeQ/2nGCbdv4yZwY5mRMBEbclZgY5n15B0vid3Z5vlqMwZINd6G7VbLeUnB+uh+tsgodf0+obriT7VOexJ8E2c+Lp89hV2EOeUfCW4cVu53ia4fxrZg1WAOZ5TWps9+byp59WeQvj5KwTkVFTUmi+Nph/HLBcq6IOYFD8Zf9Ca2Zp6pnsfatiWQ9sT4oNWJwBljTsNR5ebDiy/wmfU2X2djeUIXCwpgGFk79N3WT/Z4FK0tnY/tod0hbmprUW7vI6857Dc7r+5gF+xdwdGUuqTvi+hdYMFCk5EzmoYTV2howIm85xfMFy5lg7b0lkZ1QQ3luHDGZw/AVD9wAKjYbjMymeGEiB7+9FOhoILY0D+fogTQyj/lovHhc6/Zr7/qQ2xN2E6fYMeiZlnubY1uNuZNXjldqHPB6+MHhayj47tFWH+uWUCDF4UBmDaV6dNdR3if3zCL3DR3zh2F0qRMCJSGel277A+Msdlpa8SW+JlbWzsReFiVrgikqptRkKucNZ8Xtvwv06NpsWqEngVe2TyY/iF4wg/rmusuFsm4nRy5LYcUn6Yy1lvd7JjtO0UhRrViFudW5edHSHfzgipthf1FIjLDilRzwusg323p1NevMuwXvQgGsblZ6DSwIJ8JqRRTkdQoY6bsb927Bu3zv5imsMk8j7dGBG2CRmU7RokT23ra028/vSTjKPYv+Cou6+/TsML5qUiKqVcMsfIRz5dau97atvDSpc8jr5geHr4GLyugcfylMJsjP5dB1cYEXY+QRFgt6UmwXb5g/Vc7l0xcmkfVo5Nz6WhECNSWJynnD2fTbJ2jx4mnBLb1U+JIRLrX74wfI4F89geCJ52ZPPSMfzqJbRnSIllOFQr7Zxh/feoo7b/8utk/2oDf1vYBPf9EbG7F9tJsfXHEzf3zrqX5FuXVmls3Xa2BBuFBiYmieM5Ynlv4psgEjX3ResfHu8McZYQrfC6Ove7vWZWLJhrv8Ld9ujvfMHU/JTTrrZz9C515JJDhb6qopL4fia4ex4vbf0dn4AjxwciorXppJwV8+C+pgVHDa/lL6o5fOwADnvWDjndpZ7L46g7cC60WoQmGMxYFuEmd0rn7LdLthfxF33PFdpEngubOKJ0b/q7Xb3jnIAuiwjyqUHgMLwsF3k1fjWWzio5SJDN2sDeglYtB/Mhx1JCq9B+YEE1NmBuWXZXPtXR92e28frcnlsQ8uJf/Zum57YC3BLc+es7zV77eFsUvvJGG/Ru6RBpTiI90a72DT2/dZsH8B5S/nkvlOKZEO8JYzxrPvGjs/X/Bih4lkgLwPlpD8iYX4Iy5yj5bhC7ILZ3AHX84gIEErPUb6f8yU+fK4cOGV/vHYFkIYpCPdbuxr9vjd0izn8K3h38cXaOB0DrIAKL26gPqCtq59T4EFQUGIXr97tsnBPSkfM/SyelZOHh0aDf2lD61nQjQ74rfwakMs92+9kqy3QmeMpc2CK0Vwe8JuOg/T3Fs+ibdXns+IFc3IwsMdPit5YCauTC/jC4q4PWM1U6xdo+AS92nEri1Cr61D6y0aNUi0Dxrp/H0u2H0lrteGkr76BNqxnn3uQ03VLTOoPleSM
Lya2/PWsSimnPZjvnnv30LOy4KYnUXoNbVoIVgLJKgG+ExWkJc+H3pxKanb7BwaldqvybBg0TK0EbvmEHHbYpAm/7hOd0EWUhPoUiFUoYhngioURpqt3Jqwma/F7qA/Y74ho76BxEKdseu/ecaH3jV2DVcP2dfaSotmR/wWdjTlYCl0+D0NQnURIZAq3U5QflI+kuSdEnX3YaTUMeVmtwbZzLtiCxfH7WGUuZIsk4JVtHX16/Rm5m67iaFF4XGl7C5oJE7p2Ko88XkqI7efRj9aFvYFjlqChnQLOOdX8OOcjUy2FZNl8rZ6O7SUWc7LAsfWYnxV1SFbcjYoBri7FeRXHDoHZCxxO3s+TrrdqLWNmE8PCYaMM0arrILKqn7vX6c3s841lNWHRjGS4DuNC5cHe4Xke+VtwQDfTV7dIbjBLFTSTU7SB3jnNru9rD0+HPupwb1Q9No64reUY6lLOeNjn/3OdKaPPdwanaUhOe21hX2NWmuJlV8XL+CN+Ar21KZjK/W3flrqc559XasHQoNmxdwQqDNh5tWGWKoPJjKipBm8XpQRuRyfl9oaZHN38ieBOuLv5rcElbxSPZWT7iE4n4lDKT4YlpZv+QVmci8s4dtD93QbNPJoTS6xhwVqRQ2+CPjVtwQNZTpquTphM+MtnkCQTVvLt1LTcDwfh2PzwZBn5giKAe68gny15uZwQwqlMcP7PjiKMWVmYHF4A2GIasidxmVdPSnbG1j1z7YFYc69rYxrhxTjFMGZwPj7ybk0b0om9fOaQbXppduNr+jogNaVLb5uMqdH24h0r2LYpx4qj2WxypGNqUmSfsRvEFrq8xznXmwi8i5SfyudjeOYgjQp6OeN4tRkJ1lXH2kXZNPRyLUGlbw6CtUjGfreDrQwLT3piZNcmb6Dbww50iF6sCVo5LEPLmXE7mb0uggkXRACYTLxUNYbAXc4Bej4XGlSxyMVnKXNkBiPmhjfdrhPQzY0oZ0K3oqNwVmMx2bjrVlLW7/U24057DqQTU5p992L9pkIpN2Cbo6+Ba+VmBjKL8tmatZeUtRmwEmpL449hzNC5jSu1dfD5s9I29y27T9Xj2Wm/QhZJhd2YRnwhFCd3oxX6qw6PIrsjW70UPky9wevQr1uQ5MNYZvg6g7zyq0kd97YTX12Sy+NPitKhGaLGr0WmtN1yi6y44nVyRhX0Tpx3R639OKSPna5M9hRmEf+n/zuXeF8zcXvFSwfO5XzRpQyvZ1tq9dd3LLxLvKfqUXuPYIeRcFM7fFPtOscWRiDydXxxWZqhISDPuxvRJMB7oY3KiYQv9NMzL5j+LrxZhBjRuBKi0GzCppSVLyZbV0RTepnFIwQClqc4NuCBtrdiDBr23wwj//Vr+AryZ/zZcehLrO0fdGSheSx6omUuhJQjtox10d2bQZ7qZmVtedyruWjM/4+kWCT28zm8mwcVSE2ZT0E4vQnYEiTOpvcZladHs/K8tERC24YuqGW/WPS2ZaRy3Rbaau2oz418tkuAmiIDgFZnUk3OTn47Se6bF/ZZOa21TeQ/0bwtITkLr1b8C4/veU83oy7kNynunZ99t9n4dnpf+UCW8cCaFm4XfHJyGVE7cEJPlSO2H0x6sZtnAb+9IMrKTzDDAJNuocin3/y4KP7ZmH7ZA95TZFfhDvr4fV80jgT62JvVGZE6MytW79F+jIrlg+iJFy2E0XeBpqkyo1r7yLvWYHz4204ORIRLf4sI39jrr3tZdUsPezzpIc92013SCkpdKcz3FR+RsN60RuI0QM/S93GPbevp/rWroKHqZJYpevYS0vWDOvB3RFZO7g3p/FQOWKHikqtkQcrvkzR5fEAWKt2RW23z2BwXP/TH5K0oYLRlYfQm5oi3YHsQEs9PHJZXNiz3XShXdDYxner+VnqJySo/fOlj95ADCnRqmq4647v8JXfrW6N87cKM+kmM6ndJOLs3PSv05t5rHoiH94/C9vB0K4F0RP9cRrPfTf4jtj9J
eu1MnZvG8/UEedTNdvDkUuW9bjvNreH2/fcRPxvHCgndvk3RkHrw6B3ZFk5ua9aGavfSeEdPYcRdw4YStx4GK2mJuI56w4+O4lHZrzMRGsjYO9UD3dGVFsrgaCxPd+byKLYiUjFP0TalKJ2+1ydFYEY0uvBvmYPL/3lYnZ9O5OHM99udT/pbZLl1YZYfl74VbxbEvwBEGt2RaTl25vTOMDR6gRSjnrRyiIR/+ZHKzuOqeIkqYcSGVKSzpgjd/a4r6kJ4o9omLbuQjcM71mD7nKhHCkh53UYI3q+v6HMyjIYpo0s5nzb8dY5k1rdTlXlEJK37om6XqNpy15M7ZbpdMbHdftcDf/Uja3wLAjE0JuaSFtxlM9ix3Dx2OHYh/RtSN1lTlK2Qub64/4AiCCu/XAmuIc6aRrr6uI0vmD/Akpr41HXx2EvrUaPYAtD+nz+4JXyE9gaG8krTuxxX7+7TGPYXI++6IQzTZ3e1IRyqJi8f/X8/IQ0K8sg2La2gC/n3IXJ7J93aK61Eb/dEtHsyz3RWZPe7Or+uaqsxlffEN2BGC34jh0n6x0nni1ONGvfA9zm+mbMpZVhX4i9M7pFwe5sam21u6WXB05OpfzlXBJLfDhKqhElkQuZ7ICuodXWQW1dpJUYhAjd5YKzMCvL8Dca8TrNyEBaJ9WtYzlZGZZ1JwZNhJ6roE/CaYUHUAuhv3OFkV6IA0B1azTV2lnZ5K84Fb5kVrw0k9x3y9DKjke05ftFxdQk+bx2GCvjPqNay2RfbSpOoiN1kZSSjxtHc8znH3Jy1dpQ3WeFGYksG3fTOaOeUWq9E/kwnyjAcqKB+O3J3MYNIEG4VAr+8lnEJtz+G4gr8lKyNpvbim8Ar4K9zBQx16kOSAnNLv5v1aVIu998JGw3YTlZZxgTg6BjGGD8rfbUwgOkttsWbZMGXzQsH2wl+4NIq+gerb6eUXdv6rgtQloMvthELg7UwMDA4L8cEQ3RKQYGBgb/jRgtYAMDA4MIYRhgAwMDgwhhGGADAwODCGEYYAMDA4MIYRhgAwMDgwgRlQZYCGEVQiwTQhwVQpwWQuwUQnwl0rp6QgiRK4R4VwhRI4SoEEL8RYgoyGXTDUKIhk4/mhDisUjr6g0hxCghhEsI8XyktXSHEOJ5IUS5EKJeCHFACHFLpDX1xNmkFUAIcZ0QYq8QolEIcVgIMSvSmrpDCLE6UEdbnqv9/TkuKg0w/gCRUmAOEAf8P+AlIURuJEX1wlLgJJAOTMCvu+flrCKIlNLZ8gOkAc3AyxGW1RePA1siLaIXfg3kSiljgSuAXwghJkdYU0+cNVqFEPOA3wI3AUOA2RAN4ZI9cne756ugPwf0aYCFEMVCiB8KIXYLIeqEEC8KIWxCiBuFEOs67SuFECMDfz8jhFgqhHgv8Eb4VAiRJoR4NNBS3CeEmNjdNaWUjVLKB6WUxVJKXUq5AigCeq0okdAaIA94SUrpklJWAO8D46JUa3u+jv/FsTZatQohrgNqgY/6+jKR0iml3COlbFm+TAZ+RhhaB33/HwJ+LqXcGLADx6SUva7cFSXPVb/pbwv4GuBS/IbmPODGMzju/wHJgBvYAGwP/P8K8MeWHQNfvttVqIUQQ4F8YE+Uan0UuE4I4RBCZABfwW+Eo1Fre24AnpP9i8YJu1YhRCzwc+AH/bxWRHS229YE7APKgXcNrQPXKoRQgSlAihDikBCiTPiH9uydLxBpre34tRCiMmC85/brilLKXn+AYuCb7f7/HfBk4Eut67SvBEYG/n4G+Hu7z+4B9rb7/1ygth/XNwMfAn+NVq3AGGAb/sXdZOB8Ihq1ttsvB/8SB3lRXK5/Au4P/P0g8Hw06my3nwpciP8hNhtaB64VGBY411b8Q3vJwKfAL6NNa+Dz8/EPk1jxN2xOAyP6ug/9bQFXtPu7CehvKtv2KVCbu/m/1/MIIRTgn4AHuLuf1wyr1oDG94HXgBj8FSUB/9hVVGntxLfwV8j+Ljwb7
nKdAFwM/F8/r9NCxMpUSqlJKdcBmcAd/bimobXn8zQHfj8mpSyXUlbib4EuiEKtSCk3SSlPSyndUspn8b8s+tQ6mEm4RqA1eZoQIm0Q5+qCEEIAy4ChwNellN5BnC6UWhOBbOAvgcKvAp6mfxWlO0Jaru34NvDsIM8RSq1zgVygRAhRAfwQ+LoQYiBplMNVpi2Y6GNctRcMrYCUsgYogw45RgezcE24y1UCfeZSGYwB3gWME0JMEELY8HcRg8kT+Lv2l0spm/vauQ9CpjXwZi4C7hBCmIQQ8fi7ILsHeMpQlytCiJlABoP3fgil1r/hNwwTAj9PAu8A8wdwrpDpFEKkCr+rlFMIoQoh5gPX049JQ0NrnzwN3BPQnQB8H1gxwHOFslzjhRDzA5N9JiHEYvweG33OAw3YAEspD+CfIPkQOAis6/2I3hFCPCmEeDLwdw5wG/4Hr0K0+dYtjjatAa7EP+h/CjgEePFXlmjUCv4XxGtSykGtOB9KrVLKJillRcsP0AC4pJRnnNs8xGUq8Xfhy4Aa4BHge1LKtwZybkNrh7r6MH73wwPAXmAH8Mso1GoGfoH/+a/EP368MHDN3s8TGEA2MDAwMAgz0RqIYWBgYPCFxzDABgYGBhHCMMAGBgYGEcIwwAYGBgYRwjDABgYGBhGiX0smzlOujipXif/oL/fo4Hy2aD1bdIKhdTAY9z/4fFG0Qj8NsIGBgcFgUGNj2fub0WDTQUjit1hJX12JVtinq+wXGsMAGxgYhBYhwG7jh3PfY5SlAlXo3KLdRFKhE7Uw0uIiS0QNsJqQgMxOwz00BnO9BzYONHrXwGBwKDYbSvpQmgpSu/3cXO/BXFqFr7QszMo60pdOANWtYTnREB2tSyFQkxKpnTucBTGvkWf2r2dji3OjWa2oEZYXaSJigNWUFITTQVNBKhXTzOjjGvAdc1BQNRwAWXoc3eUKmx7FZkPExUJsx8WOZOlxRFwswmFHmlSET0M2NKGdOuNoWIOeUFTUWCekJLVuisj9H55NxYVJuObXd7uPu2gImavSsUbIAKuxsYi4WHzpCZyc4OxRJ0DzaRsxhclk1zfiK+t1/fLQoqioKUk0Ts3FtbiGRPUsN7fd1NVukRLh8vSr7MNjgBUVxW5r/bd+znBqR6g05nu4ZtJ6fpaymdcb0/lF/bUA5L5iQTlSgt7UFHJpwmRC5GRSMzmZ2vyOTiG5r1ioPSee+hwFzQ6mJog7rDHkndNhNRBnG8JkAqGA1JE+X6/7qrFOPBNGcGxuW/3Ie14iikv7PHbQOq1WhKoicjOpmJOEc2EF2859HYBKrZE4xYZZ+I3G93KnsKpsGqFeQqsnnd7xI6g8z079CJ2McR11tmATKnZh4bCvmQdHXcaBmtEk/T1CBlgI1JQkmibnUHq1jyNTXgT8a6k36C58XhWhRXa+rOX+o/vrafv6psTEdNlfiY+jedwwymdaej+vDxwnZL/KPuQGWJhMKEmJNEzPbd0mbznF8wXLmWC1BrZYWDykisW3+heYH6vdSc7roBwqDrmhU7MyOD4vlayrj7BhVMfFi8Zqd5Ixt5Slw19hgtXKNreHOwoX4zs5BmXtjpDqOmNEHyvfhXHNDzUrAz3WgVLfhK/oaO87JydybI6Nvbe2JReYs+1WHOUnQ2+ARw/Hle7k5EQzIy49wluB+9+gu3iw4st8P/Ujsk32ViMcKcSYERy+ysq9897mrvhSADSp0yw9PFjxZZo1MwAz4w7xZcch8s1O/py9gvuXeCn7e2Q0q4kJNE3JoeQajSPznurw2ZuNGcjjNsz1jT0cHR5a7r/q0rAWV+IrLgEhEBYLzXPGops7PlNNKSrVs9wcmddTghk/lVoj9x+b36+yD5kBFiYTijMGkhOpmpHGpt8+0WkPa7fHARTesZSx3EneCy70I8WhkghA6aIM5i3eyB/Suy4zW3hHS0H7tU62Wnj3vGe4//fzKZseUllnhJqUiDCbe91Hq6pBej2h1
xIbS/H1GbjHNWP9PIGcx2vQ6nvuLkeSfd9z8PTsZcy1663bGnQXKxrTOXJZHL97ex6/TP+QZLVrayhcqEmJHLjPwj+nL+UCm4Imdep1F0d9Kvs8fp3aiZOosbH88e4rKbpmLb9I/SxieltonjqCUzc3cWTGC10+++Xz11LwwnF8IX62+6Ll/r9RO4mPX5hGxpOVKAnx6EmxPLH0T4yxOPo+ySAJmQH2zB3P0Rt0Vsx6HJvQ6f+i9AZnzCs2fpT9Pilqzy2Ku2//DrZP9oR8WGfvb0bz64uWM99xjLcnZ/NgxtcZdfemkF4zWFRqjTxY8WW/UTt5CrD1eUzIecXGu8MfZ4TJ331f6zJxy8a7KLj3OFLKgM62cl/kPIl/dUSDM8GVKin57gReuu0PKEjyzeG59yExwHWLp+O5tobnzllOvtmGKroPuHu0Jpelu+fgc5s4csmyUEg56zn47CSmjDjKznX55L3RiKmsiooF2Uxa0uYx8qO0l8kxWTD10qvQTaLvYYogaP3tjBeZ76ggTnGQZqpD2rQe95czxrP/Gju/XLAcgCJvA9f/9IckbjyM1hzaoaeDz07ikRkvM9HaCNjZ5vZw+56biP+NA+XETgCO3DuaBT/O5G9jn+e7yavxLDax2jmTrF+sD6m29mQ46khUQBUKj9bk8tgHl5L/bB2+Eyc7DivZdFJM9ViFueN3YWfYtLbQ8vz/7ZzldA62Hbv0TnJfr0I/XtH9wWGi/f3PSV7NrCsP4JJmRputXezVgv0LKPkgl6Q9/RsSE7rEXOftV9kH3QBX3TID05Wn+FX+20yxaqii+7fxveWTeHvl+STtllSP62oYZGhtRdTT4rj+2xkvMtN2jOcWTObVceNpaEpiatZefj1sZeu+CYq9x5dcKyEsz/Za/ca3XeLaHq7rmT+F4oUKd876D1fEnECTJpqkStKGCnxV1aD3bLiDwbSRxZxvO87PTszlze0Tce43E39Ew7R1Fy0DEqYte6msGku9tLLPlcb7e8eSvW0wmbHOnB3/OI/v3Gjh4cy3KXUl4jiuIAsPdzC+JQ/M5PqJ6ygw1wFOanU7VZVDSN66B73nU4eEzs9/ZwOcuE+DYxURm8RuX1cvdpwgTrHjFDpD1Ur/56Jtgi3v/VuwHDOTtEeSvesUsuR4/y+kaf0q+6Ab4OpzJT/OW8+FtjqsomszfsH+BZTWxqNtiyd7nQtzrYvagvgu+yQc0JGnQz9In3DAx2u7JzI+poRvx1Z2u88z9ansacrgXEcpC2JCq0mYTKhZGRRfn8EvLmoxaE5uit/G/CGf4ZIm0tQmktUoGdJp52Q/z15OnOIfN9vs9vL7o5cTv637GeO6PDMTxx3ihrjdOJQYNBmork3NITe+AENMbmxC8Gn5cIauUUlaV4ZsaERrZxhqvj6B83KKSFMbWVl/LrYDNhxb9hN6dW0Mffcon8WP4eKxwzGdspCx19s6lq/YbFQsmcRFl2/jxsQNDFXtbHZ7+U3x5cRttobdyNUtno6yqJL/zV/R5fmv05uZu+0mhhY1oIe4d9MjPdTVbR6NZafmsLakY2q8nJcFjuJKqK5Dr61Dut1BlxR0AyxjveRbKnAq/sLXpE6Jr4k/Vc4FoPzlXBJLfDhKqlFON9NUkIorq+PkUMn7uWTvOoUehskb567jJGRm8e/caXw79t1u91l+bBoHitPYMzydBSP7mNUfBIrNhsjJ5NglqSy6Zi1XOSuwCn9rMt3kJD1wtzTpoMjb0FqmLUx1FjHLfpRsU5iMcycn+4TAS2Gz28v/HllE+X+yyF5d2a3B8jkEeTFVpEZogmvV4VH8SCrUFiaRd7jJPwMeQJhMeOaOx3NtDbdnrGaYqtKgWTE3gFZZFVadvmPHyXrHSW1pIq54aE4yIS6dihTgcyiMW1zIvakfkW1yoAqFdY2jKNqeSX4P5R4K2pfXrwre4kJbXYfnv9jXxP+UXYHzmTiU4oNontBPBveoVQjmOg6QoLZNs
JV6k9hcnoP8PLbDvo6D5eghdocMugEW9WbWNIzGIw8C0CStrKydyacvTAIg851StGPlCIcD9+SRHJtr4ppJGzqcI6nQhyw5HpI3Tmd8pWXEFadztDqh288PeBs5cDQN5z4LexnGh8My2VebipOGoGsRcbHUTE4m+6ojgZnstuGbcl8DpZqVWs3RpUwBPLGwcXouSflvk21qKzdN6qx1mVDdetBd0YTFgp6b3sXJ/u8n51K6Kpuc92q6jcZSU1LwDoFYU1tLqF538Z/GsUgtPJ3mlBU2diSdS+oJHaHpqKOGt34mY2yU3KTz7DnLmWLVuu3JhQtTZgbS48VnFdSO1bFnNNCAvzFnMfl4PPs94pS2F26dZsfcKMDjxZSZEZZADMUZw9EbdJ7rVF5eqXHI6+bRk/M48HwBQ9/b0aGHES1kmauYMayYNb6O7oZN+ck46hrQQjgkFnQDnLhb8JyczTNDZvo3eBXsZSayHvVPXLS8S7SxuZRcYmXR/A38duhOvFKjTvffnEg7aLfn1+WX4txrIeGgD1OThZ94r8ZeZsLJkeBfLNZJbb7SxR+5QXfxdO1kXi6aSG2ls0uZKjYbpy8bz6lxDlzSDPgNcMsDsGTDXeRXNCC9wX2TC5OJxgwH26f8jfZO9qsPjyJroxt9195uj2u4IA9tTAPjHf5Wp1t62eRO4M+r5lPgCs/iALH/2kgswPTzODXZSfPQIYB/7kGzStbPfiTQOldwSy+NPitKaN2Su6DExFB+WTauFIF9WiV/HP0uC2M6v/jt1OnN2IQJqzAz0XGUtyZWU+xLx35Kkvp8LXpjCIfNhEDYbLw1aynjLHZaxnw1qXPI6+axU19m7VsTyXpifY9josJkQli7TiBLnw/p8YTch32a1cy0jI2QsbHD9rz6JeTIXGJ2m9GrqrsEawSDoBvgpGUb6CNQD4DyC5xcfsnGVuNb5HPx23J/xnFzvRe08I209Tbht/Hdcxn+qt9n0Q70HIEfDCESJG3joQFea8jkqZVfYsRLjaRu3trlMN/UMfhuruLFMS+0Bre0GN/vHb6Ggu8e9b/Fw0B/nOxLv6bx9NTnW/1v63QPL1ZOY9Tdm8I+aVQ+09mDH3jb0MhWt8rm8mwcVeFTJ8z+YIBr7/qQ2xN2d5zYDNBSTx6rnsiFMQcYb2lgYQwsnPpv6iY383j1BFaWzsb20e6w9CbbU+Jr4rFTF7P6zUm9eo0oDgciM52GMV2thuNYE2pxedCHfTQEXqmhdDND3H4yu2j+MvJYQtKwHOIPp2E9WtV3YNEZEpm1IGJj0axgV/0zynW63/iWTfe/3RV2hu1BVGw2fA4Fq7n7N5vmkOhOG8JkCn1kli5RPLDP2/Fh+d0z11Dw77IO45TtyX3kAL8etrJDwECd7uKRivlwUVlYJ43O1MlekzpVmuB4YxxwOqTaBsqduxcT+1wsznfDVC+FQE1K4PGlfw60Krsa3ybdQ5HPf2c/um8Wy666kB/O/IAFMXtJVFXiFDv3J+1l0dId/OCKm2F/UViN8J8q5/LpC5Nae2mdUWw2hDMGbWQGRxbGcPDbnQO1YPzm64l9biTOdxuDNqEopaTQnQ6Uo9K1Ze0QWuuCQeA3wsyHn544jzdfujDogUURMcDR5DResWQS4xYX8nj2e3RX0dctfoQZGXeT+/R4TB9vC6kWX9FRsv90kvueXthhe1b9TnzNzSG9dqRY6zKxZMNd/lZ6pMX0wCdTnuKOlAXsGzqRlCc29H1AiGkJGCm6PB4Aa9Uuxqy38Y51NC98+au4FtewfcqLqEIh32zjj289xZ23fzcsgTj9pWLJJEZcd4BfZT9BvALtexwtBL3cA4Erz82eiujOJ95hp/KC9G6iduFnqdsYfcPxoAcWhc0Aq0mJ8IqNdHs99yY/zfm2eqzCHnGncc0KydaGbrt4AKlqDGarD2nqOcghaEiJ3tSE3tnYnuEYWCTL9H+++SLbF+ZQ72t7oHQpONYUh3J1M/v+L
5dHprcFQGgIdLcatiGSzmS9VsbubeOZEzuly2cLfrWqtfv/cObb/OHmZlYnhSEQQ0q0qhruuuM7fOV3q1s1PFqTy59XzSf7Pd3v6H9iV9v+Ade0+I90mhqGM7x6CUcuWYYqFMZYHGEJxOkvJQ/M5KLLt/H91I8YYe7ZYydOsZNsbUCzBFG3lGgnTnZbFkJVSWpsYrq8neW/fKRDS9gqzH0GFg2EoBjgzqvdd7uPVePd4Y+TqMAQxdLqXjVM9bAgaw//nj+b3HXBUHPmqIGOZYPu4s3GDB56/RrWLX6kg4tUWAND+mlwW8r9vuRlDFHa/G0j6Yi/KKacBY5StE7du2odFjx2F7+f8kqrAzxAgbmOJVPXsezpC1r3Hf1/jci9R8KydoVWdhxTxUlM3SyV+NJfLmbXtzN5OPNtck0OLon/nPcyzwu5JgDp9WBfs4eX/nIxzybPQ6pgPyUZ8Vkzph0H/Y7+3dQTraoaR1ES5rJOY6phrL/3lk/ivfenkru+65BSyQMzmXfFFu5OWU2uqe+1FhQhQ6O9m7KTPh/aqSoSP5BcnnkfP7npRRbFlOMIPFtjLTVcM3krb//vTLIeDs5LeFAGuHPQQJqptsd9zUJjhKlrxFayaue6uC1UXepkbfXMLsfZT+kkfF6PvjP4s+Oe+VNomOBiqrMIAK/U2d6Qw4jltczIvJvnZi5jilVjVs4RPvrqOSTkziCuyIt5ZdeJsLDTzql8kuU0VtFWmTWpIDURckd86fPhONbE2PXfBODvk55jilWQoHR9sBIUnaemP8N4S3OH3sZQ1c6N8VuZMftg67ZfLr8J6yFTWAxwbzPbaSuOsvVLWVSkOxhhVnAIN5jD90rTm5pIW3EUrBakIhAuD3pdfe9eDVKC14fii1xr95PykSTtlih7i1sbAO2DRu5OWU2eqeclClp4pj6VN3dNIOdg8KIPW3QMW+F3h+1y73UNrbKSnBcdPJB6NWXzPuKm+J2kqjEMVe1cH7+JTy8Y3v3JB8CADXD3QQN9jed2LXCzUMk327g39SOs3+xa0CsOnQMylridA1XaM+UXmPn6ORu50F4KONGQ1HodyD0HyX1qPKvOG8s5lh38f6mrGXfRMd4bdw4la7PJXtnnqUNLDwEQ4UZ6PKhFxxn6d3+FvPvObzAlrZQYk5tsazUXxuxnmtVfJ1ShMNsGncfZzUIl0+Qk06Thll4eODkVtVkDPTSGriVowLajCK26ptfehu/YcZT9ubyRP5ms5LXEq1YSUk7D9PPClr3Fd+wMwl+jhER7E6cSkkiIj0NvaEBNSqR+zsguQSN9sfzYNBI3mYnZVUKwpr+FM4YR1x3gsMwn/SOrP9Ci8+SklPiKjjLypTietH0J1wVmbkrYTLbJSabJRUH8SYK1LP+ADXB3QQPb3B5qdTvD1NNkmZTWaJjOHPA2UqHFYBNeslQ36SYneWZnt0tCaiisSplG3ECF9oI7y8NX43aR2SlyTPp8mD7exsryMVw2ZBcTLQrTrMVMshdzy8kbwubg3i19ZBmo05s56MlCqQ/D8L6UaFXVWD7wj9/akqazJSkRqUJjhuSDSWO5N+cDVKGjIpll83V48Or0Zo77JKU+/92t8CWz4qWZ5JaWoYXA40RxOCA/l5KbdDItI3BsKe7TyX7Ypx5ezZ3I5OnFzLGXclXuTp5e+GWGb+zxkMHrtNkQWcOAM88OothsaAkxeGL9L7BQBuL0xOKMjfx8VgrmxiyStjloHB5P0w21XYJGeqMlAGrEfnfwnjUhECYTD2W9wY+u/jo1Ndkk1Nb7x4S7Y+Nu8mKn8GzMdAqml5M9pCY4Otox8Ke0m6CB+w5fRWllPNNzirlz6Cqmd7K/Lav3P1B2OduOZjPE2cxVuTu5NaGr4W2hzmsPnQO8V6Fet6HJhm7fyJXr0lmeej73JK0j0+QkTW1kSnYJB74SuUwDaqwT17lZXbIMgD+gYZ0rgacOzyDxs/B3QVuDGwB1bD7lxdncN
ukGhCqxOd18ev7fWkNAW7Q+UfYl9hzOAAnCpVLwl8/wnQ6+O5owmRCZ6Ry8Lo71sx/hfM93yJG5OLaCVlnZo3Eyr9yK85yZbDknj+uG1HBrwnYOz00JWguoW51Zwyha7M+90d/sMO2ze5ya7CRjnH+1sXrdxS0b72JUCAJxeuLbsZXETnuZh5yXcXh0Et5Md5e62ht1ejMPlF2Os9CC5WRoQqrfLXiXMaPuJHFT7+PQljWf4Zg4mR3n5XBdVBngQNBAe8rXZJKyX2ftl0Zz/pwipttKWz9ryTLQrJk59HQB+e8dxZeVzNMLv8zhuSk9XuaTQyNJDZEDvL3UzMracxln6TQbKwRISdbD63kpbwqXzPmMTJNGvjkm4pkGSEmi/EJrt6vyb3Kb+cnni4h5MY7Y5ZF1l9IKD5BaeIDUQIu96pIR1E7VSVDbtP7080U4Xowjf3lbczJUI6zCaqVhTBL7vvU4qohpdbLPEbk4Nuv+lnB3RliIsE5gCauVhrHJrRlCxmp3kvMmKId6N8JizAhcaTEdsntoUueoT6Xg3uP4Kk6E6ysAsDCmgYVT/w1T+39M+8CSg88WkP1OcciHYKTSzc1t5yEhxoygaZhOtjU0a4AEtZ/alkGiI+2zDGgnTpLEBv+YzrHjDN9Ir62JkYQu9U/Ww+v5tHomtYvtAT9gf3CImpriX+haSnApnPLF4pYn+zHGHVqEyYSMseGzdTUU5b4Gbvr0HnKfVjB9HML+8Rmi2G00TM8N+Fa2veRu3fot0pdZsXwQPq2KV3LA62pdo7po/jLGJ1yP1zmKuDVHoNnVwcm+JdOIz94WNBRuCu9YSn7MHeS9ZkY9fAzZ2NRlSKIla8az0//KBba2nlyz9LDPk44MYzqqgdI5sCT1k134QuyzrDkkWqITNSmxtVyF2YKa1LYuzP522UhCQcgHCrtmGYgu0pZtp7hiPF+6YSjbp7zIb9LXsOKTdJ6bPRXtxElG37+Xh++5ns+jINWLZ+54Sm7SWT/7ETo7rs954UfkL69BFh7qJr7HQG9sxPbRbn5wxc388a2nWtPNfDLlKaonaqxoGMf/rbq0o5N9INNIgflVElUrkQoaWrf4EWqvhx8fXUjx8m6CEjplzYDof+7a011giR4GD5jO5Tr0ud00zxnL40v/3LrPMFUS28NcVjAImQHu6jS+M1SXGhS6y4W5Ucft9ReFU7FxWUw569+poVmLAXSujXuFOfYjRDqtkjQpWKzubpdwVJsFSoMLX4jDpc8EU2YG5Zdlc+1dH3bYnvfBEnJeUrBvPRzW6Dfp8aCeqkVvN6YQp9iJU+CbsXspuPQ4L2+a1vpZS6YRq/Df93AEuOjNLpwbi5l+X1swQKoaQ6oKv895nY++k8+WG/M6HPOjtJc7uHV1l90jZLQLGpGmgY3VtGaQaBdYEg46l+vOm7PJsa1mtLkt6Kr93FAo7v/ADXAvY2Mt2S5GvNPOaXzAFwo9jv0nsb8/jAtjrmTdea/hVGw8NHRNazCBP9136BP09YY/g4Tgh2PXRFTHmSBtFlwpgtsTdtN+AsZSZsFx5JTfDSysgiR6XT3XPnkvmgVmLtjNT9PfZ4TZSYLqYI69iUm9ZBoJS4CLrvUYDJBrcvCNIUe40nmwwyEtOu8tn8Rr2yd3m90jlLQEjQwq0q6HwJKgIiV6/WmuffLeHsvVLBRU0XWy8NWGWH689UrSXrEGtVwHboDrG0gs1Fud8DuwM5bsdS5Muw6Hdim8IKEdryB1nYUKdSiX2b7Civz3OizYHA3U5ZmZOPYQVw/ZR8vwQ0uWAbfXROJeLSwZRPqLKSuTyplp2KZWdQi8WLB/AUl7JFTXha2l0x69uZmcF48hVYXNrvO4eEwB9iH9W6SmudZG/HZL6DNNdAoGeCi9GZO5H32FnbHkbfZgP9A1u0eoiZY1Jvqi5f63lOvFI/dzY/Japllt3fZv7y2fxCflI6k+lEjqJhjyycGgluuADbBeW0f8lnIsd
V09GGzl1YiScrQQuBOFAul2ox8pIU2XHFPz+N7NXdcFaKHOa+eTQyNDOjnYHd1lkKjUNBzPxxF/Wsexrzyq0r/L2BhcSQKTkHyvvK08y1/OJX37ibBkO+lemGxdUjBrhRnPZieatX9jfKpbD5lbVBfaBQN4nWak2vf4c8tz56utC4PAs5RO5bpq5iS2TMvmgvTu1/d+7/2pJO+SjChtxlxaiS/IS2MO2ABLtxtf0VEs3ayPGc3DDT0h3W70w8UMc3tYZZ7W436Kj5C5xZ0JmtRxSZX4tcX4Kk4ELVIoWIj6RhIOxFIjk1lFcuv2lowooV7asz9ohQdQC6HrKhC9HBMyNT2wcXe/p/4iXyvPIgLlmls5lprDiaxKSe52t9z1p/0h1adPh+QZi8hylNGK9PnwFZeQ9mj36+5GEqFBrdfRGsyiAxVabNS6GPlKy7CWlpHWeXtE1BgYdI++s5C4nfQaaRvKF5thgM8S7JU6aw6P4v7A/zr+RcxV39kx9mZgYNAVwwCfJcS9sJG4FzoHrZyO2kXMDQwM+kZEaxfWwMDA4ItOaOLrDAwMDAz6xDDABgYGBhHCMMAGBgYGEcIwwAYGBgYRwjDABgYGBhHCMMAGBgYGESJqDbAQIlcI8a4QokYIUSGE+IsQIir9loUQY4QQHwsh6oQQh4QQiyKtqTeEENcJIfYKIRqFEIeFELMiraknzgatZ9P9F0I0dPrRhBCPRVpXd5xl5TogexW1BhhYCpwE0oEJwBzgzkgK6o5AIb8JrAASgVuB54UQ+REV1gNCiHnAb4GbgCHAbKD7lUgizNmg9Wy7/1JKZ8sPkAY0Ay9HWFYXzrZyZaD2SkrZ6w9QDPwQ2A3UAS8CNuBGYF2nfSUwMvD3MwFR7wENwKf4b/ijQA2wD5jYy3X3Agva/f974K/RphU4J3CMaLdtJfBwtGkNHL8eWNLXfT8btRr3v+8y7XTeG/C/0ES0aT3bypUB2CspZb9bwNcAlwJ5wHmBL9Pf4/4fkAy4gQ3A9sD/rwB/bNlRCLFUCNE+qdyjwHVCCIcQIgP4CvA+fRMJrZ0R+CtQVGkVQqjAFCAl0KUrC3SV+pOu9mzRatz//mu9AXhOBixGlGuF6C7XRxmIvepnq+Kb7f7/HfAk/Xuj/L3dZ/cAe9v9fy5Q28t1xwDb8C+gJQPn68+bOqxa8ScKOwLcF/j7EsADfBCFWocFzrUVf1cpGf+b/pdfBK3G/e/7uWq3Xw7+1TXz+rGvUa4hsFdS9r8FXNHu7yb6nxytfS7s5m7+7/Y8QggF/9vjNfzpH5KBBPzjgVGlVUrpBRYCXw1c+17gJXpP9hwRrYHPAB6TUpZLKSvxv9UXfIG0Gve/f+f5Fn6DVNTPaxrlGgJ7NZhJuEagNW+PEKLz0q+DIRHIBv4ipXRLKauAp+mfoeiOUGpFSrlbSjlHSpkkpZwPDAc2D/B0IdMqpazBX4Hbdzn70/3sibNFq3H/u/Jt4NlBnsMoVz8DtleDMcC7gHFCiAlCCBvw4CDO1YFAa6cIuEMIYRJCxOMfr9o9wFOGTCuAEOI8IYQtMP7zQ/xd5mcGeLqQasVfMe4RQqQKIRKA7+OfaR4IZ4tW4/63QwgxE8hg8N4PRrkyOHs1YAMspTwA/Bz4EDgIrBvouQCEEE8KIZ5st+lK/APpp4BDgBf/AxiNWr8FlON3Q7kImCel7F+mx/BrfRjYAhzAP3O7A/jlF1mrcf87aAW/cXhNSjmopI1GuQ7eXhnrARsYGBhEiGgOxDAwMDD4QmMYYAMDA4MIYRhgAwMDgwhhGGADAwODCNGv1cXmKVdH1Uzdf/SXRU+fnS1azxadYGgdDMb9Dz5fFK1gtIANDAwMIoZhgA0MDAwiRFQucG7QO8JkQs0cRtmizA7bEw54idldjq+0P+HykaFu8XRcSQpS9f9vapLEFXkxr9waWWEGBhHAMMBnIcJkwp2bzMXf3Nhh+5sfn8/w6iRENBlgIVATE2ieOgIAz
7U1nJ9WglP1BzR9XjuMkrXZZK+MpEiD/1ZMWZl4s5Lwxlr6tb/QJeZ6D2wc6KoIna4flLP0hKKixjohJanvfaVE6BJf0VEwovN6RlFRkhKpHWnlD+nbWzcf8DbyWtIkdKuKGkF5HVBU1JQkmqbkcOrmJgCWnfMvJlp1rMIMwMq4z7it+IZIqjT4L8SUmYG0WTh1QRqVk3XsGQ39Os7nU/EdczD6ZG5QbFXoDLAQqClJuMZnUz6zH28XCYoHcv7RiHS5kM3NSJ8vZPLOSgJl2jg+g+pZbSHxdXozD5RdjrPQguVkJVoEJbZHjXXiOjeLkms0jsx4oWVr4Cf6ECYTwmrt/kNNQ/p8Rp08Q6KxTJWYGMovy8aVIrBPq+SPo99lYUz/DHCT7uH1xnR+deraVluFrvu/h8dzxgY5ZAZYTUqkcWoupVf7ODKvt0Xu2yjyNnBd2Y+wntZw7qlElh5Hd7lCJbErolePET8RbJ13LNOnANCkzmPVEzn4bAHZ7xTjO3Y8Yvq6kJxI+QXWvu9/FHR4FJsNkTWMhrHJ3X5uqfNhLa7EV1wS/Iv3p94NhgjVWcXhQGSm0zCm+x6wpc6H9WiVvyUZJoTVSvOcsVx714fcnrCbOKVjghVN6t0epwq/v4JDsbB4SBUzb/9dq61CgqOsEbW4HK2q+oz0hMQAKzYb9XNG0nRDLUemvNjv4/LMTjb99gkAxj5xJ3kv6OhHikMhsQPCZEJxxiBstl73k1JCswutvj7kmjrTXZlqUueA18VH980i9ZNd+Jqawq6rN4QuUTz+F2ueueta1m7ppcKXjHBFvkUsMtMp+kYae2/r/mXxvfIprPrnNNIeDa4BVmNjwW5DhMgISynRTp4KuxEWJhPk53LwujgOfvuJbvf56YnzePOlC8l5vCYsz5SwWhEFeTyx9E+MsTiANuOrSZ163cVxrfv74BAaiaraarDb2yqA0eu+Rfqy4Vg+iAIDXLFkEuMWF/J49nu0/5LRimfueI7eoPPWrN5bah83jub/Vl3KqLs3hUlZG53LtMX4/uCym7Ee3I3uHtAqfSHFV3SUnH80cl3ZjzpU1hYeODmVFS/NpOAvn9F9u+OLz97fjOaHc99jruNA0M+tISh0p/Pc7KloJ04G/fy94Zk7npKbdNbPfgR/koiu/Cx1G6NvOM6DGV8P+TOlxMTQPGcsTyz9E/nmrg2ttS4Tt2y8i4J7u+9B1szJw7W4hu1n0KDsDyExwJoVkq0NXZr3PVGnN/N49QQ++J85rdty95ajH6/o5ajBU/LATFIuKOeS9E/5krOQ0eYexqoCDIvdS8q8en76zJWMunFbSLW1p+SBmVx0+TbuTf2IOMVJnd7MY9UT+fD+WdgO7kZGofEFMGUMo/yybK6/u6uLw4L9Cyh/OZfcd8vwnR7UsrSDRs4Yz/5r7PxywfIe9/lu8mrOva2MD64ax9ZDucG7/zad0dbjfda9gTLcVM76d2rY+70JmLbuC8uQXt3i6XiureHZc5aTqvqNb5G3get/+kMsDTre26t4YvS/mGC1kmaqQ9rCM2uhm0Wg5dvGgv0LKPkgl5QdHkZVNOLr4UUV/5EP/XA6c9JvRSrgGaKw/JePkGd2kvfBEnJeUrBvPXzG8y+hGwMOtGkadBdvNmbwixeubf1s5oLd/DT9fUYEuqVeqXPUlYjj4z2t++hud8gH512ZXu7O+5j5jgpswoQamJnviQTVwUx7KZNGHCWcJsOV6eWS+M/INvkrj1fqlLoSsK/ZE5Ut3xakzYIrRXBr/Oe07wldsPtKXK8NJX31CbSyyI5Ze+ZPoXihwp2z/sOXHWW8cDqbh16/hnWLH2k1HgDZJgfXDilmpv0I/yuvCN79FxIV2TrGGGycwsZDQ9ewKG4CJjX0Qz1Vt8zAdOUpfpX/NlOsGg26h9caMvndM3eSvfIgsqmJ/V8voL6g3QsnxEPgvXG0OoHkPT5sawuRXl+PQzVaVTXidAOOvX6T6XQ4uDzzP
jQLDP/UjXV3MVp1zRlfP+R+wF6ps6Uhj7x/lQNQuigdk6K1huCV+xpYVjuFVR9PIK9xQ6jldMSsk6qeJk6xU+5r4JXTI3li76wOu9w1dg1XD9nX+jA6hCDO7AqbAa5bPJ3xBUWMtpxCFW0Gwaer6E0Nrfu0D24AEBrYK3Vi/7Wx8ynDgikrk8qZadimVnXpCZ34PJWR20+jHy2LqFeBZ/4Uihcp3DhjHTfE7caMYHtDDnmvNVB7PaS2K09VKDiFjRS1Maj3P36rlZv0m7HHdWyZWs0+Vk9+ut+9yN5IUB1IJTxWrvpcyY/z1nOhrY5qzcfTtZN56j9fouDFY/gqK0FKpCbQpQLoZJnqGDfiGFXfmB6RuqrrCopHR+9j/sSUmUHD+GHUjvKbTKmAO16CgPIZVpKdeQzZbj3jIKiQG2BFCBJMTbhzEjGv2YVrYhxfTdhFiuq/dKlm5dWiCeS91j83kGAhZ4wnMfk0QxQXYKFUs/LMkfMZ+veO40OPLPoKlTOGcFPCZrJN/U2uGjyqvurip8PWMkxVKfE1sLY5hw9rZvHJoZGMZEfrPnNHHCTW1Nx6XINmZXN5Do4TkzGv2RV2Q+fNSuLUFJ0/jnmnw/ZHa3KJPSxQK2rwRbD1LmeMp3ihwpKZn3Bj/FasQmVVcwpv7htP/uGj/PjoQjIdtVyVuIVJFhdOpfcJ2oGSvqqSpD1ONGu78wuBz6FwR/ICkq0NKKL/E2jZ1moujNnPNGvvvblQIGeMJ2F4NZNtxTgVG4VeLy8XTWTEi409ejpkmRRuyVjLD7+aRuy/QihO07DU+fjpifP4Weq2Vj/07MQaTuVlkZ6V2aPxNGVlUjk3i1MXeVh0bvcRm2/Gnc/wk2ceBBUSA2xqkhQ1JlHuayBRtfKlIYX88+bzGVU5ioWjdzHLVolT8XenazUHtZVOUjeHNxS1aKGDW3LXk2XyUqdrbGnOp/ZQIskfdHwLD4ubzsupEzn/vMNkm8JvMOaOOMhM2ymcSgxvNibx0PbLiPk0htQqHYTAlJfDwtG7+GnKpySobeNbbullU7KZm2++iVGVoxD7i8Lm0qfGxlKbbSd1ZGUX/8o/r5rPiM+aQUpMWZlgUpENTWinToVFWwtFCx3cfuGH3BS/s9X4PrxvASkrrMjGJoqXT2TPMNAvVchK/QhniFZN0QoPoBZ29Yy22WzsGzoRzSLOqIt+Ok9n/4yhTMsMc2+S7p+p+iPxpG7uuWXrVGxcaDvBnBEH+5VvfqBIn9/l7c2XL+Se29eTbvIb4MUZG/n5rBTMjVkkbTAjXB58x45jysvx9xqE4NTMoVTPb+bhyW+zeEhVh/Nuc3uo1e28Fj95QEFQITHAcUVedhTm8VryGO6KL2W2DdbOeozZpT/iJucRrMJ/WU3qNEkreMO/JtDcubtZEr+TZDWGT1zwz+LzSfysraYrMf7uvtAlXq+KS5qByI63bmnIw74phtS/rAf8fpbF1w7jWzFrOpSpW/pbu+3LfdSyYYiikrC0hLWxuVSOFyxKP9Rhe6XWSMbHYD54HM/oDCrPseOLgbjDGkPe93cB9cbGkOuDrvf/NwcvRXk9idjlG9CBlCc2IG6dQa3X3sFDo0lK6rw2ILQ9Nt3lIuWJMzOiwmQi/iuT2JCTCxEwwH09UxDwt1Ylimgr1XCUqfT58BWXkPuUi/1LYlHFaeIUC9+OrSR22ss85LyM4px0bJWStNe9FF87DD0wTO2YXMkjgWANt/RyWve0nvf2PTdRVTmEhO0mLCfromMSzvLBVnLFVB5PnsNdM58HIN3kbOcP6I+MK/Y18V7NBdjLIrskxWs1U2hemUra0xtACITFQvOcsehmgbzlFC8WLGdCIJpHB/RIzhq0IARKQjwv3fYHxlnstC/Tbe4MvNLE4iFVreU+59NbcRw/ERYDXD7TyeWXbOS3Q3e2bmvSPfz8xFzqs1TkrDyOXST54Zy3mW4/zN37rqfRNw7FK7H9ZxfS6+n55MFAi
A4G4JXqqTR/kEraU+tbPweYcstOfpn+Icmqf+ipTm9mfXMW2w7lkE9laDUOADUrg/ocE7kJZz4ZFGxan6mWMg3gmzqG1OR6YoUbsFKjNYWvTKVEer38ungBw2LquD55I+fb6lkYAwun/pu6yc08WXMerzVezIrbf9fFd71Ga2KTO4GXK6e2bov/jYPkrXvQXa4BRaCGzPIpXh2P20y5r4H0bsZO6/Rm/qfsCg48X0DWE+u7OUP4cCgefA4wDU0FVUVPim3nrA3gN76a1KnSBMcb4yCsfhBdERYLelIsSrswspYy3fFxAYpPsLiHoIJwo0mdoz4fB28ewcVPb+T/pa5rN1xiYcP4V9GW6uzzurlv+kJ8FSdCJ0YI1NQU7Go1KoI6vZmT7iGoHn85CrMFNSkBALtahdruZbvJFctP1n2d/CXRuXJb6aIM5i3e2GGNkGgj95ED/HrYSpLVGNzSy9uN2Tz44dfJD5NvvVZVDRdVcxy4belN/Pqil1nkPIlVmIlT7NyfdJD7f3sQ6Giz2mtt77OssHNQPuwhM8DmNbsYVTmK2aU/6jYSZvbWm4l9Npah7+2IuBP+z1K3cc/t66m+1T+CoyB7dNZesuEuCr57NKLrLfTkVN5SpsMP1lD89cQIKuyIKhTyzTb++NZT5JhMOBRH3weFgoDxvXHtJhY4TuBUHIzffH1rPSRQro8v/TMAeSY1clr/C4h0IM7o+/fy8D3X8/k1a/lF6me97hsqrSEzwPrUcRy+1s4vv/LvLp+1OC47Nh9EC+daDz1gFWbSTWZSA3HgPflkagh0t3rG8d6hoLNTeUuZgs6BW+L55Vf8QQUtDvCJGw+jNYe+rEsemMm8K7Zwd/IntG9FqELp4gTfHlUo5JlURqyoDmnQgBCC0ZYT2IV/yMbjNWFq0lCSkyi/LJtr7/qwNSiifT14tCaXxz64lNHP1Ea8wXAmhPv+94SalAiv2PhR2svEBTxK3LoJUzPoEQrE0U+fRnVDs9a3x8jtietovNLKx6bJZD0cvB57yGa/fEPMqGlNXBHTtTv51XM+59QEMzI9NVSXHxCqUHp1iNdk9CYQsZRZcBypQUhay90/yamStKHC/9LQQ99ud2V6uThuT2vQyJngUCw8NHQN3jgzhDBooH3gw51jP6HxO3Uc+kMSY7+9l9sTdnepB/eWT2LpW19hxKsu5N4jIdMVbCJx/9vjiYXmhdM48PRkDjyWzaPDXybPZMMsIr/2B/iDRmLnVXBlQschpXJfA/nP3sG/TyfQFJhwyzY5uCtlFZdfuZ4DT03hwNJpKEOGDFpDyFrAUhGYzRoOpetSlN9P/YgjlyRxlFyyTWPRdxaGSkaPrF51HjNzRnLxyP3cmLy2g99knd7M3G038cS5L7SuXbvZ7eX3Ry8nflv/Fm4OBVOdRbw5fjxx88Z1WUFM8QoaRidyaoKJi0d8hkOxtK3s1NQcvofPrBOruHp8kS3Yv4Cvp23ncufhDpFm4C/3L2+/maEVTaGZLJQS6XJx+Sd38c8L/8EUq8ZVQ/Ywc+xBXNJEmtpEnNJ1vuKT8pEk75Souw+jh3qCMBSE6f6vXnUef1tQyS0J25nqLGLj9Fwqxjr4x9QXMAuNESZ7yCL+zpS6xdNRFlXyk5HvMt7iodzn45XT41haOBuPy0T+8hr+x3YtD6Q1t9qIiRYr9ySt45I5n1Hhi+dXd19L7vIytLLjA66vofEDzsrkVJ6JnMS22Vi39PLAyancnriOXJODB3Pf5K4LFlNTkkjczlCo6J3hbzTidZpZNXMSW6Zlc0G6v2WjS8Fpn42mzxMoHp3MeZYTIGBdYwFF2zPJXx259XZn2Y9y5Xk7eOvE+aRvaFPxaE0u9lOSuuEmhkw7xZLkT9CkiWJfE/9bsgjp9YZFX+fglu7YW5zOriFZzHUcao0006ROsa+J/ym7AuczcSjFB9E8oTF0ekMjec8I7o77Br8Y9wZzbBopeFjbnM4btdlkW6u5J6Fj0
EBDs5W0Wl/Eusr9QR1XQGOmTpbNPzzWUqbhvP/D32jk1XHjmT/kM2bZj5KU/zYuaeYie0tdjbzxFSYTnrnj8Vxbw68K3vJH7Ok+ltWcz3PrLyD3DR3FK5GFhxj54thWG3FkbhI/z3uDaVYnmSYNt6xg77Vr2bR+CqaKk9FlgBsmDKP2fA/3DvPPFtZoTbzdmM2Kl2Yy5objXOUsYZrVxoKsPbw8fA6JKSlhd8Rn427MQG7lWGoOJ7IqpW0dWN0EckoDWeYq1IBLUok7kZhjAq0w+KtW9cb+2lTKUk3EKRrZJifz4z7jzezzYEObgXt811ySayWVE+HKzD1Mtlpo0F181JRP8fKRDG3cERat7R3xezLA4/KOMz6mlCGB0Fiv1DjkdfPoyXkceL6Aoe/tCOm8gPT5MH28DVvadB78xhUsGf4p+5rTeXP3eJyFVhpzNe5Z9NfW/Q94G3GdtqK6o3vkt/xLSYyacJRZjgOAhWbpCfv9Z+Nuao5MZ0X2BGbEHERBxyHcrGxq612eb6sPSnj1QFGcMRy9Qee5c5YzxaphFTbebEziue3TyXtVw/yhf5ElCR1sRDE5LHPObg1wsQozv0j9jDmxMwa1xkbQDbCwWqkaa+Lq8ZtZPKSKGq2JD5oyePDDr1Pwl894/7JzuNhxBKcCEx1H+deYJhouyMP+RpgNcAB9ZyFxOyGu9QsITENT+d3dbwT8axXc0kujz4oSgWULKtelszz1fO5JWkemyYkqdBRFR7O2tSZ8bpXqcYLYMVWMd/jXq63WfaysHEvKExvCNmnU3hG/J1bkvxf4y7/PCa2Zp6pnsfatiWQ9sT5sWmP/tZEq+wx+f+7lWGoV8ta5sW7YRePF42BR236/Lr8U597oyjTSGcVmo26aiz/lvsNkq//F55Iam+qGh/X+AyTuFjwnZ/PMkJndfv6PuU9zoc3VGgocVoRA2Gy8NWtp67MN/gCn2F1WzB92P7kmPz9AatYkNkzKDXqAS/AN8OjhNA3TybZWUac380FTBj9ZfyX5d29CBzYfmsT6lAzmOypYGAOOaS9wW/MN5L8RbCXBY5PbzObybBxV4W8FZT28npfypnDJnM/INGnEK81kJddyfHTbWGXR/GUdjgmrc/sg+dyTxCvbJ5P/i/D7gict20CHXA1DhnQJ+930zrnkvXocXxgSAwwIIfCd3zG4ASIXMNSlTDvx703TGT9sJVY1Aga4L3pYFF/NyqAut2uAiyb1QWdzCboB3vc9B0/P/htz7Torm7o6ro/+QTE/fWwRGdOf4oLQrG8SdG7d+i3Sl1mxfBCZlcVwKZzyxeKWJ5lstfDR2LdgbPe7RsK5fSDU6c1Uahr73LlRkREDIRAOO+4hUaClvwT8msf88TN+lvpJa3BLNAUMnQ10CMTqhqLFGVx5VUdf4ZaECIpPDirbSPhjgF+x8e7wxxlhiv5MGdFCNDiMB5vZW2/G+UIscauPUOAqjLhWU14OxdcOY8Xtv6NzFFRU0k1QSQvREjB0ttA5EKsziYpGomoF/K32YGajCakBPt9Wz1/nPMvLm6a1bvtRmt8XMFrcUdqj2Gz4zh/DiD98Tp4pelpCZ+IwHknn9uIf5rPgx8P429jnW9fO6I7WQJwth9Aqq3rcL5xIRaBb6DZ3XbTRUk8L/vB5wPi2dSVbAkbyn62LioChqEJKtKoa7rrjO3zld6tbk3J2DsTqTHtbFexsNCE1wHGKnTn2JiYNa0tJk6C0+QK+2hDL/VuvJOutKDHGqopniJmHhq6JuhDU9E8bWN1wPmOGTu91P8cJSdqOyHQ7TVv2YnlqPN8a/n18vXRwWjMIRJOBEN0v+yijYN2lLrSrp85O9bTUlYjjuIIsPBwhcdGN9Hqwr9nDS3+5mF3fzuThzLdbM/P01Sg84G3kgbLLOfhsAalrdgUlG03QDXD8Viu/yfkKjrw3mGY1YxXmbgfcn6lP5eGtXyX1HSvOTw9GTVdJqnRYVxdCnzW8P6iFx
aSVDUHaeg8EES4Pel19RLr0ustF7JpDxG2LQfbWg6isRqtviFi69P6yYP8CEg7oyNPhWSKzP6ixsXgnjqBkQdd6CqChoPgI/YpyZzF6UxNpK47yWewYLh47HPuQ/hnS5tNWnHusZL9THLQM5EE3wOmrKjkam8NdFyxuDW7ojjd3TWDoRyYS15bii5JuaDSj1ddDGFJ3DxatsgrOsvupJiTQPCIJV5bfaLUEDZW/nEv69hPoUVTuIi6WynPs/HDu210+2+z2svb4cOynIj2i3jOrDo9ifeIOvmyPbO/Hd+w4We848WzplI2kF1S3juVkJb5jwctjGHQDrBUeINtioqakY3BDZ3IOeInZXYqv7FiwJQSVs8UR32AQpCZRk28mJ+s4K5vMVPiSWfHSTHLfHVyYaSjwJzqFu+JLO2w/4G3kgaJrcH+aTOrnNRGf1OyJlBU2Ho69jOPDP2VPbTqmpsj1gnrKRtLrMUHWEJIx4C7BDT0QPdW6DaH5Mze08EDZ5TgLo9sR32CQqApCg6OlydxWcgPCpVLwl8/wRWPosZQIX8c6Cv56euzdHLLfrQx7tOaZ0DkAJv1I9Gb1DgeRTUURbWgaljov9x+b37rp0NMFZL9XHNRuh0F0oRUeILXwAO29QKO1BSlcHhwnZIc6CmdXPe0rWOO/CcMAt0N3uVDW7qCsnaNBEhuisqVu8N+Jr+wYSX8/RtnfO2436unZiZBRPhNtYGBg8EUlShxwDQwMDP77MAywgYGBQYQwDLCBgYFBhDAMsIGBgUGEMAywgYGBQYSISgMshLAKIZYJIY4KIU4LIXYKIb4SaV09IYQYI4T4WAhRJ4Q4JIRY1PdRkUEIkSiEeF0I0Rgo329EWlN3nG11AEAIMUoI4RJCPB9pLT0hhHheCFEuhKgXQhwQQtwSaU09cZY9VwMrVyll1P3gz1fzIJCL/yVxGf6VpXMjra0brSbgAPAD/FGNXwYagfxIa+tB73LgRfyL3l4I1AHjIq3rbK4D7TSvBNYCz0daSy8axwHWwN+jgQpgcqR1daPzbHuuBlSu/TlxMfBDYHfgYX0RsAE3Aus67SuBkYG/nwGWAu8BDcCnQBrwKFAD7AMmnsEX3A18Pdq0AucEjhHttq0EHo5CrTGAp30lBv4J/CbatA6kDkRSJ3Ad8BL+l0afBjgayhQoAMqBa6JNK2fRczXQcpVS9nsI4hrgUiAPOC/wZfp73P8DkgE3sAHYHvj/FeCPLTsKIZYKIZZ2dxIhxFAgH9gT7VpbdsFfgaJNaz7gk1K2XyxgF/63d7Rp7cAZ1IGw6xRCxAI/x99aOxMiUqaBbU34jUo58G60au1EtD5X7bedUbn21wD/WUp5XEpZDbwNTOjnca9LKbdJKV3A64BLSvmclFLD/2aa2LKjlPJOKeWdnU8ghDADLwDPSin3RaHW/cBJ4EdCCLMQ4hJgDtCfFd3DrdUJdF5bsQ4YEoVaWznDOhAJnQ8Dy6SUZf28ViS1Evh/CDALeA2/sYk2rWfTc9W6jTMs1/4a4Ip2fzfR/6RZJ9r93dzN/72eRwih4O8ie4C7+3nNsGqVUnqBhcBXA9e+F39XtD8PY7jLtQGI7bQtlv5lbjxb6kBYdQohJgAXA//Xz+u0JyJlCiCl1KSU64BM4I5+XNN4rkJQroNZjKeRdm8jIUTaIM7VBSGEAJYBQ4EFgRsyUEKqVUq5G//bueX864FnB3i6UGo9AJiEEKOklAcD28bTv6Gd7jhb6kAodc7FP1FY4peLE1CFEGOllJMGcL6Qlmk3mIARAzzWeK56pl/lOhg3tF3AOCHEBCGEDf/kQzB5AhgDXC6lbB7kuUKqVQhxnhDCJoRwCCF+CKTjH9QfCCHTKqVsxN81+rkQIkYIcQHwNfwtzIFwttSBUOr8G/4HbULg50ngHWB+z4f0Ssi0CiFShRDXCSGcQghVCDEfuB74KNq0wtnzXA2mXAdsgAMTOT8HPgQOAusGei4AIcSTQognA
3/nALfhr9AVQoiGwM/iaNMa4Fv4B91PAhcB86SUA1ppOgxa7wTsAa3LgTuklANqAZ8tdSCUOqWUTVLKipYf/MM8LinlqYGcO8T3X+LvFpfhn9l/BPielPKtKNQKZ89zNeByNZajNDAwMIgQURkJZ2BgYPDfgGGADQwMDCKEYYANDAwMIoRhgA0MDAwihGGADQwMDCJEvwIx5ilXR5WrxH/0l0VPn50tWs8WnWBoHQzG/Q8+XxStYLSADQwMDCKGYYANDAwMIoRhgA0MDAwixGAW4+n5pFmZeLOS8MZaetzHVt6AKKlAq6kJhQSDEKImJCCz03Cl93eRqY449p9EO16BdA8oqjTstK/PQpeY6z2wcXfENLSgeHXMa3Yhfb6wajEIHsExwEJgystBKgKE4NTMoVRO1rFnNPR4iNyZwLC1dizbNbT6zkvUGvSLTuXeG7L0OLrLFZTLyuw0yi5JQJ9eN6Dj7e8PI3WdBf1IyVlhhBsmDOPYXAVr7mm8XhVZHMPwjZHT0ILbZWFU5SiUpkAZ1jeg19ZFXZkqNhsia1iX7cLlQdbVR/T5V2NjEXGxSFvPjcUWhE9DNjShnRrQMh/dEhQDrNjtFF87DN0CCHBMruSPo99lYUzPBvje3Em8HXM+Ob7hqJsKkV5PMKQMHkVFsdv6tave2BhiMb2gqKgpSf5yt/a9e+4rFpQjJehNTYO+tCvdiT69jj0zXhjQ8RfGXMkJZShDhUAWlyG9vojcf2G1IlTV//KSskcdNaNMfG3uRv6Qvp1KrZH7c+f3a1HaYNJeQwvlvgZml/4Ik8v/8o3fr5OwqzqiZdoZxeGAkdkUXZnY5TN7hSRlewNs/iz8umw2UFW8E0dQeY4dV0rfx5gaIeGgD/sb0WSAhUCJHcJLt/2BcRZ7vw/7Q/p2ci+v4lHrV8g/PQK5a++gpQyagFFrmJ7b965eie0/uyJTyRUVU2oylfOGs+L235Fn7nsoYKx2Jzmvg3KoOGgt4YGy7rzXWGBdwNHEXFJ3xGErb4jI/Rejh+NKd6JZBapbRkxHf9E6Tdmkm5wc/PYTrf8v2L+AoyvblenuQPKQCC241WJ8ixcmsvfWpWhS7/D598vPZ80/p5K2OczChMB3/hg8Q8yUfBV+OOdt7oovbf24vU5VtJX5R80qt6y5ifw3giclJGPA/eWehKOce8XfWJJyAwXfTUSrqo6cGCFQU5KoumQEm377RK+7alJnn9fNfdMX4qs40eu+QSegs3Le8IDO/o3DFt6xlLHcSd4LLvQjxSGV2B/eLXgXCmBlk5nb1txA/pLwa9j3PQdPz17GXLseUR39QXVDpdtJnd5MnNJ9Q6elTFc3K9yy8QYK7k1FSol28lTYjbAwmSA/l0PXxXHw237je8DrQqdtqKysKR7VE+aXgxCoqSmM+eNn/Cz1ExLUjhmOmnQPRT6t9f9hqiRWsaEKBRWJYtFQk4JnqyJqgAFm2Xy8N+sv3PPStXBR5AywKS+H4muHseL239H/DCbh52zRaRBc0pZtp7hiPF+6YSjbp7zY676zbD62zH6c0o0Khe50nps9Fe3EyTAp9eOZO56Sm3TWz34ETdo54HXxg8tuRqmqbX0ZSK+XoY070Hs/VfAIGN8b125igeMETqWj8a3UGnmw4ssUXR7fum3/H9N5dvpTXGALja0avAGWEq2qhrvu+A5f+d1qbk/Y3fqG3ub2cNfeb2B9om38Z8GvVnXYRxUK8QoMi6nj+KDFDAw5Yzz7rrHz8wUv9qs7HynOFp0L9i9g39F0zsk7xluj3u913/Nt9fx21sv8+JmrGHXjtpBrU5MS4RUb6fZ67kt5honWRh6tGcNjH1zK6GdquxiDkgdmMu+KLdyd/AmRfOHpLhexaw5hcg1nePUSjlyyrMd9VaGQoDqIVXSGm8pZ/04Ne783AdPWfWEZfqpbPB3PtTU8e85yUtUYNKmjI1CqavGdOAUtXfwIDI0IIRhtOYFdtE26PVqTy59Xz
Sf7PR1znRflxK7Wz0b9NpFv33Qn35v3HvckHA26rQpKC1h6PdjX7OGlv1zMs8nzkGrg5E0Qf0TD8fHnrfsedSXh7TQWpAAK4b8ZVbfMoPpcScLwam7PW8eimHKg7cbkvX8LlmNmFK/Ale3hqknb+H3ajrDrBPDMn0LxQoU7Z/2ni87+Int3lOg3jv0nsb8/jAtjrmTdea9R7mtgzgs/QnUJkJBwQCe30suplFzGjOqY5Phni5dzRcwJHIpff5xiZ7rtGJNGHO1XZtBBIQTCbOZH2e8z1nKaOMWCVdgpdSXiOK4g9x7pcogr08vFcXvINjk44G3kgbLLOfR0AUlsCLXaLmhV1Tg2K+S6sxlTdCeaXbJu8SOkqjHd7q8KBaew8dDQNcz/SS6Wp8YTu+YQWmVVSHU2JytMTythilWjQffwWkMmv3vmGrLqdoKu9Xl8yJASvf4033jyB2jtHh/7KcmIz5ox7TgImobe7sUgCw/jOD6FMk8CcDTotipoQxB6UxNpK46C1eJ3i6LFbaMRrZ23gFfvvrKEC8Vmo2LJJHQLOOdX8OOcjUy2FZNl8uJQ/Nrq9GbmbruJnJcFjuJKGgoSKEtWcaqRc++pyzMzcdwhbojb3aozUmjHK0hdZ+EkQxlz+lt4XCbyl9cgWtyhTjeC243NaiVhS0et2xbmMt9xDEe7F0iiYuKS5EKeuHMhaU9tD3krLUVt7GC0NBQUH91PqJp1YhUXqlCo0GLYXpLFqPeOEhHPWynRqqqxbfWQV5yIjLExI/Nunpu5jClWDaswd3tYgurgb2Of51vDv0/cthgIsQGWKjhVN9Wam6drJ/PUf75EwYvH8DUPNrXj4NGbm8l58RhSbZtcEy4Pel19F6+musXTcSUpeKecZqLjaEj0BHUM2HcsUoMI/Uc4Yxhx3QEyHbVcnbCZ8RYPTsUGWNCkTrGvif8puwLnM3E4Nh9EZqRSNdbE6HOLuSx2JwNpeQ4WdVwBdfmS2UkHu23tPFqTS6krsXWWPM7UzETH0V7dAAeDdLvRj5Qw1OWhqSQFxSuRhYfQuwsI6DRH+fq+Ccwasp8v26sD5Q52YeEixwE++WYhxScCrbSq6ojN3veESzfjbTJHpJ53DsSQAqQqsO4zUTo1iYmWE9BLD2d1UwHmBgm+8LVASzUrLxdNZMSLjfiKQmPAzhgpe9YiBGpiAs1T/cmMPdfWcH5aCV+K28eF9lLqdJV1rqGsPjSKkQSnJxy+SbhA0EC8+ShqbzUlhCgOB9rIDH6V/QT55hj8gx826vRmjvsk+72pvFdzAQeeL2DoezvQXC5OXj+arIuP8nDuG0y2ht/4mjIzOHpZEuMmH2GW4wDtXwCa1FnrMvHYB5fiOO5vxekWaB6qs2liLgtHvxMyXdLtxld0FEugMvfXVKassPFw7GVQsKLVCKtCYYTZyePZ7/GlG4biLE5DnG4IS0BBua+B4oYkTE3RY+y7C1zoKbhJALnmStQeAnHa15ERu5vR60Ib9KCmpOAdArEmF7Wag9oqJ2m7P0eMGt5xxygJGukQiGE20ZQXx6mb/b7yy875FxOtOlZhpk5XWdWcwsP7FpD6Tj8c7/tJeAxwu6CBb8WswSrC73whTCZEZjpHFsYQ386d0i29rHMl8ETZl9izLwt7mYmsJ9ZDTAxKTAz15zfzp9x3OhhfHxoVmpNwJDStWJDNhK8V8lDGCka0m3jzSo1DXjdLNtxF/jO1yL1HkF4PprwcKuYNo+mc8L8s+kPsvzZSZZ/Bb6++lPj815jdLuYlTrGzfcqLzBl2K459pqA/nEJVkU5HhzG8F0+fw67CHPKOdB1+UGw2hCpRRPfz9EpMW29E+nxIj2fQrfaeAhd6D25SAz9dqddd3LnjVvKfq0fuOYQeYr/1hgvy0MY0MN5Rgip0FIsG+bldvk9ioU78lvKItoyF1Yp3/Agqz/MHYuhmiTfTzZHWAKO2ct3lsfObg5eivJ5E7PLgjf+H3hIGjG/VJ
SPaBQ2E3zgIq5WGMUkBx/W2B2erW+Wnny/C8WIc+cv98aXCbKF5zlh0syAtpZJY4Qb8bz2/4fPxu+KvoPoGH1XWu2jBlFt28sv0D0lWuxrf7x2+hoLvHu3gk9g4OhXX/Hq2nft619OF8n3RUyh0NwYpadkGyuNm8mbKJGa3i+wKKYqKkpRI1Yw0bO0M6pN7ZpH7ho75w64eGL6pY0hNrm+9/6rQ/U1OIRCWtjoC4ChrRC0uH5R/qGKzdQhcCAa1us6QN4cgyg+ja6Effii9QucfU1/gIrvGNreH3LQqihdmdvk+Yz79Fpa6lNYeVCQQo4dz+Cor987rGIjRHa/VTKF5ZSppT60PqoaQG2DFbqNheu4ZBQ2Ekzt3Lyb2uVic7+70uyAJgZqUwONL/9wuss9vfDWpc8jr5geHr4GLyghpdQ74LNrV6i5DNnW6i0cq5nfRoNhs+BwKVnPXsdgibwOKF9BD4HUZ0Co6GWEpJTS7omKtj4HUw9xHDvDrYStJDoy7q0hUq4ZpWDp6UixPLP0TYyx+X9LR675F+rLhWD4YuAEWmekULUpk723BMb4AeWYnm377BNP120lcKdEqK0M7tu5SOOWLxS1PMtlq4aOxb8HYrrtZzD58MQ5sNlvEIjP9gTh/Y66972fCoXjw2f1DFsGszxEPxIg0n0x5ijtSFrBv6ERSnui9a7HWZWLJhrv8rc5QiurDYbwnKpZMYtziQh7Pfg/oGC11/U9/SPbKg/gqK0OmdbTlBGq77v3HjaP5v1WXMuruTcG9ZoRoCXA4/qlAQZJv7t+aIdHA8l89wuVZ95Hzbzu+4pKQXWf0/Xt5+J7r+fyatfwitec1Hs7kuYsGfpa6jdE3HOfBjK8HtT6H1ACbMjMovyyba+/6sMd9trk93L7nJuJ/40BhZyjldEucYufhzLf56Dv72XJjHgB2tZo8U9cxNQ2B7lbDEjLdncM49FxeJQ/M5KLLt3Fv6kfEKW0tvCJvA9f/9Ickfngk6J4Fis2G7/wxFPzhcxY4umodFruXgkuP8+LG8znWFIdydXNkw827YdmUZ7nTshhH4nRil/e+xFn7AIf2awTkfbCEnJcU7FsPD+rFLMvKyX3Vylj9TgrvCF4rGPwt4Z/c9CI/S7maES/Hh2w5Tf30aVQ3NGsdXeJa6qGlQcd7exVPjP4XydYGNEtkJuQBRj/axEP/vpmfWXpeFt1zp1/rBKuVy2NKsMx7iZ8+c2XQgoZCZoDVcQUcvSyJCV8r5PaE3XRukbVQq9upqhxC8tY9IQ1JlG43zsJK8p+7o4vzeq7JwTeGHOFK50G/dgSOTq3OVxtiuX/rlWS9Fb417FVkhwcdei4vV6aXS+I/I9vUpluTOk1SJWlDBb5TVcF3gldVPEPMPDR0Tbet9ATVwRx7E5MyPqBahwWP3YXm9r/kpo7az5UJWwlXToCe7v8Uq8avznmdvybM5bNLJnc45r7kZxiidHypNOgu3mzM4BcvXNu6bfinbqy7i9GqB7e2te5yoRwpIed1GCPu7PuA3s5llngyPRTNb4uYWxRTzoPpLrxOM917DAeH9E8bWN1wPmOGTm/dpnoge+VBZFMT+79eQH2BFUXIXl3nQo0sPIztoKnXpVxLr/ZrBX99nmkvDWrQUFANcIvjslShMVNn7JQjPJSxokOLrD2b3V5+U3w5cZutIR8Hkj4fsvQ4I5ZbmZF5Nxarj7vHrebqIftIVWNwCluvI4M7mnKwFDpwfro/pMMPwmRCzcqg6BsZJCodr9RTedUtns74giJGW06him6CNJqaQxaBJFW6LGjSHqswY1XNJCg6T01/Bm8gTHKYeposk98NMBxInw95rIK812KovR5S1TZ9c2y1DM95jeOZQzocM8XagFX4Gw6b3V6WnZrNqsP56Mft5P+rvG3Hymq0+oag9C70piaUQ8Xk/WvgHiDSaad2bCxVo7wdtjsUCyazhlRDO/KoFhaTVjakwxq7QtP9w19SIjWBL
iOfjEd6PX2uZhi3ycavMr+KLe8NplnNOIQgzuyKLgMsTCY8c8e3Oi47VTdZtmpmOQ50cJ1qz2a3l/89sojy/2SRvboytGOqAXSXC7HnILlPjUc3W3lk0Vf4z5ix5Dr9kUFmoZFtreaehK4zsw2aFXMDIQ/jFCYT7pwkrrx6LYlqR3/DdY0FFG3PZNQn1YikxA4O47dnrGaY2jZs0hJU8r8li5Dejg9iJFCFEnA5a7nT/RvXDiZ6UxPq4WP8+OhCMh21XJW4hUkWF07FxhgLjOlSC9t6besaC/hw/XhyVngx1zeiHSoKnU6XCwZw/pZgjYZsO5XnCb533qoQqOsbrb4eomDiNRikr6rkwPAs1g8dxTRrcdDPP2gDrDgckJ9LyU06z56zvFNIZM/uZm/VTeLw9iyGr21CKzwwWBn9Rvp8mD72j9/kySmU5g6nyOE3ZLoZGnM17ln01w7HhM1ZP+AqVTvSGpjA6NhRLHEn4igXiFPVNE0f0Y3DuL812eKm9ujJeRQvH8nQxhCtXyElqluyullhls3XZbgkGpGNTRQvn8hhi2DLpdnckLOR4Za2lcJUoaMiu3yfEncizmKlW3e1SNBbsEbqyEquTD/UxbXqgLcR12krqjts64+d9WiFB3Acm0mpKxEoDvr5B2WAW4IbDl4Xx/rZLeNq/XsImzUzigZSEajtHNrDmWXCvHIrye3+V2JiaLx4HCzquF9vzvrBRI110jwmnepZXbufbunFo5vwOaBpcg4l12jdOoy3uMo9durLrH1rIllPrA/Z2Lr0+rCVN3DLxhvYMvvxXociogXd5Wqdda9qmMHvz70cfUg7tz0BqlXr8H3c0kujz4oSrgUg2mVl0ZtdoGv+Z81kAlVFWCzIrKFnFKxRpzfzQNnlOAstWE6Gp8fZF2ahoZv8fvfRkL2jO4TVim4GkxKap2hwBriH4Ib+0JIR4/+GXEpm4jggkGXio91BiSgaEIrS7aRAb876QSU5kfILrByZ13UGfKtb5WB9Cq5UjZJrNY7Me6rbUxT7mnj05Dy/8f1FcJ3GOyO9HuTufRTcm0rpRgWnomEW3UdkRSNJyzaQ1HmjEJiGpnJ8oyAh8FU2uc1sLs/GURX6lqMwmVCSEluzsjjXF6HX1KBmZeDOScITZ0KzKlSPVvoVrNGS3eGx6okcfLaA7HeKo2bNlmxrNU0ZOmJsGDPitJ9w64eNEaOH0zRMJ9vqH3rUocOi8oMl4hkx7ln0V1hE64r5P7jiZtRTtf7ViZqbw2eIhUA47LiHRKcBuXP3Ypr2xRM/trrHBbnr9Gb+p+wKDjxf4A+nDhNSSgrd6UA5Waq7NYPAF4Vbt36L9GVWLB+ELhOnMJlQnDGQ7I/Wa8nKMv1Ht5O0oYKibwzjyqt6963tTPvsDh/dN4vUT3bhC0JOwGBxT8JRCi5fxm2x4clEIswW1KSE1v/1+tN92pj2wRqa1KnSBMcb4yBI03BRE4ihCoV8s40/vvUUOoJrn7yXnH+XhdRpvD3RnmnikylP4Z6sYxUKPbn0zd56M7HPxjL0vTBmGQikvHlu9lSEEB0yCBj0H8/c8Ry9QWfFrMcDodL+Orj8V4/gkgrxih6YlO2fA1nn7A7Wql0hXwcimlFiYmieM5bHl/65dduZ2phQBGINqpmiN7twbixm+n23U+Qd/NKHqlAYY3Ew2uwfd0EJXytKKgLdQreZJpZNeZbqOxqpv356N0eGhzjFTqoa02M+sLwPlpDwVydDPjkY/tBOKdFOnMR34iSjfuvm22/dyWM1OeHVcJYjTQpWu5d8s61DHcwzO8k320g3OXtc77c9j9bkMvy127jq9u+x//tj8AXuSzSNsY5+tIklb9/KYzU5rRlRDj4zue8DB4luFow2W1t/fnLTi4x+tYzDL0zs8GyrSYmoq4aRudHJP2b7s6ZAaAKxBtcC1jW0U1UkfiC5PPM+fnLTiyyKKed/T0znte2TsZX2XGHsU6p4YMw73U4YqEIJW
vaGfiNEj07hU6wa09JL2JI8nthQaqisJv3TRPJyl3RwoO+LvPdv8S8ev3XwwQCDQsouGQQM+o8QXQNvgD6Hc9pnbuktu0O00L6OxCl25tnL0We8xk+WXs3o+/einw5dbpT2ZbkoppwFjlLmxO7jH5mzWwNxVKvGu8MfJ1GBIYGsKaEKxBr8EISuoVVWkvOigwdSr+ah9GZMe2LI2+zBdqS8x8OqitL4wZRv8tOM7lvOiXs15OnweUT0hlWYiTG5W1MthQqtvgH73nKS1uXA/L7375C5Y2txVCxiLr0eEgt9vPrhDN7OO2dA58gua0R2t7h7OJAS6XJx+Sd3YbV7EUJi2TQEW3l1SId1bOUNKBsSGCu/ecbHtmRuwevrMbtDNNFSR17aOZlxM8q4ynkch+LG1BC6B0z6fDiONTF2vb98/z7pOaZYBQmKgy/bqxnVLhDHLDRGmOytxvqZ+lQe3vpVUt+x4vz0YFA9SIIzBhxYZX7kS3F4nWasJ6sRJeVotXU9HpLs9hJ3KAlvbPeDhY795VGxitarDbHsaMrh3YNjSTwV4pFVXUOvqib+cBrfK5/S+65ScNI9pDVzh1ZdE3Hj24Jz13GGV/V8b/tCKTmC5olcl1lvaCTvGYFu9gfC2Mr99TmUiJIKMldKXJ+d+fyDfevhqLr//cG56zhD4zP5GV9j7dh9bCrPJvt9DzJEaYukx4NadJyhf/cvDH/3nd9gSlopMabuIw5fa/f3m7smMPQjE4lrS/EFORAruJNwG3djhn61FHylZYjSsh5DNcLd/hE+DVMjrGzqOGxy/9YrsRQ6SCzSSfi8LuSTW9Lnw1pcyap/TutjR1A9sjVzRzTR173ti0j7qLYP1oH+1efBotXUQE0NlgGskRPp8hoIvtIyktaArSadrXnjianSMX28MXSpeQP59FqWC7UlTWdLUmK/erU5B7zE7C7FV3Ys6LKixgsi0siGJhIO+rhtzQ0dtme9pfjXf6isCsuDKH0+fMUlpD3av5lZI6bJ4GzFV1qGtbSM1AhcO/ZfG89oPidUDULDAAfQTp3C/sYp8t/o5rOwqzEwMPhv4IvjLW9gYGBwliHCkVjSwMDAwKArRgvYwMDAIEIYBtjAwMAgQhgG2MDAwCBCGAbYwMDAIEIYBtjAwMAgQhgG2MDAwCBC/P/l7EYBDESDNQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "count = 1\n", - "for i in images:\n", - " plt.subplot(4, 8, count)\n", - " plt.imshow(np.squeeze(i))\n", - " plt.title('num:%s'%labels[count-1])\n", - " plt.xticks([])\n", - " count += 1\n", - " plt.axis(\"off\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "voluntary-hospital", - "metadata": {}, - "source": [ - "Through the above query operation, you can see the transformed images. The dataset is divided into 1875 groups of data. Each group of data contains 32 images. The resolution of each image is 32 x 32. After all the data is prepared, you can proceed to the next step of data training.\n", - "\n", - "## Defining the Network\n", - "\n", - "The LeNet network is relatively simple. In addition to the input layer, the LeNet network has seven layers, including two convolutional layers, two down-sample layers (pooling layers), and three full connection layers. Each layer contains different numbers of training parameters, as shown in the following figure:\n", - "\n", - "> For details about the LeNet network, visit .\n", - "\n", - "You can initialize the full connection layers and convolutional layers by `Normal`.\n", - "\n", - "MindSpore supports multiple parameter initialization methods, such as `TruncatedNormal`, `Normal`, and `Uniform`, default value is `Normal`. For details, see the description of the `mindspore.common.initializer` module in the MindSpore API.\n", - "\n", - "To use MindSpore for neural network definition, inherit `mindspore.nn.Cell`. `Cell` is the base class of all neural networks (such as `Conv2d`).\n", - "\n", - "Define each layer of a neural network in the `__init__` method in advance, and then define the `construct` method to complete the forward construction of the neural network. 
According to the structure of the LeNet network, define the network layers as follows:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "nervous-athletics", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "layer conv1: Conv2d\n", - "****************************************\n", - "layer fc1: Dense\n" - ] - } - ], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x)\n", - " return x\n", - "\n", - "network = LeNet5()\n", - "print(\"layer conv1:\", network.conv1)\n", - "print(\"*\"*40)\n", - "print(\"layer fc1:\", network.fc1)" - ] - }, - { - "cell_type": "markdown", - "id": "coordinate-hometown", - "metadata": {}, - "source": [ - "After the construction is completed, you can use `print(LeNet5())` to print out all the parameters of each layer in the neural network, or use `LeNet().{layer name}` to print the corresponding parameter information. 
This example chooses to print the corresponding parameters of the first convolutional layer and the first fully connected layer.\n", - "\n", - "## Custom Callback Function to Collect the Model Loss Value and Precision Value\n", - "\n", - "Customize a data collection callback class `StepLossAccInfo`, it is used to collect two types of information:\n", - "\n", - "1. Information about the relationship between `step` and `loss` values during training;\n", - "\n", - "2. Information of each 125 training `step` and corresponding model accuracy value `accuracy`.\n", - "\n", - "This class inherits the `Callback` class. You can customize operations during training. After the training is completed, the data can be drawn into a graph to view the changes in `step` and `loss`, as well as the `step` and `accuracy` changes.\n", - "\n", - "The following code will be used as a callback function to be called in the model training function `model.train`. The following visualizes the information collected during the model verification stage." 
- ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "extraordinary-twist", - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.train.callback import Callback\n", - "\n", - "# custom callback function\n", - "class StepLossAccInfo(Callback):\n", - " def __init__(self, model, eval_dataset, steps_loss, steps_eval):\n", - " self.model = model\n", - " self.eval_dataset = eval_dataset\n", - " self.steps_loss = steps_loss\n", - " self.steps_eval = steps_eval\n", - "\n", - " def step_end(self, run_context):\n", - " cb_params = run_context.original_args()\n", - " cur_epoch = cb_params.cur_epoch_num\n", - " cur_step = (cur_epoch-1)*1875 + cb_params.cur_step_num\n", - " self.steps_loss[\"loss_value\"].append(str(cb_params.net_outputs))\n", - " self.steps_loss[\"step\"].append(str(cur_step))\n", - " if cur_step % 125 == 0:\n", - " acc = self.model.eval(self.eval_dataset, dataset_sink_mode=False)\n", - " self.steps_eval[\"step\"].append(cur_step)\n", - " self.steps_eval[\"acc\"].append(acc[\"Accuracy\"])" - ] - }, - { - "cell_type": "markdown", - "id": "conventional-clarity", - "metadata": {}, - "source": [ - "In the preceding information:\n", - "\n", - "- `model`: computational graph model.\n", - "- `eval_dataset`: validation dataset.\n", - "- `steps_loss`: Collect the relationship between step and loss value, the data format is `{\"step\": [], \"loss_value\": []}`.\n", - "- `steps_eval`: Collect information about the model accuracy value `accuracy` corresponding to step, the data format is `{\"step\": [], \"acc\": []}`.\n", - "\n", - "## Defining the Loss Function and Optimizer\n", - "\n", - "Before definition, this section briefly describes concepts of loss function and optimizer.\n", - "\n", - "- Loss function: It is also called objective function and is used to measure the difference between a predicted value and an actual value. Deep learning reduces the value of the loss function by continuous iteration. 
Defining a good loss function can effectively improve the model performance.\n", - "\n", - "- Optimizer: It is used to minimize the loss function, improving the model during training.\n", - "\n", - "After the loss function is defined, the weight-related gradient of the loss function can be obtained. The gradient is used to indicate the weight optimization direction for the optimizer, improving model performance.\n", - "\n", - "Loss functions supported by MindSpore include `SoftmaxCrossEntropyWithLogits`, `L1Loss`, `MSELoss`. The loss function `SoftmaxCrossEntropyWithLogits` is used in this example.\n", - "\n", - "The optimizers supported by MindSpore include `Adam`, `AdamWeightDecay`, `Momentum`, etc. The popular `Momentum` optimizer is used here." - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "corporate-gasoline", - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.nn import SoftmaxCrossEntropyWithLogits\n", - "\n", - "lr = 0.01\n", - "momentum = 0.9\n", - "\n", - "# create the network\n", - "network = LeNet5()\n", - "\n", - "# define the optimizer\n", - "net_opt = nn.Momentum(network.trainable_params(), lr, momentum)\n", - "\n", - "# define the loss function\n", - "net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')" - ] - }, - { - "cell_type": "markdown", - "id": "metric-detail", - "metadata": {}, - "source": [ - "## Training the Network\n", - "\n", - "After completing the construction of the neural network, you can start network training. The network training can be conveniently performed through the `Model.train` interface provided by MindSpore. The parameters mainly include:\n", - "\n", - "1. `epoch_size`: specifies the number of batches of images that need to be traversed by each epoch.\n", - "2. `ds_train`: specifies the training dataset.\n", - "3. MindSpore-provided callback mechanism. 
The callback function `callbacks` contains `ModelCheckpoint`, `LossMonitor`, and `Callback` arguments, where `ModelCheckpoint` is used to save network models and parameters for performing fine-tuning.\n", - "4. `dataset_sink_mode`: specifies the dataset sink mode. The default value `True` needs to be set to `False` because this mode does not support the CPU computing platform." - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "tropical-prairie", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 125, loss is 0.13396409\n", - "epoch: 1 step: 250, loss is 0.1545082\n", - "epoch: 1 step: 375, loss is 0.12724978\n", - "epoch: 1 step: 500, loss is 0.034271903\n", - "epoch: 1 step: 625, loss is 0.13005787\n", - "epoch: 1 step: 750, loss is 0.010596659\n", - "epoch: 1 step: 875, loss is 0.008820764\n", - "epoch: 1 step: 1000, loss is 0.09243655\n", - "epoch: 1 step: 1125, loss is 0.054233808\n", - "epoch: 1 step: 1250, loss is 0.074425355\n", - "epoch: 1 step: 1375, loss is 0.005053058\n", - "epoch: 1 step: 1500, loss is 0.13170624\n", - "epoch: 1 step: 1625, loss is 0.028616104\n", - "epoch: 1 step: 1750, loss is 0.12095115\n", - "epoch: 1 step: 1875, loss is 0.013343395\n" - ] - } - ], - "source": [ - "import os\n", - "from mindspore import Tensor, Model\n", - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor\n", - "from mindspore.nn import Accuracy\n", - "\n", - "epoch_size = 1\n", - "mnist_path = \"./datasets/MNIST_Data\"\n", - "model_path = \"./models/ckpt/mindspore_quick_start/\"\n", - "\n", - "repeat_size = 1\n", - "ds_train = create_dataset(os.path.join(mnist_path, \"train\"), 32, repeat_size)\n", - "eval_dataset = create_dataset(os.path.join(mnist_path, \"test\"), 32)\n", - "\n", - "# clean up old run files before in Linux\n", - "os.system('rm -f {0}*.ckpt {0}*.meta {0}*.pb'.format(model_path))\n", - "\n", - "# define the model\n", - "model = 
Model(network, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()} )\n", - "\n", - "# save the network model and parameters for subsequence fine-tuning\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=375, keep_checkpoint_max=16)\n", - "# group layers into an object with training and evaluation features\n", - "ckpoint_cb = ModelCheckpoint(prefix=\"checkpoint_lenet\", directory=model_path, config=config_ck)\n", - "\n", - "steps_loss = {\"step\": [], \"loss_value\": []}\n", - "steps_eval = {\"step\": [], \"acc\": []}\n", - "# collect the steps,loss and accuracy information\n", - "step_loss_acc_info = StepLossAccInfo(model , eval_dataset, steps_loss, steps_eval)\n", - "\n", - "model.train(epoch_size, ds_train, callbacks=[ckpoint_cb, LossMonitor(125), step_loss_acc_info], dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "id": "hollywood-scanner", - "metadata": {}, - "source": [ - "After training, multiple model files will be generated and saved under the pre-set directory." 
- ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "accepted-retirement", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./models/ckpt/mindspore_quick_start/\n", - "├── checkpoint_lenet-1_1125.ckpt\n", - "├── checkpoint_lenet-1_1500.ckpt\n", - "├── checkpoint_lenet-1_1875.ckpt\n", - "├── checkpoint_lenet-1_375.ckpt\n", - "├── checkpoint_lenet-1_750.ckpt\n", - "└── checkpoint_lenet-graph.meta\n", - "\n", - "0 directories, 6 files\n" - ] - } - ], - "source": [ - "!tree $model_path" - ] - }, - { - "cell_type": "markdown", - "id": "tender-bradford", - "metadata": {}, - "source": [ - "The meaning of the file name: `{Customized name configured in ModelCheckpoint}-{The number of epoch}-{The number of step}`.\n", - "\n", - "> To use free control loop iterations, traversing data sets, etc., you can refer to the \"Customizing a Training Cycle\" part of the official website programming guide \"[Training](https://www.mindspore.cn/doc/programming_guide/en/master/train.html#customizing-a-training-cycle)\".\n", - "\n", - "### Checking the Loss Value of the Model with the Change of Training Steps" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "handmade-halifax", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAA7EUlEQVR4nO2de/wdw/n434+QIG4hUUQiSrTiTkp8XUpLBRVaVPBtUfdKVZXWrYpqq7RuxRdtUSXu6pe2CKlrRJBEIhK3JBK5kYiQq9w8vz9mT89+zmfPObvn7J7dc/Z5v1772t3Z2ZlnZ2fnmWdmdkZUFcMwDCO/rJa2AIZhGEa6mCIwDMPIOaYIDMMwco4pAsMwjJxjisAwDCPnmCIwDMPIOaYIWhwRuUxE7klbjloRkRNFZHjacvgRkStF5GMR+TCFuFVEtg7hbz8RmVFvOGkiIr08OVdPW5ZWxxRBCyAix4nIKBFZJCKzReQJEdk7bbmySLUCMsT9PYGfAX1UdZP4JDOM9DBF0OSIyLnA9cBvgS8BPYFbgMNTFCuTxFSz7AnMU9U5MYRlGJnAFEETIyLrA1cAZ6nqo6q6WFVXqOo/VfV8n9eOInK3iCwUkQki0tcXxgUiMtm7NlFEvuO7dqKIDBeRP4jIfBF5X0QO9l3fUkRe8O4dJiI3+5uhRKSfiIwQkU9FZJyI7FfhWXqIyKMiMldE5onITSXXy8lwkoi85ckwRURO913bT0RmiMgvvGac+4AngM0862mRiGwWlK5ees0VkWkicomIrCYiBwBP++6/K+DeQpw/F5E5noV2hIgcIiLvisgnInKRz38nEbleRGZ52/Ui0sl3/XwvjFki8sOSuDp56fKBiHwkIreKyFrl0rhC2gc+r3dtaxF5XkQ+85rDHvDcRUSu855xgYiMF5HtA8I+RkRGlbj9VESGeMeHisjrXhjTReSyCnJO9d5B4bxNs2eU/GaUoKq2NekG9AdWAqtX8HMZ8DlwCNAB+B0w0nf9aGAzXKXgGGAxsKl37URgBXCqd++ZwCxAvOsvA38AOgJ7AwuAe7xr3YF5XryrAQd6590CZOwAjAOuAzoDawJ7h5ThUGArQICvA0uAXb1r+3np83ugE7CW5zajSrreDfw/YF2gF/AucLIvzLL3++K8FFjDk3suMNgLbztgKbCl5/8KYCSwMdANGAH82vd+PwK299JlMKDA1t7164AhwIZe2P8EfhdSTn84lZ73PuBi7x3638tBwGhgAy/tt8XLNyXxrA0sBHr73F4DBvrk3MELf0fveY/wrvXy5FzdO58KHFCStyPnN9sC8kPaAthWx8uD44EPq/i5DBjmO+8DLK3gfyxwuHd8IjDJd21t78PcBNdEshJY23f9Ht+H+Qvg7yVhDwVOCIhzT1xh2U6hVZKhjPyPAT/xjvcDlgNr+q5XKyA7ePf08bmdDjwX8v79cAV9B+98XU/ePXx+RvsKu8nAIb5rBwFTveM7gKt817bxwtoaV/guBrYqScf3Q8pZCKfa894N3A5sXnL/N3AKox+wWpU8eA9wqXfcG6cY1i7j93rgOu+4F+EVQej8Zlv7zZqGmpt5QFep3vbtH92yBFizcI+I/EBExnrm9Ke42mfXoHtVdYl3uA7OivjE5wYw3Xe8BXB0IVwv7L2BTQPk6wFMU9WV1eQvkQEROVhERnpNLp/iaoR++eeq6udlwg2iK64mP83nNg1X4wzLPFVd5R0v9fYf+a4vLciPS8fSuDbzXZtecq1AN5xSHO1L3yc99yhUe96f45TOq+KaFX8IoKrPADcBNwNzROR2EVmvTByDgWO94+OAxwrvUUT2EJFnvWapz4AzaPv+whIlvxklmCJobl4GlgFH1HKziGwB/BkYBGykqhsAb+I+/GrMBjYUkbV9bj18x9NxNbQNfFtnVb0qIKzpQM8QCq1U/k7AI7jmqS958j9eIn/p9LrVptv9GNcUtYXPrScwM4psEZgVENcs73g2bdO0p+/
4Y5xC2c6Xvuur6jpEo+LzquqHqnqqqm6GsxRuEW/YqareqKq74azMbYDzCeZpoJuI7IxTCIN91wbjmrd6qOr6wK2Uz3+LccqvgH/UVpT8ZpRgiqCJUdXPcG3RN3sdkmuLyBpeLfnqEEF0xhWMc8F1vOIsgjBxTwNGAZeJSEcR2RM4zOflHuAwETlIRDqIyJpeR+rmAcG9iiv0rhKRzp7fvUKI0RHX9j8XWCmuE/lbVe75CNhIXEd70HOtAh4EfiMi63rK8lzveZLgPuASEekmIl1x77MQ14PAiSLSx1O4v/LJ+QVOiV8nIhsDiEh3ETkoSuTVnldEjva9s/m4/PKFiHzNq82vgSugPwe+KBPHCuAh4Bpcf8bTvsvr4izLz0Vkd5zFUI6xwEAvj/cFjvJdi5LfjBJMETQ5qvpH3Id7Ca5AnI6r4T8W4t6JwB9xlsVHuE67lyJEfzyuXXoecCXwAM5CQVWn44awXuST63wC8pxXGB2Ga7P+AJiB67iuJv9C4GxcQTYfV4gMqXLP27jCd4rXhNBu1BDwY1zhNgUYjqu13lFNnhq5EqdQ3wDGA2M8N1T1CVyb+TPAJG/v5xee+0gRWQAMA75SgwyVnvdrwCsisgiXtj9R1SnAejhFNB/XlDQPV9CXYzBwAPBQSRPgj4ArRGQhTgk+WCGMX+IGBswHLsdnWUTJb0Z7CiMvDKNuvKGFb6vqr6p6NgwjM5i2NGrGax7YStwY+/64GtljKYtlGEZEbA4Pox42AR4FNsI155ypqq+nK5JhGFGxpiHDMIycY01DhmEYOacpm4a6du2qvXr1SlsMwzCMpmL06NEfq2q7nw6bUhH06tWLUaNGVfdoGIZh/BcRmRbkbk1DhmEYOccUgWEYRs4xRWAYhpFzTBEYhmHkHFMEhmEYOccUgWEYRs4xRWAYhpFzTBEYRhDjx8OIEWlLYRgNoSl/KDOMxNlxR7e3ubiMHGAWgWEYRs4xRWAYhpFzTBEYhmHkHFMEhlEPkyfD8OFpS2EYdZG4IhCR/iLyjohMEpELyvj5nohMFJEJIjI4yI9hZJKtt4Z99klbCsOoi0RHDYlIB+Bm4EDcUoavicgQVZ3o89MbuBDYS1Xni8jGScpkGIZhtCVpi2B3YJKqTlHV5cD9uAXO/ZwK3Kyq8wFUdU7CMhmGYRg+klYE3YHpvvMZnpufbYBtROQlERkpIv2DAhKR00RklIiMmjt3bkLiGoZh5I8sdBavDvQG9gOOBf4sIhuUelLV21W1r6r27dat3UprhmEYRo0krQhmAj1855t7bn5mAENUdYWqvg+8i1MMhmEYRgNIWhG8BvQWkS1FpCMwEBhS4ucxnDWAiHTFNRVNSVguwzAMwyNRRaCqK4FBwFDgLeBBVZ0gIleIyADP21BgnohMBJ4FzlfVeUnKZRiGYRQRbcJJtfr27aujRo1KWwyjlRFx+2rfR1h/hpEBRGS0qvYtdc9CZ7FhGIaRIqYIDMMwco4pAsMwjJxjisAwDCPnmCIwDMPIOaYIDMMwco4pAsMwjJxjisAwDCPnmCIwDMPIOaYIDMMwco4pAsMwjJxjisAwDCPnmCIwDMPIOaYIDKMVePddNxPq44+nLYnRhJgiMIxW4OWX3f6BB9KVw2hKTBEYhmHkHFMEhtEK2MI4Rh2YIjAMw8g5pggMoxUoLJlpGDVgisAwDCPnmCIwjFbA+giMOjBFYBiGkXMSVwQi0l9E3hGRSSJyQcD1E0VkroiM9bZTkpbJMFoO6yMw6mD1JAMXkQ7AzcCBwAzgNREZoqoTS7w+oKqDkpTFMFoaaxoy6iBpi2B3YJKqTlHV5cD9wOEJx2kY+cUsA6MGklYE3YHpvvMZnlspR4rIGyLysIj0CApIRE4TkVEiMmru3LlJyGoYzY9ZBkYNZKGz+J9AL1XdEXga+FuQJ1W9XVX7qmrfbt26NVRAwzCMViZpRTAT8NfwN/fc/ouqzlPVZd7pX4DdEpbJMFoXaxoyaiB
pRfAa0FtEthSRjsBAYIjfg4hs6jsdALyVsEyGYRiGj0RHDanqShEZBAwFOgB3qOoEEbkCGKWqQ4CzRWQAsBL4BDgxSZkMo6WxPgKjBhJVBACq+jjweInbpb7jC4ELk5bDMAzDCCYLncWGYcSF9REYNWCKwDAMI+eYIjAMw8g5pgiSRAROOiltKQzDMCpiiiBp7rorbQkMwzAqYorAMFoBGzZq1IEpgjQZNw4mT05bCsMwck7i/xFkkjffhA4dYNtt05Vj553d3mpzRr3YsFGjDvKpCHbYwe2tADYMw7CmIcNoCaxSY9SBKYI0eP11sDUVDMPICPlsGkqbXXeFTTZp67ZyJSxfDmuvnY5MRnNjfQRGHZhFkBYfftj2fMAA6Nw5HVmakZUrYcWKtKXIDtY0ZNSBKYKkiFpDe+KJZORoVXr0MMUZhFkGRg1Y05DRnJRaVIbDLAOjBswiMOLn1VdhxIi0pTAMIyRmERjxs8cebm+108ZjTUNGDZhFYBiGkXNMERjw9ttu3iOj+TErzKgBaxrKAh99lG78hTmXrBAxjFxiFkHcTJsGZ5wR7Z7Sn8sMo1asj8CoAbMI4uaEE+D559OWwjCMZuHTT2G99WC19OrlZhHEjTWvGK3EM8/Af/6TthSty/z50KULXHJJqmIkrghEpL+IvCMik0Tkggr+jhQRFZG+SctkGEZIvvlNOOCAtKVoXT7+2O0feihVMRJVBCLSAbgZOBjoAxwrIn0C/K0L/AR4JUl5DKMlePNNmDUrbSmMuHn8cfjgg1SiTtoi2B2YpKpTVHU5cD9weIC/XwO/Bz5PWJ7ksaYhI2l22AG6d2/rZvmu+Tn0UNhxx1SiTloRdAem+85neG7/RUR2BXqo6r8rBSQip4nIKBEZNdfm8k+PESNg3ry0pYiH3/8exo9PWwrDKPLZZ6lEm2pnsYisBlwL/KyaX1W9XVX7qmrfbt26JS9clhk3DoYPTyfuvfaCb3wjnbjj5oILoG+LdEk187BRVbj4Ypg6NW1JGk9GLLmkFcFMoIfvfHPPrcC6wPbAcyIyFegHDLEO4yrsvDPss09j4ho/Ho46qu3c/2+80d7frbe6P5SbjeXL05YgHjJSoNTExInw29/CkUemLUluSfo/gteA3iKyJU4BDASOK1xU1c+AroVzEXkOOE9VRyUslxGWH/wAxo51HZS77FLe35lnwpprwtKlDROtLpq54KxEM1oGX3zh9suWpStHHCxfDh06uC0MGXlfoS0CEVlbRH4pIn/2znuLyLcr3aOqK4FBwFDgLeBBVZ0gIleIyIB6BG9K/v3vVH8aqYswBefnMfT1q8Ldd8PixfWHlUeaUcFlpDCMhU6doH//8P4z8r6ilEp3AsuAPb3zmcCV1W5S1cdVdRtV3UpVf+O5XaqqQwL87pdpa2Dx4tpqvIUaz2WXZebFh6bRH+mLL7q/s3/yk2Tjifs93H57vOGVMnIk3Hdf+eutUJim8W189BHMnFndXxhe8Ua/DxsWT3gNJIoi2EpVrwZWAKjqEqAFcl8E1lkHNt88+n2//W38sjQa1cZ8qAsWuP3s2cnHFSenn55s+HvuCccdV/56s1Uw/BSUWBrPsMkmtX3TpTz1FPTrF/2+jCjwKIpguYisBSiAiGyFsxDyxSefRL/nmWfil6NRpPmRJkmrPU+BjBQskWiFPPb++7Xdl5FnjtJZ/CvgSaCHiNwL7AWcmIRQTU1GXmxs+AuWRjxbMxZkcfHMM66Nea+9ag+jGfNfnt95RgitCFT1aREZgxviKcBPVPXjxCQzskWjCpg0CrI//Qn69HHz6qRJIf5mLMzjIK/PnQFCKwIR2dc7XOjt+4gIqvpC/GK1GIUMHqbmM3hw5bbgRtNoiyAo3iTwP8vZZ7d3a1aasXbdjDLHTcppEKVp6Hzf8Zq4eYRGAy3ym2lGOP74bCkCw2gUraCIayXlZ4/SNHSY/1xEegDXxy1QS9IKGbwVnsFPqz1PM2MWQerU83f
TDGDbuARpGFHn6BkypHUmWauFRjcNWQGdX/L87lNWhlH+LP6TiNzobTcBLwJjkhMtIZ5+unh8+umw007l/c6fD4cfDocdVt5PKZUyczPXfMo918SJyczX08g+grzw+efup8asTeXQzN9FixClj8D/x+9K4D5VfSlmeRpLtb9BCwXc5Mn1xdPMhU4li2D6dNhuOzjrrPrjueEGePRROO+8+sMygrn2Wrj8cujcGc4/v7r/RtPM30m9pPzsoS0CVf2bb7u36ZVAUiRZu/nd79KrPQVl1MIye+Wa25Yvh7//PVwmP+cceKGBA9DyWOgUpkfJ2sSArfBDWZNbNVUVgYiMF5E3ArbxIhIwH3HOSTIzX1l1aqfqPPYYnHtueP9hPtJy1379azd76SOPhI/PSI6sFVY//rGTKWty1UK9330TjBqqOMOoEYFXX01bAvjOd9z+2muj31sus5Zz//BDt58/P3pcRnJkpeZ9001tz9OWa8kSmDIFtt++8XFnvWlIVadV2hohZNNT70t+5RWXSbNKM9bo0i504ibM82S1CaY0/6xaBVddBYsWNVaOY49160HXMgV6M34DPqKMGuonIq+JyCIRWS4iq0RkQZLCpU7hg5kzB9Zaq/5wamHWLDer4Smn1B5GPYQZPhpnwZJmIdWoVd+MYArv/qGH4MIL3dZICn1UcY6C++ILuPHG6hW5rFsEPm4CjgXeA9YCTgFuTkKoTBLHoiu1sNCb0WNUyss0NDqjpjF8NKjTe86cZOUAt2B5vc/bzDXSUtkLndmNtgiS4KGH3Noav/xlZX9NpAhQ1UlAB1Vdpap3AhGW4mlCmvnjigt/c0IjLIKs8aUvJR/HrFn1h9HMTUMFsipXPXz2mdsvKNN4kpFnjqIIlohIR2CsiFwtIj+NeH8+yMiLjY1WVYat9p4KVHpfWXiXxxwD997b1i0LciXFqlVuH3YN45SIUpB/3/M/CFgM9ACOTEKolqOeQicrH0ktw0fDXq/Vr9GeMOmXZho/+CD87/8GX2vFd19QBKuXGaBZ+L4Ly9mmRBRFsBugqrpAVS9X1XO9pqLmIumCNSj8ODJ4lDAWL3ZyPPhg/fFGnWso5QwdmlYrdMLk66xUKkpptXfhZ+VKt28hi+Aw4F0R+buIfFtEokxPkR/iyNRHHlnsJK7l45061e0vv7x+WaLyxBONj9No7sK0UHkoPENaz5JEvNUsgiTjjkCUKSZOArYGHsKNHposIn9JSrBMkNbLefRRt0BNkowcCW9E+DE8bGdxXKOr/Apw0aLaxnbnkTAVh6wpjaTlWW89OOqoZOMoRzWLICPvIlKtXlVXiMgTuAXs1wKOwA0jNapR+Ms2LKUfdNwZZs89g8P92c+gSxe45JJgOaqRRMZed11YbbVi7aoeSmufrUYzzn6b9DtZuDC9aU5azSIQkYNF5C7cfwRHAn8BNglxX38ReUdEJonIBQHXz/DmLRorIsNFpE8E+ZsDVTj55NrubfTHe+21wWOe0y444+p72Hhj6N49+n2VLKJmI6nn+PrX4aKLot/XKukaRDWLICNDeqP0EfwAeAz4iqqeqKqPq+rKSjeISAfcT2cHA32AYwMK+sGquoOq7gxcDdQwCU4TELXJJCu1t0r/EQRl4qzIXY5585x1FvXDu/RSZ5VcemkycsVFafqLFCcZrLfQWbDA/X393nvB1194wc2QG5XSvoGs56EoVBs+mhElGKWP4FhVfUxVA1e1EJGXA5x3Byap6hRVXQ7cDxxeEq7/T4vOuGan5uDtt2HChOTCnznTTYJVK3FkskofZRKZOCMfRjsKE6T9+tfJLMSTJNddBw88UH84//63+/s6bmVYqgiymgdqIex/BE0w+2hY1gxw6w5M953PAPYo9SQiZwHnAh2BbwQFLiKnAacB9OzZs15Z42Fbb6VO/0sMeqG1vuTNN68tjCRqVK3UNAKt8W9HFIYMKebXSvzrX67ycfbZ7a8l9f6bLV99+KEbmdevX3W
/hSbN1crUuZuwaagaNT+Jqt6sqlsBvwAuKePndlXtq6p9u3XrVmtU0aj0wU9K+BeKejqL48xUSY5CWXNNuP762u41olPuXU6YAPfd544PO8zNjRM1nBkzapcrTkugEVOe77RTcbBFNV4OaijxkRElmPQUETNxfyAX2NxzK8f9uJFI2aDSS+rdu+25CFzQri+8ejjliFrrXLo0nlE1lYgr065cCbNnu7Vzf/rTYD/NWOtuFkrf4/bbw3HH1Rfm0KHxyVMrw4fDhhu6xZeSJMpEhM89F85fC1kEQV/ua0BvEdnSm6doIDCkzU0i/hL1UNyopObk97+PL6yoBeHaaxdHJiXVNFRKkFlbKUPPmwfDhrl1jjfbLHw8SZCRmhgQz/tq5knn4ho++tprbh+28I2Tau8w4xWbKMNHO4vIat7xNiIyQETW8Hn5fuk93qiiQcBQ4C3gQVWdICJXiMgAz9sgEZkgImNx/QQn1Pgs8RPUTppl/vY3t29001AYFi6Erl3hwAPh3XfjCTPvVFvxrtwor1mz4J57osdnfQTlifoMqm4GgWeeqe3+mInSWfwCsI+IdAGewtX2jwGOB1DVN4NuUtXHgcdL3C71HVdpkEyRhx6KJ5yk5hr68ENXuFb7WSUOwnYW+xXHX3w/npebhrfS/UlSzzvJSsG1xx61Dbm8+263HXYYrL9+9Hjjfkdx/1CW8do34BaqefRRt0HqeSpK05Co6hLgu8Atqno0sF0yYiVIM2QSCCfnppvCeefVdm8ccsQ5Qiqu+/3ccQcMGhRfeK1GpZ/0GlkwlYur1nycRh7Mwh/4dRBJEYjInjgL4N+eW7an1AtLxl4KED5j/fOfycS/alXb8fKNXpgmDmV28slwc5lF9NKwCF5/PbmV5sIo5Sgj0YIUaFLTV2Tl/4E0428ii+Ac4ELgH147/5eBZxORqtFkcerksB+WSDKZqG9f6NSpcgdjkIzl5G4WSyxJdt0Vvva1tKUIxy23lL/mn0M/juUk48i/kyfH92dyGoVys/QRqOrzwPMAXqfxx6raZL2pZYjTlEyiuaQQRiNr5GPHun0zzmYZhiQsghdfhG7d4KtfrT3sWgmzDkbcBeQll7gpJQrLMdZKuT6CsO9o9GhXcdlii2j3Gf8lyqihwSKynoh0Bt4EJorI+cmJ1kDqtQiqZbwkM2ZSFkEQYeLJUh9Bo9l333B/7yZBGulWWHKy3p+46pW9MPfRtGn1hVMghxZBlKahPt68QEcATwBbEjBktClJWhHUQpRmF7/8e+8NTz4ZPp4Tahyt++tfF4/9z/+rX9UWXpJksekvKcqtKKea/NTmtZKVPoIcE0URrOH9N3AEMERVV9BME8RVohlqsZVGVvivvfSSW1MA4K233Hkl7r47fPz+eMpNPDZxYrjwGknQMOBWGD4axMMPJx9H3P09WUvPHFoEUQag3wZMBcYBL4jIFkDIweEZp1ksglri33vveCZYa3RGjbOwiaNDs5RDDoFPPok/3Hrxt9dnrYAtR7k+grB5oFmesxLNoghU9UbgRp/TNBHZP36RUmD+fOjcua3bvHnxhR/XuORyI3eSzER+JdmIGTsbpVTrtQiacW3mpJqGevVqO1NuEH/4A3z3u8HXsmaRN6JQztgouiidxeuLyLUiMsrb/ohbP6D56dGjvdv+EXRcmp3FSYcf5hf4JOL/xz/iCytjH10ilP6fcOKJbv4pP/Wkw/vvF9dkCKLS7KNz58L557vpRYKo5c/imTNhxIjw/rPE0qVw++1t3Zqos/gOYCHwPW9bANyZhFCZYPz48H4bUYsN6uwr+Euzj6LaNYDnnw8fj00xURu33db2/G9/Ky6TGAf77Vec2yjqOyoU9NWa6ObNC2+Jb7MN7LUXnH56fYs3BZH0+7344uKqcRkhiiLYSlV/5a02NkVVLwe+nJRgTcWaQWvyNJDSjFtuEYy44wnL4MHJx1GJPFgEtaAK77wTzm+S8/z733nXruHuWbLE7W+
/vf0a22ko6ih57KOP2ruVk/mll+D//q82mSIQpcRYKiJ7F05EZC9gafwiNTmNnjgryCLI03DJWmk1i6AWXnwxnZ/fSinNr2mnby3xR7knyk+ne+8NP/pRdHkiEmXU0BnA3SKyvnc+nyxNGR2WNGqHqsW29nrCCPtn8Q47RGvaKhdOKXGut9BIsm4RNEq+0njefru2e+OSt1CJSbvgbzQZfN4oi9ePU9WdgB2BHVV1F8qsL2yUkORshkEWwRprtD0Pu4xgpfntVeGaa8KFE0QthcekSZn8aJpylApkVyGWWgT1ynnzzTByZO33pzG4o4k6iwFQ1QXeH8bgFpIxkiDKn8XVMlGPHuFmvazUuVjvqKGwGd3vr3fvtmsa1Ercw0fzSByzi9Z6PSqrVoVfUzgNWkERlJDRKkYM1JrxP/44XjmqEWQRBGWqsJ2C5ShMQhdEkpn4lVcqX//Pf5KLu1mp9Bd6GH9h7o2TrCnlJC34WsNPmHoVQfaeKC5qzfiTJycT/+zZ5f02onP44ovru3/SpHjkKOWAA6r7qdciqHVWzLRIWr44+wgge+m5eHGy4TejRSAiC0VkQcC2ECizAnkLEGcNKI6XvGpVfX8WJzGddJQPubCweFT8ci9e7OZPioMkmw7mz3dyx9GsBdHHydfSDFeNJC2CrI1y22abZMPPmuIjhCJQ1XVVdb2AbV1VbcBiuQ1m2DCX6VetSluScAR1qGZpWupa/Jbj6KOhTx9YsSLafUGF2NSp4e8vHcdd7Vnef9/ty62OFpWttormP2zTUBRK7/3rX+GDD2q7t9Q9yxbXu+/CBhvEN8U1NKdFkDseeSRtCRxBS1AGfVDLlmX7QyoQh0yF/oCoNch6a7NnnVXf/Y2OO+mf8kTglFPiCzuL+bXA7be7ifwefDC+ME0RtAiPPRbNfy0vOcrMlq2sCOJqXomTRqZvpSUjy1FrZ3G9/7rUSjl577wTLr+8sbIUKPTJ/fGPbp+1puKYSVwRiEh/EXlHRCaJyAUB188VkYki8oaI/Meb3jrbfOc7yccRZZ6YMIogTEaeMyd8nFD84zGp4aNhqWYhpPXh1TPj6iuv1D7woFZFcN11tcVXrzyV3t9ll9UX9+9+V9t9m5V0fyatCPxuy5a5+Bq4yFOiikBEOgA3AwcDfYBjRaRPibfXgb6quiPwMHB1kjKlQi0F0dNPhw8nLovg/vuj+X/55fB+4+wQLH2+Dh2Kc88EcfLJ8cUdFH85Xn+9smW3YkX5D75fP9h662TlK2VBheVFmnX46EUXFY933rn2cOp9fv+ou2rPu3Ch21ea7TVmkrYIdgcmeZPULQfuBw73e1DVZ1W18BWPBKpMbJ4wWf37shJxKYIOHeKJPykK8QTFt2ABLF/u3t8NN7S9tnx58rKV4+ST3SiioInGPv/c7YNWUAtLnO3NL7wQzl8c38h55xWt3rCVhH794L77wsdRmg7jxoW/t5Ranjlsxa3UrXDcwLIoaUXQHZjuO5/huZXjZNx6yMnx6aeVr2ew/e6/hP2zOOgZPvus/RzopUQdjROFOCyCSu9GpPhur7yy/rhqlaOUTz6BTTZxW6NkKZfWy5ZVv7cccRdKhbb3KLzyChx3XHj/cX7LtTx/uVmAwyrvBiqCzAz/FJH/BfoCXy9z/TTgNICePXvWHlG5dtAhQ2DAgNrDrUSj5y4Jiu/MM6uHE+f89aXEmQbVwkrbqiuVr9EWSbn0+fnPw/kLohmbhtJWBOVo9JQbIUjaIpgJ+Jf/2txza4OIHABcDAxQ1WWl1wFU9XZV7auqfbt161abNJVGoBx+OPzwh+kXIrUQV9NQrRZBnJ3Ftcbnf29z58Kuu7b3E9caw1myGpMcinjOOcHucX8jYazFLKV5veSwaeg1oLeIbCkiHYGBwBC/BxHZBbgNpwQiDluJyMSJla/feWei0ddNuY8hrvnck2wainPUUBgz+vXX254PHw4bbRROhnppZKGVpCLw97WkbRHU0rQ
YNR2qNT0G+Z80KXjJzEo/2wU9S1Dcc+eWDyNmElUEqroSGAQMBd4CHlTVCSJyhYgU2mGuAdYBHhKRsSIypExw9dOMtf0wpG0RhKGRFkEQYae4mDmzufJJUFrMmlX7veWII03CVmSi3Bv3PeUo9/y9e7slM0spjPwJIoNNQ4n3Eajq48DjJW6X+o5DzBoWE2l94HG92HLyl1o6tdYkau3QjbNpKMyf3UG1rWpzLoVdvnP06Op+6v2Qv/tdePTRcPLUwo03tnfLctNKUk2LSVsElSymSmGFbRpqIPn6s7jZFUE5SkeD1Lq+bJLpM2FCOH9hfuXffvv2btVkT2od5yCqve9//CPZuMLOtnrFFeHj8Yc5Zkz4+6A491KU4ZSlNGJiukWLyl8LStOf/rR4HOYZzjmn/bcaloSbiUwRtAJxjfZJMn3qGStfoNJzbrQR3Hpr/XGEIUumfRo1/UprUwTxP/9T+XpWLIJKFL6N3/ym9rhvuMGNWqzFIjjhhHDx1ki+FEGzUy5jJznsMwz1fnAXXhiPHJWmIwhbo4y7YA37k1atJDl1h5+0p6FuRGdxJQrPf8klla9Xi3vJktoUQbX/n+okX4qg2S2CpUuD3eOaoz8trroq+TjCFgphZtWMyyKIq6D6RsnS4UkszZnmqKGddqq8MFOt4UYh6vNH7SNIGVMEjSDpFx+lrbcSzz0XTzhZJOw7KLfUaJQaWb2KYK21osX17LPxyZQk5WT48MPK973xRm1Du6s9c5LLnFaKu9rwUessTpgwiiAJZZGFjzBJmuH5kpz0Lur1ahTmIKpVlqxaBOVk+MUvar+3VhYvDrfMaYGoz18uv4VZUdAUQcI0e9NQVmkGRZDFDtw4ZNp///ZuSSiCGTPquz8KQc1zcXcWRx2FE5cigNoUQcJllymCRvD22+nE2yiaQcE2cq6jsMSxEMyrr7Z3S/Jv47SIu7O40pTbQUTN4+WWug1jEXz2WXu3ESPgZz+LJkMETBEY9VNLR16jyZpFoApHHpm8LP74mhm/IqjH4hKBbbeNnh5Ry45KI/mqxT1oULD7tddGkyECpgiMfJBFRZA36pn0z59elaZvCMPbb1efd6yUamVH6ToJDz9cPpxq777cgIUEyZciCEMta8Qa2aeRTUNpK4Ik+giiMniwa8qo9S/3UvwWQel02uWo9MxR1jUAl6aVOvFLRyBdf315vxmsBGRmPYKGYBZBfmnEFAVRaHRhUK7NOimOP97tqw0NDYv//YUdyhv3fwT33htPOKYIUsYUQX7Ju0XQSEXon4wtroESw4YVj5P4S7ya308/hdVjKi6rxRW12SoGTBEY+SBPfQRB+byRFoF/QZvFi+MJ8803i8dpKIJzz4XulVbZDUlGLYJ89RGYIsgvjbQIgsb2Rw0jbtJqGluyJP4w03qWme0WV6wNUwQpY4ogv+S9jyCt50/iOcNO4x0l7gSHZrajnjQ599z45PBhisDIB1n8oayRcTe6szgLREmr889PTg4/9TYNXXddfLL4yJciMPJLnDXie+6pP4xLL63up1bS7iz204xKs8BTT8Ujhx/rI8gAZhHklzjXbIhj/YRrrqntvlo7otOyCDJY6IXme99LJtwMpkm+FEHQWrdGPkii07KZSEsRpNk3U2+Bm1SBbYogZeJYLtFoTuIaxpg2YSZLC8rnWessbwT1FrhRJ6YLgzUNZQBrGsovlRYmbya6dKnuZ9as9m5pKYKNN04nXshkgWuKIAuYIsgvy5enLUG6RJ1/Py6iLP4SNxkscIFMypW4IhCR/iLyjohMEpELAq7vKyJjRGSliByVsDCJBm9kmDwOn/QzbVo68Waw0EuViy6CcePSlqIdiSoCEekA3AwcDPQBjhWRPiXePgBOBAYnKYuRc/KuCNKimYeP5oikLYLdgUmqOkVVlwP3A4f7PajqVFV9A8hhb5bRMPLYWZoFmnnUUFZZsSL2IJNWBN2B6b7zGZ5bZETkNBEZJSKj5tba3ml
NQ/nFFEE6DBmSXtytqgiGDo09yKbpLFbV21W1r6r27datW22BmCLIL6YI0sH+3WkKklYEM4EevvPNPbd0MEWQX6yPIB3SVMD1LI2ZZRIox5JWBK8BvUVkSxHpCAwE0rMVTRHkF1ME+WPHHdOWoGlIVBGo6kpgEDAUeAt4UFUniMgVIjIAQES+JiIzgKOB20RkQmICmSLIL9Y0ZLQKCZRjia9QpqqPA4+XuF3qO34N12RkGMlhisAwytI0ncWGURfWNGS0Ck3YR5AtrGkov8Q5DbVhtBimCIx8YBaB0SqYRVAnpgjyi1kERqtgiqBOTBHkF1MEjcG+seQxRVAnWc6k22yTtgStjTUNNYZWndahxTFFkBXsA0oWswiMVsEsghbGxrkni1kEhlGWfCmCWjXps8/GK0cQZhEkiykCo1UwiyAl9tsv+TjMIkiWpJuG/v73ZMM3jATJlyKwPoL8Mnt2suHbBGdGozCLoE6yrAisMzMcu+yStgTBdOiQtgRGXjBFUCdZVgSLF6ctQXOw995pSxDMWmulLYFh1Ey+FEGtK5vVy8knV/ezYEHychjJseGGaUtg5IUEmpHzpQjuvrvy9bPPLn/tjDNqj7dr1+p+0lJSRv2IZNvaNFqLBAaW5EsRVKq1rb8+HHdc42TxM2kSbLVVvGGedVY0/3vuWfm6dWaXp1MnWC1fn5KRImYR1MmyZe3drrzS7f/4x8r3+mt811xT2e9GG0WTq1ev4HHut9wSLRw/m0dc6ydKQRamqQvgtNOiyRCGLNa8O3WCNdZIW4rK3HMPdOyYthRGHJhFUCebbdbe7eKLYcwYOOmk8OGccELl6/vv3/Y8zIsr+HnpJejdGw45BAYODC9TKV/9ajT/UWoZy5dX93P22XDDDdFkqIWddoonnPPOq0+GNdd0Cj2r9O0La6+dthTJUu27bBXMIqgTkeACcpddXI24R4/6wv/KV9y+tHY9d251uQqKoGNHePdd+Pe/a5dDFb70peBr669fe7gFVqyo7qdfP1c4VmPffWuXY6ON4NRTa7/fTz2Wxl//6vZ9+9Yexq671n5vGL7yFVg98ZVp0+Wuu9KWoD3f/S7svnt4/2GsbbMIYsD/wXfv3vbaZpvBjTdWvm/TTcsXGk8+CRMnttfYl14a7N8fdqFpyK9E6vm3YN11g92PP768DOU45pi252EUQdimkqgFsN8aibOZyJ/uUdv7C00uQU2PYam3EhKGVlIEhx6atgThOP/8+C0xswhi4Je/LB5/+cvtr//4x8H3FQqdCy8sHm+4IYwYUfSz6aaw7bawZEnbe7fcsrJMfkXg/zEpzNh0ETf09NNP27pvvz386U+uRhL0HFG4//625+UUwbe/7TZorwgOOghuuilavJ06tXf7/PPi8WqrVf8owvbX7Lxz8XjAgOr+/T+2FQrxehRBI8h6P0ZYvvpV+Ne/0pYiHDvvHP8UNWYRxMCxx8KLL7rjagm6aBFMmeKO/QVoly5wwAHwwAPFgqhfv2LBtXBh0e/QoeHkKlgNfuW0zjowZw6MHeviC+LFF13tf/313QfibycdNAgeeaRtX8PuuwcXjlGaaCpZBAUrprTQefJJN5Jp2LC27oV0PfDA9mEdcUR7t6VL299bjmHDXNqFYeBAuOgiePjhcP4LVsO22xbluOEGV1PdYotwYRQIes6o7Lxz9XizZBFsvXVt9y1cGPxOx4ypS5zEWHNNV/kcOTKc/zAVNbMIYmLbbd3+nHMq++vcObg2v9pq8PTTrnDu3Nm59exZvL7DDsXjLl3a3tuvn9sffrjbF0b3HHWUe8GlTTrdurnOyKefhvHj28vSp0/x+K23gttJC4Xz734H3/8+vPdesZZy220u3qOOcufbbdf+/lLKKQLV4rWCIthrr7Z+vvnNtufbb+/2V18dHJ6fTTZpW5iVjt8v/Snvm9906XvRRcHylvKb38CRR5b/GP0VhyA/hZpq1L+Me/cO569S89GZZ8L77xfTE9o
PjqhXEdx7b3u3q66Cl1+OHlZhtF5U1lkn2FKsVbHEzUEHwfDhbd1WW63yQAJ/palav9rJJ7cta+JCVRPdgP7AO8Ak4IKA652AB7zrrwC9qoW52267aaK4Iqit25w5qgMHqi5Y0N7/ffepfvZZ8fzzz1V32smFMWNG2zA/+0z1lFOCwwnDwoWqN90ULGM5jjjC+X3kkaLbqlWqI0a093viicWwS+Po1cudjxqlusMOxesF92OOUd13X3f87LPuni++cHH5Kdz329+qLltW9LvBBs59yhTVo45SnTZNda21iv6fe0517lzViy5SPeEEJ8cf/lC8vnhxsNx33dX+mQrb2murzpzZVr6HH27r56STVDt2bCv7177m9ttu2z4Nhw9ve78/raD4Pgrb2We7tAuSb+ON3f6dd1RHjiz/HLfd5uL2p4FfXlX3HIXzQv6Msn3xheq3v606eHDbcMeMccfbbefOJ01qe9/WW6v27NnWbcKE6PGX5ne/e+H7qyXMOLcCBx6oeuedxfPZs8vfM3Bg8fidd9pf/8UvXJ4cM6Z9XosIMEo1oJwOcoxrAzoAk4EvAx2BcUCfEj8/Am71jgcCD1QLNxVFEJUlS1RHj443TD+nnaZ6zz3h/YLqsGHV/Z59dlHWt99WHTeueG3xYtVPPimen3OO8zd3rivU581zBTSovvFG+TjKpcWcOaoTJ7Z1W7rUpWU5pkwphrd0afG4U6ein+XLVS+4oP0HdvPN5cPt379y4bPHHm7/1a9WfkZQ/fnP3b5/f9X771edOrXt9YULi+n+q18V3S+7THXlSreVhrtypeq//qV68MHuvKAICn423tgdn3KKU1qqroJyzz2qb73VNixV1VmzVNdbT3XHHVX/53+c4rvlFqek7723bSVCVXXRoqKCf/ddF84WWwQ//+uvq15ySfH84IOLYRx9dPv38rOfFY/vvruYp0rfxamnFt0//9y53XKL6v77F90nT3b7ddZx1wvpu8su7eNdd133XvfZJ1rhX9gKlYUgpk1r69dfwfnRj9z+0EOdsj3zTPfeCtf95UidpKUI9gSG+s4vBC4s8TMU2NM7Xh34GJBK4TZEEWywQbxh7ref0+xpsGiR6l/+4jJZNRYudAXQihW1xzVkSGU/cSvFWbNUH3zQHT/wgFN85eLt0kX1gw/aKrggXnjB+X/xxbbul13m3AuKZe+9y4dx4IHOz0cfucLvgw+K1z7+WPWJJ4pu8+c7uRctUr311mJBXsrEie56gWuucXH8619FtwkTnHKuxsqV7n3Xy5IlToZvfKPoVqh87LGHUxgrVzrr+NZb28r2ySeqf/6zqzGvXKk6dKjLpwMGOKulwPjxqu+91zbe5ctdARtUUdh0U2ehLFvm5Dj1VOf+xRfuPlWXB4YPLxbKfgoWS58+5Qv+73+/rfVXLt8V2Gor5++pp9z7nzlT9YwzVD/91L13/zfnt/4+/LByuBEopwjEXUsGETkK6K+qp3jn3wf2UNVBPj9ven5meOeTPT8fl4R1GnAaQM+ePXebNm1aYnKzYIEbvVNo/zfi5Z13XHt13NNqVOP992GDDdr320Rl6lTXMXvbbfCd75T/Z6PwKSc5/cQXX7gV9Er7XhrNiBGujyRLk++pur6c+fNd31u5PpLp09073Wef4OvDh8Pzz7t+tHXWcf8FdetWHH7+zDPw9ttw+unxTkc+f757v1FnKqiAiIxW1XY/vDSNIvDTt29fHTVqVGJyG4ZhtCLlFEHSo4ZmAv6hDpt7boF+RGR1YH1gXsJyGYZhGB5JK4LXgN4isqWIdMR1Bg8p8TMEOME7Pgp4RpM0UwzDMIw2JPqHiaquFJFBuA7hDsAdqjpBRK7AdVoMAf4K/F1EJgGf4JSFYRiG0SAS/9VQVR8HHi9xu9R3/DlwdNJyGIZhGMHk889iwzAM47+YIjAMw8g5pggMwzByjikCwzCMnJPoD2VJISJzgVp/Le6Km8Yiy2RdxqzLB9mXMev
ygckYB1mTbwtV7Vbq2JSKoB5EZFTQn3VZIusyZl0+yL6MWZcPTMY4yLp8BaxpyDAMI+eYIjAMw8g5eVQEt6ctQAiyLmPW5YPsy5h1+cBkjIOsywfksI/AMAzDaEseLQLDMAzDhykCwzCMnJMrRSAi/UXkHRGZJCIXpCRDDxF5VkQmisgEEfmJ536ZiMwUkbHedojvngs9md8RkYMaJOdUERnvyTLKc9tQRJ4Wkfe8fRfPXUTkRk/GN0Rk14Rl+4ovncaKyAIROSftNBSRO0RkjrfYUsEtcpqJyAme//dE5ISguGKU7xoReduT4R8isoHn3ktElvrS8lbfPbt5eWOS9wySsIyR32uS33oZGR/wyTdVRMZ67qmkY2SC1q9sxQ03DfZk4MtAR2Ac0CcFOTYFdvWO1wXeBfoAlwHnBfjv48naCdjSe4YODZBzKtC1xO1q4ALv+ALg997xIcATgAD9gFca/F4/BLZIOw2BfYFdgTdrTTNgQ2CKt+/iHXdJUL5vAat7x7/3ydfL768knFc9mcV7hoMTTsNI7zXpbz1IxpLrfwQuTTMdo255sgh2Byap6hRVXQ7cDxzeaCFUdbaqjvGOFwJvAd0r3HI4cL+qLlPV94FJuGdJg8OBv3nHfwOO8LnfrY6RwAYismmDZPomMFlVK/1p3pA0VNUXcGtqlMYdJc0OAp5W1U9UdT7wNNA/KflU9SlVXemdjsStIlgWT8b1VHWkutLsbt8zJSJjBcq910S/9UoyerX67wH3VQoj6XSMSp4UQXdguu98BpUL4MQRkV7ALsArntMgz0S/o9CEQHpyK/CUiIwWkdM8ty+p6mzv+EOgsGp7mmk7kLYfXZbSEKKnWZqy/hBXMy2wpYi8LiLPi0hhZffunkyNli/Ke00zDfcBPlLV93xuWUrHQPKkCDKFiKwDPAKco6oLgP8DtgJ2BmbjzMs02VtVdwUOBs4SkX39F71aTKpjj8UtfzoAeMhzyloatiELaVYOEbkYWAnc6znNBnqq6i7AucBgEVkvJfEy/V5LOJa2FZMspWNZ8qQIZgI9fOebe24NR0TWwCmBe1X1UQBV/UhVV6nqF8CfKTZdpCK3qs709nOAf3jyfFRo8vH2c9KUEaekxqjqR56smUpDj6hp1nBZReRE4NvA8Z6ywmtumecdj8a1uW/jyeJvPkpcvhreayrvW0RWB74LPFBwy1I6ViJPiuA1oLeIbOnVJAcCQxothNeG+FfgLVW91ufub1P/DlAYkTAEGCginURkS6A3rpMpSRk7i8i6hWNch+KbniyFUSwnAP/PJ+MPvJEw/YDPfM0hSdKm9pWlNPQRNc2GAt8SkS5eE8i3PLdEEJH+wM+BAaq6xOfeTUQ6eMdfxqXZFE/GBSLSz8vLP/A9U1IyRn2vaX3rBwBvq+p/m3yylI4VSauXOo0NN1LjXZxWvjglGfbGNQ+8AYz1tkOAvwPjPfchwKa+ey72ZH6HBowswI22GOdtEwppBWwE/Ad4DxgGbOi5C3CzJ+N4oG8DZOwMzAPW97mlmoY4pTQbWIFr8z25ljTDtdVP8raTEpZvEq49vZAXb/X8Hum9+7HAGOAwXzh9cYXxZOAmvBkKEpQx8ntN8lsPktFzvws4o8RvKukYdbMpJgzDMHJOnpqGDMMwjABMERiGYeQcUwSGYRg5xxSBYRhGzjFFYBiGkXNMERhGBUTkYnGzxL7hzR65h7iZTtdOWzbDiAsbPmoYZRCRPYFrgf1UdZmIdMXNZjkCN+7/41QFNIyYMIvAMMqzKfCxqi4D8Ar+o4DNgGdF5FkAEfmWiLwsImNE5CFvHqnCmg5Xe3POvyoiW3vuR4vImyIyTkReSOfRDKOIWQSGUQavQB8OrI37K/gBVX1eRKbiWQSelfAo7q/WxSLyC6CTql7h+fuzqv5GRH4AfE9Vvy0i44H+qjpTRDZQ1U/TeD7DKGAWgWGUQVUXAbsBpwFzgQe8Cdr89MMtkPKSuFWpTsAtklP
gPt9+T+/4JeAuETkVt4iKYaTK6mkLYBhZRlVXAc8Bz3k1+dKlIwW3kMyx5YIoPVbVM0RkD+BQYLSI7KbeDJWGkQZmERhGGcStjdzb57QzMA1YiFtmFNyqXnv52v87i8g2vnuO8e1f9vxspaqvqOqlOEvDP2WyYTQcswgMozzrAH8St6D7StxMnafhpr9+UkRmqer+XnPRfSLSybvvEtzMlwBdROQNYJl3H8A1noIR3Myk4xrxMIZRDussNoyE8Hcqpy2LYVTCmoYMwzByjlkEhmEYOccsAsMwjJxjisAwDCPnmCIwDMPIOaYIDMMwco4pAsMwjJzz/wFScyPq2nzpCQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "steps = steps_loss[\"step\"]\n", - "loss_value = steps_loss[\"loss_value\"]\n", - "steps = list(map(int, steps))\n", - "loss_value = list(map(float, loss_value))\n", - "plt.plot(steps, loss_value, color=\"red\")\n", - "plt.xlabel(\"Steps\")\n", - "plt.ylabel(\"Loss_value\")\n", - "plt.title(\"Change chart of model loss value\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "suspended-characterization", - "metadata": {}, - "source": [ - "Judging from the above, it can be divided into three stages:\n", - "\n", - "Stage 1: When the training starts, the loss value is around 2.2, which indicates that the training performance is not satisfied.\n", - "\n", - "Stage 2: When the training arrives at a certain point, the loss value decreases sharply and the training performance is greatly improved.\n", - "\n", - "Stage 3: After the loss value converges to a small value, it tends to get close to 0. At this point, the training performance is steady and cannot be further improved, leading to the result that the training is terminated.\n", - "\n", - "## Validating the Model\n", - "\n", - "After obtaining the model file, validate the model generalization capability.\n", - "\n", - "The process of building a network for verification is as follows:\n", - "\n", - "1. Load the model by reading the parameter `param_dict` in the `.ckpt` file.\n", - "2. Load the parameter `param_dict` to the neural network, Lenet.\n", - "3. Load the test dataset.\n", - "4. Call the function, `model.eval`, to transfer the parameters of the test dataset, `ds_eval`. 
Compute the accuracy of `checkpoint_lenet-{epoch}_1875.ckpt`" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "adaptive-member", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Testing ==============\n", - "============== Accuracy:{'Accuracy': 0.9758613782051282} ==============\n" - ] - } - ], - "source": [ - "from mindspore import load_checkpoint, load_param_into_net\n", - "\n", - "# testing relate modules\n", - "def test_net(network, model, mnist_path):\n", - " \"\"\"Define the evaluation method.\"\"\"\n", - " print(\"============== Starting Testing ==============\")\n", - " # load the saved model for evaluation\n", - " param_dict = load_checkpoint(\"./models/ckpt/mindspore_quick_start/checkpoint_lenet-1_1875.ckpt\")\n", - " # load parameter to the network\n", - " load_param_into_net(network, param_dict)\n", - " # load testing dataset\n", - " ds_eval = create_dataset(os.path.join(mnist_path, \"test\"))\n", - " acc = model.eval(ds_eval, dataset_sink_mode=False)\n", - " print(\"============== Accuracy:{} ==============\".format(acc))\n", - "\n", - "test_net(network, model, mnist_path)" - ] - }, - { - "cell_type": "markdown", - "id": "departmental-crash", - "metadata": {}, - "source": [ - "In the preceding information:\n", - "\n", - "- `load_checkpoint`:This API is used to load the CheckPoint model parameter file and return a parameter dictionary.\n", - "\n", - "- `checkpoint_lenet-1_1875.ckpt`:name of the saved CheckPoint model file.\n", - "\n", - "- `load_param_into_net`:This API is used to load parameters to the network.\n", - "\n", - "When the training step reaches 1875, the accuracy of the model is over 95%, meaning the model performance is good.\n", - "\n", - "We can check the change of the model accuracy as the training step changes.\n", - "\n", - "`eval_show` draws the line chart of model accuracy every 25 `step`. 
In particular, `steps_eval` stores the training step of the model and the corresponding accuracy information." - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "rocky-juvenile", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZUAAAEWCAYAAACufwpNAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAA/A0lEQVR4nO3deZzd8/XH8ddbYkmINUEISexiZ0RUEaFFFyElllaipdqituZnbW2ltqr+qFapLQS1108RE5JaKiMTSUikIYLKQiIkEZFEkvP743wuN2OWOzN3m5nzfDzu4977Xc+9d+ae+/2sMjNCCCGEfFil1AGEEEJoPSKphBBCyJtIKiGEEPImkkoIIYS8iaQSQgghbyKphBBCyJtIKqFkJPWQZJLa57DtCZJeLEZcbYWkH0p6phn7PyVpcD5jauB8Of+9hNKJpBJyIuldSUslda6xfFz6R+9RotBCE5nZMDP7di7bSrpE0j019j/UzO4qTHTFkf52typ1HK1JJJXQGO8Ax2aeSNoJ6Fi6cMpDS/zl3BJjzqe2/voLKZJKaIy7gUFZzwcDQ7M3kLSOpKGS5kh6T9KvJa2S1rWT9HtJH0maBny3ln1vkzRL0gxJl0tql0tgkh6U9IGk+ZKel7RD1roOkq5L8cyX9KKkDmndNyX9W9I8Se9LOiEtHyXppKxjrFT8ln7hnirpLeCttOx/0zEWSBorad+s7dtJukDS25I+Tes3k3STpOtqvJbHJZ1Vy2v8i6Tf11j2D0lnp8fnZR3/DUlH1Ij/JUnXS5oLXFLLa6o1fkmHABcAR0taKGlCzfdI0irps35P0uz0N7BOWpcpthos6b/p87+wns+yzs8r+WFtx5HUW9LL6bOcJelPklar6zOT9HxaNSG9rqPriik0gpnFLW4N3oB3gYOAKcD2QDtgOtAdMKBH2m4o8A+gE9ADeBM4Ma37OfAfYDNgfWBk2rd9Wv8o8FdgTWBD4BXgZ2ndCcCL9cT3k3TO1YE/AuOz1t0EjAI2TXF/I23XHfgUv/paFdgA2DXtMwo4KesYK50/xV2ZXkeHtOxH6RjtgV8BHwBrpHX/A7wObAsI2CVt2xuYCayStusMLAI2quU17ge8Dyg9Xw/4HNgkPT8K2AT/sXg08BnQNSv+ZcAvU3wdanlN9cV/CXBPjXi+fI/S+z8V2AJYC3gEuDut65Her1vTeXcBlgDb1/FZ1vV51XscYA+gT4q/BzAZOLOBz8yArUr9/9WabiUPIG4t48ZXSeXXwJXAIekftH36x+yRvgCWAr2y9vsZMCo9fg74eda6b6d92wMbpS+IDlnrjwVGpscrfQE2EOu66bjrpC/Yz4FdatnufODROo7x5RdmbedPx+/XQByfZM6LJ+P+dWw3GfhWenwa8GQd2wn4L7Bfev5T4Ll6zj8+c84U/39rrK/3Pa0R/yXUn1SeBU7JWrct8EXWF7wB3bLWvwIcU8s56/u8cj5OWndm9udb22dGJJW836L4KzTW3cBx+BfS0BrrOuO/+N/LWvYe/osT/Ff0+zXWZXRP+85KxRfz8KuWDRsKKBUtXZWKfhbgCTATT2dgDeDtWnbdrI7lucp+LUgaImlyKrKZhye1TMOG+s51F36VQLq/u7aNzL8F7+ereq3jgGFZ5x8kaXzW+7dj1vm/Fm9NDcTfkE34+uee+bGQ8UHW40X4FU1N9X1e9R5H0jaSnkjFoAuA39USf73vQWi+SCqhUczsP
bzC/jt4EUe2j/Bfp92zlm0OzEiPZ+FfrtnrMt7Hr1Q6m9m66ba2me1Aw44D+uNXUuvgv2jBf9l/BCwGtqxlv/frWA5edJTdCGHjWrb5cojvVP9wDjAQWM/M1gXmpxgaOtc9QH9Ju+BFi4/VsR3AfcCRkroDewEPp/N3x4uFTgM2SOefmHX+leKtKYf4GxrOfCZf/9yXAR82sF9N9X1eDfkLXry6tZmtjdcDqcY2MSx7gUVSCU1xIl6M8Fn2QjNbDjwAXCGpU/qiOxv/0iStO11SN0nrAedl7TsLeAa4TtLaqeJ3S0n75xBPJzwhzcUTwe+yjrsCuB34g6RN0lXN3pJWx3/lHyRpoKT2kjaQtGvadTwwQFJHeZPTE3OIYRkwB2gv6SJg7az1fwN+K2lruZ0lbZBinA6Mwa9QHjazz+s6iZmNw794/wYMN7N5adWa+BfmHABJP8avVHLVUPwfAj2UGl3U4j7gLEk9Ja2FfwZ/N7NljYihoc8rl9ewAFgoaTvgFzns8yFeDxTyJJJKaDQze9vMqutY/Uv8V/404EXgXvxLAvyX9HBgAvAqX7/SGQSsBryBl+c/BHTNIaSheHHLjLTv6Brrh+CV5GOAj4Gr8Yrx/+JXXL9Ky8fjlb8A1+P1Qx/ixVPDqN9w4Gm8YcJ7+K/t7KKWP+BJ9Rn8i+82vLI54y5gJ+oo+qrhXvyq7N7MAjN7A7gOeDnFvBPwUg7HyjX+B9P9XEmv1rL/7Sn25/Er2cX430JT1Pp55bjfcXjji1uBv+ewzyXAXanIcGCTog0rybQiCSGUkKT98Cu67hb/lKEFiyuVEEpM0qrAGcDfIqGEli6SSgglJGl7YB5ezPfHkgYTQh5E8VcIIYS8iSuVEEIIedOmB1Xr3Lmz9ejRo9RhhBBCizJ27NiPzKxLbevadFLp0aMH1dV1tYwNIYRQG0nv1bUuir9CCCHkTSSVEEIIeRNJJYQQQt5EUgkhhJA3kVRCCCHkTSSVEEIIeRNJJYQQQt5EUgkhhOb68EO45x6IYa8iqYQQQrOYweDBcPzxcO21pY6m5CKphBBCczzxBAwfDt27w/nnQ2VlqSMqqUgqIYTQVEuWwFlnQa9eMH48bL89HHMMvPtuqSMrmUgqIYTQVH/8I7z9tt+vuy48+igsXw4DBsDnn5c4uNKIpBJCCE0xcyb89rfQvz9861u+bOutvcJ+3Dj4+c/bZMV9JJUQQmiK886DL76A665befn3vgeXXAJDh8JNN5UktFKKpBJCCI01ejTcfTf86lew5ZZfX/+b33hyOessePHF4sdXQpFUQgihMVasgNNPh002gQsuqH2bVVbxpNOzJxx1lBeVtREFTSqSDpE0RdJUSefVsr67pGclvSZplKRuWeuukTRJ0mRJN0hSWr6HpNfTMbOXry+pUtJb6X69Qr62EEIbddddMGYMXH01rLVW3dtlKu4//RSOPBKWLi1aiKVUsKQiqR1wE3Ao0As4VlKvGpv9HhhqZjsDlwFXpn2/AewD7AzsCOwJ7J/2+QvwU2DrdDskLT8PeNbMtgaeTc9DCCF/Fizwvih77w0//GHD2++wA9xxB7z8MpxxRuHjKwOFvFLpDUw1s2lmthS4H+hfY5tewHPp8cis9QasAawGrA6sCnwoqSuwtpmNNjMDhgKHp336A3elx3dlLQ8hhPz47W9h9my44QbwQpKGHXUUnHMO3Hwz3H57YeMrA4VMKpsC72c9n56WZZsADEiPjwA6SdrAzF7Gk8ysdBtuZpPT/tPrOOZGZjYrPf4A2Ki2oCSdLKlaUvWcOXOa9spCCG3Pm2/C//4v/PjHUFHRuH2vuAIOPBBOOQWqqwsTX5kodUX9EGB/SePw4q0ZwHJJWwHbA93wpNFP0r65HjRdxdTaQNzMbjGzCjOr6NKlS7NfQAihjTjrLOjQAX73u8bv27493H8/bLyxd4ycPTv/8ZWJQiaVGcBmWc+7pWVfMrOZZjbAzHYDLkzL5uFXL
aPNbKGZLQSeAvZO+3er45iZ4jHSfev91EIIxfXkk3676CLYqNZCkIZ17gyPPAJz5vhQLsuW5TfGMlHIpDIG2FpST0mrAccAj2dvIKmzpEwM5wOZAsf/4lcw7SWtil/FTE7FWwsk9UmtvgYB/0j7PA4MTo8HZy0PIYSmW7rUr1K23RZ++cvmHWv33b1uZeRI7zzZChUsqZjZMuA0YDgwGXjAzCZJukzSYWmzvsAUSW/idSBXpOUPAW8Dr+P1LhPM7P/SulOAvwFT0zZPpeVXAd+S9BZwUHoeQgjNc8MNXp9y/fWw2mrNP97gwXDqqd4T//77m3+8MiNrg2PTZFRUVFh1K680CyE0wwcfwDbbwH77+RD3+bJ0KfTr52OEjR4NO+2Uv2MXgaSxZlZra4VSV9SHEEL5uuACWLzYr1LyabXV4MEHYZ114Igj4JNP8nv8EoqkEkIItRkzxjsunnmmjz6cb127wkMPwX//Cz/6kQ//0gpEUgkhhJoy43tttBH8+teFO883vuF9X558Ei69tHDnKaL2pQ4ghBDKzj33eF3HHXfA2msX9lw//7lfFV12GeyxBxx2WMP7lLGoqI+K+hBCtk8/9cr5zTf3MbtWKUKBzuLFsO++3srslVe8+XIZi4r6EELI1RVXeKuvG24oTkIBWGMNePhhr8AfMMATWwsVSSWEUJ4mTSr+PCRTp3pLr0GDYK+9invuzTeHv/8d/vMf+MlPWuxUxJFUQgjlZ8kSH15+m23g2mt92t5iOPtsv1q4qkR9p/v1g2uu8VZh11xTmhiaKZJKCKH8vPyyFwFtsYUPG7/rrjBqVGHPOXw4/N//eWuvrl0Le676nH02HH2095GprCxdHE0USSWEUH4qK6FdO5/f/fHHYdEiOOAA78/xwQf5P98XX3h/lK228vtSkuC226BXLx948t13SxtPI0VSCSGUnxEjoE8fb877/e97/cqvf+290LfdFm68Mb+j/P7pT16X8Yc/wOqr5++4TbXmmj4V8YoVXnG/eHGpI8pZJJUQQnn55BOfyOqgg75a1rGjz7r4+uuebE4/Hfbc04vJmmv2bLjkEjj4YPje95p/vHzZaiu4+24fH6zUV0+NEEklhFBeRo70X+jf+tbX122zDTz9NDzwgM9L8o1vwEknwUcfNf18F17oxWt//GPuUwQXy/e+B+eeC3/9KwwbVupochJJJYRQXioroVMn6N279vWSz/s+eTIMGQJ33eVFYrfc0vjxs8aO9fqLX/4Sttuu+bEXwuWX+yjJJ58Mb7xR6mgaFEkltA3Dh8Of/1zqKEIuRoyAvn1h1VXr365TJ29uPH487Lgj/Oxn3gx57NjczmMGZ5zhMzJedFFzoy6czFTEa60FRx4JCxeWOqJ6RVIJrd/SpV5EcsYZ8PHHpY4m1Ofdd70DYnZ9SkN22MGbGw8d6vvvuSecdhrMm1f/fvfdBy+95HPOr7tuk0Muiq5dPd4pU3yssDLuGBlJJbR+w4bB9OneWuixx0odTajPiBF+X1t9Sn0kOP54/9I99VT4y1+8SGzo0Nq/gD/7zPu/7L47/PjHzY+7GPr185GMhw2DW28tdTR1iqQSWrcVK+Dqq2GXXbwj3QMPlDqiUJ8RI2CTTZpev7Huut7ceMwY6NnTp+7df3+YOHHl7a68EmbM8PG92rVrdthFc8EFcMgh3vrt1VdLHU2tCppUJB0iaYqkqZLOq2V9d0nPSnpN0ihJ3dLyAySNz7otlnR4WvdC1vKZkh5Ly/tKmp+1rowLSUPRPPaY/3o97zwYONC/tObOLXVUoTYrVsCzz/pVSnNbYe2+O/z73/6LftIk75E/ZIj30p82DX7/ezjuONhnn7yEXjSrrOLNjLt08cYKDRXxlYKZFeQGtAPeBrYAVgMmAL1qbPMgMDg97gfcXctx1gc+BjrWsu5hYFB63Bd4ojEx7rHHHhZasRUrzCoqzLbc0uyLL8xefdUMzG69tdSRhdpkPp+7787vcefMM
TvpJD/2JpuY7bWXWceOZu+/n9/zFNO//23Wvr3Z4Yf733mRAdVWx/dqIa9UegNTzWyamS0F7gf619imF/BcejyylvUARwJPmdmi7IWS1sYT0WP5DDq0Is89553ozjnHW9Dsuqt3KIsisPKUGeeqMZX0uejc2a9YXn7ZZ3KsqvJipG7d8nueYtp7bx9w8rHHfFTlMlLIpLIp8H7W8+lpWbYJwID0+Aigk6QNamxzDHBfLcc/HHjWzBZkLdtb0gRJT0naobagJJ0sqVpS9Zw5c3J8KaFFuvJK2HhjH8YcvEhl4EBPNvHZl58RI7xp8MYbF+b4ffp4Xcvzz3txaEt35pk+hMu553pRX5kodUX9EGB/SeOA/YEZwPLMSkldgZ2A4bXseywrJ5tXge5mtgtwI3VcwZjZLWZWYWYVXbp0ycuLCGVozBgvnz/7bJ8AKeOoo2D5ch9XKZSPxYvhhRca3+qrsdq18xkWW1LlfF0kuP126N7dfyyVyQ+lQiaVGcBmWc+7pWVfMrOZZjbAzHYDLkzL5mVtMhB41MxWmkxBUme8eO2fWcdaYGYL0+MngVXTdqEtuuoqbwn0s5+tvHyXXWDrrX1gwlA+XnrJE0u+i75au3XW8blXPvrIR3BevrzhfQqskEllDLC1pJ6SVsOLsR7P3kBSZ0mZGM4Hbq9xjJpXIxlH4pXyXw7dKWljyZuMSOqNv7Zo5tMW/ec/fiVy6qk+ym22KAIrT5WV3oN+v/1KHUnLs+uu3oz6mWd8KuQSK1hSMbNlwGl40dVk4AEzmyTpMkmHpc36AlMkvQlsBHz5jkjqgV/p/KuWw9dWz3IkMFHSBOAG4JjUSiG0NVdf7UVeZ5xR+/qBA7356iOPFDeuULcRI7zyea21Sh1Jy3TSSd7585JLvupAWiJqy9+7FRUVVl1dXeowQj69/753cvzFL7xjW23MYPvtYdNNvd4llNbcud7v4rLLfM6U0DSffQZ77eVD+Y8b53/fBSJprJlV1Lau1BX1IeTXddf5/a9+Vfc2mSKwUaPgww+LElaox3PPeaKP+pTmWXNNrytctMhnjPzii4b3KYBIKqH1+Ogj749w3HHeIqY+UQRWPiorvcK5otYfvqExtt/e/wdefLFkV32RVELrceON/ivt3HMb3naHHfwfsC11hFyyBN56q9RRfN2IET7/fPv2pY6kdTj2WC/+veYaePzxhrfPs0gqoXX49FNPKv37Q69eDW+fKQL717/ggw8KH1+pzZ7tc5Rst523jisXb78N77xT+P4pbc3118Mee/iAmu+8U9RTR1IJrcMtt/jc5uefn/s+Rx3lZfkPP1y4uMrB5Mnem3z8eE+md91V6oi+kmmpFPUp+bX66l6/YuY/npYsKdqpI6mElm/JEvjDH7wIZa+9ct9vhx38qqY1d4QcOdLncf/sM78qO+QQH+W2DDrJAV6fsvnm3iE15FfPnv4DorraR5YokkgqoeW7+26YObNp4zkNHOhjQc2alf+4Su3OO+Hb3/b5SaqqfM73wYN9HpFyaEq9fLm3/DrooOYPdR9q17+/D/n/5z/7lMRFEEkltGzLl3uF5O67N61cvjUWgZnBb37jMxr27etDoPTo4eu+/31Ybz1POKX26qteZBlFX4X1u9/BN7/pHSSLUJ8WSSW0bI884i2azj+/ab92e/XykXFbSyuwxYvhhz+Eyy+HE0+EJ59cef71NdbwPgyPPgrz55csTOCr+pQDDyxtHK3dqqv6VUrHjnDkkV4UWkCRVELLZebD22+zDRxxRNOPM3Cgt+ufMaPhbcvZRx/5r/777vMBNW+91b9QajrhBE8+pU6klZU+wOeGG5Y2jrZg003h3nvhjTfglFP8f6dAIqmEluuZZ3w4inPOad5Q5q2hCOzNN72FV3W1J4tzz637ym3PPb2PTilbgS1a5MVy0ZS4eA46CC6+GIYOhdtuK9hpIqmEluuqq/wX2I9+1LzjbLcd7Lxz6X+5N9Xzz3tCWbDAW
3sddVT920teYf/SS6XrDPnCC7B0adSnFNuvf+2J/LTTvIl5AURSCS3T6NE+dtfZZ3ub/OYaONC/ZKdPb/6xiunuu/2LeaON/D3Ze+/c9vvRj2CVVUp3tTJiBKy2mk+YFYqnXTsYNsynWL6vtllFmi+SSmiZrrwS1l8fTj45P8fL/Lp/6KH8HK/QzHyY80GDvGXPv//tozPnatNNvbnx0KE+BlqxVVbCPvt45XEori5dvJj0qqsKcvhIKqHlmTTJxzT65S/zN//GNtt4pXFLKAJbssSTyaWXeqX70097M+HGGjzYpwoYOTLvIdZr9myYMCHqU0pp440L1jcokkpoea6+2n/h/vKX+T3uwIHw8sv+RVuu5s71L+N77vFZ/m6/3YuRmqJ/fx8duNhFYJmOl1Gf0ipFUgkty7vvetPIk0+GDTbI77HLvQjsrbe8zuSVV7w8/IILmvdrs0MH77Py0ENeyV8sI0b4ldXuuxfvnKFoIqmEluW667yCub5JuJpq661ht93KswjshRe8hdfHH/sv/WOOyc9xBw+Gzz8vXiI18/qUfv2a1ww8lK0Gk4qksZJOldSEQtsQ8mj2bPjb37zlUrduhTnHwIHeiuq99wpz/Ka4914vKurc2WPbZ5/8HbtPH69PKlYR2FtvefFi1Ke0WrlcqRwNbAKMkXS/pIOl3K65JR0iaYqkqZK+NtqfpO6SnpX0mqRRkrql5QdIGp91Wyzp8LTuTknvZK3bNS2XpBvSuV6TFNfWrc3//q9XUucyCVdTlVMRmBn89rc+7EqfPl7fs9VW+T1Hps/K88/DtGn5PXZtKiv9PupTWi8zy+mGJ6DDgBnAf4FLgfXr2b4d8DawBbAaMAHoVWObB4HB6XE/4O5ajrM+8DHQMT2/Eziylu2+AzwFCOgDVDX0mvbYYw8LLcT8+WbrrGP2gx8U/lx77GHWu3fhz1OfJUvMBg0yA7PjjzdbvLhw5/rvf80ks4svLtw5Mg4/3Kxnz8KfJxQUUG11fK/mVKciaWfgOuBa4GHgKGAB8Fw9u/UGpprZNDNbCtwP9K+xTa+sY4ysZT3AkcBTZraogTD7A0PTax4NrCupawP7hJbi5pt9AMSmDG/fWAMHemX4u+8W/ly1+fxzOPhg70Ny6aVeNJWPDp512Wwzv3K4667C9llZtuyroe5Dq5VTnQpwPTAG2NnMTjezKjO7DqjvenlTILtt5vS0LNsEYEB6fATQSVLNJj3HADW7fl6Ririul5T5b8vlfEg6WVK1pOo5c+bUE34oG4sX+/SoBx0EFRWFP1+mCKxUk3f9z//4aAFDh8JFFxVnrpHBgz2JPv984c5RXe2tzKI+pVXL5UrlKDM70MzuNbOV5qQ0swF17ZSjIcD+ksYB++NFa19OSZeuNHYChmftcz6wHbAnXjTWqAJ2M7vFzCrMrKJLly7NDD8UxV13+TzyjZkquDl69vTkVYpWYE8+CTfdBGedBccfX7zzHnEEdOpU2Ar7ykpPkP36Fe4coeRySSonSVo380TSepIuz2G/GcBmWc+7pWVfMrOZZjbAzHYDLkzL5mVtMhB41My+yNpnViriWgLcgRez5XS+0AItW+aTcO25p08XXCwDB/ov62JUXmfMnu0Ta+20k0+sVEwdO8LRR/vV2cKFhTnHiBHeNyXf/YtCWcklqRya/UVvZp/gleINGQNsLamnpNXwYqzHszeQ1FlSJobzgdtrHONYahR9ZepJUgu0w4GJadXjwKDUCqwPMN/MWuEcsW3Mgw/6F3tTJ+FqqmK3AjPzSbXmz/cmxGusUZzzZhs82CdwKsQUAAsXeuu1qE9p9XJJKu2y6i2Q1AFosNbQzJYBp+FFV5OBB8xskqTLJB2WNusLTJH0JrARcEXWeXrgVx7/qnHoYZJeB14HOgOZq6Yn8TqeqcCtwCk5vLZQzsx80LvttvMhRYqpRw+f071YRWB//Ss88YQPQbPjjsU5Z0377ANbblmYIrDnn4cvvoj6l
DagfQ7bDAOelXRHev5jIKe/OjN7Ev+yz152Udbjh4Bafwqa2bvUUtFuZrUWyKZmbqfmEldoIZ56Cl57De64w3vRF9vAgTBkCLz9tn/ZFsp//uND+H/72/kfz6wxJB+g8je/8Ur7zLz2+VBZ6Vdf+ey4GcpSg/+pZnY1fgWxfbr91syuKXRgIXDlld7c9bjjSnP+I4/0+0K2Alu61Ds3duwId95ZmuSZLdM44O6783vcESN87pRSFOuFosrpL9jMnjKzIek2vOE9QmimF1/025AhTR+Ft7m6d/ee7IUsArv4Ynj1VR9+pmsZdKvq3t1bZ915Z/7mMZ81CyZOjPqUNiKXfip9JI2RtFDSUknLJRVxSNPQJl11lbcSOvHE0sYxcCCMG1eYaXf/9S+vQznpJDj88Pwfv6kGD/bGES++mJ/jZYa6j/qUNiGXK5U/4a2w3gI6ACcBNxUyqNDGvfYa/POfcMYZsOaapY2lUEVg8+Z5UdOWW3rHznLygx/45Gf5qrCvrPQfCLvskp/jhbKWS0U9ZjZVUjszWw7ckTorFqknWihLixf7YIdz53qnuezbWmt9fVnm1qFDw02Dr77aj3HaacV5LfXZbDOfw+SBB3z+knw55RSYOdOnAc7X7JX5suaa3qT6gQfghhuaN+WvmdenHHhg6euLQlHkklQWpX4m4yVdA8wi5mEJF1zgv7A33BA+/dTHq8pFu3b1J50114T77/ce5U2ZIrcQBg70eN5804eJb65hw3ySrd/+1pstl6PBg73V3aOPekOCppo82ZNnFH21GbIGKuMkdQc+xEcaPgtYB/izmU0tfHiFVVFRYdXV1aUOo+WprPTmr6eeCn/6ky9btsw7uH36aW63+rbt0AGqqmCTTUr7OjOmT/crlssvhwsvbN6x3n3Xi4F22snrVMp1oqoVK3yY/S23/Gq4+qa44QYvxnznnfw2UQ4lJWmsmdU6EF+9SUVSO3zk32b8VClfkVSaYO5c2Hlnn9t87FhPAG3BN7/pCW/ChKYfY/lyH2pm/Hg/Ts+eeQuvIC691G/vvedJtSm+/33vh1OIhg6hZOpLKvUWY6U6lO6p+Cu0dWY+N/ycOT6USFtJKOBFYK+95l+QTXX11T4t8E03lX9CARg0yD/zpvZZ+eILH205mhK3KbnUjUwDXpL0G0lnZ26FDiyUoTvugEcegSuugF13LXU0xfWDH3gDg6a2Aquu9j4pRx/t0yG3BD17wv77N73PSlWVF3NGfUqbkktSeRt4Im3bKesW2pKpU+H007345le/KnU0xbfppl4E1pSOkJ995pXdXbvCX/5S3IExm+uEE7zoavToxu87YoS3+Crm6NKh5Bps/WVmlxYjkFDGvvjCf12vuqr3XWirTUMHDvSxud54A3r1yn2/s8/2L+bnniufFm25+sEPvEHGnXd60+rGqKz0eWla2msOzZJLj/qRkp6reStGcKFMXH65F2X89a9Nr7BtDZpSBPbYY3DLLT6bY9++hYqscDp18g6g99+fe7Nx8Bkeq6qiPqUNyuUn5xDgf9LtN8B4IJpMtRX//rcnlUGD/Jd6W9a1qw+KmGsR2KxZPgTLbrt5n5SWavBgTxL/+Efu+4wa5a3doj6lzclllOKxWbeXzOxsfB6U0NotWODFXt27w403ljqa8jBwoBd/TZpU/3YrVvgsjp995p0dSzUoZj707Qubb+5FYLkaMcJ74je2yCy0eLkUf62fdess6WC8A2Ro7U4/3fso3H03rL12qaMpD5kisIauVv70Jxg+HK67DrbfvjixFcoqq/jVSmUlzMhxhu7KSthvP1i9wfn8QiuTS/HXWLy4ayzwMvAroMRDx4aCe/BBr5S/8MKYWCnbxht7M9sHH6y7me3EiXDOOfDd78IvflHc+Apl0CC/+rrnnoa3nT7d+/NEfUqblEvxV08z2yLdb21m3zazPI2JHcrS9Onws5/5uFS/+U2poyk/Awf6mFa1FYEtWeLNh9deG267r
WU1H67PVlt5k+pc+qyMGOH3UZ/SJuVS/HWqpHWznq8nKaf53yUdImmKpKmSzqtlfXdJz0p6TdIoSd3S8gMkjc+6LZZ0eFo3LB1zoqTbJa2alveVND9rn4tqni/kYMUKL+pYutTrAlZdtdQRlZ8BA7xIqLYisAsu8J73t98OG21U/NgK6YQT/ApkzJj6txsxwgca3XHHooQVyoyZ1XsDxteybFwO+7XDO05ugQ9GOQHoVWObB4HB6XE/4O5ajrM+8DHQMT3/DqB0uw/4RVreF3iiobiyb3vssYeFGq691gzM/va3UkdS3vr1M9t2W7MVK75aVlnp790vflG6uApp/nyzDh3qf30rVphttJHZcccVL65QdEC11fG9mkudSjvpq2v4NMhkLk1ZegNTzWyamS0F7gf619imF5Dp8zKylvUARwJPmdkiADN7MuuFvQJ0yyGWkIvx4/2X9hFHwE9+UupoytvAgTBlCrz+uj+fO9ev8LbbDn7/+9LGVihrr+1Xaffd5/Pp1GbiRPjww6hPacNySSpPA3+XdKCkA/Grg6dz2G9T4P2s59PTsmwTgAHp8RFAJ0kb1NjmmHTOlaRir+NrxLK3pAmSnpK0Q21BSTpZUrWk6jlz5uTwMtqIzz+H446Dzp3h1ltbT11AoRxxxFdFYGZeBzVnjhcZNmdSq3J3wgk+a+X//V/t6zPD5EdSabNySSrn4lcTv0i3Z4Fz8nT+IcD+aSbJ/YEZwPLMSkldgZ2A4bXs+2fgeTN7IT1/FehuZrsANwKP1XZCM7vFzCrMrKJLly55ehmtwDnneOXzXXf51K+hfhtu6GNaPfCAV14//LB3cNx991JHVlgHHADdutXdZ2XECNh227Y98kIbl0tS6QDcamZHmtmRwN+AXBqfzwCy/7K6pWVfMrOZZjbAzHYDLkzL5mVtMhB41My+yN5P0sVAF+DsrGMtMLOF6fGTwKqSOucQZ3jqKe9XceaZ0WKnMQYO9DG9fv5zb2Y8ZEipIyq8du28efHw4T5iQLYlS3zisfgbatNySSrP4oklowMwIof9xgBbS+qZ5mM5Bng8e4PUmTITw/nA7TWOcSw1ir4knQQcDBxrZiuylm+cqfuR1Bt/bXNziLN83HOPz6j49NNNG2q8KWbP9p7fO+0EV15ZnHO2Fkcc4V+yHTrA0KHlO4tjvg0a5EOwDBu28vLRo2HRoij6auNySSprZK4AANLjBguNzWwZcBpedDUZeMDMJkm6TNJhabO+wBRJbwIbAVdk9pfUA7/S+VeNQ9+ctn25RtPhI4GJkiYANwDHpMr8luO227xM+tBDvcNhZWVhk4uZj001b55/QayxRuHO1Rp16eLD1zz8sA9j0lZsu60Pv1Kzz0plpSfWljhwZsifupqFZW7AS8DuWc/3AF5uaL+WcCurJsXLlpmttZbZT39qdvPNZt26efPUffYxGzFi5aar+XLzzX6O66/P/7FD6/bXv/rfTnX1V8v22sts771LF1MoGprZpPhM4EFJL0h6Efg7fgUS8umNN3yWvH339ZZEU6f6tLPvvuvFCX37+siv+TJlCpx1lpd/n356/o4b2oaBA31cr0yF/SefeKfIqE9p83IZpmUMsB3e8uvnwPZmNrbQgbU5VVV+v9defr/66nDKKZ5cbrzRK4QPOAD69fN5zptj6VIfSqRjR/9SaKuTboWmW3ddr1O6916voB81ykdjiPqUNi/Xb5Nt8Y6KuwPHShpUuJDaqKoqnyFv661XXr7GGnDaafD22/DHP3qz3/3283/el15q2rkuuQTGjvX+KJts0tzIQ1t1wgnw8cfwz396fcqaa371oyi0WbmM/XUx3u/jRuAA4BrgsHp3Co1XVeUDONbV6bBDBzjjDE8u113nPbm/+U04+ODGzR/+/PNw1VVw4on+SzOEpjroIP9Rcued3j+lb9+WPW9MyItcrlSOBA4EPjCzHwO7EPOp5NfChT7ibS6/8jp29DnPp02Da66BV1/1l
jiHHgqvvFL/vvPnw/HHw5Zb+lVPCM3Rrp3/Pf3zn148G0VfgdySyufm/UGWSVobmM3KnRpDc1VXe3l0nz6577Pmmj7v+Tvv+JXHmDGelL73PS/aqs2pp/okS/fcA2utlZ/YQ9s2eLD/7UJU0gcgt6RSnYa+vxWfqOtVfLKukC+ZSvrevRu/71prwbnnenK54gqfU76iAvr3h3Hjvtruvvu8L8rFF0e5d8if7bf3v6dNNoFevUodTSgDskZ0rksdEtc2s9cKFlERVVRUWHV1danD8JFfX3/dixCaa8ECuOEGr3eZNw8OPxx++lMfLHKHHXwYjfbtm3+eEDLeftuLVlv7uGfhS5LGmllFresak1Ram7JIKmaw6abeVDiXqVpzNX++15tcf70/7tQJJkyAnj3zd44QQptUX1KJDgqlNn26D8yX7yKpddbxoq533vExvR56KBJKCKHgohyk1Gp2esy39daD8742k3MIIRREnUlF0vr17WhmH+c/nDaoqsrb9u+yS6kjCSGEZqvvSmUsYPhc8DUZPvd8aK6qKthtNx+WJYQQWrg6k4qZRQF8oS1b5n1KTjqp1JGEEEJe5DJMiyT9SNJv0vPN0yRYobkmTvRJjaLfSAihlcil9defgb2B49LzT4GbChZRW1LoSvoQQiiyXFp/7WVmu0saB2Bmn6TpgUNzVVVB586wRVRPhRBah1yuVL6Q1A6vnEdSF2BF/buEnFRV+VVKXSMThxBCC5NLUrkBeBTYUNIVwIvA73I5uKRDJE2RNFXS1zpLSOou6VlJr0kaJalbWn5Amn8+c1ss6fC0rqekqnTMv2eumiStnp5PTet75PQOlMqCBT43ShR9hRBakVxmfhwGnANcCcwCDjezBxvaL13d3AQcik/wdaykmiPO/R4YamY7A5elc2BmI81sVzPbFegHLAKeSftcDVxvZlsBnwAnpuUnAp+k5den7crXmDE+REsklRBCK1JnUpG0fuaGD3d/H3Av8GFDHSOT3sBUM5tmZkuB+4H+NbbpBTyXHo+sZT34fC5PmdkiScKTzENp3V3A4elx//SctP7AtH15as7IxCGEUKbqu1IZC1Sn+znAm8Bb6XEuc9RvCryf9Xx6WpZtAjAgPT4C6CRpgxrbHIMnNIANgHlmtqyWY355vrR+ftq+PI0eDdtu63N9hxBCK1FnUjGznma2BTAC+L6ZdTazDYDv8VVRVHMNAfZPLcv2B2YAyzMrJXUFdgKG5+l8SDpZUrWk6jlz5uTrsI1j9lUlfQghtCK5VNT3MbMnM0/M7CngGznsN4OVZ4jslpZ9ycxmmtkAM9sNuDAtm5e1yUDgUTP7Ij2fC6wrKdMUOvuYX54vrV8nbb8SM7vFzCrMrKJLly45vIwCeO89mD07kkoIodXJJanMlPRrST3S7UJgZg77jQG2Tq21VsOLsR7P3kBSZ0mZGM4Hbq9xjGP5qugL88lfRuL1LACDgX+kx4+n56T1z1m5ThYTnR5DCK1ULknlWKAL3qz4UWDDtKxeqV7jNLzoajLwgJlNknSZpMPSZn2BKZLeBDYCrsjsn5oEbwb8q8ahzwXOljQVrzO5LS2/DdggLT8bKN/x3quqYI01YOedSx1JCCHkVc4zP0rqhF8sLCxsSMVTspkf99nH7196qfjnDiGEZmrWzI+SdkoV6ROBSZLGStox30G2GV98Aa++Cn36lDqSEELIu1yKv/4KnG1m3c2sO/Ar4JbChtWKvfYaLF4c9SkhhFYpl6SyppmNzDwxs1HAmgWLqLWLSvoQQiuWyyjF09JcKnen5z8CphUupFauqgo22gg237zUkYQQQt7lcqXyE7z11yPp1iUtC00RIxOHEFqxBq9UzOwT4PQixNL6ffIJTJkCgwaVOpIQQiiIOpOKpMfrWgdgZofVtz7U4pVX/D7qU0IIrVR9Vyp74wM03gdUAVFe01xVVV7steeepY4khBAKor6ksjHwLbz3/HHAP4H7zGxSMQJrlaqqY
PvtYe21Sx1JCCEURH2jFC83s6fNbDDQB5gKjJJ0WtGia01iZOIQQhtQb0W9pNWB7+JXKz34amrh0FjTpsHcuZFUQgitWn0V9UOBHYEngUvNbGLRomqNMp0eY3iWEEIrVt+Vyo+Az4AzgNOzZuYVPrBkVAw0RlUVdOwIO+xQ6khCCKFg6kwqZpZLx8iQq6oqqKiA9rkMYhBCCC1TJI5iWLIExo2L+pQQQqsXSaUYJkyApUsjqYQQWr1IKsUQIxOHENqISCrFMHo0bLIJdOtW6khCCKGgIqkUQ3R6DCG0EQVNKpIOkTRF0lRJ59WyvrukZyW9JmmUpG5Z6zaX9IykyZLekNQjLX9B0vh0mynpsbS8r6T5WesuKuRry9lHH8Hbb0dSCSG0CQVr3yqpHXATPn7YdGCMpMfN7I2szX4PDDWzuyT1A64Ejk/rhgJXmFmlpLWAFQBmtm/WOR4G/pF1vBfM7HuFek1NEiMThxDakEJeqfQGpprZNDNbCtwP9K+xTS/gufR4ZGa9pF5AezOrBDCzhWa2KHtHSWsD/YDHCvYK8qGqClZZxfuohBBCK1fIpLIpPnR+xvS0LNsEYEB6fATQSdIGwDbAPEmPSBon6dp05ZPtcOBZM1uQtWxvSRMkPSWp1q7rkk6WVC2pes6cOU18aY1QVQU77ghrrVX4c4UQQomVuqJ+CLC/pHHA/sAMYDleLLdvWr8nsAVwQo19j8Xnesl4FehuZrsAN1LHFYyZ3WJmFWZW0aVLl/y9ktpP5sVfUfQVQmgjCplUZgCbZT3vlpZ9ycxmmtkAM9sNuDAtm4df1YxPRWfL8ASxe2Y/SZ3x4rV/Zh1rgZktTI+fBFZN25XOW2/5FMKRVEIIbUQhk8oYYGtJPSWtBhwDrDRFsaTOkjIxnA/cnrXvupIylxL9gOwK/iOBJ8xscdaxNlYa9VJSb/y1zc3za2qc6PQYQmhjCpZU0hXGacBwYDLwgJlNknSZpMz89n2BKZLeBDYCrkj7LseLvp6V9Do+MvKtWYc/hpWLvsATzURJE/B5X44xMyvIi8tVVZXXpWy/fUnDCCGEYlGpv3dLqaKiwqqrqwt3gj33hE6d4LnnGt42hBBaCEljzazWJq2lrqhvvT7/HMaPj6KvEEKbEkmlUMaNg2XLIqmEENqUSCqFEpX0IYQ2KJJKoVRVwWabQdeupY4khBCKJpJKocTIxCGENiiSSiHMng3vvgt9+pQ6khBCKKpIKoUQ9SkhhDYqkkohVFVBu3aw++4NbxtCCK1IJJVCqKqCnXeGjh1LHUkIIRRVJJV8W7EiRiYOIbRZkVTybcoUWLAgkkoIoU2KpJJvo0f7fSSVEEIbFEkl36qqYJ11YNttSx1JCCEUXSSVfKuq8tGJV4m3NoTQ9sQ3Xz4tWgSvvx5FXyGENiuSSj6NHQvLl0dSCSG0WZFU8il60ocQ2rhIKvlUVQU9e8KGG5Y6khBCKImCJhVJh0iaImmqpPNqWd9d0rOSXpM0SlK3rHWbS3pG0mRJb0jqkZbfKekdSePTbde0XJJuSOd6TVLxx0iJkYlDCG1cwZKKpHbATcChQC/gWEm9amz2e2Come0MXAZcmbVuKHCtmW0P9AZmZ637HzPbNd3Gp2WHAlun28nAX/L8kuo3axa8/34klRBCm1bIK5XewFQzm2ZmS4H7gf41tukFPJcej8ysT8mnvZlVApjZQjNb1MD5+uMJysxsNLCupOLNkBX1KSGEUNCksinwftbz6WlZtgnAgPT4CKCTpA2AbYB5kh6RNE7StenKJ+OKVMR1vaTVG3E+JJ0sqVpS9Zw5c5r+6mqqqoJVV4XddsvfMUMIoYUpdUX9EGB/SeOA/YEZwHKgPbBvWr8nsAVwQtrnfGC7tHx94NzGnNDMbjGzCjOr6NKlSz5eg6uqgl12gTXWyN8xQwihhSlkUpkBbJb1vFta9iUzm2lmA8xsN+DCtGwefpUxPhWdLQMeA3ZP62elIq4lwB14MVtO5
yuY5cthzJgo+gohtHmFTCpjgK0l9ZS0GnAM8Hj2BpI6S8rEcD5we9a+60rKXEr0A95I+3RN9wIOByambR4HBqVWYH2A+WY2qyCvrKY33oCFCyOphBDavPaFOrCZLZN0GjAcaAfcbmaTJF0GVJvZ40Bf4EpJBjwPnJr2XS5pCPBsSh5jgVvToYelZCNgPPDztPxJ4DvAVGAR8ONCvbaviUr6EEIAQGZW6hhKpqKiwqqrq5t/oJ/+FB5+GObOBan5xwshhDImaayZVdS2rtQV9a1DptNjJJQQQhsXSaW5Fi6ESZOi6CuEEIik0nzV1T4vfSSVEEKIpNJsmUr63r3r3y6EENqASCrNVVUFW20FG2xQ6khCCKHkIqk0V4xMHEIIX4qk0hzTp8PMmZFUQgghiaTSHKNH+30klRBCACKpNE9VFay2mg8kGUIIIZJKs1RV+VD3q6/e8LYhhNAGRFJpqmXLYOzYKPoKIYQskVSaauJEWLQI+vQpdSQhhFA2Iqk0VYxMHEIIXxNJpamqqqBzZ+jZs9SRhBBC2Yik0lQxMnEIIXxNJJWmWLAAJk+Ooq8QQqghkkpTjBkDZpFUQgihhkgqTbH66vDd78Kee5Y6khBCKCsFm6O+VfvmN+GJJ0odRQghlJ2CXqlIOkTSFElTJZ1Xy/rukp6V9JqkUZK6Za3bXNIzkiZLekNSj7R8WDrmREm3S1o1Le8rab6k8el2USFfWwghhK8rWFKR1A64CTgU6AUcK6lXjc1+Dww1s52By4Ars9YNBa41s+2B3sDstHwYsB2wE9ABOClrnxfMbNd0uyzfrymEEEL9Cnml0huYambTzGwpcD/Qv8Y2vYDn0uORmfUp+bQ3s0oAM1toZovS4yctAV4BuhFCCKEsFDKpbAq8n/V8elqWbQIwID0+AugkaQNgG2CepEckjZN0bbry+VIq9joeeDpr8d6SJkh6StIOtQUl6WRJ1ZKq58yZ0/RXF0II4WtK3fprCLC/pHHA/sAMYDnegGDftH5PYAvghBr7/hl43sxeSM9fBbqb2S7AjcBjtZ3QzG4xswozq+jSpUt+X00IIbRxhUwqM4DNsp53S8u+ZGYzzWyAme0GXJiWzcOvasanorNleILYPbOfpIuBLsDZWcdaYGYL0+MngVUldS7A6wohhFCHQiaVMcDWknpKWg04Bng8ewNJnSVlYjgfuD1r33UlZS4l+gFvpH1OAg4GjjWzFVnH2ljyMVMk9cZf29yCvLIQQgi1KlhSSVcYpwHDgcnAA2Y2SdJlkg5Lm/UFpkh6E9gIuCLtuxwv+npW0uuAgFvTPjenbV+u0XT4SGCipAnADcAxqTI/hBBCkagtf+9KmgO8V+o4augMfFTqIBqhJcXbkmKFlhVvS4oVWla85RhrdzOrtVK6TSeVciSp2swqSh1HrlpSvC0pVmhZ8bakWKFlxduSYoXSt/4KIYTQikRSCSGEkDeRVMrPLaUOoJFaUrwtKVZoWfG2pFihZcXbkmKNOpUQQgj5E1cqIYQQ8iaSSgghhLyJpFJEkjaTNDLNDzNJ0hlp+SWSZmTNBfOdrH3OT/PRTJF0cAliflfS6ymu6rRsfUmVkt5K9+ul5ZJ0Q4r3NUm713/0vMa5bdb7N17SAklnltN7m+b/mS1pYtayRr+Xkgan7d+SNLjI8V4r6T8ppkclrZuW95D0edb7fHPWPnukv6Gp6TWpSLE2+rNXA3NAFTDWv2fF+a6k8Wl5Sd/XJjGzuBXpBnQFdk+POwFv4sP/XwIMqWX7XvhIzqsDPYG3gXZFjvldoHONZdcA56XH5wFXp8ffAZ7CR0DoA1SV6H1uB3wAdC+n9xbYDx/DbmJT30tgfWBaul8vPV6viPF+G5+WAuDqrHh7ZG9X4zivpNeg9JoOLVKsjfrs0+1tfADb1dI2vYoRa4311wEXlcP72pRbXKkUkZnNMrNX0+NP8eFrak4HkK0/cL+ZLTGzd4Cp+Dw1pdYfuCs9vgs4PGv5U
HOj8fHbupYgvgOBt82svtESiv7emtnzwMe1xNGY9/JgoNLMPjazT4BK4JBixWtmz5gPwQQwmgbmM0oxr21mo82/CYfy1WssaKz1qOuzz2UOqILGmq42BgL31XeMYr2vTRFJpUTk0yPvBlSlRaelIoXbM0Ug5DYnTaEZ8IyksZJOTss2MrNZ6fEH+FhsUB7xgg9emv1PWa7vLTT+vSyXuAF+gv9Czugpn//oX5L2Tcs2xWPMKHa8jfnsy+G93Rf40MzeylpWju9rnSKplICktYCHgTPNbAHwF2BLYFdgFn75Wy6+aWa749NCnyppv+yV6VdS2bRLl4+IfRjwYFpUzu/tSsrtvayPpAuBZfj03uDv7ebm01icDdwrae1SxZe0mM8+y7Gs/IOoHN/XekVSKTL5jJUPA8PM7BEAM/vQzJabD+V/K18VwzQ4J02hmdmMdD8beDTF9mGmWCvdz06blzxePPm9amYfQnm/t0lj38uSxy3pBOB7wA9TIiQVJc1Nj8fidRPbpNiyi8iKFm8TPvuSvreS2uMz4f49s6wc39eGRFIpolReehsw2cz+kLU8u97hCCDTKuRx4BhJq0vqCWyNV84VK941JXXKPMYraSemuDKtjgYD/8iKd1BqudQHmJ9VtFMsK/3SK9f3Nktj38vhwLclrZeKc76dlhWFpEOAc4DDzGxR1vIuSlN+S9oCfz+npZgXSOqT/v4HZb3GQsfa2M++wTmgCuwg4D9m9mWxVjm+rw0qdUuBtnQDvokXb7wGjE+37wB3A6+n5Y8DXbP2uRD/dTKFIrfuwFvBTEi3ScCFafkGwLPAW8AIYP20XMBNKd7XgYoix7smPjHbOlnLyua9xZPdLOALvAz8xKa8l3hdxtR0+3GR452K1ztk/n5vTtv+IP2NjMen9v5+1nEq8C/0t4E/kUbyKEKsjf7s0//jm2ndhcV6X9PyO4Gf19i2pO9rU24xTEsIIYS8ieKvEEIIeRNJJYQQQt5EUgkhhJA3kVRCCCHkTSSVEEIIeRNJJYRGko9+3LHUcdQnjW47seEtQ8ivSCohNN6ZQFknleZKvbtDaLRIKiHUIY0o8E9JEyRNlHS0pNOBTYCRkkam7b4t6WVJr0p6MI3tlpmL5po058Urkraq5RyXpMEOR0malo7/tSsNSUMkXZIej5J0vaRqSZMl7SnpEfn8KpdnHb69pGFpm4cyV1fyeTj+lQYJHZ41TMwoSX+Uz5tzRkHe1NDqRVIJoW6HADPNbBcz2xF42sxuAGYCB5jZAZI6A78GDjIfeLMaH/gvY76Z7YT3eP5jHefZDh/SvjdwcRofriFLzawCuBkfnuNUYEfgBEkbpG22Bf5sZtsDC4BT0rFvBI40sz2A24Erso67mplVmFlLGHwxlKG4xA2hbq8D10m6GnjCzF6oZZs++KRPL/kQTKwGvJy1/r6s++vrOM8/zWwJsETSbL4a/r4+mTGpXgcmWRpjTdI0fFDEecD7ZvZS2u4e4HTgaTz5VKZ42+FDhmT8nRCaIZJKCHUwszfl0/h+B7hc0rNmdlmNzYRPmnVsXYep43G2JVmPl+P/l8tYuSRhjTr2WVFj/xV89X9d83yW4p1kZnvXEctndSwPISdR/BVCHSRtAiwys3uAa/EpYAE+xaeDBp/9cJ9MfUmqh9km6zBHZ91nX8E05ENgQ0kbSFodH2q+sTaXlEkexwEv4gModsksl7SqpB2acOwQahVXKiHUbSfgWkkr8BFlf5GW3wI8LWlmqlc5AbgvffmD17G8mR6vJ+k1/GqirquZrzGzLyRdhg/JPgP4TxPin4JPrHY78AbwFzNbKulI4AZJ6+DfAX/ER8INodlilOIQCkTSu/iQ9R+VOpYQiiWKv0IIIeRNXKmEEELIm7hSCSGEkDeRVEIIIeRNJJUQQgh5E0klhBBC3kRSCSGEkDf/DzhzGcc1LpbQAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "def eval_show(steps_eval):\n", - " plt.xlabel(\"step number\")\n", - " plt.ylabel(\"Model accuracy\")\n", - " plt.title(\"Model accuracy variation chart\")\n", - " plt.plot(steps_eval[\"step\"], steps_eval[\"acc\"], \"red\")\n", - " plt.show()\n", - "\n", - "eval_show(steps_eval)" - ] - }, - { - "cell_type": "markdown", - "id": "exposed-martin", - "metadata": {}, - "source": [ - "In the figure, it can be seen that the change of the model accuracy can be divided in three stages:\n", - "\n", - "Stage 1: When the training starts, the model accuracy rises slowly.\n", - "\n", - "Stage 2: At a certain point, the model accuracy rises sharply.\n", - "\n", - "Stage 3: The model accuracy nearly comes to 1 without reaching.\n", - "\n", - "During the training process, as the training data increases, it will have a positive correlation with the model accuracy, but as the accuracy reaches a certain level, the training gains will decrease.\n", - "\n", - "## Inference and Prediction\n", - "\n", - "Apply the trained model to predict a single image or a set of images. The procedure is as follows:\n", - "\n", - "1. Transform the test data to the data that fits LeNet.\n", - "2. Extract the `image` data.\n", - "3. Call the function, `model.predict`, to predict the corresponding digit in each `image`. To be noted, `predict` will returns the probability of predicting 0 to 9 for each `image`.\n", - "4. Call the `plot_pie` function to display the probability of predictions. Negative probabilities will be removed and not be displayed.\n", - "\n", - "Load the dataset to be predicted and call the `create_dataset` function to transform the test dataset into required formats. Select a set of 32 images for inference and prediction." 
- ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "adolescent-diagram", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Row 1, column 7 is incorrectly identified as 5, the correct value should be 3 \n", - "\n", - "[2 0 1 3 0 0 5 8 7 2 6 2 7 3 1 2 9 5 4 6 0 3 0 8 3 9 5 1 9 6 4 2] <--Predicted figures\n", - "[2 0 1 3 0 0 3 8 7 2 6 2 7 3 1 2 9 5 4 6 0 3 0 8 3 9 5 1 9 6 4 2] <--The right number\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADsCAYAAADXaXXTAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAB72ElEQVR4nO29eXwV5dn//75nzp6TfYFAVgIJi4qssihQNyxVC9allrauda2/+q1Vq31qN1tbW/v0aat2UVFbS13qiiBoVRYVkB3ZISQQSIDsZDnbzP3745wkJ8lJyHI2dN6v13nBmZkz88k991xzL9d13UJKiYGBgYFBdFBiLcDAwMDgi4RhdA0MDAyiiGF0DQwMDKKIYXQNDAwMoohhdA0MDAyiiGF0DQwMDKKIYXQNDAwMokjMja4QFAvBG0JwQghqhWC5EJTEWldPCMHZQrBRCFoC/54da009IQR/E4I9QqALwfWx1tMTQpAhBB8JQY0Q1AvBJ0IwM9a6QnGa3f/TRitCSIRoRoimwOepWEvqCSE4Xwg2CUGjEJQKwS39+X3Yja4QmPr5kxTgTaAEGAKsB94Is6yQ9FerEFjwa/snkAo8B7wR2B5RBlCuAFuBO4BNYZbTKwPQ2gTcCGTiL9ffAG8N8G/uM5/n+x9LrQgx0Ps2Himdgc/NYdXUAwMoVzPwGvBXIBm4Bvi9EIzv80mklH36gCwD+QDInSDrQC4CaQM5B2QFyPtBVoH8B0gF5A9BHgBZA/IlkGl9vE4aSAkyva/aoqUV5MUgj4AUQdsOgbwk3rR2ucYakNcPVGOUtSogLwvUgax40nk63f9IaJVQJuEBCTsl1ElYJMEmYY6ECgn3S6iS8A8JioQfSjggoUbCSxJ6vv/+Gz5ysHU0CuU6JFA3HUHbPgV5bV+19beluxCYCxQBxcD/BLYPBdKAfOAW4C5gPjAbGAbUAY+3nUQItgnBN3q4xiygSkpq+qktGlrHAdv8daSdbYHt8aY1UkRMqxBsA1z4ez5PScnxONN5Ot3/uNaKENsQomtdXYUQVQjxKkIUDFJn2LQGl6uUHAMWAzcIgSoE0wPnWdNnVf18c9wW9H1e4M0wB6QHpC1o3y6QFwR9zwbpBWk6xTVyAm/nPr81oqkV5I9B/rvLthdA/jTetHa5RjhbupHWagN5Lcjr4k3n6XT/I6E10NK9Lej7vEBLdo4EjwRb0L5dEi4I+p4twSsh9P2HWRIsElIk/FnCZz0eG+O6ir8ndgykL/D5Tn+09Xfs5XDQ/8vxvxUATkiJK2hfPvCaEOhB2zT8Y7ZHQp1YCDKBFcATUrK4n7qipbUJSOqyLQk4GYdaI0VEtQbOsVgIdgnBFinZGkc6T6f7H3WtSNlNK0L07f5LuSrwPw9CfA9oBMYA2yOhdaDlKgSjgX8DVwDvAqOAJUJwVEre7ouo/g4v5Ab9Pw84Gvi/7HLcYeDLUpIS9LFJ2aPBTcVvcN+Ukl/2U1M0te4A
zhICEbTtrMD2eNMaKaKl1QyMiDOdp9P9jwutSJkS9LEhZV/vv4RO2iOutY/legawV0qWS4kuJXuAt4Ev91VUf43unUKQIwRpwI+AF3s47i/AL4UgH/ytWCH4aqgDhSAJWA58JCU/7KeeqGoFPsT/Bvz/hMAqBN8NbH8/DrUiBBYhsOGvvGYhsAkxaI+VSNSBaUJwbkCvXQjux9/KWBdPOjm97n/EtCJEDkL0SStC5OMXm4kQobUKMQ4hzkYIFSGcwGP4W5i7Bqs1AuW6GRgVcBsTQlAEXIp/vLxv9HOMpG02sB7kcyAdgTGSii7HKiC/D3IPyJOBsZRfBe3fAXJh4P/XgZQgm0E2BX3yBjmeE3atge8TQG4E2QpyE8gJAx4ji7zWDwNlG/yZE29aQc4GuTVwXC3IlSBnxZvO0/D+h1Wr7Oy9UC/hOQkO2ea90PlYRcL3JeyRcDIw9vuroP07JCwM/P/8wHHNEo5LeF3CqDgu16tBfhY4tgLkb0AqfdUm/Cc5NUJQBtwsJe/12aLHCENrZDhdtJ4uOuH00ooQZcDNSBn3WuO5XGMekWZgYGDwRcIwugYGBgZRpM/DCwYGBgYGg8do6RoYGBhEEcPoGhgYGESRXiPSLlKuiquxh3f1l3t0lja0DpyetJ4uOsHQOhg+D1pPF51gtHQNDAwMoophdA0MDAyiiGF0DQwMDKLIoDLzC5MJz5zxSFN32y10ibnRA2v7HpIcryg2G0r2EFpKstq32T89gFZb54+sjSFy+ni8SWYQHUNIsSx7NTUVmTcUV7bzlMd+nupILFHHleAe4kS39NyGcuw5jna0Cul2R0VTqGcmFjrikUEZXcWZQPl1OjaHq9s+n0/Fd8RBSU1QoqgTNWiNTaBrg7lsVFAzMxFOB9KkoqUmcPxsJ665je37M5UiHGUNUFOPXt8Q/UokBKbCfHZfbcc0rAWTqaNMvV4VWZbAiLXRlQQg84ZScXEq+rSGUx4bS53gNwwiOQmSenhBNDZ1ureKzYYybCi+g+Uxf9m23X+pCA5dlk7LGBd2Z0uPh9vfGUbWGgt66aGo1FWRnETd1Gwaru6cSTLlxWySV7aiHRtMfvr+odhsiNxhpz4wmAjaqoEbXSEQNhtvnvcE4yz2brtbdA+vNWfzcOM17duy16Ri31WJXlOL9PmQPt+ALx8plIQEABpnj6C+SEWzgydJZ/i4Kjae+Vr7cYWNN2E5kk7GtjRSPq30P4jR1Gm3U3bNMH45bzGXJxzDoXQsfVWtNXN/wVwqoqrIjyvbiT6tgR3TXzjlsbHSKaxWhKoiCnKoOzuN+uLQLcSUPTqpm6rRSw8hzCYYkUfZZWnkP9WMdLmQra1Rr8PCZELY7QiH//7rVphxyTYezH6HInPPvYtzE67guBzCEJcnOnU1yUndGIVdXerBmK13kLLBEfnrB1AcDhiZx8Er0vr1u3ZbVd+98aA3Nw9K06AX/tMQeKWGEkh9qQp/BXYoFhYm1rDwlifajy3Mv4n0NfmkHBiKtawaX9mhwV4+rAirldbZY9HNAnnzCf5ZspizrdaQxx6c+zQAo9d8C0tDJpZoGl0hUJISeenWxwIvvM5rDeqAPuhUpOFDk/7c0G11I9aIMUW4hiZwfIKZoktK+WTUOyGPm7dnHpWvFJD9vkCaVcq/msbbNz/K1yvuxXpSw7mjGnn4KLqre08vEigOByInm6ax6bgTVZbc9iiF7Ya29+GcNWe9yujGb5F4OEp1VUq6Za0FRBQ7CIrNBiPzKJufxq4gO9QX2myVo7pzS1fxSmz/3Yb0eAbc2xmU0ZVSstOdDVSiBkrYITTSVJVkpXvr9+Dcp2Eu3F05mQ/+MZWhf4gfoyusVkRJIU8+8X+MsbS9iUMb3HhGkzo1muBoczKDXyQgPHr2el3oCIapkiTFFlPjq6ansfc+C89N+yszbb3rWFqylLtvnMzbuVMwtQp23foE4GTdb54EoPj52yn6lwWxc3/EW7zCZILi
AvZ9PZl9334ysPXU4+ZfZERONgcXpAXuW/9os1XBtNXl719+I+qJevSGRvTW1n4b34EbXSnRjp/g+VlTEEGTOHWzC3EtrGPT5J7yBccfSkICrbPH8uQT/0ex2RZrOYNitcvETZ/cScn3yon1yHl7Jb30RpSaevY8NpTnpj3DzFgW8Ss2lo54nCJT90ZBf1mz8HdMH/5dChaNx/T+xjCI6xnPnPEcukHn41m/AxIiei2D0KhCodhs4/dvPoOO4Jq/3EP+vyv63WMf3PCClP4B8SCjm/JfH/qBbGZn34JUwJOosPiXvwvqBsH3Mj7Es9DEh84Z5D788aAkhAvdLIJauJ3Z6PZw565vYPlrWsi/J5ooNhu+c8ZQ9NhnFJrUbvs1BLpbRaupjYE6v1dH7olsZg/z33+fXWHYk/tJtbTy/Yx3OcPiBgZv8AbKcEcDaUrooY6D3iauffAHWJp02lb2sjR6yRdujszu/qbIUhNYNHMRdyVei2PINJIWR2ZGsGHhNDzX1PHcGYvJUns2uH+oK+CPH8wleyXUjVbZeXv/W3jhQE4fz56r7fxyXuelDsc+cQcFr9WgH62Kia6utJVX7vLOLdV5v/qA21K3heytq0JhjMWBJnUeuOFFfpJ5FUUvp/TLA2fQY7pAp+a1VlOLONmEY5f/1E6Hg8ty7uOBG15kQUIlDsVCnsnBXZnvo3xV8r6YQe4vYmN41XElVH4pncZzWhmaWd1t/z2VE3l10ySce8yklGokvL+TxMx0XDKGY5OqiifRzM+GrMShdH9JaLHUBmi1dYimZhLKEvCV5HLkHh//yHudZEWQqFiwCn9F3utt5qGKy9i/qIR0Pom4LjUpiV2/Hs19GU+TGDTpuNfbzCOVl7B26ZmoHshbsQ/ZEuQFoGmIwlwQoZvn51i9TM0+xKcZ47utAhkOam6ejumKE/yq+C0mWzV6cq2/p3Iib604h6K3WzHvOwpiMMvLDQ5fohk1u4XLE44RPN+QPaeCvWPTSPxoItkfVKPt3BtZIdW1ZH+URmHBTe1zMMFMsx/gjZIq6ncNZ+jz21GcCex8OIfpCfuwid5NoyoUFiRU8tNsF16nGXM/ZIXH6AYjJdLt7nBLaWkh/0UHD2VdRcVF/+WGlC1kqQmMNFu5OWMVW2YOD7uEviAmjKP8kmRyLyrnf/OXkaK00nUMd1XlSIasVElfU4FsakZrakJJcHSapBJRnK9Sk5LwTiji0DxIVbsb3PVuL78tv4yUjZYQv44OppzhNI0fRu1YEy3DdBZN+AsFJkenluV6t5efll3F4bcLyFtWRiRHQ4XJhJo7nLJrh/PwBS8y2drUbvjbdSwtoPC1SoSm46uu7jZGp9Y2kLYznbEffxOr2ceHkxa1t4KswkyCyY3s3ukYNA0Lp6EsqObHxUs419aANYThn7dnHofrU9A2ppC3xoVp6wGkrqO69W7HRqOuignjqJxu4cKizzp51AA8VvQy9YV2Hhkyj/KkPPIsJvQtOyOmRWtswr6rkvQ1+d3GZwHOsHj5fwXv8silX+aI9Uw0Gzx87ouMt7S215GuNOitzNl4A26v33TaNzqwHq+le2n3TPiNblekxHewnJEvJfPSqAlceMYOslQwC5Uck4uSlONRdxlSx5VQ/uVk8i8q4xcFrzPJaiHUpFlTq5WhJ3z+MRshUNPTqJtdiEPEZrRUJCdRfYadH8x5q9u+9W4vPy5dQOW7ueR9WB318dw2B/0TJRbqz/Fw1fj15FlrmGWDrq2zw9509lVmkb3fh+/I0ZDnCxfCZMKdn86Cq1dzpbOqk8H9cekCjrybR97b1Wj7D/Z4Dr2+gZRPK7E0ZuFJUqk9WyM5gh2KtqAjzzV1/KrkTc61NeBUOhtct/Ty0PEpVL5cQNohH45DtYhDlWgnT7a7PcaCujOSSJx6gpsyVtHVq8bvCaTjGPEaD100n0Pkk+8pjlyLV9fQa2pJOTCUuysnt2++InUDEy0unIqN8+21pJT8
hzeyJmJXvZ3qSDCa1CnztfCjistxPptMSqsOUmKr9Jd7f4i80W27UEUNTS3peCPRJOgLXZzJi+aW8pO8NwMG148mdVa7TGgINKngqrehuv3mS1gs6AXZuBbWkabG5m+QNguuTLgz5XC3fX8/PofDH+SRv6wu8t22LphyhlN+aTotY12MzivnnmHrWJhY0+PxueYaRmUf51BJAXkjCxG6jEzAgaKipKdRP9LKw1nbIagT2J/ykm43voPlWMoO4RiSRUsE67DicEBxAYdu0HnujMVMtmrtLVyv1DimtbLTk0qVL4MlL82gYGkFWsVR9DjweVczM2kcIfh6zo5Oz1VXplrN/LzwdX584QIqG/PIimB9lT4f1rJqPvjH1PZt6+bm88DIZZxvr8Wp2Jhlg1nZmwJ7Qw8UtEoP/20pZu8/SxiybHO7m2B/WrhtRM3oVs3LY0ruLjLVVqLu6qKoqJnpPTqTu6WXBt1DjSa46ZM70d3+hyp1kwnL8QY0/A9Dwwgnmyb/hbaJoCbdhc+rIrQoOB8KAWYTujn0tVaXjyD7Uw/61sGuWt1/qublcfZXd/Kz4Ut6ddBvY6rVzB9GvMwjCy5hre1MFA/k/d9x9JaeI6oGgprkpHVMNrXndY/AimV59YQwmRA52ez7ejIfz/pdYNKso0l9TGvl6bqpPPvpDIRLpeTP2/Gd7LtbYKSD6JpmFqKNaWK8o/tsfrXmDyjwj+ubmWo1c0/+cm6deB1Z3Y4OH9Ln76kGu6fWnJzOw1d8BbXkLS60n8QqTj0iW6v7WFE9lswnPxmQoQ0m8kY3MJA0+eYt/DL7PTLUjocyKk78AYNbc3FRSGfyOq2Fde5UXqyeytHmZL+rVdDMf5vBlblDqB3duU/5alMO8qgNc+PgIlT6gpqWSkthMt6cnkM4o+l4HszEm7bxyLAVne5tW0BEV9rGdovNCSzKW412y0p2e93c9+yCAfk89kpGGpUzrZReFHoWP1bl1RPCbqdpbHrAD7f7EMFOTyrPfjqD4ps3AL20shSFWMTGHL5c56kpL3CBvWNwS5M6rdLDT6vOp1Uzc03GOs6xNYb0DIgW6U9/QoNrGv9z9XxSzvzXKV0YG/RW1rqGs+lAPqPoPuHeXyJqdIXZgpqeCoBdrUENqgnRcuJX7DaaphUEHNo7t8Lc0stbzXn89L2vMeq764CT3cZC25zS9389mX3f7nh4D3qbePTZOygZgJ/eQGidUsSJG1soDRFeW+lrwus2IXyxtyJu6eWE5qZBD90FT1Y0MlVrp9aFgkTPTEHU1kUlL0A8lVc7QiAcdtyJocvNLb1U+TIQrlMMbZziPBHFpXDCl4RbHseESqPuotynstuTTemlyWjHjnProm+zaNYi5tgH214cHMkvrMVTM4VbvvOtU4asb3Yn8MD6Kxh9z8GwzJVEzOi2BRw8/sQfASg0qZ1cnOLBif+h41NY8tIMSv68vcdWQ09O6dc++APyVuzzz3bHmNkv3Evx4jrkzv2hIi+jykPHp7Dk5RkUPHMg5P6DNxdxxZWrA2OsnR3O77jte9hW7Qj7MENX4qm82jAV5lN2zTCW3PYooYbf+lJX+3KeSDL6/l384q5r+ezq1VyYuIOb195JyT1HkYFAqtOV82w+lp33Z+566Rq4YPD+7xExuqac4VRemsc1d77HaLPfK6CrM/oEazN/m/48Ly2bii47jNnmp85iyNLyiM9qz9szj8qXCyhYWtHruJg0KVis7m5O6dZG3e/PGetsU4DaKlCaXPhiNJlS9oNirkwai1QElkYvBeUV+HrIIlX4go11qyczpehcamZ5KL346XaH84seXcV/Hj+foUsie/8HUl49BaW01aOctw8P2PVNTh/P7qvt/HzeiyGDbvpcV09xnkijnzxJweIjrPtoMh/ZzmFUVbO/HsTBM9KVtoCTv52xmLZx87bgmK898C63pHzWPgSiCoVCk40/FL3E/3v/KpSrWgcVfDQoo9vmcI5N7zRA
ZnF4mZK7i9tSt6H24O+WrNg51+ZifPZ7nbb/f9db2J4yhty3nYOehVczM2mcPQJ5c/e3bHltKpnlXrSK7g/3oYdm4MrxglknM6ueOwtjlHvwNMH06S5MbR4dmobm8/X4oGmHj2CqPEZGXQGNI1I77bszbQv/zLgALP1xNe8FIcI3ttlDUEp7PTrSP7ehYNqCCRYkVNLVzWrmtitwvTqE7A+PhayrbXjmTqZsvsId573b43mcy5049hyJqF902/01KQrS23M9iDWtGQrThh5qDzjRpE6LVEn/pIrFf76YRelzsZ1Tw0Nj3mZ+QhNmoTLCbOaH+ct41HThoK7db6OrJiWhjS2gcobT70x8wYsMNdV3OsYmvAxVW0hWen/bWoUZq9r5AftFzltcOKYIz6dO1EH4TZtyc6iZlUPd5S28WLKYYD/ceyonIjclYz9c28nVRrHZqLppIhdctpGLU7aToLhJUVrJNXlxSwsb3Crf2fRtAPIqmqOW1k9MGEfVOWa+lBe6y26bVEtN6VDSvT58hyPv9SxMJtScYVQsyGnfNvSjkyi7yk6Z9q4tpafa6kHxdbaIyYrdH2QQzYiTPtBbUIquKwifHFRdkIrAbNa6BRMAHPssi5GbTqKXV/R4Dc/cyZQtULh++hquS96GQ+ncK5u3Zx6u14YwZM0JtMpjA9bZF3pL2dqwcBrjRxxkmOkksc4fIVVwqu7ungstrWQvOQQWM9WHsvnBJVfROOktvp1UjVWYGWlupOzmkRS+YEOrODqg+94vo6umpuI5u5BDF1u57OK1Qc7EXVsmCgMdTyoyO7ElutGsVgYzFeDNTef4OZLfT3y1U3rGP9QV8NaKc8hf04o4VNme4b55dBY+h8K4hTu5J+u/5LVHUVlo0DXea03kwc8WMOTv/qlO5VApmsczCIV9pzeHc4CfjF3CPZO/SdKBdESEja5isyHyczh6URYXftPfA3j7wDhayhwkllriIbFZ2OkpKOU/TUl4KhIwN7ZG7NrmRgW1tglflwnG4FVbyuYLbpqxiutTNpAV5EESHECR/cExv+GO4YoNNV9x8eCw1QyLkZ97T7QFPvz40AKk14tW5X8xZXh9eBNy+fewqXw7aSkAmaqVK65azbo1kzFVHY+80ZV5Qzlyno0Fl3zMb4ZsCWwdWFcw2NG7K8FBCQPFm2TBPryJ+QlNQEfgw5+WX0LOxxqmmlbITEcPrArhvqQRi8nH43nL2lvolb4mDmtWPm0t5pkD03G8mIxlud/QRHPyrzVTcNHQAz06nI+1HENP8qFb1UG9qPqCSE6iblIGuVeV8ljAofy1T6ZgO+FGuvv2ElJsNrTUBDxJsZ3B7guKzYYvO5XGIr1bUMrPd36FzI1gPlwdsS67N0lHS3MijliRXh9qkhMy05EJNg7doGO1ufjBmFVclbi7k8Gt01p4qzmvUwBFzBYNCAQmzR+9lfNs1TgVBw16K/s8uSiNUQsV6JG2wIeyxSMZ0ry5fbvvcAXJZdmU13bYKKsw83DWdmYnTe8YUusn/fqLXdlOlPENQQa3Z9zSy0m954ewwmfihbrzeGXzpG77goMSwkWj7uKOzbdQ9HIrJwvs1HwlrYdVIToCHxbVT+LlgxNoLE0hbbsgaXHkE7P0lwa9lYcqLsO504LleBTCf5Oc1BcrnRJ/5y6XmDbvO+XQQvCKDScmORk+rnO2qQa9FaERN+OAwmRC5IfWCuDZlErOx0fxVRwZ3HV0ider0qJ7ug0xDD3jOCcmDWFISz6ioYnWccM4OtOCZpVBARQQ3F2v01pY3jKcn773tX4HUISdoMCkbyWsxCpMuKWXNa5UnjkwnbTtsR9KckmNdQ0jwhL40Bf6/ZqR8tSF1BZw8HL1lB6P2V2fRd2aoRT3kNox3MajXtdJfCMR1xAN5brjPa4K0ebU/2pTDs+s+BJFLzWTtT5+J9Ierz2bfc+VkPd2WcQ9PoAeVwTwO+T3Xje6rtjwZpDh1qTOn2onYD8h
weONqNa+BkWoucM5cnEWeVd21trf85wKc6MH7aiD15qzu4VPrznrVeZZ51GeVoD9RDq157mDgj26j4u2GdwHPr6C4u+ui4oRaaft/re9NBUVU1YG1ReNCApMsrDKBQ9+tgDHi8lx2ZBpRwj6YO76Tb+MruLV8bjNVPqayDZ1H7Nt0Fup1jTeajqDP34wNxBwEBonTTgp7b/iAVJo7sj47ye0wW1b5eDRZ6+OWuDDYFj+o9lkvb8N3yDXbRoM7iQFZ2Y6SkLva1/t6WHFhrZy/+9955G1aiu+CPvp9gU1KYmya4ez4OoOn+JgDnqbULyAHgaztnYbJdWFPNxwDQtDrHKwtGQplJz6NMHBPsW9PHuRQE1PQ5j9Q43S60W63AhnAtUXjegWmHTLhm+R/bS1faguHlGTksBuw5OkYjWHd1imX0bXvHIro6pHMevwvUFLhnQwa8ONOF9IIvnDUkpcO6P7lh0kXVc5yG3Ygq81chMknycW/+p3uKRyypDutuV6ggkud+u+behxsjT3rl+P5pELFrPAeZxQ8xbxFBzTRl8DKCLCKzbuzXuHTLWZHx+6nJ0fjEd1EZMgjXCw69ej+cGcZcxLeDWQ4Cp8Ycv9m0jz+RD7yhn1tJvZH93SbX/20WaU8v1o1T1nmIonumaOFz6Jbd82fINYdC5atDlyp609gNYanYURe6LQ7Owx10IwPa2NpiNQaur95R5GZEUlBf+xMla/o9MqCj/65ov8JOUqil5OCpnxf99zE/nN9BeZ6wid5g/CHxzTk9ZTMfaJO0jdo2FyyY7AlBiM4Q53NDDWcpJ0xc5v819jy7XDcElztyCNwuU3kf+Sgn3DgZgvJ9VGsmLj/uzl/Hbdxe3b7klfxBRrA6lqZ/3heO76Paart7QgDh7CcbS7v5/0+aLmRnUqHHuOY39nGOcmXMGas17ttj84075p8z7/RinjpqXVG8GO3L6aWtCjWH17yMbf38Umu67YkNuwJewvOt3lQj1yjNQ9KZ22L0iopOLi//IX+5coTJqMZeV2xOgR7L7bPzzyu+kvc6HjWMikLJW+Jma/cC8jd1WFta7oLhdK6SHyX4Mx4o727TPmdc6I16S7eKN5OA+/cA0Aha/VwJEqfyBCW2BKDNj81Fncfb2FX+S8RYHJQbba1gPomBgsfOdm8l8WODaUodXWxURnKMxCpdBk45FhK9q3+bOhdQyXNekuXm3K4dFn7yBvxT5/RNoAn7sB+Wv05gAdL2hHq8haY+E4Qxjb9M3uB2xJas+0P9h17CNN6l4fb7x/DssKxwL+yUx3q5nR1fuja3DxZ+O3bT1EvqWAsYkhyrWPtJ604txp7VixIUJDOXpLC4mlTUz49OvtKz44FAvfTt6Ma6aZ5xzTSTh7Ei3DdJ6a9XcAJlubuhlct/Syzm3mho/u8udtqKgM+zOgt7Sg7C+j8F8dxny96ywuHFOCPdG/zedV0Y/aKf6XPwJOVlRGbQn43hiytJztKWO4cOyIdq1daTe4NbVx15M0C5WMHtafq/Q1sah+Es+8+yVKXjwScnWR/hB7J7kIId1u9NJDDHF5aDmU2W1/W8Z3LZbuNH3EufUoI2rS8SZ1jIcqXhnxxDAh0TW06moc6zSGaANfh0t1634Xt15WbAgH0uNBLavE+dxIbs+YR4a1CSXgdlDRkkJiSguNY8Ca4OGt+gkAdF+XA5p9VtZV5lGwSPEnyolQo0N3uSCoTHKXmPGsd6JZ/fdeaBJzY3PEy62/+I4cJfdtJ55PO7R2xb7hgL+FGwcG135CsmT/GWg9rDkXTFlTOts+K2DUi83+ZPuD5HNrdCEo43+IgjqdJvl8hysQhyu6xaLFrOpKiVZTi2X54DIuRaWNLiVadQ3Opc3sHjIBzdI9H4N/QUkrH3w8NcQJ/Cg+SKjRMb2/Nqrlru3ci7qTiAe9hINTaY2XMVyAtO0NCD2JDzJ7vudtmFokBaUeWN/di2UgfK6NroFBG7rLReaTcewT
ahBV9C07Sd4CyTG4dmzX6zYwMDD4gmEYXQMDA4MoImQcDGobGBgYfFEwWroGBgYGUcQwugYGBgZRxDC6BgYGBlHEMLoGBgYGUcQwugYGBgZRJOZGVwjOE4KmLh8pBF+LtbauCEGxELwhBCeEoFYIlgvRl0ynsUEIVCF4WAiOCsFJIdgsBCmx1tWV06VcT7O6miEEHwlBjRDUC8EnQjAz1rp6Qgj+JgR7hEAXgutjracnwlFXw250hehnukjJailxtn2AS4EmoHuq/jDTX61ACvAm/pTSQ4D1wBthlhWSAWgF+BkwA5iOP9r1W0DEs6OcLuX6Oa+rTcCNQCaQCvwGeGuA9ahfDPAaW4E7gE1hltMrMamrUso+fUCWgXwA5E6QdSAXgbSBnAOyAuT9IKtA/gOkAvKHIA+ArAH5Esi0Pl5nEchFfdUVY61pgTVh0uNNK8hUkE0giwZTlqdjuRp1tds1FJCXBco0K861rgF5/ee5rva3pbsQmAsUAcXA/wS2DwXSgHzgFuAuYD4wGxgG1AGPt51ECLYJwTe6nlwIEoArgef6qSvqWgPMAqqkZLBZ2yOh9UzAB1wpBFVCsFcI7hykzkhp7Uo4ytWoq4Ft+Hs3bwJPScnxeNUaAeKzrvbzzXFb0Pd5gTfDHJAekLagfbtAXhD0PRukF6TpFNf4FsiDIEUY3nKR1poD8gjIa+NRK8hvBN7AT4O0gzwL5AmQF8Wb1nCXq1FXu13DBvJakNedBlrD2dKNy7ra3/GMw0H/L8f/VgA4IWWnscJ84DUhOmVQ1PCPgfS2XvV1wPNShiV7XsS0CkEmsAJ4QkoWx6nWtqzgP5eSVmCbEPwbmAe8G2dagbCXq1FXAwTOsVgIdgnBFinZGq9aw0xc1tX+Di/kBv0/D2hb87trxTsMfFlKUoI+Nil7LmwhyAXmAM/3U1NUtQpBKv7CflNKfhnHWtsW/wo+RzgMxOlSrl/4uhoCMzDwzPPR1RoO4rKu9tfo3ikEOUKQBvwIeLGH4/4C/FII8gMiM4Xgq6c497eAj6XkQD81RU2rECQBy4GPpOSHYdIZEa2BclwN/EgIrEIwBvg6sCTetEaoXL/odXWaEJwrBBYhsAvB/fhbboNdmz0i5RrQacOfYt4sBDYhBu1dFZ91tZ9jJG2zgfUgnwPpCIyRVHQ5VgH5fZB7QJ4MjKX8Kmj/DpALu/xmN8ibBjuWE0mtIK8LjJM24/cMaPvkxZvWwPfhIN8JaCwFeesXoVyNuioBORvk1sBxtSBXgpwVj1oD3z8M1IHgz5x40xqOutrn1I5CUAbcLCXv9ekHMcTQGhlOF62ni04wtEaKeNYa84g0AwMDgy8ShtE1MDAwiCLGyhEGBgYGUcRo6RoYGBhEEcPoGhgYGESRXiPSLlKuiquxh3f1l0VP+wytA6cnraeLTjC0DobPg9bTRSecwugaxB/CakWMHsHuux3t23JfV3F+dACterB5dwwMDCLNF9romnJzaBo/jPpR3Yth6EcnUXaVoZ88GQNlPSNUFVe2k6dmPdO+7e4dt5K4NQEMo2tgEPd8IY2uOq4E9xAnJ0os1J/jYcGZG7od81bmOeQ5RmHZehCtri4GKruj2GyIghyOTzBzgV1r3+5zAEp8DM+rqanIvKG4sp29H+fWsBxrQtu5N0rKDAw6E6u6+oUzuqac4ZRfmk7LWBej88q5Z9g6FiZ2byEql0jeYBp5FGLZpKE1NsZAbWdEchJ1Z6dRdElprKWERE1KwnN2IUfOs6GMb+j12JZ6OymbMsgyjG6/UWw2RO6wkPuET0M2taCdOBFlVX1DzcxEOB1Ik+rfICXC5cFXEc08OP2rq60nbSTszCCvsTksOr9QRldJSKDy0jzO/upOfjZ8CUXmnt9wvx26GX2u4C3lHPK1Ikyb9oKmobsivtpNN4TJhDCZkEPSqB0n+GRUxFeH6TfCasU7vojyuVaumPsxvxmypdfj/9uq8h3TdQx92haTMu0NxWYD
Ve2+I8b3H1VFWCzI3CEcvCIt5LGmFkg+oJH49sm4K1eAppmF1BWb/L0zQPjAcUyS/vfoGd3+1tW93mZ+OupS9taNDovOyBpd0eMEXgdRCs4QZguts8dyzZ3vcVvqNpKV3rsUAI9lb6Lgshr+N/EScpPHYWnwoqzeHAW1nVFzh+POT6d+pBVvjrvbfhEH87Zi9AhKv2bl7ouWcldqeft2TXakKFVFxxBIitJKRsZJfFPGxKRMuxFUV33njMGTaEYG2V2hEfP770k2oVkVakcr7LrliZDHbnR7uH3nQnzH46Rcu3D4qxqLZj3FHLu/XlRrzdx/ZC4Vf4+eBjF6BAeutHLPRUu4M+Vwp31t9TW4rhabE/hj3hLuv8kbFp2RMbpCoNjtKEmJpzxUer1oNbURkRGsR01P5fEn/sg4ix2w9/mnd6WWc9eCv1J9eaByTIuczJ44vGA4Fy1cy2PZ3dfsO+htQvECut79h1Fk962JPHLBiyxwHsefthVadA8HfR1jz8NUSZJiQxUKk6wWlp71LPf/NjZl2k6Iujrq99v5SdYqUtUOD5F24xBn978rcVOucczuux0smvW3dsPfhiZ19npd6IhOdTXcRMTomgrzKbtmGC/d+livx53QEnikbB5cEGGj+znm2gd/QN6Kffiqq2OqY/T9u/jFXdfy2dWreThrO9VaMz+tOp+Dl6W0H7Pn99k8N+0ZZtpip7MroepqoUnFoTh6+ZXB5402g/v9S29Eqalnz2NDI1ZXw2p0GxZOo/orLr5aso1vOFcz2mzt9fhM9STDEhra07nHgo1uD3fu+gaWv6bhSVRY/MvfUdjLWG+0OfTQDC66/FO+m7EK6K7L2qgjW1qiNkzTE/rJk6huaNXMbHR7uG3HDaT82oFyrGNlmFG/SePbN9zB3Rct6zQEEQt6q6uRaN2Eiwa9lcdrz2b5j2Z32n7oy/D92cu5K7WcZMXGD4au4P+9fxXKVa2R70n2kX3PTeR3019mgrWZ/vQ2w83oP7Tws3/fyE8sne+z8Els+7bh83jQ3MPxShMQ/h7koIyumpTErl+PBpsOQjJ+xEEeHLaa82zVWIUJVVg6HT9z2xUkWtzclruS+QlNKIASlhVk+s89lRN5ddMknHvMpJRqJLz3GU6Hg8ty7kOzwIx523gw+51eJ9uigSvHy4XJO8gzdW55VfqamP3CvYzcVYXu7j7OGwuyP2riw6ZzWJE4jZRSDdOGrehBLwO58wCOo5Op8KTiX7Iq+hx6aAauHC/jS3qvq/HIXm8zD1Vcxr7nSsh6f1unfQljx7eXq1mojDCb+WH+Mh41XRgbsSGYOrKMc2xH2+dT2v6e/YtKSOeTqOmQOw9g22fqPuckZVSepQEZXWEyoeYOp+za4Tx8wYsMNdUDMEw9Sa5Jwak4cEsvH7l0vrPp2+2/c7yTSNVYydb0POYn7AzLH9AnpES6XFy26k6sdi9CSNiSROF6D/a9FcimZrTmZmhpIf9FB1JVWO86i+/NS+Sh/DeZajVjEyrnJJey6Y75DH1mU1RmhhsWTmN8yUFGW06gioT27ZrUqdcVCl9rQh49hvT5Iq6lL6g7yxhakQgm1V+mXcpIej0IH/j06LUkhcmEmjOMigU5AFx0+adcmLyDUebq9rralXl75nG4PoXzckq5PmM1U63mqOkNRepeH2+8fw6vZkzAucNK3ttl+Jqb2/d75k6mZZyLCQ7/i8wtvaxzm7nxo+sZ3bo/VrLbUWw2qm6ayNVpr5KidJicKi2BTYdyGbWsnGjWYOn1IL2eKF6xM/02uorNhsjP4cjFWSy4ejVXOquwirZK6eCQr4k3mtP5sH406yvzGPL3jkERx54juDJyaPRFf1BPb2qm8FmBHuhG2iprEYcq8dUH+ehJie+gv+LmLjGze0g+H2eOYqq1DLuwcIFjLyuu3UvLiw6IgtGt+YqLB4etZliQ+5Imdcp8Lfzo0ALUA0fQWloirqOvaI2NEAf+zMEIkwl3
QQYXfnMtAN/NWEWeyYEq/Ma2QW9lq8fOq3WT239T+XIBqgqrvlTExSmfgbUpJtrbcG49yoiadHSriuV4Nb4jHQNycvp4yuYr3HT2Gs61HwacuKSPD06O99f3OKgfwplA0df3cr5jP3bR8ZJz6Wa8LeZOf0880qC3ssY1hA/3j2Ikg/cI6ZfRVWw2xIg8qmank3dlKQ9nbadtpnqvt5kqLYFX6+bwxrbxpGy04qjRsSxf2/57H6B4cgYteiBInw/T+xvbv59qpEbbuZekfdNZNWUU1yR+RrbJSYHJwc9y3+I+8/yIakUITIX5zB+9lfNs1Z1aY63Sw39biilbPJIhzT1XgG4O9Cdq0BqbQNd6/M3nElXFk2wKmvn3d20rfU0c1qx82lrMP8rOoXVFVvtPhmxu5sicBIrSa8g11QKxHXrwHa5APVGNKTkJkpyoozoW9N1ztZ07znuX65K3kaU68UqNg16FFZWjcb6/MUaDdx0oDgfayOH8Ku/JuJor6Q1TznAsDi824QVUjvokTx05j6y3e5+j6vP5+3Owkj2EqnPTcc6v4s0gB/0GvZWHKi5jY3kepr0OCte4Mb/3cbffC6sV3QwmJbbuTX0l+aCXzTsLeTVjTDd/vkii2O2UXTOMbyWsxCr8t0iTOq3SQ7lPsqJ6LJlPftL+4hBWK0JV28eoQjnQZ69Jxbb1EFp1dcwm3eLl/jfpLhbVT+LlgxNoLE0hbbtg6DMd9bX6lulMuKxzAE2LlDR4bUBsWr1K9hDqpmZTN6bz0Mwv5y3m8oRjqMJCg97KQa/C36pnU7dmKE5iG7koTCZETjal8xNI6TKipEmdFmkFb/xNWlbNy2NK7i4y1VbAyWFfMjsODKd48dpT/rYv9MvotpRk4ZrbyMYzXwM6HIn/VDuBfc+VUPx2ea9dBTF6BC3DdPKs/rBbHdDpQwBFODhVoEYIQ2RZvoF8ZQpPZM7izukvREhYF4RASUrkpVsfC/gU+1tZh3wtvNtSzIrqsWzcn08xHS5iYkwRrqEJaFb/3xjKgb4w/ybyzQU41msxm82O5f0PDtJ4tSmHZ1Z8iaKXmsla3/1BmnjTNh4ZtoIM1W9wG/RWPm7N7Vbu0aSlJIv6q5rYNeOfIfZa+Mil80HTmbxzdCy1a4aS+3D3Rk+0EVYrTWPS2fftJ4GETvvKfC0sq5uJvSLOgmKFYPLNW/hl9nvt9x8gnF2GAf/FwY7v/73vPLJWbcV3ivGjYKdkTerUaIKjzclAZDN5CbMFNT2112NCBWkoNhs+h4LVHPuJqv+rnsMH/5jK0D983OnBV9PT2Hufheem/ZWZtp5bDQfnPs3ohG+RrY/Asjw2RjdW91/xSnZ7O2alH332akr+XYGv7FCffr/OlcQDa75G8U3dEyNFC8Wr43GbqfQ1kW3q3k2/Y9tCkp5PInHpVhJc8Zmbo40GvZUfVVzO3n+WkPtk7F8O7QiBmpWJXa1FDTQG3NJLlS8D4QoRFj5ABmR0uzq+W2u2ovdzNnC1y8RNn9xJyffKieQoo5KQQOvssTz+xB97PKanII2qmyYybuFOHs9bRiz9CnvlFRtLRzxOkSlO9fVAtO6/3tyM7d2t3Ddtfvu23MYt+FpbI3jV8GNeuZVR1aOYdfjeQMuxM6smP8PtmfPYPWQCmU9Gz/1qIMzacCNJzyUxZNnmCHjBDpCAwb1+9TrmOY61z6M8dHwKS16aQcmft4dNa7+Mrv3TA+SeyObKoXdjbvB2OL4PYIxQQ6C71ah0dXWz6DVQY6TZzZ9Gvshv113cafvVSa9yvmM/yYqTBr2VP9VO4L37z8NWs7WHM0UPNT0NXrHxWOErFJoiE67YX9oCDr5UtC/k/vsynm13jJ9gbeZv05/npWVT0aW/61l2zyhMG3aH3R1Pej34jh0P2hC6vraV6b1DXyZZ8XvY/KGugD8tv4TRz9bH1EBInw+xr5xRT7uZ/dEt7dvn/eqDQC4RO7/IeYvHbmzlw/QZMR9e
MOUMp/LSPK65871u+zxeE6aW2CQPCkZOH8/B+Q7mzPH7PNvVuoDB7fCuui1tDcXfruLdS8eyft8kRl2/safT9Zl+GV2ttg7R1Ix9t8mfcakfxvbQQzO49uw1jDQ3Ak40GV0j0ZtRUlEoNCk8MmxFp+02oba7uHilzmFXKvaVO/rdqu8rwmpFjCliz30Whqm9lK0QCLOZe/PeYaTZilmEr+szELoGHMywhU4rmKhYsAp/izxZsXOuzcX47I6Hct4Dw6iuGYvUAhOCjWbStgnSnw5Dy+1UdTWoTAtNtvYyPexKw3FUQe6KfZddb2lBHDyE4+ix9m0v/flCtn47h1/kvEWBycHFKZ+xLOesGKr0I20WXJmC21K3EdxLnLntClw1dsoWgJg3LXz3t5945k6mbL7Cbee+x00pWwBQEd38tvNMDq5JLGNuwn4+zhzO/YuuBGD0/zYjd5UOyN+3f8MLUiLdbuQAojYyZ1by9dT1DFHjsxtsFioZakKP+21CZVJiOWtvnQ9AzmsVaBVHwxqYIFQV19AEnpn2d5KU7r7MPjsw7SwqZzjx2aHE/B/Mou9uOH1J+tYf2pzeL7hsIxenbA8KOOi5HIOxCjNWtSPw4G9j/0m9bkdDsK55JM/umIbJFfn60hbsc/AbwzuV6bONWby6dSL5u7wxdaYPRvp8nerc0CXlbPhSLlXZDorMCg7hBnNsO+2m3ByqZwzFNqWGZKXz/Tv2WRaqgLOmHODM5KP8U55HepT1eeZOpmyBwvXT13BDypZen3tVKDiFDacCcx1VpM96FoBb6r/DiP+MxbT1QL9Xl4n41KEwmfDMGc8l2WvIN0nMQuWQr4mlDXOwHY7/0Ms27MLCRY697Ak42b/vmUbSoaEoXn8LKizZ5YVAswpm2aDrQs151lpait0cTErgsovXYle9pKl99xv8T1MSnooEzI3hG8tsc3q/J+u/3QIOdnksfNo6ImSOhUO+JrZ7MvBKE/MTOlywzrZa+U+Tk80t+Sw7PAbL1gTSNldHdMwXAgEU+elccdXqTmW6+MhU0taZSdh6KKoRU/3Bd+Qo3tbsiOUJGAje3HROTNb5/Zi3O23/Q10BSQcEmk2wIyebk14blvro9njbg0lmrOL6lA1kqaEbLf9pSsKmeDjTUk1eYOIyWbG3r9hy98XL+JN7Hhk543AeasV8uAbf4Yo+aYio0VUcDigu4NANOrOdu7AFfE5Xt+bzxrbxFK6Jj5wBfUEVCoVmZ7uT/aVXDeFQfQqa5q80YVkJQUpUt+TDVoXzbL5OQyLnJuzh5EQbLbolKOly38JTN7o9/HDDFWStB/Ph6vAYECEQJhM/y3293em9LeBgu6uQZdVnsK1iOHfNfq7Tzyp9TTxddw4v75uA12PCMbWzK979G67AstNB0kGdrM/qIr+cj6KipKdRP9LaLdhnb/lQiva4o76qQX/o6sgfa9SkJOrz7GSNrO70QgX44wdzKdruf+mfPO6gNiGB7NIo2YBAwNHuLsEkwWhSZ7XLhIbw10OLjy8X7mRu8nZUoZOitDLJ6m8o3pVazqGL17L6rCJKd2aQ80E21lgb3TbH6H1fT+bjWb8jS00AFNzSy4f1o0nZaA0ZQHG6sKR4WafvK1rM3Mp1ZPVwfF+QXh+2yiZuXnsdn856vFM+16lWM1Mz+56vwis1GnT/RMVtO25g6CtWElftwxemxSuFqiKdjk4Ji148eQb/KpvCiWPJ4BUIW+c2altQwj/WnEv2SoHq0bm19bpOx+S+qeD8aA9adU1U2m1qkpPWMdnUntf54X+k8hKcuyxYjke+pT1Q2lZCCXbkjyVtKzKcOFtwRXbnnA/VWjN5y3RMm/ehNzeTvCaKwhQVNTOdsmuGtQeTOEIMgTXqLm5eeyeaWyX3DQXNamPp2Gm8mjsRYdLJyDjJ0rOebR+O+O3QzTB0M3cPmcwHFVMZ2kc5kTO6PThGr3ObWV+Zh6Mmul0hTer9muEPdqbvSqQ8BaTX
g9y2m5J7sji8VsGpaAOaJHNLL6VeL7+rmgtAyq8dmDZs7ZaAZsAEWoc104diEx3l9Ned55H2cgJ5LTqHr/JRetEznX7WFpQw6qVmWL8dgOLXu58+qkYuI43KmVZKL+q8EsO6t8+k8D9H8ZWWRVNNnxFWa88rocQo9renFRladA8/rTofc4MXtCi/whQVU1YG1ReNYMltjwZ6Zd2HNTWpU+5TKbnnKL6qjonKtlJVbDZ8U8bw09+fz6+zV2IXlgHbgaiHg9yy4VtkP23tlJMh0iheyV6vi2Jz31yrDnqbaJGhjZ2C7PN5BoqUkp3ubKCSXNXd5wz2bunlhOZmjzeZR8sWwAUVAc1bwtpqVOw2mqYVsO43TxLcurKYfRyfKEgc20Dp5Bc7/eagt4lHn72jX0EJBqERViuipJAnn/g/xlgctHkHRMKRvz+EWpFBkzoHfRoHL0tBqQpvPTwlQqBmplN90YhudbUrrdLDbk82sgcvF93lQlmzhdJLM1myKpux1kpyVT3khPepiLMYvPCjNzdj++82vn/5jfz+zWcClbR3rn3wB6R/UgUtXSadhEDPTOnzeQaElGjHT/D8rCkIIfq12sJDx6ew5OUZ5D+9H9XXEvVu8arJz+CepGMVCl2DSeJlhYvTnbZgnyef+D+KzZ0rRSQc+U9n2lYFWXLbo/RmcNuCvUovTUY73ssqyj08m/3W1e9f9OWkPThGFy6/ifyXFOwbDkTVIEi3G/Yc5Pbbv4c0ndpvKm3tAbS6OmSIrpCorQt5HsWjM7qyKTyVXUq0Y8dBCEb9Jo3/GXoLuuXULV1Lo5eC8gp8x0/EJKlNV/cg8Ldwr33wB6S9V+oPhInxChfByOnj2XO1nV/OWxxrKZhyhlM1L4+JN20Luf+D/cU419lRPJJr7nyv20t/3p55VL5cQMHSCnz9dGEKB6FWhYh1QJFUBLqFHrOb/aGugD9+MJe8ZXog2GtLH07a/dk0uTRyyg73eYI6Ika3J8doS4UFR+kJtNq6SFy2d01uN/aVO/rkrKr1svxNj+eREukNs2ORlD1nuQ+FpqH5fBE3bNLtxrmzmuLnb2fNwrZJ0s406S5ebcrh0WfvIG/FPr/BjbO0kr5EM2p2C5cnHKPrOJ+MUh6m9uvZLLRmiW4BOm18nLaZt0ZOoFUzhw44eHUI2R8eQ6uITW7anlaF2PdcCVkr+58mICwIQU/5lO6pnMhbK86h6O1WTJv3+YO9+nPu4GdT1/3PXR8Ju9HtzTFa8QqEN/JGoSfCldA5momhY53lPhTS50MePkrRYivTc76Lxerzr8YRhM+roh+1U/LiEf+QQhy1cNuQisBs1nAoceAvLgTSRI+O+ufbazlz6HJ06DRpNm/PPFyvDWHIyhPo5RUxW0Uk0eTGFmgYrHd7+WnZVRx+u8C/ykWsEqk3NpG2U2fsx9/svm9LEnlrXP7ghqBVOPrDQJ/NsBvd3hyj7SckeLzhvqRBDNBdLsSOfRQ8M759NY5ghCYxNza3r8RxOhGLuipcHuxVkrsrJ5/64CAqXy4g+4NjfoMbw7XyPjgwinulQrK5ldVHR+D+KIO8pdUxXRVCr28g5dNKLA2Z3fa1rRyjxWAoJvxGN8mCfXhTj47RekN8LediMHC6rsZxuqG6NVrq7axo6Rxk8qfll1C0Lbp1VTY0krmpiQ/+MbVfv8t5+zDakcqYr5OXucTG5vQzkSrYT0QpsOUUSLcb38FyLCFe/LGcaAy70RW6xOsxUa11brLnLe1wjDYwiAcsx5pI2ZTBrXQO0Bj9bD1yV2lUxyG1xkZYv52h6/v3u3gJT07611qSgr4b3hM9E3aja270IMsTuL9wbpftMXCMNjDoBW3nXrJ27u0WRWgYDINIEn7vhbXbGLEWukYhh9tB38DAwOB0RPQUgWFgYGBgEH5iv9yAgYGBwRcIw+gaGBgYRBHD6BoYGBhEEcPoGhgYGEQRw+gaGBgYRBHD6BoYGBhEkbgwukJwmRB8
JgRNQvCxEIyNtaaeEAIpBM0BrU1C8FSsNZ0KIfh2QPfNsdbSE0KgCsHDQnBUCE4KwWYhSIm1rq4IwdlCsFEIWgL/nh1rTaEQggwh+EgIaoSgXgg+EYKZsdbVE6dLuQIIwflCsEkIGoWgVAhu6c/vw250hehfwIUQjAJeAG4DUoC3gDf7e56BMIhrjJcSZ+ATFUM2UK1CkAo8COwIr6JerzkQrT8DZgDTgSTgW0CY1hcKzQDqqgV4A/gnkAo8B7wR2B5RBlCmTcCNQCZ+rb8B3orH5+p0KlchMAOvAX8FkoFrgN8Lwfg+n0RK2acPyDKQD4DcCbIO5CKQNpBzQFaAvB9kFch/gFRA/hDkAZA1IF8CmdbDeb8L8u2g7wrIVpAX9FVbtLQGzi1BjhyotmhqDZz/LyDvAPkhyJvjUSvIVJBNIIviuUxBXgzyCEgRtO0QyEviTWuXayggLwvU3ax403o6lSvIIYFydARt+xTktX3V1t+W7kJgLlAEFAP/E9g+FEgD8oFbgLuA+cBsYBhQBzzedhIh2CYE3wg6r+jyfwGc0U9t0dIKsEoIqoTgVSEoGKTOiGkVgqnAZOAvYdAYSa1n4s/dcmWgXPcKwZ1xqHMcsE3KTks/bgtsjzet7dvw9xjeBJ6SkuNxqPW0KVcpOQYsBm4IDIlND5yn7+sb9/PNcVvQ93mBN8MckB6QtqB9uwhqqYLMBukFaQpx3tEgmwPnsYD8MUgd5AODfMuFXWtg/6yAzhSQfwb5WU/HxrhcVZAbQE4LfP+Q8LR0I6H1G4HWw9Mg7SDPAnkC5EVxpvPHIP/dZdsLIH8ab2Xa5Ro2kNeCvC5O7/9pVa74ew3HQPoCn+/0R1t/x3cOB/2/HP9bAeCElJ3G3/KB14TolONGA4YAR4JPKCW7heA64M9ANv5xnZ10z5nTX8KuNaB3VeC/HiH4HtAIjAG2x5nWO/C3HsK97HIktLatAPpzKWkFtgnBv4F5wLtxpLMJOmUwJPB9sJmwI1JX2wicY7EQ7BKCLVIymAXLvtDlKgSjgX8DV+Cvm6OAJUJwVEo6r9zQA/0dXsgN+n8e0JYWvmvWnMPAl6UkJehjkzJ0xZCSV6TkDClJB34CFACf9lNbVLSGQNLjSkwx1XoBsCDQXa/CP0n1mBD8OQ61tq3GGHyOrueLB507gLOE6HS/z2Lwk5TRqqtmYEQcaj2dyvUMYK+ULJcSXUr2AG8DX+6zqn4217eDzAGZBnINyF8FmusVXY79f4HubH7geybIr/Zy7kmB7nAm/kHsf4WhGxR2rSDHgTw7oNUJ8g8g94A0x6HWFJBDgz4fg/w+yOR40xrYvwrkX0FaQY4BeZwBTqZGsEwtIMtBfi+g87uB75Z4K1OQ00CeG9Bsxz9xdBLksDjUejqVaxH+Sd/zQYrA9/0gb+mztn7+EW2zgfUgnwPp6OGPUAIP+J7AjT4A8ldB+3eAXBj0fU3guNrAg5cw0MKOpNZAQe/BPwZ9HOTrIEfFo9YQ1/mQ8HovhLsODAf5TqBCl4K8NU51TgC5Eb+HzSaQE+KxTEHOBrk16LlaCXJWPGo9nco18P1q/HM5J/F7QvwGpNJXbX3OpysEZcDNUvJen34QQwytkeF00Xq66ARDa6SIZ61xEZFmYGBg8EXBMLoGBgYGUcRYrsfAwMAgihgtXQMDA4Mo0mtwxEXKVXHVDH5Xf7lHf1hD68DpSevpohMMrYPh86D1dNEJkViC3eALjZw+noPzHcyZsw1dCo60JKNc1YpWUxtraQYGcYFhdA3Cii/RjMhv5pFhK9CBnZ5EHjVdGGtZBgZxg2F0DcKGmDCOyukWLiz6jAw1AU3qZKrNCDHYKGkDg88PhtH9HKGmpiLzhuLKdgKgeHXMK7cifb6oXL/ujCQSp57gpoxVaNJEma+FHx9agPR6o3L9ASMEaloqrVOK2jc59hxHO1qFdLtjKAzUcSW4hzjRLaee8zY3ejAfrsF3eLC5ogwiiWF0P0fI
vKFUXJyKPq0BAFeLhZJPHWiNjRG/tpqZSeMIwddzdjDJaqFJd/HflmLKFo9kSPPmiF9/wCgqamY6LZPzOXFjS/tm+zvDyFpjQS89FDPDa8oZTvml6bSMdWF3tpzy+NYjTrLW5ZC2wo124kQUFBoMBMPofo5wZTvRpzWwY/oLaFJnt9fNfY75EAWj2zSzEG1ME+MdhwCo1X2sqB5L5pOfdMqXF1cI4Te4k/I5dLVG6fQX2nedm3AFx5QhDBECWVaB9PqQXk9U5VXNy+Psr+7kZ8OXUGR2nvL415ud3Jf5NUyuESS+fRLdFdHVjk5bhNWKUFXoy7CXpiF9vrD2FqNrdPvyRxrBGp1pK7M4L5fDX9VYNOWfzLHrNOitfNyay8b9+RRTHWtpPaKmp9E8pYDDV/koveiZTvvWnPUq86zzKE8rIGtzMrbKJuTWXVHVN/GmbTwybAUZaneDq8mOV5kq/EMP8xOayJ32d25PWojv+BiU1XHSwzjVcx/lui3GFOEamoBmPbU9sjT4sJbX4Cs7FDadUTO6anoawmw+5XF640n01ta4NzLRILjMpNeLbG6Jy9aLmp6GatUwCx+gsM6VxANrvkbxTRtiLa1HFJuNxtkjabmuntLJL4Y8ZmnJUiiBD1sVbvrkOkq+lxYXrm8tuoeDPq39+zBVkqTYUIXCJKuFpWc9y/2/nUvFtBiKDCDMFtT01F6PkV5v1MpVTU9j730Wnpv2V2baTj1O/uCxs3jj5XMpeMYVNp3Ra+m+YuPevHfIVJt7Peyav9xD/r8r/G+WLzpBZfbjQ5dTtngCmU9+EmtV3XnFxtIRj1NkssdaSZ+pumki4xbu5PG8ZUDvus+z+Vh23p+566Vr4ILYGt1qrZmfVp3PwctS2rft+X02z017hpm22OkKhZKQQOvssTz+xB97POaElsAjZfOiV679rKs/ydrIXbd9zJ6bksKmM2JGV7HZ8E0ZQ8Hv9gJw79CXyTdZMGHt9Xe6GVDiNzpZTU+DV2xk2xtRhL81/sH+YjKXWElaHN6VcYY7GhhrOUm6YifHUc8BS3y5XrWVxWOFr1BosrV3c08HNCtkWJtIVk798KlCodBk408jX+S36y5u3775qbMYsrQc35Gjvfx68MzbM49DywtI3+FD6BJzgxflWMeKO5o7B680QWD0PFmx8YOhK/h/718Vk8CUtgCZi87fTL7tQ0abe37mR5rdncq17J5RmDbsDnuPbqB11SrMZJvMpKlu/lD0UljKNKxG1zN3MpUzzbhzPQhVkpXRyCPDVgCQqtj79Iee85XtbHCdSe7bFrSde8Mpr52am6dTe6ZEJvXsypT7uorzowMwJIPKL6VTP8k/g61aNZaOeJy0oD/l47TNPJUzi+1fmoL9sJncX3w8KH1qUhK7fj2a+zKeJlmxoArFb+B7sbmeuZMpmy/4wZhVPR8UToRAmM3cm/cOI81WzEIF4D9NSdy/4Qpy34xfA1xz83SSLq7iqtT19DX9iFmoFJps7fUZYFbWeLCceshsoGx+6ixmZY0n5YBO3tYTyEMB465p6L0Mv5mFygizmR/mL4tJYEpbgMzPh/4Xs1BQRc8vNhWFQpPSXq5XJo/FpKph0yKsVsSYIvbeZ2HpiMcpNNna62obTbqLN5qH87PXrmbNwt+RpSZ0O49VmBlpVvhj0UvM+9OdjPrNEOTOAwOaXA2b0fXMnUzZAoUrp67lkuRtqEhSlFYygv6AeyonsqpyJM0uC067m9nZ+/nt0M6D/f+TvYwLx5Tg+dSJujNc6jpoWDgNZUE1PxzxEUWWYz0ed7vtm6jjS/Am64ycUM7v8t8BwCw0ikydXyDn22sZlf8qW4cO588HvzRgbcJkQs0dTtm1w3n4gheZbG3C2kuFbaOt7K+fvoarEncD3StNpMhUmzEHadzcko9lpwPnR3vQevldLKk9U/LD/LWMt3iAvvfJzULtVJ+tU2upKc8m3adFxDd2yNJysFr8Y/n1DUi3219H
coZxdEFO+3FTRu1hmOkkwffdhBr1wBTP3Mk0FJqpH6tzSdFeEhULG9wq39n0zZDHB9uAtnI9NE+hqLEY05b96CcHty6lmpSEd3wRpV+zsmjaX7o9t+vdXp4+MYsPDhSjVdopXlzP9JzvYrH6+O64D7kqcXcnA2wWKkUmO89Me5afDrkZ6z5T9I2uMJnwzBmPNCmUzRfcNGMV16dsIMfkRJM6h3xe7q6c2X78snem4KgUqHaoydZZZ/JBF6NbZHZiS3SjWa2E733XodVzTR2/KnmTc20NOJWeH7hHJ/+HrWPzSDa1cJ5jL5OslqC9nVtHTsXGGAtkqhW8m3J8QMsYKzYbIj+HIxdnseDq1VzprGo3uOvdXlYfHYH9RGjnq8qZZq6cupabUteTpfrLPhaBCevdXpYdHkPSQR2tuibkMabcHLy56XiTLCH3txEpR385fTypI2qZZCvr9f73hZ+MXcI9k79J0oF0RASMru/IUX9wREE6uiUfAN0saMw3ceE3O4ayrkjdwLBA67BBb2Wrx84rtedQ0ZIS8fvf1QZMGrePc9P2c6btMO+1JvLgZwsY8vfQ5dySlcgrc5L57Zc7bMD/m/MOfzo5jzx1FJatB9Hq6gauLTmJ6rPsfP/it5hlg67P7ZrmEt77eDz5S7yYm1qQO/ZR8Mx4dLOV3y34MtXTE7khdT15pg7vEVUozLKBZlX65o0VggEbXcXhgOICDt2gY7W5+MGYVYE3gxOv1NjvdfOnExfy0QsTAXClSzK36rRkKDTn6mSNrGF61sGBXr7/ep0JlF+n8/wZi5ls1bAKf0XwSo1jWit7vMmoSM6z+VCFwvyEJuYntDW1QxuIvd5mqrQEXLq/i1mr5bC7PgsnTf3WJ5KTqJuUQd6VpTyctR3/wq1+/n58Dq3rMsj6rC6kz6s718NXkreSE6gcrdIT8cAExW7HVzAEs+hQdCqdamYmNbNzOD5VYh/eexmF3dFfCEyF+ey+2s5thWvINXnp6b72lfkJTTwwvBlvknWQZwpNqOAIk6qTn1rHY9mbgo5UABsNeisftGby632X0Lo8C9UjIxqYEsoGzLTvx4PCp60jeObAdBwvJmNZHnquw1GYT0v2cDae72lv1NyVWs6hi9fydtN08muHwCCMLiYVXwJMsx8g1L1u0OxY6hVsZbUIlwefz4fp/Y0AFMrJPMu5NE6x8dXUTQxVmyk2h6cHOSCjK0wmRE42+76ezMezgsdA/P9Wa638u+Ec/rtiAiP+7h/wP/ats6gdo2KfWs3vRy9lfkLnh65a6/Bq8HpVhBZGlzEhEDYbb573BOMsdtreeJrU2e9180ztebyyZRKqVePTWY+TqjpCnsYrNRr0jgH+hyouY2N5Hr7WQDF6FewVJpyU9l9jkpP6YoVPRr3TaXOT7uLDA6PIXetG76OfqEtqrGsYEbHAhLb7Xzo/gZSgxsPq8hFkf+oJqdPvojWCusta+P3EV0Pe/2SlY7zt9WYn92ZciaW5EPvrgze6it1O2TXD+OW8xVyecAyH0vkB6npv2zALBZswYRWhx24j6dnY3+CIXR4Lfzh4Id43Mhn6V/+8QqQCU0LZgCbdxR9rx/PywQk0lqaQtl2QtLhnbxvZ1ELyAY3bdy5k6VnPtg8x/HboZt466wxcnzqxbBu4xrbzf3f3tXwy/j/d9k9wlPPmhFrKfNnYT0iy/lkPgN7qwrxiAwViMm8fP4dlY8dyfv4+/jx83cDFBDEwo2u10jQmnX3ffpJQ44dHNQsrjo5myHqN5gvHAXDNne9xW+q2kLPFTbqLn1adT6vmr9hKuR1zY/9bi/2lzNfCH45fxOo3J1D8y08wDcni6FpBaohxDbf0Uur18ruque3b9i8qoXhZmGavpYQQD/AbzcORlTZMza0D7s6Em7b7v/tbj6OKPrz9hcB3zhh8N9bw4pgXONvaMZutSZ1W6eHnx+bwvcwPyDPZMQuV+QlNOKa+wK0t11H8+mAFC5TUFF66
9bHAS7dzq6etZxZ8b9vIs9dyYeJnMXHHOlVwRNeJ6U9bR3B4x1CK19ajRzioRtjtNI3tbANebcrhmRVfouilZrLWn9qTRztxgsS3T+I7Poaf/v58Hs1ejUMJX59BO3GCxHdaaPaNQ3vC//oJLrP5CU3Mn/JvGia18njt2bxTNRsA58cH0WpqsSzfwIiN6TRPK2LpVeMgYHQ1qYd8VvtKRFzGJlkt/jfLk133hDa4S5qzKb00Ge3YcQAKibwvaoPeyo8qLmfvP0vIffLjHg2aW3o5obnZ403m0bIFcEHH2F06nxDpVDI/e+1qil5twnToOAzJCnlMcGBCtFC8kr1eF8XmU7jfCIGalcmY32/nJ1mr2nsRmtRp1F0c1hR2e4aw/7oRPPG8xv9krSFVdeCWXqp8GQjX4Ef2hcWCnp6EEuJJaevt3H3g6k73to3SS2by0ncmsCMoRDjWtOgeyn2+bmV/V2o5Z17+N27Ouo6Se7KQUqIdPxF+wysEwmHHndhxbw56m3j02Tso6aePve5yoazZwsHLsji4VmNcBMZpFK9kt9fvfVRoUrsZ9mTFzoMZe3jwyT0ATLv3NtJWgGxp6RZAo0mdvV4Xik8OuFxjmnuhzdG79NJkf+WIIrM23EjSc0kMWba51y7YQ8ensOTlGeQ/vR/V1xL1Gfk1C39H/bXglT0btraIpGihNzdj++82vn/5jfz+zWcYYwk9HNNmcK9fvY55jmM4lY7jVrtM3Lz2Tkp+UIlMS+am15Yx13G8/ZiHjk9hyUszKPnz9kF1kdsc9J984v8oNncvo9UuEzd9cicl3yuPW2+LYKq1Zn5+bA77biwKWfbn2Xx8OutxDq9V2OnO5vlZU9obM+HCVJhP2TXDWHLbo4C/FX7tgz8gb8U+fNXxFfatNzdje3cr902bD8Cot2s6vfxDsfhXv+Oy3PvwOSSTL9zVHkDTZnC/f+mNWPdtQx9gIqSIG92D3iauffAHfO2Bd7kl5bP24YWNbg+37biBlF87UI5tibSMbjxx1gus/NkY3rltLNWrs8l9+GO0mjruvP3/Q5o6Wr2WRi8F5RX4ItFiCEJWVFLwHytj9TvYefsT7duz1ASy1M6x9l2JRVCC9HhQT9Sj9+Y8DAghGG05hl10tC7+UFfAn5ZfQvFzDfiOncAEjLIc73SMWzdhamXQbkPgn/Hv6cWgIdDdakhn94aF0/BcU8ffzlhMLJYTLPtBMVcmjUUqHWUsdIm50Ye6Zye33/49pEnguaOGJ0f/i7OtVlShkKo6cCoaUBkRlzGpCHQL5Jkc7c932nul/jKMw/B96fXgC7x4dtx9NguSJlD+FcH3Zy/nrtTybscXmp08cMOL2ISXidajJCsdwzs6AqWmHp9n4MmPBmR0pduNc2c1Y/5yx6kPVsDz5VbOcRzAJjouV6/bqalOJGPDjphkoZps1TjDsoV5idt4Km0W74sZ5P7iY+wrd3QeatA0NJ8v4pVJd7lQjxwjdU9KyP1xGe3VpUzuHLeK3y+YS6GcjHlFR94FFdlJ/2FXGo6jCpRWoJxVwp77LOSqHWOU91ROZNk7Uyj4eJB+mpmZNM4egbw5dC+qp0COQw/NwJXjZXzJQW4b/iGTrRqhjO7MbVfgXO7EsedIRIaZTJ/uCh0ooGnobnd7XdUsZ3D1Vd/h0UkdE5RmoZKrutnz+2xG/SZtwI78XVHHlXDo0nRmfNk/w9UiVdI/qcJ3ogb0/vcV2oIX9txnYZgawWcsUFfbynRkYzGLts/jiUyBbpZ4cjwcnPt0++ELEioBsAoHe73NPFJ5CWuXnonqgdyGLYOyBwMzuj4f8vBRCv/Vy4VVFc+wJErnW/j11FcZb2nFKuw825jF4iNT2XtwKCmbLNFJ4CIl0uXislV38o9znwq4jJmxCjPjLBo3Z6xiy8zhAOgtp85bGin0lhYSS5sY+3FoZ3IANeAytKR4Wcj9lb4mnq6fzOoPz4zK2HgwVybu4L9jRnO4YAQZ
vRynoaCbwXf2yHbH9bbhkXsqJ7LknXPIfdeNsqtsUC9k4XTQMELlHyWLoUv4+bONWfxiw1fIetuK86N9nYYWMmdW8t3C9znTUkmuSWl3L+zKsc+yGLn1JFplz0E2fUVNSkIbW0DV9I5WVc5rFWgVR3tMK9hWV5NW7sdnG8UPtKtonPQW307yd/GTFBvPTHuWG66/jRH/GYtp64FB9xzcQ5y0jHXxYPY7QKD30NLab4Nrys2hafwwaseaaBmmd6sDytZEbJW1YW+Qtdkb89YDZB9MRNosYDbRUpjChLSv8+GkRSQr9k7jvlVaAh+VFTLyX5UITcfX2jooDQMeXtBdLtjfs5+tYrNB9li+MfsjvppQ3e7ov/jIVCqX5lG00YXleHXUxtH0pmYKnxV8N/kbPDzudWbb6nEGXJQKzR4uyt7Nh5f4Aznsnx5Aq62LeldJejyoB48y5O8jejxGNyscyU/hwZvP4idZGzu5Mh3yNfF03Tn8Y825jHo18t4fXck2OZmVvo8nRo1gyNhitJ17kV4vPz50Ob/Nf40CkwNVKORZa2kpdlOWbOP7Fy9pd1z/Q10Bb604h/wVLsxbS9EGaSCkScXnoJO3RBuLj0wlZbWN1E+rEIlOPJM7yvyS7FXMdVSRrPQ87gdgblRQa5vwhSHJuUhO4sSEhE5BD9s2jMdUdfyUuVy16hrSV9nxJOby72FT+XbSUqDDkf/ui5exaO88ssuSBm10dYuC3dlCgcnR7wAcOX083iQzCMGJQhP153i4avx68qw13evAmlbEocpBae0NrbGxPc+0MJlwaPkc2ZWFe2J3M28TXpwON1pGIqwdhA9bgMiM6SoqSnoa9SOtnRz993qb2Vs+lKKNLtQPN0V14kIGHJ9tQ6fx029czo9LlnC+vRanYsMmTFyY+BkvfWcCALknshFNzdFfMUDKgKtKz8k0hNlCwsh8/l0ynbsXfEKW6i/byoDBff6jmRS+5oP126MgV/J+82iGqbvaJybOTdjDe5NHc6i6gDyvD2m1sGNlKluuGUa2Wo1DWDg3YQ8nJ9po0S3cmXK4/XyPb53D8FU+zNvK0OobIqq9ttWBqQW0NCfVZztxze1I9H6+c2enobBgNKmz2mVCQ2BqAbTw1GJps9A6RHQKepidMrnPeQh8hytILsumvLZ7GsW7Ust5IlOENU/EKQNwFBU1yQmZ6e2b9lxtxzSsBZNJIz+tjnuGrWNhYg0NeisrWpIA+NPySyha0oq67cCgX7p9ImCrmovTSRxbizXEMF6u6ubKgi0smn8+I8KQ0yr8RjeQjb95/HBqz+tstB6pvATnLktUW7hdSfrXWmrs03n4iq+glrzFhfaTWIWZmTba3YJmD7sFx25TzNfHConUQdNRWzpXjreai3l+w3Ryl4P1k91gs0V26EZKaHXxvx9cQuZFjcx1HCFVdTDVauYPI17mkQWXsNZ+JgCKBl7ZYTymWs1MzeyeWMPnVlE9uv9vjDCzsvfz6pwkqickMHxcFRvPfC1orxr4dKbNn/emT+5Ed6vk7vMhmyM3HKVZBcKZ0OcpPM0mUNXozJC4pMbGk/kMeX4bqCpKQmd/bSUlmdZxw6ic0dFN/+W8xZzvqMCMQAu47x3yST5uzeWBNV8DYPSz9chdpejRWKUjaOWQw1f5Am5h3d1as01ObkndxIE5mQMK8e9K2I1ub9n41719JoX/OYqvtCzcl+0X6U9/QoNrGv9z9XwSz/x3oGtzetBTYMrrVWfj2G8BfLR8aRyWBm/EVw7QGhsZ9d11PPjsFTD9VeY6jpCk2Cgy2XkqdyV8Z2WXX5h69cLYf/HfObP0uxQeTIVwtHR7CDgBeCx7U5dQ2t7xSo29Xg/fP3C1370s4O0QtsZDCK21Y1SkMgLV3TdDWjPGRFFq6LBZEeaRMrNQyLfVsuvC2SH3t2Sq1J7rofTiJzrd8xdO5rOpuYCTXv9D1+C1+VcYCSS8j+akupqWSsvkwFJNAVvVpjWS
E9dhNbq9ZeM/6G1C8QJ6fKyYlfzCWnxVk7jxxhtYfd6fyA5KauFOUkhwOKClJS5dYELRtsoB+H057z8SvZUDRl2/kQeeuIqaOcuY4xhcOs54qiNttAVQfD8QQBGJXprQJYrH/5wUBkJ+g10HB0Mknr2uAQU90XWVi0efvbrLIgVNMVvSqXVKESdubGlfG6/ND1dHdFqNI9yE1ej2lo0/Hp2nzSu3Mqp6FLMO3xtoOfppc47O/7fdWMGij4y+fxdv20azVB07qPPkNm4Z9OxwuIlGAIXvYDn5TzXz9Yp7WfebbqGcgyJWz16oVS7i8f4CnQIflJp69jw2NGKrcYTV6IbKxh+PztOHHppB5sxKSlKOk2LezXUJZZ32F5qdcb+CRU90CjphS9Suq588CSdPDj4/RBjrR08BJ32lbcWGzM0eiquaI7sCQ8Ct0Xoy/Gbd2qgjw9Rrs396gNwT2cwccSt1o9X2ch37xB2k7tEwuTquEWqVi3h4/qF74IsqFPJNJkqe20+rZub76e9yhsXNqZZyGghhM7pds/E36S5ebcrh0WfvIG/FPn+FHYDzdCRw5Xj5buH7nG8/iorAKkx0TYIi4yO3TL/4T1MSP9xwBUNfsWLasDU2S5/HyUMFgbj+0kPkvwHFCbf3uCpAVwrfuRnLETPpO6R/xYbyI0hvpLNsgGxtxbmjI+joJwvbMqINLCFBpa+J2S/cy8hdVQMOWe2KVluHaGom+YCNpF3ZjBF+rYWv1cCRqu7ldIpVLmJFa4bCtKGHOgW+OBQLP8lahYYkUbG0u7nu9TbzUMVl7F9UQnoYfN/DYnTbVmN4YORSxls8VPp8LKqfxDPvfomSF4/4uzVxVPApW808M/JcCgpfZ6o1csutRALp8+E40hIygMJd4SRrHSSu2ocWh6sGxwK9pQVlbxlFi0X7qgDiFLNK+S8LHGXVUNvQvmJDNOgadPSjlGt4KLsVk7n/jRUpBR6XieLFdciKylP6+vbjxEi3G83tRnG5KfyXv2xkRWVcrlTdEykHfKzYMZYXk/a1B5MAIXMyVGkJbDqUy6hl5WGJPAzLyhFdV2PY6fXy8sEJFL3YjO9g99jmWJP9QTXlSfncOXMhM7ND5751HJPgid6qC32ltwAKc2Mr5sPV+HpYteGLiu5yIYJWBTgV9g2xCY6BzkFHI19KxOs0I9WBNQwUr0Tu3B8+g9uFUwVIxTPOrUdJzcnl3wUdwSRdWe/2sqa5hGXHxqHuScB3ZGvI4/rLoIxuqNUYGvRWPm0tprE0pU85NWOBtnMveRYTdYfS+CAzdMDq0M0n0RsaQ+6LKacIoIh8J/j0RAatCnAq4mMQDFi7jcH2w+Knfxlf+A5XkLYni12lw1iRG7qUf1N2OeWbh5O8D7JLw9fbGbjRDazG8PSM5xhlbuWkDifxsMY1hGcOTCdte3wPiupbdpK8BZJ7OyZaYgwMDKKO5VgTKZsyuJXrQu5P3WCmeGVN2FclH7jRlRLp9fJI2TyGJTS0J4j+cP8ost629rpMh4GBgUGs0XbuJWvnXkIvDRA4JgLXHdTwglZTCxfUErxYzUgiGwVlYGBgcDojZBx5FRgYGBh83jn9vP8NDAwMTmMMo2tgYGAQRQyja2BgYBBFDKNrYGBgEEUMo2tgYGAQRWJudIUgQwg+EoIaIagXgk+EYGasdfWEEFwmBJ8JQZMQfCwEg8tlGEGEQApBc0BrkxA8FWtNPSEEfxOCPUKgC8H1sdbTE6fZ/VeF4GEhOCoEJ4VgsxCkxFpXbwjBtwP19uZYawmFEBQLwRtCcEIIaoVguRBtmaz7RtiNrhD99v1tAm4EMoFU4DfAWwM4T7/p7zWEYBTwAnAbkAK8BbwZj1qDGC8lzsAnKhV5gFq3AncAfV/OYZB8Ae7/z4AZwHQgCfgWEPGsNAMtDyFIBR4EdoRXUa/X7K/WFOBN/EsGDAHWA2/06wxSyj59QJaBfADkTpB1IBeBtIGcA7IC5P0gq0D+
A6QC8ocgD4CsAfkSyLQ+XEMBeVlg7ZKsvmqLllaQ3wX5dhe9rSAviDetgXNLkCMHqi1GdWANyOvjUefpdP9BpoJsAll0utx/kH8BeQfID0HeHM9ag66TFnjO0vuqrb8t3YXAXKAIKAb+J7B9KJAG5AO3AHcB84HZwDCgDni87SRCsE0IvhF8YiHYhv8t/CbwlJQc76e2aGkVXf4vgDPiVCvAKiGoEoJXhaBgkDojrTWcfNHv/5n48x9dGbj/e4XgzkHqjJRWhGAqMBn4Sxg0RlRrF2YBVVLS99R+/Xxz3Bb0fV7gzTAHpAekLWjfruC3P8hskF6QplNcwwbyWpDXheEtF3atIEeDbA6cxwLyxyB1kA/Em9bA/lkBnSkg/wzys1PdgzioA+Fq6X6h7z/IbwRaYE+DtIM8C+QJkBfFoVYV5AaQ0wLfPyQ8Ld1I19UckEdAXtsfbf0dzzgc9P9y/G8FgBNSdhorygdeE6JToi4N/xjIkZ5OHjjHYiHYJQRbpGQwCSzDrlVKdgvBdcCfgWzgn8BOGPTKzBEpVylZFfivRwi+BzQCY4Dt8aY1AnzR73/bQmQ/l5JWYJsQ/BuYB7wbZ1rvALZJSbhzwUasrgpBJrACeEJKFvdHVH+Nbm7Q//OgPddN1wQOh4EbpeSjfp6/DTMwAgZldCOiVUpeAV4BCMwE3wR8OgidEL1ylXTuHg+EaGkdLF/0+78txDnCkWglElovAGYLwbzA9zRgghCcLSXfjTOtbRN+K4A3peSX/RXV3zHdO4UgRwjSgB8BL/Zw3F+AXwpBfkBkphB8NdSBQjBNCM4VAosQ2IXgfvxvmHX91BZxrYH9kwKuOJnA3/AX/O540yoE44Tg7IBWJ/AY/rf2rnjTGthvEQIb/peCWQhsQgzKu+YLff+l5ACwGviREFiFYAzwdWBJvGkFrsffAzs78NmA3/PiR/GmVQiSgOXAR1LywwGp6ucYSdtsYD3I50A6AmMkFV2OVUB+H+QekCcDYym/Ctq/A+TCwP9ng9waOK4W5EqQs8IwnhN2rYHva4K0/hVkQjxqBXl+4LhmkMdBvg5yVDxqDRrHk10+c+JQ52lx/wPfh4N8B78XQynIW+NVa5fffkh4vRfC+VxdF6ibzYFybfvk9VVbn1M7CkEZcLOUvNenH8QQQ2tkOF20ni46wdAaKeJZa8wj0gwMDAy+SBhG18DAwCCKGCtHGBgYGEQRo6VrYGBgEEUMo2tgYGAQRXoNjrhIuSquxh7e1V/u0bHf0DpwetJ6uugEQ+tg+DxoPV10gtHSNTAwMIgqhtE1MDAwiCIRT75sEB3EhHHUnZGEK6OjV6O4Yegzm9BdEc9bbRBDPHMn01BoRrP7773QwF6tk/SvcOePMQgHhtE9zZHTx+NNMlM1zULi1BNcOPQAABoKFS0ptLzogBgaXWEy4ZkzHmnyd6pslU2IQ1VodXUx0/R5Qk4fT9l8hUln7CPfUQtAvdfBygOjSPpXjMXFIW3PC6L3vE+OPcfRjlYh3e6wazCM7mmIYrMhcv1Z6vZcbcc0rIWLRmznhozVTLJaANCkzm6vm/vM82On0+GA4gIO3aBjtfkNv9ySyrDVdiybNLTGxphp+7xwcL6D2859jxtStpClJgBQrTVzP4PPN/l5IdTzYjJpvf4m5cVskle2oh0b7FoK3Qm/0RUCYbEgTH08taZFvfsrrFaEqvb6tpNuN9Lni6KqviFMJkTuMA4uHArAL+ct5vKEYzgUC2BpP86HRpXmJFbBL8JkQuRks+/ryXw863ftBuGegom8ZT+HwpZ8WD+YtL6RQUlIOOUxenNzFJScGiUhgYvO38xNKVvIUDt0t0hJg9eGf/nB6OjohKYhfT7/86OoKHZbp93S50N6PP68MZHW5nDAyDwOXpEGdH1eeqZk5+0kb0+G4yfCrjPsRldNT0MvyKZ5uOOUxwoNLA1elDVbonID2q87pgjX0AQ0a89G17mzGnmkCr2lJWq6+oKw
22kal8GuW54I2tq5Armll/1eH4+WfRnVFxv9wmqlaUw6+779JNDxUD6WvQltrsIHx6cydH1MpIUm0FhonT0W3dxzvVC8Etu7W5FeTxTFdUeY/VrzbR9iFh3z4XVaCx+35rJxfz7FVEdNR3CZWRp8WMuq8R06gpqeRtOMwk6/cRxpQS2rRKvu+wo3A0Gx2WBkHmXz07o9L5rUQ/5GDZSlJ8dDS0EKjuOpaDW1YdUVVqOr2Gw0zh5Jy3X1bJr8t1MeX60189Oq8ym9NBMtAm+UUKjpaey9z8Jz0/7KTFvPzhvFz99O4asJqAeOIJtb4mMySgiEw447UQ252y29nNDc7PEm82jZl+GCCnrvRBkgBIrdjpKagp6exJNP/B9jLKEbDO1DNtPm46s6FmWhQQiBmp7K40/8kXEWO2AH/Pf/reY8fvre1yj+7mDTUQ9Eh5+7KyfzwT+mMuzvNTTNKGTVk51twfj115L0/EicS5sj+lyJnGwOLkhj161PdNpe6WuiXlfQu+TzV5AUm22oQuHg3KcZnfAtsvURWJbHsdGtumki4xbu5PG8ZbRVhN7IUBP4dfZKlqzK5vlZUyIyftKNV2wsHfE4Rabe9a1Z+Dvqr4Ufls+nbPEEMp/8JPLaToGpMJ+ya4ax5LZHAWe3/Q8dn8KSl2eQ//R+VF+LYXD7QFuZvnTrY+0P3enKQ8ensOSlGZT8eTuh23HxwarJz3B75jx2D4nNczX7hXspfK0J06EgeyMEemYKv3/zmR5fuuEibEb30EMzuOCyjdyT9V+SlQ6DcNDbxLUP/gBLk47Q4dCX4fuzl3NXajkAdmFhrLUScYrZxMGipqfBKzYeK3yFQpOtvRvRE1lqAlkq/Db/NR678UI+TJ9B7sMfR1TjqZCKQLdAobm7wZ23Zx6VLxdQsLQCX5R6Dac7cvp4dl9t5+fzXmS02XrKOhFrTDnDqZqXx8SbtmFXayk0de7xuHUTplbQT56MkcK+kazYybA2oVki+8wHE2yHRu48hjxSha+1teMAoWCCbq3fSNBvo6tmZtI0s5DDX+3cjrr27DVcn/YJ61y5zN98CZ5Nqf7jPZC3Yh+ypYWqG8YzsuQwU+ylgEqT7uLVphweffZqchu3hOPvCY0QCLOZe/PeYaTZilmo3FM5kVc3TUJtUBl6xnHWnPVqyJ8WmBxcnPIZy3LOipy+viJEjyucldemklnuRas4ahjcU1Bz83Rqz5SkjqjltsI1LEioRBXdJ1a61pGVZ74SA7UdSJuF1izBI8NWoCJwKB0tsnsqJ7LsnSkUfBzfBrcNRcjBr9Z3KqpryfkwhTHyjk52SA85Sa5H7bnpt9EVTgd1o0w8NevpTttLzA0MUe0835qDZ3Mqhf+q9B+v6fiqq2n4xjmYv3yCu/Pf5QyLF1DxSp1NzQXkv3ik81snjAiTCTV3OAe/MZwS838wC38rcVXlSIasVHEe8VAmsqAHm6oKBYdwgzk+O2zz9szjcH0K6sfJ2A/Xosehx0U80bBwGsqCan444iMm2crINXlxKP6Jvga9lTkbb8Dt9T8WrSetiBYVLUEnyRrbMX1Tbg7VM4Zim1rTyVOhDbvqxTPMQ8WFiZhmzgCM4BitsQnz1lIKj6S126FQhjXYRqQpHY3JSHW++z+84NMwn5S8VT+h0+a3Av8u3TeWtP062v6DgP8P8l4wEc81dfyq5E3OtTXgVGw06K2sdGXx1q4zGXlw82D/jh4RJhPu/HSuuGo1aaq1fXtTq5WhJ3xYKuoxnxwasetHmkPvFJC+04ejvBZxqDLWcuKWtiCN7vVQY5ULXqmdwnF3Is5nk0lp9bd6qs6x4JxazaW5nzHecShm2k25OVTPyaXmklZ+N+btkMdcnryJjKknOXyW3zVKQ6Ha7aTs2HiSVu73z8B/0XpAuoZW3wD1DT0eothsiPwcjlyc1clG/KcpCU9FAubG8DcG+210ZUMjmZua+OAfU0PuTzuhk/pZAzqdneOfO2Mx
k60aVuE3uB+0ZvKL3fPIetsa8jxhQ1XxJJt4OGs7/pXdOyN8GqZmWNHSed9YSx1DVDtmEdpTIF5I3+nD8f6OuPEdBfy+mSnJtGTGT9kpzgTKr9N5PkQ9/PW+S2hdnoXqkQxZtrm9ZWgeN4NZ2fv5SeZOALQY2ayms4dx4gIPv5j0FvMTQvveTrWamWotA8ratzXorXzpuiGYWkfg+FTxG149vNOrUkrebx7NMHUXqap/uCPZ1IonBSjKjas6EAqRnETdpAzyriztZCN+vvMrZG4E8+Fqwt137LfR1RobYf32nn0shUBaLKipqcjcIezv5Byv4JZe1rhS+cXueSivpZO0ODZeAW0vfdnUQuo+H7euvK7T/pumrOH6lA3kmLpPWhn0jprkpHVMNrXnhT+EciAIkwky0lhy3uOBmenu9XDoM/5J0rZBJGG1opvBpHQMK8Ui4ERYrdSMNXHV+PUsTOyfX2uyYmfT5BcpvOomRrjzsG70+Ft+4UJKaHXxvx9cQsklR5ltb8EqzExwlPPmhFpKzal4c0LXAbPQ0E1+P9+Y+jwnOakvVvhk1Dvtmxr0VryfppLz0RF8FUfCfsmIBUc0jHBSO1ph37efINg5fp3bzIOfLcDxYnLMDG4w2okT2F8/QfHrnbc/vWgm587aS44pjsZypYTToYeYkUblTCulFz1x6mMjjaKipKdRM30oNtFxL9e5zTzw2QISutbDwECeGFNEyzCdPKvf0HmlRqnXy6NlC6IacCJGj+ikYyAcnPs0Y0vvoPBgaq9d7YGgNTYy6rvreHndVCYOW4FVNTM/oYn5U/4NU3r+XZ61lpbhOmJsEXLrrrBqGiyP156N/YQEjzci549gcMRfQh5zy4Zvkf20Fcvy+M2ApKanoVo1zMJHvGe/dCcpJDgc0NLyxRuz6wOK3UbTtALW/eZJ2nybK31N3PDRXRQsUjC9H1QPhUDNykQIwZ77LPxj2hPtATQNuovfVc2NesDJ7rsdLJr1N+bY4+jlHwbuSi2n5LKnuTXpOopviv71FZsN4UxAS3fis3V+bpb/aDZZ72/DF6Ehu5gGR8QtfQygiAcW/+p3XJZ7H/n/tuMri91kz+nE7BfupXhxHXLn/o6OQ8DgXr96HaMtx8hVdZKU0zdQwqB3qm6aSNHX9/KrvCdJUSC4Nx5pwtqM06yQYW0iWenZWD09+Tku+O0amt8ZweH/mRHOy/eLpyc/R+3tzTReO619m5qehvrBsD4HUEQbWVFJwX9qGfvkHe3bCs1OHrjhRfbcNQymxYEv8SmYt2ceK5+eSs5rh2Om4edX/Jsznt1D7kdW8tYl+D9rHYx8u455jmOMM1tIVR0xu/9t9TBnrZOnZj3LBGscTZL2QNkPipm37Xq29CMV4jm2Rn5z3svse3ZSBJWFRrNCjqOeYnNCezKmaBH+Md1TBCBOtmqcYdnMpYlbeSp9Fu+LGeT+InKRXtLtxrmzmuLnb2fNwo5sV5OtGr864zX+mjqH7Rf7b7pq1Vg64nEKTba49FrQXS7UI8dI3ZPSafuChEoqLv4vf7F/icKkyZhXbIiNwDZ6COKYue0KXK8OIfvDY2hHouPeFur+X55wjLmOI2hdBshVBE6lewjoXm8zD1Vcxv5FJaQT4XmIoECesZaTJCsWrKL3HtfMbVeQaHFzW+7KkN4Nhe/czIiP3cjayOUwNn26C8sz47n6qu/w6KRXe/SyCCZZsTPNdoSJReVEM6Sj5ubpJF1cxVWp6wlud1b6mpj9wr2M3FWFHoE8um2E9VWeutfHG++fw9TNV3Fvld+Pt0FvZcKnX2etS8MtvViFmWTFzjiLiZszVpE2syqcErohfT7kkSoKX22iPuh9YBVmZtvq+U3+qzw161memvUsz0x7liJTfLuJ6S0tJJY2MeHTr9Og+30IHYqFbydvZvy4choKurvFRRNTbg4104dgn9h94ufYZ1lkbjqJXl4RtbSZoe6/Q/G3ZDPUhE6fNpenYNa7
vdxdehU7XhvNkGXlUdEMkKk2k6UmYBWnvp+JFjdOsxub6Dzx0/bs5b8ssG47hNYYuVSPustF0sr9qLucbG3JC6mjzQYE4xCCZHP0gjfagmMeGLmU8Ra/14RbelnlgnNX3kXR4jpkRWVE62dYW7rOrUcZUZNO04Z03hifgftiU7vD+e3XL+Thca8z21aPU/G3JHNMLkpSjkc82bLe0oJ64Ag/LJ9PjqOeK9M+ZaLFhVOxMcYCYzpNjSj8oa6AafYDnGHx4lRspKgtpGae9Hff126LsNrekR4PalklzudGcnvGPH6R8xYFJgfZJiez0vfxxKgRDBlbjLZzb9S1ncqJ39yooNY24YtgKyIUvd3/U7GmuYS9G/IpXlqN78jRKKjtP7flrsQmvJxhqSE4EVK1puH4ZzKO9fvQausiPtGqVdeQVCr51+7J1Pn8LzBdinYb8P7PxjLGsqVPL5JIUfMVF48Vv91uhwBc0scHJ8dTsEjxj/NHuEEQVqPrO1yBOFxByo4kEg/ms/LYlHaH88a8ibw+dBJjhy7HGYOhMtncQtniCRywCD69JI/r8tcy0V5Gruom2+SkQW9lnSsJgD8tv4QN03bzo2FLGWOBXNXNlQVbWDT/fEZE2elCzcxEOB3ILslNnHsb2Pj+aLZcs5FstRqHsHBuwh6WTxxLZVkeWTEwun1x4o8Voe7/CEtHlilV6KhIzrP5Oo3lHnKnkXBExOQl1lc6yrrD4NZpLbzVdAbJH5aGPR9sb6Rtb0DoSXyQGQiekrTbgPIH0vD2kMc2Wswp2scM2wmcSsc4ri4ldT4H1sN1UNi5lS5cHmRDY8hVTtqeTXxaj8eEIiLL9XQNoNABU6ukVTN3GvGNZoZ73eVqTyNX0zSd3555Gckj6riyYAvfTNnIx625PLDmawCMfraeTzILOTbEyRh0sk1ObkndxIE5mVFfAqVpZiF1xSZ8IbLNKRp4ZYcxnmo1c0/+cm6deB1ZUdQIp3bib9BbERoxc2sLdf/1xKAWjfCP6X866/H2YQa39NLss6KcZuks6rQWlrcM548fzKXEtTOq19a37CR5CyR33Q40eNNwxaFbo1WYmOI8yPKF3aNsbcchY3srpk3dX7qNs0dQX6RiaZBkbmrq80oo0VkjTQgm37yFX2a/R4bqfxs36K1RzXAfTPrTn5AOMO0sFs0/n00zAzpu8k9A6YDui/6MaigOf1Vj0aynTuGn2fvSI9HgVE78kXY47w/t9z8YITANy+boR4LUwHtsndvM+so8HDWnj49sm8F94OMrKP7uurjKq7t+XyEfZw5nrqOqu4eTEFF7IXctE4diYWFiDQtv6R7M86e6fP73w0vISRvXbZ+8+QT/LFnMs7Uz+eAffV8JJfJGN+D/aFdrUYOmtNe5knhgzdfaDV1MWLuNEWvhJETd8H/eOJUTf6QdzgeLsFjQ05NQgjwaTodAnjYa9FaqNY23ms7gjx/Mjc7KEf1k1PUbuX/RlWTNWtSpnthVL2pWdFaPOdKSzE5PIh5zI5mq9ZTjy3ellnPXgr/Cgp6O6H/umMga3SCH83mOYyHdcQwMYo2SkEDr7LE8+cT/nbYrR8zacCPOF5JI/rCUEtfOuGrh9kbUV4+50sWjpgsp+85IrrhydSDJTXSJmNFVbDZ854yh5LHPAga3ozL/oa6APy2/hNHP1p82lSNZsXF/9nJ+u+5iAMruGYVpw+64ylV6OpZrvKCbRcSXaYkUhctvIv8lBcen+yO+2GMkiNbqMYB/UlEICl+ws271ZGYnTcdnE9SNVtl5e3RyhYQ/OGJcCZVfSqdhqousjEZ+NmRlpxbuPZUTeWvFORQtaUXuKg335cNG7usqt9i+xaOT/8P8hCbMQqXQZOORYSsAuDJ5LCY18v68ua+r3LXjtpATaV2xn5AUbYtNuXYtrzai5XAeCe4ct4rfL5hLoYxiwImU6I0nueYv96D1cah+xEdurNvKouqlMFB6qidqNDM5SYl2+Aim
ymOYVBVhNpG0K5sx4o5Oh9kn1/DQmLfD7okTNqPrmTuZhkIzjaMkIyeU87/5y0hRWjs5nN9TOZEl75xD3rtu1G0H0GO8jHVvOD86gHp2CVvH5jE/wT8DbBZqe9Z+qURnfSfnRwdI3JrQzWUsFMLlQW9ojEm5Oj86QJpzFN8//k0eHO6vpFIKPC6TP89BhB3OI8GViTv475jRHC4YQUYUr6u3tpL/4hGk2kffyupaf+BDHHoGdCXUcxULpM/XqT4qLjeF/+rcKKg5OJTvT+6ozz3hLU1k+L6+TxCHzehWzjRTcO4hvj1kB+c59jLJaiF4kPkPdQW8teIc8le4MG8tRYvzxfPaHL2XHD6DLydtZao1Ng7dWnUNnAZdRq26hvRVdpJK0/EmdQwlKV4ZFYfzSHBYs3K4MRlzc5SNmZT4DkYv+i2aaNU1mE9Co89fRzSpU+Zr4ceHFiC9sfNs0V0uCKx200aG20vy/s71ORTmxpZ+JTsPX0s3WTI9/SBjrUeo0RJY0SXl6J+WX0LRklbUbQfi3uC2kba9gUM5GfzYvoB78pd32qe6o7eQ3elCW3BM115x3JeSlKhu2W31kN+UXY77owyyPqszxsjDiKlF8ln9MFYkb6dFWllWN5OyxSMZ0hy5ZbsGQk/1OeSx/Thv2Ixu2jbB83IWzyaGzhw2+tl65K7SuB5S6Iq+ZSf5nmIqG/O4dWLnlSVGVzUhvadf682gO9Lrw1bZ1G31kNQNZvJWVsd1NNrpSPJBL4dW53Fr2XXgVbBXmMh98uMvzIstbEY3pMN5EKdrgWo795K1c2+3CK/T9e8x6I70epBbd4VMph3NhOVfFCzLN5C3/NTHfV6Jr4SxBgYGBp9zRDQX2TMwMDD4omO0dA0MDAyiiGF0DQwMDKKIYXQNDAwMoohhdA0MDAyiiGF0DQwMDKKIYXQNDAwMosj/D4u+HznwUFROAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "ds_test = create_dataset(test_data_path).create_dict_iterator()\n", - "data = next(ds_test)\n", - "images = data[\"image\"].asnumpy()\n", - "labels = data[\"label\"].asnumpy()\n", - "\n", - "output = model.predict(Tensor(data['image']))\n", - "pred = np.argmax(output.asnumpy(), axis=1)\n", - "err_num = []\n", - "index = 1\n", - "for i in range(len(labels)):\n", - " plt.subplot(4, 8, i+1)\n", - " color = 'blue' if pred[i] == labels[i] else 'red'\n", - " plt.title(\"pre:{}\".format(pred[i]), color=color)\n", - " plt.imshow(np.squeeze(images[i]))\n", - " plt.axis(\"off\")\n", - " if color == 'red':\n", - " index = 0\n", - " print(\"Row {}, column {} is incorrectly identified as {}, the correct value should be {}\".format(int(i/8)+1, i%8+1, pred[i], labels[i]), '\\n')\n", - "if index:\n", - " print(\"All the figures in this group are predicted correctly!\")\n", - "print(pred, \"<--Predicted figures\")\n", - "print(labels, \"<--The right number\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "perfect-croatia", - "metadata": {}, - "source": [ - "Draw a pie chart for probability analysis. In this example, it displays a pie chart for the current `batch` in the first image.\n", - "\n", - "`prb` stores the preceding 32 prediction numbers and corresponding output results. The classification result `prb[0]` corresponding to the first image is obtained, and the sigmol formula $\\frac{1}{1+e^{-x}}$ is used to obtain the probability of [0-9] corresponding to the image. The number whose probability value is greater than 0.5 is analyzed in the pie chart." 
- ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "stopped-creativity", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The probability of corresponding numbers [0-9] in Figure 1:\n", - " [0.054608213813288335, 0.04007988333681419, 0.9999934046689553, 0.9469836696068331, 0.347608619405929, 0.020873059274634436, 0.0013652782671098932, 0.9990516692085604, 0.39083703602997244, 0.036189847324771866]\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAAD3CAYAAAC+eIeLAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAmUUlEQVR4nO3deXhU5fn/8fedhEV2gbCILBZZBUEJi4qkohZEEYsKKCpuVWtri/artkWISxfrr2qt1VpFrdW6tFapW911WGQbAZFVUZFNJCxhCyQkc//+OIMECCSTmTnPmZn7dV1zETJzznySwCdnnvPMeURVMcYY448s1wGMMSaTWOkaY4yPrHSNMcZHVrrGGOMjK11jjPGRla4xxvjIStcEioh8KCJXJ2nf7URkh4hkR//eUkSmish2EblXRH4tIpOT8LxjReTtRO/XpKYc1wFMYonISuBqVX3XdZagUdVVQIMKn7oG2Ag00gRNWBeRDsBXQC1VLYs+7z+BfyZi/yb12ZGuyWTtgSWJKlxjqsNKN42JyOUiMkNE7heRIhH5UkROjn5+tYhsEJFxFR5/tojMF5Ft0ftvP2B/l4nI1yKySUQmishKETkjel+WiPxSRL6I3v8vEWl6mGwjRGRB9Lm+EJGhlTymo4i8H93fRhH5p4g0qXD/rSKyNjo8sFxETo9+vp+IhKP7/lZE7ot+voOIqIjkiMjfgXHALdEhhzNE5HYReabC/geKyEfR791qEbm8Gt+nqdE/i6L7PSn6/Z5eYb8ni8hcEdka/fPkCvd9KCJ3RX9u20XkbRFpfsgfskk5Vrrprz+wEGgGPAs8D/QFjgUuAf4iIntfcu8ELgOaAGcDPxaR8wBEpDvwMDAWaA00BtpUeJ4bgPOAfOAoYAvwUGWBRKQf8A/g5uhzDQJWVvZQ4PfR/XUD2gK3R/fRBfgp0FdVGwJDKuzjAeABVW0EdAT+deCOVfVyvJf896hqgwOHY0SkPfA/4EEgF+gNLIjefcjvU/RrAWgS3e/MA/bbFHgd+DPez+Q+4HURaVbhYRcDVwAtgNrA/1XyvTEpyko3/X2lqk+qajnwAl5x3amqJar6NlCKV8Co6oeq+qmqRlR1IfAcXokCXAC8qqrTVbUUmARUfFl+HTBBVdeoagleOV4gIpWdN7gKeEJV34k+11pVXXbgg1R1RfQxJapaiFdQe/OUA3WA7iJSS1VXquoX0fv2AMeKSHNV3aGqs2rwfbsYeFdVn1PVPaq6SVUXVOP7VJWzgc9V9WlVLVPV54BlwPAKj3lSVT9T1V14vzB61yC/CSgr3fT3bYWPdwGo6oGfawAgIv1F5AMRKRSRrXhFuvel7VHA6r0bqWoxsKnCftoDL0dfihcBS/GKsWUlmdoCX1Ty+f1EZxc8Hx1C2AY8szePqq4AxuOV+4bo446KbnoV0BlYFn35fk5VzxVLxiq+T1U5Cvj6gM99zf6vGtZX+LiY/U/+mRRnpWs
qehZ4BWirqo2BR/Be4gN8Axy994EicgTey+O9VgNnqWqTCre6qrq2kudZjfeyvyq/wzua7hkdKrikQh5U9VlVHYhX+Ar8Ifr5z1X1IryX538AXhSR+tV4vupmPNz3qaqTcuuieStqB1T2fTJpyErXVNQQ2Kyqu6PjrhdXuO9FYHj0JFBtvCNMqXD/I8Bvo2OhiEiuiIw4xPM8DlwhIqdHT8C1EZGuh8izA9gqIm3wxoCJ7r+LiAwWkTrAbrwj9kj0vktEJFdVI0BRdJNIDN8H8MZ7zxCRUdETb81EpHeFXIf6PhVGn+t7h9jvG0BnEbk4ut/RQHfgtRjzmRRlpWsquh64U0S2443ZfncCSlUX450sex7vqHcHsAEoiT7kAbyjv7ej28/CO4l3EFWdg3ei6H5gKxDi4KM/gDuAE6OPeR14qcJ9dYC78ebZrsc7qv1V9L6hwGIR2RHNNSY6Plpt0Tm9w4BfAJvxTqL1it59uO9TMfBbYEZ0qGXAAfvdBJwT3e8m4BbgHFXdGEs+k7rEpiiamojOeCgCOqnqV47jGJMy7EjXVJuIDBeRetHx0T8Cn1L5VC9jzCFY6ZpYjMA7EbQO6IT3st1eKhkTAxteMMYYH9mRrjHG+MhK1xhjfGSla4wxPrLSNcYYH1npGmOMj6x0jTHGR7ZcjzEmkD7++OMWOTk5k4EeBPMAMQIsKisru7pPnz4bqruRla4xJpBycnImt2rVqltubu6WrKyswL2hIBKJSGFhYff169dPBs6t7nZB/O1hjDEAPXJzc7cFsXABsrKyNDc3dyvekXj1t0tSHmOMiVdWUAt3r2i+mHrUStcYY3xkY7omOLwLkh+Nt3TN0RU+bom3QGOt727Dzl3NLRM74C0JVIa3WGTRAbcteNf+XQmsIj+v1KevxCSDSJ+E7k/148PdvWLFilpjx449ZuPGjbVEhHHjxhVOnDix2ifMDsVK1/hLpC7ehcn7A13Yv2Srv9R4rdqzgAFVPm6fCKHwOrwCXom3GOR8YD75ed/EsB+TIWrVqsW99967ZuDAgcVbtmzJOuGEE7oPGzZsW58+fXbHs18rXZNcIh3xynHvrRfe0arfsth39Dxwv3tC4W/ZW8DeihdTyc8r8jmfCZj27dvvad++/R6AI488MtKxY8ddq1atqm2la4JDJBs4BTgVr2D7A7lOM1VPS7wlfoZG/x4hFJ4PvA98AEwjP2+Hq3DGveXLl9desmRJvfz8/Lj/HVjpmviIZAH5wChgJN5aZakuC+gTvd0MlBEKTwP+A7xkwxGZZevWrVkjR47sePfdd69u2rRprAucHsRK18TOK9pB7Cvalm4DJV0OcFr09iCh8Ey81ZFfIj/va6fJTFKVlJTI2Wef3fHCCy/cPG7cuKJE7NNK11TPvqK9EDif9C/aQxHg5OjtPkLhqcBjwIvk58U11meCJRKJMGbMmPadO3feffvtt3+bqP1a6ZrDE2kGXAv8GO8klNnfoOjtz4TCzwCPkZ/3qeNM6amKKV6J9s477zSYMmVKs06dOu3q2rVrd4A77rhj7ejRo7fGs18rXVM5ke7AeOAS4Ai3YVLCkcANwA3R4Yd7gZfJz4t7DNC4MWTIkB2ahKK3d6SZ/YkMROQNYDHwI6xwa+IkvDHfJYTCVxIKu5giZwLKStd4RIYgMhWYBpzlOk6a6AI8DnxJKHwToXB914GMe1a6mU7kTETmAm/iza81iXc03nDDl4TCP7Uj38xmpZupRNoh8h/gbSDPdZwM0QJ4EFhGKDzKdRjjhpVuphGpg8gEYCneHFvjv+8BLxAKf0Qo3N91GOMvK91MInIWsAj4DVDPcRrjnXCbSSj8CKFwY9dhjD9sylgmEOkA/AkY4TaIqYTgzYM+l1D4BvLz/uM6UFDJHYm9tKMWHH46WHFxsfTv379raWmplJeXy/Dhw7fcf//96+J9XjvSTWfeUMIkYAlWuEHXGniRUHgKobC9CSUA6tatq9OnT1++fPn
yJYsXL17y3nvvNXrvvffinoFipZuuRDoDc4A7sLm2qWQE3vzey1wHyXRZWVk0btw4AlBaWiplZWUiIvHvN+49mOARGQOEgeNdRzE10hB4ilD4GULhhq7DZLKysjK6du3avWXLlr3y8/O3DR48eGe8+7TSTScidRF5BHgO7z+uSW1jgfmEwn1dB8lUOTk5LFu2bMmqVasWzps3r/7cuXPrxrtPK910IXIsMBPvpIxJHx2BGYTCtxAKx//a1tRI8+bNy0899dTtr776atyzTKx004HIKGAe0NtxEpMctYA/AP8mFLapfj5Zt25dzsaNG7MBduzYIR988EGjbt26xX35Tpsylsq81XPvA653HcX44nygI6HwCPLzVrkO47eqpngl2urVq2tdfvnlx5SXl6OqMmLEiM0XXXRRXJd1BCvd1CXSHHgDsPG+zNIbmEMoPJL8vI9ch0ln/fv337V06dIlid6vDS+kIpE2wFSscDNVS+ADQuFxroOY2FnpphrvhNl0oJvrKMap2sDfCYVvdh3ExMZKN5WI9MS73m0Hx0lMcNxDKPxb1yFM9VnppgqR/kAIaOU6igmcXxMK/8WmlKUGK91UIHI68C7eOlzGVOYneO9is5PjAWelG3Qi5wGvAw0cJzHBdyneOK8d8QaY/VYMMpHRwDPYz8lU31hgG+k4dzsUTuilHcnPO+y8308++aTO6NGjO+79+5o1a+rccsstaydNmrQhnqe1/8xBJXIG8DT2MzKx+zGh8Dby837pOkgq69WrV8myZcuWgHfhm1atWvUaM2ZMUbz7teGFIBLpDbyE9/ZPY2riVkLhX7kOkS5eeeWVRu3atSvp3Llzabz7stINGm+Vhzewq4SZ+P2OUNgugJQAzz33XNMLLrhgUyL2ZaUbJCJNgP/hrSJgTCL8hVB4sOsQqWz37t3y7rvvNr700ku3JGJ/VrpBIZID/Avo6jqKSSs5eFcnO9Z1kFT14osvNu7evXtx27ZtyxKxPyvd4LgfONN1CJOWmgKvxrvisIh0EZEFFW7bRGR8YiIG1/PPP9901KhRmxO1PzszHgQi1wI/dR3DpLWuwPOEwueQn1dekx2o6nKi12wWkWxgLfBywhJWpYopXsmwbdu2rOnTpzd66qmnvk7UPu1I1zWRgcCDrmOYjDAUuCtB+zod+EJVE1ZGQdSoUaNIUVHRgmbNmtXoF1VlrHRdEmmINxfXpoYZv9xKKHx6AvYzBm8tPhMjK123/oRdMcz4Kwt4mlA4t6Y7EJHawLnAvxOWKoNY6boiMhy40nUMk5Fa410cp6bXaDgLmKeq3yYwU2UikUgk0NeRiOaLxLKNla4L3lI7j7mOYTLaWcD4Gm57Ef4MLSwqLCxsHNTijUQiUlhY2BhYFMt2NnvBjb/hLblijEt3Ewq/Q35etUtDROrjTW1M+jvdysrKrl6/fv3k9evX9yCYB4gRYFFZWdnVsWxkpes3kUuBka5jGIO35M/jhMInV3camaruBJolN5anT58+G/DGjtNKEH97pC+Ro7HpYSZY+lHzYQZTA1a6fhER4EkgrncFGZMEdxIKt3cdIlNY6frnCuAM1yGMqUQ94C+uQ2QKK10/iBwB3Ok6hjGHcQ6h8AjXITKBla4/xgNtXIcwpgr32MKWyWelm2wiTYFbXccwpho648NUsExnpZt8E7CTZyZ1FBAKN3IdIp1Z6SaTSHvgJ65jGBODXOyVWVJZ6SbXXUAd1yGMidGNhMJ2DiJJrHSTReR4YKzrGMbUwBHALa5DpCsr3eS5G/v+mtR1NaFwc9ch0pGVQjJ4q0Gc5TqGMXGoB/zcdYh0ZKWbHPaP1aSDnxIKN3QdIt1Y6SaaSCvA3tlj0kET4DrXIdKNlW7iXY2teWbSx432LrXEstJNJJEs4EeuYxiTQK1Jw2vaumSlm1jDgHauQxiTYNe4DpBOrHQT68euAxiTBGfa9XYTx0o3UUQ6AENdxzD+211SQr/rxtHrqos57vJRFDz
5NwDG/uY2ulx6Pj0uH82Vf7iTPWVllW7/1Juv0WnsSDqNHclTb74GQElpKUNvvoEel4/m4Sn7Vjq/5o+/Zd5ny5L/Re0vC+9chUkAK93EuQb7fmakOrVr8/59f+WTx59lweRneXPOTGYt/pSxZ5zFsn+8yKdPPs+ukhImvz7loG03b9vKHU89xuy/PsmcR/7OHU89xpbt23hr7kwG9uzNwiee4+m3/wfAJys+ozwS4cTOXX3+CgG4klA428UTpxsriUQQqQVc6TqGcUNEaFCvHgB7ysrYU1aGiDBswCmICCJCv27HsaZww0HbvjV3Fmfm9adpo8Yc2bARZ+b15805M6mVk0NxyW72lJWhqgBMfOIR7rrS2Qyuo/BWATZxstJNjHOwJdUzWnl5Ob2vupgW5/2AM/P60797j+/u21NWxtNvv8HQficdtN3awg20zd33T+fo3BasLdzAmX36s3L9Nwy4/gp+dv5oXpkR4sROXTmqea4vX88hXODyydOFzb9LjB+6DmDcys7OZsHjz1K0fTs/nHgzi75cQY/vHQvA9fffzaDjT+DU40+o9v5ycnJ4duJvAK+0h9x8A//97R+56aH7WfXtei4bMoxzT8lPytdyGCMIha+t7nLtpnJ2pBsvkWy8qWLG0KRhQ047oQ9vzpkJwB1/f4zCoiLu+8mNlT6+TW4LVhd++93f1xRuoE1ui/0e8/CUf3PZD4Yxa8kiGtdvwAsFv+Pef/0zeV/EoTUHvu/iidOJlW78BgLNXIcw7hQWbaFo+3YAdpXs5p3wHLq268Dk16bw1tyZPDfpN2RlVf5fbUjfAbw9dzZbtm9jy/ZtvD13NkP6Dvju/i3bt/HazOlcNuRsinfvJivLGyPeVVLiy9dWifNdPXG6sOGF+Nl1FjLcN5s2Mu73t1MeiRCJRBh12hmcc/Kp5AweQPtWrTjpeu8c68hBpzFp3I8IL1vCI6+8xORbbqNpo8ZMvOwq+l47DoBJ466iaaN9qzvd+dRkJlxyJVlZWQzpO4CHpvybnleO4bpznXXfDwmFf0p+XsRVgFQne8+MmhoS+Qzo5DpGxhlxwSxuvHVA1Q80SdCP/Ly5rkOkKhteiIf3hggrXJNpBrsOkMqsdONj8xZNJrLSjYOVbnzOcB3AGAcGEgrb5UtryEq3pkQEON11DGMcqAfYeHoNWenW3PHYVDGTuWyIoYasdGuu+m8vMib9HPyeZlMtVro116PqhxiTtuygo4asdGuup+sAxjjUglC4jesQqchKt+bsSNdkOjvarQEr3ZoQaYp3fVFjMtmJrgOkIivdmrGjXGPsSLdGrHRrxsZzjYHurgOkIivdmrEjXWOgPaGwuA6Raqx0a8aOdI2BOkBr1yFSjZVuzdiRrjGeY1wHSDVWurESaQQ0rvJxxmQGK90YWenG7kjXAYwJkA6uA6QaK93YNXEdwJgAaes6QKqx0o2dHekas49daS9GVrqxa+I6gDEBYqUbIyvd2NmRrjH7NHUdINVY6cauiesAxgRII9cBUo2VbuzsSNeYfax0Y2SlG7smrgMYEyD1XQdINVa6sbMjXWP2sWsvxMhKN3YNXAcwxqQuK93YlbgOYGDDllVENLLJdQ5jYmWlG7udrgMYqD17TreGU/Oz/7zm+dCeSNla13kymA0vxMhKN3ZWugHQpITGDbfv/uLnK+7NrzdtYMtff/nwjF3lJStc5zKmKla6sbPSDYixC72fRZmW5/x+1ZOn1J82sONVy+6as2XP9k9dZ8sgEdcBUo2VbuysdAPihtl0qvh3BXli/Sv9ms4Y3PPsheMXrinZMNdVtgyy3XWAVGOlGztfSrcD3vIUvYG8A+67F28gbeMhtr0FOA7oBvwMULyzf0Pxrr7+cIXHXgPMS1Bmv3XYSusGJSyt7L43Ns84vu3Ms/v2+3jcZ0t2fvmRqpb7nS9DFLkOkGqsdGPn25HuB8ACIFzhc6uBt4F2h9jmI2AGsBBYBMwFQsBbwMDo55+
OPvYToJzUXkd7+GdsONz9c7cv6Xzc3NEnd5oz8ptpRQumquouv7JliK2uA6QaK93YOR1euBG4h0OfMhZgN1CKd3S7B2gJ1AKKo3/X6GMnAnclM6wPbpzJUdV53Be71hw9aMGPBrX6aOjOKYUfhiKqVhaJUeQ6QKqx0o2dL6UrwA+APsCj0c/9F2gD9DrMdicBp+GtFtgaGII3zHAmsBIYgDfk8AreEW61GivA+q6jU61yVlX38Rv2bG7+w8U35zeZ/v2sR9e9HCrT8vXJzJcBilwHSDVWurHzpXSn4421/g94CJgK/A64s4rtVgBLgTXAWuB9YBqQAzwLzAcuBP4E/AK4CbgAr4RT1alf81Ws22wvL2547We/y68/dWDTu1Y+Pr0kUhrzPgwAW1wHSDVWurEr9ONJ2kT/bAH8EG9c9iu8o9wOeKV6InDgYdrLeEezDaK3s4CZBzzmYeAyYBbeCpsv4J2cS1U/n1XzixCValntSSsfGVhv6sD213/2h1nby3YuSWC0TGBvTImRlW7skn5EtJN983B24p046wtswBsiWAkcjXck3OqAbdvhFXQZ3vhtCG94Ya8twGt4pVuM9w9AgFQ+uzTsc3qIsjmefUTQrL+ue3FAo+nf737+olvmry/dlKqTOvy20nWAVGOlGyvvBExc/8Gr8i3eTINeQD/gbLzpXocSBq6OfnwB0BFvulmv6G14hcfeCUzA+8EPwRt66Alcmrj4vstRsnt+W/nUsZp4aeMHJ7T+aOiJA+ddvfSz4lUzVdXeAHBoK10HSDWiqlU/Kg2JSFvgH3gn9xV4VFUfqObGczl4+qxx6NE+zL52OP2Tse/u9Y5Z+WTXgtV9G3bvJyJ1kvEcKewY8vNWug6RSjK5dFsDrVV1nog0BD4GzlPVqsf0RF4ARiU5oonBzloUN/g1gnBEsp6jTe3cbx/rMmHp0KYnnygitmKCN827Lvl5Za6DpJKMHV5Q1W9UdV704+14J/3bHH6r73yetGCmRurvoV67rST1mgtrSwtbDvt0/PePnD5Yn1r/WqhcI76cVA2wNVa4scvY0q1IRDoAJwCzq7nJ4uSlMTV1xQJK/XiereU7Gl++7I78BtNObXjPqn9MK43sqfY84TSzyHWAVJTxpSsiDYD/AONVdVs1N7N/bAH047l0Rf276tXuSGndW7988NR6Uwe2uWnF/R/tLN+13K/nDogFrgOkoowuXRGphVe4/1TVl2LYdDnerCwTIC130vzI3f7/Qiwnkn3/mmdPbjBtUJeLl9z28cY9RZ/4ncGR+a4DpKKMLV0REeBxYKmq3hfTxqql2LhuIF242O07pJ7b8Faf3Bln9jpjwfWLVu5eN0fT+0z1AtcBUlHGli5wCt701MEisiB6GxbD9guTlMvEYfwsOrjOAPBe0dwex8wa0e+E8NgvF+z4bLqq7nGdKcG2AV+6DpGKMnbKWNxErgP+6jqGOdgRE1ixuxbHus5RUYe6rdc93mXi56c1ycsTkfqu8yTANPLzBrkOkYoy+Ug3Xu+5DmAq94Mvgnc9gJW7vznq9E+uz8+dcWbpCxveCUU0ktR3NfpghusAqcpKt6ZUP4fqX1LQ+OfGmeS6znAom8q2Hjlmya/zG07Lr5viKxm/7zpAqrLhhXiIPAFc4TqG2V8EtPYk1pdn0dp1lqrkSHbZzW0vmz2x/VUtj8iuE6ghkcMoBY4kP6/YdZBUZEe68XnXdQBzsCyQfmtTY3ZJiq5kPMcKt+asdONj47oBdcNsUupkVYqtZPyB6wCpzIYX4iXyKd4iuyZASrIprXsbuxFS9sI0fRt2/+zvXQs2dqt3TH8RyXadp4LB5OdZ8daQHenGz4YYAqhOObU7b0rta2TsXcm4y5zz183Y+klQVjIuwltNytSQlW78bIghoK79mLR4Gff5rtVtB86/elDrj87aMWXjhx86Xsn4VfLz0u2NHr6y4YV4eRfM2Yy3yrkJkK112Nbkl9RFqO06SyI1zK6
3/Y8dx8+7svW5XXIk+8AVm5LtPPLz/uvzc6YVO9KNl+oO4HXXMczBGpfQqNWO5F5j1wWHKxnvBN7y6bnSlpVuYjzhOoCp3NiF7HSdIVkcrGT8Bvl5u5P8HGnPhhcSwTuzvBqCPxk/06xqxDftb6QVgrjO4oeRzU+b/1DnW7VV7WYnJmH3Y8jPeyEJ+80odqSbCKrlwNOuY5iDtdtG64aliVspOOiSuJLxFsDGchPASjdxbIghoM5dzgbXGfw2Y9sn3brMOf+kHnNHr5qzbfE0VS2Jc5dP29BCYljpJorqcuzKS4E0fiZHu87gypLirzr0n3f5qW1nnl30v00zPoxhSaoDTU5osAxmY7qJJHIl3moUJmBqT+TrPdm0d53DtcbZDbY+0OkXCy5pOax7tmRV92pss8nPG5DUYBnEjnQT61/ADtchzMHyV7LSdYYgqOFKxo8lPVgGsdJNJG/O7r9dxzAHGz+LJq4zBEnFlYx/seJPh1vJeCtgMxYSyIYXEk3kJOAj1zHM/sqE8tqT2KJCc9dZguriFkPCD3T6v1rNazXpVeHT95Cfd6uzUGnISjcZRN4DBruOYfZ3wrVMX9CagdXeYArwGVAf+En0c+8DywCJfv48OOg6Zt/gvUexJPq4Qey7Dt1/gG+BzsAZ0c+FgBZAtxi+mCQ6vUnfRZO73lbcvk7rXiLyPfLz1rnOlE6sdJNB5GRsJkPgPHYic645l37V3mAlUBt4mX2luxuoG/14FlAIDD9gu414ZdsMb83cR6PbbwVmAyOAfwCjgD3Aq8DFsX41yde1XocHlt781XjXOdKNjekmg+pHwJuuY5j9jV1IT5Tqr3jQATjigM/VrfDxHqj0fW7N8QoXvKPg+kAxkA2UARGgPLrtB8D3q53IT+XLilc+6DpEOrLSTZ5JrgOY/dUr44j2W1kU947eA+4DFgKnVfHYNXgFeySQC9QD/gZ0wbs2nQJHxZ0oGZ7TAv3CdYh0ZKWbLKpzgVdcxzD7u3IepXHv5HTgJuB4YM5hHrcdb2hiBPv+p50F/Bg4Ge8o9zRgKt5kw4/jTpYoEeC3rkOkKyvd5JoE6XEh7XRxXZhuKOUJ2VlP4FDX9doN/BPvdGrbSu5fhnd5pFK8I95R0X3F/yshEV7UAl3mOkS6stJNJtVP8M5Xm4BoUUyzprviGGLYVOHj5VDpBLQyvJmtvYDjKrm/HO8k3CnRx+4dF9471uvWbuBXrkOksxzXATJAATAS+wUXGKMWU/RI32o88EW8GQzFwL14QwGfs292QhPgnOhj1wJhvKGExcDX0e0WRO8/j30X/pyDV8i1gZZ4J+QeBjpx8Ik7/92tBfql6xDpzKaM+UHkGWCs6xjGs7wZq7reQDvXOQLoC6CHFqhdTSyJ7OjLHxMgfVcwSDVdNtHuiD187jpHAP3MCjf5rHT9oPo1NoUsUIasYK3rDAHzihboG65DZAIrXf88AMxzHcJ4bpxFS9cZAmQX8HPXITKFla5fvCV9riEI56cNg76mW3aEb1znCIjfa4GudB0iU1jp+kn1Y7wjXhMA/dfYuC6wArjHdYhMYqXrvwl4U+ONYz+fTX3XGQLgp1oQ9/ppJgZWun5T3Q1chjct3jh03lKOR9nqOodD92qBvuU6RKax0nXBuy7D3a5jZLraEWp13chi1zkcmQn80nWITGSl686dwHzXITLdtR9XenHGdLcJGK0Faq+2HLDSdUV1D3Ah3uVOjCNXzqMHGpDLzPhDgcu0QFe7DpKprHRdUv0Cr3jtiMORRqU0bLWDha5z+OgeexOEW1a6rqm+D4x3HSOTXbqQXa4z+GQ6cJvrEJnOSjcIVB/CW0/AOHDDbDqhaX/d40JgjI3jumelGxw34K0La3zWdhutGpay1HWOJIoAl2qB2vUmAsBKNyi8E2sXAF+5jpKJRixjg+sMSXStzccNDivdIFHdiHcZ7B2uo2SaG2dWuqhOOrhFC3Sy6xBmHyvdoFH9FLgEW1vNVye
up2Otcr52nSPB/qAF+v9chzD7s9INItX/Atdhxeur769kpesMCfSoFqi94yyArHSDSvVR4Gq8kyDGB+NncqTrDAnyAt5C7yaAbI20oBO5FHgSyHYdJd2VC5Fak9isUukav6nif8AILdA9roOYytmRbtCpPg1cil38POmylaze61N66tgM4AIr3GCz0k0Fqs8BY7C3CyfdT+ZQx3WGGpoBnKMFWuw6iDk8G15IJSLn4Y3X1XacJG0V57Cr/gQUoZ7rLDH4L3CRFmimvJ05pdmRbipRnQKcD9iV/pOkXhlHdCjiU9c5YvAYcL4Vbuqw0k01qq8BwyGjVzxIqqvmkypjondpgV6jBWrj/SnEhhdSlcixwMtAD9dR0k1hPTa3uJnGSGBnjJQCV2uBPu06iImdHemmKtUVwADgeddR0k1uMU2b7QrsEEMhMNgKN3VZ6aYy1Z2oXgT8ApvZkFCjFgVy+GYJ0F8LdIbrIKbmrHTTgep9wBmQ1lfK8tX4WXzPdYYDPI5XuNW6Cp2I1BWROSLyiYgsFpE7kpzPVJON6aYTkTbAf4D+rqOkg3oT+HxXLTo5jrER+JEW6JRYNhIRAeqr6g4RqYW3asTPVXVWEjKaGNiRbjpRXQsMAh51HSUdDP0c1xf9fhPoGWvhAqhn7yVCa0VvdoQVAFa66Ua1FNVrgZHAetdxUtmNs2jl6Kl3ATdogZ6lBVrjn6GIZIvIArxhp3dUdXaiApqas9JNV6ovA92BvztOkrJOXUXX7AjrfH7a+UCeFuhf4t2Rqparam/gaKCfiNj0wgCw0k1nqltQvQIYCml3gW5fnLSaFT49VQS4BxigBbokkTtW1SLgA7x/B8YxK91MoPoWcBxwN97EelNNP5tNAx+e5i2gtxborVqgCfn5iEiuiDSJfnwEcCawLBH7NvGx2QuZRqQL8Be8KWamCqVZ7Kk7kWIVGidh94uA/0vGopEicjzwFN51mLOAf6nqnYl+HhM7K91MJXIh3pFv0OajBk73nzBjaS6nJHCX64FJwBN23YTMY8MLmUr130Bn4DLsZedhXRtO2P+TYuAuoJMW6GNWuJnJjnQNiGQBFwATgOMdpwmcbbXZ3vhX1EFqfB3jcuAZYIIWqOu5v8YxK12zj/cupuHAbUBfx2kCpc1NhNc1Ii/GzTYDk4GHtEBXJSGWSUE2vGD2UVVUX0G1H970oumuIwXFpQuJZRmcT4FrgKOjMxKscM137EjXHJ7IKcAVeMMPyTiDnxLWNOTbtjfRAkEO8ZBy4BXgQS3QD3yMZlKMla6pHpE6eEMPlwBnkYHrtDX+JYu31eW4Az69CXgCbwjB3oBiqmSla2In0hS4EK+AT4FDHv2llXHnEfpHb/LxivZl4EXgfVvy3MTCStfER6QDMBYYDfR0Gyapvp57FM/2u4b3gQ+1QO2i8aZGrHRN4ojkAqdGb4OAXhDYdcaqsgeYAbwOvIEm9noIJnNZ6ZrkEWmEN/wwCK+I+xLMseASvLfkzqtwW4jqbqepTFqy0jX+EamLt6pFT7x3w+29tce/6YvFwELgY/YV7GLUxmWNP6x0jXvezIh2eNd9bVvhzxZAHbxVD2pX8ecOvIt1V3XbjP2jNw5Z6RpjjI/sHWnGGOMjK11jjPGRla4xxvjIStcYY3xkpWuMMT6y0jXGGB9Z6RpjjI+sdI0xxkdWusYY4yMrXWOM8ZGVrjHG+MhK1xhjfGSla4wxPrLSNcYYH1npGmOMj6x0jTHGR1a6xhjjo/8PKHuRbBhobKIAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "import numpy as np\n", - "# define the pie drawing function of probability analysis\n", - "\n", - "prb = output.asnumpy()\n", - "\n", - "def plot_pie(prbs):\n", - " dict1 = {}\n", - " # remove the negative number and build the dictionary dict1. The key is the number and the value is the probability value\n", - " for i in range(10):\n", - " if prbs[i] > 0:\n", - " dict1[str(i)] = prbs[i]\n", - "\n", - " label_list = dict1.keys()\n", - " size = dict1.values()\n", - " colors = [\"red\", \"green\", \"pink\", \"blue\", \"purple\", \"orange\", \"gray\"]\n", - " color = colors[: len(size)]\n", - " plt.pie(size, colors=color, labels=label_list, labeldistance=1.1, autopct=\"%1.1f%%\", shadow=False, startangle=90, pctdistance=0.6)\n", - " plt.axis(\"equal\")\n", - " plt.legend()\n", - " plt.title(\"Image classification\")\n", - " plt.show()\n", - "\n", - "\n", - "print(\"The probability of corresponding numbers [0-9] in Figure 1:\\n\", list(map(lambda x:1/(1+np.exp(-x)), prb[0])))\n", - "plot_pie(prb[0])" - ] - }, - { - "cell_type": "markdown", - "id": "apart-appointment", - "metadata": {}, - "source": [ - "That's the whole experience of the handwritten number classification application." 
- ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/tutorials/training/source_en/quick_start/quick_video.md b/tutorials/training/source_en/quick_start/quick_video.md deleted file mode 100644 index 53790e17752ee728ff69f075d77a352829ff905a..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video.md +++ /dev/null @@ -1,711 +0,0 @@ -# Hands-on Installation and Experience - - - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - -Provides video tutorials from installation to try-on, helping you quickly use MindSpore. - -## MindSpore Installation - - - - - - -## MindSpore Experience - - - - - - -## Operator Development - - - - - - -## Training Process Visualization-MindInsight - - - - - - -## Model Security and Privacy-MindArmour - - - - - - -## Mobile&IoT-MindSpore Lite - - - - - - -## Join the MindSpore Community - - - - - diff --git a/tutorials/training/source_en/quick_start/quick_video/ascend310.md b/tutorials/training/source_en/quick_start/quick_video/ascend310.md deleted file mode 100644 index e5e7f05a80be7bba059102f2aa36b4a477ad3761..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/ascend310.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindSpore Installation on Ascend 310 - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. 
Please check tutorials on the official website) - - - -**Install now**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/ascend910.md b/tutorials/training/source_en/quick_start/quick_video/ascend910.md deleted file mode 100644 index e14235aecffc79a1e10c2dfef63fbc5239009fba..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/ascend910.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindSpore Installation on Ascend 910 - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**Install now**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/community.md b/tutorials/training/source_en/quick_start/quick_video/community.md deleted file mode 100644 index d8c30e13e74b8c62965be300d5dd467ba9689e99..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/community.md +++ /dev/null @@ -1,9 +0,0 @@ -# Participate in community building - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**See more**: diff --git a/tutorials/training/source_en/quick_start/quick_video/cpu_operator_development.md b/tutorials/training/source_en/quick_start/quick_video/cpu_operator_development.md deleted file mode 100644 index c327b704c597055d540d6c7e0dfc0439ee57358a..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/cpu_operator_development.md +++ /dev/null @@ -1,7 +0,0 @@ -# CPU Operators Development - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. 
Please check tutorials on the official website) - - diff --git a/tutorials/training/source_en/quick_start/quick_video/cpu_ubuntu.md b/tutorials/training/source_en/quick_start/quick_video/cpu_ubuntu.md deleted file mode 100644 index 85d40d7fe17ba60b7903d136de9695070d06ca88..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/cpu_ubuntu.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindSpore Installation on CPU-Ubuntu - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**Install now**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/cpu_windows.md b/tutorials/training/source_en/quick_start/quick_video/cpu_windows.md deleted file mode 100644 index ba84eb101c2b583b4999e6944434fde6fcf44017..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/cpu_windows.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindSpore Installation on CPU-Windows - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**Install now**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/customized_debugging.md b/tutorials/training/source_en/quick_start/quick_video/customized_debugging.md deleted file mode 100644 index ea6234349c9dbad562cbdbd1ac1dbf42471751c6..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/customized_debugging.md +++ /dev/null @@ -1,9 +0,0 @@ -# Customized Debugging - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. 
Please check tutorials on the official website) - - - -**View the full tutorial**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/gpu.md b/tutorials/training/source_en/quick_start/quick_video/gpu.md deleted file mode 100644 index da137f5c37e4708792d20b2a714c9a69a90835a1..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/gpu.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindSpore Installation on GPU - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**Install now**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/gpu_operator_development.md b/tutorials/training/source_en/quick_start/quick_video/gpu_operator_development.md deleted file mode 100644 index 3a55fdcaec1b942220998c90f21558585e6eb009..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/gpu_operator_development.md +++ /dev/null @@ -1,7 +0,0 @@ -# GPU Operators Development - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - diff --git a/tutorials/training/source_en/quick_start/quick_video/inference.md b/tutorials/training/source_en/quick_start/quick_video/inference.md deleted file mode 100644 index 00f98db57f6750aee79f9cd87c1ac60a2da48845..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/inference.md +++ /dev/null @@ -1,9 +0,0 @@ -# Inference on Different Platforms - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. 
Please check tutorials on the official website) - - - -**See More**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/loading_the_dataset_and_converting_data_format.md b/tutorials/training/source_en/quick_start/quick_video/loading_the_dataset_and_converting_data_format.md deleted file mode 100644 index cdd21d37f314ae52f230eeef6c16721190184ffa..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/loading_the_dataset_and_converting_data_format.md +++ /dev/null @@ -1,7 +0,0 @@ -# Loading the Dataset and Converting Data Format - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - diff --git a/tutorials/training/source_en/quick_start/quick_video/loading_the_model_from_hub.md b/tutorials/training/source_en/quick_start/quick_video/loading_the_model_from_hub.md deleted file mode 100644 index 547496a77773a3b9f23c3e54512943cac28b7355..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/loading_the_model_from_hub.md +++ /dev/null @@ -1,9 +0,0 @@ -# Loading the Model from Hub - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**See more**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/mindArmour_differential_privacy.md b/tutorials/training/source_en/quick_start/quick_video/mindArmour_differential_privacy.md deleted file mode 100644 index 91f624501bbcc43ae88078a8d550e67830f6dc01..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindArmour_differential_privacy.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindArmour Differential Privacy - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. 
Please check tutorials on the official website) - - - -**See more**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/mindArmour_fuzzing.md b/tutorials/training/source_en/quick_start/quick_video/mindArmour_fuzzing.md deleted file mode 100644 index 2f55db126285719f06fe9ed343c473f1e6f344ff..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindArmour_fuzzing.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindArmour Model Security-AI Fuzzer - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**See more**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/mindArmour_installation_and_adversarial_attack_and_defense.md b/tutorials/training/source_en/quick_start/quick_video/mindArmour_installation_and_adversarial_attack_and_defense.md deleted file mode 100644 index 66b58b93967a891331fe75c2fd02a15394031645..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindArmour_installation_and_adversarial_attack_and_defense.md +++ /dev/null @@ -1,11 +0,0 @@ -# MindArmour Installation and Adversarial attack and defense - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. 
Please check tutorials on the official website) - - - -**Install now**: - -**See more**: diff --git a/tutorials/training/source_en/quick_start/quick_video/mindArmour_membership_inference.md b/tutorials/training/source_en/quick_start/quick_video/mindArmour_membership_inference.md deleted file mode 100644 index c2952375b418f9d8348286e577b118a5bfccd00b..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindArmour_membership_inference.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindArmour Model Security-Membership Inference - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**See more**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/mindInsight_dashboard.md b/tutorials/training/source_en/quick_start/quick_video/mindInsight_dashboard.md deleted file mode 100644 index 8ab88476a3c868fa731ca793cdff7868ff7eaa96..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindInsight_dashboard.md +++ /dev/null @@ -1,11 +0,0 @@ -# MindInsight Training Dashboard - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**Install now**: - -**See more**: diff --git a/tutorials/training/source_en/quick_start/quick_video/mindInsight_debugger.md b/tutorials/training/source_en/quick_start/quick_video/mindInsight_debugger.md deleted file mode 100644 index fcf0a16464efbe315ee45279e46a1519689b2a4f..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindInsight_debugger.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindInsight Debugger - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. 
Please check tutorials on the official website) - - - -**See more**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/mindInsight_installation_and_common_commands.md b/tutorials/training/source_en/quick_start/quick_video/mindInsight_installation_and_common_commands.md deleted file mode 100644 index 73a821b7f01445a1b2e96833ef05e230c0c99df1..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindInsight_installation_and_common_commands.md +++ /dev/null @@ -1,11 +0,0 @@ -# MindInsight Installation and Common Commands - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**Install now**: - -**More commands**: diff --git a/tutorials/training/source_en/quick_start/quick_video/mindInsight_lineage_and_scalars_comparison.md b/tutorials/training/source_en/quick_start/quick_video/mindInsight_lineage_and_scalars_comparison.md deleted file mode 100644 index b15971ca8c97445bd64c8854b55ad1dadde55160..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindInsight_lineage_and_scalars_comparison.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindInsight Lineage and Comparison Dashboard - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. 
Please check tutorials on the official website) - - - -**See more**: diff --git a/tutorials/training/source_en/quick_start/quick_video/mindInsight_model_explanation.md b/tutorials/training/source_en/quick_start/quick_video/mindInsight_model_explanation.md deleted file mode 100644 index 6481dbd2b231a0882ae20e7dcf02753e07241aaa..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindInsight_model_explanation.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindInsight Model Explanation - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**See more**: diff --git a/tutorials/training/source_en/quick_start/quick_video/mindInsight_performance_profiling.md b/tutorials/training/source_en/quick_start/quick_video/mindInsight_performance_profiling.md deleted file mode 100644 index 953ca8844386844edb24faefb57ce3d69d89e43a..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindInsight_performance_profiling.md +++ /dev/null @@ -1,13 +0,0 @@ -# MindInsight Performance Profiling - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**See more**: - - - - \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/mindspore_lite_converter.md b/tutorials/training/source_en/quick_start/quick_video/mindspore_lite_converter.md deleted file mode 100644 index f61e0801344ba13920edc9ca5f84bf17bcda3e2a..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindspore_lite_converter.md +++ /dev/null @@ -1,13 +0,0 @@ -# MindSpore Lite converter - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. 
Please check tutorials on the official website) - - - -**See More**: - - - - \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/mindspore_lite_quick_start.md b/tutorials/training/source_en/quick_start/quick_video/mindspore_lite_quick_start.md deleted file mode 100644 index a6771817829f4c7938773e863e1977ffd2cf7d2d..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/mindspore_lite_quick_start.md +++ /dev/null @@ -1,11 +0,0 @@ -# MindSpore Lite Quick Start - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**See More**: - -**Use MindSpore Lite**: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/optimize_data_processing.md b/tutorials/training/source_en/quick_start/quick_video/optimize_data_processing.md deleted file mode 100644 index 1dff869c3106a2c3a1d33539d25c02331b0c8805..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/optimize_data_processing.md +++ /dev/null @@ -1,9 +0,0 @@ -# Optimize Data Processing - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -See More: \ No newline at end of file diff --git a/tutorials/training/source_en/quick_start/quick_video/quick_start_video.md b/tutorials/training/source_en/quick_start/quick_video/quick_start_video.md deleted file mode 100644 index 914e35c6a520ebb06589aaf09c87011537a2628c..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/quick_start_video.md +++ /dev/null @@ -1,11 +0,0 @@ -# Quick Start - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. 
Please check tutorials on the official website) - - - -**View code**: - -**View the full tutorial**: diff --git a/tutorials/training/source_en/quick_start/quick_video/saving_and_loading_model_parameters.md b/tutorials/training/source_en/quick_start/quick_video/saving_and_loading_model_parameters.md deleted file mode 100644 index 78a12a04c0f41419e4afd4d8641be8c5004cb135..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/quick_start/quick_video/saving_and_loading_model_parameters.md +++ /dev/null @@ -1,9 +0,0 @@ -# Saving and Loading Model Parameters - -[comment]: <> (This document contains Hands-on Tutorial Series. Gitee does not support display. Please check tutorials on the official website) - - - -**View the full tutorial**: diff --git a/tutorials/training/source_en/use/data_preparation.rst b/tutorials/training/source_en/use/data_preparation.rst deleted file mode 100644 index 95b0318919fb0e220d756b4e71a4fdad917c15be..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/use/data_preparation.rst +++ /dev/null @@ -1,8 +0,0 @@ -Loading Dataset -================ - -.. toctree:: - :maxdepth: 1 - - load_dataset_image - load_dataset_text \ No newline at end of file diff --git a/tutorials/training/source_en/use/defining_the_network.md b/tutorials/training/source_en/use/defining_the_network.md deleted file mode 100644 index 2a17fff312064e7eb2327cf85366cb0d7181b388..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/use/defining_the_network.md +++ /dev/null @@ -1,27 +0,0 @@ -# Defining the Network - -Translator: [huqi](https://gitee.com/hu-qi) - -`Linux` `Ascend` `GPU` `CPU` `Model Development` `Beginner` `Intermediate` `Expert` - - - -A neural network model composed of multiple layers is an important part of the training process. You can build a network model based on the base class of `nn.Cell` in MindSpore by initializing the `__init__` method and constructing the `construct` method. 
There are several ways to define the network model: - -- Use the official network model directly. - - It is recommended to consult the current [Network Support List](https://www.mindspore.cn/doc/note/en/master/network_list_ms.html) provided by MindSpore to directly use the corresponding network model. In the network support list, the platforms supported by each network are provided. Click the corresponding network name to view the definition of the network. Users can customize the network initialization parameters according to their needs. - -- Build your own network. - - - If the built-in operators in the network are not enough to meet your needs, you can use MindSpore to customize the operators quickly and easily and add them to the network. - - Go to [Custom Operators](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/custom_operator.html) for detailed help information. - - - MindSpore provides scripts for migrating third-party training frameworks, and supports the migration of existing TensorFlow, PyTorch, etc. networks to MindSpore to help you quickly migrate the network. - - Go to [Migrating Training Scripts from Third Party Frameworks](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/migrate_script.html) for detailed help information. - - - MindSpore supports probabilistic programming using the logic of developing deep learning models, and also provides a toolbox for deep probabilistic learning to build Bayesian neural networks. - - Go to [Deep Probability Programming](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/apply_deep_probability_programming.html) for detailed help information. 
diff --git a/tutorials/training/source_en/use/images/map.eddx b/tutorials/training/source_en/use/images/map.eddx deleted file mode 100644 index a54b2c9bddf146988f1eb2b6401b6c22f2e8f4b5..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/use/images/map.eddx and /dev/null differ diff --git a/tutorials/training/source_en/use/images/mnist_5.png b/tutorials/training/source_en/use/images/mnist_5.png deleted file mode 100644 index f6ab8189e759f47b890e96b01cca8573774dada3..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/use/images/mnist_5.png and /dev/null differ diff --git a/tutorials/training/source_en/use/images/mnist_5_resize_crop.png b/tutorials/training/source_en/use/images/mnist_5_resize_crop.png deleted file mode 100644 index 084404666feaa0ef1c22384f7525003c3981577c..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_en/use/images/mnist_5_resize_crop.png and /dev/null differ diff --git a/tutorials/training/source_en/use/load_dataset_image.ipynb b/tutorials/training/source_en/use/load_dataset_image.ipynb deleted file mode 100644 index ba8479a38e57a03239a7d318846fed06665ea346..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/use/load_dataset_image.ipynb +++ /dev/null @@ -1,419 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "secondary-vatican", - "metadata": {}, - "source": [ - "# Loading Image Dataset\n", - "\n", - "`Linux` `Ascend` `GPU` `CPU` `Data Preparation` `Beginner` `Intermediate` `Expert`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_en/use/load_dataset_image.ipynb)" - ] - }, - { - "cell_type": "markdown", - "id": "demographic-concentrate", - "metadata": {}, - "source": [ - "## Overview\n", - "\n", - "In computer vision training tasks, it is often difficult to read the 
entire dataset directly into memory due to memory capacity. The `mindspore.dataset` module provided by MindSpore enables users to customize their data fetching strategy from disk. At the same time, data processing and data augmentation operators are applied to the data. Pipelined data processing produces a continuous flow of data to the training network, improving overall performance.\n", - "\n", - "In addition, MindSpore supports data loading in distributed scenarios. Users can define the number of shards while loading. For more details, see [Loading the Dataset in Data Parallel Mode](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html#loading-the-dataset-in-data-parallel-mode).\n", - "\n", - "This tutorial uses the MNIST dataset [1] as an example to demonstrate how to load and process image data using MindSpore.\n", - "\n", - "## Preparations\n", - "\n", - "### Importing Module\n", - "\n", - "This module provides APIs to load and process data sets." 
- ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "bigger-airfare", - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds" - ] - }, - { - "cell_type": "markdown", - "id": "proud-culture", - "metadata": {}, - "source": [ - "### Downloading Dataset\n", - "\n", - "Run the following command to download the training images and labels of the MNIST dataset and unzip them, put them in the path `./datasets/MNIST_Data`, the directory structure is as follows:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "several-coordinator", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "id": "controlling-digest", - "metadata": {}, - "source": [ - "## Loading Dataset\n", - "\n", - "MindSpore supports loading common datasets in the field of image processing that come in a variety of on-disk formats. 
Users can also implement custom dataset class to load customized data. For the detailed loading method of various datasets, please refer to the [Loading Dataset](https://www.mindspore.cn/doc/programming_guide/en/master/dataset_loading.html) in the programming guide.\n", - "\n", - "The following tutorial shows how to load the MNIST dataset using the `MnistDataset` in the `mindspore.dataset` module.\n", - "\n", - "1. Configure the dataset directory and create the `MnistDataset`." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "burning-broadcast", - "metadata": {}, - "outputs": [], - "source": [ - " DATA_DIR = './datasets/MNIST_Data/train'\n", - " mnist_dataset = ds.MnistDataset(DATA_DIR, num_samples=6, shuffle=False)" - ] - }, - { - "cell_type": "markdown", - "id": "beneficial-dispatch", - "metadata": {}, - "source": [ - "2. Create an iterator then obtain data through the iterator." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "royal-catalog", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAPsAAAENCAYAAADJzhMWAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAMaklEQVR4nO3dX6ik9X3H8fenJmnBeLFGul2MZtNUQiGlWkQKlWIpCdZeqDc2QsE0pZuLWhLIRcReRAiFUGzaQqF0Q2w2tjUEjFHE1lix2eQmuIrVVTFauxKX1Y0sbbQ3afTbi/OsPbuec+bs/Htmz/f9gmFmnjP7zHef3c/+/s3sL1WFpJ3vZ8YuQNJyGHapCcMuNWHYpSYMu9SEYZeaMOxSE4Zdp0hyJEltcntl7Po0vXeNXYBW0n8Df7XB8TeWXIfmKH6CTuslOQJQVXvHrUTzZjdeasJuvDbys0l+H7gY+B/gSeBgVb05blmahd14nWLoxn9ggx/9J/AHVfWd5VakebEbr9P9PfDbwC8A5wK/AvwdsBf45yS/Ol5pmoUtu7Ylye3AZ4FvVdX1Y9ejM2fYtS1Jfgl4HjhRVe8bux6dObvx2q4fDffnjlqFpmbYtV2/Pty/OGoVmpph19uS/HKSd7TcSfYCfzM8/YelFqW5cZ1d6/0e8NkkB4GXgNeBDwG/C/wc8ABw+3jlaRaGXes9AnwYuAz4DdbG5/8FfA+4E7iznNE9azkbLzXhmF1qwrBLTRh2qQnDLjWx1Nn4JM4GSgtWVdno+Ewte5KrkzyX5IUkt8xyLkmLNfXSW5JzgB8AHwVeBh4FbqyqZ7b4Nbbs0oItomW/Anihql6sqp8AXweuneF8khZolrBfCPxw3fOXh2OnSLIvyaEkh2Z4L0kzWvgEXVXtB/aD3XhpTLO07EeBi9Y9f/9wTNIKmiXsjwKXJPlgkvcAHwfum09ZkuZt6m58Vf00yc3Ag8A5wB1V9fTcKpM0V0v91ptjdmnxFvKhGklnD8MuNWHYpSYMu9SEYZeaMOxSE4ZdasKwS00YdqkJwy41YdilJgy71IRhl5ow7FIThl1qwrBLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhOGXWrCsEtNGHapCcMuNWHYpSYMu9SEYZeamHrLZp0dlrlLr/5fsuFGqqOaKexJjgCvA28CP62qy+dRlKT5m0fL/ltV9docziNpgRyzS03MGvYCvp3ksST7NnpBkn1JDiU5NON7SZpBZpnASXJhVR1N8vPAQ8CfVNXBLV7vbNGSOUE3jjEn6KpqwzefqWWvqqPD/XHgHuCKWc4naXGmDnuSc5Ocd/Ix8DHg8LwKkzRfs8zG7wbuGbor7wL+qar+ZS5V7TB2pbUKZhqzn/GbNR2zG/Z+dtyYXdLZw7BLTRh2qQnDLjVh2KUm/IrrHDjb3s8qfoV1Elt2qQnDLjVh2KUmDLvUhGGXmjDsUhOGXWrCdXatrLNxLXuV2bJLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhOus8/BpPXgnfx9986/97ONLbvUhGGXmjDsUhOGXWrCsEtNGHapCcMuNeE6+xKczWvRs36n3O+kr46JLXuSO5IcT3J43bHzkzyU5Pnhftdiy5Q0q+10478KXH3asVuAh6vqEuDh4bmkFTYx7FV1EDhx2uFrgQPD4wPAdfMtS9K8TTtm311Vx4bHrwC7N3thkn3AvinfR9KczDxBV1WVZNMZpqraD+wH2Op1khZr2qW3V5PsARjuj8+vJEmLMG3Y7wNuGh7fBNw7n3IkLUomrfEmuQu4CrgAeBX4PPAt4BvAxcBLwA1Vdfok3kbnshu/AGOu07uOvnqqasM/lIlhnyfDvhiGXettFnY/Lis1YdilJgy71IRhl5ow7FIThl1qwrBLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhP+V9I7wFbfPFv0N+IWeX6/UTdftuxSE4ZdasKwS00YdqkJwy41YdilJgy71ITr7Dv
c2bxd9Db+m/MlVbIz2LJLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhOuszfnOnwfE1v2JHckOZ7k8LpjtyU5muSJ4XbNYsuUNKvtdOO/Cly9wfG/rKpLh9sD8y1L0rxNDHtVHQROLKEWSQs0ywTdzUmeHLr5uzZ7UZJ9SQ4lOTTDe0maUbYzAZNkL3B/VX1keL4beA0o4AvAnqr65DbOs7qzPdrQKk/QTdJ1gq6qNvyNT9WyV9WrVfVmVb0FfBm4YpbiJC3eVGFPsmfd0+uBw5u9VtJqmLjOnuQu4CrggiQvA58HrkpyKWvd+CPApxZXosZ0Nq/D61TbGrPP7c0cs+84qxx2x+yn8uOyUhOGXWrCsEtNGHapCcMuNeFXXDWTWWa8x9xOuuNMvS271IRhl5ow7FIThl1qwrBLTRh2qQnDLjXhOru2tMrfatOZsWWXmjDsUhOGXWrCsEtNGHapCcMuNWHYpSZcZ9/hOq+Td/zO+lZs2aUmDLvUhGGXmjDsUhOGXWrCsEtNGHapiYlhT3JRkkeSPJPk6SSfHo6fn+ShJM8P97sWX25PVTX1bSdLsuVNp5q4ZXOSPcCeqno8yXnAY8B1wCeAE1X1xSS3ALuq6nMTzrWz//YtyE4P7bQM9Mam3rK5qo5V1ePD49eBZ4ELgWuBA8PLDrD2D4CkFXVGY/Yke4HLgO8Du6vq2PCjV4Dd8y1N0jxt+7PxSd4L3A18pqp+vL4LVVW1WRc9yT5g36yFSprNxDE7QJJ3A/cDD1bVl4ZjzwFXVdWxYVz/b1X14QnncfA5BcfsG3PMvrGpx+xZu6JfAZ49GfTBfcBNw+ObgHtnLVLS4mxnNv5K4LvAU8Bbw+FbWRu3fwO4GHgJuKGqTkw4V8smypZ5Orbc09msZd9WN35eDLvOhGGfztTdeEk7g2GXmjDsUhOGXWrCsEtNGHapCf8r6W1y+Ww6Lp+tDlt2qQnDLjVh2KUmDLvUhGGXmjDsUhOGXWqizTq76+TTcZ1857Bll5ow7FIThl1qwrBLTRh2qQnDLjVh2KUm2qyzd+U6uU6yZZeaMOxSE4ZdasKwS00YdqkJwy41YdilJiausye5CPgasBsoYH9V/XWS24A/An40vPTWqnpgUYXOyvVmdTdxf/Yke4A9VfV4kvOAx4DrgBuAN6rq9m2/WdP92aVl2mx/9okte1UdA44Nj19P8ixw4XzLk7RoZzRmT7IXuAz4/nDo5iRPJrkjya5Nfs2+JIeSHJqtVEmzmNiNf/uFyXuB7wB/VlXfTLIbeI21cfwXWOvqf3LCOezGSwu2WTd+W2FP8m7gfuDBqvrSBj/fC9xfVR+ZcB7DLi3YZmGf2I3P2jT2V4Bn1wd9mLg76Xrg8KxFSlqc7czGXwl8F3gKeGs4fCtwI3Apa934I8Cnhsm8rc5lyy4t2Ezd+Hkx7NLiTd2Nl7QzGHapCcMuNWHYpSYMu9SEYZeaMOxSE4ZdasKwS00YdqkJwy41YdilJgy71IRhl5pY9pbNrwEvrXt+wXBsFa1qbataF1jbtOZZ2wc2+8FSv8/+jjdPDlXV5aMVsIVVrW1V6wJrm9ayarMbLzVh2KUmxg77/pHffyurWtuq1gXWNq2l1DbqmF3S8ozdsktaEsMuNTFK2JNcneS5JC8kuWWMGjaT5EiSp5I8Mfb+dMMeeseTHF537PwkDyV5frjfcI+9kWq7LcnR4do9keSakWq7KMkjSZ5J8nSSTw/HR712W9S1lOu29DF7knOAHwAfBV4GHgVurKpnllrIJpIcAS6vqtE/gJHkN4E3gK+d3ForyZ8DJ6rqi8M/lLuq6nMrUtttnOE23guqbbNtxj/BiNduntufT2OMlv0K4IWqerGqfgJ8Hbh2hDpWXlUdBE6cdvha4MDw+ABrf1mWbpPaVkJVHauqx4fHrwMntxkf9dptUddSjBH2C4Efrnv+Mqu133sB307yWJJ9Yxezgd3rttl6Bdg9ZjEbmLiN9zKdts34yly
7abY/n5UTdO90ZVX9GvA7wB8P3dWVVGtjsFVaO/1b4EOs7QF4DPiLMYsZthm/G/hMVf14/c/GvHYb1LWU6zZG2I8CF617/v7h2EqoqqPD/XHgHtaGHavk1ZM76A73x0eu521V9WpVvVlVbwFfZsRrN2wzfjfwj1X1zeHw6Nduo7qWdd3GCPujwCVJPpjkPcDHgftGqOMdkpw7TJyQ5FzgY6zeVtT3ATcNj28C7h2xllOsyjbem20zzsjXbvTtz6tq6TfgGtZm5P8D+NMxatikrl8E/n24PT12bcBdrHXr/pe1uY0/BN4HPAw8D/wrcP4K1XYna1t7P8lasPaMVNuVrHXRnwSeGG7XjH3ttqhrKdfNj8tKTThBJzVh2KUmDLvUhGGXmjDsUhOGXWrCsEtN/B/M3kbdmYwBvQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - " import matplotlib.pyplot as plt\n", - "\n", - " mnist_it = mnist_dataset.create_dict_iterator()\n", - " data = next(mnist_it)\n", - " plt.imshow(data['image'].asnumpy().squeeze(), cmap=plt.cm.gray)\n", - " plt.title(data['label'].asnumpy(), fontsize=20)\n", - " plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "metropolitan-vacuum", - "metadata": {}, - "source": [ - "In addition, users can pass in a `sampler` parameter to specify the sampling process during dataset loading. For the data samplers supported by MindSpore and their detailed usage methods, please refer to the programming guide [sampler](https://www.mindspore.cn/doc/programming_guide/en/master/sampler.html).\n", - "\n", - "## Processing Data\n", - "\n", - "For the data processing operators currently supported by MindSpore and their detailed usage methods, please refer to the [Processing Data](https://www.mindspore.cn/doc/programming_guide/en/master/pipeline.html) in the programming guide.\n", - "\n", - "The following tutorial demonstrates how to construct a pipeline and perform operations such as `shuffle`, `batch` and `repeat` on the MNIST dataset." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "japanese-equity", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "5\n", - "0\n", - "4\n", - "1\n", - "9\n", - "2\n" - ] - } - ], - "source": [ - "for data in mnist_dataset.create_dict_iterator():\n", - " print(data['label'])" - ] - }, - { - "cell_type": "markdown", - "id": "mexican-sport", - "metadata": {}, - "source": [ - "1. Shuffle the dataset." 
- ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "documented-romance", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "after shuffle: \n", - "4\n", - "2\n", - "1\n", - "0\n", - "5\n", - "9\n" - ] - } - ], - "source": [ - " ds.config.set_seed(58)\n", - " ds1 = mnist_dataset.shuffle(buffer_size=6)\n", - "\n", - " print('after shuffle: ')\n", - " for data in ds1.create_dict_iterator():\n", - " print(data['label'])" - ] - }, - { - "cell_type": "markdown", - "id": "inner-summit", - "metadata": {}, - "source": [ - "2. Add `batch` after `shuffle`." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "terminal-danish", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "after batch: \n", - "[4 2]\n", - "[1 0]\n", - "[5 9]\n" - ] - } - ], - "source": [ - " ds2 = ds1.batch(batch_size=2)\n", - "\n", - " print('after batch: ')\n", - " for data in ds2.create_dict_iterator():\n", - " print(data['label'])" - ] - }, - { - "cell_type": "markdown", - "id": "adapted-metallic", - "metadata": {}, - "source": [ - "3. Add `repeat` after `batch`." 
- ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "freelance-witness", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "after repeat: \n", - "[4 2]\n", - "[1 0]\n", - "[5 9]\n", - "[2 4]\n", - "[0 9]\n", - "[1 5]\n" - ] - } - ], - "source": [ - " ds3 = ds2.repeat(count=2)\n", - "\n", - " print('after repeat: ')\n", - " for data in ds3.create_dict_iterator():\n", - " print(data['label'])" - ] - }, - { - "cell_type": "markdown", - "id": "republican-silence", - "metadata": {}, - "source": [ - "The results show the dataset is repeated, and the order of the replica is different from that of the first copy.\n", - "\n", - "> Having `repeat` in the pipeline results in the execution of repeated operations defined in the entire pipeline, instead of simply copying the current dataset. So the order of the replica is different from that of the first copy after `shuffle`." - ] - }, - { - "cell_type": "markdown", - "id": "expensive-stand", - "metadata": {}, - "source": [ - "## Augmentation\n", - "\n", - "For the data augmentation operators supported by MindSpore and their detailed usage methods, please refer to the programming guide [Data Augmentation](https://www.mindspore.cn/doc/programming_guide/en/master/augmentation.html).\n", - "\n", - "The following tutorial demonstrates how to use the `c_transforms` module to augment data in the MNIST dataset.\n", - "\n", - "1. Import related modules and load the dataset." - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "fifteen-secretariat", - "metadata": {}, - "outputs": [], - "source": [ - " from mindspore.dataset.vision import Inter\n", - " import mindspore.dataset.vision.c_transforms as transforms\n", - "\n", - " mnist_dataset = ds.MnistDataset(DATA_DIR, num_samples=6, shuffle=False)" - ] - }, - { - "cell_type": "markdown", - "id": "accompanied-biology", - "metadata": {}, - "source": [ - "2. 
Define augmentation operators and perform the `Resize` and `RandomCrop` operations on images in the dataset." - ] - }, - { - "cell_type": "code", - "execution_count": 10 , - "id": "defensive-monday", - "metadata": {}, - "outputs": [], - "source": [ - " resize_op = transforms.Resize(size=(200,200), interpolation=Inter.LINEAR)\n", - " crop_op = transforms.RandomCrop(150)\n", - " transforms_list = [resize_op, crop_op]\n", - " ds4 = mnist_dataset.map(operations=transforms_list,input_columns='image')" - ] - }, - { - "cell_type": "markdown", - "id": "hollow-grain", - "metadata": {}, - "source": [ - "3. Visualize the result of augmentation." - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "stable-leonard", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQEAAAENCAYAAAAPLtCGAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAy1UlEQVR4nO2da4yk6VXf/6fu9da1q7tndr027BoMikNEbK2MJQhBOEHGGBYUyzEhiW0sWZFsLsERrOEDfADJDg7giAiyiU0WYlg7XMJKgYBxDE6keMN6MfiOl2WNd5ndme7put+rnnyo+j9z6p3u6eq6V9f5SaV+q7qn6+l36jnPuR9xzsEwjN0lsu4FGIaxXkwIGMaOY0LAMHYcEwKGseOYEDCMHceEgGHsOCYEDGPHMSFgTIWIPC0i7ozHc+tenzE7sXUvwNgqKgB+4ZTX6yteh7FAxDIGjWkQkacBwDl373pXYiwaMwcMY8cxc8C4CEkR+ecAvgJAA8BfAPiYc26w3mUZ82DmgDEVY3PgK0/51l8DeLNz7k9WuyJjUZg5YEzLrwB4FYC7AGQA/D0A/xHAvQB+X0S+fn1LM+bBNAFjLkTkPQDeAeC/O+e+Z93rMS6OCQFjLkTkqwF8EcBN59z+utdjXBwzB4x5uTH+mlnrKoyZMSFgzMsrx1+fWusqjJkxIWCci4j8HRG57aQXkXsB/OL46X9d6aKMhWF5AsY0/FMA7xCRjwH4EoAagK8C8B0AUgB+D8B71rc8Yx5MCBjT8FEAXwvgZQC+ESP7vwzg/wD4NQC/5szDvLVYdMAwdhzzCRjGjmNCwDB2nKUJARF5tYh8QUSeFJEHl/U+hmHMx1J8AiISBfCXAP4xgGcA/CmA73XOfXbhb2YYxlwsKzrwCgBPOueeAgAReQTAAwBOFQIiYt5Jw1g+R865w/CLyzIH7gHwZfX8mfFrHhF5q4g8LiKPL2kNhmFM8qXTXlxbnoBz7iEADwGmCRjGOlmWJvAsgBep5y8cv2YYxoaxLCHwpwBeIiL3iUgCwBsAPLqk9zIMYw6WYg445/oi8nYAfwAgCuD9zrnPLOO9DMOYj41IGzafgGGshE845+4Pv2gZg4ax45gQMIwdx4SAYew4JgQMY8cxIWAYO44JAcPYcUwIGMaOY0LAMHYcEwKGseOYEDCMHceEgGHsOCYEDGPHMSFgGDuOCQHD2
HFMCBjGjmNCwDB2HBMChrHjmBAwjB3HhIBh7DgmBAxjx5lZCIjIi0TkoyLyWRH5jIj80Pj1koh8WES+OP66t7jlGoaxaObRBPoA3uGceymAVwJ4m4i8FMCDAD7inHsJgI+MnxuGsaHMLAScc9ecc0+Mr2sAPofRvMEHADw8/rGHAXz3nGs0DGOJLGT4iIjcC+BlAB4DcNU5d238recAXD3j37wVwFsX8f6GYczO3I5BEckC+C0AP+ycq+rvudFkk1MHizjnHnLO3X/aMATDMFbHXEJAROIYCYAPOOd+e/zy8yJy9/j7dwO4Pt8SDcNYJvNEBwTA+wB8zjn3c+pbjwJ44/j6jQB+d/blGYaxbGaeRSgi3wTgfwP4FIDh+OUfx8gv8CEAXwHgSwBe75y7ec7vslmEhrF8Tp1FaANJDWN3sIGkhmHczkJChMbuMXIJ3X5tbC7D4fDU100IGFPBjS4iiEQiE1/1IxIx5XJTqdVqp75uQsCYmng8jlgshmg0ett1JBJBLBZDLGYfqU3l85///Kmv2/+YMRUiglgshng8jng8jmQy6QVBKpVCLBZDIpFAPB5f91KNC2JCwDgXqvnc6MlkEplMBslkEolEAplMBqlUyn/P2C5MCBjnQiGQSCSQSqUQBAFyuRyCIEA6nUY+n/eCIJPJrHu5xgUxIWBMhYh4+z8ej/sNn8lkUCgUkMvlkEqlkMvl1r1U44KYEDCmQvsEUqkUstkscrkcMpkMSqUScrkcstks8vn8updqXBATAsa50BzQGkChUECxWEShUMCVK1e8INjbs0ZS24YJAWMqwppAEAReGygWiyiVSigUCiiVSuteqnFBTAgY58JEIB0dCIIAmUwGuVwOhUIBe3t7KBaLODw8XPdyjQtiQmBD4EY773odMBdAOwPz+TyKxSL29vZweHiIK1euYG9vD1euXFnLGo3ZMSGwAUSjUUSjUUQikYlrZuTxdWA9giAajSKVSuHg4MCf+HQKptNpnyQUjUatjmALMSGwAUSjUZ9ym0gk/HUsFkMymfShuXVqAslkEsViEfl8fiJHIJVKeQFgdQPbiQmBDSAajSKRSCCRSExsLJ2Jl0qlJkyDVRKJRJBMJnF4eIh8Pu99ANQGmDZMbcXYLkwIrBntcKPNHQSBj8VzkwVBsLZNRk2kVCr5iADzAoIg8OaAaQLbiQmBDUDn5XPDp9PpiY2WzWYRiUTWstGYI1AsFv1aKKhYSGT+gO3FhMAGoAVANptFoVBAJpPx3vcgCFAoFNZWpkvHZC6XQzqdRjqdvk0QmDmwvZgQWDPMydfmAAXB/v4+Dg4OkM/nsbe3tzYhwDVS9U8mkz4qkEwmvRAwc2A7mftTJSJRAI8DeNY591oRuQ/AIwD2AXwCwL9wznXnfZ/LjG7IwTJdnYjDTLx1RgjoHNQJQywmsujAdrOIo+WHMJpDyMqRdwP4eefcIyLyywDeAuCXFvA+lxZurHQ6fVsWHmPzh4eHSCQSaxMC1Aao9uuHzmcwto+5hICIvBDAdwD4GQA/Mh5I8q0A/tn4Rx4G8FMwIeAJb2JqAclk0pfiagFw9epV7O/v4+rVq0gmk2t1vp3WXHTd2YzG/MyrCfwCgB8FwCLyfQBl51x//PwZjCYV38YuDCSlN59VeKddx2Ixn4BDx5tOxGH3HqrdhrFoZhYCIvJaANedc58QkW+56L93zj0E4KHx77p0w0e4wbX6nEgkJlRomgH7+/solUrY29vzkQEKAYbe7KQ1lsU8msA3AvguEXkNgBRGPoH3AiiKSGysDbwQwLPzL3P7oJrP7D961E+7ZvFNPp9HqVTymoHOHjSMZTGzJ8c5907n3Audc/cCeAOA/+Wc+z4AHwXwuvGP7fRAUmbacdOz8w5DfgcHBzg8PESpVPINOpgglE6nJ+LvpgkYy2IZgecfA/CIiPw0gD/DaHLxzqHj/9rzT5tfF+EwDz+bzU4k4bCYiP4Dw1gGCxECzrk/BvDH4+unA
LxiEb93mxERrwVQA2D2H9V+1gbk83kvECgImD5Mp6AJAWNZWMbgEtCdeBj64+nPEODBwYF/ziIhduxhAw8tAEwIGMvChMCS0M05wynB7MpTKBSQz+e9/c/+fbxmL4FYLGZCwFgaJgSWBM0BrQWwIGh/fx933323dwgydEihwTRcnY1nQsBYFiYEloTuE6AjA+zMe+XKFZ8SHE4kCk/5NQFgLBMTAkuCG1if6Cy8SaVSPkoQbhZiG34zcc75R/g5X9tWTAisgHDn4NNOfmMzcc5hMBhgMBig1+thOBxiOByi3++j3+/76+FwuLXCwITAirCNvp1wk/d6PXQ6HXS7XfR6PbTbbfR6PfT7fbTbbfT7fRMChnEZcc55QdBut9HtdtHtdtFoNLxQaLVa6Ha7GA6H617uTJgQWBHbekrsOsPh0JsC7XbbP6rVKprNJjqdDur1OtrttgkB486YObCdUAvodrtoNptoNBpoNBool8uo1Wr+ut1uYzAYrHu5M2FCwDDuAM0B+gTa7TaazSZqtRqq1SpqtRpu3ryJVquFfr9//i/cQEwIGMYdYHSAQoCaQLVaRblcRqVSwdHREer1ugkBw7iMDIdDdLtdLwDq9TrK5TKOj49xdHSEcrmMa9euoVarodPprHu5M2FCwDDuAJOBqAm0Wq0JTeDk5ARHR0eoVqvodrezqbYJgSVBW5LXfK6TTeh51pzWzNO4RTjKcqeoyyLuJf/fBoMBut0u2u22FwT1eh3VahWVSgXlctmEgHE7VCW1M4kFQkEQwDmHbrfrG4foNt6sPQiPJt9FdNYeNySv9XPgVjMXfT95L/naRbI0wwJcZw/2ej2fN8CcgW3EhMCScM6h3+97j3Kj0UCtVvN1BOl02gsJthFjfUE8Hr/tepfTi7kJu92uT9ftdDr+mhtyOBz6bk6xWMyXcet7C8D6M4QwIbAkwplm1Ab44Uyn097W1B2EdG8B4FYhknNuZz+41AQYr9dZer1eD61WC51OB8Ph0Pdh0MVafI01G5a4NYkJgSXBDd5utxGLxVCr1ZBIJPypNRgMUKvVJhqLsvcAOw0Nh0P/weUptouEhWmr1UK9Xker1UK73fbXg8EAiUTC3z+WcKdSKQwGA38v1zXTcVOxu7EEaEfyg6vnDLDoZDgcej9BoVDwgoCFKPzQUp3d5dNLm1baO99oNNBsNlGpVFCv1zEYDPwUJ45Q570E4DWsXb6XpzHvGLIigP8M4OsAOADfD+ALAD4I4F4ATwN4vXPuZJ732Ua0CtvpdLxtTwcTAO8voMOp3+9PdBLiCbbNZaqL4E5ZexQClUoFg8HAm1lM3InH475j07aX/C6LeTWB9wL4n86514lIAkAA4McBfMQ59y4ReRDAgxi1Id8pKAQ6nY7/0A2HQ6/Otttt33uw0+n4D7d2WrHxaDKZXOefsnb0vaSTlRu/Vqvhxo0bKJfL6PV6yGQy6Ha7yOVy3kcAjIa+0rG4rYU+y2KeMWQFAN8M4E0AMB4/3hWRBwB8y/jHHsaoFfnOCQGeXCLiw0oMB8bjcVSrVR8u1BVqOqyVTqe9/2CXTy+dtddsNn3W3snJCcrlMp577jkcHR2h1+shl8t5M8w5530p9MfMcy8v6//BPJrAfQBuAPgVEfl6AJ/AaEz5VefctfHPPAfg6mn/eBcGkupuNFT3GaummkovNjc/7dlEIuG939vcsGIRUIUPmwNM1qFA4EnPEe6pVArNZtPfS9MCTmceIRAD8HIAP+Cce0xE3ouR6u9xzrmzho1e9oGktEuHw6FX73UTUT2VWLcUz2Qy3gRgxxpugl0OE4Ydg7VazWfqHR8f4/r1615A0ARIJpMoFAr+XlITuGjJLwXwZb338wiBZwA845x7bPz8NzESAs+LyN3OuWsicjeA6/MuclvRnmmN/jBFo1HU63V/+lerVaTTaUQiEd+M1DmHSCSCIAh8mCvcp5AmxDZlFlK4UUWnxhROsa7X67h586ZX/+kPq
FQqqFarqFarqNfr6Ha7iEajPjMzCAKUy2XvGMxmsxAROOfQbrenWiM1jXK57CMS7CrEhKVt1y5mFgLOuedE5Msi8rXOuS8AeBWAz44fbwTwLuz4QNKz0Kq9zkvnSUcVtlqtIpPJ+BkGnU7HRw/0Vz0CfZsyCykA2JqL5lO/3/fmU7/f9z4A1u/X6/WJ9l7ciPp3aLOBSUOVSsULyVarde59cs6hXq97YcP3ZXKSDvdus7k2b3TgBwB8YBwZeArAmzGadPwhEXkLgC8BeP2c73Hp0aFEer+j0aifRsTTMZPJeGciM+E4/pzmAn0L24Bu4kn/h97YFIzaEcjN2Gq1/GbUG1GHEmk6hFO1e70eUqnUVGuk6XHz5k0vhBqNxkSj0W0WAMCcQsA590kA95/yrVfN83t3iXAiDOsL+OHq9/uo1Wo+u5BzCpkVx8zC4XDo02W3xXfAU1tv6Gaz6Xv3cRMzOejo6Mh39KlUKv5U1q3AGUpkVmEymfSahT7Zpw27sqVYtVrFjRs3/P9FrVbzqcvbLggsY3ADCHeuicfjPmVYRHy+fLfb9anFuhxZV81t04dRmwPMnaC6z3RgJgRRG6jX6xMpw51OZyKhSt9LChHgVnUhqzoZQThvfeFIBE0Cdh6mANqm+x7GhMAGoNVifvjpDATgP+z9fh+ZTAa9Xm+iECaRSCCRSGxVPkG4KCjs+W82m94px9d53Ww2vRDQKrku2grfSzrvmI/BCMJ56OIvrqvZbHohsNOOQWNx0BwARieWzixsNBpIJpNIp9Oo1+t+tDlt216vN1FyzFNpW8wBagE89U9OTnB8fOxP/qOjo4l7Qb8JtQWexhSANAf0e9DP0mw2vZ9lWr8J7zHfU78/tTPTBIy50CosMBICTHhhm2smFDETbjAYIB6P+/Bgu91GOp3eKk0AmEwH5gnP2H+1WsXNmzfx/PPP+83ebDa95qAFgNYE9L0EbgkanugUmNSyzkNrK3r4CB2YJgSMhcAPLj9I2mOuG2JEIhEMBgNEo1EfKUgkEt5U2DYhcF7bLvbv42bjhuem5N+sfQI0B8JCge+hw6nToEOVp72/mQPGQtCDLbvd7kQSkL6m6sySWTq7isWij1tv06mkBYBOAT46OsLNmzdx7do1PPvss7c5/3QikX7O38kszW63e+p9vGh7sfPed9snE5sQ2ADCH2Q9xVh/DYIAsVgMyWQStVoNyWTSX6fTacTjcdRqNfT7/alOujulM4ev1800m0xvRhZunXUvZ33/s75uMyYENgT9YTrrg6XtUXqs0+m0V6FZncjowXno+gWekOEmncvMQtQpz0yCoonDLD+mUFP1D3drDjtCL+MmXTYmBLYIfviZVMNOOTdv3oSI+NLlab3fegPSYaabc7I3H4ClZCHyvZn70Ov1UCgUfA0AYSiQBVW60Sj/ZkYIjItjQmCL0Ikw9XrdbxR2zWk2m+j1elMLAZ70QRAglUohkUhMXKfTaa8lLAP2+2MGJPsGMPuRTjx69uv1+sTfT82IDsBt8odsEiYEtgidCMOwGmsMaAOzR8E0G5enPWPh4Xz6eDyOwWCwtMacFALUBk5LhnLO+eSgaDTqPfx6sAsrKI3ZMCGwRYTNAW6UaDTqc/D7/b7PITgPqv8si2XZsv4+i24WDR2OdHTyfajSa/OE+fu8bjabE6p/u93equrJTcOEwBYRNgeYPNTr9VCr1ZBKpXwB0jQwutBut5HP55HNZic2fCKR8EJlGYRnMFD9TyaT6HQ6yOfzKBQKPpU4mUz6zELWVrBr87JMll3AhMAWEa6X52scZ0bbWXcquhPa7gdGqjUrE2Ox2NJr5ZnnkEgk/KYGMFFGTS2B7drZFKTRaExMbzJNYHZMCGwRNAeoSjPZRmfDcdLONDAEp4VGEARIp9MTjTmXlRFHnwAr+lj/wBTpbreLbDbrnaCc9TccDlGtVr0ZYwJgPkwIbBG6SIbOMx3T19lw02yKZDLpB3TQ4cjTNxaLI
ZfLLTUVmQ69SCTim6cwSqCHf9brdWQyGTjnfN5/o9HwgoLREBMEs2FCYIvQoTBmw531mAba4txk6XT6tsq8VZgDVPE5HUg/aCa0221kMhlfaESThVqMCYDZMSGwRSw6C449C9ivQPfNY+HMMuPu3LjnOTKHw6HPX2CCFB+7PrZ9EZhL1TB2HBMCxqXATILZmUsIiMi/FpHPiMinReQ3RCQlIveJyGMi8qSIfHDcidgwjA1lZiEgIvcA+EEA9zvnvg5AFMAbALwbwM87574awAmAtyxioYZhLId5zYEYgLSIxDCaSHwNwLdiNI0IGA0k/e4538MwzmXbG3usk5mFgHPuWQDvAfA3GG3+CkZDScvOuf74x54BcM9p/15E3ioij4vI47OuwVgetqF2h3nMgT0AD2A0nfgFADIAXj3tv3fOPeScu985d9rwEsMwVsQ85sA/AvDXzrkbzrkegN8G8I0AimPzAABeCODZOddoGOdi0YHZmSdZ6G8AvFJEAgAtjEaPPQ7gowBeB+AR2EDSrUFn6OnmmbqbLwlvtlVsPt0jMDyNWbdEW2Y1YdhEuiwm0zxTiR8Tkd8E8ASAPoA/A/AQgP8B4BER+enxa+9bxEKN5aA3v970fLDhCCv4ANyWnnzRdOVZ0SPZuflZjsz+A8vYmDqN+U7Pt5V5B5L+JICfDL38FIBXzPN7jdXAza8HeHJYp+7UywGg8Xjcbz5eh0ejLyOFN9yGrNVqYW9vz89hZM8Dth9bJLpWQ6dUs7aC19vc2sxqB3YYPayD5cgcvhmNRn1FoR6GytJfVhrymprAMoQAfy9P/CAIkMlk0Ol0MBwOJ2Yx9Pv983/hBdDDRlhbwX4OvF5mufUqMCGww9xJCHDjsa8f249x0wdBgGQy6av8+FgGWgiwB0I2m/UtxjhkhO3VFom+N41GwwsACj3n3ESzk23EhMAOo82BZrMJAN6ubrVaE70HM5kMCoUC0uk0UqkUCoWCb0Ci7fRlwIYjnLdIwUUTIZlMIpfL+TmNi0TPQaxWq77pqW5kwtLrbcWEwA5DIcBZfZFIxGsBejRaEAT+lM1kMshmsxMbnhtxWSoxIwGJRAKpVMqvmcKB3+t2u0sRAq1WC4nEqASGEQg9C3HbOxuZENhhwuYAAN+kgxuKzUzZ05DCQTsJ6TBbphCgOUBh0+/3EYlEJoQAHXSLhENSo9HoRJvzVqvlfSUmBIytRZsDwK0NzhHe7GCcTqcRBAE6nQ4KhQLa7faEJsCuRMuaAMToAPsnMizIqUX5fN5PJFq0Xc5Ox5lMxr83/SQ0A7a9qYkJgR2GeQF0pvGaIT89liwIAj8TEIAfjR6Px73WsMw2ZIxE6NyAdDo9MTJ8GWugIIxGoz4aMRgMUK1W0Wq1bvMPbCMmBHYYJrnwBGXMOzyUNB6Po91uI51O+xbhjUYD6XQa6XR66WEy7RMYDAZ+TeHhpMtI2qHZ0+/3Ua1WfVv3VCo10eLMhICxtXAj3cm7HYvFfBtyTgDO5/MTw0voKAsLgkWkGOtw5arJZDJ+zFuz2fQaR7lcRq1W88NbwyHSbUoxNiFgTAXj4fSWcwNEo1FkMhmIiLfLtSahNwjt6W1qDqp9EOl0GrlcDt1uF8Vi0f+t3W7Xd2rWrdJ13QVNrU3MLDQhYJyLNhsYSWDr70QigWq16tVzeuy1T4HX/D3rOtVngaYIHZMco05hwIxFTk9mFiF9FTrKsiyTZV5MCBhTQdtbZxVyfBlt9Var5acbM7OQD8b3E4mEj7lvC/SDUAD0ej0Ui0U/P3E4HHpNgA+mGTOUSC1pE9OLTQgY5xJOKmq1WqhWqwDgowXtdttn1HFGQBAEyGaz3nlItkULAOBNmkQi4acgcQQcw5TJZNJHCxqNBur1uheWDCcCmCg62iRMCBhTweiBPuGYQMPpwFSP2+02giDwlYjcOCw62sTT8Czov2CiEh2DrFegNhSNRpFOp320gOPhefpzbNwmRhFMCBjnon0Cd
A42m03/GoAJ5xgTiigEhsMhRMRHGDbtJLwT9AmweIn2/HA49LULdBrSR8KfB+CzGBl6NSFgbC0cfw7An+SsrKvX636DB0GAg4MDZLNZtNvtiSq7IAj8iUrtYNPhhh4MBgiCwDsIk8kkOp2OjxQ0Gg00Gg3/N9ZqNZ9fwHvASMmmYULAmAqe+rqbEMtq6SeIx+M+g46NPoIg8KryWbkEm4zWBNjEhKFO9i+gD4TOTwrLbreLWq2Gdru90UlFJgSMc9GFRgB83JuhPobIIpEIUqmU9xHQmUZhwEYc2yYEtCNTp1LTydfv9332JCMozjmfWUhfiJkDxlZDlTbcazD8lc4zesSZbRfOLNwWdK8EHdoMx/vZno0CMBaLod/vo1arodvt+pCqmQPGpeCsEemRSMSfjLpTUSqVQq1WQ6VS8ScizQSq2+EHT+BN2TTnpT/rUCIrL9l7gRGVWq3meyCw2IlRFyYSUctaZVLRuUJARN4P4LUAro9nDkJESgA+COBeAE8DeL1z7kRGd+a9AF4DoAngTc65J5azdGPT0B9i1tozs7DRaKBarXohwHDaWY1LqYJvyzwBnRqtBUE2m/URlWKx6IVAuFGpNi2AWybXKphGE/gvAH4RwK+q1x4E8BHn3LtE5MHx8x8D8O0AXjJ+fAOAXxp/NXYIhhJZY8ANwrLjZrOJXq+HZDLpQ29nXW+qHR2GmksikfDJUTqfgkItn8+jXq/77Mper4dms+l7E7C3AwXDKjhXCDjnPiYi94ZefgDAt4yvHwbwxxgJgQcA/Kob6TEfF5GiiNztnLu2sBUbG4vWBHQ+gRYC/KA75/yGZ1YhW53rGoNtSTEO1xjQHCgUCt68cc55E6HRaPg+joyqhJu7rKpv4aw+gatqYz8H4Or4+h4AX1Y/x4GktwkBEXkrgLfO+P7GhqIbldAc0PayTptlRyJdZceNxP4B20S4DRojIvweIybUdNinAbjlX1lHUtHcjkHnnBORC3swnHMPYTSxCLP8e2Nz0Y7Ber3uw4v9ft8nFzUaDWSzWT8/oNvtIggCv5FEZCJDb9PR5kAQBP5155zXDJLJJJrNJprNJk5OTlCr1dBoNCbGqjGywlyDVTCrEHiear6I3A3g+vj1ZwG8SP2cDSTdMZhURC0AuCUUGCpjtCCfzyOXy3ntoNvtTrQRY7bdNmQXhjsi685MbNkeBMFE92JWV9KMAkZZmNoxugpmFQKPYjRs9F2YHDr6KIC3i8gjGDkEK+YP2C3C5gDj5tQEEokEWq2Wry0YDAZ+jmAkEpnIJVhG49BloX0CiURiIneCGz6RSPg2bTppivej1+t5AbhR5oCI/AZGTsADEXkGo9mD7wLwIRF5C4AvAXj9+Md/D6Pw4JMYhQjfvIQ1GxuMrjakrc+wX9g/oB1jPPkYVlvGSLFlotOLAfi/mX4Q3WSEHYioLbAHgRYCq8yPmCY68L1nfOtVp/ysA/C2eRdlbDd6aKce2UXblyW4zWbTty/nyRcEAfL5vBcC25JirIVANBq9bdqzTr1mdISag25EwkKsbTAHDONMtB3PMmJgMr2YIUQ+2JCDiUV0ojUaDd/LT49ADz82IbOQfx+FANEZlizCYiViuAWb/jtXhQkBY2mclV4MwE850kIglUqhXq+jWq36Kj127tFNS3lNjYKn5iYJgjttYj3bgap/WKitEhMCxtqgrcxhn5FIxG943aCTXnR9ctLRxut1baBZ2aSuwyYEjLVBx2Gn05loYKrnG7A8maW6VKM5LyFc478tnKa1rEuAmRAw1gYdZTrPniW3OjLAEei5XM437wBumRnsVLQN+QQavdZ1mjImBIy1oMeeMUGGKjKzB5vNJmq1mu9aXCwWkclkfNdf/jy1Azb93EbWaR5s710zth4dMmMbbwAT49DZuZjdehh+1M7CZc5BXCVmDhg7h04s0uqwTjNm9IB9/oGR6qxTb3V68bZi5oCxk+j+/Qyb0TTgI5lMIpPJoNFoeFNhM
BgglUr52Pq29S0Eblf/zRwwdhKdTad7+enraDSKbDbrQ4XAKBknn89PNPfcpJDbNGxS52ETAsba0NmEHFRy2mM4HCKZTPoKxGaziVar5R8sRb5IEw69AXV+wTTJPotAZzrqHgr0czAfIpxwddbXeTAhYKwV+gXuRCQS8cM9mCRUrVZ9RKBcLiORSKDX63m/wZ0Ib8DTNBDd12CRhDe+HnSay+VQr9dRKBRuG2d+2oMO0XlNIRMCxsbDqICuMahUKn4WIvMKMpnMVGFCphvrSj99+vJ5eObAomCzUTYbyeVyE0NLOeU4lUpNlFVzsjEdpxyBPm8DEhMCxsajpyIzbNhoNPzcPzYkpVPxPFjjz7RjpiQzykBP/TLyDqiFsA0ZG5NmMhmfPcloiYj4hqSdTgexWMxvfl2ZyP6Fs2JCwNh4qAloAcAe/nogKmcFTgMzDxl9CIJgIl0ZuOW4XIZJoDWBTCbjpxZzM1MT4JhzplXzWpsE82JCwNh4mFTE3gMA/KnIk5L+gmnUdw5HZaPTQqGATqfjewMyKrGssKOecjwYDJDNZgHA+zuSySRarRaKxSIqlYqvqeA1h5fopq3zYELA2Hh0y7JOp4NoNIpGozExrafb7Xrz4DxExE8GoupN9TuRSHi7exlCQPcc0F2J2WKd5gKLpehA1D0X2bU4Ho/7bsbzYELA2Hj4wWdiEXD7VGSekNOo7sw45L/Vw0LpjEskEkvTBOiYpPnBVmPc2IlEAp1OB5lMxmsNHNxCM4Ddh+jANJ+AcamhU7Ddbk+kFGtv/kUadEYiERQKBZRKJTQaDb+J2PQ0k8kstd257kXIjcx2avzb+PdyOMvJyYnPpQDgh5cwqjAPJgSMjUfXGNAsYMsx3ddv2iw87VAcDod+01O9ZlhuGY5B/i46MHW+QHg2IUOALK7ihGNqKrpB6zzMOpD0ZwF8J4AugL8C8GbnXHn8vXcCeAuAAYAfdM79wdyrNHaK8KbTJ7X2D+hw20U6C+n24LFYzJ+qOi7P91oWXDNLqPVUYl73+320Wi2vCZXLZb9O5jKsRAjg9IGkHwbwTudcX0TeDeCdAH5MRF4K4A0A/i6AFwD4IxH5GufcaiYrGluF7kCsnXPh62k39rYwTXNUCrtKpYJOp4Nms+nDhjR9FpXRONNAUufcH6qnHwfwuvH1AwAecc51APy1iDwJ4BUA/u/cKzUuHUzRZbfd8DVP6mX0DiyVStjf38fe3h7y+bzPFWCYju+7Tu70dy+y9HgRPoHvB/DB8fU9GAkFwoGktyE2kHTnoRCgfasbiLKMeFF2r0ZEUCwWUSwWkc/n/UxEhuXCXYAvO3MJARH5CQB9AB+46L+1gaSGTprh0A1uxng87k/nZWzGfD6PQqGAbDaLg4MDFItFLwwoCHZBAABzCAEReRNGDsNXuVseFBtIakxNOGEmlUohl8shm836dNp8Pr/wrjvRaBSZTMZvegoAvu+yNJBNZSYhICKvBvCjAP6hc66pvvUogF8XkZ/DyDH4EgD/b+5VGpcOOsZoDnC2gN6cuVwOxWJx4acyM/IYGmTzUoYJdfLQLjDrQNJ3AkgC+PD4Rn3cOfevnHOfEZEPAfgsRmbC2ywyYJwFw3Q0B7jp9/b2fHfh/f39pQgBPdBEFxDRHOCU5F0QBLMOJH3fHX7+ZwD8zDyLMnYD7RPIZrNeCOzv7yOfz2N/fx9XrlyZOh14WljFpycYcePr600Ya7YKLGPQWBvaHKA2QDMgn8+jWCyiVCohkUgsXAjoWYCMRuihoGYOGMaS0Y014vE40um0d85x8x8eHuKuu+7yswYXiU5ICrcW04lK62YVzVNNCBhTo1Nzz5qkO+3GiUQi3glYKBSQz+eRz+dRKpWwt7eHvb09lEollEolpFKppW7I09KUlw07A+lJStzwOm2YjUR0M1XWTiwqtdmEgDEV+sTUY8J1p1x+b9rfx2QdhgUZJmRqLLWEy6aaMyWYjUJPu2Z9RLVaRbVaR
b1e912Ww0VO82JCwJgKbUfrZpz6OQXCeVCL2Nvb85l7uVwOuVxu41J3Fw1PfJ7munyYlYPc4N1uF8fHxyiXyyiXy6jX677YiRqBdRYyVgLVf9btM6zGaxa20NM/DZFIxJsAzNhjCi8FwTYPGL0TzrmJpqnsosx2aXyt3W7jxo0bqNVqKJfLOD4+RqVSQbVa9YLAhICxMnQ3HHryufn1CZ5Op6f+fTQDwjn8WhO4jLF63R+BXZEajYZvosrrZrOJ4+Nj31g1bBJQCMzrFzAhYJxLuE227pIbBIHPwedr0/7OIAgQBIH3CwRB4NN2WS572dDmALUBzlJotVqoVCqo1+totVqo1+s4OTnx1xQC7XZ7ou/BvJgQMKaC5gCn5dCjn81mfXIPc/2nhU5A7RSkYLjMRTzsl9hqtdBsNlGtVr3NXy6XcfPmTX/612o1bxroa5oP7JA0DyYEjHOhJkAnIDcrw3sM6VG1n/Z30segm2VQMFzGqAChJkBBoDf8yckJjo+PvR9ARwOazSa63S46nc6EOTAvJgSMqaA5QEHAvv3ZbNZn9+VyOZRKpal/H4t1wj0EdOThskH7nZEANhSlIKBWUC6XcXJy4v0GnLtAMyLcB3EeLt9dNpYCNy3Vd53We+XKFVy5cgXFYhEHBwdT/06ddxC+vsxNPfRItUaj4Tf+yckJrl+/jr/927/10QBudJ0/EM4tmBcTApeYcHsq3bP/IpuLJzSjAjQF2JiD2X17e3s4PDycaY3htenn24TO/ONz/TrNANr1nC6kNQDmBty4ceO2f3/W83kwIXAJCaf2nvb1Iqes7v7DiAA9+frB0txdJdwtWI8R53N2EGZYsNls+rAfBQOdf4uYLjQNJgQuITqcp0dw09nGa/7ceVATuOuuu7wJcFpiz2UM6U2LnpzMTEDa/EwBpi3farVw/fp1HB0doVwu+7Agw3+LCv1NiwmBS0i4RJdeeJ7U7N9Hu/s8+LuuXLmCQqEwkeobbsKxqzABiI4+evQZ06d3n3kB169fx8nJCSqVis8CZJLQomz9aTEhcAnRnnw2yWBYj8k42Wx26tObQoC9+MKJPXoC0K6ibX4d0uPGb7fbfsx4q9XyAoBJQDQBqEWsooSYmBC4hJzVrCOfz/ueerlcbur8fHYA0g05dZtuneK7q2hzgKc9E4H0NX0BJycnviBImwJmDhgLQffu06m9pVLJCwB27OFgzDvBRKF8Pu+dgBQCNDFMCNyqB2ACULVancj+4zWf0yGotYFOp7O0sehnYULgEqJr/pmRx7BeLpfzAoGZeedB84KbPpFIIJvNegHAPP9dFgIAfPyevgCG/+r1OiqVCk5OTlCr1fzpT38BfQHtdts7EjdKCJw2kFR97x0A3gPg0Dl3JKOY03sBvAZAE8CbnHNPLH7Zxp1gGFCn+NIcoGPv4ODAq/LT/j4OBdEOR91bYJeFQNgnQCHALMCTkxMcHR15ByBLgbXzMJwJuCpmHUgKEXkRgG8D8Dfq5W/HaNbASwB8A4BfGn81VgjNAdb9s4vvwcEB9vb2sL+/j3vuucd79s+D+QS6CSedigxF7romcCdzgALgmWee8Z2Cwh2E9PXG+QROG0g65ucxGkDyu+q1BwD86ngi0cdFpCgidzvnri1ktTvGrNlyeuZ9uIsvM/z29/e9ej/tOrjJWdgTTkLatuy+eQh772kK0DFIByCzAWkOlMtl1Gq1ib6COqFI9xxcFbNOIHoAwLPOuT8P/cffA+DL6jkHkt4mBGwg6SSnpcrOurF0P30tBFjwQ2EwrRAwcGrKLq8B+M2vuwVRELBCkKZBrVZb299xGhcWAiISAPhxjEyBmbGBpLcIq9a0r5nZN23vPlIoFJDL5XD16lVvAugZf+wHuEsn96zwlNZqur7m6c8N//zzz+P69eu+DqBWq/lU4FXb+tMyiybwVQDuA0At4IUAnhCRV8AGks6EHoDB5J7TSmyn3bTc8KVSaaKjr07uMSEwHToTk
DX8zAqkE4/VgI1GA8fHxzg5OfEnPuP/i2oFtgwuLAScc58CcIXPReRpAPePowOPAni7iDyCkUOwYv6A89HhPDbV0Ko8X5/W8UbVnxoBcwXCXXxNCJwPhQDr+bXNrzMDKQTK5fJEi3Dt+V+1rT8tMw0kdc6dNYvw9zAKDz6JUYjwzQta56WFMXjG83UmHjPzKAymbbfFhB6GAykQ9ORd0wSmg6aAjv23Wi0f72fbL256OgCZH8DX15EENC2zDiTV379XXTsAb5t/WbsFtQDm9DM3v1AooFAo+ESfaYUANQnW/NMxqNt4XdbWXYtGJwDpE1+n/Wrbn9EA1g0wJ6DT6Vwec8BYPMzwo0ef9fr04nMzTysEWDVIs4CaBAXAZW3guQx0H4BwJiDt/ps3b/oOweGW4ToV2ISAcSrMy9eVfpzGUyqVcHBwgHw+j729valSfAH48V38nRz4qcdumyYwHfQHUBPQXYDoALx+/brXBBgipMDgNTWBrTQHjOVDLYCDPGjLHx4e4urVqygUCjg4OJh6RLfWLHQjEWoBl7l/3zLQmYAUAsfHx74N2LVr11CpVCYagepGojor0DQB41R0mq/O9Q9n+F1ECNDhyBNfN+/cxQy/WdHRAV0TwKIgnQmoT/twM1A+NyFg3IY2BxKJxG3Vfvv7+9jf38fVq1d9ks+s72NcHG0O0ObXJsHx8TFu3LiBk5OT23oCbuKGPw0TAhfktLbYWsXWp+80RKNRvOAFL/CZfXt7e366T7h1l6nw56NPX6rgOuuPz6fdoGz9dXR05BOBKpXKhA9gk3MApsGEwAVgsQzTefVYbt3O6yLe91gs5kd06+ad2pNv6vt0aE9+eEgHT/OL1uvrgSDMDdCJQosaALJOTAhcEJ3cw0IdZvXxmgJiGiKRyMTpz/x+LQTMkz89Wn1neE53+dWCYBqo/vP0P00ILKr//7qQTVi8iNwA0ABwtO61KA5g6zmPTVuTrefOfKVz7rbpMBshBABARB53zt2/7nUQW8/5bNqabD2zsbutYAzDAGBCwDB2nk0SAg+tewEhbD3ns2lrsvXMwMb4BAzDWA+bpAkYhrEGTAgYxo6zdiEgIq8WkS+IyJMi8uCa1vAiEfmoiHxWRD4jIj80fv2nRORZEfnk+PGaFa7paRH51Ph9Hx+/VhKRD4vIF8df91a0lq9V9+CTIlIVkR9e9f0RkfeLyHUR+bR67dR7IiP+/fhz9Rci8vIVrednReTz4/f8HREpjl+/V0Ra6l798qLXMzPMdlrHA0AUwF8BeDGABIA/B/DSNazjbgAvH1/nAPwlgJcC+CkA/2ZN9+ZpAAeh1/4tgAfH1w8CePea/s+eA/CVq74/AL4ZwMsBfPq8e4JRm7vfByAAXgngsRWt59sAxMbX71bruVf/3CY91q0JvALAk865p5xzXQCPYDTAZKU456658bg051wNwOcwmpewaTwA4OHx9cMAvnsNa3gVgL9yzn1p1W/snPsYgJuhl8+6J34QjnPu4wCKInL3stfjnPtD51x//PTjGHXc3mjWLQTOGlayNmQ0bellAB4bv/T2sWr3/lWp32McgD8UkU+MB7UAwFV3q3vzcwCurnA95A0AfkM9X9f9IWfdk034bH0/RtoIuU9E/kxE/kRE/sGK13Im6xYCG4WIZAH8FoAfds5VMZql+FUA/j5GU5T+3QqX803OuZdjNN/xbSLyzfqbbqRjrjS+KyIJAN8F4L+NX1rn/bmNddyTsxCRnwDQB/CB8UvXAHyFc+5lAH4EwK+LSH5d69OsWwhszLASEYljJAA+4Jz7bQBwzj3vnBs454YA/hNG5stKcM49O/56HcDvjN/7eaq046/XV7WeMd8O4Ann3PPjta3t/ijOuidr+2yJyJswmuT9fWPBBOdcxzl3PL7+BEa+sK9ZxXrOY91C4E8BvERE7hufMm8A8OiqFyGjOt33Aficc+7n1OvahvweAJ8O/9slrScjIjleY+Rs+jRG9+aN4
x97IyaHwa6C74UyBdZ1f0KcdU8eBfAvx1GCV2JFg3BE5NUYDer9LudcU71+KCLR8fWLMZrc/dSy1zMV6/ZMYuTF/UuMJONPrGkN34SRGvkXAD45frwGwK8B+NT49UcB3L2i9bwYo0jJnwP4DO8LgH0AHwHwRQB/BKC0wnuUAXAMoKBeW+n9wUgAXQPQw8jGf8tZ9wSjqMB/GH+uPoXRlKxVrOdJjHwR/Bz98vhn/8n4//KTAJ4A8J2r/pyf9bC0YcPYcdZtDhiGsWZMCBjGjmNCwDB2HBMChrHjmBAwjB3HhIBh7DgmBAxjx/n/mXhiQaHJjeAAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - " mnist_it = ds4.create_dict_iterator()\n", - " data = next(mnist_it)\n", - " plt.imshow(data['image'].asnumpy().squeeze(), cmap=plt.cm.gray)\n", - " plt.title(data['label'].asnumpy(), fontsize=20)\n", - " plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "creative-liberal", - "metadata": {}, - "source": [ - "The original image is scaled up then randomly cropped to 150 x 150.\n", - "\n", - "## References\n", - "\n", - "[1] Y. LeCun, L. Bottou, Y. Bengio, and P. Haffner. [Gradient-based learning applied to document recognition](http://yann.lecun.com/exdb/publis/pdf/lecun-98.pdf)." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/tutorials/training/source_en/use/load_dataset_text.ipynb b/tutorials/training/source_en/use/load_dataset_text.ipynb deleted file mode 100644 index 4f39fe7e80ea8b75f8cbdb0c412aff4f092fbf4d..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/use/load_dataset_text.ipynb +++ /dev/null @@ -1,336 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "cellular-malpractice", - "metadata": {}, - "source": [ - "# Loading Text Dataset\n", - "\n", - "`Linux` `Ascend` `GPU` `CPU` `Data Preparation` `Beginner` `Intermediate` `Expert`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_en/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_en/use/load_dataset_text.ipynb)" - ] - }, - { - "cell_type": "markdown", - 
"id": "united-implementation", - "metadata": {}, - "source": [ - "## Overview\n", - "\n", - "The `mindspore.dataset` module provided by MindSpore enables users to customize their data fetching strategy from disk. At the same time, data processing and tokenization operators are applied to the data. Pipelined data processing produces a continuous flow of data to the training network, improving overall performance.\n", - "\n", - "In addition, MindSpore supports data loading in distributed scenarios. Users can define the number of shards while loading. For more details, see [Loading the Dataset in Data Parallel Mode](https://www.mindspore.cn/tutorial/training/en/master/advanced_use/distributed_training_ascend.html#loading-the-dataset-in-data-parallel-mode).\n", - "\n", - "This tutorial briefly demonstrates how to load and process text data using MindSpore.\n", - "\n", - "## Preparations\n", - "\n", - "1. Prepare the following text data." - ] - }, - { - "cell_type": "markdown", - "id": "green-characteristic", - "metadata": {}, - "source": [ - " Welcome to Beijing!\n", - "\n", - " 北京欢迎您!\n", - "\n", - " 我喜欢English!" - ] - }, - { - "cell_type": "markdown", - "id": "verified-oakland", - "metadata": {}, - "source": [ - "2. Create the `tokenizer.txt` file, copy the text data to the file, and save the file under `./datasets` directory. The directory structure is as follow." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "forbidden-positive", - "metadata": {}, - "outputs": [], - "source": [ - " import os\n", - "\n", - " if not os.path.exists('./datasets'):\n", - " os.mkdir('./datasets')\n", - " file_handle=open('./datasets/tokenizer.txt',mode='w')\n", - " file_handle.write('Welcome to Beijing \\n北京欢迎您! \\n我喜欢English! 
\\n')\n", - " file_handle.close()" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "geological-indicator", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets\n", - "├── MNIST_Data\n", - "│   ├── test\n", - "│   │   ├── t10k-images-idx3-ubyte\n", - "│   │   └── t10k-labels-idx1-ubyte\n", - "│   └── train\n", - "│   ├── train-images-idx3-ubyte\n", - "│   └── train-labels-idx1-ubyte\n", - "└── tokenizer.txt\n", - "\n", - "3 directories, 5 files\n" - ] - } - ], - "source": [ - " ! tree ./datasets" - ] - }, - { - "cell_type": "markdown", - "id": "continuous-cornell", - "metadata": {}, - "source": [ - "3. Import the `mindspore.dataset` and `mindspore.dataset.text` modules." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "handled-cooking", - "metadata": {}, - "outputs": [ - ], - "source": [ - " import mindspore.dataset as ds\n", - " import mindspore.dataset.text as text" - ] - }, - { - "cell_type": "markdown", - "id": "incorporated-glasgow", - "metadata": {}, - "source": [ - "## Loading Dataset\n", - "\n", - "MindSpore supports loading common datasets in the field of text processing that come in a variety of on-disk formats. Users can also implement custom dataset class to load customized data. For detailed loading methods of various datasets, please refer to the [Loading Dataset](https://www.mindspore.cn/doc/programming_guide/en/master/dataset_loading.html) chapter in the Programming Guide.\n", - "\n", - "The following tutorial demonstrates loading datasets using the `TextFileDataset` in the `mindspore.dataset` module.\n", - "\n", - "1. Configure the dataset directory as follows and create a dataset object." 
- ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "therapeutic-press", - "metadata": {}, - "outputs": [], - "source": [ - " DATA_FILE = \"./datasets/tokenizer.txt\"\n", - " dataset = ds.TextFileDataset(DATA_FILE, shuffle=False)" - ] - }, - { - "cell_type": "markdown", - "id": "split-mustang", - "metadata": {}, - "source": [ - "2. Create an iterator then obtain data through the iterator." - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "collect-consortium", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Welcome to Beijing \n", - "北京欢迎您! \n", - "我喜欢English! \n" - ] - } - ], - "source": [ - " for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))" - ] - }, - { - "cell_type": "markdown", - "id": "concrete-victor", - "metadata": {}, - "source": [ - "## Processing Data\n", - "\n", - "For the data processing operators currently supported by MindSpore and their detailed usage methods, please refer to the [Processing Data](https://www.mindspore.cn/doc/programming_guide/en/master/pipeline.html) chapter in the Programming Guide\n", - "\n", - "The following tutorial demonstrates how to construct a pipeline and perform operations such as `shuffle` and `RegexReplace` on the text dataset.\n", - "\n", - "1. Shuffle the dataset." - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "shaped-conditions", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "我喜欢English! \n", - "Welcome to Beijing \n", - "北京欢迎您! \n" - ] - } - ], - "source": [ - " ds.config.set_seed(58)\n", - " dataset = dataset.shuffle(buffer_size=3)\n", - "\n", - " for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))" - ] - }, - { - "cell_type": "markdown", - "id": "figured-interval", - "metadata": {}, - "source": [ - "2. Perform `RegexReplace` on the dataset." 
- ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "boring-result", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "我喜欢English! \n", - "Welcome to Shanghai \n", - "上海欢迎您! \n" - ] - } - ], - "source": [ - " replace_op1 = text.RegexReplace(\"Beijing\", \"Shanghai\")\n", - " replace_op2 = text.RegexReplace(\"北京\", \"上海\")\n", - " dataset = dataset.map(operations=replace_op1)\n", - " dataset = dataset.map(operations=replace_op2)\n", - "\n", - " for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))" - ] - }, - { - "cell_type": "markdown", - "id": "challenging-trunk", - "metadata": {}, - "source": [ - "## Tokenization\n", - "\n", - "For the data tokenization operators currently supported by MindSpore and their detailed usage methods, please refer to the [Tokenizer](https://www.mindspore.cn/doc/programming_guide/en/master/tokenizer.html) chapter in the Programming Guide.\n", - "\n", - "The following tutorial demonstrates how to use the `WhitespaceTokenizer` to tokenize words with space.\n", - "\n", - "1. Create a `tokenizer`." - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "respiratory-lighter", - "metadata": {}, - "outputs": [], - "source": [ - " tokenizer = text.WhitespaceTokenizer()" - ] - }, - { - "cell_type": "markdown", - "id": "accepting-arrest", - "metadata": {}, - "source": [ - "2. Apply the `tokenizer`." - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "human-enforcement", - "metadata": {}, - "outputs": [], - "source": [ - " dataset = dataset.map(operations=tokenizer)" - ] - }, - { - "cell_type": "markdown", - "id": "rapid-paragraph", - "metadata": {}, - "source": [ - "3. Create an iterator and obtain data through the iterator." 
- ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "external-athletics", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['我喜欢English!']\n", - "['Welcome', 'to', 'Shanghai']\n", - "['上海欢迎您!']\n" - ] - } - ], - "source": [ - " for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']).tolist())" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/tutorials/training/source_en/use/load_model_for_inference_and_transfer.md b/tutorials/training/source_en/use/load_model_for_inference_and_transfer.md deleted file mode 100644 index 94be779bd0857b647931fed4a3e3371149bd3d46..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/use/load_model_for_inference_and_transfer.md +++ /dev/null @@ -1,282 +0,0 @@ -# Loading a Model for Inference and Transfer Learning - -`Linux` `Ascend` `GPU` `CPU` `Model Loading` `Beginner` `Intermediate` `Expert` - - - -- [Loading a Model for Inference and Transfer Learning](#loading-a-model-for-inference-and-transfer-learning) - - [Overview](#overview) - - [Loading the local Model](#loading-the-local-model) - - [For Inference Validation](#for-inference-validation) - - [For Transfer Training](#for-transfer-training) - - [Loading the Model from Hub](#loading-the-model-from-hub) - - [For Inference Validation](#for-inference-validation-1) - - [For Transfer Training](#for-transfer-training-1) - - - - - -## Overview - -CheckPoints which are saved locally during model training, or download from [MindSpore 
Hub](https://www.mindspore.cn/resources/hub/) are used for inference and transfer training. - -The following uses examples to describe how to load models from local and MindSpore Hub. - -## Loading the local Model - -After saving CheckPoint files, you can load parameters. - -### For Inference Validation - -In inference-only scenarios, use `load_checkpoint` to directly load parameters to the network for subsequent inference validation. - -The sample code is as follows: - -```python -resnet = ResNet50() -load_checkpoint("resnet50-2_32.ckpt", net=resnet) -dateset_eval = create_dataset(os.path.join(mnist_path, "test"), 32, 1) # define the test dataset -loss = CrossEntropyLoss() -model = Model(resnet, loss, metrics={"accuracy"}) -acc = model.eval(dataset_eval) -``` - -The `load_checkpoint` method loads network parameters in the parameter file to the model. After the loading, parameters in the network are those saved in CheckPoints. -The `eval` method validates the accuracy of the trained model. - -### For Transfer Training - -In the retraining and fine-tuning scenarios for task interruption, you can load network parameters and optimizer parameters to the model. - -The sample code is as follows: - -```python -# return a parameter dict for model -param_dict = load_checkpoint("resnet50-2_32.ckpt") -resnet = ResNet50() -opt = Momentum(resnet.trainable_params(), 0.01, 0.9) -# load the parameter into net -load_param_into_net(resnet, param_dict) -# load the parameter into optimizer -load_param_into_net(opt, param_dict) -loss = SoftmaxCrossEntropyWithLogits() -model = Model(resnet, loss, opt) -model.train(epoch, dataset) -``` - -The `load_checkpoint` method returns a parameter dictionary and then the `load_param_into_net` method loads parameters in the parameter dictionary to the network or optimizer. - -## Loading the Model from Hub - -### For Inference Validation - -`mindspore_hub.load` API is used to load the pre-trained model in a single line of code. 
The main process of model loading is as follows: - -1. Search the model of interest on [MindSpore Hub Website](https://www.mindspore.cn/resources/hub). - - For example, if you aim to perform image classification on CIFAR-10 dataset using GoogleNet, please search on [MindSpore Hub Website](https://www.mindspore.cn/resources/hub) with the keyword `GoogleNet`. Then all related models will be returned. Once you enter into the related model page, you can get the website `url`. - -2. Complete the task of loading model using `url` , as shown in the example below: - - ```python - - import mindspore_hub as mshub - import mindspore - from mindspore import context, Tensor, nn, Model - from mindspore import dtype as mstype - import mindspore.dataset.vision.py_transforms as py_transforms - - context.set_context(mode=context.GRAPH_MODE, - device_target="Ascend", - device_id=0) - - model = "mindspore/ascend/0.7/googlenet_v1_cifar10" - - # Initialize the number of classes based on the pre-trained model. - network = mshub.load(model, num_classes=10) - network.set_train(False) - - # ... - - ``` - -3. After loading the model, you can use MindSpore to do inference. You can refer to [Multi-Platform Inference Overview](https://www.mindspore.cn/tutorial/inference/en/master/multi_platform_inference.html). - -### For Transfer Training - -When loading a model with `mindspore_hub.load` API, we can add an extra argument to load the feature extraction part of the model only. So we can easily add new layers to perform transfer learning. This feature can be found in the related model page when an extra argument (e.g., include_top) has been integrated into the model construction by the model developer. The value of `include_top` is True or False, indicating whether to keep the top layer in the fully-connected network. 
- -We use [MobileNetV2](https://gitee.com/mindspore/mindspore/tree/r1.0/model_zoo/official/cv/mobilenetv2) as an example to illustrate how to load a model trained on the ImageNet dataset and then perform transfer learning (re-training) on a specific sub-task dataset. The main steps are listed below: - -1. Search the model of interest on [MindSpore Hub Website](https://www.mindspore.cn/resources/hub/) and get the related `url`. - -2. Load the model from MindSpore Hub using the `url`. Note that the parameter `include_top` is provided by the model developer. - - ```python - import os - import mindspore_hub as mshub - import mindspore - from mindspore import context, Tensor, nn - from mindspore.nn import Momentum - from mindspore.train.serialization import save_checkpoint, load_checkpoint,load_param_into_net - from mindspore import ops - import mindspore.dataset as ds - import mindspore.dataset.transforms.c_transforms as C2 - import mindspore.dataset.vision.c_transforms as C - from mindspore import dtype as mstype - from mindspore import Model - context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", device_id=0) - - model = "mindspore/ascend/1.0/mobilenetv2_v1.0_openimage" - network = mshub.load(model, num_classes=500, include_top=False, activation="Sigmoid") - network.set_train(False) - ``` - -3. Add a new classification layer into current model architecture. - - ```python - class ReduceMeanFlatten(nn.Cell): - def __init__(self): - super(ReduceMeanFlatten, self).__init__() - self.mean = ops.ReduceMean(keep_dims=True) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.mean(x, (2, 3)) - x = self.flatten(x) - return x - - # Check MindSpore Hub website to conclude that the last output shape is 1280. - last_channel = 1280 - - # The number of classes in target task is 10. 
- num_classes = 10 - - reducemean_flatten = ReduceMeanFlatten() - - classification_layer = nn.Dense(last_channel, num_classes) - classification_layer.set_train(True) - - train_network = nn.SequentialCell([network, reducemean_flatten, classification_layer]) - ``` - -4. Define `dataset_loader`. - - As shown below, the new dataset used for fine-tuning is the [CIFAR-10](https://www.cs.toronto.edu/~kriz/cifar.html). It is noted here we need to download the `binary version` dataset. After downloading and decompression, the following code can be used for data loading and processing. It is noted the `dataset_path` is the path to the dataset and should be given by the user. - - ```python - def create_cifar10dataset(dataset_path, batch_size, do_train): - if do_train: - usage, shuffle = "train", True - else: - usage, shuffle = "test", False - - data_set = ds.Cifar10Dataset(dataset_dir=dataset_path, usage=usage, shuffle=True) - - # define map operations - trans = [C.Resize((256, 256))] - if do_train: - trans += [ - C.RandomHorizontalFlip(prob=0.5), - ] - - trans += [ - C.Rescale(1.0 / 255.0, 0.0), - C.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5]), - C.HWC2CHW() - ] - - type_cast_op = C2.TypeCast(mstype.int32) - - data_set = data_set.map(operations=type_cast_op, input_columns="label", num_parallel_workers=8) - data_set = data_set.map(operations=trans, input_columns="image", num_parallel_workers=8) - - # apply batch operations - data_set = data_set.batch(batch_size, drop_remainder=True) - return data_set - - # Create Dataset - dataset_path = "/path_to_dataset/cifar-10-batches-bin" - dataset = create_cifar10dataset(dataset_path, batch_size=32, do_train=True) - ``` - -5. Define `loss`, `optimizer` and `learning rate`. 
- - ```python - def generate_steps_lr(lr_init, steps_per_epoch, total_epochs): - total_steps = total_epochs * steps_per_epoch - decay_epoch_index = [0.3*total_steps, 0.6*total_steps, 0.8*total_steps] - lr_each_step = [] - for i in range(total_steps): - if i < decay_epoch_index[0]: - lr = lr_init - elif i < decay_epoch_index[1]: - lr = lr_init * 0.1 - elif i < decay_epoch_index[2]: - lr = lr_init * 0.01 - else: - lr = lr_init * 0.001 - lr_each_step.append(lr) - return lr_each_step - - # Set epoch size - epoch_size = 60 - - # Wrap the backbone network with loss. - loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - loss_net = nn.WithLossCell(train_network, loss_fn) - steps_per_epoch = dataset.get_dataset_size() - lr = generate_steps_lr(lr_init=0.01, steps_per_epoch=steps_per_epoch, total_epochs=epoch_size) - # Create an optimizer. - optim = Momentum(filter(lambda x: x.requires_grad, classification_layer.get_parameters()), Tensor(lr, mindspore.float32), 0.9, 4e-5) - train_net = nn.TrainOneStepCell(loss_net, optim) - ``` - -6. Start fine-tuning. - - ```python - for epoch in range(epoch_size): - for i, items in enumerate(dataset): - data, label = items - data = mindspore.Tensor(data) - label = mindspore.Tensor(label) - - loss = train_net(data, label) - print(f"epoch: {epoch}/{epoch_size}, loss: {loss}") - # Save the ckpt file for each epoch. - if not os.path.exists('ckpt'): - os.mkdir('ckpt') - ckpt_path = f"./ckpt/cifar10_finetune_epoch{epoch}.ckpt" - save_checkpoint(train_network, ckpt_path) - ``` - -6. Eval on test set. 
- - ```python - model = "mindspore/ascend/1.0/mobilenetv2_v1.0_openimage" - - network = mshub.load(model, num_classes=500, pretrained=True, include_top=False, activation="Sigmoid") - network.set_train(False) - reducemean_flatten = ReduceMeanFlatten() - classification_layer = nn.Dense(last_channel, num_classes) - classification_layer.set_train(False) - softmax = nn.Softmax() - network = nn.SequentialCell([network, reducemean_flatten, classification_layer, softmax]) - - # Load a pre-trained ckpt file. - ckpt_path = "./ckpt/cifar10_finetune_epoch59.ckpt" - trained_ckpt = load_checkpoint(ckpt_path) - load_param_into_net(classification_layer, trained_ckpt) - - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - - # Define loss and create model. - eval_dataset = create_cifar10dataset(dataset_path, batch_size=32, do_train=False) - eval_metrics = {'Loss': nn.Loss(), - 'Top1-Acc': nn.Top1CategoricalAccuracy(), - 'Top5-Acc': nn.Top5CategoricalAccuracy()} - model = Model(network, loss_fn=loss, optimizer=None, metrics=eval_metrics) - metrics = model.eval(eval_dataset) - print("metric: ", metrics) - ``` diff --git a/tutorials/training/source_en/use/publish_model.md b/tutorials/training/source_en/use/publish_model.md deleted file mode 100644 index e4e6e6c9f58ee5f6daff4f437172e2d25b1c496b..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/use/publish_model.md +++ /dev/null @@ -1,87 +0,0 @@ -# Publishing Models using MindSpore Hub - -`Linux` `Ascend` `GPU` `Model Publishing` `Intermediate` `Expert` - - - -- [Publishing Models using MindSpore Hub](#publishing-models-using-mindspore-hub) - - [Overview](#overview) - - [How to publish models](#how-to-publish-models) - - - - - -## Overview - -[MindSpore Hub](https://www.mindspore.cn/resources/hub/) is a platform for storing pre-trained models provided by MindSpore or third-party developers. 
It provides application developers with simple model loading and fine-tuning APIs, which enables the users to perform inference or fine-tuning based on the pre-trained models and thus deploy to their own applications. Users can also submit their pre-trained models into MindSpore Hub following the specific steps. Thus other users can download and use the published models. - -This tutorial uses GoogleNet as an example to describe how to submit models for model developers who are interested in publishing models into MindSpore Hub. - -## How to publish models - -You can publish models to MindSpore Hub via PR in [hub](https://gitee.com/mindspore/hub) repo. Here we use GoogleNet as an example to list the steps of model submission to MindSpore Hub. - -1. Host your pre-trained model in a storage location where we are able to access. - -2. Add a model generation python file called `mindspore_hub_conf.py` in your own repo using this [template](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/googlenet/mindspore_hub_conf.py). The location of the `mindspore_hub_conf.py` file is shown below: - - ```shell - googlenet - ├── src - │   ├── googlenet.py - ├── script - │   ├── run_train.sh - ├── train.py - ├── test.py - ├── mindspore_hub_conf.py - ``` - -3. Create a `{model_name}_{model_version}_{dataset}.md` file in `hub/mshub_res/assets/mindspore/ascend/0.7` using this [template](https://gitee.com/mindspore/hub/blob/master/mshub_res/assets/mindspore/ascend/0.7/googlenet_v1_cifar10.md#). Here `ascend` refers to the hardware platform for the pre-trained model, and `0.7` indicates the MindSpore version. 
The structure of the `hub/mshub_res` folder is as follows: - - ```shell - hub - ├── mshub_res - │   ├── assets - │   ├── mindspore - │ ├── gpu - │ ├── 0.7 - │ ├── ascend - │ ├── 0.7 - │ ├── googlenet_v1_cifar10.md - │   ├── tools - │ ├── get_sha256.py - │ ├── load_markdown.py - │ └── md_validator.py - ``` - - Note that it is required to fill in the `{model_name}_{model_version}_{dataset}.md` template by providing `file-format`、`asset-link` and `asset-sha256` below, which refers to the model file format, model storage location from step 1 and model hash value, respectively. - - ```shell - file-format: ckpt - asset-link: https://download.mindspore.cn/model_zoo/official/cv/googlenet/goolenet_ascend_0.2.0_cifar10_official_classification_20200713/googlenet.ckpt - asset-sha256: 114e5acc31dad444fa8ed2aafa02ca34734419f602b9299f3b53013dfc71b0f7 - ``` - - The MindSpore Hub supports multiple model file formats including: - - [MindSpore CKPT](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#checkpoint-configuration-policies) - - [MindIR](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#export-mindir-model) - - [AIR](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#export-air-model) - - [ONNX](https://www.mindspore.cn/tutorial/training/en/master/use/save_model.html#export-onnx-model) - - For each pre-trained model, please run the following command to obtain a hash value required at `asset-sha256` of this `.md` file. Here the pre-trained model `googlenet.ckpt` is accessed from the storage location in step 1 and then saved in `tools` folder. The output hash value is: `114e5acc31dad444fa8ed2aafa02ca34734419f602b9299f3b53013dfc71b0f7`. - - ```python - cd ../tools - python get_sha256.py --file ../googlenet.ckpt - ``` - -4. Check the format of the markdown file locally using `hub/mshub_res/tools/md_validator.py` by running the following command. 
The output is `All Passed`,which indicates that the format and content of the `.md` file meets the requirements. - - ```python - python md_validator.py --check_path ../assets/mindspore/ascend/0.7/googlenet_v1_cifar10.md - ``` - -5. Create a PR in `mindspore/hub` repo. See our [Contributor Wiki](https://gitee.com/mindspore/mindspore/blob/master/CONTRIBUTING.md#) for more information about creating a PR. - -Once your PR is merged into master branch here, your model will show up in [MindSpore Hub Website](https://www.mindspore.cn/resources/hub) within 24 hours. Please refer to [README](https://gitee.com/mindspore/hub/blob/master/mshub_res/README.md#) for more information about model submission. diff --git a/tutorials/training/source_en/use/save_model.md b/tutorials/training/source_en/use/save_model.md deleted file mode 100644 index 0447c492aedcd3e5e99785dd4f40771689f00f8b..0000000000000000000000000000000000000000 --- a/tutorials/training/source_en/use/save_model.md +++ /dev/null @@ -1,170 +0,0 @@ -# Saving Models - -`Linux` `Ascend` `GPU` `CPU` `Model Export` `Beginner` `Intermediate` `Expert` - - - -- [Saving Models](#saving-models) - - [Overview](#overview) - - [Saving CheckPoint files](#saving-checkpoint-files) - - [CheckPoint Configuration Policies](#checkpoint-configuration-policies) - - [Export MindIR Model](#export-mindir-model) - - [Export AIR Model](#export-air-model) - - [Export ONNX Model](#export-onnx-model) - - - - - -## Overview - -During model training, you can add CheckPoints to save model parameters for inference and retraining after interruption. If you want to perform inference on different hardware platforms, you need to generate corresponding models based on the network and CheckPoint, such as MindIR, AIR and ONNX. - -- MindIR: A functional IR of MindSpore based on graph representation, which defines an extensible graph structure and IR representation of operators, which eliminates the model differences between different backends. 
The model trained on Ascend 910 can be used for reasoning on the upper side of Ascend 310, GPU and MindSpore Lite. -- CheckPoint: A CheckPoint file of MindSpore is a binary file that stores the values of all training parameters. The Google Protocol Buffers mechanism with good scalability is adopted, which is independent of the development language and platform. The protocol format of CheckPoints is defined in `mindspore/ccsrc/utils/checkpoint.proto`. -- AIR: Ascend Intermediate Representation (AIR) is an open file format defined by Huawei for machine learning and can better adapt to the Ascend AI processor. It is similar to ONNX. -- ONNX: Open Neural Network Exchange (ONNX) is an open file format designed for machine learning. It is used to store trained models. - -The following uses examples to describe how to save MindSpore CheckPoint files, and how to export MindIR, AIR and ONNX files. - -## Saving CheckPoint files - -During model training, use the callback mechanism to transfer the object of the callback function `ModelCheckpoint` to save model parameters and generate CheckPoint files. - -You can use the `CheckpointConfig` object to set the CheckPoint saving policies. The saved parameters are classified into network parameters and optimizer parameters. - -`ModelCheckpoint` provides default configuration policies for users to quickly get started. The following describes the usage: - -```python -from mindspore.train.callback import ModelCheckpoint -ckpoint_cb = ModelCheckpoint() -model.train(epoch_num, dataset, callbacks=ckpoint_cb) -``` - -You can configure the CheckPoint policies as required. 
The following describes the usage: - -```python -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig -config_ck = CheckpointConfig(save_checkpoint_steps=32, keep_checkpoint_max=10) -ckpoint_cb = ModelCheckpoint(prefix='resnet50', directory=None, config=config_ck) -model.train(epoch_num, dataset, callbacks=ckpoint_cb) -``` - -In the preceding code, initialize a `TrainConfig` class object to set the saving policies. - -- `save_checkpoint_steps` indicates the saving frequency. That is, parameters are saved every specified number of steps. -- `keep_checkpoint_max` indicates the maximum number of CheckPoint files that can be saved. -- `prefix` indicates the prefix name of the generated CheckPoint file. -- `directory` indicates the directory for storing the file. - -Create a `ModelCheckpoint` object and transfer it to the model.train method. Then you can use the CheckPoint function during training. - -Generated CheckPoint files are as follows: - -```text -resnet50-graph.meta # Generate compiled computation graph. -resnet50-1_32.ckpt # The file name extension is .ckpt. -resnet50-2_32.ckpt # The file name format contains the epoch and step correspond to the saved parameters. -resnet50-3_32.ckpt # The file name indicates that the model parameters generated during the 32th step of the third epoch are saved. -... -``` - -If you use the same prefix and run the training script for multiple times, CheckPoint files with the same name may be generated. MindSpore adds underscores (_) and digits at the end of the user-defined prefix to distinguish CheckPoints with the same name. If you want to delete the `.ckpt` file, please delete the `.meta` file simultaneously. - -For example, `resnet50_3-2_32.ckpt` indicates the CheckPoint file generated during the 32th step of the second epoch after the script is executed for the third time. 
- -> - When performing distributed parallel training tasks, each process needs to set different `directory` parameters to save the CheckPoint file to a different directory to prevent files from being read or written incorrectly. - -### CheckPoint Configuration Policies - -MindSpore provides two types of CheckPoint saving policies: iteration policy and time policy. You can create the `CheckpointConfig` object to set the corresponding policies. -CheckpointConfig contains the following four parameters: - -- save_checkpoint_steps: indicates the step interval for saving a CheckPoint file. That is, parameters are saved every specified number of steps. The default value is 1. -- save_checkpoint_seconds: indicates the interval for saving a CheckPoint file. That is, parameters are saved every specified number of seconds. The default value is 0. -- keep_checkpoint_max: indicates the maximum number of CheckPoint files that can be saved. The default value is 5. -- keep_checkpoint_per_n_minutes: indicates the interval for saving a CheckPoint file. That is, parameters are saved every specified number of minutes. The default value is 0. - -`save_checkpoint_steps` and `keep_checkpoint_max` are iteration policies, which can be configured based on the number of training iterations. -`save_checkpoint_seconds` and `keep_checkpoint_per_n_minutes` are time policies, which can be configured during training. - -The two types of policies cannot be used together. Iteration policies have a higher priority than time policies. When the two types of policies are configured at the same time, only iteration policies take effect. -If a parameter is set to None, the related policy is cancelled. -After the training script is normally executed, the CheckPoint file generated during the last step is saved by default. 
- -## Export MindIR Model - -If you want to perform inference across platforms or hardware (Ascend AI processor, MindSpore on-device, GPU, etc.), you can generate the corresponding MindIR format model file through the network definition and CheckPoint. MindIR format file can be applied to MindSpore Lite. Currently, it supports inference network based on static graph without controlling flow semantics. - -If you want to perform inference on the device, then you need to generate corresponding MindIR models based on the network and CheckPoint. -Currently we support the export of MindIR models for inference based on the graph mode, which do not contain control flow. Taking the export of MindIR model as an example to illustrate the implementation of model export, -the code is as follows: - -```python -import numpy as np -from mindspore import Tensor, export, load_checkpoint, load_param_into_net -import numpy as np - -resnet = ResNet50() -# load the parameter into net -load_checkpoint("resnet50-2_32.ckpt", net=resnet) -input = np.random.uniform(0.0, 1.0, size=[32, 3, 224, 224]).astype(np.float32) -export(resnet, Tensor(input), file_name='resnet50-2_32', file_format='MINDIR') -``` - -> - `input` is the input parameter of the `export` method, representing the input of the network. If the network has multiple inputs, they need to be passed into the `export` method together. eg: `export(network, Tensor(input1), Tensor(input2), file_name='network', file_format='MINDIR')`. -> - The suffix ".mindir" is automatically added to the exported file name. - -In order to avoid the hardware limitation of protobuf, when the exported model parameter size exceeds 1G, the framework will save the network structure and parameters separately by default. - --The name of the network structure file ends with the user-specified prefix plus `_graph.mindir`. --In the same level directory, there will be a folder with user-specified prefix plus `_variables`, which stores network parameters. 
- -Taking the above code as an example, if the parameter size in the model exceeds 1G, the generated directory structure is as follows: - -```text -resnet50-2_32_graph.mindir -resnet50-2_32_variables - data_0 - data_1 - ... -``` - -## Export AIR Model - -If you want to perform inference on the Shengteng AI processor, you can also generate the corresponding AIR format model file through the network definition and CheckPoint. The code example of exporting this format file is as follows: - -```python -import numpy as np -from mindspore import Tensor, export, load_checkpoint, load_param_into_net - -resnet = ResNet50() -# load the parameter into net -load_checkpoint("resnet50-2_32.ckpt", net=resnet) -input = np.random.uniform(0.0, 1.0, size=[32, 3, 224, 224]).astype(np.float32) -export(resnet, Tensor(input), file_name='resnet50-2_32', file_format='AIR') -``` - -The `input` parameter is used to specify the input shape and the data type of the exported model. - -> - `input` is the input parameter of the `export` method, representing the input of the network. If the network has multiple inputs, they need to be passed into the `export` method together. eg: `export(network, Tensor(input1), Tensor(input2), file_name='network', file_format='AIR')`. -> - The suffix ".air" is automatically added to the exported file name. - -## Export ONNX Model - -When you have a CheckPoint file, if you want to do inference on Ascend AI processor, GPU, or CPU, you need to generate ONNX models based on the network and CheckPoint. ONNX format file is a general model file, which can be applied to many kinds of hardware, such as Ascend AI processor, GPU, CPU, etc. 
The code example of exporting this format file is as follows: - -```python -import numpy as np -from mindspore import Tensor, export, load_checkpoint, load_param_into_net - -resnet = ResNet50() -# load the parameter into net -load_checkpoint("resnet50-2_32.ckpt", net=resnet) -input = np.random.uniform(0.0, 1.0, size=[32, 3, 224, 224]).astype(np.float32) -export(resnet, Tensor(input), file_name='resnet50-2_32', file_format='ONNX') -``` - -> - `input` is the input parameter of the `export` method, representing the input of the network. If the network has multiple inputs, they need to be passed into the `export` method together. eg: `export(network, Tensor(input1), Tensor(input2), file_name='network', file_format='ONNX')`. -> - The suffix ".onnx" is automatically added to the exported file name. -> - Currently, only the ONNX format export of ResNet series networks is supported. diff --git a/tutorials/training/source_zh_cn/_static/css/bootstrap.min.css b/tutorials/training/source_zh_cn/_static/css/bootstrap.min.css deleted file mode 100644 index 35722284f98a1189566e9200862b02aac7cbec50..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/_static/css/bootstrap.min.css +++ /dev/null @@ -1,6 +0,0 @@ -/*! - * Bootstrap v3.3.7 (http://getbootstrap.com) - * Copyright 2011-2016 Twitter, Inc. - * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) - *//*! 
normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */html{overflow-y: hidden!important;font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}dfn{font-style:italic}h1{margin:.67em 0;font-size:2em}mark{color:#000;background:#ff0}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{height:0;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;font-size:1em}button,input,optgroup,select,textarea{margin:0;font:inherit;color:inherit}button{overflow:visible}button,select{text-transform:none}button,html input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}input{line-height:normal}input[type=checkbox],input[type=radio]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}fieldset{padding:.35em .625em .75em;margin:0 2px;border:1px solid 
silver}legend{padding:0;border:0}textarea{overflow:auto}optgroup{font-weight:700}table{border-spacing:0;border-collapse:collapse}td,th{padding:0}/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{*,:after,:before{color:#000!important;text-shadow:none!important;background:0 0!important;-webkit-box-shadow:none!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}blockquote,pre{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table td,.table th{background-color:#fff!important}.table-bordered td,.table-bordered th{border:1px solid #ddd!important}}@font-face{font-family:'Glyphicons Halflings';src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix) format('embedded-opentype'),url(../fonts/glyphicons-halflings-regular.woff2) format('woff2'),url(../fonts/glyphicons-halflings-regular.woff) format('woff'),url(../fonts/glyphicons-halflings-regular.ttf) format('truetype'),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular) format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons 
Halflings';font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.g
lyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot
:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"
}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{c
ontent:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before{content:"\e227"}.glyphicon-btc:before{content:"\e227"}.glyphicon-xbt:before{content:"\e227"}.glyphicon-yen:before{content:"\00a5"}.glyphicon-jpy:befo
re{content:"\00a5"}.glyphicon-ruble:before{content:"\20bd"}.glyphicon-rub:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}:after,:before{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:rgba(0,0,0,0)}body{font-family:"Helvetica 
Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#333;background-color:#fff}button,input,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#337ab7;text-decoration:none}a:focus,a:hover{color:#23527c;text-decoration:underline}a:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.carousel-inner>.item>a>img,.carousel-inner>.item>img,.img-responsive,.thumbnail a>img,.thumbnail>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{display:inline-block;max-width:100%;height:auto;padding:4px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role=button]{cursor:pointer}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-weight:400;line-height:1;color:#777}.h1,.h2,.h3,h1,h2,h3{margin-top:20px;margin-bottom:10px}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small{font-size:65%}.h4,.h5,.h6,h4,h5,h6{margin-top:10px;margin-bottom:10px}.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 
small{font-size:75%}.h1,h1{font-size:36px}.h2,h2{font-size:30px}.h3,h3{font-size:24px}.h4,h4{font-size:18px}.h5,h5{font-size:14px}.h6,h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}@media (min-width:768px){.lead{font-size:21px}}.small,small{font-size:85%}.mark,mark{padding:.2em;background-color:#fcf8e3}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#777}.text-primary{color:#337ab7}a.text-primary:focus,a.text-primary:hover{color:#286090}.text-success{color:#3c763d}a.text-success:focus,a.text-success:hover{color:#2b542c}.text-info{color:#31708f}a.text-info:focus,a.text-info:hover{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:focus,a.text-warning:hover{color:#66512c}.text-danger{color:#a94442}a.text-danger:focus,a.text-danger:hover{color:#843534}.bg-primary{color:#fff;background-color:#337ab7}a.bg-primary:focus,a.bg-primary:hover{background-color:#286090}.bg-success{background-color:#dff0d8}a.bg-success:focus,a.bg-success:hover{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:focus,a.bg-info:hover{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:focus,a.bg-warning:hover{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:focus,a.bg-danger:hover{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}ol,ul{margin-top:0;margin-bottom:10px}ol ol,ol ul,ul ol,ul 
ul{margin-bottom:0}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;margin-left:-5px;list-style:none}.list-inline>li{display:inline-block;padding-right:5px;padding-left:5px}dl{margin-top:0;margin-bottom:20px}dd,dt{line-height:1.42857143}dt{font-weight:700}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[data-original-title],abbr[title]{cursor:help;border-bottom:1px dotted #777}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #eee}blockquote ol:last-child,blockquote p:last-child,blockquote ul:last-child{margin-bottom:0}blockquote .small,blockquote footer,blockquote small{display:block;font-size:80%;line-height:1.42857143;color:#777}blockquote .small:before,blockquote footer:before,blockquote small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;text-align:right;border-right:5px solid #eee;border-left:0}.blockquote-reverse .small:before,.blockquote-reverse footer:before,.blockquote-reverse small:before,blockquote.pull-right .small:before,blockquote.pull-right footer:before,blockquote.pull-right small:before{content:''}.blockquote-reverse .small:after,.blockquote-reverse footer:after,.blockquote-reverse small:after,blockquote.pull-right .small:after,blockquote.pull-right footer:after,blockquote.pull-right small:after{content:'\00A0 \2014'}address{margin-bottom:20px;font-style:normal;line-height:1.42857143}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{padding:2px 4px;font-size:90%;color:#c7254e;background-color:#f9f2f4;border-radius:4px}kbd{padding:2px 4px;font-size:90%;color:#fff;background-color:#333;border-radius:3px;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.25);box-shadow:inset 0 -1px 0 
rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;font-weight:700;-webkit-box-shadow:none;box-shadow:none}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857143;color:#333;word-break:break-all;word-wrap:break-word;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.container-fluid{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.row{margin-right:-15px;margin-left:-15px}.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{position:relative;min-height:1px;padding-right:15px;padding-left:15px}.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{float:left}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.666
66667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:auto}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:auto}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media 
(min-width:768px){.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:auto}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:auto}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media 
(min-width:992px){.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:auto}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:auto}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media 
(min-width:1200px){.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:auto}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:auto}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}table{background-color:transparent}caption{padding-top:8px;padding-bottom:8px;color:#777;text-align:left}th{text-align:left}.table{width:100%;max-width:100%;margin-bottom:20px}.table>tbody>tr>td,.table>tbody>tr>th,.table>tfoot>tr>td,.table>
tfoot>tr>th,.table>thead>tr>td,.table>thead>tr>th{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>td,.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>td,.table>thead:first-child>tr:first-child>th{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>tbody>tr>td,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>td,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>thead>tr>th{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>tbody>tr>td,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>td,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border:1px solid #ddd}.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}table col[class*=col-]{position:static;display:table-column;float:none}table td[class*=col-],table 
th[class*=col-]{position:static;display:table-cell;float:none}.table>tbody>tr.active>td,.table>tbody>tr.active>th,.table>tbody>tr>td.active,.table>tbody>tr>th.active,.table>tfoot>tr.active>td,.table>tfoot>tr.active>th,.table>tfoot>tr>td.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>thead>tr.active>th,.table>thead>tr>td.active,.table>thead>tr>th.active{background-color:#f5f5f5}.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr.active:hover>th,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover{background-color:#e8e8e8}.table>tbody>tr.success>td,.table>tbody>tr.success>th,.table>tbody>tr>td.success,.table>tbody>tr>th.success,.table>tfoot>tr.success>td,.table>tfoot>tr.success>th,.table>tfoot>tr>td.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>thead>tr.success>th,.table>thead>tr>td.success,.table>thead>tr>th.success{background-color:#dff0d8}.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr.success:hover>th,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover{background-color:#d0e9c6}.table>tbody>tr.info>td,.table>tbody>tr.info>th,.table>tbody>tr>td.info,.table>tbody>tr>th.info,.table>tfoot>tr.info>td,.table>tfoot>tr.info>th,.table>tfoot>tr>td.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>thead>tr.info>th,.table>thead>tr>td.info,.table>thead>tr>th.info{background-color:#d9edf7}.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr.info:hover>th,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover{background-color:#c4e3f3}.table>tbody>tr.warning>td,.table>tbody>tr.warning>th,.table>tbody>tr>td.warning,.table>tbody>tr>th.warning,.table>tfoot>tr.warning>td,.table>tfoot>tr.warning>th,.table>tfoot>tr>td.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>thead>tr.warning>th,.table>thead>tr>td.warning,.table>t
head>tr>th.warning{background-color:#fcf8e3}.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr.warning:hover>th,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover{background-color:#faf2cc}.table>tbody>tr.danger>td,.table>tbody>tr.danger>th,.table>tbody>tr>td.danger,.table>tbody>tr>th.danger,.table>tfoot>tr.danger>td,.table>tfoot>tr.danger>th,.table>tfoot>tr>td.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>thead>tr.danger>th,.table>thead>tr>td.danger,.table>thead>tr>th.danger{background-color:#f2dede}.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr.danger:hover>th,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover{background-color:#ebcccc}.table-responsive{min-height:.01%;overflow-x:auto}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>td,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>thead>tr>th{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:inherit;color:#333;border:0;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px;font-weight:700}input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=checkbox],input[type=radio]{margin:4px 0 0;margin-top:1px\9;line-height:normal}input[type=file]{display:block}input[type=range]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type=file]:focus,input[type=checkbox]:focus,input[type=radio]:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}output{display:block;padding-top:7px;font-size:14px;line-height:1.42857143;color:#555}.form-control{display:block;width:100%;height:34px;padding:6px 12px;font-size:14px;line-height:1.42857143;color:#555;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color ease-in-out .15s,-webkit-box-shadow ease-in-out .15s;-o-transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control::-ms-expand{background-color:transparent;border:0}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#eee;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}input[type=search]{-webkit-appearance:none}@media screen and (-webkit-min-device-pixel-ratio:0){input[type=date].form-control,input[type=time].form-control,input[type=datetime-local].form-control,input[type=month].form-control{line-height:34px}.input-group-sm input[type=date],.input-group-sm input[type=time],.input-group-sm input[type=datetime-local],.input-group-sm input[type=month],input[type=date].input-sm,input[type=time].input-sm,input[type=datetime-local].input-sm,input[type=month].input-sm{line-height:30px}.input-group-lg input[type=date],.input-group-lg input[type=time],.input-group-lg input[type=datetime-local],.input-group-lg 
input[type=month],input[type=date].input-lg,input[type=time].input-lg,input[type=datetime-local].input-lg,input[type=month].input-lg{line-height:46px}}.form-group{margin-bottom:15px}.checkbox,.radio{position:relative;display:block;margin-top:10px;margin-bottom:10px}.checkbox label,.radio label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox input[type=checkbox],.checkbox-inline input[type=checkbox],.radio input[type=radio],.radio-inline input[type=radio]{position:absolute;margin-top:4px\9;margin-left:-20px}.checkbox+.checkbox,.radio+.radio{margin-top:-5px}.checkbox-inline,.radio-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;font-weight:400;vertical-align:middle;cursor:pointer}.checkbox-inline+.checkbox-inline,.radio-inline+.radio-inline{margin-top:0;margin-left:10px}fieldset[disabled] input[type=checkbox],fieldset[disabled] input[type=radio],input[type=checkbox].disabled,input[type=checkbox][disabled],input[type=radio].disabled,input[type=radio][disabled]{cursor:not-allowed}.checkbox-inline.disabled,.radio-inline.disabled,fieldset[disabled] .checkbox-inline,fieldset[disabled] .radio-inline{cursor:not-allowed}.checkbox.disabled label,.radio.disabled label,fieldset[disabled] .checkbox label,fieldset[disabled] .radio label{cursor:not-allowed}.form-control-static{min-height:34px;padding-top:7px;padding-bottom:7px;margin-bottom:0}.form-control-static.input-lg,.form-control-static.input-sm{padding-right:0;padding-left:0}.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-sm{height:30px;line-height:30px}select[multiple].input-sm,textarea.input-sm{height:auto}.form-group-sm .form-control{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm select[multiple].form-control,.form-group-sm textarea.form-control{height:auto}.form-group-sm 
.form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-lg{height:46px;line-height:46px}select[multiple].input-lg,textarea.input-lg{height:auto}.form-group-lg .form-control{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg select[multiple].form-control,.form-group-lg textarea.form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.3333333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:2;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.form-group-lg .form-control+.form-control-feedback,.input-group-lg+.form-control-feedback,.input-lg+.form-control-feedback{width:46px;height:46px;line-height:46px}.form-group-sm .form-control+.form-control-feedback,.input-group-sm+.form-control-feedback,.input-sm+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .checkbox,.has-success .checkbox-inline,.has-success .control-label,.has-success .help-block,.has-success .radio,.has-success .radio-inline,.has-success.checkbox label,.has-success.checkbox-inline label,.has-success.radio label,.has-success.radio-inline label{color:#3c763d}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;background-color:#dff0d8;border-color:#3c763d}.has-success 
.form-control-feedback{color:#3c763d}.has-warning .checkbox,.has-warning .checkbox-inline,.has-warning .control-label,.has-warning .help-block,.has-warning .radio,.has-warning .radio-inline,.has-warning.checkbox label,.has-warning.checkbox-inline label,.has-warning.radio label,.has-warning.radio-inline label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;background-color:#fcf8e3;border-color:#8a6d3b}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .checkbox,.has-error .checkbox-inline,.has-error .control-label,.has-error .help-block,.has-error .radio,.has-error .radio-inline,.has-error.checkbox label,.has-error.checkbox-inline label,.has-error.radio label,.has-error.radio-inline label{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;background-color:#f2dede;border-color:#a94442}.has-error .form-control-feedback{color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-static{display:inline-block}.form-inline 
.input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .form-control,.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .checkbox,.form-inline .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .checkbox label,.form-inline .radio label{padding-left:0}.form-inline .checkbox input[type=checkbox],.form-inline .radio input[type=radio]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .checkbox,.form-horizontal .checkbox-inline,.form-horizontal .radio,.form-horizontal .radio-inline{padding-top:7px;margin-top:0;margin-bottom:0}.form-horizontal .checkbox,.form-horizontal .radio{min-height:27px}.form-horizontal .form-group{margin-right:-15px;margin-left:-15px}@media (min-width:768px){.form-horizontal .control-label{padding-top:7px;margin-bottom:0;text-align:right}}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:11px;font-size:18px}}@media (min-width:768px){.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;padding:6px 12px;margin-bottom:0;font-size:14px;font-weight:400;line-height:1.42857143;text-align:center;white-space:nowrap;vertical-align:middle;-ms-touch-action:manipulation;touch-action:manipulation;cursor:pointer;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background-image:none;border:1px solid transparent;border-radius:4px}.btn.active.focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn:active:focus,.btn:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}.btn.focus,.btn:focus,.btn:hover{color:#333;text-decoration:none}.btn.active,.btn:active{background-image:none;outline:0;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none;opacity:.65}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default.focus,.btn-default:focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default:hover{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active.focus,.btn-default.active:focus,.btn-default.active:hover,.btn-default:active.focus,.btn-default:active:focus,.btn-default:active:hover,.open>.dropdown-toggle.btn-default.focus,.open>.dropdown-toggle.btn-default:focus,.open>.dropdown-toggle.btn-default:hover{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{background-image:none}.btn-default.disabled.focus,.btn-default.disabled:focus,.btn-default.disabled:hover,.btn-default[disabled].focus,.btn-default[disabled]:focus,.btn-default[disabled]:hover,fieldset[disabled] .btn-default.focus,fieldset[disabled] .btn-default:focus,fieldset[disabled] .btn-default:hover{background-color:#fff;border-color:#ccc}.btn-default 
.badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#337ab7;border-color:#2e6da4}.btn-primary.focus,.btn-primary:focus{color:#fff;background-color:#286090;border-color:#122b40}.btn-primary:hover{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active.focus,.btn-primary.active:focus,.btn-primary.active:hover,.btn-primary:active.focus,.btn-primary:active:focus,.btn-primary:active:hover,.open>.dropdown-toggle.btn-primary.focus,.open>.dropdown-toggle.btn-primary:focus,.open>.dropdown-toggle.btn-primary:hover{color:#fff;background-color:#204d74;border-color:#122b40}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{background-image:none}.btn-primary.disabled.focus,.btn-primary.disabled:focus,.btn-primary.disabled:hover,.btn-primary[disabled].focus,.btn-primary[disabled]:focus,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary.focus,fieldset[disabled] .btn-primary:focus,fieldset[disabled] .btn-primary:hover{background-color:#337ab7;border-color:#2e6da4}.btn-primary 
.badge{color:#337ab7;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success.focus,.btn-success:focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success:hover{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active.focus,.btn-success.active:focus,.btn-success.active:hover,.btn-success:active.focus,.btn-success:active:focus,.btn-success:active:hover,.open>.dropdown-toggle.btn-success.focus,.open>.dropdown-toggle.btn-success:focus,.open>.dropdown-toggle.btn-success:hover{color:#fff;background-color:#398439;border-color:#255625}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{background-image:none}.btn-success.disabled.focus,.btn-success.disabled:focus,.btn-success.disabled:hover,.btn-success[disabled].focus,.btn-success[disabled]:focus,.btn-success[disabled]:hover,fieldset[disabled] .btn-success.focus,fieldset[disabled] .btn-success:focus,fieldset[disabled] .btn-success:hover{background-color:#5cb85c;border-color:#4cae4c}.btn-success 
.badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info.focus,.btn-info:focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info:hover{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active.focus,.btn-info.active:focus,.btn-info.active:hover,.btn-info:active.focus,.btn-info:active:focus,.btn-info:active:hover,.open>.dropdown-toggle.btn-info.focus,.open>.dropdown-toggle.btn-info:focus,.open>.dropdown-toggle.btn-info:hover{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{background-image:none}.btn-info.disabled.focus,.btn-info.disabled:focus,.btn-info.disabled:hover,.btn-info[disabled].focus,.btn-info[disabled]:focus,.btn-info[disabled]:hover,fieldset[disabled] .btn-info.focus,fieldset[disabled] .btn-info:focus,fieldset[disabled] .btn-info:hover{background-color:#5bc0de;border-color:#46b8da}.btn-info 
.badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning.focus,.btn-warning:focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning:hover{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active.focus,.btn-warning.active:focus,.btn-warning.active:hover,.btn-warning:active.focus,.btn-warning:active:focus,.btn-warning:active:hover,.open>.dropdown-toggle.btn-warning.focus,.open>.dropdown-toggle.btn-warning:focus,.open>.dropdown-toggle.btn-warning:hover{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{background-image:none}.btn-warning.disabled.focus,.btn-warning.disabled:focus,.btn-warning.disabled:hover,.btn-warning[disabled].focus,.btn-warning[disabled]:focus,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning.focus,fieldset[disabled] .btn-warning:focus,fieldset[disabled] .btn-warning:hover{background-color:#f0ad4e;border-color:#eea236}.btn-warning 
.badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger.focus,.btn-danger:focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger:hover{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active.focus,.btn-danger.active:focus,.btn-danger.active:hover,.btn-danger:active.focus,.btn-danger:active:focus,.btn-danger:active:hover,.open>.dropdown-toggle.btn-danger.focus,.open>.dropdown-toggle.btn-danger:focus,.open>.dropdown-toggle.btn-danger:hover{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{background-image:none}.btn-danger.disabled.focus,.btn-danger.disabled:focus,.btn-danger.disabled:hover,.btn-danger[disabled].focus,.btn-danger[disabled]:focus,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger.focus,fieldset[disabled] .btn-danger:focus,fieldset[disabled] .btn-danger:hover{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{font-weight:400;color:#337ab7;border-radius:0}.btn-link,.btn-link.active,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:active,.btn-link:focus,.btn-link:hover{border-color:transparent}.btn-link:focus,.btn-link:hover{color:#23527c;text-decoration:underline;background-color:transparent}.btn-link[disabled]:focus,.btn-link[disabled]:hover,fieldset[disabled] .btn-link:focus,fieldset[disabled] .btn-link:hover{color:#777;text-decoration:none}.btn-group-lg>.btn,.btn-lg{padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.btn-group-sm>.btn,.btn-sm{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-xs>.btn,.btn-xs{padding:1px 
5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type=button].btn-block,input[type=reset].btn-block,input[type=submit].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition-timing-function:ease;-o-transition-timing-function:ease;transition-timing-function:ease;-webkit-transition-duration:.35s;-o-transition-duration:.35s;transition-duration:.35s;-webkit-transition-property:height,visibility;-o-transition-property:height,visibility;transition-property:height,visibility}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px dashed;border-top:4px solid\9;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown,.dropup{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;font-size:14px;text-align:left;list-style:none;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,.175);box-shadow:0 6px 12px rgba(0,0,0,.175)}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 
20px;clear:both;font-weight:400;line-height:1.42857143;color:#333;white-space:nowrap}.dropdown-menu>li>a:focus,.dropdown-menu>li>a:hover{color:#262626;text-decoration:none;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:focus,.dropdown-menu>.active>a:hover{color:#fff;text-decoration:none;background-color:#337ab7;outline:0}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{color:#777}.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{text-decoration:none;cursor:not-allowed;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{right:0;left:auto}.dropdown-menu-left{right:auto;left:0}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.42857143;color:#777;white-space:nowrap}.dropdown-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:990}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{content:"";border-top:0;border-bottom:4px dashed;border-bottom:4px solid\9}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{right:0;left:auto}.navbar-right .dropdown-menu-left{right:auto;left:0}}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;float:left}.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:2}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar .btn,.btn-toolbar 
.btn-group,.btn-toolbar .input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-bottom-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-right:8px;padding-left:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-right:12px;padding-left:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn .caret{margin-left:0}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 
5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-left-radius:0;border-top-right-radius:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-top-right-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{display:table-cell;float:none;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle=buttons]>.btn input[type=checkbox],[data-toggle=buttons]>.btn input[type=radio],[data-toggle=buttons]>.btn-group>.btn input[type=checkbox],[data-toggle=buttons]>.btn-group>.btn input[type=radio]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*=col-]{float:none;padding-right:0;padding-left:0}.input-group 
.form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group .form-control:focus{z-index:3}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn,textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn,textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn{height:auto}.input-group .form-control,.input-group-addon,.input-group-btn{display:table-cell}.input-group .form-control:not(:first-child):not(:last-child),.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm{padding:5px 
10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type=checkbox],.input-group-addon input[type=radio]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn-group:not(:last-child)>.btn,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:first-child>.btn-group:not(:first-child)>.btn,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle{border-top-left-radius:0;border-bottom-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:active,.input-group-btn>.btn:focus,.input-group-btn>.btn:hover{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:2;margin-left:-1px}.nav{padding-left:0;margin-bottom:0;list-style:none}.nav>li{position:relative;display:block}.nav>li>a{position:relative;display:block;padding:10px 15px}.nav>li>a:focus,.nav>li>a:hover{text-decoration:none;background-color:#eee}.nav>li.disabled>a{color:#777}.nav>li.disabled>a:focus,.nav>li.disabled>a:hover{color:#777;text-decoration:none;cursor:not-allowed;background-color:transparent}.nav .open>a,.nav .open>a:focus,.nav .open>a:hover{background-color:#eee;border-color:#337ab7}.nav .nav-divider{height:1px;margin:9px 
0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid transparent;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:focus,.nav-tabs>li.active>a:hover{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0}}.nav-tabs.nav-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border-bottom-color:#fff}}.nav-pills>li{float:left}.nav-pills>li>a{border-radius:4px}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:focus,.nav-pills>li.active>a:hover{color:#fff;background-color:#337ab7}.nav-stacked>li{float:none}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li{float:none}.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media 
(min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border-bottom-color:#fff}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}@media (min-width:768px){.navbar{border-radius:4px}}@media (min-width:768px){.navbar-header{float:left}}.navbar-collapse{padding-right:15px;padding-left:15px;overflow-x:visible;-webkit-overflow-scrolling:touch;border-top:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1)}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar-collapse{width:auto;border-top:0;-webkit-box-shadow:none;box-shadow:none}.navbar-collapse.collapse{display:block!important;height:auto!important;padding-bottom:0;overflow:visible!important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse{padding-right:0;padding-left:0}}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:340px}@media (max-device-width:480px) and (orientation:landscape){.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:200px}}.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:-15px;margin-left:-15px}@media 
(min-width:768px){.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:0;margin-left:0}}.navbar-static-top{z-index:1000;border-width:0 0 1px}@media (min-width:768px){.navbar-static-top{border-radius:0}}.navbar-fixed-bottom,.navbar-fixed-top{position:fixed;right:0;left:0;z-index:1030}@media (min-width:768px){.navbar-fixed-bottom,.navbar-fixed-top{border-radius:0}}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;height:50px;padding:15px 15px;font-size:18px;line-height:20px}.navbar-brand:focus,.navbar-brand:hover{text-decoration:none}.navbar-brand>img{display:block}@media (min-width:768px){.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;padding:9px 10px;margin-top:8px;margin-right:15px;margin-bottom:8px;background-color:transparent;background-image:none;border:1px solid transparent;border-radius:4px}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}@media (min-width:768px){.navbar-toggle{display:none}}.navbar-nav{margin:7.5px -15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;-webkit-box-shadow:none;box-shadow:none}.navbar-nav .open .dropdown-menu .dropdown-header,.navbar-nav .open .dropdown-menu>li>a{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:focus,.navbar-nav .open .dropdown-menu>li>a:hover{background-image:none}}@media 
(min-width:768px){.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}}.navbar-form{padding:10px 15px;margin-top:8px;margin-right:-15px;margin-bottom:8px;margin-left:-15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1)}@media (min-width:768px){.navbar-form .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .form-control-static{display:inline-block}.navbar-form .input-group{display:inline-table;vertical-align:middle}.navbar-form .input-group .form-control,.navbar-form .input-group .input-group-addon,.navbar-form .input-group .input-group-btn{width:auto}.navbar-form .input-group>.form-control{width:100%}.navbar-form .control-label{margin-bottom:0;vertical-align:middle}.navbar-form .checkbox,.navbar-form .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.navbar-form .checkbox label,.navbar-form .radio label{padding-left:0}.navbar-form .checkbox input[type=checkbox],.navbar-form .radio input[type=radio]{position:relative;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}.navbar-form .form-group:last-child{margin-bottom:0}}@media (min-width:768px){.navbar-form{width:auto;padding-top:0;padding-bottom:0;margin-right:0;margin-left:0;border:0;-webkit-box-shadow:none;box-shadow:none}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-left-radius:0;border-top-right-radius:0}.navbar-fixed-bottom 
.navbar-nav>li>.dropdown-menu{margin-bottom:0;border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-right:15px;margin-left:15px}}@media (min-width:768px){.navbar-left{float:left!important}.navbar-right{float:right!important;margin-right:-15px}.navbar-right~.navbar-right{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default .navbar-brand:focus,.navbar-default .navbar-brand:hover{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a{color:#777}.navbar-default .navbar-nav>li>a:focus,.navbar-default .navbar-nav>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:focus,.navbar-default .navbar-nav>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:focus,.navbar-default .navbar-nav>.disabled>a:hover{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:focus,.navbar-default .navbar-toggle:hover{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:focus,.navbar-default .navbar-nav>.open>a:hover{color:#555;background-color:#e7e7e7}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-default .navbar-nav .open 
.dropdown-menu>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-default .btn-link{color:#777}.navbar-default .btn-link:focus,.navbar-default .btn-link:hover{color:#333}.navbar-default .btn-link[disabled]:focus,.navbar-default .btn-link[disabled]:hover,fieldset[disabled] .navbar-default .btn-link:focus,fieldset[disabled] .navbar-default .btn-link:hover{color:#ccc}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#9d9d9d}.navbar-inverse .navbar-brand:focus,.navbar-inverse .navbar-brand:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-text{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a:focus,.navbar-inverse .navbar-nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:focus,.navbar-inverse .navbar-nav>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:focus,.navbar-inverse .navbar-nav>.disabled>a:hover{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:focus,.navbar-inverse .navbar-toggle:hover{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse 
.navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:focus,.navbar-inverse .navbar-nav>.open>a:hover{color:#fff;background-color:#080808}@media (max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#9d9d9d}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .btn-link{color:#9d9d9d}.navbar-inverse .btn-link:focus,.navbar-inverse .btn-link:hover{color:#fff}.navbar-inverse .btn-link[disabled]:focus,.navbar-inverse .btn-link[disabled]:hover,fieldset[disabled] .navbar-inverse .btn-link:focus,fieldset[disabled] .navbar-inverse .btn-link:hover{color:#444}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:4px}.breadcrumb>li{display:inline-block}.breadcrumb>li+li:before{padding:0 5px;color:#ccc;content:"/\00a0"}.breadcrumb>.active{color:#777}.pagination{display:inline-block;padding-left:0;margin:20px 0;border-radius:4px}.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;margin-left:-1px;line-height:1.42857143;color:#337ab7;text-decoration:none;background-color:#fff;border:1px solid 
#ddd}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-top-left-radius:4px;border-bottom-left-radius:4px}.pagination>li:last-child>a,.pagination>li:last-child>span{border-top-right-radius:4px;border-bottom-right-radius:4px}.pagination>li>a:focus,.pagination>li>a:hover,.pagination>li>span:focus,.pagination>li>span:hover{z-index:2;color:#23527c;background-color:#eee;border-color:#ddd}.pagination>.active>a,.pagination>.active>a:focus,.pagination>.active>a:hover,.pagination>.active>span,.pagination>.active>span:focus,.pagination>.active>span:hover{z-index:3;color:#fff;cursor:default;background-color:#337ab7;border-color:#337ab7}.pagination>.disabled>a,.pagination>.disabled>a:focus,.pagination>.disabled>a:hover,.pagination>.disabled>span,.pagination>.disabled>span:focus,.pagination>.disabled>span:hover{color:#777;cursor:not-allowed;background-color:#fff;border-color:#ddd}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px;line-height:1.3333333}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-top-left-radius:6px;border-bottom-left-radius:6px}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-top-right-radius:6px;border-bottom-right-radius:6px}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px;line-height:1.5}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-top-left-radius:3px;border-bottom-left-radius:3px}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-top-right-radius:3px;border-bottom-right-radius:3px}.pager{padding-left:0;margin:20px 0;text-align:center;list-style:none}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:focus,.pager li>a:hover{text-decoration:none;background-color:#eee}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager 
.previous>span{float:left}.pager .disabled>a,.pager .disabled>a:focus,.pager .disabled>a:hover,.pager .disabled>span{color:#777;cursor:not-allowed;background-color:#fff}.label{display:inline;padding:.2em .6em .3em;font-size:75%;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25em}a.label:focus,a.label:hover{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.btn .label{position:relative;top:-1px}.label-default{background-color:#777}.label-default[href]:focus,.label-default[href]:hover{background-color:#5e5e5e}.label-primary{background-color:#337ab7}.label-primary[href]:focus,.label-primary[href]:hover{background-color:#286090}.label-success{background-color:#5cb85c}.label-success[href]:focus,.label-success[href]:hover{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:focus,.label-info[href]:hover{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:focus,.label-warning[href]:hover{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:focus,.label-danger[href]:hover{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:middle;background-color:#777;border-radius:10px}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.btn-group-xs>.btn .badge,.btn-xs .badge{top:0;padding:1px 5px}a.badge:focus,a.badge:hover{color:#fff;text-decoration:none;cursor:pointer}.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#337ab7;background-color:#fff}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding-top:30px;padding-bottom:30px;margin-bottom:30px;color:inherit;background-color:#eee}.jumbotron .h1,.jumbotron h1{color:inherit}.jumbotron 
p{margin-bottom:15px;font-size:21px;font-weight:200}.jumbotron>hr{border-top-color:#d5d5d5}.container .jumbotron,.container-fluid .jumbotron{padding-right:15px;padding-left:15px;border-radius:6px}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron,.container-fluid .jumbotron{padding-right:60px;padding-left:60px}.jumbotron .h1,.jumbotron h1{font-size:63px}}.thumbnail{display:block;padding:4px;margin-bottom:20px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:border .2s ease-in-out;-o-transition:border .2s ease-in-out;transition:border .2s ease-in-out}.thumbnail a>img,.thumbnail>img{margin-right:auto;margin-left:auto}a.thumbnail.active,a.thumbnail:focus,a.thumbnail:hover{border-color:#337ab7}.thumbnail .caption{padding:9px;color:#333}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:700}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 
0}}@-o-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f5f5f5;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.1);box-shadow:inset 0 1px 2px rgba(0,0,0,.1)}.progress-bar{float:left;width:0;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#337ab7;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress-bar-striped,.progress-striped .progress-bar{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;background-size:40px 40px}.progress-bar.active,.progress.active .progress-bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 
75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 
25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.media{margin-top:15px}.media:first-child{margin-top:0}.media,.media-body{overflow:hidden;zoom:1}.media-body{width:10000px}.media-object{display:block}.media-object.img-thumbnail{max-width:none}.media-right,.media>.pull-right{padding-left:10px}.media-left,.media>.pull-left{padding-right:10px}.media-body,.media-left,.media-right{display:table-cell;vertical-align:top}.media-middle{vertical-align:middle}.media-bottom{vertical-align:bottom}.media-heading{margin-top:0;margin-bottom:5px}.media-list{padding-left:0;list-style:none}.list-group{padding-left:0;margin-bottom:20px}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-left-radius:4px;border-top-right-radius:4px}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}a.list-group-item,button.list-group-item{color:#555}a.list-group-item .list-group-item-heading,button.list-group-item .list-group-item-heading{color:#333}a.list-group-item:focus,a.list-group-item:hover,button.list-group-item:focus,button.list-group-item:hover{color:#555;text-decoration:none;background-color:#f5f5f5}button.list-group-item{width:100%;text-align:left}.list-group-item.disabled,.list-group-item.disabled:focus,.list-group-item.disabled:hover{color:#777;cursor:not-allowed;background-color:#eee}.list-group-item.disabled .list-group-item-heading,.list-group-item.disabled:focus .list-group-item-heading,.list-group-item.disabled:hover .list-group-item-heading{color:inherit}.list-group-item.disabled .list-group-item-text,.list-group-item.disabled:focus .list-group-item-text,.list-group-item.disabled:hover 
.list-group-item-text{color:#777}.list-group-item.active,.list-group-item.active:focus,.list-group-item.active:hover{z-index:2;color:#fff;background-color:#337ab7;border-color:#337ab7}.list-group-item.active .list-group-item-heading,.list-group-item.active .list-group-item-heading>.small,.list-group-item.active .list-group-item-heading>small,.list-group-item.active:focus .list-group-item-heading,.list-group-item.active:focus .list-group-item-heading>.small,.list-group-item.active:focus .list-group-item-heading>small,.list-group-item.active:hover .list-group-item-heading,.list-group-item.active:hover .list-group-item-heading>.small,.list-group-item.active:hover .list-group-item-heading>small{color:inherit}.list-group-item.active .list-group-item-text,.list-group-item.active:focus .list-group-item-text,.list-group-item.active:hover .list-group-item-text{color:#c7ddef}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success,button.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading,button.list-group-item-success .list-group-item-heading{color:inherit}a.list-group-item-success:focus,a.list-group-item-success:hover,button.list-group-item-success:focus,button.list-group-item-success:hover{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,a.list-group-item-success.active:focus,a.list-group-item-success.active:hover,button.list-group-item-success.active,button.list-group-item-success.active:focus,button.list-group-item-success.active:hover{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info,button.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading,button.list-group-item-info 
.list-group-item-heading{color:inherit}a.list-group-item-info:focus,a.list-group-item-info:hover,button.list-group-item-info:focus,button.list-group-item-info:hover{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,a.list-group-item-info.active:focus,a.list-group-item-info.active:hover,button.list-group-item-info.active,button.list-group-item-info.active:focus,button.list-group-item-info.active:hover{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning,button.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading,button.list-group-item-warning .list-group-item-heading{color:inherit}a.list-group-item-warning:focus,a.list-group-item-warning:hover,button.list-group-item-warning:focus,button.list-group-item-warning:hover{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,a.list-group-item-warning.active:focus,a.list-group-item-warning.active:hover,button.list-group-item-warning.active,button.list-group-item-warning.active:focus,button.list-group-item-warning.active:hover{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger,button.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading,button.list-group-item-danger 
.list-group-item-heading{color:inherit}a.list-group-item-danger:focus,a.list-group-item-danger:hover,button.list-group-item-danger:focus,button.list-group-item-danger:hover{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,a.list-group-item-danger.active:focus,a.list-group-item-danger.active:hover,button.list-group-item-danger.active,button.list-group-item-danger.active:focus,button.list-group-item-danger.active:hover{color:#fff;background-color:#a94442;border-color:#a94442}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0,0,0,.05);box-shadow:0 1px 1px rgba(0,0,0,.05)}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-left-radius:3px;border-top-right-radius:3px}.panel-heading>.dropdown .dropdown-toggle{color:inherit}.panel-title{margin-top:0;margin-bottom:0;font-size:16px;color:inherit}.panel-title>.small,.panel-title>.small>a,.panel-title>a,.panel-title>small,.panel-title>small>a{color:inherit}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.list-group,.panel>.panel-collapse>.list-group{margin-bottom:0}.panel>.list-group .list-group-item,.panel>.panel-collapse>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel>.list-group:first-child .list-group-item:first-child,.panel>.panel-collapse>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-left-radius:3px;border-top-right-radius:3px}.panel>.list-group:last-child .list-group-item:last-child,.panel>.panel-collapse>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.panel-heading+.panel-collapse>.list-group 
.list-group-item:first-child{border-top-left-radius:0;border-top-right-radius:0}.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.list-group+.panel-footer{border-top-width:0}.panel>.panel-collapse>.table,.panel>.table,.panel>.table-responsive>.table{margin-bottom:0}.panel>.panel-collapse>.table caption,.panel>.table caption,.panel>.table-responsive>.table caption{padding-right:15px;padding-left:15px}.panel>.table-responsive:first-child>.table:first-child,.panel>.table:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child,.panel>.table:first-child>thead:first-child>tr:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child{border-top-left-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child 
td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child{border-top-right-radius:3px}.panel>.table-responsive:last-child>.table:last-child,.panel>.table:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child 
td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:3px}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive,.panel>.table+.panel-body,.panel>.table-responsive+.panel-body{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child td,.panel>.table>tbody:first-child>tr:first-child th{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-respon
sive>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th{border-bottom:0}.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}.panel>.table-responsive{margin-bottom:0;border:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:4px}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading{border-bottom:0}.panel-group .panel-heading+.panel-collapse>.list-group,.panel-group .panel-heading+.panel-collapse>.panel-body{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#333;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ddd}.panel-default>.panel-heading 
.badge{color:#f5f5f5;background-color:#333}.panel-default>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#337ab7}.panel-primary>.panel-heading{color:#fff;background-color:#337ab7;border-color:#337ab7}.panel-primary>.panel-heading+.panel-collapse>.panel-body{border-top-color:#337ab7}.panel-primary>.panel-heading .badge{color:#337ab7;background-color:#fff}.panel-primary>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#337ab7}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse>.panel-body{border-top-color:#d6e9c6}.panel-success>.panel-heading .badge{color:#dff0d8;background-color:#3c763d}.panel-success>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse>.panel-body{border-top-color:#bce8f1}.panel-info>.panel-heading .badge{color:#d9edf7;background-color:#31708f}.panel-info>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse>.panel-body{border-top-color:#faebcc}.panel-warning>.panel-heading .badge{color:#fcf8e3;background-color:#8a6d3b}.panel-warning>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ebccd1}.panel-danger>.panel-heading 
.badge{color:#f2dede;background-color:#a94442}.panel-danger>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ebccd1}.embed-responsive{position:relative;display:block;height:0;padding:0;overflow:hidden}.embed-responsive .embed-responsive-item,.embed-responsive embed,.embed-responsive iframe,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-16by9{padding-bottom:56.25%}.embed-responsive-4by3{padding-bottom:75%}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px rgba(0,0,0,.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,.15)}.well-lg{padding:24px;border-radius:6px}.well-sm{padding:9px;border-radius:3px}.close{float:right;font-size:21px;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;filter:alpha(opacity=20);opacity:.2}.close:focus,.close:hover{color:#000;text-decoration:none;cursor:pointer;filter:alpha(opacity=50);opacity:.5}button.close{-webkit-appearance:none;padding:0;cursor:pointer;background:0 0;border:0}.modal-open{overflow:hidden}.modal{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;display:none;overflow:hidden;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transition:-webkit-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out;-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);-o-transform:translate(0,-25%);transform:translate(0,-25%)}.modal.in .modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);-o-transform:translate(0,0);transform:translate(0,0)}.modal-open 
.modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #999;border:1px solid rgba(0,0,0,.2);border-radius:6px;outline:0;-webkit-box-shadow:0 3px 9px rgba(0,0,0,.5);box-shadow:0 3px 9px rgba(0,0,0,.5)}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{filter:alpha(opacity=0);opacity:0}.modal-backdrop.in{filter:alpha(opacity=50);opacity:.5}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,.5);box-shadow:0 5px 15px rgba(0,0,0,.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1070;display:block;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:12px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;filter:alpha(opacity=0);opacity:0;line-break:auto}.tooltip.in{filter:alpha(opacity=90);opacity:.9}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 
5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{right:5px;bottom:0;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-right .tooltip-arrow{bottom:0;left:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-left .tooltip-arrow{top:0;right:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-right .tooltip-arrow{top:0;left:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px rgba(0,0,0,.2);line-break:auto}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 
14px;margin:0;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover>.arrow{border-width:11px}.popover>.arrow:after{content:"";border-width:10px}.popover.top>.arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,.25);border-bottom-width:0}.popover.top>.arrow:after{bottom:1px;margin-left:-10px;content:" ";border-top-color:#fff;border-bottom-width:0}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,.25);border-left-width:0}.popover.right>.arrow:after{bottom:-10px;left:1px;content:" ";border-right-color:#fff;border-left-width:0}.popover.bottom>.arrow{top:-11px;left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,.25)}.popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,.25)}.popover.left>.arrow:after{right:1px;bottom:-10px;content:" ";border-right-width:0;border-left-color:#fff}.carousel{position:relative}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>a>img,.carousel-inner>.item>img{line-height:1}@media all and (transform-3d),(-webkit-transform-3d){.carousel-inner>.item{-webkit-transition:-webkit-transform .6s ease-in-out;-o-transition:-o-transform .6s ease-in-out;transition:transform .6s 
ease-in-out;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-inner>.item.active.right,.carousel-inner>.item.next{left:0;-webkit-transform:translate3d(100%,0,0);transform:translate3d(100%,0,0)}.carousel-inner>.item.active.left,.carousel-inner>.item.prev{left:0;-webkit-transform:translate3d(-100%,0,0);transform:translate3d(-100%,0,0)}.carousel-inner>.item.active,.carousel-inner>.item.next.left,.carousel-inner>.item.prev.right{left:0;-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;bottom:0;left:0;width:15%;font-size:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6);background-color:rgba(0,0,0,0);filter:alpha(opacity=50);opacity:.5}.carousel-control.left{background-image:-webkit-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.5)),to(rgba(0,0,0,.0001)));background-image:linear-gradient(to right,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1);background-repeat:repeat-x}.carousel-control.right{right:0;left:auto;background-image:-webkit-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-webkit-gradient(linear,left top,right 
top,from(rgba(0,0,0,.0001)),to(rgba(0,0,0,.5)));background-image:linear-gradient(to right,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1);background-repeat:repeat-x}.carousel-control:focus,.carousel-control:hover{color:#fff;text-decoration:none;filter:alpha(opacity=90);outline:0;opacity:.9}.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{position:absolute;top:50%;z-index:5;display:inline-block;margin-top:-10px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{left:50%;margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{right:50%;margin-right:-10px}.carousel-control .icon-next,.carousel-control .icon-prev{width:20px;height:20px;font-family:serif;line-height:1}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;padding-left:0;margin-left:-30%;text-align:center;list-style:none}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;cursor:pointer;background-color:#000;background-color:rgba(0,0,0,0);border:1px solid #fff;border-radius:10px}.carousel-indicators .active{width:12px;height:12px;margin:0;background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6)}.carousel-caption .btn{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{width:30px;height:30px;margin-top:-10px;font-size:30px}.carousel-control .glyphicon-chevron-left,.carousel-control 
.icon-prev{margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{margin-right:-10px}.carousel-caption{right:20%;left:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.btn-group-vertical>.btn-group:after,.btn-group-vertical>.btn-group:before,.btn-toolbar:after,.btn-toolbar:before,.clearfix:after,.clearfix:before,.container-fluid:after,.container-fluid:before,.container:after,.container:before,.dl-horizontal dd:after,.dl-horizontal dd:before,.form-horizontal .form-group:after,.form-horizontal .form-group:before,.modal-footer:after,.modal-footer:before,.modal-header:after,.modal-header:before,.nav:after,.nav:before,.navbar-collapse:after,.navbar-collapse:before,.navbar-header:after,.navbar-header:before,.navbar:after,.navbar:before,.pager:after,.pager:before,.panel-body:after,.panel-body:before,.row:after,.row:before{display:table;content:" "}.btn-group-vertical>.btn-group:after,.btn-toolbar:after,.clearfix:after,.container-fluid:after,.container:after,.dl-horizontal dd:after,.form-horizontal .form-group:after,.modal-footer:after,.modal-header:after,.nav:after,.navbar-collapse:after,.navbar-header:after,.navbar:after,.pager:after,.panel-body:after,.row:after{clear:both}.center-block{display:block;margin-right:auto;margin-left:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none!important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-lg,.visible-md,.visible-sm,.visible-xs{display:none!important}.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block{display:none!important}@media 
(max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table!important}tr.visible-xs{display:table-row!important}td.visible-xs,th.visible-xs{display:table-cell!important}}@media (max-width:767px){.visible-xs-block{display:block!important}}@media (max-width:767px){.visible-xs-inline{display:inline!important}}@media (max-width:767px){.visible-xs-inline-block{display:inline-block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table!important}tr.visible-sm{display:table-row!important}td.visible-sm,th.visible-sm{display:table-cell!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-block{display:block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline{display:inline!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline-block{display:inline-block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table!important}tr.visible-md{display:table-row!important}td.visible-md,th.visible-md{display:table-cell!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-block{display:block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline{display:inline!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline-block{display:inline-block!important}}@media (min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table!important}tr.visible-lg{display:table-row!important}td.visible-lg,th.visible-lg{display:table-cell!important}}@media (min-width:1200px){.visible-lg-block{display:block!important}}@media (min-width:1200px){.visible-lg-inline{display:inline!important}}@media (min-width:1200px){.visible-lg-inline-block{display:inline-block!important}}@media (max-width:767px){.hidden-xs{display:none!important}}@media (min-width:768px) and 
(max-width:991px){.hidden-sm{display:none!important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none!important}}@media (min-width:1200px){.hidden-lg{display:none!important}}.visible-print{display:none!important}@media print{.visible-print{display:block!important}table.visible-print{display:table!important}tr.visible-print{display:table-row!important}td.visible-print,th.visible-print{display:table-cell!important}}.visible-print-block{display:none!important}@media print{.visible-print-block{display:block!important}}.visible-print-inline{display:none!important}@media print{.visible-print-inline{display:inline!important}}.visible-print-inline-block{display:none!important}@media print{.visible-print-inline-block{display:inline-block!important}}@media print{.hidden-print{display:none!important}} -/*# sourceMappingURL=bootstrap.min.css.map */ \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/_static/css/training.css b/tutorials/training/source_zh_cn/_static/css/training.css deleted file mode 100644 index 0fe812a12367337c37dffb5f4cfcafc2ba6fdb49..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/_static/css/training.css +++ /dev/null @@ -1,117 +0,0 @@ -.doc-filter-btn { - border: 1px solid #BFBFBF; - margin-right: 0.5rem; - font-size: 0.7rem; - color: #444444; - background-color: white; - width: 4.2rem; - height: 1.7rem; - text-align: left; - position: relative; - -} -.doc-stage-detail button{ - margin-bottom: 0.5rem; -} -button.doc-btn{ - background-color: transparent; - outline: none; -} -.doc-btn-color{ - border: 1px solid #379BE6; - color: #379BE6; -} -.doc-btn-hover{ - border: 1px solid #379BE6; - color: #379BE6; -} -.doc-article-list{ - margin-top: 1.1rem; -} -.doc-article-item{ - padding:2.5rem 2.5rem; - margin-bottom: 1.3rem; - border:1px solid #e5e5e5; - border-radius:0.5rem; - width: 1140px; - box-shadow: 0 0 30px 2px rgba(199,196,196,0.50) -} -.doc-article-item a{ - display:block; - 
text-decoration:none!important; -} -.doc-article-head{ - color: #444444; - font-size:0.9rem; - font-weight:bold; - margin-bottom:0.8rem; - text-align:left; -} -.doc-article-desc{ - font-size:0.7rem; - color:#444444; -} -.doc-footer nav ul li a{ - font-size: 0.7rem; -} -.doc-footer nav ul li span{ - font-size: 0.7rem; -} -.doc-title{ - font-size: 1.6rem; - color: #444444; - font-weight: bold; - margin-bottom: 2.2rem; -} -.doc-filter{ - font-size: 0.7rem; - color: #666666; -} -.doc-delete{ - font-size: 0.7rem; - color: #379BE6; - float: right; -} -.doc-condition{ - margin-bottom: 2rem; -} -.doc-label-choice{ - font-size: 0.7rem; - margin-bottom: 0.53rem; -} -.doc-os{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-hardware{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-user{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-stage{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-experience{ - font-weight: bold; - padding-top: 0.3rem; -} -.doc-label-content{ - margin-bottom: 2.2rem; -} -div.col-sm-10{ - padding-left: 2.5rem; -} -.container{ - margin-top: 1rem; - margin-left: -15px; -} -#all{ - border: none; - background-color: transparent; - outline: none; -} - diff --git a/tutorials/training/source_zh_cn/_static/img/choice.png b/tutorials/training/source_zh_cn/_static/img/choice.png deleted file mode 100644 index 5fb06488a24489616b937778c06af9e8d409046b..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/_static/img/choice.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/_static/js/training.js b/tutorials/training/source_zh_cn/_static/js/training.js deleted file mode 100644 index df20ecced73f2d61dadbc5b5244c2ca78aa54a8b..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/_static/js/training.js +++ /dev/null @@ -1,262 +0,0 @@ -$(function() { - $("button.doc-btn").hover(function(){ - - //移入事件 - $(this).addClass('doc-btn-hover') - },function(){ - //移出事件 - 
$(this).removeClass('doc-btn-hover'); - }) - // 每页显示数 - var curNum = 8 - // 计算总数 - var all = $('.doc-article-list').children('div.doc-article-item').length; - - var list = [] - - - - $('button.doc-btn').click(function() { - $('.doc-article-item').removeClass('OUO'); - var id_val = $(this).attr('id') - if (id_val !== 'all') { - if ($('#all').hasClass('doc-btn-color')) { - $('#all').removeClass('doc-btn-color').find('img').remove(); - list.splice(list.indexOf('all_exist'), 1); - $('.doc-article-item').removeClass('all_exist'); - } - } else { - $('button.doc-btn-color').each(function() { - var tag = $(this).attr('id'); - $('.' + tag).removeClass(tag + '_exist'); - list.splice(list.indexOf(tag + '_exist'), 1); - }); - - $('button.doc-btn-color').removeClass('doc-btn-color').find('img').remove(); - } - if ($(this).hasClass('doc-btn-color')) { - $(this).removeClass('doc-btn-color').find('img').remove(); - $('.' + id_val).removeClass(id_val + '_exist'); - list.splice(list.indexOf(id_val + '_exist'), 1); - - } else { - if(id_val == 'all'){ - $(this).addClass('doc-btn-color'); - $('.' + id_val).addClass(id_val + '_exist'); - list.push(id_val + '_exist'); - }else{ - $(this).addClass('doc-btn-color').append(''); - $('.' 
+ id_val).addClass(id_val + '_exist'); - list.push(id_val + '_exist'); - } - - } - - if(list.length > 0){ - var os_list = []; - var hardware_list = []; - var user_list = []; - var stage_list = []; - var experience_list = []; - var all_list = []; - var hasWindows = false; - var hasCpu = false; - - $('.doc-article-item').addClass('hidden'); - var str = 'OUO'; - for(var i=0;i -1) { - hasWindows = true; - } - }else if (list[i].indexOf('hardware') == 0){ - hardware_list.push(list[i]); - if (list[i].indexOf('CPU') > -1) { - hasCpu = true; - } - }else if (list[i].indexOf('user') == 0){ - user_list.push(list[i]); - }else if (list[i].indexOf('stage') == 0){ - stage_list.push(list[i]); - }else if (list[i].indexOf('experience') == 0){ - experience_list.push(list[i]); - }else{ - all_list.push(list[i]); - } - } - - if(!((os_list.length === 1 && hasWindows) && (hardware_list.length && !hasCpu))) { - $('.doc-article-item').each(function(){ - var os_count = 0; - var hardware_count = 0; - var user_count = 0; - var stage_count = 0; - var experience_count = 0; - var all_count = 0; - if(os_list.length > 0){ - for(var i=0;i -1){ - os_count += 1; - } - } - }else{ - os_count = 'empty'; - } - - if(hardware_list.length > 0){ - for(var i=0;i -1){ - hardware_count += 1; - } - } - }else{ - hardware_count = 'empty'; - } - - if(user_list.length > 0){ - for(var i=0;i -1){ - user_count += 1; - } - } - }else{ - user_count = 'empty'; - } - - if(stage_list.length > 0){ - for(var i=0;i -1){ - stage_count += 1; - } - } - }else{ - stage_count = 'empty'; - } - - if(experience_list.length > 0){ - for(var i=0;i -1){ - experience_count += 1; - } - } - }else{ - experience_count = 'empty'; - } - - if(all_list.length > 0){ - for(var i=0;i -1){ - all_count += 1; - } - } - }else{ - all_count = 'empty'; - } - - - if(((os_count >0 && os_count <= os_list.length) || os_count=='empty') && ((hardware_count >0 && hardware_count <= hardware_list.length) || hardware_count=='empty') && ((user_count >0 && user_count <= 
user_list.length) || user_count == 'empty') && ((stage_count >0 && stage_count <= stage_list.length) || stage_count == 'empty') && ((experience_count >0 && experience_count <= experience_list.length) || experience_count == 'empty')){ - $(this).removeClass('hidden').addClass(str); - } - }); - } - - }else{ - $('.doc-article-item').addClass('hidden'); - } - - var hidden_num = $('.doc-article-list').children('.doc-article-item.hidden').length; - var all_article = all - hidden_num - // 计算总页数 - var len = Math.ceil((all - hidden_num) / curNum); - // 生成页码 - var pageList = '
  • ' + '共' + all_article + '条' + '
  • ' + '
  • '; - // 当前的索引值 - var iNum = 0; - - for (var i = 0; i < len; i++) { - pageList += '
  • ' + (i + 1) + '
  • ' - } - pageList += '
  • ' - // 首页加亮显示 - if (all_article > 0){ - $('#pageNav').html(pageList).find('li').eq(2).addClass('active'); - }else{ - $('#pageNav').html('
  • ' + '共' + all_article + '条' + '
  • '); - } - - // 标签页的点击事件 - $('#pageNav').find('li.doc-data').each(function() { - $(this).click(function() { - $(this).addClass('active').siblings('li').removeClass('active'); - iNum = $(this).index() - 2; - if(iNum > 0){ - $('li.pre').removeClass('disabled'); - }else{ - $('li.pre').addClass('disabled'); - } - if(iNum+1 == len){ - $('li.nex').addClass('disabled'); - } - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - - }); - }); - if(iNum == 0){ - $('li.pre').addClass('disabled'); - } - - if(iNum+1 == len){ - $('li.nex').addClass('disabled'); - } - // 向前页点击时间 - $('li.pre').click(function(){ - if(iNum > 0){ - iNum -= 1; - if(iNum == 0){ - $(this).addClass('disabled'); - } - $('li.nex').removeClass('disabled'); - $('#pageNav').find('li.doc-data').eq(iNum).addClass('active').siblings('li').removeClass('active'); - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - } - - }); - - // 向后页点击事件 - $('li.nex').click(function(){ - if(iNum+1 < len){ - iNum += 1; - if(iNum+1 == len){ - $(this).addClass('disabled'); - } - $('li.pre').removeClass('disabled'); - $('#pageNav').find('li.doc-data').eq(iNum).addClass('active').siblings('li').removeClass('active'); - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = (iNum * curNum); i < (iNum + 1) * curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show() - } - } - }); - - // 首页的显示 - $('.doc-article-item[class*="' + str + '"]').hide(); - for (var i = 0; i < curNum; i++) { - $('div.doc-article-list').find('.doc-article-item[class*="' + str + '"]').eq(i).show(); - } - - if ($('button.doc-btn-color').length == 0) { - $('#all').trigger('click'); - } - }); 
- - - $('#all').trigger('click'); - -}); diff --git a/tutorials/training/source_zh_cn/_static/logo_modelarts.png b/tutorials/training/source_zh_cn/_static/logo_modelarts.png deleted file mode 100644 index 9b499805e2f8ab52dcde3fd4a7708ef753da9b84..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/_static/logo_modelarts.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/_static/logo_notebook.png b/tutorials/training/source_zh_cn/_static/logo_notebook.png deleted file mode 100644 index f28598315f19f4be76a73ddf5dc6bbdbe4db35fd..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/_static/logo_notebook.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/_static/logo_online_experience.png b/tutorials/training/source_zh_cn/_static/logo_online_experience.png deleted file mode 100644 index 9845ddd10bc6e997be8725e841c16328f4eb9135..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/_static/logo_online_experience.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/_static/logo_source.png b/tutorials/training/source_zh_cn/_static/logo_source.png deleted file mode 100644 index 9932d67ab50871edb0c95979c4e948c812c7cdea..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/_static/logo_source.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/apply_deep_probability_programming.md b/tutorials/training/source_zh_cn/advanced_use/apply_deep_probability_programming.md deleted file mode 100644 index 8379bedfec63701c18d7008f1e387dca60a67467..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/apply_deep_probability_programming.md +++ /dev/null @@ -1,658 +0,0 @@ -# 深度概率编程 - -`Ascend` `GPU` `全流程` `初级` `中级` `高级` - - - -- [深度概率编程](#深度概率编程) - - [概述](#概述) - - [使用贝叶斯神经网络](#使用贝叶斯神经网络) - - [处理数据集](#处理数据集) - - 
[定义贝叶斯神经网络](#定义贝叶斯神经网络) - - [定义损失函数和优化器](#定义损失函数和优化器) - - [训练网络](#训练网络) - - [使用变分自编码器](#使用变分自编码器) - - [定义变分自编码器](#定义变分自编码器) - - [定义损失函数和优化器](#定义损失函数和优化器-1) - - [处理数据](#处理数据) - - [训练网络](#训练网络-1) - - [生成新样本或重构输入样本](#生成新样本或重构输入样本) - - [DNN一键转换成BNN](#dnn一键转换成bnn) - - [定义DNN模型](#定义dnn模型) - - [定义损失函数和优化器](#定义损失函数和优化器-2) - - [实例化TransformToBNN](#实例化transformtobnn) - - [实现功能一:转换整个模型](#实现功能一转换整个模型) - - [实现功能二:转换指定类型的层](#实现功能二转换指定类型的层) - - [使用不确定性估计工具箱](#使用不确定性估计工具箱) - - - - - -## 概述 - -深度学习模型具有强大的拟合能力,而贝叶斯理论具有很好的可解释能力。MindSpore深度概率编程(MindSpore Deep Probabilistic Programming, MDP)将深度学习和贝叶斯学习结合,通过设置网络权重为分布、引入隐空间分布等,可以对分布进行采样前向传播,由此引入了不确定性,从而增强了模型的鲁棒性和可解释性。MDP不仅包含通用、专业的概率学习编程语言,适用于“专业”用户,而且支持使用开发深度学习模型的逻辑进行概率编程,让初学者轻松上手;此外,还提供深度概率学习的工具箱,拓展贝叶斯应用功能。 - -本章将详细介绍深度概率编程在MindSpore上的应用。在动手进行实践之前,确保,你已经正确安装了MindSpore 0.7.0-beta及其以上版本。本章的具体内容如下: - -1. 介绍如何使用[bnn_layers模块](https://gitee.com/mindspore/mindspore/tree/master/mindspore/nn/probability/bnn_layers)实现贝叶斯神经网络(Bayesian Neural Network, BNN); -2. 介绍如何使用[variational模块](https://gitee.com/mindspore/mindspore/tree/master/mindspore/nn/probability/infer/variational)和[dpn模块](https://gitee.com/mindspore/mindspore/tree/master/mindspore/nn/probability/dpn)实现变分自编码器(Variational AutoEncoder, VAE); -3. 介绍如何使用[transforms模块](https://gitee.com/mindspore/mindspore/tree/master/mindspore/nn/probability/transforms)实现DNN(Deep Neural Network, DNN)一键转BNN; -4. 介绍如何使用[toolbox模块](https://gitee.com/mindspore/mindspore/blob/master/mindspore/nn/probability/toolbox/uncertainty_evaluation.py)实现不确定性估计。 - -## 使用贝叶斯神经网络 - -贝叶斯神经网络是由概率模型和神经网络组成的基本模型,它的权重不再是一个确定的值,而是一个分布。本例介绍了如何使用MDP中的bnn_layers模块实现贝叶斯神经网络,并利用贝叶斯神经网络实现一个简单的图片分类功能,整体流程如下: - -1. 处理MNIST数据集; -2. 定义贝叶斯LeNet网络; -3. 定义损失函数和优化器; -4. 
加载数据集并进行训练。 - -> 本例面向GPU或Ascend 910 AI处理器平台,你可以在这里下载完整的样例代码:。 -> 贝叶斯神经网络目前只支持图模式,需要在代码中设置`context.set_context(mode=context.GRAPH_MODE)`。 - -### 处理数据集 - -本例子使用的是MNIST数据集,数据处理过程与教程中的[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)一致。 - -### 定义贝叶斯神经网络 - -本例使用的是Bayesian LeNet。利用bnn_layers模块构建贝叶斯神经网络的方法与构建普通的神经网络相同。值得注意的是,`bnn_layers`和普通的神经网络层可以互相组合。 - -```python -import mindspore.nn as nn -from mindspore.nn.probability import bnn_layers -import mindspore.ops as ops - -class BNNLeNet5(nn.Cell): - """ - bayesian Lenet network - - Args: - num_class (int): Num classes. Default: 10. - - Returns: - Tensor, output tensor - Examples: - >>> BNNLeNet5(num_class=10) - - """ - def __init__(self, num_class=10): - super(BNNLeNet5, self).__init__() - self.num_class = num_class - self.conv1 = bnn_layers.ConvReparam(1, 6, 5, stride=1, padding=0, has_bias=False, pad_mode="valid") - self.conv2 = bnn_layers.ConvReparam(6, 16, 5, stride=1, padding=0, has_bias=False, pad_mode="valid") - self.fc1 = bnn_layers.DenseReparam(16 * 5 * 5, 120) - self.fc2 = bnn_layers.DenseReparam(120, 84) - self.fc3 = bnn_layers.DenseReparam(84, self.num_class) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - self.reshape = ops.Reshape() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x -``` - -### 定义损失函数和优化器 - -接下来需要定义损失函数(Loss)和优化器(Optimizer)。损失函数是深度学习的训练目标,也叫目标函数,可以理解为神经网络的输出(Logits)和标签(Labels)之间的距离,是一个标量数据。 - -常见的损失函数包括均方误差、L2损失、Hinge损失、交叉熵等等。图像分类应用通常采用交叉熵损失(CrossEntropy)。 - -优化器用于神经网络求解(训练)。由于神经网络参数规模庞大,无法直接求解,因而深度学习中采用随机梯度下降算法(SGD)及其改进算法进行求解。MindSpore封装了常见的优化器,如`SGD`、`Adam`、`Momemtum`等等。本例采用`Adam`优化器,通常需要设定两个参数,学习率(`learning_rate`)和权重衰减项(`weight_decay`)。 - 
-MindSpore中定义损失函数和优化器的代码样例如下: - -```python -# loss function definition -criterion = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - -# optimization definition -optimizer = AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001) -``` - -### 训练网络 - -贝叶斯神经网络的训练过程与DNN基本相同,唯一不同的是将`WithLossCell`替换为适用于BNN的`WithBNNLossCell`。除了`backbone`和`loss_fn`两个参数之外,`WithBNNLossCell`增加了`dnn_factor`和`bnn_factor`两个参数。`dnn_factor`是由损失函数计算得到的网络整体损失的系数,`bnn_factor`是每个贝叶斯层的KL散度的系数,这两个参数是用来平衡网络整体损失和贝叶斯层的KL散度的,防止KL散度的值过大掩盖了网络整体损失。 - -```python -net_with_loss = bnn_layers.WithBNNLossCell(network, criterion, dnn_factor=60000, bnn_factor=0.000001) -train_bnn_network = TrainOneStepCell(net_with_loss, optimizer) -train_bnn_network.set_train() - -train_set = create_dataset('./mnist_data/train', 64, 1) -test_set = create_dataset('./mnist_data/test', 64, 1) - -epoch = 10 - -for i in range(epoch): - train_loss, train_acc = train_model(train_bnn_network, network, train_set) - - valid_acc = validate_model(network, test_set) - - print('Epoch: {} \tTraining Loss: {:.4f} \tTraining Accuracy: {:.4f} \tvalidation Accuracy: {:.4f}'. 
- format(i, train_loss, train_acc, valid_acc)) -``` - -其中,`train_model`和`validate_model`在MindSpore中的代码样例如下: - -```python -def train_model(train_net, net, dataset): - accs = [] - loss_sum = 0 - for _, data in enumerate(dataset.create_dict_iterator()): - train_x = Tensor(data['image'].asnumpy().astype(np.float32)) - label = Tensor(data['label'].asnumpy().astype(np.int32)) - loss = train_net(train_x, label) - output = net(train_x) - log_output = ops.LogSoftmax(axis=1)(output) - acc = np.mean(log_output.asnumpy().argmax(axis=1) == label.asnumpy()) - accs.append(acc) - loss_sum += loss.asnumpy() - - loss_sum = loss_sum / len(accs) - acc_mean = np.mean(accs) - return loss_sum, acc_mean - - -def validate_model(net, dataset): - accs = [] - for _, data in enumerate(dataset.create_dict_iterator()): - train_x = Tensor(data['image'].asnumpy().astype(np.float32)) - label = Tensor(data['label'].asnumpy().astype(np.int32)) - output = net(train_x) - log_output = ops.LogSoftmax(axis=1)(output) - acc = np.mean(log_output.asnumpy().argmax(axis=1) == label.asnumpy()) - accs.append(acc) - - acc_mean = np.mean(accs) - return acc_mean -``` - -## 使用变分自编码器 - -接下来介绍如何使用MDP中的variational模块和dpn模块实现变分自编码器。变分自编码器是经典的应用了变分推断的深度概率模型,用来学习潜在变量的表示,通过该模型,不仅可以压缩输入数据,还可以生成该类型的新图像。本例的整体流程如下: - -1. 定义变分自编码器; -2. 定义损失函数和优化器; -3. 处理数据; -4. 训练网络; -5. 
生成新样本或重构输入样本。 - -> 本例面向GPU或Ascend 910 AI处理器平台,你可以在这里下载完整的样例代码: - -### 定义变分自编码器 - -使用dpn模块来构造变分自编码器尤为简单,你只需要自定义编码器和解码器(DNN模型),调用`VAE`接口即可。 - -```python -class Encoder(nn.Cell): - def __init__(self): - super(Encoder, self).__init__() - self.fc1 = nn.Dense(1024, 800) - self.fc2 = nn.Dense(800, 400) - self.relu = nn.ReLU() - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - return x - - -class Decoder(nn.Cell): - def __init__(self): - super(Decoder, self).__init__() - self.fc1 = nn.Dense(400, 1024) - self.sigmoid = nn.Sigmoid() - self.reshape = ops.Reshape() - - def construct(self, z): - z = self.fc1(z) - z = self.reshape(z, IMAGE_SHAPE) - z = self.sigmoid(z) - return z - - -encoder = Encoder() -decoder = Decoder() -vae = VAE(encoder, decoder, hidden_size=400, latent_size=20) -``` - -### 定义损失函数和优化器 - -接下来需要定义损失函数(Loss)和优化器(Optimizer)。本例使用的损失函数是`ELBO`,`ELBO`是变分推断专用的损失函数;本例使用的优化器是`Adam`。 -MindSpore中定义损失函数和优化器的代码样例如下: - -```python -# loss function definition -net_loss = ELBO(latent_prior='Normal', output_prior='Normal') - -# optimization definition -optimizer = nn.Adam(params=vae.trainable_params(), learning_rate=0.001) - -net_with_loss = nn.WithLossCell(vae, net_loss) -``` - -### 处理数据 - -本例使用的是MNIST数据集,数据处理过程与教程中的[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)一致。 - -### 训练网络 - -使用variational模块中的`SVI`接口对VAE网络进行训练。 - -```python -from mindspore.nn.probability.infer import SVI - -vi = SVI(net_with_loss=net_with_loss, optimizer=optimizer) -vae = vi.run(train_dataset=ds_train, epochs=10) -trained_loss = vi.get_train_loss() -``` - -通过`vi.run`可以得到训练好的网络,使用`vi.get_train_loss`可以得到训练之后的损失。 - -### 生成新样本或重构输入样本 - -利用训练好的VAE网络,我们可以生成新的样本或重构输入样本。 - -```python -IMAGE_SHAPE = (-1, 1, 32, 32) -generated_sample = vae.generate_sample(64, IMAGE_SHAPE) -for sample in ds_train.create_dict_iterator(): - sample_x = 
Tensor(sample['image'].asnumpy(), dtype=mstype.float32) - reconstructed_sample = vae.reconstruct_sample(sample_x) -``` - -## DNN一键转换成BNN - -对于不熟悉贝叶斯模型的DNN研究人员,MDP提供了高级API`TransformToBNN`,支持DNN模型一键转换成BNN模型。目前在LeNet,ResNet,MobileNet,VGG等模型上验证了API的通用性。本例将会介绍如何使用transforms模块中的`TransformToBNN`API实现DNN一键转换成BNN,整体流程如下: - -1. 定义DNN模型; -2. 定义损失函数和优化器; -3. 实现功能一:转换整个模型; -4. 实现功能二:转换指定类型的层。 - -> 本例面向GPU或Ascend 910 AI处理器平台,你可以在这里下载完整的样例代码: - -### 定义DNN模型 - -本例使用的DNN模型是LeNet。 - -```python -from mindspore.common.initializer import TruncatedNormal -import mindspore.nn as nn -import mindspore.ops as ops - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0): - """weight initial for conv layer""" - weight = weight_variable() - return nn.Conv2d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, padding=padding, - weight_init=weight, has_bias=False, pad_mode="valid") - - -def fc_with_initialize(input_channels, out_channels): - """weight initial for fc layer""" - weight = weight_variable() - bias = weight_variable() - return nn.Dense(input_channels, out_channels, weight, bias) - - -def weight_variable(): - """weight initial""" - return TruncatedNormal(0.02) - - -class LeNet5(nn.Cell): - """ - Lenet network - - Args: - num_class (int): Num classes. Default: 10. 
- - Returns: - Tensor, output tensor - Examples: - >>> LeNet5(num_class=10) - - """ - def __init__(self, num_class=10): - super(LeNet5, self).__init__() - self.num_class = num_class - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16 * 5 * 5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, self.num_class) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - self.reshape = ops.Reshape() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x -``` - -LeNet的网络结构如下: - -```text -LeNet5 - (conv1) Conv2dinput_channels=1, output_channels=6, kernel_size=(5, 5),stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, has_bias=False - (conv2) Conv2dinput_channels=6, output_channels=16, kernel_size=(5, 5),stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, has_bias=False - (fc1) Densein_channels=400, out_channels=120, weight=Parameter (name=fc1.weight), has_bias=True, bias=Parameter (name=fc1.bias) - (fc2) Densein_channels=120, out_channels=84, weight=Parameter (name=fc2.weight), has_bias=True, bias=Parameter (name=fc2.bias) - (fc3) Densein_channels=84, out_channels=10, weight=Parameter (name=fc3.weight), has_bias=True, bias=Parameter (name=fc3.bias) - (relu) ReLU - (max_pool2d) MaxPool2dkernel_size=2, stride=2, pad_mode=VALID - (flatten) Flatten -``` - -### 定义损失函数和优化器 - -接下来需要定义损失函数(Loss)和优化器(Optimizer)。本例使用交叉熵损失作为损失函数,`Adam`作为优化器。 - -```python -network = LeNet5() - -# loss function definition -criterion = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - -# optimization definition -optimizer = AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001) - 
-net_with_loss = WithLossCell(network, criterion) -train_network = TrainOneStepCell(net_with_loss, optimizer) -``` - -### 实例化TransformToBNN - -`TransformToBNN`的`__init__`函数定义如下: - -```python -class TransformToBNN: - def __init__(self, trainable_dnn, dnn_factor=1, bnn_factor=1): - net_with_loss = trainable_dnn.network - self.optimizer = trainable_dnn.optimizer - self.backbone = net_with_loss.backbone_network - self.loss_fn = getattr(net_with_loss, "_loss_fn") - self.dnn_factor = dnn_factor - self.bnn_factor = bnn_factor - self.bnn_loss_file = None -``` - -参数`trainable_bnn`是经过`TrainOneStepCell`包装的可训练DNN模型,`dnn_factor`和`bnn_factor`分别为由损失函数计算得到的网络整体损失的系数和每个贝叶斯层的KL散度的系数。 -MindSpore中实例化`TransformToBNN`的代码如下: - -```python -from mindspore.nn.probability import transforms - -bnn_transformer = transforms.TransformToBNN(train_network, 60000, 0.000001) -``` - -### 实现功能一:转换整个模型 - -`transform_to_bnn_model`方法可以将整个DNN模型转换为BNN模型。其定义如下: - -```python - def transform_to_bnn_model(self, - get_dense_args=lambda dp: {"in_channels": dp.in_channels, "has_bias": dp.has_bias, - "out_channels": dp.out_channels, "activation": dp.activation}, - get_conv_args=lambda dp: {"in_channels": dp.in_channels, "out_channels": dp.out_channels, - "pad_mode": dp.pad_mode, "kernel_size": dp.kernel_size, - "stride": dp.stride, "has_bias": dp.has_bias, - "padding": dp.padding, "dilation": dp.dilation, - "group": dp.group}, - add_dense_args=None, - add_conv_args=None): - r""" - Transform the whole DNN model to BNN model, and wrap BNN model by TrainOneStepCell. - - Args: - get_dense_args (function): The arguments gotten from the DNN full connection layer. Default: lambda dp: - {"in_channels": dp.in_channels, "out_channels": dp.out_channels, "has_bias": dp.has_bias}. - get_conv_args (function): The arguments gotten from the DNN convolutional layer. 
Default: lambda dp: - {"in_channels": dp.in_channels, "out_channels": dp.out_channels, "pad_mode": dp.pad_mode, - "kernel_size": dp.kernel_size, "stride": dp.stride, "has_bias": dp.has_bias}. - add_dense_args (dict): The new arguments added to BNN full connection layer. Default: {}. - add_conv_args (dict): The new arguments added to BNN convolutional layer. Default: {}. - - Returns: - Cell, a trainable BNN model wrapped by TrainOneStepCell. - """ -``` - -参数`get_dense_args`指定从DNN模型的全连接层中获取哪些参数,`get_conv_args`指定从DNN模型的卷积层中获取哪些参数,参数`add_dense_args`和`add_conv_args`分别指定了要为BNN层指定哪些新的参数值。需要注意的是,`add_dense_args`中的参数不能与`get_dense_args`重复,`add_conv_args`和`get_conv_args`也是如此。 - -在MindSpore中将整个DNN模型转换成BNN模型的代码如下: - -```python -train_bnn_network = bnn_transformer.transform_to_bnn_model() -``` - -整个模型转换后的结构如下: - -```text -LeNet5 - (conv1) ConvReparam - in_channels=1, out_channels=6, kernel_size=(5, 5), stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, weight_mean=Parameter (name=conv1.weight_posterior.mean), weight_std=Parameter (name=conv1.weight_posterior.untransformed_std), has_bias=False - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (conv2) ConvReparam - in_channels=6, out_channels=16, kernel_size=(5, 5), stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, weight_mean=Parameter (name=conv2.weight_posterior.mean), weight_std=Parameter (name=conv2.weight_posterior.untransformed_std), has_bias=False - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (fc1) DenseReparam - in_channels=400, out_channels=120, weight_mean=Parameter (name=fc1.weight_posterior.mean), weight_std=Parameter (name=fc1.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter (name=fc1.bias_posterior.mean), bias_std=Parameter 
(name=fc1.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (fc2) DenseReparam - in_channels=120, out_channels=84, weight_mean=Parameter (name=fc2.weight_posterior.mean), weight_std=Parameter (name=fc2.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter (name=fc2.bias_posterior.mean), bias_std=Parameter (name=fc2.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (fc3) DenseReparam - in_channels=84, out_channels=10, weight_mean=Parameter (name=fc3.weight_posterior.mean), weight_std=Parameter (name=fc3.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter (name=fc3.bias_posterior.mean), bias_std=Parameter (name=fc3.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (relu) ReLU - (max_pool2d) MaxPool2dkernel_size=2, stride=2, pad_mode=VALID - (flatten) Flatten -``` - -可以看到,整个LeNet网络中的卷积层和全连接层都转变成了相应的贝叶斯层。 - -### 实现功能二:转换指定类型的层 - -`transform_to_bnn_layer`方法可以将DNN模型中指定类型的层(nn.Dense或者nn.Conv2d)转换为对应的贝叶斯层。其定义如下: - -```python - def transform_to_bnn_layer(self, dnn_layer, bnn_layer, get_args=None, add_args=None): - r""" - Transform a specific 
type of layers in DNN model to corresponding BNN layer. - - Args: - dnn_layer_type (Cell): The type of DNN layer to be transformed to BNN layer. The optional values are - nn.Dense, nn.Conv2d. - bnn_layer_type (Cell): The type of BNN layer to be transformed to. The optional values are - DenseReparameterization, ConvReparameterization. - get_args (dict): The arguments gotten from the DNN layer. Default: None. - add_args (dict): The new arguments added to BNN layer. Default: None. - - Returns: - Cell, a trainable model wrapped by TrainOneStepCell, whose sprcific type of layer is transformed to the corresponding bayesian layer. - """ -``` - -参数`dnn_layer`指定将哪个类型的DNN层转换成BNN层,`bnn_layer`指定DNN层将转换成哪个类型的BNN层,`get_args`和`add_args`分别指定从DNN层中获取哪些参数和要为BNN层的哪些参数重新赋值。 - -在MindSpore中将DNN模型中的Dense层转换成相应贝叶斯层`DenseReparam`的代码如下: - -```python -train_bnn_network = bnn_transformer.transform_to_bnn_layer(nn.Dense, bnn_layers.DenseReparam) -``` - -转换后网络的结构如下: - -```text -LeNet5 - (conv1) Conv2dinput_channels=1, output_channels=6, kernel_size=(5, 5),stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, has_bias=False - (conv2) Conv2dinput_channels=6, output_channels=16, kernel_size=(5, 5),stride=(1, 1), pad_mode=valid, padding=0, dilation=(1, 1), group=1, has_bias=False - (fc1) DenseReparam - in_channels=400, out_channels=120, weight_mean=Parameter (name=fc1.weight_posterior.mean), weight_std=Parameter (name=fc1.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter (name=fc1.bias_posterior.mean), bias_std=Parameter (name=fc1.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (fc2) DenseReparam - in_channels=120, out_channels=84, weight_mean=Parameter 
(name=fc2.weight_posterior.mean), weight_std=Parameter (name=fc2.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter (name=fc2.bias_posterior.mean), bias_std=Parameter (name=fc2.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (fc3) DenseReparam - in_channels=84, out_channels=10, weight_mean=Parameter (name=fc3.weight_posterior.mean), weight_std=Parameter (name=fc3.weight_posterior.untransformed_std), has_bias=True, bias_mean=Parameter (name=fc3.bias_posterior.mean), bias_std=Parameter (name=fc3.bias_posterior.untransformed_std) - (weight_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (weight_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - (bias_prior) NormalPrior - (normal) Normalmean = 0.0, standard deviation = 0.1 - - (bias_posterior) NormalPosterior - (normal) Normalbatch_shape = None - - - (relu) ReLU - (max_pool2d) MaxPool2dkernel_size=2, stride=2, pad_mode=VALID - (flatten) Flatten -``` - -可以看到,LeNet网络中的卷积层保持不变,全连接层变成了对应的贝叶斯层`DenseReparam`。 - -## 使用不确定性估计工具箱 - -贝叶斯神经网络的优势之一就是可以获取不确定性,MDP在上层提供了不确定性估计的工具箱,用户可以很方便地使用该工具箱计算不确定性。不确定性意味着深度学习模型对预测结果的不确定程度。目前,大多数深度学习算法只能给出预测结果,而不能判断预测结果的可靠性。不确定性主要有两种类型:偶然不确定性和认知不确定性。 - -- 偶然不确定性(Aleatoric Uncertainty):描述数据中的内在噪声,即无法避免的误差,这个现象不能通过增加采样数据来削弱。 -- 认知不确定性(Epistemic Uncertainty):模型自身对输入数据的估计可能因为训练不佳、训练数据不够等原因而不准确,可以通过增加训练数据等方式来缓解。 - -不确定性估计工具箱,适用于主流的深度学习模型,如回归、分类等。在推理阶段,利用不确定性估计工具箱,开发人员只需通过训练模型和训练数据集,指定需要估计的任务和样本,即可得到偶然不确定性和认知不确定性。基于不确定性信息,开发人员可以更好地理解模型和数据集。 -> 本例面向GPU或Ascend 910 AI处理器平台,你可以在这里下载完整的样例代码: - 
-以分类任务为例,本例中使用的模型是LeNet,数据集为MNIST,数据处理过程与教程中的[实现一个图片分类应用](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html)一致。为了评估测试示例的不确定性,使用工具箱的方法如下: - -```python -from mindspore.nn.probability.toolbox.uncertainty_evaluation import UncertaintyEvaluation -from mindspore import load_checkpoint, load_param_into_net - -network = LeNet5() -param_dict = load_checkpoint('checkpoint_lenet.ckpt') -load_param_into_net(network, param_dict) -# get train and eval dataset -ds_train = create_dataset('workspace/mnist/train') -ds_eval = create_dataset('workspace/mnist/test') -evaluation = UncertaintyEvaluation(model=network, - train_dataset=ds_train, - task_type='classification', - num_classes=10, - epochs=1, - epi_uncer_model_path=None, - ale_uncer_model_path=None, - save_model=False) -for eval_data in ds_eval.create_dict_iterator(): - eval_data = Tensor(eval_data['image'].asnumpy(), mstype.float32) - epistemic_uncertainty = evaluation.eval_epistemic_uncertainty(eval_data) - aleatoric_uncertainty = evaluation.eval_aleatoric_uncertainty(eval_data) -``` diff --git a/tutorials/training/source_zh_cn/advanced_use/apply_gradient_accumulation.md b/tutorials/training/source_zh_cn/advanced_use/apply_gradient_accumulation.md deleted file mode 100644 index 2216ced7242cf37b986517f9450372e2116bcaa3..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/apply_gradient_accumulation.md +++ /dev/null @@ -1,625 +0,0 @@ -# 应用梯度累积算法 - -`Linux` `GPU` `模型调优` `中级` `高级` - - - -- [应用梯度累积算法](#应用梯度累积算法) - - [概述](#概述) - - [单机模式](#单机模式) - - [导入需要的库文件](#导入需要的库文件) - - [加载数据集](#加载数据集) - - [定义网络](#定义网络) - - [定义训练流程](#定义训练流程) - - [定义训练模型](#定义训练模型) - - [训练并保存模型](#训练并保存模型) - - [实验结果](#实验结果) - - [并行模式](#并行模式) - - [定义并行训练流程](#定义并行训练流程) - - [定义并行训练模型](#定义并行训练模型) - - [训练模型](#训练模型) - - - - -   - -   - - -## 概述 - -本教程介绍梯度累积的训练方式,目的是为了解决由于内存不足导致某些大型网络无法训练大Batch_size的问题。 - -传统的训练方式是每次计算得到loss和梯度后,直接用所得梯度对参数进行更新。 - 
-与传统的训练方式不同,梯度累积引入Mini-batch的概念,首先对每个Mini-batch的数据计算loss和梯度,但不立即更新模型参数,而是先对所得梯度进行累加,然后在指定数量(N)个Mini-batch之后,用累积后的梯度更新网络参数。下次训练前清空过往累积梯度后重新累加,如此往复。最终目的是为了达到跟直接用N*Mini-batch数据训练几乎同样的效果。 - -本篇教程将分别介绍在单机模式和并行模式下如何实现梯度累积训练。 - -## 单机模式 - -在单机模式下,主要通过将训练流程拆分为正向反向训练、参数更新和累积梯度清理三个部分实现梯度累积。这里以MNIST作为示范数据集,自定义简单模型实现梯度累积需要如下几个步骤。 - -> 你可以在这里下载主要的训练样例代码: - -### 导入需要的库文件 - -下列是我们所需要的公共模块及MindSpore的模块及库文件。 - -```python -import argparse -import os -from collections.abc import Iterable - -import mindspore.nn as nn -from mindspore import ParameterTuple -from mindspore import context, DatasetHelper, save_checkpoint -from mindspore.nn import Cell -import mindspore.ops as ops -from model_zoo.official.cv.lenet.src.dataset import create_dataset -from model_zoo.official.cv.lenet.src.lenet import LeNet5 -``` - -### 加载数据集 - -利用MindSpore的`dataset`提供的`MnistDataset`接口加载MNIST数据集,此部分代码由`model_zoo`中`lenet`目录下的[dataset.py](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/lenet/src/dataset.py)导入。 - -### 定义网络 - -这里以LeNet网络为例进行介绍,当然也可以使用其它的网络,如ResNet-50、BERT等, 此部分代码由`model_zoo`中`lenet`目录下的[lenet.py](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/lenet/src/lenet.py)导入。 - -### 定义训练流程 - -将训练流程拆分为正向反向训练、参数更新和累积梯度清理三个部分: - -- `TrainForwardBackward`计算loss和梯度,利用grad_sum实现梯度累加。 -- `TrainOptim`实现参数更新。 -- `TrainClear`实现对梯度累加变量grad_sum清零。 - -```python -_sum_op = ops.MultitypeFuncGraph("grad_sum_op") -_clear_op = ops.MultitypeFuncGraph("clear_op") - - -@_sum_op.register("Tensor", "Tensor") -def _cumulative_grad(grad_sum, grad): - """Apply grad sum to cumulative gradient.""" - add = ops.AssignAdd() - return add(grad_sum, grad) - - -@_clear_op.register("Tensor", "Tensor") -def _clear_grad_sum(grad_sum, zero): - """Apply zero to clear grad_sum.""" - success = True - success = ops.depend(success, ops.assign(grad_sum, zero)) - return success - - -class TrainForwardBackward(Cell): - def __init__(self, network, optimizer, grad_sum, sens=1.0): - super(TrainForwardBackward, 
self).__init__(auto_prefix=False) - self.network = network - self.network.set_grad() - self.network.add_flags(defer_inline=True) - self.weights = ParameterTuple(network.trainable_params()) - self.optimizer = optimizer - self.grad_sum = grad_sum - self.grad = ops.GradOperation(get_by_list=True, sens_param=True) - self.sens = sens - self.hyper_map = ops.HyperMap() - - def construct(self, *inputs): - weights = self.weights - loss = self.network(*inputs) - sens = ops.Fill()(ops.DType()(loss), ops.Shape()(loss), self.sens) - grads = self.grad(self.network, weights)(*inputs, sens) - return ops.depend(loss, self.hyper_map(ops.partial(_sum_op), self.grad_sum, grads)) - - -class TrainOptim(Cell): - def __init__(self, optimizer, grad_sum): - super(TrainOptim, self).__init__(auto_prefix=False) - self.optimizer = optimizer - self.grad_sum = grad_sum - - def construct(self): - return self.optimizer(self.grad_sum) - - -class TrainClear(Cell): - def __init__(self, grad_sum, zeros): - super(TrainClear, self).__init__(auto_prefix=False) - self.grad_sum = grad_sum - self.zeros = zeros - self.hyper_map = ops.HyperMap() - - def construct(self): - success = self.hyper_map(ops.partial(_clear_op), self.grad_sum, self.zeros) - return success -``` - -### 定义训练模型 - -每个Mini-batch通过正反向训练计算loss和梯度,通过mini_steps控制每次更新参数前的累加次数。达到累加次数后进行参数更新和 -累加梯度变量清零。 - -```python -class GradientAccumulation: - def __init__(self, network, loss_fn, optimizer): - self._network = network - self._loss_fn = loss_fn - self._optimizer = optimizer - - params = self._optimizer.parameters - self._grad_sum = params.clone(prefix="grad_sum", init='zeros') - self._zeros = params.clone(prefix="zeros", init='zeros') - self._train_forward_backward = self._build_train_forward_backward_network() - self._train_optim = self._build_train_optim() - self._train_clear = self._build_train_clear() - - @staticmethod - def _transform_callbacks(callbacks): - """Transform callback to a list.""" - if callbacks is None: - return [] - - if 
isinstance(callbacks, Iterable): - return list(callbacks) - - return [callbacks] - - def _build_train_forward_backward_network(self): - """Build forward and backward network""" - network = self._network - network = nn.WithLossCell(network, self._loss_fn) - loss_scale = 1.0 - network = TrainForwardBackward(network, self._optimizer, self._grad_sum, loss_scale).set_train() - return network - - def _build_train_optim(self): - """Build optimizer network""" - network = TrainOptim(self._optimizer, self._grad_sum).set_train() - return network - - def _build_train_clear(self): - """Build clear network""" - network = TrainClear(self._grad_sum, self._zeros).set_train() - return network - - def train_process(self, epoch, train_dataset, mini_steps=None): - """ - Training process. The data would be passed to network directly. - """ - dataset_helper = DatasetHelper(train_dataset, dataset_sink_mode=False, epoch_num=epoch) - - for i in range(epoch): - step = 0 - for k, next_element in enumerate(dataset_helper): - loss = self._train_forward_backward(*next_element) - if (k + 1) % mini_steps == 0: - step += 1 - print("epoch:", i + 1, "step:", step, "loss is ", loss) - self._train_optim() - self._train_clear() - - train_dataset.reset() - - save_checkpoint(self._train_forward_backward, "gradient_accumulation.ckpt", ) -``` - -### 训练并保存模型 - -调用网络、优化器及损失函数,然后自定义`GradientAccumulation`的`train_process`接口,进行模型训练。 - -```python -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='MindSpore Grad Cumulative Example') - parser.add_argument('--device_target', type=str, default="GPU", choices=['GPU'], - help='device where the code will be implemented (default: GPU)') - parser.add_argument('--data_path', type=str, default="./Data", - help='path where the dataset is saved') - args = parser.parse_args() - - context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target) - ds_train = create_dataset(os.path.join(args.data_path, "train"), 32) - - net = LeNet5(10) - 
net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.Momentum(net.trainable_params(), 0.01, 0.9) - model = GradientAccumulation(net, net_loss, net_opt) - - print("============== Starting Training ==============") - model.train_process(10, ds_train, mini_steps=4) -``` - -### 实验结果 - -在经历了10轮epoch之后,在测试集上的精度约为96.31%。 - -**执行训练:** - -1. 运行训练代码,查看运行结果。 - - ```shell - python train.py --data_path=./MNIST_Data - ``` - - 输出如下,可以看到loss值随着训练逐步降低: - - ```shell - epoch: 1 step: 27 loss is 0.3660637 - epoch: 1 step: 28 loss is 0.25238192 - ... - epoch: 3 step: 2 loss is 0.12296932 - epoch: 3 step: 3 loss is 0.15799297 - ... - epoch: 10 step: 448 loss is 0.06443884 - epoch: 10 step: 449 loss is 0.0067842817 - ``` - -2. 查看保存的CheckPoint文件。 - - 训练过程中保存了CheckPoint文件`gradient_accumulation.ckpt`,即模型文件。 - -**验证模型:** - -通过`model_zoo`中`lenet`目录下的[eval.py](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/lenet/train.py),使用保存的CheckPoint文件,加载验证数据集,进行验证。 - -```shell -python eval.py --data_path=./MNIST_Data --ckpt_path=./gradient_accumulation.ckpt --device_target=GPU -``` - -输出如下,可以看到使用验证的数据集,正确率在96.31%左右,与batch_size为32的验证结果一致。 - -```shell -============== Starting Testing ============== -============== {'Accuracy': 0.9631730769230769} ============== -``` - -## 并行模式 - -在`SEMI_AUTO_PARALLEL`和`AUTO_PARALLEL`模式下使用梯度累积,主要是将累积迭代和更新迭代作为两张图下发并且交替执行。在累积迭代图上,只执行正反向运算及梯度累加。在更新迭代图上,执行正反向运算和参数更新。本小节将以[分布式并行训练教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html)中的样例为基础进行介绍,具体分为如下几个步骤。 - -> 你可以在这里下载主要的训练样例代码: - -### 定义并行训练流程 - -通常情况下,定义了正向网络后会使用[`TrainOneStepCell`](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.TrainOneStepCell.html?highlight=trainonestepcell)将网络正反向及优化器关联到一起。但是梯度累积时存在累积和更新两种情况,所以我们要基于原有类定义做一些改造。样例代码如下: - -```python -import numpy as np -import mindspore.common.dtype as mstype -from mindspore import ops, context, Tensor, Parameter -from mindspore.nn 
import TrainOneStepCell -from mindspore.common.initializer import initializer - -zeroslike = ops.ZerosLike() -reset_accu_grads = ops.MultitypeFuncGraph("reset_accu_grads") - -@reset_accu_grads.register("Tensor") -def _reset_accu_grads(accu_grad): - succ = True - return ops.depend(succ, ops.assign(accu_grad, zeroslike(accu_grad))) - -cast = ops.Cast() -update_accu_grads = ops.MultitypeFuncGraph("update_accu_grads") - - -@update_accu_grads.register("Tensor", "Tensor") -def _update_accu_grads(accu_grad, grad): - succ = True - return ops.depend(succ, ops.assign_add(accu_grad, cast(grad, mstype.float32))) - -class TrainAccuStepsCell(TrainOneStepCell): - def __init__(self, network, optimizer, sens=1.0): - super(TrainAccuStepsCell, self).__init__(network, optimizer, sens) - self.accumulation = False - self.accumulation_steps = context.get_auto_parallel_context("grad_accumulation_step") - self.accu_grads = self.weights.clone(prefix="accu_grads", init='zeros') - self.hyper_map = ops.HyperMap() - - def construct(self, *inputs): - """Defines the computation performed.""" - weights = self.weights - loss = self.network(*inputs) - sens = ops.Fill()(ops.DType()(loss), ops.Shape()(loss), self.sens) - grads = self.grad(self.network, weights)(*inputs, sens) - if self.accumulation and self.accumulation_steps > 1: - accu_succ = self.hyper_map(update_accu_grads, self.accu_grads, grads) - loss = ops.depend(loss, accu_succ) - if self.accumulation: - succ = False - else: - grads = self.grad_reducer(grads) - accu_grads = ops.depend(self.accu_grads, grads) - accu_succ = self.hyper_map(reset_accu_grads, accu_grads) - loss = ops.depend(loss, accu_succ) - succ = self.optimizer(grads) - return ops.depend(loss, succ) -``` - -在`TrainOneStepCell`的基础上,增加累积标记`accumulation`和累积梯度参数`accu_grads`的定义,分别用于区分训练流程和保存累积梯度值。在累积迭代图上,`accumulation`为True,只执行正反向运算并将梯度累加到参数`accu_grads`。在更新迭代图上,`accumulation`为False,执行正反向运算和参数更新。 - -> 由于并行模式下的梯度累积实现需要结合框架内部的图优化完成,所以网络中定义的`accumulation`和`accu_grads`为特定字符,不能修改。 - 
-在动态loss scale场景下,除了梯度需要累积外,溢出标志位也需要累积判断,可以基于[`TrainOneStepWithLossScaleCell`](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.TrainOneStepWithLossScaleCell.html#mindspore.nn.TrainOneStepWithLossScaleCell)改造,实现代码如下: - -```python -import numpy as np -import mindspore.common.dtype as mstype -from mindspore import ops, context, Tensor, Parameter -from mindspore.nn import TrainOneStepWithLossScaleCell -from mindspore.nn.wrap.loss_scale import _grad_scale -from mindspore.common.initializer import initializer - -zeroslike = ops.ZerosLike() -reset_accu_grads = ops.MultitypeFuncGraph("reset_accu_grads") - -@reset_accu_grads.register("Tensor") -def _reset_accu_grads(accu_grad): - succ = True - return ops.depend(succ, ops.assign(accu_grad, zeroslike(accu_grad))) - -cast = ops.Cast() -update_accu_grads = ops.MultitypeFuncGraph("update_accu_grads") - - -@update_accu_grads.register("Tensor", "Tensor") -def _update_accu_grads(accu_grad, grad): - succ = True - return ops.depend(succ, ops.assign_add(accu_grad, cast(grad, mstype.float32))) - - -class TrainAccuStepsWithLossScaleCell(TrainOneStepWithLossScaleCell): - def __init__(self, network, optimizer, scale_sense): - super(TrainAccuStepsWithLossScaleCell, self).__init__(network, optimizer, scale_sense) - self.accumulation = False - self.accumulation_steps = context.get_auto_parallel_context("grad_accumulation_step") - self.one = Tensor(np.array([1]).astype(np.int32)) - self.zero = Tensor(np.array([0]).astype(np.int32)) - self.accu_grads = self.weights.clone(prefix="accu_grads", init='zeros') - self.accu_overflow = Parameter(initializer(0, [1], mstype.int32)) - self.accu_loss = Parameter(initializer(0, [1], mstype.float32)) - self.cast = ops.Cast() - self.logical_or = ops.LogicalOr() - self.not_equal = ops.NotEqual() - self.select = ops.Select() - self.reshape = ops.Reshape() - - def construct(self, *inputs): - """Defines the computation performed.""" - weights = self.weights - loss = 
self.network(*inputs) - scaling_sens = self.scale_sense - status, scaling_sens = self.start_overflow_check(loss, scaling_sens) - scaling_sens_filled = ops.ones_like(loss) * ops.cast(scaling_sens, ops.dtype(loss)) - grads = self.grad(self.network, weights)(*inputs, scaling_sens_filled) - # accumulate gradients - if self.accumulation and self.accumulation_steps > 1: - accu_succ = self.hyper_map(update_accu_grads, self.accu_grads, grads) - loss = ops.depend(loss, accu_succ) - overflow = self.get_overflow_status(status, grads) - overflow = self.logical_or(self.not_equal(self.accu_overflow, self.zero), overflow) - accu_overflow = self.select(overflow, self.one, self.zero) - - if self.accumulation: - succ = False - self.accu_overflow = accu_overflow - else: - self.accu_overflow = self.zero - # apply grad reducer on grads - grads = self.grad_reducer(grads) - grads = self.hyper_map(ops.partial(_grad_scale, scaling_sens), grads) - accu_overflow = self.allreduce(accu_overflow) - overflow = self.less_equal(self.base, accu_overflow) - accu_grads = ops.depend(self.accu_grads, grads) - accu_succ = self.hyper_map(reset_accu_grads, accu_grads) - overflow = ops.depend(overflow, accu_succ) - overflow = self.reshape(overflow, (())) - overflow = self.process_loss_scale(overflow) - if overflow: - succ = False - else: - succ = self.optimizer(grads) - - ret = (loss, overflow, scaling_sens) - return ops.depend(ret, succ) -``` - -其中`accu_overflow`是专门用来保存累积溢出标志位的参数。 - -### 定义并行训练模型 - -经过`cell_wrapper`封装的网络已经包含了正反向和优化器实现,我们还需要将数据集对接到网络并实现两张图交替执行。这里基于框架中的[`Model`](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.html?highlight=model#mindspore.Model)接口实现上述功能。 - -```python -import math -from mindspore.train.callback import RunContext -from mindspore import context -from mindspore.context import ParallelMode -from mindspore import Model, connect_network_with_dataset -from mindspore.common.dtype import pytype_to_dtype -from mindspore._c_expression import init_exec_dataset 
-from mindspore.train.train_thor.dataset_helper import DatasetHelper - - -def _convert_type(types): - """ - Convert from numpy type to tensor type. - - Args: - types (list): Numpy type list of element in dataset. - - Returns: - list, list of element in dataset. - """ - ms_types = [] - for np_type in types: - ms_type = pytype_to_dtype(np_type) - ms_types.append(ms_type) - return ms_types - - -def _get_types_and_shapes(dataset): - """Get dataset types and shapes.""" - dataset_types = _convert_type(dataset.output_types()) - dataset_shapes = dataset.output_shapes() - return dataset_types, dataset_shapes - - -def _exec_datagraph(exec_dataset, dataset_size, phase='dataset'): - """Initialize and execute the dataset graph.""" - batch_size = exec_dataset.get_batch_size() - input_indexs = exec_dataset.input_indexs - - # transform data format - dataset_types, dataset_shapes = _get_types_and_shapes(exec_dataset) - init_exec_dataset(exec_dataset.__transfer_dataset__.queue_name, - dataset_size, - batch_size, - dataset_types, - dataset_shapes, - input_indexs, - phase=phase, - need_run=False) - - -class Model_ACCU(Model): - def __init__(self, network, loss_fn=None, optimizer=None, metrics=None, eval_network=None, - eval_indexes=None, amp_level="O0", **kwargs): - super(Model_ACCU, self).__init__(network, loss_fn, optimizer, metrics, eval_network, - eval_indexes, amp_level, **kwargs) - self._frequency = context.get_auto_parallel_context("grad_accumulation_step") - self._train_network = self._build_train_network() - - def _exec_preprocess(self, network, is_train, phase, dataset, dataset_sink_mode, sink_size=-1, - epoch_num=1, iter_first_order=1): - """Initializes dataset.""" - if dataset_sink_mode and not is_train: - dataset.__loop_size__ = 1 - dataset_helper = DatasetHelper(dataset, dataset_sink_mode, sink_size, epoch_num, iter_first_order) - - if dataset_sink_mode and context.get_context("device_target") != "GPU": - network = connect_network_with_dataset(network, dataset_helper) - 
network.set_train(is_train) - network.phase = phase - - if self._parallel_mode in (ParallelMode.SEMI_AUTO_PARALLEL, ParallelMode.AUTO_PARALLEL): - network.set_auto_parallel() - - return dataset_helper, network - - def _train_dataset_sink_process(self, epoch, train_dataset, list_callback=None, cb_params=None, sink_size=-1): - """ - Training process. The data would be passed to network through dataset channel. - - Args: - epoch (int): Total number of iterations on the data. - train_dataset (Dataset): A training dataset iterator. If there is no - loss_fn, a tuple with multiple data (data1, data2, data3, ...) should be - returned and passed to the network. Otherwise, a tuple (data, label) should - be returned. The data and label would be passed to the network and loss - function respectively. - list_callback (Callback): Executor of callback list. Default: None. - cb_params (_InternalCallbackParam): Callback parameters. Default: None. - sink_size (int): Control the amount of data in each sink. Default: -1. 
- """ - if sink_size == -1: - epoch_num = epoch - else: - epoch_num = math.ceil(epoch * sink_size / train_dataset.get_dataset_size()) - - iter_first_order = 1 - iter_second_order = self._frequency - 1 - train_dataset.__loop_size__ = iter_second_order - dataset_helper, train_network = self._exec_preprocess(self._train_network, - is_train=True, - phase='train', - dataset=train_dataset, - dataset_sink_mode=True, - sink_size=sink_size, - epoch_num=epoch_num, - iter_first_order=iter_first_order) - - self._train_network = train_network - cb_params.train_network = self._train_network - cb_params.cur_step_num = 0 - - run_context = RunContext(cb_params) - list_callback.begin(run_context) - - # used to stop training for early stop, such as stopAtTIme or stopATStep - should_stop = False - switch_branch_one = True - index_first_order = 0 - train_network_init_flag = True - has_do_dataset_init = False - - for i in range(epoch): - cb_params.cur_epoch_num = i + 1 - list_callback.epoch_begin(run_context) - # for data sink dataset_helper only iter once, other wise iter epoch_size times. 
- for inputs in dataset_helper: - list_callback.step_begin(run_context) - if switch_branch_one: - cb_params.cur_step_num += iter_second_order - if train_network_init_flag: - self._train_network.add_flags_recursive(accumulation=True) - self._train_network.phase = 'train0' - else: - cb_params.cur_step_num += iter_first_order - if train_network_init_flag: - self._train_network.add_flags_recursive(accumulation=False) - train_network_init_flag = False - self._train_network.phase = 'train1' - if not has_do_dataset_init: - _exec_datagraph(train_dataset, iter_first_order, phase='train1_dataset') - has_do_dataset_init = True - switch_branch_one = not switch_branch_one - outputs = self._train_network(*inputs) - cb_params.net_outputs = outputs - list_callback.step_end(run_context) - - list_callback.epoch_end(run_context) - should_stop = should_stop or run_context.get_stop_requested() - if should_stop: - break - dataset_helper.stop_send() - - list_callback.end(run_context) -``` - -在样例代码中,子类`Model_ACCU`改写了基类的`_exec_preprocess`数据集封装和`_train_dataset_sink_process`训练循环下沉方法,分别下发累积迭代图`(accumulation=True)`及更新迭代图`(accumulation=False)`的数据子图和训练图,实现交替执行的训练过程。累积迭代图的下沉步数为`grad_accumulation_step`减1,更新迭代图下沉步数为1。 - -### 训练模型 - -完成上述定义后,即可利用训练接口完成模型训练。首先需要在`context.set_auto_parallel_context`配置`grad_accumulation_step`参数,使能梯度累积。其次利用改造的`cell_wrapper`封装网络结构,传入`Model_ACCU`中初始化模型。 - -```python -context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL, gradients_mean=True, grad_accumulation_step=6) -loss_cb = LossMonitor() -data_path = os.getenv('DATA_PATH') -batch_size = 32 -dataset = create_dataset(data_path, batch_size=batch_size) -num_classes = 10 -net = resnet50(batch_size, num_classes) -loss = SoftmaxCrossEntropyExpand(sparse=True) -opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) -net_with_loss = nn.WithLossCell(net, loss) -net_with_loss = VirtualDatasetCell(net_with_loss) -wrap_net = TrainAccuStepsCell(net_with_loss, opt) -model = 
Model_ACCU(wrap_net) -model.train(epoch_size, dataset, callbacks=[loss_cb], dataset_sink_mode=True) -``` - -在日志中可以检索到如下的日志打印: - -```text -epoch: 1 step: 234, loss is 1.7588712 -epoch: 2 step: 234, loss is 1.7275971 -epoch: 3 step: 234, loss is 1.5423206 -epoch: 4 step: 234, loss is 1.2762429 -epoch: 5 step: 234, loss is 1.0915408 -``` diff --git a/tutorials/training/source_zh_cn/advanced_use/apply_host_device_training.md b/tutorials/training/source_zh_cn/advanced_use/apply_host_device_training.md deleted file mode 100644 index 6da63c3f9dc2590f5fb274de88578efeb8c26c2a..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/apply_host_device_training.md +++ /dev/null @@ -1,109 +0,0 @@ -# 应用Host&Device混合训练 - -`Linux` `Ascend` `CPU` `模型训练` `中级` `高级` - - - -- [Host&Device混合训练](#hostdevice混合训练) - - [概述](#概述) - - [准备工作](#准备工作) - - [配置混合执行](#配置混合执行) - - [训练模型](#训练模型) - - [参考文献](#参考文献) - - - - - -## 概述 - -在深度学习中,工作人员时常会遇到超大模型的训练问题,即模型参数所占内存超过了设备内存上限。为高效地训练超大模型,一种方案便是分布式并行训练,也就是将工作交由同构的多个加速器(如Ascend 910 AI处理器,GPU等)共同完成。但是这种方式在面对几百GB甚至几TB级别的模型时,所需的加速器过多。而当从业者实际难以获取大规模集群时,这种方式难以应用。另一种可行的方案是使用主机端(Host)和加速器(Device)的混合训练模式。此方案同时发挥了主机端内存大和加速器端计算快的优势,是一种解决超大模型训练较有效的方式。 - -在MindSpore中,用户可以将待训练的参数放在主机,同时将必要算子的执行位置配置为主机,其余算子的执行位置配置为加速器,从而方便地实现混合训练。此教程以推荐模型[Wide&Deep](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/recommend/wide_and_deep)为例,讲解MindSpore在主机和Ascend 910 AI处理器的混合训练。 - -## 准备工作 - -1. 准备模型代码。Wide&Deep的代码可参见:,其中,`train_and_eval_auto_parallel.py`为训练的主函数所在,`src/`目录中包含Wide&Deep模型的定义、数据处理和配置信息等,`script/`目录中包含不同配置下的训练脚本。 - -2. 准备数据集。请参考[1]中的链接下载数据集,并利用脚本`src/preprocess_data.py`将数据集转换为MindRecord格式。 - -3. 
配置处理器信息。在裸机环境(即本地有Ascend 910 AI 处理器)进行分布式训练时,需要配置加速器信息文件。此样例只使用一个加速器,故只需配置包含0号卡的`rank_table_1p_0.json`文件(每台机器的具体的IP信息不同,需要查看网络配置来设定,此为示例),如下所示: - - ```json - { - "version": "1.0", - "server_count": "1", - "server_list": [ - { - "server_id":"10.155.170.16", - "device": [ - {"device_id":"0","device_ip":"192.1.113.246","rank_id":"0"}], - "host_nic_ip":"reserve" - } - ], - "status": "completed" - } - ``` - -## 配置混合执行 - -1. 配置混合训练标识。在`src/config.py`文件中,设置`argparse_init`函数中的`host_device_mix`默认值为`1`,设置`WideDeepConfig`类的`__init__`函数中`self.host_device_mix`为`1`: - - ```python - self.host_device_mix = 1 - ``` - -2. 检查必要算子和优化器的执行位置。在`src/wide_and_deep.py`的`WideDeepModel`类中,检查`EmbeddingLookup`为主机端执行: - - ```python - self.deep_embeddinglookup = nn.EmbeddingLookup() - self.wide_embeddinglookup = nn.EmbeddingLookup() - ``` - - 在`src/wide_and_deep.py`文件的`class TrainStepWrap(nn.Cell)`中,检查两个优化器主机端执行的属性。 - - ```python - self.optimizer_w.target = "CPU" - self.optimizer_d.target = "CPU" - ``` - -## 训练模型 - -使用训练脚本`script/run_auto_parallel_train.sh`。执行命令:`bash run_auto_parallel_train.sh 1 1 DATASET RANK_TABLE_FILE`, -其中第一个`1`表示用例使用的卡数,第二`1`表示训练的epoch数,`DATASET`是数据集所在路径,`RANK_TABLE_FILE`为上述`rank_table_1p_0.json`文件所在路径。 - -运行日志保存在`device_0`目录下,其中`loss.log`保存一个epoch内中多个loss值,其值类似如下: - -```text -epoch: 1 step: 1, wide_loss is 0.6873926, deep_loss is 0.8878349 -epoch: 1 step: 2, wide_loss is 0.6442529, deep_loss is 0.8342661 -epoch: 1 step: 3, wide_loss is 0.6227323, deep_loss is 0.80273706 -epoch: 1 step: 4, wide_loss is 0.6107221, deep_loss is 0.7813441 -epoch: 1 step: 5, wide_loss is 0.5937832, deep_loss is 0.75526017 -epoch: 1 step: 6, wide_loss is 0.5875453, deep_loss is 0.74038756 -epoch: 1 step: 7, wide_loss is 0.5798845, deep_loss is 0.7245408 -epoch: 1 step: 8, wide_loss is 0.57553077, deep_loss is 0.7123517 -epoch: 1 step: 9, wide_loss is 0.5733629, deep_loss is 0.70278376 -epoch: 1 step: 10, wide_loss is 0.566089, deep_loss is 0.6884129 -``` - 
-`test_deep0.log`保存pytest进程输出的详细的运行时日志(需要将日志级别设置为INFO,且在MindSpore编译时加上-p on选项),搜索关键字`EmbeddingLookup`,可找到如下信息: - -```text -[INFO] DEVICE(109904,python3.7):2020-06-27-12:42:34.928.275 [mindspore/ccsrc/device/cpu/cpu_kernel_runtime.cc:324] Run] cpu kernel: Default/network-VirtualDatasetCellTriple/_backbone-NetWithLossClass/network-WideDeepModel/EmbeddingLookup-op297 costs 3066 us. -[INFO] DEVICE(109904,python3.7):2020-06-27-12:42:34.943.896 [mindspore/ccsrc/device/cpu/cpu_kernel_runtime.cc:324] Run] cpu kernel: Default/network-VirtualDatasetCellTriple/_backbone-NetWithLossClass/network-WideDeepModel/EmbeddingLookup-op298 costs 15521 us. -``` - -表示`EmbeddingLookup`在主机端的执行时间。 -继续在`test_deep0.log`搜索关键字`FusedSparseFtrl`和`FusedSparseLazyAdam`,可找到如下信息: - -```text -[INFO] DEVICE(109904,python3.7):2020-06-27-12:42:35.422.963 [mindspore/ccsrc/device/cpu/cpu_kernel_runtime.cc:324] Run] cpu kernel: Default/optimizer_w-FTRL/FusedSparseFtrl-op299 costs 54492 us. -[INFO] DEVICE(109904,python3.7):2020-06-27-12:42:35.565.953 [mindspore/ccsrc/device/cpu/cpu_kernel_runtime.cc:324] Run] cpu kernel: Default/optimizer_d-LazyAdam/FusedSparseLazyAdam-op300 costs 142865 us. -``` - -表示两个优化器在主机端的执行时间。 - -## 参考文献 - -[1] Huifeng Guo, Ruiming Tang, Yunming Ye, Zhenguo Li, Xiuqiang He. [DeepFM: A Factorization-Machine based Neural Network for CTR Prediction.](https://doi.org/10.24963/ijcai.2017/239) IJCAI 2017. 
diff --git a/tutorials/training/source_zh_cn/advanced_use/apply_parameter_server_training.md b/tutorials/training/source_zh_cn/advanced_use/apply_parameter_server_training.md deleted file mode 100644 index a2282983f63a958a9599e80381a6acc396c4b876..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/apply_parameter_server_training.md +++ /dev/null @@ -1,165 +0,0 @@ -# 使用Parameter Server训练 - -`Linux` `Ascend` `GPU` `模型训练` `中级` `高级` - - - -- [Parameter Server训练](#parameter_server训练) - - [概述](#概述) - - [准备工作](#准备工作) - - [训练脚本准备](#训练脚本准备) - - [参数设置](#参数设置) - - [环境变量设置](#环境变量设置) - - [执行训练](#执行训练) - - - - - -## 概述 - -Parameter Server(参数服务器)是分布式训练中一种广泛使用的架构,相较于同步的AllReduce训练方法,Parameter Server具有更好的灵活性、可扩展性以及节点容灾的能力。具体来讲,参数服务器既支持同步SGD,也支持异步SGD的训练算法;在扩展性上,将模型的计算与模型的更新分别部署在Worker和Server两类进程中,使得Worker和Server的资源可以独立地横向扩缩;另外,在大规模数据中心的环境下,计算设备、网络以及存储经常会出现各种故障而导致部分节点异常,而在参数服务器的架构下,能够较为容易地处理此类的故障而不会对训练中的任务产生影响。 - -在MindSpore的参数服务器实现中,采用了自研的通信框架作为基础架构,基于其提供的远程通信能力以及抽象的Send/Broadcast等原语,实现了同步SGD的分布式训练算法,另外结合Ascend和GPU中的高性能集合通信库(HCCL和NCCL),MindSpore还提供了Parameter Server和AllReduce的混合训练模式,支持将部分权重通过参数服务器进行存储和更新,其余权重仍然通过AllReduce算法进行训练。 - -在参数服务器的架构设计中,一共包含三个独立的组件,分别是Server、Worker和Scheduler,作用分别是: - -- Server:保存模型的权重和反向计算的梯度值,并使用优化器通过Worker上传的梯度值对模型进行更新。 - -- Worker:执行网络的正反向计算,反向计算的梯度值通过Push接口上传至Server中,通过Pull接口把Server更新好的模型下载到Worker本地。 - -- Scheduler:用于建立Server和Worker的通信关系。 - -## 准备工作 - -以LeNet在Ascend 910上使用Parameter Server训练为例: - -### 训练脚本准备 - -参考,使用[MNIST数据集](http://yann.lecun.com/exdb/mnist/),了解如何训练一个LeNet网络。 - -### 参数设置 - -1. 首先调用`mindspore.context.set_ps_context(enable_ps=True)`开启Parameter Server训练模式. - - - 此接口需在`mindspore.communication.management.init()`之前调用。 - - 若没有调用此接口,下面的[环境变量设置](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_parameter_server_training.html#id5)则不会生效。 - - 调用`mindspore.context.reset_ps_context()`可以关闭Parameter Server训练模式。 - -2. 
在本训练模式下,有以下两种调用接口方式以控制训练参数是否通过Parameter Server进行更新,并且可以控制参数初始化位置: - - - 通过`mindspore.nn.Cell.set_param_ps()`对`nn.Cell`中所有权重递归设置。 - - 通过`mindspore.Parameter.set_param_ps()`对此权重进行设置。 - - 被设置为通过Parameter Server更新的单个权重大小不得超过INT_MAX(2^31 - 1)字节。 - - 接口`set_param_ps`可接收一个`bool`型参数:`init_in_server`,表示该训练参数是否在Server端初始化,`init_in_server`默认值为`False`,表示在Worker上初始化该训练参数;当前仅支持`EmbeddingLookup`算子的训练参数`embedding_table`在Server端初始化,以解决超大shape的`embedding_table`在Worker上初始化导致内存不足的问题,该算子的`target`属性需要设置为'CPU'。在Server端初始化的训练参数将不再同步到Worker上,如果涉及到多Server训练并保存CheckPoint,则训练结束后每个Server均会保存一个CheckPoint。 - -3. 在[原训练脚本](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/lenet/train.py)基础上,设置LeNet模型所有权重通过Parameter Server训练: - - ```python - context.set_ps_context(enable_ps=True) - network = LeNet5(cfg.num_classes) - network.set_param_ps() - ``` - -4. [可选配置]针对超大shape的`embedding_table`,由于设备上存放不下全量的`embedding_table`,可以配置[EmbeddingLookup算子](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.EmbeddingLookup.html)的`vocab_cache_size`,用于开启Parameter Server训练模式下`EmbeddingLookup`的cache功能,该功能使用`vocab_cache_size`大小的`embedding_table`在设备上训练,全量`embedding_table`存储在Server,将下批次训练用到的`embedding_table`提前换入到cache上,当cache放不下时则将过期的`embedding_table`放回到Server,以达到提升训练性能的目的;训练结束后,可在Server上导出CheckPoint,保存训练后的全量`embedding_table`。详细网络训练脚本参考。 - - ```python - context.set_auto_parallel_context(full_batch=True, parallel_mode=ParallelMode.AUTO_PARALLEL) - context.set_context(enable_sparse=True) - network = Net() - model = Model(network) - model.train(epoch, train_dataset, dataset_sink_mode=True) - ``` - - 其中, - - - `dataset_sink_mode`:是否开启数据下沉模式 ,为`True`时表明开启,通过数据集通道传递数据,该场景中必须设置为`True`(训练中推理也需要开启数据下沉模式)。 - - `full_batch`:是否全量导入数据集,为`True`时表明全量导入,每卡的数据相同,在多Worker场景中必须设置为`True`。 - - `parallel_mode`:并行模式,多Worker场景需要开启自动并行模式,设置`parallel_mode`=`ParallelMode.AUTO_PARALLEL`。 - - `enable_sparse`:是否开启sparse训练,默认值为`False`;为`True`时表示开启sparse训练;在Parameter 
Server模式下,所有开启cache的`EmbeddingLookup`算子的`sparse`参数应和`enable_sparse`值保持一致。 - -### 环境变量设置 - -MindSpore通过读取环境变量,控制Parameter Server训练,环境变量包括以下选项(其中`MS_SCHED_HOST`及`MS_SCHED_PORT`所有脚本需保持一致): - -```text -export MS_SERVER_NUM=1 # Server number -export MS_WORKER_NUM=1 # Worker number -export MS_SCHED_HOST=XXX.XXX.XXX.XXX # Scheduler IP address -export MS_SCHED_PORT=XXXX # Scheduler port -export MS_ROLE=MS_SCHED # The role of this process: MS_SCHED represents the scheduler, MS_WORKER represents the worker, MS_PSERVER represents the Server -``` - -## 执行训练 - -1. shell脚本 - - 提供Worker,Server和Scheduler三个角色对应的shell脚本,以启动训练: - - `Scheduler.sh`: - - ```bash - #!/bin/bash - export MS_SERVER_NUM=1 - export MS_WORKER_NUM=1 - export MS_SCHED_HOST=XXX.XXX.XXX.XXX - export MS_SCHED_PORT=XXXX - export MS_ROLE=MS_SCHED - python train.py --device_target=Ascend --data_path=path/to/dataset - ``` - - `Server.sh`: - - ```bash - #!/bin/bash - export MS_SERVER_NUM=1 - export MS_WORKER_NUM=1 - export MS_SCHED_HOST=XXX.XXX.XXX.XXX - export MS_SCHED_PORT=XXXX - export MS_ROLE=MS_PSERVER - python train.py --device_target=Ascend --data_path=path/to/dataset - ``` - - `Worker.sh`: - - ```bash - #!/bin/bash - export MS_SERVER_NUM=1 - export MS_WORKER_NUM=1 - export MS_SCHED_HOST=XXX.XXX.XXX.XXX - export MS_SCHED_PORT=XXXX - export MS_ROLE=MS_WORKER - python train.py --device_target=Ascend --data_path=path/to/dataset - ``` - - 最后分别执行: - - ```bash - sh Scheduler.sh > scheduler.log 2>&1 & - sh Server.sh > server.log 2>&1 & - sh Worker.sh > worker.log 2>&1 & - ``` - - 启动训练 - -2. 查看结果 - - 查看`scheduler.log`中Server与Worker通信日志: - - ```text - The server node id:b5d8a47c-46d7-49a5-aecf-d29d7f8b6124,node ip: 10.90.53.118,node port:46737 assign rank id:0 - The worker node id:55e86d4b-d717-4930-b414-ebd80082f541 assign rank id:1 - Start the scheduler node is successful! 
- ``` - - 说明Server、Worker与Scheduler通信建立成功。 - - 查看`worker.log`中训练结果: - - ```text - epoch: 1 step: 1, loss is 2.302287 - epoch: 1 step: 2, loss is 2.304071 - epoch: 1 step: 3, loss is 2.308778 - epoch: 1 step: 4, loss is 2.301943 - ... - ``` diff --git a/tutorials/training/source_zh_cn/advanced_use/apply_post_training_quantization.md b/tutorials/training/source_zh_cn/advanced_use/apply_post_training_quantization.md deleted file mode 100644 index 6b5489f68e1f40dd8ba104fda309a3e6beb375cb..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/apply_post_training_quantization.md +++ /dev/null @@ -1,38 +0,0 @@ -# 应用训练后量化 - -`Linux` `模型调优` `高级` - - - -- [应用训练后量化](#应用训练后量化) - - [概念](#概念) - - [权重量化](#权重量化) - - [全量化](#全量化) - - [训练后量化工具](#训练后量化工具) - - - - - -## 概念 - -训练后量化是指对预训练后的网络进行权重量化或者全量化,以达到减小模型大小和提升推理性能的目的。 -该过程不需要训练,对激活值量化时需要少量校准数据。 - -### 权重量化 - -对模型的权值进行量化,仅压缩模型大小,推理时仍然执行float32运算。量化比特数越低,模型压缩率越大,但是精度损失通常也比较大。 - -### 全量化 - -对模型的权重和激活值统一进行量化,推理时执行int运算。可以减小模型大小、提升模型推理速度和降低功耗。 -针对需要提升模型运行速度、降低模型运行功耗的场景,可以使用训练后全量化功能。为了计算激活值的量化参数,用户需要提供校准数据集。 - -## 训练后量化工具 - -根据模型推理部署的硬件平台选择使用对应的训练后量化工具。 - -| 训练后量化工具 | 量化方法支持 | 推理硬件平台支持 | 量化模型部署 | -| --- | --- | --- | --- | -| [MindSpore训练后量化工具](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/post_training_quantization.html) | 权重量化
    全量化 | CPU | [端侧推理](https://www.mindspore.cn/tutorial/lite/zh-CN/master/use/runtime.html) | -| 昇腾模型压缩工具 | 全量化 | Ascend 310 AI处理器 | [Ascend 310 AI处理器上推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_310.html) | diff --git a/tutorials/training/source_zh_cn/advanced_use/apply_quantization_aware_training.md b/tutorials/training/source_zh_cn/advanced_use/apply_quantization_aware_training.md deleted file mode 100644 index bc962ea986dff197e09c24871da7ebcc9db11796..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/apply_quantization_aware_training.md +++ /dev/null @@ -1,280 +0,0 @@ -# 应用感知量化训练 - -`Linux` `Ascend` `GPU` `模型调优` `高级` - - - -- [应用感知量化训练](#应用感知量化训练) - - [背景](#背景) - - [概念](#概念) - - [量化](#量化) - - [伪量化节点](#伪量化节点) - - [感知量化训练](#感知量化训练) - - [感知量化训练示例](#感知量化训练示例) - - [定义量化网络](#定义量化网络) - - [自动构建量化网络](#自动构建量化网络) - - [手动构建量化网络](#手动构建量化网络) - - [导出量化模型](#导出量化模型) - - [参考文献](#参考文献) - - - - - -## 背景 - -越来越多的应用选择在移动设备或者边缘设备上使用深度学习技术。以手机为例,为了提供人性化和智能的服务,现在操作系统和应用都开始集成深度学习功能。而使用该功能,涉及训练或者推理,自然包含大量的模型及权重文件。经典的AlexNet,原始权重文件已经超过了200MB,而最近出现的新模型正往结构更复杂、参数更多的方向发展。由于移动设备、边缘设备的硬件资源有限,需要对模型进行精简,而量化(Quantization)技术就是应对该类问题衍生出的技术之一。 - -## 概念 - -### 量化 - -量化即以较低的推理精度损失将连续取值(或者大量可能的离散取值)的浮点型模型权重或流经模型的张量数据定点近似(通常为INT8)为有限多个(或较少的)离散值的过程,它是以更少位数的数据类型用于近似表示32位有限范围浮点型数据的过程,而模型的输入输出依然是浮点型。这样的好处是可以减小模型尺寸大小,减少模型内存占用,加快模型推理速度,降低功耗等。 - -如上所述,与FP32类型相比,FP16、INT8、INT4等低精度数据表达类型所占用空间更小。使用低精度数据表达类型替换高精度数据表达类型,可以大幅降低存储空间和传输时间。而低比特的计算性能也更高,INT8相对比FP32的加速比可达到3倍甚至更高,对于相同的计算,功耗上也有明显优势。 - -当前业界量化方案主要分为两种:感知量化训练(Quantization Aware Training)和训练后量化(Post-training Quantization)。感知量化训练需要训练数据,在模型准确率上通常表现更好,适用于对模型压缩率和模型准确率要求较高的场景;训练后量化简单易用,只需少量校准数据,适用于追求高易用性和缺乏训练资源的场景。 - -### 伪量化节点 - -伪量化节点,是指感知量化训练中插入的节点,用以寻找网络数据分布,并反馈损失精度,具体作用如下: - -- 找到网络数据的分布,即找到待量化参数的最大值和最小值; -- 模拟量化为低比特时的精度损失,把该损失作用到网络模型中,传递给损失函数,让优化器在训练过程中对该损失值进行优化。 - -## 感知量化训练 - 
-MindSpore的感知量化训练是指在训练时使用伪量化节点来模拟量化操作,过程中仍然采用浮点数计算,并通过反向传播学习更新网络参数,使得网络参数更好地适应量化带来的损失。对于权值和数据的量化,MindSpore采用了参考文献[1]中的方案。 - -感知量化训练规格 - -| 规格 | 规格说明 | -| --- | --- | -| 硬件支持 | GPU、Ascend AI 910处理器的硬件平台 | -| 网络支持 | 已实现的网络包括LeNet、ResNet50等网络,具体请参见。 | -| 算法支持 | 支持非对称和对称的量化算法;支持逐层和逐通道的量化算法。| -| 方案支持 | 支持4、7和8比特的量化方案。 | -| 数据类型支持 | Ascend平台支持精度为FP32和FP16的网络进行量化训练,GPU平台支持FP32。 | -| 运行模式支持 | Graph模式 | - -## 感知量化训练示例 - -感知量化训练与一般训练步骤一致,在定义量化网络和生成量化模型阶段需要进行额外的操作,完整流程如下: - -1. 加载数据集,处理数据。 -2. 定义量化网络。 -3. 定义优化器和损失函数。 -4. 训练网络,保存模型文件。 -5. 加载保存的模型,进行推理。 -6. 导出量化模型。 - -在上面流程中,步骤2和步骤6是感知量化训练区别普通训练需要额外进行的步骤。接下来,以LeNet网络为例,展开叙述量化相关步骤。 - -> 你可以在这里找到完整可运行的样例代码: 。 - -### 定义量化网络 - -量化网络是指在原网络定义的基础上修改需要量化的网络层后生成的带有伪量化节点的网络。根据构建量化网络的不同,定义量化网络可分为如下两种的方法: - -- 自动构建量化网络:定义融合网络后,调用转换接口后会自动将融合网络转化为量化网络。用户无需感知插入伪量化节点的过程,更简单易用。 -- 手动构建量化网络:手动将需要量化的网络层替换成对应的量化节点,或者直接在需要量化的网络层后插入伪量化节点,修改后的网络即量化网络。用户可以自定义需要量化的网络层,更加灵活易扩展。 - -> - 自动构建量化网络方法支持量化的网络层包含`nn.Conv2dBnAct`、`nn.DenseBnAct`、`Add`、`Sub`、`Mul`和`RealDiv`。如果只需量化这些网络层的部分层,或者要支持量化其他网络层,请使用手动构建量化网络方法。 -> - 自动构建量化网络的转换接口是`QuantizationAwareTraining.quantize`。 - -原网络模型LeNet5的定义如下所示: - -```python -class LeNet5(nn.Cell): - """ - Lenet network - - Args: - num_class (int): Num classes. Default: 10. - num_channel (int): Num channel. Default: 1. 
- Returns: - Tensor, output tensor - Examples: - >>> LeNet(num_class=10, num_channel=1) - - """ - def __init__(self, num_class=10, num_channel=1): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x -``` - -#### 自动构建量化网络 - -首先定义融合网络: - -1. 使用`nn.Conv2dBnAct`算子替换原网络模型中的2个算子`nn.Conv2d`和`nn.ReLU`。 -2. 使用`nn.DenseBnAct`算子替换原网络模型中的2个算子`nn.Dense`和`nn.ReLU`。 - -> 无论`nn.Dense`和`nn.Conv2d`算子后面有没有`nn.BatchNorm`和`nn.ReLU`,都要按规定使用上述两个算子进行融合替换。 - -替换算子后的融合网络如下: - -```python -class LeNet5(nn.Cell): - def __init__(self, num_class=10): - super(LeNet5, self).__init__() - self.num_class = num_class - - self.conv1 = nn.Conv2dBnAct(1, 6, kernel_size=5, pad_mode='valid', activation='relu') - self.conv2 = nn.Conv2dBnAct(6, 16, kernel_size=5, pad_mode='valid', activation='relu') - - self.fc1 = nn.DenseBnAct(16 * 5 * 5, 120, activation='relu') - self.fc2 = nn.DenseBnAct(120, 84, activation='relu') - self.fc3 = nn.DenseBnAct(84, self.num_class) - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.conv1(x)) - x = self.max_pool2d(self.conv2(x)) - x = self.flatten(x) - x = self.fc1(x) - x = self.fc2(x) - x = self.fc3(x) - return x -``` - -使用感知量化训练进行微调时,需要加载预训练模型的参数。 - -```python -from mindspore.compression.quant import load_nonquant_param_into_quant_net -... 
-# define fusion network -network = LeNet5(cfg.num_classes) - -param_dict = load_checkpoint(args.ckpt_path) -load_nonquant_param_into_quant_net(network, param_dict) -``` - -然后使用`QuantizationAwareTraining.quantize`接口自动在融合网络中插入伪量化节点,将融合网络转化为量化网络。 - -```python -from mindspore.compression.quant import QuantizationAwareTraining - -quantizer = QuantizationAwareTraining(quant_delay=900, - bn_fold=False, - per_channel=[True, False], - symmetric=[True, False]) -net = quantizer.quantize(network) -``` - -> 如果量化精度不满足要求,请先调整合适的量化策略参数。例如,一般量化bit数越大量化精度损失越小,权重采用逐通道量化会比逐层量化获取更好的精度。另外,还可以选择手动构建量化网络方法,通过手动选择量化部分网络层来平衡准确率和推理性能之间的关系。 - -#### 手动构建量化网络 - -把原网络中需要量化的层替换成对应的量化算子: - -1. 使用`nn.Conv2dQuant`替换原网络模型中的`nn.Conv2d`算子。 -2. 使用`nn.DenseQuant`替换原网络模型中`nn.Dense`算子。 -3. 使用`nn.ActQuant`替换原网络模型中的`nn.ReLU`算子。 - -```python -class LeNet5(nn.Cell): - def __init__(self, num_class=10, channel=1): - super(LeNet5, self).__init__() - self.num_class = num_class - - self.qconfig = create_quant_config(quant_dtype=(QuantDtype.INT8, QuantDtype.INT8), per_channel=(True, False), symmetric=[True, False]) - - self.conv1 = nn.Conv2dQuant(channel, 6, 5, pad_mode='valid', quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - self.conv2 = nn.Conv2dQuant(6, 16, 5, pad_mode='valid', quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - self.fc1 = nn.DenseQuant(16 * 5 * 5, 120, quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - self.fc2 = nn.DenseQuant(120, 84, quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - self.fc3 = nn.DenseQuant(84, self.num_class, quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - - self.relu = nn.ActQuant(nn.ReLU(), quant_config=self.qconfig, quant_dtype=QuantDtype.INT8) - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.flatten(x) - x = self.fc1(x) - x = 
self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x -``` - -> - 量化算子:`nn.Conv2dQuant`、`nn.DenseQuant`、`nn.ActQuant`等为含有伪量化节点的算子。更多的量化算子内容请参见 。 -> - 在需要量化的网络层后面插入伪量化节点`nn.FakeQuantWithMinMaxObserver`可以实现更多网络层的量化。 -> - 建议优先选择量化网络中靠后的层,因为量化前面的网络层可能会造成更多的精度损失。 - -使用感知量化训练进行微调时,需要加载预训练模型的参数。 - -```python -from mindspore.compression.quant import load_nonquant_param_into_quant_net -... -# define quant network -network = LeNet5(cfg.num_classes) - -param_dict = load_checkpoint(args.ckpt_path) -load_nonquant_param_into_quant_net(network, param_dict) -``` - -### 导出量化模型 - -在端侧硬件平台上部署的量化模型为通用模型格式(AIR、MindIR等),并且不包含伪量化节点。导出步骤为: - -1. 定义量化网络。该步骤的量化网络和感知量化训练时的量化网络相同。 -2. 加载感知量化训练时保存的CheckPoint格式文件。 -3. 导出量化模型。设置`export`接口的`quant_mode`、`mean`和`std_dev`参数。 - -```python -from mindspore import Tensor, context, load_checkpoint, load_param_into_net, export - -if __name__ == "__main__": - ... - # define fusion network - network = LeNet5(cfg.num_classes) - quantizer = QuantizationAwareTraining(bn_fold=False, - per_channel=[True, False], - symmetric=[True, False]) - network = quantizer.quantize(network) - - # load quantization aware network checkpoint - param_dict = load_checkpoint(args.ckpt_path) - load_param_into_net(network, param_dict) - - # export network - inputs = Tensor(np.ones([1, 1, cfg.image_height, cfg.image_width]), mindspore.float32) - export(network, inputs, file_name="lenet_quant", file_format='MINDIR', quant_mode='QUANT', mean=127.5, std_dev=127.5) -``` - -导出量化模型后,请[使用MindSpore进行推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/index.html)。 - -> - 导出的模型格式支持MindIR和AIR。 -> - 感知量化训练后导出的模型支持[端侧推理](https://www.mindspore.cn/lite/docs?master)和[Ascend 310 AI处理器上推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_310.html)。 - -## 参考文献 - -[1] Jacob B, Kligys S, Chen B, et al. 
Quantization and training of neural networks for efficient integer-arithmetic-only inference[C]//Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition. 2018: 2704-2713. - -[2] Krishnamoorthi R. Quantizing deep convolutional networks for efficient inference: A whitepaper[J]. arXiv preprint arXiv:1806.08342, 2018. diff --git a/tutorials/training/source_zh_cn/advanced_use/convert_dataset.ipynb b/tutorials/training/source_zh_cn/advanced_use/convert_dataset.ipynb deleted file mode 100644 index 10fb19f03e8777edc399feee7c2e60ac2c10d4ef..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/convert_dataset.ipynb +++ /dev/null @@ -1,469 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 转换数据集为MindRecord\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/advanced_use/convert_dataset.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/mindspore_convert_dataset.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL21pbmRzcG9yZV9jb252ZXJ0X2RhdGFzZXRfdG9fbWluZHJlY29yZC5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "用户可以将非标准的数据集和常用的数据集转换为MindSpore数据格式,即MindRecord,从而方便地加载到MindSpore中进行训练。同时,MindSpore在部分场景做了性能优化,使用MindRecord数据格式可以获得更好的性能体验。\n", - "\n", - "MindSpore数据格式具备的特征如下:\n", - "\n", - "1. 实现多变的用户数据统一存储、访问,训练数据读取更加简便。\n", - "2. 数据聚合存储,高效读取,且方便管理、移动。\n", - "3. 
高效的数据编解码操作,对用户透明、无感知。\n", - "4. 可以灵活控制分区的大小,实现分布式训练。\n", - "\n", - "MindSpore数据格式的目标是归一化用户的数据集,并进一步通过`MindDataset`(详细使用方法参考[API](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/dataset/mindspore.dataset.MindDataset.html))实现数据的读取,并用于训练过程。\n", - "\n", - "![data-conversion-concept](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/data_conversion_concept.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 基本概念" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "一个MindRecord文件由数据文件和索引文件组成,且数据文件及索引文件暂不支持重命名操作:\n", - "\n", - "- 数据文件\n", - "\n", - " 包含文件头、标量数据页、块数据页,用于存储用户归一化后的训练数据,且单个MindRecord文件建议小于20G,用户可将大数据集进行分片存储为多个MindRecord文件。\n", - "\n", - "\n", - "- 索引文件\n", - "\n", - " 包含基于标量数据(如图像Label、图像文件名等)生成的索引信息,用于方便的检索、统计数据集信息。\n", - "\n", - "![mindrecord](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/mindrecord.png)\n", - "\n", - "数据文件主要由以下几个关键部分组成:\n", - "\n", - "- 文件头\n", - " \n", - " 文件头主要用来存储文件头大小、标量数据页大小、块数据页大小、Schema信息、索引字段、统计信息、文件分区信息、标量数据与块数据对应关系等,是MindRecord文件的元信息。\n", - "\n", - "\n", - "- 标量数据页\n", - " \n", - " 标量数据页主要用来存储整型、字符串、浮点型数据,如图像的Label、图像的文件名、图像的长宽等信息,即适合用标量来存储的信息会保存在这里。\n", - "\n", - "\n", - "- 块数据页\n", - " \n", - " 块数据页主要用来存储二进制串、Numpy数组等数据,如二进制图像文件本身、文本转换成的字典等。\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 将数据集转换为MindRecord\n", - "\n", - "下面本教程将简单演示如何将图片数据及其标注转换为MindRecord格式。更多MindSpore数据格式转换说明,可参见编程指南中[MindSpore数据格式转换](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_conversion.html)章节。" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-22T02:13:50.133177Z", - "start_time": "2021-02-22T02:13:50.127990Z" - } - }, - "source": [ - "示例一:展示如何将数据按照定义的数据集结构转换为MindRecord数据文件。\n", - "\n", - "1. 
导入文件写入工具类`FileWriter`。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-22T02:14:54.114866Z", - "start_time": "2021-02-22T02:14:11.315731Z" - } - }, - "outputs": [], - "source": [ - "from mindspore.mindrecord import FileWriter" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 定义数据集结构文件Schema。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "cv_schema_json = {\"file_name\": {\"type\": \"string\"}, \"label\": {\"type\": \"int32\"}, \"data\": {\"type\": \"bytes\"}}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Schema文件主要包含字段名`name`、字段数据类型`type`和字段各维度维数`shape`:\n", - "\n", - "- 字段名:字段的引用名称,可以包含字母、数字和下划线。\n", - "\n", - "- 字段数据类型:包含int32、int64、float32、float64、string、bytes。\n", - "\n", - "- 字段维数:一维数组用[-1]表示,更高维度可表示为[m, n, …],其中m、n为各维度维数。\n", - "\n", - "> 如果字段有属性`shape`,则用户传入`write_raw_data`接口的数据必须为`numpy.ndarray`类型,对应数据类型必须为int32、int64、float32、float64。\n", - "\n", - "3. 按照用户定义的Schema格式,准备需要写入的数据列表,此处传入的是图片数据的二进制流。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "data = [{\"file_name\": \"1.jpg\", \"label\": 0, \"data\": b\"\\x10c\\xb3w\\xa8\\xee$o&\\xd4\\x00\\xf8\\x129\\x15\\xd9\\xf2q\\xc0\\xa2\\x91YFUO\\x1dsE1\\x1ep\"},\n", - " {\"file_name\": \"3.jpg\", \"label\": 99, \"data\": b\"\\xaf\\xafU<\\xb8|6\\xbd}\\xc1\\x99[\\xeaj+\\x8f\\x84\\xd3\\xcc\\xa0,i\\xbb\\xb9-\\xcdz\\xecp{T\\xb1\\xdb\"}]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "4. 添加索引字段可以加速数据读取,该步骤非必选。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "indexes = [\"file_name\", \"label\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "5. 
创建FileWriter对象,传入文件名及分片数量,然后添加Schema文件及索引,调用`write_raw_data`接口写入数据,最后调用`commit`接口生成本地数据文件。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MSRStatus.SUCCESS" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "writer = FileWriter(file_name=\"test.mindrecord\", shard_num=4)\n", - "writer.add_schema(cv_schema_json, \"test_schema\")\n", - "writer.add_index(indexes)\n", - "writer.write_raw_data(data)\n", - "writer.commit()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "该示例会生成`test.mindrecord0`、`test.mindrecord0.db`、`test.mindrecord1`、`test.mindrecord1.db`、`test.mindrecord2`、`test.mindrecord2.db`、`test.mindrecord3`、`test.mindrecord3.db`共8个文件,称为MindRecord数据集。`test.mindrecord0`和`test.mindrecord0.db`称为1个MindRecord文件,其中`test.mindrecord0`为数据文件,`test.mindrecord0.db`为索引文件。\n", - "\n", - "接口说明:\n", - "\n", - "- `write_raw_data`:将数据写入到内存之中。\n", - "\n", - "- `commit`:将最终内存中的数据写入到磁盘。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "6. 
如果需要在现有数据格式文件中增加新数据,可以调用`open_for_append`接口打开已存在的数据文件,继续调用`write_raw_data`接口写入新数据,最后调用`commit`接口生成本地数据文件。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-22T02:32:29.779366Z", - "start_time": "2021-02-22T02:32:29.606138Z" - } - }, - "outputs": [ - { - "data": { - "text/plain": [ - "MSRStatus.SUCCESS" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "writer = FileWriter.open_for_append(\"test.mindrecord0\")\n", - "writer.write_raw_data(data)\n", - "writer.commit()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "示例二:将`jpg`格式的图片,按照示例一的方法,将其转换成MindRecord数据集。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载需要处理的图片数据`transform.jpg`作为待处理的原始数据。\n", - "\n", - "创建文件夹目录`./datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/`用于存放本次体验中所有的转换数据集。\n", - "\n", - "创建文件夹目录`./datasets/convert_dataset_to_mindrecord/images/`用于存放下载下来的图片数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/convert_dataset_to_mindrecord/images/\n", - "└── transform.jpg\n", - "\n", - "0 directories, 1 file\n" - ] - } - ], - "source": [ - "!wget -N https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/datasets/transform.jpg\n", - "!mkdir -p ./datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/\n", - "!mkdir -p ./datasets/convert_dataset_to_mindrecord/images/\n", - "!mv -f ./transform.jpg ./datasets/convert_dataset_to_mindrecord/images/\n", - "!tree ./datasets/convert_dataset_to_mindrecord/images/" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "执行以下代码,将下载的`transform.jpg`转换为MindRecord数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MSRStatus.SUCCESS" - ] 
- }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# step 1 import class FileWriter\n", - "import os \n", - "from mindspore.mindrecord import FileWriter\n", - "\n", - "# clean up old run files before in Linux\n", - "data_path = './datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/'\n", - "os.system('rm -f {}test.*'.format(data_path))\n", - "\n", - "# import FileWriter class ready to write data\n", - "data_record_path = './datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/test.mindrecord'\n", - "writer = FileWriter(file_name=data_record_path,shard_num=4)\n", - "\n", - "# define the data type\n", - "data_schema = {\"file_name\":{\"type\":\"string\"},\"label\":{\"type\":\"int32\"},\"data\":{\"type\":\"bytes\"}}\n", - "writer.add_schema(data_schema,\"test_schema\")\n", - "\n", - "# prepeare the data contents\n", - "file_name = \"./datasets/convert_dataset_to_mindrecord/images/transform.jpg\"\n", - "with open(file_name, \"rb\") as f:\n", - " bytes_data = f.read()\n", - "data = [{\"file_name\":\"transform.jpg\", \"label\":1, \"data\":bytes_data}]\n", - "\n", - "# add index field\n", - "indexes = [\"file_name\",\"label\"]\n", - "writer.add_index(indexes)\n", - "\n", - "# save data to the files\n", - "writer.write_raw_data(data)\n", - "writer.commit()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "该示例会生成8个文件,成为MindRecord数据集。`test.mindrecord0`和`test.mindrecord0.db`称为1个MindRecord文件,其中`test.mindrecord0`为数据文件,`test.mindrecord0.db`为索引文件,生成的文件如下所示:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/\n", - "├── test.mindrecord0\n", - "├── test.mindrecord0.db\n", - "├── test.mindrecord1\n", - "├── test.mindrecord1.db\n", - "├── test.mindrecord2\n", - "├── test.mindrecord2.db\n", - "├── test.mindrecord3\n", - "└── 
test.mindrecord3.db\n", - "\n", - "0 directories, 8 files\n" - ] - } - ], - "source": [ - "!tree ./datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 读取MindRecord数据集" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下面将简单演示如何通过`MindDataset`读取MindRecord数据集。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 导入读取类`MindDataset`。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 首先使用`MindDataset`读取MindRecord数据集,然后对数据创建了字典迭代器,并通过迭代器读取了一条数据记录。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-22T02:35:01.078717Z", - "start_time": "2021-02-22T02:35:01.036028Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "sample: {'data': array([255, 216, 255, ..., 159, 255, 217], dtype=uint8), 'file_name': array(b'transform.jpg', dtype='|S13'), 'label': array(1, dtype=int32)}\n", - "Got 1 samples\n" - ] - } - ], - "source": [ - "file_name = './datasets/convert_dataset_to_mindrecord/datas_to_mindrecord/test.mindrecord0'\n", - "# create MindDataset for reading data\n", - "define_data_set = ds.MindDataset(dataset_file=file_name)\n", - "# create a dictionary iterator and read a data record through the iterator\n", - "count = 0\n", - "for item in define_data_set.create_dict_iterator(output_numpy=True):\n", - " print(\"sample: {}\".format(item))\n", - " count += 1\n", - "print(\"Got {} samples\".format(count))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": 
"text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/training/source_zh_cn/advanced_use/custom_debugging_info.md b/tutorials/training/source_zh_cn/advanced_use/custom_debugging_info.md deleted file mode 100644 index 2be65cf68ec77e488ba8d4c7d5e4118e5b7d9189..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/custom_debugging_info.md +++ /dev/null @@ -1,412 +0,0 @@ -# 自定义调试信息 - -`Linux` `Ascend` `GPU` `CPU` `模型调优` `中级` `高级` - - - -- [自定义调试信息](#自定义调试信息) - - [概述](#概述) - - [Callback介绍](#callback介绍) - - [MindSpore的Callback能力](#mindspore的callback能力) - - [自定义Callback](#自定义callback) - - [MindSpore metrics功能介绍](#mindspore-metrics功能介绍) - - [Print算子功能介绍](#print算子功能介绍) - - [数据Dump功能介绍](#数据dump功能介绍) - - [同步Dump功能使用方法](#同步dump功能使用方法) - - [异步Dump功能使用方法](#异步dump功能使用方法) - - [Running Data Recorder](#running-data-recorder) - - [使用方法](#使用方法) - - [通过配置文件配置RDR](#通过配置文件配置rdr) - - [通过环境变量配置RDR](#通过环境变量配置rdr) - - [异常处理](#异常处理) - - [内存复用](#内存复用) - - [使用方法](#使用方法-1) - - [日志相关的环境变量和配置](#日志相关的环境变量和配置) - - - - -   - -   - - -## 概述 - -本文介绍如何使用MindSpore提供的`Callback`、`metrics`、`Print`算子、日志打印等自定义能力,帮助用户快速调试训练网络。 - -## Callback介绍 - -`Callback`是回调函数的意思,但它其实不是一个函数而是一个类,用户可以使用回调函数来观察训练过程中网络内部的状态和相关信息,或在特定时期执行特定动作。 -例如监控loss、保存模型参数、动态调整参数、提前终止训练任务等。 - -### MindSpore的Callback能力 - -MindSpore提供`Callback`能力,支持用户在训练/推理的特定阶段,插入自定义的操作。包括: - -- MindSpore框架提供的`ModelCheckpoint`、`LossMonitor`、`SummaryCollector`等`Callback`类。 -- MindSpore支持用户自定义`Callback`。 - -使用方法:在`model.train`方法中传入`Callback`对象,它可以是一个`Callback`列表,例: - -```python -ckpt_cb = ModelCheckpoint() -loss_cb = LossMonitor() -summary_cb = SummaryCollector(summary_dir='./summary_dir') -model.train(epoch, dataset, callbacks=[ckpt_cb, loss_cb, summary_cb]) -``` - -`ModelCheckpoint`可以保存模型参数,以便进行再训练或推理。 
-`LossMonitor`可以在日志中输出loss,方便用户查看,同时它还会监控训练过程中的loss值变化情况,当loss值为`Nan`或`Inf`时终止训练。 -`SummaryCollector` 可以把训练过程中的信息存储到文件中,以便后续可视化展示。 -在训练过程中,`Callback`列表会按照定义的顺序执行`Callback`函数。因此在定义过程中,需考虑`Callback`之间的依赖关系。 - -### 自定义Callback - -用户可以基于`Callback`基类,根据自身的需求,实现自定义`Callback`。 - -`Callback`基类定义如下所示: - -```python -class Callback(): - """Callback base class""" - def begin(self, run_context): - """Called once before the network executing.""" - pass - - def epoch_begin(self, run_context): - """Called before each epoch beginning.""" - pass - - def epoch_end(self, run_context): - """Called after each epoch finished.""" - pass - - def step_begin(self, run_context): - """Called before each step beginning.""" - pass - - def step_end(self, run_context): - """Called after each step finished.""" - pass - - def end(self, run_context): - """Called once after network training.""" - pass -``` - -`Callback`可以把训练过程中的重要信息记录下来,通过一个字典类型变量`cb_params`传递给`Callback`对象, -用户可以在各个自定义的`Callback`中获取到相关属性,执行自定义操作。也可以自定义其他变量传递给`cb_params`对象。 - -`cb_params`中的主要属性包括: - -- `loss_fn`:损失函数 -- `optimizer`:优化器 -- `train_dataset`:训练的数据集 -- `cur_epoch_num`:当前的epoch数 -- `cur_step_num`:当前的step数 -- `batch_num`:一个epoch中step的数量 -- ... 
- -用户可以继承`Callback`基类自定义`Callback`对象。 - -下面通过两个例子,进一步了解自定义`Callback`的用法。 - -> 自定义`Callback`样例代码: -> -> - -- 在规定时间内终止训练。 - - ```python - class StopAtTime(Callback): - def __init__(self, run_time): - super(StopAtTime, self).__init__() - self.run_time = run_time*60 - - def begin(self, run_context): - cb_params = run_context.original_args() - cb_params.init_time = time.time() - - def step_end(self, run_context): - cb_params = run_context.original_args() - epoch_num = cb_params.cur_epoch_num - step_num = cb_params.cur_step_num - loss = cb_params.net_outputs - cur_time = time.time() - if (cur_time - cb_params.init_time) > self.run_time: - print("epoch: ", epoch_num, " step: ", step_num, " loss: ", loss) - run_context.request_stop() - ``` - - 实现逻辑为:通过`run_context.original_args`方法可以获取到`cb_params`字典,字典里会包含前文描述的主要属性信息。 - 同时可以对字典内的值进行修改和添加,上述用例中,在`begin`中定义一个`init_time`对象传递给`cb_params`字典。 - 在每次`step_end`会做出判断,当训练时间大于设置的时间阈值时,会向`run_context`传递终止训练的信号,提前终止训练,并打印当前的`epoch`、`step`、`loss`的值。 - -- 保存训练过程中精度最高的checkpoint文件。 - - ```python - class SaveCallback(Callback): - def __init__(self, eval_model, ds_eval): - super(SaveCallback, self).__init__() - self.model = eval_model - self.ds_eval = ds_eval - self.acc = 0 - - def step_end(self, run_context): - cb_params = run_context.original_args() - result = self.model.eval(self.ds_eval) - if result['accuracy'] > self.acc: - self.acc = result['accuracy'] - file_name = str(self.acc) + ".ckpt" - save_checkpoint(save_obj=cb_params.train_network, ckpt_file_name=file_name) - print("Save the maximum accuracy checkpoint,the accuracy is", self.acc) - ``` - - 具体实现逻辑为:定义一个`Callback`对象,初始化对象接收`model`对象和`ds_eval`(验证数据集)。在`step_end`阶段验证模型的精度,当精度为当前最高时,自动触发保存checkpoint方法,保存当前的参数。 - -## MindSpore metrics功能介绍 - -当训练结束后,可以使用metrics评估训练结果的好坏。 - -MindSpore提供了多种metrics评估指标,如:`accuracy`、`loss`、`precision`、`recall`、`F1`。 - -用户可以定义一个metrics字典对象,里面包含多种指标,传递给`model`对象,通过`model.eval`来验证训练的效果。 - -> `metrics`使用样例代码: -> -> - -```python -metrics = { - 'accuracy': 
nn.Accuracy(), - 'loss': nn.Loss(), - 'precision': nn.Precision(), - 'recall': nn.Recall(), - 'f1_score': nn.F1() -} -model = Model(network=net, loss_fn=net_loss, optimizer=net_opt, metrics=metrics) -result = model.eval(ds_eval) -``` - -`model.eval`方法会返回一个字典,里面是传入metrics的指标和结果。 - -在eval过程中也可以使用`Callback`功能,用户可以调用相关API或自定义`Callback`方法实现想要的功能。 - -用户也可以定义自己的`metrics`类,通过继承`Metric`基类,并重写`clear`、`update`、`eval`三个方法即可实现。 - -以`Accuracy`算子举例说明其内部实现原理: - -`Accuracy`继承了`EvaluationBase`基类,重写了上述三个方法。 - -- `clear`方法会把类中相关计算参数初始化。 -- `update`方法接受预测值和标签值,更新`Accuracy`内部变量。 -- `eval`方法会计算相关指标,返回计算结果。 - -调用`Accuracy`的`eval`方法,即可得到计算结果。 - -通过如下代码可以更清楚了解到`Accuracy`是如何运行的: - -```python -x = Tensor(np.array([[0.2, 0.5], [0.3, 0.1], [0.9, 0.6]])) -y = Tensor(np.array([1, 0, 1])) -metric = Accuracy() -metric.clear() -metric.update(x, y) -accuracy = metric.eval() -print('Accuracy is ', accuracy) -``` - -输出: - -```text -Accuracy is 0.6667 -``` - -## Print算子功能介绍 - -MindSpore的自研`Print`算子可以将用户输入的Tensor或字符串信息打印出来,支持多字符串输入,多Tensor输入和字符串与Tensor的混合输入,输入参数以逗号隔开。目前`Print`算子仅支持在Ascend环境下使用。 - -`Print`算子使用方法与其他算子相同,在网络中的`__init__`声明算子并在`construct`进行调用,具体使用实例及输出结果如下: - -```python -import numpy as np -from mindspore import Tensor -import mindspore.ops as ops -import mindspore.nn as nn -import mindspore.context as context - -context.set_context(mode=context.GRAPH_MODE) - -class PrintDemo(nn.Cell): - def __init__(self): - super(PrintDemo, self).__init__() - self.print = ops.Print() - - def construct(self, x, y): - self.print('print Tensor x and Tensor y:', x, y) - return x - -x = Tensor(np.ones([2, 1]).astype(np.int32)) -y = Tensor(np.ones([2, 2]).astype(np.int32)) -net = PrintDemo() -output = net(x, y) -``` - -输出: - -```text -print Tensor x and Tensor y: -Tensor(shape=[2, 1], dtype=Int32, value= -[[1] - [1]]) -Tensor(shape=[2, 2], dtype=Int32, value= -[[1 1] - [1 1]]) -``` - -## 数据Dump功能介绍 - 
-训练网络时,若训练结果和预期有偏差,可以通过数据Dump功能保存算子的输入输出进行调试。详细Dump功能介绍参考[Dump功能说明](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/dump_in_graph_mode.html#dump)。 - -### 同步Dump功能使用方法 - -同步Dump功能使用参考[同步Dump操作步骤](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/dump_in_graph_mode.html#id5)。 - -### 异步Dump功能使用方法 - -异步Dump功能使用参考[异步Dump操作步骤](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/dump_in_graph_mode.html#id10)。 - -## Running Data Recorder - -Running Data Recorder(RDR)是MindSpore提供训练程序运行时记录数据的功能。要记录的数据将会在MindSpore中进行预设,运行训练脚本时,如果MindSpore出现了运行异常,则会自动地导出MindSpore中预先记录的数据以辅助定位运行异常的原因。不同的运行异常将会导出不同的数据,比如出现`Run task error`异常,将会导出计算图、图执行顺序、内存分配等信息以辅助定位异常的原因。 - -> 并非所有运行异常都会导出数据,目前仅支持部分异常导出数据。 -> -> 目前仅支持图模式训练场景下,收集CPU/Ascend/GPU的相关数据。 - -### 使用方法 - -#### 通过配置文件配置RDR - -1. 创建配置文件`mindspore_config.json`。 - - ```json - { - "rdr": { - "enable": true, - "path": "/home/mindspore/rdr" - } - } - ``` - - > enable: 控制RDR功能是否开启。 - > - > path: 设置RDR保存数据的路径。当前必须为绝对路径。 - -2. 通过 `context` 配置RDR。 - - ```python3 - context.set_context(env_config_path="./mindspore_config.json") - ``` - -#### 通过环境变量配置RDR - -通过`export MS_RDR_ENABLE=1`来开启RDR, 然后设置RDR文件导出路径: `export MS_RDR_PATH=/absolute/path`. - -> 用户设置的配置文件优先级高于环境变量。 - -#### 异常处理 - -假如在Ascend 910上使用MindSpore进行训练,训练出现了`Run task error`异常。 - -这时我们到`/home/mindspore/rdr`目录中,可以看到有几个文件出现在该目录中,每一个文件都代表着一种数据。比如 `hwopt_d_before_graph_0.ir` 该文件为计算图文件。可以使用文本工具打开该文件,用以查看计算图,分析计算图是否符合预期。 - -## 内存复用 - -内存复用功能(Mem Reuse)是让不同的Tensor共用同样的一部分内存,以降低内存开销,支撑更大的网络,关闭后每个Tensor有自己独立的内存空间,Tensor间无共享内存。 -MindSpore内存复用功能默认开启,可以通过以下方式手动控制该功能的关闭和开启。 - -### 使用方法 - -1. 创建配置文件`mindspore_config.json`。 - - ```json - { - "sys": { - "mem_reuse": true - } - } - ``` - - > mem_reuse: 控制内存复用功能是否开启,当设置为true时,控制内存复用功能开启,为false时,内存复用功能关闭。 - -2. 
通过 `context` 配置内存复用功能。 - - ```python3 - context.set_context(env_config_path="./mindspore_config.json") - ``` - -## 日志相关的环境变量和配置 - -MindSpore采用glog来输出日志,常用的几个环境变量如下: - -- `GLOG_v` - - 该环境变量控制日志的级别。 - 该环境变量默认值为2,即WARNING级别,对应关系如下:0-DEBUG、1-INFO、2-WARNING、3-ERROR。 - -- `GLOG_logtostderr` - - 该环境变量控制日志的输出方式。 - 该环境变量的值设置为1时,日志输出到屏幕;值设置为0时,日志输出到文件。默认值为1。 - -- `GLOG_log_dir` - - 该环境变量指定日志输出的路径。 - 若`GLOG_logtostderr`的值为0,则必须设置此变量。 - 若指定了`GLOG_log_dir`且`GLOG_logtostderr`的值为1时,则日志输出到屏幕,不输出到文件。 - C++和Python的日志会被输出到不同的文件中,C++日志的文件名遵从`GLOG`日志文件的命名规则,这里是`mindspore.机器名.用户名.log.日志级别.时间戳.进程ID`,Python日志的文件名为`mindspore.log.进程ID`。 - -- `MS_SUBMODULE_LOG_v` - - 该环境变量指定MindSpore C++各子模块的日志级别。 - 该环境变量赋值方式为:`MS_SUBMODULE_LOG_v="{SubModule1:LogLevel1,SubModule2:LogLevel2,...}"`。 - 其中被指定子模块的日志级别将覆盖`GLOG_v`在此模块内的设置,此处子模块的日志级别`LogLevel`与`GLOG_v`的日志级别含义相同,MindSpore子模块的划分如下表。 - 例如可以通过`GLOG_v=1 MS_SUBMODULE_LOG_v="{PARSER:2,ANALYZER:2}"`把`PARSER`和`ANALYZER`模块的日志级别设为WARNING,其他模块的日志级别设为INFO。 - -- `GLOG_stderrthreshold` - - 日志模块在将日志输出到文件的同时也会将日志打印到屏幕,该环境变量用于控制此种场景下打印到屏幕的日志级别。 - 该环境变量默认值为2,即WARNING级别,对应关系如下:0-DEBUG、1-INFO、2-WARNING、3-ERROR。 - -MindSpore子模块按照目录划分如下: - -| Source Files | Sub Module Name | -| -------------------------------------------- | --------------- | -| mindspore/ccsrc/backend/kernel_compiler | KERNEL | -| mindspore/ccsrc/backend/optimizer | PRE_ACT | -| mindspore/ccsrc/backend/session | SESSION | -| mindspore/ccsrc/common | COMMON | -| mindspore/ccsrc/debug | DEBUG | -| mindspore/ccsrc/frontend/operator | ANALYZER | -| mindspore/ccsrc/frontend/optimizer | OPTIMIZER | -| mindspore/ccsrc/frontend/parallel | PARALLEL | -| mindspore/ccsrc/minddata/dataset | MD | -| mindspore/ccsrc/minddata/mindrecord | MD | -| mindspore/ccsrc/pipeline/jit/*.cc | PIPELINE | -| mindspore/ccsrc/pipeline/jit/parse | PARSER | -| mindspore/ccsrc/pipeline/jit/static_analysis | ANALYZER | -| mindspore/ccsrc/pipeline/pynative | PYNATIVE | -| mindspore/ccsrc/profiler | PROFILER | -| mindspore/ccsrc/pybind_api 
| COMMON | -| mindspore/ccsrc/runtime/device | DEVICE | -| mindspore/ccsrc/transform/graph_ir | GE_ADPT | -| mindspore/ccsrc/transform/express_ir | EXPRESS | -| mindspore/ccsrc/utils | UTILS | -| mindspore/ccsrc/vm | VM | -| mindspore/ccsrc | ME | -| mindspore/core/gvar | COMMON | -| mindspore/core/ | CORE | - -> glog不支持日志文件的绕接,如果需要控制日志文件对磁盘空间的占用,可选用操作系统提供的日志文件管理工具,例如:Linux的logrotate。 diff --git a/tutorials/training/source_zh_cn/advanced_use/custom_loss_function.md b/tutorials/training/source_zh_cn/advanced_use/custom_loss_function.md deleted file mode 100644 index 01460ae8599de0186fd24c5c8503b3aa8159ad83..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/custom_loss_function.md +++ /dev/null @@ -1,443 +0,0 @@ -# 定义与使用损失函数 - -`Linux` `Ascend` `GPU` `CPU` `模型开发` `高级` - - - -- [定义与使用损失函数](#定义与使用损失函数) - - [概述](#概述) - - [定义损失函数](#定义损失函数) - - [损失函数与模型训练](#损失函数与模型训练) - - [定义数据集和网络](#定义数据集和网络) - - [使用Model进行模型训练](#使用model进行模型训练) - - [多标签损失函数与模型训练](#多标签损失函数与模型训练) - - [定义多标签数据集](#定义多标签数据集) - - [定义多标签损失函数](#定义多标签损失函数) - - [使用Model进行多标签模型训练](#使用model进行多标签模型训练) - - - -## 概述 - -损失函数,又叫目标函数,用于衡量预测值与真实值差异的程度。在深度学习中,模型训练就是通过不停地迭代来缩小损失函数值的过程。因此,在模型训练过程中损失函数的选择非常重要,定义一个好的损失函数,可以有效提高模型的性能。 - -MindSpore提供了许多通用损失函数供用户选择,但这些通用损失函数并不适用于所有场景,很多情况需要用户自定义所需的损失函数。因此,本教程介绍损失函数的写作方法。 - -## 定义损失函数 - -Cell是MindSpore的基本网络单元,可以用于构建网络,损失函数也需要通过Cell来定义。使用Cell定义损失函数的方法与定义一个普通的网络相同,差别在于,其执行逻辑用于计算前向网络输出与真实值之间的误差。 - -以MindSpore提供的损失函数L1Loss为例,损失函数的定义方法如下: - -```python -import mindspore.nn as nn -import mindspore.ops as ops - -class L1Loss(nn.Cell): - def __init__(self): - super(L1Loss, self).__init__() - self.abs = ops.Abs() - self.reduce_mean = ops.ReduceMean() - - def construct(self, base, target): - x = self.abs(base - target) - return self.reduce_mean(x) -``` - -在`__init__`方法中实例化所需的算子,并在`construct`中调用这些算子。这样,一个用于计算L1Loss的损失函数就定义好了。 - -给定一组预测值和真实值,调用损失函数,就可以得到这组预测值和真实值之间的差异,如下所示: - -```python -import numpy as np -from mindspore import Tensor - -loss = 
L1Loss() -input_data = Tensor(np.array([0.1, 0.2, 0.3]).astype(np.float32)) -target_data = Tensor(np.array([0.1, 0.2, 0.2]).astype(np.float32)) - -output = loss(input_data, target_data) -print(output) -``` - -以`Ascend`后端为例,输出结果如下: - -```python -0.03333334 -``` - -在定义损失函数时还可以继承损失函数的基类`_Loss`。`_Loss`提供了`get_loss`方法,用于对损失值求和或求均值,输出一个标量。L1Loss使用`_Loss`作为基类的定义如下: - -```python -import mindspore.ops as ops -from mindspore.nn.loss.loss import _Loss - -class L1Loss(_Loss): - def __init__(self, reduction="mean"): - super(L1Loss, self).__init__(reduction) - self.abs = ops.Abs() - - def construct(self, base, target): - x = self.abs(base - target) - return self.get_loss(x) -``` - -首先,使用`_Loss`作为L1Loss的基类,然后给`__init__`增加一个参数`reduction`,并通过`super`传给基类,最后在`construct`中调用基类提供的`get_loss`方法。`reduction`的合法参数有三个,`mean`、`sum`和`none`,分别表示求均值、求和与输出原值。 - -## 损失函数与模型训练 - -接下来使用定义好的L1Loss进行模型训练。 - -### 定义数据集和网络 - -这里使用简单的线性拟场景作为样例,数据集和网络结构定义如下: - -> 线性拟合详细介绍可参考教程[实现简单线性函数拟合](https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/linear_regression.html) - -1. 定义数据集 - - ```python - import numpy as np - from mindspore import dataset as ds - - def get_data(num, w=2.0, b=3.0): - for _ in range(num): - x = np.random.uniform(-10.0, 10.0) - noise = np.random.normal(0, 1) - y = x * w + b + noise - yield np.array([x]).astype(np.float32), np.array([y]).astype(np.float32) - - def create_dataset(num_data, batch_size=16): - dataset = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data', 'label']) - dataset = dataset.batch(batch_size) - return dataset - ``` - -2. 
定义网络 - - ```python - from mindspore.common.initializer import Normal - import mindspore.nn as nn - - class LinearNet(nn.Cell): - def __init__(self): - super(LinearNet, self).__init__() - self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02)) - - def construct(self, x): - return self.fc(x) - ``` - -### 使用Model进行模型训练 - -`Model`是MindSpore提供的用于模型训练、评估和推理的高阶API。创建数据集并定义一个`Model`就可以使用`train`接口进行模型训练。接下来我们使用`Model`进行模型训练,并采用之前定义好的`L1Loss`作为此次训练的损失函数。 - -1. 定义前向网络、损失函数和优化器 - - 使用之前定义的`LinearNet`和`L1Loss`作为前向网络和损失函数,并选择MindSpore提供的`Momemtum`作为优化器。 - - ```python - # define network - net = LinearNet() - # define loss function - loss = L1Loss() - # define optimizer - opt = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9) - ``` - -2. 定义`Model` - - 定义`Model`时需要指定前向网络、损失函数和优化器,`Model`内部会将它们关联起来,组成一张训练网。 - - ```python - from mindspore import Model - - # define Model - model = Model(net, loss, opt) - ``` - -3. 创建数据集,并调用`train`接口进行模型训练 - - 调用`train`接口时必须指定迭代次数`epoch`和训练数据集`train_dataset`,我们将`epoch`设置为1,将`create_dataset`创建的数据集作为训练集。`callbacks`是`train`接口的可选参数,在`callbacks`中使用`LossMonitor`可以监控训练过程中损失函数值的变化。`dataset_sink_mode`也是一个可选参数,这里设置为`False`,表示使用非下沉模式进行训练。 - - ```python - from mindspore.train.callback import LossMonitor - - # create dataset - ds_train = create_dataset(num_data=160) - # training - model.train(epoch=1, train_dataset=ds_train, callbacks=[LossMonitor()], dataset_sink_mode=False) - ``` - -完整代码如下: - -```python -import numpy as np - -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Model -from mindspore import dataset as ds -from mindspore.nn.loss.loss import _Loss -from mindspore.common.initializer import Normal -from mindspore.train.callback import LossMonitor - -class LinearNet(nn.Cell): - def __init__(self): - super(LinearNet, self).__init__() - self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02)) - - def construct(self, x): - return self.fc(x) - -class L1Loss(_Loss): - def __init__(self, reduction="mean"): - 
super(L1Loss, self).__init__(reduction) - self.abs = ops.Abs() - - def construct(self, base, target): - x = self.abs(base - target) - return self.get_loss(x) - -def get_data(num, w=2.0, b=3.0): - for _ in range(num): - x = np.random.uniform(-10.0, 10.0) - noise = np.random.normal(0, 1) - y = x * w + b + noise - yield np.array([x]).astype(np.float32), np.array([y]).astype(np.float32) - -def create_dataset(num_data, batch_size=16): - dataset = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data', 'label']) - dataset = dataset.batch(batch_size) - return dataset - -# define network -net = LinearNet() -# define loss functhon -loss = L1Loss() -# define optimizer -opt = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9) -# define Model -model = Model(net, loss, opt) -# create dataset -ds_train = create_dataset(num_data=160) -# training -model.train(epoch=1, train_dataset=ds_train, callbacks=[LossMonitor()], dataset_sink_mode=False) -``` - -执行结果如下: - -```text -epoch: 1 step: 1, loss is 8.328788 -epoch: 1 step: 2, loss is 8.594973 -epoch: 1 step: 3, loss is 13.299595 -epoch: 1 step: 4, loss is 9.04059 -epoch: 1 step: 5, loss is 8.991402 -epoch: 1 step: 6, loss is 6.5928526 -epoch: 1 step: 7, loss is 8.239887 -epoch: 1 step: 8, loss is 7.3984795 -epoch: 1 step: 9, loss is 7.33724 -epoch: 1 step: 10, loss is 4.3588376 -``` - -## 多标签损失函数与模型训练 - -上一章定义了一个简单的损失函数`L1Loss`,其他损失函数可以仿照`L1Loss`进行编写。但许多深度学习应用的数据集较复杂,例如目标检测网络Faster R-CNN的数据中就包含多个标签,而不是简单的data和label,这时候损失函数的定义和使用略有不同。 - -Faster R-CNN网络结构较复杂,不便在此处详细展开。本章对上一章中描述的线性拟合场景进行扩展,手动构建一个多标签数据集,介绍在这种场景下如何定义损失函数,并通过`Model`进行训练。 - -### 定义多标签数据集 - -首先定义数据集。对之前定义的数据集稍作修改: - -1. `get_multilabel_data`中产生两个标签`y1`和`y2` -2. 
`GeneratorDataset`的`column_names`参数设置为['data', 'label1', 'label2'] - -这样通过`create_multilabel_dataset`产生的数据集就有一个数据`data`,两个标签`label1`和`label2`。 - -```python -import numpy as np -from mindspore import dataset as ds - -def get_multilabel_data(num, w=2.0, b=3.0): - for _ in range(num): - x = np.random.uniform(-10.0, 10.0) - noise1 = np.random.normal(0, 1) - noise2 = np.random.normal(-1, 1) - y1 = x * w + b + noise1 - y2 = x * w + b + noise2 - yield np.array([x]).astype(np.float32), np.array([y1]).astype(np.float32), np.array([y2]).astype(np.float32) - -def create_multilabel_dataset(num_data, batch_size=16): - dataset = ds.GeneratorDataset(list(get_multilabel_data(num_data)), column_names=['data', 'label1', 'label2']) - dataset = dataset.batch(batch_size) - return dataset -``` - -### 定义多标签损失函数 - -针对上一步创建的数据集,定义损失函数`L1LossForMultiLabel`。此时,损失函数`construct`的输入有三个,预测值`base`,真实值`target1`和`target2`,我们在`construct`中分别计算预测值与真实值`target1`、`target2`之间的误差,将这两个误差的均值作为最终的损失函数值,具体如下: - -```python -import mindspore.ops as ops -from mindspore.nn.loss.loss import _Loss - -class L1LossForMultiLabel(_Loss): - def __init__(self, reduction="mean"): - super(L1LossForMultiLabel, self).__init__(reduction) - self.abs = ops.Abs() - - def construct(self, base, target1, target2): - x1 = self.abs(base - target1) - x2 = self.abs(base - target2) - return self.get_loss(x1)/2 + self.get_loss(x2)/2 -``` - -### 使用Model进行多标签模型训练 - -刚才提到过,Model内部会关联用户指定的前向网络、损失函数和优化器。其中,前向网络和损失函数是通过`nn.WithLossCell`关联起来的,`nn.WithLossCell`会将前向网络和损失函数连接起来,如下: - -```python -import mindspore.nn as nn - -class WithLossCell(nn.Cell): - def __init__(self, backbone, loss_fn): - super(WithLossCell, self).__init__(auto_prefix=False) - self._backbone = backbone - self._loss_fn = loss_fn - - def construct(self, data, label): - output = self._backbone(data) - return self._loss_fn(output, label) -``` - -注意到`Model`默认使用的`nn.WithLossCell`只有两个输入,`data`和`label`,对于多个标签的场景显然不适用。此时,如果想要使用`Model`进行模型训练就需要用户将前向网络与损失函数连接起来,具体如下: - -1. 
定义适用于当前场景的`CustomWithLossCell` - - 仿照`nn.WithLossCell`进行定义,将`construct`的输入修改为三个,将数据部分传给`backend`,将预测值和两个标签传给`loss_fn`。 - - ```python - import mindspore.nn as nn - - class CustomWithLossCell(nn.Cell): - def __init__(self, backbone, loss_fn): - super(CustomWithLossCell, self).__init__(auto_prefix=False) - self._backbone = backbone - self._loss_fn = loss_fn - - def construct(self, data, label1, label2): - output = self._backbone(data) - return self._loss_fn(output, label1, label2) - ``` - -2. 使用`CustomWithLossCell`将前向网络和损失函数连接起来 - - 前向网络使用上一章定义的`LinearNet`,损失函数使用`L1LossForMultiLabel`,用`CustomWithLossCell`将它们连接起来,如下: - - ```python - net = LinearNet() - loss = L1LossForMultiLabel() - loss_net = CustomWithLossCell(net, loss) - ``` - - 这样`loss_net`中就包含了前向网络和损失函数的运算逻辑。 - -3. 定义Model并进行模型训练 - - `Model`的`network`指定为`loss_net`,`loss_fn`不指定,优化器仍使用`Momentum`。此时用户未指定`loss_fn`,`Model`则认为`network`内部已经实现了损失函数的逻辑,便不会用`nn.WithLossCell`对前向函数和损失函数进行封装。 - - 使用`create_multilabel_dataset`创建多标签数据集并进行训练: - - ```python - from mindspore.train.callback import LossMonitor - from mindspore import Model - - opt = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9) - model = Model(network=loss_net, optimizer=opt) - ds_train = create_multilabel_dataset(num_data=160) - model.train(epoch=1, train_dataset=ds_train, callbacks=[LossMonitor()], dataset_sink_mode=False) - ``` - -完整代码如下: - -```python -import numpy as np - -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Model -from mindspore import dataset as ds -from mindspore.nn.loss.loss import _Loss -from mindspore.common.initializer import Normal -from mindspore.train.callback import LossMonitor - -class LinearNet(nn.Cell): - def __init__(self): - super(LinearNet, self).__init__() - self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02)) - - def construct(self, x): - return self.fc(x) - -class L1LossForMultiLabel(_Loss): - def __init__(self, reduction="mean"): - super(L1LossForMultiLabel, 
self).__init__(reduction) - self.abs = ops.Abs() - - def construct(self, base, target1, target2): - x1 = self.abs(base - target1) - x2 = self.abs(base - target2) - return self.get_loss(x1)/2 + self.get_loss(x2)/2 - -class CustomWithLossCell(nn.Cell): - def __init__(self, backbone, loss_fn): - super(CustomWithLossCell, self).__init__(auto_prefix=False) - self._backbone = backbone - self._loss_fn = loss_fn - - def construct(self, data, label1, label2): - output = self._backbone(data) - return self._loss_fn(output, label1, label2) - -def get_multilabel_data(num, w=2.0, b=3.0): - for _ in range(num): - x = np.random.uniform(-10.0, 10.0) - noise1 = np.random.normal(0, 1) - noise2 = np.random.normal(-1, 1) - y1 = x * w + b + noise1 - y2 = x * w + b + noise2 - yield np.array([x]).astype(np.float32), np.array([y1]).astype(np.float32), np.array([y2]).astype(np.float32) - -def create_multilabel_dataset(num_data, batch_size=16): - dataset = ds.GeneratorDataset(list(get_multilabel_data(num_data)), column_names=['data', 'label1', 'label2']) - dataset = dataset.batch(batch_size) - return dataset - -net = LinearNet() -loss = L1LossForMultiLabel() -# build loss network -loss_net = CustomWithLossCell(net, loss) - -opt = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9) -model = Model(network=loss_net, optimizer=opt) -ds_train = create_multilabel_dataset(num_data=160) -model.train(epoch=1, train_dataset=ds_train, callbacks=[LossMonitor()], dataset_sink_mode=False) -``` - -执行结果如下: - -```text -epoch: 1 step: 1, loss is 11.039986 -epoch: 1 step: 2, loss is 7.7847576 -epoch: 1 step: 3, loss is 9.236277 -epoch: 1 step: 4, loss is 8.3316345 -epoch: 1 step: 5, loss is 6.957058 -epoch: 1 step: 6, loss is 9.231144 -epoch: 1 step: 7, loss is 9.1072 -epoch: 1 step: 8, loss is 6.7703295 -epoch: 1 step: 9, loss is 6.363703 -epoch: 1 step: 10, loss is 5.014839 -``` - -本章节简单讲解了多标签数据集场景下,如何定义损失函数并使用Model进行模型训练。在很多其他场景中,也可以采用此类方法进行模型训练。 diff --git 
a/tutorials/training/source_zh_cn/advanced_use/custom_operator.rst b/tutorials/training/source_zh_cn/advanced_use/custom_operator.rst deleted file mode 100644 index 40d4197c73aed5d14e42d1ce9e64d50cb88d91d6..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/custom_operator.rst +++ /dev/null @@ -1,9 +0,0 @@ -自定义算子 -=========== - -.. toctree:: - :maxdepth: 1 - - custom_operator_ascend - custom_operator_gpu - custom_operator_cpu \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/advanced_use/custom_operator_ascend.md b/tutorials/training/source_zh_cn/advanced_use/custom_operator_ascend.md deleted file mode 100644 index c79d8c8c4afb9c40e0ca3ea87c87104eb1c46be5..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/custom_operator_ascend.md +++ /dev/null @@ -1,264 +0,0 @@ -# 自定义算子(Ascend) - -`Linux` `Ascend` `模型开发` `高级` - - - -- [自定义算子](#自定义算子) - - [概述](#概述) - - [注册算子原语](#注册算子原语) - - [实现TBE算子和注册算子信息](#实现tbe算子和注册算子信息) - - [实现TBE算子](#实现tbe算子) - - [注册算子信息](#注册算子信息) - - [示例](#示例) - - [使用自定义算子](#使用自定义算子) - - [定义算子反向传播函数](#定义算子反向传播函数) - - - - - -## 概述 - -当开发网络遇到内置算子不足以满足需求时,你可以利用MindSpore的Python API方便快捷地扩展昇腾AI处理器的自定义算子。 - -添加一个自定义算子,需要完成算子原语注册、算子实现、算子信息注册三部分工作。 - -其中: - -- 算子原语:定义了算子在网络中的前端接口原型,也是组成网络模型的基础单元,主要包括算子的名称、属性(可选)、输入输出名称、输出shape推理方法、输出dtype推理方法等信息。 -- 算子实现:通过TBE(Tensor Boost Engine)提供的特性语言接口,描述算子内部计算逻辑的实现。TBE提供了开发昇腾AI芯片自定义算子的能力。 -- 算子信息:描述TBE算子的基本信息,如算子名称、支持的输入输出类型等。它是后端做算子选择和映射时的依据。 - -本文将以自定义Square算子为例,介绍自定义算子的步骤。 - -> 更多详细内容可参考MindSpore源码中[tests/st/ops/custom_ops_tbe](https://gitee.com/mindspore/mindspore/tree/master/tests/st/ops/custom_ops_tbe)下的用例。 - -## 注册算子原语 - -每个算子的原语是一个继承于`PrimitiveWithInfer`的子类,其类型名称即是算子名称。 - -自定义算子原语与内置算子原语的接口定义完全一致: - -- 属性由构造函数`__init__`的入参定义。本用例的算子没有属性,因此`__init__`没有额外的入参。带属性的用例可参考MindSpore源码中的[custom add3](https://gitee.com/mindspore/mindspore/blob/master/tests/st/ops/custom_ops_tbe/cus_add3.py)用例。 -- 
输入输出的名称通过`init_prim_io_names`函数定义。 -- 输出Tensor的shape推理方法在`infer_shape`函数中定义,输出Tensor的dtype推理方法在`infer_dtype`函数中定义。 - -自定义算子与内置算子的唯一区别是需要通过在`__init__`函数中导入算子实现函数(`from square_impl import CusSquareImpl`)来将算子实现注册到后端。本用例在`square_impl.py`中定义了算子实现和算子信息,将在后文中说明。 - -以Square算子原语`cus_square.py`为例,给出如下示例代码。 - -```python -from mindspore.ops import prim_attr_register, PrimitiveWithInfer -import mindspore.ops as ops -# y = x^2 -class CusSquare(PrimitiveWithInfer): - """ - The definition of the CusSquare primitive. - """ - @prim_attr_register - def __init__(self): - self.init_prim_io_names(inputs=['x'], outputs=['y']) - from square_impl import CusSquareImpl # Import the entry function of the kernel implementation from relative path or PYTHONPATH. - - def infer_shape(self, data_shape): - return data_shape - - def infer_dtype(self, data_dtype): - return data_dtype -``` - -## 实现TBE算子和注册算子信息 - -### 实现TBE算子 - -通常编写一个算子的实现,需要编写一个计算函数和一个入口函数。 - -算子的计算函数主要用来封装算子的计算逻辑供主函数调用,其内部通过调用TBE的API接口组合实现算子的计算逻辑。 - -算子的入口函数描述了编译算子的内部过程,一般分为如下几步: - -1. 准备输入的placeholder,placeholder是一个占位符,返回一个Tensor对象,表示一组输入数据。 -2. 调用计算函数,计算函数使用TBE提供的API接口描述了算子内部的计算逻辑。 -3. 调用Schedule调度模块,调度模块对算子中的数据按照调度模块的调度描述进行切分,同时指定好数据的搬运流程,确保在硬件上的执行达到最优。默认可以采用自动调度模块(`auto_schedule`)。 -4. 
调用`cce_build_code`编译生成算子二进制。 - -> 入口函数的输入参数有特殊要求,需要依次为:算子每个输入的信息、算子每个输出的信息、算子属性(可选)和`kernel_name`(生成算子二进制的名称)。输入和输出的信息用字典封装传入,其中包含该算子在网络中被调用时传入的实际输入和输出的shape和dtype。 - -更多关于使用TBE开发算子的内容请参考[TBE文档](https://support.huaweicloud.com/odevg-A800_3000_3010/atlaste_10_0063.html),关于TBE算子的调试和性能优化请参考[MindStudio文档](https://support.huaweicloud.com/usermanual-mindstudioc73/atlasmindstudio_02_0043.html)。 - -### 注册算子信息 - -算子信息是指导后端选择算子实现的关键信息,同时也指导后端为算子插入合适的类型和格式转换。它通过`TBERegOp`接口定义,通过`op_info_register`装饰器将算子信息与算子实现入口函数绑定。当算子实现py文件被导入时,`op_info_register`装饰器会将算子信息注册到后端的算子信息库中。更多关于算子信息的使用方法请参考`TBERegOp`的成员方法的注释说明,算子信息的字段含义可以参考[TBE文档](https://support.huaweicloud.com/odevg-A800_3000_3010/atlaste_10_0096.html)。 - -> - 算子信息中定义输入输出信息的个数和顺序、算子实现入口函数的参数中的输入输出信息的个数和顺序、算子原语中输入输出名称列表的个数和顺序,三者要完全一致。 -> - 算子如果带属性,在算子信息中需要用`attr`描述属性信息,属性的名称与算子原语定义中的属性名称要一致。 - -### 示例 - -下面以`Square`算子的TBE实现`square_impl.py`为例进行介绍。`square_compute`是算子实现的计算函数,通过调用`te.lang.cce`提供的API描述了`x * x`的计算逻辑。`cus_square_op_info`是算子信息,通过`TBERegOp`来定义。 - -`TBERegOp`的设置需要注意以下几点: - -- `TBERegOp("CusSquare")`中算子注册名称`CusSquare`需要与算子名称一致。 -- `fusion_type("OPAQUE")`中`OPAQUE`表示自定义算子采取不融合策略。 -- `kernel_name("CusSquareImpl")`中`CusSquareImpl`需要与算子入口函数名称一致。 -- `dtype_format`用来描述算子支持的数据类型,下面示例中注册了两项,说明该算子支持两种数据类型,每一项需按照输入和输出的顺序依次描述支持的格式。第一个`dtype_format`说明支持的第一种数据类型是input0为F32_Default格式,output0为F32_Default格式。第二个`dtype_format`说明支持的第二种数据类型是input0为F16_Default格式,output0为F16_Default格式。 -- `auto_schedule`、`cce_build_code`等TBE相关接口描述请见TBE文档中[auto_schedule](https://support.huaweicloud.com/odevg-A800_3000_3010/atlaste_07_0071.html)和[cce_build_code](https://support.huaweicloud.com/odevg-A800_3000_3010/atlaste_07_0072.html)的详细说明。 - -```python -from __future__ import absolute_import -from te import tvm -from topi import generic -import te.lang.cce -from topi.cce import util -from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType - -def square_compute(input_x): - """ - The compute function of the CusSquare implementation. 
- """ - res = te.lang.cce.vmul(input_x, input_x) - return res - -# Define the kernel info of CusSquare. -cus_square_op_info = TBERegOp("CusSquare") \ - .fusion_type("OPAQUE") \ - .partial_flag(True) \ - .async_flag(False) \ - .binfile_name("square.so") \ - .compute_cost(10) \ - .kernel_name("CusSquareImpl") \ - .input(0, "x", False, "required", "all") \ - .output(0, "y", False, "required", "all") \ - .dtype_format(DataType.F32_Default, DataType.F32_Default) \ - .dtype_format(DataType.F16_Default, DataType.F16_Default) \ - .get_op_info() - -# Binding kernel info with the kernel implementation. -@op_info_register(cus_square_op_info) -def CusSquareImpl(input_x, output_y, kernel_name="CusSquareImpl"): - """ - The entry function of the CusSquare implementation. - """ - shape = input_x.get("shape") - dtype = input_x.get("dtype").lower() - - shape = util.shape_refine(shape) - data = tvm.placeholder(shape, name="data", dtype=dtype.lower()) - - with tvm.target.cce(): - res = square_compute(data) - sch = generic.auto_schedule(res) - - config = {"print_ir": False, - "name": kernel_name, - "tensor_list": [data, res]} - - te.lang.cce.cce_build_code(sch, config) -``` - -## 使用自定义算子 - -自定义算子与内置算子在网络中的使用方法一样,通过导入原语直接使用。下面以`CusSquare`的单算子网络测试为例进行说明。 - -在`test_square.py`文件中定义网络。 - -```python -import numpy as np -import mindspore.nn as nn -import mindspore.context as context -from mindspore import Tensor -# Import the definition of the CusSquare primitive. 
-from cus_square import CusSquare -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.square = CusSquare() - - def construct(self, data): - return self.square(data) - -def test_net(): - x = np.array([1.0, 4.0, 9.0]).astype(np.float32) - square = Net() - output = square(Tensor(x)) - print("x: ", x) - print("output: ", output) -``` - -执行用例: - -```bash -pytest -s tests/st/ops/custom_ops_tbe/test_square.py::test_net -``` - -执行结果: - -```text -x: [1. 4. 9.] -output: [1. 16. 81.] -``` - -## 定义算子反向传播函数 - -如果算子要支持自动微分,需要在其原语中定义其反向传播函数(bprop)。你需要在bprop中描述利用正向输入、正向输出和输出梯度得到输入梯度的反向计算逻辑。反向计算逻辑可以使用内置算子或自定义反向算子构成。 - -定义算子反向传播函数时需注意以下几点: - -- bprop函数的入参顺序约定为正向的输入、正向的输出、输出梯度。若算子为多输出算子,正向输出和输出梯度将以元组的形式提供。 -- bprop函数的返回值形式约定为输入梯度组成的元组,元组中元素的顺序与正向输入参数顺序一致。即使只有一个输入梯度,返回值也要求是元组的形式。 - -例如,增加bprop后的`CusSquare`原语为: - -```python -class CusSquare(PrimitiveWithInfer): - @prim_attr_register - def __init__(self): - """init CusSquare""" - self.init_prim_io_names(inputs=['x'], outputs=['y']) - from square_impl import CusSquareImpl - - def infer_shape(self, data_shape): - return data_shape - - def infer_dtype(self, data_dtype): - return data_dtype - - def get_bprop(self): - def bprop(data, out, dout): - twos_like = ops.OnesLike()(data) * 2.0 - gradient = ops.Mul()(data, twos_like) - dx = ops.Mul()(gradient, dout) - return (dx,) - return bprop -``` - -在`test_square.py`文件中定义反向用例。 - -```python -import mindspore.ops as ops -def test_grad_net(): - x = np.array([1.0, 4.0, 9.0]).astype(np.float32) - sens = np.array([1.0, 1.0, 1.0]).astype(np.float32) - square = Net() - grad = ops.GradOperation(sens_param=True) - dx = grad(square)(Tensor(x), Tensor(sens)) - print("x: ", x) - print("dx: ", dx) -``` - -执行用例: - -```bash -pytest -s tests/st/ops/custom_ops_tbe/test_square.py::test_grad_net -``` - -执行结果: - -```text -x: [1. 4. 9.] -dx: [2. 8. 18.] 
-``` diff --git a/tutorials/training/source_zh_cn/advanced_use/custom_operator_cpu.md b/tutorials/training/source_zh_cn/advanced_use/custom_operator_cpu.md deleted file mode 100644 index e2ac5e2387c23ad60f9baa2f30c9c1d8715e9d15..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/custom_operator_cpu.md +++ /dev/null @@ -1,279 +0,0 @@ -# 自定义算子(CPU) - -`Linux` `CPU` `模型开发` `高级` - - - -- [自定义算子(CPU)](#自定义算子cpu) - - [概述](#概述) - - [注册算子原语](#注册算子原语) - - [实现CPU算子和注册算子信息](#实现cpu算子和注册算子信息) - - [实现CPU算子](#实现cpu算子) - - [注册算子信息](#注册算子信息) - - [编译MindSpore](#编译mindspore) - - [使用自定义CPU算子](#使用自定义cpu算子) - - [定义算子反向传播函数](#定义算子反向传播函数) - - - - - -## 概述 - -当开发网络遇到内置算子不足以满足需求时,你可以利用MindSpore的Python API和C++ API方便快捷地扩展CPU端的自定义算子。 - -添加一个自定义算子,需要完成算子原语注册、算子实现、算子信息注册三部分工作。 - -其中: - -- 算子原语:定义了算子在网络中的前端接口原型,也是组成网络模型的基础单元,主要包括算子的名称、属性(可选)、输入输出名称、输出shape推理方法、输出dtype推理方法等信息。 -- 算子实现:利用框架提供的C++ API,结合算子具体特性实现算子内部计算逻辑。 -- 算子信息:描述CPU算子的基本信息,如算子名称、支持的输入输出类型等。它是后端做算子选择和映射时的依据。 - -本文将以自定义`Transpose`算子为例,介绍自定义算子的步骤。 - -## 注册算子原语 - -算子的原语是一个继承于`PrimitiveWithInfer`的子类,其类型名称即是算子名称。 - -CPU算子原语定义在`mindspore/ops/operations`路径下,根据算子类型选择适合的文件,接口定义如下: - -- 属性由构造函数`__init__`的入参定义。本用例的算子没有init属性,因此`__init__`没有额外的入参。 -- 输入输出的名称通过`init_prim_io_names`函数定义。 -- 输出Tensor的shape和dtype检验在`__infer__`函数中实现。 - -以`Transpose`算子原语为例,给出如下示例代码。 - -```python -from mindspore.ops import PrimitiveWithInfer - -class Transpose(PrimitiveWithInfer): - """ - The definition of the Transpose primitive. 
- """ - @prim_attr_register - def __init__(self): - """Initialize Transpose""" - self.init_prim_io_names(inputs=['x', 'perm'], outputs=['output']) - def __infer__(self, x, perm): - x_shape = x['shape'] - p_value = perm['value'] - if len(x_shape) != len(p_value): - raise ValueError('The dimension of x and perm must be equal.') - out_shapes = [] - for i in p_value: - out_shapes.append(x_shape[i]) - out = {'shape': tuple(out_shapes), - 'dtype': x['dtype'], - 'value': None} - return out -``` - -## 实现CPU算子和注册算子信息 - -### 实现CPU算子 - -通常一个CPU算子的实现,需要编写一个头文件和一个源文件,文件路径为`mindspore/ccsrc/backend/kernel_compiler/cpu`,如果算子的逻辑实现是通过调用第三方库`MKL-DNN`,则放在子目录`mkldnn`下。详细介绍请参考[oneMKL](https://github.com/oneapi-src/oneMKL)和[oneDNN](https://github.com/oneapi-src/oneDNN) 。 - -算子的头文件中包括算子的注册信息和类的声明。算子类继承于`CPUKernel`父类,重载`InitKernel`和`Launch`两个成员函数。 - -算子的源文件是类的实现,主要是重载InitKernel和Launch两个函数,`Transpose`算子实现的头文件代码示例如下: - -```cpp -class TransposeCPUFwdKernel : public CPUKernel { - public: - TransposeCPUFwdKernel() = default; - ~TransposeCPUFwdKernel() override = default; - - void InitKernel(const CNodePtr &kernel_node) override; - - bool Launch(const std::vector &inputs, const std::vector &workspace, - const std::vector &outputs) override; - - private: - std::vector shape_; - std::vector axis_; -}; -``` - -- `InitKernel`函数的入参包含一个节点指针的常量引用,通过`AnfRuntimeAlgorithm`类的成员函数可以获取该算子节点输入输出的shape和算子的属性信息等。 -- `Launch`函数的入参是三个向量,分别包含所有的输入地址,workspace地址,所有的输出地址。函数体中描述算子的具体实现逻辑。 -- `shape_`和`axis_`是定义的两个成员变量。 - -源文件中`InitKernel`函数的定义如下: - -```cpp -void TransposeCPUFwdKernel::InitKernel(const CNodePtr &kernel_node) { - MS_EXCEPTION_IF_NULL(kernel_node); - shape_ = AnfAlgo::GetInputDeviceShape(kernel_node, 0); - axis_ = AnfAlgo::GetNodeAttr>(kernel_node, "perm"); - if (shape_.size() != axis_.size()) { - MS_LOG(EXCEPTION) << "The size of input shape and transpose axis shape must be equal."; - } -} -``` - -- `AnfRuntimeAlgorithm`类中的函数实现了各种对算子节点的操作,`shape_`表示算子第1个输入的shape,`axis_`表示算子的属性perm。 -- 
`Transpose`算子原语中参数“perm”作为输入传入,但是在解析时元组类型的“perm”实际被认为是算子的属性。 - -> `AnfRuntimeAlgorithm`类的详细内容可参考MindSpore源码中[mindspore/ccsrc/backend/session/anf_runtime_algorithm.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/backend/session/anf_runtime_algorithm.h)下的声明。 - -源文件中`Launch`函数的定义如下:首先依次获取每个输入输出的地址,然后根据`axis_`变换维度,把值赋给输出地址指向的空间。 - -```cpp -bool TransposeCPUFwdKernel::Launch(const std::vector &inputs, - const std::vector & /*workspace*/, - const std::vector &outputs) { - auto input = reinterpret_cast(inputs[0]->addr); - auto output = reinterpret_cast(outputs[0]->addr); - size_t size = IntToSize(inputs[0]->size / sizeof(float)); - size_t shape_size = IntToSize(shape_.size()); - if (shape_size > kMaxDim) { - MS_LOG(EXCEPTION) << "Input is " << shape_size << "-D, but transpose supports max " << kMaxDim << "-D inputs."; - } - size_t pos_array[kMaxDim]; - size_t size_offset[kMaxDim]; - size_offset[0] = size / shape_[0]; - for (size_t i = 1; i < shape_size; i++) { - size_offset[i] = size_offset[SizeToInt(i) - 1] / shape_[i]; - } - for (size_t position = 0; position < size; position += 1) { - size_t temp_position = position; - pos_array[0] = temp_position / size_offset[0]; - for (size_t i = 1; i < shape_size; i++) { - temp_position -= pos_array[SizeToInt(i) - 1] * size_offset[i - 1]; - pos_array[i] = temp_position / size_offset[i]; - } - size_t new_position = pos_array[axis_[SizeToInt(shape_size) - 1]]; - size_t new_position_size = 1; - for (int j = shape_size - 2; j >= 0; j--) { - new_position_size *= shape_[axis_[j + 1]]; - new_position += pos_array[axis_[j]] * new_position_size; - } - output[new_position] = input[position]; - } - return true; -} -``` - -### 注册算子信息 - -算子信息是指导后端选择算子实现的关键信息,`MS_REG_CPU_KERNEL`中第一个参数是注册算子的名称,和原语中算子名称一致,第二个参数依次指明每个输入输出的类型,最后一个参数是算子实现的类名。`Transpose`算子注册代码如下: - -```cpp -MS_REG_CPU_KERNEL(Transpose, KernelAttr().AddInputAttr(kNumberTypeFloat32).AddOutputAttr(kNumberTypeFloat32), - TransposeCPUFwdKernel); -``` - -> 
算子信息中定义输入输出信息的个数和顺序、算子实现中的输入输出信息的个数和顺序、算子原语中输入输出名称列表的个数和顺序,三者要完全一致。 - -## 编译MindSpore - -写好自定义CPU算子后,需要重新编译安装MindSpore,具体请参考[安装文档](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_source.md#)。 - -## 使用自定义CPU算子 - -编译并安装完成后,自定义CPU算子可以通过导入原语直接使用。下面以`Transpose`的单算子网络测试为例进行说明。 - -在`test_transpose.py`文件中定义网络。 - -```python -import numpy as np -import mindspore.nn as nn -import mindspore.context as context -from mindspore import Tensor -import mindspore.ops as ops - -context.set_context(mode=context.GRAPH_MODE, device_target="CPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.transpose = ops.Transpose() - - def construct(self, data): - return self.transpose(data, (1, 0)) - -def test_net(): - x = np.arange(2 * 3).reshape(2, 3).astype(np.float32) - transpose = Net() - output = transpose(Tensor(x)) - print("output: ", output) -``` - -执行用例: - -```bash -pytest -s test_transpose.py::test_net -``` - -执行结果: - -```text -output: [[0, 3] - [1, 4] - [2, 5]] -``` - -## 定义算子反向传播函数 - -如果算子要支持自动微分,需要在其原语中定义其反向传播函数(bprop)。你需要在bprop中描述利用正向输入、正向输出和输出梯度得到输入梯度的反向计算逻辑。反向计算逻辑可以使用内置算子或自定义反向算子构成。 - -定义算子反向传播函数时需注意以下几点: - -- bprop函数的入参顺序约定为正向的输入、正向的输出、输出梯度。若算子为多输出算子,正向输出和输出梯度将以元组的形式提供。 -- bprop函数的返回值形式约定为输入梯度组成的元组,元组中元素的顺序与正向输入参数顺序一致。即使只有一个输入梯度,返回值也要求是元组的形式。 - -例如,`Transpose`的反向原语为: - -```python -import mindspore.ops as ops -invert_permutation = ops.InvertPermutation() -transpose = ops.Transpose() -zeros_like = ops.zeros_like() -@bprop_getters.register(ops.Transpose) -def get_bprop_transpose(self): - """Generate bprop for Transpose""" - - def bprop(x, perm, out, dout): - return transpose(dout, invert_permutation(perm)), zeros_like(perm) - - return bprop -``` - -- `Transpose`反向算子中用到了`InvertPermutation`算子,该算子和`Transpose`算子开发一样,需要有算子的原语,注册,实现等完整的流程。 - -在`test_transpose.py`文件中定义反向用例。 - -```python -import mindspore.ops as ops -class Grad(nn.Cell): - def __init__(self, network): - super(Grad, self).__init__() - self.grad = 
ops.GradOperation(sens_param=True) - self.network = network - - def construct(self, input_data, sens): - gout = self.grad(self.network)(input_data, sens) - return gout - -def test_grad_net(): - x = np.arange(2 * 3).reshape(2, 3).astype(np.float32) - sens = np.arange(2 * 3).reshape(3, 2).astype(np.float32) - grad = Grad(Net()) - dx = grad(Tensor(x), Tensor(sens)) - print("dx: ", dx.asnumpy()) -``` - -执行用例: - -```bash -pytest -s test_transpose.py::test_grad_net -``` - -执行结果: - -```text -dx: [[0. 2. 4.] - [1. 3. 5.]] -``` diff --git a/tutorials/training/source_zh_cn/advanced_use/custom_operator_gpu.md b/tutorials/training/source_zh_cn/advanced_use/custom_operator_gpu.md deleted file mode 100644 index bd159ebb6b5596d616fd8f0e401012eb52c21029..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/custom_operator_gpu.md +++ /dev/null @@ -1,260 +0,0 @@ -# 自定义算子(GPU) - -`Linux` `GPU` `模型开发` `高级` - - - -- [自定义算子(GPU)](#自定义算子gpu) - - [概述](#概述) - - [注册算子原语](#注册算子原语) - - [定义算子反向传播函数](#定义算子反向传播函数) - - [GPU算子开发](#gpu算子开发) - - [GPU算子注册](#gpu算子注册) - - [编译MindSpore](#编译mindspore) - - [算子验证](#算子验证) - - - - - -## 概述 - -算子是构建神经网络的基本要素,当开发网络遇到内置算子无法满足要求时。你可以利用MindSpore方便地实现一个GPU算子。 - -- Primitive注册:算子原语是构建网络模型的基础单元,用户可以直接或者间接调用算子原语搭建一个神经网络模型。 -- GPU Kernel实现:GPU Kernel用于调用GPU实现加速计算。 -- GPU Kernel注册:算子注册用于将GPU Kernel及必要信息注册给框架,由框架完成对GPU Kernel的调用。 - -在本教程中,我们将在MindSpore框架中使用C++和CUDA开发一个TensorAddV2算子。TensorAddV2用于将两个同维度的Tensor逐元素相加。 - -## 注册算子原语 - -算子原语通常包括: - -- 算子名:算子名用于唯一标识个算子 -- 注释:描述算子的算法、使用约束。注释将被导出成为MindSpore API接口文档,供开发者查阅。 -- 输入:算子输入Tensor。 -- 属性:一般描述算法参数,例如Conv2d中`data_format`描述了输入数据为`NCHW`或者`NHWC`格式。 -- 输入数据合法性校验:对输入数据、属性进行合法性校验,便于开发者及早发现网络模型存在的问题。 -- 输出数据类型和维度推导:用于推导输出的数据类型和维度。 - -下面的代码中定义了一个名为TensorAddV2算子: - -- `TensorAddV2`继承于`PrimitiveWithInfer`。 -- `__init__`构造函数用于初始化算子,由于TensorAddV2没有属性,因此`__init__`没有额外输入。 -- `infer_shape`方法中约束两个输入维度必须相同,输出的维度和x1的维度相同。 -- `infer_dtype`方法中约束两个输入数据必须是float32类型,输出的数据类型和输入数据类型相同。 - -```python -# 
mindspore/ops/operations/math_ops.py -class TensorAddV2(PrimitiveWithInfer): - """ - Adds two input tensors element-wise. - """ - @prim_attr_register - def __init__(self): - self.init_prim_io_names(inputs=['x1', 'x2'], outputs=['y']) - - def infer_shape(self, x1_shape, x2_shape): - validator.check_integer('input dims', len(x1_shape), len(x2_shape), Rel.EQ, self.name) - for i in range(len(x1_shape)): - validator.check_integer('input_shape', x1_shape[i], x2_shape[i], Rel.EQ, self.name) - return x1_shape - - def infer_dtype(self, x1_dtype, x2_type): - validator.check_tensor_type_same({'x1_dtype': x1_dtype}, [mstype.float32], self.name) - validator.check_tensor_type_same({'x2_dtype': x2_dtype}, [mstype.float32], self.name) - return x1_dtype -``` - -接下来我们在__init__.py中导出TensorAddV2类型,方便用户在网络中导入使用。 - -```python -# mindspore/ops/operations/__init__.py -from .math_ops import (Abs, ACos, ..., TensorAddV2) -... -... -__all__ = [ - 'ReverseSequence', - 'CropAndResize', - ..., - 'TensorAddV2' -] -``` - -## 定义算子反向传播函数 - -如果算子要支持自动微分,需要在其原语中定义其反向传播函数(bprop)。你需要在bprop中描述利用正向输入、正向输出和输出梯度得到输入梯度的反向计算逻辑。反向计算逻辑可以使用内置算子或自定义反向算子构成。 - -定义算子反向传播函数时需注意以下几点: - -- bprop函数的入参顺序约定为正向的输入、正向的输出、输出梯度。若算子为多输出算子,正向输出和输出梯度将以元组的形式提供。 -- bprop函数的返回值形式约定为输入梯度组成的元组,元组中元素的顺序与正向输入参数顺序一致。即使只有一个输入梯度,返回值也要求是元组的形式。 - -例如,`TensorAddV2`的反向原语为: - -```python -import mindspore.ops as ops -@bprop_getters.register(ops.TensorAddV2) -def get_bprop_tensoraddv2(self): - """Generate bprop for TensorAddV2""" - - def bprop(x1, x2, out, dout): - return dout, dout - - return bprop -``` - -## GPU算子开发 - -GPU自定义算子继承于`GPUKernel`: - -- `Init()`: 用于完成GPU Kernel的初始化,通常包括记录算子输入/输出维度,完成Launch前的准备工作。 -- `GetInputSizeList()`: 向框架反馈输入Tensor需要占用的显存字节数。 -- `GetOutputSizeList()`: 向框架反馈输出Tensor需要占用的显存字节数。 -- `GetWorkspaceSizeList()`: 向框架反馈`Workspace`字节数,`Workspace`是用于计算过程中存放临时数据的空间。 -- `Launch()`: 通常调用CUDA kernel(CUDA kernel是基于Nvidia GPU的并行计算架构开发的核函数),或者cuDNN接口等方式,完成算子在GPU上加速。 - -下面的代码给出了TensorAddV2的实现: 
-为了支持数据类型的泛化,我们使用类模板定义`TensorAddV2GpuKernel`: - -- `Init()`中记录了Tensor的元素个数。 -- `GetInputSizeList()`返回了输入Tensor需要占用的字节数,TensorAddV2有两个Input,每个Input占用字节数为element_num * sizeof(T)。 -- `GetOutputSizeList()`返回了输出Tensor需要占用的字节数,TensorAddV2有一个output,占用element_num * sizeof(T)字节。 -- 由于TensorAddV2不需要`Workspace`,因此`GetWorkspaceSizeList()`返回空的`std::vector`。 -- `Launch()`接收input、output在显存的地址,接着调用`TensorAddV2`完成加速。 - -```c++ -// mindspore/ccsrc/backend/kernel_compiler/gpu/math/tensor_add_v2_gpu_kernel.h - -template -class TensorAddV2GpuKernel : public GpuKernel { - public: - TensorAddV2GpuKernel() : element_num_(1) {} - ~TensorAddV2GpuKernel() override = default; - - bool Init(const CNodePtr &kernel_node) override { - auto shape = AnfAlgo::GetPrevNodeOutputInferShape(kernel_node, 0); - for (size_t i = 0; i < shape.size(); i++) { - element_num_ *= shape[i]; - } - InitSizeLists(); - return true; - } - - const std::vector &GetInputSizeList() const override { return input_size_list_; } - const std::vector &GetOutputSizeList() const override { return output_size_list_; } - const std::vector &GetWorkspaceSizeList() const override { return workspace_size_list_; } - - bool Launch(const std::vector &inputs, const std::vector &, - const std::vector &outputs, void *stream_ptr) override { - T *x1 = GetDeviceAddress(inputs, 0); - T *x2 = GetDeviceAddress(inputs, 1); - T *y = GetDeviceAddress(outputs, 0); - - TensorAddV2(element_num_, x1, x2, y, reinterpret_cast(stream_ptr)); - return true; - } - - protected: - void InitSizeLists() override { - input_size_list_.push_back(element_num_ * sizeof(T)); - input_size_list_.push_back(element_num_ * sizeof(T)); - output_size_list_.push_back(element_num_ * sizeof(T)); - } - - private: - size_t element_num_; - std::vector input_size_list_; - std::vector output_size_list_; - std::vector workspace_size_list_; -}; -``` - -`TensorAddV2`中调用了CUDA kernel`TensorAddV2Kernel`来实现`element_num`个元素的并行相加: - -```c++ -// 
mindspore/ccsrc/backend/kernel_compiler/gpu/math/tensor_add_v2_gpu_kernel.h - - template - __global__ void TensorAddV2Kernel(const size_t element_num, const T* x1, const T* x2, T* y) { - for (size_t i = blockIdx.x * blockDim.x + threadIdx.x; i < element_num; i += blockDim.x * gridDim.x) { - y[i] = x1[i] + x2[i]; - } - } - - template - void TensorAddV2(const size_t &element_num, const T* x1, const T* x2, T* y, cudaStream_t stream){ - size_t thread_per_block = 256; - size_t block_per_grid = (element_num + thread_per_block - 1 ) / thread_per_block; - TensorAddV2Kernel<<>>(element_num, x1, x2, y); - return; - } - - template void TensorAddV2(const size_t &element_num, const float* x1, const float* x2, float* y, cudaStream_t stream); -``` - -## GPU算子注册 - -算子信息包含: - -- `Primive` -- `Input dtype, output dtype` -- `GPU Kernel class` -- `CUDA内置数据类型` - -框架会根据`Primive`和`Input dtype, output dtype`,调用以`CUDA内置数据类型`实例化`GPU Kernel class`模板类。 - -如下代码中分别注册了支持float和int的TensorAddV2算子。 - -```c++ -// mindspore/ccsrc/backend/kernel_compiler/gpu/math/tensor_add_v2_gpu_kernel.cc - -MS_REG_GPU_KERNEL_ONE(TensorAddV2, KernelAttr() - .AddInputAttr(kNumberTypeFloat32) - .AddInputAttr(kNumberTypeFloat32) - .AddOutputAttr(kNumberTypeFloat32), - TensorAddV2GpuKernel, float) - -MS_REG_GPU_KERNEL_ONE(TensorAddV2, KernelAttr() - .AddInputAttr(kNumberTypeInt32) - .AddInputAttr(kNumberTypeInt32) - .AddOutputAttr(kNumberTypeInt32), - TensorAddV2GpuKernel, int) - -``` - -## 编译MindSpore - -写好自定义GPU算子后,需要重新编译安装MindSpore,具体请参考[安装文档](https://gitee.com/mindspore/docs/blob/master/install/mindspore_gpu_install_source.md#)。 - -## 算子验证 - -在教程的最后,我们构建一个单算子网络,来验证刚才开发的TensorAddV2算子: - -```python -# tests/st/ops/gpu/test_tensoraddv2_op.py - -import mindspore.context as context -from mindspore import Tensor -import mindspore.ops as ops - -context.set_context(device_target='GPU') - -@pytest.mark.level0 -@pytest.mark.platform_x86_gpu_training -@pytest.mark.env_onecard -def test_TensroAdd(): - x1 = Tensor(np.ones((3, 4), 
np.float32)) - x2 = Tensor(np.ones((3, 4), np.float32)) - y = ops.TensorAddV2()(x1, x2) - print('result: ', y) -``` - -通过`pytest -s tests/st/ops/gpu/test_tensoraddv2_op.py`命令执行后,可以看到结果符合预期: - -```text -result: [[2. 2. 2. 2.] - [2. 2. 2. 2.] - [2. 2. 2. 2.]] -``` diff --git a/tutorials/training/source_zh_cn/advanced_use/cv.rst b/tutorials/training/source_zh_cn/advanced_use/cv.rst deleted file mode 100644 index 8383859382de2ca647dd34e8a1646af962b53e53..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/cv.rst +++ /dev/null @@ -1,9 +0,0 @@ -机器视觉 -=========== - -.. toctree:: - :maxdepth: 1 - - cv_resnet50 - cv_resnet50_second_order_optimizer - cv_mobilenetv2_fine_tune \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/advanced_use/cv_mobilenetv2_fine_tune.md b/tutorials/training/source_zh_cn/advanced_use/cv_mobilenetv2_fine_tune.md deleted file mode 100644 index 3154f5de7ee4753240f528c2b58616b4b6d73dfe..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/cv_mobilenetv2_fine_tune.md +++ /dev/null @@ -1,409 +0,0 @@ -# 使用MobileNetV2网络实现微调(Fine Tune) - -`Linux` `Windows` `Ascend` `GPU` `CPU` `模型开发` `中级` `高级` - - - -- [使用MobileNetV2网络实现微调(Fine Tune)](#使用mobilenetv2网络实现微调fine-tune) - - [概述](#概述) - - [任务描述及准备](#任务描述及准备) - - [环境配置](#环境配置) - - [下载代码](#下载代码) - - [准备预训练模型](#准备预训练模型) - - [准备数据](#准备数据) - - [预训练模型加载代码详解](#预训练模型加载代码详解) - - [参数简介](#参数简介) - - [运行Python文件](#运行python文件) - - [运行Shell脚本](#运行shell脚本) - - [加载微调训练](#加载微调训练) - - [CPU加载训练](#cpu加载训练) - - [GPU加载训练](#gpu加载训练) - - [Ascend加载训练](#ascend加载训练) - - [微调训练结果](#微调训练结果) - - [验证微调训练模型](#验证微调训练模型) - - [验证模型](#验证模型) - - [验证结果](#验证结果) - - - -   - -## 概述 - -计算机视觉任务中,从头开始训练一个网络耗时巨大,需要大量计算能力。预训练模型选择的常见的OpenImage、ImageNet、VOC、COCO等公开大型数据集,规模达到几十万甚至超过上百万张。大部分任务数据规模较大,训练网络模型时,如果不使用预训练模型,从头开始训练网络,需要消耗大量的时间与计算能力,模型容易陷入局部极小值和过拟合。因此大部分任务都会选择预训练模型,在其上做微调(也称为Fine Tune)。 - 
-MindSpore是一个多元化的机器学习框架。既可以在手机等端侧和PC等设备上运行,也可以在云上的服务器集群上运行。目前MobileNetV2支持在Windows、EulerOS和Ubuntu系统中使用单个CPU做微调,也可以使用单个或者多个Ascend AI处理器或GPU做微调,本教程将会介绍如何在不同系统与处理器下的MindSpore框架中做微调的训练与验证。 - -目前,Window上暂只支持支持CPU,Ubuntu与EulerOS上支持CPU、GPU与Ascend AI处理器三种处理器。 - -> 你可以在这里找到完整可运行的样例代码: - -## 任务描述及准备 - -### 环境配置 - -若在本地环境运行,需要安装MindSpore框架,配置CPU、GPU或Ascend AI处理器。若在华为云环境上运行,不需要安装MindSpore框架,不需要配置Ascend AI处理器、CPU与GPU,可以跳过本小节。 - -Windows操作系统中使用`\`,Linux操作系统中使用`/`分割路径地址中不同层级目录,下文中默认使用`/`,若用户使用Windows操作系统,路径地址中`/`需自行更改为`\`。 - -1. 安装MindSpore框架 - 在EulerOS、Ubuntu或者Windows等系统上需要根据系统和处理器架构[安装对应版本MindSpore框架](https://www.mindspore.cn/install)。 - -2. 配置CPU环境 - 使用CPU时,在代码中,需要在调用CPU开始训练或测试前,按照如下代码设置: - - ```python - if config.platform == "CPU": - context.set_context(mode=context.GRAPH_MODE, device_target=config.platform, \ - save_graphs=False) - ``` - -3. 配置GPU环境 - 使用GPU时,在代码中,需要在调用GPU开始训练或测试前,按照如下代码设置: - - ```python - elif config.platform == "GPU": - context.set_context(mode=context.GRAPH_MODE, device_target=config.platform, save_graphs=False) - if config.run_distribute: - init("nccl") - context.set_auto_parallel_context(device_num=get_group_size(), - parallel_mode=ParallelMode.DATA_PARALLEL, - gradients_mean=True) - ``` - -4. 
配置Ascend环境 - 以Ascend 910 AI处理器为例,1个8个处理器环境的json配置文件`hccl_config.json`示例如下。单/多处理器环境可以根据以下示例调整`"server_count"`与`device`: - - ```json - { - "version": "1.0", - "server_count": "1", - "server_list": [ - { - "server_id": "10.155.111.140", - "device": [ - {"device_id": "0","device_ip": "192.1.27.6","rank_id": "0"}, - {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"}, - {"device_id": "2","device_ip": "192.3.27.6","rank_id": "2"}, - {"device_id": "3","device_ip": "192.4.27.6","rank_id": "3"}, - {"device_id": "4","device_ip": "192.1.27.7","rank_id": "4"}, - {"device_id": "5","device_ip": "192.2.27.7","rank_id": "5"}, - {"device_id": "6","device_ip": "192.3.27.7","rank_id": "6"}, - {"device_id": "7","device_ip": "192.4.27.7","rank_id": "7"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" - } - ``` - - 使用Ascend AI处理器时,在代码中,需要在调用Ascend AI处理器开始训练或测试前,按照如下代码设置: - - ```python - elif config.platform == "Ascend": - context.set_context(mode=context.GRAPH_MODE, device_target=config.platform, device_id=config.device_id, - save_graphs=False) - if config.run_distribute: - context.set_auto_parallel_context(device_num=config.rank_size, - parallel_mode=ParallelMode.DATA_PARALLEL, - gradients_mean=True, - all_reduce_fusion_config=[140]) - init() - ... 
- ``` - -### 下载代码 - -在Gitee中克隆[MindSpore开源项目仓库](https://gitee.com/mindspore/mindspore.git),进入`./model_zoo/official/cv/mobilenetv2/`。 - -```bash -git clone https://gitee.com/mindspore/mindspore.git -cd ./mindspore/model_zoo/official/cv/mobilenetv2 -``` - -代码结构如下: - -```bash -├─MobileNetV2 - ├─README.md # descriptions about MobileNetV2 - ├─scripts - │ run_train.sh # Shell script for train with Ascend or GPU - │ run_eval.sh # Shell script for evaluation with Ascend or GPU - ├─src - │ config.py # parameter configuration - │ dataset.py # creating dataset - │ launch.py # start Python script - │ lr_generator.py # learning rate config - │ mobilenetV2.py # MobileNetV2 architecture - │ mobilenetV2_fusion.py # MobileNetV2 fusion architecture - │ models.py # net utils to load ckpt_file, define_net... - │ utils.py # net utils to switch precision, set_context and so on - ├─train.py # training script - └─eval.py # evaluation script -``` - -运行微调训练与测试时,Windows、Ubuntu与EulersOS上可以使用Python文件`train.py`与`eval.py`,Ubuntu与EulerOS上还可以使用Shell脚本文件`run_train.sh`与`run_eval.sh`。 - -使用脚本文件`run_train.sh`时,该文件会将运行`launch.py`并且将参数传入`launch.py`,`launch.py`根据分配的CPU、GPU或Ascend AI处理器数量,启动单个/多个进程运行`train.py`,每一个进程分配对应的一个处理器。 - -### 准备预训练模型 - -用户需要根据不同处理器种类[下载CPU/GPU预训练模型](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2_cpu_gpu.ckpt)或[下载Ascend预训练模型](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2_ascend.ckpt)到以下目录: -`./pretrain_checkpoint/` - -- CPU/GPU 处理器 - - ```bash - mkdir pretrain_checkpoint - wget -P ./pretrain_checkpoint https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2_cpu_gpu.ckpt - ``` - -- Ascend AI处理器 - - ```bash - mkdir pretrain_checkpoint - wget -P ./pretrain_checkpoint https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2_ascend.ckpt - ``` - -### 准备数据 - 
-准备ImageFolder格式管理的数据集,运行`run_train.sh`时加入``参数,运行`train.py`时加入`--dataset_path `参数: - -数据集结构如下: - -```bash -└─ImageFolder - ├─train - │ class1Folder - │ class2Folder - │ ...... - └─eval - class1Folder - class2Folder - ...... -``` - -## 预训练模型加载代码详解 - -在微调时,需要加载预训练模型。不同数据集和任务中特征提取层(卷积层)分布趋于一致,但是特征向量的组合(全连接层)不相同,分类数量(全连接层output_size)通常也不一致。在微调时,只加载与训练特征提取层参数,不加载与训练全连接层参数;在微调与初始训练时,加载与训练特征提取层参数与全连接层参数。 - -在训练与测试之前,首先按照代码第1行,构建MobileNetV2的backbone网络,head网络,并且构建包含这两个子网络的MobileNetV2网络。代码第3-10行展示了如何定义`backbone_net`与`head_net`,以及将两个子网络置入`mobilenet_v2`中。代码第12-23行,展示了在微调训练模式下,需要将预训练模型加载`入backbone_net`子网络,并且冻结`backbone_net`中的参数,不参与训练。代码第21-23行展示了如何冻结网络参数。 - -```python - 1: backbone_net, head_net, net = define_net(args_opt, config) - 2: ... - 3: def define_net(config, is_training): - 4: backbone_net = MobileNetV2Backbone() - 5: activation = config.activation if not is_training else "None" - 6: head_net = MobileNetV2Head(input_channel=backbone_net.out_channels, - 7: num_classes=config.num_classes, - 8: activation=activation) - 9: net = mobilenet_v2(backbone_net, head_net) -10: return backbone_net, head_net, net -11: ... -12: if args_opt.pretrain_ckpt and args_opt.freeze_layer == "backbone": -13: load_ckpt(backbone_net, args_opt.pretrain_ckpt, trainable=False) -14: ... 
-15: def load_ckpt(network, pretrain_ckpt_path, trainable=True): -16: """ -17: train the param weight or not -18: """ -19: param_dict = load_checkpoint(pretrain_ckpt_path) -20: load_param_into_net(network, param_dict) -21: if not trainable: -22: for param in network.get_parameters(): -23: param.requires_grad = False -``` - -## 参数简介 - -每个参数需要用户根据自己本地的处理器类型、数据地址与预训练模型地址等修改为相应的值。 - -### 运行Python文件 - -在Windows与Linux系统上训练时,运行`train.py`时需要传入`dataset_path`、`platform`、`pretrain_ckpt`与`freeze_layer`四个参数。验证时,运行`eval.py`并且传入`dataset_path`、`platform`、`pretrain_ckpt`三个参数。 - -```bash -# Windows/Linux train with Python file -python train.py --platform [PLATFORM] --dataset_path --pretrain_ckpt [PRETRAIN_CHECKPOINT_PATH] --freeze_layer[("none", "backbone")] - -# Windows/Linux eval with Python file -python eval.py --platform [PLATFORM] --dataset_path --pretrain_ckpt -``` - -- `--dataset_path`:训练与验证数据集地址,无默认值,用户训练/验证时必须输入。 -- `--platform`:处理器类型,默认为“Ascend”,可以设置为“CPU”或"GPU"。 -- `--pretrain_ckpt`:增量训练或调优时,需要传入pretrain_checkpoint文件路径以加载预训练好的模型参数权重。 -- `--freeze_layer`:冻结网络层,输入“none"、"backbone"其中一个。 - -### 运行Shell脚本 - -在Linux系统上时,可以选择运行Shell脚本文件`./scripts/run_train.sh`与`./scripts/run_eval.sh`。运行时需要在交互界面中同时传入参数。 - -```bash -# Windows doesn't support Shell -# Linux train with Shell script -sh run_train.sh [FREEZE_LAYER] - -# Linux eval with Shell script for fine tune -sh run_eval.sh -``` - -- ``:处理器类型,默认为“Ascend”,可以设置为“GPU”。 -- ``:每个节点(一台服务器/PC相当于一个节点)进程数量,建议设置为机器上Ascend AI处理器数量或GPU数量。 -- ``:字符串格式的设备ID,训练将会根据``将进程绑定到对应ID的设备上,多个设备ID之间使用','分隔,建议ID数量与进程数量相同。 -- ``:platform选择Ascend时,需要配置Ascend的配置Json文件,。 -- ``:训练与验证数据集地址,无默认值,用户训练/验证时必须输入。 -- ``:增量训练或调优时,需要传入checkpoint文件路径以加载预训练好的模型参数权重 -- `[FREEZE_LAYER]`:针对微调的模型做验证时,需要选择不冻结网络或者冻结backbone。 - -## 加载微调训练 - -Windows系统上,MobileNetV2做微调训练时,只能运行`train.py`。Linux系统上,使用MobileNetV2做微调训练时,可以选择运行`run_train.sh`, 并在运行Shell脚本文件时传入[参数](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/cv_mobilenetv2_fine_tune.html#id8)。 - 
-Windows系统输出信息到交互式命令行,Linux系统环境下运行`run_train.sh`时,命令行结尾使用`&> `将标准输出与错误输出写入log文件。微调成功开始训练,`./train/rank*/log*.log`中会持续写入每一个epoch的训练时间与Loss等信息。若未成功,上述log文件会写入失败报错信息。 - -### CPU加载训练 - -- 设置节点数量 - - 目前运行`train.py`时仅支持单处理器,不需要调整处理器数量。运行`run_train.sh`文件时,`CPU`设备默认为单处理器,目前暂不支持修改CPU数量。 - -- 开始增量训练 - - 使用样例1:通过Python文件调用1个CPU处理器。 - - ```bash - # Windows or Linux with Python - python train.py --platform CPU --dataset_path --pretrain_ckpt ./pretrain_checkpoint/mobilenetv2_cpu_gpu.ckpt --freeze_layer backbone - ``` - - 使用样例2:通过Shell文件调用1个CPU处理器。 - - ```bash - # Linux with Shell - sh run_train.sh CPU ../pretrain_checkpoint/mobilenetV2_cpu_gpu.ckpt backbone - ``` - -### GPU加载训练 - -- 设置节点数量 - - 目前运行`train.py`时仅支持单处理器,不需要调整节点数量。运行`run_train.sh`文件时,设置``为GPU数量, ``为可使用的处理器编号,即GPU的ID,可以选择一个或多个设备ID,使用`,`隔开。 - -- 开始增量训练 - - - 使用样例1:通过Python文件调用1个GPU处理器。 - - ```bash - # Windows or Linux with Python - python train.py --platform GPU --dataset_path --pretrain_ckpt ./pretrain_checkpoint/mobilenetv2_cpu_gpu.ckpt --freeze_layer backbone - ``` - - - 使用样例2:通过Shell脚本调用1个GPU处理器,设备ID为`“0”`。 - - ```bash - # Linux with Shell - sh run_train.sh GPU 1 0 ../pretrain_checkpoint/mobilenetv2_cpu_gpu.ckpt backbone - ``` - - - 使用样例3:通过Shell脚本调用8个GPU处理器,设备ID为`“0,1,2,3,4,5,6,7”`。 - - ```bash - # Linux with Shell - sh run_train.sh GPU 8 0,1,2,3,4,5,6,7 < ../pretrain_checkpoint/mobilenetv2_cpu_gpu.ckpt backbone - ``` - -### Ascend加载训练 - -- 设置节点数量 - - 目前运行`train.py`时仅支持单处理器,不需要调整节点数量。运行`run_train.sh`文件时,设置``为Ascend AI处理器数量, ``为可使用的处理器编号,即Ascend AI处理器的ID,8卡服务器可以选择0-7中一个或多个设备ID,使用`,`隔开。Ascend节点处理器数量目前只能设置为1或者8。 - -- 开始增量训练 - - - 使用样例1:通过Python文件调用1个Ascend处理器。 - - ```bash - # Windows or Linux with Python - python train.py --platform Ascend --dataset_path --pretrain_ckpt ./pretrain_checkpoint mobilenetv2_ascend.ckpt --freeze_layer backbone - ``` - - - 使用样例2:通过Shell脚本调用1个Ascend AI处理器,设备ID为“0”。 - - ```bash - # Linux with Shell - sh run_train.sh Ascend 1 0 ~/rank_table.json ../pretrain_checkpoint/mobilenetv2_ascend.ckpt 
backbone - ``` - - - 使用样例3:通过Shell脚本调用8个Ascend AI处理器,设备ID为”0,1,2,3,4,5,6,7“。 - - ```bash - # Linux with Shell - sh run_train.sh Ascend 8 0,1,2,3,4,5,6,7 ~/rank_table.json ../pretrain_checkpoint/mobilenetv2_ascend.ckpt backbone - ``` - -### 微调训练结果 - -- 查看运行结果。 - - - 运行Python文件时在交互式命令行中查看打印信息,`Linux`上运行Shell脚本运行后使用`cat ./train/rank0/log0.log`中查看打印信息,输出结果如下: - - ```bash - train args: Namespace(dataset_path='./dataset/train', platform='CPU', \ - pretrain_ckpt='./pretrain_checkpoint/mobilenetv2_cpu_gpu.ckpt', freeze_layer='backbone') - cfg: {'num_classes': 26, 'image_height': 224, 'image_width': 224, 'batch_size': 150, \ - 'epoch_size': 200, 'warmup_epochs': 0, 'lr_max': 0.03, 'lr_end': 0.03, 'momentum': 0.9, \ - 'weight_decay': 4e-05, 'label_smooth': 0.1, 'loss_scale': 1024, 'save_checkpoint': True, \ - 'save_checkpoint_epochs': 1, 'keep_checkpoint_max': 20, 'save_checkpoint_path': './', \ - 'platform': 'CPU'} - Processing batch: 16: 100%|███████████████████████████████████████████ █████████████████████| 16/16 [00:00 --pretrain_ckpt ./ckpt_0/mobilenetv2_15.ckpt - -# Linux with Shell -sh run_eval.sh CPU ../ckpt_0/mobilenetv2_15.ckpt -``` - -### 验证结果 - -运行Python文件时在交互式命令行中输出验证结果,Shell脚本将把这些信息写入`./eval.log`中,需要使用`cat ./eval.log`查看,结果如下: - -```bash -result:{'acc': 0.9466666666666666666667} -pretrain_ckpt = ./ckpt_0/mobilenetv2_15.ckpt -``` diff --git a/tutorials/training/source_zh_cn/advanced_use/cv_resnet50.md b/tutorials/training/source_zh_cn/advanced_use/cv_resnet50.md deleted file mode 100644 index 890a544e76572c80a911c5fdc3e5bde26694f23b..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/cv_resnet50.md +++ /dev/null @@ -1,211 +0,0 @@ -# 使用ResNet-50网络实现图像分类 - -`Linux` `Ascend` `GPU` `全流程` `初级` `中级` `高级` - - - -- [使用ResNet-50网络实现图像分类](#使用resnet-50网络实现图像分类) - - [概述](#概述) - - [图像分类](#图像分类) - - [任务描述及准备](#任务描述及准备) - - [下载CIFAR-10数据集](#下载cifar-10数据集) - - [数据预加载和预处理](#数据预加载和预处理) - - [定义卷积神经网络](#定义卷积神经网络) - - [定义损失函数和优化器](#定义损失函数和优化器) 
- - [调用`Model`高阶API进行训练和保存模型文件](#调用model高阶api进行训练和保存模型文件) - - [加载保存的模型,并进行验证](#加载保存的模型并进行验证) - - [参考文献](#参考文献) - - - -   -   - - -## 概述 - -计算机视觉是当前深度学习研究最广泛、落地最成熟的技术领域,在手机拍照、智能安防、自动驾驶等场景有广泛应用。从2012年AlexNet在ImageNet比赛夺冠以来,深度学习深刻推动了计算机视觉领域的发展,当前最先进的计算机视觉算法几乎都是深度学习相关的。深度神经网络可以逐层提取图像特征,并保持局部不变性,被广泛应用于分类、检测、分割、检索、识别、提升、重建等视觉任务中。 - -本章结合图像分类任务,介绍MindSpore如何应用于计算机视觉场景。 - -## 图像分类 - -图像分类是最基础的计算机视觉应用,属于有监督学习类别。给定一张数字图像,判断图像所属的类别,如猫、狗、飞机、汽车等等。用函数来表示这个过程如下: - -```python -def classify(image): - label = model(image) - return label -``` - -选择合适的model是关键。这里的model一般指的是深度卷积神经网络,如AlexNet、VGG、GoogLeNet、ResNet等等。 - -MindSpore实现了典型的卷积神经网络,开发者可以参考[model_zoo](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official)。 - -MindSpore当前支持的图像分类网络包括:典型网络LeNet、AlexNet、ResNet。 - -## 任务描述及准备 - -![cifar10](images/cifar10.jpg) - -图1:CIFAR-10数据集[1] - -如图1所示,CIFAR-10数据集共包含10类、共60000张图片。其中,每类图片6000张,50000张是训练集,10000张是测试集。每张图片大小为32*32。 - -图像分类的训练指标通常是精度(Accuracy),即正确预测的样本数占总预测样本数的比值。 - -接下来我们介绍利用MindSpore解决图片分类任务,整体流程如下: - -1. 下载CIFAR-10数据集 -2. 数据加载和预处理 -3. 定义卷积神经网络,本例采用ResNet-50网络 -4. 定义损失函数和优化器 -5. 调用`Model`高阶API进行训练和保存模型文件 -6. 加载保存的模型进行推理 - -> 本例面向Ascend 910 AI处理器硬件平台,你可以在这里下载完整的样例代码: - -下面对任务流程中各个环节及代码关键片段进行解释说明。 - -## 下载CIFAR-10数据集 - -先从[CIFAR-10数据集官网](https://www.cs.toronto.edu/~kriz/cifar.html)上下载CIFAR-10数据集。本例中采用binary格式的数据,Linux环境可以通过下面的命令下载: - -```shell -wget https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz -``` - -接下来需要解压数据集,解压命令如下: - -```shell -tar -zvxf cifar-10-binary.tar.gz -``` - -## 数据预加载和预处理 - -1. 加载数据集 - - 数据加载可以通过内置数据集格式`Cifar10Dataset`接口完成。 - > `Cifar10Dataset`,读取类型为随机读取,内置CIFAR-10数据集,包含图像和标签,图像格式默认为uint8,标签数据格式默认为uint32。更多说明请查看API中`Cifar10Dataset`接口说明。 - - 数据加载代码如下,其中`data_home`为数据存储位置: - - ```python - cifar_ds = ds.Cifar10Dataset(data_home) - ``` - -2. 
数据增强 - - 数据增强主要是对数据进行归一化和丰富数据样本数量。常见的数据增强方式包括裁剪、翻转、色彩变化等等。MindSpore通过调用`map`方法在图片上执行增强操作: - - ```python - resize_height = 224 - resize_width = 224 - rescale = 1.0 / 255.0 - shift = 0.0 - - # define map operations - random_crop_op = C.RandomCrop((32, 32), (4, 4, 4, 4)) # padding_mode default CONSTANT - random_horizontal_op = C.RandomHorizontalFlip() - resize_op = C.Resize((resize_height, resize_width)) # interpolation default BILINEAR - rescale_op = C.Rescale(rescale, shift) - normalize_op = C.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)) - changeswap_op = C.HWC2CHW() - type_cast_op = C2.TypeCast(mstype.int32) - - c_trans = [] - if training: - c_trans = [random_crop_op, random_horizontal_op] - c_trans += [resize_op, rescale_op, normalize_op, changeswap_op] - - # apply map operations on images - cifar_ds = cifar_ds.map(operations=type_cast_op, input_columns="label") - cifar_ds = cifar_ds.map(operations=c_trans, input_columns="image") - ``` - -3. 数据混洗和批处理 - - 最后通过数据混洗(`shuffle`)随机打乱数据的顺序,并按`batch`读取数据,进行模型训练: - - ```python - # apply shuffle operations - cifar_ds = cifar_ds.shuffle(buffer_size=10) - - # apply batch operations - cifar_ds = cifar_ds.batch(batch_size=args_opt.batch_size, drop_remainder=True) - - # apply repeat operations - cifar_ds = cifar_ds.repeat(repeat_num) - ``` - -## 定义卷积神经网络 - -卷积神经网络已经是图像分类任务的标准算法了。卷积神经网络采用分层的结构对图片进行特征提取,由一系列的网络层堆叠而成,比如卷积层、池化层、激活层等等。 - -ResNet通常是较好的选择。首先,它足够深,常见的有34层,50层,101层。通常层次越深,表征能力越强,分类准确率越高。其次,可学习,采用了残差结构,通过shortcut连接把低层直接跟高层相连,解决了反向传播过程中因为网络太深造成的梯度消失问题。此外,ResNet网络的性能很好,既表现为识别的准确率,也包括它本身模型的大小和参数量。 - -MindSpore Model Zoo中已经实现了ResNet模型,可以采用[ResNet-50](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/resnet/src/resnet.py)。调用方法如下: - -```python -network = resnet50(class_num=10) -``` - -更多ResNet的介绍请参考:[ResNet论文](https://arxiv.org/abs/1512.03385) - -## 定义损失函数和优化器 - -接下来需要定义损失函数(Loss)和优化器(Optimizer)。损失函数是深度学习的训练目标,也叫目标函数,可以理解为神经网络的输出(Logits)和标签(Labels)之间的距离,是一个标量数据。 - 
-常见的损失函数包括均方误差、L2损失、Hinge损失、交叉熵等等。图像分类应用通常采用交叉熵损失(`CrossEntropy`)。 - -优化器用于神经网络求解(训练)。由于神经网络参数规模庞大,无法直接求解,因而深度学习中采用随机梯度下降算法(SGD)及其改进算法进行求解。MindSpore封装了常见的优化器,如`SGD`、`ADAM`、`Momemtum`等等。本例采用`Momentum`优化器,通常需要设定两个参数,动量(`moment`)和权重衰减项(`weight decay`)。 - -MindSpore中定义损失函数和优化器的代码样例如下: - -```python -# loss function definition -ls = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - -# optimization definition -opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) -``` - -## 调用`Model`高阶API进行训练和保存模型文件 - -完成数据预处理、网络定义、损失函数和优化器定义之后,就可以进行模型训练了。模型训练包含两层迭代,数据集的多轮迭代(`epoch`)和一轮数据集内按分组(`batch`)大小进行的单步迭代。其中,单步迭代指的是按分组从数据集中抽取数据,输入到网络中计算得到损失函数,然后通过优化器计算和更新训练参数的梯度。 - -为了简化训练过程,MindSpore封装了`Model`高阶接口。用户输入网络、损失函数和优化器完成`Model`的初始化,然后调用`train`接口进行训练,`train`接口参数包括迭代次数(`epoch`)和数据集(`dataset`)。 - -模型保存是对训练参数进行持久化的过程。`Model`类中通过回调函数(`callback`)的方式进行模型保存,如下面代码所示。用户通过`CheckpointConfig`设置回调函数的参数,其中,`save_checkpoint_steps`指每经过固定的单步迭代次数保存一次模型,`keep_checkpoint_max`指最多保存的模型个数。 - -```python -''' -network, loss, optimizer are defined before. -batch_num, epoch_size are training parameters. 
-''' -model = Model(net, loss_fn=ls, optimizer=opt, metrics={'acc'}) - -# CheckPoint CallBack definition -config_ck = CheckpointConfig(save_checkpoint_steps=batch_num, keep_checkpoint_max=35) -ckpoint_cb = ModelCheckpoint(prefix="train_resnet_cifar10", directory="./", config=config_ck) - -# LossMonitor is used to print loss value on screen -loss_cb = LossMonitor() -model.train(epoch_size, dataset, callbacks=[ckpoint_cb, loss_cb]) -``` - -## 加载保存的模型,并进行验证 - -训练得到的模型文件(如`resnet.ckpt`)可以用来预测新图像的类别。首先通过`load_checkpoint`加载模型文件。然后调用`Model`的`eval`接口预测新图像类别。 - -```python -param_dict = load_checkpoint(args_opt.checkpoint_path) -load_param_into_net(net, param_dict) -eval_dataset = create_dataset(training=False) -res = model.eval(eval_dataset) -print("result: ", res) -``` - -## 参考文献 - -[1] diff --git a/tutorials/training/source_zh_cn/advanced_use/cv_resnet50_second_order_optimizer.md b/tutorials/training/source_zh_cn/advanced_use/cv_resnet50_second_order_optimizer.md deleted file mode 100644 index d528718fcd992703fee3782bdb39febb30ac73c4..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/cv_resnet50_second_order_optimizer.md +++ /dev/null @@ -1,512 +0,0 @@ -# 在ResNet-50网络上应用二阶优化实践 - -`Linux` `Ascend` `GPU` `模型开发` `模型调优` `高级` - - - -- [在ResNet-50网络上应用二阶优化实践](#在resnet-50网络上应用二阶优化实践) - - [概述](#概述) - - [准备环节](#准备环节) - - [准备数据集](#准备数据集) - - [配置分布式环境变量](#配置分布式环境变量) - - [Ascend 910](#ascend-910) - - [GPU](#gpu) - - [加载处理数据集](#加载处理数据集) - - [定义网络](#定义网络) - - [定义损失函数及THOR优化器](#定义损失函数及thor优化器) - - [定义损失函数](#定义损失函数) - - [定义优化器](#定义优化器) - - [训练网络](#训练网络) - - [配置模型保存](#配置模型保存) - - [配置训练网络](#配置训练网络) - - [运行脚本](#运行脚本) - - [Ascend 910](#ascend-910-1) - - [GPU](#gpu-1) - - [模型推理](#模型推理) - - [定义推理网络](#定义推理网络) - - [执行推理](#执行推理) - - [Ascend 910](#ascend-910-2) - - [GPU](#gpu-2) - - -   - -## 概述 - 
-常见的优化算法可分为一阶优化算法和二阶优化算法。经典的一阶优化算法如SGD等,计算量小、计算速度快,但是收敛的速度慢,所需的迭代次数多。而二阶优化算法使用目标函数的二阶导数来加速收敛,能更快地收敛到模型最优值,所需要的迭代次数少,但由于二阶优化算法过高的计算成本,导致其总体执行时间仍然慢于一阶,故目前在深度神经网络训练中二阶优化算法的应用并不普遍。二阶优化算法的主要计算成本在于二阶信息矩阵(Hessian矩阵、[FIM矩阵](https://arxiv.org/pdf/1808.07172.pdf)等)的求逆运算,时间复杂度约为$O(n^3)$。 - -MindSpore开发团队在现有的自然梯度算法的基础上,对FIM矩阵采用近似、切分等优化加速手段,极大的降低了逆矩阵的计算复杂度,开发出了可用的二阶优化器THOR。使用8块Ascend 910 AI处理器,THOR可以在72min内完成ResNet50-v1.5网络和ImageNet数据集的训练,相比于SGD+Momentum速度提升了近一倍。 - -本篇教程将主要介绍如何在Ascend 910 以及GPU上,使用MindSpore提供的二阶优化器THOR训练ResNet50-v1.5网络和ImageNet数据集。 -> 你可以在这里下载完整的示例代码: - 。 - -示例代码目录结构 - -```shell -├── resnet_thor - ├── README.md - ├── scripts - ├── run_distribute_train.sh # launch distributed training for Ascend 910 - └── run_eval.sh # launch inference for Ascend 910 - ├── run_distribute_train_gpu.sh # launch distributed training for GPU - └── run_eval_gpu.sh # launch inference for GPU - ├── src - ├── crossentropy.py # CrossEntropy loss function - ├── config.py # parameter configuration - ├── dataset_helper.py # dataset helper for minddata dataset - ├── grad_reducer_thor.py # grad reduce for thor - ├── model_thor.py # model for train - ├── resnet_thor.py # resnet50_thor backone - ├── thor.py # thor optimizer - ├── thor_layer.py # thor layer - └── dataset.py # data preprocessing - ├── eval.py # infer script - ├── train.py # train script - ├── export.py # export checkpoint file into air file - └── mindspore_hub_conf.py # config file for mindspore hub repository - -``` - -整体执行流程如下: - -1. 准备ImageNet数据集,处理需要的数据集; -2. 定义ResNet50网络; -3. 定义损失函数和THOR优化器; -4. 加载数据集并进行训练,训练完成后,查看结果及保存模型文件; -5. 加载保存的模型,进行推理。 - -## 准备环节 - -实践前,确保已经正确安装MindSpore。如果没有,可以通过[MindSpore安装页面](https://www.mindspore.cn/install)安装MindSpore。 - -### 准备数据集 - -下载完整的ImageNet2012数据集,将数据集解压分别存放到本地工作区的`ImageNet2012/ilsvrc`、`ImageNet2012/ilsvrc_eval`路径下。 - -目录结构如下: - -```text -└─ImageNet2012 - ├─ilsvrc - │ n03676483 - │ n04067472 - │ n01622779 - │ ...... - └─ilsvrc_eval - │ n03018349 - │ n02504013 - │ n07871810 - │ ...... 
- -``` - -### 配置分布式环境变量 - -#### Ascend 910 - -Ascend 910 AI处理器的分布式环境变量配置参考[分布式并行训练 (Ascend)](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html#id4)。 - -#### GPU - -GPU的分布式环境配置参考[分布式并行训练 (GPU)](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_gpu.html#id4)。 - -## 加载处理数据集 - -分布式训练时,通过并行的方式加载数据集,同时通过MindSpore提供的数据增强接口对数据集进行处理。加载处理数据集的脚本在源码的`src/dataset.py`脚本中。 - -```python -import os -from mindspore import dtype as mstype -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as C -import mindspore.dataset.transforms.c_transforms as C2 -from mindspore.communication.management import init, get_rank, get_group_size - -def create_dataset(dataset_path, do_train, repeat_num=1, batch_size=32, target="Ascend"): - if target == "Ascend": - device_num, rank_id = _get_rank_info() - num_parallels = 8 - else: - init() - rank_id = get_rank() - device_num = get_group_size() - num_parallels = 4 - - if device_num == 1: - data_set = ds.ImageFolderDataset(dataset_path, num_parallel_workers=num_parallels, shuffle=True) - else: - data_set = ds.ImageFolderDataset(dataset_path, num_parallel_workers=num_parallels, shuffle=True, - num_shards=device_num, shard_id=rank_id) - - image_size = 224 - mean = [0.485 * 255, 0.456 * 255, 0.406 * 255] - std = [0.229 * 255, 0.224 * 255, 0.225 * 255] - - # define map operations - if do_train: - trans = [ - C.RandomCropDecodeResize(image_size, scale=(0.08, 1.0), ratio=(0.75, 1.333)), - C.RandomHorizontalFlip(prob=0.5), - C.Normalize(mean=mean, std=std), - C.HWC2CHW() - ] - else: - trans = [ - C.Decode(), - C.Resize(256), - C.CenterCrop(image_size), - C.Normalize(mean=mean, std=std), - C.HWC2CHW() - ] - - type_cast_op = C2.TypeCast(mstype.int32) - - data_set = data_set.map(operations=trans, input_columns="image", num_parallel_workers=num_parallels) - data_set = data_set.map(operations=type_cast_op, input_columns="label", 
num_parallel_workers=num_parallels) - - # apply batch operations - data_set = data_set.batch(batch_size, drop_remainder=True) - - # apply dataset repeat operation - data_set = data_set.repeat(repeat_num) - - return data_set -``` - -> MindSpore支持进行多种数据处理和增强的操作,各种操作往往组合使用,具体可以参考[数据处理](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/pipeline.html)和[数据增强](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/augmentation.html)章节。 - -## 定义网络 - -本示例中使用的网络模型为ResNet50-v1.5,先定义[ResNet50网络](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/resnet/src/resnet.py),然后使用二阶优化器自定义的算子替换`Conv2d`和 -和`Dense`算子。定义好的网络模型在在源码`src/resnet_thor.py`脚本中,自定义的算子`Conv2d_thor`和`Dense_thor`在`src/thor_layer.py`脚本中。 - -- 使用`Conv2d_thor`替换原网络模型中的`Conv2d` -- 使用`Dense_thor`替换原网络模型中的`Dense` - -> 使用THOR自定义的算子`Conv2d_thor`和`Dense_thor`是为了保存模型训练中的二阶矩阵信息,新定义的网络与原网络模型的backbone一致。 - -网络构建完成以后,在`__main__`函数中调用定义好的ResNet50: - -```python -... -from src.resnet_thor import resnet50 -... -if __name__ == "__main__": - ... - # define the net - net = resnet50(class_num=config.class_num, damping=damping, loss_scale=config.loss_scale, - frequency=config.frequency, batch_size=config.batch_size) - ... 
-``` - -## 定义损失函数及THOR优化器 - -### 定义损失函数 - -MindSpore支持的损失函数有`SoftmaxCrossEntropyWithLogits`、`L1Loss`、`MSELoss`等。THOR优化器需要使用`SoftmaxCrossEntropyWithLogits`损失函数。 - -损失函数的实现步骤在`src/crossentropy.py`脚本中。这里使用了深度网络模型训练中的一个常用trick:label smoothing,通过对真实标签做平滑处理,提高模型对分类错误标签的容忍度,从而可以增加模型的泛化能力。 - -```python -class CrossEntropy(_Loss): - """CrossEntropy""" - def __init__(self, smooth_factor=0., num_classes=1000): - super(CrossEntropy, self).__init__() - self.onehot = ops.OneHot() - self.on_value = Tensor(1.0 - smooth_factor, mstype.float32) - self.off_value = Tensor(1.0 * smooth_factor / (num_classes - 1), mstype.float32) - self.ce = nn.SoftmaxCrossEntropyWithLogits() - self.mean = ops.ReduceMean(False) - - def construct(self, logit, label): - one_hot_label = self.onehot(label, ops.shape(logit)[1], self.on_value, self.off_value) - loss = self.ce(logit, one_hot_label) - loss = self.mean(loss, 0) - return loss -``` - -在`__main__`函数中调用定义好的损失函数: - -```python -... -from src.crossentropy import CrossEntropy -... -if __name__ == "__main__": - ... - # define the loss function - if not config.use_label_smooth: - config.label_smooth_factor = 0.0 - loss = CrossEntropy(smooth_factor=config.label_smooth_factor, num_classes=config.class_num) - ... -``` - -### 定义优化器 - -THOR优化器的参数更新公式如下: - -$$ \theta^{t+1} = \theta^t + \alpha F^{-1}\nabla E$$ - -参数更新公式中各参数的含义如下: - -- $\theta$:网络中的可训参数; -- $t$:迭代次数; -- $\alpha$:学习率值,参数的更新步长; -- $F^{-1}$:FIM矩阵,在网络中计算获得; -- $\nabla E$:一阶梯度值。 - -从参数更新公式中可以看出,THOR优化器需要额外计算的是每一层的FIM矩阵,每一层的FIM矩阵就是之前在自定义的网络模型中计算获得的。FIM矩阵可以对每一层参数更新的步长和方向进行自适应的调整,加速收敛的同时可以降低调参的复杂度。 - -```python -... -if args_opt.device_target == "Ascend": - from src.thor import THOR -else: - from src.thor import THOR_GPU as THOR -... - -if __name__ == "__main__": - ... 
- # learning rate setting - lr = get_model_lr(0, config.lr_init, config.lr_decay, config.lr_end_epoch, step_size, decay_epochs=39) - # define the optimizer - opt = THOR(filter(lambda x: x.requires_grad, net.get_parameters()), Tensor(lr), config.momentum, - filter(lambda x: 'matrix_A' in x.name, net.get_parameters()), - filter(lambda x: 'matrix_G' in x.name, net.get_parameters()), - filter(lambda x: 'A_inv_max' in x.name, net.get_parameters()), - filter(lambda x: 'G_inv_max' in x.name, net.get_parameters()), - config.weight_decay, config.loss_scale) - ... -``` - -## 训练网络 - -### 配置模型保存 - -MindSpore提供了callback机制,可以在训练过程中执行自定义逻辑,这里使用框架提供的`ModelCheckpoint`函数。 -`ModelCheckpoint`可以保存网络模型和参数,以便进行后续的fine-tuning操作。 -`TimeMonitor`、`LossMonitor`是MindSpore官方提供的callback函数,可以分别用于监控训练过程中单步迭代时间和`loss`值的变化。 - -```python -... -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, TimeMonitor, LossMonitor -... -if __name__ == "__main__": - ... - # define callbacks - time_cb = TimeMonitor(data_size=step_size) - loss_cb = LossMonitor() - cb = [time_cb, loss_cb] - if config.save_checkpoint: - config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, - keep_checkpoint_max=config.keep_checkpoint_max) - ckpt_cb = ModelCheckpoint(prefix="resnet", directory=ckpt_save_dir, config=config_ck) - cb += [ckpt_cb] - ... -``` - -### 配置训练网络 - -通过MindSpore提供的`model.train`接口可以方便地进行网络的训练。THOR优化器通过降低二阶矩阵更新频率,来减少计算量,提升计算速度,故重新定义一个Model_Thor类,继承MindSpore提供的Model类。在Model_Thor类中增加二阶矩阵更新频率控制参数,用户可以通过调整该参数,优化整体的性能。 - -```python -... -from mindspore import FixedLossScaleManager -from src.model_thor import Model_Thor as Model -... - -if __name__ == "__main__": - ... 
- loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) - if target == "Ascend": - model = Model(net, loss_fn=loss, optimizer=opt, amp_level='O2', loss_scale_manager=loss_scale, - keep_batchnorm_fp32=False, metrics={'acc'}, frequency=config.frequency) - else: - model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics={'acc'}, - amp_level="O2", keep_batchnorm_fp32=True, frequency=config.frequency) - ... -``` - -### 运行脚本 - -训练脚本定义完成之后,调`scripts`目录下的shell脚本,启动分布式训练进程。 - -#### Ascend 910 - -目前MindSpore分布式在Ascend上执行采用单卡单进程运行方式,即每张卡上运行1个进程,进程数量与使用的卡的数量一致。进程均放在后台执行,每个进程创建1个目录,目录名称为`train_parallel`+ `device_id`,用来保存日志信息,算子编译信息以及训练的checkpoint文件。下面以使用8张卡的分布式训练脚本为例,演示如何运行脚本: - -使用以下命令运行脚本: - -```bash -sh run_distribute_train.sh -``` - -脚本需要传入变量`RANK_TABLE_FILE`、`DATASET_PATH`和`DEVICE_NUM`,其中: - -- `RANK_TABLE_FILE`:组网信息文件的路径。(rank table文件的生成,参考[HCCL_TOOL](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/utils/hccl_tools)) -- `DATASET_PATH`:训练数据集路径。 -- `DEVICE_NUM`:实际的运行卡数。 - -其余环境变量请参考安装教程中的配置项。 - -训练过程中loss打印示例如下: - -```bash -... -epoch: 1 step: 5004, loss is 4.4182425 -epoch: 2 step: 5004, loss is 3.740064 -epoch: 3 step: 5004, loss is 4.0546017 -epoch: 4 step: 5004, loss is 3.7598825 -epoch: 5 step: 5004, loss is 3.3744206 -... -epoch: 40 step: 5004, loss is 1.6907625 -epoch: 41 step: 5004, loss is 1.8217756 -epoch: 42 step: 5004, loss is 1.6453942 -... -``` - -训练完后,每张卡训练产生的checkpoint文件保存在各自训练目录下,`device_0`产生的checkpoint文件示例如下: - -```bash -└─train_parallel0 - ├─resnet-1_5004.ckpt - ├─resnet-2_5004.ckpt - │ ...... - ├─resnet-42_5004.ckpt - │ ...... 
-``` - -其中, -`*.ckpt`:指保存的模型参数文件。checkpoint文件名称具体含义:*网络名称*-*epoch数*_*step数*.ckpt。 - -#### GPU - -在GPU硬件平台上,MindSpore采用OpenMPI的`mpirun`进行分布式训练,进程创建1个目录,目录名称为`train_parallel`,用来保存日志信息和训练的checkpoint文件。下面以使用8张卡的分布式训练脚本为例,演示如何运行脚本: - -```bash -sh run_distribute_train_gpu.sh -``` - -脚本需要传入变量`DATASET_PATH`和`DEVICE_NUM`,其中: - -- `DATASET_PATH`:训练数据集路径。 -- `DEVICE_NUM`:实际的运行卡数。 - -在GPU训练时,无需设置`DEVICE_ID`环境变量,因此在主训练脚本中不需要调用`int(os.getenv('DEVICE_ID'))`来获取卡的物理序号,同时`context`中也无需传入`device_id`。我们需要将device_target设置为GPU,并需要调用`init()`来使能NCCL。 - -训练过程中loss打印示例如下: - -```bash -... -epoch: 1 step: 5004, loss is 4.2546034 -epoch: 2 step: 5004, loss is 4.0819564 -epoch: 3 step: 5004, loss is 3.7005644 -epoch: 4 step: 5004, loss is 3.2668946 -epoch: 5 step: 5004, loss is 3.023509 -... -epoch: 36 step: 5004, loss is 1.645802 -... -``` - -训练完后,保存的模型文件示例如下: - -```bash -└─train_parallel - ├─ckpt_0 - ├─resnet-1_5004.ckpt - ├─resnet-2_5004.ckpt - │ ...... - ├─resnet-36_5004.ckpt - │ ...... - ...... - ├─ckpt_7 - ├─resnet-1_5004.ckpt - ├─resnet-2_5004.ckpt - │ ...... - ├─resnet-36_5004.ckpt - │ ...... - -``` - -## 模型推理 - -使用训练过程中保存的checkpoint文件进行推理,验证模型的泛化能力。首先通过`load_checkpoint`接口加载模型文件,然后调用`Model`的`eval`接口对输入图片类别作出预测,再与输入图片的真实类别做比较,得出最终的预测精度值。 - -### 定义推理网络 - -1. 使用`load_checkpoint`接口加载模型文件。 -2. 使用`model.eval`接口读入测试数据集,进行推理。 -3. 计算得出预测精度值。 - -```python -... -from mindspore import load_checkpoint, load_param_into_net -... - -if __name__ == "__main__": - ... 
- # define net - net = resnet(class_num=config.class_num) - net.add_flags_recursive(thor=False) - - # load checkpoint - param_dict = load_checkpoint(args_opt.checkpoint_path) - keys = list(param_dict.keys()) - for key in keys: - if "damping" in key: - param_dict.pop(key) - load_param_into_net(net, param_dict) - net.set_train(False) - - # define model - model = Model(net, loss_fn=loss, metrics={'top_1_accuracy', 'top_5_accuracy'}) - - # eval model - res = model.eval(dataset) - print("result:", res, "ckpt=", args_opt.checkpoint_path) -``` - -### 执行推理 - -推理网络定义完成之后,调用`scripts`目录下的shell脚本,进行推理。 - -#### Ascend 910 - -在Ascend 910硬件平台上,推理的执行命令如下: - -```bash -sh run_eval.sh -``` - -脚本需要传入变量`DATASET_PATH`和`CHECKPOINT_PATH`,其中: - -- `DATASET_PATH`:推理数据集路径。 -- `CHECKPOINT_PATH`:保存的checkpoint路径。 - -目前推理使用的是单卡(默认device 0)进行推理,推理的结果如下: - -```text -result: {'top_5_accuracy': 0.9295574583866837, 'top_1_accuracy': 0.761443661971831} ckpt=train_parallel0/resnet-42_5004.ckpt -``` - -- `top_5_accuracy`:对于一个输入图片,如果预测概率排名前五的标签中包含真实标签,即认为分类正确; -- `top_1_accuracy`:对于一个输入图片,如果预测概率最大的标签与真实标签相同,即认为分类正确。 - -#### GPU - -在GPU硬件平台上,推理的执行命令如下: - -```bash -sh run_eval_gpu.sh -``` - -脚本需要传入变量`DATASET_PATH`和`CHECKPOINT_PATH`,其中: - -- `DATASET_PATH`:推理数据集路径。 -- `CHECKPOINT_PATH`:保存的checkpoint路径。 - -推理的结果如下: - -```text -result: {'top_5_accuracy': 0.9287972151088348, 'top_1_accuracy': 0.7597031049935979} ckpt=train_parallel/resnet-36_5004.ckpt -``` diff --git a/tutorials/training/source_zh_cn/advanced_use/dashboard.md b/tutorials/training/source_zh_cn/advanced_use/dashboard.md deleted file mode 100644 index 75e2ecf6127509ea7d1368e42d91c7690ad426ae..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/dashboard.md +++ /dev/null @@ -1,202 +0,0 @@ -# 查看训练看板 - -`Linux` `Ascend` `GPU` `CPU` `模型调优` `中级` `高级` - - - -- [查看训练看板](#查看训练看板) - - [概述](#概述) - - [标量可视化](#标量可视化) - - [参数分布图可视化](#参数分布图可视化) - - [计算图可视化](#计算图可视化) - - [数据图可视化](#数据图可视化) - - [图像可视化](#图像可视化) - - 
[张量可视化](#张量可视化) - - [注意事项](#注意事项) - - - -   - - -## 概述 - -训练看板是MindInsight的可视化组件的重要组成部分,而训练看板的标签包含:标量可视化、参数分布图可视化、计算图可视化、数据图可视化、图像可视化和张量可视化等。 - -用户从训练列表中选择指定的训练,进入训练看板。 - -> 收集可视化数据及进入训练看板方法可参考《[收集Summary数据](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/summary_record.html)》。 - -## 标量可视化 - -标量可视化用于展示训练过程中,标量的变化趋势情况。 - -![scalar.png](./images/scalar.png) - -图1:标量趋势图 - -图1展示了神经网络在训练过程中损失值的变化过程。横坐标是训练步骤,纵坐标是损失值。 - -图中右上角有几个按钮功能,从左到右功能分别是全屏展示,切换Y轴比例,开启/关闭框选,分步回退和还原图形。 - -- 全屏展示即全屏展示该标量曲线,再点击一次即可恢复。 -- 切换Y轴比例是指可以将Y轴坐标进行对数转换。 -- 开启/关闭框选是指可以框选图中部分区域,并放大查看该区域, 可以在已放大的图形上叠加框选。 -- 分步回退是指对同一个区域连续框选并放大查看时,可以逐步撤销操作。 -- 还原图形是指进行了多次框选后,点击此按钮可以将图还原回原始状态。 - -图中右下角可以设置阈值并高亮显示或者删除阈值。如图所示,设置的阈值为小于1.5,红色高亮部分显示出不超出阈值的部分,能够直观地看到预期的数据值或者一些异常的数值。 - -![scalar_select.png](./images/scalar_select.png) - -图2:标量可视化功能区 - -图2展示的标量可视化的功能区,提供了根据选择不同标签,水平轴的不同维度和平滑度来查看标量信息的功能。 - -- 标签选择:提供了对所有标签进行多项选择的功能,用户可以通过勾选所需的标签,查看对应的标量信息。 -- 水平轴:可以选择“步骤”、“相对时间”、“绝对时间”中的任意一项,来作为标量曲线的水平轴。 -- 平滑度:可以通过调整平滑度,对标量曲线进行平滑处理。 -- 标量合成:可以选中两条标量曲线进行合成并展示在一个图中,以方便对两条曲线进行对比或者查看合成后的图。 - -![scalar_compound.png](./images/scalar_compound.png) - -图3:Accuracy和Loss的标量合成图 - -图3展示Accuracy曲线和Loss曲线的标量合成图。标量合成的功能区与标量可视化的功能区相似。其中与标量可视化功能区不一样的地方,在于标签选择时,标量合成功能最多只能同时选择两个标签,将其曲线合成并展示。 - -## 参数分布图可视化 - -参数分布图用于将用户所指定的张量以直方图的形式进行展示。 - -![histogram.png](./images/histogram.png) - -图4:直方图展示 - -图4将用户所记录的张量以直方图的形式进行展示。点击图中右上角,可以将图放大。 - -![histogram_func.png](./images/histogram_func.png) - -图5:参数分布图功能区 - -图5展示参数分布图的功能区,包含以下内容: - -- 标签选择:提供了对所有标签进行多项选择的功能,用户可以通过勾选所需的标签,查看对应的直方图。 -- 纵轴:可以选择`步骤`、`相对时间`、`绝对时间`中的任意一项,来作为直方图纵轴显示的数据。 -- 视角:可以选择`正视`和`俯视`中的一种。`正视`是指从正面的角度查看直方图,此时不同步骤之间的数据会覆盖在一起。`俯视`是指偏移以45度角俯视直方图区域,这时可以呈现不同步骤之间数据的差异。 - -## 计算图可视化 - -计算图可视化用于展示计算图的图结构,数据流以及控制流的走向,支持展示summary日志文件与通过`context`的`save_graphs`参数导出的`pb`文件。 - -![graph.png](./images/graph.png) - -图6:计算图展示区 - -图6展示了计算图的网络结构。如图中所展示的,在展示区中,选中其中一个算子(图中圈红算子),可以看到该算子有两个输入和一个输出(实线代表算子的数据流走向)。 - -![graph_sidebar.png](./images/graph_sidebar.png) - -图7:计算图功能区 - 
-图7展示了计算图可视化的功能区,包含以下内容: - -- 文件选择框:可以选择查看不同文件的计算图。 -- 搜索框:可以对节点进行搜索,输入节点名称点击回车,即可展示该节点。 -- 缩略图:展示整个网络图结构的缩略图,在查看超大图结构时,方便查看当前浏览的区域。 -- 节点信息:展示选中的节点的基本信息,包括节点的名称、属性、输入节点、输出节点等信息。 -- 图例:展示的是计算图中各个图标的含义。 - -## 数据图可视化 - -数据图可视化用于展示单次模型训练的数据处理和数据增强信息。 - -![data_function.png](./images/data_function.png) - -图8:数据图功能区 - -图8展示的数据图功能区包含以下内容: - -- 图例:展示数据溯源图中各个图标的含义。 -- 数据处理流水线:展示训练所使用的数据处理流水线,可以选择图中的单个节点查看详细信息。 -- 节点信息:展示选中的节点的基本信息,包括使用的数据处理和增强算子的名称、参数等。 - -## 图像可视化 - -图像可视化用于展示用户所指定的图片。 - -![image.png](./images/image_vi.png) - -图9:图像可视化 - -图9展示通过滑动图中“步骤”滑条,查看不同步骤的图片。 - -![image_function.png](./images/image_function.png) - -图10:图像可视化功能区 - -图10展示图像可视化的功能区,提供了选择查看不同标签,不同亮度和不同对比度来查看图片信息。 - -- 标签:提供了对所有标签进行多项选择的功能,用户可以通过勾选所需的标签,查看对应的图片信息。 -- 亮度调整:可以调整所展示的所有图片亮度。 -- 对比度调整:可以调整所展示的所有图片对比度。 - -## 张量可视化 - -张量可视化用于将张量以表格以及直方图的形式进行展示。 - -![tensor_function.png](./images/tensor_function.png) - -图11:张量可视化功能区 - -图11展示张量可视化的功能区,包含以下内容: - -- 标签选择:提供了对所有标签进行多项选择的功能,用户可以通过勾选所需的标签,查看对应的表格数据或者直方图。 -- 视图:可以选择`表格`或者`直方图`来展示tensor数据。在`直方图`视图下存在`纵轴`和`视角`的功能选择。 -- 纵轴:可以选择`步骤`、`相对时间`、`绝对时间`中的任意一项,来作为直方图纵轴显示的数据。 -- 视角:可以选择`正视`和`俯视`中的一种。`正视`是指从正面的角度查看直方图,此时不同步骤之间的数据会覆盖在一起。`俯视`是指偏移45度角俯视直方图区域,这时可以呈现不同步骤之间数据的差异。 - -![tensor_table.png](./images/tensor_table.png) - -图12:表格展示 - -图12将用户所记录的张量以表格的形式展示,包含以下功能: - -- 点击表格右边小方框按钮,可以将表格放大。 -- 表格中白色方框显示当前展示的是哪个维度下的张量数据,其中冒号`:`表示当前维度索引范围,和Python索引含义基本一致,不指定具体索引表示当前维度所有值,`2:5`表示索引2到5(不包括5)的值,可以在方框输入对应的索引或者含有`:`的索引范围后按`Enter`键或者点击后边的打勾按钮来查询特定维度的张量数据。假设某维度是32,则其索引范围是-32到31。注意:可以查询0维到2维的张量数据,不支持查询超过两维的张量数据,即不能设置超过两个冒号`:`的查询条件。 -- 拖拽表格下方的空心圆圈可以查询特定步骤的张量数据。 - -![tensor_histogram.png](./images/tensor_histogram.png) - -图13:直方图展示 - -图13将用户所记录的张量以直方图的形式进行展示。点击图中右上角,可以将图放大。 - -## 注意事项 - -1. 目前MindSpore仅支持在Ascend 910 AI处理器上导出算子融合后的计算图。 -2. 在训练中使用Summary算子收集数据时,`HistogramSummary` 算子会影响性能,所以请尽量少地使用。 -3. 
为了控制内存占用,MindInsight对标签(tag)数目和步骤(step)数目进行了限制: - - 每个训练看板的最大标签数量为300个标签。标量标签、图片标签、计算图标签、参数分布图(直方图)标签、张量标签的数量总和不得超过300个。特别地,每个训练看板最多有10个计算图标签、6个张量标签。当实际标签数量超过这一限制时,将依照MindInsight的处理顺序,保留最近处理的300个标签。 - - 每个训练看板的每个标量标签最多有1000个步骤的数据。当实际步骤的数目超过这一限制时,将对数据进行随机采样,以满足这一限制。 - - 每个训练看板的每个图片标签最多有10个步骤的数据。当实际步骤的数目超过这一限制时,将对数据进行随机采样,以满足这一限制。 - - 每个训练看板的每个参数分布图(直方图)标签最多有50个步骤的数据。当实际步骤的数目超过这一限制时,将对数据进行随机采样,以满足这一限制。 - - 每个训练看板的每个张量标签最多有20个步骤的数据。当实际步骤的数目超过这一限制时,将对数据进行随机采样,以满足这一限制。 -4. 由于`TensorSummary`会记录完整Tensor数据,数据量通常会比较大,为了控制内存占用和出于性能上的考虑,MindInsight对Tensor的大小以及返回前端展示的数值个数进行以下限制: - - MindInsight最大支持加载含有1千万个数值的Tensor。 - - MindInsight对Tensor的前端展示每次查询最大支持1000列。 - - Tensor加载后,在张量可视的表格视图下,最大支持查看10万个数值,如果所选择的维度查询得到的数值超过这一限制,则无法显示。 - -5. 由于张量可视(`TensorSummary`)会记录原始张量数据,需要的存储空间较大。使用`TensorSummary`前和训练过程中请注意检查系统存储空间充足。 - - 通过以下方法可以降低张量可视功能的存储空间占用: - - 1)避免使用`TensorSummary`记录较大的Tensor。 - - 2)减少网络中`TensorSummary`算子的使用个数。 - - 功能使用完毕后,请及时清理不再需要的训练日志,以释放磁盘空间。 - - 备注:估算`TensorSummary`空间使用量的方法如下: - - 一个`TensorSummary数据的大小 = Tensor中的数值个数 * 4 bytes`。假设使用`TensorSummary`记录的Tensor大小为`32 * 1 * 256 * 256`,则一个`TensorSummary`数据大约需要`32 * 1 * 256 * 256 * 4 bytes = 8,388,608 bytes = 8MiB`。`TensorSummary`默认会记录20个步骤的数据,则记录这20组数据需要的空间约为`20 * 8 MiB = 160MiB`。需要注意的是,由于数据结构等因素的开销,实际使用的存储空间会略大于160MiB。 -6. 
当使用`TensorSummary`时,由于记录完整Tensor数据,训练日志文件较大,MindInsight需要更多时间解析训练日志文件,请耐心等待。 diff --git a/tutorials/training/source_zh_cn/advanced_use/debug_in_pynative_mode.md b/tutorials/training/source_zh_cn/advanced_use/debug_in_pynative_mode.md deleted file mode 100644 index 33da6899e28d3f7c1a6fd9154eb235116a9df78b..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/debug_in_pynative_mode.md +++ /dev/null @@ -1,390 +0,0 @@ -# 使用PyNative模式调试 - -`Linux` `Ascend` `GPU` `CPU` `模型开发` `初级` `中级` `高级` - - - -- [使用PyNative模式调试](#使用pynative模式调试) - - [概述](#概述) - - [执行单算子](#执行单算子) - - [执行普通函数](#执行普通函数) - - [提升PyNative性能](#提升pynative性能) - - [调试网络训练模型](#调试网络训练模型) - - - - -   - -   - - -## 概述 - -MindSpore支持两种运行模式,在调试或者运行方面做了不同的优化: - -- PyNative模式:也称动态图模式,将神经网络中的各个算子逐一下发执行,方便用户编写和调试神经网络模型。 -- Graph模式:也称静态图模式或者图模式,将神经网络模型编译成一整张图,然后下发执行。该模式利用图优化等技术提高运行性能,同时有助于规模部署和跨平台运行。 - -默认情况下,MindSpore处于PyNative模式,可以通过`context.set_context(mode=context.GRAPH_MODE)`切换为Graph模式;同样地,MindSpore处于Graph模式时,可以通过 `context.set_context(mode=context.PYNATIVE_MODE)`切换为PyNative模式。 - -PyNative模式下,支持执行单算子、普通函数和网络,以及单独求梯度的操作。下面将详细介绍使用方法和注意事项。 - -> PyNative模式下为了提升性能,算子在device上使用了异步执行方式,因此在算子执行错误的时候,错误信息可能会在程序执行到最后才显示。 - -## 执行单算子 - -执行单个算子,并打印相关结果,如下例所示。 - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -conv = nn.Conv2d(3, 4, 3, bias_init='zeros') -input_data = Tensor(np.ones([1, 3, 5, 5]).astype(np.float32)) -output = conv(input_data) -print(output.asnumpy()) -``` - -输出: - -```python -[[[[-0.02190447 -0.05208071 -0.05208071 -0.05208071 -0.06265172] -[-0.01529094 -0.05286242 -0.05286242 -0.05286242 -0.04228776] -[-0.01529094 -0.05286242 -0.05286242 -0.05286242 -0.04228776] -[-0.01529094 -0.05286242 -0.05286242 -0.05286242 -0.04228776] -[-0.01430791 -0.04892948 -0.04892948 -0.04892948 -0.01096004]] - -[[ 0.00802889 -0.00229866 -0.00229866 -0.00229866 -0.00471579] 
-[ 0.01172971 0.02172665 0.02172665 0.02172665 0.03261888] -[ 0.01172971 0.02172665 0.02172665 0.02172665 0.03261888] -[ 0.01172971 0.02172665 0.02172665 0.02172665 0.03261888] -[ 0.01784375 0.01185635 0.01185635 0.01185635 0.01839031]] - -[[ 0.04841832 0.03321705 0.03321705 0.03321705 0.0342317 ] -[ 0.0651359 0.04310361 0.04310361 0.04310361 0.03355784] -[ 0.0651359 0.04310361 0.04310361 0.04310361 0.03355784] -[ 0.0651359 0.04310361 0.04310361 0.04310361 0.03355784] -[ 0.04680437 0.03465693 0.03465693 0.03465693 0.00171057]] - -[[-0.01783456 -0.00459451 -0.00459451 -0.00459451 0.02316688] -[ 0.01295831 0.00879035 0.00879035 0.00879035 0.01178642] -[ 0.01295831 0.00879035 0.00879035 0.00879035 0.01178642] -[ 0.01295831 0.00879035 0.00879035 0.00879035 0.01178642] -[ 0.05016355 0.03958241 0.03958241 0.03958241 0.03443141]]]] -``` - -## 执行普通函数 - -将若干算子组合成一个函数,然后直接通过函数调用的方式执行这些算子,并打印相关结果,如下例所示。 - -示例代码: - -```python -import numpy as np -from mindspore import context, Tensor -import mindspore.ops as ops - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -def add_func(x, y): - z = ops.add(x, y) - z = ops.add(z, x) - return z - -x = Tensor(np.ones([3, 3], dtype=np.float32)) -y = Tensor(np.ones([3, 3], dtype=np.float32)) -output = add_func(x, y) -print(output.asnumpy()) -``` - -输出: - -```python -[[3. 3. 3.] - [3. 3. 3.] - [3. 3. 
3.]] -``` - -> PyNative不支持summary功能,图模式summary相关算子不能使用。 - -### 提升PyNative性能 - -为了提高PyNative模式下的前向计算任务执行速度,MindSpore提供了Staging功能,该功能可以在PyNative模式下将Python函数或者Python类的方法编译成计算图,通过图优化等技术提高运行速度,如下例所示。 - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor -import mindspore.ops as ops -from mindspore import ms_function - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -class TensorAddNet(nn.Cell): - def __init__(self): - super(TensorAddNet, self).__init__() - self.add = ops.Add() - - @ms_function - def construct(self, x, y): - res = self.add(x, y) - return res - -x = Tensor(np.ones([4, 4]).astype(np.float32)) -y = Tensor(np.ones([4, 4]).astype(np.float32)) -net = TensorAddNet() - -z = net(x, y) # Staging mode -add = ops.Add() -res = add(x, z) # PyNative mode -print(res.asnumpy()) -``` - -输出: - -```python -[[3. 3. 3. 3.] - [3. 3. 3. 3.] - [3. 3. 3. 3.] - [3. 3. 3. 3.]] -``` - -上述示例代码中,在`TensorAddNet`类的`construct`之前加装了`ms_function`装饰器,该装饰器会将`construct`方法编译成计算图,在给定输入之后,以图的形式下发执行,而上一示例代码中的`add`会直接以普通的PyNative的方式执行。 - -需要说明的是,加装了`ms_function`装饰器的函数中,如果包含不需要进行参数训练的算子(如`pooling`、`add`等算子),则这些算子可以在被装饰的函数中直接调用,如下例所示。 - -示例代码: - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor -import mindspore.ops as ops -from mindspore import ms_function - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -add = ops.Add() - -@ms_function -def add_fn(x, y): - res = add(x, y) - return res - -x = Tensor(np.ones([4, 4]).astype(np.float32)) -y = Tensor(np.ones([4, 4]).astype(np.float32)) -z = add_fn(x, y) -print(z.asnumpy()) -``` - -输出: - -```shell -[[2. 2. 2. 2.] - [2. 2. 2. 2.] - [2. 2. 2. 2.] - [2. 2. 2. 
2.]] -``` - -如果被装饰的函数中包含了需要进行参数训练的算子(如`Convolution`、`BatchNorm`等算子),则这些算子必须在被装饰等函数之外完成实例化操作,如下例所示。 - -示例代码: - -```python -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor -from mindspore import ms_function - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -conv_obj = nn.Conv2d(in_channels=3, out_channels=4, kernel_size=3, stride=2, padding=0) -conv_obj.init_parameters_data() -@ms_function -def conv_fn(x): - res = conv_obj(x) - return res - -input_data = np.random.randn(2, 3, 6, 6).astype(np.float32) -z = conv_fn(Tensor(input_data)) -print(z.asnumpy()) -``` - -输出: - -```shell -[[[[ 0.10377571 -0.0182163 -0.05221086] -[ 0.1428334 -0.01216263 0.03171652] -[-0.00673915 -0.01216291 0.02872104]] - -[[ 0.02906547 -0.02333629 -0.0358406 ] -[ 0.03805163 -0.00589525 0.04790922] -[-0.01307234 -0.00916951 0.02396654]] - -[[ 0.01477884 -0.06549098 -0.01571796] -[ 0.00526886 -0.09617482 0.04676902] -[-0.02132788 -0.04203424 0.04523344]] - -[[ 0.04590619 -0.00251453 -0.00782715] -[ 0.06099087 -0.03445276 0.00022781] -[ 0.0563223 -0.04832596 -0.00948266]]] - -[[[ 0.08444098 -0.05898955 -0.039262 ] -[ 0.08322686 -0.0074796 0.0411371 ] -[-0.02319113 0.02128408 -0.01493311]] - -[[ 0.02473745 -0.02558945 -0.0337843 ] -[-0.03617039 -0.05027632 -0.04603915] -[ 0.03672804 0.00507637 -0.08433761]] - -[[ 0.09628943 0.01895323 -0.02196114] -[ 0.04779419 -0.0871575 0.0055248 ] -[-0.04382382 -0.00511185 -0.01168541]] - -[[ 0.0534859 0.02526264 0.04755395] -[-0.03438103 -0.05877855 0.06530266] -[ 0.0377498 -0.06117418 0.00546303]]]] -``` - -## 调试网络训练模型 - -PyNative模式下,还可以支持单独求梯度的操作。如下例所示,可通过`GradOperation`求该函数或者网络所有的输入梯度。需要注意,输入类型仅支持Tensor。 - -示例代码: - -```python -import mindspore.ops as ops -import mindspore.context as context -from mindspore import dtype as mstype -from mindspore import Tensor - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -def mul(x, y): - return x * y - -def mainf(x, y): - return 
ops.GradOperation(get_all=True)(mul)(x, y) - -print(mainf(Tensor(1, mstype.int32), Tensor(2, mstype.int32))) -``` - -输出: - -```python -(Tensor(shape=[], dtype=Int32, value=2), Tensor(shape=[], dtype=Int32, value=1)) -``` - -在进行网络训练时,求得梯度然后调用优化器对参数进行优化(暂不支持在反向计算梯度的过程中设置断点),然后再利用前向计算loss,从而实现在PyNative模式下进行网络训练。 - -完整LeNet示例代码: - -```python -import numpy as np -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import dtype as mstype -from mindspore import context, Tensor, ParameterTuple -from mindspore.common.initializer import TruncatedNormal -from mindspore.nn import Dense, WithLossCell, SoftmaxCrossEntropyWithLogits, Momentum - -context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0): - """weight initial for conv layer""" - weight = weight_variable() - return nn.Conv2d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, padding=padding, - weight_init=weight, has_bias=False, pad_mode="valid") - -def fc_with_initialize(input_channels, out_channels): - """weight initial for fc layer""" - weight = weight_variable() - bias = weight_variable() - return nn.Dense(input_channels, out_channels, weight, bias) - -def weight_variable(): - """weight initial""" - return TruncatedNormal(0.02) - - -class LeNet5(nn.Cell): - """ - Lenet network - Args: - num_class (int): Num classes. Default: 10. 
- - Returns: - Tensor, output tensor - - Examples: - >>> LeNet(num_class=10) - """ - def __init__(self, num_class=10): - super(LeNet5, self).__init__() - self.num_class = num_class - self.batch_size = 32 - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16 * 5 * 5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, self.num_class) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.reshape = ops.Reshape() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.reshape(x, (self.batch_size, -1)) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x - - -class GradWrap(nn.Cell): - """ GradWrap definition """ - def __init__(self, network): - super(GradWrap, self).__init__(auto_prefix=False) - self.network = network - self.weights = ParameterTuple(filter(lambda x: x.requires_grad, network.get_parameters())) - - def construct(self, x, label): - weights = self.weights - return ops.GradOperation(get_by_list=True)(self.network, weights)(x, label) - -net = LeNet5() -optimizer = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.1, 0.9) -criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') -net_with_criterion = WithLossCell(net, criterion) -train_network = GradWrap(net_with_criterion) -train_network.set_train() - -input_data = Tensor(np.ones([net.batch_size, 1, 32, 32]).astype(np.float32) * 0.01) -label = Tensor(np.ones([net.batch_size]).astype(np.int32)) -output = net(Tensor(input_data)) -loss_output = criterion(output, label) -grads = train_network(input_data, label) -success = optimizer(grads) -loss = loss_output.asnumpy() -print(loss) -``` - -输出: - -```python -2.3050091 -``` - -上述执行方式中,可以在`construct`函数任意需要的地方设置断点,获取网络执行的中间结果,通过pdb的方式对网络进行调试。 diff --git 
a/tutorials/training/source_zh_cn/advanced_use/debugger.md b/tutorials/training/source_zh_cn/advanced_use/debugger.md deleted file mode 100755 index 424ed3f601a00d85b308cdcb6a9e0adf3727b809..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/debugger.md +++ /dev/null @@ -1,236 +0,0 @@ -# 使用调试器 - -`Linux` `Ascend` `GPU` `模型调优` `中级` `高级` - - - -- [使用调试器](#使用调试器) - - [概述](#概述) - - [操作流程](#操作流程) - - [调试器环境准备](#调试器环境准备) - - [调试器页面介绍](#调试器页面介绍) - - [计算图](#计算图) - - [节点列表](#节点列表) - - [节点信息](#节点信息) - - [监测点列表](#监测点列表) - - [设置监测点](#设置监测点) - - [重新检查](#重新检查) - - [训练控制](#训练控制) - - [张量检查视图](#张量检查视图) - - [使用调试器进行调试](#使用调试器进行调试) - - [注意事项](#注意事项) - - - - - -## 概述 - -MindSpore调试器是为图模式训练提供的调试工具,可以用来查看并分析计算图节点的中间结果。 - -在MindSpore图模式的训练过程中,用户无法从Python层获取到计算图中间节点的结果,使得训练调试变得很困难。使用MindSpore调试器,用户可以: - -- 在MindInsight调试器界面结合计算图,查看图节点的输出结果; -- 设置监测点,监测训练异常情况(比如检查张量溢出),在异常发生时追踪错误原因; -- 查看权重等参数的变化情况。 - -## 操作流程 - -- 以调试模式启动MindInsight,等待训练连接; -- 配置相关环境变量,运行训练脚本; -- 训练连接成功,在MindInsight调试器界面设置监测点; -- 在MindInsight调试器界面分析训练执行情况。 - -## 调试器环境准备 - -开始训练前,请先安装MindInsight,并以调试模式启动。调试模式下,MindSpore会将训练信息发送给MindInsight调试服务,用户可在MindInsight调试器界面进行查看和分析。 - -MindInsight调试服务启动命令: - -```shell -mindinsight start --port {PORT} --enable-debugger True --debugger-port {DEBUGGER_PORT} -``` - -参数含义如下: - -|参数名|属性|功能描述|参数类型|默认值|取值范围| -|---|---|---|---|---|---| -|`--port {PORT}`|可选|指定Web可视化服务端口。|Integer|8080|1~65535| -|`--enable-debugger {ENABLE_DEBUGGER}`|可选|取值为True或1, 开启MindInsight侧调试器;默认为False,不开启。|Boolean|False|True/False/1/0| -|`--debugger-port {DEBUGGER_PORT}`|可选|指定调试服务端口。|Integer|50051|1~65535| - -更多启动参数请参考[MindInsight相关命令](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/mindinsight_commands.html)。 - -然后,设置环境变量`export ENABLE_MS_DEBUGGER=1`或`export ENABLE_MS_DEBUGGER=True`,将训练指定为调试模式,并设置训练要连接的调试服务和端口: -`export MS_DEBUGGER_HOST=127.0.0.1`(该服务地址需与MindInsight host一致); -`export MS_DEBUGGER_PORT=50051`(该端口需与MindInsight debugger-port一致)。 - 
-如果用户设备的内存空间有限,可在运行训练前开启内存复用模式,以降低运行内存占用:`export MS_DEBUGGER_PARTIAL_MEM=1`。 - -此外,训练时不要使用数据下沉模式(需设置`model.train`中的`dataset_sink_mode`为`False`),以保证调试器可以获取每个轮次的训练信息。 - -调试器环境准备完成后,运行训练脚本。 - -## 调试器页面介绍 - -训练连接成功后,可以在MindInsight调试器界面查看计算图等训练元信息,调试器页面布局由计算图、节点列表、节点信息、监测点列表、监测点命中列表等部分组成。 - -![debugger_init_page](images/debugger_init_page.png) - -图1: 调试器初始页面 - -### 计算图 - -调试器将优化后的最终执行图展示在UI的中上位置,用户可以双击打开图上的方框 (代表一个`scope`) 将计算图进一步展开,查看`scope`中的节点信息。 - -面板的最上方展示了`训练端地址`(训练脚本所在进程的地址和端口),训练使用的`卡号`, 训练的`当前轮次`等元信息。 - -在GPU环境下,训练执行图面板的右上角会有`当前节点`和`下一个节点`两个按钮,分别用于回到当前执行节点和执行下一个节点。 -用户可以方便地执行单个节点。 - -### 节点列表 - -![debugger_search_node_type](images/debugger_search_node_type.png) -图2: 节点列表按节点类型过滤 - -如图1所示,在UI的左侧会展示计算图`节点列表`,可以将计算图中的节点按`scope`分层展开。点击`节点列表`中的节点,计算图也会联动展开到选中节点的位置。 -用户也可以使用`节点列表`上方的`图文件`以及`节点类型`对节点按图文件和类型进行过滤,如图2所示。用户还可以使用`节点类型`下方的搜索框按名称进行节点的搜索。 - -### 节点信息 - -点击计算图上的节点后,可以在UI下方查看该节点的详细信息,如图2所示。该部分展示了节点的输出和输入,训练的`轮次`数目,`张量`的`类型`、`形状`和`数值`等信息。 - -在GPU环境下,选中图上的某个可执行节点后,单击鼠标右键,可选择`运行到该节点`,代表将训练脚本运行到被选中节点(不超过一个`轮次`)。 - -### 监测点列表 - -![debugger_set_watch_point](images/debugger_watch_point_list.png) - -图3: 监测点列表 - -如图3所示,监测点列表位于页面的左下方。监测点列表上方的三个按钮从左到右依次为`重新检查`、`清空监测点`和`创建监测点`。 - -### 设置监测点 - -![debugger_set_watch_point](images/debugger_set_watch_point.png) - -图4: 创建监测点 - -为了方便地对节点的计算结果进行监测分析,用户可以给计算图中的节点设置监测点。图4展示了监测点的设置方法,用户首先点击监测点列表右上角的 `+` 按钮新增监测点并选择检查条件,比如检查过大张量,选择一个要检查的条件并输入阈值,点击“确定”以创建监测点。 -监测点创建后,请手动勾选要检查的节点,最后点击监测点旁边的`√`确认。如果创建监测点时,选择的检查对象为“检查权重”、“检查梯度”、“检查激活”,则监测点创建时,会自动勾选权重、梯度、激活节点,用户可以在点击“确定”后手动修改节点勾选状态。 - -支持的条件包括(括号中为缩写): - -- 检查张量 - - 检查计算过程溢出(OO):检查算子计算过程中是否存在溢出现象,仅支持昇腾AI处理器。 - - 检查张量是否全为0(TZ):通过对条件参数设置阈值来检查张量的0值比例,可选参数为`0值比例>=`。 - - 检查张量溢出(TO):检查张量值是否存在溢出现象。 - - 检查张量值范围(TR):通过对条件参数设置阈值来检查张量值的范围,可选参数为`在范围中的值所占百分比>`、`在范围中的值所占百分比<`、`MAX-MIN>`和`MAX-MIN<`。其中在设置`在范围中的值所占百分比>`和`在范围中的值所占百分比<`时需要同时设置支持参数`范围上界(含)`和`范围下界(含)`。 - - 检查过大张量(TL):通过对条件参数设置阈值来检查张量值是否过大,可选参数为`绝对值的平均值>`、`max >`、`min >`和`mean >`。 - - 检查过小张量(TS):通过对条件参数设置阈值来检查张量值是否过小,可选参数为`绝对值的平均值<`、`max <`、`min 
<`和`mean <`。 - -- 检查权重 - - 检查权重变化过大(WCL):通过对条件参数设置阈值来检查权重值的变化是否过大,可选参数为`平均变化比例值>`。 - - 其中`平均变化比例值`的计算方式为 `mean(abs(当前权重值 - 上一轮次权重值)) / (mean(abs(上一轮次权重值)) + 偏移量)`。 - - 检查权重变化过小(WCS):通过对条件参数设置阈值来检查权重值的变化是否过小,可选参数为`平均变化比例值<`。 - - 检查权重初始值(WI):通过对条件参数设置阈值来检查权重的初始值,可选参数为`0值比例>=`、`max >`和`min <`。 - - 检查未变化权重(WNC):通过对条件参数设置阈值来检查权重值是否更新,可选参数为`相对容忍度`。 - - 检查权重溢出(WO):检查权重值是否存在溢出现象。 - - 检查过大权重(WL):通过对条件参数设置阈值来检查权重值是否过大,可选参数为`绝对值的平均值>`、`max >`、`min >`和`mean >`。 - - 检查过小权重(WS):通过对条件参数设置阈值来检查权重值是否过小,可选参数为`绝对值的平均值<`、`max <`、`min <`和`mean <`。 - -- 检查激活值 - - 检查激活值范围(AR):通过对条件参数设置阈值来检查激活值的范围,可选参数为`在范围中的值所占百分比>`、`在范围中的值所占百分比<`、`MAX-MIN>`和`MAX-MIN<`。其中在设置`在范围中的值所占百分比>`和`在范围中的值所占百分比<`时需要同时设置支持参数`范围上界(含)`和`范围下界(含)`。 - -- 检查梯度 - - 检查梯度爆炸(GE):检查梯度值是否存在溢出现象。 - - 检查梯度过大(GL):通过对条件参数设置阈值来检查梯度值是否过大,可选参数为`绝对值的平均值>`、`max >`、`min >`和`mean >`。 - - 检查梯度消失(GV):通过对条件参数设置阈值来检查梯度值是否过小,可选参数为`绝对值的平均值<`、`max <`、`min <`和`mean <`。 - -检测点生成后用户还可以在节点列表添加或取消要监控的节点(勾选节点前的方框),如图3所示。除此之外,用户可以通过点击`清空监测点`按钮或者点击监测点旁边的`x`来删除监测点。 - -训练时,调试器会对这些监控节点的输出进行实时分析,一旦监控条件触发,训练暂停,用户可在UI上查看触发的监测点信息。 - -![debugger_watch_point_hit](images/debugger_watch_point_hit.png) - -图5: 查看触发的监测点 - -图5展示了监测点触发后的展示页面,该页面和`节点列表`所在位置相同。触发的节点以及监控条件会按照节点的执行序排列,触发的监控条件上会显示该条件的设置值以及触发该条件的实际值。 -另外,用户点击某一行,会在计算图中跳转到对应节点,可以进一步查看节点信息分析异常结果出现的原因。点击`查看`进入张量检查视图可以查看触发的监测点信息以及调优向导,如图6所示。 - -### 重新检查 - -为了更详细地对节点进行监测分析,用户可以在修改监测点的节点,添加删除监测点后对当前轮次重新检查。`重新检查`按钮位于监测点列表右上角,如图3所示。 - -### 训练控制 - -监测点设置面板的下方是训练控制面板,该面板展示了调试器的训练控制功能,有`继续`、`暂停`、`结束`和`确定`四个按钮。 - -- `确定`代表训练向前执行若干个`轮次`,需要用户在上方的输入框内指定执行的`轮次`数目,直到监测点触发、或`轮次`执行完毕后暂停; -- `继续`代表训练一直执行,直到监测点触发后暂停、或运行至训练结束; -- `暂停`代表训练暂停; -- `结束`代表终止训练。 - -### 张量检查视图 - -![debugger_tensor_view](images/debugger_tensor_view.png) - -图6: 查看`张量`值 - -一些`张量`的维度过多,无法直接在主页进行展示。用户可以点击对应的`查看`按钮,在弹出的张量检查视图中查看`张量`值的详细信息。 - -如图6所示,张量检查视图将`张量`值展示在UI的中上位置,用户可以进行`维度选择`,点击`显示当前step`,`显示上一step`和`显示对比结果`对张量进行显示和对比(当前仅支持参数节点与上一轮次对比)。此外,用户可以设置切片进行`维度选择`来显示相应维度的`张量`。 - 
-视图的最上方展示了`节点信息`、`当前轮次`以及`统计信息`;视图的左侧展示了调优向导,当监测点命中时,将显示命中信息和相关的调优建议;视图的下方展示了张量关系图以及详细的`节点信息`。 - -通过张量关系图,可以分析当前张量是通过哪些张量计算出来的,还可以分析当前张量影响到了哪些常量。张量图中标注了命中监测点的条件的缩写,方便用户快速识别张量问题的传播路径。每个条件的缩写可以在“设置监测点”一节中查到。 - -## 使用调试器进行调试 - -1. 在调试器环境准备完成后,打开调试器界面,如下图所示: - - ![debugger_waiting](images/debugger_waiting.png) - - 图7: 调试器等待训练连接 - - 此时,调试器处于等待训练启动和连接的状态。 - -2. 在终端运行训练脚本。 - -3. 稍等片刻,在MindInsight UI上可以看到弹窗,提示选择是否使用推荐监测点,如下图所示: - - ![debugger_ask_recommend](images/debugger_ask_recommend.png) - - 图8: 等待用户选择是否使用推荐监测点 - -4. 稍后可以看到计算图显示在调试器界面,见图1。 - -5. 设置监测点,见图4。 - - 按图4所示,选中检测条件,并按图3添加或取消部分节点,调试器将监控这些节点在计算过程中是否存在满足监控条件的输出。 - 设置完监测点后,可以在控制面板设置轮次并点击`确定`向前训练指定轮次,或者直接点击`继续`继续训练。 - -6. 监测点触发,见图5。 - - 监测点触发后,用户查看对应的节点信息,通过张量检查视图找出异常原因,修改脚本,修复问题。 - -## 注意事项 - -- 场景支持: - - 调试器暂不支持分布式训练场景。 - - 调试器暂不支持推断场景。 - - 调试器暂不支持单机多卡/集群场景。 - - 调试器暂不支持连接多个训练进程。 - - 调试器暂不支持CPU场景。 - -- 性能影响: - - 使用调试器时,会对训练性能产生一定影响。 - - 设置的监测点数目过多时,可能会出现系统内存不足(Out-of-Memory)的异常。 - -- GPU场景: - - 在GPU场景下,只有满足条件的参数节点可以与自身的上一轮次结果作对比:使用`下一个节点`执行过的节点、使用`运行到该节点`时选中的节点、作为`监测点`输入的参数节点。其他情况均无法使用`上一轮次对比`功能。 - - 由于GPU上一个轮次是一个子图(而非完整的图),GPU上多图做重新检查时,只能重新检查当前的子图。 - -- 重新检查只检查当前有张量值的监测点。 -- 检查计算过程溢出需要用户开启异步Dump的全部溢出检测功能,开启方式请参照[异步Dump功能介绍](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#id5) -- 调试器展示的图是优化后的最终执行图。调用的算子可能已经与其它算子融合,或者在优化后改变了名称。 diff --git a/tutorials/training/source_zh_cn/advanced_use/distributed_training_ascend.md b/tutorials/training/source_zh_cn/advanced_use/distributed_training_ascend.md deleted file mode 100644 index e57537adf2a3821c64a36da12e420b6907a19db1..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/distributed_training_ascend.md +++ /dev/null @@ -1,648 +0,0 @@ -# 分布式并行训练 (Ascend) - -`Linux` `Ascend` `模型训练` `中级` `高级` - - - -- [分布式并行训练 (Ascend)](#分布式并行训练-ascend) - - [概述](#概述) - - [准备环节](#准备环节) - - [下载数据集](#下载数据集) - - [配置分布式环境变量](#配置分布式环境变量) - - [调用集合通信库](#调用集合通信库) - - [数据并行模式加载数据集](#数据并行模式加载数据集) - - [定义网络](#定义网络) - 
- [手动混合并行模式](#手动混合并行模式) - - [半自动并行模式](#半自动并行模式) - - [定义损失函数及优化器](#定义损失函数及优化器) - - [定义损失函数](#定义损失函数) - - [定义优化器](#定义优化器) - - [训练网络](#训练网络) - - [运行脚本](#运行脚本) - - [分布式训练模型参数保存和加载](#分布式训练模型参数保存和加载) - - [自动并行模式](#自动并行模式) - - [数据并行模式](#数据并行模式) - - [半自动并行模式](#半自动并行模式-1) - - [手动混合并行模式](#手动混合并行模式-1) - - [多机多卡训练](#多机多卡训练) - - - - - -## 概述 - -本篇教程我们主要讲解,如何在Ascend 910 AI处理器硬件平台上,利用MindSpore通过数据并行及自动并行模式训练ResNet-50网络。 -> 你可以在这里下载完整的样例代码: -> -> - -目录结构如下: - -```text -└─tutorial_code - ├─distributed_training - │ rank_table_16pcs.json - │ rank_table_8pcs.json - │ rank_table_2pcs.json - │ cell_wrapper.py - │ model_accu.py - │ resnet.py - │ resnet50_distributed_training.py - │ resnet50_distributed_training_gpu.py - │ resnet50_distributed_training_grad_accu.py - │ run.sh - │ run_gpu.sh - │ run_grad_accu.sh - │ run_cluster.sh -``` - -其中,`rank_table_16pcs.json`、`rank_table_8pcs.json`、`rank_table_2pcs.json`是配置当前多卡环境的组网信息文件。`resnet.py`、`resnet50_distributed_training.py`、`resnet50_distributed_training_gpu.py`和`resnet50_distributed_training_grad_accu.py`几个文件是定义网络结构的脚本。`run.sh`、`run_gpu.sh`、`run_grad_accu.sh`、`run_cluster.sh`是执行脚本。 - -此外在[定义网络](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html#id7)和[分布式训练模型参数保存和加载](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html#id13)小节中我们针对手动混合并行模式和半自动并行模式的使用做了特殊说明。 - -## 准备环节 - -### 下载数据集 - -本样例采用`CIFAR-10`数据集,由10类32*32的彩色图片组成,每类包含6000张图片。其中训练集共50000张图片,测试集共10000张图片。 - -> `CIFAR-10`数据集下载链接:。 - -将数据集下载并解压到本地路径下,解压后的文件夹为`cifar-10-batches-bin`。 - -### 配置分布式环境变量 - -在裸机环境(对比云上环境,即本地有Ascend 910 AI 处理器)进行分布式训练时,需要配置当前多卡环境的组网信息文件。如果使用华为云环境,因为云服务本身已经做好了配置,可以跳过本小节。 - -以Ascend 910 AI处理器为例,1个8卡环境的json配置文件示例如下,本样例将该配置文件命名为`rank_table_8pcs.json`。2卡环境配置可以参考样例代码中的`rank_table_2pcs.json`文件。 - -```json -{ - "version": "1.0", - "server_count": "1", - "server_list": [ - { - "server_id": "10.155.111.140", - "device": [ - {"device_id": "0","device_ip": 
"192.1.27.6","rank_id": "0"}, - {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"}, - {"device_id": "2","device_ip": "192.3.27.6","rank_id": "2"}, - {"device_id": "3","device_ip": "192.4.27.6","rank_id": "3"}, - {"device_id": "4","device_ip": "192.1.27.7","rank_id": "4"}, - {"device_id": "5","device_ip": "192.2.27.7","rank_id": "5"}, - {"device_id": "6","device_ip": "192.3.27.7","rank_id": "6"}, - {"device_id": "7","device_ip": "192.4.27.7","rank_id": "7"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" -} -``` - -其中需要根据实际训练环境修改的参数项有: - -- `server_count`表示参与训练的机器数量。 -- `server_id`表示当前机器的IP地址。 -- `device_id`表示卡物理序号,即卡所在机器中的实际序号。 -- `device_ip`表示集成网卡的IP地址,可以在当前机器执行指令`cat /etc/hccn.conf`,`address_x`的键值就是网卡IP地址。 -- `rank_id`表示卡逻辑序号,固定从0开始编号。 - -### 调用集合通信库 - -MindSpore分布式并行训练的通信使用了华为集合通信库`Huawei Collective Communication Library`(以下简称HCCL),可以在Ascend AI处理器配套的软件包中找到。同时`mindspore.communication.management`中封装了HCCL提供的集合通信接口,方便用户配置分布式信息。 -> HCCL实现了基于Ascend AI处理器的多机多卡通信,有一些使用限制,我们列出使用分布式服务常见的,详细的可以查看HCCL对应的使用文档。 -> -> - 单机场景下支持1、2、4、8卡设备集群,多机场景下支持8*n卡设备集群。 -> - 每台机器的0-3卡和4-7卡各为1个组网,2卡和4卡训练时卡必须相连且不支持跨组网创建集群。 -> - 组建多机集群时需要保证各台机器使用同一交换机。 -> - 服务器硬件架构及操作系统需要是SMP(Symmetrical Multi-Processing,对称多处理器)处理模式。 - -下面是调用集合通信库样例代码: - -```python -import os -from mindspore import context -from mindspore.communication.management import init - -if __name__ == "__main__": - context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", device_id=int(os.environ["DEVICE_ID"])) - init() - ... 
-``` - -其中, - -- `mode=context.GRAPH_MODE`:使用分布式训练需要指定运行模式为图模式(PyNative模式当前仅支持数据并行)。 -- `device_id`:卡的物理序号,即卡所在机器中的实际序号。 -- `init`:使能HCCL通信,并完成分布式训练初始化操作。 - -## 数据并行模式加载数据集 - -分布式训练时,数据是以数据并行的方式导入的。下面我们以CIFAR-10数据集为例,介绍以数据并行方式导入CIFAR-10数据集的方法,`data_path`是指数据集的路径,即`cifar-10-batches-bin`文件夹的路径。 - -```python -from mindspore import dtype as mstype -import mindspore.dataset as ds -import mindspore.dataset.transforms.c_transforms as C -import mindspore.dataset.vision.c_transforms as vision -from mindspore.communication.management import get_rank, get_group_size - -def create_dataset(data_path, repeat_num=1, batch_size=32, rank_id=0, rank_size=1): - resize_height = 224 - resize_width = 224 - rescale = 1.0 / 255.0 - shift = 0.0 - - # get rank_id and rank_size - rank_id = get_rank() - rank_size = get_group_size() - data_set = ds.Cifar10Dataset(data_path, num_shards=rank_size, shard_id=rank_id) - - # define map operations - random_crop_op = vision.RandomCrop((32, 32), (4, 4, 4, 4)) - random_horizontal_op = vision.RandomHorizontalFlip() - resize_op = vision.Resize((resize_height, resize_width)) - rescale_op = vision.Rescale(rescale, shift) - normalize_op = vision.Normalize((0.4465, 0.4822, 0.4914), (0.2010, 0.1994, 0.2023)) - changeswap_op = vision.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - c_trans = [random_crop_op, random_horizontal_op] - c_trans += [resize_op, rescale_op, normalize_op, changeswap_op] - - # apply map operations on images - data_set = data_set.map(operations=type_cast_op, input_columns="label") - data_set = data_set.map(operations=c_trans, input_columns="image") - - # apply shuffle operations - data_set = data_set.shuffle(buffer_size=10) - - # apply batch operations - data_set = data_set.batch(batch_size=batch_size, drop_remainder=True) - - # apply repeat operations - data_set = data_set.repeat(repeat_num) - - return data_set -``` - -其中,与单机不同的是,在数据集接口需要传入`num_shards`和`shard_id`参数,分别对应卡的数量和逻辑序号,建议通过HCCL接口获取: - -- `get_rank`:获取当前设备在集群中的ID。 -- 
`get_group_size`:获取集群数量。 - -> 数据并行场景加载数据集时,建议对每卡指定相同的数据集文件,若是各卡加载的数据集不同,可能会影响计算精度。 - -## 定义网络 - -数据并行及自动并行模式下,网络定义方式与单机写法一致,可以参考[ResNet网络样例脚本](https://gitee.com/mindspore/docs/blob/master/tutorials/tutorial_code/resnet/resnet.py)。 - -本章节重点介绍手动混合并行和半自动并行模式的网络定义方法。 - -### 手动混合并行模式 - -手动混合并行模式在数据并行模式的基础上,对`parameter`增加了模型并行`layerwise_parallel`配置,包含此配置的`parameter`将以切片的形式保存并参与计算,在优化器计算时不会进行梯度累加。在该模式下,框架不会自动插入并行算子前后需要的计算和通信操作,为了保证计算逻辑的正确性,用户需要手动推导并写在网络结构中,适合对并行原理深入了解的用户使用。 - -以下面的代码为例,将`self.weight`指定为模型并行配置,即`self.weight`和`MatMul`的输出在第二维`channel`上存在切分。这时再在第二维上进行`ReduceSum`得到的仅是单卡累加结果,还需要引入`AllReduce.Sum`通信操作对每卡的结果做加和。关于并行算子的推导原理可以参考这篇[设计文档](https://www.mindspore.cn/doc/note/zh-CN/master/design/mindspore/distributed_training_design.html#id10)。 - -```python -from mindspore import Tensor -import mindspore.ops as ops -from mindspore import dtype as mstype -import mindspore.nn as nn - -class HybridParallelNet(nn.Cell): - def __init__(self): - super(HybridParallelNet, self).__init__() - # initialize the weight which is sliced at the second dimension - weight_init = np.random.rand(512, 128/2).astype(np.float32) - self.weight = Parameter(Tensor(weight_init), layerwise_parallel=True) - self.fc = ops.MatMul() - self.reduce = ops.ReduceSum() - self.allreduce = ops.AllReduce(op='sum') - - def construct(self, x): - x = self.fc(x, self.weight) - x = self.reduce(x, -1) - x = self.allreduce(x) - return x -``` - -### 半自动并行模式 - -半自动并行模式相较于自动并行模式需要用户手动配置并行策略进行调优。关于算子并行策略的定义可以参考这篇[设计文档](https://www.mindspore.cn/doc/note/zh-CN/master/design/mindspore/distributed_training_design.html#id10)。 - -以前述的`HybridParallelNet`为例,在半自动并行模式下的脚本代码如下,`MatMul`的切分策略为`{(1, 1),(1, 2)}`,指定`self.weight`在第二维度上被切分两份。 - -```python -from mindspore import Tensor -import mindspore.ops as ops -from mindspore import dtype as mstype -import mindspore.nn as nn - -class SemiAutoParallelNet(nn.Cell): - def __init__(self): - super(SemiAutoParallelNet, self).__init__() - # initialize full tensor weight - weight_init = 
np.random.rand(512, 128).astype(np.float32) - self.weight = Parameter(Tensor(weight_init)) - # set shard strategy - self.fc = ops.MatMul().shard({(1, 1),(1, 2)}) - self.reduce = ops.ReduceSum() - - def construct(self, x): - x = self.fc(x, self.weight) - x = self.reduce(x, -1) - return x -``` - -> - 半自动并行模式时,未配置策略的算子默认以数据并行方式执行。 -> - 自动并行模式支持通过策略搜索算法自动获取高效的算子并行策略,同时也支持用户对算子手动配置特定的并行策略。 -> - 如果某个`parameter`被多个算子使用,则每个算子对这个`parameter`的切分策略需要保持一致,否则将报错。 - -## 定义损失函数及优化器 - -### 定义损失函数 - -自动并行以算子为粒度切分模型,通过算法搜索得到最优并行策略,所以与单机训练不同的是,为了有更好的并行训练效果,损失函数建议使用小算子来实现。 - -在Loss部分,我们采用`SoftmaxCrossEntropyWithLogits`的展开形式,即按照数学公式,将其展开为多个小算子进行实现,样例代码如下: - -```python -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import dtype as mstype -import mindspore.nn as nn - -class SoftmaxCrossEntropyExpand(nn.Cell): - def __init__(self, sparse=False): - super(SoftmaxCrossEntropyExpand, self).__init__() - self.exp = ops.Exp() - self.sum = ops.ReduceSum(keep_dims=True) - self.onehot = ops.OneHot() - self.on_value = Tensor(1.0, mstype.float32) - self.off_value = Tensor(0.0, mstype.float32) - self.div = ops.RealDiv() - self.log = ops.Log() - self.sum_cross_entropy = ops.ReduceSum(keep_dims=False) - self.mul = ops.Mul() - self.mul2 = ops.Mul() - self.mean = ops.ReduceMean(keep_dims=False) - self.sparse = sparse - self.max = ops.ReduceMax(keep_dims=True) - self.sub = ops.Sub() - - def construct(self, logit, label): - logit_max = self.max(logit, -1) - exp = self.exp(self.sub(logit, logit_max)) - exp_sum = self.sum(exp, -1) - softmax_result = self.div(exp, exp_sum) - if self.sparse: - label = self.onehot(label, ops.shape(logit)[1], self.on_value, self.off_value) - softmax_result_log = self.log(softmax_result) - loss = self.sum_cross_entropy((self.mul(softmax_result_log, label)), -1) - loss = self.mul2(ops.scalar_to_array(-1.0), loss) - loss = self.mean(loss, -1) - - return loss -``` - -### 定义优化器 - -采用`Momentum`优化器作为参数更新工具,这里定义与单机一致,不再展开,具体可以参考样例代码中的实现。 - -## 训练网络 - 
-`context.set_auto_parallel_context`是配置并行训练参数的接口,必须在初始化网络之前调用。常用参数包括: - -- `parallel_mode`:分布式并行模式,默认为单机模式`ParallelMode.STAND_ALONE`。可选数据并行`ParallelMode.DATA_PARALLEL`及自动并行`ParallelMode.AUTO_PARALLEL`。 -- `parameter_broadcast`:训练开始前自动广播0号卡上数据并行的参数权值到其他卡上,默认值为`False`。 -- `gradients_mean`:反向计算时,框架内部会将数据并行参数分散在多台机器的梯度值进行收集,得到全局梯度值后再传入优化器中更新。默认值为`False`,设置为True对应`allreduce_mean`操作,False对应`allreduce_sum`操作。 -- `device_num`和`global_rank`建议采用默认值,框架内会调用HCCL接口获取。 - -> 更多分布式并行配置项用户请参考[编程指南](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/auto_parallel.html)。 - -如脚本中存在多个网络用例,请在执行下个用例前调用`context.reset_auto_parallel_context`将所有参数还原到默认值。 - -在下面的样例中我们指定并行模式为自动并行,用户如需切换为数据并行模式只需将`parallel_mode`改为`DATA_PARALLEL`。 - -```python -from mindspore import context, Model -from mindspore.nn.optim.momentum import Momentum -from mindspore.train.callback import LossMonitor -from mindspore.context import ParallelMode -from resnet import resnet50 - -device_id = int(os.getenv('DEVICE_ID')) -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") -context.set_context(device_id=device_id) # set device_id - -def test_train_cifar(epoch_size=10): - context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL, gradients_mean=True) - loss_cb = LossMonitor() - dataset = create_dataset(data_path) - batch_size = 32 - num_classes = 10 - net = resnet50(batch_size, num_classes) - loss = SoftmaxCrossEntropyExpand(sparse=True) - opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) - model = Model(net, loss_fn=loss, optimizer=opt) - model.train(epoch_size, dataset, callbacks=[loss_cb], dataset_sink_mode=True) -``` - -其中, - -- `dataset_sink_mode=True`:表示采用数据集的下沉模式,即训练的计算下沉到硬件平台中执行。 -- `LossMonitor`:能够通过回调函数返回Loss值,用于监控损失函数。 - -## 运行脚本 - -上述已将训练所需的脚本编辑好了,接下来通过命令调用对应的脚本。 - -目前MindSpore分布式执行采用单卡单进程运行方式,即每张卡上运行1个进程,进程数量与使用的卡的数量一致。其中,0卡在前台执行,其他卡放在后台执行。每个进程创建1个目录,用来保存日志信息以及算子编译信息。下面以使用8张卡的分布式训练脚本为例,演示如何运行脚本: - -```bash -#!/bin/bash - -echo 
"==============================================================================================================" -echo "Please run the script as: " -echo "bash run.sh DATA_PATH RANK_SIZE" -echo "For example: bash run.sh /path/dataset 8" -echo "It is better to use the absolute path." -echo "==============================================================================================================" -DATA_PATH=$1 -export DATA_PATH=${DATA_PATH} -RANK_SIZE=$2 - -EXEC_PATH=$(pwd) - -test_dist_8pcs() -{ - export RANK_TABLE_FILE=${EXEC_PATH}/rank_table_8pcs.json - export RANK_SIZE=8 -} - -test_dist_2pcs() -{ - export RANK_TABLE_FILE=${EXEC_PATH}/rank_table_2pcs.json - export RANK_SIZE=2 -} - -test_dist_${RANK_SIZE}pcs - -for((i=1;i<${RANK_SIZE};i++)) -do - rm -rf device$i - mkdir device$i - cp ./resnet50_distributed_training.py ./resnet.py ./device$i - cd ./device$i - export DEVICE_ID=$i - export RANK_ID=$i - echo "start training for device $i" - env > env$i.log - pytest -s -v ./resnet50_distributed_training.py > train.log$i 2>&1 & - cd ../ -done -rm -rf device0 -mkdir device0 -cp ./resnet50_distributed_training.py ./resnet.py ./device0 -cd ./device0 -export DEVICE_ID=0 -export RANK_ID=0 -echo "start training for device 0" -env > env0.log -pytest -s -v ./resnet50_distributed_training.py > train.log0 2>&1 -if [ $? 
-eq 0 ];then - echo "training success" -else - echo "training failed" - exit 2 -fi -cd ../ -``` - -脚本需要传入变量`DATA_PATH`和`RANK_SIZE`,分别表示数据集的绝对路径和卡的数量。 - -分布式相关的环境变量有, - -- `RANK_TABLE_FILE`:组网信息文件的路径。 -- `DEVICE_ID`:当前卡在机器上的实际序号。 -- `RANK_ID`:当前卡的逻辑序号。 - -其余环境变量请参考安装教程中的配置项。 - -运行时间大约在5分钟内,主要时间是用于算子的编译,实际训练时间在20秒内。用户可以通过`ps -ef | grep pytest`来监控任务进程。 - -日志文件保存到`rank`所对应的`device0`、 `device1`......目录下,`env.log`中记录了环境变量的相关信息,关于Loss部分结果保存在`train.log`中,示例如下: - -```text -epoch: 1 step: 156, loss is 2.0084016 -epoch: 2 step: 156, loss is 1.6407638 -epoch: 3 step: 156, loss is 1.6164391 -epoch: 4 step: 156, loss is 1.6838071 -epoch: 5 step: 156, loss is 1.6320667 -epoch: 6 step: 156, loss is 1.3098773 -epoch: 7 step: 156, loss is 1.3515002 -epoch: 8 step: 156, loss is 1.2943741 -epoch: 9 step: 156, loss is 1.2316195 -epoch: 10 step: 156, loss is 1.1533381 -``` - -## 分布式训练模型参数保存和加载 - -在MindSpore中,支持四种分布式并行训练模式,即自动并行模式(Auto Parallel)、数据并行模式(Data Parallel)、半自动并行模式(Semi Auto Parallel)、手动混合并行模式(Hybrid Parallel),下面分别介绍四种分布式并行训练模式下模型的保存和加载。分布式训练进行模型参数的保存之前,需要先按照本教程配置分布式环境变量和集合通信库。 - -### 自动并行模式 - -自动并行模式(Auto Parallel)下模型参数的保存和加载与非分布式训练的模型参数保存和加载用法相同,只需在本教程训练网络步骤中的`test_train_cifar`方法中添加配置`CheckpointConfig`和`ModelCheckpoint`,即可实现模型参数的保存,具体代码如下: - -```python -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig - -def test_train_cifar(epoch_size=10): - context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL, gradients_mean=True) - loss_cb = LossMonitor() - dataset = create_dataset(data_path) - batch_size = 32 - num_classes = 10 - net = resnet50(batch_size, num_classes) - loss = SoftmaxCrossEntropyExpand(sparse=True) - opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) - ckpt_config = CheckpointConfig() - ckpt_callback = ModelCheckpoint(prefix='auto_parallel', config=ckpt_config) - model = Model(net, loss_fn=loss, optimizer=opt) - model.train(epoch_size, dataset, callbacks=[loss_cb, ckpt_callback], 
dataset_sink_mode=True) -``` - -保存好checkpoint文件后,用户可以很容易加载模型参数进行推理或再训练场景,如用于再训练场景可使用如下代码加载模型: - -```python -from mindspore import load_checkpoint, load_param_into_net - -net = resnet50(batch_size=32, num_classes=10) -# The parameter for load_checkpoint is a .ckpt file which has been successfully saved -param_dict = load_checkpoint('...') -load_param_into_net(net, param_dict) -``` - -checkpoint配置策略和保存方法可以参考[模型参数的保存和加载](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#checkpoint)。 - -> 默认情况下,对于网络中切分的参数将会采用合并保存,对于参数量过大需要采用切片保存及推理的场景可以参考[分布式推理](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference_ascend_910.html#id1)。 - -### 数据并行模式 - -数据并行模式(Data Parallel)下checkpoint的使用方法和自动并行模式(Auto Parallel)一样,只需要将`test_train_cifar`中 - -```python -context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL, gradients_mean=True) -``` - -修改为: - -```python -context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) -``` - -> 数据并行场景下加载模型参数时建议每卡加载相同的checkpoint文件,避免造成计算误差,或者可以打开`parameter_broadcast`开关将0号卡的参数广播到其他卡上。 - -### 半自动并行模式 - -半自动并行模式(Semi Auto Parallel)下checkpoint使用方法,与自动并行模式(Auto Parallel)和数据并行模式(Data Parallel)的用法相同,不同之处在于网络的定义,半自动并行模式(Semi Auto Parallel)下网络模型的定义请参考本教程中定义网络部分的[半自动并行模式](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html#id9)。 - -保存模型时,可以使用如下代码来实现: - -```python -... -net = SemiAutoParallelNet() -... 
-ckpt_config = CheckpointConfig()
-ckpt_callback = ModelCheckpoint(prefix='semi_auto_parallel', config=ckpt_config)
-```
-
-加载模型时,可以使用如下代码来实现:
-
-```python
-net = SemiAutoParallelNet()
-# The parameter for load_checkpoint is a .ckpt file which has been successfully saved
-param_dict = load_checkpoint('...')
-load_param_into_net(net, param_dict)
-```
-
-以上介绍的三种并行训练模式,checkpoint文件的保存方式都是每张卡上均保存完整的checkpoint文件,在以上三种并行训练模式上,用户还可以选择每张卡上只保存本卡的checkpoint文件,以半自动并行模式(Semi Auto Parallel)为例,进行说明。
-
-只需要改动设置checkpoint保存策略的代码,将`CheckpointConfig`中的`integrated_save`参数设置为False,便可实现每张卡上只保存本卡的checkpoint文件,具体改动如下:
-
-将checkpoint配置策略由:
-
-```python
-# config checkpoint
-ckpt_config = CheckpointConfig(keep_checkpoint_max=1)
-```
-
-改为:
-
-```python
-# config checkpoint
-ckpt_config = CheckpointConfig(keep_checkpoint_max=1, integrated_save=False)
-```
-
-需要注意的是,如果用户选择了这种checkpoint保存方式,那么就需要用户自己对切分的checkpoint进行保存和加载,以便进行后续的推理或再训练。具体用法可参考[对保存的checkpoint文件做合并处理](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/save_load_model_hybrid_parallel.html#checkpoint)。
-
-### 手动混合并行模式
-
-手动混合并行模式(Hybrid Parallel)的模型参数保存和加载请参考[手动设置并行场景模型参数的保存和加载](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/save_load_model_hybrid_parallel.html)。
-
-## 多机多卡训练
-
-前面的章节,对MindSpore的分布式训练进行了介绍,都是基于单机8卡的Ascend环境,使用多机进行分布式训练,可以更大地提升训练速度。
-在Ascend环境下,跨机器的NPU单元的通信与单机内各个NPU单元的通信一样,依旧是通过HCCL进行通信,区别在于,单机内的NPU单元天然的是互通的,而跨机器的则需要保证两台机器的网络是互通的。
-在确认了机器之间的NPU单元的网络是通畅后,配置多机的json配置文件,本教程以16卡的配置文件为例。需要注意的是,在多机的json文件配置中,要求rank_id的排序,与server_id的字典序一致。
-
-```json
-{
- "version": "1.0",
- "server_count": "2",
- "server_list": [
- {
- "server_id": "10.155.111.140",
- "device": [
- {"device_id": "0","device_ip": "192.1.27.6","rank_id": "0"},
- {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"},
- {"device_id": "2","device_ip": "192.3.27.6","rank_id": "2"},
- {"device_id": "3","device_ip": "192.4.27.6","rank_id": "3"},
- {"device_id": "4","device_ip": "192.1.27.7","rank_id": "4"},
- 
{"device_id": "5","device_ip": "192.2.27.7","rank_id": "5"}, - {"device_id": "6","device_ip": "192.3.27.7","rank_id": "6"}, - {"device_id": "7","device_ip": "192.4.27.7","rank_id": "7"}], - "host_nic_ip": "reserve" - }, - { - "server_id": "10.155.111.141", - "device": [ - {"device_id": "0","device_ip": "192.1.27.8","rank_id": "8"}, - {"device_id": "1","device_ip": "192.2.27.8","rank_id": "9"}, - {"device_id": "2","device_ip": "192.3.27.8","rank_id": "10"}, - {"device_id": "3","device_ip": "192.4.27.8","rank_id": "11"}, - {"device_id": "4","device_ip": "192.1.27.9","rank_id": "12"}, - {"device_id": "5","device_ip": "192.2.27.9","rank_id": "13"}, - {"device_id": "6","device_ip": "192.3.27.9","rank_id": "14"}, - {"device_id": "7","device_ip": "192.4.27.9","rank_id": "15"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" -} -``` - -准备好配置文件后,可以进行分布式多机训练脚本的组织,在以2机16卡为例,两台机器上编写的脚本与单机8卡的运行脚本类似,区别在于指定不同的rank_id变量。 - -```bash -#!/bin/bash - -echo "==============================================================================================================" -echo "Please run the script as: " -echo "bash run.sh DATA_PATH RANK_TABLE_FILE RANK_SIZE RANK_START" -echo "For example: bash run.sh /path/dataset /path/rank_table.json 16 0" -echo "It is better to use the absolute path." 
-echo "==============================================================================================================" - -execute_path=$(pwd) -echo ${execute_path} -script_self=$(readlink -f "$0") -self_path=$(dirname "${script_self}") -echo ${self_path} - -export DATA_PATH=$1 -export RANK_TABLE_FILE=$2 -export RANK_SIZE=$3 -RANK_START=$4 -DEVICE_START=0 -for((i=0;i<=7;i++)); -do - export RANK_ID=$[i+RANK_START] - export DEVICE_ID=$[i+DEVICE_START] - rm -rf ${execute_path}/device_$RANK_ID - mkdir ${execute_path}/device_$RANK_ID - cd ${execute_path}/device_$RANK_ID || exit - pytest -s ${self_path}/resnet50_distributed_training.py >train$RANK_ID.log 2>&1 & -done -``` - -上面列出的参考脚本,所要求的代码组织结构如下,脚本中会获取脚本所在路径以及命令执行的路径,并且将所有任务都置于后台执行。 - -```text -└─tutorial_code - ├─distributed_training - │ resnet50_distributed_training.py - │ run_cluster.sh -``` - -执行时,两台机器分别执行如下命令,其中rank_table.json按照本章节展示的16卡的分布式json文件参考配置。 - -```bash -# server0 -bash run.sh /path/dataset /path/rank_table.json 16 0 -# server1 -bash run.sh /path/dataset /path/rank_table.json 16 8 -``` diff --git a/tutorials/training/source_zh_cn/advanced_use/distributed_training_gpu.md b/tutorials/training/source_zh_cn/advanced_use/distributed_training_gpu.md deleted file mode 100644 index cf84070f72ca97c61b1be3888266bcd375eff807..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/distributed_training_gpu.md +++ /dev/null @@ -1,153 +0,0 @@ -# 分布式并行训练 (GPU) - -`Linux` `GPU` `模型训练` `中级` `高级` - - - -- [分布式并行训练 (GPU)](#分布式并行训练-gpu) - - [概述](#概述) - - [准备环节](#准备环节) - - [下载数据集](#下载数据集) - - [配置分布式环境](#配置分布式环境) - - [调用集合通信库](#调用集合通信库) - - [定义网络](#定义网络) - - [运行脚本](#运行脚本) - - [运行多机脚本](#运行多机脚本) - - - - - -## 概述 - -本篇教程我们主要讲解,如何在GPU硬件平台上,利用MindSpore的数据并行及自动并行模式训练ResNet-50网络。 - -## 准备环节 - -### 下载数据集 - -本样例采用`CIFAR-10`数据集,数据集的下载以及加载方式和Ascend 910 AI处理器一致。 - -数据集的下载和加载方式可参考:。 - -### 配置分布式环境 - -- `OpenMPI-4.0.3`:MindSpore采用的多进程通信库。 - - OpenMPI-4.0.3源码下载地址:,选择`openmpi-4.0.3.tar.gz`下载。 - - 
参考OpenMPI官网教程安装:。 - -- `NCCL-2.7.6`:Nvidia集合通信库。 - - NCCL-2.7.6下载地址:。 - - 参考NCCL官网教程安装:。 - -- 主机间免密登陆(涉及多机训练时需要)。若训练涉及多机,则需要配置多机间免密登陆,可参考以下步骤进行配置: - 1. 每台主机确定同一个用户作为登陆用户(不推荐root); - 2. 执行`ssh-keygen -t rsa -P ""`生成密钥; - 3. 执行`ssh-copy-id DEVICE-IP`设置需要免密登陆的机器IP; - 4. 执行`ssh DEVICE-IP`,若不需要输入密码即可登录,则说明以上配置成功; - 5. 在所有机器上执行以上命令,确保两两互通。 - -### 调用集合通信库 - -在GPU硬件平台上,MindSpore分布式并行训练的通信使用的是NCCL。 - -> GPU平台上,MindSpore暂不支持用户进行: -> -> `get_local_rank`、`get_local_size`、`get_world_rank_from_group_rank`、`get_group_rank_from_world_rank`、`create_group`操作。 - -下面是调用集合通信库的代码样例: - -```python -from mindspore import context -from mindspore.communication.management import init - -if __name__ == "__main__": - context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - init("nccl") - ... -``` - -其中, - -- `mode=context.GRAPH_MODE`:使用分布式训练需要指定运行模式为图模式(PyNative模式不支持并行)。 -- `init("nccl")`:使能NCCL通信,并完成分布式训练初始化操作。 - -## 定义网络 - -在GPU硬件平台上,网络的定义和Ascend 910 AI处理器一致。 - -网络、优化器、损失函数的定义可参考:。 - -## 运行脚本 - -在GPU硬件平台上,MindSpore采用OpenMPI的`mpirun`进行分布式训练。下面以使用8张卡的分布式训练脚本为例,演示如何运行脚本: - -> 你可以在这里找到样例的运行脚本: -> -> 。 -> -> 如果通过root用户执行脚本,`mpirun`需要加上`--allow-run-as-root`参数。 - -```bash -#!/bin/bash - -echo "==============================================================================================================" -echo "Please run the script as: " -echo "bash run_gpu.sh DATA_PATH" -echo "For example: bash run_gpu.sh /path/dataset" -echo "It is better to use the absolute path." 
-echo "==============================================================================================================" -DATA_PATH=$1 -export DATA_PATH=${DATA_PATH} - -rm -rf device -mkdir device -cp ./resnet50_distributed_training.py ./resnet.py ./device -cd ./device -echo "start training" -mpirun -n 8 pytest -s -v ./resnet50_distributed_training.py > train.log 2>&1 & -``` - -脚本会在后台运行,日志文件会保存到device目录下,共跑了10个epoch,每个epoch有234个step,关于Loss部分结果保存在train.log中。将loss值grep出来后,示例如下: - -```text -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -epoch: 1 step: 1, loss is 2.3025854 -``` - -## 运行多机脚本 - -若训练涉及多机,则需要额外在`mpirun`命令中设置多机配置。你可以直接在`mpirun`命令中用`-H`选项进行设置,比如`mpirun -n 16 -H DEVICE1_IP:8,DEVICE2_IP:8 python hello.py`,表示在ip为DEVICE1_IP和DEVICE2_IP的机器上分别起8个进程运行程序;或者也可以构造一个如下这样的hostfile文件,并将其路径传给`mpirun`的`--hostfile`的选项。hostfile文件每一行格式为`[hostname] slots=[slotnum]`,hostname可以是ip或者主机名。 - -```bash -DEVICE1 slots=8 -DEVICE2 slots=8 -``` - -两机十六卡的执行脚本如下,需要传入变量`DATA_PATH`和`HOSTFILE`,表示数据集的路径和hostfile文件的路径。更多mpirun的选项设置可见OpenMPI的官网。 - -```bash -#!/bin/bash - -DATA_PATH=$1 -HOSTFILE=$2 - -rm -rf device -mkdir device -cp ./resnet50_distributed_training.py ./resnet.py ./device -cd ./device -echo "start training" -mpirun -n 16 --hostfile $HOSTFILE -x DATA_PATH=$DATA_PATH -x PATH -mca pml ob1 pytest -s -v ./resnet50_distributed_training.py > train.log 2>&1 & -``` - -在GPU上进行分布式训练时,模型参数的保存和加载可参考[分布式训练模型参数保存和加载](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html#id15) diff --git a/tutorials/training/source_zh_cn/advanced_use/distributed_training_tutorials.rst b/tutorials/training/source_zh_cn/advanced_use/distributed_training_tutorials.rst deleted file mode 100644 index 
504f487d74228a2ad9ddc6e6ee7d56e0da33f656..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/distributed_training_tutorials.rst +++ /dev/null @@ -1,28 +0,0 @@ -分布式并行训练 -=============== - -在深度学习中,当数据集和参数量的规模越来越大,训练所需的时间和硬件资源会随之增加,最后会变成制约训练的瓶颈。分布式并行训练,可以降低对内存、计算性能等硬件的需求,是进行训练的重要优化手段。根据并行的原理及模式不同,业界主流的并行类型有以下几种: - -- 数据并行(Data Parallel):对数据进行切分的并行模式,一般按照batch维度切分,将数据分配到各个计算单元(worker)中,进行模型计算。 -- 模型并行(Model Parallel):对模型进行切分的并行模式。MindSpore中支持层内模型并行模式,即对参数切分后分配到各个计算单元中进行训练。 -- 混合并行(Hybrid Parallel):指涵盖数据并行和模型并行的并行模式。 - -当前MindSpore也提供分布式并行训练的功能。它支持了多种模式包括: - -- `DATA_PARALLEL`:数据并行模式。 -- `AUTO_PARALLEL`:自动并行模式,融合了数据并行、模型并行及混合并行的1种分布式并行模式,可以自动建立代价模型,找到训练时间较短的并行策略,为用户选择1种并行模式。MindSpore提供了如下的两种不同的策略搜索算法: - - - `dynamic_programming` :动态规划策略搜索算法。能够搜索出代价模型刻画的最优策略,但在搜索巨大网络模型的并行策略时耗时较长。其代价模型是围绕Ascend 910芯片基于内存的计算开销和通信开销对训练时间建模。 - - `recursive_programming` :双递归策略搜索算法。对于巨大网络以及大规模多卡切分能够保证瞬间生成最优策略。其基于符号运算的代价模型可以自由适配不同的加速器集群。 - -- `SEMI_AUTO_PARALLEL`:半自动并行模式,相较于自动并行,该模式需要用户对算子手动配置切分策略实现并行。 -- `HYBRID_PARALLEL`:在MindSpore中特指用户通过手动切分模型实现混合并行的场景。 - -.. 
toctree::
- :maxdepth: 1
-
- distributed_training_ascend
- distributed_training_gpu
- apply_host_device_training
- apply_parameter_server_training
- save_load_model_hybrid_parallel
diff --git a/tutorials/training/source_zh_cn/advanced_use/dump_in_graph_mode.md b/tutorials/training/source_zh_cn/advanced_use/dump_in_graph_mode.md
deleted file mode 100644
index a4f284f97022ea613898fbcf0bb0accc7d13c25b..0000000000000000000000000000000000000000
--- a/tutorials/training/source_zh_cn/advanced_use/dump_in_graph_mode.md
+++ /dev/null
@@ -1,526 +0,0 @@
-# 使用Dump功能在Graph模式调试
-
-`Linux` `Ascend` `GPU` `CPU` `模型调优` `中级` `高级`
-
-
-
-- [使用Dump功能在Graph模式调试](#使用dump功能在graph模式调试)
-    - [概述](#概述)
-        - [调试过程](#调试过程)
-        - [适用场景](#适用场景)
-    - [Dump功能说明](#dump功能说明)
-    - [同步Dump](#同步dump)
-        - [同步Dump操作步骤](#同步dump操作步骤)
-        - [同步Dump数据对象目录](#同步dump数据对象目录)
-        - [同步Dump数据文件介绍](#同步dump数据文件介绍)
-        - [同步Dump数据分析样例](#同步dump数据分析样例)
-    - [异步Dump](#异步dump)
-        - [异步Dump操作步骤](#异步dump操作步骤)
-        - [异步Dump数据对象目录](#异步dump数据对象目录)
-        - [异步Dump数据文件介绍](#异步dump数据文件介绍)
-        - [异步Dump数据分析样例](#异步dump数据分析样例)
-
-
-
-
-
-## 概述
-
-为了对训练过程进行分析,用户需要感知训练过程中算子的输入和输出数据。
-
-- 对于动态图模式,MindSpore提供了Python原生执行能力,用户可以在网络脚本运行过程中查看记录相应的输入输出,详情见[使用PyNative模式调试](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/debug_in_pynative_mode.html) 。
-
-- 对于静态图模式,MindSpore提供了Dump功能,用来将模型训练中的图以及算子的输入输出数据保存到磁盘文件。
-
-本文针对静态图模式,介绍如何基于Dump功能对网络数据进行分析对比。
-
-### 调试过程
-
-1. 从脚本找到对应的算子
-
-    使用Dump功能将自动生成最终执行图的IR文件(IR文件中包含了算子全名,和算子在计算图中输入和输出的依赖,也包含从算子到相应脚本代码的Trace信息),IR文件可以用`vi`命令查看,Dump功能的配置见[同步Dump操作步骤](#id5)和[异步Dump操作步骤](#id10),Dump输出的目录结构见[同步Dump数据对象目录](#id6)和[异步Dump数据对象目录](#id11)。然后通过图文件找到脚本中代码对应的算子,参考[同步Dump数据分析样例](#id8)和[异步Dump数据分析样例](#id13)。
-
-2. 从算子到Dump数据
-
-    在了解脚本和算子的映射关系后,可以确定想要分析的算子名称,从而找到算子对应的dump文件,参考[同步Dump数据对象目录](#id6)和[异步Dump数据对象目录](#id11)。
-
-3. 分析Dump数据
-
-    通过解析Dump数据,可以与其他第三方框架进行对比。同步Dump数据格式参考[同步Dump数据文件介绍](#id7),异步Dump数据格式参考[异步Dump数据文件介绍](#id12)。
-
-### 适用场景
-
-1. 
静态图算子结果分析。 - - 通过Dump功能获得的IR图,可以了解脚本代码与执行算子的映射关系(详情见[MindSpore IR简介](https://www.mindspore.cn/doc/note/zh-CN/master/design/mindspore/mindir.html#id1))。结合执行算子的输入和输出数据,可以分析训练过程中可能存在的溢出、梯度爆炸与消失等问题,反向跟踪到脚本中可能存在问题的代码。 - -2. 特征图分析。 - - 通过获取图层的输出数据,分析特征图的信息。 - -3. 模型迁移。 - - 在将模型从第三方框架(TensorFlow、PyTorch)迁移到MindSpore的场景中,通过比对相同位置算子的输出数据,分析第三方框架和MindSpore对于同一模型的训练结果是否足够接近,来定位模型的精度问题。 - -## Dump功能说明 - -MindSpore提供了同步Dump与异步Dump两种模式: - -- 同步Dump的机制是在网络训练过程中每个step执行结束后, Host侧发起Dump动作,从Device上拷贝算子地址里面的数据到Host,并保存文件。同步Dump会默认关闭算子间的内存复用,避免读到脏数据。 -- 异步Dump是专门针对Ascend整图下沉而开发的功能,可以一边执行算子一边dump数据,一个算子执行结束后立即dump数据,因此开启内存复用也可以生成正确的数据,但是相应的网络训练的速度会较慢。 - -不同模式所需要的配置文件和dump出来的数据格式不同: - -- 同步模式较异步模式会占用更多内存,但易用性更好。 -- 一般对于中小型网络(如ResNet)等,推荐优先使用同步Dump模式。在网络占用内存不大的情况下,请优先使用同步Dump。若开启同步Dump后,因为模型过大导致需要的内存超过系统限制,再使用异步Dump。 -- 在Ascend上开启同步Dump的时候,待Dump的算子会自动关闭内存复用。 -- 同步Dump目前支持Ascend、GPU和CPU上的图模式,暂不支持PyNative模式。 -- 异步Dump仅支持Ascend上的图模式,不支持PyNative模式。开启异步Dump的时候不会关闭内存复用。 - -## 同步Dump - -### 同步Dump操作步骤 - -1. 创建json格式的配置文件,JSON文件的名称和位置可以自定义设置。 - - ```json - { - "common_dump_settings": { - "dump_mode": 0, - "path": "/absolute_path", - "net_name": "ResNet50", - "iteration": 0, - "input_output": 0, - "kernels": ["Default/Conv-op12"], - "support_device": [0,1,2,3,4,5,6,7] - }, - "e2e_dump_settings": { - "enable": true, - "trans_flag": true - } - } - ``` - - - `dump_mode`:设置成0,表示Dump出该网络中的所有算子;设置成1,表示Dump`"kernels"`里面指定的算子。 - - `path`:Dump保存数据的绝对路径。 - - `net_name`:自定义的网络名称,例如:"ResNet50"。 - - `iteration`:指定需要Dump的迭代,若设置成0,表示Dump所有的迭代。 - - `input_output`:设置成0,表示Dump出算子的输入和算子的输出;设置成1,表示Dump出算子的输入;设置成2,表示Dump出算子的输出。该配置参数仅支持Ascend和CPU,GPU只能Dump算子的输出。 - - `kernels`:算子的名称列表。开启IR保存开关`context.set_context(save_graphs=True)`并执行用例,从生成的IR文件`trace_code_graph_{graph_id}`中获取算子名称。详细说明可以参照教程:[如何保存IR](https://www.mindspore.cn/doc/note/zh-CN/master/design/mindspore/mindir.html#ir)。 - - `support_device`:支持的设备,默认设置成0到7即可;在分布式训练场景下,需要dump个别设备上的数据,可以只在`support_device`中指定需要Dump的设备Id。该配置参数在CPU上无效,因为CPU下没有device这个概念。 - - 
`enable`:开启E2E Dump,如果同时开启同步Dump和异步Dump,那么只有同步Dump会生效。 - - `trans_flag`:开启格式转换。将设备上的数据格式转换成NCHW格式。若为`True`,则数据会以Host侧的4D格式(NCHW)格式保存;若为`False`,则保留Device侧的数据格式。该配置参数在CPU上无效,因为CPU上没有format转换。 - -2. 设置Dump环境变量,指定Dump的json配置文件。 - - ```bash - export MINDSPORE_DUMP_CONFIG=${xxx} - ``` - - 其中"xxx"为配置文件的绝对路径,如: - - ```bash - export MINDSPORE_DUMP_CONFIG=/path/to/data_dump.json - ``` - - 注意: - - - 在网络脚本执行前,设置好环境变量;网络脚本执行过程中设置将会不生效。 - - 在分布式场景下,Dump环境变量需要在调用`mindspore.communication.management.init`之前配置。 - -3. 启动网络训练脚本。 - - 训练启动后,若正确配置了`MINDSPORE_DUMP_CONFIG`环境变量,则会读取配置文件的内容,并按照Dump配置中指定的数据保存路径保存算子数据。 - 同步模式下,如果要Dump数据,必须采用非数据下沉模式(设置`model.train`或`DatasetHelper`中的`dataset_sink_mode`参数为`False`),以保证可以获取每个step的Dump数据。 - 若脚本中都不调用`model.train`或`DatasetHelper`,则默认为非数据下沉模式。使用Dump功能将自动生成最终执行图的IR文件。 - - 可以在训练脚本中设置`context.set_context(reserve_class_name_in_scope=False)`,避免Dump文件名称过长导致Dump数据文件生成失败。 - -4. 通过`numpy.fromfile`读取和解析同步Dump数据,参考[同步Dump数据文件介绍](#id7)。 - -### 同步Dump数据对象目录 - -启动训练后,同步Dump保存的数据对象包括最终执行图(`ms_output_trace_code_graph_{graph_id}.ir`文件)以及图中算子的输入和输出数据,数据目录结构如下所示: - -```text -{path}/ - |-- {net_name}/ - |-- {device_id}/ - |-- iteration_{iteration}/ - -- {op_name}_{input_output_index}_{shape}_{data_type}_{format}.bin - … - |-- graphs/ - ms_output_trace_code_graph_{graph_id}.pb - ms_output_trace_code_graph_{graph_id}.ir - |-- execution_order/ - ms_execution_order_graph_{graph_id}.csv - - |-- .metadata/ - data_dump.json -``` - -- `path`:`data_dump.json`配置文件中设置的绝对路径。 -- `net_name`:`data_dump.json`配置文件中设置的网络名称。 -- `device_id`:训练的卡号。 -- `graph_id`:训练的图标号。 -- `iteration`:训练的轮次。 -- `operator_name`:算子名称。 -- `input_output_index` :输入或输出标号,例如`output_0`表示该文件是该算子的第1个输出Tensor的数据。 -- `shape`: 张量维度信息。 -- `data_type`: 数据类型。 -- `format`: 数据格式。 - -在CPU上进行数据dump时,没有`device_id`这个目录层级,因为CPU上没有device这个概念,也没有`graphs`、`execution_order`和`.metadata`目录。 - -### 同步Dump数据文件介绍 - -同步Dump生成的数据文件是后缀名为`.bin`的二进制文件,文件命名格式为: - -```text -{operator_name}_{input_output_index}_{shape}_{data_type}_{format}.bin -``` 
- -根据文件名提供的`Tensor`信息,可以用`numpy.fromfile`读取数据,并还原原始数据的`data_type`和`shape`。 - -同步Dump生成的最终执行图文件后缀名分别为`.pb`和`.ir`,文件命名格式为: - -```text -ms_output_trace_code_graph_{graph_id}.pb -ms_output_trace_code_graph_{graph_id}.ir -``` - -其中以`.ir`为后缀的文件可以通过`vi`命令打开查看。 - -同步Dump生成的节点执行序文件后缀名为`.csv`,文件命名格式为: - -```text -ms_execution_order_graph_{graph_id}.csv -``` - -`.metadata`记录了训练的原信息,其中`data_dump.json`保存了用户设置的dump配置。 - -### 同步Dump数据分析样例 - -对于Ascend场景,在通过Dump功能将脚本对应的图保存到磁盘上后,会产生最终执行图文件`ms_output_trace_code_graph_{graph_id}.ir`。该文件中保存了对应的图中每个算子的堆栈信息,记录了算子对应的生成脚本。 - -以[AlexNet脚本](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/alexnet/src/alexnet.py)为例 : - -```python -import mindspore.nn as nn -import mindspore.ops as ops - - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0, pad_mode="valid", has_bias=True): - return nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding, - has_bias=has_bias, pad_mode=pad_mode) - - -def fc_with_initialize(input_channels, out_channels, has_bias=True): - return nn.Dense(input_channels, out_channels, has_bias=has_bias) - - -class AlexNet(nn.Cell): - """ - Alexnet - """ - def __init__(self, num_classes=10, channel=3, phase='train', include_top=True): - super(AlexNet, self).__init__() - self.conv1 = conv(channel, 64, 11, stride=4, pad_mode="same", has_bias=True) - self.conv2 = conv(64, 128, 5, pad_mode="same", has_bias=True) - self.conv3 = conv(128, 192, 3, pad_mode="same", has_bias=True) - self.conv4 = conv(192, 256, 3, pad_mode="same", has_bias=True) - self.conv5 = conv(256, 256, 3, pad_mode="same", has_bias=True) - self.relu = ops.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode="valid") - self.include_top = include_top - if self.include_top: - dropout_ratio = 0.65 - if phase == 'test': - dropout_ratio = 1.0 - self.flatten = nn.Flatten() - self.fc1 = fc_with_initialize(6 * 6 * 256, 4096) - self.fc2 = fc_with_initialize(4096, 4096) - self.fc3 = 
fc_with_initialize(4096, num_classes) - self.dropout = nn.Dropout(dropout_ratio) - - def construct(self, x): - """define network""" - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv3(x) - x = self.relu(x) - x = self.conv4(x) - x = self.relu(x) - x = self.conv5(x) - x = self.relu(x) - x = self.max_pool2d(x) - if not self.include_top: - return x - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.dropout(x) - x = self.fc2(x) - x = self.relu(x) - x = self.dropout(x) - x = self.fc3(x) - return x -``` - -如果用户想查看脚本中第58行的代码: - -```python -x = self.conv3(x) -``` - -执行完训练网络后,可以从最终执行图(`ms_output_trace_code_graph_{graph_id}.ir`文件)中查找到该行代码所对应的多个算子信息,文件内容如下所示: - -```text - %24(equivoutput) = Conv2D(%23, %21) {instance name: conv2d} primitive_attrs: {compile_info: , pri_format: NC1HWC0, stride: (1, 1, 1, 1), pad: (0, 0, 0, 0), pad_mod: same, out_channel: -192, mode: 1, dilation: (1, 1, 1, 1), output_names: [output], group: 1, format: NCHW, offset_a: 0, kernel_size: (3, 3), groups: 1, input_names: [x, w], pad_list: (1, 1, 1, 1), -IsFeatureMapOutput: true, IsFeatureMapInputList: (0)} - : (, ) -> () - : (, ) -> () - : (Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op107) - ... - # In file {Absolute path of model_zoo}/official/cv/alexnet/src/alexnet.py(58)/ x = self.conv3(x)/ - ... - %25(equivoutput) = BiasAdd(%24, %22) {instance name: bias_add} primitive_attrs: {output_used_num: (1), input_names: [x, b], format: NCHW, compile_info: , output_names: [output], -IsFeatureMapOutput: true, IsFeatureMapInputList: (0), pri_format: NC1HWC0} - : () -> () -> () - : () -> () -> () - : (Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/BiasAdd-op105) - ... - # In file {Absolute path of model_zoo}/official/cv/alexnet/src/alexnet.py(58)/ x = self.conv3(x)/ - ... 
-``` - -以上所示文件内容的各行所表示的含义如下: - -- 算子在Host侧(第一行)和Device侧(第二行,有些算子可能不存在)的输入输出情况。从执行图可知,该算子有两个输入(箭头左侧),一个输出(箭头右侧)。 - - ```text - : (, ) -> () - : (, ) -> () - ``` - -- 算子名称。从执行图可知,该算子在最终执行图中的完整名称为`Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op107`。 - - ```text - : (Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op107) - ``` - -- 算子对应的训练脚本代码。通过搜索要查询的训练脚本代码,可以找到多个匹配的算子。 - - ```text - # In file {Absolute path of model_zoo}/official/cv/alexnet/src/alexnet.py(58)/ x = self.conv3(x)/ - ``` - -通过算子名称和输入输出信息,可以查找到唯一对应的Tensor数据文件。比如,若要查看Conv2D-op107算子的第1个输出数据对应的Dump文件,可获取以下信息: - -- `operator_name`:`Default--network-WithLossCell--_backbone-AlexNet--conv3-Conv2d--Conv2D-op107`。基于图中序号2声明的算子名称,将其中的`/`替换为`--`可得。 - -- `input_output_index` :`output_0`表示该文件是该算子的第1个输出Tensor的数据。 - -在Dump保存的数据对象文件目录下搜索到相应的文件名: -`Default--network-WithLossCell--_backbone-AlexNet--conv3-Conv2d--Conv2D-op107_output_0_shape_32_12_13_13_16_Float16_NC1HWC0.bin`。 -从文件名中可以得知以下信息: - -- `shape`: 张量维度是`32_12_13_13_16`。 - -- `data_type`: 数据类型为`Float16`。 - -- `format`: 数据格式为`NC1HWC0`(可通过Dump配置文件修改要保存的数据格式)。 - -还原数据的时候,首先通过执行: - -```python -import numpy -numpy.fromfile("Default--network-WithLossCell--_backbone-AlexNet--conv3-Conv2d--Conv2D-op107_output_0_shape_32_12_13_13_16_Float16_NC1HWC0.bin", numpy.float16) -``` - -生成一维array数据,再通过执行: - -```python -import numpy -numpy.reshape(array, (32,12,13,13,16)) -``` - -还原到原始shape数据。 - -## 异步Dump - -大型网络(如Bert Large)使用同步Dump时会导致内存溢出,MindSpore通过异步Dump提供了大型网络的调试能力。 - -### 异步Dump操作步骤 - -1. 
创建配置文件`data_dump.json`。
-
-    JSON文件的名称和位置可以自定义设置。
-
-    ```json
-    {
-        "common_dump_settings": {
-            "dump_mode": 0,
-            "path": "/absolute_path",
-            "net_name": "ResNet50",
-            "iteration": 0,
-            "input_output": 0,
-            "kernels": ["Default/Conv-op12"],
-            "support_device": [0,1,2,3,4,5,6,7]
-        },
-        "async_dump_settings": {
-            "enable": true,
-            "op_debug_mode": 0
-        }
-    }
-    ```
-
-    - `dump_mode`:设置成0,表示Dump出该网络中的所有算子;设置成1,表示Dump`"kernels"`里面指定的算子。
-    - `path`:Dump保存数据的绝对路径。
-    - `net_name`:自定义的网络名称,例如:"ResNet50"。
-    - `iteration`:指定需要Dump的迭代。非数据下沉模式下,`iteration`需要设置成0,并且会Dump出每个迭代的数据。
-    - `input_output`:设置成0,表示Dump出算子的输入和算子的输出;设置成1,表示Dump出算子的输入;设置成2,表示Dump出算子的输出。
-    - `kernels`:算子的名称列表。开启IR保存开关`context.set_context(save_graphs=True)`并执行用例,从生成的`trace_code_graph_{graph_id}`IR文件中获取算子名称。`kernels`仅支持TBE算子、AiCPU算子、通信算子,若设置成通信算子的名称,将会Dump出通信算子的输入算子的数据。详细说明可以参照教程:[如何保存IR](https://www.mindspore.cn/doc/note/zh-CN/master/design/mindspore/mindir.html#ir)。
-    - `support_device`:支持的设备,默认设置成0到7即可;在分布式训练场景下,需要dump个别设备上的数据,可以只在`support_device`中指定需要Dump的设备Id。
-    - `enable`:开启异步Dump,如果同时开启同步Dump和异步Dump,那么只有同步Dump会生效。
-    - `op_debug_mode`:该属性用于算子溢出调试,设置成0,表示不开启溢出;设置成1,表示开启AiCore溢出检测;设置成2,表示开启Atomic溢出检测;设置成3,表示开启全部溢出检测功能。在Dump数据的时候请设置成0,若设置成其他值,则只会Dump溢出算子的数据。
-
-2. 设置数据Dump的环境变量。
-
-    ```bash
-    export MINDSPORE_DUMP_CONFIG={Absolute path of data_dump.json}
-    ```
-
-    - 在网络脚本执行前,设置好环境变量;网络脚本执行过程中设置将会不生效。
-    - 在分布式场景下,Dump环境变量需要在调用`mindspore.communication.management.init`之前配置。
-
-3. 执行用例Dump数据。
-
-    可以在训练脚本中设置`context.set_context(reserve_class_name_in_scope=False)`,避免Dump文件名称过长导致Dump数据文件生成失败。
-
-4. 
参考[异步Dump数据分析样例](#id13)解析Dump数据文件。 - -注意: - -- 若需要dump全量或部分算子,则可以修改json配置文件中的`dump_mode`选项为0或1。 -- 若开启数据下沉功能(设置`model.train`或`DatasetHelper`中的`dataset_sink_mode`参数为`True`),只能dump出配置文件里指定的一个step的数据(此时`iteration 0`表示第0个step),并保存到指定目录下。 -- 若不开启数据下沉功能(设置`model.train`或`DatasetHelper`中的`dataset_sink_mode`参数为`False`),配置文档里`iteration`必须指定为0,所有step的数据都保存在一个目录中,无法支持多step的数据管理。此时建议只执行一次step的数据Dump(可以通过修改脚本只训练一个step)。 -- 使用Dump功能将自动生成最终执行图的IR文件。 - -### 异步Dump数据对象目录 - -异步Dump保存的数据对象包括了最终执行图(`ms_output_trace_code_graph_{graph_id}.ir`文件)以及图中算子的输入和输出数据,目录结构如下所示: - -```text -{path}/ - |-- {device_id}/ - |-- {new_name}_graph_{graph_id}/ - |-- {graph_id}/ - |-- {iteration}/ - |-- {op_type}.{op_name}.{task_id}.{timestamp} - … - |-- graphs/ - ms_output_trace_code_graph_{graph_id}.pb - ms_output_trace_code_graph_{graph_id}.ir - |-- execution_order/ - ms_execution_order_graph_{graph_id}.csv - - |-- .metadata/ - data_dump.json -``` - -- `path`:`data_dump.json`文件中设置的绝对路径。 -- `net_name`:`data_dump.json`文件中设置的网络名称。 -- `device_id`:训练的卡号。 -- `graph_id`:训练的图标号。 -- `iteration`:训练的轮次。 -- `op_type`:算子类型。 -- `op_name`:算子名称。 -- `taskid`:任务标号。 -- `timestamp`:时间戳。 - -### 异步Dump数据文件介绍 - -启动训练后,异步Dump生成的原始数据文件是protobuf格式的文件,需要用到海思Run包中自带的数据解析工具进行解析,详见[如何查看dump数据文件](https://support.huaweicloud.com/tg-Inference-cann/atlasaccuracy_16_0014.html) 。 - -数据在Device侧的格式可能和Host侧计算图中的定义不同,异步Dump的数据格式为Device侧格式,如果想要转为Host侧格式,可以参考[如何进行dump数据文件Format转换](https://support.huaweicloud.com/tg-Inference-cann/atlasaccuracy_16_0013.html) 。 - -异步Dump生成的数据文件命名规则如下: - -- Dump路径的命名规则为:`{path}/{device_id}/{net_name}_graph_{graph_id}/{graph_id}/{iteration}`。 -- Dump文件的命名规则为:`{op_type}.{op_name}.{task_id}.{timestamp}`。 - -以一个简单网络的Dump结果为例:`Add.Default_Add-op1.2.161243956333802`,其中`Add`是`{op_type}`,`Default_Add-op1`是`{op_name}`,`2`是`{task_id}`,`161243956333802`是`{timestamp}`。 - -如果`op_type`和`op_name`中出现了“.”、“/”、“\”、空格时,会转换为下划线表示。 - -异步Dump生成的最终执行图文件和节点执行序文件命名规则与同步Dump相同,可以参考[同步Dump数据文件介绍](#id7)。 - -### 异步Dump数据分析样例 - 
-通过异步Dump的功能,获取到算子异步Dump生成的数据文件。 - -1. 使用run包中提供的`msaccucmp.py`解析Dump出来的文件。不同的环境上`msaccucmp.py`文件所在的路径可能不同,可以通过`find`命令进行查找: - - ```bash - find ${run_path} -name "msaccucmp.py" - ``` - - - `run_path`:run包的安装路径。 - -2. 找到`msaccucmp.py`后,到`/absolute_path`目录下,运行如下命令解析Dump数据: - - ```bash - python ${The absolute path of msaccucmp.py} convert -d {file path of dump} -out {file path of output} - ``` - - 若需要转换数据格式,可参考使用说明链接 。 - - 如Dump生成的数据文件为: - - ```text - BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491 - ``` - - 则执行: - - ```bash - python3.7.5 msaccucmp.py convert -d BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491 -out ./output -f NCHW -t npy - ``` - - 则可以在`./output`下生成该算子的所有输入输出数据。每个数据以`.npy`后缀的文件保存,数据格式为`NCHW`。生成结果如下: - - ```text - BNTrainingUpdate. Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.0.30x1024x17x17.npy - BNTrainingUpdate. Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.1.1x1024x1x1.npy - BNTrainingUpdate. Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.2.1x1024x1x1.npy - BNTrainingUpdate. Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.3.1x1024x1x1.npy - BNTrainingUpdate. 
Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.4.1x1024x1x1.npy - BNTrainingUpdate. Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.5.1x1024x1x1.npy - BNTrainingUpdate. Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.6.1x1024x1x1.npy - BNTrainingUpdate. Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.output.0.30x1024x17x17.npy - BNTrainingUpdate. Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.output.1.1x1024x1x1.npy - BNTrainingUpdate. Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.output.2.1x1024x1x1.npy - BNTrainingUpdate. Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.output.3.1x1024x1x1.npy - BNTrainingUpdate. Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.output.4.1x1024x1x1.npy - ``` - - 在文件名的末尾可以看到该文件是算子的第几个输入或输出,以及数据的维度信息。例如,通过第一个`.npy`文件名 - - ```text - BNTrainingUpdate. 
Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell _1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.0.30x1024x17x17.npy - ``` - - 可知该文件是算子的第0个输入,数据的维度信息是`30x1024x17x17`。 - -3. 通过`numpy.load("file_name")`可以读取到对应数据。例: - - ```python - import numpy - numpy.load("BNTrainingUpdate.Default_network-YoloWithLossCell_yolo_network-YOLOV3DarkNet53_feature_map-YOLOv3_backblock0-YoloBlock_conv3-SequentialCell_1-BatchNorm2d_BNTrainingUpdate-op5489.137.1608983934774491.input.0.30x1024x17x17.npy") - ``` diff --git a/tutorials/training/source_zh_cn/advanced_use/enable_auto_augmentation.md b/tutorials/training/source_zh_cn/advanced_use/enable_auto_augmentation.md deleted file mode 100644 index 60f4861b7fa8ee9871af3bb31cd2cf480d1d7f60..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/enable_auto_augmentation.md +++ /dev/null @@ -1,245 +0,0 @@ -# 应用自动数据增强 - -`Linux` `Ascend` `GPU` `CPU` `数据准备` `中级` `高级` - - - -- [应用自动数据增强](#应用自动数据增强) - - [概述](#概述) - - [ImageNet自动数据增强](#imagenet自动数据增强) - - [参考文献](#参考文献) - - - - -   - -   - - -## 概述 - -自动数据增强(AutoAugment)[1]是在一系列图像增强子策略的搜索空间中,通过搜索算法找到适合特定数据集的图像增强方案。MindSpore的`c_transforms`模块提供了丰富的C++算子来实现AutoAugment,用户也可以自定义函数或者算子来实现。更多MindSpore算子的详细说明参见[API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.vision.html)。 - -MindSpore算子和AutoAugment中的算子的对应关系如下: - -| AutoAugment算子 | MindSpore算子 | 描述 | -| :------: | :------ | ------ | -| shearX | RandomAffine | 横向剪切 | -| shearY | RandomAffine | 纵向剪切 | -| translateX | RandomAffine | 水平平移 | -| translateY | RandomAffine | 垂直平移 | -| rotate | RandomRotation | 旋转变换 | -| color | RandomColor | 颜色变换 | -| posterize | RandomPosterize | 减少颜色通道位数 | -| solarize | RandomSolarize | 指定的阈值范围内,反转所有的像素点 | -| contrast | RandomColorAdjust | 调整对比度 | -| sharpness | RandomSharpness | 调整锐度 | -| brightness | RandomColorAdjust | 调整亮度 | -| autocontrast | AutoContrast | 
最大化图像对比度 | -| equalize | Equalize | 均衡图像直方图 | -| invert | Invert | 反转图像 | - -## ImageNet自动数据增强 - -本教程以在ImageNet数据集上实现AutoAugment作为示例。 - -针对ImageNet数据集的数据增强策略包含25条子策略,每条子策略中包含两种变换,针对一个batch中的每张图像随机挑选一个子策略的组合,以预定的概率来决定是否执行子策略中的每种变换。 - -用户可以使用MindSpore中`c_transforms`模块的`RandomSelectSubpolicy`接口来实现AutoAugment,在ImageNet分类训练中标准的数据增强方式分以下几个步骤: - -- `RandomCropDecodeResize`:随机裁剪后进行解码。 - -- `RandomHorizontalFlip`:水平方向上随机翻转。 - -- `Normalize`:归一化。 - -- `HWC2CHW`:图片通道变化。 - -在`RandomCropDecodeResize`后插入AutoAugment变换,如下所示: - -1. 引入MindSpore数据增强模块。 - - ```python - import matplotlib.pyplot as plt - - import mindspore.dataset as ds - import mindspore.dataset.transforms.c_transforms as c_transforms - import mindspore.dataset.vision.c_transforms as c_vision - from mindspore import dtype as mstype - ``` - -2. 定义MindSpore算子到AutoAugment算子的映射: - - ```python - # define Auto Augmentation operators - PARAMETER_MAX = 10 - - def float_parameter(level, maxval): - return float(level) * maxval / PARAMETER_MAX - - def int_parameter(level, maxval): - return int(level * maxval / PARAMETER_MAX) - - def shear_x(level): - v = float_parameter(level, 0.3) - return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, shear=(-v,-v)), c_vision.RandomAffine(degrees=0, shear=(v, v))]) - - def shear_y(level): - v = float_parameter(level, 0.3) - return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, shear=(0, 0, -v,-v)), c_vision.RandomAffine(degrees=0, shear=(0, 0, v, v))]) - - def translate_x(level): - v = float_parameter(level, 150 / 331) - return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, translate=(-v,-v)), c_vision.RandomAffine(degrees=0, translate=(v, v))]) - - def translate_y(level): - v = float_parameter(level, 150 / 331) - return c_transforms.RandomChoice([c_vision.RandomAffine(degrees=0, translate=(0, 0, -v,-v)), c_vision.RandomAffine(degrees=0, translate=(0, 0, v, v))]) - - def color_impl(level): - v = float_parameter(level, 1.8) + 0.1 - return 
c_vision.RandomColor(degrees=(v, v)) - - def rotate_impl(level): - v = int_parameter(level, 30) - return c_transforms.RandomChoice([c_vision.RandomRotation(degrees=(-v, -v)), c_vision.RandomRotation(degrees=(v, v))]) - - def solarize_impl(level): - level = int_parameter(level, 256) - v = 256 - level - return c_vision.RandomSolarize(threshold=(0, v)) - - def posterize_impl(level): - level = int_parameter(level, 4) - v = 4 - level - return c_vision.RandomPosterize(bits=(v, v)) - - def contrast_impl(level): - v = float_parameter(level, 1.8) + 0.1 - return c_vision.RandomColorAdjust(contrast=(v, v)) - - def autocontrast_impl(level): - return c_vision.AutoContrast() - - def sharpness_impl(level): - v = float_parameter(level, 1.8) + 0.1 - return c_vision.RandomSharpness(degrees=(v, v)) - - def brightness_impl(level): - v = float_parameter(level, 1.8) + 0.1 - return c_vision.RandomColorAdjust(brightness=(v, v)) - ``` - -3. 定义ImageNet数据集的AutoAugment策略: - - ```python - # define the Auto Augmentation policy - imagenet_policy = [ - [(posterize_impl(8), 0.4), (rotate_impl(9), 0.6)], - [(solarize_impl(5), 0.6), (autocontrast_impl(5), 0.6)], - [(c_vision.Equalize(), 0.8), (c_vision.Equalize(), 0.6)], - [(posterize_impl(7), 0.6), (posterize_impl(6), 0.6)], - [(c_vision.Equalize(), 0.4), (solarize_impl(4), 0.2)], - - [(c_vision.Equalize(), 0.4), (rotate_impl(8), 0.8)], - [(solarize_impl(3), 0.6), (c_vision.Equalize(), 0.6)], - [(posterize_impl(5), 0.8), (c_vision.Equalize(), 1.0)], - [(rotate_impl(3), 0.2), (solarize_impl(8), 0.6)], - [(c_vision.Equalize(), 0.6), (posterize_impl(6), 0.4)], - - [(rotate_impl(8), 0.8), (color_impl(0), 0.4)], - [(rotate_impl(9), 0.4), (c_vision.Equalize(), 0.6)], - [(c_vision.Equalize(), 0.0), (c_vision.Equalize(), 0.8)], - [(c_vision.Invert(), 0.6), (c_vision.Equalize(), 1.0)], - [(color_impl(4), 0.6), (contrast_impl(8), 1.0)], - - [(rotate_impl(8), 0.8), (color_impl(2), 1.0)], - [(color_impl(8), 0.8), (solarize_impl(7), 0.8)], - 
[(sharpness_impl(7), 0.4), (c_vision.Invert(), 0.6)], - [(shear_x(5), 0.6), (c_vision.Equalize(), 1.0)], - [(color_impl(0), 0.4), (c_vision.Equalize(), 0.6)], - - [(c_vision.Equalize(), 0.4), (solarize_impl(4), 0.2)], - [(solarize_impl(5), 0.6), (autocontrast_impl(5), 0.6)], - [(c_vision.Invert(), 0.6), (c_vision.Equalize(), 1.0)], - [(color_impl(4), 0.6), (contrast_impl(8), 1.0)], - [(c_vision.Equalize(), 0.8), (c_vision.Equalize(), 0.6)], - ] - ``` - -4. 在`RandomCropDecodeResize`操作后插入AutoAugment变换。 - - ```python - def create_dataset(dataset_path, do_train, repeat_num=1, batch_size=32, shuffle=True, num_samples=5, target="Ascend"): - # create a train or eval imagenet2012 dataset for ResNet-50 - dataset = ds.ImageFolderDataset(dataset_path, num_parallel_workers=8, - shuffle=shuffle, num_samples=num_samples) - - image_size = 224 - mean = [0.485 * 255, 0.456 * 255, 0.406 * 255] - std = [0.229 * 255, 0.224 * 255, 0.225 * 255] - - # define map operations - if do_train: - trans = [ - c_vision.RandomCropDecodeResize(image_size, scale=(0.08, 1.0), ratio=(0.75, 1.333)), - ] - - post_trans = [ - c_vision.RandomHorizontalFlip(prob=0.5), - ] - else: - trans = [ - c_vision.Decode(), - c_vision.Resize(256), - c_vision.CenterCrop(image_size), - c_vision.Normalize(mean=mean, std=std), - c_vision.HWC2CHW() - ] - dataset = dataset.map(operations=trans, input_columns="image") - if do_train: - dataset = dataset.map(operations=c_vision.RandomSelectSubpolicy(imagenet_policy), input_columns=["image"]) - dataset = dataset.map(operations=post_trans, input_columns="image") - type_cast_op = c_transforms.TypeCast(mstype.int32) - dataset = dataset.map(operations=type_cast_op, input_columns="label") - # apply the batch operation - dataset = dataset.batch(batch_size, drop_remainder=True) - # apply the repeat operation - dataset = dataset.repeat(repeat_num) - - return dataset - ``` - -5. 验证自动数据增强效果。 - - ```python - # Define the path to image folder directory. 
This directory needs to contain sub-directories which contain the images - DATA_DIR = "/path/to/image_folder_directory" - dataset = create_dataset(dataset_path=DATA_DIR, do_train=True, batch_size=5, shuffle=False, num_samples=5) - - epochs = 5 - itr = dataset.create_dict_iterator() - fig=plt.figure(figsize=(8, 8)) - columns = 5 - rows = 5 - - step_num = 0 - for ep_num in range(epochs): - for data in itr: - step_num += 1 - for index in range(rows): - fig.add_subplot(rows, columns, ep_num * rows + index + 1) - plt.imshow(data['image'].asnumpy()[index]) - plt.show() - ``` - - > 为了更好地演示效果,此处只加载5张图片,且读取时不进行`shuffle`操作,自动数据增强时也不进行`Normalize`和`HWC2CHW`操作。 - - ![augment](./images/auto_augmentation.png) - - 运行结果可以看到,batch中每张图像的增强效果,水平方向表示1个batch的5张图像,垂直方向表示5个batch。 - -## 参考文献 - -[1] [AutoAugment: Learning Augmentation Policies from Data](https://arxiv.org/abs/1805.09501). diff --git a/tutorials/training/source_zh_cn/advanced_use/enable_cache.md b/tutorials/training/source_zh_cn/advanced_use/enable_cache.md deleted file mode 100644 index 2015c99dc7943bf5b40bcc9f555b6d818e505f33..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/enable_cache.md +++ /dev/null @@ -1,152 +0,0 @@ -# 应用单节点数据缓存 - -`Linux` `Ascend` `GPU` `CPU` `数据准备` `中级` `高级` - - - -- [应用单节点数据缓存](#应用单节点数据缓存) - - [概述](#概述) - - [配置环境](#配置环境) - - [启动缓存服务器](#启动缓存服务器) - - [创建缓存会话](#创建缓存会话) - - [创建缓存实例](#创建缓存实例) - - [插入缓存实例](#插入缓存实例) - - [销毁缓存会话](#销毁缓存会话) - - [关闭缓存服务器](#关闭缓存服务器) - - - -   -   - - -## 概述 - -对于需要重复访问远程的数据集或需要重复从磁盘中读取数据集的情况,可以使用单节点缓存算子将数据集缓存于本地内存中,以加速数据集的读取。 - -下面,本教程将演示如何使用单节点缓存服务来缓存经过数据增强处理的数据。 - -## 配置环境 - -使用缓存服务前,需要安装MindSpore,并设置相关环境变量。以Conda环境为例,设置方法如下: - -```shell -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{path_to_conda}/envs/{your_env_name}/lib/python3.7/site-packages/mindspore:{path_to_conda}/envs/{your_env_name}/lib/python3.7/site-packages/mindspore/lib -export PATH=$PATH:{path_to_conda}/envs/{your_env_name}/bin -``` - -## 启动缓存服务器 - 
-在使用单节点缓存服务之前,首先需要启动缓存服务器: - -```shell -$ cache_admin --start -Cache server startup completed successfully! -The cache server daemon has been created as process id 10394 and is listening on port 50052 - -Recommendation: -Since the server is detached into its own daemon process, monitor the server logs (under /tmp/mindspore/cache/log) for any issues that may happen after startup -``` - -若提示找不到`libpython3.7m.so.1.0`文件,尝试在虚拟环境下查找其路径并设置环境变量: - -```shell -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{path_to_conda}/envs/{your_env_name}/lib -``` - -## 创建缓存会话 - -若缓存服务器中不存在缓存会话,则需要创建一个缓存会话,得到缓存会话id: - -```shell -$ cache_admin -g -Session created for server on port 50052: 1493732251 -``` - -缓存会话id由服务器随机分配。 - -## 创建缓存实例 - -创建Python脚本`my_training_script.py`,在脚本中使用`DatasetCache` API来定义一个名为`some_cache`的缓存实例,并把上一步中创建的缓存会话id传入`session_id`参数: - -```python -import mindspore.dataset as ds - -some_cache = ds.DatasetCache(session_id=1493732251, size=0, spilling=False) -``` - -## 插入缓存实例 - -下面样例中使用到CIFAR-10数据集。运行样例前,需要参照[数据集加载](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html#cifar-10-100)中的方法下载并存放CIFAR-10数据集。目录结构如下: - -```text -├─my_training_script.py -└─cifar-10-batches-bin - ├── batches.meta.txt - ├── data_batch_1.bin - ├── data_batch_2.bin - ├── data_batch_3.bin - ├── data_batch_4.bin - ├── data_batch_5.bin - ├── readme.html - └── test_batch.bin -``` - -继续编写Python脚本,在应用数据增强算子时将所创建的`some_cache`作为其`cache`参数传入: - -```python -import mindspore.dataset.vision.c_transforms as c_vision - -dataset_dir = "cifar-10-batches-bin/" -data = ds.Cifar10Dataset(dataset_dir=dataset_dir, num_samples=5, shuffle=False, num_parallel_workers=1) - -# apply cache to map -rescale_op = c_vision.Rescale(1.0 / 255.0, -1.0) -data = data.map(input_columns=["image"], operations=rescale_op, cache=some_cache) - -num_iter = 0 -for item in data.create_dict_iterator(num_epochs=1): # each data is a dictionary - # in this example, each dictionary has a key "image" - print("{} image shape: 
{}".format(num_iter, item["image"].shape)) - num_iter += 1 -``` - -运行Python脚本`my_training_script.py`,得到输出结果: - -```text -0 image shape: (32, 32, 3) -1 image shape: (32, 32, 3) -2 image shape: (32, 32, 3) -3 image shape: (32, 32, 3) -4 image shape: (32, 32, 3) -``` - -通过`cache_admin --list_sessions`命令可以查看当前会话有五条数据,说明数据缓存成功。 - -```shell -$ cache_admin --list_sessions -Listing sessions for server on port 50052 - - Session Cache Id Mem cached Disk cached Avg cache size Numa hit - 1493732251 3618046178 5 n/a 12442 5 -``` - -## 销毁缓存会话 - -在训练结束后,可以选择将当前的缓存销毁并释放内存: - -```shell -$ cache_admin --destroy_session 1493732251 -Drop session successfully for server on port 50052 -``` - -以上命令将销毁缓存会话id为1493732251的缓存。 - -## 关闭缓存服务器 - -使用完毕后,可以选择关闭缓存服务器,该操作将销毁当前服务器中存在的所有缓存会话并释放内存。 - -```shell -$ cache_admin --stop -Cache server on port 50052 has been stopped successfully. -``` diff --git a/tutorials/training/source_zh_cn/advanced_use/enable_graph_kernel_fusion.md b/tutorials/training/source_zh_cn/advanced_use/enable_graph_kernel_fusion.md deleted file mode 100644 index 8fab718ed6ba45a4a26615e0e861fcc24b9218c0..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/enable_graph_kernel_fusion.md +++ /dev/null @@ -1,156 +0,0 @@ -# 使能图算融合 - -`Linux` `Ascend` `GPU` `模型调优` `中级` `高级` - - - -- [使能图算融合](#使能图算融合) - - [概述](#概述) - - [使用方法](#使用方法) - - [样例脚本](#样例脚本) - - [自定义组合算子](#自定义组合算子) - - [样例脚本](#样例脚本-1) - - - - - -## 概述 - -图算融合是MindSpore特有的网络性能优化技术。它可以通过自动分析和优化现有网络计算图逻辑,并结合目标硬件能力,对计算图进行计算化简和替代、算子拆分和融合、算子特例化编译等优化,以提升设备计算资源利用率,实现对网络性能的整体优化。相比传统优化技术,图算融合具有多算子跨边界联合优化、与算子编译跨层协同、基于Polyhedral的算子即时编译等独特优势。另外,图算融合只需要用户打开对应配置后,整个优化过程即可自动完成,不需要网络开发人员进行其它额外感知,使得用户可以聚焦网络算法实现。 - -图算融合的适用场景包括: - -- 对网络执行时间具有较高性能要求的场景; -- 通过拼接基本算子实现自定义组合算子,并希望对这些基本算子进行自动融合,以提升自定义组合算子性能的场景。 - -## 使用方法 - -当前图算融合优化默认关闭状态,我们只需在训练脚本中为`context`指定参数`enable_graph_kernel=True`即可启用图算融合: - -```python -from mindspore import context -context.set_context(enable_graph_kernel=True) -``` - -> 
图算融合优化只支持Graph模式。 - -### 样例脚本 - -为了说明图算融合优化场景,我们构造了一个简单网络`MyNet`, 包含一个乘法和加法计算。在打开图算融合进行优化之后,这两个计算便会自动合成一个融合算子: - -```python -import numpy as np -import mindspore.context as context -from mindspore import Tensor -from mindspore.nn import Cell -import mindspore.ops as ops - -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") -# save graph ir to view fusion detail. -context.set_context(save_graphs=True) -# enable graph kernel optimization. -context.set_context(enable_graph_kernel=True) - -class MyNet(Cell): - def __init__(self): - super(MyNet, self).__init__() - self.add = ops.Add() - self.mul = ops.Mul() - - def construct(self, x): - a = self.mul(x, 2.0) - res = self.add(a, 1.0) - return res - -x = np.ones((4, 4)).astype(np.float32) * 0.5 -net = MyNet() -result = net(Tensor(x)) -print("result: {}".format(result)) -``` - -输出结果: - -```text -result: [[2. 2. 2. 2.] - [2. 2. 2. 2.] - [2. 2. 2. 2.] - [2. 2. 2. 2.]] -``` - -该计算图的融合结果如图1所示,其中左图为未使能图算融合时的对应计算图,右图为使能图算融合后的对应计算图。可以看到该网络中的加法和乘法被融合成一个算子。该融合过程可以通过查看中间IR,或者通过Profiling等工具跟踪算子执行过程进行验证。 - -![基本算子融合示例](images/graph_kernel_example_fuse_basic.png) - -图1:图算融合优化计算图 - -## 自定义组合算子 - -基于图算融合技术,用户可以很方便地实现高性能的自定义组合算子。其主要流程为: - -1. 在脚本中用基本算子组合的方式实现自定义算子定义和使用; -2. 打开图算融合配置; -3. 图算融合对自定义组合算子中的基本算子自动进行算子融合,并生成高性能融合算子。 - -相比其它自定义算子方式,这种方式具有对框架无侵入、简单易用等优点。 - -### 样例脚本 - -我们构造一个简单网络`MyNet`,并在其中使用了自定义算子`MyOp`。代码样例如下: - -```python -import numpy as np -import mindspore.context as context -from mindspore import Tensor -from mindspore.nn import Cell -import mindspore.ops as ops - -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") -# enable graph kernel optimization. 
-context.set_context(enable_graph_kernel=True) - -class MyOp(Cell): - """ my first custom OP composited by basic OPs """ - def __init__(self): - super(MyOp, self).__init__() - self.sub = ops.Sub() - self.mul = ops.Mul() - - def construct(self, x, y): - a = self.sub(x, y) - return self.mul(a, x) - -class MyNet(Cell): - def __init__(self): - super(MyNet, self).__init__() - self.mul = ops.Mul() - self.pow = ops.Pow() - self.my_op = MyOp() - - def construct(self, x, y): - a = self.mul(x, 2.0) - b = self.pow(a, 3.0) - res = self.my_op(b, y) - return res - -x = np.ones((4, 4)).astype(np.float32) * 0.2 -y = np.ones((4, 4)).astype(np.float32) * 0.3 -net = MyNet() -result = net(Tensor(x), Tensor(y)) -print("result: {}".format(result)) -``` - -输出结果: - -```text -result: [[-0.015104 -0.015104 -0.015104 -0.015104] - [-0.015104 -0.015104 -0.015104 -0.015104] - [-0.015104 -0.015104 -0.015104 -0.015104] - [-0.015104 -0.015104 -0.015104 -0.015104]] -``` - -该计算图的融合结果如图2所示,其中左图为未使能图算融合时的对应计算图,右图为使能图算融合后的对应计算图。可以看到不仅自定义算子`MyOp`中的基本算子进行了融合,并且与主图中的其他算子也进行了更大范围融合。该融合过程可以通过查看中间IR,或者通过Profiling等工具跟踪算子执行过程进行验证。 - -![自定义组合算子融合示例](images/graph_kernel_example_custom_op.png) - -图2:自定义组合算子优化计算图 diff --git a/tutorials/training/source_zh_cn/advanced_use/enable_mixed_precision.md b/tutorials/training/source_zh_cn/advanced_use/enable_mixed_precision.md deleted file mode 100644 index 643e13acf4819889ccd5f29abb588d2c798124cd..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/enable_mixed_precision.md +++ /dev/null @@ -1,233 +0,0 @@ -# 使能自动混合精度 - -`Linux` `Ascend` `GPU` `模型训练` `中级` `高级` - - - -- [使能自动混合精度](#使能自动混合精度) - - [概述](#概述) - - [计算流程](#计算流程) - - [自动混合精度](#自动混合精度) - - [手动混合精度](#手动混合精度) - - [约束](#约束) - - - -   -   - - -## 概述 - -混合精度训练方法是通过混合使用单精度和半精度数据格式来加速深度神经网络训练的过程,同时保持了单精度训练所能达到的网络精度。混合精度训练能够加速计算过程,同时减少内存使用和存取,并使得在特定的硬件上可以训练更大的模型或`batch size`。 - -对于FP16的算子,若给定的数据类型是FP32,MindSpore框架的后端会进行降精度处理。用户可以开启INFO日志,并通过搜索关键字“Reduce precision”查看降精度处理的算子。 - -## 
计算流程 - -MindSpore混合精度典型的计算流程如下图所示: - -![mix precision](./images/mix_precision.PNG) - -1. 参数以FP32存储; -2. 正向计算过程中,遇到FP16算子,需要把算子输入和参数从FP32 cast成FP16进行计算; -3. 将Loss层设置为FP32进行计算; -4. 反向计算过程中,首先乘以Loss Scale值,避免反向梯度过小而产生下溢; -5. FP16参数参与梯度计算,其结果将被cast回FP32; -6. 除以Loss scale值,还原被放大的梯度; -7. 判断梯度是否存在溢出,如果溢出则跳过更新,否则优化器以FP32对原始参数进行更新。 - -本文通过自动混合精度和手动混合精度的样例来讲解计算流程。 - -## 自动混合精度 - -使用自动混合精度,需要调用相应的接口,将待训练网络和优化器作为输入传进去;该接口会将整张网络的算子转换成FP16算子(除`BatchNorm`算子和Loss涉及到的算子外)。可以使用`amp`接口和`Model`接口两种方式实现混合精度。 - -使用`amp`接口具体的实现步骤为: - -1. 引入MindSpore的混合精度的接口`amp`; - -2. 定义网络:该步骤和普通的网络定义没有区别(无需手动配置某个算子的精度); - -3. 使用`amp.build_train_network`接口封装网络模型、优化器和损失函数,设置level参数,参考。在该步骤中,MindSpore会将有需要的算子自动进行类型转换。 - -代码样例如下: - -```python -import numpy as np - -import mindspore.nn as nn -from mindspore import Tensor, context -import mindspore.ops as ops -from mindspore.nn import Momentum -# The interface of Auto_mixed precision -from mindspore import amp - -context.set_context(mode=context.GRAPH_MODE) -context.set_context(device_target="Ascend") - -# Define network -class Net(nn.Cell): - def __init__(self, input_channel, out_channel): - super(Net, self).__init__() - self.dense = nn.Dense(input_channel, out_channel) - self.relu = ops.ReLU() - - def construct(self, x): - x = self.dense(x) - x = self.relu(x) - return x - - -# Initialize network -net = Net(512, 128) - -# Define training data, label -predict = Tensor(np.ones([64, 512]).astype(np.float32) * 0.01) -label = Tensor(np.zeros([64, 128]).astype(np.float32)) - -# Define Loss and Optimizer -loss = nn.SoftmaxCrossEntropyWithLogits() -optimizer = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) -train_network = amp.build_train_network(net, optimizer, loss, level="O3", loss_scale_manager=None) - -# Run training -output = train_network(predict, label) -``` - -使用`Model`接口具体的实现步骤为: - -1. 引入MindSpore的模型训练接口`Model`; - -2. 定义网络:该步骤和普通的网络定义没有区别(无需手动配置某个算子的精度); - -3. 创建数据集。该步骤可参考 ; - -4. 
使用`Model`接口封装网络模型、优化器和损失函数,设置`amp_level`参数,参考。在该步骤中,MindSpore会将有需要的算子自动进行类型转换。 - -代码样例如下: - -```python -import numpy as np -import mindspore.nn as nn -from mindspore.nn.metrics import Accuracy -from mindspore import context, Model -from mindspore.common.initializer import Normal -from src.dataset import create_dataset - -context.set_context(mode=context.GRAPH_MODE) -context.set_context(device_target="Ascend") - -# Define network -class LeNet5(nn.Cell): - """ - Lenet network - - Args: - num_class (int): Number of classes. Default: 10. - num_channel (int): Number of channels. Default: 1. - - Returns: - Tensor, output tensor - Examples: - >>> LeNet(num_class=10) - - """ - def __init__(self, num_class=10, num_channel=1): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x - -# create dataset -ds_train = create_dataset("/dataset/MNIST/train", 32) - -# Initialize network -network = LeNet5(10) - -# Define Loss and Optimizer -net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") -net_opt = nn.Momentum(network.trainable_params(),learning_rate=0.01, momentum=0.9) -model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}, amp_level="O3") - -# Run training -model.train(epoch=10, train_dataset=ds_train) -``` - -## 手动混合精度 - -MindSpore还支持手动混合精度。假定在网络中只有一个Dense Layer要用FP32计算,其他Layer都用FP16计算。混合精度配置以Cell为粒度,Cell默认是FP32类型。 - 
-以下是一个手动混合精度的实现步骤: - -1. 定义网络:该步骤与自动混合精度中的步骤2类似; - -2. 配置混合精度:通过`net.to_float(mstype.float16)`,把该Cell及其子Cell中所有的算子都配置成FP16;然后,将模型中的dense算子手动配置成FP32; - -3. 使用TrainOneStepCell封装网络模型和优化器。 - -代码样例如下: - -```python -import numpy as np - -import mindspore.nn as nn -from mindspore import dtype as mstype -from mindspore import Tensor, context -import mindspore.ops as ops -from mindspore.nn import WithLossCell, TrainOneStepCell -from mindspore.nn import Momentum - -context.set_context(mode=context.GRAPH_MODE) -context.set_context(device_target="Ascend") - -# Define network -class Net(nn.Cell): - def __init__(self, input_channel, out_channel): - super(Net, self).__init__() - self.dense = nn.Dense(input_channel, out_channel) - self.relu = ops.ReLU() - - def construct(self, x): - x = self.dense(x) - x = self.relu(x) - return x - -# Initialize network -net = Net(512, 128) -# Set mixing precision -net.to_float(mstype.float16) -net.dense.to_float(mstype.float32) - -# Define training data, label -predict = Tensor(np.ones([64, 512]).astype(np.float32) * 0.01) -label = Tensor(np.zeros([64, 128]).astype(np.float32)) - -# Define Loss and Optimizer -loss = nn.SoftmaxCrossEntropyWithLogits() -optimizer = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) -net_with_loss = WithLossCell(net, loss) -train_network = TrainOneStepCell(net_with_loss, optimizer) -train_network.set_train() - -# Run training -output = train_network(predict, label) -``` - -## 约束 - -使用混合精度时,只能由自动微分功能生成反向网络,不能由用户自定义生成反向网络,否则可能会导致MindSpore产生数据格式不匹配的异常信息。 diff --git a/tutorials/training/source_zh_cn/advanced_use/evaluate_the_model_during_training.md b/tutorials/training/source_zh_cn/advanced_use/evaluate_the_model_during_training.md deleted file mode 100644 index 12623e7ea9384163e4a49e04642e0560ab12d195..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/evaluate_the_model_during_training.md +++ /dev/null @@ -1,177 +0,0 @@ -# 训练时验证模型 - -`Linux` `Ascend` 
`GPU` `CPU` `模型导出` `模型训练` `初级` `中级` `高级` - - - -- [同步训练和验证模型](#同步训练和验证模型) - - [概述](#概述) - - [定义回调函数EvalCallBack](#定义回调函数evalcallback) - - [定义训练网络并执行](#定义训练网络并执行) - - [定义函数绘制不同epoch下模型的精度](#定义函数绘制不同epoch下模型的精度) - - [总结](#总结) - - - - -   - -   - - -## 概述 - -在面对复杂网络时,往往需要进行几十甚至几百次的epoch训练。在训练之前,很难掌握在训练到第几个epoch时,模型的精度能达到满足要求的程度,所以经常会采用一边训练的同时,在相隔固定epoch的位置对模型进行精度验证,并保存相应的模型,等训练完毕后,通过查看对应模型精度的变化就能迅速地挑选出相对最优的模型,本文将采用这种方法,以LeNet网络为样本,进行示例。 - -流程如下: - -1. 定义回调函数EvalCallBack,实现同步进行训练和验证。 -2. 定义训练网络并执行。 -3. 将不同epoch下的模型精度绘制出折线图并挑选最优模型。 - -完整示例请参源代码:。 - -## 定义回调函数EvalCallBack - -实现思想:每隔n个epoch验证一次模型精度,需要在自定义回调函数中实现,如需了解详细用法,请参考[API说明](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.train.html#mindspore.train.callback.Callback); - -核心实现:回调函数的`epoch_end`内设置验证点,如下: - -`cur_epoch % eval_per_epoch == 0`:即每`eval_per_epoch`个epoch结束时,验证一次模型精度。 - -- `cur_epoch`:当前训练过程的`epoch`数值。 -- `eval_per_epoch`:用户自定义数值,即验证频次。 - -其他参数解释: - -- `model`:MindSpore中的`Model`类。 -- `eval_dataset`:验证数据集。 -- `epoch_per_eval`:记录验证模型的精度和相应的epoch数,其数据形式为`{"epoch": [], "acc": []}`。 - -```python -from mindspore.train.callback import Callback - -class EvalCallBack(Callback): - def __init__(self, model, eval_dataset, eval_per_epoch, epoch_per_eval): - self.model = model - self.eval_dataset = eval_dataset - self.eval_per_epoch = eval_per_epoch - self.epoch_per_eval = epoch_per_eval - - def epoch_end(self, run_context): - cb_param = run_context.original_args() - cur_epoch = cb_param.cur_epoch_num - if cur_epoch % self.eval_per_epoch == 0: - acc = self.model.eval(self.eval_dataset, dataset_sink_mode=False) - self.epoch_per_eval["epoch"].append(cur_epoch) - self.epoch_per_eval["acc"].append(acc["Accuracy"]) - print(acc) - -``` - -## 定义训练网络并执行 - -在保存模型的参数`CheckpointConfig`中,需计算好单个`epoch`中的`step`数,根据保存模型参数`ckpt`文件,需要间隔的step数来设置,本次示例每个epoch有1875个step,按照每两个epoch验证一次的思想,这里设置`save_checkpoint_steps=eval_per_epoch*1875`,其中变量`eval_per_epoch`等于2。 - -参数解释: - -- `config_ck`:配置保存模型信息。 - - 
`save_checkpoint_steps`:每多少个step保存一次模型的权重参数`ckpt`文件。 - - `keep_checkpoint_max`:设置保存模型的权重参数`ckpt`文件的数量上限。 -- `ckpoint_cb`:配置模型权重参数`ckpt`文件保存名称的前缀信息及保存路径信息。 -- `model`:MindSpore中的`Model`类。 -- `model.train`:`Model`类的执行训练函数。 -- `epoch_per_eval`:定义收集`epoch`数和对应模型精度信息的字典。 -- `train_data`:训练数据集。 -- `eval_data`:验证数据集。 - -```python -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor -from mindspore import context, Model -from mindspore.nn.metrics import Accuracy - -if __name__ == "__main__": - context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - ckpt_save_dir = "./lenet_ckpt" - eval_per_epoch = 2 - epoch_size = 10 - ... ... - - # need to calculate how many steps are in each epoch,in this example, 1875 steps per epoch - config_ck = CheckpointConfig(save_checkpoint_steps=eval_per_epoch*1875, keep_checkpoint_max=15) - ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet",directory=ckpt_save_dir, config=config_ck) - model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}) - - epoch_per_eval = {"epoch": [], "acc": []} - eval_cb = EvalCallBack(model, eval_data, eval_per_epoch, epoch_per_eval) - - model.train(epoch_size, train_data, callbacks=[ckpoint_cb, LossMonitor(375), eval_cb], - dataset_sink_mode=False) -``` - -输出结果: - -```text -epoch: 1 step: 375, loss is 2.298612 -epoch: 1 step: 750, loss is 2.075152 -epoch: 1 step: 1125, loss is 0.39205977 -epoch: 1 step: 1500, loss is 0.12368304 -epoch: 1 step: 1875, loss is 0.20988345 -epoch: 2 step: 375, loss is 0.20582482 -epoch: 2 step: 750, loss is 0.029070046 -epoch: 2 step: 1125, loss is 0.041760832 -epoch: 2 step: 1500, loss is 0.067035824 -epoch: 2 step: 1875, loss is 0.0050643035 -{'Accuracy': 0.9763621794871795} -... ... 
-epoch: 9 step: 375, loss is 0.021227183 -epoch: 9 step: 750, loss is 0.005586236 -epoch: 9 step: 1125, loss is 0.029125651 -epoch: 9 step: 1500, loss is 0.00045874066 -epoch: 9 step: 1875, loss is 0.023556218 -epoch: 10 step: 375, loss is 0.0005807788 -epoch: 10 step: 750, loss is 0.02574059 -epoch: 10 step: 1125, loss is 0.108463734 -epoch: 10 step: 1500, loss is 0.01950589 -epoch: 10 step: 1875, loss is 0.10563098 -{'Accuracy': 0.979667467948718} -``` - -在同一目录找到`lenet_ckpt`文件夹,文件夹中保存了5个模型的权重参数`ckpt`文件,和一个计算图相关数据`meta`文件,其结构如下: - -```text -lenet_ckpt -├── checkpoint_lenet-10_1875.ckpt -├── checkpoint_lenet-2_1875.ckpt -├── checkpoint_lenet-4_1875.ckpt -├── checkpoint_lenet-6_1875.ckpt -├── checkpoint_lenet-8_1875.ckpt -└── checkpoint_lenet-graph.meta -``` - -## 定义函数绘制不同epoch下模型的精度 - -定义绘图函数`eval_show`,将`epoch_per_eval`载入到`eval_show`中,绘制出不同`epoch`下模型的验证精度折线图。 - -```python -import matplotlib.pyplot as plt - -def eval_show(epoch_per_eval): - plt.xlabel("epoch number") - plt.ylabel("Model accuracy") - plt.title("Model accuracy variation chart") - plt.plot(epoch_per_eval["epoch"], epoch_per_eval["acc"], "red") - plt.show() - -eval_show(epoch_per_eval) -``` - -输出结果: - -![png](./images/evaluate_the_model_during_training.png) - -从上图可以一目了然地挑选出需要的最优模型权重参数`ckpt`文件。 - -## 总结 - -本次使用MNIST数据集通过卷积神经网络LeNet5进行训练,着重介绍了在进行模型训练的同时进行模型的验证,保存对应`epoch`的模型权重参数`ckpt`文件,并从中挑选出最优模型的方法。 diff --git a/tutorials/training/source_zh_cn/advanced_use/hpc.rst b/tutorials/training/source_zh_cn/advanced_use/hpc.rst deleted file mode 100644 index 39116f0b4b84367060515a843e3deb93f3270177..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/hpc.rst +++ /dev/null @@ -1,8 +0,0 @@ -高性能计算 -=========== - -.. 
toctree:: - :maxdepth: 1 - - hpc_gomo - hpc_sponge \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/advanced_use/hpc_gomo.md b/tutorials/training/source_zh_cn/advanced_use/hpc_gomo.md deleted file mode 100644 index 87c192ca73836e61acc806077d32156be6fd52d4..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/hpc_gomo.md +++ /dev/null @@ -1,236 +0,0 @@ -# 实现区域海洋模型GOMO - -`Linux` `GPU` `模型开发` `高级` - - - -- [实现区域海洋模型GOMO](#实现区域海洋模型GOMO) - - [概述](#概述) - - [准备环节](#准备环节) - - [安装软件依赖](#安装软件依赖) - - [准备数据](#准备数据) - - [加载数据](#加载数据) - - [定义网络](#定义网络) - - [训练网络](#训练网络) - - [配置模型保存](#配置模型保存) - - [配置训练网络](#配置训练网络) - - [运行脚本](#运行脚本) - - [引用](#引用) - - -   - -## 概述 - -GOMO(Generalized Operator Modelling of the Ocean)是基于 OpenArray 的三维区域海洋模型,由清华大学黄小猛老师开发的用于海洋建模和并行计算的通用模型([Xiaomeng Huang et al, 2019](https://gmd.copernicus.org/articles/12/4729/2019/gmd-12-4729-2019.pdf))。GOMO模型中的基本方程和实现算法来自于POM2k模型([Blumberg and Mellor, 1987](http://www.sciepub.com/portal/downloads?doi=10.12691/ajmo-2-2-2&filename=ajmo-2-2-2.pdf))。三维海洋模型在地球系统模式中有重要的作用,以质量能量守恒定律为约束,通过模拟海洋洋流、漩涡等现象,不仅能够很好的表示海面温度和海表面高度的分布,还能够实时的预测台风、海啸等现象。传统的海洋模型代码实现复杂,且都是基于CPU运行,我们使用MindSpore对GOMO模型进行框架加速,结合GPU,能获得较大的性能提升。 - -本篇教程将主要介绍如何在GPU上,使用MindSpore构建并运行三维海洋模型GOMO。 -> 你可以在这里下载完整的示例代码: - 。 - -示例代码目录结构如下: - -```shell -└── ocean_model - ├── README.md # descriptions about ocean model GOMO - ├── scripts - │ ├── run_distribute_train.sh # launch distributed training for GPU - ├──src - │ ├── GOMO.py # GOMO model - │ ├── Grid.py # grid initial - │ ├── stencil.py # averaging and differential stencil oprator - │ ├── op_operator.py # averaging and differential kernel operator - │ ├── read_var.py # read variables from nc file - ├── train.py # train script -``` - -整体执行流程如下: - -1. 准备海洋模拟数据集Seamount文件,加载处理数据; -2. 定义GOMO变量初始化; -3. 定义GOMO模型; -4. 加载数据集并进行训练,训练完成后,查看结果及保存文件。 - -## 准备环节 - -### 安装软件依赖 - -1. 
安装MindSpore - - 实践前,确保已经正确安装MindSpore。如果没有,可以通过[MindSpore安装页面](https://www.mindspore.cn/install)安装。 - -2. 安装netCDF4 - - ```shell - pip install netCDF4 - ``` - -### 准备数据 - -本教程使用的是netCDF格式的[Seamount文件](https://github.com/hxmhuang/GOMO/tree/master/bin/data),贝克曼和海德沃格尔提出的Seamount问题是区域海洋模型广泛使用的理想试验案例([Beckmann and Haidvogel, 1993](https://journals.ametsoc.org/view/journals/phoc/23/8/1520-0485_1993_023_1736_nsofaa_2_0_co_2.xml?tab_body=fulltext-display))。 - -## 加载数据 - -加载Seamount数据文件,从文件脚本中读取变量的初始化值,Seamount文件中的数据类型是双精度Float64,需要将其转成Float32进入MindSpore计算。加载处理数据的脚本在源码的`src/read_var.py`脚本中。 - -```python -import numpy as np -import netCDF4 as nc - -# variable name list -params_name = ['z', 'zz', 'dz', 'dzz', 'dx', 'dy', 'cor', 'h', 'fsm', 'dum', 'dvm', 'art', 'aru', 'arv', 'rfe', 'rfw', - 'rfn', 'rfs', 'east_e', 'north_e', 'east_c', 'north_c', 'east_u', 'north_u', 'east_v', 'north_v', 'tb', - 'sb', 'tclim', 'sclim', 'rot', 'vfluxf', 'wusurf', 'wvsurf', 'e_atmos', 'ub', 'vb', 'uab', 'vab', 'elb', - 'etb', 'dt', 'uabw', 'uabe', 'vabs', 'vabn', 'els', 'eln', 'ele', 'elw', 'ssurf', 'tsurf', 'tbe', 'sbe', - 'sbw', 'tbw', 'tbn', 'tbs', 'sbn', 'sbs', 'wtsurf', 'swrad'] - -def load_var(file_obj, name): - """load variable from nc data file""" - data = file_obj.variables[name] - data = data[:] - data = np.float32(np.transpose(data, (2, 1, 0))) - return data - -def read_nc(file_path): - """ put the load variable into the dict """ - variable = {} - file_obj = nc.Dataset(file_path) - for name in params_name: - variable[name] = load_var(file_obj, name) - return variable -``` - -## 定义GOMO网络 - -GOMO模型基于动量、能量和质量守恒定律,推导微分方程组和边界条件,确定需要求解的7个方程组,详细的公式推导参考[论文](https://gmd.copernicus.org/articles/12/4729/2019/gmd-12-4729-2019.pdf)。图1是GOMO的整体执行流程图。 - -首先,从Seamount数据中加载数据,用于模型中变量的初始化。加载初始值和模型参数后,计算分为内模态循环和外模态循环两个部分。在外模态循环中,主要计算二维海表面高度el和二维平均风速ua、va。在内模态循环中,循环次数iend是训练的总时间步数(由用户输入设定),内模态循环的计算三维数组占主导地位,依次计算湍流动能q2和产生湍流动能的湍流长度q2l、温度t和盐度s、x和y方向的风速u和v。计算完成之后,保存所需的变量结果,结束训练。 - -![GOMO](images/gomo.png) - 
-图1:GOMO模型流程图 - -### 初始化变量 - -```python -... -from src.GOMO import GOMO_init -... -if __name__ == "__main__": - ... - # define grid and init variable update - net_init = GOMO_init(im, jm, kb, stencil_width) - ... -``` - -### 定义GOMO模型 - -```python -def construct(self, etf, ua, uab, va, vab, el, elb, d, u, v, w, kq, km, kh, q2, q2l, tb, t, sb, s, - rho, wubot, wvbot, ub, vb, egb, etb, dt, dhb, utb, vtb, vfluxb, et): - """construct""" - x_d, y_d, z_d = self.x_d, self.y_d, self.z_d - q2b, q2lb = self.q2b, self.q2lb - dx, dy = self.dx, self.dy - # surface forcing - w = w * (1 - self.z_h) + self.z_h * self.vfluxf - # lateral_viscosity - advx, advy, drhox, drhoy, aam = self.lateral_viscosity(dx, dy, u, v, dt, self.aam, ub, vb, x_d, y_d, z_d, rho, self.rmean) - # mode_interaction - adx2d, ady2d, drx2d, dry2d, aam2d, advua, advva, egf, utf, vtf = self.mode_interaction(advx, advy, drhox, drhoy, aam, x_d, y_d, d, uab, vab, ua, va, el) - # ===========external model=========== - vamax = 0 - elf = 0 - for iext in range(1, 31): - # external_el - elf = self.external_el(x_d, y_d, d, ua, va, elb) - # external_ua - advua, uaf = self.external_ua(iext, x_d, y_d, elf, d, ua, va, uab, vab, el, elb, advua, aam2d, adx2d, drx2d, wubot) - # external_va - advva, vaf = self.external_va(iext, x_d, y_d, elf, d, ua, va, uab, vab, el, elb, advva, aam2d, ady2d, dry2d, wvbot) - # external_update - etf, uab, ua, vab, va, elb, el, d, egf, utf, vtf, vamax = self.external_update(iext, etf, ua, uab, va, vab, el, elb, elf, uaf, vaf, egf, utf, vtf, d) - # ===========internal model=========== - if self.global_step != 0: - # adjust_uv - u, v = self.adjust_uv(u, v, utb, vtb, utf, vtf, dt) - # internal_w - w = self.internal_w(x_d, y_d, dt, u, v, etf, etb, vfluxb) - # internal_q - dhf, a, c, gg, ee, kq, km, kh, q2b_, q2, q2lb_, q2l = self.internal_q(x_d, y_d, z_d, etf, aam, q2b, q2lb, q2, q2l, kq, km, kh, u, v, w, dt, dhb, rho, wubot, wvbot, t, s) - q2b = ops.Assign()(self.q2b, q2b_) - q2lb = 
ops.Assign()(self.q2lb, q2lb_) - # internal_t_t - a, c, ee, gg, tb, t = self.internal_t_(t, tb, self.wtsurf, self.tsurf, self.swrad, self.tclim, self.tbe, self.tbw, self.tbn, self.tbs, x_d, y_d, z_d, dt, u, aam, self.h, self.dum, v, self.dvm, w, dhf, etf, a, kh, self.dzz, c, self.dzz1, ee, gg, dx, self.dz, dy, self.fsm, dhb) - # internal_t_s - a, c, ee, gg, sb, s = self.internal_t_(s, sb, self.wssurf, self.ssurf, self.swrad0, self.sclim, self.sbe, self.sbw, self.sbn, self.sbs, x_d, y_d, z_d, dt, u, aam, self.h, self.dum, v, self.dvm, w, dhf, etf, a, kh, self.dzz, c, self.dzz1, ee, gg, dx, self.dz, dy, self.fsm, dhb) - # dense - rho = self.dens(s, t, self.zz, self.h, self.fsm) - # internal_u - uf, a, c, gg, ee, wubot = self.internal_u(x_d, z_d, dhf, u, v, w, ub, vb, egf, egb, ee, gg, self.cbc, km, advx, drhox, dt, dhb) - # internal_v - vf, a, c, gg, ee, wvbot = self.internal_v(y_d, z_d, dhf, u, v, w, ub, vb, egf, egb, ee, gg, self.cbc, km, advy, drhoy, dt, dhb) - # adjust_ufvf - u, v, ub, vb = self.adjust_ufvf(u, v, uf, vf, ub, vb) - # internal_update - egb, etb, dt, dhb, utb, vtb, vfluxb, et = self.internal_update(egf, etb, utf, vtf, etf, et) - steps = ops.AssignAdd()(self.global_step, 1) - - return elf, etf, ua, uab, va, vab, el, elb, d, u, v, w, kq, km, kh, q2, q2l, tb, t, sb, s, rho, wubot, wvbot, \ - ub, vb, egb, etb, dt, dhb, utb, vtb, vfluxb, et, steps, vamax, q2b, q2lb -``` - -在`__main__`函数中调用定义好的GOMO模型: - -```python -... -from src.GOMO import GOMO -... -if __name__ == "__main__": - ... - # define GOMO model - Model = GOMO(im=im, jm=jm, kb=kb, stencil_width=stencil_width, variable=variable, x_d=x_d, y_d=y_d, z_d=z_d, - q2b=q2b, q2lb=q2lb, aam=aam, cbc=cbc, rmean=rmean) - ... 
-``` - -## 训练网络 - -### 运行脚本 - -训练脚本定义完成之后,调用`scripts`目录下的shell脚本,启动训练进程。 -使用以下命令运行脚本: - -```shell -sh run_distribute_train.sh -``` - -脚本需要传入变量`im`、`jm`、`kb`、`step`、`DATASET_PATH`,其中: - -- `im`,`jm`,`kb`:模拟的海洋区域分辨率,与使用的数据相关; -- `step`:训练的时间步数(与图1中的`iend`对应); -- `DATASET_PATH`:训练数据路径。 - -训练完后,训练过程中变量的变化值保存在`train/outputs`目录下,每隔5个时间步保存一次数据,主要保存了4个变量值,分别是东向的风速、北向的风速(单位是m/s),位温度(单位是K),海表面高度(单位是m)。 - -```bash -└─outputs - ├─u_5.npy - ├─v_5.npy - ├─t_5.npy - ├─et_5.npy - ├─u_10.npy - ├─v_10.npy - ├─t_10.npy - ├─et_10.npy - -``` - -其中, -`*.npy`:指保存的变量。文件名称具体含义:*变量名称*_*step数*.npy。 - -## 引用 - -1. Huang X, Huang X, Wang D, et al. OpenArray v1. 0: a simple operator library for the decoupling of ocean modeling and parallel computing[J]. Geoscientific Model Development, 2019, 12(11). -2. Blumberg A F, Mellor G L. A description of a three‐dimensional coastal ocean circulation model[J]. Three‐dimensional coastal ocean models, 1987, 4: 1-16. -3. Beckmann A, Haidvogel D B. Numerical simulation of flow around a tall isolated seamount. Part I: Problem formulation and model accuracy[J]. Journal of Physical Oceanography, 1993, 23(8): 1736-1753. 
diff --git a/tutorials/training/source_zh_cn/advanced_use/hpc_sponge.md b/tutorials/training/source_zh_cn/advanced_use/hpc_sponge.md deleted file mode 100644 index 869dd138f6a31bda1653e13b9007a3bcafd253e6..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/hpc_sponge.md +++ /dev/null @@ -1,198 +0,0 @@ -# SPONGE分子模拟实践 - -`Linux` `GPU` `模型开发` `高级` - - - -- [SPONGE分子模拟实践](#sponge分子模拟实践) - - [概述](#概述) - - [整体执行](#整体执行) - - [准备环节](#准备环节) - - [模拟多肽水溶液体系示例](#模拟多肽水溶液体系示例) - - [准备输入文件](#准备输入文件) - - [加载数据](#加载数据) - - [构建模拟流程](#构建模拟流程) - - [运行脚本](#运行脚本) - - [运行结果](#运行结果) - - -   - -## 概述 - -分子模拟是指利用计算机以原子水平的分子模型来模拟分子结构与行为,进而模拟分子体系的各种物理、化学性质的方法。它是在实验基础上,通过基本原理,构筑起一套模型和算法,从而计算出合理的分子结构与分子行为。 - -近年来,分子模拟技术发展迅速并且在多个学科领域得到了广泛的应用。在药物设计领域,可用于研究病毒、药物的作用机理等;在生物科学领域,可用于表征蛋白质的多级结构与性质;在材料学领域,可用于研究结构与力学性能、材料的优化设计等;在化学领域,可用于研究表面催化及机理;在石油化工领域,可用于分子筛催化剂结构表征、合成设计、吸附扩散,可构建和表征高分子链以及晶态或非晶态本体聚合物的结构,预测包括共混行为、机械性质、扩散、内聚等重要性质。 - -MindSpore版的SPONGE是北大和深圳湾实验室高毅勤课题组与华为MindSpore团队联合开发的分子模拟库,具有高性能、模块化等特性。基于MindSpore自动并行、图算融合等特性,SPONGE可高效地完成传统分子模拟过程。SPONGE利用MindSpore自动微分的特性,可以将神经网络等AI方法与传统分子模拟进行结合。 - -本篇教程将主要介绍如何在GPU上,使用MindSpore内置的SPONGE进行高性能分子模拟。 - -> 你可以在这里下载完整的示例代码:。 - -## 整体执行 - -1. 准备分子模拟输入文件,加载数据,确定计算的分子体系; -2. 定义 SPONGE 模块并初始化,确定计算流程; -3. 
运行训练脚本,输出模拟的热力学信息文件,并查看结果; - -## 准备环节 - -实践前,确保已经正确安装MindSpore。如果没有,可以通过[MindSpore安装页面](https://www.mindspore.cn/install)安装MindSpore。 - -## 模拟多肽水溶液体系示例 - -SPONGE具有高性能及易用的优势,本教程使用SPONGE模拟多肽水溶液体系。模拟体系为丙氨酸三肽水溶液体系。 - -### 准备输入文件 - -本教程模拟体系中需要加载三个输入文件,分别是: - -- 属性文件(后缀为`.in`的文件),声明模拟的基本条件,对整个模拟过程进行参数控制。 -- 拓扑文件(后缀为`.param7`的文件),拓扑文件描述的是体系内部分子的拓扑关系及各种参数。 -- 坐标文件(后缀为`.rst7`的文件),坐标文件描述的是每个原子在体系中的初始时刻的坐标。 - -拓扑文件和坐标文件可以通过建模过程由AmberTools中自带的tleap工具(下载地址, 遵守GPL协议)建模完成。建模过程如下: - -- 打开tleap - - ```bash - tleap - ``` - -- 加载tleap自带的ff14SB力场 - - ```bash - > source leaprc.protein.ff14SB - ``` - -- 搭建丙氨酸三肽模型 - - ```bash - > ala = sequence {ALA ALA ALA} - ``` - -- 利用tleap加载其自带的tip3p力场 - - ```bash - > source leaprc.water.tip3p - ``` - -- 利用tleap中的`slovatebox`溶解丙氨酸三肽链, 完成体系构建。`10.0`代表加入的水距离我们溶解的分子及体系边界至少在`10.0`埃以上 - - ```bash - > solvatebox ala TIP3PBOX 10.0 - ``` - -- 将建好的体系保存成`parm7`及`rst7`文件 - - ```bash - > saveamberparm ala ala.parm7 ala_350_cool_290.rst7 - ``` - -通过tleap构建了所需要的拓扑文件(`WATER_ALA.parm7`)和坐标文件(`WATER_ALA_350_cool_290.rst7`)后,需要通过属性文件声明模拟的基本条件,对整个模拟过程进行参数控制。以本教程中的属性文件`NVT_290_10ns.in`为例,其文件内容如下: - -```text -NVT 290k - mode = 1, # Simulation mode ; mode=1 for NVT ensemble - dt= 0.001, # Time step in picoseconds (ps). 
The time length of each MD step - step_limit = 1, # Total step limit, number of MD steps run - thermostat=1, # Thermostat for temperature ; thermostat=0 for Langevin thermostat - langevin_gamma=1.0, # Gamma_ln for Langevin thermostat represents coupling strength between thermostat and system - target_temperature=290, # Target temperature - write_information_interval=1000, # Output frequency - amber_irest=1, # Input style ; amber_irest=1 for using amber style input & rst7 file contains veclocity - cut=10.0, # Nonbonded cutoff distance in Angstroms -``` - -- `mode`,分子动力学(MD)模式,`1`表示模拟采用`NVT`系综。 -- `dt`,表示模拟步长。 -- `step_limit`,表示模拟总步数。 -- `thermostat`,表示控温方法,`1`表示采用的是`Liujian-Langevin`方法。 -- `langevin_gamma`,表示控温器中的`Gamma_ln`参数。 -- `target_temperature`,表示目标温度。 -- `amber_irest`,表示输入方式,`1`表示使用amber方式输入,并且`rst7`文件中包含`veclocity`属性。 -- `cut`,表示非键相互作用的距离。 - -### 加载数据 - -完成输入文件的构建后,将文件存放在本地工作区的`sponge_in`路径下,其目录结构如下: - -```text -└─sponge - ├─sponge_in - │ NVT_290_10ns.in # specific MD simulation setting - │ WATER_ALA.parm7 # topology file include atom & residue & bond & nonbond information - │ WATER_ALA_350_cool_290.rst7 # restart file record atom coordinate & velocity and box information -``` - -从三个输入文件中,读取模拟体系需要的参数,用于MindSpore的计算。加载代码如下: - -```python -import argparse -from mindspore import context - -parser = argparse.ArgumentParser(description='Sponge Controller') -parser.add_argument('--i', type=str, default=None, help='input file') -parser.add_argument('--amber_parm', type=str, default=None, help='paramter file in AMBER type') -parser.add_argument('--c', type=str, default=None, help='initial coordinates file') -parser.add_argument('--r', type=str, default="restrt", help='') -parser.add_argument('--x', type=str, default="mdcrd", help='') -parser.add_argument('--o', type=str, default="mdout", help="") -parser.add_argument('--box', type=str, default="mdbox", help='') -parser.add_argument('--device_id', type=int, default=0, help='') -args_opt = parser.parse_args() - 
-context.set_context(mode=context.GRAPH_MODE, device_target="GPU", device_id=args_opt.device_id, save_graphs=False) -``` - -### 构建模拟流程 - -使用SPONGE中定义的计算力模块和计算能量模块,通过多次迭代进行分子动力学过程演化,使得体系达到我们所需要的平衡态,并记录每一个模拟步骤中得到的能量等数据。为了方便起见,本教程的计算迭代次数设置为`1`,其模拟流程构建代码如下: - -```python -from src.simulation_initial import Simulation -from mindspore import Tensor - -if __name__ == "__main__": - simulation = Simulation(args_opt) - save_path = args_opt.o - for steps in range(simulation.md_info.step_limit): - print_step = steps % simulation.ntwx - if steps == simulation.md_info.step_limit - 1: - print_step = 0 - temperature, total_potential_energy, sigma_of_bond_ene, sigma_of_angle_ene, sigma_of_dihedral_ene, \ - nb14_lj_energy_sum, nb14_cf_energy_sum, LJ_energy_sum, ee_ene, _ = simulation(Tensor(steps), Tensor(print_step)) - # compute energy and temperature -``` - -### 运行脚本 - -执行以下命令,启动训练脚本`main.py`进行训练: - -```text -python main.py --i /path/NVT_290_10ns.in \ - --amber_parm /path/WATER_ALA.parm7 \ - --c /path/WATER_ALA_350_cool_290.rst7 \ - --o /path/ala_NVT_290_10ns.out -``` - -- -`i` 为MD模拟的属性文件,控制模拟过程 -- -`amber_parm` 为MD模拟体系的拓扑文件 -- -`c` 为我们输入的初始坐标文件 -- -`o` 为我们模拟输出的记录文件,其记录了输出每步的能量等信息 -- -`path` 为文件所在的路径,在本教程中为`sponge_in` - -训练过程中,使用属性文件(后缀为`.in`的文件)、拓扑文件(后缀为`.param7`的文件)以及坐标文件(后缀为`.rst7`的文件),通过在指定温度下进行模拟,计算力和能量,进行分子动力学过程演化。 - -### 运行结果 - -训练结束后,可以得到输出文件`ala_NVT_290_10ns.out`,体系能量变化被记录在了该文件中,可以查看模拟体系的热力学信息。查看`ala_NVT_290_10ns.out`可以看到如下内容: - -```text -_steps_ _TEMP_ _TOT_POT_ENE_ _BOND_ENE_ _ANGLE_ENE_ _DIHEDRAL_ENE_ _14LJ_ENE_ _14CF_ENE_ _LJ_ENE_ _CF_PME_ENE_ - 1 293.105 -6117.709 1204.406 7.096 4.491 3.456 44.018 1372.488 -8753.664 - ... 
-``` - -其中记录了模拟过程中输出的各类能量, 分别是迭代次数(_steps_),温度(_TEMP_),总能量(_TOT_POT_E_),键长(_BOND_ENE_),键角(_ANGLE_ENE_),二面角相互作用(_DIHEDRAL_ENE_),非键相互作用,其包含静电力及Leonard-Jones相互作用。 diff --git a/tutorials/training/source_zh_cn/advanced_use/hyper_parameters_auto_tuning.md b/tutorials/training/source_zh_cn/advanced_use/hyper_parameters_auto_tuning.md deleted file mode 100644 index b10fb85745325429c56e5732f36e055b887d8cac..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/hyper_parameters_auto_tuning.md +++ /dev/null @@ -1,212 +0,0 @@ -# 使用mindoptimizer进行超参调优 - -`Linux` `Ascend` `GPU` `CPU` `模型调优` `中级` `高级` - - - -- [使用mindoptimizer进行超参调优](#使用mindoptimizer进行超参调优) - - [概述](#概述) - - [安装](#安装) - - [用法](#用法) - - [配置文件规则说明](#配置文件规则说明) - - [使用示例](#使用示例) - - [注意事项](#注意事项) - - - -   - -## 概述 - -机器学习领域一般有两类参数,一类是模型内部参数,依靠训练数据来对模型参数进行调参,还有一类则是模型外部的设置参数,需要人工配置,这类参数被称为“超参数”。不同的超参数会对模型效果有不小的影响,因此超参在训练任务中的重要性较高。传统的方式都需要人工去调试和配置,这种方式消耗时间和精力。MindInsight调参功能可以用于搜索超参,基于用户给的调参配置信息,可以自动搜索参数并且执行模型训练。 - -MindInsight提供的`mindoptimizer`调参命令可以根据用户配置,从训练日志中提取以往训练记录,再对以往训练记录进行分析,推荐超参,最后自动执行训练脚本。用户在使用时需要按照yaml格式来配置超参的范围等信息,再参考本教程替换训练脚本中的超参,旨在将自动推荐的超参同步到训练脚本里面。当前仅支持高斯过程调参方法,其他方法敬请期待。 - -## 安装 - -此工具为MindInsight的子模块,安装MindInsight后,即可使用MindInsight调参命令,安装MindInsight请参考该[安装文档](https://gitee.com/mindspore/mindinsight/blob/master/README_CN.md#)。 - -## 用法 - -MindInsight提供调参命令,命令行(Command-line interface, CLI)的使用方式如下: - -```text -usage: mindoptimizer [-h] [--version] [--config CONFIG] - [--iter ITER] - -optional arguments: - -h, --help Shows the help message and exits. - --version Shows the program version and exits. - --config CONFIG Specifies the configuration file for parameter tuning. - The file format is yaml. - --iter ITER Specifies the times of automatic training. - Automatically recommended parameters are used every time - before the training is performed. - The default value of ITER is 1. 
-``` - -## 配置文件规则说明 - -调参配置文件的格式是yaml,需配置运行命令、训练日志根目录、调参方法、优化目标和超参数信息。其中超参数需要配置取值范围,类型和来源等。MindInsight会根据配置的超参数和优化目标从训练日志中取训练记录,如学习率和正确率,可以供推荐算法分析它们之间的关系,更好地推荐超参数。 - -1. 配置运行命令 - - 通过`command`来配置运行命令,如`command: python train.py`。在调参程序推荐出超参数后,运行命令会被直接执行。 - -2. 配置训练日志根目录 - - `summary_base_dir`是训练日志根目录,它用于训练记录的提取,这样可以更好地推荐超参。同时,建议用户在训练脚本中加`SummaryColletor`来收集训练信息,可查看[Summmary收集教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/summary_record.html)。调参命令会根据配置的`summary_base_dir`来生成子目录路径,可配置在`SummaryColletor`记录该次训练记录。自动执行训练后,会在训练日志根目录的子目录记录当次训练信息,产生的训练信息可以作为训练记录来推荐下一次需要的超参。配置`summary_base_dir`如`summary_base_dir: /home/summaries`。 - -3. 配置调参方法 - - 通过`name`配置调参方法的名字,通过`args`字段来配置这个调参方法的参数。 - - 当前采用的算法是高斯过程回归器(Gaussian process regressor, GP),这个算法可配置采集方法(Acquisition Function),可选,范围是[`ucb`, `pi`,`ei`],默认值为`ucb`。 - - - Upper confidence bound (UCB) - - Probability of improvement (PI) - - Expected improvement (EI) - - 示例: - - ```yaml - tuner: - name: gp - args: - method: ucb - ``` - -4. 配置调参目标 - - 用户可以选择loss或者自定义的评估指标作为调参的目标。 - - 配置说明: - - - group:可选,取值包括`system_defined`和`metric`,默认`system_defined`。使用`group`来配置优化目标所在的组,如loss是系统自定义收集字段,则是`system_defined`组;而其他在`Model()`中使用的评估指标,如`model = Model(net, loss_fn=loss, optimizer=None, metrics={'Accuracy'})`,`Accuracy`属于评估指标(metrics),因此组别是`metric`。 - - goal:可选,取值包括`minimize`、`maximize`,默认`minimize`。使用`goal`来表示该目标的优化方向,如正确率越高越好,即`goal`需要配置为`maximize`。 - - 配置loss: - - ```yaml - target: - name:loss - ``` - - 配置评估指标中的Accuracy: - - ```yaml - target: - group: metric - name: Accuracy - goal: maximize - ``` - -5. 
配置超参信息 - - 超参的配置字段:`bounds`、`choice`、`type`和`source`。这里配置的超参字段,会用于训练记录的提取和超参推荐。其中,`bounds`、`choice`和`type`会影响超参推荐,`bounds`配置了参数的上下界,`choice`表示推荐值从中选取,`type`则是配置了该参数的类型。 - - 目前系统自定义收集的可调字段包括`learning_rate`、`batch_size`和`epoch`。其余参数都为用户自定义参数,可配置为`user_defined`,将在训练时被自动收集在训练日志中。 - - - bounds: 列表,元素个数为2,第一个数为下界值min,第二个数为上界值max。范围是[min, max),生成随机数方法是`numpy.random.uniform()`。 - - choice:列表,个数不限,参数取值从这个列表中的元素中选取。 - - type:必填,取值为`int`或`float`。 - - source:可选,取值为`system_defined`或`user_defined`。如果是自动收集的字段,默认为`system_defined`;否则,默认为`user_defined`。 - - > `bounds`和`choice`有且仅有一个,必填。如果配置了`choice`,仅会从`choice`的列表中选取值;如果同时配置了`choice`和`type`,则`type`不生效。 - -## 使用示例 - -若用户要优化`learning_rate`、`batch_size`和`momentum`这几个超参数,且优化目标是`Accuracy`,则应按照如下示例配置yaml文件。 - -1. 配置config.yaml - - ```yaml - command: sh /home/example/run_alexnet_ascend.sh - summary_base_dir: /home/summaries - tuner: - name: gp - target: - group: metric - name: Accuracy - goal: maximize - parameters: - learning_rate: - bounds: [0.00001, 0.001] - type: float - batch_size: - choice: [32, 64, 128, 256] - type: int - momentum: - source: user_defined - choice: [0.8, 0.9] - type: float - ``` - - > `momentum`和系统定义的变量不存在重名问题,可不设置source这个字段。 - - **yaml配置同名字段会选取最后一个,请避免以下使用方式。** - - ```yaml - parameters: - learning_rate: - bounds: [0.0005, 0.001] - type: float - learning_rate: - source: user_defined - bounds: [0.00002, 0.0001] - type: float - ``` - -2. 
在训练脚本实例化`HyperConfig`对象 - - (1) 用户需要实例化`HyperConfig`,并使用`HyperConfig`实例的参数变量作为训练脚本中对应参数的取值。 - (2) 加上`SummaryCollector`来收集训练信息,包括超参和评估指标值等。 - - 如[Model Zoo](https://www.mindspore.cn/doc/note/zh-CN/master/network_list_ms.html)中的训练脚本: - - ```python - ds_train = create_dataset_cifar10(args.data_path, batch_size) - lr = Tensor(get_lr_cifar10(0, cfg.learning_rate, cfg.epoch_size, step_per_epoch)) - opt = nn.Momentum(network.trainable_params(), lr, cfg.momentum) - - model.train(cfg.epoch_size, ds_train, callbacks=[time_cb, ckpoint_cb, LossMonitor()] - ``` - - 修改后: - - ```python - from mindinsight.optimizer import HyperConfig - config = HyperConfig() - params = config.params - - # Replace batch_size with params.batch_size. - ds_train = create_dataset_cifar10(args.data_path, params.batch_size) - # Replace cfg.learning_rate with params.learning_rate. - lr = Tensor(get_lr_cifar10(0, params.learning_rate, cfg.epoch_size, step_per_epoch)) - # Replace cfg.momentum with params.momentum. - opt = nn.Momentum(network.trainable_params(), lr, params.momentum) - - # Instantiate SummaryCollector and add it to callback to automatically collect training information. - summary_cb = SummaryCollector(config.summary_dir) - model.train(cfg.epoch_size, ds_train, callbacks=[time_cb, ckpoint_cb, LossMonitor(), summary_cb] - ``` - -3. 运行 - - 在进行自动调参前请确保训练脚本可以正确执行。 - - ```shell - mindoptimizer --config ./config.yaml --iter 10 - ``` - - > 将执行训练的命令填写在配置文件中,在能够成功运行该训练命令的目录下运行mindoptimizer程序。 - -4. 可视化 - - 基于config.yaml里面配置的summary_base_dir来启动MindInsight,启动方法可以查看[MindInsight启动命令](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/mindinsight_commands.html#id3)。 - -## 注意事项 - -1. 训练脚本由用户编写和维护,本工具不会自动修改训练脚本,如果训练脚本本身有错误,则使用本工具支持训练脚本时也会出错; -2. 本工具不对运行过程中的打印信息进行处理或修改; -3. 
本工具需要确保调参过程可信,参数配置错误或脚本执行错误都会终止调参过程,用户可根据相应的提示来进行问题定位。 diff --git a/tutorials/training/source_zh_cn/advanced_use/images/adv_attack_result.png b/tutorials/training/source_zh_cn/advanced_use/images/adv_attack_result.png deleted file mode 100644 index 3169f95a7821e1a936a23d7c0cb54e28c8d0b858..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/adv_attack_result.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/ansatz.png b/tutorials/training/source_zh_cn/advanced_use/images/ansatz.png deleted file mode 100644 index 237bc0476f2da2689e48db12c6aa0a94b97937a8..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/ansatz.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/auto_augmentation.png b/tutorials/training/source_zh_cn/advanced_use/images/auto_augmentation.png deleted file mode 100644 index 3daa904f181d2c7a6a2b6f7f2271c8e33f2ba933..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/auto_augmentation.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/bert_model.PNG b/tutorials/training/source_zh_cn/advanced_use/images/bert_model.PNG deleted file mode 100644 index 8dddbe6be41ae9ae4cd5fef0ea7e4ab1b98a46eb..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/bert_model.PNG and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/checkpoint_integrate_process.pptx b/tutorials/training/source_zh_cn/advanced_use/images/checkpoint_integrate_process.pptx deleted file mode 100644 index b7aa34a541094be83b0175f34f4f80d507cdfc8f..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/checkpoint_integrate_process.pptx and /dev/null differ diff --git 
a/tutorials/training/source_zh_cn/advanced_use/images/checkpoint_integration_process.jpg b/tutorials/training/source_zh_cn/advanced_use/images/checkpoint_integration_process.jpg deleted file mode 100644 index b7b957162ed69c2d41825eea508116f01181b8f0..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/checkpoint_integration_process.jpg and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/cifar10.jpg b/tutorials/training/source_zh_cn/advanced_use/images/cifar10.jpg deleted file mode 100644 index 90057bb785d1be501b133985bd56bc7608d9051b..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/cifar10.jpg and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/classical_nlp_loss.png b/tutorials/training/source_zh_cn/advanced_use/images/classical_nlp_loss.png deleted file mode 100644 index 47745d5c5254253d814ad67d8d8daaaad56ff6b9..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/classical_nlp_loss.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/cloud_train_job1.png b/tutorials/training/source_zh_cn/advanced_use/images/cloud_train_job1.png deleted file mode 100644 index 9feb0e5388843e7d2005d9fd9c3b8dc8a2f4b17e..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/cloud_train_job1.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/cloud_train_job2.png b/tutorials/training/source_zh_cn/advanced_use/images/cloud_train_job2.png deleted file mode 100644 index 9932bc8e141d95dcaa986f78afac09833d0a43c5..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/cloud_train_job2.png and /dev/null differ diff --git 
a/tutorials/training/source_zh_cn/advanced_use/images/cluster_iterative_trajectory.png b/tutorials/training/source_zh_cn/advanced_use/images/cluster_iterative_trajectory.png deleted file mode 100644 index ca6f6f366d7c7bf68e3e6754edc021e4f7a275a0..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/cluster_iterative_trajectory.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/compose.png b/tutorials/training/source_zh_cn/advanced_use/images/compose.png deleted file mode 100644 index 7f49413a61f0136e000843ced9b6076101850a46..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/compose.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/data_chart.png b/tutorials/training/source_zh_cn/advanced_use/images/data_chart.png deleted file mode 100644 index 017e09898f12a48f4db201bfa0b526ab1d2bac23..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/data_chart.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/data_conversion_concept.png b/tutorials/training/source_zh_cn/advanced_use/images/data_conversion_concept.png deleted file mode 100644 index da42c23585c902f715e7f6825b430866c2a01a67..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/data_conversion_concept.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/data_conversion_concept.pptx b/tutorials/training/source_zh_cn/advanced_use/images/data_conversion_concept.pptx deleted file mode 100644 index 8468f7686f2d73aeae68ed5f6307e4f2bee5bc46..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/data_conversion_concept.pptx and /dev/null differ diff --git 
a/tutorials/training/source_zh_cn/advanced_use/images/data_enhancement_performance_scheme.png b/tutorials/training/source_zh_cn/advanced_use/images/data_enhancement_performance_scheme.png deleted file mode 100644 index d48b912adec16f0c4b773357edcc35cdd273f6a3..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/data_enhancement_performance_scheme.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/data_function.png b/tutorials/training/source_zh_cn/advanced_use/images/data_function.png deleted file mode 100644 index 27d5a5765b8ee324fcdf235873d8661c526a6710..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/data_function.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/data_label.png b/tutorials/training/source_zh_cn/advanced_use/images/data_label.png deleted file mode 100644 index 4b7601ea6feb5218dcc91907260fb81bd4d89efd..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/data_label.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/data_loading_performance_scheme.png b/tutorials/training/source_zh_cn/advanced_use/images/data_loading_performance_scheme.png deleted file mode 100644 index a9dd7efba57b09b3002c8f8cfe72900ee03a09d6..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/data_loading_performance_scheme.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/data_op_profile.png b/tutorials/training/source_zh_cn/advanced_use/images/data_op_profile.png deleted file mode 100644 index 288325d5955b8362e4942d8bd56c522804f51840..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/data_op_profile.png and /dev/null differ diff --git 
a/tutorials/training/source_zh_cn/advanced_use/images/data_op_utilization.png b/tutorials/training/source_zh_cn/advanced_use/images/data_op_utilization.png deleted file mode 100644 index 6d8ee6d8ec4b535848bf4486b341cb33b457cfd8..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/data_op_utilization.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/data_profile.png b/tutorials/training/source_zh_cn/advanced_use/images/data_profile.png deleted file mode 100644 index d38f496112e8c12aac76a342822e5fdbdebb0520..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/data_profile.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/data_table.png b/tutorials/training/source_zh_cn/advanced_use/images/data_table.png deleted file mode 100644 index 58d7ebfde31d83bc7ee3789f9ded9318f83486ab..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/data_table.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/debugger_ask_recommend.png b/tutorials/training/source_zh_cn/advanced_use/images/debugger_ask_recommend.png deleted file mode 100755 index 95af3226a83d5def01c3d0cb8d17ba1e3315e638..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/debugger_ask_recommend.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/debugger_init_page.png b/tutorials/training/source_zh_cn/advanced_use/images/debugger_init_page.png deleted file mode 100755 index dc724b0c58e5e0ea9c3b775f83ea76a2349de9d1..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/debugger_init_page.png and /dev/null differ diff --git 
a/tutorials/training/source_zh_cn/advanced_use/images/debugger_search_node_type.png b/tutorials/training/source_zh_cn/advanced_use/images/debugger_search_node_type.png deleted file mode 100755 index 8e23df5ae9eaea10baea89f54360b18eed1aeb0d..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/debugger_search_node_type.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/debugger_set_watch_point.png b/tutorials/training/source_zh_cn/advanced_use/images/debugger_set_watch_point.png deleted file mode 100755 index ab135b12017e1b6918460a52502398849b803fa4..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/debugger_set_watch_point.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/debugger_tensor_view.png b/tutorials/training/source_zh_cn/advanced_use/images/debugger_tensor_view.png deleted file mode 100755 index d7bc1fcbbc1e767be07c374d3d298e43160a637d..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/debugger_tensor_view.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/debugger_waiting.png b/tutorials/training/source_zh_cn/advanced_use/images/debugger_waiting.png deleted file mode 100755 index 63bb4d6066fb81eb1629ba7ac545f69114296ff4..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/debugger_waiting.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/debugger_watch_point_hit.png b/tutorials/training/source_zh_cn/advanced_use/images/debugger_watch_point_hit.png deleted file mode 100755 index 87dfeb02082e7a518bd6cba31224b73d5d7bdd6e..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/debugger_watch_point_hit.png and /dev/null differ 
diff --git a/tutorials/training/source_zh_cn/advanced_use/images/debugger_watch_point_list.png b/tutorials/training/source_zh_cn/advanced_use/images/debugger_watch_point_list.png deleted file mode 100755 index c862c0ec046d6c54aff6dcd7822e5b50d0b744cc..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/debugger_watch_point_list.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/device_cpu_utilization.png b/tutorials/training/source_zh_cn/advanced_use/images/device_cpu_utilization.png deleted file mode 100644 index 29b3475057e0e0209fe90380313b24ede17696ae..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/device_cpu_utilization.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/encoder.png b/tutorials/training/source_zh_cn/advanced_use/images/encoder.png deleted file mode 100644 index 3323891c1f359f658d85b58c48048e9acebb2113..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/encoder.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/evaluate_the_model_during_training.png b/tutorials/training/source_zh_cn/advanced_use/images/evaluate_the_model_during_training.png deleted file mode 100644 index cbecb6c9739eaf047c89ea79f9d596a2793e6283..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/evaluate_the_model_during_training.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/finetune.PNG b/tutorials/training/source_zh_cn/advanced_use/images/finetune.PNG deleted file mode 100644 index cbdc6263669c5a40c910af9f5f2483d0d1137455..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/finetune.PNG and /dev/null differ diff --git 
a/tutorials/training/source_zh_cn/advanced_use/images/fuzz_res.png b/tutorials/training/source_zh_cn/advanced_use/images/fuzz_res.png deleted file mode 100644 index be6d022850438ff4b9c070f7225cbd950e1e3686..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/fuzz_res.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/fuzz_seed.png b/tutorials/training/source_zh_cn/advanced_use/images/fuzz_seed.png deleted file mode 100644 index cb138aebfabea1a1f778fbb65b6a0ee4533974e2..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/fuzz_seed.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/gomo.png b/tutorials/training/source_zh_cn/advanced_use/images/gomo.png deleted file mode 100644 index aafca76b6cd6e8c90c9d6ad5dedde71795dd1422..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/gomo.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/gomo_diagram.pptx b/tutorials/training/source_zh_cn/advanced_use/images/gomo_diagram.pptx deleted file mode 100644 index 2e3dbd6985d867df42858c1d10d408b7ec04d6a8..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/gomo_diagram.pptx and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/gpu_activity_profiler.png b/tutorials/training/source_zh_cn/advanced_use/images/gpu_activity_profiler.png deleted file mode 100644 index 633599d845ffc1f308d704540dc501b5288038f4..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/gpu_activity_profiler.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/gpu_op_ui_profiler.png b/tutorials/training/source_zh_cn/advanced_use/images/gpu_op_ui_profiler.png 
deleted file mode 100644 index e8e1dcaacf5c1dbd80dafe9e634f60db1048efb9..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/gpu_op_ui_profiler.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/graph.png b/tutorials/training/source_zh_cn/advanced_use/images/graph.png deleted file mode 100644 index 0bc13636b5c84952978469c652c38500e6d34f43..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/graph.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/graph_kernel_example_custom_op.png b/tutorials/training/source_zh_cn/advanced_use/images/graph_kernel_example_custom_op.png deleted file mode 100644 index 22dfddef960b7792bfa181a1ad0356c6aed97c53..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/graph_kernel_example_custom_op.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/graph_kernel_example_fuse_basic.png b/tutorials/training/source_zh_cn/advanced_use/images/graph_kernel_example_fuse_basic.png deleted file mode 100644 index d446ff7b00111b8e5e61d9540397fb1e1cf79afc..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/graph_kernel_example_fuse_basic.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/graph_sidebar.png b/tutorials/training/source_zh_cn/advanced_use/images/graph_sidebar.png deleted file mode 100644 index 1cfab2911877ed6a51097f0e7bac880479143e26..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/graph_sidebar.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/histogram.png b/tutorials/training/source_zh_cn/advanced_use/images/histogram.png deleted file mode 100644 index 
5b240083bd7fe971b55537386e5e4ca6090a6bd2..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/histogram.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/histogram_func.png b/tutorials/training/source_zh_cn/advanced_use/images/histogram_func.png deleted file mode 100644 index 5e30875d0efdab22a326207b4d4c65c8867fefeb..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/histogram_func.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/image_function.png b/tutorials/training/source_zh_cn/advanced_use/images/image_function.png deleted file mode 100644 index 214e6b3927a1098456cabc6b70083b6365c85298..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/image_function.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/image_vi.png b/tutorials/training/source_zh_cn/advanced_use/images/image_vi.png deleted file mode 100644 index d1924d71c670e02f22eb878a8c3794bde630f178..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/image_vi.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/introduce.PNG b/tutorials/training/source_zh_cn/advanced_use/images/introduce.PNG deleted file mode 100644 index 889f5066faf9258a21477287aba87e4ef49c4eb3..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/introduce.PNG and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/introduce.svg b/tutorials/training/source_zh_cn/advanced_use/images/introduce.svg deleted file mode 100644 index c46702bd0eafc4bbbaf2707ccb2002fa24bca850..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/images/introduce.svg 
+++ /dev/null @@ -1 +0,0 @@ -诗词数据集预训练预训练模型Fine-TuningModelServiceMindSpore Serving裸机Docker部署数据预处理数据后处理ClientsHttpModelHub \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/advanced_use/images/inversion_ordinary.png b/tutorials/training/source_zh_cn/advanced_use/images/inversion_ordinary.png deleted file mode 100644 index 17eb401f8f66603544e315dc2e305172448f3d4a..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/inversion_ordinary.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/inversion_sup.png b/tutorials/training/source_zh_cn/advanced_use/images/inversion_sup.png deleted file mode 100644 index ba32b3b9f5b92ea00ba574afbde3a70ee0c63ca0..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/inversion_sup.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/lineage_label.png b/tutorials/training/source_zh_cn/advanced_use/images/lineage_label.png deleted file mode 100644 index 5b06ae43bdfc0a1488d0065644f541609713dcec..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/lineage_label.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/lineage_model_chart.png b/tutorials/training/source_zh_cn/advanced_use/images/lineage_model_chart.png deleted file mode 100644 index dd6bbcfc698dd38ec7fba3f2939972fcfefdc662..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/lineage_model_chart.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/lineage_model_table.png b/tutorials/training/source_zh_cn/advanced_use/images/lineage_model_table.png deleted file mode 100644 index 3ebdb9f480b5b04e1d11a98dcac299192dd1c578..0000000000000000000000000000000000000000 Binary files 
a/tutorials/training/source_zh_cn/advanced_use/images/lineage_model_table.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/memory.png b/tutorials/training/source_zh_cn/advanced_use/images/memory.png deleted file mode 100644 index e88c84595d5baa25c08fa0d02e302338bbd9d1b0..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/memory.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/memory_graphics.png b/tutorials/training/source_zh_cn/advanced_use/images/memory_graphics.png deleted file mode 100644 index abd7125e09fae2c181102bec26536344b7c07a80..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/memory_graphics.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/mindrecord.png b/tutorials/training/source_zh_cn/advanced_use/images/mindrecord.png deleted file mode 100644 index 809ae5bc06e60ef8139034213134ca27d4a9abb7..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/mindrecord.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/mindrecord.pptx b/tutorials/training/source_zh_cn/advanced_use/images/mindrecord.pptx deleted file mode 100644 index fd234bb19eccc4eae312504826de178a18883b1f..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/mindrecord.pptx and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/mix_precision.PNG b/tutorials/training/source_zh_cn/advanced_use/images/mix_precision.PNG deleted file mode 100644 index 93fa9ab6863f1dcd7076bc99c00bfacf3a3109fc..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/mix_precision.PNG and /dev/null differ diff --git 
a/tutorials/training/source_zh_cn/advanced_use/images/mix_precision.eddx b/tutorials/training/source_zh_cn/advanced_use/images/mix_precision.eddx deleted file mode 100644 index cfec9db4f1651e09cfb65e1c2b1bc1b1eef1fe2e..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/mix_precision.eddx and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/multi_scalars.png b/tutorials/training/source_zh_cn/advanced_use/images/multi_scalars.png deleted file mode 100644 index 0ea23d83bdc44a72d5118950575758ceb1178803..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/multi_scalars.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/multi_scalars_select.png b/tutorials/training/source_zh_cn/advanced_use/images/multi_scalars_select.png deleted file mode 100644 index 104eae240dbd7518adc85ff2cb265c22dd6cb39c..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/multi_scalars_select.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/nlp_loss.png b/tutorials/training/source_zh_cn/advanced_use/images/nlp_loss.png deleted file mode 100644 index 920c4d270cef33660ef7d0867d412494f1415c5a..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/nlp_loss.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/op_statistics.PNG b/tutorials/training/source_zh_cn/advanced_use/images/op_statistics.PNG deleted file mode 100644 index fb9c9da03ed16976877539b9a75f0591463a1dc3..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/op_statistics.PNG and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/op_type_statistics.PNG 
b/tutorials/training/source_zh_cn/advanced_use/images/op_type_statistics.PNG deleted file mode 100644 index c4aea613f27f0bcda34e0b1ae1cf19a3c7b71f75..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/op_type_statistics.PNG and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/operator_fusion.png b/tutorials/training/source_zh_cn/advanced_use/images/operator_fusion.png deleted file mode 100644 index 2152b7381711ee58b7342f72412da2fcc019316f..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/operator_fusion.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/performance_overall.png b/tutorials/training/source_zh_cn/advanced_use/images/performance_overall.png deleted file mode 100644 index e6846a725cff0e61a0beb92e93502312eee8483c..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/performance_overall.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/pipeline.png b/tutorials/training/source_zh_cn/advanced_use/images/pipeline.png deleted file mode 100644 index 997d92b20ed383550a8ad30da96e5d3c764c5b66..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/pipeline.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/process_cpu_utilizaton.png b/tutorials/training/source_zh_cn/advanced_use/images/process_cpu_utilizaton.png deleted file mode 100644 index b2997e2a67f355724c0f2519187aa9edf7ee4548..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/process_cpu_utilizaton.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/qcbow.png b/tutorials/training/source_zh_cn/advanced_use/images/qcbow.png deleted file mode 
100644 index 2f47d84164848a7ff2709d01ee14426a74c4ba4f..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/qcbow.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/quantum_circuit.png b/tutorials/training/source_zh_cn/advanced_use/images/quantum_circuit.png deleted file mode 100644 index 5c566a46942448d0d600a1207376d880a44087e1..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/quantum_circuit.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/resource_visibility.png b/tutorials/training/source_zh_cn/advanced_use/images/resource_visibility.png deleted file mode 100644 index 0906f411a6467b7ab1f44ffccb5ab2fa8509bf91..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/resource_visibility.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/resource_visibility_gpu.png b/tutorials/training/source_zh_cn/advanced_use/images/resource_visibility_gpu.png deleted file mode 100644 index 6aa71938ca73399ca80523f4e379f28efcf90225..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/resource_visibility_gpu.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/scalar.png b/tutorials/training/source_zh_cn/advanced_use/images/scalar.png deleted file mode 100644 index 73a1e08e4109d50820025a3df5552f5a6320116c..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/scalar.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/scalar_compound.png b/tutorials/training/source_zh_cn/advanced_use/images/scalar_compound.png deleted file mode 100644 index 8813a59f7551f7ab239e6103e1e9ef14ec4e2add..0000000000000000000000000000000000000000 
Binary files a/tutorials/training/source_zh_cn/advanced_use/images/scalar_compound.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/scalar_select.png b/tutorials/training/source_zh_cn/advanced_use/images/scalar_select.png deleted file mode 100644 index a74f1f651338718a4e8f5ba171c47069e569ee2f..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/scalar_select.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/shuffle.png b/tutorials/training/source_zh_cn/advanced_use/images/shuffle.png deleted file mode 100644 index c31b9a250e61e91898429fd680ba37cfaa585c65..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/shuffle.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/shuffle_performance_scheme.png b/tutorials/training/source_zh_cn/advanced_use/images/shuffle_performance_scheme.png deleted file mode 100644 index 6a83512e5df78bc92ed614ec68526d41dda870a2..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/shuffle_performance_scheme.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/simple_qnn.png b/tutorials/training/source_zh_cn/advanced_use/images/simple_qnn.png deleted file mode 100644 index 05d57ae0e8e9b267ca58b46385b5ca23de0c06aa..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/simple_qnn.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/single_car_performance_overall.png b/tutorials/training/source_zh_cn/advanced_use/images/single_car_performance_overall.png deleted file mode 100644 index 0e72019145bb9da4fca48b6d6cfd92dbe5d96920..0000000000000000000000000000000000000000 Binary files 
a/tutorials/training/source_zh_cn/advanced_use/images/single_car_performance_overall.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/step_trace.png b/tutorials/training/source_zh_cn/advanced_use/images/step_trace.png deleted file mode 100644 index 1feace7a12db61c4da2b04b149715239dbe8db60..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/step_trace.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/targets.png b/tutorials/training/source_zh_cn/advanced_use/images/targets.png deleted file mode 100644 index f717a5c8fc80dc7c5be7dbb560c8f678e6c4474f..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/targets.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/tensor_function.png b/tutorials/training/source_zh_cn/advanced_use/images/tensor_function.png deleted file mode 100644 index ab0ad58219181c782c65c396577d2b030b6a8d19..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/tensor_function.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/tensor_histogram.png b/tutorials/training/source_zh_cn/advanced_use/images/tensor_histogram.png deleted file mode 100644 index 967a452efde4efc9f464782244f4e790417b7122..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/tensor_histogram.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/tensor_table.png b/tutorials/training/source_zh_cn/advanced_use/images/tensor_table.png deleted file mode 100644 index d04dffae59fd6f9e49aede94bae93f8b8621fcb0..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/tensor_table.png and /dev/null differ diff --git 
a/tutorials/training/source_zh_cn/advanced_use/images/timeline.png b/tutorials/training/source_zh_cn/advanced_use/images/timeline.png deleted file mode 100644 index 19c60e104169d86f1022758eda15bbc9c8a0dcf6..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/timeline.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/train_log_1_Ascend.png b/tutorials/training/source_zh_cn/advanced_use/images/train_log_1_Ascend.png deleted file mode 100644 index c44f557359ec6340b0009e7935aefe81df3ee3b2..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/train_log_1_Ascend.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/train_log_8_Ascend.png b/tutorials/training/source_zh_cn/advanced_use/images/train_log_8_Ascend.png deleted file mode 100644 index b2b7429c2fd169b92b18968fff4f8c07802d6a52..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/train_log_8_Ascend.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/train_log_8_Ascend_clu.png b/tutorials/training/source_zh_cn/advanced_use/images/train_log_8_Ascend_clu.png deleted file mode 100644 index fb24ae6e349ac3a8891ebc0f8610f46682421232..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/train_log_8_Ascend_clu.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/xai_hoc.png b/tutorials/training/source_zh_cn/advanced_use/images/xai_hoc.png deleted file mode 100644 index fffeef14f0af7fab7e4edae4a618adaabd54843a..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/xai_hoc.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/xai_hoc_index.png 
b/tutorials/training/source_zh_cn/advanced_use/images/xai_hoc_index.png deleted file mode 100644 index ca6eee818f2d37ffe5677ea4e8127482cfab0a64..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/xai_hoc_index.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/xai_index.png b/tutorials/training/source_zh_cn/advanced_use/images/xai_index.png deleted file mode 100644 index 31ee4cb7d7bc3f3a1a366cf5d6ee1365a29d3cbf..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/xai_index.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/xai_metrix_class.png b/tutorials/training/source_zh_cn/advanced_use/images/xai_metrix_class.png deleted file mode 100644 index ef4ffc4090bc3f78f7720f1057d48c5ffa8c9170..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/xai_metrix_class.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/xai_metrix_comprehensive.png b/tutorials/training/source_zh_cn/advanced_use/images/xai_metrix_comprehensive.png deleted file mode 100644 index c266ad77f5ea2cf0e7a124e4fb60a4f133afa0a7..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/xai_metrix_comprehensive.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/xai_saliency_map.png b/tutorials/training/source_zh_cn/advanced_use/images/xai_saliency_map.png deleted file mode 100644 index 6f9d5909f94093274d44e26cd575394f5a383692..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/xai_saliency_map.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/xai_saliency_map_detail.png 
b/tutorials/training/source_zh_cn/advanced_use/images/xai_saliency_map_detail.png deleted file mode 100644 index 3247dca079922e039dd8119a3c0b4ded4584df54..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/xai_saliency_map_detail.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/images/xai_uncertainty.png b/tutorials/training/source_zh_cn/advanced_use/images/xai_uncertainty.png deleted file mode 100644 index 888a9410908fa6b8ae64de78313f546413aa344a..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/advanced_use/images/xai_uncertainty.png and /dev/null differ diff --git a/tutorials/training/source_zh_cn/advanced_use/implement_high_order_differentiation.md b/tutorials/training/source_zh_cn/advanced_use/implement_high_order_differentiation.md deleted file mode 100644 index 8a4b83811a4714dc8a32a4c2ae6429c446dd8f05..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/implement_high_order_differentiation.md +++ /dev/null @@ -1,401 +0,0 @@ -# 实现高阶自动微分 - -`CPU` `GPU` `Ascend` `全流程` `初级` `中级` `高级` - - - -- [实现高阶自动微分](#实现高阶自动微分) - - [概述](#概述) - - [一阶求导](#一阶求导) - - [输入求导](#输入求导) - - [权重求导](#权重求导) - - [梯度值缩放](#梯度值缩放) - - [高阶求导](#高阶求导) - - [单输入单输出高阶导数](#单输入单输出高阶导数) - - [单输入多输出高阶导数](#单输入多输出高阶导数) - - [多输入多输出高阶导数](#多输入多输出高阶导数) - - [二阶微分算子支持情况](#二阶微分算子支持情况) - - [引用](#引用) - - - - - -## 概述 - -高阶微分在AI支持科学计算、二阶优化等领域均有应用。如分子动力学模拟中,利用神经网络训练势能时[1],损失函数中需计算神经网络输出对输入的导数,则反向传播便存在损失函数对输入、权重的二阶交叉导数;此外,AI求解微分方程(如PINNs[2]方法)还会存在输出对输入的二阶导数。又如二阶优化中,为了能够让神经网络快速收敛,牛顿法等需计算损失函数对权重的二阶导数。以下将主要介绍MindSpore图模式下的高阶导数。 - -> 完整样例代码见:[导数样例代码](https://gitee.com/mindspore/docs/tree/master/tutorials/tutorial_code) - -## 一阶求导 - -首先回顾下MindSpore计算一阶导数方法`mindspore.ops.GradOperation (get_all=False, get_by_list=False, 
sens_param=False)`,其中`get_all`为`False`时,只会对第一个输入求导,为`True`时,会对所有输入求导;`get_by_list`为`False`时,不会对权重求导,为`True`时,会对权重求导;`sens_param`对网络的输出值做缩放以改变最终梯度,故其维度与输出维度保持一致。下面用MatMul算子的一阶求导做深入分析。 - -### 输入求导 - -对输入求导代码如下: - -```python -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import ParameterTuple, Parameter -from mindspore import dtype as mstype -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.matmul = ops.MatMul() - self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z') - def construct(self, x, y): - x = x * self.z - out = self.matmul(x, y) - return out - -class GradNetWrtX(nn.Cell): - def __init__(self, net): - super(GradNetWrtX, self).__init__() - self.net = net - self.grad_op = ops.GradOperation() - def construct(self, x, y): - gradient_function = self.grad_op(self.net) - return gradient_function(x, y) - -x = Tensor([[0.8, 0.6, 0.2], [1.8, 1.3, 1.1]], dtype=mstype.float32) -y = Tensor([[0.11, 3.3, 1.1], [1.1, 0.2, 1.4], [1.1, 2.2, 0.3]], dtype=mstype.float32) -output = GradNetWrtX(Net())(x, y) -print(output) -``` - -输出结果如下: - -```python -[[4.5099998 2.7 3.6000001] - [4.5099998 2.7 3.6000001]] -``` - -为便于分析,输入`x`、`y`以及权重`z`可以表示成如下形式: - -```python -x = Tensor([[x1, x2, x3], [x4, x5, x6]]) -y = Tensor([[y1, y2, y3], [y4, y5, y6], [y7, y8, y9]]) -z = Tensor([z]) -``` - -根据MatMul算子定义可得前向结果: - -$output = [[(x1 \cdot y1 + x2 \cdot y4 + x3 \cdot y7) \cdot z, (x1 \cdot y2 + x2 \cdot y5 + x3 \cdot y8) \cdot z, (x1 \cdot y3 + x2 \cdot y6 + x3 \cdot y9) \cdot z]$, - -$[(x4 \cdot y1 + x5 \cdot y4 + x6 \cdot y7) \cdot z, (x4 \cdot y2 + x5 \cdot y5 + x6 \cdot y8) \cdot z, (x4 \cdot y3 + x5 \cdot y6 + x6 \cdot y9) \cdot z]]$ - -梯度计算时由于MindSpore采用的是Reverse[3]自动微分机制,会对输出结果求和后再对输入`x`求导: - -(1) 求和公式: - -$\sum{output} = [(x1 \cdot y1 + x2 \cdot y4 + x3 \cdot y7) + (x1 
\cdot y2 + x2 \cdot y5 + x3 \cdot y8) + (x1 \cdot y3 + x2 \cdot y6 + x3 \cdot y9) +$ - -$(x4 \cdot y1 + x5 \cdot y4 + x6 \cdot y7) + (x4 \cdot y2 + x5 \cdot y5 + x6 \cdot y8) + (x4 \cdot y3 + x5 \cdot y6 + x6 \cdot y9)] \cdot z$ - -(2) 求导公式: - -$\frac{\mathrm{d}(\sum{output})}{\mathrm{d}x} = [[(y1 + y2 + y3) \cdot z,(y4 + y5 + y6) \cdot z,(y7 + y8 + y9) \cdot z],[(y1 + y2 + y3) \cdot z,(y4 + y5 + y6) \cdot z,(y7 + y8 + y9) \cdot z]]$ - -(3) 计算结果: - -$\frac{\mathrm{d}(\sum{output})}{\mathrm{d}x} = [[4.5099998 \quad 2.7 \quad 3.6000001] [4.5099998 \quad 2.7 \quad 3.6000001]]$ - -若考虑对`x`、`y`输入求导,只需在`GradNetWrtX`中设置`self.grad_op = GradOperation(get_all=True)`。 - -### 权重求导 - -若考虑对权重的求导,将`GradNetWrtX`修改成: - -```python -class GradNetWrtX(nn.Cell): - def __init__(self, net): - super(GradNetWrtX, self).__init__() - self.net = net - self.params = ParameterTuple(net.trainable_params()) - self.grad_op = ops.GradOperation(get_by_list=True) - def construct(self, x, y): - gradient_function = self.grad_op(self.net, self.params) - return gradient_function(x, y) -``` - -```python -output = GradNetWrtX(Net())(x, y) -print(output) -``` - -输出结果如下: - -```python -(Tensor(shape=[1], dtype=Float32, value= [ 2.15359993e+01]),) -``` - -求导公式变为: - -$\frac{\mathrm{d}(\sum{output})}{\mathrm{d}z} = (x1 \cdot y1 + x2 \cdot y4 + x3 \cdot y7) + (x1 \cdot y2 + x2 \cdot y5 + x3 \cdot y8) + (x1 \cdot y3 + x2 \cdot y6 + x3 \cdot y9) + $ - -$(x4 \cdot y1 + x5 \cdot y4 + x6 \cdot y7) + (x4 \cdot y2 + x5 \cdot y5 + x6 \cdot y8) + (x4 \cdot y3 + x5 \cdot y6 + x6 \cdot y9)$ - -计算结果: - -$\frac{\mathrm{d}(\sum{output})}{\mathrm{d}z} = [2.15359993e+01]$ - -### 梯度值缩放 - -可以通过`sens_param`参数控制梯度值的缩放: - -```python -class GradNetWrtX(nn.Cell): - def __init__(self, net): - super(GradNetWrtX, self).__init__() - self.net = net - self.grad_op = ops.GradOperation(sens_param=True) - self.grad_wrt_output = Tensor([[0.1, 0.6, 0.2], [0.8, 1.3, 1.1]], dtype=mstype.float32) - def construct(self, x, y): - gradient_function = 
self.grad_op(self.net) - return gradient_function(x, y, self.grad_wrt_output) -``` - -```python -output = GradNetWrtX(Net())(x, y) -print(output) -``` - -输出结果如下: - -```python -[[2.211 0.51 1.49 ] - [5.588 2.68 4.07 ]] -``` - -`self.grad_wrt_output`可以记作如下形式: - -```python -self.grad_wrt_output = Tensor([[s1, s2, s3], [s4, s5, s6]]) -``` - -缩放后的输出值为原输出值与`self.grad_wrt_output`对应元素的乘积: - -$output = [[(x1 \cdot y1 + x2 \cdot y4 + x3 \cdot y7) \cdot z \cdot s1,(x1 \cdot y2 + x2 \cdot y5 + x3 \cdot y8) \cdot z \cdot s2,(x1 \cdot y3 + x2 \cdot y6 + x3 \cdot y9) \cdot z \cdot s3],$ - -$[(x4 \cdot y1 + x5 \cdot y4 + x6 \cdot y7) \cdot z \cdot s4,(x4 \cdot y2 + x5 \cdot y5 + x6 \cdot y8) \cdot z \cdot s5,(x4 \cdot y3 + x5 \cdot y6 + x6 \cdot y9) \cdot z \cdot s6]]$ - -求导公式变为输出值总和对`x`的每个元素求导: - -$\frac{\mathrm{d}(\sum{output})}{\mathrm{d}x} = [[(s1 \cdot y1 + s2 \cdot y2 + s3 \cdot y3) \cdot z,(s1 \cdot y4 + s2 \cdot y5 + s3 \cdot y6) \cdot z,(s1 \cdot y7 + s2 \cdot y8 + s3 \cdot y9) \cdot z],$ - -$[(s4 \cdot y1 + s5 \cdot y2 + s6 \cdot y3) \cdot z,(s4 \cdot y4 + s5 \cdot y5 + s6 \cdot y6) \cdot z,(s4 \cdot y7 + s5 \cdot y8 + s6 \cdot y9) \cdot z]]$ - -如果想计算单个输出(例如`output[0][0]`)对输入的导数,可以将相应位置的缩放值置为1,其他置为0;也可以改变网络结构: - -```python -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.matmul = ops.MatMul() - self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z') - def construct(self, x, y): - x = x * self.z - out = self.matmul(x, y) - return out[0][0] -``` - -```python -output = GradNetWrtX(Net())(x, y) -print(output) -``` - -输出结果如下: - -```python -[[0.11 1.1 1.1] - [0. 0. 0. 
]] -``` - -## 高阶求导 - -MindSpore可通过多次求导的方式支持高阶导数,下面通过几类例子展开阐述。 - -### 单输入单输出高阶导数 - -例如Sin算子,其二阶导数(-Sin)实现如下: - -```python -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.sin = ops.Sin() - def construct(self, x): - out = self.sin(x) - return out - -class Grad(nn.Cell): - def __init__(self, network): - super(Grad, self).__init__() - self.grad = ops.GradOperation() - self.network = network - def construct(self, x): - gout= self.grad(self.network)(x) - return gout -class GradSec(nn.Cell): - def __init__(self, network): - super(GradSec, self).__init__() - self.grad = ops.GradOperation() - self.network = network - def construct(self, x): - gout= self.grad(self.network)(x) - return gout - -net=Net() -firstgrad = Grad(net) # first order -secondgrad = GradSec(firstgrad) # second order -x_train = Tensor(np.array([1.0], dtype=np.float32)) -output = secondgrad(x_train) -print(output) -``` - -输出结果如下: - -```python -[-0.841471] -``` - -### 单输入多输出高阶导数 - -例如多输出的乘法运算,其高阶导数如下: - -```python -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import dtype as mstype -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.mul = ops.Mul() - def construct(self, x): - out = self.mul(x, x) - return out - -class Grad(nn.Cell): - def __init__(self, network): - super(Grad, self).__init__() - self.grad = ops.GradOperation(sens_param=False) - self.network = network - def construct(self, x): - gout = self.grad(self.network)(x) - return gout -class GradSec(nn.Cell): - def __init__(self, network): - super(GradSec, self).__init__() - self.grad = ops.GradOperation(sens_param=False) - 
self.network = network - def construct(self, x): - gout = self.grad(self.network)(x) - return gout - -net=Net() -firstgrad = Grad(net) # first order -secondgrad = GradSec(firstgrad) # second order -x = Tensor([0.1, 0.2, 0.3], dtype=mstype.float32) -output = secondgrad(x) -print(output) -``` - -输出结果如下: - -```python -[2. 2. 2.] -``` - -### 多输入多输出高阶导数 - -例如神经网络有多个输入`x`、`y`,可以通过梯度缩放机制获得二阶导数`dxdx`,`dydy`,`dxdy`,`dydx`如下: - -```python -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.mul = ops.Mul() - - def construct(self, x, y): - x_square = self.mul(x, x) - x_square_y = self.mul(x_square, y) - return x_square_y - -class Grad(nn.Cell): - def __init__(self, network): - super(Grad, self).__init__() - self.grad = ops.GradOperation(get_all=True, sens_param=False) - self.network = network - def construct(self, x, y): - gout = self.grad(self.network)(x, y) # return dx, dy - return gout - -class GradSec(nn.Cell): - def __init__(self, network): - super(GradSec, self).__init__() - self.grad = ops.GradOperation(get_all=True, sens_param=True) - self.network = network - self.sens1 = Tensor(np.array([1]).astype('float32')) - self.sens2 = Tensor(np.array([0]).astype('float32')) - def construct(self, x, y): - dxdx, dxdy = self.grad(self.network)(x, y, (self.sens1,self.sens2)) - dydx, dydy = self.grad(self.network)(x, y, (self.sens2,self.sens1)) - return dxdx, dxdy, dydx, dydy - -net = Net() -firstgrad = Grad(net) # first order -secondgrad = GradSec(firstgrad) # second order -x_train = Tensor(np.array([4],dtype=np.float32)) -y_train = Tensor(np.array([5],dtype=np.float32)) -dxdx, dxdy, dydx, dydy = secondgrad(x_train, y_train) -print(dxdx, dxdy, dydx, dydy) -``` - -输出结果如下: - -```python -[10] [8.] [8.] [0.] 
-``` - -具体地,一阶导数计算的结果是`dx`、`dy`:如果计算`dxdx`,则一阶导数只需保留`dx`,对应`x`、`y`的缩放值分别设置成1和0,即`self.grad(self.network)(x, y, (self.sens1,self.sens2))`;同理计算`dydy`,则一阶导数只保留`dy`,对应`x`、`y`的`sens_param`分别设置成0和1,即`self.grad(self.network)(x, y, (self.sens2,self.sens1))`。 - -## 二阶微分算子支持情况 - -CPU支持算子:[Square](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Square.html#mindspore.ops.Square)、 -[Exp](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Exp.html#mindspore.ops.Exp)、[Neg](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Neg.html#mindspore.ops.Neg)、[Mul](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Mul.html#mindspore.ops.Mul)、[MatMul](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.MatMul.html#mindspore.ops.MatMul); - -GPU支持算子:[Pow](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Pow.html#mindspore.ops.Pow)、[Log](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Log.html#mindspore.ops.Log)、[Square](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Square.html#mindspore.ops.Square)、[Exp](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Exp.html#mindspore.ops.Exp)、[Neg](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Neg.html#mindspore.ops.Neg)、[Mul](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Mul.html#mindspore.ops.Mul)、[Div](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Div.html#mindspore.ops.Div)、[MatMul](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.MatMul.html#mindspore.ops.MatMul)、[Sin](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Sin.html#mindspore.ops.Sin)、[Cos](https://www.mindspore.cn/doc/api_python/zh-CN/master/mi
ndspore/ops/mindspore.ops.Cos.html#mindspore.ops.Cos)、[Tan](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Tan.html#mindspore.ops.Tan)、[Atanh](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Atanh.html#mindspore.ops.Atanh); - -Ascend支持算子:[Pow](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Pow.html#mindspore.ops.Pow)、[Log](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Log.html#mindspore.ops.Log)、[Square](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Square.html#mindspore.ops.Square)、[Exp](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Exp.html#mindspore.ops.Exp)、[Neg](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Neg.html#mindspore.ops.Neg)、[Mul](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Mul.html#mindspore.ops.Mul)、[Div](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Div.html#mindspore.ops.Div)、[MatMul](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.MatMul.html#mindspore.ops.MatMul)、[Sin](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Sin.html#mindspore.ops.Sin)、[Cos](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Cos.html#mindspore.ops.Cos)、[Tan](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Tan.html#mindspore.ops.Tan)、[Sinh](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Sinh.html#mindspore.ops.Sinh)、[Cosh](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Cosh.html#mindspore.ops.Cosh)、[Atanh](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.Atanh.html#mindspore.ops.Atanh)。 - -## 引用 - -[1] Zhang L, Han J, Wang H, et al. 
[Deep potential molecular dynamics: a scalable model with the accuracy of quantum mechanics[J]](https://arxiv.org/pdf/1707.09571v2.pdf). Physical review letters, 2018, 120(14): 143001. - -[2] Raissi M, Perdikaris P, Karniadakis G E. [Physics informed deep learning (part i): Data-driven solutions of nonlinear partial differential equations[J]](https://arxiv.org/pdf/1711.10561.pdf). arXiv preprint arXiv:1711.10561, 2017. - -[3] Baydin A G, Pearlmutter B A, Radul A A, et al. [Automatic differentiation in machine learning: a survey[J]](https://jmlr.org/papers/volume18/17-468/17-468.pdf). The Journal of Machine Learning Research, 2017, 18(1): 5595-5637. diff --git a/tutorials/training/source_zh_cn/advanced_use/improve_model_security_nad.md b/tutorials/training/source_zh_cn/advanced_use/improve_model_security_nad.md deleted file mode 100644 index a63f5bb3855a91d1b7e422826c52ae077ef1cb1f..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/improve_model_security_nad.md +++ /dev/null @@ -1,334 +0,0 @@ -# 使用NAD算法提升模型安全性 - -`Linux` `Ascend` `GPU` `CPU` `模型训练` `模型调优` `企业` `高级` - - - -- [使用NAD算法提升模型安全性](#使用nad算法提升模型安全性) - - [概述](#概述) - - [建立被攻击模型](#建立被攻击模型) - - [引入相关包](#引入相关包) - - [加载数据集](#加载数据集) - - [建立模型](#建立模型) - - [对抗性攻击](#对抗性攻击) - - [对抗性防御](#对抗性防御) - - [防御实现](#防御实现) - - [防御效果](#防御效果) - - - -   - -   - - -## 概述 - -本教程介绍MindArmour提供的模型安全防护手段,引导您快速使用MindArmour,为您的AI模型提供一定的安全防护能力。 - -AI算法设计之初普遍未考虑相关的安全威胁,使得AI算法的判断结果容易被恶意攻击者影响,导致AI系统判断失准。攻击者在原始样本处加入人类不易察觉的微小扰动,导致深度学习模型误判,称为对抗样本攻击。MindArmour模型安全提供对抗样本生成、对抗样本检测、模型防御、攻防效果评估等功能,为AI模型安全研究和AI应用安全提供重要支撑。 - -- 对抗样本生成模块支持安全工程师快速高效地生成对抗样本,用于攻击AI模型。 -- 对抗样本检测、防御模块支持用户检测过滤对抗样本、增强AI模型对于对抗样本的鲁棒性。 -- 评估模块提供多种指标全面评估对抗样本攻防性能。 - -这里通过图像分类任务上的对抗性攻防,以攻击算法FGSM和防御算法NAD为例,介绍MindArmour在对抗攻防上的使用方法。 - -> 本例面向CPU、GPU、Ascend 910 AI处理器,你可以在这里下载完整的样例代码: -> - -## 建立被攻击模型 - -以MNIST为示范数据集,自定义的简单模型作为被攻击模型。 - -### 引入相关包 - -```python -import os -import numpy as np -from scipy.special import softmax - -from mindspore import 
dataset as ds -from mindspore import dtype as mstype -import mindspore.dataset.vision.c_transforms as CV -import mindspore.dataset.transforms.c_transforms as C -from mindspore.dataset.vision import Inter -import mindspore.nn as nn -from mindspore.nn import SoftmaxCrossEntropyWithLogits -from mindspore.common.initializer import TruncatedNormal -from mindspore import Model, Tensor, context -from mindspore.train.callback import LossMonitor - -from mindarmour.adv_robustness.attacks import FastGradientSignMethod -from mindarmour.utils.logger import LogUtil -from mindarmour.adv_robustness.evaluations import AttackEvaluate - -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") - -LOGGER = LogUtil.get_instance() -LOGGER.set_level("INFO") -TAG = 'demo' -``` - -### 加载数据集 - -利用MindSpore的dataset提供的`MnistDataset`接口加载MNIST数据集。 - -```python -# generate dataset for train of test -def generate_mnist_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1, sparse=True): - """ - create dataset for training or testing - """ - # define dataset - ds1 = ds.MnistDataset(data_path) - - # define operation parameters - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), - interpolation=Inter.LINEAR) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - # apply map operations on images - if not sparse: - one_hot_enco = C.OneHot(10) - ds1 = ds1.map(operations=one_hot_enco, input_columns="label", - num_parallel_workers=num_parallel_workers) - type_cast_op = C.TypeCast(mstype.float32) - ds1 = ds1.map(operations=type_cast_op, input_columns="label", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=resize_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=rescale_op, input_columns="image", - 
num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=hwc2chw_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - ds1 = ds1.shuffle(buffer_size=buffer_size) - ds1 = ds1.batch(batch_size, drop_remainder=True) - ds1 = ds1.repeat(repeat_size) - - return ds1 -``` - -### 建立模型 - -这里以LeNet模型为例,您也可以建立训练自己的模型。 - -1. 定义LeNet模型网络。 - - ```python - def conv(in_channels, out_channels, kernel_size, stride=1, padding=0): - weight = weight_variable() - return nn.Conv2d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, padding=padding, - weight_init=weight, has_bias=False, pad_mode="valid") - - - def fc_with_initialize(input_channels, out_channels): - weight = weight_variable() - bias = weight_variable() - return nn.Dense(input_channels, out_channels, weight, bias) - - - def weight_variable(): - return TruncatedNormal(0.02) - - - class LeNet5(nn.Cell): - """ - Lenet network - """ - def __init__(self): - super(LeNet5, self).__init__() - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16*5*5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, 10) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x - ``` - -2. 
训练LeNet模型。利用上面定义的数据加载函数`generate_mnist_dataset`载入数据。 - - ```python - mnist_path = "../common/dataset/MNIST/" - batch_size = 32 - # train original model - ds_train = generate_mnist_dataset(os.path.join(mnist_path, "train"), - batch_size=batch_size, repeat_size=1, - sparse=False) - net = LeNet5() - loss = SoftmaxCrossEntropyWithLogits(sparse=False) - opt = nn.Momentum(net.trainable_params(), 0.01, 0.09) - model = Model(net, loss, opt, metrics=None) - model.train(10, ds_train, callbacks=[LossMonitor()], - dataset_sink_mode=False) - - # 2. get test data - ds_test = generate_mnist_dataset(os.path.join(mnist_path, "test"), - batch_size=batch_size, repeat_size=1, - sparse=False) - inputs = [] - labels = [] - for data in ds_test.create_tuple_iterator(): - inputs.append(data[0].asnumpy().astype(np.float32)) - labels.append(data[1].asnumpy()) - test_inputs = np.concatenate(inputs) - test_labels = np.concatenate(labels) - ``` - -3. 测试模型。 - - ```python - # prediction accuracy before attack - net.set_train(False) - test_logits = net(Tensor(test_inputs)).asnumpy() - - tmp = np.argmax(test_logits, axis=1) == np.argmax(test_labels, axis=1) - accuracy = np.mean(tmp) - LOGGER.info(TAG, 'prediction accuracy before attacking is : %s', accuracy) - ``` - - 测试结果中分类精度达到了98%。 - - ```python - prediction accuracy before attacking is : 0.9895833333333334 - ``` - -## 对抗性攻击 - -调用MindArmour提供的FGSM接口(FastGradientSignMethod)。 - -```python -# attacking -# get adv data -attack = FastGradientSignMethod(net, eps=0.3, loss_fn=loss) -adv_data = attack.batch_generate(test_inputs, test_labels) - -# get accuracy of adv data on original model -adv_logits = net(Tensor(adv_data)).asnumpy() -adv_proba = softmax(adv_logits, axis=1) -tmp = np.argmax(adv_proba, axis=1) == np.argmax(test_labels, axis=1) -accuracy_adv = np.mean(tmp) -LOGGER.info(TAG, 'prediction accuracy after attacking is : %s', accuracy_adv) - -attack_evaluate = AttackEvaluate(test_inputs.transpose(0, 2, 3, 1), - test_labels, - 
adv_data.transpose(0, 2, 3, 1), - adv_proba) -LOGGER.info(TAG, 'mis-classification rate of adversaries is : %s', - attack_evaluate.mis_classification_rate()) -LOGGER.info(TAG, 'The average confidence of adversarial class is : %s', - attack_evaluate.avg_conf_adv_class()) -LOGGER.info(TAG, 'The average confidence of true class is : %s', - attack_evaluate.avg_conf_true_class()) -LOGGER.info(TAG, 'The average distance (l0, l2, linf) between original ' - 'samples and adversarial samples are: %s', - attack_evaluate.avg_lp_distance()) -LOGGER.info(TAG, 'The average structural similarity between original ' - 'samples and adversarial samples are: %s', - attack_evaluate.avg_ssim()) -``` - -攻击结果如下: - -```text -prediction accuracy after attacking is : 0.052083 -mis-classification rate of adversaries is : 0.947917 -The average confidence of adversarial class is : 0.803375 -The average confidence of true class is : 0.042139 -The average distance (l0, l2, linf) between original samples and adversarial samples are: (1.698870, 0.465888, 0.300000) -The average structural similarity between original samples and adversarial samples are: 0.332538 -``` - -对模型进行FGSM无目标攻击后,模型精度由98.9%降到5.2%,误分类率高达95%,成功攻击的对抗样本的预测类别的平均置信度(ACAC)为 0.803375,成功攻击的对抗样本的真实类别的平均置信度(ACTC)为 0.042139,同时给出了生成的对抗样本与原始样本的零范数距离、二范数距离和无穷范数距离,平均每个对抗样本与原始样本间的结构相似性为0.332538,平均每生成一张对抗样本所需时间为0.003125s。 - -攻击前后效果如下图,左侧为原始样本,右侧为FGSM无目标攻击后生成的对抗样本。从视觉角度而言,右侧图片与左侧图片几乎没有明显变化,但是均成功误导了模型,使模型将其误分类为其他非正确类别。 - -![adv_attack_result](./images/adv_attack_result.png) - -## 对抗性防御 - -NaturalAdversarialDefense(NAD)是一种简单有效的对抗样本防御方法,使用对抗训练的方式,在模型训练的过程中构建对抗样本,并将对抗样本与原始样本混合,一起训练模型。随着训练次数的增加,模型在训练的过程中提升对于对抗样本的鲁棒性。NAD算法使用FGSM作为攻击算法,构建对抗样本。 - -### 防御实现 - -调用MindArmour提供的NAD防御接口(NaturalAdversarialDefense)。 - -```python -from mindarmour.adv_robustness.defenses import NaturalAdversarialDefense - - -# defense -net.set_train() -nad = NaturalAdversarialDefense(net, loss_fn=loss, optimizer=opt, - bounds=(0.0, 1.0), eps=0.3) -nad.batch_defense(test_inputs, 
test_labels, batch_size=32, epochs=10) - -# get accuracy of test data on defensed model -net.set_train(False) -test_logits = net(Tensor(test_inputs)).asnumpy() - -tmp = np.argmax(test_logits, axis=1) == np.argmax(test_labels, axis=1) -accuracy = np.mean(tmp) -LOGGER.info(TAG, 'accuracy of TEST data on defensed model is : %s', accuracy) - -# get accuracy of adv data on defensed model -adv_logits = net(Tensor(adv_data)).asnumpy() -adv_proba = softmax(adv_logits, axis=1) -tmp = np.argmax(adv_proba, axis=1) == np.argmax(test_labels, axis=1) -accuracy_adv = np.mean(tmp) - -attack_evaluate = AttackEvaluate(test_inputs.transpose(0, 2, 3, 1), - test_labels, - adv_data.transpose(0, 2, 3, 1), - adv_proba) - -LOGGER.info(TAG, 'accuracy of adv data on defensed model is : %s', - np.mean(accuracy_adv)) -LOGGER.info(TAG, 'defense mis-classification rate of adversaries is : %s', - attack_evaluate.mis_classification_rate()) -LOGGER.info(TAG, 'The average confidence of adversarial class is : %s', - attack_evaluate.avg_conf_adv_class()) -LOGGER.info(TAG, 'The average confidence of true class is : %s', - attack_evaluate.avg_conf_true_class()) -``` - -### 防御效果 - -```text -accuracy of TEST data on defensed model is : 0.974259 -accuracy of adv data on defensed model is : 0.856370 -defense mis-classification rate of adversaries is : 0.143629 -The average confidence of adversarial class is : 0.616670 -The average confidence of true class is : 0.177374 -``` - -使用NAD进行对抗样本防御后,模型对于对抗样本的误分类率从95%降至14%,模型有效地防御了对抗样本。同时,模型对于原来测试数据集的分类精度达97%。 diff --git a/tutorials/training/source_zh_cn/advanced_use/incremental_operator_build.md b/tutorials/training/source_zh_cn/advanced_use/incremental_operator_build.md deleted file mode 100644 index bd7087995b766a24443139ce0e3b36017b19faa5..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/incremental_operator_build.md +++ /dev/null @@ -1,117 +0,0 @@ -# 算子增量编译 - -`Linux` `Ascend` `模型训练` `初级` `中级` `高级` - - - -- 
[算子增量编译](#算子增量编译) - - [概述](#概述) - - [使用方法](#使用方法) - - [常见问题](#常见问题) - - - - - -## 概述 - -在执行网络模型的过程中,MindSpore会对所使用的算子进行编译,该阶段耗时会随网络模型规模的增大而增大。为提升用户二次执行模型的性能体验,我们提供了一种算子增量编译机制。MindSpore执行网络模型时会在执行目录下生成`kernel_meta`目录,并在执行过程中保存网络编译生成的算子缓存文件到此目录,包括`.o`文件,`.info`文件以及`.json`文件。若用户再次执行相同的网络模型,或者仅有部分变化,MindSpore会自动调用`kernel_meta`目录下可复用的算子缓存文件,显著减少网络编译时间,提升执行性能。目前算子增量编译功能仅支持在昇腾AI芯片上使用。 - -下面,本教程将演示如何使用算子增量编译。 - -## 使用方法 - -算子增量编译在MindSpore中默认开启,用户无需对其进行控制。下面我们在`src`目录下构造一个简单的网络用例`test_square.py`。当前目录结构为: - -```text -└─src - └── test_square.py -``` - -执行如下用例: - -```python -import numpy as np -import mindspore.nn as nn -import mindspore.context as context -import mindspore.ops as ops -from mindspore import Tensor - -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.square = ops.Square() - - def construct(self, data): - return self.square(data) - -def test_net(): - x = np.array([1.0, 4.0, 9.0]).astype(np.float32) - square = Net() - output = square(Tensor(x)) - print("x: ", x) - print("output: ", output) - - -``` - -该网络由一个单算子`Square`构成,输出为输入的平方值。执行结果如下: - -```python -x: [1. 4. 9.] -output: [1. 16. 81.] 
-``` - -在执行目录下,生成了`kernel_meta`文件夹,其中包含Square算子的`.o`文件,`.json`文件以及`.info`文件,当前目录结构为: - -```text -└─src - ├── test_square.py - └── kernel_meta - ├── Square_3307185124911971026_7.info - ├── Square_3307185124911971026_7.json - └── Square_3307185124911971026_7.o -``` - -对于一个算子来说: - -`.o`文件即MindSpore在网络执行过程中对该算子生成的可执行文件。 - -`.info`文件记录了该算子的所有有效信息,包括算子名称、算子属性、输入输出格式、输入输出数据类型等等。`.info`文件用于查找并确定算子的`.o`文件是否可复用。详细内容如下: - -```text -{"SocInfo":{"autoTilingMode":"NO_TUNE","coreNum":"","coreType":"","l1Fusion":"false","l2Fusion":"false","l2Mode":"2","op_debug_level":"","op_impl_mode":"","op_impl_mode_list":[],"socVersion":"Ascend910A"},"impl_path":"","op_info":{"Type":"Square","attrs":null,"full_name":"Default/Square-op1","gen_model":"single","graph_id":0,"inputs":[[{"dtype":"float32","format":"NCHW","name":"x_0","ori_format":"NCHW","ori_shape":[3],"param_type":"required","range":[[3,3]],"shape":[3],"valid":true}]],"is_dynamic_shape":false,"kernel_name":"Square_2989580383048251395_7","module_name":"impl.square","name":"square","outputs":[[{"dtype":"float32","format":"NCHW","name":"y","ori_format":"NCHW","ori_shape":[3],"param_type":"required","range":[[3,3]],"shape":[3],"valid":true}]],"py_module_path":"/usr/local/Ascend/opp/op_impl/built-in/ai_core/tbe","socVersion":"Ascend910A"},"platform":"TBE"} -``` - -`.json`文件存放了算子编译结果,在运行时将会使用到。详细内容如下: - -```text -{ - "batchBindOnly":1, - "binFileName":"Square_3307185124911971026_7", - "binFileSuffix":".o", - "blockDim":1, - "kernelName":"Square_3307185124911971026_7__kernel0", - "magic":"RT_DEV_BINARY_MAGIC_ELF", - "opParaSize":0, - "parameters":[ - 0, - 0 - ], - "sha256":"64d4963bf6b619c2d85da67611f5677e0ea11bba0413ed3620b0926b1d072a1a" -} -``` - -在生成如上的三种算子缓存文件之后,用户在执行网络模型时即可进行算子增量编译,即仅编译新增或者有改动的算子,大幅提升网络编译性能。 - -## 常见问题 - -- 不同场景下缓存文件通常不能共用,例如多卡与单卡、训练与推理等。 - -- 在多卡运行时,执行网络模型将会在多个`device`目录下均生成`kernel_meta`文件夹。 - - 
请注意,在多卡运行的情况下,如果仅删除部分卡的`kernel_meta`下的算子缓存文件后重复执行相同的网络模型,可能会引起不需重新编译算子的部分卡等候超时,导致执行失败。在这种情况下,可以通过设置环境变量`HCCL_CONNECT_TIMEOUT`,即多卡间等待时间来避免失败,但该方式耗时等同于全部删除缓存重新编译。 - -- 如果在网络编译的过程中打断进程,有概率会导致`kernel_meta`中的缓存文件生成错误,并使得后续重新执行的过程失败。此时需要用户去删除`kernel_meta`文件夹,重新编译网络。 diff --git a/tutorials/training/source_zh_cn/advanced_use/lineage_and_scalars_comparison.md b/tutorials/training/source_zh_cn/advanced_use/lineage_and_scalars_comparison.md deleted file mode 100644 index e51a6f00962b83abcdbd09912bac8067408f7a4d..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/lineage_and_scalars_comparison.md +++ /dev/null @@ -1,114 +0,0 @@ -# 查看溯源和对比看板 - -`Linux` `Ascend` `GPU` `CPU` `模型调优` `中级` `高级` - - - -- [查看溯源和对比看板](#查看溯源和对比看板) - - [概述](#概述) - - [模型溯源](#模型溯源) - - [数据溯源](#数据溯源) - - [对比看板](#对比看板) - - [注意事项](#注意事项) - - - -   - - -## 概述 - -MindInsight中的模型溯源、数据溯源和对比看板同训练看板一样属于可视化组件中的重要组成部分,在对训练数据的可视化中,通过对比看板观察不同标量趋势图发现问题,再使用溯源功能定位问题原因,给用户在数据增强和深度神经网络中提供高效调优的能力。 - -用户从对比分析进入溯源和对比看板。 - -## 模型溯源 - -模型溯源可视化用于展示所有训练的模型参数信息。 - -![image.png](./images/lineage_label.png) - -图1:模型参数选择区 - -图1展示的模型参数选择区,列举了可供查看的模型参数标签。用户可以通过勾选所需的标签,查看相应的模型参数。 - -![image.png](./images/lineage_model_chart.png) - -图2:模型溯源功能区 - -图2展示的模型溯源功能区,图像化展示了模型的参数信息。用户可以通过选择列的特定区域,展示区域范围内的模型信息。 - -![image.png](./images/lineage_model_table.png) - -图3:模型列表 - -图3分组展示所有模型信息,用户可以按指定列进行升序或降序展示模型信息。 - -左侧概览页展示优化目标和相关参数的信息。 - -![targets.png](./images/targets.png) - -图4:概览页 - -图4展示的是优化目标分布、参数重要性和散点图。用户可以选择优化目标来查看参数重要性,再通过点击柱状图来查看参数和优化目标的散点图。 - -## 数据溯源 - -数据溯源可视化用于展示所有训练的数据处理和数据增强信息。 - -![data_label.png](./images/data_label.png) - -图5:数据处理和增强算子选择区 - -图5展示的数据处理和数据增强算子选择区,列举了可供查看的数据处理和增强算子的名称。用户可以通过勾选所需的标签,查看相应的参数等信息。 - -![data_chart.png](./images/data_chart.png) - -图6:数据溯源功能区 - -图6展示的数据溯源功能区,图像化展示了数据处理和数据增强使用的参数信息。用户可以通过选择列的特定区域,展示区域范围内的参数信息。 - -![data_table.png](./images/data_table.png) - -图7:数据溯源列表 - -图7展示所有模型训练的数据处理和数据增强信息。 - -> 如果用户筛选模型溯源随后切换到数据溯源页面时,折线图将展示最新一次筛选过的模型溯源列。 - -## 对比看板 - 
-对比看板可视用于多个训练之间的标量曲线对比。 - -![multi_scalars.png](./images/multi_scalars.png) - -图8:标量对比曲线图 - -图8展示了多个训练之间的标量曲线对比效果,横坐标是训练步骤,纵坐标是标量值。 - -图中右上角有几个按钮功能,从左到右功能分别是全屏展示,切换Y轴比例,开启/关闭框选,分步回退和还原图形。 - -- 全屏展示即全屏展示该标量曲线,再点击一次即可恢复。 -- 切换Y轴比例是指可以将Y轴坐标进行对数转换。 -- 开启/关闭框选是指可以框选图中部分区域,并放大查看该区域, 可以在已放大的图形上叠加框选。 -- 分步回退是指对同一个区域连续框选并放大查看时,可以逐步撤销操作。 -- 还原图形是指进行了多次框选后,点击此按钮可以将图还原回原始状态。 - -![multi_scalars_select.png](./images/multi_scalars_select.png) - -图9:对比看板可视功能区 - -图9展示的对比看板可视的功能区,提供了根据选择不同训练或标签,水平轴的不同维度和平滑度来进行标量对比的功能。 - -- 训练选择:提供了对所有训练进行多项选择的功能,用户可以点击展开,通过勾选或关键字筛选所需的训练。 -- 标签选择:提供了对所有标签进行多项选择的功能,用户可以通过勾选所需的标签,查看对应的标量信息。 -- 水平轴:可以选择“步骤”、“相对时间”、“绝对时间”中的任意一项,来作为标量曲线的水平轴。 -- 平滑度:可以通过调整平滑度,对标量曲线进行平滑处理。 - -## 注意事项 - -出于性能上的考虑,MindInsight对比看板使用缓存机制加载训练的标量曲线数据,并进行以下限制: - -- 对比看板只支持在缓存中的训练进行比较标量曲线对比。 -- 缓存最多保留最新(按修改时间排列)的15个训练。 -- 用户最多同时对比5个训练的标量曲线。 diff --git a/tutorials/training/source_zh_cn/advanced_use/migrate_3rd_scripts.md b/tutorials/training/source_zh_cn/advanced_use/migrate_3rd_scripts.md deleted file mode 100644 index fc26fb08856d084288f3f7fa3004c02993d7f701..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/migrate_3rd_scripts.md +++ /dev/null @@ -1,278 +0,0 @@ -# 迁移第三方框架训练脚本 - -`Linux` `Ascend` `GPU` `CPU` `全流程` `初级` `中级` `高级` - - - -- [迁移第三方框架训练脚本](#迁移第三方框架训练脚本) - - [概述](#概述) - - [准备环节](#准备环节) - - [算子评估](#算子评估) - - [软硬件环境准备](#软硬件环境准备) - - [E2E迁移网络](#e2e迁移网络) - - [训练阶段](#训练阶段) - - [脚本迁移](#脚本迁移) - - [精度调试](#精度调试) - - [云上集成](#云上集成) - - [推理阶段](#推理阶段) - - [样例参考](#样例参考) - - - - - -## 概述 - -你可能已经编写过TensorFlow、PyTorch等框架的脚本,本教程介绍如何将已有的TensorFlow、PyTorch等的网络迁移到MindSpore,包括主要步骤和操作建议,帮助你快速进行网络迁移。 - -## 准备环节 - -在动手改造你的脚本前,请先做好算子评估和软硬件环境准备,确保MindSpore可以支持你希望迁移的网络。 - -### 算子评估 - -分析待迁移的网络中所包含的算子,结合[MindSpore算子支持列表](https://www.mindspore.cn/doc/note/zh-CN/master/operator_list_ms.html),梳理出MindSpore对这些算子的支持程度。 - 
-以ResNet-50为例,[Conv](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.Conv2d.html)和[BatchNorm](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn/mindspore.nn.BatchNorm2d.html)是其中最主要的两个算子,它们已在MindSpore支持的算子列表中。 - -如果发现没有对应算子,建议: - -- 使用其他算子替换:分析算子实现公式,审视是否可以采用MindSpore现有算子叠加达到预期目标。 -- 临时替代方案:比如不支持某个Loss,是否可以替换为同类已支持的Loss算子;又比如当前的网络结构,是否可以替换为其他同类主流网络等。 - -如果发现支持的算子存在功能不全,建议: - -- 非必要功能:可删除。 -- 必要功能:寻找替代方案。 - -如果上述仍不能满足你的要求,你可以在[MindSpore代码仓](https://gitee.com/mindspore/mindspore)提出诉求。 - -### 软硬件环境准备 - -准备好硬件环境,查看与你环境对应平台的[安装指南](https://www.mindspore.cn/install),完成MindSpore的安装。 - -## E2E迁移网络 - -### 训练阶段 - -#### 脚本迁移 - -MindSpore与TensorFlow、PyTorch在网络结构组织方式上,存在一定差别,迁移前需要对原脚本有较为清晰的了解,明确地知道每一层的shape等信息。 - -> 你也可以使用[MindConverter工具](https://gitee.com/mindspore/mindinsight/tree/master/mindinsight/mindconverter)实现PyTorch网络定义脚本到MindSpore网络定义脚本的自动转换。 - -下面,我们以ResNet-50的迁移,并在Ascend 910上训练为例: - -1. 导入MindSpore模块。 - - 根据所需使用的接口,导入相应的MindSpore模块,模块列表详见。 - -2. 加载数据集和预处理。 - - 使用MindSpore构造你需要使用的数据集。目前MindSpore已支持常见数据集,你可以通过原始格式、`MindRecord`、`TFRecord`等多种接口调用,同时还支持数据处理以及数据增强等相关功能,具体用法可参考[准备数据教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/data_preparation.html)。 - - 本例中加载了Cifar-10数据集,可同时支持单卡和多卡的场景。 - - ```python - if device_num == 1: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=4, shuffle=True) - else: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=4, shuffle=True, - num_shards=device_num, shard_id=rank_id) - ``` - - 然后对数据进行了数据增强、数据清洗和批处理等操作。代码详见。 - -3. 
构建网络。 - - 与TensorFlow相比,MindSpore对于卷积的最大差异在于数据格式。MindSpore整网默认使用`NCHW`的格式,与常见的TensorFlow所使用的`NHWC`不同。 - - 以batch_size=32的ResNet-50网络中第一层卷积为例: - - - 在TensorFlow中,输入feature的格式为[32, 224, 224, 3],卷积核大小为[7, 7, 3, 64]。 - - 在MindSpore中,输入feature的格式为[32, 3, 224, 224],卷积核大小为[64, 3, 7, 7]。 - - ```python - def _conv7x7(in_channel, out_channel, stride=1): - weight_shape = (out_channel, in_channel, 7, 7) - weight = _weight_variable(weight_shape) - return nn.Conv2d(in_channel, out_channel, - kernel_size=7, stride=stride, padding=0, pad_mode='same', weight_init=weight) - - - def _bn(channel): - return nn.BatchNorm2d(channel, eps=1e-4, momentum=0.9, - gamma_init=1, beta_init=0, moving_mean_init=0, moving_var_init=1) - ``` - -4. 构造子网。 - - MindSpore中使用`nn.Cell`来构造一个子网结构。子网内遵循先定义后使用的原则来搭建网络结构。每一个需要使用的算子需先定义在Cell的`__init__`函数内,然后在`construct`函数内将定义好的算子连接起来,最后将子网的输出通过`return`返回。 - - ```python - class ResidualBlock(nn.Cell): - """ - ResNet V1 residual block definition. - - Args: - in_channel (int): Input channel. - out_channel (int): Output channel. - stride (int): Stride size for the first convolutional layer. Default: 1. - - Returns: - Tensor, output tensor. 
- - Examples: - >>> ResidualBlock(3, 256, stride=2) - """ - expansion = 4 - - def __init__(self, - in_channel, - out_channel, - stride=1): - super(ResidualBlock, self).__init__() - - channel = out_channel - self.conv1 = _conv1x1(in_channel, channel, stride=1) - self.bn1 = _bn(channel) - - self.conv2 = _conv3x3(channel, channel, stride=stride) - self.bn2 = _bn(channel) - - self.conv3 = _conv1x1(channel, out_channel, stride=1) - self.bn3 = _bn_last(out_channel) - - self.relu = nn.ReLU() - - self.down_sample = False - - if stride != 1 or in_channel != out_channel: - self.down_sample = True - self.down_sample_layer = None - - if self.down_sample: - self.down_sample_layer = nn.SequentialCell([_conv1x1(in_channel, out_channel, stride), - _bn(out_channel)]) - self.add = ops.Add() - - def construct(self, x): - identity = x - - out = self.conv1(x) - out = self.bn1(out) - out = self.relu(out) - - out = self.conv2(out) - out = self.bn2(out) - out = self.relu(out) - - out = self.conv3(out) - out = self.bn3(out) - - if self.down_sample: - identity = self.down_sample_layer(identity) - - out = self.add(out, identity) - out = self.relu(out) - - return out - ``` - -5. 定义串联结构。 - - ResNet-50网络中有大量的重复结构,TensorFlow中可以使用for循环调用函数的方式来减少重复代码。MindSpore中,我们定义的每一个Cell对象都是独立的,尤其对于内部存在权重参数的子网,定义的Cell是不能重复使用的,如果出现大量重复串联结构,可以使用循环构造多个Cell实例并通过`SequentialCell`来串联。 - - ```python - def _make_layer(self, block, layer_num, in_channel, out_channel, stride): - """ - Make stage network of ResNet. - - Args: - block (Cell): Resnet block. - layer_num (int): Layer number. - in_channel (int): Input channel. - out_channel (int): Output channel. - stride (int): Stride size for the first convolutional layer. - - Returns: - SequentialCell, the output layer. 
- - Examples: - >>> _make_layer(ResidualBlock, 3, 128, 256, 2) - """ - layers = [] - - resnet_block = block(in_channel, out_channel, stride=stride) - layers.append(resnet_block) - - for _ in range(1, layer_num): - resnet_block = block(out_channel, out_channel, stride=1) - layers.append(resnet_block) - - return nn.SequentialCell(layers) - ``` - -6. 构造整网。 - - 将定义好的多个子网连接起来就是整个[ResNet-50](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/resnet/src/resnet.py)网络的结构了。同样遵循先定义后使用的原则,在`__init__`中定义所有用到的子网,在`construct`中连接子网。 - -7. 定义损失函数和优化器。 - - 定义好网络后,还需要相应地定义损失函数和优化器。 - - ```python - loss = SoftmaxCrossEntropyWithLogits(sparse=True) - opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), lr, config.momentum, config.weight_decay, config.loss_scale) - ``` - -8. 构造模型。 - - 类似于TensorFlow的`Estimator`接口,将定义好的网络原型、损失函数、优化器传入MindSpore的`Model`接口,内部会自动将其组合成一个可用于训练的网络。 - - 如果需要在训练中使用Loss Scale,则可以单独定义一个`loss_scale_manager`,一同传入`Model`接口。 - - ```python - loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) - ``` - - 如果希望使用`Model`内置的评估方法,则可以使用[metrics](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#mindspore-metrics)属性设置希望使用的评估方法。 - - ```python - model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics={'acc'}) - ``` - - 类似于TensorFlow的`estimator.train`,可以通过调用`model.train`接口来进行训练。CheckPoint和中间结果打印等功能,可通过`Callback`的方式定义到`model.train`接口上。 - - ```python - time_cb = TimeMonitor(data_size=step_size) - loss_cb = LossMonitor() - cb = [time_cb, loss_cb] - if config.save_checkpoint: - config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_steps, - keep_checkpoint_max=config.keep_checkpoint_max) - ckpt_cb = ModelCheckpoint(prefix="resnet", directory=config.save_checkpoint_path, config=config_ck) - cb += [ckpt_cb] - model.train(epoch_size, dataset, callbacks=cb) - ``` - -#### 精度调试 - -精度调优过程建议如下两点: - -1. 
单卡精度验证时,建议先采用小数据集进行训练。验证达标后,多卡精度验证时,再采用全量数据集。这样可以帮助提升调试效率。 -2. 首先删减脚本中的不必要技巧(如优化器中的增强配置、动态Loss Scale等),验证达标后,在此基础上逐个叠加新增功能,待当前新增功能确认正常后,再叠加下一个功能。这样可以帮助快速定位问题。 - -#### 云上集成 - -请参考[在云上使用MindSpore](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/use_on_the_cloud.html),将你的脚本运行在ModelArts。 - -### 推理阶段 - -在Ascend 910 AI处理器上训练后的模型,支持在不同的硬件平台上执行推理。详细步骤可参考[多平台推理教程](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference.html)。 - -## 样例参考 - -1. [常用数据集读取样例](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html) - -2. [Model Zoo](https://gitee.com/mindspore/mindspore/tree/master/model_zoo) diff --git a/tutorials/training/source_zh_cn/advanced_use/migrate_3rd_scripts_mindconverter.md b/tutorials/training/source_zh_cn/advanced_use/migrate_3rd_scripts_mindconverter.md deleted file mode 100644 index b57a514d635914c232acd2ea93f20c524b0162a3..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/migrate_3rd_scripts_mindconverter.md +++ /dev/null @@ -1,289 +0,0 @@ -# 使用工具迁移第三方框架脚本 - -`Linux` `Ascend` `模型开发` `初级` - - - -- [使用工具迁移第三方框架脚本](#使用工具迁移第三方框架脚本) - - [概述](#概述) - - [安装](#安装) - - [用法](#用法) - - [PyTorch模型脚本迁移](#pytorch模型脚本迁移) - - [TensorFlow模型脚本迁移](#tensorflow模型脚本迁移) - - [ONNX模型文件迁移](#onnx模型文件迁移) - - [使用场景](#使用场景) - - [使用示例](#使用示例) - - [基于AST的脚本转换示例](#基于ast的脚本转换示例) - - [基于图结构的脚本生成示例](#基于图结构的脚本生成示例) - - [TensorFlow模型脚本生成示例](#tensorflow模型脚本生成示例) - - [ONNX模型文件生成示例](#onnx模型文件生成示例) - - [注意事项](#注意事项) - - - - - -## 概述 - -MindConverter是一款用于将PyTorch(ONNX)、TensorFlow(PB)模型转换到MindSpore模型定义脚本以及权重文件的工具。结合转换报告的信息,用户只需对转换后的脚本进行微小的改动,即可实现快速迁移。 - -## 安装 - -此工具为MindInsight的子模块,安装MindInsight后,即可使用MindConverter,MindInsight安装请参考该[安装文档](https://gitee.com/mindspore/mindinsight/blob/master/README_CN.md#)。 - -除安装MindInsight之外,还需要安装下列依赖库: - -1. TensorFlow不作为MindInsight明确声明的依赖库。若想使用基于图结构的脚本生成工具,需要用户手动安装TensorFlow(MindConverter推荐使用TensorFlow 1.15.x版本)。 -2. 
ONNX(>=1.8.0)、ONNXRUNTIME(>=1.5.2)、ONNXOPTIMIZER(>=0.1.2)不作为MindInsight明确声明的依赖库,若想使用基于图结构的脚本生成工具,必须安装上述三方库。若想使用TensorFlow(MindConverter推荐使用TensorFlow 1.15.x版本)模型脚本迁移需要额外安装TF2ONNX(>=1.7.1)。 - -## 用法 - -MindConverter提供命令行(Command-line interface, CLI)的使用方式,命令如下。 - -```bash -usage: mindconverter [-h] [--version] [--in_file IN_FILE] - [--model_file MODEL_FILE] [--shape SHAPE [SHAPE ...]] - [--input_nodes INPUT_NODES [INPUT_NODES ...]] - [--output_nodes OUTPUT_NODES [OUTPUT_NODES ...]] - [--output OUTPUT] [--report REPORT] - -optional arguments: - -h, --help show this help message and exit - --version show program version number and exit - --in_file IN_FILE Specify path for script file to use AST schema to do - script conversation. - --model_file MODEL_FILE - Tensorflow(.pb) or ONNX(.onnx) model file path is - expected to do script generation based on graph - schema. When `--in_file` and `--model_file` are both - provided, use AST schema as default. - --shape SHAPE [SHAPE ...] - Expected input tensor shape of `--model_file`. It is - required when use graph based schema. Both order and - number should be consistent with `--input_nodes`. - Given that (1,128) and (1,512) are shapes of input_1 - and input_2 separately. Usage: --shape 1,128 1,512 - --input_nodes INPUT_NODES [INPUT_NODES ...] - Input node(s) name of `--model_file`. It is required - when use graph based schema. Both order and number - should be consistent with `--shape`. Given that both - input_1 and input_2 are inputs of model. Usage: - --input_nodes input_1 input_2 - --output_nodes OUTPUT_NODES [OUTPUT_NODES ...] - Output node(s) name of `--model_file`. It is required - when use graph based schema. Given that both output_1 - and output_2 are outputs of model. Usage: - --output_nodes output_1 output_2 - --output OUTPUT Optional, specify path for converted script file - directory. Default output directory is `output` folder - in the current working directory. 
- --report REPORT Optional, specify report directory. Default is - converted script directory. -``` - -### PyTorch模型脚本迁移 - -**MindConverter仅提供基于抽象语法树(Abstract syntax tree, AST)的PyTorch脚本迁移**:指定`--in_file`的值,将使用基于AST的脚本转换方案; - -> 若同时指定了`--in_file`,`--model_file`将默认使用AST方案进行脚本迁移。 - -其中,`--output`与`--report`参数可省略。若省略,MindConverter将在当前工作目录(Working directory)下自动创建`output`目录,将生成的脚本、转换报告输出至该目录。 - -> 若需要使用MindConverter计算图方案进行PyTorch模型脚本迁移,建议将PyTorch模型转换为ONNX,再使用ONNX文件进行模型脚本迁移,详情见[PyTorch使用说明](https://pytorch.org/docs/stable/onnx.html)。 - -### TensorFlow模型脚本迁移 - -**MindConverter提供基于图结构的脚本生成方案**:指定`--model_file`、`--shape`、`--input_nodes`、`--output_nodes`进行脚本迁移。 - -> AST方案不支持TensorFlow模型脚本迁移,TensorFlow脚本迁移仅支持基于图结构的方案。 - -若省略`--output`与`--report`参数,MindConverter将在当前工作目录(Working directory)下自动创建`output`目录,将生成的脚本、转换报告、权重文件、权重映射表输出至该目录。 - -### ONNX模型文件迁移 - -**MindConverter提供基于图结构的脚本生成方案**:指定`--model_file`、`--shape`、`--input_nodes`、`--output_nodes`进行脚本迁移。 - -> AST方案不支持ONNX模型文件迁移,ONNX文件迁移仅支持基于图结构的方案。 - -若省略`--output`与`--report`参数,MindConverter将在当前工作目录(Working directory)下自动创建`output`目录,将生成的脚本、转换报告、权重文件、权重映射表输出至该目录。 - -## 使用场景 - -MindConverter提供两种技术方案,以应对不同脚本迁移场景: - -1. 用户希望迁移后脚本保持原脚本结构(包括变量、函数、类命名等与原脚本保持一致); -2. 用户希望迁移后脚本保持较高的转换率,尽量少的修改、甚至不需要修改,即可实现迁移后模型脚本的执行。 - -对于上述第一种场景,推荐用户使用基于AST的方案进行转换(AST方案仅支持PyTorch脚本转换),AST方案通过对原PyTorch脚本的抽象语法树进行解析、编辑,将其替换为MindSpore的抽象语法树,再利用抽象语法树生成代码。理论上,AST方案支持任意模型脚本迁移,但语法树解析操作受原脚本用户编码风格影响,可能导致同一模型的不同脚本最终的转换率存在一定差异。 - -对于上述第二种场景,推荐用户使用基于图结构的脚本生成方案,计算图作为一种标准的模型描述语言,可以消除用户代码风格多样导致的脚本转换率不稳定的问题。在已支持算子的情况下,该方案可提供优于AST方案的转换率。 - -目前已基于计算机视觉领域典型模型对图结构的脚本转换方案进行测试。 - -> 1. 基于图结构的脚本生成方案,由于要以推理模式加载ONNX、TensorFlow模型,会导致转换后网络中Dropout算子丢失,需要用户手动补齐。 -> 2. 
基于图结构的脚本生成方案持续优化中。 - -## 使用示例 - -### 基于AST的脚本转换示例 - -若用户希望使用基于AST的方案进行脚本迁移,假设原PyTorch脚本路径为`/home/user/model.py`,希望将脚本输出至`/home/user/output`,转换报告输出至`/home/user/output/report`,则脚本转换命令为: - -```bash -mindconverter --in_file /home/user/model.py \ - --output /home/user/output \ - --report /home/user/output/report -``` - -转换报告中,对于未转换的代码行形式为如下,其中x, y指明的是原PyTorch脚本中代码的行、列号。对于未成功转换的算子,可参考[MindSporeAPI映射查询功能](https://www.mindspore.cn/doc/note/zh-CN/master/index.html#operator_api) 手动对代码进行迁移。对于工具无法迁移的算子,会保留原脚本中的代码。 - -```text -line x:y: [UnConvert] 'operator' didn't convert. ... -``` - -转换报告示例如下所示: - -```text - [Start Convert] - [Insert] 'import mindspore.ops as ops' is inserted to the converted file. - line 1:0: [Convert] 'import torch' is converted to 'import mindspore'. - ... - line 157:23: [UnConvert] 'nn.AdaptiveAvgPool2d' didn't convert. Maybe could convert to mindspore.ops.operations.ReduceMean. - ... - [Convert Over] -``` - -对于部分未成功转换的算子,报告中会提供修改建议,如`line 157:23`,MindConverter建议将`torch.nn.AdaptiveAvgPool2d`替换为`mindspore.ops.operations.ReduceMean`。 - -### 基于图结构的脚本生成示例 - -#### TensorFlow模型脚本生成示例 - -使用TensorFlow模型脚本迁移,需要先将TensorFlow模型导出为pb格式,并且获取模型输入节点、输出节点名称。TensorFlow pb模型导出可参考[TensorFlow Pb模型导出教程](https://gitee.com/mindspore/mindinsight/blob/master/mindinsight/mindconverter/docs/tensorflow_model_exporting_cn.md#)。 - -假设输入节点名称为`input_1:0`,输出节点名称为`predictions/Softmax:0`,模型输入样本尺寸为`1,224,224,3`,模型绝对路径为`xxx/frozen_model.pb`,希望将脚本、权重文件输出至`/home/user/output`,转换报告以及权重映射表输出至`/home/user/output/report`,则脚本生成命令为: - -```bash -mindconverter --model_file /home/user/xxx/frozen_model.pb --shape 1,224,224,3 \ - --input_nodes input_1:0 \ - --output_nodes predictions/Softmax:0 \ - --output /home/user/output \ - --report /home/user/output/report -``` - -执行该命令,MindSpore代码文件、权重文件、权重映射表和转换报告生成至相应目录。 - -由于基于图结构方案属于生成式方法,转换过程中未参考原TensorFlow脚本,因此生成的转换报告中涉及的代码行、列号均指生成后脚本。 - -另外对于未成功转换的算子,在代码中会相应的标识该节点输入、输出Tensor的shape(以`input_shape`, `output_shape`标识),便于用户手动修改。以Reshape算子为例,将生成如下代码: - -```python 
-class Classifier(nn.Cell): - - def __init__(self): - super(Classifier, self).__init__() - ... - self.reshape = onnx.Reshape(input_shape=(1, 1280, 1, 1), - output_shape=(1, 1280)) - ... - - def construct(self, x): - ... - # Suppose input of `reshape` is x. - reshape_output = self.reshape(x) - ... - -``` - -通过`input_shape`、`output_shape`参数,用户可以十分便捷地完成算子替换,替换结果如下: - -```python -import mindspore.ops as ops -... - -class Classifier(nn.Cell): - - def __init__(self): - super(Classifier, self).__init__() - ... - self.reshape = ops.Reshape(input_shape=(1, 1280, 1, 1), - output_shape=(1, 1280)) - ... - - def construct(self, x): - ... - # Suppose input of `reshape` is x. - reshape_output = self.reshape(x, (1, 1280)) - ... - -``` - -> 其中`--output`与`--report`参数可省略,若省略,该命令将在当前工作目录(Working directory)下自动创建`output`目录,将生成的脚本、转换报告输出至该目录。 - -映射表中分别保存算子在MindSpore中的权重信息(`converted_weight`)和在原始框架中的权重信息(`source_weight`)。 - -权重映射表示例如下所示: - -```json -{ - "resnet50": [ - { - "converted_weight": { - "name": "conv2d_0.weight", - "shape": [ - 64, - 3, - 7, - 7 - ], - "data_type": "Float32" - }, - "source_weight": { - "name": "conv1.weight", - "shape": [ - 64, - 3, - 7, - 7 - ], - "data_type": "float32" - } - } - ] -} -``` - -#### ONNX模型文件生成示例 - -使用ONNX模型文件迁移,需要先从.onnx文件中获取模型输入节点、输出节点名称。获取ONNX模输入、输出节点名称,可使用 [Netron](https://github.com/lutzroeder/netron) 工具查看。 - -假设输入节点名称为`input_1:0`、输出节点名称为`predictions/Softmax:0`,模型输入样本尺寸为`1,3,224,224`,则可使用如下命令进行脚本生成: - -```bash -mindconverter --model_file /home/user/xxx/model.onnx --shape 1,3,224,224 \ - --input_nodes input_1:0 \ - --output_nodes predictions/Softmax:0 \ - --output /home/user/output \ - --report /home/user/output/report -``` - -执行该命令,MindSpore代码文件、权重文件、权重映射表和转换报告生成至相应目录。 - -由于基于图结构方案属于生成式方法,转换过程中未参考ONNX文件,因此生成的转换报告中涉及的代码行、列号均指生成后脚本。 - -另外,对于未成功转换的算子,在代码中会相应的标识该节点输入、输出Tensor的shape(以`input_shape`、`output_shape`标识),便于用户手动修改,示例见**TensorFlow模型脚本生成示例**。 - -## MindConverter错误码速查表 - 
-MindConverter错误码定义,请参考[链接](https://gitee.com/mindspore/mindinsight/blob/master/mindinsight/mindconverter/docs/error_code_definition_cn.md# )。 - -## MindConverter支持的模型列表 - -[支持的模型列表(如下模型已基于x86 Ubuntu发行版,PyTorch 1.5.0以及TensorFlow 1.15.0测试通过)](https://gitee.com/mindspore/mindinsight/blob/master/mindinsight/mindconverter/docs/supported_model_list_cn.md# )。 - -## 注意事项 - -1. 脚本转换工具本质上为算子驱动,对于MindConverter未维护的ONNX算子与MindSpore算子映射,将会出现相应的算子无法转换的问题,对于该类算子,用户可手动修改,或基于MindConverter实现映射关系,向MindInsight仓库贡献。 -2. 在使用基于计算图的迁移时,MindConverter会根据`--shape`参数将模型输入的批次大小(batch size)、句子长度(sequence length)、图片尺寸(image shape)等尺寸相关参数固定下来,用户需要保证基于MindSpore重训练、推理时输入shape与转换时一致;若需要调整输入尺寸,请重新指定`--shape`进行转换或修改转换后脚本中涉及张量尺寸变更操作相应的操作数。 -3. 脚本文件和权重文件输出于同一个目录下,转换报告和权重映射表输出于同一目录下。 -4. 模型文件的安全性与一致性请用户自行保证。 diff --git a/tutorials/training/source_zh_cn/advanced_use/migrate_script.rst b/tutorials/training/source_zh_cn/advanced_use/migrate_script.rst deleted file mode 100644 index dea5b119a20a7530207b977dec670d25f8072f1d..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/migrate_script.rst +++ /dev/null @@ -1,9 +0,0 @@ -迁移第三方框架训练脚本 -======================== - -.. 
toctree:: - :maxdepth: 1 - - migrate_3rd_scripts_mindconverter - migrate_3rd_scripts - \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/advanced_use/mindinsight_commands.md b/tutorials/training/source_zh_cn/advanced_use/mindinsight_commands.md deleted file mode 100644 index f0390a89a0613828d2707115fd080e30c0783a1c..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/mindinsight_commands.md +++ /dev/null @@ -1,147 +0,0 @@ -# MindInsight相关命令 - -`Linux` `Ascend` `GPU` `CPU` `模型调优` `中级` `高级` - - - -- [MindInsight相关命令](#mindinsight相关命令) - - [查看命令帮助信息](#查看命令帮助信息) - - [查看版本信息](#查看版本信息) - - [启动服务](#启动服务) - - [查看服务进程信息](#查看服务进程信息) - - [停止服务](#停止服务) - - [Summary导出](#summary导出) - - [使用mindoptimizer进行超参调优](#使用mindoptimizer进行超参调优) - - - - - -## 查看命令帮助信息 - -```shell -mindinsight --help -``` - -## 查看版本信息 - -```shell -mindinsight --version -``` - -## 启动服务 - -```shell -mindinsight start [-h] [--config ] [--workspace ] - [--port ] [--url-path-prefix ] - [--reload-interval ] - [--summary-base-dir ] - [--enable-debugger ] - [--debugger-port ] -``` - -参数含义如下: - -|参数名|属性|功能描述|参数类型|默认值|取值范围|规则限制| -|---|---|---|---|---|---|---| -|`-h, --help`|可选|显示启动命令的帮助信息。|-|-|-|-| -|`--config `|可选|指定配置文件或配置模块。|String|空|-|物理文件路径(file:/path/to/config.py)或Python可识别的模块路径(python:path.to.config.module)。| -|`--workspace `|可选|指定工作目录路径。|String|$HOME/mindinsight|-|-| -|`--port `|可选|指定Web可视化服务端口。|Integer|8080|1~65535|-| -|`--url-path-prefix `|可选|指定Web服务URL地址前缀。|String|空|-|URL地址前缀由斜杠(/)分隔成多个部分,各部分支持由字母/数字/下划线/连字符/点号组成的字符串,但不能是单点号(.)或双点号(..)。| -|`--reload-interval `|可选|指定加载数据的时间间隔(单位:秒)。|Integer|3|0~300|设置为0时表示只加载一次数据。| -|`--summary-base-dir `|可选|指定加载训练日志数据的根目录路径。|String|./|-|MindInsight将遍历此路径下的直属子目录。若某个直属子目录包含日志文件,则该子目录被识别为日志文件目录,若根目录包含日志文件,则根目录被识别为日志文件目录。| -|`--enable-debugger `|可选|是否开启Debugger功能|Boolean|False|True/False/1/0|只有开启了调试器,才会在MindInsight页面显示调试器入口。| -|`--debugger-port `|可选|指定Debugger Server服务端口。|Integer|50051|1~65535|-| - -> 
服务启动时,命令行参数值将被保存为进程的环境变量,并以 `MINDINSIGHT_` 开头作为标识,如 `MINDINSIGHT_CONFIG`,`MINDINSIGHT_WORKSPACE`,`MINDINSIGHT_PORT` 等。 - -## 查看服务进程信息 - -MindInsight向用户提供Web服务,可通过以下命令,查看当前运行的Web服务进程。 - -```shell -ps -ef | grep mindinsight -``` - -根据服务进程PID,可通过以下命令,查看当前服务进程对应的工作目录`WORKSPACE`。 - -```shell -lsof -p | grep access -``` - -输出如下,可查看`WORKSPACE`。 - -```shell -gunicorn /log/gunicorn/access.log -``` - -## 停止服务 - -```shell -mindinsight stop [-h] [--port PORT] -``` - -参数含义如下: - -|参数名|属性|功能描述|参数类型|默认值|取值范围|规则限制| -|---|---|---|---|---|---|---| -|`-h, --help`|可选|显示停止命令的帮助信息。|-|-|-|-| -|`--port `|可选|指定Web可视化服务端口。|Integer|8080|1~65535|-| - -## Summary导出 - -MindInsight中提供解析Summary日志文件的工具,用户可以通过命令行将summary日志文件中的标量存入csv文件,图像存入png文件,从而便于查看和对数据进一步处理。 - -```shell -mindinsight parse_summary [--summary-dir] [--output] -``` - -参数含义如下: - -|参数名|属性|功能描述|参数类型|默认值|取值范围|规则限制| -|---|---|---|---|---|---|---| -|`--summary-dir `|可选|指定要解析的文件的目录。如果该目录中存在多个summary日志文件,则仅根据文件名解析最新的文件。|String|./|-|summary文件夹需要可读可执行权限,summary文件需要可读权限,检查权限失败会报错退出| -|`--output `|可选|指定输出的目录,将数据输出到该目录中。|String|./|-|-| - -执行命令: - -```shell -mindinsight parse_summary --summary-dir ./ --output ./ -``` - -输出目录结构如下: - -```text -└─output_{datetime} - ├─image - │ └─{tag}_{step}.png - │ - └─scalar.csv -``` - -其中, - -- output_{datetime}为输出目录下的新建目录,命名规则为 'output_年月日_时分秒_毫秒微秒'。 - -- {tag}\_{step}.png为训练过程中的图像,tag代表标签(tag中的特殊字符将被删除,'_'将被替换成代'/')step代表训练步骤。 - -- scalar.csv为标量数据(编码方式:'utf-8')。 - -## 使用mindoptimizer进行超参调优 - -MindInsight中提供调参命令,命令行(Command-line interface, CLI)的使用方式,命令如下。 - -```shell -usage: mindoptimizer [-h] [--version] [--config ] - [--iter ] - -``` - -参数含义如下: - -|参数名|属性|功能描述|参数类型|默认值|取值范围|规则限制| -|---|---|---|---|---|---|---| -|`-h, --help`|可选|显示启动命令的帮助信息|-|-|-|-| -|`--config `|必选|指定配置文件|String|-|-|物理文件路径(file:/path/to/config.yaml),文件格式为yaml| -|`--iter `|可选|指定调参次数|Integer|1|正整数|-| diff --git a/tutorials/training/source_zh_cn/advanced_use/model_encrypt_protection.md 
b/tutorials/training/source_zh_cn/advanced_use/model_encrypt_protection.md deleted file mode 100644 index 6c6e4ef1f5145181c34ab1f07211841446e92b93..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/model_encrypt_protection.md +++ /dev/null @@ -1,68 +0,0 @@ -# 模型加密保护 - -`Linux` `Ascend` `GPU` `CPU` `模型保护` `企业` `高级` - - - -- [模型加密保护](#模型加密保护) - - [概述](#概述) - - [安全导出CheckPoint文件](#安全导出CheckPoint文件) - - [加载密文CheckPoint文件](#加载密文CheckPoint文件) - - -   - -## 概述 - -MindSpore框架提供通过加密对模型文件进行保护的功能,使用对称加密算法对参数文件或推理模型进行加密,使用时直接加载密文模型完成推理或增量训练。 -目前加密方案支持在Linux平台下对CheckPoint参数文件的保护。 - -以下通过示例来介绍CheckPoint文件的加密保存和读取的方法。 - -> 你可以在这里下载完整的样例代码: - -## 安全导出CheckPoint文件 - -目前MindSpore支持使用Callback机制传入回调函数`ModelCheckpoint`对象以保存模型参数,用户可以通过配置`CheckpointConfig`对象来启用参数文件的加密保护。具体配置方法如下: - -```python -from mindspore.train.callback import CheckpointConfig, ModelCheckpoint - -config_ck = CheckpointConfig(save_checkpoint_steps=1875, keep_checkpoint_max=10, enc_key=b'0123456789ABCDEF', enc_mode='AES-GCM') -ckpoint_cb = ModelCheckpoint(prefix='lenet_enc', directory=None, config=config_ck) -model.train(10, dataset, callbacks=ckpoint_cb) -``` - -上述代码中,通过在`CheckpointConfig`中初始化加密密钥和加密模式来启用模型加密。 - -- `enc_key`表示用于对称加密的密钥。 - -- `enc_mode`表示使用哪种加密模式。 - -除了上面这种保存模型参数的方法,还可以调用`save_checkpoint`接口来保存模型参数,使用方法如下: - -```python -from mindspore import save_checkpoint - -save_checkpoint(network, 'lenet_enc.ckpt', enc_key=b'0123456789ABCDEF', enc_mode='AES-GCM') -``` - -其中`enc_key`和`enc_mode`的定义同上。 - -## 加载密文CheckPoint文件 - -MindSpore提供`load_checkpoint`和`load_distributed_checkpoint`分别用于单文件和分布式场景下加载CheckPoint参数文件。以单文件场景为例,可以用如下方式加载密文CheckPoint文件: - -```python -from mindspore import load_checkpoint - -param_dict = load_checkpoint('lenet_enc.ckpt', dec_key=b'0123456789ABCDEF', dec_mode='AES-GCM') -``` - -上述代码中,通过指定`dec_key`和`dec_mode`来启用对密文文件的读取。 - -- `dec_key`表示用于对称解密的密钥。 - -- `dec_mode`表示使用哪种解密模式。 - -分布式场景的方式类似,在调用`load_distributed_checkpoint`时指定`dec_key`和`dec_mode`即可。 diff 
--git a/tutorials/training/source_zh_cn/advanced_use/model_explanation.md b/tutorials/training/source_zh_cn/advanced_use/model_explanation.md deleted file mode 100644 index bff98a1502e17f4bd6e58c57b322aff81af49f09..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/model_explanation.md +++ /dev/null @@ -1,237 +0,0 @@ -# 解释模型 - -`Linux` `Ascend` `GPU` `模型调优` `初级` `中级` `高级` - - - -- [解释模型](#解释模型) - - [概述](#概述) - - [操作流程](#操作流程) - - [准备脚本](#准备脚本) - - [使用限制](#使用限制) - - [启动MindInsight](#启动mindinsight) - - [页面及功能介绍](#页面及功能介绍) - - [显著图可视化](#显著图可视化) - - [解释方法评估](#解释方法评估) - - [综合评估](#综合评估) - - [分类评估](#分类评估) - - [不确定性](#不确定性) - - [反事实](#反事实) - - [基于遮掩的反事实](#基于遮掩的反事实) - - [基于遮掩的反事实使用限制](#基于遮掩的反事实使用限制) - - [基于遮掩的反事实页面及功能介绍](#基于遮掩的反事实页面及功能介绍) - - - - - -## 概述 - -当前深度学习模型多为黑盒模型,性能表现好但可解释性较差。模型解释模块旨在为用户提供对模型决策依据的解释,帮助用户更好地理解模型、信任模型,以及当模型出现错误时有针对性地改进模型效果。 - -在一些影响至关重要的应用场景中,如自动驾驶、金融决策等,由于法律和政策监管的原因,AI模型如果不具备可解释性,是无法真正落地应用的。所以模型的可解释性的重要性越来越高,受到越来越多的关注。因此,模型解释是提升MindSpore生态应用性、用户友好性至关重要的一部分。 - -具体来说,在图片分类任务中,较为广泛使用的一类解释方法会将影响模型的分类决策最关键的区域高亮出来,我们称之为“显著图”,如果被高亮的部分恰好就是相应标签的关键特征,那么通常说明模型学习到的特征是正确的,用户可以更加信任模型的效果和决策。如果模型关注的是不相关的部分,即使预测标签是正确的,也不代表模型是可靠的,模型开发者还是需要优化改进模型。造成这种情况有可能是训练数据中存在某些特征的相关性,模型开发者可以考虑有针对性的做数据增强来修正模型学习到的偏见。 - -除了提供多种解释方法,我们还提供了一套对解释方法效果评分的度量方法,从多种维度评估解释方法的效果,从而帮助用户比较和选择最适合于特定场景的解释方法。 - -## 操作流程 - -### 准备脚本 - -当前MindSpore提供解释方法及给解释方法进行评估的度量Python API,已提供的解释方法可以通过`mindspore.explainer.explanation`包获取,度量方法可以通过`mindspore.explainer.benchmark`包获取。用户准备好待解释的黑盒模型和数据,在脚本中根据需要实例化解释方法及度量方法,调用API用于收集解释结果和解释度量结果。 - -MindSpore还提供`mindspore.explainer.ImageClassificationRunner`运行模块,支持自动化运行所有解释方法和度量方法。用户将实例化的解释方法及度量方法进行注册,即可自动运行解释方法及度量方法,并生成及保存包含解释结果及解释度量结果的解释日志。 - -下面以ResNet50及带有20类多标签数据为例,用户初始化`explanation`中解释方法及`benchmark`中度量方法,调用`ImageClassificationRunner`进行解释和度量。其样例代码如下: - -```python -import mindspore.nn as nn -from mindspore import load_checkpoint, load_param_into_net - -from mindspore.explainer.explanation import GradCAM, GuidedBackprop -from 
mindspore.explainer.benchmark import Faithfulness, Localization -from mindspore.explainer import ImageClassificationRunner - -if __name__ == "__main__": - num_classes = 20 - # please refer to model_zoo.cv.official.resnet.src.resnet50.py for the model architecture of resnet50 - net = resnet50(num_classes) - param_dict = load_checkpoint("resnet50.ckpt") - load_param_into_net(net, param_dict) - - - # initialize explainers with the loaded black-box model - gradcam = GradCAM(net, layer='layer4') - guidedbackprop = GuidedBackprop(net) - - # initialize benchmarkers to evaluate the chosen explainers - # for Faithfulness, the initialization needs an activation function that transforms the output of the network to a probability is also needed - activation_fn = nn.Sigmoid() # for multi-label classification - faithfulness = Faithfulness(num_labels=num_classes, metric='InsertionAUC', activation_fn=activation_fn) - localization = Localization(num_labels=num_classes, metric='PointingGame') - - # returns the dataset to be explained, when localization is chosen, the dataset is required to provide bounding box - # the columns of the dataset should be in [image], [image, labels], or [image, labels, bbox] (order matters) - # You may refer to 'mindspore.dataset.project' for columns managements - dataset_path = "dataset_dir" - dataset = get_dataset(dataset_path) - - # specify the class names of the dataset - classes = [ - 'aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', - 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', - 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor', - ] - - data = (dataset, classes) - explainers = [gradcam, guidedbackprop] - benchmarkers = [faithfulness, localization] - - # initialize runner with specified summary_dir - runner = ImageClassificationRunner(summary_dir='./summary_dir', network=net, activation_fn=activation_fn, data=data) - runner.register_saliency(explainers, benchmarkers) - - # execute runner.run to 
generate explanation and evaluation results to save it to summary_dir - runner.run() -``` - -### 使用限制 - -- 当前只支持图片分类下的CNN网络模型,比如:Lenet、Resnet、Alexnet。 -- 输入的图片数据必须为单通道、三通道或四通道格式。 -- 仅支持GPU和Ascend设备下的PyNative运行模式。 -- 不同的 `ImageClassificationRunner` 对象需要使用不同的解释方法及度量方法对象,所以用户必须针对每个 `ImageClassificationRunner` 对象实例化独占的解释方法及度量方法对象,否则可能会产生错误。下方是一个正确的实例化示例。 - -```python -gradcam = GradCAM(net, layer='layer4') -guidedbackprop = GuidedBackprop(net) - -runner = ImageClassificationRunner(summary_dir='./summary_dir_1', network=net, activation_fn=activation_fn, data=data) -runner.register_saliency(explainers=[gradcam, guidedbackprop]) -runner.run() - -# generate another summary with GradCAM only -runner2 = ImageClassificationRunner(summary_dir='./summary_dir_2', network=net, activation_fn=activation_fn, data=data) - -# reusing explainer instance in other runner, errors may occur -# runner2.register_saliency(explainers=[gradcam]) - -# instantiating a new GradCAM is the correct way -gradcam2 = GradCAM(net, layer='layer4') -runner2.register_saliency(explainers=[gradcam2]) - -runner2.run() -``` - -### 启动MindInsight - -启动MindInsight系统,在顶部选择进入“模型解释”模块。可以看到所有的解释日志路径,当日志满足条件时,操作列会有“显著图可视化”的功能入口。 - -![xai_index](./images/xai_index.png) - -## 页面及功能介绍 - -### 显著图可视化 - -显著图可视化用于展示对模型决策结果影响最为显著的图片区域,通常高亮部分可视为图片被标记为目标分类的关键特征。 - -![xai_saliency_map](./images/xai_saliency_map.png) - -进入显著图可视化界面,会展示: - -- 用户通过Dataset的Python API接口设置的目标数据集。 -- 真实标签、预测标签,以及模型对对应标签的预测概率。根据具体情况,系统会在对应标签的左上角增加TP,FN,FP(含义见界面提示信息)的旗标。 -- 选中的解释方法给出的显著图。 - -界面操作: - -1. 通过界面上方的解释方法勾选需要的解释方法。 -2. 通过切换界面右上方的“叠加于原图”按钮可以选择让显著图叠加于原图上显示。 -3. 点击不同标签,显示模型对不同标签的显著图分析结果。对于不同的分类结果,通常模型的关注点也是不同的。 -4. 点选预测类型复选框,以显示具有已选类型的标签的图片:TP - 真阳性、 FN - 假阴性、 FP - 假阳性。 -5. 通过界面上方的标签筛选功能,筛选出指定标签图片。 -6. 通过界面右上角的图片排序改变图片显示的顺序,可选“概率值降序”或“不确定性值降序”。 -7. 点击解释方法最右边的“查看评分”,可以进入评估所有解释方法的界面。 -8. 
点击图片可查看放大图。 - -![xai_saliency_map_detail](./images/xai_saliency_map_detail.png) - -### 解释方法评估 - -#### 综合评估 - -对当前提供的解释方法,从不同的评估维度进行评分。我们提供了多种评估度量维度,帮助用户对比不同解释方法在不同维度上的表现,从而挑选最适合用户使用的解释方法。用户可以根据对特定场景下看中的指标进行权重配置,得到综合得分。 - -![xai_metrix_comprehensive](./images/xai_metrix_comprehensive.png) - -#### 分类评估 - -分类评估页提供两种形式的对比,一种是同一解释方法的不同度量维度在各个标签下的分值,一种是对于同一度量维度,不同解释方法在各个标签下的分值。 - -![xai_metrix_class](./images/xai_metrix_class.png) - -## 不确定性 - -模型决策结果存有不确定性, 名为 [Epistemic Uncertainty](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/nn_probability/mindspore.nn.probability.toolbox.UncertaintyEvaluation.html#mindspore.nn.probability.toolbox.UncertaintyEvaluation) 。计算方法是在模型中插入dropout层再多次重复推理,最后得出输出概率的标准偏差和95%置信区间: - -![xai_saliency_map](./images/xai_uncertainty.png) - -模型和数据集的准备、使用限制跟前述的解释方法相同,用户通过调用 `ImageClassificationRunner` 的 `register_uncertainty()` 来启用不确定性计算,下方是一个使用例子。 - -```python -runner = ImageClassificationRunner(summary_dir='./summary_dir_1', network=net, activation_fn=activation_fn, data=data) -runner.register_saliency(explainers=[gradcam, guidedbackprop]) -runner.register_uncertainty() -runner.run() -``` - -要注意的是 `register_uncertainty()` 必须跟 `register_saliency()`一起使用,但调用次序没有限制。 - -## 反事实 - -反事实是一种相对新的模型解释方法,是指对已得出的推理结果进行否定而重新表征。例如一张动物图片被模型分类为猫,我们可以通过解答反事实问题(例如:如何把这张图片进行编辑从而令其不会被模型分类为猫?)从而解释一个模型决策(例如:将这张图片分类为猫)。反事实解释存有不同的形式,`ImageClassificationRunner` 提供了一个容易使用的“基于遮掩的反事实(简称“HOC”)”接口。将来,会支持不同的反事实功能。 - -### 基于遮掩的反事实 - -HOC是一种基于遮掩的反事实方法,目的是在目标标签的预测概率高于阈值(暂固定为0.5)的约束条件下搜索出最小的图片显示区域。整个搜索是一个分层级的过程,首先把整个图片使用高斯模糊遮掩住,之后从最大的显示区块开始搜索,再深入搜索到更小的子区块以获得更精确的结果。最终我们会得到一个区块树,每一个节点代表一个正方形的显示区块,而子节点侧是在父节点范围内的更小的正方形显示区块。根节点代表了整个原始图片区域,而它的直接子节点就是第一层的显示区块。 - -目前,`ImageClassificationRunner` 会基于输入图片大小自动计算分层数量(一到三层),显示区块大小,平移步幅及模糊遮掩图。第一层的显示区块边长为输入图片的短边的一半(向下舍入),每下一层显示区块边长就会减半,最小为28个像素否则不会再增加下一层。平移步幅是显示区块边长的五分之一(向下舍入)。 - -模型和数据集的准备跟前述的解释方法相同,用户通过调用 `ImageClassificationRunner` 的 `register_hierarchical_occlusion()` 来启用HOC,下方是一个使用例子。 - -```python -runner = 
ImageClassificationRunner(summary_dir='./summary_dir_1', network=net, activation_fn=activation_fn, data=data) -runner.register_hierarchical_occlusion() -runner.run() -``` - -用户可在同一个runner上配合使用 register_saliency() 。 - -#### 基于遮掩的反事实使用限制 - -- 包括所有前述的使用限制,但只支持三通道输入的模型。 -- 图片数据必须为RGB三通道格式,短边的长度必须大于或等于56像素。 -- 在只调用了`register_hierarchical_occlusion()`但没有调用`register_saliency()`的情况下,可以同时支持PyNative和Graph运行模式。 - -#### 基于遮掩的反事实页面及功能介绍 - -可以看到在解释日志列表上所有使用了HOC功能的解释日志的”反事实解释”链接已经可被点击,点击后就会进入反事实页面。 - -![xai_hoc_index](./images/xai_hoc_index.png) - -基于遮掩的反事实解释页面展示了所有搜寻到的结果,包括: - -- 所有预测概率大于0.5的目标标签的样本及其原始图片。 -- 目标标签的预测概率。 -- 搜索过程中每个分层的结果图及其预测概率。 - -![xai_hoc](./images/xai_hoc.png) - -界面操作: - -1. 在左侧“图片列表”面板右上方有个“隐藏”开关,当开关开启时页面将会隐藏不含HOC解释结果的数据。开关默认为开启,用户可选择关闭以显示所有数据。 -2. 在左侧“图片列表”面板选择要显示样本的标签筛选及排序方法,可以选择使用预测概率排序。 -3. 在左侧“图片列表”面板流览样本或翻页,当点击了一个样本图片后,相关的HOC搜索结果会在其他的面板上显示。 -4. 在中央“原始图片”面板选择要显示的标签搜寻结果,只有预测概率大于0.5的标签才有HOC搜索结果。 -5. 在下方“逐层遮掩过程”面板检视搜寻过程的分层结果图,当点击了一张结果图后,它会被放大显示在右侧“查看解释”面板上。 (注意: 为了区分显示,已遮掩的区域被降低了亮度及转成了灰阶,但在实际的搜索中只是使用了高斯模糊作遮掩,并没有对亮度或彩度进行调整。) diff --git a/tutorials/training/source_zh_cn/advanced_use/nlp.rst b/tutorials/training/source_zh_cn/advanced_use/nlp.rst deleted file mode 100644 index 37d9606b68dc72b1259f10a5ee6c3afc872842cc..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/nlp.rst +++ /dev/null @@ -1,8 +0,0 @@ -自然语言处理 -=============== - -.. 
toctree:: - :maxdepth: 1 - - nlp_sentimentnet - nlp_bert_poetry diff --git a/tutorials/training/source_zh_cn/advanced_use/nlp_bert_poetry.md b/tutorials/training/source_zh_cn/advanced_use/nlp_bert_poetry.md deleted file mode 100644 index 07c801b80e304f45cfe32efd48fa1bf6f2b45a58..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/nlp_bert_poetry.md +++ /dev/null @@ -1,304 +0,0 @@ -# 使用BERT网络实现智能写诗 - -`Linux` `Ascend` `模型训练` `推理应用` `高级` - - - -- [使用BERT实现智能写诗](#使用bert实现智能写诗) - - [案例简介](#案例简介) - - [模型介绍](#模型介绍) - - [模型训练](#模型训练) - - [Pre-training](#pre-training) - - [Fine-tuning](#fine-tuning) - - [模型修改](#模型修改) - - [样例代码](#样例代码) - - [实现步骤](#实现步骤) - - [基础信息](#基础信息) - - [数据准备](#数据准备) - - [训练](#训练) - - [推理验证](#推理验证) - - [服务部署](#服务部署) - - [参考文献](#参考文献) - - - - -五千年历史孕育了深厚的中华文化,而诗词是中华文化不可或缺的一部分,欣赏过诗词就可以感受到当中纯净、辽阔的意境,极致的感性,恰恰弥补了节奏紧迫的现代生活带给我们的拥挤感、浮躁感,古语曰:熟读唐诗三百首,不会作诗也会吟,今天理科生MindSpore也来秀一秀文艺范儿! - -## 案例简介 - -通过MindSpore训练出智能写诗模型及部署预测服务,具体流程如下图所示: - -![introduce image](images/introduce.PNG) - -图1:案例流程图 - -由于Bert预训练比较费时费力,在本案例中省略了预训练阶段,直接提供MindSpore预训练好的Bert-Base模型,经过Fine-tuning后训练获得最终的模型的训练全流程。 - -除此之外,将展示如何通过MindSpore Serving将该模型部署成一个预测服务,Clients代码可以发送请求给该预测服务并获得预测结果。 - -## 模型介绍 - -和诗词打交道需要用NLP相关的网络,BERT作为NLP领域中里程碑式的模型,极大地推动了NLP社区的发展,BERT模型由Google提出,采用Transformer中的Encoder结构,通过若干层Encoder的堆叠并借由注意力机制,在多项GLUE(General Language Understanding Evaluation)任务中取得了SOTA(State Of The Art)的效果。 - -正是由于这种注意力的机制,不同于以往的循环神经网络的结构,可以做高度的并行计算,这样便可以充分发挥出Ascend 910AI处理器的强大算力,获得极佳的性能表现。 - -## 模型训练 - -分为两个步骤,即Pre-training和Fine-tuning。首先在海量无标签的数据上进行Pre-training,希望通过此过程让模型掌握一般的人类语言语义机制,然后在Fine-tuning阶段会针对特定细分领域的有标签数据进行训练以完成特定任务。 - -### Pre-training - -Pre-training是在无标签数据上进行的自编码训练,因此训练任务的设计尤为重要,BERT中的Pre-training包含两项任务MLM(Masked Language Model)和NSP(Next Sentence Prediction)。 - -- **MLM任务**是在输入时,随机将部分token置换为[MASK]标记,然后通过注意力机制,由其上下文预测出被遮挡位置的原始token。 - -- BERT模型的输入是两“句”话:A与B,构造数据的时候会以50%的概率随机调换A、B的位置,**NSP任务**是预测A与B是否是相连的两“句”话。 - 
-在MLM基础上再增加一个NSP任务,是考虑到实际任务中并没有MLM这种任务,增加一个更符合实际任务类型的预训练任务。 - -从上述描述中可以看出,Pre-training并不需要任务数据标签,这种MLM的训练任务本质上是去噪自编码模型,因此BERT可以利用海量的无标签数据来进行预训练。通过预训练阶段的任务设置,BERT可以从无标签数据中学到基础语义逻辑,然后配合Finetune过程完成特定任务训练。 - -BERT模型的结构如下图所示,输入两“句”话,如果是中文模型,那么每一个token对应一个汉字,[CLS]和[SEP]是插入的特殊标识位。 - -![Teaser image](images/bert_model.PNG) - -图2:Bert模型结构[1] - -### Fine-tuning - -Fine-tuning是在BERT的预训练模型基础上,在最后增加一层适配实际任务,然后在有标签数据上进行少量的训练。 - -Fine-tuning的模式可以分为两大类,end-to-end Fine-tuning和feature-based approach,两者的区别在于Finetune阶段中是否修改BERT预训练模型中的参数,正常情况下都是使用end-to-end Fine-tuning。 - -### 模型修改 - -BERT采用了Encoder结构,`attention_mask`为全1的向量,即每个token都可以看到其前后的token,此举帮助每一个token都可以了解到整句话信息从而加强语义理解能力,所以BERT天生就不是生成式模型。 - -语句生成任务中,在生成下一个token时,应当只能看到之前token的信息,而不应该看到全局信息,因此需要在修改`attention_mask`为下三角矩阵,这样当前token只能看到自己及之前的token信息。 - -用于Fine-tuning的数据是40000多首诗词,并无标签,因此构造Fine-tuning任务如下图所示,每一个token的输出要接近下一个标签token,使用交叉熵作为损失函数。 - -![Teaser image](images/finetune.PNG) - -图3:训练流程示意图 - -## 样例代码 - -可以在这里下载完整的样例代码:,直接运行体验实现写诗效果,代码结构如下: - -```text -└─bert_poetry - ├── src - ├── bert_for_pre_training.py # 封装BERT-Base正反向网络类 - ├── bert_model.py # 定义BERT正向网络结构 - ├── finetune_config.py # Fine-tuning配置文件 - ├── fused_layer_norm.py # 定义fused_layer_norm - ├── __init__.py # __init__ - ├── utils.py # 定义Fine-tuning正向网络结构 - ├── poetry_utils.py # 分词器 Tokenizer - └── poetry_dataset.py # 解析poetry.txt,生成所需dataset - ├── vocab.txt # 词汇表 - ├── generator.py # 推理生成诗句使用函数 - ├── poetry.py # 训练、推理、导出函数 - ├── serving - ├── ms_serving # 启动服务器侧serving - ├── bert_flask.py # 服务器侧接收requests请求 - ├── poetry_client.py # 客户端代码 - ├── ms_service_pb2_grpc.py # 定义了grpc相关函数供bert_flask.py使用 - └── ms_service_pb2.py # 定义了protocol buffer相关函数供bert_flask.py使用 - -``` - -## 实现步骤 - -### 基础信息 - -本例可在Ascend 910 AI处理器平台上进行训练及推理。 - -### 数据准备 - -数据集为[43030首诗词](https://github.com/AaronJny/DeepLearningExamples/tree/master/keras-bert-poetry-generator)其中的`poetry.txt`。 - 
-BERT-Base模型的预训练ckpt:可在[MindSpore官网](http://download.mindspore.cn/model_zoo/official/nlp/bert/bert_base_ascend_0.5.0_cn-wiki_official_nlp_20200720.tar.gz)下载。 - -### 训练 - -在`src/finetune_config.py`中修改`pre_training_ckpt`路径,加载预训练的ckpt,修改`batch_size`为bs,修改`dataset_path`为存放诗词的路径,默认的`BertConfig`为Base模型。 - -```python -'dataset_path': '/your/path/to/poetry.txt', -'batch_size': bs, -'pre_training_ckpt': '/your/path/to/pre_training_ckpt', -``` - -执行训练指令 - -```bash -python poetry.py -``` - -### 推理验证 - -修改`poetry.py`中`test_eval`函数来控制随机生成、续写诗句或是藏头诗。 - -`generate_random_poetry`函数实现随机生成和续写诗句的功能,如果入参`s`为空则代表随机生成,`s`不为空则为续写诗句。 - -```python - output = generate_random_poetry(poetrymodel, s='') #随机生成 - output = generate_random_poetry(poetrymodel, s='天下为公') #续写诗句 -``` - -`generate_hidden`函数实现生成藏头诗的功能,入参`head`为隐藏的头部语句。 - -```python - output = generate_hidden(poetrymodel, head="人工智能") #藏头诗 -``` - -执行推理指令 - -```bash -python poetry.py --train=False --ckpt_path=/your/ckpt/path -``` - -会打印出最终生成的诗句,脚本中默认生成一首随机生成、一首续写诗词、一首藏头诗,结果如下所示: - -随机生成: - -```text -大堤柳暗, -春深树根。 -东望一望, -断回还家。 -山色渐风雨, -东风多雨禾。 -无情与去, -万里所思。 -``` - -续写 【天下为公】: - -```text -天下为公少, -唯君北向西。 -远山无路见, -长水见人偏。 -一路巴猿啸, -千峰楚客啼。 -幽深有诗策, -无以话年华。 -``` - -藏头诗 【人工智能】: - -```text -人君离别难堪望, -工部张机自少年。 -智士不知身没处, -能令圣德属何年。 -``` - -### 服务部署 - -通过MindSpore Serving将训练好的模型部署成推理服务。服务端部署包含以下3个步骤:模型导出、Serving服务启动、预处理及后处理的服务启动;客户端发送推理请求给服务端进行模型推理,推理生成的诗句返回给客户端展示。 - -- 模型导出 - - 在使用Serving部署服务前,需要导出模型文件,在`poetry.py`中提供了`export_net`函数负责导出MindIR模型,执行命令: - - ```bash - python poetry.py --export=True --ckpt_path=/your/ckpt/path - ``` - - 会在当前路径下生成`poetry.pb`文件。 - -- Serving服务 - - 在服务器侧启动Serving服务,并加载导出的MindIR文件`poetry.pb`。 - - ```bash - cd serving - ./ms_serving --model_path=/path/to/your/MINDIR_file --model_name=your_mindir.pb - ``` - -- 预处理及后处理的服务 - - 预处理及后处理通过Flask框架来快速实现,在服务器侧运行`bert_flask.py`文件,启动Flask服务。 - - ```bash - python bert_flask.py - ``` - - 通过以上步骤,服务端部署就已经完成。 - -- 客户端 - - 
可用电脑作为客户端,修改`poetry_client.py`中的url请求地址为推理服务启动的服务器IP,并确保端口与服务端`bert_flask.py`中的端口一致,例如: - - ```python - url = 'http://10.155.170.71:8080/' - ``` - - 运行`poetry_client.py`文件 - - ```bash - python poetry_client.py - ``` - - 此时在客户端输入指令,即可在远端服务器进行推理,返回生成的诗句。 - - ```text - 选择模式:0-随机生成,1:续写,2:藏头诗 - 0 - ``` - - ```text - 一朵黄花叶, - 千竿绿树枝。 - 含香待夏晚, - 澹浩长风时。 - ``` - - ```text - 选择模式:0-随机生成,1:续写,2:藏头诗 - 1 - 输入首句诗 - 明月 - ``` - - ```text - 明月照三峡, - 长空一片云。 - 秋风与雨过, - 唯有客舟分。 - 寒影出何处, - 远林含不闻。 - 不知前后事, - 何道逐风君。 - ``` - - ```text - 选择模式:0-随机生成,1:续写,2:藏头诗 - 2 - 输入藏头诗 - 人工智能 - ``` - - ```text - 人生事太远, - 工部与神期。 - 智者岂无识, - 能文争有疑。 - ``` - - 细读鉴赏一下,平仄、押韵、意味均有体现,AI诗人已然成形。 - -> 友情提醒,修改其他类型数据集,也可以完成其他简单的生成类任务,如对春联,简单聊天机器人等,用户可尝试体验实现。 - -## 参考文献 - -[1] [BERT:Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805) - -[2] [https://github.com/AaronJny/DeepLearningExamples/](https://github.com/AaronJny/DeepLearningExamples/) - -[3] [https://github.com/bojone/bert4keras](https://github.com/bojone/bert4keras) diff --git a/tutorials/training/source_zh_cn/advanced_use/nlp_sentimentnet.md b/tutorials/training/source_zh_cn/advanced_use/nlp_sentimentnet.md deleted file mode 100644 index 8d291552a73558654eb32dbb1e52efe06ff0fcbe..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/nlp_sentimentnet.md +++ /dev/null @@ -1,307 +0,0 @@ -# 使用SentimentNet实现情感分类 - -`Linux` `GPU` `CPU` `全流程` `初级` `中级` `高级` - - - -- [使用SentimentNet实现情感分类](#使用sentimentnet实现情感分类) - - [概述](#概述) - - [准备及设计](#准备及设计) - - [下载数据集](#下载数据集) - - [确定评价标准](#确定评价标准) - - [确定网络及流程](#确定网络及流程) - - [实现阶段](#实现阶段) - - [导入需要的库文件](#导入需要的库文件) - - [配置环境信息](#配置环境信息) - - [预处理数据集](#预处理数据集) - - [定义网络](#定义网络) - - [预训练模型](#预训练模型) - - [定义优化器及损失函数](#定义优化器及损失函数) - - [训练并保存模型](#训练并保存模型) - - [模型验证](#模型验证) - - [实验结果](#实验结果) - - - -   -   - - -## 概述 - -情感分类是自然语言处理中文本分类问题的子集,属于自然语言处理最基础的应用。它是对带有感情色彩的主观性文本进行分析和推理的过程,即分析说话人的态度,是倾向正面还是反面。 - -> 
通常情况下,我们会把情感类别分为正面、反面和中性三类。虽然“面无表情”的评论也有不少;不过,大部分时候会只采用正面和反面的案例进行训练,下面这个数据集就是很好的例子。 - -传统的文本主题分类问题的典型参考数据集为[20 Newsgroups](http://qwone.com/~jason/20Newsgroups/),该数据集由20组新闻数据组成,包含约20000个新闻文档。 -其主题列表中有些类别的数据比较相似,例如comp.sys.ibm.pc.hardware和comp.sys.mac.hardware都是和电脑系统硬件相关的题目,相似度比较高。而有些主题类别的数据相对来说就毫无关联,例如misc.forsale和soc.religion.christian。 - -就网络本身而言,文本主题分类的网络结构和情感分类的网络结构大致相似。在掌握了情感分类网络如何构造之后,很容易可以构造一个类似的网络,稍作调参即可用于文本主题分类任务。 - -但在业务上下文侧,文本主题分类是分析文本讨论的客观内容,而情感分类是要从文本中得到它是否支持某种观点的信息。比如,“《阿甘正传》真是好看极了,影片主题明确,节奏流畅。”这句话,在文本主题分类是要将其归为类别为“电影”主题,而情感分类则要挖掘出这一影评的态度是正面还是负面。 - -相对于传统的文本主题分类,情感分类较为简单,实用性也较强。常见的购物网站、电影网站都可以采集到相对高质量的数据集,也很容易给业务领域带来收益。例如,可以结合领域上下文,自动分析特定类型客户对当前产品的意见,可以分主题分用户类型对情感进行分析,以作针对性的处理,甚至基于此进一步推荐产品,提高转化率,带来更高的商业收益。 - -特殊领域中,某些非极性词也充分表达了用户的情感倾向,比如下载使用APP时,“卡死了”、“下载太慢了”就表达了用户的负面情感倾向;股票领域中,“看涨”、“牛市”表达的就是用户的正面情感倾向。所以,本质上,我们希望模型能够在垂直领域中,挖掘出一些特殊的表达,作为极性词给情感分类系统使用: - -$垂直极性词 = 通用极性词 + 领域特有极性词$ - -按照处理文本的粒度不同,情感分析可分为词语级、短语级、句子级、段落级以及篇章级等几个研究层次。这里以“段落级”为例,输入为一个段落,输出为影评是正面还是负面的信息。 - -## 准备及设计 - -### 下载数据集 - -采用IMDb影评数据集作为实验数据。 -> 数据集下载地址: - -以下是负面影评(Negative)和正面影评(Positive)的案例。 - -| Review | Label | -|---|---| -| "Quitting" may be as much about exiting a pre-ordained identity as about drug withdrawal. As a rural guy coming to Beijing, class and success must have struck this young artist face on as an appeal to separate from his roots and far surpass his peasant parents' acting success. Troubles arise, however, when the new man is too new, when it demands too big a departure from family, history, nature, and personal identity. The ensuing splits, and confusion between the imaginary and the real and the dissonance between the ordinary and the heroic are the stuff of a gut check on the one hand or a complete escape from self on the other. | Negative | -| This movie is amazing because the fact that the real people portray themselves and their real life experience and do such a good job it's like they're almost living the past over again. 
Jia Hongsheng plays himself an actor who quit everything except music and drugs struggling with depression and searching for the meaning of life while being angry at everyone especially the people who care for him most. | Positive | - -同时,我们要下载GloVe文件,并在文件开头处添加新的一行,意思是总共读取400000个单词,每个单词用300纬度的词向量表示。 - -```text -400000 300 -``` - -GloVe文件下载地址:。 - -### 确定评价标准 - -作为典型的分类问题,情感分类的评价标准可以比照普通的分类问题处理。常见的精度(Accuracy)、精准度(Precision)、召回率(Recall)和F_beta分数都可以作为参考。 - -$精度(Accuracy)= 分类正确的样本数目 / 总样本数目$ - -$精准度(Precision)= 真阳性样本数目 / 所有预测类别为阳性的样本数目$ - -$召回率(Recall)= 真阳性样本数目 / 所有真实类别为阳性的样本数目$ - -$F1分数 = (2 \times Precision \times Recall) / (Precision + Recall)$ - -在IMDb这个数据集中,正负样本数差别不大,可以简单地用精度(accuracy)作为分类器的衡量标准。 - -### 确定网络及流程 - -我们使用基于LSTM构建的SentimentNet网络进行自然语言处理。 - -1. 加载使用的数据集,并进行必要的数据处理。 -2. 使用基于LSTM构建的SentimentNet网络训练数据,生成模型。 - > LSTM(Long short-term memory,长短期记忆)网络是一种时间循环神经网络,适合于处理和预测时间序列中间隔和延迟非常长的重要事件。具体介绍可参考网上资料,在此不再赘述。 -3. 得到模型之后,使用验证数据集,查看模型精度情况。 - -> 本例面向GPU或CPU硬件平台,你可以在这里下载完整的样例代码: -> -> - `src/config.py`:网络中的一些配置,包括`batch size`、进行几次epoch训练等。 -> - `src/dataset.py`:数据集相关,包括转换成MindRecord文件,数据预处理等。 -> - `src/imdb.py`: 解析IMDb数据集的工具。 -> - `src/lstm.py`: 定义情感网络。 -> - `train.py`:模型的训练脚本。 -> - `eval.py`:模型的推理脚本。 - -## 实现阶段 - -### 导入需要的库文件 - -下列是我们所需要的公共模块及MindSpore的模块及库文件。 - -```python -import argparse -import os - -import numpy as np - -from src.config import lstm_cfg as cfg -from src.dataset import convert_to_mindrecord -from src.dataset import lstm_create_dataset -from src.lstm import SentimentNet -from mindspore import Tensor, nn, Model, context, load_param_into_net, load_checkpoint -from mindspore.nn import Accuracy -from mindspore.train.callback import LossMonitor, CheckpointConfig, ModelCheckpoint, TimeMonitor -``` - -### 配置环境信息 - -1. 
使用`parser`模块,传入运行必要的信息,如数据集存放路径,GloVe存放路径,这样的好处是,对于经常变化的配置,可以在运行代码时输入,使用更加灵活。 - - ```python - parser = argparse.ArgumentParser(description='MindSpore LSTM Example') - parser.add_argument('--preprocess', type=str, default='false', choices=['true', 'false'], - help='whether to preprocess data.') - parser.add_argument('--aclimdb_path', type=str, default="./aclImdb", - help='path where the dataset is stored.') - parser.add_argument('--glove_path', type=str, default="./glove", - help='path where the GloVe is stored.') - parser.add_argument('--preprocess_path', type=str, default="./preprocess", - help='path where the pre-process data is stored.') - parser.add_argument('--ckpt_path', type=str, default="./", - help='the path to save the checkpoint file.') - parser.add_argument('--pre_trained', type=str, default=None, - help='the pretrained checkpoint file path.') - parser.add_argument('--device_target', type=str, default="GPU", choices=['GPU', 'CPU'], - help='the target device to run, support "GPU", "CPU". Default: "GPU".') - args = parser.parse_args() - ``` - -2. 实现代码前,需要配置必要的信息,包括环境信息、执行的模式、后端信息及硬件信息。 - - ```python - context.set_context( - mode=context.GRAPH_MODE, - save_graphs=False, - device_target=args.device_target) - ``` - - 详细的接口配置信息,请参见`context.set_context`接口说明。 - -### 预处理数据集 - -将数据集格式转换为MindRecord格式,便于MindSpore读取。 - -```python -if args.preprocess == "true": - print("============== Starting Data Pre-processing ==============") - convert_to_mindrecord(cfg.embed_size, args.aclimdb_path, args.preprocess_path, args.glove_path) -``` - -> 转换成功后会在`preprocess_path`路径下生成`mindrecord`文件; 通常该操作在数据集不变的情况下,无需每次训练都执行。 -> `convert_to_mindrecord`函数的具体实现请参考 -> 其中包含两大步骤: -> -> 1. 解析文本数据集,包括编码、分词、对齐、处理GloVe原始数据,使之能够适应网络结构。 -> 2. 
转换并保存为MindRecord格式数据集。 - -### 定义网络 - -```python -embedding_table = np.loadtxt(os.path.join(args.preprocess_path, "weight.txt")).astype(np.float32) -network = SentimentNet(vocab_size=embedding_table.shape[0], - embed_size=cfg.embed_size, - num_hiddens=cfg.num_hiddens, - num_layers=cfg.num_layers, - bidirectional=cfg.bidirectional, - num_classes=cfg.num_classes, - weight=Tensor(embedding_table), - batch_size=cfg.batch_size) -``` - -> `SentimentNet`网络结构的具体实现请参考 - -### 预训练模型 - -通过参数`pre_trained`指定预加载CheckPoint文件来进行预训练,默认该参数为空。 - -```python -if args.pre_trained: - load_param_into_net(network, load_checkpoint(args.pre_trained)) -``` - -### 定义优化器及损失函数 - -定义优化器及损失函数的示例代码如下: - -```python -loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') -opt = nn.Momentum(network.trainable_params(), cfg.learning_rate, cfg.momentum) -loss_cb = LossMonitor() -``` - -### 训练并保存模型 - -加载对应数据集并配置好CheckPoint生成信息,然后使用`model.train`接口,进行模型训练。 - -```python -model = Model(network, loss, opt, {'acc': Accuracy()}) - -print("============== Starting Training ==============") -ds_train = lstm_create_dataset(args.preprocess_path, cfg.batch_size) -config_ck = CheckpointConfig(save_checkpoint_steps=cfg.save_checkpoint_steps, - keep_checkpoint_max=cfg.keep_checkpoint_max) -ckpoint_cb = ModelCheckpoint(prefix="lstm", directory=args.ckpt_path, config=config_ck) -time_cb = TimeMonitor(data_size=ds_train.get_dataset_size()) -if args.device_target == "CPU": - model.train(cfg.num_epochs, ds_train, callbacks=[time_cb, ckpoint_cb, loss_cb], dataset_sink_mode=False) -else: - model.train(cfg.num_epochs, ds_train, callbacks=[time_cb, ckpoint_cb, loss_cb]) -print("============== Training Success ==============") -``` - -> `lstm_create_dataset`函数的具体实现请参考 - -### 模型验证 - -加载验证数据集及保存的CheckPoint文件,进行验证,查看模型质量。 - -```python -model = Model(network, loss, opt, {'acc': Accuracy()}) - -print("============== Starting Testing ==============") -ds_eval = lstm_create_dataset(args.preprocess_path, cfg.batch_size, 
training=False) -param_dict = load_checkpoint(args.ckpt_path) -load_param_into_net(network, param_dict) -if args.device_target == "CPU": - acc = model.eval(ds_eval, dataset_sink_mode=False) -else: - acc = model.eval(ds_eval) -print("============== {} ==============".format(acc)) -``` - -## 实验结果 - -在经历了20轮epoch之后,在测试集上的精度约为84.19%。 - -### 执行训练 - -1. 运行训练代码,查看运行结果。 - - ```shell - python train.py --preprocess=true --ckpt_path=./ --device_target=GPU - ``` - - 输出如下,可以看到loss值随着训练逐步降低,最后达到0.2855左右: - - ```shell - ============== Starting Data Pre-processing ============== - vocab_size: 252192 - ============== Starting Training ============== - epoch: 1 step: 1, loss is 0.6935 - epoch: 1 step: 2, loss is 0.6924 - ... - epoch: 10 step: 389, loss is 0.2675 - epoch: 10 step: 390, loss is 0.3232 - ... - epoch: 20 step: 389, loss is 0.1354 - epoch: 20 step: 390, loss is 0.2855 - ``` - -2. 查看保存的CheckPoint文件。 - - 训练过程中保存了CheckPoint文件,即模型文件,我们可以查看文件保存的路径下的所有保存文件。 - - ```shell - ls ./*.ckpt - ``` - - 输出如下: - - ```shell - lstm-11_390.ckpt lstm-12_390.ckpt lstm-13_390.ckpt lstm-14_390.ckpt lstm-15_390.ckpt lstm-16_390.ckpt lstm-17_390.ckpt lstm-18_390.ckpt lstm-19_390.ckpt lstm-20_390.ckpt - ``` - -### 验证模型 - -使用最后保存的CheckPoint文件,加载验证数据集,进行验证。 - -```shell -python eval.py --ckpt_path=./lstm-20_390.ckpt --device_target=GPU -``` - -输出如下,可以看到使用验证的数据集,对文本的情感分析正确率在84.19%左右,达到一个基本满意的结果。 - -```shell -============== Starting Testing ============== -============== {'acc': 0.8419471153846154} ============== -``` diff --git a/tutorials/training/source_zh_cn/advanced_use/optimize_data_processing.ipynb b/tutorials/training/source_zh_cn/advanced_use/optimize_data_processing.ipynb deleted file mode 100644 index 3f8a830ba6fe452fd38a18f4833ab7f3bdaea3af..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/optimize_data_processing.ipynb +++ /dev/null @@ -1,876 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 优化数据处理\n", - 
"\n", - "`Linux` `Ascend` `GPU` `CPU` `数据准备` `中级` `高级`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/advanced_use/optimize_data_processing.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/mindspore_optimize_data_processing.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL21pbmRzcG9yZV9vcHRpbWl6ZV90aGVfcGVyZm9ybWFuY2Vfb2ZfZGF0YV9wcmVwYXJhdGlvbi5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "数据是整个深度学习中最重要的一环,因为数据的好坏决定了最终结果的上限,模型的好坏只是去无限逼近这个上限,所以高质量的数据输入,会在整个深度神经网络中起到积极作用,数据在整个数据处理和数据增强的过程像经过pipeline管道的水一样,源源不断地流向训练系统,如图所示:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![pipeline](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/pipeline.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore为用户提供了数据处理以及数据增强的功能,在数据的整个pipeline过程中,其中的每一步骤,如果都能够进行合理的运用,那么数据的性能会得到很大的优化和提升。本次体验将基于CIFAR-10数据集来为大家展示如何在数据加载、数据处理和数据增强的过程中进行性能的优化。\n", - "\n", - "此外,操作系统的存储、架构和计算资源也会一定程度上影响数据处理的性能。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 准备环节" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 导入模块" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`dataset`模块提供API用来加载和处理数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - 
"source": [ - "import mindspore.dataset as ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`numpy`模块用于生成ndarray数组。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 下载所需数据集" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "运行以下命令来获取数据集:\n", - "\n", - "下载CIFAR-10二进制格式数据集,并将数据集文件解压到`./datasets/`目录下,数据加载的时候使用该数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/cifar-10-batches-bin\n", - "├── readme.html\n", - "├── test\n", - "│   └── test_batch.bin\n", - "└── train\n", - " ├── batches.meta.txt\n", - " ├── data_batch_1.bin\n", - " ├── data_batch_2.bin\n", - " ├── data_batch_3.bin\n", - " ├── data_batch_4.bin\n", - " └── data_batch_5.bin\n", - "\n", - "2 directories, 8 files\n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz\n", - "!mkdir -p datasets\n", - "!tar -xzf cifar-10-binary.tar.gz -C datasets\n", - "!mkdir -p datasets/cifar-10-batches-bin/train datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/test_batch.bin datasets/cifar-10-batches-bin/test\n", - "!mv -f datasets/cifar-10-batches-bin/data_batch*.bin datasets/cifar-10-batches-bin/batches.meta.txt datasets/cifar-10-batches-bin/train\n", - "!tree ./datasets/cifar-10-batches-bin" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下载CIFAR-10 Python文件格式数据集,并将数据集文件解压到`./datasets/cifar-10-batches-py`目录下,数据转换的时候使用该数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/cifar-10-batches-py\n", - "├── 
batches.meta\n", - "├── data_batch_1\n", - "├── data_batch_2\n", - "├── data_batch_3\n", - "├── data_batch_4\n", - "├── data_batch_5\n", - "├── readme.html\n", - "└── test_batch\n", - "\n", - "0 directories, 8 files\n" - ] - } - ], - "source": [ - "!wget -N https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-python.tar.gz\n", - "!mkdir -p datasets\n", - "!tar -xzf cifar-10-python.tar.gz -C datasets\n", - "!tree ./datasets/cifar-10-batches-py" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据加载性能优化" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore为用户提供了多种数据加载方式,其中包括常用数据集加载、用户自定义数据集加载、MindSpore数据格式加载,详情内容请参考[数据集加载](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html)。对于数据集加载,底层实现方式的不同,会导致数据集加载的性能存在差异,如下所示:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "| | 常用数据集 | 用户自定义 | MindRecord |\n", - "| :----: | :----: | :----: | :----: |\n", - "| 底层实现 | C++ | Python | C++ |\n", - "| 性能 | 高 | 中 | 高|" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 性能优化方案" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![data-loading-performance-scheme](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/data_loading_performance_scheme.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "数据加载性能优化建议如下:\n", - "\n", - "- 已经支持的数据集格式优选内置加载算子,具体内容请参考[内置加载算子](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.html),如果性能仍无法满足需求,则可采取多线程并发方案,请参考本文[多线程优化方案](#多线程优化方案)。\n", - "\n", - "- 
不支持的数据集格式,优选转换为MindSpore数据格式后再使用`MindDataset`类进行加载(详细使用方法参考[API](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/dataset/mindspore.dataset.MindDataset.html)),具体内容请参考[将数据集转换为MindSpore数据格式](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/convert_dataset.html),如果性能仍无法满足需求,则可采取多线程并发方案,请参考本文[多线程优化方案](#多线程优化方案)。\n", - "\n", - "- 不支持的数据集格式,算法快速验证场景,优选用户自定义`GeneratorDataset`类实现(详细使用方法参考[API](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/dataset/mindspore.dataset.GeneratorDataset.html)),如果性能仍无法满足需求,则可采取多进程并发方案,请参考本文[多进程优化方案](#多进程优化方案)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 代码示例\n", - "\n", - "基于以上的数据加载性能优化建议,本次体验分别使用内置加载算子`Cifar10Dataset`类(详细使用方法参考[API](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/dataset/mindspore.dataset.Cifar10Dataset.html))、数据转换后使用`MindDataset`类、使用`GeneratorDataset`类进行数据加载,代码演示如下:\n", - "\n", - "1. 使用内置算子`Cifar10Dataset`类加载CIFAR-10数据集,这里使用的是CIFAR-10二进制格式的数据集,加载数据时采取多线程优化方案,开启了4个线程并发完成任务,最后对数据创建了字典迭代器,并通过迭代器读取了一条数据记录。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'image': Tensor(shape=[32, 32, 3], dtype=UInt8, value=\n", - "[[[181, 185, 194],\n", - " [184, 187, 196],\n", - " [189, 192, 201],\n", - " ...\n", - " [178, 181, 191],\n", - " [171, 174, 183],\n", - " [166, 170, 179]],\n", - " [[182, 185, 194],\n", - " [184, 187, 196],\n", - " [189, 192, 201],\n", - " ...\n", - " [180, 183, 192],\n", - " [173, 176, 185],\n", - " [167, 170, 179]],\n", - " [[185, 188, 197],\n", - " [187, 190, 199],\n", - " [193, 196, 205],\n", - " ...\n", - " [182, 185, 194],\n", - " [176, 179, 188],\n", - " [170, 173, 182]],\n", - " ...\n", - " [[176, 174, 185],\n", - " [172, 171, 181],\n", - " [174, 172, 183],\n", - " ...\n", - " [168, 171, 180],\n", - " [164, 167, 176],\n", - " [160, 163, 172]],\n", - " [[172, 170, 181],\n", - " [171, 169, 180],\n", - " 
[173, 171, 182],\n", - " ...\n", - " [164, 167, 176],\n", - " [160, 163, 172],\n", - " [156, 159, 168]],\n", - " [[171, 169, 180],\n", - " [173, 171, 182],\n", - " [177, 175, 186],\n", - " ...\n", - " [162, 165, 174],\n", - " [158, 161, 170],\n", - " [152, 155, 164]]]), 'label': Tensor(shape=[], dtype=UInt32, value= 6)}\n" - ] - } - ], - "source": [ - "cifar10_path = \"./datasets/cifar-10-batches-bin/train\"\n", - "\n", - "# create Cifar10Dataset for reading data\n", - "cifar10_dataset = ds.Cifar10Dataset(cifar10_path,num_parallel_workers=4)\n", - "# create a dictionary iterator and read a data record through the iterator\n", - "print(next(cifar10_dataset.create_dict_iterator()))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 使用`Cifar10ToMR`这个类将CIFAR-10数据集转换为MindSpore数据格式,这里使用的是CIFAR-10 python文件格式的数据集,然后使用`MindDataset`类加载MindSpore数据格式数据集,加载数据采取多线程优化方案,开启了4个线程并发完成任务,最后对数据创建了字典迭代器,并通过迭代器读取了一条数据记录。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'data': Tensor(shape=[1289], dtype=UInt8, value= [255, 216, 255, 224, 0, 16, 74, 70, 73, 70, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 255, 219, 0, 67, \n", - " 0, 2, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 4, 3, 2, 2, 2, 2, 5, 4, \n", - " 4, 3, 4, 6, 5, 6, 6, 6, 5, 6, 6, 6, 7, 9, 8, 6, 7, 9, 7, 6, 6, 8, 11, 8, \n", - " 9, 10, 10, 10, 10, 10, 6, 8, 11, 12, 11, 10, 12, 9, 10, 10, 10, 255, 219, 0, 67, 1, 2, 2, \n", - " ...\n", - " ...\n", - " ...\n", - " 39, 227, 206, 143, 241, 91, 196, 154, 230, 189, 125, 165, 105, 218, 94, 163, 124, 146, 11, 187, 29, 34, 217, 210, \n", - " 23, 186, 56, 14, 192, 19, 181, 1, 57, 36, 14, 51, 211, 173, 105, 9, 191, 100, 212, 174, 122, 25, 110, 39, \n", - " 11, 133, 193, 226, 169, 73, 36, 234, 69, 90, 222, 93, 31, 223, 115, 255, 217]), 'id': Tensor(shape=[], dtype=Int64, value= 46084), 'label': Tensor(shape=[], dtype=Int64, value= 5)}\n" - ] - 
} - ], - "source": [ - "import os\n", - "from mindspore.mindrecord import Cifar10ToMR\n", - "\n", - "trans_path = \"./transform/\"\n", - "\n", - "if not os.path.exists(trans_path):\n", - " os.mkdir(trans_path)\n", - "\n", - "os.system(\"rm -f {}cifar10*\".format(trans_path))\n", - "\n", - "cifar10_path = './datasets/cifar-10-batches-py'\n", - "cifar10_mindrecord_path = './transform/cifar10.record'\n", - "\n", - "cifar10_transformer = Cifar10ToMR(cifar10_path,cifar10_mindrecord_path)\n", - "# execute transformation from CIFAR-10 to MindRecord\n", - "cifar10_transformer.transform(['label'])\n", - "\n", - "# create MindDataset for reading data\n", - "cifar10_mind_dataset = ds.MindDataset(dataset_file=cifar10_mindrecord_path,num_parallel_workers=4)\n", - "# create a dictionary iterator and read a data record through the iterator\n", - "print(next(cifar10_mind_dataset.create_dict_iterator()))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 使用`GeneratorDataset`类加载自定义数据集,并且采取多进程优化方案,开启了4个进程并发完成任务,最后对数据创建了字典迭代器,并通过迭代器读取了一条数据记录。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'data': Tensor(shape=[1], dtype=Int64, value= [0])}\n" - ] - } - ], - "source": [ - "def generator_func(num):\n", - " for i in range(num):\n", - " yield (np.array([i]),)\n", - "\n", - "# create GeneratorDataset for reading data\n", - "dataset = ds.GeneratorDataset(source=generator_func(5),column_names=[\"data\"],num_parallel_workers=4)\n", - "# create a dictionary iterator and read a data record through the iterator\n", - "print(next(dataset.create_dict_iterator()))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## shuffle性能优化" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"shuffle操作主要是对有序的数据集或者进行过repeat的数据集进行混洗,MindSpore专门为用户提供了`shuffle`函数,其中设定的`buffer_size`参数越大,混洗程度越大,但时间、计算资源消耗也会大。该接口支持用户在整个pipeline的任何时候都可以对数据进行混洗,具体内容请参考[shuffle处理](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/pipeline.html#shuffle)。但是因为底层的实现方式不同,该方式的性能不如直接在[内置加载算子](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.html)中设置`shuffle`参数直接对数据进行混洗。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 性能优化方案" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![shuffle-performance-scheme](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/shuffle_performance_scheme.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "shuffle性能优化建议如下:\n", - "\n", - "- 直接使用内置加载算子的`shuffle`参数进行数据的混洗。\n", - "\n", - "- 如果使用的是`shuffle`函数,当性能仍无法满足需求,可通过调大`buffer_size`参数的值来优化提升性能。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 代码示例\n", - "\n", - "基于以上的shuffle性能优化建议,本次体验分别使用内置加载算子`Cifar10Dataset`类的`shuffle`参数和`Shuffle`函数进行数据的混洗,代码演示如下:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 
使用内置算子`Cifar10Dataset`类加载CIFAR-10数据集,这里使用的是CIFAR-10二进制格式的数据集,并且设置`shuffle`参数为True来进行数据混洗,最后对数据创建了字典迭代器,并通过迭代器读取了一条数据记录。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'image': Tensor(shape=[32, 32, 3], dtype=UInt8, value=\n", - "[[[213, 205, 194],\n", - " [215, 207, 196],\n", - " [219, 210, 200],\n", - " ...\n", - " [253, 254, 249],\n", - " [253, 254, 249],\n", - " [253, 254, 249]],\n", - " [[218, 208, 198],\n", - " [220, 210, 200],\n", - " [222, 212, 202],\n", - " ...\n", - " [253, 254, 249],\n", - " [253, 254, 249],\n", - " [253, 254, 249]],\n", - " [[219, 209, 198],\n", - " [222, 211, 200],\n", - " [224, 214, 202],\n", - " ...\n", - " [254, 253, 248],\n", - " [254, 253, 248],\n", - " [254, 253, 248]],\n", - " ...\n", - " [[135, 141, 139],\n", - " [135, 141, 139],\n", - " [146, 152, 150],\n", - " ...\n", - " [172, 174, 172],\n", - " [181, 182, 182],\n", - " [168, 168, 167]],\n", - " [[113, 119, 117],\n", - " [109, 115, 113],\n", - " [117, 123, 121],\n", - " ...\n", - " [155, 159, 156],\n", - " [150, 155, 155],\n", - " [135, 140, 140]],\n", - " [[121, 127, 125],\n", - " [117, 123, 121],\n", - " [121, 127, 125],\n", - " ...\n", - " [180, 184, 180],\n", - " [141, 146, 144],\n", - " [125, 130, 129]]]), 'label': Tensor(shape=[], dtype=UInt32, value= 8)}\n" - ] - } - ], - "source": [ - "cifar10_path = \"./datasets/cifar-10-batches-bin/train\"\n", - "\n", - "# create Cifar10Dataset for reading data\n", - "cifar10_dataset = ds.Cifar10Dataset(cifar10_path,shuffle=True)\n", - "# create a dictionary iterator and read a data record through the iterator\n", - "print(next(cifar10_dataset.create_dict_iterator()))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
使用`shuffle`函数进行数据混洗,参数`buffer_size`设置为3,数据采用`GeneratorDataset`类自定义生成。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "before shuffle:\n", - "[0 1 2 3 4]\n", - "[1 2 3 4 5]\n", - "[2 3 4 5 6]\n", - "[3 4 5 6 7]\n", - "[4 5 6 7 8]\n", - "after shuffle:\n", - "[2 3 4 5 6]\n", - "[3 4 5 6 7]\n", - "[1 2 3 4 5]\n", - "[0 1 2 3 4]\n", - "[4 5 6 7 8]\n" - ] - } - ], - "source": [ - "def generator_func():\n", - " for i in range(5):\n", - " yield (np.array([i,i+1,i+2,i+3,i+4]),)\n", - "\n", - "ds1 = ds.GeneratorDataset(source=generator_func,column_names=[\"data\"])\n", - "print(\"before shuffle:\")\n", - "for data in ds1.create_dict_iterator():\n", - " print(data[\"data\"])\n", - "\n", - "ds2 = ds1.shuffle(buffer_size=3)\n", - "print(\"after shuffle:\")\n", - "for data in ds2.create_dict_iterator():\n", - " print(data[\"data\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据增强性能优化" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在图片分类的训练中,尤其是当数据集比较小的时候,用户可以使用数据增强的方式来预处理图片,从而丰富数据集。MindSpore为用户提供了多种数据增强的方式,其中包括:\n", - "\n", - "- 使用内置C算子(`c_transforms`模块)进行数据增强。\n", - "\n", - "- 使用内置Python算子(`py_transforms`模块)进行数据增强。\n", - "\n", - "- 用户可根据自己的需求,自定义Python函数进行数据增强。\n", - "\n", - "具体的内容请参考[数据增强](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/augmentation.html)。因为底层的实现方式不同,所以性能还是有一定的差异,如下所示:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "| 模块 | 底层接口 | 说明 |\n", - "| :----: | :----: | :----: |\n", - "| c_transforms | C++(基于OpenCV)| 性能高 |\n", - "| py_transforms | Python(基于PIL) | 该模块提供了多种图像增强功能,并提供了PIL Image和Numpy数组之间的传输方法 |\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 性能优化方案" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"![data-enhancement-performance-scheme](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/data_enhancement_performance_scheme.png)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "数据增强性能优化建议如下:\n", - "\n", - "- 优先使用`c_transforms`模块进行数据增强,因为性能最高,如果性能仍无法满足需求,可采取[多线程优化方案](#多线程优化方案)、[Compose优化方案](#Compose优化方案)或者[算子融合优化方案](#算子融合优化方案)。\n", - "\n", - "- 如果使用了`py_transforms`模块进行数据增强,当性能仍无法满足需求,可采取[多线程优化方案](#多线程优化方案)、[多进程优化方案](#多进程优化方案)、[Compose优化方案](#Compose优化方案)或者[算子融合优化方案](#算子融合优化方案)。\n", - "\n", - "- `c_transforms`模块是在C++内维护buffer管理,`py_transforms`模块是在Python内维护buffer管理。因为Python和C++切换的性能成本,建议不要混用算子。\n", - "\n", - "- 如果用户使用了自定义Python函数进行数据增强,当性能仍无法满足需求,可采取[多线程优化方案](#多线程优化方案)或者[多进程优化方案](#多进程优化方案),如果还是无法提升性能,就需要对自定义的Python代码进行优化。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 代码示例\n", - "\n", - "基于以上的数据增强性能优化建议,本次体验分别使用`c_transforms`模块和自定义Python函数进行了数据增强,演示代码如下所示:\n", - "\n", - "1. 使用`c_transforms`模块进行数据增强,数据增强时采用多线程优化方案,开启了4个线程并发完成任务,并且采用了算子融合优化方案,使用`RandomResizedCrop`融合类替代`RandomResize`类和`RandomCrop`类。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAQEAAAD8CAYAAAB3lxGOAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAACmNklEQVR4nO39a6xt23YWBn6tj3V8r8HAxTfGusJWbIRFhCLxiBWCiEoOiCg4CP+hwE5EGcoS9QNSREEKdkoqSqVEMn9CHCUisfIyEYmhTEgQIhDKgFCkigtMEFHsOLlQRr4OxoEYhyTi7LVGb/Wjvb7WR59rr3XOPvesfe/ue881xxxzzDH6o7Wvfa31l6gq3qV36V364k3j487Au/QuvUsfb3oHAu/Su/RFnt6BwLv0Ln2Rp3cg8C69S1/k6R0IvEvv0hd5egcC79K79EWePhIQEJF/SkR+REQ+KyLf/lE84116l96lN5PkTY8TEJEDwH8P4NcC+ByAvwjgW1T1h97og96ld+ldeiPpo2AC/yiAz6rqX1fVVwC+F8A3fQTPeZfepXfpDaS7j+CePx/Aj9HnzwH4FetFIvLbAfx2AHjv7u4f+fTP/dmP3tT4iuy/FEAggAjE7p3H9i52TR6LHcc5oXN+rSqgqtvXpGMqDxeOciqVbY23PPCj259b+VfWptrOa95A617xnapd4d+rxjV8LrNfeV8/y41WsMrGjlcKrK6jLW4eD8EQwRgD4u98LDIwhrVTtsecj7ePUo1S/VVd9bJzpe/O3Uw3RJOT5p9bX77+aV7NJdNAl3tEe10z9GOf+x//tqp+xXr+owCBJyVV/W4A3w0An/l5n9bf9pu/sX+f1wElXCVkmoU0ATmOA+O4w3EcfnzgGHc4jlHn7+5wl6/3cLx3h/fu7nDcvWfn3rPjMQbuHx5wf3+PV/f2fn//gFcP9/3cwwNU4cI7IEMgy7udN8I1ZwjphM4S1knHq1AjwGiaQDeBnxM6T8xzQufEnCf09Pc5/fyJOSfO88Q8T5zT3y+f7XcJhl6GQccy4jsnkLIImmxAQATHceDO63977J8/8SVfgk9+8pP++gQ++ckv9Xc/9wk7fxyHt8k97l+9uh7TuTm9brzOVaedyzaYS51fQYXPV7mQhuVyHN9D0pgE5jgWQxmv477xHQOUMkABYwiOY+AYw2W7jvP9GEAALKX/6+/+vX9jbR7go3EHfhzAV9Pnr/JzbyDdwshbMPyUeIfcOH5iespP0sRuvnrk04d9bN3xdff9AOV+w/d49Ncf5fSWFz515oNk77lhvo8CBP4igK8Tka8VkS8B8M0A/vhH8BxKm1I7g3gsXb/9iCTiyfqxu/CFS2mmjyKfn4eyvwn84/RRZPmRPMrmec8t0ht3B1T1QUR+J4A/DeAA8O+p6n/7pp/T02PFftOt/CHSEwTkeokgeeLzbtV+/dynvoz0eWi7N130N5zlp7Xfh0sfSUxAVf8kgD/5Udz7xhOvpyTO66P2VdtvH2vBz4ei7J4R57o4PFU4Hg2otrt92PQRKuwLwvHXJsUbze9r23jnYt4K3t5IL2bE4BqIqcgJJ9kcxSdp5yJquq8OO8e3V1AE3U+wP/3hIECWTMvllMd1vRTr64M/VrafqUcke06i96R6USJy326UEWiqb7mWh6+9HD81tYDZlQ0157cF7a4BS+E6rxD7Jlu36p3OZZWwPNZ32eOEqteM4Oc1fEz5o8i//X6RdOrhSrnvQoRstiemj613YE3XQUtu60RcLa8BPJGohJTifmwX5XmpWvaHXrsC51QMcDQ5Xsj3lT9QlqhlQY1l5wRqKK3GT0Ts+SJ2R1FDcaB8vT0PIJ1oGannqpdWvexDBCoDMhRDFcBoAUtTeuodyF4B/jza92urZF6lGIgIrIcBoRzoCpG/LW6WvSDQjODPqZjTIvsCeIR/01Ubz/HuRaurCcWgCL0AmG5nrI2uXY0TqoO6HScw20MKH7OHoM6xciaWaVY
MnUPvEQhZiy+5d0A1u02Hd5cOiV4c78lpsv609CJAoKwwJVLUZjklrHgBwPYFaQKb0Et1U3Vb/cuDuuz4pSDBVP71LtNXNE+l0KJrBgAOcSpF4wIMboV5d1YRARRCwkgWfwiGCiCj8z8RDAGmCOZ0cB297to4CtNqB5pel7I5jmcboDAQLC//pbpCRLdodOFN79o7zxMAHBA6ADDgx3gDqy6BYkIUmBCoTggEM88LVMTvF2BgXbpWmomBAR0GHL1+H3mP9og2hbRmU6XvXLayy5CME3cjjuFd4mPk8cjxEwQEbxsIeLOvp1CWbU1Bt0rAV2GtdyDpINbKYcUOJjAhA24ZFjaAchkyiz1b9iZ0guhzKLf4fexUBwZHBH8PoaHaIYMJvTKSUkQqb1gLUv5SSHG2MDHGzC+j7q71WgxMqR5WAGDbvIIz/+PEA5naeApiAXNOiEj19VPbRD13JiDOBFzpZEKnQGVCpoGftbVAMA0YdGJOwRghAwMTE4ae2utlYKkrr7/hrRFyTK2j6/mNwrNbXGMNggmE8kc5GQDG2woCO6NXAT0NRYqvSBAb3WdBa5/5N3bncDPjfapC5oSMAZnTGEBzB8rqpIu6TZGvektUaAxASt8bA1ioQt6TTQi9bZGou0WAWcYZ1lEAnUWdVQRjCFRH3qLqFx1Qk+Z2EIj3yCnjWApmA+Z41AIE0S4EBMkCViZwyxXwcuo4XOlNhnSa1Z+Yzg4EorOAQASYk8AImBPAUMgEwh9INhT1JEhD1I5ZHrKs1/Or4rPhQfvsg4UCBHww2pByC2QIBvj5r08vAwSI7rQUQhfi1aw6N8YtFtDdgvg9Pfai5GZpBo3uW1FZSfFKSbWym+fSJZAqBbQM/A4AhADAqH08TxIULiCk9Dy3VKSB/hBjAqqaMQkVgahCHQB0amczQh+8DCW7UvSVsxGMSaoqVgC4BsKaIDRmplN9NOTEPM/OBMiPL4EJq6jQoe4CBN0uBjanOjA4EDhjsCIIgIk5gTEMcMYYBghQSFr/xc0JNyq+JyYWLQR6BZg2EJiW71T8OaFS9RFujrEBewl/5rp+YnoZIIAbIOCqo1lheZqUvVuZm4DQULisaLc6dkLgQud0k4EgfNZmgtf6bo8iIHKkMADQSwwgAEBDqR0lGulnAFJ+hl+VSi8OOCWEkGHPdcBRNRCADqO9I0CANZgKWFrtOVhAQE2xxL8L/3msAcZ8RFWc5D2QYJtMgFyBc7o/TzGBlB0vqrEaMT9+2heqM9mAUX93NwIIZHpMRFzp7ZYMBKuLlAAQlp/LOUrJs/2ZpdF7gsBUzLEovojJnw8br/kVpfSl+KwLeHJ6ESBQvnYlgVPjEBUiA1sXIOnY1S3gc/259m+qK+VUq3yhRrn4nXsKvFq1zkC6EpluS/UAuMFPgBCjC2aRupedLkAzvz2J1E1VyQLnc6ViG6lEoylT3prArJ1HBwGFsZhQfoFU+5FgFkMDVxYVg1kAgTCxgRNoMYHIlJGXcgeggA6z6qoRGwhAUMh0/x9R17NyESEAGBAYuLjbSNa+5lPceCc5EIwEg/rO8+k9IGNOqAzM4fEoEchUqNjntPYMAgsgDCFBfEJ6ESAA7JmAhKAmCiyVSAjYFJ2VjxoiTUXczt8NCIIWuN8YPmdYGxbOmwEBek4/W9YzwKyxguU4+Y/R9oULVPYv+SjmBHYt0hp53kWi4MVqHF20PyG7LLnKgq0kLKqBjT2O2JL/uLdVBwKuKeV/rvzMAjgwqM4EZgNoBnxyfwQO6OLKVEDQWUClCUCcBeS5qYYLIjSxSqj3Yzlm6wy21DHBx48VVh6Z0DGMicxhn8UYoQUzrSyh+CKs/L2e30IQ2CiWhFbY9408hkWVTWUvQNCP48ccGCxrGOMEROBdg9x3DBI2dDa+ijPjTTKBUAiA9MMBQDsYeH4DBnpVpent3yyuQaMlEdUOF4N+JMsN4m+z9nR7/pyDq1SyLlULgNSfxd1WBQTXaqs
bowGubgKD/Xv/ocCBb1hkX4aDuCk/4jiZgSYo9EEAlYwFKCmvdrkLZdyBgcvkkNEAYH1PEPDXEAOACNqKVPAy6nIMFCNwABiDwQBPTi8DBHTPBELSQzkqEcWlhsAOCNj3Who4gCCeP3ViKKAagScaJ0CBpQKPyy09d/G3gCsJCOtngoEUAGjwgLouIPBSQ0qvYAHuChSGViYLBJMgXNi+kpo3ax9HWsfBkCQBMub5S/VyQJO6yuB2uPKAvDcxr8YCzhPzOKw9b8RnyuUbZkGTwQkEM/1szTEBgulKvk8K1eHA5XxjsPKP/DwGKX98dgAYpPR57GskQD34mOWcKcMaMgxzZUzMSfEJAIJlJcg+Mb0MEMANEBBNhShhlbSwu6DgDgjQjttTEVH/qcCYiumocw0IFlu5kvMVo4TknJ6t1TgrGKQ7EEzA/2kCWChVReX52Rp1s4BJPCSEPKsg6HiyqlYrTclBxxrfK51zNJVQsAw+ImMSHAyjZmk1lyCjwc5msrPmDiRcYakHyZioaEFatp/QyMCg2DohqhQN4FoARNVdAgMDYKKts+DddXksA3IEMyAAGKX8YzkfIGDKPzHEAU8EMyRBxNlBMKt6LzCo47fOHShqef2irBma2WL/8goELHCdNdDdypK7xE7YWAFIDUa5BQT9BqXAoU3N4xVJAY2LVzAAivG0a7cVpreBID4vCtasBKR9Rp63NOHAmFaZFb4Uv8dKJJUXbqnh/vg2JtDYSdXnGhPQGDEYvQPuDhCfycJXGQJ04MrOwd0AqUlAQO1IwmHxUvUiqTMBNjI1QKdoOin6EIxxNBCoFZI2IEDKb210mhyFPGe7YQEBAoJLnb4+vQgQSIt8Oa0eIUUKXQQLUziBdJPjN3zYKLxqWsGKKvv7tKCM+LndYKFLkuWQTJOLWy9bKAn4nno5n+hXt2O/ZTm/yY4g/IzshuzKSH76ct5+rlnXrPjlFinVqVA/Ni37BSQrKMW4AnZ7MSvI4tLwbQoMmnJo/kaISjHTAQCPEFqUfSx5FQOECxgpMrA4hrGAMTR99ixPjNi7vC+j+kLxx1hA4Vjkh9vV8xFgRMCZlr+bm8b6nppeBgg4ErYkNvLJS0UqFSO9PBh1sLIDBx0nOBhXNkQfAlF1qmb0ubpzkKgbvQKloJkxVyqBdfksVHoqJiy2EEojszj6GtDSuZ5TzHNmTMIoMQ+KWV4S1hfEbLzQguyGZAtyDWrVOYVaXAQGIJOKbmSYntMbDBGTKDAwS9zXB6xhr/0cKYgrGUCYF3UzFeJBswg+5viKS7akHbWRp+4yaLJAXrqOoNxBz+rEegdkSMmPCMIFBTHRZDspV1EO1CQyKQbMstYMD4F/GrHM36YxFBbjWJvnkfQiQEARkzUqicDHa5vSp2DCBHOklQDG4cp/BBBoA4d0K4YJzBiDgCDo8YBgZiNux6Xny/5d+mOjQee0htABuNDG19y4/LkdR1DSR8XBwQDRL64RK1lNR4lvujlLFhMI1qi2HzMVh85SfCziRh8Cd9TdAFM4TQXmtRbLMobVZPo8UrlYkUIXgg0Ug4leCFOqAuPVOTLjkcxBpAZrZSZDS5F1gGA9yYxGzhXY90pVF3a5JP4K5adnxP3R5GB2tkWsURnsl9KtbXK55JH0IkAA0AsTCMqnmBiO0gMu1yQUIwA0gUChemCgWEGAwFCFiA2KkTGcBSiGDhv8EWAg2hb2bLq1WFM/WwCggHX5hdAhBYyVvQtb/46BQHOMfAyMmZkZkaVCkg1oCYTGtSv9XsAgeli8nKb8HgirUmZ7ddNbkmnxDB/dsAWBbv3bd+4/DxkryfU6AXQEG9AqR5S5wx2dE1y6TdldQPUOJJR6WwhsZmkxEbkwgQYGATIMACkj8WYH4W5dGeL6mgjnt5FSbg9mgc/zBl4ICLhCcxKFWWYYirO8p8xPxRzBBBR6KAYOUv4arx8Na2xgYNCxs3q37qgI96Qf+n3iSovALtNpk7oBKyG
zfLiiE/oXGND3YQ3CJXFAAmbRw6iECxugCrzKPcJn5FcNPBmdBSgwdUB05iPjpkr+OJDOgAt5WT577uoCrKyA3AGm2WzWvL6MMc4OZMHmoMkIhHKVF4HrorsKU2ZZ/fx3ePNbfESc5XR3ipgLhPI+mJosMmJS7WYj67z3SBXrYzchWNZtUx8Q8/T0WhAQkX8PwK8H8JOq+g/7uS8H8IcBfA2AHwXwm1T1p8RK/10AvhHA/w7gt6rqX37dMxRXdwCiGDHTTc1311EgYMM3Nd0CPQ4ccL046B13DgYeE1AfSYZiAIA9Z8AHaQzBZVw6YnJuCBD1LYfL4O/2k1mW3QuZyr+++wXtfZ1JlnlZXkT7GwuIQoU1ds2oCHMP1JXyAZidBQyIDa12q8+GNx8h8GHDRQ4iG2Xp1/0ElnNyIPrVJRc9kKrHBAHFSG0PoUg0iBx4/UjlNYBAhNrRP0/Jtrha4pHPttnJNF5/mcBmYMCjBDujyXxlM5Gc6GxKHwDBLoENxFowgNq/Fq15enoKE/gPAPwbAP4gnft2AN+vqt8pttfgtwP4PQB+HYCv89evAPAHsNl4ZJd2gUH1cQIi05EY7gdasE2cGg4CgOOwCjiyT/0kJlDUbuiA0voaaSHGgExcFDCVKa911qARLCvW0IJ82sEkFD2FIM7zMd0nr8282Hc9jBXvkcm8eWOj3ZUpIGBFNFfJWcCk2HuzrjeS1LdsjXbdYrcAIY7Lt7a7qSKDgjqBOcJhsJ700VqxULBhAq4sKN1OZ3UHzJ3k9hMdkKneO+D3S+UfVJ/D2dZO8atJMqt+fJEVsAxVjCBAoX6uy/11u/rw69JrQUBV/4KIfM1y+psAfIMffw+APw8DgW8C8AfVcvpficinROQzqvo3X/OMLRMIf0+Mu/vxqCCQA8GhBQBhOXBEo6JAwF+D3vNx/h5jwwM0ignUlT0ANV3nQklppGFMgJk10UX7n+7jbYCiAEMXKaIfthgAZW0BrmIBO5dg+Ow7ADp8cZVho9j8dwNqx0Fppe7PQq+kfGENywUoEOiA0F2BtKw7JjAmZI4syxBgDoP8ZdEkZLeao2CWNzeHsc8zezQOjKOAYDgTmEMhviLR2ruyAkrlPyqBx22iyVSxwRsboITipztQbkQ1MbX/R+EO3EhfSYr9EwC+0o93W5D9fACPggAAW8ONk4oxAPHjcLHcDUAEhtw6HvEzBY7jDsBZakL0KCp3JO2yty48BhOrwtp3kTc7awoS89Y1g3k8wCUmI6HdagGXhcLJeiIKt/5ovRexAEkXoV4XoR38sv70UfNnzeLOAevhEAdgvnFUSlUQg0JYSwaBKyAsvQccGFS0cSJhLGxknQBj2Jo/vvhHTv4TB7TsESAg4B6JeCelG3pAD8XUw5ebO4wF6Mjy9+XSGs2Kil7AscpiYkxtpth2FbMLaGsd1IInhQABA8wC9PHBZkv60IFBVVWR55MQob0If8aXfuLaRQiFygC8T9jcO2cEERgQgUzFMUJIiA2ggMH0yWplEGv2r3CAJ5HWzLFV5xoBiCZWCQAmV4CmvfootwzyPF4r+ZcbcW3Plg/Vyk7cP+coI5WAWaq50mXJcjqqW6zpqyuVZTNh381OtOeF5V98D3QQuAYGV0BYrSqyHAr1uf024098rURxP92AwNDcApNuETmvDfzsuYfna86J4zDrfxxm/Ue8j4mpA2OaC3lR/ku3Ibkya+OpJlNKdteUft2GbmEEcN3YssJgARsD8kj6oCDwt4Lmi8hnAPykn3/yFmRKexF++ad+ll5iAjAmEOiaQibqPqP6NWrdg3xvSPtsAhCzz6yqvO7TlzwATLcWMT6h/RxMLf20a1Z2ovlNp+8LOM/a9+/qVrTbtwe1+/O3TfFID1P50YEm3QQt5U8BRgJATXqJkXUWHBUUCzDWZQJeA3RK0SvvbA3jGVfaP8bRwSFcgbgfA0oWS3NuvzGTYePsMRwIalBPAP9a6wlKmx4JVcU4hjE
AX5V5xjVxjJA7Y6cNWAJpKf+MAeWpabcHOwZg9OACBjvFz/cEhufZ5A8KAn8cwLcC+E5//8/o/O8Uke+FBQR/+nXxgEh7JtAFKo4lgwIGCkd5Rm7Zpd0J6ABwTSZ8Q4zeDzDIANHVWHLJihlMwBsqBvqcMwHgfHiowCdbuLwVCUwqEt2bFEJ4oRUuYliJ+IKAQeg8A0DQ2ly+OmICkHCyAQfimN4cPq8ilL9AgLvKIv9IEDgucYAGDmMk61jNZ9B6uN8uPgpxuttilB0+vDfAQHLAUtjPFsgbFZQcR/n/QxXHUOgxMNRZwjg8EG1uA6gOt8fuylQpdPeWbXRzfACKAVSQuKx9ynK4DHm+PeW16SldhP8xgG8A8A+IyOcA/F6Y8v8REfk2AH8DwG/yy/8krHvws7Auwt/2lEyo7noHgJwSFg4Q89mkd9ODOIIjUCDWd49jeo4dLK5+WlUBLyZpXogg+5rBVtotFFnuRHSPCZznxPnwYC9fLz+VPB7XrP/qZ/pFsTpQ+OH5QClbF19HAaUOCwjKWpkvDAIC7x2YapTXl+KuQJrkqkTCeSZL2ICA6PKQY+MCHJdegaUQVIBos/CPrZ0MCOxcjPPXKT7pJ66j+om85Rj/4btXH0bDj9ECgmNMz5uv8qu1+tJal9GWXCcldDRXpQkiyczFFehDxTlA2K0/M4L189PSU3oHvuXGV79mc60C+B1Pfnr/7fK5LONiGPJENHEEjaqrZVq3TlDbU+pGZRJdmKcfz2y8NWiDITVQ5sICel64UYvO+fBff24qKQ8il3gYKftK/ZuSSN6Kq6VdFwCwWKwmtFQfeQ8ti98YQ+bJuxJFsF3DIes24gECCSAQuQBAzRUIxWHGZmVZfeCctZiW0mWAGEOdX+ppFSoWtBSTYA0DMtw9mlxOJBgWuku7bVhttvippPSdBTxrW/ncTj22mKdzQEyiQxoA7i7Oc8/wCF7GiEFchw0DgMroFhIlZKBG4qBTC8o4pbK+Xn9GUlpbrQWzjmX6OcDnBTglziXnvfFTV1cXxvMax/GsUKi8qI7rXJcfWV55PpW5Pl/zUDcPBtBn8K1PKxCN9kiZDss5h42SHAaY5g70cf6Nbrfz1DZrN2A+30lPVumOFazpYh38PlrfcXAt1upzVjPF3CuJ0YIOGg2EXO4SCGlZ9rX+GcdjbEkHqsrLGgfoIHD6JLIT2o6DpWqTC/4chOcZGPAyQGDrDkBsfEBYPDtFlilQ2sece1cTa4ixhKjkovmm8MYCRMSjzQEEZk0sNGgRAgDANExgY62b2l59wmZ9q2gFVMKNeYGRLA4rPn9Xn5gFrODSLXVYLEmp5bbo1ELERlLGVNZYp9+e3yfP5DM24+rBeYCfY8C+PFs3h1el73k3hbhMaVbaXUppCLCvJyk2KrrP0dD+2Ahw6tirVy+HZ1qBNdof7iKWz6cHkvX04wgsU4BZ5+Tm8/UDyihYnJL05YnpRYDAlgmIzRyEiAdjSJDS8pOvGfSSlQ3lKkwf6hWR7gCC2YBAa805gXcdFBDoIowBNZc6Zysq5VMvxaNjaeeFfr/ctn1XbIIVv5c/ACBXv4n7ZM2jBZ+COce9Ig3A9ieA5GChBgCXEXSdEXhOCJ34PF5jujor2Ak5M4n4Pl0EaAKBkOIZE7ABQIoK6sa/rMHWjuNm4C2pfxiJNEDhEpKvzwPJnPKfZyl8KP/Jn+eZih7LifEio0rHz8GBFwECgcL9pAnaEKWFMUqhbBmn6Gq6Cl/e2BsXw9bWD6sfVsgAIYajSgoGMNINUFTDSvoD19QoGf8TVoC69rHPdT+i/8LnQ9iL3q8gwL0LIcBdMasBeG57y0M82ClnWEmkYtBMwAYI14lA4cFW05D13NZor4ud87VPxgYkFzsJhfQ1BWMMxJzWLezoUUO86z7JttwdEEgfm8+uQ7n75OsrYtyItuNZ37mSm8I/NDA4z4fqZp5nyf+4voebEwDx1PQiQAAbdyACdL1v1Cl
oG+hxpFCvVkeDkgG2bJjdwYVTjQXMAAL3D2kfOmDQkGC7awSespLDdVS7d1k5Zt6hDNoUuRe4zqXa5K3IgjekCZcjmED52SsAXIJ3ay4iUCWVC5HUd8sBudoNWEb398cCBDl4R8NFK6Ve4n24RQuyl2ZTe8UC1N8DAPqQ41BKkViXABkTCmNxBQI2Pr6FGy2iqpGz8P2pLhsTmDR+ZB1RGiDw8LAAgb2f5wPmgzMBB9zDu3SPMWxvhRjFNASC8fkdMfhm0o31BGRUdBcl5EFvxzgwjiWyzFQo2yoalam/OIXy3oRZSGprz8H84NiLzsEgewk2C/KzojYWIGWts3z9I5WbwYMuIeXtOEBK30Chf24vzoQilwaHK2uAR9Z5fEfoxO1QAdpgAjVN2MA8/HKi6Ow303sjWY8h5Y3UA4w9KFejOeFyoIBvTAqgTeABAw7X3ZALaF16ttCBpxT+zAVF12MDgIcaV3L654cCAj0nYi/CeQwHAHvHsC5ORB/O2wYCu8CgVbiP8QeahvFor2McrcTcHtpOKOa0Icex2QQrhoGB5nDZOQaG95mDhJZp5raiC4vKikspa790jSpubrcFhDpfz9grPStsuCXNHSCenVYtnxfAI+05nQUIWlcfAUC4CNMVX2h/xxgcFgqUQMBMiZnHJq3+fx1r1t2tgTgVKK4h47nBabCiqOtwf4bY0GH/l0utVY4C5cgdmOD1Ec8AgNOi/hEQTBB45N22RxPcjYFjDuhxQMeAjZv3PRHQjcNT0osAAWADAjGDbdhQUAAlxEmJDhw2WSDRV+gY8R5WRlhBnKLFkNgEA7MQA+r70Q9vUKejcdysDeUbOxYgS6N0WruCQdJ/+kkxhFLKfGP2ICOFoGIkq0Jv2FL6TUjbjyX/VSaKA9C4/93cAIj4iL4AX/X14cIiU5whqkZeq/+ttuPHmjse+a0a83DgkVJScVaQVcGuQAAJQGsTSk1BnZE/zWdx6+6YQADAefagX1j/h/MB58N9DTBz5X/w43meOIZAjwNzHhZcPA4DgKMWLBXgEoh+LL0IENCdOwAYACQ1Y6sWo71qtdagcbFctp8ElBb8XBReRHKxydiHMIOECsRiEoHqseKQhHSJtMaPnAsdpWsgQSNX8Y7PnfGkVcdC7ZIZ7IEmgIKDgKH4XLeNCYBALil3ZxW8YUaz9DeGAHOsIADAW9WeN4HwxaP8Go9nINiiQcjCer4rZDEB6jKcKSW+cIprdPxaq28goTrrY7iMzZy8BK2BunWPBXyICdhw8ukUvwDAlL0A4OG8LwDw87b5yvCehQnoAbP+R8rJFAOAea2cm+lFgEAoKqeRq/12WxvR0RgfcBwOE7PoPBD6331BhbRRXztAEJXc9y1+m0ofQIAQ3TJfTVlZIeHdg7QibhbaLgYrQpyp+xS6x3Fc1BmAVN1s+u97kv6ccHXE8iAR+HQAGWT51+G/3EU75Khjmg8wZeKULAUU0+bpBzgGAKRLIF3587iXg12AXbLvo4uwW/pYQg2TKjPdAJI4zos3kczYhERox6dgEEh3IUCn76rMym++/sPDQ1N4fn+4v8/jOU/cnQN6d4dYeNZkLxbf8fkv7ro8Nb0MEMBmsNCoRksjGYJNghZM4MT0HV2lCxQiGOSRYymr/+iL+osVmqDAgBAR4iYz7bW6A6zsxFmXc13hpYOBoIFBAQDvJVAuE7siO9ZS56wc6WpkPooJ1KCsXv8NCKLHJoAAgnNO4DzNaoYyxpRwUuxUaq/aNqKarmPFv5aJygNzEUQrWj9ttVJfLMUCat5PePktP7Dq29hMxZSiR4J/7rLnzzMg0GQCDAQPD2cDgYeHB5z393i4gMC9uQ8+2Qlai6SHrA05cYpgyMSA4KnpRYDAzh0AZNlx1s+GUEZg0CtFAV8HzxcdiXsTGu+s/vblg4WEaST7iz3zS7bLPG9jArr8WK636ErYrX87HzR1Uf5iBLVwRnWfRb0sLZBsgINiBChZ3wwGKyg
cVzAQcQCArwhd/nnrqSAb3Nfh2aWdK9DvE7+vNos9Bpw1DuSqSYgp1OQGZV23ynem1NqUX1WXLQjpe0ewS3Cep8UAvEcgAODhnt/v8RCAcH+PeT5A72wBTUEFAYczgDEk9zN8vP56ehEgIBCfRVap7cO+o+4h/AgyDrBPGL6YuQnzCgISM+OGCf5YlNzvsarohViveUIXibqu01dWQmmfTQFWIe+BuV6G/bnosmNhrTq6lVYw4ZgC9wxw/CF7G2SpjwQtSdeCC3Y1nrRgSeP6hH5lky952OWr5SXz0G/bW8Le4rpehromXI2RG/8JoNY/P2Lvwuluk8tZ+OrR12+7D48c7FMu5ML2OEIRYB2xi2QdiD8IILq6gfv0IkAAAtzd9ayImJU/FmuTQgkkE8+NKzUGZviuPRyMobkDQW/n6GvKo1UkqtEXQWBl3I2y2xTvIvAMWHWsfv/orIu68Kc3oeYAnX1uLsAyaq9ASJJu1/N7vbcNMxkIos7bi7rV+OWbu9pzNWM2vIwW/yYbU3rZowKlKSYB3iNbm7Xv2JiM/TW9rkFtTecUuRZduSn0YtDJtiXLkr9rpDErVkiREXELvYNOxQmkThgjK50YsRHhB0gvAgQEkl19kYLqH0cMCOIhr3ZNC/ylcM20/Ou21pAaMjrHxJjD3nVk/ADY+5kMAFeLd/tlP95xfjQAYUXI8flkCZt1GzUwhwfl9O2vr/lI1sTy0kMal9WAV6ueP+LgFyt07OycAAvE5q7RNg0sUICCAD/yBuqxjwMA53dslBwil5jJlTVUXVuzrecEELUZlbRMtbTXWl/KJWjXDmYziGnAitxfwkGAYwCmE90Vy+HMyUWfl14GCIjcYAJ3GMdRwacbM8/SImUApm9nffpLRDBP21ByzpkA0GiV++ktLxsAiHxvlX4pG9+pBsd4/ovLpSKsv8n73GACKwDsVsFNJcqKA/iEOyEFMnSfMIucLVVk4C6oqCn1TCMWI/PYNVvjKwwgWdoWMJSys1ugLbeHreLqDnUF7Z/RPle7WV6uQCCwNQdXGbHfT4/Qe57JzWucgfMBLAFnAoB55xOb7iAALYTiw+ZlbauSmaemFwwC4tQn5ggwqgMImqmdAegShDmJDYhYd9VUV34WSv8HoJvGRR8ZAOKdlXZvOSOtvh3KEjcmomkXYtYYAuclJlBJdwXIwl27CUP0qEAp0Fw4tN8lsKALlwUZq7ckfHkLuGkODsrlvmgZ9s4Aqu57pVDlsflEr/NV4fteBh5nIkrPTPIKBl4fF5C/nreR5ErZkzYPRb3+qwwBAJJFsfql7cWJ/msCwITeuTugEyfU9OE43DiuQBd1xCD6+vQiQACCizsgMnDccWE3/imw90UpGMjjtgFTkDmLCXQfFfu4WQjSRggrvysTeKwRwvKX76f5jhyhxrcp4bkOz726ALv5/Lgoc4KA0Geq43aPpTxJ35XL4u3hO8YG8LYptMsLzAiyrq9VWFjAbVCWnxW/FpohQHbL3kEgyny1/HkdUXZmQ8noRHKouUrNQYmuw7L+BQADBeQBAuX/MwugHal9DYIyin2MBjOa56YXAQKCvTswxrGg3ihhpFjS2h0T0zaLCZw4T2uoc87sRslBROwKLPmiD4sFcdTX6jKyfHdg2KU1SJhMgA/ooiDEzQISALQA3gYAWMC74u+Py/pLCm/DXc94lB/SQcAW+rRZmMYEggWs8YDkPu3m62prLY9L/a9Kv45jYCaQbUL3vADBhgkkE/N7jOHLq8FHozoQmPLbdOUxFOfZiYy45T9FIIN6wIzWkkGrwHa+39nowCGC4+hdtQXUCCHdyt2t9JSFRr8atgXZV3qzfLeqfpe8wf0Id+4AIOn7tKXDCKU5MDUdLXnOdgYIT3IHCADmSlEbNQU8RJdKuMv3DgCurAAomoZLMA7LsyPsxj/tyh+W/4bC3/hceWQlRyo7FkGq4wUQXTlN8aWYjJcFiKXB3W2IdonXhhFkTICZSgO
A+q67AvzeBzCx+yh5v/6Zlbu7B1cmEHWYeRbkpCgVsdmoUmNLBLbOdTCQBIA8jjaEswB/sfK7SxBMIEEgQI/KX0BQdfaU9BQm8ADgd6vqXxaRnwXgB0XkzwD4rXhD+xGK7N0BXpW2L0hZiftLZzKBdaimDc4IQSkgmFvlr+Picywc68vyG3Ss5+9mWyjlHUtcQBkipB2H5R+0echV+WPZ9DWfC73n91R0uT7b8avKFkFMQaywkXXmk4Nic3NdBlxFgDCDgVoBUaA2O11HCkb+ru2wjmDkQU0jmzBuUse9fW4p/Y4NVHnMBQg3VHKZcwcBl5tTADljzYoCgzHEdngSeD2wCxAgcEL1gM7Degikuz45fbsB1/PSU1Yb/pvwbcRU9e+JyA/Dthb7JgDf4Jd9Dz7MfoS3mMCwJcRqFFpXvBC+q4VZRmjNDROYCzXNVz7+dnZDUNCtfvwk80eWa1Oz9RbKEMyGvo77XV87JrAKhJBQc931+Apfe8nhUh/puks2AEx5Kbg3AcSKUIICZQaCcMEypmC/lRgvvLKBsOAXACgWUC5k70Pn/K8gEAW8soDevvx9j2n4Z+FzXk6JbeoEU07MiQ4A57DdniAeEJwNDGaAwJyYx0x2EUHyQ4gNMLg/Mz0rJiC2MekvA/AD+JD7EQptQ/Yzf8aXbkGgVqml1WtIoRS15NWui3CdvMEgwBuGMh3dRwYzz4W2NwWlmMFjDaL5KibQgmtrfGK19C4IVU97llIW7AZgLCBRjAQ55j5rhRiKNrQMN0ZQw1l97T6VJRawGycQNSD03grfmMq1LnZMIPzl3qpyaeMynauy7z7HdavCR3BUHdQrVnRCTpvUM0/rRSgAODFPcRkm+u+Kb+93NnHImQCAq+LzcmIr2D0hPRkEROTLAPxRAP+8qv4vTRn1+fsRKm1D9hWf/rlqm4iuzwxK62gZ7kB7NlK4JtGoUv6asy0iPteA4wK0Y7AuRViEY3e8BgafVHZWrUKAtIbXiuzPbuB4UfZbIHB7hiEfRz1oUPxYUE8Tcq2eRBaqANTa7FU8OBMIkL7FvjT8gJviS26LrHWxgsGRQNAzs9Z/P58KxS6Bf45YjFeQrzSt1asTbUcMx5TdAWCe/m6Tj6YvbDNzbkcHgWAB85yYxwk9fLXhlAPqXuT3j4oJiMh7MAD4Q6r6n/jpD70fId1/wwQAsENOVgBA80PZunD34G13gHoRbgSpmB1cwAHdUuzO30qpNxkIKDaQQnRhAeViCGQ7GOgWECQALL8ZDURq1FmUl5WzdoijfKkCFxwoVhBR/qTGOYaDWUC3osYcmAuE7fdX882LDb2OCWjPZAdhdCDfM4F+HumydVdmjenYTEOLB9i8gVhWTKBjGjNwxWbrX3sP3OHu7sScB+Z5l9cmEGaMpI4/AAY8qXdAAPy7AH5YVf9V+uqN7ke4TiDyp4fHVjSVraULanYF8tpt3FVICt0WdzhmTucc4yGtyer3rS9mEXPOnAXGC0UE2FziDTtA8XeNhkS15gooF+O73LLRXyXSpPWlICWHQMHAoBSmXjHop16mrOkaZVxh53PzCQZyP75QfL4SJPG3mc5tl6C2NguQ5QrkvwBuT1ZbgCAVnpgAA0ABuW+X7ZulxrZuQ8TCJnzPOA5QE+sBOEd1lc95RJipAyOfk03VvyY9hQn8KgC/BcB/IyJ/xc/9S3iT+xHKdRYhC7vVaV89uCyV4iGULwCAAn/cBQj/XVzz8HAujYu85jgOnOfE3d2B8zxxd3eH8zxxHEc7VtWL8tfxWWyDTT+Vm+1e7m3CVifzVHnL+AcqAn2pUr9/87QXth1g0JiDwDbjEFb4lbqHuWagIuUmi23f16IrZbV2VveWBe5ttAOAOu5uAUlTHvdWqE+7yUjVJVt5YL+fmUw7p4razJWsNhIz0o3N67XmwzIgHGPgPAaOGSsJga6LttQGBs9JT+kd+C9x+75vZD9CAajB/D5Qqmw7A/UhqC6IU2Gzq3K
9Nqb/YbF5Y0c0673SZM8/5lTc3R04jhPneZdAsALAcRxQ6PLcSUyg8kEGYym7XBTz6lJE3rKOc0edx2u11ly0XZ5rft5qPRIMAF+zMn6pnquIgF9BAEK+6Mo24pMU4EAKvKyssa/hBgSCK9wEi11MoI6rsLu66ucuLhN1wSYYgN2lUPidW9k3dO1AHZR2AY2oLWZowWrG8PUENdu1qpvnXW42w3lNehEjBvdMwBeGVLMkqgEMNMjHewECAE4fGcg9AG1YMKwBzzkh54TIuVjb6lk4z4OA4MBxLGBw3GEclufVPViP2WJkY5su+Ao6BASCHEVWKkC1sgrgtTLzWv7cdk92BGDAiRltdUGsPFyKL6q1SQmDwAIAiFxL+dtgpY7PCyNgunBR+AaOV0XlPvM+9XwncCsjs7em9Lw4y4UJcKCTxkLwKNTpW7f5IjVsvTMXBADsmkRZeWGYCGgXWPi1ylwvav4x43BNLwIEBEiFihRKc+pM3yCEMZXs1BwIdAWA2TaTiHvOOSGnRWptdvHKACyucBx3BAQMBgeOuzucx1krHU9bfPKx0XBdYQn1XdGYEYSSJH+0DJJPXnVgAss1uQrAsjS6u1YdEchKBwBIrMQj4P39DLg6wPBx8YYqCyvyFgyY6tP57kPUs1Y2sCrvoyCwAYW4pk/PXkAmg6dE40UxVUr5QQzVB0rZb+FDiqOOyCAsTKCxnijHMTCmra5dgUcGgWzY5+o/gBcCApBrYDApldNHwwEGgBgJuPrgZ80c3CihLUZq7IGfVeMLzlR6iwsYCzBXwEHgPHEeR+uCWnsSbr1HeQWohT0XyWzj/Um5ql5ilp59M2etZtMe4r8sv55OpwvQqbU9311a1I5LzAAuMJMn1pL4tUL+6oUBLCCwpfzr5/W7mmW6BgfDzeAaWTMZeVunIF8++/PMwPiEIY1l6qfV0ZzAHNBhx7H7cXsWAXlniXVVzDAcFBdAsAEKcPbfPp8FAC8FBLBxBxxRZZaSKGoySgXgePlmnjoc9KziAoA1jEVm6TlTcRwx0agAoF4MCP3Vfffb5QvfLQuSsil8Wae9mcnKKwfnMlYCOBCwhXZLERYd7VYohe3KyN+pxFBYFANgWV28Au0PyJNd6XWh+3Q+rkM6KzeUfvfiiUTUO5BgynW8VLsXYIxiAbWidQFCbEM2fM/KmjjkwBmLF8NBwoeoW/cgP1KJCBAbgF7KG6s66xEsQFDMgYOQfvwBcOBFgIDgygRizzhG8oyKzysLOHM3F5pCrL2bLu4RS42VRVXY/oMDImf6YNfX9fyqQPvjMsGsMGkdoxZCAcJaggRH61VsgBUfjR3E88Mnb95IKoJk/Xaf168JAPBbKiGA5rloG8CCh1iEkGICod6yiROwq8AvECAkmPZrigXE5/2wYekHzVXocQXpgDA4MOij/uTMuQBz+nmWXygtb19AQzUGU+IYLszNU+XSMXDocKpnG46w8vN77Jfx3PQiQAAil94BYGJMxZmbVqB8LhoNmCCQ4wR6D8HFHZgx0wtZcTaoo9bmG7G+4Ubpa8kznuFYgtOFsw91Tpq9WNyulOXzgthAYUC5RDYZKhTftI/ZQXdFyu/Mas9nXxVP8zv/qZiS70DA6hGIHYDq+cjPVyWP1YaL9Txu6SvDe2bALECwugPJB7iusxI6CDAL4MlaMeNvTlf888y8TWZ6XnjNVYYC3BnUl5hRR4Eq1xBbAMdjZrEBDvdIIHvMbNLWc9PLAAEAsjCBAUDGTIWpoEspgS3b7MOC1xGC2l2BRok1dqZllGbUrvnajzIB2hB154uqau+qSkvWKak0wcZiOSlp+fcAfPGOiTlHMgJ1U1xuQADeYqCjzBv/vCm/59M+F5fOMRvuJtijNEcZ90eR0uczivV0AMDlODRotapbv30BghWEwcfcHgwCafl5uq4d22/OYimRPaAH6TRmCBbLoYZsMS6m9waYdvnwlbAxFFAbKGQ6H3IQ8j2AGHmIAX0mELw
IEBCxLZY5md++TBpiAJg1Rdj2bu/DgdtEIrKIDAZC1BB8VrAAQAxDvQIBD1HlYw4apiXz+/PRXvgl8yb0mxKcqKPYp7GXsd5N+W46iU25pNc1UNYyUgafiAEEMEUPQjo6VM+p6BHfCfBZ94fsQIT83Y06Wl67bsIcDp2MB8UM+H0FAWIBcR9b1MaZwHnaWgHSWsfryXz0wfmmlrcm2Q0wWmUiJon5MDJBBiQNCAgUBsytiB2VnuEWvAgQAHpMIMQoF0+k8+UOkCtwnnm+9hmo0VgtLhC+LYKullLFcQeB0Y5ZyXcvVS0A8KCYyoQKjV5jeuppK9gsOpzH8JBGxQXCLSjrHz4n7fqT9whokToSDs7K5W8d1meeElyWjBsxnin1b8cIdixop/ic40VR9uMEDsRS3FcgCOW0883yLy4FMwERsYFmVCUdKNV9/GWmX6EFVZDm2gpNFqJ8w9eFyOdYMDKAYMZqZB6InLZBIlYQfl16ESAQCs+fbYPMquISZJ8GTBs7rqsFaVvY8rFJQswO6jMAjHFuQWDHBmIkIQ8OyrIM8Z2NKliX7UNUNBTx6gNXKisbJ0rhW1++H8dioHyHXe0HNea8YPmLVCLkM4e7BdoIaAlgACoHArEq9ussPKu+lNW+zQZWUFjrVFLJ1rz0LsbOBHLh0qhHrZfq9A1GLJAax0QTUL7VKoM1bDjjAgKIg4iKAEOg8MlQbuyjWdV6JAH4jkq+1PFz4oMvAgQUwHl2PyZ8/HOu/r7WoBzsFbpiB8UIrpN5bv8WKDBYmUJ/t5TWgd551iIL2fQ1623FWrRndYMQgt/PCNiCMoVnZWY7VfJax73M2UOi5tOvewSa4pifq/YRF8qP8GbXc9gob1i6sHOaSt4G5xBbCIXtyLhp92W5+aa4DCZZUyvzQrpcCuumxjTrmsHlHKbOk8YecD4s59oW4zy/xA1XGrCTmOoSVBEbzWl031ZpGBDvzfLP3mWJIb6F3mI9XpNeBgio4uHhoZ2bc/rEoDXg1627/R5oCDtZIPq6AU9jBZYi+BMKdrGYLjyrwq9Tl0tRB2KL7gYISeXzyb0d3YKtin9dW7/cp3Sj6EarwpQlEu825XvU/gdtI5QN09xGHbyuBEhgCeXO5ceCHUFzQE2WJ6P0RNvBZbkCWQF+vRS0Ueug1Q6ibZlYeV4VCkzYMuIKHwxU9TJD6V3R188BCg/3D7i/t/cChAKNnGDWGCS/rNprpMyw3gYvx5yxC3G4hDFlueTgKekFg4D2SptnDgCaGtHVFQCQLICVfjeOf6oCsVtOMgf30TxlPS5WKCwZAB9+XNZ/7EBgjNwyPZTfjmOMuUV4AwRym2sGIXruFQCuqwZlvi+VHfWmOXrSFDS2aau9BrJbUGo2onMCv5Velb9qD50VlIsjGJChGD7UUcXaYu8WBODWXVluGMx617GP+VAFIrA3hymQiM+DkCxT3Dd7PUBbtLMswJnAw8Oi+HsQeHi4r9dZYDDTwJ0OwJqyXODowETMZY6y/EOsB8KYwaj6HbjEGR5LLwIEAOCBhvECzASWGYKXxUG98jZW7rqwyGNMoLsDhaRslUkoXWxW63+KQBYQiC3TrbGHKb8OY5mxC5JGV5+7By6oZbF2DGC3ZBgp0lLHFxdqTrMaaeUEIrRxqw+J7TaYFCIDgmtiABC3Zn4Pj10NAOr0J+IYUe+h9JcyLWXJ4NrFHajFOyINDOiwbck1AcBzFQjLrl8wArLI8VxW+tnofqf/ub14bDnOYBFuAe2TmW5RZEBQOxh5nSQAuCsQ9F+kuwdvX0zgFhPggUC7mACVVDcC0YXieSAQid2Aq0uAxgJuuQNxbCygfj+Gr8s/IxhZbKbvRdj919u7Dq/q2mrZ65ootDOBiRqPcVlyLH6at25imoysV1qcKzBgejrCv1XeELbqG8LMgZSfGAGXZY0DzCm+z8TZMUqGMYN4Jb3oPoEGx8nClZVWnRuFJxZA50LxGwgQu50UH1hdk/j
MLACQ5gqId0PO6AYNMF/q6XXpxYKAai0WwoOB+sxAbpyrzxvrCZSAPA8EMqIcKrhxCRoLWACBlzePmMBI6j8dEPgcl2OppGxc95cbEyh2cnULwoTFq7OBqdM3zptLuSx+MST4pZfdRwOJSGZyTwgMAFL5W6zRvhtCE5uWvo+s65DnjVD3uIAxKWsHYzTnKXQvy3NOjaZ8BhHQsP4sD1C09QLmLMV3hX5oYPDQwKCAoLsKM13c04xaWP1kfpEzlsPOBIBhAeZpBiNdBNlU1iPpxYDAeXEHOCbgQ4J1DQzG74FgAdGALSD4WibAjVx+7tUN4ONStJ3l3wUH416Whlt+BoAS6kZNg5JvXIJSfOQxNkLgd3QAgJfT/w0HJHOfCQDch6YoQAhqguUjvJN4ACCxnHiBQ9Dyot7Lj6+H2d7V9sX4xogh4LawZ+Gmg6cv9LHS/AJ3pXhJbWSjdDwJBB4eHlP8+x4jaO8PyzwXTdaT1bUAQLhDFQMoVjh9OLOIWg8B5NF2WdPLAAEA9ysTaO4AdavMPm//QnPBgnEdRbhV/mUnIgBNaXduAEfhb4LAuQOBWESsAnE9T0hFbakBAccEIo+Vn3rneAZaUDDYgLkiPtiU2IDRTHgwTbMrrw9LDsZcbcCqH+aV6y7yZnEACjJe/ApFP7N+t3P9Kp5hLKCzIxWBjqsryWwpWdJkJkksdJ6k/Pep/B0Q7nPNybl0D2YsgV6qxopSgTHakg8M/mMCM3tavCcgVkTGSDdhExK+mZ6y0OgnAfwFAJ/w679PVX+viHwtgO8F8GkAPwjgt6jqKxH5BGzbsn8EwN8B8JtV9UcffcgNd6BmCU5yB/rCoWU563f7qDH3GOyV/3UxAWYBkW65AnNOzGGWp4SzrFhY/1DCAAaR6DoL290zs2MC3k5x0VXpom7ijg4CU2f6lwpT/CgzxsCYagAwpwXxXEECAED37cmCbskC4i9bd3/W+ttkQSh3w7RTWm2srp+18xLY9LY6w2qOsW1jqzNNQAsmUAHGmUE8U+KHRfnv65iAgX3/cmvrc6yJCVWP+ts6BBITMKIMqG5OzWHEPlPRA4HhDkQ3bC388vr0FCbwPoBfrar/q9jS4/+liPznAP4FAL9fVb9XRP4tAN8G23Ls2wD8lKr+QhH5ZgC/D8BvfuwBFnE9r+fO2bpRwqp32lzXXwOC16BRDiUO6v8ICOxiAp0RyMXiX9yBc/iGE4OiwAJTeAaFAAb6TFc3ayAVE4grugfQezSsggBIWXKFel+4dQ9asUy5c3yNd+mrK0/MWVflWYO3hC0YA+UhzyKp75XqW95i/YLYAEURS5Lr9Xpv75oRSuMX5YT4+v5TJ4Yv+8UVzHlUgJauK4AvxWUACIW/p8AfHVPwL+WYFqONY6hm70UYh6D/a9szs4n1DYIZVM/KG44JqNX6/+of3/OXAvjVAP4ZP/89AP4fMBD4Jj8GgO8D8G+IiOgVfvkZeyYwta0gdKH0y/UsELe2Iitfb/+a5HDuYgJMs412+oyyuQeB8FOjexBQCCZURrIAkU1eQCiQ1tQbetQCGrdTM7teSRV+MytiRwIfxyAreCgpjE1WMncrax10eH28XvMQQr2eQ8QIyFXJGYmW5cb+2B0oVyzYFD9WICM29RjmDqC4F5e1uZJaAMA0/jYAXN9zROB5ZRTMMqJ84gDQcsfgPyTXLoyWC0dAghX4+TceExCRA0b5fyGAfxPAXwPwd1U1NDe2GgNoGzJVfRCRn4a5DH97uWduQ/alX/rJKwjAqbavI1gAUEAQtLmE48oGGAjO+dgehOUueN1HTgsAEpsLEG4xgA4ExQJEfDOPWb5xLKDS2Agx4QwU0TNjWO2tpJnT+ow4p4vyio1jUAhibQKDCYOsts9ezkqUdo+dzAUOpH+a1qwDQboGCwjyjIRQfP5cL231yzHmbgC4fsNZceDTqBvPRwsE1jDgi8Lf3zdQsHf7PkCAd8S6Htu+xSKOmTRVPJZlE0H
uRI0xIXOAJ2JlD47os0cLAk8EAVU9AfxSEfkUgD8G4B961lP298xtyD71qZ+t62AhgDayvATuGlG+ChY/p57HJqwae+MG9MCRlrAt9kM3on8LYK6WHq08fK6lcvWLCUgBgj3zWuJ1Xn/4mRr3WhWXrC0zIgmFgBIY7EuvzfJXnu35cgWB5XPz7xU+M2Ym5HT3g+qN2tJAt4BgrkqnPa4UrAOA9z6R4j+czfdnRS8wuMd5fwWB8yGGBWtuL5aL3+YktwooWxliwRB0fzDMDxEojntUtacD8SyP4Fm9A6r6d0XkzwH4lQA+JSJ3zgZ4q7HYhuxzInIH4OfAAoSP3Pgq/LqcK0toXWs8Fx10VR41ebQPFRh0YdE1Ml+v1y0r1mcQ8mKkd22tgXVjjFwLbz0XC1lctpku9pGyQYLP9fV4BUuLODukeEWtYwv4l6RoMcw6Bxctdc+rEDfq0f3/BID8mtiA5/XCLRoTUPQZk1EnNlYAiO7CYJMWpD3P0+v5pN2G6gEP9w+4d+seSv9wf497OmbfvwKE+8Agg44u77wtG6A+PVh9q3NBLQ7rn2fFKzrT1HRz14V0npqe0jvwFQDuHQC+FMCvhQX7/hyA3wjrIfhW9G3IvhXA/8e//7OPxQOiAeZ6iRIYSnV4jEFLg3kXFq87kEGei3DbTMW90vdYA4BHlb6tPPyaV19tqFbFlfVcAAFPCArgWyql8o2bVHxpRbpPmugLu2gvCijGSsOzVtL0NmHfvhYMMW5NnVRCz22n5HKf6McXxErHmQ17T1e5gABYBh15TwFAvTdzQqYtNz/SvassqaorvCv+vVn5+/v7dj5jAedDGzI8l8FC83wA90JddmZe174EKbzXtdDKVyKwQO6NfTZXxvNGQQDAZwB8j8cFBoA/oqp/QkR+CMD3isi/DOC/hu1XCH//D0XkswD+ZwDf/JSM8MSd/gX7lj6n2s8DsC6VcxXI6Fqp6L5Z3Zqy+djoQYCZgFv2uwN3GyDgFYhysRFfdqyxgVzuqhaq2AGBXTuuypjVEZbPrN7GZnpiVrRGCIIHsJBdWUj4/eK0OXoTYin4tOjr7R0A4jRjAMW823cikrEKP+xCkG+uNKnw9T4nnAHUHIwas+G7AovgIR1tcRfI6jOU//7+FYHAKz/nIHB/XyP+TlP2SdOKJ32HnPLuC4Dy2gHB5BbXkAFAfeHQnMEo83EQoBm3TzENkZ7SO/BXAfyyzfm/DuAf3Zz/+wD+j0/OAZCVsE8lMPB+cV5rBQiXgCx/KP9i3ebc0f/rmAEAi7W/q+O7uwSDO19wdF1abLc7bluqqgGBNCBgV4EFlUPaLZawqa9yHqJ+iY6vLgEBwEhG1a22svKhb6+VdZwgjD7phVoxv0+ixkyg2pJ9f6U/cRylDrDIWZeZ13ALquv29IVBiwJU7Vj7T9zf3+PV/SsHg1d49SpAwIHg1Ss8RNQ/JwA9QPM4zj/kIKCga+q+nBLFjWNBrBcoDQCmB48F0xYypWHo9Tq3556TXsSIQajRnJ5YmZH+q7GBEj6AfEwSrrRwYBC4Lje2ZwIMAgUA4e/fHXc4yB2IgSjrQqPXc6ON+w9wkOX7NonH68KriSyJ+earl9+AIepH809emfUXz6Hngu9KSpcKt4BH6nCselx4gPqSlZ2AvbKTz8ifab5lZqp0ESjsBmTmRiw8R+OEnMBpPmHDAI4RhcW/f/XKQaCU/9X9qwYC6kqvuSYAfz6h5+lKnpWYJan8kruTbkIHAAscIt2wtYfhygJiavLT08sAAejVHSCuOFACl+9BmSG2LHkEmsgyrX7uZcTga0GAg30GBv3cLvg32vt6rvv+V0aQ211lTGB1B5DMZTHYS/0Rpxa0TURST9NlWtbCywYIAIh7VYS+2sPXSwAwVMKBIB9g06z5iHRK6lq31OLFS3Em8Yg4QHcNKgUQ1EhND2Q6AJwoi1wxoQKBUPhXr14lAMTx/f09dJ7+mtd3dSWcPINx3SR0E8i
Z0fsSy4cXAMxZdaGPsgAaWv8m3YHPR4qoJydR2LqDLDChUKxMYgNdysctvskAYNN4+0Sh3ZDhAAFmAXd3d/kKAGAQ2AbWWKlIyW4DwehAQIt7lFXugcGsvFXXHAAai1hZMNXXGg9oTeEWbNJxPGMIrByudZmnnCykRb8XL4OBIJhbtjtkpzblRzu9vrAHujr8aI5vAMBpv8bhP7SpzAPzPIsFvLrHq1f3eOVA8OrV+/n+cH+fXX7QUPwJzJhoNM33d7+cS7HiIsd7qhfGx43AdjZiABAUCCgDwGW/jTcfGPy8pF2mNQQ5qH1G1cMCuxLOmRZmFwu4MoGnuAN3pPwdCFYQANBdk817DxCuvQEMbIvwkuRkWEB9T8X4JqJpF/SvczTGB2mpL0BJIxCVyHf6rxwfEFsgxAqX6wIA0jyPcgRWJKCzN1gDlxt0FACwF/SOitO1SLISSOGOgaETcx6Y88FZwH1a/gSA99/H+69e4dX7DgLqiu7Kj9gtOHcTmj4ijJZoc9ZV3pJkfgTeBSiJ874FnxvIuHx2JpDjD3Y9BG8dCCgu7gBvf1VWy4NtY/gOQBaFn+fiCrDlJfCYN7sI90xgp/jrK/122QlxpcYCNt2CneGQcrrUqCuXZnApq+5i4f1quy7zpe2SYhidBVST8DOibiomEU+NgdAakHHkUaf5WMhAfnBACBC7WY9U4Ai0EchxbUTsQpZ7JaCpQg+j3gbONoagYgLFAt5//xVevf8K7zsY3N+/IqU3FRUHgAACKbRM+QhQb5/V3tnF49GZOYTdu2aHrIun+MQ6vboGbx8IYOMOIATePwvtBMODd8ZhC0LesP4ngcEczwEBVvb38N57t0EA6FapN0BQV3IHBik/DRSK8QMrALREVrkhwA23IJY6rywVCa16iuPR8++0wwS0glfTp6umEoqbMSiOyNfOJyW6tokGZN6KwvciabATV/JVzoMhcJdojW0IhfIy6EgAGGPg4eGhAOA+4gDv49Wr9/H+q/fx/vvv49X7fx/39/em6O6WhMLzex4LujwiZLHqIQrJbqrmkO14DUwbZ47YOftW1+BbCwLWsGtEkxbeBExwsiuN++vvaPLLY6+BWtoZewBwiyfoIPDee+/V5/few3sLCLQuO7p3UmrVBICIT4TySwOCChKmv05qkpAYHNGQ8mI9gwUEJ6h6pivIAq9MoHW5pRtQC23MWAQj6K2Pdsvg1i7wJV3d81zGBMKqJ0T1PDdgsQBlDBjaAU4AxYD4PoHVTlPF1+jnDUUeaozAK+8ReN+YgAGAAcH9q1dZq6Hwwp+Vjr0tOejKy4HFguslJxEUHM4CanCWlUMuTODmgKG3DQSgtc5eJNtbL1CgrFXvgzd/3VZarfnU7ZXXnzfGCdxmAqH86zsDAbAHlDX+YGVaugqF4gQJChEQBFnNqid7e0oDh5V2UKDqzCvCT01XK6h0OhNFn91KxVDVeIbVuwm/7VsQ4Lc8qH2ULOOFCWwYABU9QTwA48oICiAAQU3OsnpWtaClLbA6EwSCCby6J1eAYwLvv4+///fNHbCtQELRsTl2ABIxgKTBYcGCbNWmaT1favsH5hoTOm0Kt08vnpPqelX28+oKvJ0gsElcBLZW4T/37cHd/7KLm3WR3DPuCYFBIHnzygIuQHB3h/fu3gMEl/vaJJbRPkMkdzEOlyaCm0f7PFqM4aIMi9sR0fy0zAhfFKYMCNDYCUX4qevv4wQ9K+6ilYdiPHpT6FZlX/10bqu6x7JaLrGsKje7OGtddBfM4gOaqyfHDE52hx4ebN7AvQ8RzvgABQtj0FCCgCt9DEMe0IxlDf+Taz4MybUZaq0EXADSQMprPDYbRbxH7IAUPtnZ2w4CguuuxBlAoy3Dx3JOwoKRMIKCVxfLx9S9W3D+LCI4jqMp/GVocPQMSL+vquaehLeYwDqUeB1ktCe4u7hDMKSlMrO8lzv4b0uZ64X2vphy/6eL8vSBW9wlWq4MN/KtoN8lh96
WO4ZV+S0WIA4KtdgJf06GkQwrQKC+CyZgQ4NjMlDMC+hrAMQ9hsaW5L7sgvjOUshwCnJD2KWPNgBxBaNLLMgxeSezCQg56vUtBgGjR11grkNseWht0XzuglP1hS8GU9KuOBcrBlzegTUweNeYB+9XF4J96158z3UgUXcF6r2Cf8VOygpamQKswsbWkhK4gEgFL+v3UTUdEPIBafnpJl1QI3ZBw5wTAFDKlvGHi/7fAAVvt8xmy18tFNMZkOZxDWaix3AZMl+taDVZiJYIf8i1AWcuBTZnrMeIBAIV+HoMSH3PqgxAlSoyxwfapqUJVlwdmsOww8A194zY5iQgeOtAAOi7EgNI/7j1o1+m3fZ148YYpUAAIoCTQSamrUpXbSj2OihoO0WYQOgpqQJFY1GkEIqRGhyBysonWes4r4pYbNSER81qpK9fqYq4v8+F2uf1Akgs92VCGm01lvx3QS7QiBbw6EO3dJcMFjKVm7Z338olEDq+ugrVAMtJeuZDBgbva70AXgswVwaetiW8iCt/YJXtWzjys2RR6EzVRbb/uNbf0npK+e11UAygWEF9fmp6GSBwwx2IYEqba5/R1j6FOEGELR0/YqFUa1rP8fyAnTuwDhe2YpRwSzw0n8/WEovS0HnUVFIogBmswMql0JyaasLfN91kaxiWj0vGjCJ0fXVb1sCeoA85jgpd3YKVDWRNCL+ALQbQe71uA0CUYWUCVUahNidG1eqyzpk7QBuF0DLhvElIjuUnNuVaTQDgeddxqczVDYiuYaQcpPBUHTX2tnMH6j3YysLjHk0vAgQiWtrO0eCZtnf8OthGbH21NDUDiApL6yM98PQ6lAwm8JQ1AnjEIPtzTSmA/k6Wun0OIZ5q03YxEUNwNQJjbgFyqfAxMhAZypAAoGrWu7kSpRhs9YMd2K+Zk1cbNZ92p/wZo+A6qHZ4XWrPzXwt9Lcp4o4JRP44zrGwHhDr8eOHh4dcQOQ+3AFeW5D2vmguADGBsPfFMIlxVcYSBDuIUj33SgHaLMliAbV/JjEAihE8Nb0IEAA2TGCUz9yZALkFjqKqRTPNKz6u9xd5tGKY1oebUQOSfITizcVCFkq8Un4Cg3pIvF3VY+aUMbWZb2QnjSVoKoIN27dFSy1A6j5xOsloQQK2oroqGX2HRXBrS1LP9VrmhQVwvIIdgU0VVCLqHAzlAgT5yqyhwK/OdaWhFaUuk8dmgkAuHHLP+whcd8bOh/jqVjpK+U3lAwgqU5eakB4TaADbANcRR66AuGMEvBnvU9PLAAGBUyI6JRU8S/97cQti8JBZRPudRWeDGZRwrtMrV6VfvzuO6L6zocnDwaC6+bpLwEG+bQDQn1F0tBKfowgARI3lpNUDSABsYIlNjAoACF85Vl9C+21oRjIKLNZSfXUg7WTSYcWEnhjMTbBLICgvoNyA1/MCpumtZ2B1W1CgZ3lClS+uCMVvtJl2p3JWVYOFKDB4nnjgtQLmzC3Dwr5PiHc5ajIDDAeCZFq9wUUonuKvXYqSNflYFX/2z/l660AAso8JxMAaCeWKGIGUm+CUM1aTAarSBNZIF0vsvnIIJfvv6avxoCRnAn2gUncJXvda3ZHVR22uSiimcA8AGm2vATu2sKadL8GTNQgGNrRsaes4GEAnAlEvZZGC8o9F+dfAYLd97Xa4BQTl8ng9rECQ8ZBWqvrtUuK27LxScI8+z1lzB2oRUXYF+qa4CWjiOxpBfACnH09ARtVv5dOHEpPLmEyA22cpVxSu2qkzmcuIwQaUr08vAgQEuB0TYLeAACF8KXMHxCyhd9KmYnngLJdnintHACYAwk624+co+DiGMYYlaMiBxACB7Qsl9ExXoyuMqSE3fNXebu+CUmwWh3T1IywAtOvrnlFX3e+u8zTd+UYPwRroKkB5mlywJWXqvo4uvSgMnVet3atyHwDv7z9njbYLJlAsgLsIIzB4JgjI8L0bYoHQIVW36Gzr4hYEE6C4ShYjZjoGwyFgvik
/C1v6yNwBsTUG/xKAH1fVXy9vchsy2TABDvzJ6Mtz5ZTiCqqoTkwM3yDDurSmKNR3pIlVaJrCL/56i9qOxR25cWxMYbPOIL8c0HYjC3sD+tJSwwFgiC026crDLEB97mmsTtv3LgCaYoSC03mNf4uicQBhDWpSg92MCbQAaVwblq9a4JFEAAYGgA4Ejyk+p/Dnc/uv2E5s9p2Bavfge+ohqH0EeZKOjKhvwRw+Y3MGEIQrwKyuaiKoFPcMDBn+qwLlAt9ijdBdfSh204s/KibwuwD8MICf7Z9/H97QNmTAZpxAVFAo++oarCiau/wGhRSITp+aWVT8YrHGI58JDHigUpsIRFY/X3e+BBkHEmUUHSUA4KGfMgUTswBmDpxy3Z6trKH6nvS1Sm0pdAlR/hYFJPk9uR+x5VfGL7RbbXZNOAi66yIsoPXfNiCI+66AoAlR8VayvwJB/9012bkGANTvf57dyicInLWEeALArMFCUfcitkOwbXG2AQKNwcVEuaJqUMHBrEMl31+WIPai9Pl5UqxjcQ3eOAiIyFcB+KcB/CsA/gUxaXhj25CJ7EEgJ9REHICj8WSRAaRg5FZTWsof1uOW1brM4ZfeQLvgF4OEMYBlHcJloFG4AzdnftF06OjyS2BKJtBZRNSTuTslnKHbpVSRSrnKqvT7cqOwigYABEDUYCkeKyGLoKP9plDgsXhAqM3yr5Wd/Wz+/er41L4DrPQPsamobzBSXYHUIxCAcG6YQAKAJhDoUIs/MeFqdUqDhaTcpYgJtJzfUJVb9H8NEl7jJo+npzKBfw3AvwjgZ/nnT+ONbkP2pRfKGUOFqxtlHVm1RPijirfyNVBbOu2sfHy+WrX9a9ieBzSkuQcNe1yA3YFIt0AJVK6yrOifb6TtN4teRMCxs4ISnrDQpasU3LuAJ7eHj1xM+i8X5196VmDz7mWBqjD95FcTc2Gl4rpcC5wgqLSv4OQtxUr5a3QgbSce4wJikFD2y8+i+1UQ5NyAhT5dHaknpgbUFTO6uG/xfUDno67SPj22o6VlWuTXA/hJVf3BZ935NUlVv1tVv15Vv/6Tn/iSTTQ9fH9ShBDKFLCF6uaLAzJhXjqd8jwsr+XcpTIIxUFCfpV3Luc2qBOZvpUH+63f/hH2camvBBBEhZU8UWVdLMpmBeaKDcRrBVDqoWnLol0BDSKZj9YeZOXXPnwsbRB5yErnhml1bs/Y+s4tmr64Bed0t8GVnwfkJAoFbV+kI0S0tRXIYHmePIP5fuuF2eshFb3k6prk6SBD6SlM4FcB+A0i8o0APgmLCXwX3uQ2ZLfcAQoMJjW+WUpWej+z+JNBcaMB+vN6xYqdaD4yC1tjI0umMicEAMBG8aAXgFhRnGl4KB+PVOTJTLtlwiTKKiRMueV3UcqRVgQQilBX0a8M6iZzwgoAvXbanoXcXusegdgncQPAk4W2OhH3b0BgYBA7XZ/kKvR5+XoJtKUi7mYE4gqUYbwYkCtbtwFg9916blMr+f4YW9yl1zIBVf0OVf0qVf0a2G5Cf1ZV/1nUNmTAfhsy4MnbkO197ZoxuNJlzxvId1S0F1Kh6NG3Kj1/v/vuirorXZfIVGaMrNhrG/v2GgfdEpcrsp1+vHVj0GSvjPBihWkHXmYkrX3QXYCKzexiJjtXrcrTrXMwkZUFRD3u5CSOb8nTagi0jaar1XliINC8BApzYJDqhQ1s4SkZkMe4movHFrqzH00ZWF97eeHXPh+36uR2+jDjBH4P3tA2ZIIbswh56nCjmSuq8kFAQ2cBWxwKZqDtJnYnjZF3y4KVCwA05V/TBkyurxuuQtZMPcuU3haYGOPweqKBVIvgLVmB5NJfkpTWa8pmwJFwrkVOICL3Y30m10eypKV5lM5ktWu012x1gmzJsrRVL4oAAm6rqL66B9Xx6g4wC7gwAVrD/wJOqOd7BRUbuLKiS3NUwW/LwCPGg7OgWqtO8+s5aPDcXYn/PIA/78dvbBsyiGzdAR4xyBa
gPzD+aH1OwdJ26lItYa03qZShU+vmAnDj49rOlBVUgz+tsbke4j3qRNWGCa/1NG7EBCJHsdIQfBehqqMJtF6VYgrUIgsbIfaxdrNSkCSBgAAg68MrKp5j23izoCs/vtVDtNHtFMBbbTmdeeQowXAJZnUFTt9EhLtu+9iEkLPF5Yq8sesWzOCilovi+0zRPRNko1bsElz2tR5EIDcl+5pexIhBAJfBQhfhIlRdDW9ZfXIP/AumUU+vFqbMK0fw/IEEcskUN1Qq0w13I59xAYe6JStfLJV1HOrW/drLsWVLmbfQxhicovkNL9ZBZhtMw1ssYNut6tVxIwcFiPV+UQBkYxLI43JHEWQd3AKElXb3rtkYMHTWOAC2/gwE0/OsDqDR0gzSYMa0xAWacETZb7d/rxOkLDU53pU52O22NvbpRYCAyI3BQpfNOIjsZFxASVBWdIwKfp3VuKa8fgcgxALyPX63HpNlvwkCiyBkuaguigkIcr05KFmenV9eGTEmxMrEPAmIqVc18vBSXBgYrEu8Id20nVuw1GpVKQIkryDQwJPy3IAXrAO3ASDqKRTZYgPVC8CxgFL6CAZefXOJNiVu2ZiArKsFrVetbT+h2CyAu4AAy1A9f0kBNpd6fzy9CBAANu4A0INOjQXcsnJE/xA40MEhcPwpmLAFDvZ3SeBb5SculXJDboNAYw0EBFZSE6MEAB8aXSVeAlAbJhBgcbUgZTMUwECMPBy4KlblIV4g+gtBAwBWjALU+MPl3VPhZE+grCxxiktxNp8LVPryWzVxKICAFT+2FFOsgbqo86oVIL2fCyCiHV9SMotYanwHAv18lamqpcPkc6IBll4ICGBZYxAWJ+Aur2Zh+qWJi6zwUZHoFjaUwR7XK7PdkxV4TQQEqW4t/3XfevYjE4jU/dXd89Koui8uuPTptAh0ggHQK4rKmmBVAGBzLYBYGLOzp9XSV6wG4fMyK4g62kg+YR4J+GwTXxgc1saJPLyeBfCx35MCg2dOKCIm4LsIZQxgAYOUr41MZDSktVcBJFf7DvBXq19AwEDWDURW4lI/z3UJXgQIiDwSE7gEnRZ6lXBYAl0BlH7+2njeQJtGVapg/m2K9SO019WoNV7L2yNsAHRN5bKsyXAUCH84r6iMLe+VqdrabSGTZNy2bCSFmNkA7cGYrgK97wCg104DgH191G+5zXsMQNrd+WlbxboEBgsEQAHAAgFqQ3VG53UYjxQEE+K2Ijaws81k7Xu9X0GgPX/XPsutAwCeygheBAhs3YFkArSg5xIXAMJaaHmPWTmdCbCCXZ4u18DSRSDr4k1DMzj1FI2x8/0fe1XNSC4cOkasm6DgSVGPN7dQPnSHd5TLcXl+5mLDAtLSNxBAAkD67otFT8HeuQHEAqoF67bRVnZv7fds5aljZltrYJDjAmnl1Sg6W/50BfTysLQlwYaqZ2ATnwmZ9ExHeSufYcQKCG/JxpKF/vkZcYEXAgLXwCCAyxZOZHRaKmbIqFrnmGaLSDbAYynZRMsQC/31uxV9b1O+pwFAPIut4Biyv3ZhztfiFRVQwNcfxKKcsWnKFSzX4GS218JAroyk54EtW3OHcsWfGiuQnt1ylwLtKsCtHoIEFNUW7Y94ALsDwVJi/8bKhNZxI6D8vB64DOa6qYKNRd+zgagvPh/5UmIiXOPlLj0u35xeBAiI2MKe67nyg/39xtBYTmQbmiVf6dNtwUIq9E5Yb72mTgz4HnI0w89vA0jNaKtlrfavDhqca2Bd4qvIwA2N6aUs6sqOVcYRCuTimOt6DTomoHL9FyL7/67kORx3OZf1EUqaS4LNtOJdQdA+M43vikMvckG27ge5KuRPFihUyTYVvQJy3K+WeuM88F6DIXsBWNiWcy1rXVvNaz7iNUb1eHoxIBD7+vG5Le1m+h3BKCzCF5+90vtySwoNgV+RNKw5AUfsryeqkEm7Hw+B+JYzCmCoQAcwVKE+oEeGgUIM7OGFHy6KT0tDBXup9yp
X+nvMShxlgiWp7mRA6prir2StolwD12XDGBTI2hD9ILGkc64KMVRXq4x9iS9a7y+UNICBPu9iB09iVmw5U7/ZwlOmMz32eVF4DbYJAizfaZvWf5wTvm3RDF5n4C0FmAEAyQICTBnEMv8bYwb4CtNvHRMA3ntvzQpZHyzWKKLRed0ehQv5yxI0knYjphQLSQYAdIsfgODr+MnMih+oBrA952xzkAAgtvRbQHBht0xsGtkehOv6gT0ikcBIN5Cm+PSb5dwKAD0esI98KK6CW5+1A8CknoAdO2hWei7v7D5s2Nhcf7dhAWmhd0p9Vfx0PzeYUPfbW2uNdQbUFD+Wh8cYkKkwGlD3T4W3G+Z98/7xeQEClgIVQe1J/bT0QkDgygS6J8z+ZpyRdm2blc70S8OiPLLIgvRDC7oVle2MoNYsFJFU/gxY0aKiFtS0rbABdJpLYLAOU91mLcre6iBxoa7U5bey1KPUL/mbruybzUSSAXSWwRawU25Q3fWZeetnZYaQlnsu93tEwZ/EBog+J4HZWcvXMYANCNL9GZB8rTuMqZgDwFTIgDGBARuq7YvJBIACS/4IFAq+CAi4sZXb+S1jAsAOBJzW+vfXL+tAs224mgo52Y+69fz+kQSZGEEt/EmuwRhQchNGWP9hFlvUlKqsYvd5GxC4kFuOOAhprotQPICVu9XRNjC3AGeF8Ze/O9erxw/yd2kdi24zYPLxdTVcnslHAMgggGgzbr/bSr5jBpffa8lGgAFRpSZTt+1oqCczgGI/4QoUG4AtPzYBjAmZvoktnEVyfKspNoNBfVfP64nExD7r7RKs6UWAwJ4JENtJ+mN/UumjXZcqSarUKs5+J2Mda0Otn+5BrBPHwjxtX3tfDHSKGMUDmtLYfn2h/GFVQ2AqCMbKnzTYv7NuQS3lY+W3CktWJK0c9P16Dqvib0BCagr3tYvr2ktThqvXUw7PJZDLFX93oDCpXqIFtZS1LOEVCG4p/8UVIIZCgkWM4HUM4JpKZVnxu+GZAERttcE5gSETcwzItO3qxd3GdtdLVrQf8yvzvrT40zHgJYHAe8vZqlBQ5bbz3ghs2Bp1yt+SNZiCOQYBgZLy2014CyuOC0SgR0SzB0CxKsj68gYS1DDUFQiUz2u3+GB/Pm+V52T54vrZjx957y4XBT4vQHBtO0WvK03/f5m7z0t/3wSBUnRQO6agB6VPVvUEANgqTPKBehYdy/L5WmjNr7viVz1MBxsBMObE9CXpACSLDLB+VF8JAK5uwTWL0mT5aemFgABwd3fdOuyy1ZILGYvKBTUZnZuFUmo8zX3kO+cSdwWkLEgAwVRa3nvW5bMUxwAEqJ2Cw5/2mEDkP6jvXIJgXjbxFX3WoRPsItjjKMzJ1p8pfAMG/z6r6goKt8CM3YV0QoKXZl11qr9T+nNRfD4XinpLOVOBt8q+ixWs5+q3Ww2C0/SoEv5adMkSK+OVcYYrgIHcmGTMGJrt7TccEC65eDyVa7PmPTiB4BnewEsBgT0TmGQ581h8pd3cEi4WEOVfVkOzQJQs2cEEMLJxnXarbyaB3qDBAujBzgrmAgB+H+7FSJePgGz1gemziO2pONUYi6AXkMGlR/1J6QkILO5Ug5mCZ1y3MBfsFgpZmcDaSZAK4O1UY/Nrqe5bwMCfI25jxXmcnl8VHuiKv+8hoBwnEPRtQ67Pup20bpVASPICMxK26fDEZJYF8eXiqY1aS1xTuWBXoMz21HAln55eDAisXYRBlWdGUCfmBNS7WCZ8rf4VAfgeLCxuZQcGEqIFKXjZVxvvG2uTbEK1DwgKK7wwAjAQYBVSdlX6uSEDM8YcANXvG844iLbj+pz4XAOqOiOIv2KVVNfAYhjr4qFZTrpnsc4r4JaCx+4+e6WvnYEcBFb37ubxzto/1mvgBU0wqPuw4X89h16pefLNuH2XN99j0NYnRr1mAam9a8M8CT5SzX3N3YYJREGeExQEXggIAILjWLOiJSBkjSbQovG6VlN
TLs1GiuMJ9am45GOahiJHDUqtjFvKb1tSMxmwrO8UBaSYVj7LXuULKMFt5xXAmBg6UmBV1Ta7BDCScQQmCJrSh5/ZlHbjDoQMSf8uejxqfAYBTisn6D4FknNR7BqauwGBBRwyd7J5d++jutM+YFchgcHjacMGsuq6HdYAg5Ax9TFBaiwjnDYJHHemqMyyCKDVEcGs+p4V7PImMQ78mf7FUzcf+VEAfw/ACeBBVb9eRL4cwB8G8DUAfhTAb1LVnxJrpe8C8I0A/ncAv1VV//Jr7r9lAud5WiWdYD0yX94VYyUBTOwu/iEAYDrNJhDQECxrpRwgJL4Sr9pvMG1V3jmKCSCVDWgWd6cwYGVnMFgEUwfl4RYxZHdASGk7KDQ24BWkmW+qLP+e3YG2viNCgHt+GCyLCdTY/NrBJ9yDUPwrMJQ8IJUj6rCONdu2XMZelzvFz6oNINi+WoUsx6tuUdulG9ANx5hRGC0aADVY0AIDazst0POYkMLcU36whE9wSyyeiwB4HhP4J1SVNxD5dgDfr6rfKSLf7p9/D4BfB+Dr/PUrYLsS/YrHbiyCy9wBOOrbxI46lwrTBKNkOmmqVqOyRQVcyX2XXfWluI1V1L1TKWOVnUT5aQEeGh/gLdYtP6E7BQVuKH4ceyGjTBSgbIifrwUEVuVf3QJ114LqM/Krzj/XYCB3EV7VoMqyLthxTnIJSPmb4reYAYMAMxB+rxZ+3OLvRh0WyBYgUFmo7uOL5iK4DFFTtsu37iPIBQB8BGkwRfV6r3MScRsPTguDBshNoGw1xvQB04dxB74JwDf48ffAFiD9PX7+D6rV+H8lIp8Skc+o6t+8favrVGJV7mtfglNEt1k4OWA2RmzPZSv08ii/3MKMBd6Vdkt5I0/+pCCD2Sjqx0sEORm3f2IwCbMR/mSxkrq2AGK1Sa3qnE5GtRRKNEBwwpMQ0KxJlZWtbrdwoVBWrwC6teflunxmXu7ss7CAW+wAYCUpt2YFhLLCXdn7OQ5A8irCNUKxgQMZjarz5ZwfcoxIVC12QyzIXAAbHTjcqIwLhkqWN4yTWz5/C6NQEtcON20X7ffc9FQQUAD/hRgX+7dV9bsBfCUp9k8A+Eo/zm3IPMUWZQ0EhLYh+/Snv/zRh0uJeL64qOy3xlbltix3NM6B43BgaYty8rLZ4QdTRabV5L+9UpJ5lJbZtWFZspEXpSb5YjZg19WTtAHATSjImoq/kvVSldQZQLNzS41SGVOhGHCtTLxcN2/jdT6cuaffw8PDReEzTrCwg8h8sZxbbMBrvCk/xVgIDGKDkSsQxDXr0mHRKFE7yzkBct9LRUwE7NIp1v7DB5UJ4hoxNzSbqsuVAjTFmwRpaXYjqldXOE4wY3pKeioI/OOq+uMi8vMA/BkR+e/4S1VVuc5qeTQ5kHw3AHzN1/yD2jPttB1yEc4rKbWz1y6t4bP3qiGLehFd3u2oixLC4lrSGu6mX2YtiQxYaqA7UdHmEtRlS/10S7Q57FWwckICzFB4d1kKY7yM5BawJVktnilq3as2+Dzx8BDWnxhA7vFXDIAV/1xiA4FNexDYsZRQ3mVFIAKF3GBkxwgmryS88PvGAvo5nSPjRdJeEz6nzKtTvDuwBqdljIclXjrLNE+11sKU+F0KiUDInWVTJdmOOyHZpyeBgKr+uL//pIj8Mdh+A38raL6IfAbAT/rlsQ1ZJN6i7GZqmV5Xu4hr2oWhsEIFL+VnOt18QLb2O99ZSBFIsXbAYzdFNmL/4MfRkpkBcgEQDILzyQHC/sT49aNpJU30RRaL8w5Bk8bF91eYkkzUCEn1+AUvzRVgEGzg4cE2+nw4z670DgQnMYBwDwoEXPFxBYAVBMo12IMAg1ApvwNExDLavgLVTlzr2aYacwEUgonKAaC2VGu6AzYYKIYKi683AR9qXqYuHpN7mmJHAIhBZLtd55KsQP6U9FoQEJGfCWCo6t/z438SwP8Ttd3Yd+K6DdnvFJHvhQUEf/rxeMC
tB9f7aqVDOVfkW1lALM1dKRRTCqkb9ZcrHQXyfNMq1iPlc0wRNBusKX4KlC73qM8ZzHRweK3yZ16DQ0X9UHlb//Gi/P7eyA7FLWI6rChsFNzUBQQezA2InX5p99/zwgLOjXtg4wQ6CCzKnwCOBgAdEECfSflPYgExh2MdtLX6aReapkXZ1XaC6mzA6mcGE4DFumIZgZABa19JIMgqj+AjlSSvSQyQjZzVtR0on5aewgS+EsAfc2W5A/AfqeqfEpG/COCPiMi3AfgbAH6TX/8nYd2Dn4V1Ef62p2VlUbC02vsSCR2tMQFTOl6a264Ld6BuwADgn9t5NDbw2qSsxvy7aHxS/JQrzcZfGeijxy3Jo8fbsrVrue7FH+VWJtySGeM1kAGwlQmUO0DHvs13YwGLKxDnwCCAzgCwMIMOAqDjqF8Dgdhb8CQXgNcy0KWLcRcgZA5mxM66jGOhGXsS4OM8kwGYzacl26BO9WOCmtN69BUAqpkvdKC1HSt7tbRcvntdei0IqG039ks25/8OgF+zOa8AfsfTs3Blrro5V9c2O1dn13iAwBqDLHvS7IsyUE4SF1YAul6f7v62VOWGJKqlTJH1IjAIQchoQBPI3Xs9q+U1rKaX28rDZbgNBFyrGdDEpJ4au2om1WZ34IGCgw/2OdhBugArC6jPjvv1AgHAAgJX5b8eqyptNU4jExMA1u7DBQRWdpEAM3wEqs8I1FhnwoAAUMjwYerqI1RtCCigNt0ceh3fH7XN0TXiiq3JDIwWZee6ewYKvJARg0C37VHd0l5bVpCWgaP8M5fmjm4WEVq7b7kBW+EQxPo2XJF9XncAcFHRRbDIuKCBBQN/ugOdDu5S8wulnfHTHTD7FStDWMrmQGXj0gHghDAIkNU/82XxgDOChKTsPIKQ9wK0iD0DAAjIAtd6YPAKBnG+2EAwjmtQcB1HMLE0TIE1tR3EXCEZxQbmnKb8owKD01mAgXrf5zHncRDREKHH9hboAiCgkYShJULNKMmknppeBghcqIDkuebnLK+wDoqyUCKxIm8s5OEAMIOC+SMS1R1VsxG8Yps0kp74uWArnDjCf7XVi9XX3sB9cNOVEnJfwc06vFRlr5erot9wg6hAwU3q2NIkBa5YAAcIl9jA6g5s2AGy96ay0+MDzGiuIFDhnTpnXZjFAtr05ogLRO9AA4EK1BKFs/8BAB4wFV82TCIwOGKCm68locsyddHeEugKelbVfbZ/vgWDcLYgqyCWG7Vnuvv0MkAAGwzQW8VgVsBU174bA9BpjVHjBMSnAQOh/up/Yj3BEgBZnrT/xEn7n6asHRgUrOE8biA+C90hovCbp7VcbVmK7AHgGuSM+kQKFcl7FiCO47sM8D2w0p/pAmRXIbsDuQPwvLCDc56VG6KzHQDoXIyvvzCDDgIWD+hdhLzVWOsd2DG2BAGCZYX1mIwJUQeAUStNMyuY08zUFAcCAvktqDMYLIbBWEAcX+c+loh+BF2EH31a1S2szvW1V8UFDIbkKK6wstd1AwFko0iztEZLVxqyJG+Ui6L4Qcfx0qy1ZyCsPAO+ySNzgS6AfOdrLaAMO1v8RfF3YGBjCBbGEs/U6HaFBwY3MYFwATw28HDDHUggWEDBmEDkf3UFqlzUodOO7V0bMJzndXhyYwG5tgOrVIAAK6NWS6gFAJUBIAY7DcmgYC4hpqgh4OkSdJkRYnytodkl8aaJe+XMUnQN6W7T69MLAYE1JTfPT7fscA0jZWF2C58Tg4ACAw8YhaJx8Ccfq6RIRE2XLO4AgJW0WdS8iGMAxQSCJUj98sordHnvNZFvSfKJTjdr31hBK6WPX49/SAUJ5Y9X39G3AoIPwQ4eyjWY54mHmyyAQADa6pp7Cvo5UPke/9yZQB8otO0dYMBePmeLzJlLhCmtIBwsYErNiBTx9Sk0BheFIVqBnlIyrjU/KOZ60QaroLc8MHg7dZkXZPBDWJj9KCZfrMgKZEOo2mCPdUHMDghCSiKLwmR
OLgpuz+FTlIOwLhc2sHj8ZHFTDuga4iyofoFFMBYZKCYg/Ti/jLvQrs1L/eTa/7vuwSUuUMDwsLX6O2ZQICBN4RMEEgis5ndkbf186RlYNjm59A4wW+M2clCINtdcQZgAYAUEERs7MMUZSrFQBnqJNmeLoUtewtiYdSN5YLAS+vnWUmzTiwABVcXDw7merL7dLLSlEAQbFzDyHv7D+PnlWAFDby0fLaaiinZQSLABSKp4NFZ9n24H0Bs5lZfyx0DTfFDKo5dvCDBFcLr0CwRnPR2qvgSZwjZ0HT6ZBTQFNa0ClWc5XmcIcn0FABSFpmG/qfAzg3wVgNNL4G0V6FR0qtdbgcDHg4OWT5Gq/wga7tYzzBWQaDm0Cxu4MIJqLx2AYtrw4WHMIBkBoVGb96D+mhNDbC2B6atSxVDiHptCP67WQS5/RyM5ow5UFTom3jomYCDwcDnPtM0stZ1nAGj9/4mcdhxvibqqVvnOBExwwtLNxg6KAaBZyu6ELnu+aQl7s6ApYEiG0hu3zglcOKbgJDosgANAlfWI5wzNsekC2/AiRLm6D5FlyTtKuQTJJxYAMGHjkXdkyR8efKpwTSSa87zsrUDORdYdj+sYY2TvDNPZNSjIn8N94npkJQqFvoIALfdOcYGaylz31eVzPmOKo28sGujHIskCLiAg4soPzAQE67lKJM/mLZBckyrcvYDtpTEl5W2KYAQDeToGvAwQgNo49HaKlEcXRQrrNUbt7tPpj5ZesZCEX+aLhsax+Wpi7GAyNS+r3zb4WACh8kDKP+N9kqtB+as7NzywlYwt8CRivfJrufJhBy51wqsj132lKzsBQv+OPB4HzwDIBICg8Q81ZXjnc9cegmvkvSv2GAMKW0mpmAEWBSIg8Guybx/2zuAfowWNjZwLO5nZJi1I6HlkV6sxNb+vwJnAVPPPB8war2DQgM6sf62QVWtTDl+4lltZFslN5pOsx5YsGxM4xeYp6LR6imc8J70IEFDsmQD33bKSWxkHas51p9Wr8icbUEBisVIVA4B0C6a9SwebuJNsm6muKquBZl14iy2+PjNGn0247XOfntrLEgKqAA5FXhVCp8ZXGQNI8Rfr32IfwDpcbSovHkoAsMYBaGhuA4JcSpyaZKH4YwxAa8HYFQDGCgiA3xsOBN5OSwCTYwEXNkBAFftBlBuwKn8dA76o1PDnTsGQ4YAwMWk54WIAIABgVmBDsQ0ISS6kvZF7UycUvteht2eIAcvSU9PLAAFVPNwvIECWt1QxvqLgVjKpFgKkywnbw+JPV3wpFyCtqI9cYzEo4yh5MhQ/ST65AbzvXk6OIT94bWRuuACAOL82flJVVRz5uxAuA8YxOv0u+l/vCQDBBsInT+jpbKzKYwr/sBkI1EblqfbIe7OyFPEXG00rLsXJEFjxB2gBGL+Dj8exNR+L/id72cQCGii7K5Cxi9imbssGtL4LkZswai9YAME3pvHeA0kGMBsA1MrD6kFEloRK1/MFqIqogPrueRzA0osBgdUdAEAUlbq5XDMkLdrlZnXYLKijpy8TbgovGyZQCm33QMlA3KpMv/fVxresNLzqrvmgXiSkqq1gEGXCbOdTn0OZVHH4PncWLIwt0YxW27qIVBVCz70BAOv06YQRArUIBto04aU3gMq6xnK0KRYBm4gtyAlBTIs1/SllKbehjgHFjHtMazM2mz2YeXUF2kaouQlMV/ZuBoqRRkjAVg0ZubOQDQawXYaHSDK51Q0YY1ZMQAVjCnQ4c1vWBoDUMvEsCiELHBdJttLc5qelFwECUFx7BwDIsh1WzBAUEtxBTqyuN803J+vqgcApeyYwfSKIllvRULcxDM8jmeoKpHkwjcbJK6F9AwARAgHxmAAzoCibArECMTV01MOcPoyVegiQdwllr1MJpnSPS7MwoGmUhbsAa2mxNlGHovC5Cs+lcavcg2hBKP4Q6e0/jFZn96BlEAozyRzYVApMNheFRgmyu1JM7fWuQKilQrI
3IK1/rCUugPi8gbbXgMaSd9riBKscRP0A0ctDUEDuDi4rLs8CgWfgwIsAAVXF/RITiKCJyvD98UKJvZLdlxpsBexm6927O5BxAIvAq0RwcEJErZE0aGWAQByXCDBDYIudPiZZz5jL3ge+FJuJIuTiqVKTT+zBZingayREo5sCD8iY1vU0ywe/CMFq9RMAxvK5lAlJ6UuhznllABUP6O5A+dxePelpkMD7czXanKz+CgQj90Eoi6dag3OiEebsTOyWW8AA0HoH2PIv7EAB9/HtXcWj8kMwNfwbs/ATDgJDILO7AXaskGGymGBtgpCSxrJdBJQUn3thCNiegwIvAwSgOBcmIALoOBCOr8ggpQth8TUCW/XUXfsJp7fitN8R2dA8YgUBBEWFNWhaKLgwztD6BAkcNLBmlvVkEGiBr/gMZwHi8xxgYwJivwHLS4w+q65JGQPjHJij93fzuIq0uklBik3FueYOhFX151b3YIz+e8iRgDzqL8fnZxfc7PlQKblmMGBXZTAQSHMNYqNUJSXItQ+FPSYOzK4uQcQEZnMFiglkyUuOlnO2dY06TIu7BOEGSO41JgDmTQDwsk7BxKiFSMVsQHj3zSEQKYNE5asu3DM/P8cjeBkgoLj0DogIjgOpLOxXRhBsiOAY6x6Gu9KXOzBFbGkomR4fEGIC5hLU1uQUbY4ZXxqNEvSwP0YbAPS197vyF73Nl7olHmLdhOp7WKlCxzDlHwYC0wNCYxwYw1hADH5pRCCNiSs+yhUAPzuZwomIcXAX35xKCl/uAAc/L12FYWHz3mXdMp6zMJQxOhAMGcvnAAGbGSpjWH97Atc1MHhdUKTHDLRZz7S1WzZgRMStv1R8IIBoSMQovK4dpLh3YI6KB+RCpLRJbihwssNFktntTHD291ji/e1jAnrtIhQfCRhKM0SgNSKm3AEfMOR3Wu98eY65AzWUM5nAFN8/brZdiMuyaHjmbQOicrRDcK5WiEGAYxxKZUtl9MiTOh0GbERZLF2t04RouOCNceIYB+ZRFLd8xqysUriN8jMINBZwQ6Fi/YCLonFMgFbyjWqq9fOrDSsvi7IPB/pRYBDHkScZp9Ud02aOY9xyBXQFg947AKqB9ZyqM354Xz3MDWjK74wuQKBYgC17Z6xByR1Qc+Vy9CvVl1KFpZQVQFfdl4s2z7PK84T0IkAAeu0dsIUcPbo6JuYYOEKgIAkCx3GwYY4b7h6SgcHoFUBY5zlzoJCxARPeGkosCQq5C1HkExwToMBgDrA5EwhiuGgqfvi4pIgmMIB4kElV2u9sX0KPZYhgjAPnceKYR1rfAIBeC5LvyQiWf80tQFHrsJp9P4GHRdF20fgeqa57M/gM86HFrPqq8BH3yeMAgfPEOQw4ZFSU4aY7sHzWqKtZoNFcgLjZcq6wxjR1hvWHAXUswYZgfXOYbEXPAIMCxQ58DryNOVCgLz1GtsazlOM3POZkYza82/bhodX769JTtyH7FIB/B8A/7Nn4PwP4EbyhbcgUwMPZkWuIYgzBORVjM+DkYsWEvuwkvZ6iPtTSLTbgjQkB1DfE9M1KQ/jHVMQGZtYmlo91NdjmOgRVU7Y6sxoe5juylRjiG6SOyq/NSTNL03xJFeiQcgFI4Zi13EzJCtBfa6Mox0bmEkjjwF9nSvwI1aWtSOFlee9Kv2MBAQIT52GxEGOCg9weoswUKM3JT43lUfdgq7PXxAZC+aOdiAkMzsNUX4AkRgb6wqPqPThTodGXq+RuEliG62SGD/Vdto9mEJYZwRsHAZhS/ylV/Y0i8iUAfgaAfwlvaBsyeIO1U2MkTeNutz6ARS+FJUPWjrNCfXilUFBPPDAYWzuH8LIA5GCUxTrWstVdEernFNQJ6srWN60wFkWs7y/KKsula3WCLSIFvSCAKCzsQbPwRq3eUCyCLDixhNy9aQxbNCOo71DIDLrbA247hb/cKwCCLL8wC4jrhrlEx3HgOA6cDgTHGDjdPRzpXgxfAXgT+4B
QOdnAcEyAAaCuY0WPWEe4OnVETb8gbao4H7BbJJ39HIvbqzox5oCOgTGH7WKdxtAZ49Mx4ElLjv8cAP8HAL/VMqCvALwSkW8C8A1+2ffgQ21DhosyN8tGxzyMNXzDQZbARJ21RIhK0fJVsUhjO6e5yUMqBeIPlvxUt0xTNPpBueKl8kC4eNFgwCIzm2NpgnQ5pos5GxzcLB4ZP/Xt2a2yARnpa15YhPvvQhT9OHzotZp/q2pjFBh8IniJdH1K2SMGUO89CLgGAwMQZPgMvPPAMQaOcTgAHBkj4tecA0OmB9hICTk44RV3CQYSSLBrEMpvCl+xjisyk+JLN/Dxg95jFG5uMCMrn9W3g4AqdBzQw2QvGZMMf0m5JE9MT2ECXwvgfwLw74vILwHwgwB+Fz7kNmScFCiB8ZRLg4V/HZaF2cC01V00JNuVmwdXFNjWZwmhBlzpnQ2I5KaRvMJOV6gemQ2GAvqudGjzzGQE9e1NJafr2Vq08/UE5i0LlV983VbzkawOW6/Ikm8JQTsOj5n4/X0MxyCXCPlspJKH/8+BwHITaieo6/gApHUUsTY7j4GxvhoAlELptMVnZa03Dctaur8ygAL3Vrt+tGwd3qg8SUBjp9TexAQCCCogOrznxwBgHIcFitNNnVbm6YxnSK/PNwwCdwB+OYB/TlV/QES+C0b9M6k+fxsyob0Iv+zLvuzCBMrvjH73rvzxXSzukOomyQWQwCBV6bJR+FiIBKjzlJP02ZorogFOkz6HtSAqLbJ9YTkG6fqO8hNhWN6ZBpAwp9tiQaTwX5XuJRl1KNyMHpEqgwPTYqHUgcDaCvSupFTEBIgFNOo7el3sRgjWeTjVneYOpPU/mvKvTMAGU60KEhVcmS8giLx3EFB3oRjjo6y24g+xPWJ5QnXIqSCf87aUwQHAymogfcyJeQyM6d3DUu5Cbav3dHV8Cgh8DsDnVPUH/PP3wUDgQ21DprQX4c/7ip+nFxC45Q7MUsAIvo05bI8BAB12uZLtRLkCofDISitfisg7sQGThehX7l1yIQ1xeGluaU2+UXDS/i3l3zODi6kJoaT4hWpwpc0EEzVXwGgRxTWoOZgNhHBye63+p7YDzzNZqRUAy0UoJc3nAZfzEWAbHhc4Nm5AKEMfu99rm3Mciq+k8DsQgAKa1bV3AwgK8svQf/EPEZso0ieXOg6AO8ZhbMDrPQcHjdnKO3j05zOowFM2H/kJEfkxEflFqvojsA1Hfshf34o3sg3ZNcA30QEgAnFr9N1WdzFfNyLrqfCozy0IKJIxgQgKBlD0hSp5P/hSjoxXUHS+B+Di1yhXIGIQm0avL+p3fB0DyAoSDCjJQ4gJ3HQH3PIbnZ15J90EW1fLvW4jf9GCRmOuCr8DATTlB7UHvaPqzQKDZC3pXZgJjGHdcD5y74Kb5O4ziwLVox2XiwW1/KlqrTOhuFYDt2V8KV0uGeTLHZC07oeX6ziOBIE5z+oZYfdnAdqnpqf2DvxzAP6QWM/AX4dtLTbwhrYh28cEaDAHs4G1j3eqDQX2jUYk+NriFhQIkFCRO5CfCb0zf+TrVrdSjdNu+9mtLEwCYDQbqFv9uA7d0pN655VJAKQJU9RhHbFPGz661HlF+rJCP7MeqloNqOc/KPogEGCGtQo55fGGwl9AwcHjAgJcdj9vAFAuwcFugVQAM+MNc1zqt7VxxlEWNsDMiNyfWN0HMYT7onSU7yzXyhDKMGQdR9wlXILDg58MAqcPDuOxEt6rwGNPnpqeuivxXwHw9Zuv3sg2ZP675bNUP/TS11sxgunLgjkbiLXXWEFIcTSU3Lu1IOIN6wpiJju/uojJEhdINtJoNPkEQCkB/xPQi7+plEBBFzNAhRJtBToBC5UnlABfyhagwEAWGIDK55CysJaHVYGJ0kNwUfjS5A4KpCSlOGQpyXpH/Y2gyK4k4zgu8YCwrDG/oNjTDRkEsYILCGgHAWcBEWWpaEs1G7dNqT+1f52pegvQInegMYE
5cR4Hxnka8zlHtQvV/XNWFnghIwavIDAj8t8Gp5D1Vy2f3LuoRF2aqUGagoUAckxgZQY7K62rcPSRZy0ugKtDIAJA+7PYGjRTH7mlr6IcaV0XplDP6vR17SK8MlYLiLLgFKDxrbuAjjH6ubRglr/sp2fLy+UmELDb8zGXZvlM4BBKwfGAowFAjw2s7herZrP0XGc7cEeBgKNAO16T0B/hBo16RTcII15enugGDdCbc+I4T8zjwHkeOIaxgRqBSvX+xPQyQADwCRyUBgW1lpFpNd5bc009oQaLrp/OBqzCc/FOLbvQAYFjAkB2GzETWMYtaLoFVzALCntR9ACEyANZiCY0aVGRlrCda4JVQFAy7XWELn8KrTiFXyvw3oFGBRgAJBfBMFdqpIsQAtj79Ps05SoglnNe3gUA4usqXdXtEdbxOKh7cDdWQDJgVsFBej6iqFVqXf7a5qP2HiYmDE+5WltPsLGXbCpWUqn2zADpGI11RVkDBAwAzhYTkAwOEtg9Mb0IEFD4uHxKQ9HWuc+IfO4mS9NVxafYtqhO3IloKLBReAOEYg8rNdcrPSQ2kPGJFIgKICUXESA5RlI2VL6aEuc3RSsXhtDOlanpQkyugHKdSAFAPDdcBL7+Et4I6+4TXqIsqWyyBKmaJUZT/ihqWcbFQnpmV9ZSJUUqPA8Y2o0TWJlAAmi7K1deb+9kAjR0XWI3K20/6W3Y2F6rSGI5i0wEo2pdhMUCjsPmh5zHLbBbGM8T04sAAWAXE0BTrg4I3FVIgp7uODVJ6pkrIyu/H4OO93UXrUwMYC69FQkQ9LOUbSfbBACRsWIjwj/z70qIyqJ2y9GErFAgKe5aH5pwhIyfBgAIeGk1rkNB+NexqAmAS1fWbsReBhE3vL7TY2nP1ACBVqE7JrB/ZgOD8YhyED5WnS3vrfeHAKL7ib3OWlsiWnlheZwKsDgweIyRQ6QDBPJcsoAA6BXwnpZeDAhcU5gpeiPkvaA1W7zNvUoG4x9bfrY6srxT0stBzy41gPnHNn7B8q7te6aFV2tQ96hjHnK7CHUh3db4ZLV4dyiA2mrco9xRI2udsYUS8SXyXVmOZpGoyy4A4Rg+pLtx+jrY6mMHXEOocFGECxO5TKbR87uvJ75ma6ifkVhmGsN77Pkb1yBuJMs9X5+BbkSaTEkH1cfSCwGBa99zdvGsg0j4ogTfvbW4PmWtGL0epZBpVTA3ck7wmC3Pds3s1w3ruRgxNbkJYzTcRnixlPnGd+kT5kARerE4sQtxEca6Z4ihWXwrQxRxh6875Y+uu+HWSnLJnFTxR1qqejTQgL2zrdj3sBYMIReGqLlc/iHLyG2bLkK6SGQIFARifodBwc9kG92Pb6+LS8JFLtCbOTzeV6Saw3oB/BmqWhu+0MItHMztd38awr0IEBBgDwLSgzqF5CCioDfAoN//dkr+fBs/UndC+dxnI6sU6xSIqM3qGhPTuy7nnC5LtxR9o/REmUOBJT+L19GBIQeBgdBvR1PstFYgOV/yARFgVhljSjWDQW+jkQCQrCBpqgGC0A+LzZXARj+ELm1Y7K7GYkTXMK9x2JfUmiiKLlnGHUvAWu/RyE1ixFcP8U1FfKfhQco/CIR3AHCh6Ny+VDPl9s6cFjzOE6fEACLgGKNtAz/n2dYYDKSOsj41vQgQgOxBICdGtG6Pa+E4GLbclppTUi62NyivsJs9arSg6NNXiBlZ6RYjEBGfiagQtWssgFj96p1RdEEcJLAgMbnECxwQWNBG+oS9ri6ywAqwAFGgXVB/SbZDMw4pZTedR+kPBgF/DxAIl8jcOaXPuWwrggkE/Y9NQWpk5nQQqF2Q5znLgqavWPUWwNrKHUpC59DqPCrNy+5AEAuJl3s2GiDUzD9mATU9uikntUvFH/qyZ+eckHlCHuxiHcOYwMNZZee4FEB1+fT0IkBAdu6AMMouQpuNpc16xKn6uFF5WQ5XfrpgSZLHpqy2WswYFkyzJcutIYZqLfutCp0
DcwRY0FTZBQiyS80uXABNl6wHE+iR+bWLqICA643+ba61nZ1iHwPBSOXvQBDuyBHR61B8GrwTylCKTwG3VPgqI3disEKor2YcVi/2PJi061HO5Mz8VT2tDsHqKICUUwG6ju4xAFXb6Sbc1IzKbwOSR5/YM6TJMJUarVdm2upD85yYcuKM3IrVSbEAWjsxl3Gr5Unf9ASijz5tmEApBlXgytawugO3788sIN71xvsuxJ/KMqyPfKjAFhrjMQpIIEgfdUQzd+VfB3e0fvVGi6ukPUiqi9+5dBGl0C+oR1aRX8yyUvknMFvsg8BIxLuvRovUJwAcPOmllLoFckVzj4do8yqrphtQ6xfyIqfLPgcUM4jCssVf4yvlImCjmAwDxgBanEHYTX0kFhAsbdS1F2aWQe6oo+nL3504T8ufrbxn7V1lP2nZtF52A4y3zR3YMIGwNGtQ5bavc4sIsQ3oStHAMh3WJUZOfnkoq4rkopAzXQgQEIAsHVsnaQKxG1jDIKD8zhTaz5uvXjEBDgx6xskiBqcRwoKlXkWc/cY6h74c1gwgcGDyNoiuqkEAkK7B0We+xaCuGuQ1AU3CvTalKwUvZFq7H/PuRz0mEIFFanuWmU15W0wgf7bmqj5bvdWKSOu4iDGWkYwL68tnxs1QbKBWpFaczsZy6U0FdFQ8ZFJMoOZ7dLl9anoRICDYM4GKCXBjLSQ/u49ACryDg+7ttZR9jp0NlE9eFiAaP2JPcKrffV47U/TWLq6Vc0ZT/no3QQl6mMCydoP6+QIVcgkufeIEf4vlZ1jIa8P/dQYwp2IMX1c/ykrPbgN2FgDgSS+51ZvEmn6+oea01fvtnlFbVdZaHLQvclo7HxUAzAwMlgwFuS9Fr1gJKyWEYECx0SILChprJVeVYgF7RnArJlDuB1r72lLqU2orelVAD8WY0jaANXZULCC7yOWGCtxILwIE9u5A9BAUopZpu6qyrkdPrgRSYqBXZmQuKSCtFuz5FVJ2vs96ToAWwKs180YHB/HNRpaBSDylOmiykNBV7wDNlluEPUuzgoFUGQW+u04CwExfmAfLiNQ6f4Nepvw1wGWMUcouvnVXzPue06f4lsJGhfGQbN6/oZjAg+98dC7uQLVCFvviElhNMC+KeieemH85xMRMoIZKv2aw0sI4Ops1VxEJegpJFqJZ76rWvrmEPblCMay5ZE12KnIzvQgQkK07ABqT3hsw65DbXNd77p6zSY08XJGjZKh8QYuPxT40XXS0/bIXKJWdwYAsSAhVBYgoWDTs3XofbAVbQbdCrceBrTvVc4HB1U+WIbCJTiaEw4NhwQICeKInJAYEHe11tNcYI6ds51ZfgtjFM/dY4JhPBsty6fKT1tV/2MQEdFnrkUvM9L+7QBf3oMnFgK3DKIgt4ZL8ieC25V9iAhLxngCPnQx2JmDVpPAJNGkEpqCYUbhCtLoVR1qfgQEvAwRuM4F9tLulLPjKAHpl9F/JtpLCl+fgUoyyS2UZ8PEA3gW4FkToOMsiCQKXoNHGmscqy7yW/8hNU2buogSguwEUFyjGVKQf5NYg6PFGEdKP97X4c71HtaW9bLr1qDhArn6zvO4cBKb27ExXKA2AnakcEk23uAO150HFBCZbxAzOatY503xyDNo/JPjYe4f0iIMMlwO/d9Qzt18DhoNAgIEHjZWwpBYADNvqfErV+VTMYXXE29v1WAiX3YKbT00vAgQEgmMBAQhoRlVnA9VM1mSXUAC5Rvy+h+G4fu9GCJBDbIMuGxuoHgHJe1P+knIiGz0U/RjHzRFmIj5nfBbdi5ecXgez4tXVM7DpISDrCsqXkJvAlnKlkcYE1teRlv3g4cE0aOjCBGTW86PKFVaHEKrbatdmGU97rUwgN0DVUoYW18lmWVyfRs+9na4I0GWA7tdXQZaluzCOD+R26lHvAnpmlz2FkaPYoES9LVXFemhiO7OcPKdV7owLLOV+YnoRIACgjSwDgglQRTeau2krqsybz6D
X5UICgji0xpcCAtQAskGbgoIUiP1rpAAECKyTbK6fo6HHLF94ehDolAk5JyAnIlqdfudY3AHS/u7z32YB5g5wbRkTEGGlXAKD1CuQ3YQXEODWqriC6oCNsDShj0YNy5gbq2jfDfnh4aEFDC9MQNHK261+tU13D7zMN4Aga0Uk2VaNY1kWAQlg9HJn0fK9s4A8UtSgM7fmcTyDqUTwkGJDKxPosPX69DJAQGxW2HKq1p6XWntt9ek4VYTehDV2eimf9sbOOU14yvrH8eo+cM9xU7DHLI5IFxKn03KhjzFYx4aJJDUeyP0Ik9LH85tFH+153J3Vv792vYoE/RVXfCB3QOadmdV2h8oVf5MFjM4OvEzWNprKLarpcthOyxv2EoxlaeEdzHen5wpsaa2Xsu7Lvrm7uwIq2uTyUn/JsIiFrkyMS6OASLAXp5ttWjvnJ1ivbpQ+aFoqwaYct9NTNh/5RbDtxiL9AgD/dwB/EG9oGzIRuYAA0PenzzXow02QzhJWK8JdLSLA+QBA0AeYzBpptQ8sFj29UGWEwdko0iqE3K/crAZZFAokaQT9Mg+SWcwXR+rV1vMPCeTxAmuUmoOtl8BrWkvv8RDxgVAmkMECos2aG7BawbGAgCuR0nTkrB+V2iWI5owMotrHGDiPgeM8oMcsZRPYRqCXZL58Ru+XwGnNS4kXWwAqe+w/QS7hEK7rYlr86FRjj3uoAgJN1zKYpeZ27RsKkoQlmErJJfcGlCcQIVVQXOv16SmrDf8IgF8KACJywJYP/2OwZcffyDZkgisTgBd8UKPd/ky2mgJKjYZ6pQQTyIiyL1HGgcT6nWaARUAtxw1Gyj9I4fuOO2yNRweDy1DiWiY7TUplJ53zaOPm/6Lobe0UNC4sgMHzshRY3iyUQHxQFT/TJtHk8lfRRXiji8z8foFo9BToVfmF2F761/1eh2++OuaBi8Ls/buMw/DuSXmOgVolyxxb07sUgniM6/5ooBl1H8fFSAkAgt6HGKU4uSwxk4+2BMtA5MSeUAvhLKGwdczAE9Jz3YFfA+CvqerfkDe5DZmYv7ycQgRReKBFsgGijsUEgtYXEITtVDV15sVI+qIgAANBNY92t2NpLLBFza46SWufx6FspPg1yKeOwxKzP8sCXwG6HQrY9fEsdj+aW7KjssGo/J7mEklhXloYf9SQGh+wKr5UkEzGsFWiWg+DpBugg0YUJluqaeR5T19U8xgDegxQxjbJWICqJhCVa8TKXz0yurgDgtiFSp0FSLkDDJzkmnGOnsQEVEqpt2ym8sJsQPkmzSXoLPGp6bkg8M0A/mM/fmPbkO2YQFV2gUF8HsvnbvFBAADkCsSz6DMrfhtzrt74gPmB7GOsDCDymRS2rHwbBUhdgcwKarLPINYwXPlqEk/3QqSVIYCqbBaxko2bcX2hf86HdWdop3BCTKAp6hCMQxrT0aHN4pvlHbk25MpOZAUAWn57nAdi5KIbSErBi6utq+dkid4Lu5kDw3ebLo31GMjyuTEBocZhIAhrzLdzY18b3yJBZXUGklV4GyUhEF8xW5AuWi/681kA8AwQENtz4DcA+I71O9UPtw3Zpz71qZvuQFP2OG7nSGC9YqMO5pxO8QQqtdee55niB+2xfi+vZJG4cdf/8BNJ4Zqvv+n6uwakrucg4v3qkpY9MsZj48OyBAp0wQmw6b0OTemB/syoy048LgAUFTRE2ijB2gDkaMBn7sBYwFcx2xJvxVDGQtuLzZzVG6FLEE/pYAUBcgdaD8oNl6AHHtMkZOBOgJtDszkvSdTT+Q92VeAinO8AidXFQLVRtK3FjLQRgSh2EsSPyB34dQD+sqr+Lf/8xrYh++qv+io9jmO9JJE+hJM/MyjkxXZjD2qZP6sowIjWuDBpxaXpc3O+gPEdEKBiFLLxYTlQtip/KDiDQDZ+fFe1wDVXaC8stKzYRalzcQ++d3sOiHFU+fpxPIJytLCAOuZBUeEOKG0F5nVFbsFcYgMJCAs
oHGNgHgM6V4PRKXG4LkO1j8qUyt8OiF3Vm/UPsy1atprBM5R0EQvXRiml9/qrW2r2QAFCe+h2thlym88IIFiIQLrCKKB9anoOCHwLyhUAbLuxb8Wb2IZM9r0DxbTIHjIDA4GApwQA9Ype7sV0d/nGFbP8tQIWuixBoZ5fW0OzD1sr7KwgwOhePmW9z7aDLpUNr4kJhEvAk1qSCfj39J5C1d4ZdK+gENcGE8gdcy8sqLol1YGg+rilgULOyVgCd+xmHHNgzsN3410FPPxhAgFDgUuMgbdS69O501Sn9Q8lLv5+DQym+PmBq2HAiQOAXacZX7jKGEFMk8dqK7/HAvoLDpRsvGkQEJGfCeDXAvi/0OnvxBvahkwEGGtMIOh2fM53YgV5dXIge/Ox10rnqprDz4rj5PX0zEDaCNjsWYDXzcX/Pkj5ckVcGsq7Kn0pnLgHMuoaLikVs9waEoRGr68xgahrrscoQ9bx4hrke7NElseDuwePrrQcG7FxG2Hxh6/H4PMfpiviLAvdgoqNVdlKuzaNm9s1jupcBCFzmnYylG79g22Yj01q5WxA1nMg9udAUEhQWVL/bfQAJNVnTBHttyYZy1YPWfX3GsLeS16eoT5H/wE8fRuy/w3Ap5dzfwdvbBsyG4J6PRvvejm3HrOvfwmSqDvOALBEdTWFPm44kv1DhA1/nHRFNbSv/nchAKhBNLW3PI/nD8W7frbHzlToNQUAVEN3y4AEgBB6A6Kss06Jmvx2JQ8BBwk8ksFUF2RtD95cAra0OTLQ9oiYqYAKHQ4GOSaAAay7WLb5xsBQNhjVs5NDZ0dR4jbBKuMkPEYiuk19Mli4Amk0+Jy10OrCSa9BzlkxAf98iwnwr8IoBQhHvYe8EnqXTMRvtVyCp6YXMWJQZDdOwL8DwE771frHIdmBdURgrPYLQAY14hAIaF+9vL8QEBAL6O5a/m4XCOThswkCxafpRv29FLCDRoBPNjcxgSI5nQlwnur+jwvHzZ6D5XyMeGyDg4gNVM9ITYSZOiA6Mx4QewS27cMHAWvcc3K9HjiY3TEYRp2gVncqMCqrX8u8jeYOdIJNrgABQQsIJlBfeWn03kSdV1ehEBBQ/reAIJt2ZQPCidjARxgT+MiS4AYIrNaOlCAHSwh8DLVjLUegc6/AWVtxaa2eOyA2QQw1gi3ykwfZpdMoQeWdBuDkdtIH74xjQGDjrPjGQJ/pVajOVDMtQDYwN3L/PV/fXRR+9i3h8HqVUszdHgf5fXbb7Xb/qaCgiK9NoDYJRmVkzGMMAoZJo/iW7sEICI45yh1gkSAWkApIINDWb9i4BAE+pf/kClwCxPBpwcTUmv5qskhE8E8jJuAgvGECQeX5TlKtijBMxc52LVjlfutAwOjl6g6wny/92AMseYkEtMZlBQBtFh7g49ajgoatCDC8rbUaOEjAFZ07KeANJGugDPcMHHkcdD+K10AgP9cEkm1Da72KClfGDAhWRRrMaKvigIuwhHJ2hR9N+UXkqvQcE6BIvAFYLcMuKhg6ckTnEBv2y2tJtsFGzGgOCwyGexNCz25fdEfGOWZqwkBA7KB6bK6Wv7d2AGWwte42JUgLsveGLb6IFABQSxTXoyMJFoCUhQT5hXmEXDRi9Iz0MkAAwD77K+1b6B8K9a8ISJ9TQND7ZiMloyNrCpIHIPt11a9PGElLw8OFpQsKl8dvYMXRpVgKWzQ3tjmjVyy5TevujRH976j3VqhusezNI9fh46aXwVS47tGsnSznuKVUL69QdF4Bh+dsxFLhcT72DeC++mJZ9jqOPuYAyzNj8FGVmZjN+kL/jFRYrgsaTZoumAc74a4OCmATCCCI0aZC+Ui5SPnoXaAcB7kMxBJZ6tpHRWr1ukRX63PSywABL8T2vL8Lyvoh34ICLsretSFTVj5uCwOkV2II/gWD4vsR6+u75Ur6VvmaqhiThUUvQsNUNpfTzmW1a/GMWmdAAcSCGuusyMzdtUp3n6leMz9ipc5
xk2nB6lxQ2KkzrXuWGzVKMKcA00q57eVlPM8CulYOCYYyoDpwDG5rYxcyF4UGKZ7fpNw9amfZKw0DDFi+AKuIAZu0NeNdMEYAg9dBsgRJA9H2IeDPGUzdBESpuzXq1x5yUD0Mc4tFfWDc09OLAAEFaphvO8tvpIFMYd2S8D9OacUlzf0eCBaLYb+NSsdFUILKMwUuIPEnE0BNzJKhaMPIP59T9fX0aD2BCwBMr6/RtmnvbkLUBXORqlMmRM3yhpKH0msI87V1hBR16oRg+MKhMbyVQeAGAOTquVy2Ur5oh1QiHTkPIcBHp9q5WHWpATxcCf1OpPirHCQW+krIxbA6IGT7knMevUU64PmJ+RGGFxynOTKeMhIQypCMxb0skEgZVAWOkK2jWMFgQH66Y/AiQABQTD13pwF4nTfF9z+pVHNhACTWIjloKChYCEPRtBAGps+lzEL3ohy5MPHAmOrfz6y6INmxkrJHI7KwFRNoa8szAJy10AYUbcfcUHzyiCpJr8KliolliQs0gtBerD+fzRtMADJxioCXY4MA8zzb9lm8bPZ2S7HcTMOfJchRhhgDenjQLwHAVuORIRizlkvPNkpr7O0pnQ0UeyuXKlhZtg1PO19AQCCYEVic6j0iJLNhYChoHAHV2seRgqLCYFAyZmVyBgAFNOpiJAiMOaAyMfadbdv0MkDAhflyMo662SoVY6uXfj/5cJSK9odwVgPeioCzstd7Uc0EkzF6LCDzHRuTmL/WlZ2OCQxyPT1X/KTJJwFAugO8iWVQ2KDjtxLNRU9wuta3CL3Hr1SyKoCKLRTfMPrAAADghvWvZcNPcnl4uSwNAJCq5+H51uEg4Hs+Dn8Xmb1NHczXduZzIQt9MH7IVSzlVUCgo5iVDBsEBXdJdAimguLUNPOQelTuaB3GY3EPMvCaxyVbRgGcARyKQ9VnYlrgNVymt44JGIDeZgJAsAH7EzLLypZCQzRy+XVZ9sYCHgeBFBKgWZX87GxgFbosgmruY2EUdrEwy7k51RX+JMU/mxtQ7gB8XYTOBsKUZU08EidKQLAMpnI0BqD9HlG74SqkX0qWNutb0ZYGq9ck5Y/NNG7sH4Cg077pSwOBAoAh5Q5w00vmid45RiDFcriMwQZ4H8TompVwLamoMzoYUgyJCdCcihg/cnd3h7u72rOxgpcAD0KrvSG9XUP5s/w2n2KMwzfAtTp5anoRIADQ1F9KLLtKAlGuLQl8WtW8Zd0nMWABgLASi/LXxqDFFrAcr98ZZoRglO9WzFyboudxxDT8eM7uDhQj6K+YIWkbcWoHFpRyV62RsqPXk9J7TnGN8REXB0CyDWJodd5PgbZzz2MgsDk3yffmmADEt/7w3hDAJgcNHZjZJbmJB5DVbwDA7XdpX1LgyIsD7dyCgLsD/C6jyWKCWIysPA4cd3cGAu/dFRNoMojLZwBOBAwEj+FgOMwlGmp1MIZgXiZZ3U4vBASAXe/AavHtusWweYOVBdQUFKDEnwEAq6BgeRcBWqBvERof/deCgPkmLWsJBFprF9RWXLhsKnImCMQefGvQLFwCBaTocy20WYBImaL3RFB2ABojqLnu4sqeHV79Lkvwgd2K+BtdhVcQeMgAaICd5hoQfL/w37XqHLYF2nTlmTHG4DLF13+/MDs2Ak0GQpxQ7losQtOWo8vl30L5zVKnGzAnVAqwENbcBz4dY+DOmcB7d3c+mEzSjRKRDCbyuaqXcDFtCPWYzgh8ZOVbyQSCcvWT9sY0TUp+GyMIP7j9cLWEPkuMBYQruB/7NSlUDAjLlOAbT2znpy1LWr6ltqj+JHbQVxguMFjXRuxMYBaQEAAQFl6oLh/X9S5kZBFvj5hW+k3FNgr0Ij6j5fdHeWL3IAKA8zxbhvlZncoDAhd64Sj6rYVTVsYXN/Q2Z2YHySczEPDW6HNqyQUEuYvzNDBQqd9FeQQxqIyGk9+5O/Dee7g7jsxL5Qn9HIPAnFCn/jom1KdX8zDxDbG+mV4
ECAC7LkISvlX5IZ0RuAUqBtnV0Xwpou6rBdgJTgOAsXx2AHCOpvT8tMT+BVvmWtuwQKAGy7DFIeUPNkD96DkCUgRziQdkXrY1WUmXM6r9W7b92QYSnCFAgnskiD6j56n2zmNXYDa3YJ5n3jmtODM3lHIorjsy97kA3ddHKhOxgdWla8Xvbkm2le+GZJY/ZAfQGA6tEzJ9qDS5ggEybaj1YfGA99wtqHwSCITsBgC6gM95YOjscypC+WlE51PTiwEB3YBAKH3Q+fRbmbrGwebddT8vbELRaP6VBYSi15BZWommdQfSEmVuCeGoPYEmSOxbNsHK43mJCUzvKbh0FU7fD4Dcg7Rcq4Yv1cMn6tJyCGzkmfqiLDwHrtQlaXMo/LIGfuwj2Lo8J/UGOAhMOgdv54zJAAUATXEj+FVzFNYNajqQrMAgpFxXF6JcAq/TGfVMw88l3IBZ7zKSptdYhwIgjgnc3R24u3vvwgQ8yym/8ZlB9zg8P8fEmDbFeh22vjOqt9KLAIGtO4BOQ2O/9QYKeQNsjsl68D0XmphuwCIQbUXeUHoCgZiayhZw6rQttoiPp0UkZS2L35V/Tk1qfM7YdYc3IeGYwPTuqYUJgJSzVU0o8lr5yPOqNQe+ASy5BUvDJQDkqMUYwUhDnTMmsIJA+3xaGwxrteHrLTJdD6uvql3oA5h3E55QGd8CAH1udZJ1OheQZhAgv917J1Rq8FLUfspYrMtIvQONCVBL8W+z9ThP88CRQNDZwNvLBG4EBpOUJl9lW0SfFWgqr+TFkhCkZcAaE3gNE1gqONhAgMCcEwPDdgliKrgoae6qsyh+YwLpAlzjADFMOKhncweIlveQSPmTUVUMFowW5AAgrD+zAVaW9JvD95+z+c8tKPgaAAjlGu7vqzORosUFBAYCtfBIWyvQ27jKXpa4AIEBoEBmNRhZlToboFl+zPrb4ByFisV2RmvvykRzB8bAcdxVXIDcAWgHAWR7EKucE8eY1TPSuhjfUibwWDLrhAspbWKqDAwXe3UjyeXa/BS+I30uatbjCgBy0krl9VIICpyhAjwZQIoP9Tksc3gYUbq4hn34PNb2dqPMV9p0w3t4YlryQvlZx2/Ud/FZ27WQPssyXws9jk+stKsllfWEvzMzqPteZablq7KcbRP0qNVgDAiri7aZSxbR2Gd7OuWpKqBlW8KIcV101vPU9IzBhS817Xjq9qrNB91f8FplegPpKT77c/Ds85LWSvqwNcQs7oPfQTbHt265cYjeXPq8tcObff5bAgKvKd2T2nWNIejm3OcxPaXB9OaHF5CeIXG3NfID683ud0+poZ3VfzHpwzbxB/z9WwICH14B9JbV36Ut/Xz+E5+bHn/e499+/iHicder5eeRrH/QfO9+94LV+82mPa39QmcCbyKtVv/pkvnB6vb5v/rA7sAHfsKHecA2+vH4na/BkgqIPekG+/RFBwi3GOIXNhN4rEmfxillywRu1NriquoHqtzH7/2kBtPLwbMedfvhbyI9/tDtt+vjIxC4u/g1ZdrF/fhnL815+miTl/YDNq/oB5PwN5pE5O8B+JGPOx8fUfoHAPztjzsTH0F6V663L/2DqvoV68mX0kX4I6r69R93Jj6KJCJ/6QuxbO/K9YWT3hJ34F16l96ljyq9A4F36V36Ik8vBQS+++POwEeYvlDL9q5cXyDpRQQG36V36V36+NJLYQLv0rv0Ln1M6R0IvEvv0hd5+thBQET+KRH5ERH5rIh8+8edn+ckEflqEflzIvJDIvLfisjv8vNfLiJ/RkT+B3//uX5eRORf97L+VRH55R9vCR5PInKIyH8tIn/CP3+tiPyA5/8Pi8iX+PlP+OfP+vdf87Fm/DVJRD4lIt8nIv+diPywiPzKL5Q2+yDpYwUBsa16/00Avw7ALwbwLSLyiz/OPD0zPQD43ar6iwH8YwB+h+f/2wF8v6p+HYDv98+AlfPr/PXbAfyBz3+Wn5V+F4Afps+/D8DvV9V
fCOCnAHybn/82AD/l53+/X/eS03cB+FOq+g8B+CWwMn6htNnzU1+f7vP7AvArAfxp+vwdAL7j48zThyzPfwbg18JGP37Gz30GNhgKAP5tAN9C1+d1L+0F4KtgyvCrAfwJ2KDUvw3gbm07AH8awK/04zu/Tj7uMtwo188B8P9b8/eF0GYf9PVxuwM/H8CP0efP+bm3LjkF/mUAfgDAV6rq3/SvfgLAV/rx21Tefw3Av4jaSODTAP6uqj74Z857lsu//2m//iWmrwXwPwH4993V+XdE5GfiC6PNPlD6uEHgCyKJyJcB+KMA/nlV/V/4OzXz8Vb1w4rIrwfwk6r6gx93Xj6CdAfglwP4A6r6ywD8byjqD+DtbLMPkz5uEPhxAF9Nn7/Kz701SUTegwHAH1LV/8RP/y0R+Yx//xkAP+nn35by/ioAv0FEfhTA98Jcgu8C8CkRifkmnPcsl3//cwD8nc9nhp+RPgfgc6r6A/75+2Cg8La32QdOHzcI/EUAX+dR5y8B8M0A/vjHnKcnJ7GF3P5dAD+sqv8qffXHAXyrH38rLFYQ5/9PHnH+xwD8NFHQF5NU9TtU9atU9WtgbfJnVfWfBfDnAPxGv2wtV5T3N/r1L9KSqupPAPgxEflFfurXAPghvOVt9qHSxx2UAPCNAP57AH8NwP/t487PM/P+j8No418F8Ff89Y0wf/j7AfwPAP7fAL7crxdYb8hfA/DfAPj6j7sMTyjjNwD4E378CwD8fwF8FsD/C8An/Pwn/fNn/ftf8HHn+zVl+qUA/pK3238K4Od+IbXZc1/vhg2/S+/SF3n6uN2Bd+ldepc+5vQOBN6ld+mLPL0DgXfpXfoiT+9A4F16l77I0zsQeJfepS/y9A4E3qV36Ys8vQOBd+ld+iJP/3/8qcVUArouIwAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import mindspore.dataset.transforms.c_transforms as c_transforms\n", - "import mindspore.dataset.vision.c_transforms as C\n", - "import matplotlib.pyplot as plt\n", - "\n", - "cifar10_path = \"./datasets/cifar-10-batches-bin/train\"\n", - "\n", - "# create Cifar10Dataset for reading data\n", - "cifar10_dataset = ds.Cifar10Dataset(cifar10_path,num_parallel_workers=4)\n", - "transforms = C.RandomResizedCrop((800,800))\n", - "# apply the transform to the dataset through dataset.map()\n", - "cifar10_dataset = cifar10_dataset.map(operations=transforms,input_columns=\"image\",num_parallel_workers=4)\n", - "\n", - "data = next(cifar10_dataset.create_dict_iterator())\n", - "plt.imshow(data[\"image\"].asnumpy())\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 使用自定义Python函数进行数据增强,数据增强时采用多进程优化方案,开启了4个进程并发完成任务。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "before map:\n", - "[0 1 2 3 4]\n", - "[1 2 3 4 5]\n", - "[2 3 4 5 6]\n", - "[3 4 5 6 7]\n", - "[4 5 6 7 8]\n", - "after map:\n", - "[ 0 1 4 9 16]\n", - "[ 1 4 9 16 25]\n", - "[ 4 9 16 25 36]\n", - "[ 9 16 25 36 49]\n", - "[16 25 36 49 64]\n" - ] - } - ], - "source": [ - "def generator_func():\n", - " for i in range(5):\n", - " yield (np.array([i,i+1,i+2,i+3,i+4]),)\n", - "\n", - "ds3 = ds.GeneratorDataset(source=generator_func,column_names=[\"data\"])\n", - "print(\"before map:\")\n", - "for data in ds3.create_dict_iterator():\n", - " print(data[\"data\"])\n", - "\n", - "func = lambda x:x**2\n", - "ds4 = ds3.map(operations=func,input_columns=\"data\",python_multiprocessing=True,num_parallel_workers=4)\n", - "print(\"after map:\")\n", - "for data in ds4.create_dict_iterator():\n", - " print(data[\"data\"])" - ] - }, - { - 
"cell_type": "markdown", - "metadata": {}, - "source": [ - "## 操作系统性能优化\n", - "\n", - "由于数据处理是在host端进行,那么机器或者操作系统本身的一些配置会对数据处理存在影响,主要有存储、NUMA架构、CPU(计算资源)几个方面。\n", - "\n", - "1. 存储\n", - "\n", - " 当数据集较大时,推荐使用固态硬盘对数据进行存储,能够减少存储I/O对数据处理的影响。\n", - "\n", - " > 一般地,当数据集被加载之后,就会缓存在操作系统的page cache中,在一定程度上降低了存储开销,加快了后续epoch的数据读取。\n", - "\n", - "2. NUMA架构\n", - "\n", - " 非一致性内存架构(Non-uniform Memory Architecture)是为了解决传统的对称多处理(Symmetric Multi-processor)系统中的可扩展性问题而诞生的。NUMA系统拥有多条内存总线,于是将几个处理器通过内存总线与一块内存相连构成一个组,这样整个庞大的系统就可以被分为若干个组,这个组的概念在NUMA系统中被称为节点(node)。处于该节点中的内存被称为本地内存(local memory),处于其他节点中的内存对于该组而言被称为外部内存(foreign memory)。因此每个节点访问本地内存和访问其他节点的外部内存的延迟是不相同的,在数据处理的过程中需要尽可能避免这一情况的发生。一般我们可以使用以下命令进行进程与node节点的绑定:\n", - "\n", - " ```bash\n", - " numactl --cpubind=0 --membind=0 python train.py\n", - " ```\n", - "\n", - "3. CPU(计算资源) \n", - "\n", - " CPU对于数据处理的影响主要是计算资源的分配和CPU频率的设置两个方面。\n", - "\n", - " - 计算资源的分配\n", - "\n", - " 当我们进行分布式训练时,一台设备机器上会启动多个训练进程,而这些训练进程会通过操作系统本身的策略进行计算资源的分配与抢占,当进程较多时,可能会由于计算资源的竞争而导致数据处理性能的下降,因此这时需要进行人工分配计算资源,避免各个进程的计算资源竞争。\n", - "\n", - " ```bash\n", - " numactl --cpubind=0 python train.py\n", - " ```\n", - "\n", - " 或\n", - "\n", - " ```bash\n", - " taskset -c 0-15 python train.py\n", - " ```\n", - "\n", - " > `numactl`的方式较为粗粒度,直接指定`numa node id`,而`taskset`的方式是细粒度的,它能够直接指定`numa node`上的`cpu core`,其中0-15表示的`core id`从0到15。\n", - "\n", - " - CPU频率设置\n", - "\n", - " 要想充分发挥host端CPU的最大算力,CPU频率的设置至关重要。一般地,linux内核支持调节CPU主频,降低功耗,已到达节能的效果。通过选择系统空闲状态不同的电源管理策略,可以实现不同程度降低服务器功耗。但是,更低的功耗策略意味着CPU唤醒更慢对性能影响更大。因此如果发现CPU模式为conservative或者powersave,可以使用cpupower设置CPU Performance模式,对数据处理的性能提升有非常大的效果。 \n", - "\n", - " ```bash\n", - " cpupower frequency-set -g performance\n", - " ```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 性能优化方案总结\n", - "\n", - "### 多线程优化方案\n", - "\n", - "在数据pipeline过程中,相关算子一般都有线程数设置参数,来提升处理并发度,提升性能,例如:\n", - "\n", - "- 在数据加载的过程中,内置数据加载类有`num_parallel_workers`参数用来设置线程数。\n", - "\n", - "- 
在数据增强的过程中,`map`函数有`num_parallel_workers`参数用来设置线程数。\n", - "\n", - "- 在Batch的过程中,`batch`函数有`num_parallel_workers`参数用来设置线程数。\n", - "\n", - "具体内容请参考[内置加载算子](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 多进程优化方案\n", - "\n", - "数据处理中Python实现的算子均支持多进程的模式,例如:\n", - "\n", - "- `GeneratorDataset`这个类默认是多进程模式,它的`num_parallel_workers`参数表示的是开启的进程数,默认为1,具体内容请参考[GeneratorDataset](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/dataset/mindspore.dataset.GeneratorDataset.html)。\n", - "\n", - "- 如果使用Python自定义函数或者`py_transforms`模块进行数据增强的时候,当`map`函数的参数`python_multiprocessing`设置为True时,此时参数`num_parallel_workers`表示的是进程数,参数`python_multiprocessing`默认为False,此时参数`num_parallel_workers`表示的是线程数,具体的内容请参考[内置加载算子](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.html)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Compose优化方案\n", - "\n", - "Map算子可以接收Tensor算子列表,并将按照顺序应用所有的这些算子,与为每个Tensor算子使用的Map算子相比,此类“胖Map算子”可以获得更好的性能,如图所示:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![compose](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/compose.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 算子融合优化方案\n", - "\n", - "提供某些融合算子,这些算子将两个或多个算子的功能聚合到一个算子中。具体内容请参考[数据增强算子](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.dataset.vision.html),与它们各自组件的流水线相比,这种融合算子提供了更好的性能。如图所示:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![operator-fusion](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/operator_fusion.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 操作系统优化方案\n", - "\n", - "- 使用固态硬盘进行数据存储。\n", - "\n", - "- 将进程与node节点绑定。\n", - "\n", - "- 人工分配更多的计算资源。\n", - "\n", - "- 
提高CPU运算频率。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 参考文献\n", - "\n", - "[1] Alex Krizhevsky. [Learning Multiple Layers of Features from Tiny Images](http://www.cs.toronto.edu/~kriz/learning-features-2009-TR.pdf)." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/training/source_zh_cn/advanced_use/parameterized_quantum_circuit.ipynb b/tutorials/training/source_zh_cn/advanced_use/parameterized_quantum_circuit.ipynb deleted file mode 100644 index ece9c1b10e06e29c69af0836a5e176a8121ce725..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/parameterized_quantum_circuit.ipynb +++ /dev/null @@ -1,429 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 参数化量子线路\n", - "\n", - "`Linux` `CPU` `全流程` `初级` `中级` `高级`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/advanced_use/parameterized_quantum_circuit.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/mindspore_parameterized_quantum_circuit.ipynb) 
[![](https://gitee.com/mindspore/docs/raw/master/docs/programming_guide/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL21pbmRzcG9yZV9wYXJhbWV0ZXJpemVkX3F1YW50dW1fY2lyY3VpdC5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "参数化量子线路(Parameterized quantum circuit, PQC)是进行量子机器学习的一种途径,量子-经典混合机器学习架构MindQuantum能够处理带参数的量子线路,并利用量子神经网络的可逆性来对线路进行自动微分,得到观测值对各参数的导数。\n", - "\n", - "构建参数化量子线路并用参数化模拟器算子进行线路演化的大致流程如下:\n", - "\n", - "1. 初始化量子线路。\n", - "2. 根据需求,在量子线路中加入参数化的量子门或者非参数的量子门。\n", - "3. 利用PQC模拟器算子来进行态演化或者梯度求解。\n", - "\n", - "## 环境准备\n", - "\n", - "导入本教程所依赖模块。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import mindquantum as mq\n", - "from mindquantum.gate import H, X, Y, RY, RX" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 量子门\n", - "\n", - "量子门是对量子比特进行操作的基本逻辑单元。对于经典电路来说,任意的逻辑电路都可以由一些基本逻辑门构成,类似的,任意的量子线路都可以由一些基本的量子门构成,如作用在单比特上的门和受控非门。常用的量子门有$\\text{X}$门、$\\text{Y}$门、$\\text{Z}$门、$\\text{Hadamard}$门、$\\text{CNOT}$门以及一些旋转门。例如,$\\text{Y}$门的形式如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Gate name: Y\n", - "Gate matrix: \n", - " [[ 0.+0.j -0.-1.j]\n", - " [ 0.+1.j 0.+0.j]]\n" - ] - } - ], - "source": [ - "print('Gate name: ', Y)\n", - "print('Gate matrix: \\n', Y.matrix())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "上面的$\\text{Z}$门是一个非参数门,而一些旋转门(比如$\\text{RY}$门)则是含参数门,通过给予不同的旋转角度$\\theta$,旋转门将对量子比特产生不同的影响,如$\\text{RY}$门矩阵的表达式为:\n", - "\n", - "$$\\text{RY}(\\theta)=e^{-i\\theta 
Y/2}=\\begin{pmatrix}\\cos(\\theta/2)&-\\sin(\\theta/2)\\\\\\sin(\\theta/2)&\\cos(\\theta/2)\\end{pmatrix}$$\n", - "\n", - "其中$i$为虚数基本单位。这种含参数的量子门是后续搭建量子神经网络的重要组成单元。下面,我们打印$\\text{RY}$门在旋转角度为$0.5$时的矩阵形式。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[ 0.96891242, -0.24740396],\n", - " [ 0.24740396, 0.96891242]])" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ry = RY('a')\n", - "ry.matrix({'a': 0.5})" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 量子线路\n", - "\n", - "量子线路是用来对各种量子逻辑门进行有效组织的结构,我们可以通过量子门的list来初始化量子线路,也可以通过加法(`+`)加一个量子门或者线路,乘法(`*`)乘一个整数来扩充量子线路。这里我们来构建如下的量子线路,并打印量子线路的相关信息。下图中`q0`、`q1`和`q2`分别表示三个量子比特,量子线路由三个量子门构成,分别为作用在`q0`比特上的Hadamard门,作用在`q1`比特并受`q0`比特控制的$CNOT$门和作用在`q2`比特上的$\\text{RY}$旋转门。\n", - "\n", - "![quantum circuit](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/quantum_circuit.png)\n", - "\n", - "### [HiQsimulator](https://hiq.huaweicloud.com/doc/index.html) 兼容的量子线路搭建格式\n", - "\n", - "1. 使用`CircuitEngine`线路引擎来搭建量子线路\n", - "\n", - " 我们通过操作符“|”,将量子门作用在相应的量子比特上。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "H(0)\n", - "X(1 <-: 0)\n", - "RY(p1|2)\n", - "========Circuit Summary========\n", - "|Total number of gates : 3. |\n", - "|Parameter gates : 1. |\n", - "|with 1 parameters are : p1. 
|\n", - "|Number qubit of circuit: 3 |\n", - "===============================\n" - ] - } - ], - "source": [ - "eng = mq.engine.CircuitEngine()\n", - "qubits = eng.allocate_qureg(3)\n", - "H | qubits[0]\n", - "X | (qubits[0], qubits[1])\n", - "RY('p1') | qubits[2]\n", - "encoder = eng.circuit\n", - "print(encoder)\n", - "encoder.summary()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - " 这里`X(1 <-: 0)`表示受0比特控制、作用在1比特上的`X`门,也即CNOT门。`RY(p1|2)`表示作用在2比特上的绕Y轴旋转门,`p1`为旋转角度。通过打印出的Summary信息我们可以发现,该量子线路由三个量子门构成,其中有一个量子门是参数化量子门,整个量子线路用到的量子比特为3个。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 使用装饰器来搭建量子线路\n", - "\n", - " 通过装饰器来搭建量子线路能够省去一些重复的引擎声明步骤。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "H(0)\n", - "X(1 <-: 0)\n", - "RY(p1|2)\n", - "========Circuit Summary========\n", - "|Total number of gates : 3. |\n", - "|Parameter gates : 1. |\n", - "|with 1 parameters are : p1. |\n", - "|Number qubit of circuit: 3 |\n", - "===============================\n" - ] - } - ], - "source": [ - "from mindquantum.engine import circuit_generator\n", - "\n", - "@circuit_generator(3)\n", - "def encoder(qubits):\n", - " H | qubits[0]\n", - " X | (qubits[0], qubits[1])\n", - " RY('p1') | qubits[2]\n", - "\n", - "print(encoder)\n", - "encoder.summary()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - " 我们还可以给装饰器传入更多的参数来供线路生成时使用。例如可以传入一个字符串,搭建量子线路时,可以利用该字符串来给每个参数添加一个前缀,这样有利于生成结构相同,但参数名不同的量子线路。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "H(0)\n", - "X(1 <-: 0)\n", - "RY(encoder_1|2)\n", - "===========Circuit Summary===========\n", - "|Total number of gates : 3. |\n", - "|Parameter gates : 1. 
|\n", - "|with 1 parameters are : encoder_1.|\n", - "|Number qubit of circuit: 3 |\n", - "=====================================\n" - ] - } - ], - "source": [ - "@circuit_generator(3, prefix='encoder')\n", - "def encoder(qubits, prefix):\n", - " H | qubits[0]\n", - " X | (qubits[0], qubits[1])\n", - " RY(prefix + '_1') | qubits[2]\n", - "\n", - "print(encoder)\n", - "encoder.summary()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 更便捷的线路生成方式\n", - "\n", - "通过往量子线路中不断地添加作用在不同比特上的量子门可快速完成量子线路的搭建。\n" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "H(0)\n", - "X(1 <-: 0)\n", - "RY(p1|2)\n", - "========Circuit Summary========\n", - "|Total number of gates : 3. |\n", - "|Parameter gates : 1. |\n", - "|with 1 parameters are : p1. |\n", - "|Number qubit of circuit: 3 |\n", - "===============================\n" - ] - } - ], - "source": [ - "from mindquantum import Circuit\n", - "\n", - "encoder = Circuit()\n", - "encoder += H.on(0)\n", - "encoder += X.on(1,0)\n", - "encoder += RY('p1').on(2)\n", - "print(encoder)\n", - "encoder.summary()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 利用 MindSpore 算子进行量子线路的模拟" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "一个常见的量子神经网络通常由如下三个部分构成:\n", - "\n", - "- 一个(或多个)编码线路,用于将经典数据编码为量子数据\n", - "- 一个(或多个)待训练线路(通常称为Ansatz)\n", - "- 一个(或多个)待测量物理量\n", - "\n", - "下面我们搭建如下的量子神经网络,该量子神经网络的编码部分由两个$\\text{RY}$门构成,而Ansatz线路由一个$\\text{CNOT}$门和两个$\\text{RX}$门构成,待测量物理量是作用在1号比特上的$\\text{Z}$算符。\n", - "\n", - "![simple qnn](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/simple_qnn.png)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "from projectq.ops import QubitOperator\n", - "\n", - "@circuit_generator(2)\n", - "def encoder(qubits):\n", - " 
RY('a') | qubits[0]\n", - " RY('b') | qubits[1]\n", - "\n", - "@circuit_generator(2)\n", - "def ansatz(qubits):\n", - " X | (qubits[0],qubits[1])\n", - " RX('p1') | qubits[0]\n", - " RX('p2') | qubits[1]\n", - "\n", - "ham = mq.Hamiltonian(QubitOperator('Z1'))\n", - "encoder_names = ['a', 'b']\n", - "ansatz_names = ['p1', 'p2']" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "这里我们通过装饰器的方式生成了Encoder线路和Ansatz线路。并利用`generate_pqc_operator`方法来产生一个线路模拟算子,对该量子线路进行模拟计算,并求取量子神经网络的输出对各参数的梯度值。在`generate_pqc_operator`方法中,我们需要提供Encoder线路的参数名、Ansatz线路的参数名、整个量子线路和待测量的物理量。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Measurement result: [[0.89819133]]\n", - "Gradient of encoder parameters: [[[-0.09011973 -0.1820724 ]]]\n", - "Gradient of ansatz parameters: [[[-2.7755576e-17 -3.7974921e-01]]]\n" - ] - } - ], - "source": [ - "from mindquantum.nn import generate_pqc_operator\n", - "from mindspore import Tensor\n", - "from mindspore import context\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")\n", - "\n", - "pqc = generate_pqc_operator(encoder_names, ansatz_names, encoder+ansatz, ham)\n", - "encoder_data = Tensor(np.array([[0.1,0.2]]).astype(np.float32))\n", - "ansatz_data = Tensor(np.array([0.3,0.4]).astype(np.float32))\n", - "measure_result, encoder_grad, ansatz_grad = pqc(encoder_data, ansatz_data)\n", - "print('Measurement result: ', measure_result.asnumpy())\n", - "print('Gradient of encoder parameters: ', encoder_grad.asnumpy())\n", - "print('Gradient of ansatz parameters: ', ansatz_grad.asnumpy())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "上面的三个结果分别表示量子神经网络的输出值、编码线路中参数的梯度值和带训练Ansatz线路中参数的梯度值。有的时候,量子神经网络是作为整个量子经典混合神经网络的第一层,因此我们不用对编码线路中的梯度求导数,对于这种不需要求梯度的线路,可以通过`no_grad`方法指定不需要计算梯度的量子线路不求导。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": 
[ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Measurement result: [[0.89819133]]\n", - "Gradient of encoder parameters: [[[0. 0.]]]\n", - "Gradient of ansatz parameters: [[[-2.7755576e-17 -3.7974921e-01]]]\n" - ] - } - ], - "source": [ - "encoder.no_grad()\n", - "pqc = generate_pqc_operator(encoder_names, ansatz_names, encoder+ansatz, ham)\n", - "measure_result, encoder_grad, ansatz_grad = pqc(encoder_data, ansatz_data)\n", - "print('Measurement result: ', measure_result.asnumpy())\n", - "print('Gradient of encoder parameters: ', encoder_grad.asnumpy())\n", - "print('Gradient of ansatz parameters: ', ansatz_grad.asnumpy())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "如上可知,量子神经网络中的编码线路参数的导数都为零,实际模拟计算过程中没有对其求导。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/training/source_zh_cn/advanced_use/performance_profiling.rst b/tutorials/training/source_zh_cn/advanced_use/performance_profiling.rst deleted file mode 100644 index e6cc6d46fae0c4ae2111508ff0c7f8f665677be0..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/performance_profiling.rst +++ /dev/null @@ -1,11 +0,0 @@ -性能调试 -================================== - -性能调试模块可将训练过程中的算子耗时等信息记录到文件中,通过可视化界面供用户查看分析,帮助用户更高效地调试神经网络性能。 - -.. 
toctree:: - :maxdepth: 1 - - performance_profiling_ascend - performance_profiling_gpu - performance_profiling_ascend_of_cluster diff --git a/tutorials/training/source_zh_cn/advanced_use/performance_profiling_ascend.md b/tutorials/training/source_zh_cn/advanced_use/performance_profiling_ascend.md deleted file mode 100644 index 088c46c80671138ee440de8bceb2340fffec6fe3..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/performance_profiling_ascend.md +++ /dev/null @@ -1,277 +0,0 @@ -# 性能调试(Ascend) - -`Linux` `Ascend` `模型调优` `中级` `高级` - - - -- [性能调试(Ascend)](#性能调试ascend) - - [概述](#概述) - - [操作流程](#操作流程) - - [准备训练脚本](#准备训练脚本) - - [启动MindInsight](#启动MindInsight) - - [训练性能](#训练性能) - - [迭代轨迹分析](#迭代轨迹分析) - - [算子性能分析](#算子性能分析) - - [数据准备性能分析](#数据准备性能分析) - - [Timeline分析](#Timeline分析) - - [资源利用](#资源利用) - - [CPU利用率分析](#CPU利用率分析) - - [内存使用情况分析](#内存使用情况分析) - - [规格](#规格) - - [注意事项](#注意事项) - - - - - -## 概述 - -本教程介绍如何在Ascend AI处理器上使用MindSpore Profiler进行性能调试。 - -## 操作流程 - -- 准备训练脚本,并在训练脚本中调用性能调试接口,接着运行训练脚本。 -- 启动MindInsight,并通过启动参数指定summary-base-dir目录(summary-base-dir是Profiler所创建目录的父目录),例如训练时Profiler创建的文件夹绝对路径为`/home/user/code/data`,则summary-base-dir设为`/home/user/code`。启动成功后,根据IP和端口访问可视化界面,默认访问地址为 `http://127.0.0.1:8080`。 -- 在训练列表找到对应训练,点击性能分析,即可在页面中查看训练性能数据。 - -## 准备训练脚本 - -为了收集神经网络的性能数据,需要在训练脚本中添加MindSpore Profiler相关接口。 - -- `set_context`之后,初始化网络、以及初始化HCCL之前,需要初始化MindSpore `Profiler`对象。 - - > Profiler支持的参数可以参考: - > - > - -- 在训练结束后,调用`Profiler.analyse()`停止性能数据收集并生成性能分析结果。 - -样例代码如下: - -```python -from mindspore.profiler import Profiler -from mindspore import Model, nn, context - -# Init context env -context.set_context(mode=context.GRAPH_MODE, device_target='Ascend', device_id=int(os.environ["DEVICE_ID"])) - -# Init Profiler -# Note that 'data' directory is created in current path by default. To visualize the profiling data by MindInsight, -# 'data' directory should be placed under summary-base-dir. 
-profiler = Profiler() - -# Train Model -Model.train() - -# Profiler end -profiler.analyse() -``` - -## 启动MindInsight - -启动命令请参考[MindInsight相关命令](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/mindinsight_commands.html)。 - -## 训练性能 - -用户从训练列表中选择指定的训练,点击性能调试,可以查看该次训练的性能数据。 - -![performance_overall.png](./images/performance_overall.png) - -图1:性能数据总览 - -图1展示了性能数据总览页面,包含了迭代轨迹(Step Trace)、算子性能、数据准备性能和Timeline等组件的数据总体呈现。各组件展示的数据如下: - -- 迭代轨迹:将训练step划分为几个阶段,统计每个阶段的耗时,按时间线进行展示;总览页展示了迭代轨迹图。 -- 算子性能:统计单算子以及各算子类型的执行时间,进行排序展示;总览页中展示了各算子类型时间占比的饼状图。 -- 数据准备性能:统计训练数据准备各阶段的性能情况;总览页中展示了各阶段性能可能存在瓶颈的step数目。 -- Timeline:按设备统计每个stream中task的耗时情况,在时间轴排列展示;总览页展示了Timeline中stream和task的汇总情况。 - -用户可以点击查看详情链接,进入某个组件页面进行详细分析。MindInsight也会对性能数据进行分析,在左侧的智能小助手中给出性能调试的建议。 - -### 迭代轨迹分析 - -使用迭代轨迹分析组件可以快速了解训练各阶段在总时长中的占比情况。迭代轨迹将训练的一个step划分为迭代间隙 (两次step执行的间隔时间)、前向与反向执行、all reduce、参数更新等几个阶段,并显示出每个阶段的时长,帮助用户定界出性能瓶颈所在的执行阶段。 - -![step_trace.png](./images/step_trace.png) - -图2:迭代轨迹分析 - -图2展示了迭代轨迹分析页面。在迭代轨迹详情中,会展示各阶段在训练step中的起止时间,默认显示的是各step的平均值,用户也可以在下拉菜单选择某个step查看该step的迭代轨迹情况。 - -页面下方显示了迭代间隙、前后向计算、迭代拖尾时间随着step的变化曲线等,用户可以据此判断某个阶段是否存在性能优化空间。其中: - -- **迭代间隙:** 主要负责从数据队列中读取数据,如果该部分耗时较长,建议前往数据处理部分进一步分析。 -- **前后向计算:** 执行网络中的前向算子以及反向算子,承载了一个step主要的计算工作,如果该部分耗时较长,建议前往算子统计或时间线中进一步分析。 -- **迭代拖尾:** 主要在多卡场景下执行参数聚合、参数更新操作,包括前后向计算结束到参数更新完成的时间。如果该部分耗时较长,建议查看`all_reduce`耗时以及并行情况。 - -> 迭代轨迹暂不支持异构训练场景。 - -迭代轨迹在做阶段划分时,需要识别前向计算开始的算子和反向计算结束的算子。为了降低用户使用Profiler的门槛,MindSpore会对这两个算子做自动识别,方法为: -前向计算开始的算子指定为`get_next`算子之后连接的第一个算子,反向计算结束的算子指定为最后一次all reduce之前连接的算子。**Profiler不保证在所有情况下自动识别的结果和用户的预期一致,用户可以根据网络的特点自行调整**,调整方法如下: - -- 设置`PROFILING_FP_START`环境变量指定前向计算开始的算子,如`export PROFILING_FP_START=fp32_vars/conv2d/BatchNorm`。 -- 设置`PROFILING_BP_END`环境变量指定反向计算结束的算子,如`export PROFILING_BP_END=loss_scale/gradients/AddN_70`。 - -### 算子性能分析 - -使用算子性能分析组件可以对MindSpore运行过程中的各个算子的执行时间进行统计展示(包括AICORE、AICPU、HOSTCPU算子)。 - -![op_type_statistics.png](./images/op_type_statistics.PNG) - -图3:算子类别统计分析 - -图3展示了按算子类别进行统计分析的结果,包含以下内容: - 
-- 可以选择饼图/柱状图展示各算子类别的时间占比,每个算子类别的执行时间会统计属于该类别的算子执行时间总和。 -- 统计前20个占比时间最长的算子类别,展示其时间所占的百分比以及具体的执行时间(毫秒)。 - -![op_statistics.png](./images/op_statistics.PNG) - -图4:算子统计分析 - -图4展示了算子性能统计表,包含以下内容: - -- 选择全部:按单个算子的统计结果进行排序展示,展示维度包括算子名称、算子类型、算子执行时间、算子全scope名称、算子信息等;默认按算子执行时间排序。 -- 选择分类:按算子类别的统计结果进行排序展示,展示维度包括算子分类名称、算子类别执行时间、执行频次、占总时间的比例等。点击每个算子类别,可以进一步查看该类别下所有单个算子的统计信息。 -- 搜索:在右侧搜索框中输入字符串,支持对算子名称/类别进行模糊搜索。 - -### 数据准备性能分析 - -使用数据准备性能分析组件可以对训练数据准备过程进行性能分析。数据准备过程可以分为三个阶段:数据处理pipeline、数据发送至Device以及Device侧读取训练数据。数据准备性能分析组件会对每个阶段的处理性能进行详细分析,并将分析结果进行展示。 - -![minddata_profile.png](images/data_profile.png) - -图5:数据准备性能分析 - -图5展示了数据准备性能分析页面,包含迭代间隙、数据处理两个TAB页面。 - -迭代间隙TAB页主要用来分析数据准备三个阶段是否存在性能瓶颈,数据队列图是分析判断的重要依据: - -- 数据队列Size代表Device侧从队列取数据时队列的长度,如果数据队列Size为0,则训练会一直等待,直到队列中有数据才会开始某个step的训练;如果数据队列Size大于0,则训练可以快速取到数据,数据准备不是该step的瓶颈所在。 -- 主机队列Size可以推断出数据处理和发送速度,如果主机队列Size为0,表示数据处理速度慢而数据发送速度快,需要加快数据处理。 -- 如果主机队列Size一直较大,而数据队列的Size持续很小,则数据发送有可能存在性能瓶颈。 - -![data_op_profile.png](./images/data_op_profile.png) - -图6:数据处理pipeline分析 - -图6展示了数据处理TAB页面,可以对数据处理pipeline做进一步分析。不同的数据算子之间使用队列进行数据交换,队列的长度可以反映出算子处理数据的快慢,进而推断出pipeline中的瓶颈算子所在。 - -算子队列的平均使用率代表队列中已有数据Size除以队列最大数据Size的平均值,使用率越高说明队列中数据积累越多。算子队列关系展示了数据处理pipeline中的算子以及它们之间的连接情况,点击某个队列可以在下方查看该队列中数据Size随着时间的变化曲线,以及与数据队列连接的算子信息等。对数据处理pipeline的分析有如下建议: - -- 当算子左边连接的Queue使用率都比较高,右边连接的Queue使用率比较低,该算子可能是性能瓶颈。 -- 对于最左侧的算子,如果其右边所有Queue的使用率都比较低,该算子可能是性能瓶颈。 -- 对于最右侧的算子,如果其左边所有Queue的使用率都比较高,该算子可能是性能瓶颈。 - -对于不同的类型的数据处理算子,有如下优化建议: - -- 如果Dataset算子是性能瓶颈,建议增加`num_parallel_workers`。 -- 如果GeneratorOp类型的算子是性能瓶颈,建议增加`num_parallel_workers`,并尝试将其替换为`MindRecordDataset`。 -- 如果MapOp类型的算子是性能瓶颈,建议增加`num_parallel_workers`,如果该算子为Python算子,可以尝试优化脚本。 -- 如果BatchOp类型的算子是性能瓶颈,建议调整`prefetch_size`的大小。 - -### Timeline分析 - -Timeline组件可以展示: - -- 算子分配到哪个设备(AICPU、AICORE、HOSTCPU)执行。 -- MindSpore对该网络的流切分策略。 -- 算子在Device上的执行序列和执行时长。 -- 训练的Step数(暂不支持动态Shape场景、多图场景和异构训练场景,这些场景下Step数据可能不准确)。 -- 算子的`Scope Name`信息,可以选择展示多少层`Scope 
Name`信息并下载对应的timeline文件。例如某算子的全名为:`Default/network/lenet5/Conv2D-op11`,则该算子的第一层Scope Name为`Default`、第二层为`network`。如果选择展示两层`Scope Name`信息,则会展示`Default`和`network`。 - -通过分析Timeline,用户可以对训练过程进行细粒度分析:从High Level层面,可以分析流切分方法是否合理、迭代间隙和拖尾时间是否过长等;从Low Level层面,可以分析算子执行时间等。 - -用户可以点击总览页面Timeline部分的下载按钮,将Timeline数据文件 (json格式) 保存至本地,再通过工具查看Timeline的详细信息。推荐使用 `chrome://tracing` 或者 [Perfetto](https://ui.perfetto.dev/#!/) 做Timeline展示。 - -- Chrome tracing:点击左上角"load"加载文件。 -- Perfetto:点击左侧"Open trace file"加载文件。 - -![timeline.png](./images/timeline.png) - -图7:Timeline分析 - -Timeline主要包含如下几个部分: - -- Device及其stream list:包含Device上的stream列表,每个stream由task执行序列组成,一个task是其中的一个小方块,大小代表执行时间长短。 -- 算子信息:选中某个task后,可以显示该task对应算子的信息,包括名称、type等。 - -可以使用W/A/S/D来放大、缩小地查看Timeline图信息。 - -## 资源利用 - -资源利用包括CPU利用率和内存使用情况分析。 - -![resource_visibility.png](./images/resource_visibility.png) - -图8:资源利用总览 - -图8展示了资源利用总览页面,包括CPU利用率分析与内存使用情况分析。通过点击右上角的`查看详情`按钮可以查看详细信息。 - -### CPU利用率分析 - -CPU利用率分析,主要起到辅助性能调试的作用。根据Queue size确定了性能瓶颈后,可以根据CPU利用率辅助对性能进行调试(用户利用率过低,增加线程数;系统利用率过大,减小线程数)。 -CPU利用率包含整机CPU利用率、进程CPU利用率、Data pipeline算子CPU利用率。 - -![device_cpu_utilization.png](./images/device_cpu_utilization.png) - -图9: 整机CPU利用率 - -整机CPU利用率:展示设备在训练过程中整体的CPU使用情况,包含用户利用率、系统利用率、空闲利用率、IO利用率、当前活跃进程数、上下文切换次数。如果用户利用率较低,可以尝试增大算子线程数,增加CPU使用情况;如果系统利用率较大,同时上下文切换次数、CPU等待处理的进程较大,说明需要相应减少线程个数。 - -![process_cpu_utilization.png](./images/process_cpu_utilizaton.png) - -图10: 进程利用率 - -进程利用率:展示单个进程的CPU占用情况。整机利用率和进程利用率结合,可以确定训练过程中是否有其他进程影响训练。 - -![data_op_cpu_utilization.png](./images/data_op_utilization.png) - -图11: 算子利用率 - -算子利用率:展示Data pipeline单个算子占用的CPU利用率。可以根据实际情况,调整对应算子的线程数。如果线程数不大,占用CPU较多,可以考虑优化代码。 - -CPU利用率常用场景: - -- 网络调试人员根据Queue size判断是Data性能有瓶颈,可以结合整机利用率和算子利用率作为辅助尝试调整线程数。 -- 开发人员可以查看算子利用率,如果某一个算子比较耗CPU利用率,可以考虑优化该算子。 - -### 内存使用情况分析 - -该页面用于展示模型在**Device侧**的内存使用情况,是**基于理论值的理想预估**。页面内容包括: - -- 模型的内存分配概览,包括总可用内存、峰值内存等信息。 -- 模型运行过程中,占用内存大小随执行顺序的变化。 -- 模型运行过程中,每个执行算子的内存使用分解情况。 - -> 内存使用情况分析暂不支持异构训练场景。 - -![memory.png](./images/memory.png) - 
-图12:内存使用情况页面 - -用户可以结合```内存分配概览```提供的信息以及折线图的变化趋势来了解内存使用的大致情况,除此之外,从折线图里还可以获得更多细节信息,包括: - -- 局部缩放:折线图下方有缩放滚动条,用户可以通过调节其大小对折线图进行放大或缩小,以便观察细节。 -- 前向开始和反向结束位置:通常情况下,用户可以在折线图上观察到模型的前向开始和反向结束的执行位置。 -- 执行算子信息:鼠标悬浮在折线图上的某处,可以看到对应位置的执行算子信息,包括算子执行顺序编号、算子名称、算子占用内存、模型在当前位置占用的总内存,以及与前一执行位置的相对内存变化。 -- 算子内存分配情况:鼠标点击折线图上的某一位置,位于折线图下方的```算子内存分配```模块会将该执行位置的内存使用分解情况展示出来。```算子内存分配```模块展示了对应执行位置的内存分解情况,也即,当前执行位置的已占用内存分配给了哪些算子的输出张量。该模块给用户提供了更丰富的信息,包括张量名称、张量大小、张量类型、数据类型、形状、格式,以及张量内存活跃的生命周期。 - -![memory_graphics.png](./images/memory_graphics.png) - -图13:内存使用折线图 - -## 规格 - -- 为了控制性能测试时生成数据的大小,大型网络建议性能调试的step数目限制在10以内。 - - > 控制step数目可以通过控制训练数据集的大小来实现,如`mindspore.dataset.MindDataset`类中的`num_samples`参数可以控制数据集大小,详情参考: - > - > - -- Timeline数据的解析比较耗时,且一般几个step的数据即足够分析出结果。出于数据解析和UI展示性能的考虑,Profiler最多展示20M数据(对大型网络20M可以显示10+条step的信息)。 - -## 注意事项 - -- PyNative模式下暂不支持性能调试。 -- 训练加推理过程暂不支持性能调试,目前支持单独训练或推理的性能调试。 diff --git a/tutorials/training/source_zh_cn/advanced_use/performance_profiling_ascend_of_cluster.md b/tutorials/training/source_zh_cn/advanced_use/performance_profiling_ascend_of_cluster.md deleted file mode 100644 index ec082a8cb8f441eabdb1b09861845848b1508157..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/performance_profiling_ascend_of_cluster.md +++ /dev/null @@ -1,241 +0,0 @@ -# 集群性能调试(Ascend) - -`Linux` `Ascend` `集群调优` `中级` `高级` - - - -- [集群性能调试(Ascend)](#集群性能调试ascend) - - [概述](#概述) - - [操作流程](#操作流程) - - [分布式训练](#分布式训练) - - [收集集群性能数据](#收集集群性能数据) - - [启动MindInsight](#启动mindinsight) - - [性能分析](#性能分析) - - [集群迭代轨迹性能数据分析](#集群迭代轨迹性能数据分析) - - [规格](#规格) - - [注意事项](#注意事项) - - - - - -## 概述 - -本教程介绍如何在Ascend AI处理器上使用MindSpore Profiler进行集群训练性能调试。 - -## 操作流程 - -- 搭建分布式训练环境,准备分布式训练脚本,并在训练脚本中调用性能调试接口,接着运行训练脚本。 -- 收集集群训练性能数据。 -- 启动MindInsight,并通过启动参数指定summary-base-dir目录(summary-base-dir是Profiler所创建目录的父目录),例如训练时Profiler创建的文件夹绝对路径为`/home/user/code/data`,则summary-base-dir设为`/home/user/code`。启动成功后,根据IP和端口访问可视化界面,默认访问地址为 `http://127.0.0.1:8080`。 -- 
在训练列表找到对应集群训练,点击性能分析,即可在页面中查看集群性能数据。 - -## 分布式训练 - -分布式训练请参考[分布式训练教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html)。 - -## 收集集群性能数据 - -多机多卡训练的时候,一次集群训练后,性能数据分布在各个主机上(host节点)。要进行集群性能分析,需要将所有主机上的性能数据收集到一台主机上进行分析。考虑到集群运行环境的复杂以及相关的权限问题、登录问题,比较合理的方式是让用户去收集集群性能数据。 -下面是一次分布式集群训练后,使用脚本收集性能数据的过程,用户可以参照此脚本进行集群性能数据收集。 - -脚本程序说明:脚本程序首先创建了集群作业文件夹,然后利用SSHPass技术进行非交互式的远程拷贝(避免了手动认证,手动输入密码),将集群中各个host节点的数据拷贝到集群作业文件夹中。脚本程序同时生成了host ip地址的映射表以及将多卡环境的组网信息文件拷贝到集群作业文件中。 - -```bash -#!/bin/bash - -echo "==============================================================================================================" -echo "Please run the script as: " -echo "bash collect_cluster_profiler_data.sh" -echo "for example: bash collect_cluster_profiler_data.sh cluster_hccl_config_path cluster_account_config_path cluster_train_id host_train_id device_regex output"s -echo "==============================================================================================================" - -SSH="ssh -o StrictHostKeyChecking=no" -SCP="scp -o StrictHostKeyChecking=no" - -# Get the node list in the cluster. -get_cluster_list() -{ - local cluster_config=$1 - cat ${cluster_config} | python3 -c 'import sys,json;[print(node) for node in json.load(sys.stdin)["cluster"].keys()]' -} - -# Get the account number of node. -get_node_user() -{ - local cluster_config=$1 - local node=$2 - cat ${cluster_config} | python3 -c 'import sys,json;print(json.load(sys.stdin)["cluster"]['\"${node}\"']["user"])' -} - -# Get the password of node. -get_node_passwd() -{ - local cluster_config=$1 - local node=$2 - cat ${cluster_config} | python3 -c 'import sys,json;print(json.load(sys.stdin)["cluster"]['\"${node}\"']["passwd"])' -} - -# Copy data from remote node to local node. 
-rscp_pass() -{ - local node="$1" - local user="$2" - local passwd="$3" - local src="$4" - local target="$5" - sshpass -p "${passwd}" ${SCP} -r "${user}"@"${node}":"${src}" "${target}" -} - -cluster_hccl_config_path=$1 -cluster_account_config_path=$2s -cluster_train_id=$3 -host_train_id=$4 -device_regex=$5 -output=$6 -host_ip_mapping_file='host_ips_mapping.txt' -host_ip_mapping_id=1 -node_list=$(get_cluster_list ${cluster_account_config_path}) -echo "-----begin----" - -if [ ! -d "${cluster_train_id}" ]; then -mkdir -p ${cluster_train_id} -fi - -# Copy the networking information file of multi card environment to the cluster directory. -cp $cluster_hccl_config_paht $cluster_train_id - - -for node in ${node_list} -do - user=$(get_node_user ${cluster_account_config_path} ${node}) - passwd=$(get_node_passwd ${cluster_account_config_path} ${node}) - echo "------------------${user}@${node}---------------------" - target_dir=${cluster_train_id}/cluster_profiler/${host_ip_mapping_id}/profiler/ - if [ ! -d "${target_dir}" ]; then - mkdir -p ${target_dir} - fi - - # Eight card data - for((i=0;i<8;i++)); - do - src_dir=${host_train_id}/${device_regex}${i}/${output}*/profiler*/*.* - $(rscp_pass ${node} ${user} ${passwd} "${src_dir}" ${target_dir}) - done - - # save the mapping information to the host_ips_mapping.txt. 
- echo "$node $host_ip_mapping_id">>${cluster_train_id}/$host_ip_mapping_file - - # host_ip_mapping_id ++ - host_ip_mapping_id=$((${host_ip_mapping_id}+1)) -done -``` - -脚本参数说明: - -- `cluster_hccl_config_path` 为多卡环境的组网信息文件路径。内容格式如下: - - ```json - { - "version": "1.0", - "server_count": "1", - "server_list": [ - { - "server_id": "10.xxx.xxx.1", - "device": [ - {"device_id": "0","device_ip": "192.1.27.6","rank_id": "0"}, - {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"}, - {"device_id": "2","device_ip": "192.3.27.6","rank_id": "2"}, - {"device_id": "3","device_ip": "192.4.27.6","rank_id": "3"}, - {"device_id": "4","device_ip": "192.1.27.7","rank_id": "4"}, - {"device_id": "5","device_ip": "192.2.27.7","rank_id": "5"}, - {"device_id": "6","device_ip": "192.3.27.7","rank_id": "6"}, - {"device_id": "7","device_ip": "192.4.27.7","rank_id": "7"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" - } - ``` - -- `cluster_account_config_path` 为各主机账号密码配置文件路径,内容格式如下: - - ```json - { - "rank_size": 16, - "cluster": { - "10.xxx.xxx.1": { - "user": "root", - "passwd": "xxx" - }, - "10.xxx.xxx.2": { - "user": "root", - "passwd": "xxx" - } - } - } - ``` - -- `cluster_train_id` 为集群profiler性能数据保存的路径,比如`/home/summary/run1`、`/home/data/run2` 其中`run1`和`run2`分别保存两次集群训练的作业。 -- `host_train_id` 为集群训练时,各个主机节点保存profiler的性能数据的路径。比如:`/home/summary/`。 -- `device_regex` 为各个主机节点中不同卡保存profiler的性能数据的文件夹名称。比如:`/home/summary/device0`和`/home/summary/device1`分别是0号卡和1号卡对应的文件夹,此时device_regex为device。 -- `output` 为训练脚本中用户设置的保存profiler性能文件的路径,默认为`./data`。 - -通过脚本收集到的集群性能文件夹目录结构为: - -```text -|-- run - |-- hccl.json - |-- host_ips_mapping.txt - |-- cluster_profiler - |-- 1 - | |-- profiler - | |-- step_trace_raw_0_detail_time.csv -``` - -> 收集的集群性能作业需要符合该目录结构,否则无法用MindInsight进行可视化展示。必须包含组网信息文件(文件名可以任取)和host_ips_mapping.txt文件(文件名和后缀唯一)。 - -集群性能文件夹结构说明: - -- `hccl.json` 为当前多卡环境的组网信息文件。记录了host_ip、device_id、rank_id之间的对应关系。 -- `host_ips_mapping.txt` 
为host_ip映射文件。从安全角度出发,集群中的真实host_ip需要经过映射,避免在查询中,暴露真实的host_ip值,导致安全风险。此处会维护一个host_ip映射表,`host_ips_mapping.txt`中的内容一行代表一组映射,譬如: 10.xxx.xxx.1 1 表示10.xxx.xxx.1的映射值为1。 -- `cluster_profiler` 为集群训练作业的标签,用于判断训练作业是否属于集群训练作业。 -- `1` 为单机多卡profiler数据,保存集群中某一个host_ip节点的profiler性能数据文件,`1`为该host_ip映射后的名称。一个cluster_profiler文件中包含了集群中所有的host节点的性能数据。 - -## 启动MindInsight - -启动命令请参考[MindInsight相关命令](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/mindinsight_commands.html)。 - -### 集群性能分析 - -用户从训练列表中选择指定的训练,点击性能调试,可以查看该次训练的性能数据。集群性能分析包括集群迭代轨迹分析。 - -#### 集群迭代轨迹分析 - -使用集群迭代轨迹分析组件,可以找出集群训练中的慢主机、慢卡。 -集群迭代轨迹分析组件展示所有卡的迭代信息,包括迭代间隙、前反向、迭代拖尾,均支持排序操作。其中迭代间隙反映了数据处理阶段的快慢,通过卡的迭代间隙时间可以反映出对应主机处理数据的快慢。卡的前反向时间反映了卡的计算能力。迭代拖尾反映了all_reduce耗时以及并行情况。 - -![cluster_iterative_trajectory.png](./images/cluster_iterative_trajectory.png) - -图1:集群迭代轨迹 - -图1展示了集群迭代轨迹分析页面,默认展示卡的性能平均值,支持查询特定step下的卡的迭代轨迹信息。通过点击单卡中的详情连接,也可以跳转到单卡的详细性能展示页面,查询详细的单卡性能数据。 - -![single_car_performance_overall.png](./images/single_car_performance_overall.png) - -图2:单卡性能信息 - -图2展示集群中单卡性能信息,单卡性能信息请参考[单卡性能信息](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/performance_profiling_ascend.html)。 - -## 规格 - -- 为了控制性能测试时生成数据的大小,大型网络建议性能调试的step数目限制在10以内。 - - > 控制step数目可以通过控制训练数据集的大小来实现,如`mindspore.dataset.MindDataset`类中的`num_samples`参数可以控制数据集大小,详情参考: - > - > - -## 注意事项 - -- PyNative模式下暂不支持性能调试。 -- 训练加推理过程暂不支持性能调试,目前支持单独训练或推理的性能调试。 \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/advanced_use/performance_profiling_gpu.md b/tutorials/training/source_zh_cn/advanced_use/performance_profiling_gpu.md deleted file mode 100644 index d3bc9137c09f33449ef7ed15ded4f79cfd8c4307..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/performance_profiling_gpu.md +++ /dev/null @@ -1,174 +0,0 @@ -# 性能调试(GPU) - -`Linux` `GPU` `模型调优` `中级` `高级` - - - -- [性能调试(GPU)](#性能调试gpu) - - [概述](#概述) - - [操作流程](#操作流程) - - [准备训练脚本](#准备训练脚本) - - [启动MindInsight](#启动mindinsight) - - 
[训练性能](#训练性能) - - [算子性能分析](#算子性能分析) - - [Timeline分析](#timeline分析) - - [迭代轨迹分析](#迭代轨迹分析) - - [数据准备性能分析](#数据准备性能分析) - - [资源利用](#资源利用) - - [CPU利用率分析](#cpu利用率分析) - - [注意事项](#注意事项) - - - - - -## 概述 - -本教程介绍如何在GPU上使用MindSpore Profiler进行性能调试。 - -## 操作流程 - -- 准备训练脚本,并在训练脚本中调用性能调试接口,接着运行训练脚本。 -- 启动MindInsight,并通过启动参数指定summary-base-dir目录(summary-base-dir是Profiler所创建目录的父目录),例如训练时Profiler创建的文件夹绝对路径为`/home/user/code/data`,则summary-base-dir设为`/home/user/code`。启动成功后,根据IP和端口访问可视化界面,默认访问地址为 `http://127.0.0.1:8080`。 -- 在训练列表找到对应训练,点击性能分析,即可在页面中查看训练性能数据。 - -> 普通用户在默认情况下无权访问目标设备上的NVIDIA GPU性能计数器。 -> -> 如果普通用户需要在训练脚本中使用profiler性能统计能力,则需参考以下网址的说明进行权限配置。 -> -> - -## 准备训练脚本 - -为了收集神经网络的性能数据,需要在训练脚本中添加MindSpore Profiler相关接口。 - -- `set_context`之后,需要初始化MindSpore `Profiler`对象。 - - > GPU多卡场景需要在`set_auto_parallel_context`之后初始化`Profiler`对象。 - > - > GPU场景下初始化Profiler对象时只有output_path参数有效。 - -- 在训练结束后,调用`Profiler.analyse`停止性能数据收集并生成性能分析结果。 - -样例代码与Ascend使用方式一致,可以参考: - -GPU场景可自定义callback方式收集性能,但数据准备阶段、数据下沉模式不支持该方式收集性能数据。 - -示例如下: - -```python -class StopAtStep(Callback): - def __init__(self, start_step, stop_step): - super(StopAtStep, self).__init__() - self.start_step = start_step - self.stop_step = stop_step - self.already_analysed = False - - def step_begin(self, run_context): - cb_params = run_context.original_args() - step_num = cb_params.cur_step_num - if step_num == self.start_step: - self.profiler = Profiler() - - def step_end(self, run_context): - cb_params = run_context.original_args() - step_num = cb_params.cur_step_num - if step_num == self.stop_step and not self.already_analysed: - self.profiler.analyse() - self.already_analysed = True - - def end(self, run_context): - if not self.already_analysed: - self.profiler.analyse() -``` - -以上代码仅供参考,用户可根据所需场景自由实现。 - -## 启动MindInsight - -启动命令请参考[MindInsight相关命令](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/mindinsight_commands.html)。 - -## 训练性能 - 
-用户从训练列表中选择指定的训练,点击性能调试,可以查看该次训练的性能数据(目前GPU场景支持算子耗时统计排名、Timeline、迭代轨迹分析以及数据准备性能分析,其他功能敬请期待)。 - -![performance_overall.png](./images/performance_overall.png) - -图1:性能数据总览 - -图1展示了性能数据总览页面,包含了迭代轨迹(Step Trace)、算子性能、数据准备性能和Timeline等组件的数据总体呈现: - -- 算子性能:统计单算子以及各算子类型的执行时间,进行排序展示;总览页中展示了各算子类型平均执行时间占比的饼状图。 -- Timeline:统计了算子以及CUDA activity,在时间轴排列展示;总览页展示了Timeline中执行情况汇总。 -- 迭代轨迹:将训练step划分为几个阶段,统计每个阶段的耗时,按时间线进行展示;总览页展示了迭代轨迹图。 -- 数据准备性能:统计训练数据准备阶段各阶段的性能情况;总览页中展示了各阶段性能可能存在瓶颈的step数目。 - -用户可以点击查看详情链接,进入组件页面进行详细分析。 - -### 算子性能分析 - -使用算子性能分析组件可以对MindSpore运行过程中的各个算子的执行时间进行统计展示(包括GPU算子、CUDA内核、HOSTCPU算子)。 - -![gpu_op_ui_profiler.png](./images/gpu_op_ui_profiler.png) - -图2:算子类别统计分析 - -图2展示了按算子类别进行统计分析的结果,包含以下内容: - -- 可以选择饼图/柱状图展示各算子类别的时间占比,每个算子类别的执行时间会统计属于该类别的算子执行时间总和以及平均执行时间。 -- 统计前20个平均执行时间最长的算子类别。 - -图2下半部分展示了算子性能统计表,包含以下内容: - -- 选择全部:按单个算子的统计结果进行排序展示,展示维度包括算子位置(Device/Host)、算子类型、算子执行时间、算子全名等;默认按算子平均执行时间排序。 -- 选择分类:按算子类别的统计结果进行排序展示,展示维度包括算子分类名称、算子类别执行时间、执行频次、执行总时间的比例、平均执行时间。点击每个算子类别,可以进一步查看该类别下所有单个算子的统计信息。 -- 搜索:在右侧搜索框中输入字符串,支持对算子名称/类别进行模糊搜索。 - -![gpu_activity_profiler.png](./images/gpu_activity_profiler.png) - -图3:内核信息分析 - -图3展示了CUDA activity信息统计,包含以下内容: - -- 统计图表:展示了各个kernel activity的占比以及算子的耗时信息。 -- 内核信息列表:信息列表展示activity的名称、所属算子名称、执行次数、总时间、平均时间等信息。 -- 搜索:可以通过name(activity名称)以及`op_full_name`(所属算子名称)进行部分匹配的搜索。 - -### Timeline分析 - -GPU场景下,Timeline分析的使用方法和Ascend场景相同,不同之处是,GPU Timeline展示的是算子信息(包括GPU算子和CPU算子)和CUDA activity的信息。 - -使用方法可参考: - -### 迭代轨迹分析 - -GPU场景下,迭代轨迹分析的使用方法和Ascend场景相同。(注意:**迭代轨迹暂不支持异构训练场景**) - -使用方法可参考: - -### 数据准备性能分析 - -GPU场景下,数据准备性能分析的使用方法和Ascend场景相同。 - -使用方法可参考: - -## 资源利用 - -资源利用包括CPU利用率分析。 - -![resource_visibility_gpu.png](./images/resource_visibility_gpu.png) - -图4:资源利用总览 - -图4展示了资源利用总览页面,展示了CPU利用率的情况。通过点击右上角的`查看详情`按钮可以查看详细信息。 - -### CPU利用率分析 - -GPU场景下,CPU利用率分析的使用方法和Ascend场景相同。 - -使用方法可参考: - -## 注意事项 - -- PyNative模式下暂不支持性能调试。 -- 训练加推理过程暂不支持性能调试,目前支持单独训练或推理的性能调试。 diff --git 
a/tutorials/training/source_zh_cn/advanced_use/protect_user_privacy_with_differential_privacy.md b/tutorials/training/source_zh_cn/advanced_use/protect_user_privacy_with_differential_privacy.md deleted file mode 100644 index 3f863e87b4235bf9f79ade6e6dfab95808007676..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/protect_user_privacy_with_differential_privacy.md +++ /dev/null @@ -1,353 +0,0 @@ -# 应用差分隐私机制保护用户隐私 - -`Linux` `Ascend` `模型训练` `模型调优` `企业` `高级` - - - -- [应用差分隐私机制保护用户隐私](#应用差分隐私机制保护用户隐私) - - [概述](#概述) - - [实现阶段](#实现阶段) - - [导入需要的库文件](#导入需要的库文件) - - [参数配置](#参数配置) - - [预处理数据集](#预处理数据集) - - [建立模型](#建立模型) - - [引入差分隐私](#引入差分隐私) - - [引用](#引用) - - - - - -## 概述 - -差分隐私是一种保护用户数据隐私的机制。什么是隐私,隐私指的是单个用户的某些属性,一群用户的某一些属性可以不看做隐私。例如:“抽烟的人有更高的几率会得肺癌”,这个不泄露隐私,但是“张三抽烟,得了肺癌”,这个就泄露了张三的隐私。如果我们知道A医院,今天就诊的100个病人,其中有10个肺癌,并且我们知道了其中99个人的患病信息,就可以推测剩下一个人是否患有肺癌。这种窃取隐私的行为叫做差分攻击。差分隐私是防止差分攻击的方法,通过添加噪声,使得差别只有一条记录的两个数据集,通过模型推理获得相同结果的概率非常接近。也就是说,用了差分隐私后,攻击者知道的100个人的患病信息和99个人的患病信息几乎是一样的,从而无法推测出剩下1个人的患病情况。 - -### 机器学习中的差分隐私 - -机器学习算法一般是用大量数据并更新模型参数,学习数据特征。在理想情况下,这些算法学习到一些泛化性较好的模型,例如“吸烟患者更容易得肺癌”,而不是特定的个体特征,例如“张三是个吸烟者,患有肺癌”。然而,机器学习算法并不会区分通用特征还是个体特征。当我们用机器学习来完成某个重要的任务,例如肺癌诊断,发布的机器学习模型,可能在无意中透露训练集中的个体特征,恶意攻击者可能从发布的模型获得关于张三的隐私信息,因此使用差分隐私技术来保护机器学习模型是十分必要的。 - -**差分隐私定义**[1]为: - -$Pr[\mathcal{K}(D)\in S] \le e^{\epsilon} Pr[\mathcal{K}(D') \in S]+\delta$ - -对于两个差别只有一条记录的数据集$D, D'$,通过随机算法$\mathcal{K}$,输出为结果集合$S$子集的概率满足上面公式,$\epsilon$为差分隐私预算,$\delta$ 为扰动,$\epsilon, \delta$越小,$\mathcal{K}$在$D, D'$上输出的数据分布越接近。 - -### 差分隐私的度量 - -差分隐私可以用$\epsilon, \delta$ 度量。 - -- $\epsilon$:数据集中增加或者减少一条记录,引起的输出概率可以改变的上限。我们通常希望$\epsilon$是一个较小的常数,值越小表示差分隐私条件越严格。 -- $\delta$:用于限制模型行为任意改变的概率,通常设置为一个小的常数,推荐设置小于训练数据集大小的倒数。 - -### MindArmour实现的差分隐私 - 
-MindArmour的差分隐私模块Differential-Privacy,实现了差分隐私优化器。目前支持基于高斯机制的差分隐私SGD、Momentum、Adam优化器。其中,高斯噪声机制支持固定标准差的非自适应高斯噪声和随着时间或者迭代步数变化而变化的自适应高斯噪声,使用非自适应高斯噪声的优势在于可以严格控制差分隐私预算$\epsilon$,缺点是在模型训练过程中,每个Step添加的噪声量固定,在训练后期,较大的噪声使得模型收敛困难,甚至导致性能大幅下跌,模型可用性差。自适应噪声很好的解决了这个问题,在模型训练初期,添加的噪声量较大,随着模型逐渐收敛,噪声量逐渐减小,噪声对于模型可用性的影响减小。自适应噪声的缺点是不能严格控制差分隐私预算,在同样的初始值下,自适应差分隐私的$\epsilon$比非自适应的大。同时还提供RDP(R’enyi differential privacy)[2]用于监测差分隐私预算。 - -这里以LeNet模型,MNIST 数据集为例,说明如何在MindSpore上使用差分隐私优化器训练神经网络模型。 - -> 本例面向Ascend 910 AI处理器,你可以在这里下载完整的样例代码: - -## 实现阶段 - -### 导入需要的库文件 - -下列是我们需要的公共模块、MindSpore相关模块和差分隐私特性模块。 - -```python -import os -from easydict import EasyDict as edict - -import mindspore.nn as nn -from mindspore import context, load_checkpoint, load_param_into_net -from mindspore.train.callback import ModelCheckpoint -from mindspore.train.callback import CheckpointConfig -from mindspore.train.callback import LossMonitor -from mindspore.nn import Accuracy -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as CV -import mindspore.dataset.transforms.c_transforms as C -from mindspore.dataset.vision import Inter -from mindspore import dtype as mstype - -from mindarmour.privacy.diff_privacy import DPModel -from mindarmour.privacy.diff_privacy import NoiseMechanismsFactory -from mindarmour.privacy.diff_privacy import ClipMechanismsFactory -from mindarmour.privacy.diff_privacy import PrivacyMonitorFactory -from mindarmour.utils.logger import LogUtil - -LOGGER = LogUtil.get_instance() -LOGGER.set_level('INFO') -TAG = 'Lenet5_train' -``` - -### 参数配置 - -1. 
设置运行环境、数据集路径、模型训练参数、checkpoint存储参数、差分隐私参数,`data_path`数据路径替换成你的数据集所在路径。更多配置可以参考。 - - ```python - cfg = edict({ - 'num_classes': 10, # the number of classes of model's output - 'lr': 0.01, # the learning rate of model's optimizer - 'momentum': 0.9, # the momentum value of model's optimizer - 'epoch_size': 10, # training epochs - 'batch_size': 256, # batch size for training - 'image_height': 32, # the height of training samples - 'image_width': 32, # the width of training samples - 'save_checkpoint_steps': 234, # the interval steps for saving checkpoint file of the model - 'keep_checkpoint_max': 10, # the maximum number of checkpoint files would be saved - 'device_target': 'Ascend', # device used - 'data_path': '../../common/dataset/MNIST', # the path of training and testing data set - 'dataset_sink_mode': False, # whether deliver all training data to device one time - 'micro_batches': 32, # the number of small batches split from an original batch - 'norm_bound': 1.0, # the clip bound of the gradients of model's training parameters - 'initial_noise_multiplier': 0.05, # the initial multiplication coefficient of the noise added to training - # parameters' gradients - 'noise_mechanisms': 'Gaussian', # the method of adding noise in gradients while training - 'clip_mechanisms': 'Gaussian', # the method of adaptive clipping gradients while training - 'clip_decay_policy': 'Linear', # Decay policy of adaptive clipping, decay_policy must be in ['Linear', 'Geometric']. - 'clip_learning_rate': 0.001, # Learning rate of update norm clip. - 'target_unclipped_quantile': 0.9, # Target quantile of norm clip. - 'fraction_stddev': 0.01, # The stddev of Gaussian normal which used in empirical_fraction. - 'optimizer': 'Momentum' # the base optimizer used for Differential privacy training - }) - ``` - -2. 
配置必要的信息,包括环境信息、执行的模式。 - - ```python - context.set_context(mode=context.GRAPH_MODE, device_target=cfg.device_target) - ``` - - 详细的接口配置信息,请参见`context.set_context`接口说明。 - -### 预处理数据集 - -加载数据集并处理成MindSpore数据格式。 - -```python -def generate_mnist_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1, sparse=True): - """ - create dataset for training or testing - """ - # define dataset - ds1 = ds.MnistDataset(data_path) - - # define operation parameters - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), - interpolation=Inter.LINEAR) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - # apply map operations on images - if not sparse: - one_hot_enco = C.OneHot(10) - ds1 = ds1.map(operations=one_hot_enco, input_columns="label", - num_parallel_workers=num_parallel_workers) - type_cast_op = C.TypeCast(mstype.float32) - ds1 = ds1.map(operations=type_cast_op, input_columns="label", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=resize_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=rescale_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=hwc2chw_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - ds1 = ds1.shuffle(buffer_size=buffer_size) - ds1 = ds1.batch(batch_size, drop_remainder=True) - ds1 = ds1.repeat(repeat_size) - - return ds1 -``` - -### 建立模型 - -这里以LeNet模型为例,您也可以建立训练自己的模型。 - -```python -from mindspore import nn -from mindspore.common.initializer import TruncatedNormal - - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0): - weight = weight_variable() - return nn.Conv2d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, padding=padding, - 
weight_init=weight, has_bias=False, pad_mode="valid") - - -def fc_with_initialize(input_channels, out_channels): - weight = weight_variable() - bias = weight_variable() - return nn.Dense(input_channels, out_channels, weight, bias) - - -def weight_variable(): - return TruncatedNormal(0.05) - - -class LeNet5(nn.Cell): - """ - LeNet network - """ - def __init__(self): - super(LeNet5, self).__init__() - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16*5*5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, 10) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x -``` - -加载LeNet网络,定义损失函数、配置checkpoint、用上述定义的数据加载函数`generate_mnist_dataset`载入数据。 - -```python -network = LeNet5() -net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") -config_ck = CheckpointConfig(save_checkpoint_steps=cfg.save_checkpoint_steps, - keep_checkpoint_max=cfg.keep_checkpoint_max) -ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet", - directory='./trained_ckpt_file/', - config=config_ck) - -# get training dataset -ds_train = generate_mnist_dataset(os.path.join(cfg.data_path, "train"), - cfg.batch_size) -``` - -### 引入差分隐私 - -1. 
配置差分隐私优化器的参数。 - - - 判断`micro_batches`和`batch_size`参数是否符合要求,`batch_size`必须要整除`micro_batches`。 - - 实例化差分隐私工厂类。 - - 设置差分隐私的噪声机制,目前mechanisms支持固定标准差的高斯噪声机制:`Gaussian`和自适应调整标准差的高斯噪声机制:`AdaGaussian`。 - - 设置优化器类型,目前支持`SGD`、`Momentum`和`Adam`。 - - 设置差分隐私预算监测器RDP,用于观测每个step中的差分隐私预算$\epsilon$的变化。 - - ```python - if cfg.micro_batches and cfg.batch_size % cfg.micro_batches != 0: - raise ValueError( - "Number of micro_batches should divide evenly batch_size") - # Create a factory class of DP noise mechanisms, this method is adding noise - # in gradients while training. Initial_noise_multiplier is suggested to be - # greater than 1.0, otherwise the privacy budget would be huge, which means - # that the privacy protection effect is weak. Mechanisms can be 'Gaussian' - # or 'AdaGaussian', in which noise would be decayed with 'AdaGaussian' - # mechanism while be constant with 'Gaussian' mechanism. - noise_mech = NoiseMechanismsFactory().create(cfg.noise_mechanisms, - norm_bound=cfg.norm_bound, - initial_noise_multiplier=cfg.initial_noise_multiplier, - decay_policy=None) - # Create a factory class of clip mechanisms, this method is to adaptive clip - # gradients while training, decay_policy support 'Linear' and 'Geometric', - # learning_rate is the learning rate to update clip_norm, - # target_unclipped_quantile is the target quantile of norm clip, - # fraction_stddev is the stddev of Gaussian normal which used in - # empirical_fraction, the formula is - # $empirical_fraction + N(0, fraction_stddev)$. - clip_mech = ClipMechanismsFactory().create(cfg.clip_mechanisms, - decay_policy=cfg.clip_decay_policy, - learning_rate=cfg.clip_learning_rate, - target_unclipped_quantile=cfg.target_unclipped_quantile, - fraction_stddev=cfg.fraction_stddev) - net_opt = nn.Momentum(params=network.trainable_params(), - learning_rate=cfg.lr, momentum=cfg.momentum) - # Create a monitor for DP training. The function of the monitor is to - # compute and print the privacy budget(eps and delta) while training. 
- rdp_monitor = PrivacyMonitorFactory.create('rdp', - num_samples=60000, - batch_size=cfg.batch_size, - initial_noise_multiplier=cfg.initial_noise_multiplier, - per_print_times=234, - noise_decay_mode=None) - ``` - -2. 将LeNet模型包装成差分隐私模型,只需要将网络传入`DPModel`即可。 - - ```python - # Create the DP model for training. - model = DPModel(micro_batches=cfg.micro_batches, - norm_bound=cfg.norm_bound, - noise_mech=noise_mech, - clip_mech=clip_mech, - network=network, - loss_fn=net_loss, - optimizer=net_opt, - metrics={"Accuracy": Accuracy()}) - ``` - -3. 模型训练与测试。 - - ```python - LOGGER.info(TAG, "============== Starting Training ==============") - model.train(cfg['epoch_size'], ds_train, - callbacks=[ckpoint_cb, LossMonitor(), rdp_monitor], - dataset_sink_mode=cfg.dataset_sink_mode) - - LOGGER.info(TAG, "============== Starting Testing ==============") - ckpt_file_name = 'trained_ckpt_file/checkpoint_lenet-10_234.ckpt' - param_dict = load_checkpoint(ckpt_file_name) - load_param_into_net(network, param_dict) - ds_eval = generate_mnist_dataset(os.path.join(cfg.data_path, 'test'), - batch_size=cfg.batch_size) - acc = model.eval(ds_eval, dataset_sink_mode=False) - LOGGER.info(TAG, "============== Accuracy: %s ==============", acc) - ``` - -4. 运行命令。 - - 运行脚本,可在命令行输入命令: - - ```bash - python lenet_dp.py - ``` - - 其中`lenet5_dp.py`替换成你的脚本的名字。 - -5. 结果展示。 - - 不加差分隐私的LeNet模型精度稳定在99%,加了Gaussian噪声,自适应Clip的差分隐私LeNet模型收敛,精度稳定在95%左右。 - - ```text - ============== Starting Training ============== - ... - ============== Starting Testing ============== - ... - ============== Accuracy: 0.9698 ============== - ``` - -### 引用 - -[1] C. Dwork and J. Lei. Differential privacy and robust statistics. In STOC, pages 371–380. ACM, 2009. - -[2] Ilya Mironov. Rényi differential privacy. In IEEE Computer Security Foundations Symposium, 2017. - -[3] Abadi, M. e. a., 2016. *Deep learning with differential privacy.* s.l.:Proceedings of the 2016 ACM SIGSAC Conference on Computer and Communications Security. 
diff --git a/tutorials/training/source_zh_cn/advanced_use/protect_user_privacy_with_suppress_privacy.md b/tutorials/training/source_zh_cn/advanced_use/protect_user_privacy_with_suppress_privacy.md deleted file mode 100644 index e339ffa5875750d420a548dcbf7a3bbf04cd8641..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/protect_user_privacy_with_suppress_privacy.md +++ /dev/null @@ -1,489 +0,0 @@ -# 应用抑制隐私机制保护用户隐私 - -`Linux` `Ascend` `模型训练` `模型调优` `企业` `高级` - - - -- [应用抑制隐私机制保护用户隐私](#应用抑制隐私机制保护用户隐私) - - [概述](#概述) - - [实现阶段](#实现阶段) - - [导入需要的库文件](#导入需要的库文件) - - [参数配置](#参数配置) - - [预处理数据集](#预处理数据集) - - [建立模型](#建立模型) - - [引入抑制隐私](#引入抑制隐私) - - [隐私保护效果测试](#隐私保护效果测试) - - [引用](#引用) - - - - - -## 概述 - -抑制隐私是一种保护用户数据隐私的机制。抑制隐私是在AI训练过程中保护用户隐私的一种方法。通过去除模型中不重要的参数,使得其参数量得以大幅度的减少,减少了模型可能泄露的输入样本信息,从而大大降低通过模型逆向攻击获得原始样本的可能性。实验表明抑制隐私技术相对于差分隐私能够在某些模型的训练精度和隐私保护程度之间取得更好的平衡。 - -### MindArmour实现的抑制隐私 - -MindArmour的抑制隐私模块Suppress-Privacy,实现了抑制隐私优化器。在模型训练过程中,不重要的参数会按照一定的比例逐渐地被设置为0,最终只保留5-10%的参数。 - -这里以LeNet模型,MNIST 数据集为例,说明如何在MindSpore上使用抑制隐私优化器训练神经网络模型。 - -> 本例面向Ascend 910 AI处理器,你可以在这里下载完整的样例代码: - -## 实现阶段 - -### 导入需要的库文件 - -下列是我们需要的公共模块、MindSpore相关模块和抑制隐私特性模块。 - -```python -import os -from easydict import EasyDict as edict -import mindspore.nn as nn -from mindspore import context -from mindspore.train.callback import ModelCheckpoint -from mindspore.train.callback import CheckpointConfig -from mindspore.train.callback import LossMonitor -from mindspore.nn.metrics import Accuracy -from mindspore import load_checkpoint, load_param_into_net -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as CV -import mindspore.dataset.transforms.c_transforms as C -from mindspore.dataset.vision.utils import Inter -from mindspore import dtype as mstype - -from examples.common.networks.lenet5.lenet5_net import LeNet5 - -from mindarmour.privacy.sup_privacy import SuppressModel -from mindarmour.privacy.sup_privacy import SuppressMasker -from 
mindarmour.privacy.sup_privacy import SuppressPrivacyFactory -from mindarmour.privacy.sup_privacy import MaskLayerDes - -from mindarmour.utils.logger import LogUtil - -LOGGER = LogUtil.get_instance() -LOGGER.set_level('INFO') -TAG = 'Lenet5_Suppress_train' -``` - -### 参数配置 - -1. 设置运行环境、模型训练参数、checkpoint存储参数,batch_size参数建议不要超过64。更多配置可以参考。 - - ```python - cfg = edict({ - 'num_classes': 10, # the number of classes of model's output - 'batch_size': 32, # batch size for training - 'image_height': 32, # the height of training samples - 'image_width': 32, # the width of training samples - 'keep_checkpoint_max': 10, # the maximum number of checkpoint files would be saved - 'device_target': 'Ascend', # device used - }) - ``` - -2. 配置必要的信息,包括环境信息、执行的模式。目前支持Ascend上的PyNative模式。 - - ```python - context.set_context(mode=context.PYNATIVE_MODE, device_target=cfg.device_target) - ``` - - 详细的接口配置信息,请参见`context.set_context`接口说明。 - -### 预处理数据集 - -加载数据集并处理成MindSpore数据格式。 - -```python -def generate_mnist_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1, sparse=True): - """ - create dataset for training or testing - """ - # define dataset - ds1 = ds.MnistDataset(data_path) - - # define operation parameters - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), - interpolation=Inter.LINEAR) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - # apply map operations on images - if not sparse: - one_hot_enco = C.OneHot(10) - ds1 = ds1.map(operations=one_hot_enco, input_columns="label", - num_parallel_workers=num_parallel_workers) - type_cast_op = C.TypeCast(mstype.float32) - ds1 = ds1.map(operations=type_cast_op, input_columns="label", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=resize_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - ds1 = 
ds1.map(operations=rescale_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - ds1 = ds1.map(operations=hwc2chw_op, input_columns="image", - num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - ds1 = ds1.shuffle(buffer_size=buffer_size) - ds1 = ds1.batch(batch_size, drop_remainder=True) - ds1 = ds1.repeat(repeat_size) - - return ds1 -``` - -### 建立模型 - -这里以LeNet模型为例,您也可以建立训练自己的模型。 - -加载LeNet网络,配置checkpoint、设置优化器类型,用上述定义的数据加载函数`generate_mnist_dataset`载入数据。 - -```python -networks_l5 = LeNet5() -config_ck = CheckpointConfig(save_checkpoint_steps=10, - keep_checkpoint_max=cfg.keep_checkpoint_max) -ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet", - directory='./trained_ckpt_file/', - config=config_ck) - -# get training dataset -ds_train = generate_mnist_dataset('MNIST_unzip/train', cfg.batch_size) -``` - -### 引入抑制隐私训练 - -1. 配置抑制隐私优化器的参数 - - - 定义AI模型的哪些层参与suppress操作。 - - 实例化抑制隐私工厂类。 - - 定义损失函数。 - - 设置优化器类型。 - - 如果样本数为60000,推荐的参数设置为end_epoch:10,start_epoch:3,mask_times:1000,lr:0.10,sparse_end:0.95,sparse_start:0.0。 - 这样相邻两次suppress操作的间隔大致在10~20个batch。 - - ```python - # layer_name (str): Layer name, get the name of one layer as following: - # for layer in networks.get_parameters(expand=True): - # if layer.name == "conv": ... - # grad_idx (int): Grad layer index, get mask layer's index in grad tuple. - # is_add_noise (bool): If True, the weight of this layer can add noise. - # If False, the weight of this layer can not add noise. - # is_lower_clip (bool): If true, the weights of this layer would be clipped to greater than an lower bound value. - # If False, the weights of this layer won't be clipped. - # min_num (int): The number of weights left that not be suppressed, which need to be greater than 0. - # upper_bound (float): max value of weight in this layer, default value is 1.20 . 
- masklayers_lenet5 = [] # determine which layer should be masked - masklayers_lenet5.append(MaskLayerDes("conv1.weight", 0, True, True, 10)) - masklayers_lenet5.append(MaskLayerDes("conv2.weight", 1, True, True, 50)) - masklayers_lenet5.append(MaskLayerDes("fc1.weight", 2, True, False, -1)) - masklayers_lenet5.append(MaskLayerDes("fc2.weight", 4, True, False, -1)) - masklayers_lenet5.append(MaskLayerDes("fc3.weight", 6, True, False, 50)) - - # networks (Cell): The training network. - # mask_layers (list): Description of the training network layers that need to be suppressed. - # policy (str): Training policy for suppress privacy training. "local_train" means local training. - # end_epoch (int): The last epoch in suppress operations, 0 < start_epoch <= end_epoch <= 100 . - # batch_num (int): The num of batch in an epoch, should be equal to num_samples/batch_size . - # start_epoch (int): The first epoch in suppress operations, 0 < start_epoch <= end_epoch <= 100 . - # mask_times (int): The num of suppress operations. - # lr (Union[float, int]): Learning rate, 0 < lr <= 0.5 . - # sparse_end (float): The sparsity to reach, 0.0 <= sparse_start < sparse_end < 1.0 . - # sparse_start (float): The sparsity to start, 0.0 <= sparse_start < sparse_end < 1.0 . - suppress_ctrl_instance = SuppressPrivacyFactory().create(networks_l5, - masklayers_lenet5, - policy="local_train", - end_epoch=10, - batch_num=1875, - start_epoch=3, - mask_times=1000, - lr=0.05, - sparse_end=0.95, - sparse_start=0.0) - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.SGD(networks_l5.trainable_params(), 0.05) - ``` - -2. 将LeNet模型包装成抑制隐私模型 - - - 实例化抑制隐私模型类SuppressModel,用于执行模型训练过程。 - - 实例化抑制隐私监测器SuppressMasker,用于在模型训练中选择合适时机对模型参数进行suppress(置零)操作。 - - ```python - # Create the suppress model for training. 
- model_instance = SuppressModel(network=networks_l5, - loss_fn=net_loss, - optimizer=net_opt, - metrics={"Accuracy": Accuracy()}) - model_instance.link_suppress_ctrl(suppress_ctrl_instance) - suppress_masker = SuppressMasker(model=model_instance, suppress_ctrl=suppress_ctrl_instance) - ``` - -3. 模型训练与测试 - - ```python - LOGGER.info(TAG, "============== Starting SUPP Training ==============") - model_instance.train(10, ds_train, callbacks=[ckpoint_cb, LossMonitor(), suppress_masker], - dataset_sink_mode=False) - - LOGGER.info(TAG, "============== Starting SUPP Testing ==============") - ds_eval = generate_mnist_dataset('MNIST_unzip/test', batch_size=cfg.batch_size) - acc = model_instance.eval(ds_eval, dataset_sink_mode=False) - LOGGER.info(TAG, "============== SUPP Accuracy: %s ==============", acc) - ``` - -4. 运行命令 - - 运行脚本,可在命令行输入命令: - - ```bash - python examples/privacy/sup_privacy/sup_privacy.py - ``` - - 其中`sup_privacy.py`替换成你的脚本的名字。 - -5. 结果展示 - - 不加抑制隐私的LeNet模型精度稳定在99%,使用抑制隐私LeNet模型收敛,精度稳定在97.5%左右。 - - ```text - ============== Starting SUPP Training ============== - ... - ============== Starting SUPP Testing ============== - ... - ============== SUPP Accuracy: 0.9745 ============== - ``` - -### 隐私保护效果测试 - -为了评估抑制隐私训练对数据集的保护效果,我们使用图像逆向攻击进行测试, -这种逆向攻击可以根据原始图片在神经网络某一层的输出来反向还原出原始图片,主要原因是网络在训练的过程中“记住”了训练集的特征, -这种攻击方法的原理可以参考,完整的代码实现可以参考 -,下面介绍详细的测试步骤: - -1. 
准备工作 - - 为了和抑制隐私训练进行对比,我们需要先使用常规训练得到模型的CheckPoint文件。模型训练可以参考 - [mindarmour/examples/common/networks/lenet5](https://gitee.com/mindspore/mindarmour/blob/master/examples/common/networks/lenet5/mnist_train.py) , - 它的目录结构如下: - - ```text - ├── __init__.py - ├── lenet5_net.py - └── mnist_train.py - ``` - - 其中`lenet5_net.py`为LeNet5的模型定义,`mnist_train.py`为LeNet5的常规训练脚本。在该目录下运行如下命令,即可生成包含模型CheckPoint文件的`trained_ckpt_file`文件夹。 - - ```python - python mnist_train.py - ``` - - 此外,由于下面的步骤7中需要用到新训练的模型进行攻击效果的评估,我们在生成`trained_ckpt_file`目录后,将`mnist_train.py`文件中的变量`ckpoint_cb`的生成命令改成: - - ```python - ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet", - directory="./new_trained_ckpt_file/", - config=config_ck) - ``` - - 其中`prefix`代表生成的CheckPoint文件名的前缀,`directory`代表CheckPoint文件的存放路径,再运行`mnist_train.py`, - 就可以得到`new_trained_ckpt_file`文件夹及包含其中的模型文件。此时`examples/common/networks/lenet5`的目录结构应该如下所示: - - ```text - ├── __init__.py - ├── lenet5_net.py - ├── mnist_train.py - ├── new_trained_ckpt_file - │   ├── checkpoint_lenet-10_1875.ckpt - │   ├── checkpoint_lenet-1_1875.ckpt - │   ├── checkpoint_lenet-2_1875.ckpt - │   ├── checkpoint_lenet-3_1875.ckpt - │   ├── checkpoint_lenet-4_1875.ckpt - │   ├── checkpoint_lenet-5_1875.ckpt - │   ├── checkpoint_lenet-6_1875.ckpt - │   ├── checkpoint_lenet-7_1875.ckpt - │   ├── checkpoint_lenet-8_1875.ckpt - │   ├── checkpoint_lenet-9_1875.ckpt - │   └── checkpoint_lenet-graph.meta - └── trained_ckpt_file - ├── checkpoint_lenet-10_1875.ckpt - ├── checkpoint_lenet-1_1875.ckpt - ├── checkpoint_lenet-2_1875.ckpt - ├── checkpoint_lenet-3_1875.ckpt - ├── checkpoint_lenet-4_1875.ckpt - ├── checkpoint_lenet-5_1875.ckpt - ├── checkpoint_lenet-6_1875.ckpt - ├── checkpoint_lenet-7_1875.ckpt - ├── checkpoint_lenet-8_1875.ckpt - ├── checkpoint_lenet-9_1875.ckpt - └── checkpoint_lenet-graph.meta - ``` - -2. 
导入需要的模块 - - ```python - import numpy as np - import matplotlib.pyplot as plt - from scipy.special import softmax - from mindspore import load_checkpoint, load_param_into_net - from mindspore import Tensor, context - from mindspore import nn - from mindarmour.privacy.evaluation import ImageInversionAttack - from mindarmour.utils.logger import LogUtil - from examples.common.networks.lenet5.lenet5_net import LeNet5, conv, fc_with_initialize - from examples.common.dataset.data_processing import generate_mnist_dataset - LOGGER = LogUtil.get_instance() - LOGGER.set_level('INFO') - TAG = 'InversionAttack' - ``` - -3. 构建逆向测试网络 - - 为了更好地演示,我们取LeNet5的前面两个卷积层conv1、conv2和第一个全连接层fc1作为测试网络,于是攻击任务就是:根据某一图片从fc1输出的feature map来还原出该图片。 - - ```python - class LeNet5_part(nn.Cell): - """ - Part of LeNet5 network. - """ - def __init__(self): - super(LeNet5_part, self).__init__() - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16*5*5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, 10) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - return x - ``` - -4. 将训练好的CheckPoint文件导入模型 - - ```python - Checkpoint_path = '../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' - load_dict = load_checkpoint(Checkpoint_path) - net = LeNet5_part() - load_param_into_net(net, load_dict) - ``` - -5. 
获取测试样本 - - 我们取30张图片进行测试,保存好它们本身以及它们经过`LeNet5_part`的输出(即`target_features`)。 - - ```python - # get original data - data_list = "../../common/dataset/MNIST/train" - batch_size = 32 - ds = generate_mnist_dataset(data_list, batch_size) - i = 0 - batch_num = 1 - sample_num = 30 - for data in ds.create_tuple_iterator(output_numpy=True): - i += 1 - images = data[0].astype(np.float32) - true_labels = data[1][: sample_num] - target_features = net(Tensor(images)).asnumpy()[:sample_num] - original_images = images[: sample_num] - if i >= batch_num: - break - ``` - -6. 进行逆向攻击 - - ```python - inversion_attack = ImageInversionAttack(net, input_shape=(1, 32, 32), input_bound=(0, 1), loss_weights=[1, 0.1, 5]) - inversion_images = inversion_attack.generate(target_features, iters=100) - ``` - -7. 攻击结果评估和展示 - - 我们用matplotlib画出原始图像以及用逆向攻击还原出来的图像,并且调用`inversion_attack`的`evaluate`方法进行定量评估, - `evaluate`方法会返回`avg_l2_dis`,`avg_ssim`和`avg_confi`,分别表示原图与逆向还原的图像之间的平均L2 - 范数距离和平均结构相似性,以及逆向还原出来的图片在一个新模型上的推理结果(在其真实标签上的平均置信度)。 - 一般来说,`avg_l2_dis`越小、`avg_ssim`越大,则代表inversion_images与original_images越接近;而新的神经网络模型可以替代人眼对图片的可识别度做一个定量的评估(即`avg_confi`越高,说明inversion_image包含的语义信息与原图更为接近)。 - - ```python - plot_num = min(sample_num, 10) - for n in range(1, plot_num+1): - plt.subplot(2, plot_num, n) - if n == 1: - plt.title('Original images', fontsize=12, loc='left') - plt.gray() - plt.imshow(images[n - 1].reshape(32, 32)) - plt.subplot(2, plot_num, n + plot_num) - if n == 1: - plt.title('Inverted images based on ordinary trained model', fontsize=12, loc='left') - plt.gray() - plt.imshow(inversion_images[n - 1].reshape(32, 32)) - plt.show() - - net2 = LeNet5() - new_ckpt_path = '../../common/networks/lenet5/new_trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' - new_load_dict = load_checkpoint(new_ckpt_path) - load_param_into_net(net2, new_load_dict) - pred_labels = np.argmax(net2(Tensor(inversion_images).astype(np.float32)).asnumpy(), axis=1) - - avg_l2_dis, avg_ssim, avg_confi = 
inversion_attack.evaluate(original_images, inversion_images, true_labels, net2) - LOGGER.info(TAG, 'The average L2 distance between original images and inverted images is: {}'.format(avg_l2_dis)) - LOGGER.info(TAG, 'The average ssim value between original images and inverted images is: {}'.format(avg_ssim)) - LOGGER.info(TAG, 'The average prediction confidence on true labels of inverted images is: {}'.format(avg_confi)) - LOGGER.info(TAG, 'True labels of original images are: %s' % true_labels) - LOGGER.info(TAG, 'Predicted labels of inverted images are: %s' % pred_labels) - ``` - -8. 实验结果 - - ```text - The average L2 distance between original images and inverted images is: 0.8294931122450715 - The average ssim value between original images and inverted images is: 0.2429179625584347 - The average prediction confidence on true labels of inverted images is: 0.9547292590141296 - True labels of original images are: [5 7 1 0 4 3 1 5 5 9 5 0 9 9 7 5 4 2 1 7 4 0 0 6 2 6 0 6 6 6] - Predicted labels of inverted images are: [5 7 1 0 4 3 1 5 5 9 5 0 9 9 7 5 4 2 1 7 4 0 0 6 2 6 0 6 6 6] - ``` - - ![fuzz_seed](./images/inversion_ordinary.png) - - 我们可以从inversion_images看出original_images的大概轮廓了,说明常规训练的模型很可能会导致训练集的隐私泄露。 - **为了验证抑制隐私训练得到的模型可以更好地保护训练数据的信息**,我们将上述步骤4中的CheckPoint文件换成抑制隐私训练得到的CheckPoint文件,并执行上述步骤2至步骤7的过程,可以得到如下结果: - - ```text - The average L2 distance between original images and inverted images is: 0.862553358599391 - The average ssim value between original images and inverted images is: 0.2644709319921787 - The average prediction confidence on true labels of inverted images is: 0.5576204061508179 - True labels of original images are: [9 2 2 0 1 2 9 8 5 0 7 3 4 8 9 0 6 6 7 2 0 6 7 5 8 8 1 6 7 9] - Predicted labels of inverted images are: [8 2 2 0 1 2 7 8 5 0 7 3 4 8 9 7 6 6 7 2 0 6 7 5 8 8 1 5 7 9] - ``` - - ![fuzz_seed](./images/inversion_sup.png) - - 首先,从可视化上结果来看,基于抑制隐私训练得到的模型进行逆向攻击,效果很差;但这种情形下得到的avg_l2_dis和avg_ssim和上一种情形很接近, - 
这主要是由于avg_l2_dis和avg_ssim只能根据图片像素的均值和标准差比较图片之间的低阶信息,而avg_confi可以比较图片之间的高阶语义信息。 - - 本实验使用的样本是MNIST数据集,这类图片较为简单,黑色背景占了图片的大部分区域,而包含主要信息的白色部分占据的区域较少。但可以看到, - 基于抑制隐私模型得到的avg_confi明显低于上一组实验,这说明逆向构造出来的图片已经比较难被新模型识别出来了,这个结果和我们人眼观察的结果是一致的。 - -### 引用 - -[1] Ligeng Zhu, Zhijian Liu, and Song Han. [Deep Leakage from Gradients](http://arxiv.org/pdf/1906.08935.pdf). NeurIPS, 2019. - -[2] Aravindh Mahendran, Andrea Vedaldi. [Understanding Deep Image Representations by Inverting Them](https://arxiv.org/pdf/1412.0035.pdf). CVPR, 2015. \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/advanced_use/qnn_for_nlp.ipynb b/tutorials/training/source_zh_cn/advanced_use/qnn_for_nlp.ipynb deleted file mode 100644 index 65fd4cd5575d5a38108b8b867031cd256d5fac62..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/qnn_for_nlp.ipynb +++ /dev/null @@ -1,847 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 量子神经网络在自然语言处理中的应用\n", - "\n", - "\n", - "`Linux` `CPU` `全流程` `初级` `中级` `高级`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/advanced_use/qnn_for_nlp.ipynb)\n", - "\n", - "## 概述\n", - "\n", - "在自然语言处理过程中,词嵌入(Word embedding)是其中的重要步骤,它是一个将高维度空间的词向量嵌入到一个维数更低的连续向量空间的过程。当给予神经网络的语料信息不断增加时,网络的训练过程将越来越困难。利用量子力学的态叠加和纠缠等特性,我们可以利用量子神经网络来处理这些经典语料信息,加入其训练过程,并提高收敛精度。下面,我们将简单地搭建一个量子经典混合神经网络来完成一个词嵌入任务。\n", - "\n", - "\n", - "## 环境准备\n", - "\n", - "导入本教程所依赖模块\n" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import time\n", - "from projectq.ops import QubitOperator\n", - "import mindspore.ops as ops\n", - "import mindspore.dataset as ds\n", - "from mindspore import nn\n", - "from mindspore.train.callback import LossMonitor\n", - "from mindspore import Model\n", - "from 
mindquantum.nn import MindQuantumLayer\n", - "from mindquantum import Hamiltonian, Circuit, RX, RY, X, H, UN" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "本教程实现的是一个[CBOW模型](https://blog.csdn.net/u010665216/article/details/78724856),即利用某个词所处的环境来预测该词。例如对于“I love natural language processing”这句话,我们可以将其切分为5个词,\\[\"I\", \"love\", \"natural\", \"language\", \"processing”\\],在所选窗口为2时,我们要处理的问题是利用\\[\"I\", \"love\", \"language\", \"processing\"\\]来预测出目标词汇\"natural\"。这里我们以窗口为2为例,搭建如下的量子神经网络,来完成词嵌入任务。\n", - "\n", - "![quantum word embedding](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/qcbow.png)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "这里,编码线路会将\"I\"、\"love\"、\"language\"和\"processing\"的编码信息编码到量子线路中,待训练的量子线路由四个Ansatz线路构成,最后我们在量子线路末端对量子比特做$\\text{Z}$基矢上的测量,具体所需测量的比特的个数由所需嵌入空间的维数确定。\n", - "\n", - "## 数据预处理\n", - "\n", - "我们对所需要处理的语句进行处理,生成关于该句子的词典,并根据窗口大小来生成样本点。\n" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "def GenerateWordDictAndSample(corpus, window=2):\n", - " all_words = corpus.split()\n", - " word_set = list(set(all_words))\n", - " word_set.sort()\n", - " word_dict = {w: i for i,w in enumerate(word_set)}\n", - " sampling = []\n", - " for index, word in enumerate(all_words[window:-window]):\n", - " around = []\n", - " for i in range(index, index + 2*window + 1):\n", - " if i != index + window:\n", - " around.append(all_words[i])\n", - " sampling.append([around,all_words[index + window]])\n", - " return word_dict, sampling" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'I': 0, 'language': 1, 'love': 2, 'natural': 3, 'processing': 4}\n", - "word dict size: 5\n", - "samples: [[['I', 'love', 'language', 'processing'], 'natural']]\n", - "number of samples: 1\n" - ] - } - ], - 
"source": [ - "word_dict, sample = GenerateWordDictAndSample(\"I love natural language processing\")\n", - "print(word_dict)\n", - "print('word dict size: ', len(word_dict))\n", - "print('samples: ', sample)\n", - "print('number of samples: ', len(sample))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "根据如上信息,我们得到该句子的词典大小为5,能够产生一个样本点。\n", - "\n", - "## 编码线路\n", - "\n", - "为了简单起见,我们使用的编码线路由$\\text{RX}$旋转门构成,结构如下。\n", - "\n", - "![encoder circuit](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/encoder.png)\n", - "\n", - "我们对每个量子门都作用一个$\\text{RX}$旋转门。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "def GenerateEncoderCircuit(n_qubits, prefix=''):\n", - " if len(prefix) != 0 and prefix[-1] != '_':\n", - " prefix += '_'\n", - " circ = Circuit()\n", - " for i in range(n_qubits):\n", - " circ += RX(prefix + str(i)).on(i)\n", - " return circ" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "RX(e_0|0)\n", - "RX(e_1|1)\n", - "RX(e_2|2)" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "GenerateEncoderCircuit(3,prefix='e')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "我们通常用$\\left|0\\right>$和$\\left|1\\right>$来标记二能级量子比特的两个状态,由态叠加原理,量子比特还可以处于这两个状态的叠加态:\n", - "\n", - "$$\\left|\\psi\\right>=\\alpha\\left|0\\right>+\\beta\\left|1\\right>$$\n", - "\n", - "对于$n$比特的量子态,其将处于$2^n$维的希尔伯特空间中。对于上面由5个词构成的词典,我们只需要$\\lceil \\log_2 5 \\rceil=3$个量子比特即可完成编码,这也体现出量子计算的优越性。\n", - "\n", - "例如对于上面词典中的\"love\",其对应的标签为2,2的二进制表示为`010`,我们只需将编码线路中的`e_0`、`e_1`和`e_2`分别设为$0$、$\\pi$和$0$即可。下面我们通过`Evolution`算子来验证以下。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Label is: 2\n", - "Binary 
label is: 010\n", - "Parameters of encoder is: \n", - " [0. 3.14159 0. ]\n", - "Encoder circuit is: \n", - " RX(e_0|0)\n", - "RX(e_1|1)\n", - "RX(e_2|2)\n", - "Encoder parameter names are: \n", - " ['e_0', 'e_1', 'e_2']\n", - "Amplitude of quantum state is: \n", - " [0. 0. 1. 0. 0. 0. 0. 0.]\n", - "Label in quantum state is: 2\n" - ] - } - ], - "source": [ - "from mindquantum.nn import generate_evolution_operator\n", - "from mindspore import context\n", - "from mindspore import Tensor\n", - "\n", - "n_qubits = 3 # number of qubits of this quantum circuit\n", - "label = 2 # label need to encode\n", - "label_bin = bin(label)[-1:1:-1].ljust(n_qubits,'0') # binary form of label\n", - "label_array = np.array([int(i)*np.pi for i in label_bin]).astype(np.float32) # parameter value of encoder\n", - "encoder = GenerateEncoderCircuit(n_qubits, prefix='e') # encoder circuit\n", - "encoder_para_names = encoder.parameter_resolver().para_name # parameter names of encoder\n", - "\n", - "print(\"Label is: \", label)\n", - "print(\"Binary label is: \", label_bin)\n", - "print(\"Parameters of encoder is: \\n\", np.round(label_array, 5))\n", - "print(\"Encoder circuit is: \\n\", encoder)\n", - "print(\"Encoder parameter names are: \\n\", encoder_para_names)\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")\n", - "# quantum state evolution operator\n", - "evol = generate_evolution_operator(param_names=encoder_para_names, circuit=encoder)\n", - "state = evol(Tensor(label_array))\n", - "state = state.asnumpy()\n", - "quantum_state = state[:, 0] + 1j * state[:, 1]\n", - "amp = np.round(np.abs(quantum_state)**2, 3)\n", - "\n", - "print(\"Amplitude of quantum state is: \\n\", amp)\n", - "print(\"Label in quantum state is: \", np.argmax(amp))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "通过上面的验证,我们发现,对于标签为2的数据,最后得到量子态的振幅最大的位置也是2,因此得到的量子态正是对输入标签的编码。我们将对数据编码生成参数数值的过程总结成如下函数。" - ] - }, - { - "cell_type": "code", - 
"execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "def GenerateTrainData(sample, word_dict):\n", - " n_qubits = np.int(np.ceil(np.log2(1 + max(word_dict.values()))))\n", - " data_x = []\n", - " data_y = []\n", - " for around, center in sample:\n", - " data_x.append([])\n", - " for word in around:\n", - " label = word_dict[word]\n", - " label_bin = bin(label)[-1:1:-1].ljust(n_qubits,'0')\n", - " label_array = [int(i)*np.pi for i in label_bin]\n", - " data_x[-1].extend(label_array)\n", - " data_y.append(word_dict[center])\n", - " return np.array(data_x).astype(np.float32), np.array(data_y).astype(np.int32)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(array([[0. , 0. , 0. , 0. , 3.1415927, 0. ,\n", - " 3.1415927, 0. , 0. , 0. , 0. , 3.1415927]],\n", - " dtype=float32),\n", - " array([3], dtype=int32))" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "GenerateTrainData(sample, word_dict)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "根据上面的结果,我们将4个输入的词编码的信息合并为一个更长向量,便于后续神经网络调用。\n", - "\n", - "## Ansatz线路\n", - "\n", - "Ansatz线路的选择多种多样,我们选择如下的量子线路作为Ansatz线路,它的一个单元由一层$\\text{RY}$门和一层$\\text{CNOT}$门构成,对此单元重复$p$次构成整个Ansatz线路。\n", - "\n", - "![ansatz circuit](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/ansatz.png)\n", - "\n", - "定义如下函数生成Ansatz线路。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "def GenerateAnsatzCircuit(n_qubits, layers, prefix=''):\n", - " if len(prefix) != 0 and prefix[-1] != '_':\n", - " prefix += '_'\n", - " circ = Circuit()\n", - " for l in range(layers):\n", - " for i in range(n_qubits):\n", - " circ += RY(prefix + str(l) + '_' + str(i)).on(i)\n", - " for i in range(l % 2, n_qubits, 2):\n", - " if i < n_qubits and i + 1 < n_qubits:\n", - " 
circ += X.on(i + 1, i)\n", - " return circ" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "RY(a_0_0|0)\n", - "RY(a_0_1|1)\n", - "RY(a_0_2|2)\n", - "RY(a_0_3|3)\n", - "RY(a_0_4|4)\n", - "X(1 <-: 0)\n", - "X(3 <-: 2)\n", - "RY(a_1_0|0)\n", - "RY(a_1_1|1)\n", - "RY(a_1_2|2)\n", - "RY(a_1_3|3)\n", - "RY(a_1_4|4)\n", - "X(2 <-: 1)\n", - "X(4 <-: 3)" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "GenerateAnsatzCircuit(5, 2, 'a')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 测量\n", - "\n", - "我们把对不同比特位上的测量结果作为降维后的数据。具体过程与比特编码类似,例如当我们想将词向量降维为5维向量时,对于第3维的数据可以如下产生:\n", - "\n", - "- 3对应的二进制为`00011`。\n", - "- 测量量子线路末态对$Z_0Z_1$哈密顿量的期望值。\n", - "\n", - "下面函数将给出产生各个维度上数据所需的哈密顿量(hams),其中`n_qubits`表示线路的比特数,`dims`表示词嵌入的维度:" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "def GenerateEmbeddingHamiltonian(dims, n_qubits):\n", - " hams = []\n", - " for i in range(dims):\n", - " s = ''\n", - " for j, k in enumerate(bin(i + 1)[-1:1:-1]):\n", - " if k == '1':\n", - " s = s + 'Z' + str(j) + ' '\n", - " hams.append(Hamiltonian(QubitOperator(s)))\n", - " return hams" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[1.0 Z0, 1.0 Z1, 1.0 Z0 Z1, 1.0 Z2, 1.0 Z0 Z2]" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "GenerateEmbeddingHamiltonian(5, 5)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 量子版词向量嵌入层\n", - "\n", - "量子版词向量嵌入层结合前面的编码量子线路和待训练量子线路,以及测量哈密顿量,将`num_embedding`个词嵌入为`embedding_dim`维的词向量。这里我们还在量子线路的最开始加上了Hadamard门,将初态制备为均匀叠加态,用以提高量子神经网络的表达能力。\n", - "\n", - "下面,我们定义量子嵌入层,它将返回一个量子线路模拟算子。" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - 
"outputs": [], - "source": [ - "def QEmbedding(num_embedding, embedding_dim, window, layers, n_threads):\n", - " n_qubits = int(np.ceil(np.log2(num_embedding)))\n", - " hams = GenerateEmbeddingHamiltonian(embedding_dim, n_qubits)\n", - " circ = Circuit()\n", - " circ = UN(H, n_qubits)\n", - " encoder_param_name = []\n", - " ansatz_param_name = []\n", - " for w in range(2 * window):\n", - " encoder = GenerateEncoderCircuit(n_qubits, 'Encoder_' + str(w))\n", - " ansatz = GenerateAnsatzCircuit(n_qubits, layers, 'Ansatz_' + str(w))\n", - " encoder.no_grad()\n", - " circ += encoder\n", - " circ += ansatz\n", - " encoder_param_name.extend(list(encoder.parameter_resolver()))\n", - " ansatz_param_name.extend(list(ansatz.parameter_resolver()))\n", - " net = MindQuantumLayer(encoder_param_name,\n", - " ansatz_param_name,\n", - " circ,\n", - " hams,\n", - " n_threads=n_threads)\n", - " return net" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "整个训练模型跟经典网络类似,由一个嵌入层和两个全连通层构成,然而此处的嵌入层是由量子神经网络构成。下面定义量子神经网络CBOW。" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [], - "source": [ - "class CBOW(nn.Cell):\n", - " def __init__(self, num_embedding, embedding_dim, window, layers, n_threads,\n", - " hidden_dim):\n", - " super(CBOW, self).__init__()\n", - " self.embedding = QEmbedding(num_embedding, embedding_dim, window,\n", - " layers, n_threads)\n", - " self.dense1 = nn.Dense(embedding_dim, hidden_dim)\n", - " self.dense2 = nn.Dense(hidden_dim, num_embedding)\n", - " self.relu = ops.ReLU()\n", - "\n", - " def construct(self, x):\n", - " embed = self.embedding(x)\n", - " out = self.dense1(embed)\n", - " out = self.relu(out)\n", - " out = self.dense2(out)\n", - " return out" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "下面我们对一个稍长的句子来进行训练。首先定义`LossMonitorWithCollection`用于监督收敛过程,并搜集收敛过程的损失。" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [], - 
"source": [ - "class LossMonitorWithCollection(LossMonitor):\n", - " def __init__(self, per_print_times=1):\n", - " super(LossMonitorWithCollection, self).__init__(per_print_times)\n", - " self.loss = []\n", - " \n", - " def begin(self, run_context):\n", - " self.begin_time = time.time()\n", - " \n", - " def end(self, run_context):\n", - " self.end_time = time.time()\n", - " print('Total time used: {}'.format(self.end_time - self.begin_time))\n", - " \n", - " def epoch_begin(self, run_context):\n", - " self.epoch_begin_time = time.time()\n", - " \n", - " def epoch_end(self, run_context):\n", - " cb_params = run_context.original_args()\n", - " self.epoch_end_time = time.time()\n", - " if self._per_print_times != 0 and cb_params.cur_step_num % self._per_print_times == 0:\n", - " print('')\n", - " \n", - " def step_end(self, run_context):\n", - " cb_params = run_context.original_args()\n", - " loss = cb_params.net_outputs\n", - "\n", - " if isinstance(loss, (tuple, list)):\n", - " if isinstance(loss[0], Tensor) and isinstance(loss[0].asnumpy(), np.ndarray):\n", - " loss = loss[0]\n", - "\n", - " if isinstance(loss, Tensor) and isinstance(loss.asnumpy(), np.ndarray):\n", - " loss = np.mean(loss.asnumpy())\n", - "\n", - " cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num + 1\n", - "\n", - " if isinstance(loss, float) and (np.isnan(loss) or np.isinf(loss)):\n", - " raise ValueError(\"epoch: {} step: {}. 
Invalid loss, terminating training.\".format(\n", - " cb_params.cur_epoch_num, cur_step_in_epoch))\n", - " self.loss.append(loss)\n", - " if self._per_print_times != 0 and cb_params.cur_step_num % self._per_print_times == 0:\n", - " print(\"\\repoch: %+3s step: %+3s time: %5.5s, loss is %5.5s\" % (cb_params.cur_epoch_num, cur_step_in_epoch, time.time() - self.epoch_begin_time, loss), flush=True, end='')\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "接下来,利用量子版本的`CBOW`来对一个长句进行词嵌入。运行之前请在终端运行`export OMP_NUM_THREADS=4`,将量子模拟器的线程数设置为4个,当所需模拟的量子系统比特数较多时,可设置更多的线程数来提高模拟效率。" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": { - "scrolled": true, - "tags": [] - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 25 step: 20 time: 0.592, loss is 3.154\n", - "epoch: 50 step: 20 time: 0.614, loss is 2.944\n", - "epoch: 75 step: 20 time: 0.572, loss is 0.224\n", - "epoch: 100 step: 20 time: 0.562, loss is 0.015\n", - "epoch: 125 step: 20 time: 0.545, loss is 0.009\n", - "epoch: 150 step: 20 time: 0.599, loss is 0.003\n", - "epoch: 175 step: 20 time: 0.586, loss is 0.002\n", - "epoch: 200 step: 20 time: 0.552, loss is 0.045\n", - "epoch: 225 step: 20 time: 0.590, loss is 0.001\n", - "epoch: 250 step: 20 time: 0.643, loss is 0.001\n", - "epoch: 275 step: 20 time: 0.562, loss is 0.001\n", - "epoch: 300 step: 20 time: 0.584, loss is 0.001\n", - "epoch: 325 step: 20 time: 0.566, loss is 0.000\n", - "epoch: 350 step: 20 time: 0.578, loss is 0.000\n", - "Total time used: 206.29734826087952\n" - ] - } - ], - "source": [ - "import mindspore as ms\n", - "from mindspore import context\n", - "from mindspore import Tensor\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")\n", - "corpus = \"\"\"We are about to study the idea of a computational process.\n", - "Computational processes are abstract beings that inhabit computers.\n", - "As they evolve, processes manipulate other 
abstract things called data.\n", - "The evolution of a process is directed by a pattern of rules\n", - "called a program. People create programs to direct processes. In effect,\n", - "we conjure the spirits of the computer with our spells.\"\"\"\n", - "\n", - "ms.set_seed(42)\n", - "window_size = 2\n", - "embedding_dim = 10\n", - "hidden_dim = 128\n", - "word_dict, sample = GenerateWordDictAndSample(corpus, window=window_size)\n", - "train_x,train_y = GenerateTrainData(sample, word_dict)\n", - "\n", - "train_loader = ds.NumpySlicesDataset({\n", - " \"around\": train_x,\n", - " \"center\": train_y\n", - "},shuffle=False).batch(3)\n", - "net = CBOW(len(word_dict), embedding_dim, window_size, 3, 4, hidden_dim)\n", - "net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - "net_opt = nn.Momentum(net.trainable_params(), 0.01, 0.9)\n", - "loss_monitor = LossMonitorWithCollection(500)\n", - "model = Model(net, net_loss, net_opt)\n", - "model.train(350, train_loader, callbacks=[loss_monitor], dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "打印收敛过程中的损失函数值:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "plt.plot(loss_monitor.loss,'.')\n", - "plt.xlabel('Steps')\n", - "plt.ylabel('Loss')\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "得到收敛图为\n", - "\n", - "![nlp loss](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/nlp_loss.png)\n", - "\n", - "通过如下方法打印量子嵌入层的量子线路中的参数:" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "data": { - "text/plain": [ - "array([ 1.52044818e-01, 1.71521559e-01, 2.35021308e-01, -3.95286232e-01,\n", - " -3.71680595e-03, 7.96886325e-01, -4.04954888e-02, 1.55393332e-01,\n", - " 
4.11805660e-02, 7.79824018e-01, 2.96543002e-01, -2.21819162e-01,\n", - " -4.67430688e-02, 4.66759771e-01, 2.75283188e-01, 1.35858059e-01,\n", - " -3.23841363e-01, -2.31937021e-01, -4.68942285e-01, -1.96520030e-01,\n", - " 2.16065589e-02, 1.23866223e-01, -9.68078300e-02, 1.69127151e-01,\n", - " -8.90062153e-01, 2.56734312e-01, 8.37369189e-02, -1.15734830e-01,\n", - " -1.34410933e-01, -3.12207133e-01, -8.90189946e-01, 1.97006428e+00,\n", - " -2.49193460e-02, 2.25960299e-01, -3.90179232e-02, -3.03875893e-01,\n", - " 2.02030335e-02, -7.07065910e-02, -4.81521547e-01, 5.04257262e-01,\n", - " -1.32081115e+00, 2.83502758e-01, 2.80248702e-01, 1.63375765e-01,\n", - " -6.91465080e-01, 6.82975233e-01, -2.67829001e-01, 2.29658693e-01,\n", - " 2.78859794e-01, -1.04206935e-01, -5.57148576e-01, 4.41706657e-01,\n", - " -6.76973104e-01, 2.47751385e-01, -2.96468334e-03, -1.66827604e-01,\n", - " -3.47717047e-01, -9.04396921e-03, -7.69433856e-01, 4.33617719e-02,\n", - " -2.09145937e-02, -1.55236557e-01, -2.16777384e-01, -2.26556376e-01,\n", - " -6.16374731e-01, 2.05871137e-03, -3.08128931e-02, -1.63372140e-02,\n", - " 1.46710426e-01, 2.31793106e-01, 4.16066934e-04, -9.28813033e-03],\n", - " dtype=float32)" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "net.embedding.weight.asnumpy()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 经典版词向量嵌入层\n", - "\n", - "这里我们利用经典的词向量嵌入层来搭建一个经典的CBOW神经网络,并与量子版本进行对比。\n", - "\n", - "首先,搭建经典的CBOW神经网络,其中的参数跟量子版本的类似。" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [], - "source": [ - "class CBOWClassical(nn.Cell):\n", - " def __init__(self, num_embedding, embedding_dim, window, hidden_dim):\n", - " super(CBOWClassical, self).__init__()\n", - " self.dim = 2 * window * embedding_dim\n", - " self.embedding = nn.Embedding(num_embedding, embedding_dim, True)\n", - " self.dense1 = nn.Dense(self.dim, hidden_dim)\n", - " 
self.dense2 = nn.Dense(hidden_dim, num_embedding)\n", - " self.relu = ops.ReLU()\n", - " self.reshape = ops.Reshape()\n", - "\n", - " def construct(self, x):\n", - " embed = self.embedding(x)\n", - " embed = self.reshape(embed, (-1, self.dim))\n", - " out = self.dense1(embed)\n", - " out = self.relu(out)\n", - " out = self.dense2(out)\n", - " return out" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "生成适用于经典CBOW神经网络的数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "train_x shape: (58, 4)\n", - "train_y shape: (58,)\n" - ] - } - ], - "source": [ - "train_x = []\n", - "train_y = []\n", - "for i in sample:\n", - " around, center = i\n", - " train_y.append(word_dict[center])\n", - " train_x.append([])\n", - " for j in around:\n", - " train_x[-1].append(word_dict[j])\n", - "train_x = np.array(train_x).astype(np.int32)\n", - "train_y = np.array(train_y).astype(np.int32)\n", - "print(\"train_x shape: \", train_x.shape)\n", - "print(\"train_y shape: \", train_y.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "我们对经典CBOW网络进行训练。" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 25 step: 20 time: 0.008, loss is 3.155\n", - "epoch: 50 step: 20 time: 0.026, loss is 3.027\n", - "epoch: 75 step: 20 time: 0.010, loss is 3.010\n", - "epoch: 100 step: 20 time: 0.009, loss is 2.955\n", - "epoch: 125 step: 20 time: 0.008, loss is 0.630\n", - "epoch: 150 step: 20 time: 0.008, loss is 0.059\n", - "epoch: 175 step: 20 time: 0.009, loss is 0.008\n", - "epoch: 200 step: 20 time: 0.008, loss is 0.003\n", - "epoch: 225 step: 20 time: 0.017, loss is 0.001\n", - "epoch: 250 step: 20 time: 0.008, loss is 0.001\n", - "epoch: 275 step: 20 time: 0.016, loss is 0.000\n", - "epoch: 300 step: 20 time: 0.008, 
loss is 0.000\n", - "epoch: 325 step: 20 time: 0.016, loss is 0.000\n", - "epoch: 350 step: 20 time: 0.008, loss is 0.000\n", - "Total time used: 5.06074857711792\n" - ] - } - ], - "source": [ - "train_loader = ds.NumpySlicesDataset({\n", - " \"around\": train_x,\n", - " \"center\": train_y\n", - "},shuffle=False).batch(3)\n", - "net = CBOWClassical(len(word_dict), embedding_dim, window_size, hidden_dim)\n", - "net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')\n", - "net_opt = nn.Momentum(net.trainable_params(), 0.01, 0.9)\n", - "loss_monitor = LossMonitorWithCollection(500)\n", - "model = Model(net, net_loss, net_opt)\n", - "model.train(350, train_loader, callbacks=[loss_monitor], dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "打印收敛过程中的损失函数值:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "plt.plot(loss_monitor.loss,'.')\n", - "plt.xlabel('Steps')\n", - "plt.ylabel('Loss')\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "得到收敛图为\n", - "\n", - "![classical nlp loss](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/advanced_use/images/classical_nlp_loss.png)\n", - "\n", - "由上可知,通过量子模拟得到的量子版词嵌入模型也能很好的完成嵌入任务。当数据集大到经典计算机算力难以承受时,量子计算机将能够轻松处理这类问题。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 参考文献\n", - "\n", - "[1] Tomas Mikolov, Kai Chen, Greg Corrado, Jeffrey Dean. 
[Efficient Estimation of Word Representations in\n", - "Vector Space](https://arxiv.org/pdf/1301.3781.pdf)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.5" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/advanced_use/quantum_neural_network.rst b/tutorials/training/source_zh_cn/advanced_use/quantum_neural_network.rst deleted file mode 100644 index e3bf46236bf7bb49539fde74032fc9d42aa431f0..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/quantum_neural_network.rst +++ /dev/null @@ -1,8 +0,0 @@ -量子神经网络 -=============== - -.. toctree:: - :maxdepth: 1 - - parameterized_quantum_circuit - qnn_for_nlp \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/advanced_use/save_load_model_hybrid_parallel.md b/tutorials/training/source_zh_cn/advanced_use/save_load_model_hybrid_parallel.md deleted file mode 100644 index b3dfa3e6b62dde5c44e46a74f6ea5e1f564418fe..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/save_load_model_hybrid_parallel.md +++ /dev/null @@ -1,560 +0,0 @@ -# 保存和加载模型(HyBrid Parallel模式) - -`Linux` `Ascend` `GPU` `模型训练` `中级` `高级` - - - -- [保存和加载模型(HyBrid Parallel模式)](#保存和加载模型hybrid-parallel模式) - - [概述](#概述) - - [背景](#背景) - - [使用场景](#使用场景) - - [对保存的CheckPoint文件做合并处理](#对保存的checkpoint文件做合并处理) - - [整体流程](#整体流程) - - [准备工作](#准备工作) - - [按逻辑顺序导入CheckPoint文件](#按逻辑顺序导入checkpoint文件) - - [获取模型参数切分策略](#获取模型参数切分策略) - - [对模型并行的参数做合并处理](#对模型并行的参数做合并处理) - - [保存数据生成新的CheckPoint文件](#保存数据生成新的checkpoint文件) - - [加载合并保存的CheckPoint文件](#加载合并保存的checkpoint文件) - - [整体流程](#整体流程-1) - - 
[步骤1:加载CheckPoint文件](#步骤1加载checkpoint文件) - - [步骤2:对模型并行参数做切分处理](#步骤2对模型并行参数做切分处理) - - [步骤3:将修改后的参数数据加载到网络中](#步骤3将修改后的参数数据加载到网络中) - - [示例](#示例) - - [示例场景说明](#示例场景说明) - - [示例代码](#示例代码) - - - - - -## 概述 - -### 背景 - -MindSpore模型并行场景下,每个实例进程只保存有本节点对应的参数数据。对于模型并行的Cell,其在每个节点上的参数数据,都是完整参数数据的一个切片。比如完整参数数据shape为[8, 8],每个节点上的参数数据为其中的一部分,如shape[2, 8]。 - -对于自动切分的模型并行场景(Auto Parallel),切分逻辑由MindSpore自动生成,MindSpore的CheckPoint模块可以支持自动合并保存和基于合并保存的加载能力。 - -对于用户手动设置的并行场景(HyBrid Parallel),切分逻辑由用户自己实现,MindSpore在每个节点上保存相同的模型参数切分策略文件和本节点上的数据,用户需要自己实现CheckPoint文件的合并保存与加载功能。本教程用于指导用户在手动切分场景下,实现CheckPoint的合并保存与加载能力。 - -### 使用场景 - -如果你遇到如下两个场景,需要参考本教程操作,完成CheckPoint的合并保存与加载: - -场景1:多卡训练,单卡推理。 - - 以在64卡上训练,并在单卡上推理为例,整体操作流程如下: - -1. 执行训练,自动生成CheckPoint文件和模型参数切分策略文件。 - -2. 用户对保存的CheckPoint文件做合并处理。 - - 根据具体的切分逻辑,对于存在切分的具体模型参数做合并处理,生成新CheckPoint文件。 - -3. 在单卡环境加载新的CheckPoint文件,之后再根据需要调用export接口导出用于推理的模型。 - -若CheckPoint的保存环境和加载环境集群的卡数相同,比如在同一训练环境保存加载CheckPoint,或者单卡训练单卡推理,则可以不需要做合并保存和加载。 - -场景2:训练分为多阶段,每个阶段的集群大小不一样。 - -​ 以训练阶段一是64卡训练环境,阶段二是56卡训练环境为例,整体操作流程如下: - -1. 执行阶段一训练,自动生成CheckPoint文件和模型参数切分策略文件。 - -2. 用户对保存的CheckPoint文件做合并处理。 - - 根据具体的切分逻辑,对于存在切分的具体模型参数做合并处理,生成新CheckPoint文件。 - -3. 在阶段二集群上加载合并保存的CheckPoint文件。 - - 在加载过程中,用户需要根据新的训练环境配置,重新切分CheckPoint文件中的参数数据。 - -4. 
执行阶段二训练。 - -## 对保存的CheckPoint文件做合并处理 - -### 整体流程 - -首先,执行准备工作,按逻辑顺序将待合并处理的CheckPoint文件导入网络,获取模型全量参数并添加至列表中,再获取模型参数切分策略。对应下图中的Step1和Step2。 - -其次,更新参数列表,对涉及模型并行的参数做合并处理。对应下图中的Step3。 - -最后,将更新之后的参数列表,通过MindSpore提供的API保存到文件,生成新的CheckPoint文件。对应下图中的Step4。 - -![img](./images/checkpoint_integration_process.jpg) - -### 准备工作 - -#### 按逻辑顺序导入CheckPoint文件 - -定义网络,调用`load_checkpoint`、`load_param_into_net`接口,按逻辑顺序将CheckPoint文件导入网络,之后调用`parameters_and_names`接口获取网络里所有的参数数据。 - -```python -net = Net() -opt = Momentum(learning_rate=0.01, momentum=0.9, params=net.get_parameters()) -net = TrainOneStepCell(net, opt) -param_dicts = [] -for i in range(rank_size): - file_name = os.path.join("./node"+str(i), "CKP_1-4_32.ckpt") # checkpoint file name of current node - param_dict = load_checkpoint(file_name) - load_param_into_net(net, param_dict) - param_dict = {} - for _, param in net.parameters_and_names(): - param_dict[param.name] = param - param_dicts.append(param_dict) -``` - -其中, - -- `rank_size`:之前分布式训练的节点数。 -- `load_checkpoint`:通过该接口加载CheckPoint模型参数文件,返回一个参数字典。 -- `load_param_into_net`:模型参数数据加载到网络中。 - -#### 获取模型参数切分策略 - -调用`build_searched_strategy`接口,得到模型各个参数的切分策略。 - -```python -strategy = build_searched_strategy("./strategy_train.cpkt") -``` - -其中, - -- `strategy_train.ckpt`:保存的模型参数切分策略文件名称,训练网络之前由用户调用`set_auto_parallel_context`接口自定义`strategy_ckpt_save_file`参数生成。 - -### 对模型并行的参数做合并处理 - -下面以一个具体的模型参数为例,说明下参数合并处理的具体流程。 - -参数名称为"weight",切分逻辑为4卡场景。 - -1. 针对涉及模型并行的参数,获取所有节点上的参数数据。 - - ```python - sliced_parameters = [] - for i in range(4): - parameter = param_dicts[i].get("weight") - sliced_parameters.append(parameter) - ``` - - > 如果要保证参数更新速度不变,需要对优化器中保存的参数,如"moments.weight",同样做合并处理。 - -2. 调用`merge_sliced_parameter`接口进行参数合并。 - - ```python - merged_parameter = merge_sliced_parameter(sliced_parameters, strategy) - ``` - -> 如果存在多个模型并行的参数,则需要重复步骤1到步骤2循环逐个处理。 - -### 保存数据生成新的CheckPoint文件 - -1. 
将`param_dict`转换为list类型数据。 - - ```python - param_list = [] - for (key, value) in param_dict.items(): - each_param = {} - each_param["name"] = key - if isinstance(value.data, Tensor): - param_data = value.data - else: - param_data = Tensor(value.data) - each_param["data"] = param_data - param_list.append(each_param) - ``` - -2. 调用`save_checkpoint`接口,将参数数据写入文件,生成新的CheckPoint文件。 - - ```python - save_checkpoint(param_list, "./CKP-Integrated_1-4_32.ckpt") - ``` - - 其中, - - `save_checkpoint`: 通过该接口将网络模型参数信息存入文件。 - - `CKP-Integrated_1-4_32.ckpt`: 新生成的CheckPoint模型参数文件名称。 - -## 加载合并保存的CheckPoint文件 - -### 整体流程 - -如果需要将合并保存的CheckPoint加载到多卡训练或推理中,在模型参数真正加载到网络前,需要对于涉及并行的参数数据按照新的逻辑切分。 -如下步骤在训练前脚本里实现,步骤1和3同单机CheckPoint加载的逻辑,步骤2为新增,用于涉及并行的模型参数的切分。 -对于加载到单卡训练/推理的场景,不涉及数据切分,则步骤2可省略。 - -### 步骤1:加载CheckPoint文件 - -调用`load_checkpoint`接口,从CheckPoint文件中加载模型参数数据。 - -```python -param_dict = load_checkpoint("./CKP-Integrated_1-4_32.ckpt") -``` - -- `load_checkpoint`:通过该接口加载CheckPoint模型参数文件,返回一个参数字典。 -- `CKP-Integrated_1-4_32.ckpt`:需要加载的CheckPoint模型参数文件名称。 - -### 步骤2:对模型并行参数做切分处理 - -下面以一个具体的模型参数为例,参数名称为"weight", 数据值为Tensor [[1, 2, 3, 4], [5, 6, 7, 8]],切分逻辑为2卡场景,按[2, 1]切分。 -切分后数据分布情况如下: - -| Device0 | Device1 | -| ------------------- | -------------------- | -| Value [1, 2, 3, 4] | Value [5, 6, 7, 8] | - -1. 对模型参数数据做切分。 - - 如下代码示例,在维度0上,将数据切分为两个切片。 - - ```python - new_param = parameter_dict["weight"] - slice_list = np.split(new_param.data.asnumpy(), 2, axis=0) - new_param_moments = parameter_dict["moments.weight"] - slice_moments_list = np.split(new_param_moments.data.asnumpy(), 2, axis=0) - ``` - - 切分后的数据情况: - - ```text - slice_list[0] --- [1, 2, 3, 4] 对应device0 - slice_list[1] --- [5, 6, 7, 8] 对应device1 - ``` - - 与`slice_list`类似,`slice_moments_list` 也被切分为两个shape为[1, 4]的Tensor。 - -2. 
在每个节点分别加载对应的数据切片。 - - 获取本节点的rank_id,根据rank_id加载数据。 - - ```python - rank = get_rank() - tensor_slice = Tensor(slice_list[rank]) - tensor_slice_moments = Tensor(slice_moments_list[rank]) - ``` - - - `get_rank`:获取当前设备在集群中的ID。 - -3. 修改模型参数数据值。 - - ```python - new_param.set_data(tensor_slice, True) - new_param_moments.set_data(tensor_slice_moments, True) - ``` - - - `set_data`:设置模型参数的值,接口参数类型为Tensor 或number。 - -### 步骤3:将修改后的参数数据加载到网络中 - -调用`load_param_into_net`接口,将模型参数数据加载到网络中。 - -```python -net = Net() -opt = Momentum(learning_rate=0.01, momentum=0.9, params=parallel_net.get_parameters()) -load_param_into_net(net, param_dict) -load_param_into_net(opt, param_dict) -``` - -## 示例 - -### 示例场景说明 - -整体场景:训练分为两个阶段,两阶段的集群规模不一致,模拟FC层MatMul算子并行。 - -用户流程: - -1. 执行阶段1训练:阶段1为4卡训练环境,每卡上MatMul算子weight的shape为[2, 8],训练过程中自动导出CheckPoint。 - -2. 执行脚本对CheckPoint文件做合并处理,根据具体的切分逻辑,对于存在切分的具体模型参数做合并处理,生成合并的CheckPoint文件。 - -3. 执行阶段2训练:阶段2为2卡训练环境,每卡上MatMul算子weight的shape为[4, 8],从合并后的CheckPoint文件加载初始化模型参数数据,之后执行训练。 - -> 具体分布式环境配置和训练部分代码,此处不做详细说明,可以参考[分布式并行训练](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html) -章节。 -> -> 本文档附上对CheckPoint文件做合并处理以及分布式训练前加载CheckPoint文件的示例代码,仅作为参考,实际请参考具体情况实现。 - -### 示例代码 - -1. 
执行脚本对CheckPoint文件做合并处理。 - - 脚本执行命令: - - ```bash - python ./integrate_checkpoint.py "待合并的CheckPoint文件名称" "合并生成的CheckPoint文件路径&名称" "策略文件路径&名称" "节点数" - ``` - - integrate_checkpoint.py: - - ```python - import numpy as np - import os - import mindspore.nn as nn - from mindspore import Tensor, Parameter - import mindspore.ops as ops - from mindspore import save_checkpoint, load_checkpoint, build_searched_strategy, merge_sliced_parameter - - class Net(nn.Cell): - def __init__(self,weight_init): - super(Net, self).__init__() - self.weight = Parameter(Tensor(weight_init), layerwise_parallel=True) - self.fc = ops.MatMul(transpose_b=True) - - def construct(self, x): - x = self.fc(x, self.weight) - return x - - def integrate_ckpt_file(old_ckpt_file, new_ckpt_file, strategy_file, rank_size): - weight = np.ones([2, 8]).astype(np.float32) - net = Net(weight) - opt = Momentum(learning_rate=0.01, momentum=0.9, params=net.get_parameters()) - net = TrainOneStepCell(net, opt) - - # load CheckPoint into net in rank id order - param_dicts = [] - for i in range(rank_size): - file_name = os.path.join("./node"+str(i), old_ckpt_file) - param_dict = load_checkpoint(file_name) - load_param_into_net(net, param_dict) - param_dict = {} - for _, param in net.parameters_and_names(): - param_dict[param.name] = param - param_dicts.append(param_dict) - - strategy = build_searched_strategy(strategy_file) - param_dict = {} - - for paramname in ["weight", "moments.weight"]: - # get layer wise model parallel parameter - sliced_parameters = [] - for i in range(rank_size): - parameter = param_dicts[i].get(paramname) - sliced_parameters.append(parameter) - - # merge the parallel parameters of the model - merged_parameter = merge_sliced_parameter(sliced_parameters, strategy) - param_dict[paramname] = merged_parameter - - # convert param_dict to list type data - param_list = [] - for (key, value) in param_dict.items(): - each_param = {} - each_param["name"] = key - if isinstance(value.data, Tensor): - 
param_data = value.data - else: - param_data = Tensor(value.data) - each_param["data"] = param_data - param_list.append(each_param) - - # call the API to generate a new CheckPoint file - save_checkpoint(param_list, new_ckpt_file) - - return - - if __name__ == "__main__": - try: - old_ckpt_file = sys.argv[1] - new_ckpt_file = sys.argv[2] - strategy_file = sys.argv[3] - rank_size = int(sys.argv[4]) - integrate_ckpt_file(old_ckpt_file, new_ckpt_file, strategy_file, rank_size) - except: - print("Fail to integrate checkpoint file") - sys.exit(-1) - ``` - - 执行结果: - - 脚本执行前,CheckPoint文件中参数值: - - ```text - device0: - name is weight - value is - [[0.87537426 1.0448935 0.86736983 0.8836905 0.77354026 0.69588304 0.9183654 0.7792076] - [0.87224025 0.8726848 0.771446 0.81967723 0.88974726 0.7988162 0.72919345 0.7677011]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.2567724 -0.07485991 0.282002 0.2456022 0.454939 0.619168 0.18964815 0.45714882] - [0.25946522 0.24344791 0.45677605 0.3611395 0.23378398 0.41439137 0.5312468 0.4696194]] - - device1: - name is weight - value is - [[0.9210751 0.9050457 0.9827775 0.920396 0.9240526 0.9750359 1.0275179 1.0819869] - [0.73605865 0.84631145 0.9746683 0.9386582 0.82902765 0.83565056 0.9702136 1.0514659]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.2417504 0.28193963 0.06713893 0.21510397 0.23380603 0.11424308 0.0218009 -0.11969765] - [0.45955992 0.22664294 0.01990281 0.0731914 0.27125207 0.27298513 -0.01716102 -0.15327111]] - - device2: - name is weight - value is - [[1.0108461 0.8689414 0.91719437 0.8805056 0.7994629 0.8999671 0.7585804 1.0287056 ] - [0.90653455 0.60146594 0.7206475 0.8306303 0.8364681 0.89625114 0.7354735 0.8447268]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.03440702 0.41419312 0.24817684 
0.30765256 0.48516113 0.24904746 0.57791173 0.00955463] - [0.13458519 0.6690533 0.49259356 0.28319967 0.25951773 0.16777472 0.45696738 0.24933104]] - - device3: - name is weight - value is - [[0.7147005 0.9168278 0.80178416 0.6258351 0.8413766 0.5909515 0.696347 0.71359116] - [0.20506378 0.03691584 0.2454556 0.12978578 0.19065076 0.23904312 0.27509746 0.34614682]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.14152306 0.5040985 0.24455397 0.10907605 0.11319532 0.19538902 0.01208619 0.40430856] - [-0.7773164 -0.47611716 -0.6041424 -0.6144473 -0.2651842 -0.31909415 -0.4510405 -0.12860501]] - ``` - - 脚本执行后,CheckPoint文件中参数值: - - ```text - name is weight - value is - [[1.1138763 1.0962057 1.3516843 1.0812817 1.1579804 1.1078343 1.0906502 1.3207073] - [0.916671 1.0781671 1.0368758 0.9680898 1.1735439 1.0628364 0.9960786 1.0135143] - [0.8828271 0.7963984 0.90675324 0.9830291 0.89010954 0.897052 0.7890109 0.89784735] - [1.0011744 1.0840297 1.0201758 1.0882459 0.94232416 1.0775206 1.0195118 1.0528734] - [1.0053468 0.98402303 0.99762845 0.97587246 1.0259694 1.0055295 0.99420834 0.9496847] - [1.0851002 1.0295962 1.0999886 1.0958165 0.9765328 1.146529 1.0970603 1.1388365] - [0.7147005 0.9168278 0.80178416 0.6258351 0.8413766 0.5909515 0.696347 0.71359116] - [0.20506378 0.03691584 0.2454556 0.12978578 0.19065076 0.23904312 0.27509746 0.34614682]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.2567724 -0.07485991 0.282002 0.2456022 0.454939 0.619168 0.18964815 0.45714882] - [0.25946522 0.24344791 0.45677605 0.3611395 0.23378398 0.41439137 0.5312468 0.4696194 ] - [0.2417504 0.28193963 0.06713893 0.21510397 0.23380603 0.11424308 0.0218009 -0.11969765] - [0.45955992 0.22664294 0.01990281 0.0731914 0.27125207 0.27298513 -0.01716102 -0.15327111] - [0.03440702 0.41419312 0.24817684 0.30765256 0.48516113 0.24904746 0.57791173 0.00955463] - 
[0.13458519 0.6690533 0.49259356 0.28319967 0.25951773 0.16777472 0.45696738 0.24933104] - [0.14152306 0.5040985 0.24455397 0.10907605 0.11319532 0.19538902 0.01208619 0.40430856] - [-0.7773164 -0.47611716 -0.6041424 -0.6144473 -0.2651842 -0.31909415 -0.4510405 - -0.12860501]] - ``` - -2. 执行阶段2训练,训练前加载CheckPoint文件。其中训练代码部分,需要根据实际情况补充。 - - ```python - import numpy as np - import os - import mindspore.nn as nn - from mindspore import context - from mindspore.communication.management import init - from mindspore import Tensor, Parameter - import mindspore.ops as ops - from mindspore import load_checkpoint, load_param_into_net - - from mindspore.communication.management import init - devid = int(os.getenv('DEVICE_ID')) - context.set_context(mode=context.GRAPH_MODE,device_target='Ascend',save_graphs=True, device_id=devid) - init() - - class Net(nn.Cell): - def __init__(self,weight_init): - super(Net, self).__init__() - self.weight = Parameter(Tensor(weight_init), layerwise_parallel=True) - self.fc = ops.MatMul(transpose_b=True) - - def construct(self, x): - x = self.fc(x, self.weight) - return x - def train_mindspore_impl_fc(input, label, ckpt_file): - param_dict = load_checkpoint(ckpt_file) - - for paramname in ["weight", "moments.weight"]: - # get layer wise model parallel parameter - new_param = parameter_dict[paramname] - # split the model parameter data - slice_list = np.split(new_param.data.asnumpy(), 2, axis=0) - # Load the corresponding data slice - rank = get_rank() - tensor_slice = Tensor(slice_list[rank]) - # modify model parameter data values - new_param.set_data(tensor_slice, True) - - # load the modified parameter data into the network - weight = np.ones([4, 8]).astype(np.float32) - net = Net(weight) - load_param_into_net(net, param_dict) - opt = Momentum(learning_rate=0.01, momentum=0.9, params=parallel_net.get_parameters()) - load_param_into_net(opt, param_dict) - # train code - ... 
- - if __name__ == "__main__": - input = np.random.random((4, 8)).astype(np.float32) - print("mean = ", np.mean(input,axis=1, keepdims=True)) - label = np.random.random((4, 4)).astype(np.float32) - ckpt_file = sys.argv[1] - train_mindspore_impl_fc(input, label, ckpt_file) - ``` - - 其中, - - - `mode=context.GRAPH_MODE`:使用分布式训练需要指定运行模式为图模式(PyNative模式不支持并行)。 - - `device_id`:卡物理序号,即卡所在机器中的实际序号。 - - `init`:完成分布式训练初始化操作。 - - 加载后的参数值: - - ```text - device0: - name is weight - value is - [[0.87537426 1.0448935 0.86736983 0.8836905 0.77354026 0.69588304 0.9183654 0.7792076] - [0.87224025 0.8726848 0.771446 0.81967723 0.88974726 0.7988162 0.72919345 0.7677011] - [0.8828271 0.7963984 0.90675324 0.9830291 0.89010954 0.897052 0.7890109 0.89784735] - [1.0011744 1.0840297 1.0201758 1.0882459 0.94232416 1.0775206 1.0195118 1.0528734]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.2567724 -0.07485991 0.282002 0.2456022 0.454939 0.619168 0.18964815 0.45714882] - [0.25946522 0.24344791 0.45677605 0.3611395 0.23378398 0.41439137 0.5312468 0.4696194] - [0.2417504 0.28193963 0.06713893 0.21510397 0.23380603 0.11424308 0.0218009 -0.11969765] - [0.45955992 0.22664294 0.01990281 0.0731914 0.27125207 0.27298513 -0.01716102 -0.15327111]] - - device1: - name is weight - value is - [[1.0053468 0.98402303 0.99762845 0.97587246 1.0259694 1.0055295 0.99420834 0.9496847] - [1.0851002 1.0295962 1.0999886 1.0958165 0.9765328 1.146529 1.0970603 1.1388365] - [0.7147005 0.9168278 0.80178416 0.6258351 0.8413766 0.5909515 0.696347 0.71359116] - [0.20506378 0.03691584 0.2454556 0.12978578 0.19065076 0.23904312 0.27509746 0.34614682]] - name is learning_rate - value is [0.01] - name is momentum - value is [0.9] - name is moments.weight - value is - [[0.03440702 0.41419312 0.24817684 0.30765256 0.48516113 0.24904746 0.57791173 0.00955463] - [0.13458519 0.6690533 0.49259356 0.28319967 0.25951773 0.16777472 0.45696738 0.24933104] - 
[0.14152306 0.5040985 0.24455397 0.10907605 0.11319532 0.19538902 0.01208619 0.40430856] - [-0.7773164 -0.47611716 -0.6041424 -0.6144473 -0.2651842 -0.31909415 -0.4510405 -0.12860501]] - ``` diff --git a/tutorials/training/source_zh_cn/advanced_use/summary_record.md b/tutorials/training/source_zh_cn/advanced_use/summary_record.md deleted file mode 100644 index dd007d2b094b66a42fac642e8dd3913c1a172b6d..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/summary_record.md +++ /dev/null @@ -1,528 +0,0 @@ -# 收集Summary数据 - -`Linux` `Ascend` `GPU` `CPU` `模型调优` `中级` `高级` - - - -- [收集Summary数据](#收集summary数据) - - [概述](#概述) - - [操作流程](#操作流程) - - [准备训练脚本](#准备训练脚本) - - [方式一:通过SummaryCollector自动收集](#方式一通过summarycollector自动收集) - - [方式二:结合Summary算子和SummaryCollector,自定义收集网络中的数据](#方式二结合summary算子和summarycollector自定义收集网络中的数据) - - [方式三:自定义Callback记录数据](#方式三自定义callback记录数据) - - [方式四:进阶用法,自定义训练循环](#方式四进阶用法自定义训练循环) - - [分布式训练场景](#分布式训练场景) - - [使用技巧:记录梯度信息](#使用技巧记录梯度信息) - - [运行MindInsight](#运行mindinsight) - - [注意事项](#注意事项) - - - -   - - -## 概述 - -训练过程中的标量、图像、计算图以及模型超参等信息记录到文件中,通过可视化界面供用户查看。 - -## 操作流程 - -- 准备训练脚本,并在训练脚本中指定标量、图像、计算图、模型超参等信息记录到summary日志文件,接着运行训练脚本。 -- 启动MindInsight,并通过启动参数指定summary日志文件目录,启动成功后,根据IP和端口访问可视化界面,默认访问地址为 `http://127.0.0.1:8080`。 -- 在训练过程中,有数据写入summary日志文件时,即可在页面中[查看训练看板中可视的数据](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/dashboard.html)。 - -> 在ModelArts中查看可视数据,可参考[ModelArts上管理可视化作业](https://support.huaweicloud.com/engineers-modelarts/modelarts_23_0050.html)。 - -## 准备训练脚本 - -当前MindSpore支持将标量、图像、计算图、模型超参等信息保存到summary日志文件中,并通过可视化界面进行展示。计算图数据仅能在图模式下记录。 - -MindSpore目前支持多种方式将数据记录到summary日志文件中。 - -### 方式一:通过SummaryCollector自动收集 - -在MindSpore中通过 `Callback` 机制提供支持快速简易地收集一些常见的信息,包括计算图,损失值,学习率,参数权重等信息的 `Callback`, 叫做 `SummaryCollector`。 - -在编写训练脚本时,仅需要实例化 `SummaryCollector`,并将其应用到 `model.train` 或者 `model.eval` 中, -即可自动收集一些常见信息。`SummaryCollector` 详细的用法可以参考 `API` 文档中 `mindspore.train.callback.SummaryCollector`。 
- -样例代码如下: - -```python -import mindspore -import mindspore.nn as nn -from mindspore import ops -from mindspore import context, Tensor, Model -from mindspore.nn.metrics import Accuracy -from mindspore.train.callback import SummaryCollector - - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0, pad_mode="valid", has_bias=True): - return nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding, - has_bias=has_bias, pad_mode=pad_mode) - - -def fc_with_initialize(input_channels, out_channels, has_bias=True): - return nn.Dense(input_channels, out_channels, has_bias=has_bias) - - -class AlexNet(nn.Cell): - """AlexNet""" - def __init__(self, num_classes=10, channel=3, phase='train', include_top=True): - super(AlexNet, self).__init__() - self.conv1 = conv(channel, 64, 11, stride=4, pad_mode="same", has_bias=True) - self.conv2 = conv(64, 128, 5, pad_mode="same", has_bias=True) - self.conv3 = conv(128, 192, 3, pad_mode="same", has_bias=True) - self.conv4 = conv(192, 256, 3, pad_mode="same", has_bias=True) - self.conv5 = conv(256, 256, 3, pad_mode="same", has_bias=True) - self.relu = ops.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode='valid') - self.include_top = include_top - if self.include_top: - dropout_ratio = 0.65 - if phase == 'test': - dropout_ratio = 1.0 - self.flatten = nn.Flatten() - self.fc1 = fc_with_initialize(6 * 6 * 256, 4096) - self.fc2 = fc_with_initialize(4096, 4096) - self.fc3 = fc_with_initialize(4096, num_classes) - self.dropout = nn.Dropout(dropout_ratio) - - def construct(self, x): - """define network""" - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv3(x) - x = self.relu(x) - x = self.conv4(x) - x = self.relu(x) - x = self.conv5(x) - x = self.relu(x) - x = self.max_pool2d(x) - if not self.include_top: - return x - x = self.flatten(x) - x = self.fc1(x) - x = self.relu(x) - x = 
self.dropout(x) - x = self.fc2(x) - x = self.relu(x) - x = self.dropout(x) - x = self.fc3(x) - return x - -def train(): - context.set_context(mode=context.GRAPH_MODE) - - network = AlexNet(num_classes=10) - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - lr = Tensor(0.5, mindspore.float32) - opt = nn.Momentum(network.trainable_params(), lr, momentum=0.9) - model = Model(network, loss, opt, metrics={"Accuracy": Accuracy()}) - - # How to create a valid dataset instance, - # for detail, see the https://www.mindspore.cn/tutorial/training/zh-CN/master/quick_start/quick_start.html document. - ds_train = create_dataset('./dataset_path') - - # Init a SummaryCollector callback instance, and use it in model.train or model.eval - summary_collector = SummaryCollector(summary_dir='./summary_dir', collect_freq=1) - - # Note: dataset_sink_mode should be set to False, else you should modify collect freq in SummaryCollector - model.train(epoch=1, train_dataset=ds_train, callbacks=[summary_collector], dataset_sink_mode=False) - - ds_eval = create_dataset('./dataset_path') - model.eval(ds_eval, callbacks=[summary_collector]) - -if __name__ == '__main__': - train() - -``` - -> 1. 使用summary功能时,建议将`model.train`的`dataset_sink_mode`参数设置为`False`。请参考文末的注意事项。 -> 2. 
使用summary功能时,需要将代码放置到`if __name__ == "__main__"`中运行。详情请[参考Python官网介绍](https://docs.python.org/zh-cn/3.7/library/multiprocessing.html#multiprocessing-programming)。 - -### 方式二:结合Summary算子和SummaryCollector,自定义收集网络中的数据 - -MindSpore除了提供 `SummaryCollector` 能够自动收集一些常见数据,还提供了Summary算子,支持在网络中自定义收集其他的数据,比如每一个卷积层的输入,或在损失函数中的损失值等。 - -当前支持的Summary算子: - -- [ScalarSummary](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ScalarSummary.html):记录标量数据 -- [TensorSummary](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.TensorSummary.html):记录张量数据 -- [ImageSummary](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.ImageSummary.html):记录图片数据 -- [HistogramSummary](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/ops/mindspore.ops.HistogramSummary.html):将张量数据转为直方图数据记录 - -记录方式如下面的步骤所示。 - -步骤一:在继承 `nn.Cell` 的衍生类的 `construct` 函数中调用Summary算子来采集图像或标量数据或者其他数据。 - -比如,定义网络时,在网络的 `construct` 中记录图像数据;定义损失函数时,在损失函数的 `construct`中记录损失值。 - -如果要记录动态学习率,可以定义优化器时,在优化器的 `construct` 中记录学习率。 - -样例代码如下: - -```python -import mindspore -import mindspore.ops as ops -from mindspore import Tensor, nn -from mindspore.nn import Optimizer - - -class CrossEntropyLoss(nn.Cell): - """Loss function definition.""" - def __init__(self): - super(CrossEntropyLoss, self).__init__() - self.cross_entropy = ops.SoftmaxCrossEntropyWithLogits() - self.mean = ops.ReduceMean() - self.one_hot = ops.OneHot() - self.on_value = Tensor(1.0, mindspore.float32) - self.off_value = Tensor(0.0, mindspore.float32) - - # Init ScalarSummary - self.scalar_summary = ops.ScalarSummary() - - def construct(self, logits, label): - label = self.one_hot(label, ops.shape(logits)[1], self.on_value, self.off_value) - loss = self.cross_entropy(logits, label)[0] - loss = self.mean(loss, (-1,)) - - # Record loss - self.scalar_summary("loss", loss) - return loss - - -class MyOptimizer(Optimizer): - """Optimizer definition.""" - def __init__(self, 
learning_rate, params, ...): - ... - # Initialize ScalarSummary - self.scalar_summary = ops.ScalarSummary() - self.histogram_summary = ops.HistogramSummary() - self.weight_names = [param.name for param in self.parameters] - - def construct(self, grads): - ... - # Record learning rate here - self.scalar_summary("learning_rate", learning_rate) - - # Record weight - self.histogram_summary(self.weight_names[0], self.parameters[0]) - # Record gradient - self.histogram_summary(self.weight_names[0] + ".gradient", grads[0]) - - ... - -class Net(nn.Cell): - """Net definition.""" - def __init__(self): - super(Net, self).__init__() - ... - - # Init ImageSummary - self.image_summary = ops.ImageSummary() - # Init TensorSummary - self.tensor_summary = ops.TensorSummary() - - def construct(self, data): - # Record image by Summary operator - self.image_summary("image", data) - # Record tensor by Summary operator - self.tensor_summary("tensor", data) - ... - return out -``` - -> 1. 同一种Summary算子中,给数据设置的名字不能重复,否则数据收集和展示都会出现非预期行为。比如使用两个 `ScalarSummary` 算子收集标量数据,给两个标量设置的名字不能是相同的。 -> 2. summary算子仅支持图模式,需要在`nn.Cell`的`construct`中使用。暂不支持PyNative模式。 - -步骤二:在训练脚本中,实例化 `SummaryCollector`,并将其应用到 `model.train`。 - -样例代码如下: - -```python -from mindspore import Model, nn, context -from mindspore.train.callback import SummaryCollector -... 
- -def train(): - context.set_context(mode=context.GRAPH_MODE) - network = Net() - loss_fn = CrossEntropyLoss() - optim = MyOptimizer(learning_rate=0.01, params=network.trainable_params()) - model = Model(network, loss_fn=loss_fn, optimizer=optim, metrics={"Accuracy": Accuracy()}) - - ds_train = create_dataset('./dataset_path') - - summary_collector = SummaryCollector(summary_dir='./summary_dir', collect_freq=1) - model.train(epoch=2, train_dataset=ds_train, callbacks=[summary_collector]) - -if __name__ == '__main__': - train() -``` - -### 方式三:自定义Callback记录数据 - -MindSpore支持自定义Callback, 并允许在自定义Callback中将数据记录到summary日志文件中, -并通过可视化页面进行查看。 - -下面的伪代码则展示在CNN网络中,开发者可以利用带有原始标签和预测标签的网络输出,生成混淆矩阵的图片, -然后通过 `SummaryRecord` 模块记录到summary日志文件中。 -`SummaryRecord` 详细的用法可以参考 `API` 文档中 `mindspore.train.summary.SummaryRecord`。 - -样例代码如下: - -```python -from mindspore.train.callback import Callback -from mindspore.train.summary import SummaryRecord - -class ConfusionMatrixCallback(Callback): - def __init__(self, summary_dir): - self._summary_dir = summary_dir - - def __enter__(self): - # init you summary record in here, when the train script run, it will be inited before training - self.summary_record = SummaryRecord(self._summary_dir) - return self - - def __exit__(self, *exc_args): - # Note: you must close the summary record, it will release the process pool resource - # else your training script will not exit from training. - self.summary_record.close() - - def step_end(self, run_context): - cb_params = run_context.original_args() - - # create a confusion matric image, and record it to summary file - confusion_matrix = create_confusion_matrix(cb_params) - self.summary_record.add_value('image', 'confusion_matrix', confusion_matrix) - self.summary_record.record(cb_params.cur_step_num) - -# init you train script -... 
- -confusion_matrix = ConfusionMatrixCallback(summary_dir='./summary_dir') -model.train(network, train_dataset=ds_train, callbacks=[confusion_matrix]) -``` - -上面的三种方式,支持记录计算图, 损失值等多种数据。除此以外,MindSpore还支持保存训练中其他阶段的计算图,通过 -将训练脚本中 `context.set_context` 的 `save_graphs` 选项设置为 `True`, 可以记录其他阶段的计算图,其中包括算子融合后的计算图。 - -在保存的文件中,`ms_output_after_hwopt.pb` 即为算子融合后的计算图,可以使用可视化页面对其进行查看。 - -### 方式四:进阶用法,自定义训练循环 - -如果训练时不是使用MindSpore提供的 `Model` 接口,而是模仿 `Model` 的 `train` 接口自由控制循环的迭代次数。则可以模拟 `SummaryCollector`,使用下面的方式记录summary算子数据。详细的自定义训练循环教程,请[参考官网教程](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/train.html#自定义训练循环)。 - -下面的例子,将演示如何使用summary算子以及 `SummaryRecord` 的 `add_value` 接口在自定义训练循环中记录数据。更多 `SummaryRecord` 的教程,请[参考Python API文档](https://www.mindspore.cn/doc/api_python/zh-CN/master/mindspore/mindspore.train.html#mindspore.train.summary.SummaryRecord)。需要说明的是,`SummaryRecord`不会自动记录计算图,您需要手动传入继承了`Cell`的网络实例以记录计算图。此外,生成计算图的内容仅包含您在`construct`方法中使用到的代码和函数。 - -```python -from mindspore import nn -from mindspore.train.summary import SummaryRecord -import mindspore.ops as ops - -class LeNet5(nn.Cell): - def __init__(self, num_class=10): - super(LeNet5, self).__init__() - self.num_class = num_class - self.batch_size = 32 - self.conv1 = conv(1, 6, 5) - ... - - self.image_summary = ops.ImageSummary() - self.tensor_summary = ops.TensorSummary() - - def construct(self, x): - self.image_summary('x1', x) - x = self.conv1(x) - self.tensor_summary('after_conv1', x) - x = self.relu(x) - ... - return x - -... - -def train(): - epochs = 10 - net = LeNet5() - # Note1: An instance of the network should be passed to SummaryRecord if you want to record - # computational graph. 
- with SummaryRecord('./summary_dir', network=net) as summary_record: - for epoch in range(epochs): - step = 1 - for inputs in dataset_helper: - output = net(*inputs) - current_step = epoch * len(dataset_helper) + step - print("step: {0}, losses: {1}".format(current_step, output.asnumpy())) - - # Note2: The output should be a scalar, and use 'add_value' method to record loss. - # Note3: You must use the 'record(step)' method to record the data of this step. - summary_record.add_value('scalar', 'loss', output) - summary_record.record(current_step) - - step += 1 - -if __name__ == '__main__': - train() - -``` - -### 分布式训练场景 - -由于`SummaryCollector`和`SummaryRecord`写数据是非进程安全的。所以在单机多卡的场景中,需要确保每张卡保存数据的目录不一样。在分布式场景下,我们通过`get_rank`函数设置summary目录。 - -```python3 -summary_dir = "summary_dir" + str(get_rank()) -``` - -示例代码如下: - -```python3 -from mindspore.communication.management import get_rank - -... - -network = ResNet50(num_classes=10) - -# Init a SummaryCollector callback instance, and use it in model.train or model.eval -summary_dir = "summary_dir" + str(get_rank()) -summary_collector = SummaryCollector(summary_dir=summary_dir, collect_freq=1) - -# Note: dataset_sink_mode should be set to False, else you should modify collect freq in SummaryCollector -model.train(epoch=1, train_dataset=ds_train, callbacks=[summary_collector], dataset_sink_mode=False) - -model.eval(ds_eval, callbacks=[summary_collector]) -``` - -### 使用技巧:记录梯度信息 - -除了上述使用方式外,使用summary算子时还有一个记录梯度信息的技巧。请注意此技巧需要和上述的某一种使用方式同时使用。 - -通过继承原有优化器类的方法可以插入summary算子读取梯度信息。样例代码片段如下: - -```python -import mindspore.nn as nn -import mindspore.ops as ops -... - -# Define a new optimizer class by inheriting your original optimizer. 
-class MyOptimizer(nn.Momentum): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._original_construct = super().construct - self.histogram_summary = ops.HistogramSummary() - self.gradient_names = [param.name + ".gradient" for param in self.parameters] - - def construct(self, grads): - # Record gradient. - self.histogram_summary(self.gradient_names[0], grads[0]) - return self._original_construct(grads) - -... - -# Initialize your model with the newly defined optimizer. -model = Model(network, loss_fn=loss_fn, optimizer=MyOptimizer(arg1=arg1value)) -``` - -## 运行MindInsight - -按照上面教程完成数据收集后,启动MindInsight,即可可视化收集到的数据。启动MindInsight时, -需要通过 `--summary-base-dir` 参数指定summary日志文件目录。 - -其中指定的summary日志文件目录可以是一次训练的输出目录,也可以是多次训练输出目录的父目录。 - -一次训练的输出目录结构如下: - -```text -└─summary_dir - events.out.events.summary.1596869898.hostname_MS - events.out.events.summary.1596869898.hostname_lineage -``` - -启动命令: - -```Bash -mindinsight start --summary-base-dir ./summary_dir -``` - -多次训练的输出目录结构如下: - -```text -└─summary - ├─summary_dir1 - │ events.out.events.summary.1596869898.hostname_MS - │ events.out.events.summary.1596869898.hostname_lineage - │ - └─summary_dir2 - events.out.events.summary.1596869998.hostname_MS - events.out.events.summary.1596869998.hostname_lineage -``` - -启动命令: - -```Bash -mindinsight start --summary-base-dir ./summary -``` - -启动成功后,通过浏览器访问 `http://127.0.0.1:8080` 地址,即可查看可视化页面。 - -停止MindInsight命令: - -```Bash -mindinsight stop -``` - -更多参数设置,请点击查看[MindInsight相关命令](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/mindinsight_commands.html)页面。 - -## 注意事项 - -1. 为了控制列出summary文件目录的用时,MindInsight最多支持发现999个summary文件目录。 - -2. 不能同时使用多个 `SummaryRecord` 实例 (`SummaryCollector` 中使用了 `SummaryRecord`)。 - - 如果在 `model.train` 或者 `model.eval` 的callback列表中使用两个及以上的 `SummaryCollector` 实例,则视为同时使用 `SummaryRecord`,可能导致记录数据失败。 - - 自定义callback中如果使用 `SummaryRecord`,则其不能和 `SummaryCollector` 同时使用。 - - 正确代码: - - ```python - ... 
- summary_collector = SummaryCollector('./summary_dir') - model.train(2, train_dataset, callbacks=[summary_collector]) - - ... - model.eval(dataset, callbacks=[summary_collector]) - ``` - - 错误代码: - - ```python - ... - summary_collector1 = SummaryCollector('./summary_dir1') - summary_collector2 = SummaryCollector('./summary_dir2') - model.train(2, train_dataset, callbacks=[summary_collector1, summary_collector2]) - ``` - - 错误代码: - - ```python - ... - # Note: the 'ConfusionMatrixCallback' is user-defined, and it uses SummaryRecord to record data. - confusion_callback = ConfusionMatrixCallback('./summary_dir1') - summary_collector = SummaryCollector('./summary_dir2') - model.train(2, train_dataset, callbacks=[confusion_callback, summary_collector]) - ``` - -3. 每个summary日志文件目录中,应该只放置一次训练的数据。一个summary日志目录中如果存放了多次训练的summary数据,MindInsight在可视化数据时会将这些训练的summary数据进行叠加展示,可能会与预期可视化效果不相符。 - -4. 使用summary功能时,建议将`model.train`方法的`dataset_sink_mode`参数设置为`False`,从而以`step`作为`collect_freq`参数的单位收集数据。当`dataset_sink_mode`为`True`时,将以`epoch`作为`collect_freq`的单位,此时建议手动设置`collect_freq`参数。`collect_freq`参数默认值为`10`。 - -5. 每个step保存的数据量,最大限制为2147483647Bytes。如果超出该限制,则无法记录该step的数据,并出现错误。 - -6. 
PyNative模式下,`SummaryCollector` 能够正常使用,但不支持记录计算图以及不支持使用Summary算子。 diff --git a/tutorials/training/source_zh_cn/advanced_use/test_model_security_fuzzing.md b/tutorials/training/source_zh_cn/advanced_use/test_model_security_fuzzing.md deleted file mode 100644 index 06cfb3923f6a8b2e140ce0e66a400ca3dc58f85e..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/test_model_security_fuzzing.md +++ /dev/null @@ -1,201 +0,0 @@ -# 使用fuzz testing模块测试模型安全性 - -`Linux` `Ascend` `GPU` `CPU` `模型评测` `企业` `高级` - - - -- [使用fuzz testing模块测试模型安全性](#使用fuzz-testing模块测试模型安全性) - - [概述](#概述) - - [实现阶段](#实现阶段) - - [导入需要的库文件](#导入需要的库文件) - - [参数配置](#参数配置) - - [运用Fuzz Testing](#运用fuzz-testing) - - -   - -## 概述 - -传统软件的决策逻辑由代码逻辑决定,传统软件通过代码行覆盖率来判断当前测试是否充分,理想情况下覆盖率越高,代码测试越充分。然而,对于深度神经网络而言,程序的决策逻辑由训练数据、网络模型结构和参数通过某种黑盒机制决定,代码行覆盖率已不足以评估测试的充分性。需要根据深度网络的特点选择更为适合的测试评价准则,指导神经网络进行更为充分的测试,发现更多的边缘错误用例,从而确保模型的通用性、鲁棒性。 - -MindArmour的fuzz_testing模块以神经元覆盖率作为测试评价准则。神经元覆盖率,是指通过一组输入观察到的、激活的神经元数量和神经元输出值的范围。我们通过神经元覆盖率来指导输入变异,让输入能够激活更多的神经元,神经元值的分布范围更广,从而探索不同类型的模型输出结果、错误行为。 - -这里以LeNet模型,MNIST数据集为例,说明如何使用Fuzzer。 - -> 本例面向CPU、GPU、Ascend 910 AI处理器,你可以在这里下载完整的样例代码: - -## 实现阶段 - -### 导入需要的库文件 - -下列是我们需要的公共模块、MindSpore相关模块和fuzz_testing特性模块,以及配置日志标签和日志等级。 - -```python -import numpy as np -from mindspore import Model -from mindspore import context -from mindspore import load_checkpoint, load_param_into_net - -from mindarmour.fuzz_testing import Fuzzer -from mindarmour.fuzz_testing import ModelCoverageMetrics -from mindarmour.utils.logger import LogUtil - -from examples.common.dataset.data_processing import generate_mnist_dataset -from examples.common.networks.lenet5.lenet5_net import LeNet5 - -LOGGER = LogUtil.get_instance() -TAG = 'Fuzz_testing' -LOGGER.set_level('INFO') -``` - -### 参数配置 - -配置必要的信息,包括环境信息、执行的模式。 - -```python -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") -``` - -详细的接口配置信息,请参见`context.set_context`接口说明。 - -### 运用Fuzz Testing - -1. 
建立LeNet模型,加载MNIST数据集,操作同[模型安全](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/improve_model_security_nad.html) - - ```python - ... - # Lenet model - model = Model(net) - # get training data - mnist_path = "../common/dataset/MNIST/" - batch_size = 32 - ds = generate_mnist_dataset(os.path.join(mnist_path, "train"), batch_size, sparse=False) - train_images = [] - for data in ds.create_tuple_iterator(): - images = data[0].asnumpy().astype(np.float32) - train_images.append(images) - train_images = np.concatenate(train_images, axis=0) - - # get test data - batch_size = 32 - ds = generate_mnist_dataset(os.path.join(mnist_path, "test"), batch_size, sparse=False) - test_images = [] - test_labels = [] - for data in ds.create_tuple_iterator(): - images = data[0].asnumpy().astype(np.float32) - labels = data[1].asnumpy() - test_images.append(images) - test_labels.append(labels) - test_images = np.concatenate(test_images, axis=0) - test_labels = np.concatenate(test_labels, axis=0) - ``` - -2. 
Fuzzer参数配置。 - - 设置数据变异方法及参数。支持同时配置多种方法,目前支持的数据变异方法包含三类: - - - 图像仿射变换方法:Translate、Scale、Shear、Rotate。 - - 基于图像像素值变化的方法: Contrast、Brightness、Blur、Noise。 - - 基于对抗攻击的白盒、黑盒对抗样本生成方法:FGSM、PGD、MDIIM。 - - 数据变异方法中一定要包含基于图像像素值变化的方法。 - - 前两种类型的图像变化方法,支持用户自定义配置参数,也支持算法随机选择参数。用户自定义参数配置范围请参考: - 中对应的类方法。算法随机选择参数,则`params`设置为`'auto_param': [True]`,参数将在推荐范围内随机生成。 - - 基于对抗攻击方法的参数配置请参考对应的攻击方法类。 - - 下面是变异方法及其参数配置的一个例子: - - ```python - mutate_config = [{'method': 'Blur', - 'params': {'radius': [0.1, 0.2, 0.3], - 'auto_param': [True, False]}}, - {'method': 'Contrast', - 'params': {'auto_param': [True]}}, - {'method': 'Translate', - 'params': {'auto_param': [True]}}, - {'method': 'Brightness', - 'params': {'auto_param': [True]}}, - {'method': 'Noise', - 'params': {'auto_param': [True]}}, - {'method': 'Scale', - 'params': {'auto_param': [True]}}, - {'method': 'Shear', - 'params': {'auto_param': [True]}}, - {'method': 'FGSM', - 'params': {'eps': [0.3, 0.2, 0.4], 'alpha': [0.1]}} - ] - ``` - - 设置评价指标,目前支持5种评价指标,包括: - - 通用评价指标:accuracy。 - - 神经元覆盖率指标:kmnc, nbc,snac。 - - 对抗攻击评价指标:attack_success_rate。 - 也可以设置为‘auto’,默认使用所有评价指标。 - - ```python - eval_metrics =['accuracy', 'kmnc', 'attack_success_rate'] - ``` - -3. 初始化种子队列,种子队列中的每个种子,包含2个值:原始图片、图片标签。这里取100个样本作为初始种子队列。 - - ```python - # make initial seeds - initial_seeds = [] - for img, label in zip(test_images, test_labels): - initial_seeds.append([img, label]) - initial_seeds = initial_seeds[:100] - ``` - -4. 测试Fuzz测试前的神经元覆盖率。 - - ```python - segmented_num = 1000 - neuron_num = 10 - model_coverage_test = ModelCoverageMetrics(model, neuron_num, segmented_num, train_images) - model_coverage_test.calculate_coverage(np.array(test_images[:100]).astype(np.float32)) - LOGGER.info(TAG, 'KMNC of this test is : %s', model_coverage_test.get_kmnc()) - ``` - - 结果: - - ```python - KMNC of this test is : 0.0851 - ``` - -5. 
Fuzz测试。 - - ```python - eval_metrics = 'auto' - model_fuzz_test = Fuzzer(model, train_images, neuron_num, segmented_num) - _, _, _, _, metrics = model_fuzz_test.fuzzing(mutate_config, initial_seeds, eval_metrics=eval_metrics) - ``` - -6. 实验结果。 - - fuzzing的返回结果中包含了5个数据:fuzz生成的样本fuzz_samples、生成样本的真实标签true_labels、被测模型对于生成样本的预测值fuzz_preds、 生成样本使用的变异方法fuzz_strategies、fuzz testing的评估报告metrics_report。用户可使用这些返回结果进一步的分析模型的鲁棒性。这里只展开metrics_report,查看fuzz testing后的各个评估指标。 - - ```python - if metrics: - for key in metrics: - LOGGER.info(TAG, key + ': %s', metrics[key]) - ``` - - Fuzz测试后结果如下: - - ```python - Accuracy: 0.7929 - Attack_success_rate: 0.3939 - Neural_coverage_KMNC: 0.4797 - ``` - - Fuzz测试前种子的KMNC神经元覆盖率为8.5%,Fuzz后,KMNC神经元覆盖率为47.97%,神经元覆盖率提升,样本的多样性提升。Fuzz后,模型对于Fuzz生成样本的准确率为79.29%,使用了对抗攻击方法的样本,攻击成功率为39.39%。由于初始化种子、变异方法和相应的参数均为随机选择的,结果有一定的浮动是正常的。 - - 原始图片: - - ![fuzz_seed](./images/fuzz_seed.png) - -​ Fuzz生成的变异图片: - - ![fuzz_res](./images/fuzz_res.png) diff --git a/tutorials/training/source_zh_cn/advanced_use/test_model_security_membership_inference.md b/tutorials/training/source_zh_cn/advanced_use/test_model_security_membership_inference.md deleted file mode 100644 index b3419d5b21e64a4b78b3893cd10d91d47222efe3..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/test_model_security_membership_inference.md +++ /dev/null @@ -1,314 +0,0 @@ -# 使用成员推理测试模型安全性 - -`Linux` `Ascend` `GPU` `CPU` `模型评测` `企业` `高级` - - - -- [使用成员推理测试模型安全性](#使用成员推理测试模型安全性) - - [概述](#概述) - - [实现阶段](#实现阶段) - - [导入需要的库文件](#导入需要的库文件) - - [加载数据集](#加载数据集) - - [建立模型](#建立模型) - - [运用MembershipInference进行隐私安全评估](#运用membershipinference进行隐私安全评估) - - [参考文献](#参考文献) - - -   -   - - -## 概述 - -成员推理是一种推测用户隐私数据的方法。隐私指的是单个用户的某些属性,一旦泄露可能会造成人身损害、名誉损害等后果。通常情况下,用户的隐私数据会作保密处理,但我们可以利用非敏感信息来进行推测。如果我们知道了某个私人俱乐部的成员都喜欢戴紫色墨镜、穿红色皮鞋,那么我们遇到一个戴紫色墨镜且穿红色皮鞋(非敏感信息)的人,就可以推断他/她很可能是这个私人俱乐部的成员(敏感信息)。这就是成员推理。 - 
-机器学习/深度学习的成员推理(MembershipInference),指的是攻击者拥有模型的部分访问权限(黑盒、灰盒或白盒),能够获取到模型的输出、结构或参数等部分或全部信息,并基于这些信息推断某个样本是否属于模型的训练集。利用成员推理,我们可以评估机器学习/深度学习模型的隐私数据安全。如果在成员推理下能正确识别出60%+的样本,那么我们认为该模型存在隐私数据泄露风险。 - -这里以VGG16模型,CIFAR-100数据集为例,说明如何使用MembershipInference进行模型隐私安全评估。本教程使用预训练的模型参数进行演示,这里仅给出模型结构、参数设置和数据集预处理方式。 - ->本例面向Ascend 910处理器,您可以在这里下载完整的样例代码: -> -> - -## 实现阶段 - -### 导入需要的库文件 - -#### 引入相关包 - -下面是我们需要的公共模块、MindSpore相关模块和MembershipInference特性模块,以及配置日志标签和日志等级。 - -```python -import argparse -import sys -import math -import os - -import numpy as np - -import mindspore.nn as nn -from mindspore import Model, load_param_into_net, load_checkpoint -from mindspore import dtype as mstype -from mindspore.common import initializer as init -from mindspore.common.initializer import initializer -import mindspore.dataset as de -import mindspore.dataset.transforms.c_transforms as C -import mindspore.dataset.vision.c_transforms as vision -from mindarmour import MembershipInference -from mindarmour.utils import LogUtil - -LOGGER = LogUtil.get_instance() -TAG = "MembershipInference_test" -LOGGER.set_level("INFO") -``` - -### 加载数据集 - -这里采用的是CIFAR-100数据集,您也可以采用自己的数据集,但要保证传入的数据仅有两项属性"image"和"label"。 - -```python -# Generate CIFAR-100 data. 
-def vgg_create_dataset100(data_home, image_size, batch_size, rank_id=0, rank_size=1, repeat_num=1, - training=True, num_samples=None, shuffle=True): - """Data operations.""" - de.config.set_seed(1) - data_dir = os.path.join(data_home, "train") - if not training: - data_dir = os.path.join(data_home, "test") - - if num_samples is not None: - data_set = de.Cifar100Dataset(data_dir, num_shards=rank_size, shard_id=rank_id, - num_samples=num_samples, shuffle=shuffle) - else: - data_set = de.Cifar100Dataset(data_dir, num_shards=rank_size, shard_id=rank_id) - - input_columns = ["fine_label"] - output_columns = ["label"] - data_set = data_set.rename(input_columns=input_columns, output_columns=output_columns) - data_set = data_set.project(["image", "label"]) - - rescale = 1.0 / 255.0 - shift = 0.0 - - # Define map operations. - random_crop_op = vision.RandomCrop((32, 32), (4, 4, 4, 4)) # padding_mode default CONSTANT. - random_horizontal_op = vision.RandomHorizontalFlip() - resize_op = vision.Resize(image_size) # interpolation default BILINEAR. - rescale_op = vision.Rescale(rescale, shift) - normalize_op = vision.Normalize((0.4465, 0.4822, 0.4914), (0.2010, 0.1994, 0.2023)) - changeswap_op = vision.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - c_trans = [] - if training: - c_trans = [random_crop_op, random_horizontal_op] - c_trans += [resize_op, rescale_op, normalize_op, - changeswap_op] - - # Apply map operations on images. - data_set = data_set.map(operations=type_cast_op, input_columns="label") - data_set = data_set.map(operations=c_trans, input_columns="image") - - # Apply repeat operations. - data_set = data_set.repeat(repeat_num) - - # Apply batch operations. 
- data_set = data_set.batch(batch_size=batch_size, drop_remainder=True) - - return data_set -``` - -### 建立模型 - -这里以VGG16模型为例,您也可以替换为自己的模型。 - -```python -def _make_layer(base, args, batch_norm): - """Make stage network of VGG.""" - layers = [] - in_channels = 3 - for v in base: - if v == 'M': - layers += [nn.MaxPool2d(kernel_size=2, stride=2)] - else: - conv2d = nn.Conv2d(in_channels=in_channels, - out_channels=v, - kernel_size=3, - padding=args.padding, - pad_mode=args.pad_mode, - has_bias=args.has_bias, - weight_init='XavierUniform') - if batch_norm: - layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU()] - else: - layers += [conv2d, nn.ReLU()] - in_channels = v - return nn.SequentialCell(layers) - - -class Vgg(nn.Cell): - """ - VGG network definition. - """ - - def __init__(self, base, num_classes=1000, batch_norm=False, batch_size=1, args=None, phase="train"): - super(Vgg, self).__init__() - _ = batch_size - self.layers = _make_layer(base, args, batch_norm=batch_norm) - self.flatten = nn.Flatten() - dropout_ratio = 0.5 - if not args.has_dropout or phase == "test": - dropout_ratio = 1.0 - self.classifier = nn.SequentialCell([ - nn.Dense(512*7*7, 4096), - nn.ReLU(), - nn.Dropout(dropout_ratio), - nn.Dense(4096, 4096), - nn.ReLU(), - nn.Dropout(dropout_ratio), - nn.Dense(4096, num_classes)]) - - def construct(self, x): - x = self.layers(x) - x = self.flatten(x) - x = self.classifier(x) - return x - - -base16 = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'] - - -def vgg16(num_classes=1000, args=None, phase="train"): - net = Vgg(base16, num_classes=num_classes, args=args, batch_norm=args.batch_norm, phase=phase) - return net -``` - -### 运用MembershipInference进行隐私安全评估 - -1. 构建VGG16模型并加载参数文件。 - - 这里直接加载预训练完成的VGG16参数配置,您也可以使用如上的网络自行训练。 - - ```python - ... 
- # load parameter - parser = argparse.ArgumentParser("main case arg parser.") - parser.add_argument("--data_path", type=str, required=True, help="Data home path for dataset") - parser.add_argument("--pre_trained", type=str, required=True, help="Checkpoint path") - args = parser.parse_args() - args.batch_norm = True - args.has_dropout = False - args.has_bias = False - args.padding = 0 - args.pad_mode = "same" - args.weight_decay = 5e-4 - args.loss_scale = 1.0 - - # Load the pretrained model. - net = vgg16(num_classes=100, args=args) - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) - opt = nn.Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9, - weight_decay=args.weight_decay, loss_scale=args.loss_scale) - load_param_into_net(net, load_checkpoint(args.pre_trained)) - model = Model(network=net, loss_fn=loss, optimizer=opt) - ``` - -2. 加载CIFAR-100数据集,按8:2分割为成员推理模型的训练集和测试集。 - - ```python - # Load and split dataset. - train_dataset = vgg_create_dataset100(data_home=args.data_path, image_size=(224, 224), - batch_size=64, num_samples=5000, shuffle=False) - test_dataset = vgg_create_dataset100(data_home=args.data_path, image_size=(224, 224), - batch_size=64, num_samples=5000, shuffle=False, training=False) - train_train, eval_train = train_dataset.split([0.8, 0.2]) - train_test, eval_test = test_dataset.split([0.8, 0.2]) - msg = "Data loading completed." - LOGGER.info(TAG, msg) - ``` - -3. 
配置推理参数和评估参数 - - 设置用于成员推理的方法和参数。目前支持的推理方法有:KNN、LR、MLPClassifier和RandomForestClassifier。推理参数数据类型使用list,各个方法使用key为"method"和"params"的字典表示。 - - ```python - config = [ - { - "method": "lr", - "params": { - "C": np.logspace(-4, 2, 10) - } - }, - { - "method": "knn", - "params": { - "n_neighbors": [3, 5, 7] - } - }, - { - "method": "mlp", - "params": { - "hidden_layer_sizes": [(64,), (32, 32)], - "solver": ["adam"], - "alpha": [0.0001, 0.001, 0.01] - } - }, - { - "method": "rf", - "params": { - "n_estimators": [100], - "max_features": ["auto", "sqrt"], - "max_depth": [5, 10, 20, None], - "min_samples_split": [2, 5, 10], - "min_samples_leaf": [1, 2, 4] - } - } - ] - ``` - - 我们约定标签为训练集的是正类,标签为测试集的是负类。设置评价指标,目前支持3种评价指标。包括: - - 准确率:accuracy,正确推理的数量占全体样本中的比例。 - - 精确率:precision,正确推理的正类样本占所有推理为正类中的比例。 - - 召回率:recall,正确推理的正类样本占全体正类样本的比例。 - 在样本数量足够大时,如果上述指标均大于0.6,我们认为目标模型就存在隐私泄露的风险。 - - ```python - metrics = ["precision", "accuracy", "recall"] - ``` - -4. 训练成员推理模型,并给出评估结果。 - - ```python - inference = MembershipInference(model) # Get inference model. - - inference.train(train_train, train_test, config) # Train inference model. - msg = "Membership inference model training completed." - LOGGER.info(TAG, msg) - - result = inference.eval(eval_train, eval_test, metrics) # Eval metrics. - count = len(config) - for i in range(count): - print("Method: {}, {}".format(config[i]["method"], result[i])) - ``` - -5. 
实验结果。 - 执行如下指令,开始成员推理训练和评估: - - ```bash - python example_vgg_cifar.py --data_path ./cifar-100-binary/ --pre_trained ./VGG16-100_781.ckpt - ``` - - 成员推理的指标如下所示,各数值均保留至小数点后四位。 - - 以第一行结果为例:在使用lr(逻辑回归分类)进行成员推理时,推理的准确率(accuracy)为0.7132,推理精确率(precision)为0.6596,正类样本召回率为0.8810,说明lr有71.32%的概率能正确分辨一个数据样本是否属于目标模型的训练数据集。在二分类任务下,指标表明成员推理是有效的,即该模型存在隐私泄露的风险。 - - ```text - Method: lr, {'recall': 0.8810,'precision': 0.6596,'accuracy': 0.7132} - Method: knn, {'recall': 0.7082,'precision': 0.5613,'accuracy': 0.5774} - Method: mlp, {'recall': 0.6729,'precision': 0.6462,'accuracy': 0.6522} - Method: rf, {'recall': 0.8513, 'precision': 0.6655, 'accuracy': 0.7117} - ``` - -## 参考文献 - -[1] [Shokri R , Stronati M , Song C , et al. Membership Inference Attacks against Machine Learning Models[J].](https://arxiv.org/abs/1610.05820v2) diff --git a/tutorials/training/source_zh_cn/advanced_use/use_on_the_cloud.md b/tutorials/training/source_zh_cn/advanced_use/use_on_the_cloud.md deleted file mode 100644 index 3da2540ffd9d529d444e4a7bb938b43a26fecd47..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/use_on_the_cloud.md +++ /dev/null @@ -1,321 +0,0 @@ -# 在云上使用MindSpore - -`Linux` `Ascend` `全流程` `初级` `中级` `高级` - - - -- [在云上使用MindSpore](#在云上使用mindspore) - - [概述](#概述) - - [准备工作](#准备工作) - - [ModelArts使用准备](#modelarts使用准备) - - [拥有云上昇腾AI处理器资源](#拥有云上昇腾ai处理器资源) - - [数据准备](#数据准备) - - [执行脚本准备](#执行脚本准备) - - [通过简单适配将MindSpore脚本运行在ModelArts](#通过简单适配将mindspore脚本运行在modelarts) - - [适配脚本参数](#适配脚本参数) - - [适配OBS数据](#适配obs数据) - - [适配8卡训练任务](#适配8卡训练任务) - - [示例代码](#示例代码) - - [创建训练任务](#创建训练任务) - - [进入ModelArts控制台](#进入modelarts控制台) - - [使用常用框架创建训练作业](#使用常用框架创建训练作业) - - [使用MindSpore作为常用框架创建训练作业](#使用mindspore作为常用框架创建训练作业) - - [查看运行结果](#查看运行结果) - - - - - -## 概述 - -ModelArts是华为云提供的面向开发者的一站式AI开发平台,集成了昇腾AI处理器资源池,用户可以在该平台下体验MindSpore。 - -本教程以ResNet-50为例,简要介绍如何在ModelArts使用MindSpore完成训练任务。 - -## 准备工作 - -### ModelArts使用准备 - -参考ModelArts教程“准备工作”一栏,完成账号注册、ModelArts配置和创建桶的准备工作。 -> 
ModelArts教程链接:。页面提供了较丰富的ModelArts教程,参考“准备工作”部分完成ModelArts准备工作。 - -### 拥有云上昇腾AI处理器资源 - -确保你的账号已拥有ModelArts华为云昇腾集群服务公测资格,可在[ModelArts华为云](https://console.huaweicloud.com/modelarts/?region=cn-north-4#/dashboard/applyModelArtsAscend910Beta)提交申请。 - -### 数据准备 - -ModelArts使用对象存储服务(Object Storage Service,简称OBS)进行数据存储,因此,在开始训练任务之前,需要将数据上传至OBS。本示例使用CIFAR-10二进制格式数据集。 - -1. 下载CIFAR-10数据集并解压。 - - > CIFAR-10数据集下载页面:。页面提供3个数据集下载链接,本示例使用CIFAR-10 binary version。 - -2. 新建一个自己的OBS桶(例如:ms-dataset),在桶中创建数据目录(例如:cifar-10),将CIFAR-10数据按照如下结构上传至数据目录。 - - ```text - └─对象存储/ms-dataset/cifar-10 - ├─train - │ data_batch_1.bin - │ data_batch_2.bin - │ data_batch_3.bin - │ data_batch_4.bin - │ data_batch_5.bin - │ - └─eval - test_batch.bin - ``` - -### 执行脚本准备 - -新建一个自己的OBS桶(例如:`resnet50-train`),在桶中创建代码目录(例如:`resnet50_cifar10_train`),并将以下目录中的所有脚本上传至代码目录: -> 脚本使用ResNet-50网络在CIFAR-10数据集上进行训练,并在训练结束后验证精度。脚本可以在ModelArts采用`1*Ascend`或`8*Ascend`两种不同规格进行训练任务。 -> -> 注意运行脚本的版本需要与"创建训练任务"步骤选择的MindSpore版本一致。例如:使用MindSpore 1.1版本教程提供的脚本,则需要在创建训练任务时选择1.1版本的MindSpore引擎。 - -为了方便后续创建训练作业,先创建训练输出目录和日志输出目录,本示例创建的目录结构如下: - -```text -└─对象存储/resnet50-train - ├─resnet50_cifar10_train - │ dataset.py - │ resnet.py - │ resnet50_train.py - │ - ├─output - └─log -``` - -## 通过简单适配将MindSpore脚本运行在ModelArts - -“执行脚本准备”章节提供的脚本可以直接运行在ModelArts,想要快速体验ResNet-50训练CIFAR-10可以跳过本章节。如果需要将自定义MindSpore脚本或更多MindSpore示例代码在ModelArts运行起来,需要参考本章节对MindSpore代码进行简单适配。 - -### 适配脚本参数 - -1. 在ModelArts运行的脚本必须配置`data_url`和`train_url`,分别对应数据存储路径(OBS路径)和训练输出路径(OBS路径)。 - - ``` python - import argparse - - parser = argparse.ArgumentParser(description='ResNet-50 train.') - parser.add_argument('--data_url', required=True, default=None, help='Location of data.') - parser.add_argument('--train_url', required=True, default=None, help='Location of training outputs.') - ``` - -2. 
ModelArts界面支持向脚本中其他参数传值,在下一章节“创建训练作业”中将会详细介绍。 - - ``` python - parser.add_argument('--epoch_size', type=int, default=90, help='Train epoch size.') - ``` - -### 适配OBS数据 - -MindSpore暂时没有提供直接访问OBS数据的接口,需要通过MoXing提供的API与OBS交互。ModelArts训练脚本在容器中执行,通常选用`/cache`目录作为容器数据存储路径。 -> 华为云MoXing提供了丰富的API供用户使用,本示例中仅需要使用`copy_parallel`接口。 - -1. 将OBS中存储的数据下载至执行容器。 - - ```python - import moxing as mox - mox.file.copy_parallel(src_url='s3://dataset_url/', dst_url='/cache/data_path') - ``` - -2. 将训练输出从容器中上传至OBS。 - - ```python - import moxing as mox - mox.file.copy_parallel(src_url='/cache/output_path', dst_url='s3://output_url/') - ``` - -### 适配8卡训练任务 - -如果需要将脚本运行在`8*Ascend`规格的环境上,需要对创建数据集的代码和本地数据路径进行适配,并配置分布式策略。通过获取`DEVICE_ID`和`RANK_SIZE`两个环境变量,用户可以构建适用于`1*Ascend`和`8*Ascend`两种不同规格的训练脚本。 - -1. 本地路径适配。 - - ```python - import os - - device_num = int(os.getenv('RANK_SIZE')) - device_id = int(os.getenv('DEVICE_ID')) - # define local data path - local_data_path = '/cache/data' - - if device_num > 1: - # define distributed local data path - local_data_path = os.path.join(local_data_path, str(device_id)) - ``` - -2. 数据集适配。 - - ```python - import os - import mindspore.dataset.engine as de - - device_id = int(os.getenv('DEVICE_ID')) - device_num = int(os.getenv('RANK_SIZE')) - if device_num == 1: - # create train data for 1 Ascend situation - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True) - else: - # create train data for 1 Ascend situation, split train data for 8 Ascend situation - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True, - num_shards=device_num, shard_id=device_id) - ``` - -3. 
配置分布式策略。 - - ```python - import os - from mindspore import context - from mindspore.context import ParallelMode - - device_num = int(os.getenv('RANK_SIZE')) - if device_num > 1: - context.set_auto_parallel_context(device_num=device_num, - parallel_mode=ParallelMode.DATA_PARALLEL, - gradients_mean=True) - ``` - -### 示例代码 - -结合以上三点对MindSpore脚本进行简单适配,以下述伪代码为例: - -原始MindSpore脚本: - -``` python -import os -import argparse -from mindspore import context -from mindspore.context import ParallelMode -import mindspore.dataset.engine as de - -device_id = int(os.getenv('DEVICE_ID')) -device_num = int(os.getenv('RANK_SIZE')) - -def create_dataset(dataset_path): - if device_num == 1: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True) - else: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True, - num_shards=device_num, shard_id=device_id) - return ds - -def resnet50_train(args): - if device_num > 1: - context.set_auto_parallel_context(device_num=device_num, - parallel_mode=ParallelMode.DATA_PARALLEL, - gradients_mean=True) - train_dataset = create_dataset(local_data_path) - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='ResNet-50 train.') - parser.add_argument('--local_data_path', required=True, default=None, help='Location of data.') - parser.add_argument('--epoch_size', type=int, default=90, help='Train epoch size.') - - args_opt, unknown = parser.parse_known_args() - - resnet50_train(args_opt) -``` - -适配后的MindSpore脚本: - -``` python -import os -import argparse -from mindspore import context -from mindspore.context import ParallelMode -import mindspore.dataset.engine as de - -# adapt to cloud: used for downloading data -import moxing as mox - -device_id = int(os.getenv('DEVICE_ID')) -device_num = int(os.getenv('RANK_SIZE')) - -def create_dataset(dataset_path): - if device_num == 1: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True) - else: - ds = 
de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=True, - num_shards=device_num, shard_id=device_id) - return ds - -def resnet50_train(args): - # adapt to cloud: define local data path - local_data_path = '/cache/data' - - if device_num > 1: - context.set_auto_parallel_context(device_num=device_num, - parallel_mode=ParallelMode.DATA_PARALLEL, - gradients_mean=True) - # adapt to cloud: define distributed local data path - local_data_path = os.path.join(local_data_path, str(device_id)) - - # adapt to cloud: download data from obs to local location - print('Download data.') - mox.file.copy_parallel(src_url=args.data_url, dst_url=local_data_path) - - train_dataset = create_dataset(local_data_path) - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='ResNet-50 train.') - # adapt to cloud: get obs data path - parser.add_argument('--data_url', required=True, default=None, help='Location of data.') - # adapt to cloud: get obs output path - parser.add_argument('--train_url', required=True, default=None, help='Location of training outputs.') - parser.add_argument('--epoch_size', type=int, default=90, help='Train epoch size.') - args_opt, unknown = parser.parse_known_args() - - resnet50_train(args_opt) -``` - -## 创建训练任务 - -准备好数据和执行脚本以后,需要创建训练任务将MindSpore脚本真正运行起来。首次使用ModelArts的用户可以根据本章节了解ModelArts创建训练作业的流程。 - -### 进入ModelArts控制台 - -打开华为云ModelArts主页,点击该页面的“进入控制台”。 - -### 使用常用框架创建训练作业 - -ModelArts教程展示了如何使用常用框架创建训练作业。 - -### 使用MindSpore作为常用框架创建训练作业 - -以本教程使用的训练脚本和数据为例,详细列出在创建训练作业界面如何进行配置: - -1. `算法来源`:选择`常用框架`,然后`AI引擎`选择`Ascend-Powered-Engine`和所需的MindSpore版本(本示例图片为`Mindspore-0.5-python3.7-aarch64`,请注意使用与所选版本对应的脚本)。 - -2. `代码目录`选择预先在OBS桶中创建代码目录,`启动文件`选择代码目录下的启动脚本。 - -3. `数据来源`选择`数据存储位置`,并填入OBS中CIFAR-10数据集的位置。 - -4. `运行参数`:`数据存储位置`和`训练输出位置`分别对应运行参数`data_url`和`train_url`,选择`增加运行参数`可以向脚本中其他参数传值,如`epoch_size`。 - -5. `资源池`选择`公共资源池 > Ascend`。 - -6. 
`资源池 > 规格`选择`Ascend: 1 * Ascend 910 CPU:24 核 96GiB`或`Ascend: 8 * Ascend 910 CPU:192 核 768GiB`,分别表示单机单卡和单机8卡规格。 - -使用MindSpore作为常用框架创建训练作业,如下图所示。 - -![训练作业参数](./images/cloud_train_job1.png) - -![训练作业规格](./images/cloud_train_job2.png) - -## 查看运行结果 - -1. 在训练作业界面可以查看运行日志 - - 采用`8*Ascend`规格执行ResNet-50训练任务,epoch总数为92,精度约为92%,每秒训练图片张数约12000,日志如下图所示。 - - ![8*Ascend训练执行结果](./images/train_log_8_Ascend_clu.png) - - ![8*Ascend训练执行结果](./images/train_log_8_Ascend.png) - - 采用`1*Ascend`规格执行ResNet-50训练任务。epoch总数为92,精度约为95%,每秒训练图片张数约1800,日志如下图所示。 - - ![1*Ascend训练执行结果](./images/train_log_1_Ascend.png) - -2. 如果创建训练作业时指定了日志路径,可以从OBS下载日志文件并查看。 diff --git a/tutorials/training/source_zh_cn/advanced_use/visualization_tutorials.rst b/tutorials/training/source_zh_cn/advanced_use/visualization_tutorials.rst deleted file mode 100644 index 1480d6709e5a363bb910cf52a33748e2ece3aada..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/advanced_use/visualization_tutorials.rst +++ /dev/null @@ -1,14 +0,0 @@ -使用可视化组件MindInsight -=========================== - -.. toctree:: - :maxdepth: 1 - - summary_record - dashboard - lineage_and_scalars_comparison - hyper_parameters_auto_tuning - performance_profiling - debugger - model_explanation - mindinsight_commands diff --git a/tutorials/training/source_zh_cn/conf.py b/tutorials/training/source_zh_cn/conf.py deleted file mode 100644 index aa4b98834d2001250bd2adc3442aabc4e228f9dd..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/conf.py +++ /dev/null @@ -1,90 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. 
If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys -import IPython -import re -import nbsphinx as nbs - -# -- Project information ----------------------------------------------------- - -project = 'MindSpore' -copyright = '2020, MindSpore' -author = 'MindSpore' - -# The full version, including alpha/beta/rc tags -release = 'master' - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'recommonmark', - 'sphinx_markdown_tables', - 'nbsphinx', - 'sphinx.ext.mathjax', - 'IPython.sphinxext.ipython_console_highlighting' -] - -source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -highlight_language = 'none' - -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -html_search_language = 'zh' - -html_search_options = {'dict': '../../resource/jieba.txt'} - -html_static_path = ['_static'] - -def setup(app): - app.add_stylesheet('css/bootstrap.min.css') - app.add_stylesheet('css/training.css') - app.add_javascript('js/training.js') - -# Remove extra outputs for nbsphinx extension. 
-nbsphinx_source_re = re.compile(r"(app\.connect\('html-collect-pages', html_collect_pages\))") -nbsphinx_math_re = re.compile(r"(\S.*$)") -mod_path = os.path.abspath(nbs.__file__) -with open(mod_path, "r+", encoding="utf8") as f: - contents = f.readlines() - for num, line in enumerate(contents): - _content_re = nbsphinx_source_re.search(line) - if _content_re and "#" not in line: - contents[num] = nbsphinx_source_re.sub(r"# \g<1>", line) - if "mathjax_config = app.config" in line and "#" not in line: - contents[num:num+10] = [nbsphinx_math_re.sub(r"# \g<1>", i) for i in contents[num:num+10]] - break - f.seek(0) - f.writelines(contents) \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/index.rst b/tutorials/training/source_zh_cn/index.rst deleted file mode 100644 index fbee3e070cc17fce93a01987cc143af2c81f52db..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/index.rst +++ /dev/null @@ -1,943 +0,0 @@ -.. MindSpore documentation master file, created by - sphinx-quickstart on Thu Mar 24 09:00:00 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -使用MindSpore进行训练 -========================= - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 快速入门 - :hidden: - - quick_start/quick_start - quick_start/linear_regression - quick_start/quick_video - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 基础使用 - :hidden: - - use/data_preparation - use/defining_the_network - use/save_model - use/load_model_for_inference_and_transfer - use/publish_model - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 处理数据 - :hidden: - - advanced_use/convert_dataset - advanced_use/optimize_data_processing - - -.. 
toctree:: - :glob: - :maxdepth: 1 - :caption: 构建网络 - :hidden: - - advanced_use/custom_loss_function - advanced_use/custom_operator - advanced_use/migrate_script - advanced_use/apply_deep_probability_programming - advanced_use/implement_high_order_differentiation - advanced_use/quantum_neural_network - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 调试网络 - :hidden: - - advanced_use/debug_in_pynative_mode - advanced_use/dump_in_graph_mode - advanced_use/custom_debugging_info - advanced_use/visualization_tutorials - advanced_use/enable_auto_augmentation - advanced_use/evaluate_the_model_during_training - advanced_use/incremental_operator_build - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 优化训练性能 - :hidden: - - advanced_use/distributed_training_tutorials - advanced_use/enable_mixed_precision - advanced_use/enable_graph_kernel_fusion - advanced_use/apply_gradient_accumulation - advanced_use/enable_cache - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 压缩模型 - :hidden: - - advanced_use/apply_quantization_aware_training - advanced_use/apply_post_training_quantization - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 模型安全和隐私 - :hidden: - - advanced_use/improve_model_security_nad - advanced_use/protect_user_privacy_with_differential_privacy - advanced_use/protect_user_privacy_with_suppress_privacy - advanced_use/test_model_security_fuzzing - advanced_use/test_model_security_membership_inference - advanced_use/model_encrypt_protection - -.. toctree:: - :glob: - :maxdepth: 1 - :caption: 应用实践 - :hidden: - - advanced_use/cv - advanced_use/nlp - advanced_use/hpc - advanced_use/use_on_the_cloud - -.. raw:: html - -
    -
    -
    -
    - - -
    - 筛选条件 - -
    - -
    -
    -
    -
    -
    操作系统
    -
    -
    - - -
    -
    - -
    -
    -
    -
    -
    硬件
    -
    -
    - - - -
    -
    -
    - -
    -
    -
    -
    用户
    -
    -
    - - - - -
    -
    -
    - -
    -
    -
    -
    阶段
    -
    -
    - - - - - - - - - - -
    - -
    -
    -
    -
    -
    -
    体验
    -
    -
    - - -
    - -
    - -
    - -
    -
    - - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - -
    - -
    -
    -
    - diff --git a/tutorials/training/source_zh_cn/quick_start/images/LeNet_5.jpg b/tutorials/training/source_zh_cn/quick_start/images/LeNet_5.jpg deleted file mode 100644 index 7894b0e181d965c5e9cbba91fe240c1890d37bda..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/quick_start/images/LeNet_5.jpg and /dev/null differ diff --git a/tutorials/training/source_zh_cn/quick_start/linear_regression.ipynb b/tutorials/training/source_zh_cn/quick_start/linear_regression.ipynb deleted file mode 100644 index 291939731548dee8365d1f49f436b17a9a046733..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/linear_regression.ipynb +++ /dev/null @@ -1,651 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 实现简单线性函数拟合\n", - "\n", - "作者:[杨奕](https://github.com/helloyesterday)    编辑:[吕明赋](https://gitee.com/lvmingfu)\n", - "\n", - "`Linux` `Windows` `Ascend` `GPU` `CPU` `全流程` `初级` `中级` `高级`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/quick_start/linear_regression.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/mindspore_linear_regression.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL21pbmRzcG9yZV9saW5lYXJfcmVncmVzc2lvbi5pcHluYg==&imagename=MindSpore1.1.1) 
[![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_online_experience.png)](https://ascend.huawei.com/zh/#/college/onlineExperiment/codeLabMindSpore/linearRegression)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "回归问题算法通常是利用一系列属性来预测一个值,预测的值是连续的。例如给出一套房子的一些特征数据,如面积、卧室数等等来预测房价,利用最近一周的气温变化和卫星云图来预测未来的气温情况等。如果一套房子实际价格为500万元,通过回归分析的预测值为499万元,则认为这是一个比较好的回归分析。在机器学习问题中,常见的回归分析有线性回归、多项式回归、逻辑回归等。本例子介绍线性回归算法,并通过MindSpore进行线性回归AI训练体验。\n", - "\n", - "整体流程如下:\n", - "\n", - "1. 生成数据集\n", - "2. 定义训练网络\n", - "3. 定义前向传播网络与反向传播网络并关联\n", - "4. 拟合过程可视化准备\n", - "5. 执行训练" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 本文档适用于CPU、GPU和Ascend环境。本例的源代码地址:。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 环境准备\n", - "\n", - "设置MindSpore运行配置" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:52.617310Z", - "start_time": "2021-01-04T07:04:51.919345Z" - } - }, - "outputs": [], - "source": [ - "from mindspore import context\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`GRAPH_MODE`:图模式。\n", - "\n", - "`device_target`:设置MindSpore的训练硬件为CPU。\n", - "\n", - "> 本教程代码依赖`matplotlib`第三方支持包,可使用命令`pip install matplotlib`安装。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 生成数据集\n", - "\n", - "### 定义数据集生成函数\n", - "\n", - "`get_data`用于生成训练数据集和测试数据集。由于拟合的是线性数据,假定要拟合的目标函数为:$f(x)=2x+3$,那么我们需要的训练数据集应随机分布于函数周边,这里采用了$f(x)=2x+3+noise$的方式生成,其中`noise`为遵循标准正态分布规律的随机数值。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:52.623357Z", - "start_time": "2021-01-04T07:04:52.618320Z" - } - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "\n", - "def get_data(num, w=2.0, b=3.0):\n", - " for _ in 
range(num):\n", - " x = np.random.uniform(-10.0, 10.0)\n", - " noise = np.random.normal(0, 1)\n", - " y = x * w + b + noise\n", - " yield np.array([x]).astype(np.float32), np.array([y]).astype(np.float32)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用`get_data`生成50组测试数据,并可视化。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:52.988318Z", - "start_time": "2021-01-04T07:04:52.624363Z" - } - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAEICAYAAAC6fYRZAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3de3yU5Zn/8c/FwRNYxSVBFBE8tFu1Fdsg1EOrQpWqLZWtVi3Iri7xUFrRdn/i4VdPuxC79dDtzxXSFnRbqIcqeAIFFKwBFYO1cgj+RAUMYBJqBTyFhrn2j+eZMElmkgwzTyYz+b5fr7wyz2HmvnkyXLlzP9dct7k7IiJSmLrlugMiIhIdBXkRkQKmIC8iUsAU5EVECpiCvIhIAVOQFxEpYAryIs2Y2RIz+9d2nnuamVVH3SeRPaUgL3nLzNab2adm9lHC1//Ldb9SMbN/NrOKXPdDupYeue6ASIa+7e6Lct0Jkc5KI3kpOGa2t5l9aGbHJewrCkf9xWbWx8yeMrM6M/tb+HhAO197XzO7P3zeGmBos+OTzextM9thZmvM7Lxw/xeBacDXwr84Pgz3n2Nmfzaz7Wb2npndkq3rIAIK8lKA3L0eeAy4KGH3BcAL7l5L8L6fCRwODAQ+Bdo7zXMzcGT4dRYwvtnxt4FTgQOAW4Hfm1l/d68CrgBecvfe7n5geP7HwCXAgcA5wJVm9t00/rkirVKQl3w3Nxy1x78mhPtn0zTIXxzuw93/6u6Puvsn7r4D+A/gG+1s7wLgP9z9A3d/D/ivxIPu/oi7b3b3mLs/BLwFnJjqxdx9ibuvDM9/A/hDGn0RaZPm5CXffTfFnPzzwL5mNgx4HxgCzAEws/2Au4FRQJ/w/P3NrLu772qjvUOA9xK2NyQeNLNLgGuBQeGu3kDfVC8W9q8MOA7YC9gbeKSNPoi0m0byUpDcPQY8TDCavxh4Khy1A/wE+AIwzN0/B3w93G/teOktwGEJ2wPjD8zscODXwETgH8IpmVUJr5us5Ots4AngMHc/gGDevj39EGkXBXkpZLOB7wM/CB/H7U8wD/+hmR1EMM/eXg8D14c3bwcAP0o41osgkNcBmNm/EIzQ42qAAWa2V7O+fODun5nZiQS/kESyRkFe8t2TzfLk58QPuPsrBDc2DwHmJzznHmBfYCvwMvBMGu3dSjBF8y6wAPhdQntrgDuBlwgC+peApQnPfR5YDbxvZlvDfVcBt5nZDuBnBL9ERLLGtGiIiEjh0kheRKSAKciLiBQwBXkRkQKmIC8iUsA61Yeh+vbt64MGDcp1N0RE8sqKFSu2untRsmMZB3kzOwz4H+BgIAaUu/svw0JLEwhzhoEb3H1ea681aNAgKisrM+2SiEiXYmYbUh3Lxki+AfiJu79mZvsDK8xsYXjsbnf/RRbaEBGRPZBxkHf3
LQQf9cbdd5hZFXBopq8rIiKZy+qNVzMbBJwAvBLummhmb5jZDDPrk/KJIiISiawFeTPrDTwKTHL37cB9BDW3hxCM9O9M8bxSM6s0s8q6urpkp4iIyB7KSpA3s54EAX6Wuz8G4O417r4rrAb4a1LU1Hb3cncvcfeSoqKkN4dFRGQPZRzkzcyA3wJV7n5Xwv7+CaedR1ByVUREOlA2smtOBsYBK83s9XDfDcBFZjaEoPTqeuDyLLQlIpLfYjGoq4PiYrDolw7IRnZNBckXOWg1J15EpMuIxaCmBtzhwgvhpZfgpJNg8WLoFm3hgU71iVcRkYITi8Hpp8OLLwZBPm7ZMqithYMPjrR51a4REYlSXV0Q0Juv3dHQABdcEPwSiJCCvIhIlIqLg6mZ+Pz7sGG7p2heein4JRAhBXkRkSiZBXPvmzbBli3BqP6UU6BHDzjpJBr6HsQNz93AypqVkTSvOXkRkah16wb9E7LKFy+Gujoe2foCF/x7sK77xzs/5pff+mXWm1aQFxHpYNt27uDAabtvuI48YiT3jLonkrY0XSMi0oGmvDiFA+84sHF7zVVrWDhuIRZRzrxG8iIiHWDDhxsY9MtBjdvXDr+WO89KWtIrqzSSFxGJkLvzg8d+0CTA1744nDu/+Z8d0r5G8iIiEXml+hWG/3Z443b5092Y8GoMelQGqZP9+kXeB43kRUTSlVimIImGWAPH/vexjQG+uFcxn97wCRP23Z06SXFxh3RVQV5EJB3xMgUDBsBpp7X4xOojqx+h5+09WVO3BoCF4xZS89Ma9um5b5A6WV0NS5Z0SHEy0HSNiEh64mUKGhqC7+G0y/b67RxQdkDjaSOPGMmCsQuaZs1069YhUzSJNJIXka6tjamXFuJlChKmXaa8OKVJgE+ZFpluW1mgkbyIdF3xqZdly9pf+jdepqCujg17fcqg23af32pa5J60lQUK8iLSdaWYemmLmzF26bXMXjm7cV/tT2sp6tXKEqZ72FamsrH832FmttjMqsxstZldHe4/yMwWmtlb4fc+mXdXRCSLkky9tGX5puV0u61bY4AvP7ccv9lbD/B72FY2mGc4NxSu5drf3V8zs/2BFcB3gX8GPnD3MjObDPRx9+tae62SkhKvrKzMqD8iImlp53J8DbEGhkwbwuq61UCQFrlh0gb26bFP1ttKl5mtcPeSZMcyHsm7+xZ3fy18vAOoAg4FRgMPhKc9QBD4RUQ6l3jGSytBN54WGQ/wjWmR6QT4draVbVmdkzezQcAJwCtAP3ffAsEvAjNL+reJmZUCpQADBw7MZndERDLSrrTITi5rt3bNrDfwKDDJ3be393nuXu7uJe5eUlTUxpyWiEhHiMWYMv+GJgF+9VWrI60WGZWsjOTNrCdBgJ/l7o+Fu2vMrH84iu8P1GajLRGRKG344F0G/eqIxu1rh1/DnWfdlcMeZSYb2TUG/BaocvfEK/EEMD58PB54PNO2RESi0lgtMiHA1/4c7vzyv+WwV5nLxkj+ZGAcsNLMXg/33QCUAQ+b2WXARuD8LLQlIpJ1yzctZ9hvhjVulz8JE1YQ3CDNs+mZ5jIO8u5eAaS6CiMyfX0Rkag0xBo4YfoJrKpdBYRpkVevZ5/nR0GP8JOpHVxrJtv0iVcR6ZIeWf0IF/zxgsbtheMWMvKIkcFGWLYg2/nsuaAgLyKFK8mHjzprtcioqAqliBSmJHXfm1eLzNe0yHRoJC8ihSmhINiG1UsZdHv3xkPXDL+Gu/I4LTIdCvIiUpj69sWHljD20FeYfdyuxt01P62huFfHFAfrDBTkRaTwxGIsH13CsLNeb9xVfm45E746IYedyg0FeRHJX0lurDbEGjjh3i+xauhaAIo/gg2T1rPPoYfnsqc5oxuvIpKfktxY/eOaP9Lz9p6s+iAI8AtmdaNm+dfZ55CuW/xQI3kRyU8JN1a3Vy7lgIQbqyOPGMmCi5/BrthaELnumVCQF5H8FK60NMUquPH03TdWV1+1
mmOKjgk2CiTXPRMK8iKSlzZs28igM/7UuN2V0iLToSAvInnF3Rk7Z2yTRbS7WlpkOhTkRSRvNK8WOf3c6ZR+tTSHPer8FORFpNNLWi0y3UW0uyilUIpIp9aYFhkG+AVjF+zZItpdVLaW/5sBnAvUuvtx4b5bgAlAXXjaDe4+LxvtiUjha7VaZJIPQUly2RrJ3w+MSrL/bncfEn4pwItIu0x9cWrqapFJPgQlqWVlJO/ufzKzQdl4LRHpujZ8uIFBvxzUuJ00LTLhQ1AsWxZsKx8+pajn5Cea2RtmNsPM+iQ7wcxKzazSzCrr6uqSnSIiXcDYx8Y2CfA1P61JnvcefgiKHj2C78VKnWxNlEH+PuBIYAiwBbgz2UnuXu7uJe5eUlRUFGF3RKQzWr5pOXarMWvlLCBIi/SbvWneeywGNTXgHszBL14M1dWwZInm5NsQWQqlu9fEH5vZr4GnompLRPJPu9Mi43Pwy8KFtRcvLqjl+aIW2UjezPonbJ4HrIqqLRHp5BJH4qSZFplsDl7aLVsplH8ATgP6mlk1cDNwmpkNARxYD1yejbZEJM8kjMS3n3oiB3xjWeOhpItoNxefg4+P5DUHnxbz8DdrZ1BSUuKVlZW57oaIZFNNDQwYwNThDdwwcvfuJtUi28p7V158q8xshbuXJDumT7yKSKQ27PUpdtPuAH/NsEn4zd40wLeV9x6fg1eAT5tq14hIZMY+NrYxawag5ifvU9y72Q1T5b1HSiN5Ecm6FmmR50wL0iKbB3hQ3nvENJIXkaxpiDXwlelfYWXtSgCKdvZk450x9nl+NiyeEEy7NBfPe9eceyQ0kheRrIinRcYD/IJzHqL2584+9bvaTn3UnHtkNJIXkYykrBYJcNK9Sn3MMQV5EdljU1+cyg3P39C43SQtEjQN0wkoyItI2ppXi5w0bBJ3j7q75YkqP5BzCvIikpYWaZFaRLtTU5AXkXbRItr5SUFeRFrVIi1yvyI2XrNRa6zmCaVQikhKLdIixy6g9t9qFeDziEbyItJCq4toS17RSF5EmiirKEu9iLbkHY3kRQRiMTa+82cOn7W7Wm3KtEjJK9laNGQGcC5Q6+7HhfsOAh4CBhEsGnKBu/8tG+2JSBbFYoz9YX9mHVzbuEtpkYUjW9M19wOjmu2bDDzn7kcDz4XbItKJLN+0HLu9e2OAn/50N/yK9xXgC0hWgry7/wn4oNnu0cAD4eMHgO9moy0RaUOz9VSTaYg1cPy04xvz3ot29uTTqd0p3fcU1ZgpMFHeeO3n7lsAwu9J3zlmVmpmlWZWWacFekUy09AAp5zS6ipL8bTIN2reAGDBrG7Uvjicfd59D5YsUY2ZApPzG6/uXg6UQ7DGa467I5K/YjE49VR4+eVgu9kqS83TIkcceioLrlxGt4Zd8O5LQZ0ZBfiCE+VIvsbM+gOE32vbOF9EMlFXB6++unt76NDGqZdkaZGLLnuBbiedrBWZClyUI/kngPFAWfj98QjbEpHiYjj5ZFi6NAjwFRVs3P4eh99zeOMpLdIiVQq44GUrhfIPwGlAXzOrBm4mCO4Pm9llwEbg/Gy0JSIpNFtGb9zcS/j9G79vPJw0LVKlgAteVoK8u1+U4tCIbLy+iCQRi7UchXfrxvKGDQy77eDG01QtsmvL+Y1XEdkDsRicfvrupfUWL6aBGF8t/2pj1oyqRQqodo1IfqqrCwJ8QwMsW8ajr8xsmhbZVrXIduTSS2FQkBfJR8XFcNJJbN+vO3ZTA99b8K8AjBg8gl0/28U3j/xm6ufG/wpoJZdeCoeCvEg+MqPs30dxwP/Z1bhr9VWrWXTJIrpZG/+tm/0VgD6EWNA0Jy+SZzZu29h6WmRbwr8CGufzlR9f0BTkRfLIuMfG8vtMF9Fulmqp/PjCpiAvkgde3fQqJ/7mxMbtaWuP4vJZbwZ57slSKdui/PguQ3PyIp1YvFpkPMD3/Rg++Xe4/I/rg8Ce7k1UZdV0OQryIp3Uo2sebZoW
+YNnqXvl6+xLQq2ZdG6iKqumS9J0jUgns6N+B58r+1zj9ojBI1gwbkGQNbN4ZNOpmXRuoib7haApm4KnkbxIJ1JWUdYkwK+6clXTtMj4XHp87j1+E7W6uu1a8PFfCKo62aVoJC/SCWSUFtnem6jKqumSFORFcmzcnHFtV4vMFmXVdDkK8iI50iIt8pxpXF5yeQ57JIVIQV4kKiny1xtiDU2qRfbdry8bJ21k35775qqnUsAiv/FqZuvNbKWZvW5mlVG3J9IppEhXbJkW+Qx1l6xiX5UDloh01Ej+dHff2kFtieRes3TFHZve5XMzjmo8PGLwCBb84Bm6nTGiSU14uinhTbJL7yiRKCSkK5ZdNKBJgG9Mi9z6V1WDlMh1RJB3YIGZrTCzFmuQmVmpmVWaWWWd3uRSKMzYOPcB7KYGrj9yPQBXD7sav9k5tvjY4BzlrUsHMI+4hoWZHeLum82sGFgI/Mjd/5Ts3JKSEq+s1LS95L9L5lzC7974XeN2yrTIPSkuJtKMma1w95JkxyKfk3f3zeH3WjObA5wIJA3yIvku7bRI5a1LxCIN8mbWC+jm7jvCx2cCt0XZpkguKC1SOquoR/L9gDkW/BnaA5jt7s9E3KZIh3p0zaN875HvNW4/O/ZZzjzyzBz2SGS3SIO8u78DHB9lGyK50mq1SJFOQu9GkT1wR8UdrVeLFOkkVNZAJA3Nq0VePexq7hl1Tw57JNI6BXmRdrrs8cuY8fqMxu1Iq0WKZIn+thRpw6btmzjvofMaA/y0c6bhN7sCvOQFjeRFUoh5jOmV05n83GR27tpJ2Ygyfjzsx0qLlLyiIC+SxJq6NZQ+WcrS95YyYvAIpp87nSMPOjLX3RJJm4K8SIL6hnqmVkxlyotT2H/v/bn/OzO4pP+3sD76VKrkJ83Ji4QqNlZwwvQTuPWFWzn/2POpunI14yfdjx12WJOa8CL5REFeurxtn23jyqeu5NSZp/LJ3z9h3sXzmDVmFsWfmEoBS97TdI10aXOq5jBx/kTe/+h9rh1+Lbeefiu99+odHIyXAo4v6qFSwJKHFOSlS9q0fRMT509k7tq5DOk3hMe/OYOS485sWu7XLFitSaWAJY9puka6lJjHuO/V+zjmv4/hmXXPcMeIMpY/2JuSr5ybfN49XgpYAV7ylEbyUpiSLMaRNC3y771h6U1N591V310KiEbyUnhiMTj9dBgwAE47jfqdn3LLklsYMm0IVVuruH/0/SwctzDIe9cSfFLgNJKXwlNX15gVU/HeUkqnHU/V397i4i9dzN1n3d20HIHm3aXART6SN7NRZvamma0zs8lRtydCcTHbTj2RK79tnDp+F5/Edu5Oi0xWb0bz7lLAol7+rztwL/BNoBp41cyecPc1UbYrXductXOZeO563v/IuObEq7ntjNt3p0WKdDFRT9ecCKwLV4jCzB4ERgMK8pJ1m7Zv4kfzf8SctXM4vt/xzP3+XIYeOjTX3RLJqaiD/KHAewnb1cCwiNuULqZ5tcg7Rt7BNcOvoWf3nrnumkjORR3kk01yepMTzEqBUoCBAwdG3B0pNKoWKdK6qG+8VgOHJWwPADYnnuDu5e5e4u4lRUVFEXdHCkV9Q32TtMiZo2fuTosUkUZRj+RfBY42s8HAJuBC4OKI25QCV7GxgtInS6naWpU8LVJEGkUa5N29wcwmAs8C3YEZ7r46yjalcG37bBuTF01m2oppHH7A4cy7eB7fOvpbue6WSKcW+Yeh3H0eMC/qdqSwJVaLvGbYJKVFirSTPvEqnVqTtMgdvZj7sDH0udfgzP1y3TWRvKDaNdIpJVaLnL9uPmXDbuTVX33G0Pd2aQEPkTRoJC+dTtK0yD5HwPAXtYCHSJoU5KXTaL6I9szRMxl//HjMHWpr4fnnYetWFRITSYOCvHQKiWmRFx13EfeMuidIi4yXDY6P4BcvVoAXSYPm5CWnki2iPfufZu/Oe08oG6y5eJH0aSQvOdMiLfK4H9H70MFN
T9Ji2iIZ0UhesiMWg5oacG/z1M07NjPmoTGMeXgMRfsV8fKly7hr6mv0HvyFluusxhf1qK6GJUs0VSOSJgV5yVyz5fZaLIYdP81jTKucxhfv/WKQFjmijFcnvMrQnoNan5LRoh4ie0zTNZK5ZPPmzRbDbrVapKZkRCKjIC+ZayVIp0yLTByVa51VkcgoyEvmUgTppRuXMuHJCS3TIpOJT8lAMN2jgC+SFZqTl+xImDePp0WeMvOU5GmRrWnn/L6ItI9G8pJVTdIih1/Dbaffll61yHbM74tI+2kkL1nRIi3yspe566y70i8HHJ/f79FDN2FFskAjeclIzGOUryjnukXXsXPXTspGlHHt167d80W0dRNWJKsiC/JmdgswAYgnPd8QLiAiBaKqrorSp0qp2FjBiMEjmHbuNI466KjMXzjxJqyIZCTqkfzd7v6LiNuQDtautEgR6RQ0XSNpSSstUkRyLuobrxPN7A0zm2FmfZKdYGalZlZpZpV1qjDYaSWmRX5c/Q7z/tCN2f+1ieJ9++a6ayLSCvN2FJRK+WSzRcDBSQ7dCLwMbAUcuB3o7+6XtvZ6JSUlXllZucf9kWgkpkVe/aUJ3PaD39D7011BBkx1tebPRXLMzFa4e0myYxlN17j7yHZ24NfAU5m0JR1v847NTJw3MVhEu9/xzP3+XIYeUgJDq1RnRiRPRJld09/dt4Sb5wGrompLsqvNtEilOIrkjShvvP7czIYQTNesBy6PsC3Jkqqa1ZTOvYyK91/hjMFnMP3c6S3TIpXiKJI3Igvy7j4uqteW7KtvqGfqi1OYsuR29v/Mmbn+84y/aQHWvXuuuyYiGVAKpTRNi1xj3DMfiuvfga1bNWIXyXOqXdOFNUmL/PvHzLvoaWbXnkpxverGiBQKjeS7qMS0yEnDJnH7GbcHxcQWj9JNVZECoiDfxWzeVs3Exy9nzrvzdqdFHjp09wm6qSpSUBTku4iYxyivnMZ1T/yYnb6Lsg2DufaGV+jZc+9cd01EIqQg3wUkVos84z1j+pNw1Pb34BcfatQuUuB047WA1TfUc+uSWxkyfQira1cz8zszWPTuKRy1vY0bq7EY1NRABiUvRKRz0Eg+36VY9DpltcjF41u/sRpfYzVetmDx4mCeXkTykv735rMki163SItsvoh2woLbSSVbY1VE8pZG8vmsWUCes/wBJi67qWVaZDria6yqAJlIQVCQz2dhQN78xlImXnwAc569lC/3+3LLtMh0aI1VkYKiIJ/HYjjlv/g+1y36MztjH1P2jQwX0Y5TrrxIwVCQz1NN0iJTVYsUkS5PQT7P1DfUU1ZRxpSKKfTq2UuLaItIqxTk84gW0RaRdGWUQmlm55vZajOLmVlJs2PXm9k6M3vTzM7KrJtdW5tpkSIiKWQ6kl8FjAGmJ+40s2OAC4FjgUOARWb2eXfflWF7XU7KapEiIu2Q6ULeVUCy+eDRwIPuXg+8a2brgBOBlzJprytJXEQ747RIEemyopqTPxR4OWG7OtzXgpmVAqUAAwcOjKg7eSIWI1ZbQ/nGOVz33GR2xv7echFtEZE0tBnkzWwRcHCSQze6++OpnpZkX9JqV+5eDpQDlJSUdN2KWLEYVecMo/SQSioGwhnvwvT3h3LU9f+m2jEissfaDPLuPnIPXrcaOCxhewCweQ9ep0uob6in7NmbmFJSSa+/w8y5MP51sB5/Dj55qg8micgeimq65glgtpndRXDj9WhgeURt5bUmaZFbi7hn9gcUW2/o/pFqx4hIxjIK8mZ2HvAroAh42sxed/ez3H21mT0MrAEagB8qs6apbZ9t4/rnrue+yvsY+NnePD2nG2cX/yOsfSgI7Fu3qnaMiGTMvBMtDFFSUuKVlZW57kbkEtMif3zcv3L72N/S+9Nd0KMHVFdrekZE0mJmK9y9JNkxfeK1A7VIi7zgMYb2OBxKquCllzQ9IyJZpyDfAWIeo3xFOdctuo6du3YydcRUfjLsGnqOPHN3
3faNG+HggzU9IyJZpSAfsZTVImtqmq7A1K2bAryIZJ0SsCPSYhHt0TNZNG7R7nLA8RWYerSxqLaISAY0ko9Au6pFagUmEekACvJZ1CQt8oCBPH3x05x99Nmpn6AVmEQkYgry6YrFko6+VS1SRDojzcmnIxaD00+HAQPgtNMgFmPzjs2MeWgMYx4eQ9/9+vLyZS9z96i7FeBFpFPQSD4ddXWNGTGxZUspX/ILrlv+H7vTIr/2E1WLFJFORUE+HWFGTNWbSym9qBcVL16nRbRFpFNTkE9D/a6dlN1yOlMqXqbXXt2ZeZYW0RaRzk1Bvp20iLaI5CMF+TaknRYpItKJKMi3Yu7aufxw3g+VFikieUtBPi4h/33zR1u0iLaIFISM8uTN7HwzW21mMTMrSdg/yMw+NbPXw69pmXc1QmH+e2zAoUy7+PN88d4vMn/dfKaOmErlhEoFeBHJW5mO5FcBY4DpSY697e5DMnz9jlFXx9o3lzJh3C4qDl/HGX1PYfqYmUqLFJG8l1GQd/cqIH9SCJOUJKhvqKes6j6mXB6jVz3MXPN5xv/fF7Bu+jCwiOS/KCPZYDP7s5m9YGanpjrJzErNrNLMKuvq6qLrTZKSBEs3LuWE6Sdwywu38k9f+j5rr1jJPz+4VgFeRApGmyN5M1sEHJzk0I3u/niKp20BBrr7X83sq8BcMzvW3bc3P9Hdy4FyCNZ4bX/X05RQkmDbiqVc/+il3LfmAaVFikhBazPIu/vIdF/U3euB+vDxCjN7G/g8kLtVusOSBHO3VvDD0d15v+p3SosUkYIXSQqlmRUBH7j7LjM7AjgaeCeKttpr80db+NFV/8Bja2N8ud8/Mvfbv1HWjIgUvIyCvJmdB/wKKAKeNrPX3f0s4OvAbWbWAOwCrnD3DzLubbpiMWK1NZS/N5frnpusapEi0uVkml0zB5iTZP+jwKOZvHbGYjHWnjOMCYdUUjEQzhh0BtO/rWqRItK1FGQayc5dO7lt/mSOL6lkdRHMeLIbi86apQAvIl1OwZU1WLpxKaVPlbKmbg0XbS3i7j98QL/jT9ZaqiLSJRXMSH7bZ9u46umrOGXmKXy08yOevvhpZt/7Pv3e3ARLljRZj1VEpKsoiJF85eZKRj84Onm1SI3gRaQLK4ggf0SfIzi26FhVixQRaaYggvxB+x7EgnELct0NEZFOp2Dm5EVEpCUFeRGRAqYgLyJSwBTkRUQKmIK8iEgBU5AXESlgCvIiIgVMQV5EpICZe3Qr7qXLzOqADRm8RF9ga5a6k03qV3rUr/SoX+kpxH4d7u5FyQ50qiCfKTOrdPeSXPejOfUrPepXetSv9HS1fmm6RkSkgCnIi4gUsEIL8uW57kAK6ld61K/0qF/p6VL9Kqg5eRERaarQRvIiIpJAQV5EpIDlVZA3s/PNbLWZxcyspNmx681snZm9aWZnpXj+YDN7xczeMrOHzGyviPr5kJm9Hn6tN7PXU5y33sxWhudVRtGXZu3dYmabEvp2dorzRoXXcZ2ZTe6Afv2nma01szfMbI6ZHZjivMivV1v/djPbO/z5rgvfS4Oi6EeSdg8zs8VmVhX+H7g6yTmnmdm2hJ/vzzqob63+XCzwX+E1e8PMvtIBffpCwnV43cy2m9mkZud0yPUysxlmVmtmqxL2HWRmC8NYtNsmmtMAAATRSURBVNDM+qR47vjwnLfMbPwedcDd8+YL+CLwBWAJUJKw/xjgL8DewGDgbaB7kuc/DFwYPp4GXNkBfb4T+FmKY+uBvh14/W4BftrGOd3D63cEsFd4XY+JuF9nAj3Cx3cAd+TierXn3w5cBUwLH18IPNRBP7v+wFfCx/sD/z9J304Dnuqo91N7fy7A2cB8wIDhwCsd3L/uwPsEHxjq8OsFfB34CrAqYd/Pgcnh48nJ3vPAQcA74fc+4eM+6bafVyN5d69y9zeTHBoNPOju9e7+LrAOODHxBDMz4Azgj+GuB4DvRtnfsM0LgD9E2U6WnQis
c/d33H0n8CDB9Y2Muy9w94Zw82VgQJTttaI9//bRBO8dCN5LI8Kfc6TcfYu7vxY+3gFUAYdG3W6WjAb+xwMvAweaWf8ObH8E8La7Z/Jp+j3m7n8CPmi2O/F9lCoWnQUsdPcP3P1vwEJgVLrt51WQb8WhwHsJ29W0/A/wD8CHCcEk2TnZdipQ4+5vpTjuwAIzW2FmpRH3JW5i+CfzjBR/IrbnWkbpUoJRXzJRX6/2/NsbzwnfS9sI3lsdJpwiOgF4Jcnhr5nZX8xsvpkd20Fdauvnkuv31IWkHmjl4noB9HP3LRD8AgeKk5yTlevW6RbyNrNFwMFJDt3o7o+nelqSfc1zQ9tzTru1s58X0foo/mR332xmxcBCM1sb/tbfY631C7gPuJ3g3307wVTSpc1fIslzM86zbc/1MrMbgQZgVoqXyfr1at7NJPsifR+ly8x6A48Ck9x9e7PDrxFMSXwU3m+ZCxzdAd1q6+eSs2sW3nf7DnB9ksO5ul7tlZXr1umCvLuP3IOnVQOHJWwPADY3O2crwZ+JPcIRWLJz2q2tfppZD2AM8NVWXmNz+L3WzOYQTBdkFLTae/3M7NfAU0kOtedaZr1f4U2lc4ERHk5IJnmNrF+vZtrzb4+fUx3+jA+g5Z/ikTCzngQBfpa7P9b8eGLQd/d5ZvbfZtbX3SMtxtWOn0sk76l2+hbwmrvXND+Qq+sVqjGz/u6+JZy6qk1yTjXBfYO4AQT3I9NSKNM1TwAXhpkPgwl+Gy9PPCEMHIuB74W7xgOp/jLIhpHAWnevTnbQzHqZ2f7xxwQ3H1clOzdbms2DnpeivVeBoy3IRNqL4E/dJyLu1yjgOuA77v5JinM64nq159/+BMF7B4L30vOpfillUzjv/1ugyt3vSnHOwfH7A2Z2IsH/779G3K/2/FyeAC4Js2yGA9viUxUdIOVf07m4XgkS30epYtGzwJlm1iecWj0z3JeeqO8sZ/OLIDBVA/VADfBswrEbCTIj3gS+lbB/HnBI+PgIguC/DngE2DvCvt4PXNFs3yHAvIS+/CX8Wk0wbRH19fsdsBJ4I3yT9W/er3D7bILsjbc7qF/rCOYeXw+/pjXvV0ddr2T/duA2gl9AAPuE75114XvpiKivT9juKQR/qr+RcJ3OBq6Iv8+AieG1+QvBDeyTOqBfSX8uzfplwL3hNV1JQmZcxH3bjyBoH5Cwr8OvF8EvmS3A38P4dRnBfZzngLfC7weF55YAv0l47qXhe20d8C970r7KGoiIFLBCma4REZEkFORFRAqYgryISAFTkBcRKWAK8iIiBUxBXkSkgCnIi4gUsP8FUy7XCPXim48AAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "eval_data = list(get_data(50))\n", - "x_target_label = np.array([-10, 10, 0.1])\n", - "y_target_label = x_target_label * 2 + 3\n", - "x_eval_label,y_eval_label = zip(*eval_data)\n", - "\n", - "plt.scatter(x_eval_label, y_eval_label, color=\"red\", s=5)\n", - "plt.plot(x_target_label, y_target_label, color=\"green\")\n", - "plt.title(\"Eval data\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "上图中绿色线条部分为目标函数,红点部分为验证数据`eval_data`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义数据增强函数\n", - "\n", - "使用MindSpore的数据增强函数,将数据进行增强操作,操作解释如下:\n", - "\n", - "- `ds.GeneratorDataset`:将生成的数据转换为MindSpore的数据集,并且将生成的数据的x,y值存入到`data`和`label`的数组中。\n", - "- `batch`:将`batch_size`个数据组合成一个batch。\n", - "- `repeat`:将数据集数量倍增。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:52.993381Z", - "start_time": "2021-01-04T07:04:52.990360Z" - } - }, - "outputs": [], - "source": [ - "from mindspore import dataset as ds\n", - "\n", - "def create_dataset(num_data, batch_size=16, repeat_size=1):\n", - " input_data = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data','label'])\n", - " input_data = input_data.batch(batch_size)\n", - " input_data = input_data.repeat(repeat_size)\n", - " return input_data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用数据集增强函数生成训练数据,并查看训练数据的格式。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.079377Z", - "start_time": "2021-01-04T07:04:52.994402Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The dataset size of ds_train: 100\n", - "dict_keys(['data', 'label'])\n", - "The x 
label value shape: (16, 1)\n", - "The y label value shape: (16, 1)\n" - ] - } - ], - "source": [ - "data_number = 1600\n", - "batch_number = 16\n", - "repeat_number = 1\n", - "\n", - "ds_train = create_dataset(data_number, batch_size=batch_number, repeat_size=repeat_number) \n", - "print(\"The dataset size of ds_train:\", ds_train.get_dataset_size())\n", - "dict_datasets = next(ds_train.create_dict_iterator())\n", - "\n", - "print(dict_datasets.keys())\n", - "print(\"The x label value shape:\", dict_datasets[\"data\"].shape)\n", - "print(\"The y label value shape:\", dict_datasets[\"label\"].shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "通过定义的`create_dataset`将生成的1600个数据增强为了100组shape为16x1的数据集。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义训练网络\n", - "\n", - "在MindSpore中使用`nn.Dense`生成单个数据输入,单个数据输出的线性函数模型:\n", - "\n", - "$$f(x)=wx+b\\tag{1}$$\n", - "\n", - "并使用Normal算子随机初始化权重$w$和$b$。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.085026Z", - "start_time": "2021-01-04T07:04:53.080390Z" - } - }, - "outputs": [], - "source": [ - "from mindspore.common.initializer import Normal\n", - "from mindspore import nn\n", - "\n", - "class LinearNet(nn.Cell):\n", - " def __init__(self):\n", - " super(LinearNet, self).__init__()\n", - " self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02))\n", - " \n", - " def construct(self, x):\n", - " x = self.fc(x)\n", - " return x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "调用网络查看初始化的模型参数。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.100773Z", - "start_time": "2021-01-04T07:04:53.086027Z" - }, - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Parameter (name=fc.weight) [[-0.02289871]]\n", - "Parameter (name=fc.bias) 
[0.01492652]\n" - ] - } - ], - "source": [ - "net = LinearNet()\n", - "model_params = net.trainable_params()\n", - "for param in model_params:\n", - " print(param, param.asnumpy())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "初始化网络模型后,接下来将初始化的网络函数和训练数据集进行可视化,了解拟合前的模型函数情况。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.242097Z", - "start_time": "2021-01-04T07:04:53.102786Z" - }, - "scrolled": true - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYQAAAD8CAYAAAB3u9PLAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3dd3iUVdrH8e+dhIAUBSFApFqAVUQpERBBQFGUteEuoq7KKor4iksRK6wRwYaKa0cQxYoooriISKRYAJVQVBClL0QgIFWlJnPeP2YyCWFCEqYm+X2uK1fmqefmyTD3POec5xxzziEiIhIX7QBERCQ2KCGIiAighCAiIj5KCCIiAighiIiIjxKCiIgAIUgIZlbPzGab2XIzW2Zm/X3rHzSzX81sie+nW/DhiohIuFiwzyGYWTKQ7JxbZGZVgIXAFcBVwB/OuSeDD1NERMItIdgTOOc2AZt8r383s+VAnWDPKyIikRX0HcIhJzNrCHwJnA4MAv4J7AbSgTudczsCHNMH6ANQqVKlVn/5y19CFo+ISFmwcOHC35xzScGeJ2QJwcwqA18ADzvnJptZLeA3wAHD8VYr3XSkc6SkpLj09PSQxCMiUlaY2ULnXEqw5wlJLyMzKwd8ALztnJsM4JzLdM5lO+c8wFigdSjKEhGR8AhFLyMDxgHLnXOj8qxPzrNbd2BpsGWJiEj4BN2oDJwDXA/8aGZLfOvuB64xs+Z4q4zWAbeGoCwRkbLH44HMTDCDWrW8v8MgFL2MvgYCRTct2HOLiJRpOYmgZ0/4+mtwDjp0gDlzIC70zxWH4g5BRERCzeOBzp1h3jzIyspdP28ebNkCtWuHvEgNXSEiEou2bs1NBnmriLKz4aqrvAkjxJQQRERiUc2a0K4dJCR4q4mWLMmtJpo/35swQkxVRiIiscgMZs/2fvDXrOld1769966hXTuoWZMde3fQ4uUWIStSCUFEJFbFxXl7FeXwJQiXlMQ1H1zDxGUTQ1tcSM8mIiLhExfH+E2fEjc83p8MUjumhuz0ukMQESkBftr6E01fbOpfblG7Bd/c/A2J8YkMY1hIylBCEBGJYX8e+JNTXziVDbs3+Net7b+WhlUbhrwsVRmJiMSoftP6UfnRyv5k8OF7cbhZ59Lw2PphKU93CCIiMWbKz1O4YuIV/uV+p9/Ec1e/4X0mIWGet+dR3sbmEFFCEBGJBI8ntwtpAWMRrdu5jhOfOdG/XO/Yeiy/fTmVylWEdqsO6XIaDkoIIiLhlncYinbtvN1H84xFdCD7AG1facvizYv965betpSmNXMbkQ95JiFMg9upDUFEJNzyDkMxb94hTxkPmzOM8iPK+5PBa5e/hkt1hyYDyH0mIUzJAHSHICJSPEWo+jlMzjAUeap8Zq+dzXlvnOffpWfTnkz42
wQs0DmPpsyjoIQgIlJUhVT9FCjPMBSZFR21H8o9pnx8eTbduYlqx1QLbZlHIRQzptUzs9lmttzMlplZf9/6480szcxW+n4X8K8VESkhjlD1U5hsHBel9aL2qNzJJL/p/Q37hu4rOBkEWWZxhSLNZAF3OudOBdoCt5vZacC9wEznXCNgpm9ZRKTkyjsCaTF6+zz/3fMkDE/gs9WfATDqwlG4VEebum3CVubRCMWMaZuATb7Xv5vZcqAOcDnQybfb68Ac4J5gyxMRiZr8I5AWUp+/cONCUsam+Jc7N+zMjOtnkBBXjI/eYpYZjJC2IZhZQ6AF8C1Qy5cscM5tMrPwpTURkUjJPwJpALv27aLe0/X4/cDv/nUbB20kuUryEY4KrsxQCFnLhJlVBj4ABjjndhfjuD5mlm5m6VvDWDcmIhJuzjmum3wdVR+v6k8Gaden4VLd0SeDCApJQjCzcniTwdvOucm+1ZlmluzbngxsCXSsc26Mcy7FOZeSlJQUinBERCLL4+Htr14k7qE43v7xbQDub38/LtXR5aQuUQ6u6IKuMjJvp9lxwHLn3Kg8mz4GegGP+X5PCbYsEZFY8/OWnzj1pdyHyJrVbMaCWxZQPqF8FKM6OqFoQzgHuB740cyW+NbdjzcRvGdmvYH1QI8QlCUiEhP2HNzD6S+eztqda/3rVj0DJy/7DEpgMoDQ9DL6Giio2fv8YM8vIhJrBk4fyH++/Y9/edJ78Lef8PYACmMvoHDTWEYiIkU0dcVUbJj5k8GtrW7F8+9s/lbjXO9zAh06RKQ3ULho6AoRkUKs37WeBv9p4F+uVakWq/61isqJlb0rIvScQLgpIYiIQMAB5A5mH6T9a+357tfv/Lt93/d7zqh1xqHHRug5gXBTlZGISM4AcnXrQqdO4PHw8JcPkzgi0Z8Mxl46FpfqDk8GpYjuEEREMjP9A8h9tWEu5w6P92/q/pfuTLpqEnFW+r8/KyGISNnm8UDPnmxNzKLmUIBsAOIsjszBmdSoWCOq4UWSEoKIlGmeLZlcVv9rPsnTSX7uTXNpV69d9IKKktJ/DyQiAt47gcxMcM6/anT6aOJfPoFPGnnXPbbqRNwDnjKZDEB3CCJSFuSbdWzxO6No+UrusNQd6ndg1kUTSKh9QonuNhosJQQRKf18s47tjs+i4dlfsiNPMtgwcAN1j60bxeBihxKCiJR6LimJm3pXZ3xypn/dp//4lItOuSiKUcUetSGISKn27tJ3iRse708Gd509GJfqlAwC0B2CiJRKK7etpPHzjf3LTao3YUnfJVRIqBDFqGKbEoKIlCp7D+7lzNFnsnL7Sv+6Ff1W0Kh6oyhGVTKoykhESo270+6m4iMV/cng3b+9i0t1SgZFpDsEESnxpq+azsVvX+xfvrH5jYy7bBxWhruQHo2QJAQzexW4BNjinDvdt+5B4BZgq2+3+51z00JRnogIwK+7f6Xu07ldRo8/5njW9l/LseWP9a4IMIKpFCxUVUbjgUBN9k8755r7fpQMRCQksjxZtH+1/SHJYFGfRWy7e9uhySDfCKZyZCFJCM65L4HtoTiXiMiRjJw7knLDyzF3w1wAXvrrS7hUR4vkFofu6HsYjaws7++tWwOcTfIKdxtCPzO7AUgH7nTO7ci/g5n1AfoA1K9fP8zhiEhJNXf9XNq/1t6/fEnjS5hy9ZSCh6WuWRPatfMPV0HNmhGKtOQyl2egp6BOZNYQmJqnDaEW8BvggOFAsnPupiOdIyUlxaWnp4ckHhEpHbbt2UbSE0k4cj+rtgzeQlKlpMN3zt9mUEbaEMxsoXMupfA9jyxs3U6dc5nOuWznnAcYC7QOV1kiUvp4nIfuE7tT44ka/mTwxT+/wKW6gpNB/jaDnKktS3EyCKWwJQQzS86z2B1YGq6yRKSEyzc09diFY4l/KJ6Pfv4IgIfPexiX6ji3wbkFn0NtBkELVbfTCUAnoIaZZQCpQCcza463ymgdcGsoyhKRUibP0NQ/X
HAGZ7ZZ5N/Upk4bvrrxK8pZvDdhHKnqR20GQQtJQnDOXRNg9bhQnFtESrmtW/k9fS6nDMhmS+XcZPC/Af+j/nH1D5vLgNmzvVVB+Zl5t5WBNoNw0dAVIhI1zjn6fDeUY+/OZktl77r/Xv0xLtV5kwEUrypIbQZB0dAVIhIVk36aRI/3e/iXBzTrw9PdRx/+Ya6qoIhRQhCRiFq9fTWnPHeKf/nkYxvy4+3LOCaxYuADVBUUMaoyEpGI2J+1n9NfPP2QZLD8mxRW3Z3BMRdcfOShJVQVFBFKCCISdkNmDqHCwxVYtnUZAG92fxPXdzN/+XyJuonGEFUZiUjYpK1O48K3LvQvX3fGdbxxxRveYamdU9tAjFFCEJGQ2/j7RuqMquNfrpJYhfUD11O1QtXcndQ2EHOUEEQkZLI8WVzw5gXMWTfHv27BLQtIOaGAYXZy2gYkJqgNQURCYtT8UZQbXs6fDJ696Flcqis4GUjM0R2CiATl24xvaTuurX+568ld+eTaT4iPi49iVHI0lBBE5Khs37ud5KeSOZB9wL9u852bqVVZVUAllaqMRKRYnHNc9f5VVB9Z3Z8MZt0wC5fqlAxKON0hiEiRjV8ynhun3OhffrDjg6R2So1iRBJKSggiUjDfjGPL2Mrpo5v5V7dKbsW83vNIjE+MYnASakoIIhKYx8Of559Lk5Zz+fXY3NVr+6+lYdWGUQtLwickbQhm9qqZbTGzpXnWHW9maWa20ve7WijKEpHI6Df5Zip3yk0GH3Z9DZfqlAxKsVA1Ko8HLsq37l5gpnOuETDTtywi0ZBvisoj+ejnj7BhxgvLXgOg3wLDzTqXK9r0CneUEmWhmjHtSzNrmG/15Xin1QR4HZgD3BOK8kSkGIo449jaHWs56dmT/Mv1dsHyRe2oNGES1K6toSXKgHC2IdRyzm0CcM5tMrOAI1eZWR+gD0D9+vXDGI5IGeTxwE8/HT7jWJ7hIg5kH6DtK21ZvHmxf93S0fE03ZwNCd95k4eSQZkQ9ecQnHNjnHMpzrmUpKSkaIcjUnrk3Bk0bw6VKkFCwmGjiqbOTqX8iPL+ZDD+8vG4Bzw0bXxOwP2ldAvnHUKmmSX77g6SgS1hLEtE8suZizg7G/74A5YsgaZNwYzZa2dz3hvn+Xft2bQnE/42wTssNWgU0jIqnAnhY6AX8Jjv95QwliUi+eWfi7hpUzb/mUnyU8n+XSokVGDjoI1UOyZfJ0CNQlomhSQhmNkEvA3INcwsA0jFmwjeM7PewHqgR8FnEJGQyzPfQHaN6lz8VlfS1qT5N3/T+xva1G0TxQAl1oSql9E1BWw6PxTnF5FC+J4oPqyKJy6O59a9x79G/8u/atSFoxh49sAoBCmxTk8qi5R0BXQrTd+Yzlljz/Lvdt6J5zHjuhkalloKpIQgUtLlNB77upXuzFhF3bda8ufBP/27bBy0keQqyUc4CQXfZUiZEfVupyISJF/jsUuI59qbq1HttSb+ZJB2fRou1RUtGXTuDHXrQqdO3mUpc5QQREo6M9585ibihmYzofZWAIZ0GIJLdXQ5qUvRzpHvLoOtW8MYsMQqVRmJlFQeD8tXzOW0ief6VzWr2YwFtyygfEL54p0rfxdVPYxWJikhiJRAe/b/wempSaw9Zp9/3ao7VnHy8Scf3QnzdFFVG0LZpSojkRJmwPQBVHqsij8ZTJoUh+u7OTcZFGNk00PkPIymZFBmKSGIlBBTV0zFhhnPfPsMALf+moxnRDx/O759bhXP0TYOH20SkVJFVUYiMW79rvU0+E8D/3KtSrVY9a9VVE6oCMPzVfEEahwubAiKIg6PLaWf/uoiMepg9kFaj219SDL4oe8PbB68mcqJlQNX8eQ0DhdnpFL1MBIfJQSRGDTiyxEkjkhkwcYFAIy9dCwu1dGsVrMjH5jTOJyRAXPmFK094GiSiJRKqjISiSFf/u9LOo7v6F++8tQreb/H+8RZMb67FXekUvUwEh8lBJEYsOXPL
dR6MvdDPN7iyRycSfWK1SMTgIa7FpQQRKLK4zxc8s4lfLrqU/+6uTfNpV29dlGMSsoqtSGIhNMRunO+tOAl4h+K9yeDx7s8jkt1SgYSNWG/QzCzdcDvQDaQ5ZxLCXeZIjGhgO6cizctpuWYlv7dOtTvwKxes0ggzps8VI8vURKpKqPOzrnfIlSWSGzI151zV8ZqGrxzFrv27/LvkjEwgzrH1tGzABIT9I4TCZc8w1L/s3d1qr7W2J8Mpv9jOi7VeZMB6FkAiQmRSAgOmGFmC82sT/6NZtbHzNLNLH2r/hNIaWLGu8/dStzQbF5PzgTg7nZ341IdXU/peui+ehZAYoC5MI9dYmYnOOc2mllNIA24wzn3ZaB9U1JSXHp6eljjEYmEFdtW0OT5Jv7lJtWbsKTvEiokVCj4IM1YJkfJzBaGon027G0IzrmNvt9bzOxDoDUQMCGIlHR7D+7lzNFnsnL7Sv+6Ff1W0Kh6o8IP1rMAEmVhrTIys0pmViXnNXAhsDScZYpEy10z7qLiIxX9yeDdv72LS3VFSwYiMSDcdwi1gA/Ne/ubALzjnJse5jJFIurTlZ/S7Z1u/uWbmt/EK5e9gqnaR0qYsCYE59wa4MxwliESLRm7M6j3dD3/cvVjqrOm/xqOLX9sFKMSOXoaukKkmA5mH6Tj+I7Mz5jvX7f41sU0r908ilGJBE/PIYgUw+NfP07iiER/Mnjpry/hUp2SgZQKukMQKYL8s5Zd2vhSPrr6o+INSy0S45QQRI5gz8E9jJw7kpFzRwIQZ3FsvnMzSZWSohyZSOgpIYgE4Jxj4rKJ3J12Nxt2b6Bn05483uVxGlRtUPjBIiWUEoJIPgs3LqT/9P7M3TCXFjWa8Xavt+jQ8NxohyUSdqoAFfHZ/Mdmek/pzVljz2Ll9pW8srwRCwb+RIde//YOKyFSyukOQcq8/Vn7efbbZxn+5XD2Ze3jzrPvZOhf+nDcfadBVnbu6KMaVkJKOSUEKbOcc0xdMZVBMwaxavsqLml8CU9d+BSNqzf2znDWrl3u/AQafVTKACUEKZOWbVnGwM8GkrYmjVNrnMr0f0w/dEhqM+8kNRp9VMoQJQQpU7bv3c6Dcx7kxQUvUqV8FZ656Blua3kr5bbv9N4V5P3g1+ijUsaoUVlKlwImtc/yZPHCdy/Q6LlGvLDgBfq06sPKO1byr7P6Ua7LhVC3LnTqpMZjKdOUEKT0yJmXON+H+8w1M2k+ujn9Pu3HmbXOZMmtS3jxry9So2INTV0pkocSgpQe+T7cV69aQPeJ3enyZhf2HNzD5KsmM/OGmTSr1Sz3GE1dKeKnNgQpPXwf7r+nz+Xhnsk8/d65lIsrx6PnP8qAtgMCT1+pxmMRv7AnBDO7CHgGiAdecc49Fu4ypWzy4HjjP7247/Of2bxnA71O78Uj5z/CCVVOOPKBajwWAcKcEMwsHngBuADIABaY2cfOuZ8C7f/zz9Chg/f/p5n395FeF3U/HROdY8wi94V73oZ59J/en/SN6bSt25Yp1/6X1nVaR6ZwkVIi3HcIrYFVvpnTMLN3gcuBgAkhLg4SE71tgR6Ptyo457VzgV8faVs4jpHiC2fiya6UwdYz72FXg3dI2HsC9Ze9xb4p1/B/L8WFPMHNnw8ZGeG/VgkJEB+f+7uw10Xd72iOCee5g40nTi2gIRfuhFAH2JBnOQNok3cHM+sD9AGoX78+M2eGOaIg5SSJcCceHXPk1wfcXn4+/klWVX8MRzZNMofSKPMe4uIq46lT+PE5XzaKE1u4kwH4/m0Hwl+OSCDhTgiBKgwO+Z7tnBsDjAFISUmJ+e/gZt5vJxIdzjkm/TSJwWmDWb9rPX8/7e88ccETNKzaMNqhBc05yM72/mRlHfq7sNdF3a80HaM79tALd0LIAOrlWa4LbAxzmVJKLd60mP7T+/PV+q84s9aZvHHFG3Rs2DHaYYWMWW61SPny0Y5GSpJQtdWFOyEsABqZ2YnAr8DVwLVhLlNKm
S1/bmHorKG8sugVqleszsuXvEzvFr2Jj9OtmkgohTUhOOeyzKwf8BnebqevOueWhbNMKT0OZB/g+W+fY9gXw9iTtZcBbQfwQMcHqFqharRDEymVwv4cgnNuGjAt3OVI6eGcY9rKaQz6bBArtq+g20pj1LZWNBnypLqWiISRnlSWmLJ863IGzRjE9FXTaVL1FD6ZEEe3XzyQsEST1IiEmb5uSUzYsXcHA6YP4IzRZzB/w3xGXTiKH25fSrda7TXOkEiE6A5Boirbk83YRWMZOmso2/dup0+rPgzvOIykPUB8osYZEokg3SFI1MxeO5uWY1py2ye3cXrN01l06yJGd3uRpEuuyh3CGrzVREoGImGnOwSJuLU71jI4bTCTl0+mwXENmNRjEleeeiVm5p3cJv/8BGo3EIkIJQSJmD8O/MGjXz3KU/OfIj4unhGdRzDo7EEcU+6Y3J1y5ifQ5PYiEaeEIGHncR7e+uEt7v38Xjb9sYnrz7ieRzs/TJ39iZB/jgLNTyASNWpDkOIrYN7iQL7N+Jazx51Nr496Ue+4eszvPZ83Lh9PncuvK3ge45z5CZQMRCJKCUGKp4B5i/P7dfev3PDhDbQd15YNuzbw+hWvM7/3fNrWbat5jEVilKqMpHgCfZjnafTdl7WPUfNH8chXj3DQc5D72t/Hfe3vo0r5KrnnUDuBSExSQpDiKeDD3DnH5OWTGZw2mHU713HlqVfyxAVPcFK1kw4/h9oJRGKSEoIUT4AP8+83f8+AzwYwZ90cmtVsxswbZnLeiecd+Tz55zH2eJQgRKJMbQhSfL4P8617fqPv1L60HNOSHzN/5MVuL7Lo1kWFJ4P8itguISLhpTsEKbaD2Qd5YcELPDjnQf448Ad3tL6D1I6pVDum2tGdsJB2CRGJDCUEKZZPV37KoBmD+Pm3n+l6cldGdR3FaUmnBXdSNTKLxISwJQQzexC4BcjpU3i/b24EKYF++e0XBs0YxLSV02h0fCOmXjOVbo26eYebCJYamUViQrjvEJ52zj0Z5jIkjHbu28nwL4bz7HfPUrFcRZ684EnuaHMHifGJoS0ofyOziEScqowkoGxPNuMWjWXozKH8tm87N7e8mRHnjaBmJVXniJRW4e5l1M/MfjCzV80sYIujmfUxs3QzS9+qJ1ZjwhfrvqDVmFbc+slt/GXFNhZ+14Ixfx2tZCBSypkrwng0BR5s9jlQO8CmIcA3wG+AA4YDyc65m450vpSUFJeenn7U8Uhw1u1cx91pd/P+T+9Tv3Idnnh9Ez1+9GAJCZCRoSodkRhlZgudcynBnieoKiPnXJei7GdmY4GpwZQl4fPngT957OvHeHL+kxjGsE7DGHz2nVT8uBskqOePSFkRzl5Gyc65Tb7F7sDScJUlR8c5xzs/vsM9n9/Dr7//yrXNruWx8x+j3nH1vDuo549ImRLORuWRZtYcb5XROuDWMJYlxeHxsGDZDPp/9xDzM+bTKrkVE/8+kXPqn3Pofur5I1KmhC0hOOeuD9e55eht2vUr993TiteTM6m1vxyv/v0VerW4kTjTKCYiZZ0+BcqIfVn7eOzrx2j8QhMmJGVyz9ew4hkPN9a5RMlARAA9h1DqOeeY8ssU7pxxJ2t2rOHyJpfz5PiNnDJnsRqLReQQSgil2I+ZPzLgswHMWjuLpklNSbs+jS4ndYGrNNS0iBxOCaG08XjYtuEXHlj6HKMXvsxx5Y/j+Yuf59aUW0mI8/251VgsIgEoIZQiBw/u56V/NuXBeqvZXR7+r83tPNhpGNUrVo92aCJSAqg1sZSYsXoGZ77UjP6NV9NqI3w/Jp7nWv1byUBEikwJoYRbuW0ll024jK5vdeUAHqb80JQZE+Jp2vgcNRiLSLGoyqiE2rVvFyO+HMEz3z5DhYQKjOwykn+1+Rfl+5UrWoOx5jAWkXyUEGJZgA/tbE8245eM5/5Z97P1z63c2PxGHj7/YWpXzjPGYGENxjlzGOfMUDZ7trehWUTKN
H0KxKoAE89/vf5rWr/Smpv/ezOnHH8K393yHeMuH3doMiiKQHMYi0iZp4QQq/J8aK9fOper37mSDq91YMufW3jnynf4+savSTnhKEe7zZnDOCFBD6eJiJ+qjGJVzZrsad+GkXHzGHmOw/3vM1I7pnJXu7uolFgpuHNrDmMRCUAJIQY555i4bCJ3XfY/MnY7eja9ipEXjKT+cfVDV4geThORfJQQYszCjQvpP70/czfMpUXtFrxz5Tt0aNAh2mGJSBmghBAjNv+xmSEzh/DaktdIqpTEK5e+wj+b/5P4uPhohyYiZURQjcpm1sPMlpmZx8xS8m27z8xWmdkvZtY1uDBLr/1Z+xk5dySNn2vMmz+8yZ1n38mKfivo3bK3koGIRFSwdwhLgSuBl/OuNLPTgKuBpsAJwOdm1tg5lx1keaWGc47/rvgvgz4bxOodq7m08aU8deFTNKreKNqhiUgZFVRCcM4tB7DDe6lcDrzrnNsPrDWzVUBrYH4w5ZUWy7YsY+BnA0lbk8apNU5l+j+m0/UU3USJSHSFqw2hDvBNnuUM37rDmFkfoA9A/foh7EUTSzweyMxk+/6dpP70Ai+lj6ZK+So8e9Gz9E3pS7n4ctGOUESk8IRgZp8DgR6FHeKcm1LQYQHWuUA7OufGAGMAUlJSAu5Tonk8ZJ3XiZf3fsUDnWDnMdD3rNsY1vkhalSsEe3oRET8Ck0IzrkuR3HeDKBenuW6wMajOE+J9/niDxjQ9CuW1YTz1sB/0uJodlsqKBmISIwJ19AVHwNXm1l5MzsRaAR8F6ayYtLq7au54t0ruGDqVeypUoEP34XP34Bmp2hYahGJTUG1IZhZd+A5IAn4xMyWOOe6OueWmdl7wE9AFnB7Welh9Pv+33n4q4d5+punKRdXjkfbDGFA5/uo0H+3d4iIWrU0VISIxCRzLnaq7VNSUlx6enq0wzgqHufhje/f4L6Z97H5j830OuMGHnnxF06Ys1BDTItIWJnZQufcUY52mUtPKofAvA3z6D+9P+kb02lbty1Trp5C64QGcFXdQ4eY1thBIhLD9JU1CBm7M/jH5H9wzqvnsPH3jbzV/S3m/fNrbzJIStIQ0yJSougO4SjsObiHJ+c9yeNzH8fjPAztMJR72t9D5YSKh85ENnMmbNumIaZFpERQQigG5xzv//Q+d6Xdxfpd6+lxWg9GXjCShlUbenfIzDx0JrJt21RNJCIlhqqMimjxpsV0HN+RnpN6Uq1CNeb0msN7Pd7LTQagmchEpETTHUIhtvy5hSEzhzBu8TiqV6zOy5e8TO8WBYxEqpnIRKQEU0IowIHsAzz37XM89OVD7Dm4h4FtB/Lvjv+maoWqRz5QM5GJSAlVthOCx3PYt3nnHJ+s/IRBnw1i5faVdGvUjVEXjqJJjSZRDlZEJLzKbhuCx+PtEVS3LnTqBB4Py7cu5+K3L+bSCZcSZ3FMu3Yan1z7iZKBiJQJZfcOYetWf4+gHYvmMuyjvjy/9FUqJ1bm6a5Pc/tZt2tYahEpU8puQqhZk6xzzmbs3rn8u4uxY+k4bml5C8M7DyepUlK0oxMRibgymxBmrZvNgJ47+XGLh44NOvDMRc9wZu0zox2WiEjUlLmEsGbHGu5Ku4vJyyfTsGpDJvWYxJWnXhloGlARkTKlzCSE3/f/zqNfP8qo+aOIj4tnRAuPU48AAAnnSURBVOcRDDp7EMeUOybaoYmIxITSlxDydSX1OA9v/fAW935+L5v+2MT1Z1zPo+c/Sp1jA07xLCJSZgXV7dTMepjZMjPzmFlKnvUNzWyvmS3x/YwOPtQiyNeV9Jv18zh73Nn0+qgX9Y6rx/ze83mj+xtKBiIiAQR7h7AUuBJ4OcC21c655kGev3h8XUl/PSaLe5O+4q3XziG5cjKvX/E6151xHXFWdh+7EBEpTFAJwTm3HIiZBtm91aow6pq6PFJvHdnxcH/7+7ivw/1UTqwc7dBERGJeONsQTjSzxcBuYKhz7quQn
TlfO4Fzjg+Wf8BdaXex7uR1XHniX3nikmc46fiTQ1akiEhpV2hCMLPPgdoBNg1xzk0p4LBNQH3n3DYzawV8ZGZNnXO7A5y/D9AHoH79+oVHnNNO4JuE5vsJ/6H/jIF88b8vaFazGbNumEXnEzsXfh4RETlEoQnBOdeluCd1zu0H9vteLzSz1UBjID3AvmOAMQApKSmu0JP72gm2Jmbx7+O+YuwrKVSrUI2X/voSN7e8mYS40tdxSkQkEsLy6WlmScB251y2mZ0ENALWhOLcB6pX5YWe9RlWbw1/JsIdrfuR2vFBqh1TLRSnFxEps4JKCGbWHXgOSAI+MbMlzrmuwLnAQ2aWBWQDfZ1z24+6IF+bwae7FjJwxiB+abSGrvU68/Qlz3NqzdOC+SeIiIhPsL2MPgQ+DLD+A+CDYM7t5/Hwy1/bMKj6QqY1cjQ6vhFTr5lKt0bdYqZ3k4hIaRDTFe479+3koU/v5bmz0ql4EJ5Mi+OO92aSeEK9aIcmIlLqxGRCyPZkM27xOIbMGsK2Pdu4eXNtRkzcSs3m50By3WiHJyJSKsVcQvhi3Rf0n96f7zO/p0N977DULWqdCcM1cb2ISDjFVEJYs2MNnV7vRP3j6jPx7xPpcVqP3HYCTVwvIhJWMZUQdu7byUOdHmJwu8EallpEJMLMucKfBYuUM1qc4X5Y/EO0wxARKVHMbKFzLqXwPY8spob/TIxPjHYIIiJlVkwlBBERiR4lBBERAZQQRETERwlBREQAJQQREfFRQhAREUAJQUREfJQQREQEUEIQERGfoBKCmT1hZj+b2Q9m9qGZVc2z7T4zW2Vmv5hZ1+BDFRGRcAr2DiENON05dwawArgPwMxOA64GmgIXAS+aWXyQZYmISBgFlRCcczOcc1m+xW+AnNlrLgfedc7td86tBVYBrYMpS0REwiuUw1/fBEz0va6DN0HkyPCtO4yZ9QH6+Bb3m9nSEMYULjWA36IdRBEoztBSnKFTEmKEkhNnk1CcpNCEYGafA7UDbBrinJvi22cIkAW8nXNYgP0DjrPtnBsDjPGdJz0UQ7iGm+IMLcUZWiUhzpIQI5SsOENxnkITgnOuSyGB9AIuAc53uZMrZAD18uxWF9h4tEGKiEj4BdvL6CLgHuAy59yePJs+Bq42s/JmdiLQCPgumLJERCS8gm1DeB4oD6T55j7+xjnX1zm3zMzeA37CW5V0u3MuuwjnGxNkPJGiOENLcYZWSYizJMQIZSzOmJpCU0REokdPKouICKCEICIiPhFPCGbWw8yWmZnHzFLybSt0uAszO9HMvjWzlWY20cwSIxDzRDNb4vtZZ2ZLCthvnZn96NsvJN3Aihnng2b2a55YuxWw30W+a7zKzO6NQpwFDnmSb7+IX8/Cro2vo8RE3/ZvzaxhJOLKF0M9M5ttZst9/5f6B9ink5ntyvNeeCDScfriOOLf0Lye9V3PH8ysZRRibJLnOi0xs91mNiDfPlG5nmb2qpltyft8lpkdb2Zpvs/ANDOrVsCxvXz7rPT1Bi2ccy6iP8CpeB+imAOk5Fl/GvA93kbqE4HVQHyA498Drva9Hg3cFuH4nwIeKGDbOqBGpK9pnvIfBAYXsk+879qeBCT6rvlpEY7zQiDB9/px4PFYuJ5FuTbA/wGjfa+vBiZG4e+cDLT0va6Cd9iY/HF2AqZGOrbi/g2BbsCneJ9dagt8G+V444HNQINYuJ7AuUBLYGmedSOBe32v7w30/wc4Hljj+13N97paYeVF/A7BObfcOfdLgE2FDndh3q5M5wGTfKteB64IZ7wByr8KmBCpMsOgNbDKObfGOXcAeBfvtY8YV/CQJ9FWlGtzOd73HXjfh+f73hcR45zb5Jxb5Hv9O7CcAkYCKAEuB95wXt8AVc0sOYrxnA+sds79L4ox+DnnvgS251ud9z1Y0GdgVyDNObfdObcD77hzFxVWXiy1IdQBNuRZDjTcRXVgZ54PkwKHxAiTDkCmc25lAdsdMMPMF
vqG5IiGfr5b71cLuJUsynWOpJvwfkMMJNLXsyjXxr+P7324C+/7Mip8VVYtgG8DbD7bzL43s0/NrGlEA8tV2N8w1t6PV1PwF75YuJ4AtZxzm8D75QCoGWCfo7quoRzLyM+KMNxFoMMCrMvfJ7bIQ2IUVxFjvoYj3x2c45zbaGY18T6b8bMvw4fMkeIEXgKG470mw/FWb92U/xQBjg153+OiXE87fMiT/MJ+PfOJ6nuwuMysMvABMMA5tzvf5kV4qz3+8LUlfYT3AdFIK+xvGEvXMxG4DN+ozfnEyvUsqqO6rmFJCK6Q4S4KUJThLn7De0uZ4Pt2FrIhMQqL2cwSgCuBVkc4x0bf7y1m9iHeKoiQfoAV9dqa2VhgaoBNERlWpAjXM9CQJ/nPEfbrmU9Rrk3OPhm+98RxHH5LH3ZmVg5vMnjbOTc5//a8CcI5N83MXjSzGs65iA7UVoS/YSwNc3MxsMg5l5l/Q6xcT59MM0t2zm3yVa9tCbBPBt52jxx18bbbHlEsVRkVOtyF74NjNvB336peQEF3HKHWBfjZOZcRaKOZVTKzKjmv8TacRnTk1nx1r90LKH8B0Mi8vbUS8d4ifxyJ+HJYwUOe5N0nGtezKNfmY7zvO/C+D2cVlNDCxddmMQ5Y7pwbVcA+tXPaNsysNd7/69siF2WR/4YfAzf4ehu1BXblVIdEQYE1ALFwPfPI+x4s6DPwM+BCM6vmqzq+0LfuyKLQat4db/baD2QCn+XZNgRvL49fgIvzrJ8GnOB7fRLeRLEKeB8oH6G4xwN98607AZiWJ67vfT/L8FaNRPravgn8CPzge9Mk54/Tt9wNb8+U1VGKcxXe+s0lvp/R+eOM1vUMdG2Ah/AmL4AKvvfdKt/78KQoXL/2eG//f8hzDbsBfXPeo0A/33X7Hm/DfbsoxBnwb5gvTgNe8F3vH8nT8zDCsVbE+wF/XJ51Ub+eeBPUJuCg73OzN942q5nASt/v4337pgCv5Dn2Jt/7dBVwY1HK09AVIiICxFaVkYiIRJESgoiIAEoIIiLio4QgIiKAEoKIiPgoIYiICKCEICIiPv8PsVW4qWEViusAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "from mindspore import Tensor\n", - "\n", - "x_model_label = np.array([-10, 10, 0.1])\n", - "y_model_label = (x_model_label * Tensor(model_params[0]).asnumpy()[0][0] + \n", - " Tensor(model_params[1]).asnumpy()[0])\n", - "\n", - "plt.axis([-10, 10, -20, 25])\n", - "plt.scatter(x_eval_label, y_eval_label, color=\"red\", s=5)\n", - "plt.plot(x_model_label, y_model_label, color=\"blue\")\n", - "plt.plot(x_target_label, y_target_label, color=\"green\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "从上图中可以看出,蓝色线条的初始化模型函数与绿色线条的目标函数还是有较大的差别的。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义前向传播网络与反向传播网络并关联" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "接下来需要定义模型的损失函数,这里采用均方误差(MSE,Mean Squared Error)的方法用于判断拟合的效果如何,即均方误差值越小,拟合的效果越好,其损失函数公式为:\n", - "\n", - "$$J(w)=\\frac{1}{2m}\\sum_{i=1}^m(h(x_i)-y^{(i)})^2\\tag{2}$$\n", - "\n", - "假设训练数据第$i$个数据为$(x_i,y^{(i)})$,公式2中的参数解释如下:\n", - "\n", - "- $J(w)$为损失值。\n", - "\n", - "- $m$为样本数据的数量,本例中$m$的值为`batch_number`。\n", - "\n", - "- $h(x_i)$为第$i$个数据的$x_i$值代入模型网络(公式1)后的预测值。\n", - "\n", - "- $y^{(i)}$为第$i$个数据中的$y^{(i)}$值(label值)。\n", - "\n", - "### 定义前向传播网络\n", - "\n", - "前向传播网络包含两个部分,其中:\n", - "\n", - "1. 将参数带入到模型网络中得出预测值。\n", - "2. 
使用预测值和训练数据计算出loss值。\n", - "\n", - "在MindSpore中使用如下方式实现。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.249228Z", - "start_time": "2021-01-04T07:04:53.243109Z" - } - }, - "outputs": [], - "source": [ - "net = LinearNet()\n", - "net_loss = nn.loss.MSELoss()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义反向传播网络\n", - "\n", - "反向传播网络的目标是不断变换权重值,使得loss值取得最小值,一般的在线性网络中采用权重更新公式:\n", - "\n", - "$$w_{t}=w_{t-1}-\\alpha\\frac{\\partial{J(w_{t-1})}}{\\partial{w}}\\tag{3}$$\n", - "\n", - "公式3参数解释:\n", - "\n", - "- $w_{t}$为迭代后的权重值。\n", - "- $w_{t-1}$为迭代前的权重值。\n", - "- $\\alpha$为学习率。\n", - "- $\\frac{\\partial{J(w_{t-1}\\ )}}{\\partial{w}}$为损失函数对权重$w_{t-1}$的微分。\n", - "\n", - "函数中所有的权重值更新完成后,将值传入到模型函数中,这个过程就是反向传播过程,实现此过程需要使用MindSpore中的优化器函数,如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.273562Z", - "start_time": "2021-01-04T07:04:53.250245Z" - } - }, - "outputs": [], - "source": [ - "opt = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 关联前向和反向传播网络\n", - "\n", - "定义完成前向传播和反向传播后,在MindSpore中需要调用`Model`函数,将前面定义的网络,损失函数,优化器函数关联起来,使之变成完整的计算网络。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.287238Z", - "start_time": "2021-01-04T07:04:53.275579Z" - } - }, - "outputs": [], - "source": [ - "from mindspore import Model\n", - "\n", - "model = Model(net, net_loss, opt)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 拟合过程可视化准备\n", - "\n", - "### 定义绘图函数\n", - "\n", - "为了使得整个训练过程更容易理解,需要将训练过程的测试数据、目标函数和模型网络进行可视化,这里定义了可视化函数,将在每个step训练结束后调用,展示模型网络的拟合过程。" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": { - "ExecuteTime": { - "end_time": 
"2021-01-04T07:04:53.305631Z", - "start_time": "2021-01-04T07:04:53.288251Z" - } - }, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "import time\n", - "\n", - "def plot_model_and_datasets(net, eval_data):\n", - " weight = net.trainable_params()[0]\n", - " bias = net.trainable_params()[1]\n", - " x = np.arange(-10, 10, 0.1)\n", - " y = x * Tensor(weight).asnumpy()[0][0] + Tensor(bias).asnumpy()[0]\n", - " x1, y1 = zip(*eval_data)\n", - " x_target = x\n", - " y_target = x_target * 2 + 3\n", - " \n", - " plt.axis([-11, 11, -20, 25])\n", - " plt.scatter(x1, y1, color=\"red\", s=5)\n", - " plt.plot(x, y, color=\"blue\")\n", - " plt.plot(x_target, y_target, color=\"green\")\n", - " plt.show()\n", - " time.sleep(0.2)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义回调函数\n", - "\n", - "MindSpore提供的工具,可对模型训练过程进行自定义控制,这里在`step_end`中调用可视化函数,展示拟合过程。更多的使用可参考[官网说明](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_debugging_info.html#callback)\n", - "\n", - "- `display.clear_output`:清除打印内容,实现动态拟合效果。" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:04:53.318392Z", - "start_time": "2021-01-04T07:04:53.306647Z" - } - }, - "outputs": [], - "source": [ - "from IPython import display\n", - "from mindspore.train.callback import Callback\n", - "\n", - "class ImageShowCallback(Callback):\n", - " def __init__(self, net, eval_data):\n", - " self.net = net\n", - " self.eval_data = eval_data\n", - " \n", - " def step_end(self, run_context):\n", - " plot_model_and_datasets(self.net, self.eval_data)\n", - " display.clear_output(wait=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 执行训练\n", - "\n", - "完成以上过程后,可以使用训练数`ds_train`对模型训练,这里调用`model.train`进行,其中参数解释:\n", - "\n", - "- `epoch`:训练迭代的整个数据集的次数。\n", - "- `ds_train`:训练数据集。\n", - "- `callbacks`:训练过程中需要调用的回调函数。\n", - "- 
`dataset_sink_model`:数据集下沉模式,支持Ascend、GPU计算平台,本例为CPU计算平台设置为False。" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": { - "ExecuteTime": { - "end_time": "2021-01-04T07:05:27.693120Z", - "start_time": "2021-01-04T07:04:53.319412Z" - } - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAD8CAYAAACSCdTiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3dd3xUVfrH8c9DqFIEpAhSBLvgSomouO5aUBFdQXct6M+GEMDeQUBAsKJYkBpEAUWKikhViqKrCBKQHpGqhhISek1I5vz+mGGNYQIJMzeTTL7v1yuvzNx7554nN5Mnd8499znmnENERKJTsUgHICIi3lGSFxGJYkryIiJRTEleRCSKKcmLiEQxJXkRkSgWcpI3s9pm9o2ZJZrZSjN7LLC8t5ltMrMlga9WoYcrIiJ5YaGOkzezGkAN59xiMysPLALaALcB+5xzb4QepoiInIjioe7AObcF2BJ4vNfMEoHTQt2viIiELuQz+b/szOx04DugIfAkcB+wB0gAnnLO7QzymjggDqBs2bJNzz333LDFIyJSFCxatCjVOVc12LqwJXkzKwd8C7zknJtoZtWBVMABffF36bQ71j5iY2NdQkJCWOIRESkqzGyRcy422LqwjK4xsxLAZ8AY59xEAOdcsnMu0znnA4YDzcLRloiI5F44RtcYMAJIdM69mWV5jSyb3QysCLUtERHJm5AvvAKXAXcDy81sSWBZN6CtmTXC312zEegYhrZERAonnw9SUqBKFUhNhWrVwMzzZsMxuuZ7IFik00Pdt4hIVMjIgMsvh4ULoVw52LcPLrsMvvkGinl7T6rueBUR8ZLPB//4B8yfD5mZsHu3//u8ebBtm+fNK8mLiHgpJcV/Bn9E+fL+7xkZcNtt/n8CHlKSFxHxUrVq0Lw5FC8Ol14KiYkQE+Nf9+OP/n8CHgrHhVcREcmJmb/vPSXFn/DB3x8/bx40b056xaqU9LB5ncmLiHitWDGoXt2f8ANJP2PjH9z/r86Ue6Ypv/6+y7umPduziIgENXtRElVfbs/I/W0pXSqGbfu2e9aWumtERPLJ/gOZ3PLaQGZmdIdKjjsrv8nIHo9QIsa7VKwkLyKSD0bOWEbn6R04VOUnah5uyeSOQ2h6xumet6vuGhERD21JPUjjp5/j/h+bkl5mHV0+u5CkBftpWq9OvrSvM3kREQ84B71Hz+GlpR3JPHkdDQ7czYzBk6i9f6l/OGVKiv9irMd0Ji8iklc+HyQn+zN5ECvWbafuY/fRZ2MLYmKMwRfPYcWro6jdtLE/wTdv/udwSo/pTF5EJC98Prjyyv+Nc89afyYz09FhwFhGbn0cV3EnVxbvxqQXelDhpDL+12YdL58PxclASV5EJG9SUvwJPiPD/z3Q7TJr4UZuG92ZXVW+pEJGMz6+dTY3xP7tr689Ml4+H6m7RkSKtuN0vRwla5mC5s3ZX74y1/buz7WTGrCrwvfcVXkA2/vNOzrBn0hbYaAkLyJF15Gul1q14Iorclcs7EiZgqQk3u/yFlWeu5RZ9jQ1069i8QOr+OiRRyh+pDZNqG2FQThmhqptZt+YWaKZrTSzxwLLK5vZLDNbE/heKfRwRUTCKFjXSy5s2XGIRv3f4IEFzUgvncRzZ04gqd9kGtevHfa2QhWOM/kM4Cnn3HnAJcBDZnY+0B
WY45w7C5gTeC4iUnBk63rJzYiXXqNnUvuVhiwt+wYND7dj4zOJvHzXrdjxLqSeQFvhEI6ZobYAWwKP95pZInAa0Bq4IrDZKGAu0CXU9kREwiZ7hchjJOrl61K4YcCT/FH5I0ra2Qy6eC4dW/7Tk7bCKayja8zsdKAxsACoHvgHgHNui5nlz78tEZG8OM6Il8xMxwMDPmR08pO4k/dwVczzTOrbjfJlSoe9LS+ELcmbWTngM+Bx59ye4350+fN1cUAcQJ06+XObr4hIbsxMWMftozux65TZnJzenLH/ief62AaRDitPwjK6xsxK4E/wY5xzEwOLk82sRmB9DSDoZIbOuXjnXKxzLrZq1arhCEdEJCT796dxbfe+XDfpAnaVW8DdlQaz/Y3/FroED2E4kzf/KfsIINE592aWVZOBe4FXA9+/CLUtERGvjZj+Ew9NvY+06omctv4ypj4/lkZnHmPUTAEXjjP5y4C7gavMbEngqxX+5H6Nma0Brgk8FxEpkDan7qNRlydpv+BSDp+0nW7jmpA0+gcalS3chQHCMbrmeyCnDvirQ92/iIjXen44g5eXdiaz/G9csK890weOo1ba4j+n6yvEdMeriBRZy9YlU/uJtvRd34ri7iSGXfw9y/rFU+viJv7x7Jdfnu+jYcKtcH8OERE5AZmZjnbvfsCHyU/jyu3n6mIvMOnFLpQrU8q/QQTGs3tFSV5EopfPd1Sy/iphDbd/FMfuSnM5+dDljL0rnutjz/3r6yIwnt0r6q4RkeiUrSDYvv2HaNHnZVp+cQG7T/qZeyrGs73/3KMTfJTRmbyIRKfk5P8VBHtvw24e7hFLWsWV1DrwH6Y+OIALz6gR6QjzhZK8iEQfnw9uv53NxUpx/fWXsazZd8TsP43u9b/gxbtvinR0+UpJXkSiT0oKz6ek88pDFcis8B1/2xfH9K79OK1KhUhHlu+U5EWk8ApyYXXpui3cOOgxku5YQKnkMxm6pA3t5wwp9KNkTpQuvIpI4ZTtwmpmRgb3vBNPoxHnkVR2Mi3sRVKf/Zr2cz4rsgkedCYvIoVVlpmWZvySRNunrmR35e85+cAVjLtrGC0vOjvSERYISvIiUjhVq8a+S/9O6xgfX/99Ppaxk3sqjuD9HvcTE1N0z9yzU5IXkUIp/qt5PHrRNtIqrKL2rjuY+vDb/O2M6LiBKZyU5EWkUElK3U2r/s+xvPQQYqwOz9efRp+7W0U6rAJLSV5ECo3uH33Oa8seJrPMVi488DjTu/alZpVykQ6rQFOSF5ECb8m6Tdw4+BE2VficUukXMuzKSTxw/UWRDqtQUJIXkQIr0+fjvoHD+GhrVyiTTgteZdIrT1K2TIlIh1ZohGuO1/fNbJuZrciyrLeZbco2W5SISK5MX7iSyk9fzkc7H6TivmZ82XoFs3p18Sd4n89fm8a5SIdZ4IXrZqiRQMsgy99yzjUKfE0PU1siEsX2HjzE1S/25IYpjdlbcjX3nTyK1Ldmct1FZ/g3yHYTFD5fROMt6MLSXeOc+87MTg/HvkSk6Br21Xc8NiuOtPKrqb3n/5j2yJtccEbVv26U5SYo5s3zP4+S2u9e8LqswcNmtizQnVMp2AZmFmdmCWaWkJKS4nE4IlIQJaXu4oJucXSa/08yXBo9633J729/eHSCB3+dmubN/dPzNW/ufy45MhemPq3AmfxU51zDwPPqQCrggL5ADedcu2PtIzY21iUkJIQlHhEp+JxzdP/4U/ote5TM0tu48OCTTH+2NzWrlP3rhtkLkQUpTFaUmdki51xssHWenck755Kdc5nOOR8wHGjmVVsiUvj8vP4Paj/bmlfW3kbxQzV57+KFLOn3evAEn70P/sj0fErwx+XZEEozq+Gc2xJ4ejOw4ljbi0gUy3LmneHzcd+gwYxJ7gYlfVzj3uDzVx+jbJkc0pH64EMSliRvZmOBK4AqZpYE9AKuMLNG+LtrNgIdw9GWiBQyR87E581j+j9a0LbRTvZUWE
DFPdcx/p4hXHtRvT+HRAbrfjnSBz9vnvrgT0C4Rte0DbJ4RDj2LSKFXEoKexfMp/U/m/JN89lYWiXurzCG4T3a+qtFZvknQPPm8M03/u6YI8z8y9QHf0J0x6uIeGrYzyt5tFMN0istoM6q65n20mganlnlzw1y0x1zpA9e8kwzQ4mIJ/5I3UHD7u3otOBqMosVp1fVSfw2btpfEzxoSKTHdCYvImHlnKPbx+Ppt/wxfKW202jnU0zv3ocaVU8K/gJ1x3hKSV5Ewmbx+t/419DObC47g9IHLmLwvKu4/8d34OeFR/e1Z6XuGM8oyYtIyDIyM7ln8ADGJveAEsa1vreZ+MStlD27roY+RpiSvIiEZGrCEu4a14E95ROouKsVE+4ZzDXN6vorRGroY8QpyYvICdlz8ACt33qBuWn9sWJVaFd+PPE9bv1zEm31tRcISvIikmeDv5rFE3M6kV52PXV2tGfa4/1oeEaQGoTqa484JXkRybXfU1O5/u0nWVXiQ2IOn03v07+hZ88rdJJegCnJi8hxOefoOnYMbyx/Al+JXTTe04Np3bpTo2rpSIcmx6EkLyLHlLBuPTfFd2bLSTMpve8SBrcczv03NIx0WJJLSvIiEtThzAzuHfI2Y7f2hJjiXJc5kM9e60TZk2IiHZrkgZK8iBxlyqJF3DWuA3vL/UzFHTcx4d5BXHNxrUiHJSdASV5E/lfvfXe5srR+pxffpr2NUY125T5lWPdbKF5cV1YLKyV5kaIuUOp30NbNPHnDAdJP3kyd1I5Me+JVGp5ZMdLRSYjCUoUyMFH3NjNbkWVZZTObZWZrAt+DTuQtIpG18ZdVnF9lDw/fuZbM9LK8cMp0Ng4cqgQfJcJVangk0DLbsq7AHOfcWcCcwHMR8dqRWZacO+Zmzjme+eh9zvjwnyQ2WEnjua354+cz6PlQS417jyJhSfLOue+AHdkWtwZGBR6PAtqEoy0ROYZgk14HsXDdWmo+14I31j1AyW2n88Gc21n88RBqfD9d5QeijJeThlQ/MpF34HvQ6kRmFmdmCWaWkJKS4mE4IkVAcvLRsyxlkZ5xmDsGvkqzkRew1RJoObUNqR+s5L6fxvlLECjBR52IzwzlnIt3zsU652KrVq0a6XBECi+fD26/3Z/gzY6q/PhFwk9U6RbL+O3PUSmlFbNar2LGSTsoG5OpKpFRzMvRNclmVsM5t8XMagDbPGxLRFJS4Mcf/Y+LFYPx48GMXQf20vqd5/kubQDmq0m7kz4nflAbYmJQlcgiwMsz+cnAvYHH9wJfeNiWiGSdK/Wyy6B6dd79airV+zTgu7QB1N32IMs6rmLEM4EED39WiVSCj1phOZM3s7HAFUAVM0sCegGvAhPM7AHgd+DWcLQlIjnIUr99g/lo1esOfomZQMyBBrxwzg883+tS5fIiKCxJ3jnXNodVV4dj/yISROAu1axdLc6Mp+dM4e0Vz+CLOUDjnX2Z1uNZalQrGeFgJVIifuFVRE5AkKGSC9atpma3K3lzTQdK7ryQD5otY/HbPZTgiziVNRApjFJS/jdUMu3HH7j7rZ58susNcGVomT6cT/u3o+xJxziHC/IpQKKTzuRFCqPARdbP69SgSqc6fLLvJSpta82sNonMeKn98RN8Lm6YkuigM3mRQmjnwb3c1LIh36f9F9tbiwfKTGHY4Bv/HDVzLFk+BfzvhinNwxq1lORFChOfj3cmjuGZhOc4XHozdbc+wtSnXqThWeVzv48jQy3nzdNNUEWAkrxIIbF+WxKtnr2Z1fUSiNnTgD5nfkqPXpfkvUs9y1BL9clHPyV5kQLO53w8PTaed1Z2wVcrnSaz/8O0Bd9y6h/1wDixi6hHboKSqKcLryIF2Px1idTs/k/eWtOZkttj+WBOWxbNn8Spl57nT+p5vYiayzLEEj2U5EUKoEOH07h1UG8uHXUhyZmruD7tA1L7z+a+H96DpCSYO9d/1h7sImpONKqmSFKSFylgPlv4PVWfb8
SnqS9QacutzL45kekv30fZsnZ0rZms9WqOdxE1L/8QJGqoT16kgNi+fxet3+3KD2nDsPS6PFBpOkOHXE/xY/2V5uUiqkbVFElK8iIR5pzj7ZkT6TL3EQ6XTKbu1ieZ9lQfGpxdNnc7yO1FVI2qKZKU5EUiaO22JG4Y9BC/FptMzJ7G9GkyhR69m3qXfzWqpshRkheJgExfJk+NG8K7q7rhI4MmO15nWq/HObWa/iQlvPSOEvFKDuPX561dwc0fdGBbyfmUTrmGoTcM5d6b6kcwUIlmno+uMbONZrbczJaYWYLX7YkUCEGGKx48fIh/D+rBZaMbs+3wWlod+pCU12dw78VlNW5dPJNfZ/JXOudS86ktkcjLNlzxkzlTuP/rZ9lf+lcqb7qHCe37c/XFlf3/CI6MdvnmG3+fuUgY6R0l4oXAcMXtZYtz2R1NuW1eGw4czKB9qZkkDxvF1ZdW0bh1yRf5keQdMNPMFplZXPaVZhZnZglmlpCiN7lECQf079qJGo+cwrz6CdRNepZlnZczvOs1f457z8uNTCInyJzHfYFmVtM5t9nMqgGzgEecc98F2zY2NtYlJKjbXgq3Ndt+54bBD7LGphGT3JRejYfTo33j4MMiNUOThIGZLXLOxQZb53mfvHNuc+D7NjP7HGgGBE3yIoVZpi+TJ8cN5N1V3XEOmu5+iym9HqZG9WP8mWncunjM0yRvZmWBYs65vYHH1wJ9vGxTJBK+X7uUWz7oQErJhZROvp6hNw7h3tZ1Ix2WiOdn8tWBz83/MbQ48LFz7kuP2xTJNwcPH+Su4X34PPl1SD+FVhljGf/W7ZQrp64XKRg8TfLOufXAhV62IRIp4xfO4YHPO7K/1Doq/96OCR1e5+rmlSMdlshf6I5XkTxK2bedmwY9xfxDo7D9Z9Kh3NcMir+SEiUiHZnI0ZTkRXLJOcfrX31M9/8+TkbMLupu6sbUZ3rQ8NwykQ5NJEdK8iK58EvyBm4c2pl1fEXMjmb0aTycHi/8TaMepcBTkhc5hgxfBo+Pe4fBiT1xmcVoumsAU3o9SI1TYyIdmkiuKMmL5ODbNYv5z6gOpJZYTJnN/2LIjYO49+bakQ5LJE+U5EWy2Z++n/97rzeTkt+CQ1W5Ye+HjH3rTspXUKknKXyU5EWyGLtwJu0ndeJAyQ1U+q09E1YdpsXP98OS4aoSKYWSkrwIkLw3hdZDnmTBwY+wvefQofq3DHrxHErUq/XXKpEqQSCFjJK8FGnOOV778kOe//5JMmL2cPofPZnS5TkanlvaP5FH8+Z/1ntXlUgphJTkpchKTF7HjUM7sZ7ZxKQ2p0+TeHq80ODPYZFm/i4aVYmUQkxJXoqcw5mHeWz8mwz9pTcuowRNdwxiysP/pkbDapA9j6tKpBRyuook0cnng+Tko+ZO/ebXhdTofRFD1nSl1B/XM7LJShISx1OjyZ9zsYpEEyV5iT5BJtHel76PmwY/wVVjLmH7gRRa7Z3ItgETuffykpqCT6Kaumsk+mSbO3XMnPHEze3KgZK/U3lDZ8Z3eIUWl5/s37ZcNV1claimJC/RJzB36tYlP3DTLQ1YOO9ObPf5dDj1ewa+dxklS2bZVhdXJcp53l1jZi3NbLWZrTWzrl63J+KAl7vcTe1HKrCwViJ1N7zAsocWE98jW4I/4sjFVSV4iUJeT/8XAwwCrgGSgIVmNtk5t8rLdqXoWrHlV24a3pENbi4xyZfTp0k83V84VzeqSpHldXdNM2BtYIYozGwc0BpQkpewSs9M59HxrxP/S1/c4dI03RHP5D4PULOGsrsUbV4n+dOAP7I8TwIuzrqBmcUBcQB16tTxOByJRnNWz+f2jzqwvfgKyvx2K0Nav8O9t9SIdFgiBYLXST5YJ+dfBi475+KBeIDY2FgXZHuRoPam7eXOEd2Yum0Q7D+NVkxm7Lv/okKFSEcmUnB4neSTgKwFuGsBmz1uU4qA0Qsm02nKQxwsvonKax9mfMeXaHF5+U
iHJVLgeJ3kFwJnmVk9YBNwB3Cnx21KFNu8Zws3DX2URQc/xXY2pEP1Txj4/iXBR82IiLdJ3jmXYWYPA18BMcD7zrmVXrYp0cnnfLw0PZ4+P3Ylww5Rd+NLTHnuGS44v0SkQxMp0Dy/Gco5Nx2Y7nU7Er2Wb/mFm4bHsdH9l5iky+mz/FK6L+hKseIaOSNyPLrjVQqstIw0Hhn/Gu+tfgmXXpamMzsz+eeJ1Cz+I2x/UtUhRXJBp0JSIM1a/QM1+zRm+NpelN5wCx80WUVC+ZXULL5dNWZE8kBn8lKg7D60mzvf78r0lKGwtw6t3DQ+HtCSk9NT4N9fQ2qqasyI5IHO5KXAGDn/c2q8eD7Tk+OpvPoJZrZZybQ3W3Jym0DZ4KuugqpVleBF8kBn8hJxSbs3cdOwh/n54CRsx4V0qD6Jdz+4iFKl8E/8kb3eu/riRXJNSV4ixud89J0xlL4/diXTHabuhteY3O0J/tYgy7DIaqr3LhIKJXmJiCWbV9LmvTh+c/OI2dSCPo0H0/2hChQ7NdtbUvXeRUKiPnkJjxzmVM3uUMYh2o/pSZNhjflt/2qa/j6Kjb2/5PlP21OsTg7zrKreu8gJU5KX0AWZUzWYr375jpp9GjFibV9Kr72d95smkjDiHmqVStU8qyIeUZKX0GWbUzV7kt55cCfXD+5Ay/H/ZOeedFpt/4rNgz7k/tuq+jc40u9evLj63UXCTH3yErocLo4653h//qc8PP0RDhVLpfLqZxjbqRfXXlH2r69Xv7uIZ5TkJXRBkvTvu/7gpvgHWXpwKqQ0oUP1Gbw7qrF/WGQwR/rdwd/do4QvEhbqrpHwCCTpTOej1/R3qd//fJbu/pq6v/RnyUMLiH/hGAk+q1z274tI7uhMXsLm583LaTOiA7/7FhDzx3W80HQIPfrWy9sk2sH693Xzk8gJU5KXkB08fJCHJvTlg19fh4OVaJoyhs/7tqV27RPoatHNTyJh5VmSN7PeQAfgyFCLboHa8hJFpiV+zf+N68iuYmsp8+t9DG7zBvfedsqJd6XrIqxIWHl9Jv+Wc+4Nj9uQCNh+YDt3jnyGmSkfwK4zuME3m4+GXE3FimHYedaLsCISEnXXSJ445xg+72Me/fIJ0ortoFJiV8Z37sk1V5aJdGgiEoTXSf5hM7sHSACecs7tzL6BmcUBcQB16tTxOBwJxYadG2kzvDPLDn4JybF0+OlRBszrRumTNEhLpKAyd5xaI8d8sdls4NQgq7oD84FUwAF9gRrOuXbH2l9sbKxLSEg44XjEGxm+DHrPeJdXF/QgM9OoOyeOL376igtjfoWkJHWtiESYmS1yzsUGWxfSmbxzrkUuAxgOTA2lLYmMhKQl3PxBe5J8i4jZeAMvNBlE95L3ERPzq0a/iBQCXo6uqeGc2xJ4ejOwwqu2JPwOHD7AgxNeYNSv/eFAFZpuG8/EF2+lTh2DR+do9ItIIeFln3w/M2uEv7tmI9DRw7YkXHw+pvz4KXfPfI7dxdZT5pf2DLq5H/fdXunPfK7RLyKFhmdJ3jl3t1f7Fm+k7ttG20evYHbdRNhxNq18X/PR0CupVCnSkYnIidIQSsE5x9B5H/HEl0+QVms3lb/tzNjvV3HtH+eDErxIoaYkX8St27GeNiM6seLALNhyKe0X/ot3V75E6cua6qKqSBRQki+iMnwZPD/9Lfr91AtfRnHqrh3IpO6daXQBkNJOF1VFooSSfFHj8/HT8tncMrkLm3xLiFnfmhdiB9K9by1iYgLb6KKqSNRQki9C9h/aS+e4S/mwXiLsr06TrZ8y8aVbqFtXZ+wi0UpJvoj4YuWX3DO+I3vO+J3SCfcwaM5+7l//d+xUJXiRaKYkH+W27d/GnaOeYE7Kx7DjXFp9/xofrniDypedB9V1YVUk2inJRynnHAN/GMnTM58inX1UWtGbjx/sSssBJSDl3mNfWNUcqyJRQ+UDCzufD5KTIUuhuTXb13LBGy14dE470jc1oH3GUj
aN6UXLFqX+vFv1WAlec6yKRA0l+cIsW0I+fDiNLlNf4dwBF7ByRwJ1lw9l8SPfMvzl8yiT23LvweZYFZFCS0m+MMuSkOdtmEfdl5vSb1E3bM0N9K6ayLrxHWncKI+/4iNzrBYvriqTIlFAffKFWbVq7L28GZ1Kr+HjZqmwZxdNtk5i4iutqVv3BPepOVZFooqSfCH22YpptPvHH+whldLLOzPw5ldod1eF0POyqkyKRA0l+UJo676ttB39GHNTJkBKA1pl/sDo+Es55ZRIRyYiBY2SfCHicz4GfD+CLrOfJd13gErL+zLmwWe5/tqSkQ5NRAqokC68mtmtZrbSzHxmFptt3XNmttbMVpvZdaGFKb+krKbhG1fyxNdxpP9+IQ8cXkbSxz2U4EXkmEI9k18B3AIMy7rQzM4H7gAaADWB2WZ2tnMuM8T2ipz0zHS6TX+NtxJexJd2EnVXv8fE59vRpIkuiIrI8YU6kXcigB19pa81MM45lwZsMLO1QDPgx1DaK2q+/+1HbvuwA1syVxKz+nZ6xb5NjxdPpbg62UQkl7xKF6cB87M8TwosO4qZxQFxAHXq1PEonEIiUE5gT/mSxI15ivGbRsKeWjTZPJVPX7mBevUiHaCIFDbHTfJmNhs4Nciq7s65L3J6WZBlLsgynHPxQDxAbGxs0G2KhMDdqxNSF9C+ZWn2lt9D6YUdGXjba7TrH4ZhkSJSJB03yTvnWpzAfpOA2lme1wI2n8B+iozNG5fTtkYi312VBlvPodX4NozaOowq7/cGqxDp8ESkkPKqrMFk4A4zK2Vm9YCzgJ88aqtQ8zkfb343lNNH/YPvztxLpdnPMO2j05m29SWqXHaOygqISEhC6pM3s5uBd4GqwDQzW+Kcu845t9LMJgCrgAzgIY2sOdrKbav496g4Vh/4AX67igeqDOGdUSdTtm4VSE1VWQERCZk5V3C6wWNjY11CQkKkw/BcWkYaz01/hbcXvYw7VJ463z7LxIRPaXrZSf66McVUN05Ecs/MFjnnYoOt02C8fPbtxv9y+0dxJGf+QkziXfQ4rw89Es6neGYazCvuLwymujEiEiZK8vlk16FdxE3owicb4mHn6TTePINPX76W+uW2wTfN4McfVdpXRMJO/QIec84xbuln1H71fD5Z9x6lFz/F8MYrWDT2Wuo/cCXUru3vd//9d5g7V33wIhJWOpP3UNKeJO748CF+SJ0MWxvT8vAURr/XlKpV8U/Zl3UGpmLFlOBFJOx0Ju+BTF8m/b4dSP3+5/PDlllUWvg609r8xIwRgQQPmoFJRPKFzuTDbHnycv4zKo5fD86HDdfQrtpQBoyvT9my2TbUDEwikg+U5MPkUMYhukx7kXd/fg13oCJ1Ej/i0553ctFFx0jemoFJRDymJJ9XgSJiWc++v14/l7Yfx7Etc5yzTP8AAArKSURBVA3FVtzD8xf1p8fEKpQoEeFYRaTIU5LPi0ARMebNg+bN2TH9M+I+68pnG0bAjvo03jSLT15rwRlnRDpQERE/Jfm8SEmBefNwGRmM2TmfTv3OZz87KL24C2/f3JO4t09S17qIFChK8nlRrRq/X9mEtlXXMu/sHbCpLi0Pz2TUiEYaHCMiBZKSfC5l+jJ5/buBPH/pSjIyoOL8N/nwkUe5sVVMpEMTEcmRknwuLN26lP+M7sDagwth3fW0qzaEdz6pS7lykY5MROTYlOSP4eDhgzwz7QUGL3kDt/8U6qwayye9bqdZM3W8i0jhoCR/RLahkTPXzub/xnUiJXMdMcva0e2i13l+UmUNixSRQiXUSUNuBXoD5wHNnHMJgeWnA4nA6sCm851znUJpy1NZhkZu/8dFtL/7LCb9Nhq2n0WjpK/5pN+VnHlmpIMUEcm7UM/kVwC3AMOCrFvnnGsU4v7zR0oKbt4PjD4PHmy6lAPrF1J6UTfevqUHce+U0bBIESm0QkryzrlEACvkWXBDif207VCTBdX/gKQLaHk4npHv/00VB0
Sk0POyCmU9M/vZzL41s8s9bCf3fD5/id/AlIcZvgxe+qY/Zw+4gAUVd1Lxh3f44qbvmTFSCV5EosNxz+TNbDZwapBV3Z1zX+Twsi1AHefcdjNrCkwyswbOuT1B9h8HxAHUqVMn95HnVbaSBIs/7s9tYzqy7uBi+PVf3F99EO98Vpvy5b0LQUQkvx03yTvnWuR1p865NCAt8HiRma0DzgaOmqXbORcPxIN/Iu+8tpVrgZIE+y2Dp8osZtjwi2FfNeqs/ITxvf7NJZcU7i4nEZFgPBlCaWZVgR3OuUwzqw+cBaz3oq1cq1aNGTecx92nb2R7pb0U+7kD3S56jZ6TK2lYpIhErVCHUN4MvAtUBaaZ2RLn3HXAP4A+ZpYBZAKdnHM7Qo42rwJj31PKGg98+iRTGi+H1HNo9PMwxvf7J2efne8RiYjkq1BH13wOfB5k+WfAZ6HsO2Q+H+7KK3h/73weua4kB0ukU2phT97+dzc6DiilYZEiUiRE7R2v69Yu5I7Tl5FQ/zD8fhHX7RvIyA8ac2qwS8giIlEq6pL84czDvDL3Tfr8tzeZNUtScepLjDy4h9bLG4HO3kWkiImqJL9w00JuH9OBDQeXwi83c3/1AbwzriTl61fVRNkiUiRFRZLfl76PJ6c+z3vLBuD2nkrt5ROZ0OdmLrkk0pGJiESWl3e85puP5yxj+NIB2OKOdK+4irVTleBFRCBKzuTb/r05U8au5Y3X63HOOZGORkSk4IiKJF++PEwZXS/SYYiIFDhR0V0jIiLBKcmLiEQxJXkRkSimJC8iEsWU5EVEopiSvIhIFFOSFxGJYkryIiJRTEleRCSKhZTkzex1M/vFzJaZ2edmVjHLuufMbK2ZrTaz60IPVURE8irUM/lZQEPn3N+AX4HnAMzsfOAOoAHQEhhsZjEhtiUiInkUUpJ3zs10zmUEns4HagUetwbGOefSnHMbgLVAs1DaEhGRvAtngbJ2wPjA49PwJ/0jkgLLjmJmcUBc4Ok+M1sdQgxVgNQQXu8VxZU3iitvFFfeRGNcdXNacdwkb2azgWAzo3Z3zn0R2KY7kAGMOfKyINu7YPt3zsUD8ceLIzfMLME5FxuOfYWT4sobxZU3iitvilpcx03yzrkWx1pvZvcCNwJXO+eOJPIkoHaWzWoBm080SBEROTGhjq5pCXQBbnLOHciyajJwh5mVMrN6wFnAT6G0JSIieRdqn/xAoBQwy/wTZc93znVyzq00swnAKvzdOA855zJDbCs3wtLt4wHFlTeKK28UV94Uqbjszx4WERGJNrrjVUQkiinJi4hEsUKV5M3sVjNbaWY+M4vNtu64ZRTMrJ6ZLTCzNWY23sxKehTneDNbEvjaaGZLcthuo5ktD2yX4EUs2drrbWabssTWKoftWgaO41oz65oPceVYHiPbdp4fr+P97IHBBOMD6xeY2elexBGk3dpm9o2ZJQb+Bh4Lss0VZrY7y++3Zz7Fdszfi/kNCByzZWbWJB9iOifLcVhiZnvM7PFs2+TL8TKz981sm5mtyLKsspnNCuSiWWZWKYfX3hvYZk1gJGPeOecKzRdwHnAOMBeIzbL8fGAp/ovA9YB1QEyQ108A7gg8Hgp0zoeY+wM9c1i3EaiSj8evN/D0cbaJCRy/+kDJwHE93+O4rgWKBx6/BrwWieOVm58deBAYGnh8BzA+n353NYAmgcfl8ZcRyR7bFcDU/Ho/5fb3ArQCZuC/f+YSYEE+xxcDbAXqRuJ4Af8AmgArsizrB3QNPO4a7D0PVAbWB75XCjyulNf2C9WZvHMu0TkX7I7Y45ZRMP/wn6uATwOLRgFtvIw30OZtwFgv2wmzZsBa59x651w6MA7/8fWMy7k8Rn7Lzc/eGv97B/zvpasDv2dPOee2OOcWBx7vBRLJ4S7yAqg1MNr5zQcqmlmNfGz/amCdc+63fGzzf5xz3wE7si3O+j7KKRddB8xyzu1wzu3EXyusZV7bL1RJ/hhOA/7I8jxYGY
VTgF1ZkkmOpRbC6HIg2Tm3Jof1DphpZosC5R3yw8OBj8zv5/ARMTfH0kvt8J/1BeP18crNz/6/bQLvpd3431v5JtBF1BhYEGT1pWa21MxmmFmDfArpeL+XSL+n7iDnE61IHC+A6s65LeD/Bw5UC7JNWI5bOGvXhIXlooxCsJcFWZZ9bGiuSy3kRi7jbMuxz+Ivc85tNrNq+O81+CXwX/+EHSsuYAjQF//P3Rd/V1K77LsI8tqQx9nm5njZ0eUxsgv78coeZpBlnr6P8srMygGfAY875/ZkW70Yf5fEvsD1lkn4b0T02vF+LxE7ZoHrbjcRqJCbTaSOV26F5bgVuCTvjlNGIQe5KaOQiv9jYvHAGVhIpRaOF6eZFQduAZoeYx+bA9+3mdnn+LsLQkpauT1+ZjYcmBpklSclKXJxvIKVx8i+j7Afr2xy87Mf2SYp8Ds+maM/invCzErgT/BjnHMTs6/PmvSdc9PNbLCZVXHOeVqMKxe/l0iWObkeWOycS86+IlLHKyDZzGo457YEuq62BdkmCf91gyNq4b8emSfR0l1z3DIKgcTxDfCfwKJ7gZw+GYRDC+AX51xSsJVmVtbMyh95jP/i44pg24ZLtn7Qm3NobyFwlvlHIpXE/1F3ssdx5VQeI+s2+XG8cvOzT8b/3gH/e+nrnP4phVOg338EkOicezOHbU49cn3AzJrh//ve7nFcufm9TAbuCYyyuQTYfaSrIh/k+Gk6Escri6zvo5xy0VfAtWZWKdC1em1gWd54fWU5nF/4E1MSkAYkA19lWdcd/8iI1cD1WZZPB2oGHtfHn/zXAp8ApTyMdSTQKduymsD0LLEsDXytxN9t4fXx+xBYDiwLvMlqZI8r8LwV/tEb6/IprrX4+x6XBL6GZo8rv45XsJ8d6IP/HxBA6cB7Z23gvVTf6+MTaPfv+D+qL8tynFoBnY68z4CHA8dmKf4L2M3zIa6gv5dscRkwKHBMl5NlZJzHsZ2EP2mfnGVZvh8v/P9ktgCHA/nrAfzXceYAawLfKwe2jQXey/LadoH32lrg/hNpX2UNRESiWLR014iISBBK8iIiUUxJXkQkiinJi4hEMSV5EZEopiQvIhLFlORFRKLY/wNQ0IMDOgZ73wAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Parameter (name=fc.weight) [[2.0064354]]\n", - "Parameter (name=fc.bias) [2.9529438]\n" - ] - } - ], - "source": [ - "\n", - "from mindspore.train.callback import LossMonitor\n", - "\n", - "epoch = 1\n", - "imageshow_cb = ImageShowCallback(net, eval_data)\n", - "model.train(epoch, ds_train, callbacks=[imageshow_cb], dataset_sink_mode=False)\n", - "\n", - "plot_model_and_datasets(net, eval_data)\n", - "for net_param in net.trainable_params():\n", - " print(net_param, net_param.asnumpy())" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "ExecuteTime": { - "end_time": "2020-09-14T04:00:18.787349Z", - "start_time": "2020-09-14T04:00:18.784236Z" - } - }, - "source": [ - "训练完成后打印出最终模型的权重参数,其中weight接近于2.0,bias接近于3.0,模型训练完成,符合预期。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 总结\n", - "\n", - "本次体验我们了解了线性拟合的算法原理,并在MindSpore框架下实现了相应的算法定义,了解了线性拟合这类的线性回归模型在MindSpore中的训练过程,并最终拟合出了一条接近目标函数的模型函数。另外有兴趣的可以调整数据集的生成区间从(-10,10)扩展到(-100,100),看看权重值是否更接近目标函数;调整学习率大小,看看拟合的效率是否有变化;当然也可以探索如何使用MindSpore拟合$f(x)=ax^2+bx+c$这类的二次函数或者更高次的函数。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/training/source_zh_cn/quick_start/quick_start.ipynb b/tutorials/training/source_zh_cn/quick_start/quick_start.ipynb deleted file mode 100644 index 2d9c8fbefeea57448bf66b905f42026f6fdab8f8..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_start.ipynb +++ 
/dev/null @@ -1,1184 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 实现一个图片分类应用\n", - "\n", - "`Linux` `Windows` `Ascend` `GPU` `CPU` `全流程` `初级` `中级` `高级`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/quick_start/quick_start.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/mindspore_quick_start.ipynb) [![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL21pbmRzcG9yZV9xdWlja19zdGFydC5pcHluYg==&imagename=MindSpore1.1.1) [![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_online_experience.png)](https://ascend.huawei.com/zh/#/college/onlineExperiment/codeLabMindSpore/mindSpore)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "下面我们通过一个实际样例,带领大家体验MindSpore基础的功能,对于一般的用户而言,完成整个样例实践会持续20~30分钟。\n", - "\n", - "本例子会实现一个简单的图片分类的功能,整体流程如下:\n", - "\n", - "1. 处理需要的数据集,这里使用了MNIST数据集。\n", - "2. 定义一个网络,这里我们使用LeNet网络。\n", - "3. 自定义回调函数收集模型的损失值和精度值。\n", - "4. 定义损失函数和优化器。\n", - "5. 加载数据集并进行训练,训练完成后,查看结果及保存模型文件。\n", - "6. 加载保存的模型,进行推理。\n", - "7. 
验证模型,加载测试数据集和训练后的模型,验证结果精度。\n", - "\n", - "这是简单、基础的应用流程,其他高级、复杂的应用可以基于这个基本流程进行扩展。\n", - "\n", - "> 本文档适用于CPU、GPU和Ascend环境。 \n", - "> 你可以在这里找到完整可运行的样例代码: 。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 准备环节\n", - "\n", - "在动手进行实践之前,确保,你已经正确安装了MindSpore。如果没有,可以通过[MindSpore安装页面](https://www.mindspore.cn/install)将MindSpore安装在你的电脑当中。 \n", - "\n", - "同时希望你拥有Python编码基础和概率、矩阵等基础数学知识。\n", - "\n", - "那么接下来,就开始MindSpore的体验之旅吧。\n", - "\n", - "### 下载数据集\n", - "\n", - "我们示例中用到的`MNIST`数据集是由10类$28*28$的灰度图片组成,训练数据集包含60000张图片,测试数据集包含10000张图片。\n", - "\n", - "> MNIST数据集下载页面:。页面提供4个数据集下载链接,其中前2个文件是训练数据需要,后2个文件是测试结果需要。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 导入Python库&模块\n", - "\n", - "在使用前,需要导入需要的Python库。\n", - "\n", - 
"目前使用到`os`库,为方便理解,其他需要的库,我们在具体使用到时再说明。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:53:40.753790Z", - "start_time": "2021-02-03T08:53:40.750705Z" - } - }, - "outputs": [], - "source": [ - "import os" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "详细的MindSpore的模块说明,可以在[MindSpore API页面](https://www.mindspore.cn/doc/api_python/zh-CN/master/index.html)中搜索查询。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 配置运行信息\n", - "\n", - "在正式编写代码前,需要了解MindSpore运行所需要的硬件、后端等基本信息。\n", - "\n", - "可以通过`context.set_context`来配置运行需要的信息,譬如运行模式、后端信息、硬件等信息。\n", - "\n", - "导入`context`模块,配置运行需要的信息。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:53:41.528022Z", - "start_time": "2021-02-03T08:53:40.755378Z" - } - }, - "outputs": [], - "source": [ - "from mindspore import context\n", - "\n", - "context.set_context(mode=context.GRAPH_MODE, device_target=\"CPU\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在样例中我们配置样例运行使用图模式。根据实际情况配置硬件信息,譬如代码运行在Ascend AI处理器上,则`--device_target`选择`Ascend`,代码运行在CPU、GPU同理。详细参数说明,请参见`context.set_context`接口说明。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据处理\n", - "数据集对于训练非常重要,好的数据集可以有效提高训练精度和效率,在加载数据集前,通常会对数据集进行一些处理。\n", - "\n", - "由于后面会采用LeNet这样的卷积神经网络对数据集进行训练,而采用在训练数据时,对数据格式是有所要求的,所以接下来需要先查看数据集内的数据是什么样的,这样才能构造一个针对性的数据转换函数,将数据集数据转换成符合训练要求的数据形式。\n", - "\n", - "执行如下代码查看原始数据集数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:53:42.128603Z", - "start_time": "2021-02-03T08:53:41.530089Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The type of mnist_ds: \n", - "Number of pictures contained in the mnist_ds: 60000\n", - "The item of mnist_ds: dict_keys(['image', 'label'])\n", 
- "Tensor of image in item: (28, 28, 1)\n", - "The label of item: 8\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAEICAYAAACZA4KlAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAANvUlEQVR4nO3dfaxkdX3H8fdHXJe42MhKgRWpUEqTamNXc4smNK2NrSI2QZto2DRmSYxrqya1oYmGNpE0NaFN1dpQH9aKLkZRU6FuE6rSTVNC2xAvBGERK5Susu7CatEKVNYFvv1jzrZ3L/dpZ8483P29X8lkzpyHOd85uZ97zpzfOfNLVSHpxPeMaRcgaTIMu9QIwy41wrBLjTDsUiMMu9QIw66nSbIvyW9Muw71y7Br4pKck+TGJD9I8mCSq5M8c9p1negMu8ZmhQB/GDgEbAG2Ar8GvH1SdbXKsK8j3eH1Hya5M8l/J/l8kpOTXJbklkXzVpKf64Y/leTDSf4hyaNJ/iXJmUn+stu7fjPJSxet7peTfKOb/skkJy94799KckeSHyb51yQvWVTju5PcCTy2TODPBb5QVY9X1YPAl4EX97WdtDTDvv68CbiIQWBeAlx2HMv9MXAacBj4N+D27vXfAh9YNP/vAK8BzgN+vluWJC8DrgHeBjwP+BiwO8nGBctuA14HPLeqnuj+0Xx4wfQPAZcmeXaSs4DXMgi8xsiwrz9/VVUHquph4O8ZHAavxQ1VdVtVPQ7cADxeVddW1ZPA54HFe/arq+qBbj3vYxBggLcCH6uqW6vqyaraxeCfxysW1fhAVf0YoKreXlULD9P/mcGe/EfAfmAe+Ls1fg4NybCvPw8uGP4f4JQ1LvfQguEfL/F68fs8sGD428Dzu+EXApd3h/A/TPJD4OwF0xcve4wkzwC+AlwPbGJwZHEq8Gdr/BwakmE/MTwGPPvoiyRn9vCeZy8Y/hngQDf8APC+qnrugsezq+q6BfOvdCvl5u69r66qw1X1X8AngYt7qFkrMOwnhq8DL06ytTuRdmUP7/mOJC9Ishm4gsGhPsDHgd9N8vIMbEryuiTPWcubVtX3gf8Efi/JM5M8F9jefQaNkWE/AVTVt4A/Af4RuBe4ZeUl1uSzwFeB+7vHn3brmmfwvf1q4AfAfaxykjDJR5N8dMGo32ZwkvF73fJPAH/QQ81aQfzxCqkN7tmlRhh2qRGGXWqEYZcaMdE7jZ6VjXUymya5Sqkpj/MYP6nDWWraSGFPchGD65xPAv6mqq5aaf6T2cTL86pRVilpBbfWnmWnDX0Yn+Qk4K8Z3MTwImBbkhcN+36SxmuU7+wXAPdV1f1V9RPgc8Al/ZQlqW+jhP0sjr3hYX837hhJdiSZTzJ/hMMjrE7SKEYJ+1InAZ52OV5V7ayquaqa28DGJRaRNAmjhH0/x94Z9QL+/84oSTNmlLB/DTg/yblJngVcCuzupyxJfRu66a37uaF3MvghgpOAa6rq7t4qk9SrkdrZq+pG4MaeapE0Rl4uKzXCsEuNMOxSIwy71AjDLjXCsEuNMOxSIwy71AjDLjXCsEuNMOxSIwy71AjDLjXCsEuNMOxSIwy71AjDLjXCsEuNMOxSIwy71AjDLjXCsEuNMOxSIwy71AjDLjXCsEuNMOxSIwy71AjDLjVipF5ctTZfOXDHtEuYmtc8f+u0S1BnpLAn2Qc8AjwJPFFVc30UJal/fezZf72qvt/D+0gaI7+zS40YNewFfDXJbUl2LDVDkh1J5pPMH+HwiKuTNKxRD+MvrKoDSU4Hbkryzaq6eeEMVbUT2AnwU9lcI65P0pBG2rNX1YHu+RBwA3BBH0VJ6t/QYU+yKclzjg4Drwb29lWY
pH6Nchh/BnBDkqPv89mq+nIvVa0zLbejr2aUbWMbfb+GDntV3Q/8Uo+1SBojm96kRhh2qRGGXWqEYZcaYdilRhh2qRGGXWqEYZcaYdilRhh2qRGGXWqEYZcaYdilRvhT0ppZq90e6y2wx8c9u9QIwy41wrBLjTDsUiMMu9QIwy41wrBLjbCd/QQwSnvzev4Z7HHWfiK24btnlxph2KVGGHapEYZdaoRhlxph2KVGGHapEbaznwBWam9erb14tenruR1+FOP+3NNox191z57kmiSHkuxdMG5zkpuS3Ns9nzreMiWNai2H8Z8CLlo07j3Anqo6H9jTvZY0w1YNe1XdDDy8aPQlwK5ueBfw+p7rktSzYU/QnVFVBwG659OXmzHJjiTzSeaPcHjI1Uka1djPxlfVzqqaq6q5DWwc9+okLWPYsD+UZAtA93yov5IkjcOwYd8NbO+GtwNf6qccSeOyajt7kuuAVwKnJdkPvBe4CvhCkrcA3wHeOM4iZ916bque5drWs1m8H37VsFfVtmUmvarnWiSNkZfLSo0w7FIjDLvUCMMuNcKwS43wFtcTnE1rOso9u9QIwy41wrBLjTDsUiMMu9QIwy41wrBLjbCdfQLW8y2w69ks3mY6Te7ZpUYYdqkRhl1qhGGXGmHYpUYYdqkRhl1qhO3sM+BEboe3rXt2uGeXGmHYpUYYdqkRhl1qhGGXGmHYpUYYdqkRhl1qxKphT3JNkkNJ9i4Yd2WS7ya5o3tcPN4yJY1qLXv2TwEXLTH+g1W1tXvc2G9Zkvq2atir6mbg4QnUImmMRvnO/s4kd3aH+acuN1OSHUnmk8wf4fAIq5M0imHD/hHgPGArcBB4/3IzVtXOqpqrqrkNbBxydZJGNVTYq+qhqnqyqp4CPg5c0G9Zkvo2VNiTbFnw8g3A3uXmlTQbVr2fPcl1wCuB05LsB94LvDLJVqCAfcDbxljjuree70cf1Sif3Xvh+7Vq2Ktq2xKjPzGGWiSNkVfQSY0w7FIjDLvUCMMuNcKwS43wp6Q1s1ZrtrNp7vi4Z5caYdilRhh2qRGGXWqEYZcaYdilRhh2qRG2s2tFJ3J30q1xzy41wrBLjTDsUiMMu9QIwy41wrBLjTDsUiMMu9QIwy41wrBLjTDsUiMMu9QIwy41wrBLjTDsUiPW0mXz2cC1wJnAU8DOqvpQks3A54FzGHTb/Kaq+sH4StU0eL/6iWMte/YngMur6heAVwDvSPIi4D3Anqo6H9jTvZY0o1YNe1UdrKrbu+FHgHuAs4BLgF3dbLuA14+rSEmjO67v7EnOAV4K3AqcUVUHYfAPATi97+Ik9WfNYU9yCvBF4F1V9aPjWG5Hkvkk80c4PEyNknqwprAn2cAg6J+pquu70Q8l2dJN3wIcWmrZqtpZVXNVNbeBjX3ULGkIq4Y9SYBPAPdU1QcWTNoNbO+GtwNf6r88SX1Zy09JXwi8GbgrydF2mCuAq4AvJHkL8B3gjeMpcf0btWthm7/Uh1XDXlW3AFlm8qv6LUfSuHgFndQIwy41wrBLjTDsUiMMu9QIwy41wrBLjTDsUiMMu9QIwy41wrBLjTDsUiMMu9QIwy41Yi33s0tTMervAOhY7tmlRhh2qRGGXWqEYZcaYdilRhh2qRGGXWqE7ezrwDjbm/1N+na4Z5caYdilRhh2qRGGXWqEYZcaYdilRhh2qRGrtrMnORu4FjgTeArYWVUfSnIl8Fbge92sV1TVjeMqVOPhPePtWMtFNU8Al1fV7UmeA9yW5KZu2ger6i/GV56kvqwa9qo6CBzshh9Jcg9w1rgLk9Sv4/rOnuQc4KXArd2odya5M8k1SU5dZpkdSeaTzB/h8EjFShremsOe5BTgi8C7qupHwEeA84CtDPb8719quaraWVVzVTW3gY09lCxpGGsKe5INDIL+maq6HqCqHqqqJ6vqKeDjwAXjK1PSqFYNe5IAnwDuqaoPLBi/ZcFsbwD29l+epL6s5Wz8hcCbgbuS
HL0f8gpgW5KtQAH7gLeNpUJJvVjL2fhbgCwxyTZ1aR3xCjqpEYZdaoRhlxph2KVGGHapEYZdaoRhlxph2KVGGHapEYZdaoRhlxph2KVGGHapEYZdakSqanIrS74HfHvBqNOA70+sgOMzq7XNal1gbcPqs7YXVtVPLzVhomF/2sqT+aqam1oBK5jV2ma1LrC2YU2qNg/jpUYYdqkR0w77zimvfyWzWtus1gXWNqyJ1DbV7+ySJmfae3ZJE2LYpUZMJexJLkry70nuS/KeadSwnCT7ktyV5I4k81Ou5Zokh5LsXTBuc5KbktzbPS/Zx96UarsyyXe7bXdHkounVNvZSf4pyT1J7k7y+934qW67FeqayHab+Hf2JCcB3wJ+E9gPfA3YVlXfmGghy0iyD5irqqlfgJHkV4FHgWur6he7cX8OPFxVV3X/KE+tqnfPSG1XAo9OuxvvrreiLQu7GQdeD1zGFLfdCnW9iQlst2ns2S8A7quq+6vqJ8DngEumUMfMq6qbgYcXjb4E2NUN72LwxzJxy9Q2E6rqYFXd3g0/AhztZnyq226FuiZiGmE/C3hgwev9zFZ/7wV8NcltSXZMu5glnFFVB2HwxwOcPuV6Flu1G+9JWtTN+Mxsu2G6Px/VNMK+VFdSs9T+d2FVvQx4LfCO7nBVa7OmbrwnZYluxmfCsN2fj2oaYd8PnL3g9QuAA1OoY0lVdaB7PgTcwOx1Rf3Q0R50u+dDU67n/8xSN95LdTPODGy7aXZ/Po2wfw04P8m5SZ4FXArsnkIdT5NkU3fihCSbgFcze11R7wa2d8PbgS9NsZZjzEo33st1M86Ut93Uuz+vqok/gIsZnJH/D+CPplHDMnX9LPD17nH3tGsDrmNwWHeEwRHRW4DnAXuAe7vnzTNU26eBu4A7GQRry5Rq+xUGXw3vBO7oHhdPe9utUNdEtpuXy0qN8Ao6qRGGXWqEYZcaYdilRhh2qRGGXWqEYZca8b+yHlbL9HH54gAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "import matplotlib\n", - "import numpy as np\n", - "import mindspore.dataset as ds\n", - "\n", - "train_data_path = \"./datasets/MNIST_Data/train\"\n", - "test_data_path = \"./datasets/MNIST_Data/test\"\n", - "mnist_ds = ds.MnistDataset(train_data_path)\n", - "print('The type of mnist_ds:', type(mnist_ds))\n", - "print(\"Number of pictures contained in the mnist_ds:\", mnist_ds.get_dataset_size())\n", - "\n", - "dic_ds = mnist_ds.create_dict_iterator()\n", - "item = next(dic_ds)\n", - "img = item[\"image\"].asnumpy()\n", - "label = item[\"label\"].asnumpy()\n", - "\n", - "print(\"The item of mnist_ds:\", item.keys())\n", - "print(\"Tensor of image in item:\", img.shape) \n", - "print(\"The label of item:\", label)\n", - "\n", - "plt.imshow(np.squeeze(img))\n", - "plt.title(\"number:%s\"% item[\"label\"].asnumpy())\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "从上面的运行情况我们可以看到,训练数据集`train-images-idx3-ubyte`和`train-labels-idx1-ubyte`对应的是6万张图片和6万个数字标签,载入数据后经过`create_dict_iterator`转换字典型的数据集,取其中的一个数据查看,这是一个key为`image`和`label`的字典,其中的`image`的张量(高度28,宽度28,通道1)和`label`为对应图片的数字。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 定义数据集及数据操作\n", - "\n", - "我们定义一个函数`create_dataset`来创建数据集。在这个函数中,我们定义好需要进行的数据增强和处理操作:\n", - "\n", - "1. 定义数据集。\n", - "2. 定义进行数据增强和处理所需要的一些参数。\n", - "3. 根据参数,生成对应的数据增强操作。\n", - "4. 使用`map`映射函数,将数据操作应用到数据集。\n", - "5. 
对生成的数据集进行处理。\n", - "\n", - "定义完成后,使用`create_datasets`对原始数据进行增强操作,并抽取一个`batch`的数据,查看数据增强后的变化。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:53:42.147143Z", - "start_time": "2021-02-03T08:53:42.129614Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Number of groups in the dataset: 1875\n" - ] - } - ], - "source": [ - "import mindspore.dataset.vision.c_transforms as CV\n", - "import mindspore.dataset.transforms.c_transforms as C\n", - "from mindspore.dataset.vision import Inter\n", - "from mindspore import dtype as mstype\n", - "\n", - "\n", - "def create_dataset(data_path, batch_size=32, repeat_size=1,\n", - " num_parallel_workers=1):\n", - " \"\"\" \n", - " create dataset for train or test\n", - " \n", - " Args:\n", - " data_path (str): Data path\n", - " batch_size (int): The number of data records in each group\n", - " repeat_size (int): The number of replicated data records\n", - " num_parallel_workers (int): The number of parallel workers\n", - " \"\"\"\n", - " # define dataset\n", - " mnist_ds = ds.MnistDataset(data_path)\n", - "\n", - " # define some parameters needed for data enhancement and rough justification\n", - " resize_height, resize_width = 32, 32\n", - " rescale = 1.0 / 255.0\n", - " shift = 0.0\n", - " rescale_nml = 1 / 0.3081\n", - " shift_nml = -1 * 0.1307 / 0.3081\n", - "\n", - " # according to the parameters, generate the corresponding data enhancement method\n", - " resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR)\n", - " rescale_nml_op = CV.Rescale(rescale_nml, shift_nml)\n", - " rescale_op = CV.Rescale(rescale, shift)\n", - " hwc2chw_op = CV.HWC2CHW()\n", - " type_cast_op = C.TypeCast(mstype.int32)\n", - "\n", - " # using map to apply operations to a dataset\n", - " mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns=\"label\", num_parallel_workers=num_parallel_workers)\n", - " 
mnist_ds = mnist_ds.map(operations=resize_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=rescale_nml_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns=\"image\", num_parallel_workers=num_parallel_workers)\n", - " \n", - " # process the generated dataset\n", - " buffer_size = 10000\n", - " mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size)\n", - " mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)\n", - " mnist_ds = mnist_ds.repeat(repeat_size)\n", - "\n", - " return mnist_ds\n", - "\n", - "ms_dataset = create_dataset(train_data_path)\n", - "print('Number of groups in the dataset:', ms_dataset.get_dataset_size())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "调用数据增强函数后,查看数据集`size`由60000变成了1875,符合我们的数据增强中`mnist_ds.batch`操作的预期($60000/32=1875$)。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "上述增强过程中:\n", - "\n", - "- 数据集中的`label`数据增强操作:\n", - "\n", - " - `C.TypeCast`:将数据类型转化为`int32`。\n", - "\n", - "- 数据集中的`image`数据增强操作: \n", - "\n", - " - `datasets.MnistDataset`:将数据集转化为MindSpore可训练的数据。 \n", - " - `CV.Resize`:对图像数据像素进行缩放,适应LeNet网络对数据的尺寸要求。 \n", - " - `CV.Rescale`:对图像数据进行标准化、归一化操作,使得每个像素的数值大小在(0,1)范围中,可以提升训练效率。 \n", - " - `CV.HWC2CHW`:对图像数据张量进行变换,张量形式由`高x宽x通道`(HWC)变为`通道x高x宽`(CHW),方便进行数据训练。\n", - " \n", - "- 其他增强操作:\n", - "\n", - " - `mnist_ds.shuffle`:随机将数据存放在可容纳10000张图片地址的内存中进行混洗。 \n", - " - `mnist_ds.batch`:从混洗的10000张图片地址中抽取32张图片组成一个`batch`,参数`batch_size`表示每组包含的数据个数,现设置每组包含32个数据。 \n", - " - `mnist_ds.repeat`:将`batch`数据进行复制增强,参数`repeat_size`表示数据集复制的数量。\n", - "\n", - "先进行`shuffle`、`batch`操作,再进行`repeat`操作,这样能保证1个`epoch`内数据不重复。\n", - "\n", - "> 
MindSpore支持进行多种数据处理和增强的操作,各种操作往往组合使用,具体可以参考[数据处理](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/pipeline.html)和与[数据增强](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/augmentation.html)章节。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 查看增强后的数据" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "从1875组数据中取出一组数据,查看其数据张量及`label`。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:53:42.580776Z", - "start_time": "2021-02-03T08:53:42.149224Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tensor of image: (32, 1, 32, 32)\n", - "Labels: [9 8 5 5 1 2 3 5 7 0 6 1 0 3 8 1 2 1 5 1 5 2 8 4 4 6 4 5 5 5 7 8]\n" - ] - } - ], - "source": [ - "data = next(ms_dataset.create_dict_iterator(output_numpy=True))\n", - "images = data[\"image\"]\n", - "labels = data[\"label\"]\n", - "print('Tensor of image:', images.shape)\n", - "print('Labels:', labels)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "将张量数据和`label`对应的值进行可视化。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:53:43.245788Z", - "start_time": "2021-02-03T08:53:42.582357Z" - } - }, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAWAAAADsCAYAAABKZHxbAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nOyde3hU1bn/P+9MQi4kJCQhQLgHSIhiRYxw8Ip6gFKLxapUhFKwniiKN0DrOfhT65FjqwitRbSoQClIxWuLl4LnVDiACEUuyiESucklXJIAgVyZzKzfH3smmUlmkkkys/cE1+d58mT2/Ttr1n732u9637VEKYVGo9FozMdmtQCNRqP5vqINsEaj0ViENsAajUZjEdoAazQajUVoA6zRaDQWoQ2wRqPRWIQ2wBqNRmMREWuARSRHRP4hIqUisldEbrFaUyBEpLeIfCwip0XkuIjMF5Eoq3X5Q0TWikiViJS5//ZYrSkQbUWriEwTka0iUi0iS6zWEwgRiRGRN0TkOxE5JyLbRWS01boCISLLROSYiJwVkQIRudtqTYFoaV2NSAPsNl5/BT4EUoA8YJmIZFkqLDALgJNAV2AQcB1wn6WKGmeaUirB/ZdttZgmaAtaC4FngUVWC2mCKOAwRv1MAv4fsFJEeluoqTGeA3orpToANwPPisjlFmtqjGbX1SYNsIgcFJGZIvKVuzX6lojEishkEdlQb18lIv3cn5eIyAIR+cT9RNgoIl1E5HfuluI3InJZgMsOADKAeUopp1LqH8BG4OcRqBWgD7BSKVWllDoO/B24OEK1Npu2otUqnUqp95RSHwAlkaxVKVWulHpaKXVQKeVSSn0IHAAaNWoWluv/KaWqPYvuv76RqLWlBNsCHgf8EMPQ/ACY3IzjngDSgGpgE7DNvfwOMNezo/vLL/As+jmXAAMjUCvA74E7RCReRLoBozGMcCRqBXhORIrdlWx4M67ZFrRapbMlWKpVRDoDWcD/RapW97oK4BvgGPBxpGqlJfeVUqrRP+AgMNFr+XngVfeX2lBvXwX0c39eArzmte0BIN9r+RLgTIBrRgP7gcfcn0cC54HVkabVvT0H+BKocZ93CSARqnUokAjEAL8AzgF9LwStVun02u9ZYElT+0WI1mjgv4E/tgGtduBqDOMYHYlam1tXPX/BtoCPe32uABKCPO6E1+dKP8t+z6OUcgBjgZvc154BrASORJpWEbEBq4H3gPYYT8uOwG8jTSuAUmqzUuqcUqpaKfUnDNfOjy4grabrbAWWaHXX2T9jNGqmBXlNy8pVGW7IDUB3YGoQ12wrdbVVnXDlQLxnQUS6tOJcDVBKfaWUuk4plaqUGgVkAltaeLpwak0BegDz3YVfAiwmOKPmj7CWqx8U/l0+wdBWtJqtszWEVauICPAG0Bm41d3YaSlml2sUTfiAGyEi62prDPBO4GIRGSQiscDTrThXA0TkB27nebyIzMSIMFjSwtOFTatSqhijI2OqiESJSDLGK8jOFp4ybFpFJFlERrnLNUpEJgDXYrTgL2St4a6rUe7z2gG7R3MLTxdWrcArGC6zMUqpylaeK5y/f7qI3CEiCSJiF5FRwHjgHxGotcV1tcUGWClVADyD4Uf6FtjQ+BGNIyKvisirXqt+juF0PwncCIxQdT2ikab1pxhO/yJgL4Yv+JEI1BqN4acsAoox/FxjlVItiq9tK1pN+P2fwHhFfRyY6P78REvOHU6tItILuAcjVPK41MWsTog0rRgtyKkYbsfTwBzgYaXUXyNQa4vrqrgdyBqNRqMxmYhMxNBoNJrvA9oAazQajUVoA6zRaDQWoQ2wRqPRWIQ2wBqNRmMRQcUqjrDdHlGhEp+63vYb4NxWdILW2houBK1tRSdora2hMa2gW8AajUZjGdoAazQajUVE5KwNGo0mMOqqQXx7V8Nbt8cqG3EftHS4FI0VaAPchrHFx3PooUG4Yhpu67Bfkbx0k/m
iLgD8lWunHTUAlho45/DBAByeWsOBa15vsP3a7rdQyRAgvDo9Oo4Oj232sZlvHKLmcDCDGn4/0C4IjUajsQjdAnYTldmbk9d19butY4ExaJRs3GH8j4nhzO2X4YwW0tcdo2b/QbNkAnVaa+KF1VOfp3tUw2FKh+8aC0tNlXXBIIkJvHPPHHLa1Y5eSJ9PjPkgsz6wRpO6ahCHpxqt8D3X+P9h//eS9xnxwBgAzlfm0m711rBoOXqd0fLNz2v+pCA53Efm0ijT75nWYrs0B4DiwclN7ptQ6Ai67MNugO2dOuG4qLvfbdGFpQA4v90fbhkBiephaNs/KSNghaq9+TaCLTGRiuE5rJg9hz7RCQz5j6l0NKky+dcajnHCNZHGt3dF1bodChzlrCz1ncZtXNKXZEW359OcVQAMnzG25YOMhpH8vAWGEX6jJmJcEZ776nxmJwCkRmHbvAsGDcDZPhqAgvHG/wM3v9Lk+YbvCr7sw2aAbfFG6+HYuP5sn+XfsPX9xxQAsqcl4TxTGi4pAbElJvLt/T0AKJgU3NO8ZnA/1v1xIWYbvqa0Hqkpo73Y6GiPb7BNc2GRt2cCMSMP+qz77zUTWDuwrnkeG+XAlpqCs+RU2PU4lYt9NZU4ldAjykaCrXHfcH7eArJip9J3tnHPu86dC7vGQPjeV4ZxLXCU8/DoKVy/ZBOPpuxr9PgDjjKqlK8n93RFHMGO9h42A3zooUEArJ76PIGM1dbrXgZg+JJf0mWs+QZ4z28u4oub57iX2pt+/ebQlNYxv32Ms8Mq2XfDYnOFaSKSd7Le5ZGPb+TQ0PBfa19NJQ+PngIlZ3CtiObvAz5q8pgNE+bwL4nTAeh//+ZwSwyIv/uqb1Qcv/tkMb2i2mEM9RuYydOm037LQZ913aoLcQZ5/ZAb4ILXc/nlkA0MiX8NgO5RCSwszWD5Yz9myfy53Prco1w0KZ9lvdfWttY6xlciuQO5epHhN9lwVy5q665QS2uAinWSbq8zZgtLM3j73lEN9sspNFoR5346lJ/PXhV2XfUpeD2Xt26c76MVjKfv5GnTaXfWQdfd39J1TSIjXjPeKuJKKnGZrrR5nJx2JQC35vmf5ODdhTeQPv9zMyX5pc/f8sh5sRgg6Bsr1OT85hQjlgb+beOmx3Lps+PZOWQFAAm2WHrFlXCI5kcqNEXmokMAjFhr6JEaha1gF6qmBtuDAxiROqXBMec7RLNk/lz6RBuNsXR7e1SsVaVpEOi+souNrOjADTJvOxG/LR9nK1rwITfAvXoW80TaN4zINzoDZi3rTmypi/ZrdjB+1ky6rN7L0QP9uCIjp/aYsh7C5S/t5om0bwAY0X6YJeEZJx0dsK3b3mC9p5pUXd2ZvKRCn22qpbOpNYNePYsZEuP7JF5ZlsTcZ+4lec12VHW1obGoCNu3xvZIM7725CSOLsmgY3zdLDh3ZhiOskCveTH3Olgaa1T0jDnWGeLYY1GW9lOA0U/S2G/r2vUNZ0tyTdHi8d3avHy4nvxf165v/N677TunN3hVtxp/95U/stdPosPf6wxybKmL+HVGq72191lIDfDB2cN4stdKAPYe7AxA1mIjFlUBScu+wAm0W11EClA51ohZTJ54jGW914ZSSkip0+nbadDnb3kM2HYmrMbOu0y9KajqStKyL4ioxHc/eJIGbDFOtl7+cqM+6pPOcq5ePhN7Vd1TrdPeGjNkNo4JD9kLGXv/TPY8lUQXu9VKmkefv+URezyKHmursK8NT0x9SAywJ3D9Dz97jZHxDqYcuoa0jU0/WYoGGZfPd/fcnnZWkLvufrILS8P+qndm0jCGX9y0m6Ny7BCYWgRQ28Ps0ZnzYnHYWkb1y7Q+Q+L3sfzpf/NZ121tFfa128Kip7n4TxowjO+I/DG1D2hvpMpO9uxdlnbKaEKDd7JGVZcaDtywEK9JidsE/VY4sK0Lb+JNZL0TaDQazfeIkLSA6weuf/HJJfRcFITPrt7783EnZM84ivP
EyVDIapRTF8G/dtzd6D7nR+US88Cx2pavBzN0+ksG8GZkvKNB3HJ2ziQ69BkGNC8YPJScH5VLWUY0Z39YDhhJAwWOcvL21E28Ky+mkRVAm9W+a3unThy7rR+JNkPJjGOD6bA/0h091uKdmOSh7vcPHN7Zr/cJAE5NGWb4Vd+zLhrCH0eujyMh07if6idjhYqwhKFVda3B3j8TqEuykKgoXEMHoqLEZz8PJ53lzD0xEmrM8fllPr6JJzrdwoTRDXPqPagZxQ2Mr2k6a2qYe2IEszPWNOilDcSea5bCNcbn5gSDhxI1o5h/DvRNF1tb0Z/o2R291jhxXXdZRCTi1MdxUXd33LrRW7/+D0PpqMfUCEj9xKTmUHtvzTYiC945MRLbZiOaIhLwbuBcumU8ABmlA3Dt+iZk1wiLAT5w80L6JriTLGakGytTk3npzZcDhncsPH05h4aWA+XhkBQ8ItjT0sAmxEb5+l7LXFXMKhxpik5nySkODYVZm0cyvfOnfvexi6JvVBx2aehJMjMQv1ZPakqDMgPISyokb0XD+GSrE3HaCvbkJIjxM+JSPWwx5od1hSoxKS+pkOFvvlwXT3ymFFVdHRqRTXDaWQFAuXL5TesHasP7hs8dS8zI0F07bIkYniSL4+63Co+xiHTsaWnkfb6J7OiT9IiygVcc5W0Ft2K7vRIzHxJHfxTDjKix/jemJvO7Txb7faiZGYjvodvH1czr9jEEGXtqdSJOW+Hokgz+MuiNJvczogzaVkeXN54ECKcSpjwxnaRlX5hy3dx19wPQYVNcwKzdcBESA+wqOcVDE+7jttfX1MbJ1iZZBBF6MvHgcIof7A6EP/miMfI6fknMZw5Gx58mRuoqcu3rxxMKZ8lRUzU11oKVklNMu/N+VJTQ+bkDPqF84QzED0SvuJKAaaiepJEJz3/YsI54xQZrfDn1YRZvX/J6o4kB/jDrnoratpcR4xsmXjRG/brqwTsBYvqTK3gueULYEnFOfZjF8uzFQHtc1YaR6rryW0bsqvsut7+6ukHc/8Ls5Uz4cAopPy4IiY6QGGBVU4Ns3MGfZ43htSQbyROPNPCdHqkpY8xvHyOqQnHttM282LUuXOpIWTIxJmS+1af/ohqyEyYBhv803d7enRTgG0J3tsSoFF12md+p1Riechfg6K9zGfFYok+5j0v6krc/uJtukwvD9opvGziAwmeFbpMLgara9Z5EnDPLjIFO7A5F8prtnJzdASj0cybrOT8ql25P7fVZZ0aiTWP8pOdXzTa+YN495Tp3zm/yUmMc/XWuTyIWQFkv8fG5jkso5anUkEj0wZMQ5HmoZa+fRP9Fhs/ZWVSEbV1R7b5/njWGPbP+6WOrsqLb85OeX7E+RA2bkLog4t/bTDxQeXoIOYPu89lmq4aei3fgqqjgy/G9oav18aqycQe2EUY6rKfzypvs9ZOw5SfQY0dkdAo0RrvVWykYlwte9Toruj1/GfQGM2ICuDBaibpqEIWPVrP28jfInX8/Tyf+N56H19VpRnbbO6k96PHX4+Q/nsKJ6y/hmcRXaCq/3irKMqJ9WmZmJNoEy7Vf30LRJv/Dpf5+ov9Y8XDjSbKRKjvZj+8OOn673eqtpNRbl96jO1mxU9kwYU5dp3M4Hn4xMfxl0Bu1DzVbfgKy0X8rO/69zXx5b++w2qqw+IDjPthCTz/jprow8v89KaiRToe/tydlsfVjEQRDsIkloeR0Vhw7hywB4t2DANUZ1qc6GSF+8ZOrWdDvRg6MXujeUrfPr4suAqBkTQYZHDRFc3Pot8KBa2e+1TIAKNrUlZ5P+9ZFT7KOP27K+Jo3p40Kyyt8/SSbk85yrj43k/4vH46YISZDgRm2SidiaDQajUWYPiPGrXn/qB18Zfk5w8lzdFtXMi1oAamrBuHKKfO7beLB4SQUmv9a1xIqfjqUqx/y9auDMa7pHTvuplt1eHyuCYUOJh4c3ug4Ho+m7OPRm30H25lxbDBflvSkZE0
GYO1AO20FV04Z6iqjtSsbdxgJI+P6B5wR5dGUfVTn/YP180PfCeuZEcOTZJFub0/BpFcY8ckUnwF6msP5zE7u8Xi9/N0hzn+pn2TT1DW8bZWHtZU23thyNVmEpj/I0imJnthwCwBZj5sf6G4bOIDCR6vZ447vq8/upTl02bbXsuEHvbENHEBNalyDxAVPcsvPZ69q0FsLsLL0crqMzQ/bd2i3eivFJQN5dtGAZh23ZfYVxL+3OWLcDp4ZEcp6GU7HauXgpdMDsJc7Imawoz3XLOXSGCMaJz3qMopyYn0SRgDWVNS5d8LpE44tMf6vLEtiXEJd525JTiypXOazb1RJZYPEBU999qYkJ/zROq5enblz6mo62RuPqfbcV+nRaxpse3rfzWTdHbrOePMMsDvBIcZmGI+TznKkyrrhkSrnVrFzYOAJvrY9+QpDqqbScUlRwH3MonJuFWsH/qUucSGI5JYyVxXfVaYS7phltXUX63/QvJsnnshKOd1/V0+gLvNpv8PBZzf2RZ2wNiwS4LvKVMpcVSTYYmuTAfDTZjhSU8YjbzxWu/zOPXPC9vt7/Mpzz4znCq8MuG1PNpyu54ff3IR9fLrPOtdL5/h0wF8avcaRmjJs50Mk2I3auovPbuzLjzd/TU4742HljFPYEhMNXefOIVFR2LIyG00aCyWmGWBPgsPo+NNANFcvn0n2bKOCR0Ivc1ugOcktViSNaELP0R/FcNvbtzY5y8SY3z5Gz8V14xTMWDTWnS4fvt8/+e3tTC6d7s6E8887We9yeLPvHV4/wckfnu8TbttQf2YO19CBvPTmy6YljZnXArYJ2dEnaxMc7JWihx0MkvqzHTSW3GJl0khb5ODsYfzhZ69ZLSMgzpJTVNf0bnSfwc9Mpeu73+KsqKhb6f05TKjqauLX5nPdPXk+s114k2CLJadd887r9/uEiPpJY+n29vzlR/MBWPPVJaRHr/Hb8vW+r0L5ULDMB3zX7atZWm79bAeNYXUQvgfXrm/o/MIgsh+sSxqpT+b795C61UZn96hNrl2hHbXpQuV8F4clMbTNQV5M44qMqQG3d/lwL84ia1xlrnPniFuzk/GzZjL9yRU+PuHmsLIsiReeuxMI7/fxJC+ddNQlBHlmxRgS03CQnXDfV5YZ4FVHf0DHSJjtoI0gG3fQI9qIv8zJv6/B9uz3T0dMzGpboscqGzmH/SQNnYucB5i/xAVvrO4oVtXVJC37gueSJ/BUWsvOEVsM6e6YezO+z7sLbyDmXkejsx73+VseAxaeDet9ZZkBLtrUlZ4fWNfyLVmTQZ/Ddze6T/+CyBqjwDPbRc+1DbdpP3rLaCxpSNM8ImEC1WBJn/85S2NHsSAncE9fOGe88aATMTQajcYizGsBV1dzx45f1o58ZfUsA5Hqd9ZoNObQlA0wwxVimgF2nin1GfM1JkKC8DUajcYqtAtCo9FoLEKUipRkS41Go/l+oVvAGo1GYxHaAGs0Go1FaAOs0Wg0FqENsEaj0ViENsAajUZjERFrgEWkrN6fU0T+YLUuf4hIioi8LyLlIvKdiNxptabGEJE7RCTfrXefiPiZktR6RGSaiGwVkWoRWWK1nkC0pd9fRJaJyDEROSsiBSLSeD6+hYhIbxH5WEROi8hxEZkvIpZOIhGIltbViPwyAEqp2rHtRKQ9cAJ42zpFjfIycB7oDAwCPhKRnUqp/7NWVkNEZATwW+BnwBbA/1S7kUEh8CwwCjBngNaW0WZ+f+A54JdKqWoRGQCsFZHtSqkvrRbmhwXASYw6mgx8CtwHvGSlqAC0rK4qpRr9Aw4CM4GvgFLgLYzRlCcDG+rtq4B+7s9LMArwE6AM2Ah0AX4HnAa+AS5r6vruc/0C2I87bjmStGJMYnUeyPJa92fgN5Gm1X385xg3YJPlbrVWr3M+CyyJRJ1t7fevd95s4BgwLhK1AvnAj7yWXwD+GIlam1tXPX/BuiDGAT8E+gA/cH+ZYI97AkgDqoFNwDb
38jvAXM+OIrJARBYEOM8vgKXK/Q0jTGsW4FRKFXidaydwcaRpFRE7kAt0EpG9InLE/VoXzBPb6joQLPr3b6JM3esqMIzKMeDjCNX6e+AOEYkXkW7AaODvEaq1RQRrgF9SShUqpU4BqzBes4LhfaXUl0qpKuB9oEoptVQp5cR4MtXO4KeUuk8p1WCgWxHpCVwH/ClCtSZgPGm9KQUSI1BrZyAauA24xn29yzAqXaRpbSn692+iTN3LiRh14D0MYxOJWtdhPMjOAkeArUDgiRyt1doigjXAx70+V+A9FWvjnPD6XOlnOZjzTMJ4dTgQ5DXN1loGdKi3rgMQzHxLZmv1DHD8B6XUMaVUMcZT/UcRqLWl6N8/iPMopZxKqQ1AdyDwdBt1mKpVRGzAaowHRHuMVmhHjP6LiNLaGloTBVEOxHsWRKRL6+X4ZRLBt34DEU6tBUCUiPT3Wncp0NIOmLBpVUqdxmhJhGoAELPqQGvRv39gooC+LTw2nFpTgB7AfKVUtVKqBFhMcI0Ff0RkXW2NAd4JXCwig0QkFng6NJLqEJErgW60PvohbFqVUuUYT+lnRKS9iFwF/ASjI6YlhLtcFwMPiEi6iHQEHgY+bOG5wqpVRKLc57UDdhGJbWEYkv79AfdvfoeIJIiIXURGAeOBf0SaVvfb2QFgqrseJGP0Be1s4Skjsq622AC7Ox2eAf4b+BbY0NJzAYjIqyLyar3VvwDeU0q1avpkE7TehxF6chJYAUxVLQxBMkHrfwL/xGi55QPbgdkRqvUJjFe/x4GJ7s/B+KvN1tlWfn+F4W44gtGzPwd4WCn11wjUCvBTjM60ImAvUAM8EqFaW1RX9XCUGo1GYxERmwmn0Wg0FzraAGs0Go1FaAOs0Wg0FqENsEaj0ViENsAajUZjEUHFVI6w3R5RoRKfut4Wf+vbik7QWlvDhaC1regErbU1NKYVIng4Sk3LODNpGABxE4/5rI+bHotr1zdWSNJoNAHQBvgCoXDmlZTlnGf4xbsAWNxzvc/2EalTtL9Jo4kwtAFuhDOThnE2M/AbROYbh6g5fMRERb7Y4uM59NAgXDFw122reTRln2VaWoPt0hwO3tLRZ13v90/j2plvupaoHt3Z/8ueAbd32K9IXrrJREUaM/H3+3dbW4V97bawXE83ijQajcYidAu4HhITw5nbL8MZLVw7bTMvdg385Bvx2RRsFraAJTGBd+6ZQ067+ID7lLmquK3gVqJKKnGZqK0p1FWDOJ1ljANfkuti/y2+Y1tf8d1UUlo67EorOJ/Zify8wONszzg2mC1lQ4l/b7Mpeip+OpSqpJa1k2JLXabp9Mb7t/VH+rpj1Ow/aJ6gZuDv98/OmUQvhzGksGzcEdLraQPshS0xkYrhOayYPYc+0SEf+jNk2Dt1wnFRd6o6RBMrgc3qSWc5swpHYru9EmfJURMVNs23d0VxYPQrAbdLRPVl1/Fi1208+3QF69+LNeV6o55exxNpLes8fbZ4gGk6PdgGDqDw0Wp2DlkScJ8h/zGVjhFqgP2x55ql9Ckz5i7N2hjac7faAEtUFLbUlNadpLoa55n6kwqYi8TEUDE8h3V/XEhj4y5XKwf7HQ7jmBpzrYQt3mjpHhvXn+2zPE9p/1rLXFXMKhzJoaHlGEOhRjbe5QpgM7lswagD5ztEN7lfjM2BvVMPnMXFoAezqsWemoLrpXPsHPBRo/s52gu2+HhcFRUmKWs9thgnAPbkpJDaqlYbYNfQgcxb3rqpke7Y8Uu6jLXWAJ+5/TJWzJ5DU4Pev3R6AJ/daIxfbSvZFbKRzYPh0EPGa9Dqqc/TlM7bCm7FdnslbcH4gm+5AiSf2W5q2ULwdeDBjt/Q//MTLLxyGM6iInPEtQG6fVzNvG4fY8yBGZhVv3qeUR0eo/tzn5sjLARsve5lAIYvCa2tarUBVlHSqA8yGN6+7HUmfDiFlB8XNL1zmHBFSVBuh18kfUXB37pw9EcxqJoaE5TV4Wpn/O8
eFVjnpVvGA5DxhIo4twPAqQ+z+EnPr/jPxFcwpqcz8C5XZ8kpUzWdnHYlAP/+4HKfOjD4mamk5ldx8pEqdg5ZUbs+RqLJjj4JtkZj7EPChrtyGdF+WJP7dX7uAMt6rw27nsboFVdCgs3X+E48OJwT/96H8x2iWTJ/Ln2iE+gelcC8X77GAwnGa33vWZEfVdLRbti4jvGVTezZPMLmAx6RPwaAM8u6N9hWv3MrK7o9P+n5FeubeHKGA0/iwjUPBNdZkW5vz7xu/8Ntb9+K7cEBpiU3FM68krtuXx1we+b795C61UbnAqOCuHaFtrMgVPyk51dun6bvq/7aygy+/sMlJJdtN1VP4cwrmTTZKNdxCb4tm9T8KmzrttO5ZhDZD04CDH8gQI8oG64V0WGvA2rrriZDlQpez2VW5/8Nm4bWcKQsmZh124mLiWH8rJlMf3IF4xJKGRnv4HwXR9MnMJHzo3Lp9tRev9s89kxeTMOY+T40tNoAt9tfRM7ChpODdtphtA5TPmj4dNvgHMaU+9o3SBYwm1N3DWNgnpG40Fi0Q30SbLH8fcBHpiU3FM68ktt+vjZgnG+fv+UxYOFZS+JmQ8ELp/ry5qujSF/2ualuh0DletJZztXLZ9J//2FqMHq+bSOMVjLXGP/MrgP+sCUmsuc3F/HWjfMZEuP7QHvhVF/eXXgD6UTGa76qriZp2Rc8lzyB7+6NrJj1yrFDAIh54Jjft4hrv74FXukEQNzqLSG9dqsNcM3hI/R82rpQrJZyZpJhfK1+CARD6shCnuq0O+D2fiscbdb4AnxUeAnp8803FIHKtcQp9Ju3j5oTJ03XFAyexBVnrOKLm+eQbm9fu+3XRRcB8M6fh5Nhcpn+edX19LmtiAmJJbXrbsr4mjenjar9fdPnf85HN18SUQa4aJBhBvNzVvnfvqkrPT8IT1nqRAyNRqOxCEvigOMmHrO85RmMhhnHBvNlSU9uyvgawJKndsVPhzIk9Z9+t0VqkkV97J06UfTjfgBkxb5psZrGKXCUc8eOu+lWXeizvsN+wzky49jgZrmrQkXgxJX2tbq+LOlJyZoMADLmmP9G0XvWJp62j4NbVta2gh9N2UevB5cz98x4klZ2cEAAACAASURBVN/eTvlNgwLWZ8toyu8VRr9YRCRiFDjK+euhH5CCOVEQkjuQ7gmNu00WlmawZfYVxL+3mUVPjQLg0XtaF27XEgIF4tdPsrANHEBNauDsI4Do3UcsCZtyXNSdf85umHSxpdrohPnuUBpZIezYCIZAdWBl6eV0GZuPs956z/gP623D4L/qDHBJTixddncKa7k2ldzgXVczTC7H+mQ+volnnOPo9LPXARgZ72BcQilXzJ7D5NLp/Hz2KvKSCps4y/cH0w2wPTWF2Cjf3s+8PRPMCUETwZ6WxvVLNjVozZa5qjhcU9eOfOfukcRvND+Nsxa31hjbfr+b/1T6A47e3B6iwN45HddL5/h0wF8aPeVls++j6zuCqnBHSpw7F3LZwXLSWc4dH08HIOt+E8u5iTrwXWUqzYmdfvffX2B82UySloXPAFfOrWLnwA8arHcqF/tqKq2vq/XoPWsTj5T9G2DErHePSqBPdII7yUnjjekGONhg7XBgT0sj7/NNjI4/Tf0wqNsKbsU23isTy+Qki/o0phWMZIAfb/66drlHlI1gAuDPPWrjpo8eAaC/mYavHlcvn0n2bCMCxUz3SZN1oJnJK5OnTSd5jflJIwD7aip5ePQUbAXW1lV/9Py9EQY55uxjXlmbmvqYboC9g7W9kwZMuQltQnb0SWKkYeJIVU00MScaJi4cnD2MP/zsNTPU+dKIVjCSAXLaNZ02640ngUPF1n/BDj2e5IZb8/5BenTD+GV7pVjTAvdTrq1JXml31oGqrg6pxGBxKoGSM6YnBAWDJ82468pvGeycyrYnG7qg3rrBWPez16eSdfdWU/V5k7noEAA5cl+jAzGFA0t9wGdLjA6ELrusK/ymAqz
Pd3EwMr6uZbyyLIkXnrsTgE679zbwFWoMqlKN/y0dSMZMmlMPVfiT34LGkwxSXdMbeTGNdqutu48C4SwqIjW/YTIWUBu7vPj6RUx5/S7LjLBnTO+E73qYfm3TDLAnaPw/E+cD0Vz79S30WGVeFJy9fyZ7nkqii913/d6DnQHI8lN5C2deyeTctT7rCqq6krLY6JDRxrchdTNzfB1wn+z1k+ixrspEVa3HE6yfPNH8mPfKZV2Zct81DaJ2PMkgACMeG0PBuFzSNhpGLWVR5Kf3ehge5+KtG1/hjpenkf34btPfjJr8bcP40DXNAEt8HB/dNK923IhwBjf7w5GRxL4bFgONj1txocwy4Y8R+WPYe7Bz7U0aDlJHFvK1nw4jD5duGU+vl2zIRvNDuVpK5dghMNXoZPvUHax/2llB7rr7yS4sDfuDOHnpJnZFDWNKnrHsL3zy05xVkANTLjZS9b7oeSWxxViS4OKN7dIcCsbX1bdLt4yvfeMYfvEewPg+Q2Ki+eimecx4ZiyYbICbSsQIJzoRQ6PRaCzClBawvVMnjt3Wj0SbV1dbBHTbvnCqLwn57XzWNTbLxNpKG29suZosIs/X1hQTDw6n5vnOfl0toSKYpJGMJ1TEDhQUiMNjXBy45H2fdcedkD3jKE6T0pVTFm1id40xcNTwiYaDPTbKwTtZ7/qMQFbbOs5bz8qypNoECKs6CosHJ3Pg5roOuPR5sXRZZ9TBL55yj69xj8XDAVzoiRiOi7q7Q1GMXviVZUnEljR+jBksensUmW8ZPaDnr7sMoNFZJp7ed7OlvbXNocBRzsrSy2uXix/sTrut4dEuUVG4hg4MGGQfyTNzNIbkDsTZPpoOqZExpnLtZKDGgGzYUlN45OMb6RVXwrikL8mKbu+zv3cCRPzafEuiTiJ1ZpNgCbetsiQKYs5/3Un6EutHaXLGKb693+j5LJjkHSbjO97uaacRUnO6Io4uZolzKfY40smMPk2MRPvoKFcuukcl1AbiO/10zd+x4266jPUeoGdX2KTaUlOYt3yB37eGtjAzR+1sB53Tfdb7S9YAY/aOPY50cFlrXZwlpzg0FA4Ry9sf3M1fBr1Bqt3Q5Bmgx5MAMWL8FGzrzB3qE4yZTQ44ymrHWT7fIZq4mBjLWuTNJdy2KiJSka1iw4Q5XkvtA+6Xu+5+ALKn7Tct8sFZXMzCK4fx7Wf7a42AR0eHTXFsn7WgNhCfkjMNju9WXRgRURptYWYOz2wHx+vlpWRGR+MvCeal0wP47PpMY0qiCKHb5EJmxIxl7yPGrCK+DQrrSH57O5NLp9dmwS2ZP5fxs2aStOwLi5VFBt9rA+w9jJ8/chbeR/fPKskuNAbqNnXeOqVwFhXxP5OHsab9tQC1OjhzhBG7piA1ysiCisBA/EidmcNVcoqHJtzHba+vqXWX1M52YG/sSIOJB4dT/GB3VFH43ij8cXD2MH4+5rNG9jDC+h6Ob2wf81HV1bQ7WxdH3yc6gelPruC55Ak++y0szeCdu0diKzG3XK0m7AbY3yjzVgSzR+8+wmWz72PVr55vdEqfIzVljPntY0RVKDLXFVKz/6ClLUnvGRG8ddjWGWFRkepii4QkG3+omhpk4w7+PGsMe2b9M+iRzTwJOzXPdw6bL70xzndxtCipxVOfu+7+1rJ6XP/eG5dQynf3+mZHnnR0QDbuiLj6HG5bFXYDXJYR3WCU+eSJR6g8PYS4D0I7unxjOIuK6LK4nFEdHsMVE3g/WzX0XLwDV0UFkdeujEzUuTJu++NMn3LtsSOySy/+vc1sSBhGTua/BLW/Z4aXUM+IECw9Vtm4tvstAPxvvYiMQKypiOaRNx6j5+IdOC2cgdj73pv3y9cYGe+IqPj6bu6koBxpOLNP722nwzpMgiUuiE9zVpEz6D56Bo7XDwuuioqgZmKN5LF1I5FgyzXSSF66iWSrRQRJ3AdbqMTI2Ooz5u6gjml3PJrez30eEfXZU0ceSLjb71xw7Y5
H0xtrsvfsa423oJ5rG24Ld9npRAyNRqOxCEtawDOODa6dYUCj0QSHx2WXZfKbYyhpC1PQm0nYDXBsqYtniwf4rNsy+wqS39M/hEaj+X4TdgMc/95m1r/nO1B4PJEzer9Go9FYhSilXQEajUZjBboTTqPRaCxCG2CNRqOxCG2ANRqNxiK0AdZoNBqL0AZYo9FoLEIbYI1Go7GIiDTAIhIjIm+IyHcick5EtovIaKt1BUJEponIVhGpFpElVutpDBFZKyJVIlLm/ttjtaZAtJVybStl2gbvq94i8rGInBaR4yIyX0QieghdEenvrgvLgtk/Ig0wRoLIYeA6IAn4f8BKEeltoabGKASeBRZZLSRIpimlEtx/2VaLaYS2VK5toUzb2n21ADgJdAUGYehuOGRZZPEy4H9iRD80aYBF5KCIzBSRr0SkVETeEpFYEZksIhvq7atEpJ/78xIRWSAin7hbBRtFpIuI/M79RPtGRC7zd02lVLlS6mml1EGllEsp9SFwALjc3/5WanXrfU8p9QEQ9OxRVmltCW2lXHWZXlj3FdAHWKmUqlJKHQf+DlwcoVoRkTuAM8D/NLafN8G2gMcBP8QokB8Ak5tx3BNAGlANbAK2uZffAeZ6dnR/+QX+TiIinYEs4P8iXWszsUrrcyJS7K5kwyNca3PRZXrh3Fe/B+4QkXgR6QaMxjDCEadVRDoAzwAzgrwWELwBfkkpVaiUOgWswngdCIb3lVJfKqWqgPeBKqXUUqWUE3gLqH2iKKXuU0o1eL0QkWhgOfAnpVQwUwJYprUFWKH1V0Am0A1YCKwSkb4RqrUl6DK9cO6rdRgt3rPAEWArEMxYcFZo/U/gDaXU4SCvBQRvgI97fa6g/rTBgTnh9bnSz3Kj5xERG/Bn4DwwLchrWqK1hZiuVSm1WSl1TilVrZT6E7AR+FEkam0hukwvgPvKrXE18B7GjLlpQEfgtxGodRDwr8C8IK9TS2t6FMuB2nnIRSSkM7aLiABvAJ2BHymlGg6jHzxh1RpizNaqgJbOfNVWylWXad352sp9lQL0AOYrpaqBahFZjNEp+1gLzhdOrcOB3sAho3hJAOwicpFSanBjB7YmCmIncLGIDBKRWODpVpzLH68AOcAYpVRlK88VVq0iEuU+rx2j4GOl5eEyYdMqIskiMsqjT0QmANditDQiSiuEtFx1mdbRJu4rpVQxRgfhVHe5JgO/cF+zJYSzXBcCfTFcHYOAV4GPgFFNHdhiA6yUKsBwOv838C2wofEjGkdEXhWRV92fewH3YHyZ41IXXzmh0ZNYoNXNExivKI8DE92fn4hArdEYLYgioBh4ABirlGpR3GpbKVddpm32vvopRmdaEbAXqAEeiTStSqkKpdRxzx9QhuE/LmryPHo8YI1Go7GGSE3E0Gg0mgsebYA1Go3GIrQB1mg0GovQBlij0WgsIqiQnhG22yOqp+5T19t+Yyzbik7QWlvDhaC1regErbU1NKYVdAtYo9FoLEMbYI1Go7GIsA1uXDl2CACHx7gC7pO2MRqAlEWbwiVDo7kgsffPJP/xlIDbE/LbkTHncxMVaVpCyA3wmUnDOJspdBp2DIADl7wfcN8pF18DwO6aYSQvjUwjXDl2CEWD/BdT5huHqDl8xGRFbZuT066kKs13XYf9KiJ+f9ulORy8pWOzj+v9/mlcO/PDoMg/tktzyJ+ayIHRCwPu88LQvrxZNYr0+doIRzLaBaHRaDQWEfIWcNzEY2weGMyQnbC453oAhk9MhaWhVtJ6zo/KJeaBY+TnrPK7fcRnU7DpFnCzuDXvHzyR5jv87Ixjg9lSNpT49zZbpMqgeHAy+XnNHw8+s9M9ZC/MCVsr+PyoXMoyomuXS3JdHLj5j40e82jKPno9uJy5Z8aT/PZ2VHV1WLQFwnZpDmCUqT/Stp0x9a2hOUhMDGduvwxntJC+7hg1+w+G7VoRMcFd94QzFOcORG3dZbWUWiR3IN2e2suy3mt91p90lrP
wtDGDi73cQUTFvLRRXuy6jWefrmD9e7GWXD+qR3cAynq1bATJ/bf8kZyT99GzpeN0NYGaUcw/g2zUeDMuoZRLnp3LjE/H4jxxMgzKAnNwrOHKyb/H/wOtz9/y6LfCGN88qqQS165gxoQPD57f/3xmJ+N/h2hWzJ5Dn+gEhvzHVDq2JQN8uiKO/PMVje7TI8pGgq3uZlvWey3PLhrA+h9YcwP64+pFWxu01AAWnr7cS2fkPDAiHhHsaWnE2PY32FTmquK7ylSMIVvNZ/9dPQFa1PrVNMSWmIgzrq5pUq0c7HcYww5nRkcTI9EcuHkh3Gxs/+E3N2G/PQVnySlTddqTkyAmhm/v7wFAwaRXvLYGN4a7PTUFouqZ0epqnGdKgzo+5Aa42+RCZsSMbXQf14po/j7go1BfWhPB2NPSyPt8E6PjT2OM4FjHbQW3Yru9EqsMsCa07PnNRXxx8xz3UnteOj2Az240Zmi6/n/28WjKPp/938l6l0c+vpFDQ83VeXRJBn8Z9Aapds/Don2zz9Ht42qmd/7QZ90dO35Jl7EWGeCmLP+pD7NY3ncxLfmymjaMTciOPkmMxDfYVFUTTUzJUQtEwcHZw/jDz15r1jELSzN4+17fsbYz9x+iJpTCguCAo4zJ06Yz4fkPyUsqNPnqgVGxTtLtxv098eBwih/sjjphvC1Wu6Ib7J9gi6VXXAmHMPcNuGN8JTntGtbHYLCnptDt42pmZ6yp/a7e5w0W033AP+n5FVnRvoJnHBvMltlXEI+1nTBgvJYcXZLBuKTXiYSHRMHrufTqWRy28393KI2su7eG7fyNMSJ/DADyYhpw0PTr7//NMJ6+ZSUj431n5TlSU8aY3z5GVIV/D39sqYv4db51NZzGV15M44qMqQ3W2x2K5DXbOTm7AxA5BtibI2XJxHj17ax++jr+mnQ9AGd/aLzx7LnG/B74gtdzWdx3UaP75Cy8j8z/LWzw29oGDsD10jnmdfuYBFvrbEREdMJ9WdLT8h7wWmJi+MugNxo8JABeONWXdxfeQDrmxVb+csgGv77oUPFslwGsN7nl4WHvwc4AZK225gHQbfAxJiSWNFh/zmWj6zt7Te+4CkS71Vvxl3JhS0xkz9xBPJM4n/punUgl/r3NtROzlfW60vhwDYxM/JpFL08j+/HduM6dC9v1bYmJ7PnNRbx143yGxDQsszUV0Ty07N8A/3H+6qpBFD5azc4BH0EI7hvTDLAtPp5DDw1iSHzzXvfMJKpHd769v4eXT8iXjwovMT2w/c+rrqfPbUV+DUVjLD+XyhMbbmmwfnLu5zzVaXeo5AVFU+UaSWypdnDHx4+QXWFuGTUXT5l+cfOcBq/AkcRNGV/z5rS6hJCT066ky/rTlPdJrE3WMhOJj+Ojm+YFdD1sqehLz6cNrf7eak5nxbFzyBK/x/666CIAStZkkBHkG51OxNBoNBqLMK0FLIkJvHPPHJ8nz/JzqQAc3daVTAt8gPU5n9nJHYoSOS2K3rM28bR9HK8NbthaiI1y8E7Wu7UhfS+c6stHhZcARplmPd4wvfedmcP5bGRW7fJ3h9LIIrwugEgs10CsOXcJWdN3cNodiO+P2FKXpS6zqMze7J+UQcGkBTRWpgWOcu7YcTfdqs31Dyfkt+OFoUbUgych5IXyOwH49weX83jOHfQdUMinXglOa85dQv/7NxN45Bhr8JcEE4glWw2XSlYzxuAwxQDbEhMpH9KbWPEV73lF9mcoIokt1UYnjWGsDpp+/cwA5WPrnM7hzS5y2sHKsiTefLXuVS/QAy1jzucwp2453N/H3qkTRTkNfWVrKqJpdzzy/Jbp0WepHHltbSC+P54tHmBZ0ghA0bVdm4xZLnCUM+HrKXQZm4/TJF0eMuZ8zqL2RpTIo/csYFxCKeNm18XYjrvFN4uvwFHOXw/9gBQKTNXpj/Tos7iuG1a77C8Zqz4ry5IoqOraovocUgPsNygZKB/Sm3V/XIh3cPNJZzlSZQ/
l5VuFxMRwvoP/AvzZP4weaKuiBeojMTHYkpMgNRm7GH7VOf91J+lLImfgFVu88aZzbFx/ts/yNRZHasp45I3H6P1c5Oj1kJdUSF69ulqfGJsDe6ceOIuLwaRZxW2JiUh8HACO9k1n7OXtmUDKj603aI1x2mkkbN2+/W66jI2MtOS8pELyViwOev8DjjLmPnMvScu+oDfNb0iG1AD7C0oG3C1f3wp99fKZZM82wlMi4bXjzO2XsWL2HILNgLGSM7dfxuJn52IXRd+oOKvl+OXQQ4MAWD31eeqX6ZjfPkbPxTsi4ndvCQ92/Ib+n59g4ZXDcBYVmXLNPb+5iI9umgdAoq3h/dQWyV13PwDZ0/ab3koPFZOnTSd5zfYWD0nQagMsuQO5epHRMszruDHoHtn5t73OA867AcPPaTWuKPH7ytnnb3nkvGjE4UZKJXFFiY8vffAzU+ny0d6I0Qfgamf87x7VsEyjyxWuisbT1SOZGIkmO/ok2Fo2dkRzKXg9l7dunN/ipAGzqZ/c4p24cvurq2uTRlZcZQyn+bM5UyPm7TIYvL9P/LZ8XK0Y6KjVBtjZPtorTjX4TpaR8Q6evG0lAK9dcY1lCQFnJhn+nmse8N+pEnssCue3DccvsIJAWlPzq0xriQVD4cwruev21X63BQputwJPgkNTCQHZ6yfVfrYiaaBXz2K/Mav1yV4/iQ5/N+7BhEIHViS3AJzv4vBJbjnp6IBt3XYA/jxrDK8l2UieeKS2Ey6ciUb1cZ0pZcoT05n+5ArGJQSXLlwf7+/T2rc4SxMxPLGtEwZ+wJb+Dn72huFrzfnNKVOM3qm7hjEwz3CDvNh1m/+dzGnkBMXZTEOMR+tJZzlXL59J//2HI8KgeSjLOd8g399D988qwzq8X3PwJDgkHRgMQE7+fX7367G2iqPD3Z1u15ijzZMwoGKdPNsr8KQGPsfkJ5Cy2Fq/euHMK5mcu7Z2uX7ykicRo+DqXDBGrOTfeq3nmdnjTHkTVtXVJC37goLHukILDHCok7EiIhMOYEhMNAdGvw5AH0cesce70G1tFfa1AQxjCCi+ylE7JrE/RuSPodMO601b/VlGNKHFU8d6rm1kp+FX+iym2hV7H+lL/5fbhXxWlJYkWURKXU0dWchTnXbXJiW88+fhZDSRvDQhsYQDYz5j/SzzIkv+vOp63uhytc+6fr1P+ITGeZhy6BrW/l824J7qKYTJWDoRQ6PRaCzCtBZwmauK2wpuparG8GX9W6/1AdNrD9xsOOezcybRy2H0psvGHSHVc35ULv16n2h0nzPLupPygfUdhIFmGUm3t6dg0iuM+CTyZ+bw/P5RJZWWRD9EZfbm5HVda5c7fbi3eX7zet3c4Sz7liSu7Psmg9SONiqnGP0ECYUO2lk0xga0LCnBTLzdHZ7ZO/bkZdS6Rbz54pNLyPp1eL6HKQb4pLOcWYUjsd1eWTvs4DOzx3FgzGe1+6RHn20wpN6ea5Zyacx4ADJKB4Rs1PxAs114s7A0g9hS6wKlJCoK19CBqCjh+JkatlQ7guqIsRrbwAF0SG04ru/hGhe28Q6cJ8yd+cAz28H+SRk+yQuDY6aSmt896PNUdfV9va9WDl46PSBiZkXZf8sfwWvoj4kHh3Oi6rLa5ahte8M6yE1bxd4/k/ypiQBNTvMUDsJugMtcVcwqHMmhoeV4D7jde9YmH5+PuupfGP7my/SNisMudZ6RnUNWADB87lhiRoZGU6DZLgCcysW+mkreuXsk8RutSze1paYwb/kCctrFc9ns+xh/Po99N/gGiHu0Sk0kmACDyrlV7GzB9DnhItBsF9uefMXf7sGf1+Hgsxv71o5zG2ks670WVqytXR4xfkptz70Z2GKMoEh7cpLPGOGeZC3PdrBuRhR7chJ7nkriwA3+Z5c+4CijStmwnQ+fhrAb4GBnO7Bt3sXDo6fwu08W+x0K0iz21VTy8Ogp2Ap2RUTLBmDVr56nvdgA3zjQSNSq0QBsve5
lAIYv8Z0dwpOs1cUOnvps1YwoR5dksPXyl6l/X3mYPG067bccpOe58CUNtboTLmrbXq67J48DjrLadX3+lseI8VMYMX4K8mBiUHM9qZoaXAX7mXbn/SwszWitrBbjVAIlZ1A11vYou0pO8dCE+1hYmkH3qAQ62n0rycLSDKbdeT+ugv2Wa/Vw6sMsFmYvb7B+YWkGD024D5fJc36Fiwvt+4SDjvZ4Otrjefuy17nmqyp6bm6PPTWFXnEl5LSL96nPVTXRps8HB8bMFfXvK2/anXXgPHEyrElDrW4Bu86dI27NTsbPmlk7etQArymnm/PkUDU1yMYdtcHa3pgRWL6yLIm5z9xL8hnzXtUC0VhZgHtEro2bI6rl2+hsJxZp7f3BaQAy0+8x/KQh4KSjA7JxR9i+T/TuI1wxq+EMGK2h025zMiX9Jbc8kfaN0Qn79q2MS/oST+ei1TOiBMIzI0rX3d+GvcxC4oLwBDd7aG1z3XvU/HDw7sIbWJ52Q4P1scWQvuzziDJq4S6LUOKvXDvsVyS/Z10kiachkL0wh5wi/4kWzSW2mLDOiuIsKiJlcWgzG81KU6+f3JLNJPZcs5QEW6x7Il7D+F779S3wijENfNzqLSapa5o1FdE88oYxVonThHT5iEnEMBOzZ7X4vhDJ5eramU/PnVar+P7gSW7p5RhEn7K7G2zvscpG3AfWGt6SNRn0Oeyrrd3xaHo/97lpoZI6EUOj0Wgs4nvZAtZoNOYgG3eQtdFqFf7JiIAkEd0C1mg0GovQBlij0WgsQpRJU6poNBqNxhfdAtZoNBqL0AZYo9FoLEIbYI1Go7EIbYA1Go3GIrQB1mg0GouIeAMsIv1FpEpEllmtpTFE5A4RyReRchHZJyImTd/YMtpCuYrIWrfGMvffHqs1+aOt6IQ2p7Ws3p9TRP5gta76iEhvEflYRE6LyHERmS8iQSW5RbwBBl4G/mm1iMYQkRHAb4EpQCJwLRAZc9kHJuLL1c00pVSC+y/bajGN0FZ0QhvR6qUxAegMVAJvWyzLHwuAk0BXYBBwHRDUyE9NGmAROSgiM0XkKxEpFZG3RCRWRCaLyIZ6+yoR6ef+vEREFojIJ+6n10YR6SIiv3M/Kb4Rkcv8X7X2fHcAZ4D/CebLWKj118AzSqkvlFIupdRRpdTRCNXalsq1WbQVnVpri7TehmHkAk5jbqHOPsBKpVSVUuo48Hfg4mC+VLAt4HHAD90X+gEwuRnHPQGkAdXAJmCbe/kdYK5nR3cBLPBa7gA8A8wI8lqWaBURO5ALdBKRvSJyRIxXkLhI0+pebhPl6sVzIlLsvimGX0A6tdbgtHr4BbBUNZ05ZoXO3wN3iEi8iHQDRmMY4aZRSjX6hzFS8kSv5eeBV91fbEO9fRXQz/15CfCa17YHgHyv5UuAM41c9/fAr9yfnwaWRaJWIMN9rq0YryBpwEZgdqRpbUvl6t4+FMOlE4NxA54D+rZ1nVprcFq99uuJMaRxn0jUiTGX8pdAjfu8S3BnGTf1F2wL+LjX5wogIcjjvOd9r/Sz7Pc8IjII+FdgXpDX8cZUre5tAH9QSh1TShVjPC1/FGla21i5opTarJQ6p5SqVkr9CePB1lS5thWdWmsTWr2YhGFADwSxr9n3lA1YDbyHMdp8GtARo0+oSVozHGU5XrPZiUiXVpyrPsOB3sAhEQHjy9tF5CKl1OAWnC9sWpVSp0XkCIRsIg1droFRgLTguLaiE7RWf0wCftOK48OpMwXoAcxXSlUD1SKyGHgWeKypg1sTBbETuFhEBolILMbrbKhYCPTF6FEchPEa8REwqoXnC6dWgMXAAyKSLiIdgYeBD1t4Ll2ugIgki8godydKlIhMwIguWX0B69Ra6yEiVwLdaF30Q9h0ut94DwBT3WWajOHaCWr+lRYbYKVUAUZnzn8D3wIbGj+icUTkVRF51X3uCqXUcc8fUAZUKaVaNFFWOLW6+U+MkK4CIB/YDsyONK1trFyjMVo
RRUAxhl9urFKq2XGrbUWn1trgvgLDmL2nlDrX0vOaoPOnGB1/RcBeDF/wHDygEgAAEJxJREFUI0Gdy+1E1mg0Go3JtIVEDI1Go7kg0QZYo9FoLEIbYI1Go7EIbYA1Go3GIrQB1mg0GosIKhFjhO32iAqV+NT1tt/A8baiE7TW1nAhaG0rOkFrbQ2NaQXdAtZoNBrL0AZYo9FoLKI1Y0G0aQpnXklZznnSNkYDkLJok8WKNBrN943vrQFOHVnI1wM/oA93A5CyyGJBmjaFLT6eQw8NwhXTcFuH/YrkpfqBrmka7YLQaDQai/jetoA99OttDPt5flQu7VZvtVgNRGX25uR1XWuXO324F2dRi8bKiRjsnTpR9ON+Te/nUCS/vR1VXW2CquZhuzQHgOLByQDUxAurpz5P96iGw8QO3zUWlobu2udH5VKWER3Uvh0LjOGpZeOO0AnQhA3LDLC9fyaOjKS65XIHausuU64tuQPpnnAEgE9zVgEwfMbYlg/KF0KKru3KP2e/Urs8OGYqXd4laCMc1aM7AOczO5lapoGwd+rE8Vv7se3JV5rcN/98BTM+HYvzxEkTlDWN5A7E2d4wfAXjjf8Hbvb+HsGO9d2Ca0dF4Ro6EBUldHtqL8t6rw3quD6fGC61rI1hk1aLvVMnHBd1b9Gx7fYXUXP4SIgVhQbvsg+G6N1HWtxICpsBtsUb4x9LoruSuhTO4mJwj76W/3gKB0a/Xrv/s8UDWP+D2HDJ8eHqRVt5Iu0bU64VLJ7ycrT3/dG3PfkKQ6qm0nFJcD/w/rt6ApB/zwJeONWXz67P9Cl3s/B8n2Pj+rN9Vt30WQccZVQp/56vPY50cEVAGKcI9rQ0rl+yiUdT9gV92GlnhfG/Io7WjPgtUVHYsjJ56c2XyYpu34ozhZfim/qx5b+afrD6I2vpVPrOLgXAda7FI02GnJaU/WWz76PL4nJcFRXNvl7YDPChhwYB8M49cwDj5lp45bA2/zodLjzltXrq84SqZfVgx2/o//kJS8o90PeZPG067bcc9H+Q5yFtMfa0NPI+38To+NMYQ+gGR+66+wHInrYfZyuu7xo6kJfefJm+UcHM69o22TBhDv+SOB2A/vdvtlhNHS0p+1W/ep5RHR6j+3OfN/t6YTPArnbG/5x2nplAToLNaN0VvJ7LWze8QnMqd6QguQO5epGvr/jdhTeQPr/5he+Np7z8+RRbSoxEMzr+NN9+tp//mTzMVHeE9/dZWJrB2/cak27Eb8vHGUEtHm8qfjqUUU+vI8a2n9Hxp4mRpuvn4GemkppfBUB2odGic54pbZUOFSVNtr68y9SbnMJThoZWKQiOtI/2Mjh2alDupfqk29vzlx/NB+Bnr08l627r+18qfjqUn89e1ey3ju5RCcz75Ws8kGC4f3rPCj4CJiwGuHDmldx1e2CHaq+exQyJiQzjOyJ/DADyYhrGpKqBOT8ql25P7W3gvlieekOY1Bmols7YhWGEH03Zx5r211oW8nLS0QHbuu0AuCzSUJ+C13Pp1dNobZesyQAgtkR5/bb+6+eMY4P53/lDa5e7eHWShtPoZa+fRIe/1xmG2FIX8esathzNMLy11yoqosu7cEX11Cb3rYkXVv3Kt9PSYwM8v4NVnJk0DICrH9pMXlJhg+31y76sl5Cft8Bnn5HxDs53cTT72mExwGU555vlO3vhVF+jFUnrWpEtYe/BzgBkBYiAOHXXMIqvMgq2X+8TfjtD/mX01+w6bPyILU7oCGBk+/wtjwHbzgRvuFphrEOKl46RiV+z6OVpZD++23J/ny0xkT2/uYi3bpxfawD6HDZaLrEbGhrd7PWTsOXXGY0O+xUpXjG+4TB47fYXkbV0KhsmzCHdbtz4tvwEUhabf380hbOoiJTFTbu3bPHxjOrwGPN++Roj45tvqMLJ2Uyjsr7YdZvP+j5/yyP2eBQ91lZhX1v3m6f36E5WrO/v01JMiYLYUu3gjo8fIbtiNyenXcmdGb6
t448KL2n1K3y4KL7K4dNZCEZnS+66+1lx1UKGxESzuOd6+lyVDYQ+oaPfCgeunfmhPakJdNpRAxhvGJ/mrOKjm+Yx45mxYKEBjurRnW/v78EXNzd+43h+X1e1nf6LapCN5tbNmsNH6DfvPCV3COl2Uy8dNlwVFXR/7nO2jO/LyPjI6QCvHDuETsOO+d3Wb4UD27otADiHG5OGHx3uCRQITWexTsTQaDQaizClBbzm3CX0v38zLuDWvH80yz0RTl441ZeE/HYBt58flVubqOFNuXLRYVMcB4ekMSSmtMnzBEUjD9T6gfgJhQ6/SSPqqkG4cspapyNExH1gtBzOV+Yy8anh/Kb7hxy7rR9dVyrLImHOZ3aiYNIrgP/Wb0Khg+G7xnK6Is6IZGhlZ1ooceWUcWrKsGYdE1vqIv69yIkwiEQOj3Fx4JL3fdaVuaq4reBWTlweR0ymUeZnf1gOwJ5rvH2/dfWopTbA0ky4LdWGL+i7Q2lkNdEBFgo8Adbp0WsAWPT2KHrO8f96KbkDGwTAFzjKWVl6OTE2B1/8x++JkWhWliXx5qujyGilCyW2xPi/siyJcQl1N35JTiwXTcr30THx4HBOVF3mc3z07iPk3xXFgWt83SVW0271Vk5UXUb3FQlsn7WAwc7mJZaYQYdU4+aKqoqF2R3pXKNwlZVbrMqXPdcshWuad8zC0gzeLjEiJaK27bXc/x5p2AYOqP3tvSly1lA5rxvTnv/Ab6ecNwtLMzjp6MC7C29okQ0IiwGWKjsnncYXS7e3J8bmwN7ZSBCIsdW1fn/2D6P31KwQFFtqCvOWL/AKjQuMJ1njpLOcEqfhpL9jx910ve1bbFmZ/OSTr4immrnP3Ev6stb7CD0+8DlldzLOK7jdX4jPst5rYcVan3WXzb4PW0xlq3WEA6lRFDjK6RsV1+zEEjPYOWSF8cH9r8BRzsOjp0DJGVxnSiMyNToY8pIKyVuxGIAR46fURqJYgju5Jca23zoN9aicW8XOgR80WN8nOoF1f1zY6LFO5WJfTSXv3D0S2bijxQEEYTHA2Y/v5upzMwEomPQKD3b8hh9v/hqAzOho2lL879XLZ9JvnvHQ6FZdSI1XoPYN995L8prtIXLHt45Vv3qe9mIDmn64mI1t8y4eHj2F332yOKIzuzz0jYrjd58sxqmEKU9MJ2nZF1ZLavO0NLklUtlXU8nDo6f8//bOPaapK47j3962QLSMRxXGQ4Qqj5s9NAw0LOrQZBq3mUjiYwuGTWWI9Q8jPubm5mMJYUvcTHBihqIJ6tycUYwTA8sC6lBxzIgzNKJUQARnZdDx0Mptuz8ut6Uvnr0PsvP5r/cWenrvOeee/s7v+/uCqr8zpvHPywRs6epC7IGHAAD6uRa6rALQPuPjosvVwYgoNSErqAp04VbEFjWD6a9NwCVqK2HFguxsTKjUwSKR1ZE3BRzexsowQHsnzP0JzVs++wF5qnTJZr7IZZTtQZGz8yTyAoVvq6X9H2xM18KqkCE074FrCOrTGLd/d99Ws2LwFZzgUDLEK5/AVya9BcJwmHHjAwBAyD42C0LGWNnJl2HG9H95iwFzhTY0xQrQ0LokLsdfyUDskbE1fqysWV6G4h42RhbOxYIVCuSE/oIQ+USomqxoXhkF9UL2Ms1S/4GsgFboXlCYeKORF0XX5MttoAsdr5fm7Dqoa+wJK+1JFujTvvf6Z/OJpdOI1Z/nIGfnSaxQGbFLLXwblHUtSN7hKhqwb7C4ljBboTKiKbsMxX5O/YRnrAwDWdUtyAA82pOE5HDadk7V2gefS+7Ddgmd7Ps05nWS7yMjEUF5m/rDSTg6bfCcUbpQC1UTu74NtVWZs4dxvPHLl/dNOEbfiMiKICDL8TilUwmeX+nM1uAGFNAvPJ4PXNWCOZMasGtynWBtYvSN0BQxoKG1HYs/2+GQCzzpJg3aoHX4uzXLyiSTXeIOq8mEgOPXUb8tDFAZvSNeGSGeRAMBD9gcz3hkuJ2
Eh+onfONTVoPgYb63J8YfADAtYfDNIykwlAjK28hjNdBtZ6/kTwsOelTjPjH3YM6JLeyvX54rtomSBfG2boktUV9IrL3P8O6FTfjxHbsKKvWVuwCAuowUFxcDrlTlQAaKSviCediCqN32G++sgrPU6hBV63isuHsRCugXtu9zNOoKb+0bC8fOz0fMMgOv4pWRIq9kFVBT+2YipjsTlK8ZNW8dQJDc/nN5sH4iJQwz2SGtc9N3xYITwKjl4u2WUDNo6Nb748FiLjTjOSTabpZh+r4GW+iR13bx/gkEAoFAcAvvK2CFJhr6+Y6l3TqPRyK4RPhVhKWrC7EbqlF++zXM8mXlkNxKcfPGHlyWp4CZIIM/5b7yQuUzCqsr1iOuX1QiJbjY5PVdb7IH1kljBazQROPfmaE2QUD0jms4lDwX6W7Sf8RGVnULcVWAPDQEj6uBoAEyYK6fpK5Se9XtwptISYjDodBEQ58RjvqMAngSwAjB08RAp2L6bGGlRFUT0v3bRWqVABOwYV6Yw4ZSoTEcfkZxp69zza9jRcCfDilR34TdBHK5YhyOGQWcYGR1hTTK5o2GdtoPL9dNFlwAYZgXhsxPzuH03wtBVdt3jct7lfB5LGxmDOXvDyZxaGuk5y8p4SeT2iN2cKhXE9C61YS7XE5zPyZrH/I7Elh3FBHa5Tz+OYS+/zI3X/7K/tk4MzcR6YvFEy/x7ojh7PDwc/Yit2X0hCT4vXosL8lE5RtFDnE+Z1qYbnRZKLxfugkAECehwtEjRUwBRFZAK+aeKMDm2UsBhoGfog8bj32M6D0CF7lJnI5f+4UJQ+P4EPaW2wUfyNXBsOR3oTbhgsNxk7UPF3uDUDFfA6tBWGsqT+OfQ4z770zfRBkoXyELeLrCuyOGNx0evEnER61I+m4DGhZ4HpBLvt6GsNP3bRtu42tNJE0iSk3YF1GK5Es5YjdlRHjL7YIPuGsKOFp65Xck2C2pBEbq4x+QhniJd0cMqQoEzJ1GxO8xIqabzY9zl7iu7LFKxiByvMK5Jlz7Ih/zf2vAhwG3oaKkr4ZzxmJiA8JiFOipP5yEtbN+93g+K6jK7TU1WZSi1NxozE3B/pWHALgf/3ShFpojzRAyD8qde4cU5ibBHDFamG52RVl3TzIrCPM9PRIO+gIAkqtdE/RDLrcJ2km8QXRJBwBAE+KaiD8WZ43RYjYYoNZF2pw5xNyIGQ2cCCa2Xrw6G1Ojng5hIut6TTe3JeJGbjImQNiwmf6rFOxOO+Wx6DpdqIWmuFVwR2SuHw7Fqe4AfPtlNgI7hambIYgjRnmvEpuKtiHq6C2YR+EcyiecwCG41vXceJt8Afv3UdekAGkiN2YwRHgY+OgNoAu1Q79xAM4iGCkz7680GK6FAWCdOwLPCJ9pFJHY5jarQEhxgyeGc//9ngIhx68KtmEpjCNG7zRE5l0lMVQBCap/hpiLmQ7HxFrFKVuNLm2ZIoIQx1ngMhyk0Gfby8NttkmDMeU8hagS6dXXYMVLOYjPvQNGxJKYo7n/fEOEGAQCgSASvKyAVTofpE5ZanvNFlwfn/mz4xVOVCAFzPf0iFsrnTqw4w2hCgB5g0c3w5CKpQ7HmponSVK8JAV4mYDD914F9tpfC+F2QSAQxEez3TXuTMa/Z0gIgkAgEERCZrVKwc+BQCAQ/n+QFTCBQCCIBJmACQQCQSTIBEwgEAgiQSZgAoFAEAkyARMIBIJIkAmYQCAQROI/84Wo3sDO604AAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "count = 1\n", - "for i in images:\n", - " plt.subplot(4, 8, count) \n", - " plt.imshow(np.squeeze(i))\n", - " plt.title('num:%s'%labels[count-1])\n", - " plt.xticks([])\n", - " count += 1\n", - " plt.axis(\"off\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "通过上述查询操作,看到经过变换后的图片,数据集内分成了1875组数据,每组数据中含有32张图片,每张图片像数值为32×32,数据全部准备好后,就可以进行下一步的数据训练了。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义网络" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "我们选择相对简单的LeNet网络。LeNet网络不包括输入层的情况下,共有7层:2个卷积层、2个下采样层(池化层)、3个全连接层。每层都包含不同数量的训练参数,如下图所示:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\"LeNet5\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "> 更多的LeNet网络的介绍不在此赘述,希望详细了解LeNet网络,可以查询。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在构建LeNet前,我们对全连接层以及卷积层采用Normal进行参数初始化。\n", - "\n", - "MindSpore支持`TruncatedNormal`、`Normal`、`Uniform`等多种参数初始化方法,具体可以参考MindSpore API的`mindspore.common.initializer`模块说明。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用MindSpore定义神经网络需要继承`mindspore.nn.Cell`,`Cell`是所有神经网络(`Conv2d`等)的基类。\n", - "\n", - "神经网络的各层需要预先在`__init__`方法中定义,然后通过定义`construct`方法来完成神经网络的前向构造,按照LeNet的网络结构,定义网络各层如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:53:43.367791Z", - "start_time": "2021-02-03T08:53:43.248322Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "layer conv1: Conv2d\n", - "****************************************\n", - "layer fc1: Dense\n" - ] - } - ], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.common.initializer import Normal\n", - "\n", - "class LeNet5(nn.Cell):\n", - " \"\"\"Lenet 
network structure.\"\"\"\n", - " # define the operator required\n", - " def __init__(self, num_class=10, num_channel=1):\n", - " super(LeNet5, self).__init__()\n", - " self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')\n", - " self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')\n", - " self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02))\n", - " self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02))\n", - " self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02))\n", - " self.relu = nn.ReLU()\n", - " self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)\n", - " self.flatten = nn.Flatten()\n", - "\n", - " # use the preceding operators to construct networks\n", - " def construct(self, x):\n", - " x = self.max_pool2d(self.relu(self.conv1(x)))\n", - " x = self.max_pool2d(self.relu(self.conv2(x)))\n", - " x = self.flatten(x)\n", - " x = self.relu(self.fc1(x))\n", - " x = self.relu(self.fc2(x))\n", - " x = self.fc3(x) \n", - " return x\n", - " \n", - "network = LeNet5()\n", - "print(\"layer conv1:\", network.conv1)\n", - "print(\"*\"*40)\n", - "print(\"layer fc1:\", network.fc1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "构建完成后,可以使用`print(LeNet5())`将神经网络中的各层参数全部打印出来,也可以使用`LeNet().{layer名称}`打印相应的参数信息。本例选择打印第一个卷积层和第一个全连接层的相应参数。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 自定义回调函数收集模型的损失值和精度值" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "自定义一个数据收集的回调类`StepLossAccInfo`,用于收集两类信息:\n", - "\n", - "1. 训练过程中`step`和`loss`值之间关系的信息;\n", - "2. 
每训练125个`step`和对应模型精度值`accuracy`的信息。\n", - "\n", - "该类继承了`Callback`类,可以自定义训练过程中的操作,等训练完成后,可将数据绘成图查看`step`与`loss`的变化情况,以及`step`与`accuracy`的变化情况。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "以下代码会作为回调函数,在模型训练函数`model.train`中调用,本文验证模型阶段会将收集到的信息,进行可视化展示。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:53:43.400967Z", - "start_time": "2021-02-03T08:53:43.369836Z" - } - }, - "outputs": [], - "source": [ - "from mindspore.train.callback import Callback\n", - "\n", - "# custom callback function\n", - "class StepLossAccInfo(Callback):\n", - " def __init__(self, model, eval_dataset, steps_loss, steps_eval):\n", - " self.model = model\n", - " self.eval_dataset = eval_dataset\n", - " self.steps_loss = steps_loss\n", - " self.steps_eval = steps_eval\n", - " \n", - " def step_end(self, run_context):\n", - " cb_params = run_context.original_args()\n", - " cur_epoch = cb_params.cur_epoch_num\n", - " cur_step = (cur_epoch-1)*1875 + cb_params.cur_step_num\n", - " self.steps_loss[\"loss_value\"].append(str(cb_params.net_outputs))\n", - " self.steps_loss[\"step\"].append(str(cur_step))\n", - " if cur_step % 125 == 0:\n", - " acc = self.model.eval(self.eval_dataset, dataset_sink_mode=False)\n", - " self.steps_eval[\"step\"].append(cur_step)\n", - " self.steps_eval[\"acc\"].append(acc[\"Accuracy\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "其中:\n", - "\n", - "- `model`:计算图模型Model。\n", - "- `eval_dataset`:验证数据集。\n", - "- `steps_loss`:收集step和loss值之间的关系,数据格式`{\"step\": [], \"loss_value\": []}`。\n", - "- `steps_eval`:收集step对应模型精度值`accuracy`的信息,数据格式为`{\"step\": [], \"acc\": []}`。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义损失函数及优化器\n", - "\n", - "在进行定义之前,先简单介绍损失函数及优化器的概念。\n", - "\n", - "- 损失函数:又叫目标函数,用于衡量预测值与实际值差异的程度。深度学习通过不停地迭代来缩小损失函数的值。定义一个好的损失函数,可以有效提高模型的性能。\n", - "\n", - "- 优化器:用于最小化损失函数,从而在训练过程中改进模型。\n", - "\n", 
- "定义了损失函数后,可以得到损失函数关于权重的梯度。梯度用于指示优化器优化权重的方向,以提高模型性能。\n", - "\n", - "MindSpore支持的损失函数有`SoftmaxCrossEntropyWithLogits`、`L1Loss`、`MSELoss`等。这里使用`SoftmaxCrossEntropyWithLogits`损失函数。\n", - "\n", - "MindSpore支持的优化器有`Adam`、`AdamWeightDecay`、`Momentum`等。这里使用流行的`Momentum`优化器。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:53:43.478821Z", - "start_time": "2021-02-03T08:53:43.402997Z" - } - }, - "outputs": [], - "source": [ - "import mindspore.nn as nn\n", - "from mindspore.nn import SoftmaxCrossEntropyWithLogits\n", - "\n", - "lr = 0.01\n", - "momentum = 0.9 \n", - "\n", - "# create the network\n", - "network = LeNet5()\n", - "\n", - "# define the optimizer\n", - "net_opt = nn.Momentum(network.trainable_params(), lr, momentum)\n", - "\n", - "# define the loss function\n", - "net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 训练网络" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "完成神经网络的构建后,就可以着手进行网络训练了,通过MindSpore提供的`Model.train`接口可以方便地进行网络的训练,参数主要包含:\n", - "\n", - "1. 每个`epoch`需要遍历完成图片的batch数:`epoch_size`; \n", - "2. 训练数据集`ds_train`; \n", - "3. MindSpore提供了callback机制,回调函数`callbacks`,包含`ModelCheckpoint`、`LossMonitor`和`Callback`模型检测参数;其中`ModelCheckpoint`可以保存网络模型和参数,以便进行后续的fine-tuning(微调)操作; \n", - "4. 
数据下沉模式`dataset_sink_mode`,此参数默认`True`需设置成`False`,因为此模式不支持CPU计算平台。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:54:23.928574Z", - "start_time": "2021-02-03T08:53:43.479829Z" - }, - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "epoch: 1 step: 125, loss is 2.2961428\n", - "epoch: 1 step: 250, loss is 2.2972755\n", - "epoch: 1 step: 375, loss is 2.2992194\n", - "epoch: 1 step: 500, loss is 2.3089285\n", - "epoch: 1 step: 625, loss is 2.304193\n", - "epoch: 1 step: 750, loss is 2.3023324\n", - "epoch: 1 step: 875, loss is 0.69262105\n", - "epoch: 1 step: 1000, loss is 0.23356618\n", - "epoch: 1 step: 1125, loss is 0.35567114\n", - "epoch: 1 step: 1250, loss is 0.2065609\n", - "epoch: 1 step: 1375, loss is 0.19551893\n", - "epoch: 1 step: 1500, loss is 0.1836512\n", - "epoch: 1 step: 1625, loss is 0.028234977\n", - "epoch: 1 step: 1750, loss is 0.1124336\n", - "epoch: 1 step: 1875, loss is 0.026502304\n" - ] - } - ], - "source": [ - "import os\n", - "from mindspore import Tensor, Model\n", - "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor\n", - "from mindspore.nn import Accuracy\n", - "\n", - "epoch_size = 1\n", - "mnist_path = \"./datasets/MNIST_Data\"\n", - "model_path = \"./models/ckpt/mindspore_quick_start/\"\n", - "\n", - "repeat_size = 1\n", - "ds_train = create_dataset(os.path.join(mnist_path, \"train\"), 32, repeat_size)\n", - "eval_dataset = create_dataset(os.path.join(mnist_path, \"test\"), 32)\n", - "\n", - "# clean up old run files before in Linux\n", - "os.system('rm -f {0}*.ckpt {0}*.meta {0}*.pb'.format(model_path))\n", - "\n", - "# define the model\n", - "model = Model(network, net_loss, net_opt, metrics={\"Accuracy\": Accuracy()} )\n", - "\n", - "# save the network model and parameters for subsequence fine-tuning\n", - "config_ck = CheckpointConfig(save_checkpoint_steps=375, 
keep_checkpoint_max=16)\n", - "# group layers into an object with training and evaluation features\n", - "ckpoint_cb = ModelCheckpoint(prefix=\"checkpoint_lenet\", directory=model_path, config=config_ck)\n", - "\n", - "steps_loss = {\"step\": [], \"loss_value\": []}\n", - "steps_eval = {\"step\": [], \"acc\": []}\n", - "# collect the steps,loss and accuracy information\n", - "step_loss_acc_info = StepLossAccInfo(model , eval_dataset, steps_loss, steps_eval)\n", - "\n", - "model.train(epoch_size, ds_train, callbacks=[ckpoint_cb, LossMonitor(125), step_loss_acc_info], dataset_sink_mode=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "训练完成后,会在设置的模型保存路径上生成多个模型文件。" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:54:23.979139Z", - "start_time": "2021-02-03T08:54:23.929589Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./models/ckpt/mindspore_quick_start/\n", - "├── checkpoint_lenet-1_1125.ckpt\n", - "├── checkpoint_lenet-1_1500.ckpt\n", - "├── checkpoint_lenet-1_1875.ckpt\n", - "├── checkpoint_lenet-1_375.ckpt\n", - "├── checkpoint_lenet-1_750.ckpt\n", - "└── checkpoint_lenet-graph.meta\n", - "\n", - "0 directories, 6 files\n" - ] - } - ], - "source": [ - "!tree $model_path" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-01T03:42:45.627283Z", - "start_time": "2021-02-01T03:42:45.606466Z" - } - }, - "source": [ - "文件名称具体含义`{ModelCheckpoint中设置的自定义名称}-{第几个epoch}_{第几个step}.ckpt`。\n", - "\n", - "> 使用自由控制循环的迭代次数、遍历数据集等,可以参照官网编程指南《[训练](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/train.html#%E8%87%AA%E5%AE%9A%E4%B9%89%E8%AE%AD%E7%BB%83%E5%BE%AA%E7%8E%AF)》的自定义循环训练部分。\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 查看模型损失值随着训练步数的变化情况" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": { - "ExecuteTime": { 
- "end_time": "2021-02-03T08:54:24.122898Z", - "start_time": "2021-02-03T08:54:23.980168Z" - }, - "scrolled": true - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYMAAAEWCAYAAACEz/viAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3dd7wcZb3H8c8vhR5qQkkoCQFLuEoLTYooiAlKuypFLxELoFcQKSo2jIhcEUQuUnIBaVIMGMSAIM1QQj+EFEIISYCYQjopkARIzu/+8cxk5+yZ3bN7zs7unrPf9+u1r52d+tvZc+Y3z/PMPGPujoiINLZutQ5ARERqT8lARESUDERERMlARERQMhAREZQMREQEJYMuz8yGm9lttY6jvczsFDMbW+s4kszsIjNbZGbzarBtN7NdSpjvUDOb3dH11JKZ9Y/i7FHrWBqBkkEXYGZfNbMmM3vXzN42swfN7KBax1WP2jpIlrD8DsC5wCB337ZykYnUlpJBJ2dm5wBXABcD2wA7AtcAx9QyrnpUoTPMnYDF7r6gAusSqRtKBp2YmW0GXAh8z93vcff33P1Dd7/P3X+YmHU9M7vVzFaY2WQzG5xYx/lmNiOa9qqZHZeYdoqZjTWzy8zsHTN708yGJqYPMLMno2UfNbOrk1VSZra/mT1jZkvNbIKZHVrku+xgZveY2UIzW2xmV+VNLxTDN8xsShTDG2Z2emLaoWY228x+HFXp3Ak8CPSNSlHvmlnftP0a7a+FZjbTzH5uZt3M7HDgkcTyN6csG2/zR2a2ICqpHWtmR5rZ62a2xMx+mph/fTO7wszmRq8rzGz9xPQfRuuYa2bfzNvW+tF++beZzTezEWa2YaF9XGTfp37faNouZvaEmS2LqsZGRuPNzP4QfcdlZjbRzP4jZd0nmllT3rizzWx0NPwFM3vZzJab2SwzG14kzrei3yD+3KIKtJy/N0nh7np10hcwBFgD9Cgyz3BgNXAk0B34H+C5xPSvAH0JJwYnAO8B20XTTgE+BE6Nlv0uMBewaPqzwGXAesBBwHLgtmhaP2BxtN1uwOeiz31SYuwOTAD+AGwMbAAcVGIMXwAGAgZ8GlgJ7BVNOzTaP5cA6wMbRuNmt7FfbwX+DvQC+gOvA99KrLPg8oltXgD0jOJeCNwRrW+36PfYOZr/QuA5YGugD/AM8OvE7zsf+I9ov9wBOLBLNP0KYDSwZbTu+4D/KTHO5HqKfd87gZ9Fv2Hyd/k88BKwebTvP070d5O3nY2AFcCuiXEvAicm4vxEtP5PRt/32Gha/yjOHtHnt4DD8/62y/5706vA30StA9CrAz8efA2Y18Y8w4FHE58HAauKzD8eOCYaPgWYnpi2UfTPuS2hOmoNsFFi+m2Jf84fA3/OW/dDwNdTtnlAdMBsldSKxVAg/nuBs6LhQ4EPgA0S09s6SHYH3ie0CcTjTgceL3H5Q4FVQPfoc68o3v0S87yUOODNAI5MTPs88FY0fCPw28S0j0Tr2oVwAH4PGJi3H98sMc54PW1931uB64Dt85b/LCFp7A90a+Nv8Dbggmh4V0Jy2KjAvFcAf4iG+1N6Mij5702v9JeqiTq3xUDvEurCk1e9rAQ2iJcxs2FmNj4qWi8lnIX2TlvW3VdGg5sQShNLEuMAZiWGdwK+Eq83WvdBwHYp8e0AzHT3NW3FnxcDZjbUzJ6Lql+WEs4Mk/EvdPfVBdabpjehpDMzMW4m4cyzVIvdfW00vCp6n5+YviqOn7Af87fVNzFtVt60WB9CYnwpsX//GY0vR1vf90eExPOChSrGbwK4+7+Aq
4Crgflmdp2ZbVpgG3cAJ0XDXwXujX9HM9vPzMZEVVTLgO/Q8vcrVTl/b5JCyaBze5ZQ5XBsexY2s52A64EzgK3cfXPgFcI/f1veBrY0s40S43ZIDM8inKltnnht7O6/TVnXLGDHEpJafvzrA6MIVVXbRPE/kBd/fre8bXXTu4hQLbVTYtyOwJxyYivD3JRtzY2G36blPt0xMbyIkFR2S+zfzdx9E8pT9Pu6+zx3P9Xd+xJKDNdYdEmqu1/p7nsTqr4+AvyQdA8TTlr2ICSFOxLT7iBUde3g7psBIyj89/ceIQHGkldzlfP3JimUDDoxd19GqJu+Omqk3MjMekZny78rYRUbEw6OCyE0xhJKBqVseybQBAw3s/XM7ADgqMQstwFHmdnnzay7mW0QNa5un7K6FwgHvt+a2cbRvAeWEMZ6hLaAhcAaCw3LR7SxzHxgKwuN72nfay1wF/AbM+sVJcxzou+ThTuBn5tZHzPrTfg9423dBZxiZoOipPvLRJzNhET+BzPbGsDM+pnZ58vZeFvf18y+kvjN3iH8vaw1s32is/qehIP0amBt6y1AVOL7K3ApoX3jkcTkXoQS5moz25dQcihkPHBi9Dc+GPhyYlo5f2+SQsmgk3P3ywn/vD8nHBRnEc707y1h2VeB3xNKGPMJDXlPl7H5rxHqqRcDFwEjCfXPuPsswuWtP03E9UNS/uaiA9JRhDrsfwOzCY3ZbcW/Avg+4WD2DuFAMrqNZV4jHIDfiKoTWl1NBJxJOMC9AYwlnL3e2FY87XQRIalOBCYB46JxuPuDhDr0fwHTo/ekH0fjnzOz5cCjwEfbEUOx77sP8LyZvUvYt2e5+5vApoRk9A6hWmkxoYRWyB3A4cDdedWB/w1caGYrCInwriLr+AXhYoF3gF+RKGGU8/cm6eIrMkQ6LLrs8DV3/2WbM4tIXVHWlHaLqgoGWrgGfwjhzKzNEomI1B/1+SEdsS1wD7AVoWrnu+7+cm1DEpH2UDWRiIiomkhERDppNVHv3r29f//+tQ5DRKRTeemllxa5e+qNiZ0yGfTv35+mpqa2ZxQRkXXMbGahaaomEhGRbJOBhW6Jx1joYniymZ2VMs+hURe446PXBVnGJCIirWVdTbQGONfdx5lZL0KnWo9Ed74mPeXuX8w4FhERKSDTkoG7v+3u46LhFcAUyuv9UUREqqBqbQZm1h/YE3g+ZfIB0ZOJHjSz3Qosf5qF5/w2LVy4MMNIRUQaT1WSgZltQuhq+Afuvjxv8jhgJ3ffHfgjBbozcPfr3H2wuw/u06fcLttFRKSYzJNB1MXtKOB2d78nf7q7L3f3d6PhB4CeUVe+IiJSJVlfTWTAn4ApUVfLafNsG81H1J95N0J3uCLV99RTMHlyraMQqbqsryY6EDgZmGRm46NxPyV6YpO7jyA8oOK7ZraG8OSmE10dJkmtHHJIeNefoDSYTJOBu4+ljUcouvtVhGepiohIjegOZBERUTIQERElAxERQclARERQMhAREZQMREQEJQMREUHJoLpWr4aXX06f9s47Yfr//A8sWZJ9LNOnw6pVuc8jR8K9qd1CtVYsvnnzYNw4mDMH4g4F990XfvITmDYN7r4bPvyw5TKPPw4zZ0Jzc+5mrzfeqM5+EJHA3Tvda++99/ZMPf20++DB7pMnu48b5z5vXhi/bJn77ru7X321+9ix7u++637YYe733humz57tfv757i+80Hqdy5a5H3+8O7iPHOm+ZIn7xRe79+vnft99YXzPnuEd3GfNCu+9ernvuaf7D3/ofsMN4X3OHPc1a9ynTw/TR4xwnzLFvanJvbnZ/YIL3O+6y/36690vusj9/ffD9q++2v3FF91PPjms+7DDwjT33Hbvv9999Wr3hx4K45980v2QQ8J3evNN99//Psx3333uo0a5P/GE+5lnuvfokVtH8nXFFa3H7bef+xe/GGJatKjltAED3I8+Ogxvv33Yp/lmzHCfNi23z0eOTN/fzzzjf
uON4bcsVRyHSBcENHmB46p5J7ztfvDgwd6uZyDPmAEvvABjxsD11+fGDxkC//xn5QIE2G8/aGqCtWsru17J+dOf4FvfCsO33Ra6khg6NPQt1KsXrFiRm/eDD6Bnz7bXadEN853w/0KkLWb2krsPTp3WUMng8svh3HMrH5DUv9NPhxEj2p5PyUC6sGLJoLHaDL71LRg7Fi68MJy133QTHHZYbvp22+WGkyWHfBtskP75O9+BTTapXLydRd++tY6gbaW2h4g0qMZKBpttBgceCL/4Bey9N5xyCjz6aK7Geu5ceP11eP55+Pa3YenS0KgbT1+1KlQ9rFrVshY8/nzttWF6vMy8efDII7B8OUydGmI4+WS4+OIw/H//B489BpdeCg8/HBpY+0VPBT3vPFi5MjS8vvdeaFy991549tnQ2OoOr7wCr72W+34//3mYPn58bpx7qBp74olcvEuWwNZbw+jR4fMVV8BnPxvmP+KI8D5qFHzpS2H/PPUU3H8/vP8+vPoq/O53sO228JnPhLPtOXPgl79suXzsiCPgllvC8AsvhP0xblyI8/XXc/PttFNI0gCHHx6qehYvDvt26lR48smwT+J9fO65cMAB4bd87jl4+univ/38+XDNNW3+iYg0qsaqJqq1GTPCQa9Hkc5im5tDVYUV7ey1bf/4R0gU5VaLrV0bktnGG5e3nHtYbsMNw+f58+H44+HGG2HgwMLLzZgR4hw6FLp1Cwlv6FBYf/3ytg8hefz2t3DCCfCFL4Srlt55p+U8ixbBVlsVXoeqiaQLU5uBNKbf/Q5+/OOW47beOiSqQpQMpAtTm4E0pnPOaV19tGABvPhibeIRqWNKBtJ19egBn/pUaC9JUjIQaUXJQLq+Qw4Jjd2x9rRHiHRxSgbSGH7/+9xw/qXBIqJkIA1ijz1yVxEl+2QSEUDJQBqFWe5ejxdeqG0sInVIyUAaR3x3eLG7y0UalJKBNI711qt1BCJ1S8lAGkfyru6VK2sXh0gdUjKQxrR6da0jEKkrSgbSmD7xiVpHIFJXlAykMc2dW+sIROqKkoE0lsMPr3UEInVJyUAay3HH1ToCkbqkZCCNRV1Ti6RSMpDG0txc6whE6pKSgTSWtWvTh0UaXKbJwMx2MLMxZjbFzCab2Vkp85iZXWlm081sopntlWVM0uCSJQN1WCeyTtYlgzXAue7+cWB/4HtmNihvnqHArtHrNODajGOSRpZMBrrxTGSdTJOBu7/t7uOi4RXAFKBf3mzHALd68BywuZltl2Vc0sCSVUMqGYisU7U2AzPrD+wJPJ83qR8wK/F5Nq0TBmZ2mpk1mVnTwoULswpTuro1a3LDSgYi61QlGZjZJsAo4Afuvjx/csoira7/c/fr3H2wuw/u06dPFmFKI1i8ODesZCCyTubJwMx6EhLB7e5+T8oss4EdEp+3B9RXgGTj5JNzw0oGIutkfTWRAX8Cprj75QVmGw0Mi64q2h9Y5u5vZxmXNLA998wNKxmIrNMj4/UfCJwMTDKz8dG4nwI7Arj7COAB4EhgOrAS+EbGMYkESgYi62SaDNx9LOltAsl5HPhelnGIpFIyEFlHdyBL40peWSTS4JQMpHGpnyKRdZQMpHEpGYiso2Qgjev222sdgUjdUDKQxvXgg3DLLXDDDbWORKTmsr60VKS+nXJKeP/2t2sahkitqWQgIiJKBiIiomQgIiIoGYiICEoG0oi23rrWEYjUHSUDaTyTJtU6ApG6o2QgjWfDDWsdgUjdUTKQxtO9e60jEKk7SgbSeLrpz14kn/4rpPEoGYi0ov8KaTyqJhJpRclAGk/37rDxxrWOQqSuKBlIYzrwwFpHIFJXlAykMamqSKQFJQNpTGpEFmlB/xHSmMxqHYFIXVEyEBERJQMREVEykEblXusIROqKkoGIiCgZSINSyUCkBSUDERFRMpAGpZKBSAtKBtKYlAxEWlAyEBGRbJOBmd1oZgvM7JUC0w81s2VmNj56XZBlPCLrl
FoymDgR/v73bGMRqQM9Sp3RzLYBLgb6uvtQMxsEHODufyqy2M3AVcCtReZ5yt2/WGocIplYsAC23rr1+N13D++qVpIurpySwc3AQ0Df6PPrwA+KLeDuTwJL2hWZSDVts02tIxCpqXKSQW93vwtoBnD3NcDaCsRwgJlNMLMHzWy3QjOZ2Wlm1mRmTQsXLqzAZqWh6UxfpIVyksF7ZrYV4ABmtj+wrIPbHwfs5O67A38E7i00o7tf5+6D3X1wnz59OrhZaXhXXll8+oUXVicOkTpRTjI4BxgNDDSzpwntAGd2ZOPuvtzd342GHwB6mlnvjqxTpCQf/Sh861uFp196afViEakDJTcgu/s4M/s08FHAgKnu/mFHNm5m2wLz3d3NbF9CclrckXWKlKzY085UjSQNppyriYbljdrLzHD3glcKmdmdwKFAbzObDfwS6Ang7iOALwPfNbM1wCrgRHf9F0qV6NGXIuuUnAyAfRLDGwCHEer8CyYDdz+p2Ard/SrCpaci1adkILJOOdVELdoHzGwz4M8Vj0ikWpQMRNbpyB3IK4FdKxWISNV1U28sIrFy2gzuI7qslJBEBgF3ZRGUSFWoZCCyTjltBpclhtcAM919doXjEakeXU0ksk45bQZPZBmISNWpZCCyTpvJwMxWkKseajEJcHfftOJRiVRDfjJY1tEb6kU6rzaTgbv3qkYgIlWXnwz22Sd9PpEGUE6bAQBmtjXhPgMA3P3fFY1IpFryryaaNq02cYjUgZKvrTOzo81sGvAm8ATwFvBgRnGJZE9tBiLrlHOh9a+B/YHX3X0A4Q7kpzOJSqQadDWRyDrlJIMP3X0x0M3Murn7GGCPjOISyZ5KBiLrlNNmsNTMNgGeBG43swWE+w1EOiclA5F1yikZHEPoguJs4J/ADOCoLIISqQolA5F1yikZnAbcHd11fEtG8YhUj/omElmnnP+GTYGHzOwpM/uemekJ4tK5jRlT6whE6kbJycDdf+XuuwHfA/oCT5jZo5lFJpK1p54qPE1XE0mDaU85eQEwj/B4yq0rG45IFe2/f60jEKkb5dx09l0zexx4DOgNnOrun8wqMJHM/eIXhaetXl29OETqQDkNyDsBP3D38WkTzWwLd3+nMmGJVEHPnrWOQKRulNOF9fltzPIYsFfHwhGpIl1NJLJOJf8brILrEsme7jMQWaeSyUCXX0jnopKByDr6b5DG1VbJQJeXSgNRNZE0rvySQY+8JrTm5urFIlJj5VxaOtDM1o+GDzWz75vZ5olZDqt4dCJZyk8G+Qf/c8/NDauUIF1cOSWDUcBaM9sF+BMwALgjnujuSyocm0i2tsnrUSX/gP+//1t4mkgXU04yaHb3NcBxwBXufjawXTZhiVTBxhvDqafmPhc74CsZSBdX1sNtzOwk4OvA/dE43bUjjUHJQLq4cpLBN4ADgN+4+5tmNgC4LZuwRKqk1MtLlQykiyvnDuRXge9D6HoC6OXuv80qMJGqUDIQAcq7muhxM9vUzLYEJgA3mdnl2YUmUgVW4hXRSgbSxZVTTbSZuy8H/hO4yd33Bg4vtoCZ3WhmC8zslQLTzcyuNLPpZjbRzNS3kVRXuSWD5maYMye7eERqpJxk0MPMtgOOJ9eA3JabgSFFpg8Fdo1epwHXlhGPSMeVmwwuvBC23x7+/e/sYhKpgXKSwYXAQ8AMd3/RzHYGphVbwN2fBIrdf3AMcKsHzwGbRwlHpDrKTQYPPRTeVTqQLqacBuS7gbsTn98AvtTB7fcDZiU+z47Gvd3B9YqUptSeS+NkECePtWuziUekRsppQN7ezP4WtQHMN7NRZrZ9B7ef1nqX2lJnZqeZWZOZNS1cuLCDmxWJlFsyiJOH+i2SLqacaqKbgNFAX8LZ+33RuI6YDeyQ+Lw9MDdtRne/zt0Hu/vgPn36dHCzIpFyk0E8v5KBdDHlJIM+7n6Tu6+JXjcDHT0qjwaGRVcV7Q8sc3dVEUn1lPtMAyUD6aLKeQbyI
jP7L+DO6PNJwOJiC5jZncChQG8zmw38kqgLC3cfATwAHAlMB1YS7nIWqZ72lgzUZiBdTDnJ4JvAVcAfCPX6z9DGwdvdT2pjugPfKyMGkcoqJxn84x/w2GPhs0oG0sWUXEZ293+7+9Hu3sfdt3b3Ywk3oIl0XuUkgyuuaPlZpAvp6JPOzqlIFCK1MmwYbL55+rRtt80N5x/8VTKQLqajyUCPupTObeed4Z130qfNm5cbzk8GM2bAJZdkF5dIlZXTZpBGZWVpDPnJ4PvfD+9f+1ronkKkk2uzZGBmK8xsecprBeGeA5Guzz29h9OudFXRFlu0bBeRhtJmMnD3Xu6+acqrl7t3tGQh0jkUajDuSg3JS5fC2WfXOgqpkY62GYg0hq500BdJoWQgUopGKBlIQ1MyEClFqcnggw9g6FCYMCH7mEQqSHX+IhB6Iy3WGFxqMnj5ZfjnP2HJEnj++crFJ5IxlQxEoO07kUeNgqeeaj2+q1QTdZXvIe2mZCACbSeDM8+E1atbj+/oQXTNGjjhBJg4sWPr6Sglg4anaiIRKL8r61hHD6KTJ8Ndd8GUKbVPCNLQVDIQgfYng9NOC89FXrOmfcun3chWCyoZNDwlAxFofzJ44gkYMgSGD69oOFWnZNDwlAxEoONn6K+/3rHl3eHmm2G77bLtEfWqq+COO9K3Lw1NbQYi0P6SQSz/YFrqwTWZhE49NVQ3rV3b8XgKOfPM8P7Vr7Ycr2TQ8FQyEAE48cSWnz/+8drEAbU5MNcqGaxeDXvvDc88U5vtyzpKBiIAf/wjTJqU+/zrX5e3vDu88QZMm9a+7Sd7RU0emJubq/MgnVolg8mTYdy4XIlFakbJQASgRw/o3Tv3uXv38tcxcCCcfHJ5yyQTQDx8111w3HFheNNNYZddyo9FpExKBiKxZP19uXX2aWfWTU2hS+hCT1LL32Zs2DC4994w/N578Oab5cXSHqWWDNasqU48xbjD2LFq56gwJQORWPLA/IlPlLds/oHJDPbZJzwsZsst216+uTl0clcrpR5Yf/rT8KjQWbOyjaeYG2+Egw+Gu++uXQxdkJKBSCxOBptsAgMGVH7dEyfCihXpN6i99lplt1euUpPBo4+G9wULsoulLfFlvLUuoXQxSgYisTgZtOeeg1IOpnfeGdoAklcuddY7kOslbqkYJQORWEcOcGkH00LtDqNGtX87bXn7bbj++txn99CldltXJJWaDOqhnj6OQQmpopQMRGLxwbtSB7z2XJGU1J44jjkm9Jc0e3b4fM894WE7V17ZsVjy1fJArGSQCSUDkVh8cGnPdf3llAzStlnqOpPGjIFjj20Z78KF4f3DD8N7nBTaql+fObP4dOny1B2FSCztpq+OyE8GaestlgzaSkpHHw3vvhsuP+3Vq+X64mVL/S6lXj1VD9VEkgklA5FYpdsMOlpNVE4J5f334aWXCie0SlepVHp9SjI1p2QgEutIySB/mfffD2ftaeuPjRgR5it1ncWmn302XHtt6cu2lw7aXZaSgUisI8ng/vtbfp4wofU8+ev97neLr7OcksH48e1ftj3KKRksWADHHw9/+Qtsu23H16eElAk1IIvEKt1mkO+SS8qbv9TLQZP9GmWtPfvm//4vPATo6qsrG4uuJqqozJOBmQ0xs6lmNt3Mzk+ZfoqZLTSz8dHr21nHJJKq0peWFrPeem3Pc911rcc1NaU/OyH/wJj1d6jlmbxKBpnINBmYWXfgamAoMAg4ycwGpcw60t33iF43ZBmTSEFZlwyS4ks/iznnnJaf//nP0N/RNdfAf/83rFwZxjc3Vz8ZtEexBDJuHPz975Vbn5Qt65LBvsB0d3/D3T8A/gIck/E2RdqnmsmgPWbMCO+33966sTj/wFjupaWlitfX3AxLl1Z23b//fWXXJ2XJOhn0A5LdG86OxuX7kplNNLO/mtkOaSsys9PMrMnMmhbGN9aIVFJ+MrjggtrFUo5SSgblnkXfcUfuBrY0550HW
2zR8oqppUvDdm66qe31u7cuHZXa6F2vybqTyzoZpP0F5v+S9wH93f2TwKPALWkrcvfr3H2wuw/u06dPhcMUofUB81e/qs+EkHWbwauvwte+Bkcc0XL82rXwyith+LHHwvt774WD+vrrh/0Fodvutlx/fWg3ie+Qzo957Fi47LLi6yiW4JYuDU+ra26GKVPCvK++mj6ve3YJ5r33wrOtK12KykDWyWA2kDzT3x6Ym5zB3Re7e3yx9fXA3hnHJJIurZqolLr9Wps4sXA1UXucdVZ4z79cdd681vN26xYe3vPBB6Ulgdjtt4f3uOoLWu73gw+GH/6w9PXlO/vskMjvuw9Gjgzj4vekRYvCd7jmmvZvq5hrr4UbboCLL85m/RWUdTJ4EdjVzAaY2XrAicDo5Axmtl3i49HAlIxjEknX3mTwwAPZxFNI/oH/8MMrWzLITyRLl8Jzz4WSQb5u3dpXJRWvK3mXdiXPzuPqqw8+KN6x3Zw54T3tyq1KSF7+Gxs+vHWirQOZJgN3XwOcATxEOMjf5e6TzexCMzs6mu37ZjbZzCYA3wdOyTImkYLSDhalJINqPaO42MGyvcng3XdzB8RY/kH/C1+AAw6AVavSt1vqtsaObb2NZDJobg6vESNKW18xyZiKJYP4Et9id4JXKg4I3/tXv4L99stmex2Q+X0G7v6Au3/E3Qe6+2+icRe4++ho+Cfuvpu77+7un3H3Gj/ySRpWfLBIHqD6pV3vkPDVr1bnEscxY8IdvIXkxzB9esuz3Q8+gGXLWi/3qU/B9tu3HJefAF98MbynPaHNvfQqqTFjcsPxMsnO/NxDVU5bd2YnD7BmxW/mSyarZL39ddfBXntlnwyScUDue9dh9aPuQBaJmYWztqam3LhzzoG//S29KmjgwFD3XUpX1R312c/C00+H4bQz8SeeaPn5+OPh9NNzz1W+5hrYfPPWy02a1Hpc8vsnpVUTtbfxNV5Xct81N7dOWGPHtj5Q55/pn9/qXtZ0yTaN00+Hl1/OrSvrZBDLupuQDlAyEEm64ALYY4/c5+7dwzMDhg5tPW9HHpPZEWkH37Sz9kLztiVOIKVswx3eeKPluAkT0quUkuKDYrIU9tJLMG1ay/kOPhh+8IP0daR9tzlzQvcXpX7vOCnF323NmmzP2tMSaltWrw6X+mZ8Sa2SgUhHVaNkkPT886XPW+gAcvnlpS0/aVJuHWnJoLkZDjmk9fi2GmTjg2L+JZdpcSVLL0cdBf/7v2E47bsdfTR85zvh8Z+x/N5jk+KkFCf0j+ig2q4AABGcSURBVH+8tK5C2qs9NwP+6EfhUt9//SubmCJKBiLtFSeBeu4WodBB59xzS1v+k59sedZc6vqLnQGPHJl78tqPftR2DMn9m+wdNq3KZfHi8J6s9sm/5DW+HyItzunT246nHPn7pz3VRP/+d3hfvrzj8RShZCBSquQNUpA7SFW7ZFCOStZRl5MMCnGHE0+EFStKXybez6/lXVtS7IFCcaxpiXr48Nxwe/bP5Mlhvcl7JObNCw3Zbe2PtrY3cSI8+2zLcfE6M/47q+O/YpE6k39lUWcuGUC44aoc7UkG8+a1PKNtb7337NmhCicp7cAaJ4NS6+aT86XdVJfm5pvD+6hRuXEnnRQastOeY1Foe2l23z1c4ZWUduVVBvRwG5FybLJJrg66M5QM7ryz8LSddipvXWkNy4UO7nGd/XbbtRz/0EPlbRPCfs4vlRXadnywLbUROJ7fDB55JDd+yhTYeefQzUZaPElTpsDjj4fhQg358b5rT0kkv10jI3X8VyxSh159NdfI2RlKBlOK3NAfd4FdqrQDbKGD22WXpc9/5JHlbRMKn7Enk0HcEB1f2VToiqh8550X3ufPbzl+0KDQEJ2WcOJxEyeGq88efrjw+uN540bvcpPBkiW5fqBUTSRSR3bYIVzzD52jZFBJaQf3YtU+P/tZZbb7+uvp45MH1i22aFltU2oyyL8/I+nmm8NvW+hqp
NtvD53hJZNqWwf7ci8tPfroXGO4koFIndkh6nvx9NPDez2XDCrpS19qPa5YMkh2P5GF/G3HZ9DQsvqno955p+Xn/HUmq4b2269443i5JYO4l1hQm4FI3dlyy5YHokYpGaQplgzyr4qp9HaKbTs+6C5Z0vFtt5VQ8ktM3/gG/PWvxeMqVfKmPJUMROpco5QM0sTXwGct7cD/y1+2/Jw8Q49LBqee2vFtJ3/fKVPg0ktbTs+vkorvocj3zDOFk8GECS3vcYjbepIJQMlApM41csng05+uznZKOaO+/vrccHu6fRg2rO157r679bj8kkFaj6kABx5YOK499oBdd819nhs99iVZMshYA/8Vi1RIsZJBfo+g0j7l3p9QKHn8/e/lbzv5+6Yl/vySwfvvFy4xlfNoz6lTW85/2GGwYEFpy7eDkoFIRxUrGeh5vZVRbl17oUbcY4/tWBylJINXXw33cKT1hJr8HmeeWXg7b70FH/tY6+dQjxtXcqjlUjIQ6ahGbjOollom1Ysuyg2n/dbl9PKabFS/6qrC28y/7yGWYRfYSgYiWVLJoDJq+RyAa6/NDZfzNLx77mk9Lr+aaurU8m7mUzIQqWNZdnkswXHH1Xb7ZqHqJv/ZDVD4wTjDhrXudjr/2ccf+xicdVbrZfN7Wo0VK010kJKBSEett1762V3yjFI6JuPum0syYEDLK5Zixe52zq/ueeut1vOk3QVd6P6I9vTtVCIlA5FK6NGj9WMlS30k5L77ZhOTVMfo0YWnlfI4zXIvg017VGkFKBmIVMo554T3r341PGz9K18pbbkrr8wuJqmt1avbnmfq1PLWedNN7YulDUoGIpUycGB4/9znwvN8e/cOXV4npT0ics894YEH4JZbOrb9Rr75rV6VkgzqhP56RCrlpJNC/e/Xv54b17dvy3nS6od79oShQ2Gjjdq/7Vdeqe0VN+3RowG6RivluQp1ksTrIwqRrsAsnPknLz+84462n/Mbzx+3L2y2WXnb7dYNdtutvGUK+fa3c8PF2jI23bTj2zrmmI6vo97l93iaptwknlHSVzIQyVLfvuHZuEl33w1jxrSeN34qWLE7U9NU8uCwzz654f/6r8LzJZNGe+lmvfZpT79LJVAyEKm2L38ZDj0UXngh99Q0gIMOCtel5/fGOWtWaevt37/0GI46qu150h75GKvEgTx+VKSUR8lApIvZZx84++yW4z7zmdZ16cUOygAnnBDeN9ggffruu7f8/PDDcOut6fOawRFHhOFil8VW4oC04YYdX0cjUjIQaTAbbwxPPgl9+uTG/ehH4XGMsSVL4M9/DsOFztbzSwGf/GTLefOTyEc+Et4XLSocW6EHv5ej3LaRSpk+PVzt1VkpGYg0kGnTYOZMOPjgluMvuaTl1UpbbBGuRoLWyWC99UKDdlqf+MkuNL75zfC4xlh8xl7soJPWhUL+sw1+85vCy0PhkkzWBg4M94EMGVKb7XeUkoFIJ1dO/zq77AJbbZX7PHYsTJ5cfJk4GVx1FTz/fLj79YknYOedW8+74YYwfHgY7tkzdzWSWe4gXeigs+WWIT53+M//zI1//PHQdXMsvwosFieibt3Cc4ufey49vvY46ig477zC07fZJjf8sY+1bxu1pmQg0omtXQujRrV/+QMPhEGDis9z0UWhveGUU1peFnryyfDgg+EmuKSNNw7v3bvn2hV23jl3IE7rc2faNHj99dznuIoqluyYLe2AfswxcN11YbhbN/jsZ1uWSiAkvbQO4QAuuyx9fGy33cI6k048MTecrB5atar4utqSfDJZNXXWZGBmQ8xsqplNN7PzU6avb2Yjo+nPm1n/rGMSqbpu3Sp7KeWwYS3bEiA8uOXDD3MH+ZhZqBLJv6ntoIPC+5Ah4XLWpqZwII2rpuLpSfkllvx1DhjQ8nN+Q/W99+aqZy68MP27DRoE224btpXvP/4jfZlY376tG77vvBPmzAntHP365cavXBneb745XOGVZsSI8
B63oyQ9/ngoiVSyO5GTToIzzig+T0bJAHfP7AV0B2YAOwPrAROAQXnz/DcwIho+ERjZ1nr33ntvF5EyTZ3qPny4e3Nzbtzq1enzLlsW3nPd7YVXmvxp4H7qqbnPzzwTxu2yS+HYrrnG/XOfc587N33d8evpp93Hj3f/y19y4yZNCt/pr391X7vW/f77w/jdd3efNq3wNo8+Osw3apT7qlXu55zjfthhufU+9FBu3sWLC++LuXNbTyv2uuWW3HZ+85uW06ZMCeu8+OLw+dOfbr38CScU/k5tAJq80PG60IRKvIADgIcSn38C/CRvnoeAA6LhHsAiwIqtV8lApEpmzQoHwksvdX/rrfR55sxxf+ON3OdksnF3X7nSfeut3e+7r/ztNzWFg/2qVeGgnbR4cS5pJS1ZErb37LPF1x0ng+eey4378MMw7vzzW867fHnhZBAvA+59+hROAmPH5vbNGWeEcY89lpv+/vutv5976/VMnFj8exVRLBlYmJ4NM/syMMTdvx19PhnYz93PSMzzSjTP7OjzjGieRXnrOg04DWDHHXfce+bMmZnFLSINYN48uO02OPfcllV4cTVM/lVYf/0r7L8//OMf8KlPwSc+kZv24YehDWLTTcPwDTeEqq7jjoP77guXsyYb1FevDo/A/MxnQvXV2rWF7yeZMiVcQNCvX7gooAMP+jGzl9x9cOq0jJPBV4DP5yWDfd39zMQ8k6N5kslgX3dfXGi9gwcP9qampsziFhHpioolg6wbkGcDOyQ+bw/MLTSPmfUANgMKPOZHRESykHUyeBHY1cwGmNl6hAbi/McCjQbiu2i+DPzLsyyuiIhIK5l2KO7ua8zsDEIjcXfgRnefbGYXEhoyRgN/Av5sZtMJJYITC69RRESykPnTJdz9AeCBvHEXJIZXAyU+H1BERLKgO5BFRETJQERElAxERAQlAxERIeObzrJiZguB9t6C3JvQ5UU9q/cY6z0+UIyVUO/xQf3HWG/x7eTufdImdMpk0BFm1lToDrx6Ue8x1nt8oBgrod7jg/qPsd7jS1I1kYiIKBmIiEhjJoPrah1ACeo9xnqPDxRjJdR7fFD/MdZ7fOs0XJuBiIi01oglAxERyaNkICIijZUMzGyImU01s+lmdn6NYtjBzMaY2RQzm2xmZ0Xjh5vZHDMbH72OTCzzkyjmqWb2+SrF+ZaZTYpiaYrGbWlmj5jZtOh9i2i8mdmVUYwTzWyvjGP7aGI/jTez5Wb2g1rvQzO70cwWRE/vi8eVvc/M7OvR/NPM7Otp26pwjJea2WtRHH8zs82j8f3NbFVif45ILLN39PcxPfoelra9CsVX9u+a5f96gRhHJuJ7y8zGR+Orvg/brdDzMLvai9CF9gxgZ2A9YAIwqAZxbAfsFQ33Al4HBgHDgfNS5h8Uxbo+MCD6Dt2rEOdbQO+8cb8Dzo+GzwcuiYaPBB4EDNgfeL7Kv+s8YKda70PgEGAv4JX27jNgS+CN6H2LaHiLjGM8AugRDV+SiLF/cr689bxAeMa5Rd9jaIbxlfW7Zv2/nhZj3vTfAxfUah+299VIJYN9genu/oa7fwD8BTim2kG4+9vuPi4aXgFMAfoVWeQY4C/u/r67vwlMJ3yXWjgGuCUavgU4NjH+Vg+eAzY3s+2qFNNhwAx3L3ZHelX2obs/Seun9JW7zz4PPOLuS9z9HeARYEiWMbr7w+6+Jvr4HOGJhAVFcW7q7s96OKrdmvheFY+viEK/a6b/68VijM7ujwfuLLaOLPdhezVSMugHzEp8nk3xg3DmzKw/sCfwfDTqjKiofmNcnUDt4nbgYTN7ycxOi8Zt4+5vQ0hqwNY1jhHCw5CS/3j1tA+h/H1W67/TbxLOUmMDzOxlM3vCzA6OxvWL4opVI8Zyftda7sODgfnuPi0xrl72YVGNlAzS6uNqdl2tmW0CjAJ+4O7LgWuBgcAewNuEoibULu4D3X0vYCjwPTM7pMi8NYnRwqNUjwbujkbV2z4sp
lBMNYvVzH4GrAFuj0a9Dezo7nsC5wB3mNmmNYix3N+1lr/3SbQ8OamXfdimRkoGs4EdEp+3B+bWIhAz60lIBLe7+z0A7j7f3de6ezNwPblqjJrE7e5zo/cFwN+ieObH1T/R+4JaxkhIVOPcfX4Ua13tw0i5+6wmsUYN1V8EvhZVWxBVvyyOhl8i1MN/JIoxWZWUaYzt+F1rtQ97AP8JjIzH1cs+LEUjJYMXgV3NbEB0RnkiMLraQUR1in8Cprj75YnxyTr244D4SoXRwIlmtr6ZDQB2JTQ8ZRnjxmbWKx4mNDC+EsUSX93ydeDviRiHRVfI7A8si6tGMtbiLKye9mFCufvsIeAIM9siqg45IhqXGTMbAvwYONrdVybG9zGz7tHwzoT99kYU5woz2z/6ex6W+F5ZxFfu71qr//XDgdfcfV31T73sw5LUsvW62i/CFRyvE7Lzz2oUw0GE4uBEYHz0OhL4MzApGj8a2C6xzM+imKdShSsOCFdhTIhek+N9BWwFPAZMi963jMYbcHUU4yRgcBVi3AhYDGyWGFfTfUhITG8DHxLO/L7Vnn1GqLefHr2+UYUYpxPq2OO/xxHRvF+Kfv8JwDjgqMR6BhMOyjOAq4h6M8govrJ/1yz/19NijMbfDHwnb96q78P2vtQdhYiINFQ1kYiIFKBkICIiSgYiIqJkICIiKBmIiAhKBiJFmdnPLPQuOzHqdXI/Cz2kblTr2EQqSZeWihRgZgcAlwOHuvv7Ztab0AvmM4T7AhbVNECRClLJQKSw7YBF7v4+QHTw/zLQFxhjZmMAzOwIM3vWzMaZ2d1Rv1PxMyEuMbMXotcu0fivmNkrZjbBzJ6szVcTaUklA5ECooP6WMLdzo8CI939CTN7i6hkEJUW7iHc/fqemf0YWN/dL4zmu97df2Nmw4Dj3f2LZjYJGOLuc8xsc3dfWpMvKJKgkoFIAe7+LrA3cBqwEBhpZqfkzbY/4SErT1t4utXXCQ/aid2ZeD8gGn4auNnMTiU8iEWk5nrUOgCReubua4HHgcejM/r8x1Aa4WE0JxVaRf6wu3/HzPYDvgCMN7M9POrZUqRWVDIQKcDCs5Z3TYzaA5gJrCA8shTCk8EOTLQHbGRmH0ksc0Li/dlonoHu/ry7XwAsomV3yyI1oZKBSGGbAH+08ID4NYTePU8jdJ39oJm97e6fiaqO7jSz9aPlfk7oMRNgfTN7nnDiFZceLo2SjBF6Mp1QlW8jUoQakEUykmxornUsIm1RNZGIiKhkICIiKhmIiAhKBiIigpKBiIigZCAiIigZiIgI8P8McjSkUsZIJQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "steps = steps_loss[\"step\"]\n", - "loss_value = steps_loss[\"loss_value\"]\n", - "steps = list(map(int, steps))\n", - "loss_value = list(map(float, loss_value))\n", - "plt.plot(steps, loss_value, color=\"red\")\n", - "plt.xlabel(\"Steps\")\n", - "plt.ylabel(\"Loss_value\")\n", - "plt.title(\"Change chart of model loss value\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "从上面可以看出来大致分为三个阶段:\n", - "\n", - "阶段一:训练开始时,loss值在2.2上下浮动,训练收益感觉并不明显。\n", - "\n", - "阶段二:训练到某一时刻,loss值迅速减少,训练收益大幅增加。\n", - "\n", - "阶段三:loss值收敛到一定小的值后,开始振荡在一个小的区间上无法趋0,再继续增加训练并无明显收益,至此训练结束。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 验证模型" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "得到模型文件后,通过运行测试数据集得到的结果,验证模型的泛化能力。\n", - "\n", - "搭建测试网络来验证模型的过程主要为:\n", - "\n", - "1. 载入模型`.ckpt`文件中的参数`param_dict`;\n", - "2. 将参数`param_dict`载入到神经网络LeNet中;\n", - "3. 载入测试数据集;\n", - "4. 
调用函数`model.eval`传入参数测试数据集`ds_eval`,生成模型`checkpoint_lenet-{epoch}_1875.ckpt`的精度值。" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:54:25.073201Z", - "start_time": "2021-02-03T08:54:24.124960Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============== Starting Testing ==============\n", - "============== Accuracy:{'Accuracy': 0.9697516025641025} ==============\n" - ] - } - ], - "source": [ - "from mindspore import load_checkpoint, load_param_into_net\n", - "\n", - "# testing relate modules \n", - "def test_net(network, model, mnist_path):\n", - " \"\"\"Define the evaluation method.\"\"\"\n", - " print(\"============== Starting Testing ==============\")\n", - " # load the saved model for evaluation\n", - " param_dict = load_checkpoint(\"./models/ckpt/mindspore_quick_start/checkpoint_lenet-1_1875.ckpt\")\n", - " # load parameter to the network\n", - " load_param_into_net(network, param_dict)\n", - " # load testing dataset\n", - " ds_eval = create_dataset(os.path.join(mnist_path, \"test\"))\n", - " acc = model.eval(ds_eval, dataset_sink_mode=False)\n", - " print(\"============== Accuracy:{} ==============\".format(acc))\n", - "\n", - "test_net(network, model, mnist_path)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "其中:\n", - "\n", - "- `load_checkpoint`:通过该接口加载CheckPoint模型参数文件,返回一个参数字典。\n", - "\n", - "- `checkpoint_lenet-1_1875.ckpt`:之前保存的CheckPoint模型文件名称。\n", - "\n", - "- `load_param_into_net`:通过该接口把参数加载到网络中。\n", - "\n", - "经过1875步训练后生成的模型精度超过95%,模型优良。\n", - "\n", - "我们可以看一下模型随着训练步数变化,精度随之变化的情况。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`eval_show`将绘制每25个`step`与模型精度值的折线图,其中`steps_eval`存储着模型的step数和对应模型精度值信息。" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:54:25.213489Z", - "start_time": 
"2021-02-03T08:54:25.078949Z" - } - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3de5gcZZn38e+PhIQAgUASDCEkEyGwRBDEyOFlPaxyCiKIi3IQOYiy7oLKoruLi6/L4uouKKKu7CIgclIQEDXvCgaWo6AoiQYhnGYIgQQChEMIEExIcr9/PNWkZ9LdUzPTNd09/ftcV11dXae+u3qm7nqeeuopRQRmZta+Nmh0AGZm1lhOBGZmbc6JwMyszTkRmJm1OScCM7M250RgZtbmnAisTyR1SApJw3Mse7ykuwYjrnYh6eOSbhrA+jdKOq6eMfXyebn/XqxxnAiGMEkLJa2SNK7H9HnZP2dHYyKz/oqIH0XE/nmWlXSmpCt7rD8zIi4rJrrBkf3tbt/oOIYSJ4Kh73HgqNIbSbsAoxoXTnNoxTPUVoy5ntr9+xfJiWDouwI4tuz9ccDl5QtI2lzS5ZKWSnpC0pclbZDNGybpm5Kel7QA+GCFdX8gaYmkpyT9m6RheQKTdK2kZyS9LOlOSW8rmzdK0rlZPC9LukvSqGzeX0r6jaRlkhZJOj6bfrukT5Vto1vVVHYmebKkTqAzm/adbBvLJc2V9O6y5YdJ+mdJj0l6JZu/raTzJZ3b47v8P0mnVviOF0j6Zo9pv5B0WjZ+etn2H5R0WI/475Z0nqQXgTMrfKeK8Us6EPhn4AhJr0q6r+c+krRB9ls/Iem57G9g82xeqUrnOElPZr//GTV+y6q/V+bjlbYjaQ9Jv81+yyWSvidpRLXfTNKd2az7su91RLWYrA8iwsMQHYCFwL7AI8BOwDBgETAFCKAjW+5y4BfAaKADeBQ4MZv3GeBhYFtgS+C2bN3h2fyfA98HNgG2An4P/E0273jgrhrxfTL7zJHAt4F5ZfPOB24Htsni/j/ZcpOBV0ilnA2BscBu2Tq3A58q20a3z8/ivjn7HqOyacdk2xgOfAF4Btgom/cPwP3AjoCAXbNl9wCeBjbIlhsHrADeUuE7vifb58rebwG8DkzM3n8UmEg6KTsCeA3Yuiz+1cBns/hGVfhOteI/E7iyRzxv7qNs/3cBbwU2Ba4HrsjmdWT766Lsc3cFVgI7Vfktq/1eNbcDvBPYK4u/A3gIOLWX3yyA7Rv9/zWUhoYH4KHAH3ddIvgy8O/Agdk/1fDsn6kj+6ddCUwvW+9vgNuz8VuBz5TN2z9bdzjwlmzdUWXzjwJuy8a7HbR6iXVMtt3Ns4Pi68CuFZb7EvCzKtt48yBX6fOz7b+/lzheKn0uKYEeWmW5h4D9svFTgBuqLCfgSeA92ftPA7fW+Px5pc/M4n+yx/ya+7RH/GdSOxHcAvxd2bwdgTfKDsoBTCqb/3vgyAqfWev3yr2dbN6p5b9vpd8MJ4K6D64aag9XAEeTDiKX95g3DhgBPFE27QnSmR2ks9VFPeaVTCGdlS/JivbLSKWDrXoLKKt2+Y+sWmQ5KWmV4hkHbAQ8VmHVbatMz6v8uyDpC5IeyqozlpESUenieq3Puox0Nk72ekWlhSIdua5m3XWao4EflX3+sUoX70v7b+eyz18v3p56ib83E1n/dy8l+JJnysZXkEoOPdX6vWpuR9IOkv4nqyJcDny9Qvw194ENnBNBG4iIJ0gXjQ8iFf/LPU86C5xSNm0y8FQ2voR0QCyfV7KIVCIYFxFjsmGziHgbvTsaOJRUYtmcdOYI6Qz6eeDPwHYV1ltUZTqkapWNy95PqLDMm93tZvXp/wR8DNgiIsYAL2cx9PZZVwKHStqVVO
328yrLAVwFHC5pCrAn8NPs86eQqkxOAcZmn/9A2ed3i7enHPH31rXw06z/u68Gnu1lvZ5q/V69+W9S1eO0iNiMdF1DPZZxF8kFcyJoHyeSitivlU+MiDXANcDXJI3ODk6nkQ50ZPM+J2mSpC2A08vWXQLcBJwrabPs4uN2kt6bI57RpCTyAung/fWy7a4FLgG+JWliVnrYW9JI0tn0vpI+Jmm4pLGSdstWnQd8RNLGSs0LT8wRw2pgKTBc0leAzcrmXwx8VdI0JW+XNDaLcTFwL6kk8NOIeL3ah0TEH7PPuBiYHRHLslmbkA5ySwEknUAqEeTVW/zPAh3KLvxXcBXw95KmStqU9Bv8JCJW9yGG3n6vPN9hOfCqpL8A/jbHOs+SrmtYnTgRtImIeCwi5lSZ/VnS2fQC4C7gx6R/bEhnrLOB+4A/sH6J4lhS1dKDpPrp64Ctc4R0Oakq4qls3Xt6zP8i6ULtvcCLwNmki7NPkko2X8imzyNdgAQ4D1hFOlBcRlkVTBWzgRtJF8efIJ3VlldDfIuUCG8iHax+QPemt5cBu1ClWqiHq0ilnx+XJkTEg8C5wG+zmHcB7s6xrbzxX5u9viDpDxXWvySL/U5SifHPpL+F/qj4e+Vc72hSA4CLgJ/kWOdM4LKsOu1j/YrWuim1ZDCzPpL0HlLJqSM7KzZrSS4RmPWDpA2BzwMXOwlYq3MiMOsjSTsBy0hVYN9ucDhmA+aqITOzNucSgZlZmyusEydJlwAHA89FxHpN4iQJ+A6pBcgK4PiIqNSyoZtx48ZFR0dHnaM1Mxva5s6d+3xEjK80r8je/C4Fvsf6d7KWzASmZcOepBtL9uxtox0dHcyZU60VpJmZVSLpiWrzCqsaiog7Se2JqzkUuDySe4AxkvK0Pzczszpq5DWCbeh+88ti1vVv042kkyTNkTRn6dKlgxKcmVm7aGQi6NmfCFTpUyQiLoyIGRExY/z4ilVcZmbWT41MBIvp3pnZJFInWGZmNogamQhmAcdmnXntBbycdWJmZmaDqMjmo1cB7wPGSVoM/Aup73oi4gLgBlLT0S5S89ETiorFzMyqKywRRMRRvcwP4OSiPt/MzPIp8j4CMzPLa80aWL4cli3rPrz88rrxgw+GGTPq/tFOBGbWPlasgAcegPvug3nz4OmnYcQIGDkyvZYPlablnb5yZeUDebUD/LJl8Morvcc/YYITgZlZLhHpID9vXjrol4bOTlib9Ro+ejRMmQKrV6cD96pV6w9r1tQvpg02gM03hzFj1g3bb7/+tDFjKk8bPRqGDatfPGWcCMysta1aBQ8+2P2Af9998MIL65aZOhV23RWOPDK97rordHSkg3Mta9asnxyqJY3S9JUrU+mg54F8001BlW6fajwnAjMbmD//GZ58EhYuhEWL0tn4RhvBqFFpKI1Xm7bhhvkPkEuXdj/Yz5sHDz2UzuohbXPnneGww2C33dIBf5dd0hl2fwwbti7mIcyJwMxqe+01eOKJNCxcuO61NP7MMwPb/gYbVE4Y5YljzRqYPx+WlN1qNHFiOtgffPC6s/xp0wqrPhnKnAjM2t3y5esf5MsP9s8/3335DTdMdetTpsAHP5heOzrS6+TJ6UD8+uuppFD+Wmlantdly1K9/n77rTvg77orjBs36LtqqHIiMBuK3ngjHcCXLoXnnqs8LF6cDvQvvdR93Y02Wndw3333dQf5jo40TJjQe926tRQnArNWEJHOjKsd1HsOL1bpAX74cNhqqzRsvTXsvXf3M/qOjjSvSS9qWjGcCMya0Zo1cMstcOmlcMcd6eBeuiDa09ix6eA9fny6UFo60FcaxozxQd7W40Rg1kwefTQd/C+/HJ56CrbcMtXDT5pU+cA+dmyqszcbACcCs0ZbvhyuuQZ++EP4zW9S/fvMmfDtb8OHPpTapJsVyInArBHWroXbbksH/+uvTy1kdtoJzjkHjjkm1d+bDRInArPB9Nhj66p+nnwy1dkff3wa3v
Uu199bQzgRmBXtlVfguuvS2f+vf50O9vvvn87+Dz00Ndc0ayAnArMirF0Ld96ZDv7XXZd6vdxhB/j61+ETn0gXf82ahBOBWT09/jhcdlkaFi6EzTaDj38cTjgB9trLVT/WlJwIzOph4UI48US49dZ0sP/AB+BrX4MPfxg23rjR0ZnV5ERgNlBLl6Y6/6VL4atfhWOPTX3umLUIJwKzgXj11XTD16JF8L//C/vs0+iIzPrMicCsv954Aw4/HObOhZ/9zEnAWpYTgVl/rF2brgnMng0XXQSHHNLoiMz6zX3JmvXH6afDFVekawKf+lSjozEbECcCs7467zz4xjfg5JPhjDMaHY3ZgDkRmPXFj38Mp52Wrg185zu+L8CGBCcCs7xuvjn1CfTe96ZqIT8b14YIJwKzPObOhY98JPUQ+otfuH8gG1KcCMx609mZng8wdizceCNsvnmjIzKrKycCs1qeeQYOOCA1F509GyZObHREZnXn+wjMqlm+PJUEnn029SG0446NjsisEE4EZpWsXAmHHQYPPACzZsGeezY6IrPCOBGY9bR2beo47tZbU3fSM2c2OiKzQvkagVm5CPj7v08Pkz/nnJQQzIY4JwKzcmefDd/9bkoGX/xio6MxGxROBGYll14KX/oSHH00fPObvmvY2oYTgRnAL3+ZOo/bb7/0nOEN/K9h7aPQv3ZJB0p6RFKXpNMrzJ8s6TZJf5T0J0kHFRmPWUX33AMf/Sjsthv89KcwYkSjIzIbVIUlAknDgPOBmcB04ChJ03ss9mXgmoh4B3Ak8F9FxWNW0cMPpyeMTZwIN9wAo0c3OiKzQVdkiWAPoCsiFkTEKuBq4NAeywSwWTa+OfB0gfGYdffUU+mu4eHD013DW23V6IjMGqLI+wi2ARaVvV8M9Lwr50zgJkmfBTYB9q20IUknAScBTPZDwa0eli2DAw+EF1+EO+6A7bZrdERmDVNkiaBSk4vo8f4o4NKImAQcBFwhab2YIuLCiJgRETPGjx9fQKjWVl5/PT1a8pFH0rOGd9+90RGZNVSRiWAxsG3Z+0msX/VzInANQET8FtgIGFdgTGZw3HFw113pmQL7ViyEmrWVIhPBvcA0SVMljSBdDJ7VY5kngQ8ASNqJlAiWFhiTtbsXXoBrr4V//Ec44ohGR2PWFApLBBGxGjgFmA08RGodNF/SWZIOyRb7AvBpSfcBVwHHR0TP6iOz+unsTK/77NPYOMyaSKGdzkXEDcANPaZ9pWz8QcD/kTZ4urrS67RpjY3DrIn49klrL52d6a7hqVMbHYlZ03AisPbS1QWTJ8PIkY2OxKxpOBFYe+nshO23b3QUZk3FicDaS1eXrw+Y9eBEYO3jhRfgpZdcIjDrwYnA2odbDJlV5ERg7aN0D4FLBGbdOBFY++jqSk8de+tbGx2JWVNxIrD20dnppqNmFTgRWPtwiyGzipwIrH34HgKzipwIrD28+GJqOuoSgdl6nAisPbjFkFlVTgTWHnwPgVlVTgTWHjo7U9NR9zpqth4nAmsPXV2w7baw0UaNjsSs6TgRWHtw01GzqpwIrD246ahZVb0mAklzJJ0saYvBCMis7l58MQ0uEZhVlKdEcCQwEbhX0tWSDpCkguMyq59SiyGXCMwq6jURRERXRJwB7AD8GLgEeFLSv0rasugAzQbMTUfNasp1jUDS24FzgW8APwUOB5YDtxYXmlmdlJqOutdRs4qG97aApLnAMuAHwOkRsTKb9TtJ+xQZnFlduOmoWU29JgLgoxGxoNKMiPhIneMxqz+3GDKrKU/V0KckjSm9kbSFpH8rMCaz+vI9BGY15UkEMyNiWelNRLwEHFRcSGZ19NJL6aH1LhGYVZUnEQyT9OYjnSSNAvyIJ2sNbjFk1qs81wiuBG6R9EMggE8ClxUalVm9uPtps171mggi4hxJ9wMfAAR8NSJmFx6ZWT2UHli/3XaNjsSsaeUpERARNwI3FhyLWf11dsKkSW46alZDnr6G9p
J0r6RXJa2StEbS8sEIzmzA3GLIrFd5LhZ/DzgK6ARGAZ8C/rPIoMzqxvcQmPUqb9VQl6RhEbEG+KGk3xQcl9nAlZqOukRgVlOeRLBC0ghgnqRzgCXAJsWGZVYH7nXULJc8VUOfyJY7BXgN2Bb46yKDMqsL30NglkvNEoGkYcDXIuIY4M/Avw5KVGb1ULqHwL2OmtVUs0SQXRMYn1UNmbWWUq+jo0Y1OhKzppbnGsFC4G5Js0hVQwBExLd6W1HSgcB3gGHAxRHxHxWW+RhwJumu5fsi4uhckZv1xi2GzHLJkwiezoYNgNF5N5xVK50P7AcsJj3qclZEPFi2zDTgS8A+EfGSpK36ErxZTV1dcNhhjY7CrOnl6WKiv9cF9gC6Ss8ykHQ1cCjwYNkynwbOz3o0JSKe6+dnmXW3bBk8/7wvFJvlkOcJZbeRqm26iYj397LqNsCisveLgT17LLND9hl3k6qPzoyIX1WI4STgJIDJkyf3FrKZm46a9UGeqqEvlo1vRGo6ujrHeqowrWdCGQ5MA94HTAJ+LWnn8ucfAETEhcCFADNmzFgvKZmtp9RiyCUCs17lqRqa22PS3ZLuyLHtxaR7Dkomka419Fzmnoh4A3hc0iOkxHBvju2bVVcqEbjpqFmv8nQ6t2XZME7SAcCEHNu+F5gmaWrW/PRIYFaPZX4O/FX2OeNIVUUVn49s1ielXkc33rjRkZg1vTxVQ3NJVToiVQk9DpzY20oRsVrSKcBsUv3/JRExX9JZwJyImJXN21/Sg8Aa4B8i4oX+fRWzMl1dvj5gllOeqqGp/d14RNwA3NBj2lfKxgM4LRvM6qez001HzXLKUzV0sqQxZe+3kPR3xYZlNgClpqMuEZjlkqfTuU+Xt+LJ2vx/uriQzAboscfSq1sMmeWSJxFsIOnNpqDZHcPue8ialx9Yb9YneS4WzwaukXQB6aLxZ4D1bvoyaxqlpqN+YL1ZLnkSwT+R7ur9W1LLoZuAi4sMymxAOjthm23cdNQspzyJYBRwUURcAG9WDY0EVhQZmFm/+YH1Zn2S5xrBLaRkUDIK+N9iwjGrA3c/bdYneRLBRhHxaulNNu4ytzWnl1+GpUtdIjDrgzyJ4DVJu5feSHon8HpxIZkNgHsdNeuzPNcITgWulVTqMG5r4IjiQjIbAD+w3qzP8nQxca+kvwB2JLUaejjrLdSs+ZTuIXDTUbPc8pQIICWB6aTnEbxDEhFxeXFhmfVTV5ebjpr1UZ4nlP0L6cEx00kdyM0E7gKcCKz5uMWQWZ/luVh8OPAB4JmIOAHYlXQfgVnz8T0EZn2WJxG8HhFrgdWSNgOeA/zYJ2s+y5fDc8+5RGDWR3muEczJuqG+iPSQmleB3xcalVl/uMWQWb/kaTVUevbABZJ+BWwWEX8qNiyzfnCvo2b9krfVEAARsbCgOMwGzr2OmvVLnmsEZq2hsxMmToRNNml0JGYtxYnAhg4/sN6sX6pWDUnastaKEfFi/cMxG4DOTvjQhxodhVnLqXWNYC7piWSqMC9wE1JrJm46atZvVRNBREwdzEDMBsRNR836rddrBEqOkfR/s/eTJe1RfGhmfeDup836Lc/F4v8C9gaOzt6/ApxfWERm/eF7CMz6Lc99BHtGxO6S/ggQES9JGlFwXGZ909UFW2/tpqNm/ZCnRPBG9sD6AJA0HlhbaFRmfdXZ6esDZv2UJxF8F/gZsJWkr5G6oP56oVGZ9ZXvITDrtzx9Df1I0lxSV9QCPhwRDxUemVley5fDs8+6RGDWT3lvKHsOuKp8nm8os6bx2GPp1SUCs37Je0PZZOClbHwM8CTg+wysOZRaDLlEYNYvVa8RRMTUiHgrMBv4UESMi4ixwMHA9YMVoFmv3Ouo2YDkuVj8roi4ofQmIm4E3ltcSGZ9VGo6uummjY7ErCXluY/geUlfBq4kVRUdA7xQaFRmfeEH1psNSJ4SwVHAeFIT0p8DW2XTzJqDH1hvNiB5mo++CHw+e3D92o
h4tfiwzHJ65RV45hmXCMwGIE+nc7tk3UvcD8yXNFfSznk2LulASY9I6pJ0eo3lDpcUkmbkD92MdU1HXSIw67c8VUPfB06LiCkRMQX4AnBhbytl3VKcD8wEpgNHSZpeYbnRwOeA3/UlcDPAnc2Z1UGeRLBJRNxWehMRtwN5evbaA+iKiAURsQq4Gji0wnJfBc4B/pxjm2bduftpswHLkwgWSPq/kjqy4cvA4znW2wZYVPZ+cTbtTZLeAWwbEf9Ta0OSTpI0R9KcpUuX5vhoaxudnTBhgpuOmg1AnkTwSVKroetJLYfGAyfkWK/aIy7TTGkD4DxSVVNNEXFhRMyIiBnjx4/P8dHWNtxiyGzA8rQaeolUh99Xi4Fty95PAp4uez8a2Bm4XRLABGCWpEMiYk4/Ps/aUWcnzJzZ6CjMWlqtTudm1VoxIg7pZdv3AtMkTQWeAo5k3VPOiIiXgXFln3c78EUnAcvt1VfddNSsDmqVCPYm1fFfRWrRU6mqp6qIWC3pFFJfRcOASyJivqSzgDkRUTPRmPXKD6w3q4taiWACsB/pLuKjgV8CV0XE/Lwbz/oouqHHtK9UWfZ9ebdrBrjFkFmd1Op9dE1E/CoijgP2ArpI9fmfHbTozGrxPQRmdVHzYrGkkcAHSaWCDtJjK90FtTWHri54y1tg9OhGR2LW0mpdLL6M1KrnRuBfI+KBQYvKLA8/sN6sLmqVCD4BvAbsAHwua+IJ6aJxRMRmBcdmVltXFxxwQKOjMGt5VRNBROS52cysMV59FZYscYnArA58sLfW5AfWm9WNE4G1Jj+w3qxunAisNfkeArO6cSKw1tTZ6aajZnXiRGCtqavLpQGzOnEisNbkewjM6saJwFrPa6+lpqMuEZjVhROBtR73OmpWV04E1nrcYsisrpwIrPW411GzunIisNbT1QVbbQWbubsrs3pwIrDW4xZDZnXlRGCtx/cQmNWVE4G1ltdeg6efdonArI6cCKy1uNdRs7pzIrDW4qajZnXnRGCtxU1HzerOicBaS1cXjB8Pm2/e6EjMhgwnAmstbjpqVndOBNZa3HTUrO6cCKx1rFgBTz3lEoFZnTkRWOtw01GzQjgRWOvwA+vNCuFEYK3D9xCYFcKJwFpHZ6ebjpoVwInAWodbDJkVwonAWofvITArhBOBtYZS01GXCMzqzonAWkOp6ahLBGZ150RgrcEthswK40RgrcG9jpoVptBEIOlASY9I6pJ0eoX5p0l6UNKfJN0iaUqR8VgL6+qCceNgzJhGR2I25BSWCCQNA84HZgLTgaMkTe+x2B+BGRHxduA64Jyi4rEW5xZDZoUpskSwB9AVEQsiYhVwNXBo+QIRcVtErMje3gNMKjAea2W+h8CsMEUmgm2ARWXvF2fTqjkRuLHAeKxVrVgBixe7RGBWkOEFblsVpkXFBaVjgBnAe6vMPwk4CWDy5Mn1is9axYIF6dUlArNCFFkiWAxsW/Z+EvB0z4Uk7QucARwSESsrbSgiLoyIGRExY/z48YUEa03MvY6aFarIRHAvME3SVEkjgCOBWeULSHoH8H1SEniuwFislfkeArNCFZYIImI1cAowG3gIuCYi5ks6S9Ih2WLfADYFrpU0T9KsKpuzdtbZ6aajZgUq8hoBEXEDcEOPaV8pG9+3yM+3IcIthswK5TuLrfl1djoRmBXIicCa2+uvu+moWcGcCKy5+YH1ZoVzIrDmVmox5BKBWWGcCKy5uemoWeGcCKy5dXbC2LGwxRaNjsRsyHIisObmpqNmhXMisObm7qfNCudEYM3r9ddh0SKXCMwK5kRgzavU66hLBGaFciKw5uUWQ2aDwonAmpe7nzYbFE4E1pzWroVHH4Utt3TTUbOCFdr76JC2bBnMnw8PPgjPP999nlT9fa15ld5HVH+tNa/aMpX0N77y8bVrYdUqWLkyDQMZX7kSVq9O291rr+pxm1ldOBH05uWX1x3w589fNzy93sPWDGDkyDSMGFF9fNSo9G
yBWsuUxvfbr9HfyGzIcyIoWb688gH/qafWLbPxxrDTTrDvvvC2t60bJkxYd3bc88y7/H2teZWWLW2z1mt/lqlXfD1tuGHlzzCzptZ+iWD5cnjooe4H+/nzU1fHJaNGpQP++9/f/YA/ZQpsMMQuq/jAbdb22icRXHwxnHVWukGpZKON0gH/fe9LB/rp09NrRwcMG9aoSM3MBlX7JIIJE+Dd7+5+hj91qg/4Ztb22icRHHxwGszMrJshVuFtZmZ95URgZtbmnAjMzNqcE4GZWZtzIjAza3NOBGZmbc6JwMyszTkRmJm1OUWt7ombkKSlwBONjqOHccDzvS7VPFopXsdanFaKt5ViheaMd0pEjK80o+USQTOSNCciZjQ6jrxaKV7HWpxWireVYoXWi9dVQ2Zmbc6JwMyszTkR1MeFjQ6gj1opXsdanFaKt5VihRaL19cIzMzanEsEZmZtzonAzKzNORH0QtK2km6T9JCk+ZI+n00/U9JTkuZlw0Fl63xJUpekRyQd0ICYF0q6P4trTjZtS0k3S+rMXrfIpkvSd7N4/yRp90GMc8ey/TdP0nJJpzbTvpV0iaTnJD1QNq3P+1LScdnynZKOG8RYvyHp4Syen0kak03vkPR62T6+oGydd2Z/P13Z9ynkwdZV4u3zby/pwGxal6TTBzHWn5TFuVDSvGx6w/dtn0WEhxoDsDWwezY+GngUmA6cCXyxwvLTgfuAkcBU4DFg2CDHvBAY12PaOcDp2fjpwNnZ+EHAjYCAvYDfNWg/DwOeAaY0074F3gPsDjzQ330JbAksyF63yMa3GKRY9weGZ+Nnl8XaUb5cj+38Htg7+x43AjMHcd/26bfPhseAtwIjsmWmD0asPeafC3ylWfZtXweXCHoREUsi4g/Z+CvAQ8A2NVY5FLg6IlZGxONAF7BH8ZH26lDgsmz8MuDDZdMvj+QeYIykrRsQ3weAxyKi1l3jg75vI+JO4MUKcfRlXx4A3BwRL0bES8DNwIGDEWtE3BQRq7O39wCTam0ji3eziPhtpCPX5az7foXHW0O1334PoCsiFkTEKuDqbNlBizU7q/8YcFWtbQzmvu0rJ4I+kNQBvAP4XTbplKzIfUmpeoCUJBaVrbaY2omjCAHcJGmupJOyaW+JiCWQkhuwVTa9GeIFOJLu/0jNum+h7/uyWeL+JOkstGSqpD9KukPSu7Np25Zge5kAAAULSURBVJDiK2lErH357Zth374beDYiOsumNeu+rciJICdJmwI/BU6NiOXAfwPbAbsBS0hFQ0hFvp4Gu43uPhGxOzATOFnSe2os2/B4JY0ADgGuzSY1876tpVp8DY9b0hnAauBH2aQlwOSIeAdwGvBjSZvR+Fj7+ts3Ol6Ao+h+EtOs+7YqJ4IcJG1ISgI/iojrASLi2YhYExFrgYtYV0WxGNi2bPVJwNODGW9EPJ29Pgf8LIvt2VKVT/b6XLZ4w+MlJaw/RMSz0Nz7NtPXfdnQuLOL0wcDH8+qJMiqWF7IxueS6tl3yGItrz4a1Fj78ds3et8OBz4C/KQ0rVn3bS1OBL3I6v9+ADwUEd8qm15ej34YUGpNMAs4UtJISVOBaaQLRIMV7yaSRpfGSRcLH8jiKrVWOQ74RVm8x2YtXvYCXi5VewyibmdUzbpvy/R1X84G9pe0RVbVsX82rXCSDgT+CTgkIlaUTR8vaVg2/lbSvlyQxfuKpL2yv/1jy77fYMTb19/+XmCapKlZyfLIbNnBsi/wcES8WeXTrPu2pkZfrW72AfhLUvHtT8C8bDgIuAK4P5s+C9i6bJ0zSGcBjzDIrQJIrSfuy4b5wBnZ9LHALUBn9rplNl3A+Vm89wMzBjnejYEXgM3LpjXNviUlqCXAG6QzuhP7sy9J9fNd2XDCIMbaRapDL/3tXpAt+9fZ38d9wB+AD5VtZwbpAPwY8D2yHggGKd4+//bZ/+Oj2bwzBivWbPqlwGd6LNvwfdvXwV1MmJm1OVcNmZm1OScCM7
M250RgZtbmnAjMzNqcE4GZWZtzIrC2oNSr6caNjqOWrNfKB3pf0qy+nAisXZxKumdhyMrucjXrMycCG1KyO6t/Kek+SQ9IOkLS54CJwG2SbsuW21/SbyX9QdK1WV9SpWc5nC3p99mwfYXPODPrEO12SQuy7a93Ri/pi5LOzMZvl3SepDuVnm3xLknXKz2f4N/KNj9c0mVZp2vXlUoxSv3Y35F1JDi7rIuL2yV9XdIdwOcL2ak25DkR2FBzIPB0ROwaETsDv4qI75L6dPmriPgrSeOALwP7Ruqcbw6pc7CS5RGxB+nOz29X+Zy/IHUvvQfwL1l/VL1ZFRHvAS4gdS1wMrAzcLyksdkyOwIXRsTbgeXA32Xb/k/g8Ih4J3AJ8LWy7Y6JiPdGxLmY9YOLkjbU3A98U9LZwP9ExK8rLLMX6UEnd6cuXxgB/LZs/lVlr+dV+ZxfRsRKYKWk54C35Iit1AfO/cD8yPp0krSA1HHaMmBRRNydLXcl8DngV6SEcXMW7zBSdwclP8FsAJwIbEiJiEclvZPU/8y/S7opIs7qsZhID4o5qtpmqoyXW1k2vob0v7Sa7qXsjaqss7bH+mtZ97/Y8/NKXS3Pj4i9q8TyWpXpZrm4asiGFEkTgRURcSXwTdLjBQFeIT1qFNKTuvYp1f9L2ljSDmWbOaLstbyk0Jtnga0kjZU0ktT1c19NllQ64B8F3EXqZG18abqkDSW9rR/bNqvIJQIbanYBviFpLamnyL/Npl8I3ChpSXad4HjgquyADemawaPZ+EhJvyOdKFUrNawnIt6QdBbpCXaPAw/3I/6HgOMkfZ/Uu+l/R8QqSYcD35W0Oen/9tukHi7NBsy9j5qVkbSQ1H30842OxWywuGrIzKzNuURgZtbmXCIwM2tzTgRmZm3OicDMrM05EZiZtTknAjOzNvf/AUE9FLljBccsAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "def eval_show(steps_eval):\n", - " plt.xlabel(\"step number\")\n", - " plt.ylabel(\"Model accuracy\")\n", - " plt.title(\"Model accuracy variation chart\")\n", - " plt.plot(steps_eval[\"step\"], steps_eval[\"acc\"], \"red\")\n", - " plt.show()\n", - "\n", - "eval_show(steps_eval)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "从图中可以看出训练得到的模型精度变化分为三个阶段:\n", - "\n", - "阶段一:训练开始时,模型精度缓慢震荡上升。\n", - "\n", - "阶段二:训练到某一时刻,模型精度迅速上升。\n", - "\n", - "阶段三:缓慢上升趋近于不到1的某个值时附近振荡。\n", - "\n", - "整个训练过程,随着训练数据的增加,会对模型精度有着正相关的影响,但是随着精度到达一定程度,训练收益会降低。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 推理预测" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "我们使用生成的模型应用到分类预测单个或者单组图片数据上,具体步骤如下:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 将要测试的数据转换成适应LeNet的数据类型。\n", - "2. 提取出`image`的数据。\n", - "3. 使用函数`model.predict`预测`image`对应的数字。需要说明的是`predict`返回的是`image`对应0-9的概率值。\n", - "4. 
调用`plot_pie`将预测出的各数字的概率显示出来。负概率的数字会被去掉。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "载入要预测的数据集,并调用`create_dataset`转换成符合格式要求的数据集,并选取其中一组32张图片进行推理预测。" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:54:26.317823Z", - "start_time": "2021-02-03T08:54:25.214530Z" - }, - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Row 1, column 2 is incorrectly identified as 8, the correct value should be 2 \n", - "\n", - "Row 3, column 7 is incorrectly identified as 9, the correct value should be 4 \n", - "\n", - "[5 8 0 2 7 4 1 7 8 6 6 8 7 9 5 8 7 2 0 4 5 9 9 3 9 1 3 9 7 6 3 4] <--Predicted figures\n", - "[5 2 0 2 7 4 1 7 8 6 6 8 7 9 5 8 7 2 0 4 5 9 4 3 9 1 3 9 7 6 3 4] <--The right number\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADsCAYAAADXaXXTAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nOy9e3xU1bn//14zCQkhIZBAgAAhBAmJRQWMcECRqEdQUYtHpFosSqVRlOpRvP70VFSoFxB+VQSNCIhQW8Bq661gq1guClJEpUQiJOGOJIFg7iQz6/vH3jOZazKZzN4zgfV+veaVzN5r7/nM2ms/s/aznmctIaVEoVAoFOZgCbcAhUKhOJtQRlehUChMRBldhUKhMBFldBUKhcJElNFVKBQKE1FGV6FQKExEGV2FQqEwkYgwukKwQQjqhKBKf+0Jtya/CJGOEB8hxEmEOIYQCxEiKtyyfCEESULwrhBUC8F+IfhluDX5QghihOANXWOlEHwtBFeHW5cvXNqo42UTgpfDras5hGCgfn+tDLcWfwjBDCHYLgT1QrA83Hqao61tIORGVwiCNUAzpCRefw0KqSh/BGcsFwHHgV7AEGAMcHcoZfkiyHp9BTgN9AAmA4uF4GchFeaDILRGAQfR6jIR+D9gtRCkh1iaG8HUqUsbjUer11pgTcjFedCG+wq0dvBVqLS0RJBajwCzgaUhltMs4WgDARtdISgRgseEYLcQnBSCZUIQKwS5QnBICB4RgmPAMr38tUKwUwgqhGCLEJzf2i8XNEKUIMRjCLFb75EuQ4hYhMhFiEMI8QhCOLUixLUIsRMhKhBiC0I0p7U/sBop65DyGPB3CN6QGVWvQtAJuBH4PympkpJNwN+AX0WaVimplpJZUlIiJXYp+QAoBi6MJJ0+mIj2A7wxGJ1maBWCm4EK4J/BajRDq5T8RUreA8rbqtNorR60vg1IKQN6gSwBuQtkX5BJIDeDnA0yF2QjyOdBxoDsCHIYyOMgR4C0grxNPz5GP9cikItczr0BZCnIMv28uYHq8vmCEgm7JPSVkCRhs4TZEnIlNEp4XkKMhI4Shkk4LmGEBKuE2/TjY/R
zLZKwyOXcd0lYISFOQm/9c24IVqtR9QpyKMhaj896EOT7kabVx+f0AFkHMivCdX4KclZb2qrB91VnkIX6uWeBXBmpWl0+YzbI5W26/yO8DbT2S9zl8v4akPv0L3EaZKzLvsUgn/E4fg/IMX7OPQJkgl4Jt4GsBDkg6ErXjOZdLu+vkbBPN7qnJcS67Fss4RmP4/dI8KlVQraEf+vGW0pYLkG0sXGEvF5BjgZ5zGPbb0BuiDStHmWiQf4D5GsRrjMNpA1k/6DbqcFaQf4B5CP6/6EyukbXayiNbkS2gdb6dA+6/L8fSNX/L5WSOpd9/YCZele9QggqgL4u5d2Qkq1SUikl9VLyJrAZuKaV2gLWipReWnXXgvbyp1UIC7AO+AvQCegGdAWeN0prG+q1Cujssa0zUBmBWgEQAgvwFpofekak6tSZAmySkuI26jREqxAMAf4bWBACfYZqNZCIbAOtdSL3dfk/Dc35DSA9yh0E5kjJnFae34EERJDHOmiVVqQMRGuSft6FSFkP1CPEMrQBgIfN0hpgvRYCUUIwUEp+0LddAPynDTrBoDYgBAJ4A21g4hopaYhEnS5MAZ4LUpsnRmjNBdKBA0K7k+IBqxCcKyXDIkyrUURmG2hld/07kH3QfCQbQf5e764f8iibA/Kg7jYQIDuBHA8ywcd5u4AcBzIWZBTIySCrQQ4K+vFCcy98J6GP1Hy6GyX8XncvHPIomyPhoO7TFRI6SRgvwUurXr5IwqMSoiR0kfCuhFVtfAwKeb3q5f8E8m293MUgT4H8WYRqfRXklyDjQ/RoaYhO/ZhRehv1WybcWkHGgezp8poHci3I7pGmVS8fpduAZ0G+5bAHkai1rW2gtV/iMZC7QVaAfFO/sF5fQi9/Fciv9LJHQa5xCNRvsFf1/7vr5Sr1sl+CvLJNjVkzuo9J2C2hQsKbUhv48ja6WvmrJHyllz0qYY3T6MKrEl51KTtEwgYJJyWU6WVT2tg4Ql6v+vskkO/pjeMAyF+2pV4NbAP9QEq0wbMql9fkSNLpUv41kG+1qY2apNXluFmExqdrVFudpbcB19esSNTa1jYgtBO0jBCUANOk5B8BHRBOhCgBpiFlxGttT/XaXrS2F52gtBpFJGuNiIw0hUKhOFtQRlehUChMJGD3gkKhUCjajurpKhQKhYkoo6tQKBQm0mxyxJWWmyLK9/CJfY3fhAmlNXj8aW0vOkFpbQtngtb2ohNUT1ehUChMRRldhUKhMJGIXPFAceZy4tcjKbvY/7QKA5c2IjbvNFGRQmEuYTG6tlxtvo3DubEAWOoh7Q87sdfUhEMOAFF9+1B0R1qL5SJBK4AlLo4D9w3BHqO9772hDuuGHWHV1BIVU0YyOG8Xy9L8z/d80abpJG02UdQZRCTeVwpvlHtBoVAoTMTwnq6IiaHipqHYopsG8366qhqAPaMXAVBwuoaZSyeAyb/IlguyASgb1oWqfoKCvEUtHhMJWgEa4wTrpr9An6h4AAZlT6FfwxCAiHo8d73+l87Yyou9Irs33p45PEbr4TracbjaqivW7t0pvfYcr+1dC2uByGqrZmGo0bUkJFCTm83bc+bRPzreyI8KGJEzGFunaAAKb9H+Fl+/OJySWsQ6MIOC6QmAp9amOt0zegUXxNwCQOqpLOy7vjdTok+Cuf4ixIE/1u7daTi3T6uPi959CFtpaWjFnIU0nNuHr+Z431/9P54GQOZZ6EoyzOiKmBhqcrP5/LV8XI2DJ/WygT0NKWA3OMxOCKzdunHZ8i94KGmfsZ8VIqzJSRAVxZ4nEym+PL/F8t8MfxuAq14aj/WmJGzlJ4yW6JdArn+VvY5SW6OhP8hl489h2+9b/6M6dM7d9FrrEmpZX4+t4lQIlYUWS0ICto4RFaoakViTk7BXVSPr67HExQEgEtrW/mSN1mu3Vwa2KIthRrfipqG8PWcezRlcgJdOZvHZZRnYysqMkgKAtVs38rZ8wdVxJ4FoQz8rVPT+qJ4
HenxATytAXMDHrc18h/s/uoIDIwyT1iKBXP+JhTdSu6C3bpgji/cfeYHKh5qGPG7eeQc9J0Su0d3z3Ll8ef08/V2nsGqJZHp/VM93Lw8lceWXHLhPc8etvXNeC0c1z/gP7wdg4D1bAypvmNG1RwmvHkx2/t30+azWbZu1ugFZussoGU1YBIOijxMjAjde4aZfx3KyO7Reb7wlljmp63l861gOXxMTlh6vr+sP0P9veZzzthYyFlVeS4fktq7KYwwOX7mDNUOXsPrbphXhN/06B7ndhHYbIDLWRoq1ydjmn0pl7bSxWMojR2M4sSYn0fujeuakrmfD73ZR+HAvhse9DhDUPeaKjLW1qnzIjW7FlJEAjP6tt9WP3y+xfP612zb1QOSNtUsih5enMilxCYH0WjLevZMBWUf4JPt957YUayce6PEJM6MmGKjUP9JHEmTGu3eSlf8T9m8KAKgbl0PvJ/e2eFxb6P6vo1z0+HQa4wTvP/KClzENlMzoTjzRrclPfutLPTn8VA4d1m0PlVQnjuuf+oQMyDdf9NxIZl+y2m3b8YbOiM07z/r76/S4HOTMMmKjGljQ+yPiLZ2YFH8K4sP31BJSo3vi11ocJuBzlLrLrYeoPTmcju9tC+XHtpm5JwawdO04bLGSTZPnufUYzMY6MIM9Tyay/cJX6OpDh0OrK4PePcn39/aAbPeyPa2w58XeDHoqHtsPRUbK9qLbjgqy8+922zbo3ZNOgwtQlRrNyvQNhupoLCohqagES1wc4zo/7Ixrbo4/3Po6Y+Oa74GvTN/ARanZJIVIpyvydAONW7tiOXUAO94JJR2ORZP++BfO972HHWVyQrkBSto/VanRfDX4Pf1dbFi1OAip0S27uKHZwPdPst/n0uk3UMtwgIgxvh8eOY+MNw7wwz19my23rb6Bmz+6n0E1uw3T0pCayL7Ll+Hpw32q9FwA1r6VS9q8LW777EC3zSOZ+rPRAM5r0NUax77Ll9G/Ko+sxTFuBs9o7N8UkPaNxzb9ryOI3xE6aIqemhr6PLul5YLAb6OncbpnA7fnbOHJ7v6v9U9XVZNYPCzkSSkOrY34TihZXxPN/VW/Ie0POzn26yH8MnWd2/GrKpN56/3LSOcLwonlgmxnhFAks6oymSc23eBzX+7P9gA0a9dai0qOUCgUChMxPQ34X+e9y5W/vQ6A07XG+MR8Ul/PzTvvYM3QJWRGd2LuiQF8eOQ8APYfTib2jhgKpyyiOR/q+srzGHjPVmePzUyWbx8FQOY83721pKVfsLtR86fPvK/azb1TfH0+F22dTtI3Pg81HUcQvyM5xsGtJbnEHwn/wJrj0X3tg7l8NjbTub1PfIWbO2TP6BVk776btA0Yxolz4b+7uve2x8Y1sPbOecxcOoEb8z71CoF8ff9oN/dDuCgb1sVnDPzcEwOIL+gQBkXurKpMBmDWu5PIfNS7vuTFQ9hyb3/tjZ+ebjDfxVCje9xWTf5JbcQ3r+u/nb5Sx4BP7swJsM7v4SHFVnGKnhNOMfmDqfw87Vveyb+c1L8eACD2jpgWs9EKG6r564HzSaLQDLlurK+JpsOxlh/TuqzQGs5Gy0j4vfsjb1U/QUrfPjQePGSIxkCxDsygrlejz30/PtafDp+b9CMcAKnztoAeTWTt3p3dN2bD7zaYquGclSd5IWssk/UYbAexwk718HRSopuiE7bVaz9Y+w90I5MSM2X6xF+iy9I147xcZGZqWF2VSGFdL956/zIAMnz8QFkGZ3HkoXr2eNS753n++Oo4Uhe27ruE1OiKOisFp5tSDuf/OJYDIzS/3f6tY1nQ+5/EW8LrzE66tpCNxNIz4Tt+eHwwgN7DbZ68PZNJutZYgytiYjjd2d24Hmqs4v43HiY9QH+kPwryFpEt7ybtqbYbXRETg6VLYssFGxvdwtWsXRJ9JnrYpJ19jbWIxsgba3cE0B+dNJCvH2+5nYQSa3IS9pcq+SbrQ699/aPjveKbb9mcB0D
24wegh5ZwZCsrg7N8HcR62UBRQ9MT1Pyn7yJx5Zd+fd7N1buD4oYq5j99FykrW39fhtToDnp0NzOfdglRamwENKN7+JoYJq65kb8380XMJBKDyX0lFFz3/MOkLdsZFpeGPypuGsqy2fNbLDf/xyvdEjQOL09l+4Wv4DlIuK+xlv+9eiqWwl0RF+LkCKBfN/0FWkr0CTW9P6pnQe+PCHTUffuYVwA4pkdr7mlIIX/UyLM+nfmlk1l8dsUA5/suFV83284CqffbZzxAl/XNn8cfITW69spK8JMKZys/QX1jeig/LmgKl+Tw5ysWthga5hrI37G81nDD5yuhILpatnpqvm4f7mVY7HR2/M6YOSXsUSKggPI5qevJd0komJS4xCsMzhnEX7gL2ejb7RAuSuaM5OVfaAH0vuJ7s/PvJmPpAYxS3a9jeaueDLta4/S/ji3HwRLiwOd2RrcP9/LPXSORP7acJOKaQBFv8W0bihuquH3GA8RtKMBeXx+UJtMG0gqX5LBswFKzPq55HZctZXhM8z5Sz0B+M3qavhIDgkkWsJWW0vMdGGq9u00JAa4ceXAUyWOPADA6ObB0xxSre0KBryeKSA3iL3puJLNuWN1svG78fhl2H7kvZh4dxr8WjsDaIOlS8XXLBxhEqBNdgsFWWgoB9PQtg7Owv1TpTKDwxK1O138dtMEFE43uHcM3kdsx/A/JgeoYkHVESzggB4D4gg7awEo7wVZaSq+1wm3+ANpwE1Rln+Y7Z5C5O/3/lkfsMa0pdR95FNCiVFpi7okBvJN/OSlERr1aEhLY89y5yFgbsy9Z3WzCQf+/5ZG1o8LQH+N38i9nVbfL3bbZs6vYM3qF27ZBG6dgKWj6Ye1cJEnSB1Uj7ccMaFM7NAJ58RCOPFSv+3B9P1n8uzyNpGWhqVPDja5jhQNHnjM0BfqXr08l1aRRVl86muOT7PfdMrzmjhjA0vimTLCMNw6EvJfTtbCWC7bd4pwtDLQA/Kp+o+i+U3uIDTShJKpvH364py/JVuNuu5O2GnI+v4fsF8ucGW8HZmmhbZxn2McagqO+vrzef0bitvoGfvHpdAC372wUKT5GxU9MHQmj3bd1/nsnkpZFxg9Xe+RkZke+Gb7c5z4jbJVKjlAoFAoTMbSna+3enaOTBrqtcAAtB/qbpaM1PJS0j4dcYnmv/GwqlhD3dMXmnaREDQWX0MA9o1fAaJdJn30/4bsRlZFO0ZRU72SPEHZ6CxuquenraQyaUYSt4hSnx+VQlRqNPbsq4HM8lLSPfveuYm71L53bUj4/SmNRSeiEBoDf+vJgfeV5ZN6hxRG3bl4p44iUhJJWEQE+D3nxEE5mdgSgPMe3k2juiQGsfSsXIKSuxZAb3ai+2iz9pzO6U5odq8c2Nhm6QAP9Q0nDuX28dEQq1uoG5p4YwL1dvydGNNVT52Qt9M4+Zqhb+agde7FXVmIdmEFDqhY7W3RZx4CWHmoLG2oGkrIgloYLMgDo/eTeZievyT+VSm7cD2RGuxu1SfGnmOSyssDw/286XU0wuq2tr3Amx0DTfVXVz90hGmkJJa7EnrKTfyqVvMQj4ZbixQ+/jqL4av/RPcEmPgRCUEa3ueB4x6QxhVO8v1CoAv3DidGB/HL7Lj67YgDXbv2O7A5NRtfp5/VIkBlzZx6dtpWw50nHRDm+OdRYheV06HTmJR4h723/nwfuQelrp43llYfG8Kchbzj3J1jsbk8exQ1VWExKkCh4NIniq5f43X+osYpKu+sk5tPoOcG8CYM8Kfq1tlK10T+moSTuL1tZUz6uxXZiNtYuiVhi/D+rtCXxIRCCMrrNBcc3Ddx4P6ZFYqB/a4m0QP7lC+dTJy0tri4Rjrp3DUq3lO+i9+2dmBnTlDxzdOI5bllebQk4DzXXPf8wvdY2zfXbu/5IxLgUFG3DX5KOA6PbYVBGN9DgeGgKJu7wUwO9dv+ALYwrk7aFW0ty+fGx/ohGaXggv738BPdNvpuJS9a
3+GgW6PpiwSRZuJL93An6N2hppsXXt7y8zq0luZTd28ctKN1zjbFeqyVX7prqfB+3I/iA80A58UEmP0/7lmcSFtPcsk3R1RLbj8cN1dIWnEH6OwradSfGbE58kMma87yTdCA0iQ+BEJTRDSTo2TOYWNbXh62nEL37EEPnBJ4okJ1/N/H73X/n4o80OH1nRvfEZGMjYvNO3nr8Ol5P1B5xm5atX9HcoW4caqziuucfJqpGkvKvo23KnLL9UETWYm0G8Iu2Tm+xfPyRBjpsb97XaCstxfJ5U+C6kcbDsRrDmvOW6H5l/wY3O/9uMv51xLBMs1BQJy102laCLcDFEM92vK+/O6urEpn/9F1tTnwIhKCMrq9VATyJpABtW2kpPZdVB7xygBExuMEQ95etzgegxGJt4u/sgubr3RVLPZpLoaYmJAbEkZ0XKVNEtoqYGP405A2fNxxoA7z3rfwNEDnX35Xen9cBkC2062+ph7TKneGUFDwmJ0cEshrLH18dR8rKLabYquDcCz5WBYh0WrNyQCT2cByrE7R27lb16OkfR+D78u2jtCVwZmnto71c//ZwbaOPnHKGOzrou9PcGg5kNRYjohT8oZIjFAqFwkRMXzlCoQgL+sohXeNqnZvK16cC5iXpnI3Yfigi8w5zF0UNFLOTtBwoo6s4K3CsHOKKWfN+KMKLtbqB2WVZXtvNTtJyoIyuQqE4o5Hbd7HxfO/Zw8K1WrKQZ/lSHgqFQmEmaiBNoVAoTEQZXYVCoTARZXQVCoXCRJTRVSgUChNRRlehUChMJCKMrhCkC8FHQnBSCI4JwUIhIjecTQhuFoICIagWgn1CeK5aFTm0B63t6foLQZXHyyYEL4dbly+EIFsIPhWCU0KwVwhuCLcmfwjBBiGoc6nXPeHW5ItQtNWQG90gb5ZFwHGgFzAEGAMEPrNLkASjVQiuBJ4HpgIJwKWA4Sk37UXrmX79pSTe8QJ6ALXAmpCL86C1WvXyfwU+AJKAPGClEGQaIM/XZwfDDJf6HRRSUT4IV1sN2OgKQYkQPCYEu3Urv0wIYoUgVwgOCcEjQnAMWKaXv1YIdgpBhRBsEYLzmzl9f2C1lNRJyTHg78DPWvNFTNT6FPC0lHwpJXYpOSwlh890rer6+2Qi2g24MQK1ZgGpwAIpsUnJp8Bm4FcRqDWkRHxblVIG9AJZAnIXyL4gk0BuBjkbZC7IRpDPg4wB2RHkMJDHQY4AaQV5m358jH6uRSAXuZz7LpArQMaB7K1/zg2BajNLq77/NMhHQe4FeQjkQpAdz3St6vr7/JxPQc4KVqfB1/88kFUghctnfQLy3UjTqr/fALIUZJl+3twI1dnmttraL3KXy/trQO7Tv8hpkLEu+xaDfMbj+D0gx/g5dzbIf+sVIkEud20sQVZ6yLWCTNX1bQfZC2Q3/YLOOdO1quvv9RlpIG0g+wer0+DrHw2yCOTD+v9j9fOtizSt+r4RIBPQjOFtICtBDohAnW1uq6316R50+X8/2uMLQKmU1Lns6wfM1LvrFUJQAfR1Ke9ECCzAOuAvaAurdQO6ovki20LItaL57wBelpKjUlIGzAeuOUu0nu3X35UpwCYpKW6jTkO0SkkDMAEYDxwDZgKrgbbOzm5IvUrJVimplJJ6KXkTzRVyRrbV1hrdvi7/pwGOBbw8J3A4CMyRki4urzgpPdeyBTQnf19goV7h5Wi+lrYaspBrlZKTaI021BNWtBetZ/X192AK8GYbNRqqVUq+lZIxUpIsJeOADGBbJGr1gaRta0xEblttZZf9O5B90PwkG0H+Xu+yH/IomwPyoP7IIEB2AjkeZIKfcxeh+R6jQHYB+S7IVW18DDJK69MgvwKZArKrfu5nznSt6vq7HTMKZHVzZSJBK8jzQcai+R8fBFmM7quMJK36NR+na40COVmv30GRpDNUbbW1X+QxkLtBVoB8U7+YXl9EL3+VfsNXgDwKco3ji4B8FeSrLmWHoDnST6I50teATGl
j4zBKazSac70C5DGQL+HiIzpTtarr71b+NZBvBavPxHqdq9dpFciPQZ4TiVpBdtfLVeplvwR5ZaTpDFVbDXhqRyEoAaZJyT8COiCMKK2hp73oBKXVKNqL1kjXGREZaQqFQnG2oIyuQqFQmIhaOUKhUChMRPV0FQqFwkSU0VUoFAoTaXaWnSstN0WU7+ET+xq/wdJKa/D409pedILS2hbOBK3tRSeoJdhbpGLKSAA63nqU/Qe6kTlte5gVuVO4JId+aWVu2w7v6EXGo+FZXlqhUDSPMrrNcOLXIxmctwuAZWkbmd0zi43EhlmVhiUhgT3Pncufr1jI8Jhot325TAiTKoVC0RKmGd3jM0ZR1817e2wZpCzcYpaMgHBo/a+rvmNZmjZV6qrKZN56/zLSiYwepIjryIfjF5DdIc657anScwEoX59KKiVhUqZQtG8c93/vDU3z4hzO9e5spb97Evs3Ba0+vxpIUygUChMxvKcrYmKouGkoj927iknxp7z2zy7LYuPCyHhk96V1VWUyALPenUTG45HRy7V2787RieeQYLG7bV++fRQAmfPC9+RguSAbgLJhXfyW6VqozTopNu80RZMiMqj5nxHUJQbezzO7nXje/4Oypzj37Rm9yKt8Rvc7GZSf3ererqFG15KQQE1uNm/PmUf/6HgjP6rN+NK6viaap9dOAogog3vsxnP4+vFFQFOdrq+JpsOxaP8HmoB1YAYF0xMAKL5+sd9yF2y7BYDUU1nYd31virb2imVwFo3JHQMqG31E6yjYfjB8yb5mierbB4DTGd3dtv9qzvvkJR7xdYhP+n88DYDMzaHT1hyWLoksmz3f6bLbM3pFs+WLbniN/tY8sl/MAAKvd0ONbuOwc/j8tXxcjcNxWzXltqZoimP1iVh7JDrf2ytOIevrjZTlE19a73vrN6Q/FRn+Zkuc1hCOThqoG9wmDjVWcf8bD5P+bHi1FjyaRPHV+c739bKBooYGADKio4kR2o/CN8O1qUpz508gZqz5OgPFkpCAiPMweHaJrawMTMjktCYnYX+pkk+y/hRQ+QGfTgVg0IxEbBXeT5VmYElI4Id7tKlsC6f4/+FtieO2akSdNVSyWkRERUFyF6zC93WtstdRamv06jwWX59P/2j9x+GOCDC6vrhk1YOcs2Cf833Nhf1YvPUPzvdTn3iAxJVfmi0r4jlw3xAA1k1/AdcfBoDrnn+YtGU7sfs4Lpy8dDKLz64YAMBl/9zHQ0n7Wjgistjz3Ll8OH6B+7aGFPJHjcRWWmr45/f+qJ4FvT+CACNmto95BYDc5XfQc0J4jO6e587ly+vn6e86BX2eS1Y9yKA5WuSQGe3aPmIwL/3xFQZE+X6qmFh4I7ULeusds7ZhutG11gpsPx53vo+u7u02Am+Pastk8cFR8z8j+NWc9922ZeffTcbSAzSarsadwiU53DF8E8PjXgegT5S7wR329HR6vfMDtpqacMhzUrgkhz9fvhjQerO3luRSdm8f5I/ajfPP20fyzUt9WZm+IXwigRMfZPLztG8BeOv9ywBI93AdOco8k7DQrW1qHAeLsW3UmpxE74/qmZO6nnhL4Iarq1XTumboEiZ/MJWkawuNkugXGWsjxdo6Y5t/KpU1d41z2zaw6CCNlZWhlOYXx/2fGe2t2+kKe0LSIbnBa3//v+WR/aIWJ28L8PMMM7qnx+XQ+8m9btsy3r2TQe+d5IRLwkGfePclpqT5Npe6RIuXryl+v6TxYFuXk2o7/dLKeKKbt9/zUGMV1z3/sGZwTeh1NUfhkhyWXbbULV74UFUXYrbvcr6X23dxqCo9DOrc+Xnat8767D9Rq7fXLxrtVmbVoGX6DWi+j9wyOAv7S5Us6P2RX4O7uiqRuc/+EoCHHvuj1wB1ZnQnfp72rekx5UXPjWT2Jau9tjvaalSN70f32FN24j7f6rbNjM6OI/Hpkvu2+vQ1D9o4BXuxdg1q5x+lT/wxt/0Z795JVv5PrfahG2Z
0q1KjvcVCczkAACAASURBVHo1A7KO8P29Pcj9WVPCgSv9/5ZH1o4K0x6TaycMB6DLre7G1Wwd/iiZM5Lf9fNuxOtrorn/Dc2lEK4ernVgBgWPJgHw58sXeyVotAcmJ5Rrfwe/57HHt7HT6v1B0ipDO5ruaIcHr7PTObmab7I+xNWlcOl3N1D6RS/n+9gySFmm+e+f7TSZJz1iygHGJnzH0ldmMOjR3dhN6jH2HnbUWacOXNuqPcxPY664Jj692GuHzzKj+hVry1bibasAkrdbgorTNdW98En2+5Dtf/85bzcE9SWCpXSI9vULsjXXwklbDTmf30P2i2Vev16WC7IpuaErlnpI+4OxDcgSF8eB+4bw8i9eZ2yc+yPNqspknl47ifRnt4T1R6EhNZHiq5fo7yLf4Drq1OGmCYZtNQPoY0C9O9ph8dXeYUlXFlwHi7uT9p7vQdKUhVucPTZXhsdE8+H4Bcx8egKYZHR9YVSdtZWyixt8GlJXmtt/wbZb6FFY63d/c6jkCIVCoTCRiJh7ocpex8TCG4kqrzXtF1FePAR7dpXzfWFDNTd9PY1BM4qc4Tby4iGczNRGM8tz7BTdsEjzT/30ML1WG+dLFQnxrL1zno9BHHh9/2ivgR8H/hITun+w1zS/ryOZ5PCOXmREUCqyZ53OPTGAD4+c57f8rAF/I7djU2vcUGvhjW2XkIkBEx75iT67tSSXxhd60HFd86umd7z1aIu9NkUTp8flcE76j0Ed67BVqU9I7LuCczMZZnT9hLu5UdhQzepTF7K/NhnLTbXYyg8bJccNy+AsjjxUz57hTUvbrz51IT0nFDhHIB1lvhm+3O3Y7tYYfjl9Hf/cOhIMMGSWhASqh6cTK9x/frbVa26G/Qe6kelhzBwB9IW3aI/5nokJw2Kmk1ygBayHKoDeWt3A3BMDuLfr9874W4AXCrTA2/SP6xA5g5H6YJrIGUyf+PAPTDpYumYcaX5isEXOYJa8dCm5LmMSs/Zdb/oMcz8+1p8On7f8mfsPdGN2zyyGx2kheZ4uqXCSEv0T9jGa+yNqx17T/Mv+EDmD6f3k3qCiaI7bqnn8yNg226qQG11HEH9DJ99hCK7JETfvnEbPCQVAtf4yh9r5dXzjNXjShCMoXRvQcKeooYHPrhjgDIUKNf4SSm7+6AEAMu/ZioiKwpKc5NzfUgD9jt81GWFnlk+Agdz+kNt38dllGQzc8iNXx530Snzgba036YzTXf6FM073pE3zh5+s6UjPNqkIjJYC3z25ZOl2nxEjkUrmtO1sTujOise1ELi2JCWEmrzEI+S9vQyAMXfm0WlbCbKyyvxBNSGwduvm1g5bS/7JCzkwou22KuRGt7kgfnBPjuhdfyTg2DYzaW1QutF4BorbRwxmwaqmQZe+URbCodVWVkb+qJH88FmRz4Z8b9fvuXbrd4CWkeYYcMv5/B4AzZVjgs6WAt/PBEKVlGAkyxfOp05amPjag/QxOXvS2q0beVu+4Oq4k4R74DdkRrelIH7QEg4GvnGARpfkiEjAEcgPmmHr17GceIv5RsxfksbANw5w5DbN/3hj3qekRK/36e91cGtJLrtXZLv1cA1BSmylpX4TH2JENNkdvBu4vV5L7zQrVVVGCbfA90hJfGmJm15dx/GGzryTf3mz058WLsnhz1csbHVSQqjp+EAsF8y+pelpxwNHCu2CO17nt/HaE5e/8YlQInIGc9nyL9yeyMJJyIyuvyB+B9n5d5Ox4kjYEw4Kl+SwbMBSQA/HARpf6EFsXRV169MBmJS4Cl+9hdVVicx/+i66VHxtiDZfSRrXXPcl/x6Vxi9T1wH4fTQatHEKnf+uaY4/0kDPHXu5qH66doyPAPpQIrfv4vBTOVyU2hQPqA08vmbYZ7aG6N2HGDrnbt5/5AX6RMW3KvHF0UbEi93AoIHB9PdOApCRcqdbnWlt4QgxdzXw4fX+B/2WDVhqepz06XE5lNyEm5/bvut7eswdwkWZWrv
z1wbGxjXwu4la/Pks6yTDVzmxdYrW7xvfdTTz6DD+tXCE27ZLZ2z1G7/bVkJidP0F8bvS57NaGotKQvFxbeKO4Zuco9J7S3oAkLluO41jhrJhsMMv6m1w554YwB9fHUfKyi3+BpsN4cVeO8DHxT9uq+aSVQ9irdP843031GHd4NJ4B2ZQdok2oJIeXYbRj1Qd1m0nyeV9tx3Z9LfmUXy9d6767Tlar23tg7mkmjANpa20lJ7LqhnX+WHsMZC+42TAUTKubcQoHLHpg/KzyS69G1usZNPkec6e60NJkTdvRVVqNH++4iV+8YZmYLOfO4HthyLE5p0k6bOCddvh+/tAU2IKN6zmadskU3q8nkw9oGUi7l40mKQVTZ9fMmckw+L3G/a5bTK6zQXxnyk4VmNY+1YuqRG0wkW5TXDOgn3YfjxOxZSR2sz2uaOc++t6NrrM+BX+RypXnuy+G4C42+v5Y904U1YOsdfUOP2IkRao78D+TQFp32gRLP+V8AB/usZ7KaZIYnhMtDNBpn9DHlmLY9ySmzy/j4y1MfuSd92y1iYnlFN83WdsfNxcd97UA6PZlT8YgKQVXxDVtw9Fd6QB+E1KCtXKMSo5QqFQKEwk6J6utXt3jk4ayLrpLzgHzeaeGODc73gcCkfiQ6A4AqRPTB1JVT/vELe5Jwaw9q1cAFMeg+OPNHBrSW5AMYQJFjtHJ55DVM2AgP1Pc08MIL6gQwiUtkzZsC5u8cK3luQyrce/3BIOHkraR33epxGzckikYK+sZOA9W/nFkuleKz03x/hULVLEDFeEZ1stvj6fDNudJG/3Tkm2NkgyH/ia6vFD2DGkn9f8DEYSlZFO0WXeUStffnwe6f/WfOll+v1fkOedhu2guaSkVmsK9sCGc/t4rV6waNMVzv8ful678Acb7VhuacD2Y+TFPX6iz7nAHPft+adSnaPGZroUOqzbTln5YGYvzQIgr+u//Y5I94mK95rMvCWWrhlHmklL+XiGxP74WH9mPd6FDc3ERyvcaW0yxtIntekRH7qzde0iGBxtde7ypgSZohtegxu8yxY3VHH7qQdavXJEKCi9tJdPY1rXq5HvA1jlxAhCGqfra9Ak0thfm0yVvc5nSJhN2tnXWMvaaWMRm3eSgvk+XLl9FxvP17Tt3zqWB3p8AkCyVbNigYYFua7a4MByOoRCA8RRpw2dooiNOjP9/mcr/hJkPOkfHR+Syb+DwdIoKW6o8rniQyAYkcgTEXMvmMnha2KYuOZG/u4j22xfYy3/e/VULIW7TI1Q8Mfha2KYGTUBgL33a66bQLONXFdtcJBWaf7qEo46HfzmtzzX63MiJeFEERpaSpAJN13WfM3tpx4I2ugbkchjqNHNP5XK2mljsZQbkzIbDLbyE1juzeLK5Kle+0Sj1AxuY2SEzdvKTzj/H/iK5ou98mNv3b6wVjcYlqrcGvpFdeCy1f/mtsRvvSbl9kxKUQRPyZyRvPyL4KetDBqXBJn1nS7l+P11fpMjfGF0G5D19XT4qfVPWNn5d9Pns1oGOeYqCWEiT9BGN3r3IS563H/w/cyjw9g25yLiNm+NiF6jK/Zd3/sN24g0rQ4cwfyWAIP6w/09HCuAxAhHYHqTwXVNSumw3dxJZJrD2iWRw8tTmZS4hEhNpfXH6Z4NYQ3blNt3YQF6NDYlRwRC/JEGw9uAZ3JMSzgTuYpKDElTD9ro2kpLSVqmzbLlmL3elc5Fki5/iYxlyxWRwaCNU7AUxNN9p/Yk0dKUhWZiHZjBnicT2X7hK3QNczpte8Y1OSJS8EyOaYmMNw4YmjkbEveCGcHtivZF18Ja54xmDgYubURsjsy20pCayL7LlwFNc1pMPTCabpsjNzmhObbVN3DzR/czqGZ3uKVEBK7JMS1htHNRJUcoFAqFiZx10QsKcxCbd5IZYY+ZrWHm0WFeOfmRTHxBB3L7TnC+33+gG5n3bI24hCSFMroKBaBFe8wuy3K+3zbnonY1JpE6bwv
Ma3rvubqIInJQRlehwD0pBSCOrWFUoziTEVKGO7hIoVAozh7UQJpCoVCYiDK6CoVCYSLK6CoUCoWJKKOrUCgUJqKMrkKhUJiIMroKhUJhIhFhdIWgyuNlE4KXw63LEyGIEYI3hGC/EFQKwddCcHW4dflDCJKE4F0hqNY1/zLcmlpCCAYKQZ0QrAy3Fl8IwQZdn6Ot7gm3Jn8IQbYQfCoEp4RgrxC+1nWIEITIRohPEeIUQuxFiIjVKgQrheCoEPwkBIVCMK3lo5oIudEVovUJF1IS73gBPYBaYE2otXkShNYo4CAwBkgE/g9YLQTpIZbmRTD1CrwCnEar08nAYiH4WUiF+SBIrQ5eAb4KlZbmaIPOGS5tdlBIRfmhtVr18n8FPgCSgDxgpRBkGiDP88NbV69aeS+tCGG41iDbwLNAupR0Bq4HZgvBhYEeHLDRFYISIXhMCHYLwUkhWCYEsUKQKwSHhOARITgGLNPLXysEO4WgQgi2CMH5AX7UROA4sDFQbWZplZJqKZklJSVSYpeSD4BiCLzCzdIqBJ2AG4H/k5IqKdkE/A34VaRpdTn/zUAF8M9gNZqhM5QYqDULSAUWSIlNSj4FNtOG648QJQjxGELsRoiTCLEMIWIRIhchDiHEIwjh1IoQ1yLEToSoQIgtCNGiVqS0IWWbtRrZBqTkP1JS73irvwb4K+/jBDKgF8gSkLtA9gWZBHIzyNkgc0E2gnweZAzIjiCHgTwOcgRIK8jb9ONj9HMtArnIz+d8CnJWoLrCrLUHyDqQWZGmFeRQkLUen/UgyPcjTav+vjPIQv3cs0CujFCdG0CWgizTz5sbiW0V5Hkgq0AKl8/6BOS7QeuFEgm7JPSVkCRhs4TZEnIlNEp4XkKMhI4Shkk4LmGEBKuE2/TjY/RzLZKwSP//PAlVUs+Q1bd9IiForUbbAH1bDUgJcgfI+IC1tfJL3OXy/hqQ+/QvcRpkrMu+xSCf8Th+D8gxLXxGGkgbyP4haMhGa40G+Q+Qr0WiVpCjQR7z2PYbkBsiTau+7w8gH9H/D4XRNUrnCJAJ+g17G8hKkAMiTavePotAPqz/P1Y/37qg26tmNO9yeX+NhH260T0tIdZl32IJz3gcv0eCd71CtIQiCQ/r/4/Vzxe0VpNsgBXkJSCfABkdqLbW+nQPuvy/H+2RAKBUSupc9vUDZupd9QohqAD6upT3xxRgk5QUt1KXqVqFwAK8heYvnRGhWquAzh7bOgOVkaZVCIYA/w0saKM2Q3UCSMlWKamUknopeRPtMfiaSNMqJQ3ABGA8cAyYCawG2rokgl+tSOmlVXctaC9/9Sql6VpDYa+k5rbZBPQBAl6jqLVO5L4u/6cBjkXspUe5g8AcKZnTyvNPAZ5r5TH+MESrEAjgDbTBqWv0xt1WjNBaCEQJwUAp+UHfdgHwnzYpNUZrLpAOHBDa2mrxgFUIzpWSYRGk0xcSEEEe68AQrVLyLdqgLwBCsAV4sw06oZVakTKwepXSTStCmK61DW0gCgN9ut+B7IPmI9kI8vd6d/2QR9kckAf1RzEBshPI8SATmjn/KJDVzZWJBK0gXwX5Ja3w4YRR659Avq2XuxjkKZA/izStIONA9nR5zQO5FmT3CNPZBeQ4kLEgo0BO1tvsoEirU738+brWODR/fjG6nzKol+Ze+E5CH92nu1HC73X3wiGPsjkSDuo+XSGhk4TxEnzf33C+hFgJcRIelFAsHf7fCKpXkCkgbwYZj+ZeGKe3gZ8HrK2VX+IxkLtBVoB8U7+YXl9CL38VyK/0skdBrnF8Cd1wvepR/jWQbwXdIEzQCrIfSIk2eFbl8pocaVr190kg39MbxQGQv4zEevVx3Cza7tM14vp318tV6mW/BHllpNYpyLkgT+pt9GOQ57Tp3tKM7mMSdkuokPCmbiS9ja5W/ioJX+llj0pY4zS68KqEV13KzpVwUmoDah9LaJNWg9vA53q5n9AM+29aoy3g+XSFoASYJiX/COi
AMKK0GkN70dpedEL70ooQJcA0pIx4rZFcrxGRkaZQKBRnC8roKhQKhYmo5XoUCoXCRFRPV6FQKEyk2TjdKy03RVQ3+BP7Gr/xkEpr8PjT2l50gtLaFs4Ere1FJ6ierkKhUJiKMroKhUJhIm2Z97Rdc+LXIym72H8G78CljYjNO01UpFAozgbOSqNbMWUkg/N2sSzN/5S9g+Kn0Dd6GNYNO0xUplAoznSUe0GhUChMxLSebs3/jKAu0d3Gxx9poMO67WZJcNLx1qMsS9vIqspkAF7fPxqA2KgG1ma+Q7wllj2jVzCIKfRrGAIQNlfD6XE5VKVGB3WstUHSZc3XyPr6lgsrFAYhYmKouGkotuimAf2uhbXavrPQhWeY0bV2707DuX2c7381533yEo+4lcndNQHWGaXAP/sPdGN2zyzeev8yANIf/wIAS3IS9390BXNS15Ni7cSe0Su4IOYWAFJPZWHf9b2pOkXOYHo/uZeV6RuCOr64oYrbTz1A3IYC7JVtnUa3ZawDM2hITWx6X92A3L7Lf3m9jYhGiWXrLmRjo+EaASyDs2hM7thiuQ5FpTQebOuUrgpLl0SWzZ5Pdoc457YLtmn3VUrU0BbbSWvxbIctEVVea+q9HVKja0lIQMRpjfnoxHP4+vFFoTx9yMictp2NxJLOF27bbeUnODACHt86lgW9/0m8JZZvhr8NQO78CcSMNVfnJUu380S34BtD/+h4Pn8tnzF35tFx/TeG93gLHk2i+Oolzvezy7LYeH6s3/Jl489h2+8XU3C6hpkjJmD78bih+hzUzq9jw+A/tVguc8V0zllwutkysrIKe01NqKS1iIiJwdLF26CYraOtOO4r3m65nbQWz3bYEld9Px7rLSlgl9jKyrSJBA0kpEZ3z3Pn8uF4bfL/BIsdbS7q9sfha2KYuOZG/p71YbilhITlC+dzy+MPkrjyy3BLaVdsmjyP8pubn5984msP0ufZLSYpgoqbhrJs9vyw6ziTWJv5Dge32tnTkEL+qJHYSksN/byQGd3CJTn8+YqFbo8Q7RVb+Qks92Zxwexbmn6RTcSanETvj+rJ67oZ6NTm8/WPjueB373Ns10mk7Lw7LsxRc5gLlnaNHYwKXEVgdRrirUTKdbmy9g7tFFcgByfMQqAx+5d5fMeW3DH6/w2fhrQ5C6LFOzlJ7hv8t3IKEGPZ4uDdpcZRbwlluwOkBF9kh8+K6Lero2hvJN/uSH3S8iMbr+0MobHuA/4rK5KZO6zvwTgocf+yKT4U6H6OMOx7/qen8pzTP9cy+As7C9VsqD3R8RbAjO4Ge/eSfL2pkHK8hw7RTe85lZmUvwpnkwOqVQnhUty6JdWxux+77ZY9siDo0geq/n2RydvNUaQB7ZO0R5uGq1eryy4DoCKlX2o6icoyItMd9iRB0cx5XZt8MNxDx1qrOK65x/msftWMSn+FGPjGjjdMxQrR3njuL61K3vRZUXrDbps1GLeBXCoKj3k+jwZuLSRiza1vGTZT1dVA7Bn9AoAYkQ0DyXtc+5flXy5IfpCPpA29YAWCfDl388jtgxSlmm/FIX394J2ZHSPPDiK23M2ON//pt9Gnp4zyfBeRGNyRz7J+hMQuI8rebuFpGVNurrtyKa/NY/i6/Pdyv3X1d+x6+BIAJKWhu573DF8k5fvee6JAVpPAfeeQlX2ab4b/F7IPjtYLv3uBljcHYCk974gpW8fsrkbgD/c+jpj41o2YIM2TqHv53UtlmsrVdmn3YzB+ppo7n/jYdKW7eRZMZn9d61z2x9qHNc3O+Nuuhj2KaFDbN5J0uaWyyUWa8vvZRdo190WK9k0eR4pVu1H2aj7JWRGt3x9Kv0PTqPbZq23m7b0zHqMnZxQTvF1n7Hx8dA5/NvCSVsNOZ/fg73eykA9/MaB/ZsCsl/MYED8VLaPeYWuVu1xdFnaRvpfPAiApKXG6vvwyHkR78ooHaI3/yGjiC2DtFma3t9GTwuo16hlLRqbPFMxZSS
5P2sa2V9VmczTayeR/uwW7IZ+8pmPI/EpbYP+vkcK5TcLp0vJqPtFJUcoFAqFiYSsp5s6z3evxhEYnRnbNCDlSEo4vKMXGZSESkLQRGWkc3xML7dtseWSTWUDoPtuADbUWnhj2yVkYn4yhy+O2WDQzMN+w6xsPxQxaGYKx7ZC1xYGg85G/nXeu3Be0/vVVYnMrdbGHzrvBfZG021HBfZvCsIjUMeRyOPg9f2jSX/8C+d99di9q0wbK7FnV3Fiqva4Ha7EJqOwdu/O0Ynn6FFXGnNPDCC+IPQjpYZnpPkKjH5i0w0AZD4avlFW1wDqoss6tjiIsuTHSxn0ah1GRvBZu3enNDsw90WssFM9PJ24DbX+Ex8aG5n/45XOZA8z2FavPZbvP9CNTI8fVMvgLDonV5uio7VMij/FpDmL3bb1/1se2S9mANqPWKRgSUigJjebt+fMo3+0eWGZe0avAG3IhltLcikrHxxQUoOIisI+YjAyStAnvthglYHhmUBRmh2r5xU01efSNeNI89OZbAtn5YQ31i6J7HkykX2XL/O5v7ihiu7WKOItTQZwZfoG5i4fwGeXZRgWQF02/hx2/M79xj/UWEUnYXH6ZR04Eh+uvGUqls+/9nk+R7JH/rcXtinJwi9CYO3WjRhLk0G6ZXMeANkP/oA9KgpLcpJzn/2lSr4JQ+yzaJQUNlQzIKojVhG4R634+nz6R2thWJl3hMfonqzpyElbjfP6x0Y1UJObzeev5WNWHPz+2mSq7HW+74crBgBaWJi/jEJLchILVi2KiHBSa3ISREU1e/8bzVlpdA8vT2X7ha8AvhvB7TMeoOP9h72SI+7t+j0Dt/xoSgC1g+uef5ifRtaGrYE0h7VbN/K2fMHVcScBbQB1+5hXAMhdfgc95sawYFXTE0TfKAuticoIFZatu/jfq6fy/3+8jMxoc3r8oaL37UfIWXiP8/qvzXyH0oWNmJl45C9Z6N6u33Pt1u8AuG/y3e1iHoXeH9XzQI8P6GkFf/e/0Zx1RvfEB5msOW8JXT0et/NPpbLmrnEARP3fMV4asBrPAPoYEc2g6ONgaT5LKZREV0sGPXWK/lVaD9IzDKw5Qp1k4YrIGcxlyzWDGyOa4rMdPbI1Q5ewYcnAFns3+adSWTttLJby0OXeeyIbG6G8Apt0v27O/P8F7j8EN726zjlPyJ8v1548frFkOpnTzPdh2ipOYa9vcsrHW2KJN3n425EsdGXyVPbeol3r4uvziRHRZHfQ3k9csp7jDZ19Hh9j2UdGdHCTNoUKx73QGlfbH371OttuHOC2bX9tMoevicFWfiJoLaYb3UEbpzBwqTkTm/ji52nfOns7rsHxsafsxH2uBeuPebnOq8y+71MpuuE1+kZZsL8djeXe0E+AI33Ycik0f2LW4hgALtrqHvTdffdebP5OGBXFAz0+MMSfa9n/I39cPI5bHvmGPlHeN1RmdCcyPSY48mTm0WFsm3MRcZu3Guor98WgjVPo95JmvcRmd/fMW49fx57Hv+LFXjucCT/90spMVthEc8H+ZiUd2Xd9jwXIqsgGtHbomlCi/Ug1d73Da3TtVdV89/JQxkefz6UztvLB3sF0/rv7fdEYJ3j/kRfoE6U9RYyNa2BsnPs9XmWvY+KaG9t0/4fc6NZOGA7Aweu0UUBLjE3vymuM6lfMhl9rsW/xI0f5jXoINZaEBPY8dy7PJCwEor2C41sqk9zVAjdoPY2/Z33IlclTTY23c4yiJ33jvt2fwbUOzGDPk4ludR9KbKWl9FxWzbjOD2OPgV9PDDxA35FAs3vRYLr8JTyDqaP6FbNr0GAAr0D6uL9sZVP8SKbe3anZie7Norlgf7OTjlzboWtCiSvdRx4F9AgRF/r/LY/YY1H811XfmV6vsr7eOffIJttI+h6ox7rBve1Z4uKc7dkTe3YVe0avcN7/F8y+hR5zhwTlUgmZ0a2YMpKfMoSzwovdKrzpEXNZ2kbQKzy37wSYFyoFzSP
iOvLh+AXOx93SL3qR9l6TwY/q24cf7unLl9c3ZaS4lom9eAgXbAvPXAytxXJBNgXTEyi+PB/Xup96YLQzeSUU2GtqnJOsrKgax6Js7xm5LDE2twSNqQdGsytfN3ZBpJSGipYC37us+IINY3KcbXV86nf8cca4iE/4MJPGg4dIm+U99aWj49X/umlu27NfLMP2QxEb+jbVazjwl8rs2p49kRcP4YKYpvv/m+Fvc1Hm9IAy3zxRyREKhUJhIiHr6Xa89ShbA8ypD0tyRH09N++8gzVDl2j+WhcnYlRGOkVTUimcsgjHgNPMo8PoXNRUSGzeSUrUUNA7uocu60jGwXQai0zS3wrKhnWh+PrFXtu//Pg8w9Kz/bmJrD3cEzSM1NAaWhv4/lDSPurzPmXjwshIA/fEqED+YOj43jYAMj3Mgd+xh3aA5/3fFkJmdPcf6Ma2gQ3OgYfChmo21Az0Wi1ifU00T6+dBECGiVPQ2SpO0XPCKVbrMat1vRqxjxkKeCdH5J9KZduci7z8jdbqBuaeGMC9Xb+nIG8Rw0um0zWERlf4GE2q6idI6dvH7woGImcwtk7RXsd4sroqkdjykMgMGEtCAtXD04kVETBLgJ4o0q+jVgnv5F9OaitdBSnRPyEv/i9TV7kIFKMC+c0gkuvVFdf7P0ZE+7xfAyFkRjdz2nZufmWGcxLz2/5zOx1f7Urea+4hTve99RvSnwp/4yi+Ph+u971vzV3jnJEMrsjtu/jsigFcu/U7Z6iM0RTkLSIz1v8KBpct/6LFAazihirmP30XKSvNrffGYeeYGsTfHI5EkQN6nLDn7GeBkJd4hNGrFpm6ysXZQLup153f89mkC/n5x9+S2YYQuJBGLwx6dDczn54AgO3n3Vi+cC6RcMO1d5pbwUCLf2y+Adw+4wG6rP/a9LAsheJMwj5iMC/98RUGRLW84wycCQAAFqxJREFUvl5zhNTo2isrQZ8HwHp6gKl54YGy6dc53PpST5+z1xc3VHH7jAeI21Hgd9o811nwuzUXIxsE3T7cy7DY6V6pwIGsYOAL5/fZUIA9AlYEzs6/m4ylB4iEB8iSOSP51XWfNVvmmYTFhDu+1B9GJr4omnCs2HFj3qekRK8PSUajYckRvgL9IwG5fReHn8rhotRsr33WBkmX9V83a6BcZ8EP9cCArbSU5II+LRcMgNVVicx/+q4Wv4+ZxO+XEbO67umeDQHMR+G9Esr8p++iS4XvuS5MxcDEF4WG64odvlx4wdo4UzPSzJppvyU6rNtOkp994X4E71BUSnZ+61YwAG0lhNIvmqanjC2DlJVbwv59IpW+71u4tI82251nEL8/Cut6kbjyy8is0wjt5PgimLo3A0dylIzVulO352zwMrbHbdVcsupBrHWC9B0ng5pI3jCj27Wwlv4fuwdHmzHTfnvHNeA80BUMQGvIrskeiubp+N42amkK4u+cXN1s4suqymTeev8y0omMRR9lTS3jP7yfP12z0GttwkjHs+4BRJ2VQTW7w6bJV3KUJ9vqG7j5owcYNGcX9srKoFfuUMkRCoVCYSKG9XTF5p1kBpEip2gi0pbSbi1R5bXk7prgfB9/xJjVaoPFNYjfMjiL3PkT/JY9vKOXqXHlLWGvrGTgPVtZ/+15DI/5Pvx+sVbiK4EinNHc9sR4UocdZVLBZL9l9h/oRuY9W9us86yb2lFhHvZd3xMz1nVLSZiUtIy3VnciYVkpX/z1wPkApie+nGm0dP0Br5VQgkUZXYWiHZN0bSEbiQ0q2UMRHoQ0YNkZhUKhUPhGDaQpFAqFiSijq1AoFCaijK5CoVCYiDK6CoVCYSLK6CoUCoWJRITRFYJsIfhUCE4JwV4huCHcmvwhBDOEYLsQ1AvB8nDraQ4hWCkER4XgJyEoFIJpLR8VHtpLGxCCKo+XTQheDrcufwjBzUJQIATVQrBPCEaHW5Mv2lNbdSAEA4WgTghWtua4kBtdIVoX+6uX/yvwAZAE5AErhSA
z1Nr8fHZrOQLMBnwsZ2gcQWp9FkiXks5oU7bPFoILQ6vMm/bSBoKpUymJd7yAHkAtsCbk4jwIRqsQXAk8D0wFEoBLgaIQS/P1uWdsW/XgFeCr1h4UsNEVghIheEwIdgvBSSFYJgSxQpArBIeE4BEhOAYs08tfKwQ7haBCCLYIwfl+Tp0FpAILpMQmJZ8Cm4FftfbLmKAVKfmLlLwHhCQHyGCt/5ESx7yOUn8NiECtIW0DRtapBxOB40DQS9sarPUp4Gkp+VJK7FJyWEoOR6LWdtRWHee/GagA/tlqcVLKgF4gS0DuAtkXZBLIzSBng8wF2QjyeZAxIDuCHAbyOMgRIK0gb9OPj9HPtQjkIv3/80BWgRQun/UJyHcD1WaWVo/PmA1yebAazdKqb6sBKUHuABkfaVpD3QbMuP76vk9BzorE66/vPw3yUZB7QR4CuRBkx0jT2p7aqv6+M8hC/dyzQK5slbZWfom7XN5fA3Kf/iVOg4x12bcY5DMex+8BOcbHeaNBFoF8WP9/rH6+dW2s8JBr9SgTSqNrtFYryEtAPgEyOtK0hroNmFSnaSBtIPtH4vUHmaobr+0ge4HsphueOZGmtT21VX3fH0A+ov/faqPbWp/uQZf/96M9EgKUSonr7OT9gJl6V71CCCqAvi7lnUhJAzABGA8cA2YCq4G2LjEQcq0GYqhWqT2ybwL6ANMjTatBbcDo6z8F2CQlxW3QaKTWWv3vy1JyVErKgPnANRGo1Umkt1UhGAL8N7AgWFGtdSL3dfk/DW1QCfCaWO4gMEdK5gRyUin5FhjjeC8EW4A3W6nNE0O0GoRZWqNog59Mp720AaPrdArwXJDaPAm5Vik5KQSHfJyjrZztbTUXSAcOCG21jnjAKgTnSsmwgFS1srv+Hcg+aD6SjSB/r3fXD3mUzQF5EM1HIkB2AjkeZIKfc58PMhZkHMgHQRaj+1Pa8GhhlNYoXeuzIN/S/4+KNK0gU0DeDDJef2QbB7Ia5M8jTWuo24CROvVjRul16bdMJGgF+TTIr/S20FU/9zORprU9tVW9ffZ0ec0DuRZk94C1tfJLPAZyN8gKkG/qAry+hF7+Kv2CV4A8CnKN40uAfBXkqy5l54I8iTaY8jHIc0LQkI3SOgu0mnN5zYo0rSC7g/xcL/eT3gB/E8H1GrI2YKROfdtrIN9qS12aVKfRaINAFSCPgXwJF19mpGhtb23V47hZtNKnG/DUjkJQAkyTkn8EdEAYUVqNob1obS86QWk1ikjWGhEZaQqFQnG2oIyuQqFQmIhaOUKhUChMRPV0FQqFwkSU0VUoFAoTaTY54krLTRHle/jEvkb426e0Bo8/re1FJyitbeFM0NpedIJagl2haBccnzEKgBvzPgVgf20yh6+JwVZ+IpyyFEGgjK5C0Q6oS9b+PtHtewAKTtcwM2pCGBUpgkUZXYWiPeD3YVXR3lBG1wfHZ4yirpv2f/edjQB0fG9bGBW5E9W3D0V3pDVbJrYMUhZuMUmRQuEbX22194Y6rBt2hElR+FHRCwqFQmEiqqfrgxvzPnX6zq4suA6A07U5dFi33XQtp8flUJUa7batqp+gIG9Rs8etrkpkfsUtdFnzNbK+vtmyinZARI3NB87pjO5ebTX7/7V39kFNnHkc/24SBEIQ5K0I+AJIMB2sb4gDhRPtiOd1dLBVqqX1iqUo6tlasbWn01pHR6++nZ7aGl9wrNo7xdrW01Y7N+KpqJSz2HNEUF5ExRMILwYSMMnm/lgSSLIhAfYl6n5mGGD32d3fPPvsb599fm/GhRicz488roCgdB3ws+IkACB5WSpwhvvrG5fV45eY73p8XJqsGSPWbsGyn1NheFTLgmTdIw4MhO7FsB4dI7l2B6RazZJETy/iqAi0DdTzLYbTSAZR9/1JRCBUCg+b/W0D9SAnjKbaqrQgb9ziVD6A6lNdiI/FNkJvhOjqDRj19vuakEhAjo+BUULA7eZ9GOrqenxtQek6oNGgoX5
rPBHMsyxPAyKpFADwMC0Kv67sfjZuzeQ5GRCd/5VxmQiJBCJ/P0bPSaoaun04maRkhR8qpyo5uVZfEXl74/YiKnd42dwvadtUTldS9X4B/P7WqxDP8uPU9U3s64PSz3xQPinXvK3dqMOPmgFQJsTTKlLC3R0iXx/A3xfbj+yE3M0LcX/OxoADgtJlnNjziwAA0YsrYOBZlqeB6vdHAQDOZH8BKqk+/5DjY7D1cM9eAI54P30hiEvFjJ7zWaB0w4u4Mn1Tx39eDtvnyY9j6elXUD2eXbm68uBACIrG7gQgNW/b3jgc5yZGwFBfT3tM06zRyF27BWLCiEiJZ5+uz5rS1bw2HlNWn6fd9/XJiQCAoSsvs3V5xiDbxQAAQ1Mzz5K4PlXr4vG3N/YAAMIktgpXoVyIsHNai20qhQeufUo/I2IKo4SAop/UccMenpMvLy5lcwjyMlMgUt3gSQJ6yvbG4h+v7ECQ2LGyNSETeWCIpwrVsF2GYIsBUi0GiDvHw1tVyahfEgZjnW1/moJSPllyGIp+UlTqWjBpwQL0e6xDwM07vZqIMap0a3IS4J9ClSGK8//FbIyyJnwmNSVfLU5DxArXV7yuxtEWH6zflo6TH39Bq9z4oGJDPFbPOIoUqc687WiLDzauf9P8f8T5GugrqiyOC74ZiHHtVP3BwF4O4meZig3xWJt41GJbra4/iEvFvNvWRDHDod3SWd8xN3I/4tzdujnCNbnf4gv3IluFW5OTgLnvUIacNFkzZZxeswC+ZynjdG/HKiNKt2pdPJ4E6/BObD4+C7zpsH26t4r6Y8ZRrDGkPRUzXr7QHhoIRcRCi22kmxH6eC28CPsef2c1bli6LweD1dx8AoeOedh5XwFsbIjEka+mICi301eYbgXUUFcHv1zqJSwoXFus+9VVML48CjXL23G9F0bermxsiMRx5SQEwbV8ymtyEjDz7Xws9ys3bytrGwifQ1f6/LJjROm+Pe2c3Vltd6R7qxD4xl4sbXkPg7cVg9RomBCn14ikUlS/Pwpx0j28ytEV34OX4dvlf9FIBW5le6NyUi66rkl15bDaH2vy0jB0fQFITqSkpy0AuP9JgsW91abGoW6U/WE39EQjyOslrMk0snAOHqscf/6uTTzhksrusNofX5+ciKGwP1ERjVSgasYAi21M9ashmSp4ey9bj9K4b/p8vlM1I1wyiMc/pcZiAulMvzuLEBwhICAgwCF9mukS7u5omjUacg/6N16ZrhVLytOQJz8OmYh+oTxFqkPe/E1Ytj8V4HmmS3jLkDd/E+NGl75ifHkUGuWUxVQVS6Jy+m7adofVVFaU1SfSEMHzks1yv3Isz9qF+/oWTHv8ESQa6qPM9637KOnwfaZj3N1s+F1nVhaJSovkG1RymJBVRgTfcBzksudsEtJpPp1byDbMLHud8i9lVkyn2HM3iXY5znqMVMyw9NZgql8fTKCe49IkZr1B+GSsfzUKXxsP6bdXbfax8Uz1SemKfH2Qu3aLhZIqbKcMKVW6ADQZQqDdGoq6HXrIhDl1jyFiY2DwckPt0jZcjzvQbduzGjesyUsDAF4U7t3qABRG6WwMKWESmdP+usrmEHg0M6/KyBu34J7S8bcT7YnYGITJ7ttsrzW0YmVNCkSztDCoHjArZC9xdowQLFnd2o06bG8cjnaSuu8p3v/tkTEtTNaERxNGOxWYwBTWY3XzwGtYu1qDC99SL5Su93/VxRkAADmDBn/GXcbmXMoCAPS/7IlfV+5C1m4luvPXbDfqUKoLAkh+bbGERAL4+0LcZXTWGlpBtIl5EIaAOCAAEw9ctljI747FeZkYtr2j7QtBgF7PqcO5PLMIs3cuxpXpWwCgR25DBiOJcr0WeZkpkF6ynW1whoN+VzaORfX4VgCtnIgj9veDh0RHv7MXY4QJRE+o3yVPqK/SUl0Q5d/aEVCwb182Kqfudfp8h4bmA9/ko0zXig+mZoAsq2Bd8dKNVXeRDuLAQTDU1yN
xf1GvbFTOwrjSLZqwEwDQ+jsSzjjHO3JK5gpyfAy2H9lp4ficeDgH0esoVxIuPyXFAQHIKriMqdJGAM7NGi6mb4Jqdqfn6JZHkzl1OAeA6BU3kajOAWA/GomOcr0WH0zNgKjsBq9uUL3pdzYJPd2OraGnARofVr5kHbyN8oZZtr8jly9pZOTZjZR44q8/5mLxm4s4CTqxHqtLBtxCVMEjKBPiAdxj9dqMK12T0/EAB+2A7p2SucYoISB365ydKZQLEbWvGno+cgGICES71cKdcH5tOUjshaAuk3KuHc4BgFSrEbWTGrCTf8yw2DfrqzPI8qmxOcbs6F/Gzadlt/Si39lkiKeK1hZCxMZg4gFK4boT3L4czB5Gduwvig0NmHwwg3afCWopxNIOJCZEkLt5cRZ0QqrVEGs7r+ROuGGqtBG3z1Xgjz6/AfBC+A9ZUGymXihMujPyGgZszynZFZDdNUJ/z3ZdT6B7TH0m6vgt9vXBgwMhSJbehnVY6LKHY1C4bhykl666hKM/uV2NQRJ644NZVvC4/NGBwcutY0nBeYVr5Ch8znC7AqLb3bd5QT8K0UvmAgBKkw5a7ltfiQefc5PRb+h3jQCAiKD5qJixG+6EqV+pcerxUALD7QrGr9sn85ZR3YKZu3NwVmN78zc2REKhXGj+oWsj8GwjjopA6Y4I5I/dZ/EVkVGdhIzqJFzcRm8x5gO9vyd+Gn7KrpfNf1SDOZf1uHISNjZEWmx7b8gF1C5ts3MEPeE/ZCHgWhOTotFSk5OAsn2xaJgXj4Z58XbbEZeKMehLCQZ9KUH0hbkW+w4NzbdJZcoW5PUSkNdLEK18jPAfsiz2RV+Yi9DzPetnZ+nTTJfUaBC2vgB/kmXiSbDlgr+spB8Gb+p0ei58LRIpUmpx+vO6FwEAqrMhCEFVX0RgBNFIBcrmuM5LwajR4tVTS/H3P+x4KsMqAapPS7K9UTlJia5BHBnVSbihjAEA+B10/UhEPsdq0I4CnJo+wsJQlu6tQrqTQQmNBg1izy+CYnM9KzM2oDOgiHQH5s08g+V+5QhHJgDAb7/940yVI/qHxwNJrIjWJ/r/5AVxPjvjU3DkEhAQEOAQRtZ0u8udQBdAcaCIytwj3+Qa4X/1Y3xROZ3dTFc9gVSrEbXoKs7+NgJx7twneO4LopEKAEBpVn+bII5lD8fg5q4Yl5vhSiKGomKibbq+jQ2RyPs6GQAQ4iJj1RnytSKsLp+ORo0nlZKUpQx54sBAPEyLwpnszsRLGxsiISvp5/BY0zhRxfIZqE5h/fy/VZUMWY0dVz0GYN2QRhdA4SqYMty3DLG0MrDlpM8VZbpWfF/9EvxQxul1xVERKMn2BgAbhatsDkHhunHw/dZ1FK7p/lfMDaEtf7T/2BSLJTI+sBd0QocpMCnjXDbkmUUIBntJhMSBgfjf68M6Al8ohXu0xQdHvpqCEKtcCl2rLZgwLefRRVeyFchhjb3n/9En4eh3nj1D3nOdxLxiHlWl1PqBO7ZgCqTn+XfSdxc5vw5Xa2iFykANntnFmQhOZS9pDB2mbPzUGm4nLhP4YIVlhQNbhXtf32IOBOATkyP/qVe3Omw7+/RS6phF7PVzd5VB1m9Lx8C8O1RwTle6VFtwRKWuBSI9N1rX+vk3jVWC5es/10rXVemN43vi4RwM20oZXELbazhPk0iXjR9wncAHaxxVOJj2l48wOLeY1yxtJqJX3MSyNamO22morFhsytxdZZCTH38B9XJbM1FPqi28s/hDKl9tnyXtOVyNVc6VLlsOx88UPXDSH7MmG/4lbYiquAc9xwUoxf5+CD3djiGeKqT57MUAq9Bflwp8sMLoYeg2VNmt1ch7qlETpFoNuEjBTrJjuZYueX5vE+orm0NwbMEUAID0WglInqpXG4wEoGpifaxyrnQXJv4LB+9QHRyyiR03lqcdsqkZGas+xIeffoM0ma0RpGtFhuB/3oGhro42QTibmIIJtoae7vBtdd3AB2v
oqjF0RaFciIh/13Dep88DESfmw7/Icjbs0Uyal/O4+rKoyUnAvFmd5b3NVSGamC+Mag3nSne5Xzl2KVxgsQwwOz8rCMvKDBEV1bw+cMb2dvgcuoL1vun4LMB2v0c9zBUZ+PhaMFcNGH4K1nkBMqopp8ubu2JcymhmomodVVaILkF5raEViYdzqPBvIRqRFnvPjLNEs5yk3llaFE/M/s/mKieHCjiZIHCudDOqkxBwyTUc/k0O2oPzLbe7ygzHFTPqA0Cj3NMmjeDkkmm4U/WC+d72xS1Mmxpn/tvzu8Jen4eOt6eds1sRQmUgMGxrOefLNE8T9p4ZZ3GFNXJruK5eIQRHCAgICHAI+zPd9nbMLn4Xx0bvhdzNCwV3wzGkVOv4OAGXRVajM1diMEFsDoCcoSQljcM6h6VzNm/n2VeYiMSJZUj2tJxzlelaMbs4E6HttpnQBJ49ZCX9kDyIGsMPrg1EBIch3qwrXUNTM4JTm3H0t7FYFXALpUkHEd5CxWbLL7F9dQE26HemCDhjvbWKsfOzGf0lzyxCxt55eDfuosX276tfQnBqieBR85wQsqkA6PAY5FLhAoKfrsBziDyzCBesDIBcR+8JPL8QRqOrOfQICAgIPLsIhjQBAQEBDhGUroCAgACHCEpXQEBAgEMEpSsgICDAIYLSFRAQEOAQQekKCAgIcMj/ARaqKtLmZA8yAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "ds_test = create_dataset(test_data_path).create_dict_iterator()\n", - "data = next(ds_test)\n", - "images = data[\"image\"].asnumpy()\n", - "labels = data[\"label\"].asnumpy()\n", - "\n", - "output = model.predict(Tensor(data['image']))\n", - "pred = np.argmax(output.asnumpy(), axis=1)\n", - "err_num = []\n", - "index = 1\n", - "for i in range(len(labels)):\n", - " plt.subplot(4, 8, i+1)\n", - " color = 'blue' if pred[i] == labels[i] else 'red'\n", - " plt.title(\"pre:{}\".format(pred[i]), color=color)\n", - " plt.imshow(np.squeeze(images[i]))\n", - " plt.axis(\"off\")\n", - " if color == 'red':\n", - " index = 0\n", - " print(\"Row {}, column {} is incorrectly identified as {}, the correct value should be {}\".format(int(i/8)+1, i%8+1, pred[i], labels[i]), '\\n')\n", - "if index:\n", - " print(\"All the figures in this group are predicted correctly!\")\n", - "print(pred, \"<--Predicted figures\") \n", - "print(labels, \"<--The right number\")\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "构建一个概率分析的饼图函数,本例展示了当前`batch`中的第一张图片的分析饼图。\n", - "\n", - "`prb`存储了上面这组32张预测数字和对应的输出结果,取出第一张图片对应[0-9]分类结果`prb[0]`,带入sigmol公式$\\frac{1}{1+e^{-x}}$,得到该图片对应[0-9]的概率,将概率值0.5以上的数字组成饼图分析。" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": { - "ExecuteTime": { - "end_time": "2021-02-03T08:54:26.413763Z", - "start_time": "2021-02-03T08:54:26.318837Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The probability of corresponding numbers [0-9] in Figure 1:\n", - " [0.16316024637045334, 0.04876983802517727, 0.02261393383191808, 0.9963960715325838, 0.037634749376478496, 0.998856840107891, 0.1612087582052347, 0.08714517716531343, 0.6207903209907534, 0.9653037548477632]\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAV0AAAD3CAYAAAC+eIeLAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3deXzU1b3/8dcnC/suAVlUVmWxiCiguKCAVYuI1qVVexGr4tZeq73V1nvrmNrVn5baRVuvta1rtdbWWnstqBXqgooWZRdkD4QESEJC1pn5/P44X0pEIJnMcmb5PB+PeUyY+X6/886QfHLmfM/3HFFVjDHGpEae7wDGGJNLrOgaY0wKWdE1xpgUsqJrjDEpZEXXGGNSyIquMcakkBVdk1ZE5DURuSZJxz5SRGpEJD/4d18RWSgi1SJyn4jcISIPJ+F1rxCReYk+rslMBb4DmMQSkQ3ANar6su8s6UZVNwFdmj00B9gBdNMEDVgXkUHAeqBQVcPB6z4BPJGI45vMZy1dk8uOAlYkquAa0xpWdLOYiMwWkTdEZK6IVIrIOhGZFDy+WUTKROTKZttPF5F/icju4Pm79jveLBHZKCI7ReTbIrJBRKYFz+WJyDdF5OPg+WdEpNchss0UkSXBa30sIuccYJuhIvJqcLwdIvKEiPRo9vztIlISdA+sFpGpweMTRGRxcOztIvLj4PFBIqIiUiAivwWuBG4LuhymichdIvJ4s+OfKiJvBu/dZhGZ3Yr3aWFwXxkc9+Tg/X692XEnici7IlIV3E9q9txrInJ38P9WLSLzRKT3wf+XTaaxopv9JgIfAocBTwK/B8YDw4AvAT8Xkb0fufcAs4AewHTgBhG5AEBERgEPAFcA/YDuwIBmr/OfwAXAZKA/UAH84kCBRGQC8CjwjeC1Tgc2HGhT4AfB8UYCRwB3Bcc4BvgKMF5VuwJnNzvG/cD9qtoNGAo8s/+BVXU27iP/ParaZf/uGBE5Evg/4GdAETAWWNLS+xR8LwA9guO+td9xewEvAj/F/Z/8GHhRRA5rttnlwFVAH6Ad8F8HeG9MhrKim/3Wq+pvVDUCPI0rXN9R1QZVnQc04gowqvqaqi5V1aiqfgg8hSuiABcDL6jq66raCNwJNP9Yfh3w36q6RVUbcMXxYhE50HmDq4FHVHV+8Folqrpq/41UdW2wTYOqluMK1N48EaA9MEpEClV1g6p+HDzXBAwTkd6qWqOqi9rwvl0BvKyqT6lqk6ruVNUlrXifWjIdWKOqj6lqWFWfAlYBM5pt8xtV/UhV63B/MMa2Ib9JU1Z0s9/2Zl/XAajq/o91ARCRiSLyDxEpF5Eq4Hpg70fb/sDmvTupai2ws9lxjgL+FHwUrwRW4gpj3wNkOgL4+ACPf4KI9BGR3wddCLuBx/fmUdW1wNdwxb0s2K5/sOvVwNHAquDj+3ktvVYsGVt4n1rSH9i432Mb+eSnhtJmX9fyyZN/JsNZ0TXNPQn8BThCVbsDv8R9xAfYBgzcu6GIdMR9PN5rM3CuqvZoduugqiUHeJ3NuI/9LfkBrjU9Jugq+FKzPKjqk6p6Kq7gK/Cj4PE1qnoZ7uP5j4BnRaRzK16vtRkP9T61dFJua5C3uSOBA71PJgtZ0TXNdQV2qWp90O96ebPnngVmBCeB2gHFNCuAuMLzPRE5CkBEikRk5kFe59fAVSIyNTgBN0BERhwkTw3upNQAXB8wwfGPEZEpItIeqMe12CPBc18SkSJVjQKVwS6RmN4J1987TUQuDU68HSYiez/mH+p9KgeiwJCDHPdvwNEicnlw3C8Ao4C/xpjPZCgruqa5G4HviEg1rs/23yegVHU58FXcibhtQDVQBjQEm9yPa/3NC/ZfhDuJ9ymq+g7uRNFcoApYwKdbf+AK+7hgmxeB55o91x74IW6cbSmuVXtH8Nw5wHIRqQlyfVFV61v7JgQZNwGfA74O7MKdRDsuePpQ71Mt8D3gjaC
r5aT9jrsTOC847k7gNuA8Vd0RSz6TucSGKJq2CEY8VALDVXW97zzGZApr6ZpWE5EZItIp6B+9F1jKgYd6GWMOwoquicVM3ImgrcBw3Md2+6hkTAyse8EYY1LIWrrGGJNCVnSNMSaFrOgaY0wKWdE1xpgUsqJrjDEpZEXXGGNSyJbrMcakpffee69PQUHBw8CxpGcDMQosC4fD15xwwgllrd3Jiq4xJi0VFBQ8fPjhh48sKiqqyMvLS7sLCqLRqJSXl48qLS19GDi/tful418PY4wBOLaoqGh3OhZcgLy8PC0qKqrCtcRbv1+S8hhjTLzy0rXg7hXki6mOWtE1xpgUsj5dkx7cZOSDcasoHAb0BHrtd98JyAfyR7Ji8ypGDgDCuAnK63Bz65Y3u29+K1WlKZXfkkkwkRMSejzV9w71dG1trUycOHFEY2OjRCIRmTFjRsXcuXO3xvuyVnRNaokMBE4ERuCWw9l7G0gMn7x2021xcJzWCouwFrd224rgthJYperWjjOmuQ4dOujrr7++unv37tGGhgYZP378Ma+88krV1KlT98RzXCu6JnncsuLj97sd7ilNAa7QjwAubPZ4VIQNwDu4FSxeU+VTKxOb3JOXl0f37t2jAI2NjRIOh0VEWtqtRVZ0TeK4tdNOwy2XczbwGb+BWiUPt57ZEOCLACJsBxbiivACYLlqiwtOmiwUDoc59thjR23atKn9lVdeWTZlypS4WrlgRdfEyy1EOQNXaM8AYl11Nx31BS4JbgBlIvwJtxbaAtWYF7k0GaqgoIBVq1at2LFjR/706dOHvvvuux3Gjx8f03p7+7PRCyZ2Ir0RuRGRN4D1wM+A6WRHwT2QPsB1wCvAVhEeFGGKCPmec5kU6d27d+TUU0+tfuGFF7rHeywruqZ1RDohcjkiL+JWA/4FMIlPLsOeC/oA17OvAD8gwnjPmUwSbN26tWDHjh35ADU1NfLaa691GzlyZFytXLDuBdMSkaG4pdevArp5TpNu+gA3ADeI8Dauxf8HVRr9xspSLQzxSrTNmzcXzp49e3AkEkFVZebMmbsuu+yyqniPa0XXHJjINOBm4HPYJ6LWmBjc7hPhV8AvVdmWihcWkZuBa3GfOv5XVX+SitfNdhMnTqxbuXLlikQf136ZzD4ihYhci8hyYD5wHvYzEqu+wJ3ARhGeEiGxA/r3IyLH4gruBOA44DwRGZ7M1zTxsV8oAyL5iFwFfAQ8BIzynCgbFOKGoC0W4Y8ijEzS64wEFqlqraqGcUPcLmxhH+ORFd1cJiKIXIa7OusRYJDfQFnr88AyEX4nkvD3eBlwuogcJiKdcN1BRyT4NUwCWdHNVSLnAx8ATwJHe06TC/KAWcBqEX4hkpgr81R1JfAjXHfQS7j/03Aijm2Sw4purhEZgcg84Hky44qxbNMOuBH4WISQCO3iPaCq/lpVx6nq6cAuYE28xzTJY0U3V4h0RuQe4EPgLN9xDJ2Au4AlIpwWz4FEpE9wfySuK+OpuNOZpLEhY7lAZAbwc9y0iSa9jAQWiPAwcJsqlW04xh/FTS7UBNykqhUJTZgmpDixUztqqOVxvwMGDPhM586dI3l5eRQUFOiyZctWxvu6VnSzmUhP4EHgC76jmEMS3LCvGSJ8TZWnY9lZVeNqKZtDW7BgwUf9+vVLWD+5dS9kK5GpwFKs4GaSw4Hfi/BXEfr5DmOSw4puthFpj8h9uLPZA3zHMW0yHfhAhHN9BzEwderU4aNHjx5577339k7E8ax7IZu4q5OeAMb4jmLiVgS8KMJc4Ju21JAfb7zxxqpBgwY1lZSUFEyZMuXo0aNH15977rk18RzTWrrZQuQK3OoHVnCzhwC3AgtFGOg7TC4aNGhQE8CAAQPC06dPr3zrrbfinr7Uim6mc5fw3gs8DnT0HcckxUnAv0SY5jtILtm9e3deRUVF3t6v//GPf3QbM2ZM3OvpWfdCJnOjE34PfNZ3FJN0vYG/i3CbKvf5DuNDa4Z
4JdKWLVsKLrzwwmEAkUhELrroop0XX3zx7niPa0U3U4mMwl1VNsx3FJMyecC9IhwB3KpK1HegbDZq1KjG1atX29SOBhCZAizCCm6uuhl4WoQOvoOY2FnRzTQiFwJ/A7r6jmK8uhiYJ0JP30FMbKzoZhKR2cAfgPaek5j0cBrwuohd3p1JrOhmCpGv4ea8tRVoTXOjgEUijPYdxLSOFd1MIFIMzCX3Vt41rdMPeFkEW6YnA1jRTXci/41bc8uYQzkceEWEo3wHMYdmQ8bSmchXgO/6jmEyxhHAqyKcpspW32ESbsHixC7yOfnEFsf9FhcX93nssceKRIQRI0bUPv300xs6deqk8bystXTTlcgs4Ke+Y5iMMwTX4u3jO0imW79+feFDDz3Ud8mSJSvWrFmzPBKJyMMPP9wr3uNa0U1HbljYI1gfrmmbEcB8EeIuELkuEonInj178pqamqirq8sbOHBg3BMPWdFNNyJn4pZbsVEKJh5jcLOU2fDCNho8eHDTTTfdVDp48OAxffr0Oa5r166Rz3/+83FfBmxFN52IDAWexcbhmsQ4Cfil7xCZqry8PP/FF1/ssXbt2qWlpaUf1tbW5j3wwAPWvZA1RLoCfwH7SGgSarYIX/MdIhO98MIL3Y488siG/v37h9u3b68XXHBB5Ztvvtkl3uNa0U0HInm4ycdH+Y5istK9IrYCdKwGDRrU+P7773eprq7Oi0ajvPrqq11HjhxZH+9xbchYergbmOE7hMla+bgJciaostZ3mDZrxRCvRJoyZcqeGTNmVIwZM2ZkQUEBo0ePrr311lvL4z2uFV3fRC4F7vAdw2S9nsDzIpykSrXvMJli7ty5W+fOnZvQMc/WveCTyGDgf33HMDljFPAb3yFynRVdX0TycUvsdPMdxeSUi0SY5TtELrOi68//AJN8hzA56Wc2HaQ/VnR9EDkZ+LbvGCZndQN+K2JXPPpgRTfV3Hjcx7ErzoxfZ+KW/TEpZkW3FUSkg4i8IyIfiMhycfPbttU9uElJjPHtByI2NjzVbMhY6zQAU1S1RkQKgddF5P9UdVFMRxGZCMxJRkBj2qAD8JgIE1UJ+w7TEhESOrWjKi2O+7377rv7PProo0WqyqxZs8rvvPPOsnhf11q6raBOTfDPwuAW25yabrTCg9h7btLLOOBG3yHS0bvvvtvh0UcfLXr//fdXrly5cvlLL73UY+nSpXHPi2IFoJVEJF9ElgBlwHxVfTvGQ9wEHJ/4ZCYxNuO6OUcCo4H7g8d3AWcBw4P7ioPsnw+MDW7nN3v8CtyEX82vf7kbeD5RwRPhLhEO8x0i3SxdurTjuHHjarp27RotLCzklFNOqX766ad7xHtcK7qtpKoRVR0LDAQmiMixrd5ZpB/uN82krQLgPmAlsAj4BbAC+CEwFVgT3P/wIPt3BJYEt78Ej33Y7P6fQBWwDXgHmJnw7yAOPbGfz08ZO3Zs3dtvv921tLQ0v7q6Om/+/PndN2/e3C7e41rRjZGqVgKvAefEsNuPsYsg0lw/3CdtgK64Fm8JrkV6ZfD4lcCfYzhmIVAHRIFGXGv4TuA7CcibcHNE+IzvEOlk3Lhx9TfffHPplClTjj7zzDOHjxo1qragIP7TYFZ0W0FEikSkR/B1R2AasKqVO58MfDF56UzibQD+BUwEtuMKMsH9wc6j1AMn4qaw3VuYRwJH4or5pcBa3KmAtOxlygd+4jtEurnlllt2rFixYuXixYtX9+rVKzJ8+HCbZSxF+gG/E3cyLA94RlX/2sp9D/Z51KSlGuAiXP2J5cPJJqA/sA6YAnwGGMon69gM4FfA94APcH3E18YfOXGmiPB5VZ7zHSRdlJSUFAwYMCC8Zs2adi+++GKPd955p3WNrUOwotsKqvohbWmeiEwHTk94IJMkTbiCewXw+eCxvrh+2H7B/cHWe+wf3A8BzsC1lIc2e/55XEt4D7AMeAb3o3EF0ClR30Ai3CvCC6rEvRZYorVmiFeinX/
++UMrKysLCgoK9Cc/+cmmoqKiSLzHtKKbLCKCLZ+eQRS4GtclcGuzx88Hfgd8M7g/0AmwClzhbA/sAN4Abmv2fBNuNMRfcSfk9l59u7evN62K7mDgS9hsZAC89957qxN9TOvTTZ4LceOHTEZ4A3gMeJV9Q7/+hiu283FDxuYH/wZYDFwTfL0S14o9Djfs7Jt8chGQX+BOwnXCDR9TXPfDKUDcI5CS4XYRqw3JYi3dZHCt3Lt8xzCxOJWDX+/yygEeOxF4OPh6ErD0EMduvkSZ4BZ7TmvH4PpXnvUdJBvZX7PkOAds+I3JaN/yHQCIRqPRtJ4JLcgXjWUfK7rJYauvmkw3ToSzPWdYVl5e3j1dC280GpXy8vLuuDOjrWbdC4kmMgr4rO8YxiTAHcDffb14OBy+prS09OHS0tJjSc8GYhRYFg6Hr2lxy2ZENbZ5W0wLRH6FzSSWdAPYsngrA070nSMHTFLlLd8hskk6/vXIXCKHAf/hO4YxCXSd7wDZxopuYs3BzXxiTLa4WISuvkNkEyu6iXW17wDGJFhn3MQRJkGs6CaKm9hmaIvbGZN5rvIdIJtY0U2cL/kOYEySnCLC0b5DZAsruong1k2zj2Amm1lrN0Gs6CbGOUBv3yGMSaJZIuT7DpENrOgmRkq6FuqBCbhpVUYDof2e/yrQ5SD7PsG+aVzG4v7jl+CWOT4HOBZ4oNn2c3CTExoT6A9M9h0iG1jRjZdIB+C8VLxUe9wcWB/gCuZLuNW8wM15VXmIfa9g3wpejwGDcMX378AJuFW8Hgq2/QB3qU1arm9gfJrhO0A2sKIbv8mkaEJUYV9Ltim4CRABvgHc08rjPAVcFny9dxWvcLPnv02aruJlfDu/5U1MS6zoxu/cVL5YBNdC7YNb7GUi8HPcb0O/Q+zX3NPsK7pnAaXBcW7DrWN7AvvWQTCmmSGD+jWM9B0i09mEN/FLadHNx3URVOJmSV8I/AG3PHFrvI1rlu9dP74AeDL4ugk4G1d4b8Wt+jULa97kNq0e3K9xxRXTdjbNmVE+7Ig+TdPhxJW+U2UyK7rxEBkCfsYv9sCtxPUP3Bqzw4LHa4Ov1x5kv9+zr5W7vwdw6xu8BbTDtYhPxopurmlfGF13+nHVm68/v7z7eZOqRrcr0InNnv4scK+vbNnAim58UtrKLcf1wfbA9cO+DNyO6x7YqwsHL7hRXKt44QGeq8Ct4DUP19LNw/UXx73etMkAWjegd9OyL5y5q+66meWDjx7YMAS3wuaBnMaCxR2YfKL9aLSRFd34pHSS5224lmgEV0Av5dDDJv6CG9Ww96TYQmAgB/5t+g7wP7hCezZuVa/PANcnIrhJOwX50c0njdqz7rrzyztfdHrFsR3b6/hW7toBOA23YFyricgtuEXlFLe20VWqmpOF2+bTjYdIGVDkO0Yusvl0Y6WNfXqEl3/+9IrdN8wsP3LM0LrBcRzsh0w+sdXL+YjIAOB1YJSq1onIM8DfVPW3cWTIWNbSbSuRwVjBNWksP0+3HT+8du0108vbXTZ116hunaOJGno9oQ37FAAdRaQJdy53a4KyZBwrum03seVNjEkljfTsGlk+Y1JlxQ0zy/udNGrP0bR+JGEsTmDBYmHyia36mKyqJSJyL25ATB0wT1XnJSFXRrCi23Zt+WtvTEKJ6I7Rg+pWX3Xuzrwrz9kx8rBukTEpeNnuwHDgo9ZsLCI9gZnAYNxoxz+IyJdU9fHkRUxfVnTbzoqu8UC1a6foynMmVJXfMLO86PTjqkfk53GKhyDjaWXRBaYB61W1HEBEngMmAVZ0TSuJFADjfMcwuUHQqmEDG1bMOntn9Jrp5Ucf3is8yncm3IWRT7Ry203ASSLSCde9MBU3sCYnWdFtm2HYWmgmiTq2j66ZcvzukhsuKO919viq0QX5nOw7036ObXkTR1XfFpFngfdx03z8i33zK+UcK7ptM9x3AJNttOaovo0rLp+2q2HOjPJhgw5vHE56/5y1uug
CqGqIT89GmpNswpu2GdbyJsYcSPNZkY9uGlh03cZn7vr4X/Xz32+34emlE75/bclp/XrV9PtC8bcYdvmFTLxhNhu2udFVbyz9gDFfvozx181i7ZbNAFRWV3P2N76Kh/H2A1mwuHuqXzQbWNFtm3RugZi0pfX9erH4+hmPv77y0Sc2Nr78aGH/3m8cdUTRwuPbF2q7vVv9+m/P07NLN9Y++Sduufhybn/oZwDc98zj/PE7P+L7197Eg395FoC7H3uYO66YjYj4+IbSoW8541j3QttYS9e0SkG+bhk/Ys+6OeeVd7zkzIrRnTtE/30VXW19mKZw+FMF8/k3FnLX7GsBuHjyFL5y/z2oKoUFBdQ1NFBbX09hfgEfl2yhpLycyWNPSO03tc9RuPmRTAys6LaNFV1zENrUu3t4+YWnVVbdMLNs4PHD64biprz4t0gkwglz/oO1JVu46cJLmDjqk92jJeVlHFHUF4CCggK6d+nCzqoqvnX5bObc+306tm/PY3cU818P3s/dV3udHWOAzxfPVFZ0Y+WGix3pO4ZJH3l5uv24obVrrpm+o+CKabtGde8SGXuo7fPz81ny6yeprK7mwm9/g2Xr1nLskH1/x5VP98+KwNjhx7Dowd8AsPCD9+nfuwhV5QvF36Iwv4D7bvwafXsdluDv7pAGtryJ2Z8V3dj1BlsVNbdptEeXyPLpJ1ftunFm2eEnjd5zdJ7QN9aj9OjalTPGnsBL77z1iaI7sKgvm8u3M7BPX8LhMFU1NfTqtu+clary3cce4enQ9/nK/fdQPPs6NpRu5afPPc33rrkxMd9i61jRbQMrurHr5TuAST0R3TnyqPrVs8/Zwexzdo4o6hH+TFuOU15ZQWF+AT26dqWuoZ6X33uH2y+b9Yltzp90Gr976UVOHj2GZxe8ypRx4z/R7/u7l/7K9JNOoWfXbtTWN5CXJ+Tl5VFbn9iZElW1LqyRnbXR+qrKcPWe7Y27GrY0bI+sr9+Wt65uS7s1dZsj8yYvavlA5hOs6MYupZ/fjC+qXTpGV312/O6yG2aW9T7z+OqR+XlMiveo23bu4Mof3EUkGiUajXLpmdM4b9Jp3PnILznxmJGcf8pkrv7cTP7j+yGGXX4hvbp14/d3fu/f+9fW1/O7v7/IvHt/DsCtl17ORXfeTrvCQp769ndb8V1pfVgjO+qi9VUV4Zo9ZY07GzY3lEU31G+VdfUlhevrtnbeUL+125aGsl67I3u64VqzB2vR9oj3/chFNp9urEQuAP7kO0auS858ulo1bEDDyi+dtTN8zXk7jh7Qu6lPYo+fHKra4AppQ1VluLqmrGlXw5b6suj6+q2sry9pt84V0q5bGsp6VUVqEjm2doeG1KY3jZG1dGNnLd0s0qFddO0ZY6tLbphZ1v3ciVXHFhZwku9MAKraGCGyozbSUFkVrnGF1LVIWVe3td36+pJO64MWaWW4ujtuJEGqRxP0lGLJ05BGU/y6Gc2Kbuys6GY03XNkn8blX5y6q+G6GeVDhvRvHEaKhgC6QhrdWRdpqKwMV9eUN1XU/7uQ1pe0W1dX0nFj/bZuWxrKDtsV3t0d6B/c0lU+0Bmo9h0kk1jRjV1X3wFMbAoLohsnjd6z8fqZZZ0vOLXy2A7tNGHTcqpqU1BIK6oiNTXljRUNWxq2hzfUl8r6+pJ2H9dt6bixflvXLQ1lh+0MV/XATSqejInFfbGRPDGyohs7+yFLe9pweK+mZRdPrthz/czyo0YPqj8Kd/VU6/ZWDUeI7qiPNlRUhWv2lDdW1G9pLAtvrC+VdXUlhevqt3Tc4FqkvXY2VfZQOBx3y0U2lUCMrOjGzstF7ubQCvK15ISj96y79rwd7b8wZdfoLh2jn7g2VlXDUaI766INFbvDe2rKmyoatjSUNW2s3xYU0pKOG+q3ddvcsL3nzqbKnjleSGNhjZAYWdGNnf1lTwP5Eo306tK05IwJ28uunL6lw4mjKzqVN1XK1oay2sd2lS5eV7+
lYH3d1o4b6rd229SwveeOpopeCn0h9osYzCHZ70OMrOjGzlq6nt13Mm+WnzXyqMa82n7PoXnP7camXfHHim6MrOjGzoquJzs7UnnKl1mxuohJsMd3HONY0Y2RFd3YWdH14NmRvP/FS+gXScBVYSahrE83RlZ0Y9foO0AuqSug/nNX8PZrgzgdsT94aSjsO0CmsaIbu92+A+SKN49g1dRZFNYXMtl3FnNQu3wHyDRWdGNnV98kWViIfPkCXn9sDJMQCn3nMQdVqyFN7NRmOcCKbuwqfQfIZqsPY9NJ11BZ2dFatxnAWrltYGceY2c/aEny7TN5fcRX6FXZkTG+s5hW2ek7QCaylm7s7ActwUo7Uz7xWtZv6sGpvrOYmNjvQhtYSzd25b4DZJNHxvLOgK/Dph4kbBIakzJWdNvAWrqx2wJEsT9YcaluR82Zs1nyXn9r3WYwK7ptYEU3VqpNiGzBVgRus78PZemMy+nWlG8FN8OV+g6Qiazots06rOjGrDGPpksu5c2/HMOpiF3JlAVW+w6Qiazots064AzfITLJkr58fOrVNO5pZ0PBssgq3wEykRXdtlnvO0CmiILefC7//PkEJiB08J3HJIxiLd02saLbNut8B8gEm7qxbfwctpV14XTfWUzCbdKQ1vkOkYnsDHzb2MeqFvz4ZN4cdAsdy7owzncWkxT2O9BG1tJtm2W42cba+Q6Sbio6UDXpapavKrIpGLPcSt8BMpW1dNtCtRFY6jtGunl2JO8X3UatFdycYC3dNrKWbtstBk5ocascEMx5+85rgzjN5rzNGR/4DpCprKXbdot9B0gHbx7Bql63U/LaYJtkPIfUAu/5DpGprKXbdjlddCNC9MszWfjocZxic97mnEUa0ibfITKVFd22WwbUQ+6NPf2oF5snXktFZUe7QCRHLfQdIJNZ90JbqYaB133HSLU7z+D1Y75KD5vzNqdZ0Y2DtXTjMw+Y5jtEKmzvzI6J1/LxRpvzNtc1Aot8h8hk1tKNzzzfAVLhkbG80//r6MYeTPSdxXi32K5Ei4+1dOPzIW56u8N9B0mG6nbUTLmSfy0ewGm+s5i0YV0LcbKWbjxUFZjvO0YyzB/C0sNuZ6cVXEs+Cl4AAAnCSURBVLOfv/kOkOms6MYvq7oYGvNomvlFFnz2PxjVlM9RvvOYtLINeMN3iExn3Qvx+z8gTBa8lzbnrWnBcxrSqO8Qmc5auvFS3UmGdzFEQf/zHBYefz3997RjpO88Jm39wXeAbJDxrbM08RRwru8QbbGpG9smzGHrdpvz1hzaduCfvkNkA2vpJsafgYwbRjP3JDfn7fYuNnGPaZF1LSSItXQTQbUakb8Cl/iO0hoVHag65WqWr7QpGE3rPes7QLawlm7iPOU7QGs8N4J/Fd3GHiu4JgbbgQW+Q2QLa+kmzt+AXUAv30EOpK6A+umX8/Y/bApGE7uHNKQR3yGyhbV0E0W1Afi17xgH8tZAVve6nS3/GMJkK7gmRmHgl75DZBNxF1WZhBA5CvgYyPcdBf495+0/Hz2OkxFbz820yR80pJf6DpFNrHshkVQ3IvICcIHvKB/1YvNJ11JR0dEudDBx+ZnvANnGuhcSz/sP6d45bytszlsTnw80pDY2N8GspZtoqq8ishwYneqX3t6ZHSddw8cbetqctyYhfuE7QDaylm5y3J/qF/xNMOfthp42561JiArgCd8hspGdSEsGkXbAR5D8WbpqCtkzZTbvv2tTMJrEulNDerfvENnIWrrJoNoIfDfZLzN/CEt7fZMdVnBNgpUDc32HyFbW0k0WkQJgFTA00YduzKPp0kt48/kRnIqkx/A0k1Vu1ZBa0U0SO5GWLKphRO4GfpvIw37Ql3WnfpmGmvY2FMwkxWbgAd8hspl1LyTX48DqRBxo75y3Y6+nX017m/PWJM13NKQNvkNkM2vpJpNqBJE7gafjOczmbpROuJaS0q42561Jqo9I8Ccz82nW0k021WeA19q6+/0
TeeuoW2hf2tXmvDVJF9KQhn2HyHbW0k2NrwBLiOH9tjlvTYq9RZyfyEzrWEs3FVSXE8PlwTbnrUmxMHC9hmwoUyrYkLFUEemGG0LW72Cb1OfTcN4VLHrF5ryNTxR4COgKXAG8DSzCXWP1DaDzQfabB6wBFBiCW/UugpuefjcwHpgQbPuX4N8H/d/MKD/WkH7dd4hcYS3dVFHdjfuVP6C3BrK65zfZ/IrNeRu/RUDvZv8+EpgFdD/EPptwg6VuAG4EtgIbgLVA/+Dx94JtS3GFOTsK7mYg5DtELrGim0qqTwAvN38oIkRnz2TBpKsZXF/IME/JskcVrrU6rtlj/YCeLewnuA/ZkWb3XXAzIzfhWs97vQqcmaC8/s3RkNb4DpFL7ERa6n0ZWAZ0szlvk+Al4Cwg1pGmRwCDgHuDf08AinCLL30APAycgusg6g90S0BW/x7VkL7kO0SusaKbaqqbEbm5eDJX33UGxyEc4TtS1liN66/tD6yPcd+dwA7g1uDfj+G6FwYBFwePRYLHL8MV9yrgOGBEHJn9KQVu8R0iF1n3gg+qv73rTMoQuvqOklU24wrvXNyC4euBP7Zy31XAQKB9cBsGbNlvm3eBscHj+cAlwMK4U/sQBWZpSHf5DpKLrOj6cy2f/rU28ZgGfB3XfrsYGAxc1Mp9u+NatpHgthHXvbBXHe56reNwfbx7T3Vm5qUE39WQzvcdIldZ0fUkaGXM4pOnaEwyLALuww37ehB4Pni8pNnXo3D9tw8Gt77AMc2OsQA4HVdsh+JGNzwIGXid4CtAse8QuczG6XomxXIXNmTHpMY24HgN6XbfQXKZtXT9Kwae8x3CZL0IcJkVXP+s6HoWXHo5Czc3gzHJcqeGdIHvEMaKblrQkO4BZgJlvrOYrPRn4Ae+QxjHim6a0JBuAi4EGn1nMVnlDVy3gp28SRNWdNOIhvRN4DrfOUzWWAnM0JDW+w5i9rGim2Y0pL8Ffug7h8l4JcA5GtIK30HMJ9mQsTQlxfJT4Ku+c5iMVAWcpiFd6juI+TRr6aYpDel/Ar/yncNknAbgAiu46cuKbnq7AfiN7xAmYzQCX9SQvuY7iDk4615Ic1IsecCjuDUQjDmYOuBCDenffQcxh2ZFNwNIseQDTwKX+s5i0tJu4DwN6T99BzEts+6FDKAhjeBauo/6zmLSzi5gqhXczGEt3QwjxVIM3Ok7h0kLpcBZGtJlvoOY1rOim4GkWK7CjWwo9J3FeLMRV3DX+A5iYmNFN0NJsUzDrYuQHat1mVgsAC7RkJb7DmJiZ326GUpD+jJuqcTNvrOYlPoFMM0Kbuaylm6Gk2Lpj5uPd6LvLCapGoAbNaSP+A5i4mMt3QynId0KnArcA9hf0Oy0FZhsBTc7WEs3i0ixnI0bVtbHdxaTMG8CF2lIS30HMYlhLd0sElyNNBa3+KDJbI3AfwOnW8HNLtbSzULBpcPfwq2/lu85jondEuBKDemHvoOYxLOim8WkWCYCDwFjfGcxrRLGLatzt4a0yXcYkxxWdLOcFEsB8DXgLqCz3zTmEFbgWreLfQcxyWVFN0dIsRwB/Ay3AKZJH7XA/wN+aMvq5AYrujlGiuV8XPE90neWHKfAY8AdGtIS32FM6ljRzUFSLJ2Bb+K6Hbp4jpOL/gncal0JucmKbg6TYukN3AbcBHTyHCcXrANu05D+0XcQ448VXYMUS19cy/d6oIPnONloHXAf8LCGtNF3GOOXFV3zb8E8DncA1wLtPMfJBu/hTpI9G0xEb4wVXfNpQfG9BpgDDPAcJxPNA+7RkNqVgeZTrOiagwrWZpuBW5X4LED8JkprdcAfgB9rSD/wHcakLyu6plWkWIYB1wFXAYd5jpNOFgG/AX6vId3tO4xJf1Z0TUykWDoA5wKX4FrBuTjk7CPgKeBJDelHvsOYzGJF17RZUIDPwV3lNh0o8psoaSLAYlxf7Z81pO97zmMymBVdkxDBzGYTgfOA04ETyezhZxtxRXY
e8IqGtMJzHpMlrOiapJBiaQeMw63jNim47+s11MGFgZW4KRXfBuZbt4FJFiu6JmWkWIbgWsPHAMOBYcF9zxTGqAI+wBXYvffLNaQNKcxgcpgVXeOdFEsv9hXhwbgi3APovt99D9z0lGGgCbe6wv73dbg1xbYCJfvfa0irkv79iGwAqnF9wWFVPTHZr2kyhxVdYxIsKLonquoO31lM+rE10owxJoWs6BqTeArME5H3RGSO7zAmvRT4DmBMFjpFVbeKSB9gvoisUtWFvkOZ9GAtXWMSTFW3BvdlwJ+ACX4TmXRiRdeYBBKRziLSde/XwGeBZX5TmXRi3QvGJFZf4E8iAu7360lVfclvJJNObMiYMcakkHUvGGNMClnRNcaYFLKia4wxKWRF1xhjUsiKrjHGpJAVXWOMSSErusYYk0JWdI0xJoWs6BpjTApZ0TXGmBSyomuMMSn0/wFxsexm0sqA0gAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "import numpy as np\n", - "# define the pie drawing function of probability analysis\n", - "\n", - "prb = output.asnumpy()\n", - "\n", - "def plot_pie(prbs):\n", - " dict1 = {}\n", - " # remove the negative number and build the dictionary dict1. The key is the number and the value is the probability value\n", - " for i in range(10):\n", - " if prbs[i] > 0:\n", - " dict1[str(i)] = prbs[i]\n", - "\n", - " label_list = dict1.keys()\n", - " size = dict1.values()\n", - " colors = [\"red\", \"green\", \"pink\", \"blue\", \"purple\", \"orange\", \"gray\"] \n", - " color = colors[: len(size)]\n", - " plt.pie(size, colors=color, labels=label_list, labeldistance=1.1, autopct=\"%1.1f%%\", shadow=False, startangle=90, pctdistance=0.6)\n", - " plt.axis(\"equal\")\n", - " plt.legend()\n", - " plt.title(\"Image classification\")\n", - " plt.show()\n", - "\n", - "\n", - "print(\"The probability of corresponding numbers [0-9] in Figure 1:\\n\", list(map(lambda x:1/(1+np.exp(-x)), prb[0])))\n", - "plot_pie(prb[0])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "以上就是这次手写数字分类应用的全部体验过程。" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video.md b/tutorials/training/source_zh_cn/quick_start/quick_video.md deleted file mode 100644 index 432372d8e99466c4bd23dc8acf2605d9b0adda6a..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video.md +++ /dev/null @@ -1,759 +0,0 @@ -# 手把手安装和体验 - - - 
-[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - -涵盖安装到体验的视频教程,手把手帮助您快速、更好地使用MindSpore。 - -## 安装MindSpore - - - - - - -## 体验MindSpore - - - - - - -## 算子开发 - - - - - - -## 使用可视化组件MindInsight - - - - - - -## 使用安全组件MindArmour - - - - - - -## 使用端侧组件MindSpore Lite - - - - - - -## 参与社区 - - - - - diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/ascend310.md b/tutorials/training/source_zh_cn/quick_start/quick_video/ascend310.md deleted file mode 100644 index a2f683f5da92c9bfc9cade220bd5919edad066d8..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/ascend310.md +++ /dev/null @@ -1,9 +0,0 @@ -# Ascend 310上安装MindSpore - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**立即安装**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/ascend910.md b/tutorials/training/source_zh_cn/quick_start/quick_video/ascend910.md deleted file mode 100644 index d48d9c76b9790de643b86245b31b5f37ab160ec1..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/ascend910.md +++ /dev/null @@ -1,9 +0,0 @@ -# Ascend 910上安装MindSpore - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**立即安装**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/ascend910_operator_development.md b/tutorials/training/source_zh_cn/quick_start/quick_video/ascend910_operator_development.md deleted file mode 100644 index 8b72f11c5ed8e2cbf1f4caafbea6a195c8d8d1d1..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/ascend910_operator_development.md +++ /dev/null @@ -1,7 +0,0 @@ -# Ascend 910算子开发 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/community.md 
b/tutorials/training/source_zh_cn/quick_start/quick_video/community.md deleted file mode 100644 index 0c65a558f2c389cac7ef3e75742dd816eeb82817..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/community.md +++ /dev/null @@ -1,9 +0,0 @@ -# 参与社区建设 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**更多内容**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/cpu_operator_development.md b/tutorials/training/source_zh_cn/quick_start/quick_video/cpu_operator_development.md deleted file mode 100644 index 1b6851e0f6b42ac3d1cc5dcdb73d73a68cb1767c..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/cpu_operator_development.md +++ /dev/null @@ -1,7 +0,0 @@ -# CPU算子开发 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/cpu_ubuntu.md b/tutorials/training/source_zh_cn/quick_start/quick_video/cpu_ubuntu.md deleted file mode 100644 index baff978d12f1fec38e648c36f8da3ca30571d5d5..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/cpu_ubuntu.md +++ /dev/null @@ -1,9 +0,0 @@ -# CPU-Ubuntu上安装MindSpore - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**立即安装**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/cpu_windows.md b/tutorials/training/source_zh_cn/quick_start/quick_video/cpu_windows.md deleted file mode 100644 index badcdffb9d229a321517102f200f25122fc9e0e0..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/cpu_windows.md +++ /dev/null @@ -1,9 +0,0 @@ -# CPU-Windows上安装MindSpore - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**立即安装**: \ No newline at end of file diff --git 
a/tutorials/training/source_zh_cn/quick_start/quick_video/customized_debugging.md b/tutorials/training/source_zh_cn/quick_start/quick_video/customized_debugging.md deleted file mode 100644 index a54e1f899b09454ab97e94a76b8ef9fca422eaeb..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/customized_debugging.md +++ /dev/null @@ -1,9 +0,0 @@ -# 自定义调试 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**查看完整教程**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/experience_on_modelarts.md b/tutorials/training/source_zh_cn/quick_start/quick_video/experience_on_modelarts.md deleted file mode 100644 index fc591d797e333dd9962a84224fcc747463f57fe2..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/experience_on_modelarts.md +++ /dev/null @@ -1,11 +0,0 @@ -# 云平台-ModelArts上使用MindSpore - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**查看更多内容**: - -**查看course代码仓**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/gpu.md b/tutorials/training/source_zh_cn/quick_start/quick_video/gpu.md deleted file mode 100644 index 0e4801497995e347e8bed9b7eafbfd94330fc4f4..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/gpu.md +++ /dev/null @@ -1,9 +0,0 @@ -# GPU上安装MindSpore - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**立即安装**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/gpu_operator_development.md b/tutorials/training/source_zh_cn/quick_start/quick_video/gpu_operator_development.md deleted file mode 100644 index d3ffc50da7d5e449549c47ca015316c8dc6e3797..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/gpu_operator_development.md +++ /dev/null @@ -1,7 +0,0 @@ -# GPU算子开发 - 
-[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/inference.md b/tutorials/training/source_zh_cn/quick_start/quick_video/inference.md deleted file mode 100644 index 9bccf474586b17c133300dcdeaf7d57f51e7da83..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/inference.md +++ /dev/null @@ -1,9 +0,0 @@ -# 多平台推理 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**更多内容**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/loading_the_dataset_and_converting_data_format.md b/tutorials/training/source_zh_cn/quick_start/quick_video/loading_the_dataset_and_converting_data_format.md deleted file mode 100644 index 6d716a6e511c9b0430a041123f00eafa72ffee5c..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/loading_the_dataset_and_converting_data_format.md +++ /dev/null @@ -1,13 +0,0 @@ -# 加载数据集与转换格式 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**查看更多内容**: - - - - \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/loading_the_model_from_hub.md b/tutorials/training/source_zh_cn/quick_start/quick_video/loading_the_model_from_hub.md deleted file mode 100644 index 06795ab1380fca0085dada4dc143762790365f7e..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/loading_the_model_from_hub.md +++ /dev/null @@ -1,9 +0,0 @@ -# 从Hub中加载模型 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**查看更多内容**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_differential_privacy.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_differential_privacy.md deleted file mode 100644 index 
a7006c1ff18e8d8af106d38bcb04e0f0075e6e31..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_differential_privacy.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindArmour差分隐私 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**更多内容**: diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_fuzzing.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_fuzzing.md deleted file mode 100644 index 33560b27e931c670fe907326fa5ba3e554a9e1ea..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_fuzzing.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindArmour测试模型安全性之AI Fuzzer - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**更多内容**: diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_installation_and_adversarial_attack_and_defense.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_installation_and_adversarial_attack_and_defense.md deleted file mode 100644 index 96c638ee2a413c5d2684f0552eb532ca2a9ecab8..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_installation_and_adversarial_attack_and_defense.md +++ /dev/null @@ -1,11 +0,0 @@ -# MindArmour安装与对抗攻防 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**立即安装**: - -**查看更多内容**: diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_membership_inference.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_membership_inference.md deleted file mode 100644 index 52fa4c60b61a2ce5d7a9f8cc02b4062f51df0b72..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/mindArmour_membership_inference.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindArmour测试模型安全性之成员推理 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**更多内容**: diff --git 
a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_dashboard.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_dashboard.md deleted file mode 100644 index f37041e5dbe1964467b9db00352480e0b3c3c397..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_dashboard.md +++ /dev/null @@ -1,11 +0,0 @@ -# MindInsight训练看板 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**立即安装**: - -**查看更多内容**: diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_debugger.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_debugger.md deleted file mode 100644 index 5099bc0a704fa514c8887b54a64d6a9296030e92..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_debugger.md +++ /dev/null @@ -1,9 +0,0 @@ -# 使用调试器 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**查看更多内容**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_installation_and_common_commands.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_installation_and_common_commands.md deleted file mode 100644 index c8201bdedeea28530be9ae1f81173645475c8223..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_installation_and_common_commands.md +++ /dev/null @@ -1,11 +0,0 @@ -# MindInsight安装与常用命令 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**立即安装**: - -**查看更多命令**: diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_lineage_and_scalars_comparison.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_lineage_and_scalars_comparison.md deleted file mode 100644 index 931a2afb491afb33e3c13c6cf81b75fb09926e7d..0000000000000000000000000000000000000000 --- 
a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_lineage_and_scalars_comparison.md +++ /dev/null @@ -1,11 +0,0 @@ -# MindInsight溯源与对比看板 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**立即安装**: - -**查看更多内容**: diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_model_explanation.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_model_explanation.md deleted file mode 100644 index a02a96879a53b0a2487b29bb2e3aa85cd1e1856a..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_model_explanation.md +++ /dev/null @@ -1,9 +0,0 @@ -# MindInsight模型解释 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**查看更多内容**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_performance_profiling.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_performance_profiling.md deleted file mode 100644 index b12163e47d6981650c366a82f913accef7483f19..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/mindInsight_performance_profiling.md +++ /dev/null @@ -1,13 +0,0 @@ -# MindInsight性能调试 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**查看更多内容**: - - - - \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/mindspore_lite_converter.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindspore_lite_converter.md deleted file mode 100644 index 4475f8da277b43509bc7ff4e4aa29585f1ed4439..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/mindspore_lite_converter.md +++ /dev/null @@ -1,13 +0,0 @@ -# MindSpore Lite转换工具converter - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**更多内容**: - - - - \ No newline at end of file diff --git 
a/tutorials/training/source_zh_cn/quick_start/quick_video/mindspore_lite_quick_start.md b/tutorials/training/source_zh_cn/quick_start/quick_video/mindspore_lite_quick_start.md deleted file mode 100644 index 855292d7d8dfb256e3110dc2bfb07d607b99a698..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/mindspore_lite_quick_start.md +++ /dev/null @@ -1,11 +0,0 @@ -# MindSpore Lite快速体验 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**更多内容**: - -**立即使用MindSpore Lite**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/optimize_data_processing.md b/tutorials/training/source_zh_cn/quick_start/quick_video/optimize_data_processing.md deleted file mode 100644 index 9ce5b656a70dff858c9c6084409fdaf32709bf17..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/optimize_data_processing.md +++ /dev/null @@ -1,9 +0,0 @@ -# 优化数据处理 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**查看更多内容**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/quick_start_video.md b/tutorials/training/source_zh_cn/quick_start/quick_video/quick_start_video.md deleted file mode 100644 index 9367549d39baa2b74db3ed37a4b6c912bb4d75c5..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/quick_start/quick_video/quick_start_video.md +++ /dev/null @@ -1,11 +0,0 @@ -# 体验快速入门 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**查看代码**: - -**查看完整教程**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/quick_start/quick_video/saving_and_loading_model_parameters.md b/tutorials/training/source_zh_cn/quick_start/quick_video/saving_and_loading_model_parameters.md deleted file mode 100644 index a91970636762d797825409b90274b97d3a76d284..0000000000000000000000000000000000000000 --- 
a/tutorials/training/source_zh_cn/quick_start/quick_video/saving_and_loading_model_parameters.md +++ /dev/null @@ -1,9 +0,0 @@ -# 模型参数保存与加载 - -[comment]: <> (本文档中包含手把手系列视频,码云Gitee不支持展示,请于官方网站对应教程中查看) - - - -**查看完整教程**: \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/use/data_preparation.rst b/tutorials/training/source_zh_cn/use/data_preparation.rst deleted file mode 100644 index 3756c17e816ddbaba15b005a13d5ecb0bccb1b2a..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/use/data_preparation.rst +++ /dev/null @@ -1,8 +0,0 @@ -加载数据集 -=========== - -.. toctree:: - :maxdepth: 1 - - load_dataset_image - load_dataset_text \ No newline at end of file diff --git a/tutorials/training/source_zh_cn/use/defining_the_network.md b/tutorials/training/source_zh_cn/use/defining_the_network.md deleted file mode 100644 index da32937232104cb16c4551326a1fdf303158a9a9..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/use/defining_the_network.md +++ /dev/null @@ -1,25 +0,0 @@ -# 定义网络 - -`Linux` `Ascend` `GPU` `CPU` `模型开发` `初级` `中级` `高级` - - - -由多个层组成的神经网络模型,是训练过程的重要组成部分。你可以基于MindSpore中的`nn.Cell`基类,通过初始化`__init__`方法和构造`construct`方法构建网络模型。定义网络模型有以下几种方式: - -- 直接使用官方提供的典型网络模型。 - - 建议通过查阅当前MindSpore提供的[网络支持列表](https://www.mindspore.cn/doc/note/zh-CN/master/network_list_ms.html),直接使用相应的网络模型。在网络支持列表中,提供了每个网络所支持的平台,直接点击相应网络名称查看网络的定义,用户可根据需求自定义网络初始化参数。 - -- 自行构建网络。 - - - 若网络中的内置算子不足以满足需求时,你可以利用MindSpore方便快捷地自定义算子并加入到网络中。 - - 通过[自定义算子](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/custom_operator.html)了解详细帮助信息。 - - - MindSpore提供了迁移第三方训练框架的脚本,支持将已有的TensorFlow、PyTorch等的网络迁移到MindSpore,帮助你快速进行网络迁移。 - - 通过[迁移第三方框架训练脚本](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/migrate_script.html)了解详细帮助信息。 - - - MindSpore支持使用开发深度学习模型的逻辑进行概率编程,还提供深度概率学习的工具箱,构建贝叶斯神经网络。 - - 
通过[深度概率编程](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/apply_deep_probability_programming.html)了解详细帮助信息。 diff --git a/tutorials/training/source_zh_cn/use/images/map.eddx b/tutorials/training/source_zh_cn/use/images/map.eddx deleted file mode 100644 index 266b96e2445ac2d49e063c243de9213e1e177b6f..0000000000000000000000000000000000000000 Binary files a/tutorials/training/source_zh_cn/use/images/map.eddx and /dev/null differ diff --git a/tutorials/training/source_zh_cn/use/load_dataset_image.ipynb b/tutorials/training/source_zh_cn/use/load_dataset_image.ipynb deleted file mode 100644 index 0340f74661d9bd48c2350ed2050b2d85d717a3e4..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/use/load_dataset_image.ipynb +++ /dev/null @@ -1,465 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 加载图像数据集" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`Linux` `Ascend` `GPU` `CPU` `数据准备` `初级` `中级` `高级`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/use/load_dataset_image.ipynb) \n", - "[![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/mindspore_load_dataset_image.ipynb) \n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL21pbmRzcG9yZV9sb2FkX2RhdGFzZXRfaW1hZ2UuaXB5bmI=&imagename=MindSpore1.1.1) " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - 
"source": [ - "在计算机视觉任务中,图像数据往往因为容量限制难以直接全部读入内存。MindSpore提供的`mindspore.dataset`模块可以帮助用户构建数据集对象,分批次地读取图像数据。同时,在各个数据集类中还内置了数据处理和数据增强算子,使得数据在训练过程中能够像经过pipeline管道的水一样源源不断地流向训练系统,提升数据训练效果。\n", - "\n", - "此外,MindSpore还支持分布式场景数据加载,用户可以在加载数据集时指定分片数目,具体用法参见[数据并行模式加载数据集](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html#id6)。\n", - "\n", - "本教程将以加载MNIST训练数据集[1]为例,演示如何使用MindSpore加载和处理图像数据。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 准备环节" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 导入模块" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "该模块提供API以加载和处理数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 下载所需数据集" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "运行以下命令来下载MNIST数据集的训练图像和标签并解压,存放在`./datasets/MNIST_Data`路径中,目录结构如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets/MNIST_Data\n", - "├── test\n", - "│   ├── t10k-images-idx3-ubyte\n", - "│   └── t10k-labels-idx1-ubyte\n", - "└── train\n", - " ├── train-images-idx3-ubyte\n", - " └── train-labels-idx1-ubyte\n", - "\n", - "2 directories, 4 files\n" - ] - } - ], - "source": [ - "!mkdir -p ./datasets/MNIST_Data/train ./datasets/MNIST_Data/test\n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-labels-idx1-ubyte --no-check-certificate \n", - "!wget -NP ./datasets/MNIST_Data/train https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/train-images-idx3-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test 
https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-labels-idx1-ubyte --no-check-certificate\n", - "!wget -NP ./datasets/MNIST_Data/test https://mindspore-website.obs.myhuaweicloud.com/notebook/datasets/mnist/t10k-images-idx3-ubyte --no-check-certificate\n", - "!tree ./datasets/MNIST_Data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 加载数据集" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore目前支持加载图像领域常用的经典数据集和多种数据存储格式下的数据集,用户也可以通过构建自定义数据集类实现自定义方式的数据加载。各种数据集的详细加载方法,可参考编程指南中[数据集加载](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html)章节。\n", - "\n", - "下面演示使用`mindspore.dataset`模块中的`MnistDataset`类加载MNIST训练数据集。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 配置数据集目录,创建MNIST数据集对象。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "DATA_DIR = './datasets/MNIST_Data/train'\n", - "mnist_dataset = ds.MnistDataset(DATA_DIR, num_samples=6, shuffle=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
创建字典迭代器,通过迭代器获取一条数据,并将数据进行可视化。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAENCAYAAADJzhMWAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAMaklEQVR4nO3dX6ik9X3H8fenJmnBeLFGul2MZtNUQiGlWkQKlWIpCdZeqDc2QsE0pZuLWhLIRcReRAiFUGzaQqF0Q2w2tjUEjFHE1lix2eQmuIrVVTFauxKX1Y0sbbQ3afTbi/OsPbuec+bs/Htmz/f9gmFmnjP7zHef3c/+/s3sL1WFpJ3vZ8YuQNJyGHapCcMuNWHYpSYMu9SEYZeaMOxSE4Zdp0hyJEltcntl7Po0vXeNXYBW0n8Df7XB8TeWXIfmKH6CTuslOQJQVXvHrUTzZjdeasJuvDbys0l+H7gY+B/gSeBgVb05blmahd14nWLoxn9ggx/9J/AHVfWd5VakebEbr9P9PfDbwC8A5wK/AvwdsBf45yS/Ol5pmoUtu7Ylye3AZ4FvVdX1Y9ejM2fYtS1Jfgl4HjhRVe8bux6dObvx2q4fDffnjlqFpmbYtV2/Pty/OGoVmpph19uS/HKSd7TcSfYCfzM8/YelFqW5cZ1d6/0e8NkkB4GXgNeBDwG/C/wc8ABw+3jlaRaGXes9AnwYuAz4DdbG5/8FfA+4E7iznNE9azkbLzXhmF1qwrBLTRh2qQnDLjWx1Nn4JM4GSgtWVdno+Ewte5KrkzyX5IUkt8xyLkmLNfXSW5JzgB8AHwVeBh4FbqyqZ7b4Nbbs0oItomW/Anihql6sqp8AXweuneF8khZolrBfCPxw3fOXh2OnSLIvyaEkh2Z4L0kzWvgEXVXtB/aD3XhpTLO07EeBi9Y9f/9wTNIKmiXsjwKXJPlgkvcAHwfum09ZkuZt6m58Vf00yc3Ag8A5wB1V9fTcKpM0V0v91ptjdmnxFvKhGklnD8MuNWHYpSYMu9SEYZeaMOxSE4ZdasKwS00YdqkJwy41YdilJgy71IRhl5ow7FIThl1qwrBLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhOGXWrCsEtNGHapCcMuNWHYpSYMu9SEYZeamHrLZp0dlrlLr/5fsuFGqqOaKexJjgCvA28CP62qy+dRlKT5m0fL/ltV9docziNpgRyzS03MGvYCvp3ksST7NnpBkn1JDiU5NON7SZpBZpnASXJhVR1N8vPAQ8CfVNXBLV7vbNGSOUE3jjEn6KpqwzefqWWvqqPD/XHgHuCKWc4naXGmDnuSc5Ocd/Ix8DHg8LwKkzRfs8zG7wbuGbor7wL+qar+ZS5V7TB2pbUKZhqzn/GbNR2zG/Z+dtyYXdLZw7BLTRh2qQnDLjVh2KUm/IrrHDjb3s8qfoV1Elt2qQnDLjVh2KUmDLvUhGGXmjDsUhOGXWrCdXatrLNxLXuV2bJLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhOus8/BpPXgnfx9986/97ONLbvUhGGXmjDsUhOGXWrCsEtNGHapCcMuNeE6+xKczWvRs36n3O+kr46JLXuSO5IcT3J43bHzkzyU5Pnhftdiy5Q0q+10478KXH3asVuAh6vqEuDh4bmkFTYx7FV1EDhx2uFrgQPD4wPAdfMtS9K8TTtm311Vx4bHrwC7N3thkn3AvinfR9KczDxBV1WVZNMZpqraD+wH2Op1khZr2qW3V5PsARjuj8+vJEmLMG3Y7wNuGh7fBNw7n3IkLUomrfEmuQu4CrgAeBX4PPAt4BvAxcBLw
A1Vdfok3kbnshu/AGOu07uOvnqqasM/lIlhnyfDvhiGXettFnY/Lis1YdilJgy71IRhl5ow7FIThl1qwrBLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhP+V9I7wFbfPFv0N+IWeX6/UTdftuxSE4ZdasKwS00YdqkJwy41YdilJgy71ITr7Dvc2bxd9Db+m/MlVbIz2LJLTRh2qQnDLjVh2KUmDLvUhGGXmjDsUhOuszfnOnwfE1v2JHckOZ7k8LpjtyU5muSJ4XbNYsuUNKvtdOO/Cly9wfG/rKpLh9sD8y1L0rxNDHtVHQROLKEWSQs0ywTdzUmeHLr5uzZ7UZJ9SQ4lOTTDe0maUbYzAZNkL3B/VX1keL4beA0o4AvAnqr65DbOs7qzPdrQKk/QTdJ1gq6qNvyNT9WyV9WrVfVmVb0FfBm4YpbiJC3eVGFPsmfd0+uBw5u9VtJqmLjOnuQu4CrggiQvA58HrkpyKWvd+CPApxZXosZ0Nq/D61TbGrPP7c0cs+84qxx2x+yn8uOyUhOGXWrCsEtNGHapCcMuNeFXXDWTWWa8x9xOuuNMvS271IRhl5ow7FIThl1qwrBLTRh2qQnDLjXhOru2tMrfatOZsWWXmjDsUhOGXWrCsEtNGHapCcMuNWHYpSZcZ9/hOq+Td/zO+lZs2aUmDLvUhGGXmjDsUhOGXWrCsEtNGHapiYlhT3JRkkeSPJPk6SSfHo6fn+ShJM8P97sWX25PVTX1bSdLsuVNp5q4ZXOSPcCeqno8yXnAY8B1wCeAE1X1xSS3ALuq6nMTzrWz//YtyE4P7bQM9Mam3rK5qo5V1ePD49eBZ4ELgWuBA8PLDrD2D4CkFXVGY/Yke4HLgO8Du6vq2PCjV4Dd8y1N0jxt+7PxSd4L3A18pqp+vL4LVVW1WRc9yT5g36yFSprNxDE7QJJ3A/cDD1bVl4ZjzwFXVdWxYVz/b1X14QnncfA5BcfsG3PMvrGpx+xZu6JfAZ49GfTBfcBNw+ObgHtnLVLS4mxnNv5K4LvAU8Bbw+FbWRu3fwO4GHgJuKGqTkw4V8smypZ5Orbc09msZd9WN35eDLvOhGGfztTdeEk7g2GXmjDsUhOGXWrCsEtNGHapCf8r6W1y+Ww6Lp+tDlt2qQnDLjVh2KUmDLvUhGGXmjDsUhOGXWqizTq76+TTcZ1857Bll5ow7FIThl1qwrBLTRh2qQnDLjVh2KUm2qyzd+U6uU6yZZeaMOxSE4ZdasKwS00YdqkJwy41YdilJiausye5CPgasBsoYH9V/XWS24A/An40vPTWqnpgUYXOyvVmdTdxf/Yke4A9VfV4kvOAx4DrgBuAN6rq9m2/WdP92aVl2mx/9okte1UdA44Nj19P8ixw4XzLk7RoZzRmT7IXuAz4/nDo5iRPJrkjya5Nfs2+JIeSHJqtVEmzmNiNf/uFyXuB7wB/VlXfTLIbeI21cfwXWOvqf3LCOezGSwu2WTd+W2FP8m7gfuDBqvrSBj/fC9xfVR+ZcB7DLi3YZmGf2I3P2jT2V4Bn1wd9mLg76Xrg8KxFSlqc7czGXwl8F3gKeGs4fCtwI3Apa934I8Cnhsm8rc5lyy4t2Ezd+Hkx7NLiTd2Nl7QzGHapCcMuNWHYpSYMu9SEYZeaMOxSE4ZdasKwS00YdqkJwy41YdilJgy71IRhl5pY9pbNrwEvrXt+wXBsFa1qbataF1jbtOZZ2wc2+8FSv8/+jjdPDlXV5aMVsIVVrW1V6wJrm9ayarMbLzVh2KUmxg77/pHffyurWtuq1gXWNq2l1DbqmF3S8ozdsktaEsMuNTFK2JNcneS5JC8kuWWMGjaT5EiSp5I8Mfb+dMMeeseTHF537PwkDyV5frjfcI+9kWq7LcnR4do9keSakWq7KMkjSZ5J8nSSTw/HR712W9S1lOu29DF7knOAHwAfBV4GHgVurKpnllrIJpIcAS6vqtE/gJHkN4E3gK+d3ForyZ8DJ6rqi8M/lLuq6
nMrUtttnOE23guqbbNtxj/BiNduntufT2OMlv0K4IWqerGqfgJ8Hbh2hDpWXlUdBE6cdvha4MDw+ABrf1mWbpPaVkJVHauqx4fHrwMntxkf9dptUddSjBH2C4Efrnv+Mqu133sB307yWJJ9Yxezgd3rttl6Bdg9ZjEbmLiN9zKdts34yly7abY/n5UTdO90ZVX9GvA7wB8P3dWVVGtjsFVaO/1b4EOs7QF4DPiLMYsZthm/G/hMVf14/c/GvHYb1LWU6zZG2I8CF617/v7h2EqoqqPD/XHgHtaGHavk1ZM76A73x0eu521V9WpVvVlVbwFfZsRrN2wzfjfwj1X1zeHw6Nduo7qWdd3GCPujwCVJPpjkPcDHgftGqOMdkpw7TJyQ5FzgY6zeVtT3ATcNj28C7h2xllOsyjbem20zzsjXbvTtz6tq6TfgGtZm5P8D+NMxatikrl8E/n24PT12bcBdrHXr/pe1uY0/BN4HPAw8D/wrcP4K1XYna1t7P8lasPaMVNuVrHXRnwSeGG7XjH3ttqhrKdfNj8tKTThBJzVh2KUmDLvUhGGXmjDsUhOGXWrCsEtN/B/M3kbdmYwBvQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "mnist_it = mnist_dataset.create_dict_iterator()\n", - "data = next(mnist_it)\n", - "plt.imshow(data['image'].asnumpy().squeeze(), cmap=plt.cm.gray)\n", - "plt.title(data['label'].asnumpy(), fontsize=20)\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "此外,用户还可以在数据集加载时传入`sampler`参数用来指定数据采样方式。MindSpore目前支持的数据采样器及其详细使用方法,可参考编程指南中[采样器](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/sampler.html)章节。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据处理" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore目前支持的数据处理算子及其详细使用方法,可参考编程指南中[数据处理](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/pipeline.html)章节。\n", - "\n", - "下面演示构建pipeline,对MNIST数据集进行`shuffle`、`batch`、`repeat`等操作。\n", - "\n", - "操作前的数据如下:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "5\n", - "0\n", - "4\n", - "1\n", - "9\n", - "2\n" - ] - } - ], - "source": [ - "for data in mnist_dataset.create_dict_iterator():\n", - " print(data['label'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 对数据进行混洗。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "after shuffle: \n", - "4\n", - "2\n", - "1\n", - "0\n", - "5\n", - "9\n" - ] - } - ], - "source": [ - "ds.config.set_seed(58)\n", - "ds1 = mnist_dataset.shuffle(buffer_size=6)\n", - "\n", - "print('after shuffle: ')\n", - "for data in ds1.create_dict_iterator():\n", - " print(data['label'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
对数据进行分批。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "after batch: \n", - "[4 2]\n", - "[1 0]\n", - "[5 9]\n" - ] - } - ], - "source": [ - "ds2 = ds1.batch(batch_size=2)\n", - "\n", - "print('after batch: ')\n", - "for data in ds2.create_dict_iterator():\n", - " print(data['label'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 对pipeline操作进行重复。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "after repeat: \n", - "[4 2]\n", - "[1 0]\n", - "[5 9]\n", - "[2 4]\n", - "[0 9]\n", - "[1 5]\n" - ] - } - ], - "source": [ - "ds3 = ds2.repeat(count=2)\n", - "\n", - "print('after repeat: ')\n", - "for data in ds3.create_dict_iterator():\n", - " print(data['label'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "可以看到,数据集被扩充成两份,且第二份数据的顺序与第一份不同。\n", - "\n", - "> 因为`repeat`将对整个数据处理pipeline中已经定义的操作进行重复,而不是单纯将此刻的数据进行复制,故第二份数据执行`shuffle`后与第一份数据顺序不同。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据增强" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore目前支持的数据增强算子及其详细使用方法,可参考编程指南中[数据增强](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/augmentation.html)章节。\n", - "\n", - "下面演示使用`c_transforms`模块对MNIST数据集进行数据增强。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 导入相关模块,重新加载数据集。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "from mindspore.dataset.vision import Inter\n", - "import mindspore.dataset.vision.c_transforms as transforms\n", - "\n", - "mnist_dataset = ds.MnistDataset(DATA_DIR, num_samples=6, shuffle=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
定义数据增强算子,对数据集执行`Resize`和`RandomCrop`操作。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "resize_op = transforms.Resize(size=(200,200), interpolation=Inter.LINEAR)\n", - "crop_op = transforms.RandomCrop(150)\n", - "transforms_list = [resize_op, crop_op]\n", - "ds4 = mnist_dataset.map(operations=transforms_list,input_columns='image')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 查看数据增强效果。" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQEAAAENCAYAAAAPLtCGAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAA3zUlEQVR4nO2da4yz6Vnf/5fPx7HHM+87e0rZACFqSkUTRUskKEVsi0I4LKhRGqA0CVtFVTmWVGQDH+BDkZKSAqmooGkTSGhgE04lUkNJSBMiJJKyGxJyamBZErKbd+edgz0+n+9+sP/3e/lZz8wzY3tsj6+fZL2PPR77Gb++r+c63f9LnHMwDGNziSz7BAzDWC5mBAxjwzEjYBgbjhkBw9hwzAgYxoZjRsAwNhwzAoax4ZgRMEIhIl8QEXfK7Zlln59xeWLLPgFjrTgB8EtTHq9f8XkYc0SsY9AIg4h8AQCcc/cv90yMeWPhgGFsOBYOGBchKSL/EsDfA9AA8JcAPuKcGyz3tIxZsHDACMU4HPiKKT/6WwCvcc79ydWekTEvLBwwwvJrAB4EcBeALIB/COC/ArgfwB+KyNct79SMWTBPwJgJEXkzgNcB+J/Oue9Z9vkYF8eMgDETIvLVAP4awLFzbmfZ52NcHAsHjFk5GP+bXepZGJfGjIAxKy8Z//vkUs/CuDRmBIxzEZG/LyLPutKLyP0Afnl8939c6UkZc8P6BIww/AsArxORjwD4IoAagK8C8O0AUgDeB+DNyzs9YxbMCBhh+BCA5wN4IYBvwCj+rwD4UwC/AeA3nGWY1xarDhjGhmM5AcPYcMwIGMaGszAjICIvFZHPi8gTIvLIot7HMIzZWEhOQESiAP4KwD8D8BSAPwfwvc65z879zQzDmIlFVQceAPCEc+5JABCRRwE8BGCqERARy06eQjQaRTQaRSwWQywW8/fj8fjEsYgs7RxFxN8ikciz7uvHjOXx+OOPHzrnbgQfX5QRuBfAl9T9pwB8vX6CiLwWwGsX9P7XhlKphEKhgN3dXdx1110oFovY3t7Gvffei+3tbWxvb+Puu+9GPB5HJHL1KR4u7ng8jlgshkQi4Q1WPB5/1n1jeYjIF6c9vrQ+AefcWwG8FTBP4CwikYhfROl0Gul0Gvl8HltbWygUCtje3sbOzg4SicRSjAAwMgTRaNSfK48jkYg3Tss6N+N8FmUEngbwHHX/vvFjxhlMc5d5Nc1kMsjlcigWiygWi9jZ2cHNmzexu7uL++67D4lEAtFodAlnbaw7izICfw7geSLyXIwW/ysBfN+C3mst0VdLxvY81o/v7u5ie3vbhwW5XA7ZbBbpdNq72hZvG7OwECPgnOuLyA8D+CMAUQBvd859ZhHvta4w2cfEH
hN9XNh8fHt7G8ViEVtbW8hms8hkMt4A8HfMABizsLCcgHPufRhtLDECiIhPoiUSCaTTaSSTScTjcaTTaaRSKcTjcaRSKezs7CCfz6NUKqFUKmFrawu5XA7pdNobDzMCxizYBqIlwat+KpXyV/hpx8Vi0f9bLBaRy+W8EUilUhYOGDNjRmBJMBygJ0A3n1f6VCqFra0tbG1tIZPJ+Mf5PJbkLBlozIoZgSVBA8Arfj6fRzabxfb2NgqFArLZrE8EJpNJ5HI55PP5CYPB/IGV34xZMCOwBJgTSCaT3gjwqn/jxg2USiWfB8hkMt5YMHeQTCb948wJWDhgXBYzAkuA7bQ0BJlMBtlsFrlczjcBsUuQScJ4PO6Th0woWnXAmAdmBJYES4DMCdAAsAOwWCxib2/PNwGx+455AL2PwDBmwYzAkuCVPZPJIJ/Po1gsolQq4caNG7jnnntQKpVw7733+kWuN+UQCwOMeWBGYEnoXXa8ousdg9Oy/7bgVw/nHIbD4Zn/8raqmBFYIaZtxbWFv7pwoff7ffR6PTjn0O/30e/3MRgM/L/D4RDD4XDZp3sqZgRWAFvo64lzDoPBAJ1OB91uF71eD+12G51OB71eD91uF61WyxuDVcWMwAqwyq6icTraE2i32+h2u2g2m2g2m94ANBoN9Ho99Pv9ZZ/uqZgRMIxLwli/3++j2+2i3W77hc9/6/W69wxWFTMCK4CFA+vJcDjEYDDwHkCr1UKlUsHJyQmazSaq1SrK5TJarRa63e6yT/dUzAgYxgwMh0N0u110Oh00m000Gg1Uq1XUajVUKhUcHh6i1Wqh3W4v+1RPxYyAYVwSnROgIWi1WqjX66jVajg5OUG5XPZewqpiRsAwLokOB9rtNhqNhl/4lUoFBwcH+PKXv4xGo4FGo7Hs0z0VMwKGcUnoCehKQK1WQ7lcxvHxMQ4ODrC/v49qtYp6vb7s0z0VMwJLgl8gupNsOKFbyXqzVuqd1kRkDUV3Sqy6Oy/4GAl+brN+lvx/ZHNQ8P+QJUPzBIxnwS8NXclms4lkMolqtYp0Og0RQTKZnJDwpnaAbjHm/U02BFyENKg81t16/X5/4vM6S9w1rD6DbgvmcdAY9Ho9f1tVzAgsiX6/7zPK1WoVsVgMzjkkEgk453zDSXArMfcV8HEahk01AqzTc6HxKqyTdfw55zdwhgM/v2QyOSHwumlc2giIyHMAvBPAHgAH4K3OubeISAnAuwHcD+ALAF7hnCvPfqrXC14xtNsYi8W8QRgMBohEIl5BSAuJJJPJCRd20xkOh94AsGFHl+14TB2GZDKJfr/vF78OC+bZvbkuik+zeAJ9AK9zzn1cRPIAHheRDwB4NYAPOufeOJ5G/AiA189+qtcHuo384jabTT+iix5Bu91Gv9/3UmKZTAb9fh+pVMobCE7+oUu6qQaBV3pdq6dhZcdeq9Xy2g3pdBq9Xs+LtTIUiEQiGA6HG+dZXdoIOOduAbg1Pq6JyOcwmkH4EIBvHj/tHQA+DDMCE9CFZeIvkUhgOBz6hd9qtZBOp3FycoJCoeA1BzqdDjKZDHq9nv+i0mhsKvwsuehrtRqq1aoPs6rVKhqNBprNpjem2WwWnU7HKzvrwamb+FnOJScgIvcDeCGAjwHYGxsIAHgGo3Bh2u9s9EBSegKxWMw3kgwGA4iINwSdTsfvUhsOh/6qH41GfZgwGAw28our0Q07rNezXHd8fOx7+DOZjM8RMJkaiUTQ6XS8h7WJn+XMRkBEcgB+F8CPO+eq2o1yzrnTho1u+kBSnRPg4u73+94jSCaTaDQa3gg457yoaCwWQy6XW/ms81UQDK10vf7k5ATHx8eo1+uoVqv+MxsMBj6hGolEvHfFz3+Wc1lHZjICIhLHyAC8yzn3e+OH90XkbufcLRG5G8DtWU/yuuGc8yIUg8EAg8EA7XYb0WgUtVrNZ63pJRQKBXS7XUQiEb8vnVLk6XTacgJjY9pqtXzPPvv2b9265bv4K
ODabDYnknYMEbjdd9M+y1mqAwLgbQA+55z7BfWj9wJ4FYA3jv/9g5nO8JrCxXxaDVvXrPnzXC6HWCyGVCrlt6duqgtLtCfAxCB79yuVCsrlMo6OjnB8fIxut+tDrlwu56sF1AJYdfGPRTGLJ/ANAH4AwKdE5BPjx34Ko8X/HhF5GMAXAbxipjO8prC5hDkAXabiVSoSifjsdTqdRr1eRzKZ9LvSaAiYKNQEBUnXmaCR092A2gC0222fIGQe4OTkBNVqFZVKxYdS8Xjc5wjS6TQ6nc6EUQ1rWNmQxKYk3TTEn68Ds1QH/hTAad+uBy/7uptCWPHJRqOBeDzus96cXFQul72nkMlkntVdyPJhsBNuXQyCbsdl15++8fFer4fj42McHx/7MODk5AQnJyeo1Wqo1+uo1+v+c6TMe7VaRTKZRDQaRTabRSKRAACkUik0m81QTUP0OvhetVoNzWbTy40xx7Dqnpp1DK44OoHYbDZ9s0u5XIaIYDgc+hCBA0ni8bgvPXJugX58HQyB7gTk3gouLH3c7XZx+/ZtbwS4WafZbKLdbk+ETLqKwPwBjQAbtKLRqPe+zqPVaqHZbOLg4ADlctkbg+B7r7pHYEZgxdFqtnR3mf3m1YoJQhoILn5++XVVYV3Qrr7O/OvOQHYC8upPA8BwSV+NAXgvQm/w4f5/egI0qGGMAH+f/Qj6vVmKNE/AmBl6Aox1+eWMxWLo9XpezTabzSKZTPoGGLYZD4dD32acSCTWzhPQi5WdgDSErVYLrVbLewH1et17A3yu9gS497/b7aLRaPjSbCwWQ7/f9y3G/JzOg+d2dHTk3//k5MS/N/s8zAgYM6ETX41Gw7u1APxjnU4HuVzOjzbn9GJmutlglMlkVv4LSYKdgBTsoAteqVR8J6B2wyntxS28wXCAHlWj0YCI+GoBDUO73Q496ZkGpVKpoFareQ+NHgGrERYOGDPBJBi/vMCdqcZ0bYfDoTcE+ve4Q44u7qp/GTXBJqBg1p9XXr3w2u32hEsebADSn6Vu0opGo/6zZCNRGBimUViUBsrCAWOu8OrV6XT8wmc9u9VqIZlMotlsIp/Po9lsTmTPo9GoTwyyOWnVv5DApHYfm4Ao4Mnb4eGhd70ZGjB5yuNp4UCv10M0GvWfFRd+u91GPB73YUKYkIlJShonhmf0Avjaq/6ZmxFYcfjFBeB3F8bjce+28ipfLBaRz+d93ZpfduoOrFtj0bSNQdTuK5fLeOaZZ1Aul1Gr1SauvDSYrALQG9BGgK/f6XQQj8fRarV8lyaTgmGMgBaGYRJSlwfNCBhzIdjBFo1G/ZeOsSt3IXKrsa5/6y/lungCwLMbgfQwD3YDHh8fo1qtTnT7BWcBapecnxE/A72A+VleRFhE5xn4nvrYwgFjLgS/wHrbK69YkUjEf5mDRoClsl6vtxZfSBLcGchSHrv/jo6OfEigFxuNh9Zw5N+sG4+YEAx+jhepngSlxfT76eNV/8zNCKw4+oumCX5ReZ9VAm43zuVy3lugJkEymQz13npKsl4g7EZcp8nJutX4vM9y1vdYN8wIrCnBL1xQA5/Jqlqt5luNs9ms7yw8Dy58usfRaHTiOB6PT2x0mjd8Xbb5plIpZDIZH/8Xi0X0+31EIpGJKoAOB+ien3dFXtfFOy/MCFwTtBHQMXStVvNX7mQy6ROF58EyJAU42YXI9mN2IoZ5rcug3z+VSiGdTiObzfoEJ4VYuAtQJwVpFLrd7oROw6Yv9tMwI3BN0COy6/U6RMTvFdB5AS7k86AXwO7DVCrluxJTqZTfAbkodV4aAb4vFzU9AxFBNpudKBHqXYQsEeokoDEdMwLXhGBTUSQS8WUvuscXuXqzzTifz/uFyKuqfp1FNSAFPYFer+eboagFyDbpYLOQfg4rB+uQt1gWZgSuCcEOu0gkgkaj4Y0AFyt7C86DycTBYOD3IPCqz+ajRbXEMh+hdz/SEPFnw+EQ8XjcJ0FjsRiazSaAO2VVL
dZiRuB0zAhcE3RJrV6v+4Xf6/WQTCb9fvewffFsNGq1Wsjlcr5lmW51Op32OxYXgQ4HdDWCuYFsNuu787LZrG8jpoGLRqPo9/tIJBLodrsLOcfrghmBawLDgU6nA2Cym42L6SItsfwddtbR/WauIbhNd97QCCSTyYm+CHo6VA5mOzVDB72fgp2Amz6m7TzMCFwTgm2xLJH1+33EYjHfcBM2mcddh8lk0m+3zeVy3gXXeYZFwPOkFgKNAEuADFW00IhzzouFdLtd3zRl4cDZmBG4JvCKrBNhp8mLhfUEcrkcRMRrGNIIpFIpX5ZbpCfAsiYTkOl0+lnyYp1Ox/89sVjMi48MBgM0Gg2/gco4HTMC1wR6AlyUwVmFFxUepWgJpbcymYyPwWkAFu0J6OlKp/2rd+71+32cnJz48WK6ock4nXkMH4kCeAzA08657xCR5wJ4FMAOgMcB/IBzzjIzV0RwkczyOnSzdRcejQw78BbdgBNm8Kqe06C9HwsDwjEPE/ljAD6n7r8JwC86574aQBnAw3N4D8MwFsRMRkBE7gPw7QD++/i+APgWAL8zfso7AHz3LO9hGLOyLpuclsWsnsAvAfhJAMwO7QCoOOfYo/kURpOKn4WIvFZEHhORx2Y8B8MwZuDSRkBEvgPAbefc45f5fefcW51zL3bOvfiy52AYxuzMOobsu0TkZQBSALYAvAVAUURiY2/gPgBPz36axiqxbrvx1kHYY5lc2hNwzr3BOXefc+5+AK8E8H+cc98P4EMAXj5+mg0kNYwVZxEF1NcD+AkReQKjHMHbFvAehmHMibk0CznnPgzgw+PjJwE8MI/XNYx5YNWBs7GOQeNUdBwdnBKs9yYE9+tftktxFvj6wTZpNhCx9XgRuYHTuhmDx6uKGQHjVLRqrt6sQ2VjKvlwm3FQjFQLlQKLMwRaDzEWi3kNgmQy6ZWQOE9gnouSr6U1DPWxvr/KmBEwpsIvMTfkNJtNv19fRJBKpdDtdv3EH70A9dWXj4cVM7koeqMR1ZWLxSKAO4NbaAQ4hWlecIFz8Ghw7gAnKtOQripmBIxT0ZoEnALEq30ymfQTd3q93sSVl2IjPKYe4aJGo9P4UG4sn8/7vQ/UVUwkEhODR+YBFz4VkCntxmMRMU/AWF/oCXAh0SPQo8+AO1JedLszmYy/CjrnvNHgjsBFhASUF9PKxFpqjTLpizAC/X7fC51S1g2An3i8DpuYzAgYp+KcmxiLTuUeLnw9BZgzDfL5vB+Awom/VCNaBFqBqN/vI5PJeCEVGqBsNotcLjd3xWEaGS10yr8zEol4padVVzo2I2BMJTgLkFdbJggHgwHq9TrS6TQajQZyuRxyuZxPHlLhiKIewZmK84TTl6mzyPdlSMKcxrzPgTMeyuWyl2/j4qcKUrPZXHk9AzMCxqkEDQGHjnAxcciJ1hugsRARJJNJL/O1SAUihhtUPaIyMl1xHSLME842YNxPFSaKrtAbsXDAWFuoVqxhSbDT6firLePhTqczMZYsmUwil8tNjEVfxIJgPoCvTaVkLn667fM2RNRtBO5UKXq9HprNpk9IroPIqRkBYyqscdP1ZyWAV3pKd1GLkC43+wOGwyGSySTy+TySyeTCwgFddWBZkld9hiXMY8w7S895j3pGQr/f9xqH9Xp9LeTNzAgYp6LHfDMs0NJdvFHWXER8biCZTPrxZ4vWI9SdgYPBwCsRTxtPPk843JW9Ev1+H7VaDel0Gq1Wy3sCZgSMtYWL/6yruIj4GYFUJObVv9VqodPpTBiC4O+eNmL9ItAYAVhYFWIaNAKNRsOHAJlMxgudcu4jS5SaYGiyzF4CMwLGzDArz0RZtVr1GXsmB5kz4KLQSsC8imux0FWPo4E7eYBEIoFMJoNut4tisYh2u+3P3zmHfD6Per3uDSq9Ku7DmCbkepWYETBmRjcVMVnGTH0mk/EJQXYastlId/qx8rDIScfzhn8T/wbOS8zn8xMTofRQVbYVt9ttf
8wJTwxfrhozAsbMMGzgF7rVavkM/cnJyURMnkqlfJ8/24u18k80Gl1YFWHe6PIkF3omk5mY4MwSaSqV8iPTmVuhh6R3Zy4DMwLGTPDqpfcYMEsOwGfJ6/U6Go2Gj5nz+bw/7vf7yGazftLwqvfak2A4oBOpNHKpVMr/7bVazQ9KSSaTaDabvs/gvNzLIjEjYMwMv/zcXsx9AkzW6U01nCistQiYOGOX3ToZARHxuQ8aw8FgMLGbMpvNol6v+yQivR/Cz2dZVQQzAsbM6HCAhgC4k+nXrcQ0Boz7nXM+k87W33WBngD3LjAEGA6H3gBEo1E0m00kk0kA8MaOicHBYDAxOWkZmBEwZoZffl7NhsOh7yxsNBo+9s/n88jn8ygWi74Rqdfr+cQg5x+ukyfASkcqlfKLnhWRTqeDra0ttNttNJtNJBIJHyYwTzIcDv0uxGUlRM0IGDNDIwDc2UIbjUYnkoTxeBy1Wg2FQsE3F/H53HXInYDrZAQikQgSiYSvErBSoPdcMFfCXZWRSGSiOlCtVv0+g2UwkxEQkSJGI8i+FoAD8IMAPg/g3QDuB/AFAK9wzpVneR9jtdGbimgQ2ErLL3c0GkWr1cJwOPRNRQwBdM5gnWYE6OqA1k3QW64ZIqVSKd9OzNwIw4R4PL5UMdRZPYG3APjfzrmXi0gCQAbATwH4oHPujSLyCIBHMJIhN64purPwLHFRthIPBgNkMhkfCxcKBeTzeZ87WCcjwMSgc87H/cBkByDHp3MzlnPO6zB0Oh0fLq1dOCAiBQDfBODVADAeP94VkYcAfPP4ae/ASIrcjMCGcJbSru4loGZhIpFApVLxV0+WCNPp9ETVgN4E7/N4VfoJzmp/5vkyJ5DNZrG1tYVOp4PhcOi1DhKJhNds0AKvPGbScd49BbN4As8FcADg10Tk6wA8jtGY8j3n3K3xc54BsDftl0XktQBeO8P7G2sIv9CsInAnXq1WQzQaRSaTQTwe94k03WashUsBrE13IT0GdkjqxiImD3WJUO/c1Mci4nMvq2IEYgBeBOBHnHMfE5G3YOT6e5xzTkSm+nbOubcCeCsAnPYc43qhtyczaZhIJHxijGIgw+HQlw3T6bTPHehSotYtWHWmdRZms1l0u10v4QaMuinj8bhPGlLXkQlGnTcI6jzMwixG4CkATznnPja+/zsYGYF9EbnbOXdLRO4GcHvWkzSuB3qzDEVLgZEoCKW4ut0u6vU6MpkMMpmM35bMLsNUKjVRW1+UgvG8oc5iKpVCv99HoVDw+QTuqWg0Gsjn82g0Gn4PRq1Wm5jtAMzXCwBmMALOuWdE5Esi8nzn3OcBPAjgs+PbqwC8ETaQ1AigdxzS1a/X6760CMAbCer5p9Np34ADYCHKwYuG4QC7C9kuzb+bcmx8DkurLCsC8LJl8/aAZjWjPwLgXePKwJMAXoPRkNP3iMjDAL4I4BUzvodxTQjuM9CTi3TzUKfT8fEyJc6cc16zgMmzdTECwXCAeyUA+PJgPB73is3MiejNVIPBwO9EnHc/wUxGwDn3CQAvnvKjB2d5XeN6ovcYsGeAZUHmB9LptL+VSiV0u13k8/mJVmN6AovULZwnuqkouK262+0im816z6fZbCKbzaJWq/nRaTQUVGrqdDpzPb/1CKiMa4MOB/SGG2484lbjTCYDAL5kGIvFkM1mvaFY13BAGy22SfNvYj8B/162X/PzYglxpTwBw7gIOhxgvE+hDSb6uBU5m816TQIm0La2tpBOp9e2qYg5DcqQUwuRyUItNkLZtlqt5kMnJhHNCBhrjZbUAu4sEH1jv4CujetWY3oC6xIOAHc8AXoDnNAE3Bn5xvAoEol440fVoW6367UL510RMSNgXDl6f8C0NmOWAFkaazabvsOQQ04rlcpEb0FQAZlxtz5eJrrGH/RggiPO2SSltyMvcsS7GQFjqUxrM9bSXLqxiBtukskkTk5OfL5AVw30wuF9LqBV8RhOazHWhksbMH1bB
GYEjJWElQRWDZLJJI6Pj33bcTQaRbvdRj6f912FFPfgzjx9vC7dhcCkp3QVmBEwVhK207KcyOGnwMhtZsmsVqshm816PQLqFlK4g1fUdckdAM+ex7BoL8aMgLGS6KlHzI43Go2J8holvfkcJtuCz1nUBKLrghkBY+VgppyLXMfK7Cjs9/toNBpIp9MoFotevZilQ44HZwfeOrPo8MCMgLGSMDnYbrcnSmidTsdr8mk9fxoChgrUJqDu/zp7AhYOGBuJrhAQNtAkk0n0er0JgU8R8eo+fCybzXohjnU2Aos+dzMCxkqiNxrxmJ2FsVgMjUbDVwOAOyEEjQD35U8bhLrqTHP/LRwwNg6t4a87CYPH3JXHjUmU/KYnwJ2J6+QJXHVPgxkBY2UJDujUQqZcKEwQNptNr+jLpiL9eKPR8C255xF8DyYm9eJcVAeifj/KqrFPgiVQ7jRkmHPe7TzMCBgrS5gvMZOFVOKhZmEikUC9XvfSZZQlCwMX/DShU72BZxGGQBuAoAiJ3nK8tbXly5+nCZOGnW9oRsBYa1gO5B6DWq3mlYi4oGgYwpYKtaCpno7EXIOeE7AIt51X/0wmg62tLX/F73a7/vF4PD4x2pxhjy6h9nq9UElRMwLG2qOn/bC7UMt5cbHEYrFQi5YLPZVK+ZZknYAEMLEnYZ7QA+F7sveB5VIaNuecFxmhF9Tr9dBsNv3r0BCaETCuNdOGoTabzQmJ74sagaAOYCqV8vsVAPhFuAhoBGjAeA78+7TMOkMDVkoo18Ywitu1z8OMgLH2aEnuoBQX24nL5XJoI8B9+/l8Hu12G5lMxmsCUvhzkb0HOhxgMpPhSTab9ROb6vU6ms0mKpUKarWaFydlMpXiJOdhRsBYa1hBoAAn9xdw9Hev1/ODPsIm8rghiY1KvKIyJ0BFoHkbAV3+1IKk9AyotERB0nQ6jUajMZG4ZI7gIp7PrANJ/x2Af43RMNJPYaQ2fDeARwHsYDSV6AfGI8oMYyFwsTMEaDab3oWmEWi1WqHj906n46/8rCjE43E/N3CR0mZBZeJ0Ou0XOGXJtd5gPB73fyvzBPwZw4bz8gKzzCK8F8CPAniBc64lIu8B8EoALwPwi865R0XkVwE8DOBXLvs+hnEWjH2pWciSIRcItfwpPBKGra2tCYVj9idkMhm/MBe5qYdVDHoFNDxsfmKSkANaSDQa9cnBbrc7sevyLGYNB2IA0iLSw2gi8S0A3wLg+8Y/fweAn4UZAWOBsBZOjyA4tFRfScPQaDRQr9d9GzJzDa1Wy8fpi5p7QG9GJzb11GfW/3u9nm+RpicwHA79zsp2ux1aSGWWCURPi8ibAfwdgBaA92Pk/lecc0xLPgXg3lP+WBtIapxLUFdv2r/Bej2TdlwwF63pszeA9XfW4PVCXHQ4wBIfB5Do7kC9zbrb7SKTySCVSnm5di2xFoZZwoFtAA9hNJ24AuC3Abw07O/bQFLjPHj1niYeepUColetSKRbh0+DI83b7bavZiSTyYnZBGEN3yzhwD8F8LfOuYPxif8egG8AUBSR2NgbuA/A0zO8h7Gh6OSYrpuzVKaTX/NepMwJ7O7uYmdnB8Vi0c884GJbxPsui1mMwN8BeImIZDAKBx4E8BiADwF4OUYVAhtIalwKXgnZvccGnuCYcsb88ySfzyOXy/l6PEuG+j2viwEAZssJfExEfgfAxwH0AfwFRu79/wLwqIj8h/Fjb5vHiRqbhW775Q46Dh9JpVLI5XILkw6jVBk9AHoGNAKsNFwXQzDrQNKfAfAzgYefBPDALK9rGMzIM+HFKzPLYoVCAdls1mfv5wkTbfl8HsViEdlsFtlsFrlczu8luE7egHUMGiuH3k7LKz8NQSaTQT6fR6lUQjabRSaTmftiZNyfzWb9XINMJjOReLsuBgAwI2CsKLp9lq2yuVwO+XzeG4Gtra2FGAGWCLmHn5uJGA7QCFwXQ2BGwFhJdDjAK/L29jaKxSKKxSL29vZQK
pWQy+XmvhipH8B8BJOT2hNY1XDgMv0LZgSMlSS4pZZhwNbWForFoi/f5fP5uVcHWHHQsw3ZvccmnFU0AJfFjICxMC67ULS8FvMBjMu1Idje3kahUJj7gpw21VhLi+muvqsieIUPdhDOspfBjIAxF04T5dSLJewVOxKJIJPJ+BxAoVBAoVDwocD29jZKpRJ2dnZQKBSWPnZ83miR0Gliomxd5sj2TqczITGmZy2Y0KhxJWhRTsbTp40KD0MkEkE6ncaNGzdQKpWe1bTD/vjrtviBSal1LmjuWdDH3W4Xh4eHqFQqKJfLqFarXmSEBiHsvAUzAsbMBON3Js/0Me+HcaFFxM8YZKOONgCMza9Tww7RIilB0VB93G63cXx8jGq1ilqthnq9jnq97hWVLjJ0xYyAMTPs7qMwJuvsjOV5nEqlQhuBVCqF7e1t7O7u+pCAfQJ69Nh1Q49kp14iF3aj0fBX+Xq9jpOTE9RqNRweHuL4+Bj1et3LjHH6koUDxpWgN/volt5sNoutrS0v75XNZkMbgUQigUKhgO3tbV8i1EaAXXvXEbr77XYb9Xrdz1PgAm+1Wt4DaDQaKJfLODk5QaPRQK1W86IiNAKmNmwsHB0OaA8gl8v53XdM8oWN4+PxuG8MogGhV6Hbdq+TN6ATf0H15EajgWq16qcqVSoVf0xj0Gg00G63fShh4YBxZejNPrq7r1gs+oaera2tC2XyY7GYrxDQqFBgU++Zv27oacztdttf3Wu1Go6Ojnz8f3x87EMFGgnOH6AhsHDAuBKmCWOyll8qlXDjxg3k83ns7OygVCqFXrhsGea0Hb42d/Fd15wAAG8EWq0WarUaTk5OUKlUcHBwgEqlgmq1itu3b/v8gJZX50g2GoEwmBEwZkYbAvbZ8ypeKBSwtbWF7e1t7OzshB4ISi0BXV1gaZC36+oJ6IlKerxatVpFpVJBpVLB0dGRj/vp/vNYVwfMEzCuBIYD7PCjAWBnHz2Cmzdvhr56a6HQWYVD1wmGAzQCFD1lCHB8fIyjoyPs7+9PCI+edmxGwDgVnVQ76zjM6zAZyFwA43+W+HZ2drC3t4e9vb0LZ/SD57DOC19n6qcdD4dDHwY0m00/VblcLnsDcHBwgKOjIzzzzDMTvx98n4tgRmDD0FNupo3c1htkwiw4tvjqxp5cLvesbD47B6+jCx8G3e47HA59V6AeJ87pyswBsAuQZUHdGhw28x8GMwIbCONtLkwdd+vHwxiBaDSKdDrtBTmDTT3XOZN/EejmM1Zn9x97AnQn4NHR0YQR0A1DF+kEDIsZgQ2DV3st2qlVc1iOo1E4D3b3FYtF5PN53+/PGj8z+tdNjeci6FZgXs25sHu9ni/rcbT6wcEBarUajo+P/b6AWq3mDYEZAWNmdCafsTwNAa/iLMWdB3MCpyn0amHOTYUxf7AdmItft/o2Gg3fAsxpw41Gw4cDS/EEROTtAL4DwG3n3NeOHysBeDeA+wF8AcArnHNlGZn6t2A0j7AJ4NXOuY/P9YyNmQiW33Q2P51OY2try6v6JpPJUK/H7j6+jjYqelfhJqPHh3U6Hd/gw14AGgL2BbAqwE7ATqczsU14noTxBH4dwC8DeKd67BEAH3TOvVFEHhnffz2AbwPwvPHt6zGaQfj18zxhYzZ0Yw8XLV35XC7nt+5yw08Y4vG4zwHofAC9AfMEnB+U2mq1fAsw24CPj4/9wj85OfG7AVut1kRisNVqLScccM59RETuDzz8EIBvHh+/A8CHMTICDwF4pxvVKD4qIkURuds5d2tuZ2zMBD0B5gDY4UdDwM6+TCaDdDod6jWj0ahPAtIAUKhTb/3d1JwACW4MYqzP2n+j0UClUvGLneEBvQd6A6uSE9hTC/sZAHvj43sBfEk9jwNJn2UEbCDp8gjmBLSYp965p8denwXFRCgMqqcG6WrDpqI3BulwgJ2ALAnWajWUy2W/2Fkt4DENwDLCgTNxzrnLDBS1gaTLQecEmA/gw
t/e3sbNmzext7eHXC6HXC4X+jW1mpDu9GMosMlGAMBEdYAhAVuADw4OsL+/j5OTExwfH/vkH3+H+QStLDRPLmsE9unmi8jdAG6PH38awHPU82wg6YK4rGutt/xyZ54W7tzd3cXe3p5v/LnI+Zw2Lvy6bfk9j2mioDoU0B4AQ4GDgwOUy2UcHh6e21k477HolzUC78Vo2OgbMTl09L0AflhEHsUoIXhi+YDZOE/A86ILjHvz9R794E3PujfOZpoYaFAJuNfr+Sw/NQB0iVD3CXQ6nSv/G8KUCH8LoyTgrog8hdHswTcCeI+IPAzgiwBeMX76+zAqDz6BUYnwNQs4541Ca+Br3fugJn7Y7Dtn7N24ccMP82BGn5n8TXfdw6IXeVAclK3BOgQ4ODjwDUCs/2sVoHnH+mEJUx343lN+9OCU5zoAPzTrSRl3YFytx3Hr41gshnQ6HXrhMg+wt7fn23w5dJPZfBoa42yY7OM+fr39Vx8zBNjf3/d5AN0IxOfPO9YPi3UMrjjBwZxax49lvlwuF1p5l7kAbvjRXoCW8t6kGP6y0BPo9/veCOjuPt0ZSEkw7gegF6BbgVfWEzCWS1C1hzF7sVj09/P5fOg6PI0IZb84dlt3+FlNPxw6HNBJP2760eKgbAri1Z9NQNoLMCNgTEV7AezJz+Vy2N7e9vdLpVLohcvQghN9aUSC4YAZgfNh1l/X/U9OTlAul9FsNr0WAI0BDQLv83doCMwIGFNhfB7s86dsVz6fx+7ubui9+kwwUrRTt/fqcGCT23zDosMBioFoJSDKgOk9AKwCsDXYPAHjXIKa/lrKmzP6dnd3Q1cIWE3QO/z0rsFVHru9agRzAkE9QNb9qRisRUHYDrzITsCwmBFYcegFcPGzu+/mzZsolUrY3t7GPffcE7q0xx6DRCIxUXrU5UYzAuHQG4OYC+DVn52AX/7yl30yMDhLUM8YtHDAOJVgYjDY47+zs4ObN29eaCKPbjai6z9tHLdxNsH9AAwH2A1IT4AbhXQTEUVAaUj4s2VgRmABBIU6LyLcGYR9ASztUc6bxoBhwSxjuTZtwQcFOk8T7DwPVgVYGdCDQJgI5JbhWq029RxWATMCc4QbafQGGi2wyc00F0m67e3tYXt728t2cctvNpu99hN6540W9NSuefB+2AXKBb+/v+/HhHNIqB4KepHXXAZmBOYEe/i5jZabdKaN6U4kEqFfd3d310/zoXQXR3Px9Yxw6PibSTpm9pmw63a7oV1zXvkPDw/9UFCW/nQT0CobAMCMwFyhlLdu82U/Po9pDMLCfgB29+l5fNbYczGCEl80BFTtoejnRYwAJcKDWoB6P4AZgQ0iKNbBRZtMJn0MfxHZLgAoFAoT2325zz+o4muG4HzY3UfFHl6xtaw3s/hhFi73DGh5MD052DyBDUOHA8F6vu7yY8NPWIKjvdnmq6W7zACEg15At9v1TT3s8qtWq77Lr9frhTYC3W53YkgIpwbTI1hm/T8sZgTmiA4HmM2nN8ArORdxWOj+s72XnoSe6mOEg7Lfpw37ZJ2/2+2GMgJ8LeYBODuQXYHrkBQEzAjMDXoC01R78vm8z/Azvg9LcCgIy4XaEJgnEA5m/3Vzj571V61WcXx8jE6nE+rqzdfTiUBWBeglWE5gw2AnHvvxOZK7UCjgxo0buOuuu/z+/bDo8WDsBdAVCNv6G56g2i8FPg8PD3FwcICTkxPs7++j3W6HMgI60cj4X48J11qBq4wZgTkSrA7oDj8aBI7qDoseFqo39/DYqgPh0Vp/egBItVr1HX7Hx8dotVqhBD7Y7XfWaHDzBNaciywuvTjprnPHH/MBpVIJpVIJOzs7Fz6H04Q8N9UAXHRh6Rbfs8Z/HxwchDYCfN2zREEvc65XzcYaAd0nr6+wejT3RUZp8+q/t7eH3d1dr96rG3woCGICnuejr7LBUd76ftgF5pzD7du3cXR0hKOjIxwfH3uln+Defmb1N4WNNgJs62WMz
R17fJw77cLu008kEl63j6VBLnybyXcxOMqb22zZ2cdYmx1+YeNt5xyOjo4mOvtYz9fNPYuQ9F51LjuQ9OcBfCeALoC/AfAa51xl/LM3AHgYwADAjzrn/mgxpz4b9AR0e29QwJOLN+w+/aAR0HP5aFzMCIRDu+/ahddSXnrhhnk9NvVw3p82AjQ2m2YAgMsPJP0AgDc45/oi8iYAbwDwehF5AYBXAvgHAO4B8Mci8jXOuZXzrXQ9n7347PLj1Zu6e2Hibmbqd3d3fZeflgC7iEEx7ngCLLkxicdWXTbkdLvd0K9ZqVRQr9dRq9VweHjou/w49dc8gVOYNpDUOfd+dfejAF4+Pn4IwKPOuQ6AvxWRJwA8AODP5nO68yOYyc9kMshkMv4KzjHdqVTqQoo93OWXyWSQz+cn8gDmCYSHRoCZfN3ey+m9jOHDvh5jf/5+0BNYpvb/MplHTuAHAbx7fHwvRkaBcCDps1j2QFKdE6AXEGz1LRQKyGQyobryGFroKz8NC70AK+eF5zTBDrrxvKpzw08YtPw3DQC9CQsHLomI/DSAPoB3XfR3lz2QVCv26Fr+zs6Od+Mpyx1muy6NChe9FgFhl59N9wmHlvJutVrehS+Xy76mT3e+2WyGNgJ61BdDgG63O+ENWDhwAUTk1RglDB90dz61tRlIynIga/o0BKznFwoF3Lx5E1tbW6GNgIj4xc5EI7f8Mtlo4UA4gtN92NRTqVRQLpdx+/ZtlMtl1Ov10EaAiUR6F3ofwbJHgS2TSxkBEXkpgJ8E8E+cc031o/cC+E0R+QWMEoPPA/B/Zz7LBaDDgdOUfIvFIorFIuLxeOjXDIp28uqvFYaM8wmGA9z+S6+gUqn4fv+wV26tHqTbepkPsHDgFE4ZSPoGAEkAHxjHuB91zv0b59xnROQ9AD6LUZjwQ6tYGQDueAKU3KZ4Z6lUwu7uLkqlEu655x5sb2+H9gQIY/+goGfwecZ0dDjAfAB7/Sneub+/7/v9w5YIg7fg9OBNNADA5QeSvu2M5/8cgJ+b5aSuAi5MrQXIpiGGCBT1DOsJ6Nc+6/51hItI98trRV3eD4NzDo1GA+Vy2Q/vZIOPnuXH5N5lXHi94Dd18ZON7RjU6D58LcGtb8bp6BZfPUgjuJPuIo09rVbLt/TqBa+n/l5UGNSYjhkBY2ZY0+ckHjbesLOPxuEiNf12u+37/IMafpucxFsEZgSMmdGjuFh710M6efVut9uhrtqsCnCvP8MAGoJlD/C8bpgRMGaG8T4NAVt9mcln22+z2QwdDnQ6HT+4g6+jVXssDJgfZgSMuaBdfi54lvK4bz9sTZ+eBcd56b0C2ghsckZ/npgRMGZGl/Oo2KuHc3IxVyqV0C48Nw+xs4+GgF1/Fg7MDzMCxkzo8l+wz58dfhTwLJfLoRaunvartw/rPn9LDM4PMwLGzGi1Hy5cPaqb8l2Hh4ehwwHKgweFRHTZ0UKB+WBGwJiZYHefXvS3b9/22n37+/sXUgLiQteyYjQQPDZmZ2ONQFApllcdxp+8miWTSRv6eQ5s5tFbffV9TuWp1+sX0u6bJta5TgKe68LGfrv1Fyk4Z17vOeeGIGM6zrmJ7D87+7RwJ29hh3oYV8vGfrunbVCJxWJIp9MQEf+FrVarZgTOQHf3cZ8/d/fRCLDF11hNNvbbrferN5tNryxMTUEKTDQaDTMCZ0AjUKlUJoQ/9Hy+TVbtWQc29tutu9xYfopGo6jVaohGoxgOh4hEIuh2u6YGdAbs7qNmHxd/q9WyPv81YWONAEta9AQikYhf+IPBwHenpdNpMwJnwJwK8wFs9aUR0J6AsZpstBHQ4QC/zMPhEO12G6lUCvV6Hclk0rYSnwE9Ksb/NAbdbtd3+5kRWG021ggEwwER8V/Ufr/vM9ph5w5sMoPBwPf06zHdQe0+ywmsJhtrBBgOUEhkMBggFov5nnVqD5o46PnoPgveaFy1nLexmmysEaAQBj2C4EBSPRLcPIGz0
bJienCoHtNticHVZaONgBaZDEqMbfro74syTbwzeN9YTS41kFT97HUA3gzghnPuUEYr5i0AXgagCeDVzrmPz/+0Z0d3DNpVythkwgS7vw7gpcEHReQ5AL4VwN+ph78No1kDz8NoxNivzH6KhmEsknONgHPuIwCOp/zoFzEaQKL9vIcAvNON+CiAoojcPZczNQxjIVwq7S0iDwF42jn3ycCP7gXwJXX/zIGkIvKYiDx2mXMwDGM+XDgxKCIZAD+FUShwaZY9kNQwjBGXqQ58FYDnAvjkOHN+H4CPi8gDWKOBpIZhjLhwOOCc+5Rz7qZz7n7n3P0Yufwvcs49g9FA0n8lI14C4MQ5d2u+p2wYxjw51wiMB5L+GYDni8hTIvLwGU9/H4AnATwB4L8B+LdzOUvDMBaGrEITh+UEDONKeNw59+Lgg9YUbxgbjhkBw9hwzAgYxoZjRsAwNhwzAoax4ZgRMIwNx4yAYWw4ZgQMY8NZFWWhQwCN8b+rwi7sfM5j1c7JzudsvmLagyvRMQgAIvLYtG6mZWHncz6rdk52PpfDwgHD2HDMCBjGhrNKRuCtyz6BAHY+57Nq52TncwlWJidgGMZyWCVPwDCMJWBGwDA2nKUbARF5qYh8XkSeEJFHlnQOzxGRD4nIZ0XkMyLyY+PHf1ZEnhaRT4xvL7vCc/qCiHxq/L6PjR8ricgHROSvx/9uX9G5PF99Bp8QkaqI/PhVfz4i8nYRuS0in1aPTf1MxhJ3/3n8vfpLEXnRFZ3Pz4vI/xu/5++LSHH8+P0i0lKf1a/O+3wujR4ZddU3AFEAfwPgKwEkAHwSwAuWcB53Y6STCAB5AH8F4AUAfhbAv1/SZ/MFALuBx/4jgEfGx48AeNOS/s+ewajx5Eo/HwDfBOBFAD593meC0RSsPwQgAF4C4GNXdD7fCiA2Pn6TOp/79fNW6bZsT+ABAE845550znUBPIrRAJMrxTl3y43HpTnnagA+h1PmJSyZhwC8Y3z8DgDfvYRzeBDA3zjnvnjVb+ymD8I57TNZ+CCcaefjnHu/c64/vvtRjBS3V5plG4HQw0quChG5H8ALAXxs/NAPj127t1+V+z3GAXi/iDwuIq8dP7bn7qg3PwNg7wrPh7wSwG+p+8v6fMhpn8kqfLd+ECNvhDxXRP5CRP5ERP7xFZ/LqSzbCKwUIpID8LsAftw5V8VoluJXAfhHAG4B+E9XeDrf6Jx7EUbzHX9IRL5J/9CNfMwrre+KSALAdwH47fFDy/x8nsUyPpPTEJGfBtAH8K7xQ7cA/D3n3AsB/ASA3xSRrWWdn2bZRmBlhpWISBwjA/Au59zvAYBzbt85N3DODTGSUH/gqs7HOff0+N/bAH5//N77dGnH/96+qvMZ820APu6c2x+f29I+H8Vpn8nSvlsi8mqMJnl//9gwwTnXcc4djY8fxygX9jVXcT7nsWwj8OcAnicizx1fZV6J0QCTK2U8Uv1tAD7nnPsF9biOIb8HwKeDv7ug88mKSJ7HGCWbPo3RZ/Oq8dNeBeAPruJ8FN8LFQos6/MJcNpnspRBOCLyUowG9X6Xc66pHr8hItHx8VdiNLn7yUWfTyiWnZnEKIv7VxhZxp9e0jl8I0Zu5F8C+MT49jIAvwHgU+PH3wvg7is6n6/EqFLySQCf4ecCYAfABwH8NYA/BlC6ws8oC+AIQEE9dqWfD0YG6BaAHkYx/sOnfSYYVQX+y/h79SkAL76i83kCo1wEv0e/On7uPx//X34CwMcBfOdVf89Pu1nbsGFsOMsOBwzDWDJmBAxjwzEjYBgbjhkBw9hwzAgYxoZjRsAwNhwzAoax4fx/a0nhLhDhHZUAAAAASUVORK5CYII=\n", - "text/plain": [ - "
    " - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "mnist_it = ds4.create_dict_iterator()\n", - "data = next(mnist_it)\n", - "plt.imshow(data['image'].asnumpy().squeeze(), cmap=plt.cm.gray)\n", - "plt.title(data['label'].asnumpy(), fontsize=20)\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "可以看到,原始图片经缩放后被随机裁剪至150x150大小。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 参考文献\n", - "\n", - "[1] Y. LeCun, L. Bottou, Y. Bengio, and P. Haffner. [Gradient-based learning applied to document recognition](http://yann.lecun.com/exdb/publis/pdf/lecun-98.pdf)." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:mindspore] *", - "language": "python", - "name": "conda-env-mindspore-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/training/source_zh_cn/use/load_dataset_text.ipynb b/tutorials/training/source_zh_cn/use/load_dataset_text.ipynb deleted file mode 100644 index ef6ef4fdbb8c1f82e6dd6a02e8c9965886a8690c..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/use/load_dataset_text.ipynb +++ /dev/null @@ -1,326 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 加载文本数据集\n", - "\n", - "`Linux` `Ascend` `GPU` `CPU` `数据准备` `初级` `中级` `高级`\n", - "\n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_source.png)](https://gitee.com/mindspore/docs/blob/master/tutorials/training/source_zh_cn/use/load_dataset_text.ipynb) \n", - 
"[![](https://gitee.com/mindspore/docs/raw/master/resource/_static/logo_notebook.png)](https://obs.dualstack.cn-north-4.myhuaweicloud.com/mindspore-website/notebook/master/mindspore_load_dataset_text.ipynb) \n", - "[![](https://gitee.com/mindspore/docs/raw/master/tutorials/training/source_zh_cn/_static/logo_modelarts.png)](https://authoring-modelarts-cnnorth4.huaweicloud.com/console/lab?share-url-b64=aHR0cHM6Ly9vYnMuZHVhbHN0YWNrLmNuLW5vcnRoLTQubXlodWF3ZWljbG91ZC5jb20vbWluZHNwb3JlLXdlYnNpdGUvbm90ZWJvb2svbW9kZWxhcnRzL21pbmRzcG9yZV9sb2FkX2RhdGFzZXRfdGV4dC5pcHluYg==&imagename=MindSpore1.1.1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 概述\n", - "\n", - "MindSpore提供的`mindspore.dataset`模块可以帮助用户构建数据集对象,分批次地读取文本数据。同时,在各个数据集类中还内置了数据处理和数据分词算子,使得数据在训练过程中能够像经过pipeline管道的水一样源源不断地流向训练系统,提升数据训练效果。\n", - "\n", - "此外,MindSpore还支持分布式场景数据加载,用户可以在加载数据集时指定分片数目,具体用法参见[数据并行模式加载数据集](https://www.mindspore.cn/tutorial/training/zh-CN/master/advanced_use/distributed_training_ascend.html#id6)。\n", - "\n", - "下面,本教程将简要演示如何使用MindSpore加载和处理文本数据。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 准备环节" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 准备文本数据,内容如下:\n", - "\n", - " ```text\n", - " Welcome to Beijing\n", - " 北京欢迎您!\n", - " 我喜欢English!\n", - " ```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 创建`tokenizer.txt`文件并复制文本数据到该文件中,将该文件存放在`./datasets`路径下。执行如下代码完成本步骤。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "./datasets\n", - "└── tokenizer.txt\n", - "\n", - "0 directories, 1 file\n" - ] - } - ], - "source": [ - "import os\n", - "\n", - "if not os.path.exists('./datasets'):\n", - " os.mkdir('./datasets')\n", - "file_handle=open('./datasets/tokenizer.txt',mode='w')\n", - "file_handle.write('Welcome to Beijing \\n北京欢迎您! \\n我喜欢English! 
\\n')\n", - "file_handle.close()\n", - "! tree ./datasets" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 导入`mindspore.dataset`和`mindspore.dataset.text`模块。" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "import mindspore.dataset as ds\n", - "import mindspore.dataset.text as text" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 加载数据集" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore目前支持加载文本领域常用的经典数据集和多种数据存储格式下的数据集,用户也可以通过构建自定义数据集类实现自定义方式的数据加载。各种数据集的详细加载方法,可参考编程指南中[数据集加载](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/dataset_loading.html)章节。\n", - "\n", - "下面演示使用`MindSpore.dataset`模块中的`TextFileDataset`类加载数据集。\n", - "\n", - "1. 配置数据集目录,创建数据集对象。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "DATA_FILE = './datasets/tokenizer.txt'\n", - "dataset = ds.TextFileDataset(DATA_FILE, shuffle=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 创建字典迭代器,通过迭代器获取数据,可以获得分词前的数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Welcome to Beijing \n", - "北京欢迎您! \n", - "我喜欢English! \n" - ] - } - ], - "source": [ - "for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据处理" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore目前支持的数据处理算子及其详细使用方法,可参考编程指南中[数据处理](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/pipeline.html)章节。\n", - "\n", - "下面演示构建pipeline,对文本数据集进行混洗和文本替换操作。\n", - "\n", - "1. 
对数据集进行混洗。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "我喜欢English! \n", - "Welcome to Beijing \n", - "北京欢迎您! \n" - ] - } - ], - "source": [ - "ds.config.set_seed(58)\n", - "dataset = dataset.shuffle(buffer_size=3)\n", - "\n", - "for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 对数据集进行文本替换。" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "我喜欢English! \n", - "Welcome to Shanghai \n", - "上海欢迎您! \n" - ] - } - ], - "source": [ - "replace_op1 = text.RegexReplace(\"Beijing\", \"Shanghai\")\n", - "replace_op2 = text.RegexReplace(\"北京\", \"上海\")\n", - "\n", - "dataset = dataset.map(operations=replace_op1)\n", - "dataset = dataset.map(operations=replace_op2)\n", - "\n", - "for data in dataset.create_dict_iterator(output_numpy=True):\n", - " print(text.to_str(data['text']))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据分词" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "MindSpore目前支持的数据分词算子及其详细使用方法,可参考编程指南中[分词器](https://www.mindspore.cn/doc/programming_guide/zh-CN/master/tokenizer.html)章节。\n", - "\n", - "下面演示使用`WhitespaceTokenizer`分词器来分词,该分词是按照空格来进行分词。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "1. 创建`tokenizer`。" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "tokenizer = text.WhitespaceTokenizer()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
执行操作`tokenizer`。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "dataset = dataset.map(operations=tokenizer)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "3. 创建字典迭代器,通过迭代器获取数据。" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['我喜欢English!']\n", - "['Welcome', 'to', 'Shanghai']\n", - "['上海欢迎您!']\n" - ] - } - ], - "source": [ - "for data in dataset.create_dict_iterator(num_epochs=1,output_numpy=True):\n", - " print(text.to_str(data['text']).tolist())" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "MindSpore-1.1.1", - "language": "python", - "name": "mindspore-1.1.1" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/training/source_zh_cn/use/load_model_for_inference_and_transfer.md b/tutorials/training/source_zh_cn/use/load_model_for_inference_and_transfer.md deleted file mode 100644 index 0eed4c5b4318737890a07a19006636c6b8a4a1f8..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/use/load_model_for_inference_and_transfer.md +++ /dev/null @@ -1,285 +0,0 @@ -# 加载模型用于推理或迁移学习 - -`Linux` `Ascend` `GPU` `CPU` `模型加载` `初级` `中级` `高级` - - - -- [加载模型用于推理或迁移学习](#加载模型用于推理或迁移学习) - - [概述](#概述) - - [本地加载模型](#本地加载模型) - - [用于推理验证](#用于推理验证) - - [用于迁移学习](#用于迁移学习) - - [从Hub加载模型](#从hub加载模型) - - [用于推理验证](#用于推理验证-1) - - [用于迁移学习](#用于迁移学习-1) - - - - -   - -   - - -## 概述 - -在模型训练过程中保存在本地的CheckPoint文件,或从[MindSpore Hub](https://www.mindspore.cn/resources/hub/)下载的CheckPoint文件,都可以帮助用户进行推理或迁移学习使用。 - -以下通过示例来介绍如何通过本地加载或Hub加载模型,用于推理验证和迁移学习。 - -## 本地加载模型 - -### 用于推理验证 - 
-针对仅推理场景可以使用`load_checkpoint`把参数直接加载到网络中,以便进行后续的推理验证。 - -示例代码如下: - -```python -resnet = ResNet50() -load_checkpoint("resnet50-2_32.ckpt", net=resnet) -dateset_eval = create_dataset(os.path.join(mnist_path, "test"), 32, 1) # define the test dataset -loss = CrossEntropyLoss() -model = Model(resnet, loss, metrics={"accuracy"}) -acc = model.eval(dataset_eval) -``` - -- `load_checkpoint`方法会把参数文件中的网络参数加载到模型中。加载后,网络中的参数就是CheckPoint保存的。 -- `eval`方法会验证训练后模型的精度。 - -### 用于迁移学习 - -针对任务中断再训练及微调(Fine Tune)场景,可以加载网络参数和优化器参数到模型中。 - -示例代码如下: - -```python -# return a parameter dict for model -param_dict = load_checkpoint("resnet50-2_32.ckpt") -resnet = ResNet50() -opt = Momentum(resnet.trainable_params(), 0.01, 0.9) -# load the parameter into net -load_param_into_net(resnet, param_dict) -# load the parameter into optimizer -load_param_into_net(opt, param_dict) -loss = SoftmaxCrossEntropyWithLogits() -model = Model(resnet, loss, opt) -model.train(epoch, dataset) -``` - -- `load_checkpoint`方法会返回一个参数字典。 -- `load_param_into_net`会把参数字典中相应的参数加载到网络或优化器中。 - -## 从Hub加载模型 - -### 用于推理验证 - -`mindspore_hub.load` API用于加载预训练模型,可以实现一行代码完成模型的加载。主要的模型加载流程如下: - -1. 在[MindSpore Hub官网](https://www.mindspore.cn/resources/hub)上搜索感兴趣的模型。 - - 例如,想使用GoogleNet对CIFAR-10数据集进行分类,可以在MindSpore Hub官网上使用关键词`GoogleNet`进行搜索。页面将会返回与GoogleNet相关的所有模型。进入相关模型页面之后,获得详情页`url`。 - -2. 使用`url`完成模型的加载,示例代码如下: - - ```python - import mindspore_hub as mshub - import mindspore - from mindspore import context, Tensor, nn, Model - from mindspore import dtype as mstype - import mindspore.dataset.vision.py_transforms as py_transforms - - context.set_context(mode=context.GRAPH_MODE, - device_target="Ascend", - device_id=0) - - model = "mindspore/ascend/0.7/googlenet_v1_cifar10" - - # Initialize the number of classes based on the pre-trained model. - network = mshub.load(model, num_classes=10) - network.set_train(False) - - # ... - - ``` - -3. 
完成模型加载后,可以使用MindSpore进行推理,参考[推理模型总览](https://www.mindspore.cn/tutorial/inference/zh-CN/master/multi_platform_inference.html)。 - -### 用于迁移学习 - -通过`mindspore_hub.load`完成模型加载后,可以增加一个额外的参数项只加载神经网络的特征提取部分,这样我们就能很容易地在之后增加一些新的层进行迁移学习。*当模型开发者将额外的参数(例如 `include_top`)添加到模型构造中时,可以在模型的详情页中找到这个功能。`include_top`取值为True或者False,表示是否保留顶层的全连接网络。* - -下面我们以[MobileNetV2](https://gitee.com/mindspore/mindspore/tree/r1.0/model_zoo/official/cv/mobilenetv2)为例,说明如何加载一个基于OpenImage的预训练模型,并在特定的子任务数据集上进行迁移学习(重训练)。主要的步骤如下: - -1. 在[MindSpore Hub官网](https://www.mindspore.cn/resources/hub/)上搜索感兴趣的模型,并从网站上获取特定的`url`。 - -2. 使用`url`进行MindSpore Hub模型的加载,注意:`include_top`参数需要模型开发者提供。 - - ```python - import os - import mindspore_hub as mshub - import mindspore - from mindspore import context, Tensor, nn - from mindspore.nn import Momentum - from mindspore.train.serialization import save_checkpoint, load_checkpoint,load_param_into_net - from mindspore import ops - import mindspore.dataset as ds - import mindspore.dataset.transforms.c_transforms as C2 - import mindspore.dataset.vision.c_transforms as C - from mindspore import dtype as mstype - from mindspore import Model - context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", device_id=0) - - model = "mindspore/ascend/1.0/mobilenetv2_v1.0_openimage" - network = mshub.load(model, num_classes=500, include_top=False, activation="Sigmoid") - network.set_train(False) - ``` - -3. 在现有模型结构基础上,增加一个与新任务相关的分类层。 - - ```python - class ReduceMeanFlatten(nn.Cell): - def __init__(self): - super(ReduceMeanFlatten, self).__init__() - self.mean = ops.ReduceMean(keep_dims=True) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.mean(x, (2, 3)) - x = self.flatten(x) - return x - - # Check MindSpore Hub website to conclude that the last output shape is 1280. - last_channel = 1280 - - # The number of classes in target task is 10. 
- num_classes = 10 - - reducemean_flatten = ReduceMeanFlatten() - - classification_layer = nn.Dense(last_channel, num_classes) - classification_layer.set_train(True) - - train_network = nn.SequentialCell([network, reducemean_flatten, classification_layer]) - ``` - -4. 定义数据集加载函数。 - - 如下所示,进行微调任务的数据集为[CIFAR-10](https://www.cs.toronto.edu/~kriz/cifar.html),注意此处需要下载二进制版本(`binary version`)的数据。下载解压后可以通过如下所示代码加载和处理数据。`dataset_path`是数据集的保存路径,由用户给定。 - - ```python - def create_cifar10dataset(dataset_path, batch_size, do_train): - if do_train: - usage, shuffle = "train", True - else: - usage, shuffle = "test", False - - data_set = ds.Cifar10Dataset(dataset_dir=dataset_path, usage=usage, shuffle=True) - - # define map operations - trans = [C.Resize((256, 256))] - if do_train: - trans += [ - C.RandomHorizontalFlip(prob=0.5), - ] - - trans += [ - C.Rescale(1.0 / 255.0, 0.0), - C.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5]), - C.HWC2CHW() - ] - - type_cast_op = C2.TypeCast(mstype.int32) - - data_set = data_set.map(operations=type_cast_op, input_columns="label", num_parallel_workers=8) - data_set = data_set.map(operations=trans, input_columns="image", num_parallel_workers=8) - - # apply batch operations - data_set = data_set.batch(batch_size, drop_remainder=True) - return data_set - - # Create Dataset - dataset_path = "/path_to_dataset/cifar-10-batches-bin" - dataset = create_cifar10dataset(dataset_path, batch_size=32, do_train=True) - ``` - -5. 
为模型训练选择损失函数、优化器和学习率。 - - ```python - def generate_steps_lr(lr_init, steps_per_epoch, total_epochs): - total_steps = total_epochs * steps_per_epoch - decay_epoch_index = [0.3*total_steps, 0.6*total_steps, 0.8*total_steps] - lr_each_step = [] - for i in range(total_steps): - if i < decay_epoch_index[0]: - lr = lr_init - elif i < decay_epoch_index[1]: - lr = lr_init * 0.1 - elif i < decay_epoch_index[2]: - lr = lr_init * 0.01 - else: - lr = lr_init * 0.001 - lr_each_step.append(lr) - return lr_each_step - - # Set epoch size - epoch_size = 60 - - # Wrap the backbone network with loss. - loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - loss_net = nn.WithLossCell(train_network, loss_fn) - steps_per_epoch = dataset.get_dataset_size() - lr = generate_steps_lr(lr_init=0.01, steps_per_epoch=steps_per_epoch, total_epochs=epoch_size) - - # Create an optimizer. - optim = Momentum(filter(lambda x: x.requires_grad, classification_layer.get_parameters()), Tensor(lr, mindspore.float32), 0.9, 4e-5) - train_net = nn.TrainOneStepCell(loss_net, optim) - ``` - -6. 开始重训练。 - - ```python - for epoch in range(epoch_size): - for i, items in enumerate(dataset): - data, label = items - data = mindspore.Tensor(data) - label = mindspore.Tensor(label) - - loss = train_net(data, label) - print(f"epoch: {epoch}/{epoch_size}, loss: {loss}") - # Save the ckpt file for each epoch. - if not os.path.exists('ckpt'): - os.mkdir('ckpt') - ckpt_path = f"./ckpt/cifar10_finetune_epoch{epoch}.ckpt" - save_checkpoint(train_network, ckpt_path) - ``` - -7. 
在测试集上测试模型精度。 - - ```python - model = "mindspore/ascend/1.0/mobilenetv2_v1.0_openimage" - - network = mshub.load(model, num_classes=500, pretrained=True, include_top=False, activation="Sigmoid") - network.set_train(False) - reducemean_flatten = ReduceMeanFlatten() - classification_layer = nn.Dense(last_channel, num_classes) - classification_layer.set_train(False) - softmax = nn.Softmax() - network = nn.SequentialCell([network, reducemean_flatten, classification_layer, softmax]) - - # Load a pre-trained ckpt file. - ckpt_path = "./ckpt/cifar10_finetune_epoch59.ckpt" - trained_ckpt = load_checkpoint(ckpt_path) - load_param_into_net(classification_layer, trained_ckpt) - - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - - # Define loss and create model. - eval_dataset = create_cifar10dataset(dataset_path, batch_size=32, do_train=False) - eval_metrics = {'Loss': nn.Loss(), - 'Top1-Acc': nn.Top1CategoricalAccuracy(), - 'Top5-Acc': nn.Top5CategoricalAccuracy()} - model = Model(network, loss_fn=loss, optimizer=None, metrics=eval_metrics) - metrics = model.eval(eval_dataset) - print("metric: ", metrics) - ``` diff --git a/tutorials/training/source_zh_cn/use/publish_model.md b/tutorials/training/source_zh_cn/use/publish_model.md deleted file mode 100644 index b81496f520e233e96ec19b1b7378a2bc8dac2d0c..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/use/publish_model.md +++ /dev/null @@ -1,87 +0,0 @@ -# 发布模型 - -`Linux` `Ascend` `GPU` `模型发布` `中级` `高级` - - - -- [发布模型](#发布模型) - - [概述](#概述) - - [发布模型到MindSpore Hub](#发布模型到mindspore-hub) - - - - - -## 概述 - -[MindSpore Hub](https://www.mindspore.cn/resources/hub/)是存放MindSpore官方或者第三方开发者提供的预训练模型的平台。它向应用开发者提供了简单易用的模型加载和微调APIs,使得用户可以基于预训练模型进行推理或者微调,并部署到自己的应用中。用户也可以将自己训练好的模型按照指定的步骤发布到MindSpore Hub中,以供其他用户进行下载和使用。 - -本教程以GoogleNet为例,对想要将模型发布到MindSpore Hub的模型开发者介绍了模型上传步骤。 - -## 发布模型到MindSpore Hub - -用户可通过向[hub](https://gitee.com/mindspore/hub)仓提交PR的方式向MindSpore 
Hub发布模型。这里我们以GoogleNet为例,列出模型提交到MindSpore Hub的步骤。 - -1. 将你的预训练模型托管在可以访问的存储位置。 - -2. 参照[模板](https://gitee.com/mindspore/mindspore/blob/master/model_zoo/official/cv/googlenet/mindspore_hub_conf.py),在你自己的代码仓中添加模型生成文件`mindspore_hub_conf.py`,文件放置的位置如下: - - ```bash - googlenet - ├── src - │   ├── googlenet.py - ├── script - │   ├── run_train.sh - ├── train.py - ├── test.py - ├── mindspore_hub_conf.py - ``` - -3. 参照[模板](https://gitee.com/mindspore/hub/blob/master/mshub_res/assets/mindspore/ascend/0.7/googlenet_v1_cifar10.md#),在`hub/mshub_res/assets/mindspore/ascend/0.7`文件夹下创建`{model_name}_{model_version}_{dataset}.md`文件,其中`ascend`为模型运行的硬件平台,`0.7`为MindSpore的版本号,`hub/mshub_res`的目录结构为: - - ```bash - hub - ├── mshub_res - │   ├── assets - │   ├── mindspore - │ ├── gpu - │ ├── 0.7 - │ ├── ascend - │ ├── 0.7 - │ ├── googlenet_v1_cifar10.md - │   ├── tools - │ ├── get_sha256.py - │ ├── load_markdown.py - │ └── md_validator.py - ``` - - 注意,`{model_name}_{model_version}_{dataset}.md`文件中需要补充如下所示的`file-format`、`asset-link` 和 `asset-sha256`信息,它们分别表示模型文件格式、模型存储位置(步骤1所得)和模型哈希值。 - - ```bash - file-format: ckpt - asset-link: https://download.mindspore.cn/model_zoo/official/cv/googlenet/goolenet_ascend_0.2.0_cifar10_official_classification_20200713/googlenet.ckpt - asset-sha256: 114e5acc31dad444fa8ed2aafa02ca34734419f602b9299f3b53013dfc71b0f7 - ``` - - 其中,MindSpore Hub支持的模型文件格式有: - - [MindSpore CKPT](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#checkpoint) - - [MINDIR](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#mindir) - - [AIR](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#air) - - [ONNX](https://www.mindspore.cn/tutorial/training/zh-CN/master/use/save_model.html#onnx) - - 对于每个预训练模型,执行以下命令,用来获得`.md`文件`asset-sha256`处所需的哈希值,其中`googlenet.ckpt`是从步骤1的存储位置处下载并保存到`tools`文件夹的预训练模型,运行后输出的哈希值为`114e5acc31dad444fa8ed2aafa02ca34734419f602b9299f3b53013dfc71b0f7`。 - - ```python - cd /hub/mshub_res/tools - 
python get_sha256.py --file ../googlenet.ckpt - ``` - -4. 使用`hub/mshub_res/tools/md_validator.py`在本地核对`.md`文件的格式,执行以下命令,输出结果为`All Passed`,表示`.md`文件的格式和内容均符合要求。 - - ```python - python md_validator.py --check_path ../assets/mindspore/ascend/0.7/googlenet_v1_cifar10.md - ``` - -5. 在`mindspore/hub`仓创建PR,详细创建方式可以参考[贡献者Wiki](https://gitee.com/mindspore/mindspore/blob/master/CONTRIBUTING.md#)。 - -一旦你的PR合入到`mindspore/hub`的master分支,你的模型将于24小时内在[MindSpore Hub 网站](https://www.mindspore.cn/resources/hub)上显示。有关模型上传的更多详细信息,请参考[README](https://gitee.com/mindspore/hub/blob/master/mshub_res/README.md#)。 diff --git a/tutorials/training/source_zh_cn/use/save_model.md b/tutorials/training/source_zh_cn/use/save_model.md deleted file mode 100644 index 3562e4faa31e7b5fc8a61e412cc4e9f32e1fe21c..0000000000000000000000000000000000000000 --- a/tutorials/training/source_zh_cn/use/save_model.md +++ /dev/null @@ -1,169 +0,0 @@ -# 保存模型 - -`Linux` `Ascend` `GPU` `CPU` `模型导出` `初级` `中级` `高级` - - - -- [保存模型](#保存模型) - - [概述](#概述) - - [保存CheckPoint格式文件](#保存checkpoint格式文件) - - [CheckPoint配置策略](#checkpoint配置策略) - - [导出MindIR格式文件](#导出mindir格式文件) - - [导出AIR格式文件](#导出air格式文件) - - [导出ONNX格式文件](#导出onnx格式文件) - - - - -   - -   - - -## 概述 - -在模型训练过程中,可以添加检查点(CheckPoint)用于保存模型的参数,以便执行推理及再训练使用。如果想继续在不同硬件平台上做推理,可通过网络和CheckPoint格式文件生成对应的MindIR、AIR和ONNX格式文件。 - -- MindIR:MindSpore的一种基于图表示的函数式IR,定义了可扩展的图结构以及算子的IR表示,它消除了不同后端的模型差异。可以把在Ascend 910训练好的模型,在Ascend 310、GPU以及MindSpore Lite端侧上执行推理。 -- CheckPoint:MindSpore的存储了所有训练参数值的二进制文件。采用了Google的Protocol Buffers机制,与开发语言、平台无关,具有良好的可扩展性。CheckPoint的protocol格式定义在`mindspore/ccsrc/utils/checkpoint.proto`中。 -- AIR:全称Ascend Intermediate Representation,类似ONNX,是华为定义的针对机器学习所设计的开放式的文件格式,能更好地适配Ascend AI处理器。 -- ONNX:全称Open Neural Network Exchange,是一种针对机器学习所设计的开放式的文件格式,用于存储训练好的模型。 - -以下通过示例来介绍保存CheckPoint格式文件和导出MindIR、AIR和ONNX格式文件的方法。 - -## 保存CheckPoint格式文件 - -在模型训练的过程中,使用Callback机制传入回调函数`ModelCheckpoint`对象,可以保存模型参数,生成CheckPoint文件。 - 
-通过`CheckpointConfig`对象可以设置CheckPoint的保存策略。保存的参数分为网络参数和优化器参数。 - -`ModelCheckpoint`提供默认配置策略,方便用户快速上手。具体用法如下: - -```python -from mindspore.train.callback import ModelCheckpoint -ckpoint_cb = ModelCheckpoint() -model.train(epoch_num, dataset, callbacks=ckpoint_cb) -``` - -用户可以根据具体需求对CheckPoint策略进行配置。具体用法如下: - -```python -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig -config_ck = CheckpointConfig(save_checkpoint_steps=32, keep_checkpoint_max=10) -ckpoint_cb = ModelCheckpoint(prefix='resnet50', directory=None, config=config_ck) -model.train(epoch_num, dataset, callbacks=ckpoint_cb) -``` - -上述代码中,首先需要初始化一个`CheckpointConfig`类对象,用来设置保存策略。 - -- `save_checkpoint_steps`表示每隔多少个step保存一次。 -- `keep_checkpoint_max`表示最多保留CheckPoint文件的数量。 -- `prefix`表示生成CheckPoint文件的前缀名。 -- `directory`表示存放文件的目录。 - -创建一个`ModelCheckpoint`对象把它传递给model.train方法,就可以在训练过程中使用CheckPoint功能了。 - -生成的CheckPoint文件如下: - -```text -resnet50-graph.meta # 编译后的计算图 -resnet50-1_32.ckpt # CheckPoint文件后缀名为'.ckpt' -resnet50-2_32.ckpt # 文件的命名方式表示保存参数所在的epoch和step数 -resnet50-3_32.ckpt # 表示保存的是第3个epoch的第32个step的模型参数 -... 
-``` - -如果用户使用相同的前缀名,运行多次训练脚本,可能会生成同名CheckPoint文件。MindSpore为方便用户区分每次生成的文件,会在用户定义的前缀后添加"_"和数字加以区分。如果想要删除`.ckpt`文件时,请同步删除`.meta` 文件。 - -例:`resnet50_3-2_32.ckpt` 表示运行第3次脚本生成的第2个epoch的第32个step的CheckPoint文件。 - -> - 当执行分布式并行训练任务时,每个进程需要设置不同`directory`参数,用以保存CheckPoint文件到不同的目录,以防文件发生读写错乱。 - -### CheckPoint配置策略 - -MindSpore提供了两种保存CheckPoint策略:迭代策略和时间策略,可以通过创建`CheckpointConfig`对象设置相应策略。 -`CheckpointConfig`中共有四个参数可以设置: - -- save_checkpoint_steps:表示每隔多少个step保存一个CheckPoint文件,默认值为1。 -- save_checkpoint_seconds:表示每隔多少秒保存一个CheckPoint文件,默认值为0。 -- keep_checkpoint_max:表示最多保存多少个CheckPoint文件,默认值为5。 -- keep_checkpoint_per_n_minutes:表示每隔多少分钟保留一个CheckPoint文件,默认值为0。 - -`save_checkpoint_steps`和`keep_checkpoint_max`为迭代策略,根据训练迭代的次数进行配置。 -`save_checkpoint_seconds`和`keep_checkpoint_per_n_minutes`为时间策略,根据训练的时长进行配置。 - -两种策略不能同时使用,迭代策略优先级高于时间策略,当同时设置时,只有迭代策略可以生效。当参数显示设置为`None`时,表示放弃该策略。在迭代策略脚本正常结束的情况下,会默认保存最后一个step的CheckPoint文件。 - -## 导出MindIR格式文件 - -如果想跨平台或硬件执行推理(如昇腾AI处理器、MindSpore端侧、GPU等),可以通过网络定义和CheckPoint生成MindIR格式模型文件。当前支持基于静态图,且不包含控制流语义的推理网络导出。导出该格式文件的代码样例如下: - -```python -import numpy as np -from mindspore import Tensor, export, load_checkpoint, load_param_into_net - -resnet = ResNet50() -# load the parameter into net -load_checkpoint("resnet50-2_32.ckpt", net=resnet) -input = np.random.uniform(0.0, 1.0, size=[32, 3, 224, 224]).astype(np.float32) -export(resnet, Tensor(input), file_name='resnet50-2_32', file_format='MINDIR') -``` - -> - `input`为`export`方法的入参,代表网络的输入,如果网络有多个输入,需要一同传进`export`方法。 例如:`export(network, Tensor(input1), Tensor(input2), file_name='network', file_format='MINDIR')` -> - 导出的文件名称会自动添加".mindir"后缀。 - -为了避免protobuf的硬件限制,当导出的模型参数大小超过1G时,框架默认会把网络结构和参数分开保存。 - -- 网络结构文件的名称以用户指定前缀加`_graph.mindir`结尾。 -- 同级目录下,会生用户指定前缀加`_variables`的文件夹,里面存放网络的参数。 - -以上述代码为例,如果模型中参数大小超过1G,生成的目录结构如下: - -```text -resnet50-2_32_graph.mindir -resnet50-2_32_variables - data_0 - data_1 - ... 
-``` - -> 加载时,如果传入的文件名以`_graph.mindir`结尾,框架会自动查找同级目录下的参数文件。 - -## 导出AIR格式文件 - -如果想在昇腾AI处理器上执行推理,还可以通过网络定义和CheckPoint生成AIR格式模型文件。导出该格式文件的代码样例如下: - -```python -import numpy as np -from mindspore import Tensor, export, load_checkpoint, load_param_into_net - -resnet = ResNet50() -# load the parameter into net -load_checkpoint("resnet50-2_32.ckpt", net=resnet) -input = np.random.uniform(0.0, 1.0, size=[32, 3, 224, 224]).astype(np.float32) -export(resnet, Tensor(input), file_name='resnet50-2_32', file_format='AIR') -``` - -`input`用来指定导出模型的输入shape以及数据类型。 - -> - `input`为`export`方法的入参,代表网络的输入,如果网络有多个输入,需要一同传进`export`方法。 例如:`export(network, Tensor(input1), Tensor(input2), file_name='network', file_format='AIR')` -> - 导出的文件名称会自动添加".air"后缀。 - -## 导出ONNX格式文件 - -当有了CheckPoint文件后,如果想继续在昇腾AI处理器、GPU、CPU等多种硬件上做推理,需要通过网络和CheckPoint生成对应的ONNX格式模型文件。导出该格式文件的代码样例如下: - -```python -import numpy as np -from mindspore import Tensor, export, load_checkpoint, load_param_into_net - -resnet = ResNet50() -# load the parameter into net -load_checkpoint("resnet50-2_32.ckpt", net=resnet) -input = np.random.uniform(0.0, 1.0, size=[32, 3, 224, 224]).astype(np.float32) -export(resnet, Tensor(input), file_name='resnet50-2_32', file_format='ONNX') -``` - -> - `input`为`export`方法的入参,代表网络的输入,如果网络有多个输入,需要一同传进`export`方法。 例如:`export(network, Tensor(input1), Tensor(input2), file_name='network', file_format='ONNX')` -> - 导出的文件名称会自动添加".onnx"后缀。 -> - 目前ONNX格式导出仅支持ResNet系列网络。 diff --git a/tutorials/tutorial_code/acl_resnet50_sample/inc/model_process.h b/tutorials/tutorial_code/acl_resnet50_sample/inc/model_process.h deleted file mode 100644 index 12c6d055fc33b3b502f9121c4dc00fac8e82aa7e..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/acl_resnet50_sample/inc/model_process.h +++ /dev/null @@ -1,104 +0,0 @@ -/** -* @file model_process.h -* -* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. 
-* -* This program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -*/ -#pragma once -#include -#include "inc/utils.h" -#include "acl/acl.h" - -/** -* ModelProcess -*/ -class ModelProcess { - public: - /** - * @brief Constructor - */ - ModelProcess(); - - /** - * @brief Destructor - */ - ~ModelProcess(); - - /** - * @brief load model from file with mem - * @param [in] modelPath: model path - * @return result - */ - Result LoadModelFromFileWithMem(const char *modelPath); - - /** - * @brief unload model - */ - void Unload(); - - /** - * @brief create model desc - * @return result - */ - Result CreateDesc(); - - /** - * @brief destroy desc - */ - void DestroyDesc(); - - /** - * @brief create model input - * @param [in] inputDataBuffer: input buffer - * @param [in] bufferSize: input buffer size - * @return result - */ - Result CreateInput(void *inputDataBuffer, size_t bufferSize); - - /** - * @brief destroy input resource - */ - void DestroyInput(); - - /** - * @brief create output buffer - * @return result - */ - Result CreateOutput(); - - /** - * @brief destroy output resource - */ - void DestroyOutput(); - - /** - * @brief model execute - * @return result - */ - Result Execute(); - - /** - * @brief dump model output result to file - */ - void DumpModelOutputResult(char *output_name); - - /** - * @brief get model output result - */ - void OutputModelResult(); - - private: - uint32_t modelId_; - size_t modelMemSize_; - size_t modelWeightSize_; - void *modelMemPtr_; - void *modelWeightPtr_; - bool loadFlag_; // model load flag - aclmdlDesc *modelDesc_; - aclmdlDataset *input_; - aclmdlDataset *output_; -}; - diff --git a/tutorials/tutorial_code/acl_resnet50_sample/inc/sample_process.h b/tutorials/tutorial_code/acl_resnet50_sample/inc/sample_process.h deleted file mode 100644 index 
1f42dcc44d25fa55cdfb4ce2b3f2b019ff9786b1..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/acl_resnet50_sample/inc/sample_process.h +++ /dev/null @@ -1,52 +0,0 @@ -/** -* @file sample_process.h -* -* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. -* -* This program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -*/ -#pragma once -#include -#include -#include "inc/utils.h" -#include "acl/acl.h" - -/** -* SampleProcess -*/ -class SampleProcess { - public: - /** - * @brief Constructor - */ - SampleProcess(); - - /** - * @brief Destructor - */ - ~SampleProcess(); - - /** - * @brief init reousce - * @return result - */ - Result InitResource(); - - /** - * @brief sample process - * @return result - */ - Result Process(char *om_path, char *input_folder); - - void GetAllFiles(std::string path, std::vector *files); - - private: - void DestroyResource(); - - int32_t deviceId_; - aclrtContext context_; - aclrtStream stream_; -}; - diff --git a/tutorials/tutorial_code/acl_resnet50_sample/inc/utils.h b/tutorials/tutorial_code/acl_resnet50_sample/inc/utils.h deleted file mode 100644 index ca26e9dbc232689edc3b5035eb914f88b7bd1950..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/acl_resnet50_sample/inc/utils.h +++ /dev/null @@ -1,45 +0,0 @@ -/** -* @file utils.h -* -* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. -* -* This program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -*/ -#pragma once -#include -#include - -#define INFO_LOG(fmt, args...) fprintf(stdout, "[INFO] " fmt "\n", ##args) -#define WARN_LOG(fmt, args...) fprintf(stdout, "[WARN] " fmt "\n", ##args) -#define ERROR_LOG(fmt, args...) 
fprintf(stdout, "[ERROR] " fmt "\n", ##args) - -typedef enum Result { - SUCCESS = 0, - FAILED = 1 -} Result; - -/** -* Utils -*/ -class Utils { - public: - /** - * @brief create device buffer of file - * @param [in] fileName: file name - * @param [out] fileSize: size of file - * @return device buffer of file - */ - static void *GetDeviceBufferOfFile(std::string fileName, uint32_t *fileSize); - - /** - * @brief create buffer of file - * @param [in] fileName: file name - * @param [out] fileSize: size of file - * @return buffer of pic - */ - static void* ReadBinFile(std::string fileName, uint32_t *fileSize); -}; - -#pragma once diff --git a/tutorials/tutorial_code/acl_resnet50_sample/src/CMakeLists.txt b/tutorials/tutorial_code/acl_resnet50_sample/src/CMakeLists.txt deleted file mode 100644 index 28895a3351983857354da9b68fd62602a107738f..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/acl_resnet50_sample/src/CMakeLists.txt +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) Huawei Technologies Co., Ltd. 2019. All rights reserved. 
- -# CMake lowest version requirement -cmake_minimum_required(VERSION 3.5.1) - -# project information -project(ACL_RESNET50) - -# Compile options -add_compile_options(-std=c++11) - -set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "../../../out") -set(CMAKE_CXX_FLAGS_DEBUG "-fPIC -O0 -g -Wall") -set(CMAKE_CXX_FLAGS_RELEASE "-fPIC -O2 -Wall") - -set(INC_PATH $ENV{DDK_PATH}) - -if (NOT DEFINED ENV{DDK_PATH}) - set(INC_PATH "/usr/local/Ascend") - message(STATUS "set default INC_PATH: ${INC_PATH}") -else () - message(STATUS "env INC_PATH: ${INC_PATH}") -endif() - -set(LIB_PATH $ENV{NPU_HOST_LIB}) - -if (NOT DEFINED ENV{NPU_HOST_LIB}) - set(LIB_PATH "/usr/local/Ascend/acllib/lib64/stub/") - message(STATUS "set default LIB_PATH: ${LIB_PATH}") -else () - message(STATUS "env LIB_PATH: ${LIB_PATH}") -endif() - -# Header path -include_directories( - ${INC_PATH}/acllib_linux.arm64/include/ - ../ -) - -if(target STREQUAL "Simulator_Function") - add_compile_options(-DFUNC_SIM) -endif() - -# add host lib path -link_directories( - ${LIB_PATH} -) - -add_executable(main - utils.cpp - model_process.cpp - sample_process.cpp - main.cpp) - -if(target STREQUAL "Simulator_Function") - target_link_libraries(main funcsim) -else() - target_link_libraries(main ascendcl stdc++) -endif() - -install(TARGETS main DESTINATION ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) diff --git a/tutorials/tutorial_code/acl_resnet50_sample/src/acl.json b/tutorials/tutorial_code/acl_resnet50_sample/src/acl.json deleted file mode 100644 index 0967ef424bce6791893e9a57bb952f80fd536e93..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/acl_resnet50_sample/src/acl.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tutorials/tutorial_code/acl_resnet50_sample/src/main.cpp b/tutorials/tutorial_code/acl_resnet50_sample/src/main.cpp deleted file mode 100644 index 1c75b6bd04ea12f2a6523d85d38b89479e57c3e8..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/acl_resnet50_sample/src/main.cpp +++ /dev/null @@ 
-1,35 +0,0 @@ -/** -* @file main.cpp -* -* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. -* -* This program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -*/ -#include -#include "inc/sample_process.h" -#include "inc/utils.h" -bool g_isDevice = false; - -int main(int argc, char **argv) { - if (argc != 3) { - ERROR_LOG("usage:./main path_of_om path_of_inputFolder"); - return FAILED; - } - SampleProcess processSample; - Result ret = processSample.InitResource(); - if (ret != SUCCESS) { - ERROR_LOG("sample init resource failed"); - return FAILED; - } - - ret = processSample.Process(argv[1], argv[2]); - if (ret != SUCCESS) { - ERROR_LOG("sample process failed"); - return FAILED; - } - - INFO_LOG("execute sample success"); - return SUCCESS; -} diff --git a/tutorials/tutorial_code/acl_resnet50_sample/src/model_process.cpp b/tutorials/tutorial_code/acl_resnet50_sample/src/model_process.cpp deleted file mode 100644 index 9aaf184307ade78d87f3cfafd48cb6f81e3c4472..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/acl_resnet50_sample/src/model_process.cpp +++ /dev/null @@ -1,333 +0,0 @@ -/** -* @file model_process.cpp -* -* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. -* -* This program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
-*/ -#include "inc/model_process.h" -#include -#include -#include -#include -#include "inc/utils.h" -extern bool g_isDevice; - -ModelProcess::ModelProcess() :modelId_(0), modelMemSize_(0), modelWeightSize_(0), modelMemPtr_(nullptr), -modelWeightPtr_(nullptr), loadFlag_(false), modelDesc_(nullptr), input_(nullptr), output_(nullptr) { -} - -ModelProcess::~ModelProcess() { - Unload(); - DestroyDesc(); - DestroyInput(); - DestroyOutput(); -} - -Result ModelProcess::LoadModelFromFileWithMem(const char *modelPath) { - if (loadFlag_) { - ERROR_LOG("has already loaded a model"); - return FAILED; - } - - aclError ret = aclmdlQuerySize(modelPath, &modelMemSize_, &modelWeightSize_); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("query model failed, model file is %s", modelPath); - return FAILED; - } - - ret = aclrtMalloc(&modelMemPtr_, modelMemSize_, ACL_MEM_MALLOC_HUGE_FIRST); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("malloc buffer for mem failed, require size is %zu", modelMemSize_); - return FAILED; - } - - ret = aclrtMalloc(&modelWeightPtr_, modelWeightSize_, ACL_MEM_MALLOC_HUGE_FIRST); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("malloc buffer for weight failed, require size is %zu", modelWeightSize_); - return FAILED; - } - - ret = aclmdlLoadFromFileWithMem(modelPath, &modelId_, modelMemPtr_, - modelMemSize_, modelWeightPtr_, modelWeightSize_); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("load model from file failed, model file is %s", modelPath); - return FAILED; - } - - loadFlag_ = true; - INFO_LOG("load model %s success", modelPath); - return SUCCESS; -} - -Result ModelProcess::CreateDesc() { - modelDesc_ = aclmdlCreateDesc(); - if (modelDesc_ == nullptr) { - ERROR_LOG("create model description failed"); - return FAILED; - } - - aclError ret = aclmdlGetDesc(modelDesc_, modelId_); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("get model description failed"); - return FAILED; - } - - INFO_LOG("create model description success"); - - return SUCCESS; -} - -void 
ModelProcess::DestroyDesc() { - if (modelDesc_ != nullptr) { - (void)aclmdlDestroyDesc(modelDesc_); - modelDesc_ = nullptr; - } -} - -Result ModelProcess::CreateInput(void *inputDataBuffer, size_t bufferSize) { - input_ = aclmdlCreateDataset(); - if (input_ == nullptr) { - ERROR_LOG("can't create dataset, create input failed"); - return FAILED; - } - - aclDataBuffer* inputData = aclCreateDataBuffer(inputDataBuffer, bufferSize); - if (inputData == nullptr) { - ERROR_LOG("can't create data buffer, create input failed"); - return FAILED; - } - - aclError ret = aclmdlAddDatasetBuffer(input_, inputData); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("add input dataset buffer failed"); - aclDestroyDataBuffer(inputData); - inputData = nullptr; - return FAILED; - } - - return SUCCESS; -} - -void ModelProcess::DestroyInput() { - if (input_ == nullptr) { - return; - } - - for (size_t i = 0; i < aclmdlGetDatasetNumBuffers(input_); ++i) { - aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(input_, i); - aclDestroyDataBuffer(dataBuffer); - } - aclmdlDestroyDataset(input_); - input_ = nullptr; -} - -Result ModelProcess::CreateOutput() { - if (modelDesc_ == nullptr) { - ERROR_LOG("no model description, create output failed"); - return FAILED; - } - - output_ = aclmdlCreateDataset(); - if (output_ == nullptr) { - ERROR_LOG("can't create dataset, create output failed"); - return FAILED; - } - - size_t outputSize = aclmdlGetNumOutputs(modelDesc_); - for (size_t i = 0; i < outputSize; ++i) { - size_t buffer_size = aclmdlGetOutputSizeByIndex(modelDesc_, i); - - void *outputBuffer = nullptr; - aclError ret = aclrtMalloc(&outputBuffer, buffer_size, ACL_MEM_MALLOC_NORMAL_ONLY); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("can't malloc buffer, size is %zu, create output failed", buffer_size); - return FAILED; - } - - aclDataBuffer* outputData = aclCreateDataBuffer(outputBuffer, buffer_size); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("can't create data buffer, create output failed"); - 
aclrtFree(outputBuffer); - return FAILED; - } - - ret = aclmdlAddDatasetBuffer(output_, outputData); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("can't add data buffer, create output failed"); - aclrtFree(outputBuffer); - aclDestroyDataBuffer(outputData); - return FAILED; - } - } - - INFO_LOG("create model output success"); - return SUCCESS; -} - -void ModelProcess::DumpModelOutputResult(char *output_name) { - std::stringstream ss; - size_t outputNum = aclmdlGetDatasetNumBuffers(output_); - static int executeNum = 0; - - for (size_t i = 0; i < outputNum; ++i) { - ss << "result/" << output_name << "_output" << ++executeNum << "_" << i << ".bin"; - std::string outputFileName = ss.str(); - FILE *outputFile = fopen(outputFileName.c_str(), "wb"); - if (outputFile) { - aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(output_, i); - void* data = aclGetDataBufferAddr(dataBuffer); - uint32_t len = aclGetDataBufferSize(dataBuffer); - - void* outHostData = NULL; - aclError ret = ACL_ERROR_NONE; - if (!g_isDevice) { - ret = aclrtMallocHost(&outHostData, len); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("aclrtMallocHost failed, ret[%d]", ret); - return; - } - - ret = aclrtMemcpy(outHostData, len, data, len, ACL_MEMCPY_DEVICE_TO_HOST); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("aclrtMemcpy failed, ret[%d]", ret); - (void)aclrtFreeHost(outHostData); - return; - } - - fwrite(outHostData, len, sizeof(char), outputFile); - - ret = aclrtFreeHost(outHostData); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("aclrtFreeHost failed, ret[%d]", ret); - return; - } - } else { - fwrite(data, len, sizeof(char), outputFile); - } - fclose(outputFile); - outputFile = nullptr; - } else { - ERROR_LOG("create output file [%s] failed", outputFileName.c_str()); - return; - } - } - - INFO_LOG("dump data success"); - return; -} - -void ModelProcess::OutputModelResult() { - for (size_t i = 0; i < aclmdlGetDatasetNumBuffers(output_); ++i) { - aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(output_, i); - 
void* data = aclGetDataBufferAddr(dataBuffer); - uint32_t len = aclGetDataBufferSize(dataBuffer); - - void *outHostData = NULL; - aclError ret = ACL_ERROR_NONE; - float *outData = NULL; - if (!g_isDevice) { - ret = aclrtMallocHost(&outHostData, len); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("aclrtMallocHost failed, ret[%d]", ret); - return; - } - - ret = aclrtMemcpy(outHostData, len, data, len, ACL_MEMCPY_DEVICE_TO_HOST); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("aclrtMemcpy failed, ret[%d]", ret); - return; - } - - outData = reinterpret_cast(outHostData); - } else { - outData = reinterpret_cast(data); - } - std::map > resultMap; - for (unsigned int j = 0; j < len / sizeof(float); ++j) { - resultMap[*outData] = j; - outData++; - } - - int cnt = 0; - for (auto it = resultMap.begin(); it != resultMap.end(); ++it) { - // print top 5 - if (++cnt > 5) { - break; - } - - INFO_LOG("top %d: index[%d] value[%lf]", cnt, it->second, it->first); - } - if (!g_isDevice) { - ret = aclrtFreeHost(outHostData); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("aclrtFreeHost failed, ret[%d]", ret); - return; - } - } - } - - INFO_LOG("output data success"); - return; -} - -void ModelProcess::DestroyOutput() { - if (output_ == nullptr) { - return; - } - - for (size_t i = 0; i < aclmdlGetDatasetNumBuffers(output_); ++i) { - aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(output_, i); - void* data = aclGetDataBufferAddr(dataBuffer); - (void)aclrtFree(data); - (void)aclDestroyDataBuffer(dataBuffer); - } - - (void)aclmdlDestroyDataset(output_); - output_ = nullptr; -} - -Result ModelProcess::Execute() { - aclError ret = aclmdlExecute(modelId_, input_, output_); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("execute model failed, modelId is %u", modelId_); - return FAILED; - } - - INFO_LOG("model execute success"); - return SUCCESS; -} - -void ModelProcess::Unload() { - if (!loadFlag_) { - WARN_LOG("no model had been loaded, unload failed"); - return; - } - - aclError ret = 
aclmdlUnload(modelId_); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("unload model failed, modelId is %u", modelId_); - } - - if (modelDesc_ != nullptr) { - (void)aclmdlDestroyDesc(modelDesc_); - modelDesc_ = nullptr; - } - - if (modelMemPtr_ != nullptr) { - aclrtFree(modelMemPtr_); - modelMemPtr_ = nullptr; - modelMemSize_ = 0; - } - - if (modelWeightPtr_ != nullptr) { - aclrtFree(modelWeightPtr_); - modelWeightPtr_ = nullptr; - modelWeightSize_ = 0; - } - - loadFlag_ = false; - INFO_LOG("unload model success, modelId is %u", modelId_); -} diff --git a/tutorials/tutorial_code/acl_resnet50_sample/src/sample_process.cpp b/tutorials/tutorial_code/acl_resnet50_sample/src/sample_process.cpp deleted file mode 100644 index e47a627453e1efb4b1af303c7a761bb3a10a510d..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/acl_resnet50_sample/src/sample_process.cpp +++ /dev/null @@ -1,193 +0,0 @@ -/** -* @file sample_process.cpp -* -* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. -* -* This program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
-*/ -#include "inc/sample_process.h" -#include -#include -#include -#include -#include -#include "inc/model_process.h" -#include "acl/acl.h" -#include "inc/utils.h" -extern bool g_isDevice; -using std::string; -using std::vector; - -SampleProcess::SampleProcess() :deviceId_(0), context_(nullptr), stream_(nullptr) { -} - -SampleProcess::~SampleProcess() { - DestroyResource(); -} - -Result SampleProcess::InitResource() { - // ACL init - const char *aclConfigPath = "../src/acl.json"; - aclError ret = aclInit(aclConfigPath); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("acl init failed"); - return FAILED; - } - INFO_LOG("acl init success"); - - // open device - ret = aclrtSetDevice(deviceId_); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("acl open device %d failed", deviceId_); - return FAILED; - } - INFO_LOG("open device %d success", deviceId_); - - // create context (set current) - ret = aclrtCreateContext(&context_, deviceId_); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("acl create context failed"); - return FAILED; - } - INFO_LOG("create context success"); - - // create stream - ret = aclrtCreateStream(&stream_); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("acl create stream failed"); - return FAILED; - } - INFO_LOG("create stream success"); - - // get run mode - aclrtRunMode runMode; - ret = aclrtGetRunMode(&runMode); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("acl get run mode failed"); - return FAILED; - } - g_isDevice = (runMode == ACL_DEVICE); - INFO_LOG("get run mode success"); - return SUCCESS; -} - -void SampleProcess::GetAllFiles(std::string path, std::vector *files) { - DIR *pDir; - struct dirent* ptr; - if (!(pDir = opendir(path.c_str()))) - return; - while ((ptr = readdir(pDir)) != 0) { - if (strcmp(ptr->d_name, ".") != 0 && strcmp(ptr->d_name, "..") != 0) - files->push_back(path + "/" + ptr->d_name); - } - closedir(pDir); -} - -Result SampleProcess::Process(char *om_path, char *input_folder) { - // model init - ModelProcess processModel; - // const char* 
omModelPath = "../model/resnet50.om"; - const char* omModelPath = om_path; - - Result ret = processModel.LoadModelFromFileWithMem(omModelPath); - if (ret != SUCCESS) { - ERROR_LOG("execute LoadModelFromFileWithMem failed"); - return FAILED; - } - - ret = processModel.CreateDesc(); - if (ret != SUCCESS) { - ERROR_LOG("execute CreateDesc failed"); - return FAILED; - } - - ret = processModel.CreateOutput(); - if (ret != SUCCESS) { - ERROR_LOG("execute CreateOutput failed"); - return FAILED; - } - - std::vector testFile; - GetAllFiles(input_folder, &testFile); - - if (testFile.size() == 0) { - WARN_LOG("no input data under folder"); - } - - // loop begin - for (size_t index = 0; index < testFile.size(); ++index) { - INFO_LOG("start to process file:%s", testFile[index].c_str()); - // model process - uint32_t devBufferSize; - void *picDevBuffer = Utils::GetDeviceBufferOfFile(testFile[index], &devBufferSize); - if (picDevBuffer == nullptr) { - ERROR_LOG("get pic device buffer failed,index is %zu", index); - return FAILED; - } - ret = processModel.CreateInput(picDevBuffer, devBufferSize); - if (ret != SUCCESS) { - ERROR_LOG("execute CreateInput failed"); - aclrtFree(picDevBuffer); - return FAILED; - } - - ret = processModel.Execute(); - if (ret != SUCCESS) { - ERROR_LOG("execute inference failed"); - aclrtFree(picDevBuffer); - return FAILED; - } - - int pos = testFile[index].find_last_of('/'); - std::string name = testFile[index].substr(pos+1); - std::string outputname = name.substr(0, name.rfind(".")); - - // print the top 5 confidence values - processModel.OutputModelResult(); - // dump output result to file in the current directory - processModel.DumpModelOutputResult(const_cast(outputname.c_str())); - - // release model input buffer - aclrtFree(picDevBuffer); - processModel.DestroyInput(); - } - // loop end - - return SUCCESS; -} - -void SampleProcess::DestroyResource() { - aclError ret; - if (stream_ != nullptr) { - ret = aclrtDestroyStream(stream_); - if (ret != 
ACL_ERROR_NONE) { - ERROR_LOG("destroy stream failed"); - } - stream_ = nullptr; - } - INFO_LOG("end to destroy stream"); - - if (context_ != nullptr) { - ret = aclrtDestroyContext(context_); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("destroy context failed"); - } - context_ = nullptr; - } - INFO_LOG("end to destroy context"); - - ret = aclrtResetDevice(deviceId_); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("reset device failed"); - } - INFO_LOG("end to reset device is %d", deviceId_); - - ret = aclFinalize(); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("finalize acl failed"); - } - INFO_LOG("end to finalize acl"); -} - diff --git a/tutorials/tutorial_code/acl_resnet50_sample/src/utils.cpp b/tutorials/tutorial_code/acl_resnet50_sample/src/utils.cpp deleted file mode 100644 index 109c1737cd44d67a8d58e48d0887f9b50cd269cd..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/acl_resnet50_sample/src/utils.cpp +++ /dev/null @@ -1,106 +0,0 @@ -/** -* @file utils.cpp -* -* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. -* -* This program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
-*/ -#include "inc/utils.h" -#include -#include -#include -#include -#include "acl/acl.h" - -extern bool g_isDevice; - -void* Utils::ReadBinFile(std::string fileName, uint32_t *fileSize) { - struct stat sBuf; - int fileStatus = stat(fileName.data(), &sBuf); - if (fileStatus == -1) { - ERROR_LOG("failed to get file"); - return nullptr; - } - if (S_ISREG(sBuf.st_mode) == 0) { - ERROR_LOG("%s is not a file, please enter a file", fileName.c_str()); - return nullptr; - } - - std::ifstream binFile(fileName, std::ifstream::binary); - if (binFile.is_open() == false) { - ERROR_LOG("open file %s failed", fileName.c_str()); - return nullptr; - } - - binFile.seekg(0, binFile.end); - uint32_t binFileBufferLen = binFile.tellg(); - if (binFileBufferLen == 0) { - ERROR_LOG("binfile is empty, filename is %s", fileName.c_str()); - binFile.close(); - return nullptr; - } - - binFile.seekg(0, binFile.beg); - - void* binFileBufferData = nullptr; - aclError ret = ACL_ERROR_NONE; - if (!g_isDevice) { - ret = aclrtMallocHost(&binFileBufferData, binFileBufferLen); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("malloc for binFileBufferData failed"); - binFile.close(); - return nullptr; - } - if (binFileBufferData == nullptr) { - ERROR_LOG("malloc binFileBufferData failed"); - binFile.close(); - return nullptr; - } - } else { - ret = aclrtMalloc(&binFileBufferData, binFileBufferLen, ACL_MEM_MALLOC_NORMAL_ONLY); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("malloc device buffer failed. 
size is %u", binFileBufferLen); - binFile.close(); - return nullptr; - } - } - binFile.read(static_cast(binFileBufferData), binFileBufferLen); - binFile.close(); - *fileSize = binFileBufferLen; - return binFileBufferData; -} - -void* Utils::GetDeviceBufferOfFile(std::string fileName, uint32_t *fileSize) { - uint32_t inputHostBuffSize = 0; - void* inputHostBuff = Utils::ReadBinFile(fileName, &inputHostBuffSize); - if (inputHostBuff == nullptr) { - return nullptr; - } - if (!g_isDevice) { - void *inBufferDev = nullptr; - uint32_t inBufferSize = inputHostBuffSize; - aclError ret = aclrtMalloc(&inBufferDev, inBufferSize, ACL_MEM_MALLOC_NORMAL_ONLY); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("malloc device buffer failed. size is %u", inBufferSize); - aclrtFreeHost(inputHostBuff); - return nullptr; - } - - ret = aclrtMemcpy(inBufferDev, inBufferSize, inputHostBuff, inputHostBuffSize, ACL_MEMCPY_HOST_TO_DEVICE); - if (ret != ACL_ERROR_NONE) { - ERROR_LOG("memcpy failed. device buffer size is %u, input host buffer size is %u", - inBufferSize, inputHostBuffSize); - aclrtFree(inBufferDev); - aclrtFreeHost(inputHostBuff); - return nullptr; - } - aclrtFreeHost(inputHostBuff); - *fileSize = inBufferSize; - return inBufferDev; - } else { - *fileSize = inputHostBuffSize; - return inputHostBuff; - } -} diff --git a/tutorials/tutorial_code/acl_resnet50_sample/test_data/test_data_1x3x224x224_1.bin b/tutorials/tutorial_code/acl_resnet50_sample/test_data/test_data_1x3x224x224_1.bin deleted file mode 100644 index 2dd1cbcc2d5e04d4438739989232bb38b8b04f1a..0000000000000000000000000000000000000000 Binary files a/tutorials/tutorial_code/acl_resnet50_sample/test_data/test_data_1x3x224x224_1.bin and /dev/null differ diff --git a/tutorials/tutorial_code/acl_resnet50_sample/test_data/test_data_1x3x224x224_2.bin b/tutorials/tutorial_code/acl_resnet50_sample/test_data/test_data_1x3x224x224_2.bin deleted file mode 100644 index 
fb76192207f06d8258efd8f7abd67212a0ef9476..0000000000000000000000000000000000000000 Binary files a/tutorials/tutorial_code/acl_resnet50_sample/test_data/test_data_1x3x224x224_2.bin and /dev/null differ diff --git a/tutorials/tutorial_code/ascend310_resnet50_preprocess_sample/CMakeLists.txt b/tutorials/tutorial_code/ascend310_resnet50_preprocess_sample/CMakeLists.txt deleted file mode 100644 index 17538cabed238d23faf3c16552ec547210422c49..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/ascend310_resnet50_preprocess_sample/CMakeLists.txt +++ /dev/null @@ -1,11 +0,0 @@ -cmake_minimum_required(VERSION 3.14.1) -project (ResNet50Sample[CXX]) -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17 -fPIE -Wl,--allow-shlib-undefined") -option(MINDSPORE_PATH "mindspore install path" "") -include_directories(${MINDSPORE_PATH}) -include_directories(${MINDSPORE_PATH}/include) -find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib) -file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*) - -add_executable(resnet50_sample main.cc) -target_link_libraries(resnet50_sample ${MS_LIB} ${MD_LIB}) diff --git a/tutorials/tutorial_code/ascend310_resnet50_preprocess_sample/README.md b/tutorials/tutorial_code/ascend310_resnet50_preprocess_sample/README.md deleted file mode 100644 index aa0fb2ea3f9a7ba9c0cfb00a6473b59f8624637a..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/ascend310_resnet50_preprocess_sample/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# README - -Usage: - -```bash -cd ascend310_resnet50_preprocess_sample -cmake . 
-DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -make -./resnet50_sample -``` diff --git a/tutorials/tutorial_code/ascend310_resnet50_preprocess_sample/main.cc b/tutorials/tutorial_code/ascend310_resnet50_preprocess_sample/main.cc deleted file mode 100644 index 9cbade39a61baa4286cc6d8c4eac2c7eeb25daa8..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/ascend310_resnet50_preprocess_sample/main.cc +++ /dev/null @@ -1,210 +0,0 @@ -/** - * Copyright 2021 Huawei Technologies Co., Ltd - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include -#include -#include -#include -#include -#include - -#include "include/api/context.h" -#include "include/api/model.h" -#include "include/api/serialization.h" -#include "include/dataset/execute.h" -#include "include/dataset/vision.h" - -namespace ms = mindspore; -namespace ds = mindspore::dataset; -constexpr auto resnet_file = "./model/resnet50_imagenet.mindir"; -constexpr auto image_path = "./test_data"; - -std::vector GetAllFiles(std::string_view dir_name); -DIR *OpenDir(std::string_view dir_name); -std::string RealPath(std::string_view path); -ms::MSTensor ReadFile(const std::string &file); -size_t GetMax(ms::MSTensor data); - -int main() { - // set context - auto context = std::make_shared(); - auto ascend310_info = std::make_shared(); - ascend310_info->SetDeviceID(0); - context->MutableDeviceInfo().push_back(ascend310_info); - - // define model - ms::Graph graph; - ms::Status ret = ms::Serialization::Load(resnet_file, ms::ModelType::kMindIR, &graph); - if (ret != ms::kSuccess) { - std::cout << "Load model failed." << std::endl; - return 1; - } - ms::Model resnet50; - - // build model - ret = resnet50.Build(ms::GraphCell(graph), context); - if (ret != ms::kSuccess) { - std::cout << "Build model failed." << std::endl; - return 1; - } - - // get model info - std::vector model_inputs = resnet50.GetInputs(); - if (model_inputs.empty()) { - std::cout << "Invalid model, inputs is empty." 
<< std::endl; - return 1; - } - - // define transforms - std::shared_ptr decode(new ds::vision::Decode()); - std::shared_ptr resize(new ds::vision::Resize({256})); - std::shared_ptr normalize(new ds::vision::Normalize({0.485 * 255, 0.456 * 255, 0.406 * 255}, - {0.229 * 255, 0.224 * 255, 0.225 * 255})); - std::shared_ptr center_crop(new ds::vision::CenterCrop({224, 224})); - std::shared_ptr hwc2chw(new ds::vision::HWC2CHW()); - - // define preprocessor - ds::Execute preprocessor({decode, resize, normalize, center_crop, hwc2chw}); - - std::vector images = GetAllFiles(image_path); - for (const auto &image_file : images) { - // prepare input - std::vector outputs; - std::vector inputs; - - // read image file and preprocess - auto image = ReadFile(image_file); - ret = preprocessor(image, &image); - if (ret != ms::kSuccess) { - std::cout << "Image preprocess failed." << std::endl; - return 1; - } - - inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(), - image.Data().get(), image.DataSize()); - - // infer - ret = resnet50.Predict(inputs, &outputs); - if (ret != ms::kSuccess) { - std::cout << "Predict model failed." << std::endl; - return 1; - } - - // print infer result - std::cout << "Image: " << image_file << " infer result: " << GetMax(outputs[0]) << std::endl; - } - return 0; -} - -std::vector GetAllFiles(std::string_view dir_name) { - struct dirent *filename; - DIR *dir = OpenDir(dir_name); - if (dir == nullptr) { - return {}; - } - - /* read all the files in the dir ~ */ - std::vector res; - while ((filename = readdir(dir)) != nullptr) { - std::string d_name = std::string(filename->d_name); - // get rid of "." and ".." - if (d_name == "." || d_name == ".." || filename->d_type != DT_REG) - continue; - res.emplace_back(std::string(dir_name) + "/" + filename->d_name); - } - - std::sort(res.begin(), res.end()); - return res; -} - -DIR *OpenDir(std::string_view dir_name) { - // check the parameter ! 
- if (dir_name.empty()) { - std::cout << " dir_name is null ! " << std::endl; - return nullptr; - } - - std::string real_path = RealPath(dir_name); - - // check if dir_name is a valid dir - struct stat s; - lstat(real_path.c_str(), &s); - if (!S_ISDIR(s.st_mode)) { - std::cout << "dir_name is not a valid directory !" << std::endl; - return nullptr; - } - - DIR *dir; - dir = opendir(real_path.c_str()); - if (dir == nullptr) { - std::cout << "Can not open dir " << dir_name << std::endl; - return nullptr; - } - return dir; -} - -std::string RealPath(std::string_view path) { - char real_path_mem[PATH_MAX] = {0}; - char *real_path_ret = realpath(path.data(), real_path_mem); - - if (real_path_ret == nullptr) { - std::cout << "File: " << path << " is not exist."; - return ""; - } - - return std::string(real_path_mem); -} - -ms::MSTensor ReadFile(const std::string &file) { - if (file.empty()) { - std::cout << "Pointer file is nullptr" << std::endl; - return ms::MSTensor(); - } - - std::ifstream ifs(file); - if (!ifs.good()) { - std::cout << "File: " << file << " is not exist" << std::endl; - return ms::MSTensor(); - } - - if (!ifs.is_open()) { - std::cout << "File: " << file << "open failed" << std::endl; - return ms::MSTensor(); - } - - ifs.seekg(0, std::ios::end); - size_t size = ifs.tellg(); - ms::MSTensor buffer(file, ms::DataType::kNumberTypeUInt8, {static_cast(size)}, nullptr, size); - - ifs.seekg(0, std::ios::beg); - ifs.read(reinterpret_cast(buffer.MutableData()), size); - ifs.close(); - - return buffer; -} - -size_t GetMax(ms::MSTensor data) { - float max_value = -1; - size_t max_idx = 0; - const float *p = reinterpret_cast(data.MutableData()); - for (size_t i = 0; i < data.DataSize() / sizeof(float); ++i) { - if (p[i] > max_value) { - max_value = p[i]; - max_idx = i; - } - } - return max_idx; -} diff --git a/tutorials/tutorial_code/ascend310_single_op_sample/CMakeLists.txt b/tutorials/tutorial_code/ascend310_single_op_sample/CMakeLists.txt deleted file mode 
100644 index 13b50fa32e7b8a6c514426a4fb0342615dd10356..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/ascend310_single_op_sample/CMakeLists.txt +++ /dev/null @@ -1,10 +0,0 @@ -cmake_minimum_required(VERSION 3.14.1) -project (TensorAddSample[CXX]) -add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0) -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17 -fPIE -Wl,--allow-shlib-undefined") -option(MINDSPORE_PATH "mindspore install path" "") -include_directories(${MINDSPORE_PATH}) -find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib) - -add_executable(tensor_add_sample main.cc) -target_link_libraries(tensor_add_sample ${MS_LIB}) diff --git a/tutorials/tutorial_code/ascend310_single_op_sample/README.md b/tutorials/tutorial_code/ascend310_single_op_sample/README.md deleted file mode 100644 index 700a07e9d383b78ca2ac7cb260b598dacaf01a45..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/ascend310_single_op_sample/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# README - -Usage: - -```bash -cd ascend310_single_op_sample -cmake . -DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -make -./tensor_add_sample -``` diff --git a/tutorials/tutorial_code/ascend310_single_op_sample/main.cc b/tutorials/tutorial_code/ascend310_single_op_sample/main.cc deleted file mode 100644 index ead314976c8293e9bb3327433fbd3f9bc2371515..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/ascend310_single_op_sample/main.cc +++ /dev/null @@ -1,80 +0,0 @@ -/** - * Copyright 2021 Huawei Technologies Co., Ltd - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -#include -#include "include/api/context.h" -#include "include/api/model.h" -#include "include/api/serialization.h" - -namespace ms = mindspore; -constexpr auto tensor_add_file = "./tensor_add.mindir"; -static const std::vector input_data_1 = {1, 2, 3, 4}; -static const std::vector input_data_2 = {2, 3, 4, 5}; - -int main() { - // set context - auto context = std::make_shared(); - auto ascend310_info = std::make_shared(); - ascend310_info->SetDeviceID(0); - context->MutableDeviceInfo().push_back(ascend310_info); - - // define model - ms::Graph graph; - ms::Status ret = ms::Serialization::Load(tensor_add_file, ms::ModelType::kMindIR, &graph); - if (ret != ms::kSuccess) { - std::cout << "Load model failed." << std::endl; - return 1; - } - ms::Model tensor_add; - - // build model - ret = tensor_add.Build(ms::GraphCell(graph), context); - if (ret != ms::kSuccess) { - std::cout << "Build model failed." 
<< std::endl; - return 1; - } - - // get model inputs - std::vector origin_inputs = tensor_add.GetInputs(); - if (origin_inputs.size() != 2) { - std::cout << "Invalid model inputs size " << origin_inputs.size() << std::endl; - return 1; - } - - // prepare input - std::vector outputs; - std::vector inputs; - inputs.emplace_back(origin_inputs[0].Name(), origin_inputs[0].DataType(), origin_inputs[0].Shape(), - input_data_1.data(), sizeof(float) * input_data_1.size()); - inputs.emplace_back(origin_inputs[1].Name(), origin_inputs[1].DataType(), origin_inputs[1].Shape(), - input_data_1.data(), sizeof(float) * input_data_1.size()); - - // infer - ret = tensor_add.Predict(inputs, &outputs); - if (ret != ms::kSuccess) { - std::cout << "Predict model failed." << std::endl; - return 1; - } - - // print - for (auto &buffer : outputs) { - const float *p = reinterpret_cast(buffer.MutableData()); - for (size_t i = 0; i < buffer.DataSize() / sizeof(float); ++i) { - std::cout << p[i] << std::endl; - } - } - - return 0; -} diff --git a/tutorials/tutorial_code/ascend310_single_op_sample/tensor_add.mindir b/tutorials/tutorial_code/ascend310_single_op_sample/tensor_add.mindir deleted file mode 100644 index 80a58db0e4533c4f8c5c89069407aa7f3489f9a0..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/ascend310_single_op_sample/tensor_add.mindir +++ /dev/null @@ -1,10 +0,0 @@ - -0.1.0 MindSpore*1.1.0: - -8_7_5_construct_wrapper:x_ -8_7_5_construct_wrapper:y_8_7_5_construct_wrapper:1:2"8_7_5_construct_wrapper:[CNode]0:1"Add*b:shape1z shape:shape1,*0 - output_namesZoutputzscalar:List[value1,],*4 - input_namesZxZyzscalar:List[value1,value2,],:Default/Add-op38_7_5_construct_wrapper*$ -8_7_5_construct_wrapper:x_*$ -8_7_5_construct_wrapper:y_2% -8_7_5_construct_wrapper:1:2 \ No newline at end of file diff --git a/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample/CMakeLists.txt b/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample/CMakeLists.txt deleted 
file mode 100644 index 99a12ed4cd5ffd296122c356a0af7cd150a8d249..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample/CMakeLists.txt +++ /dev/null @@ -1,12 +0,0 @@ -cmake_minimum_required(VERSION 3.14.1) -project (ResNet50Sample[CXX]) -add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0) -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17 -fPIE -Wl,--allow-shlib-undefined") -option(MINDSPORE_PATH "mindspore install path" "") -include_directories(${MINDSPORE_PATH}) -include_directories(${MINDSPORE_PATH}/include) -find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib) -file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*) - -add_executable(resnet50_sample main.cc) -target_link_libraries(resnet50_sample ${MS_LIB} ${MD_LIB}) diff --git a/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample/README.md b/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample/README.md deleted file mode 100644 index ef1ca118965f78d4154eaa1cae0d40b8cf6fc255..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# README - -Usage: - -```bash -cd ascend910_resnet50_preprocess_sample -cmake . -DMINDSPORE_PATH=`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath` -make -./resnet50_sample -``` diff --git a/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample/main.cc b/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample/main.cc deleted file mode 100644 index 5f1b96359da7b723f3209073f5f99596de848c62..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/ascend910_resnet50_preprocess_sample/main.cc +++ /dev/null @@ -1,210 +0,0 @@ -/** - * Copyright 2021 Huawei Technologies Co., Ltd - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include -#include -#include -#include -#include -#include - -#include "include/api/context.h" -#include "include/api/model.h" -#include "include/api/serialization.h" -#include "include/dataset/execute.h" -#include "include/dataset/vision.h" - -namespace ms = mindspore; -namespace ds = mindspore::dataset; -constexpr auto resnet_file = "./model/resnet50_imagenet.mindir"; -constexpr auto image_path = "./test_data"; - -std::vector GetAllFiles(std::string_view dir_name); -DIR *OpenDir(std::string_view dir_name); -std::string RealPath(std::string_view path); -ms::MSTensor ReadFile(const std::string &file); -size_t GetMax(ms::MSTensor data); - -int main() { - // set context - auto context = std::make_shared(); - auto ascend910_info = std::make_shared(); - ascend910_info->SetDeviceID(0); - context->MutableDeviceInfo().push_back(ascend910_info); - - // define model - ms::Graph graph; - ms::Status ret = ms::Serialization::Load(resnet_file, ms::ModelType::kMindIR, &graph); - if (ret != ms::kSuccess) { - std::cout << "Load model failed." << std::endl; - return 1; - } - ms::Model resnet50; - - // build model - ret = resnet50.Build(ms::GraphCell(graph), context); - if (ret != ms::kSuccess) { - std::cout << "Build model failed." << std::endl; - return 1; - } - - // get model info - std::vector model_inputs = resnet50.GetInputs(); - if (model_inputs.empty()) { - std::cout << "Invalid model, inputs is empty." 
<< std::endl; - return 1; - } - - // define transforms - std::shared_ptr decode(new ds::vision::Decode()); - std::shared_ptr resize(new ds::vision::Resize({256})); - std::shared_ptr normalize(new ds::vision::Normalize({0.485 * 255, 0.456 * 255, 0.406 * 255}, - {0.229 * 255, 0.224 * 255, 0.225 * 255})); - std::shared_ptr center_crop(new ds::vision::CenterCrop({224, 224})); - std::shared_ptr hwc2chw(new ds::vision::HWC2CHW()); - - // define preprocessor - ds::Execute preprocessor({decode, resize, normalize, center_crop, hwc2chw}); - - std::vector images = GetAllFiles(image_path); - for (const auto &image_file : images) { - // prepare input - std::vector outputs; - std::vector inputs; - - // read image file and preprocess - auto image = ReadFile(image_file); - ret = preprocessor(image, &image); - if (ret != ms::kSuccess) { - std::cout << "Image preprocess failed." << std::endl; - return 1; - } - - inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(), - image.Data().get(), image.DataSize()); - - // infer - ret = resnet50.Predict(inputs, &outputs); - if (ret != ms::kSuccess) { - std::cout << "Predict model failed." << std::endl; - return 1; - } - - // print infer result - std::cout << "Image: " << image_file << " infer result: " << GetMax(outputs[0]) << std::endl; - } - return 0; -} - -std::vector GetAllFiles(std::string_view dir_name) { - struct dirent *filename; - DIR *dir = OpenDir(dir_name); - if (dir == nullptr) { - return {}; - } - - /* read all the files in the dir ~ */ - std::vector res; - while ((filename = readdir(dir)) != nullptr) { - std::string d_name = std::string(filename->d_name); - // get rid of "." and ".." - if (d_name == "." || d_name == ".." || filename->d_type != DT_REG) - continue; - res.emplace_back(std::string(dir_name) + "/" + filename->d_name); - } - - std::sort(res.begin(), res.end()); - return res; -} - -DIR *OpenDir(std::string_view dir_name) { - // check the parameter ! 
- if (dir_name.empty()) { - std::cout << " dir_name is null ! " << std::endl; - return nullptr; - } - - std::string real_path = RealPath(dir_name); - - // check if dir_name is a valid dir - struct stat s; - lstat(real_path.c_str(), &s); - if (!S_ISDIR(s.st_mode)) { - std::cout << "dir_name is not a valid directory !" << std::endl; - return nullptr; - } - - DIR *dir; - dir = opendir(real_path.c_str()); - if (dir == nullptr) { - std::cout << "Can not open dir " << dir_name << std::endl; - return nullptr; - } - return dir; -} - -std::string RealPath(std::string_view path) { - char real_path_mem[PATH_MAX] = {0}; - char *real_path_ret = realpath(path.data(), real_path_mem); - - if (real_path_ret == nullptr) { - std::cout << "File: " << path << " is not exist."; - return ""; - } - - return std::string(real_path_mem); -} - -ms::MSTensor ReadFile(const std::string &file) { - if (file.empty()) { - std::cout << "Pointer file is nullptr" << std::endl; - return ms::MSTensor(); - } - - std::ifstream ifs(file); - if (!ifs.good()) { - std::cout << "File: " << file << " is not exist" << std::endl; - return ms::MSTensor(); - } - - if (!ifs.is_open()) { - std::cout << "File: " << file << "open failed" << std::endl; - return ms::MSTensor(); - } - - ifs.seekg(0, std::ios::end); - size_t size = ifs.tellg(); - ms::MSTensor buffer(file, ms::DataType::kNumberTypeUInt8, {static_cast(size)}, nullptr, size); - - ifs.seekg(0, std::ios::beg); - ifs.read(reinterpret_cast(buffer.MutableData()), size); - ifs.close(); - - return buffer; -} - -size_t GetMax(ms::MSTensor data) { - float max_value = -1; - size_t max_idx = 0; - const float *p = reinterpret_cast(data.MutableData()); - for (size_t i = 0; i < data.DataSize() / sizeof(float); ++i) { - if (p[i] > max_value) { - max_value = p[i]; - max_idx = i; - } - } - return max_idx; -} diff --git a/tutorials/tutorial_code/bert_poetry/generator.py b/tutorials/tutorial_code/bert_poetry/generator.py deleted file mode 100644 index 
4c033dcbe08cc194a2e48744ab7a9e2f23a987a3..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/generator.py +++ /dev/null @@ -1,82 +0,0 @@ -"""bert generator""" -import numpy as np -from mindspore import Tensor -from mindspore import dtype as mstype -from src.finetune_config import cfg as settings -from src.poetry_dataset import padding, create_tokenizer - -_, tokenizer, _ = create_tokenizer() - -def generate_random_poetry(model, s=''): - """generate random poetry""" - token_ids, segment_ids = tokenizer.encode(s) - token_ids = token_ids[:-1] - segment_ids = segment_ids[:-1] - target_ids = [] - length = 128 - while len(token_ids) + len(target_ids) < settings.max_len: - _target_ids = token_ids + target_ids - _segment_ids = segment_ids + [0 for _ in target_ids] - - index = len(_target_ids) - - _target_ids = padding(np.array(_target_ids), length=length) - _segment_ids = padding(np.array(_segment_ids), length=length) - - pad_mask = (_target_ids != 0).astype(np.float32) - - _target_ids = Tensor([_target_ids], mstype.int32) - _segment_ids = Tensor([_segment_ids], mstype.int32) - pad_mask = Tensor([pad_mask], mstype.float32) - - _probas = model(_target_ids, _segment_ids, pad_mask).asnumpy() - _probas = _probas[0, index-1, 3:] - p_args = _probas.argsort()[::-1][:100] - p = _probas[p_args] - p = p / sum(p) - target_index = np.random.choice(len(p), p=p) - target = p_args[target_index] + 3 - target_ids.append(target) - if target == 3: - break - return tokenizer.decode(token_ids + target_ids) - -def generate_hidden(model, head=""): - """generate hidden""" - token_ids, segment_ids = tokenizer.encode('') - token_ids = token_ids[:-1] - segment_ids = segment_ids[:-1] - punctuations = [',', '。'] - punctuation_ids = [tokenizer._token_to_id[token] for token in punctuations] - poetry = [] - length = 128 - - for ch in head: - poetry.append(ch) - token_id = tokenizer._token_to_id[ch] - token_ids.append(token_id) - segment_ids.append(0) - while True: - index 
= len(token_ids) - _target_ids = padding(np.array(token_ids), length=length) - _segment_ids = padding(np.array(segment_ids), length=length) - pad_mask = (_target_ids != 0).astype(np.float32) - - _target_ids = Tensor([_target_ids], mstype.int32) - _segment_ids = Tensor([_segment_ids], mstype.int32) - pad_mask = Tensor([pad_mask], mstype.float32) - _probas = model(_target_ids, _segment_ids, pad_mask).asnumpy() - - _probas = _probas[0, index-1, 3:] - p_args = _probas.argsort()[::-1][:100] - p = _probas[p_args] - p = p / sum(p) - target_index = np.random.choice(len(p), p=p) - target = p_args[target_index] + 3 - token_ids.append(target) - segment_ids.append(0) - if target > 3: - poetry.append(tokenizer._id_to_token[target]) - if target in punctuation_ids: - break - return ''.join(poetry) diff --git a/tutorials/tutorial_code/bert_poetry/poetry.py b/tutorials/tutorial_code/bert_poetry/poetry.py deleted file mode 100644 index e052f7e26c3c09fa9004730ef828e4b2951c2e65..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/poetry.py +++ /dev/null @@ -1,201 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ - -''' -Bert finetune script. 
-''' -import os -import re -import time -import argparse -from src.utils import BertPoetry, BertPoetryCell, BertLearningRate, BertPoetryModel -from src.finetune_config import cfg, bert_net_cfg -from src.poetry_dataset import create_poetry_dataset, create_tokenizer -from mindspore import context, load_checkpoint, load_param_into_net -from mindspore.nn import DynamicLossScaleUpdateCell -from mindspore.nn import AdamWeightDecay -from mindspore import Model -from mindspore.train.callback import Callback -from mindspore.train.callback import CheckpointConfig, ModelCheckpoint -from mindspore import Tensor, Parameter, export -from mindspore import dtype as mstype -from generator import generate_random_poetry, generate_hidden -import numpy as np - -class LossCallBack(Callback): - ''' - Monitor the loss in training. - If the loss is NAN or INF, terminate training. - Note: - If per_print_times is 0, do not print loss. - Args: - per_print_times (int): Print loss every times. Default: 1. - ''' - def __init__(self, model, per_print_times=1): - super(LossCallBack, self).__init__() - if not isinstance(per_print_times, int) or per_print_times < 0: - raise ValueError("print_step must be in and >= 0.") - self._per_print_times = per_print_times - self.model = model - - def step_end(self, run_context): - cb_params = run_context.original_args() - with open("./loss.log", "a+") as f: - f.write("epoch: {}, step: {}, outputs are {}".format(cb_params.cur_epoch_num, cb_params.cur_step_num, - str(cb_params.net_outputs))) - f.write("\n") - - -def test_train(): - ''' - finetune function - ''' - target = args_opt.device_target - if target == "Ascend": - devid = int(os.getenv('DEVICE_ID')) - context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", device_id=devid) - - poetry, tokenizer, keep_words = create_tokenizer() - print(len(keep_words)) - - dataset = create_poetry_dataset(bert_net_cfg.batch_size, poetry, tokenizer) - - num_tokens = 3191 - poetrymodel = 
BertPoetryModel(bert_net_cfg, True, num_tokens, dropout_prob=0.1) - netwithloss = BertPoetry(poetrymodel, bert_net_cfg, True, dropout_prob=0.1) - callback = LossCallBack(poetrymodel) - - # optimizer - steps_per_epoch = dataset.get_dataset_size() - print("============ steps_per_epoch is {}".format(steps_per_epoch)) - lr_schedule = BertLearningRate(learning_rate=cfg.AdamWeightDecay.learning_rate, - end_learning_rate=cfg.AdamWeightDecay.end_learning_rate, - warmup_steps=1000, - decay_steps=cfg.epoch_num*steps_per_epoch, - power=cfg.AdamWeightDecay.power) - optimizer = AdamWeightDecay(netwithloss.trainable_params(), lr_schedule) - # load checkpoint into network - ckpt_config = CheckpointConfig(save_checkpoint_steps=steps_per_epoch, keep_checkpoint_max=1) - ckpoint_cb = ModelCheckpoint(prefix=cfg.ckpt_prefix, directory=cfg.ckpt_dir, config=ckpt_config) - - param_dict = load_checkpoint(cfg.pre_training_ckpt) - new_dict = {} - - - - # load corresponding rows of embedding_lookup - for key in param_dict: - if "bert_embedding_lookup" not in key: - new_dict[key] = param_dict[key] - else: - value = param_dict[key] - np_value = value.data.asnumpy() - np_value = np_value[keep_words] - tensor_value = Tensor(np_value, mstype.float32) - parameter_value = Parameter(tensor_value, name=key) - new_dict[key] = parameter_value - - load_param_into_net(netwithloss, new_dict) - update_cell = DynamicLossScaleUpdateCell(loss_scale_value=2**32, scale_factor=2, scale_window=1000) - netwithgrads = BertPoetryCell(netwithloss, optimizer=optimizer, scale_update_cell=update_cell) - - model = Model(netwithgrads) - model.train(cfg.epoch_num, dataset, callbacks=[callback, ckpoint_cb], dataset_sink_mode=True) - -def test_eval(model_ckpt_path): - '''eval model''' - target = args_opt.device_target - if target == "Ascend": - devid = int(os.getenv('DEVICE_ID')) - context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", device_id=devid) - bert_net_cfg.batch_size = 1 - poetrymodel = 
BertPoetryModel(bert_net_cfg, False, 3191, dropout_prob=0.0) - poetrymodel.set_train(False) - param_dict = load_checkpoint(model_ckpt_path) - load_param_into_net(poetrymodel, param_dict) - - # random generation/continue - start_time = time.time() - output = generate_random_poetry(poetrymodel, s='') - end_to_end_delay = (time.time()-start_time)*1000 - a = re.findall(r'[\u4e00-\u9fa5]*[\uff0c\u3002]', output) - - print("\n**********************************") - print("随机生成: \n") - for poem in a: - print(poem) - print("\ncost time: {:.1f} ms".format(end_to_end_delay)) - print("\n") - - start = "天下为公" - start_time = time.time() - output = generate_random_poetry(poetrymodel, s=start) - end_to_end_delay = (time.time()-start_time)*1000 - a = re.findall(r'[\u4e00-\u9fa5]*[\uff0c\u3002]', output) - - print("\n**********************************") - print("续写 【{}】: \n".format(start)) - for poem in a: - print(poem) - print("\ncost time: {:.1f} ms".format(end_to_end_delay)) - print("\n") - - - - # hidden poetry - s = "人工智能" - start_time = time.time() - output = generate_hidden(poetrymodel, head=s) - end_to_end_delay = (time.time()-start_time)*1000 - a = re.findall(r'[\u4e00-\u9fa5]*[\uff0c\u3002]', output) - print("\n**********************************") - print("藏头诗 【{}】: \n".format(s)) - for poem in a: - print(poem) - print("\ncost time: {:.1f} ms".format(end_to_end_delay)) - print("\n") - - -def export_net(model_ckpt_path): - bert_net_cfg.batch_size = 1 - poetrymodel = BertPoetryModel(bert_net_cfg, False, 3191, dropout_prob=0.0) - poetrymodel.set_train(False) - param_dict = load_checkpoint(model_ckpt_path) - load_param_into_net(poetrymodel, param_dict) - input_id = np.ones(shape=(1, 128)) - token_type_id = np.ones(shape=(1, 128)) - pad_mask = np.ones(shape=(1, 128)) - export(poetrymodel, Tensor(input_id, mstype.int32),\ - Tensor(token_type_id, mstype.int32),\ - Tensor(pad_mask, mstype.float32),\ - file_name='poetry.pb', file_format='MINDIR') - -parser = 
argparse.ArgumentParser(description='Bert finetune') -parser.add_argument('--device_target', type=str, default='Ascend', help='Device target') -parser.add_argument('--train', type=str, default="True", help='train or inference') -parser.add_argument('--ckpt_path', type=str, help='path of your ckpt') -parser.add_argument('--export', type=str, default="False", help="whether export MINDIF") -args_opt = parser.parse_args() -if __name__ == "__main__": - if args_opt.export in ["true", "True", "TRUE"]: - ckpt_path = args_opt.ckpt_path - export_net(ckpt_path) - exit() - - if args_opt.train in ["true", "True", "TRUE"]: - test_train() - else: - ckpt_path = args_opt.ckpt_path - test_eval(ckpt_path) diff --git a/tutorials/tutorial_code/bert_poetry/serving/bert_flask.py b/tutorials/tutorial_code/bert_poetry/serving/bert_flask.py deleted file mode 100644 index 22b586fafb321b831253c4ab0cd54b1400cf9a7c..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/serving/bert_flask.py +++ /dev/null @@ -1,190 +0,0 @@ -'''bert serving in flask''' -import sys -import time -import os -from flask import Flask, request -import grpc -import numpy as np -import ms_service_pb2 -import ms_service_pb2_grpc -from src.poetry_dataset import sequence_padding -from src.poetry_dataset import create_tokenizer - -sys.path.append(os.path.abspath(os.path.join(os.getcwd(), "../"))) - -app = Flask(__name__) -channel = grpc.insecure_channel('localhost:3300') -stub = ms_service_pb2_grpc.MSServiceStub(channel) -_, tokenizer, _ = create_tokenizer() - -def input_construction(request_module, input_ids, segment_ids, pad_mask): - '''construct input format''' - input_ids = input_ids.astype(np.int32) - segment_ids = segment_ids.astype(np.int32) - pad_mask = pad_mask.astype(np.float32) - - request_input_ids = request_module.data.add() - request_input_ids.tensor_shape.dims.extend(list(input_ids.shape)) - request_input_ids.tensor_type = ms_service_pb2.MS_INT32 - request_input_ids.data = 
input_ids.tobytes() - - request_segment_ids = request_module.data.add() - request_segment_ids.tensor_shape.dims.extend(list(segment_ids.shape)) - request_segment_ids.tensor_type = ms_service_pb2.MS_INT32 - request_segment_ids.data = segment_ids.tobytes() - - request_pad_mask = request_module.data.add() - request_pad_mask.tensor_shape.dims.extend(list(pad_mask.shape)) - request_pad_mask.tensor_type = ms_service_pb2.MS_FLOAT32 - request_pad_mask.data = pad_mask.tobytes() - - return request_module, request_input_ids, request_segment_ids, request_pad_mask - -def model_predict(stub_func, request_module): - """model predict method""" - try: - start_time = time.time() - result = stub_func.Predict(request_module) - print("time cost is %s" %(time.time()-start_time)) - result_np = np.frombuffer(result.result[0].data, dtype=np.float32).reshape(result.result[0].tensor_shape.dims) - #print("ms client received: ") - #print(result_np) - except grpc.RpcError as e: - print(e.details()) - status_code = e.code() - print(status_code.name) - print(status_code.value) - exit() - return result_np - -def generate_random_poetry(s, *data): - """generate random poetry""" - token_ids, segment_ids = tokenizer.encode(s) - token_ids = token_ids[:-1] - segment_ids = segment_ids[:-1] - target_ids = [] - - stub_func, request_module, request_input_ids, request_segment_ids, request_pad_mask = data - - MAX_LEN = 64 - length = 128 - while len(token_ids) + len(target_ids) < MAX_LEN: - _target_ids = token_ids + target_ids - _segment_ids = segment_ids + [0 for _ in target_ids] - index = len(_target_ids) - _target_ids = sequence_padding(np.array(_target_ids), length=length) - _segment_ids = sequence_padding(np.array(_segment_ids), length=length) - pad_mask = (_target_ids != 0).astype(np.float32) - _target_ids = _target_ids.astype(np.int32) - _segment_ids = _segment_ids.astype(np.int32) - - request_input_ids.data = _target_ids.tobytes() - request_segment_ids.data = _segment_ids.tobytes() - 
request_pad_mask.data = pad_mask.tobytes() - - _probas = model_predict(stub_func, request_module) - _probas = _probas[0, index-1, 3:] - p_args = _probas.argsort()[::-1][:100] - p = _probas[p_args] - p = p / sum(p) - target_index = np.random.choice(len(p), p=p) - target = p_args[target_index] + 3 - target_ids.append(target) - if target == 3: - break - poetry = tokenizer.decode(token_ids + target_ids) - return poetry - -def generate_hidden(head, *data): - """generate hidden""" - token_ids, segment_ids = tokenizer.encode('') - token_ids = token_ids[:-1] - segment_ids = segment_ids[:-1] - - punctuations = [',', '。'] - punctuation_ids = [tokenizer.token_to_id(token) for token in punctuations] - poetry = [] - length = 128 - - stub_func, request_module, request_input_ids, request_segment_ids, request_pad_mask = data - - for ch in head: - poetry.append(ch) - token_id = tokenizer.token_to_id(ch) - token_ids.append(token_id) - segment_ids.append(0) - while True: - index = len(token_ids) - _target_ids = sequence_padding(np.array(token_ids), length=length) - _segment_ids = sequence_padding(np.array(segment_ids), length=length) - - pad_mask = (_target_ids != 0).astype(np.float32) - _target_ids = _target_ids.astype(np.int32) - _segment_ids = _segment_ids.astype(np.int32) - - request_input_ids.data = _target_ids.tobytes() - request_segment_ids.data = _segment_ids.tobytes() - request_pad_mask.data = pad_mask.tobytes() - - _probas = model_predict(stub_func, request_module) - - _probas = _probas[0, index-1, 3:] - p_args = _probas.argsort()[::-1][:100] - p = _probas[p_args] - p = p / sum(p) - target_index = np.random.choice(len(p), p=p) - target = p_args[target_index] + 3 - token_ids.append(target) - segment_ids.append(0) - if target > 3: - poetry.append(tokenizer.id_to_token(target)) - if target in punctuation_ids: - break - return ''.join(poetry) - -def generate(s='', data_type=0): - """generate""" - if len(sys.argv) > 2: - sys.exit("input error") - channel_str = "" - if 
len(sys.argv) == 2: - split_args = sys.argv[1].split('=') - if len(split_args) > 1: - channel_str = split_args[1] - else: - channel_str = 'localhost:5500' - else: - channel_str = 'localhost:5500' - - serving_channel = grpc.insecure_channel(channel_str) - stub_func = ms_service_pb2_grpc.MSServiceStub(serving_channel) - request_module = ms_service_pb2.PredictRequest() - - _target_ids = np.ones(shape=(1, 128)) - _segment_ids = np.ones(shape=(1, 128)) - pad_mask = np.ones(shape=(1, 128)) - request_module, request_input_ids, request_segment_ids, request_pad_mask = input_construction(\ - request_module, _target_ids, _segment_ids, pad_mask) - if data_type in [0, 1]: - poetry = generate_random_poetry(s, stub_func, request_module, request_input_ids,\ - request_segment_ids, request_pad_mask) - else: - poetry = generate_hidden(s, stub_func, request_module, request_input_ids, request_segment_ids, request_pad_mask) - - print(poetry) - return poetry - -@app.route('/', methods=['POST']) -def bert(): - """bert""" - if request.method == 'POST': - s = request.get_json()['string'] - data_type = request.get_json()['type'] - print("s is {}".format(s)) - print("type is {}".format(data_type)) - poem = generate(s, data_type) - - return poem - -if __name__ == "__main__": - app.run(host="0.0.0.0", port=8080) diff --git a/tutorials/tutorial_code/bert_poetry/serving/ms_service_pb2.py b/tutorials/tutorial_code/bert_poetry/serving/ms_service_pb2.py deleted file mode 100644 index baec07a9898839469b0c1c7a040a91e2a27254bd..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/serving/ms_service_pb2.py +++ /dev/null @@ -1,374 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: ms_service.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='ms_service.proto', - package='ms_serving', - syntax='proto3', - serialized_options=None, - serialized_pb=_b('\n\x10ms_service.proto\x12\nms_serving\"V\n\x0ePredictRequest\x12 \n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x12.ms_serving.Tensor\x12\"\n\x06images\x18\x02 \x03(\x0b\x32\x12.ms_serving.Images\"2\n\x0cPredictReply\x12\"\n\x06result\x18\x01 \x03(\x0b\x32\x12.ms_serving.Tensor\"\x1b\n\x0bTensorShape\x12\x0c\n\x04\x64ims\x18\x01 \x03(\x03\"p\n\x06Tensor\x12-\n\x0ctensor_shape\x18\x01 \x01(\x0b\x32\x17.ms_serving.TensorShape\x12)\n\x0btensor_type\x18\x02 \x01(\x0e\x32\x14.ms_serving.DataType\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\"-\n\x06Images\x12\x0e\n\x06images\x18\x01 \x03(\x0c\x12\x13\n\x0binput_index\x18\x02 \x01(\r*\xc9\x01\n\x08\x44\x61taType\x12\x0e\n\nMS_UNKNOWN\x10\x00\x12\x0b\n\x07MS_BOOL\x10\x01\x12\x0b\n\x07MS_INT8\x10\x02\x12\x0c\n\x08MS_UINT8\x10\x03\x12\x0c\n\x08MS_INT16\x10\x04\x12\r\n\tMS_UINT16\x10\x05\x12\x0c\n\x08MS_INT32\x10\x06\x12\r\n\tMS_UINT32\x10\x07\x12\x0c\n\x08MS_INT64\x10\x08\x12\r\n\tMS_UINT64\x10\t\x12\x0e\n\nMS_FLOAT16\x10\n\x12\x0e\n\nMS_FLOAT32\x10\x0b\x12\x0e\n\nMS_FLOAT64\x10\x0c\x32\x8e\x01\n\tMSService\x12\x41\n\x07Predict\x12\x1a.ms_serving.PredictRequest\x1a\x18.ms_serving.PredictReply\"\x00\x12>\n\x04Test\x12\x1a.ms_serving.PredictRequest\x1a\x18.ms_serving.PredictReply\"\x00\x62\x06proto3') -) - -_DATATYPE = _descriptor.EnumDescriptor( - name='DataType', - full_name='ms_serving.DataType', - 
filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='MS_UNKNOWN', index=0, number=0, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_BOOL', index=1, number=1, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_INT8', index=2, number=2, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_UINT8', index=3, number=3, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_INT16', index=4, number=4, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_UINT16', index=5, number=5, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_INT32', index=6, number=6, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_UINT32', index=7, number=7, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_INT64', index=8, number=8, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_UINT64', index=9, number=9, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_FLOAT16', index=10, number=10, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_FLOAT32', index=11, number=11, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MS_FLOAT64', index=12, number=12, - serialized_options=None, - type=None), - ], - containing_type=None, - serialized_options=None, - serialized_start=363, - serialized_end=564, -) -_sym_db.RegisterEnumDescriptor(_DATATYPE) - -DataType = enum_type_wrapper.EnumTypeWrapper(_DATATYPE) -MS_UNKNOWN = 0 -MS_BOOL = 1 -MS_INT8 = 2 -MS_UINT8 = 3 -MS_INT16 = 4 -MS_UINT16 = 5 -MS_INT32 = 6 -MS_UINT32 = 7 -MS_INT64 = 8 -MS_UINT64 = 9 -MS_FLOAT16 = 10 -MS_FLOAT32 = 11 -MS_FLOAT64 = 12 - - - -_PREDICTREQUEST = _descriptor.Descriptor( - 
name='PredictRequest', - full_name='ms_serving.PredictRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='data', full_name='ms_serving.PredictRequest.data', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='images', full_name='ms_serving.PredictRequest.images', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=32, - serialized_end=118, -) - - -_PREDICTREPLY = _descriptor.Descriptor( - name='PredictReply', - full_name='ms_serving.PredictReply', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='result', full_name='ms_serving.PredictReply.result', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=120, - serialized_end=170, -) - - -_TENSORSHAPE = _descriptor.Descriptor( - name='TensorShape', - full_name='ms_serving.TensorShape', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='dims', 
full_name='ms_serving.TensorShape.dims', index=0, - number=1, type=3, cpp_type=2, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=172, - serialized_end=199, -) - - -_TENSOR = _descriptor.Descriptor( - name='Tensor', - full_name='ms_serving.Tensor', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='tensor_shape', full_name='ms_serving.Tensor.tensor_shape', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='tensor_type', full_name='ms_serving.Tensor.tensor_type', index=1, - number=2, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='data', full_name='ms_serving.Tensor.data', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=201, - serialized_end=313, -) - - -_IMAGES = _descriptor.Descriptor( - name='Images', - full_name='ms_serving.Images', - 
filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='images', full_name='ms_serving.Images.images', index=0, - number=1, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='input_index', full_name='ms_serving.Images.input_index', index=1, - number=2, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=315, - serialized_end=360, -) - -_PREDICTREQUEST.fields_by_name['data'].message_type = _TENSOR -_PREDICTREQUEST.fields_by_name['images'].message_type = _IMAGES -_PREDICTREPLY.fields_by_name['result'].message_type = _TENSOR -_TENSOR.fields_by_name['tensor_shape'].message_type = _TENSORSHAPE -_TENSOR.fields_by_name['tensor_type'].enum_type = _DATATYPE -DESCRIPTOR.message_types_by_name['PredictRequest'] = _PREDICTREQUEST -DESCRIPTOR.message_types_by_name['PredictReply'] = _PREDICTREPLY -DESCRIPTOR.message_types_by_name['TensorShape'] = _TENSORSHAPE -DESCRIPTOR.message_types_by_name['Tensor'] = _TENSOR -DESCRIPTOR.message_types_by_name['Images'] = _IMAGES -DESCRIPTOR.enum_types_by_name['DataType'] = _DATATYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -PredictRequest = _reflection.GeneratedProtocolMessageType('PredictRequest', (_message.Message,), { - 'DESCRIPTOR' : _PREDICTREQUEST, - '__module__' : 'ms_service_pb2' - # @@protoc_insertion_point(class_scope:ms_serving.PredictRequest) - }) -_sym_db.RegisterMessage(PredictRequest) - -PredictReply = 
_reflection.GeneratedProtocolMessageType('PredictReply', (_message.Message,), { - 'DESCRIPTOR' : _PREDICTREPLY, - '__module__' : 'ms_service_pb2' - # @@protoc_insertion_point(class_scope:ms_serving.PredictReply) - }) -_sym_db.RegisterMessage(PredictReply) - -TensorShape = _reflection.GeneratedProtocolMessageType('TensorShape', (_message.Message,), { - 'DESCRIPTOR' : _TENSORSHAPE, - '__module__' : 'ms_service_pb2' - # @@protoc_insertion_point(class_scope:ms_serving.TensorShape) - }) -_sym_db.RegisterMessage(TensorShape) - -Tensor = _reflection.GeneratedProtocolMessageType('Tensor', (_message.Message,), { - 'DESCRIPTOR' : _TENSOR, - '__module__' : 'ms_service_pb2' - # @@protoc_insertion_point(class_scope:ms_serving.Tensor) - }) -_sym_db.RegisterMessage(Tensor) - -Images = _reflection.GeneratedProtocolMessageType('Images', (_message.Message,), { - 'DESCRIPTOR' : _IMAGES, - '__module__' : 'ms_service_pb2' - # @@protoc_insertion_point(class_scope:ms_serving.Images) - }) -_sym_db.RegisterMessage(Images) - - - -_MSSERVICE = _descriptor.ServiceDescriptor( - name='MSService', - full_name='ms_serving.MSService', - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=567, - serialized_end=709, - methods=[ - _descriptor.MethodDescriptor( - name='Predict', - full_name='ms_serving.MSService.Predict', - index=0, - containing_service=None, - input_type=_PREDICTREQUEST, - output_type=_PREDICTREPLY, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='Test', - full_name='ms_serving.MSService.Test', - index=1, - containing_service=None, - input_type=_PREDICTREQUEST, - output_type=_PREDICTREPLY, - serialized_options=None, - ), -]) -_sym_db.RegisterServiceDescriptor(_MSSERVICE) - -DESCRIPTOR.services_by_name['MSService'] = _MSSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/tutorials/tutorial_code/bert_poetry/serving/ms_service_pb2_grpc.py b/tutorials/tutorial_code/bert_poetry/serving/ms_service_pb2_grpc.py deleted file mode 
100644 index 41e0c4f9f34c093cab6f942cedee429dc2dc9622..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/serving/ms_service_pb2_grpc.py +++ /dev/null @@ -1,63 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -import ms_service_pb2 as ms__service__pb2 - - -class MSServiceStub(object): - # missing associated documentation comment in .proto file - pass - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.Predict = channel.unary_unary( - '/ms_serving.MSService/Predict', - request_serializer=ms__service__pb2.PredictRequest.SerializeToString, - response_deserializer=ms__service__pb2.PredictReply.FromString, - ) - self.Test = channel.unary_unary( - '/ms_serving.MSService/Test', - request_serializer=ms__service__pb2.PredictRequest.SerializeToString, - response_deserializer=ms__service__pb2.PredictReply.FromString, - ) - - -class MSServiceServicer(object): - # missing associated documentation comment in .proto file - pass - - def Predict(self, request, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Test(self, request, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_MSServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - 'Predict': grpc.unary_unary_rpc_method_handler( - servicer.Predict, - request_deserializer=ms__service__pb2.PredictRequest.FromString, - response_serializer=ms__service__pb2.PredictReply.SerializeToString, - ), - 'Test': grpc.unary_unary_rpc_method_handler( - servicer.Test, - 
request_deserializer=ms__service__pb2.PredictRequest.FromString, - response_serializer=ms__service__pb2.PredictReply.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'ms_serving.MSService', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/tutorials/tutorial_code/bert_poetry/serving/ms_serving b/tutorials/tutorial_code/bert_poetry/serving/ms_serving deleted file mode 100644 index 50ded3135aaf59fc403c8d34bd795e60021c444b..0000000000000000000000000000000000000000 Binary files a/tutorials/tutorial_code/bert_poetry/serving/ms_serving and /dev/null differ diff --git a/tutorials/tutorial_code/bert_poetry/serving/poetry_client.py b/tutorials/tutorial_code/bert_poetry/serving/poetry_client.py deleted file mode 100644 index 586d20786f44b0790301d2349af014b7ed2601e9..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/serving/poetry_client.py +++ /dev/null @@ -1,36 +0,0 @@ -import time -import json -import requests -import argparse -import re - -headers = {"content-type": "application/json"} -url = 'http://10.155.170.71:8080/' - -while True: - print("\n**********************************") - type = input("选择模式:0-随机生成,1:续写,2:藏头诗\n") - try: - type = int(type) - except: - continue - if type not in [0, 1, 2]: - continue - if type == 1: - s = input("输入首句诗\n") - elif type == 2: - s = input("输入藏头诗\n") - else: - s = '' - - data = json.dumps({'string': s, 'type': type}) - start_time = time.time() - json_response = requests.post(url, data=data, headers=headers) - end_to_end_delay = (time.time()-start_time)*1000 - predictions = json_response.text - a = re.findall(r'[\u4e00-\u9fa5]*[\uff0c\u3002]', predictions) - print("\n") - for poem in a: - print(poem) - print("\ncost time: {:.1f} ms".format(end_to_end_delay)) - diff --git a/tutorials/tutorial_code/bert_poetry/src/__init__.py b/tutorials/tutorial_code/bert_poetry/src/__init__.py deleted file mode 100644 index 
2c9cac57ed2cda8be13b6609917ebe8e7bedadd9..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/src/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""Bert Init.""" -from .bert_for_pre_training import BertNetworkWithLoss, BertPreTraining, \ - BertPretrainingLoss, GetMaskedLMOutput, GetNextSentenceOutput, \ - BertTrainOneStepCell, BertTrainOneStepWithLossScaleCell -from .bert_model import BertAttention, BertConfig, BertEncoderCell, BertModel, \ - BertOutput, BertSelfAttention, BertTransformer, EmbeddingLookup, \ - EmbeddingPostprocessor, RelaPosEmbeddingsGenerator, RelaPosMatrixGenerator, \ - SaturateCast, CreateAttentionMaskFromInputMask -from .poetry_dataset import padding, create_tokenizer -import os, sys - -sys.path.append(os.path.dirname(os.path.realpath(__file__))) - -__all__ = [ - "BertNetworkWithLoss", "BertPreTraining", "BertPretrainingLoss", - "GetMaskedLMOutput", "GetNextSentenceOutput", "BertTrainOneStepCell", "BertTrainOneStepWithLossScaleCell", - "BertAttention", "BertConfig", "BertEncoderCell", "BertModel", "BertOutput", - "BertSelfAttention", "BertTransformer", "EmbeddingLookup", - "EmbeddingPostprocessor", "RelaPosEmbeddingsGenerator", - "RelaPosMatrixGenerator", "SaturateCast", "CreateAttentionMaskFromInputMask", "create_tokenizer", "padding" -] diff --git 
a/tutorials/tutorial_code/bert_poetry/src/bert_for_pre_training.py b/tutorials/tutorial_code/bert_poetry/src/bert_for_pre_training.py deleted file mode 100644 index cc32bdc9da96a8c7912544eb462817f64143ad73..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/src/bert_for_pre_training.py +++ /dev/null @@ -1,438 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""Bert for pretraining.""" -import numpy as np - -import mindspore.nn as nn -from mindspore.common.initializer import initializer, TruncatedNormal -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import Parameter, ParameterTuple -from mindspore import dtype as mstype -from mindspore.nn import DistributedGradReducer -from mindspore.context import ParallelMode -from mindspore.communication import get_group_size -from mindspore import context -from mindspore.ops import _selected_ops -from .bert_model import BertModel - -GRADIENT_CLIP_TYPE = 1 -GRADIENT_CLIP_VALUE = 1.0 - -clip_grad = ops.MultitypeFuncGraph("clip_grad") - - -# pylint: disable=consider-using-in -@clip_grad.register("Number", "Number", "Tensor") -def _clip_grad(clip_type, clip_value, grad): - """ - Clip gradients. - - Inputs: - clip_type (int): The way to clip, 0 for 'value', 1 for 'norm'. - clip_value (float): Specifies how much to clip. 
- grad (tuple[Tensor]): Gradients. - - Outputs: - tuple[Tensor], clipped gradients. - """ - if clip_type != 0 and clip_type != 1: - return grad - dt = ops.dtype(grad) - if clip_type == 0: - new_grad = ops.clip_by_value(grad, ops.cast(ops.tuple_to_array((-clip_value,)), dt), - ops.cast(ops.tuple_to_array((clip_value,)), dt)) - else: - new_grad = nn.ClipByNorm()(grad, ops.cast(ops.tuple_to_array((clip_value,)), dt)) - return new_grad - - -class GetMaskedLMOutput(nn.Cell): - """ - Get masked lm output. - - Args: - config (BertConfig): The config of BertModel. - - Returns: - Tensor, masked lm output. - """ - def __init__(self, config): - super(GetMaskedLMOutput, self).__init__() - self.width = config.hidden_size - self.reshape = ops.Reshape() - self.gather = ops.GatherV2() - - weight_init = TruncatedNormal(config.initializer_range) - self.dense = nn.Dense(self.width, - config.hidden_size, - weight_init=weight_init, - activation=config.hidden_act).to_float(config.compute_type) - self.layernorm = nn.LayerNorm((config.hidden_size,)).to_float(config.compute_type) - self.output_bias = Parameter( - initializer( - 'zero', - config.vocab_size), - name='output_bias') - self.matmul = ops.MatMul(transpose_b=True) - self.log_softmax = nn.LogSoftmax(axis=-1) - self.shape_flat_offsets = (-1, 1) - self.rng = Tensor(np.array(range(0, config.batch_size)).astype(np.int32)) - self.last_idx = (-1,) - self.shape_flat_sequence_tensor = (config.batch_size * config.seq_length, self.width) - self.seq_length_tensor = Tensor(np.array((config.seq_length,)).astype(np.int32)) - self.cast = ops.Cast() - self.compute_type = config.compute_type - self.dtype = config.dtype - - def construct(self, - input_tensor, - output_weights, - positions): - """construct masked lm output""" - flat_offsets = self.reshape( - self.rng * self.seq_length_tensor, self.shape_flat_offsets) - flat_position = self.reshape(positions + flat_offsets, self.last_idx) - flat_sequence_tensor = self.reshape(input_tensor, 
self.shape_flat_sequence_tensor) - input_tensor = self.gather(flat_sequence_tensor, flat_position, 0) - input_tensor = self.cast(input_tensor, self.compute_type) - output_weights = self.cast(output_weights, self.compute_type) - input_tensor = self.dense(input_tensor) - input_tensor = self.layernorm(input_tensor) - logits = self.matmul(input_tensor, output_weights) - logits = self.cast(logits, self.dtype) - logits = logits + self.output_bias - log_probs = self.log_softmax(logits) - return log_probs - - -class GetNextSentenceOutput(nn.Cell): - """ - Get next sentence output. - - Args: - config (BertConfig): The config of Bert. - - Returns: - Tensor, next sentence output. - """ - def __init__(self, config): - super(GetNextSentenceOutput, self).__init__() - self.log_softmax = _selected_ops.LogSoftmax() - weight_init = TruncatedNormal(config.initializer_range) - self.dense = nn.Dense(config.hidden_size, 2, - weight_init=weight_init, has_bias=True).to_float(config.compute_type) - self.dtype = config.dtype - self.cast = ops.Cast() - - def construct(self, input_tensor): - logits = self.dense(input_tensor) - logits = self.cast(logits, self.dtype) - log_prob = self.log_softmax(logits) - return log_prob - - -class BertPreTraining(nn.Cell): - """ - Bert pretraining network. - - Args: - config (BertConfig): The config of BertModel. - is_training (bool): Specifies whether to use the training mode. - use_one_hot_embeddings (bool): Specifies whether to use one-hot for embeddings. - - Returns: - Tensor, prediction_scores, seq_relationship_score. 
- """ - def __init__(self, config, is_training, use_one_hot_embeddings): - super(BertPreTraining, self).__init__() - self.bert = BertModel(config, is_training, use_one_hot_embeddings) - self.cls1 = GetMaskedLMOutput(config) - self.cls2 = GetNextSentenceOutput(config) - - def construct(self, input_ids, input_mask, token_type_id, - masked_lm_positions): - sequence_output, pooled_output, embedding_table = \ - self.bert(input_ids, token_type_id, input_mask) - prediction_scores = self.cls1(sequence_output, - embedding_table, - masked_lm_positions) - seq_relationship_score = self.cls2(pooled_output) - return prediction_scores, seq_relationship_score - - -class BertPretrainingLoss(nn.Cell): - """ - Provide bert pre-training loss. - - Args: - config (BertConfig): The config of BertModel. - - Returns: - Tensor, total loss. - """ - def __init__(self, config): - super(BertPretrainingLoss, self).__init__() - self.vocab_size = config.vocab_size - self.onehot = ops.OneHot() - self.on_value = Tensor(1.0, mstype.float32) - self.off_value = Tensor(0.0, mstype.float32) - self.reduce_sum = ops.ReduceSum() - self.reduce_mean = ops.ReduceMean() - self.reshape = ops.Reshape() - self.last_idx = (-1,) - self.neg = ops.Neg() - self.cast = ops.Cast() - - def construct(self, prediction_scores, seq_relationship_score, masked_lm_ids, - masked_lm_weights, next_sentence_labels): - """Defines the computation performed.""" - label_ids = self.reshape(masked_lm_ids, self.last_idx) - label_weights = self.cast(self.reshape(masked_lm_weights, self.last_idx), mstype.float32) - one_hot_labels = self.onehot(label_ids, self.vocab_size, self.on_value, self.off_value) - - per_example_loss = self.neg(self.reduce_sum(prediction_scores * one_hot_labels, self.last_idx)) - numerator = self.reduce_sum(label_weights * per_example_loss, ()) - denominator = self.reduce_sum(label_weights, ()) + self.cast(ops.tuple_to_array((1e-5,)), mstype.float32) - masked_lm_loss = numerator / denominator - - # next_sentence_loss - 
labels = self.reshape(next_sentence_labels, self.last_idx) - one_hot_labels = self.onehot(labels, 2, self.on_value, self.off_value) - per_example_loss = self.neg(self.reduce_sum( - one_hot_labels * seq_relationship_score, self.last_idx)) - next_sentence_loss = self.reduce_mean(per_example_loss, self.last_idx) - - # total_loss - total_loss = masked_lm_loss + next_sentence_loss - - return total_loss - - -class BertNetworkWithLoss(nn.Cell): - """ - Provide bert pre-training loss through network. - - Args: - config (BertConfig): The config of BertModel. - is_training (bool): Specifies whether to use the training mode. - use_one_hot_embeddings (bool): Specifies whether to use one-hot for embeddings. Default: False. - - Returns: - Tensor, the loss of the network. - """ - def __init__(self, config, is_training, use_one_hot_embeddings=False): - super(BertNetworkWithLoss, self).__init__() - self.bert = BertPreTraining(config, is_training, use_one_hot_embeddings) - self.loss = BertPretrainingLoss(config) - self.cast = ops.Cast() - - def construct(self, - input_ids, - input_mask, - token_type_id, - next_sentence_labels, - masked_lm_positions, - masked_lm_ids, - masked_lm_weights): - """construct bertworkwitlosscell""" - prediction_scores, seq_relationship_score = \ - self.bert(input_ids, input_mask, token_type_id, masked_lm_positions) - total_loss = self.loss(prediction_scores, seq_relationship_score, - masked_lm_ids, masked_lm_weights, next_sentence_labels) - return self.cast(total_loss, mstype.float32) - - -class BertTrainOneStepCell(nn.Cell): - """ - Encapsulation class of bert network training. - - Append an optimizer to the training network after that the construct - function can be called to create the backward graph. - - Args: - network (Cell): The training network. Note that loss function should have been added. - optimizer (Optimizer): Optimizer for updating the weights. - sens (Number): The adjust parameter. Default: 1.0. 
- """ - def __init__(self, network, optimizer, sens=1.0): - super(BertTrainOneStepCell, self).__init__(auto_prefix=False) - self.network = network - self.weights = ParameterTuple(network.trainable_params()) - self.optimizer = optimizer - self.grad = ops.GradOperation(get_by_list=True, sens_param=True) - self.sens = sens - self.reducer_flag = False - self.parallel_mode = context.get_auto_parallel_context("parallel_mode") - if self.parallel_mode in [ParallelMode.DATA_PARALLEL, ParallelMode.HYBRID_PARALLEL]: - self.reducer_flag = True - self.grad_reducer = None - if self.reducer_flag: - mean = context.get_auto_parallel_context("mirror_mean") - degree = get_group_size() - self.grad_reducer = DistributedGradReducer(optimizer.parameters, mean, degree) - - self.cast = ops.Cast() - self.hyper_map = ops.HyperMap() - - def set_sens(self, value): - self.sens = value - - def construct(self, - input_ids, - input_mask, - token_type_id, - next_sentence_labels, - masked_lm_positions, - masked_lm_ids, - masked_lm_weights): - """Defines the computation performed.""" - weights = self.weights - - loss = self.network(input_ids, - input_mask, - token_type_id, - next_sentence_labels, - masked_lm_positions, - masked_lm_ids, - masked_lm_weights) - grads = self.grad(self.network, weights)(input_ids, - input_mask, - token_type_id, - next_sentence_labels, - masked_lm_positions, - masked_lm_ids, - masked_lm_weights, - self.cast(ops.tuple_to_array((self.sens,)), - mstype.float32)) - grads = self.hyper_map(ops.partial(clip_grad, GRADIENT_CLIP_TYPE, GRADIENT_CLIP_VALUE), grads) - if self.reducer_flag: - # apply grad reducer on grads - grads = self.grad_reducer(grads) - succ = self.optimizer(grads) - return ops.depend(loss, succ) - - -grad_scale = ops.MultitypeFuncGraph("grad_scale") -reciprocal = ops.Reciprocal() - - -@grad_scale.register("Tensor", "Tensor") -def tensor_grad_scale(scale, grad): - return grad * reciprocal(scale) - - -class BertTrainOneStepWithLossScaleCell(nn.Cell): - """ - 
Encapsulation class of bert network training. - - Append an optimizer to the training network after that the construct - function can be called to create the backward graph. - - Args: - network (Cell): The training network. Note that loss function should have been added. - optimizer (Optimizer): Optimizer for updating the weights. - scale_update_cell (Cell): Cell to do the loss scale. Default: None. - """ - def __init__(self, network, optimizer, scale_update_cell=None): - super(BertTrainOneStepWithLossScaleCell, self).__init__(auto_prefix=False) - self.network = network - self.weights = ParameterTuple(network.trainable_params()) - self.optimizer = optimizer - self.grad = ops.GradOperation( - get_by_list=True, - sens_param=True) - self.reducer_flag = False - self.allreduce = ops.AllReduce() - self.parallel_mode = context.get_auto_parallel_context("parallel_mode") - if self.parallel_mode in [ParallelMode.DATA_PARALLEL, ParallelMode.HYBRID_PARALLEL]: - self.reducer_flag = True - self.grad_reducer = ops.identity - self.degree = 1 - if self.reducer_flag: - self.degree = get_group_size() - self.grad_reducer = DistributedGradReducer(optimizer.parameters, False, self.degree) - self.is_distributed = (self.parallel_mode != ParallelMode.STAND_ALONE) - self.cast = ops.Cast() - self.alloc_status = ops.NPUAllocFloatStatus() - self.get_status = ops.NPUGetFloatStatus() - self.clear_before_grad = ops.NPUClearFloatStatus() - self.reduce_sum = ops.ReduceSum(keep_dims=False) - self.depend_parameter_use = ops.ControlDepend(depend_mode=1) - self.base = Tensor(1, mstype.float32) - self.less_equal = ops.LessEqual() - self.hyper_map = ops.HyperMap() - self.loss_scale = None - self.loss_scaling_manager = scale_update_cell - if scale_update_cell: - self.loss_scale = Parameter(Tensor(scale_update_cell.get_loss_scale(), dtype=mstype.float32), - name="loss_scale") - - @ops.add_flags(has_effect=True) - def construct(self, - input_ids, - input_mask, - token_type_id, - next_sentence_labels, - 
masked_lm_positions, - masked_lm_ids, - masked_lm_weights, - sens=None): - """Defines the computation performed.""" - weights = self.weights - loss = self.network(input_ids, - input_mask, - token_type_id, - next_sentence_labels, - masked_lm_positions, - masked_lm_ids, - masked_lm_weights) - if sens is None: - scaling_sens = self.loss_scale - else: - scaling_sens = sens - # alloc status and clear should be right before gradoperation - init = self.alloc_status() - self.clear_before_grad(init) - grads = self.grad(self.network, weights)(input_ids, - input_mask, - token_type_id, - next_sentence_labels, - masked_lm_positions, - masked_lm_ids, - masked_lm_weights, - self.cast(scaling_sens, - mstype.float32)) - # apply grad reducer on grads - grads = self.grad_reducer(grads) - grads = self.hyper_map(ops.partial(grad_scale, scaling_sens * self.degree), grads) - grads = self.hyper_map(ops.partial(clip_grad, GRADIENT_CLIP_TYPE, GRADIENT_CLIP_VALUE), grads) - self.get_status(init) - flag_sum = self.reduce_sum(init, (0,)) - if self.is_distributed: - # sum overflow flag over devices - flag_reduce = self.allreduce(flag_sum) - cond = self.less_equal(self.base, flag_reduce) - else: - cond = self.less_equal(self.base, flag_sum) - overflow = cond - if sens is None: - overflow = self.loss_scaling_manager(self.loss_scale, cond) - if overflow: - succ = False - else: - succ = self.optimizer(grads) - ret = (loss, cond, scaling_sens) - return ops.depend(ret, succ) diff --git a/tutorials/tutorial_code/bert_poetry/src/bert_model.py b/tutorials/tutorial_code/bert_poetry/src/bert_model.py deleted file mode 100644 index efdb851920064d05f2f9d977d5f4ed2be031d881..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/src/bert_model.py +++ /dev/null @@ -1,961 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""Bert model.""" - -import math -import copy -import numpy as np -from mindspore import dtype as mstype -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore.common.initializer import TruncatedNormal, initializer -from mindspore import Tensor, Parameter -from .fused_layer_norm import FusedLayerNorm - - -class BertConfig: - """ - Configuration for `BertModel`. - - Args: - batch_size (int): Batch size of input dataset. - seq_length (int): Length of input sequence. Default: 128. - vocab_size (int): The shape of each embedding vector. Default: 32000. - hidden_size (int): Size of the bert encoder layers. Default: 768. - num_hidden_layers (int): Number of hidden layers in the BertTransformer encoder - cell. Default: 12. - num_attention_heads (int): Number of attention heads in the BertTransformer - encoder cell. Default: 12. - intermediate_size (int): Size of intermediate layer in the BertTransformer - encoder cell. Default: 3072. - hidden_act (str): Activation function used in the BertTransformer encoder - cell. Default: "gelu". - hidden_dropout_prob (float): The dropout probability for BertOutput. Default: 0.1. - attention_probs_dropout_prob (float): The dropout probability for - BertAttention. Default: 0.1. - max_position_embeddings (int): Maximum length of sequences used in this - model. Default: 512. - type_vocab_size (int): Size of token type vocab. Default: 16. - initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02. 
- use_relative_positions (bool): Specifies whether to use relative positions. Default: False. - input_mask_from_dataset (bool): Specifies whether to use the input mask that loaded from - dataset. Default: True. - token_type_ids_from_dataset (bool): Specifies whether to use the token type ids that loaded - from dataset. Default: True. - dtype (:class:`mindspore.dtype`): Data type of the input. Default: mstype.float32. - compute_type (:class:`mindspore.dtype`): Compute type in BertTransformer. Default: mstype.float32. - """ - def __init__(self, - batch_size, - seq_length=128, - vocab_size=32000, - hidden_size=768, - num_hidden_layers=12, - num_attention_heads=12, - intermediate_size=3072, - hidden_act="gelu", - hidden_dropout_prob=0.1, - attention_probs_dropout_prob=0.1, - max_position_embeddings=512, - type_vocab_size=16, - initializer_range=0.02, - use_relative_positions=False, - input_mask_from_dataset=True, - token_type_ids_from_dataset=True, - dtype=mstype.float32, - compute_type=mstype.float32, - enable_fused_layernorm=False): - self.batch_size = batch_size - self.seq_length = seq_length - self.vocab_size = vocab_size - self.hidden_size = hidden_size - self.num_hidden_layers = num_hidden_layers - self.num_attention_heads = num_attention_heads - self.hidden_act = hidden_act - self.intermediate_size = intermediate_size - self.hidden_dropout_prob = hidden_dropout_prob - self.attention_probs_dropout_prob = attention_probs_dropout_prob - self.max_position_embeddings = max_position_embeddings - self.type_vocab_size = type_vocab_size - self.initializer_range = initializer_range - self.input_mask_from_dataset = input_mask_from_dataset - self.token_type_ids_from_dataset = token_type_ids_from_dataset - self.use_relative_positions = use_relative_positions - self.dtype = dtype - self.compute_type = compute_type - self.enable_fused_layernorm = enable_fused_layernorm - - -class EmbeddingLookup(nn.Cell): - """ - A embeddings lookup table with a fixed dictionary and size. 
- - Args: - vocab_size (int): Size of the dictionary of embeddings. - embedding_size (int): The size of each embedding vector. - embedding_shape (list): [batch_size, seq_length, embedding_size], the shape of - each embedding vector. - use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False. - initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02. - """ - def __init__(self, - vocab_size, - embedding_size, - embedding_shape, - use_one_hot_embeddings=False, - initializer_range=0.02): - super(EmbeddingLookup, self).__init__() - self.vocab_size = vocab_size - self.use_one_hot_embeddings = use_one_hot_embeddings - self.embedding_table = Parameter(initializer - (TruncatedNormal(initializer_range), - [vocab_size, embedding_size]), - name='embedding_table') - self.expand = ops.ExpandDims() - self.shape_flat = (-1,) - self.gather = ops.GatherV2() - self.one_hot = ops.OneHot() - self.on_value = Tensor(1.0, mstype.float32) - self.off_value = Tensor(0.0, mstype.float32) - self.array_mul = ops.MatMul() - self.reshape = ops.Reshape() - self.shape = tuple(embedding_shape) - - def construct(self, input_ids): - """construct EmbeddingLookup""" - extended_ids = self.expand(input_ids, -1) - flat_ids = self.reshape(extended_ids, self.shape_flat) - if self.use_one_hot_embeddings: - one_hot_ids = self.one_hot(flat_ids, self.vocab_size, self.on_value, self.off_value) - output_for_reshape = self.array_mul( - one_hot_ids, self.embedding_table) - else: - output_for_reshape = self.gather(self.embedding_table, flat_ids, 0) - output = self.reshape(output_for_reshape, self.shape) - return output, self.embedding_table - - -class EmbeddingPostprocessor(nn.Cell): - """ - Postprocessors apply positional and token type embeddings to word embeddings. - - Args: - embedding_size (int): The size of each embedding vector. - embedding_shape (list): [batch_size, seq_length, embedding_size], the shape of - each embedding vector. 
- use_token_type (bool): Specifies whether to use token type embeddings. Default: False. - token_type_vocab_size (int): Size of token type vocab. Default: 16. - use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False. - initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02. - max_position_embeddings (int): Maximum length of sequences used in this - model. Default: 512. - dropout_prob (float): The dropout probability. Default: 0.1. - """ - def __init__(self, - embedding_size, - embedding_shape, - use_relative_positions=False, - use_token_type=False, - token_type_vocab_size=16, - use_one_hot_embeddings=False, - initializer_range=0.02, - max_position_embeddings=512, - dropout_prob=0.1): - super(EmbeddingPostprocessor, self).__init__() - self.use_token_type = use_token_type - self.token_type_vocab_size = token_type_vocab_size - self.use_one_hot_embeddings = use_one_hot_embeddings - self.max_position_embeddings = max_position_embeddings - self.embedding_table = Parameter(initializer - (TruncatedNormal(initializer_range), - [token_type_vocab_size, - embedding_size]), - name='embedding_table') - - self.shape_flat = (-1,) - self.one_hot = ops.OneHot() - self.on_value = Tensor(1.0, mstype.float32) - self.off_value = Tensor(0.1, mstype.float32) - self.array_mul = ops.MatMul() - self.reshape = ops.Reshape() - self.shape = tuple(embedding_shape) - self.layernorm = nn.LayerNorm((embedding_size,)) - self.dropout = nn.Dropout(1 - dropout_prob) - self.gather = ops.GatherV2() - self.use_relative_positions = use_relative_positions - self.slice = ops.StridedSlice() - self.full_position_embeddings = Parameter(initializer - (TruncatedNormal(initializer_range), - [max_position_embeddings, - embedding_size]), - name='full_position_embeddings') - - def construct(self, token_type_ids, word_embeddings): - """construct EmbeddingPostprocessor""" - output = word_embeddings - if self.use_token_type: - flat_ids = 
self.reshape(token_type_ids, self.shape_flat) - if self.use_one_hot_embeddings: - one_hot_ids = self.one_hot(flat_ids, - self.token_type_vocab_size, self.on_value, self.off_value) - token_type_embeddings = self.array_mul(one_hot_ids, - self.embedding_table) - else: - token_type_embeddings = self.gather(self.embedding_table, flat_ids, 0) - token_type_embeddings = self.reshape(token_type_embeddings, self.shape) - output += token_type_embeddings - if not self.use_relative_positions: - _, seq, width = self.shape - position_embeddings = self.slice(self.full_position_embeddings, (0, 0), (seq, width), (1, 1)) - position_embeddings = self.reshape(position_embeddings, (1, seq, width)) - output += position_embeddings - output = self.layernorm(output) - output = self.dropout(output) - return output - - -class BertOutput(nn.Cell): - """ - Apply a linear computation to hidden status and a residual computation to input. - - Args: - in_channels (int): Input channels. - out_channels (int): Output channels. - initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02. - dropout_prob (float): The dropout probability. Default: 0.1. - compute_type (:class:`mindspore.dtype`): Compute type in BertTransformer. Default: mstype.float32. 
- """ - def __init__(self, - in_channels, - out_channels, - initializer_range=0.02, - dropout_prob=0.1, - compute_type=mstype.float32, - enable_fused_layernorm=False): - super(BertOutput, self).__init__() - self.dense = nn.Dense(in_channels, out_channels, - weight_init=TruncatedNormal(initializer_range)).to_float(compute_type) - self.dropout = nn.Dropout(1 - dropout_prob) - self.dropout_prob = dropout_prob - self.add = ops.TensorAdd() - if compute_type == mstype.float16: - self.layernorm = FusedLayerNorm((out_channels,), - use_batch_norm=enable_fused_layernorm).to_float(compute_type) - else: - self.layernorm = nn.LayerNorm((out_channels,)).to_float(compute_type) - self.cast = ops.Cast() - - def construct(self, hidden_status, input_tensor): - output = self.dense(hidden_status) - output = self.dropout(output) - output = self.add(input_tensor, output) - output = self.layernorm(output) - return output - - -class RelaPosMatrixGenerator(nn.Cell): - """ - Generates matrix of relative positions between inputs. - - Args: - length (int): Length of one dim for the matrix to be generated. - max_relative_position (int): Max value of relative position. 
- """ - def __init__(self, length, max_relative_position): - super(RelaPosMatrixGenerator, self).__init__() - self._length = length - self._max_relative_position = Tensor(max_relative_position, dtype=mstype.int32) - self._min_relative_position = Tensor(-max_relative_position, dtype=mstype.int32) - self.range_length = -length + 1 - - self.tile = ops.Tile() - self.range_mat = ops.Reshape() - self.sub = ops.Sub() - self.expanddims = ops.ExpandDims() - self.cast = ops.Cast() - - def construct(self): - """construct RelaPosMatrixGenerator""" - range_vec_row_out = self.cast(ops.tuple_to_array(ops.make_range(self._length)), mstype.int32) - range_vec_col_out = self.range_mat(range_vec_row_out, (self._length, -1)) - tile_row_out = self.tile(range_vec_row_out, (self._length,)) - tile_col_out = self.tile(range_vec_col_out, (1, self._length)) - range_mat_out = self.range_mat(tile_row_out, (self._length, self._length)) - transpose_out = self.range_mat(tile_col_out, (self._length, self._length)) - distance_mat = self.sub(range_mat_out, transpose_out) - - distance_mat_clipped = ops.clip_by_value(distance_mat, - self._min_relative_position, - self._max_relative_position) - - # Shift values to be >=0. Each integer still uniquely identifies a - # relative position difference. - final_mat = distance_mat_clipped + self._max_relative_position - return final_mat - - -class RelaPosEmbeddingsGenerator(nn.Cell): - """ - Generates tensor of size [length, length, depth]. - - Args: - length (int): Length of one dim for the matrix to be generated. - depth (int): Size of each attention head. - max_relative_position (int): Maxmum value of relative position. - initializer_range (float): Initialization value of TruncatedNormal. - use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False. 
- """ - def __init__(self, - length, - depth, - max_relative_position, - initializer_range, - use_one_hot_embeddings=False): - super(RelaPosEmbeddingsGenerator, self).__init__() - self.depth = depth - self.vocab_size = max_relative_position * 2 + 1 - self.use_one_hot_embeddings = use_one_hot_embeddings - - self.embeddings_table = Parameter( - initializer(TruncatedNormal(initializer_range), - [self.vocab_size, self.depth]), - name='embeddings_for_position') - - self.relative_positions_matrix = RelaPosMatrixGenerator(length=length, - max_relative_position=max_relative_position) - self.reshape = ops.Reshape() - self.one_hot = ops.OneHot() - self.on_value = Tensor(1.0, mstype.float32) - self.off_value = Tensor(0.0, mstype.float32) - self.shape = ops.Shape() - self.gather = ops.GatherV2() # index_select - self.matmul = ops.BatchMatMul() - - def construct(self): - """construct RelaPosEmbeddingsGenerator""" - relative_positions_matrix_out = self.relative_positions_matrix() - - # Generate embedding for each relative position of dimension depth. - if self.use_one_hot_embeddings: - flat_relative_positions_matrix = self.reshape(relative_positions_matrix_out, (-1,)) - one_hot_relative_positions_matrix = self.one_hot( - flat_relative_positions_matrix, self.vocab_size, self.on_value, self.off_value) - embeddings = self.matmul(one_hot_relative_positions_matrix, self.embeddings_table) - my_shape = self.shape(relative_positions_matrix_out) + (self.depth,) - embeddings = self.reshape(embeddings, my_shape) - else: - embeddings = self.gather(self.embeddings_table, - relative_positions_matrix_out, 0) - return embeddings - - -class SaturateCast(nn.Cell): - """ - Performs a safe saturating cast. This operation applies proper clamping before casting to prevent - the danger that the value will overflow or underflow. - - Args: - src_type (:class:`mindspore.dtype`): The type of the elements of the input tensor. Default: mstype.float32. 
- dst_type (:class:`mindspore.dtype`): The type of the elements of the output tensor. Default: mstype.float32. - """ - def __init__(self, src_type=mstype.float32, dst_type=mstype.float32): - super(SaturateCast, self).__init__() - np_type = mstype.dtype_to_nptype(dst_type) - min_type = np.finfo(np_type).min - max_type = np.finfo(np_type).max - - self.tensor_min_type = Tensor([min_type], dtype=src_type) - self.tensor_max_type = Tensor([max_type], dtype=src_type) - - self.min_op = ops.Minimum() - self.max_op = ops.Maximum() - self.cast = ops.Cast() - self.dst_type = dst_type - - def construct(self, x): - out = self.max_op(x, self.tensor_min_type) - out = self.min_op(out, self.tensor_max_type) - return self.cast(out, self.dst_type) - - -class BertAttention(nn.Cell): - """ - Apply multi-headed attention from "from_tensor" to "to_tensor". - - Args: - batch_size (int): Batch size of input datasets. - from_tensor_width (int): Size of last dim of from_tensor. - to_tensor_width (int): Size of last dim of to_tensor. - from_seq_length (int): Length of from_tensor sequence. - to_seq_length (int): Length of to_tensor sequence. - num_attention_heads (int): Number of attention heads. Default: 1. - size_per_head (int): Size of each attention head. Default: 512. - query_act (str): Activation function for the query transform. Default: None. - key_act (str): Activation function for the key transform. Default: None. - value_act (str): Activation function for the value transform. Default: None. - has_attention_mask (bool): Specifies whether to use attention mask. Default: False. - attention_probs_dropout_prob (float): The dropout probability for - BertAttention. Default: 0.0. - use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False. - initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02. - do_return_2d_tensor (bool): True for return 2d tensor. False for return 3d - tensor. Default: False. 
- use_relative_positions (bool): Specifies whether to use relative positions. Default: False. - compute_type (:class:`mindspore.dtype`): Compute type in BertAttention. Default: mstype.float32. - """ - def __init__(self, - batch_size, - from_tensor_width, - to_tensor_width, - from_seq_length, - to_seq_length, - num_attention_heads=1, - size_per_head=512, - query_act=None, - key_act=None, - value_act=None, - has_attention_mask=False, - attention_probs_dropout_prob=0.0, - use_one_hot_embeddings=False, - initializer_range=0.02, - do_return_2d_tensor=False, - use_relative_positions=False, - compute_type=mstype.float32): - - super(BertAttention, self).__init__() - self.batch_size = batch_size - self.from_seq_length = from_seq_length - self.to_seq_length = to_seq_length - self.num_attention_heads = num_attention_heads - self.size_per_head = size_per_head - self.has_attention_mask = has_attention_mask - self.use_relative_positions = use_relative_positions - - self.scores_mul = Tensor([1.0 / math.sqrt(float(self.size_per_head))], dtype=compute_type) - self.reshape = ops.Reshape() - self.shape_from_2d = (-1, from_tensor_width) - self.shape_to_2d = (-1, to_tensor_width) - weight = TruncatedNormal(initializer_range) - units = num_attention_heads * size_per_head - self.query_layer = nn.Dense(from_tensor_width, - units, - activation=query_act, - weight_init=weight).to_float(compute_type) - self.key_layer = nn.Dense(to_tensor_width, - units, - activation=key_act, - weight_init=weight).to_float(compute_type) - self.value_layer = nn.Dense(to_tensor_width, - units, - activation=value_act, - weight_init=weight).to_float(compute_type) - - self.shape_from = (batch_size, from_seq_length, num_attention_heads, size_per_head) - self.shape_to = ( - batch_size, to_seq_length, num_attention_heads, size_per_head) - - self.matmul_trans_b = ops.BatchMatMul(transpose_b=True) - self.multiply = ops.Mul() - self.transpose = ops.Transpose() - self.trans_shape = (0, 2, 1, 3) - 
self.trans_shape_relative = (2, 0, 1, 3) - self.trans_shape_position = (1, 2, 0, 3) - #self.multiply_data = Tensor([-10000.0,], dtype=compute_type) - self.multiply_data = Tensor([-10000.0,], dtype=mstype.float32) - self.batch_num = batch_size * num_attention_heads - self.matmul = ops.BatchMatMul() - - self.softmax = nn.Softmax() - self.dropout = nn.Dropout(1 - attention_probs_dropout_prob) - - if self.has_attention_mask: - self.expand_dims = ops.ExpandDims() - self.sub = ops.Sub() - self.add = ops.TensorAdd() - self.cast = ops.Cast() - self.get_dtype = ops.DType() - if do_return_2d_tensor: - self.shape_return = (batch_size * from_seq_length, num_attention_heads * size_per_head) - else: - self.shape_return = (batch_size, from_seq_length, num_attention_heads * size_per_head) - - self.cast_compute_type = SaturateCast(dst_type=compute_type) - if self.use_relative_positions: - self._generate_relative_positions_embeddings = \ - RelaPosEmbeddingsGenerator(length=to_seq_length, - depth=size_per_head, - max_relative_position=16, - initializer_range=initializer_range, - use_one_hot_embeddings=use_one_hot_embeddings) - - def construct(self, from_tensor, to_tensor, attention_mask): - """construct BertAttentio""" - # reshape 2d/3d input tensors to 2d - from_tensor_2d = self.reshape(from_tensor, self.shape_from_2d) - to_tensor_2d = self.reshape(to_tensor, self.shape_to_2d) - query_out = self.query_layer(from_tensor_2d) - key_out = self.key_layer(to_tensor_2d) - value_out = self.value_layer(to_tensor_2d) - - query_layer = self.reshape(query_out, self.shape_from) - query_layer = self.transpose(query_layer, self.trans_shape) - key_layer = self.reshape(key_out, self.shape_to) - key_layer = self.transpose(key_layer, self.trans_shape) - - attention_scores = self.matmul_trans_b(query_layer, key_layer) - - # use_relative_position, supplementary logic - if self.use_relative_positions: - # 'relations_keys' = [F|T, F|T, H] - relations_keys = self._generate_relative_positions_embeddings() - 
relations_keys = self.cast_compute_type(relations_keys) - # query_layer_t is [F, B, N, H] - query_layer_t = self.transpose(query_layer, self.trans_shape_relative) - # query_layer_r is [F, B * N, H] - query_layer_r = self.reshape(query_layer_t, - (self.from_seq_length, - self.batch_num, - self.size_per_head)) - # key_position_scores is [F, B * N, F|T] - key_position_scores = self.matmul_trans_b(query_layer_r, - relations_keys) - # key_position_scores_r is [F, B, N, F|T] - key_position_scores_r = self.reshape(key_position_scores, - (self.from_seq_length, - self.batch_size, - self.num_attention_heads, - self.from_seq_length)) - # key_position_scores_r_t is [B, N, F, F|T] - key_position_scores_r_t = self.transpose(key_position_scores_r, - self.trans_shape_position) - attention_scores = attention_scores + key_position_scores_r_t - - attention_scores = self.multiply(self.scores_mul, attention_scores) - - if self.has_attention_mask: - attention_mask = self.expand_dims(attention_mask, 1) - - attention_scores = self.cast(attention_scores, mstype.float32) - #ops.Print()(attention_scores) - - multiply_out = self.sub(self.cast(ops.tuple_to_array((1.0,)), self.get_dtype(attention_scores)), - self.cast(attention_mask, self.get_dtype(attention_scores))) - - adder = self.multiply(multiply_out, self.multiply_data) - attention_scores = self.add(adder, attention_scores) - #attention_scores = self.cast(attention_scores, mstype.float16) - - attention_probs = self.softmax(attention_scores) - attention_probs = self.cast(attention_probs, mstype.float16) - attention_probs = self.dropout(attention_probs) - - value_layer = self.reshape(value_out, self.shape_to) - value_layer = self.transpose(value_layer, self.trans_shape) - context_layer = self.matmul(attention_probs, value_layer) - - # use_relative_position, supplementary logic - if self.use_relative_positions: - # 'relations_values' = [F|T, F|T, H] - relations_values = self._generate_relative_positions_embeddings() - relations_values = 
self.cast_compute_type(relations_values) - # attention_probs_t is [F, B, N, T] - attention_probs_t = self.transpose(attention_probs, self.trans_shape_relative) - # attention_probs_r is [F, B * N, T] - attention_probs_r = self.reshape( - attention_probs_t, - (self.from_seq_length, - self.batch_num, - self.to_seq_length)) - # value_position_scores is [F, B * N, H] - value_position_scores = self.matmul(attention_probs_r, - relations_values) - # value_position_scores_r is [F, B, N, H] - value_position_scores_r = self.reshape(value_position_scores, - (self.from_seq_length, - self.batch_size, - self.num_attention_heads, - self.size_per_head)) - # value_position_scores_r_t is [B, N, F, H] - value_position_scores_r_t = self.transpose(value_position_scores_r, - self.trans_shape_position) - context_layer = context_layer + value_position_scores_r_t - - context_layer = self.transpose(context_layer, self.trans_shape) - context_layer = self.reshape(context_layer, self.shape_return) - - return context_layer - - -class BertSelfAttention(nn.Cell): - """ - Apply self-attention. - - Args: - batch_size (int): Batch size of input dataset. - seq_length (int): Length of input sequence. - hidden_size (int): Size of the bert encoder layers. - num_attention_heads (int): Number of attention heads. Default: 12. - attention_probs_dropout_prob (float): The dropout probability for - BertAttention. Default: 0.1. - use_one_hot_embeddings (bool): Specifies whether to use one_hot encoding form. Default: False. - initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02. - hidden_dropout_prob (float): The dropout probability for BertOutput. Default: 0.1. - use_relative_positions (bool): Specifies whether to use relative positions. Default: False. - compute_type (:class:`mindspore.dtype`): Compute type in BertSelfAttention. Default: mstype.float32. 
- """ - def __init__(self, - batch_size, - seq_length, - hidden_size, - num_attention_heads=12, - attention_probs_dropout_prob=0.1, - use_one_hot_embeddings=False, - initializer_range=0.02, - hidden_dropout_prob=0.1, - use_relative_positions=False, - compute_type=mstype.float32, - enable_fused_layernorm=False): - super(BertSelfAttention, self).__init__() - if hidden_size % num_attention_heads != 0: - raise ValueError("The hidden size (%d) is not a multiple of the number " - "of attention heads (%d)" % (hidden_size, num_attention_heads)) - - self.size_per_head = int(hidden_size / num_attention_heads) - - self.attention = BertAttention( - batch_size=batch_size, - from_tensor_width=hidden_size, - to_tensor_width=hidden_size, - from_seq_length=seq_length, - to_seq_length=seq_length, - num_attention_heads=num_attention_heads, - size_per_head=self.size_per_head, - attention_probs_dropout_prob=attention_probs_dropout_prob, - use_one_hot_embeddings=use_one_hot_embeddings, - initializer_range=initializer_range, - use_relative_positions=use_relative_positions, - has_attention_mask=True, - do_return_2d_tensor=True, - compute_type=compute_type) - - self.output = BertOutput(in_channels=hidden_size, - out_channels=hidden_size, - initializer_range=initializer_range, - dropout_prob=hidden_dropout_prob, - compute_type=compute_type, - enable_fused_layernorm=enable_fused_layernorm) - self.reshape = ops.Reshape() - self.shape = (-1, hidden_size) - - def construct(self, input_tensor, attention_mask): - input_tensor = self.reshape(input_tensor, self.shape) - attention_output = self.attention(input_tensor, input_tensor, attention_mask) - output = self.output(attention_output, input_tensor) - return output - - -class BertEncoderCell(nn.Cell): - """ - Encoder cells used in BertTransformer. - - Args: - batch_size (int): Batch size of input dataset. - hidden_size (int): Size of the bert encoder layers. Default: 768. - seq_length (int): Length of input sequence. Default: 512. 
- num_attention_heads (int): Number of attention heads. Default: 12. - intermediate_size (int): Size of intermediate layer. Default: 3072. - attention_probs_dropout_prob (float): The dropout probability for - BertAttention. Default: 0.02. - use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False. - initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02. - hidden_dropout_prob (float): The dropout probability for BertOutput. Default: 0.1. - use_relative_positions (bool): Specifies whether to use relative positions. Default: False. - hidden_act (str): Activation function. Default: "gelu". - compute_type (:class:`mindspore.dtype`): Compute type in attention. Default: mstype.float32. - """ - def __init__(self, - batch_size, - hidden_size=768, - seq_length=512, - num_attention_heads=12, - intermediate_size=3072, - attention_probs_dropout_prob=0.02, - use_one_hot_embeddings=False, - initializer_range=0.02, - hidden_dropout_prob=0.1, - use_relative_positions=False, - hidden_act="gelu", - compute_type=mstype.float32, - enable_fused_layernorm=False): - super(BertEncoderCell, self).__init__() - self.attention = BertSelfAttention( - batch_size=batch_size, - hidden_size=hidden_size, - seq_length=seq_length, - num_attention_heads=num_attention_heads, - attention_probs_dropout_prob=attention_probs_dropout_prob, - use_one_hot_embeddings=use_one_hot_embeddings, - initializer_range=initializer_range, - hidden_dropout_prob=hidden_dropout_prob, - use_relative_positions=use_relative_positions, - compute_type=compute_type, - enable_fused_layernorm=enable_fused_layernorm) - self.intermediate = nn.Dense(in_channels=hidden_size, - out_channels=intermediate_size, - activation=hidden_act, - weight_init=TruncatedNormal(initializer_range)).to_float(compute_type) - self.output = BertOutput(in_channels=intermediate_size, - out_channels=hidden_size, - initializer_range=initializer_range, - dropout_prob=hidden_dropout_prob, - 
compute_type=compute_type, - enable_fused_layernorm=enable_fused_layernorm) - - def construct(self, hidden_states, attention_mask): - # self-attention - attention_output = self.attention(hidden_states, attention_mask) - # feed construct - intermediate_output = self.intermediate(attention_output) - # add and normalize - output = self.output(intermediate_output, attention_output) - return output - - -class BertTransformer(nn.Cell): - """ - Multi-layer bert transformer. - - Args: - batch_size (int): Batch size of input dataset. - hidden_size (int): Size of the encoder layers. - seq_length (int): Length of input sequence. - num_hidden_layers (int): Number of hidden layers in encoder cells. - num_attention_heads (int): Number of attention heads in encoder cells. Default: 12. - intermediate_size (int): Size of intermediate layer in encoder cells. Default: 3072. - attention_probs_dropout_prob (float): The dropout probability for - BertAttention. Default: 0.1. - use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False. - initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02. - hidden_dropout_prob (float): The dropout probability for BertOutput. Default: 0.1. - use_relative_positions (bool): Specifies whether to use relative positions. Default: False. - hidden_act (str): Activation function used in the encoder cells. Default: "gelu". - compute_type (:class:`mindspore.dtype`): Compute type in BertTransformer. Default: mstype.float32. - return_all_encoders (bool): Specifies whether to return all encoders. Default: False. 
- """ - def __init__(self, - batch_size, - hidden_size, - seq_length, - num_hidden_layers, - num_attention_heads=12, - intermediate_size=3072, - attention_probs_dropout_prob=0.1, - use_one_hot_embeddings=False, - initializer_range=0.02, - hidden_dropout_prob=0.1, - use_relative_positions=False, - hidden_act="gelu", - compute_type=mstype.float32, - return_all_encoders=False, - enable_fused_layernorm=False): - super(BertTransformer, self).__init__() - self.return_all_encoders = return_all_encoders - - layers = [] - for _ in range(num_hidden_layers): - layer = BertEncoderCell(batch_size=batch_size, - hidden_size=hidden_size, - seq_length=seq_length, - num_attention_heads=num_attention_heads, - intermediate_size=intermediate_size, - attention_probs_dropout_prob=attention_probs_dropout_prob, - use_one_hot_embeddings=use_one_hot_embeddings, - initializer_range=initializer_range, - hidden_dropout_prob=hidden_dropout_prob, - use_relative_positions=use_relative_positions, - hidden_act=hidden_act, - compute_type=compute_type, - enable_fused_layernorm=enable_fused_layernorm) - layers.append(layer) - - self.layers = nn.CellList(layers) - - self.reshape = ops.Reshape() - self.shape = (-1, hidden_size) - self.out_shape = (batch_size, seq_length, hidden_size) - - def construct(self, input_tensor, attention_mask): - """construct BertTransformer""" - prev_output = self.reshape(input_tensor, self.shape) - - all_encoder_layers = () - for layer_module in self.layers: - layer_output = layer_module(prev_output, attention_mask) - prev_output = layer_output - - if self.return_all_encoders: - layer_output = self.reshape(layer_output, self.out_shape) - all_encoder_layers = all_encoder_layers + (layer_output,) - - if not self.return_all_encoders: - prev_output = self.reshape(prev_output, self.out_shape) - all_encoder_layers = all_encoder_layers + (prev_output,) - return all_encoder_layers - - -class CreateAttentionMaskFromInputMask(nn.Cell): - """ - Create attention mask according to input 
mask. - - Args: - config (Class): Configuration for BertModel. - """ - def __init__(self, config): - super(CreateAttentionMaskFromInputMask, self).__init__() - self.input_mask_from_dataset = config.input_mask_from_dataset - self.input_mask = None - - if not self.input_mask_from_dataset: - self.input_mask = initializer( - "ones", [config.batch_size, config.seq_length], mstype.int32).to_tensor() - - self.cast = ops.Cast() - self.reshape = ops.Reshape() - self.shape = (config.batch_size, 1, config.seq_length) - self.broadcast_ones = initializer( - "ones", [config.batch_size, config.seq_length, 1], mstype.float32).to_tensor() - self.batch_matmul = ops.BatchMatMul() - - def construct(self, input_mask): - if not self.input_mask_from_dataset: - input_mask = self.input_mask - - #attention_mask = self.cast(self.reshape(input_mask, self.shape), mstype.float32) - attention_mask = input_mask - return attention_mask - - -class BertModel(nn.Cell): - """ - Bidirectional Encoder Representations from Transformers. - - Args: - config (Class): Configuration for BertModel. - is_training (bool): True for training mode. False for eval mode. - use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False. 
- """ - def __init__(self, - config, - is_training, - use_one_hot_embeddings=False): - super(BertModel, self).__init__() - config = copy.deepcopy(config) - if not is_training: - config.hidden_dropout_prob = 0.0 - config.attention_probs_dropout_prob = 0.0 - - self.input_mask_from_dataset = config.input_mask_from_dataset - self.token_type_ids_from_dataset = config.token_type_ids_from_dataset - self.batch_size = config.batch_size - self.seq_length = config.seq_length - self.hidden_size = config.hidden_size - self.num_hidden_layers = config.num_hidden_layers - self.embedding_size = config.hidden_size - self.token_type_ids = None - - self.last_idx = self.num_hidden_layers - 1 - #output_embedding_shape = [self.batch_size, self.seq_length, - # self.embedding_size] - output_embedding_shape = [-1, self.seq_length, - self.embedding_size] - - if not self.token_type_ids_from_dataset: - self.token_type_ids = initializer( - "zeros", [self.batch_size, self.seq_length], mstype.int32).to_tensor() - - self.bert_embedding_lookup = EmbeddingLookup( - vocab_size=config.vocab_size, - embedding_size=self.embedding_size, - embedding_shape=output_embedding_shape, - use_one_hot_embeddings=use_one_hot_embeddings, - initializer_range=config.initializer_range) - - self.bert_embedding_postprocessor = EmbeddingPostprocessor( - embedding_size=self.embedding_size, - embedding_shape=output_embedding_shape, - use_relative_positions=config.use_relative_positions, - use_token_type=True, - token_type_vocab_size=config.type_vocab_size, - use_one_hot_embeddings=use_one_hot_embeddings, - initializer_range=0.02, - max_position_embeddings=config.max_position_embeddings, - dropout_prob=config.hidden_dropout_prob) - - self.bert_encoder = BertTransformer( - batch_size=self.batch_size, - hidden_size=self.hidden_size, - seq_length=self.seq_length, - num_attention_heads=config.num_attention_heads, - num_hidden_layers=self.num_hidden_layers, - intermediate_size=config.intermediate_size, - 
attention_probs_dropout_prob=config.attention_probs_dropout_prob, - use_one_hot_embeddings=use_one_hot_embeddings, - initializer_range=config.initializer_range, - hidden_dropout_prob=config.hidden_dropout_prob, - use_relative_positions=config.use_relative_positions, - hidden_act=config.hidden_act, - compute_type=config.compute_type, - return_all_encoders=True, - enable_fused_layernorm=config.enable_fused_layernorm) - - self.cast = ops.Cast() - self.dtype = config.dtype - self.cast_compute_type = SaturateCast(dst_type=config.compute_type) - self.slice = ops.StridedSlice() - - self.squeeze_1 = ops.Squeeze(axis=1) - self.dense = nn.Dense(self.hidden_size, self.hidden_size, - activation="tanh", - weight_init=TruncatedNormal(config.initializer_range)).to_float(config.compute_type) - self._create_attention_mask_from_input_mask = CreateAttentionMaskFromInputMask(config) - - def construct(self, input_ids, token_type_ids, input_mask): - """construct BertModel""" - # embedding - if not self.token_type_ids_from_dataset: - token_type_ids = self.token_type_ids - word_embeddings, embedding_tables = self.bert_embedding_lookup(input_ids) - embedding_output = self.bert_embedding_postprocessor(token_type_ids, - word_embeddings) - - # attention mask [batch_size, seq_length, seq_length] - attention_mask = self._create_attention_mask_from_input_mask(input_mask) - - # bert encoder - encoder_output = self.bert_encoder(self.cast_compute_type(embedding_output), - attention_mask) - - sequence_output = self.cast(encoder_output[self.last_idx], self.dtype) - - # pooler - sequence_slice = self.slice(sequence_output, - (0, 0, 0), - (self.batch_size, 1, self.hidden_size), - (1, 1, 1)) - first_token = self.squeeze_1(sequence_slice) - pooled_output = self.dense(first_token) - pooled_output = self.cast(pooled_output, self.dtype) - - return sequence_output, pooled_output, embedding_tables diff --git a/tutorials/tutorial_code/bert_poetry/src/finetune_config.py 
b/tutorials/tutorial_code/bert_poetry/src/finetune_config.py deleted file mode 100644 index 502e5da189f9d77d46c3175cc0ef64fa7a4f29a6..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/src/finetune_config.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ - -""" -config settings, will be used in finetune.py -""" - -from easydict import EasyDict as edict -from mindspore import dtype as mstype -from .bert_model import BertConfig - -bs = 16 - -cfg = edict({ - 'dict_path': './vocab.txt', - 'disallowed_words': ['(', ')', '(', ')', '__', '《', '》', '【', '】', '[', ']'], - 'max_len': 64, - 'min_word_frequency': 8, - 'dataset_path': './poetry.txt', - 'batch_size': bs, - 'epoch_num': 20, - 'ckpt_prefix': 'poetry', - 'ckpt_dir': None, - 'pre_training_ckpt': './bert_converted.ckpt', - 'optimizer': 'AdamWeightDecayDynamicLR', - 'AdamWeightDecay': edict({ - 'learning_rate': 3e-5, - 'end_learning_rate': 1e-10, - 'power': 1.0, - 'weight_decay': 1e-5, - 'eps': 1e-6, - }), - 'Lamb': edict({ - 'start_learning_rate': 2e-5, - 'end_learning_rate': 1e-7, - 'power': 1.0, - 'weight_decay': 0.01, - 'decay_filter': lambda x: False, - }), - 'Momentum': edict({ - 'learning_rate': 2e-5, - 'momentum': 0.9, - }), -}) - -bert_net_cfg = BertConfig( - batch_size=bs, - seq_length=128, - vocab_size=3191, - 
hidden_size=768, - num_hidden_layers=12, - num_attention_heads=12, - intermediate_size=3072, - hidden_act="gelu", - hidden_dropout_prob=0.1, - attention_probs_dropout_prob=0.1, - max_position_embeddings=512, - type_vocab_size=2, - initializer_range=0.02, - use_relative_positions=False, - input_mask_from_dataset=True, - token_type_ids_from_dataset=True, - dtype=mstype.float32, - compute_type=mstype.float16, -) diff --git a/tutorials/tutorial_code/bert_poetry/src/fused_layer_norm.py b/tutorials/tutorial_code/bert_poetry/src/fused_layer_norm.py deleted file mode 100644 index e198fd4ffda8a750df7aad6b52af9c3ffa1e6c1d..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/src/fused_layer_norm.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================ -"""fused layernorm""" -import mindspore.ops as ops -from mindspore import Parameter -from mindspore.common.initializer import initializer -from mindspore.ops import constexpr -from mindspore import dtype as mstype -from mindspore.nn import Cell - -import numpy as np - - -__all__ = ['FusedLayerNorm'] - -@constexpr -def get_shape_for_norm(x_shape, begin_norm_axis): - print("input_shape: ", x_shape) - norm_shape = x_shape[begin_norm_axis:] - output_shape = (1, -1, 1, int(np.prod(norm_shape))) - print("output_shape: ", output_shape) - return output_shape - -class FusedLayerNorm(Cell): - r""" - Applies Layer Normalization over a mini-batch of inputs. - - Layer normalization is widely used in recurrent neural networks. It applies - normalization over a mini-batch of inputs for each single training case as described - in the paper `Layer Normalization `_. Unlike batch - normalization, layer normalization performs exactly the same computation at training and - testing times. It can be described using the following formula. It is applied across all channels - and pixel but only one batch size. - - .. math:: - y = \frac{x - \mathrm{E}[x]}{\sqrt{\mathrm{Var}[x] + \epsilon}} * \gamma + \beta - - Args: - normalized_shape (Union(tuple[int], list[int]): The normalization is performed over axis - `begin_norm_axis ... R - 1`. - begin_norm_axis (int): It first normalization dimension: normalization will be performed along dimensions - `begin_norm_axis: rank(inputs)`, the value should be in [-1, rank(input)). Default: -1. - begin_params_axis (int): The first parameter(beta, gamma)dimension: scale and centering parameters - will have dimensions `begin_params_axis: rank(inputs)` and will be broadcast with - the normalized inputs accordingly, the value should be in [-1, rank(input)). Default: -1. - gamma_init (Union[Tensor, str, Initializer, numbers.Number]): Initializer for the gamma weight. 
- The values of str refer to the function `initializer` including 'zeros', 'ones', 'xavier_uniform', - 'he_uniform', etc. Default: 'ones'. - beta_init (Union[Tensor, str, Initializer, numbers.Number]): Initializer for the beta weight. - The values of str refer to the function `initializer` including 'zeros', 'ones', 'xavier_uniform', - 'he_uniform', etc. Default: 'zeros'. - use_batch_nrom (bool): Whether use batchnorm to process. - - Inputs: - - **input_x** (Tensor) - The shape of 'input_x' is :math:`(x_1, x_2, ..., x_R)`, - and `input_shape[begin_norm_axis:]` is equal to `normalized_shape`. - - Outputs: - Tensor, the normalized and scaled offset tensor, has the same shape and data type as the `input_x`. - - Examples: - >>> x = Tensor(np.ones([20, 5, 10, 10]), mindspore.float32) - >>> shape1 = x.shape[1:] - >>> m = nn.LayerNorm(shape1, begin_norm_axis=1, begin_params_axis=1) - >>> m(x) - """ - def __init__(self, - normalized_shape, - begin_norm_axis=-1, - begin_params_axis=-1, - gamma_init='ones', - beta_init='zeros', - use_batch_norm=False): - super(FusedLayerNorm, self).__init__() - if not isinstance(normalized_shape, (tuple, list)): - raise TypeError("The type of 'normalized_shape' should be tuple[int] or list[int], but '{}' type is {}." 
- .format(normalized_shape, type(normalized_shape))) - self.normalized_shape = normalized_shape - self.begin_norm_axis = begin_norm_axis - self.begin_params_axis = begin_params_axis - self.gamma = Parameter(initializer( - gamma_init, normalized_shape), name="gamma") - self.beta = Parameter(initializer( - beta_init, normalized_shape), name="beta") - self.layer_norm = ops.LayerNorm(begin_norm_axis=self.begin_norm_axis, begin_params_axis=self.begin_params_axis) - - self.batch_norm = ops.BatchNorm(is_training=True, epsilon=1e-5) - self.use_batch_norm = use_batch_norm - - def construct(self, input_x): - """construct FusedLayerNorm cell""" - if self.use_batch_norm and self.training: - ones = ops.Fill()(mstype.float32, ops.shape(input_x)[:self.begin_norm_axis], 1.0) - zeros = ops.Fill()(mstype.float32, ops.shape(input_x)[:self.begin_norm_axis], 0.0) - shape_x = ops.shape(input_x) - norm_shape = get_shape_for_norm(shape_x, self.begin_norm_axis) - input_x = ops.reshape(input_x, norm_shape) - output, _, _, _, _, _ = self.batch_norm(input_x, ones, zeros, None, None) - output = ops.reshape(output, shape_x) - y = output * self.gamma + self.beta - else: - y, _, _ = self.layer_norm(input_x, self.gamma, self.beta) - return y - - def extend_repr(self): - """Display instance object as string.""" - s = 'normalized_shape={}, begin_norm_axis={}, begin_params_axis={}, gamma{}, beta={}'.format( - self.normalized_shape, self.begin_norm_axis, self.begin_params_axis, self.gamma, self.beta) - return s diff --git a/tutorials/tutorial_code/bert_poetry/src/poetry_dataset.py b/tutorials/tutorial_code/bert_poetry/src/poetry_dataset.py deleted file mode 100644 index 3f0d51e4f5a82ff7e0b1b95a79c263fee5100e63..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/src/poetry_dataset.py +++ /dev/null @@ -1,117 +0,0 @@ -""" -poetry dataset processing method -""" -from collections import defaultdict -import numpy as np -import mindspore.dataset as de -import 
mindspore.dataset.transforms.c_transforms as C -from mindspore import dtype as mstype -from .finetune_config import cfg -from .poetry_utils import Tokenizer - -def load_vocab(vacab_path): - token_to_id = {} - with open(vacab_path) as f: - lines = f.readlines() - for i, line in enumerate(lines): - token = line.strip() - token_to_id[token] = i - return token_to_id - -def create_tokenizer(length=128): - """tokenizer processing method""" - dict_path = cfg.dict_path - forbidden_words = cfg.disallowed_words - max_len = cfg.max_len - frequency_threshold = cfg.min_word_frequency - - poetry_src = [] - with open(cfg.dataset_path) as f: - lines = f.readlines() - - for line in lines: - if line.count(':') != 1: - continue - poem = line.split(':') - if len(poem) > 2: - continue - poem = poem[1] - forbidden_poem = [word in poem for word in forbidden_words] - if sum(forbidden_poem) > 0 or len(poem) > max_len-2: - continue - poetry_src.append(poem) - - token_to_id = load_vocab(dict_path) - _tokenizer = Tokenizer(token_to_id, do_lower_case=True) - - token_num_dict = defaultdict(int) - for poem in poetry_src: - for token in _tokenizer.tokenize(poem): - token_num_dict[token] += 1 - - - kept_token = [] - for token, num in token_num_dict.items(): - if num < int(frequency_threshold): - continue - kept_token.append((token, num)) - - kept_token = [token for token, _ in sorted(kept_token, key=lambda x: -x[1])] - - kept_token_id = [] - tokens_id_dict = {} - for i, token in enumerate(['[PAD]', '[UNK]', '[CLS]', '[SEP]']): - tokens_id_dict[token] = i - kept_token_id.append(token_to_id[token]) - - - for i, token in enumerate(kept_token): - if token in token_to_id and token not in tokens_id_dict: - tokens_id_dict[token] = len(tokens_id_dict) - kept_token_id.append(token_to_id[token]) - - tokenizer = Tokenizer(tokens_id_dict, do_lower_case=True) - - return poetry_src, tokenizer, kept_token_id - - -def padding(input_data, length=None): - input_data = np.array(input_data) - padding_length = length 
- input_data.shape[-1] - output = np.pad(input_data, ((0, padding_length)), 'constant', constant_values=0) - return output - -class PoetryDataGenerator(): - """Reconstructing the PoetryDataGenerator processing method""" - def __init__(self, batch_size, poetry, tokenizer, length=128): - self.data = poetry - self.batch_size = batch_size - self.tokenizer = tokenizer - self.length = length - - def __getitem__(self, index): - np.random.shuffle(self.data) - current_data = self.data[index] - - token_ids, segment_ids = self.tokenizer.encode(current_data) - batch_token_ids = padding(token_ids, length=self.length) - batch_segment_ids = padding(segment_ids, length=self.length) - pad_mask = (batch_token_ids != 0).astype(np.float32) - return (batch_token_ids, batch_segment_ids, pad_mask) - - def __len__(self): - return len(self.data) - - -def create_poetry_dataset(batch_size, poetry, tokenizer): - """create poetry dataset method""" - dt = PoetryDataGenerator(batch_size, poetry, tokenizer) - ds = de.GeneratorDataset(dt, ["input_ids", "token_type_id", "pad_mask"]) - #ds.set_dataset_size(dt.__len__()) - int_type_cast_op = C.TypeCast(mstype.int32) - float_type_cast_op = C.TypeCast(mstype.float32) - ds = ds.map(input_columns="input_ids", operations=int_type_cast_op) - ds = ds.map(input_columns="token_type_id", operations=int_type_cast_op) - ds = ds.map(input_columns="pad_mask", operations=float_type_cast_op) - ds = ds.batch(batch_size, drop_remainder=True) - return ds diff --git a/tutorials/tutorial_code/bert_poetry/src/poetry_utils.py b/tutorials/tutorial_code/bert_poetry/src/poetry_utils.py deleted file mode 100644 index a17765b64e1ec7d0afc965b3d2f34ad257a7f83f..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/src/poetry_utils.py +++ /dev/null @@ -1,197 +0,0 @@ -import re -import unicodedata -import numpy as np - -class Tokenizer(object): - def __init__(self, token_dict, do_lower_case=True): - self._do_lower_case = do_lower_case - 
self._token_to_id = token_dict - - self._token_pad = '[PAD]' - self._token_unk = '[UNK]' - self._token_mask = '[MASK]' - self._token_start = '[CLS]' - self._token_end = '[SEP]' - - self._id_to_token = {value: key for key, value in token_dict.items()} - self._vocab_size = len(token_dict) - - - def tokenize(self, text, maxlen=None): - if self._do_lower_case: - text = text.lower() - text = unicodedata.normalize('NFD', text) - text = ''.join([ - ch for ch in text if unicodedata.category(ch) != 'Mn' - ]) - spaced = '' - - src_tokens = [] - for ch in text: - if self._is_punctuation(ch) or self._is_cjk_character(ch): - src_tokens.append(ch) - elif self._is_space(ch) or ord(ch) == 0 or ord(ch) == 0xfffd or self._is_control(ch): - continue - else: - src_tokens.append(ch) - - tokens = [] - for word in src_tokens: - tokens.extend(self._word_piece_tokenize(word)) - - if self._token_start is not None: - tokens.insert(0, self._token_start) - if self._token_end is not None: - tokens.append(self._token_end) - - if maxlen is not None: - index = int(self._token_end is not None) + 1 - self.truncate_sequence(maxlen, tokens, -index) - - return tokens - - - def encode(self, text, maxlen=None): - text = self.tokenize(text) - if maxlen is not None: - self.truncate_sequence(maxlen, text, pop_index=-2) - token_ids = self.token_to_ids(text) - segment_ids = [0] * len(token_ids) - return token_ids, segment_ids - - - def decode(self, ids, tokens=None): - tokens = self.id_to_tokens(ids) - tokens = [token for token in tokens if not self._is_special(token)] - - text = [] - for i, token in enumerate(tokens): - if token.startswith("##"): - text.append(token[2:]) - elif len(token) == 1 and self._is_cjk_character(token): - text.append(token) - elif len(token) == 1 and self._is_punctuation(token): - text.append(token) - text.append(' ') - elif i > 0 and self._is_cjk_character(text[-1]): - text.append(token) - else: - text.append(' ') - text.append(token) - - text = ''.join(text) - - - text = re.sub(' 
+', ' ', text) - text = re.sub('\' (re|m|s|t|ve|d|ll) ', '\'\\1 ', text) - punctuation = self._cjk_punctuation() + '+-/={(<[' - punctuation_regex = '|'.join([re.escape(p) for p in punctuation]) - punctuation_regex = '(%s) ' % punctuation_regex - text = re.sub(punctuation_regex, '\\1', text) - text = re.sub('(\d\.) (\d)', '\\1\\2', text) - - return text.strip() - - def token_to_ids(self, tokens): - ids = [] - unk_ids = self._token_to_id[self._token_unk] - for token in tokens: - ids.append(self._token_to_id.get(token, unk_ids)) - return ids - - - def id_to_tokens(self, ids): - tokens = [] - for index in ids: - tokens.append(self._id_to_token[index]) - return tokens - - def truncate_sequence(self, maxlen, first_sequence, pop_index=-1): - """截断总长度 - """ - - while True: - total_length = len(first_sequence) - if total_length <= maxlen: - break - if np.random.rand() < 0.5: - first_sequence.pop(pop_index) - else: - first_sequence.pop(1) - - - def _word_piece_tokenize(self, word): - """word内分成subword - """ - if word in self._token_to_id: - return [word] - - tokens = [] - start, stop = 0, 0 - while start < len(word): - stop = len(word) - while stop > start: - sub_token = word[start:stop] - if start > 0: - sub_token = ''.join(['##', sub_token]) - if sub_token in self._token_to_id: - break - stop -= 1 - if start == stop: - stop += 1 - tokens.append(sub_token) - start = stop - - return tokens - - @staticmethod - def _is_special(ch): - return bool(ch) and (ch[0] == '[') and (ch[-1] == ']') - - @staticmethod - def _is_punctuation(ch): - """标点符号类字符判断(全/半角均在此内) - 提醒:unicodedata.category这个函数在py2和py3下的 - 表现可能不一样,比如u'§'字符,在py2下的结果为'So', - 在py3下的结果是'Po'。 - """ - code = ord(ch) - return 33 <= code <= 47 or \ - 58 <= code <= 64 or \ - 91 <= code <= 96 or \ - 123 <= code <= 126 or \ - unicodedata.category(ch).startswith('P') - - @staticmethod - def _is_cjk_character(ch): - """CJK类字符判断(包括中文字符也在此列) - 参考:https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block) - """ - code = 
ord(ch) - return 0x4E00 <= code <= 0x9FFF or \ - 0x3400 <= code <= 0x4DBF or \ - 0x20000 <= code <= 0x2A6DF or \ - 0x2A700 <= code <= 0x2B73F or \ - 0x2B740 <= code <= 0x2B81F or \ - 0x2B820 <= code <= 0x2CEAF or \ - 0xF900 <= code <= 0xFAFF or \ - 0x2F800 <= code <= 0x2FA1F - - @staticmethod - def _is_control(ch): - """控制类字符判断 - """ - return unicodedata.category(ch) in ('Cc', 'Cf') - - @staticmethod - def _is_space(ch): - """空格类字符判断 - """ - return ch == ' ' or ch == '\n' or ch == '\r' or ch == '\t' or \ - unicodedata.category(ch) == 'Zs' - - @staticmethod - def _cjk_punctuation(): - return u'\uff02\uff03\uff04\uff05\uff06\uff07\uff08\uff09\uff0a\uff0b\uff0c\uff0d\uff0f\uff1a\uff1b\uff1c\uff1d\uff1e\uff20\uff3b\uff3c\uff3d\uff3e\uff3f\uff40\uff5b\uff5c\uff5d\uff5e\uff5f\uff60\uff62\uff63\uff64\u3000\u3001\u3003\u3008\u3009\u300a\u300b\u300c\u300d\u300e\u300f\u3010\u3011\u3014\u3015\u3016\u3017\u3018\u3019\u301a\u301b\u301c\u301d\u301e\u301f\u3030\u303e\u303f\u2013\u2014\u2018\u2019\u201b\u201c\u201d\u201e\u201f\u2026\u2027\ufe4f\ufe51\ufe54\u00b7\uff01\uff1f\uff61\u3002' - - diff --git a/tutorials/tutorial_code/bert_poetry/src/utils.py b/tutorials/tutorial_code/bert_poetry/src/utils.py deleted file mode 100644 index b7e490072c3d0891577a33c9465687dedf7c8a09..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/src/utils.py +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ - -''' -Functional Cells used in Bert finetune and evaluation. -''' -import numpy as np -import mindspore.nn as nn -from mindspore.common.initializer import TruncatedNormal, initializer -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import Parameter, ParameterTuple -from mindspore import dtype as mstype -from mindspore.nn import DistributedGradReducer -from mindspore.context import ParallelMode -from mindspore.communication import get_group_size -from mindspore import context -from mindspore.nn.learning_rate_schedule import LearningRateSchedule -from mindspore.nn import PolynomialDecayLR, WarmUpLR -from .bert_model import BertModel -from .bert_for_pre_training import clip_grad - -GRADIENT_CLIP_TYPE = 1 -GRADIENT_CLIP_VALUE = 1.0 -grad_scale = ops.MultitypeFuncGraph("grad_scale") -reciprocal = ops.Reciprocal() - -@grad_scale.register("Tensor", "Tensor") -def tensor_grad_scale(scale, grad): - return grad * reciprocal(scale) - -_grad_overflow = ops.MultitypeFuncGraph("_grad_overflow") -grad_overflow = ops.FloatStatus() - -@_grad_overflow.register("Tensor") -def _tensor_grad_overflow(grad): - return grad_overflow(grad) - - -class BertPoetryCell(nn.TrainOneStepWithLossScaleCell): - """ - Specifically defined for finetuning where only four inputs tensor are needed. 
- """ - def __init__(self, network, optimizer, scale_update_cell=None): - - super(BertPoetryCell, self).__init__(network, optimizer, scale_update_cell) - self.network = network - self.weights = ParameterTuple(network.trainable_params()) - self.optimizer = optimizer - self.grad = ops.GradOperation( - get_by_list=True, - sens_param=True) - self.reducer_flag = False - self.allreduce = ops.AllReduce() - self.parallel_mode = context.get_auto_parallel_context("parallel_mode") - if self.parallel_mode in [ParallelMode.DATA_PARALLEL, ParallelMode.HYBRID_PARALLEL]: - self.reducer_flag = True - self.grad_reducer = None - if self.reducer_flag: - mean = context.get_auto_parallel_context("mirror_mean") - degree = get_group_size() - self.grad_reducer = DistributedGradReducer(optimizer.parameters, mean, degree) - self.is_distributed = (self.parallel_mode != ParallelMode.STAND_ALONE) - self.cast = ops.Cast() - self.gpu_target = False - if context.get_context("device_target") == "GPU": - self.gpu_target = True - self.float_status = ops.FloatStatus() - self.addn = ops.AddN() - self.reshape = ops.Reshape() - else: - self.alloc_status = ops.NPUAllocFloatStatus() - self.get_status = ops.NPUGetFloatStatus() - self.clear_before_grad = ops.NPUClearFloatStatus() - self.reduce_sum = ops.ReduceSum(keep_dims=False) - self.base = Tensor(1, mstype.float32) - self.less_equal = ops.LessEqual() - self.hyper_map = ops.HyperMap() - self.loss_scale = None - self.loss_scaling_manager = scale_update_cell - if scale_update_cell: - self.loss_scale = Parameter(Tensor(scale_update_cell.get_loss_scale(), dtype=mstype.float32), - name="loss_scale") - - def construct(self, - input_ids, - token_type_id, - pad_mask, - sens=None): - """construct BertPoetryCell""" - - weights = self.weights - loss = self.network(input_ids, - token_type_id, - pad_mask) - if sens is None: - scaling_sens = self.loss_scale - else: - scaling_sens = sens - status, scaling_sens = self.start_overflow_check(loss, scaling_sens) - - grads = 
self.grad(self.network, weights)(input_ids, - token_type_id, - pad_mask, - self.cast(scaling_sens, - mstype.float32)) - grads = self.hyper_map(ops.partial(grad_scale, scaling_sens), grads) - grads = self.hyper_map(ops.partial(clip_grad, GRADIENT_CLIP_TYPE, GRADIENT_CLIP_VALUE), grads) - if self.reducer_flag: - grads = self.grad_reducer(grads) - cond = self.get_overflow_status(status, grads) - overflow = cond - if sens is None: - overflow = self.loss_scaling_manager(self.loss_scale, cond) - if overflow: - succ = False - else: - succ = self.optimizer(grads) - ret = (loss, cond) - return ops.depend(ret, succ) - - - -class BertPoetryModel(nn.Cell): - """BertPoetryModel""" - def __init__(self, config, is_training, num_tokens, dropout_prob=0.0, use_one_hot_embeddings=False): - super(BertPoetryModel, self).__init__() - self.bert = BertModel(config, is_training, use_one_hot_embeddings) - self.num_tokens = num_tokens - idx = np.arange(config.seq_length) - mask = idx[None, :] <= idx[:, None] - self.mask = Tensor([mask], mstype.float32) - self.MLM_Dense = nn.Dense(config.hidden_size, config.hidden_size,\ - has_bias=True, weight_init=TruncatedNormal(0.02),\ - activation='gelu').to_float(mstype.float16) - self.layer_norm = nn.LayerNorm((config.hidden_size,)) - self.matmul = ops.MatMul(transpose_b=True) - self.biasadd = Parameter(initializer('zero', self.num_tokens), name='MLM_output_biasadd') - self.softmax = ops.Softmax(axis=-1) - self.seq_length = config.seq_length - self.hidden_size = config.hidden_size - self.cast = ops.Cast() - self.reshape = ops.Reshape() - self.batch_matmul = ops.BatchMatMul() - ones = np.ones(shape=(config.batch_size, config.seq_length, config.seq_length)) - self.lower_triangle_mask = Tensor(np.tril(ones), dtype=mstype.float32) - self.multiply = ops.Mul() - - def construct(self, input_ids, token_type_id, input_mask): - """construct BertPoetryModel""" - input_shape = ops.Shape()(input_mask) - shape_right = (input_shape[0], 1, input_shape[1]) - shape_left 
= input_shape + (1,) - input_mask = self.cast(input_mask, mstype.float32) - mask_left = self.reshape(input_mask, shape_left) - mask_right = self.reshape(input_mask, shape_right) - attention_mask = self.batch_matmul(mask_left, mask_right) - attention_mask = self.multiply(attention_mask, self.lower_triangle_mask) - - - sequence_output, _, embedding_tables = self.bert(input_ids, token_type_id, attention_mask) - bert_output = ops.Reshape()(sequence_output, (-1, self.hidden_size)) - MLM_output = self.MLM_Dense(bert_output) - MLM_output = self.layer_norm(MLM_output) - embedding_tables = ops.Cast()(embedding_tables, mstype.float16) - output = self.matmul(MLM_output, embedding_tables) - output = ops.Cast()(output, mstype.float32) - output = output + self.biasadd - output = ops.Reshape()(output, (-1, self.seq_length, self.num_tokens)) - - logits = self.softmax(output) - return logits - - - - -class BertPoetry(nn.Cell): - """BertPoetry""" - def __init__(self, model, config, is_training, dropout_prob=0.0, use_one_hot_embeddings=False): - super(BertPoetry, self).__init__(auto_prefix=False) - self.num_tokens = 3191 - self.poetry = model - self.onehot = ops.OneHot() - self.on_value = Tensor(1.0, mstype.float32) - self.off_value = Tensor(0.0, mstype.float32) - self.reduce_sum = ops.ReduceSum() - self.reduce_mean = ops.ReduceMean() - self.reshape = ops.Reshape() - self.neg = ops.Neg() - self.cast = ops.Cast() - self.last_idx = (-1,) - self.log = ops.Log() - self.max = ops.ArgMaxWithValue(axis=-1) - - def construct(self, input_ids, token_type_id, pad_mask): - """construct BertPoetry""" - logits = self.poetry(input_ids, token_type_id, pad_mask) - logits = logits[:, :127, :] - label_ids = input_ids[:, 1:] - - one_hot_labels = self.onehot(label_ids, self.num_tokens, self.on_value, self.off_value) - per_example_loss = self.neg(self.reduce_sum(one_hot_labels * self.log(logits), self.last_idx)) - loss = per_example_loss * pad_mask[:, 1:] - loss = self.reduce_sum(loss) / 
self.reduce_sum(pad_mask) - return_value = self.cast(loss, mstype.float32) - return return_value - - - -class BertLearningRate(LearningRateSchedule): - """ - Warmup-decay learning rate for Bert network. - """ - def __init__(self, learning_rate, end_learning_rate, warmup_steps, decay_steps, power): - super(BertLearningRate, self).__init__() - self.warmup_flag = False - if warmup_steps > 0: - self.warmup_flag = True - self.warmup_lr = WarmUpLR(learning_rate, warmup_steps) - self.decay_lr = PolynomialDecayLR(learning_rate, end_learning_rate, decay_steps, power) - self.warmup_steps = Tensor(np.array([warmup_steps]).astype(np.float32)) - - self.greater = ops.Greater() - self.one = Tensor(np.array([1.0]).astype(np.float32)) - self.cast = ops.Cast() - - def construct(self, global_step): - """construct BertLearningRate""" - decay_lr = self.decay_lr(global_step) - if self.warmup_flag: - is_warmup = self.cast(self.greater(self.warmup_steps, global_step), mstype.float32) - warmup_lr = self.warmup_lr(global_step) - lr = (self.one - is_warmup) * decay_lr + is_warmup * warmup_lr - else: - lr = decay_lr - return lr diff --git a/tutorials/tutorial_code/bert_poetry/vocab.txt b/tutorials/tutorial_code/bert_poetry/vocab.txt deleted file mode 100644 index 982bf138b8b059201061e005987cf395f0828414..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/bert_poetry/vocab.txt +++ /dev/null @@ -1,21128 +0,0 @@ -[PAD] -[unused1] -[unused2] -[unused3] -[unused4] -[unused5] -[unused6] -[unused7] -[unused8] -[unused9] -[unused10] -[unused11] -[unused12] -[unused13] -[unused14] -[unused15] -[unused16] -[unused17] -[unused18] -[unused19] -[unused20] -[unused21] -[unused22] -[unused23] -[unused24] -[unused25] -[unused26] -[unused27] -[unused28] -[unused29] -[unused30] -[unused31] -[unused32] -[unused33] -[unused34] -[unused35] -[unused36] -[unused37] -[unused38] -[unused39] -[unused40] -[unused41] -[unused42] -[unused43] -[unused44] -[unused45] -[unused46] -[unused47] -[unused48] 
-[unused49] -[unused50] -[unused51] -[unused52] -[unused53] -[unused54] -[unused55] -[unused56] -[unused57] -[unused58] -[unused59] -[unused60] -[unused61] -[unused62] -[unused63] -[unused64] -[unused65] -[unused66] -[unused67] -[unused68] -[unused69] -[unused70] -[unused71] -[unused72] -[unused73] -[unused74] -[unused75] -[unused76] -[unused77] -[unused78] -[unused79] -[unused80] -[unused81] -[unused82] -[unused83] -[unused84] -[unused85] -[unused86] -[unused87] -[unused88] -[unused89] -[unused90] -[unused91] -[unused92] -[unused93] -[unused94] -[unused95] -[unused96] -[unused97] -[unused98] -[unused99] -[UNK] -[CLS] -[SEP] -[MASK] - - -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? -@ -[ -\ -] -^ -_ -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -£ -¤ -¥ -§ -© -« -® -° -± -² -³ -µ -· -¹ -º -» -¼ -× -ß -æ -÷ -ø -đ -ŋ -ɔ -ə -ɡ -ʰ -ˇ -ˈ -ˊ -ˋ -ˍ -ː -˙ -˚ -ˢ -α -β -γ -δ -ε -η -θ -ι -κ -λ -μ -ν -ο -π -ρ -ς -σ -τ -υ -φ -χ -ψ -ω -а -б -в -г -д -е -ж -з -и -к -л -м -н -о -п -р -с -т -у -ф -х -ц -ч -ш -ы -ь -я -і -ا -ب -ة -ت -د -ر -س -ع -ل -م -ن -ه -و -ي -۩ -ก -ง -น -ม -ย -ร -อ -า -เ -๑ -་ -ღ -ᄀ -ᄁ -ᄂ -ᄃ -ᄅ -ᄆ -ᄇ -ᄈ -ᄉ -ᄋ -ᄌ -ᄎ -ᄏ -ᄐ -ᄑ -ᄒ -ᅡ -ᅢ -ᅣ -ᅥ -ᅦ -ᅧ -ᅨ -ᅩ -ᅪ -ᅬ -ᅭ -ᅮ -ᅯ -ᅲ -ᅳ -ᅴ -ᅵ -ᆨ -ᆫ -ᆯ -ᆷ -ᆸ -ᆺ -ᆻ -ᆼ -ᗜ -ᵃ -ᵉ -ᵍ -ᵏ -ᵐ -ᵒ -ᵘ -‖ -„ -† -• -‥ -‧ -
 -‰ -′ -″ -‹ -› -※ -‿ -⁄ -ⁱ -⁺ -ⁿ -₁ -₂ -₃ -₄ -€ -℃ -№ -™ -ⅰ -ⅱ -ⅲ -ⅳ -ⅴ -← -↑ -→ -↓ -↔ -↗ -↘ -⇒ -∀ -− -∕ -∙ -√ -∞ -∟ -∠ -∣ -∥ -∩ -∮ -∶ -∼ -∽ -≈ -≒ -≡ -≤ -≥ -≦ -≧ -≪ -≫ -⊙ -⋅ -⋈ -⋯ -⌒ -① -② -③ -④ -⑤ -⑥ -⑦ -⑧ -⑨ -⑩ -⑴ -⑵ -⑶ -⑷ -⑸ -⒈ -⒉ -⒊ -⒋ -ⓒ -ⓔ -ⓘ -─ -━ -│ -┃ -┅ -┆ -┊ -┌ -└ -├ -┣ -═ -║ -╚ -╞ -╠ -╭ -╮ -╯ -╰ -╱ -╳ -▂ -▃ -▅ -▇ -█ -▉ -▋ -▌ -▍ -▎ -■ -□ -▪ -▫ -▬ -▲ -△ -▶ -► -▼ -▽ -◆ -◇ -○ -◎ -● -◕ -◠ -◢ -◤ -☀ -★ -☆ -☕ -☞ -☺ -☼ -♀ -♂ -♠ -♡ -♣ -♥ -♦ -♪ -♫ -♬ -✈ -✔ -✕ -✖ -✦ -✨ -✪ -✰ -✿ -❀ -❤ -➜ -➤ -⦿ -、 -。 -〃 -々 -〇 -〈 -〉 -《 -》 -「 -」 -『 -』 -【 -】 -〓 -〔 -〕 -〖 -〗 -〜 -〝 -〞 -ぁ -あ -ぃ -い -う -ぇ -え -お -か -き -く -け -こ -さ -し -す -せ -そ -た -ち -っ -つ -て -と -な -に -ぬ -ね -の -は -ひ -ふ -へ -ほ -ま -み -む -め -も -ゃ -や -ゅ -ゆ -ょ -よ -ら -り -る -れ -ろ -わ -を -ん -゜ -ゝ -ァ -ア -ィ -イ -ゥ -ウ -ェ -エ -ォ -オ -カ -キ -ク -ケ -コ -サ -シ -ス -セ -ソ -タ -チ -ッ -ツ -テ -ト -ナ -ニ -ヌ -ネ -ノ -ハ -ヒ -フ -ヘ -ホ -マ -ミ -ム -メ -モ -ャ -ヤ -ュ -ユ -ョ -ヨ -ラ -リ -ル -レ -ロ -ワ -ヲ -ン -ヶ -・ -ー -ヽ -ㄅ -ㄆ -ㄇ -ㄉ -ㄋ -ㄌ -ㄍ -ㄎ -ㄏ -ㄒ -ㄚ -ㄛ -ㄞ -ㄟ -ㄢ -ㄤ -ㄥ -ㄧ -ㄨ -ㆍ -㈦ -㊣ -㎡ -㗎 -一 -丁 -七 -万 -丈 -三 -上 -下 -不 -与 -丐 -丑 -专 -且 -丕 -世 -丘 -丙 -业 -丛 -东 -丝 -丞 -丟 -両 -丢 -两 -严 -並 -丧 -丨 -个 -丫 -中 -丰 -串 -临 -丶 -丸 -丹 -为 -主 -丼 -丽 -举 -丿 -乂 -乃 -久 -么 -义 -之 -乌 -乍 -乎 -乏 -乐 -乒 -乓 -乔 -乖 -乗 -乘 -乙 -乜 -九 -乞 -也 -习 -乡 -书 -乩 -买 -乱 -乳 -乾 -亀 -亂 -了 -予 -争 -事 -二 -于 -亏 -云 -互 -五 -井 -亘 -亙 -亚 -些 -亜 -亞 -亟 -亡 -亢 -交 -亥 -亦 -产 -亨 -亩 -享 -京 -亭 -亮 -亲 -亳 -亵 -人 -亿 -什 -仁 -仃 -仄 -仅 -仆 -仇 -今 -介 -仍 -从 -仏 -仑 -仓 -仔 -仕 -他 -仗 -付 -仙 -仝 -仞 -仟 -代 -令 -以 -仨 -仪 -们 -仮 -仰 -仲 -件 -价 -任 -份 -仿 -企 -伉 -伊 -伍 -伎 -伏 -伐 -休 -伕 -众 -优 -伙 -会 -伝 -伞 -伟 -传 -伢 -伤 -伦 -伪 -伫 -伯 -估 -伴 -伶 -伸 -伺 -似 -伽 -佃 -但 -佇 -佈 -位 -低 -住 -佐 -佑 -体 -佔 -何 -佗 -佘 -余 -佚 -佛 -作 -佝 -佞 -佟 -你 -佢 -佣 -佤 -佥 -佩 -佬 -佯 -佰 -佳 -併 -佶 -佻 -佼 -使 -侃 -侄 -來 -侈 -例 -侍 -侏 -侑 -侖 -侗 -供 -依 -侠 -価 -侣 -侥 -侦 -侧 -侨 -侬 -侮 -侯 -侵 -侶 -侷 -便 -係 -促 -俄 -俊 -俎 -俏 -俐 -俑 -俗 -俘 -俚 -保 -俞 -俟 -俠 -信 -俨 -俩 -俪 -俬 -俭 -修 -俯 -俱 -俳 -俸 -俺 -俾 -倆 -倉 -個 -倌 -倍 -倏 -們 -倒 -倔 -倖 -倘 -候 -倚 -倜 -借 -倡 -値 -倦 -倩 -倪 -倫 -倬 -倭 -倶 -债 -值 -倾 -偃 -假 -偈 -偉 -偌 -偎 -偏 -偕 -做 -停 -健 -側 -偵 -偶 -偷 -偻 -偽 -偿 -傀 -傅 -傍 -傑 -傘 -備 -傚 -傢 -傣 -傥 -储 -傩 -催 -傭 -傲 -傳 -債 -傷 -傻 -傾 -僅 -働 -像 -僑 
-僕 -僖 -僚 -僥 -僧 -僭 -僮 -僱 -僵 -價 -僻 -儀 -儂 -億 -儆 -儉 -儋 -儒 -儕 -儘 -償 -儡 -優 -儲 -儷 -儼 -儿 -兀 -允 -元 -兄 -充 -兆 -兇 -先 -光 -克 -兌 -免 -児 -兑 -兒 -兔 -兖 -党 -兜 -兢 -入 -內 -全 -兩 -八 -公 -六 -兮 -兰 -共 -兲 -关 -兴 -兵 -其 -具 -典 -兹 -养 -兼 -兽 -冀 -内 -円 -冇 -冈 -冉 -冊 -册 -再 -冏 -冒 -冕 -冗 -写 -军 -农 -冠 -冢 -冤 -冥 -冨 -冪 -冬 -冯 -冰 -冲 -决 -况 -冶 -冷 -冻 -冼 -冽 -冾 -净 -凄 -准 -凇 -凈 -凉 -凋 -凌 -凍 -减 -凑 -凛 -凜 -凝 -几 -凡 -凤 -処 -凪 -凭 -凯 -凰 -凱 -凳 -凶 -凸 -凹 -出 -击 -函 -凿 -刀 -刁 -刃 -分 -切 -刈 -刊 -刍 -刎 -刑 -划 -列 -刘 -则 -刚 -创 -初 -删 -判 -別 -刨 -利 -刪 -别 -刮 -到 -制 -刷 -券 -刹 -刺 -刻 -刽 -剁 -剂 -剃 -則 -剉 -削 -剋 -剌 -前 -剎 -剐 -剑 -剔 -剖 -剛 -剜 -剝 -剣 -剤 -剥 -剧 -剩 -剪 -副 -割 -創 -剷 -剽 -剿 -劃 -劇 -劈 -劉 -劊 -劍 -劏 -劑 -力 -劝 -办 -功 -加 -务 -劣 -动 -助 -努 -劫 -劭 -励 -劲 -劳 -労 -劵 -効 -劾 -势 -勁 -勃 -勇 -勉 -勋 -勐 -勒 -動 -勖 -勘 -務 -勛 -勝 -勞 -募 -勢 -勤 -勧 -勳 -勵 -勸 -勺 -勻 -勾 -勿 -匀 -包 -匆 -匈 -匍 -匐 -匕 -化 -北 -匙 -匝 -匠 -匡 -匣 -匪 -匮 -匯 -匱 -匹 -区 -医 -匾 -匿 -區 -十 -千 -卅 -升 -午 -卉 -半 -卍 -华 -协 -卑 -卒 -卓 -協 -单 -卖 -南 -単 -博 -卜 -卞 -卟 -占 -卡 -卢 -卤 -卦 -卧 -卫 -卮 -卯 -印 -危 -即 -却 -卵 -卷 -卸 -卻 -卿 -厂 -厄 -厅 -历 -厉 -压 -厌 -厕 -厘 -厚 -厝 -原 -厢 -厥 -厦 -厨 -厩 -厭 -厮 -厲 -厳 -去 -县 -叁 -参 -參 -又 -叉 -及 -友 -双 -反 -収 -发 -叔 -取 -受 -变 -叙 -叛 -叟 -叠 -叡 -叢 -口 -古 -句 -另 -叨 -叩 -只 -叫 -召 -叭 -叮 -可 -台 -叱 -史 -右 -叵 -叶 -号 -司 -叹 -叻 -叼 -叽 -吁 -吃 -各 -吆 -合 -吉 -吊 -吋 -同 -名 -后 -吏 -吐 -向 -吒 -吓 -吕 -吖 -吗 -君 -吝 -吞 -吟 -吠 -吡 -否 -吧 -吨 -吩 -含 -听 -吭 -吮 -启 -吱 -吳 -吴 -吵 -吶 -吸 -吹 -吻 -吼 -吽 -吾 -呀 -呂 -呃 -呆 -呈 -告 -呋 -呎 -呐 -呓 -呕 -呗 -员 -呛 -呜 -呢 -呤 -呦 -周 -呱 -呲 -味 -呵 -呷 -呸 -呻 -呼 -命 -咀 -咁 -咂 -咄 -咆 -咋 -和 -咎 -咏 -咐 -咒 -咔 -咕 -咖 -咗 -咘 -咙 -咚 -咛 -咣 -咤 -咦 -咧 -咨 -咩 -咪 -咫 -咬 -咭 -咯 -咱 -咲 -咳 -咸 -咻 -咽 -咿 -哀 -品 -哂 -哄 -哆 -哇 -哈 -哉 -哋 -哌 -响 -哎 -哏 -哐 -哑 -哒 -哔 -哗 -哟 -員 -哥 -哦 -哧 -哨 -哩 -哪 -哭 -哮 -哲 -哺 -哼 -哽 -唁 -唄 -唆 -唇 -唉 -唏 -唐 -唑 -唔 -唠 -唤 -唧 -唬 -售 -唯 -唰 -唱 -唳 -唷 -唸 -唾 -啃 -啄 -商 -啉 -啊 -問 -啓 -啕 -啖 -啜 -啞 -啟 -啡 -啤 -啥 -啦 -啧 -啪 -啫 -啬 -啮 -啰 -啱 -啲 -啵 -啶 -啷 -啸 -啻 -啼 -啾 -喀 -喂 -喃 -善 -喆 -喇 -喉 -喊 -喋 -喎 -喏 -喔 -喘 -喙 -喚 -喜 -喝 -喟 -喧 -喪 -喫 -喬 -單 -喰 -喱 -喲 -喳 -喵 -営 -喷 -喹 -喺 -喻 -喽 -嗅 -嗆 -嗇 -嗎 -嗑 -嗒 -嗓 -嗔 -嗖 -嗚 -嗜 -嗝 -嗟 -嗡 -嗣 -嗤 -嗦 -嗨 -嗪 -嗬 -嗯 -嗰 -嗲 -嗳 -嗶 -嗷 -嗽 -嘀 -嘅 -嘆 -嘈 -嘉 -嘌 -嘍 -嘎 -嘔 -嘖 -嘗 -嘘 -嘚 -嘛 -嘜 -嘞 -嘟 -嘢 -嘣 -嘤 -嘧 -嘩 -嘭 -嘮 -嘯 -嘰 -嘱 -嘲 -嘴 -嘶 -嘸 
-嘹 -嘻 -嘿 -噁 -噌 -噎 -噓 -噔 -噗 -噙 -噜 -噠 -噢 -噤 -器 -噩 -噪 -噬 -噱 -噴 -噶 -噸 -噹 -噻 -噼 -嚀 -嚇 -嚎 -嚏 -嚐 -嚓 -嚕 -嚟 -嚣 -嚥 -嚨 -嚮 -嚴 -嚷 -嚼 -囂 -囉 -囊 -囍 -囑 -囔 -囗 -囚 -四 -囝 -回 -囟 -因 -囡 -团 -団 -囤 -囧 -囪 -囫 -园 -困 -囱 -囲 -図 -围 -囹 -固 -国 -图 -囿 -圃 -圄 -圆 -圈 -國 -圍 -圏 -園 -圓 -圖 -團 -圜 -土 -圣 -圧 -在 -圩 -圭 -地 -圳 -场 -圻 -圾 -址 -坂 -均 -坊 -坍 -坎 -坏 -坐 -坑 -块 -坚 -坛 -坝 -坞 -坟 -坠 -坡 -坤 -坦 -坨 -坪 -坯 -坳 -坵 -坷 -垂 -垃 -垄 -型 -垒 -垚 -垛 -垠 -垢 -垣 -垦 -垩 -垫 -垭 -垮 -垵 -埂 -埃 -埋 -城 -埔 -埕 -埗 -域 -埠 -埤 -埵 -執 -埸 -培 -基 -埼 -堀 -堂 -堃 -堅 -堆 -堇 -堑 -堕 -堙 -堡 -堤 -堪 -堯 -堰 -報 -場 -堵 -堺 -堿 -塊 -塌 -塑 -塔 -塗 -塘 -塚 -塞 -塢 -塩 -填 -塬 -塭 -塵 -塾 -墀 -境 -墅 -墉 -墊 -墒 -墓 -増 -墘 -墙 -墜 -增 -墟 -墨 -墩 -墮 -墳 -墻 -墾 -壁 -壅 -壆 -壇 -壊 -壑 -壓 -壕 -壘 -壞 -壟 -壢 -壤 -壩 -士 -壬 -壮 -壯 -声 -売 -壳 -壶 -壹 -壺 -壽 -处 -备 -変 -复 -夏 -夔 -夕 -外 -夙 -多 -夜 -够 -夠 -夢 -夥 -大 -天 -太 -夫 -夭 -央 -夯 -失 -头 -夷 -夸 -夹 -夺 -夾 -奂 -奄 -奇 -奈 -奉 -奋 -奎 -奏 -奐 -契 -奔 -奕 -奖 -套 -奘 -奚 -奠 -奢 -奥 -奧 -奪 -奬 -奮 -女 -奴 -奶 -奸 -她 -好 -如 -妃 -妄 -妆 -妇 -妈 -妊 -妍 -妒 -妓 -妖 -妘 -妙 -妝 -妞 -妣 -妤 -妥 -妨 -妩 -妪 -妮 -妲 -妳 -妹 -妻 -妾 -姆 -姉 -姊 -始 -姍 -姐 -姑 -姒 -姓 -委 -姗 -姚 -姜 -姝 -姣 -姥 -姦 -姨 -姪 -姫 -姬 -姹 -姻 -姿 -威 -娃 -娄 -娅 -娆 -娇 -娉 -娑 -娓 -娘 -娛 -娜 -娟 -娠 -娣 -娥 -娩 -娱 -娲 -娴 -娶 -娼 -婀 -婁 -婆 -婉 -婊 -婕 -婚 -婢 -婦 -婧 -婪 -婭 -婴 -婵 -婶 -婷 -婺 -婿 -媒 -媚 -媛 -媞 -媧 -媲 -媳 -媽 -媾 -嫁 -嫂 -嫉 -嫌 -嫑 -嫔 -嫖 -嫘 -嫚 -嫡 -嫣 -嫦 -嫩 -嫲 -嫵 -嫻 -嬅 -嬉 -嬌 -嬗 -嬛 -嬢 -嬤 -嬪 -嬰 -嬴 -嬷 -嬸 -嬿 -孀 -孃 -子 -孑 -孔 -孕 -孖 -字 -存 -孙 -孚 -孛 -孜 -孝 -孟 -孢 -季 -孤 -学 -孩 -孪 -孫 -孬 -孰 -孱 -孳 -孵 -學 -孺 -孽 -孿 -宁 -它 -宅 -宇 -守 -安 -宋 -完 -宏 -宓 -宕 -宗 -官 -宙 -定 -宛 -宜 -宝 -实 -実 -宠 -审 -客 -宣 -室 -宥 -宦 -宪 -宫 -宮 -宰 -害 -宴 -宵 -家 -宸 -容 -宽 -宾 -宿 -寂 -寄 -寅 -密 -寇 -富 -寐 -寒 -寓 -寛 -寝 -寞 -察 -寡 -寢 -寥 -實 -寧 -寨 -審 -寫 -寬 -寮 -寰 -寵 -寶 -寸 -对 -寺 -寻 -导 -対 -寿 -封 -専 -射 -将 -將 -專 -尉 -尊 -尋 -對 -導 -小 -少 -尔 -尕 -尖 -尘 -尚 -尝 -尤 -尧 -尬 -就 -尴 -尷 -尸 -尹 -尺 -尻 -尼 -尽 -尾 -尿 -局 -屁 -层 -屄 -居 -屆 -屈 -屉 -届 -屋 -屌 -屍 -屎 -屏 -屐 -屑 -展 -屜 -属 -屠 -屡 -屢 -層 -履 -屬 -屯 -山 -屹 -屿 -岀 -岁 -岂 -岌 -岐 -岑 -岔 -岖 -岗 -岘 -岙 -岚 -岛 -岡 -岩 -岫 -岬 -岭 -岱 -岳 -岷 -岸 -峇 -峋 -峒 -峙 -峡 -峤 -峥 -峦 -峨 -峪 -峭 -峯 -峰 -峴 -島 -峻 -峽 -崁 -崂 -崆 -崇 -崎 -崑 -崔 -崖 -崗 -崙 -崛 -崧 -崩 -崭 -崴 -崽 -嵇 -嵊 -嵋 -嵌 -嵐 -嵘 -嵩 -嵬 -嵯 -嶂 -嶄 -嶇 -嶋 -嶙 -嶺 -嶼 -嶽 -巅 -巍 -巒 -巔 -巖 -川 -州 -巡 -巢 -工 -左 -巧 
-巨 -巩 -巫 -差 -己 -已 -巳 -巴 -巷 -巻 -巽 -巾 -巿 -币 -市 -布 -帅 -帆 -师 -希 -帐 -帑 -帕 -帖 -帘 -帚 -帛 -帜 -帝 -帥 -带 -帧 -師 -席 -帮 -帯 -帰 -帳 -帶 -帷 -常 -帼 -帽 -幀 -幂 -幄 -幅 -幌 -幔 -幕 -幟 -幡 -幢 -幣 -幫 -干 -平 -年 -并 -幸 -幹 -幺 -幻 -幼 -幽 -幾 -广 -庁 -広 -庄 -庆 -庇 -床 -序 -庐 -库 -应 -底 -庖 -店 -庙 -庚 -府 -庞 -废 -庠 -度 -座 -庫 -庭 -庵 -庶 -康 -庸 -庹 -庾 -廁 -廂 -廃 -廈 -廉 -廊 -廓 -廖 -廚 -廝 -廟 -廠 -廢 -廣 -廬 -廳 -延 -廷 -建 -廿 -开 -弁 -异 -弃 -弄 -弈 -弊 -弋 -式 -弑 -弒 -弓 -弔 -引 -弗 -弘 -弛 -弟 -张 -弥 -弦 -弧 -弩 -弭 -弯 -弱 -張 -強 -弹 -强 -弼 -弾 -彅 -彆 -彈 -彌 -彎 -归 -当 -录 -彗 -彙 -彝 -形 -彤 -彥 -彦 -彧 -彩 -彪 -彫 -彬 -彭 -彰 -影 -彷 -役 -彻 -彼 -彿 -往 -征 -径 -待 -徇 -很 -徉 -徊 -律 -後 -徐 -徑 -徒 -従 -徕 -得 -徘 -徙 -徜 -從 -徠 -御 -徨 -復 -循 -徬 -微 -徳 -徴 -徵 -德 -徹 -徼 -徽 -心 -必 -忆 -忌 -忍 -忏 -忐 -忑 -忒 -忖 -志 -忘 -忙 -応 -忠 -忡 -忤 -忧 -忪 -快 -忱 -念 -忻 -忽 -忿 -怀 -态 -怂 -怅 -怆 -怎 -怏 -怒 -怔 -怕 -怖 -怙 -怜 -思 -怠 -怡 -急 -怦 -性 -怨 -怪 -怯 -怵 -总 -怼 -恁 -恃 -恆 -恋 -恍 -恐 -恒 -恕 -恙 -恚 -恢 -恣 -恤 -恥 -恨 -恩 -恪 -恫 -恬 -恭 -息 -恰 -恳 -恵 -恶 -恸 -恺 -恻 -恼 -恿 -悄 -悅 -悉 -悌 -悍 -悔 -悖 -悚 -悟 -悠 -患 -悦 -您 -悩 -悪 -悬 -悯 -悱 -悲 -悴 -悵 -悶 -悸 -悻 -悼 -悽 -情 -惆 -惇 -惊 -惋 -惑 -惕 -惘 -惚 -惜 -惟 -惠 -惡 -惦 -惧 -惨 -惩 -惫 -惬 -惭 -惮 -惯 -惰 -惱 -想 -惴 -惶 -惹 -惺 -愁 -愆 -愈 -愉 -愍 -意 -愕 -愚 -愛 -愜 -感 -愣 -愤 -愧 -愫 -愷 -愿 -慄 -慈 -態 -慌 -慎 -慑 -慕 -慘 -慚 -慟 -慢 -慣 -慧 -慨 -慫 -慮 -慰 -慳 -慵 -慶 -慷 -慾 -憂 -憊 -憋 -憎 -憐 -憑 -憔 -憚 -憤 -憧 -憨 -憩 -憫 -憬 -憲 -憶 -憾 -懂 -懇 -懈 -應 -懊 -懋 -懑 -懒 -懦 -懲 -懵 -懶 -懷 -懸 -懺 -懼 -懾 -懿 -戀 -戈 -戊 -戌 -戍 -戎 -戏 -成 -我 -戒 -戕 -或 -战 -戚 -戛 -戟 -戡 -戦 -截 -戬 -戮 -戰 -戲 -戳 -戴 -戶 -户 -戸 -戻 -戾 -房 -所 -扁 -扇 -扈 -扉 -手 -才 -扎 -扑 -扒 -打 -扔 -払 -托 -扛 -扣 -扦 -执 -扩 -扪 -扫 -扬 -扭 -扮 -扯 -扰 -扱 -扳 -扶 -批 -扼 -找 -承 -技 -抄 -抉 -把 -抑 -抒 -抓 -投 -抖 -抗 -折 -抚 -抛 -抜 -択 -抟 -抠 -抡 -抢 -护 -报 -抨 -披 -抬 -抱 -抵 -抹 -押 -抽 -抿 -拂 -拄 -担 -拆 -拇 -拈 -拉 -拋 -拌 -拍 -拎 -拐 -拒 -拓 -拔 -拖 -拗 -拘 -拙 -拚 -招 -拜 -拟 -拡 -拢 -拣 -拥 -拦 -拧 -拨 -择 -括 -拭 -拮 -拯 -拱 -拳 -拴 -拷 -拼 -拽 -拾 -拿 -持 -挂 -指 -挈 -按 -挎 -挑 -挖 -挙 -挚 -挛 -挝 -挞 -挟 -挠 -挡 -挣 -挤 -挥 -挨 -挪 -挫 -振 -挲 -挹 -挺 -挽 -挾 -捂 -捅 -捆 -捉 -捋 -捌 -捍 -捎 -捏 -捐 -捕 -捞 -损 -捡 -换 -捣 -捧 -捨 -捩 -据 -捱 -捲 -捶 -捷 -捺 -捻 -掀 -掂 -掃 -掇 -授 -掉 -掌 -掏 -掐 -排 -掖 -掘 -掙 -掛 -掠 -採 -探 -掣 -接 -控 -推 -掩 -措 -掬 -掰 -掲 -掳 -掴 -掷 -掸 -掺 -揀 -揃 -揄 -揆 -揉 -揍 -描 -提 -插 -揖 -揚 -換 -握 -揣 -揩 -揪 -揭 -揮 -援 -揶 -揸 -揹 -揽 -搀 -搁 
-搂 -搅 -損 -搏 -搐 -搓 -搔 -搖 -搗 -搜 -搞 -搡 -搪 -搬 -搭 -搵 -搶 -携 -搽 -摀 -摁 -摄 -摆 -摇 -摈 -摊 -摒 -摔 -摘 -摞 -摟 -摧 -摩 -摯 -摳 -摸 -摹 -摺 -摻 -撂 -撃 -撅 -撇 -撈 -撐 -撑 -撒 -撓 -撕 -撚 -撞 -撤 -撥 -撩 -撫 -撬 -播 -撮 -撰 -撲 -撵 -撷 -撸 -撻 -撼 -撿 -擀 -擁 -擂 -擄 -擅 -擇 -擊 -擋 -操 -擎 -擒 -擔 -擘 -據 -擞 -擠 -擡 -擢 -擦 -擬 -擰 -擱 -擲 -擴 -擷 -擺 -擼 -擾 -攀 -攏 -攒 -攔 -攘 -攙 -攜 -攝 -攞 -攢 -攣 -攤 -攥 -攪 -攫 -攬 -支 -收 -攸 -改 -攻 -放 -政 -故 -效 -敌 -敍 -敎 -敏 -救 -敕 -敖 -敗 -敘 -教 -敛 -敝 -敞 -敢 -散 -敦 -敬 -数 -敲 -整 -敵 -敷 -數 -斂 -斃 -文 -斋 -斌 -斎 -斐 -斑 -斓 -斗 -料 -斛 -斜 -斟 -斡 -斤 -斥 -斧 -斩 -斫 -斬 -断 -斯 -新 -斷 -方 -於 -施 -旁 -旃 -旅 -旋 -旌 -旎 -族 -旖 -旗 -无 -既 -日 -旦 -旧 -旨 -早 -旬 -旭 -旮 -旱 -时 -旷 -旺 -旻 -昀 -昂 -昆 -昇 -昉 -昊 -昌 -明 -昏 -易 -昔 -昕 -昙 -星 -映 -春 -昧 -昨 -昭 -是 -昱 -昴 -昵 -昶 -昼 -显 -晁 -時 -晃 -晉 -晋 -晌 -晏 -晒 -晓 -晔 -晕 -晖 -晗 -晚 -晝 -晞 -晟 -晤 -晦 -晨 -晩 -普 -景 -晰 -晴 -晶 -晷 -智 -晾 -暂 -暄 -暇 -暈 -暉 -暌 -暐 -暑 -暖 -暗 -暝 -暢 -暧 -暨 -暫 -暮 -暱 -暴 -暸 -暹 -曄 -曆 -曇 -曉 -曖 -曙 -曜 -曝 -曠 -曦 -曬 -曰 -曲 -曳 -更 -書 -曹 -曼 -曾 -替 -最 -會 -月 -有 -朋 -服 -朐 -朔 -朕 -朗 -望 -朝 -期 -朦 -朧 -木 -未 -末 -本 -札 -朮 -术 -朱 -朴 -朵 -机 -朽 -杀 -杂 -权 -杆 -杈 -杉 -李 -杏 -材 -村 -杓 -杖 -杜 -杞 -束 -杠 -条 -来 -杨 -杭 -杯 -杰 -東 -杳 -杵 -杷 -杼 -松 -板 -极 -构 -枇 -枉 -枋 -析 -枕 -林 -枚 -果 -枝 -枢 -枣 -枪 -枫 -枭 -枯 -枰 -枱 -枳 -架 -枷 -枸 -柄 -柏 -某 -柑 -柒 -染 -柔 -柘 -柚 -柜 -柞 -柠 -柢 -查 -柩 -柬 -柯 -柱 -柳 -柴 -柵 -査 -柿 -栀 -栃 -栄 -栅 -标 -栈 -栉 -栋 -栎 -栏 -树 -栓 -栖 -栗 -校 -栩 -株 -样 -核 -根 -格 -栽 -栾 -桀 -桁 -桂 -桃 -桅 -框 -案 -桉 -桌 -桎 -桐 -桑 -桓 -桔 -桜 -桠 -桡 -桢 -档 -桥 -桦 -桧 -桨 -桩 -桶 -桿 -梁 -梅 -梆 -梏 -梓 -梗 -條 -梟 -梢 -梦 -梧 -梨 -梭 -梯 -械 -梳 -梵 -梶 -检 -棂 -棄 -棉 -棋 -棍 -棒 -棕 -棗 -棘 -棚 -棟 -棠 -棣 -棧 -森 -棱 -棲 -棵 -棹 -棺 -椁 -椅 -椋 -植 -椎 -椒 -検 -椪 -椭 -椰 -椹 -椽 -椿 -楂 -楊 -楓 -楔 -楚 -楝 -楞 -楠 -楣 -楨 -楫 -業 -楮 -極 -楷 -楸 -楹 -楼 -楽 -概 -榄 -榆 -榈 -榉 -榔 -榕 -榖 -榛 -榜 -榨 -榫 -榭 -榮 -榱 -榴 -榷 -榻 -槁 -槃 -構 -槌 -槍 -槎 -槐 -槓 -様 -槛 -槟 -槤 -槭 -槲 -槳 -槻 -槽 -槿 -樁 -樂 -樊 -樑 -樓 -標 -樞 -樟 -模 -樣 -権 -横 -樫 -樯 -樱 -樵 -樸 -樹 -樺 -樽 -樾 -橄 -橇 -橋 -橐 -橘 -橙 -機 -橡 -橢 -橫 -橱 -橹 -橼 -檀 -檄 -檎 -檐 -檔 -檗 -檜 -檢 -檬 -檯 -檳 -檸 -檻 -櫃 -櫚 -櫛 -櫥 -櫸 -櫻 -欄 -權 -欒 -欖 -欠 -次 -欢 -欣 -欧 -欲 -欸 -欺 -欽 -款 -歆 -歇 -歉 -歌 -歎 -歐 -歓 -歙 -歛 -歡 -止 -正 -此 -步 -武 -歧 -歩 -歪 -歯 -歲 -歳 -歴 -歷 -歸 -歹 -死 -歼 -殁 -殃 -殆 -殇 -殉 -殊 -残 -殒 -殓 -殖 -殘 -殞 -殡 -殤 -殭 -殯 -殲 -殴 -段 -殷 -殺 -殼 -殿 -毀 -毁 
-毂 -毅 -毆 -毋 -母 -毎 -每 -毒 -毓 -比 -毕 -毗 -毘 -毙 -毛 -毡 -毫 -毯 -毽 -氈 -氏 -氐 -民 -氓 -气 -氖 -気 -氙 -氛 -氟 -氡 -氢 -氣 -氤 -氦 -氧 -氨 -氪 -氫 -氮 -氯 -氰 -氲 -水 -氷 -永 -氹 -氾 -汀 -汁 -求 -汆 -汇 -汉 -汎 -汐 -汕 -汗 -汙 -汛 -汝 -汞 -江 -池 -污 -汤 -汨 -汩 -汪 -汰 -汲 -汴 -汶 -汹 -決 -汽 -汾 -沁 -沂 -沃 -沅 -沈 -沉 -沌 -沏 -沐 -沒 -沓 -沖 -沙 -沛 -沟 -没 -沢 -沣 -沥 -沦 -沧 -沪 -沫 -沭 -沮 -沱 -河 -沸 -油 -治 -沼 -沽 -沾 -沿 -況 -泄 -泉 -泊 -泌 -泓 -法 -泗 -泛 -泞 -泠 -泡 -波 -泣 -泥 -注 -泪 -泫 -泮 -泯 -泰 -泱 -泳 -泵 -泷 -泸 -泻 -泼 -泽 -泾 -洁 -洄 -洋 -洒 -洗 -洙 -洛 -洞 -津 -洩 -洪 -洮 -洱 -洲 -洵 -洶 -洸 -洹 -活 -洼 -洽 -派 -流 -浃 -浄 -浅 -浆 -浇 -浊 -测 -济 -浏 -浑 -浒 -浓 -浔 -浙 -浚 -浜 -浣 -浦 -浩 -浪 -浬 -浮 -浯 -浴 -海 -浸 -涂 -涅 -涇 -消 -涉 -涌 -涎 -涓 -涔 -涕 -涙 -涛 -涝 -涞 -涟 -涠 -涡 -涣 -涤 -润 -涧 -涨 -涩 -涪 -涮 -涯 -液 -涵 -涸 -涼 -涿 -淀 -淄 -淅 -淆 -淇 -淋 -淌 -淑 -淒 -淖 -淘 -淙 -淚 -淞 -淡 -淤 -淦 -淨 -淩 -淪 -淫 -淬 -淮 -深 -淳 -淵 -混 -淹 -淺 -添 -淼 -清 -済 -渉 -渊 -渋 -渍 -渎 -渐 -渔 -渗 -渙 -渚 -減 -渝 -渠 -渡 -渣 -渤 -渥 -渦 -温 -測 -渭 -港 -渲 -渴 -游 -渺 -渾 -湃 -湄 -湊 -湍 -湖 -湘 -湛 -湟 -湧 -湫 -湮 -湯 -湳 -湾 -湿 -満 -溃 -溅 -溉 -溏 -源 -準 -溜 -溝 -溟 -溢 -溥 -溧 -溪 -溫 -溯 -溱 -溴 -溶 -溺 -溼 -滁 -滂 -滄 -滅 -滇 -滋 -滌 -滑 -滓 -滔 -滕 -滙 -滚 -滝 -滞 -滟 -满 -滢 -滤 -滥 -滦 -滨 -滩 -滬 -滯 -滲 -滴 -滷 -滸 -滾 -滿 -漁 -漂 -漆 -漉 -漏 -漓 -演 -漕 -漠 -漢 -漣 -漩 -漪 -漫 -漬 -漯 -漱 -漲 -漳 -漸 -漾 -漿 -潆 -潇 -潋 -潍 -潑 -潔 -潘 -潛 -潜 -潞 -潟 -潢 -潤 -潦 -潧 -潭 -潮 -潰 -潴 -潸 -潺 -潼 -澀 -澄 -澆 -澈 -澍 -澎 -澗 -澜 -澡 -澤 -澧 -澱 -澳 -澹 -激 -濁 -濂 -濃 -濑 -濒 -濕 -濘 -濛 -濟 -濠 -濡 -濤 -濫 -濬 -濮 -濯 -濱 -濺 -濾 -瀅 -瀆 -瀉 -瀋 -瀏 -瀑 -瀕 -瀘 -瀚 -瀛 -瀝 -瀞 -瀟 -瀧 -瀨 -瀬 -瀰 -瀾 -灌 -灏 -灑 -灘 -灝 -灞 -灣 -火 -灬 -灭 -灯 -灰 -灵 -灶 -灸 -灼 -災 -灾 -灿 -炀 -炁 -炅 -炉 -炊 -炎 -炒 -炔 -炕 -炖 -炙 -炜 -炫 -炬 -炭 -炮 -炯 -炳 -炷 -炸 -点 -為 -炼 -炽 -烁 -烂 -烃 -烈 -烊 -烏 -烘 -烙 -烛 -烟 -烤 -烦 -烧 -烨 -烩 -烫 -烬 -热 -烯 -烷 -烹 -烽 -焉 -焊 -焕 -焖 -焗 -焘 -焙 -焚 -焜 -無 -焦 -焯 -焰 -焱 -然 -焼 -煅 -煉 -煊 -煌 -煎 -煒 -煖 -煙 -煜 -煞 -煤 -煥 -煦 -照 -煨 -煩 -煮 -煲 -煸 -煽 -熄 -熊 -熏 -熒 -熔 -熙 -熟 -熠 -熨 -熬 -熱 -熵 -熹 -熾 -燁 -燃 -燄 -燈 -燉 -燊 -燎 -燒 -燔 -燕 -燙 -燜 -營 -燥 -燦 -燧 -燭 -燮 -燴 -燻 -燼 -燿 -爆 -爍 -爐 -爛 -爪 -爬 -爭 -爰 -爱 -爲 -爵 -父 -爷 -爸 -爹 -爺 -爻 -爽 -爾 -牆 -片 -版 -牌 -牍 -牒 -牙 -牛 -牝 -牟 -牠 -牡 -牢 -牦 -牧 -物 -牯 -牲 -牴 -牵 -特 -牺 -牽 -犀 -犁 -犄 -犊 -犍 -犒 -犢 -犧 -犬 -犯 -状 -犷 -犸 -犹 -狀 -狂 -狄 -狈 -狎 -狐 -狒 -狗 -狙 -狞 -狠 -狡 -狩 -独 -狭 -狮 -狰 -狱 -狸 -狹 -狼 -狽 -猎 -猕 -猖 -猗 -猙 -猛 -猜 
-猝 -猥 -猩 -猪 -猫 -猬 -献 -猴 -猶 -猷 -猾 -猿 -獄 -獅 -獎 -獐 -獒 -獗 -獠 -獣 -獨 -獭 -獰 -獲 -獵 -獷 -獸 -獺 -獻 -獼 -獾 -玄 -率 -玉 -王 -玑 -玖 -玛 -玟 -玠 -玥 -玩 -玫 -玮 -环 -现 -玲 -玳 -玷 -玺 -玻 -珀 -珂 -珅 -珈 -珉 -珊 -珍 -珏 -珐 -珑 -珙 -珞 -珠 -珣 -珥 -珩 -珪 -班 -珮 -珲 -珺 -現 -球 -琅 -理 -琇 -琉 -琊 -琍 -琏 -琐 -琛 -琢 -琥 -琦 -琨 -琪 -琬 -琮 -琰 -琲 -琳 -琴 -琵 -琶 -琺 -琼 -瑀 -瑁 -瑄 -瑋 -瑕 -瑗 -瑙 -瑚 -瑛 -瑜 -瑞 -瑟 -瑠 -瑣 -瑤 -瑩 -瑪 -瑯 -瑰 -瑶 -瑾 -璀 -璁 -璃 -璇 -璉 -璋 -璎 -璐 -璜 -璞 -璟 -璧 -璨 -環 -璽 -璿 -瓊 -瓏 -瓒 -瓜 -瓢 -瓣 -瓤 -瓦 -瓮 -瓯 -瓴 -瓶 -瓷 -甄 -甌 -甕 -甘 -甙 -甚 -甜 -生 -產 -産 -甥 -甦 -用 -甩 -甫 -甬 -甭 -甯 -田 -由 -甲 -申 -电 -男 -甸 -町 -画 -甾 -畀 -畅 -界 -畏 -畑 -畔 -留 -畜 -畝 -畢 -略 -畦 -番 -畫 -異 -畲 -畳 -畴 -當 -畸 -畹 -畿 -疆 -疇 -疊 -疏 -疑 -疔 -疖 -疗 -疙 -疚 -疝 -疟 -疡 -疣 -疤 -疥 -疫 -疮 -疯 -疱 -疲 -疳 -疵 -疸 -疹 -疼 -疽 -疾 -痂 -病 -症 -痈 -痉 -痊 -痍 -痒 -痔 -痕 -痘 -痙 -痛 -痞 -痠 -痢 -痣 -痤 -痧 -痨 -痪 -痫 -痰 -痱 -痴 -痹 -痺 -痼 -痿 -瘀 -瘁 -瘋 -瘍 -瘓 -瘘 -瘙 -瘟 -瘠 -瘡 -瘢 -瘤 -瘦 -瘧 -瘩 -瘪 -瘫 -瘴 -瘸 -瘾 -療 -癇 -癌 -癒 -癖 -癜 -癞 -癡 -癢 -癣 -癥 -癫 -癬 -癮 -癱 -癲 -癸 -発 -登 -發 -白 -百 -皂 -的 -皆 -皇 -皈 -皋 -皎 -皑 -皓 -皖 -皙 -皚 -皮 -皰 -皱 -皴 -皺 -皿 -盂 -盃 -盅 -盆 -盈 -益 -盎 -盏 -盐 -监 -盒 -盔 -盖 -盗 -盘 -盛 -盜 -盞 -盟 -盡 -監 -盤 -盥 -盧 -盪 -目 -盯 -盱 -盲 -直 -相 -盹 -盼 -盾 -省 -眈 -眉 -看 -県 -眙 -眞 -真 -眠 -眦 -眨 -眩 -眯 -眶 -眷 -眸 -眺 -眼 -眾 -着 -睁 -睇 -睏 -睐 -睑 -睛 -睜 -睞 -睡 -睢 -督 -睥 -睦 -睨 -睪 -睫 -睬 -睹 -睽 -睾 -睿 -瞄 -瞅 -瞇 -瞋 -瞌 -瞎 -瞑 -瞒 -瞓 -瞞 -瞟 -瞠 -瞥 -瞧 -瞩 -瞪 -瞬 -瞭 -瞰 -瞳 -瞻 -瞼 -瞿 -矇 -矍 -矗 -矚 -矛 -矜 -矢 -矣 -知 -矩 -矫 -短 -矮 -矯 -石 -矶 -矽 -矾 -矿 -码 -砂 -砌 -砍 -砒 -研 -砖 -砗 -砚 -砝 -砣 -砥 -砧 -砭 -砰 -砲 -破 -砷 -砸 -砺 -砼 -砾 -础 -硅 -硐 -硒 -硕 -硝 -硫 -硬 -确 -硯 -硼 -碁 -碇 -碉 -碌 -碍 -碎 -碑 -碓 -碗 -碘 -碚 -碛 -碟 -碣 -碧 -碩 -碰 -碱 -碳 -碴 -確 -碼 -碾 -磁 -磅 -磊 -磋 -磐 -磕 -磚 -磡 -磨 -磬 -磯 -磲 -磷 -磺 -礁 -礎 -礙 -礡 -礦 -礪 -礫 -礴 -示 -礼 -社 -祀 -祁 -祂 -祇 -祈 -祉 -祎 -祐 -祕 -祖 -祗 -祚 -祛 -祜 -祝 -神 -祟 -祠 -祢 -祥 -票 -祭 -祯 -祷 -祸 -祺 -祿 -禀 -禁 -禄 -禅 -禍 -禎 -福 -禛 -禦 -禧 -禪 -禮 -禱 -禹 -禺 -离 -禽 -禾 -禿 -秀 -私 -秃 -秆 -秉 -秋 -种 -科 -秒 -秘 -租 -秣 -秤 -秦 -秧 -秩 -秭 -积 -称 -秸 -移 -秽 -稀 -稅 -程 -稍 -税 -稔 -稗 -稚 -稜 -稞 -稟 -稠 -稣 -種 -稱 -稲 -稳 -稷 -稹 -稻 -稼 -稽 -稿 -穀 -穂 -穆 -穌 -積 -穎 -穗 -穢 -穩 -穫 -穴 -究 -穷 -穹 -空 -穿 -突 -窃 -窄 -窈 -窍 -窑 -窒 -窓 -窕 -窖 -窗 -窘 -窜 -窝 -窟 -窠 -窥 -窦 -窨 -窩 -窪 -窮 -窯 -窺 -窿 -竄 -竅 -竇 -竊 -立 -竖 -站 -竜 -竞 -竟 -章 -竣 -童 -竭 -端 -競 -竹 -竺 -竽 -竿 -笃 
-笆 -笈 -笋 -笏 -笑 -笔 -笙 -笛 -笞 -笠 -符 -笨 -第 -笹 -笺 -笼 -筆 -等 -筊 -筋 -筍 -筏 -筐 -筑 -筒 -答 -策 -筛 -筝 -筠 -筱 -筲 -筵 -筷 -筹 -签 -简 -箇 -箋 -箍 -箏 -箐 -箔 -箕 -算 -箝 -管 -箩 -箫 -箭 -箱 -箴 -箸 -節 -篁 -範 -篆 -篇 -築 -篑 -篓 -篙 -篝 -篠 -篡 -篤 -篩 -篪 -篮 -篱 -篷 -簇 -簌 -簍 -簡 -簦 -簧 -簪 -簫 -簷 -簸 -簽 -簾 -簿 -籁 -籃 -籌 -籍 -籐 -籟 -籠 -籤 -籬 -籮 -籲 -米 -类 -籼 -籽 -粄 -粉 -粑 -粒 -粕 -粗 -粘 -粟 -粤 -粥 -粧 -粪 -粮 -粱 -粲 -粳 -粵 -粹 -粼 -粽 -精 -粿 -糅 -糊 -糍 -糕 -糖 -糗 -糙 -糜 -糞 -糟 -糠 -糧 -糬 -糯 -糰 -糸 -系 -糾 -紀 -紂 -約 -紅 -紉 -紊 -紋 -納 -紐 -紓 -純 -紗 -紘 -紙 -級 -紛 -紜 -素 -紡 -索 -紧 -紫 -紮 -累 -細 -紳 -紹 -紺 -終 -絃 -組 -絆 -経 -結 -絕 -絞 -絡 -絢 -給 -絨 -絮 -統 -絲 -絳 -絵 -絶 -絹 -綁 -綏 -綑 -經 -継 -続 -綜 -綠 -綢 -綦 -綫 -綬 -維 -綱 -網 -綴 -綵 -綸 -綺 -綻 -綽 -綾 -綿 -緊 -緋 -総 -緑 -緒 -緘 -線 -緝 -緞 -締 -緣 -編 -緩 -緬 -緯 -練 -緹 -緻 -縁 -縄 -縈 -縛 -縝 -縣 -縫 -縮 -縱 -縴 -縷 -總 -績 -繁 -繃 -繆 -繇 -繋 -織 -繕 -繚 -繞 -繡 -繩 -繪 -繫 -繭 -繳 -繹 -繼 -繽 -纂 -續 -纍 -纏 -纓 -纔 -纖 -纜 -纠 -红 -纣 -纤 -约 -级 -纨 -纪 -纫 -纬 -纭 -纯 -纰 -纱 -纲 -纳 -纵 -纶 -纷 -纸 -纹 -纺 -纽 -纾 -线 -绀 -练 -组 -绅 -细 -织 -终 -绊 -绍 -绎 -经 -绑 -绒 -结 -绔 -绕 -绘 -给 -绚 -绛 -络 -绝 -绞 -统 -绡 -绢 -绣 -绥 -绦 -继 -绩 -绪 -绫 -续 -绮 -绯 -绰 -绳 -维 -绵 -绶 -绷 -绸 -绻 -综 -绽 -绾 -绿 -缀 -缄 -缅 -缆 -缇 -缈 -缉 -缎 -缓 -缔 -缕 -编 -缘 -缙 -缚 -缜 -缝 -缠 -缢 -缤 -缥 -缨 -缩 -缪 -缭 -缮 -缰 -缱 -缴 -缸 -缺 -缽 -罂 -罄 -罌 -罐 -网 -罔 -罕 -罗 -罚 -罡 -罢 -罩 -罪 -置 -罰 -署 -罵 -罷 -罹 -羁 -羅 -羈 -羊 -羌 -美 -羔 -羚 -羞 -羟 -羡 -羣 -群 -羥 -羧 -羨 -義 -羯 -羲 -羸 -羹 -羽 -羿 -翁 -翅 -翊 -翌 -翎 -習 -翔 -翘 -翟 -翠 -翡 -翦 -翩 -翰 -翱 -翳 -翹 -翻 -翼 -耀 -老 -考 -耄 -者 -耆 -耋 -而 -耍 -耐 -耒 -耕 -耗 -耘 -耙 -耦 -耨 -耳 -耶 -耷 -耸 -耻 -耽 -耿 -聂 -聆 -聊 -聋 -职 -聒 -联 -聖 -聘 -聚 -聞 -聪 -聯 -聰 -聲 -聳 -聴 -聶 -職 -聽 -聾 -聿 -肃 -肄 -肅 -肆 -肇 -肉 -肋 -肌 -肏 -肓 -肖 -肘 -肚 -肛 -肝 -肠 -股 -肢 -肤 -肥 -肩 -肪 -肮 -肯 -肱 -育 -肴 -肺 -肽 -肾 -肿 -胀 -胁 -胃 -胄 -胆 -背 -胍 -胎 -胖 -胚 -胛 -胜 -胝 -胞 -胡 -胤 -胥 -胧 -胫 -胭 -胯 -胰 -胱 -胳 -胴 -胶 -胸 -胺 -能 -脂 -脅 -脆 -脇 -脈 -脉 -脊 -脍 -脏 -脐 -脑 -脓 -脖 -脘 -脚 -脛 -脣 -脩 -脫 -脯 -脱 -脲 -脳 -脸 -脹 -脾 -腆 -腈 -腊 -腋 -腌 -腎 -腐 -腑 -腓 -腔 -腕 -腥 -腦 -腩 -腫 -腭 -腮 -腰 -腱 -腳 -腴 -腸 -腹 -腺 -腻 -腼 -腾 -腿 -膀 -膈 -膊 -膏 -膑 -膘 -膚 -膛 -膜 -膝 -膠 -膦 -膨 -膩 -膳 -膺 -膻 -膽 -膾 -膿 -臀 -臂 -臃 -臆 -臉 -臊 -臍 -臓 -臘 -臟 -臣 -臥 -臧 -臨 -自 -臬 -臭 -至 -致 -臺 -臻 -臼 -臾 -舀 -舂 -舅 -舆 -與 -興 -舉 -舊 -舌 -舍 -舎 -舐 -舒 -舔 -舖 -舗 -舛 -舜 -舞 -舟 -航 -舫 -般 -舰 -舱 -舵 -舶 -舷 -舸 -船 -舺 
-舾 -艇 -艋 -艘 -艙 -艦 -艮 -良 -艰 -艱 -色 -艳 -艷 -艹 -艺 -艾 -节 -芃 -芈 -芊 -芋 -芍 -芎 -芒 -芙 -芜 -芝 -芡 -芥 -芦 -芩 -芪 -芫 -芬 -芭 -芮 -芯 -花 -芳 -芷 -芸 -芹 -芻 -芽 -芾 -苁 -苄 -苇 -苋 -苍 -苏 -苑 -苒 -苓 -苔 -苕 -苗 -苛 -苜 -苞 -苟 -苡 -苣 -若 -苦 -苫 -苯 -英 -苷 -苹 -苻 -茁 -茂 -范 -茄 -茅 -茉 -茎 -茏 -茗 -茜 -茧 -茨 -茫 -茬 -茭 -茯 -茱 -茲 -茴 -茵 -茶 -茸 -茹 -茼 -荀 -荃 -荆 -草 -荊 -荏 -荐 -荒 -荔 -荖 -荘 -荚 -荞 -荟 -荠 -荡 -荣 -荤 -荥 -荧 -荨 -荪 -荫 -药 -荳 -荷 -荸 -荻 -荼 -荽 -莅 -莆 -莉 -莊 -莎 -莒 -莓 -莖 -莘 -莞 -莠 -莢 -莧 -莪 -莫 -莱 -莲 -莴 -获 -莹 -莺 -莽 -莿 -菀 -菁 -菅 -菇 -菈 -菊 -菌 -菏 -菓 -菖 -菘 -菜 -菟 -菠 -菡 -菩 -華 -菱 -菲 -菸 -菽 -萁 -萃 -萄 -萊 -萋 -萌 -萍 -萎 -萘 -萝 -萤 -营 -萦 -萧 -萨 -萩 -萬 -萱 -萵 -萸 -萼 -落 -葆 -葉 -著 -葚 -葛 -葡 -董 -葦 -葩 -葫 -葬 -葭 -葯 -葱 -葳 -葵 -葷 -葺 -蒂 -蒋 -蒐 -蒔 -蒙 -蒜 -蒞 -蒟 -蒡 -蒨 -蒲 -蒸 -蒹 -蒻 -蒼 -蒿 -蓁 -蓄 -蓆 -蓉 -蓋 -蓑 -蓓 -蓖 -蓝 -蓟 -蓦 -蓬 -蓮 -蓼 -蓿 -蔑 -蔓 -蔔 -蔗 -蔘 -蔚 -蔡 -蔣 -蔥 -蔫 -蔬 -蔭 -蔵 -蔷 -蔺 -蔻 -蔼 -蔽 -蕁 -蕃 -蕈 -蕉 -蕊 -蕎 -蕙 -蕤 -蕨 -蕩 -蕪 -蕭 -蕲 -蕴 -蕻 -蕾 -薄 -薅 -薇 -薈 -薊 -薏 -薑 -薔 -薙 -薛 -薦 -薨 -薩 -薪 -薬 -薯 -薰 -薹 -藉 -藍 -藏 -藐 -藓 -藕 -藜 -藝 -藤 -藥 -藩 -藹 -藻 -藿 -蘆 -蘇 -蘊 -蘋 -蘑 -蘚 -蘭 -蘸 -蘼 -蘿 -虎 -虏 -虐 -虑 -虔 -處 -虚 -虛 -虜 -虞 -號 -虢 -虧 -虫 -虬 -虱 -虹 -虻 -虽 -虾 -蚀 -蚁 -蚂 -蚊 -蚌 -蚓 -蚕 -蚜 -蚝 -蚣 -蚤 -蚩 -蚪 -蚯 -蚱 -蚵 -蛀 -蛆 -蛇 -蛊 -蛋 -蛎 -蛐 -蛔 -蛙 -蛛 -蛟 -蛤 -蛭 -蛮 -蛰 -蛳 -蛹 -蛻 -蛾 -蜀 -蜂 -蜃 -蜆 -蜇 -蜈 -蜊 -蜍 -蜒 -蜓 -蜕 -蜗 -蜘 -蜚 -蜜 -蜡 -蜢 -蜥 -蜱 -蜴 -蜷 -蜻 -蜿 -蝇 -蝈 -蝉 -蝌 -蝎 -蝕 -蝗 -蝙 -蝟 -蝠 -蝦 -蝨 -蝴 -蝶 -蝸 -蝼 -螂 -螃 -融 -螞 -螢 -螨 -螯 -螳 -螺 -蟀 -蟄 -蟆 -蟋 -蟎 -蟑 -蟒 -蟠 -蟬 -蟲 -蟹 -蟻 -蟾 -蠅 -蠍 -蠔 -蠕 -蠛 -蠟 -蠡 -蠢 -蠣 -蠱 -蠶 -蠹 -蠻 -血 -衄 -衅 -衆 -行 -衍 -術 -衔 -街 -衙 -衛 -衝 -衞 -衡 -衢 -衣 -补 -表 -衩 -衫 -衬 -衮 -衰 -衲 -衷 -衹 -衾 -衿 -袁 -袂 -袄 -袅 -袈 -袋 -袍 -袒 -袖 -袜 -袞 -袤 -袪 -被 -袭 -袱 -裁 -裂 -装 -裆 -裊 -裏 -裔 -裕 -裘 -裙 -補 -裝 -裟 -裡 -裤 -裨 -裱 -裳 -裴 -裸 -裹 -製 -裾 -褂 -複 -褐 -褒 -褓 -褔 -褚 -褥 -褪 -褫 -褲 -褶 -褻 -襁 -襄 -襟 -襠 -襪 -襬 -襯 -襲 -西 -要 -覃 -覆 -覇 -見 -規 -覓 -視 -覚 -覦 -覧 -親 -覬 -観 -覷 -覺 -覽 -觀 -见 -观 -规 -觅 -视 -览 -觉 -觊 -觎 -觐 -觑 -角 -觞 -解 -觥 -触 -觸 -言 -訂 -計 -訊 -討 -訓 -訕 -訖 -託 -記 -訛 -訝 -訟 -訣 -訥 -訪 -設 -許 -訳 -訴 -訶 -診 -註 -証 -詆 -詐 -詔 -評 -詛 -詞 -詠 -詡 -詢 -詣 -試 -詩 -詫 -詬 -詭 -詮 -詰 -話 -該 -詳 -詹 -詼 -誅 -誇 -誉 -誌 -認 -誓 -誕 -誘 -語 -誠 -誡 -誣 -誤 -誥 -誦 -誨 -說 -説 -読 -誰 -課 -誹 -誼 -調 -諄 -談 -請 -諏 -諒 -論 -諗 -諜 -諡 -諦 -諧 -諫 -諭 -諮 -諱 -諳 -諷 -諸 -諺 -諾 -謀 -謁 -謂 -謄 -謊 -謎 -謐 
-謔 -謗 -謙 -講 -謝 -謠 -謨 -謬 -謹 -謾 -譁 -證 -譎 -譏 -識 -譙 -譚 -譜 -警 -譬 -譯 -議 -譲 -譴 -護 -譽 -讀 -變 -讓 -讚 -讞 -计 -订 -认 -讥 -讧 -讨 -让 -讪 -讫 -训 -议 -讯 -记 -讲 -讳 -讴 -讶 -讷 -许 -讹 -论 -讼 -讽 -设 -访 -诀 -证 -诃 -评 -诅 -识 -诈 -诉 -诊 -诋 -词 -诏 -译 -试 -诗 -诘 -诙 -诚 -诛 -话 -诞 -诟 -诠 -诡 -询 -诣 -诤 -该 -详 -诧 -诩 -诫 -诬 -语 -误 -诰 -诱 -诲 -说 -诵 -诶 -请 -诸 -诺 -读 -诽 -课 -诿 -谀 -谁 -调 -谄 -谅 -谆 -谈 -谊 -谋 -谌 -谍 -谎 -谏 -谐 -谑 -谒 -谓 -谔 -谕 -谗 -谘 -谙 -谚 -谛 -谜 -谟 -谢 -谣 -谤 -谥 -谦 -谧 -谨 -谩 -谪 -谬 -谭 -谯 -谱 -谲 -谴 -谶 -谷 -豁 -豆 -豇 -豈 -豉 -豊 -豌 -豎 -豐 -豔 -豚 -象 -豢 -豪 -豫 -豬 -豹 -豺 -貂 -貅 -貌 -貓 -貔 -貘 -貝 -貞 -負 -財 -貢 -貧 -貨 -販 -貪 -貫 -責 -貯 -貰 -貳 -貴 -貶 -買 -貸 -費 -貼 -貽 -貿 -賀 -賁 -賂 -賃 -賄 -資 -賈 -賊 -賑 -賓 -賜 -賞 -賠 -賡 -賢 -賣 -賤 -賦 -質 -賬 -賭 -賴 -賺 -購 -賽 -贅 -贈 -贊 -贍 -贏 -贓 -贖 -贛 -贝 -贞 -负 -贡 -财 -责 -贤 -败 -账 -货 -质 -贩 -贪 -贫 -贬 -购 -贮 -贯 -贰 -贱 -贲 -贴 -贵 -贷 -贸 -费 -贺 -贻 -贼 -贾 -贿 -赁 -赂 -赃 -资 -赅 -赈 -赊 -赋 -赌 -赎 -赏 -赐 -赓 -赔 -赖 -赘 -赚 -赛 -赝 -赞 -赠 -赡 -赢 -赣 -赤 -赦 -赧 -赫 -赭 -走 -赳 -赴 -赵 -赶 -起 -趁 -超 -越 -趋 -趕 -趙 -趟 -趣 -趨 -足 -趴 -趵 -趸 -趺 -趾 -跃 -跄 -跆 -跋 -跌 -跎 -跑 -跖 -跚 -跛 -距 -跟 -跡 -跤 -跨 -跩 -跪 -路 -跳 -践 -跷 -跹 -跺 -跻 -踉 -踊 -踌 -踏 -踐 -踝 -踞 -踟 -踢 -踩 -踪 -踮 -踱 -踴 -踵 -踹 -蹂 -蹄 -蹇 -蹈 -蹉 -蹊 -蹋 -蹑 -蹒 -蹙 -蹟 -蹣 -蹤 -蹦 -蹩 -蹬 -蹭 -蹲 -蹴 -蹶 -蹺 -蹼 -蹿 -躁 -躇 -躉 -躊 -躋 -躍 -躏 -躪 -身 -躬 -躯 -躲 -躺 -軀 -車 -軋 -軌 -軍 -軒 -軟 -転 -軸 -軼 -軽 -軾 -較 -載 -輒 -輓 -輔 -輕 -輛 -輝 -輟 -輩 -輪 -輯 -輸 -輻 -輾 -輿 -轄 -轅 -轆 -轉 -轍 -轎 -轟 -车 -轧 -轨 -轩 -转 -轭 -轮 -软 -轰 -轲 -轴 -轶 -轻 -轼 -载 -轿 -较 -辄 -辅 -辆 -辇 -辈 -辉 -辊 -辍 -辐 -辑 -输 -辕 -辖 -辗 -辘 -辙 -辛 -辜 -辞 -辟 -辣 -辦 -辨 -辩 -辫 -辭 -辮 -辯 -辰 -辱 -農 -边 -辺 -辻 -込 -辽 -达 -迁 -迂 -迄 -迅 -过 -迈 -迎 -运 -近 -返 -还 -这 -进 -远 -违 -连 -迟 -迢 -迤 -迥 -迦 -迩 -迪 -迫 -迭 -述 -迴 -迷 -迸 -迹 -迺 -追 -退 -送 -适 -逃 -逅 -逆 -选 -逊 -逍 -透 -逐 -递 -途 -逕 -逗 -這 -通 -逛 -逝 -逞 -速 -造 -逢 -連 -逮 -週 -進 -逵 -逶 -逸 -逻 -逼 -逾 -遁 -遂 -遅 -遇 -遊 -運 -遍 -過 -遏 -遐 -遑 -遒 -道 -達 -違 -遗 -遙 -遛 -遜 -遞 -遠 -遢 -遣 -遥 -遨 -適 -遭 -遮 -遲 -遴 -遵 -遶 -遷 -選 -遺 -遼 -遽 -避 -邀 -邁 -邂 -邃 -還 -邇 -邈 -邊 -邋 -邏 -邑 -邓 -邕 -邛 -邝 -邢 -那 -邦 -邨 -邪 -邬 -邮 -邯 -邰 -邱 -邳 -邵 -邸 -邹 -邺 -邻 -郁 -郅 -郊 -郎 -郑 -郜 -郝 -郡 -郢 -郤 -郦 -郧 -部 -郫 -郭 -郴 -郵 -郷 -郸 -都 -鄂 -鄉 -鄒 -鄔 -鄙 -鄞 -鄢 -鄧 -鄭 -鄰 -鄱 -鄲 -鄺 -酉 -酊 -酋 -酌 -配 -酐 -酒 -酗 -酚 -酝 -酢 -酣 -酥 -酩 -酪 -酬 -酮 -酯 -酰 -酱 -酵 -酶 -酷 -酸 -酿 -醃 -醇 
-醉 -醋 -醍 -醐 -醒 -醚 -醛 -醜 -醞 -醣 -醪 -醫 -醬 -醮 -醯 -醴 -醺 -釀 -釁 -采 -釉 -释 -釋 -里 -重 -野 -量 -釐 -金 -釗 -釘 -釜 -針 -釣 -釦 -釧 -釵 -鈀 -鈉 -鈍 -鈎 -鈔 -鈕 -鈞 -鈣 -鈦 -鈪 -鈴 -鈺 -鈾 -鉀 -鉄 -鉅 -鉉 -鉑 -鉗 -鉚 -鉛 -鉤 -鉴 -鉻 -銀 -銃 -銅 -銑 -銓 -銖 -銘 -銜 -銬 -銭 -銮 -銳 -銷 -銹 -鋁 -鋅 -鋒 -鋤 -鋪 -鋰 -鋸 -鋼 -錄 -錐 -錘 -錚 -錠 -錢 -錦 -錨 -錫 -錮 -錯 -録 -錳 -錶 -鍊 -鍋 -鍍 -鍛 -鍥 -鍰 -鍵 -鍺 -鍾 -鎂 -鎊 -鎌 -鎏 -鎔 -鎖 -鎗 -鎚 -鎧 -鎬 -鎮 -鎳 -鏈 -鏖 -鏗 -鏘 -鏞 -鏟 -鏡 -鏢 -鏤 -鏽 -鐘 -鐮 -鐲 -鐳 -鐵 -鐸 -鐺 -鑄 -鑊 -鑑 -鑒 -鑣 -鑫 -鑰 -鑲 -鑼 -鑽 -鑾 -鑿 -针 -钉 -钊 -钎 -钏 -钒 -钓 -钗 -钙 -钛 -钜 -钝 -钞 -钟 -钠 -钡 -钢 -钣 -钤 -钥 -钦 -钧 -钨 -钩 -钮 -钯 -钰 -钱 -钳 -钴 -钵 -钺 -钻 -钼 -钾 -钿 -铀 -铁 -铂 -铃 -铄 -铅 -铆 -铉 -铎 -铐 -铛 -铜 -铝 -铠 -铡 -铢 -铣 -铤 -铨 -铩 -铬 -铭 -铮 -铰 -铲 -铵 -银 -铸 -铺 -链 -铿 -销 -锁 -锂 -锄 -锅 -锆 -锈 -锉 -锋 -锌 -锏 -锐 -锑 -错 -锚 -锟 -锡 -锢 -锣 -锤 -锥 -锦 -锭 -键 -锯 -锰 -锲 -锵 -锹 -锺 -锻 -镀 -镁 -镂 -镇 -镉 -镌 -镍 -镐 -镑 -镕 -镖 -镗 -镛 -镜 -镣 -镭 -镯 -镰 -镳 -镶 -長 -长 -門 -閃 -閉 -開 -閎 -閏 -閑 -閒 -間 -閔 -閘 -閡 -関 -閣 -閥 -閨 -閩 -閱 -閲 -閹 -閻 -閾 -闆 -闇 -闊 -闌 -闍 -闔 -闕 -闖 -闘 -關 -闡 -闢 -门 -闪 -闫 -闭 -问 -闯 -闰 -闲 -间 -闵 -闷 -闸 -闹 -闺 -闻 -闽 -闾 -阀 -阁 -阂 -阅 -阆 -阇 -阈 -阉 -阎 -阐 -阑 -阔 -阕 -阖 -阙 -阚 -阜 -队 -阡 -阪 -阮 -阱 -防 -阳 -阴 -阵 -阶 -阻 -阿 -陀 -陂 -附 -际 -陆 -陇 -陈 -陋 -陌 -降 -限 -陕 -陛 -陝 -陞 -陟 -陡 -院 -陣 -除 -陨 -险 -陪 -陰 -陲 -陳 -陵 -陶 -陷 -陸 -険 -陽 -隅 -隆 -隈 -隊 -隋 -隍 -階 -随 -隐 -隔 -隕 -隘 -隙 -際 -障 -隠 -隣 -隧 -隨 -險 -隱 -隴 -隶 -隸 -隻 -隼 -隽 -难 -雀 -雁 -雄 -雅 -集 -雇 -雉 -雋 -雌 -雍 -雎 -雏 -雑 -雒 -雕 -雖 -雙 -雛 -雜 -雞 -離 -難 -雨 -雪 -雯 -雰 -雲 -雳 -零 -雷 -雹 -電 -雾 -需 -霁 -霄 -霆 -震 -霈 -霉 -霊 -霍 -霎 -霏 -霑 -霓 -霖 -霜 -霞 -霧 -霭 -霰 -露 -霸 -霹 -霽 -霾 -靂 -靄 -靈 -青 -靓 -靖 -静 -靚 -靛 -靜 -非 -靠 -靡 -面 -靥 -靦 -革 -靳 -靴 -靶 -靼 -鞅 -鞋 -鞍 -鞏 -鞑 -鞘 -鞠 -鞣 -鞦 -鞭 -韆 -韋 -韌 -韓 -韜 -韦 -韧 -韩 -韬 -韭 -音 -韵 -韶 -韻 -響 -頁 -頂 -頃 -項 -順 -須 -頌 -預 -頑 -頒 -頓 -頗 -領 -頜 -頡 -頤 -頫 -頭 -頰 -頷 -頸 -頹 -頻 -頼 -顆 -題 -額 -顎 -顏 -顔 -願 -顛 -類 -顧 -顫 -顯 -顱 -顴 -页 -顶 -顷 -项 -顺 -须 -顼 -顽 -顾 -顿 -颁 -颂 -预 -颅 -领 -颇 -颈 -颉 -颊 -颌 -颍 -颐 -频 -颓 -颔 -颖 -颗 -题 -颚 -颛 -颜 -额 -颞 -颠 -颡 -颢 -颤 -颦 -颧 -風 -颯 -颱 -颳 -颶 -颼 -飄 -飆 -风 -飒 -飓 -飕 -飘 -飙 -飚 -飛 -飞 -食 -飢 -飨 -飩 -飪 -飯 -飲 -飼 -飽 -飾 -餃 -餅 -餉 -養 -餌 -餐 -餒 -餓 -餘 -餚 -餛 -餞 -餡 -館 -餮 -餵 -餾 -饅 -饈 -饋 -饌 -饍 -饑 -饒 -饕 -饗 -饞 -饥 -饨 -饪 -饬 -饭 -饮 -饯 -饰 -饱 -饲 -饴 -饵 -饶 -饷 -饺 -饼 -饽 -饿 -馀 -馁 -馄 -馅 -馆 -馈 -馋 
-馍 -馏 -馒 -馔 -首 -馗 -香 -馥 -馨 -馬 -馭 -馮 -馳 -馴 -駁 -駄 -駅 -駆 -駐 -駒 -駕 -駛 -駝 -駭 -駱 -駿 -騁 -騎 -騏 -験 -騙 -騨 -騰 -騷 -驀 -驅 -驊 -驍 -驒 -驕 -驗 -驚 -驛 -驟 -驢 -驥 -马 -驭 -驮 -驯 -驰 -驱 -驳 -驴 -驶 -驷 -驸 -驹 -驻 -驼 -驾 -驿 -骁 -骂 -骄 -骅 -骆 -骇 -骈 -骊 -骋 -验 -骏 -骐 -骑 -骗 -骚 -骛 -骜 -骞 -骠 -骡 -骤 -骥 -骧 -骨 -骯 -骰 -骶 -骷 -骸 -骼 -髂 -髅 -髋 -髏 -髒 -髓 -體 -髖 -高 -髦 -髪 -髮 -髯 -髻 -鬃 -鬆 -鬍 -鬓 -鬚 -鬟 -鬢 -鬣 -鬥 -鬧 -鬱 -鬼 -魁 -魂 -魄 -魅 -魇 -魍 -魏 -魔 -魘 -魚 -魯 -魷 -鮑 -鮨 -鮪 -鮭 -鮮 -鯉 -鯊 -鯖 -鯛 -鯨 -鯰 -鯽 -鰍 -鰓 -鰭 -鰲 -鰻 -鰾 -鱈 -鱉 -鱔 -鱗 -鱷 -鱸 -鱼 -鱿 -鲁 -鲈 -鲍 -鲑 -鲛 -鲜 -鲟 -鲢 -鲤 -鲨 -鲫 -鲱 -鲲 -鲶 -鲷 -鲸 -鳃 -鳄 -鳅 -鳌 -鳍 -鳕 -鳖 -鳗 -鳝 -鳞 -鳥 -鳩 -鳳 -鳴 -鳶 -鴉 -鴕 -鴛 -鴦 -鴨 -鴻 -鴿 -鵑 -鵜 -鵝 -鵡 -鵬 -鵰 -鵲 -鶘 -鶩 -鶯 -鶴 -鷗 -鷲 -鷹 -鷺 -鸚 -鸞 -鸟 -鸠 -鸡 -鸢 -鸣 -鸥 -鸦 -鸨 -鸪 -鸭 -鸯 -鸳 -鸵 -鸽 -鸾 -鸿 -鹂 -鹃 -鹄 -鹅 -鹈 -鹉 -鹊 -鹌 -鹏 -鹑 -鹕 -鹘 -鹜 -鹞 -鹤 -鹦 -鹧 -鹫 -鹭 -鹰 -鹳 -鹵 -鹹 -鹼 -鹽 -鹿 -麂 -麋 -麒 -麓 -麗 -麝 -麟 -麥 -麦 -麩 -麴 -麵 -麸 -麺 -麻 -麼 -麽 -麾 -黃 -黄 -黍 -黎 -黏 -黑 -黒 -黔 -默 -黛 -黜 -黝 -點 -黠 -黨 -黯 -黴 -鼋 -鼎 -鼐 -鼓 -鼠 -鼬 -鼹 -鼻 -鼾 -齁 -齊 -齋 -齐 -齒 -齡 -齢 -齣 -齦 -齿 -龄 -龅 -龈 -龊 -龋 -龌 -龍 -龐 -龔 -龕 -龙 -龚 -龛 -龜 -龟 -︰ -︱ -︶ -︿ -﹁ -﹂ -﹍ -﹏ -﹐ -﹑ -﹒ -﹔ -﹕ -﹖ -﹗ -﹙ -﹚ -﹝ -﹞ -﹡ -﹣ -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? -@ -[ -\ -] -^ -_ -` -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -。 -「 -」 -、 -・ -ッ -ー -イ -ク -シ -ス -ト -ノ -フ -ラ -ル -ン -゙ -゚ - ̄ -¥ -👍 -🔥 -😂 -😎 -... 
-yam -10 -2017 -12 -11 -2016 -20 -30 -15 -06 -lofter -##s -2015 -by -16 -14 -18 -13 -24 -17 -2014 -21 -##0 -22 -19 -25 -23 -com -100 -00 -05 -2013 -##a -03 -09 -08 -28 -##2 -50 -01 -04 -##1 -27 -02 -2012 -##3 -26 -##e -07 -##8 -##5 -##6 -##4 -##9 -##7 -29 -2011 -40 -##t -2010 -##o -##d -##i -2009 -##n -app -www -the -##m -31 -##c -##l -##y -##r -##g -2008 -60 -http -200 -qq -##p -80 -##f -google -pixnet -90 -cookies -tripadvisor -500 -##er -##k -35 -##h -facebook -2007 -2000 -70 -##b -of -##x -##u -45 -300 -iphone -32 -1000 -2006 -48 -ip -36 -in -38 -3d -##w -##ing -55 -ctrip -##on -##v -33 -##の -to -34 -400 -id -2005 -it -37 -windows -llc -top -99 -42 -39 -000 -led -at -##an -41 -51 -52 -46 -49 -43 -53 -44 -##z -android -58 -and -59 -2004 -56 -vr -##か -5000 -2003 -47 -blogthis -twitter -54 -##le -150 -ok -2018 -57 -75 -cn -no -ios -##in -##mm -##00 -800 -on -te -3000 -65 -2001 -360 -95 -ig -lv -120 -##ng -##を -##us -##に -pc -てす -── -600 -##te -85 -2002 -88 -##ed -html -ncc -wifi -email -64 -blog -is -##10 -##て -mail -online -##al -dvd -##ic -studio -##は -##℃ -##ia -##と -line -vip -72 -##q -98 -##ce -##en -for -##is -##ra -##es -##j -usb -net -cp -1999 -asia -4g -##cm -diy -new -3c -##お -ta -66 -language -vs -apple -tw -86 -web -##ne -ipad -62 -you -##re -101 -68 -##tion -ps -de -bt -pony -atm -##2017 -1998 -67 -##ch -ceo -##or -go -##na -av -pro -cafe -96 -pinterest -97 -63 -pixstyleme3c -##ta -more -said -##2016 -1997 -mp3 -700 -##ll -nba -jun -##20 -92 -tv -1995 -pm -61 -76 -nbsp -250 -##ie -linux -##ma -cd -110 -hd -##17 -78 -##ion -77 -6000 -am -##th -##st -94 -##se -##et -69 -180 -gdp -my -105 -81 -abc -89 -flash -79 -one -93 -1990 -1996 -##ck -gps -##も -##ly -web885 -106 -2020 -91 -##ge -4000 -1500 -xd -boss -isbn -1994 -org -##ry -me -love -##11 -0fork -73 -##12 -3g -##ter -##ar -71 -82 -##la -hotel -130 -1970 -pk -83 -87 -140 -ie -##os -##30 -##el -74 -##50 -seo -cpu -##ml -p2p -84 -may -##る -sun -tue -internet -cc -posted -youtube -##at -##ン -##man -ii 
-##ル -##15 -abs -nt -pdf -yahoo -ago -1980 -##it -news -mac -104 -##てす -##me -##り -java -1992 -spa -##de -##nt -hk -all -plus -la -1993 -##mb -##16 -##ve -west -##da -160 -air -##い -##ps -から -##to -1989 -logo -htc -php -https -fi -momo -##son -sat -##ke -##80 -ebd -suv -wi -day -apk -##88 -##um -mv -galaxy -wiki -or -brake -##ス -1200 -する -this -1991 -mon -##こ -❤2017 -po -##ない -javascript -life -home -june -##ss -system -900 -##ー -##0 -pp -1988 -world -fb -4k -br -##as -ic -ai -leonardo -safari -##60 -live -free -xx -wed -win7 -kiehl -##co -lg -o2o -##go -us -235 -1949 -mm -しい -vfm -kanye -##90 -##2015 -##id -jr -##ey -123 -rss -##sa -##ro -##am -##no -thu -fri -350 -##sh -##ki -103 -comments -name -##のて -##pe -##ine -max -1987 -8000 -uber -##mi -##ton -wordpress -office -1986 -1985 -##ment -107 -bd -win10 -##ld -##li -gmail -bb -dior -##rs -##ri -##rd -##ます -up -cad -##® -dr -して -read -##21 -をお -##io -##99 -url -1984 -pvc -paypal -show -policy -##40 -##ty -##18 -with -##★ -##01 -txt -102 -##ba -dna -from -post -mini -ar -taiwan -john -##ga -privacy -agoda -##13 -##ny -word -##24 -##22 -##by -##ur -##hz -1982 -##ang -265 -cookie -netscape -108 -##ka -##~ -##ad -house -share -note -ibm -code -hello -nike -sim -survey -##016 -1979 -1950 -wikia -##32 -##017 -5g -cbc -##tor -##kg -1983 -##rt -##14 -campaign -store -2500 -os -##ct -##ts -##° -170 -api -##ns -365 -excel -##な -##ao -##ら -##し -~~ -##nd -university -163 -には -518 -##70 -##ya -##il -##25 -pierre -ipo -0020 -897 -##23 -hotels -##ian -のお -125 -years -6606 -##ers -##26 -high -##day -time -##ay -bug -##line -##く -##す -##be -xp -talk2yam -yamservice -10000 -coco -##dy -sony -##ies -1978 -microsoft -david -people -##ha -1960 -instagram -intel -その -##ot -iso -1981 -##va -115 -##mo -##land -xxx -man -co -ltxsw -##ation -baby -220 -##pa -##ol -1945 -7000 -tag -450 -##ue -msn -##31 -oppo -##ト -##ca -control -##om -st -chrome -##ure -##ん -be -##き -lol -##19 -した -##bo -240 -lady -##100 -##way -##から -4600 -##ko -##do -##un 
-4s -corporation -168 -##ni -herme -##28 -cp -978 -##up -##06 -ui -##ds -ppt -admin -three -します -bbc -re -128 -##48 -ca -##015 -##35 -hp -##ee -tpp -##た -##ive -×× -root -##cc -##ました -##ble -##ity -adobe -park -114 -et -oled -city -##ex -##ler -##ap -china -##book -20000 -view -##ice -global -##km -your -hong -##mg -out -##ms -ng -ebay -##29 -menu -ubuntu -##cy -rom -##view -open -ktv -do -server -##lo -if -english -##ね -##5 -##oo -1600 -##02 -step1 -kong -club -135 -july -inc -1976 -mr -hi -##net -touch -##ls -##ii -michael -lcd -##05 -##33 -phone -james -step2 -1300 -ios9 -##box -dc -##2 -##ley -samsung -111 -280 -pokemon -css -##ent -##les -いいえ -##1 -s8 -atom -play -bmw -##said -sa -etf -ctrl -♥yoyo♥ -##55 -2025 -##2014 -##66 -adidas -amazon -1958 -##ber -##ner -visa -##77 -##der -1800 -connectivity -##hi -firefox -109 -118 -hr -so -style -mark -pop -ol -skip -1975 -as -##27 -##ir -##61 -190 -mba -##う -##ai -le -##ver -1900 -cafe2017 -lte -super -113 -129 -##ron -amd -like -##☆ -are -##ster -we -##sk -paul -data -international -##ft -longchamp -ssd -good -##ート -##ti -reply -##my -↓↓↓ -apr -star -##ker -source -136 -js -112 -get -force -photo -##one -126 -##2013 -##ow -link -bbs -1972 -goods -##lin -python -119 -##ip -game -##ics -##ません -blue -##● -520 -##45 -page -itunes -##03 -1955 -260 -1968 -gt -gif -618 -##ff -##47 -group -くたさい -about -bar -ganji -##nce -music -lee -not -1977 -1971 -1973 -##per -an -faq -comment -##って -days -##ock -116 -##bs -1974 -1969 -v1 -player -1956 -xbox -sql -fm -f1 -139 -##ah -210 -##lv -##mp -##000 -melody -1957 -##3 -550 -17life -199 -1966 -xml -market -##au -##71 -999 -##04 -what -gl -##95 -##age -tips -##68 -book -##ting -mysql -can -1959 -230 -##ung -wonderland -watch -10℃ -##ction -9000 -mar -mobile -1946 -1962 -article -##db -part -▲top -party -って -1967 -1964 -1948 -##07 -##ore -##op -この -dj -##78 -##38 -010 -main -225 -1965 -##ong -art -320 -ad -134 -020 -##73 -117 -pm2 -japan -228 -##08 -ts -1963 -##ica -der -sm -##36 -2019 
-##wa -ct -##7 -##や -##64 -1937 -homemesh -search -##85 -##れは -##tv -##di -macbook -##9 -##くたさい -service -##♥ -type -った -750 -##ier -##si -##75 -##います -##ok -best -##ット -goris -lock -##った -cf -3m -big -##ut -ftp -carol -##vi -10 -1961 -happy -sd -##ac -122 -anti -pe -cnn -iii -1920 -138 -##ラ -1940 -esp -jan -tags -##98 -##51 -august -vol -##86 -154 -##™ -##fs -##れ -##sion -design -ac -##ム -press -jordan -ppp -that -key -check -##6 -##tt -##㎡ -1080p -##lt -power -##42 -1952 -##bc -vivi -##ック -he -133 -121 -jpg -##rry -201 -175 -3500 -1947 -nb -##ted -##rn -しています -1954 -usd -##t00 -master -##ンク -001 -model -##58 -al -##09 -1953 -##34 -ram -goo -ても -##ui -127 -1930 -red -##ary -rpg -item -##pm -##41 -270 -##za -project -##2012 -hot -td -blogabstract -##ger -##62 -650 -##44 -gr2 -##します -##m -black -electronic -nfc -year -asus -また -html5 -cindy -##hd -m3 -132 -esc -##od -booking -##53 -fed -tvb -##81 -##ina -mit -165 -##いる -chan -192 -distribution -next -になる -peter -bios -steam -cm -1941 -にも -pk10 -##ix -##65 -##91 -dec -nasa -##ana -icecat -00z -b1 -will -##46 -li -se -##ji -##み -##ard -oct -##ain -jp -##ze -##bi -cio -##56 -smart -h5 -##39 -##port -curve -vpn -##nm -##dia -utc -##あり -12345678910 -##52 -rmvb -channel -a4 -miss -##and -##im -media -who -##63 -she -girl -5s -124 -vera -##して -class -vivo -king -##フ -##ei -national -ab -1951 -5cm -888 -145 -ipod -ap -1100 -5mm -211 -ms -2756 -##69 -mp4 -msci -##po -##89 -131 -mg -index -380 -##bit -##out -##zz -##97 -##67 -158 -apec -##8 -photoshop -opec -¥799 -ては -##96 -##tes -##ast -2g -○○ -##ール -¥2899 -##ling -##よ -##ory -1938 -##ical -kitty -content -##43 -step3 -##cn -win8 -155 -vc -1400 -iphone7 -robert -##した -tcl -137 -beauty -##87 -en -dollars -##ys -##oc -step -pay -yy -a1 -##2011 -##lly -##ks -##♪ -1939 -188 -download -1944 -sep -exe -ph -います -school -gb -center -pr -street -##board -uv -##37 -##lan -winrar -##que -##ua -##com -1942 -1936 -480 -gpu -##4 -ettoday -fu -tom -##54 -##ren -##via -149 -##72 -b2b -144 
-##79 -##tch -rose -arm -mb -##49 -##ial -##nn -nvidia -step4 -mvp -00㎡ -york -156 -##イ -how -cpi -591 -2765 -gov -kg -joe -##xx -mandy -pa -##ser -copyright -fashion -1935 -don -##け -ecu -##ist -##art -erp -wap -have -##lm -talk -##ek -##ning -##if -ch -##ite -video -1943 -cs -san -iot -look -##84 -##2010 -##ku -october -##ux -trump -##hs -##ide -box -141 -first -##ins -april -##ight -##83 -185 -angel -protected -aa -151 -162 -x1 -m2 -##fe -##× -##ho -size -143 -min -ofo -fun -gomaji -ex -hdmi -food -dns -march -chris -kevin -##のか -##lla -##pp -##ec -ag -ems -6s -720p -##rm -##ham -off -##92 -asp -team -fandom -ed -299 -▌♥ -##ell -info -されています -##82 -sina -4066 -161 -##able -##ctor -330 -399 -315 -dll -rights -ltd -idc -jul -3kg -1927 -142 -ma -surface -##76 -##ク -~~~ -304 -mall -eps -146 -green -##59 -map -space -donald -v2 -sodu -##light -1931 -148 -1700 -まて -310 -reserved -htm -##han -##57 -2d -178 -mod -##ise -##tions -152 -ti -##shi -doc -1933 -icp -055 -wang -##ram -shopping -aug -##pi -##well -now -wam -b2 -からお -##hu -236 -1928 -##gb -266 -f2 -##93 -153 -mix -##ef -##uan -bwl -##plus -##res -core -##ess -tea -5℃ -hktvmall -nhk -##ate -list -##ese -301 -feb -4m -inn -ての -nov -159 -12345 -daniel -##ci -pass -##bet -##nk -coffee -202 -ssl -airbnb -##ute -fbi -woshipm -skype -ea -cg -sp -##fc -##www -yes -edge -alt -007 -##94 -fpga -##ght -##gs -iso9001 -さい -##ile -##wood -##uo -image -lin -icon -american -##em -1932 -set -says -##king -##tive -blogger -##74 -なと -256 -147 -##ox -##zy -##red -##ium -##lf -nokia -claire -##リ -##ding -november -lohas -##500 -##tic -##マ -##cs -##ある -##che -##ire -##gy -##ult -db -january -win -##カ -166 -road -ptt -##ま -##つ -198 -##fa -##mer -anna -pchome -はい -udn -ef -420 -##time -##tte -2030 -##ア -g20 -white -かかります -1929 -308 -garden -eleven -di -##おります -chen -309b -777 -172 -young -cosplay -ちてない -4500 -bat -##123 -##tra -##ては -kindle -npc -steve -etc -##ern -##| -call -xperia -ces -travel -sk -s7 -##ous -1934 -##int -みいたたけます -183 
-edu -file -cho -qr -##car -##our -186 -##ant -##d -eric -1914 -rends -##jo -##する -mastercard -##2000 -kb -##min -290 -##ino -vista -##ris -##ud -jack -2400 -##set -169 -pos -1912 -##her -##ou -taipei -しく -205 -beta -##ませんか -232 -##fi -express -255 -body -##ill -aphojoy -user -december -meiki -##ick -tweet -richard -##av -##ᆫ -iphone6 -##dd -ちてすか -views -##mark -321 -pd -##00 -times -##▲ -level -##ash -10g -point -5l -##ome -208 -koreanmall -##ak -george -q2 -206 -wma -tcp -##200 -スタッフ -full -mlb -##lle -##watch -tm -run -179 -911 -smith -business -##und -1919 -color -##tal -222 -171 -##less -moon -4399 -##rl -update -pcb -shop -499 -157 -little -なし -end -##mhz -van -dsp -easy -660 -##house -##key -history -##o -oh -##001 -##hy -##web -oem -let -was -##2009 -##gg -review -##wan -182 -##°c -203 -uc -title -##val -united -233 -2021 -##ons -doi -trivago -overdope -sbs -##ance -##ち -grand -special -573032185 -imf -216 -wx17house -##so -##ーム -audi -##he -london -william -##rp -##ake -science -beach -cfa -amp -ps4 -880 -##800 -##link -##hp -crm -ferragamo -bell -make -##eng -195 -under -zh -photos -2300 -##style -##ント -via -176 -da -##gi -company -i7 -##ray -thomas -370 -ufo -i5 -##max -plc -ben -back -research -8g -173 -mike -##pc -##ッフ -september -189 -##ace -vps -february -167 -pantos -wp -lisa -1921 -★★ -jquery -night -long -offer -##berg -##news -1911 -##いて -ray -fks -wto -せます -over -164 -340 -##all -##rus -1924 -##888 -##works -blogtitle -loftpermalink -##→ -187 -martin -test -ling -km -##め -15000 -fda -v3 -##ja -##ロ -wedding -かある -outlet -family -##ea -をこ -##top -story -##ness -salvatore -##lu -204 -swift -215 -room -している -oracle -##ul -1925 -sam -b2c -week -pi -rock -##のは -##a -##けと -##ean -##300 -##gle -cctv -after -chinese -##back -powered -x2 -##tan -1918 -##nes -##イン -canon -only -181 -##zi -##las -say -##oe -184 -##sd -221 -##bot -##world -##zo -sky -made -top100 -just -1926 -pmi -802 -234 -gap -##vr -177 -les -174 -▲topoct -ball -vogue -vi -ing -ofweek -cos 
-##list -##ort -▲topmay -##なら -##lon -として -last -##tc -##of -##bus -##gen -real -eva -##コ -a3 -nas -##lie -##ria -##coin -##bt -▲topapr -his -212 -cat -nata -vive -health -⋯⋯ -drive -sir -▲topmar -du -cup -##カー -##ook -##よう -##sy -alex -msg -tour -しました -3ce -##word -193 -ebooks -r8 -block -318 -##より -2200 -nice -pvp -207 -months -1905 -rewards -##ther -1917 -0800 -##xi -##チ -##sc -micro -850 -gg -blogfp -op -1922 -daily -m1 -264 -true -##bb -ml -##tar -##のお -##ky -anthony -196 -253 -##yo -state -218 -##ara -##aa -##rc -##tz -##ston -より -gear -##eo -##ade -ge -see -1923 -##win -##ura -ss -heart -##den -##ita -down -##sm -el -png -2100 -610 -rakuten -whatsapp -bay -dream -add -##use -680 -311 -pad -gucci -mpv -##ode -##fo -island -▲topjun -##▼ -223 -jason -214 -chicago -##❤ -しの -##hone -io -##れる -##ことか -sogo -be2 -##ology -990 -cloud -vcd -##con -2~3 -##ford -##joy -##kb -##こさいます -##rade -but -##ach -docker -##ful -rfid -ul -##ase -hit -ford -##star -580 -##○ -11 -a2 -sdk -reading -edited -##are -cmos -##mc -238 -siri -light -##ella -##ため -bloomberg -##read -pizza -##ison -jimmy -##vm -college -node -journal -ba -18k -##play -245 -##cer -20 -magic -##yu -191 -jump -288 -tt -##ings -asr -##lia -3200 -step5 -network -##cd -mc -いします -1234 -pixstyleme -273 -##600 -2800 -money -★★★★★ -1280 -12 -430 -bl -みの -act -##tus -tokyo -##rial -##life -emba -##ae -saas -tcs -##rk -##wang -summer -##sp -ko -##ving -390 -premium -##その -netflix -##ヒ -uk -mt -##lton -right -frank -two -209 -える -##ple -##cal -021 -##んな -##sen -##ville -hold -nexus -dd -##ius -てお -##mah -##なく -tila -zero -820 -ce -##tin -resort -##ws -charles -old -p10 -5d -report -##360 -##ru -##には -bus -vans -lt -##est -pv -##レ -links -rebecca -##ツ -##dm -azure -##365 -きな -limited -bit -4gb -##mon -1910 -moto -##eam -213 -1913 -var -eos -なとの -226 -blogspot -された -699 -e3 -dos -dm -fc -##ments -##ik -##kw -boy -##bin -##ata -960 -er -##せ -219 -##vin -##tu -##ula -194 -##∥ -station -##ろ -##ature -835 -files -zara -hdr 
-top10 -nature -950 -magazine -s6 -marriott -##シ -avira -case -##っと -tab -##ran -tony -##home -oculus -im -##ral -jean -saint -cry -307 -rosie -##force -##ini -ice -##bert -のある -##nder -##mber -pet -2600 -##◆ -plurk -▲topdec -##sis -00kg -▲topnov -720 -##ence -tim -##ω -##nc -##ても -##name -log -ips -great -ikea -malaysia -unix -##イト -3600 -##ncy -##nie -12000 -akb48 -##ye -##oid -404 -##chi -##いた -oa -xuehai -##1000 -##orm -##rf -275 -さん -##ware -##リー -980 -ho -##pro -text -##era -560 -bob -227 -##ub -##2008 -8891 -scp -avi -##zen -2022 -mi -wu -museum -qvod -apache -lake -jcb -▲topaug -★★★ -ni -##hr -hill -302 -ne -weibo -490 -ruby -##ーシ -##ヶ -##row -4d -▲topjul -iv -##ish -github -306 -mate -312 -##スト -##lot -##ane -andrew -のハイト -##tina -t1 -rf -ed2k -##vel -##900 -way -final -りの -ns -5a -705 -197 -##メ -sweet -bytes -##ene -▲topjan -231 -##cker -##2007 -##px -100g -topapp -229 -helpapp -rs -low -14k -g4g -care -630 -ldquo -あり -##fork -leave -rm -edition -##gan -##zon -##qq -▲topsep -##google -##ism -gold -224 -explorer -##zer -toyota -category -select -visual -##labels -restaurant -##md -posts -s1 -##ico -もっと -angelababy -123456 -217 -sports -s3 -mbc -1915 -してくたさい -shell -x86 -candy -##new -kbs -face -xl -470 -##here -4a -swissinfo -v8 -▲topfeb -dram -##ual -##vice -3a -##wer -sport -q1 -ios10 -public -int -card -##c -ep -au -rt -##れた -1080 -bill -##mll -kim -30 -460 -wan -##uk -##ミ -x3 -298 -0t -scott -##ming -239 -e5 -##3d -h7n9 -worldcat -brown -##あります -##vo -##led -##580 -##ax -249 -410 -##ert -paris -##~6 -polo -925 -##lr -599 -##ナ -capital -##hing -bank -cv -1g -##chat -##s -##たい -adc -##ule -2m -##e -digital -hotmail -268 -##pad -870 -bbq -quot -##ring -before -wali -##まて -mcu -2k -2b -という -costco -316 -north -333 -switch -##city -##p -philips -##mann -management -panasonic -##cl -##vd -##ping -##rge -alice -##lk -##ましょう -css3 -##ney -vision -alpha -##ular -##400 -##tter -lz -にお -##ありません -mode -gre -1916 -pci -##tm -237 -1~2 -##yan -##そ -について -##let -##キ 
-work -war -coach -ah -mary -##ᅵ -huang -##pt -a8 -pt -follow -##berry -1895 -##ew -a5 -ghost -##ション -##wn -##og -south -##code -girls -##rid -action -villa -git -r11 -table -games -##cket -error -##anonymoussaid -##ag -here -##ame -##gc -qa -##■ -##lis -gmp -##gin -vmalife -##cher -yu -wedding -##tis -demo -dragon -530 -soho -social -bye -##rant -river -orz -acer -325 -##↑ -##ース -##ats -261 -del -##ven -440 -ups -##ように -##ター -305 -value -macd -yougou -##dn -661 -##ano -ll -##urt -##rent -continue -script -##wen -##ect -paper -263 -319 -shift -##chel -##フト -##cat -258 -x5 -fox -243 -##さん -car -aaa -##blog -loading -##yn -##tp -kuso -799 -si -sns -イカせるテンマ -ヒンクテンマ3 -rmb -vdc -forest -central -prime -help -ultra -##rmb -##ような -241 -square -688 -##しい -のないフロクに -##field -##reen -##ors -##ju -c1 -start -510 -##air -##map -cdn -##wo -cba -stephen -m8 -100km -##get -opera -##base -##ood -vsa -com™ -##aw -##ail -251 -なのて -count -t2 -##ᅡ -##een -2700 -hop -##gp -vsc -tree -##eg -##ose -816 -285 -##ories -##shop -alphago -v4 -1909 -simon -##ᆼ -fluke62max -zip -スホンサー -##sta -louis -cr -bas -##~10 -bc -##yer -hadoop -##ube -##wi -1906 -0755 -hola -##low -place -centre -5v -d3 -##fer -252 -##750 -##media -281 -540 -0l -exchange -262 -series -##ハー -##san -eb -##bank -##k -q3 -##nge -##mail -take -##lp -259 -1888 -client -east -cache -event -vincent -##ールを -きを -##nse -sui -855 -adchoice -##и -##stry -##なたの -246 -##zone -ga -apps -sea -##ab -248 -cisco -##タ -##rner -kymco -##care -dha -##pu -##yi -minkoff -royal -p1 -への -annie -269 -collection -kpi -playstation -257 -になります -866 -bh -##bar -queen -505 -radio -1904 -andy -armani -##xy -manager -iherb -##ery -##share -spring -raid -johnson -1908 -##ob -volvo -hall -##ball -v6 -our -taylor -##hk -bi -242 -##cp -kate -bo -water -technology -##rie -サイトは -277 -##ona -##sl -hpv -303 -gtx -hip -rdquo -jayz -stone -##lex -##rum -namespace -##やり -620 -##ale -##atic -des -##erson -##ql -##ves -##type -enter -##この -##てきます -d2 -##168 -##mix 
-##bian -との -a9 -jj -ky -##lc -access -movie -##hc -リストに -tower -##ration -##mit -ます -##nch -ua -tel -prefix -##o2 -1907 -##point -1901 -ott -~10 -##http -##ury -baidu -##ink -member -##logy -bigbang -nownews -##js -##shot -##tb -##こと -247 -eba -##tics -##lus -ける -v5 -spark -##ama -there -##ions -god -##lls -##down -hiv -##ress -burberry -day2 -##kv -◆◆ -jeff -related -film -edit -joseph -283 -##ark -cx -32gb -order -g9 -30000 -##ans -##tty -s5 -##bee -かあります -thread -xr -buy -sh -005 -land -spotify -mx -##ari -276 -##verse -×email -sf -why -##ことて -244 -7headlines -nego -sunny -dom -exo -401 -666 -positioning -fit -rgb -##tton -278 -kiss -alexa -adam -lp -みリストを -##g -mp -##ties -##llow -amy -##du -np -002 -institute -271 -##rth -##lar -2345 -590 -##des -sidebar -15 -imax -site -##cky -##kit -##ime -##009 -season -323 -##fun -##ンター -##ひ -gogoro -a7 -pu -lily -fire -twd600 -##ッセーシを -いて -##vis -30ml -##cture -##をお -information -##オ -close -friday -##くれる -yi -nick -てすか -##tta -##tel -6500 -##lock -cbd -economy -254 -かお -267 -tinker -double -375 -8gb -voice -##app -oops -channel -today -985 -##right -raw -xyz -##+ -jim -edm -##cent -7500 -supreme -814 -ds -##its -##asia -dropbox -##てすか -##tti -books -272 -100ml -##tle -##ller -##ken -##more -##boy -sex -309 -##dom -t3 -##ider -##なります -##unch -1903 -810 -feel -5500 -##かった -##put -により -s2 -mo -##gh -men -ka -amoled -div -##tr -##n1 -port -howard -##tags -ken -dnf -##nus -adsense -##а -ide -##へ -buff -thunder -##town -##ique -has -##body -auto -pin -##erry -tee -てした -295 -number -##the -##013 -object -psp -cool -udnbkk -16gb -##mic -miui -##tro -most -r2 -##alk -##nity -1880 -±0 -##いました -428 -s4 -law -version -##oa -n1 -sgs -docomo -##tf -##ack -henry -fc2 -##ded -##sco -##014 -##rite -286 -0mm -linkedin -##ada -##now -wii -##ndy -ucbug -##◎ -sputniknews -legalminer -##ika -##xp -2gb -##bu -q10 -oo -b6 -come -##rman -cheese -ming -maker -##gm -nikon -##fig -ppi -kelly -##ります -jchere -てきます -ted -md -003 -fgo -tech -##tto 
-dan -soc -##gl -##len -hair -earth -640 -521 -img -##pper -##a1 -##てきる -##ロク -acca -##ition -##ference -suite -##ig -outlook -##mond -##cation -398 -##pr -279 -101vip -358 -##999 -282 -64gb -3800 -345 -airport -##over -284 -##おり -jones -##ith -lab -##su -##いるのて -co2 -town -piece -##llo -no1 -vmware -24h -##qi -focus -reader -##admin -##ora -tb -false -##log -1898 -know -lan -838 -##ces -f4 -##ume -motel -stop -##oper -na -flickr -netcomponents -##af -##─ -pose -williams -local -##ound -##cg -##site -##iko -いお -274 -5m -gsm -con -##ath -1902 -friends -##hip -cell -317 -##rey -780 -cream -##cks -012 -##dp -facebooktwitterpinterestgoogle -sso -324 -shtml -song -swiss -##mw -##キンク -lumia -xdd -string -tiffany -522 -marc -られた -insee -russell -sc -dell -##ations -ok -camera -289 -##vs -##flow -##late -classic -287 -##nter -stay -g1 -mtv -512 -##ever -##lab -##nger -qe -sata -ryan -d1 -50ml -cms -##cing -su -292 -3300 -editor -296 -##nap -security -sunday -association -##ens -##700 -##bra -acg -##かり -sofascore -とは -mkv -##ign -jonathan -gary -build -labels -##oto -tesla -moba -qi -gohappy -general -ajax -1024 -##かる -サイト -society -##test -##urs -wps -fedora -##ich -mozilla -328 -##480 -##dr -usa -urn -##lina -##r -grace -##die -##try -##ader -1250 -##なり -elle -570 -##chen -##ᆯ -price -##ten -uhz -##ough -eq -##hen -states -push -session -balance -wow -506 -##cus -##py -when -##ward -##ep -34e -wong -library -prada -##サイト -##cle -running -##ree -313 -ck -date -q4 -##ctive -##ool -##> -mk -##ira -##163 -388 -die -secret -rq -dota -buffet -は1ヶ -e6 -##ez -pan -368 -ha -##card -##cha -2a -##さ -alan -day3 -eye -f3 -##end -france -keep -adi -rna -tvbs -##ala -solo -nova -##え -##tail -##ょう -support -##ries -##なる -##ved -base -copy -iis -fps -##ways -hero -hgih -profile -fish -mu -ssh -entertainment -chang -##wd -click -cake -##ond -pre -##tom -kic -pixel -##ov -##fl -product -6a -##pd -dear -##gate -es -yumi -audio -##² -##sky -echo -bin -where -##ture -329 -##ape -find -sap 
-isis -##なと -nand -##101 -##load -##ream -band -a6 -525 -never -##post -festival -50cm -##we -555 -guide -314 -zenfone -##ike -335 -gd -forum -jessica -strong -alexander -##ould -software -allen -##ious -program -360° -else -lohasthree -##gar -することかてきます -please -##れます -rc -##ggle -##ric -bim -50000 -##own -eclipse -355 -brian -3ds -##side -061 -361 -##other -##ける -##tech -##ator -485 -engine -##ged -##t -plaza -##fit -cia -ngo -westbrook -shi -tbs -50mm -##みませんか -sci -291 -reuters -##ily -contextlink -##hn -af -##cil -bridge -very -##cel -1890 -cambridge -##ize -15g -##aid -##data -790 -frm -##head -award -butler -##sun -meta -##mar -america -ps3 -puma -pmid -##すか -lc -670 -kitchen -##lic -オーフン5 -きなしソフトサーヒス -そして -day1 -future -★★★★ -##text -##page -##rris -pm1 -##ket -fans -##っています -1001 -christian -bot -kids -trackback -##hai -c3 -display -##hl -n2 -1896 -idea -さんも -##sent -airmail -##ug -##men -pwm -けます -028 -##lution -369 -852 -awards -schemas -354 -asics -wikipedia -font -##tional -##vy -c2 -293 -##れている -##dget -##ein -っている -contact -pepper -スキル -339 -##~5 -294 -##uel -##ument -730 -##hang -みてす -q5 -##sue -rain -##ndi -wei -swatch -##cept -わせ -331 -popular -##ste -##tag -p2 -501 -trc -1899 -##west -##live -justin -honda -ping -messenger -##rap -v9 -543 -##とは -unity -appqq -はすへて -025 -leo -##tone -##テ -##ass -uniqlo -##010 -502 -her -jane -memory -moneydj -##tical -human -12306 -していると -##m2 -coc -miacare -##mn -tmt -##core -vim -kk -##may -fan -target -use -too -338 -435 -2050 -867 -737 -fast -##2c -services -##ope -omega -energy -##わ -pinkoi -1a -##なから -##rain -jackson -##ement -##シャンルの -374 -366 -そんな -p9 -rd -##ᆨ -1111 -##tier -##vic -zone -##│ -385 -690 -dl -isofix -cpa -m4 -322 -kimi -めて -davis -##lay -lulu -##uck -050 -weeks -qs -##hop -920 -##n -ae -##ear -~5 -eia -405 -##fly -korea -jpeg -boost -##ship -small -##リア -1860 -eur -297 -425 -valley -##iel -simple -##ude -rn -k2 -##ena -されます -non -patrick -しているから -##ナー -feed -5757 -30g -process -well -qqmei 
-##thing -they -aws -lu -pink -##ters -##kin -または -board -##vertisement -wine -##ien -unicode -##dge -r1 -359 -##tant -いを -##twitter -##3c -cool1 -される -##れて -##l -isp -##012 -standard -45㎡2 -402 -##150 -matt -##fu -326 -##iner -googlemsn -pixnetfacebookyahoo -##ラン -x7 -886 -##uce -メーカー -sao -##ev -##きました -##file -9678 -403 -xddd -shirt -6l -##rio -##hat -3mm -givenchy -ya -bang -##lio -monday -crystal -ロクイン -##abc -336 -head -890 -ubuntuforumwikilinuxpastechat -##vc -##~20 -##rity -cnc -7866 -ipv6 -null -1897 -##ost -yang -imsean -tiger -##fet -##ンス -352 -##= -dji -327 -ji -maria -##come -##んて -foundation -3100 -##beth -##なった -1m -601 -active -##aft -##don -3p -sr -349 -emma -##khz -living -415 -353 -1889 -341 -709 -457 -sas -x6 -##face -pptv -x4 -##mate -han -sophie -##jing -337 -fifa -##mand -other -sale -inwedding -##gn -てきちゃいます -##mmy -##pmlast -bad -nana -nbc -してみてくたさいね -なとはお -##wu -##かあります -##あ -note7 -single -##340 -せからこ -してくたさい♪この -しにはとんとんワークケートを -するとあなたにもっとマッチした -ならワークケートへ -もみつかっちゃうかも -ワークケートの -##bel -window -##dio -##ht -union -age -382 -14 -##ivity -##y -コメント -domain -neo -##isa -##lter -5k -f5 -steven -##cts -powerpoint -tft -self -g2 -ft -##テル -zol -##act -mwc -381 -343 -もう -nbapop -408 -てある -eds -ace -##room -previous -author -tomtom -il -##ets -hu -financial -☆☆☆ -っています -bp -5t -chi -1gb -##hg -fairmont -cross -008 -gay -h2 -function -##けて -356 -also -1b -625 -##ータ -##raph -1894 -3~5 -##ils -i3 -334 -avenue -##host -による -##bon -##tsu -message -navigation -50g -fintech -h6 -##ことを -8cm -##ject -##vas -##firm -credit -##wf -xxxx -form -##nor -##space -huawei -plan -json -sbl -##dc -machine -921 -392 -wish -##120 -##sol -windows7 -edward -##ために -development -washington -##nsis -lo -818 -##sio -##ym -##bor -planet -##~8 -##wt -ieee -gpa -##めて -camp -ann -gm -##tw -##oka -connect -##rss -##work -##atus -wall -chicken -soul -2mm -##times -fa -##other -##cord -009 -##eep -hitachi -gui -harry -##pan -e1 -disney -##press -##ーション -wind -386 -frigidaire -##tl 
-liu -hsu -332 -basic -von -ev -いた -てきる -スホンサーサイト -learning -##ull -expedia -archives -change -##wei -santa -cut -ins -6gb -turbo -brand -cf1 -508 -004 -return -747 -##rip -h1 -##nis -##をこ -128gb -##にお -3t -application -しており -emc -rx -##oon -384 -quick -412 -15058 -wilson -wing -chapter -##bug -beyond -##cms -##dar -##oh -zoom -e2 -trip -sb -##nba -rcep -342 -aspx -ci -080 -gc -gnu -める -##count -advanced -dance -dv -##url -##ging -367 -8591 -am09 -shadow -battle -346 -##i -##cia -##という -emily -##のてす -##tation -host -ff -techorz -sars -##mini -##mporary -##ering -nc -4200 -798 -##next -cma -##mbps -##gas -##ift -##dot -##ィ -455 -##~17 -amana -##りの -426 -##ros -ir -00㎡1 -##eet -##ible -##↓ -710 -ˋ▽ˊ -##aka -dcs -iq -##v -l1 -##lor -maggie -##011 -##iu -588 -##~1 -830 -##gt -1tb -articles -create -##burg -##iki -database -fantasy -##rex -##cam -dlc -dean -##you -hard -path -gaming -victoria -maps -cb -##lee -##itor -overchicstoretvhome -systems -##xt -416 -p3 -sarah -760 -##nan -407 -486 -x9 -install -second -626 -##ann -##ph -##rcle -##nic -860 -##nar -ec -##とう -768 -metro -chocolate -##rian -~4 -##table -##しています -skin -##sn -395 -mountain -##0mm -inparadise -6m -7x24 -ib -4800 -##jia -eeworld -creative -g5 -g3 -357 -parker -ecfa -village -からの -18000 -sylvia -サーヒス -hbl -##ques -##onsored -##x2 -##きます -##v4 -##tein -ie6 -383 -##stack -389 -ver -##ads -##baby -sound -bbe -##110 -##lone -##uid -ads -022 -gundam -351 -thinkpad -006 -scrum -match -##ave -mems -##470 -##oy -##なりました -##talk -glass -lamigo -span -##eme -job -##a5 -jay -wade -kde -498 -##lace -ocean -tvg -##covery -##r3 -##ners -##rea -junior -think -##aine -cover -##ision -##sia -↓↓ -##bow -msi -413 -458 -406 -##love -711 -801 -soft -z2 -##pl -456 -1840 -mobil -mind -##uy -427 -nginx -##oi -めた -##rr -6221 -##mple -##sson -##ーシてす -371 -##nts -91tv -comhd -crv3000 -##uard -1868 -397 -deep -lost -field -gallery -##bia -rate -spf -redis -traction -930 -icloud -011 -なら -fe -jose -372 -##tory -into -sohu -fx -899 
-379 -kicstart2 -##hia -すく -##~3 -##sit -ra -24 -##walk -##xure -500g -##pact -pacific -xa -natural -carlo -##250 -##walker -1850 -##can -cto -gigi -516 -##サー -pen -##hoo -ob -matlab -##b -##yy -13913459 -##iti -mango -##bbs -sense -c5 -oxford -##ニア -walker -jennifer -##ola -course -##bre -701 -##pus -##rder -lucky -075 -##ぁ -ivy -なお -##nia -sotheby -side -##ugh -joy -##orage -##ush -##bat -##dt -364 -r9 -##2d -##gio -511 -country -wear -##lax -##~7 -##moon -393 -seven -study -411 -348 -lonzo -8k -##ェ -evolution -##イフ -##kk -gs -kd -##レス -arduino -344 -b12 -##lux -arpg -##rdon -cook -##x5 -dark -five -##als -##ida -とても -sign -362 -##ちの -something -20mm -##nda -387 -##posted -fresh -tf -1870 -422 -cam -##mine -##skip -##form -##ssion -education -394 -##tee -dyson -stage -##jie -want -##night -epson -pack -あります -##ppy -テリヘル -##█ -wd -##eh -##rence -left -##lvin -golden -mhz -discovery -##trix -##n2 -loft -##uch -##dra -##sse -speed -~1 -1mdb -sorry -welcome -##urn -wave -gaga -##lmer -teddy -##160 -トラックハック -せよ -611 -##f2016 -378 -rp -##sha -rar -##あなたに -##きた -840 -holiday -##ュー -373 -074 -##vg -##nos -##rail -gartner -gi -6p -##dium -kit -488 -b3 -eco -##ろう -20g -sean -##stone -autocad -nu -##np -f16 -write -029 -m5 -##ias -images -atp -##dk -fsm -504 -1350 -ve -52kb -##xxx -##のに -##cake -414 -unit -lim -ru -1v -##ification -published -angela -16g -analytics -ak -##q -##nel -gmt -##icon -again -##₂ -##bby -ios11 -445 -かこさいます -waze -いてす -##ハ -9985 -##ust -##ティー -framework -##007 -iptv -delete -52sykb -cl -wwdc -027 -30cm -##fw -##ての -1389 -##xon -brandt -##ses -##dragon -tc -vetements -anne -monte -modern -official -##へて -##ere -##nne -##oud -もちろん -50 -etnews -##a2 -##graphy -421 -863 -##ちゃん -444 -##rtex -##てお -l2 -##gma -mount -ccd -たと -archive -morning -tan -ddos -e7 -##ホ -day4 -##ウ -gis -453 -its -495 -factory -bruce -pg -##ito -ってくたさい -guest -cdma -##lling -536 -n3 -しかし -3~4 -mega -eyes -ro -13 -women -dac -church -##jun -singapore -##facebook -6991 -starbucks 
-##tos -##stin -##shine -zen -##mu -tina -20℃ -1893 -##たけて -503 -465 -request -##gence -qt -##っ -1886 -347 -363 -q7 -##zzi -diary -##tore -409 -##ead -468 -cst -##osa -canada -agent -va -##jiang -##ちは -##ーク -##lam -sg -##nix -##sday -##よって -g6 -##master -bing -##zl -charlie -16 -8mm -nb40 -##ーン -thai -##ルフ -ln284ct -##itz -##2f -bonnie -##food -##lent -originals -##stro -##lts -418 -∟∣ -##bscribe -children -ntd -yesstyle -##かも -hmv -##tment -d5 -2cm -arts -sms -##pn -##я -##いい -topios9 -539 -lifestyle -virtual -##ague -xz -##deo -muji -024 -unt -##nnis -##ᅩ -faq1 -1884 -396 -##ette -fly -64㎡ -はしめまして -441 -curry -##pop -のこ -release -##← -##◆◆ -##cast -073 -ありな -500ml -##ews -5c -##stle -ios7 -##ima -787 -dog -lenovo -##r4 -roger -013 -cbs -vornado -100m -417 -##desk -##クok -##ald -1867 -9595 -2900 -##van -oil -##x -some -break -common -##jy -##lines -g7 -twice -419 -ella -nano -belle -にこ -##mes -##self -##note -jb -##ことかてきます -benz -##との -##ova -451 -save -##wing -##ますのて -kai -りは -##hua -##rect -rainer -##unge -448 -##0m -adsl -##かな -guestname -##uma -##kins -##zu -tokichoi -##price -county -##med -##mus -rmk -391 -address -vm -えて -openload -##group -##hin -##iginal -amg -urban -##oz -jobs -emi -##public -beautiful -##sch -album -##dden -##bell -jerry -works -hostel -miller -##drive -##rmin -##10 -376 -boot -828 -##370 -##fx -##cm~ -1885 -##nome -##ctionary -##oman -##lish -##cr -##hm -433 -##how -432 -francis -xi -c919 -b5 -evernote -##uc -vga -##3000 -coupe -##urg -##cca -##uality -019 -6g -れる -multi -##また -##ett -em -hey -##ani -##tax -##rma -inside -than -740 -leonnhurt -##jin -ict -れた -bird -notes -200mm -くの -##dical -##lli -result -442 -iu -ee -438 -smap -gopro -##last -yin -pure -998 -32g -けた -5kg -##dan -##rame -mama -##oot -bean -marketing -##hur -2l -bella -sync -xuite -##ground -515 -discuz -##getrelax -##ince -##bay -##5s -cj -##イス -gmat -apt -##pass -jing -##rix -c4 -rich -##とても -niusnews -##ello -bag -770 -##eting -##mobile -18 -culture -015 -##のてすか 
-377 -1020 -area -##ience -616 -details -gp -universal -silver -dit -はお -private -ddd -u11 -kanshu -##ified -fung -##nny -dx -##520 -tai -475 -023 -##fr -##lean -3s -##pin -429 -##rin -25000 -ly -rick -##bility -usb3 -banner -##baru -##gion -metal -dt -vdf -1871 -karl -qualcomm -bear -1010 -oldid -ian -jo -##tors -population -##ernel -1882 -mmorpg -##mv -##bike -603 -##© -ww -friend -##ager -exhibition -##del -##pods -fpx -structure -##free -##tings -kl -##rley -##copyright -##mma -california -3400 -orange -yoga -4l -canmake -honey -##anda -##コメント -595 -nikkie -##ルハイト -dhl -publishing -##mall -##gnet -20cm -513 -##クセス -##┅ -e88 -970 -##dog -fishbase -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##+ -##, -##- -##. -##/ -##: -##; -##< -##= -##> -##? -##@ -##[ -##\ -##] -##^ -##_ -##{ -##| -##} -##~ -##£ -##¤ -##¥ -##§ -##« -##± -##³ -##µ -##· -##¹ -##º -##» -##¼ -##ß -##æ -##÷ -##ø -##đ -##ŋ -##ɔ -##ə -##ɡ -##ʰ -##ˇ -##ˈ -##ˊ -##ˋ -##ˍ -##ː -##˙ -##˚ -##ˢ -##α -##β -##γ -##δ -##ε -##η -##θ -##ι -##κ -##λ -##μ -##ν -##ο -##π -##ρ -##ς -##σ -##τ -##υ -##φ -##χ -##ψ -##б -##в -##г -##д -##е -##ж -##з -##к -##л -##м -##н -##о -##п -##р -##с -##т -##у -##ф -##х -##ц -##ч -##ш -##ы -##ь -##і -##ا -##ب -##ة -##ت -##د -##ر -##س -##ع -##ل -##م -##ن -##ه -##و -##ي -##۩ -##ก -##ง -##น -##ม -##ย -##ร -##อ -##า -##เ -##๑ -##་ -##ღ -##ᄀ -##ᄁ -##ᄂ -##ᄃ -##ᄅ -##ᄆ -##ᄇ -##ᄈ -##ᄉ -##ᄋ -##ᄌ -##ᄎ -##ᄏ -##ᄐ -##ᄑ -##ᄒ -##ᅢ -##ᅣ -##ᅥ -##ᅦ -##ᅧ -##ᅨ -##ᅪ -##ᅬ -##ᅭ -##ᅮ -##ᅯ -##ᅲ -##ᅳ -##ᅴ -##ᆷ -##ᆸ -##ᆺ -##ᆻ -##ᗜ -##ᵃ -##ᵉ -##ᵍ -##ᵏ -##ᵐ -##ᵒ -##ᵘ -##‖ -##„ -##† -##• -##‥ -##‧ -##
 -##‰ -##′ -##″ -##‹ -##› -##※ -##‿ -##⁄ -##ⁱ -##⁺ -##ⁿ -##₁ -##₃ -##₄ -##€ -##№ -##ⅰ -##ⅱ -##ⅲ -##ⅳ -##ⅴ -##↔ -##↗ -##↘ -##⇒ -##∀ -##− -##∕ -##∙ -##√ -##∞ -##∟ -##∠ -##∣ -##∩ -##∮ -##∶ -##∼ -##∽ -##≈ -##≒ -##≡ -##≤ -##≥ -##≦ -##≧ -##≪ -##≫ -##⊙ -##⋅ -##⋈ -##⋯ -##⌒ -##① -##② -##③ -##④ -##⑤ -##⑥ -##⑦ -##⑧ -##⑨ -##⑩ -##⑴ -##⑵ -##⑶ -##⑷ -##⑸ -##⒈ -##⒉ -##⒊ -##⒋ -##ⓒ -##ⓔ -##ⓘ -##━ -##┃ -##┆ -##┊ -##┌ -##└ -##├ -##┣ -##═ -##║ -##╚ -##╞ -##╠ -##╭ -##╮ -##╯ -##╰ -##╱ -##╳ -##▂ -##▃ -##▅ -##▇ -##▉ -##▋ -##▌ -##▍ -##▎ -##□ -##▪ -##▫ -##▬ -##△ -##▶ -##► -##▽ -##◇ -##◕ -##◠ -##◢ -##◤ -##☀ -##☕ -##☞ -##☺ -##☼ -##♀ -##♂ -##♠ -##♡ -##♣ -##♦ -##♫ -##♬ -##✈ -##✔ -##✕ -##✖ -##✦ -##✨ -##✪ -##✰ -##✿ -##❀ -##➜ -##➤ -##⦿ -##、 -##。 -##〃 -##々 -##〇 -##〈 -##〉 -##《 -##》 -##「 -##」 -##『 -##』 -##【 -##】 -##〓 -##〔 -##〕 -##〖 -##〗 -##〜 -##〝 -##〞 -##ぃ -##ぇ -##ぬ -##ふ -##ほ -##む -##ゃ -##ゅ -##ゆ -##ょ -##゜ -##ゝ -##ァ -##ゥ -##エ -##ォ -##ケ -##サ -##セ -##ソ -##ッ -##ニ -##ヌ -##ネ -##ノ -##ヘ -##モ -##ャ -##ヤ -##ュ -##ユ -##ョ -##ヨ -##ワ -##ヲ -##・ -##ヽ -##ㄅ -##ㄆ -##ㄇ -##ㄉ -##ㄋ -##ㄌ -##ㄍ -##ㄎ -##ㄏ -##ㄒ -##ㄚ -##ㄛ -##ㄞ -##ㄟ -##ㄢ -##ㄤ -##ㄥ -##ㄧ -##ㄨ -##ㆍ -##㈦ -##㊣ -##㗎 -##一 -##丁 -##七 -##万 -##丈 -##三 -##上 -##下 -##不 -##与 -##丐 -##丑 -##专 -##且 -##丕 -##世 -##丘 -##丙 -##业 -##丛 -##东 -##丝 -##丞 -##丟 -##両 -##丢 -##两 -##严 -##並 -##丧 -##丨 -##个 -##丫 -##中 -##丰 -##串 -##临 -##丶 -##丸 -##丹 -##为 -##主 -##丼 -##丽 -##举 -##丿 -##乂 -##乃 -##久 -##么 -##义 -##之 -##乌 -##乍 -##乎 -##乏 -##乐 -##乒 -##乓 -##乔 -##乖 -##乗 -##乘 -##乙 -##乜 -##九 -##乞 -##也 -##习 -##乡 -##书 -##乩 -##买 -##乱 -##乳 -##乾 -##亀 -##亂 -##了 -##予 -##争 -##事 -##二 -##于 -##亏 -##云 -##互 -##五 -##井 -##亘 -##亙 -##亚 -##些 -##亜 -##亞 -##亟 -##亡 -##亢 -##交 -##亥 -##亦 -##产 -##亨 -##亩 -##享 -##京 -##亭 -##亮 -##亲 -##亳 -##亵 -##人 -##亿 -##什 -##仁 -##仃 -##仄 -##仅 -##仆 -##仇 -##今 -##介 -##仍 -##从 -##仏 -##仑 -##仓 -##仔 -##仕 -##他 -##仗 -##付 -##仙 -##仝 -##仞 -##仟 -##代 -##令 -##以 -##仨 -##仪 -##们 -##仮 -##仰 -##仲 -##件 -##价 -##任 -##份 -##仿 -##企 -##伉 -##伊 -##伍 -##伎 -##伏 -##伐 -##休 -##伕 -##众 -##优 -##伙 -##会 -##伝 -##伞 -##伟 -##传 -##伢 -##伤 -##伦 -##伪 -##伫 -##伯 -##估 
-##伴 -##伶 -##伸 -##伺 -##似 -##伽 -##佃 -##但 -##佇 -##佈 -##位 -##低 -##住 -##佐 -##佑 -##体 -##佔 -##何 -##佗 -##佘 -##余 -##佚 -##佛 -##作 -##佝 -##佞 -##佟 -##你 -##佢 -##佣 -##佤 -##佥 -##佩 -##佬 -##佯 -##佰 -##佳 -##併 -##佶 -##佻 -##佼 -##使 -##侃 -##侄 -##來 -##侈 -##例 -##侍 -##侏 -##侑 -##侖 -##侗 -##供 -##依 -##侠 -##価 -##侣 -##侥 -##侦 -##侧 -##侨 -##侬 -##侮 -##侯 -##侵 -##侶 -##侷 -##便 -##係 -##促 -##俄 -##俊 -##俎 -##俏 -##俐 -##俑 -##俗 -##俘 -##俚 -##保 -##俞 -##俟 -##俠 -##信 -##俨 -##俩 -##俪 -##俬 -##俭 -##修 -##俯 -##俱 -##俳 -##俸 -##俺 -##俾 -##倆 -##倉 -##個 -##倌 -##倍 -##倏 -##們 -##倒 -##倔 -##倖 -##倘 -##候 -##倚 -##倜 -##借 -##倡 -##値 -##倦 -##倩 -##倪 -##倫 -##倬 -##倭 -##倶 -##债 -##值 -##倾 -##偃 -##假 -##偈 -##偉 -##偌 -##偎 -##偏 -##偕 -##做 -##停 -##健 -##側 -##偵 -##偶 -##偷 -##偻 -##偽 -##偿 -##傀 -##傅 -##傍 -##傑 -##傘 -##備 -##傚 -##傢 -##傣 -##傥 -##储 -##傩 -##催 -##傭 -##傲 -##傳 -##債 -##傷 -##傻 -##傾 -##僅 -##働 -##像 -##僑 -##僕 -##僖 -##僚 -##僥 -##僧 -##僭 -##僮 -##僱 -##僵 -##價 -##僻 -##儀 -##儂 -##億 -##儆 -##儉 -##儋 -##儒 -##儕 -##儘 -##償 -##儡 -##優 -##儲 -##儷 -##儼 -##儿 -##兀 -##允 -##元 -##兄 -##充 -##兆 -##兇 -##先 -##光 -##克 -##兌 -##免 -##児 -##兑 -##兒 -##兔 -##兖 -##党 -##兜 -##兢 -##入 -##內 -##全 -##兩 -##八 -##公 -##六 -##兮 -##兰 -##共 -##兲 -##关 -##兴 -##兵 -##其 -##具 -##典 -##兹 -##养 -##兼 -##兽 -##冀 -##内 -##円 -##冇 -##冈 -##冉 -##冊 -##册 -##再 -##冏 -##冒 -##冕 -##冗 -##写 -##军 -##农 -##冠 -##冢 -##冤 -##冥 -##冨 -##冪 -##冬 -##冯 -##冰 -##冲 -##决 -##况 -##冶 -##冷 -##冻 -##冼 -##冽 -##冾 -##净 -##凄 -##准 -##凇 -##凈 -##凉 -##凋 -##凌 -##凍 -##减 -##凑 -##凛 -##凜 -##凝 -##几 -##凡 -##凤 -##処 -##凪 -##凭 -##凯 -##凰 -##凱 -##凳 -##凶 -##凸 -##凹 -##出 -##击 -##函 -##凿 -##刀 -##刁 -##刃 -##分 -##切 -##刈 -##刊 -##刍 -##刎 -##刑 -##划 -##列 -##刘 -##则 -##刚 -##创 -##初 -##删 -##判 -##別 -##刨 -##利 -##刪 -##别 -##刮 -##到 -##制 -##刷 -##券 -##刹 -##刺 -##刻 -##刽 -##剁 -##剂 -##剃 -##則 -##剉 -##削 -##剋 -##剌 -##前 -##剎 -##剐 -##剑 -##剔 -##剖 -##剛 -##剜 -##剝 -##剣 -##剤 -##剥 -##剧 -##剩 -##剪 -##副 -##割 -##創 -##剷 -##剽 -##剿 -##劃 -##劇 -##劈 -##劉 -##劊 -##劍 -##劏 -##劑 -##力 -##劝 -##办 -##功 -##加 -##务 -##劣 -##动 -##助 -##努 -##劫 -##劭 -##励 -##劲 -##劳 -##労 -##劵 -##効 -##劾 -##势 -##勁 -##勃 -##勇 -##勉 -##勋 -##勐 -##勒 -##動 -##勖 -##勘 -##務 -##勛 
-##勝 -##勞 -##募 -##勢 -##勤 -##勧 -##勳 -##勵 -##勸 -##勺 -##勻 -##勾 -##勿 -##匀 -##包 -##匆 -##匈 -##匍 -##匐 -##匕 -##化 -##北 -##匙 -##匝 -##匠 -##匡 -##匣 -##匪 -##匮 -##匯 -##匱 -##匹 -##区 -##医 -##匾 -##匿 -##區 -##十 -##千 -##卅 -##升 -##午 -##卉 -##半 -##卍 -##华 -##协 -##卑 -##卒 -##卓 -##協 -##单 -##卖 -##南 -##単 -##博 -##卜 -##卞 -##卟 -##占 -##卡 -##卢 -##卤 -##卦 -##卧 -##卫 -##卮 -##卯 -##印 -##危 -##即 -##却 -##卵 -##卷 -##卸 -##卻 -##卿 -##厂 -##厄 -##厅 -##历 -##厉 -##压 -##厌 -##厕 -##厘 -##厚 -##厝 -##原 -##厢 -##厥 -##厦 -##厨 -##厩 -##厭 -##厮 -##厲 -##厳 -##去 -##县 -##叁 -##参 -##參 -##又 -##叉 -##及 -##友 -##双 -##反 -##収 -##发 -##叔 -##取 -##受 -##变 -##叙 -##叛 -##叟 -##叠 -##叡 -##叢 -##口 -##古 -##句 -##另 -##叨 -##叩 -##只 -##叫 -##召 -##叭 -##叮 -##可 -##台 -##叱 -##史 -##右 -##叵 -##叶 -##号 -##司 -##叹 -##叻 -##叼 -##叽 -##吁 -##吃 -##各 -##吆 -##合 -##吉 -##吊 -##吋 -##同 -##名 -##后 -##吏 -##吐 -##向 -##吒 -##吓 -##吕 -##吖 -##吗 -##君 -##吝 -##吞 -##吟 -##吠 -##吡 -##否 -##吧 -##吨 -##吩 -##含 -##听 -##吭 -##吮 -##启 -##吱 -##吳 -##吴 -##吵 -##吶 -##吸 -##吹 -##吻 -##吼 -##吽 -##吾 -##呀 -##呂 -##呃 -##呆 -##呈 -##告 -##呋 -##呎 -##呐 -##呓 -##呕 -##呗 -##员 -##呛 -##呜 -##呢 -##呤 -##呦 -##周 -##呱 -##呲 -##味 -##呵 -##呷 -##呸 -##呻 -##呼 -##命 -##咀 -##咁 -##咂 -##咄 -##咆 -##咋 -##和 -##咎 -##咏 -##咐 -##咒 -##咔 -##咕 -##咖 -##咗 -##咘 -##咙 -##咚 -##咛 -##咣 -##咤 -##咦 -##咧 -##咨 -##咩 -##咪 -##咫 -##咬 -##咭 -##咯 -##咱 -##咲 -##咳 -##咸 -##咻 -##咽 -##咿 -##哀 -##品 -##哂 -##哄 -##哆 -##哇 -##哈 -##哉 -##哋 -##哌 -##响 -##哎 -##哏 -##哐 -##哑 -##哒 -##哔 -##哗 -##哟 -##員 -##哥 -##哦 -##哧 -##哨 -##哩 -##哪 -##哭 -##哮 -##哲 -##哺 -##哼 -##哽 -##唁 -##唄 -##唆 -##唇 -##唉 -##唏 -##唐 -##唑 -##唔 -##唠 -##唤 -##唧 -##唬 -##售 -##唯 -##唰 -##唱 -##唳 -##唷 -##唸 -##唾 -##啃 -##啄 -##商 -##啉 -##啊 -##問 -##啓 -##啕 -##啖 -##啜 -##啞 -##啟 -##啡 -##啤 -##啥 -##啦 -##啧 -##啪 -##啫 -##啬 -##啮 -##啰 -##啱 -##啲 -##啵 -##啶 -##啷 -##啸 -##啻 -##啼 -##啾 -##喀 -##喂 -##喃 -##善 -##喆 -##喇 -##喉 -##喊 -##喋 -##喎 -##喏 -##喔 -##喘 -##喙 -##喚 -##喜 -##喝 -##喟 -##喧 -##喪 -##喫 -##喬 -##單 -##喰 -##喱 -##喲 -##喳 -##喵 -##営 -##喷 -##喹 -##喺 -##喻 -##喽 -##嗅 -##嗆 -##嗇 -##嗎 -##嗑 -##嗒 -##嗓 -##嗔 -##嗖 -##嗚 -##嗜 -##嗝 -##嗟 -##嗡 -##嗣 -##嗤 -##嗦 -##嗨 -##嗪 -##嗬 -##嗯 -##嗰 -##嗲 -##嗳 -##嗶 -##嗷 -##嗽 
-##嘀 -##嘅 -##嘆 -##嘈 -##嘉 -##嘌 -##嘍 -##嘎 -##嘔 -##嘖 -##嘗 -##嘘 -##嘚 -##嘛 -##嘜 -##嘞 -##嘟 -##嘢 -##嘣 -##嘤 -##嘧 -##嘩 -##嘭 -##嘮 -##嘯 -##嘰 -##嘱 -##嘲 -##嘴 -##嘶 -##嘸 -##嘹 -##嘻 -##嘿 -##噁 -##噌 -##噎 -##噓 -##噔 -##噗 -##噙 -##噜 -##噠 -##噢 -##噤 -##器 -##噩 -##噪 -##噬 -##噱 -##噴 -##噶 -##噸 -##噹 -##噻 -##噼 -##嚀 -##嚇 -##嚎 -##嚏 -##嚐 -##嚓 -##嚕 -##嚟 -##嚣 -##嚥 -##嚨 -##嚮 -##嚴 -##嚷 -##嚼 -##囂 -##囉 -##囊 -##囍 -##囑 -##囔 -##囗 -##囚 -##四 -##囝 -##回 -##囟 -##因 -##囡 -##团 -##団 -##囤 -##囧 -##囪 -##囫 -##园 -##困 -##囱 -##囲 -##図 -##围 -##囹 -##固 -##国 -##图 -##囿 -##圃 -##圄 -##圆 -##圈 -##國 -##圍 -##圏 -##園 -##圓 -##圖 -##團 -##圜 -##土 -##圣 -##圧 -##在 -##圩 -##圭 -##地 -##圳 -##场 -##圻 -##圾 -##址 -##坂 -##均 -##坊 -##坍 -##坎 -##坏 -##坐 -##坑 -##块 -##坚 -##坛 -##坝 -##坞 -##坟 -##坠 -##坡 -##坤 -##坦 -##坨 -##坪 -##坯 -##坳 -##坵 -##坷 -##垂 -##垃 -##垄 -##型 -##垒 -##垚 -##垛 -##垠 -##垢 -##垣 -##垦 -##垩 -##垫 -##垭 -##垮 -##垵 -##埂 -##埃 -##埋 -##城 -##埔 -##埕 -##埗 -##域 -##埠 -##埤 -##埵 -##執 -##埸 -##培 -##基 -##埼 -##堀 -##堂 -##堃 -##堅 -##堆 -##堇 -##堑 -##堕 -##堙 -##堡 -##堤 -##堪 -##堯 -##堰 -##報 -##場 -##堵 -##堺 -##堿 -##塊 -##塌 -##塑 -##塔 -##塗 -##塘 -##塚 -##塞 -##塢 -##塩 -##填 -##塬 -##塭 -##塵 -##塾 -##墀 -##境 -##墅 -##墉 -##墊 -##墒 -##墓 -##増 -##墘 -##墙 -##墜 -##增 -##墟 -##墨 -##墩 -##墮 -##墳 -##墻 -##墾 -##壁 -##壅 -##壆 -##壇 -##壊 -##壑 -##壓 -##壕 -##壘 -##壞 -##壟 -##壢 -##壤 -##壩 -##士 -##壬 -##壮 -##壯 -##声 -##売 -##壳 -##壶 -##壹 -##壺 -##壽 -##处 -##备 -##変 -##复 -##夏 -##夔 -##夕 -##外 -##夙 -##多 -##夜 -##够 -##夠 -##夢 -##夥 -##大 -##天 -##太 -##夫 -##夭 -##央 -##夯 -##失 -##头 -##夷 -##夸 -##夹 -##夺 -##夾 -##奂 -##奄 -##奇 -##奈 -##奉 -##奋 -##奎 -##奏 -##奐 -##契 -##奔 -##奕 -##奖 -##套 -##奘 -##奚 -##奠 -##奢 -##奥 -##奧 -##奪 -##奬 -##奮 -##女 -##奴 -##奶 -##奸 -##她 -##好 -##如 -##妃 -##妄 -##妆 -##妇 -##妈 -##妊 -##妍 -##妒 -##妓 -##妖 -##妘 -##妙 -##妝 -##妞 -##妣 -##妤 -##妥 -##妨 -##妩 -##妪 -##妮 -##妲 -##妳 -##妹 -##妻 -##妾 -##姆 -##姉 -##姊 -##始 -##姍 -##姐 -##姑 -##姒 -##姓 -##委 -##姗 -##姚 -##姜 -##姝 -##姣 -##姥 -##姦 -##姨 -##姪 -##姫 -##姬 -##姹 -##姻 -##姿 -##威 -##娃 -##娄 -##娅 -##娆 -##娇 -##娉 -##娑 -##娓 -##娘 -##娛 -##娜 -##娟 -##娠 -##娣 -##娥 -##娩 -##娱 -##娲 -##娴 -##娶 -##娼 -##婀 -##婁 -##婆 -##婉 -##婊 -##婕 -##婚 -##婢 -##婦 
-##婧 -##婪 -##婭 -##婴 -##婵 -##婶 -##婷 -##婺 -##婿 -##媒 -##媚 -##媛 -##媞 -##媧 -##媲 -##媳 -##媽 -##媾 -##嫁 -##嫂 -##嫉 -##嫌 -##嫑 -##嫔 -##嫖 -##嫘 -##嫚 -##嫡 -##嫣 -##嫦 -##嫩 -##嫲 -##嫵 -##嫻 -##嬅 -##嬉 -##嬌 -##嬗 -##嬛 -##嬢 -##嬤 -##嬪 -##嬰 -##嬴 -##嬷 -##嬸 -##嬿 -##孀 -##孃 -##子 -##孑 -##孔 -##孕 -##孖 -##字 -##存 -##孙 -##孚 -##孛 -##孜 -##孝 -##孟 -##孢 -##季 -##孤 -##学 -##孩 -##孪 -##孫 -##孬 -##孰 -##孱 -##孳 -##孵 -##學 -##孺 -##孽 -##孿 -##宁 -##它 -##宅 -##宇 -##守 -##安 -##宋 -##完 -##宏 -##宓 -##宕 -##宗 -##官 -##宙 -##定 -##宛 -##宜 -##宝 -##实 -##実 -##宠 -##审 -##客 -##宣 -##室 -##宥 -##宦 -##宪 -##宫 -##宮 -##宰 -##害 -##宴 -##宵 -##家 -##宸 -##容 -##宽 -##宾 -##宿 -##寂 -##寄 -##寅 -##密 -##寇 -##富 -##寐 -##寒 -##寓 -##寛 -##寝 -##寞 -##察 -##寡 -##寢 -##寥 -##實 -##寧 -##寨 -##審 -##寫 -##寬 -##寮 -##寰 -##寵 -##寶 -##寸 -##对 -##寺 -##寻 -##导 -##対 -##寿 -##封 -##専 -##射 -##将 -##將 -##專 -##尉 -##尊 -##尋 -##對 -##導 -##小 -##少 -##尔 -##尕 -##尖 -##尘 -##尚 -##尝 -##尤 -##尧 -##尬 -##就 -##尴 -##尷 -##尸 -##尹 -##尺 -##尻 -##尼 -##尽 -##尾 -##尿 -##局 -##屁 -##层 -##屄 -##居 -##屆 -##屈 -##屉 -##届 -##屋 -##屌 -##屍 -##屎 -##屏 -##屐 -##屑 -##展 -##屜 -##属 -##屠 -##屡 -##屢 -##層 -##履 -##屬 -##屯 -##山 -##屹 -##屿 -##岀 -##岁 -##岂 -##岌 -##岐 -##岑 -##岔 -##岖 -##岗 -##岘 -##岙 -##岚 -##岛 -##岡 -##岩 -##岫 -##岬 -##岭 -##岱 -##岳 -##岷 -##岸 -##峇 -##峋 -##峒 -##峙 -##峡 -##峤 -##峥 -##峦 -##峨 -##峪 -##峭 -##峯 -##峰 -##峴 -##島 -##峻 -##峽 -##崁 -##崂 -##崆 -##崇 -##崎 -##崑 -##崔 -##崖 -##崗 -##崙 -##崛 -##崧 -##崩 -##崭 -##崴 -##崽 -##嵇 -##嵊 -##嵋 -##嵌 -##嵐 -##嵘 -##嵩 -##嵬 -##嵯 -##嶂 -##嶄 -##嶇 -##嶋 -##嶙 -##嶺 -##嶼 -##嶽 -##巅 -##巍 -##巒 -##巔 -##巖 -##川 -##州 -##巡 -##巢 -##工 -##左 -##巧 -##巨 -##巩 -##巫 -##差 -##己 -##已 -##巳 -##巴 -##巷 -##巻 -##巽 -##巾 -##巿 -##币 -##市 -##布 -##帅 -##帆 -##师 -##希 -##帐 -##帑 -##帕 -##帖 -##帘 -##帚 -##帛 -##帜 -##帝 -##帥 -##带 -##帧 -##師 -##席 -##帮 -##帯 -##帰 -##帳 -##帶 -##帷 -##常 -##帼 -##帽 -##幀 -##幂 -##幄 -##幅 -##幌 -##幔 -##幕 -##幟 -##幡 -##幢 -##幣 -##幫 -##干 -##平 -##年 -##并 -##幸 -##幹 -##幺 -##幻 -##幼 -##幽 -##幾 -##广 -##庁 -##広 -##庄 -##庆 -##庇 -##床 -##序 -##庐 -##库 -##应 -##底 -##庖 -##店 -##庙 -##庚 -##府 -##庞 -##废 -##庠 -##度 -##座 -##庫 -##庭 -##庵 -##庶 -##康 -##庸 -##庹 -##庾 -##廁 -##廂 -##廃 -##廈 -##廉 -##廊 -##廓 
-##廖 -##廚 -##廝 -##廟 -##廠 -##廢 -##廣 -##廬 -##廳 -##延 -##廷 -##建 -##廿 -##开 -##弁 -##异 -##弃 -##弄 -##弈 -##弊 -##弋 -##式 -##弑 -##弒 -##弓 -##弔 -##引 -##弗 -##弘 -##弛 -##弟 -##张 -##弥 -##弦 -##弧 -##弩 -##弭 -##弯 -##弱 -##張 -##強 -##弹 -##强 -##弼 -##弾 -##彅 -##彆 -##彈 -##彌 -##彎 -##归 -##当 -##录 -##彗 -##彙 -##彝 -##形 -##彤 -##彥 -##彦 -##彧 -##彩 -##彪 -##彫 -##彬 -##彭 -##彰 -##影 -##彷 -##役 -##彻 -##彼 -##彿 -##往 -##征 -##径 -##待 -##徇 -##很 -##徉 -##徊 -##律 -##後 -##徐 -##徑 -##徒 -##従 -##徕 -##得 -##徘 -##徙 -##徜 -##從 -##徠 -##御 -##徨 -##復 -##循 -##徬 -##微 -##徳 -##徴 -##徵 -##德 -##徹 -##徼 -##徽 -##心 -##必 -##忆 -##忌 -##忍 -##忏 -##忐 -##忑 -##忒 -##忖 -##志 -##忘 -##忙 -##応 -##忠 -##忡 -##忤 -##忧 -##忪 -##快 -##忱 -##念 -##忻 -##忽 -##忿 -##怀 -##态 -##怂 -##怅 -##怆 -##怎 -##怏 -##怒 -##怔 -##怕 -##怖 -##怙 -##怜 -##思 -##怠 -##怡 -##急 -##怦 -##性 -##怨 -##怪 -##怯 -##怵 -##总 -##怼 -##恁 -##恃 -##恆 -##恋 -##恍 -##恐 -##恒 -##恕 -##恙 -##恚 -##恢 -##恣 -##恤 -##恥 -##恨 -##恩 -##恪 -##恫 -##恬 -##恭 -##息 -##恰 -##恳 -##恵 -##恶 -##恸 -##恺 -##恻 -##恼 -##恿 -##悄 -##悅 -##悉 -##悌 -##悍 -##悔 -##悖 -##悚 -##悟 -##悠 -##患 -##悦 -##您 -##悩 -##悪 -##悬 -##悯 -##悱 -##悲 -##悴 -##悵 -##悶 -##悸 -##悻 -##悼 -##悽 -##情 -##惆 -##惇 -##惊 -##惋 -##惑 -##惕 -##惘 -##惚 -##惜 -##惟 -##惠 -##惡 -##惦 -##惧 -##惨 -##惩 -##惫 -##惬 -##惭 -##惮 -##惯 -##惰 -##惱 -##想 -##惴 -##惶 -##惹 -##惺 -##愁 -##愆 -##愈 -##愉 -##愍 -##意 -##愕 -##愚 -##愛 -##愜 -##感 -##愣 -##愤 -##愧 -##愫 -##愷 -##愿 -##慄 -##慈 -##態 -##慌 -##慎 -##慑 -##慕 -##慘 -##慚 -##慟 -##慢 -##慣 -##慧 -##慨 -##慫 -##慮 -##慰 -##慳 -##慵 -##慶 -##慷 -##慾 -##憂 -##憊 -##憋 -##憎 -##憐 -##憑 -##憔 -##憚 -##憤 -##憧 -##憨 -##憩 -##憫 -##憬 -##憲 -##憶 -##憾 -##懂 -##懇 -##懈 -##應 -##懊 -##懋 -##懑 -##懒 -##懦 -##懲 -##懵 -##懶 -##懷 -##懸 -##懺 -##懼 -##懾 -##懿 -##戀 -##戈 -##戊 -##戌 -##戍 -##戎 -##戏 -##成 -##我 -##戒 -##戕 -##或 -##战 -##戚 -##戛 -##戟 -##戡 -##戦 -##截 -##戬 -##戮 -##戰 -##戲 -##戳 -##戴 -##戶 -##户 -##戸 -##戻 -##戾 -##房 -##所 -##扁 -##扇 -##扈 -##扉 -##手 -##才 -##扎 -##扑 -##扒 -##打 -##扔 -##払 -##托 -##扛 -##扣 -##扦 -##执 -##扩 -##扪 -##扫 -##扬 -##扭 -##扮 -##扯 -##扰 -##扱 -##扳 -##扶 -##批 -##扼 -##找 -##承 -##技 -##抄 -##抉 -##把 -##抑 -##抒 -##抓 -##投 -##抖 -##抗 -##折 -##抚 -##抛 -##抜 -##択 -##抟 -##抠 -##抡 -##抢 -##护 
-##报 -##抨 -##披 -##抬 -##抱 -##抵 -##抹 -##押 -##抽 -##抿 -##拂 -##拄 -##担 -##拆 -##拇 -##拈 -##拉 -##拋 -##拌 -##拍 -##拎 -##拐 -##拒 -##拓 -##拔 -##拖 -##拗 -##拘 -##拙 -##拚 -##招 -##拜 -##拟 -##拡 -##拢 -##拣 -##拥 -##拦 -##拧 -##拨 -##择 -##括 -##拭 -##拮 -##拯 -##拱 -##拳 -##拴 -##拷 -##拼 -##拽 -##拾 -##拿 -##持 -##挂 -##指 -##挈 -##按 -##挎 -##挑 -##挖 -##挙 -##挚 -##挛 -##挝 -##挞 -##挟 -##挠 -##挡 -##挣 -##挤 -##挥 -##挨 -##挪 -##挫 -##振 -##挲 -##挹 -##挺 -##挽 -##挾 -##捂 -##捅 -##捆 -##捉 -##捋 -##捌 -##捍 -##捎 -##捏 -##捐 -##捕 -##捞 -##损 -##捡 -##换 -##捣 -##捧 -##捨 -##捩 -##据 -##捱 -##捲 -##捶 -##捷 -##捺 -##捻 -##掀 -##掂 -##掃 -##掇 -##授 -##掉 -##掌 -##掏 -##掐 -##排 -##掖 -##掘 -##掙 -##掛 -##掠 -##採 -##探 -##掣 -##接 -##控 -##推 -##掩 -##措 -##掬 -##掰 -##掲 -##掳 -##掴 -##掷 -##掸 -##掺 -##揀 -##揃 -##揄 -##揆 -##揉 -##揍 -##描 -##提 -##插 -##揖 -##揚 -##換 -##握 -##揣 -##揩 -##揪 -##揭 -##揮 -##援 -##揶 -##揸 -##揹 -##揽 -##搀 -##搁 -##搂 -##搅 -##損 -##搏 -##搐 -##搓 -##搔 -##搖 -##搗 -##搜 -##搞 -##搡 -##搪 -##搬 -##搭 -##搵 -##搶 -##携 -##搽 -##摀 -##摁 -##摄 -##摆 -##摇 -##摈 -##摊 -##摒 -##摔 -##摘 -##摞 -##摟 -##摧 -##摩 -##摯 -##摳 -##摸 -##摹 -##摺 -##摻 -##撂 -##撃 -##撅 -##撇 -##撈 -##撐 -##撑 -##撒 -##撓 -##撕 -##撚 -##撞 -##撤 -##撥 -##撩 -##撫 -##撬 -##播 -##撮 -##撰 -##撲 -##撵 -##撷 -##撸 -##撻 -##撼 -##撿 -##擀 -##擁 -##擂 -##擄 -##擅 -##擇 -##擊 -##擋 -##操 -##擎 -##擒 -##擔 -##擘 -##據 -##擞 -##擠 -##擡 -##擢 -##擦 -##擬 -##擰 -##擱 -##擲 -##擴 -##擷 -##擺 -##擼 -##擾 -##攀 -##攏 -##攒 -##攔 -##攘 -##攙 -##攜 -##攝 -##攞 -##攢 -##攣 -##攤 -##攥 -##攪 -##攫 -##攬 -##支 -##收 -##攸 -##改 -##攻 -##放 -##政 -##故 -##效 -##敌 -##敍 -##敎 -##敏 -##救 -##敕 -##敖 -##敗 -##敘 -##教 -##敛 -##敝 -##敞 -##敢 -##散 -##敦 -##敬 -##数 -##敲 -##整 -##敵 -##敷 -##數 -##斂 -##斃 -##文 -##斋 -##斌 -##斎 -##斐 -##斑 -##斓 -##斗 -##料 -##斛 -##斜 -##斟 -##斡 -##斤 -##斥 -##斧 -##斩 -##斫 -##斬 -##断 -##斯 -##新 -##斷 -##方 -##於 -##施 -##旁 -##旃 -##旅 -##旋 -##旌 -##旎 -##族 -##旖 -##旗 -##无 -##既 -##日 -##旦 -##旧 -##旨 -##早 -##旬 -##旭 -##旮 -##旱 -##时 -##旷 -##旺 -##旻 -##昀 -##昂 -##昆 -##昇 -##昉 -##昊 -##昌 -##明 -##昏 -##易 -##昔 -##昕 -##昙 -##星 -##映 -##春 -##昧 -##昨 -##昭 -##是 -##昱 -##昴 -##昵 -##昶 -##昼 -##显 -##晁 -##時 -##晃 -##晉 -##晋 -##晌 -##晏 -##晒 -##晓 -##晔 -##晕 -##晖 -##晗 -##晚 -##晝 -##晞 -##晟 
-##晤 -##晦 -##晨 -##晩 -##普 -##景 -##晰 -##晴 -##晶 -##晷 -##智 -##晾 -##暂 -##暄 -##暇 -##暈 -##暉 -##暌 -##暐 -##暑 -##暖 -##暗 -##暝 -##暢 -##暧 -##暨 -##暫 -##暮 -##暱 -##暴 -##暸 -##暹 -##曄 -##曆 -##曇 -##曉 -##曖 -##曙 -##曜 -##曝 -##曠 -##曦 -##曬 -##曰 -##曲 -##曳 -##更 -##書 -##曹 -##曼 -##曾 -##替 -##最 -##會 -##月 -##有 -##朋 -##服 -##朐 -##朔 -##朕 -##朗 -##望 -##朝 -##期 -##朦 -##朧 -##木 -##未 -##末 -##本 -##札 -##朮 -##术 -##朱 -##朴 -##朵 -##机 -##朽 -##杀 -##杂 -##权 -##杆 -##杈 -##杉 -##李 -##杏 -##材 -##村 -##杓 -##杖 -##杜 -##杞 -##束 -##杠 -##条 -##来 -##杨 -##杭 -##杯 -##杰 -##東 -##杳 -##杵 -##杷 -##杼 -##松 -##板 -##极 -##构 -##枇 -##枉 -##枋 -##析 -##枕 -##林 -##枚 -##果 -##枝 -##枢 -##枣 -##枪 -##枫 -##枭 -##枯 -##枰 -##枱 -##枳 -##架 -##枷 -##枸 -##柄 -##柏 -##某 -##柑 -##柒 -##染 -##柔 -##柘 -##柚 -##柜 -##柞 -##柠 -##柢 -##查 -##柩 -##柬 -##柯 -##柱 -##柳 -##柴 -##柵 -##査 -##柿 -##栀 -##栃 -##栄 -##栅 -##标 -##栈 -##栉 -##栋 -##栎 -##栏 -##树 -##栓 -##栖 -##栗 -##校 -##栩 -##株 -##样 -##核 -##根 -##格 -##栽 -##栾 -##桀 -##桁 -##桂 -##桃 -##桅 -##框 -##案 -##桉 -##桌 -##桎 -##桐 -##桑 -##桓 -##桔 -##桜 -##桠 -##桡 -##桢 -##档 -##桥 -##桦 -##桧 -##桨 -##桩 -##桶 -##桿 -##梁 -##梅 -##梆 -##梏 -##梓 -##梗 -##條 -##梟 -##梢 -##梦 -##梧 -##梨 -##梭 -##梯 -##械 -##梳 -##梵 -##梶 -##检 -##棂 -##棄 -##棉 -##棋 -##棍 -##棒 -##棕 -##棗 -##棘 -##棚 -##棟 -##棠 -##棣 -##棧 -##森 -##棱 -##棲 -##棵 -##棹 -##棺 -##椁 -##椅 -##椋 -##植 -##椎 -##椒 -##検 -##椪 -##椭 -##椰 -##椹 -##椽 -##椿 -##楂 -##楊 -##楓 -##楔 -##楚 -##楝 -##楞 -##楠 -##楣 -##楨 -##楫 -##業 -##楮 -##極 -##楷 -##楸 -##楹 -##楼 -##楽 -##概 -##榄 -##榆 -##榈 -##榉 -##榔 -##榕 -##榖 -##榛 -##榜 -##榨 -##榫 -##榭 -##榮 -##榱 -##榴 -##榷 -##榻 -##槁 -##槃 -##構 -##槌 -##槍 -##槎 -##槐 -##槓 -##様 -##槛 -##槟 -##槤 -##槭 -##槲 -##槳 -##槻 -##槽 -##槿 -##樁 -##樂 -##樊 -##樑 -##樓 -##標 -##樞 -##樟 -##模 -##樣 -##権 -##横 -##樫 -##樯 -##樱 -##樵 -##樸 -##樹 -##樺 -##樽 -##樾 -##橄 -##橇 -##橋 -##橐 -##橘 -##橙 -##機 -##橡 -##橢 -##橫 -##橱 -##橹 -##橼 -##檀 -##檄 -##檎 -##檐 -##檔 -##檗 -##檜 -##檢 -##檬 -##檯 -##檳 -##檸 -##檻 -##櫃 -##櫚 -##櫛 -##櫥 -##櫸 -##櫻 -##欄 -##權 -##欒 -##欖 -##欠 -##次 -##欢 -##欣 -##欧 -##欲 -##欸 -##欺 -##欽 -##款 -##歆 -##歇 -##歉 -##歌 -##歎 -##歐 -##歓 -##歙 -##歛 -##歡 -##止 -##正 -##此 -##步 -##武 -##歧 -##歩 -##歪 -##歯 -##歲 -##歳 -##歴 -##歷 
-##歸 -##歹 -##死 -##歼 -##殁 -##殃 -##殆 -##殇 -##殉 -##殊 -##残 -##殒 -##殓 -##殖 -##殘 -##殞 -##殡 -##殤 -##殭 -##殯 -##殲 -##殴 -##段 -##殷 -##殺 -##殼 -##殿 -##毀 -##毁 -##毂 -##毅 -##毆 -##毋 -##母 -##毎 -##每 -##毒 -##毓 -##比 -##毕 -##毗 -##毘 -##毙 -##毛 -##毡 -##毫 -##毯 -##毽 -##氈 -##氏 -##氐 -##民 -##氓 -##气 -##氖 -##気 -##氙 -##氛 -##氟 -##氡 -##氢 -##氣 -##氤 -##氦 -##氧 -##氨 -##氪 -##氫 -##氮 -##氯 -##氰 -##氲 -##水 -##氷 -##永 -##氹 -##氾 -##汀 -##汁 -##求 -##汆 -##汇 -##汉 -##汎 -##汐 -##汕 -##汗 -##汙 -##汛 -##汝 -##汞 -##江 -##池 -##污 -##汤 -##汨 -##汩 -##汪 -##汰 -##汲 -##汴 -##汶 -##汹 -##決 -##汽 -##汾 -##沁 -##沂 -##沃 -##沅 -##沈 -##沉 -##沌 -##沏 -##沐 -##沒 -##沓 -##沖 -##沙 -##沛 -##沟 -##没 -##沢 -##沣 -##沥 -##沦 -##沧 -##沪 -##沫 -##沭 -##沮 -##沱 -##河 -##沸 -##油 -##治 -##沼 -##沽 -##沾 -##沿 -##況 -##泄 -##泉 -##泊 -##泌 -##泓 -##法 -##泗 -##泛 -##泞 -##泠 -##泡 -##波 -##泣 -##泥 -##注 -##泪 -##泫 -##泮 -##泯 -##泰 -##泱 -##泳 -##泵 -##泷 -##泸 -##泻 -##泼 -##泽 -##泾 -##洁 -##洄 -##洋 -##洒 -##洗 -##洙 -##洛 -##洞 -##津 -##洩 -##洪 -##洮 -##洱 -##洲 -##洵 -##洶 -##洸 -##洹 -##活 -##洼 -##洽 -##派 -##流 -##浃 -##浄 -##浅 -##浆 -##浇 -##浊 -##测 -##济 -##浏 -##浑 -##浒 -##浓 -##浔 -##浙 -##浚 -##浜 -##浣 -##浦 -##浩 -##浪 -##浬 -##浮 -##浯 -##浴 -##海 -##浸 -##涂 -##涅 -##涇 -##消 -##涉 -##涌 -##涎 -##涓 -##涔 -##涕 -##涙 -##涛 -##涝 -##涞 -##涟 -##涠 -##涡 -##涣 -##涤 -##润 -##涧 -##涨 -##涩 -##涪 -##涮 -##涯 -##液 -##涵 -##涸 -##涼 -##涿 -##淀 -##淄 -##淅 -##淆 -##淇 -##淋 -##淌 -##淑 -##淒 -##淖 -##淘 -##淙 -##淚 -##淞 -##淡 -##淤 -##淦 -##淨 -##淩 -##淪 -##淫 -##淬 -##淮 -##深 -##淳 -##淵 -##混 -##淹 -##淺 -##添 -##淼 -##清 -##済 -##渉 -##渊 -##渋 -##渍 -##渎 -##渐 -##渔 -##渗 -##渙 -##渚 -##減 -##渝 -##渠 -##渡 -##渣 -##渤 -##渥 -##渦 -##温 -##測 -##渭 -##港 -##渲 -##渴 -##游 -##渺 -##渾 -##湃 -##湄 -##湊 -##湍 -##湖 -##湘 -##湛 -##湟 -##湧 -##湫 -##湮 -##湯 -##湳 -##湾 -##湿 -##満 -##溃 -##溅 -##溉 -##溏 -##源 -##準 -##溜 -##溝 -##溟 -##溢 -##溥 -##溧 -##溪 -##溫 -##溯 -##溱 -##溴 -##溶 -##溺 -##溼 -##滁 -##滂 -##滄 -##滅 -##滇 -##滋 -##滌 -##滑 -##滓 -##滔 -##滕 -##滙 -##滚 -##滝 -##滞 -##滟 -##满 -##滢 -##滤 -##滥 -##滦 -##滨 -##滩 -##滬 -##滯 -##滲 -##滴 -##滷 -##滸 -##滾 -##滿 -##漁 -##漂 -##漆 -##漉 -##漏 -##漓 -##演 -##漕 -##漠 -##漢 -##漣 -##漩 -##漪 -##漫 -##漬 -##漯 -##漱 -##漲 -##漳 -##漸 -##漾 -##漿 -##潆 
-##潇 -##潋 -##潍 -##潑 -##潔 -##潘 -##潛 -##潜 -##潞 -##潟 -##潢 -##潤 -##潦 -##潧 -##潭 -##潮 -##潰 -##潴 -##潸 -##潺 -##潼 -##澀 -##澄 -##澆 -##澈 -##澍 -##澎 -##澗 -##澜 -##澡 -##澤 -##澧 -##澱 -##澳 -##澹 -##激 -##濁 -##濂 -##濃 -##濑 -##濒 -##濕 -##濘 -##濛 -##濟 -##濠 -##濡 -##濤 -##濫 -##濬 -##濮 -##濯 -##濱 -##濺 -##濾 -##瀅 -##瀆 -##瀉 -##瀋 -##瀏 -##瀑 -##瀕 -##瀘 -##瀚 -##瀛 -##瀝 -##瀞 -##瀟 -##瀧 -##瀨 -##瀬 -##瀰 -##瀾 -##灌 -##灏 -##灑 -##灘 -##灝 -##灞 -##灣 -##火 -##灬 -##灭 -##灯 -##灰 -##灵 -##灶 -##灸 -##灼 -##災 -##灾 -##灿 -##炀 -##炁 -##炅 -##炉 -##炊 -##炎 -##炒 -##炔 -##炕 -##炖 -##炙 -##炜 -##炫 -##炬 -##炭 -##炮 -##炯 -##炳 -##炷 -##炸 -##点 -##為 -##炼 -##炽 -##烁 -##烂 -##烃 -##烈 -##烊 -##烏 -##烘 -##烙 -##烛 -##烟 -##烤 -##烦 -##烧 -##烨 -##烩 -##烫 -##烬 -##热 -##烯 -##烷 -##烹 -##烽 -##焉 -##焊 -##焕 -##焖 -##焗 -##焘 -##焙 -##焚 -##焜 -##無 -##焦 -##焯 -##焰 -##焱 -##然 -##焼 -##煅 -##煉 -##煊 -##煌 -##煎 -##煒 -##煖 -##煙 -##煜 -##煞 -##煤 -##煥 -##煦 -##照 -##煨 -##煩 -##煮 -##煲 -##煸 -##煽 -##熄 -##熊 -##熏 -##熒 -##熔 -##熙 -##熟 -##熠 -##熨 -##熬 -##熱 -##熵 -##熹 -##熾 -##燁 -##燃 -##燄 -##燈 -##燉 -##燊 -##燎 -##燒 -##燔 -##燕 -##燙 -##燜 -##營 -##燥 -##燦 -##燧 -##燭 -##燮 -##燴 -##燻 -##燼 -##燿 -##爆 -##爍 -##爐 -##爛 -##爪 -##爬 -##爭 -##爰 -##爱 -##爲 -##爵 -##父 -##爷 -##爸 -##爹 -##爺 -##爻 -##爽 -##爾 -##牆 -##片 -##版 -##牌 -##牍 -##牒 -##牙 -##牛 -##牝 -##牟 -##牠 -##牡 -##牢 -##牦 -##牧 -##物 -##牯 -##牲 -##牴 -##牵 -##特 -##牺 -##牽 -##犀 -##犁 -##犄 -##犊 -##犍 -##犒 -##犢 -##犧 -##犬 -##犯 -##状 -##犷 -##犸 -##犹 -##狀 -##狂 -##狄 -##狈 -##狎 -##狐 -##狒 -##狗 -##狙 -##狞 -##狠 -##狡 -##狩 -##独 -##狭 -##狮 -##狰 -##狱 -##狸 -##狹 -##狼 -##狽 -##猎 -##猕 -##猖 -##猗 -##猙 -##猛 -##猜 -##猝 -##猥 -##猩 -##猪 -##猫 -##猬 -##献 -##猴 -##猶 -##猷 -##猾 -##猿 -##獄 -##獅 -##獎 -##獐 -##獒 -##獗 -##獠 -##獣 -##獨 -##獭 -##獰 -##獲 -##獵 -##獷 -##獸 -##獺 -##獻 -##獼 -##獾 -##玄 -##率 -##玉 -##王 -##玑 -##玖 -##玛 -##玟 -##玠 -##玥 -##玩 -##玫 -##玮 -##环 -##现 -##玲 -##玳 -##玷 -##玺 -##玻 -##珀 -##珂 -##珅 -##珈 -##珉 -##珊 -##珍 -##珏 -##珐 -##珑 -##珙 -##珞 -##珠 -##珣 -##珥 -##珩 -##珪 -##班 -##珮 -##珲 -##珺 -##現 -##球 -##琅 -##理 -##琇 -##琉 -##琊 -##琍 -##琏 -##琐 -##琛 -##琢 -##琥 -##琦 -##琨 -##琪 -##琬 -##琮 -##琰 -##琲 -##琳 -##琴 -##琵 -##琶 -##琺 -##琼 -##瑀 -##瑁 -##瑄 -##瑋 -##瑕 -##瑗 -##瑙 
-##瑚 -##瑛 -##瑜 -##瑞 -##瑟 -##瑠 -##瑣 -##瑤 -##瑩 -##瑪 -##瑯 -##瑰 -##瑶 -##瑾 -##璀 -##璁 -##璃 -##璇 -##璉 -##璋 -##璎 -##璐 -##璜 -##璞 -##璟 -##璧 -##璨 -##環 -##璽 -##璿 -##瓊 -##瓏 -##瓒 -##瓜 -##瓢 -##瓣 -##瓤 -##瓦 -##瓮 -##瓯 -##瓴 -##瓶 -##瓷 -##甄 -##甌 -##甕 -##甘 -##甙 -##甚 -##甜 -##生 -##產 -##産 -##甥 -##甦 -##用 -##甩 -##甫 -##甬 -##甭 -##甯 -##田 -##由 -##甲 -##申 -##电 -##男 -##甸 -##町 -##画 -##甾 -##畀 -##畅 -##界 -##畏 -##畑 -##畔 -##留 -##畜 -##畝 -##畢 -##略 -##畦 -##番 -##畫 -##異 -##畲 -##畳 -##畴 -##當 -##畸 -##畹 -##畿 -##疆 -##疇 -##疊 -##疏 -##疑 -##疔 -##疖 -##疗 -##疙 -##疚 -##疝 -##疟 -##疡 -##疣 -##疤 -##疥 -##疫 -##疮 -##疯 -##疱 -##疲 -##疳 -##疵 -##疸 -##疹 -##疼 -##疽 -##疾 -##痂 -##病 -##症 -##痈 -##痉 -##痊 -##痍 -##痒 -##痔 -##痕 -##痘 -##痙 -##痛 -##痞 -##痠 -##痢 -##痣 -##痤 -##痧 -##痨 -##痪 -##痫 -##痰 -##痱 -##痴 -##痹 -##痺 -##痼 -##痿 -##瘀 -##瘁 -##瘋 -##瘍 -##瘓 -##瘘 -##瘙 -##瘟 -##瘠 -##瘡 -##瘢 -##瘤 -##瘦 -##瘧 -##瘩 -##瘪 -##瘫 -##瘴 -##瘸 -##瘾 -##療 -##癇 -##癌 -##癒 -##癖 -##癜 -##癞 -##癡 -##癢 -##癣 -##癥 -##癫 -##癬 -##癮 -##癱 -##癲 -##癸 -##発 -##登 -##發 -##白 -##百 -##皂 -##的 -##皆 -##皇 -##皈 -##皋 -##皎 -##皑 -##皓 -##皖 -##皙 -##皚 -##皮 -##皰 -##皱 -##皴 -##皺 -##皿 -##盂 -##盃 -##盅 -##盆 -##盈 -##益 -##盎 -##盏 -##盐 -##监 -##盒 -##盔 -##盖 -##盗 -##盘 -##盛 -##盜 -##盞 -##盟 -##盡 -##監 -##盤 -##盥 -##盧 -##盪 -##目 -##盯 -##盱 -##盲 -##直 -##相 -##盹 -##盼 -##盾 -##省 -##眈 -##眉 -##看 -##県 -##眙 -##眞 -##真 -##眠 -##眦 -##眨 -##眩 -##眯 -##眶 -##眷 -##眸 -##眺 -##眼 -##眾 -##着 -##睁 -##睇 -##睏 -##睐 -##睑 -##睛 -##睜 -##睞 -##睡 -##睢 -##督 -##睥 -##睦 -##睨 -##睪 -##睫 -##睬 -##睹 -##睽 -##睾 -##睿 -##瞄 -##瞅 -##瞇 -##瞋 -##瞌 -##瞎 -##瞑 -##瞒 -##瞓 -##瞞 -##瞟 -##瞠 -##瞥 -##瞧 -##瞩 -##瞪 -##瞬 -##瞭 -##瞰 -##瞳 -##瞻 -##瞼 -##瞿 -##矇 -##矍 -##矗 -##矚 -##矛 -##矜 -##矢 -##矣 -##知 -##矩 -##矫 -##短 -##矮 -##矯 -##石 -##矶 -##矽 -##矾 -##矿 -##码 -##砂 -##砌 -##砍 -##砒 -##研 -##砖 -##砗 -##砚 -##砝 -##砣 -##砥 -##砧 -##砭 -##砰 -##砲 -##破 -##砷 -##砸 -##砺 -##砼 -##砾 -##础 -##硅 -##硐 -##硒 -##硕 -##硝 -##硫 -##硬 -##确 -##硯 -##硼 -##碁 -##碇 -##碉 -##碌 -##碍 -##碎 -##碑 -##碓 -##碗 -##碘 -##碚 -##碛 -##碟 -##碣 -##碧 -##碩 -##碰 -##碱 -##碳 -##碴 -##確 -##碼 -##碾 -##磁 -##磅 -##磊 -##磋 -##磐 -##磕 -##磚 -##磡 -##磨 -##磬 -##磯 -##磲 -##磷 -##磺 -##礁 -##礎 -##礙 
-##礡 -##礦 -##礪 -##礫 -##礴 -##示 -##礼 -##社 -##祀 -##祁 -##祂 -##祇 -##祈 -##祉 -##祎 -##祐 -##祕 -##祖 -##祗 -##祚 -##祛 -##祜 -##祝 -##神 -##祟 -##祠 -##祢 -##祥 -##票 -##祭 -##祯 -##祷 -##祸 -##祺 -##祿 -##禀 -##禁 -##禄 -##禅 -##禍 -##禎 -##福 -##禛 -##禦 -##禧 -##禪 -##禮 -##禱 -##禹 -##禺 -##离 -##禽 -##禾 -##禿 -##秀 -##私 -##秃 -##秆 -##秉 -##秋 -##种 -##科 -##秒 -##秘 -##租 -##秣 -##秤 -##秦 -##秧 -##秩 -##秭 -##积 -##称 -##秸 -##移 -##秽 -##稀 -##稅 -##程 -##稍 -##税 -##稔 -##稗 -##稚 -##稜 -##稞 -##稟 -##稠 -##稣 -##種 -##稱 -##稲 -##稳 -##稷 -##稹 -##稻 -##稼 -##稽 -##稿 -##穀 -##穂 -##穆 -##穌 -##積 -##穎 -##穗 -##穢 -##穩 -##穫 -##穴 -##究 -##穷 -##穹 -##空 -##穿 -##突 -##窃 -##窄 -##窈 -##窍 -##窑 -##窒 -##窓 -##窕 -##窖 -##窗 -##窘 -##窜 -##窝 -##窟 -##窠 -##窥 -##窦 -##窨 -##窩 -##窪 -##窮 -##窯 -##窺 -##窿 -##竄 -##竅 -##竇 -##竊 -##立 -##竖 -##站 -##竜 -##竞 -##竟 -##章 -##竣 -##童 -##竭 -##端 -##競 -##竹 -##竺 -##竽 -##竿 -##笃 -##笆 -##笈 -##笋 -##笏 -##笑 -##笔 -##笙 -##笛 -##笞 -##笠 -##符 -##笨 -##第 -##笹 -##笺 -##笼 -##筆 -##等 -##筊 -##筋 -##筍 -##筏 -##筐 -##筑 -##筒 -##答 -##策 -##筛 -##筝 -##筠 -##筱 -##筲 -##筵 -##筷 -##筹 -##签 -##简 -##箇 -##箋 -##箍 -##箏 -##箐 -##箔 -##箕 -##算 -##箝 -##管 -##箩 -##箫 -##箭 -##箱 -##箴 -##箸 -##節 -##篁 -##範 -##篆 -##篇 -##築 -##篑 -##篓 -##篙 -##篝 -##篠 -##篡 -##篤 -##篩 -##篪 -##篮 -##篱 -##篷 -##簇 -##簌 -##簍 -##簡 -##簦 -##簧 -##簪 -##簫 -##簷 -##簸 -##簽 -##簾 -##簿 -##籁 -##籃 -##籌 -##籍 -##籐 -##籟 -##籠 -##籤 -##籬 -##籮 -##籲 -##米 -##类 -##籼 -##籽 -##粄 -##粉 -##粑 -##粒 -##粕 -##粗 -##粘 -##粟 -##粤 -##粥 -##粧 -##粪 -##粮 -##粱 -##粲 -##粳 -##粵 -##粹 -##粼 -##粽 -##精 -##粿 -##糅 -##糊 -##糍 -##糕 -##糖 -##糗 -##糙 -##糜 -##糞 -##糟 -##糠 -##糧 -##糬 -##糯 -##糰 -##糸 -##系 -##糾 -##紀 -##紂 -##約 -##紅 -##紉 -##紊 -##紋 -##納 -##紐 -##紓 -##純 -##紗 -##紘 -##紙 -##級 -##紛 -##紜 -##素 -##紡 -##索 -##紧 -##紫 -##紮 -##累 -##細 -##紳 -##紹 -##紺 -##終 -##絃 -##組 -##絆 -##経 -##結 -##絕 -##絞 -##絡 -##絢 -##給 -##絨 -##絮 -##統 -##絲 -##絳 -##絵 -##絶 -##絹 -##綁 -##綏 -##綑 -##經 -##継 -##続 -##綜 -##綠 -##綢 -##綦 -##綫 -##綬 -##維 -##綱 -##網 -##綴 -##綵 -##綸 -##綺 -##綻 -##綽 -##綾 -##綿 -##緊 -##緋 -##総 -##緑 -##緒 -##緘 -##線 -##緝 -##緞 -##締 -##緣 -##編 -##緩 -##緬 -##緯 -##練 -##緹 -##緻 -##縁 -##縄 -##縈 -##縛 -##縝 -##縣 -##縫 -##縮 -##縱 -##縴 -##縷 -##總 
-##績 -##繁 -##繃 -##繆 -##繇 -##繋 -##織 -##繕 -##繚 -##繞 -##繡 -##繩 -##繪 -##繫 -##繭 -##繳 -##繹 -##繼 -##繽 -##纂 -##續 -##纍 -##纏 -##纓 -##纔 -##纖 -##纜 -##纠 -##红 -##纣 -##纤 -##约 -##级 -##纨 -##纪 -##纫 -##纬 -##纭 -##纯 -##纰 -##纱 -##纲 -##纳 -##纵 -##纶 -##纷 -##纸 -##纹 -##纺 -##纽 -##纾 -##线 -##绀 -##练 -##组 -##绅 -##细 -##织 -##终 -##绊 -##绍 -##绎 -##经 -##绑 -##绒 -##结 -##绔 -##绕 -##绘 -##给 -##绚 -##绛 -##络 -##绝 -##绞 -##统 -##绡 -##绢 -##绣 -##绥 -##绦 -##继 -##绩 -##绪 -##绫 -##续 -##绮 -##绯 -##绰 -##绳 -##维 -##绵 -##绶 -##绷 -##绸 -##绻 -##综 -##绽 -##绾 -##绿 -##缀 -##缄 -##缅 -##缆 -##缇 -##缈 -##缉 -##缎 -##缓 -##缔 -##缕 -##编 -##缘 -##缙 -##缚 -##缜 -##缝 -##缠 -##缢 -##缤 -##缥 -##缨 -##缩 -##缪 -##缭 -##缮 -##缰 -##缱 -##缴 -##缸 -##缺 -##缽 -##罂 -##罄 -##罌 -##罐 -##网 -##罔 -##罕 -##罗 -##罚 -##罡 -##罢 -##罩 -##罪 -##置 -##罰 -##署 -##罵 -##罷 -##罹 -##羁 -##羅 -##羈 -##羊 -##羌 -##美 -##羔 -##羚 -##羞 -##羟 -##羡 -##羣 -##群 -##羥 -##羧 -##羨 -##義 -##羯 -##羲 -##羸 -##羹 -##羽 -##羿 -##翁 -##翅 -##翊 -##翌 -##翎 -##習 -##翔 -##翘 -##翟 -##翠 -##翡 -##翦 -##翩 -##翰 -##翱 -##翳 -##翹 -##翻 -##翼 -##耀 -##老 -##考 -##耄 -##者 -##耆 -##耋 -##而 -##耍 -##耐 -##耒 -##耕 -##耗 -##耘 -##耙 -##耦 -##耨 -##耳 -##耶 -##耷 -##耸 -##耻 -##耽 -##耿 -##聂 -##聆 -##聊 -##聋 -##职 -##聒 -##联 -##聖 -##聘 -##聚 -##聞 -##聪 -##聯 -##聰 -##聲 -##聳 -##聴 -##聶 -##職 -##聽 -##聾 -##聿 -##肃 -##肄 -##肅 -##肆 -##肇 -##肉 -##肋 -##肌 -##肏 -##肓 -##肖 -##肘 -##肚 -##肛 -##肝 -##肠 -##股 -##肢 -##肤 -##肥 -##肩 -##肪 -##肮 -##肯 -##肱 -##育 -##肴 -##肺 -##肽 -##肾 -##肿 -##胀 -##胁 -##胃 -##胄 -##胆 -##背 -##胍 -##胎 -##胖 -##胚 -##胛 -##胜 -##胝 -##胞 -##胡 -##胤 -##胥 -##胧 -##胫 -##胭 -##胯 -##胰 -##胱 -##胳 -##胴 -##胶 -##胸 -##胺 -##能 -##脂 -##脅 -##脆 -##脇 -##脈 -##脉 -##脊 -##脍 -##脏 -##脐 -##脑 -##脓 -##脖 -##脘 -##脚 -##脛 -##脣 -##脩 -##脫 -##脯 -##脱 -##脲 -##脳 -##脸 -##脹 -##脾 -##腆 -##腈 -##腊 -##腋 -##腌 -##腎 -##腐 -##腑 -##腓 -##腔 -##腕 -##腥 -##腦 -##腩 -##腫 -##腭 -##腮 -##腰 -##腱 -##腳 -##腴 -##腸 -##腹 -##腺 -##腻 -##腼 -##腾 -##腿 -##膀 -##膈 -##膊 -##膏 -##膑 -##膘 -##膚 -##膛 -##膜 -##膝 -##膠 -##膦 -##膨 -##膩 -##膳 -##膺 -##膻 -##膽 -##膾 -##膿 -##臀 -##臂 -##臃 -##臆 -##臉 -##臊 -##臍 -##臓 -##臘 -##臟 -##臣 -##臥 -##臧 -##臨 -##自 -##臬 -##臭 -##至 -##致 -##臺 -##臻 -##臼 -##臾 -##舀 -##舂 -##舅 -##舆 
-##與 -##興 -##舉 -##舊 -##舌 -##舍 -##舎 -##舐 -##舒 -##舔 -##舖 -##舗 -##舛 -##舜 -##舞 -##舟 -##航 -##舫 -##般 -##舰 -##舱 -##舵 -##舶 -##舷 -##舸 -##船 -##舺 -##舾 -##艇 -##艋 -##艘 -##艙 -##艦 -##艮 -##良 -##艰 -##艱 -##色 -##艳 -##艷 -##艹 -##艺 -##艾 -##节 -##芃 -##芈 -##芊 -##芋 -##芍 -##芎 -##芒 -##芙 -##芜 -##芝 -##芡 -##芥 -##芦 -##芩 -##芪 -##芫 -##芬 -##芭 -##芮 -##芯 -##花 -##芳 -##芷 -##芸 -##芹 -##芻 -##芽 -##芾 -##苁 -##苄 -##苇 -##苋 -##苍 -##苏 -##苑 -##苒 -##苓 -##苔 -##苕 -##苗 -##苛 -##苜 -##苞 -##苟 -##苡 -##苣 -##若 -##苦 -##苫 -##苯 -##英 -##苷 -##苹 -##苻 -##茁 -##茂 -##范 -##茄 -##茅 -##茉 -##茎 -##茏 -##茗 -##茜 -##茧 -##茨 -##茫 -##茬 -##茭 -##茯 -##茱 -##茲 -##茴 -##茵 -##茶 -##茸 -##茹 -##茼 -##荀 -##荃 -##荆 -##草 -##荊 -##荏 -##荐 -##荒 -##荔 -##荖 -##荘 -##荚 -##荞 -##荟 -##荠 -##荡 -##荣 -##荤 -##荥 -##荧 -##荨 -##荪 -##荫 -##药 -##荳 -##荷 -##荸 -##荻 -##荼 -##荽 -##莅 -##莆 -##莉 -##莊 -##莎 -##莒 -##莓 -##莖 -##莘 -##莞 -##莠 -##莢 -##莧 -##莪 -##莫 -##莱 -##莲 -##莴 -##获 -##莹 -##莺 -##莽 -##莿 -##菀 -##菁 -##菅 -##菇 -##菈 -##菊 -##菌 -##菏 -##菓 -##菖 -##菘 -##菜 -##菟 -##菠 -##菡 -##菩 -##華 -##菱 -##菲 -##菸 -##菽 -##萁 -##萃 -##萄 -##萊 -##萋 -##萌 -##萍 -##萎 -##萘 -##萝 -##萤 -##营 -##萦 -##萧 -##萨 -##萩 -##萬 -##萱 -##萵 -##萸 -##萼 -##落 -##葆 -##葉 -##著 -##葚 -##葛 -##葡 -##董 -##葦 -##葩 -##葫 -##葬 -##葭 -##葯 -##葱 -##葳 -##葵 -##葷 -##葺 -##蒂 -##蒋 -##蒐 -##蒔 -##蒙 -##蒜 -##蒞 -##蒟 -##蒡 -##蒨 -##蒲 -##蒸 -##蒹 -##蒻 -##蒼 -##蒿 -##蓁 -##蓄 -##蓆 -##蓉 -##蓋 -##蓑 -##蓓 -##蓖 -##蓝 -##蓟 -##蓦 -##蓬 -##蓮 -##蓼 -##蓿 -##蔑 -##蔓 -##蔔 -##蔗 -##蔘 -##蔚 -##蔡 -##蔣 -##蔥 -##蔫 -##蔬 -##蔭 -##蔵 -##蔷 -##蔺 -##蔻 -##蔼 -##蔽 -##蕁 -##蕃 -##蕈 -##蕉 -##蕊 -##蕎 -##蕙 -##蕤 -##蕨 -##蕩 -##蕪 -##蕭 -##蕲 -##蕴 -##蕻 -##蕾 -##薄 -##薅 -##薇 -##薈 -##薊 -##薏 -##薑 -##薔 -##薙 -##薛 -##薦 -##薨 -##薩 -##薪 -##薬 -##薯 -##薰 -##薹 -##藉 -##藍 -##藏 -##藐 -##藓 -##藕 -##藜 -##藝 -##藤 -##藥 -##藩 -##藹 -##藻 -##藿 -##蘆 -##蘇 -##蘊 -##蘋 -##蘑 -##蘚 -##蘭 -##蘸 -##蘼 -##蘿 -##虎 -##虏 -##虐 -##虑 -##虔 -##處 -##虚 -##虛 -##虜 -##虞 -##號 -##虢 -##虧 -##虫 -##虬 -##虱 -##虹 -##虻 -##虽 -##虾 -##蚀 -##蚁 -##蚂 -##蚊 -##蚌 -##蚓 -##蚕 -##蚜 -##蚝 -##蚣 -##蚤 -##蚩 -##蚪 -##蚯 -##蚱 -##蚵 -##蛀 -##蛆 -##蛇 -##蛊 -##蛋 -##蛎 -##蛐 -##蛔 -##蛙 -##蛛 -##蛟 -##蛤 -##蛭 -##蛮 -##蛰 -##蛳 -##蛹 -##蛻 -##蛾 -##蜀 -##蜂 
-##蜃 -##蜆 -##蜇 -##蜈 -##蜊 -##蜍 -##蜒 -##蜓 -##蜕 -##蜗 -##蜘 -##蜚 -##蜜 -##蜡 -##蜢 -##蜥 -##蜱 -##蜴 -##蜷 -##蜻 -##蜿 -##蝇 -##蝈 -##蝉 -##蝌 -##蝎 -##蝕 -##蝗 -##蝙 -##蝟 -##蝠 -##蝦 -##蝨 -##蝴 -##蝶 -##蝸 -##蝼 -##螂 -##螃 -##融 -##螞 -##螢 -##螨 -##螯 -##螳 -##螺 -##蟀 -##蟄 -##蟆 -##蟋 -##蟎 -##蟑 -##蟒 -##蟠 -##蟬 -##蟲 -##蟹 -##蟻 -##蟾 -##蠅 -##蠍 -##蠔 -##蠕 -##蠛 -##蠟 -##蠡 -##蠢 -##蠣 -##蠱 -##蠶 -##蠹 -##蠻 -##血 -##衄 -##衅 -##衆 -##行 -##衍 -##術 -##衔 -##街 -##衙 -##衛 -##衝 -##衞 -##衡 -##衢 -##衣 -##补 -##表 -##衩 -##衫 -##衬 -##衮 -##衰 -##衲 -##衷 -##衹 -##衾 -##衿 -##袁 -##袂 -##袄 -##袅 -##袈 -##袋 -##袍 -##袒 -##袖 -##袜 -##袞 -##袤 -##袪 -##被 -##袭 -##袱 -##裁 -##裂 -##装 -##裆 -##裊 -##裏 -##裔 -##裕 -##裘 -##裙 -##補 -##裝 -##裟 -##裡 -##裤 -##裨 -##裱 -##裳 -##裴 -##裸 -##裹 -##製 -##裾 -##褂 -##複 -##褐 -##褒 -##褓 -##褔 -##褚 -##褥 -##褪 -##褫 -##褲 -##褶 -##褻 -##襁 -##襄 -##襟 -##襠 -##襪 -##襬 -##襯 -##襲 -##西 -##要 -##覃 -##覆 -##覇 -##見 -##規 -##覓 -##視 -##覚 -##覦 -##覧 -##親 -##覬 -##観 -##覷 -##覺 -##覽 -##觀 -##见 -##观 -##规 -##觅 -##视 -##览 -##觉 -##觊 -##觎 -##觐 -##觑 -##角 -##觞 -##解 -##觥 -##触 -##觸 -##言 -##訂 -##計 -##訊 -##討 -##訓 -##訕 -##訖 -##託 -##記 -##訛 -##訝 -##訟 -##訣 -##訥 -##訪 -##設 -##許 -##訳 -##訴 -##訶 -##診 -##註 -##証 -##詆 -##詐 -##詔 -##評 -##詛 -##詞 -##詠 -##詡 -##詢 -##詣 -##試 -##詩 -##詫 -##詬 -##詭 -##詮 -##詰 -##話 -##該 -##詳 -##詹 -##詼 -##誅 -##誇 -##誉 -##誌 -##認 -##誓 -##誕 -##誘 -##語 -##誠 -##誡 -##誣 -##誤 -##誥 -##誦 -##誨 -##說 -##説 -##読 -##誰 -##課 -##誹 -##誼 -##調 -##諄 -##談 -##請 -##諏 -##諒 -##論 -##諗 -##諜 -##諡 -##諦 -##諧 -##諫 -##諭 -##諮 -##諱 -##諳 -##諷 -##諸 -##諺 -##諾 -##謀 -##謁 -##謂 -##謄 -##謊 -##謎 -##謐 -##謔 -##謗 -##謙 -##講 -##謝 -##謠 -##謨 -##謬 -##謹 -##謾 -##譁 -##證 -##譎 -##譏 -##識 -##譙 -##譚 -##譜 -##警 -##譬 -##譯 -##議 -##譲 -##譴 -##護 -##譽 -##讀 -##變 -##讓 -##讚 -##讞 -##计 -##订 -##认 -##讥 -##讧 -##讨 -##让 -##讪 -##讫 -##训 -##议 -##讯 -##记 -##讲 -##讳 -##讴 -##讶 -##讷 -##许 -##讹 -##论 -##讼 -##讽 -##设 -##访 -##诀 -##证 -##诃 -##评 -##诅 -##识 -##诈 -##诉 -##诊 -##诋 -##词 -##诏 -##译 -##试 -##诗 -##诘 -##诙 -##诚 -##诛 -##话 -##诞 -##诟 -##诠 -##诡 -##询 -##诣 -##诤 -##该 -##详 -##诧 -##诩 -##诫 -##诬 -##语 -##误 -##诰 -##诱 -##诲 -##说 -##诵 -##诶 -##请 -##诸 -##诺 -##读 -##诽 -##课 -##诿 -##谀 -##谁 -##调 
-##谄 -##谅 -##谆 -##谈 -##谊 -##谋 -##谌 -##谍 -##谎 -##谏 -##谐 -##谑 -##谒 -##谓 -##谔 -##谕 -##谗 -##谘 -##谙 -##谚 -##谛 -##谜 -##谟 -##谢 -##谣 -##谤 -##谥 -##谦 -##谧 -##谨 -##谩 -##谪 -##谬 -##谭 -##谯 -##谱 -##谲 -##谴 -##谶 -##谷 -##豁 -##豆 -##豇 -##豈 -##豉 -##豊 -##豌 -##豎 -##豐 -##豔 -##豚 -##象 -##豢 -##豪 -##豫 -##豬 -##豹 -##豺 -##貂 -##貅 -##貌 -##貓 -##貔 -##貘 -##貝 -##貞 -##負 -##財 -##貢 -##貧 -##貨 -##販 -##貪 -##貫 -##責 -##貯 -##貰 -##貳 -##貴 -##貶 -##買 -##貸 -##費 -##貼 -##貽 -##貿 -##賀 -##賁 -##賂 -##賃 -##賄 -##資 -##賈 -##賊 -##賑 -##賓 -##賜 -##賞 -##賠 -##賡 -##賢 -##賣 -##賤 -##賦 -##質 -##賬 -##賭 -##賴 -##賺 -##購 -##賽 -##贅 -##贈 -##贊 -##贍 -##贏 -##贓 -##贖 -##贛 -##贝 -##贞 -##负 -##贡 -##财 -##责 -##贤 -##败 -##账 -##货 -##质 -##贩 -##贪 -##贫 -##贬 -##购 -##贮 -##贯 -##贰 -##贱 -##贲 -##贴 -##贵 -##贷 -##贸 -##费 -##贺 -##贻 -##贼 -##贾 -##贿 -##赁 -##赂 -##赃 -##资 -##赅 -##赈 -##赊 -##赋 -##赌 -##赎 -##赏 -##赐 -##赓 -##赔 -##赖 -##赘 -##赚 -##赛 -##赝 -##赞 -##赠 -##赡 -##赢 -##赣 -##赤 -##赦 -##赧 -##赫 -##赭 -##走 -##赳 -##赴 -##赵 -##赶 -##起 -##趁 -##超 -##越 -##趋 -##趕 -##趙 -##趟 -##趣 -##趨 -##足 -##趴 -##趵 -##趸 -##趺 -##趾 -##跃 -##跄 -##跆 -##跋 -##跌 -##跎 -##跑 -##跖 -##跚 -##跛 -##距 -##跟 -##跡 -##跤 -##跨 -##跩 -##跪 -##路 -##跳 -##践 -##跷 -##跹 -##跺 -##跻 -##踉 -##踊 -##踌 -##踏 -##踐 -##踝 -##踞 -##踟 -##踢 -##踩 -##踪 -##踮 -##踱 -##踴 -##踵 -##踹 -##蹂 -##蹄 -##蹇 -##蹈 -##蹉 -##蹊 -##蹋 -##蹑 -##蹒 -##蹙 -##蹟 -##蹣 -##蹤 -##蹦 -##蹩 -##蹬 -##蹭 -##蹲 -##蹴 -##蹶 -##蹺 -##蹼 -##蹿 -##躁 -##躇 -##躉 -##躊 -##躋 -##躍 -##躏 -##躪 -##身 -##躬 -##躯 -##躲 -##躺 -##軀 -##車 -##軋 -##軌 -##軍 -##軒 -##軟 -##転 -##軸 -##軼 -##軽 -##軾 -##較 -##載 -##輒 -##輓 -##輔 -##輕 -##輛 -##輝 -##輟 -##輩 -##輪 -##輯 -##輸 -##輻 -##輾 -##輿 -##轄 -##轅 -##轆 -##轉 -##轍 -##轎 -##轟 -##车 -##轧 -##轨 -##轩 -##转 -##轭 -##轮 -##软 -##轰 -##轲 -##轴 -##轶 -##轻 -##轼 -##载 -##轿 -##较 -##辄 -##辅 -##辆 -##辇 -##辈 -##辉 -##辊 -##辍 -##辐 -##辑 -##输 -##辕 -##辖 -##辗 -##辘 -##辙 -##辛 -##辜 -##辞 -##辟 -##辣 -##辦 -##辨 -##辩 -##辫 -##辭 -##辮 -##辯 -##辰 -##辱 -##農 -##边 -##辺 -##辻 -##込 -##辽 -##达 -##迁 -##迂 -##迄 -##迅 -##过 -##迈 -##迎 -##运 -##近 -##返 -##还 -##这 -##进 -##远 -##违 -##连 -##迟 -##迢 -##迤 -##迥 -##迦 -##迩 -##迪 -##迫 -##迭 -##述 -##迴 -##迷 -##迸 -##迹 -##迺 -##追 -##退 -##送 -##适 
-##逃 -##逅 -##逆 -##选 -##逊 -##逍 -##透 -##逐 -##递 -##途 -##逕 -##逗 -##這 -##通 -##逛 -##逝 -##逞 -##速 -##造 -##逢 -##連 -##逮 -##週 -##進 -##逵 -##逶 -##逸 -##逻 -##逼 -##逾 -##遁 -##遂 -##遅 -##遇 -##遊 -##運 -##遍 -##過 -##遏 -##遐 -##遑 -##遒 -##道 -##達 -##違 -##遗 -##遙 -##遛 -##遜 -##遞 -##遠 -##遢 -##遣 -##遥 -##遨 -##適 -##遭 -##遮 -##遲 -##遴 -##遵 -##遶 -##遷 -##選 -##遺 -##遼 -##遽 -##避 -##邀 -##邁 -##邂 -##邃 -##還 -##邇 -##邈 -##邊 -##邋 -##邏 -##邑 -##邓 -##邕 -##邛 -##邝 -##邢 -##那 -##邦 -##邨 -##邪 -##邬 -##邮 -##邯 -##邰 -##邱 -##邳 -##邵 -##邸 -##邹 -##邺 -##邻 -##郁 -##郅 -##郊 -##郎 -##郑 -##郜 -##郝 -##郡 -##郢 -##郤 -##郦 -##郧 -##部 -##郫 -##郭 -##郴 -##郵 -##郷 -##郸 -##都 -##鄂 -##鄉 -##鄒 -##鄔 -##鄙 -##鄞 -##鄢 -##鄧 -##鄭 -##鄰 -##鄱 -##鄲 -##鄺 -##酉 -##酊 -##酋 -##酌 -##配 -##酐 -##酒 -##酗 -##酚 -##酝 -##酢 -##酣 -##酥 -##酩 -##酪 -##酬 -##酮 -##酯 -##酰 -##酱 -##酵 -##酶 -##酷 -##酸 -##酿 -##醃 -##醇 -##醉 -##醋 -##醍 -##醐 -##醒 -##醚 -##醛 -##醜 -##醞 -##醣 -##醪 -##醫 -##醬 -##醮 -##醯 -##醴 -##醺 -##釀 -##釁 -##采 -##釉 -##释 -##釋 -##里 -##重 -##野 -##量 -##釐 -##金 -##釗 -##釘 -##釜 -##針 -##釣 -##釦 -##釧 -##釵 -##鈀 -##鈉 -##鈍 -##鈎 -##鈔 -##鈕 -##鈞 -##鈣 -##鈦 -##鈪 -##鈴 -##鈺 -##鈾 -##鉀 -##鉄 -##鉅 -##鉉 -##鉑 -##鉗 -##鉚 -##鉛 -##鉤 -##鉴 -##鉻 -##銀 -##銃 -##銅 -##銑 -##銓 -##銖 -##銘 -##銜 -##銬 -##銭 -##銮 -##銳 -##銷 -##銹 -##鋁 -##鋅 -##鋒 -##鋤 -##鋪 -##鋰 -##鋸 -##鋼 -##錄 -##錐 -##錘 -##錚 -##錠 -##錢 -##錦 -##錨 -##錫 -##錮 -##錯 -##録 -##錳 -##錶 -##鍊 -##鍋 -##鍍 -##鍛 -##鍥 -##鍰 -##鍵 -##鍺 -##鍾 -##鎂 -##鎊 -##鎌 -##鎏 -##鎔 -##鎖 -##鎗 -##鎚 -##鎧 -##鎬 -##鎮 -##鎳 -##鏈 -##鏖 -##鏗 -##鏘 -##鏞 -##鏟 -##鏡 -##鏢 -##鏤 -##鏽 -##鐘 -##鐮 -##鐲 -##鐳 -##鐵 -##鐸 -##鐺 -##鑄 -##鑊 -##鑑 -##鑒 -##鑣 -##鑫 -##鑰 -##鑲 -##鑼 -##鑽 -##鑾 -##鑿 -##针 -##钉 -##钊 -##钎 -##钏 -##钒 -##钓 -##钗 -##钙 -##钛 -##钜 -##钝 -##钞 -##钟 -##钠 -##钡 -##钢 -##钣 -##钤 -##钥 -##钦 -##钧 -##钨 -##钩 -##钮 -##钯 -##钰 -##钱 -##钳 -##钴 -##钵 -##钺 -##钻 -##钼 -##钾 -##钿 -##铀 -##铁 -##铂 -##铃 -##铄 -##铅 -##铆 -##铉 -##铎 -##铐 -##铛 -##铜 -##铝 -##铠 -##铡 -##铢 -##铣 -##铤 -##铨 -##铩 -##铬 -##铭 -##铮 -##铰 -##铲 -##铵 -##银 -##铸 -##铺 -##链 -##铿 -##销 -##锁 -##锂 -##锄 -##锅 -##锆 -##锈 -##锉 -##锋 -##锌 -##锏 -##锐 -##锑 -##错 -##锚 -##锟 -##锡 -##锢 -##锣 -##锤 -##锥 -##锦 -##锭 -##键 -##锯 -##锰 -##锲 
-##锵 -##锹 -##锺 -##锻 -##镀 -##镁 -##镂 -##镇 -##镉 -##镌 -##镍 -##镐 -##镑 -##镕 -##镖 -##镗 -##镛 -##镜 -##镣 -##镭 -##镯 -##镰 -##镳 -##镶 -##長 -##长 -##門 -##閃 -##閉 -##開 -##閎 -##閏 -##閑 -##閒 -##間 -##閔 -##閘 -##閡 -##関 -##閣 -##閥 -##閨 -##閩 -##閱 -##閲 -##閹 -##閻 -##閾 -##闆 -##闇 -##闊 -##闌 -##闍 -##闔 -##闕 -##闖 -##闘 -##關 -##闡 -##闢 -##门 -##闪 -##闫 -##闭 -##问 -##闯 -##闰 -##闲 -##间 -##闵 -##闷 -##闸 -##闹 -##闺 -##闻 -##闽 -##闾 -##阀 -##阁 -##阂 -##阅 -##阆 -##阇 -##阈 -##阉 -##阎 -##阐 -##阑 -##阔 -##阕 -##阖 -##阙 -##阚 -##阜 -##队 -##阡 -##阪 -##阮 -##阱 -##防 -##阳 -##阴 -##阵 -##阶 -##阻 -##阿 -##陀 -##陂 -##附 -##际 -##陆 -##陇 -##陈 -##陋 -##陌 -##降 -##限 -##陕 -##陛 -##陝 -##陞 -##陟 -##陡 -##院 -##陣 -##除 -##陨 -##险 -##陪 -##陰 -##陲 -##陳 -##陵 -##陶 -##陷 -##陸 -##険 -##陽 -##隅 -##隆 -##隈 -##隊 -##隋 -##隍 -##階 -##随 -##隐 -##隔 -##隕 -##隘 -##隙 -##際 -##障 -##隠 -##隣 -##隧 -##隨 -##險 -##隱 -##隴 -##隶 -##隸 -##隻 -##隼 -##隽 -##难 -##雀 -##雁 -##雄 -##雅 -##集 -##雇 -##雉 -##雋 -##雌 -##雍 -##雎 -##雏 -##雑 -##雒 -##雕 -##雖 -##雙 -##雛 -##雜 -##雞 -##離 -##難 -##雨 -##雪 -##雯 -##雰 -##雲 -##雳 -##零 -##雷 -##雹 -##電 -##雾 -##需 -##霁 -##霄 -##霆 -##震 -##霈 -##霉 -##霊 -##霍 -##霎 -##霏 -##霑 -##霓 -##霖 -##霜 -##霞 -##霧 -##霭 -##霰 -##露 -##霸 -##霹 -##霽 -##霾 -##靂 -##靄 -##靈 -##青 -##靓 -##靖 -##静 -##靚 -##靛 -##靜 -##非 -##靠 -##靡 -##面 -##靥 -##靦 -##革 -##靳 -##靴 -##靶 -##靼 -##鞅 -##鞋 -##鞍 -##鞏 -##鞑 -##鞘 -##鞠 -##鞣 -##鞦 -##鞭 -##韆 -##韋 -##韌 -##韓 -##韜 -##韦 -##韧 -##韩 -##韬 -##韭 -##音 -##韵 -##韶 -##韻 -##響 -##頁 -##頂 -##頃 -##項 -##順 -##須 -##頌 -##預 -##頑 -##頒 -##頓 -##頗 -##領 -##頜 -##頡 -##頤 -##頫 -##頭 -##頰 -##頷 -##頸 -##頹 -##頻 -##頼 -##顆 -##題 -##額 -##顎 -##顏 -##顔 -##願 -##顛 -##類 -##顧 -##顫 -##顯 -##顱 -##顴 -##页 -##顶 -##顷 -##项 -##顺 -##须 -##顼 -##顽 -##顾 -##顿 -##颁 -##颂 -##预 -##颅 -##领 -##颇 -##颈 -##颉 -##颊 -##颌 -##颍 -##颐 -##频 -##颓 -##颔 -##颖 -##颗 -##题 -##颚 -##颛 -##颜 -##额 -##颞 -##颠 -##颡 -##颢 -##颤 -##颦 -##颧 -##風 -##颯 -##颱 -##颳 -##颶 -##颼 -##飄 -##飆 -##风 -##飒 -##飓 -##飕 -##飘 -##飙 -##飚 -##飛 -##飞 -##食 -##飢 -##飨 -##飩 -##飪 -##飯 -##飲 -##飼 -##飽 -##飾 -##餃 -##餅 -##餉 -##養 -##餌 -##餐 -##餒 -##餓 -##餘 -##餚 -##餛 -##餞 -##餡 -##館 -##餮 -##餵 -##餾 -##饅 -##饈 -##饋 -##饌 -##饍 -##饑 -##饒 -##饕 -##饗 -##饞 
-##饥 -##饨 -##饪 -##饬 -##饭 -##饮 -##饯 -##饰 -##饱 -##饲 -##饴 -##饵 -##饶 -##饷 -##饺 -##饼 -##饽 -##饿 -##馀 -##馁 -##馄 -##馅 -##馆 -##馈 -##馋 -##馍 -##馏 -##馒 -##馔 -##首 -##馗 -##香 -##馥 -##馨 -##馬 -##馭 -##馮 -##馳 -##馴 -##駁 -##駄 -##駅 -##駆 -##駐 -##駒 -##駕 -##駛 -##駝 -##駭 -##駱 -##駿 -##騁 -##騎 -##騏 -##験 -##騙 -##騨 -##騰 -##騷 -##驀 -##驅 -##驊 -##驍 -##驒 -##驕 -##驗 -##驚 -##驛 -##驟 -##驢 -##驥 -##马 -##驭 -##驮 -##驯 -##驰 -##驱 -##驳 -##驴 -##驶 -##驷 -##驸 -##驹 -##驻 -##驼 -##驾 -##驿 -##骁 -##骂 -##骄 -##骅 -##骆 -##骇 -##骈 -##骊 -##骋 -##验 -##骏 -##骐 -##骑 -##骗 -##骚 -##骛 -##骜 -##骞 -##骠 -##骡 -##骤 -##骥 -##骧 -##骨 -##骯 -##骰 -##骶 -##骷 -##骸 -##骼 -##髂 -##髅 -##髋 -##髏 -##髒 -##髓 -##體 -##髖 -##高 -##髦 -##髪 -##髮 -##髯 -##髻 -##鬃 -##鬆 -##鬍 -##鬓 -##鬚 -##鬟 -##鬢 -##鬣 -##鬥 -##鬧 -##鬱 -##鬼 -##魁 -##魂 -##魄 -##魅 -##魇 -##魍 -##魏 -##魔 -##魘 -##魚 -##魯 -##魷 -##鮑 -##鮨 -##鮪 -##鮭 -##鮮 -##鯉 -##鯊 -##鯖 -##鯛 -##鯨 -##鯰 -##鯽 -##鰍 -##鰓 -##鰭 -##鰲 -##鰻 -##鰾 -##鱈 -##鱉 -##鱔 -##鱗 -##鱷 -##鱸 -##鱼 -##鱿 -##鲁 -##鲈 -##鲍 -##鲑 -##鲛 -##鲜 -##鲟 -##鲢 -##鲤 -##鲨 -##鲫 -##鲱 -##鲲 -##鲶 -##鲷 -##鲸 -##鳃 -##鳄 -##鳅 -##鳌 -##鳍 -##鳕 -##鳖 -##鳗 -##鳝 -##鳞 -##鳥 -##鳩 -##鳳 -##鳴 -##鳶 -##鴉 -##鴕 -##鴛 -##鴦 -##鴨 -##鴻 -##鴿 -##鵑 -##鵜 -##鵝 -##鵡 -##鵬 -##鵰 -##鵲 -##鶘 -##鶩 -##鶯 -##鶴 -##鷗 -##鷲 -##鷹 -##鷺 -##鸚 -##鸞 -##鸟 -##鸠 -##鸡 -##鸢 -##鸣 -##鸥 -##鸦 -##鸨 -##鸪 -##鸭 -##鸯 -##鸳 -##鸵 -##鸽 -##鸾 -##鸿 -##鹂 -##鹃 -##鹄 -##鹅 -##鹈 -##鹉 -##鹊 -##鹌 -##鹏 -##鹑 -##鹕 -##鹘 -##鹜 -##鹞 -##鹤 -##鹦 -##鹧 -##鹫 -##鹭 -##鹰 -##鹳 -##鹵 -##鹹 -##鹼 -##鹽 -##鹿 -##麂 -##麋 -##麒 -##麓 -##麗 -##麝 -##麟 -##麥 -##麦 -##麩 -##麴 -##麵 -##麸 -##麺 -##麻 -##麼 -##麽 -##麾 -##黃 -##黄 -##黍 -##黎 -##黏 -##黑 -##黒 -##黔 -##默 -##黛 -##黜 -##黝 -##點 -##黠 -##黨 -##黯 -##黴 -##鼋 -##鼎 -##鼐 -##鼓 -##鼠 -##鼬 -##鼹 -##鼻 -##鼾 -##齁 -##齊 -##齋 -##齐 -##齒 -##齡 -##齢 -##齣 -##齦 -##齿 -##龄 -##龅 -##龈 -##龊 -##龋 -##龌 -##龍 -##龐 -##龔 -##龕 -##龙 -##龚 -##龛 -##龜 -##龟 -##︰ -##︱ -##︶ -##︿ -##﹁ -##﹂ -##﹍ -##﹏ -##﹐ -##﹑ -##﹒ -##﹔ -##﹕ -##﹖ -##﹗ -##﹙ -##﹚ -##﹝ -##﹞ -##﹡ -##﹣ -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##, -##- -##. -##/ -##: -##; -##< -##? 
-##@ -##[ -##\ -##] -##^ -##_ -##` -##f -##h -##j -##u -##w -##z -##{ -##} -##。 -##「 -##」 -##、 -##・ -##ッ -##ー -##イ -##ク -##シ -##ス -##ト -##ノ -##フ -##ラ -##ル -##ン -##゙ -##゚ -## ̄ -##¥ -##👍 -##🔥 -##😂 -##😎 diff --git a/tutorials/tutorial_code/cache/cache.sh b/tutorials/tutorial_code/cache/cache.sh deleted file mode 100644 index ac7c3a1ddd99ad0f6f3fd51dd25020a7b5c4063f..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/cache/cache.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# This shell script will launch parallel pipelines - -# get path to dataset directory -if [ $# != 1 ] -then - echo "Usage: sh cache.sh DATASET_PATH" -exit 1 -fi -dataset_path=$1 - -# generate a session id that these parallel pipelines can share -result=$(cache_admin -g 2>&1) -rc=$? -if [ $rc -ne 0 ]; then - echo "some error" - exit 1 -fi - -# grab the session id from the result string -session_id=$(echo $result | awk '{print $NF}') - -# make the session_id available to the python scripts -num_devices=4 - -for p in $(seq 0 $((${num_devices}-1))); do - python my_training_script.py --num_devices "$num_devices" --device "$p" --session_id $session_id --dataset_path $dataset_path -done diff --git a/tutorials/tutorial_code/cache/my_training_script.py b/tutorials/tutorial_code/cache/my_training_script.py deleted file mode 100644 index 2520ec6175604d046dcdef78aa0ce8e31e64b47c..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/cache/my_training_script.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -Training script -""" -import argparse -import mindspore.dataset as ds - -parser = argparse.ArgumentParser(description='Cache Example') -parser.add_argument('--num_devices', type=int, default=1, help='Device num.') -parser.add_argument('--device', type=int, default=0, help='Device id.') -parser.add_argument('--session_id', type=int, default=1, help='Session id.') -parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') -args_opt = parser.parse_args() - 
-# apply cache to dataset -test_cache = ds.DatasetCache(session_id=args_opt.session_id, size=0, spilling=False) -dataset = ds.Cifar10Dataset(dataset_dir=args_opt.dataset_path, num_samples=4, shuffle=False, num_parallel_workers=1, - num_shards=args_opt.num_devices, shard_id=args_opt.device, cache=test_cache) -num_iter = 0 -for _ in dataset.create_dict_iterator(): - num_iter += 1 -print("Got {} samples on device {}".format(num_iter, args_opt.device)) diff --git a/tutorials/tutorial_code/custom_debugging_info/custom_callback.py b/tutorials/tutorial_code/custom_debugging_info/custom_callback.py deleted file mode 100644 index a2f5b04c2b8729077366aa4a5ea85b18cb4fe81b..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/custom_debugging_info/custom_callback.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================ -""" -callback function -""" -import time -from mindspore.train.callback import Callback -from mindspore import save_checkpoint - -# stop training at runtime*60 second -class StopAtTime(Callback): - """ - Args: - run_time (float): set training time - - Example: - >>> StopAtTime(1) - """ - def __init__(self, run_time): - super(StopAtTime, self).__init__() - self.run_time = run_time*60 - - def begin(self, run_context): - cb_params = run_context.original_args() - cb_params.init_time = time.time() - - def step_end(self, run_context): - cb_params = run_context.original_args() - epoch_num = cb_params.cur_epoch_num - step_num = cb_params.cur_step_num - loss = cb_params.net_outputs - cur_time = time.time() - if (cur_time - cb_params.init_time) > self.run_time: - print("epoch: ", epoch_num, " step: ", step_num, " loss: ", loss) - run_context.request_stop() - - def end(self, run_context): - cb_params = run_context.original_args() - print(cb_params.list_callback) - -class SaveCallback(Callback): - """ - save the maximum accuracy checkpoint - """ - def __init__(self, model, eval_dataset): - super(SaveCallback, self).__init__() - self.model = model - self.eval_dataset = eval_dataset - self.acc = 0.5 - - def step_end(self, run_context): - cb_params = run_context.original_args() - result = self.model.eval(self.eval_dataset) - if result['accuracy'] > self.acc: - self.acc = result['accuracy'] - file_name = str(self.acc) + ".ckpt" - save_checkpoint(save_obj=cb_params.train_network, ckpt_file_name=file_name) - print("Save the maximum accuracy checkpoint,the accuracy is", self.acc) diff --git a/tutorials/tutorial_code/custom_debugging_info/custom_debugging_info.py b/tutorials/tutorial_code/custom_debugging_info/custom_debugging_info.py deleted file mode 100644 index e9b017ec56f5e9fef568f1e9842379cfb9ce8c3c..0000000000000000000000000000000000000000 --- 
a/tutorials/tutorial_code/custom_debugging_info/custom_debugging_info.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ - -"""Custom Debugging Info Tutorial -This sample code is applicable to CPU, GPU and Ascend. -""" -import os -import json -from mindspore import log as logger -from mindspore import context, Model -import mindspore.nn as nn -from mindspore.nn import SoftmaxCrossEntropyWithLogits -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor -from src.lenet import LeNet5 -from src.datasets import create_dataset -from custom_callback import StopAtTime - - -def set_dump_info(): - """ - set the dump parameter and write it in the JSON file of this directory - """ - abspath = os.getcwd() - data_dump = { - "common_dump_settings": { - "dump_mode": 0, - "path": abspath + "/data_dump", - "net_name": "LeNet5", - "iteration": 0, - "input_output": 2, - "kernels": ["Default/network-WithLossCell/_backbone-LeNet5/flatten-Flatten/Reshape-op118"], - "support_device": [0, 1, 2, 3, 4, 5, 6, 7] - }, - "e2e_dump_settings": { - "enable": True, - "trans_flag": False - } - } - with open("./data_dump.json", "w", encoding="GBK") as f: - json.dump(data_dump, f) - os.environ['MINDSPORE_DUMP_CONFIG'] = abspath + "/data_dump.json" - -def set_log_info(): - os.environ['GLOG_v'] = '1' - 
os.environ['GLOG_logtostderr'] = '1' - os.environ['logger_maxBytes'] = '5242880' - os.environ['GLOG_log_dir'] = 'D:/' if os.name == "nt" else '/var/log/mindspore' - os.environ['logger_backupCount'] = '10' - print(logger.get_log_config()) - -if __name__ == "__main__": - # clean files - if os.name == "nt": - os.system('del/f/s/q *.ckpt *.meta') - else: - os.system('rm -f *.ckpt *.meta *.pb') - - set_dump_info() - set_log_info() - - context.set_context(mode=context.GRAPH_MODE, device_target="CPU") - lr = 0.01 - momentum = 0.9 - epoch_size = 3 - train_data_path = "./datasets/MNIST_Data/train" - eval_data_path = "./datasets/MNIST_Data/test" - model_path = "./models/ckpt/custom_debugging_info/" - - net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - repeat_size = 1 - network = LeNet5() - - metrics = { - 'accuracy': nn.Accuracy(), - 'loss': nn.Loss(), - 'precision': nn.Precision(), - 'recall': nn.Recall(), - 'f1_score': nn.F1() - } - net_opt = nn.Momentum(network.trainable_params(), lr, momentum) - config_ck = CheckpointConfig(save_checkpoint_steps=1875, keep_checkpoint_max=10) - ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet", directory=model_path, config=config_ck) - - model = Model(network, net_loss, net_opt, metrics=metrics) - - print("============== Starting Training ==============") - ds_train = create_dataset(train_data_path, repeat_size=repeat_size) - stop_cb = StopAtTime(run_time=0.6) - model.train(epoch_size, ds_train, callbacks=[ckpoint_cb, LossMonitor(375), stop_cb], dataset_sink_mode=False) - - print("============== Starting Testing ==============") - ds_eval = create_dataset(eval_data_path, repeat_size=repeat_size) - acc = model.eval(ds_eval, dataset_sink_mode=False) - print("============== Accuracy:{} ==============".format(acc)) diff --git a/tutorials/tutorial_code/custom_debugging_info/src/__init__.py b/tutorials/tutorial_code/custom_debugging_info/src/__init__.py deleted file mode 100644 index 
919b0579077e1ca4dc63cedb7ae3a1a3e0134283..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/custom_debugging_info/src/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ diff --git a/tutorials/tutorial_code/custom_debugging_info/src/datasets.py b/tutorials/tutorial_code/custom_debugging_info/src/datasets.py deleted file mode 100644 index 9f77df9573f4f799a8b3dbd8aacabf5ce2b07234..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/custom_debugging_info/src/datasets.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================ - -""" -Processing datasets -""" -import mindspore.dataset.vision.c_transforms as CV -from mindspore.dataset.vision import Inter -import mindspore.dataset as ds -import mindspore.dataset.transforms.c_transforms as CT -from mindspore import dtype as mstype - -def create_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1): - """ - create dataset for train or test - """ - # define dataset - mnist_ds = ds.MnistDataset(data_path) - - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - rescale_nml = 1 / 0.3081 - shift_nml = -1 * 0.1307 / 0.3081 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode - rescale_nml_op = CV.Rescale(rescale_nml, shift_nml) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = CT.TypeCast(mstype.int32) - - # apply map operations on images - mnist_ds = mnist_ds.map(input_columns="label", operations=type_cast_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=resize_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_nml_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=hwc2chw_op, num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script - mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True) - mnist_ds = mnist_ds.repeat(repeat_size) - - return mnist_ds diff --git a/tutorials/tutorial_code/custom_debugging_info/src/lenet.py b/tutorials/tutorial_code/custom_debugging_info/src/lenet.py deleted file mode 100644 
index 52cc0a3a17d6d06399d1ba38d54de28dc74c261f..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/custom_debugging_info/src/lenet.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ - -""" -LeNet5 Network -""" -import mindspore.nn as nn -from mindspore.common.initializer import Normal - -class LeNet5(nn.Cell): - """ - Lenet network - - Args: - num_class (int): the number of classes. Default: 10. - num_channel (int): the number of channels. Default: 1. 
- - Returns: - Tensor, output tensor - Examples: - >>> LeNet(num_class=10) - - """ - - def __init__(self, num_class=10, num_channel=1): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x diff --git a/tutorials/tutorial_code/debug_in_pynative_mode/01_single_op.py b/tutorials/tutorial_code/debug_in_pynative_mode/01_single_op.py deleted file mode 100644 index e2c505be60733ed5d5be53023edf33a7dde62931..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debug_in_pynative_mode/01_single_op.py +++ /dev/null @@ -1,13 +0,0 @@ -"""single op tutorial -This sample code is applicable to Ascend. -""" -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor - -context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend") - -conv = nn.Conv2d(3, 4, 3, bias_init='zeros') -input_data = Tensor(np.ones([1, 3, 5, 5]).astype(np.float32)) -output = conv(input_data) -print(output.asnumpy()) diff --git a/tutorials/tutorial_code/debug_in_pynative_mode/02_single_function.py b/tutorials/tutorial_code/debug_in_pynative_mode/02_single_function.py deleted file mode 100644 index a5ae0cd11ad5e671b4e0dbb6786bad080808717d..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debug_in_pynative_mode/02_single_function.py +++ /dev/null @@ -1,18 +0,0 @@ -"""single function -This sample code is applicable to Ascend. 
-""" -import numpy as np -from mindspore import context, Tensor -import mindspore.ops as ops - -context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend") - -def tensor_add_func(x, y): - z = ops.tensor_add(x, y) - z = ops.tensor_add(z, x) - return z - -input_x = Tensor(np.ones([3, 3], dtype=np.float32)) -input_y = Tensor(np.ones([3, 3], dtype=np.float32)) -output = tensor_add_func(input_x, input_y) -print(output.asnumpy()) diff --git a/tutorials/tutorial_code/debug_in_pynative_mode/03_staging.py b/tutorials/tutorial_code/debug_in_pynative_mode/03_staging.py deleted file mode 100644 index 134c30b0b18c6bab11a898929f0fc0e105370e09..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debug_in_pynative_mode/03_staging.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Improving performance in PyNative mode: Method 1 -This sample code is applicable to Ascend. -""" -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor -import mindspore.ops as ops -from mindspore import ms_function - -context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend") - -class TensorAddNet(nn.Cell): - def __init__(self): - super(TensorAddNet, self).__init__() - self.add = ops.Add() - - @ms_function - def construct(self, x, y): - res = self.add(x, y) - return res - -input_x = Tensor(np.ones([4, 4]).astype(np.float32)) -input_y = Tensor(np.ones([4, 4]).astype(np.float32)) -net = TensorAddNet() - -z = net(input_x, input_y) # Staging mode -tensor_add = ops.Add() -result = tensor_add(input_x, z) # PyNative mode -print(result.asnumpy()) diff --git a/tutorials/tutorial_code/debug_in_pynative_mode/04_staging_2.py b/tutorials/tutorial_code/debug_in_pynative_mode/04_staging_2.py deleted file mode 100644 index 40fa045e0fae9d4a254114ed930aad0390fa151f..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debug_in_pynative_mode/04_staging_2.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Improving performance in PyNative mode: Method 2 
-This sample code is applicable to Ascend. -""" -import numpy as np -from mindspore import context, Tensor -import mindspore.ops as ops -from mindspore import ms_function - -context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend") - -tensor_add = ops.Add() - -@ms_function -def tensor_add_fn(x, y): - res = tensor_add(x, y) - return res - -input_x = Tensor(np.ones([4, 4]).astype(np.float32)) -input_y = Tensor(np.ones([4, 4]).astype(np.float32)) -z = tensor_add_fn(input_x, input_y) -print(z.asnumpy()) diff --git a/tutorials/tutorial_code/debug_in_pynative_mode/05_staging_3.py b/tutorials/tutorial_code/debug_in_pynative_mode/05_staging_3.py deleted file mode 100644 index fbaacda158a410457955935e2976a04c95747464..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debug_in_pynative_mode/05_staging_3.py +++ /dev/null @@ -1,20 +0,0 @@ -"""Improving performance in PyNative mode: Method 3 -This sample code is applicable to Ascend. -""" -import numpy as np -import mindspore.nn as nn -from mindspore import context, Tensor -from mindspore import ms_function - -context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend") - -conv_obj = nn.Conv2d(in_channels=3, out_channels=4, kernel_size=3, stride=2, padding=0) -conv_obj.init_parameters_data() -@ms_function -def conv_fn(x): - res = conv_obj(x) - return res - -input_data = np.random.randn(2, 3, 6, 6).astype(np.float32) -z = conv_fn(Tensor(input_data)) -print(z.asnumpy()) diff --git a/tutorials/tutorial_code/debug_in_pynative_mode/06_grad.py b/tutorials/tutorial_code/debug_in_pynative_mode/06_grad.py deleted file mode 100644 index 164f9f977d037397efc7af8711e0451696f2d6e0..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debug_in_pynative_mode/06_grad.py +++ /dev/null @@ -1,17 +0,0 @@ -"""grad tutorial -This sample code is applicable to Ascend. 
-""" -import mindspore.ops as ops -import mindspore.context as context -from mindspore import dtype as mstype -from mindspore import Tensor - -context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend") - -def mul(x, y): - return x * y - -def mainf(x, y): - return ops.GradOperation(get_all=True)(mul)(x, y) - -print(mainf(Tensor(1, mstype.int32), Tensor(2, mstype.int32))) diff --git a/tutorials/tutorial_code/debug_in_pynative_mode/07_lenet.py b/tutorials/tutorial_code/debug_in_pynative_mode/07_lenet.py deleted file mode 100644 index e04ddae1b39778867280ff03211f2a5374213ba0..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debug_in_pynative_mode/07_lenet.py +++ /dev/null @@ -1,98 +0,0 @@ -"""lenet tutorial -This sample code is applicable to Ascend. -""" -import numpy as np -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import context, Tensor, ParameterTuple -from mindspore.common.initializer import TruncatedNormal -from mindspore.nn import WithLossCell, Momentum - -context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend") - -def conv(in_channels, out_channels, kernel_size, stride=1, padding=0): - """weight initial for conv layer""" - weight = weight_variable() - return nn.Conv2d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, padding=padding, - weight_init=weight, has_bias=False, pad_mode="valid") - -def fc_with_initialize(input_channels, out_channels): - """weight initial for fc layer""" - weight = weight_variable() - bias = weight_variable() - return nn.Dense(input_channels, out_channels, weight, bias) - -def weight_variable(): - """weight initial""" - return TruncatedNormal(0.02) - - -class LeNet5(nn.Cell): - """ - Lenet network - Args: - num_class (int): Num classes. Default: 10. 
- - Returns: - Tensor, output tensor - - Examples: - >>> LeNet(num_class=10) - """ - def __init__(self, num_class=10): - super(LeNet5, self).__init__() - self.num_class = num_class - self.batch_size = 32 - self.conv1 = conv(1, 6, 5) - self.conv2 = conv(6, 16, 5) - self.fc1 = fc_with_initialize(16 * 5 * 5, 120) - self.fc2 = fc_with_initialize(120, 84) - self.fc3 = fc_with_initialize(84, self.num_class) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.reshape = ops.Reshape() - - def construct(self, x): - """ construct LeNet5 """ - x = self.conv1(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.conv2(x) - x = self.relu(x) - x = self.max_pool2d(x) - x = self.reshape(x, (self.batch_size, -1)) - x = self.fc1(x) - x = self.relu(x) - x = self.fc2(x) - x = self.relu(x) - x = self.fc3(x) - return x - - -class GradWrap(nn.Cell): - """ GradWrap definition """ - def __init__(self, network): - super(GradWrap, self).__init__(auto_prefix=False) - self.network = network - self.weights = ParameterTuple(filter(lambda x: x.requires_grad, network.get_parameters())) - - def construct(self, x, label): - weights = self.weights - return ops.GradOperation(get_by_list=True)(self.network, weights)(x, label) - -net = LeNet5() -optimizer = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.1, 0.9) -criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') -net_with_criterion = WithLossCell(net, criterion) -train_network = GradWrap(net_with_criterion) -train_network.set_train() - -input_data = Tensor(np.ones([net.batch_size, 1, 32, 32]).astype(np.float32) * 0.01) -input_label = Tensor(np.ones([net.batch_size]).astype(np.int32)) -output = net(Tensor(input_data)) -loss_output = criterion(output, input_label) -grads = train_network(input_data, input_label) -success = optimizer(grads) -loss = loss_output.asnumpy() -print(loss) diff --git a/tutorials/tutorial_code/debug_in_pynative_mode/README.md 
b/tutorials/tutorial_code/debug_in_pynative_mode/README.md deleted file mode 100644 index 4d5736d04d997527923a6e749d3211d9d8bfc316..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debug_in_pynative_mode/README.md +++ /dev/null @@ -1,17 +0,0 @@ -# 文件说明 - -> 测试文件运行环境为: mindspore-ascend 1.0.1。 - -- `01_single_op.py`:执行单个算子,并打印相关结果,示例输出`([[[[-0.02190447 -0.05208071 -0.0……5208071 -0.05208071 -0.06265172] ... [ 0.05016355 0.03958241 0.03958241 0.03958241 0.03443141]]]])`。 - -- `02_single_function.py`:若干算子组合成一个函数,并打印相关结果,示例输出`([[3. 3. 3.] [3. 3. 3.] [3. 3. 3.]])`。 - -- `03_staging.py`:MindSpore提供Staging功能,该功能可以在PyNative模式下将Python函数或者Python类的方法编译成计算图,通过图优化等技术提高运行速度,示例输出`([[3. 3. 3. 3.] [3. 3. 3. 3.] [3. 3. 3. 3.] [3. 3. 3. 3.]])`。 - -- `04_staging_2.py`:加装了`ms_function`装饰器的函数中,如果包含不需要进行参数训练的算子(如`pooling`、`tensor_add`等算子),则这些算子可以在被装饰的函数中直接调用,示例输出`[[2. 2. 2. 2.] [2. 2. 2. 2.] [2. 2. 2. 2.] [2. 2. 2. 2.]]`。 - -- `05_staging_3.py`:被装饰的函数中包含了需要进行参数训练的算子(如`Convolution`、`BatchNorm`等算子),则这些算子必须在被装饰等函数之外完成实例化操作,示例输出`[[[[ 0.10377571 -0.0182163 -0.05221086] ... [ 0.0377498 -0.06117418 0.00546303]]]]`。 - -- `06_grad.py`:PyNative模式下,还可以支持单独求梯度的操作,示例输出`(Tensor(shape=[], dtype=Int32, value=2), Tensor(shape=[], dtype=Int32, value=1))`。 - -- `07_lenet.py`:LeNet示例输出:2.3050091。 diff --git a/tutorials/tutorial_code/debugging_info/custom_callback.py b/tutorials/tutorial_code/debugging_info/custom_callback.py deleted file mode 100644 index 9c1d997b3e924e41d8511dee668b7c9e677bc9c0..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debugging_info/custom_callback.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""custom callback -This sample code is applicable to Ascend, CPU and GPU. -""" -import time -import mindspore.nn as nn -from mindspore.nn import Momentum, SoftmaxCrossEntropyWithLogits -from mindspore import Model, context, save_checkpoint -from mindspore.train.callback import Callback, LossMonitor - -from src.dataset import create_train_dataset, create_eval_dataset -from src.net import Net - - -class StopAtTime(Callback): - """StopAtTime""" - def __init__(self, run_time): - """init""" - super(StopAtTime, self).__init__() - self.run_time = run_time*60 - - def begin(self, run_context): - """begin""" - cb_params = run_context.original_args() - cb_params.init_time = time.time() - - def step_end(self, run_context): - """step end""" - cb_params = run_context.original_args() - epoch_num = cb_params.cur_epoch_num - step_num = cb_params.cur_step_num - loss = cb_params.net_outputs - cur_time = time.time() - if (cur_time - cb_params.init_time) > self.run_time: - print(f"Stop after {self.run_time}s.") - print(f"epoch: {epoch_num}, step: {step_num}, loss is {loss}") - run_context.request_stop() - - -class SaveCallback(Callback): - """SaveCallback""" - def __init__(self, eval_model, ds_eval): - """init""" - super(SaveCallback, self).__init__() - self.model = eval_model - self.ds_eval = ds_eval - self.acc = 0 - - def step_end(self, run_context): - """step end""" - cb_params = run_context.original_args() - result = self.model.eval(self.ds_eval) - if result['Accuracy'] > self.acc: - self.acc 
= result['Accuracy'] - file_name = str(self.acc) + ".ckpt" - save_checkpoint(save_obj=cb_params.train_network, ckpt_file_name=file_name) - print("Save the maximum accuracy checkpoint, the accuracy is", self.acc) - -def set_dump_info(): - """ - set the dump parameter and write it in the JSON file of this directory - """ - abspath = os.getcwd() - data_dump = { - "common_dump_settings": { - "dump_mode": 0, - "path": abspath + "/data_dump", - "net_name": "LeNet5", - "iteration": 0, - "input_output": 2, - "kernels": ["Default/network-WithLossCell/_backbone-LeNet5/flatten-Flatten/Reshape-op118"], - "support_device": [0, 1, 2, 3, 4, 5, 6, 7] - }, - "e2e_dump_settings": { - "enable": True, - "trans_flag": False - } - } - with open("./data_dump.json", "w", encoding="GBK") as f: - json.dump(data_dump, f) - os.environ['MINDSPORE_DUMP_CONFIG'] = abspath + "/data_dump.json" - -def set_log_info(): - os.environ['GLOG_v'] = '1' - os.environ['GLOG_logtostderr'] = '1' - os.environ['logger_maxBytes'] = '5242880' - os.environ['GLOG_log_dir'] = 'D:/' if os.name == "nt" else '/var/log/mindspore' - os.environ['logger_backupCount'] = '10' - print(logger.get_log_config()) - -if __name__ == "__main__": - set_dump_info() - set_log_info() - context.set_context(mode=context.GRAPH_MODE) - train_dataset = create_train_dataset() - eval_dataset = create_eval_dataset() - net = Net() - net_opt = Momentum(net.trainable_params(), 0.01, 0.9) - net_loss = SoftmaxCrossEntropyWithLogits(reduction='mean') - model = Model(network=net, loss_fn=net_loss, optimizer=net_opt, metrics={'Accuracy': nn.Accuracy()}) - model.train(epoch=100, - train_dataset=train_dataset, - callbacks=[LossMonitor(), StopAtTime(3), SaveCallback(model, eval_dataset)]) diff --git a/tutorials/tutorial_code/debugging_info/custom_metrics.py b/tutorials/tutorial_code/debugging_info/custom_metrics.py deleted file mode 100644 index 9eb63b14646552fa7a1ed11c841ee0f0bb3cd06d..0000000000000000000000000000000000000000 --- 
a/tutorials/tutorial_code/debugging_info/custom_metrics.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""use metrics -This sample code is applicable to Ascend, CPU and GPU. -""" -import mindspore.nn as nn -from mindspore.nn import Momentum, SoftmaxCrossEntropyWithLogits -from mindspore import Model, context -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor - -from src.dataset import create_train_dataset, create_eval_dataset -from src.net import Net - - -if __name__ == "__main__": - context.set_context(mode=context.GRAPH_MODE) - ds_train = create_train_dataset() - ds_eval = create_eval_dataset() - net = Net() - net_opt = Momentum(net.trainable_params(), 0.01, 0.9) - net_loss = SoftmaxCrossEntropyWithLogits(reduction='mean') - metrics = { - 'Accuracy': nn.Accuracy(), - 'Loss': nn.Loss(), - 'Precision': nn.Precision(), - 'Recall': nn.Recall(), - 'F1_score': nn.F1() - } - config_ck = CheckpointConfig(save_checkpoint_steps=1000, keep_checkpoint_max=10) - ckpoint = ModelCheckpoint(prefix="CKPT", config=config_ck) - model = Model(network=net, loss_fn=net_loss, optimizer=net_opt, metrics=metrics) - model.train(epoch=2, train_dataset=ds_train, callbacks=[ckpoint, LossMonitor()]) - result = model.eval(ds_eval) - print(result) diff --git 
a/tutorials/tutorial_code/debugging_info/src/__init__.py b/tutorials/tutorial_code/debugging_info/src/__init__.py deleted file mode 100644 index 6228b7132697d24157a4052193061e9913f031c4..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debugging_info/src/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ diff --git a/tutorials/tutorial_code/debugging_info/src/dataset.py b/tutorials/tutorial_code/debugging_info/src/dataset.py deleted file mode 100644 index eac47f1b9f5942481b26986265340e5a3608e5f3..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debugging_info/src/dataset.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================ -"""dataset -Custom dataset. -""" -import numpy as np -from mindspore import Tensor - - -def create_train_dataset(image_size=(1, 32, 32), num_classes=10): - """train dataset.""" - ds = CustomDataSet(image_size=image_size, num_classes=num_classes) - return ds - - -def create_eval_dataset(image_size=(1, 32, 32), num_classes=10): - """eval dataset""" - ds = CustomDataSet(size=2048, batch_size=2048, image_size=image_size, num_classes=num_classes) - return ds - - -class CustomDataSet: - """CustomDataset""" - def __init__(self, size=32768, batch_size=32, image_size=(1, 32, 32), num_classes=10, is_onehot=True): - """init""" - self.size = size - self.batch_size = batch_size - self.image_size = image_size - self.num_classes = num_classes - self.batch_index = 0 - self.is_onehot = is_onehot - self.repeat_count = 1 - self.batch_data_size = (self.batch_size,) + image_size - - def get_dataset_size(self): - """get dataset size""" - return int(self.size / self.batch_size) - - def get_repeat_count(self): - """get repeat count""" - return self.repeat_count - - def create_tuple_iterator(self, num_epochs=-1, do_copy=False): - """create tuple iterator""" - self.num_epochs = num_epochs - self.do_copy = do_copy - return self - - def __getitem__(self, batch_index): - """get item""" - if batch_index * self.batch_size >= len(self): - raise IndexError("{} index out of range".format(self.__class__.__name__)) - rng_state = np.random.get_state() - np.random.seed(batch_index) - img = np.random.randn(*self.batch_data_size) - target = np.random.randint(0, self.num_classes, size=(1, self.batch_size)) - np.random.set_state(rng_state) - img_ret = img.astype(np.float32) - target_ret = target.astype(np.float32) - if self.is_onehot: - target_onehot = np.zeros(shape=(self.batch_size, self.num_classes)) - target_onehot[np.arange(self.batch_size), target] = 1 - target_ret = target_onehot.astype(np.float32) - return 
Tensor(img_ret), Tensor(target_ret) - - def __len__(self): - """get size""" - return self.size - - def __iter__(self): - """iter dataset""" - self.batch_index = 0 - return self - - def reset(self): - """reset dataset""" - self.batch_index = 0 - - def __next__(self): - """get next batch""" - if self.batch_index * self.batch_size < len(self): - data = self[self.batch_index] - self.batch_index += 1 - return data - raise StopIteration diff --git a/tutorials/tutorial_code/debugging_info/src/net.py b/tutorials/tutorial_code/debugging_info/src/net.py deleted file mode 100644 index 755eb74fca9941f6e85bbd030a4be371182e8018..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/debugging_info/src/net.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""net -Custom net. 
-""" -import mindspore.nn as nn - - -class Net(nn.Cell): - """Net""" - def __init__(self, num_class=10, num_channel=1): - """init""" - super(Net, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init='ones') - self.fc2 = nn.Dense(120, 84, weight_init='ones') - self.fc3 = nn.Dense(84, num_class, weight_init='ones') - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - """construct""" - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x diff --git a/tutorials/tutorial_code/distributed_inference/dataset.py b/tutorials/tutorial_code/distributed_inference/dataset.py deleted file mode 100644 index 84635f1b70d5cbbed6de1857492993cb62eb363b..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_inference/dataset.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -'''dataset -The sample can be run on Ascend 910 AI processor. 
-''' -import numpy as np -from mindspore import Tensor -from mindspore.communication.management import init, get_rank, get_group_size - - -class FakeData: - """custom dataset""" - def __init__(self, size=256, batch_size=16, image_size=(96,), num_classes=16, random_offset=0): - """init""" - self.size = size - self.rank_batch_size = batch_size - self.total_batch_size = self.rank_batch_size - self.random_offset = random_offset - self.image_size = image_size - self.num_classes = num_classes - self.num_epochs = -1 - self.rank_size = 1 - self.rank_id = 0 - self.batch_index = 0 - self.image_data_type = np.float32 - self.label_data_type = np.float32 - self.is_onehot = True - init(backend_name='hccl') - self.rank_size = get_group_size() - self.rank_id = get_rank() - self.total_batch_size = self.rank_batch_size * self.rank_size - self.total_batch_data_size = (self.rank_size, self.rank_batch_size) + image_size - self.do_copy = False - - def get_dataset_size(self): - """get dataset size""" - return int(self.size / self.total_batch_size) - - def get_repeat_count(self): - """get repeat count""" - return 1 - - def create_tuple_iterator(self, num_epochs=-1, do_copy=False): - """create tuple iterator""" - self.num_epochs = num_epochs - self.do_copy = do_copy - return self - - def __getitem__(self, batch_index): - """get item""" - if batch_index * self.total_batch_size >= len(self): - raise IndexError("{} index out of range".format(self.__class__.__name__)) - rng_state = np.random.get_state() - np.random.seed(batch_index + self.random_offset) - img = np.random.randn(*self.total_batch_data_size) - target = np.random.randint(0, self.num_classes, size=(self.rank_size, self.rank_batch_size)) - np.random.set_state(rng_state) - img = img[self.rank_id] - target = target[self.rank_id] - img_ret = img.astype(self.image_data_type) - target_onehot = np.zeros(shape=(self.rank_batch_size, self.num_classes)) - target_onehot[np.arange(self.rank_batch_size), target] = 1 - target_ret = 
target_onehot.astype(self.label_data_type) - return Tensor(img_ret), Tensor(target_ret) - - def __len__(self): - """get size""" - return self.size - - def __iter__(self): - """iter""" - self.batch_index = 0 - return self - - def reset(self): - """reset index""" - self.batch_index = 0 - - def __next__(self): - """next""" - if self.batch_index * self.total_batch_size < len(self): - data = self[self.batch_index] - self.batch_index += 1 - return data - raise StopIteration diff --git a/tutorials/tutorial_code/distributed_inference/distributed_inference.py b/tutorials/tutorial_code/distributed_inference/distributed_inference.py deleted file mode 100644 index ebe475245e51587cc8f157f9ed10e8a1807a5dae..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_inference/distributed_inference.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""distributed inference -This sample code is applicable to Ascend. 
-""" -import numpy as np -from net import Net -from mindspore import context, Model, Tensor, load_distributed_checkpoint -from mindspore.communication import init - - -def test_inference(): - """distributed inference after distributed training""" - context.set_context(mode=context.GRAPH_MODE) - init(backend_name="hccl") - context.set_auto_parallel_context(full_batch=True, parallel_mode="semi_auto_parallel", - strategy_ckpt_load_file="./train_strategy.ckpt", device_num=8) - - predict_data = create_predict_data() - network = Net(matmul_size=(96, 16)) - model = Model(network) - predict_layout = model.infer_predict_layout(Tensor(predict_data)) - ckpt_file_list = create_ckpt_file_list() - load_distributed_checkpoint(network, ckpt_file_list, predict_layout) - predict_result = model.predict(predict_data) - print(predict_result) - - -def create_predict_data(): - """user-defined predict data""" - inputs_np = np.random.randn(128, 96).astype(np.float32) - return Tensor(inputs_np) - - -def create_ckpt_file_list(): - """user-defined ckpt file list""" - ckpt_file_list = [] - for i in range(8): - path = "../device" + str(i) + "/" + "rank_" + str(i) + "_ckpt/" + "parallel-2_2.ckpt" - ckpt_file_list.append(path) - return ckpt_file_list diff --git a/tutorials/tutorial_code/distributed_inference/distributed_training.py b/tutorials/tutorial_code/distributed_inference/distributed_training.py deleted file mode 100644 index 074eb49f56bd95c123ad49a81d006a3588818397..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_inference/distributed_training.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""distributed training -This sample code is applicable to Ascend. -""" -import os -from dataset import FakeData -from net import Net -from mindspore import context, Model -from mindspore.train.callback import CheckpointConfig, ModelCheckpoint -from mindspore.context import ParallelMode -from mindspore.nn import Momentum, SoftmaxCrossEntropyWithLogits - - -def test_train(): - """distributed training""" - context.set_context(mode=context.GRAPH_MODE) - parallel_dataset = FakeData() - strategy = ((2, 1), (1, 4)) - context.set_auto_parallel_context(parallel_mode=ParallelMode.SEMI_AUTO_PARALLEL, - device_num=8, - strategy_ckpt_save_file="./train_strategy.ckpt") - network = Net(matmul_size=(96, 16), strategy=strategy) - net_opt = Momentum(network.trainable_params(), 0.01, 0.9) - net_loss = SoftmaxCrossEntropyWithLogits(reduction='mean') - model = Model(network=network, loss_fn=net_loss, optimizer=net_opt) - ckpt_config = CheckpointConfig(keep_checkpoint_max=1, integrated_save=False) - global_rank_id = int(os.getenv("RANK_ID")) - ckpt_path = './rank_{}_ckpt'.format(global_rank_id) - ckpt_callback = ModelCheckpoint(prefix='parallel', directory=ckpt_path, config=ckpt_config) - model.train(epoch=2, train_dataset=parallel_dataset, callbacks=[ckpt_callback], dataset_sink_mode=False) - context.reset_auto_parallel_context() diff --git a/tutorials/tutorial_code/distributed_inference/net.py b/tutorials/tutorial_code/distributed_inference/net.py deleted file mode 100644 index 
4b563a2019dc01f688fcb01901e0518001999fe6..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_inference/net.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -'''net -The sample can be run on Ascend 910 AI processor. -''' -import numpy as np -from mindspore import Tensor, Parameter, ops -from mindspore.nn import Cell - - -class Net(Cell): - """Net""" - def __init__(self, matmul_size, transpose_a=False, transpose_b=False, strategy=None): - """init""" - super().__init__() - matmul_np = np.full(matmul_size, 0.5, dtype=np.float32) - self.matmul_weight = Parameter(Tensor(matmul_np)) - self.matmul = ops.MatMul(transpose_a=transpose_a, transpose_b=transpose_b) - self.neg = ops.Neg() - if strategy is not None: - self.matmul.shard(strategy) - - def construct(self, inputs): - """construct""" - x = self.matmul(inputs, self.matmul_weight) - x = self.neg(x) - return x diff --git a/tutorials/tutorial_code/distributed_inference/rank_table_8pcs.json b/tutorials/tutorial_code/distributed_inference/rank_table_8pcs.json deleted file mode 100644 index 22c0cd210de4aede4861c816274fa0be8f17a09b..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_inference/rank_table_8pcs.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "version": "1.0", - "server_count": "1", - "server_list": [ - { 
- "server_id": "10.155.111.140", - "device": [ - {"device_id": "0","device_ip": "192.1.27.6","rank_id": "0"}, - {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"}, - {"device_id": "2","device_ip": "192.3.27.6","rank_id": "2"}, - {"device_id": "3","device_ip": "192.4.27.6","rank_id": "3"}, - {"device_id": "4","device_ip": "192.1.27.7","rank_id": "4"}, - {"device_id": "5","device_ip": "192.2.27.7","rank_id": "5"}, - {"device_id": "6","device_ip": "192.3.27.7","rank_id": "6"}, - {"device_id": "7","device_ip": "192.4.27.7","rank_id": "7"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" -} diff --git a/tutorials/tutorial_code/distributed_inference/run_inference.sh b/tutorials/tutorial_code/distributed_inference/run_inference.sh deleted file mode 100644 index 67bee27122338181cc5dfe729f5e5e067327b99a..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_inference/run_inference.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash - -EXEC_PATH=$(pwd) - -export RANK_TABLE_FILE=${EXEC_PATH}/rank_table_8pcs.json -export RANK_SIZE=8 - -for((i=1;i<${RANK_SIZE};i++)) -do - rm -rf device$i - mkdir device$i - cp ./distributed_training.py ./dataset.py ./net.py ./device$i - cd ./device$i - export DEVICE_ID=$i - export RANK_ID=$i - echo "start training for device $i" - pytest -sv ./distributed_training.py::test_train > train.log$i 2>&1 & - cd ../ -done - -rm -rf device0 -mkdir device0 -cp ./distributed_training.py ./dataset.py ./net.py ./device0 -cd ./device0 -export DEVICE_ID=0 -export RANK_ID=0 -echo "start training for device 0" -pytest -sv ./distributed_training.py::test_train > train.log0 2>&1 -if [ $? 
-eq 0 ];then - echo "training success" -else - echo "training failed" - exit 2 -fi -cd ../ - -for((i=1;i<${RANK_SIZE};i++)) -do - cp ./distributed_inference.py ./device$i - cd ./device$i - export DEVICE_ID=$i - export RANK_ID=$i - echo "start inference for device $i" - pytest -sv ./distributed_inference.py::test_inference > inference.log$i 2>&1 & - cd ../ -done - -cp ./distributed_inference.py ./device0 -cd ./device0 -export DEVICE_ID=0 -export RANK_ID=0 -echo "start inference for device 0" -pytest -sv ./distributed_inference.py::test_inference > inference.log0 2>&1 -if [ $? -eq 0 ];then - echo "inference success" -else - echo "inference failed" - exit 2 -fi -cd ../ diff --git a/tutorials/tutorial_code/distributed_training/cell_wrapper.py b/tutorials/tutorial_code/distributed_training/cell_wrapper.py deleted file mode 100644 index 7357afc7c5ed9302acd0e38c9449192ff1d94a07..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/cell_wrapper.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================ -""" -grad accumulation cell wrapper -""" -import numpy as np -import mindspore.common.dtype as mstype -from mindspore import ops, context, Tensor, Parameter -from mindspore.nn import Cell, TrainOneStepCell, TrainOneStepWithLossScaleCell -from mindspore.nn.wrap.loss_scale import _grad_scale -from mindspore.common.initializer import initializer -from mindspore.ops.operations.comm_ops import _VirtualDataset - -zeroslike = ops.ZerosLike() -reset_accu_grads = ops.MultitypeFuncGraph("reset_accu_grads") - -@reset_accu_grads.register("Tensor") -def _reset_accu_grads(accu_grad): - succ = True - return ops.depend(succ, ops.assign(accu_grad, zeroslike(accu_grad))) - -cast = ops.Cast() -update_accu_grads = ops.MultitypeFuncGraph("update_accu_grads") - - -@update_accu_grads.register("Tensor", "Tensor") -def _update_accu_grads(accu_grad, grad): - succ = True - return ops.depend(succ, ops.assign_add(accu_grad, cast(grad, mstype.float32))) - - -class TrainAccuStepsCell(TrainOneStepCell): - """construct train accu step cell""" - def __init__(self, network, optimizer, sens=1.0): - super(TrainAccuStepsCell, self).__init__(network, optimizer, sens) - self.accumulation = False - self.accumulation_steps = context.get_auto_parallel_context("grad_accumulation_step") - self.accu_grads = self.weights.clone(prefix="accu_grads", init='zeros') - self.hyper_map = ops.HyperMap() - - def construct(self, *inputs): - """Defines the computation performed.""" - weights = self.weights - loss = self.network(*inputs) - sens = ops.Fill()(ops.DType()(loss), ops.Shape()(loss), self.sens) - grads = self.grad(self.network, weights)(*inputs, sens) - if self.accumulation and self.accumulation_steps > 1: - accu_succ = self.hyper_map(update_accu_grads, self.accu_grads, grads) - loss = ops.depend(loss, accu_succ) - if self.accumulation: - succ = False - else: - grads = self.grad_reducer(grads) - accu_grads = 
ops.depend(self.accu_grads, grads) - accu_succ = self.hyper_map(reset_accu_grads, accu_grads) - loss = ops.depend(loss, accu_succ) - succ = self.optimizer(grads) - return ops.depend(loss, succ) - - -class TrainAccuStepsWithLossScaleCell(TrainOneStepWithLossScaleCell): - """construct train accu step with loss scale cell""" - def __init__(self, network, optimizer, scale_sense): - super(TrainAccuStepsWithLossScaleCell, self).__init__(network, optimizer, scale_sense) - self.accumulation = False - self.accumulation_steps = context.get_auto_parallel_context("grad_accumulation_step") - self.one = Tensor(np.array([1]).astype(np.int32)) - self.zero = Tensor(np.array([0]).astype(np.int32)) - self.accu_grads = self.weights.clone(prefix="accu_grads", init='zeros') - self.accu_overflow = Parameter(initializer(0, [1], mstype.int32)) - self.accu_loss = Parameter(initializer(0, [1], mstype.float32)) - self.cast = ops.Cast() - self.logical_or = ops.LogicalOr() - self.not_equal = ops.NotEqual() - self.select = ops.Select() - self.reshape = ops.Reshape() - - def construct(self, *inputs): - """Defines the computation performed.""" - weights = self.weights - loss = self.network(*inputs) - scaling_sens = self.scale_sense - status, scaling_sens = self.start_overflow_check(loss, scaling_sens) - scaling_sens_filled = ops.ones_like(loss) * ops.cast(scaling_sens, ops.dtype(loss)) - grads = self.grad(self.network, weights)(*inputs, scaling_sens_filled) - # accumulate gradients - if self.accumulation and self.accumulation_steps > 1: - accu_succ = self.hyper_map(update_accu_grads, self.accu_grads, grads) - loss = ops.depend(loss, accu_succ) - overflow = self.get_overflow_status(status, grads) - overflow = self.logical_or(self.not_equal(self.accu_overflow, self.zero), overflow) - accu_overflow = self.select(overflow, self.one, self.zero) - - if self.accumulation: - succ = False - self.accu_overflow = accu_overflow - else: - self.accu_overflow = self.zero - # apply grad reducer on grads - grads = 
self.grad_reducer(grads) - grads = self.hyper_map(ops.partial(_grad_scale, scaling_sens), grads) - accu_overflow = self.allreduce(accu_overflow) - overflow = self.less_equal(self.base, accu_overflow) - accu_grads = ops.depend(self.accu_grads, grads) - accu_succ = self.hyper_map(reset_accu_grads, accu_grads) - overflow = ops.depend(overflow, accu_succ) - overflow = self.reshape(overflow, (())) - overflow = self.process_loss_scale(overflow) - if overflow: - succ = False - else: - succ = self.optimizer(grads) - - ret = (loss, overflow, scaling_sens) - return ops.depend(ret, succ) - - -class VirtualDatasetCell(Cell): - def __init__(self, backbone): - super(VirtualDatasetCell, self).__init__(auto_prefix=False) - self._backbone = backbone - self._virtual_dataset = _VirtualDataset() - - def construct(self, *inputs): - output = self._virtual_dataset(*inputs) - return self._backbone(*output) diff --git a/tutorials/tutorial_code/distributed_training/model_accu.py b/tutorials/tutorial_code/distributed_training/model_accu.py deleted file mode 100644 index 554c3766fa40b5a082433ef07705388da89771f8..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/model_accu.py +++ /dev/null @@ -1,157 +0,0 @@ -"""model accu -Construct model accu -""" -import math -from mindspore.train.callback import RunContext -from mindspore import context -from mindspore.context import ParallelMode -from mindspore import Model, connect_network_with_dataset -from mindspore.common.dtype import pytype_to_dtype -from mindspore._c_expression import init_exec_dataset -from mindspore.train.train_thor.dataset_helper import DatasetHelper - - -def _convert_type(types): - """ - Convert from numpy type to tensor type. - - Args: - types (list): Numpy type list of element in dataset. - - Returns: - list, list of element in dataset. 
- """ - ms_types = [] - for np_type in types: - ms_type = pytype_to_dtype(np_type) - ms_types.append(ms_type) - return ms_types - - -def _get_types_and_shapes(dataset): - """Get dataset types and shapes.""" - dataset_types = _convert_type(dataset.output_types()) - dataset_shapes = dataset.output_shapes() - return dataset_types, dataset_shapes - - -def _exec_datagraph(exec_dataset, dataset_size, phase='dataset'): - """Initialize and execute the dataset graph.""" - batch_size = exec_dataset.get_batch_size() - input_indexs = exec_dataset.input_indexs - - # transform data format - dataset_types, dataset_shapes = _get_types_and_shapes(exec_dataset) - init_exec_dataset(exec_dataset.__transfer_dataset__.queue_name, - dataset_size, - batch_size, - dataset_types, - dataset_shapes, - input_indexs, - phase=phase, - need_run=False) - - -class Model_ACCU(Model): - """"Construct Model_ACCU""" - def __init__(self, network, loss_fn=None, optimizer=None, metrics=None, eval_network=None, - eval_indexes=None, amp_level="O0", **kwargs): - super(Model_ACCU, self).__init__(network, loss_fn, optimizer, metrics, eval_network, - eval_indexes, amp_level, **kwargs) - self._frequency = context.get_auto_parallel_context("grad_accumulation_step") - self._train_network = self._build_train_network() - - def _exec_preprocess(self, network, is_train, phase, dataset, dataset_sink_mode, sink_size=-1, - epoch_num=1, iter_first_order=1): - """Initializes dataset.""" - if dataset_sink_mode and not is_train: - dataset.__loop_size__ = 1 - dataset_helper = DatasetHelper(dataset, dataset_sink_mode, sink_size, epoch_num, iter_first_order) - - if dataset_sink_mode and context.get_context("device_target") != "GPU": - network = connect_network_with_dataset(network, dataset_helper) - network.set_train(is_train) - network.phase = phase - - if self._parallel_mode in (ParallelMode.SEMI_AUTO_PARALLEL, ParallelMode.AUTO_PARALLEL): - network.set_auto_parallel() - - return dataset_helper, network - - def 
_train_dataset_sink_process(self, epoch, train_dataset, list_callback=None, cb_params=None, sink_size=-1): - """ - Training process. The data would be passed to network through dataset channel. - - Args: - epoch (int): Total number of iterations on the data. - train_dataset (Dataset): A training dataset iterator. If there is no - loss_fn, a tuple with multiple data (data1, data2, data3, ...) should be - returned and passed to the network. Otherwise, a tuple (data, label) should - be returned. The data and label would be passed to the network and loss - function respectively. - list_callback (Callback): Executor of callback list. Default: None. - cb_params (_InternalCallbackParam): Callback parameters. Default: None. - sink_size (int): Control the amount of data in each sink. Default: -1. - """ - if sink_size == -1: - epoch_num = epoch - else: - epoch_num = math.ceil(epoch * sink_size / train_dataset.get_dataset_size()) - - iter_first_order = 1 - iter_second_order = self._frequency - 1 - train_dataset.__loop_size__ = iter_second_order - dataset_helper, train_network = self._exec_preprocess(self._train_network, - is_train=True, - phase='train', - dataset=train_dataset, - dataset_sink_mode=True, - sink_size=sink_size, - epoch_num=epoch_num, - iter_first_order=iter_first_order) - - self._train_network = train_network - cb_params.train_network = self._train_network - cb_params.cur_step_num = 0 - - run_context = RunContext(cb_params) - list_callback.begin(run_context) - - # used to stop training for early stop, such as stopAtTIme or stopATStep - should_stop = False - switch_branch_one = True - train_network_init_flag = True - has_do_dataset_init = False - - for i in range(epoch): - cb_params.cur_epoch_num = i + 1 - list_callback.epoch_begin(run_context) - # for data sink dataset_helper only iter once, other wise iter epoch_size times. 
- for inputs in dataset_helper: - list_callback.step_begin(run_context) - if switch_branch_one: - cb_params.cur_step_num += iter_second_order - if train_network_init_flag: - self._train_network.add_flags_recursive(accumulation=True) - self._train_network.phase = 'train0' - else: - cb_params.cur_step_num += iter_first_order - if train_network_init_flag: - self._train_network.add_flags_recursive(accumulation=False) - train_network_init_flag = False - self._train_network.phase = 'train1' - if not has_do_dataset_init: - _exec_datagraph(train_dataset, iter_first_order, phase='train1_dataset') - has_do_dataset_init = True - switch_branch_one = not switch_branch_one - outputs = self._train_network(*inputs) - cb_params.net_outputs = outputs - list_callback.step_end(run_context) - - list_callback.epoch_end(run_context) - should_stop = should_stop or run_context.get_stop_requested() - if should_stop: - break - dataset_helper.stop_send() - - list_callback.end(run_context) diff --git a/tutorials/tutorial_code/distributed_training/rank_table_16pcs.json b/tutorials/tutorial_code/distributed_training/rank_table_16pcs.json deleted file mode 100644 index 52f9d9e9e516cb2b606a01af0badc4c6118b9792..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/rank_table_16pcs.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "version": "1.0", - "server_count": "2", - "server_list": [ - { - "server_id": "10.155.111.140", - "device": [ - {"device_id": "0","device_ip": "192.1.27.6","rank_id": "0"}, - {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"}, - {"device_id": "2","device_ip": "192.3.27.6","rank_id": "2"}, - {"device_id": "3","device_ip": "192.4.27.6","rank_id": "3"}, - {"device_id": "4","device_ip": "192.1.27.7","rank_id": "4"}, - {"device_id": "5","device_ip": "192.2.27.7","rank_id": "5"}, - {"device_id": "6","device_ip": "192.3.27.7","rank_id": "6"}, - {"device_id": "7","device_ip": "192.4.27.7","rank_id": "7"}], - "host_nic_ip": "reserve" - }, - { 
- "server_id": "10.155.111.141", - "device": [ - {"device_id": "0","device_ip": "192.1.27.8","rank_id": "8"}, - {"device_id": "1","device_ip": "192.2.27.8","rank_id": "9"}, - {"device_id": "2","device_ip": "192.3.27.8","rank_id": "10"}, - {"device_id": "3","device_ip": "192.4.27.8","rank_id": "11"}, - {"device_id": "4","device_ip": "192.1.27.9","rank_id": "12"}, - {"device_id": "5","device_ip": "192.2.27.9","rank_id": "13"}, - {"device_id": "6","device_ip": "192.3.27.9","rank_id": "14"}, - {"device_id": "7","device_ip": "192.4.27.9","rank_id": "15"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" -} \ No newline at end of file diff --git a/tutorials/tutorial_code/distributed_training/rank_table_2pcs.json b/tutorials/tutorial_code/distributed_training/rank_table_2pcs.json deleted file mode 100644 index a7e65a752fa37487d532a69068f39eecb507e909..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/rank_table_2pcs.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "version": "1.0", - "server_count": "1", - "server_list": [ - { - "server_id": "10.155.111.140", - "device": [ - {"device_id": "0","device_ip": "192.1.27.6","rank_id": "0"}, - {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" -} diff --git a/tutorials/tutorial_code/distributed_training/rank_table_8pcs.json b/tutorials/tutorial_code/distributed_training/rank_table_8pcs.json deleted file mode 100644 index 22c0cd210de4aede4861c816274fa0be8f17a09b..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/rank_table_8pcs.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "version": "1.0", - "server_count": "1", - "server_list": [ - { - "server_id": "10.155.111.140", - "device": [ - {"device_id": "0","device_ip": "192.1.27.6","rank_id": "0"}, - {"device_id": "1","device_ip": "192.2.27.6","rank_id": "1"}, - {"device_id": "2","device_ip": "192.3.27.6","rank_id": "2"}, - 
{"device_id": "3","device_ip": "192.4.27.6","rank_id": "3"}, - {"device_id": "4","device_ip": "192.1.27.7","rank_id": "4"}, - {"device_id": "5","device_ip": "192.2.27.7","rank_id": "5"}, - {"device_id": "6","device_ip": "192.3.27.7","rank_id": "6"}, - {"device_id": "7","device_ip": "192.4.27.7","rank_id": "7"}], - "host_nic_ip": "reserve" - } - ], - "status": "completed" -} diff --git a/tutorials/tutorial_code/distributed_training/resnet.py b/tutorials/tutorial_code/distributed_training/resnet.py deleted file mode 100644 index eb93c81732311155b069d2ab9b239a4ffc2c4010..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/resnet.py +++ /dev/null @@ -1,310 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -'''resnet -The sample can be run on Ascend 910 AI processor. 
-''' -import numpy as np -import mindspore.nn as nn -from mindspore import Tensor -import mindspore.ops as ops - - -def weight_variable_0(shape): - """weight_variable_0""" - zeros = np.zeros(shape).astype(np.float32) - return Tensor(zeros) - - -def weight_variable_1(shape): - """weight_variable_1""" - ones = np.ones(shape).astype(np.float32) - return Tensor(ones) - - -def conv3x3(in_channels, out_channels, stride=1, padding=0): - """3x3 convolution """ - return nn.Conv2d(in_channels, out_channels, - kernel_size=3, stride=stride, padding=padding, weight_init='XavierUniform', - has_bias=False, pad_mode="same") - - -def conv1x1(in_channels, out_channels, stride=1, padding=0): - """1x1 convolution""" - return nn.Conv2d(in_channels, out_channels, - kernel_size=1, stride=stride, padding=padding, weight_init='XavierUniform', - has_bias=False, pad_mode="same") - - -def conv7x7(in_channels, out_channels, stride=1, padding=0): - """1x1 convolution""" - return nn.Conv2d(in_channels, out_channels, - kernel_size=7, stride=stride, padding=padding, weight_init='XavierUniform', - has_bias=False, pad_mode="same") - - -def bn_with_initialize(out_channels): - """bn_with_initialize""" - shape = (out_channels) - mean = weight_variable_0(shape) - var = weight_variable_1(shape) - beta = weight_variable_0(shape) - bn = nn.BatchNorm2d(out_channels, momentum=0.99, eps=0.00001, gamma_init='Uniform', - beta_init=beta, moving_mean_init=mean, moving_var_init=var) - return bn - - -def bn_with_initialize_last(out_channels): - """bn_with_initialize_last""" - shape = (out_channels) - mean = weight_variable_0(shape) - var = weight_variable_1(shape) - beta = weight_variable_0(shape) - bn = nn.BatchNorm2d(out_channels, momentum=0.99, eps=0.00001, gamma_init='Uniform', - beta_init=beta, moving_mean_init=mean, moving_var_init=var) - return bn - - -def fc_with_initialize(input_channels, out_channels): - """fc_with_initialize""" - return nn.Dense(input_channels, out_channels, weight_init='XavierUniform', 
bias_init='Uniform') - - -class ResidualBlock(nn.Cell): - """ResidualBlock""" - expansion = 4 - - def __init__(self, - in_channels, - out_channels, - stride=1): - """init block""" - super(ResidualBlock, self).__init__() - - out_chls = out_channels // self.expansion - self.conv1 = conv1x1(in_channels, out_chls, stride=stride, padding=0) - self.bn1 = bn_with_initialize(out_chls) - - self.conv2 = conv3x3(out_chls, out_chls, stride=1, padding=0) - self.bn2 = bn_with_initialize(out_chls) - - self.conv3 = conv1x1(out_chls, out_channels, stride=1, padding=0) - self.bn3 = bn_with_initialize_last(out_channels) - - self.relu = ops.ReLU() - self.add = ops.Add() - - def construct(self, x): - """construct""" - identity = x - - out = self.conv1(x) - out = self.bn1(out) - out = self.relu(out) - - out = self.conv2(out) - out = self.bn2(out) - out = self.relu(out) - - out = self.conv3(out) - out = self.bn3(out) - - out = self.add(out, identity) - out = self.relu(out) - - return out - - -class ResidualBlockWithDown(nn.Cell): - """ResidualBlockWithDown""" - expansion = 4 - - def __init__(self, - in_channels, - out_channels, - stride=1, - down_sample=False): - """init block with down""" - super(ResidualBlockWithDown, self).__init__() - - out_chls = out_channels // self.expansion - self.conv1 = conv1x1(in_channels, out_chls, stride=stride, padding=0) - self.bn1 = bn_with_initialize(out_chls) - - self.conv2 = conv3x3(out_chls, out_chls, stride=1, padding=0) - self.bn2 = bn_with_initialize(out_chls) - - self.conv3 = conv1x1(out_chls, out_channels, stride=1, padding=0) - self.bn3 = bn_with_initialize_last(out_channels) - - self.relu = ops.ReLU() - self.down_sample = down_sample - - self.conv_down_sample = conv1x1(in_channels, out_channels, stride=stride, padding=0) - self.bn_down_sample = bn_with_initialize(out_channels) - self.add = ops.Add() - - def construct(self, x): - """construct""" - identity = x - - out = self.conv1(x) - out = self.bn1(out) - out = self.relu(out) - - out = 
self.conv2(out) - out = self.bn2(out) - out = self.relu(out) - - out = self.conv3(out) - out = self.bn3(out) - - identity = self.conv_down_sample(identity) - identity = self.bn_down_sample(identity) - - out = self.add(out, identity) - out = self.relu(out) - - return out - - -class MakeLayer0(nn.Cell): - """MakeLayer0""" - - def __init__(self, block, in_channels, out_channels, stride): - """init""" - super(MakeLayer0, self).__init__() - self.a = ResidualBlockWithDown(in_channels, out_channels, stride=1, down_sample=True) - self.b = block(out_channels, out_channels, stride=stride) - self.c = block(out_channels, out_channels, stride=1) - - def construct(self, x): - """construct""" - x = self.a(x) - x = self.b(x) - x = self.c(x) - - return x - - -class MakeLayer1(nn.Cell): - """MakeLayer1""" - - def __init__(self, block, in_channels, out_channels, stride): - """init""" - super(MakeLayer1, self).__init__() - self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True) - self.b = block(out_channels, out_channels, stride=1) - self.c = block(out_channels, out_channels, stride=1) - self.d = block(out_channels, out_channels, stride=1) - - def construct(self, x): - """construct""" - x = self.a(x) - x = self.b(x) - x = self.c(x) - x = self.d(x) - - return x - - -class MakeLayer2(nn.Cell): - """MakeLayer2""" - - def __init__(self, block, in_channels, out_channels, stride): - """init""" - super(MakeLayer2, self).__init__() - self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True) - self.b = block(out_channels, out_channels, stride=1) - self.c = block(out_channels, out_channels, stride=1) - self.d = block(out_channels, out_channels, stride=1) - self.e = block(out_channels, out_channels, stride=1) - self.f = block(out_channels, out_channels, stride=1) - - def construct(self, x): - """construct""" - x = self.a(x) - x = self.b(x) - x = self.c(x) - x = self.d(x) - x = self.e(x) - x = self.f(x) - - return x - - -class 
MakeLayer3(nn.Cell): - """MakeLayer3""" - - def __init__(self, block, in_channels, out_channels, stride): - """init""" - super(MakeLayer3, self).__init__() - self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True) - self.b = block(out_channels, out_channels, stride=1) - self.c = block(out_channels, out_channels, stride=1) - - def construct(self, x): - """construct""" - x = self.a(x) - x = self.b(x) - x = self.c(x) - - return x - - -class ResNet(nn.Cell): - """ResNet""" - - def __init__(self, block, num_classes=100, batch_size=32): - """init""" - super(ResNet, self).__init__() - self.batch_size = batch_size - self.num_classes = num_classes - - self.conv1 = conv7x7(3, 64, stride=2, padding=0) - - self.bn1 = bn_with_initialize(64) - self.relu = ops.ReLU() - self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode="same") - - self.layer1 = MakeLayer0(block, in_channels=64, out_channels=256, stride=1) - self.layer2 = MakeLayer1(block, in_channels=256, out_channels=512, stride=2) - self.layer3 = MakeLayer2(block, in_channels=512, out_channels=1024, stride=2) - self.layer4 = MakeLayer3(block, in_channels=1024, out_channels=2048, stride=2) - - self.pool = ops.ReduceMean(keep_dims=True) - self.squeeze = ops.Squeeze(axis=(2, 3)) - self.fc = fc_with_initialize(512 * block.expansion, num_classes) - - def construct(self, x): - """construct""" - x = self.conv1(x) - x = self.bn1(x) - x = self.relu(x) - x = self.maxpool(x) - - x = self.layer1(x) - x = self.layer2(x) - x = self.layer3(x) - x = self.layer4(x) - - x = self.pool(x, (2, 3)) - x = self.squeeze(x) - x = self.fc(x) - return x - - -def resnet50(batch_size, num_classes): - """create resnet50""" - return ResNet(ResidualBlock, num_classes, batch_size) diff --git a/tutorials/tutorial_code/distributed_training/resnet50_distributed_training.py b/tutorials/tutorial_code/distributed_training/resnet50_distributed_training.py deleted file mode 100644 index 
c769edc929e44c73ca5595b0ac6423b817e6bec0..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/resnet50_distributed_training.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""resnet50_distributed_training -This sample code is applicable to Ascend. -""" -import os -import mindspore.nn as nn -from mindspore import dtype as mstype -import mindspore.ops as ops -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as vision -import mindspore.dataset.transforms.c_transforms as C -from mindspore.communication.management import init, get_rank, get_group_size -from mindspore import Tensor, Model, context -from mindspore.nn import Momentum -from mindspore.context import ParallelMode -from mindspore.train.callback import LossMonitor -from resnet import resnet50 - -device_id = int(os.getenv('DEVICE_ID')) -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") -context.set_context(device_id=device_id) # set device_id -init() - -def create_dataset(data_path, repeat_num=1, batch_size=32, rank_id=0, rank_size=1): # pylint: disable=missing-docstring - resize_height = 224 - resize_width = 224 - rescale = 1.0 / 255.0 - shift = 0.0 - - # get rank_id and rank_size - rank_id = get_rank() - rank_size = get_group_size() - data_set = 
ds.Cifar10Dataset(data_path, num_shards=rank_size, shard_id=rank_id) - - # define map operations - random_crop_op = vision.RandomCrop((32, 32), (4, 4, 4, 4)) - random_horizontal_op = vision.RandomHorizontalFlip() - resize_op = vision.Resize((resize_height, resize_width)) - rescale_op = vision.Rescale(rescale, shift) - normalize_op = vision.Normalize((0.4465, 0.4822, 0.4914), (0.2010, 0.1994, 0.2023)) - changeswap_op = vision.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - c_trans = [random_crop_op, random_horizontal_op] - c_trans += [resize_op, rescale_op, normalize_op, changeswap_op] - - # apply map operations on images - data_set = data_set.map(operations=type_cast_op, input_columns="label") - data_set = data_set.map(operations=c_trans, input_columns="image") - - # apply shuffle operations - data_set = data_set.shuffle(buffer_size=10) - - # apply batch operations - data_set = data_set.batch(batch_size=batch_size, drop_remainder=True) - - # apply repeat operations - data_set = data_set.repeat(repeat_num) - - return data_set - - -class SoftmaxCrossEntropyExpand(nn.Cell): # pylint: disable=missing-docstring - def __init__(self, sparse=False): - super(SoftmaxCrossEntropyExpand, self).__init__() - self.exp = ops.Exp() - self.sum = ops.ReduceSum(keep_dims=True) - self.onehot = ops.OneHot() - self.on_value = Tensor(1.0, mstype.float32) - self.off_value = Tensor(0.0, mstype.float32) - self.div = ops.RealDiv() - self.log = ops.Log() - self.sum_cross_entropy = ops.ReduceSum(keep_dims=False) - self.mul = ops.Mul() - self.mul2 = ops.Mul() - self.mean = ops.ReduceMean(keep_dims=False) - self.sparse = sparse - self.max = ops.ReduceMax(keep_dims=True) - self.sub = ops.Sub() - self.eps = Tensor(1e-24, mstype.float32) - - def construct(self, logit, label): # pylint: disable=missing-docstring - logit_max = self.max(logit, -1) - exp = self.exp(self.sub(logit, logit_max)) - exp_sum = self.sum(exp, -1) - softmax_result = self.div(exp, exp_sum) - if self.sparse: - label = 
self.onehot(label, ops.shape(logit)[1], self.on_value, self.off_value) - - softmax_result_log = self.log(softmax_result + self.eps) - loss = self.sum_cross_entropy((self.mul(softmax_result_log, label)), -1) - loss = self.mul2(ops.scalar_to_array(-1.0), loss) - loss = self.mean(loss, -1) - - return loss - - -def test_train_cifar(epoch_size=10): # pylint: disable=missing-docstring - context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL, gradients_mean=True) - loss_cb = LossMonitor() - data_path = os.getenv('DATA_PATH') - dataset = create_dataset(data_path) - batch_size = 32 - num_classes = 10 - net = resnet50(batch_size, num_classes) - loss = SoftmaxCrossEntropyExpand(sparse=True) - opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) - model = Model(net, loss_fn=loss, optimizer=opt) - model.train(epoch_size, dataset, callbacks=[loss_cb], dataset_sink_mode=True) diff --git a/tutorials/tutorial_code/distributed_training/resnet50_distributed_training_gpu.py b/tutorials/tutorial_code/distributed_training/resnet50_distributed_training_gpu.py deleted file mode 100644 index a0b5ff07f00f1e2d2705e48041e8308017cadff1..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/resnet50_distributed_training_gpu.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================ -"""resnet50_distributed_training -This sample code is applicable to GPU. -""" -import os -import mindspore.nn as nn -from mindspore import dtype as mstype -import mindspore.ops as ops -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as vision -import mindspore.dataset.transforms.c_transforms as C -from mindspore.communication.management import init, get_rank, get_group_size -from mindspore import Tensor, Model -from mindspore.nn import Momentum -from mindspore.context import ParallelMode -from mindspore import context -from mindspore.train.callback import LossMonitor -from resnet import resnet50 - -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") -init("nccl") - -def create_dataset(data_path, repeat_num=1, batch_size=32, rank_id=0, rank_size=1): # pylint: disable=missing-docstring - resize_height = 224 - resize_width = 224 - rescale = 1.0 / 255.0 - shift = 0.0 - - # get rank_id and rank_size - rank_id = get_rank() - rank_size = get_group_size() - data_set = ds.Cifar10Dataset(data_path, num_shards=rank_size, shard_id=rank_id) - - # define map operations - random_crop_op = vision.RandomCrop((32, 32), (4, 4, 4, 4)) - random_horizontal_op = vision.RandomHorizontalFlip() - resize_op = vision.Resize((resize_height, resize_width)) - rescale_op = vision.Rescale(rescale, shift) - normalize_op = vision.Normalize((0.4465, 0.4822, 0.4914), (0.2010, 0.1994, 0.2023)) - changeswap_op = vision.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - c_trans = [random_crop_op, random_horizontal_op] - c_trans += [resize_op, rescale_op, normalize_op, changeswap_op] - - # apply map operations on images - data_set = data_set.map(operations=type_cast_op, input_columns="label") - data_set = data_set.map(operations=c_trans, input_columns="image") - - # apply shuffle operations - data_set = data_set.shuffle(buffer_size=10) - - # apply batch operations - data_set = 
data_set.batch(batch_size=batch_size, drop_remainder=True) - - # apply repeat operations - data_set = data_set.repeat(repeat_num) - - return data_set - - -class SoftmaxCrossEntropyExpand(nn.Cell): # pylint: disable=missing-docstring - def __init__(self, sparse=False): - super(SoftmaxCrossEntropyExpand, self).__init__() - self.exp = ops.Exp() - self.sum = ops.ReduceSum(keep_dims=True) - self.onehot = ops.OneHot() - self.on_value = Tensor(1.0, mstype.float32) - self.off_value = Tensor(0.0, mstype.float32) - self.div = ops.RealDiv() - self.log = ops.Log() - self.sum_cross_entropy = ops.ReduceSum(keep_dims=False) - self.mul = ops.Mul() - self.mul2 = ops.Mul() - self.mean = ops.ReduceMean(keep_dims=False) - self.sparse = sparse - self.max = ops.ReduceMax(keep_dims=True) - self.sub = ops.Sub() - self.eps = Tensor(1e-24, mstype.float32) - - def construct(self, logit, label): # pylint: disable=missing-docstring - logit_max = self.max(logit, -1) - exp = self.exp(self.sub(logit, logit_max)) - exp_sum = self.sum(exp, -1) - softmax_result = self.div(exp, exp_sum) - if self.sparse: - label = self.onehot(label, ops.shape(logit)[1], self.on_value, self.off_value) - - softmax_result_log = self.log(softmax_result + self.eps) - loss = self.sum_cross_entropy((self.mul(softmax_result_log, label)), -1) - loss = self.mul2(ops.scalar_to_array(-1.0), loss) - loss = self.mean(loss, -1) - - return loss - - -def test_train_cifar(epoch_size=10): # pylint: disable=missing-docstring - context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL, gradients_mean=True) - loss_cb = LossMonitor() - data_path = os.getenv('DATA_PATH') - dataset = create_dataset(data_path) - batch_size = 32 - num_classes = 10 - net = resnet50(batch_size, num_classes) - loss = SoftmaxCrossEntropyExpand(sparse=True) - opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) - model = Model(net, loss_fn=loss, optimizer=opt) - model.train(epoch_size, dataset, callbacks=[loss_cb], 
dataset_sink_mode=False) diff --git a/tutorials/tutorial_code/distributed_training/resnet50_distributed_training_grad_accu.py b/tutorials/tutorial_code/distributed_training/resnet50_distributed_training_grad_accu.py deleted file mode 100644 index ee121f35dddf35eb686f403f68e0010d4595b9b3..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/resnet50_distributed_training_grad_accu.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""resnet50_distributed_training_grad_accu -The sample can be run on Ascend 910 AI processor. 
-""" -import os -import mindspore.nn as nn -from mindspore import dtype as mstype -import mindspore.ops as ops -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as vision -import mindspore.dataset.transforms.c_transforms as C -from mindspore.communication.management import init, get_rank, get_group_size -from mindspore import Tensor, context -from mindspore.nn import Momentum -from mindspore.context import ParallelMode -from mindspore.train.callback import LossMonitor -from resnet import resnet50 -from model_accu import Model_ACCU -from cell_wrapper import TrainAccuStepsCell, VirtualDatasetCell - -device_id = int(os.getenv('DEVICE_ID')) -context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") -context.set_context(device_id=device_id) # set device_id -init() - -def create_dataset(data_path, repeat_num=1, batch_size=32): # pylint: disable=missing-docstring - resize_height = 224 - resize_width = 224 - rescale = 1.0 / 255.0 - shift = 0.0 - - # get rank_id and rank_size - rank_id = get_rank() - rank_size = get_group_size() - data_set = ds.Cifar10Dataset(data_path, num_shards=rank_size, shard_id=rank_id) - - # define map operations - random_crop_op = vision.RandomCrop((32, 32), (4, 4, 4, 4)) - random_horizontal_op = vision.RandomHorizontalFlip() - resize_op = vision.Resize((resize_height, resize_width)) - rescale_op = vision.Rescale(rescale, shift) - normalize_op = vision.Normalize((0.4465, 0.4822, 0.4914), (0.2010, 0.1994, 0.2023)) - changeswap_op = vision.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - c_trans = [random_crop_op, random_horizontal_op] - c_trans += [resize_op, rescale_op, normalize_op, changeswap_op] - - # apply map operations on images - data_set = data_set.map(operations=type_cast_op, input_columns="label") - data_set = data_set.map(operations=c_trans, input_columns="image") - - # apply shuffle operations - data_set = data_set.shuffle(buffer_size=10) - - # apply batch operations - data_set = 
data_set.batch(batch_size=batch_size, drop_remainder=True) - - # apply repeat operations - data_set = data_set.repeat(repeat_num) - - return data_set - - -class SoftmaxCrossEntropyExpand(nn.Cell): # pylint: disable=missing-docstring - def __init__(self, sparse=False): - super(SoftmaxCrossEntropyExpand, self).__init__() - self.exp = ops.Exp() - self.sum = ops.ReduceSum(keep_dims=True) - self.onehot = ops.OneHot() - self.on_value = Tensor(1.0, mstype.float32) - self.off_value = Tensor(0.0, mstype.float32) - self.div = ops.RealDiv() - self.log = ops.Log() - self.sum_cross_entropy = ops.ReduceSum(keep_dims=False) - self.mul = ops.Mul() - self.mul2 = ops.Mul() - self.mean = ops.ReduceMean(keep_dims=False) - self.sparse = sparse - self.max = ops.ReduceMax(keep_dims=True) - self.sub = ops.Sub() - self.eps = Tensor(1e-24, mstype.float32) - - def construct(self, logit, label): # pylint: disable=missing-docstring - logit_max = self.max(logit, -1) - exp = self.exp(self.sub(logit, logit_max)) - exp_sum = self.sum(exp, -1) - softmax_result = self.div(exp, exp_sum) - if self.sparse: - label = self.onehot(label, ops.shape(logit)[1], self.on_value, self.off_value) - - softmax_result_log = self.log(softmax_result + self.eps) - loss = self.sum_cross_entropy((self.mul(softmax_result_log, label)), -1) - loss = self.mul2(ops.scalar_to_array(-1.0), loss) - loss = self.mean(loss, -1) - - return loss - - -def test_train_cifar(epoch_size=10): # pylint: disable=missing-docstring - context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL,\ - gradients_mean=True, grad_accumulation_step=6) - loss_cb = LossMonitor() - data_path = os.getenv('DATA_PATH') - batch_size = 32 - dataset = create_dataset(data_path, batch_size=batch_size) - num_classes = 10 - net = resnet50(batch_size, num_classes) - loss = SoftmaxCrossEntropyExpand(sparse=True) - opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) - net_with_loss = nn.WithLossCell(net, loss) - 
net_with_loss = VirtualDatasetCell(net_with_loss) - wrap_net = TrainAccuStepsCell(net_with_loss, opt) - model = Model_ACCU(wrap_net) - model.train(epoch_size, dataset, callbacks=[loss_cb], dataset_sink_mode=True) diff --git a/tutorials/tutorial_code/distributed_training/run.sh b/tutorials/tutorial_code/distributed_training/run.sh deleted file mode 100644 index 65f5cab70a2de20cb5931f3b56bb3b225688964d..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/run.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash - -echo "==============================================================================================================" -echo "Please run the script as: " -echo "bash run.sh DATA_PATH RANK_SIZE" -echo "For example: bash run.sh /path/dataset 8" -echo "It is better to use the absolute path." -echo "==============================================================================================================" -set -e -DATA_PATH=$1 -export DATA_PATH=${DATA_PATH} -RANK_SIZE=$2 - -EXEC_PATH=$(pwd) - -test_dist_8pcs() -{ - export RANK_TABLE_FILE=${EXEC_PATH}/rank_table_8pcs.json - export RANK_SIZE=8 -} - -test_dist_2pcs() -{ - export RANK_TABLE_FILE=${EXEC_PATH}/rank_table_2pcs.json - export RANK_SIZE=2 -} - -test_dist_${RANK_SIZE}pcs - -for((i=1;i<${RANK_SIZE};i++)) -do - rm -rf device$i - mkdir device$i - cp ./resnet50_distributed_training.py ./resnet.py ./device$i - cd ./device$i - export DEVICE_ID=$i - export RANK_ID=$i - echo "start training for device $i" - env > env$i.log - pytest -s -v ./resnet50_distributed_training.py > train.log$i 2>&1 & - cd ../ -done -rm -rf device0 -mkdir device0 -cp ./resnet50_distributed_training.py ./resnet.py ./device0 -cd ./device0 -export DEVICE_ID=0 -export RANK_ID=0 -echo "start training for device 0" -env > env0.log -pytest -s -v ./resnet50_distributed_training.py > train.log0 2>&1 -if [ $? 
-eq 0 ];then - echo "training success" -else - echo "training failed" - exit 2 -fi -cd ../ diff --git a/tutorials/tutorial_code/distributed_training/run_cluster.sh b/tutorials/tutorial_code/distributed_training/run_cluster.sh deleted file mode 100644 index 0601a5cd6e3d6349af234114de18c51fcfcf9c8e..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/run_cluster.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - -echo "==============================================================================================================" -echo "Please run the script as: " -echo "bash run.sh DATA_PATH RANK_TABLE_FILE RANK_SIZE RANK_START" -echo "For example: bash run.sh /path/dataset /path/rank_table.json 16 0" -echo "It is better to use the absolute path." -echo "==============================================================================================================" - -execute_path=$(pwd) -echo ${execute_path} -script_self=$(readlink -f "$0") -self_path=$(dirname "${script_self}") -echo ${self_path} - -export DATA_PATH=$1 -export RANK_TABLE_FILE=$2 -export RANK_SIZE=$3 -RANK_START=$4 -DEVICE_START=0 -for((i=0;i<=7;i++)); -do - export RANK_ID=$[i+RANK_START] - export DEVICE_ID=$[i+DEVICE_START] - rm -rf ${execute_path}/device_$RANK_ID - mkdir ${execute_path}/device_$RANK_ID - cd ${execute_path}/device_$RANK_ID || exit - pytest -s ${self_path}/resnet50_distributed_training.py >train$RANK_ID.log 2>&1 & -done \ No newline at end of file diff --git a/tutorials/tutorial_code/distributed_training/run_gpu.sh b/tutorials/tutorial_code/distributed_training/run_gpu.sh deleted file mode 100644 index 6c89fdc31def48f2de1d5421b33976a76eeb6e6c..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/run_gpu.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -echo "==============================================================================================================" -echo "Please run the script as: " -echo 
"bash run_gpu.sh DATA_PATH" -echo "For example: bash run_gpu.sh /path/dataset" -echo "It is better to use the absolute path." -echo "==============================================================================================================" -set -e -DATA_PATH=$1 -export DATA_PATH=${DATA_PATH} - -rm -rf device -mkdir device -cp ./resnet50_distributed_training_gpu.py ./resnet.py ./device -cd ./device -echo "start training" -mpirun -n 8 pytest -s -v ./resnet50_distributed_training_gpu.py > train.log 2>&1 & diff --git a/tutorials/tutorial_code/distributed_training/run_grad_accu.sh b/tutorials/tutorial_code/distributed_training/run_grad_accu.sh deleted file mode 100644 index 326bed86a148382831dbdb17f2c9a8e6b29f3c05..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/distributed_training/run_grad_accu.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash - -echo "==============================================================================================================" -echo "Please run the script as: " -echo "bash run.sh DATA_PATH RANK_SIZE" -echo "For example: bash run.sh /path/dataset 8" -echo "It is better to use the absolute path." 
-echo "==============================================================================================================" -set -e -DATA_PATH=$1 -export DATA_PATH=${DATA_PATH} -RANK_SIZE=$2 - -EXEC_PATH=$(pwd) - -test_dist_8pcs() -{ - export RANK_TABLE_FILE=${EXEC_PATH}/rank_table_8pcs.json - export RANK_SIZE=8 -} - -test_dist_2pcs() -{ - export RANK_TABLE_FILE=${EXEC_PATH}/rank_table_2pcs.json - export RANK_SIZE=2 -} - -test_dist_${RANK_SIZE}pcs - -for((i=1;i<${RANK_SIZE};i++)) -do - rm -rf device$i - mkdir device$i - cp *.py ./device$i - cd ./device$i - export DEVICE_ID=$i - export RANK_ID=$i - echo "start training for device $i" - env > env$i.log - pytest -s -v ./resnet50_distributed_training_grad_accu.py > train.log$i 2>&1 & - cd ../ -done -rm -rf device0 -mkdir device0 -cp *.py ./device0 -cd ./device0 -export DEVICE_ID=0 -export RANK_ID=0 -echo "start training for device 0" -env > env0.log -pytest -s -v ./resnet50_distributed_training_grad_accu.py > train.log0 2>&1 -if [ $? -eq 0 ];then - echo "training success" -else - echo "training failed" - exit 2 -fi -cd ../ diff --git a/tutorials/tutorial_code/evaluate_the_model_during_training/README.md b/tutorials/tutorial_code/evaluate_the_model_during_training/README.md deleted file mode 100644 index fa88bea14330e68c12ad28224f295bad50920908..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/evaluate_the_model_during_training/README.md +++ /dev/null @@ -1,20 +0,0 @@ -# README - -使用数据集: [MNIST](http://yann.lecun.com/exdb/mnist/) -下载后按照下述结构放置: - -```text -├─evaluate_the_model_during_training.py -│ -└─datasets - └─MNIST_Data - ├─test - │ t10k-images.idx3-ubyte - │ t10k-labels.idx1-ubyte - │ - └─train - train-images.idx3-ubyte - train-labels.idx1-ubyte -``` - -使用命令`python evaluate_the_model_during_training.py >train.log 2>&1 &`运行(过程较长,大约需要3分钟),运行结果会记录在`log.txt`文件中。 diff --git a/tutorials/tutorial_code/evaluate_the_model_during_training/evaluate_the_model_during_training.py 
b/tutorials/tutorial_code/evaluate_the_model_during_training/evaluate_the_model_during_training.py deleted file mode 100644 index aebab4d0ae53f044d13a901c5a9ae126ac9e8ff1..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/evaluate_the_model_during_training/evaluate_the_model_during_training.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""Evaluate the model during training tutorial -This sample code is applicable to CPU, GPU and Ascend. 
-""" -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as CV -import mindspore.dataset.transforms.c_transforms as C -from mindspore.dataset.vision import Inter -from mindspore import dtype as mstype -from mindspore import nn, Model, context -from mindspore.common.initializer import Normal -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, Callback -from mindspore.nn import Accuracy -from mindspore.nn import SoftmaxCrossEntropyWithLogits - - -def create_dataset(data_path, batch_size=32, repeat_size=1, num_parallel_workers=1): - """ create dataset for train or test - Args: - data_path (str): Data path - batch_size (int): The number of data records in each group - repeat_size (int): The number of replicated data records - num_parallel_workers (int): The number of parallel workers - """ - # define dataset - mnist_ds = ds.MnistDataset(data_path) - - # define operation parameters - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - rescale_nml = 1 / 0.3081 - shift_nml = -1 * 0.1307 / 0.3081 - - # define map operations - type_cast_op = C.TypeCast(mstype.int32) - c_trans = [ - CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR), - CV.Rescale(rescale_nml, shift_nml), - CV.Rescale(rescale, shift), - CV.HWC2CHW() - ] - - # apply map operations on images - mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns="label", num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(operations=c_trans, input_columns="image", num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script - mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True) - mnist_ds = mnist_ds.repeat(repeat_size) - - return mnist_ds - - -class LeNet5(nn.Cell): - """Lenet network structure.""" - # define the operator required - def __init__(self, num_class=10, num_channel=1): 
- super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - # use the preceding operators to construct networks - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x - - -class EvalCallBack(Callback): - """Precision verification using callback function.""" - # define the operator required - def __init__(self, models, eval_dataset, eval_per_epochs, epochs_per_eval): - super(EvalCallBack, self).__init__() - self.models = models - self.eval_dataset = eval_dataset - self.eval_per_epochs = eval_per_epochs - self.epochs_per_eval = epochs_per_eval - - # define operator function in epoch end - def epoch_end(self, run_context): - cb_param = run_context.original_args() - cur_epoch = cb_param.cur_epoch_num - if cur_epoch % self.eval_per_epochs == 0: - acc = self.models.eval(self.eval_dataset, dataset_sink_mode=False) - self.epochs_per_eval["epoch"].append(cur_epoch) - self.epochs_per_eval["acc"].append(acc["Accuracy"]) - print(acc) - - -if __name__ == "__main__": - # set args, train it - context.set_context(mode=context.GRAPH_MODE, device_target="CPU") - train_data_path = "./datasets/MNIST_Data/train" - eval_data_path = "./datasets/MNIST_Data/test" - ckpt_save_dir = "./lenet_ckpt" - epoch_size = 10 - eval_per_epoch = 2 - repeat = 1 - train_data = create_dataset(train_data_path, repeat_size=repeat) - eval_data = create_dataset(eval_data_path, repeat_size=repeat) - # define the net - network = LeNet5() - # define the loss 
function - net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') - # define the optimizer - net_opt = nn.Momentum(network.trainable_params(), learning_rate=0.01, momentum=0.9) - config_ck = CheckpointConfig(save_checkpoint_steps=eval_per_epoch*1875, keep_checkpoint_max=15) - ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet", directory=ckpt_save_dir, config=config_ck) - model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}) - epoch_per_eval = {"epoch": [], "acc": []} - eval_cb = EvalCallBack(model, eval_data, eval_per_epoch, epoch_per_eval) - model.train(epoch_size, train_data, callbacks=[ckpoint_cb, LossMonitor(375), eval_cb], dataset_sink_mode=False) diff --git a/tutorials/tutorial_code/gradient_accumulation/train.py b/tutorials/tutorial_code/gradient_accumulation/train.py deleted file mode 100644 index c2e707b69700b90dec4be8a15f97644069a07c2f..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/gradient_accumulation/train.py +++ /dev/null @@ -1,159 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""train -This sample code is applicable to GPU and Ascend. 
-""" -import argparse -import os - -import mindspore.nn as nn -from mindspore import ParameterTuple, context, DatasetHelper, save_checkpoint -from mindspore.nn import Cell -import mindspore.ops as ops -from model_zoo.official.cv.lenet.src.dataset import create_dataset -from model_zoo.official.cv.lenet.src.lenet import LeNet5 - -_sum_op = ops.MultitypeFuncGraph("grad_sum_op") -_clear_op = ops.MultitypeFuncGraph("clear_op") - - -@_sum_op.register("Tensor", "Tensor") -def _cumulative_grad(grad_sum, grad): - """Apply grad sum to cumulative gradient.""" - add = ops.AssignAdd() - return add(grad_sum, grad) - - -@_clear_op.register("Tensor", "Tensor") -def _clear_grad_sum(grad_sum, zero): - """Apply zero to clear grad_sum.""" - success = True - success = ops.depend(success, ops.assign(grad_sum, zero)) - return success - - -class TrainForwardBackward(Cell): # pylint: disable=missing-docstring - def __init__(self, network, optimizer, grad_sum, sens=1.0): - super(TrainForwardBackward, self).__init__(auto_prefix=False) - self.network = network - self.network.set_grad() - self.network.add_flags(defer_inline=True) - self.weights = ParameterTuple(network.trainable_params()) - self.optimizer = optimizer - self.grad_sum = grad_sum - self.grad = ops.GradOperation(get_by_list=True, sens_param=True) - self.sens = sens - self.hyper_map = ops.HyperMap() - - def construct(self, *inputs): - weights = self.weights - loss = self.network(*inputs) - sens = ops.Fill()(ops.DType()(loss), ops.Shape()(loss), self.sens) - grads = self.grad(self.network, weights)(*inputs, sens) - return ops.depend(loss, self.hyper_map(ops.partial(_sum_op), self.grad_sum, grads)) - - -class TrainOptim(Cell): - def __init__(self, optimizer, grad_sum): - super(TrainOptim, self).__init__(auto_prefix=False) - self.optimizer = optimizer - self.grad_sum = grad_sum - - def construct(self): - return self.optimizer(self.grad_sum) - - -class TrainClear(Cell): - def __init__(self, grad_sum, zeros): - super(TrainClear, 
self).__init__(auto_prefix=False) - self.grad_sum = grad_sum - self.zeros = zeros - self.hyper_map = ops.HyperMap() - - def construct(self): - seccess = self.hyper_map(ops.partial(_clear_op), self.grad_sum, self.zeros) - return seccess - - -class GradientAccumulation: # pylint: disable=missing-docstring - def __init__(self, network, loss_fn, optimizer): - self._network = network - self._loss_fn = loss_fn - self._optimizer = optimizer - - params = self._optimizer.parameters - self._grad_sum = params.clone(prefix="grad_sum", init='zeros') - self._zeros = params.clone(prefix="zeros", init='zeros') - self._train_forward_backward = self._build_train_forward_backward_network() - self._train_optim = self._build_train_optim() - self._train_clear = self._build_train_clear() - - def _build_train_forward_backward_network(self): - """Build forward and backward network""" - network = self._network - network = nn.WithLossCell(network, self._loss_fn) - loss_scale = 1.0 - network = TrainForwardBackward(network, self._optimizer, self._grad_sum, loss_scale).set_train() - return network - - def _build_train_optim(self): - """Build optimizer network""" - network = TrainOptim(self._optimizer, self._grad_sum).set_train() - return network - - def _build_train_clear(self): - """Build clear network""" - network = TrainClear(self._grad_sum, self._zeros).set_train() - return network - - def train_process(self, epoch, train_dataset, mini_steps=None): - """ - Training process. The data would be passed to network directly. 
- """ - dataset_helper = DatasetHelper(train_dataset, dataset_sink_mode=False, epoch_num=epoch) - - for i in range(epoch): - step = 0 - for k, next_element in enumerate(dataset_helper): - loss = self._train_forward_backward(*next_element) - if (k + 1) % mini_steps == 0: - step += 1 - print("epoch:", i + 1, "step:", step, "loss is ", loss) - self._train_optim() - self._train_clear() - - train_dataset.reset() - - save_checkpoint(self._train_forward_backward, "gradient_accumulation.ckpt") - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='MindSpore Grad Cumulative Example') - parser.add_argument('--device_target', type=str, default="GPU", choices=['GPU'], - help='device where the code will be implemented (default: GPU)') - parser.add_argument('--data_path', type=str, default="./Data", - help='path where the dataset is saved') - args = parser.parse_args() - - context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target) - ds_train = create_dataset(os.path.join(args.data_path, "train"), 32) - - net = LeNet5(10) - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.Momentum(net.trainable_params(), 0.01, 0.9) - model = GradientAccumulation(net, net_loss, net_opt) - - print("============== Starting Training ==============") - model.train_process(10, ds_train, mini_steps=4) diff --git a/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_input.py b/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_input.py deleted file mode 100644 index 6f69f43727a7e45c6b223980f6d4daccec7ef25d..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_input.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ========================================================== - -"""first order input tutorial -This sample code is applicable to GPU and Ascend. -""" -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import Parameter -from mindspore import dtype as mstype -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.matmul = ops.MatMul() - self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z') - def construct(self, x, y): - x = x * self.z - out = self.matmul(x, y) - return out - -class GradNetWrtX(nn.Cell): - def __init__(self, net): - super(GradNetWrtX, self).__init__() - self.net = net - self.grad_op = ops.GradOperation() - def construct(self, x, y): - gradient_function = self.grad_op(self.net) - return gradient_function(x, y) - -input_x = Tensor([[0.8, 0.6, 0.2], [1.8, 1.3, 1.1]], dtype=mstype.float32) -input_y = Tensor([[0.11, 3.3, 1.1], [1.1, 0.2, 1.4], [1.1, 2.2, 0.3]], dtype=mstype.float32) -output = GradNetWrtX(Net())(input_x, input_y) -print(output) diff --git a/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_sense.py b/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_sense.py deleted file mode 100644 index 3503b7947f213ac8a8e881b018353b443953a634..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_sense.py +++ /dev/null 
@@ -1,51 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ========================================================== - -"""first order sense tutorial -This sample code is applicable to GPU and Ascend. -""" -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import Parameter -from mindspore import dtype as mstype -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.matmul = ops.MatMul() - self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z') - def construct(self, x, y): - x = x * self.z - out = self.matmul(x, y) - return out - -class GradNetWrtX(nn.Cell): - def __init__(self, net): - super(GradNetWrtX, self).__init__() - self.net = net - self.grad_op = ops.GradOperation(sens_param=True) - self.grad_wrt_output = Tensor([[0.1, 0.6, 0.2], [0.8, 1.3, 1.1]], dtype=mstype.float32) - def construct(self, x, y): - gradient_function = self.grad_op(self.net) - return gradient_function(x, y, self.grad_wrt_output) - -input_x = Tensor([[0.8, 0.6, 0.2], [1.8, 1.3, 1.1]], dtype=mstype.float32) -input_y = Tensor([[0.11, 3.3, 1.1], [1.1, 0.2, 1.4], [1.1, 2.2, 0.3]], dtype=mstype.float32) -output = GradNetWrtX(Net())(input_x, input_y) -print(output) diff --git 
a/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_single.py b/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_single.py deleted file mode 100644 index 77fc0f2901698c74d2551f2724593857d9dc3a12..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_single.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ========================================================== - -"""first order single tutorial -This sample code is applicable to GPU and Ascend. 
-""" -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import Parameter -from mindspore import dtype as mstype -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.matmul = ops.MatMul() - self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z') - def construct(self, x, y): - x = x * self.z - out = self.matmul(x, y) - return out[0][0] - -class GradNetWrtX(nn.Cell): - def __init__(self, net): - super(GradNetWrtX, self).__init__() - self.net = net - self.grad_op = ops.GradOperation() - def construct(self, x, y): - gradient_function = self.grad_op(self.net) - return gradient_function(x, y) - -input_x = Tensor([[0.8, 0.6, 0.2], [1.8, 1.3, 1.1]], dtype=mstype.float32) -input_y = Tensor([[0.11, 3.3, 1.1], [1.1, 0.2, 1.4], [1.1, 2.2, 0.3]], dtype=mstype.float32) -output = GradNetWrtX(Net())(input_x, input_y) -print(output) diff --git a/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_weight.py b/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_weight.py deleted file mode 100644 index 0c3962e3d9341afe8960fe6e33135703b323782c..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/high_order_differentiation/first_order/first_order_weight.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ========================================================== - -"""first order weight tutorial -This sample code is applicable to GPU and Ascend. -""" -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import ParameterTuple, Parameter -from mindspore import dtype as mstype -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.matmul = ops.MatMul() - self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z') - def construct(self, x, y): - x = x * self.z - out = self.matmul(x, y) - return out - -class GradNetWrtX(nn.Cell): - def __init__(self, net): - super(GradNetWrtX, self).__init__() - self.net = net - self.params = ParameterTuple(net.trainable_params()) - self.grad_op = ops.GradOperation(get_by_list=True) - def construct(self, x, y): - gradient_function = self.grad_op(self.net, self.params) - return gradient_function(x, y) - -input_x = Tensor([[0.8, 0.6, 0.2], [1.8, 1.3, 1.1]], dtype=mstype.float32) -input_y = Tensor([[0.11, 3.3, 1.1], [1.1, 0.2, 1.4], [1.1, 2.2, 0.3]], dtype=mstype.float32) -output = GradNetWrtX(Net())(input_x, input_y) -print(output) diff --git a/tutorials/tutorial_code/high_order_differentiation/second_order/multiple_in_multiple_out.py b/tutorials/tutorial_code/high_order_differentiation/second_order/multiple_in_multiple_out.py deleted file mode 100644 index 5fb8df15b043978c238b9af23feb5462c545f558..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/high_order_differentiation/second_order/multiple_in_multiple_out.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance 
with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ========================================================== - -"""multiple in multiple out tutorial -This sample code is applicable to GPU and Ascend. -""" -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.mul = ops.Mul() - - def construct(self, x, y): - x_square = self.mul(x, x) - x_square_y = self.mul(x_square, y) - return x_square_y - -class Grad(nn.Cell): - def __init__(self, network): - super(Grad, self).__init__() - self.grad = ops.GradOperation(get_all=True, sens_param=False) - self.network = network - def construct(self, x, y): - gout = self.grad(self.network)(x, y) # return dx dy - return gout - -class GradSec(nn.Cell): - """construct secend grad""" - def __init__(self, network): - super(GradSec, self).__init__() - self.grad = ops.GradOperation(get_all=True, sens_param=True) - self.network = network - self.sens1 = Tensor(np.array([1]).astype('float32')) - self.sens2 = Tensor(np.array([0]).astype('float32')) - def construct(self, x, y): - dxdx, dxdy = self.grad(self.network)(x, y, (self.sens1, self.sens2)) - dydx, dydy = self.grad(self.network)(x, y, (self.sens2, self.sens1)) - return dxdx, dxdy, dydx, dydy - -net = Net() -firstgrad = Grad(net) # first order -secondgrad = GradSec(firstgrad) # second order -x_train = Tensor(np.array([4], dtype=np.float32)) -y_train = Tensor(np.array([5], 
dtype=np.float32)) -input_dxdx, input_dxdy, input_dydx, input_dydy = secondgrad(x_train, y_train) -print(input_dxdx, input_dxdy, input_dydx, input_dydy) diff --git a/tutorials/tutorial_code/high_order_differentiation/second_order/single_in_multiple_out.py b/tutorials/tutorial_code/high_order_differentiation/second_order/single_in_multiple_out.py deleted file mode 100644 index f13e911d6216342e3e829f06f1a48de14b4f4025..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/high_order_differentiation/second_order/single_in_multiple_out.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ========================================================== - -"""Single in multiple out tutorial -This sample code is applicable to GPU and Ascend. 
-""" -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -from mindspore import dtype as mstype -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.mul = ops.Mul() - def construct(self, x): - out = self.mul(x, x) - return out - -class Grad(nn.Cell): - def __init__(self, network): - super(Grad, self).__init__() - self.grad = ops.GradOperation(sens_param=False) - self.network = network - def construct(self, x): - gout = self.grad(self.network)(x) - return gout -class GradSec(nn.Cell): - def __init__(self, network): - super(GradSec, self).__init__() - self.grad = ops.GradOperation(sens_param=False) - self.network = network - def construct(self, x): - gout = self.grad(self.network)(x) - return gout - -net = Net() -firstgrad = Grad(net) # first order -secondgrad = GradSec(firstgrad) # second order -x = Tensor([0.1, 0.2, 0.3], dtype=mstype.float32) -output = secondgrad(x) -print(output) diff --git a/tutorials/tutorial_code/high_order_differentiation/second_order/single_in_single_out.py b/tutorials/tutorial_code/high_order_differentiation/second_order/single_in_single_out.py deleted file mode 100644 index 9251208ef3b157ccb38d8355e1b5d910d29e339a..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/high_order_differentiation/second_order/single_in_single_out.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ========================================================== - -"""Single in single out tutorial -This sample code is applicable to GPU and Ascend. -""" -import numpy as np -import mindspore.context as context -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor -context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - -class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - self.sin = ops.Sin() - def construct(self, x): - out = self.sin(x) - return out - -class Grad(nn.Cell): - def __init__(self, network): - super(Grad, self).__init__() - self.grad = ops.GradOperation() - self.network = network - def construct(self, x): - gout = self.grad(self.network)(x) - return gout -class GradSec(nn.Cell): - def __init__(self, network): - super(GradSec, self).__init__() - self.grad = ops.GradOperation() - self.network = network - def construct(self, x): - gout = self.grad(self.network)(x) - return gout - -net = Net() -firstgrad = Grad(net) # first order -secondgrad = GradSec(firstgrad) # second order -x_train = Tensor(np.array([1.0], dtype=np.float32)) -output = secondgrad(x_train) -print(output) diff --git a/tutorials/tutorial_code/lenet/lenet.py b/tutorials/tutorial_code/lenet/lenet.py deleted file mode 100644 index 9d72b4b7a99b8daf51a5adc529f0c586d44b59f5..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/lenet/lenet.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""Lenet Tutorial -This sample code is applicable to CPU, GPU and Ascend. -""" -import os -import argparse -import mindspore.dataset as ds -import mindspore.nn as nn -from mindspore import context, Model, load_checkpoint, load_param_into_net -from mindspore.common.initializer import Normal -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor -import mindspore.dataset.vision.c_transforms as CV -import mindspore.dataset.transforms.c_transforms as C -from mindspore.dataset.vision import Inter -from mindspore.nn.metrics import Accuracy -from mindspore import dtype as mstype -from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits -from utils.dataset import download_dataset - -def create_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1): - """ create dataset for train or test - Args: - data_path: Data path - batch_size: The number of data records in each group - repeat_size: The number of replicated data records - num_parallel_workers: The number of parallel workers - """ - # define dataset - mnist_ds = ds.MnistDataset(data_path) - - # define operation parameters - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - rescale_nml = 1 / 0.3081 - shift_nml = -1 * 0.1307 / 0.3081 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Resize images to (32, 32) - rescale_nml_op = CV.Rescale(rescale_nml, shift_nml) # normalize images - rescale_op = 
CV.Rescale(rescale, shift) # rescale images - hwc2chw_op = CV.HWC2CHW() # change shape from (height, width, channel) to (channel, height, width) to fit network. - type_cast_op = C.TypeCast(mstype.int32) # change data type of label to int32 to fit network - - # apply map operations on images - mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns="label", num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(operations=resize_op, input_columns="image", num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(operations=rescale_op, input_columns="image", num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(operations=rescale_nml_op, input_columns="image", num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns="image", num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script - mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True) - mnist_ds = mnist_ds.repeat(repeat_size) - - return mnist_ds - - -class LeNet5(nn.Cell): - """Lenet network structure.""" - # define the operator required - def __init__(self, num_class=10, num_channel=1): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - # use the preceding operators to construct networks - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - 
return x - - -def train_net(network_model, epoch_size, data_path, repeat_size, ckpoint_cb, sink_mode): - """Define the training method.""" - print("============== Starting Training ==============") - # load training dataset - ds_train = create_dataset(os.path.join(data_path, "train"), 32, repeat_size) - network_model.train(epoch_size, ds_train, callbacks=[ckpoint_cb, LossMonitor()], dataset_sink_mode=sink_mode) - - -def test_net(network, network_model, data_path): - """Define the evaluation method.""" - print("============== Starting Testing ==============") - # load the saved model for evaluation - param_dict = load_checkpoint("checkpoint_lenet-1_1875.ckpt") - # load parameter to the network - load_param_into_net(network, param_dict) - # load testing dataset - ds_eval = create_dataset(os.path.join(data_path, "test")) - acc = network_model.eval(ds_eval, dataset_sink_mode=False) - print("============== Accuracy:{} ==============".format(acc)) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='MindSpore LeNet Example') - parser.add_argument('--device_target', type=str, default="CPU", choices=['Ascend', 'GPU', 'CPU'], - help='device where the code will be implemented (default: CPU)') - args = parser.parse_args() - context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target) - dataset_sink_mode = not args.device_target == "CPU" - # download mnist dataset - download_dataset() - # learning rate setting - lr = 0.01 - momentum = 0.9 - dataset_size = 1 - mnist_path = "./MNIST_Data" - # define the loss function - net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') - train_epoch = 1 - # create the network - net = LeNet5() - # define the optimizer - net_opt = nn.Momentum(net.trainable_params(), lr, momentum) - config_ck = CheckpointConfig(save_checkpoint_steps=1875, keep_checkpoint_max=10) - # save the network model and parameters for subsequence fine-tuning - ckpoint = 
ModelCheckpoint(prefix="checkpoint_lenet", config=config_ck) - # group layers into an object with training and evaluation features - model = Model(net, net_loss, net_opt, metrics={"Accuracy": Accuracy()}) - - train_net(model, train_epoch, mnist_path, dataset_size, ckpoint, dataset_sink_mode) - test_net(net, model, mnist_path) diff --git a/tutorials/tutorial_code/lenet/utils/__init__.py b/tutorials/tutorial_code/lenet/utils/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/tutorials/tutorial_code/lenet/utils/dataset.py b/tutorials/tutorial_code/lenet/utils/dataset.py deleted file mode 100644 index eeee341fb46b7069e806f8977f967960ea768386..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/lenet/utils/dataset.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================ -"""download MNIST dataset""" -import os -import sys -import gzip -from urllib.parse import urlparse -import requests - -def unzipfile(gzip_path): - """unzip dataset file - Args: - gzip_path: dataset file path - """ - open_file = open(gzip_path.replace('.gz', ''), 'wb') - gz_file = gzip.GzipFile(gzip_path) - open_file.write(gz_file.read()) - gz_file.close() - -def download_progress(url, file_name): - """download mnist dataset - Args: - url: download url - file_name: dataset name - """ - res = requests.get(url, stream=True, verify=False) - # get mnist dataset size - total_size = int(res.headers["Content-Length"]) - temp_size = 0 - with open(file_name, "wb+") as f: - for chunk in res.iter_content(chunk_size=1024): - temp_size += len(chunk) - f.write(chunk) - f.flush() - done = int(100 * temp_size / total_size) - # show download progress - sys.stdout.write("\r[{}{}] {:.2f}%".format("█" * done, " " * (100 - done), 100 * temp_size / total_size)) - sys.stdout.flush() - print("\n============== {} is already ==============".format(file_name)) - unzipfile(file_name) - os.remove(file_name) - -def download_dataset(): - """Download the dataset from http://yann.lecun.com/exdb/mnist/.""" - print("************** Downloading the MNIST dataset **************") - train_path = "./MNIST_Data/train/" - test_path = "./MNIST_Data/test/" - train_path_check = os.path.exists(train_path) - test_path_check = os.path.exists(test_path) - if not train_path_check and not test_path_check: - os.makedirs(train_path) - os.makedirs(test_path) - train_url = {"http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz", "http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz"} - test_url = {"http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz", "http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz"} - for url in train_url: - url_parse = urlparse(url) - # split the file name from url - file_name = 
os.path.join(train_path, url_parse.path.split('/')[-1]) - if not os.path.exists(file_name.replace('.gz', '')): - download_progress(url, file_name) - for url in test_url: - url_parse = urlparse(url) - # split the file name from url - file_name = os.path.join(test_path, url_parse.path.split('/')[-1]) - if not os.path.exists(file_name.replace('.gz', '')): - download_progress(url, file_name) diff --git a/tutorials/tutorial_code/linear_regression.py b/tutorials/tutorial_code/linear_regression.py deleted file mode 100644 index c263f75cf596c29420335c9672c18d44c227903b..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/linear_regression.py +++ /dev/null @@ -1,52 +0,0 @@ -"""Linear Regression Tutorial -This sample code is applicable to CPU, GPU and Ascend. -""" -import numpy as np -from mindspore import dataset as ds -from mindspore.common.initializer import Normal -from mindspore import nn, Model, context -from mindspore.train.callback import LossMonitor - -context.set_context(mode=context.GRAPH_MODE, device_target="CPU") - - -def get_data(num, w=2.0, b=3.0): - for _ in range(num): - x = np.random.uniform(-10.0, 10.0) - noise = np.random.normal(0, 1) - y = x * w + b + noise - yield np.array([x]).astype(np.float32), np.array([y]).astype(np.float32) - - -def create_dataset(num_data, batch_size=16, repeat_size=1): - input_data = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data', 'label']) - input_data = input_data.batch(batch_size) - input_data = input_data.repeat(repeat_size) - return input_data - - -class LinearNet(nn.Cell): - def __init__(self): - super(LinearNet, self).__init__() - self.fc = nn.Dense(1, 1, Normal(0.02), Normal(0.02)) - - def construct(self, x): - x = self.fc(x) - return x - - -if __name__ == "__main__": - - data_number = 1600 - batch_number = 16 - repeat_number = 1 - lr = 0.005 - momentum = 0.9 - net = LinearNet() - net_loss = nn.loss.MSELoss() - opt = nn.Momentum(net.trainable_params(), lr, momentum) - model = 
Model(net, net_loss, opt) - ds_train = create_dataset(data_number, batch_size=batch_number, repeat_size=repeat_number) - model.train(1, ds_train, callbacks=LossMonitor(), dataset_sink_mode=False) - for param in net.trainable_params(): - print(param, param.asnumpy()) diff --git a/tutorials/tutorial_code/mixed_precision/mixed_precision.py b/tutorials/tutorial_code/mixed_precision/mixed_precision.py deleted file mode 100644 index 91f3257cb42c11d7d97844b40fdaff6b56c4b806..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/mixed_precision/mixed_precision.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ - -"""Mixed Precision Tutorial -This sample code is applicable to GPU and Ascend. 
-""" -import mindspore.nn as nn -from mindspore import context, Model -from mindspore.train.callback import LossMonitor -from mindspore.nn.metrics import Accuracy -from src.lenet import LeNet5 -from src.datasets import create_dataset - - -if __name__ == "__main__": - - context.set_context(mode=context.GRAPH_MODE, device_target="GPU") - - ds_train = create_dataset("./datasets/MNIST_Data/train", 32) - ds_eval = create_dataset("./datasets/MNIST_Data/test", 32) - # Initialize network - network = LeNet5(10) - - # Define Loss and Optimizer - net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - net_opt = nn.Momentum(network.trainable_params(), learning_rate=0.01, momentum=0.9) - # amp_leval=O2 in GPU, amp_leval=O3 in Ascend, O0 is without mixed precision - model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}, amp_level="O2") - - # Run training - model.train(epoch=1, callbacks=[LossMonitor()], train_dataset=ds_train) - - # Run training - acc = model.eval(ds_eval, dataset_sink_mode=False) - print("====Accuracy====:", acc) diff --git a/tutorials/tutorial_code/mixed_precision/src/__init__.py b/tutorials/tutorial_code/mixed_precision/src/__init__.py deleted file mode 100644 index 919b0579077e1ca4dc63cedb7ae3a1a3e0134283..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/mixed_precision/src/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ diff --git a/tutorials/tutorial_code/mixed_precision/src/datasets.py b/tutorials/tutorial_code/mixed_precision/src/datasets.py deleted file mode 100644 index 9f77df9573f4f799a8b3dbd8aacabf5ce2b07234..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/mixed_precision/src/datasets.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================ - -""" -Processing datasets -""" -import mindspore.dataset.vision.c_transforms as CV -from mindspore.dataset.vision import Inter -import mindspore.dataset as ds -import mindspore.dataset.transforms.c_transforms as CT -from mindspore import dtype as mstype - -def create_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1): - """ - create dataset for train or test - """ - # define dataset - mnist_ds = ds.MnistDataset(data_path) - - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - rescale_nml = 1 / 0.3081 - shift_nml = -1 * 0.1307 / 0.3081 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode - rescale_nml_op = CV.Rescale(rescale_nml, shift_nml) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = CT.TypeCast(mstype.int32) - - # apply map operations on images - mnist_ds = mnist_ds.map(input_columns="label", operations=type_cast_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=resize_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_nml_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=hwc2chw_op, num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script - mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True) - mnist_ds = mnist_ds.repeat(repeat_size) - - return mnist_ds diff --git a/tutorials/tutorial_code/mixed_precision/src/lenet.py b/tutorials/tutorial_code/mixed_precision/src/lenet.py deleted file mode 100644 index 
9b1bba0325e4ce3ed7eb24e45e8d8ce0527f11c8..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/mixed_precision/src/lenet.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ - -""" -LeNet5 network -""" -import mindspore.nn as nn -from mindspore.common.initializer import Normal - -class LeNet5(nn.Cell): - """ - Lenet network - - Args: - num_class (int): the number of classes. Default: 10. - num_channel (int): the number of channels. Default: 1. 
- - Returns: - Tensor, output tensor - Examples: - >>> LeNet(num_class=10) - - """ - - def __init__(self, num_class=10, num_channel=1): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x diff --git a/tutorials/tutorial_code/model_encrypt_protection/encrypt_checkpoint.py b/tutorials/tutorial_code/model_encrypt_protection/encrypt_checkpoint.py deleted file mode 100644 index b8c67792fc7ea3236ff4d7fc62512df5fcc79c73..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/model_encrypt_protection/encrypt_checkpoint.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""Tutorial of safely exporting and loading CheckPoint files. 
-This sample code is applicable to CPU, GPU and Ascend in the Linux platform. -""" -import mindspore.nn as nn -import mindspore.dataset.vision.c_transforms as CV -import mindspore.dataset.transforms.c_transforms as C -import mindspore.dataset as ds -from mindspore.dataset.vision import Inter -from mindspore import dtype as mstype - -from mindspore.nn import SoftmaxCrossEntropyWithLogits -from mindspore.nn import Accuracy -from mindspore import context, Model -from mindspore import save_checkpoint, load_checkpoint, load_param_into_net -from mindspore.train.callback import CheckpointConfig, ModelCheckpoint, LossMonitor -from mindspore.common.initializer import Normal - - -def create_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1): - """ - create dataset for train or test - - Args: - data_path (str): Data path - batch_size (int): The number of data records in each group - repeat_size (int): The number of replicated data records - num_parallel_workers (int): The number of parallel workers - """ - # define dataset - mnist_ds = ds.MnistDataset(data_path) - - # define some parameters needed for data enhancement and rough justification - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - rescale_nml = 1 / 0.3081 - shift_nml = -1 * 0.1307 / 0.3081 - - # according to the parameters, generate the corresponding data enhancement method - resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) - rescale_nml_op = CV.Rescale(rescale_nml, shift_nml) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = C.TypeCast(mstype.int32) - - # using map to apply operations to a dataset - mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns="label", num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(operations=resize_op, input_columns="image", num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(operations=rescale_op, input_columns="image", 
num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(operations=rescale_nml_op, input_columns="image", num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(operations=hwc2chw_op, input_columns="image", num_parallel_workers=num_parallel_workers) - - # process the generated dataset - buffer_size = 10000 - mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) - mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True) - mnist_ds = mnist_ds.repeat(repeat_size) - - return mnist_ds - - -class LeNet5(nn.Cell): - """Lenet network structure.""" - # define the operator required - def __init__(self, num_class=10, num_channel=1): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - # use the preceding operators to construct networks - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x - - -if __name__ == "__main__": - context.set_context(mode=context.GRAPH_MODE, device_target="CPU") - lr = 0.01 - momentum = 0.9 - - # create the network - network = LeNet5() - - # define the optimizer - net_opt = nn.Momentum(network.trainable_params(), lr, momentum) - - # define the loss function - net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') - - # define the model - model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}) - - epoch_size = 1 - mnist_path = "./datasets/MNIST_Data" - - train_dataset = create_dataset("./datasets/MNIST_Data/train") - 
eval_dataset = create_dataset("./datasets/MNIST_Data/test") - - print("========== The Training Model is Defined. ==========") - - # train the model and export the encrypted CheckPoint file through Callback - config_ck = CheckpointConfig(save_checkpoint_steps=1875, keep_checkpoint_max=10, enc_key=b'0123456789ABCDEF', - enc_mode='AES-GCM') - ckpoint_cb = ModelCheckpoint(prefix='lenet_enc', directory=None, config=config_ck) - model.train(10, train_dataset, dataset_sink_mode=False, callbacks=[ckpoint_cb, LossMonitor(1875)]) - acc = model.eval(eval_dataset, dataset_sink_mode=False) - print("Accuracy: {}".format(acc["Accuracy"])) - - # export the encrypted CheckPoint file through save_checkpoint - save_checkpoint(network, 'lenet_enc.ckpt', enc_key=b'0123456789ABCDEF', enc_mode='AES-GCM') - - # load encrypted CheckPoint file and eval - param_dict = load_checkpoint('lenet_enc-10_1875.ckpt', dec_key=b'0123456789ABCDEF', dec_mode='AES-GCM') - load_param_into_net(network, param_dict) - acc = model.eval(eval_dataset, dataset_sink_mode=False) - print("Accuracy loading encrypted CheckPoint: {}".format(acc["Accuracy"])) diff --git a/tutorials/tutorial_code/resnet/cifar_resnet50.py b/tutorials/tutorial_code/resnet/cifar_resnet50.py deleted file mode 100644 index b6880e4ff9998e89ce3cae6625015d70ebe19488..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/resnet/cifar_resnet50.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""cifar_resnet50 -This sample code is applicable to Ascend. -""" -import os -import random -import argparse -from mindspore import dtype as mstype -import mindspore.dataset as ds -import mindspore.dataset.vision.c_transforms as C -import mindspore.dataset.transforms.c_transforms as C2 -from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits -from mindspore.communication.management import init -from mindspore.nn.optim.momentum import Momentum -from mindspore import Model, context -from mindspore.context import ParallelMode -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor -from mindspore import load_checkpoint, load_param_into_net -from mindspore.parallel._auto_parallel_context import auto_parallel_context -from resnet import resnet50 - -random.seed(1) -parser = argparse.ArgumentParser(description='Image classification') -parser.add_argument('--run_distribute', type=bool, default=False, help='Run distribute.') -parser.add_argument('--device_num', type=int, default=1, help='Device num.') -parser.add_argument('--device_target', type=str, default="Ascend", help='Device choice Ascend or GPU') -parser.add_argument('--do_train', type=bool, default=True, help='Do train or not.') -parser.add_argument('--do_eval', type=bool, default=False, help='Do eval or not.') -parser.add_argument('--epoch_size', type=int, default=1, help='Epoch size.') -parser.add_argument('--batch_size', type=int, default=32, help='Batch size.') -parser.add_argument('--num_classes', type=int, default=10, help='Num classes.') -parser.add_argument('--checkpoint_path', type=str, default=None, help='CheckPoint file path.') -parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path.') -args_opt = parser.parse_args() - -data_home = args_opt.dataset_path - 
-context.set_context(mode=context.GRAPH_MODE, device_target=args_opt.device_target) - -if args_opt.device_target == "Ascend": - device_id = int(os.getenv('DEVICE_ID')) - context.set_context(device_id=device_id) - -def create_dataset(repeat_num=1, training=True): - """ - create data for next use such as training or inferring - """ - cifar_ds = ds.Cifar10Dataset(data_home) - - if args_opt.run_distribute: - rank_id = int(os.getenv('RANK_ID')) - rank_size = int(os.getenv('RANK_SIZE')) - cifar_ds = ds.Cifar10Dataset(data_home, num_shards=rank_size, shard_id=rank_id) - - resize_height = 224 - resize_width = 224 - rescale = 1.0 / 255.0 - shift = 0.0 - - # define map operations - random_crop_op = C.RandomCrop((32, 32), (4, 4, 4, 4)) # padding_mode default CONSTANT - random_horizontal_op = C.RandomHorizontalFlip() - resize_op = C.Resize((resize_height, resize_width)) # interpolation default BILINEAR - rescale_op = C.Rescale(rescale, shift) - normalize_op = C.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)) - changeswap_op = C.HWC2CHW() - type_cast_op = C2.TypeCast(mstype.int32) - - c_trans = [] - if training: - c_trans = [random_crop_op, random_horizontal_op] - c_trans += [resize_op, rescale_op, normalize_op, - changeswap_op] - - # apply map operations on images - cifar_ds = cifar_ds.map(operations=type_cast_op, input_columns="label") - cifar_ds = cifar_ds.map(operations=c_trans, input_columns="image") - - # apply shuffle operations - cifar_ds = cifar_ds.shuffle(buffer_size=10) - - # apply batch operations - cifar_ds = cifar_ds.batch(batch_size=args_opt.batch_size, drop_remainder=True) - - # apply repeat operations - cifar_ds = cifar_ds.repeat(repeat_num) - - return cifar_ds - -if __name__ == '__main__': - # in this way by judging the mark of args, users will decide which function to use - if not args_opt.do_eval and args_opt.run_distribute: - context.set_auto_parallel_context(device_num=args_opt.device_num, parallel_mode=ParallelMode.DATA_PARALLEL) - 
auto_parallel_context().set_all_reduce_fusion_split_indices([140]) - init() - - epoch_size = args_opt.epoch_size - net = resnet50(args_opt.batch_size, args_opt.num_classes) - ls = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") - opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, 0.9) - - model = Model(net, loss_fn=ls, optimizer=opt, metrics={'acc'}) - - # as for train, users could use model.train - if args_opt.do_train: - dataset = create_dataset() - batch_num = dataset.get_dataset_size() - config_ck = CheckpointConfig(save_checkpoint_steps=batch_num, keep_checkpoint_max=35) - ckpoint_cb = ModelCheckpoint(prefix="train_resnet_cifar10", directory="./", config=config_ck) - loss_cb = LossMonitor() - model.train(epoch_size, dataset, callbacks=[ckpoint_cb, loss_cb]) - - # as for evaluation, users could use model.eval - if args_opt.do_eval: - if args_opt.checkpoint_path: - param_dict = load_checkpoint(args_opt.checkpoint_path) - load_param_into_net(net, param_dict) - eval_dataset = create_dataset(training=False) - res = model.eval(eval_dataset) - print("result: ", res) diff --git a/tutorials/tutorial_code/resnet/resnet.py b/tutorials/tutorial_code/resnet/resnet.py deleted file mode 100644 index eb93c81732311155b069d2ab9b239a4ffc2c4010..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/resnet/resnet.py +++ /dev/null @@ -1,310 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -'''resnet -The sample can be run on Ascend 910 AI processor. -''' -import numpy as np -import mindspore.nn as nn -from mindspore import Tensor -import mindspore.ops as ops - - -def weight_variable_0(shape): - """weight_variable_0""" - zeros = np.zeros(shape).astype(np.float32) - return Tensor(zeros) - - -def weight_variable_1(shape): - """weight_variable_1""" - ones = np.ones(shape).astype(np.float32) - return Tensor(ones) - - -def conv3x3(in_channels, out_channels, stride=1, padding=0): - """3x3 convolution """ - return nn.Conv2d(in_channels, out_channels, - kernel_size=3, stride=stride, padding=padding, weight_init='XavierUniform', - has_bias=False, pad_mode="same") - - -def conv1x1(in_channels, out_channels, stride=1, padding=0): - """1x1 convolution""" - return nn.Conv2d(in_channels, out_channels, - kernel_size=1, stride=stride, padding=padding, weight_init='XavierUniform', - has_bias=False, pad_mode="same") - - -def conv7x7(in_channels, out_channels, stride=1, padding=0): - """1x1 convolution""" - return nn.Conv2d(in_channels, out_channels, - kernel_size=7, stride=stride, padding=padding, weight_init='XavierUniform', - has_bias=False, pad_mode="same") - - -def bn_with_initialize(out_channels): - """bn_with_initialize""" - shape = (out_channels) - mean = weight_variable_0(shape) - var = weight_variable_1(shape) - beta = weight_variable_0(shape) - bn = nn.BatchNorm2d(out_channels, momentum=0.99, eps=0.00001, gamma_init='Uniform', - beta_init=beta, moving_mean_init=mean, moving_var_init=var) - return bn - - -def bn_with_initialize_last(out_channels): - """bn_with_initialize_last""" - shape = (out_channels) - mean = weight_variable_0(shape) - var = weight_variable_1(shape) - beta = weight_variable_0(shape) - bn = nn.BatchNorm2d(out_channels, momentum=0.99, eps=0.00001, 
gamma_init='Uniform', - beta_init=beta, moving_mean_init=mean, moving_var_init=var) - return bn - - -def fc_with_initialize(input_channels, out_channels): - """fc_with_initialize""" - return nn.Dense(input_channels, out_channels, weight_init='XavierUniform', bias_init='Uniform') - - -class ResidualBlock(nn.Cell): - """ResidualBlock""" - expansion = 4 - - def __init__(self, - in_channels, - out_channels, - stride=1): - """init block""" - super(ResidualBlock, self).__init__() - - out_chls = out_channels // self.expansion - self.conv1 = conv1x1(in_channels, out_chls, stride=stride, padding=0) - self.bn1 = bn_with_initialize(out_chls) - - self.conv2 = conv3x3(out_chls, out_chls, stride=1, padding=0) - self.bn2 = bn_with_initialize(out_chls) - - self.conv3 = conv1x1(out_chls, out_channels, stride=1, padding=0) - self.bn3 = bn_with_initialize_last(out_channels) - - self.relu = ops.ReLU() - self.add = ops.Add() - - def construct(self, x): - """construct""" - identity = x - - out = self.conv1(x) - out = self.bn1(out) - out = self.relu(out) - - out = self.conv2(out) - out = self.bn2(out) - out = self.relu(out) - - out = self.conv3(out) - out = self.bn3(out) - - out = self.add(out, identity) - out = self.relu(out) - - return out - - -class ResidualBlockWithDown(nn.Cell): - """ResidualBlockWithDown""" - expansion = 4 - - def __init__(self, - in_channels, - out_channels, - stride=1, - down_sample=False): - """init block with down""" - super(ResidualBlockWithDown, self).__init__() - - out_chls = out_channels // self.expansion - self.conv1 = conv1x1(in_channels, out_chls, stride=stride, padding=0) - self.bn1 = bn_with_initialize(out_chls) - - self.conv2 = conv3x3(out_chls, out_chls, stride=1, padding=0) - self.bn2 = bn_with_initialize(out_chls) - - self.conv3 = conv1x1(out_chls, out_channels, stride=1, padding=0) - self.bn3 = bn_with_initialize_last(out_channels) - - self.relu = ops.ReLU() - self.down_sample = down_sample - - self.conv_down_sample = conv1x1(in_channels, 
out_channels, stride=stride, padding=0) - self.bn_down_sample = bn_with_initialize(out_channels) - self.add = ops.Add() - - def construct(self, x): - """construct""" - identity = x - - out = self.conv1(x) - out = self.bn1(out) - out = self.relu(out) - - out = self.conv2(out) - out = self.bn2(out) - out = self.relu(out) - - out = self.conv3(out) - out = self.bn3(out) - - identity = self.conv_down_sample(identity) - identity = self.bn_down_sample(identity) - - out = self.add(out, identity) - out = self.relu(out) - - return out - - -class MakeLayer0(nn.Cell): - """MakeLayer0""" - - def __init__(self, block, in_channels, out_channels, stride): - """init""" - super(MakeLayer0, self).__init__() - self.a = ResidualBlockWithDown(in_channels, out_channels, stride=1, down_sample=True) - self.b = block(out_channels, out_channels, stride=stride) - self.c = block(out_channels, out_channels, stride=1) - - def construct(self, x): - """construct""" - x = self.a(x) - x = self.b(x) - x = self.c(x) - - return x - - -class MakeLayer1(nn.Cell): - """MakeLayer1""" - - def __init__(self, block, in_channels, out_channels, stride): - """init""" - super(MakeLayer1, self).__init__() - self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True) - self.b = block(out_channels, out_channels, stride=1) - self.c = block(out_channels, out_channels, stride=1) - self.d = block(out_channels, out_channels, stride=1) - - def construct(self, x): - """construct""" - x = self.a(x) - x = self.b(x) - x = self.c(x) - x = self.d(x) - - return x - - -class MakeLayer2(nn.Cell): - """MakeLayer2""" - - def __init__(self, block, in_channels, out_channels, stride): - """init""" - super(MakeLayer2, self).__init__() - self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True) - self.b = block(out_channels, out_channels, stride=1) - self.c = block(out_channels, out_channels, stride=1) - self.d = block(out_channels, out_channels, stride=1) - self.e = 
block(out_channels, out_channels, stride=1) - self.f = block(out_channels, out_channels, stride=1) - - def construct(self, x): - """construct""" - x = self.a(x) - x = self.b(x) - x = self.c(x) - x = self.d(x) - x = self.e(x) - x = self.f(x) - - return x - - -class MakeLayer3(nn.Cell): - """MakeLayer3""" - - def __init__(self, block, in_channels, out_channels, stride): - """init""" - super(MakeLayer3, self).__init__() - self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True) - self.b = block(out_channels, out_channels, stride=1) - self.c = block(out_channels, out_channels, stride=1) - - def construct(self, x): - """construct""" - x = self.a(x) - x = self.b(x) - x = self.c(x) - - return x - - -class ResNet(nn.Cell): - """ResNet""" - - def __init__(self, block, num_classes=100, batch_size=32): - """init""" - super(ResNet, self).__init__() - self.batch_size = batch_size - self.num_classes = num_classes - - self.conv1 = conv7x7(3, 64, stride=2, padding=0) - - self.bn1 = bn_with_initialize(64) - self.relu = ops.ReLU() - self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode="same") - - self.layer1 = MakeLayer0(block, in_channels=64, out_channels=256, stride=1) - self.layer2 = MakeLayer1(block, in_channels=256, out_channels=512, stride=2) - self.layer3 = MakeLayer2(block, in_channels=512, out_channels=1024, stride=2) - self.layer4 = MakeLayer3(block, in_channels=1024, out_channels=2048, stride=2) - - self.pool = ops.ReduceMean(keep_dims=True) - self.squeeze = ops.Squeeze(axis=(2, 3)) - self.fc = fc_with_initialize(512 * block.expansion, num_classes) - - def construct(self, x): - """construct""" - x = self.conv1(x) - x = self.bn1(x) - x = self.relu(x) - x = self.maxpool(x) - - x = self.layer1(x) - x = self.layer2(x) - x = self.layer3(x) - x = self.layer4(x) - - x = self.pool(x, (2, 3)) - x = self.squeeze(x) - x = self.fc(x) - return x - - -def resnet50(batch_size, num_classes): - """create resnet50""" - return ResNet(ResidualBlock, 
num_classes, batch_size) diff --git a/tutorials/tutorial_code/sample_for_cloud/dataset.py b/tutorials/tutorial_code/sample_for_cloud/dataset.py deleted file mode 100644 index 95ae5186f93097c21f946624c21da862f655a154..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/sample_for_cloud/dataset.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""Create train or eval dataset.""" -import os -from mindspore import dtype as mstype -import mindspore.dataset.engine as de -import mindspore.dataset.vision.c_transforms as C -import mindspore.dataset.transforms.c_transforms as C2 - - -device_id = int(os.getenv('DEVICE_ID')) -device_num = int(os.getenv('RANK_SIZE')) - - -def create_dataset(dataset_path, do_train, repeat_num=1, batch_size=32): - """ - Create a train or eval dataset. - - Args: - dataset_path (str): The path of dataset. - do_train (bool): Whether dataset is used for train or eval. - repeat_num (int): The repeat times of dataset. Default: 1. - batch_size (int): The batch size of dataset. Default: 32. - - Returns: - Dataset. 
- """ - if do_train: - dataset_path = os.path.join(dataset_path, 'train') - do_shuffle = True - else: - dataset_path = os.path.join(dataset_path, 'eval') - do_shuffle = False - - if device_num == 1 or not do_train: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=do_shuffle) - else: - ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=8, shuffle=do_shuffle, - num_shards=device_num, shard_id=device_id) - - resize_height = 224 - resize_width = 224 - rescale = 1.0 / 255.0 - shift = 0.0 - - # define map operations - random_crop_op = C.RandomCrop((32, 32), (4, 4, 4, 4)) - random_horizontal_flip_op = C.RandomHorizontalFlip(device_id / (device_id + 1)) - - resize_op = C.Resize((resize_height, resize_width)) - rescale_op = C.Rescale(rescale, shift) - normalize_op = C.Normalize([0.4914, 0.4822, 0.4465], [0.2023, 0.1994, 0.2010]) - - change_swap_op = C.HWC2CHW() - - trans = [] - if do_train: - trans += [random_crop_op, random_horizontal_flip_op] - - trans += [resize_op, rescale_op, normalize_op, change_swap_op] - - type_cast_op = C2.TypeCast(mstype.int32) - - ds = ds.map(operations=type_cast_op, input_columns="label", num_parallel_workers=8) - ds = ds.map(operations=trans, input_columns="image", num_parallel_workers=8) - - # apply batch operations - ds = ds.batch(batch_size, drop_remainder=True) - - # apply dataset repeat operation - ds = ds.repeat(repeat_num) - - return ds diff --git a/tutorials/tutorial_code/sample_for_cloud/resnet.py b/tutorials/tutorial_code/sample_for_cloud/resnet.py deleted file mode 100755 index 31cae9a3e8e1a16e5b256cdb8eba192298060928..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/sample_for_cloud/resnet.py +++ /dev/null @@ -1,283 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""ResNet.""" -import numpy as np -import mindspore.nn as nn -import mindspore.ops as ops -from mindspore import Tensor - - -def _weight_variable(shape, factor=0.01): - init_value = np.random.randn(*shape).astype(np.float32) * factor - return Tensor(init_value) - - -def _conv3x3(in_channel, out_channel, stride=1): - weight_shape = (out_channel, in_channel, 3, 3) - weight = _weight_variable(weight_shape) - return nn.Conv2d(in_channel, out_channel, - kernel_size=3, stride=stride, padding=0, pad_mode='same', weight_init=weight) - - -def _conv1x1(in_channel, out_channel, stride=1): - weight_shape = (out_channel, in_channel, 1, 1) - weight = _weight_variable(weight_shape) - return nn.Conv2d(in_channel, out_channel, - kernel_size=1, stride=stride, padding=0, pad_mode='same', weight_init=weight) - - -def _conv7x7(in_channel, out_channel, stride=1): - weight_shape = (out_channel, in_channel, 7, 7) - weight = _weight_variable(weight_shape) - return nn.Conv2d(in_channel, out_channel, - kernel_size=7, stride=stride, padding=0, pad_mode='same', weight_init=weight) - - -def _bn(channel): - return nn.BatchNorm2d(channel, eps=1e-4, momentum=0.9, - gamma_init=1, beta_init=0, moving_mean_init=0, moving_var_init=1) - - -def _bn_last(channel): - return nn.BatchNorm2d(channel, eps=1e-4, momentum=0.9, - gamma_init=0, beta_init=0, moving_mean_init=0, moving_var_init=1) - - -def _fc(in_channel, out_channel): - weight_shape = (out_channel, in_channel) - weight = _weight_variable(weight_shape) - return 
nn.Dense(in_channel, out_channel, has_bias=True, weight_init=weight, bias_init=0) - - -class ResidualBlock(nn.Cell): - """ - ResNet V1 residual block definition. - - Args: - in_channel (int): Input channel. - out_channel (int): Output channel. - stride (int): Stride size for the first convolutional layer. Default: 1. - - Returns: - Tensor, output tensor. - - Examples: - >>> ResidualBlock(3, 256, stride=2) - """ - expansion = 4 - - def __init__(self, - in_channel, - out_channel, - stride=1): - super(ResidualBlock, self).__init__() - - channel = out_channel // self.expansion - self.conv1 = _conv1x1(in_channel, channel, stride=1) - self.bn1 = _bn(channel) - - self.conv2 = _conv3x3(channel, channel, stride=stride) - self.bn2 = _bn(channel) - - self.conv3 = _conv1x1(channel, out_channel, stride=1) - self.bn3 = _bn_last(out_channel) - - self.relu = nn.ReLU() - - self.down_sample = False - - if stride != 1 or in_channel != out_channel: - self.down_sample = True - self.down_sample_layer = None - - if self.down_sample: - self.down_sample_layer = nn.SequentialCell([_conv1x1(in_channel, out_channel, stride), - _bn(out_channel)]) - self.add = ops.Add() - - def construct(self, x): # pylint: disable=missing-docstring - identity = x - - out = self.conv1(x) - out = self.bn1(out) - out = self.relu(out) - - out = self.conv2(out) - out = self.bn2(out) - out = self.relu(out) - - out = self.conv3(out) - out = self.bn3(out) - - if self.down_sample: - identity = self.down_sample_layer(identity) - - out = self.add(out, identity) - out = self.relu(out) - - return out - - -class ResNet(nn.Cell): - """ - ResNet architecture. - - Args: - block (Cell): Block for network. - layer_nums (list): Numbers of block in different layers. - in_channels (list): Input channel in each layer. - out_channels (list): Output channel in each layer. - strides (list): Stride size in each layer. - num_classes (int): The number of classes that the training images are belonging to. 
- Returns: - Tensor, output tensor. - - Examples: - >>> ResNet(ResidualBlock, - >>> [3, 4, 6, 3], - >>> [64, 256, 512, 1024], - >>> [256, 512, 1024, 2048], - >>> [1, 2, 2, 2], - >>> 10) - """ - - def __init__(self, - block, - layer_nums, - in_channels, - out_channels, - strides, - num_classes): - super(ResNet, self).__init__() - - if not len(layer_nums) == len(in_channels) == len(out_channels) == 4: - raise ValueError("the length of layer_num, in_channels, out_channels list must be 4!") - - self.conv1 = _conv7x7(3, 64, stride=2) - self.bn1 = _bn(64) - self.relu = ops.ReLU() - self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode="same") - - self.layer1 = self._make_layer(block, - layer_nums[0], - in_channel=in_channels[0], - out_channel=out_channels[0], - stride=strides[0]) - self.layer2 = self._make_layer(block, - layer_nums[1], - in_channel=in_channels[1], - out_channel=out_channels[1], - stride=strides[1]) - self.layer3 = self._make_layer(block, - layer_nums[2], - in_channel=in_channels[2], - out_channel=out_channels[2], - stride=strides[2]) - self.layer4 = self._make_layer(block, - layer_nums[3], - in_channel=in_channels[3], - out_channel=out_channels[3], - stride=strides[3]) - - self.mean = ops.ReduceMean(keep_dims=True) - self.flatten = nn.Flatten() - self.end_point = _fc(out_channels[3], num_classes) - - def _make_layer(self, block, layer_num, in_channel, out_channel, stride): - """ - Make stage network of ResNet. - - Args: - block (Cell): Resnet block. - layer_num (int): Layer number. - in_channel (int): Input channel. - out_channel (int): Output channel. - stride (int): Stride size for the first convolutional layer. - - Returns: - SequentialCell, the output layer. 
- - Examples: - >>> _make_layer(ResidualBlock, 3, 128, 256, 2) - """ - layers = [] - - resnet_block = block(in_channel, out_channel, stride=stride) - layers.append(resnet_block) - - for _ in range(1, layer_num): - resnet_block = block(out_channel, out_channel, stride=1) - layers.append(resnet_block) - - return nn.SequentialCell(layers) - - def construct(self, x): # pylint: disable=missing-docstring - x = self.conv1(x) - x = self.bn1(x) - x = self.relu(x) - c1 = self.maxpool(x) - - c2 = self.layer1(c1) - c3 = self.layer2(c2) - c4 = self.layer3(c3) - c5 = self.layer4(c4) - - out = self.mean(c5, (2, 3)) - out = self.flatten(out) - out = self.end_point(out) - - return out - - -def resnet50(class_num=10): - """ - Get ResNet50 neural network. - - Args: - class_num (int): Class number. - - Returns: - Cell, cell instance of ResNet50 neural network. - - Examples: - >>> net = resnet50(10) - """ - return ResNet(ResidualBlock, - [3, 4, 6, 3], - [64, 256, 512, 1024], - [256, 512, 1024, 2048], - [1, 2, 2, 2], - class_num) - - -def resnet101(class_num=1001): - """ - Get ResNet101 neural network. - - Args: - class_num (int): Class number. - - Returns: - Cell, cell instance of ResNet101 neural network. - - Examples: - >>> net = resnet101(1001) - """ - return ResNet(ResidualBlock, - [3, 4, 23, 3], - [64, 256, 512, 1024], - [256, 512, 1024, 2048], - [1, 2, 2, 2], - class_num) diff --git a/tutorials/tutorial_code/sample_for_cloud/resnet50_train.py b/tutorials/tutorial_code/sample_for_cloud/resnet50_train.py deleted file mode 100644 index b07b8f7fc776988f0f26aa0de1592bb5bec95ffe..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/sample_for_cloud/resnet50_train.py +++ /dev/null @@ -1,179 +0,0 @@ -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -"""ResNet50 model train with MindSpore -This sample code is applicable to GPU and Ascend -""" -import os -import argparse -import random -import time -import numpy as np -import moxing as mox - -from mindspore import context, Tensor, Model -from mindspore.nn.optim.momentum import Momentum -from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits -from mindspore.context import ParallelMode -from mindspore.train.callback import Callback, LossMonitor -from mindspore import FixedLossScaleManager -from mindspore.communication.management import init -import mindspore.dataset.engine as de - -from dataset import create_dataset, device_id, device_num -from resnet import resnet50 - -random.seed(1) -np.random.seed(1) -de.config.set_seed(1) - - -class PerformanceCallback(Callback): - """ - Training performance callback. - - Args: - batch_size (int): Batch number for one step. 
- """ - def __init__(self, batch_size): - super(PerformanceCallback, self).__init__() - self.batch_size = batch_size - self.last_step = 0 - self.epoch_begin_time = 0 - - def step_begin(self, run_context): - self.epoch_begin_time = time.time() - - def step_end(self, run_context): - params = run_context.original_args() - cost_time = time.time() - self.epoch_begin_time - train_steps = params.cur_step_num -self.last_step - print(f'epoch {params.cur_epoch_num} cost time = {cost_time}, train step num: {train_steps}, ' - f'one step time: {1000*cost_time/train_steps} ms, ' - f'train samples per second of cluster: {device_num*train_steps*self.batch_size/cost_time:.1f}\n') - self.last_step = run_context.original_args().cur_step_num - - -def get_lr(global_step, - total_epochs, - steps_per_epoch, - lr_init=0.01, - lr_max=0.1, - warmup_epochs=5): - """ - Generate learning rate array. - - Args: - global_step (int): Initial step of training. - total_epochs (int): Total epoch of training. - steps_per_epoch (float): Steps of one epoch. - lr_init (float): Initial learning rate. Default: 0.01. - lr_max (float): Maximum learning rate. Default: 0.1. - warmup_epochs (int): The number of warming up epochs. Default: 5. - - Returns: - np.array, learning rate array. 
- """ - lr_each_step = [] - total_steps = steps_per_epoch * total_epochs - warmup_steps = steps_per_epoch * warmup_epochs - if warmup_steps != 0: - inc_each_step = (float(lr_max) - float(lr_init)) / float(warmup_steps) - else: - inc_each_step = 0 - for i in range(int(total_steps)): - if i < warmup_steps: - lr = float(lr_init) + inc_each_step * float(i) - else: - base = (1.0 - (float(i) - float(warmup_steps)) / (float(total_steps) - float(warmup_steps))) - lr = float(lr_max) * base * base - if lr < 0.0: - lr = 0.0 - lr_each_step.append(lr) - - current_step = global_step - lr_each_step = np.array(lr_each_step).astype(np.float32) - learning_rate = lr_each_step[current_step:] - - return learning_rate - - -def resnet50_train(args): - """Training the ResNet-50.""" - epoch_size = args.epoch_size - batch_size = 32 - class_num = 10 - loss_scale_num = 1024 - local_data_path = '/cache/data' - - # set graph mode and parallel mode - context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", save_graphs=False) - context.set_context(device_id=device_id) - if device_num > 1: - context.set_auto_parallel_context(device_num=device_num, - parallel_mode=ParallelMode.DATA_PARALLEL, - gradients_mean=True) - init() - local_data_path = os.path.join(local_data_path, str(device_id)) - - # data download - print('Download data.') - mox.file.copy_parallel(src_url=args.data_url, dst_url=local_data_path) - - # create dataset - print('Create train and evaluate dataset.') - train_dataset = create_dataset(dataset_path=local_data_path, do_train=True, - repeat_num=1, batch_size=batch_size) - eval_dataset = create_dataset(dataset_path=local_data_path, do_train=False, - repeat_num=1, batch_size=batch_size) - train_step_size = train_dataset.get_dataset_size() - print('Create dataset success.') - - # create model - net = resnet50(class_num=class_num) - # reduction='mean' means that apply reduction of mean to loss - loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') - lr = 
Tensor(get_lr(global_step=0, total_epochs=epoch_size, steps_per_epoch=train_step_size)) - opt = Momentum(net.trainable_params(), lr, momentum=0.9, weight_decay=1e-4, loss_scale=loss_scale_num) - loss_scale = FixedLossScaleManager(loss_scale_num, False) - - # amp_level="O2" means that the hybrid precision of O2 mode is used for training - # the whole network except that batchnoram will be cast into float16 format and dynamic loss scale will be used - # 'keep_batchnorm_fp32 = False' means that use the float16 format - model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'}, amp_level="O2", keep_batchnorm_fp32=False, - loss_scale_manager=loss_scale) - - # define performance callback to show ips and loss callback to show loss for every epoch - performance_cb = PerformanceCallback(batch_size) - loss_cb = LossMonitor() - cb = [performance_cb, loss_cb] - - print(f'Start run training, total epoch: {epoch_size}.') - model.train(epoch_size, train_dataset, callbacks=cb) - if device_num == 1 or device_id == 0: - print(f'Start run evaluation.') - output = model.eval(eval_dataset) - print(f'Evaluation result: {output}.') - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='ResNet50 train.') - parser.add_argument('--data_url', required=True, default=None, help='Location of data.') - parser.add_argument('--train_url', required=True, default=None, help='Location of training outputs.') - parser.add_argument('--epoch_size', type=int, default=90, help='Train epoch size.') - - args_opt, unknown = parser.parse_known_args() - - resnet50_train(args_opt) - print('ResNet50 training success!') diff --git a/tutorials/tutorial_code/save_model/save_model.py b/tutorials/tutorial_code/save_model/save_model.py deleted file mode 100644 index c35cac8d7b44e074b92e268689afae54cabaf6be..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/save_model/save_model.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# 
Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ - -"""Save Model Tutorial -This sample code is applicable to CPU, GPU and Ascend. -""" -import os -import mindspore.nn as nn -from mindspore.nn import SoftmaxCrossEntropyWithLogits, Accuracy -from mindspore import context, Model, Tensor, load_checkpoint, export -from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor -from src.lenet import LeNet5 -from src.datasets import create_dataset -import numpy as np - -context.set_context(mode=context.GRAPH_MODE, device_target="CPU") - -def output_file_formats(ckpt_path, net_work, batch_size, output_file_name, output_format): - load_checkpoint(ckpt_path, net=net_work) - input_data = np.random.uniform(0.0, 1.0, size=batch_size).astype(np.float32) - export(net_work, Tensor(input_data), file_name=output_file_name, file_format=output_format) - -if __name__ == "__main__": - lr = 0.01 - momentum = 0.9 - epoch_size = 1 - model_path = "./models/ckpt/save_model/" - model_file = model_path + "checkpoint_lenet-1_1875.ckpt" - - os.system('rm -f {0}*.ckpt {0}*.meta {0}*.pb'.format(model_path)) - - network = LeNet5() - net_opt = nn.Momentum(network.trainable_params(), lr, momentum) - net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') - model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()}) - - config_ck = CheckpointConfig(save_checkpoint_steps=1875, 
keep_checkpoint_max=16) - ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet", directory=model_path, config=config_ck) - - train_dataset = create_dataset("./datasets/MNIST_Data/train") - eval_dataset = create_dataset("./datasets/MNIST_Data/test") - print("===============start training===============") - model.train(epoch_size, train_dataset, callbacks=[ckpoint_cb, LossMonitor()], dataset_sink_mode=False) - print("===============saving models in mindir and onnx formats===============") - - output_file_formats(model_file, network, [32, 1, 32, 32], "checkpoint_lenet", "MINDIR") - output_file_formats(model_file, network, [32, 1, 32, 32], "checkpoint_lenet", "ONNX") diff --git a/tutorials/tutorial_code/save_model/src/__init__.py b/tutorials/tutorial_code/save_model/src/__init__.py deleted file mode 100644 index 919b0579077e1ca4dc63cedb7ae3a1a3e0134283..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/save_model/src/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================ diff --git a/tutorials/tutorial_code/save_model/src/datasets.py b/tutorials/tutorial_code/save_model/src/datasets.py deleted file mode 100644 index 9f77df9573f4f799a8b3dbd8aacabf5ce2b07234..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/save_model/src/datasets.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ - -""" -Processing datasets -""" -import mindspore.dataset.vision.c_transforms as CV -from mindspore.dataset.vision import Inter -import mindspore.dataset as ds -import mindspore.dataset.transforms.c_transforms as CT -from mindspore import dtype as mstype - -def create_dataset(data_path, batch_size=32, repeat_size=1, - num_parallel_workers=1): - """ - create dataset for train or test - """ - # define dataset - mnist_ds = ds.MnistDataset(data_path) - - resize_height, resize_width = 32, 32 - rescale = 1.0 / 255.0 - shift = 0.0 - rescale_nml = 1 / 0.3081 - shift_nml = -1 * 0.1307 / 0.3081 - - # define map operations - resize_op = CV.Resize((resize_height, resize_width), interpolation=Inter.LINEAR) # Bilinear mode - rescale_nml_op = CV.Rescale(rescale_nml, shift_nml) - rescale_op = CV.Rescale(rescale, shift) - hwc2chw_op = CV.HWC2CHW() - type_cast_op = CT.TypeCast(mstype.int32) - - # apply map operations on 
images - mnist_ds = mnist_ds.map(input_columns="label", operations=type_cast_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=resize_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=rescale_nml_op, num_parallel_workers=num_parallel_workers) - mnist_ds = mnist_ds.map(input_columns="image", operations=hwc2chw_op, num_parallel_workers=num_parallel_workers) - - # apply DatasetOps - buffer_size = 10000 - mnist_ds = mnist_ds.shuffle(buffer_size=buffer_size) # 10000 as in LeNet train script - mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True) - mnist_ds = mnist_ds.repeat(repeat_size) - - return mnist_ds diff --git a/tutorials/tutorial_code/save_model/src/lenet.py b/tutorials/tutorial_code/save_model/src/lenet.py deleted file mode 100644 index 397217d46a9e595c73e84eaa5ee0a35885a245da..0000000000000000000000000000000000000000 --- a/tutorials/tutorial_code/save_model/src/lenet.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================ -""" -LeNet5 network -""" -import mindspore.nn as nn -from mindspore.common.initializer import Normal - -class LeNet5(nn.Cell): - """ - Lenet network - - Args: - num_class (int): the number of classes. Default: 10. - num_channel (int): the number of channels. Default: 1. - - Returns: - Tensor, output tensor - Examples: - >>> LeNet(num_class=10) - - """ - - def __init__(self, num_class=10, num_channel=1): - super(LeNet5, self).__init__() - self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid') - self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid') - self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=Normal(0.02)) - self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.02)) - self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.02)) - self.relu = nn.ReLU() - self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) - self.flatten = nn.Flatten() - - def construct(self, x): - x = self.max_pool2d(self.relu(self.conv1(x))) - x = self.max_pool2d(self.relu(self.conv2(x))) - x = self.flatten(x) - x = self.relu(self.fc1(x)) - x = self.relu(self.fc2(x)) - x = self.fc3(x) - return x